From 4abbbff6187389c290018e95f71dee6aaad44d32 Mon Sep 17 00:00:00 2001
From: ginjia
Date: Mon, 20 Nov 2023 19:43:29 +0800
Subject: [PATCH] fix an issue that ipex occupy too much memory, it will not
 impact per… (#5625)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

* fix an issue that ipex occupy too much memory, it will not impact performance

* make style

---------

Co-authored-by: root
Co-authored-by: Meng Guoqing
---
 examples/community/stable_diffusion_ipex.py | 9 +--------
 1 file changed, 1 insertion(+), 8 deletions(-)

diff --git a/examples/community/stable_diffusion_ipex.py b/examples/community/stable_diffusion_ipex.py
index 03eb137217..385227db0b 100644
--- a/examples/community/stable_diffusion_ipex.py
+++ b/examples/community/stable_diffusion_ipex.py
@@ -252,9 +252,7 @@ class StableDiffusionIPEXPipeline(DiffusionPipeline, TextualInversionLoaderMixin

         # optimize with ipex
         if dtype == torch.bfloat16:
-            self.unet = ipex.optimize(
-                self.unet.eval(), dtype=torch.bfloat16, inplace=True, sample_input=unet_input_example
-            )
+            self.unet = ipex.optimize(self.unet.eval(), dtype=torch.bfloat16, inplace=True)
             self.vae.decoder = ipex.optimize(self.vae.decoder.eval(), dtype=torch.bfloat16, inplace=True)
             self.text_encoder = ipex.optimize(self.text_encoder.eval(), dtype=torch.bfloat16, inplace=True)
             if self.safety_checker is not None:
@@ -264,8 +262,6 @@ class StableDiffusionIPEXPipeline(DiffusionPipeline, TextualInversionLoaderMixin
                 self.unet.eval(),
                 dtype=torch.float32,
                 inplace=True,
-                sample_input=unet_input_example,
-                level="O1",
                 weights_prepack=True,
                 auto_kernel_selection=False,
             )
@@ -273,7 +269,6 @@ class StableDiffusionIPEXPipeline(DiffusionPipeline, TextualInversionLoaderMixin
                 self.vae.decoder.eval(),
                 dtype=torch.float32,
                 inplace=True,
-                level="O1",
                 weights_prepack=True,
                 auto_kernel_selection=False,
             )
@@ -281,7 +276,6 @@ class StableDiffusionIPEXPipeline(DiffusionPipeline, TextualInversionLoaderMixin
                 self.text_encoder.eval(),
                 dtype=torch.float32,
                 inplace=True,
-                level="O1",
                 weights_prepack=True,
                 auto_kernel_selection=False,
             )
@@ -290,7 +284,6 @@ class StableDiffusionIPEXPipeline(DiffusionPipeline, TextualInversionLoaderMixin
                     self.safety_checker.eval(),
                     dtype=torch.float32,
                     inplace=True,
-                    level="O1",
                     weights_prepack=True,
                     auto_kernel_selection=False,
                 )
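
Below is a minimal, self-contained sketch of what the ipex.optimize() calls look like after this patch. It is not the pipeline code itself: it assumes intel_extension_for_pytorch is installed and uses a toy nn.Sequential as a stand-in for the pipeline's UNet, VAE decoder, and text encoder. It only illustrates the simplified invocation the diff introduces, with no sample_input and no explicit level="O1", which is the change the commit makes to address the memory issue.

import torch
import intel_extension_for_pytorch as ipex

# Toy stand-in for the pipeline's UNet / VAE decoder / text encoder (assumption).
model = torch.nn.Sequential(torch.nn.Linear(64, 64), torch.nn.ReLU()).eval()

# float32 branch as patched: no sample_input, no explicit level="O1";
# weight prepacking stays on, automatic kernel selection stays off.
optimized_fp32 = ipex.optimize(
    model,
    dtype=torch.float32,
    inplace=False,  # the pipeline passes inplace=True; False keeps this sketch side-effect free
    weights_prepack=True,
    auto_kernel_selection=False,
)

# bfloat16 branch as patched: only dtype and inplace are passed.
optimized_bf16 = ipex.optimize(model, dtype=torch.bfloat16, inplace=False)

example_input = torch.randn(1, 64)
with torch.no_grad():
    out_fp32 = optimized_fp32(example_input)
    # bfloat16 inference is typically run under CPU autocast.
    with torch.autocast(device_type="cpu", dtype=torch.bfloat16):
        out_bf16 = optimized_bf16(example_input)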