diff --git a/tests/models/autoencoders/test_models_autoencoder_dc.py b/tests/models/autoencoders/test_models_autoencoder_dc.py
index d34001e7b9..b1b5531d01 100644
--- a/tests/models/autoencoders/test_models_autoencoder_dc.py
+++ b/tests/models/autoencoders/test_models_autoencoder_dc.py
@@ -82,3 +82,7 @@ class AutoencoderDCTests(ModelTesterMixin, AutoencoderTesterMixin, unittest.Test
     @unittest.skipIf(IS_GITHUB_ACTIONS, reason="Skipping test inside GitHub Actions environment")
     def test_layerwise_casting_inference(self):
         super().test_layerwise_casting_inference()
+
+    @unittest.skipIf(IS_GITHUB_ACTIONS, reason="Skipping test inside GitHub Actions environment")
+    def test_layerwise_casting_memory(self):
+        super().test_layerwise_casting_memory()
diff --git a/tests/pipelines/test_pipelines_common.py b/tests/pipelines/test_pipelines_common.py
index 2af4ad0314..e2bbce7b0e 100644
--- a/tests/pipelines/test_pipelines_common.py
+++ b/tests/pipelines/test_pipelines_common.py
@@ -1422,7 +1422,18 @@ class PipelineTesterMixin:
     def test_save_load_float16(self, expected_max_diff=1e-2):
         components = self.get_dummy_components()
         for name, module in components.items():
-            if hasattr(module, "half"):
+            # Account for components with _keep_in_fp32_modules
+            if hasattr(module, "_keep_in_fp32_modules") and module._keep_in_fp32_modules is not None:
+                for param_name, param in module.named_parameters():
+                    if any(
+                        module_to_keep_in_fp32 in param_name.split(".")
+                        for module_to_keep_in_fp32 in module._keep_in_fp32_modules
+                    ):
+                        param.data = param.data.to(torch_device).to(torch.float32)
+                    else:
+                        param.data = param.data.to(torch_device).to(torch.float16)
+
+            elif hasattr(module, "half"):
                 components[name] = module.to(torch_device).half()
 
         pipe = self.pipeline_class(**components)
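For context on the second hunk: calling `module.half()` downcasts every parameter, which would silently defeat a model's `_keep_in_fp32_modules` list, so the test now casts parameter by parameter. Below is a minimal, self-contained sketch of that selective cast in plain PyTorch. `TinyModel` and `cast_like_the_test` are hypothetical names invented for illustration, and a plain `nn.Module` stands in for a diffusers `ModelMixin` subclass; `device` plays the role of `torch_device` in the real test.

```python
# A minimal sketch of the selective fp16 cast performed by the test above:
# parameters under any module listed in _keep_in_fp32_modules stay in
# float32, everything else is moved to float16.
import torch
import torch.nn as nn


class TinyModel(nn.Module):
    # Mirrors the class attribute checked by the test; "norm" is a
    # made-up entry for this example. forward() is omitted because only
    # the dtype handling is being demonstrated.
    _keep_in_fp32_modules = ["norm"]

    def __init__(self):
        super().__init__()
        self.proj = nn.Linear(4, 4)
        self.norm = nn.LayerNorm(4)


def cast_like_the_test(module: nn.Module, device: str = "cpu") -> None:
    keep_in_fp32 = getattr(module, "_keep_in_fp32_modules", None) or []
    for param_name, param in module.named_parameters():
        # Match on path components, so "norm" hits "norm.weight" but a
        # plain substring check would also (wrongly) hit "denorm.weight".
        if any(m in param_name.split(".") for m in keep_in_fp32):
            param.data = param.data.to(device, torch.float32)
        else:
            param.data = param.data.to(device, torch.float16)


model = TinyModel()
cast_like_the_test(model)
assert model.norm.weight.dtype == torch.float32
assert model.proj.weight.dtype == torch.float16
```

The `param_name.split(".")` membership test is the same design choice the diff makes: it matches whole module names along the parameter path rather than arbitrary substrings, which keeps short entries in `_keep_in_fp32_modules` from accidentally catching unrelated parameters.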