1
0
mirror of https://github.com/huggingface/diffusers.git synced 2026-01-27 17:22:53 +03:00

adjust unit tests for test_save_load_float16 (#12500)

* adjust unit tests for wan pipeline

Signed-off-by: Liu, Kaixuan <kaixuan.liu@intel.com>

* update code

Signed-off-by: Liu, Kaixuan <kaixuan.liu@intel.com>

* avoid adjusting common `get_dummy_components` API

Signed-off-by: Liu, Kaixuan <kaixuan.liu@intel.com>

* use `from_pretrained` for `transformer` and `transformer_2`

Signed-off-by: Liu, Kaixuan <kaixuan.liu@intel.com>

* update code

Signed-off-by: Liu, Kaixuan <kaixuan.liu@intel.com>

* update

Signed-off-by: Liu, Kaixuan <kaixuan.liu@intel.com>

---------

Signed-off-by: Liu, Kaixuan <kaixuan.liu@intel.com>
Co-authored-by: Sayak Paul <spsayakpaul@gmail.com>
Co-authored-by: Dhruv Nair <dhruv.nair@gmail.com>
This commit is contained in:
kaixuanliu
2025-11-13 14:27:12 +08:00
committed by GitHub
parent d8e4805816
commit 7a001c3ee2

View File

@@ -1422,7 +1422,18 @@ class PipelineTesterMixin:
def test_save_load_float16(self, expected_max_diff=1e-2):
components = self.get_dummy_components()
for name, module in components.items():
if hasattr(module, "half"):
# Account for components with _keep_in_fp32_modules
if hasattr(module, "_keep_in_fp32_modules") and module._keep_in_fp32_modules is not None:
for name, param in module.named_parameters():
if any(
module_to_keep_in_fp32 in name.split(".")
for module_to_keep_in_fp32 in module._keep_in_fp32_modules
):
param.data = param.data.to(torch_device).to(torch.float32)
else:
param.data = param.data.to(torch_device).to(torch.float16)
elif hasattr(module, "half"):
components[name] = module.to(torch_device).half()
pipe = self.pipeline_class(**components)