From 8f2d13c684e282b258e5fbc73744eda4dc5f3cb6 Mon Sep 17 00:00:00 2001
From: Dhruv Nair
Date: Thu, 29 Feb 2024 22:47:39 +0530
Subject: [PATCH] Fix setting fp16 dtype in AnimateDiff convert script. (#7127)

* update

* update
---
 scripts/convert_animatediff_motion_module_to_diffusers.py | 5 ++++-
 1 file changed, 4 insertions(+), 1 deletion(-)

diff --git a/scripts/convert_animatediff_motion_module_to_diffusers.py b/scripts/convert_animatediff_motion_module_to_diffusers.py
index 9c5d236fd7..ceb967acd3 100644
--- a/scripts/convert_animatediff_motion_module_to_diffusers.py
+++ b/scripts/convert_animatediff_motion_module_to_diffusers.py
@@ -30,6 +30,7 @@ def get_args():
     parser.add_argument("--output_path", type=str, required=True)
     parser.add_argument("--use_motion_mid_block", action="store_true")
     parser.add_argument("--motion_max_seq_length", type=int, default=32)
+    parser.add_argument("--save_fp16", action="store_true")
 
     return parser.parse_args()
 
@@ -48,4 +49,6 @@ if __name__ == "__main__":
     # skip loading position embeddings
     adapter.load_state_dict(conv_state_dict, strict=False)
     adapter.save_pretrained(args.output_path)
-    adapter.save_pretrained(args.output_path, variant="fp16", torch_dtype=torch.float16)
+
+    if args.save_fp16:
+        adapter.to(torch.float16).save_pretrained(args.output_path, variant="fp16")
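
Note: the removed line passed torch_dtype to save_pretrained, which does not cast the
weights, so the fp16 variant was never actually saved in half precision; the fix casts
the adapter with .to(torch.float16) before saving and gates the extra save behind the
new --save_fp16 flag. A minimal sketch of consuming the converted output, assuming the
script was run with --save_fp16 and "<output_path>" stands in for the directory passed
to --output_path:

    # Load the fp16 variant written by the convert script when --save_fp16 is set.
    # "<output_path>" is a placeholder for the --output_path directory, not a real path.
    import torch
    from diffusers import MotionAdapter

    adapter = MotionAdapter.from_pretrained(
        "<output_path>", variant="fp16", torch_dtype=torch.float16
    )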