From 509741aea7de6c7fc37a64558aee6e63e6e03340 Mon Sep 17 00:00:00 2001
From: BootesVoid <78485654+AMohamedAakhil@users.noreply.github.com>
Date: Tue, 21 May 2024 23:59:11 -0700
Subject: [PATCH] fix: Attribute error in Logger object (logger.warning) (#8183)

---
 src/diffusers/loaders/single_file_utils.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/src/diffusers/loaders/single_file_utils.py b/src/diffusers/loaders/single_file_utils.py
index f90b4cc89a..4add9fd296 100644
--- a/src/diffusers/loaders/single_file_utils.py
+++ b/src/diffusers/loaders/single_file_utils.py
@@ -826,8 +826,8 @@ def convert_ldm_unet_checkpoint(checkpoint, config, extract_ema=False, **kwargs)
 
     # at least a 100 parameters have to start with `model_ema` in order for the checkpoint to be EMA
     if sum(k.startswith("model_ema") for k in keys) > 100 and extract_ema:
-        logger.warninging("Checkpoint has both EMA and non-EMA weights.")
-        logger.warninging(
+        logger.warning("Checkpoint has both EMA and non-EMA weights.")
+        logger.warning(
             "In this conversion only the EMA weights are extracted. If you want to instead extract the non-EMA"
             " weights (useful to continue fine-tuning), please make sure to remove the `--extract_ema` flag."
         )
@@ -837,7 +837,7 @@ def convert_ldm_unet_checkpoint(checkpoint, config, extract_ema=False, **kwargs)
                 unet_state_dict[key.replace(unet_key, "")] = checkpoint.get(flat_ema_key)
     else:
         if sum(k.startswith("model_ema") for k in keys) > 100:
-            logger.warninging(
+            logger.warning(
                 "In this conversion only the non-EMA weights are extracted. If you want to instead extract the EMA"
                 " weights (usually better for inference), please make sure to add the `--extract_ema` flag."
             )
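
Note for reviewers: `warninging` is not a method of Python's `Logger` class, so the removed lines raised `AttributeError` instead of emitting the intended warnings, and only when a checkpoint with more than 100 `model_ema` keys actually reached these branches. A minimal sketch of the failure mode, using the standard-library logger directly (diffusers' `get_logger` is assumed here to behave like a stdlib logger for attribute access):

```python
# Sketch: a misspelled Logger method fails only at call time, which is why
# this bug could hide in a rarely taken code path such as the EMA branch.
import logging

logger = logging.getLogger(__name__)

logger.warning("This works: 'warning' is a real Logger method.")

try:
    logger.warninging("This never prints.")  # the typo fixed by this patch
except AttributeError as err:
    # Logger objects have no 'warninging' attribute, so the lookup raises
    # at runtime rather than at import time.
    print(f"Caught: {err}")
```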