
[lora_conversion] Enhance key handling for OneTrainer components in LORA conversion utility (#11441) (#11487)

* [lora_conversion] Enhance key handling for OneTrainer components in LORA conversion utility (#11441)

* Update src/diffusers/loaders/lora_conversion_utils.py

Co-authored-by: Sayak Paul <spsayakpaul@gmail.com>

---------

Co-authored-by: Sayak Paul <spsayakpaul@gmail.com>
Author: Valeriy Selitskiy
Date: 2025-05-06 15:14:58 +02:00
Committed by: GitHub
Parent: 8c661ea586
Commit: 79371661d1


@@ -727,8 +727,25 @@ def _convert_kohya_flux_lora_to_diffusers(state_dict):
elif k.startswith("lora_te1_"):
has_te_keys = True
continue
elif k.startswith("lora_transformer_context_embedder"):
diffusers_key = "context_embedder"
elif k.startswith("lora_transformer_norm_out_linear"):
diffusers_key = "norm_out.linear"
elif k.startswith("lora_transformer_proj_out"):
diffusers_key = "proj_out"
elif k.startswith("lora_transformer_x_embedder"):
diffusers_key = "x_embedder"
elif k.startswith("lora_transformer_time_text_embed_guidance_embedder_linear_"):
i = int(k.split("lora_transformer_time_text_embed_guidance_embedder_linear_")[-1])
diffusers_key = f"time_text_embed.guidance_embedder.linear_{i}"
elif k.startswith("lora_transformer_time_text_embed_text_embedder_linear_"):
i = int(k.split("lora_transformer_time_text_embed_text_embedder_linear_")[-1])
diffusers_key = f"time_text_embed.text_embedder.linear_{i}"
elif k.startswith("lora_transformer_time_text_embed_timestep_embedder_linear_"):
i = int(k.split("lora_transformer_time_text_embed_timestep_embedder_linear_")[-1])
diffusers_key = f"time_text_embed.timestep_embedder.linear_{i}"
else:
raise NotImplementedError
raise NotImplementedError(f"Handling for key ({k}) is not implemented.")
if "attn_" in k:
if "_to_out_0" in k: