
fix modules_to_save bug

Author: lavinal712
Date: 2025-03-23 10:27:40 +00:00
Parent: 7c25a06591
Commit: 0719c20f5e
2 changed files with 15 additions and 31 deletions

@@ -123,18 +123,20 @@ def _maybe_adjust_config_for_control_lora(config):
    modules_to_save = []
    for module in target_modules_before:
        if "base_layer" in module:
            continue
        elif "modules_to_save" in module:
            base_name = module.split(".modules_to_save.", 1)[0]
            modules_to_save.append(base_name)
        else:
            if module.endswith("weight"):
                base_name = ".".join(module.split(".")[:-1])
                if base_name and base_name not in modules_to_save:
                    target_modules.append(module)
                    modules_to_save.append(base_name)
            elif module.endswith("bias"):
                base_name = ".".join(module.split(".")[:-1])
                if ".".join([base_name, "weight"]) in target_modules_before:
                    modules_to_save.append(base_name)
                else:
                    target_modules.append(base_name)
            else:
                target_modules.append(module)
-    config["target_modules"] = target_modules
-    config["modules_to_save"] = modules_to_save
+    config["target_modules"] = list(set(target_modules))
+    config["modules_to_save"] = list(set(modules_to_save))
    return config
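
The list(set(...)) change is the actual fix here: the same base module can be reached more than once (for example once through its weight key and once through its bias key), so target_modules and modules_to_save could accumulate duplicate entries. A minimal sketch of the effect, with hypothetical module names:

    # Hypothetical module names, not taken from a real checkpoint.
    target_modules = ["to_q.weight", "to_q.weight", "to_k.weight"]
    modules_to_save = ["norm", "norm", "controlnet_mid_block"]

    config = {}
    config["target_modules"] = list(set(target_modules))    # duplicates collapse
    config["modules_to_save"] = list(set(modules_to_save))  # note: set() does not preserve order

    print(sorted(config["modules_to_save"]))  # ['controlnet_mid_block', 'norm']
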
@@ -299,6 +301,9 @@ class PeftAdapterMixin:
            lora_config_kwargs = _maybe_adjust_config(lora_config_kwargs)
            if is_control_lora:
                lora_config_kwargs = _maybe_adjust_config_for_control_lora(lora_config_kwargs)
+            import json
+            with open("lora_config_kwargs.json", "w") as f:
+                json.dump(lora_config_kwargs, f, indent=2)
            if "use_dora" in lora_config_kwargs:
                if lora_config_kwargs["use_dora"]:
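
The three json lines write the adjusted kwargs to disk and read like a leftover debugging aid rather than part of the fix itself. Downstream of this point, kwargs of this shape are what a PEFT LoraConfig is built from; a minimal sketch with hypothetical values, assuming peft is installed:

    from peft import LoraConfig

    # Hypothetical values illustrating the shape of lora_config_kwargs.
    lora_config_kwargs = {
        "r": 4,
        "lora_alpha": 4,
        "target_modules": ["to_q", "to_k", "to_v"],
        "modules_to_save": ["norm1", "norm2"],  # trained and saved in full, not as LoRA factors
    }
    lora_config = LoraConfig(**lora_config_kwargs)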

@@ -51,55 +51,34 @@ UNET_TO_DIFFUSERS = {
}
CONTROL_LORA_TO_DIFFUSERS = {
    ".to_q.bias": ".to_q.base_layer.bias",
    ".to_q.down": ".to_q.lora_A.weight",
    ".to_q.up": ".to_q.lora_B.weight",
    ".to_k.bias": ".to_k.base_layer.bias",
    ".to_k.down": ".to_k.lora_A.weight",
    ".to_k.up": ".to_k.lora_B.weight",
    ".to_v.bias": ".to_v.base_layer.bias",
    ".to_v.down": ".to_v.lora_A.weight",
    ".to_v.up": ".to_v.lora_B.weight",
    ".to_out.0.bias": ".to_out.0.base_layer.bias",
    ".to_out.0.down": ".to_out.0.lora_A.weight",
    ".to_out.0.up": ".to_out.0.lora_B.weight",
    ".ff.net.0.proj.bias": ".ff.net.0.proj.base_layer.bias",
    ".ff.net.0.proj.down": ".ff.net.0.proj.lora_A.weight",
    ".ff.net.0.proj.up": ".ff.net.0.proj.lora_B.weight",
    ".ff.net.2.bias": ".ff.net.2.base_layer.bias",
    ".ff.net.2.down": ".ff.net.2.lora_A.weight",
    ".ff.net.2.up": ".ff.net.2.lora_B.weight",
    ".proj_in.bias": ".proj_in.base_layer.bias",
    ".proj_in.down": ".proj_in.lora_A.weight",
    ".proj_in.up": ".proj_in.lora_B.weight",
    ".proj_out.bias": ".proj_out.base_layer.bias",
    ".proj_out.down": ".proj_out.lora_A.weight",
    ".proj_out.up": ".proj_out.lora_B.weight",
    ".conv.bias": ".conv.base_layer.bias",
    ".conv.down": ".conv.lora_A.weight",
    ".conv.up": ".conv.lora_B.weight",
    **{f".conv{i}.bias": f".conv{i}.base_layer.bias" for i in range(1, 3)},
    **{f".conv{i}.down": f".conv{i}.lora_A.weight" for i in range(1, 3)},
    **{f".conv{i}.up": f".conv{i}.lora_B.weight" for i in range(1, 3)},
    "conv_in.bias": "conv_in.base_layer.bias",
    "conv_in.down": "conv_in.lora_A.weight",
    "conv_in.up": "conv_in.lora_B.weight",
    ".conv_shortcut.bias": ".conv_shortcut.base_layer.bias",
    ".conv_shortcut.down": ".conv_shortcut.lora_A.weight",
    ".conv_shortcut.up": ".conv_shortcut.lora_B.weight",
    **{f".linear_{i}.bias": f".linear_{i}.base_layer.bias" for i in range(1, 3)},
    **{f".linear_{i}.down": f".linear_{i}.lora_A.weight" for i in range(1, 3)},
    **{f".linear_{i}.up": f".linear_{i}.lora_B.weight" for i in range(1, 3)},
    "time_emb_proj.bias": "time_emb_proj.base_layer.bias",
    "time_emb_proj.down": "time_emb_proj.lora_A.weight",
    "time_emb_proj.up": "time_emb_proj.lora_B.weight",
    "controlnet_cond_embedding.conv_in.bias": "controlnet_cond_embedding.conv_in.modules_to_save.bias",
    "controlnet_cond_embedding.conv_out.bias": "controlnet_cond_embedding.conv_out.modules_to_save.bias",
    **{f"controlnet_cond_embedding.blocks.{i}.bias": f"controlnet_cond_embedding.blocks.{i}.modules_to_save.bias" for i in range(6)},
    **{f"controlnet_down_blocks.{i}.bias": f"controlnet_down_blocks.{i}.modules_to_save.bias" for i in range(9)},
    "controlnet_mid_block.bias": "controlnet_mid_block.modules_to_save.bias",
    ".norm.bias": ".norm.modules_to_save.bias",
    **{f".norm{i}.bias": f".norm{i}.modules_to_save.bias" for i in range(1, 4)},
}
DIFFUSERS_TO_PEFT = {
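
A mapping like CONTROL_LORA_TO_DIFFUSERS is typically applied by substring replacement over the original checkpoint keys. A minimal sketch of that idea (a hypothetical helper, not the actual diffusers conversion loop):

    def _convert_control_lora_key(key: str) -> str:
        # Hypothetical helper: rewrite one checkpoint key using the first
        # matching pattern from CONTROL_LORA_TO_DIFFUSERS above.
        for src, dst in CONTROL_LORA_TO_DIFFUSERS.items():
            if src in key:
                return key.replace(src, dst)
        return key

    print(_convert_control_lora_key("mid_block.attentions.0.proj_in.down"))
    # -> mid_block.attentions.0.proj_in.lora_A.weight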