From ac07b6dc6aa3db18e1e6726194e9258fe440ff5b Mon Sep 17 00:00:00 2001
From: jnhuang <115004993+Rbrq03@users.noreply.github.com>
Date: Wed, 13 Mar 2024 22:52:44 +0800
Subject: [PATCH] Fix Wrong Text-encoder Grad Setting in Custom_Diffusion
 Training (#7302)

fix index in set textencoder grad

Co-authored-by: Sayak Paul
---
 examples/custom_diffusion/train_custom_diffusion.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/examples/custom_diffusion/train_custom_diffusion.py b/examples/custom_diffusion/train_custom_diffusion.py
index d80e3388a6..454faff0a1 100644
--- a/examples/custom_diffusion/train_custom_diffusion.py
+++ b/examples/custom_diffusion/train_custom_diffusion.py
@@ -1178,7 +1178,7 @@ def main(args):
                         grads_text_encoder = text_encoder.get_input_embeddings().weight.grad
                     # Get the index for tokens that we want to zero the grads for
                     index_grads_to_zero = torch.arange(len(tokenizer)) != modifier_token_id[0]
-                    for i in range(len(modifier_token_id[1:])):
+                    for i in range(1, len(modifier_token_id)):
                         index_grads_to_zero = index_grads_to_zero & (
                             torch.arange(len(tokenizer)) != modifier_token_id[i]
                         )
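
For context, the change corrects an off-by-one in the gradient mask construction: with n added modifier tokens, the old loop `range(len(modifier_token_id[1:]))` runs i over 0..n-2, so the first token is redundantly re-excluded while the last token is never excluded from `index_grads_to_zero`, and its embedding gradient is zeroed along with the frozen tokens. A minimal sketch, using hypothetical token ids not taken from the patch, that illustrates which tokens each loop actually visits:

# Hypothetical ids for three newly added modifier tokens (illustrative only).
modifier_token_id = [49408, 49409, 49410]

# Old loop: range(len(modifier_token_id[1:])) == range(2) -> i in {0, 1}.
# modifier_token_id[2] is never visited, so it stays in the zero-grad mask.
old_visited = [modifier_token_id[i] for i in range(len(modifier_token_id[1:]))]
assert old_visited == [49408, 49409]

# Fixed loop: range(1, len(modifier_token_id)) -> i in {1, 2}.
# Every added token beyond the first (which is handled just above the loop)
# is now excluded from the mask, so its gradient is preserved.
new_visited = [modifier_token_id[i] for i in range(1, len(modifier_token_id))]
assert new_visited == [49409, 49410]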