From f20e4afbaad0b3bb63f036e653dc92fc3c55436a Mon Sep 17 00:00:00 2001
From: DN6
Date: Tue, 10 Jun 2025 23:56:08 +0530
Subject: [PATCH] update

---
 src/diffusers/pipelines/flux/pipeline_flux.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/src/diffusers/pipelines/flux/pipeline_flux.py b/src/diffusers/pipelines/flux/pipeline_flux.py
index bc9a147b4c..5e2df4c910 100644
--- a/src/diffusers/pipelines/flux/pipeline_flux.py
+++ b/src/diffusers/pipelines/flux/pipeline_flux.py
@@ -248,7 +248,7 @@ class FluxPipeline(
             padding="max_length",
             max_length=max_sequence_length,
             truncation=True,
-            return_length=(self.variant == "chroma"),
+            return_length=True,
             return_overflowing_tokens=False,
             return_tensors="pt",
         )
@@ -262,6 +262,7 @@ class FluxPipeline(
                 f" {max_sequence_length} tokens: {removed_text}"
             )
 
+        text_inputs.attention_mask[:, : text_inputs.length + 1] = 1.0
         prompt_embeds = self.text_encoder_2(
             text_input_ids.to(device), output_hidden_states=False, attention_mask=text_inputs.attention_mask.to(device)
         )[0]
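
For context, a minimal standalone sketch of what the patched tokenization step does. This is not part of the patch: the checkpoint ("google/t5-v1_1-small" standing in for the pipeline's tokenizer_2), the prompt, and the batch size of one are illustrative assumptions, and the max_sequence_length value is the usual Flux default rather than something this commit sets.

# Standalone sketch of the patched tokenizer call and mask adjustment.
# Assumptions (not from the patch): a small T5 tokenizer as a stand-in for
# tokenizer_2, a single prompt, and max_sequence_length = 512.
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("google/t5-v1_1-small")
max_sequence_length = 512

text_inputs = tokenizer(
    ["a photo of a cat"],
    padding="max_length",
    max_length=max_sequence_length,
    truncation=True,
    return_length=True,  # now unconditional, so text_inputs.length is always available
    return_overflowing_tokens=False,
    return_tensors="pt",
)

# The line the patch adds: set the mask to 1 for every position up to and
# including index text_inputs.length, i.e. one position past the reported
# length. With a single prompt, length is a one-element tensor and can be
# used directly as a slice bound, which is what this indexing relies on.
text_inputs.attention_mask[:, : text_inputs.length + 1] = 1.0

# Inspect what the tokenizer reports versus what the mask now covers.
print(text_inputs.length)
print(text_inputs.attention_mask.sum(dim=1))

The resulting attention_mask is what the hunk at line 262 passes to text_encoder_2 via attention_mask=text_inputs.attention_mask.to(device).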