mirror of https://github.com/huggingface/diffusers.git
synced 2026-01-29 07:22:12 +03:00
update
@@ -417,7 +417,9 @@ class ChromaPipeline(
         width,
         negative_prompt=None,
         prompt_embeds=None,
+        prompt_attention_mask=None,
         negative_prompt_embeds=None,
+        negative_prompt_attention_mask=None,
         callback_on_step_end_tensor_inputs=None,
         max_sequence_length=None,
     ):
@@ -451,6 +453,14 @@ class ChromaPipeline(
                 f" {negative_prompt_embeds}. Please make sure to only forward one of the two."
             )
 
+        if prompt_embeds is not None and prompt_attention_mask is None:
+            raise ValueError("Cannot provide `prompt_embeds` without also providing `prompt_attention_mask`")
+
+        if negative_prompt_embeds is not None and negative_prompt_attention_mask is None:
+            raise ValueError(
+                "Cannot provide `negative_prompt_embeds` without also providing `negative_prompt_attention_mask`"
+            )
+
         if max_sequence_length is not None and max_sequence_length > 512:
             raise ValueError(f"`max_sequence_length` cannot be greater than 512 but is {max_sequence_length}")
 
@@ -722,7 +732,9 @@ class ChromaPipeline(
             width,
             negative_prompt=negative_prompt,
             prompt_embeds=prompt_embeds,
+            prompt_attention_mask=prompt_attention_mask,
             negative_prompt_embeds=negative_prompt_embeds,
+            negative_prompt_attention_mask=negative_prompt_attention_mask,
             callback_on_step_end_tensor_inputs=callback_on_step_end_tensor_inputs,
             max_sequence_length=max_sequence_length,
         )
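The two ChromaPipeline hunks above thread the attention masks through check_inputs and require each precomputed embedding to arrive with its mask. Below is a minimal, self-contained sketch of that pairing guard; validate_embed_mask_pairs is a hypothetical helper chosen here for illustration, not part of the diffusers API.

# Sketch of the guard added to check_inputs: each precomputed embedding must be
# accompanied by the attention mask it was encoded with, so downstream code
# knows which token positions are padding. Helper name is illustrative only.
from typing import Optional

import torch


def validate_embed_mask_pairs(
    prompt_embeds: Optional[torch.Tensor] = None,
    prompt_attention_mask: Optional[torch.Tensor] = None,
    negative_prompt_embeds: Optional[torch.Tensor] = None,
    negative_prompt_attention_mask: Optional[torch.Tensor] = None,
) -> None:
    if prompt_embeds is not None and prompt_attention_mask is None:
        raise ValueError("Cannot provide `prompt_embeds` without also providing `prompt_attention_mask`")
    if negative_prompt_embeds is not None and negative_prompt_attention_mask is None:
        raise ValueError(
            "Cannot provide `negative_prompt_embeds` without also providing `negative_prompt_attention_mask`"
        )


# Passing embeddings without the matching mask now fails fast at input
# validation time instead of surfacing later in the pipeline.
embeds = torch.randn(1, 512, 4096)  # dummy tensor standing in for precomputed prompt embeddings
try:
    validate_embed_mask_pairs(prompt_embeds=embeds)
except ValueError as err:
    print(err)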
@@ -492,14 +492,13 @@ class ChromaImg2ImgPipeline(
                 f"Cannot forward both `negative_prompt`: {negative_prompt} and `negative_prompt_embeds`:"
                 f" {negative_prompt_embeds}. Please make sure to only forward one of the two."
             )
-        if prompt_attention_mask is not None and negative_prompt_attention_mask is None:
-            raise ValueError(
-                "Cannot provide `prompt_attention_mask` without also providing `negative_prompt_attention_mask`"
-            )
 
-        if negative_prompt_attention_mask is not None and prompt_attention_mask is None:
+        if prompt_embeds is not None and prompt_attention_mask is None:
+            raise ValueError("Cannot provide `prompt_embeds` without also providing `prompt_attention_mask`")
+
+        if negative_prompt_embeds is not None and negative_prompt_attention_mask is None:
             raise ValueError(
-                "Cannot provide `negative_prompt_attention_mask` without also providing `prompt_attention_mask`"
+                "Cannot provide `negative_prompt_embeds` without also providing `negative_prompt_attention_mask`"
             )
 
         if max_sequence_length is not None and max_sequence_length > 512:
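The ChromaImg2ImgPipeline hunk replaces a mask-to-mask coupling (both attention masks or neither) with the same embedding-to-mask coupling used in ChromaPipeline. The sketch below contrasts the two rules on plain Python values; old_rule and new_rule are illustrative stand-ins, not pipeline methods.

# Illustrative stand-ins for the removed and added validation in
# ChromaImg2ImgPipeline.check_inputs (hypothetical helpers, not diffusers API).


def old_rule(prompt_attention_mask, negative_prompt_attention_mask) -> bool:
    # Removed logic: the two masks had to be supplied together, independent of the embeds.
    if prompt_attention_mask is not None and negative_prompt_attention_mask is None:
        return False
    if negative_prompt_attention_mask is not None and prompt_attention_mask is None:
        return False
    return True


def new_rule(prompt_embeds, prompt_attention_mask, negative_prompt_embeds, negative_prompt_attention_mask) -> bool:
    # Added logic: a mask is required exactly when its embedding is supplied.
    if prompt_embeds is not None and prompt_attention_mask is None:
        return False
    if negative_prompt_embeds is not None and negative_prompt_attention_mask is None:
        return False
    return True


# prompt_embeds without prompt_attention_mask: the old rule saw no masks at all
# and accepted the call, the new rule rejects it.
print(old_rule(None, None))                      # True
print(new_rule(object(), None, None, None))      # False

# A positive mask supplied with no negative embeddings at all: rejected by the
# old rule, accepted by the new one.
print(old_rule(object(), None))                  # False
print(new_rule(object(), object(), None, None))  # True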