From 1066fe4cbc24ba21f9d0dec7886f35bb91afa281 Mon Sep 17 00:00:00 2001
From: Greg Hunkins
Date: Wed, 12 Jun 2024 11:50:13 -0400
Subject: [PATCH] =?UTF-8?q?=F0=9F=A4=AB=20Quiet=20IP=20Adapter=20Mask=20Wa?=
 =?UTF-8?q?rning=20(#8475)?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

* quiet attn parameters

* fix lint

* make style && make quality

---------

Co-authored-by: Sayak Paul
---
 src/diffusers/models/attention_processor.py | 5 ++++-
 1 file changed, 4 insertions(+), 1 deletion(-)

diff --git a/src/diffusers/models/attention_processor.py b/src/diffusers/models/attention_processor.py
index de4b072259..1dc9f69459 100644
--- a/src/diffusers/models/attention_processor.py
+++ b/src/diffusers/models/attention_processor.py
@@ -539,7 +539,10 @@ class Attention(nn.Module):
         # For standard processors that are defined here, `**cross_attention_kwargs` is empty
 
         attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys())
-        unused_kwargs = [k for k, _ in cross_attention_kwargs.items() if k not in attn_parameters]
+        quiet_attn_parameters = {"ip_adapter_masks"}
+        unused_kwargs = [
+            k for k, _ in cross_attention_kwargs.items() if k not in attn_parameters and k not in quiet_attn_parameters
+        ]
         if len(unused_kwargs) > 0:
             logger.warning(
                 f"cross_attention_kwargs {unused_kwargs} are not expected by {self.processor.__class__.__name__} and will be ignored."
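
For reference, below is a minimal standalone sketch of the filtering pattern this patch
introduces: kwargs in a small allowlist (here `ip_adapter_masks`) are dropped without a
warning, while any other kwarg the processor's `__call__` does not accept still logs one.
`DummyProcessor` and `filter_kwargs` are hypothetical names used only for illustration;
the `inspect.signature` check, the `quiet_attn_parameters` set, and the warning text
mirror the diff above.

    import inspect
    import logging

    logger = logging.getLogger(__name__)


    class DummyProcessor:
        # Hypothetical stand-in for an attention processor whose __call__
        # does not accept `ip_adapter_masks`.
        def __call__(self, hidden_states, scale=1.0):
            return hidden_states


    def filter_kwargs(processor, cross_attention_kwargs):
        # Parameters the processor's __call__ actually accepts.
        attn_parameters = set(inspect.signature(processor.__call__).parameters.keys())
        # Kwargs that are dropped quietly, i.e. without emitting a warning.
        quiet_attn_parameters = {"ip_adapter_masks"}
        unused_kwargs = [
            k for k in cross_attention_kwargs
            if k not in attn_parameters and k not in quiet_attn_parameters
        ]
        if len(unused_kwargs) > 0:
            logger.warning(
                f"cross_attention_kwargs {unused_kwargs} are not expected by "
                f"{processor.__class__.__name__} and will be ignored."
            )
        # Forward only the kwargs the processor understands.
        return {k: v for k, v in cross_attention_kwargs.items() if k in attn_parameters}


    # `ip_adapter_masks` is dropped silently; `bogus` still triggers the warning.
    kwargs = filter_kwargs(
        DummyProcessor(), {"scale": 0.5, "ip_adapter_masks": None, "bogus": 1}
    )
    print(kwargs)  # {'scale': 0.5}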