Mirror of https://github.com/huggingface/diffusers.git (synced 2026-01-27 17:22:53 +03:00)
Make passing the IP Adapter mask to the attention mechanism optional (#10346)
Make passing the IP Adapter mask to the attention mechanism optional if there is no need to apply it to a given IP Adapter.
This commit is contained in:
@@ -4839,6 +4839,8 @@ class IPAdapterAttnProcessor(nn.Module):
             )
         else:
             for index, (mask, scale, ip_state) in enumerate(zip(ip_adapter_masks, self.scale, ip_hidden_states)):
+                if mask is None:
+                    continue
                 if not isinstance(mask, torch.Tensor) or mask.ndim != 4:
                     raise ValueError(
                         "Each element of the ip_adapter_masks array should be a tensor with shape "
@@ -5056,6 +5058,8 @@ class IPAdapterAttnProcessor2_0(torch.nn.Module):
             )
         else:
             for index, (mask, scale, ip_state) in enumerate(zip(ip_adapter_masks, self.scale, ip_hidden_states)):
+                if mask is None:
+                    continue
                 if not isinstance(mask, torch.Tensor) or mask.ndim != 4:
                     raise ValueError(
                         "Each element of the ip_adapter_masks array should be a tensor with shape "
Reference in New Issue
Block a user