Mirror of https://github.com/huggingface/diffusers.git

[CI] More fixes for Fast GPU Tests on main (#9300)

update
Dhruv Nair authored 2024-09-02 17:51:48 +05:30, committed by GitHub
parent 0e6a8403f6
commit 007ad0e2aa
5 changed files with 12 additions and 0 deletions


@@ -417,6 +417,9 @@ class ModelTesterMixin:
     @require_torch_gpu
     def test_set_attn_processor_for_determinism(self):
+        if self.uses_custom_attn_processor:
+            return
+
         torch.use_deterministic_algorithms(False)
         if self.forward_requires_fresh_args:
             model = self.model_class(**self.init_dict)
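For context, a minimal sketch of the opt-out pattern this hunk introduces. Only uses_custom_attn_processor and the test method name come from the diff; the default value and the elided test body are assumptions.

import torch

class ModelTesterMixin:
    # Assumed default; test classes whose model ships its own attention
    # processor (e.g. the Flux transformer tests below) set this to True.
    uses_custom_attn_processor = False

    def test_set_attn_processor_for_determinism(self):
        if self.uses_custom_attn_processor:
            # With a custom processor there is no default AttnProcessor
            # variant to compare against, so the check is skipped.
            return
        torch.use_deterministic_algorithms(False)
        # ... build the model and compare outputs across attention
        # processors (body elided; it is not part of this hunk) ...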


@@ -32,6 +32,9 @@ class FluxTransformerTests(ModelTesterMixin, unittest.TestCase):
     # We override the items here because the transformer under consideration is small.
     model_split_percents = [0.7, 0.6, 0.6]
 
+    # Skip setting testing with default: AttnProcessor
+    uses_custom_attn_processor = True
+
     @property
     def dummy_input(self):
         batch_size = 1


@@ -25,6 +25,9 @@ class FluxPipelineFastTests(unittest.TestCase, PipelineTesterMixin):
     params = frozenset(["prompt", "height", "width", "guidance_scale", "prompt_embeds", "pooled_prompt_embeds"])
     batch_params = frozenset(["prompt"])
 
+    # there is no xformers processor for Flux
+    test_xformers_attention = False
+
     def get_dummy_components(self):
         torch.manual_seed(0)
         transformer = FluxTransformer2DModel(
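The test_xformers_attention flag is presumably consumed on the PipelineTesterMixin side roughly as sketched below; the consuming test's name and skip mechanics are assumptions, since that code is not part of this diff.

import unittest

class PipelineTesterMixin:
    # Default assumed True; pipeline test classes whose attention has no
    # xformers counterpart (Flux above, SD3-PAG and StableAudio below)
    # set this to False.
    test_xformers_attention = True

    def test_xformers_attention_forward_pass(self):  # hypothetical name
        if not self.test_xformers_attention:
            # self.skipTest is available once the mixin is combined with
            # unittest.TestCase, as in the classes in this diff.
            self.skipTest("no xformers attention processor for this pipeline")
        # ... enable xformers and verify outputs match the default path ...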


@@ -37,6 +37,7 @@ class StableDiffusion3PAGPipelineFastTests(unittest.TestCase, PipelineTesterMixin):
         ]
     )
     batch_params = frozenset(["prompt", "negative_prompt"])
+    test_xformers_attention = False
 
     def get_dummy_components(self):
         torch.manual_seed(0)


@@ -68,6 +68,8 @@ class StableAudioPipelineFastTests(PipelineTesterMixin, unittest.TestCase):
             "callback_steps",
         ]
     )
+    # There is no xformers version of the StableAudioPipeline custom attention processor
+    test_xformers_attention = False
 
     def get_dummy_components(self):
         torch.manual_seed(0)
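To exercise only the gated tests locally, pytest's -k name filter is enough; the target directory below is an assumption, since this page does not show the changed file paths.

pytest tests -k "set_attn_processor_for_determinism or xformers_attention"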