commit dae161ed26
parent c4bcf72084
Author: sayakpaul
Date:   2025-10-03 17:39:55 +05:30

4 changed files with 13 additions and 8 deletions


@@ -124,8 +124,10 @@ class TestCogVideoXLoRA(PeftLoraLoaderMixinTests):
     def test_simple_inference_with_text_denoiser_lora_unfused(self):
         super().test_simple_inference_with_text_denoiser_lora_unfused(expected_atol=9e-3)
 
-    def test_lora_scale_kwargs_match_fusion(self):
-        super().test_lora_scale_kwargs_match_fusion(expected_atol=9e-3, expected_rtol=9e-3)
+    def test_lora_scale_kwargs_match_fusion(self, base_pipe_output):
+        super().test_lora_scale_kwargs_match_fusion(
+            base_pipe_output=base_pipe_output, expected_atol=9e-3, expected_rtol=9e-3
+        )
 
     @pytest.mark.parametrize(
         "offload_type, use_stream",

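Across these files, base_pipe_output is a pytest fixture that the overridden tests now receive and forward to the shared mixin. A minimal, self-contained sketch of the pattern, assuming the fixture precomputes a baseline pipeline output (the class and fixture bodies below are illustrative stand-ins, not diffusers code):

import pytest


class BaseLoRATests:
    # Stand-in for the shared PeftLoraLoaderMixinTests mixin; the real one
    # presumably runs the LoRA-scaled pipeline and compares it against the
    # baseline within the given tolerances.
    def test_lora_scale_kwargs_match_fusion(self, base_pipe_output, expected_atol, expected_rtol):
        assert base_pipe_output is not None
        assert expected_atol > 0 and expected_rtol > 0


@pytest.fixture
def base_pipe_output():
    # Stand-in baseline: the real fixture would run the pipeline once
    # without LoRA so every test compares against the same reference.
    return [0.1, 0.2, 0.3]


class TestExampleLoRA(BaseLoRATests):
    def test_lora_scale_kwargs_match_fusion(self, base_pipe_output):
        # pytest injects base_pipe_output by name; the override forwards it
        # to the parent together with model-specific tolerances.
        super().test_lora_scale_kwargs_match_fusion(
            base_pipe_output=base_pipe_output, expected_atol=9e-3, expected_rtol=9e-3
        )

Computing the baseline once in a fixture means each per-model override only has to supply its own tolerances.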

@@ -754,7 +754,7 @@ class TestFluxLoRAIntegration:
     seed = 0
 
     @pytest.fixture(scope="function")
-    def pipeline(self, torch_device):
+    def pipeline(self):
         gc.collect()
         backend_empty_cache(torch_device)
         pipe = FluxPipeline.from_pretrained("black-forest-labs/FLUX.1-dev", torch_dtype=torch.bfloat16)
@@ -873,10 +873,10 @@ class TestFluxControlLoRAIntegration:
     prompt = "A robot made of exotic candies and chocolates of different kinds."
 
     @pytest.fixture(scope="function")
-    def pipeline(self, torch_device):
+    def pipeline(self):
         gc.collect()
         backend_empty_cache(torch_device)
-        pipe = FluxPipeline.from_pretrained("black-forest-labs/FLUX.1-dev", torch_dtype=torch.bfloat16)
+        pipe = FluxControlPipeline.from_pretrained("black-forest-labs/FLUX.1-dev", torch_dtype=torch.bfloat16)
         try:
             yield pipe
         finally:
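Both pipeline fixtures drop the torch_device parameter because the name is available as a module-level import from the test utilities (see the import hunk in the next file). A sketch of the resulting fixture shape; the cleanup under finally: is an assumption, since the diff truncates there:

import gc

import pytest
import torch
from diffusers import FluxControlPipeline
from diffusers.utils.testing_utils import backend_empty_cache, torch_device


@pytest.fixture(scope="function")
def pipeline():
    # torch_device comes from the module-level import, not a fixture argument.
    gc.collect()
    backend_empty_cache(torch_device)
    pipe = FluxControlPipeline.from_pretrained(
        "black-forest-labs/FLUX.1-dev", torch_dtype=torch.bfloat16
    )
    try:
        yield pipe
    finally:
        # Assumed cleanup: release the pipeline and accelerator memory.
        del pipe
        gc.collect()
        backend_empty_cache(torch_device)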


@@ -37,6 +37,7 @@ from ..testing_utils import (
     require_peft_backend,
     require_torch_accelerator,
     skip_mps,
+    torch_device,
 )
@@ -207,7 +208,7 @@ class TestHunyuanVideoLoRAIntegration:
     seed = 0
 
     @pytest.fixture(scope="function")
-    def pipeline(self, torch_device):
+    def pipeline(self):
         gc.collect()
         backend_empty_cache(torch_device)
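The torch_device entry added to the import list above is what lets the slimmed-down fixture signature work: the name now resolves at module scope rather than arriving as a fixture argument. An illustrative contrast, using the absolute import path (the diff uses the relative ..testing_utils form):

from diffusers.utils.testing_utils import torch_device


# Before: torch_device was supplied as a pytest fixture argument:
#     def pipeline(self, torch_device): ...
# After: the module-level import provides it directly, so the
# signature can shrink to def pipeline(self).
def describe_device():
    return f"tests will run on {torch_device}"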


@@ -132,7 +132,7 @@ class TestStableDiffusionXLLoRA(PeftLoraLoaderMixinTests):
             expected_atol=expected_atol, expected_rtol=expected_rtol
         )
 
-    def test_lora_scale_kwargs_match_fusion(self):
+    def test_lora_scale_kwargs_match_fusion(self, base_pipe_output):
         if torch.cuda.is_available():
             expected_atol = 9e-2
             expected_rtol = 9e-2
@@ -140,7 +140,9 @@ class TestStableDiffusionXLLoRA(PeftLoraLoaderMixinTests):
             expected_atol = 1e-3
             expected_rtol = 1e-3
 
-        super().test_lora_scale_kwargs_match_fusion(expected_atol=expected_atol, expected_rtol=expected_rtol)
+        super().test_lora_scale_kwargs_match_fusion(
+            base_pipe_output=base_pipe_output, expected_atol=expected_atol, expected_rtol=expected_rtol
+        )
 
 
 @slow
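The SDXL override keeps its device-dependent tolerances while adopting the new fixture. A compact, standalone sketch of that tolerance-selection branch (names are illustrative):

import torch


def select_tolerances():
    # Looser tolerances on CUDA, tighter ones elsewhere, mirroring the
    # branch in the SDXL test above.
    if torch.cuda.is_available():
        return 9e-2, 9e-2
    return 1e-3, 1e-3


expected_atol, expected_rtol = select_tolerances()
print(f"atol={expected_atol}, rtol={expected_rtol}")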