
[Tests] skip nan lora tests on PyTorch 2.5.1 CPU. (#9975)

* skip nan lora tests on PyTorch 2.5.1 CPU.

* cog

* use xfail

* correct xfail

* add condition

* tests
Sayak Paul
2024-11-22 12:45:21 +05:30
committed by GitHub
parent cd6ca9df29
commit 2e86a3f023
3 changed files with 21 additions and 0 deletions
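
The fix itself is small: each affected test_lora_fuse_nan gains a pytest.mark.xfail marker whose condition only fires on CPU with PyTorch 2.5 or newer, so the known failure is reported as an expected failure rather than breaking CI, while strict=True turns an unexpected pass into an error so the marker cannot go stale unnoticed. Below is a minimal, self-contained sketch of the same pattern; it uses plain torch and packaging checks in place of diffusers' torch_device and is_torch_version helpers, and the test name and body are made up for illustration.

import pytest
import torch
from packaging import version

# Stand-ins for diffusers' `torch_device` / `is_torch_version` test helpers.
device = "cuda" if torch.cuda.is_available() else "cpu"
torch_ge_2_5 = version.parse(torch.__version__.split("+")[0]) >= version.parse("2.5")


@pytest.mark.xfail(
    condition=torch.device(device).type == "cpu" and torch_ge_2_5,
    reason="Known failure on CPU with PyTorch 2.5.x.",
    strict=True,  # an unexpected pass is reported as a failure, not silently ignored
)
def test_known_cpu_nan_regression():
    # Stand-in for the real assertion: the diffusers tests fuse a LoRA whose
    # weights are set to NaN and check how the pipeline output behaves.
    affected = torch.device(device).type == "cpu" and torch_ge_2_5
    result = torch.full((2, 2), float("nan")) if affected else torch.zeros(2, 2)
    assert not torch.isnan(result).any()

On the affected configuration the test body fails and is reported as xfail; everywhere else it runs and passes normally.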


@@ -16,6 +16,7 @@ import sys
import unittest
import numpy as np
import pytest
import torch
from transformers import AutoTokenizer, T5EncoderModel
@@ -29,6 +30,7 @@ from diffusers import (
from diffusers.utils.testing_utils import (
    floats_tensor,
    is_peft_available,
    is_torch_version,
    require_peft_backend,
    skip_mps,
    torch_device,
@@ -126,6 +128,11 @@ class CogVideoXLoRATests(unittest.TestCase, PeftLoraLoaderMixinTests):
        return noise, input_ids, pipeline_inputs

    @skip_mps
    @pytest.mark.xfail(
        condition=torch.device(torch_device).type == "cpu" and is_torch_version(">=", "2.5"),
        reason="Test currently fails on CPU and PyTorch 2.5.1 but not on PyTorch 2.4.1.",
        strict=True,
    )
    def test_lora_fuse_nan(self):
        for scheduler_cls in self.scheduler_classes:
            components, text_lora_config, denoiser_lora_config = self.get_dummy_components(scheduler_cls)


@@ -16,6 +16,7 @@ import sys
import unittest
import numpy as np
import pytest
import torch
from transformers import AutoTokenizer, T5EncoderModel
@@ -23,6 +24,7 @@ from diffusers import AutoencoderKLMochi, FlowMatchEulerDiscreteScheduler, Mochi
from diffusers.utils.testing_utils import (
    floats_tensor,
    is_peft_available,
    is_torch_version,
    require_peft_backend,
    skip_mps,
    torch_device,
@@ -105,6 +107,11 @@ class MochiLoRATests(unittest.TestCase, PeftLoraLoaderMixinTests):
        return noise, input_ids, pipeline_inputs

    @pytest.mark.xfail(
        condition=torch.device(torch_device).type == "cpu" and is_torch_version(">=", "2.5"),
        reason="Test currently fails on CPU and PyTorch 2.5.1 but not on PyTorch 2.4.1.",
        strict=True,
    )
    def test_lora_fuse_nan(self):
        for scheduler_cls in self.scheduler_classes:
            components, text_lora_config, denoiser_lora_config = self.get_dummy_components(scheduler_cls)


@@ -19,6 +19,7 @@ import unittest
from itertools import product
import numpy as np
import pytest
import torch
from diffusers import (
@@ -32,6 +33,7 @@ from diffusers.utils.import_utils import is_peft_available
from diffusers.utils.testing_utils import (
    CaptureLogger,
    floats_tensor,
    is_torch_version,
    require_peft_backend,
    require_peft_version_greater,
    require_transformers_version_greater,
@@ -1510,6 +1512,11 @@ class PeftLoraLoaderMixinTests:
)
    @skip_mps
    @pytest.mark.xfail(
        condition=torch.device(torch_device).type == "cpu" and is_torch_version(">=", "2.5"),
        reason="Test currently fails on CPU and PyTorch 2.5.1 but not on PyTorch 2.4.1.",
        strict=True,
    )
    def test_lora_fuse_nan(self):
        for scheduler_cls in self.scheduler_classes:
            components, text_lora_config, denoiser_lora_config = self.get_dummy_components(scheduler_cls)
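
For a quick local check of whether the marker will apply on a given machine, the same condition can be evaluated directly with the helpers the diff imports (a sanity-check snippet, not part of the commit):

# Evaluate the xfail condition used above with the same helpers the tests import.
import torch
from diffusers.utils.testing_utils import is_torch_version, torch_device

expected_to_fail = torch.device(torch_device).type == "cpu" and is_torch_version(">=", "2.5")
print(f"test_lora_fuse_nan marked xfail on this setup: {expected_to_fail}")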