[tests] xfail some kandinsky tests. (#12364)
xfail some kandinsky tests.
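Each touched test module gets the same treatment: `pytest` and `is_transformers_version` are imported, and the slice-comparing test is decorated with `pytest.mark.xfail`, gated on transformers >= 4.56.2 and marked `strict=True` so an unexpected pass is surfaced as a failure. A minimal, self-contained sketch of that pattern (the test function and slice values below are placeholders for illustration, not the actual Kandinsky test code):

import numpy as np
import pytest

from diffusers.utils import is_transformers_version


@pytest.mark.xfail(
    condition=is_transformers_version(">=", "4.56.2"),
    reason="Latest transformers changes the slices",
    strict=True,  # with strict=True, an unexpected pass is reported as a failure
)
def test_output_slices_sketch():
    # Placeholder comparison standing in for the pipeline's expected-slice assertion.
    expected_slice = np.array([0.0, 0.25, 0.5])
    output_slice = np.array([0.0, 0.25, 0.5])
    assert np.abs(output_slice - expected_slice).max() < 1e-2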
@@ -27,8 +27,8 @@ else:
    _import_structure["pipeline_qwenimage_controlnet"] = ["QwenImageControlNetPipeline"]
    _import_structure["pipeline_qwenimage_controlnet_inpaint"] = ["QwenImageControlNetInpaintPipeline"]
    _import_structure["pipeline_qwenimage_edit"] = ["QwenImageEditPipeline"]
    _import_structure["pipeline_qwenimage_edit_plus"] = ["QwenImageEditPlusPipeline"]
    _import_structure["pipeline_qwenimage_edit_inpaint"] = ["QwenImageEditInpaintPipeline"]
    _import_structure["pipeline_qwenimage_edit_plus"] = ["QwenImageEditPlusPipeline"]
    _import_structure["pipeline_qwenimage_img2img"] = ["QwenImageImg2ImgPipeline"]
    _import_structure["pipeline_qwenimage_inpaint"] = ["QwenImageInpaintPipeline"]
@@ -18,11 +18,13 @@ import random
import unittest

import numpy as np
+import pytest
import torch
from transformers import XLMRobertaTokenizerFast

from diffusers import DDIMScheduler, KandinskyPipeline, KandinskyPriorPipeline, UNet2DConditionModel, VQModel
from diffusers.pipelines.kandinsky.text_encoder import MCLIPConfig, MultilingualCLIP
+from diffusers.utils import is_transformers_version

from ...testing_utils import (
    backend_empty_cache,
@@ -215,6 +217,9 @@ class KandinskyPipelineFastTests(PipelineTesterMixin, unittest.TestCase):
        dummy = Dummies()
        return dummy.get_dummy_inputs(device=device, seed=seed)

+    @pytest.mark.xfail(
+        condition=is_transformers_version(">=", "4.56.2"), reason="Latest transformers changes the slices", strict=True
+    )
    def test_kandinsky(self):
        device = "cpu"
@@ -16,8 +16,10 @@
import unittest

import numpy as np
+import pytest

from diffusers import KandinskyCombinedPipeline, KandinskyImg2ImgCombinedPipeline, KandinskyInpaintCombinedPipeline
+from diffusers.utils import is_transformers_version

from ...testing_utils import enable_full_determinism, require_torch_accelerator, torch_device
from ..test_pipelines_common import PipelineTesterMixin
@@ -73,6 +75,9 @@ class KandinskyPipelineCombinedFastTests(PipelineTesterMixin, unittest.TestCase):
        )
        return inputs

+    @pytest.mark.xfail(
+        condition=is_transformers_version(">=", "4.56.2"), reason="Latest transformers changes the slices", strict=True
+    )
    def test_kandinsky(self):
        device = "cpu"
@@ -181,6 +186,9 @@ class KandinskyPipelineImg2ImgCombinedFastTests(PipelineTesterMixin, unittest.TestCase):
        inputs.pop("negative_image_embeds")
        return inputs

+    @pytest.mark.xfail(
+        condition=is_transformers_version(">=", "4.56.2"), reason="Latest transformers changes the slices", strict=True
+    )
    def test_kandinsky(self):
        device = "cpu"
@@ -292,6 +300,9 @@ class KandinskyPipelineInpaintCombinedFastTests(PipelineTesterMixin, unittest.TestCase):
        inputs.pop("negative_image_embeds")
        return inputs

+    @pytest.mark.xfail(
+        condition=is_transformers_version(">=", "4.56.2"), reason="Latest transformers changes the slices", strict=True
+    )
    def test_kandinsky(self):
        device = "cpu"
@@ -18,6 +18,7 @@ import random
import unittest

import numpy as np
+import pytest
import torch
from PIL import Image
from transformers import XLMRobertaTokenizerFast
@@ -31,6 +32,7 @@ from diffusers import (
    VQModel,
)
from diffusers.pipelines.kandinsky.text_encoder import MCLIPConfig, MultilingualCLIP
+from diffusers.utils import is_transformers_version

from ...testing_utils import (
    backend_empty_cache,
@@ -237,6 +239,9 @@ class KandinskyImg2ImgPipelineFastTests(PipelineTesterMixin, unittest.TestCase):
        dummies = Dummies()
        return dummies.get_dummy_inputs(device=device, seed=seed)

+    @pytest.mark.xfail(
+        condition=is_transformers_version(">=", "4.56.2"), reason="Latest transformers changes the slices", strict=True
+    )
    def test_kandinsky_img2img(self):
        device = "cpu"
@@ -18,12 +18,14 @@ import random
import unittest

import numpy as np
+import pytest
import torch
from PIL import Image
from transformers import XLMRobertaTokenizerFast

from diffusers import DDIMScheduler, KandinskyInpaintPipeline, KandinskyPriorPipeline, UNet2DConditionModel, VQModel
from diffusers.pipelines.kandinsky.text_encoder import MCLIPConfig, MultilingualCLIP
+from diffusers.utils import is_transformers_version

from ...testing_utils import (
    backend_empty_cache,
@@ -231,6 +233,9 @@ class KandinskyInpaintPipelineFastTests(PipelineTesterMixin, unittest.TestCase):
        dummies = Dummies()
        return dummies.get_dummy_inputs(device=device, seed=seed)

+    @pytest.mark.xfail(
+        condition=is_transformers_version(">=", "4.56.2"), reason="Latest transformers changes the slices", strict=True
+    )
    def test_kandinsky_inpaint(self):
        device = "cpu"