mirror of https://github.com/huggingface/diffusers.git, synced 2026-01-29 07:22:12 +03:00
update
@@ -821,6 +821,7 @@ def _fetch_class_library_tuple(module):
     # import it here to avoid circular import
     diffusers_module = importlib.import_module(__name__.split(".")[0])
     pipelines = getattr(diffusers_module, "pipelines")
+    deprecated_pipelines = getattr(pipelines, "deprecated")
 
     # register the config from the original module, not the dynamo compiled one
     not_compiled_module = _unwrap_model(module)
@@ -831,7 +832,9 @@ def _fetch_class_library_tuple(module):
     pipeline_dir = module_path_items[-2] if len(module_path_items) > 2 else None
 
     path = not_compiled_module.__module__.split(".")
-    is_pipeline_module = pipeline_dir in path and hasattr(pipelines, pipeline_dir)
+    is_pipeline_module = pipeline_dir in path and (
+        hasattr(pipelines, pipeline_dir) or hasattr(deprecated_pipelines, pipeline_dir)
+    )
 
     # if library is not in LOADABLE_CLASSES, then it is a custom module.
     # Or if it's a pipeline module, then the module is inside the pipeline
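
Net effect of the two hunks above: _fetch_class_library_tuple now counts a module as a pipeline module when its parent package resolves under either diffusers.pipelines or diffusers.pipelines.deprecated, so components whose classes were moved into the deprecated subpackage keep being registered with the correct library/class tuple. The indented snippet below is a minimal standalone sketch of that probing logic, not part of the diff: it assumes an installed diffusers version that ships the pipelines.deprecated subpackage, and the helper name looks_like_pipeline_module is made up for illustration.

    import importlib

    def looks_like_pipeline_module(obj) -> bool:
        # Resolve the installed diffusers package and its pipelines subpackages,
        # mirroring the getattr() chain in the first hunk above.
        diffusers_module = importlib.import_module("diffusers")
        pipelines = getattr(diffusers_module, "pipelines")
        deprecated_pipelines = getattr(pipelines, "deprecated")

        # e.g. "diffusers.pipelines.deprecated.blip_diffusion.modeling_ctx_clip"
        path = obj.__module__.split(".")
        pipeline_dir = path[-2] if len(path) > 2 else None

        # The second hasattr() probe is what this commit adds: package names that
        # only resolve under diffusers.pipelines.deprecated are now accepted too.
        return pipeline_dir in path and (
            hasattr(pipelines, pipeline_dir) or hasattr(deprecated_pipelines, pipeline_dir)
        )

For a class living under diffusers.pipelines.deprecated.<name>, pipeline_dir is <name>; if that name is no longer exposed directly on diffusers.pipelines, only the new deprecated probe matches it, which appears to be the point of the change.
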
@@ -22,10 +22,10 @@ from transformers.models.blip_2.configuration_blip_2 import Blip2Config
 from transformers.models.clip.configuration_clip import CLIPTextConfig
 
 from diffusers import AutoencoderKL, BlipDiffusionPipeline, PNDMScheduler, UNet2DConditionModel
-from diffusers.utils.testing_utils import enable_full_determinism
 from diffusers.pipelines.deprecated.blip_diffusion.blip_image_processing import BlipImageProcessor
 from diffusers.pipelines.deprecated.blip_diffusion.modeling_blip2 import Blip2QFormerModel
 from diffusers.pipelines.deprecated.blip_diffusion.modeling_ctx_clip import ContextCLIPTextModel
+from diffusers.utils.testing_utils import enable_full_determinism
 
 from ..test_pipelines_common import PipelineTesterMixin
 
@@ -195,9 +195,9 @@ class BlipDiffusionPipelineFastTests(PipelineTesterMixin, unittest.TestCase):
             [0.5329548, 0.8372512, 0.33269387, 0.82096875, 0.43657133, 0.3783, 0.5953028, 0.51934963, 0.42142007]
         )
 
-        assert (
-            np.abs(image_slice.flatten() - expected_slice).max() < 1e-2
-        ), f" expected_slice {image_slice.flatten()}, but got {image_slice.flatten()}"
+        assert np.abs(image_slice.flatten() - expected_slice).max() < 1e-2, (
+            f" expected_slice {image_slice.flatten()}, but got {image_slice.flatten()}"
+        )
 
     @unittest.skip("Test not supported because of complexities in deriving query_embeds.")
     def test_encode_prompt_works_in_isolation(self):
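
The assert hunk above is formatting-only: the comparison moves onto one line and the message is wrapped in parentheses. Note that in both the old and the new form the f-string interpolates image_slice.flatten() in both slots, so the failure message never actually prints expected_slice. The indented snippet below is a self-contained sketch of what the message presumably intends, using stand-in arrays rather than real pipeline output; it is illustrative only, not part of the commit.

    import numpy as np

    # Stand-in values; in the test these come from the generated image slice.
    expected_slice = np.array([0.53, 0.84, 0.33])
    image_slice = np.array([[0.53, 0.84, 0.33]])

    assert np.abs(image_slice.flatten() - expected_slice).max() < 1e-2, (
        f"expected_slice {expected_slice}, but got {image_slice.flatten()}"
    )

    # Roughly equivalent check with a built-in diff report on failure:
    np.testing.assert_allclose(image_slice.flatten(), expected_slice, atol=1e-2)
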