diff --git a/tests/pipelines/controlnet/test_controlnet.py b/tests/pipelines/controlnet/test_controlnet.py
index a34f81ff8d..235cf60dbe 100644
--- a/tests/pipelines/controlnet/test_controlnet.py
+++ b/tests/pipelines/controlnet/test_controlnet.py
@@ -31,7 +31,7 @@ from diffusers import (
     UNet2DConditionModel,
 )
 from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion_controlnet import MultiControlNetModel
-from diffusers.utils import load_image, load_numpy, randn_tensor, slow, torch_device
+from diffusers.utils import load_image, load_numpy, nightly, randn_tensor, slow, torch_device
 from diffusers.utils.import_utils import is_xformers_available
 from diffusers.utils.testing_utils import (
     enable_full_determinism,
@@ -925,42 +925,6 @@ class ControlNetPipelineSlowTests(unittest.TestCase):
         expected_slice = np.array([0.1338, 0.1597, 0.1202, 0.1687, 0.1377, 0.1017, 0.2070, 0.1574, 0.1348])

         assert np.abs(image_slice.flatten() - expected_slice).max() < 1e-2
-    def test_load_local(self):
-        controlnet = ControlNetModel.from_pretrained("lllyasviel/control_v11p_sd15_canny")
-        pipe_1 = StableDiffusionControlNetPipeline.from_pretrained(
-            "runwayml/stable-diffusion-v1-5", safety_checker=None, controlnet=controlnet
-        )
-
-        controlnet = ControlNetModel.from_single_file(
-            "https://huggingface.co/lllyasviel/ControlNet-v1-1/blob/main/control_v11p_sd15_canny.pth"
-        )
-        pipe_2 = StableDiffusionControlNetPipeline.from_single_file(
-            "https://huggingface.co/runwayml/stable-diffusion-v1-5/blob/main/v1-5-pruned-emaonly.safetensors",
-            safety_checker=None,
-            controlnet=controlnet,
-        )
-        pipes = [pipe_1, pipe_2]
-        images = []
-
-        for pipe in pipes:
-            pipe.enable_model_cpu_offload()
-            pipe.set_progress_bar_config(disable=None)
-
-            generator = torch.Generator(device="cpu").manual_seed(0)
-            prompt = "bird"
-            image = load_image(
-                "https://huggingface.co/datasets/hf-internal-testing/diffusers-images/resolve/main/sd_controlnet/bird_canny.png"
-            )
-
-            output = pipe(prompt, image, generator=generator, output_type="np", num_inference_steps=3)
-            images.append(output.images[0])
-
-            del pipe
-            gc.collect()
-            torch.cuda.empty_cache()
-
-        assert np.abs(images[0] - images[1]).sum() < 1e-3
-

 @slow
 @require_torch_gpu
@@ -1000,3 +964,48 @@ class StableDiffusionMultiControlNetPipelineSlowTests(unittest.TestCase):
         )

         assert np.abs(expected_image - image).max() < 5e-2
+
+
+@nightly
+@require_torch_gpu
+class StableDiffusionMultiControlNetPipelineNightlyTests(unittest.TestCase):
+    def tearDown(self):
+        super().tearDown()
+        gc.collect()
+        torch.cuda.empty_cache()
+
+    def test_load_local(self):
+        controlnet = ControlNetModel.from_pretrained("lllyasviel/control_v11p_sd15_canny")
+        pipe_1 = StableDiffusionControlNetPipeline.from_pretrained(
+            "runwayml/stable-diffusion-v1-5", safety_checker=None, controlnet=controlnet
+        )
+
+        controlnet = ControlNetModel.from_single_file(
+            "https://huggingface.co/lllyasviel/ControlNet-v1-1/blob/main/control_v11p_sd15_canny.pth"
+        )
+        pipe_2 = StableDiffusionControlNetPipeline.from_single_file(
+            "https://huggingface.co/runwayml/stable-diffusion-v1-5/blob/main/v1-5-pruned-emaonly.safetensors",
+            safety_checker=None,
+            controlnet=controlnet,
+        )
+        pipes = [pipe_1, pipe_2]
+        images = []
+
+        for pipe in pipes:
+            pipe.enable_model_cpu_offload()
+            pipe.set_progress_bar_config(disable=None)
+
+            generator = torch.Generator(device="cpu").manual_seed(0)
+            prompt = "bird"
+            image = load_image(
+                "https://huggingface.co/datasets/hf-internal-testing/diffusers-images/resolve/main/sd_controlnet/bird_canny.png"
+            )
+
+            output = pipe(prompt, image, generator=generator, output_type="np", num_inference_steps=3)
+            images.append(output.images[0])
+
+            del pipe
+            gc.collect()
+            torch.cuda.empty_cache()
+
+        assert np.abs(images[0] - images[1]).sum() < 1e-3
diff --git a/tests/pipelines/controlnet/test_controlnet_img2img.py b/tests/pipelines/controlnet/test_controlnet_img2img.py
index 2b9ec7e463..9faaefb28c 100644
--- a/tests/pipelines/controlnet/test_controlnet_img2img.py
+++ b/tests/pipelines/controlnet/test_controlnet_img2img.py
@@ -33,7 +33,7 @@ from diffusers import (
     UNet2DConditionModel,
 )
 from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion_controlnet import MultiControlNetModel
-from diffusers.utils import floats_tensor, load_image, load_numpy, randn_tensor, slow, torch_device
+from diffusers.utils import floats_tensor, load_image, load_numpy, nightly, randn_tensor, slow, torch_device
 from diffusers.utils.import_utils import is_xformers_available
 from diffusers.utils.testing_utils import enable_full_determinism, require_torch_gpu
@@ -402,6 +402,15 @@ class ControlNetImg2ImgPipelineSlowTests(unittest.TestCase):

         assert np.abs(expected_image - image).max() < 9e-2

+
+@nightly
+@require_torch_gpu
+class ControlNetImg2ImgPipelineNightlyTests(unittest.TestCase):
+    def tearDown(self):
+        super().tearDown()
+        gc.collect()
+        torch.cuda.empty_cache()
+
     def test_load_local(self):
         controlnet = ControlNetModel.from_pretrained("lllyasviel/control_v11p_sd15_canny")
         pipe_1 = StableDiffusionControlNetImg2ImgPipeline.from_pretrained(
diff --git a/tests/pipelines/controlnet/test_controlnet_inpaint.py b/tests/pipelines/controlnet/test_controlnet_inpaint.py
index cb9b53e612..0389c76617 100644
--- a/tests/pipelines/controlnet/test_controlnet_inpaint.py
+++ b/tests/pipelines/controlnet/test_controlnet_inpaint.py
@@ -33,7 +33,7 @@ from diffusers import (
     UNet2DConditionModel,
 )
 from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion_controlnet import MultiControlNetModel
-from diffusers.utils import floats_tensor, load_image, load_numpy, randn_tensor, slow, torch_device
+from diffusers.utils import floats_tensor, load_image, load_numpy, nightly, randn_tensor, slow, torch_device
 from diffusers.utils.import_utils import is_xformers_available
 from diffusers.utils.testing_utils import enable_full_determinism, require_torch_gpu
@@ -544,6 +544,15 @@ class ControlNetInpaintPipelineSlowTests(unittest.TestCase):

         assert np.abs(expected_image - image).max() < 9e-2

+
+@nightly
+@require_torch_gpu
+class ControlNetInpaintPipelineNightlyTests(unittest.TestCase):
+    def tearDown(self):
+        super().tearDown()
+        gc.collect()
+        torch.cuda.empty_cache()
+
     def test_load_local(self):
         controlnet = ControlNetModel.from_pretrained("lllyasviel/control_v11p_sd15_canny")
         pipe_1 = StableDiffusionControlNetInpaintPipeline.from_pretrained(