From af13a90ebdcdc240d6cb159ffbe8f7661f20828d Mon Sep 17 00:00:00 2001
From: Dhruv Nair
Date: Tue, 20 Feb 2024 21:43:23 +0530
Subject: [PATCH] Remove `disable_full_determinism` from StableVideoDiffusion xformers test. (#7039)

* update

* update
---
 .../stable_video_diffusion/test_stable_video_diffusion.py | 8 +++-----
 1 file changed, 3 insertions(+), 5 deletions(-)

diff --git a/tests/pipelines/stable_video_diffusion/test_stable_video_diffusion.py b/tests/pipelines/stable_video_diffusion/test_stable_video_diffusion.py
index 60c4112838..5a3c79422c 100644
--- a/tests/pipelines/stable_video_diffusion/test_stable_video_diffusion.py
+++ b/tests/pipelines/stable_video_diffusion/test_stable_video_diffusion.py
@@ -22,7 +22,6 @@ from diffusers.utils import is_accelerate_available, is_accelerate_version, load
 from diffusers.utils.import_utils import is_xformers_available
 from diffusers.utils.testing_utils import (
     CaptureLogger,
-    disable_full_determinism,
     enable_full_determinism,
     floats_tensor,
     numpy_cosine_similarity_distance,
@@ -34,6 +33,9 @@ from diffusers.utils.testing_utils import (
 from ..test_pipelines_common import PipelineTesterMixin
 
 
+enable_full_determinism()
+
+
 def to_np(tensor):
     if isinstance(tensor, torch.Tensor):
         tensor = tensor.detach().cpu().numpy()
@@ -465,8 +467,6 @@ class StableVideoDiffusionPipelineFastTests(PipelineTesterMixin, unittest.TestCa
         reason="XFormers attention is only available with CUDA and `xformers` installed",
     )
     def test_xformers_attention_forwardGenerator_pass(self):
-        disable_full_determinism()
-
         expected_max_diff = 9e-4
 
         if not self.test_xformers_attention:
@@ -496,8 +496,6 @@ class StableVideoDiffusionPipelineFastTests(PipelineTesterMixin, unittest.TestCa
         max_diff = np.abs(to_np(output_with_offload) - to_np(output_without_offload)).max()
         self.assertLess(max_diff, expected_max_diff, "XFormers attention should not affect the inference results")
 
-        enable_full_determinism()
-
 
 @slow
 @require_torch_gpu
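
Note: with this patch, determinism is switched on once at import time via the module-level `enable_full_determinism()` call, so the xformers test no longer needs to disable and re-enable it around its own body. The sketch below is only an approximation of what such a helper typically does (the function name `enable_full_determinism_sketch` is hypothetical; the real implementation lives in `diffusers.utils.testing_utils` and may differ in detail):

import os

import torch


def enable_full_determinism_sketch():
    # cuBLAS needs a fixed workspace configuration for deterministic GEMMs on CUDA.
    os.environ["CUBLAS_WORKSPACE_CONFIG"] = ":16:8"
    # Ask PyTorch to prefer (and enforce) deterministic kernels globally.
    torch.use_deterministic_algorithms(True)
    # Pin cuDNN to deterministic algorithms and disable benchmark autotuning.
    torch.backends.cudnn.deterministic = True
    torch.backends.cudnn.benchmark = False

Because these are process-wide switches, calling the helper once at module scope keeps every test in the file reproducible, which is why the per-test `disable_full_determinism()` / `enable_full_determinism()` pair could be dropped.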