mirror of https://github.com/huggingface/diffusers.git

Fix a grammatical error in the raise messages (#8272)

Fix grammatical error
Tolga Cangöz
2024-05-24 21:15:00 +03:00
committed by GitHub
parent 1096f88e2b
commit db33af065b
27 changed files with 38 additions and 38 deletions
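The change is mechanical: every raise message of the form "X does is not implemented for Y" loses the stray "does". A minimal sketch of where the message surfaces, using DDIMScheduler from the diff below and a hypothetical unsupported beta_schedule value:

    from diffusers import DDIMScheduler

    try:
        # "quadratic" is assumed here to be an unregistered schedule name,
        # so construction falls through to the `else` branch patched below
        DDIMScheduler(beta_schedule="quadratic")
    except NotImplementedError as err:
        print(err)  # after this commit: "quadratic is not implemented for <class '...DDIMScheduler'>"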


@@ -565,7 +565,7 @@ class LCMSchedulerWithTimestamp(SchedulerMixin, ConfigMixin):
             # Glide cosine schedule
             self.betas = betas_for_alpha_bar(num_train_timesteps)
         else:
-            raise NotImplementedError(f"{beta_schedule} does is not implemented for {self.__class__}")
+            raise NotImplementedError(f"{beta_schedule} is not implemented for {self.__class__}")
 
         # Rescale for zero SNR
         if rescale_betas_zero_snr:
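For reference, the "Glide cosine schedule" branch that recurs throughout these hunks calls betas_for_alpha_bar, which derives per-step betas from a cosine noise schedule. A simplified sketch of that helper as it exists in diffusers (defaults and clipping may differ slightly by version):

    import math
    import torch

    def betas_for_alpha_bar(num_train_timesteps, max_beta=0.999):
        # alpha_bar(t) = prod(1 - beta) follows a squared cosine in t
        def alpha_bar(t):
            return math.cos((t + 0.008) / 1.008 * math.pi / 2) ** 2

        betas = []
        for i in range(num_train_timesteps):
            t1 = i / num_train_timesteps
            t2 = (i + 1) / num_train_timesteps
            # beta_i = 1 - alpha_bar(t_{i+1}) / alpha_bar(t_i), clipped for stability
            betas.append(min(1 - alpha_bar(t2) / alpha_bar(t1), max_beta))
        return torch.tensor(betas, dtype=torch.float32)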


@@ -477,7 +477,7 @@ class LCMScheduler(SchedulerMixin, ConfigMixin):
             # Glide cosine schedule
             self.betas = betas_for_alpha_bar(num_train_timesteps)
         else:
-            raise NotImplementedError(f"{beta_schedule} does is not implemented for {self.__class__}")
+            raise NotImplementedError(f"{beta_schedule} is not implemented for {self.__class__}")
 
         # Rescale for zero SNR
         if rescale_betas_zero_snr:


@@ -218,7 +218,7 @@ class UFOGenScheduler(SchedulerMixin, ConfigMixin):
             betas = torch.linspace(-6, 6, num_train_timesteps)
             self.betas = torch.sigmoid(betas) * (beta_end - beta_start) + beta_start
         else:
-            raise NotImplementedError(f"{beta_schedule} does is not implemented for {self.__class__}")
+            raise NotImplementedError(f"{beta_schedule} is not implemented for {self.__class__}")
 
         # Rescale for zero SNR
         if rescale_betas_zero_snr:
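The sigmoid branch shown in this hunk's context lines maps linspace(-6, 6) through a sigmoid, so the betas sweep almost exactly from beta_start to beta_end (sigmoid(-6) ≈ 0.0025, sigmoid(6) ≈ 0.9975). A quick numeric check, with beta_start/beta_end set to typical DDPM defaults (assumed for illustration, not taken from this diff):

    import torch

    beta_start, beta_end = 0.0001, 0.02  # assumed defaults for illustration
    betas = torch.sigmoid(torch.linspace(-6, 6, 1000)) * (beta_end - beta_start) + beta_start
    print(betas[0].item(), betas[-1].item())  # ≈ 0.00015 and ≈ 0.0200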


@@ -211,7 +211,7 @@ class DDIMScheduler(SchedulerMixin, ConfigMixin):
             # Glide cosine schedule
             self.betas = betas_for_alpha_bar(num_train_timesteps)
         else:
-            raise NotImplementedError(f"{beta_schedule} does is not implemented for {self.__class__}")
+            raise NotImplementedError(f"{beta_schedule} is not implemented for {self.__class__}")
 
         # Rescale for zero SNR
         if rescale_betas_zero_snr:


@@ -207,7 +207,7 @@ class DDIMInverseScheduler(SchedulerMixin, ConfigMixin):
             # Glide cosine schedule
             self.betas = betas_for_alpha_bar(num_train_timesteps)
         else:
-            raise NotImplementedError(f"{beta_schedule} does is not implemented for {self.__class__}")
+            raise NotImplementedError(f"{beta_schedule} is not implemented for {self.__class__}")
 
         # Rescale for zero SNR
         if rescale_betas_zero_snr:


@@ -218,7 +218,7 @@ class DDIMParallelScheduler(SchedulerMixin, ConfigMixin):
             # Glide cosine schedule
             self.betas = betas_for_alpha_bar(num_train_timesteps)
         else:
-            raise NotImplementedError(f"{beta_schedule} does is not implemented for {self.__class__}")
+            raise NotImplementedError(f"{beta_schedule} is not implemented for {self.__class__}")
 
         # Rescale for zero SNR
         if rescale_betas_zero_snr:


@@ -211,7 +211,7 @@ class DDPMScheduler(SchedulerMixin, ConfigMixin):
             betas = torch.linspace(-6, 6, num_train_timesteps)
             self.betas = torch.sigmoid(betas) * (beta_end - beta_start) + beta_start
         else:
-            raise NotImplementedError(f"{beta_schedule} does is not implemented for {self.__class__}")
+            raise NotImplementedError(f"{beta_schedule} is not implemented for {self.__class__}")
 
         # Rescale for zero SNR
         if rescale_betas_zero_snr:


@@ -219,7 +219,7 @@ class DDPMParallelScheduler(SchedulerMixin, ConfigMixin):
             betas = torch.linspace(-6, 6, num_train_timesteps)
             self.betas = torch.sigmoid(betas) * (beta_end - beta_start) + beta_start
         else:
-            raise NotImplementedError(f"{beta_schedule} does is not implemented for {self.__class__}")
+            raise NotImplementedError(f"{beta_schedule} is not implemented for {self.__class__}")
 
         # Rescale for zero SNR
         if rescale_betas_zero_snr:


@@ -152,7 +152,7 @@ class DEISMultistepScheduler(SchedulerMixin, ConfigMixin):
             # Glide cosine schedule
             self.betas = betas_for_alpha_bar(num_train_timesteps)
         else:
-            raise NotImplementedError(f"{beta_schedule} does is not implemented for {self.__class__}")
+            raise NotImplementedError(f"{beta_schedule} is not implemented for {self.__class__}")
 
         self.alphas = 1.0 - self.betas
         self.alphas_cumprod = torch.cumprod(self.alphas, dim=0)
@@ -170,13 +170,13 @@ class DEISMultistepScheduler(SchedulerMixin, ConfigMixin):
             if algorithm_type in ["dpmsolver", "dpmsolver++"]:
                 self.register_to_config(algorithm_type="deis")
             else:
-                raise NotImplementedError(f"{algorithm_type} does is not implemented for {self.__class__}")
+                raise NotImplementedError(f"{algorithm_type} is not implemented for {self.__class__}")
 
         if solver_type not in ["logrho"]:
             if solver_type in ["midpoint", "heun", "bh1", "bh2"]:
                 self.register_to_config(solver_type="logrho")
             else:
-                raise NotImplementedError(f"solver type {solver_type} does is not implemented for {self.__class__}")
+                raise NotImplementedError(f"solver type {solver_type} is not implemented for {self.__class__}")
 
         # setable values
         self.num_inference_steps = None
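Note from the second hunk above: DEISMultistepScheduler silently remaps DPM-Solver style names to its own equivalents ("dpmsolver"/"dpmsolver++" to "deis"; "midpoint"/"heun"/"bh1"/"bh2" to "logrho") and only raises for anything else. A hedged sketch of that behavior, with a hypothetical unknown name:

    from diffusers import DEISMultistepScheduler

    s = DEISMultistepScheduler(algorithm_type="dpmsolver++", solver_type="midpoint")
    print(s.config.algorithm_type, s.config.solver_type)  # remapped: "deis" "logrho"

    # an unrecognized name falls through to the corrected raise:
    # NotImplementedError: foo is not implemented for <class '...DEISMultistepScheduler'>
    DEISMultistepScheduler(algorithm_type="foo")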


@@ -229,7 +229,7 @@ class DPMSolverMultistepScheduler(SchedulerMixin, ConfigMixin):
             # Glide cosine schedule
             self.betas = betas_for_alpha_bar(num_train_timesteps)
         else:
-            raise NotImplementedError(f"{beta_schedule} does is not implemented for {self.__class__}")
+            raise NotImplementedError(f"{beta_schedule} is not implemented for {self.__class__}")
 
         if rescale_betas_zero_snr:
             self.betas = rescale_zero_terminal_snr(self.betas)
@@ -256,13 +256,13 @@ class DPMSolverMultistepScheduler(SchedulerMixin, ConfigMixin):
             if algorithm_type == "deis":
                 self.register_to_config(algorithm_type="dpmsolver++")
             else:
-                raise NotImplementedError(f"{algorithm_type} does is not implemented for {self.__class__}")
+                raise NotImplementedError(f"{algorithm_type} is not implemented for {self.__class__}")
 
         if solver_type not in ["midpoint", "heun"]:
             if solver_type in ["logrho", "bh1", "bh2"]:
                 self.register_to_config(solver_type="midpoint")
             else:
-                raise NotImplementedError(f"{solver_type} does is not implemented for {self.__class__}")
+                raise NotImplementedError(f"{solver_type} is not implemented for {self.__class__}")
 
         if algorithm_type not in ["dpmsolver++", "sde-dpmsolver++"] and final_sigmas_type == "zero":
             raise ValueError(


@@ -182,9 +182,9 @@ class FlaxDPMSolverMultistepScheduler(FlaxSchedulerMixin, ConfigMixin):
 
         # settings for DPM-Solver
         if self.config.algorithm_type not in ["dpmsolver", "dpmsolver++"]:
-            raise NotImplementedError(f"{self.config.algorithm_type} does is not implemented for {self.__class__}")
+            raise NotImplementedError(f"{self.config.algorithm_type} is not implemented for {self.__class__}")
         if self.config.solver_type not in ["midpoint", "heun"]:
-            raise NotImplementedError(f"{self.config.solver_type} does is not implemented for {self.__class__}")
+            raise NotImplementedError(f"{self.config.solver_type} is not implemented for {self.__class__}")
 
         # standard deviation of the initial noise distribution
         init_noise_sigma = jnp.array(1.0, dtype=self.dtype)


@@ -178,7 +178,7 @@ class DPMSolverMultistepInverseScheduler(SchedulerMixin, ConfigMixin):
             # Glide cosine schedule
             self.betas = betas_for_alpha_bar(num_train_timesteps)
         else:
-            raise NotImplementedError(f"{beta_schedule} does is not implemented for {self.__class__}")
+            raise NotImplementedError(f"{beta_schedule} is not implemented for {self.__class__}")
 
         self.alphas = 1.0 - self.betas
         self.alphas_cumprod = torch.cumprod(self.alphas, dim=0)
@@ -196,13 +196,13 @@ class DPMSolverMultistepInverseScheduler(SchedulerMixin, ConfigMixin):
             if algorithm_type == "deis":
                 self.register_to_config(algorithm_type="dpmsolver++")
             else:
-                raise NotImplementedError(f"{algorithm_type} does is not implemented for {self.__class__}")
+                raise NotImplementedError(f"{algorithm_type} is not implemented for {self.__class__}")
 
         if solver_type not in ["midpoint", "heun"]:
             if solver_type in ["logrho", "bh1", "bh2"]:
                 self.register_to_config(solver_type="midpoint")
             else:
-                raise NotImplementedError(f"{solver_type} does is not implemented for {self.__class__}")
+                raise NotImplementedError(f"{solver_type} is not implemented for {self.__class__}")
 
         # setable values
         self.num_inference_steps = None


@@ -184,7 +184,7 @@ class DPMSolverSDEScheduler(SchedulerMixin, ConfigMixin):
             # Glide cosine schedule
             self.betas = betas_for_alpha_bar(num_train_timesteps)
         else:
-            raise NotImplementedError(f"{beta_schedule} does is not implemented for {self.__class__}")
+            raise NotImplementedError(f"{beta_schedule} is not implemented for {self.__class__}")
 
         self.alphas = 1.0 - self.betas
         self.alphas_cumprod = torch.cumprod(self.alphas, dim=0)


@@ -172,7 +172,7 @@ class DPMSolverSinglestepScheduler(SchedulerMixin, ConfigMixin):
             # Glide cosine schedule
             self.betas = betas_for_alpha_bar(num_train_timesteps)
         else:
-            raise NotImplementedError(f"{beta_schedule} does is not implemented for {self.__class__}")
+            raise NotImplementedError(f"{beta_schedule} is not implemented for {self.__class__}")
 
         self.alphas = 1.0 - self.betas
         self.alphas_cumprod = torch.cumprod(self.alphas, dim=0)
@@ -190,12 +190,12 @@ class DPMSolverSinglestepScheduler(SchedulerMixin, ConfigMixin):
             if algorithm_type == "deis":
                 self.register_to_config(algorithm_type="dpmsolver++")
             else:
-                raise NotImplementedError(f"{algorithm_type} does is not implemented for {self.__class__}")
+                raise NotImplementedError(f"{algorithm_type} is not implemented for {self.__class__}")
         if solver_type not in ["midpoint", "heun"]:
             if solver_type in ["logrho", "bh1", "bh2"]:
                 self.register_to_config(solver_type="midpoint")
             else:
-                raise NotImplementedError(f"{solver_type} does is not implemented for {self.__class__}")
+                raise NotImplementedError(f"{solver_type} is not implemented for {self.__class__}")
 
         if algorithm_type != "dpmsolver++" and final_sigmas_type == "zero":
             raise ValueError(


@@ -119,7 +119,7 @@ class EDMDPMSolverMultistepScheduler(SchedulerMixin, ConfigMixin):
             if solver_type in ["logrho", "bh1", "bh2"]:
                 self.register_to_config(solver_type="midpoint")
             else:
-                raise NotImplementedError(f"{solver_type} does is not implemented for {self.__class__}")
+                raise NotImplementedError(f"{solver_type} is not implemented for {self.__class__}")
 
         if algorithm_type not in ["dpmsolver++", "sde-dpmsolver++"] and final_sigmas_type == "zero":
             raise ValueError(


@@ -190,7 +190,7 @@ class EulerAncestralDiscreteScheduler(SchedulerMixin, ConfigMixin):
             # Glide cosine schedule
             self.betas = betas_for_alpha_bar(num_train_timesteps)
         else:
-            raise NotImplementedError(f"{beta_schedule} does is not implemented for {self.__class__}")
+            raise NotImplementedError(f"{beta_schedule} is not implemented for {self.__class__}")
 
         if rescale_betas_zero_snr:
             self.betas = rescale_zero_terminal_snr(self.betas)


@@ -205,7 +205,7 @@ class EulerDiscreteScheduler(SchedulerMixin, ConfigMixin):
             # Glide cosine schedule
             self.betas = betas_for_alpha_bar(num_train_timesteps)
         else:
-            raise NotImplementedError(f"{beta_schedule} does is not implemented for {self.__class__}")
+            raise NotImplementedError(f"{beta_schedule} is not implemented for {self.__class__}")
 
         if rescale_betas_zero_snr:
             self.betas = rescale_zero_terminal_snr(self.betas)


@@ -135,7 +135,7 @@ class HeunDiscreteScheduler(SchedulerMixin, ConfigMixin):
         elif beta_schedule == "exp":
             self.betas = betas_for_alpha_bar(num_train_timesteps, alpha_transform_type="exp")
         else:
-            raise NotImplementedError(f"{beta_schedule} does is not implemented for {self.__class__}")
+            raise NotImplementedError(f"{beta_schedule} is not implemented for {self.__class__}")
 
         self.alphas = 1.0 - self.betas
         self.alphas_cumprod = torch.cumprod(self.alphas, dim=0)


@@ -129,7 +129,7 @@ class KDPM2AncestralDiscreteScheduler(SchedulerMixin, ConfigMixin):
             # Glide cosine schedule
             self.betas = betas_for_alpha_bar(num_train_timesteps)
         else:
-            raise NotImplementedError(f"{beta_schedule} does is not implemented for {self.__class__}")
+            raise NotImplementedError(f"{beta_schedule} is not implemented for {self.__class__}")
 
         self.alphas = 1.0 - self.betas
         self.alphas_cumprod = torch.cumprod(self.alphas, dim=0)


@@ -128,7 +128,7 @@ class KDPM2DiscreteScheduler(SchedulerMixin, ConfigMixin):
             # Glide cosine schedule
             self.betas = betas_for_alpha_bar(num_train_timesteps)
         else:
-            raise NotImplementedError(f"{beta_schedule} does is not implemented for {self.__class__}")
+            raise NotImplementedError(f"{beta_schedule} is not implemented for {self.__class__}")
 
         self.alphas = 1.0 - self.betas
         self.alphas_cumprod = torch.cumprod(self.alphas, dim=0)


@@ -224,7 +224,7 @@ class LCMScheduler(SchedulerMixin, ConfigMixin):
             # Glide cosine schedule
             self.betas = betas_for_alpha_bar(num_train_timesteps)
         else:
-            raise NotImplementedError(f"{beta_schedule} does is not implemented for {self.__class__}")
+            raise NotImplementedError(f"{beta_schedule} is not implemented for {self.__class__}")
 
         # Rescale for zero SNR
         if rescale_betas_zero_snr:


@@ -149,7 +149,7 @@ class LMSDiscreteScheduler(SchedulerMixin, ConfigMixin):
             # Glide cosine schedule
             self.betas = betas_for_alpha_bar(num_train_timesteps)
         else:
-            raise NotImplementedError(f"{beta_schedule} does is not implemented for {self.__class__}")
+            raise NotImplementedError(f"{beta_schedule} is not implemented for {self.__class__}")
 
         self.alphas = 1.0 - self.betas
         self.alphas_cumprod = torch.cumprod(self.alphas, dim=0)


@@ -135,7 +135,7 @@ class PNDMScheduler(SchedulerMixin, ConfigMixin):
             # Glide cosine schedule
             self.betas = betas_for_alpha_bar(num_train_timesteps)
         else:
-            raise NotImplementedError(f"{beta_schedule} does is not implemented for {self.__class__}")
+            raise NotImplementedError(f"{beta_schedule} is not implemented for {self.__class__}")
 
         self.alphas = 1.0 - self.betas
         self.alphas_cumprod = torch.cumprod(self.alphas, dim=0)


@@ -143,7 +143,7 @@ class RePaintScheduler(SchedulerMixin, ConfigMixin):
             betas = torch.linspace(-6, 6, num_train_timesteps)
             self.betas = torch.sigmoid(betas) * (beta_end - beta_start) + beta_start
         else:
-            raise NotImplementedError(f"{beta_schedule} does is not implemented for {self.__class__}")
+            raise NotImplementedError(f"{beta_schedule} is not implemented for {self.__class__}")
 
         self.alphas = 1.0 - self.betas
         self.alphas_cumprod = torch.cumprod(self.alphas, dim=0)


@@ -180,7 +180,7 @@ class SASolverScheduler(SchedulerMixin, ConfigMixin):
             # Glide cosine schedule
             self.betas = betas_for_alpha_bar(num_train_timesteps)
         else:
-            raise NotImplementedError(f"{beta_schedule} does is not implemented for {self.__class__}")
+            raise NotImplementedError(f"{beta_schedule} is not implemented for {self.__class__}")
 
         self.alphas = 1.0 - self.betas
         self.alphas_cumprod = torch.cumprod(self.alphas, dim=0)
@@ -194,7 +194,7 @@ class SASolverScheduler(SchedulerMixin, ConfigMixin):
         self.init_noise_sigma = 1.0
 
         if algorithm_type not in ["data_prediction", "noise_prediction"]:
-            raise NotImplementedError(f"{algorithm_type} does is not implemented for {self.__class__}")
+            raise NotImplementedError(f"{algorithm_type} is not implemented for {self.__class__}")
 
         # setable values
         self.num_inference_steps = None


@@ -225,7 +225,7 @@ class TCDScheduler(SchedulerMixin, ConfigMixin):
             # Glide cosine schedule
             self.betas = betas_for_alpha_bar(num_train_timesteps)
         else:
-            raise NotImplementedError(f"{beta_schedule} does is not implemented for {self.__class__}")
+            raise NotImplementedError(f"{beta_schedule} is not implemented for {self.__class__}")
 
         # Rescale for zero SNR
         if rescale_betas_zero_snr:


@@ -211,7 +211,7 @@ class UniPCMultistepScheduler(SchedulerMixin, ConfigMixin):
             # Glide cosine schedule
             self.betas = betas_for_alpha_bar(num_train_timesteps)
         else:
-            raise NotImplementedError(f"{beta_schedule} does is not implemented for {self.__class__}")
+            raise NotImplementedError(f"{beta_schedule} is not implemented for {self.__class__}")
 
         if rescale_betas_zero_snr:
             self.betas = rescale_zero_terminal_snr(self.betas)
@@ -237,7 +237,7 @@ class UniPCMultistepScheduler(SchedulerMixin, ConfigMixin):
             if solver_type in ["midpoint", "heun", "logrho"]:
                 self.register_to_config(solver_type="bh2")
             else:
-                raise NotImplementedError(f"{solver_type} does is not implemented for {self.__class__}")
+                raise NotImplementedError(f"{solver_type} is not implemented for {self.__class__}")
 
         self.predict_x0 = predict_x0
         # setable values