
[Stable diffusion] Hot fix

Author: Patrick von Platen
Date: 2022-08-16 16:17:32 +00:00
Commit: b50a9ae383
Parent: ea2e177c1d

2 changed files with 6 additions and 6 deletions


@@ -96,6 +96,10 @@ class StableDiffusionPipeline(DiffusionPipeline):
         self.scheduler.set_timesteps(num_inference_steps, **extra_set_kwargs)
 
+        # if we use LMSDiscreteScheduler, let's make sure latents are mulitplied by sigmas
+        if isinstance(self.scheduler, LMSDiscreteScheduler):
+            latents = latents * self.scheduler.sigmas[0]
+
         # prepare extra kwargs for the scheduler step, since not all schedulers have the same signature
         # eta (η) is only used with the DDIMScheduler, it will be ignored for other schedulers.
         # eta corresponds to η in DDIM paper: https://arxiv.org/abs/2010.02502
@@ -105,10 +109,6 @@ class StableDiffusionPipeline(DiffusionPipeline):
         if accepts_eta:
             extra_step_kwargs["eta"] = eta
 
-        self.scheduler.set_timesteps(num_inference_steps)
-        if isinstance(self.scheduler, LMSDiscreteScheduler):
-            latents = latents * self.scheduler.sigmas[0]
-
         for i, t in tqdm(enumerate(self.scheduler.timesteps)):
             # expand the latents if we are doing classifier free guidance
             latent_model_input = torch.cat([latents] * 2) if do_classifier_free_guidance else latents
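
For readers skimming the diff: LMSDiscreteScheduler only defines its sigmas once set_timesteps() has run, which is why the hot fix moves the latent scaling to directly after that call and drops the duplicate set_timesteps further down. The sketch below is a minimal standalone illustration of that ordering, not code from the repository; the beta values and step count are assumed, chosen to resemble typical Stable Diffusion settings.

# Minimal illustrative sketch (assumed hyperparameters, not repository code).
import torch
from diffusers import LMSDiscreteScheduler

scheduler = LMSDiscreteScheduler(
    beta_start=0.00085, beta_end=0.012, beta_schedule="scaled_linear"
)
scheduler.set_timesteps(50)  # sigmas are only populated once timesteps are set

latents = torch.randn(1, 4, 64, 64)      # initial Gaussian noise in latent space
latents = latents * scheduler.sigmas[0]  # scale by the largest sigma, as the fixed pipeline does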


@@ -36,8 +36,8 @@ class LMSDiscreteScheduler(SchedulerMixin, ConfigMixin):
         tensor_format="pt",
     ):
         """
-        Linear Multistep Scheduler for discrete beta schedules.
-        Based on the original k-diffusion implementation by Katherine Crowson:
+        Linear Multistep Scheduler for discrete beta schedules. Based on the original k-diffusion implementation by
+        Katherine Crowson:
         https://github.com/crowsonkb/k-diffusion/blob/481677d114f6ea445aa009cf5bd7a9cdee909e47/k_diffusion/sampling.py#L181
         """