diff --git a/src/diffusers/pipelines/stable_diffusion/convert_from_ckpt.py b/src/diffusers/pipelines/stable_diffusion/convert_from_ckpt.py
index 0ea318bc3b..45788a8e29 100644
--- a/src/diffusers/pipelines/stable_diffusion/convert_from_ckpt.py
+++ b/src/diffusers/pipelines/stable_diffusion/convert_from_ckpt.py
@@ -20,6 +20,7 @@
 import tempfile
 
+import requests
 import torch
 from diffusers import (
     AutoencoderKL,
     DDIMScheduler,
@@ -860,11 +861,12 @@ def load_pipeline_from_original_stable_diffusion_ckpt(
         if key_name in checkpoint and checkpoint[key_name].shape[-1] == 1024:
             if not os.path.isfile("v2-inference-v.yaml"):
                 # model_type = "v2"
-                os.system(
-                    "wget -P"
-                    " https://raw.githubusercontent.com/Stability-AI/stablediffusion/main/configs/stable-diffusion/v2-inference-v.yaml"
-                    f" -O {original_config_file}"
-                )
+                r = requests.get(
+                    "https://raw.githubusercontent.com/Stability-AI/stablediffusion/main/configs/stable-diffusion/v2-inference-v.yaml"
+                )
+                r.raise_for_status()
+                with open(original_config_file, "wb") as f:
+                    f.write(r.content)
 
             if global_step == 110000:
                 # v2.1 needs to upcast attention
@@ -872,11 +874,12 @@ def load_pipeline_from_original_stable_diffusion_ckpt(
         else:
             if not os.path.isfile("v1-inference.yaml"):
                 # model_type = "v1"
-                os.system(
-                    "wget"
-                    " https://raw.githubusercontent.com/CompVis/stable-diffusion/main/configs/stable-diffusion/v1-inference.yaml"
-                    f" -O {original_config_file}"
-                )
+                r = requests.get(
+                    "https://raw.githubusercontent.com/CompVis/stable-diffusion/main/configs/stable-diffusion/v1-inference.yaml"
+                )
+                r.raise_for_status()
+                with open(original_config_file, "wb") as f:
+                    f.write(r.content)
 
         original_config = OmegaConf.load(original_config_file)
 