
change max_shard_size to 10GB (#8445)

* change max_shard_size to 10GB

* add notes to the documentation

* Update src/diffusers/models/modeling_utils.py

Co-authored-by: Lucain <lucainp@gmail.com>

* change to abs limit

---------

Co-authored-by: Lucain <lucainp@gmail.com>
Author: Sayak Paul (committed by GitHub)
Date: 2024-06-12 13:49:13 +01:00
Parent: 0a1c13af79
Commit: d38f69ea25

@@ -270,7 +270,7 @@ class ModelMixin(torch.nn.Module, PushToHubMixin):
         save_function: Optional[Callable] = None,
         safe_serialization: bool = True,
         variant: Optional[str] = None,
-        max_shard_size: Union[int, str] = "5GB",
+        max_shard_size: Union[int, str] = "10GB",
         push_to_hub: bool = False,
         **kwargs,
     ):
@@ -293,10 +293,13 @@ class ModelMixin(torch.nn.Module, PushToHubMixin):
                 Whether to save the model using `safetensors` or the traditional PyTorch way with `pickle`.
             variant (`str`, *optional*):
                 If specified, weights are saved in the format `pytorch_model.<variant>.bin`.
-            max_shard_size (`int` or `str`, defaults to `"5GB"`):
+            max_shard_size (`int` or `str`, defaults to `"10GB"`):
                 The maximum size for a checkpoint before being sharded. Checkpoint shards will then each be smaller
                 than this size. If expressed as a string, needs to be digits followed by a unit (like `"5GB"`).
-                If expressed as an integer, the unit is bytes.
+                If expressed as an integer, the unit is bytes. Note that this limit will be decreased after a certain
+                period of time (starting from Oct 2024) to allow users to upgrade to the latest version of `diffusers`.
+                This is to establish a common default size for this argument across different libraries in the Hugging
+                Face ecosystem (`transformers` and `accelerate`, for example).
             push_to_hub (`bool`, *optional*, defaults to `False`):
                 Whether or not to push your model to the Hugging Face Hub after saving it. You can specify the
                 repository you want to push to with `repo_id` (will default to the name of `save_directory` in your
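
For context, a minimal usage sketch of the changed default follows. The model id and output paths are illustrative assumptions, not part of this commit; only the `max_shard_size` behavior comes from the diff above.

# Sketch of the new sharding default in ModelMixin.save_pretrained;
# model id and output paths below are illustrative assumptions.
from diffusers import UNet2DConditionModel

model = UNet2DConditionModel.from_pretrained(
    "runwayml/stable-diffusion-v1-5", subfolder="unet"
)

# With this commit, checkpoint shards are capped at 10GB each by default.
model.save_pretrained("./unet-sharded")

# The previous 5GB cap can still be requested explicitly, either as a
# string with a unit or as an integer number of bytes.
model.save_pretrained("./unet-sharded-5gb", max_shard_size="5GB")
model.save_pretrained("./unet-sharded-5gb-bytes", max_shard_size=5 * 10**9)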