
Fixed multi-token textual inversion training (#4452)

* added placeholder token concatenation during training

* Update examples/textual_inversion/textual_inversion.py

Co-authored-by: Patrick von Platen <patrick.v.platen@gmail.com>

---------

Co-authored-by: Patrick von Platen <patrick.v.platen@gmail.com>
Author: manosplitsis (committed by GitHub)
Date: 2023-08-04 13:21:31 +03:00
Parent: 801a5e2199
Commit: 79ef9e528c


@@ -708,7 +708,7 @@ def main():
         data_root=args.train_data_dir,
         tokenizer=tokenizer,
         size=args.resolution,
-        placeholder_token=args.placeholder_token,
+        placeholder_token=(" ".join(tokenizer.convert_ids_to_tokens(placeholder_token_ids))),
         repeats=args.repeats,
         learnable_property=args.learnable_property,
         center_crop=args.center_crop,
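
For context, here is a minimal sketch of why the joined string matters for multi-vector textual inversion. It mirrors how examples/textual_inversion/textual_inversion.py registers one placeholder token per learned vector; the checkpoint name and num_vectors value below are illustrative assumptions, not part of this diff.

# Illustrative sketch, not the full training script.
from transformers import CLIPTokenizer

# Assumed checkpoint for illustration; the script takes this from
# --pretrained_model_name_or_path.
tokenizer = CLIPTokenizer.from_pretrained(
    "runwayml/stable-diffusion-v1-5", subfolder="tokenizer"
)

placeholder_token = "<cat-toy>"  # --placeholder_token
num_vectors = 3                  # --num_vectors (assumed value)

# One token per learned embedding vector: "<cat-toy>", "<cat-toy>_1", "<cat-toy>_2"
placeholder_tokens = [placeholder_token] + [
    f"{placeholder_token}_{i}" for i in range(1, num_vectors)
]
tokenizer.add_tokens(placeholder_tokens)
placeholder_token_ids = tokenizer.convert_tokens_to_ids(placeholder_tokens)

# Before this fix the dataset prompts only contained args.placeholder_token,
# so the additional vectors never appeared in the training prompts. Joining
# every registered token puts all of them into the prompt templates:
joined = " ".join(tokenizer.convert_ids_to_tokens(placeholder_token_ids))
print(joined)  # <cat-toy> <cat-toy>_1 <cat-toy>_2

Passing this joined string as placeholder_token to the dataset means each training caption contains all learned tokens, so every embedding vector receives gradient updates.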