From cde02b061b6f13012dfefe76bc8abf5e6ec6d3f3 Mon Sep 17 00:00:00 2001
From: Chengxi Guo
Date: Sat, 19 Jul 2025 07:38:58 +0800
Subject: [PATCH] Fix kontext finetune issue when batch size >1 (#11921)

set drop_last to True

Signed-off-by: mymusise
---
 examples/dreambooth/train_dreambooth_lora_flux_kontext.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/examples/dreambooth/train_dreambooth_lora_flux_kontext.py b/examples/dreambooth/train_dreambooth_lora_flux_kontext.py
index 5bd9b8684d..984e0c50c3 100644
--- a/examples/dreambooth/train_dreambooth_lora_flux_kontext.py
+++ b/examples/dreambooth/train_dreambooth_lora_flux_kontext.py
@@ -1614,7 +1614,7 @@ def main(args):
     )
     if args.cond_image_column is not None:
         logger.info("I2I fine-tuning enabled.")
-        batch_sampler = BucketBatchSampler(train_dataset, batch_size=args.train_batch_size, drop_last=False)
+        batch_sampler = BucketBatchSampler(train_dataset, batch_size=args.train_batch_size, drop_last=True)
         train_dataloader = torch.utils.data.DataLoader(
             train_dataset,
             batch_sampler=batch_sampler,
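
Context for the change: with drop_last=False, a batch sampler emits a trailing partial batch whenever the number of samples in a bucket is not a multiple of train_batch_size, so the last step can see a different batch size than the rest of training; drop_last=True simply discards that ragged batch. The sketch below is a minimal, hypothetical illustration of this behavior using torch.utils.data.BatchSampler and a toy dataset as stand-ins for the script's own BucketBatchSampler and DreamBoothDataset; it is not the patched code itself.

# Hypothetical sketch: how drop_last changes the trailing batch emitted by a
# batch sampler (stand-in for the script's BucketBatchSampler).
import torch
from torch.utils.data import Dataset, DataLoader, BatchSampler, SequentialSampler


class ToyDataset(Dataset):
    """Tiny dataset of index tensors, used only to show batch shapes."""

    def __init__(self, n):
        self.n = n

    def __len__(self):
        return self.n

    def __getitem__(self, idx):
        return torch.tensor(idx)


dataset = ToyDataset(10)  # 10 samples with batch size 4 -> 4, 4, 2 if kept

for drop_last in (False, True):
    batch_sampler = BatchSampler(SequentialSampler(dataset), batch_size=4, drop_last=drop_last)
    loader = DataLoader(dataset, batch_sampler=batch_sampler)
    sizes = [len(batch) for batch in loader]
    print(f"drop_last={drop_last}: batch sizes {sizes}")
    # drop_last=False: [4, 4, 2]  -> trailing partial batch
    # drop_last=True:  [4, 4]     -> every batch has the configured size

With train_batch_size > 1, dropping the partial batch keeps every training step at a uniform batch size, which matches the intent of the one-line change in the patch.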