Fixed sequential batch size bug.
Signed-off-by: Andrzej Szablewski <[email protected]>
TheRootOf3 committed Mar 25, 2024
1 parent de166d5 commit 2d6cf7d
Showing 1 changed file with 2 additions and 1 deletion.
llm_unlearn_ucl/unlearn_harm.py (2 additions & 1 deletion)
@@ -428,7 +428,7 @@ def main(args) -> None:
 
     if args.sequential > 0:
         # NOTE: sequential/batch unlearning
-        num_batches_per_epoch = args.samples_count // args.batch_size
+        num_batches_per_epoch = args.samples_count // args.sequential // args.batch_size
 
         for seq, (train_normal_loader, train_bad_loader) in enumerate(
             zip(train_normal_loaders, train_bad_loaders)
@@ -459,6 +459,7 @@ def main(args) -> None:
                 accelerator.backward(loss / num_batches_per_epoch)
                 bad_loss /= num_batches_per_epoch
                 accu_bad_loss += bad_loss.item()
+                # If args.batch_size < args.samples_count//args.sequential, always perform gradient accumulation.
             epoch_num += 1
             final_model_tag = epoch_num
             optimizer.step()
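Why the extra // args.sequential matters: in sequential unlearning the dataset is partitioned into args.sequential splits, so each epoch over one split iterates over only samples_count // sequential samples. The old formula counted batches over the full dataset, so the loss passed to accelerator.backward() was divided by too large a count. A minimal sketch of the arithmetic follows; the concrete values are illustrative assumptions, not taken from the repository.

# Illustrative sketch of the fix (all values assumed, not from the repo).
samples_count = 1024  # total number of unlearning samples
sequential = 4        # number of sequential splits
batch_size = 32       # per-step batch size

# Old (buggy): batches counted over the whole dataset, even though each
# sequential split iterates over only samples_count // sequential samples.
old_num_batches = samples_count // batch_size                 # 32

# Fixed: batches counted per split, matching what each dataloader yields.
new_num_batches = samples_count // sequential // batch_size   # 8

# The loss is divided by this count before accelerator.backward(), so the
# old value shrank each accumulated gradient by an extra factor of
# `sequential` (here 32 vs. 8, i.e. 4x too small).
print(old_num_batches, new_num_batches)  # 32 8

With the corrected count, summing loss / num_batches_per_epoch over one split's batches averages the per-batch losses exactly once per optimizer.step().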
