fix: losses from earlier stages not showing up
nmvrs committed Jun 6, 2024
1 parent f1e3a70 commit 65004c7
Showing 1 changed file with 4 additions and 8 deletions.
12 changes: 4 additions & 8 deletions moai/core/model.py
@@ -313,10 +313,10 @@ def closure(tensors, index, steps, stage, optimizer, objective):
 
         batch = benedict.benedict(batch, keyattr_enabled=False)
         batch[C._MOAI_METRICS_] = {}
-        # batch[C._MOAI_LOSSES_] = {
-        #     "raw": {},
-        #     "weighted": {},
-        # }
+        batch[C._MOAI_LOSSES_] = {
+            "raw": {},
+            "weighted": {},
+        }
         # TODO: check for refresh optimizers each step
         for stage, proc in self.process[C._FIT_][C._BATCH_].items():
             flows = proc[C._FLOWS_]
@@ -341,10 +341,6 @@
                 closure, batch, batch_idx, flows, stage, optimizer, objective
             )
             for iter in range(proc.get(C._ITERATIONS_, 1)):
-                batch[C._MOAI_LOSSES_] = {
-                    "raw": {},
-                    "weighted": {},
-                }
                 if (  # when the strategy handles accumulation, we want to always call the optimizer step
                     not self.trainer.strategy.handles_gradient_accumulation
                     and self.trainer.fit_loop._should_accumulate()
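In short, the commit initializes batch[C._MOAI_LOSSES_] once per batch, alongside batch[C._MOAI_METRICS_], instead of re-creating it inside each stage's iteration loop, where every stage wiped out the losses recorded by earlier stages. Below is a minimal, hypothetical sketch of that pattern; the dict keys, stage layout, and helper functions are illustrative and not the actual moai code.

# Hypothetical, simplified reproduction of the bug and the fix; the
# dictionary keys and stage structure are illustrative, not moai's API.

def run_batch_buggy(stages: dict) -> dict:
    batch = {}
    for stage, losses in stages.items():
        # Re-initializing inside the stage loop discards earlier stages' losses.
        batch["_moai_losses_"] = {"raw": {}, "weighted": {}}
        for name, value in losses.items():
            batch["_moai_losses_"]["raw"][name] = value
    return batch["_moai_losses_"]["raw"]

def run_batch_fixed(stages: dict) -> dict:
    # Initialize once per batch, as the commit does before the stage loop.
    batch = {"_moai_losses_": {"raw": {}, "weighted": {}}}
    for stage, losses in stages.items():
        for name, value in losses.items():
            batch["_moai_losses_"]["raw"][name] = value
    return batch["_moai_losses_"]["raw"]

stages = {"stage_a": {"recon": 0.5}, "stage_b": {"kl": 0.1}}
print(run_batch_buggy(stages))  # {'kl': 0.1}: stage_a's loss is gone
print(run_batch_fixed(stages))  # {'recon': 0.5, 'kl': 0.1}

With the per-batch initialization, each stage only adds its own entries to the shared losses dict, so losses from earlier stages survive through to logging.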
