Commit

fix passing of metric to log function
sfluegel committed Jan 5, 2024
1 parent 27ec370 commit 6c72805
Showing 1 changed file with 4 additions and 2 deletions.
chebai/models/base.py (4 additions & 2 deletions)
@@ -122,8 +122,10 @@ def _log_metrics(self, prefix, metrics, batch_size):
         # don't use sync_dist=True if the metric is a torchmetrics-metric
         # (see https://github.com/Lightning-AI/pytorch-lightning/discussions/6501#discussioncomment-569757)
         for metric_name, metric in metrics.items():
-            m = metric.compute()
+            m = None  # m = metric.compute()
             if isinstance(m, dict):
+                # todo: is this case needed? it requires logging values directly which does not give accurate results
+                # with the current metric-setup
                 for k, m2 in m.items():
                     self.log(
                         f"{prefix}{metric_name}{k}",
@@ -137,7 +139,7 @@ def _log_metrics(self, prefix, metrics, batch_size):
             else:
                 self.log(
                     f"{prefix}{metric_name}",
-                    m,
+                    metric,
                     batch_size=batch_size,
                     on_step=False,
                     on_epoch=True,
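The change passes the torchmetrics Metric object itself to self.log instead of the precomputed tensor m. Lightning then takes care of calling compute() and reset() at epoch end and of synchronizing metric state across devices, which is also why the comment above warns against sync_dist=True for torchmetrics metrics. A minimal sketch of this pattern, using an assumed toy model rather than the actual chebai code:

import torch
import pytorch_lightning as pl
import torchmetrics


class TinyClassifier(pl.LightningModule):
    """Toy model illustrating how to log a torchmetrics Metric object."""

    def __init__(self):
        super().__init__()
        self.layer = torch.nn.Linear(16, 2)
        self.val_acc = torchmetrics.Accuracy(task="multiclass", num_classes=2)

    def validation_step(self, batch, batch_idx):
        x, y = batch
        preds = self.layer(x)
        # update() only accumulates state; compute() is deferred to Lightning
        self.val_acc.update(preds, y)
        # Log the Metric object itself (as the commit does with `metric`),
        # not val_acc.compute(): Lightning computes, logs, and resets it at
        # epoch end and syncs state across devices without sync_dist=True.
        self.log("val_acc", self.val_acc, batch_size=x.shape[0],
                 on_step=False, on_epoch=True)

Logging per-batch computed values (the old m) would average batch-level results, which for non-decomposable metrics does not equal the metric over the whole epoch; the todo note in the diff flags the same accuracy problem for the dict branch, which still logs values directly.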
