Skip to content

Commit

Permalink
merge in fastai#3554, refactor code, docs update
Browse files Browse the repository at this point in the history
  • Loading branch information
warner-benjamin committed Jan 19, 2022
1 parent e509b29 commit 85c5fae
Show file tree
Hide file tree
Showing 3 changed files with 192 additions and 184 deletions.
4 changes: 2 additions & 2 deletions fastai/_nbdev.py
Original file line number Diff line number Diff line change
Expand Up @@ -463,10 +463,10 @@
"flatten_check": "13a_learner.ipynb",
"Metric": "13a_learner.ipynb",
"AvgMetric": "13a_learner.ipynb",
"AccumMetric": "13a_learner.ipynb",
"AvgSmoothMetric": "13a_learner.ipynb",
"AvgLoss": "13a_learner.ipynb",
"AvgSmoothLoss": "13a_learner.ipynb",
"AvgSmoothMetric": "13a_learner.ipynb",
"AccumMetric": "13a_learner.ipynb",
"ValueMetric": "13a_learner.ipynb",
"Recorder": "13a_learner.ipynb",
"Learner.freeze_to": "13a_learner.ipynb",
Expand Down
104 changes: 53 additions & 51 deletions fastai/learner.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@
__all__ = ['CancelStepException', 'CancelFitException', 'CancelEpochException', 'CancelTrainException',
'CancelValidException', 'CancelBatchException', 'LogMetric', 'MetricType', 'ActivationType',
'replacing_yield', 'verify_metric', 'save_model', 'load_model', 'Learner', 'before_batch_cb', 'load_learner',
'to_detach_from_dl', 'Metric', 'AvgMetric', 'AvgLoss', 'AvgSmoothLoss', 'AvgSmoothMetric', 'AccumMetric',
'to_detach_from_dl', 'Metric', 'AvgMetric', 'AccumMetric', 'AvgSmoothMetric', 'AvgLoss', 'AvgSmoothLoss',
'ValueMetric', 'Recorder']

# Cell
Expand Down Expand Up @@ -277,7 +277,7 @@ def show_results(self, ds_idx=1, dl=None, max_n=9, shuffle=True, **kwargs):
if dl is None: dl = self.dls[ds_idx].new(shuffle=shuffle)
b = dl.one_batch()
_,_,preds = self.get_preds(dl=[b], with_decoded=True)
self.dls.show_results(b, preds, max_n=max_n, **kwargs)
dl.show_results(b, preds, max_n=max_n, **kwargs)

def show_training_loop(self):
indent = 0
Expand Down Expand Up @@ -476,55 +476,6 @@ def name(self):
if self._name: return self._name
else: return self.func.func.__name__ if hasattr(self.func, 'func') else self.func.__name__

# Cell
class AvgLoss(Metric):
    "Average the losses taking into account potential different batch sizes"
    def reset(self):
        self.total = 0.
        self.count = 0

    def accumulate(self, learn):
        n = find_bs(learn.yb)
        self.count += n
        # weight each batch's mean loss by its size so uneven (e.g. final) batches average correctly
        self.total += learn.to_detach(learn.loss.mean())*n

    @property
    def value(self):
        # None until at least one batch has been accumulated
        if self.count == 0: return None
        return self.total/self.count

    @property
    def name(self): return "loss"

# Cell
class AvgSmoothLoss(Metric):
    "Smooth average of the losses (exponentially weighted with `beta`)"
    def __init__(self, beta=0.98): self.beta = beta
    def reset(self): self.count,self.val = 0,tensor(0.)  # number of updates, running EMA value
    def accumulate(self, learn):
        self.count += 1
        # EMA update: torch.lerp(new, old, beta) == beta*old + (1-beta)*new
        self.val = torch.lerp(to_detach(learn.loss.mean(), gather=False), self.val, self.beta)
    @property
    # Debiased EMA (divide by 1-beta^count). Guard count == 0: beta**0 == 1 makes the
    # denominator 0 and the unguarded expression yields NaN — return None instead,
    # matching `AvgLoss.value` and `AvgSmoothMetric.value`.
    def value(self): return self.val/(1-self.beta**self.count) if self.count != 0 else None

# Cell
@delegates(Metric)
class AvgSmoothMetric(Metric):
    "Smooth average the values of `func` (exponentially weighted with `beta`)"
    def __init__(self, func, beta=0.98, **kwargs):
        # `_split_kwargs` (from `Metric`) presumably routes kwargs between Metric.__init__
        # and `func` — TODO confirm against the base class
        super().__init__(**self._split_kwargs(Metric.__init__, **kwargs))
        self.func, self.fkwargs = func, self._split_kwargs(func, **kwargs)
        self.beta, self.log_metric = beta, LogMetric.Train

    def reset(self): self.count,self.val = 0,tensor(0.)  # number of updates, running EMA value

    def accumulate(self, learn):
        super().accumulate(learn)
        # `to_np`/`invert_arg` come from the `Metric` base: optionally convert to numpy,
        # and/or call func(targ, pred) instead of func(pred, targ)
        if self.to_np: self.pred,self.targ = learn.to_detach(self.pred).numpy(),learn.to_detach(self.targ).numpy()
        val = self.func(self.targ, self.pred, **self.fkwargs) if self.invert_arg else self.func(self.pred, self.targ, **self.fkwargs)
        # EMA update: beta*old + (1-beta)*new; torch.lerp(new, old, beta) applies the same weighting
        if self.to_np: self.val = self.val*self.beta + val*(1-self.beta)
        else: self.val = torch.lerp(to_detach(val, gather=False), self.val, self.beta)
        self.count += 1

    @property
    def value(self): return self.val/(1-self.beta**self.count) if self.count != 0 else None  # debiased EMA; None before any update

    @property
    def name(self):
        # Fix: honor a user-supplied name (stored as `_name` by `Metric`) before falling
        # back to `func`'s name — matches the pattern used by the other `Metric` subclasses
        if self._name: return self._name
        else: return self.func.func.__name__ if hasattr(self.func, 'func') else self.func.__name__

# Cell
@delegates(Metric)
class AccumMetric(Metric):
Expand Down Expand Up @@ -566,6 +517,57 @@ def value(self):
@property
def name(self): return self.func.func.__name__ if hasattr(self.func, 'func') else self.func.__name__

# Cell
@delegates(Metric)
class AvgSmoothMetric(Metric):
    "Smooth average the values of `func` (exponentially weighted with `beta`)"
    def __init__(self, func, beta=0.98, **kwargs):
        # `_split_kwargs` (defined on `Metric`) presumably partitions kwargs between
        # `Metric.__init__` and `func` — TODO confirm against the base class
        super().__init__(**self._split_kwargs(Metric.__init__, **kwargs))
        self.func, self.fkwargs = func, self._split_kwargs(func, **kwargs)
        # smoothed metrics are tagged for the training phase
        self.beta, self.log_metric = beta, LogMetric.Train

    def reset(self): self.count,self.val = 0,tensor(0.)  # number of updates, running EMA value

    def accumulate(self, learn):
        super().accumulate(learn)
        # `to_np`/`invert_arg` are set by the `Metric` base (not visible here): convert
        # pred/targ to numpy, and/or call func(targ, pred) instead of func(pred, targ)
        if self.to_np: self.pred,self.targ = learn.to_detach(self.pred).numpy(),learn.to_detach(self.targ).numpy()
        val = self.func(self.targ, self.pred, **self.fkwargs) if self.invert_arg else self.func(self.pred, self.targ, **self.fkwargs)
        # EMA update: beta*old + (1-beta)*new; torch.lerp(new, old, beta) applies the same weighting
        if self.to_np: self.val = self.val*self.beta + val*(1-self.beta)
        else: self.val = torch.lerp(to_detach(val, gather=False), self.val, self.beta)
        self.count += 1

    @property
    def value(self): return self.val/(1-self.beta**self.count) if self.count != 0 else None  # debiased EMA; None before any update

    @property
    def name(self):
        # prefer an explicit user-supplied name (stored as `_name` by `Metric`); otherwise
        # use `func`'s name, reaching through `.func` (presumably a partial/wrapper) if present
        if self._name: return self._name
        else: return self.func.func.__name__ if hasattr(self.func, 'func') else self.func.__name__

# Cell
class AvgLoss(Metric):
    "Average the losses taking into account potential different batch sizes"
    def reset(self): self.total,self.count = 0.,0  # size-weighted sum of batch losses, total item count
    def accumulate(self, learn):
        bs = find_bs(learn.yb)
        # weight the batch's mean loss by its size so uneven (e.g. final) batches don't skew the average
        self.total += learn.to_detach(learn.loss.mean())*bs
        self.count += bs
    @property
    def value(self): return self.total/self.count if self.count != 0 else None  # None before any accumulation
    @property
    def name(self): return "loss"

# Cell
class AvgSmoothLoss(Metric):
    "Smooth average of the losses (exponentially weighted with `beta`)"
    def __init__(self, beta=0.98): self.beta = beta
    def reset(self): self.count,self.val = 0,tensor(0.)  # number of updates, running EMA value
    def accumulate(self, learn):
        self.count += 1
        # EMA update: torch.lerp(new, old, beta) == beta*old + (1-beta)*new
        self.val = torch.lerp(to_detach(learn.loss.mean(), gather=False), self.val, self.beta)
    @property
    # Debiased EMA (divide by 1-beta^count). Guard count == 0: beta**0 == 1 makes the
    # denominator 0 and the unguarded expression yields NaN — return None instead,
    # matching `AvgLoss.value` and `AvgSmoothMetric.value`.
    def value(self): return self.val/(1-self.beta**self.count) if self.count != 0 else None

# Cell
class ValueMetric(Metric):
"Use to include a pre-calculated metric value (for instance calculated in a `Callback`) and returned by `func`"
Expand Down
Loading

0 comments on commit 85c5fae

Please sign in to comment.