import os

import numpy as np
import torch


def split_feature(tensor, type="split"):
    """Split a (N, C, ...) tensor in two along the channel dimension.

    type = "split": first half vs. second half of the channels.
    type = "cross": even-indexed vs. odd-indexed channels.
    """
    C = tensor.size(1)
    if type == "split":
        return tensor[:, :C // 2, ...], tensor[:, C // 2:, ...]
    elif type == "cross":
        return tensor[:, 0::2, ...], tensor[:, 1::2, ...]
    raise ValueError("type must be 'split' or 'cross', got {!r}".format(type))
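

# A minimal usage sketch (not part of the original file): both split modes on
# a dummy (N, C, H, W) activation; the shapes here are made up for illustration.
def _demo_split_feature():
    x = torch.randn(2, 8, 4, 4)           # N=2, C=8, H=W=4
    a, b = split_feature(x, "split")      # channels [0:4] and [4:8]
    c, d = split_feature(x, "cross")      # even and odd channel indices
    assert a.shape == b.shape == c.shape == d.shape == (2, 4, 4, 4)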


def save_model(model, optim, scheduler, dir, iteration):
    """Save model/optimizer/scheduler state to dir/checkpoint_<iteration>.pth.tar."""
    path = os.path.join(dir, "checkpoint_{}.pth.tar".format(iteration))
    state = {
        "iteration": iteration,
        "modelname": model.__class__.__name__,
        "model": model.state_dict(),
        "optim": optim.state_dict(),
        "scheduler": scheduler.state_dict() if scheduler is not None else None,
    }
    torch.save(state, path)
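

# A minimal usage sketch (not part of the original file): checkpointing a toy
# model; the output directory and iteration number are assumptions.
def _demo_save_model(out_dir="checkpoints"):
    model = torch.nn.Linear(4, 2)
    optim = torch.optim.SGD(model.parameters(), lr=0.1)
    os.makedirs(out_dir, exist_ok=True)
    save_model(model, optim, scheduler=None, dir=out_dir, iteration=1000)
    # writes checkpoints/checkpoint_1000.pth.tar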


def load_state(path, cuda):
    """Load a checkpoint dict, mapping tensors to GPU or CPU as requested."""
    if cuda:
        print("load to gpu")
        state = torch.load(path)
    else:
        print("load to cpu")
        # Remap all tensors onto CPU regardless of where they were saved.
        state = torch.load(path, map_location=lambda storage, loc: storage)
    return state
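

# A minimal usage sketch (not part of the original file): resuming from the
# checkpoint written by _demo_save_model; path and architecture are assumptions.
def _demo_load_state(path="checkpoints/checkpoint_1000.pth.tar"):
    state = load_state(path, cuda=torch.cuda.is_available())
    model = torch.nn.Linear(4, 2)          # must match the saved architecture
    model.load_state_dict(state["model"])
    print("resuming from iteration", state["iteration"])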


def _fast_hist(label_true, label_pred, n_class):
    """Accumulate an n_class x n_class confusion matrix (rows = true class)."""
    # Ignore pixels whose ground-truth label falls outside [0, n_class).
    mask = (label_true >= 0) & (label_true < n_class)
    hist = np.bincount(
        n_class * label_true[mask].astype(int) + label_pred[mask].astype(int),
        minlength=n_class ** 2,
    ).reshape(n_class, n_class)
    return hist
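

# A minimal usage sketch (not part of the original file): a 2-class confusion
# matrix where one true-0 pixel is mispredicted as class 1.
def _demo_fast_hist():
    lt = np.array([0, 0, 1, 1])
    lp = np.array([0, 1, 1, 1])
    hist = _fast_hist(lt, lp, n_class=2)
    # hist == [[1, 1],
    #          [0, 2]]
    assert hist[0, 1] == 1 and hist[1, 1] == 2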


def compute_accuracy(label_trues, label_preds, n_class):
    """Return overall acc, mean per-class acc, mean IoU, and freq-weighted IoU."""
    hist = np.zeros((n_class, n_class))
    for lt, lp in zip(label_trues, label_preds):
        hist += _fast_hist(lt.flatten(), lp.flatten(), n_class)
    acc = np.diag(hist).sum() / hist.sum()      # overall pixel accuracy
    acc_cls = np.diag(hist) / hist.sum(axis=1)  # per-class accuracy
    acc_cls = np.nanmean(acc_cls)               # mean over classes present
    iu = np.diag(hist) / (hist.sum(axis=1) + hist.sum(axis=0) - np.diag(hist))
    mean_iu = np.nanmean(iu)                    # mean intersection-over-union
    freq = hist.sum(axis=1) / hist.sum()        # per-class pixel frequency
    fwavacc = (freq[freq > 0] * iu[freq > 0]).sum()  # frequency-weighted IoU
    return acc, acc_cls, mean_iu, fwavacc
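

# A minimal usage sketch (not part of the original file): scoring a dummy
# 3-class segmentation against its ground truth; sizes are made up.
def _demo_compute_accuracy():
    gt = np.random.randint(0, 3, size=(2, 8, 8))   # two 8x8 label maps
    pred = gt.copy()
    pred[:, 0, :] = 0                              # corrupt the first row
    acc, acc_cls, mean_iu, fwavacc = compute_accuracy(gt, pred, n_class=3)
    print(acc, acc_cls, mean_iu, fwavacc)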


def count_parameters(model):
    """Count the trainable (requires_grad) parameters of a model."""
    return sum(p.numel() for p in model.parameters() if p.requires_grad)
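

# A minimal usage sketch (not part of the original file): a 4->2 linear layer
# has 4 * 2 weights plus 2 biases, i.e. 10 trainable parameters.
def _demo_count_parameters():
    layer = torch.nn.Linear(4, 2)
    assert count_parameters(layer) == 10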