@@ -49,12 +49,12 @@ def load_data(data_root_dir):
         if name == 'train':
             """
             Use the hard negative mining approach.
-            The initial positive:negative ratio is 1:3. Since there are far fewer positives than negatives, take the number of positives as the baseline and randomly draw three times as many negatives from the negative set as the initial negative set.
+            The initial positive:negative ratio is 1:1. Since there are far fewer positives than negatives, take the number of positives as the baseline and randomly draw the same number of negatives from the negative set as the initial negative set.
             """
             positive_list = data_set.get_positives()
             negative_list = data_set.get_negatives()

-            init_negative_idxs = random.sample(range(len(negative_list)), len(positive_list) * 3)
+            init_negative_idxs = random.sample(range(len(negative_list)), len(positive_list))
             init_negative_list = [negative_list[idx] for idx in range(len(negative_list)) if idx in init_negative_idxs]
             remain_negative_list = [negative_list[idx] for idx in range(len(negative_list))
                                     if idx not in init_negative_idxs]
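In isolation, the 1:1 sampling works like this — a minimal runnable sketch, with dummy `positive_list`/`negative_list` standing in for the outputs of `data_set.get_positives()` and `data_set.get_negatives()`:

```python
import random

# Dummy stand-ins for data_set.get_positives() / data_set.get_negatives()
positive_list = [('pos', i) for i in range(100)]
negative_list = [('neg', i) for i in range(10000)]

# Draw exactly as many negatives as there are positives (1:1 ratio).
init_negative_idxs = set(random.sample(range(len(negative_list)), len(positive_list)))

init_negative_list = [negative_list[idx] for idx in range(len(negative_list))
                      if idx in init_negative_idxs]
# Everything not drawn stays in the pool for later hard negative mining rounds.
remain_negative_list = [negative_list[idx] for idx in range(len(negative_list))
                        if idx not in init_negative_idxs]

assert len(init_negative_list) == len(positive_list)
```

Note that `random.sample` returns a list, so the two `idx in init_negative_idxs` membership tests in the diff are linear scans over it; wrapping the result in a `set`, as in the sketch, makes each test O(1).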
@@ -266,9 +266,11 @@ def train_model(data_loaders, model, criterion, optimizer, lr_scheduler, num_epo
     model = model.to(device)

     criterion = hinge_loss
-    optimizer = optim.SGD(model.parameters(), lr=1e-3, momentum=0.9)
-    lr_schduler = optim.lr_scheduler.StepLR(optimizer, step_size=7, gamma=0.1)
+    # The initial training set is small, so use a lower learning rate
+    optimizer = optim.SGD(model.parameters(), lr=1e-4, momentum=0.9)
+    # Train for 10 epochs in total, decaying the learning rate every 4 epochs
+    lr_scheduler = optim.lr_scheduler.StepLR(optimizer, step_size=4, gamma=0.1)

-    best_model = train_model(data_loaders, model, criterion, optimizer, lr_schduler, num_epochs=25, device=device)
+    best_model = train_model(data_loaders, model, criterion, optimizer, lr_scheduler, num_epochs=10, device=device)
     # Save the best model parameters
     save_model(best_model, 'models/best_linear_svm_alexnet_car.pth')
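`criterion = hinge_loss` refers to a function not shown in this hunk. As a reference point only, a common binary hinge loss for a linear SVM head looks like the sketch below; the repository's actual `hinge_loss` may be formulated differently (e.g. multi-class):

```python
import torch

def hinge_loss(outputs, labels):
    # Sketch of a standard binary SVM hinge loss, not necessarily the
    # repository's exact implementation.
    # outputs: raw scores, shape (N,); labels: +1 / -1, shape (N,)
    # loss = mean(max(0, 1 - y * f(x)))
    return torch.clamp(1.0 - labels * outputs, min=0).mean()

scores = torch.tensor([0.8, -1.2, 0.3])
targets = torch.tensor([1.0, -1.0, -1.0])
print(hinge_loss(scores, targets))  # tensor(0.5000)
```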
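As a quick sanity check of the new schedule (standard `torch.optim` API, with a dummy parameter in place of the real model): starting from 1e-4, `StepLR(step_size=4, gamma=0.1)` trains epochs 0-3 at 1e-4, epochs 4-7 at 1e-5, and epochs 8-9 at 1e-6 over the 10 epochs.

```python
import torch
from torch import optim

params = [torch.nn.Parameter(torch.zeros(1))]  # dummy parameter
optimizer = optim.SGD(params, lr=1e-4, momentum=0.9)
scheduler = optim.lr_scheduler.StepLR(optimizer, step_size=4, gamma=0.1)

for epoch in range(10):
    print(f"epoch {epoch}: lr = {optimizer.param_groups[0]['lr']:.0e}")
    # ... one training epoch would run here ...
    optimizer.step()   # torch expects optimizer.step() before scheduler.step()
    scheduler.step()
# epochs 0-3 train at 1e-04, epochs 4-7 at 1e-05, epochs 8-9 at 1e-06
```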