forked from Yacalis/celeba-classification
-
Notifications
You must be signed in to change notification settings - Fork 0
/
Config.py
48 lines (40 loc) · 1.77 KB
/
Config.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Tue Feb 20 15:21:00 2018
@author: Yacalis
"""
import configargparse
class Config:
    """Command-line / config-file options for celeba classification runs.

    On construction, parses known arguments with configargparse and stores
    the resulting namespace in ``self.config``. Unrecognized options are
    reported but deliberately non-fatal (see __init__).
    """

    def __init__(self):
        self.config, unparsed = self.main()
        if unparsed:
            # Best-effort by design: warn instead of raising so extra
            # options in a shared config file do not abort a run.
            print(f'unparsed config options: {unparsed}')
        return

    @staticmethod
    def _str2bool(value) -> bool:
        """Parse a boolean command-line value.

        BUG FIX: the original used ``type=bool``, but ``bool('False')`` is
        True — any non-empty string parses as True, so ``--change_lr False``
        silently enabled the flag. Accept the usual true/false spellings
        explicitly; argparse reports ValueError as a conversion error.
        """
        if isinstance(value, bool):
            return value
        lowered = str(value).strip().lower()
        if lowered in ('true', 't', 'yes', 'y', '1'):
            return True
        if lowered in ('false', 'f', 'no', 'n', '0'):
            return False
        raise ValueError(f'expected a boolean value, got: {value!r}')

    @staticmethod
    def main() -> tuple:
        """Build the parser and return ``(namespace, unparsed_args)``.

        Returns the pair produced by ``parser.parse_known_args()``: the
        parsed options namespace and the list of unrecognized tokens.
        """
        parser = configargparse.ArgParser()

        # Callbacks
        cback_arg = parser.add_argument_group('Callbacks')
        # EarlyStopping
        cback_arg.add_argument('--es_min_delta', type=float, default=0.01)
        cback_arg.add_argument('--es_patience', type=int, default=4)
        # ReduceLROnPlateau
        cback_arg.add_argument('--lr_epsilon', type=float, default=0.01)
        cback_arg.add_argument('--lr_factor', type=float, default=0.5)
        cback_arg.add_argument('--lr_min_lr', type=float, default=1e-07)
        cback_arg.add_argument('--lr_patience', type=int, default=2)
        # ModelCheckpoint
        cback_arg.add_argument('--period', type=int, default=10)

        # Training and testing
        train_arg = parser.add_argument_group('Training')
        train_arg.add_argument('--optimizer', type=str, default='adam')
        train_arg.add_argument('--batch_size', type=int, default=4)
        train_arg.add_argument('--epochs', type=int, default=20)
        # Use the explicit boolean parser — type=bool would treat any
        # non-empty string (including 'False') as True.
        train_arg.add_argument('--change_lr', type=Config._str2bool,
                               default=True)
        train_arg.add_argument('--change_bs', type=Config._str2bool,
                               default=False)
        # NOTE(review): original comment said options are simple, complex,
        # or single, but the default is 'celeba' — confirm the current set.
        train_arg.add_argument('--complexity', type=str, default='celeba')
        return parser.parse_known_args()