-
Notifications
You must be signed in to change notification settings - Fork 3
/
ClassificationScanConfig.py
127 lines (101 loc) · 3.68 KB
/
ClassificationScanConfig.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
import random
import getopt
from DLTools.Permutator import *
import sys,argparse
from numpy import arange
import os
# Input for Mixing Generator
FileSearch="/data/LArIAT/h5_files_2D_3D/2D_h5/*.h5"
from multiprocessing import cpu_count
from DLTools.Utils import gpu_count
max_threads=12
n_threads=int(min(round(cpu_count()/gpu_count()),max_threads))
print "Found",cpu_count(),"CPUs and",gpu_count(),"GPUs. Using",n_threads,"threads. max_threads =",max_threads
# Particle classes the network must discriminate between. Each entry names
# an input sample; the classifier gets one output unit per entry.
# 'proton'/'antiproton' are deliberately excluded from this scan.
Particles = [
    'electron', 'antielectron',
    'pion0',
    'photon',
    'pionPlus', 'pionMinus',
    'muon', 'antimuon',
    'kaonMinus', 'kaonPlus',
]
# Generation Model
#
# Master configuration dictionary for the classification scan. The keys
# listed in Params below are overwritten per scan point further down.
#
# NOTE(review): several string values carry nested quotes (e.g. "'normal'"),
# presumably because the consuming driver eval()s string-valued entries —
# confirm against the driver before normalizing the quoting.
Config={
    "MaxEvents":int(.5e6),   # Total number of events to read.
    "NTestSamples":25000,    # Events held out for testing.
    "Particles":Particles,
    "NClasses":len(Particles),  # One output class per particle type.
    "Epochs":1000,
    "BatchSize":128,
    "DownSampleSize":8,
    "ScanWindowSize":256,
    "Normalize":True,
    "EnergyCut":0.61,

    # Configures the parallel data generator that read the input.
    # These have been optimized by hand. Your system may have
    # more optimal configuration.
    "n_threads":n_threads,   # Number of workers when using mixing generator.
    "n_threads_cache":4,     # Number of workers reading cached data.
    "multiplier":1,          # Read N batches worth of data in each worker

    # How weights are initialized
    "WeightInitialization":"'normal'",

    # Model topology: which TPC views to use and the scanned
    # width/depth defaults (overridden by Params below).
    "View1":True,
    "View2":True,
    "Width":32,
    "Depth":2,

    # No specific reason to pick these. Needs study.
    # Note that the optimizer name should be the class name (https://keras.io/optimizers/)
    "loss":"'categorical_crossentropy'",
    "activation":"'relu'",
    "BatchNormLayers":True,
    "DropoutLayers":True,

    # Specify the optimizer class name as True (see: https://keras.io/optimizers/)
    # and parameters (using constructor keywords as parameter name).
    # Note if parameter is not specified, default values are used.
    "optimizer":"'RMSprop'",
    "lr":0.01,
    "decay":0.01,

    # Parameter monitored by Callbacks
    "monitor":"'val_loss'",

    # Active Callbacks
    # Specify the CallBack class name as True (see: https://keras.io/callbacks/)
    # and parameters (using constructor keywords as parameter name,
    # with classname added).
    "ModelCheckpoint":True,
    # NOTE(review): key misspells "Checkpoint" as "Chekpoint"; the consumer
    # likely matches this exact spelling — confirm before renaming.
    "Model_Chekpoint_save_best_only":False,

    # Configure Running time callback
    # Set RunningTime to a value to stop training after N seconds.
    "RunningTime": 2*3600,

    # Load last trained version of this model configuration. (based on Name var below)
    "LoadPreviousModel":True,
}
# Parameters to scan and their scan points.
Params={ "Width":[32,64,128,256,512],
"Depth":range(1,5) }
# Get all possible configurations.
PS=Permutator(Params)
Combos=PS.Permutations()
print "HyperParameter Scan: ", len(Combos), "possible combiniations."
# HyperParameter sets are numbered. You can iterate through them using
# the -s option followed by an integer .
#
# The driver that executes this config file may inject a variable named
# HyperParamSet into this namespace; dir() at module level lists the
# currently-defined global names, so this detects that injection.
# NOTE(review): assumes the driver exec()s this file in a namespace that
# may already contain HyperParamSet — confirm against the runner.
i=0
if "HyperParamSet" in dir():
    i=int(HyperParamSet)

# Apply the selected combination's values on top of the Config defaults.
for k in Combos[i]: Config[k]=Combos[i][k]

# Build a name for this configuration using the parameters we are
# scanning.
Name="LArTPCDNN"
for MetaData in Params.keys():
    # Strip quote characters so nested-quoted values (e.g. "'relu'")
    # yield clean filename components.
    val=str(Config[MetaData]).replace('"',"")
    Name+="_"+val.replace("'","")

if "HyperParamSet" in dir():
    # A specific set was requested: report which combination was picked.
    print "______________________________________"
    print "ScanConfiguration"
    print "______________________________________"
    print "Picked combination: ",i
    print "Combo["+str(i)+"]="+str(Combos[i])
    print "Model Filename: ",Name
    print "______________________________________"
else:
    # No set requested: list every combination so the user can pick one
    # to pass via the -s option.
    for ii,c in enumerate(Combos):
        print "Combo["+str(ii)+"]="+str(c)