Skip to content

Commit

Permalink
Misc updates
Browse files Browse the repository at this point in the history
Update models to optimize on the R² metric instead of MSE. All models will now print the model summary when print is called on them.

Removed print from base generator

Tinkered with example files
  • Loading branch information
jacobbieker committed Oct 26, 2018
1 parent e8e8745 commit 7cdb1c0
Show file tree
Hide file tree
Showing 8 changed files with 16 additions and 17 deletions.
3 changes: 1 addition & 2 deletions examples/building_hdf5.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,5 @@
from factnn import GammaPreprocessor, GammaDiffusePreprocessor, ProtonPreprocessor
import os.path

"""
This is just to show how to make all the HDF5 files; it's simply the same as in the energy, separation, and source_detection
files but without the need to import tensorflow or anything
Expand All @@ -10,7 +9,7 @@
obs_dir = [base_dir + "public/"]
gamma_dir = [base_dir + "sim/gamma/"]
proton_dir = [base_dir + "sim/proton/"]
gamma_dl2 = "gamma_simulations_diffuse_facttools_dl2.hdf5"
gamma_dl2 = "../gamma_simulations_diffuse_facttools_dl2.hdf5"

shape = [35,60]
rebin_size = 10
Expand Down
5 changes: 3 additions & 2 deletions examples/energy.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,7 @@

energy_generator_configuration = {
'seed': 1337,
'batch_size': 64,
'batch_size': 32,
'input': '../gamma.hdf5',
'start_slice': 0,
'number_slices': 25,
Expand Down Expand Up @@ -54,7 +54,7 @@
'strides_lstm': 1,
'num_fc': 3,
'pooling': True,
'neurons': [32, 16, 8, 16, 32, 48, 64],
'neurons': [32, 32, 16, 16, 16, 32, 48, 64],
'shape': [25, 38, 38, 1],
'start_slice': 0,
'number_slices': 25,
Expand All @@ -63,6 +63,7 @@
}

energy_model = EnergyModel(config=energy_model_configuration)
print(energy_model)

"""
Expand Down
8 changes: 4 additions & 4 deletions examples/source_detection.py
Original file line number Diff line number Diff line change
Expand Up @@ -56,8 +56,8 @@


source_model_configuration = {
'conv_dropout': 0.3,
'lstm_dropout': 0.4,
'conv_dropout': 0.2,
'lstm_dropout': 0.3,
'fc_dropout': 0.5,
'num_conv3d': 1,
'kernel_conv3d': 2,
Expand All @@ -76,8 +76,8 @@
}

sign_model_configuration = {
'conv_dropout': 0.3,
'lstm_dropout': 0.4,
'conv_dropout': 0.2,
'lstm_dropout': 0.3,
'fc_dropout': 0.5,
'num_conv3d': 2,
'kernel_conv3d': 2,
Expand Down
1 change: 0 additions & 1 deletion factnn/data/base_generator.py
Original file line number Diff line number Diff line change
Expand Up @@ -71,7 +71,6 @@ def __next__(self):
:return:
'''
if not self.from_directory:
print("Shape: " + str(self.input_shape))
if self.chunked:
if self.mode == "train":
while True:
Expand Down
3 changes: 2 additions & 1 deletion factnn/models/base_model.py
Original file line number Diff line number Diff line change
Expand Up @@ -222,7 +222,8 @@ def save(self):
return NotImplemented

def __str__(self):
return NotImplemented
self.model.summary()
return ""

def __repr__(self):
return NotImplemented
6 changes: 3 additions & 3 deletions factnn/models/energy_models.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@ def r2(y_true, y_pred):
from keras import backend as K
SS_res = K.sum(K.square(y_true - y_pred))
SS_tot = K.sum(K.square(y_true - K.mean(y_true)))
return 1 - SS_res / (SS_tot + K.epsilon())
return -1.*(1 - SS_res / (SS_tot + K.epsilon()))


class EnergyModel(BaseModel):
Expand Down Expand Up @@ -67,8 +67,8 @@ def create(self):

# Final Dense layer
model.add(Dense(1, activation='linear'))
model.compile(optimizer='adam', loss='mse',
metrics=['mae', r2])
model.compile(optimizer='adam', loss=r2,
metrics=['mae', 'mse'])

self.model = model

Expand Down
6 changes: 3 additions & 3 deletions factnn/models/source_models.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@ def r2(y_true, y_pred):
from keras import backend as K
SS_res = K.sum(K.square(y_true - y_pred))
SS_tot = K.sum(K.square(y_true - K.mean(y_true)))
return (1 - SS_res / (SS_tot + K.epsilon()))
return -1.*(1 - SS_res / (SS_tot + K.epsilon()))


class DispModel(BaseModel):
Expand Down Expand Up @@ -67,8 +67,8 @@ def create(self):

# Final Dense layer
model.add(Dense(1, activation='linear'))
model.compile(optimizer='adam', loss='mse',
metrics=['mae', r2])
model.compile(optimizer='adam', loss=r2,
metrics=['mae', 'mse'])

self.model = model

Expand Down
1 change: 0 additions & 1 deletion requirements.txt
Original file line number Diff line number Diff line change
@@ -1,6 +1,5 @@
# Requirements automatically generated by pigar.
# https://github.com/damnever/pigar
Keras==2.2.0
PyYAML == 3.12
astropy==3.0.3
h5py==2.8.0
Expand Down

0 comments on commit 7cdb1c0

Please sign in to comment.