Skip to content

Commit

Permalink
Start on combining time dimension
Browse files Browse the repository at this point in the history
Parts of the time dimension can now be summed (still somewhat clunkily) so that files load more quickly. In addition, the time dimension can now be mapped onto the channel axis when streaming files, which opens up the possibility of using other neural-network architectures that take 4D tensors instead of 5D ones.

API is still work in progress.
  • Loading branch information
jacobbieker committed Nov 1, 2018
1 parent 34ddc51 commit 792e2a4
Show file tree
Hide file tree
Showing 7 changed files with 337 additions and 50 deletions.
54 changes: 47 additions & 7 deletions examples/flow_from_files_separation.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
from factnn import GammaPreprocessor, ProtonPreprocessor, SeparationGenerator, SeparationModel
import os.path
from factnn.utils import kfold
from keras.models import load_model

base_dir = "../ihp-pc41.ethz.ch/public/phs/"
obs_dir = [base_dir + "public/"]
Expand Down Expand Up @@ -60,7 +61,7 @@

separation_generator_configuration = {
'seed': 1337,
'batch_size': 8,
'batch_size': 16,
'start_slice': 0,
'number_slices': shape[1] - shape[0],
'mode': 'train',
Expand All @@ -81,10 +82,15 @@
separation_train.train_preprocessor = gamma_train_preprocessor
separation_train.validate_preprocessor = gamma_validate_preprocessor

separation_validate.proton_train_preprocessor = proton_train_preprocessor
separation_validate.proton_validate_preprocessor = proton_validate_preprocessor
separation_validate.train_preprocessor = gamma_train_preprocessor
separation_validate.validate_preprocessor = gamma_validate_preprocessor

separation_model_configuration = {
'conv_dropout': 0.1,
'lstm_dropout': 0.2,
'fc_dropout': 0.4,
'conv_dropout': 0.4,
'lstm_dropout': 0.5,
'fc_dropout': 0.5,
'num_conv3d': 0,
'kernel_conv3d': 3,
'strides_conv3d': 1,
Expand All @@ -93,7 +99,7 @@
'strides_lstm': 2,
'num_fc': 2,
'pooling': True,
'neurons': [8, 16, 32, 8, 16],
'neurons': [16, 16, 16, 16, 16],
'shape': [gamma_train_preprocessor.shape[3], gamma_train_preprocessor.shape[2], gamma_train_preprocessor.shape[1], 1],
'start_slice': 0,
'number_slices': shape[1] - shape[0],
Expand All @@ -103,6 +109,10 @@
separation_model = SeparationModel(config=separation_model_configuration)

print(separation_model)

# Save the base model to use for the kfold validation
separation_model.save("Base_Separation.hdf5")
separation_model.model.save_weights("Base_Separation_weights.hdf5")
"""
Now run the models with the generators!
Expand All @@ -114,5 +124,35 @@

from examples.open_crab_sample_constants import NUM_EVENTS_GAMMA, NUM_EVENTS_PROTON

separation_model.train(train_generator=separation_train, validate_generator=separation_validate, val_num=int(NUM_EVENTS_PROTON*0.8*0.2), num_events=int(NUM_EVENTS_PROTON*0.8*0.8))

# 5-fold cross-validation: for each fold, rebuild the preprocessors and
# generators from that fold's train/validation path splits, restore the
# shared starting weights so every fold trains from the same initial
# state, train, and save the fold's model.
for fold in range(5):
    print(fold)
    # Now change preprocessors.
    # NOTE(review): presumably indexes[0][fold] are the training paths and
    # indexes[1][fold] the validation paths — confirm against kfold.split_data.
    gamma_configuration['paths'] = gamma_indexes[0][fold]
    proton_configuration['paths'] = proton_indexes[0][fold]
    proton_train_preprocessor = ProtonPreprocessor(config=proton_configuration)
    gamma_train_preprocessor = GammaPreprocessor(config=gamma_configuration)

    # The same config dicts are mutated in place before building the
    # validation preprocessors, so construction order matters here.
    gamma_configuration['paths'] = gamma_indexes[1][fold]
    proton_configuration['paths'] = proton_indexes[1][fold]

    proton_validate_preprocessor = ProtonPreprocessor(config=proton_configuration)
    gamma_validate_preprocessor = GammaPreprocessor(config=gamma_configuration)

    # Fresh generators for this fold; both share one configuration and are
    # then switched into their respective modes.
    separation_validate = SeparationGenerator(config=separation_generator_configuration)
    separation_train = SeparationGenerator(config=separation_generator_configuration)

    separation_validate.mode = "validate"
    separation_train.mode = "train"

    separation_train.proton_train_preprocessor = proton_train_preprocessor
    separation_train.proton_validate_preprocessor = proton_validate_preprocessor
    separation_train.train_preprocessor = gamma_train_preprocessor
    separation_train.validate_preprocessor = gamma_validate_preprocessor

    separation_validate.proton_train_preprocessor = proton_train_preprocessor
    separation_validate.proton_validate_preprocessor = proton_validate_preprocessor
    separation_validate.train_preprocessor = gamma_train_preprocessor
    separation_validate.validate_preprocessor = gamma_validate_preprocessor
    # Reset to the pre-training base weights saved before the loop so folds
    # do not leak training state into each other.
    separation_model.model.load_weights("Base_Separation_weights.hdf5")
    separation_model.train(train_generator=separation_train, validate_generator=separation_validate, val_num=int(NUM_EVENTS_PROTON*0.8*0.2), num_events=int(NUM_EVENTS_PROTON*0.8*0.8))
    separation_model.save("fold_" + str(fold) + "_separation.hdf5")
157 changes: 157 additions & 0 deletions examples/flow_sep_outside.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,157 @@
from factnn import GammaPreprocessor, ProtonPreprocessor, SeparationGenerator, SeparationModel
import os.path
from factnn.utils import kfold
from keras.models import load_model

# Locations of the observation and simulated (gamma/proton) photon streams.
base_dir = "../ihp-pc41.ethz.ch/public/phs/"
obs_dir = [base_dir + "public/"]
gamma_dir = [base_dir + "sim/gamma/"]
proton_dir = [base_dir + "sim/proton/"]

shape = [30,70]
rebin_size = 5


def _collect_phs_files(directories):
    """Return every file ending in 'phs.jsonl.gz' found under *directories*.

    Walks each directory tree in order, preserving os.walk traversal order.
    """
    return [
        os.path.join(root, file)
        for directory in directories
        for root, dirs, files in os.walk(directory)
        for file in files
        if file.endswith("phs.jsonl.gz")
    ]


# Gather the input files for each particle type.
gamma_paths = _collect_phs_files(gamma_dir)
proton_paths = _collect_phs_files(proton_dir)


# Now do the Kfold Cross validation Part for both sets of paths
gamma_indexes = kfold.split_data(gamma_paths, kfolds=5)
proton_indexes = kfold.split_data(proton_paths, kfolds=5)


# Preprocessor configuration for the gamma (signal) sample.
# 'as_channels': True streams the time dimension as image channels,
# producing 4D rather than 5D tensors for the network.
gamma_configuration = {
    'rebin_size': rebin_size,
    'output_file': "../gamma.hdf5",
    'shape': shape,
    # NOTE(review): presumably indexes[0][0] = training paths of fold 0 —
    # confirm against kfold.split_data.
    'paths': gamma_indexes[0][0],
    'as_channels': True
}

# Same configuration for the proton (background) sample.
proton_configuration = {
    'rebin_size': rebin_size,
    'output_file': "../proton.hdf5",
    'shape': shape,
    'paths': proton_indexes[0][0],
    'as_channels': True
}


proton_train_preprocessor = ProtonPreprocessor(config=proton_configuration)
gamma_train_preprocessor = GammaPreprocessor(config=gamma_configuration)

# The config dicts are mutated in place to point at the validation split,
# so the validation preprocessors must be built after the training ones.
gamma_configuration['paths'] = gamma_indexes[1][0]
proton_configuration['paths'] = proton_indexes[1][0]

proton_validate_preprocessor = ProtonPreprocessor(config=proton_configuration)
gamma_validate_preprocessor = GammaPreprocessor(config=gamma_configuration)


# Generator configuration shared by the train and validate generators.
# 'number_slices' is the number of time slices kept (shape = [start, end]).
separation_generator_configuration = {
    'seed': 1337,
    'batch_size': 16,
    'start_slice': 0,
    'number_slices': shape[1] - shape[0],
    'mode': 'train',
    'chunked': False,
    'augment': True,
    'from_directory': True,
    'input_shape': [-1, gamma_train_preprocessor.shape[3], gamma_train_preprocessor.shape[2], gamma_train_preprocessor.shape[1], 1],
    'as_channels': True,
}

separation_validate = SeparationGenerator(config=separation_generator_configuration)
separation_train = SeparationGenerator(config=separation_generator_configuration)

# Both generators start from the same config; switch each to its role.
separation_validate.mode = "validate"
separation_train.mode = "train"

# Wire the gamma and proton preprocessors into the training generator.
separation_train.proton_train_preprocessor = proton_train_preprocessor
separation_train.proton_validate_preprocessor = proton_validate_preprocessor
separation_train.train_preprocessor = gamma_train_preprocessor
separation_train.validate_preprocessor = gamma_validate_preprocessor

# The validation generator gets the same preprocessors; its "validate"
# mode selects the validation ones at runtime.
separation_validate.proton_train_preprocessor = proton_train_preprocessor
separation_validate.proton_validate_preprocessor = proton_validate_preprocessor
separation_validate.train_preprocessor = gamma_train_preprocessor
separation_validate.validate_preprocessor = gamma_validate_preprocessor

from keras.layers import Dense, Dropout, Flatten, ConvLSTM2D, Conv3D, MaxPooling3D, Conv2D, MaxPooling2D
from keras.models import Sequential
import keras
import numpy as np

# Hand-built 2D CNN separation (gamma vs proton) classifier, built outside
# the factnn SeparationModel wrapper to test the 'as_channels' 4D input path.
separation_model = Sequential()

# Earlier 5D (ConvLSTM2D) variant kept for reference while the API is in flux:
#separation_model.add(ConvLSTM2D(32, kernel_size=3, strides=2,
#                     padding='same', input_shape=[gamma_train_preprocessor.shape[3], gamma_train_preprocessor.shape[2], gamma_train_preprocessor.shape[1], 1],
#                     activation='relu',
#                     dropout=0.3, recurrent_dropout=0.5,
#                     return_sequences=True))
# NOTE(review): the channel count 5 is hard-coded here — presumably the number
# of summed time-slice groups; confirm it matches the preprocessor's
# 'as_channels' output rather than deriving from `shape`.
separation_model.add(Conv2D(32, input_shape=[gamma_train_preprocessor.shape[1], gamma_train_preprocessor.shape[2], 5],
                     kernel_size=1, strides=1,
                     padding='same', activation='relu'))
separation_model.add(Conv2D(32,
                     kernel_size=3, strides=1,
                     padding='same', activation='relu'))
separation_model.add(MaxPooling2D())
separation_model.add(Dropout(0.4))
separation_model.add(Flatten())
# NOTE(review): these two Dense layers have no activation (linear), so they
# collapse into a single linear map — confirm whether 'relu' was intended.
separation_model.add(Dense(32))
separation_model.add(Dropout(0.5))
separation_model.add(Dense(64))
# Two-class softmax output: [gamma, proton] (one-hot labels assumed).
separation_model.add(Dense(2, activation='softmax'))
separation_model.compile(optimizer='adam', loss='categorical_crossentropy',
                         metrics=['acc'])

separation_model.summary()
# Keep only the best model (by validation loss) on disk, checking every epoch.
model_checkpoint = keras.callbacks.ModelCheckpoint("Outside_test.hdf5",
                                                   monitor='val_loss',
                                                   verbose=0,
                                                   save_best_only=True,
                                                   save_weights_only=False,
                                                   mode='auto', period=1)
# Stop once validation loss has not improved for 10 epochs.
early_stop = keras.callbacks.EarlyStopping(monitor='val_loss', min_delta=0,
                                           patience=10,
                                           verbose=0, mode='auto')

tensorboard = keras.callbacks.TensorBoard(update_freq='epoch')

from examples.open_crab_sample_constants import NUM_EVENTS_GAMMA, NUM_EVENTS_PROTON

# The dataset is capped at the (smaller) proton event count so both classes
# contribute equally: 80% of events are used overall, split 80/20 train/val.
event_totals = 0.8*NUM_EVENTS_PROTON
train_num = event_totals * 0.8
val_num = event_totals * 0.2

separation_model.fit_generator(
    generator=separation_train,
    steps_per_epoch=int(np.floor(train_num / separation_train.batch_size)),
    epochs=50,
    verbose=1,
    validation_data=separation_validate,
    callbacks=[early_stop, model_checkpoint, tensorboard],
    validation_steps=int(np.floor(val_num / separation_validate.batch_size))
)


# NOTE(review): leftover scaffolding from the kfold example this file was
# derived from — no base model is saved here.
"""
Now run the models with the generators!
"""

0 comments on commit 792e2a4

Please sign in to comment.