-
Notifications
You must be signed in to change notification settings - Fork 13
/
fast_paco_trainer.py
244 lines (218 loc) · 12.1 KB
/
fast_paco_trainer.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
# -----------------------------------------------------------------------------
# Program Name: fast_paco_trainer.py
# Program Description: Rodan wrapper for Fast Calvo's classifier training
# -----------------------------------------------------------------------------
# Core
import logging
import os
import sys
# Third-party
from celery.utils.log import get_task_logger
import cv2
import numpy as np
import zipfile
from shutil import rmtree
# Project
from rodan.celery import app
from rodan.jobs.base import RodanTask
"""Wrap Patchwise (Fast) Calvo classifier training in Rodan."""
logger = get_task_logger(__name__)
class FastPacoTrainer(RodanTask):
    """Rodan job that trains patchwise Selection Auto-Encoder (SAE) models
    for pixelwise layout analysis of music document images (Fast Calvo/Paco).

    Input is either one "Multi-Sample Zip" containing several sample folders,
    or up to 20 individual per-sample zips, plus optional pre-trained HDF5
    models to continue training from. Output is one trained model per layer,
    an optional re-packed multi-sample zip, and a plain-text log file.
    """

    name = "Training model for Patchwise Analysis of Music Document, Training"
    author = "Jorge Calvo-Zaragoza, Francisco J. Castellanos, Gabriel Vigliensoni, and Ichiro Fujinaga"
    description = "The job performs the training of many Selection Auto-Encoder model for the pixelwise analysis of music document images."
    enabled = True
    category = "OMR - Layout analysis"
    interactive = False

    # JSON-schema-style form rendered by Rodan for the user-tunable
    # hyperparameters; values arrive in run_my_task via `settings`.
    settings = {
        'title': 'Training parameters',
        'type': 'object',
        'properties': {
            'Batch Size': {
                'type': 'integer',
                'minimum': 1,
                'default': 8,
                'maximum': 64,
            },
            'Maximum number of training epochs': {
                'type': 'integer',
                'minimum': 1,
                'default': 50
            },
            'Maximum number of samples per label': {
                'type': 'integer',
                'minimum': 1,
                'default': 1000
            },
            # Early-stopping patience (epochs without improvement).
            'Patience': {
                'type': 'integer',
                'minimum': 0,
                'default': 15
            },
            'Patch height': {
                'type': 'integer',
                'minimum': 32,
                'default': 256
            },
            'Patch width': {
                'type': 'integer',
                'minimum': 32,
                'default': 256
            },
        },
        # Training is routed to the GPU Celery queue.
        'job_queue': 'GPU'
    }

    # All ports are optional zips/models; run_my_task distinguishes them by
    # name: ports containing the substring 'Model' are treated as models
    # (note this also matches 'Background Model'), everything else except
    # 'Multi-Sample Zip' is treated as a single-sample zip.
    input_port_types = (
        {'name': 'Multi-Sample Zip', 'minimum': 0, 'maximum': 1, 'resource_types': ['application/zip']},
        {'name': 'Background Model', 'minimum': 0, 'maximum': 1, 'resource_types': ['keras/model+hdf5']},
        {'name': 'Model 1', 'minimum': 0, 'maximum': 1, 'resource_types': ['keras/model+hdf5']},
        {'name': 'Model 2', 'minimum': 0, 'maximum': 1, 'resource_types': ['keras/model+hdf5']},
        {'name': 'Model 3', 'minimum': 0, 'maximum': 1, 'resource_types': ['keras/model+hdf5']},
        {'name': 'Model 4', 'minimum': 0, 'maximum': 1, 'resource_types': ['keras/model+hdf5']},
        {'name': 'Sample 1', 'minimum': 0, 'maximum': 1, 'resource_types': ['application/zip']},
        {'name': 'Sample 2', 'minimum': 0, 'maximum': 1, 'resource_types': ['application/zip']},
        # We did not go this route because it would be more difficult for the user to track layers
        # {'name': 'rgba PNG - Layers', 'minimum': 1, 'maximum': 10, 'resource_types': ['image/rgba+png']},
        {'name': 'Sample 3', 'minimum': 0, 'maximum': 1, 'resource_types': ['application/zip']},
        {'name': 'Sample 4', 'minimum': 0, 'maximum': 1, 'resource_types': ['application/zip']},
        {'name': 'Sample 5', 'minimum': 0, 'maximum': 1, 'resource_types': ['application/zip']},
        {'name': 'Sample 6', 'minimum': 0, 'maximum': 1, 'resource_types': ['application/zip']},
        {'name': 'Sample 7', 'minimum': 0, 'maximum': 1, 'resource_types': ['application/zip']},
        {'name': 'Sample 8', 'minimum': 0, 'maximum': 1, 'resource_types': ['application/zip']},
        {'name': 'Sample 9', 'minimum': 0, 'maximum': 1, 'resource_types': ['application/zip']},
        {'name': 'Sample 10', 'minimum': 0, 'maximum': 1, 'resource_types': ['application/zip']},
        {'name': 'Sample 11', 'minimum': 0, 'maximum': 1, 'resource_types': ['application/zip']},
        {'name': 'Sample 12', 'minimum': 0, 'maximum': 1, 'resource_types': ['application/zip']},
        {'name': 'Sample 13', 'minimum': 0, 'maximum': 1, 'resource_types': ['application/zip']},
        {'name': 'Sample 14', 'minimum': 0, 'maximum': 1, 'resource_types': ['application/zip']},
        {'name': 'Sample 15', 'minimum': 0, 'maximum': 1, 'resource_types': ['application/zip']},
        {'name': 'Sample 16', 'minimum': 0, 'maximum': 1, 'resource_types': ['application/zip']},
        {'name': 'Sample 17', 'minimum': 0, 'maximum': 1, 'resource_types': ['application/zip']},
        {'name': 'Sample 18', 'minimum': 0, 'maximum': 1, 'resource_types': ['application/zip']},
        {'name': 'Sample 19', 'minimum': 0, 'maximum': 1, 'resource_types': ['application/zip']},
        {'name': 'Sample 20', 'minimum': 0, 'maximum': 1, 'resource_types': ['application/zip']},
        # We did not go this route because it would be more difficult for the user to track layers
        # {'name': 'rgba PNG - Layers', 'minimum': 1, 'maximum': 10, 'resource_types': ['image/rgba+png']},
    )

    # NOTE(review): outputs expose Models up to 9 while inputs only accept
    # Models up to 4 — presumably intentional (more layers can be trained
    # than fine-tuned), but worth confirming against the Rodan workflow.
    output_port_types = (
        # We did not go this route because it would be more difficult for the user to track layers
        # {'name': 'Adjustable models', 'minimum': 1, 'maximum': 10, 'resource_types': ['keras/model+hdf5']},
        {'name': 'Multi-Sample Zip', 'minimum': 0, 'maximum': 1, 'resource_types': ['application/zip']},
        {'name': 'Log File', 'minimum': 1, 'maximum': 1, 'resource_types': ['text/plain']},
        {'name': 'Background Model', 'minimum': 1, 'maximum': 1, 'resource_types': ['keras/model+hdf5']},
        {'name': 'Model 1', 'minimum': 1, 'maximum': 1, 'resource_types': ['keras/model+hdf5']},
        {'name': 'Model 2', 'minimum': 0, 'maximum': 1, 'resource_types': ['keras/model+hdf5']},
        {'name': 'Model 3', 'minimum': 0, 'maximum': 1, 'resource_types': ['keras/model+hdf5']},
        {'name': 'Model 4', 'minimum': 0, 'maximum': 1, 'resource_types': ['keras/model+hdf5']},
        {'name': 'Model 5', 'minimum': 0, 'maximum': 1, 'resource_types': ['keras/model+hdf5']},
        {'name': 'Model 6', 'minimum': 0, 'maximum': 1, 'resource_types': ['keras/model+hdf5']},
        {'name': 'Model 7', 'minimum': 0, 'maximum': 1, 'resource_types': ['keras/model+hdf5']},
        {'name': 'Model 8', 'minimum': 0, 'maximum': 1, 'resource_types': ['keras/model+hdf5']},
        {'name': 'Model 9', 'minimum': 0, 'maximum': 1, 'resource_types': ['keras/model+hdf5']}
    )

    def run_my_task(self, inputs, settings, outputs):
        """Unpack all sample zips, sanity-check them, and run SAE training.

        The work happens in a relative ``unzipping_folder`` directory in the
        worker's CWD, which is wiped and recreated on every run. Returns True
        on success; raises for malformed inputs.
        """
        # Deferred imports: these pull in the (heavy, GPU-bound) training
        # stack only when the task actually executes on the worker.
        from Paco_classifier import training_engine_sae as training
        from Paco_classifier.fast_trainer_lib import PacoTrainer
        from Paco_classifier import preprocess
        # Saved so the finally-block can undo Celery's stdout/stderr
        # redirection (see redirect_stdouts_to_logger below).
        oldouts = sys.stdout, sys.stderr
        if 'Log File' in outputs:
            # Mirror this task's log records into the Log File output port.
            # NOTE(review): this handler is never removed from `logger`, so a
            # long-lived worker process may accumulate handlers across runs
            # and duplicate log lines — confirm and consider removeHandler().
            handler = logging.FileHandler(outputs['Log File'][0]['resource_path'])
            handler.setFormatter(
                logging.Formatter('%(asctime)s - %(name)s - %(message)s')
            )
            logger.addHandler(handler)
        try:
            # Settings (keys match the `settings` schema titles above).
            batch_size = settings['Batch Size']
            patch_height = settings['Patch height']
            patch_width = settings['Patch width']
            max_number_of_epochs = settings['Maximum number of training epochs']
            number_samples_per_class = settings['Maximum number of samples per label']
            patience = settings["Patience"]
            #------------------------------------------------------------
            #TODO Include the training options in the configuration data
            file_selection_mode = training.FileSelectionMode.SHUFFLE
            sample_extraction_mode = training.SampleExtractionMode.RANDOM
            #------------------------------------------------------------
            # Initialize: start from a clean scratch directory. NOTE(review):
            # a relative path means concurrent tasks sharing a CWD would
            # clobber each other — presumably each Celery task runs in its
            # own working directory; confirm.
            if os.path.exists('unzipping_folder'):
                rmtree('unzipping_folder')
            os.mkdir('unzipping_folder')
            new_input = {}      # layer name -> list of {'resource_path': ...}
            models = {}         # input port name -> model resource list
            create_folder = True  # True only while scanning the first sample folder
            folder_num = 1      # NOTE(review): unused; dir_num is used instead
            # Unzip Multi-Sample Zip to unzipping_folder
            if 'Multi-Sample Zip' in inputs:
                with zipfile.ZipFile(inputs['Multi-Sample Zip'][0]['resource_path'], 'r') as zip_ref:
                    zip_ref.extractall('unzipping_folder')
            # Count number of directories inside unzipping_folder
            dir_num = len(next(os.walk('unzipping_folder'))[1])
            # Check and throw error if user inputs a normal sample into
            # Multi-Sample Zip input (a multi-sample zip must contain
            # one sub-directory per sample, not bare files).
            if dir_num == 0 and 'Multi-Sample Zip' in inputs:
                raise Exception('Cannot input a normal sample into Multi-Sample Zip input')
            for ipt in inputs:
                # Add models to model dictionary. NOTE(review): the substring
                # test also matches 'Background Model' — presumably intended.
                if 'Model' in ipt:
                    models[ipt] = inputs[ipt]
                # Unzip other samples into unzipping_folder, each into its
                # own zipN sub-directory numbered after the existing ones.
                elif ipt != 'Multi-Sample Zip':
                    dir_num += 1
                    with zipfile.ZipFile(inputs[ipt][0]['resource_path'], 'r') as zip_ref:
                        zip_ref.extractall('unzipping_folder/zip{}'.format(dir_num))
            # Count number of Model output ports (substring match, so this
            # includes 'Background Model' as well).
            num_model_outport = len([k for k in outputs if "Model" in k])
            # Add unzipped samples from above to dictionary of layers,
            # keyed by file basename (one file per layer per sample).
            for folder in os.listdir('unzipping_folder'):
                dir_path = os.path.join('unzipping_folder', folder)
                full_path = os.path.join(os.getcwd(), dir_path)
                if os.path.isdir(dir_path):
                    # Check if user inputs more models than layers.
                    # NOTE(review): the "- 2" presumably discounts two
                    # non-layer files present in every sample folder
                    # (e.g. original image + background) — confirm against
                    # the sample zip format.
                    num_layers = (len([name for name in os.listdir(dir_path) if os.path.isfile(os.path.join(dir_path, name))]) - 2)
                    if num_layers < len(models):
                        raise Exception('Number of models ({}) exceeds number of layers ({})'.format(len(models), num_layers))
                    if num_layers != num_model_outport:
                        raise Exception('Number of Model output ports ({}) differs to number of layers ({})'.format(num_model_outport, num_layers))
                    for f in os.listdir(dir_path):
                        if os.path.isfile(os.path.join(dir_path, f)):
                            # Layer name is the filename up to the first dot.
                            layer_name = f.split(".")[0]
                            # Keys are created only for the first folder;
                            # later folders must use the same layer names or
                            # append will KeyError.
                            if create_folder:
                                new_input[layer_name] = []
                            new_input[layer_name].append({'resource_path': os.path.join(full_path, f)})
                    create_folder = False
            # Create output port Multi-Sample Zip: re-pack everything that
            # was unzipped (all individual samples merged into one zip).
            if 'Multi-Sample Zip' in outputs:
                with zipfile.ZipFile(outputs['Multi-Sample Zip'][0]['resource_path'], 'w') as zipObj:
                    # Iterate over all the files in directory
                    for folder in os.listdir('unzipping_folder'):
                        for f in os.listdir(os.path.join('unzipping_folder', folder)):
                            sub_path = os.path.join(folder, f)
                            full_path = os.path.join('unzipping_folder', sub_path)
                            zipObj.write(full_path, sub_path)
            # SANITY CHECK
            # Fail if arbitrary layers are not equal before training occurs.
            layer_dict = preprocess.preprocess(new_input, batch_size, patch_height, patch_width, number_samples_per_class)
            # Route print()-style output from the training engine into the
            # Celery logger at the configured redirect level.
            rlevel = app.conf.CELERY_REDIRECT_STDOUTS_LEVEL
            app.log.redirect_stdouts_to_logger(logger, rlevel)
            trainer = PacoTrainer(
                batch_size,
                patch_height,
                patch_width,
                max_number_of_epochs,
                number_samples_per_class,
                file_selection_mode,
                sample_extraction_mode,
                layer_dict,
                outputs,
                models,
                patience
            )
            trainer.runTrainer()
            # REMOVE UNZIP FOLDER. NOTE(review): only reached on success;
            # on failure the folder is left behind and is cleaned up at the
            # start of the next run instead.
            if os.path.exists('unzipping_folder'):
                rmtree('unzipping_folder')
            return True
        finally:
            # Undo Celery's stdout/stderr redirection regardless of outcome.
            sys.stdout, sys.stderr = oldouts

    def my_error_information(self, exc, traceback):
        """Rodan error hook; no extra error information is provided."""
        pass