iperov committed Nov 24, 2019
1 parent 1bfd65a commit 77b390c04bb521c971d3f76bbae018c86196fad1
Showing with 150 additions and 25 deletions.
  1. +79 −0 nnlib/nnlib.py
  2. +2 −10 samplelib/SampleGeneratorFace.py
  3. +20 −6 samplelib/SampleGeneratorFacePerson.py
  4. +49 −9 samplelib/SampleLoader.py
nnlib/nnlib.py
@@ -92,6 +92,7 @@ class nnlib(object):
     Adam = nnlib.Adam
     RMSprop = nnlib.RMSprop
+    LookaheadOptimizer = nnlib.LookaheadOptimizer
     modelify = nnlib.modelify
     gaussian_blur = nnlib.gaussian_blur
@@ -936,7 +937,85 @@ def get_config(self):
         base_config = super(Adam, self).get_config()
         return dict(list(base_config.items()) + list(config.items()))
 nnlib.Adam = Adam
 
+class LookaheadOptimizer(keras.optimizers.Optimizer):
+    def __init__(self, optimizer, sync_period=5, slow_step=0.5, tf_cpu_mode=0, **kwargs):
+        super(LookaheadOptimizer, self).__init__(**kwargs)
+        self.optimizer = optimizer
+        self.tf_cpu_mode = tf_cpu_mode
+
+        with K.name_scope(self.__class__.__name__):
+            self.sync_period = K.variable(sync_period, dtype='int64', name='sync_period')
+            self.slow_step = K.variable(slow_step, name='slow_step')
+
+    @property
+    def lr(self):
+        return self.optimizer.lr
+
+    @lr.setter
+    def lr(self, lr):
+        self.optimizer.lr = lr
+
+    @property
+    def learning_rate(self):
+        return self.optimizer.learning_rate
+
+    @learning_rate.setter
+    def learning_rate(self, learning_rate):
+        self.optimizer.learning_rate = learning_rate
+
+    @property
+    def iterations(self):
+        return self.optimizer.iterations
+
+    def get_updates(self, loss, params):
+        sync_cond = K.equal((self.iterations + 1) // self.sync_period * self.sync_period, (self.iterations + 1))
+
+        e = K.tf.device("/cpu:0") if self.tf_cpu_mode > 0 else None
+        if e: e.__enter__()
+        slow_params = [K.variable(K.get_value(p), name='sp_{}'.format(i)) for i, p in enumerate(params)]
+        if e: e.__exit__(None, None, None)
+
+        self.updates = self.optimizer.get_updates(loss, params)
+        slow_updates = []
+        for p, sp in zip(params, slow_params):
+            e = K.tf.device("/cpu:0") if self.tf_cpu_mode == 2 else None
+            if e: e.__enter__()
+            sp_t = sp + self.slow_step * (p - sp)
+            if e: e.__exit__(None, None, None)
+
+            slow_updates.append(K.update(sp, K.switch(
+                sync_cond,
+                sp_t,
+                sp,
+            )))
+            slow_updates.append(K.update_add(p, K.switch(
+                sync_cond,
+                sp_t - p,
+                K.zeros_like(p),
+            )))
+
+        self.updates += slow_updates
+        self.weights = self.optimizer.weights + slow_params
+        return self.updates
+
+    def get_config(self):
+        config = {
+            'optimizer': keras.optimizers.serialize(self.optimizer),
+            'sync_period': int(K.get_value(self.sync_period)),
+            'slow_step': float(K.get_value(self.slow_step)),
+        }
+        base_config = super(LookaheadOptimizer, self).get_config()
+        return dict(list(base_config.items()) + list(config.items()))
+
+    @classmethod
+    def from_config(cls, config):
+        optimizer = keras.optimizers.deserialize(config.pop('optimizer'))
+        return cls(optimizer, **config)
+nnlib.LookaheadOptimizer = LookaheadOptimizer
 
 class DenseMaxout(keras.layers.Layer):
     """A dense maxout layer.
     A `MaxoutDense` layer takes the element-wise maximum of
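The new class implements Zhang et al.'s Lookahead ("k steps forward, 1 step back") as a wrapper around any Keras optimizer: the wrapped optimizer trains the fast weights every step, sync_cond fires once every sync_period iterations, and at that point each slow copy sp is pulled toward its fast weight p by slow_step and p is snapped back onto sp (tf_cpu_mode only controls whether the slow copies live on the CPU). A self-contained numpy sketch of just that update rule, for illustration rather than taken from the repo:

    import numpy as np

    # Toy rendering of the rule in get_updates() above: the inner optimizer
    # moves the fast weights p every step; every sync_period steps sync_cond
    # fires and the slow weights sp catch up, with p reset onto them.
    sync_period, slow_step = 5, 0.5
    p = np.zeros(3)    # fast weights, driven by the wrapped optimizer
    sp = p.copy()      # slow weights, the extra state LookaheadOptimizer keeps

    for iterations in range(20):
        p += 0.1                                 # stand-in for one Adam/RMSprop step
        if (iterations + 1) % sync_period == 0:  # equivalent to sync_cond
            sp = sp + slow_step * (p - sp)       # sp_t = sp + slow_step * (p - sp)
            p = sp.copy()                        # K.update_add(p, sp_t - p)

In model code the wrapper would presumably be used as LookaheadOptimizer(Adam(lr=5e-5), sync_period=5, slow_step=0.5) anywhere a Keras optimizer is accepted.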
samplelib/SampleGeneratorFace.py
@@ -24,8 +24,8 @@ def __init__ (self, samples_path, debug=False, batch_size=1,
                         random_ct_samples_path=None,
                         sample_process_options=SampleProcessor.Options(),
                         output_sample_types=[],
-                        person_id_mode=False,
                         add_sample_idx=False,
+                        use_caching=False,
                         generators_count=2,
                         generators_random_seed=None,
                         **kwargs):
@@ -34,7 +34,6 @@ def __init__ (self, samples_path, debug=False, batch_size=1,
         self.sample_process_options = sample_process_options
         self.output_sample_types = output_sample_types
         self.add_sample_idx = add_sample_idx
-        self.person_id_mode = person_id_mode
 
         if sort_by_yaw_target_samples_path is not None:
             self.sample_type = SampleType.FACE_YAW_SORTED_AS_TARGET
@@ -48,7 +47,7 @@ def __init__ (self, samples_path, debug=False, batch_size=1,
 
         self.generators_random_seed = generators_random_seed
 
-        samples = SampleLoader.load (self.sample_type, self.samples_path, sort_by_yaw_target_samples_path, person_id_mode=person_id_mode)
+        samples = SampleLoader.load (self.sample_type, self.samples_path, sort_by_yaw_target_samples_path, use_caching=use_caching)
         np.random.shuffle(samples)
         self.samples_len = len(samples)
@@ -149,19 +148,12 @@ def batch_func(self, param ):
                     if self.add_sample_idx:
                         batches += [ [] ]
                         i_sample_idx = len(batches)-1
 
-                    if self.person_id_mode:
-                        batches += [ [] ]
-                        i_person_id = len(batches)-1
-
                 for i in range(len(x)):
                     batches[i].append ( x[i] )
 
                 if self.add_sample_idx:
                     batches[i_sample_idx].append (idx)
 
-                if self.person_id_mode:
-                    batches[i_person_id].append ( np.array([sample.person_id]) )
-
                 break

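After this change batch_func only ever appends the per-output lists plus, when add_sample_idx is set, one trailing list of source indices; the person-id column is gone along with person_id_mode. A hypothetical walk-through of that column layout (toy values, not repo code):

    batches = [ [] for _ in range(3) ]   # one list per output sample type
    batches += [ [] ]                    # extra column since add_sample_idx=True
    i_sample_idx = len(batches) - 1

    for idx in (17, 42):                 # hypothetical chosen sample indices
        x = (f"warped_{idx}", f"target_{idx}", f"mask_{idx}")
        for i in range(len(x)):
            batches[i].append(x[i])
        batches[i_sample_idx].append(idx)

    # batches == [['warped_17', 'warped_42'],
    #             ['target_17', 'target_42'],
    #             ['mask_17', 'mask_42'],
    #             [17, 42]]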
samplelib/SampleGeneratorFacePerson.py
@@ -22,8 +22,9 @@ def __init__ (self, samples_path, debug=False, batch_size=1,
                         sample_process_options=SampleProcessor.Options(),
                         output_sample_types=[],
                         person_id_mode=1,
+                        use_caching=False,
                         generators_count=2,
                         generators_random_seed=None,
                         **kwargs):
 
         super().__init__(samples_path, debug, batch_size)
@@ -35,14 +36,27 @@ def __init__ (self, samples_path, debug=False, batch_size=1,
             raise ValueError("len(generators_random_seed) != generators_count")
         self.generators_random_seed = generators_random_seed
 
-        samples = SampleLoader.load (SampleType.FACE, self.samples_path, person_id_mode=True)
+        samples = SampleLoader.load (SampleType.FACE, self.samples_path, person_id_mode=True, use_caching=use_caching)
 
         if person_id_mode==1:
             np.random.shuffle(samples)
 
             new_samples = []
-            for s in samples:
-                new_samples += s
+            while len(samples) > 0:
+                for i in range( len(samples)-1, -1, -1):
+                    sample = samples[i]
+
+                    if len(sample) > 0:
+                        new_samples.append(sample.pop(0))
+
+                    if len(sample) == 0:
+                        samples.pop(i)
+
             samples = new_samples
             np.random.shuffle(samples)
+            #new_samples = []
+            #for s in samples:
+            #    new_samples += s
+            #samples = new_samples
+            #np.random.shuffle(samples)
 
         self.samples_len = len(samples)

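The while loop above replaces the old wholesale concatenation (left behind as comments) with a round-robin deal: one sample is taken from each person's list per pass, and exhausted lists are dropped, so identities stay interleaved in the flattened result. The same loop on plain lists, as a standalone illustration:

    # Toy per-person sample lists standing in for lists of Sample objects.
    samples = [ ['a1', 'a2', 'a3'], ['b1'], ['c1', 'c2'] ]

    new_samples = []
    while len(samples) > 0:
        # Walk backwards so exhausted lists can be removed while iterating.
        for i in range(len(samples) - 1, -1, -1):
            sample = samples[i]
            if len(sample) > 0:
                new_samples.append(sample.pop(0))
            if len(sample) == 0:
                samples.pop(i)

    print(new_samples)   # ['c1', 'b1', 'a1', 'c2', 'a2', 'a3']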
@@ -116,7 +130,7 @@ def batch_func(self, param ):
             if self.person_id_mode==1:
                 if len(shuffle_idxs) == 0:
                     shuffle_idxs = samples_idxs.copy()
-                    np.random.shuffle(shuffle_idxs)
+                    #np.random.shuffle(shuffle_idxs)
 
                 idx = shuffle_idxs.pop()
                 sample = samples[ idx ]
samplelib/SampleLoader.py
@@ -1,4 +1,5 @@
 import operator
+import pickle
 import traceback
 from enum import IntEnum
 from pathlib import Path
@@ -23,7 +24,7 @@ def get_person_id_max_count(samples_path):
         return len ( Path_utils.get_all_dir_names(samples_path) )
 
     @staticmethod
-    def load(sample_type, samples_path, target_samples_path=None, person_id_mode=False):
+    def load(sample_type, samples_path, target_samples_path=None, person_id_mode=True, use_caching=False):
         cache = SampleLoader.cache
 
         if str(samples_path) not in cache.keys():
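SampleLoader.cache acts as a process-level memo: one entry per samples_path string, holding one slot per SampleType, so repeated load() calls during a session reuse the already-built sample lists. A minimal sketch of that shape (illustrative names and a stub builder, not repo code):

    from enum import IntEnum

    class SampleType(IntEnum):
        IMAGE = 0
        FACE = 1
        FACE_TEMPORAL_SORTED = 2
        QTY = 3

    cache = {}   # cache[str(samples_path)] -> list with one slot per SampleType

    def build_samples(sample_type, samples_path):
        # Stand-in for the real per-type scanning (upgradeToFaceSamples etc.).
        return [f"{samples_path}/{sample_type.name}_{i}" for i in range(3)]

    def load(sample_type, samples_path):
        if str(samples_path) not in cache:
            cache[str(samples_path)] = [None] * SampleType.QTY
        datas = cache[str(samples_path)]
        if datas[sample_type] is None:   # fill the slot lazily, then reuse it
            datas[sample_type] = build_samples(sample_type, samples_path)
        return datas[sample_type]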
@@ -36,15 +37,54 @@ def load(sample_type, samples_path, target_samples_path=None, person_id_mode=Fal
                 datas[sample_type] = [ Sample(filename=filename) for filename in io.progress_bar_generator( Path_utils.get_image_paths(samples_path), "Loading") ]
             elif sample_type == SampleType.FACE:
                 if datas[sample_type] is None:
-                    if person_id_mode:
-                        dir_names = Path_utils.get_all_dir_names(samples_path)
-                        all_samples = []
-                        for i, dir_name in io.progress_bar_generator( [*enumerate(dir_names)] , "Loading"):
-                            all_samples += SampleLoader.upgradeToFaceSamples( [ Sample(filename=filename, person_id=i) for filename in Path_utils.get_image_paths( samples_path / dir_name ) ], silent=True )
-                        datas[sample_type] = all_samples
-                    else:
-                        datas[sample_type] = SampleLoader.upgradeToFaceSamples( [ Sample(filename=filename) for filename in Path_utils.get_image_paths(samples_path) ] )
+
+                    if not use_caching:
+                        datas[sample_type] = SampleLoader.upgradeToFaceSamples( [ Sample(filename=filename) for filename in Path_utils.get_image_paths(samples_path) ] )
+                    else:
+                        samples_dat = samples_path / 'samples.dat'
+                        if samples_dat.exists():
+                            io.log_info (f"Using saved samples info from '{samples_dat}' ")
+
+                            all_samples = pickle.loads(samples_dat.read_bytes())
+
+                            if person_id_mode:
+                                for samples in all_samples:
+                                    for sample in samples:
+                                        sample.filename = str( samples_path / Path(sample.filename) )
+                            else:
+                                for sample in all_samples:
+                                    sample.filename = str( samples_path / Path(sample.filename) )
+
+                            datas[sample_type] = all_samples
+
+                        else:
+                            if person_id_mode:
+                                dir_names = Path_utils.get_all_dir_names(samples_path)
+                                all_samples = []
+                                for i, dir_name in io.progress_bar_generator( [*enumerate(dir_names)] , "Loading"):
+                                    all_samples += [ SampleLoader.upgradeToFaceSamples( [ Sample(filename=filename, person_id=i) for filename in Path_utils.get_image_paths( samples_path / dir_name ) ], silent=True ) ]
+                                datas[sample_type] = all_samples
+                            else:
+                                datas[sample_type] = all_samples = SampleLoader.upgradeToFaceSamples( [ Sample(filename=filename) for filename in Path_utils.get_image_paths(samples_path) ] )
+
+                            if person_id_mode:
+                                for samples in all_samples:
+                                    for sample in samples:
+                                        sample.filename = str(Path(sample.filename).relative_to(samples_path))
+                            else:
+                                for sample in all_samples:
+                                    sample.filename = str(Path(sample.filename).relative_to(samples_path))
+
+                            samples_dat.write_bytes (pickle.dumps(all_samples))
+
+                            if person_id_mode:
+                                for samples in all_samples:
+                                    for sample in samples:
+                                        sample.filename = str( samples_path / Path(sample.filename) )
+                            else:
+                                for sample in all_samples:
+                                    sample.filename = str( samples_path / Path(sample.filename) )
 
             elif sample_type == SampleType.FACE_TEMPORAL_SORTED:
                 if datas[sample_type] is None:
                     datas[sample_type] = SampleLoader.upgradeToFaceTemporalSortedSamples( SampleLoader.load(SampleType.FACE, samples_path) )

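With use_caching enabled, the FACE branch persists the scanned sample list to a samples.dat file beside the images: filenames are rewritten relative to samples_path before pickling and made absolute again afterwards (on both the write and read paths), so the cache survives the dataset directory being moved. A condensed sketch of that round-trip for the flat, non-person_id case, with a hypothetical scan() standing in for upgradeToFaceSamples:

    import pickle
    from pathlib import Path

    class Sample:
        # Minimal stand-in for samplelib's Sample.
        def __init__(self, filename):
            self.filename = filename

    def load_faces(samples_path, scan):
        samples_dat = samples_path / 'samples.dat'
        if samples_dat.exists():
            all_samples = pickle.loads(samples_dat.read_bytes())
            for s in all_samples:                  # restore absolute paths
                s.filename = str(samples_path / Path(s.filename))
            return all_samples

        all_samples = scan(samples_path)           # fresh scan of the images
        for s in all_samples:                      # store paths relative to the dir
            s.filename = str(Path(s.filename).relative_to(samples_path))
        samples_dat.write_bytes(pickle.dumps(all_samples))
        for s in all_samples:                      # back to absolute for actual use
            s.filename = str(samples_path / Path(s.filename))
        return all_samples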