logs: replace prints to stdout with logging (#369)
* logs: move keras model summary to log.info

* logs: remove prints in favor of logging
yoptar committed Aug 10, 2018
1 parent 0d57948 commit 04d0299
Showing 7 changed files with 12 additions and 11 deletions.
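
All of the hunks below write through a module-level logger (`log`, or `logger` in squad_preprocessor.py). The patch itself does not show how that logger is created; DeepPavlov modules typically request a named logger from shared setup code. A hypothetical minimal standard-library sketch of the assumed wiring:

    import logging

    # Illustrative configuration only; the project installs its own
    # handlers, so modules just ask for a logger by name.
    logging.basicConfig(level=logging.INFO)
    log = logging.getLogger(__name__)

    log.info('unlike print, each record carries a level and a logger name '
             'and can be filtered or redirected per handler')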
3 changes: 1 addition & 2 deletions deeppavlov/core/models/keras_model.py
@@ -99,8 +99,7 @@ def init_model_from_scratch(self, model_name: str, optimizer_name: str,
         Returns:
             compiled model with given network and learning parameters
         """
-        log.info("[initializing `{}` from scratch]".format(self.__class__.__name__))
-        print(model_name)
+        log.info(f'[initializing `{self.__class__.__name__}` from scratch as {model_name}]')
         model_func = getattr(self, model_name, None)
         if callable(model_func):
             model = model_func(**self.opt)
7 changes: 5 additions & 2 deletions deeppavlov/models/classifiers/keras_classification_model.py
@@ -115,7 +115,7 @@ def __init__(self, text_size: int,
                   "lear_rate": self.opt.get('lear_rate'),
                   "lear_rate_decay": self.opt.get('lear_rate_decay')}
 
-        self.model = self.load(**params)
+        self.model: Model = self.load(**params)
         self._change_not_fixed_params(text_size=text_size, model_name=model_name,
                                       optimizer=optimizer, loss=loss,
                                       lear_rate=lear_rate, lear_rate_decay=lear_rate_decay,
@@ -133,7 +133,10 @@ def __init__(self, text_size: int,
             if self.opt['fasttext_md5'] != current_fasttext_md5:
                 raise ConfigError(
                     "Given fasttext model does NOT match fasttext model used previously to train loaded model")
-        print("Model was successfully initialized!\nModel summary:\n{}".format(self.model.summary()))
+
+        summary = ['Model was successfully initialized!', 'Model summary:']
+        self.model.summary(print_fn=summary.append)
+        log.info('\n'.join(summary))
 
     def _change_not_fixed_params(self, **kwargs) -> None:
         """
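
The key move here is `model.summary(print_fn=...)`: Keras invokes the callable once per line of the summary instead of printing, so the lines can be gathered and emitted as a single log record. (The old code formatted the return value of `self.model.summary()`, which is `None`; the summary itself went straight to stdout.) A self-contained sketch with a hypothetical toy model:

    import logging

    from keras.layers import Dense
    from keras.models import Sequential

    logging.basicConfig(level=logging.INFO)
    log = logging.getLogger(__name__)

    model = Sequential([Dense(4, input_shape=(8,))])  # illustrative model only

    lines = ['Model was successfully initialized!', 'Model summary:']
    model.summary(print_fn=lines.append)  # one call per summary line
    log.info('\n'.join(lines))            # the whole summary in one record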
3 changes: 1 addition & 2 deletions deeppavlov/models/classifiers/utils.py
@@ -104,8 +104,7 @@ def log_metrics(names: [list, np.ndarray], values: [list, np.ndarray],
     Returns:
         None
     """
-    sys.stdout.write("\r") # back to previous line
-    log.info("{} -->\t".format(mode))
+    log.info("\r{} -->\t".format(mode))
     if updates is not None:
         log.info("updates: {}\t".format(updates))
 
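
Note that the carriage return now travels inside the log message. Assuming the handler writes to the same stream the progress line was drawn on, the leading `\r` still rewinds the cursor before the record is written, which is what the deleted `sys.stdout.write("\r")` did. A sketch of that assumption:

    import logging
    import sys

    logging.basicConfig(stream=sys.stdout, format='%(message)s', level=logging.INFO)
    log = logging.getLogger(__name__)

    sys.stdout.write('epoch 3/10 ...')      # an in-place progress line
    log.info('\r{} -->\t'.format('train'))  # \r rewinds; the record overwrites it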
2 changes: 1 addition & 1 deletion deeppavlov/models/preprocessors/squad_preprocessor.py
@@ -138,7 +138,7 @@ def convert_idx(text: str, tokens: List[str]) -> List[Tuple[int, int]]:
         for token in tokens:
             current = text.find(token, current)
             if current < 0:
-                print("Token {} cannot be found".format(token))
+                logger.error("Token {} cannot be found".format(token))
                 raise Exception()
             spans.append((current, current + len(token)))
             current += len(token)
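
For context, the hunk sits inside `convert_idx`, which maps each token back to its character span in the source text. Completed into a runnable sketch (recast as a module-level function; the initializations and the return are inferred from the surrounding code):

    import logging
    from typing import List, Tuple

    logger = logging.getLogger(__name__)

    def convert_idx(text: str, tokens: List[str]) -> List[Tuple[int, int]]:
        current = 0
        spans = []
        for token in tokens:
            # Search from the end of the previous match so repeated
            # tokens map to successive occurrences.
            current = text.find(token, current)
            if current < 0:
                logger.error("Token {} cannot be found".format(token))
                raise Exception()
            spans.append((current, current + len(token)))
            current += len(token)
        return spans

    # convert_idx('to be or not', ['to', 'be', 'or', 'not'])
    # -> [(0, 2), (3, 5), (6, 8), (9, 12)]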
5 changes: 3 additions & 2 deletions deeppavlov/models/ranking/ranking_model.py
@@ -231,7 +231,8 @@ def make_hard_triplets(self, x, y, net):
                     if not no_samples:
                         break
                 if no_samples:
-                    print("There is no negative examples with distances greater than positive examples distances.")
+                    log.error("There are no negative examples with distances"
+                              " greater than positive examples distances.")
                     exit(0)
             else:
                 if self.num_hardest_negatives is not None:
@@ -285,7 +286,7 @@ def make_hard_triplets(self, x, y, net):
         rp = [el[1] for el in triplets]
         rn = [el[2] for el in triplets]
         ratio = sum(hrds) / len(hrds)
-        print("Ratio of semi-hard negative samples is %f" % ratio)
+        log.info("Ratio of semi-hard negative samples is %f" % ratio)
         return [(c, rp), (c, rn)]
 
     def get_semi_hard_negative_ind(self, i, j, k, distances, anchor_negative_dist, batch_size, num_samples):
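
A side note on the second hunk: `log.info(... % ratio)` builds the string eagerly before logging sees it. The logging API can defer interpolation by taking the format arguments separately; a sketch, not part of the patch:

    import logging

    logging.basicConfig(level=logging.INFO)
    log = logging.getLogger(__name__)

    hrds = [True, False, True, True]  # illustrative semi-hard flags
    ratio = sum(hrds) / len(hrds)

    # Interpolation happens only if an INFO record is actually emitted:
    log.info("Ratio of semi-hard negative samples is %f", ratio)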
2 changes: 1 addition & 1 deletion deeppavlov/models/seq2seq_go_bot/bot.py
@@ -125,7 +125,7 @@ def _filter(tokens):
         preds = [list(_filter(self.tgt_vocab(utter_idxs)))
                  for utter_idxs in pred_idxs]
         if self.debug:
-            print("Dialog prediction = \"{}\"".format(preds[-1]))
+            log.debug("Dialog prediction = \"{}\"".format(preds[-1]))
         return preds
 
     def save(self):
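
Here the record is still gated by `self.debug`. With logging, the same gating can come from the logger's level alone, since `log.debug` records are dropped whenever the effective level is above DEBUG; a sketch under that assumption:

    import logging

    logging.basicConfig(level=logging.DEBUG)  # raise to INFO to mute the record
    log = logging.getLogger(__name__)

    preds = [['hello', 'world']]  # illustrative prediction
    log.debug('Dialog prediction = "%s"', preds[-1])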
1 change: 0 additions & 1 deletion deeppavlov/models/squad/utils.py
@@ -98,7 +98,6 @@ def __call__(self, inputs, seq_len, keep_prob=1.0, is_train=None, concat_layers=
             gru_fw, gru_bw = self.grus[layer]
             init_fw, init_bw = self.inits[layer]
             mask_fw, mask_bw = self.dropout_mask[layer]
-            print(outputs)
             with tf.variable_scope('fw_{}'.format(layer), reuse=tf.AUTO_REUSE):
                 with tf.variable_scope('cudnn_gru', reuse=tf.AUTO_REUSE):
                     out_fw, _ = tf.nn.dynamic_rnn(cell=gru_fw, inputs=outputs[-1] * mask_fw, time_major=True,
