
updated arg ID models, and removed dy.parameter() from argid
swabhs committed Oct 4, 2018
1 parent 478f254 commit bd2a88e
Showing 1 changed file with 8 additions and 8 deletions.
16 changes: 8 additions & 8 deletions sesame/argid.py
@@ -141,8 +141,8 @@
         "hidden_dim": 64,
         "use_dropout": USE_DROPOUT,
         "pretrained_embedding_dim": PRETDIM,
-        "num_epochs": 100 if not options.exemplar else 250,
-        "patience": 25,
+        "num_epochs": 10 if not options.exemplar else 25,
+        "patience": 3,
         "eval_after_every_epochs": 100,
         "dev_eval_epoch_frequency": 5}
     configuration_file = os.path.join(model_dir, "configuration.json")
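The schedule values reduced here (num_epochs 100 -> 10, patience 25 -> 3; 250 -> 25 under --exemplar) feed the patience-based early stopping visible further down in this diff. The following sketch is illustrative only and not part of the commit; train and evaluate_on_dev are hypothetical stand-ins for the actual loop in sesame/argid.py.

# Illustrative sketch only -- not code from this commit. It mimics the pattern
# "stop once the dev score has not improved for more than PATIENCE epochs",
# using the values this commit sets. evaluate_on_dev is a hypothetical stand-in.
NUM_EPOCHS = 10   # was 100 (250 -> 25 with --exemplar)
PATIENCE = 3      # was 25

def train(evaluate_on_dev):
    best_dev_score = float("-inf")
    last_updated_epoch = 0
    for epoch in range(NUM_EPOCHS):
        # ... one pass over the training data would go here ...
        dev_score = evaluate_on_dev(epoch)
        if dev_score > best_dev_score:
            best_dev_score = dev_score
            last_updated_epoch = epoch  # the real code also saves the model here
        if epoch - last_updated_epoch > PATIENCE:
            print("Ran out of patience, ending training.")
            break
    return best_dev_score

# A dev score that plateaus after epoch 4 triggers the patience check at
# epoch 8 (4 + PATIENCE + 1), well before NUM_EPOCHS is exhausted.
train(lambda epoch: min(epoch, 4))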
@@ -867,7 +867,7 @@ def print_eval_result(examples, expredictions, logger):
     sys.stderr.write("\n[test] wpr = %.5f (%.1f/%.1f) wre = %.5f (%.1f/%.1f)\n"
                      "[test] upr = %.5f (%.1f/%.1f) ure = %.5f (%.1f/%.1f)\n"
                      "[test] lpr = %.5f (%.1f/%.1f) lre = %.5f (%.1f/%.1f)\n"
-                     "[test] wf1 = %.5f uf1 = %.5f lf1 = %.5f [took %.3f s]\n"
+                     "[test] wf1 = %.5f uf1 = %.5f lf1 = %.5f [took %.3fs]\n"
                      % (corp_wp, corp_tokres[0], corp_tokres[1] + corp_tokres[0],
                         corp_wr, corp_tokres[0], corp_tokres[-1] + corp_tokres[0],
                         corp_up, corp_ures[0], corp_ures[1] + corp_ures[0],
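For reference, the wpr/wre/wf1-style numbers in this report are precision, recall and F1, and the format arguments (matched count over matched plus unmatched) follow the conventional definitions. The snippet below is a generic illustration with made-up counts, not the evaluation code from argid.py.

# Generic illustration (not from argid.py): precision/recall/F1 from match
# counts, printed in the same "%.5f (%.1f/%.1f) ... [took %.3fs]" layout as
# the updated log line. The counts are made up.
import time

def prf(matched, unmatched_predicted, unmatched_gold):
    # Standard precision/recall/F1 over span counts.
    precision = matched / (matched + unmatched_predicted)
    recall = matched / (matched + unmatched_gold)
    f1 = 2 * precision * recall / (precision + recall)
    return precision, recall, f1

start = time.time()
p, r, f1 = prf(matched=80.0, unmatched_predicted=20.0, unmatched_gold=10.0)
print("[test] pr = %.5f (%.1f/%.1f) re = %.5f (%.1f/%.1f) f1 = %.5f [took %.3fs]"
      % (p, 80.0, 80.0 + 20.0, r, 80.0, 80.0 + 10.0, f1, time.time() - start))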
@@ -904,9 +904,9 @@ def print_eval_result(examples, expredictions, logger):
     random.shuffle(trainexamples)

     for idx, trex in enumerate(trainexamples, 1):
-        if (idx - 1) % LOSS_EVAL_EPOCH == 0:
+        if (idx - 1) % LOSS_EVAL_EPOCH == 0 and idx > 1:
             adam.status()
-            sys.stderr.write("epoch = %d.%d loss = %.6f [took %.3f s]\n" % (
+            sys.stderr.write("epoch=%d.%d loss=%.4f [took %.3fs]\n" % (
                 epoch, idx-1, (loss/idx), time.time() - starttime))
             starttime = time.time()

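The added idx > 1 drops the report that the old guard emitted at the very first training example, where loss/idx reflects no completed progress through the epoch yet. A standalone check of the firing pattern, with the LOSS_EVAL_EPOCH value chosen arbitrarily for illustration:

# Standalone check, not code from the commit: which 1-based example indices
# trigger the periodic loss report, before and after adding "idx > 1".
LOSS_EVAL_EPOCH = 100  # arbitrary value for illustration

old_guard = [idx for idx in range(1, 501) if (idx - 1) % LOSS_EVAL_EPOCH == 0]
new_guard = [idx for idx in range(1, 501) if (idx - 1) % LOSS_EVAL_EPOCH == 0 and idx > 1]

print(old_guard)  # [1, 101, 201, 301, 401]  -- fires on the very first example
print(new_guard)  # [101, 201, 301, 401]     -- first, uninformative report skipped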
@@ -968,7 +968,7 @@ def print_eval_result(examples, expredictions, logger):
             sys.stderr.write(" -- saving to {}".format(model_file_name))
             model.save(model_file_name)
             last_updated_epoch = epoch
-        sys.stderr.write(" [took %.3f s]\n" % (time.time() - devstarttime))
+        sys.stderr.write(" [took %.3fs]\n" % (time.time() - devstarttime))
         starttime = time.time()
         if epoch - last_updated_epoch > PATIENCE:
             sys.stderr.write("Ran out of patience, ending training.\n")
@@ -999,7 +999,7 @@ def print_eval_result(examples, expredictions, logger):
         testargmax = decode(exfs[tidx - 1], len(testex.tokens), valid_fes_for_frame)
         testpredictions.append(testargmax)

-    sys.stderr.write(" [took %.3f s]\n" % (time.time() - teststarttime))
+    sys.stderr.write(" [took %.3fs]\n" % (time.time() - teststarttime))
     sys.stderr.write("printing output conll to " + out_conll_file + " ... ")
     print_as_conll(devexamples, testpredictions)
     sys.stderr.write("done!\n")
@@ -1026,7 +1026,7 @@ def print_eval_result(examples, expredictions, logger):
                              testidx=tidx - 1)
         testpredictions.append(testargmax)

-    sys.stderr.write(" [took %.3f s]\n" % (time.time() - teststarttime))
+    sys.stderr.write(" [took %.3fs]\n" % (time.time() - teststarttime))
     sys.stderr.write("printing output conll to " + out_conll_file + " ... ")
     print_as_conll(devexamples, testpredictions)
     sys.stderr.write("done!\n")
