Skip to content

Commit

Permalink
display mean and std when benchmarking
Browse files Browse the repository at this point in the history
  • Loading branch information
sfchen committed Mar 24, 2017
1 parent 79c69c3 commit 4ce4a76
Show file tree
Hide file tree
Showing 2 changed files with 3 additions and 1 deletion.
2 changes: 1 addition & 1 deletion draw.py
Expand Up @@ -77,7 +77,7 @@ def plot_benchmark(scores_arr, algorithms_arr, filename):
plt.figure(1, figsize=(8,8))
plt.title(title, size=20, color='#333333')
plt.xlim(1, passes)
plt.ylim(0.95, 1.001)
plt.ylim(0.97, 1.001)
plt.ylabel('Score', size=16, color='#333333')
plt.xlabel('Validation pass (sorted by score)', size=16, color='#333333')
for i in xrange(len(scores_arr)):
Expand Down
2 changes: 2 additions & 0 deletions train.py
Expand Up @@ -241,6 +241,8 @@ def main():
for m in xrange(len(models)):
print("\nbenchmark with: " + names[m])
scores_arr.append(train(models[m], data, label, samples, options, True))
for m in xrange(len(models)):
print("\nbenchmark mean score with: " + names[m] + " mean " + str(np.mean(scores_arr[m])) + ", std " + str(np.std(scores_arr[m])))
print("\nploting benchmark result...")
plot_benchmark(scores_arr, names, "benchmark.png")
else:
Expand Down

0 comments on commit 4ce4a76

Please sign in to comment.