Commit

sets default logdir for writer
lanpa committed Aug 18, 2017
1 parent b4d84aa · commit 0ac261a
Showing 4 changed files with 29 additions and 32 deletions.
5 changes: 2 additions & 3 deletions demo.py
@@ -2,10 +2,9 @@
 import torchvision.utils as vutils
 import numpy as np
 import torchvision.models as models
-from datetime import datetime
 from tensorboard import SummaryWriter
-resnet18 = models.resnet18(True)
-writer = SummaryWriter('runs/'+datetime.now().strftime('%B%d %H:%M:%S'))
+resnet18 = models.resnet18(False)
+writer = SummaryWriter()
 sample_rate = 44100
 freqs = [262, 294, 330, 349, 392, 440, 440, 440, 440, 440, 440]
 for n_iter in range(100):
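
With this commit the demo no longer builds a timestamped run directory by hand. A minimal sketch of the new call pattern, assuming this revision of the tensorboard package is installed (the directory in the comment is illustrative; the actual name depends on date, time and hostname):

    from tensorboard import SummaryWriter

    # No log_dir argument: events go to e.g. runs/Aug18_14-02-11_myhost
    writer = SummaryWriter()
    writer.add_scalar('demo/value', 0.5, 1)  # tag, scalar value, global step
    writer.close()
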
43 changes: 19 additions & 24 deletions demo_embedding.py
@@ -1,38 +1,35 @@
 import torch
 import torch.nn as nn
-from torch.optim import Adam
-from torch.autograd.variable import Variable
 import torch.nn.functional as F
-from collections import OrderedDict
-from tensorboard import SummaryWriter
-from datetime import datetime
-from torch.utils.data import TensorDataset,DataLoader
-import os
+from torch.autograd.variable import Variable
+from tensorboard import SummaryWriter
+from torch.utils.data import TensorDataset, DataLoader
 
 #EMBEDDING VISUALIZATION FOR A TWO-CLASSES PROBLEM
 
 #just a bunch of layers
 class M(nn.Module):
     def __init__(self):
-        super(M,self).__init__()
-        self.cn1 = nn.Conv2d(in_channels=1,out_channels=64,kernel_size=3)
-        self.cn2 = nn.Conv2d(in_channels=64,out_channels=32,kernel_size=3)
-        self.fc1 = nn.Linear(in_features=128,out_features=2)
-    def forward(self,i):
+        super(M, self).__init__()
+        self.cn1 = nn.Conv2d(in_channels=1, out_channels=64, kernel_size=3)
+        self.cn2 = nn.Conv2d(in_channels=64, out_channels=32, kernel_size=3)
+        self.fc1 = nn.Linear(in_features=128, out_features=2)
+    def forward(self, i):
         i = self.cn1(i)
         i = F.relu(i)
-        i = F.max_pool2d(i,2)
+        i = F.max_pool2d(i, 2)
         i =self.cn2(i)
         i = F.relu(i)
-        i = F.max_pool2d(i,2)
-        i = i.view(len(i),-1)
+        i = F.max_pool2d(i, 2)
+        i = i.view(len(i), -1)
         i = self.fc1(i)
         i = F.log_softmax(i)
         return i
 
 #get some random data around value
-def get_data(value,shape):
-    data= torch.ones(shape)*value
+def get_data(value, shape):
+    data = torch.ones(shape)*value
     #add some noise
     data += torch.randn(shape)**2
     return data
@@ -47,13 +44,12 @@ def get_data(value,shape):
 #network
 m = M()
 #loss and optim
-loss = torch.nn.NLLLoss()
-optimizer = Adam(params=m.parameters())
+loss = nn.NLLLoss()
+optimizer = torch.optim.Adam(params=m.parameters())
 #settings for train and log
 num_epochs = 20
 embedding_log = 5
-writer_name = datetime.now().strftime('%B%d %H:%M:%S')
-writer = SummaryWriter(os.path.join("runs",writer_name))
+writer = SummaryWriter()
 
 #TRAIN
 for epoch in range(num_epochs):
@@ -77,12 +73,11 @@ def get_data(value,shape):
         if j % embedding_log == 0:
             print("loss_value:{}".format(loss_value.data[0]))
             #we need 3 dimension for tensor to visualize it!
-            out = torch.cat((out,torch.ones(len(out),1)),1)
-            #write the embedding for the timestep
+            out = torch.cat((out, torch.ones(len(out), 1)), 1)
             writer.add_embedding(out.data, metadata=label_batch.data, label_img=data_batch.data, global_step=n_iter)
 
 writer.close()
 
-#tensorboard --logdir runs
-#you should now see a dropdown list with all the timestep,
+# tensorboard --logdir runs
+# you should now see a dropdown list with all the timestep,
 # last timestep should have a visible separation between the two classes
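
As the in-loop comment notes, the projector wants at least three dimensions per point, so the demo pads its 2-D outputs with a column of ones before logging. A minimal standalone sketch of that padding step, with made-up sizes (100 samples, 2 features) and assuming this revision's add_embedding signature:

    import torch
    from tensorboard import SummaryWriter

    writer = SummaryWriter()
    feats = torch.randn(100, 2)                        # one 2-D embedding per sample
    feats = torch.cat((feats, torch.ones(100, 1)), 1)  # pad to 3 columns, as the demo does
    writer.add_embedding(feats, global_step=0)
    writer.close()
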
3 changes: 1 addition & 2 deletions demo_graph.py
@@ -4,7 +4,6 @@
 import numpy as np
 import torch.nn.functional as F
 import torchvision.models as models
-from datetime import datetime
 from tensorboard import SummaryWriter
 
 class Mnist(nn.Module):
@@ -33,7 +32,7 @@ def forward(self, x):
 # if you want to show the input tensor, set requires_grad=True
 res = model(torch.autograd.Variable(torch.Tensor(1,1,28,28), requires_grad=True))
 
-writer = SummaryWriter('runs/'+datetime.now().strftime('%B%d %H:%M:%S'))
+writer = SummaryWriter()
 writer.add_graph(model, res)
 
 writer.close()
10 changes: 7 additions & 3 deletions tensorboard/writer.py
@@ -218,13 +218,17 @@ class SummaryWriter(object):
     to add data to the file directly from the training loop, without slowing down
     training.
     """
-    def __init__(self, log_dir):
+    def __init__(self, log_dir=None, comment=''):
         """
         Args:
-            log_dir (string): save location
+            log_dir (string): save location, defaults to current runs/DATE_TIME_HOSTNAME
+            comment (string): comment that appends to the default log_dir
         """
+        if log_dir == None:
+            import socket
+            from datetime import datetime
+            log_dir = os.path.join('runs', datetime.now().strftime('%b%d_%H-%M-%S')+'_'+socket.gethostname()+comment)
         self.file_writer = FileWriter(logdir=log_dir)
         v = 1E-12
         buckets = []
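
The default directory logic above can be traced by hand; a minimal sketch of the name SummaryWriter() now picks (output varies with date, time and hostname; note the comment string is appended verbatim, with no extra separator):

    import os
    import socket
    from datetime import datetime

    comment = '-baseline'  # hypothetical suffix to tell parallel runs apart
    log_dir = os.path.join(
        'runs',
        datetime.now().strftime('%b%d_%H-%M-%S') + '_' + socket.gethostname() + comment)
    print(log_dir)  # e.g. runs/Aug18_14-02-11_myhost-baseline
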
