Skip to content

Commit

Permalink
Use dense layer as decoder
Browse files Browse the repository at this point in the history
  • Loading branch information
pbloem committed Apr 26, 2018
1 parent 044530d commit 2614800
Show file tree
Hide file tree
Showing 2 changed files with 19 additions and 9 deletions.
25 changes: 17 additions & 8 deletions g1vae.experiment.py
Expand Up @@ -45,9 +45,9 @@ class GraphASHLayer(gaussian.HyperLayer):
"""

def __init__(self, nodes, out_shape, k, additional=0, sigma_scale=0.1, fix_values=False, sigma_floor=0.0, subsample=None):
def __init__(self, nodes, out_shape, k, additional=0, sigma_scale=0.1, fix_values=False, min_sigma=0.0, subsample=None):

super().__init__(in_rank=2, out_shape=out_shape, additional=additional, bias_type=gaussian.Bias.DENSE, subsample=subsample, sigma_floor=sigma_floor)
super().__init__(in_rank=2, out_shape=out_shape, additional=additional, bias_type=gaussian.Bias.DENSE, subsample=subsample, sigma_floor=min_sigma)

self.n = nodes

Expand Down Expand Up @@ -136,7 +136,8 @@ def generate_er(n=128, m=512, num=64):
SIZE = 60000
PLOT = True

def go(nodes=128, links=512, batch=64, epochs=350, k=750, kpe=7, additional=512, modelname='baseline', cuda=False, seed=1, bias=True, lr=0.001, lambd=0.01, subsample=None, fix_values=False):
def go(nodes=128, links=512, batch=64, epochs=350, k=750, kpe=7, additional=512, modelname='baseline', cuda=False,
seed=1, bias=True, lr=0.001, lambd=0.01, subsample=None, fix_values=False, min_sigma=0.0):

FT = torch.cuda.FloatTensor if cuda else torch.FloatTensor

Expand All @@ -154,10 +155,12 @@ def go(nodes=128, links=512, batch=64, epochs=350, k=750, kpe=7, additional=512,

zsize = 256

encoder = GraphASHLayer(nodes, (zsize * 2, ), k=kpe, additional=additional, subsample=subsample, fix_values=fix_values)
encoder = GraphASHLayer(nodes, (zsize * 2, ), k=kpe, additional=additional, subsample=subsample, fix_values=fix_values, min_sigma=min_sigma)

decoder = gaussian.CASHLayer((1, zsize), SHAPE, poolsize=1, k=k, additional=additional, has_bias=bias,
has_channels=True, adaptive_bias=False, subsample=subsample, fix_values=fix_values)
# decoder = gaussian.CASHLayer((1, zsize), SHAPE, poolsize=1, k=k, additional=additional, has_bias=bias,
# has_channels=True, adaptive_bias=False, subsample=subsample, fix_values=fix_values,
# min_sigma=min_sigma)
decoder = nn.Linear(zsize, util.prod(SHAPE))

if cuda:
encoder.cuda()
Expand Down Expand Up @@ -198,7 +201,8 @@ def go(nodes=128, links=512, batch=64, epochs=350, k=750, kpe=7, additional=512,

sample = sample.unsqueeze(1)

reconstruction = nn.functional.sigmoid(decoder(sample))
reconstruction = decoder(sample).view(-1, *SHAPE)
reconstruction = nn.functional.sigmoid(reconstruction)

loss = vae_loss(batch_dense, reconstruction, mu, logvar)

Expand Down Expand Up @@ -297,11 +301,16 @@ def go(nodes=128, links=512, batch=64, epochs=350, k=750, kpe=7, additional=512,
                        help="Whether to force the values to be 1",
action="store_true")

parser.add_argument("-W", "--min-sigma",
dest="min_sigma",
help="Minimum value of sigma.",
default=0.0, type=float)

options = parser.parse_args()

print('OPTIONS ', options)
LOG.info('OPTIONS ' + str(options))

go(batch=options.batch_size, nodes=options.nodes, links=options.links, k=options.k, kpe=options.kpe, bias=options.bias,
additional=options.additional, modelname=options.model, cuda=options.cuda,
lr=options.lr, lambd=options.lambd, subsample=options.subsample, fix_values=options.fix_values)
lr=options.lr, lambd=options.lambd, subsample=options.subsample, fix_values=options.fix_values, min_sigma=options.min_sigma)
3 changes: 2 additions & 1 deletion mnist-rec.experiment.py
Expand Up @@ -215,9 +215,10 @@ def go(batch=64, epochs=350, k=750, additional=512, model_name='non-adaptive', c

plt.cla()
for j, s in enumerate(sigs):
plt.plot([j] * len(s), s, linewidth=0, marker='.')
plt.plot([j] * len(s), s, linewidth=0, marker='.', alpha=0.2)

plt.savefig('sigmas.pdf')
plt.savefig('sigmas.png')

total = 0.0
num = 0
Expand Down

0 comments on commit 2614800

Please sign in to comment.