Skip to content

Commit

Permalink
Merge pull request #48 from jxhe/master
Browse files Browse the repository at this point in the history
Modify the vae_text example to fit the new transformer interface
  • Loading branch information
ZhitingHu committed Nov 12, 2018
2 parents 9098cb6 + cea4f86 commit f2a040b
Show file tree
Hide file tree
Showing 2 changed files with 12 additions and 4 deletions.
8 changes: 6 additions & 2 deletions examples/vae_text/config_trans_ptb.py
Original file line number Diff line number Diff line change
Expand Up @@ -72,9 +72,7 @@
trans_hparams = {
'output_layer_bias': False,
'embedding_dropout': embedding_dropout,
'attention_dropout': attention_dropout,
'residual_dropout': residual_dropout,
'num_heads': 8,
'num_blocks': num_blocks,
'dim': hidden_size,
'initializer': {
Expand All @@ -85,6 +83,12 @@
'distribution':'uniform',
},
},
'multihead_attention': {
'dropout_rate': attention_dropout,
'num_heads': 8,
'num_units': hidden_size,
'output_dim': hidden_size
},
'poswise_feedforward': {
'name':'fnn',
'layers':[
Expand Down
8 changes: 6 additions & 2 deletions examples/vae_text/config_trans_yahoo.py
Original file line number Diff line number Diff line change
Expand Up @@ -72,9 +72,7 @@
trans_hparams = {
'output_layer_bias': False,
'embedding_dropout': embedding_dropout,
'attention_dropout': attention_dropout,
'residual_dropout': residual_dropout,
'num_heads': 8,
'num_blocks': num_blocks,
'dim': hidden_size,
'initializer': {
Expand All @@ -85,6 +83,12 @@
'distribution':'uniform',
},
},
'multihead_attention': {
'dropout_rate': attention_dropout,
'num_heads': 8,
'num_units': hidden_size,
'output_dim': hidden_size
},
'poswise_feedforward': {
'name':'fnn',
'layers':[
Expand Down

0 comments on commit f2a040b

Please sign in to comment.