Commit 49aba31

fix: bug fix for the example of hyperparameter optimization.
sishida21 committed Oct 31, 2019
1 parent e710820 commit 49aba31
Showing 2 changed files with 7 additions and 6 deletions.
10 changes: 5 additions & 5 deletions example_model/opt_param.py
@@ -1,7 +1,7 @@
 import tensorflow as tf
 import numpy as np
 import joblib
-import layers
+import kgcn.layers
 import tensorflow.contrib.keras as K

 def build_placeholders(info,config,batch_size=4):
@@ -39,14 +39,14 @@ def build_model(placeholders,info,config,batch_size=4):
     input_dim=info.feature_dim
     print(info.param["num_gcn_layer"])
     for i in range(int(info.param["num_gcn_layer"])):
-        layer=layers.GraphConv(internal_dim,adj_channel_num)(layer,adj=in_adjs)
-        layer=layers.GraphBatchNormalization()(layer,
+        layer=kgcn.layers.GraphConv(internal_dim,adj_channel_num)(layer,adj=in_adjs)
+        layer=kgcn.layers.GraphBatchNormalization()(layer,
             max_node_num=info.graph_node_num,enabled_node_nums=enabled_node_nums)
         layer=tf.sigmoid(layer)
         layer=K.layers.Dropout(dropout_rate)(layer)
-        layer=layers.GraphDense(internal_dim)(layer)
+        layer=kgcn.layers.GraphDense(internal_dim)(layer)
         layer=tf.sigmoid(layer)
-    layer=layers.GraphGather()(layer)
+    layer=kgcn.layers.GraphGather()(layer)
     output_dim=2
     layer=K.layers.Dense(output_dim)(layer)
     prediction=tf.nn.softmax(layer)
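Both hunks in this file replace the bare `import layers` with the packaged `kgcn.layers` module, so the example no longer depends on being run from the directory that contains layers.py. The unchanged context also shows that `info.param["num_gcn_layer"]` is cast to int before being used as a loop bound, which is what the domain.json change below supports on the optimizer side. A minimal sketch of that pattern (the helper name stack_gcn_layers is illustrative, not part of this commit, and assumes the kgcn package is importable):

import tensorflow as tf
import kgcn.layers

def stack_gcn_layers(layer, in_adjs, num_gcn_layer, internal_dim, adj_channel_num):
    # num_gcn_layer controls how many GraphConv blocks are stacked.
    # range() requires an integer, hence the explicit int(...) cast here
    # and the "data_type": "int" hint added to domain.json below.
    for _ in range(int(num_gcn_layer)):
        layer = kgcn.layers.GraphConv(internal_dim, adj_channel_num)(layer, adj=in_adjs)
        layer = tf.sigmoid(layer)
    return layer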
3 changes: 2 additions & 1 deletion example_param/domain.json
@@ -2,7 +2,8 @@
   {
     "name":"num_gcn_layer",
     "type": "discrete",
-    "domain": [0,1,2]
+    "domain": [0,1,2],
+    "data_type": "int"
   }
 ]
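The domain file appears to follow the GPyOpt-style search-space format (a list of entries with name, type, and domain); the new "data_type": "int" field lets the optimization driver cast suggested values back to integers, since Bayesian-optimization backends typically return every suggestion as a float. A hypothetical sketch of how such a file could be loaded and applied (load_domain and cast_params are illustrative names, not part of this repository):

import json

def load_domain(path="example_param/domain.json"):
    # Read the search-space definition consumed by the optimizer.
    with open(path) as f:
        return json.load(f)

def cast_params(domain, values):
    # Cast each suggested value to its declared "data_type" (default: float).
    # A suggestion of 2.0 for num_gcn_layer becomes the integer 2 before it
    # reaches build_model.
    casters = {"int": int, "float": float}
    return {d["name"]: casters.get(d.get("data_type", "float"), float)(v)
            for d, v in zip(domain, values)}

# Example usage:
# print(cast_params(load_domain(), [2.0]))  # -> {'num_gcn_layer': 2}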
