Skip to content

Commit

Permalink
SoftmaxとGlobalAveragePoolはとりあえずIdentityにするよう修正し、DropoutをIdentityに変えるときも余計なパラメータを初期化しておくよう修正
Browse files Browse the repository at this point in the history
  • Loading branch information
Masato Hori committed Feb 1, 2018
1 parent 2ff5523 commit f88f400
Showing 1 changed file with 16 additions and 13 deletions.
29 changes: 16 additions & 13 deletions python/test/utils/test_conversion.py
Original file line number Diff line number Diff line change
Expand Up @@ -98,13 +98,14 @@ def convert_to_function(node):
elif axis_count > 1:
raise ValueError("More than one axis was specifed as the Concat Axis")
elif node.op_type == "Softmax":
func.type = "Identity"
# default to channel axis
func.softmax_param.axis = 1
for attr in node.attribute:
if attr.name == "axis":
if attr.type != AttributeProto.INT:
raise ValueError("Softmax axis must be a single integer")
func.softmax_param.axis = attr.i
#func.softmax_param.axis = 1
#for attr in node.attribute:
# if attr.name == "axis":
# if attr.type != AttributeProto.INT:
# raise ValueError("Softmax axis must be a single integer")
# func.softmax_param.axis = attr.i
elif node.op_type == "Dropout":
# Dropout requires a ratio to be set
for attr in node.attribute:
Expand All @@ -115,6 +116,7 @@ def convert_to_function(node):
# is_test is True meaning we will not be applying dropout.
# We are simply going to pass through the input values
# by using the Identity function
func.ClearField("dropout_param")
func.type = "Identity"
# We break here so we don't write any needless attributes
break
Expand Down Expand Up @@ -169,12 +171,13 @@ def convert_to_function(node):
# Do we really need this? (Default value should be set by NNabla)
cp.dilation.dim.extend([1 for _ in range(dim)])
elif node.op_type == "GlobalAveragePool":
# We substitute GlobalAveragePool with an AveragePool
# that has the same kernel size as the input WxH
app = func.average_pooling_param
app.kernel.dim.extend([3,3])
app.stride.dim.extend([3,3])
app.pad.dim.extend([0,0])
func.type = "Identity"
## We substitute GlobalAveragePool with an AveragePool
## that has the same kernel size as the input WxH
#app = func.average_pooling_param
#app.kernel.dim.extend([3,3])
#app.stride.dim.extend([3,3])
#app.pad.dim.extend([0,0])
elif node.op_type == "MaxPool":
mpp = func.max_pooling_param
dims = []
Expand Down Expand Up @@ -662,7 +665,6 @@ def test_onnx_nnp_conversion_squeezenet(tmpdir, nnp_fixture):
path = os.path.join(onnx_dir, onnx_name)
# Process onnx with caffe2 backend
model = onnx.load(path)
pdb.set_trace()
if show_onnx:
print(model)
img = np.random.rand(1,3,224,224).astype(np.float32)
Expand Down Expand Up @@ -703,6 +705,7 @@ def change_to_copy(node):
nnpdir = tmpdir.mkdir("nnp")
p = os.path.join(str(nnpdir), nnp_name)
nnpex.export_nnp(p)
pdb.set_trace()
# read exported nnp and run network
nn_net = nnload.load([p])
#exe = run_executor(nn_net, exec_name)
Expand Down

0 comments on commit f88f400

Please sign in to comment.