backward compat for graphing with pytorch<=1.1 (#497)
lanpa committed Aug 24, 2019
1 parent 4ad2ac6 commit 9b9933a
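The change works around an API rename in PyTorch's JIT graph: newer releases expose the unique name of a graph value as `Value.debugName()`, while PyTorch <= 1.1 only has `Value.uniqueName()`. The commit probes for `debugName()` once in `parse()` and sets a module-level `backward_mode` flag so the rest of the code can pick the right accessor. A minimal sketch of the same fallback idea (the helper name `node_name` is hypothetical, not part of the commit):

```python
# Sketch only: resolve a JIT graph value's unique name on both old and new
# PyTorch. The commit below caches this decision in a global backward_mode
# flag instead of retrying on every call.
def node_name(value):
    try:
        return value.debugName()    # newer PyTorch (the renamed accessor)
    except AttributeError:
        return value.uniqueName()   # PyTorch <= 1.1
```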
Showing 1 changed file with 17 additions and 3 deletions.
20 changes: 17 additions & 3 deletions tensorboardX/pytorch_graph.py
@@ -13,6 +13,7 @@
               'kind', 'outputs', 'outputsSize', 'scopeName']
 methods_IO = ['node', 'offset', 'debugName']  # 'unique' <int> , 'type' <Tensor<class 'torch._C.Type'>>
 
+backward_mode = False
 
 class NodeBase(object):
     def __init__(self,
@@ -44,14 +45,18 @@ def __init__(self, node_cpp, valid_methods):
         super(NodePy, self).__init__(node_cpp)
         valid_methods = valid_methods[:]
         self.inputs = []
 
+        global backward_mode
         for m in valid_methods:
             if m == 'inputs' or m == 'outputs':
                 list_of_node = list(getattr(node_cpp, m)())
                 io_unique_names = []
                 io_tensor_sizes = []
                 for n in list_of_node:
-                    io_unique_names.append(n.debugName())
+                    if backward_mode:
+                        io_unique_names.append(n.uniqueName())
+                    else:
+                        io_unique_names.append(n.debugName())
 
                     if n.type().kind() == 'CompleteTensorType':
                         io_tensor_sizes.append(n.type().sizes())
                     else:
@@ -61,7 +66,10 @@ def __init__(self, node_cpp, valid_methods):
                 setattr(self, m + 'tensor_size', io_tensor_sizes)
 
             else:
-                setattr(self, m, getattr(node_cpp, m)())
+                if m == 'debugName' and backward_mode:
+                    setattr(self, m, getattr(node_cpp, 'uniqueName')())
+                else:
+                    setattr(self, m, getattr(node_cpp, m)())
 
 
 class NodePyIO(NodePy):
@@ -211,6 +219,12 @@ def parse(graph, args=None, omit_useless_nodes=True):
 
     nodes_py = GraphPy()
     for i, node in enumerate(graph.inputs()):
+        global backward_mode
+        if not backward_mode:
+            try:
+                node.debugName()
+            except:
+                backward_mode = True
         if omit_useless_nodes:
             if len(node.uses()) == 0:  # number of user of the node (= number of outputs/ fanout)
                 continue
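For context, a minimal usage sketch (the model, input shape, and log directory are made up for illustration): `SummaryWriter.add_graph` is the public tensorboardX call that traces a model and hands the resulting JIT graph to the `parse()` function patched above, so with this commit the same call should work on PyTorch <= 1.1 as well as newer releases.

```python
import torch
import torch.nn as nn
from tensorboardX import SummaryWriter

# Any traceable module works; this tiny MLP is just an example.
model = nn.Sequential(nn.Linear(4, 8), nn.ReLU(), nn.Linear(8, 2))
dummy_input = torch.zeros(1, 4)

writer = SummaryWriter('runs/graph_demo')
writer.add_graph(model, dummy_input)   # exercises pytorch_graph.parse()
writer.close()
```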
