Commit 04cf9e3

added some docs
LeviBorodenko committed Jan 27, 2020
1 parent fc960c0 commit 04cf9e3
Showing 3 changed files with 37 additions and 3 deletions.
12 changes: 12 additions & 0 deletions publish.sh
@@ -0,0 +1,12 @@
+git add .
+git commit -m "$1"
+git add .
+git commit -m "$1"
+git push
+
+git checkout gh-pages
+python setup.py build
+git add .
+git commit -n -m $1
+git push
+git checkout master
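
Read as a whole, the script is presumably meant to be run from the repository root as ./publish.sh "commit message": it commits and pushes the working tree, then switches to gh-pages, rebuilds with python setup.py build, commits and pushes the build output, and checks master back out. One caveat worth noting: the gh-pages commit passes the message unquoted (git commit -n -m $1), so a multi-word message only carries its first word on that branch.
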
26 changes: 24 additions & 2 deletions src/dgcnn/components.py
@@ -243,8 +243,30 @@ def call(self, inputs):
         # concat them to (N x sum(c_i)) signal
         Z = tf.concat(rec_conv_signals, axis=-1)

-        # Check if we apply SortPooling
-        if not self.use_sortpooling:
+        # if we do not sortpool or flatten then simply
+        # return Z
+        if not self.use_sortpooling and not self.flatten_signals:
             return Z

+        # if we do not sortpool but want to flatten
+        # then simply flatten Z and return it
+        if not self.use_sortpooling and self.flatten_signals:
+
+            # Wanting to flatten means we convert our Z that has shape
+            # (..., n, sum(c_i)) to (..., n * sum(c_i)).
+
+            # shape that we need
+            ####
+
+            # (None) or (None, None) for non-temporal or
+            # temporal data
+            outer_shape = tf.shape(X)[:-2]
+            inner_shape = tf.constant([X.shape[-2] * sum(self.hidden_conv_units)])
+
+            shape = tf.concat([outer_shape, inner_shape], axis=0)
+
+            Z = tf.reshape(Z, shape)
+
+            return Z
+
         # apply SortPooling
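
For reference, the reshape being added here can be exercised in isolation. A minimal sketch follows, assuming a made-up helper name flatten_node_signals and example shapes that are not part of the package: it collapses the trailing (n, sum(c_i)) axes into a single axis while leaving the leading batch (and optional timestep) axes dynamic, which is what the outer_shape / inner_shape concatenation above achieves.

    import tensorflow as tf

    def flatten_node_signals(Z, n, total_channels):
        # hypothetical standalone helper, not part of dgcnn
        # leading dims: (batch,) for static graphs, (batch, timesteps) for temporal ones
        outer_shape = tf.shape(Z)[:-2]
        # the trailing (n, total_channels) axes collapse into one static axis
        inner_shape = tf.constant([n * total_channels])
        return tf.reshape(Z, tf.concat([outer_shape, inner_shape], axis=0))

    # non-temporal: (4, 10, 7) -> (4, 70)
    print(flatten_node_signals(tf.random.normal((4, 10, 7)), 10, 7).shape)
    # temporal: (4, 3, 10, 7) -> (4, 3, 70)
    print(flatten_node_signals(tf.random.normal((4, 3, 10, 7)), 10, 7).shape)
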
2 changes: 1 addition & 1 deletion src/dgcnn/layers.py
@@ -26,7 +26,7 @@ class GraphConvolution(layers.Layer):
             These are the (temporal) graph signals.
         T (tensor): Shape (batch, timesteps (optional), N, N).
-            The corresponding adjacency matrices.
+            The corresponding transition matrices.

     Keyword Arguments:
         num_hidden_features (int):
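
The one-word docstring fix matters: T is documented as a transition matrix rather than a raw adjacency matrix. As a hedged illustration only (the row-normalisation below is the usual random-walk convention and is not spelled out in this diff), a transition matrix can be derived from an adjacency matrix A as T = D^{-1} A:

    import numpy as np

    # toy undirected graph on 3 nodes (illustrative only)
    A = np.array([[0., 1., 1.],
                  [1., 0., 0.],
                  [1., 0., 0.]])
    degrees = A.sum(axis=1, keepdims=True)  # degree of each node
    T = A / degrees                         # row-stochastic transition matrix
    print(T.sum(axis=1))                    # every row sums to 1.0
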
