Skip to content

Commit

Permalink
A few fixes to improve the documentation build
Browse files Browse the repository at this point in the history
  • Loading branch information
neubig committed May 30, 2017
1 parent e1889c0 commit 83fce21
Show file tree
Hide file tree
Showing 5 changed files with 14 additions and 17 deletions.
4 changes: 2 additions & 2 deletions doc/source/doc_util.py
Original file line number Diff line number Diff line change
Expand Up @@ -59,7 +59,7 @@ def get_func_name(func_str):
return name


def create_doc_copy(in_file='../../python/_dynet.pyx', out_file='dynet.py'):
def create_doc_copy(in_file, out_file):

in_comment = False
in_func = False
Expand All @@ -68,7 +68,7 @@ def create_doc_copy(in_file='../../python/_dynet.pyx', out_file='dynet.py'):
with open(in_file, 'r') as pyx:
for l in pyx:
# Check if this line is a function declaration (def or cpdef)
is_func = re.match(r'(\s*)(?:cp)?def (.*)\((.*)\):', l, re.I)
is_func = re.match(r'(\s*)(?:cp)?def ([^\(]*)\((.*)\):', l, re.I)
if is_func:
# If the previous line was a function, print pass
if in_func:
Expand Down
6 changes: 4 additions & 2 deletions xnmt/batcher.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,9 +21,11 @@ class Batcher:
PAIR_SRC = 0
PAIR_TRG = 1

def __init__(self, batch_size, pad_token=None):
    """
    Create a batcher that groups sentences into batches of a fixed size.

    :param batch_size: Number of sentences per batch.
    :param pad_token: Token used to pad shorter sentences up to the batch's
        maximum length. Defaults to ``Vocab.ES`` when not given.
    """
    self.batch_size = batch_size
    # The only reason Vocab.ES is not the literal default argument is that it
    # currently breaks our documentation pipeline, so resolve it here instead.
    # Use an identity check ("is not None"), not "!= None" (PEP 8).
    self.pad_token = pad_token if pad_token is not None else Vocab.ES

@staticmethod
def is_batch_sent(sent):
Expand Down
8 changes: 2 additions & 6 deletions xnmt/embedder.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,18 +11,14 @@ class Embedder:
def embed(self, word):
    """
    Embed a single word.

    :param word: This will generally be an integer word ID, but could also be something like a string.
    :returns: An expression representing the embedding of the word (implementation-defined by the subclass).
    :raises NotImplementedError: Always; subclasses of Embedder must override this.
    """
    raise NotImplementedError('embed must be implemented in Embedder subclasses')

def embed_sent(self, sent):
    """
    Embed a full sent worth of words.

    :param sent: This will generally be a list of word IDs, but could also be a list of strings or some other format.
    :returns: A sequence of embedded expressions, one per word (implementation-defined by the subclass).
    :raises NotImplementedError: Always; subclasses of Embedder must override this.
    """
    raise NotImplementedError('embed_sent must be implemented in Embedder subclasses')

Expand Down
7 changes: 2 additions & 5 deletions xnmt/encoder.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,11 +13,8 @@ class Encoder:
def transduce(self, sent):
    """
    Encode inputs into outputs.

    :param sent: The input to be encoded. This is duck-typed, so it is the appropriate input for this particular type of encoder. Frequently it will be a list of word embeddings, but it can be anything else.
    :returns: The encoded output. Frequently this will be a list of expressions representing the encoded vectors for each word.
    :raises NotImplementedError: Always; subclasses of Encoder must override this.
    """
    raise NotImplementedError('transduce must be implemented in Encoder subclasses')

Expand Down
6 changes: 4 additions & 2 deletions xnmt/search_strategy.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,10 +11,12 @@ def generate_output(self):

class BeamSearch(SearchStrategy):

def __init__(self, b, max_len=100, len_norm=None):
    """
    Create a beam-search strategy.

    :param b: Beam width (number of hypotheses kept at each step).
    :param max_len: Maximum output length before search is cut off.
    :param len_norm: Length-normalization strategy applied to hypothesis
        scores. Defaults to ``NoNormalization()`` when not given.
    """
    self.b = b
    self.max_len = max_len
    # The only reason NoNormalization() is not the literal default argument is
    # that it currently breaks our documentation pipeline, so resolve it here.
    # Use an identity check ("is not None"), not "!= None" (PEP 8).
    self.len_norm = len_norm if len_norm is not None else NoNormalization()

class Hypothesis:
def __init__(self, score, id, state):
Expand Down

0 comments on commit 83fce21

Please sign in to comment.