✏️ Fixing typos.
BrikerMan committed May 22, 2019
1 parent dce5719 commit 8b38efb
Showing 2 changed files with 6 additions and 6 deletions.
8 changes: 4 additions & 4 deletions kashgari/pre_processors/classification_processor.py
@@ -39,9 +39,9 @@ def process_x_dataset(self,
                 target = utils.get_list_subset(dataset, subset)
             else:
                 target = dataset
-            numezied_target = self.numerize_token_sequences(target)
+            numerized_samples = self.numerize_token_sequences(target)
             target_maxlen = utils.get_tuple_item(maxlens, index)
-            padded_target = pad_sequences(numezied_target, target_maxlen)
+            padded_target = pad_sequences(numerized_samples, target_maxlen)
             result.append(padded_target)
         if len(result) == 1:
             return result[0]
@@ -58,8 +58,8 @@ def process_y_dataset(self,
                 target = utils.get_list_subset(dataset, subset)
             else:
                 target = dataset
-            numezied_target = self.numerize_label_sequences(target)
-            one_hot_result = to_categorical(numezied_target, len(self.label2idx))
+            numerized_samples = self.numerize_label_sequences(target)
+            one_hot_result = to_categorical(numerized_samples, len(self.label2idx))
             result.append(one_hot_result)
         if len(result) == 1:
             return result[0]
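For context, the renamed variable holds index-encoded samples that are then padded (for inputs) or one-hot encoded (for labels). Below is a minimal standalone sketch of that pattern using plain Keras utilities and a hypothetical token2idx / label2idx mapping; it illustrates only the steps these hunks touch, not the actual processor class or Kashgari's exact imports.

    from tensorflow.keras.preprocessing.sequence import pad_sequences
    from tensorflow.keras.utils import to_categorical

    # Hypothetical vocabularies, for illustration only.
    token2idx = {'<pad>': 0, 'hello': 1, 'world': 2, 'kashgari': 3}
    label2idx = {'greeting': 0, 'library': 1}

    samples = [['hello', 'world'], ['kashgari']]
    labels = ['greeting', 'library']

    # Numerize token sequences, then pad them to a fixed length (as in process_x_dataset).
    numerized_samples = [[token2idx[t] for t in seq] for seq in samples]
    padded_target = pad_sequences(numerized_samples, 4)

    # Numerize labels, then one-hot encode them (as in process_y_dataset).
    numerized_labels = [label2idx[l] for l in labels]
    one_hot_result = to_categorical(numerized_labels, len(label2idx))

    print(padded_target.shape)   # (2, 4)
    print(one_hot_result.shape)  # (2, 2)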
4 changes: 2 additions & 2 deletions kashgari/pre_processors/labeling_processor.py
@@ -132,9 +132,9 @@ def _process_sequence(self,
                 target = utils.get_list_subset(dataset, subset)
             else:
                 target = dataset
-            numezied_target = numerize_function(target)
+            numerized_samples = numerize_function(target)
             target_maxlen = utils.get_tuple_item(maxlens, index)
-            padded_target = pad_sequences(numezied_target, target_maxlen)
+            padded_target = pad_sequences(numerized_samples, target_maxlen)
             result.append(padded_target)
         if len(result) == 1:
             return result[0]
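In the labeling processor the same helper prepares both token sequences and tag sequences, which is why the hunk above shows a numerize_function argument instead of a fixed method call. A hedged sketch of that pattern, again with hypothetical vocabularies and helper names that are not part of the real class:

    from tensorflow.keras.preprocessing.sequence import pad_sequences

    # Hypothetical vocabularies, for illustration only.
    token2idx = {'<pad>': 0, 'I': 1, 'love': 2, 'Paris': 3}
    label2idx = {'O': 0, 'B-LOC': 1}

    def numerize_tokens(sequences):
        return [[token2idx[t] for t in seq] for seq in sequences]

    def numerize_labels(sequences):
        return [[label2idx[t] for t in seq] for seq in sequences]

    def process_sequence(dataset, numerize_function, maxlen):
        # Mirrors the hunk above: numerize with the supplied function, then pad.
        numerized_samples = numerize_function(dataset)
        return pad_sequences(numerized_samples, maxlen)

    x = process_sequence([['I', 'love', 'Paris']], numerize_tokens, maxlen=5)
    y = process_sequence([['O', 'O', 'B-LOC']], numerize_labels, maxlen=5)
    print(x.shape, y.shape)  # (1, 5) (1, 5)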
