Skip to content

Commit

Permalink
🐛 Fixing GPT2 bug.
Browse files — browse the repository at this point in the history
  • Loading branch information
BrikerMan committed Jun 26, 2019
1 parent f6b65f6 commit ff03b36
Showing 1 changed file with 5 additions and 6 deletions.
11 changes: 5 additions & 6 deletions kashgari/embeddings/gpt_2_embedding.py
Original file line number Diff line number Diff line change
Expand Up @@ -51,6 +51,11 @@ def __init__(self,
processor:
from_saved_model:
"""
super(GPT2Embedding, self).__init__(task=task,
sequence_length=sequence_length,
embedding_size=0,
processor=processor,
from_saved_model=from_saved_model)
if isinstance(sequence_length, tuple):
if len(sequence_length) > 2:
raise ValueError('BERT only more 2')
Expand All @@ -61,12 +66,6 @@ def __init__(self,
if sequence_length == 'variable':
self.sequence_length = None

super(GPT2Embedding, self).__init__(task=task,
sequence_length=sequence_length,
embedding_size=0,
processor=processor,
from_saved_model=from_saved_model)

self.processor.token_pad = 'pad'
self.processor.token_unk = 'unk'
self.processor.token_bos = 'pad'
Expand Down

0 comments on commit ff03b36

Please sign in to comment.