This repository has been archived by the owner on Nov 3, 2023. It is now read-only.
fix dialogpt dual usage of END_IDX #3256
Merged
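Background for the fix: DialoGPT's GPT-2 vocabulary ships with a single special token, <|endoftext|>, and (as the title and commit list suggest) the ParlAI agent had been using that one id both as END_IDX, the end-of-generation marker, and as NULL_IDX, the padding token. When shorter inputs in a batch are padded with the same id the model uses to detect end-of-text, batched decoding can diverge from single-example decoding. A minimal sketch of the collision, with illustrative token ids, not the PR's code:

# Illustrative values only: 50256 is <|endoftext|> in GPT-2's vocabulary.
END_IDX = 50256
NULL_IDX = 50256  # before this PR: padding reused the eos id

def pad_batch(seqs, pad_idx):
    # Right-pad token-id sequences to a common length.
    width = max(len(s) for s in seqs)
    return [s + [pad_idx] * (width - len(s)) for s in seqs]

batch = pad_batch([[10, 11], [10, 11, 12, 13]], NULL_IDX)
# batch[0] == [10, 11, 50256, 50256]: padding is now indistinguishable
# from a genuine end-of-text token. The commits below give padding its
# own index (null_idx = -1) so the two roles no longer collide.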
Commits (8)
6ade0eb  fix dialogpt dual usage of endoftext (jxmsML)
0946ef0  add null_idx = -1 (jxmsML)
0a37ef0  dialog bs test (jxmsML)
7ee25bf  Set null_idx in model and decoder, add to dialogpt test (jxmsML)
3665e94  small formats (jxmsML)
38796fd  accidental delete old test (jxmsML)
590ceb7  reviewer comment (jxmsML)
1fec4f8  Merge branch 'master' into dialogptdebug (jxmsML)
@@ -20,7 +20,6 @@
 )

 SPECIAL_TOKENS = {"bos_token": "<bos>", "eos_token": "<eos>", "pad_token": "<pad>"}

 NO_OP = "x"
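For context on SPECIAL_TOKENS above: with Hugging Face transformers, such a dict is normally registered via tokenizer.add_special_tokens, after which the model's embedding matrix must be resized to cover the new ids. A sketch of that usual pattern, not code from this PR (the DialoGPT-small checkpoint name is for illustration only):

from transformers import GPT2LMHeadModel, GPT2Tokenizer

tokenizer = GPT2Tokenizer.from_pretrained('microsoft/DialoGPT-small')
model = GPT2LMHeadModel.from_pretrained('microsoft/DialoGPT-small')

SPECIAL_TOKENS = {"bos_token": "<bos>", "eos_token": "<eos>", "pad_token": "<pad>"}

num_added = tokenizer.add_special_tokens(SPECIAL_TOKENS)  # returns 3 here
model.resize_token_embeddings(len(tokenizer))  # grow embeddings for new ids

# With a dedicated <pad>, padding no longer aliases end-of-text:
assert tokenizer.pad_token_id != tokenizer.eos_token_id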
@@ -6,6 +6,12 @@
 import unittest
 import parlai.utils.testing as testing_utils
 from parlai.core.agents import create_agent
+import sys
+import warnings
+
+if not sys.warnoptions:
+    warnings.simplefilter("ignore")
+

(stephenroller marked this conversation as resolved.)
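The sys.warnoptions guard added above follows the pattern from the Python warnings documentation: suppress warnings only when the user has not asked for specific behavior via the interpreter's -W flag. A self-contained illustration, not part of the diff:

import sys
import warnings

# sys.warnoptions is non-empty when Python was started with -W ...,
# in which case the user's explicit choice should win.
if not sys.warnoptions:
    warnings.simplefilter("ignore")

warnings.warn("noisy dependency warning")  # silently dropped above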
 @testing_utils.skipUnlessGPU

@@ -16,6 +22,64 @@ class TestDialogptModel(unittest.TestCase):
     Checks that DialoGPT gets a certain performance on the integration test task.
     """
+    def _test_batchsize(self, batchsize, add_special_tokens):
+        utterances = [
+            'How is your day so far?',
+            'I hope you you have a good day.',
+            "Nice to meet you. My name is John. ",
+            "I've got a feeling we're not in Kansas anymore.",
+        ]
+        opt = {
+            'model': 'hugging_face/dialogpt',
+            'gpt2_size': 'small',
+            'text_truncate': 100,
+            'label_truncate': 20,
+            'beam_min_length': 1,
+            'inference': 'beam',
+            'beam_size': 1,
+            'add_special_tokens': add_special_tokens,
+            'batchsize': batchsize,
+            'add_start_token': False,
+        }
+        dialogpt = create_agent(opt)
+
+        results_single = []
+        agents = [dialogpt.clone() for _ in utterances]
+        for u, a in zip(utterances, agents):
+            a.observe({'text': u, 'episode_done': True})
+            generation = a.act()['text']
+            results_single.append(generation)
+
+        results_batched = []
+        for idx in range(len(utterances) // batchsize):
+            agents = [dialogpt.clone() for _ in range(batchsize)]
+            batch = utterances[idx * batchsize : (idx + 1) * batchsize]
+            obs = []
+            for i, a in enumerate(agents):
+                obs.append(a.observe({'text': batch[i], 'episode_done': True}))
+            generations = [x['text'] for x in dialogpt.batch_act(obs)]
+            results_batched += generations
+
+        print(f'results_single = {results_single}')
+        print(f'results_batched = {results_batched}')
+        assert results_single == results_batched
+
+    def test_batchsize(self):
+        """
+        Ensures dialogpt provides the same generation results regardless of batchsize.
+        """
+        for batchsize in [2, 2, 4, 2]:
+            for add_special_tokens in [True]:
+                if batchsize > 1 and not add_special_tokens:
+                    continue
+                with self.subTest(
+                    f'test_batchsize with bs={batchsize} and add_special_token={add_special_tokens}'
+                ):
+                    print(
+                        f'_____________test_batchsize with bs={batchsize} and add_special_token={add_special_tokens}'
+                    )
+                    self._test_batchsize(batchsize, add_special_tokens)
+
     @testing_utils.retry(ntries=3, log_retry=True)
     def test_dialogpt(self):
         valid, test = testing_utils.train_model(

Review thread on the "for batchsize in [2, 2, 4, 2]:" line (author reply): Ah, this is me testing generation consistency on randomized initialization. The PR is work in progress.
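To reproduce the consistency check locally, one might run only this test with pytest; the path below assumes the file is tests/nightly/gpu/test_dialogpt.py in the ParlAI tree (an assumption from the imports above), and the skipUnlessGPU gate means a GPU is required:

pytest tests/nightly/gpu/test_dialogpt.py -k test_batchsize -s

The -s flag disables output capture so the test's printed results_single and results_batched can be compared directly.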
Reviewer: When is the latter condition not going to be true? If you are inheriting from this model but changing things?

Author: When running with -bs 1 --add_special_token True? Basically I only want to override the NULL_IDX if it's the same as END_IDX.
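The override the author describes might look like the sketch below; the resolve_null_idx wrapper is hypothetical, while the NULL_IDX/END_IDX names and the -1 sentinel come from the commit messages above:

def resolve_null_idx(null_idx, end_idx):
    # Hypothetical helper. With --add_special_tokens True a dedicated
    # <pad> token exists, so null_idx != end_idx and nothing changes.
    if null_idx == end_idx:
        # eos was doing double duty as the pad token; -1 can never
        # collide with a real vocabulary id.
        return -1
    return null_idx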