
Commit

TensorFlow tests: having from_pt set to True requires torch to be installed. (#10664)

* TF model exists for Blenderbot 400M

* Marian

* RAG
LysandreJik committed Mar 12, 2021
1 parent 543d054 commit 184ef8e
Showing 3 changed files with 7 additions and 11 deletions.
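For context: passing from_pt=True asks the TensorFlow from_pretrained call to convert a PyTorch checkpoint on the fly, which only works when torch is installed, so TensorFlow-only test environments fail on these tests. Native TF weights now exist for the Blenderbot 400M, Marian, and RAG checkpoints, so the flag can simply be dropped. A minimal sketch of the two loading paths (the checkpoint name below is illustrative, not necessarily the exact one used in the tests):

```python
from transformers import TFAutoModelForSeq2SeqLM

# Sketch only: checkpoint name is illustrative.
# Before: convert the PyTorch checkpoint on the fly. This requires torch,
# which a TensorFlow-only CI job may not have installed.
# model = TFAutoModelForSeq2SeqLM.from_pretrained(
#     "facebook/blenderbot-400M-distill", from_pt=True
# )

# After: load the native TensorFlow weights directly, with no torch dependency.
model = TFAutoModelForSeq2SeqLM.from_pretrained("facebook/blenderbot-400M-distill")
```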
2 changes: 1 addition & 1 deletion tests/test_modeling_tf_blenderbot.py
@@ -309,7 +309,7 @@ def tokenizer(self):

@cached_property
def model(self):
- model = TFAutoModelForSeq2SeqLM.from_pretrained(self.model_name, from_pt=True)
+ model = TFAutoModelForSeq2SeqLM.from_pretrained(self.model_name)
return model

@slow
2 changes: 1 addition & 1 deletion tests/test_modeling_tf_marian.py
@@ -350,7 +350,7 @@ def eos_token_id(self) -> int:
@cached_property
def model(self):
warnings.simplefilter("error")
- model: TFMarianMTModel = TFAutoModelForSeq2SeqLM.from_pretrained(self.model_name, from_pt=True)
+ model: TFMarianMTModel = TFAutoModelForSeq2SeqLM.from_pretrained(self.model_name)
assert isinstance(model, TFMarianMTModel)
c = model.config
self.assertListEqual(c.bad_words_ids, [[c.pad_token_id]])
14 changes: 5 additions & 9 deletions tests/test_modeling_tf_rag.py
@@ -562,7 +562,7 @@ def sequence_model(self):
)

def token_model_nq_checkpoint(self, retriever):
- return TFRagTokenForGeneration.from_pretrained("facebook/rag-token-nq", from_pt=True, retriever=retriever)
+ return TFRagTokenForGeneration.from_pretrained("facebook/rag-token-nq", retriever=retriever)

def get_rag_config(self):
question_encoder_config = AutoConfig.from_pretrained("facebook/dpr-question_encoder-single-nq-base")
@@ -799,7 +799,7 @@ def test_data_questions(self):
def test_rag_token_greedy_search(self):
tokenizer = RagTokenizer.from_pretrained("facebook/rag-token-nq")
retriever = RagRetriever.from_pretrained("facebook/rag-token-nq", index_name="exact", use_dummy_dataset=True)
- rag_token = TFRagTokenForGeneration.from_pretrained("facebook/rag-token-nq", retriever=retriever, from_pt=True)
+ rag_token = TFRagTokenForGeneration.from_pretrained("facebook/rag-token-nq", retriever=retriever)

# check first two questions
input_dict = tokenizer(
@@ -833,7 +833,7 @@ def test_rag_token_generate_batch(self):
# NOTE: gold labels comes from num_beam=4, so this is effectively beam-search test
tokenizer = RagTokenizer.from_pretrained("facebook/rag-token-nq")
retriever = RagRetriever.from_pretrained("facebook/rag-token-nq", index_name="exact", use_dummy_dataset=True)
- rag_token = TFRagTokenForGeneration.from_pretrained("facebook/rag-token-nq", retriever=retriever, from_pt=True)
+ rag_token = TFRagTokenForGeneration.from_pretrained("facebook/rag-token-nq", retriever=retriever)

input_dict = tokenizer(
self.test_data_questions,
@@ -877,9 +877,7 @@ def test_rag_sequence_generate_batch(self):
retriever = RagRetriever.from_pretrained(
"facebook/rag-sequence-nq", index_name="exact", use_dummy_dataset=True
)
- rag_sequence = TFRagSequenceForGeneration.from_pretrained(
-     "facebook/rag-sequence-nq", retriever=retriever, from_pt=True
- )
+ rag_sequence = TFRagSequenceForGeneration.from_pretrained("facebook/rag-sequence-nq", retriever=retriever)

input_dict = tokenizer(
self.test_data_questions,
@@ -923,9 +921,7 @@ def test_rag_sequence_generate_batch_from_context_input_ids(self):
retriever = RagRetriever.from_pretrained(
"facebook/rag-sequence-nq", index_name="exact", use_dummy_dataset=True
)
- rag_sequence = TFRagSequenceForGeneration.from_pretrained(
-     "facebook/rag-sequence-nq", retriever=retriever, from_pt=True
- )
+ rag_sequence = TFRagSequenceForGeneration.from_pretrained("facebook/rag-sequence-nq", retriever=retriever)
input_dict = tokenizer(
self.test_data_questions,
return_tensors="tf",
