Skip to content

Commit

Permalink
ignore logs from transformers (#32)
Browse files Browse the repository at this point in the history
* ignore logs from transformers

* remove duplicate info of evaluation result

Co-authored-by: Ronak <rpradeep@uwaterloo.ca>
  • Loading branch information
MXueguang and ronakice committed May 28, 2020
1 parent f3485ac commit 591e7ff
Show file tree
Hide file tree
Showing 2 changed files with 8 additions and 9 deletions.
2 changes: 2 additions & 0 deletions pygaggle/logger.py
Original file line number Diff line number Diff line change
@@ -1,8 +1,10 @@
import coloredlogs
import logging


# This module is imported for its side effects only; it exports nothing.
__all__ = []


# Log line layout shared by every module: timestamp, level, module, message.
_LOG_FORMAT = '%(asctime)s [%(levelname)s] %(module)s: %(message)s'

# Install colored console logging at INFO level for the whole application.
coloredlogs.install(level='INFO', fmt=_LOG_FORMAT)

# The `transformers` library is chatty at INFO; raise its threshold so only
# warnings and errors from it reach the console.
logging.getLogger("transformers").setLevel(logging.WARNING)
15 changes: 6 additions & 9 deletions pygaggle/run/evaluate_passage_ranker.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@
UnsupervisedTransformerReranker,
T5Reranker,
SequenceClassificationTransformerReranker
)
)
from pygaggle.rerank.random import RandomReranker
from pygaggle.rerank.similarity import CosineSimilarityMatrixProvider
from pygaggle.model import (SimpleBatchTokenizer,
Expand Down Expand Up @@ -92,8 +92,8 @@ def construct_transformer(options:
model = AutoModel.from_pretrained(options.model_name_or_path,
from_tf=options.from_tf).to(device).eval()
tokenizer = SimpleBatchTokenizer(AutoTokenizer.from_pretrained(
options.tokenizer_name),
options.batch_size)
options.tokenizer_name),
options.batch_size)
provider = CosineSimilarityMatrixProvider()
return UnsupervisedTransformerReranker(model, tokenizer, provider)

Expand All @@ -106,11 +106,11 @@ def construct_seq_class_transformer(options: PassageRankingEvaluationOptions
except AttributeError:
# Hotfix for BioBERT MS MARCO. Refactor.
BertForSequenceClassification.bias = torch.nn.Parameter(
torch.zeros(2))
torch.zeros(2))
BertForSequenceClassification.weight = torch.nn.Parameter(
torch.zeros(2, 768))
torch.zeros(2, 768))
model = BertForSequenceClassification.from_pretrained(
options.model_name_or_path, from_tf=options.from_tf)
options.model_name_or_path, from_tf=options.from_tf)
model.classifier.weight = BertForSequenceClassification.weight
model.classifier.bias = BertForSequenceClassification.bias
device = torch.device(options.device)
Expand Down Expand Up @@ -167,11 +167,8 @@ def main():
writer = MsMarcoWriter(args.output_file, args.overwrite_output)
evaluator = RerankerEvaluator(reranker, options.metrics, writer=writer)
width = max(map(len, args.metrics)) + 1
stdout = []
for metric in evaluator.evaluate(examples):
logging.info(f'{metric.name:<{width}}{metric.value:.5}')
stdout.append(f'{metric.name}\t{metric.value}')
print('\n'.join(stdout))


if __name__ == '__main__':
Expand Down

0 comments on commit 591e7ff

Please sign in to comment.