Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Fix boolean progress_bar for disabling tqdm progressbar #863

Merged
merged 1 commit on Feb 26, 2021
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Jump to
Jump to file
Failed to load files.
Diff view
Diff view
2 changes: 1 addition & 1 deletion haystack/document_store/faiss.py
Expand Up @@ -210,7 +210,7 @@ def update_embeddings(
only_documents_without_embedding=not update_existing_embeddings
)
batched_documents = get_batches_from_generator(result, batch_size)
with tqdm(total=document_count, disable=self.progress_bar) as progress_bar:
with tqdm(total=document_count, disable=not self.progress_bar) as progress_bar:
for document_batch in batched_documents:
embeddings = retriever.embed_passages(document_batch) # type: ignore
assert len(document_batch) == len(embeddings)
Expand Down
2 changes: 1 addition & 1 deletion haystack/document_store/memory.py
Expand Up @@ -200,7 +200,7 @@ def update_embeddings(
document_count = len(result)
logger.info(f"Updating embeddings for {document_count} docs ...")
batched_documents = get_batches_from_generator(result, batch_size)
with tqdm(total=document_count, disable=self.progress_bar) as progress_bar:
with tqdm(total=document_count, disable=not self.progress_bar) as progress_bar:
for document_batch in batched_documents:
embeddings = retriever.embed_passages(document_batch) # type: ignore
assert len(document_batch) == len(embeddings)
Expand Down
4 changes: 2 additions & 2 deletions haystack/document_store/milvus.py
Expand Up @@ -177,7 +177,7 @@ def write_documents(
add_vectors = False if document_objects[0].embedding is None else True

batched_documents = get_batches_from_generator(document_objects, batch_size)
with tqdm(total=len(document_objects), disable=self.progress_bar) as progress_bar:
with tqdm(total=len(document_objects), disable=not self.progress_bar) as progress_bar:
for document_batch in batched_documents:
vector_ids = []
if add_vectors:
Expand Down Expand Up @@ -257,7 +257,7 @@ def update_embeddings(
only_documents_without_embedding=not update_existing_embeddings
)
batched_documents = get_batches_from_generator(result, batch_size)
with tqdm(total=document_count, disable=self.progress_bar) as progress_bar:
with tqdm(total=document_count, disable=not self.progress_bar) as progress_bar:
for document_batch in batched_documents:
self._delete_vector_ids_from_milvus(documents=document_batch, index=index)

Expand Down
4 changes: 2 additions & 2 deletions haystack/reader/farm.py
Expand Up @@ -96,7 +96,7 @@ def __init__(
self.inferencer = QAInferencer.load(model_name_or_path, batch_size=batch_size, gpu=use_gpu,
task_type="question_answering", max_seq_len=max_seq_len,
doc_stride=doc_stride, num_processes=num_processes, revision=model_version,
disable_tqdm=progress_bar)
disable_tqdm=not progress_bar)
self.inferencer.model.prediction_heads[0].context_window_size = context_window_size
self.inferencer.model.prediction_heads[0].no_ans_boost = no_ans_boost
self.inferencer.model.prediction_heads[0].n_best = top_k_per_candidate + 1 # including possible no_answer
Expand Down Expand Up @@ -230,7 +230,7 @@ def train(
evaluate_every=evaluate_every,
device=device,
use_amp=use_amp,
disable_tqdm=self.progress_bar
disable_tqdm=not self.progress_bar
)


Expand Down
2 changes: 1 addition & 1 deletion haystack/retriever/dense.py
Expand Up @@ -213,7 +213,7 @@ def _get_predictions(self, dicts):
if len(dataset) == 1:
disable_tqdm=True
else:
disable_tqdm = self.progress_bar
disable_tqdm = not self.progress_bar

for i, batch in enumerate(tqdm(data_loader, desc=f"Creating Embeddings", unit=" Batches", disable=disable_tqdm)):
batch = {key: batch[key].to(self.device) for key in batch}
Expand Down