
Commit

search: Implement AppSearchBackend.unindex_all_offers
It's a bit rough (there is no error handling), but I suppose it should
be enough.
dbaty committed Jul 16, 2021
1 parent c61e4bd commit 8a11c94
Showing 1 changed file with 18 additions and 4 deletions.
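The message above flags the absence of error handling, and the new client method in the diff below likewise leaves it to the caller. Purely as a hedged sketch (not part of this commit), caller-side handling could look like the following; the wrapper name and logging policy are assumptions, while the exception type follows from the response.raise_for_status() calls in the client:

    # Hypothetical caller-side error handling; not part of this commit.
    # raise_for_status() raises requests.exceptions.HTTPError (a subclass
    # of RequestException) on 4xx/5xx responses, so catching the base
    # class covers both HTTP errors and network failures.
    import logging

    import requests

    logger = logging.getLogger(__name__)


    def safely_unindex_all_offers(backend) -> None:
        try:
            backend.unindex_all_offers()
        except requests.exceptions.RequestException:
            # Log and move on; a later run can retry the full unindexing.
            logger.exception("Could not unindex all offers")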
src/pcapi/core/search/backends/appsearch.py (18 additions, 4 deletions)
@@ -193,10 +193,7 @@ def unindex_offer_ids(self, offer_ids: Iterable[int]) -> None:
         self.appsearch_client.delete_documents(offer_ids)
 
     def unindex_all_offers(self) -> None:
-        pass
-        # FIXME (dbaty): remove all indexed documents from the engine.
-        # There does not seem to be any way to do that, except by
-        # iterating over all indexed documents and removing them.
+        self.appsearch_client.delete_all_documents()
 
     def serialize_offer(self, offer: offers_models.Offer) -> dict:
         dates = []
@@ -340,6 +337,23 @@ def delete_documents(self, offer_ids: Iterable[int]):
         response = requests.delete(self.documents_url, headers=self.headers, data=data)
         response.raise_for_status()
 
+    def delete_all_documents(self):
+        if settings.IS_PROD:
+            raise ValueError("You cannot delete all documents on production.")
+        # As of 2021-07-16, there is no endpoint to delete all
+        # documents, so we fetch them page by page and delete each
+        # batch by id. Error handling is done by the caller.
+        list_url = f"{self.documents_url}/list"
+        page = 1
+        while True:
+            page_data = {"page": {"page": page, "size": DOCUMENTS_PER_REQUEST_LIMIT}}
+            response = requests.get(list_url, headers=self.headers, json=page_data)
+            document_ids = [document["id"] for document in response.json()["results"]]
+            if not document_ids:
+                break
+            self.delete_documents(document_ids)
+            page += 1
+
 
 class AppSearchJsonEncoder(json.JSONEncoder):
     def default(self, obj):
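As a reading aid, here is a minimal, self-contained sketch of the fetch-then-delete loop introduced above, run against an in-memory stand-in. Everything in it is hypothetical: it mirrors only the shapes the new code relies on, namely a paginated {"results": [{"id": ...}, ...]} response and batch deletion by id.

    # In-memory sketch of the pagination loop in delete_all_documents();
    # hypothetical throughout, not the real App Search client.
    DOCUMENTS_PER_REQUEST_LIMIT = 100  # assumption: mirrors the backend's constant


    class FakeDocumentsApi:
        def __init__(self, n_documents: int) -> None:
            # Documents are dicts with an "id" key, as the loop expects.
            self._snapshot = [{"id": str(i)} for i in range(n_documents)]
            self.deleted = set()

        def list_page(self, page: int, size: int) -> dict:
            # Assumption: pages are served from a snapshot, i.e. pagination
            # is not reshuffled by deletions while the loop runs.
            start = (page - 1) * size
            return {"results": self._snapshot[start : start + size]}

        def delete_documents(self, document_ids) -> None:
            self.deleted.update(document_ids)


    def delete_all_documents(api: FakeDocumentsApi) -> None:
        # Same control flow as delete_all_documents() in the diff above.
        page = 1
        while True:
            results = api.list_page(page, DOCUMENTS_PER_REQUEST_LIMIT)["results"]
            document_ids = [document["id"] for document in results]
            if not document_ids:
                break
            api.delete_documents(document_ids)
            page += 1


    api = FakeDocumentsApi(n_documents=250)
    delete_all_documents(api)
    assert len(api.deleted) == 250  # three non-empty pages: 100 + 100 + 50

The empty page is the loop's only termination condition. The sketch assumes pages are served from a stable snapshot; if the live /list endpoint reflected each deletion immediately, incrementing page would skip documents that had shifted into earlier pages, which fits the commit message's caveat that the implementation is still a bit rough.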
