Merge branch 'vNext-Dev' into geearl/7045-show-processing-log
georearl committed Feb 29, 2024
2 parents d0281a6 + e9085af commit 699a3d1
Showing 2 changed files with 28 additions and 1 deletion.
7 changes: 7 additions & 0 deletions app/backend/app.py
@@ -579,6 +579,13 @@ async def retryFile(request: Request):
raw_file = blob.download_blob().readall()
# Overwrite the existing blob with new data
blob.upload_blob(raw_file, overwrite=True)
statusLog.upsert_document(document_path=filePath,
status='Resubmitted to the processing pipeline',
status_classification=StatusClassification.INFO,
state=State.QUEUED,
fresh_start=False)
statusLog.save_document(document_path=filePath)

except Exception as ex:
logging.exception("Exception in /retryFile")
raise HTTPException(status_code=500, detail=str(ex)) from ex
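For context, the retryFile handler above re-uploads the existing blob bytes in place (overwrite=True) so the blob-triggered pipeline runs again, and the new lines record a QUEUED status entry for the document. A minimal standalone sketch of that retrigger pattern, with an assumed connection string, container, and blob path (not taken from this commit):

from azure.storage.blob import BlobServiceClient

# Assumed values for illustration only
blob_service = BlobServiceClient.from_connection_string("<storage-connection-string>")
blob = blob_service.get_blob_client(container="upload", blob="contoso/report.pdf")

# Re-upload the same bytes; overwrite=True re-fires the blob-upload trigger
raw_file = blob.download_blob().readall()
blob.upload_blob(raw_file, overwrite=True)
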
22 changes: 21 additions & 1 deletion functions/FileUploadedFunc/__init__.py
@@ -10,6 +10,9 @@
import azure.functions as func
from azure.storage.blob import BlobServiceClient, generate_blob_sas
from azure.storage.queue import QueueClient, TextBase64EncodePolicy
from azure.search.documents import SearchClient
from azure.core.credentials import AzureKeyCredential



azure_blob_connection_string = os.environ["BLOB_CONNECTION_STRING"]
@@ -26,6 +29,9 @@
azure_blob_content_container = os.environ["BLOB_STORAGE_ACCOUNT_OUTPUT_CONTAINER_NAME"]
azure_blob_endpoint = os.environ["BLOB_STORAGE_ACCOUNT_ENDPOINT"]
azure_blob_key = os.environ["AZURE_BLOB_STORAGE_KEY"]
azure_search_service_endpoint = os.environ["AZURE_SEARCH_SERVICE_ENDPOINT"]
azure_search_service_index = os.environ["AZURE_SEARCH_INDEX"]
azure_search_service_key = os.environ["AZURE_SEARCH_SERVICE_KEY"]

function_name = "FileUploadedFunc"

@@ -84,9 +90,23 @@ def main(myblob: func.InputStream):
# first remove the container prefix
myblob_filename = myblob.name.split("/", 1)[1]
blobs = blob_container.list_blobs(name_starts_with=myblob_filename)
# Iterate through the blobs and delete each one

# instantiate the search sdk elements
search_client = SearchClient(azure_search_service_endpoint,
azure_search_service_index,
AzureKeyCredential(azure_search_service_key))
search_id_list_to_delete = []

# Iterate through the blobs and delete each one from blob and the search index
for blob in blobs:
blob_client.get_blob_client(container=azure_blob_content_container, blob=blob.name).delete_blob()
search_id_list_to_delete.append({"id": blob.name})

if len(search_id_list_to_delete) > 0:
search_client.delete_documents(documents=search_id_list_to_delete)
logging.debug("Succesfully deleted items from AI Search index.")
else:
logging.debug("No items to delete from AI Search index.")

# Queue message with a random backoff so as not to put the next function under unnecessary load
queue_client = QueueClient.from_connection_string(azure_blob_connection_string, queue_name, message_encode_policy=TextBase64EncodePolicy())
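The new search cleanup mirrors the blob cleanup: each deleted blob name is collected as a document key and removed from the Azure AI Search index in one batch. A minimal sketch of that delete call, with assumed endpoint, index name, key, and document ids (only the SearchClient.delete_documents usage and the "id" key field are taken from the change above):

import logging

from azure.core.credentials import AzureKeyCredential
from azure.search.documents import SearchClient

# Assumed service details for illustration only
search_client = SearchClient("https://<service>.search.windows.net",
                             "<index-name>",
                             AzureKeyCredential("<admin-key>"))

# Each dict must carry the index's key field; this index keys documents by "id"
ids_to_delete = [{"id": "upload/contoso/report.pdf"}]
if ids_to_delete:
    results = search_client.delete_documents(documents=ids_to_delete)
    logging.debug("Deleted %d documents from the search index", len(results))
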
