15 changes: 6 additions & 9 deletions MyFunctionProject/additional_functions.py
@@ -1,13 +1,10 @@
 import logging
 import azure.functions as func
 
-bp = func.Blueprint()
+def main(myblob: func.InputStream):
+    logging.info(f"Processing blob: Name={myblob.name}, Size={myblob.length} bytes")
 
-@bp.function_name('AdditionalHTTPFunction')
-@bp.route(route="brandnewroute")
-def test_function(req: func.HttpRequest) -> func.HttpResponse:
-    logging.info('Python HTTP trigger function processed a request.')
-    return func.HttpResponse(
-        "Wow hello this worked!!!",
-        status_code=200
-    )
+    blob_bytes = myblob.read()
+    summary = process_csv(blob_bytes)
+
+    logging.info(f"Processing completed. Summary: {summary}")
108 changes: 31 additions & 77 deletions MyFunctionProject/function_app.py
@@ -5,85 +5,39 @@
 import csv
 import codecs
 import os
+import io
 from azure.storage.blob import BlobServiceClient
-from additional_functions import bp
 
 app = func.FunctionApp()
 
-app.register_blueprint(bp)
-
-@app.function_name('FirstHTTPFunction')
-@app.route(route="myroute", auth_level=func.AuthLevel.ANONYMOUS)
-def test_function(req: func.HttpRequest) -> func.HttpResponse:
-    logging.info('Python HTTP trigger function processed a request.')
-    return func.HttpResponse(
-        "Wow this first HTTP Function works!!!!",
-        status_code=200
-    )
-
-@app.function_name('SecondHTTPFunction')
-@app.route(route="newroute")
-def test_function(req: func.HttpRequest) -> func.HttpResponse:
-    logging.info('Starting the second HTTP Function request.')
+def process_csv(blob_content):
+    summary = {"processed": 0, "errors": 0}
 
-    name = req.params.get('name')
-    if name:
-        message = f"Hello, {name}, so glad this Function worked!!"
-    else:
-        message = "Hello, so glad this Function worked!!"
-    return func.HttpResponse(
-        message,
-        status_code=200
-    )
-
-
-@app.function_name(name="MyFirstBlobFunction")
-@app.blob_trigger(arg_name="myblob",
-                  path="newcontainer/People.csv",
-                  connection="AzureWebJobsStorage")
-def test_function(myblob: func.InputStream):
-    logging.info(f"Python blob Function triggered after the People.csv file was uploaded to the newcontainer. So cool!!!! \n"
-                 f"Printing the name of the blob path: {myblob.name}"
-    )
-    blob_name = myblob.name.split('/')[-1]
-    metadata_log = f"Blob: {blob_name}, Size: {myblob.length} bytes, Last Modified: {myblob.last_modified}"
-    logging.info(metadata_log)
-
-    # Optional: Append metadata to log file in blob storage
-    try:
-        # Get connection string from app settings
-        connection_string = os.environ.get('AzureWebJobsStorage')
-        if connection_string:
-            # Create a BlobServiceClient
-            blob_service_client = BlobServiceClient.from_connection_string(connection_string)
-            # Get container client
-            container_client = blob_service_client.get_container_client("newcontainer")
-
-            # Get the metadata log blob client
-            log_blob_name = "metadata.log"
-            blob_client = container_client.get_blob_client(log_blob_name)
-
-            # Check if the log file exists, if not create it
-            try:
-                # Try to download the existing log
-                existing_log = blob_client.download_blob().readall().decode('utf-8')
-                log_content = existing_log + "\n" + metadata_log
-            except Exception:
-                # Log file doesn't exist yet, create new log content
-                log_content = metadata_log
-
-            # Upload the updated log
-            blob_client.upload_blob(log_content, overwrite=True)
-            logging.info(f"Metadata appended to {log_blob_name}")
-    except Exception as e:
-        logging.error(f"Error appending to metadata log: {str(e)}")
-
-
-@app.function_name(name="ReadFileBlobFunction")
-@app.blob_trigger(arg_name="readfile",
-                  path="newcontainer/People2.csv",
-                  connection="AzureWebJobsStorage")
-def main(readfile: func.InputStream):
-    reader = csv.reader(codecs.iterdecode(readfile, 'utf-8'))
-    for line in reader:
-        print(line)
+    try:
+        # Try to decode CSV content safely
+        decoded_content = blob_content.decode('utf-8-sig', errors='replace')
+        reader = csv.DictReader(io.StringIO(decoded_content))
+
+        # Check for headers
+        if not reader.fieldnames:
+            raise csv.Error("Missing headers in CSV file.")
+
+        for row in reader:
+            try:
+                # Your row processing logic here
+                # Example: print(row)
+                summary["processed"] += 1
+            except Exception as e:
+                summary["errors"] += 1
+                logging.error(f"Row processing error: {e}")
+
+    except (csv.Error, UnicodeDecodeError) as e:
+        summary["errors"] += 1
+        logging.error(f"Failed to read CSV: {e}")
+    except Exception as e:
+        summary["errors"] += 1
+        logging.error(f"Unexpected error: {e}")
+    finally:
+        logging.info(f"CSV Summary: {summary}")
+
+    return summary
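Since process_csv() only takes raw bytes and returns the summary dict, it can be exercised locally without any Azure binding. A quick smoke test might look like the following; the sample rows are made up for illustration:

# Local smoke test for process_csv -- the sample CSV below is illustrative only.
sample_csv = b"Name,Age\nAlice,30\nBob,29\n"
summary = process_csv(sample_csv)
print(summary)   # expect {'processed': 2, 'errors': 0}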