From edbe7c8421eeeecedb0aeb0e850061fb107bcf57 Mon Sep 17 00:00:00 2001
From: Jordan Selig
Date: Tue, 28 Oct 2025 16:04:59 -0400
Subject: [PATCH] use persistent storage

---
 ...unction_codegen_and_deployment.chatmode.md | 146 +++++++
 infra/app/api.bicep                           |   7 +
 infra/main.bicep                              |   2 +-
 inventory.db                                  | Bin 16384 -> 16384 bytes
 requirements.txt                              |   2 +
 server.py                                     | 400 ++++++++----------
 6 files changed, 341 insertions(+), 216 deletions(-)
 create mode 100644 .github/chatmodes/Azure_function_codegen_and_deployment.chatmode.md

diff --git a/.github/chatmodes/Azure_function_codegen_and_deployment.chatmode.md b/.github/chatmodes/Azure_function_codegen_and_deployment.chatmode.md
new file mode 100644
index 0000000..cf275c7
--- /dev/null
+++ b/.github/chatmodes/Azure_function_codegen_and_deployment.chatmode.md
@@ -0,0 +1,146 @@
+---
+description: Generate and deploy Azure Functions with comprehensive planning, code generation, and deployment automation.
+tools: ["changes","edit","extensions","fetch","findTestFiles","githubRepo","new","openSimpleBrowser","problems","runCommands","runNotebooks","runTasks","search","testFailure","todos","usages","vscodeAPI","Microsoft Docs","applens","azureterraformbestpractices","bicepschema","monitor","deploy","quota","get_bestpractices","azure_query_azure_resource_graph","extension_cli_generate","azure_get_auth_context","azure_set_auth_context"]
+model: Claude Sonnet 4
+---
+
+# Azure Functions Code Generation and Deployment
+
+Enterprise-grade Azure Functions development workflow with automated planning, code generation, testing, and deployment using Azure best practices and Infrastructure as Code (IaC).
+
+## Core Workflow
+Ask the user to confirm before moving forward with each step.
+
+### 1. Planning Phase
+- **Architecture Definition**: Define function structure, components, and configurations, taking into account best practices for both code generation and deployment
+- **Technology Stack**: Specify programming language, runtime version, and tools
+- **Resource Requirements**: Identify Azure resources and consumption plans
+- **Validation Strategy**: Define testing approaches and success criteria
+- **Documentation**: Save the plan to `azure_functions_codegen_and_deployment_plan.md`
+
+### 2. Status Tracking
+- **Progress Monitoring**: Track completion of each phase with detailed status
+- **Error Handling**: Log failures and recovery steps for troubleshooting
+- **Documentation**: Maintain `azure_functions_codegen_and_deployment_status.md`
+
+### 3. Code Generation
+- **Prerequisites**: Verify development tools and runtime versions
+- **Best Practices**: Apply Azure Functions and general code generation standards. Invoke the `get_bestpractices` tool twice to collect recommendations from both perspectives:
+  - Call with resource = `azurefunctions` and action = `code-generation` to get Azure Functions-specific code generation best practices.
+  - Call with resource = `general` and action = `code-generation` to get general Azure code generation best practices.
+  Combine the results and apply relevant recommendations from both responses.
+- **Security**: Set appropriate authentication levels (default: `function`); a minimal sketch follows the structure notes below
+- **Structure**: Follow language-specific project layouts and conventions
+- **Python**: Do not use grpcio-dependent packages such as azure-functions-worker unless necessary
+- **JavaScript v4 Structure**:
+  ```
+  root/
+  ├── host.json              # Function host configuration
+  ├── local.settings.json    # Development settings
+  ├── package.json           # Dependencies
+  ├── src/
+  │   ├── app.js             # Main application entry
+  │   └── [modules].js       # Business logic
+  └── tests/                 # Test suite
+  ```
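+
+  A minimal sketch of a Python v2-model app with function-level auth (names are illustrative, not prescriptive):
+
+  ```python
+  # function_app.py - v2 programming model; key auth is enforced in Azure, not locally
+  import azure.functions as func
+
+  app = func.FunctionApp(http_auth_level=func.AuthLevel.FUNCTION)
+
+  @app.route(route="health")
+  def health(req: func.HttpRequest) -> func.HttpResponse:
+      return func.HttpResponse("OK", status_code=200)
+  ```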
+
+### 4. Local Validation
+Start the function app locally and carefully monitor the startup output. Look for any errors, warnings, or unusual messages.
+Don't proceed to testing until you've confirmed a clean startup. If you see any issues, investigate and fix them before continuing.
+- **Testing**: Achieve 80%+ code coverage with a comprehensive test suite (a starter smoke test is sketched below)
+- **Execution**: Validate local function execution and performance
+- **Process Management**: Cleanly shut down existing instances of the function app before restarting
+  - macOS/Linux: `pkill -9 -f func`
+  - Windows: `taskkill /F /IM func.exe /T`
+
+#### Post-Testing Cleanup Protocol
+When testing is finished, shut down all remaining processes with the platform-specific commands above to prevent resource conflicts and port-binding issues.
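+
+A minimal local smoke test might look like the following (assumes the app is running via `func start` on the default port 7071, where function keys are not enforced):
+
+```python
+# test_smoke.py - a starting point only; grow into a full suite for coverage targets
+import requests
+
+def test_health_endpoint():
+    resp = requests.get("http://localhost:7071/api/health", timeout=10)
+    assert resp.status_code == 200
+```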
+
+### 5. Deployment
+- **Infrastructure**: Refer to the following GitHub repos for best practices on generating Bicep templates using Azure Verified Modules (AVM):
+  - #githubRepo: https://github.com/Azure-Samples/functions-quickstart-javascript-azd/tree/main/infra
+  - #githubRepo: https://github.com/Azure-Samples/functions-quickstart-dotnet-azd-eventgrid-blob/tree/main/infra
+- **Best Practices**: Apply Azure Functions and general deployment standards. Invoke the `get_bestpractices` tool twice to collect recommendations from both perspectives:
+  - Call with resource = `azurefunctions` and action = `deployment` to get Azure Functions-specific deployment best practices.
+  - Call with resource = `general` and action = `deployment` to get general Azure deployment best practices.
+  Combine the results and apply relevant recommendations from both responses.
+- **Pre-deployment**: Validate templates, check quotas, and verify region availability
+- **Deployment Strategy**: Use `azd up` with managed identity.
+  - ALWAYS use the Flex Consumption plan (FC1) for deployment, never the Y1 dynamic plan.
+  - ALWAYS include `functionAppConfig` with a `deployment.storage` configuration for FC1 Function Apps. Refer to these azd samples to learn how to configure the Flex Consumption plan correctly:
+    - #githubRepo: https://github.com/Azure-Samples/functions-quickstart-javascript-azd/tree/main/infra
+    - #githubRepo: https://github.com/Azure-Samples/functions-quickstart-dotnet-azd-eventgrid-blob/tree/main/infra
+- **Documentation**: Record each deployment attempt with failure reasons and solutions
+- **Failure Recovery**: Always clean up partial deployments before retrying
+  - Use `azd down --force` to delete failed deployment resources and deployed code
+- **Alternative Methods**: If all resources were provisioned successfully but app deployment failed with the error message "deployment failed: Input string was not in a correct format. Failure to parse near offset 40. Format item ends prematurely.", use Azure CLI deployment to upload the function app code.
+
+### 6. Post-Deployment
+- **Authentication**: Retrieve the names of the deployed functions, then retrieve and configure function keys
+- **Endpoint Testing**: Validate all function endpoints with proper authentication (see the sketch below)
+- **Monitoring**: Verify Application Insights telemetry and establish performance baselines
+- **Documentation**: Create a README with deployment and usage instructions
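+
+A post-deployment endpoint check with a function key might look like the following sketch (the app name, route, and key retrieval are placeholders):
+
+```python
+# validate_endpoint.py - hypothetical post-deployment check
+import os
+import requests
+
+app_name = os.environ["FUNCTION_APP_NAME"]  # placeholder: your deployed app name
+function_key = os.environ["FUNCTION_KEY"]   # placeholder: key retrieved post-deployment
+
+resp = requests.get(
+    f"https://{app_name}.azurewebsites.net/api/health",
+    headers={"x-functions-key": function_key},
+    timeout=30,
+)
+resp.raise_for_status()
+print("Endpoint healthy:", resp.status_code)
+```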
+
+## Enterprise Environment Considerations
+
+### Corporate Policy Compliance
+- **Alternative Strategies**: Prepare an Azure CLI fallback for blocked `azd` commands
+- **Compliance Standards**: Use Azure Verified Modules (AVM) for enterprise requirements
+- **Network Restrictions**: Consider VNet integration and private endpoints
+
+### Security & Authentication
+- **Managed Identity**: Preferred authentication method for Azure-hosted resources
+- **Function Keys**: Use function-level keys following the principle of least privilege
+- **Key Management**: Retrieve keys post-deployment for endpoint testing
+- **RBAC Configuration**: Implement proper role assignments for dependencies
+
+## Quality Assurance
+
+### Testing Requirements
+- **Unit Tests**: 100% passing rate
+- **Integration Tests**: 80%+ coverage of main scenarios
+- **Code Quality**: ESLint/linting checks passing
+- **Performance**: Baseline performance validation
+
+### Deployment Validation
+- **Infrastructure**: Bicep templates pass validation
+- **Pre-deployment**: Use the deploy tool with the `command` parameter set to `deploy_iac_rules_get` to get the best-practice rules for IaC generation.
+- **Authentication**: Proper managed identity and RBAC configuration
+- **Monitoring**: Application Insights receiving telemetry
+
+## Failure Recovery & Troubleshooting
+
+### Common Issues & Solutions
+1. **Policy Violations**: Switch to Azure CLI deployment methods
+2. **Missing Dependencies**: Systematic tool installation and validation
+3. **Authentication Issues**: Comprehensive RBAC and managed identity setup
+4. **Runtime Compatibility**: Use supported versions (Node.js 20+, Python 3.11+)
+5. **Partial Deployments**: Clean resource group deletion before retry
+
+### Deployment Failure Recovery Protocol
+```bash
+# Delete failed deployment resources and deployed code
+azd down --force
+
+# Or clean up the failed deployment's resource group directly
+az group delete --name rg-<env-name> --yes --no-wait
+az group wait --name rg-<env-name> --deleted --timeout 300
+
+# Retry deployment
+azd up
+```
+
+## Reference Resources
+
+### Azure Functions Best Practices
+- **Programming Models**: Use latest versions (v4 JavaScript, v2 Python)
+- **Extension Bundles**: Prefer over SDKs for simplified dependency management
+- **Event Sources**: Use EventGrid for blob triggers
+- **Configuration**: Generate `local.settings.json` for local development
+
+### Infrastructure Templates
+- [JavaScript Azure Functions AZD Sample](https://github.com/Azure-Samples/functions-quickstart-javascript-azd/tree/main/infra)
+- [.NET Azure Functions with EventGrid Sample](https://github.com/Azure-Samples/functions-quickstart-dotnet-azd-eventgrid-blob/tree/main/infra)
\ No newline at end of file
diff --git a/infra/app/api.bicep b/infra/app/api.bicep
index dc7b77c..7ea2fd0 100644
--- a/infra/app/api.bicep
+++ b/infra/app/api.bicep
@@ -43,6 +43,12 @@ var queueSettings = enableQueue ? { AzureWebJobsStorage__queueServiceUri: stg.pr
 var tableSettings = enableTable ? { AzureWebJobsStorage__tableServiceUri: stg.properties.primaryEndpoints.table } : {}
 var fileSettings = enableFile ? { AzureWebJobsStorage__fileServiceUri: stg.properties.primaryEndpoints.file } : {}
 
+// Add storage account name and client ID for Table Storage SDK
+var storageSettings = {
+  STORAGE_ACCOUNT_NAME: storageAccountName
+  AZURE_CLIENT_ID: identityClientId
+}
+
 // Merge all app settings
 var allAppSettings = union(
   appSettings,
@@ -50,6 +56,7 @@ var allAppSettings = union(
   queueSettings,
   tableSettings,
   fileSettings,
+  storageSettings,
   baseAppSettings
 )
diff --git a/infra/main.bicep b/infra/main.bicep
index 63092b0..ded9a85 100644
--- a/infra/main.bicep
+++ b/infra/main.bicep
@@ -155,7 +155,7 @@ module storage 'br/public:avm/res/storage/storage-account:0.8.3' = {
 var storageEndpointConfig = {
   enableBlob: true // Required for AzureWebJobsStorage, .zip deployment, Event Hubs trigger and Timer trigger checkpointing
   enableQueue: false // Required for Durable Functions and MCP trigger
-  enableTable: false // Required for Durable Functions and OpenAI triggers and bindings
+  enableTable: true // Required for inventory data persistence
   enableFiles: false // Not required, used in legacy scenarios
   allowUserIdentityPrincipal: true // Allow interactive user identity to access for testing and debugging
 }
diff --git a/inventory.db b/inventory.db
index 6653893c7397369adb55f8b9be3b9d27221cc411..ab0a0d394fa1f7ad0a0e5737da3aae83e88234f9 100644
GIT binary patch
delta 313
zcmXZWJ5s|i6a`Q}*|MLU3>wGd2G{@%Sp@BKgMcU>{^6fJ#1%DJ3d7DQsJW6->0Hg7
zJ8u|h80hPWcz1lJIXnG4`Cjj)wE6j){vK>MI-`yX@4XcD2SOWyc@N^w58m!x!X6L9^HZ+sd$gM
zL1-C4%yq0Nk0Vx~B_)$;8N{mRMf!5q%=bO4dYze$!xGQH7L@7=YS>G830OXq@*<)y
LU3^`2o*M2S)YmHZ

delta 313
zcmXZWOLD?63fiJP0Q-zx(+sAd#0dlAG>fCg)w`8^;;y-%9
zp+OF$=dmI@KQUt(5HwKJAQToSY2}Q5KKC5zCz
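The server.py changes below authenticate with `ManagedIdentityCredential`, which only resolves when the code runs inside Azure. For local development against the same table, a hedged sketch of a fallback could use `DefaultAzureCredential`, which tries environment variables, managed identity, and developer credentials (such as `az login`) in turn:

```python
# local_client.py - illustrative helper, not part of the patch
from azure.data.tables import TableClient
from azure.identity import DefaultAzureCredential

def make_table_client(account_name: str, table_name: str) -> TableClient:
    # DefaultAzureCredential falls back to az login credentials on a dev machine
    return TableClient(
        endpoint=f"https://{account_name}.table.core.windows.net",
        table_name=table_name,
        credential=DefaultAzureCredential(),
    )
```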
diff --git a/server.py b/server.py
--- a/server.py
+++ b/server.py
-def get_db_path() -> str:
-    # Get the path to the database file
-    return str(Path(__file__).parent / "inventory.db")
+# Azure Table Storage configuration
+STORAGE_ACCOUNT_NAME = os.environ.get("STORAGE_ACCOUNT_NAME", "")
+MANAGED_IDENTITY_CLIENT_ID = os.environ.get("AZURE_CLIENT_ID", "")
+TABLE_NAME = "ClothingInventory"
+table_client: Optional[TableClient] = None
 
-def get_inventory_data_path() -> str:
-    # Get the path to the inventory data file.
-    return str(Path(__file__).parent / "inventory_data.py")
-
-def init_database():
-    # Initialize database with sample data if it doesn't exist.
-    db_path = get_db_path()
-    inventory_data_path = get_inventory_data_path()
-    needs_reload = False
+def get_table_client() -> TableClient:
+    """Get or create the Table Storage client."""
+    global table_client
 
-    # Check if inventory_data.py was modified after the database was created
-    if Path(db_path).exists() and Path(inventory_data_path).exists():
-        db_mtime = os.path.getmtime(db_path)
-        inventory_mtime = os.path.getmtime(inventory_data_path)
+    if table_client is None:
+        if not STORAGE_ACCOUNT_NAME:
+            raise ValueError("STORAGE_ACCOUNT_NAME environment variable not set")
 
-        if inventory_mtime > db_mtime:
-            print(f"inventory_data.py was updated (modified at {inventory_mtime} vs DB at {db_mtime})")
-            print("Reloading database with fresh data")
-            needs_reload = True
-
-    import importlib
-    import inventory_data
-    importlib.reload(inventory_data)
-    from inventory_data import SAMPLE_INVENTORY
-    data_to_use = SAMPLE_INVENTORY
-
-    # Initialize database
-    conn = sqlite3.connect(db_path)
-    cursor = conn.cursor()
-
-    # Create tables
-    # Create tables
-    cursor.execute('''
-        CREATE TABLE IF NOT EXISTS items (
-            id INTEGER PRIMARY KEY,
-            name TEXT NOT NULL,
-            category TEXT NOT NULL,
-            price REAL NOT NULL,
-            description TEXT
-        )
-    ''')
-
-    cursor.execute('''
-        CREATE TABLE IF NOT EXISTS item_sizes (
-            id INTEGER PRIMARY KEY AUTOINCREMENT,
-            item_id INTEGER,
-            size TEXT NOT NULL,
-            quantity INTEGER NOT NULL,
-            FOREIGN KEY (item_id) REFERENCES items (id)
+        account_url = f"https://{STORAGE_ACCOUNT_NAME}.table.core.windows.net"
+
+        # Use managed identity with explicit client ID
+        from azure.identity import ManagedIdentityCredential
+        if MANAGED_IDENTITY_CLIENT_ID:
+            logger.info(f"Using ManagedIdentityCredential with client_id: {MANAGED_IDENTITY_CLIENT_ID[:8]}...")
+            credential = ManagedIdentityCredential(client_id=MANAGED_IDENTITY_CLIENT_ID)
+        else:
+            logger.warning("No client ID found, using default ManagedIdentityCredential")
+            credential = ManagedIdentityCredential()
+
+        # Create table client directly
+        table_client = TableClient(
+            endpoint=account_url,
+            table_name=TABLE_NAME,
+            credential=credential
         )
-    ''')
+
+        # Create table if it doesn't exist
+        try:
+            table_client.create_table()
+            logger.info(f"Created table: {TABLE_NAME}")
+        except Exception as e:
+            # Table might already exist
+            logger.info(f"Table {TABLE_NAME} status: {e}")
 
-    # Clear existing data if the inventory file is newer or if DB is empty
-    cursor.execute('SELECT COUNT(*) FROM items')
-    if needs_reload or cursor.fetchone()[0] == 0:
-        # Clear existing data if needed
-        cursor.execute('DELETE FROM item_sizes')
-        cursor.execute('DELETE FROM items')
+    return table_client
+
+def init_inventory():
+    """Initialize Table Storage with sample data if empty."""
+    try:
+        client = get_table_client()
+
+        # Check if table has any data
+        entities = list(client.list_entities(select="PartitionKey"))
 
-        # Insert sample data
-        for item in data_to_use:
-            cursor.execute('''
-                INSERT INTO items (id, name, category, price, description)
-                VALUES (?, ?, ?, ?, ?)
-            ''', (item['id'], item['name'], item['category'], item['price'], item['description']))
+        if len(entities) == 0:
+            # Load sample data
+            import importlib
+            import inventory_data
+            importlib.reload(inventory_data)
+            from inventory_data import SAMPLE_INVENTORY
 
-            for size, quantity in item['sizes'].items():
-                cursor.execute('''
-                    INSERT INTO item_sizes (item_id, size, quantity)
-                    VALUES (?, ?, ?)
-                ''', (item['id'], size, quantity))
-
-        print(f"Database reinitialized with {len(data_to_use)} items")
-    else:
-        print("Database already contains data, no reload needed")
-
-    conn.commit()
-    conn.close()
+            # Insert sample data into Table Storage
+            for item in SAMPLE_INVENTORY:
+                entity = {
+                    "PartitionKey": "INVENTORY",
+                    "RowKey": str(item['id']),
+                    "ItemId": item['id'],
+                    "Name": item['name'],
+                    "Category": item['category'],
+                    "Price": item['price'],
+                    "Description": item['description'],
+                    "Sizes": json.dumps(item['sizes'])  # Store sizes as JSON string
+                }
+                client.upsert_entity(entity)
+
+            logger.info(f"Table Storage initialized with {len(SAMPLE_INVENTORY)} items")
+        else:
+            logger.info(f"Table Storage already contains {len(entities)} items")
+
+    except Exception as e:
+        logger.error(f"Error initializing Table Storage: {e}")
+        # Don't raise - allow the server to start and initialize on first request
+        logger.warning("Table Storage initialization deferred")
 
 # FastMCP Tools
 @mcp.tool()
 def get_inventory() -> Dict[str, Any]:
     """Get all clothing items in inventory with sizes and quantities."""
-
-    db_path = get_db_path()
-    conn = None
     try:
-        conn = sqlite3.connect(db_path)
-        cursor = conn.cursor()
+        client = get_table_client()
+        entities = list(client.query_entities("PartitionKey eq 'INVENTORY'"))
 
-        cursor.execute('''
-            SELECT i.id, i.name, i.category, i.price, i.description
-            FROM items i
-            ORDER BY i.category, i.name
-        ''')
-        items = cursor.fetchall()
+        # If empty, try to initialize
+        if len(entities) == 0:
+            logger.info("No inventory found, initializing...")
+            init_inventory()
+            entities = list(client.query_entities("PartitionKey eq 'INVENTORY'"))
 
-        result = []
-        for item in items:
-            item_id, name, category, price, description = item
-
-            # Get sizes and quantities
-            cursor.execute('''
-                SELECT size, quantity
-                FROM item_sizes
-                WHERE item_id = ?
-            ''', (item_id,))
-            sizes_data = cursor.fetchall()
-            sizes = {size: quantity for size, quantity in sizes_data}
-
-            result.append({
-                'id': item_id,
-                'name': name,
-                'category': category,
-                'price': price,
-                'description': description,
-                'sizes': sizes
+        items = []
+        for entity in entities:
+            items.append({
+                'id': entity['ItemId'],
+                'name': entity['Name'],
+                'category': entity['Category'],
+                'price': entity['Price'],
+                'description': entity['Description'],
+                'sizes': json.loads(entity['Sizes'])
             })
 
         return {
-            "items": result,
-            "total_items": len(result),
-            "categories": list(set(item['category'] for item in result))
+            "items": items,
+            "total_items": len(items),
+            "categories": list(set(item['category'] for item in items))
         }
     except Exception as e:
         logger.error(f"Error getting inventory: {e}")
         return {"error": str(e), "success": False}
-    finally:
-        if conn:
-            conn.close()
 
 @mcp.tool()
 def add_item(
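A hardening note on the hunk above: azure-core ships typed exceptions, so the table-creation path (and the lookups below) can match on exception types rather than a blanket `except Exception` or exception-name strings. A hedged sketch, assuming the `table_client` from `get_table_client()`:

```python
# Illustrative alternative to the blanket excepts above
from azure.core.exceptions import ResourceExistsError, ResourceNotFoundError

try:
    table_client.create_table()
except ResourceExistsError:
    pass  # table already exists, expected on warm starts

try:
    entity = table_client.get_entity(partition_key="INVENTORY", row_key="1")
except ResourceNotFoundError:
    entity = None  # item not found
```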
@@ -179,40 +159,42 @@ def add_item(
         description: Item description (optional)
         sizes: Sizes and quantities (e.g., {"S": 10, "M": 15}) (optional)
     """
-
     if sizes is None:
         sizes = {"S": 0, "M": 0, "L": 0}
 
-    db_path = get_db_path()
-    conn = None
     try:
-        conn = sqlite3.connect(db_path)
-        cursor = conn.cursor()
-
-        cursor.execute('''
-            INSERT INTO items (name, category, price, description)
-            VALUES (?, ?, ?, ?)
-        ''', (name, category, price, description))
+        client = get_table_client()
 
-        item_id = cursor.lastrowid
+        # Get the next available ID
+        entities = list(client.query_entities("PartitionKey eq 'INVENTORY'", select="ItemId"))
+        next_id = max([e['ItemId'] for e in entities], default=0) + 1
 
-        for size, quantity in sizes.items():
-            cursor.execute('''
-                INSERT INTO item_sizes (item_id, size, quantity)
-                VALUES (?, ?, ?)
-            ''', (item_id, size, quantity))
+        entity = {
+            "PartitionKey": "INVENTORY",
+            "RowKey": str(next_id),
+            "ItemId": next_id,
+            "Name": name,
+            "Category": category,
+            "Price": price,
+            "Description": description,
+            "Sizes": json.dumps(sizes)
+        }
 
-        conn.commit()
+        client.upsert_entity(entity)
 
-        item_response = get_item_by_id(item_id)
+        new_item = {
+            'id': next_id,
+            'name': name,
+            'category': category,
+            'price': price,
+            'description': description,
+            'sizes': sizes
+        }
 
-        return {"success": True, "item": item_response["item"]}
+        return {"success": True, "item": new_item}
     except Exception as e:
         logger.error(f"Error adding item: {e}")
         return {"error": str(e), "success": False}
-    finally:
-        if conn:
-            conn.close()
 
 @mcp.tool()
 def get_item_by_id(item_id: int) -> Dict[str, Any]:
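One caveat on `add_item` above: deriving the next ID by scanning `ItemId` values and adding one is fine for a demo but racy under concurrent writers. Relatedly, if filter values ever become dynamic, azure-data-tables supports parameterized OData filters instead of hand-built strings. A hedged sketch, assuming `client` is the `TableClient` used above:

```python
# Parameter substitution keeps values out of the filter string itself
entities = client.query_entities(
    "PartitionKey eq @pk and ItemId gt @min_id",
    parameters={"pk": "INVENTORY", "min_id": 0},
)
for entity in entities:
    print(entity["ItemId"], entity["Name"])
```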
@@ -221,50 +203,25 @@ def get_item_by_id(item_id: int) -> Dict[str, Any]:
     Args:
         item_id: ID of the item to retrieve
     """
-
-    db_path = get_db_path()
-    conn = None
     try:
-        conn = sqlite3.connect(db_path)
-        cursor = conn.cursor()
-
-        cursor.execute('''
-            SELECT i.id, i.name, i.category, i.price, i.description
-            FROM items i
-            WHERE i.id = ?
-        ''', (item_id,))
-        item = cursor.fetchone()
-
-        if not item:
-            return {"success": False, "error": "Item not found"}
-
-        item_id, name, category, price, description = item
+        client = get_table_client()
+        entity = client.get_entity(partition_key="INVENTORY", row_key=str(item_id))
 
-        # Get sizes and quantities
-        cursor.execute('''
-            SELECT size, quantity
-            FROM item_sizes
-            WHERE item_id = ?
-        ''', (item_id,))
-        sizes_data = cursor.fetchall()
-        sizes = {size: quantity for size, quantity in sizes_data}
-
-        result = {
-            'id': item_id,
-            'name': name,
-            'category': category,
-            'price': price,
-            'description': description,
-            'sizes': sizes
+        item = {
+            'id': entity['ItemId'],
+            'name': entity['Name'],
+            'category': entity['Category'],
+            'price': entity['Price'],
+            'description': entity['Description'],
+            'sizes': json.loads(entity['Sizes'])
         }
 
-        return {"success": True, "item": result}
+        return {"success": True, "item": item}
     except Exception as e:
+        if "ResourceNotFound" in type(e).__name__:
+            return {"success": False, "error": "Item not found"}
         logger.error(f"Error getting item {item_id}: {e}")
         return {"error": str(e), "success": False}
-    finally:
-        if conn:
-            conn.close()
 
 @mcp.tool()
 def update_item_quantity(item_id: int, size: str, quantity: int) -> Dict[str, Any]:
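`update_item_quantity` below performs an unconditional replace after a read. If concurrent writers ever matter, the SDK's ETag support enables optimistic concurrency; a hedged sketch (kwargs assumed from the azure-data-tables conditional-update pattern):

```python
# Reject the write if the entity changed between the read and the update
from azure.core import MatchConditions
from azure.data.tables import UpdateMode

entity = client.get_entity(partition_key="INVENTORY", row_key="1")
entity["Price"] = 19.99
client.update_entity(
    entity,
    mode=UpdateMode.REPLACE,
    etag=entity.metadata["etag"],
    match_condition=MatchConditions.IfNotModified,
)
```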
@@ -275,33 +232,38 @@ def update_item_quantity(item_id: int, size: str, quantity: int) -> Dict[str, An
         size: Size to update (e.g., "S", "M", "L")
         quantity: New quantity
     """
-
-    db_path = get_db_path()
-    conn = None
     try:
-        conn = sqlite3.connect(db_path)
-        cursor = conn.cursor()
+        client = get_table_client()
+        entity = client.get_entity(partition_key="INVENTORY", row_key=str(item_id))
 
-        cursor.execute('''
-            UPDATE item_sizes
-            SET quantity = ?
-            WHERE item_id = ? AND size = ?
-        ''', (quantity, item_id, size))
+        # Parse sizes, update the specific size, and save back
+        sizes = json.loads(entity['Sizes'])
 
-        affected_rows = cursor.rowcount
-        conn.commit()
+        if size not in sizes:
+            return {"success": False, "error": f"Size '{size}' not found for this item"}
 
-        if affected_rows > 0:
-            item_response = get_item_by_id(item_id)
-            return {"success": True, "item": item_response["item"]}
-        else:
-            return {"success": False, "error": "Item or size not found"}
+        sizes[size] = quantity
+        entity['Sizes'] = json.dumps(sizes)
+
+        # Update the entity
+        client.update_entity(entity, mode="replace")
+
+        # Return the updated item
+        item = {
+            'id': entity['ItemId'],
+            'name': entity['Name'],
+            'category': entity['Category'],
+            'price': entity['Price'],
+            'description': entity['Description'],
+            'sizes': sizes
+        }
+
+        return {"success": True, "item": item}
     except Exception as e:
+        if "ResourceNotFound" in type(e).__name__:
+            return {"success": False, "error": "Item not found"}
         logger.error(f"Error updating quantity: {e}")
         return {"error": str(e), "success": False}
-    finally:
-        if conn:
-            conn.close()
 
 @mcp.tool()
 def search_items(query: str) -> Dict[str, Any]:
@@ -310,21 +272,32 @@ def search_items(query: str) -> Dict[str, Any]:
     Args:
         query: Search query to match against item names or categories
     """
-
-    all_items_response = get_inventory()
-    all_items = all_items_response["items"]  # Extract the items list from the response
-    query = query.lower()
-
-    results = [
-        item for item in all_items
-        if query in item['name'].lower() or query in item['category'].lower()
-    ]
-
-    return {
-        "items": results,
-        "count": len(results),
-        "query": query
-    }
+    try:
+        client = get_table_client()
+        entities = client.query_entities("PartitionKey eq 'INVENTORY'")
+
+        query_lower = query.lower()
+        results = []
+
+        for entity in entities:
+            if query_lower in entity['Name'].lower() or query_lower in entity['Category'].lower():
+                results.append({
+                    'id': entity['ItemId'],
+                    'name': entity['Name'],
+                    'category': entity['Category'],
+                    'price': entity['Price'],
+                    'description': entity['Description'],
+                    'sizes': json.loads(entity['Sizes'])
+                })
+
+        return {
+            "items": results,
+            "count": len(results),
+            "query": query
+        }
+    except Exception as e:
+        logger.error(f"Error searching items: {e}")
+        return {"error": str(e), "success": False}
 
 def main():
     """Run the Clothing Inventory MCP Server."""
@@ -334,14 +307,11 @@ def main():
 
     logger.info("Starting Clothing Inventory MCP Server")
     logger.info(f"MCP Server will start on port {mcp_port}")
 
-    # Initialize database
+    # Initialize Table Storage (non-blocking - errors are logged but don't stop startup)
     try:
-        init_database()
-        logger.info("Database initialized successfully")
+        init_inventory()
     except Exception as e:
-        logger.error(f"Failed to initialize database: {e}")
-        logger.error(f"Database error type: {type(e).__name__}")
-        raise
+        logger.error(f"Table Storage initialization failed, will retry on first request: {e}")
 
     # Run the server
     logger.info("Starting MCP server with streamable-http transport")