From f5e72acc17fb5cffd6e03fe2704d75824306b593 Mon Sep 17 00:00:00 2001 From: ks6088ts Date: Tue, 7 May 2024 21:37:51 +0900 Subject: [PATCH 1/8] remove env.sample file --- .env.sample | 3 --- Makefile | 5 ++++- main.py | 4 ++-- 3 files changed, 6 insertions(+), 6 deletions(-) delete mode 100644 .env.sample diff --git a/.env.sample b/.env.sample deleted file mode 100644 index 39c1f85..0000000 --- a/.env.sample +++ /dev/null @@ -1,3 +0,0 @@ -# Basic -SOLUTION_NAME = "SANDBOX" -BACKEND_URL = "http://localhost:8000" diff --git a/Makefile b/Makefile index 8c80d86..fe8e8fb 100644 --- a/Makefile +++ b/Makefile @@ -99,6 +99,7 @@ ci-test-docker: docker-lint docker-build docker-scan docker-run ## run CI test f # Application # --- SOLUTION_NAME ?= "SANDBOX" +BACKEND_URL ?= "http://localhost:8000" .PHONY: backend backend: ## run backend @@ -106,7 +107,9 @@ backend: ## run backend .PHONY: frontend frontend: ## run frontend - poetry run streamlit run main.py -- frontend -- --solution-name=$(SOLUTION_NAME) + poetry run streamlit run main.py -- frontend -- \ + --solution-name=$(SOLUTION_NAME) \ + --backend-url=$(BACKEND_URL) # --- # Azure Functions diff --git a/main.py b/main.py index f55492d..c5ed1d3 100644 --- a/main.py +++ b/main.py @@ -41,8 +41,8 @@ def backend( @app.command() def frontend( - solution_name: Annotated[str, typer.Option(help="Solution name")] = os.getenv("SOLUTION_NAME"), - backend_url: Annotated[str, typer.Option(help="Backend URL")] = os.getenv("BACKEND_URL", "http://localhost:8000/"), + solution_name: Annotated[str, typer.Option(help="Solution name")] = "SANDBOX", + backend_url: Annotated[str, typer.Option(help="Backend URL")] = "http://localhost:8000/", debug: Annotated[bool, typer.Option(help="Enable debug mode")] = False, ): from frontend.entrypoint import start From 25aa44be09c5ea4f28ad9d5bb1479a0e8dd5e5fe Mon Sep 17 00:00:00 2001 From: ks6088ts Date: Tue, 7 May 2024 22:00:30 +0900 Subject: [PATCH 2/8] rename document intelligence --- Makefile | 2 +- azure_ai_document_intelligence.env.sample | 2 ++ backend/fastapi.py | 4 +-- .../azure_ai_document_intelligence.py | 36 +++++++++++++++++++ backend/internals/document_intelligence.py | 33 ----------------- .../routers/azure_ai_document_intelligence.py | 36 +++++++++++++++++++ backend/routers/document_intelligence.py | 32 ----------------- ...e.py => azure_ai_document_intelligence.py} | 0 .../azure_ai_document_intelligence.py | 14 ++++++++ backend/settings/document_intelligence.py | 12 ------- document_intelligence.env.sample | 2 -- 11 files changed, 91 insertions(+), 82 deletions(-) create mode 100644 azure_ai_document_intelligence.env.sample create mode 100644 backend/internals/azure_ai_document_intelligence.py delete mode 100644 backend/internals/document_intelligence.py create mode 100644 backend/routers/azure_ai_document_intelligence.py delete mode 100644 backend/routers/document_intelligence.py rename backend/schemas/{document_intelligence.py => azure_ai_document_intelligence.py} (100%) create mode 100644 backend/settings/azure_ai_document_intelligence.py delete mode 100644 backend/settings/document_intelligence.py delete mode 100644 document_intelligence.env.sample diff --git a/Makefile b/Makefile index fe8e8fb..b04cb55 100644 --- a/Makefile +++ b/Makefile @@ -78,7 +78,7 @@ docker-run: ## run Docker container --volume $(PWD)/azure_openai.env.sample:/app/azure_openai.env \ --volume $(PWD)/azure_storage.env.sample:/app/azure_storage.env \ --volume $(PWD)/azure_storage_queue.env.sample:/app/azure_storage_queue.env \ - --volume 
$(PWD)/document_intelligence.env.sample:/app/document_intelligence.env \ + --volume $(PWD)/azure_ai_document_intelligence.env.sample:/app/azure_ai_document_intelligence.env \ $(DOCKER_REPO_NAME)/$(DOCKER_IMAGE_NAME):$(DOCKER_TAG) \ $(DOCKER_COMMAND) diff --git a/azure_ai_document_intelligence.env.sample b/azure_ai_document_intelligence.env.sample new file mode 100644 index 0000000..6afba98 --- /dev/null +++ b/azure_ai_document_intelligence.env.sample @@ -0,0 +1,2 @@ +AZURE_AI_DOCUMENT_INTELLIGENCE_ENDPOINT="https://.cognitiveservices.azure.com" +AZURE_AI_DOCUMENT_INTELLIGENCE_API_KEY="" diff --git a/backend/fastapi.py b/backend/fastapi.py index 59b5905..bda0c50 100644 --- a/backend/fastapi.py +++ b/backend/fastapi.py @@ -1,19 +1,19 @@ from fastapi import FastAPI from fastapi.openapi.utils import get_openapi +from backend.routers import azure_ai_document_intelligence as azure_ai_document_intelligence_router from backend.routers import azure_ai_vision as azure_ai_vision_router from backend.routers import azure_event_grid as azure_event_grid_router from backend.routers import azure_openai as azure_openai_router from backend.routers import azure_storage as azure_storage_router from backend.routers import azure_storage_queue as azure_storage_queue_router -from backend.routers import document_intelligence as document_intelligence_router app = FastAPI( docs_url="/", ) app.include_router(azure_openai_router.router) -app.include_router(document_intelligence_router.router) +app.include_router(azure_ai_document_intelligence_router.router) app.include_router(azure_storage_router.router) app.include_router(azure_ai_vision_router.router) app.include_router(azure_event_grid_router.router) diff --git a/backend/internals/azure_ai_document_intelligence.py b/backend/internals/azure_ai_document_intelligence.py new file mode 100644 index 0000000..535dd77 --- /dev/null +++ b/backend/internals/azure_ai_document_intelligence.py @@ -0,0 +1,36 @@ +from logging import getLogger + +from azure.ai.documentintelligence import DocumentIntelligenceClient +from azure.ai.documentintelligence.models import AnalyzeDocumentRequest, AnalyzeResult, ContentFormat +from azure.core.credentials import AzureKeyCredential + +from backend.settings.azure_ai_document_intelligence import Settings + +logger = getLogger(__name__) + + +class AzureAiDocumentIntelligenceClient: + def __init__(self, settings: Settings) -> None: + self.settings = settings + + def get_document_intelligence_client(self) -> DocumentIntelligenceClient: + return DocumentIntelligenceClient( + endpoint=self.settings.azure_ai_document_intelligence_endpoint, + credential=AzureKeyCredential(self.settings.azure_ai_document_intelligence_api_key), + ) + + def analyze_document( + self, + bytes_source: bytes, + ) -> AnalyzeResult: + client = self.get_document_intelligence_client() + poller = client.begin_analyze_document( + model_id="prebuilt-read", + analyze_request=AnalyzeDocumentRequest( + bytes_source=bytes_source, + ), + output_content_format=ContentFormat.MARKDOWN, + ) + result = poller.result() + logger.info(result) + return result diff --git a/backend/internals/document_intelligence.py b/backend/internals/document_intelligence.py deleted file mode 100644 index 6b6f072..0000000 --- a/backend/internals/document_intelligence.py +++ /dev/null @@ -1,33 +0,0 @@ -from logging import getLogger - -from azure.ai.documentintelligence import DocumentIntelligenceClient -from azure.ai.documentintelligence.models import AnalyzeDocumentRequest, ContentFormat -from 
azure.core.credentials import AzureKeyCredential - -from backend.schemas import document_intelligence as document_intelligence_schemas -from backend.settings import document_intelligence as document_intelligence_settings - -logger = getLogger(__name__) - -settings = document_intelligence_settings.Settings() - - -def analyze_document( - body: document_intelligence_schemas.AnalyzeDocumentRequest, -) -> document_intelligence_schemas.AnalyzeDocumentResponse: - client = DocumentIntelligenceClient( - endpoint=settings.document_intelligence_endpoint, - credential=AzureKeyCredential(settings.document_intelligence_api_key), - ) - poller = client.begin_analyze_document( - model_id="prebuilt-read", - analyze_request=AnalyzeDocumentRequest( - bytes_source=body.content, - ), - output_content_format=ContentFormat.MARKDOWN, - ) - result = poller.result() - logger.info(result) - return document_intelligence_schemas.AnalyzeDocumentResponse( - content=result.content, - ) diff --git a/backend/routers/azure_ai_document_intelligence.py b/backend/routers/azure_ai_document_intelligence.py new file mode 100644 index 0000000..211ce61 --- /dev/null +++ b/backend/routers/azure_ai_document_intelligence.py @@ -0,0 +1,36 @@ +from logging import getLogger + +from fastapi import APIRouter, UploadFile + +from backend.internals import azure_ai_document_intelligence +from backend.schemas import azure_ai_document_intelligence as azure_ai_document_intelligence_schemas +from backend.settings.azure_ai_document_intelligence import Settings + +logger = getLogger(__name__) +client = azure_ai_document_intelligence.AzureAiDocumentIntelligenceClient( + settings=Settings(), +) +router = APIRouter( + prefix="/azure_ai_document_intelligence", + tags=["azure_ai_document_intelligence"], + responses={404: {"description": "Not found"}}, +) + + +@router.post( + "/analyze_document/", + response_model=azure_ai_document_intelligence_schemas.AnalyzeDocumentResponse, + status_code=200, +) +async def analyze_document(file: UploadFile): + try: + content = await file.read() + result = client.analyze_document( + bytes_source=content, + ) + except Exception as e: + logger.error(f"Failed to read file: {e}") + raise + return azure_ai_document_intelligence_schemas.AnalyzeDocumentResponse( + content=result.content, + ) diff --git a/backend/routers/document_intelligence.py b/backend/routers/document_intelligence.py deleted file mode 100644 index 76c3141..0000000 --- a/backend/routers/document_intelligence.py +++ /dev/null @@ -1,32 +0,0 @@ -from logging import getLogger - -from fastapi import APIRouter, UploadFile - -from backend.internals import document_intelligence -from backend.schemas import document_intelligence as document_intelligence_schemas - -logger = getLogger(__name__) - -router = APIRouter( - prefix="/document_intelligence", - tags=["document_intelligence"], - responses={404: {"description": "Not found"}}, -) - - -@router.post( - "/analyze_document/", - response_model=document_intelligence_schemas.AnalyzeDocumentResponse, - status_code=200, -) -async def analyze_document(file: UploadFile): - try: - content = await file.read() - except Exception as e: - logger.error(f"Failed to read file: {e}") - raise - return document_intelligence.analyze_document( - body=document_intelligence_schemas.AnalyzeDocumentRequest( - content=content, - ) - ) diff --git a/backend/schemas/document_intelligence.py b/backend/schemas/azure_ai_document_intelligence.py similarity index 100% rename from backend/schemas/document_intelligence.py rename to 
backend/schemas/azure_ai_document_intelligence.py diff --git a/backend/settings/azure_ai_document_intelligence.py b/backend/settings/azure_ai_document_intelligence.py new file mode 100644 index 0000000..4b11e4c --- /dev/null +++ b/backend/settings/azure_ai_document_intelligence.py @@ -0,0 +1,14 @@ +from pydantic_settings import BaseSettings, SettingsConfigDict + + +class Settings(BaseSettings): + azure_ai_document_intelligence_endpoint: str = ( + "https://.cognitiveservices.azure.com/" + ) + azure_ai_document_intelligence_api_key: str = "" + + model_config = SettingsConfigDict( + env_file="azure_ai_document_intelligence.env", + env_file_encoding="utf-8", + extra="ignore", + ) diff --git a/backend/settings/document_intelligence.py b/backend/settings/document_intelligence.py deleted file mode 100644 index 6b4afdd..0000000 --- a/backend/settings/document_intelligence.py +++ /dev/null @@ -1,12 +0,0 @@ -from pydantic_settings import BaseSettings, SettingsConfigDict - - -class Settings(BaseSettings): - document_intelligence_endpoint: str = "https://.cognitiveservices.azure.com/" - document_intelligence_api_key: str = "" - - model_config = SettingsConfigDict( - env_file="document_intelligence.env", - env_file_encoding="utf-8", - extra="ignore", - ) diff --git a/document_intelligence.env.sample b/document_intelligence.env.sample deleted file mode 100644 index 8f5eb07..0000000 --- a/document_intelligence.env.sample +++ /dev/null @@ -1,2 +0,0 @@ -DOCUMENT_INTELLIGENCE_ENDPOINT="https://.cognitiveservices.azure.com" -DOCUMENT_INTELLIGENCE_API_KEY="" From 17d7d1bdbfed5fc5ad5b5e2e2d43de7916376dca Mon Sep 17 00:00:00 2001 From: ks6088ts Date: Tue, 7 May 2024 22:16:51 +0900 Subject: [PATCH 3/8] rename azure storage blob --- Makefile | 4 ++-- azure_storage.env.sample | 3 --- azure_storage_blob.env.sample | 3 +++ backend/fastapi.py | 6 +++--- ...azure_storage.py => azure_storage_blob.py} | 8 ++++---- ...azure_storage.py => azure_storage_blob.py} | 20 +++++++++---------- ...azure_storage.py => azure_storage_blob.py} | 0 ...azure_storage.py => azure_storage_blob.py} | 6 +++--- 8 files changed, 25 insertions(+), 25 deletions(-) delete mode 100644 azure_storage.env.sample create mode 100644 azure_storage_blob.env.sample rename backend/internals/{azure_storage.py => azure_storage_blob.py} (89%) rename backend/routers/{azure_storage.py => azure_storage_blob.py} (74%) rename backend/schemas/{azure_storage.py => azure_storage_blob.py} (100%) rename backend/settings/{azure_storage.py => azure_storage_blob.py} (60%) diff --git a/Makefile b/Makefile index b04cb55..a31754f 100644 --- a/Makefile +++ b/Makefile @@ -72,13 +72,13 @@ docker-build: ## build Docker image docker-run: ## run Docker container docker run --rm \ --publish 8888:8888 \ + --volume $(PWD)/azure_ai_document_intelligence.env.sample:/app/azure_ai_document_intelligence.env \ --volume $(PWD)/azure_ai_speech.env.sample:/app/azure_ai_speech.env \ --volume $(PWD)/azure_ai_vision.env.sample:/app/azure_ai_vision.env \ --volume $(PWD)/azure_event_grid.env.sample:/app/azure_event_grid.env \ --volume $(PWD)/azure_openai.env.sample:/app/azure_openai.env \ - --volume $(PWD)/azure_storage.env.sample:/app/azure_storage.env \ + --volume $(PWD)/azure_storage_blob.env.sample:/app/azure_storage_blob.env \ --volume $(PWD)/azure_storage_queue.env.sample:/app/azure_storage_queue.env \ - --volume $(PWD)/azure_ai_document_intelligence.env.sample:/app/azure_ai_document_intelligence.env \ $(DOCKER_REPO_NAME)/$(DOCKER_IMAGE_NAME):$(DOCKER_TAG) \ $(DOCKER_COMMAND) diff --git 
a/azure_storage.env.sample b/azure_storage.env.sample deleted file mode 100644 index 223efc2..0000000 --- a/azure_storage.env.sample +++ /dev/null @@ -1,3 +0,0 @@ -AZURE_STORAGE_ACCOUNT_NAME = "" -AZURE_STORAGE_SAS_TOKEN = "" -AZURE_STORAGE_BLOB_CONTAINER_NAME = "" diff --git a/azure_storage_blob.env.sample b/azure_storage_blob.env.sample new file mode 100644 index 0000000..e706518 --- /dev/null +++ b/azure_storage_blob.env.sample @@ -0,0 +1,3 @@ +AZURE_STORAGE_BLOB_ACCOUNT_NAME = "" +AZURE_STORAGE_BLOB_SAS_TOKEN = "" +AZURE_STORAGE_BLOB_CONTAINER_NAME = "" diff --git a/backend/fastapi.py b/backend/fastapi.py index bda0c50..8cbf8c3 100644 --- a/backend/fastapi.py +++ b/backend/fastapi.py @@ -5,18 +5,18 @@ from backend.routers import azure_ai_vision as azure_ai_vision_router from backend.routers import azure_event_grid as azure_event_grid_router from backend.routers import azure_openai as azure_openai_router -from backend.routers import azure_storage as azure_storage_router +from backend.routers import azure_storage_blob as azure_storage_blob_router from backend.routers import azure_storage_queue as azure_storage_queue_router app = FastAPI( docs_url="/", ) -app.include_router(azure_openai_router.router) app.include_router(azure_ai_document_intelligence_router.router) -app.include_router(azure_storage_router.router) app.include_router(azure_ai_vision_router.router) app.include_router(azure_event_grid_router.router) +app.include_router(azure_openai_router.router) +app.include_router(azure_storage_blob_router.router) app.include_router(azure_storage_queue_router.router) diff --git a/backend/internals/azure_storage.py b/backend/internals/azure_storage_blob.py similarity index 89% rename from backend/internals/azure_storage.py rename to backend/internals/azure_storage_blob.py index 74531cb..7830436 100644 --- a/backend/internals/azure_storage.py +++ b/backend/internals/azure_storage_blob.py @@ -2,19 +2,19 @@ from azure.storage.blob import BlobServiceClient -from backend.settings import azure_storage as azure_storage_settings +from backend.settings import azure_storage_blob as azure_storage_settings logger = getLogger(__name__) -class BlobStorageClient: +class Client: def __init__(self, settings: azure_storage_settings.Settings): self.settings = settings def get_blob_service_client(self) -> BlobServiceClient: return BlobServiceClient( - account_url=f"https://{self.settings.azure_storage_account_name}.blob.core.windows.net", - credential=self.settings.azure_storage_sas_token, + account_url=f"https://{self.settings.azure_storage_blob_account_name}.blob.core.windows.net", + credential=self.settings.azure_storage_blob_sas_token, ) def upload_blob_stream( diff --git a/backend/routers/azure_storage.py b/backend/routers/azure_storage_blob.py similarity index 74% rename from backend/routers/azure_storage.py rename to backend/routers/azure_storage_blob.py index 21ffb32..8f09ea8 100644 --- a/backend/routers/azure_storage.py +++ b/backend/routers/azure_storage_blob.py @@ -3,18 +3,18 @@ from fastapi import APIRouter, UploadFile, status from fastapi.responses import JSONResponse -from backend.internals import azure_storage -from backend.schemas import azure_storage as azure_storage_schemas -from backend.settings.azure_storage import Settings as AzureStorageSettings +from backend.internals import azure_storage_blob +from backend.schemas import azure_storage_blob as azure_storage_schemas +from backend.settings.azure_storage_blob import Settings logger = getLogger(__name__) -blob_storage_client = 
azure_storage.BlobStorageClient( - settings=AzureStorageSettings(), +client = azure_storage_blob.Client( + settings=Settings(), ) router = APIRouter( - prefix="/azure_storage", - tags=["azure_storage"], + prefix="/azure_storage_blob", + tags=["azure_storage_blob"], responses={404: {"description": "Not found"}}, ) @@ -30,7 +30,7 @@ async def upload_blob( ): try: content = await file.read() - blob_storage_client.upload_blob_stream( + client.upload_blob_stream( blob_name=blob_name, stream=content, ) @@ -50,7 +50,7 @@ async def delete_blob( blob_name: str, ): try: - blob_storage_client.delete_blob( + client.delete_blob( blob_name=blob_name, ) except Exception as e: @@ -68,7 +68,7 @@ async def delete_blob( ) async def list_blobs(): try: - blobs = blob_storage_client.list_blobs() + blobs = client.list_blobs() except Exception as e: logger.error(f"Failed to upload blob: {e}") raise diff --git a/backend/schemas/azure_storage.py b/backend/schemas/azure_storage_blob.py similarity index 100% rename from backend/schemas/azure_storage.py rename to backend/schemas/azure_storage_blob.py diff --git a/backend/settings/azure_storage.py b/backend/settings/azure_storage_blob.py similarity index 60% rename from backend/settings/azure_storage.py rename to backend/settings/azure_storage_blob.py index f0679d1..4b35a0f 100644 --- a/backend/settings/azure_storage.py +++ b/backend/settings/azure_storage_blob.py @@ -2,11 +2,11 @@ class Settings(BaseSettings): - azure_storage_account_name: str = "" - azure_storage_sas_token: str = "" + azure_storage_blob_account_name: str = "" + azure_storage_blob_sas_token: str = "" azure_storage_blob_container_name: str = "" model_config = SettingsConfigDict( - env_file="azure_storage.env", + env_file="azure_storage_blob.env", env_file_encoding="utf-8", ) From 49fbc2714c801a4c3a4022a96a22e3bf2681e946 Mon Sep 17 00:00:00 2001 From: ks6088ts Date: Tue, 7 May 2024 22:31:05 +0900 Subject: [PATCH 4/8] rename variables --- backend/fastapi.py | 15 +++++++++------ .../internals/azure_ai_document_intelligence.py | 2 +- backend/internals/azure_ai_vision.py | 2 +- backend/internals/azure_openai.py | 6 ++---- backend/internals/azure_storage_blob.py | 4 ++-- backend/internals/azure_storage_queue.py | 6 +++--- backend/routers/azure_ai_document_intelligence.py | 6 ++++-- backend/routers/azure_ai_vision.py | 5 +++-- backend/routers/azure_event_grid.py | 5 +++-- backend/routers/azure_openai.py | 5 +++-- backend/routers/azure_storage_blob.py | 5 +++-- backend/routers/azure_storage_queue.py | 9 +++++---- .../settings/azure_ai_document_intelligence.py | 1 - backend/settings/azure_storage_queue.py | 2 +- 14 files changed, 40 insertions(+), 33 deletions(-) diff --git a/backend/fastapi.py b/backend/fastapi.py index 8cbf8c3..eb4eba7 100644 --- a/backend/fastapi.py +++ b/backend/fastapi.py @@ -12,12 +12,15 @@ docs_url="/", ) -app.include_router(azure_ai_document_intelligence_router.router) -app.include_router(azure_ai_vision_router.router) -app.include_router(azure_event_grid_router.router) -app.include_router(azure_openai_router.router) -app.include_router(azure_storage_blob_router.router) -app.include_router(azure_storage_queue_router.router) +for router in [ + azure_ai_document_intelligence_router.router, + azure_ai_vision_router.router, + azure_event_grid_router.router, + azure_openai_router.router, + azure_storage_blob_router.router, + azure_storage_queue_router.router, +]: + app.include_router(router) def custom_openapi(): diff --git a/backend/internals/azure_ai_document_intelligence.py 
b/backend/internals/azure_ai_document_intelligence.py index 535dd77..31b667d 100644 --- a/backend/internals/azure_ai_document_intelligence.py +++ b/backend/internals/azure_ai_document_intelligence.py @@ -9,7 +9,7 @@ logger = getLogger(__name__) -class AzureAiDocumentIntelligenceClient: +class Client: def __init__(self, settings: Settings) -> None: self.settings = settings diff --git a/backend/internals/azure_ai_vision.py b/backend/internals/azure_ai_vision.py index 16ada73..b9898a8 100644 --- a/backend/internals/azure_ai_vision.py +++ b/backend/internals/azure_ai_vision.py @@ -9,7 +9,7 @@ logger = getLogger(__name__) -class AzureAiVisionClient: +class Client: def __init__(self, settings: Settings) -> None: self.settings = settings diff --git a/backend/internals/azure_openai.py b/backend/internals/azure_openai.py index 02d4e43..194e6bc 100644 --- a/backend/internals/azure_openai.py +++ b/backend/internals/azure_openai.py @@ -4,15 +4,13 @@ from openai import AzureOpenAI from openai.types.chat import ChatCompletion -from backend.settings import azure_openai as azure_openai_settings +from backend.settings.azure_openai import Settings logger = getLogger(__name__) -settings = azure_openai_settings.Settings() - class Client: - def __init__(self, settings: azure_openai_settings.Settings) -> None: + def __init__(self, settings: Settings) -> None: self.settings = settings def get_client(self) -> AzureOpenAI: diff --git a/backend/internals/azure_storage_blob.py b/backend/internals/azure_storage_blob.py index 7830436..f4fae99 100644 --- a/backend/internals/azure_storage_blob.py +++ b/backend/internals/azure_storage_blob.py @@ -2,13 +2,13 @@ from azure.storage.blob import BlobServiceClient -from backend.settings import azure_storage_blob as azure_storage_settings +from backend.settings.azure_storage_blob import Settings logger = getLogger(__name__) class Client: - def __init__(self, settings: azure_storage_settings.Settings): + def __init__(self, settings: Settings): self.settings = settings def get_blob_service_client(self) -> BlobServiceClient: diff --git a/backend/internals/azure_storage_queue.py b/backend/internals/azure_storage_queue.py index 09b53be..1186cc9 100644 --- a/backend/internals/azure_storage_queue.py +++ b/backend/internals/azure_storage_queue.py @@ -3,13 +3,13 @@ from azure.core.paging import ItemPaged from azure.storage.queue import QueueMessage, QueueServiceClient -from backend.settings.azure_storage_queue import AzureStorageQueueSettings +from backend.settings.azure_storage_queue import Settings logger = getLogger(__name__) -class AzureStorageQueueClient: - def __init__(self, settings: AzureStorageQueueSettings): +class Client: + def __init__(self, settings: Settings): self.client = QueueServiceClient.from_connection_string(settings.azure_storage_queue_connection_string) def create_queue( diff --git a/backend/routers/azure_ai_document_intelligence.py b/backend/routers/azure_ai_document_intelligence.py index 211ce61..0e807fa 100644 --- a/backend/routers/azure_ai_document_intelligence.py +++ b/backend/routers/azure_ai_document_intelligence.py @@ -2,14 +2,16 @@ from fastapi import APIRouter, UploadFile -from backend.internals import azure_ai_document_intelligence +from backend.internals.azure_ai_document_intelligence import Client from backend.schemas import azure_ai_document_intelligence as azure_ai_document_intelligence_schemas from backend.settings.azure_ai_document_intelligence import Settings logger = getLogger(__name__) -client = 
azure_ai_document_intelligence.AzureAiDocumentIntelligenceClient( + +client = Client( settings=Settings(), ) + router = APIRouter( prefix="/azure_ai_document_intelligence", tags=["azure_ai_document_intelligence"], diff --git a/backend/routers/azure_ai_vision.py b/backend/routers/azure_ai_vision.py index b60357b..e0aefa9 100644 --- a/backend/routers/azure_ai_vision.py +++ b/backend/routers/azure_ai_vision.py @@ -2,12 +2,13 @@ from fastapi import APIRouter, UploadFile -from backend.internals import azure_ai_vision +from backend.internals.azure_ai_vision import Client from backend.schemas import azure_ai_vision as azure_ai_vision_schemas from backend.settings.azure_ai_vision import Settings logger = getLogger(__name__) -client = azure_ai_vision.AzureAiVisionClient( + +client = Client( settings=Settings(), ) diff --git a/backend/routers/azure_event_grid.py b/backend/routers/azure_event_grid.py index e4e6b6f..daf9847 100644 --- a/backend/routers/azure_event_grid.py +++ b/backend/routers/azure_event_grid.py @@ -2,11 +2,12 @@ from fastapi import APIRouter -from backend.internals import azure_event_grid +from backend.internals.azure_event_grid import Client from backend.settings.azure_event_grid import Settings logger = getLogger(__name__) -client = azure_event_grid.Client( + +client = Client( settings=Settings(), ) diff --git a/backend/routers/azure_openai.py b/backend/routers/azure_openai.py index 62bd8ec..7cbd0ac 100644 --- a/backend/routers/azure_openai.py +++ b/backend/routers/azure_openai.py @@ -2,12 +2,13 @@ from fastapi import APIRouter, UploadFile -from backend.internals import azure_openai +from backend.internals.azure_openai import Client from backend.schemas import azure_openai as azure_openai_schemas from backend.settings.azure_openai import Settings logger = getLogger(__name__) -client = azure_openai.Client( + +client = Client( settings=Settings(), ) diff --git a/backend/routers/azure_storage_blob.py b/backend/routers/azure_storage_blob.py index 8f09ea8..cfa6ad4 100644 --- a/backend/routers/azure_storage_blob.py +++ b/backend/routers/azure_storage_blob.py @@ -3,12 +3,13 @@ from fastapi import APIRouter, UploadFile, status from fastapi.responses import JSONResponse -from backend.internals import azure_storage_blob +from backend.internals.azure_storage_blob import Client from backend.schemas import azure_storage_blob as azure_storage_schemas from backend.settings.azure_storage_blob import Settings logger = getLogger(__name__) -client = azure_storage_blob.Client( + +client = Client( settings=Settings(), ) diff --git a/backend/routers/azure_storage_queue.py b/backend/routers/azure_storage_queue.py index 491b421..80f3015 100644 --- a/backend/routers/azure_storage_queue.py +++ b/backend/routers/azure_storage_queue.py @@ -3,13 +3,14 @@ from fastapi import APIRouter from fastapi.responses import JSONResponse -from backend.internals.azure_storage_queue import AzureStorageQueueClient +from backend.internals.azure_storage_queue import Client from backend.schemas import azure_storage_queue as azure_storage_queue_schemas -from backend.settings.azure_storage_queue import AzureStorageQueueSettings +from backend.settings.azure_storage_queue import Settings logger = getLogger(__name__) -client = AzureStorageQueueClient( - settings=AzureStorageQueueSettings(), + +client = Client( + settings=Settings(), ) router = APIRouter( diff --git a/backend/settings/azure_ai_document_intelligence.py b/backend/settings/azure_ai_document_intelligence.py index 4b11e4c..3c4cc79 100644 --- 
a/backend/settings/azure_ai_document_intelligence.py +++ b/backend/settings/azure_ai_document_intelligence.py @@ -10,5 +10,4 @@ class Settings(BaseSettings): model_config = SettingsConfigDict( env_file="azure_ai_document_intelligence.env", env_file_encoding="utf-8", - extra="ignore", ) diff --git a/backend/settings/azure_storage_queue.py b/backend/settings/azure_storage_queue.py index 6759d09..0235b60 100644 --- a/backend/settings/azure_storage_queue.py +++ b/backend/settings/azure_storage_queue.py @@ -1,7 +1,7 @@ from pydantic_settings import BaseSettings, SettingsConfigDict -class AzureStorageQueueSettings(BaseSettings): +class Settings(BaseSettings): azure_storage_queue_connection_string: str = "" model_config = SettingsConfigDict( From fa2c86f32ac512e992e77f2e4d1d7cdf4eaf90cf Mon Sep 17 00:00:00 2001 From: ks6088ts Date: Tue, 7 May 2024 22:41:03 +0900 Subject: [PATCH 5/8] make generate-openapi --- Makefile | 3 + client/api_client.py | 33 +- .../analyze_document_request_builder.py | 78 +++ ...i_document_intelligence_request_builder.py | 32 + .../azure_openai_request_builder.py | 10 + ...completions_with_vision_request_builder.py | 89 +++ .../azure_storage_blob_request_builder.py | 32 + .../blobs/blobs_request_builder.py | 81 +++ .../blobs/delete/delete_request_builder.py | 76 +++ .../blobs/upload/upload_request_builder.py | 87 +++ .../azure_storage_queue_request_builder.py | 42 ++ .../messages/messages_request_builder.py | 158 +++++ .../queues/queues_request_builder.py | 118 ++++ client/kiota-lock.json | 2 +- .../chat_completion_with_vision_response.py | 46 ++ client/models/create_queue_request.py | 46 ++ client/models/create_queue_response.py | 46 ++ client/models/delete_message_request.py | 54 ++ client/models/delete_message_response.py | 42 ++ client/models/delete_queue_request.py | 46 ++ client/models/delete_queue_response.py | 46 ++ client/models/send_message_request.py | 50 ++ client/models/send_message_response.py | 42 ++ specs/openapi.json | 560 +++++++++++++++--- 24 files changed, 1732 insertions(+), 87 deletions(-) create mode 100644 client/azure_ai_document_intelligence/analyze_document/analyze_document_request_builder.py create mode 100644 client/azure_ai_document_intelligence/azure_ai_document_intelligence_request_builder.py create mode 100644 client/azure_openai/chat_completions_with_vision/chat_completions_with_vision_request_builder.py create mode 100644 client/azure_storage_blob/azure_storage_blob_request_builder.py create mode 100644 client/azure_storage_blob/blobs/blobs_request_builder.py create mode 100644 client/azure_storage_blob/blobs/delete/delete_request_builder.py create mode 100644 client/azure_storage_blob/blobs/upload/upload_request_builder.py create mode 100644 client/azure_storage_queue/azure_storage_queue_request_builder.py create mode 100644 client/azure_storage_queue/messages/messages_request_builder.py create mode 100644 client/azure_storage_queue/queues/queues_request_builder.py create mode 100644 client/models/chat_completion_with_vision_response.py create mode 100644 client/models/create_queue_request.py create mode 100644 client/models/create_queue_response.py create mode 100644 client/models/delete_message_request.py create mode 100644 client/models/delete_message_response.py create mode 100644 client/models/delete_queue_request.py create mode 100644 client/models/delete_queue_response.py create mode 100644 client/models/send_message_request.py create mode 100644 client/models/send_message_response.py diff --git a/Makefile b/Makefile index 
a31754f..4f8d4be 100644 --- a/Makefile +++ b/Makefile @@ -152,3 +152,6 @@ generate-openapi-client: ## generate OpenAPI client --output ./client @echo "Get the list of dependencies" @kiota info -d ./specs/openapi.json --language Python + +.PHONY: generate-openapi +generate-openapi: generate-openapi-spec generate-openapi-client ## generate OpenAPI artifacts diff --git a/client/api_client.py b/client/api_client.py index d532399..158b949 100644 --- a/client/api_client.py +++ b/client/api_client.py @@ -14,11 +14,12 @@ from typing import Any, Callable, Dict, List, Optional, TYPE_CHECKING, Union if TYPE_CHECKING: + from .azure_ai_document_intelligence.azure_ai_document_intelligence_request_builder import Azure_ai_document_intelligenceRequestBuilder from .azure_ai_vision.azure_ai_vision_request_builder import Azure_ai_visionRequestBuilder from .azure_event_grid.azure_event_grid_request_builder import Azure_event_gridRequestBuilder from .azure_openai.azure_openai_request_builder import Azure_openaiRequestBuilder - from .azure_storage.azure_storage_request_builder import Azure_storageRequestBuilder - from .document_intelligence.document_intelligence_request_builder import Document_intelligenceRequestBuilder + from .azure_storage_blob.azure_storage_blob_request_builder import Azure_storage_blobRequestBuilder + from .azure_storage_queue.azure_storage_queue_request_builder import Azure_storage_queueRequestBuilder class ApiClient(BaseRequestBuilder): """ @@ -40,9 +41,15 @@ def __init__(self,request_adapter: RequestAdapter) -> None: register_default_deserializer(JsonParseNodeFactory) register_default_deserializer(TextParseNodeFactory) register_default_deserializer(FormParseNodeFactory) - if not self.request_adapter.base_url: - self.request_adapter.base_url = "http://localhost:8000" - self.path_parameters["base_url"] = self.request_adapter.base_url + + @property + def azure_ai_document_intelligence(self) -> Azure_ai_document_intelligenceRequestBuilder: + """ + The azure_ai_document_intelligence property + """ + from .azure_ai_document_intelligence.azure_ai_document_intelligence_request_builder import Azure_ai_document_intelligenceRequestBuilder + + return Azure_ai_document_intelligenceRequestBuilder(self.request_adapter, self.path_parameters) @property def azure_ai_vision(self) -> Azure_ai_visionRequestBuilder: @@ -72,21 +79,21 @@ def azure_openai(self) -> Azure_openaiRequestBuilder: return Azure_openaiRequestBuilder(self.request_adapter, self.path_parameters) @property - def azure_storage(self) -> Azure_storageRequestBuilder: + def azure_storage_blob(self) -> Azure_storage_blobRequestBuilder: """ - The azure_storage property + The azure_storage_blob property """ - from .azure_storage.azure_storage_request_builder import Azure_storageRequestBuilder + from .azure_storage_blob.azure_storage_blob_request_builder import Azure_storage_blobRequestBuilder - return Azure_storageRequestBuilder(self.request_adapter, self.path_parameters) + return Azure_storage_blobRequestBuilder(self.request_adapter, self.path_parameters) @property - def document_intelligence(self) -> Document_intelligenceRequestBuilder: + def azure_storage_queue(self) -> Azure_storage_queueRequestBuilder: """ - The document_intelligence property + The azure_storage_queue property """ - from .document_intelligence.document_intelligence_request_builder import Document_intelligenceRequestBuilder + from .azure_storage_queue.azure_storage_queue_request_builder import Azure_storage_queueRequestBuilder - return 
Document_intelligenceRequestBuilder(self.request_adapter, self.path_parameters) + return Azure_storage_queueRequestBuilder(self.request_adapter, self.path_parameters) diff --git a/client/azure_ai_document_intelligence/analyze_document/analyze_document_request_builder.py b/client/azure_ai_document_intelligence/analyze_document/analyze_document_request_builder.py new file mode 100644 index 0000000..3dcf38a --- /dev/null +++ b/client/azure_ai_document_intelligence/analyze_document/analyze_document_request_builder.py @@ -0,0 +1,78 @@ +from __future__ import annotations +from kiota_abstractions.base_request_builder import BaseRequestBuilder +from kiota_abstractions.base_request_configuration import RequestConfiguration +from kiota_abstractions.get_path_parameters import get_path_parameters +from kiota_abstractions.method import Method +from kiota_abstractions.multipart_body import MultipartBody +from kiota_abstractions.request_adapter import RequestAdapter +from kiota_abstractions.request_information import RequestInformation +from kiota_abstractions.request_option import RequestOption +from kiota_abstractions.serialization import Parsable, ParsableFactory +from typing import Any, Callable, Dict, List, Optional, TYPE_CHECKING, Union + +if TYPE_CHECKING: + from ...models.analyze_document_response import AnalyzeDocumentResponse + from ...models.h_t_t_p_validation_error import HTTPValidationError + +class Analyze_documentRequestBuilder(BaseRequestBuilder): + """ + Builds and executes requests for operations under /azure_ai_document_intelligence/analyze_document + """ + def __init__(self,request_adapter: RequestAdapter, path_parameters: Union[str, Dict[str, Any]]) -> None: + """ + Instantiates a new Analyze_documentRequestBuilder and sets the default values. + param path_parameters: The raw url or the url-template parameters for the request. + param request_adapter: The request adapter to use to execute the requests. + Returns: None + """ + super().__init__(request_adapter, "{+baseurl}/azure_ai_document_intelligence/analyze_document", path_parameters) + + async def post(self,body: Optional[MultipartBody] = None, request_configuration: Optional[RequestConfiguration] = None) -> Optional[AnalyzeDocumentResponse]: + """ + Analyze Document + param body: The request body + param request_configuration: Configuration for the request such as headers, query parameters, and middleware options. + Returns: Optional[AnalyzeDocumentResponse] + """ + if not body: + raise TypeError("body cannot be null.") + request_info = self.to_post_request_information( + body, request_configuration + ) + from ...models.h_t_t_p_validation_error import HTTPValidationError + + error_mapping: Dict[str, ParsableFactory] = { + "422": HTTPValidationError, + } + if not self.request_adapter: + raise Exception("Http core is null") + from ...models.analyze_document_response import AnalyzeDocumentResponse + + return await self.request_adapter.send_async(request_info, AnalyzeDocumentResponse, error_mapping) + + def to_post_request_information(self,body: Optional[MultipartBody] = None, request_configuration: Optional[RequestConfiguration] = None) -> RequestInformation: + """ + Analyze Document + param body: The request body + param request_configuration: Configuration for the request such as headers, query parameters, and middleware options. 
+ Returns: RequestInformation + """ + if not body: + raise TypeError("body cannot be null.") + request_info = RequestInformation(Method.POST, self.url_template, self.path_parameters) + request_info.configure(request_configuration) + request_info.headers.try_add("Accept", "application/json") + request_info.set_content_from_parsable(self.request_adapter, "multipart/form-data", body) + return request_info + + def with_url(self,raw_url: Optional[str] = None) -> Analyze_documentRequestBuilder: + """ + Returns a request builder with the provided arbitrary URL. Using this method means any other path or query parameters are ignored. + param raw_url: The raw URL to use for the request builder. + Returns: Analyze_documentRequestBuilder + """ + if not raw_url: + raise TypeError("raw_url cannot be null.") + return Analyze_documentRequestBuilder(self.request_adapter, raw_url) + + diff --git a/client/azure_ai_document_intelligence/azure_ai_document_intelligence_request_builder.py b/client/azure_ai_document_intelligence/azure_ai_document_intelligence_request_builder.py new file mode 100644 index 0000000..7cdb16c --- /dev/null +++ b/client/azure_ai_document_intelligence/azure_ai_document_intelligence_request_builder.py @@ -0,0 +1,32 @@ +from __future__ import annotations +from kiota_abstractions.base_request_builder import BaseRequestBuilder +from kiota_abstractions.get_path_parameters import get_path_parameters +from kiota_abstractions.request_adapter import RequestAdapter +from typing import Any, Callable, Dict, List, Optional, TYPE_CHECKING, Union + +if TYPE_CHECKING: + from .analyze_document.analyze_document_request_builder import Analyze_documentRequestBuilder + +class Azure_ai_document_intelligenceRequestBuilder(BaseRequestBuilder): + """ + Builds and executes requests for operations under /azure_ai_document_intelligence + """ + def __init__(self,request_adapter: RequestAdapter, path_parameters: Union[str, Dict[str, Any]]) -> None: + """ + Instantiates a new Azure_ai_document_intelligenceRequestBuilder and sets the default values. + param path_parameters: The raw url or the url-template parameters for the request. + param request_adapter: The request adapter to use to execute the requests. 
+ Returns: None + """ + super().__init__(request_adapter, "{+baseurl}/azure_ai_document_intelligence", path_parameters) + + @property + def analyze_document(self) -> Analyze_documentRequestBuilder: + """ + The analyze_document property + """ + from .analyze_document.analyze_document_request_builder import Analyze_documentRequestBuilder + + return Analyze_documentRequestBuilder(self.request_adapter, self.path_parameters) + + diff --git a/client/azure_openai/azure_openai_request_builder.py b/client/azure_openai/azure_openai_request_builder.py index 1a182c2..34d1d9a 100644 --- a/client/azure_openai/azure_openai_request_builder.py +++ b/client/azure_openai/azure_openai_request_builder.py @@ -6,6 +6,7 @@ if TYPE_CHECKING: from .chat_completions.chat_completions_request_builder import Chat_completionsRequestBuilder + from .chat_completions_with_vision.chat_completions_with_vision_request_builder import Chat_completions_with_visionRequestBuilder class Azure_openaiRequestBuilder(BaseRequestBuilder): """ @@ -29,4 +30,13 @@ def chat_completions(self) -> Chat_completionsRequestBuilder: return Chat_completionsRequestBuilder(self.request_adapter, self.path_parameters) + @property + def chat_completions_with_vision(self) -> Chat_completions_with_visionRequestBuilder: + """ + The chat_completions_with_vision property + """ + from .chat_completions_with_vision.chat_completions_with_vision_request_builder import Chat_completions_with_visionRequestBuilder + + return Chat_completions_with_visionRequestBuilder(self.request_adapter, self.path_parameters) + diff --git a/client/azure_openai/chat_completions_with_vision/chat_completions_with_vision_request_builder.py b/client/azure_openai/chat_completions_with_vision/chat_completions_with_vision_request_builder.py new file mode 100644 index 0000000..f7c1ee3 --- /dev/null +++ b/client/azure_openai/chat_completions_with_vision/chat_completions_with_vision_request_builder.py @@ -0,0 +1,89 @@ +from __future__ import annotations +from dataclasses import dataclass, field +from kiota_abstractions.base_request_builder import BaseRequestBuilder +from kiota_abstractions.base_request_configuration import RequestConfiguration +from kiota_abstractions.get_path_parameters import get_path_parameters +from kiota_abstractions.method import Method +from kiota_abstractions.multipart_body import MultipartBody +from kiota_abstractions.request_adapter import RequestAdapter +from kiota_abstractions.request_information import RequestInformation +from kiota_abstractions.request_option import RequestOption +from kiota_abstractions.serialization import Parsable, ParsableFactory +from typing import Any, Callable, Dict, List, Optional, TYPE_CHECKING, Union + +if TYPE_CHECKING: + from ...models.chat_completion_with_vision_response import ChatCompletionWithVisionResponse + from ...models.h_t_t_p_validation_error import HTTPValidationError + +class Chat_completions_with_visionRequestBuilder(BaseRequestBuilder): + """ + Builds and executes requests for operations under /azure_openai/chat_completions_with_vision + """ + def __init__(self,request_adapter: RequestAdapter, path_parameters: Union[str, Dict[str, Any]]) -> None: + """ + Instantiates a new Chat_completions_with_visionRequestBuilder and sets the default values. + param path_parameters: The raw url or the url-template parameters for the request. + param request_adapter: The request adapter to use to execute the requests. 
+ Returns: None + """ + super().__init__(request_adapter, "{+baseurl}/azure_openai/chat_completions_with_vision{?system_prompt*,user_prompt*}", path_parameters) + + async def post(self,body: Optional[MultipartBody] = None, request_configuration: Optional[RequestConfiguration] = None) -> Optional[ChatCompletionWithVisionResponse]: + """ + Create Chat Completions With Vision + param body: The request body + param request_configuration: Configuration for the request such as headers, query parameters, and middleware options. + Returns: Optional[ChatCompletionWithVisionResponse] + """ + if not body: + raise TypeError("body cannot be null.") + request_info = self.to_post_request_information( + body, request_configuration + ) + from ...models.h_t_t_p_validation_error import HTTPValidationError + + error_mapping: Dict[str, ParsableFactory] = { + "422": HTTPValidationError, + } + if not self.request_adapter: + raise Exception("Http core is null") + from ...models.chat_completion_with_vision_response import ChatCompletionWithVisionResponse + + return await self.request_adapter.send_async(request_info, ChatCompletionWithVisionResponse, error_mapping) + + def to_post_request_information(self,body: Optional[MultipartBody] = None, request_configuration: Optional[RequestConfiguration] = None) -> RequestInformation: + """ + Create Chat Completions With Vision + param body: The request body + param request_configuration: Configuration for the request such as headers, query parameters, and middleware options. + Returns: RequestInformation + """ + if not body: + raise TypeError("body cannot be null.") + request_info = RequestInformation(Method.POST, self.url_template, self.path_parameters) + request_info.configure(request_configuration) + request_info.headers.try_add("Accept", "application/json") + request_info.set_content_from_parsable(self.request_adapter, "multipart/form-data", body) + return request_info + + def with_url(self,raw_url: Optional[str] = None) -> Chat_completions_with_visionRequestBuilder: + """ + Returns a request builder with the provided arbitrary URL. Using this method means any other path or query parameters are ignored. + param raw_url: The raw URL to use for the request builder. 
+ Returns: Chat_completions_with_visionRequestBuilder + """ + if not raw_url: + raise TypeError("raw_url cannot be null.") + return Chat_completions_with_visionRequestBuilder(self.request_adapter, raw_url) + + @dataclass + class Chat_completions_with_visionRequestBuilderPostQueryParameters(): + """ + Create Chat Completions With Vision + """ + system_prompt: Optional[str] = None + + user_prompt: Optional[str] = None + + + diff --git a/client/azure_storage_blob/azure_storage_blob_request_builder.py b/client/azure_storage_blob/azure_storage_blob_request_builder.py new file mode 100644 index 0000000..dade635 --- /dev/null +++ b/client/azure_storage_blob/azure_storage_blob_request_builder.py @@ -0,0 +1,32 @@ +from __future__ import annotations +from kiota_abstractions.base_request_builder import BaseRequestBuilder +from kiota_abstractions.get_path_parameters import get_path_parameters +from kiota_abstractions.request_adapter import RequestAdapter +from typing import Any, Callable, Dict, List, Optional, TYPE_CHECKING, Union + +if TYPE_CHECKING: + from .blobs.blobs_request_builder import BlobsRequestBuilder + +class Azure_storage_blobRequestBuilder(BaseRequestBuilder): + """ + Builds and executes requests for operations under /azure_storage_blob + """ + def __init__(self,request_adapter: RequestAdapter, path_parameters: Union[str, Dict[str, Any]]) -> None: + """ + Instantiates a new Azure_storage_blobRequestBuilder and sets the default values. + param path_parameters: The raw url or the url-template parameters for the request. + param request_adapter: The request adapter to use to execute the requests. + Returns: None + """ + super().__init__(request_adapter, "{+baseurl}/azure_storage_blob", path_parameters) + + @property + def blobs(self) -> BlobsRequestBuilder: + """ + The blobs property + """ + from .blobs.blobs_request_builder import BlobsRequestBuilder + + return BlobsRequestBuilder(self.request_adapter, self.path_parameters) + + diff --git a/client/azure_storage_blob/blobs/blobs_request_builder.py b/client/azure_storage_blob/blobs/blobs_request_builder.py new file mode 100644 index 0000000..a64bd57 --- /dev/null +++ b/client/azure_storage_blob/blobs/blobs_request_builder.py @@ -0,0 +1,81 @@ +from __future__ import annotations +from kiota_abstractions.base_request_builder import BaseRequestBuilder +from kiota_abstractions.base_request_configuration import RequestConfiguration +from kiota_abstractions.get_path_parameters import get_path_parameters +from kiota_abstractions.method import Method +from kiota_abstractions.request_adapter import RequestAdapter +from kiota_abstractions.request_information import RequestInformation +from kiota_abstractions.request_option import RequestOption +from kiota_abstractions.serialization import Parsable, ParsableFactory +from typing import Any, Callable, Dict, List, Optional, TYPE_CHECKING, Union + +if TYPE_CHECKING: + from .delete.delete_request_builder import DeleteRequestBuilder + from .upload.upload_request_builder import UploadRequestBuilder + +class BlobsRequestBuilder(BaseRequestBuilder): + """ + Builds and executes requests for operations under /azure_storage_blob/blobs + """ + def __init__(self,request_adapter: RequestAdapter, path_parameters: Union[str, Dict[str, Any]]) -> None: + """ + Instantiates a new BlobsRequestBuilder and sets the default values. + param path_parameters: The raw url or the url-template parameters for the request. + param request_adapter: The request adapter to use to execute the requests. 
+ Returns: None + """ + super().__init__(request_adapter, "{+baseurl}/azure_storage_blob/blobs", path_parameters) + + async def get(self,request_configuration: Optional[RequestConfiguration] = None) -> Optional[UntypedNode]: + """ + List Blobs + param request_configuration: Configuration for the request such as headers, query parameters, and middleware options. + Returns: Optional[UntypedNode] + """ + request_info = self.to_get_request_information( + request_configuration + ) + if not self.request_adapter: + raise Exception("Http core is null") + return await self.request_adapter.send_async(request_info, UntypedNode, None) + + def to_get_request_information(self,request_configuration: Optional[RequestConfiguration] = None) -> RequestInformation: + """ + List Blobs + param request_configuration: Configuration for the request such as headers, query parameters, and middleware options. + Returns: RequestInformation + """ + request_info = RequestInformation(Method.GET, self.url_template, self.path_parameters) + request_info.configure(request_configuration) + request_info.headers.try_add("Accept", "application/json") + return request_info + + def with_url(self,raw_url: Optional[str] = None) -> BlobsRequestBuilder: + """ + Returns a request builder with the provided arbitrary URL. Using this method means any other path or query parameters are ignored. + param raw_url: The raw URL to use for the request builder. + Returns: BlobsRequestBuilder + """ + if not raw_url: + raise TypeError("raw_url cannot be null.") + return BlobsRequestBuilder(self.request_adapter, raw_url) + + @property + def delete_path(self) -> DeleteRequestBuilder: + """ + The deletePath property + """ + from .delete.delete_request_builder import DeleteRequestBuilder + + return DeleteRequestBuilder(self.request_adapter, self.path_parameters) + + @property + def upload(self) -> UploadRequestBuilder: + """ + The upload property + """ + from .upload.upload_request_builder import UploadRequestBuilder + + return UploadRequestBuilder(self.request_adapter, self.path_parameters) + + diff --git a/client/azure_storage_blob/blobs/delete/delete_request_builder.py b/client/azure_storage_blob/blobs/delete/delete_request_builder.py new file mode 100644 index 0000000..9f24500 --- /dev/null +++ b/client/azure_storage_blob/blobs/delete/delete_request_builder.py @@ -0,0 +1,76 @@ +from __future__ import annotations +from dataclasses import dataclass, field +from kiota_abstractions.base_request_builder import BaseRequestBuilder +from kiota_abstractions.base_request_configuration import RequestConfiguration +from kiota_abstractions.get_path_parameters import get_path_parameters +from kiota_abstractions.method import Method +from kiota_abstractions.request_adapter import RequestAdapter +from kiota_abstractions.request_information import RequestInformation +from kiota_abstractions.request_option import RequestOption +from kiota_abstractions.serialization import Parsable, ParsableFactory +from typing import Any, Callable, Dict, List, Optional, TYPE_CHECKING, Union + +if TYPE_CHECKING: + from ....models.h_t_t_p_validation_error import HTTPValidationError + +class DeleteRequestBuilder(BaseRequestBuilder): + """ + Builds and executes requests for operations under /azure_storage_blob/blobs/delete + """ + def __init__(self,request_adapter: RequestAdapter, path_parameters: Union[str, Dict[str, Any]]) -> None: + """ + Instantiates a new DeleteRequestBuilder and sets the default values. 
+ param path_parameters: The raw url or the url-template parameters for the request. + param request_adapter: The request adapter to use to execute the requests. + Returns: None + """ + super().__init__(request_adapter, "{+baseurl}/azure_storage_blob/blobs/delete?blob_name={blob_name}", path_parameters) + + async def delete(self,request_configuration: Optional[RequestConfiguration] = None) -> Optional[UntypedNode]: + """ + Delete Blob + param request_configuration: Configuration for the request such as headers, query parameters, and middleware options. + Returns: Optional[UntypedNode] + """ + request_info = self.to_delete_request_information( + request_configuration + ) + from ....models.h_t_t_p_validation_error import HTTPValidationError + + error_mapping: Dict[str, ParsableFactory] = { + "422": HTTPValidationError, + } + if not self.request_adapter: + raise Exception("Http core is null") + return await self.request_adapter.send_async(request_info, UntypedNode, error_mapping) + + def to_delete_request_information(self,request_configuration: Optional[RequestConfiguration] = None) -> RequestInformation: + """ + Delete Blob + param request_configuration: Configuration for the request such as headers, query parameters, and middleware options. + Returns: RequestInformation + """ + request_info = RequestInformation(Method.DELETE, self.url_template, self.path_parameters) + request_info.configure(request_configuration) + request_info.headers.try_add("Accept", "application/json") + return request_info + + def with_url(self,raw_url: Optional[str] = None) -> DeleteRequestBuilder: + """ + Returns a request builder with the provided arbitrary URL. Using this method means any other path or query parameters are ignored. + param raw_url: The raw URL to use for the request builder. 
+ Returns: DeleteRequestBuilder + """ + if not raw_url: + raise TypeError("raw_url cannot be null.") + return DeleteRequestBuilder(self.request_adapter, raw_url) + + @dataclass + class DeleteRequestBuilderDeleteQueryParameters(): + """ + Delete Blob + """ + blob_name: Optional[str] = None + + + diff --git a/client/azure_storage_blob/blobs/upload/upload_request_builder.py b/client/azure_storage_blob/blobs/upload/upload_request_builder.py new file mode 100644 index 0000000..2be5569 --- /dev/null +++ b/client/azure_storage_blob/blobs/upload/upload_request_builder.py @@ -0,0 +1,87 @@ +from __future__ import annotations +from dataclasses import dataclass, field +from kiota_abstractions.base_request_builder import BaseRequestBuilder +from kiota_abstractions.base_request_configuration import RequestConfiguration +from kiota_abstractions.get_path_parameters import get_path_parameters +from kiota_abstractions.method import Method +from kiota_abstractions.multipart_body import MultipartBody +from kiota_abstractions.request_adapter import RequestAdapter +from kiota_abstractions.request_information import RequestInformation +from kiota_abstractions.request_option import RequestOption +from kiota_abstractions.serialization import Parsable, ParsableFactory +from typing import Any, Callable, Dict, List, Optional, TYPE_CHECKING, Union + +if TYPE_CHECKING: + from ....models.blob_upload_response import BlobUploadResponse + from ....models.h_t_t_p_validation_error import HTTPValidationError + +class UploadRequestBuilder(BaseRequestBuilder): + """ + Builds and executes requests for operations under /azure_storage_blob/blobs/upload + """ + def __init__(self,request_adapter: RequestAdapter, path_parameters: Union[str, Dict[str, Any]]) -> None: + """ + Instantiates a new UploadRequestBuilder and sets the default values. + param path_parameters: The raw url or the url-template parameters for the request. + param request_adapter: The request adapter to use to execute the requests. + Returns: None + """ + super().__init__(request_adapter, "{+baseurl}/azure_storage_blob/blobs/upload?blob_name={blob_name}", path_parameters) + + async def post(self,body: Optional[MultipartBody] = None, request_configuration: Optional[RequestConfiguration] = None) -> Optional[BlobUploadResponse]: + """ + Upload Blob + param body: The request body + param request_configuration: Configuration for the request such as headers, query parameters, and middleware options. + Returns: Optional[BlobUploadResponse] + """ + if not body: + raise TypeError("body cannot be null.") + request_info = self.to_post_request_information( + body, request_configuration + ) + from ....models.h_t_t_p_validation_error import HTTPValidationError + + error_mapping: Dict[str, ParsableFactory] = { + "422": HTTPValidationError, + } + if not self.request_adapter: + raise Exception("Http core is null") + from ....models.blob_upload_response import BlobUploadResponse + + return await self.request_adapter.send_async(request_info, BlobUploadResponse, error_mapping) + + def to_post_request_information(self,body: Optional[MultipartBody] = None, request_configuration: Optional[RequestConfiguration] = None) -> RequestInformation: + """ + Upload Blob + param body: The request body + param request_configuration: Configuration for the request such as headers, query parameters, and middleware options. 
+ Returns: RequestInformation + """ + if not body: + raise TypeError("body cannot be null.") + request_info = RequestInformation(Method.POST, self.url_template, self.path_parameters) + request_info.configure(request_configuration) + request_info.headers.try_add("Accept", "application/json") + request_info.set_content_from_parsable(self.request_adapter, "multipart/form-data", body) + return request_info + + def with_url(self,raw_url: Optional[str] = None) -> UploadRequestBuilder: + """ + Returns a request builder with the provided arbitrary URL. Using this method means any other path or query parameters are ignored. + param raw_url: The raw URL to use for the request builder. + Returns: UploadRequestBuilder + """ + if not raw_url: + raise TypeError("raw_url cannot be null.") + return UploadRequestBuilder(self.request_adapter, raw_url) + + @dataclass + class UploadRequestBuilderPostQueryParameters(): + """ + Upload Blob + """ + blob_name: Optional[str] = None + + + diff --git a/client/azure_storage_queue/azure_storage_queue_request_builder.py b/client/azure_storage_queue/azure_storage_queue_request_builder.py new file mode 100644 index 0000000..af49e78 --- /dev/null +++ b/client/azure_storage_queue/azure_storage_queue_request_builder.py @@ -0,0 +1,42 @@ +from __future__ import annotations +from kiota_abstractions.base_request_builder import BaseRequestBuilder +from kiota_abstractions.get_path_parameters import get_path_parameters +from kiota_abstractions.request_adapter import RequestAdapter +from typing import Any, Callable, Dict, List, Optional, TYPE_CHECKING, Union + +if TYPE_CHECKING: + from .messages.messages_request_builder import MessagesRequestBuilder + from .queues.queues_request_builder import QueuesRequestBuilder + +class Azure_storage_queueRequestBuilder(BaseRequestBuilder): + """ + Builds and executes requests for operations under /azure_storage_queue + """ + def __init__(self,request_adapter: RequestAdapter, path_parameters: Union[str, Dict[str, Any]]) -> None: + """ + Instantiates a new Azure_storage_queueRequestBuilder and sets the default values. + param path_parameters: The raw url or the url-template parameters for the request. + param request_adapter: The request adapter to use to execute the requests. 
+ Returns: None + """ + super().__init__(request_adapter, "{+baseurl}/azure_storage_queue", path_parameters) + + @property + def messages(self) -> MessagesRequestBuilder: + """ + The messages property + """ + from .messages.messages_request_builder import MessagesRequestBuilder + + return MessagesRequestBuilder(self.request_adapter, self.path_parameters) + + @property + def queues(self) -> QueuesRequestBuilder: + """ + The queues property + """ + from .queues.queues_request_builder import QueuesRequestBuilder + + return QueuesRequestBuilder(self.request_adapter, self.path_parameters) + + diff --git a/client/azure_storage_queue/messages/messages_request_builder.py b/client/azure_storage_queue/messages/messages_request_builder.py new file mode 100644 index 0000000..588dee4 --- /dev/null +++ b/client/azure_storage_queue/messages/messages_request_builder.py @@ -0,0 +1,158 @@ +from __future__ import annotations +from dataclasses import dataclass, field +from kiota_abstractions.base_request_builder import BaseRequestBuilder +from kiota_abstractions.base_request_configuration import RequestConfiguration +from kiota_abstractions.get_path_parameters import get_path_parameters +from kiota_abstractions.method import Method +from kiota_abstractions.request_adapter import RequestAdapter +from kiota_abstractions.request_information import RequestInformation +from kiota_abstractions.request_option import RequestOption +from kiota_abstractions.serialization import Parsable, ParsableFactory +from typing import Any, Callable, Dict, List, Optional, TYPE_CHECKING, Union + +if TYPE_CHECKING: + from ...models.delete_message_request import DeleteMessageRequest + from ...models.delete_message_response import DeleteMessageResponse + from ...models.h_t_t_p_validation_error import HTTPValidationError + from ...models.send_message_request import SendMessageRequest + from ...models.send_message_response import SendMessageResponse + +class MessagesRequestBuilder(BaseRequestBuilder): + """ + Builds and executes requests for operations under /azure_storage_queue/messages + """ + def __init__(self,request_adapter: RequestAdapter, path_parameters: Union[str, Dict[str, Any]]) -> None: + """ + Instantiates a new MessagesRequestBuilder and sets the default values. + param path_parameters: The raw url or the url-template parameters for the request. + param request_adapter: The request adapter to use to execute the requests. + Returns: None + """ + super().__init__(request_adapter, "{+baseurl}/azure_storage_queue/messages?queue_name={queue_name}{&max_messages*}", path_parameters) + + async def delete(self,body: Optional[DeleteMessageRequest] = None, request_configuration: Optional[RequestConfiguration] = None) -> Optional[DeleteMessageResponse]: + """ + Delete Message + param body: The request body + param request_configuration: Configuration for the request such as headers, query parameters, and middleware options. 
+ Returns: Optional[DeleteMessageResponse] + """ + if not body: + raise TypeError("body cannot be null.") + request_info = self.to_delete_request_information( + body, request_configuration + ) + from ...models.h_t_t_p_validation_error import HTTPValidationError + + error_mapping: Dict[str, ParsableFactory] = { + "422": HTTPValidationError, + } + if not self.request_adapter: + raise Exception("Http core is null") + from ...models.delete_message_response import DeleteMessageResponse + + return await self.request_adapter.send_async(request_info, DeleteMessageResponse, error_mapping) + + async def get(self,request_configuration: Optional[RequestConfiguration] = None) -> Optional[UntypedNode]: + """ + Receive Messages + param request_configuration: Configuration for the request such as headers, query parameters, and middleware options. + Returns: Optional[UntypedNode] + """ + request_info = self.to_get_request_information( + request_configuration + ) + from ...models.h_t_t_p_validation_error import HTTPValidationError + + error_mapping: Dict[str, ParsableFactory] = { + "422": HTTPValidationError, + } + if not self.request_adapter: + raise Exception("Http core is null") + return await self.request_adapter.send_async(request_info, UntypedNode, error_mapping) + + async def post(self,body: Optional[SendMessageRequest] = None, request_configuration: Optional[RequestConfiguration] = None) -> Optional[SendMessageResponse]: + """ + Send Message + param body: The request body + param request_configuration: Configuration for the request such as headers, query parameters, and middleware options. + Returns: Optional[SendMessageResponse] + """ + if not body: + raise TypeError("body cannot be null.") + request_info = self.to_post_request_information( + body, request_configuration + ) + from ...models.h_t_t_p_validation_error import HTTPValidationError + + error_mapping: Dict[str, ParsableFactory] = { + "422": HTTPValidationError, + } + if not self.request_adapter: + raise Exception("Http core is null") + from ...models.send_message_response import SendMessageResponse + + return await self.request_adapter.send_async(request_info, SendMessageResponse, error_mapping) + + def to_delete_request_information(self,body: Optional[DeleteMessageRequest] = None, request_configuration: Optional[RequestConfiguration] = None) -> RequestInformation: + """ + Delete Message + param body: The request body + param request_configuration: Configuration for the request such as headers, query parameters, and middleware options. + Returns: RequestInformation + """ + if not body: + raise TypeError("body cannot be null.") + request_info = RequestInformation(Method.DELETE, '{+baseurl}/azure_storage_queue/messages', self.path_parameters) + request_info.configure(request_configuration) + request_info.headers.try_add("Accept", "application/json") + request_info.set_content_from_parsable(self.request_adapter, "application/json", body) + return request_info + + def to_get_request_information(self,request_configuration: Optional[RequestConfiguration] = None) -> RequestInformation: + """ + Receive Messages + param request_configuration: Configuration for the request such as headers, query parameters, and middleware options. 
+ Returns: RequestInformation + """ + request_info = RequestInformation(Method.GET, self.url_template, self.path_parameters) + request_info.configure(request_configuration) + request_info.headers.try_add("Accept", "application/json") + return request_info + + def to_post_request_information(self,body: Optional[SendMessageRequest] = None, request_configuration: Optional[RequestConfiguration] = None) -> RequestInformation: + """ + Send Message + param body: The request body + param request_configuration: Configuration for the request such as headers, query parameters, and middleware options. + Returns: RequestInformation + """ + if not body: + raise TypeError("body cannot be null.") + request_info = RequestInformation(Method.POST, '{+baseurl}/azure_storage_queue/messages', self.path_parameters) + request_info.configure(request_configuration) + request_info.headers.try_add("Accept", "application/json") + request_info.set_content_from_parsable(self.request_adapter, "application/json", body) + return request_info + + def with_url(self,raw_url: Optional[str] = None) -> MessagesRequestBuilder: + """ + Returns a request builder with the provided arbitrary URL. Using this method means any other path or query parameters are ignored. + param raw_url: The raw URL to use for the request builder. + Returns: MessagesRequestBuilder + """ + if not raw_url: + raise TypeError("raw_url cannot be null.") + return MessagesRequestBuilder(self.request_adapter, raw_url) + + @dataclass + class MessagesRequestBuilderGetQueryParameters(): + """ + Receive Messages + """ + max_messages: Optional[int] = None + + queue_name: Optional[str] = None + + + diff --git a/client/azure_storage_queue/queues/queues_request_builder.py b/client/azure_storage_queue/queues/queues_request_builder.py new file mode 100644 index 0000000..1724ed2 --- /dev/null +++ b/client/azure_storage_queue/queues/queues_request_builder.py @@ -0,0 +1,118 @@ +from __future__ import annotations +from kiota_abstractions.base_request_builder import BaseRequestBuilder +from kiota_abstractions.base_request_configuration import RequestConfiguration +from kiota_abstractions.get_path_parameters import get_path_parameters +from kiota_abstractions.method import Method +from kiota_abstractions.request_adapter import RequestAdapter +from kiota_abstractions.request_information import RequestInformation +from kiota_abstractions.request_option import RequestOption +from kiota_abstractions.serialization import Parsable, ParsableFactory +from typing import Any, Callable, Dict, List, Optional, TYPE_CHECKING, Union + +if TYPE_CHECKING: + from ...models.create_queue_request import CreateQueueRequest + from ...models.create_queue_response import CreateQueueResponse + from ...models.delete_queue_request import DeleteQueueRequest + from ...models.delete_queue_response import DeleteQueueResponse + from ...models.h_t_t_p_validation_error import HTTPValidationError + +class QueuesRequestBuilder(BaseRequestBuilder): + """ + Builds and executes requests for operations under /azure_storage_queue/queues + """ + def __init__(self,request_adapter: RequestAdapter, path_parameters: Union[str, Dict[str, Any]]) -> None: + """ + Instantiates a new QueuesRequestBuilder and sets the default values. + param path_parameters: The raw url or the url-template parameters for the request. + param request_adapter: The request adapter to use to execute the requests. 
+ Returns: None + """ + super().__init__(request_adapter, "{+baseurl}/azure_storage_queue/queues", path_parameters) + + async def delete(self,body: Optional[DeleteQueueRequest] = None, request_configuration: Optional[RequestConfiguration] = None) -> Optional[DeleteQueueResponse]: + """ + Delete Queue + param body: The request body + param request_configuration: Configuration for the request such as headers, query parameters, and middleware options. + Returns: Optional[DeleteQueueResponse] + """ + if not body: + raise TypeError("body cannot be null.") + request_info = self.to_delete_request_information( + body, request_configuration + ) + from ...models.h_t_t_p_validation_error import HTTPValidationError + + error_mapping: Dict[str, ParsableFactory] = { + "422": HTTPValidationError, + } + if not self.request_adapter: + raise Exception("Http core is null") + from ...models.delete_queue_response import DeleteQueueResponse + + return await self.request_adapter.send_async(request_info, DeleteQueueResponse, error_mapping) + + async def post(self,body: Optional[CreateQueueRequest] = None, request_configuration: Optional[RequestConfiguration] = None) -> Optional[CreateQueueResponse]: + """ + Create Queue + param body: The request body + param request_configuration: Configuration for the request such as headers, query parameters, and middleware options. + Returns: Optional[CreateQueueResponse] + """ + if not body: + raise TypeError("body cannot be null.") + request_info = self.to_post_request_information( + body, request_configuration + ) + from ...models.h_t_t_p_validation_error import HTTPValidationError + + error_mapping: Dict[str, ParsableFactory] = { + "422": HTTPValidationError, + } + if not self.request_adapter: + raise Exception("Http core is null") + from ...models.create_queue_response import CreateQueueResponse + + return await self.request_adapter.send_async(request_info, CreateQueueResponse, error_mapping) + + def to_delete_request_information(self,body: Optional[DeleteQueueRequest] = None, request_configuration: Optional[RequestConfiguration] = None) -> RequestInformation: + """ + Delete Queue + param body: The request body + param request_configuration: Configuration for the request such as headers, query parameters, and middleware options. + Returns: RequestInformation + """ + if not body: + raise TypeError("body cannot be null.") + request_info = RequestInformation(Method.DELETE, self.url_template, self.path_parameters) + request_info.configure(request_configuration) + request_info.headers.try_add("Accept", "application/json") + request_info.set_content_from_parsable(self.request_adapter, "application/json", body) + return request_info + + def to_post_request_information(self,body: Optional[CreateQueueRequest] = None, request_configuration: Optional[RequestConfiguration] = None) -> RequestInformation: + """ + Create Queue + param body: The request body + param request_configuration: Configuration for the request such as headers, query parameters, and middleware options. 
+ Returns: RequestInformation + """ + if not body: + raise TypeError("body cannot be null.") + request_info = RequestInformation(Method.POST, self.url_template, self.path_parameters) + request_info.configure(request_configuration) + request_info.headers.try_add("Accept", "application/json") + request_info.set_content_from_parsable(self.request_adapter, "application/json", body) + return request_info + + def with_url(self,raw_url: Optional[str] = None) -> QueuesRequestBuilder: + """ + Returns a request builder with the provided arbitrary URL. Using this method means any other path or query parameters are ignored. + param raw_url: The raw URL to use for the request builder. + Returns: QueuesRequestBuilder + """ + if not raw_url: + raise TypeError("raw_url cannot be null.") + return QueuesRequestBuilder(self.request_adapter, raw_url) + + diff --git a/client/kiota-lock.json b/client/kiota-lock.json index 05b8e6b..60c4fe6 100644 --- a/client/kiota-lock.json +++ b/client/kiota-lock.json @@ -1,5 +1,5 @@ { - "descriptionHash": "7B099024A20A8AF46F71A50244FAA38CFA626E063643DD3BFBC0B21CDF20D04C6F376C3F876E344548EA40E667F0F6F00EF4C57F94AB17470B15571B561A97E3", + "descriptionHash": "33294584D9A9F2D9EF9E217027985AFC393F4AFDDF50F96B66D509AC6FEDAB03206496E6355858C5852801AEA17968899B56CA2A90B7BC248BC5D322CDEA5FF4", "descriptionLocation": "../specs/openapi.json", "lockFileVersion": "1.0.0", "kiotaVersion": "1.13.0", diff --git a/client/models/chat_completion_with_vision_response.py b/client/models/chat_completion_with_vision_response.py new file mode 100644 index 0000000..f7c2f25 --- /dev/null +++ b/client/models/chat_completion_with_vision_response.py @@ -0,0 +1,46 @@ +from __future__ import annotations +from dataclasses import dataclass, field +from kiota_abstractions.serialization import AdditionalDataHolder, Parsable, ParseNode, SerializationWriter +from typing import Any, Callable, Dict, List, Optional, TYPE_CHECKING, Union + +@dataclass +class ChatCompletionWithVisionResponse(AdditionalDataHolder, Parsable): + # Stores additional data not described in the OpenAPI description found when deserializing. Can be used for serialization as well. 
+ additional_data: Dict[str, Any] = field(default_factory=dict) + + # The content property + content: Optional[str] = None + + @staticmethod + def create_from_discriminator_value(parse_node: Optional[ParseNode] = None) -> ChatCompletionWithVisionResponse: + """ + Creates a new instance of the appropriate class based on discriminator value + param parse_node: The parse node to use to read the discriminator value and create the object + Returns: ChatCompletionWithVisionResponse + """ + if not parse_node: + raise TypeError("parse_node cannot be null.") + return ChatCompletionWithVisionResponse() + + def get_field_deserializers(self,) -> Dict[str, Callable[[ParseNode], None]]: + """ + The deserialization information for the current model + Returns: Dict[str, Callable[[ParseNode], None]] + """ + fields: Dict[str, Callable[[Any], None]] = { + "content": lambda n : setattr(self, 'content', n.get_str_value()), + } + return fields + + def serialize(self,writer: SerializationWriter) -> None: + """ + Serializes information the current object + param writer: Serialization writer to use to serialize this model + Returns: None + """ + if not writer: + raise TypeError("writer cannot be null.") + writer.write_str_value("content", self.content) + writer.write_additional_data_value(self.additional_data) + + diff --git a/client/models/create_queue_request.py b/client/models/create_queue_request.py new file mode 100644 index 0000000..67813b0 --- /dev/null +++ b/client/models/create_queue_request.py @@ -0,0 +1,46 @@ +from __future__ import annotations +from dataclasses import dataclass, field +from kiota_abstractions.serialization import AdditionalDataHolder, Parsable, ParseNode, SerializationWriter +from typing import Any, Callable, Dict, List, Optional, TYPE_CHECKING, Union + +@dataclass +class CreateQueueRequest(AdditionalDataHolder, Parsable): + # Stores additional data not described in the OpenAPI description found when deserializing. Can be used for serialization as well. 
+ additional_data: Dict[str, Any] = field(default_factory=dict) + + # The queue_name property + queue_name: Optional[str] = None + + @staticmethod + def create_from_discriminator_value(parse_node: Optional[ParseNode] = None) -> CreateQueueRequest: + """ + Creates a new instance of the appropriate class based on discriminator value + param parse_node: The parse node to use to read the discriminator value and create the object + Returns: CreateQueueRequest + """ + if not parse_node: + raise TypeError("parse_node cannot be null.") + return CreateQueueRequest() + + def get_field_deserializers(self,) -> Dict[str, Callable[[ParseNode], None]]: + """ + The deserialization information for the current model + Returns: Dict[str, Callable[[ParseNode], None]] + """ + fields: Dict[str, Callable[[Any], None]] = { + "queue_name": lambda n : setattr(self, 'queue_name', n.get_str_value()), + } + return fields + + def serialize(self,writer: SerializationWriter) -> None: + """ + Serializes information the current object + param writer: Serialization writer to use to serialize this model + Returns: None + """ + if not writer: + raise TypeError("writer cannot be null.") + writer.write_str_value("queue_name", self.queue_name) + writer.write_additional_data_value(self.additional_data) + + diff --git a/client/models/create_queue_response.py b/client/models/create_queue_response.py new file mode 100644 index 0000000..ef3b422 --- /dev/null +++ b/client/models/create_queue_response.py @@ -0,0 +1,46 @@ +from __future__ import annotations +from dataclasses import dataclass, field +from kiota_abstractions.serialization import AdditionalDataHolder, Parsable, ParseNode, SerializationWriter +from typing import Any, Callable, Dict, List, Optional, TYPE_CHECKING, Union + +@dataclass +class CreateQueueResponse(AdditionalDataHolder, Parsable): + # Stores additional data not described in the OpenAPI description found when deserializing. Can be used for serialization as well. 
+ additional_data: Dict[str, Any] = field(default_factory=dict) + + # The queue_name property + queue_name: Optional[str] = None + + @staticmethod + def create_from_discriminator_value(parse_node: Optional[ParseNode] = None) -> CreateQueueResponse: + """ + Creates a new instance of the appropriate class based on discriminator value + param parse_node: The parse node to use to read the discriminator value and create the object + Returns: CreateQueueResponse + """ + if not parse_node: + raise TypeError("parse_node cannot be null.") + return CreateQueueResponse() + + def get_field_deserializers(self,) -> Dict[str, Callable[[ParseNode], None]]: + """ + The deserialization information for the current model + Returns: Dict[str, Callable[[ParseNode], None]] + """ + fields: Dict[str, Callable[[Any], None]] = { + "queue_name": lambda n : setattr(self, 'queue_name', n.get_str_value()), + } + return fields + + def serialize(self,writer: SerializationWriter) -> None: + """ + Serializes information the current object + param writer: Serialization writer to use to serialize this model + Returns: None + """ + if not writer: + raise TypeError("writer cannot be null.") + writer.write_str_value("queue_name", self.queue_name) + writer.write_additional_data_value(self.additional_data) + + diff --git a/client/models/delete_message_request.py b/client/models/delete_message_request.py new file mode 100644 index 0000000..c80bdc7 --- /dev/null +++ b/client/models/delete_message_request.py @@ -0,0 +1,54 @@ +from __future__ import annotations +from dataclasses import dataclass, field +from kiota_abstractions.serialization import AdditionalDataHolder, Parsable, ParseNode, SerializationWriter +from typing import Any, Callable, Dict, List, Optional, TYPE_CHECKING, Union + +@dataclass +class DeleteMessageRequest(AdditionalDataHolder, Parsable): + # Stores additional data not described in the OpenAPI description found when deserializing. Can be used for serialization as well. 
+ additional_data: Dict[str, Any] = field(default_factory=dict) + + # The message_id property + message_id: Optional[str] = None + # The pop_receipt property + pop_receipt: Optional[str] = None + # The queue_name property + queue_name: Optional[str] = None + + @staticmethod + def create_from_discriminator_value(parse_node: Optional[ParseNode] = None) -> DeleteMessageRequest: + """ + Creates a new instance of the appropriate class based on discriminator value + param parse_node: The parse node to use to read the discriminator value and create the object + Returns: DeleteMessageRequest + """ + if not parse_node: + raise TypeError("parse_node cannot be null.") + return DeleteMessageRequest() + + def get_field_deserializers(self,) -> Dict[str, Callable[[ParseNode], None]]: + """ + The deserialization information for the current model + Returns: Dict[str, Callable[[ParseNode], None]] + """ + fields: Dict[str, Callable[[Any], None]] = { + "message_id": lambda n : setattr(self, 'message_id', n.get_str_value()), + "pop_receipt": lambda n : setattr(self, 'pop_receipt', n.get_str_value()), + "queue_name": lambda n : setattr(self, 'queue_name', n.get_str_value()), + } + return fields + + def serialize(self,writer: SerializationWriter) -> None: + """ + Serializes information the current object + param writer: Serialization writer to use to serialize this model + Returns: None + """ + if not writer: + raise TypeError("writer cannot be null.") + writer.write_str_value("message_id", self.message_id) + writer.write_str_value("pop_receipt", self.pop_receipt) + writer.write_str_value("queue_name", self.queue_name) + writer.write_additional_data_value(self.additional_data) + + diff --git a/client/models/delete_message_response.py b/client/models/delete_message_response.py new file mode 100644 index 0000000..f8cab9e --- /dev/null +++ b/client/models/delete_message_response.py @@ -0,0 +1,42 @@ +from __future__ import annotations +from dataclasses import dataclass, field +from kiota_abstractions.serialization import AdditionalDataHolder, Parsable, ParseNode, SerializationWriter +from typing import Any, Callable, Dict, List, Optional, TYPE_CHECKING, Union + +@dataclass +class DeleteMessageResponse(AdditionalDataHolder, Parsable): + # Stores additional data not described in the OpenAPI description found when deserializing. Can be used for serialization as well. 
+ additional_data: Dict[str, Any] = field(default_factory=dict) + + + @staticmethod + def create_from_discriminator_value(parse_node: Optional[ParseNode] = None) -> DeleteMessageResponse: + """ + Creates a new instance of the appropriate class based on discriminator value + param parse_node: The parse node to use to read the discriminator value and create the object + Returns: DeleteMessageResponse + """ + if not parse_node: + raise TypeError("parse_node cannot be null.") + return DeleteMessageResponse() + + def get_field_deserializers(self,) -> Dict[str, Callable[[ParseNode], None]]: + """ + The deserialization information for the current model + Returns: Dict[str, Callable[[ParseNode], None]] + """ + fields: Dict[str, Callable[[Any], None]] = { + } + return fields + + def serialize(self,writer: SerializationWriter) -> None: + """ + Serializes information the current object + param writer: Serialization writer to use to serialize this model + Returns: None + """ + if not writer: + raise TypeError("writer cannot be null.") + writer.write_additional_data_value(self.additional_data) + + diff --git a/client/models/delete_queue_request.py b/client/models/delete_queue_request.py new file mode 100644 index 0000000..d81ef87 --- /dev/null +++ b/client/models/delete_queue_request.py @@ -0,0 +1,46 @@ +from __future__ import annotations +from dataclasses import dataclass, field +from kiota_abstractions.serialization import AdditionalDataHolder, Parsable, ParseNode, SerializationWriter +from typing import Any, Callable, Dict, List, Optional, TYPE_CHECKING, Union + +@dataclass +class DeleteQueueRequest(AdditionalDataHolder, Parsable): + # Stores additional data not described in the OpenAPI description found when deserializing. Can be used for serialization as well. 
+ additional_data: Dict[str, Any] = field(default_factory=dict) + + # The queue_name property + queue_name: Optional[str] = None + + @staticmethod + def create_from_discriminator_value(parse_node: Optional[ParseNode] = None) -> DeleteQueueRequest: + """ + Creates a new instance of the appropriate class based on discriminator value + param parse_node: The parse node to use to read the discriminator value and create the object + Returns: DeleteQueueRequest + """ + if not parse_node: + raise TypeError("parse_node cannot be null.") + return DeleteQueueRequest() + + def get_field_deserializers(self,) -> Dict[str, Callable[[ParseNode], None]]: + """ + The deserialization information for the current model + Returns: Dict[str, Callable[[ParseNode], None]] + """ + fields: Dict[str, Callable[[Any], None]] = { + "queue_name": lambda n : setattr(self, 'queue_name', n.get_str_value()), + } + return fields + + def serialize(self,writer: SerializationWriter) -> None: + """ + Serializes information the current object + param writer: Serialization writer to use to serialize this model + Returns: None + """ + if not writer: + raise TypeError("writer cannot be null.") + writer.write_str_value("queue_name", self.queue_name) + writer.write_additional_data_value(self.additional_data) + + diff --git a/client/models/delete_queue_response.py b/client/models/delete_queue_response.py new file mode 100644 index 0000000..f1d451c --- /dev/null +++ b/client/models/delete_queue_response.py @@ -0,0 +1,46 @@ +from __future__ import annotations +from dataclasses import dataclass, field +from kiota_abstractions.serialization import AdditionalDataHolder, Parsable, ParseNode, SerializationWriter +from typing import Any, Callable, Dict, List, Optional, TYPE_CHECKING, Union + +@dataclass +class DeleteQueueResponse(AdditionalDataHolder, Parsable): + # Stores additional data not described in the OpenAPI description found when deserializing. Can be used for serialization as well. 
+ additional_data: Dict[str, Any] = field(default_factory=dict) + + # The queue_name property + queue_name: Optional[str] = None + + @staticmethod + def create_from_discriminator_value(parse_node: Optional[ParseNode] = None) -> DeleteQueueResponse: + """ + Creates a new instance of the appropriate class based on discriminator value + param parse_node: The parse node to use to read the discriminator value and create the object + Returns: DeleteQueueResponse + """ + if not parse_node: + raise TypeError("parse_node cannot be null.") + return DeleteQueueResponse() + + def get_field_deserializers(self,) -> Dict[str, Callable[[ParseNode], None]]: + """ + The deserialization information for the current model + Returns: Dict[str, Callable[[ParseNode], None]] + """ + fields: Dict[str, Callable[[Any], None]] = { + "queue_name": lambda n : setattr(self, 'queue_name', n.get_str_value()), + } + return fields + + def serialize(self,writer: SerializationWriter) -> None: + """ + Serializes information the current object + param writer: Serialization writer to use to serialize this model + Returns: None + """ + if not writer: + raise TypeError("writer cannot be null.") + writer.write_str_value("queue_name", self.queue_name) + writer.write_additional_data_value(self.additional_data) + + diff --git a/client/models/send_message_request.py b/client/models/send_message_request.py new file mode 100644 index 0000000..8caa751 --- /dev/null +++ b/client/models/send_message_request.py @@ -0,0 +1,50 @@ +from __future__ import annotations +from dataclasses import dataclass, field +from kiota_abstractions.serialization import AdditionalDataHolder, Parsable, ParseNode, SerializationWriter +from typing import Any, Callable, Dict, List, Optional, TYPE_CHECKING, Union + +@dataclass +class SendMessageRequest(AdditionalDataHolder, Parsable): + # Stores additional data not described in the OpenAPI description found when deserializing. Can be used for serialization as well. 
+ additional_data: Dict[str, Any] = field(default_factory=dict) + + # The message property + message: Optional[str] = None + # The queue_name property + queue_name: Optional[str] = None + + @staticmethod + def create_from_discriminator_value(parse_node: Optional[ParseNode] = None) -> SendMessageRequest: + """ + Creates a new instance of the appropriate class based on discriminator value + param parse_node: The parse node to use to read the discriminator value and create the object + Returns: SendMessageRequest + """ + if not parse_node: + raise TypeError("parse_node cannot be null.") + return SendMessageRequest() + + def get_field_deserializers(self,) -> Dict[str, Callable[[ParseNode], None]]: + """ + The deserialization information for the current model + Returns: Dict[str, Callable[[ParseNode], None]] + """ + fields: Dict[str, Callable[[Any], None]] = { + "message": lambda n : setattr(self, 'message', n.get_str_value()), + "queue_name": lambda n : setattr(self, 'queue_name', n.get_str_value()), + } + return fields + + def serialize(self,writer: SerializationWriter) -> None: + """ + Serializes information the current object + param writer: Serialization writer to use to serialize this model + Returns: None + """ + if not writer: + raise TypeError("writer cannot be null.") + writer.write_str_value("message", self.message) + writer.write_str_value("queue_name", self.queue_name) + writer.write_additional_data_value(self.additional_data) + + diff --git a/client/models/send_message_response.py b/client/models/send_message_response.py new file mode 100644 index 0000000..9af858c --- /dev/null +++ b/client/models/send_message_response.py @@ -0,0 +1,42 @@ +from __future__ import annotations +from dataclasses import dataclass, field +from kiota_abstractions.serialization import AdditionalDataHolder, Parsable, ParseNode, SerializationWriter +from typing import Any, Callable, Dict, List, Optional, TYPE_CHECKING, Union + +@dataclass +class SendMessageResponse(AdditionalDataHolder, Parsable): + # Stores additional data not described in the OpenAPI description found when deserializing. Can be used for serialization as well. 
+ additional_data: Dict[str, Any] = field(default_factory=dict) + + + @staticmethod + def create_from_discriminator_value(parse_node: Optional[ParseNode] = None) -> SendMessageResponse: + """ + Creates a new instance of the appropriate class based on discriminator value + param parse_node: The parse node to use to read the discriminator value and create the object + Returns: SendMessageResponse + """ + if not parse_node: + raise TypeError("parse_node cannot be null.") + return SendMessageResponse() + + def get_field_deserializers(self,) -> Dict[str, Callable[[ParseNode], None]]: + """ + The deserialization information for the current model + Returns: Dict[str, Callable[[ParseNode], None]] + """ + fields: Dict[str, Callable[[Any], None]] = { + } + return fields + + def serialize(self,writer: SerializationWriter) -> None: + """ + Serializes information the current object + param writer: Serialization writer to use to serialize this model + Returns: None + """ + if not writer: + raise TypeError("writer cannot be null.") + writer.write_additional_data_value(self.additional_data) + + diff --git a/specs/openapi.json b/specs/openapi.json index 4db434f..f27b6e9 100644 --- a/specs/openapi.json +++ b/specs/openapi.json @@ -8,12 +8,209 @@ "url": "https://news.microsoft.com/wp-content/uploads/prod/2022/05/Microsoft-logo_rgb_c-gray-1024x459.png" } }, - "servers": [ - { - "url": "http://localhost:8000" - } - ], "paths": { + "/azure_ai_document_intelligence/analyze_document/": { + "post": { + "tags": [ + "azure_ai_document_intelligence" + ], + "summary": "Analyze Document", + "operationId": "analyze_document_azure_ai_document_intelligence_analyze_document__post", + "requestBody": { + "content": { + "multipart/form-data": { + "schema": { + "$ref": "#/components/schemas/Body_analyze_document_azure_ai_document_intelligence_analyze_document__post" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/AnalyzeDocumentResponse" + } + } + } + }, + "404": { + "description": "Not found" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + } + } + }, + "/azure_ai_vision/image/analyze/": { + "post": { + "tags": [ + "azure_ai_vision" + ], + "summary": "Analyze Image", + "operationId": "analyze_image_azure_ai_vision_image_analyze__post", + "requestBody": { + "content": { + "multipart/form-data": { + "schema": { + "$ref": "#/components/schemas/Body_analyze_image_azure_ai_vision_image_analyze__post" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ImageAnalysisResponse" + } + } + } + }, + "404": { + "description": "Not found" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + } + } + }, + "/azure_ai_vision/image/vectorize/": { + "post": { + "tags": [ + "azure_ai_vision" + ], + "summary": "Vectorize Image", + "operationId": "vectorize_image_azure_ai_vision_image_vectorize__post", + "requestBody": { + "content": { + "multipart/form-data": { + "schema": { + "$ref": "#/components/schemas/Body_vectorize_image_azure_ai_vision_image_vectorize__post" + } + } + }, + "required": true + }, + "responses": { + 
"200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": {} + } + } + }, + "404": { + "description": "Not found" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + } + } + }, + "/azure_event_grid/event_grid_event/": { + "post": { + "tags": [ + "azure_event_grid" + ], + "summary": "Send Event Grid Event", + "operationId": "send_event_grid_event_azure_event_grid_event_grid_event__post", + "parameters": [ + { + "name": "data", + "in": "query", + "required": false, + "schema": { + "default": { + "team": "azure-sdk" + }, + "title": "Data" + } + }, + { + "name": "subject", + "in": "query", + "required": false, + "schema": { + "default": "Door1", + "title": "Subject" + } + }, + { + "name": "event_type", + "in": "query", + "required": false, + "schema": { + "default": "Azure.Sdk.Demo", + "title": "Event Type" + } + }, + { + "name": "data_version", + "in": "query", + "required": false, + "schema": { + "default": "2.0", + "title": "Data Version" + } + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": {} + } + } + }, + "404": { + "description": "Not found" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + } + } + }, "/azure_openai/chat_completions/": { "post": { "tags": [ @@ -58,22 +255,44 @@ } } }, - "/document_intelligence/analyze_document/": { + "/azure_openai/chat_completions_with_vision/": { "post": { "tags": [ - "document_intelligence" + "azure_openai" + ], + "summary": "Create Chat Completions With Vision", + "operationId": "create_chat_completions_with_vision_azure_openai_chat_completions_with_vision__post", + "parameters": [ + { + "name": "system_prompt", + "in": "query", + "required": false, + "schema": { + "type": "string", + "default": "You are a helpful assistant.", + "title": "System Prompt" + } + }, + { + "name": "user_prompt", + "in": "query", + "required": false, + "schema": { + "type": "string", + "default": "Please explain the attached image.", + "title": "User Prompt" + } + } ], - "summary": "Analyze Document", - "operationId": "analyze_document_document_intelligence_analyze_document__post", "requestBody": { + "required": true, "content": { "multipart/form-data": { "schema": { - "$ref": "#/components/schemas/Body_analyze_document_document_intelligence_analyze_document__post" + "$ref": "#/components/schemas/Body_create_chat_completions_with_vision_azure_openai_chat_completions_with_vision__post" } } - }, - "required": true + } }, "responses": { "200": { @@ -81,7 +300,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/AnalyzeDocumentResponse" + "$ref": "#/components/schemas/ChatCompletionWithVisionResponse" } } } @@ -102,13 +321,13 @@ } } }, - "/azure_storage/blobs/upload/": { + "/azure_storage_blob/blobs/upload/": { "post": { "tags": [ - "azure_storage" + "azure_storage_blob" ], "summary": "Upload Blob", - "operationId": "upload_blob_azure_storage_blobs_upload__post", + "operationId": "upload_blob_azure_storage_blob_blobs_upload__post", "parameters": [ { "name": "blob_name", @@ -125,7 +344,7 @@ "content": { "multipart/form-data": { "schema": { - "$ref": "#/components/schemas/Body_upload_blob_azure_storage_blobs_upload__post" + "$ref": 
"#/components/schemas/Body_upload_blob_azure_storage_blob_blobs_upload__post" } } } @@ -157,13 +376,13 @@ } } }, - "/azure_storage/blobs/delete/": { + "/azure_storage_blob/blobs/delete/": { "delete": { "tags": [ - "azure_storage" + "azure_storage_blob" ], "summary": "Delete Blob", - "operationId": "delete_blob_azure_storage_blobs_delete__delete", + "operationId": "delete_blob_azure_storage_blob_blobs_delete__delete", "parameters": [ { "name": "blob_name", @@ -200,13 +419,13 @@ } } }, - "/azure_storage/blobs/": { + "/azure_storage_blob/blobs/": { "get": { "tags": [ - "azure_storage" + "azure_storage_blob" ], "summary": "List Blobs", - "operationId": "list_blobs_azure_storage_blobs__get", + "operationId": "list_blobs_azure_storage_blob_blobs__get", "responses": { "200": { "description": "Successful Response", @@ -222,18 +441,18 @@ } } }, - "/azure_ai_vision/image/analyze/": { + "/azure_storage_queue/queues/": { "post": { "tags": [ - "azure_ai_vision" + "azure_storage_queue" ], - "summary": "Analyze Image", - "operationId": "analyze_image_azure_ai_vision_image_analyze__post", + "summary": "Create Queue", + "operationId": "create_queue_azure_storage_queue_queues__post", "requestBody": { "content": { - "multipart/form-data": { + "application/json": { "schema": { - "$ref": "#/components/schemas/Body_analyze_image_azure_ai_vision_image_analyze__post" + "$ref": "#/components/schemas/CreateQueueRequest" } } }, @@ -245,7 +464,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/ImageAnalysisResponse" + "$ref": "#/components/schemas/CreateQueueResponse" } } } @@ -264,20 +483,18 @@ } } } - } - }, - "/azure_ai_vision/image/vectorize/": { - "post": { + }, + "delete": { "tags": [ - "azure_ai_vision" + "azure_storage_queue" ], - "summary": "Vectorize Image", - "operationId": "vectorize_image_azure_ai_vision_image_vectorize__post", + "summary": "Delete Queue", + "operationId": "delete_queue_azure_storage_queue_queues__delete", "requestBody": { "content": { - "multipart/form-data": { + "application/json": { "schema": { - "$ref": "#/components/schemas/Body_vectorize_image_azure_ai_vision_image_vectorize__post" + "$ref": "#/components/schemas/DeleteQueueRequest" } } }, @@ -288,7 +505,9 @@ "description": "Successful Response", "content": { "application/json": { - "schema": {} + "schema": { + "$ref": "#/components/schemas/DeleteQueueResponse" + } } } }, @@ -308,50 +527,73 @@ } } }, - "/azure_event_grid/event_grid_event/": { + "/azure_storage_queue/messages/": { "post": { "tags": [ - "azure_event_grid" + "azure_storage_queue" ], - "summary": "Send Event Grid Event", - "operationId": "send_event_grid_event_azure_event_grid_event_grid_event__post", - "parameters": [ - { - "name": "data", - "in": "query", - "required": false, - "schema": { - "default": { - "team": "azure-sdk" - }, - "title": "Data" + "summary": "Send Message", + "operationId": "send_message_azure_storage_queue_messages__post", + "requestBody": { + "required": true, + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/SendMessageRequest" + } } - }, - { - "name": "subject", - "in": "query", - "required": false, - "schema": { - "default": "Door1", - "title": "Subject" + } + }, + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/SendMessageResponse" + } + } } }, + "404": { + "description": "Not found" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { 
+ "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + } + }, + "get": { + "tags": [ + "azure_storage_queue" + ], + "summary": "Receive Messages", + "operationId": "receive_messages_azure_storage_queue_messages__get", + "parameters": [ { - "name": "event_type", + "name": "queue_name", "in": "query", - "required": false, + "required": true, "schema": { - "default": "Azure.Sdk.Demo", - "title": "Event Type" + "type": "string", + "title": "Queue Name" } }, { - "name": "data_version", + "name": "max_messages", "in": "query", "required": false, "schema": { - "default": "2.0", - "title": "Data Version" + "type": "integer", + "default": 1, + "title": "Max Messages" } } ], @@ -378,6 +620,48 @@ } } } + }, + "delete": { + "tags": [ + "azure_storage_queue" + ], + "summary": "Delete Message", + "operationId": "delete_message_azure_storage_queue_messages__delete", + "requestBody": { + "required": true, + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/DeleteMessageRequest" + } + } + } + }, + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/DeleteMessageResponse" + } + } + } + }, + "404": { + "description": "Not found" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + } } } }, @@ -409,7 +693,7 @@ ], "title": "BlobUploadResponse" }, - "Body_analyze_document_document_intelligence_analyze_document__post": { + "Body_analyze_document_azure_ai_document_intelligence_analyze_document__post": { "properties": { "file": { "type": "string", @@ -421,7 +705,7 @@ "required": [ "file" ], - "title": "Body_analyze_document_document_intelligence_analyze_document__post" + "title": "Body_analyze_document_azure_ai_document_intelligence_analyze_document__post" }, "Body_analyze_image_azure_ai_vision_image_analyze__post": { "properties": { @@ -437,7 +721,21 @@ ], "title": "Body_analyze_image_azure_ai_vision_image_analyze__post" }, - "Body_upload_blob_azure_storage_blobs_upload__post": { + "Body_create_chat_completions_with_vision_azure_openai_chat_completions_with_vision__post": { + "properties": { + "file": { + "type": "string", + "format": "binary", + "title": "File" + } + }, + "type": "object", + "required": [ + "file" + ], + "title": "Body_create_chat_completions_with_vision_azure_openai_chat_completions_with_vision__post" + }, + "Body_upload_blob_azure_storage_blob_blobs_upload__post": { "properties": { "file": { "type": "string", @@ -449,7 +747,7 @@ "required": [ "file" ], - "title": "Body_upload_blob_azure_storage_blobs_upload__post" + "title": "Body_upload_blob_azure_storage_blob_blobs_upload__post" }, "Body_vectorize_image_azure_ai_vision_image_vectorize__post": { "properties": { @@ -496,6 +794,99 @@ ], "title": "ChatCompletionResponse" }, + "ChatCompletionWithVisionResponse": { + "properties": { + "content": { + "type": "string", + "title": "Content" + } + }, + "type": "object", + "required": [ + "content" + ], + "title": "ChatCompletionWithVisionResponse" + }, + "CreateQueueRequest": { + "properties": { + "queue_name": { + "type": "string", + "title": "Queue Name" + } + }, + "type": "object", + "required": [ + "queue_name" + ], + "title": "CreateQueueRequest" + }, + "CreateQueueResponse": { + "properties": { + "queue_name": { + "type": "string", + "title": "Queue Name" + } + }, + "type": "object", + "required": [ + "queue_name" 
+ ], + "title": "CreateQueueResponse" + }, + "DeleteMessageRequest": { + "properties": { + "queue_name": { + "type": "string", + "title": "Queue Name" + }, + "message_id": { + "type": "string", + "title": "Message Id" + }, + "pop_receipt": { + "type": "string", + "title": "Pop Receipt" + } + }, + "type": "object", + "required": [ + "queue_name", + "message_id", + "pop_receipt" + ], + "title": "DeleteMessageRequest" + }, + "DeleteMessageResponse": { + "properties": {}, + "type": "object", + "title": "DeleteMessageResponse" + }, + "DeleteQueueRequest": { + "properties": { + "queue_name": { + "type": "string", + "title": "Queue Name" + } + }, + "type": "object", + "required": [ + "queue_name" + ], + "title": "DeleteQueueRequest" + }, + "DeleteQueueResponse": { + "properties": { + "queue_name": { + "type": "string", + "title": "Queue Name" + } + }, + "type": "object", + "required": [ + "queue_name" + ], + "title": "DeleteQueueResponse" + }, "HTTPValidationError": { "properties": { "detail": { @@ -522,6 +913,29 @@ ], "title": "ImageAnalysisResponse" }, + "SendMessageRequest": { + "properties": { + "queue_name": { + "type": "string", + "title": "Queue Name" + }, + "message": { + "type": "string", + "title": "Message" + } + }, + "type": "object", + "required": [ + "queue_name", + "message" + ], + "title": "SendMessageRequest" + }, + "SendMessageResponse": { + "properties": {}, + "type": "object", + "title": "SendMessageResponse" + }, "ValidationError": { "properties": { "loc": { From 1f100a782b0526fa50a51338c72f5cf26b183a0e Mon Sep 17 00:00:00 2001 From: ks6088ts Date: Tue, 7 May 2024 23:07:24 +0900 Subject: [PATCH 6/8] refactor frontend --- frontend/entrypoint.py | 36 ++++++++------------- frontend/solutions/azure_storage.py | 2 +- frontend/solutions/document_intelligence.py | 2 +- main.py | 10 +----- 4 files changed, 16 insertions(+), 34 deletions(-) diff --git a/frontend/entrypoint.py b/frontend/entrypoint.py index 1894a73..ff390f9 100644 --- a/frontend/entrypoint.py +++ b/frontend/entrypoint.py @@ -7,32 +7,22 @@ def start( - solution_type: SolutionType, + solution_name: str, backend_url: str, log_level: int, ) -> None: - if solution_type == SolutionType.SANDBOX: - return sandbox.start( - backend_url=backend_url, - log_level=log_level, - ) - if solution_type == SolutionType.TRANSCRIPTION: - return transcription.start( - backend_url=backend_url, - log_level=log_level, - ) - if solution_type == SolutionType.DOCUMENT_INTELLIGENCE: - return document_intelligence.start( - backend_url=backend_url, - log_level=log_level, - ) - if solution_type == SolutionType.AZURE_STORAGE: - return azure_storage.start( - backend_url=backend_url, - log_level=log_level, - ) - if solution_type == SolutionType.AZURE_AI_VISION: - return azure_ai_vision.start( + try: + solutions = { + SolutionType.SANDBOX.value: sandbox.start, + SolutionType.TRANSCRIPTION.value: transcription.start, + SolutionType.DOCUMENT_INTELLIGENCE.value: document_intelligence.start, + SolutionType.AZURE_STORAGE.value: azure_storage.start, + SolutionType.AZURE_AI_VISION.value: azure_ai_vision.start, + } + return solutions[solution_name.upper()]( backend_url=backend_url, log_level=log_level, ) + except KeyError: + logger.error(f"Invalid solution name: {solution_name}, please choose one of {list(SolutionType)}") + return diff --git a/frontend/solutions/azure_storage.py b/frontend/solutions/azure_storage.py index 32f120c..76584a1 100644 --- a/frontend/solutions/azure_storage.py +++ b/frontend/solutions/azure_storage.py @@ -40,7 +40,7 @@ def start( 
bytes_data = file_uploader.getvalue() response = asyncio.run( http_post_file( - url=urljoin(base=backend_url, url=f"/azure_storage/blobs/upload/?blob_name={blob_name}"), + url=urljoin(base=backend_url, url=f"/azure_storage_blob/blobs/upload/?blob_name={blob_name}"), data_bytes_io=BytesIO(bytes_data), ) ) diff --git a/frontend/solutions/document_intelligence.py b/frontend/solutions/document_intelligence.py index dbeac22..c149fd2 100644 --- a/frontend/solutions/document_intelligence.py +++ b/frontend/solutions/document_intelligence.py @@ -36,7 +36,7 @@ def start( bytes_data = file_uploader.getvalue() response = asyncio.run( http_post_file( - url=urljoin(base=backend_url, url="/document_intelligence/analyze_document/"), + url=urljoin(base=backend_url, url="/azure_ai_document_intelligence/analyze_document/"), data_bytes_io=BytesIO(bytes_data), ) ) diff --git a/main.py b/main.py index c5ed1d3..b69bfd0 100644 --- a/main.py +++ b/main.py @@ -46,18 +46,10 @@ def frontend( debug: Annotated[bool, typer.Option(help="Enable debug mode")] = False, ): from frontend.entrypoint import start - from frontend.solutions.types import SolutionType setup_logging(debug) - - try: - solution_type = SolutionType(solution_name.upper()) - except ValueError: - typer.echo(f"Invalid solution name: {solution_name}", err=True) - raise typer.Exit(code=1) - start( - solution_type=solution_type, + solution_name=solution_name, backend_url=backend_url, log_level=get_log_level(debug), ) From 6c3eb31ccb24983443d69e749a1c78f71e9c1e13 Mon Sep 17 00:00:00 2001 From: ks6088ts Date: Tue, 7 May 2024 23:22:39 +0900 Subject: [PATCH 7/8] update dockerfiles --- .github/workflows/docker-release.yaml | 18 +++++++++++++--- Makefile | 30 ++++++++++++--------------- dockerfiles/backend.Dockerfile | 2 +- dockerfiles/frontend.Dockerfile | 24 +++++++++++++++++++++ 4 files changed, 53 insertions(+), 21 deletions(-) create mode 100644 dockerfiles/frontend.Dockerfile diff --git a/.github/workflows/docker-release.yaml b/.github/workflows/docker-release.yaml index c5602cf..86394a1 100644 --- a/.github/workflows/docker-release.yaml +++ b/.github/workflows/docker-release.yaml @@ -28,7 +28,7 @@ jobs: core.setOutput('tag', tag) core.setOutput('no-v', no_v) core.setOutput('no-dash', no_dash) - - name: Build and push tag ${{steps.set_version.outputs.no-dash}} + - name: Build and push tag ${{steps.set_version.outputs.no-dash}} for backend uses: docker/build-push-action@v5 with: context: . @@ -38,5 +38,17 @@ jobs: GIT_REVISION=${{ github.sha }} GIT_TAG=${{steps.set_version.outputs.no-dash}} tags: | - ${{ secrets.DOCKERHUB_USERNAME }}/azure-ai-services-solutions:${{steps.set_version.outputs.no-dash}} - ${{ secrets.DOCKERHUB_USERNAME }}/azure-ai-services-solutions:latest + ${{ secrets.DOCKERHUB_USERNAME }}/azure-ai-services-solutions:backend-${{steps.set_version.outputs.no-dash}} + ${{ secrets.DOCKERHUB_USERNAME }}/azure-ai-services-solutions:backend-latest + - name: Build and push tag ${{steps.set_version.outputs.no-dash}} for frontend + uses: docker/build-push-action@v5 + with: + context: . 
+ file: ./dockerfiles/frontend.Dockerfile + push: true + build-args: | + GIT_REVISION=${{ github.sha }} + GIT_TAG=${{steps.set_version.outputs.no-dash}} + tags: | + ${{ secrets.DOCKERHUB_USERNAME }}/azure-ai-services-solutions:frontend-${{steps.set_version.outputs.no-dash}} + ${{ secrets.DOCKERHUB_USERNAME }}/azure-ai-services-solutions:frontend-latest diff --git a/Makefile b/Makefile index 4f8d4be..1e8aa09 100644 --- a/Makefile +++ b/Makefile @@ -51,9 +51,10 @@ ci-test: install-deps-dev format-check lint test ## run CI tests # --- DOCKER_REPO_NAME ?= ks6088ts DOCKER_IMAGE_NAME ?= azure-ai-services-solutions -DOCKER_COMMAND ?= python main.py --help -DOCKER_TAG ?= local -DOCKERFILE ?= dockerfiles/backend.Dockerfile +DOCKER_IMAGE_COMPONENT ?= backend +DOCKER_COMMAND ?= +DOCKER_TAG ?= $(DOCKER_IMAGE_COMPONENT)-$(GIT_TAG) +DOCKER_FILE ?= ./dockerfiles/$(DOCKER_IMAGE_COMPONENT).Dockerfile # Tools TOOLS_DIR ?= $(HOME)/.local/bin @@ -63,28 +64,18 @@ TRIVY_VERSION ?= 0.49.1 docker-build: ## build Docker image docker build \ --tag $(DOCKER_REPO_NAME)/$(DOCKER_IMAGE_NAME):$(DOCKER_TAG) \ - --file $(DOCKERFILE) \ + --file $(DOCKER_FILE) \ --build-arg GIT_REVISION=$(GIT_REVISION) \ --build-arg GIT_TAG=$(GIT_TAG) \ . .PHONY: docker-run docker-run: ## run Docker container - docker run --rm \ - --publish 8888:8888 \ - --volume $(PWD)/azure_ai_document_intelligence.env.sample:/app/azure_ai_document_intelligence.env \ - --volume $(PWD)/azure_ai_speech.env.sample:/app/azure_ai_speech.env \ - --volume $(PWD)/azure_ai_vision.env.sample:/app/azure_ai_vision.env \ - --volume $(PWD)/azure_event_grid.env.sample:/app/azure_event_grid.env \ - --volume $(PWD)/azure_openai.env.sample:/app/azure_openai.env \ - --volume $(PWD)/azure_storage_blob.env.sample:/app/azure_storage_blob.env \ - --volume $(PWD)/azure_storage_queue.env.sample:/app/azure_storage_queue.env \ - $(DOCKER_REPO_NAME)/$(DOCKER_IMAGE_NAME):$(DOCKER_TAG) \ - $(DOCKER_COMMAND) + docker run --rm $(DOCKER_REPO_NAME)/$(DOCKER_IMAGE_NAME):$(DOCKER_TAG) $(DOCKER_COMMAND) .PHONY: docker-lint docker-lint: ## lint Dockerfile - docker run --rm -i hadolint/hadolint < $(DOCKERFILE) + docker run --rm -i hadolint/hadolint < $(DOCKER_FILE) .PHONY: docker-scan docker-scan: ## scan Docker image @@ -92,8 +83,13 @@ docker-scan: ## scan Docker image @which trivy || curl -sfL https://raw.githubusercontent.com/aquasecurity/trivy/main/contrib/install.sh | sh -s -- -b $(TOOLS_DIR) v$(TRIVY_VERSION) trivy image $(DOCKER_REPO_NAME)/$(DOCKER_IMAGE_NAME):$(DOCKER_TAG) +.PHONY: _ci-test-docker +_ci-test-docker: docker-lint docker-build docker-scan docker-run + .PHONY: ci-test-docker -ci-test-docker: docker-lint docker-build docker-scan docker-run ## run CI test for Docker +ci-test-docker: ## run CI test for Docker + $(MAKE) _ci-test-docker DOCKER_IMAGE_COMPONENT=backend + $(MAKE) _ci-test-docker DOCKER_IMAGE_COMPONENT=frontend # --- # Application diff --git a/dockerfiles/backend.Dockerfile b/dockerfiles/backend.Dockerfile index 4c46622..36817ff 100644 --- a/dockerfiles/backend.Dockerfile +++ b/dockerfiles/backend.Dockerfile @@ -21,4 +21,4 @@ COPY . . 
# Install dependencies RUN pip install --no-cache-dir --upgrade -r /app/requirements.txt -CMD ["python", "main.py"] +CMD ["python", "main.py", "backend", "--help"] diff --git a/dockerfiles/frontend.Dockerfile b/dockerfiles/frontend.Dockerfile new file mode 100644 index 0000000..b90df7b --- /dev/null +++ b/dockerfiles/frontend.Dockerfile @@ -0,0 +1,24 @@ +FROM python:3.11.8-slim-bookworm as requirements-stage + +WORKDIR /tmp + +RUN pip install --no-cache-dir poetry==1.8.2 + +COPY ./pyproject.toml ./poetry.lock* /tmp/ + +RUN poetry export --with=frontend -f requirements.txt --output requirements.txt --without-hashes + +FROM python:3.11.8-slim-bookworm + +ARG GIT_REVISION="0000000" +ARG GIT_TAG="x.x.x" + +WORKDIR /app + +COPY --from=requirements-stage /tmp/requirements.txt /app/requirements.txt +COPY . . + +# Install dependencies +RUN pip install --no-cache-dir --upgrade -r /app/requirements.txt + +CMD ["python", "main.py", "frontend", "--help"] From d6311de50d7c073554321e4c70da8e9405b0b42b Mon Sep 17 00:00:00 2001 From: ks6088ts Date: Tue, 7 May 2024 23:33:41 +0900 Subject: [PATCH 8/8] update docs --- README.md | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index 02e3e76..b92be77 100644 --- a/README.md +++ b/README.md @@ -45,9 +45,20 @@ make docker-run make ci-test-docker ``` -To publish the docker image to Docker Hub, you need to set the following secrets in the repository settings. +## Deployment instructions + +### Docker Hub + +To publish the docker image to Docker Hub via GitHub Actions, you need to set the following secrets in the repository. ```shell gh secret set DOCKERHUB_USERNAME --body $DOCKERHUB_USERNAME gh secret set DOCKERHUB_TOKEN --body $DOCKERHUB_TOKEN ``` + +### Azure Functions + +To deploy the Azure Functions, you can refer to the following scripts. + +- [scripts/deploy-azure-functions.sh](./scripts/deploy-azure-functions.sh): Deploy the Azure Functions using Azure CLI. +- [scripts/destroy-azure-functions.sh](./scripts/destroy-azure-functions.sh): Destroy the Azure Functions using Azure CLI.
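
The two scripts referenced above are the source of truth for how this repository provisions and removes its Azure Functions resources. Purely as orientation, the following is a minimal sketch of the kind of Azure CLI / Azure Functions Core Tools sequence such scripts typically run; the resource names, location, and runtime options below are illustrative placeholders, not values taken from the scripts themselves.

```shell
# Placeholders for illustration only — adjust to your environment.
RESOURCE_GROUP_NAME=rg-azure-ai-services-solutions
STORAGE_ACCOUNT_NAME=stazureaisolutions
FUNCTION_APP_NAME=func-azure-ai-services-solutions
LOCATION=japaneast

# Provision the resource group, the storage account backing the Function App, and the Function App itself.
az group create --name "$RESOURCE_GROUP_NAME" --location "$LOCATION"
az storage account create \
  --name "$STORAGE_ACCOUNT_NAME" \
  --resource-group "$RESOURCE_GROUP_NAME" \
  --location "$LOCATION" \
  --sku Standard_LRS
az functionapp create \
  --name "$FUNCTION_APP_NAME" \
  --resource-group "$RESOURCE_GROUP_NAME" \
  --storage-account "$STORAGE_ACCOUNT_NAME" \
  --consumption-plan-location "$LOCATION" \
  --os-type Linux \
  --runtime python \
  --functions-version 4

# Publish the function code (requires Azure Functions Core Tools and an authenticated `az login` session).
func azure functionapp publish "$FUNCTION_APP_NAME"

# Tearing the deployment down again usually amounts to deleting the resource group.
az group delete --name "$RESOURCE_GROUP_NAME" --yes
```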