Commit

Merge pull request #42 from ks6088ts-labs/bugfix/issue-41_fix-temporary-redirect

fix route
ks6088ts authored May 7, 2024
2 parents 1fd0d1b + f671873 commit 16523d1
Showing 15 changed files with 60 additions and 26 deletions.
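
The diff below drops trailing slashes from the backend route decorators and from the frontend URLs that call them. As a minimal illustration of the redirect named in the branch (a sketch, not code from this repository): FastAPI/Starlette routers ship with redirect_slashes enabled, so a request whose path differs from a registered route only by a trailing slash is answered with a 307 Temporary Redirect instead of being handled directly; keeping route and caller consistent avoids that extra hop.

```python
# Minimal sketch, not part of this repository: shows the 307 Temporary Redirect
# that Starlette's default redirect_slashes behaviour produces on a trailing-slash mismatch.
from fastapi import FastAPI
from fastapi.testclient import TestClient

app = FastAPI()


@app.post("/analyze_document")  # registered without a trailing slash, as in this commit
async def analyze_document() -> dict:
    return {"status": "ok"}


client = TestClient(app)

# Exact match is handled directly.
print(client.post("/analyze_document").status_code)  # 200

# Mismatched trailing slash is redirected rather than handled.
# (Older Starlette TestClient versions take allow_redirects instead of follow_redirects.)
print(client.post("/analyze_document/", follow_redirects=False).status_code)  # 307
```
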
1 change: 1 addition & 0 deletions Makefile
@@ -67,6 +67,7 @@ docker-build: ## build Docker image
 --file $(DOCKER_FILE) \
 --build-arg GIT_REVISION=$(GIT_REVISION) \
 --build-arg GIT_TAG=$(GIT_TAG) \
+--no-cache \
 .

 .PHONY: docker-run

2 changes: 1 addition & 1 deletion azure_openai.env.sample
@@ -1,4 +1,4 @@
-AZURE_OPENAI_ENDPOINT = "https://<aoai-name>.openai.azure.com/"
+AZURE_OPENAI_ENDPOINT = "https://<aoai-name>.openai.azure.com"
 AZURE_OPENAI_API_KEY = "<aoai-api-key>"
 AZURE_OPENAI_API_VERSION = "2024-04-01-preview"
 AZURE_OPENAI_EMBEDDING_MODEL = "text-embedding-ada-002"

2 changes: 1 addition & 1 deletion backend/routers/azure_ai_document_intelligence.py
@@ -20,7 +20,7 @@


 @router.post(
-    "/analyze_document/",
+    "/analyze_document",
     response_model=azure_ai_document_intelligence_schemas.AnalyzeDocumentResponse,
     status_code=200,
 )

4 changes: 2 additions & 2 deletions backend/routers/azure_ai_vision.py
@@ -20,7 +20,7 @@


 @router.post(
-    "/image/analyze/",
+    "/image/analyze",
     response_model=azure_ai_vision_schemas.ImageAnalysisResponse,
     status_code=200,
 )
@@ -39,7 +39,7 @@ async def analyze_image(file: UploadFile):


 @router.post(
-    "/image/vectorize/",
+    "/image/vectorize",
     status_code=200,
 )
 async def vectorize_image(file: UploadFile):

2 changes: 1 addition & 1 deletion backend/routers/azure_event_grid.py
@@ -19,7 +19,7 @@


 @router.post(
-    "/event_grid_event/",
+    "/event_grid_event",
     status_code=200,
 )
 async def send_event_grid_event(

4 changes: 2 additions & 2 deletions backend/routers/azure_openai.py
@@ -20,7 +20,7 @@


 @router.post(
-    "/chat_completions/",
+    "/chat_completions",
     response_model=azure_openai_schemas.ChatCompletionResponse,
     status_code=200,
 )
@@ -35,7 +35,7 @@ async def create_chat_completions(body: azure_openai_schemas.ChatCompletionReque


 @router.post(
-    "/chat_completions_with_vision/",
+    "/chat_completions_with_vision",
     response_model=azure_openai_schemas.ChatCompletionWithVisionResponse,
     status_code=200,
 )

6 changes: 3 additions & 3 deletions backend/routers/azure_storage_blob.py
@@ -21,7 +21,7 @@


 @router.post(
-    "/blobs/upload/",
+    "/blobs/upload",
     response_model=azure_storage_schemas.BlobUploadResponse,
     status_code=200,
 )
@@ -44,7 +44,7 @@ async def upload_blob(


 @router.delete(
-    "/blobs/delete/",
+    "/blobs/delete",
     status_code=200,
 )
 async def delete_blob(
@@ -64,7 +64,7 @@ async def delete_blob(


 @router.get(
-    "/blobs/",
+    "/blobs",
     status_code=200,
 )
 async def list_blobs():

10 changes: 5 additions & 5 deletions backend/routers/azure_storage_queue.py
@@ -21,7 +21,7 @@


 @router.post(
-    "/queues/",
+    "/queues",
     response_model=azure_storage_queue_schemas.CreateQueueResponse,
     status_code=200,
 )
@@ -41,7 +41,7 @@ async def create_queue(


 @router.delete(
-    "/queues/",
+    "/queues",
     response_model=azure_storage_queue_schemas.DeleteQueueResponse,
     status_code=200,
 )
@@ -61,7 +61,7 @@ async def delete_queue(


 @router.post(
-    "/messages/",
+    "/messages",
     response_model=azure_storage_queue_schemas.SendMessageResponse,
     status_code=200,
 )
@@ -81,7 +81,7 @@ async def send_message(


 @router.get(
-    "/messages/",
+    "/messages",
     status_code=200,
 )
 async def receive_messages(
@@ -112,7 +112,7 @@ async def receive_messages(


 @router.delete(
-    "/messages/",
+    "/messages",
     response_model=azure_storage_queue_schemas.DeleteMessageResponse,
     status_code=200,
 )

2 changes: 1 addition & 1 deletion backend/settings/azure_ai_document_intelligence.py
@@ -3,7 +3,7 @@

 class Settings(BaseSettings):
     azure_ai_document_intelligence_endpoint: str = (
-        "https://<your-document-intelligence-name>.cognitiveservices.azure.com/"
+        "https://<your-document-intelligence-name>.cognitiveservices.azure.com"
     )
     azure_ai_document_intelligence_api_key: str = "<your-document-intelligence-api-key>"

2 changes: 1 addition & 1 deletion backend/settings/azure_ai_vision.py
@@ -2,7 +2,7 @@


 class Settings(BaseSettings):
-    azure_ai_vision_endpoint: str = "https://<name>.cognitiveservices.azure.com/"
+    azure_ai_vision_endpoint: str = "https://<name>.cognitiveservices.azure.com"
     azure_ai_vision_api_key: str = "<api-key>"

     model_config = SettingsConfigDict(

2 changes: 1 addition & 1 deletion backend/settings/azure_openai.py
@@ -2,7 +2,7 @@


 class Settings(BaseSettings):
-    azure_openai_endpoint: str = "https://<aoai-name>.openai.azure.com/"
+    azure_openai_endpoint: str = "https://<aoai-name>.openai.azure.com"
     azure_openai_api_key: str = "<aoai-api-key>"
     azure_openai_api_version: str = "2024-02-01"
     azure_openai_embedding_model: str = "text-embedding-ada-002"

43 changes: 38 additions & 5 deletions docs/README.md
@@ -3,11 +3,44 @@
 ## Docker

 ```shell
-# Build the Docker image
-make docker-build
-
-# Dry run the Docker container with default settings
-make --dry-run docker-run DOCKER_COMMAND="python main.py backend --port 8888 --debug"
+# Build the Docker image (optional)
+make docker-build DOCKER_IMAGE_COMPONENT=backend GIT_TAG=latest
+make docker-build DOCKER_IMAGE_COMPONENT=frontend GIT_TAG=latest
+
+# Create environment files for each service
+cp {NAME}.env.sample {NAME}.env
+
+# Run the Docker container for the backend
+docker run --rm \
+    --publish 8888:8888 \
+    --volume ${PWD}/azure_ai_document_intelligence.env:/app/azure_ai_document_intelligence.env \
+    --volume ${PWD}/azure_ai_vision.env:/app/azure_ai_vision.env \
+    --volume ${PWD}/azure_event_grid.env:/app/azure_event_grid.env \
+    --volume ${PWD}/azure_openai.env:/app/azure_openai.env \
+    --volume ${PWD}/azure_storage_blob.env:/app/azure_storage_blob.env \
+    --volume ${PWD}/azure_storage_queue.env:/app/azure_storage_queue.env \
+    ks6088ts/azure-ai-services-solutions:backend-latest \
+    python main.py backend \
+    --port 8888 \
+    --debug
+
+# Access the backend: http://localhost:8888
+
+# Run ngrok to expose the backend (for testing purposes only)
+ngrok http 8888
+NGROK_URL="<forwarding-url>"
+
+# Run the Docker container for the frontend
+docker run --rm \
+    --publish 8501:8501 \
+    --volume ${PWD}/azure_ai_speech.env:/app/azure_ai_speech.env \
+    ks6088ts/azure-ai-services-solutions:frontend-latest \
+    streamlit run main.py --server.port=8501 --server.address=0.0.0.0 -- frontend \
+    --solution-name sandbox \
+    --backend-url ${NGROK_URL} \
+    --debug
+
+# Access the frontend: http://localhost:8501
 ```

 # References

2 changes: 1 addition & 1 deletion frontend/solutions/azure_ai_vision.py
@@ -36,7 +36,7 @@ def start(
     bytes_data = file_uploader.getvalue()
     response = asyncio.run(
         http_post_file(
-            url=urljoin(base=backend_url, url="/azure_ai_vision/image/analyze/"),
+            url=urljoin(base=backend_url, url="/azure_ai_vision/image/analyze"),
             data_bytes_io=BytesIO(bytes_data),
         )
     )

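For reference, a hypothetical way to exercise the updated route directly, assuming the backend container from the README above is listening on localhost:8888 and httpx is installed; the multipart field name `file` mirrors the router's `UploadFile` parameter:

```python
# Hypothetical smoke test, not part of this repository: POST an image to the
# trailing-slash-free route and print the backend's response.
import httpx

with open("sample.png", "rb") as image_file:  # any local image file
    response = httpx.post(
        "http://localhost:8888/azure_ai_vision/image/analyze",
        files={"file": image_file},  # field name matches the UploadFile parameter
        timeout=30.0,
    )

print(response.status_code)
print(response.json())
```
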
2 changes: 1 addition & 1 deletion frontend/solutions/document_intelligence.py
@@ -36,7 +36,7 @@ def start(
     bytes_data = file_uploader.getvalue()
     response = asyncio.run(
         http_post_file(
-            url=urljoin(base=backend_url, url="/azure_ai_document_intelligence/analyze_document/"),
+            url=urljoin(base=backend_url, url="/azure_ai_document_intelligence/analyze_document"),
             data_bytes_io=BytesIO(bytes_data),
         )
     )

2 changes: 1 addition & 1 deletion main.py
@@ -42,7 +42,7 @@ def backend(
 @app.command()
 def frontend(
     solution_name: Annotated[str, typer.Option(help="Solution name")] = "SANDBOX",
-    backend_url: Annotated[str, typer.Option(help="Backend URL")] = "http://localhost:8000/",
+    backend_url: Annotated[str, typer.Option(help="Backend URL")] = "http://localhost:8000",
     debug: Annotated[bool, typer.Option(help="Enable debug mode")] = False,
 ):
     from frontend.entrypoint import start

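A quick standard-library check (illustration only, not from the repository): because the frontend joins backend_url with route paths that start with "/", urljoin produces the same URL whether or not the default carries a trailing slash, so this default change reads as a consistency cleanup rather than a behavioural fix.

```python
# Illustration only: urljoin ignores the base path when the second argument
# starts with "/", so both defaults resolve to the same request URL.
from urllib.parse import urljoin

route = "/azure_ai_vision/image/analyze"
print(urljoin("http://localhost:8000/", route))  # http://localhost:8000/azure_ai_vision/image/analyze
print(urljoin("http://localhost:8000", route))   # http://localhost:8000/azure_ai_vision/image/analyze
```
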
