diff --git a/.gitattributes b/.gitattributes new file mode 100644 index 000000000..c8a8d73b2 --- /dev/null +++ b/.gitattributes @@ -0,0 +1 @@ +notebooks/dataset.zip filter=lfs diff=lfs merge=lfs -text diff --git a/.github/workflows/docs-build.yaml b/.github/workflows/docs-build.yaml new file mode 100644 index 000000000..c0abe1227 --- /dev/null +++ b/.github/workflows/docs-build.yaml @@ -0,0 +1,153 @@ +name: docs-build + +on: + pull_request: + branches: [ main, release-* ] + types: [ opened, synchronize ] + + push: + branches: [ main ] + tags: + - v* + workflow_dispatch: + +concurrency: + group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }} + cancel-in-progress: true + +defaults: + run: + shell: bash + +jobs: + build-docs: + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Build image + run: | + docker build --pull --tag docs-builder:latest --file docs/Dockerfile . + - name: Build docs + run: | + docker run -v $(pwd):/work -w /work docs-builder:latest sphinx-build -b html -d /tmp docs docs/_build/output + - name: Delete unnecessary files + run: | + sudo rm -rf docs/_build/jupyter_execute + sudo rm -rf docs/_build/.buildinfo + - name: Upload HTML + uses: actions/upload-artifact@v4 + with: + name: html-build-artifact + path: docs/_build/ + if-no-files-found: error + retention-days: 1 + - name: Store PR information + if: ${{ github.event_name == 'pull_request' }} + run: | + mkdir ./pr + echo ${{ github.event.number }} > ./pr/pr.txt + echo ${{ github.event.pull_request.merged }} > ./pr/merged.txt + echo ${{ github.event.action }} > ./pr/action.txt + - name: Upload PR information + if: ${{ github.event_name == 'pull_request' }} + uses: actions/upload-artifact@v4 + with: + name: pr + path: pr/ + + store-html: + needs: [ build-docs ] + if: ${{ github.event_name == 'push' }} + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + with: + ref: "gh-pages" + - name: Initialize Git configuration + run: | + git config user.name docs-build + git config user.email do-not-send@github.com + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: html-build-artifact + - name: Copy HTML directories + run: | + ls -asl + - name: Store bleeding edge docs from main + if: ${{ github.ref == 'refs/heads/main' }} + run: | + mkdir main || true + rsync -av --progress --delete output/ main/ + git add main + - name: Store docs for a release tag + if: ${{ startsWith(github.ref, 'refs/tags/v') }} + env: + LATEST: ${{ contains(github.event.head_commit.message, '/not-latest') && 'not-true' || 'true' }} + run: | + printenv LATEST + if [[ "${GITHUB_REF}" =~ "-rc" ]]; then + echo "Not saving documents for release candidates." + exit 0 + fi + if [[ "${GITHUB_REF}" =~ v([0-9]+\.[0-9]+\.[0-9]+) ]]; then + TAG="${BASH_REMATCH[1]}" + mkdir "${TAG}" || true + rsync -av --progress --delete output/ "${TAG}/" + git add "${TAG}/" + if [[ "${LATEST}" == 'true' ]]; then + mkdir latest || true + rsync -av --progress --delete output/ latest/ + cp output/versions.json . + git add latest + git add versions.json + fi + fi + - name: Check or create dot-no-jekyll file + run: | + if [ -f ".nojekyll" ]; then + echo "The dot-no-jekyll file already exists." + exit 0 + fi + touch .nojekyll + git add .nojekyll + - name: Check or create redirect page + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: | + resp=$(grep 'http-equiv="refresh"' index.html 2>/dev/null) || true + if [ -n "${resp}" ]; then + echo "The redirect file already exists." 
+ exit 0 + fi + # If any of these commands fail, fail the build. + html_url=$(gh api "repos/${GITHUB_REPOSITORY}/pages" --jq ".html_url") + # Beware ugly quotation mark avoidance in the foll lines. + echo '' > index.html + echo '' >> index.html + echo ' ' >> index.html + echo ' Redirect to documentation' >> index.html + echo ' ' >> index.html + echo ' ' >> index.html + echo ' ' >> index.html + echo ' ' >> index.html + echo ' ' >> index.html + echo ' ' >> index.html + echo '
Please follow the link to the ' >> index.html + echo 'latest documentation.
' >> index.html + echo ' ' >> index.html + echo '' >> index.html + git add index.html + - name: Commit changes to the GitHub Pages branch + run: | + git status + if git commit -m 'Pushing changes to GitHub Pages.'; then + git push -f + else + echo "Nothing changed." + fi diff --git a/.github/workflows/docs-preview-pr.yaml b/.github/workflows/docs-preview-pr.yaml new file mode 100644 index 000000000..362db16e3 --- /dev/null +++ b/.github/workflows/docs-preview-pr.yaml @@ -0,0 +1,117 @@ +name: docs-preview-pr + +on: + workflow_run: + workflows: [docs-build] + types: [completed] + +env: + WF_ID: ${{ github.event.workflow_run.id }} + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + +jobs: + # Always determine if GitHub Pages are configured for this repo. + get-gh-pages-url: + if: + github.event.workflow_run.event == 'pull_request' && + github.event.workflow_run.conclusion == 'success' + runs-on: ubuntu-latest + outputs: + url: ${{ steps.api-resp.outputs.html_url || '' }} + branch: ${{ steps.api-resp.outputs.branch || '' }} + steps: + - name: Check for GitHub Pages + id: api-resp + run: | + has_pages=$(gh api "repos/${GITHUB_REPOSITORY}" -q '.has_pages') + if [ "true" != "${has_pages}" ]; then + echo "GitHub pages is not active for the repository. Quitting." + return + fi + + url=$(gh api "repos/${GITHUB_REPOSITORY}/pages" -q '.html_url') + branch=$(gh api "repos/${GITHUB_REPOSITORY}/pages" -q '.source.branch') + + echo "html_url=${url}" >> $GITHUB_OUTPUT + echo "branch=${branch}" >> $GITHUB_OUTPUT + + # Identify the dir for the HTML. + store-html: + runs-on: ubuntu-latest + needs: [get-gh-pages-url] + if: needs.get-gh-pages-url.outputs.url != '' + steps: + - uses: actions/checkout@v4 + with: + ref: ${{ needs.get-gh-pages-url.outputs.branch }} + - name: Initialize Git configuration + run: | + git config user.name docs-preview + git config user.email do-not-send-@github.com + - name: Download artifacts + run: | + gh run view "${WF_ID}" + gh run download "${WF_ID}" + PR=$(cat ./pr/pr.txt) + MERGED=$(cat ./pr/merged.txt) + ACTION=$(cat ./pr/action.txt) + echo "PR_NO=${PR}" >> $GITHUB_ENV + echo "MERGE_STATUS=${MERGED}" >> $GITHUB_ENV + echo "PR_ACTION=${ACTION}" >> $GITHUB_ENV + echo "REVIEW_DIR=review/" >> $GITHUB_ENV + echo "PR_REVIEW_DIR=review/pr-${PR}" >> $GITHUB_ENV + + # Remove the pr artifact directory so that it does not + # appear in listings or confuse git with untracked files. + rm -rf ./pr + + # Permutations: + # - PR was updated, PR_ACTION is !closed, need to delete review directory and update it. + # - PR was closed (regardless of merge), PR_ACTION is closed, need to delete review directory. + + # If this PR is still open, store HTML in a review directory. + - name: Handle HTML review directory for open PRs and updates to PRs + if: env.MERGE_STATUS == 'false' && env.PR_ACTION != 'closed' + run: | + rm -rf "${{ env.PR_REVIEW_DIR }}" 2>/dev/null || true + if [ ! -d "${{ env.REVIEW_DIR }}" ]; then + mkdir "${{ env.REVIEW_DIR }}" + fi + mv ./html-build-artifact/latest/ "${{ env.PR_REVIEW_DIR }}" + git add "${{ env.PR_REVIEW_DIR }}" + # If the PR was closed, merged or not, delete review directory. 
+ - name: Delete HTML review directory for closed PRs + if: env.PR_ACTION == 'closed' + run: | + if [ -d ./html-build-artifact/ ]; then + rm -rf ./html-build-artifact/ 2>/dev/null + fi + if [ -d "${{ env.PR_REVIEW_DIR }}" ]; then + git rm -rf "${{ env.PR_REVIEW_DIR }}" + fi + - name: Commit changes to the GitHub Pages branch + run: | + git status + if git commit -m 'Pushing changes to GitHub Pages.'; then + git push -f + else + echo "Nothing changed." + fi + - name: Check for existing documentation review comment + run: | + result=$(gh pr view ${{ env.PR_NO }} --json comments -q 'any(.comments[].body; contains("Documentation preview"))') + echo "COMMENT_EXISTS=${result}" >> $GITHUB_ENV + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + - name: Add HTML review URL comment to a newly opened PR + if: env.MERGE_STATUS == 'false' && env.COMMENT_EXISTS == 'false' + env: + URL: ${{ needs.get-gh-pages-url.outputs.url }} + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + shell: bash + run: | + echo -e "## Documentation preview" > body + echo -e "" >> body + echo -e "<${{ env.URL }}${{ env.PR_REVIEW_DIR }}>" >> body + cat body + gh pr comment ${{ env.PR_NO }} --body-file body diff --git a/.github/workflows/docs-remove-stale-reviews.yaml b/.github/workflows/docs-remove-stale-reviews.yaml new file mode 100644 index 000000000..8b758c37a --- /dev/null +++ b/.github/workflows/docs-remove-stale-reviews.yaml @@ -0,0 +1,11 @@ +name: docs-remove-stale-reviews + +on: + schedule: + # 42 minutes after 0:00 UTC on Sundays + - cron: "42 0 * * 0" + workflow_dispatch: + +jobs: + remove: + uses: nvidia-merlin/.github/.github/workflows/docs-remove-stale-reviews-common.yaml@main diff --git a/.gitignore b/.gitignore index 9e2aa6240..18fcdccf7 100644 --- a/.gitignore +++ b/.gitignore @@ -11,4 +11,15 @@ deploy/*.txt # Docker Compose exclusions volumes/ -uploaded_files/ \ No newline at end of file +uploaded_files/ + +# Visual Studio Code +.vscode + +# Node modules +**/node_modules + +# File from docs builds +docs/_* +docs/notebooks +docs/experimental diff --git a/CHANGELOG.md b/CHANGELOG.md index 460da65a3..b423ed734 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -3,7 +3,48 @@ All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). -## [0.4.0] - 2024-02-22 +## [0.5.0] - 2024-03-19 + +This release adds new dedicated RAG examples showcasing state of the art usecases, switches to the latest [API catalog endpoints from NVIDIA](https://build.nvidia.com/explore/discover) and also refactors the API interface of chain-server. This release also improves the developer experience by adding github pages based documentation and streamlining the example deployment flow using dedicated compose files. + +### Added + +- Github pages based documentation. +- New examples showcasing + - [Multi-turn RAG](./RetrievalAugmentedGeneration/examples/multi_turn_rag/) + - [Multi-modal RAG](./RetrievalAugmentedGeneration//examples/multimodal_rag/) + - [Structured data CSV RAG](./RetrievalAugmentedGeneration/examples/csv_rag/) +- Support for [delete and list APIs](./docs/api_reference/openapi_schema.json) in chain-server component +- Streamlined RAG example deployment + - Dedicated new [docker compose files](./deploy/compose/) for every examples. + - Dedicated [docker compose files](./deploy/compose/docker-compose-vectordb.yaml) for launching vector DB solutions. 
+- New configurations to control top k and confidence score of retrieval pipeline. +- Added [a notebook](./models/NeMo/slm/README.md) which covers how to train SLMs with various techniques using NeMo Framework. +- Added more [experimental examples](./experimental/README.md) showcasing new usecases. + - [NVIDIA ORAN chatbot multimodal Assistant](./experimental/oran-chatbot-multimodal/) + - [NVIDIA Retrieval Customization](./experimental/synthetic-data-retriever-customization/) + - [NVIDIA RAG Streaming Document Ingestion Pipeline](./experimental/streaming_ingest_rag/) + - [NVIDIA Live FM Radio ASR RAG](./experimental/fm-asr-streaming-rag/) +- [New dedicated notebook](./notebooks/10_RAG_for_HTML_docs_with_Langchain_NVIDIA_AI_Endpoints.ipynb) showcasing a RAG pipeline using web pages. + + +### Changed + +- Switched from NVIDIA AI Foundation to [NVIDIA API Catalog endpoints](https://build.nvidia.com/explore/discover) for accessing cloud hosted LLM models. +- Refactored [API schema of chain-server component](./docs/api_reference/openapi_schema.json) to support runtime allocation of llm parameters like temperature, max tokens, chat history etc. +- Renamed `llm-playground` service in compose files to `rag-playground`. +- Switched base containers for all components to ubuntu instead of pytorch and optimized container build time as well as container size. +- Deprecated yaml based configuration to avoid confusion, all configurations are now environment variable based. +- Removed requirement of hardcoding `NVIDIA_API_KEY` in `compose.env` file. +- Upgraded all python dependencies for chain-server and rag-playground services. + +### Fixed + +- Fixed a bug causing hallucinated answer when retriever fails to return any documents. +- Fixed some accuracy issues for all the examples. + + +## [0.4.0] - 2024-02-23 ### Added @@ -75,4 +116,4 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), ### Fixed - [Fixed issue #13](https://github.com/NVIDIA/GenerativeAIExamples/issues/13) of pipeline not able to answer questions unrelated to knowledge base -- [Fixed issue #12](https://github.com/NVIDIA/GenerativeAIExamples/issues/12) typechecking while uploading PDF files +- [Fixed issue #12](https://github.com/NVIDIA/GenerativeAIExamples/issues/12) typechecking while uploading PDF files \ No newline at end of file diff --git a/README.md b/README.md index f90561093..922f0f4ae 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,9 @@ # NVIDIA Generative AI Examples +[![documentation](https://img.shields.io/badge/documentation-blue.svg)](https://nvidia.github.io/GenerativeAIExamples/latest) + ## Introduction + State-of-the-art Generative AI examples that are easy to deploy, test, and extend. All examples run on the high performance NVIDIA CUDA-X software stack and NVIDIA GPUs. ## NVIDIA NGC @@ -24,7 +27,7 @@ Examples support local and remote inference endpoints. 
If you have a GPU, you ca | Model | Embedding | Framework | Description | Multi-GPU | TRT-LLM | NVIDIA AI Foundation | Triton | Vector Database | |---------------|-----------------------|------------|-------------------------|-----------|------------|-------------|---------|--------| | llama-2 | e5-large-v2 | Llamaindex | Canonical QA Chatbot | [YES](RetrievalAugmentedGeneration/README.md#3-qa-chatbot-multi-gpu----a100h100l40s) | [YES](RetrievalAugmentedGeneration/README.md#2-qa-chatbot----a100h100l40s-gpu) | No | YES | Milvus/[PGVector]((RetrievalAugmentedGeneration/README.md#2-qa-chatbot----a100h100l40s-gpu))| -| mixtral_8x7b | nvolveqa_40k | Langchain | [Nvidia AI foundation based QA Chatbot](RetrievalAugmentedGeneration/README.md#1-qa-chatbot----nvidia-ai-foundation-inference-endpoint) | No | No | YES | YES | FAISS| +| mixtral_8x7b | nvolveqa_40k | Langchain | [Nvidia AI foundation based QA Chatbot](RetrievalAugmentedGeneration/README.md#1-qa-chatbot----nvidia-ai-foundation-inference-endpoint) | No | No | YES | YES | Milvus| | llama-2 | all-MiniLM-L6-v2 | Llama Index | [QA Chatbot, GeForce, Windows](https://github.com/NVIDIA/trt-llm-rag-windows/tree/release/1.0) | NO | YES | NO | NO | FAISS | | llama-2 | nvolveqa_40k | Langchain | [QA Chatbot, Task Decomposition Agent](./RetrievalAugmentedGeneration/README.md#5-qa-chatbot-with-task-decomposition-example----a100h100l40s) | No | No | YES | YES | FAISS | mixtral_8x7b | nvolveqa_40k | Langchain | [Minimilastic example showcasing RAG using Nvidia AI foundation models](./examples/README.md#rag-in-5-minutes-example) | No | No | YES | YES | FAISS| @@ -41,7 +44,7 @@ Enterprise RAG examples also support local and remote inference via [TensorRT-LL | Model | Embedding | Framework | Description | Multi-GPU | Multi-node | TRT-LLM | NVIDIA AI Foundation | Triton | Vector Database | |---------------|-----------------------|------------|--------|-------------------------|-----------|------------|-------------|---------|--------| -| llama-2 | NV-Embed-QA-003 | Llamaindex | QA Chatbot, Helm, k8s | NO | NO | [YES](./docs/developer-llm-operator/) | NO | YES | Milvus| +| llama-2 | NV-Embed-QA | Llamaindex | QA Chatbot, Helm, k8s | NO | NO | [YES](./docs/developer-llm-operator/) | NO | YES | Milvus| ## Tools @@ -59,7 +62,7 @@ These are open source connectors for NVIDIA-hosted and self-hosted API endpoints | Name | Framework | Chat | Text Embedding | Python | Description | |------|-----------|------|-----------|--------|-------------| |[NVIDIA AI Foundation Endpoints](https://python.langchain.com/docs/integrations/providers/nvidia) | [Langchain](https://www.langchain.com/) |[YES](https://python.langchain.com/docs/integrations/chat/nvidia_ai_endpoints)|[YES](https://python.langchain.com/docs/integrations/text_embedding/nvidia_ai_endpoints)|[YES](https://pypi.org/project/langchain-nvidia-ai-endpoints/)|Easy access to NVIDIA hosted models. 
Supports chat, embedding, code generation, steerLM, multimodal, and RAG.| -|[NVIDIA Triton + TensorRT-LLM](https://github.com/langchain-ai/langchain/tree/master/libs/partners/nvidia-trt) | [Langchain](https://www.langchain.com/) |[YES](https://github.com/langchain-ai/langchain/blob/master/libs/partners/nvidia-trt/docs/llms.ipynb)|[YES](https://github.com/langchain-ai/langchain/blob/master/libs/partners/nvidia-trt/docs/llms.ipynb)|[YES](https://pypi.org/project/langchain-nvidia-trt/)|This connector allows Langchain to remotely interact with a Triton inference server over GRPC or HTTP tfor optimized LLM inference.| +|[NVIDIA Triton + TensorRT-LLM](https://github.com/langchain-ai/langchain/tree/master/libs/partners/nvidia-trt) | [Langchain](https://www.langchain.com/) |[YES](https://github.com/langchain-ai/langchain-nvidia/blob/main/libs/trt/docs/llms.ipynb)|[YES](https://github.com/langchain-ai/langchain-nvidia/blob/main/libs/trt/docs/llms.ipynb)|[YES](https://pypi.org/project/langchain-nvidia-trt/)|This connector allows Langchain to remotely interact with a Triton inference server over GRPC or HTTP tfor optimized LLM inference.| |[NVIDIA Triton Inference Server](https://docs.llamaindex.ai/en/stable/examples/llm/nvidia_triton.html) | [LlamaIndex](https://www.llamaindex.ai/) |YES|YES|NO|Triton inference server provides API access to hosted LLM models over gRPC. | |[NVIDIA TensorRT-LLM](https://docs.llamaindex.ai/en/stable/examples/llm/nvidia_tensorrt.html) | [LlamaIndex](https://www.llamaindex.ai/) |YES|YES|NO|TensorRT-LLM provides a Python API to build TensorRT engines with state-of-the-art optimizations for LLM inference on NVIDIA GPUs. | diff --git a/RetrievalAugmentedGeneration/Dockerfile b/RetrievalAugmentedGeneration/Dockerfile index 78a278313..0cf19a4c6 100644 --- a/RetrievalAugmentedGeneration/Dockerfile +++ b/RetrievalAugmentedGeneration/Dockerfile @@ -1,24 +1,40 @@ -ARG BASE_IMAGE_URL=nvcr.io/nvidia/pytorch -ARG BASE_IMAGE_TAG=23.12-py3 +ARG BASE_IMAGE_URL=nvcr.io/nvidia/base/ubuntu +ARG BASE_IMAGE_TAG=20.04_x64_2022-09-23 FROM ${BASE_IMAGE_URL}:${BASE_IMAGE_TAG} -ARG EXAMPLE_NAME -COPY RetrievalAugmentedGeneration/__init__.py /opt/RetrievalAugmentedGeneration/ -COPY RetrievalAugmentedGeneration/common /opt/RetrievalAugmentedGeneration/common -COPY integrations /opt/integrations -COPY tools /opt/tools -RUN apt-get update && apt-get install -y libpq-dev +ENV PYTHONDONTWRITEBYTECODE=1 +ENV DEBIAN_FRONTEND noninteractive + +# Install required ubuntu packages for setting up python 3.10 +RUN apt update && \ + apt install -y dpkg openssl libgl1 linux-libc-dev libksba8 curl software-properties-common build-essential libssl-dev libffi-dev && \ + add-apt-repository ppa:deadsnakes/ppa && \ + apt update && apt install -y python3.10 python3.10-dev python3.10-distutils && \ + apt-get clean + +# Install pip for python3.10 +RUN curl -sS https://bootstrap.pypa.io/get-pip.py | python3.10 + +RUN rm -rf /var/lib/apt/lists/* + +# Install common dependencies for all examples RUN --mount=type=bind,source=RetrievalAugmentedGeneration/requirements.txt,target=/opt/requirements.txt \ - python3 -m pip install --no-cache-dir -r /opt/requirements.txt + pip3 install --no-cache-dir -r /opt/requirements.txt +# Install any example specific dependency if available +ARG EXAMPLE_NAME COPY RetrievalAugmentedGeneration/examples/${EXAMPLE_NAME} /opt/RetrievalAugmentedGeneration/example RUN if [ -f "/opt/RetrievalAugmentedGeneration/example/requirements.txt" ] ; then \ - python3 -m pip install --no-cache-dir -r 
/opt/RetrievalAugmentedGeneration/example/requirements.txt ; else \ + pip3 install --no-cache-dir -r /opt/RetrievalAugmentedGeneration/example/requirements.txt ; else \ echo "Skipping example dependency installation, since requirements.txt was not found" ; \ fi -RUN apt-get remove python3-pip +# Copy required common modules for all examples +COPY RetrievalAugmentedGeneration/__init__.py /opt/RetrievalAugmentedGeneration/ +COPY RetrievalAugmentedGeneration/common /opt/RetrievalAugmentedGeneration/common +COPY integrations /opt/integrations +COPY tools /opt/tools WORKDIR /opt ENTRYPOINT ["uvicorn", "RetrievalAugmentedGeneration.common.server:app"] diff --git a/RetrievalAugmentedGeneration/README.md b/RetrievalAugmentedGeneration/README.md deleted file mode 100644 index 614d19fce..000000000 --- a/RetrievalAugmentedGeneration/README.md +++ /dev/null @@ -1,694 +0,0 @@ -# Retrieval Augmented Generation - -Retrieval Augmented Generation (RAG) generates up-to-date and domain-specific answers by connecting a Large Language Model (LLM) to your enterprise data. - -## Developer RAG Examples - -1. [QA Chatbot -- No-GPU using NVIDIA AI Foundation](#1-qa-chatbot----nvidia-ai-foundation-inference-endpoint) -2. [QA Chatbot -- A100/H100/L40S](#2-qa-chatbot----a100h100l40s-gpu) -3. [QA Chatbot -- Multi-GPU](#3-qa-chatbot-multi-gpu----a100h100l40s) -4. [QA Chatbot -- Quantized LLM model](#4-qa-chatbot-with-quantized-llm-model----a100h100l40s) -5. [QA Chatbot -- Task Decomposition](#5-qa-chatbot-with-task-decomposition-example----a100h100l40s) -6. [QA Chatbot -- NemoTron Model](#6-qa-chatbot----nemotron-model) - -
- -### 1: QA Chatbot -- NVIDIA AI Foundation inference endpoint - -This example deploys a developer RAG pipeline for chat QA and serves inferencing via the NVIDIA AI Foundation endpoint. - -Developers get free credits for 10K requests to any of the available models. - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
-| Model | Embedding | Framework | Description | Multi-GPU | TRT-LLM | NVIDIA AI Foundation | Triton | Vector Database |
-|-------|-----------|-----------|-------------|-----------|---------|----------------------|--------|-----------------|
-| mixtral_8x7b | nvolveqa_40k | Langchain | QA chatbot | NO | NO | YES | NO | FAISS |
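For orientation, the sketch below shows the general shape of such a pipeline using the LangChain NVIDIA AI Foundation connectors with an in-memory FAISS index. It is not the chain server shipped with this example: the model names are taken from the table above, `NVIDIA_API_KEY` is assumed to be exported, and the document path is a placeholder.

```python
# Minimal RAG sketch against NVIDIA AI Foundation endpoints (illustrative only).
# Assumes: pip install langchain langchain-nvidia-ai-endpoints faiss-cpu
# and an exported NVIDIA_API_KEY.
from langchain_nvidia_ai_endpoints import ChatNVIDIA, NVIDIAEmbeddings
from langchain_community.vectorstores import FAISS
from langchain.text_splitter import RecursiveCharacterTextSplitter
from langchain_core.prompts import ChatPromptTemplate
from langchain_core.output_parsers import StrOutputParser

llm = ChatNVIDIA(model="mixtral_8x7b")             # model name from the table above
embedder = NVIDIAEmbeddings(model="nvolveqa_40k")  # embedding model from the table above

# Split and index a document in an in-memory FAISS store.
splitter = RecursiveCharacterTextSplitter(chunk_size=510, chunk_overlap=200)
chunks = splitter.split_text(open("my_document.txt").read())  # placeholder file
store = FAISS.from_texts(chunks, embedder)

prompt = ChatPromptTemplate.from_template(
    "Answer the question using only this context:\n{context}\n\nQuestion: {question}"
)
chain = prompt | llm | StrOutputParser()

question = "How many cores does the Grace superchip contain?"
context = "\n".join(doc.page_content for doc in store.similarity_search(question, k=4))
print(chain.invoke({"context": context, "question": question}))
```

The deployed example wires the same pieces together behind the chain server and playground UI described in the steps below.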
- -#### 1.1 Prepare the environment - -This example uses NVIDIA AI Foundation inference endpoint. - -1. Follow steps 1 - 5 in the ["Prepare the environment" section of example 02](#21-prepare-the-environment). - -#### 1.2 Deploy - -Follow [these instructions](../docs/rag/aiplayground.md) to sign up for an NVIDIA AI Foundation developer account and deploy this example. - -
- -### 2: QA Chatbot -- A100/H100/L40S GPU - -This example deploys a developer RAG pipeline for chat QA and serves inferencing via the NeMo Framework inference container. -> ⚠️ **NOTE**: This example requires an A100, H100, or L40S GPU. Refer to the [support matrix](../docs/rag/support_matrix.md) to understand memory requirements for the model you are deploying. - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
-| Model | Embedding | Framework | Description | Multi-GPU | TRT-LLM | NVIDIA AI Foundation | Triton | Vector Database |
-|-------|-----------|-----------|-------------|-----------|---------|----------------------|--------|-----------------|
-| llama-2 | e5-large-v2 | Llamaindex | QA chatbot | NO | YES | NO | YES | Milvus |
-| llama-2 | e5-large-v2 | Llamaindex | QA chatbot | NO | YES | NO | YES | pgvector |
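The two rows above differ only in the vector store. As a rough illustration of that choice (not the project's `utils.py` code), the same embedder and documents can back either Milvus or pgvector through LangChain; the connection settings and the `VECTOR_DB` switch below are assumptions, so take the real values from your compose environment.

```python
# Illustrative only: back the same embeddings with Milvus or pgvector.
import os
from langchain_community.embeddings import HuggingFaceEmbeddings
from langchain_community.vectorstores import Milvus, PGVector

embedder = HuggingFaceEmbeddings(model_name="intfloat/e5-large-v2")
texts = ["The Grace CPU Superchip has 144 Arm Neoverse V2 cores."]

backend = os.environ.get("VECTOR_DB", "milvus")  # hypothetical switch, not a project setting
if backend == "milvus":
    store = Milvus.from_texts(
        texts, embedder,
        collection_name="rag_demo",
        connection_args={"host": "localhost", "port": "19530"},  # assumed defaults
    )
else:
    store = PGVector.from_texts(
        texts, embedder,
        collection_name="rag_demo",
        connection_string="postgresql+psycopg2://postgres:password@localhost:5432/api",  # assumed
    )

print(store.similarity_search("How many cores does the Grace superchip contain?", k=1))
```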
- - -#### 2.1 Prepare the environment - -1. Install [Docker Engine and Docker Compose.](https://docs.docker.com/engine/install/ubuntu/) - -2. Verify NVIDIA GPU driver version 535 or later is installed. - - **Note**: This step is not required for Nvidia AI foundation workflow - -``` $ nvidia-smi --query-gpu=driver_version --format=csv,noheader -535.129.03 - -$ nvidia-smi -q -d compute - -==============NVSMI LOG============== - -Timestamp : Sun Nov 26 21:17:25 2023 -Driver Version : 535.129.03 -CUDA Version : 12.2 - -Attached GPUs : 1 -GPU 00000000:CA:00.0 - Compute Mode : Default -``` -Reference: [NVIDIA Container Toolkit](https://docs.nvidia.com/datacenter/cloud-native/container-toolkit/latest/install-guide.html) and [NVIDIA Linux driver installation instructions](https://docs.nvidia.com/datacenter/tesla/tesla-installation-notes/index.html) - -3. Clone the Generative AI examples Git repository. - -> ⚠️ **NOTE**: This example requires Git Large File Support (LFS) - -``` -sudo apt -y install git-lfs -git clone git@github.com:NVIDIA/GenerativeAIExamples.git -cd GenerativeAIExamples/ -git lfs pull -``` - -4. Verify the NVIDIA container toolkit is installed and configured as the default container runtime. - - **Note**: This step is not required for Nvidia AI foundation workflow - -``` -$ cat /etc/docker/daemon.json -{ - "default-runtime": "nvidia", - "runtimes": { - "nvidia": { - "path": "/usr/bin/nvidia-container-runtime", - "runtimeArgs": [] - } - } -} - -$ sudo docker run --rm --runtime=nvidia --gpus all ubuntu nvidia-smi -L -GPU 0: NVIDIA A100 80GB PCIe (UUID: GPU-d8ce95c1-12f7-3174-6395-e573163a2ace) -``` - -5. Create an NGC Account and API Key. - -Please refer to [instructions](https://docs.nvidia.com/ngc/gpu-cloud/ngc-overview/index.html) to create account and generate NGC API key. - -Login to `nvcr.io` using the following command: - -``` -docker login nvcr.io -``` - -6. [Optional] Enable Riva ASR and TTS. - - a. To launch a Riva server locally, please refer to the instructions in the [Riva Quick Start Guide](https://docs.nvidia.com/deeplearning/riva/user-guide/docs/quick-start-guide.html). - - - In the provided `config.sh` script, set `service_enabled_asr=true` and `service_enabled_tts=true`, and select the desired ASR and TTS languages by adding the appropriate language codes to `asr_language_code` and `tts_language_code`. - - - Once the server is running, assign its IP address (or hostname) and port (50051 by default) to `RIVA_API_URI` in `deploy/compose/compose.env`. - - b. Alternatively, you can use a hosted Riva API endpoint. You might need to obtain an API key and/or Function ID for access. - - - In `deploy/compose/compose.env`, make the following assignments as necessary: - ``` - export RIVA_API_URI=":" - export RIVA_API_KEY="" - export RIVA_FUNCTION_ID="" - ``` - -Reference: -- [Docker installation instructions (Ubuntu)](https://docs.docker.com/engine/install/ubuntu/) -- [NVIDIA Container Toolkit Installation instructions](https://docs.nvidia.com/datacenter/cloud-native/container-toolkit/latest/install-guide.html) - -#### 2.2 Deploy - -##### Downloading the model -You can download the model either from huggingface or meta. - -The steps mentioned here explains how to download from meta. If you are interested in downloading the model checkpoints from huggingface, follow the steps [here](../docs/rag/hf_model_download.md) instead. - -1. Clone the Llama Github. - -``` -git clone https://github.com/facebookresearch/llama.git -cd llama/ -``` - -2. 
Fill out Meta's [Llama request access form](https://ai.meta.com/resources/models-and-libraries/llama-downloads/). - -3. Download the model weights. - -- Select the Llama 2 and Llama Chat text boxes. -- After verifying your email, Meta will email you a download link. -- Download the llama-2-13b-chat model when prompted. - -``` -$ ./download.sh -Enter the URL from email: < https://download.llamameta.net/… etc> - -Enter the list of models to download without spaces (7B,13B,70B,7B-chat,13B-chat,70B-chat), or press Enter for all: 13B-chat -``` - -4. Copy the tokenizer to the model directory. - -``` -$ mv tokenizer* llama-2-13b-chat/ - -$ ls ~/git/llama/llama-2-13b-chat/ -checklist.chk consolidated.00.pth consolidated.01.pth params.json tokenizer.model tokenizer_checklist.chk -``` - -##### Deploying the model - -1. Set the absolute path to the model location in compose.env. - -``` -$ cd ~/git/GenerativeAIExamples - -$ grep MODEL deploy/compose/compose.env | grep -v \# -export MODEL_DIRECTORY="/home/nvidia/git/llama/llama-2-13b-chat/" -export MODEL_ARCHITECTURE="llama" -export MODEL_NAME="Llama-2-13b-chat" -``` - -2. Deploy the developer RAG example via Docker compose using milvus vector store, steps to deploy RAG example with pgvector vector store is [here](#deploying-with-pgvector-vector-store). - -> ⚠️ **NOTE**: It may take up to 5 minutes for the Triton server to start. The `-d` flag starts the services in the background. - -``` -$ source deploy/compose/compose.env; docker compose -f deploy/compose/docker-compose.yaml build - -$ docker compose -f deploy/compose/docker-compose.yaml up -d - -$ docker ps --format "table {{.ID}}\t{{.Names}}\t{{.Status}}" -CONTAINER ID NAMES STATUS -256da0ecdb7b llm-playground Up 48 minutes -2974aa4fb2ce chain-server Up 48 minutes -4a8c4aebe4ad notebook-server Up 48 minutes -5be2b57bb5c1 milvus-standalone Up 48 minutes (healthy) -ecf674c8139c llm-inference-server Up 48 minutes (healthy) -a6609c22c171 milvus-minio Up 48 minutes (healthy) -b23c0858c4d4 milvus-etcd Up 48 minutes (healthy) -``` - -Reference: -- [Meta Llama README](https://github.com/facebookresearch/llama/blob/main/README.md) -- [Meta Llama request access form](https://ai.meta.com/resources/models-and-libraries/llama-downloads/) - -#### 2.3 Test - -1. Connect to the sample web application at ``http://host-ip:8090``. - -2. Check **[X] Enable TTS output** to allow the web app to read the answers to your queries aloud. - -3. Select the desired ASR language (`English (en-US)` for this test), TTS language (`English (en-US)` for this test) and TTS voice from the dropdown menus below the checkboxes to utilize the web app's voice-to-voice interaction capabilities. - -4. In the Converse tab, type "How many cores does the Grace superchip contain?" in the chat box and press Submit. Alternatively, click on the microphone button to the right of the text box and ask your query verbally. - -![Grace query failure](../notebooks/imgs/grace_noanswer_with_riva.png) - -5. If you encounter an error message reading "Media devices could not be accessed" when you first attempt to transcribe a voice query, - -![Media device access error](../notebooks/imgs/media_device_access_error.png) - -carry out the following steps: - - - Open ``chrome://flags`` in another browser tab. - - - Search for "insecure origins treated as secure". - - - Copy ``http://host-ip:8090`` into the associated text box. - - - Select "Enabled" in the adjacent dropdown menu. - - - Click on the "Relaunch" button at the bottom right of the page. 
- - - Grant ``http://host-ip:8090`` access to your microphone. - -![Fix media device access error in Chrome Flags](../notebooks/imgs/chrome_flags_fix_media_device_access_error.png) - -6. Upload the sample data set to the Knowledge Base tab. - -> ⚠️ **NOTE**: ``dataset.zip`` is located in the ``notebooks`` directory. Unzip the archive and upload the PDFs. - -> There is a timeout of `10 mins` set for the ingestion process. Uploading large files may see ingestion failure depending on network bandwidth. - -7. Return to **Converse** tab and check **[X] Use knowledge base**. - -8. Retype (or re-transcribe) the question: "How many cores does the Grace superchip contain?" - -![Grace query success](../notebooks/imgs/grace_answer_with_riva.png) - -> ⚠️ **NOTE**: Default prompts are optimized for llama chat model if you're using completion model then prompts need to be finetuned accordingly. - -#### Learn More - -Execute the Jupyter notebooks to explore optional features. - -Note: Jupyter notebook is supported for [default flow](../deploy/compose/docker-compose.yaml) i.e. trt-llm with milvus. -1. In a web browser, open Jupyter at ``http://host-ip:8888``. - -2. Execute the notebooks in order: - -- [Enable streaming responses from the LLM](../notebooks/01-llm-streaming-client.ipynb) -- [Document QA with LangChain](../notebooks/02_langchain_simple.ipynb) -- [Document QA with LlamaIndex](../notebooks/03_llama_index_simple.ipynb) -- [Advanced Document QA with LlamaIndex](../notebooks/04_llamaindex_hier_node_parser.ipynb) -- [Document QA via REST FastAPI Server](../notebooks/05_dataloader.ipynb) - -#### 2.4 Uninstall - -To uninstall, stop and remove the running containers. - -``` -cd deploy/compose -source compose.env -docker compose down -docker compose ps -q -``` - -#### Deploying with [pgvector](https://github.com/pgvector/pgvector) vector store -2. Deploy the developer RAG example via Docker compose. - -> ⚠️ **NOTE**: It may take up to 5 minutes for the Triton server to start. The `-d` flag starts the services in the background. - -``` -$ source deploy/compose/compose.env; docker compose -f deploy/compose/docker-compose-pgvector.yaml build - -$ docker compose -f deploy/compose/docker-compose-pgvector.yaml up -d - -$ docker ps --format "table {{.ID}}\t{{.Names}}\t{{.Status}}" -CONTAINER ID NAMES STATUS -0f6f091d892e llm-playground Up 22 hours -8d0ab09fcb98 chain-server Up 22 hours -85bd98ba3b24 notebook-server Up 22 hours -22f0d405b38b llm-inference-server Up 22 hours (healthy) -cbd3cf65ce7e pgvector Up 22 hours -``` - -After deployment is successful, you can follow steps from [Test](#23-test) to verify workflow. - -
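The web UI drives the chain server over REST, and the same endpoints can be called directly. The sketch below uses the endpoint names and request fields from the version of `RetrievalAugmentedGeneration/common/server.py` that accompanied this README (`/uploadDocument`, `/generate`, `/documentSearch`); the chain-server address and the sample file path are assumptions, so substitute the values from your deployment.

```python
# Rough sketch of exercising the chain server REST API directly.
import requests

CHAIN_SERVER = "http://localhost:8081"  # assumption: use your chain-server address/port

# 1. Ingest a document into the vector store.
with open("dataset/sample.pdf", "rb") as f:  # placeholder path
    resp = requests.post(f"{CHAIN_SERVER}/uploadDocument", files={"file": f}, timeout=600)
    print(resp.status_code, resp.json())

# 2. Ask a question against the knowledge base; the answer is streamed back.
prompt = {
    "question": "How many cores does the Grace superchip contain?",
    "context": "",
    "use_knowledge_base": True,
    "num_tokens": 256,
}
with requests.post(f"{CHAIN_SERVER}/generate", json=prompt, stream=True, timeout=120) as resp:
    for chunk in resp.iter_content(chunk_size=None, decode_unicode=True):
        print(chunk, end="")

# 3. Inspect which chunks the retriever would return for a query.
search = {"content": "Grace superchip cores", "num_docs": 4}
print(requests.post(f"{CHAIN_SERVER}/documentSearch", json=search, timeout=60).json())
```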
- -### 3: QA Chatbot Multi-GPU -- A100/H100/L40S - -This example deploys a developer RAG pipeline for chat QA and serves inference via the NeMo Framework inference container across multiple GPUs. - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
-| Model | Embedding | Framework | Description | Multi-GPU | TRT-LLM | NVIDIA AI Foundation | Triton | Vector Database |
-|-------|-----------|-----------|-------------|-----------|---------|----------------------|--------|-----------------|
-| llama-2 | e5-large-v2 | Llamaindex | QA chatbot | YES | YES | NO | YES | Milvus |
- -#### 3.1 Prepare the environment - -1. Follow the steps in the ["Prepare the environment" section of example 02](#21-prepare-the-environment). - -#### 3.2 Deploy - -1. Follow steps 1 - 4 in the ["Deploy" section of example 02](#downloading-the-model) to stage the model weights. - -2. Find the GPU device ID. You can check this using `nvidia-smi` command. - -3. Assign LLM inference to specific GPUs by specifying the GPU ID(s) in the [docker compose file](../deploy/compose/docker-compose.yaml). - -``` - deploy: - resources: - reservations: - devices: - - driver: nvidia - # count: ${INFERENCE_GPU_COUNT:-all} # Comment this out - device_ids: ["0"] - capabilities: [gpu] -``` - -4. Follow steps in the ["Deploy the model" section of example 02](#deploying-the-model) to deploy via Docker compose. - -#### 3.3 Test - -1. Follow steps 1 - 5 in the ["Test" section of example 02](#23-test). - -2. Verify the correct GPU is serving the model using `nvidia-smi`. - -#### 3.4 Uninstall - -1. To unintstall, follow the ["Uninstall" steps in example 02"](#24-uninstall). - -
- - -### 4: QA Chatbot with Quantized LLM model -- A100/H100/L40S - -This example deploys a developer RAG pipeline for chat QA and serves inference via the NeMo Framework inference container across multiple GPUs using a quantized version of Llama-7b-chat model. - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
-| Model | Embedding | Framework | Description | Multi-GPU | TRT-LLM | NVIDIA AI Foundation | Triton | Vector Database |
-|-------|-----------|-----------|-------------|-----------|---------|----------------------|--------|-----------------|
-| llama-2-7b-chat | e5-large-v2 | Llamaindex | QA chatbot | YES | YES | NO | YES | Milvus |
- -#### 4.1 Prepare the environment - -1. Follow the steps in the ["Prepare the environment" section of example 02](#21-prepare-the-environment). - - -#### 4.2 Deploy -1. [Download Llama2-7b chat Chat Model Weights](#downloading-the-model) from huggingface as meta checkpoint does not have the required files to quantize it. - -> ⚠️ **NOTE**: For this initial version only 7B chat model is supported on A100/H100/L40 GPUs. - - -1. For quantization of the Llama2 model using AWQ, first clone the [TensorRT-LLM](https://github.com/NVIDIA/TensorRT-LLM/tree/release/0.5.0) repository separately and checkout release/v0.5.0. - - - Also copy the Llama2 model directory downloaded earlier to the TensorRT-LLM repo - -``` - git clone https://github.com/NVIDIA/TensorRT-LLM.git - cp -r TensorRT-LLM/ - cd TensorRT-LLM/ - git checkout release/0.5.0 -``` - -3. Now setup the TensorRT-LLM repo seprately using steps [here](https://github.com/NVIDIA/TensorRT-LLM/blob/release/0.5.0/docs/source/installation.md) - -4. Once the model is downloaded and TensorRT-LLM repo is setup, we can quantize the model using the TensorRT-LLM container. - - - Follow the steps from [here](https://github.com/NVIDIA/TensorRT-LLM/tree/v0.5.0/examples/llama#awq) to quantize using AWQ, run these commands inside the container. - - - While running the quantization script, make sure to point `--model_dir` to your downloaded Llama2 model directory - - - Once the quantization is completed, copy the generated PyTorch (.pt) file inside the model directory - - ``` - cp .pt - ``` - -5. Now, we will come back our repository, follow the steps below to deploy this quantized model using the inference server. - - - Update [compose.env](../deploy/compose/compose.env) with `MODEL_DIRECTORY` pointing to Llama2 model directory containing the quantized checkpoint. - - - Make sure the qantized PyTorch model (.pt) file generated using above steps is present inside the MODEL_DIRECTORY. - - - - Uncomment the QUANTIZATION variable which specifies quantization as "int4_awq" inside the [compose.env](../deploy/compose/compose.env). - ``` - export QUANTIZATION="int4_awq" - ``` - -6. Deploy the developer RAG example via Docker compose. - -> ⚠️ **NOTE**: It may take up to 5 minutes for the Triton server to start. The `-d` flag starts the services in the background. - -``` -$ source deploy/compose/compose.env; docker compose -f deploy/compose/docker-compose.yaml build - -$ docker compose -f deploy/compose/docker-compose.yaml up -d - -$ docker ps --format "table {{.ID}}\t{{.Names}}\t{{.Status}}" -CONTAINER ID NAMES STATUS -256da0ecdb7b llm-playground Up 48 minutes -2974aa4fb2ce chain-server Up 48 minutes -4a8c4aebe4ad notebook-server Up 48 minutes -5be2b57bb5c1 milvus-standalone Up 48 minutes (healthy) -ecf674c8139c llm-inference-server Up 48 minutes (healthy) -a6609c22c171 milvus-minio Up 48 minutes (healthy) -b23c0858c4d4 milvus-etcd Up 48 minutes (healthy) -``` - -#### 4.3 Test - -1. Follow steps 1 - 5 in the ["Test" section of example 02](#23-test). - -#### 4.4 Uninstall - -1. To uninstall, follow the ["Uninstall" steps in example 02"](#24-uninstall). - -
- -### 5: QA Chatbot with Task Decomposition example -- A100/H100/L40S - -This example deploys a recursive Task Decomposition example for chat QA. It uses the llama2-70b chat model (via the NVIDIA AI Foundation endpoint) for inference. - -It showcases how to perform RAG when the agent needs to access information from several different files/chunks or perform some computation on the answers. It uses a custom langchain agent that recursively breaks down the user's questions into subquestions that it attempts to answer. It has access to 2 tools - search (which performs standard RAG on a subquestion) and math (which poses a math question to the LLM). The agent continues to break down the question into sub-questions until it has the answers it needs to formulate the final answer. - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
-| Model | Embedding | Framework | Description | Multi-GPU | TRT-LLM | NVIDIA AI Foundation | Triton | Vector Database |
-|-------|-----------|-----------|-------------|-----------|---------|----------------------|--------|-----------------|
-| llama2_70b | nvolveqa_40k | Langchain | QA chatbot | NO | NO | YES | NO | FAISS |
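The shipped example implements this with a custom LangChain agent. The snippet below is only a compact illustration of the recursive decomposition idea with stand-in `search` and `math` tools; the prompts, stopping rule, and omitted retrieval call are placeholders, not the example's actual logic.

```python
# Toy sketch of recursive task decomposition with two tools (illustrative only).
from langchain_nvidia_ai_endpoints import ChatNVIDIA

llm = ChatNVIDIA(model="llama2_70b")  # model name from the table above

def search_tool(question: str) -> str:
    """Standard RAG over a single sub-question (retrieval call omitted here)."""
    context = ""  # placeholder for retrieved chunks
    return llm.invoke(f"Context:\n{context}\n\nAnswer concisely: {question}").content

def math_tool(task: str) -> str:
    """Pose an arithmetic sub-task to the LLM."""
    return llm.invoke(f"Compute and return only the result: {task}").content

def answer(question: str, depth: int = 0, max_depth: int = 3) -> str:
    """Recursively split a question into sub-questions until each is answerable."""
    if depth >= max_depth:
        return search_tool(question)
    plan = llm.invoke(
        "If the question below needs several facts or a calculation, list the "
        "sub-questions one per line; otherwise reply DIRECT.\n" + question
    ).content
    if plan.strip() == "DIRECT":
        return search_tool(question)
    facts = [answer(sub, depth + 1) for sub in plan.splitlines() if sub.strip()]
    return math_tool(f"Given these facts: {facts}, answer: {question}")

print(answer("Which is greater - Q4 2023 datacenter revenue, or the sum of Q3 2023 datacenter and gaming revenues?"))
```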
- -#### 5.1 Prepare the environment - -1. Follow the steps in the ["Prepare the environment" section of example 02](#21-prepare-the-environment). - - -#### 5.2 Deploy - -1. Follow the ["Deploy" section of example 01](#downloading-the-model) to setup your API key - -2. Change the RAG example in `deploy/compose/compose.env`. - ```shell - export RAG_EXAMPLE="query_decomposition_rag" - ``` - -3. Change the LLM in `deploy/compose/docker-compose-nv-ai-foundation.yaml` to `llama2_70b`. - ```yaml - query: - container_name: chain-server - ... - environment: - APP_LLM_MODELNAME: llama2_70b - ... - ``` - -4. Deploy the Query Decomposition RAG example via Docker compose. - -``` -$ source deploy/compose/compose.env; docker compose -f deploy/compose/docker-compose-nv-ai-foundation.yaml build - -$ docker compose -f deploy/compose/docker-compose-nv-ai-foundation.yaml up -d - -$ docker ps --format "table {{.ID}}\t{{.Names}}\t{{.Status}}" -CONTAINER ID NAMES STATUS -256da0ecdb7b llm-playground Up 48 minutes -2974aa4fb2ce chain-server Up 48 minutes -``` - -#### 5.3 Test - -1. Connect to the sample web application at ``http://host-ip:8090``. - -2. Upload 2 text documents in the Knowledge Base tab. The documents can contain different information - for example, one document can contain a company's revenue analysis for Q3 2023 and the other can contain a similar analysis for Q4 2023. - -3. Return to the **Converse** tab and check **[X] Use knowledge base**. - -4. Enter the question: "Which is greater - NVIDIA's datacenter revenue for Q4 2023 or the sum of its datacenter and gaming revenues for Q3 2023?" and hit submit to get the answer. - -#### 5.4 Uninstall - -1. To uninstall, follow the ["Uninstall" steps in example 02"](#24-uninstall). - -
- -### 6: QA Chatbot -- NemoTron Model - -This example deploys a developer RAG pipeline for chat QA and serves inference via the NeMo Framework inference container using NeMoTron model and showcases inference using sample notebook. - - -#### 6.1 Prepare the environment - -1. Follow the steps in the ["Prepare the environment" section of example 02](#21-prepare-the-environment). - -> ⚠️ **NOTE**: This example requires at least 100GB of GPU memory or two A100 GPUs for locally deploying the nemotron model. - - -#### 6.2 Deploy - -1. Download [NeMoTron chat checkpoint](https://huggingface.co/nvidia/nemotron-3-8b-chat-4k-sft) from HuggingFace - -``` -git-lfs clone https://huggingface.co/nvidia/nemotron-3-8b-chat-4k-sft -``` - -2. Make sure the absolute model path of nemotron-3-8b-chat-4k-sft model is updated in `/GenerativeAIExamples/deploy/compose/compose.env`. Set the below values in `compose.env` file. - -``` -export MODEL_DIRECTORY="/home/nvidia/nemotron-3-8b-chat-4k-sft" # Example path -export MODEL_ARCHITECTURE="gptnext" -export MODEL_NAME="nemotron-3-8b-chat-4k-sft" -``` - -3. Build and deploy the nemotron workflow - -``` -source deploy/compose/compose.env -docker compose -f deploy/compose/docker-compose-nemotron.yaml build -docker compose -f deploy/compose/docker-compose-nemotron.yaml up -d -``` -4. Check the deployment status by printing logs of `llm-inference-server` container - -Successful TRT-LLM conversion and Triton Inference Server deployment logs will display the following message -``` -I0107 03:03:38.638311 260 http_server.cc:3558] Started HTTPService at 0.0.0.0:8000 -I0107 03:03:38.679626 260 http_server.cc:187] Started Metrics Service at 0.0.0.0:8002 -``` - -#### 6.3 Test - -1. Run `02_langchain_simple.ipynb` for Document Question-Answering with LangChain based using NeMoTron model. - -[Optional] Run `00-llm-non-streaming-nemotron.ipynb` to send request to LLM. - -> ⚠️ **NOTE**: -- Nemotron models do not support streaming in this release. - -
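Before opening the notebooks, it can help to confirm that the Triton HTTP service reported in the logs above is reachable. A small probe, assuming the default `0.0.0.0:8000` binding from the log lines:

```python
# Readiness probe for the Triton HTTP service started by llm-inference-server.
# The URL is an assumption based on the "Started HTTPService at 0.0.0.0:8000" log line.
import requests

base = "http://localhost:8000"
ready = requests.get(f"{base}/v2/health/ready", timeout=10)
print("Triton ready:", ready.status_code == 200)
print("Server metadata:", requests.get(f"{base}/v2", timeout=10).json())
```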
- -### Learn More - -To deep dive into different components and workflow used by the examples, please refer to the [Developer Guide.](../docs/README.md) diff --git a/RetrievalAugmentedGeneration/common/configuration.py b/RetrievalAugmentedGeneration/common/configuration.py index bd80de789..eab991efe 100644 --- a/RetrievalAugmentedGeneration/common/configuration.py +++ b/RetrievalAugmentedGeneration/common/configuration.py @@ -70,7 +70,11 @@ class LLMConfig(ConfigWizard): default="triton-trt-llm", help_txt="The server type of the hosted model. Allowed values are triton-trt-llm and nemo-infer", ) - + model_name_pandas_ai: str = configfield( + "model_name_pandas_ai", + default="ai-mixtral-8x7b-instruct", + help_txt="The name of the ai catalog model to be used with PandasAI agent", + ) @configclass class TextSplitterConfig(ConfigWizard): @@ -80,6 +84,11 @@ class TextSplitterConfig(ConfigWizard): :cvar chunk_overlap: Text overlap in text splitter. """ + model_name: str = configfield( + "model_name", + default="intfloat/e5-large-v2", + help_txt="The name of Sentence Transformer model used for SentenceTransformer TextSplitter.", + ) chunk_size: int = configfield( "chunk_size", default=510, @@ -121,12 +130,33 @@ class EmbeddingConfig(ConfigWizard): ) +@configclass +class RetrieverConfig(ConfigWizard): + """Configuration class for the Retrieval pipeline. + + :cvar top_k: Number of relevant results to retrieve. + :cvar score_threshold: The minimum confidence score for the retrieved values to be considered. + """ + + top_k: int = configfield( + "top_k", + default=4, + help_txt="Number of relevant results to retrieve", + ) + score_threshold: float = configfield( + "score_threshold", + default=0.25, + help_txt="The minimum confidence score for the retrieved values to be considered", + ) + + @configclass class PromptsConfig(ConfigWizard): """Configuration class for the Prompts. :cvar chat_template: Prompt template for chat. :cvar rag_template: Prompt template for rag. + :cvar multi_turn_rag_template: Prompt template for multi-turn rag. """ chat_template: str = configfield( @@ -153,6 +183,18 @@ class PromptsConfig(ConfigWizard): ), help_txt="Prompt template for rag.", ) + multi_turn_rag_template: str = configfield( + "multi_turn_rag_template", + default=( + "You are a document chatbot. Help the user as they ask questions about documents." + " User message just asked: {input}\n\n" + " For this, we have retrieved the following potentially-useful info: " + " Conversation History Retrieved:\n{history}\n\n" + " Document Retrieved:\n{context}\n\n" + " Answer only from retrieved data. Make your response conversational." 
+ ), + help_txt="Prompt template for rag.", + ) @configclass @@ -195,6 +237,12 @@ class AppConfig(ConfigWizard): help_txt="The configuration of embedding model.", default=EmbeddingConfig(), ) + retriever: RetrieverConfig = configfield( + "retriever", + env=False, + help_txt="The configuration of the retriever pipeline.", + default=RetrieverConfig(), + ) prompts: PromptsConfig = configfield( "prompts", env=False, diff --git a/RetrievalAugmentedGeneration/common/server.py b/RetrievalAugmentedGeneration/common/server.py index 01f7021b5..26ed50a58 100644 --- a/RetrievalAugmentedGeneration/common/server.py +++ b/RetrievalAugmentedGeneration/common/server.py @@ -16,6 +16,8 @@ """The definition of the Llama Index chain server.""" import base64 import os +import json +from uuid import uuid4 import shutil import logging from pathlib import Path @@ -25,7 +27,8 @@ from fastapi import FastAPI, File, UploadFile, Request from fastapi.responses import JSONResponse, StreamingResponse -from pydantic import BaseModel, Field +from fastapi.middleware.cors import CORSMiddleware +from pydantic import BaseModel, Field, validator, constr from pymilvus.exceptions import MilvusException, MilvusUnavailableException from RetrievalAugmentedGeneration.common import utils, tracing @@ -35,22 +38,73 @@ # create the FastAPI server app = FastAPI() +# Allow access in browser from RAG UI and Storybook (development) +origins = [ + "*" +] +app.add_middleware( + CORSMiddleware, + allow_origins=origins, + allow_credentials=False, + allow_methods=["*"], + allow_headers=["*"], +) + EXAMPLE_DIR = "RetrievalAugmentedGeneration/example" -class Prompt(BaseModel): - """Definition of the Prompt API data type.""" +class Message(BaseModel): + """Definition of the Chat Message type.""" + role: str = Field(description="Role for a message AI, User and System", default="user", max_length=256) + content: str = Field(description="The input query/prompt to the pipeline.", default="I am going to Paris, what should I see?", max_length=131072) - question: str = Field(description="The input query/prompt to the pipeline.") - context: str = Field(description="Additional context for the question (optional)") - use_knowledge_base: bool = Field(description="Whether to use a knowledge base", default=True) - num_tokens: int = Field(description="The maximum number of tokens in the response.", default=50) + @validator('role') + def validate_role(cls, value): + valid_roles = {'user', 'assistant', 'system'} + if value.lower() not in valid_roles: + raise ValueError("Role must be one of 'user', 'assistant', or 'system'") + return value.lower() +class Prompt(BaseModel): + """Definition of the Prompt API data type.""" + messages: List[Message] = Field(..., description="A list of messages comprising the conversation so far. The roles of the messages must be alternating between user and assistant. The last input message should have role user. A message with the the system role is optional, and must be the very first message if it is present.", max_items=50000) + use_knowledge_base: bool = Field(..., description="Whether to use a knowledge base") + temperature: float = Field(0.2, description="The sampling temperature to use for text generation. The higher the temperature value is, the less deterministic the output text will be. It is not recommended to modify both temperature and top_p in the same call.", ge=0.1, le=1.0) + top_p: float = Field(0.7, description="The top-p sampling mass used for text generation. 
The top-p value determines the probability mass that is sampled at sampling time. For example, if top_p = 0.2, only the most likely tokens (summing to 0.2 cumulative probability) will be sampled. It is not recommended to modify both temperature and top_p in the same call.", ge=0.1, le=1.0) + max_tokens: int = Field(1024, description="The maximum number of tokens to generate in any given call. Note that the model is not aware of this value, and generation will simply stop at the number of tokens specified.", ge=0, le=1024) + # seed: int = Field(42, description="If specified, our system will make a best effort to sample deterministically, such that repeated requests with the same seed and parameters should return the same result.") + # bad: List[str] = Field(None, description="A word or list of words not to use. The words are case sensitive.") + stop: List[constr(max_length=256)] = Field(None, description="A string or a list of strings where the API will stop generating further tokens. The returned text will not contain the stop sequence.", max_items=256) + # stream: bool = Field(True, description="If set, partial message deltas will be sent. Tokens will be sent as data-only server-sent events (SSE) as they become available (JSON responses are prefixed by data:), with the stream terminated by a data: [DONE] message.") + +class ChainResponseChoices(BaseModel): + """ Definition of Chain response choices""" + index: int = Field(default=0, ge=0, le=256) + message: Message = Field(default=Message(role="assistant", content="")) + finish_reason: str = Field(default="", max_length=4096) +class ChainResponse(BaseModel): + """Definition of Chain APIs resopnse data type""" + id: str = Field(default="", max_length=100000) + choices: List[ChainResponseChoices] = Field(default=[], max_items=256) class DocumentSearch(BaseModel): """Definition of the DocumentSearch API data type.""" - content: str = Field(description="The content or keywords to search for within documents.") - num_docs: int = Field(description="The maximum number of documents to return in the response.", default=4) + query: str = Field(description="The content or keywords to search for within documents.", max_length=131072) + top_k: int = Field(description="The maximum number of documents to return in the response.", default=4, ge=0, le=256) + +class DocumentChunk(BaseModel): + """Represents a chunk of a document.""" + content: str = Field(..., description="The content of the document chunk.", max_length=131072) + filename: str = Field(..., description="The name of the file the chunk belongs to.", max_length=4096) + score: float = Field(..., description="The relevance score of the chunk.") + +class DocumentSearchResponse(BaseModel): + """Represents a response from a document search.""" + chunks: List[DocumentChunk] = Field(..., description="List of document chunks.", max_items=256) + +class DocumentsResponse(BaseModel): + """Represents the response containing a list of documents.""" + documents: List[constr(max_length=131072)] = Field(..., description="List of filenames.", max_items=1000000) @app.on_event("startup") @@ -86,7 +140,7 @@ def import_example() -> None: raise NotImplementedError(f"Could not find a valid example class in {EXAMPLE_DIR}") -@app.post("/uploadDocument") +@app.post("/documents") @tracing.instrumentation_wrapper async def upload_document(request: Request, file: UploadFile = File(...)) -> JSONResponse: """Upload a document to the vector store.""" @@ -112,48 +166,151 @@ async def upload_document(request: Request, file: 
UploadFile = File(...)) -> JSO ) except Exception as e: - logger.error("Error from /uploadDocument endpoint. Ingestion of file: " + file.filename + " failed with error: " + str(e)) + logger.error("Error from POST /documents endpoint. Ingestion of file: " + file.filename + " failed with error: " + str(e)) return JSONResponse( content={"message": str(e)}, status_code=500 ) -@app.post("/generate") +@app.post("/generate", response_model=ChainResponse) @tracing.instrumentation_wrapper async def generate_answer(request: Request, prompt: Prompt) -> StreamingResponse: """Generate and stream the response to the provided prompt.""" + chat_history = prompt.messages + # The last user message will be the query for the rag or llm chain + last_user_message = next((message.content for message in reversed(chat_history) if message.role == 'user'), None) + + # Find and remove the last user message if present + for i in reversed(range(len(chat_history))): + if chat_history[i].role == 'user': + del chat_history[i] + break # Remove only the last user message + + # All the other information from the prompt like the temperature, top_p etc., are llm_settings + llm_settings = { + key: value + for key, value in vars(prompt).items() + if key not in ['messages', 'use_knowledge_base'] + } try: example = app.example() + generator = None if prompt.use_knowledge_base: logger.info("Knowledge base is enabled. Using rag chain for response generation.") - generator = example.rag_chain(prompt.question, prompt.num_tokens) - return StreamingResponse(generator, media_type="text/event-stream") - - generator = example.llm_chain(prompt.context, prompt.question, prompt.num_tokens) - return StreamingResponse(generator, media_type="text/event-stream") + generator = example.rag_chain(query=last_user_message, chat_history=chat_history, **llm_settings) + + else: + generator = example.llm_chain(query=last_user_message, chat_history=chat_history, **llm_settings) + + def response_generator(): + resp_id = str(uuid4()) + if generator: + for chunk in generator: + chain_response = ChainResponse() + response_choice = ChainResponseChoices( + index=0, + message=Message( + role="assistant", + content=chunk + ) + ) + chain_response.id = resp_id + chain_response.choices.append(response_choice) + yield "data: " + str(chain_response.json()) + "\n\n" + chain_response = ChainResponse() + response_choice = ChainResponseChoices(finish_reason="[DONE]") + chain_response.id = resp_id + chain_response.choices.append(response_choice) + yield "data: " + str(chain_response.json()) + "\n\n" + else: + chain_response = ChainResponse() + yield "data: " + str(chain_response.json()) + "\n\n" + + return StreamingResponse(response_generator(), media_type="text/event-stream") except (MilvusException, MilvusUnavailableException) as e: + exception_msg = "Error from milvus server. Please ensure you have ingested some documents. Please check chain-server logs for more details." + chain_response = ChainResponse() + response_choice = ChainResponseChoices( + index=0, + message=Message( + role="assistant", + content=exception_msg + ), + finish_reason="[DONE]" + ) + chain_response.choices.append(response_choice) logger.error(f"Error from Milvus database in /generate endpoint. Please ensure you have ingested some documents. Error details: {e}") - return StreamingResponse(iter(["Error from milvus server. Please ensure you have ingested some documents. 
Please check chain-server logs for more details."]), media_type="text/event-stream") + return StreamingResponse(iter(["data: " + str(chain_response.json()) + "\n\n"]), media_type="text/event-stream") except Exception as e: + exception_msg = "Error from chain server. Please check chain-server logs for more details." + chain_response = ChainResponse() + response_choice = ChainResponseChoices( + index=0, + message=Message( + role="assistant", + content=exception_msg + ), + finish_reason="[DONE]" + ) + chain_response.choices.append(response_choice) logger.error(f"Error from /generate endpoint. Error details: {e}") - return StreamingResponse(iter(["Error from chain server. Please check chain-server logs for more details."]), media_type="text/event-stream") + return StreamingResponse(iter(["data: " + str(chain_response.json()) + "\n\n"]), media_type="text/event-stream") -@app.post("/documentSearch") +@app.post("/search", response_model=DocumentSearchResponse) @tracing.instrumentation_wrapper -async def document_search(request: Request,data: DocumentSearch) -> List[Dict[str, Any]]: +async def document_search(request: Request,data: DocumentSearch) -> Dict[str, List[Dict[str, Any]]]: """Search for the most relevant documents for the given search parameters.""" try: example = app.example() if hasattr(example, "document_search") and callable(example.document_search): - return example.document_search(data.content, data.num_docs) - + search_result = example.document_search(data.query, data.top_k) + chunks = [] + for entry in search_result: + content = entry.get("content", "") # Default to empty string if "content" key doesn't exist + source = entry.get("source", "") # Default to empty string if "source" key doesn't exist + score = entry.get("score", 0.0) # Default to 0.0 if "score" key doesn't exist + chunk = DocumentChunk(content=content, filename=source, document_id="", score=score) + chunks.append(chunk) + return DocumentSearchResponse(chunks=chunks) raise NotImplementedError("Example class has not implemented the document_search method.") except Exception as e: - logger.error(f"Error from /documentSearch endpoint. Error details: {e}") - return [] \ No newline at end of file + logger.error(f"Error from POST /search endpoint. Error details: {e}") + return DocumentSearchResponse(chunks=[]) + +@app.get("/documents", response_model=DocumentsResponse) +@tracing.instrumentation_wrapper +async def get_documents(request: Request) -> DocumentsResponse: + """List available documents.""" + try: + example = app.example() + if hasattr(example, "get_documents") and callable(example.get_documents): + documents = example.get_documents() + return DocumentsResponse(documents=documents) + else: + raise NotImplementedError("Example class has not implemented the get_documents method.") + + except Exception as e: + logger.error(f"Error from GET /documents endpoint. 
Error details: {e}") + return JSONResponse(content={"message": "Error occurred while fetching documents."}, status_code=500) + +@app.delete("/documents") +@tracing.instrumentation_wrapper +async def delete_document(request: Request, filename: str) -> JSONResponse: + """Delete a document.""" + try: + example = app.example() + if hasattr(example, "delete_documents") and callable(example.delete_documents): + example.delete_documents([filename]) + return JSONResponse(content={"message": f"Document {filename} deleted successfully"}, status_code=200) + + raise NotImplementedError("Example class has not implemented the delete_document method.") + + except Exception as e: + logger.error(f"Error from DELETE /documents endpoint. Error details: {e}") + return JSONResponse(content={"message": f"Error deleting document {filename}"}, status_code=500) diff --git a/RetrievalAugmentedGeneration/common/utils.py b/RetrievalAugmentedGeneration/common/utils.py index b47aa965d..09c49cb18 100644 --- a/RetrievalAugmentedGeneration/common/utils.py +++ b/RetrievalAugmentedGeneration/common/utils.py @@ -17,9 +17,9 @@ import os import base64 import logging -from functools import lru_cache +from functools import lru_cache, wraps from urllib.parse import urlparse -from typing import TYPE_CHECKING, List, Optional +from typing import TYPE_CHECKING, Callable, List, Optional logger = logging.getLogger(__name__) @@ -72,7 +72,6 @@ logger.error(f"Langchain community import failed with error: {e}") try: - from langchain_nvidia_ai_endpoints import ChatNVIDIA, NVIDIAEmbeddings from langchain_community.chat_models import ChatOpenAI except Exception as e: logger.error(f"NVIDIA AI connector import failed with error: {e}") @@ -89,7 +88,6 @@ from RetrievalAugmentedGeneration.common.configuration_wizard import ConfigWizard DEFAULT_MAX_CONTEXT = 1500 -DEFAULT_NUM_TOKENS = 150 TEXT_SPLITTER_EMBEDDING_MODEL = "intfloat/e5-large-v2" @@ -117,11 +115,21 @@ def _postprocess_nodes( return included_nodes - +def utils_cache(func: Callable) -> Callable: + """Use this to convert unhashable args to hashable ones""" + @wraps(func) + def wrapper(*args, **kwargs): + # Convert unhashable args to hashable ones + args_hashable = tuple(tuple(arg) if isinstance(arg, (list, dict, set)) else arg for arg in args) + kwargs_hashable = {key: tuple(value) if isinstance(value, (list, dict, set)) else value for key, value in kwargs.items()} + return func(*args_hashable, **kwargs_hashable) + return wrapper + +@utils_cache @lru_cache -def set_service_context() -> None: +def set_service_context(**kwargs) -> None: """Set the global service context.""" - llm = LangChainLLM(get_llm()) + llm = LangChainLLM(get_llm(**kwargs)) embedding = LangchainEmbedding(get_embedding_model()) service_context = ServiceContext.from_defaults( llm=llm, embed_model=embedding @@ -186,7 +194,8 @@ def get_vector_index(collection_name: str = "") -> VectorStoreIndex: overwrite=False) else: raise RuntimeError("Unable to find any supported Vector Store DB. 
Supported engines are milvus and pgvector.") - return VectorStoreIndex.from_vector_store(vector_store) + vector_store_index = VectorStoreIndex.from_vector_store(vector_store) + return vector_store_index def get_vectorstore_langchain(documents, document_embedder, collection_name: str = "") -> VectorStore: @@ -232,8 +241,9 @@ def get_doc_retriever(num_nodes: int = 4) -> "BaseRetriever": return index.as_retriever(similarity_top_k=num_nodes) -@lru_cache -def get_llm() -> LLM | SimpleChatModel: +@utils_cache +@lru_cache() +def get_llm(**kwargs) -> LLM | SimpleChatModel: """Create the LLM connection.""" settings = get_config() @@ -242,28 +252,61 @@ def get_llm() -> LLM | SimpleChatModel: trtllm = TensorRTLLM( # type: ignore server_url=settings.llm.server_url, model_name=settings.llm.model_name, - tokens=DEFAULT_NUM_TOKENS, + temperature = kwargs.get('temperature', None), + top_p = kwargs.get('top_p', None), + tokens = kwargs.get('max_tokens', None) ) + unused_params = [key for key in kwargs.keys() if key not in ['temperature', 'top_p', 'max_tokens', 'stream']] + if unused_params: + logger.warning(f"The following parameters from kwargs are not supported: {unused_params} for {settings.llm.model_engine}") return trtllm elif settings.llm.model_engine == "nv-ai-foundation": - return ChatNVIDIA(model=settings.llm.model_name) + from langchain_nvidia_ai_endpoints import ChatNVIDIA + + unused_params = [key for key in kwargs.keys() if key not in ['temperature', 'top_p', 'max_tokens']] + if unused_params: + logger.warning(f"The following parameters from kwargs are not supported: {unused_params} for {settings.llm.model_engine}") + return ChatNVIDIA(model=settings.llm.model_name, + temperature = kwargs.get('temperature', None), + top_p = kwargs.get('top_p', None), + max_tokens = kwargs.get('max_tokens', None)) + elif settings.llm.model_engine == "nv-api-catalog": + # Import custom ChatNVIDIA for api catalog + from integrations.langchain.llms.nv_api_catalog import ChatNVIDIA + + unused_params = [key for key in kwargs.keys() if key not in ['temperature', 'top_p', 'max_tokens']] + if unused_params: + logger.warning(f"The following parameters from kwargs are not supported: {unused_params} for {settings.llm.model_engine}") + return ChatNVIDIA(model=settings.llm.model_name, + temperature = kwargs.get('temperature', None), + top_p = kwargs.get('top_p', None), + max_tokens = kwargs.get('max_tokens', None)) elif settings.llm.model_engine == "nemo-infer": + unused_params = [key for key in kwargs.keys() if key not in ['temperature', 'top_p', 'max_tokens', 'stream']] + if unused_params: + logger.warning(f"The following parameters from kwargs are not supported: {unused_params} for {settings.llm.model_engine}") nemo_infer = NemoInfer( server_url=f"http://{settings.llm.server_url}/v1/completions", model=settings.llm.model_name, - tokens=DEFAULT_NUM_TOKENS, + temperature = kwargs.get('temperature', None), + top_p = kwargs.get('top_p', None), + tokens = kwargs.get('max_tokens', None) ) return nemo_infer elif settings.llm.model_engine == "nemo-infer-openai": + unused_params = [key for key in kwargs.keys() if key not in ['temperature', 'max_tokens', 'stream']] + if unused_params: + logger.warning(f"The following parameters from kwargs are not supported: {unused_params} for {settings.llm.model_engine}") nemo_infer = ChatOpenAI( openai_api_base=f"http://{settings.llm.server_url}/v1/", openai_api_key="xyz", model_name=settings.llm.model_name, - max_tokens=DEFAULT_NUM_TOKENS, + temperature = kwargs.get('temperature', 0.7), + 
max_tokens=kwargs.get('max_tokens', None) ) return nemo_infer else: - raise RuntimeError("Unable to find any supported Large Language Model server. Supported engines are triton-trt-llm and nv-ai-foundation.") + raise RuntimeError("Unable to find any supported Large Language Model server. Supported engines are triton-trt-llm, nv-ai-foundation, nv-api-catalog, nemo-infer and nemo-infer-openai.") @@ -276,7 +319,7 @@ def get_embedding_model() -> Embeddings: encode_kwargs = {"normalize_embeddings": False} settings = get_config() - logger.info(f"Using {settings.embeddings.model_engine} as model engine for embeddings") + logger.info(f"Using {settings.embeddings.model_engine} as model engine and {settings.embeddings.model_name} as model for embeddings") if settings.embeddings.model_engine == "huggingface": hf_embeddings = HuggingFaceEmbeddings( model_name=settings.embeddings.model_name, @@ -286,6 +329,10 @@ # Load in a specific embedding model return hf_embeddings elif settings.embeddings.model_engine == "nv-ai-foundation": + from langchain_nvidia_ai_endpoints import NVIDIAEmbeddings + return NVIDIAEmbeddings(model=settings.embeddings.model_name, model_type="passage") + elif settings.embeddings.model_engine == "nv-api-catalog": + from integrations.langchain.llms.nv_api_catalog import NVIDIAEmbeddings return NVIDIAEmbeddings(model=settings.embeddings.model_name, model_type="passage") elif settings.embeddings.model_engine == "nemo-embed": nemo_embed = NemoEmbeddings( @@ -314,8 +361,72 @@ def is_base64_encoded(s: str) -> bool: def get_text_splitter() -> SentenceTransformersTokenTextSplitter: """Return the token text splitter instance from langchain.""" + + embedding_model_name = TEXT_SPLITTER_EMBEDDING_MODEL + if get_config().text_splitter.model_name: + embedding_model_name = get_config().text_splitter.model_name + return SentenceTransformersTokenTextSplitter( - model_name=TEXT_SPLITTER_EMBEDDING_MODEL, - tokens_per_chunk=get_config().text_splitter.chunk_size, + model_name=embedding_model_name, + tokens_per_chunk=get_config().text_splitter.chunk_size - 2, chunk_overlap=get_config().text_splitter.chunk_overlap, ) + + +def get_docs_vectorstore_langchain(vectorstore: VectorStore) -> List[str]: + """Retrieves filenames stored in the vector store implemented in LangChain.""" + + settings = get_config() + try: + # No API available in LangChain for listing the docs, thus using its private _dict + extract_filename = lambda metadata : os.path.splitext(os.path.basename(metadata['source']))[0] + if settings.vector_store.name == "faiss": + in_memory_docstore = vectorstore.docstore._dict + filenames = [extract_filename(doc.metadata) for doc in in_memory_docstore.values()] + filenames = list(set(filenames)) + return filenames + elif settings.vector_store.name == "pgvector": + # No API available in LangChain for listing the docs, thus using its private _make_session + with vectorstore._make_session() as session: + embedding_doc_store = session.query(vectorstore.EmbeddingStore.custom_id, vectorstore.EmbeddingStore.document, vectorstore.EmbeddingStore.cmetadata).all() + filenames = set([extract_filename(metadata) for _, _, metadata in embedding_doc_store]) + return filenames + elif settings.vector_store.name == "milvus": + # Getting all the IDs >= 0 + milvus_data = vectorstore.col.query(expr="pk >= 0", output_fields=["pk","source", "text"]) + filenames = set([extract_filename(metadata) for metadata in milvus_data]) + return filenames + except Exception as e: +
logger.error(f"Error occurred while retrieving documents: {e}") + return [] + + +def del_docs_vectorstore_langchain(vectorstore: VectorStore, filenames: List[str]): + """Delete documents from the vector index implemented in LangChain.""" + + settings = get_config() + try: + # No other API availbe in LangChain for listing the docs, thus usig its private _dict + extract_filename = lambda metadata : os.path.splitext(os.path.basename(metadata['source']))[0] + if settings.vector_store.name == "faiss": + in_memory_docstore = vectorstore.docstore._dict + for filename in filenames: + ids_list = [doc_id for doc_id, doc_data in in_memory_docstore.items() if extract_filename(doc_data.metadata) == filename] + vectorstore.delete(ids_list) + logger.info(f"Deleted documents with filenames {filename}") + elif settings.vector_store.name == "pgvector": + with vectorstore._make_session() as session: + embedding_doc_store = session.query(vectorstore.EmbeddingStore.custom_id, vectorstore.EmbeddingStore.document, vectorstore.EmbeddingStore.cmetadata).all() + for filename in filenames: + ids_list = [doc_id for doc_id, doc_data, metadata in embedding_doc_store if extract_filename(metadata) == filename] + vectorstore.delete(ids_list) + logger.info(f"Deleted documents with filenames {filename}") + elif settings.vector_store.name == "milvus": + # Getting all the ID's > 0 + milvus_data = vectorstore.col.query(expr="pk >= 0", output_fields=["pk","source", "text"]) + for filename in filenames: + ids_list = [metadata["pk"] for metadata in milvus_data if extract_filename(metadata) == filename] + vectorstore.col.delete(f"pk in {ids_list}") + logger.info(f"Deleted documents with filenames {filename}") + except Exception as e: + logger.error(f"Error occurred while deleting documents: {e}") diff --git a/RetrievalAugmentedGeneration/examples/csv_rag/PdM_errors.csv b/RetrievalAugmentedGeneration/examples/csv_rag/PdM_errors.csv new file mode 100644 index 000000000..7ce832e9f --- /dev/null +++ b/RetrievalAugmentedGeneration/examples/csv_rag/PdM_errors.csv @@ -0,0 +1,3920 @@ +datetime,machineID,errorID +2015-01-03 07:00:00,1,Error Code 1: Low Voltage +2015-01-03 20:00:00,1,Error Code 3: Excessive Vibration +2015-01-04 06:00:00,1,Error Code 5: Low Pressure +2015-01-10 15:00:00,1,Error Code 4: Invalid Rotation Angle +2015-01-22 10:00:00,1,Error Code 4: Invalid Rotation Angle +2015-01-25 15:00:00,1,Error Code 4: Invalid Rotation Angle +2015-01-27 04:00:00,1,Error Code 1: Low Voltage +2015-03-03 22:00:00,1,Error Code 2: High Pressure +2015-03-05 06:00:00,1,Error Code 1: Low Voltage +2015-03-20 18:00:00,1,Error Code 1: Low Voltage +2015-03-26 01:00:00,1,Error Code 2: High Pressure +2015-03-31 23:00:00,1,Error Code 1: Low Voltage +2015-04-19 06:00:00,1,Error Code 2: High Pressure +2015-04-19 06:00:00,1,Error Code 3: Excessive Vibration +2015-04-29 19:00:00,1,Error Code 4: Invalid Rotation Angle +2015-05-04 23:00:00,1,Error Code 2: High Pressure +2015-05-12 09:00:00,1,Error Code 1: Low Voltage +2015-05-21 07:00:00,1,Error Code 4: Invalid Rotation Angle +2015-05-24 02:00:00,1,Error Code 3: Excessive Vibration +2015-05-25 05:00:00,1,Error Code 1: Low Voltage +2015-06-09 06:00:00,1,Error Code 3: Excessive Vibration +2015-06-18 06:00:00,1,Error Code 5: Low Pressure +2015-06-23 10:00:00,1,Error Code 3: Excessive Vibration +2015-08-23 19:00:00,1,Error Code 1: Low Voltage +2015-08-30 01:00:00,1,Error Code 3: Excessive Vibration +2015-09-01 06:00:00,1,Error Code 5: Low Pressure +2015-09-13 17:00:00,1,Error Code 2: High Pressure +2015-09-15 
06:00:00,1,Error Code 1: Low Voltage +2015-10-01 23:00:00,1,Error Code 1: Low Voltage +2015-10-15 05:00:00,1,Error Code 1: Low Voltage +2015-10-16 03:00:00,1,Error Code 3: Excessive Vibration +2015-10-16 06:00:00,1,Error Code 2: High Pressure +2015-10-16 06:00:00,1,Error Code 3: Excessive Vibration +2015-11-10 04:00:00,1,Error Code 3: Excessive Vibration +2015-12-15 06:00:00,1,Error Code 5: Low Pressure +2015-01-12 14:00:00,2,Error Code 4: Invalid Rotation Angle +2015-02-06 10:00:00,2,Error Code 4: Invalid Rotation Angle +2015-02-11 11:00:00,2,Error Code 1: Low Voltage +2015-03-11 22:00:00,2,Error Code 2: High Pressure +2015-03-18 06:00:00,2,Error Code 1: Low Voltage +2015-03-18 06:00:00,2,Error Code 2: High Pressure +2015-03-18 06:00:00,2,Error Code 3: Excessive Vibration +2015-03-26 03:00:00,2,Error Code 2: High Pressure +2015-03-27 17:00:00,2,Error Code 3: Excessive Vibration +2015-04-17 06:00:00,2,Error Code 2: High Pressure +2015-04-17 06:00:00,2,Error Code 3: Excessive Vibration +2015-05-07 08:00:00,2,Error Code 2: High Pressure +2015-05-13 10:00:00,2,Error Code 1: Low Voltage +2015-05-30 20:00:00,2,Error Code 5: Low Pressure +2015-06-18 14:00:00,2,Error Code 5: Low Pressure +2015-07-18 06:00:00,2,Error Code 4: Invalid Rotation Angle +2015-07-20 10:00:00,2,Error Code 2: High Pressure +2015-07-23 02:00:00,2,Error Code 1: Low Voltage +2015-08-03 10:00:00,2,Error Code 1: Low Voltage +2015-08-18 22:00:00,2,Error Code 5: Low Pressure +2015-08-23 10:00:00,2,Error Code 2: High Pressure +2015-08-28 08:00:00,2,Error Code 2: High Pressure +2015-09-16 13:00:00,2,Error Code 4: Invalid Rotation Angle +2015-09-19 21:00:00,2,Error Code 4: Invalid Rotation Angle +2015-09-23 19:00:00,2,Error Code 2: High Pressure +2015-11-08 21:00:00,2,Error Code 3: Excessive Vibration +2015-12-28 06:00:00,2,Error Code 2: High Pressure +2015-12-28 06:00:00,2,Error Code 3: Excessive Vibration +2015-01-06 06:00:00,3,Error Code 2: High Pressure +2015-01-06 06:00:00,3,Error Code 3: Excessive Vibration +2015-01-08 02:00:00,3,Error Code 1: Low Voltage +2015-01-11 05:00:00,3,Error Code 1: Low Voltage +2015-01-11 19:00:00,3,Error Code 3: Excessive Vibration +2015-01-28 17:00:00,3,Error Code 4: Invalid Rotation Angle +2015-02-05 06:00:00,3,Error Code 1: Low Voltage +2015-02-23 06:00:00,3,Error Code 4: Invalid Rotation Angle +2015-03-02 11:00:00,3,Error Code 4: Invalid Rotation Angle +2015-03-21 16:00:00,3,Error Code 3: Excessive Vibration +2015-04-06 16:00:00,3,Error Code 2: High Pressure +2015-04-09 09:00:00,3,Error Code 3: Excessive Vibration +2015-04-26 19:00:00,3,Error Code 2: High Pressure +2015-05-06 19:00:00,3,Error Code 4: Invalid Rotation Angle +2015-05-24 15:00:00,3,Error Code 2: High Pressure +2015-06-10 21:00:00,3,Error Code 4: Invalid Rotation Angle +2015-06-27 07:00:00,3,Error Code 1: Low Voltage +2015-07-09 21:00:00,3,Error Code 4: Invalid Rotation Angle +2015-07-18 19:00:00,3,Error Code 1: Low Voltage +2015-07-20 06:00:00,3,Error Code 2: High Pressure +2015-07-20 06:00:00,3,Error Code 3: Excessive Vibration +2015-07-25 23:00:00,3,Error Code 4: Invalid Rotation Angle +2015-07-29 17:00:00,3,Error Code 1: Low Voltage +2015-08-02 15:00:00,3,Error Code 1: Low Voltage +2015-08-07 04:00:00,3,Error Code 2: High Pressure +2015-09-08 04:00:00,3,Error Code 1: Low Voltage +2015-09-13 11:00:00,3,Error Code 1: Low Voltage +2015-09-18 10:00:00,3,Error Code 2: High Pressure +2015-10-01 02:00:00,3,Error Code 5: Low Pressure +2015-10-03 06:00:00,3,Error Code 2: High Pressure +2015-10-03 06:00:00,3,Error Code 3: Excessive 
Vibration +2015-10-23 14:00:00,3,Error Code 3: Excessive Vibration +2015-10-31 23:00:00,3,Error Code 2: High Pressure +2015-11-07 15:00:00,3,Error Code 4: Invalid Rotation Angle +2015-11-20 23:00:00,3,Error Code 1: Low Voltage +2015-12-02 06:00:00,3,Error Code 2: High Pressure +2015-12-02 06:00:00,3,Error Code 3: Excessive Vibration +2015-12-19 22:00:00,3,Error Code 5: Low Pressure +2015-12-25 12:00:00,3,Error Code 1: Low Voltage +2015-01-04 03:00:00,4,Error Code 3: Excessive Vibration +2015-01-06 01:00:00,4,Error Code 1: Low Voltage +2015-01-11 14:00:00,4,Error Code 3: Excessive Vibration +2015-01-13 18:00:00,4,Error Code 2: High Pressure +2015-01-14 21:00:00,4,Error Code 1: Low Voltage +2015-01-16 06:00:00,4,Error Code 2: High Pressure +2015-01-16 06:00:00,4,Error Code 3: Excessive Vibration +2015-02-12 15:00:00,4,Error Code 2: High Pressure +2015-02-15 06:00:00,4,Error Code 1: Low Voltage +2015-02-27 09:00:00,4,Error Code 1: Low Voltage +2015-03-19 05:00:00,4,Error Code 2: High Pressure +2015-03-22 22:00:00,4,Error Code 1: Low Voltage +2015-04-01 06:00:00,4,Error Code 2: High Pressure +2015-04-01 06:00:00,4,Error Code 3: Excessive Vibration +2015-04-06 14:00:00,4,Error Code 1: Low Voltage +2015-05-03 17:00:00,4,Error Code 2: High Pressure +2015-06-23 08:00:00,4,Error Code 2: High Pressure +2015-07-15 06:00:00,4,Error Code 2: High Pressure +2015-07-15 06:00:00,4,Error Code 3: Excessive Vibration +2015-07-24 05:00:00,4,Error Code 2: High Pressure +2015-07-27 17:00:00,4,Error Code 4: Invalid Rotation Angle +2015-08-10 06:00:00,4,Error Code 4: Invalid Rotation Angle +2015-08-29 06:00:00,4,Error Code 1: Low Voltage +2015-09-05 06:00:00,4,Error Code 2: High Pressure +2015-10-10 07:00:00,4,Error Code 1: Low Voltage +2015-10-13 06:00:00,4,Error Code 2: High Pressure +2015-10-13 06:00:00,4,Error Code 3: Excessive Vibration +2015-10-17 20:00:00,4,Error Code 1: Low Voltage +2015-10-28 16:00:00,4,Error Code 3: Excessive Vibration +2015-11-12 16:00:00,4,Error Code 3: Excessive Vibration +2015-12-04 06:00:00,4,Error Code 1: Low Voltage +2015-01-08 06:00:00,5,Error Code 2: High Pressure +2015-01-08 06:00:00,5,Error Code 3: Excessive Vibration +2015-01-31 19:00:00,5,Error Code 1: Low Voltage +2015-02-22 06:00:00,5,Error Code 1: Low Voltage +2015-02-24 14:00:00,5,Error Code 3: Excessive Vibration +2015-03-09 10:00:00,5,Error Code 2: High Pressure +2015-03-18 22:00:00,5,Error Code 1: Low Voltage +2015-03-21 15:00:00,5,Error Code 4: Invalid Rotation Angle +2015-03-21 16:00:00,5,Error Code 2: High Pressure +2015-04-06 02:00:00,5,Error Code 5: Low Pressure +2015-04-08 06:00:00,5,Error Code 1: Low Voltage +2015-05-08 15:00:00,5,Error Code 1: Low Voltage +2015-05-17 00:00:00,5,Error Code 1: Low Voltage +2015-06-01 22:00:00,5,Error Code 1: Low Voltage +2015-06-05 01:00:00,5,Error Code 4: Invalid Rotation Angle +2015-06-12 18:00:00,5,Error Code 3: Excessive Vibration +2015-06-22 06:00:00,5,Error Code 2: High Pressure +2015-06-22 06:00:00,5,Error Code 3: Excessive Vibration +2015-06-23 12:00:00,5,Error Code 3: Excessive Vibration +2015-07-12 22:00:00,5,Error Code 1: Low Voltage +2015-07-16 04:00:00,5,Error Code 3: Excessive Vibration +2015-07-26 08:00:00,5,Error Code 1: Low Voltage +2015-07-29 09:00:00,5,Error Code 4: Invalid Rotation Angle +2015-08-11 02:00:00,5,Error Code 4: Invalid Rotation Angle +2015-08-19 12:00:00,5,Error Code 2: High Pressure +2015-08-30 03:00:00,5,Error Code 2: High Pressure +2015-09-05 06:00:00,5,Error Code 1: Low Voltage +2015-09-06 06:00:00,5,Error Code 5: Low Pressure +2015-09-27 
20:00:00,5,Error Code 1: Low Voltage +2015-10-05 06:00:00,5,Error Code 2: High Pressure +2015-10-05 06:00:00,5,Error Code 3: Excessive Vibration +2015-11-19 06:00:00,5,Error Code 1: Low Voltage +2015-12-04 02:00:00,5,Error Code 1: Low Voltage +2015-12-04 18:00:00,5,Error Code 5: Low Pressure +2015-12-15 16:00:00,5,Error Code 1: Low Voltage +2015-12-20 05:00:00,5,Error Code 4: Invalid Rotation Angle +2015-12-21 09:00:00,5,Error Code 4: Invalid Rotation Angle +2015-12-30 09:00:00,5,Error Code 2: High Pressure +2015-01-14 12:00:00,6,Error Code 2: High Pressure +2015-01-26 12:00:00,6,Error Code 1: Low Voltage +2015-02-03 22:00:00,6,Error Code 2: High Pressure +2015-03-29 20:00:00,6,Error Code 4: Invalid Rotation Angle +2015-04-05 21:00:00,6,Error Code 1: Low Voltage +2015-04-23 16:00:00,6,Error Code 3: Excessive Vibration +2015-04-25 16:00:00,6,Error Code 1: Low Voltage +2015-05-01 22:00:00,6,Error Code 2: High Pressure +2015-05-24 08:00:00,6,Error Code 5: Low Pressure +2015-06-01 01:00:00,6,Error Code 2: High Pressure +2015-06-03 07:00:00,6,Error Code 2: High Pressure +2015-08-11 04:00:00,6,Error Code 2: High Pressure +2015-08-20 06:00:00,6,Error Code 4: Invalid Rotation Angle +2015-09-14 10:00:00,6,Error Code 2: High Pressure +2015-09-18 20:00:00,6,Error Code 1: Low Voltage +2015-09-26 08:00:00,6,Error Code 2: High Pressure +2015-10-05 21:00:00,6,Error Code 1: Low Voltage +2015-10-15 16:00:00,6,Error Code 3: Excessive Vibration +2015-10-21 22:00:00,6,Error Code 1: Low Voltage +2015-10-23 19:00:00,6,Error Code 2: High Pressure +2015-11-13 03:00:00,6,Error Code 1: Low Voltage +2015-12-07 06:00:00,6,Error Code 4: Invalid Rotation Angle +2015-12-10 22:00:00,6,Error Code 3: Excessive Vibration +2015-01-17 20:00:00,7,Error Code 1: Low Voltage +2015-01-21 07:00:00,7,Error Code 3: Excessive Vibration +2015-01-23 06:00:00,7,Error Code 2: High Pressure +2015-01-23 06:00:00,7,Error Code 3: Excessive Vibration +2015-01-23 06:00:00,7,Error Code 5: Low Pressure +2015-02-07 06:00:00,7,Error Code 1: Low Voltage +2015-03-09 18:00:00,7,Error Code 4: Invalid Rotation Angle +2015-03-10 22:00:00,7,Error Code 2: High Pressure +2015-03-21 05:00:00,7,Error Code 3: Excessive Vibration +2015-03-27 22:00:00,7,Error Code 2: High Pressure +2015-04-12 08:00:00,7,Error Code 1: Low Voltage +2015-04-23 06:00:00,7,Error Code 1: Low Voltage +2015-04-23 09:00:00,7,Error Code 4: Invalid Rotation Angle +2015-05-08 06:00:00,7,Error Code 5: Low Pressure +2015-05-13 11:00:00,7,Error Code 2: High Pressure +2015-05-23 06:00:00,7,Error Code 2: High Pressure +2015-05-23 06:00:00,7,Error Code 3: Excessive Vibration +2015-06-24 08:00:00,7,Error Code 3: Excessive Vibration +2015-06-29 00:00:00,7,Error Code 1: Low Voltage +2015-07-07 06:00:00,7,Error Code 2: High Pressure +2015-07-07 06:00:00,7,Error Code 3: Excessive Vibration +2015-07-13 05:00:00,7,Error Code 2: High Pressure +2015-08-07 18:00:00,7,Error Code 2: High Pressure +2015-08-09 06:00:00,7,Error Code 3: Excessive Vibration +2015-08-11 12:00:00,7,Error Code 1: Low Voltage +2015-08-30 22:00:00,7,Error Code 4: Invalid Rotation Angle +2015-09-08 19:00:00,7,Error Code 2: High Pressure +2015-09-17 18:00:00,7,Error Code 1: Low Voltage +2015-09-20 06:00:00,7,Error Code 5: Low Pressure +2015-09-22 19:00:00,7,Error Code 2: High Pressure +2015-10-13 08:00:00,7,Error Code 4: Invalid Rotation Angle +2015-10-20 06:00:00,7,Error Code 2: High Pressure +2015-10-20 06:00:00,7,Error Code 3: Excessive Vibration +2015-10-31 10:00:00,7,Error Code 2: High Pressure +2015-11-18 14:00:00,7,Error Code 3: 
Excessive Vibration +2015-11-24 22:00:00,7,Error Code 3: Excessive Vibration +2015-12-09 11:00:00,7,Error Code 2: High Pressure +2015-12-16 13:00:00,7,Error Code 3: Excessive Vibration +2015-12-19 06:00:00,7,Error Code 1: Low Voltage +2015-01-05 05:00:00,8,Error Code 2: High Pressure +2015-01-07 07:00:00,8,Error Code 1: Low Voltage +2015-01-15 02:00:00,8,Error Code 1: Low Voltage +2015-02-03 06:00:00,8,Error Code 1: Low Voltage +2015-02-03 23:00:00,8,Error Code 4: Invalid Rotation Angle +2015-02-16 17:00:00,8,Error Code 3: Excessive Vibration +2015-02-20 23:00:00,8,Error Code 3: Excessive Vibration +2015-03-06 06:00:00,8,Error Code 2: High Pressure +2015-03-06 06:00:00,8,Error Code 3: Excessive Vibration +2015-03-21 06:00:00,8,Error Code 5: Low Pressure +2015-03-26 16:00:00,8,Error Code 2: High Pressure +2015-04-06 07:00:00,8,Error Code 2: High Pressure +2015-04-20 06:00:00,8,Error Code 2: High Pressure +2015-04-20 06:00:00,8,Error Code 3: Excessive Vibration +2015-04-30 22:00:00,8,Error Code 2: High Pressure +2015-05-08 08:00:00,8,Error Code 1: Low Voltage +2015-05-28 01:00:00,8,Error Code 2: High Pressure +2015-06-09 12:00:00,8,Error Code 3: Excessive Vibration +2015-06-28 20:00:00,8,Error Code 4: Invalid Rotation Angle +2015-07-02 10:00:00,8,Error Code 2: High Pressure +2015-07-04 17:00:00,8,Error Code 2: High Pressure +2015-08-03 06:00:00,8,Error Code 5: Low Pressure +2015-08-14 11:00:00,8,Error Code 3: Excessive Vibration +2015-08-24 09:00:00,8,Error Code 2: High Pressure +2015-09-18 11:00:00,8,Error Code 3: Excessive Vibration +2015-10-07 19:00:00,8,Error Code 4: Invalid Rotation Angle +2015-10-10 06:00:00,8,Error Code 4: Invalid Rotation Angle +2015-10-15 02:00:00,8,Error Code 3: Excessive Vibration +2015-10-17 06:00:00,8,Error Code 5: Low Pressure +2015-10-30 17:00:00,8,Error Code 5: Low Pressure +2015-11-03 08:00:00,8,Error Code 5: Low Pressure +2015-11-15 22:00:00,8,Error Code 2: High Pressure +2015-11-18 22:00:00,8,Error Code 3: Excessive Vibration +2015-11-19 10:00:00,8,Error Code 2: High Pressure +2015-12-15 03:00:00,8,Error Code 2: High Pressure +2015-12-16 19:00:00,8,Error Code 4: Invalid Rotation Angle +2015-12-19 11:00:00,8,Error Code 4: Invalid Rotation Angle +2015-12-23 07:00:00,8,Error Code 4: Invalid Rotation Angle +2015-12-23 17:00:00,8,Error Code 2: High Pressure +2015-12-24 07:00:00,8,Error Code 4: Invalid Rotation Angle +2015-12-30 21:00:00,8,Error Code 4: Invalid Rotation Angle +2016-01-01 05:00:00,8,Error Code 3: Excessive Vibration +2015-01-04 04:00:00,9,Error Code 4: Invalid Rotation Angle +2015-01-05 20:00:00,9,Error Code 3: Excessive Vibration +2015-01-25 02:00:00,9,Error Code 1: Low Voltage +2015-02-18 20:00:00,9,Error Code 2: High Pressure +2015-03-03 06:00:00,9,Error Code 1: Low Voltage +2015-03-09 11:00:00,9,Error Code 1: Low Voltage +2015-03-22 08:00:00,9,Error Code 3: Excessive Vibration +2015-04-01 05:00:00,9,Error Code 4: Invalid Rotation Angle +2015-04-10 12:00:00,9,Error Code 5: Low Pressure +2015-04-29 04:00:00,9,Error Code 1: Low Voltage +2015-06-05 11:00:00,9,Error Code 2: High Pressure +2015-06-14 13:00:00,9,Error Code 4: Invalid Rotation Angle +2015-06-16 06:00:00,9,Error Code 2: High Pressure +2015-06-16 06:00:00,9,Error Code 3: Excessive Vibration +2015-06-17 23:00:00,9,Error Code 4: Invalid Rotation Angle +2015-06-19 13:00:00,9,Error Code 1: Low Voltage +2015-06-22 00:00:00,9,Error Code 4: Invalid Rotation Angle +2015-07-01 06:00:00,9,Error Code 1: Low Voltage +2015-07-16 06:00:00,9,Error Code 2: High Pressure +2015-07-16 06:00:00,9,Error 
Code 3: Excessive Vibration +2015-07-30 09:00:00,9,Error Code 3: Excessive Vibration +2015-08-08 08:00:00,9,Error Code 4: Invalid Rotation Angle +2015-08-16 05:00:00,9,Error Code 1: Low Voltage +2015-08-18 13:00:00,9,Error Code 4: Invalid Rotation Angle +2015-08-18 15:00:00,9,Error Code 1: Low Voltage +2015-08-30 06:00:00,9,Error Code 2: High Pressure +2015-08-30 06:00:00,9,Error Code 3: Excessive Vibration +2015-09-14 06:00:00,9,Error Code 1: Low Voltage +2015-10-12 01:00:00,9,Error Code 2: High Pressure +2015-10-14 06:00:00,9,Error Code 2: High Pressure +2015-10-14 06:00:00,9,Error Code 3: Excessive Vibration +2015-11-13 06:00:00,9,Error Code 1: Low Voltage +2015-11-19 04:00:00,9,Error Code 3: Excessive Vibration +2015-11-22 14:00:00,9,Error Code 3: Excessive Vibration +2015-11-24 14:00:00,9,Error Code 3: Excessive Vibration +2015-12-01 03:00:00,9,Error Code 2: High Pressure +2015-12-13 06:00:00,9,Error Code 2: High Pressure +2015-12-13 06:00:00,9,Error Code 3: Excessive Vibration +2015-12-13 07:00:00,9,Error Code 4: Invalid Rotation Angle +2015-12-27 10:00:00,9,Error Code 2: High Pressure +2015-01-01 20:00:00,10,Error Code 1: Low Voltage +2015-01-06 04:00:00,10,Error Code 3: Excessive Vibration +2015-01-18 06:00:00,10,Error Code 2: High Pressure +2015-01-18 06:00:00,10,Error Code 3: Excessive Vibration +2015-01-22 05:00:00,10,Error Code 1: Low Voltage +2015-01-29 03:00:00,10,Error Code 4: Invalid Rotation Angle +2015-02-10 00:00:00,10,Error Code 1: Low Voltage +2015-02-11 03:00:00,10,Error Code 1: Low Voltage +2015-02-12 23:00:00,10,Error Code 5: Low Pressure +2015-02-13 10:00:00,10,Error Code 3: Excessive Vibration +2015-03-04 00:00:00,10,Error Code 2: High Pressure +2015-03-05 15:00:00,10,Error Code 3: Excessive Vibration +2015-03-18 11:00:00,10,Error Code 1: Low Voltage +2015-03-21 14:00:00,10,Error Code 4: Invalid Rotation Angle +2015-03-26 08:00:00,10,Error Code 1: Low Voltage +2015-04-03 06:00:00,10,Error Code 2: High Pressure +2015-04-03 06:00:00,10,Error Code 3: Excessive Vibration +2015-04-06 05:00:00,10,Error Code 2: High Pressure +2015-04-14 00:00:00,10,Error Code 3: Excessive Vibration +2015-04-14 09:00:00,10,Error Code 1: Low Voltage +2015-05-18 06:00:00,10,Error Code 2: High Pressure +2015-05-18 06:00:00,10,Error Code 3: Excessive Vibration +2015-05-31 03:00:00,10,Error Code 1: Low Voltage +2015-06-02 06:00:00,10,Error Code 1: Low Voltage +2015-06-12 12:00:00,10,Error Code 4: Invalid Rotation Angle +2015-06-16 06:00:00,10,Error Code 1: Low Voltage +2015-06-17 06:00:00,10,Error Code 2: High Pressure +2015-06-17 06:00:00,10,Error Code 3: Excessive Vibration +2015-07-01 14:00:00,10,Error Code 3: Excessive Vibration +2015-07-25 16:00:00,10,Error Code 3: Excessive Vibration +2015-07-29 04:00:00,10,Error Code 5: Low Pressure +2015-08-06 07:00:00,10,Error Code 2: High Pressure +2015-08-10 04:00:00,10,Error Code 3: Excessive Vibration +2015-08-17 08:00:00,10,Error Code 4: Invalid Rotation Angle +2015-09-10 11:00:00,10,Error Code 1: Low Voltage +2015-09-14 17:00:00,10,Error Code 1: Low Voltage +2015-09-18 05:00:00,10,Error Code 1: Low Voltage +2015-10-05 00:00:00,10,Error Code 1: Low Voltage +2015-10-27 13:00:00,10,Error Code 3: Excessive Vibration +2015-10-31 18:00:00,10,Error Code 1: Low Voltage +2015-11-09 17:00:00,10,Error Code 1: Low Voltage +2015-11-21 06:00:00,10,Error Code 3: Excessive Vibration +2015-12-08 21:00:00,10,Error Code 2: High Pressure +2015-12-17 09:00:00,10,Error Code 1: Low Voltage +2015-12-23 13:00:00,10,Error Code 2: High Pressure +2015-01-19 
06:00:00,11,Error Code 2: High Pressure +2015-01-19 06:00:00,11,Error Code 3: Excessive Vibration +2015-02-15 01:00:00,11,Error Code 4: Invalid Rotation Angle +2015-02-18 06:00:00,11,Error Code 4: Invalid Rotation Angle +2015-02-26 23:00:00,11,Error Code 4: Invalid Rotation Angle +2015-03-20 07:00:00,11,Error Code 3: Excessive Vibration +2015-04-08 02:00:00,11,Error Code 1: Low Voltage +2015-04-12 12:00:00,11,Error Code 3: Excessive Vibration +2015-04-15 02:00:00,11,Error Code 1: Low Voltage +2015-04-19 06:00:00,11,Error Code 2: High Pressure +2015-04-19 06:00:00,11,Error Code 3: Excessive Vibration +2015-04-19 06:00:00,11,Error Code 4: Invalid Rotation Angle +2015-05-10 22:00:00,11,Error Code 4: Invalid Rotation Angle +2015-05-25 23:00:00,11,Error Code 1: Low Voltage +2015-06-08 08:00:00,11,Error Code 3: Excessive Vibration +2015-06-16 08:00:00,11,Error Code 1: Low Voltage +2015-06-17 02:00:00,11,Error Code 2: High Pressure +2015-06-21 13:00:00,11,Error Code 4: Invalid Rotation Angle +2015-07-15 00:00:00,11,Error Code 2: High Pressure +2015-07-18 07:00:00,11,Error Code 2: High Pressure +2015-08-20 14:00:00,11,Error Code 3: Excessive Vibration +2015-08-25 22:00:00,11,Error Code 2: High Pressure +2015-08-26 16:00:00,11,Error Code 4: Invalid Rotation Angle +2015-09-04 13:00:00,11,Error Code 4: Invalid Rotation Angle +2015-09-13 04:00:00,11,Error Code 1: Low Voltage +2015-09-29 17:00:00,11,Error Code 4: Invalid Rotation Angle +2015-10-01 06:00:00,11,Error Code 4: Invalid Rotation Angle +2015-10-08 01:00:00,11,Error Code 3: Excessive Vibration +2015-10-11 07:00:00,11,Error Code 1: Low Voltage +2015-10-20 16:00:00,11,Error Code 1: Low Voltage +2015-11-02 13:00:00,11,Error Code 4: Invalid Rotation Angle +2015-11-03 17:00:00,11,Error Code 2: High Pressure +2015-11-08 08:00:00,11,Error Code 3: Excessive Vibration +2015-11-14 09:00:00,11,Error Code 2: High Pressure +2015-12-04 01:00:00,11,Error Code 2: High Pressure +2015-12-11 10:00:00,11,Error Code 2: High Pressure +2015-12-15 06:00:00,11,Error Code 4: Invalid Rotation Angle +2015-12-15 21:00:00,11,Error Code 2: High Pressure +2015-12-17 03:00:00,11,Error Code 3: Excessive Vibration +2015-01-06 06:00:00,12,Error Code 1: Low Voltage +2015-01-06 06:00:00,12,Error Code 2: High Pressure +2015-01-06 06:00:00,12,Error Code 3: Excessive Vibration +2015-01-16 20:00:00,12,Error Code 3: Excessive Vibration +2015-02-06 13:00:00,12,Error Code 2: High Pressure +2015-03-14 15:00:00,12,Error Code 2: High Pressure +2015-03-22 06:00:00,12,Error Code 2: High Pressure +2015-03-22 06:00:00,12,Error Code 3: Excessive Vibration +2015-04-03 05:00:00,12,Error Code 3: Excessive Vibration +2015-04-05 00:00:00,12,Error Code 4: Invalid Rotation Angle +2015-04-10 17:00:00,12,Error Code 2: High Pressure +2015-04-25 19:00:00,12,Error Code 3: Excessive Vibration +2015-04-26 05:00:00,12,Error Code 4: Invalid Rotation Angle +2015-05-04 03:00:00,12,Error Code 2: High Pressure +2015-05-14 10:00:00,12,Error Code 1: Low Voltage +2015-05-29 12:00:00,12,Error Code 2: High Pressure +2015-06-13 10:00:00,12,Error Code 3: Excessive Vibration +2015-06-28 19:00:00,12,Error Code 3: Excessive Vibration +2015-07-05 06:00:00,12,Error Code 1: Low Voltage +2015-07-15 02:00:00,12,Error Code 5: Low Pressure +2015-07-17 09:00:00,12,Error Code 2: High Pressure +2015-07-22 18:00:00,12,Error Code 5: Low Pressure +2015-08-05 05:00:00,12,Error Code 2: High Pressure +2015-08-16 17:00:00,12,Error Code 2: High Pressure +2015-08-28 20:00:00,12,Error Code 2: High Pressure +2015-09-03 06:00:00,12,Error Code 1: 
Low Voltage +2015-09-04 20:00:00,12,Error Code 2: High Pressure +2015-09-20 05:00:00,12,Error Code 4: Invalid Rotation Angle +2015-09-30 12:00:00,12,Error Code 1: Low Voltage +2015-10-01 15:00:00,12,Error Code 2: High Pressure +2015-10-03 00:00:00,12,Error Code 4: Invalid Rotation Angle +2015-10-03 06:00:00,12,Error Code 2: High Pressure +2015-10-03 06:00:00,12,Error Code 3: Excessive Vibration +2015-10-03 22:00:00,12,Error Code 2: High Pressure +2015-10-08 23:00:00,12,Error Code 1: Low Voltage +2015-10-09 22:00:00,12,Error Code 1: Low Voltage +2015-11-04 22:00:00,12,Error Code 1: Low Voltage +2015-11-05 05:00:00,12,Error Code 1: Low Voltage +2015-11-27 00:00:00,12,Error Code 3: Excessive Vibration +2015-11-28 23:00:00,12,Error Code 2: High Pressure +2015-12-12 16:00:00,12,Error Code 4: Invalid Rotation Angle +2015-01-10 04:00:00,13,Error Code 5: Low Pressure +2015-01-13 21:00:00,13,Error Code 3: Excessive Vibration +2015-01-26 09:00:00,13,Error Code 2: High Pressure +2015-01-27 07:00:00,13,Error Code 1: Low Voltage +2015-01-28 20:00:00,13,Error Code 1: Low Voltage +2015-02-10 20:00:00,13,Error Code 1: Low Voltage +2015-02-13 10:00:00,13,Error Code 4: Invalid Rotation Angle +2015-02-14 05:00:00,13,Error Code 2: High Pressure +2015-02-25 20:00:00,13,Error Code 3: Excessive Vibration +2015-03-10 18:00:00,13,Error Code 3: Excessive Vibration +2015-03-20 16:00:00,13,Error Code 3: Excessive Vibration +2015-04-06 23:00:00,13,Error Code 2: High Pressure +2015-04-10 06:00:00,13,Error Code 4: Invalid Rotation Angle +2015-04-10 06:00:00,13,Error Code 5: Low Pressure +2015-04-13 18:00:00,13,Error Code 4: Invalid Rotation Angle +2015-05-05 09:00:00,13,Error Code 4: Invalid Rotation Angle +2015-06-09 06:00:00,13,Error Code 4: Invalid Rotation Angle +2015-06-24 06:00:00,13,Error Code 5: Low Pressure +2015-07-09 06:00:00,13,Error Code 2: High Pressure +2015-07-09 06:00:00,13,Error Code 3: Excessive Vibration +2015-07-29 04:00:00,13,Error Code 3: Excessive Vibration +2015-08-04 18:00:00,13,Error Code 2: High Pressure +2015-08-08 06:00:00,13,Error Code 2: High Pressure +2015-08-08 06:00:00,13,Error Code 3: Excessive Vibration +2015-08-23 06:00:00,13,Error Code 1: Low Voltage +2015-08-23 06:00:00,13,Error Code 4: Invalid Rotation Angle +2015-08-25 15:00:00,13,Error Code 3: Excessive Vibration +2015-09-03 20:00:00,13,Error Code 4: Invalid Rotation Angle +2015-09-18 22:00:00,13,Error Code 1: Low Voltage +2015-09-22 06:00:00,13,Error Code 5: Low Pressure +2015-09-29 12:00:00,13,Error Code 4: Invalid Rotation Angle +2015-10-04 07:00:00,13,Error Code 1: Low Voltage +2015-10-22 06:00:00,13,Error Code 1: Low Voltage +2015-10-23 18:00:00,13,Error Code 5: Low Pressure +2015-11-06 07:00:00,13,Error Code 1: Low Voltage +2015-11-26 03:00:00,13,Error Code 1: Low Voltage +2015-12-06 06:00:00,13,Error Code 5: Low Pressure +2015-12-13 17:00:00,13,Error Code 5: Low Pressure +2015-12-21 06:00:00,13,Error Code 1: Low Voltage +2015-12-21 06:00:00,13,Error Code 2: High Pressure +2015-12-21 06:00:00,13,Error Code 3: Excessive Vibration +2015-12-21 08:00:00,13,Error Code 1: Low Voltage +2015-01-01 08:00:00,14,Error Code 4: Invalid Rotation Angle +2015-01-03 16:00:00,14,Error Code 1: Low Voltage +2015-01-24 17:00:00,14,Error Code 4: Invalid Rotation Angle +2015-01-27 11:00:00,14,Error Code 4: Invalid Rotation Angle +2015-01-30 06:00:00,14,Error Code 2: High Pressure +2015-01-30 06:00:00,14,Error Code 3: Excessive Vibration +2015-02-09 20:00:00,14,Error Code 2: High Pressure +2015-02-18 21:00:00,14,Error Code 5: Low Pressure 
+2015-02-19 17:00:00,14,Error Code 5: Low Pressure +2015-03-01 06:00:00,14,Error Code 1: Low Voltage +2015-03-07 09:00:00,14,Error Code 1: Low Voltage +2015-03-07 17:00:00,14,Error Code 2: High Pressure +2015-03-16 18:00:00,14,Error Code 2: High Pressure +2015-03-17 07:00:00,14,Error Code 2: High Pressure +2015-04-17 04:00:00,14,Error Code 2: High Pressure +2015-04-20 01:00:00,14,Error Code 5: Low Pressure +2015-05-11 05:00:00,14,Error Code 3: Excessive Vibration +2015-05-12 04:00:00,14,Error Code 3: Excessive Vibration +2015-05-31 05:00:00,14,Error Code 5: Low Pressure +2015-06-02 20:00:00,14,Error Code 4: Invalid Rotation Angle +2015-06-19 08:00:00,14,Error Code 2: High Pressure +2015-06-25 02:00:00,14,Error Code 1: Low Voltage +2015-06-26 17:00:00,14,Error Code 2: High Pressure +2015-07-14 06:00:00,14,Error Code 1: Low Voltage +2015-07-14 08:00:00,14,Error Code 3: Excessive Vibration +2015-07-29 06:00:00,14,Error Code 2: High Pressure +2015-07-29 06:00:00,14,Error Code 3: Excessive Vibration +2015-08-14 12:00:00,14,Error Code 4: Invalid Rotation Angle +2015-08-22 13:00:00,14,Error Code 4: Invalid Rotation Angle +2015-08-28 10:00:00,14,Error Code 3: Excessive Vibration +2015-09-20 18:00:00,14,Error Code 2: High Pressure +2015-10-02 18:00:00,14,Error Code 3: Excessive Vibration +2015-10-13 11:00:00,14,Error Code 1: Low Voltage +2015-10-21 10:00:00,14,Error Code 3: Excessive Vibration +2015-11-30 14:00:00,14,Error Code 1: Low Voltage +2015-12-17 00:00:00,14,Error Code 5: Low Pressure +2015-12-22 01:00:00,14,Error Code 2: High Pressure +2015-12-23 12:00:00,14,Error Code 1: Low Voltage +2015-01-13 23:00:00,15,Error Code 4: Invalid Rotation Angle +2015-01-14 06:00:00,15,Error Code 3: Excessive Vibration +2015-01-19 06:00:00,15,Error Code 2: High Pressure +2015-01-19 06:00:00,15,Error Code 3: Excessive Vibration +2015-01-19 06:00:00,15,Error Code 5: Low Pressure +2015-01-22 23:00:00,15,Error Code 1: Low Voltage +2015-02-11 02:00:00,15,Error Code 2: High Pressure +2015-02-14 00:00:00,15,Error Code 1: Low Voltage +2015-03-21 23:00:00,15,Error Code 1: Low Voltage +2015-03-24 20:00:00,15,Error Code 1: Low Voltage +2015-03-28 23:00:00,15,Error Code 2: High Pressure +2015-04-06 10:00:00,15,Error Code 5: Low Pressure +2015-04-12 13:00:00,15,Error Code 4: Invalid Rotation Angle +2015-04-23 02:00:00,15,Error Code 2: High Pressure +2015-05-10 01:00:00,15,Error Code 1: Low Voltage +2015-05-30 04:00:00,15,Error Code 4: Invalid Rotation Angle +2015-06-01 21:00:00,15,Error Code 1: Low Voltage +2015-06-02 00:00:00,15,Error Code 2: High Pressure +2015-06-03 06:00:00,15,Error Code 1: Low Voltage +2015-06-11 06:00:00,15,Error Code 4: Invalid Rotation Angle +2015-06-17 11:00:00,15,Error Code 2: High Pressure +2015-06-18 06:00:00,15,Error Code 2: High Pressure +2015-06-22 10:00:00,15,Error Code 1: Low Voltage +2015-06-24 10:00:00,15,Error Code 1: Low Voltage +2015-07-03 17:00:00,15,Error Code 4: Invalid Rotation Angle +2015-07-05 15:00:00,15,Error Code 2: High Pressure +2015-07-05 21:00:00,15,Error Code 2: High Pressure +2015-07-07 14:00:00,15,Error Code 1: Low Voltage +2015-07-22 00:00:00,15,Error Code 2: High Pressure +2015-08-02 06:00:00,15,Error Code 1: Low Voltage +2015-08-09 15:00:00,15,Error Code 4: Invalid Rotation Angle +2015-09-01 06:00:00,15,Error Code 2: High Pressure +2015-09-23 15:00:00,15,Error Code 3: Excessive Vibration +2015-10-02 13:00:00,15,Error Code 3: Excessive Vibration +2015-10-05 20:00:00,15,Error Code 3: Excessive Vibration +2015-10-07 01:00:00,15,Error Code 1: Low Voltage +2015-10-07 
20:00:00,15,Error Code 4: Invalid Rotation Angle +2015-10-15 13:00:00,15,Error Code 4: Invalid Rotation Angle +2015-10-26 01:00:00,15,Error Code 2: High Pressure +2015-10-31 06:00:00,15,Error Code 2: High Pressure +2015-10-31 06:00:00,15,Error Code 3: Excessive Vibration +2015-10-31 06:00:00,15,Error Code 5: Low Pressure +2015-11-06 14:00:00,15,Error Code 1: Low Voltage +2015-11-11 06:00:00,15,Error Code 3: Excessive Vibration +2015-11-13 19:00:00,15,Error Code 2: High Pressure +2015-11-14 19:00:00,15,Error Code 4: Invalid Rotation Angle +2015-11-19 13:00:00,15,Error Code 1: Low Voltage +2015-11-20 23:00:00,15,Error Code 4: Invalid Rotation Angle +2015-11-22 08:00:00,15,Error Code 3: Excessive Vibration +2015-12-21 22:00:00,15,Error Code 3: Excessive Vibration +2015-12-30 06:00:00,15,Error Code 5: Low Pressure +2015-01-16 06:00:00,16,Error Code 1: Low Voltage +2015-02-09 02:00:00,16,Error Code 3: Excessive Vibration +2015-02-13 00:00:00,16,Error Code 1: Low Voltage +2015-02-22 05:00:00,16,Error Code 1: Low Voltage +2015-02-27 16:00:00,16,Error Code 3: Excessive Vibration +2015-03-02 06:00:00,16,Error Code 4: Invalid Rotation Angle +2015-03-10 01:00:00,16,Error Code 3: Excessive Vibration +2015-03-10 19:00:00,16,Error Code 5: Low Pressure +2015-03-13 17:00:00,16,Error Code 5: Low Pressure +2015-03-25 12:00:00,16,Error Code 2: High Pressure +2015-04-01 06:00:00,16,Error Code 1: Low Voltage +2015-04-10 18:00:00,16,Error Code 1: Low Voltage +2015-04-22 03:00:00,16,Error Code 3: Excessive Vibration +2015-04-22 10:00:00,16,Error Code 1: Low Voltage +2015-04-28 08:00:00,16,Error Code 4: Invalid Rotation Angle +2015-04-28 14:00:00,16,Error Code 1: Low Voltage +2015-05-16 06:00:00,16,Error Code 4: Invalid Rotation Angle +2015-05-21 14:00:00,16,Error Code 1: Low Voltage +2015-05-28 17:00:00,16,Error Code 2: High Pressure +2015-05-31 06:00:00,16,Error Code 2: High Pressure +2015-05-31 06:00:00,16,Error Code 3: Excessive Vibration +2015-06-15 06:00:00,16,Error Code 1: Low Voltage +2015-06-22 11:00:00,16,Error Code 3: Excessive Vibration +2015-06-30 06:00:00,16,Error Code 2: High Pressure +2015-06-30 06:00:00,16,Error Code 3: Excessive Vibration +2015-06-30 06:00:00,16,Error Code 4: Invalid Rotation Angle +2015-07-07 12:00:00,16,Error Code 2: High Pressure +2015-07-07 17:00:00,16,Error Code 3: Excessive Vibration +2015-07-07 21:00:00,16,Error Code 4: Invalid Rotation Angle +2015-08-01 20:00:00,16,Error Code 4: Invalid Rotation Angle +2015-08-05 08:00:00,16,Error Code 1: Low Voltage +2015-08-14 06:00:00,16,Error Code 4: Invalid Rotation Angle +2015-09-22 00:00:00,16,Error Code 1: Low Voltage +2015-09-24 00:00:00,16,Error Code 1: Low Voltage +2015-10-04 12:00:00,16,Error Code 3: Excessive Vibration +2015-10-13 06:00:00,16,Error Code 4: Invalid Rotation Angle +2015-11-26 02:00:00,16,Error Code 3: Excessive Vibration +2015-11-27 06:00:00,16,Error Code 2: High Pressure +2015-11-27 06:00:00,16,Error Code 3: Excessive Vibration +2015-12-07 02:00:00,16,Error Code 1: Low Voltage +2015-12-16 20:00:00,16,Error Code 3: Excessive Vibration +2015-01-15 06:00:00,17,Error Code 4: Invalid Rotation Angle +2015-01-26 23:00:00,17,Error Code 2: High Pressure +2015-02-04 00:00:00,17,Error Code 3: Excessive Vibration +2015-02-13 01:00:00,17,Error Code 2: High Pressure +2015-02-14 06:00:00,17,Error Code 2: High Pressure +2015-02-14 06:00:00,17,Error Code 3: Excessive Vibration +2015-02-17 04:00:00,17,Error Code 2: High Pressure +2015-03-04 15:00:00,17,Error Code 1: Low Voltage +2015-03-06 08:00:00,17,Error Code 4: Invalid 
Rotation Angle +2015-03-10 18:00:00,17,Error Code 3: Excessive Vibration +2015-03-16 06:00:00,17,Error Code 4: Invalid Rotation Angle +2015-03-22 06:00:00,17,Error Code 1: Low Voltage +2015-03-27 22:00:00,17,Error Code 2: High Pressure +2015-03-31 06:00:00,17,Error Code 2: High Pressure +2015-03-31 06:00:00,17,Error Code 3: Excessive Vibration +2015-03-31 06:00:00,17,Error Code 5: Low Pressure +2015-04-16 12:00:00,17,Error Code 1: Low Voltage +2015-04-29 11:00:00,17,Error Code 4: Invalid Rotation Angle +2015-05-15 06:00:00,17,Error Code 1: Low Voltage +2015-05-19 14:00:00,17,Error Code 4: Invalid Rotation Angle +2015-05-24 16:00:00,17,Error Code 3: Excessive Vibration +2015-06-10 08:00:00,17,Error Code 1: Low Voltage +2015-06-14 06:00:00,17,Error Code 4: Invalid Rotation Angle +2015-07-14 06:00:00,17,Error Code 2: High Pressure +2015-07-14 06:00:00,17,Error Code 3: Excessive Vibration +2015-07-16 00:00:00,17,Error Code 3: Excessive Vibration +2015-07-24 20:00:00,17,Error Code 2: High Pressure +2015-07-25 13:00:00,17,Error Code 4: Invalid Rotation Angle +2015-07-28 04:00:00,17,Error Code 2: High Pressure +2015-07-29 06:00:00,17,Error Code 1: Low Voltage +2015-08-01 23:00:00,17,Error Code 1: Low Voltage +2015-08-08 22:00:00,17,Error Code 3: Excessive Vibration +2015-08-13 02:00:00,17,Error Code 1: Low Voltage +2015-08-28 06:00:00,17,Error Code 5: Low Pressure +2015-09-04 18:00:00,17,Error Code 1: Low Voltage +2015-09-06 04:00:00,17,Error Code 2: High Pressure +2015-09-09 00:00:00,17,Error Code 2: High Pressure +2015-09-16 06:00:00,17,Error Code 5: Low Pressure +2015-09-21 21:00:00,17,Error Code 3: Excessive Vibration +2015-10-07 18:00:00,17,Error Code 1: Low Voltage +2015-10-11 01:00:00,17,Error Code 2: High Pressure +2015-10-16 02:00:00,17,Error Code 2: High Pressure +2015-10-18 02:00:00,17,Error Code 4: Invalid Rotation Angle +2015-10-27 06:00:00,17,Error Code 4: Invalid Rotation Angle +2015-10-27 06:00:00,17,Error Code 5: Low Pressure +2015-11-25 18:00:00,17,Error Code 2: High Pressure +2015-11-26 06:00:00,17,Error Code 2: High Pressure +2015-11-26 06:00:00,17,Error Code 3: Excessive Vibration +2015-12-26 06:00:00,17,Error Code 1: Low Voltage +2015-01-06 19:00:00,18,Error Code 1: Low Voltage +2015-01-07 20:00:00,18,Error Code 5: Low Pressure +2015-01-10 02:00:00,18,Error Code 5: Low Pressure +2015-01-24 06:00:00,18,Error Code 1: Low Voltage +2015-01-29 06:00:00,18,Error Code 5: Low Pressure +2015-02-13 09:00:00,18,Error Code 4: Invalid Rotation Angle +2015-02-16 14:00:00,18,Error Code 1: Low Voltage +2015-03-07 10:00:00,18,Error Code 2: High Pressure +2015-03-10 18:00:00,18,Error Code 4: Invalid Rotation Angle +2015-03-25 05:00:00,18,Error Code 4: Invalid Rotation Angle +2015-03-25 06:00:00,18,Error Code 1: Low Voltage +2015-03-25 08:00:00,18,Error Code 2: High Pressure +2015-03-30 06:00:00,18,Error Code 1: Low Voltage +2015-04-02 07:00:00,18,Error Code 3: Excessive Vibration +2015-04-12 16:00:00,18,Error Code 4: Invalid Rotation Angle +2015-05-12 23:00:00,18,Error Code 5: Low Pressure +2015-05-14 06:00:00,18,Error Code 5: Low Pressure +2015-05-15 13:00:00,18,Error Code 2: High Pressure +2015-05-15 18:00:00,18,Error Code 3: Excessive Vibration +2015-05-19 15:00:00,18,Error Code 2: High Pressure +2015-05-24 14:00:00,18,Error Code 4: Invalid Rotation Angle +2015-06-06 06:00:00,18,Error Code 1: Low Voltage +2015-06-18 17:00:00,18,Error Code 2: High Pressure +2015-06-19 19:00:00,18,Error Code 1: Low Voltage +2015-06-28 06:00:00,18,Error Code 2: High Pressure +2015-06-28 06:00:00,18,Error Code 
3: Excessive Vibration +2015-06-29 18:00:00,18,Error Code 1: Low Voltage +2015-07-05 05:00:00,18,Error Code 2: High Pressure +2015-07-06 08:00:00,18,Error Code 3: Excessive Vibration +2015-07-28 06:00:00,18,Error Code 2: High Pressure +2015-07-28 06:00:00,18,Error Code 3: Excessive Vibration +2015-07-29 08:00:00,18,Error Code 2: High Pressure +2015-07-30 02:00:00,18,Error Code 1: Low Voltage +2015-08-01 08:00:00,18,Error Code 3: Excessive Vibration +2015-08-27 06:00:00,18,Error Code 5: Low Pressure +2015-09-08 12:00:00,18,Error Code 4: Invalid Rotation Angle +2015-09-11 06:00:00,18,Error Code 2: High Pressure +2015-09-11 06:00:00,18,Error Code 3: Excessive Vibration +2015-10-06 00:00:00,18,Error Code 4: Invalid Rotation Angle +2015-10-09 14:00:00,18,Error Code 3: Excessive Vibration +2015-10-23 12:00:00,18,Error Code 5: Low Pressure +2015-10-26 06:00:00,18,Error Code 5: Low Pressure +2015-10-31 13:00:00,18,Error Code 1: Low Voltage +2015-11-09 12:00:00,18,Error Code 3: Excessive Vibration +2015-11-20 09:00:00,18,Error Code 2: High Pressure +2015-12-21 08:00:00,18,Error Code 4: Invalid Rotation Angle +2015-01-02 04:00:00,19,Error Code 2: High Pressure +2015-01-12 06:00:00,19,Error Code 1: Low Voltage +2015-01-23 23:00:00,19,Error Code 4: Invalid Rotation Angle +2015-01-24 06:00:00,19,Error Code 2: High Pressure +2015-02-26 06:00:00,19,Error Code 5: Low Pressure +2015-03-01 13:00:00,19,Error Code 4: Invalid Rotation Angle +2015-03-15 12:00:00,19,Error Code 4: Invalid Rotation Angle +2015-03-23 03:00:00,19,Error Code 1: Low Voltage +2015-04-04 10:00:00,19,Error Code 2: High Pressure +2015-04-12 06:00:00,19,Error Code 5: Low Pressure +2015-04-18 04:00:00,19,Error Code 1: Low Voltage +2015-05-12 01:00:00,19,Error Code 2: High Pressure +2015-06-18 03:00:00,19,Error Code 4: Invalid Rotation Angle +2015-06-26 06:00:00,19,Error Code 5: Low Pressure +2015-07-11 21:00:00,19,Error Code 4: Invalid Rotation Angle +2015-07-16 05:00:00,19,Error Code 4: Invalid Rotation Angle +2015-08-02 15:00:00,19,Error Code 2: High Pressure +2015-08-06 12:00:00,19,Error Code 1: Low Voltage +2015-09-06 14:00:00,19,Error Code 2: High Pressure +2015-09-09 06:00:00,19,Error Code 5: Low Pressure +2015-09-25 13:00:00,19,Error Code 4: Invalid Rotation Angle +2015-09-28 16:00:00,19,Error Code 3: Excessive Vibration +2015-10-07 10:00:00,19,Error Code 2: High Pressure +2015-11-04 17:00:00,19,Error Code 1: Low Voltage +2015-11-23 06:00:00,19,Error Code 5: Low Pressure +2015-12-08 06:00:00,19,Error Code 1: Low Voltage +2015-12-10 23:00:00,19,Error Code 1: Low Voltage +2015-12-22 16:00:00,19,Error Code 3: Excessive Vibration +2015-01-03 06:00:00,20,Error Code 2: High Pressure +2015-01-03 06:00:00,20,Error Code 3: Excessive Vibration +2015-01-18 06:00:00,20,Error Code 5: Low Pressure +2015-01-19 16:00:00,20,Error Code 1: Low Voltage +2015-02-01 02:00:00,20,Error Code 2: High Pressure +2015-02-02 06:00:00,20,Error Code 4: Invalid Rotation Angle +2015-02-03 23:00:00,20,Error Code 4: Invalid Rotation Angle +2015-02-04 22:00:00,20,Error Code 2: High Pressure +2015-02-16 23:00:00,20,Error Code 1: Low Voltage +2015-02-17 20:00:00,20,Error Code 3: Excessive Vibration +2015-02-23 14:00:00,20,Error Code 3: Excessive Vibration +2015-02-24 02:00:00,20,Error Code 4: Invalid Rotation Angle +2015-03-19 06:00:00,20,Error Code 2: High Pressure +2015-03-19 06:00:00,20,Error Code 3: Excessive Vibration +2015-04-03 06:00:00,20,Error Code 4: Invalid Rotation Angle +2015-04-09 19:00:00,20,Error Code 4: Invalid Rotation Angle +2015-04-12 12:00:00,20,Error 
Code 1: Low Voltage +2015-04-14 09:00:00,20,Error Code 2: High Pressure +2015-04-18 06:00:00,20,Error Code 5: Low Pressure +2015-04-28 06:00:00,20,Error Code 3: Excessive Vibration +2015-05-05 00:00:00,20,Error Code 4: Invalid Rotation Angle +2015-05-07 22:00:00,20,Error Code 4: Invalid Rotation Angle +2015-05-16 02:00:00,20,Error Code 1: Low Voltage +2015-05-26 04:00:00,20,Error Code 1: Low Voltage +2015-06-01 04:00:00,20,Error Code 3: Excessive Vibration +2015-06-04 15:00:00,20,Error Code 5: Low Pressure +2015-07-07 01:00:00,20,Error Code 1: Low Voltage +2015-07-11 05:00:00,20,Error Code 1: Low Voltage +2015-07-17 06:00:00,20,Error Code 1: Low Voltage +2015-08-11 21:00:00,20,Error Code 2: High Pressure +2015-08-16 01:00:00,20,Error Code 3: Excessive Vibration +2015-08-16 06:00:00,20,Error Code 5: Low Pressure +2015-09-30 06:00:00,20,Error Code 2: High Pressure +2015-09-30 06:00:00,20,Error Code 3: Excessive Vibration +2015-10-05 00:00:00,20,Error Code 5: Low Pressure +2015-10-12 10:00:00,20,Error Code 4: Invalid Rotation Angle +2015-10-25 01:00:00,20,Error Code 2: High Pressure +2015-10-30 06:00:00,20,Error Code 4: Invalid Rotation Angle +2015-11-11 07:00:00,20,Error Code 4: Invalid Rotation Angle +2015-11-14 06:00:00,20,Error Code 2: High Pressure +2015-11-14 06:00:00,20,Error Code 3: Excessive Vibration +2015-12-13 06:00:00,20,Error Code 1: Low Voltage +2015-12-14 06:00:00,20,Error Code 1: Low Voltage +2015-12-23 10:00:00,20,Error Code 2: High Pressure +2015-12-27 17:00:00,20,Error Code 4: Invalid Rotation Angle +2015-12-29 06:00:00,20,Error Code 5: Low Pressure +2015-12-30 00:00:00,20,Error Code 3: Excessive Vibration +2015-01-22 06:00:00,21,Error Code 4: Invalid Rotation Angle +2015-02-16 08:00:00,21,Error Code 2: High Pressure +2015-03-16 15:00:00,21,Error Code 2: High Pressure +2015-04-07 06:00:00,21,Error Code 5: Low Pressure +2015-04-08 08:00:00,21,Error Code 1: Low Voltage +2015-04-26 18:00:00,21,Error Code 1: Low Voltage +2015-05-07 06:00:00,21,Error Code 1: Low Voltage +2015-05-13 05:00:00,21,Error Code 4: Invalid Rotation Angle +2015-06-18 18:00:00,21,Error Code 2: High Pressure +2015-06-23 09:00:00,21,Error Code 1: Low Voltage +2015-06-26 19:00:00,21,Error Code 2: High Pressure +2015-07-09 21:00:00,21,Error Code 4: Invalid Rotation Angle +2015-08-05 06:00:00,21,Error Code 2: High Pressure +2015-08-05 06:00:00,21,Error Code 3: Excessive Vibration +2015-08-05 06:00:00,21,Error Code 5: Low Pressure +2015-08-09 05:00:00,21,Error Code 1: Low Voltage +2015-08-22 21:00:00,21,Error Code 2: High Pressure +2015-08-28 02:00:00,21,Error Code 2: High Pressure +2015-09-04 06:00:00,21,Error Code 4: Invalid Rotation Angle +2015-09-06 08:00:00,21,Error Code 2: High Pressure +2015-09-07 22:00:00,21,Error Code 3: Excessive Vibration +2015-09-19 06:00:00,21,Error Code 5: Low Pressure +2015-10-04 06:00:00,21,Error Code 2: High Pressure +2015-10-04 06:00:00,21,Error Code 3: Excessive Vibration +2015-10-05 13:00:00,21,Error Code 4: Invalid Rotation Angle +2015-10-09 17:00:00,21,Error Code 1: Low Voltage +2015-10-22 17:00:00,21,Error Code 1: Low Voltage +2015-11-01 07:00:00,21,Error Code 3: Excessive Vibration +2015-11-03 06:00:00,21,Error Code 5: Low Pressure +2015-11-05 02:00:00,21,Error Code 2: High Pressure +2015-11-06 08:00:00,21,Error Code 2: High Pressure +2015-11-08 19:00:00,21,Error Code 4: Invalid Rotation Angle +2015-11-12 01:00:00,21,Error Code 4: Invalid Rotation Angle +2015-11-26 19:00:00,21,Error Code 2: High Pressure +2015-12-03 06:00:00,21,Error Code 2: High Pressure +2015-12-03 
06:00:00,21,Error Code 3: Excessive Vibration +2015-12-07 07:00:00,21,Error Code 1: Low Voltage +2015-12-18 06:00:00,21,Error Code 4: Invalid Rotation Angle +2015-12-21 15:00:00,21,Error Code 1: Low Voltage +2015-12-26 11:00:00,21,Error Code 4: Invalid Rotation Angle +2015-01-12 23:00:00,22,Error Code 1: Low Voltage +2015-01-13 01:00:00,22,Error Code 4: Invalid Rotation Angle +2015-01-18 01:00:00,22,Error Code 3: Excessive Vibration +2015-01-19 21:00:00,22,Error Code 5: Low Pressure +2015-01-22 05:00:00,22,Error Code 1: Low Voltage +2015-01-28 03:00:00,22,Error Code 3: Excessive Vibration +2015-02-04 17:00:00,22,Error Code 5: Low Pressure +2015-02-05 06:00:00,22,Error Code 4: Invalid Rotation Angle +2015-02-19 09:00:00,22,Error Code 1: Low Voltage +2015-03-22 06:00:00,22,Error Code 2: High Pressure +2015-03-22 06:00:00,22,Error Code 3: Excessive Vibration +2015-03-22 06:00:00,22,Error Code 5: Low Pressure +2015-03-22 22:00:00,22,Error Code 4: Invalid Rotation Angle +2015-03-28 15:00:00,22,Error Code 3: Excessive Vibration +2015-04-01 13:00:00,22,Error Code 3: Excessive Vibration +2015-04-06 06:00:00,22,Error Code 4: Invalid Rotation Angle +2015-04-21 06:00:00,22,Error Code 2: High Pressure +2015-04-21 06:00:00,22,Error Code 3: Excessive Vibration +2015-04-22 22:00:00,22,Error Code 1: Low Voltage +2015-04-27 22:00:00,22,Error Code 2: High Pressure +2015-04-28 00:00:00,22,Error Code 3: Excessive Vibration +2015-05-17 14:00:00,22,Error Code 5: Low Pressure +2015-05-21 06:00:00,22,Error Code 1: Low Voltage +2015-05-21 06:00:00,22,Error Code 4: Invalid Rotation Angle +2015-06-20 06:00:00,22,Error Code 5: Low Pressure +2015-06-21 22:00:00,22,Error Code 3: Excessive Vibration +2015-07-05 06:00:00,22,Error Code 2: High Pressure +2015-07-05 06:00:00,22,Error Code 3: Excessive Vibration +2015-07-05 21:00:00,22,Error Code 4: Invalid Rotation Angle +2015-07-07 19:00:00,22,Error Code 4: Invalid Rotation Angle +2015-07-19 08:00:00,22,Error Code 1: Low Voltage +2015-08-04 06:00:00,22,Error Code 5: Low Pressure +2015-08-09 00:00:00,22,Error Code 3: Excessive Vibration +2015-08-18 07:00:00,22,Error Code 1: Low Voltage +2015-08-19 06:00:00,22,Error Code 4: Invalid Rotation Angle +2015-08-19 12:00:00,22,Error Code 4: Invalid Rotation Angle +2015-08-20 18:00:00,22,Error Code 4: Invalid Rotation Angle +2015-08-23 22:00:00,22,Error Code 4: Invalid Rotation Angle +2015-08-27 02:00:00,22,Error Code 2: High Pressure +2015-08-30 10:00:00,22,Error Code 4: Invalid Rotation Angle +2015-09-18 06:00:00,22,Error Code 2: High Pressure +2015-09-18 06:00:00,22,Error Code 3: Excessive Vibration +2015-09-25 00:00:00,22,Error Code 2: High Pressure +2015-09-30 09:00:00,22,Error Code 4: Invalid Rotation Angle +2015-10-19 13:00:00,22,Error Code 1: Low Voltage +2015-10-23 06:00:00,22,Error Code 1: Low Voltage +2015-10-30 06:00:00,22,Error Code 5: Low Pressure +2015-11-02 06:00:00,22,Error Code 4: Invalid Rotation Angle +2015-11-04 16:00:00,22,Error Code 4: Invalid Rotation Angle +2015-11-11 12:00:00,22,Error Code 4: Invalid Rotation Angle +2015-11-17 06:00:00,22,Error Code 5: Low Pressure +2015-11-17 23:00:00,22,Error Code 3: Excessive Vibration +2015-11-24 02:00:00,22,Error Code 1: Low Voltage +2015-11-27 10:00:00,22,Error Code 3: Excessive Vibration +2015-12-02 18:00:00,22,Error Code 4: Invalid Rotation Angle +2015-12-04 15:00:00,22,Error Code 1: Low Voltage +2015-12-04 23:00:00,22,Error Code 2: High Pressure +2015-12-14 01:00:00,22,Error Code 4: Invalid Rotation Angle +2015-12-16 13:00:00,22,Error Code 3: Excessive Vibration 
+2015-12-26 11:00:00,22,Error Code 1: Low Voltage +2015-01-04 06:00:00,23,Error Code 2: High Pressure +2015-01-04 06:00:00,23,Error Code 3: Excessive Vibration +2015-01-19 06:00:00,23,Error Code 5: Low Pressure +2015-01-26 01:00:00,23,Error Code 2: High Pressure +2015-01-28 06:00:00,23,Error Code 5: Low Pressure +2015-02-15 01:00:00,23,Error Code 5: Low Pressure +2015-03-20 06:00:00,23,Error Code 1: Low Voltage +2015-03-20 06:00:00,23,Error Code 5: Low Pressure +2015-04-04 06:00:00,23,Error Code 1: Low Voltage +2015-04-06 06:00:00,23,Error Code 5: Low Pressure +2015-04-08 12:00:00,23,Error Code 2: High Pressure +2015-04-09 20:00:00,23,Error Code 1: Low Voltage +2015-05-01 17:00:00,23,Error Code 5: Low Pressure +2015-05-10 02:00:00,23,Error Code 1: Low Voltage +2015-06-03 06:00:00,23,Error Code 4: Invalid Rotation Angle +2015-06-18 19:00:00,23,Error Code 4: Invalid Rotation Angle +2015-07-01 00:00:00,23,Error Code 3: Excessive Vibration +2015-07-18 06:00:00,23,Error Code 2: High Pressure +2015-07-18 06:00:00,23,Error Code 3: Excessive Vibration +2015-07-25 03:00:00,23,Error Code 1: Low Voltage +2015-08-02 06:00:00,23,Error Code 4: Invalid Rotation Angle +2015-08-25 07:00:00,23,Error Code 1: Low Voltage +2015-09-01 06:00:00,23,Error Code 2: High Pressure +2015-09-01 06:00:00,23,Error Code 3: Excessive Vibration +2015-09-06 13:00:00,23,Error Code 1: Low Voltage +2015-09-19 05:00:00,23,Error Code 1: Low Voltage +2015-10-01 06:00:00,23,Error Code 4: Invalid Rotation Angle +2015-10-01 20:00:00,23,Error Code 2: High Pressure +2015-10-16 06:00:00,23,Error Code 5: Low Pressure +2015-10-25 14:00:00,23,Error Code 2: High Pressure +2015-10-30 13:00:00,23,Error Code 3: Excessive Vibration +2015-11-15 06:00:00,23,Error Code 4: Invalid Rotation Angle +2015-11-20 18:00:00,23,Error Code 2: High Pressure +2015-12-14 14:00:00,23,Error Code 2: High Pressure +2015-12-15 06:00:00,23,Error Code 5: Low Pressure +2015-12-23 01:00:00,23,Error Code 4: Invalid Rotation Angle +2015-12-31 20:00:00,23,Error Code 3: Excessive Vibration +2015-01-01 06:00:00,24,Error Code 1: Low Voltage +2015-02-15 06:00:00,24,Error Code 1: Low Voltage +2015-02-19 09:00:00,24,Error Code 3: Excessive Vibration +2015-04-01 06:00:00,24,Error Code 5: Low Pressure +2015-04-16 06:00:00,24,Error Code 4: Invalid Rotation Angle +2015-04-22 15:00:00,24,Error Code 2: High Pressure +2015-05-08 01:00:00,24,Error Code 1: Low Voltage +2015-05-09 04:00:00,24,Error Code 2: High Pressure +2015-05-17 10:00:00,24,Error Code 4: Invalid Rotation Angle +2015-06-11 19:00:00,24,Error Code 4: Invalid Rotation Angle +2015-06-30 06:00:00,24,Error Code 4: Invalid Rotation Angle +2015-07-15 04:00:00,24,Error Code 3: Excessive Vibration +2015-07-18 05:00:00,24,Error Code 4: Invalid Rotation Angle +2015-07-19 15:00:00,24,Error Code 2: High Pressure +2015-07-30 06:00:00,24,Error Code 5: Low Pressure +2015-08-02 06:00:00,24,Error Code 1: Low Voltage +2015-08-03 14:00:00,24,Error Code 3: Excessive Vibration +2015-08-19 12:00:00,24,Error Code 1: Low Voltage +2015-08-19 13:00:00,24,Error Code 4: Invalid Rotation Angle +2015-08-23 04:00:00,24,Error Code 2: High Pressure +2015-08-28 15:00:00,24,Error Code 1: Low Voltage +2015-08-30 23:00:00,24,Error Code 1: Low Voltage +2015-09-14 12:00:00,24,Error Code 4: Invalid Rotation Angle +2015-09-28 06:00:00,24,Error Code 1: Low Voltage +2015-10-01 04:00:00,24,Error Code 1: Low Voltage +2015-10-13 06:00:00,24,Error Code 4: Invalid Rotation Angle +2015-11-12 06:00:00,24,Error Code 5: Low Pressure +2015-11-19 15:00:00,24,Error Code 4: 
Invalid Rotation Angle +2015-11-27 06:00:00,24,Error Code 4: Invalid Rotation Angle +2015-12-07 02:00:00,24,Error Code 2: High Pressure +2015-12-17 01:00:00,24,Error Code 4: Invalid Rotation Angle +2015-12-17 16:00:00,24,Error Code 3: Excessive Vibration +2015-12-24 19:00:00,24,Error Code 3: Excessive Vibration +2015-01-06 06:00:00,25,Error Code 1: Low Voltage +2015-01-24 19:00:00,25,Error Code 4: Invalid Rotation Angle +2015-01-28 00:00:00,25,Error Code 2: High Pressure +2015-02-03 23:00:00,25,Error Code 2: High Pressure +2015-02-16 07:00:00,25,Error Code 1: Low Voltage +2015-02-27 09:00:00,25,Error Code 4: Invalid Rotation Angle +2015-03-04 06:00:00,25,Error Code 1: Low Voltage +2015-03-17 17:00:00,25,Error Code 3: Excessive Vibration +2015-03-23 21:00:00,25,Error Code 1: Low Voltage +2015-04-16 04:00:00,25,Error Code 2: High Pressure +2015-04-16 13:00:00,25,Error Code 2: High Pressure +2015-04-28 02:00:00,25,Error Code 1: Low Voltage +2015-05-03 06:00:00,25,Error Code 5: Low Pressure +2015-05-05 12:00:00,25,Error Code 1: Low Voltage +2015-05-22 06:00:00,25,Error Code 4: Invalid Rotation Angle +2015-06-02 06:00:00,25,Error Code 1: Low Voltage +2015-06-02 06:00:00,25,Error Code 2: High Pressure +2015-06-02 06:00:00,25,Error Code 3: Excessive Vibration +2015-06-04 14:00:00,25,Error Code 2: High Pressure +2015-06-06 20:00:00,25,Error Code 1: Low Voltage +2015-06-10 09:00:00,25,Error Code 3: Excessive Vibration +2015-06-11 22:00:00,25,Error Code 3: Excessive Vibration +2015-06-26 04:00:00,25,Error Code 4: Invalid Rotation Angle +2015-07-09 10:00:00,25,Error Code 3: Excessive Vibration +2015-07-26 03:00:00,25,Error Code 1: Low Voltage +2015-08-03 18:00:00,25,Error Code 3: Excessive Vibration +2015-08-16 06:00:00,25,Error Code 2: High Pressure +2015-08-16 06:00:00,25,Error Code 3: Excessive Vibration +2015-08-16 06:00:00,25,Error Code 5: Low Pressure +2015-08-18 00:00:00,25,Error Code 2: High Pressure +2015-09-01 11:00:00,25,Error Code 3: Excessive Vibration +2015-10-15 18:00:00,25,Error Code 2: High Pressure +2015-10-21 20:00:00,25,Error Code 5: Low Pressure +2015-10-30 06:00:00,25,Error Code 2: High Pressure +2015-10-30 06:00:00,25,Error Code 3: Excessive Vibration +2015-11-29 06:00:00,25,Error Code 5: Low Pressure +2015-12-11 09:00:00,25,Error Code 2: High Pressure +2015-01-29 22:00:00,26,Error Code 1: Low Voltage +2015-02-19 06:00:00,26,Error Code 1: Low Voltage +2015-02-27 01:00:00,26,Error Code 4: Invalid Rotation Angle +2015-03-17 09:00:00,26,Error Code 2: High Pressure +2015-03-28 07:00:00,26,Error Code 1: Low Voltage +2015-04-05 06:00:00,26,Error Code 1: Low Voltage +2015-04-14 23:00:00,26,Error Code 1: Low Voltage +2015-04-20 13:00:00,26,Error Code 3: Excessive Vibration +2015-04-25 16:00:00,26,Error Code 3: Excessive Vibration +2015-04-30 15:00:00,26,Error Code 3: Excessive Vibration +2015-05-04 02:00:00,26,Error Code 1: Low Voltage +2015-05-05 21:00:00,26,Error Code 3: Excessive Vibration +2015-05-13 15:00:00,26,Error Code 2: High Pressure +2015-05-17 16:00:00,26,Error Code 1: Low Voltage +2015-06-20 08:00:00,26,Error Code 1: Low Voltage +2015-06-25 05:00:00,26,Error Code 4: Invalid Rotation Angle +2015-07-01 22:00:00,26,Error Code 1: Low Voltage +2015-07-04 17:00:00,26,Error Code 3: Excessive Vibration +2015-07-11 11:00:00,26,Error Code 2: High Pressure +2015-07-26 15:00:00,26,Error Code 3: Excessive Vibration +2015-08-15 11:00:00,26,Error Code 1: Low Voltage +2015-08-16 18:00:00,26,Error Code 2: High Pressure +2015-08-18 06:00:00,26,Error Code 2: High Pressure +2015-08-18 
06:00:00,26,Error Code 3: Excessive Vibration +2015-08-25 17:00:00,26,Error Code 4: Invalid Rotation Angle +2015-09-02 06:00:00,26,Error Code 1: Low Voltage +2015-09-17 15:00:00,26,Error Code 5: Low Pressure +2015-09-19 09:00:00,26,Error Code 3: Excessive Vibration +2015-10-04 12:00:00,26,Error Code 2: High Pressure +2015-10-12 11:00:00,26,Error Code 1: Low Voltage +2015-10-17 06:00:00,26,Error Code 2: High Pressure +2015-10-17 06:00:00,26,Error Code 3: Excessive Vibration +2015-10-24 19:00:00,26,Error Code 1: Low Voltage +2015-11-01 06:00:00,26,Error Code 1: Low Voltage +2015-11-10 13:00:00,26,Error Code 4: Invalid Rotation Angle +2015-11-16 06:00:00,26,Error Code 2: High Pressure +2015-11-16 06:00:00,26,Error Code 3: Excessive Vibration +2015-11-17 18:00:00,26,Error Code 4: Invalid Rotation Angle +2015-12-05 06:00:00,26,Error Code 3: Excessive Vibration +2015-12-07 14:00:00,26,Error Code 4: Invalid Rotation Angle +2015-12-25 17:00:00,26,Error Code 2: High Pressure +2015-12-26 04:00:00,26,Error Code 4: Invalid Rotation Angle +2015-01-03 21:00:00,27,Error Code 1: Low Voltage +2015-01-05 20:00:00,27,Error Code 1: Low Voltage +2015-01-06 18:00:00,27,Error Code 4: Invalid Rotation Angle +2015-01-09 06:00:00,27,Error Code 2: High Pressure +2015-01-09 06:00:00,27,Error Code 3: Excessive Vibration +2015-01-15 15:00:00,27,Error Code 4: Invalid Rotation Angle +2015-01-24 06:00:00,27,Error Code 1: Low Voltage +2015-01-31 19:00:00,27,Error Code 3: Excessive Vibration +2015-02-15 08:00:00,27,Error Code 1: Low Voltage +2015-02-16 18:00:00,27,Error Code 4: Invalid Rotation Angle +2015-02-23 06:00:00,27,Error Code 2: High Pressure +2015-02-23 06:00:00,27,Error Code 3: Excessive Vibration +2015-03-02 13:00:00,27,Error Code 2: High Pressure +2015-03-23 07:00:00,27,Error Code 2: High Pressure +2015-04-07 01:00:00,27,Error Code 5: Low Pressure +2015-04-23 10:00:00,27,Error Code 1: Low Voltage +2015-04-30 23:00:00,27,Error Code 3: Excessive Vibration +2015-05-08 07:00:00,27,Error Code 3: Excessive Vibration +2015-06-21 02:00:00,27,Error Code 3: Excessive Vibration +2015-06-21 10:00:00,27,Error Code 1: Low Voltage +2015-06-29 12:00:00,27,Error Code 4: Invalid Rotation Angle +2015-08-03 13:00:00,27,Error Code 1: Low Voltage +2015-08-23 14:00:00,27,Error Code 3: Excessive Vibration +2015-08-31 21:00:00,27,Error Code 5: Low Pressure +2015-09-14 03:00:00,27,Error Code 2: High Pressure +2015-10-08 04:00:00,27,Error Code 2: High Pressure +2015-10-20 21:00:00,27,Error Code 4: Invalid Rotation Angle +2015-10-23 06:00:00,27,Error Code 2: High Pressure +2015-10-25 22:00:00,27,Error Code 4: Invalid Rotation Angle +2015-11-02 14:00:00,27,Error Code 2: High Pressure +2015-12-26 17:00:00,27,Error Code 4: Invalid Rotation Angle +2015-01-11 06:00:00,28,Error Code 4: Invalid Rotation Angle +2015-01-25 22:00:00,28,Error Code 4: Invalid Rotation Angle +2015-01-26 14:00:00,28,Error Code 1: Low Voltage +2015-01-27 16:00:00,28,Error Code 3: Excessive Vibration +2015-02-01 16:00:00,28,Error Code 1: Low Voltage +2015-03-06 02:00:00,28,Error Code 2: High Pressure +2015-03-09 05:00:00,28,Error Code 4: Invalid Rotation Angle +2015-03-16 06:00:00,28,Error Code 2: High Pressure +2015-03-16 06:00:00,28,Error Code 3: Excessive Vibration +2015-03-16 12:00:00,28,Error Code 2: High Pressure +2015-03-30 07:00:00,28,Error Code 4: Invalid Rotation Angle +2015-03-31 07:00:00,28,Error Code 2: High Pressure +2015-04-12 01:00:00,28,Error Code 1: Low Voltage +2015-04-14 02:00:00,28,Error Code 1: Low Voltage +2015-04-30 06:00:00,28,Error Code 1: Low 
Voltage +2015-05-10 23:00:00,28,Error Code 1: Low Voltage +2015-05-24 18:00:00,28,Error Code 5: Low Pressure +2015-06-04 14:00:00,28,Error Code 3: Excessive Vibration +2015-06-09 20:00:00,28,Error Code 2: High Pressure +2015-06-12 16:00:00,28,Error Code 3: Excessive Vibration +2015-06-14 06:00:00,28,Error Code 2: High Pressure +2015-06-14 06:00:00,28,Error Code 3: Excessive Vibration +2015-06-18 05:00:00,28,Error Code 3: Excessive Vibration +2015-07-08 23:00:00,28,Error Code 4: Invalid Rotation Angle +2015-07-13 20:00:00,28,Error Code 2: High Pressure +2015-07-30 04:00:00,28,Error Code 2: High Pressure +2015-08-02 00:00:00,28,Error Code 1: Low Voltage +2015-08-03 03:00:00,28,Error Code 2: High Pressure +2015-08-13 06:00:00,28,Error Code 1: Low Voltage +2015-08-24 06:00:00,28,Error Code 3: Excessive Vibration +2015-09-07 22:00:00,28,Error Code 2: High Pressure +2015-09-09 11:00:00,28,Error Code 2: High Pressure +2015-09-28 23:00:00,28,Error Code 5: Low Pressure +2015-10-19 09:00:00,28,Error Code 1: Low Voltage +2015-11-13 12:00:00,28,Error Code 2: High Pressure +2015-11-26 23:00:00,28,Error Code 1: Low Voltage +2015-12-17 14:00:00,28,Error Code 3: Excessive Vibration +2015-12-24 09:00:00,28,Error Code 1: Low Voltage +2015-12-26 06:00:00,28,Error Code 1: Low Voltage +2015-12-27 16:00:00,28,Error Code 1: Low Voltage +2015-01-06 03:00:00,29,Error Code 1: Low Voltage +2015-01-09 10:00:00,29,Error Code 1: Low Voltage +2015-01-19 17:00:00,29,Error Code 1: Low Voltage +2015-01-24 20:00:00,29,Error Code 2: High Pressure +2015-01-26 20:00:00,29,Error Code 4: Invalid Rotation Angle +2015-02-04 20:00:00,29,Error Code 1: Low Voltage +2015-02-05 01:00:00,29,Error Code 5: Low Pressure +2015-02-11 18:00:00,29,Error Code 4: Invalid Rotation Angle +2015-03-02 06:00:00,29,Error Code 1: Low Voltage +2015-03-16 11:00:00,29,Error Code 3: Excessive Vibration +2015-03-17 08:00:00,29,Error Code 4: Invalid Rotation Angle +2015-03-26 16:00:00,29,Error Code 1: Low Voltage +2015-03-27 08:00:00,29,Error Code 4: Invalid Rotation Angle +2015-03-28 23:00:00,29,Error Code 4: Invalid Rotation Angle +2015-04-04 17:00:00,29,Error Code 3: Excessive Vibration +2015-04-09 16:00:00,29,Error Code 1: Low Voltage +2015-04-10 12:00:00,29,Error Code 2: High Pressure +2015-04-17 03:00:00,29,Error Code 4: Invalid Rotation Angle +2015-04-25 22:00:00,29,Error Code 2: High Pressure +2015-05-03 23:00:00,29,Error Code 3: Excessive Vibration +2015-05-04 18:00:00,29,Error Code 1: Low Voltage +2015-05-20 04:00:00,29,Error Code 1: Low Voltage +2015-05-24 18:00:00,29,Error Code 2: High Pressure +2015-06-12 16:00:00,29,Error Code 2: High Pressure +2015-07-06 04:00:00,29,Error Code 5: Low Pressure +2015-07-06 05:00:00,29,Error Code 1: Low Voltage +2015-07-14 06:00:00,29,Error Code 1: Low Voltage +2015-07-29 01:00:00,29,Error Code 3: Excessive Vibration +2015-07-29 16:00:00,29,Error Code 4: Invalid Rotation Angle +2015-08-01 02:00:00,29,Error Code 1: Low Voltage +2015-09-09 11:00:00,29,Error Code 1: Low Voltage +2015-09-22 04:00:00,29,Error Code 4: Invalid Rotation Angle +2015-10-01 11:00:00,29,Error Code 1: Low Voltage +2015-10-13 01:00:00,29,Error Code 4: Invalid Rotation Angle +2015-10-14 15:00:00,29,Error Code 4: Invalid Rotation Angle +2015-11-01 05:00:00,29,Error Code 1: Low Voltage +2015-11-15 08:00:00,29,Error Code 5: Low Pressure +2015-11-22 15:00:00,29,Error Code 2: High Pressure +2015-11-27 06:00:00,29,Error Code 2: High Pressure +2015-11-27 06:00:00,29,Error Code 3: Excessive Vibration +2015-12-16 10:00:00,29,Error Code 4: Invalid 
Rotation Angle +2015-12-18 03:00:00,29,Error Code 1: Low Voltage +2015-12-24 17:00:00,29,Error Code 2: High Pressure +2015-01-02 03:00:00,30,Error Code 1: Low Voltage +2015-01-05 14:00:00,30,Error Code 4: Invalid Rotation Angle +2015-01-08 06:00:00,30,Error Code 5: Low Pressure +2015-01-10 13:00:00,30,Error Code 2: High Pressure +2015-01-11 02:00:00,30,Error Code 1: Low Voltage +2015-01-16 06:00:00,30,Error Code 2: High Pressure +2015-01-18 16:00:00,30,Error Code 4: Invalid Rotation Angle +2015-01-29 11:00:00,30,Error Code 2: High Pressure +2015-02-13 10:00:00,30,Error Code 1: Low Voltage +2015-02-17 06:00:00,30,Error Code 1: Low Voltage +2015-03-18 19:00:00,30,Error Code 4: Invalid Rotation Angle +2015-03-22 12:00:00,30,Error Code 4: Invalid Rotation Angle +2015-03-24 06:00:00,30,Error Code 5: Low Pressure +2015-03-24 16:00:00,30,Error Code 2: High Pressure +2015-04-12 20:00:00,30,Error Code 3: Excessive Vibration +2015-04-26 00:00:00,30,Error Code 3: Excessive Vibration +2015-05-29 22:00:00,30,Error Code 1: Low Voltage +2015-06-13 03:00:00,30,Error Code 2: High Pressure +2015-06-22 17:00:00,30,Error Code 3: Excessive Vibration +2015-07-04 19:00:00,30,Error Code 1: Low Voltage +2015-07-24 04:00:00,30,Error Code 3: Excessive Vibration +2015-08-06 06:00:00,30,Error Code 2: High Pressure +2015-08-06 06:00:00,30,Error Code 3: Excessive Vibration +2015-08-06 06:00:00,30,Error Code 5: Low Pressure +2015-08-11 23:00:00,30,Error Code 1: Low Voltage +2015-08-20 22:00:00,30,Error Code 1: Low Voltage +2015-09-25 20:00:00,30,Error Code 1: Low Voltage +2015-10-02 16:00:00,30,Error Code 1: Low Voltage +2015-10-07 13:00:00,30,Error Code 2: High Pressure +2015-10-19 04:00:00,30,Error Code 2: High Pressure +2015-10-27 06:00:00,30,Error Code 2: High Pressure +2015-10-27 18:00:00,30,Error Code 5: Low Pressure +2015-11-02 17:00:00,30,Error Code 4: Invalid Rotation Angle +2015-11-13 21:00:00,30,Error Code 4: Invalid Rotation Angle +2015-11-18 20:00:00,30,Error Code 2: High Pressure +2015-11-21 10:00:00,30,Error Code 1: Low Voltage +2015-12-01 13:00:00,30,Error Code 4: Invalid Rotation Angle +2015-12-04 06:00:00,30,Error Code 5: Low Pressure +2015-12-27 17:00:00,30,Error Code 1: Low Voltage +2016-01-01 05:00:00,30,Error Code 2: High Pressure +2015-01-05 13:00:00,31,Error Code 1: Low Voltage +2015-01-08 15:00:00,31,Error Code 2: High Pressure +2015-02-03 05:00:00,31,Error Code 2: High Pressure +2015-03-22 06:00:00,31,Error Code 4: Invalid Rotation Angle +2015-04-05 07:00:00,31,Error Code 2: High Pressure +2015-05-21 06:00:00,31,Error Code 1: Low Voltage +2015-05-25 14:00:00,31,Error Code 4: Invalid Rotation Angle +2015-06-02 22:00:00,31,Error Code 1: Low Voltage +2015-06-03 19:00:00,31,Error Code 5: Low Pressure +2015-07-05 06:00:00,31,Error Code 3: Excessive Vibration +2015-07-05 06:00:00,31,Error Code 4: Invalid Rotation Angle +2015-07-06 23:00:00,31,Error Code 5: Low Pressure +2015-07-25 23:00:00,31,Error Code 2: High Pressure +2015-08-28 06:00:00,31,Error Code 2: High Pressure +2015-09-03 06:00:00,31,Error Code 4: Invalid Rotation Angle +2015-09-23 10:00:00,31,Error Code 2: High Pressure +2015-10-18 06:00:00,31,Error Code 2: High Pressure +2015-10-18 06:00:00,31,Error Code 3: Excessive Vibration +2015-11-07 08:00:00,31,Error Code 1: Low Voltage +2015-11-14 04:00:00,31,Error Code 4: Invalid Rotation Angle +2015-11-17 06:00:00,31,Error Code 1: Low Voltage +2015-11-17 06:00:00,31,Error Code 4: Invalid Rotation Angle +2015-11-20 19:00:00,31,Error Code 1: Low Voltage +2015-12-03 20:00:00,31,Error Code 2: High 
Pressure +2015-12-15 14:00:00,31,Error Code 3: Excessive Vibration +2015-12-18 09:00:00,31,Error Code 4: Invalid Rotation Angle +2015-01-09 06:00:00,32,Error Code 1: Low Voltage +2015-02-06 20:00:00,32,Error Code 3: Excessive Vibration +2015-02-08 06:00:00,32,Error Code 5: Low Pressure +2015-02-16 23:00:00,32,Error Code 2: High Pressure +2015-02-26 05:00:00,32,Error Code 4: Invalid Rotation Angle +2015-03-04 21:00:00,32,Error Code 5: Low Pressure +2015-03-10 06:00:00,32,Error Code 1: Low Voltage +2015-03-27 02:00:00,32,Error Code 2: High Pressure +2015-03-28 07:00:00,32,Error Code 2: High Pressure +2015-04-05 07:00:00,32,Error Code 4: Invalid Rotation Angle +2015-04-09 06:00:00,32,Error Code 5: Low Pressure +2015-04-30 16:00:00,32,Error Code 3: Excessive Vibration +2015-05-09 06:00:00,32,Error Code 2: High Pressure +2015-05-09 06:00:00,32,Error Code 3: Excessive Vibration +2015-05-20 10:00:00,32,Error Code 2: High Pressure +2015-05-23 07:00:00,32,Error Code 2: High Pressure +2015-06-01 00:00:00,32,Error Code 4: Invalid Rotation Angle +2015-06-23 06:00:00,32,Error Code 2: High Pressure +2015-06-23 06:00:00,32,Error Code 3: Excessive Vibration +2015-06-23 06:00:00,32,Error Code 5: Low Pressure +2015-07-07 20:00:00,32,Error Code 4: Invalid Rotation Angle +2015-07-30 21:00:00,32,Error Code 4: Invalid Rotation Angle +2015-08-07 06:00:00,32,Error Code 1: Low Voltage +2015-08-29 13:00:00,32,Error Code 2: High Pressure +2015-09-03 09:00:00,32,Error Code 1: Low Voltage +2015-09-04 20:00:00,32,Error Code 3: Excessive Vibration +2015-09-10 10:00:00,32,Error Code 3: Excessive Vibration +2015-09-12 06:00:00,32,Error Code 1: Low Voltage +2015-09-16 11:00:00,32,Error Code 4: Invalid Rotation Angle +2015-09-21 06:00:00,32,Error Code 5: Low Pressure +2015-10-07 14:00:00,32,Error Code 2: High Pressure +2015-10-26 14:00:00,32,Error Code 2: High Pressure +2015-10-26 21:00:00,32,Error Code 2: High Pressure +2015-11-05 06:00:00,32,Error Code 1: Low Voltage +2015-11-24 20:00:00,32,Error Code 4: Invalid Rotation Angle +2015-12-17 17:00:00,32,Error Code 4: Invalid Rotation Angle +2015-12-23 19:00:00,32,Error Code 2: High Pressure +2015-01-24 08:00:00,33,Error Code 5: Low Pressure +2015-01-27 23:00:00,33,Error Code 3: Excessive Vibration +2015-01-30 06:00:00,33,Error Code 1: Low Voltage +2015-01-30 06:00:00,33,Error Code 5: Low Pressure +2015-02-27 04:00:00,33,Error Code 1: Low Voltage +2015-03-13 10:00:00,33,Error Code 5: Low Pressure +2015-03-31 06:00:00,33,Error Code 5: Low Pressure +2015-04-14 22:00:00,33,Error Code 3: Excessive Vibration +2015-04-15 19:00:00,33,Error Code 1: Low Voltage +2015-04-30 06:00:00,33,Error Code 2: High Pressure +2015-04-30 06:00:00,33,Error Code 3: Excessive Vibration +2015-05-23 12:00:00,33,Error Code 1: Low Voltage +2015-05-30 06:00:00,33,Error Code 2: High Pressure +2015-05-30 06:00:00,33,Error Code 3: Excessive Vibration +2015-06-11 02:00:00,33,Error Code 3: Excessive Vibration +2015-06-14 06:00:00,33,Error Code 5: Low Pressure +2015-06-14 12:00:00,33,Error Code 1: Low Voltage +2015-07-01 00:00:00,33,Error Code 2: High Pressure +2015-07-03 02:00:00,33,Error Code 2: High Pressure +2015-07-11 13:00:00,33,Error Code 4: Invalid Rotation Angle +2015-08-16 17:00:00,33,Error Code 3: Excessive Vibration +2015-08-28 06:00:00,33,Error Code 5: Low Pressure +2015-08-31 00:00:00,33,Error Code 1: Low Voltage +2015-09-11 16:00:00,33,Error Code 1: Low Voltage +2015-09-14 06:00:00,33,Error Code 4: Invalid Rotation Angle +2015-09-27 06:00:00,33,Error Code 2: High Pressure +2015-09-27 
06:00:00,33,Error Code 3: Excessive Vibration +2015-10-16 00:00:00,33,Error Code 2: High Pressure +2015-10-24 09:00:00,33,Error Code 4: Invalid Rotation Angle +2015-11-11 04:00:00,33,Error Code 4: Invalid Rotation Angle +2015-11-11 21:00:00,33,Error Code 1: Low Voltage +2015-11-14 17:00:00,33,Error Code 2: High Pressure +2015-11-26 06:00:00,33,Error Code 1: Low Voltage +2015-12-05 02:00:00,33,Error Code 2: High Pressure +2015-12-20 14:00:00,33,Error Code 5: Low Pressure +2015-12-24 08:00:00,33,Error Code 2: High Pressure +2015-12-28 02:00:00,33,Error Code 2: High Pressure +2015-01-14 16:00:00,34,Error Code 4: Invalid Rotation Angle +2015-02-08 04:00:00,34,Error Code 3: Excessive Vibration +2015-02-11 14:00:00,34,Error Code 2: High Pressure +2015-03-01 09:00:00,34,Error Code 3: Excessive Vibration +2015-04-02 13:00:00,34,Error Code 3: Excessive Vibration +2015-04-06 17:00:00,34,Error Code 1: Low Voltage +2015-04-15 06:00:00,34,Error Code 1: Low Voltage +2015-04-27 05:00:00,34,Error Code 1: Low Voltage +2015-05-11 14:00:00,34,Error Code 2: High Pressure +2015-06-20 05:00:00,34,Error Code 4: Invalid Rotation Angle +2015-06-22 20:00:00,34,Error Code 5: Low Pressure +2015-06-24 12:00:00,34,Error Code 1: Low Voltage +2015-07-09 14:00:00,34,Error Code 2: High Pressure +2015-07-25 16:00:00,34,Error Code 2: High Pressure +2015-09-04 04:00:00,34,Error Code 2: High Pressure +2015-09-25 08:00:00,34,Error Code 2: High Pressure +2015-09-26 19:00:00,34,Error Code 2: High Pressure +2015-09-30 19:00:00,34,Error Code 1: Low Voltage +2015-10-01 10:00:00,34,Error Code 4: Invalid Rotation Angle +2015-10-03 21:00:00,34,Error Code 4: Invalid Rotation Angle +2015-10-23 01:00:00,34,Error Code 4: Invalid Rotation Angle +2015-10-27 06:00:00,34,Error Code 2: High Pressure +2015-10-27 06:00:00,34,Error Code 3: Excessive Vibration +2015-10-29 06:00:00,34,Error Code 3: Excessive Vibration +2015-11-07 02:00:00,34,Error Code 2: High Pressure +2015-11-11 06:00:00,34,Error Code 1: Low Voltage +2015-11-25 05:00:00,34,Error Code 4: Invalid Rotation Angle +2015-12-04 16:00:00,34,Error Code 2: High Pressure +2015-12-07 18:00:00,34,Error Code 1: Low Voltage +2015-12-11 08:00:00,34,Error Code 2: High Pressure +2015-12-11 17:00:00,34,Error Code 3: Excessive Vibration +2015-12-15 20:00:00,34,Error Code 1: Low Voltage +2015-12-20 03:00:00,34,Error Code 4: Invalid Rotation Angle +2015-12-28 21:00:00,34,Error Code 2: High Pressure +2015-01-02 17:00:00,35,Error Code 1: Low Voltage +2015-01-05 06:00:00,35,Error Code 5: Low Pressure +2015-01-06 07:00:00,35,Error Code 4: Invalid Rotation Angle +2015-01-23 18:00:00,35,Error Code 1: Low Voltage +2015-02-04 06:00:00,35,Error Code 4: Invalid Rotation Angle +2015-02-09 13:00:00,35,Error Code 2: High Pressure +2015-03-13 10:00:00,35,Error Code 1: Low Voltage +2015-03-21 06:00:00,35,Error Code 5: Low Pressure +2015-03-27 05:00:00,35,Error Code 4: Invalid Rotation Angle +2015-04-05 06:00:00,35,Error Code 4: Invalid Rotation Angle +2015-04-13 07:00:00,35,Error Code 2: High Pressure +2015-04-14 12:00:00,35,Error Code 4: Invalid Rotation Angle +2015-05-05 06:00:00,35,Error Code 5: Low Pressure +2015-05-05 20:00:00,35,Error Code 4: Invalid Rotation Angle +2015-05-19 01:00:00,35,Error Code 2: High Pressure +2015-05-20 06:00:00,35,Error Code 1: Low Voltage +2015-06-01 03:00:00,35,Error Code 1: Low Voltage +2015-06-08 10:00:00,35,Error Code 2: High Pressure +2015-06-09 01:00:00,35,Error Code 1: Low Voltage +2015-07-05 19:00:00,35,Error Code 2: High Pressure +2015-07-19 06:00:00,35,Error Code 2: High 
Pressure +2015-07-19 06:00:00,35,Error Code 3: Excessive Vibration +2015-07-29 10:00:00,35,Error Code 4: Invalid Rotation Angle +2015-08-03 06:00:00,35,Error Code 1: Low Voltage +2015-08-07 11:00:00,35,Error Code 1: Low Voltage +2015-08-12 15:00:00,35,Error Code 1: Low Voltage +2015-08-22 02:00:00,35,Error Code 4: Invalid Rotation Angle +2015-08-28 08:00:00,35,Error Code 5: Low Pressure +2015-09-04 03:00:00,35,Error Code 5: Low Pressure +2015-09-17 06:00:00,35,Error Code 1: Low Voltage +2015-09-17 06:00:00,35,Error Code 5: Low Pressure +2015-09-18 06:00:00,35,Error Code 1: Low Voltage +2015-10-09 22:00:00,35,Error Code 4: Invalid Rotation Angle +2015-10-11 04:00:00,35,Error Code 4: Invalid Rotation Angle +2015-10-24 04:00:00,35,Error Code 3: Excessive Vibration +2015-10-29 00:00:00,35,Error Code 3: Excessive Vibration +2015-12-06 13:00:00,35,Error Code 4: Invalid Rotation Angle +2015-12-19 13:00:00,35,Error Code 4: Invalid Rotation Angle +2015-12-29 02:00:00,35,Error Code 5: Low Pressure +2015-12-30 04:00:00,35,Error Code 4: Invalid Rotation Angle +2015-01-06 02:00:00,36,Error Code 3: Excessive Vibration +2015-01-10 23:00:00,36,Error Code 1: Low Voltage +2015-01-16 01:00:00,36,Error Code 3: Excessive Vibration +2015-01-21 01:00:00,36,Error Code 3: Excessive Vibration +2015-01-21 10:00:00,36,Error Code 1: Low Voltage +2015-02-19 08:00:00,36,Error Code 1: Low Voltage +2015-02-20 06:00:00,36,Error Code 1: Low Voltage +2015-02-21 22:00:00,36,Error Code 5: Low Pressure +2015-03-20 14:00:00,36,Error Code 1: Low Voltage +2015-04-21 19:00:00,36,Error Code 1: Low Voltage +2015-04-21 20:00:00,36,Error Code 2: High Pressure +2015-04-26 19:00:00,36,Error Code 2: High Pressure +2015-06-04 10:00:00,36,Error Code 3: Excessive Vibration +2015-06-05 06:00:00,36,Error Code 1: Low Voltage +2015-06-11 06:00:00,36,Error Code 2: High Pressure +2015-06-20 06:00:00,36,Error Code 2: High Pressure +2015-06-20 06:00:00,36,Error Code 3: Excessive Vibration +2015-07-16 09:00:00,36,Error Code 1: Low Voltage +2015-07-28 08:00:00,36,Error Code 4: Invalid Rotation Angle +2015-08-02 04:00:00,36,Error Code 1: Low Voltage +2015-08-10 14:00:00,36,Error Code 3: Excessive Vibration +2015-08-28 18:00:00,36,Error Code 1: Low Voltage +2015-09-03 06:00:00,36,Error Code 2: High Pressure +2015-09-03 06:00:00,36,Error Code 3: Excessive Vibration +2015-10-02 00:00:00,36,Error Code 4: Invalid Rotation Angle +2015-10-04 15:00:00,36,Error Code 3: Excessive Vibration +2015-10-04 19:00:00,36,Error Code 2: High Pressure +2015-10-20 06:00:00,36,Error Code 4: Invalid Rotation Angle +2015-10-30 05:00:00,36,Error Code 2: High Pressure +2015-10-31 02:00:00,36,Error Code 2: High Pressure +2015-12-11 11:00:00,36,Error Code 3: Excessive Vibration +2015-12-18 16:00:00,36,Error Code 4: Invalid Rotation Angle +2015-01-03 06:00:00,37,Error Code 5: Low Pressure +2015-01-04 09:00:00,37,Error Code 2: High Pressure +2015-01-13 06:00:00,37,Error Code 1: Low Voltage +2015-01-18 23:00:00,37,Error Code 3: Excessive Vibration +2015-01-19 20:00:00,37,Error Code 1: Low Voltage +2015-02-02 06:00:00,37,Error Code 4: Invalid Rotation Angle +2015-03-16 23:00:00,37,Error Code 1: Low Voltage +2015-04-02 00:00:00,37,Error Code 4: Invalid Rotation Angle +2015-04-03 16:00:00,37,Error Code 3: Excessive Vibration +2015-04-18 06:00:00,37,Error Code 5: Low Pressure +2015-04-21 08:00:00,37,Error Code 3: Excessive Vibration +2015-04-27 19:00:00,37,Error Code 1: Low Voltage +2015-05-04 22:00:00,37,Error Code 2: High Pressure +2015-05-18 06:00:00,37,Error Code 4: Invalid Rotation 
Angle +2015-06-02 06:00:00,37,Error Code 5: Low Pressure +2015-06-02 19:00:00,37,Error Code 3: Excessive Vibration +2015-06-03 04:00:00,37,Error Code 2: High Pressure +2015-06-04 02:00:00,37,Error Code 1: Low Voltage +2015-06-17 06:00:00,37,Error Code 1: Low Voltage +2015-06-18 22:00:00,37,Error Code 4: Invalid Rotation Angle +2015-07-02 06:00:00,37,Error Code 4: Invalid Rotation Angle +2015-07-03 05:00:00,37,Error Code 1: Low Voltage +2015-07-11 22:00:00,37,Error Code 2: High Pressure +2015-07-17 06:00:00,37,Error Code 5: Low Pressure +2015-07-18 14:00:00,37,Error Code 2: High Pressure +2015-08-16 06:00:00,37,Error Code 2: High Pressure +2015-08-16 06:00:00,37,Error Code 3: Excessive Vibration +2015-08-24 17:00:00,37,Error Code 1: Low Voltage +2015-09-15 06:00:00,37,Error Code 2: High Pressure +2015-09-15 06:00:00,37,Error Code 3: Excessive Vibration +2015-09-25 15:00:00,37,Error Code 2: High Pressure +2015-09-30 06:00:00,37,Error Code 5: Low Pressure +2015-10-16 05:00:00,37,Error Code 4: Invalid Rotation Angle +2015-10-17 17:00:00,37,Error Code 2: High Pressure +2015-10-19 11:00:00,37,Error Code 1: Low Voltage +2015-11-02 07:00:00,37,Error Code 2: High Pressure +2015-11-03 12:00:00,37,Error Code 4: Invalid Rotation Angle +2015-11-14 06:00:00,37,Error Code 2: High Pressure +2015-11-14 06:00:00,37,Error Code 3: Excessive Vibration +2015-11-23 10:00:00,37,Error Code 2: High Pressure +2015-11-28 22:00:00,37,Error Code 2: High Pressure +2015-11-29 06:00:00,37,Error Code 4: Invalid Rotation Angle +2015-12-14 06:00:00,37,Error Code 5: Low Pressure +2015-01-07 02:00:00,38,Error Code 4: Invalid Rotation Angle +2015-01-16 06:00:00,38,Error Code 5: Low Pressure +2015-01-18 06:00:00,38,Error Code 3: Excessive Vibration +2015-01-20 01:00:00,38,Error Code 2: High Pressure +2015-02-21 10:00:00,38,Error Code 2: High Pressure +2015-03-03 20:00:00,38,Error Code 4: Invalid Rotation Angle +2015-03-12 02:00:00,38,Error Code 4: Invalid Rotation Angle +2015-03-22 12:00:00,38,Error Code 5: Low Pressure +2015-03-27 18:00:00,38,Error Code 4: Invalid Rotation Angle +2015-04-01 06:00:00,38,Error Code 2: High Pressure +2015-04-01 06:00:00,38,Error Code 3: Excessive Vibration +2015-04-03 14:00:00,38,Error Code 4: Invalid Rotation Angle +2015-04-13 22:00:00,38,Error Code 5: Low Pressure +2015-04-16 06:00:00,38,Error Code 5: Low Pressure +2015-05-01 13:00:00,38,Error Code 2: High Pressure +2015-05-09 12:00:00,38,Error Code 3: Excessive Vibration +2015-05-21 00:00:00,38,Error Code 3: Excessive Vibration +2015-05-23 15:00:00,38,Error Code 2: High Pressure +2015-06-06 22:00:00,38,Error Code 4: Invalid Rotation Angle +2015-06-20 06:00:00,38,Error Code 4: Invalid Rotation Angle +2015-06-30 06:00:00,38,Error Code 2: High Pressure +2015-06-30 06:00:00,38,Error Code 3: Excessive Vibration +2015-07-10 04:00:00,38,Error Code 5: Low Pressure +2015-08-02 04:00:00,38,Error Code 2: High Pressure +2015-08-13 13:00:00,38,Error Code 1: Low Voltage +2015-08-16 01:00:00,38,Error Code 3: Excessive Vibration +2015-08-17 01:00:00,38,Error Code 4: Invalid Rotation Angle +2015-08-18 01:00:00,38,Error Code 4: Invalid Rotation Angle +2015-08-20 16:00:00,38,Error Code 2: High Pressure +2015-08-29 06:00:00,38,Error Code 5: Low Pressure +2015-09-06 00:00:00,38,Error Code 5: Low Pressure +2015-09-17 10:00:00,38,Error Code 1: Low Voltage +2015-10-01 15:00:00,38,Error Code 4: Invalid Rotation Angle +2015-10-12 05:00:00,38,Error Code 1: Low Voltage +2015-10-14 09:00:00,38,Error Code 5: Low Pressure +2015-10-22 06:00:00,38,Error Code 2: High Pressure 
+2015-10-25 02:00:00,38,Error Code 3: Excessive Vibration +2015-11-18 13:00:00,38,Error Code 2: High Pressure +2015-12-12 06:00:00,38,Error Code 1: Low Voltage +2015-12-12 06:00:00,38,Error Code 5: Low Pressure +2015-12-15 12:00:00,38,Error Code 4: Invalid Rotation Angle +2015-12-19 11:00:00,38,Error Code 2: High Pressure +2015-01-20 14:00:00,39,Error Code 3: Excessive Vibration +2015-02-12 03:00:00,39,Error Code 3: Excessive Vibration +2015-02-18 19:00:00,39,Error Code 3: Excessive Vibration +2015-02-22 21:00:00,39,Error Code 2: High Pressure +2015-03-05 21:00:00,39,Error Code 2: High Pressure +2015-03-14 04:00:00,39,Error Code 3: Excessive Vibration +2015-03-24 05:00:00,39,Error Code 2: High Pressure +2015-03-29 04:00:00,39,Error Code 4: Invalid Rotation Angle +2015-03-31 06:00:00,39,Error Code 2: High Pressure +2015-03-31 06:00:00,39,Error Code 3: Excessive Vibration +2015-04-08 12:00:00,39,Error Code 4: Invalid Rotation Angle +2015-04-18 12:00:00,39,Error Code 3: Excessive Vibration +2015-05-14 15:00:00,39,Error Code 4: Invalid Rotation Angle +2015-07-13 19:00:00,39,Error Code 4: Invalid Rotation Angle +2015-07-14 03:00:00,39,Error Code 3: Excessive Vibration +2015-07-23 07:00:00,39,Error Code 2: High Pressure +2015-07-28 14:00:00,39,Error Code 4: Invalid Rotation Angle +2015-07-29 06:00:00,39,Error Code 1: Low Voltage +2015-08-02 18:00:00,39,Error Code 1: Low Voltage +2015-08-22 02:00:00,39,Error Code 5: Low Pressure +2015-09-07 02:00:00,39,Error Code 4: Invalid Rotation Angle +2015-09-07 09:00:00,39,Error Code 2: High Pressure +2015-09-24 10:00:00,39,Error Code 4: Invalid Rotation Angle +2015-09-25 19:00:00,39,Error Code 2: High Pressure +2015-09-30 01:00:00,39,Error Code 4: Invalid Rotation Angle +2015-10-03 11:00:00,39,Error Code 2: High Pressure +2015-10-05 10:00:00,39,Error Code 4: Invalid Rotation Angle +2015-10-05 11:00:00,39,Error Code 5: Low Pressure +2015-10-08 16:00:00,39,Error Code 5: Low Pressure +2015-10-12 06:00:00,39,Error Code 2: High Pressure +2015-10-12 06:00:00,39,Error Code 3: Excessive Vibration +2015-10-15 22:00:00,39,Error Code 1: Low Voltage +2015-11-11 10:00:00,39,Error Code 3: Excessive Vibration +2015-11-15 23:00:00,39,Error Code 4: Invalid Rotation Angle +2015-11-24 11:00:00,39,Error Code 2: High Pressure +2015-12-26 06:00:00,39,Error Code 2: High Pressure +2015-12-26 06:00:00,39,Error Code 3: Excessive Vibration +2015-01-03 06:00:00,40,Error Code 4: Invalid Rotation Angle +2015-02-03 03:00:00,40,Error Code 1: Low Voltage +2015-02-06 01:00:00,40,Error Code 1: Low Voltage +2015-02-16 22:00:00,40,Error Code 3: Excessive Vibration +2015-02-17 06:00:00,40,Error Code 1: Low Voltage +2015-02-24 23:00:00,40,Error Code 2: High Pressure +2015-02-26 13:00:00,40,Error Code 1: Low Voltage +2015-03-01 02:00:00,40,Error Code 1: Low Voltage +2015-03-03 13:00:00,40,Error Code 2: High Pressure +2015-03-04 10:00:00,40,Error Code 2: High Pressure +2015-04-03 06:00:00,40,Error Code 1: Low Voltage +2015-04-09 19:00:00,40,Error Code 4: Invalid Rotation Angle +2015-04-12 02:00:00,40,Error Code 1: Low Voltage +2015-04-15 03:00:00,40,Error Code 2: High Pressure +2015-04-21 03:00:00,40,Error Code 4: Invalid Rotation Angle +2015-05-12 16:00:00,40,Error Code 3: Excessive Vibration +2015-05-14 09:00:00,40,Error Code 3: Excessive Vibration +2015-06-02 06:00:00,40,Error Code 4: Invalid Rotation Angle +2015-06-17 06:00:00,40,Error Code 2: High Pressure +2015-06-17 06:00:00,40,Error Code 3: Excessive Vibration +2015-07-02 06:00:00,40,Error Code 1: Low Voltage +2015-07-15 01:00:00,40,Error 
Code 3: Excessive Vibration +2015-07-20 04:00:00,40,Error Code 2: High Pressure +2015-08-04 13:00:00,40,Error Code 1: Low Voltage +2015-08-05 19:00:00,40,Error Code 1: Low Voltage +2015-08-08 15:00:00,40,Error Code 4: Invalid Rotation Angle +2015-08-14 22:00:00,40,Error Code 1: Low Voltage +2015-08-16 06:00:00,40,Error Code 2: High Pressure +2015-08-16 06:00:00,40,Error Code 3: Excessive Vibration +2015-08-27 03:00:00,40,Error Code 2: High Pressure +2015-09-01 10:00:00,40,Error Code 4: Invalid Rotation Angle +2015-09-05 03:00:00,40,Error Code 5: Low Pressure +2015-09-20 16:00:00,40,Error Code 1: Low Voltage +2015-10-07 14:00:00,40,Error Code 1: Low Voltage +2015-10-12 01:00:00,40,Error Code 1: Low Voltage +2015-10-15 11:00:00,40,Error Code 5: Low Pressure +2015-10-30 12:00:00,40,Error Code 3: Excessive Vibration +2015-11-29 06:00:00,40,Error Code 1: Low Voltage +2015-12-14 06:00:00,40,Error Code 4: Invalid Rotation Angle +2015-12-17 05:00:00,40,Error Code 2: High Pressure +2015-12-20 13:00:00,40,Error Code 4: Invalid Rotation Angle +2015-12-28 18:00:00,40,Error Code 4: Invalid Rotation Angle +2015-01-03 21:00:00,41,Error Code 3: Excessive Vibration +2015-01-24 09:00:00,41,Error Code 1: Low Voltage +2015-01-31 06:00:00,41,Error Code 1: Low Voltage +2015-02-14 13:00:00,41,Error Code 2: High Pressure +2015-02-17 16:00:00,41,Error Code 3: Excessive Vibration +2015-02-20 22:00:00,41,Error Code 4: Invalid Rotation Angle +2015-03-05 03:00:00,41,Error Code 1: Low Voltage +2015-03-09 02:00:00,41,Error Code 2: High Pressure +2015-04-03 10:00:00,41,Error Code 3: Excessive Vibration +2015-04-16 14:00:00,41,Error Code 1: Low Voltage +2015-04-16 17:00:00,41,Error Code 2: High Pressure +2015-05-01 06:00:00,41,Error Code 1: Low Voltage +2015-05-02 05:00:00,41,Error Code 4: Invalid Rotation Angle +2015-05-10 01:00:00,41,Error Code 1: Low Voltage +2015-05-20 23:00:00,41,Error Code 1: Low Voltage +2015-05-26 19:00:00,41,Error Code 2: High Pressure +2015-06-11 23:00:00,41,Error Code 2: High Pressure +2015-06-13 07:00:00,41,Error Code 3: Excessive Vibration +2015-06-22 12:00:00,41,Error Code 2: High Pressure +2015-07-06 03:00:00,41,Error Code 3: Excessive Vibration +2015-08-07 21:00:00,41,Error Code 1: Low Voltage +2015-08-13 02:00:00,41,Error Code 1: Low Voltage +2015-08-20 04:00:00,41,Error Code 4: Invalid Rotation Angle +2015-08-23 02:00:00,41,Error Code 4: Invalid Rotation Angle +2015-08-24 20:00:00,41,Error Code 1: Low Voltage +2015-10-07 00:00:00,41,Error Code 2: High Pressure +2015-10-28 06:00:00,41,Error Code 1: Low Voltage +2015-11-03 10:00:00,41,Error Code 1: Low Voltage +2015-11-11 14:00:00,41,Error Code 1: Low Voltage +2015-11-26 00:00:00,41,Error Code 5: Low Pressure +2015-12-03 12:00:00,41,Error Code 2: High Pressure +2015-01-01 10:00:00,42,Error Code 2: High Pressure +2015-01-20 06:00:00,42,Error Code 1: Low Voltage +2015-01-23 05:00:00,42,Error Code 1: Low Voltage +2015-02-03 01:00:00,42,Error Code 4: Invalid Rotation Angle +2015-02-11 17:00:00,42,Error Code 2: High Pressure +2015-03-05 15:00:00,42,Error Code 1: Low Voltage +2015-03-10 04:00:00,42,Error Code 2: High Pressure +2015-03-11 14:00:00,42,Error Code 3: Excessive Vibration +2015-03-13 12:00:00,42,Error Code 1: Low Voltage +2015-04-05 06:00:00,42,Error Code 4: Invalid Rotation Angle +2015-04-10 00:00:00,42,Error Code 4: Invalid Rotation Angle +2015-04-11 20:00:00,42,Error Code 2: High Pressure +2015-04-20 06:00:00,42,Error Code 2: High Pressure +2015-05-05 06:00:00,42,Error Code 1: Low Voltage +2015-05-05 15:00:00,42,Error Code 4: 
Invalid Rotation Angle +2015-05-13 08:00:00,42,Error Code 3: Excessive Vibration +2015-06-04 06:00:00,42,Error Code 2: High Pressure +2015-06-04 06:00:00,42,Error Code 3: Excessive Vibration +2015-06-19 06:00:00,42,Error Code 4: Invalid Rotation Angle +2015-06-25 03:00:00,42,Error Code 2: High Pressure +2015-06-29 11:00:00,42,Error Code 3: Excessive Vibration +2015-07-21 05:00:00,42,Error Code 1: Low Voltage +2015-07-27 21:00:00,42,Error Code 1: Low Voltage +2015-07-28 03:00:00,42,Error Code 1: Low Voltage +2015-08-08 09:00:00,42,Error Code 3: Excessive Vibration +2015-08-14 22:00:00,42,Error Code 4: Invalid Rotation Angle +2015-08-18 06:00:00,42,Error Code 2: High Pressure +2015-08-18 06:00:00,42,Error Code 3: Excessive Vibration +2015-08-20 17:00:00,42,Error Code 3: Excessive Vibration +2015-09-17 06:00:00,42,Error Code 4: Invalid Rotation Angle +2015-10-11 21:00:00,42,Error Code 4: Invalid Rotation Angle +2015-10-19 10:00:00,42,Error Code 1: Low Voltage +2015-10-23 18:00:00,42,Error Code 4: Invalid Rotation Angle +2015-11-03 10:00:00,42,Error Code 3: Excessive Vibration +2015-11-16 06:00:00,42,Error Code 2: High Pressure +2015-11-16 06:00:00,42,Error Code 3: Excessive Vibration +2015-12-13 11:00:00,42,Error Code 3: Excessive Vibration +2015-12-16 06:00:00,42,Error Code 4: Invalid Rotation Angle +2015-01-01 07:00:00,43,Error Code 3: Excessive Vibration +2015-01-25 14:00:00,43,Error Code 3: Excessive Vibration +2015-02-04 06:00:00,43,Error Code 1: Low Voltage +2015-02-19 06:00:00,43,Error Code 5: Low Pressure +2015-03-02 10:00:00,43,Error Code 2: High Pressure +2015-03-05 19:00:00,43,Error Code 4: Invalid Rotation Angle +2015-03-11 09:00:00,43,Error Code 3: Excessive Vibration +2015-03-13 11:00:00,43,Error Code 3: Excessive Vibration +2015-03-18 18:00:00,43,Error Code 2: High Pressure +2015-03-21 06:00:00,43,Error Code 2: High Pressure +2015-03-21 06:00:00,43,Error Code 3: Excessive Vibration +2015-04-08 21:00:00,43,Error Code 2: High Pressure +2015-04-18 07:00:00,43,Error Code 1: Low Voltage +2015-05-10 13:00:00,43,Error Code 4: Invalid Rotation Angle +2015-05-18 13:00:00,43,Error Code 2: High Pressure +2015-05-20 06:00:00,43,Error Code 2: High Pressure +2015-05-20 06:00:00,43,Error Code 3: Excessive Vibration +2015-06-04 06:00:00,43,Error Code 1: Low Voltage +2015-06-15 22:00:00,43,Error Code 3: Excessive Vibration +2015-07-13 16:00:00,43,Error Code 1: Low Voltage +2015-07-14 13:00:00,43,Error Code 5: Low Pressure +2015-07-19 06:00:00,43,Error Code 1: Low Voltage +2015-08-03 06:00:00,43,Error Code 2: High Pressure +2015-08-03 06:00:00,43,Error Code 3: Excessive Vibration +2015-08-05 02:00:00,43,Error Code 3: Excessive Vibration +2015-08-06 21:00:00,43,Error Code 2: High Pressure +2015-08-20 04:00:00,43,Error Code 3: Excessive Vibration +2015-09-15 16:00:00,43,Error Code 1: Low Voltage +2015-09-16 23:00:00,43,Error Code 1: Low Voltage +2015-10-02 06:00:00,43,Error Code 5: Low Pressure +2015-11-01 06:00:00,43,Error Code 1: Low Voltage +2015-11-06 01:00:00,43,Error Code 1: Low Voltage +2015-11-09 07:00:00,43,Error Code 3: Excessive Vibration +2015-11-11 18:00:00,43,Error Code 2: High Pressure +2015-11-16 06:00:00,43,Error Code 2: High Pressure +2015-11-16 06:00:00,43,Error Code 3: Excessive Vibration +2015-11-26 18:00:00,43,Error Code 1: Low Voltage +2015-12-15 05:00:00,43,Error Code 4: Invalid Rotation Angle +2015-01-27 09:00:00,44,Error Code 4: Invalid Rotation Angle +2015-02-04 14:00:00,44,Error Code 1: Low Voltage +2015-02-11 19:00:00,44,Error Code 3: Excessive Vibration +2015-02-25 
18:00:00,44,Error Code 1: Low Voltage +2015-03-01 12:00:00,44,Error Code 2: High Pressure +2015-03-06 14:00:00,44,Error Code 4: Invalid Rotation Angle +2015-03-15 06:00:00,44,Error Code 2: High Pressure +2015-03-15 06:00:00,44,Error Code 3: Excessive Vibration +2015-04-03 05:00:00,44,Error Code 1: Low Voltage +2015-04-03 15:00:00,44,Error Code 1: Low Voltage +2015-04-05 11:00:00,44,Error Code 4: Invalid Rotation Angle +2015-04-06 06:00:00,44,Error Code 4: Invalid Rotation Angle +2015-04-18 07:00:00,44,Error Code 4: Invalid Rotation Angle +2015-06-13 06:00:00,44,Error Code 1: Low Voltage +2015-06-13 20:00:00,44,Error Code 1: Low Voltage +2015-07-02 15:00:00,44,Error Code 5: Low Pressure +2015-07-28 06:00:00,44,Error Code 2: High Pressure +2015-07-28 06:00:00,44,Error Code 3: Excessive Vibration +2015-08-05 15:00:00,44,Error Code 4: Invalid Rotation Angle +2015-08-14 21:00:00,44,Error Code 3: Excessive Vibration +2015-08-27 06:00:00,44,Error Code 1: Low Voltage +2015-09-01 08:00:00,44,Error Code 2: High Pressure +2015-09-23 01:00:00,44,Error Code 2: High Pressure +2015-09-25 22:00:00,44,Error Code 1: Low Voltage +2015-09-27 21:00:00,44,Error Code 4: Invalid Rotation Angle +2015-10-03 09:00:00,44,Error Code 3: Excessive Vibration +2015-10-14 14:00:00,44,Error Code 5: Low Pressure +2015-10-22 22:00:00,44,Error Code 2: High Pressure +2015-11-14 01:00:00,44,Error Code 4: Invalid Rotation Angle +2015-11-22 14:00:00,44,Error Code 3: Excessive Vibration +2015-12-25 01:00:00,44,Error Code 3: Excessive Vibration +2015-12-25 20:00:00,44,Error Code 2: High Pressure +2015-01-20 01:00:00,45,Error Code 1: Low Voltage +2015-01-21 04:00:00,45,Error Code 2: High Pressure +2015-01-21 06:00:00,45,Error Code 2: High Pressure +2015-01-21 06:00:00,45,Error Code 3: Excessive Vibration +2015-01-21 15:00:00,45,Error Code 1: Low Voltage +2015-02-08 03:00:00,45,Error Code 2: High Pressure +2015-02-08 10:00:00,45,Error Code 2: High Pressure +2015-02-12 03:00:00,45,Error Code 4: Invalid Rotation Angle +2015-02-20 06:00:00,45,Error Code 5: Low Pressure +2015-02-23 13:00:00,45,Error Code 5: Low Pressure +2015-02-28 14:00:00,45,Error Code 3: Excessive Vibration +2015-03-28 20:00:00,45,Error Code 4: Invalid Rotation Angle +2015-03-29 19:00:00,45,Error Code 2: High Pressure +2015-04-26 01:00:00,45,Error Code 3: Excessive Vibration +2015-05-11 17:00:00,45,Error Code 2: High Pressure +2015-05-20 07:00:00,45,Error Code 2: High Pressure +2015-05-21 06:00:00,45,Error Code 5: Low Pressure +2015-05-24 11:00:00,45,Error Code 1: Low Voltage +2015-06-11 10:00:00,45,Error Code 1: Low Voltage +2015-06-20 13:00:00,45,Error Code 2: High Pressure +2015-07-20 06:00:00,45,Error Code 5: Low Pressure +2015-08-03 04:00:00,45,Error Code 2: High Pressure +2015-08-03 15:00:00,45,Error Code 3: Excessive Vibration +2015-08-12 19:00:00,45,Error Code 4: Invalid Rotation Angle +2015-09-04 06:00:00,45,Error Code 2: High Pressure +2015-10-02 06:00:00,45,Error Code 1: Low Voltage +2015-10-07 22:00:00,45,Error Code 4: Invalid Rotation Angle +2015-10-18 06:00:00,45,Error Code 5: Low Pressure +2015-10-27 04:00:00,45,Error Code 1: Low Voltage +2015-10-31 12:00:00,45,Error Code 3: Excessive Vibration +2015-11-02 06:00:00,45,Error Code 1: Low Voltage +2015-11-14 17:00:00,45,Error Code 3: Excessive Vibration +2015-12-09 18:00:00,45,Error Code 2: High Pressure +2015-12-19 03:00:00,45,Error Code 2: High Pressure +2015-12-27 17:00:00,45,Error Code 3: Excessive Vibration +2015-12-29 23:00:00,45,Error Code 1: Low Voltage +2015-12-30 20:00:00,45,Error Code 3: 
Excessive Vibration +2015-02-21 06:00:00,46,Error Code 2: High Pressure +2015-02-21 06:00:00,46,Error Code 3: Excessive Vibration +2015-02-22 08:00:00,46,Error Code 1: Low Voltage +2015-02-22 13:00:00,46,Error Code 2: High Pressure +2015-03-09 07:00:00,46,Error Code 3: Excessive Vibration +2015-03-17 10:00:00,46,Error Code 2: High Pressure +2015-05-08 14:00:00,46,Error Code 1: Low Voltage +2015-05-12 13:00:00,46,Error Code 2: High Pressure +2015-06-17 00:00:00,46,Error Code 1: Low Voltage +2015-06-27 01:00:00,46,Error Code 5: Low Pressure +2015-07-08 09:00:00,46,Error Code 1: Low Voltage +2015-08-05 00:00:00,46,Error Code 5: Low Pressure +2015-08-09 16:00:00,46,Error Code 1: Low Voltage +2015-08-17 04:00:00,46,Error Code 2: High Pressure +2015-08-22 02:00:00,46,Error Code 4: Invalid Rotation Angle +2015-08-30 14:00:00,46,Error Code 2: High Pressure +2015-09-04 18:00:00,46,Error Code 3: Excessive Vibration +2015-09-09 18:00:00,46,Error Code 3: Excessive Vibration +2015-09-12 21:00:00,46,Error Code 1: Low Voltage +2015-10-12 05:00:00,46,Error Code 3: Excessive Vibration +2015-10-19 18:00:00,46,Error Code 2: High Pressure +2015-10-23 08:00:00,46,Error Code 4: Invalid Rotation Angle +2015-10-23 17:00:00,46,Error Code 1: Low Voltage +2015-10-24 09:00:00,46,Error Code 4: Invalid Rotation Angle +2015-10-28 19:00:00,46,Error Code 1: Low Voltage +2015-11-08 17:00:00,46,Error Code 2: High Pressure +2015-11-18 06:00:00,46,Error Code 2: High Pressure +2015-11-18 06:00:00,46,Error Code 3: Excessive Vibration +2015-12-26 18:00:00,46,Error Code 4: Invalid Rotation Angle +2015-01-03 20:00:00,47,Error Code 3: Excessive Vibration +2015-01-14 19:00:00,47,Error Code 1: Low Voltage +2015-01-22 23:00:00,47,Error Code 3: Excessive Vibration +2015-01-23 06:00:00,47,Error Code 4: Invalid Rotation Angle +2015-02-10 02:00:00,47,Error Code 4: Invalid Rotation Angle +2015-03-11 22:00:00,47,Error Code 1: Low Voltage +2015-03-15 20:00:00,47,Error Code 4: Invalid Rotation Angle +2015-03-21 03:00:00,47,Error Code 3: Excessive Vibration +2015-03-24 06:00:00,47,Error Code 1: Low Voltage +2015-03-30 10:00:00,47,Error Code 4: Invalid Rotation Angle +2015-04-08 06:00:00,47,Error Code 2: High Pressure +2015-04-08 06:00:00,47,Error Code 3: Excessive Vibration +2015-04-25 18:00:00,47,Error Code 4: Invalid Rotation Angle +2015-05-21 23:00:00,47,Error Code 1: Low Voltage +2015-05-25 13:00:00,47,Error Code 1: Low Voltage +2015-06-22 06:00:00,47,Error Code 4: Invalid Rotation Angle +2015-07-07 06:00:00,47,Error Code 2: High Pressure +2015-07-07 06:00:00,47,Error Code 3: Excessive Vibration +2015-07-31 13:00:00,47,Error Code 2: High Pressure +2015-08-02 05:00:00,47,Error Code 3: Excessive Vibration +2015-08-06 06:00:00,47,Error Code 1: Low Voltage +2015-08-08 05:00:00,47,Error Code 1: Low Voltage +2015-08-14 15:00:00,47,Error Code 3: Excessive Vibration +2015-08-29 08:00:00,47,Error Code 2: High Pressure +2015-09-02 23:00:00,47,Error Code 2: High Pressure +2015-09-06 05:00:00,47,Error Code 4: Invalid Rotation Angle +2015-09-20 06:00:00,47,Error Code 4: Invalid Rotation Angle +2015-10-04 09:00:00,47,Error Code 2: High Pressure +2015-10-20 06:00:00,47,Error Code 2: High Pressure +2015-10-20 06:00:00,47,Error Code 3: Excessive Vibration +2015-11-06 21:00:00,47,Error Code 2: High Pressure +2015-11-10 23:00:00,47,Error Code 1: Low Voltage +2015-11-19 06:00:00,47,Error Code 2: High Pressure +2015-11-19 06:00:00,47,Error Code 3: Excessive Vibration +2015-11-22 19:00:00,47,Error Code 1: Low Voltage +2015-11-28 21:00:00,47,Error Code 5: Low 
Pressure +2015-11-29 13:00:00,47,Error Code 1: Low Voltage +2015-12-19 06:00:00,47,Error Code 2: High Pressure +2015-12-19 06:00:00,47,Error Code 3: Excessive Vibration +2015-01-10 06:00:00,48,Error Code 2: High Pressure +2015-01-10 06:00:00,48,Error Code 3: Excessive Vibration +2015-01-17 08:00:00,48,Error Code 1: Low Voltage +2015-02-01 21:00:00,48,Error Code 1: Low Voltage +2015-02-04 05:00:00,48,Error Code 2: High Pressure +2015-02-10 02:00:00,48,Error Code 2: High Pressure +2015-03-11 06:00:00,48,Error Code 1: Low Voltage +2015-03-26 20:00:00,48,Error Code 3: Excessive Vibration +2015-05-08 02:00:00,48,Error Code 1: Low Voltage +2015-05-10 06:00:00,48,Error Code 2: High Pressure +2015-05-10 06:00:00,48,Error Code 3: Excessive Vibration +2015-06-02 05:00:00,48,Error Code 5: Low Pressure +2015-06-05 13:00:00,48,Error Code 3: Excessive Vibration +2015-06-11 01:00:00,48,Error Code 1: Low Voltage +2015-06-11 07:00:00,48,Error Code 1: Low Voltage +2015-07-09 06:00:00,48,Error Code 2: High Pressure +2015-07-09 06:00:00,48,Error Code 3: Excessive Vibration +2015-07-24 06:00:00,48,Error Code 1: Low Voltage +2015-07-24 22:00:00,48,Error Code 3: Excessive Vibration +2015-08-07 10:00:00,48,Error Code 4: Invalid Rotation Angle +2015-09-07 06:00:00,48,Error Code 1: Low Voltage +2015-09-15 14:00:00,48,Error Code 4: Invalid Rotation Angle +2015-09-16 01:00:00,48,Error Code 1: Low Voltage +2015-09-16 21:00:00,48,Error Code 3: Excessive Vibration +2015-10-23 16:00:00,48,Error Code 3: Excessive Vibration +2015-10-27 14:00:00,48,Error Code 2: High Pressure +2015-11-06 06:00:00,48,Error Code 2: High Pressure +2015-11-06 06:00:00,48,Error Code 3: Excessive Vibration +2015-11-18 11:00:00,48,Error Code 1: Low Voltage +2015-11-27 15:00:00,48,Error Code 4: Invalid Rotation Angle +2015-01-03 11:00:00,49,Error Code 2: High Pressure +2015-01-13 14:00:00,49,Error Code 2: High Pressure +2015-01-26 18:00:00,49,Error Code 2: High Pressure +2015-01-28 15:00:00,49,Error Code 2: High Pressure +2015-01-31 19:00:00,49,Error Code 4: Invalid Rotation Angle +2015-02-01 11:00:00,49,Error Code 1: Low Voltage +2015-02-06 16:00:00,49,Error Code 5: Low Pressure +2015-02-15 06:00:00,49,Error Code 2: High Pressure +2015-02-16 20:00:00,49,Error Code 2: High Pressure +2015-02-19 22:00:00,49,Error Code 1: Low Voltage +2015-02-28 06:00:00,49,Error Code 5: Low Pressure +2015-03-06 17:00:00,49,Error Code 5: Low Pressure +2015-03-08 03:00:00,49,Error Code 5: Low Pressure +2015-03-14 19:00:00,49,Error Code 3: Excessive Vibration +2015-03-15 06:00:00,49,Error Code 4: Invalid Rotation Angle +2015-03-18 17:00:00,49,Error Code 3: Excessive Vibration +2015-03-24 20:00:00,49,Error Code 1: Low Voltage +2015-03-25 22:00:00,49,Error Code 1: Low Voltage +2015-04-14 06:00:00,49,Error Code 5: Low Pressure +2015-05-14 06:00:00,49,Error Code 2: High Pressure +2015-05-14 06:00:00,49,Error Code 3: Excessive Vibration +2015-05-16 02:00:00,49,Error Code 5: Low Pressure +2015-06-06 07:00:00,49,Error Code 3: Excessive Vibration +2015-06-18 01:00:00,49,Error Code 4: Invalid Rotation Angle +2015-06-18 04:00:00,49,Error Code 4: Invalid Rotation Angle +2015-07-13 06:00:00,49,Error Code 5: Low Pressure +2015-07-17 05:00:00,49,Error Code 3: Excessive Vibration +2015-07-28 06:00:00,49,Error Code 4: Invalid Rotation Angle +2015-07-28 23:00:00,49,Error Code 2: High Pressure +2015-08-09 04:00:00,49,Error Code 1: Low Voltage +2015-08-13 01:00:00,49,Error Code 2: High Pressure +2015-08-25 06:00:00,49,Error Code 5: Low Pressure +2015-08-30 18:00:00,49,Error Code 3: 
Excessive Vibration +2015-09-10 20:00:00,49,Error Code 2: High Pressure +2015-09-11 06:00:00,49,Error Code 2: High Pressure +2015-09-11 06:00:00,49,Error Code 3: Excessive Vibration +2015-09-11 06:00:00,49,Error Code 4: Invalid Rotation Angle +2015-10-09 08:00:00,49,Error Code 4: Invalid Rotation Angle +2015-10-09 18:00:00,49,Error Code 4: Invalid Rotation Angle +2015-10-11 06:00:00,49,Error Code 5: Low Pressure +2015-10-11 07:00:00,49,Error Code 3: Excessive Vibration +2015-10-19 18:00:00,49,Error Code 1: Low Voltage +2015-11-10 06:00:00,49,Error Code 4: Invalid Rotation Angle +2015-11-10 07:00:00,49,Error Code 5: Low Pressure +2015-11-18 03:00:00,49,Error Code 3: Excessive Vibration +2015-11-25 06:00:00,49,Error Code 2: High Pressure +2015-11-25 06:00:00,49,Error Code 3: Excessive Vibration +2015-11-30 15:00:00,49,Error Code 1: Low Voltage +2015-12-07 00:00:00,49,Error Code 5: Low Pressure +2015-12-19 15:00:00,49,Error Code 2: High Pressure +2015-12-25 06:00:00,49,Error Code 5: Low Pressure +2015-01-13 15:00:00,50,Error Code 3: Excessive Vibration +2015-01-16 03:00:00,50,Error Code 2: High Pressure +2015-01-28 02:00:00,50,Error Code 3: Excessive Vibration +2015-01-29 06:00:00,50,Error Code 1: Low Voltage +2015-01-30 09:00:00,50,Error Code 1: Low Voltage +2015-02-13 13:00:00,50,Error Code 2: High Pressure +2015-03-30 15:00:00,50,Error Code 2: High Pressure +2015-04-24 02:00:00,50,Error Code 4: Invalid Rotation Angle +2015-04-27 19:00:00,50,Error Code 5: Low Pressure +2015-04-29 06:00:00,50,Error Code 2: High Pressure +2015-04-29 06:00:00,50,Error Code 3: Excessive Vibration +2015-05-04 02:00:00,50,Error Code 1: Low Voltage +2015-05-24 06:00:00,50,Error Code 1: Low Voltage +2015-06-07 12:00:00,50,Error Code 3: Excessive Vibration +2015-07-02 11:00:00,50,Error Code 1: Low Voltage +2015-07-04 15:00:00,50,Error Code 2: High Pressure +2015-07-12 23:00:00,50,Error Code 3: Excessive Vibration +2015-07-16 13:00:00,50,Error Code 3: Excessive Vibration +2015-07-25 13:00:00,50,Error Code 3: Excessive Vibration +2015-08-06 09:00:00,50,Error Code 1: Low Voltage +2015-08-13 04:00:00,50,Error Code 1: Low Voltage +2015-08-30 08:00:00,50,Error Code 2: High Pressure +2015-09-08 01:00:00,50,Error Code 4: Invalid Rotation Angle +2015-09-11 06:00:00,50,Error Code 2: High Pressure +2015-09-11 06:00:00,50,Error Code 3: Excessive Vibration +2015-09-12 09:00:00,50,Error Code 2: High Pressure +2015-09-20 14:00:00,50,Error Code 2: High Pressure +2015-09-23 06:00:00,50,Error Code 3: Excessive Vibration +2015-09-25 11:00:00,50,Error Code 1: Low Voltage +2015-10-06 17:00:00,50,Error Code 1: Low Voltage +2015-10-10 08:00:00,50,Error Code 3: Excessive Vibration +2015-10-11 06:00:00,50,Error Code 2: High Pressure +2015-10-11 06:00:00,50,Error Code 3: Excessive Vibration +2015-10-19 20:00:00,50,Error Code 3: Excessive Vibration +2015-10-22 08:00:00,50,Error Code 1: Low Voltage +2015-10-22 17:00:00,50,Error Code 1: Low Voltage +2015-11-03 10:00:00,50,Error Code 1: Low Voltage +2015-11-12 11:00:00,50,Error Code 4: Invalid Rotation Angle +2015-11-15 18:00:00,50,Error Code 2: High Pressure +2015-11-16 05:00:00,50,Error Code 4: Invalid Rotation Angle +2015-12-23 00:00:00,50,Error Code 1: Low Voltage +2015-12-27 05:00:00,50,Error Code 2: High Pressure +2015-12-28 12:00:00,50,Error Code 1: Low Voltage +2015-01-08 15:00:00,51,Error Code 5: Low Pressure +2015-01-20 12:00:00,51,Error Code 4: Invalid Rotation Angle +2015-02-02 10:00:00,51,Error Code 5: Low Pressure +2015-02-16 02:00:00,51,Error Code 4: Invalid Rotation Angle 
+2015-03-01 01:00:00,51,Error Code 1: Low Voltage +2015-03-01 06:00:00,51,Error Code 5: Low Pressure +2015-03-07 19:00:00,51,Error Code 2: High Pressure +2015-03-08 09:00:00,51,Error Code 3: Excessive Vibration +2015-03-19 03:00:00,51,Error Code 4: Invalid Rotation Angle +2015-03-19 08:00:00,51,Error Code 2: High Pressure +2015-03-19 11:00:00,51,Error Code 2: High Pressure +2015-03-31 06:00:00,51,Error Code 2: High Pressure +2015-03-31 06:00:00,51,Error Code 3: Excessive Vibration +2015-03-31 16:00:00,51,Error Code 4: Invalid Rotation Angle +2015-04-04 20:00:00,51,Error Code 1: Low Voltage +2015-04-18 08:00:00,51,Error Code 1: Low Voltage +2015-05-14 20:00:00,51,Error Code 4: Invalid Rotation Angle +2015-05-23 12:00:00,51,Error Code 3: Excessive Vibration +2015-05-30 06:00:00,51,Error Code 2: High Pressure +2015-05-30 06:00:00,51,Error Code 3: Excessive Vibration +2015-06-28 10:00:00,51,Error Code 1: Low Voltage +2015-06-29 06:00:00,51,Error Code 2: High Pressure +2015-06-29 06:00:00,51,Error Code 3: Excessive Vibration +2015-07-01 06:00:00,51,Error Code 1: Low Voltage +2015-07-10 12:00:00,51,Error Code 1: Low Voltage +2015-07-14 06:00:00,51,Error Code 5: Low Pressure +2015-08-10 23:00:00,51,Error Code 4: Invalid Rotation Angle +2015-09-12 06:00:00,51,Error Code 2: High Pressure +2015-09-12 06:00:00,51,Error Code 3: Excessive Vibration +2015-09-17 19:00:00,51,Error Code 1: Low Voltage +2015-09-26 21:00:00,51,Error Code 1: Low Voltage +2015-09-27 06:00:00,51,Error Code 5: Low Pressure +2015-10-11 20:00:00,51,Error Code 2: High Pressure +2015-10-18 21:00:00,51,Error Code 2: High Pressure +2015-10-25 22:00:00,51,Error Code 1: Low Voltage +2015-11-26 06:00:00,51,Error Code 5: Low Pressure +2015-11-28 00:00:00,51,Error Code 4: Invalid Rotation Angle +2015-12-11 06:00:00,51,Error Code 2: High Pressure +2015-12-11 06:00:00,51,Error Code 3: Excessive Vibration +2015-12-13 21:00:00,51,Error Code 4: Invalid Rotation Angle +2015-01-02 13:00:00,52,Error Code 3: Excessive Vibration +2015-01-18 06:00:00,52,Error Code 2: High Pressure +2015-01-18 06:00:00,52,Error Code 3: Excessive Vibration +2015-01-18 06:00:00,52,Error Code 5: Low Pressure +2015-01-27 20:00:00,52,Error Code 4: Invalid Rotation Angle +2015-02-11 07:00:00,52,Error Code 2: High Pressure +2015-02-15 13:00:00,52,Error Code 1: Low Voltage +2015-02-17 15:00:00,52,Error Code 5: Low Pressure +2015-02-24 22:00:00,52,Error Code 1: Low Voltage +2015-03-02 02:00:00,52,Error Code 3: Excessive Vibration +2015-03-04 04:00:00,52,Error Code 1: Low Voltage +2015-03-19 03:00:00,52,Error Code 1: Low Voltage +2015-03-19 20:00:00,52,Error Code 4: Invalid Rotation Angle +2015-03-20 14:00:00,52,Error Code 2: High Pressure +2015-04-19 20:00:00,52,Error Code 3: Excessive Vibration +2015-06-14 23:00:00,52,Error Code 5: Low Pressure +2015-06-15 15:00:00,52,Error Code 1: Low Voltage +2015-06-20 02:00:00,52,Error Code 3: Excessive Vibration +2015-06-21 20:00:00,52,Error Code 1: Low Voltage +2015-06-27 13:00:00,52,Error Code 2: High Pressure +2015-06-27 19:00:00,52,Error Code 3: Excessive Vibration +2015-06-28 00:00:00,52,Error Code 2: High Pressure +2015-07-13 05:00:00,52,Error Code 2: High Pressure +2015-07-15 18:00:00,52,Error Code 1: Low Voltage +2015-07-17 06:00:00,52,Error Code 5: Low Pressure +2015-07-18 21:00:00,52,Error Code 3: Excessive Vibration +2015-07-19 09:00:00,52,Error Code 2: High Pressure +2015-07-27 04:00:00,52,Error Code 2: High Pressure +2015-08-01 06:00:00,52,Error Code 2: High Pressure +2015-08-01 06:00:00,52,Error Code 3: Excessive Vibration 
+2015-08-19 10:00:00,52,Error Code 2: High Pressure +2015-08-20 06:00:00,52,Error Code 2: High Pressure +2015-08-22 11:00:00,52,Error Code 4: Invalid Rotation Angle +2015-09-04 23:00:00,52,Error Code 3: Excessive Vibration +2015-09-15 10:00:00,52,Error Code 4: Invalid Rotation Angle +2015-09-28 04:00:00,52,Error Code 1: Low Voltage +2015-10-14 22:00:00,52,Error Code 1: Low Voltage +2015-10-21 06:00:00,52,Error Code 2: High Pressure +2015-10-24 04:00:00,52,Error Code 2: High Pressure +2015-10-30 06:00:00,52,Error Code 1: Low Voltage +2015-10-30 06:00:00,52,Error Code 5: Low Pressure +2015-11-28 10:00:00,52,Error Code 1: Low Voltage +2015-12-21 10:00:00,52,Error Code 2: High Pressure +2015-12-22 10:00:00,52,Error Code 4: Invalid Rotation Angle +2015-12-25 09:00:00,52,Error Code 1: Low Voltage +2015-12-29 06:00:00,52,Error Code 5: Low Pressure +2015-01-10 07:00:00,53,Error Code 1: Low Voltage +2015-01-18 08:00:00,53,Error Code 2: High Pressure +2015-01-25 22:00:00,53,Error Code 2: High Pressure +2015-02-07 17:00:00,53,Error Code 2: High Pressure +2015-02-10 03:00:00,53,Error Code 4: Invalid Rotation Angle +2015-02-16 00:00:00,53,Error Code 2: High Pressure +2015-02-24 23:00:00,53,Error Code 5: Low Pressure +2015-03-01 01:00:00,53,Error Code 3: Excessive Vibration +2015-03-01 03:00:00,53,Error Code 3: Excessive Vibration +2015-03-06 09:00:00,53,Error Code 5: Low Pressure +2015-03-17 01:00:00,53,Error Code 3: Excessive Vibration +2015-03-22 22:00:00,53,Error Code 1: Low Voltage +2015-03-28 06:00:00,53,Error Code 1: Low Voltage +2015-04-14 14:00:00,53,Error Code 2: High Pressure +2015-04-15 07:00:00,53,Error Code 4: Invalid Rotation Angle +2015-04-23 18:00:00,53,Error Code 1: Low Voltage +2015-04-25 10:00:00,53,Error Code 3: Excessive Vibration +2015-05-04 22:00:00,53,Error Code 1: Low Voltage +2015-05-05 14:00:00,53,Error Code 4: Invalid Rotation Angle +2015-05-12 06:00:00,53,Error Code 2: High Pressure +2015-05-12 06:00:00,53,Error Code 3: Excessive Vibration +2015-06-08 18:00:00,53,Error Code 5: Low Pressure +2015-06-16 03:00:00,53,Error Code 1: Low Voltage +2015-06-19 05:00:00,53,Error Code 1: Low Voltage +2015-06-25 12:00:00,53,Error Code 5: Low Pressure +2015-07-11 06:00:00,53,Error Code 1: Low Voltage +2015-08-26 02:00:00,53,Error Code 4: Invalid Rotation Angle +2015-10-09 06:00:00,53,Error Code 1: Low Voltage +2015-10-13 02:00:00,53,Error Code 3: Excessive Vibration +2015-10-18 07:00:00,53,Error Code 1: Low Voltage +2015-10-22 02:00:00,53,Error Code 3: Excessive Vibration +2015-10-24 05:00:00,53,Error Code 2: High Pressure +2015-11-04 09:00:00,53,Error Code 3: Excessive Vibration +2015-11-08 06:00:00,53,Error Code 2: High Pressure +2015-11-08 06:00:00,53,Error Code 3: Excessive Vibration +2015-11-12 22:00:00,53,Error Code 1: Low Voltage +2015-11-15 08:00:00,53,Error Code 4: Invalid Rotation Angle +2015-12-03 18:00:00,53,Error Code 3: Excessive Vibration +2015-12-06 13:00:00,53,Error Code 2: High Pressure +2015-12-08 06:00:00,53,Error Code 2: High Pressure +2015-12-08 06:00:00,53,Error Code 3: Excessive Vibration +2015-12-09 03:00:00,53,Error Code 4: Invalid Rotation Angle +2015-12-09 07:00:00,53,Error Code 3: Excessive Vibration +2015-12-27 10:00:00,53,Error Code 2: High Pressure +2015-12-27 22:00:00,53,Error Code 3: Excessive Vibration +2015-01-13 18:00:00,54,Error Code 5: Low Pressure +2015-01-14 06:00:00,54,Error Code 4: Invalid Rotation Angle +2015-01-17 16:00:00,54,Error Code 4: Invalid Rotation Angle +2015-01-18 03:00:00,54,Error Code 4: Invalid Rotation Angle +2015-01-21 
21:00:00,54,Error Code 1: Low Voltage +2015-02-13 18:00:00,54,Error Code 3: Excessive Vibration +2015-03-09 14:00:00,54,Error Code 4: Invalid Rotation Angle +2015-03-12 18:00:00,54,Error Code 2: High Pressure +2015-03-25 07:00:00,54,Error Code 4: Invalid Rotation Angle +2015-03-30 06:00:00,54,Error Code 4: Invalid Rotation Angle +2015-04-09 01:00:00,54,Error Code 2: High Pressure +2015-04-14 19:00:00,54,Error Code 4: Invalid Rotation Angle +2015-04-19 23:00:00,54,Error Code 1: Low Voltage +2015-04-20 17:00:00,54,Error Code 3: Excessive Vibration +2015-04-29 06:00:00,54,Error Code 2: High Pressure +2015-04-29 06:00:00,54,Error Code 3: Excessive Vibration +2015-05-14 04:00:00,54,Error Code 1: Low Voltage +2015-05-14 10:00:00,54,Error Code 3: Excessive Vibration +2015-06-15 16:00:00,54,Error Code 1: Low Voltage +2015-06-19 14:00:00,54,Error Code 1: Low Voltage +2015-06-21 09:00:00,54,Error Code 3: Excessive Vibration +2015-06-28 06:00:00,54,Error Code 4: Invalid Rotation Angle +2015-07-01 01:00:00,54,Error Code 3: Excessive Vibration +2015-07-18 01:00:00,54,Error Code 2: High Pressure +2015-07-22 00:00:00,54,Error Code 1: Low Voltage +2015-07-24 19:00:00,54,Error Code 3: Excessive Vibration +2015-08-20 03:00:00,54,Error Code 2: High Pressure +2015-08-21 14:00:00,54,Error Code 2: High Pressure +2015-08-22 11:00:00,54,Error Code 5: Low Pressure +2015-08-24 15:00:00,54,Error Code 2: High Pressure +2015-09-11 05:00:00,54,Error Code 4: Invalid Rotation Angle +2015-09-17 20:00:00,54,Error Code 4: Invalid Rotation Angle +2015-10-10 02:00:00,54,Error Code 2: High Pressure +2015-10-29 04:00:00,54,Error Code 4: Invalid Rotation Angle +2015-10-30 10:00:00,54,Error Code 4: Invalid Rotation Angle +2015-11-02 15:00:00,54,Error Code 1: Low Voltage +2015-11-03 06:00:00,54,Error Code 2: High Pressure +2015-11-04 02:00:00,54,Error Code 1: Low Voltage +2015-11-12 14:00:00,54,Error Code 3: Excessive Vibration +2015-11-20 00:00:00,54,Error Code 2: High Pressure +2015-11-20 17:00:00,54,Error Code 2: High Pressure +2015-11-25 15:00:00,54,Error Code 4: Invalid Rotation Angle +2015-12-12 01:00:00,54,Error Code 3: Excessive Vibration +2015-12-25 06:00:00,54,Error Code 2: High Pressure +2015-12-25 06:00:00,54,Error Code 3: Excessive Vibration +2015-12-27 10:00:00,54,Error Code 4: Invalid Rotation Angle +2015-01-14 01:00:00,55,Error Code 5: Low Pressure +2015-01-18 09:00:00,55,Error Code 3: Excessive Vibration +2015-01-19 13:00:00,55,Error Code 1: Low Voltage +2015-01-31 10:00:00,55,Error Code 2: High Pressure +2015-03-04 16:00:00,55,Error Code 3: Excessive Vibration +2015-03-06 06:00:00,55,Error Code 5: Low Pressure +2015-03-10 18:00:00,55,Error Code 1: Low Voltage +2015-03-29 01:00:00,55,Error Code 4: Invalid Rotation Angle +2015-04-09 05:00:00,55,Error Code 1: Low Voltage +2015-04-20 06:00:00,55,Error Code 2: High Pressure +2015-04-20 06:00:00,55,Error Code 3: Excessive Vibration +2015-05-03 01:00:00,55,Error Code 2: High Pressure +2015-06-04 06:00:00,55,Error Code 5: Low Pressure +2015-06-04 23:00:00,55,Error Code 3: Excessive Vibration +2015-06-19 06:00:00,55,Error Code 1: Low Voltage +2015-07-15 04:00:00,55,Error Code 1: Low Voltage +2015-08-03 06:00:00,55,Error Code 1: Low Voltage +2015-08-06 17:00:00,55,Error Code 4: Invalid Rotation Angle +2015-08-15 16:00:00,55,Error Code 1: Low Voltage +2015-08-18 06:00:00,55,Error Code 5: Low Pressure +2015-08-24 21:00:00,55,Error Code 1: Low Voltage +2015-09-01 18:00:00,55,Error Code 1: Low Voltage +2015-09-02 06:00:00,55,Error Code 2: High Pressure +2015-09-02 
06:00:00,55,Error Code 3: Excessive Vibration +2015-09-05 20:00:00,55,Error Code 1: Low Voltage +2015-09-07 00:00:00,55,Error Code 1: Low Voltage +2015-09-16 09:00:00,55,Error Code 2: High Pressure +2015-09-18 14:00:00,55,Error Code 3: Excessive Vibration +2015-09-23 16:00:00,55,Error Code 4: Invalid Rotation Angle +2015-10-10 14:00:00,55,Error Code 1: Low Voltage +2015-11-07 16:00:00,55,Error Code 1: Low Voltage +2015-11-11 06:00:00,55,Error Code 3: Excessive Vibration +2015-11-14 09:00:00,55,Error Code 5: Low Pressure +2015-11-30 15:00:00,55,Error Code 4: Invalid Rotation Angle +2015-12-01 06:00:00,55,Error Code 1: Low Voltage +2015-12-21 06:00:00,55,Error Code 3: Excessive Vibration +2015-12-21 19:00:00,55,Error Code 3: Excessive Vibration +2015-01-13 06:00:00,56,Error Code 4: Invalid Rotation Angle +2015-01-28 00:00:00,56,Error Code 3: Excessive Vibration +2015-02-07 23:00:00,56,Error Code 3: Excessive Vibration +2015-02-08 06:00:00,56,Error Code 2: High Pressure +2015-02-27 00:00:00,56,Error Code 1: Low Voltage +2015-03-14 06:00:00,56,Error Code 4: Invalid Rotation Angle +2015-03-17 15:00:00,56,Error Code 2: High Pressure +2015-04-13 06:00:00,56,Error Code 1: Low Voltage +2015-04-14 19:00:00,56,Error Code 1: Low Voltage +2015-04-15 13:00:00,56,Error Code 4: Invalid Rotation Angle +2015-04-28 06:00:00,56,Error Code 2: High Pressure +2015-04-28 06:00:00,56,Error Code 3: Excessive Vibration +2015-05-22 23:00:00,56,Error Code 2: High Pressure +2015-05-24 00:00:00,56,Error Code 1: Low Voltage +2015-06-11 19:00:00,56,Error Code 3: Excessive Vibration +2015-06-12 06:00:00,56,Error Code 4: Invalid Rotation Angle +2015-06-28 07:00:00,56,Error Code 2: High Pressure +2015-07-08 10:00:00,56,Error Code 4: Invalid Rotation Angle +2015-07-16 06:00:00,56,Error Code 3: Excessive Vibration +2015-08-05 12:00:00,56,Error Code 1: Low Voltage +2015-08-13 15:00:00,56,Error Code 3: Excessive Vibration +2015-08-21 14:00:00,56,Error Code 4: Invalid Rotation Angle +2015-09-09 12:00:00,56,Error Code 2: High Pressure +2015-09-21 03:00:00,56,Error Code 4: Invalid Rotation Angle +2015-09-22 01:00:00,56,Error Code 3: Excessive Vibration +2015-09-25 06:00:00,56,Error Code 2: High Pressure +2015-09-25 06:00:00,56,Error Code 3: Excessive Vibration +2015-09-25 06:00:00,56,Error Code 4: Invalid Rotation Angle +2015-10-12 10:00:00,56,Error Code 4: Invalid Rotation Angle +2015-11-06 09:00:00,56,Error Code 2: High Pressure +2015-11-09 06:00:00,56,Error Code 1: Low Voltage +2015-11-09 22:00:00,56,Error Code 4: Invalid Rotation Angle +2015-11-16 11:00:00,56,Error Code 4: Invalid Rotation Angle +2015-11-24 06:00:00,56,Error Code 2: High Pressure +2015-11-24 06:00:00,56,Error Code 3: Excessive Vibration +2015-12-13 04:00:00,56,Error Code 1: Low Voltage +2015-12-19 01:00:00,56,Error Code 3: Excessive Vibration +2015-12-24 06:00:00,56,Error Code 1: Low Voltage +2015-01-19 11:00:00,57,Error Code 2: High Pressure +2015-02-06 06:00:00,57,Error Code 1: Low Voltage +2015-02-11 12:00:00,57,Error Code 2: High Pressure +2015-03-05 21:00:00,57,Error Code 4: Invalid Rotation Angle +2015-03-07 04:00:00,57,Error Code 2: High Pressure +2015-03-11 19:00:00,57,Error Code 3: Excessive Vibration +2015-03-31 16:00:00,57,Error Code 1: Low Voltage +2015-04-02 10:00:00,57,Error Code 1: Low Voltage +2015-04-05 04:00:00,57,Error Code 4: Invalid Rotation Angle +2015-04-23 10:00:00,57,Error Code 3: Excessive Vibration +2015-04-28 05:00:00,57,Error Code 3: Excessive Vibration +2015-05-02 11:00:00,57,Error Code 2: High Pressure +2015-05-10 
23:00:00,57,Error Code 2: High Pressure +2015-05-28 18:00:00,57,Error Code 2: High Pressure +2015-06-08 08:00:00,57,Error Code 2: High Pressure +2015-06-13 23:00:00,57,Error Code 2: High Pressure +2015-06-19 01:00:00,57,Error Code 4: Invalid Rotation Angle +2015-07-22 12:00:00,57,Error Code 4: Invalid Rotation Angle +2015-08-16 00:00:00,57,Error Code 3: Excessive Vibration +2015-08-17 12:00:00,57,Error Code 1: Low Voltage +2015-09-22 08:00:00,57,Error Code 1: Low Voltage +2015-10-04 01:00:00,57,Error Code 4: Invalid Rotation Angle +2015-10-09 21:00:00,57,Error Code 3: Excessive Vibration +2015-10-12 07:00:00,57,Error Code 2: High Pressure +2015-10-21 13:00:00,57,Error Code 1: Low Voltage +2015-11-03 06:00:00,57,Error Code 1: Low Voltage +2015-11-04 08:00:00,57,Error Code 2: High Pressure +2015-11-27 17:00:00,57,Error Code 4: Invalid Rotation Angle +2015-12-06 21:00:00,57,Error Code 1: Low Voltage +2015-12-13 19:00:00,57,Error Code 2: High Pressure +2015-12-14 15:00:00,57,Error Code 4: Invalid Rotation Angle +2015-12-16 20:00:00,57,Error Code 1: Low Voltage +2015-12-17 12:00:00,57,Error Code 1: Low Voltage +2015-01-07 07:00:00,58,Error Code 3: Excessive Vibration +2015-01-08 05:00:00,58,Error Code 4: Invalid Rotation Angle +2015-01-14 04:00:00,58,Error Code 1: Low Voltage +2015-01-30 06:00:00,58,Error Code 4: Invalid Rotation Angle +2015-02-06 18:00:00,58,Error Code 2: High Pressure +2015-02-15 12:00:00,58,Error Code 1: Low Voltage +2015-02-25 01:00:00,58,Error Code 1: Low Voltage +2015-03-20 01:00:00,58,Error Code 1: Low Voltage +2015-04-30 06:00:00,58,Error Code 4: Invalid Rotation Angle +2015-05-15 06:00:00,58,Error Code 1: Low Voltage +2015-05-21 22:00:00,58,Error Code 3: Excessive Vibration +2015-05-27 16:00:00,58,Error Code 3: Excessive Vibration +2015-05-30 06:00:00,58,Error Code 2: High Pressure +2015-05-30 06:00:00,58,Error Code 3: Excessive Vibration +2015-05-31 14:00:00,58,Error Code 1: Low Voltage +2015-07-07 05:00:00,58,Error Code 5: Low Pressure +2015-07-29 06:00:00,58,Error Code 4: Invalid Rotation Angle +2015-07-30 11:00:00,58,Error Code 3: Excessive Vibration +2015-08-05 21:00:00,58,Error Code 1: Low Voltage +2015-08-29 09:00:00,58,Error Code 3: Excessive Vibration +2015-09-05 07:00:00,58,Error Code 2: High Pressure +2015-09-12 07:00:00,58,Error Code 5: Low Pressure +2015-09-21 22:00:00,58,Error Code 2: High Pressure +2015-09-22 17:00:00,58,Error Code 2: High Pressure +2015-09-25 09:00:00,58,Error Code 1: Low Voltage +2015-09-25 18:00:00,58,Error Code 1: Low Voltage +2015-09-27 06:00:00,58,Error Code 4: Invalid Rotation Angle +2015-10-06 16:00:00,58,Error Code 1: Low Voltage +2015-10-09 20:00:00,58,Error Code 4: Invalid Rotation Angle +2015-10-16 00:00:00,58,Error Code 2: High Pressure +2015-11-07 09:00:00,58,Error Code 1: Low Voltage +2015-11-16 08:00:00,58,Error Code 2: High Pressure +2015-12-08 01:00:00,58,Error Code 2: High Pressure +2015-12-11 06:00:00,58,Error Code 2: High Pressure +2015-12-11 06:00:00,58,Error Code 3: Excessive Vibration +2015-01-03 02:00:00,59,Error Code 2: High Pressure +2015-01-12 02:00:00,59,Error Code 4: Invalid Rotation Angle +2015-01-19 09:00:00,59,Error Code 4: Invalid Rotation Angle +2015-01-29 08:00:00,59,Error Code 4: Invalid Rotation Angle +2015-02-01 14:00:00,59,Error Code 1: Low Voltage +2015-02-02 19:00:00,59,Error Code 1: Low Voltage +2015-02-03 00:00:00,59,Error Code 1: Low Voltage +2015-02-03 06:00:00,59,Error Code 5: Low Pressure +2015-02-09 15:00:00,59,Error Code 1: Low Voltage +2015-02-17 16:00:00,59,Error Code 1: Low Voltage 
+2015-02-22 03:00:00,59,Error Code 3: Excessive Vibration +2015-02-25 12:00:00,59,Error Code 1: Low Voltage +2015-03-29 07:00:00,59,Error Code 1: Low Voltage +2015-04-04 06:00:00,59,Error Code 5: Low Pressure +2015-04-05 07:00:00,59,Error Code 3: Excessive Vibration +2015-04-21 06:00:00,59,Error Code 2: High Pressure +2015-05-02 12:00:00,59,Error Code 3: Excessive Vibration +2015-05-17 09:00:00,59,Error Code 2: High Pressure +2015-05-19 06:00:00,59,Error Code 5: Low Pressure +2015-05-20 21:00:00,59,Error Code 4: Invalid Rotation Angle +2015-06-05 01:00:00,59,Error Code 2: High Pressure +2015-07-05 01:00:00,59,Error Code 4: Invalid Rotation Angle +2015-07-05 11:00:00,59,Error Code 2: High Pressure +2015-07-18 06:00:00,59,Error Code 1: Low Voltage +2015-08-02 06:00:00,59,Error Code 2: High Pressure +2015-08-02 06:00:00,59,Error Code 3: Excessive Vibration +2015-08-17 06:00:00,59,Error Code 5: Low Pressure +2015-08-23 02:00:00,59,Error Code 2: High Pressure +2015-08-27 13:00:00,59,Error Code 2: High Pressure +2015-10-16 06:00:00,59,Error Code 2: High Pressure +2015-10-16 06:00:00,59,Error Code 3: Excessive Vibration +2015-10-25 12:00:00,59,Error Code 1: Low Voltage +2015-10-30 17:00:00,59,Error Code 3: Excessive Vibration +2015-12-24 14:00:00,59,Error Code 3: Excessive Vibration +2015-12-29 13:00:00,59,Error Code 4: Invalid Rotation Angle +2015-01-02 08:00:00,60,Error Code 1: Low Voltage +2015-01-12 10:00:00,60,Error Code 3: Excessive Vibration +2015-01-16 08:00:00,60,Error Code 3: Excessive Vibration +2015-01-22 22:00:00,60,Error Code 3: Excessive Vibration +2015-01-24 07:00:00,60,Error Code 3: Excessive Vibration +2015-01-29 06:00:00,60,Error Code 2: High Pressure +2015-01-29 06:00:00,60,Error Code 3: Excessive Vibration +2015-02-16 19:00:00,60,Error Code 4: Invalid Rotation Angle +2015-02-20 14:00:00,60,Error Code 2: High Pressure +2015-02-21 15:00:00,60,Error Code 3: Excessive Vibration +2015-03-14 17:00:00,60,Error Code 1: Low Voltage +2015-03-19 11:00:00,60,Error Code 2: High Pressure +2015-03-22 03:00:00,60,Error Code 1: Low Voltage +2015-03-31 08:00:00,60,Error Code 5: Low Pressure +2015-04-03 12:00:00,60,Error Code 1: Low Voltage +2015-04-06 16:00:00,60,Error Code 2: High Pressure +2015-04-14 06:00:00,60,Error Code 2: High Pressure +2015-04-14 06:00:00,60,Error Code 3: Excessive Vibration +2015-05-05 10:00:00,60,Error Code 4: Invalid Rotation Angle +2015-05-10 22:00:00,60,Error Code 1: Low Voltage +2015-05-16 07:00:00,60,Error Code 1: Low Voltage +2015-05-22 13:00:00,60,Error Code 4: Invalid Rotation Angle +2015-05-26 01:00:00,60,Error Code 2: High Pressure +2015-06-01 13:00:00,60,Error Code 2: High Pressure +2015-06-07 04:00:00,60,Error Code 4: Invalid Rotation Angle +2015-06-10 01:00:00,60,Error Code 2: High Pressure +2015-06-18 09:00:00,60,Error Code 1: Low Voltage +2015-06-25 23:00:00,60,Error Code 2: High Pressure +2015-07-02 23:00:00,60,Error Code 2: High Pressure +2015-07-15 20:00:00,60,Error Code 4: Invalid Rotation Angle +2015-07-19 20:00:00,60,Error Code 4: Invalid Rotation Angle +2015-07-28 06:00:00,60,Error Code 2: High Pressure +2015-07-28 06:00:00,60,Error Code 3: Excessive Vibration +2015-08-26 10:00:00,60,Error Code 1: Low Voltage +2015-09-10 20:00:00,60,Error Code 4: Invalid Rotation Angle +2015-09-11 06:00:00,60,Error Code 2: High Pressure +2015-09-11 06:00:00,60,Error Code 3: Excessive Vibration +2015-09-13 20:00:00,60,Error Code 1: Low Voltage +2015-10-02 05:00:00,60,Error Code 2: High Pressure +2015-10-06 23:00:00,60,Error Code 2: High Pressure +2015-10-18 
11:00:00,60,Error Code 3: Excessive Vibration +2015-10-23 02:00:00,60,Error Code 4: Invalid Rotation Angle +2015-11-22 08:00:00,60,Error Code 1: Low Voltage +2015-01-13 07:00:00,61,Error Code 3: Excessive Vibration +2015-02-08 11:00:00,61,Error Code 1: Low Voltage +2015-02-26 06:00:00,61,Error Code 2: High Pressure +2015-02-26 06:00:00,61,Error Code 3: Excessive Vibration +2015-03-15 00:00:00,61,Error Code 3: Excessive Vibration +2015-03-21 15:00:00,61,Error Code 1: Low Voltage +2015-04-14 23:00:00,61,Error Code 1: Low Voltage +2015-04-21 15:00:00,61,Error Code 3: Excessive Vibration +2015-04-26 01:00:00,61,Error Code 1: Low Voltage +2015-04-30 05:00:00,61,Error Code 5: Low Pressure +2015-05-13 03:00:00,61,Error Code 2: High Pressure +2015-07-26 00:00:00,61,Error Code 1: Low Voltage +2015-07-26 23:00:00,61,Error Code 1: Low Voltage +2015-08-05 15:00:00,61,Error Code 4: Invalid Rotation Angle +2015-08-10 06:00:00,61,Error Code 2: High Pressure +2015-08-10 06:00:00,61,Error Code 3: Excessive Vibration +2015-08-11 18:00:00,61,Error Code 1: Low Voltage +2015-08-15 03:00:00,61,Error Code 1: Low Voltage +2015-08-20 06:00:00,61,Error Code 1: Low Voltage +2015-08-27 23:00:00,61,Error Code 3: Excessive Vibration +2015-09-24 07:00:00,61,Error Code 2: High Pressure +2015-09-24 14:00:00,61,Error Code 1: Low Voltage +2015-09-26 14:00:00,61,Error Code 3: Excessive Vibration +2015-10-24 06:00:00,61,Error Code 1: Low Voltage +2015-10-29 00:00:00,61,Error Code 1: Low Voltage +2015-10-31 09:00:00,61,Error Code 5: Low Pressure +2015-11-05 16:00:00,61,Error Code 2: High Pressure +2015-11-23 00:00:00,61,Error Code 4: Invalid Rotation Angle +2015-11-23 06:00:00,61,Error Code 2: High Pressure +2015-11-23 06:00:00,61,Error Code 3: Excessive Vibration +2015-11-27 14:00:00,61,Error Code 4: Invalid Rotation Angle +2015-12-01 08:00:00,61,Error Code 5: Low Pressure +2015-12-06 08:00:00,61,Error Code 2: High Pressure +2015-12-15 17:00:00,61,Error Code 1: Low Voltage +2015-01-12 06:00:00,62,Error Code 1: Low Voltage +2015-01-22 10:00:00,62,Error Code 4: Invalid Rotation Angle +2015-01-27 06:00:00,62,Error Code 5: Low Pressure +2015-02-19 22:00:00,62,Error Code 1: Low Voltage +2015-02-21 20:00:00,62,Error Code 3: Excessive Vibration +2015-03-14 14:00:00,62,Error Code 1: Low Voltage +2015-03-19 18:00:00,62,Error Code 4: Invalid Rotation Angle +2015-03-28 19:00:00,62,Error Code 3: Excessive Vibration +2015-05-09 23:00:00,62,Error Code 3: Excessive Vibration +2015-05-27 06:00:00,62,Error Code 5: Low Pressure +2015-06-26 06:00:00,62,Error Code 2: High Pressure +2015-06-26 06:00:00,62,Error Code 3: Excessive Vibration +2015-07-14 09:00:00,62,Error Code 4: Invalid Rotation Angle +2015-08-10 06:00:00,62,Error Code 1: Low Voltage +2015-08-27 18:00:00,62,Error Code 3: Excessive Vibration +2015-08-31 19:00:00,62,Error Code 4: Invalid Rotation Angle +2015-09-22 23:00:00,62,Error Code 4: Invalid Rotation Angle +2015-09-30 03:00:00,62,Error Code 1: Low Voltage +2015-10-10 14:00:00,62,Error Code 2: High Pressure +2015-10-14 08:00:00,62,Error Code 1: Low Voltage +2015-11-05 06:00:00,62,Error Code 2: High Pressure +2015-11-05 10:00:00,62,Error Code 4: Invalid Rotation Angle +2015-12-08 06:00:00,62,Error Code 5: Low Pressure +2015-12-19 00:00:00,62,Error Code 4: Invalid Rotation Angle +2015-12-19 13:00:00,62,Error Code 2: High Pressure +2015-12-23 06:00:00,62,Error Code 2: High Pressure +2015-12-23 06:00:00,62,Error Code 3: Excessive Vibration +2015-12-23 11:00:00,62,Error Code 4: Invalid Rotation Angle +2015-01-19 04:00:00,63,Error Code 
2: High Pressure +2015-01-27 06:00:00,63,Error Code 1: Low Voltage +2015-01-27 06:00:00,63,Error Code 2: High Pressure +2015-01-27 06:00:00,63,Error Code 3: Excessive Vibration +2015-02-04 04:00:00,63,Error Code 4: Invalid Rotation Angle +2015-02-09 17:00:00,63,Error Code 1: Low Voltage +2015-02-11 17:00:00,63,Error Code 3: Excessive Vibration +2015-02-15 13:00:00,63,Error Code 5: Low Pressure +2015-02-20 18:00:00,63,Error Code 2: High Pressure +2015-02-27 01:00:00,63,Error Code 3: Excessive Vibration +2015-03-04 09:00:00,63,Error Code 1: Low Voltage +2015-03-05 03:00:00,63,Error Code 4: Invalid Rotation Angle +2015-03-12 20:00:00,63,Error Code 2: High Pressure +2015-03-13 06:00:00,63,Error Code 5: Low Pressure +2015-03-28 03:00:00,63,Error Code 1: Low Voltage +2015-03-30 18:00:00,63,Error Code 3: Excessive Vibration +2015-04-16 10:00:00,63,Error Code 3: Excessive Vibration +2015-05-09 21:00:00,63,Error Code 5: Low Pressure +2015-05-12 06:00:00,63,Error Code 5: Low Pressure +2015-05-14 09:00:00,63,Error Code 1: Low Voltage +2015-05-20 02:00:00,63,Error Code 2: High Pressure +2015-05-25 07:00:00,63,Error Code 4: Invalid Rotation Angle +2015-05-25 22:00:00,63,Error Code 3: Excessive Vibration +2015-05-27 06:00:00,63,Error Code 2: High Pressure +2015-05-27 06:00:00,63,Error Code 3: Excessive Vibration +2015-06-15 08:00:00,63,Error Code 1: Low Voltage +2015-06-16 08:00:00,63,Error Code 4: Invalid Rotation Angle +2015-06-23 18:00:00,63,Error Code 3: Excessive Vibration +2015-06-29 19:00:00,63,Error Code 2: High Pressure +2015-07-26 06:00:00,63,Error Code 5: Low Pressure +2015-08-05 13:00:00,63,Error Code 1: Low Voltage +2015-08-08 16:00:00,63,Error Code 1: Low Voltage +2015-08-16 08:00:00,63,Error Code 2: High Pressure +2015-08-28 20:00:00,63,Error Code 4: Invalid Rotation Angle +2015-09-02 11:00:00,63,Error Code 4: Invalid Rotation Angle +2015-09-02 14:00:00,63,Error Code 4: Invalid Rotation Angle +2015-09-12 23:00:00,63,Error Code 1: Low Voltage +2015-09-14 14:00:00,63,Error Code 5: Low Pressure +2015-09-18 12:00:00,63,Error Code 1: Low Voltage +2015-09-27 14:00:00,63,Error Code 1: Low Voltage +2015-09-29 00:00:00,63,Error Code 3: Excessive Vibration +2015-10-20 06:00:00,63,Error Code 5: Low Pressure +2015-10-24 06:00:00,63,Error Code 5: Low Pressure +2015-10-29 11:00:00,63,Error Code 4: Invalid Rotation Angle +2015-11-08 06:00:00,63,Error Code 2: High Pressure +2015-11-08 06:00:00,63,Error Code 3: Excessive Vibration +2015-11-14 17:00:00,63,Error Code 1: Low Voltage +2015-12-06 08:00:00,63,Error Code 3: Excessive Vibration +2015-12-06 23:00:00,63,Error Code 1: Low Voltage +2015-12-15 03:00:00,63,Error Code 2: High Pressure +2015-01-04 06:00:00,64,Error Code 5: Low Pressure +2015-01-06 16:00:00,64,Error Code 1: Low Voltage +2015-01-09 21:00:00,64,Error Code 1: Low Voltage +2015-01-19 06:00:00,64,Error Code 2: High Pressure +2015-01-19 06:00:00,64,Error Code 3: Excessive Vibration +2015-01-23 11:00:00,64,Error Code 2: High Pressure +2015-01-24 13:00:00,64,Error Code 1: Low Voltage +2015-02-03 17:00:00,64,Error Code 2: High Pressure +2015-02-18 06:00:00,64,Error Code 5: Low Pressure +2015-02-19 12:00:00,64,Error Code 1: Low Voltage +2015-02-24 23:00:00,64,Error Code 2: High Pressure +2015-03-01 20:00:00,64,Error Code 1: Low Voltage +2015-03-01 23:00:00,64,Error Code 1: Low Voltage +2015-03-11 23:00:00,64,Error Code 3: Excessive Vibration +2015-03-26 23:00:00,64,Error Code 3: Excessive Vibration +2015-04-04 06:00:00,64,Error Code 2: High Pressure +2015-04-04 06:00:00,64,Error Code 3: Excessive 
Vibration +2015-04-28 17:00:00,64,Error Code 5: Low Pressure +2015-05-04 06:00:00,64,Error Code 5: Low Pressure +2015-05-07 00:00:00,64,Error Code 1: Low Voltage +2015-05-25 14:00:00,64,Error Code 2: High Pressure +2015-06-11 20:00:00,64,Error Code 3: Excessive Vibration +2015-06-15 00:00:00,64,Error Code 2: High Pressure +2015-09-02 14:00:00,64,Error Code 1: Low Voltage +2015-09-16 06:00:00,64,Error Code 5: Low Pressure +2015-09-29 20:00:00,64,Error Code 4: Invalid Rotation Angle +2015-10-02 15:00:00,64,Error Code 1: Low Voltage +2015-10-05 13:00:00,64,Error Code 3: Excessive Vibration +2015-10-06 02:00:00,64,Error Code 3: Excessive Vibration +2015-10-16 06:00:00,64,Error Code 2: High Pressure +2015-10-16 06:00:00,64,Error Code 3: Excessive Vibration +2015-10-28 05:00:00,64,Error Code 2: High Pressure +2015-11-14 06:00:00,64,Error Code 1: Low Voltage +2015-12-02 18:00:00,64,Error Code 2: High Pressure +2015-12-06 07:00:00,64,Error Code 2: High Pressure +2015-12-15 04:00:00,64,Error Code 1: Low Voltage +2015-12-30 06:00:00,64,Error Code 1: Low Voltage +2015-01-04 18:00:00,65,Error Code 2: High Pressure +2015-01-06 06:00:00,65,Error Code 4: Invalid Rotation Angle +2015-01-17 03:00:00,65,Error Code 1: Low Voltage +2015-02-01 05:00:00,65,Error Code 4: Invalid Rotation Angle +2015-02-28 08:00:00,65,Error Code 2: High Pressure +2015-03-11 11:00:00,65,Error Code 2: High Pressure +2015-03-16 06:00:00,65,Error Code 1: Low Voltage +2015-03-22 03:00:00,65,Error Code 4: Invalid Rotation Angle +2015-03-23 01:00:00,65,Error Code 4: Invalid Rotation Angle +2015-03-29 18:00:00,65,Error Code 4: Invalid Rotation Angle +2015-03-31 09:00:00,65,Error Code 4: Invalid Rotation Angle +2015-04-24 00:00:00,65,Error Code 1: Low Voltage +2015-05-15 09:00:00,65,Error Code 2: High Pressure +2015-05-17 06:00:00,65,Error Code 4: Invalid Rotation Angle +2015-05-19 13:00:00,65,Error Code 3: Excessive Vibration +2015-05-20 00:00:00,65,Error Code 3: Excessive Vibration +2015-05-30 06:00:00,65,Error Code 1: Low Voltage +2015-06-18 02:00:00,65,Error Code 3: Excessive Vibration +2015-06-29 06:00:00,65,Error Code 5: Low Pressure +2015-07-15 20:00:00,65,Error Code 2: High Pressure +2015-07-22 13:00:00,65,Error Code 2: High Pressure +2015-08-13 06:00:00,65,Error Code 1: Low Voltage +2015-08-13 06:00:00,65,Error Code 2: High Pressure +2015-08-13 06:00:00,65,Error Code 3: Excessive Vibration +2015-08-20 18:00:00,65,Error Code 1: Low Voltage +2015-08-21 17:00:00,65,Error Code 1: Low Voltage +2015-08-22 00:00:00,65,Error Code 4: Invalid Rotation Angle +2015-08-31 21:00:00,65,Error Code 2: High Pressure +2015-09-12 06:00:00,65,Error Code 5: Low Pressure +2015-10-02 13:00:00,65,Error Code 1: Low Voltage +2015-11-02 03:00:00,65,Error Code 4: Invalid Rotation Angle +2015-11-15 18:00:00,65,Error Code 4: Invalid Rotation Angle +2015-11-24 05:00:00,65,Error Code 2: High Pressure +2015-12-09 17:00:00,65,Error Code 2: High Pressure +2015-12-11 01:00:00,65,Error Code 4: Invalid Rotation Angle +2015-01-11 04:00:00,66,Error Code 1: Low Voltage +2015-01-21 11:00:00,66,Error Code 1: Low Voltage +2015-02-05 22:00:00,66,Error Code 1: Low Voltage +2015-02-08 10:00:00,66,Error Code 3: Excessive Vibration +2015-02-17 06:00:00,66,Error Code 1: Low Voltage +2015-03-06 23:00:00,66,Error Code 3: Excessive Vibration +2015-03-15 19:00:00,66,Error Code 4: Invalid Rotation Angle +2015-03-16 01:00:00,66,Error Code 3: Excessive Vibration +2015-04-13 03:00:00,66,Error Code 3: Excessive Vibration +2015-04-14 11:00:00,66,Error Code 2: High Pressure +2015-04-19 
16:00:00,66,Error Code 3: Excessive Vibration +2015-05-01 09:00:00,66,Error Code 1: Low Voltage +2015-05-03 06:00:00,66,Error Code 2: High Pressure +2015-05-09 14:00:00,66,Error Code 2: High Pressure +2015-05-18 06:00:00,66,Error Code 1: Low Voltage +2015-05-18 06:00:00,66,Error Code 2: High Pressure +2015-05-18 06:00:00,66,Error Code 3: Excessive Vibration +2015-05-20 03:00:00,66,Error Code 1: Low Voltage +2015-05-30 18:00:00,66,Error Code 3: Excessive Vibration +2015-06-03 00:00:00,66,Error Code 1: Low Voltage +2015-06-03 05:00:00,66,Error Code 1: Low Voltage +2015-06-16 10:00:00,66,Error Code 2: High Pressure +2015-06-22 22:00:00,66,Error Code 4: Invalid Rotation Angle +2015-06-28 18:00:00,66,Error Code 1: Low Voltage +2015-07-14 08:00:00,66,Error Code 1: Low Voltage +2015-07-29 20:00:00,66,Error Code 4: Invalid Rotation Angle +2015-08-01 07:00:00,66,Error Code 2: High Pressure +2015-08-02 02:00:00,66,Error Code 4: Invalid Rotation Angle +2015-08-05 21:00:00,66,Error Code 1: Low Voltage +2015-08-07 06:00:00,66,Error Code 4: Invalid Rotation Angle +2015-09-08 12:00:00,66,Error Code 3: Excessive Vibration +2015-09-27 06:00:00,66,Error Code 2: High Pressure +2015-09-30 06:00:00,66,Error Code 1: Low Voltage +2015-10-04 15:00:00,66,Error Code 2: High Pressure +2015-10-30 08:00:00,66,Error Code 4: Invalid Rotation Angle +2015-11-11 06:00:00,66,Error Code 4: Invalid Rotation Angle +2015-11-14 07:00:00,66,Error Code 2: High Pressure +2015-11-26 17:00:00,66,Error Code 1: Low Voltage +2015-12-05 21:00:00,66,Error Code 2: High Pressure +2015-12-10 20:00:00,66,Error Code 1: Low Voltage +2015-12-14 06:00:00,66,Error Code 1: Low Voltage +2015-12-15 08:00:00,66,Error Code 2: High Pressure +2015-12-21 04:00:00,66,Error Code 4: Invalid Rotation Angle +2015-01-15 06:00:00,67,Error Code 2: High Pressure +2015-01-15 06:00:00,67,Error Code 3: Excessive Vibration +2015-01-24 17:00:00,67,Error Code 1: Low Voltage +2015-02-02 13:00:00,67,Error Code 1: Low Voltage +2015-02-12 09:00:00,67,Error Code 3: Excessive Vibration +2015-02-14 19:00:00,67,Error Code 5: Low Pressure +2015-02-20 05:00:00,67,Error Code 3: Excessive Vibration +2015-03-08 12:00:00,67,Error Code 3: Excessive Vibration +2015-03-16 06:00:00,67,Error Code 2: High Pressure +2015-03-16 06:00:00,67,Error Code 3: Excessive Vibration +2015-04-08 23:00:00,67,Error Code 2: High Pressure +2015-05-11 06:00:00,67,Error Code 3: Excessive Vibration +2015-05-12 04:00:00,67,Error Code 3: Excessive Vibration +2015-05-24 06:00:00,67,Error Code 3: Excessive Vibration +2015-05-30 06:00:00,67,Error Code 5: Low Pressure +2015-06-03 20:00:00,67,Error Code 2: High Pressure +2015-06-04 04:00:00,67,Error Code 3: Excessive Vibration +2015-06-14 06:00:00,67,Error Code 1: Low Voltage +2015-07-14 06:00:00,67,Error Code 2: High Pressure +2015-07-14 06:00:00,67,Error Code 3: Excessive Vibration +2015-07-23 00:00:00,67,Error Code 1: Low Voltage +2015-07-29 12:00:00,67,Error Code 4: Invalid Rotation Angle +2015-08-02 20:00:00,67,Error Code 5: Low Pressure +2015-08-16 22:00:00,67,Error Code 2: High Pressure +2015-08-28 06:00:00,67,Error Code 5: Low Pressure +2015-09-03 07:00:00,67,Error Code 2: High Pressure +2015-09-12 06:00:00,67,Error Code 2: High Pressure +2015-09-12 06:00:00,67,Error Code 3: Excessive Vibration +2015-09-16 16:00:00,67,Error Code 5: Low Pressure +2015-09-18 11:00:00,67,Error Code 1: Low Voltage +2015-09-27 06:00:00,67,Error Code 1: Low Voltage +2015-09-29 14:00:00,67,Error Code 3: Excessive Vibration +2015-10-12 06:00:00,67,Error Code 2: High Pressure 
+2015-10-12 06:00:00,67,Error Code 3: Excessive Vibration +2015-11-09 01:00:00,67,Error Code 1: Low Voltage +2015-11-24 05:00:00,67,Error Code 1: Low Voltage +2015-12-02 17:00:00,67,Error Code 4: Invalid Rotation Angle +2015-12-15 10:00:00,67,Error Code 1: Low Voltage +2015-12-21 01:00:00,67,Error Code 4: Invalid Rotation Angle +2015-01-02 06:00:00,68,Error Code 1: Low Voltage +2015-01-05 01:00:00,68,Error Code 1: Low Voltage +2015-01-08 16:00:00,68,Error Code 4: Invalid Rotation Angle +2015-01-17 14:00:00,68,Error Code 1: Low Voltage +2015-02-05 09:00:00,68,Error Code 3: Excessive Vibration +2015-02-09 16:00:00,68,Error Code 3: Excessive Vibration +2015-02-27 22:00:00,68,Error Code 3: Excessive Vibration +2015-03-01 16:00:00,68,Error Code 4: Invalid Rotation Angle +2015-03-11 07:00:00,68,Error Code 1: Low Voltage +2015-03-31 12:00:00,68,Error Code 1: Low Voltage +2015-04-01 06:00:00,68,Error Code 2: High Pressure +2015-04-01 06:00:00,68,Error Code 3: Excessive Vibration +2015-04-02 07:00:00,68,Error Code 1: Low Voltage +2015-04-04 09:00:00,68,Error Code 2: High Pressure +2015-04-17 03:00:00,68,Error Code 1: Low Voltage +2015-04-26 04:00:00,68,Error Code 1: Low Voltage +2015-04-30 06:00:00,68,Error Code 3: Excessive Vibration +2015-05-09 18:00:00,68,Error Code 5: Low Pressure +2015-05-14 21:00:00,68,Error Code 4: Invalid Rotation Angle +2015-05-24 03:00:00,68,Error Code 2: High Pressure +2015-05-24 16:00:00,68,Error Code 2: High Pressure +2015-06-11 17:00:00,68,Error Code 3: Excessive Vibration +2015-06-22 13:00:00,68,Error Code 1: Low Voltage +2015-06-26 15:00:00,68,Error Code 4: Invalid Rotation Angle +2015-08-13 17:00:00,68,Error Code 4: Invalid Rotation Angle +2015-08-14 06:00:00,68,Error Code 2: High Pressure +2015-08-14 06:00:00,68,Error Code 3: Excessive Vibration +2015-08-28 07:00:00,68,Error Code 3: Excessive Vibration +2015-09-08 13:00:00,68,Error Code 1: Low Voltage +2015-09-21 14:00:00,68,Error Code 2: High Pressure +2015-09-26 10:00:00,68,Error Code 3: Excessive Vibration +2015-11-14 12:00:00,68,Error Code 2: High Pressure +2015-11-27 06:00:00,68,Error Code 1: Low Voltage +2015-12-09 19:00:00,68,Error Code 3: Excessive Vibration +2015-12-12 06:00:00,68,Error Code 2: High Pressure +2015-12-12 06:00:00,68,Error Code 3: Excessive Vibration +2015-12-22 23:00:00,68,Error Code 3: Excessive Vibration +2015-12-24 09:00:00,68,Error Code 3: Excessive Vibration +2015-01-04 06:00:00,69,Error Code 2: High Pressure +2015-01-04 06:00:00,69,Error Code 3: Excessive Vibration +2015-01-14 09:00:00,69,Error Code 2: High Pressure +2015-01-19 06:00:00,69,Error Code 4: Invalid Rotation Angle +2015-02-03 06:00:00,69,Error Code 5: Low Pressure +2015-02-10 06:00:00,69,Error Code 2: High Pressure +2015-02-28 19:00:00,69,Error Code 3: Excessive Vibration +2015-03-01 08:00:00,69,Error Code 4: Invalid Rotation Angle +2015-03-15 00:00:00,69,Error Code 3: Excessive Vibration +2015-03-19 03:00:00,69,Error Code 1: Low Voltage +2015-03-29 13:00:00,69,Error Code 4: Invalid Rotation Angle +2015-04-04 09:00:00,69,Error Code 2: High Pressure +2015-04-19 06:00:00,69,Error Code 5: Low Pressure +2015-04-23 15:00:00,69,Error Code 1: Low Voltage +2015-04-27 15:00:00,69,Error Code 2: High Pressure +2015-05-19 06:00:00,69,Error Code 2: High Pressure +2015-05-19 06:00:00,69,Error Code 3: Excessive Vibration +2015-05-31 02:00:00,69,Error Code 2: High Pressure +2015-06-15 05:00:00,69,Error Code 2: High Pressure +2015-06-16 18:00:00,69,Error Code 1: Low Voltage +2015-06-28 04:00:00,69,Error Code 1: Low Voltage +2015-07-01 
09:00:00,69,Error Code 1: Low Voltage +2015-07-09 11:00:00,69,Error Code 4: Invalid Rotation Angle +2015-07-28 01:00:00,69,Error Code 4: Invalid Rotation Angle +2015-08-02 02:00:00,69,Error Code 3: Excessive Vibration +2015-08-02 06:00:00,69,Error Code 4: Invalid Rotation Angle +2015-08-05 15:00:00,69,Error Code 2: High Pressure +2015-08-06 18:00:00,69,Error Code 5: Low Pressure +2015-08-17 06:00:00,69,Error Code 2: High Pressure +2015-08-17 06:00:00,69,Error Code 3: Excessive Vibration +2015-08-24 17:00:00,69,Error Code 3: Excessive Vibration +2015-09-01 06:00:00,69,Error Code 5: Low Pressure +2015-09-25 22:00:00,69,Error Code 4: Invalid Rotation Angle +2015-10-01 12:00:00,69,Error Code 4: Invalid Rotation Angle +2015-10-16 06:00:00,69,Error Code 5: Low Pressure +2015-10-18 19:00:00,69,Error Code 2: High Pressure +2015-10-24 14:00:00,69,Error Code 4: Invalid Rotation Angle +2015-10-30 12:00:00,69,Error Code 5: Low Pressure +2015-10-31 06:00:00,69,Error Code 4: Invalid Rotation Angle +2015-11-01 13:00:00,69,Error Code 4: Invalid Rotation Angle +2015-11-23 19:00:00,69,Error Code 2: High Pressure +2015-12-24 01:00:00,69,Error Code 2: High Pressure +2015-12-29 00:00:00,69,Error Code 2: High Pressure +2015-01-27 06:00:00,70,Error Code 1: Low Voltage +2015-02-08 20:00:00,70,Error Code 3: Excessive Vibration +2015-02-25 12:00:00,70,Error Code 3: Excessive Vibration +2015-03-01 17:00:00,70,Error Code 5: Low Pressure +2015-03-06 10:00:00,70,Error Code 2: High Pressure +2015-03-27 16:00:00,70,Error Code 3: Excessive Vibration +2015-04-20 12:00:00,70,Error Code 3: Excessive Vibration +2015-04-24 16:00:00,70,Error Code 5: Low Pressure +2015-05-05 20:00:00,70,Error Code 1: Low Voltage +2015-05-27 06:00:00,70,Error Code 2: High Pressure +2015-05-27 06:00:00,70,Error Code 3: Excessive Vibration +2015-05-29 06:00:00,70,Error Code 3: Excessive Vibration +2015-06-02 10:00:00,70,Error Code 1: Low Voltage +2015-06-21 02:00:00,70,Error Code 3: Excessive Vibration +2015-06-25 10:00:00,70,Error Code 4: Invalid Rotation Angle +2015-06-30 01:00:00,70,Error Code 1: Low Voltage +2015-07-11 06:00:00,70,Error Code 1: Low Voltage +2015-07-13 13:00:00,70,Error Code 2: High Pressure +2015-08-01 10:00:00,70,Error Code 3: Excessive Vibration +2015-08-08 03:00:00,70,Error Code 1: Low Voltage +2015-09-12 02:00:00,70,Error Code 3: Excessive Vibration +2015-09-12 06:00:00,70,Error Code 3: Excessive Vibration +2015-09-18 20:00:00,70,Error Code 4: Invalid Rotation Angle +2015-10-06 02:00:00,70,Error Code 3: Excessive Vibration +2015-10-18 21:00:00,70,Error Code 3: Excessive Vibration +2015-10-31 03:00:00,70,Error Code 4: Invalid Rotation Angle +2015-11-08 06:00:00,70,Error Code 1: Low Voltage +2015-11-10 15:00:00,70,Error Code 2: High Pressure +2015-11-11 05:00:00,70,Error Code 3: Excessive Vibration +2015-11-11 19:00:00,70,Error Code 4: Invalid Rotation Angle +2015-11-12 22:00:00,70,Error Code 1: Low Voltage +2015-11-21 09:00:00,70,Error Code 3: Excessive Vibration +2015-11-26 04:00:00,70,Error Code 2: High Pressure +2015-12-08 03:00:00,70,Error Code 3: Excessive Vibration +2015-12-08 06:00:00,70,Error Code 2: High Pressure +2015-12-08 06:00:00,70,Error Code 3: Excessive Vibration +2015-12-17 10:00:00,70,Error Code 2: High Pressure +2015-01-03 18:00:00,71,Error Code 3: Excessive Vibration +2015-01-10 06:00:00,71,Error Code 2: High Pressure +2015-01-10 06:00:00,71,Error Code 3: Excessive Vibration +2015-01-14 23:00:00,71,Error Code 4: Invalid Rotation Angle +2015-01-25 06:00:00,71,Error Code 4: Invalid Rotation Angle 
+2015-01-25 06:00:00,71,Error Code 5: Low Pressure +2015-01-27 16:00:00,71,Error Code 2: High Pressure +2015-02-09 06:00:00,71,Error Code 1: Low Voltage +2015-02-24 06:00:00,71,Error Code 2: High Pressure +2015-02-24 06:00:00,71,Error Code 3: Excessive Vibration +2015-03-05 11:00:00,71,Error Code 1: Low Voltage +2015-03-08 15:00:00,71,Error Code 1: Low Voltage +2015-03-11 06:00:00,71,Error Code 4: Invalid Rotation Angle +2015-03-16 16:00:00,71,Error Code 4: Invalid Rotation Angle +2015-03-26 06:00:00,71,Error Code 1: Low Voltage +2015-04-25 06:00:00,71,Error Code 2: High Pressure +2015-04-25 06:00:00,71,Error Code 3: Excessive Vibration +2015-05-05 23:00:00,71,Error Code 3: Excessive Vibration +2015-05-20 13:00:00,71,Error Code 3: Excessive Vibration +2015-06-07 17:00:00,71,Error Code 4: Invalid Rotation Angle +2015-06-09 06:00:00,71,Error Code 5: Low Pressure +2015-06-21 07:00:00,71,Error Code 1: Low Voltage +2015-06-24 06:00:00,71,Error Code 4: Invalid Rotation Angle +2015-06-28 13:00:00,71,Error Code 2: High Pressure +2015-07-08 07:00:00,71,Error Code 4: Invalid Rotation Angle +2015-07-12 02:00:00,71,Error Code 2: High Pressure +2015-07-24 06:00:00,71,Error Code 5: Low Pressure +2015-07-31 02:00:00,71,Error Code 2: High Pressure +2015-08-01 19:00:00,71,Error Code 2: High Pressure +2015-08-08 00:00:00,71,Error Code 2: High Pressure +2015-09-18 00:00:00,71,Error Code 5: Low Pressure +2015-09-22 06:00:00,71,Error Code 4: Invalid Rotation Angle +2015-10-07 06:00:00,71,Error Code 5: Low Pressure +2015-10-12 18:00:00,71,Error Code 4: Invalid Rotation Angle +2015-10-17 02:00:00,71,Error Code 4: Invalid Rotation Angle +2015-10-22 02:00:00,71,Error Code 2: High Pressure +2015-10-26 03:00:00,71,Error Code 3: Excessive Vibration +2015-10-27 19:00:00,71,Error Code 3: Excessive Vibration +2015-10-28 21:00:00,71,Error Code 1: Low Voltage +2015-11-17 09:00:00,71,Error Code 4: Invalid Rotation Angle +2015-11-21 10:00:00,71,Error Code 1: Low Voltage +2015-12-04 07:00:00,71,Error Code 4: Invalid Rotation Angle +2015-12-10 02:00:00,71,Error Code 1: Low Voltage +2015-12-27 18:00:00,71,Error Code 2: High Pressure +2015-12-29 04:00:00,71,Error Code 5: Low Pressure +2015-01-01 12:00:00,72,Error Code 2: High Pressure +2015-01-03 04:00:00,72,Error Code 1: Low Voltage +2015-01-17 17:00:00,72,Error Code 4: Invalid Rotation Angle +2015-02-15 03:00:00,72,Error Code 3: Excessive Vibration +2015-02-25 05:00:00,72,Error Code 1: Low Voltage +2015-02-25 07:00:00,72,Error Code 3: Excessive Vibration +2015-03-23 06:00:00,72,Error Code 1: Low Voltage +2015-04-21 03:00:00,72,Error Code 1: Low Voltage +2015-04-26 02:00:00,72,Error Code 3: Excessive Vibration +2015-04-29 03:00:00,72,Error Code 3: Excessive Vibration +2015-05-12 08:00:00,72,Error Code 2: High Pressure +2015-05-15 18:00:00,72,Error Code 2: High Pressure +2015-05-23 08:00:00,72,Error Code 3: Excessive Vibration +2015-05-24 12:00:00,72,Error Code 1: Low Voltage +2015-06-02 16:00:00,72,Error Code 1: Low Voltage +2015-06-14 19:00:00,72,Error Code 1: Low Voltage +2015-06-22 12:00:00,72,Error Code 1: Low Voltage +2015-07-01 08:00:00,72,Error Code 1: Low Voltage +2015-07-03 13:00:00,72,Error Code 1: Low Voltage +2015-07-07 15:00:00,72,Error Code 1: Low Voltage +2015-07-17 01:00:00,72,Error Code 1: Low Voltage +2015-08-03 08:00:00,72,Error Code 2: High Pressure +2015-08-06 13:00:00,72,Error Code 1: Low Voltage +2015-08-11 00:00:00,72,Error Code 2: High Pressure +2015-09-04 06:00:00,72,Error Code 1: Low Voltage +2015-09-09 12:00:00,72,Error Code 3: Excessive Vibration 
+2015-09-15 05:00:00,72,Error Code 5: Low Pressure +2015-09-15 07:00:00,72,Error Code 4: Invalid Rotation Angle +2015-09-19 04:00:00,72,Error Code 2: High Pressure +2015-09-25 05:00:00,72,Error Code 4: Invalid Rotation Angle +2015-09-27 16:00:00,72,Error Code 1: Low Voltage +2015-10-03 18:00:00,72,Error Code 2: High Pressure +2015-10-23 23:00:00,72,Error Code 1: Low Voltage +2015-10-24 20:00:00,72,Error Code 2: High Pressure +2015-11-03 03:00:00,72,Error Code 1: Low Voltage +2015-11-17 19:00:00,72,Error Code 4: Invalid Rotation Angle +2015-11-23 18:00:00,72,Error Code 5: Low Pressure +2015-11-27 05:00:00,72,Error Code 4: Invalid Rotation Angle +2015-11-27 12:00:00,72,Error Code 4: Invalid Rotation Angle +2015-12-04 06:00:00,72,Error Code 1: Low Voltage +2015-12-22 12:00:00,72,Error Code 5: Low Pressure +2015-12-22 23:00:00,72,Error Code 2: High Pressure +2015-01-01 06:00:00,73,Error Code 4: Invalid Rotation Angle +2015-01-04 13:00:00,73,Error Code 3: Excessive Vibration +2015-01-10 08:00:00,73,Error Code 2: High Pressure +2015-02-04 06:00:00,73,Error Code 1: Low Voltage +2015-02-11 22:00:00,73,Error Code 3: Excessive Vibration +2015-02-15 00:00:00,73,Error Code 5: Low Pressure +2015-02-15 06:00:00,73,Error Code 5: Low Pressure +2015-02-16 03:00:00,73,Error Code 2: High Pressure +2015-02-27 17:00:00,73,Error Code 2: High Pressure +2015-02-27 18:00:00,73,Error Code 5: Low Pressure +2015-03-03 17:00:00,73,Error Code 1: Low Voltage +2015-03-17 00:00:00,73,Error Code 3: Excessive Vibration +2015-03-22 04:00:00,73,Error Code 5: Low Pressure +2015-03-23 01:00:00,73,Error Code 1: Low Voltage +2015-04-01 06:00:00,73,Error Code 1: Low Voltage +2015-04-13 23:00:00,73,Error Code 4: Invalid Rotation Angle +2015-04-15 09:00:00,73,Error Code 4: Invalid Rotation Angle +2015-04-16 06:00:00,73,Error Code 5: Low Pressure +2015-04-22 12:00:00,73,Error Code 2: High Pressure +2015-05-01 06:00:00,73,Error Code 4: Invalid Rotation Angle +2015-05-01 16:00:00,73,Error Code 4: Invalid Rotation Angle +2015-05-16 06:00:00,73,Error Code 1: Low Voltage +2015-06-03 01:00:00,73,Error Code 1: Low Voltage +2015-06-06 01:00:00,73,Error Code 1: Low Voltage +2015-06-22 18:00:00,73,Error Code 1: Low Voltage +2015-07-02 16:00:00,73,Error Code 2: High Pressure +2015-07-13 11:00:00,73,Error Code 4: Invalid Rotation Angle +2015-07-15 06:00:00,73,Error Code 1: Low Voltage +2015-08-06 08:00:00,73,Error Code 1: Low Voltage +2015-08-22 19:00:00,73,Error Code 1: Low Voltage +2015-08-27 21:00:00,73,Error Code 3: Excessive Vibration +2015-08-29 06:00:00,73,Error Code 4: Invalid Rotation Angle +2015-08-31 08:00:00,73,Error Code 5: Low Pressure +2015-09-13 06:00:00,73,Error Code 5: Low Pressure +2015-10-18 01:00:00,73,Error Code 2: High Pressure +2015-11-12 06:00:00,73,Error Code 4: Invalid Rotation Angle +2015-11-27 06:00:00,73,Error Code 1: Low Voltage +2015-12-18 21:00:00,73,Error Code 2: High Pressure +2015-12-30 02:00:00,73,Error Code 1: Low Voltage +2015-01-15 15:00:00,74,Error Code 1: Low Voltage +2015-01-22 06:00:00,74,Error Code 1: Low Voltage +2015-03-01 21:00:00,74,Error Code 2: High Pressure +2015-03-08 06:00:00,74,Error Code 2: High Pressure +2015-03-08 06:00:00,74,Error Code 3: Excessive Vibration +2015-03-08 12:00:00,74,Error Code 3: Excessive Vibration +2015-03-15 08:00:00,74,Error Code 3: Excessive Vibration +2015-03-19 04:00:00,74,Error Code 5: Low Pressure +2015-03-31 04:00:00,74,Error Code 1: Low Voltage +2015-05-01 00:00:00,74,Error Code 1: Low Voltage +2015-05-07 06:00:00,74,Error Code 2: High Pressure +2015-05-07 
06:00:00,74,Error Code 3: Excessive Vibration +2015-05-09 05:00:00,74,Error Code 1: Low Voltage +2015-05-26 14:00:00,74,Error Code 4: Invalid Rotation Angle +2015-05-27 05:00:00,74,Error Code 3: Excessive Vibration +2015-05-29 01:00:00,74,Error Code 2: High Pressure +2015-06-13 21:00:00,74,Error Code 1: Low Voltage +2015-06-21 06:00:00,74,Error Code 2: High Pressure +2015-06-21 06:00:00,74,Error Code 3: Excessive Vibration +2015-06-26 16:00:00,74,Error Code 2: High Pressure +2015-07-02 20:00:00,74,Error Code 4: Invalid Rotation Angle +2015-07-13 17:00:00,74,Error Code 1: Low Voltage +2015-07-17 18:00:00,74,Error Code 1: Low Voltage +2015-07-21 06:00:00,74,Error Code 2: High Pressure +2015-07-21 06:00:00,74,Error Code 3: Excessive Vibration +2015-08-01 04:00:00,74,Error Code 2: High Pressure +2015-08-01 11:00:00,74,Error Code 5: Low Pressure +2015-08-07 09:00:00,74,Error Code 4: Invalid Rotation Angle +2015-08-30 04:00:00,74,Error Code 4: Invalid Rotation Angle +2015-10-19 06:00:00,74,Error Code 2: High Pressure +2015-10-19 06:00:00,74,Error Code 3: Excessive Vibration +2015-11-05 10:00:00,74,Error Code 2: High Pressure +2015-11-07 11:00:00,74,Error Code 1: Low Voltage +2015-11-28 00:00:00,74,Error Code 3: Excessive Vibration +2015-12-10 15:00:00,74,Error Code 4: Invalid Rotation Angle +2015-12-22 06:00:00,74,Error Code 3: Excessive Vibration +2015-01-01 12:00:00,75,Error Code 1: Low Voltage +2015-01-04 05:00:00,75,Error Code 1: Low Voltage +2015-01-06 06:00:00,75,Error Code 1: Low Voltage +2015-01-08 01:00:00,75,Error Code 1: Low Voltage +2015-01-10 09:00:00,75,Error Code 3: Excessive Vibration +2015-01-14 08:00:00,75,Error Code 4: Invalid Rotation Angle +2015-02-02 13:00:00,75,Error Code 4: Invalid Rotation Angle +2015-02-05 06:00:00,75,Error Code 5: Low Pressure +2015-02-25 21:00:00,75,Error Code 2: High Pressure +2015-03-05 16:00:00,75,Error Code 2: High Pressure +2015-03-21 13:00:00,75,Error Code 3: Excessive Vibration +2015-04-05 17:00:00,75,Error Code 1: Low Voltage +2015-04-05 19:00:00,75,Error Code 3: Excessive Vibration +2015-04-06 06:00:00,75,Error Code 1: Low Voltage +2015-05-05 12:00:00,75,Error Code 2: High Pressure +2015-05-06 06:00:00,75,Error Code 5: Low Pressure +2015-05-13 10:00:00,75,Error Code 4: Invalid Rotation Angle +2015-05-25 04:00:00,75,Error Code 3: Excessive Vibration +2015-06-05 06:00:00,75,Error Code 2: High Pressure +2015-06-05 06:00:00,75,Error Code 3: Excessive Vibration +2015-06-24 08:00:00,75,Error Code 1: Low Voltage +2015-07-20 06:00:00,75,Error Code 2: High Pressure +2015-07-20 06:00:00,75,Error Code 3: Excessive Vibration +2015-07-23 05:00:00,75,Error Code 1: Low Voltage +2015-08-12 17:00:00,75,Error Code 2: High Pressure +2015-08-15 06:00:00,75,Error Code 3: Excessive Vibration +2015-08-19 06:00:00,75,Error Code 2: High Pressure +2015-08-19 06:00:00,75,Error Code 3: Excessive Vibration +2015-09-03 06:00:00,75,Error Code 5: Low Pressure +2015-09-04 23:00:00,75,Error Code 1: Low Voltage +2015-09-08 19:00:00,75,Error Code 2: High Pressure +2015-09-12 14:00:00,75,Error Code 1: Low Voltage +2015-09-28 04:00:00,75,Error Code 3: Excessive Vibration +2015-10-18 06:00:00,75,Error Code 2: High Pressure +2015-10-18 06:00:00,75,Error Code 3: Excessive Vibration +2015-11-14 21:00:00,75,Error Code 2: High Pressure +2015-11-26 00:00:00,75,Error Code 3: Excessive Vibration +2015-12-19 05:00:00,75,Error Code 3: Excessive Vibration +2015-01-01 08:00:00,76,Error Code 5: Low Pressure +2015-01-17 06:00:00,76,Error Code 4: Invalid Rotation Angle +2015-01-19 
05:00:00,76,Error Code 1: Low Voltage +2015-01-26 16:00:00,76,Error Code 1: Low Voltage +2015-02-01 06:00:00,76,Error Code 1: Low Voltage +2015-02-09 17:00:00,76,Error Code 1: Low Voltage +2015-02-23 19:00:00,76,Error Code 3: Excessive Vibration +2015-03-03 06:00:00,76,Error Code 2: High Pressure +2015-03-03 06:00:00,76,Error Code 3: Excessive Vibration +2015-03-18 06:00:00,76,Error Code 1: Low Voltage +2015-03-23 16:00:00,76,Error Code 1: Low Voltage +2015-04-02 06:00:00,76,Error Code 4: Invalid Rotation Angle +2015-04-15 02:00:00,76,Error Code 1: Low Voltage +2015-05-02 06:00:00,76,Error Code 2: High Pressure +2015-05-02 06:00:00,76,Error Code 3: Excessive Vibration +2015-05-24 04:00:00,76,Error Code 1: Low Voltage +2015-05-31 00:00:00,76,Error Code 2: High Pressure +2015-05-31 03:00:00,76,Error Code 1: Low Voltage +2015-07-11 06:00:00,76,Error Code 2: High Pressure +2015-07-26 15:00:00,76,Error Code 1: Low Voltage +2015-08-08 22:00:00,76,Error Code 2: High Pressure +2015-08-15 06:00:00,76,Error Code 4: Invalid Rotation Angle +2015-09-02 04:00:00,76,Error Code 1: Low Voltage +2015-09-18 23:00:00,76,Error Code 2: High Pressure +2015-09-21 22:00:00,76,Error Code 1: Low Voltage +2015-09-29 06:00:00,76,Error Code 4: Invalid Rotation Angle +2015-10-08 02:00:00,76,Error Code 3: Excessive Vibration +2015-10-09 02:00:00,76,Error Code 2: High Pressure +2015-11-03 04:00:00,76,Error Code 1: Low Voltage +2015-11-10 01:00:00,76,Error Code 1: Low Voltage +2015-11-28 06:00:00,76,Error Code 4: Invalid Rotation Angle +2015-12-28 08:00:00,76,Error Code 4: Invalid Rotation Angle +2015-02-01 00:00:00,77,Error Code 1: Low Voltage +2015-02-15 03:00:00,77,Error Code 2: High Pressure +2015-02-25 17:00:00,77,Error Code 4: Invalid Rotation Angle +2015-03-06 14:00:00,77,Error Code 2: High Pressure +2015-04-21 15:00:00,77,Error Code 2: High Pressure +2015-04-23 07:00:00,77,Error Code 5: Low Pressure +2015-05-14 16:00:00,77,Error Code 4: Invalid Rotation Angle +2015-05-16 09:00:00,77,Error Code 2: High Pressure +2015-06-05 20:00:00,77,Error Code 1: Low Voltage +2015-08-05 16:00:00,77,Error Code 4: Invalid Rotation Angle +2015-08-07 20:00:00,77,Error Code 1: Low Voltage +2015-09-02 13:00:00,77,Error Code 1: Low Voltage +2015-09-03 13:00:00,77,Error Code 4: Invalid Rotation Angle +2015-09-03 20:00:00,77,Error Code 1: Low Voltage +2015-09-04 20:00:00,77,Error Code 2: High Pressure +2015-10-08 08:00:00,77,Error Code 1: Low Voltage +2015-10-16 20:00:00,77,Error Code 3: Excessive Vibration +2015-11-22 13:00:00,77,Error Code 1: Low Voltage +2015-12-03 07:00:00,77,Error Code 1: Low Voltage +2015-12-17 15:00:00,77,Error Code 1: Low Voltage +2015-12-20 08:00:00,77,Error Code 4: Invalid Rotation Angle +2015-12-22 07:00:00,77,Error Code 1: Low Voltage +2015-01-09 05:00:00,78,Error Code 3: Excessive Vibration +2015-01-12 08:00:00,78,Error Code 2: High Pressure +2015-01-13 02:00:00,78,Error Code 2: High Pressure +2015-01-13 08:00:00,78,Error Code 4: Invalid Rotation Angle +2015-01-13 21:00:00,78,Error Code 2: High Pressure +2015-01-17 08:00:00,78,Error Code 1: Low Voltage +2015-01-22 01:00:00,78,Error Code 1: Low Voltage +2015-02-04 04:00:00,78,Error Code 3: Excessive Vibration +2015-02-04 06:00:00,78,Error Code 5: Low Pressure +2015-02-11 15:00:00,78,Error Code 1: Low Voltage +2015-02-14 00:00:00,78,Error Code 5: Low Pressure +2015-02-15 12:00:00,78,Error Code 4: Invalid Rotation Angle +2015-02-19 06:00:00,78,Error Code 2: High Pressure +2015-02-19 06:00:00,78,Error Code 3: Excessive Vibration +2015-02-23 07:00:00,78,Error Code 
3: Excessive Vibration +2015-03-06 06:00:00,78,Error Code 1: Low Voltage +2015-03-09 10:00:00,78,Error Code 5: Low Pressure +2015-03-28 15:00:00,78,Error Code 1: Low Voltage +2015-03-28 23:00:00,78,Error Code 3: Excessive Vibration +2015-04-04 07:00:00,78,Error Code 3: Excessive Vibration +2015-04-13 17:00:00,78,Error Code 3: Excessive Vibration +2015-04-16 11:00:00,78,Error Code 2: High Pressure +2015-04-17 07:00:00,78,Error Code 2: High Pressure +2015-04-17 17:00:00,78,Error Code 2: High Pressure +2015-04-21 21:00:00,78,Error Code 1: Low Voltage +2015-04-26 16:00:00,78,Error Code 3: Excessive Vibration +2015-04-29 18:00:00,78,Error Code 5: Low Pressure +2015-05-28 13:00:00,78,Error Code 2: High Pressure +2015-05-28 21:00:00,78,Error Code 4: Invalid Rotation Angle +2015-06-03 01:00:00,78,Error Code 2: High Pressure +2015-06-04 06:00:00,78,Error Code 5: Low Pressure +2015-06-11 04:00:00,78,Error Code 5: Low Pressure +2015-06-25 23:00:00,78,Error Code 3: Excessive Vibration +2015-06-30 05:00:00,78,Error Code 3: Excessive Vibration +2015-07-02 21:00:00,78,Error Code 2: High Pressure +2015-07-04 06:00:00,78,Error Code 1: Low Voltage +2015-07-10 05:00:00,78,Error Code 3: Excessive Vibration +2015-07-19 06:00:00,78,Error Code 5: Low Pressure +2015-07-23 01:00:00,78,Error Code 2: High Pressure +2015-07-23 06:00:00,78,Error Code 2: High Pressure +2015-08-18 15:00:00,78,Error Code 1: Low Voltage +2015-09-02 06:00:00,78,Error Code 5: Low Pressure +2015-09-12 00:00:00,78,Error Code 1: Low Voltage +2015-10-03 11:00:00,78,Error Code 2: High Pressure +2015-10-04 06:00:00,78,Error Code 2: High Pressure +2015-10-06 07:00:00,78,Error Code 3: Excessive Vibration +2015-11-01 06:00:00,78,Error Code 5: Low Pressure +2015-11-03 04:00:00,78,Error Code 1: Low Voltage +2015-11-09 05:00:00,78,Error Code 4: Invalid Rotation Angle +2015-12-09 12:00:00,78,Error Code 3: Excessive Vibration +2015-12-10 03:00:00,78,Error Code 4: Invalid Rotation Angle +2015-12-14 10:00:00,78,Error Code 1: Low Voltage +2015-12-16 06:00:00,78,Error Code 2: High Pressure +2015-12-16 06:00:00,78,Error Code 3: Excessive Vibration +2015-01-08 15:00:00,79,Error Code 3: Excessive Vibration +2015-02-15 11:00:00,79,Error Code 1: Low Voltage +2015-03-02 06:00:00,79,Error Code 2: High Pressure +2015-03-02 06:00:00,79,Error Code 3: Excessive Vibration +2015-03-06 15:00:00,79,Error Code 3: Excessive Vibration +2015-03-17 06:00:00,79,Error Code 5: Low Pressure +2015-03-24 03:00:00,79,Error Code 3: Excessive Vibration +2015-04-01 06:00:00,79,Error Code 1: Low Voltage +2015-04-26 21:00:00,79,Error Code 1: Low Voltage +2015-04-27 09:00:00,79,Error Code 3: Excessive Vibration +2015-05-01 22:00:00,79,Error Code 1: Low Voltage +2015-05-05 21:00:00,79,Error Code 4: Invalid Rotation Angle +2015-05-11 09:00:00,79,Error Code 3: Excessive Vibration +2015-05-16 06:00:00,79,Error Code 5: Low Pressure +2015-05-17 04:00:00,79,Error Code 2: High Pressure +2015-06-07 01:00:00,79,Error Code 3: Excessive Vibration +2015-06-11 17:00:00,79,Error Code 4: Invalid Rotation Angle +2015-06-15 06:00:00,79,Error Code 2: High Pressure +2015-06-15 06:00:00,79,Error Code 3: Excessive Vibration +2015-06-30 06:00:00,79,Error Code 1: Low Voltage +2015-07-26 07:00:00,79,Error Code 2: High Pressure +2015-07-31 17:00:00,79,Error Code 5: Low Pressure +2015-08-09 17:00:00,79,Error Code 1: Low Voltage +2015-08-10 08:00:00,79,Error Code 2: High Pressure +2015-08-29 10:00:00,79,Error Code 2: High Pressure +2015-09-06 09:00:00,79,Error Code 2: High Pressure +2015-09-08 15:00:00,79,Error Code 
3: Excessive Vibration +2015-09-11 08:00:00,79,Error Code 2: High Pressure +2015-09-13 06:00:00,79,Error Code 2: High Pressure +2015-09-13 06:00:00,79,Error Code 3: Excessive Vibration +2015-09-14 00:00:00,79,Error Code 4: Invalid Rotation Angle +2015-09-26 01:00:00,79,Error Code 5: Low Pressure +2015-09-26 21:00:00,79,Error Code 2: High Pressure +2015-09-26 22:00:00,79,Error Code 3: Excessive Vibration +2015-09-28 06:00:00,79,Error Code 5: Low Pressure +2015-09-28 13:00:00,79,Error Code 1: Low Voltage +2015-10-02 14:00:00,79,Error Code 3: Excessive Vibration +2015-10-02 16:00:00,79,Error Code 1: Low Voltage +2015-10-04 22:00:00,79,Error Code 2: High Pressure +2015-10-14 21:00:00,79,Error Code 3: Excessive Vibration +2015-10-22 00:00:00,79,Error Code 4: Invalid Rotation Angle +2015-11-09 00:00:00,79,Error Code 1: Low Voltage +2015-11-11 18:00:00,79,Error Code 3: Excessive Vibration +2015-11-28 00:00:00,79,Error Code 3: Excessive Vibration +2015-12-12 06:00:00,79,Error Code 5: Low Pressure +2015-12-16 20:00:00,79,Error Code 1: Low Voltage +2015-12-17 00:00:00,79,Error Code 1: Low Voltage +2015-12-17 05:00:00,79,Error Code 2: High Pressure +2015-12-25 16:00:00,79,Error Code 4: Invalid Rotation Angle +2015-12-30 04:00:00,79,Error Code 2: High Pressure +2015-01-12 02:00:00,80,Error Code 4: Invalid Rotation Angle +2015-01-24 21:00:00,80,Error Code 5: Low Pressure +2015-02-05 15:00:00,80,Error Code 1: Low Voltage +2015-02-09 17:00:00,80,Error Code 3: Excessive Vibration +2015-02-21 13:00:00,80,Error Code 3: Excessive Vibration +2015-03-14 15:00:00,80,Error Code 3: Excessive Vibration +2015-03-16 12:00:00,80,Error Code 4: Invalid Rotation Angle +2015-03-20 12:00:00,80,Error Code 1: Low Voltage +2015-04-02 07:00:00,80,Error Code 4: Invalid Rotation Angle +2015-04-07 15:00:00,80,Error Code 1: Low Voltage +2015-04-12 16:00:00,80,Error Code 3: Excessive Vibration +2015-04-23 04:00:00,80,Error Code 3: Excessive Vibration +2015-05-02 09:00:00,80,Error Code 1: Low Voltage +2015-05-13 02:00:00,80,Error Code 4: Invalid Rotation Angle +2015-05-14 12:00:00,80,Error Code 4: Invalid Rotation Angle +2015-05-14 15:00:00,80,Error Code 2: High Pressure +2015-06-13 17:00:00,80,Error Code 1: Low Voltage +2015-07-07 04:00:00,80,Error Code 2: High Pressure +2015-07-11 06:00:00,80,Error Code 2: High Pressure +2015-07-11 06:00:00,80,Error Code 3: Excessive Vibration +2015-07-17 08:00:00,80,Error Code 3: Excessive Vibration +2015-08-04 07:00:00,80,Error Code 4: Invalid Rotation Angle +2015-08-06 19:00:00,80,Error Code 5: Low Pressure +2015-08-11 15:00:00,80,Error Code 4: Invalid Rotation Angle +2015-08-12 09:00:00,80,Error Code 2: High Pressure +2015-08-14 10:00:00,80,Error Code 2: High Pressure +2015-08-26 08:00:00,80,Error Code 2: High Pressure +2015-09-09 06:00:00,80,Error Code 2: High Pressure +2015-09-09 06:00:00,80,Error Code 3: Excessive Vibration +2015-09-13 05:00:00,80,Error Code 2: High Pressure +2015-09-22 15:00:00,80,Error Code 1: Low Voltage +2015-09-30 15:00:00,80,Error Code 1: Low Voltage +2015-10-07 07:00:00,80,Error Code 4: Invalid Rotation Angle +2015-10-17 00:00:00,80,Error Code 2: High Pressure +2015-10-24 21:00:00,80,Error Code 1: Low Voltage +2015-11-08 06:00:00,80,Error Code 2: High Pressure +2015-11-08 06:00:00,80,Error Code 3: Excessive Vibration +2015-11-14 08:00:00,80,Error Code 4: Invalid Rotation Angle +2015-11-22 16:00:00,80,Error Code 4: Invalid Rotation Angle +2015-11-28 08:00:00,80,Error Code 4: Invalid Rotation Angle +2015-12-08 06:00:00,80,Error Code 2: High Pressure +2015-12-08 
06:00:00,80,Error Code 3: Excessive Vibration +2015-12-20 15:00:00,80,Error Code 1: Low Voltage +2015-12-31 15:00:00,80,Error Code 3: Excessive Vibration +2015-01-01 06:00:00,81,Error Code 1: Low Voltage +2015-01-16 06:00:00,81,Error Code 2: High Pressure +2015-01-16 06:00:00,81,Error Code 3: Excessive Vibration +2015-01-19 18:00:00,81,Error Code 4: Invalid Rotation Angle +2015-01-24 23:00:00,81,Error Code 4: Invalid Rotation Angle +2015-02-15 06:00:00,81,Error Code 1: Low Voltage +2015-02-17 12:00:00,81,Error Code 4: Invalid Rotation Angle +2015-03-02 00:00:00,81,Error Code 1: Low Voltage +2015-03-02 06:00:00,81,Error Code 2: High Pressure +2015-03-02 06:00:00,81,Error Code 3: Excessive Vibration +2015-03-09 19:00:00,81,Error Code 1: Low Voltage +2015-04-09 23:00:00,81,Error Code 1: Low Voltage +2015-04-16 06:00:00,81,Error Code 2: High Pressure +2015-04-16 06:00:00,81,Error Code 3: Excessive Vibration +2015-04-24 08:00:00,81,Error Code 1: Low Voltage +2015-04-24 21:00:00,81,Error Code 2: High Pressure +2015-05-05 11:00:00,81,Error Code 4: Invalid Rotation Angle +2015-05-05 14:00:00,81,Error Code 1: Low Voltage +2015-05-07 21:00:00,81,Error Code 3: Excessive Vibration +2015-05-09 20:00:00,81,Error Code 5: Low Pressure +2015-05-19 14:00:00,81,Error Code 1: Low Voltage +2015-05-19 19:00:00,81,Error Code 1: Low Voltage +2015-05-23 02:00:00,81,Error Code 2: High Pressure +2015-06-15 06:00:00,81,Error Code 2: High Pressure +2015-06-15 06:00:00,81,Error Code 3: Excessive Vibration +2015-06-22 20:00:00,81,Error Code 4: Invalid Rotation Angle +2015-07-29 00:00:00,81,Error Code 1: Low Voltage +2015-08-05 18:00:00,81,Error Code 1: Low Voltage +2015-08-19 06:00:00,81,Error Code 2: High Pressure +2015-08-29 06:00:00,81,Error Code 2: High Pressure +2015-09-18 11:00:00,81,Error Code 5: Low Pressure +2015-09-21 01:00:00,81,Error Code 1: Low Voltage +2015-09-28 08:00:00,81,Error Code 1: Low Voltage +2015-10-07 01:00:00,81,Error Code 1: Low Voltage +2015-10-07 16:00:00,81,Error Code 4: Invalid Rotation Angle +2015-10-13 06:00:00,81,Error Code 2: High Pressure +2015-10-13 06:00:00,81,Error Code 3: Excessive Vibration +2015-10-14 08:00:00,81,Error Code 1: Low Voltage +2015-10-14 22:00:00,81,Error Code 4: Invalid Rotation Angle +2015-10-16 22:00:00,81,Error Code 1: Low Voltage +2015-10-27 21:00:00,81,Error Code 2: High Pressure +2015-11-08 13:00:00,81,Error Code 2: High Pressure +2015-11-16 13:00:00,81,Error Code 2: High Pressure +2015-12-09 01:00:00,81,Error Code 2: High Pressure +2015-12-11 02:00:00,81,Error Code 3: Excessive Vibration +2015-12-12 06:00:00,81,Error Code 2: High Pressure +2015-12-12 06:00:00,81,Error Code 3: Excessive Vibration +2015-12-14 23:00:00,81,Error Code 1: Low Voltage +2015-12-24 02:00:00,81,Error Code 1: Low Voltage +2015-12-24 17:00:00,81,Error Code 1: Low Voltage +2015-12-27 20:00:00,81,Error Code 4: Invalid Rotation Angle +2015-01-05 03:00:00,82,Error Code 2: High Pressure +2015-01-10 05:00:00,82,Error Code 4: Invalid Rotation Angle +2015-01-21 00:00:00,82,Error Code 2: High Pressure +2015-01-27 06:00:00,82,Error Code 1: Low Voltage +2015-01-27 11:00:00,82,Error Code 3: Excessive Vibration +2015-02-05 21:00:00,82,Error Code 1: Low Voltage +2015-02-23 18:00:00,82,Error Code 5: Low Pressure +2015-02-25 17:00:00,82,Error Code 2: High Pressure +2015-03-06 06:00:00,82,Error Code 5: Low Pressure +2015-03-08 23:00:00,82,Error Code 3: Excessive Vibration +2015-03-19 06:00:00,82,Error Code 1: Low Voltage +2015-03-22 01:00:00,82,Error Code 4: Invalid Rotation Angle +2015-03-24 
02:00:00,82,Error Code 2: High Pressure +2015-03-25 13:00:00,82,Error Code 1: Low Voltage +2015-03-28 06:00:00,82,Error Code 1: Low Voltage +2015-04-13 19:00:00,82,Error Code 1: Low Voltage +2015-04-23 14:00:00,82,Error Code 2: High Pressure +2015-04-24 00:00:00,82,Error Code 4: Invalid Rotation Angle +2015-05-12 06:00:00,82,Error Code 1: Low Voltage +2015-05-18 00:00:00,82,Error Code 4: Invalid Rotation Angle +2015-05-20 15:00:00,82,Error Code 1: Low Voltage +2015-06-19 06:00:00,82,Error Code 3: Excessive Vibration +2015-07-11 06:00:00,82,Error Code 4: Invalid Rotation Angle +2015-08-03 07:00:00,82,Error Code 2: High Pressure +2015-08-13 23:00:00,82,Error Code 1: Low Voltage +2015-09-09 13:00:00,82,Error Code 4: Invalid Rotation Angle +2015-10-18 06:00:00,82,Error Code 2: High Pressure +2015-10-29 14:00:00,82,Error Code 1: Low Voltage +2015-11-25 14:00:00,82,Error Code 3: Excessive Vibration +2015-12-06 14:00:00,82,Error Code 1: Low Voltage +2015-12-08 06:00:00,82,Error Code 2: High Pressure +2015-12-08 06:00:00,82,Error Code 3: Excessive Vibration +2015-12-27 15:00:00,82,Error Code 4: Invalid Rotation Angle +2015-01-15 06:00:00,83,Error Code 5: Low Pressure +2015-01-30 12:00:00,83,Error Code 2: High Pressure +2015-02-12 11:00:00,83,Error Code 1: Low Voltage +2015-03-09 17:00:00,83,Error Code 1: Low Voltage +2015-03-16 06:00:00,83,Error Code 5: Low Pressure +2015-04-09 06:00:00,83,Error Code 1: Low Voltage +2015-04-19 22:00:00,83,Error Code 3: Excessive Vibration +2015-05-02 16:00:00,83,Error Code 2: High Pressure +2015-05-15 06:00:00,83,Error Code 5: Low Pressure +2015-05-16 03:00:00,83,Error Code 3: Excessive Vibration +2015-05-19 17:00:00,83,Error Code 2: High Pressure +2015-05-25 08:00:00,83,Error Code 5: Low Pressure +2015-05-30 06:00:00,83,Error Code 2: High Pressure +2015-05-30 06:00:00,83,Error Code 3: Excessive Vibration +2015-05-31 22:00:00,83,Error Code 5: Low Pressure +2015-06-28 12:00:00,83,Error Code 4: Invalid Rotation Angle +2015-07-12 14:00:00,83,Error Code 3: Excessive Vibration +2015-07-14 06:00:00,83,Error Code 1: Low Voltage +2015-07-14 06:00:00,83,Error Code 5: Low Pressure +2015-07-16 17:00:00,83,Error Code 2: High Pressure +2015-07-22 04:00:00,83,Error Code 4: Invalid Rotation Angle +2015-07-29 10:00:00,83,Error Code 2: High Pressure +2015-08-13 06:00:00,83,Error Code 2: High Pressure +2015-08-13 06:00:00,83,Error Code 3: Excessive Vibration +2015-08-20 15:00:00,83,Error Code 5: Low Pressure +2015-08-21 23:00:00,83,Error Code 2: High Pressure +2015-08-26 17:00:00,83,Error Code 4: Invalid Rotation Angle +2015-08-28 06:00:00,83,Error Code 1: Low Voltage +2015-08-28 20:00:00,83,Error Code 1: Low Voltage +2015-09-12 06:00:00,83,Error Code 2: High Pressure +2015-09-12 06:00:00,83,Error Code 3: Excessive Vibration +2015-09-16 02:00:00,83,Error Code 3: Excessive Vibration +2015-09-29 04:00:00,83,Error Code 3: Excessive Vibration +2015-10-12 06:00:00,83,Error Code 1: Low Voltage +2015-10-16 18:00:00,83,Error Code 3: Excessive Vibration +2015-11-05 04:00:00,83,Error Code 3: Excessive Vibration +2015-11-06 19:00:00,83,Error Code 5: Low Pressure +2015-11-26 06:00:00,83,Error Code 5: Low Pressure +2015-11-28 04:00:00,83,Error Code 2: High Pressure +2015-12-16 08:00:00,83,Error Code 3: Excessive Vibration +2015-12-21 15:00:00,83,Error Code 3: Excessive Vibration +2015-12-26 06:00:00,83,Error Code 1: Low Voltage +2015-12-26 06:00:00,83,Error Code 2: High Pressure +2015-12-26 06:00:00,83,Error Code 3: Excessive Vibration +2015-01-04 06:00:00,84,Error Code 2: High Pressure 
+2015-01-04 06:00:00,84,Error Code 3: Excessive Vibration +2015-01-09 13:00:00,84,Error Code 4: Invalid Rotation Angle +2015-01-19 09:00:00,84,Error Code 1: Low Voltage +2015-02-03 06:00:00,84,Error Code 1: Low Voltage +2015-02-03 06:00:00,84,Error Code 2: High Pressure +2015-02-03 06:00:00,84,Error Code 3: Excessive Vibration +2015-02-07 21:00:00,84,Error Code 1: Low Voltage +2015-02-18 06:00:00,84,Error Code 1: Low Voltage +2015-02-18 20:00:00,84,Error Code 2: High Pressure +2015-03-15 08:00:00,84,Error Code 2: High Pressure +2015-04-04 06:00:00,84,Error Code 2: High Pressure +2015-04-04 06:00:00,84,Error Code 3: Excessive Vibration +2015-04-09 02:00:00,84,Error Code 1: Low Voltage +2015-04-10 13:00:00,84,Error Code 4: Invalid Rotation Angle +2015-04-13 20:00:00,84,Error Code 2: High Pressure +2015-04-21 07:00:00,84,Error Code 1: Low Voltage +2015-06-04 06:00:00,84,Error Code 2: High Pressure +2015-06-09 04:00:00,84,Error Code 5: Low Pressure +2015-06-12 02:00:00,84,Error Code 3: Excessive Vibration +2015-06-12 02:00:00,84,Error Code 1: Low Voltage +2015-06-13 13:00:00,84,Error Code 4: Invalid Rotation Angle +2015-07-02 00:00:00,84,Error Code 1: Low Voltage +2015-07-03 06:00:00,84,Error Code 1: Low Voltage +2015-07-06 13:00:00,84,Error Code 1: Low Voltage +2015-07-09 02:00:00,84,Error Code 2: High Pressure +2015-07-19 02:00:00,84,Error Code 5: Low Pressure +2015-07-29 13:00:00,84,Error Code 1: Low Voltage +2015-08-31 13:00:00,84,Error Code 3: Excessive Vibration +2015-09-01 06:00:00,84,Error Code 2: High Pressure +2015-09-01 06:00:00,84,Error Code 3: Excessive Vibration +2015-09-07 17:00:00,84,Error Code 3: Excessive Vibration +2015-09-08 08:00:00,84,Error Code 3: Excessive Vibration +2015-09-15 18:00:00,84,Error Code 3: Excessive Vibration +2015-09-29 04:00:00,84,Error Code 3: Excessive Vibration +2015-10-01 06:00:00,84,Error Code 1: Low Voltage +2015-10-04 05:00:00,84,Error Code 1: Low Voltage +2015-10-06 20:00:00,84,Error Code 3: Excessive Vibration +2015-10-17 02:00:00,84,Error Code 1: Low Voltage +2015-10-17 19:00:00,84,Error Code 5: Low Pressure +2015-10-26 09:00:00,84,Error Code 1: Low Voltage +2015-10-28 01:00:00,84,Error Code 2: High Pressure +2015-12-30 03:00:00,84,Error Code 1: Low Voltage +2015-01-14 06:00:00,85,Error Code 2: High Pressure +2015-01-14 06:00:00,85,Error Code 3: Excessive Vibration +2015-01-15 06:00:00,85,Error Code 3: Excessive Vibration +2015-01-23 02:00:00,85,Error Code 2: High Pressure +2015-01-29 06:00:00,85,Error Code 4: Invalid Rotation Angle +2015-02-08 07:00:00,85,Error Code 1: Low Voltage +2015-02-13 06:00:00,85,Error Code 5: Low Pressure +2015-02-15 02:00:00,85,Error Code 2: High Pressure +2015-02-15 02:00:00,85,Error Code 1: Low Voltage +2015-02-28 20:00:00,85,Error Code 5: Low Pressure +2015-03-15 05:00:00,85,Error Code 3: Excessive Vibration +2015-03-25 01:00:00,85,Error Code 2: High Pressure +2015-05-05 16:00:00,85,Error Code 3: Excessive Vibration +2015-05-29 06:00:00,85,Error Code 4: Invalid Rotation Angle +2015-05-30 16:00:00,85,Error Code 1: Low Voltage +2015-05-31 17:00:00,85,Error Code 3: Excessive Vibration +2015-06-06 05:00:00,85,Error Code 4: Invalid Rotation Angle +2015-06-21 01:00:00,85,Error Code 4: Invalid Rotation Angle +2015-06-28 06:00:00,85,Error Code 1: Low Voltage +2015-07-07 11:00:00,85,Error Code 4: Invalid Rotation Angle +2015-07-08 18:00:00,85,Error Code 3: Excessive Vibration +2015-07-13 07:00:00,85,Error Code 3: Excessive Vibration +2015-07-19 00:00:00,85,Error Code 2: High Pressure +2015-08-05 00:00:00,85,Error Code 2: 
High Pressure +2015-08-27 06:00:00,85,Error Code 5: Low Pressure +2015-08-31 09:00:00,85,Error Code 2: High Pressure +2015-09-06 15:00:00,85,Error Code 5: Low Pressure +2015-09-07 04:00:00,85,Error Code 5: Low Pressure +2015-09-11 06:00:00,85,Error Code 4: Invalid Rotation Angle +2015-09-13 15:00:00,85,Error Code 1: Low Voltage +2015-10-11 06:00:00,85,Error Code 2: High Pressure +2015-10-11 06:00:00,85,Error Code 3: Excessive Vibration +2015-10-17 18:00:00,85,Error Code 4: Invalid Rotation Angle +2015-10-24 02:00:00,85,Error Code 2: High Pressure +2015-11-10 06:00:00,85,Error Code 2: High Pressure +2015-11-10 06:00:00,85,Error Code 3: Excessive Vibration +2015-12-10 06:00:00,85,Error Code 4: Invalid Rotation Angle +2015-12-10 22:00:00,85,Error Code 2: High Pressure +2015-12-13 11:00:00,85,Error Code 1: Low Voltage +2015-12-25 06:00:00,85,Error Code 5: Low Pressure +2015-01-22 06:00:00,86,Error Code 2: High Pressure +2015-01-22 06:00:00,86,Error Code 3: Excessive Vibration +2015-02-10 05:00:00,86,Error Code 3: Excessive Vibration +2015-02-11 00:00:00,86,Error Code 3: Excessive Vibration +2015-03-02 16:00:00,86,Error Code 1: Low Voltage +2015-04-07 03:00:00,86,Error Code 4: Invalid Rotation Angle +2015-04-24 13:00:00,86,Error Code 3: Excessive Vibration +2015-05-11 00:00:00,86,Error Code 1: Low Voltage +2015-05-30 07:00:00,86,Error Code 1: Low Voltage +2015-06-17 07:00:00,86,Error Code 4: Invalid Rotation Angle +2015-06-26 12:00:00,86,Error Code 4: Invalid Rotation Angle +2015-07-14 06:00:00,86,Error Code 5: Low Pressure +2015-07-15 22:00:00,86,Error Code 3: Excessive Vibration +2015-07-27 23:00:00,86,Error Code 4: Invalid Rotation Angle +2015-08-09 00:00:00,86,Error Code 1: Low Voltage +2015-08-14 01:00:00,86,Error Code 2: High Pressure +2015-08-17 23:00:00,86,Error Code 1: Low Voltage +2015-08-20 23:00:00,86,Error Code 3: Excessive Vibration +2015-08-29 13:00:00,86,Error Code 1: Low Voltage +2015-09-04 06:00:00,86,Error Code 1: Low Voltage +2015-09-05 17:00:00,86,Error Code 2: High Pressure +2015-10-05 18:00:00,86,Error Code 3: Excessive Vibration +2015-10-19 16:00:00,86,Error Code 4: Invalid Rotation Angle +2015-11-18 06:00:00,86,Error Code 2: High Pressure +2015-11-18 06:00:00,86,Error Code 3: Excessive Vibration +2015-11-28 12:00:00,86,Error Code 2: High Pressure +2015-12-05 22:00:00,86,Error Code 1: Low Voltage +2015-01-01 20:00:00,87,Error Code 1: Low Voltage +2015-01-05 01:00:00,87,Error Code 3: Excessive Vibration +2015-01-16 07:00:00,87,Error Code 3: Excessive Vibration +2015-01-27 21:00:00,87,Error Code 5: Low Pressure +2015-02-01 23:00:00,87,Error Code 1: Low Voltage +2015-02-16 09:00:00,87,Error Code 4: Invalid Rotation Angle +2015-02-24 15:00:00,87,Error Code 4: Invalid Rotation Angle +2015-02-25 18:00:00,87,Error Code 4: Invalid Rotation Angle +2015-03-07 15:00:00,87,Error Code 1: Low Voltage +2015-03-08 20:00:00,87,Error Code 4: Invalid Rotation Angle +2015-03-12 06:00:00,87,Error Code 4: Invalid Rotation Angle +2015-03-13 03:00:00,87,Error Code 3: Excessive Vibration +2015-03-14 07:00:00,87,Error Code 3: Excessive Vibration +2015-03-27 06:00:00,87,Error Code 1: Low Voltage +2015-04-26 06:00:00,87,Error Code 2: High Pressure +2015-04-26 06:00:00,87,Error Code 3: Excessive Vibration +2015-04-26 14:00:00,87,Error Code 4: Invalid Rotation Angle +2015-06-04 03:00:00,87,Error Code 1: Low Voltage +2015-06-13 13:00:00,87,Error Code 4: Invalid Rotation Angle +2015-06-16 08:00:00,87,Error Code 1: Low Voltage +2015-06-17 10:00:00,87,Error Code 2: High Pressure +2015-06-17 
18:00:00,87,Error Code 3: Excessive Vibration +2015-06-25 06:00:00,87,Error Code 4: Invalid Rotation Angle +2015-06-25 07:00:00,87,Error Code 2: High Pressure +2015-07-05 09:00:00,87,Error Code 1: Low Voltage +2015-07-18 12:00:00,87,Error Code 1: Low Voltage +2015-07-31 07:00:00,87,Error Code 2: High Pressure +2015-08-05 05:00:00,87,Error Code 1: Low Voltage +2015-08-09 06:00:00,87,Error Code 1: Low Voltage +2015-08-09 18:00:00,87,Error Code 1: Low Voltage +2015-08-13 01:00:00,87,Error Code 3: Excessive Vibration +2015-08-24 06:00:00,87,Error Code 4: Invalid Rotation Angle +2015-09-24 14:00:00,87,Error Code 4: Invalid Rotation Angle +2015-10-07 17:00:00,87,Error Code 1: Low Voltage +2015-10-09 21:00:00,87,Error Code 1: Low Voltage +2015-10-23 06:00:00,87,Error Code 4: Invalid Rotation Angle +2015-10-23 18:00:00,87,Error Code 2: High Pressure +2015-12-06 21:00:00,87,Error Code 1: Low Voltage +2015-12-22 06:00:00,87,Error Code 4: Invalid Rotation Angle +2015-01-06 01:00:00,88,Error Code 2: High Pressure +2015-01-18 06:00:00,88,Error Code 5: Low Pressure +2015-01-20 07:00:00,88,Error Code 5: Low Pressure +2015-01-23 13:00:00,88,Error Code 4: Invalid Rotation Angle +2015-01-30 02:00:00,88,Error Code 4: Invalid Rotation Angle +2015-01-31 04:00:00,88,Error Code 2: High Pressure +2015-02-02 06:00:00,88,Error Code 2: High Pressure +2015-02-02 06:00:00,88,Error Code 3: Excessive Vibration +2015-02-03 00:00:00,88,Error Code 4: Invalid Rotation Angle +2015-02-03 17:00:00,88,Error Code 4: Invalid Rotation Angle +2015-03-04 06:00:00,88,Error Code 2: High Pressure +2015-03-04 06:00:00,88,Error Code 3: Excessive Vibration +2015-03-18 05:00:00,88,Error Code 4: Invalid Rotation Angle +2015-04-03 06:00:00,88,Error Code 2: High Pressure +2015-04-03 06:00:00,88,Error Code 3: Excessive Vibration +2015-04-09 06:00:00,88,Error Code 2: High Pressure +2015-04-10 21:00:00,88,Error Code 2: High Pressure +2015-04-19 11:00:00,88,Error Code 4: Invalid Rotation Angle +2015-04-23 04:00:00,88,Error Code 1: Low Voltage +2015-05-06 22:00:00,88,Error Code 3: Excessive Vibration +2015-05-20 05:00:00,88,Error Code 3: Excessive Vibration +2015-06-09 09:00:00,88,Error Code 2: High Pressure +2015-06-24 14:00:00,88,Error Code 2: High Pressure +2015-07-02 06:00:00,88,Error Code 1: Low Voltage +2015-07-10 18:00:00,88,Error Code 2: High Pressure +2015-07-11 17:00:00,88,Error Code 3: Excessive Vibration +2015-07-17 01:00:00,88,Error Code 3: Excessive Vibration +2015-07-26 20:00:00,88,Error Code 1: Low Voltage +2015-08-09 11:00:00,88,Error Code 2: High Pressure +2015-08-30 10:00:00,88,Error Code 3: Excessive Vibration +2015-08-31 06:00:00,88,Error Code 2: High Pressure +2015-08-31 06:00:00,88,Error Code 3: Excessive Vibration +2015-08-31 06:00:00,88,Error Code 5: Low Pressure +2015-09-03 22:00:00,88,Error Code 2: High Pressure +2015-09-06 18:00:00,88,Error Code 1: Low Voltage +2015-09-23 17:00:00,88,Error Code 3: Excessive Vibration +2015-10-03 19:00:00,88,Error Code 2: High Pressure +2015-10-12 08:00:00,88,Error Code 2: High Pressure +2015-11-07 14:00:00,88,Error Code 1: Low Voltage +2015-11-17 06:00:00,88,Error Code 1: Low Voltage +2015-11-18 15:00:00,88,Error Code 4: Invalid Rotation Angle +2015-11-18 16:00:00,88,Error Code 1: Low Voltage +2015-11-19 12:00:00,88,Error Code 1: Low Voltage +2015-11-29 06:00:00,88,Error Code 2: High Pressure +2015-11-29 06:00:00,88,Error Code 3: Excessive Vibration +2015-11-29 06:00:00,88,Error Code 5: Low Pressure +2015-12-03 05:00:00,88,Error Code 4: Invalid Rotation Angle +2015-12-05 
07:00:00,88,Error Code 3: Excessive Vibration +2015-12-28 19:00:00,88,Error Code 2: High Pressure +2015-12-29 06:00:00,88,Error Code 1: Low Voltage +2015-01-28 11:00:00,89,Error Code 3: Excessive Vibration +2015-02-06 06:00:00,89,Error Code 1: Low Voltage +2015-03-23 06:00:00,89,Error Code 5: Low Pressure +2015-04-08 00:00:00,89,Error Code 3: Excessive Vibration +2015-04-23 09:00:00,89,Error Code 2: High Pressure +2015-05-01 05:00:00,89,Error Code 2: High Pressure +2015-05-22 06:00:00,89,Error Code 5: Low Pressure +2015-05-27 15:00:00,89,Error Code 3: Excessive Vibration +2015-06-01 16:00:00,89,Error Code 3: Excessive Vibration +2015-06-12 15:00:00,89,Error Code 2: High Pressure +2015-06-17 11:00:00,89,Error Code 4: Invalid Rotation Angle +2015-06-21 06:00:00,89,Error Code 1: Low Voltage +2015-07-01 14:00:00,89,Error Code 4: Invalid Rotation Angle +2015-07-08 23:00:00,89,Error Code 3: Excessive Vibration +2015-07-11 12:00:00,89,Error Code 2: High Pressure +2015-07-21 06:00:00,89,Error Code 2: High Pressure +2015-07-21 06:00:00,89,Error Code 3: Excessive Vibration +2015-08-02 12:00:00,89,Error Code 5: Low Pressure +2015-08-06 14:00:00,89,Error Code 4: Invalid Rotation Angle +2015-08-15 19:00:00,89,Error Code 3: Excessive Vibration +2015-08-20 06:00:00,89,Error Code 2: High Pressure +2015-08-20 06:00:00,89,Error Code 3: Excessive Vibration +2015-08-22 12:00:00,89,Error Code 3: Excessive Vibration +2015-09-02 00:00:00,89,Error Code 2: High Pressure +2015-09-02 09:00:00,89,Error Code 4: Invalid Rotation Angle +2015-11-01 11:00:00,89,Error Code 5: Low Pressure +2015-11-01 18:00:00,89,Error Code 3: Excessive Vibration +2015-11-18 06:00:00,89,Error Code 2: High Pressure +2015-11-18 06:00:00,89,Error Code 3: Excessive Vibration +2015-11-27 00:00:00,89,Error Code 2: High Pressure +2015-12-24 04:00:00,89,Error Code 4: Invalid Rotation Angle +2015-01-05 11:00:00,90,Error Code 2: High Pressure +2015-01-10 19:00:00,90,Error Code 1: Low Voltage +2015-01-14 21:00:00,90,Error Code 1: Low Voltage +2015-01-19 06:00:00,90,Error Code 2: High Pressure +2015-01-19 06:00:00,90,Error Code 3: Excessive Vibration +2015-01-29 07:00:00,90,Error Code 4: Invalid Rotation Angle +2015-02-01 00:00:00,90,Error Code 4: Invalid Rotation Angle +2015-02-10 07:00:00,90,Error Code 1: Low Voltage +2015-02-13 18:00:00,90,Error Code 1: Low Voltage +2015-02-18 06:00:00,90,Error Code 2: High Pressure +2015-02-18 06:00:00,90,Error Code 3: Excessive Vibration +2015-03-20 06:00:00,90,Error Code 4: Invalid Rotation Angle +2015-03-24 04:00:00,90,Error Code 1: Low Voltage +2015-04-04 06:00:00,90,Error Code 1: Low Voltage +2015-04-20 12:00:00,90,Error Code 1: Low Voltage +2015-05-19 06:00:00,90,Error Code 2: High Pressure +2015-05-19 06:00:00,90,Error Code 3: Excessive Vibration +2015-05-22 00:00:00,90,Error Code 1: Low Voltage +2015-06-03 06:00:00,90,Error Code 4: Invalid Rotation Angle +2015-06-11 10:00:00,90,Error Code 1: Low Voltage +2015-06-14 02:00:00,90,Error Code 2: High Pressure +2015-07-02 10:00:00,90,Error Code 4: Invalid Rotation Angle +2015-07-07 03:00:00,90,Error Code 1: Low Voltage +2015-08-17 06:00:00,90,Error Code 1: Low Voltage +2015-08-17 06:00:00,90,Error Code 4: Invalid Rotation Angle +2015-08-24 23:00:00,90,Error Code 1: Low Voltage +2015-08-29 21:00:00,90,Error Code 1: Low Voltage +2015-09-08 08:00:00,90,Error Code 1: Low Voltage +2015-09-19 07:00:00,90,Error Code 4: Invalid Rotation Angle +2015-09-20 03:00:00,90,Error Code 1: Low Voltage +2015-10-01 04:00:00,90,Error Code 4: Invalid Rotation Angle +2015-10-01 
06:00:00,90,Error Code 1: Low Voltage +2015-10-31 06:00:00,90,Error Code 4: Invalid Rotation Angle +2015-11-03 06:00:00,90,Error Code 1: Low Voltage +2015-11-17 09:00:00,90,Error Code 2: High Pressure +2015-11-18 02:00:00,90,Error Code 1: Low Voltage +2015-11-28 08:00:00,90,Error Code 1: Low Voltage +2015-11-30 06:00:00,90,Error Code 2: High Pressure +2015-11-30 06:00:00,90,Error Code 3: Excessive Vibration +2015-12-02 00:00:00,90,Error Code 2: High Pressure +2015-12-30 06:00:00,90,Error Code 1: Low Voltage +2015-01-31 06:00:00,91,Error Code 2: High Pressure +2015-01-31 06:00:00,91,Error Code 3: Excessive Vibration +2015-02-18 00:00:00,91,Error Code 2: High Pressure +2015-02-20 21:00:00,91,Error Code 4: Invalid Rotation Angle +2015-03-25 11:00:00,91,Error Code 2: High Pressure +2015-03-27 03:00:00,91,Error Code 3: Excessive Vibration +2015-04-01 06:00:00,91,Error Code 1: Low Voltage +2015-04-16 07:00:00,91,Error Code 3: Excessive Vibration +2015-05-08 12:00:00,91,Error Code 4: Invalid Rotation Angle +2015-05-13 02:00:00,91,Error Code 4: Invalid Rotation Angle +2015-05-14 03:00:00,91,Error Code 3: Excessive Vibration +2015-05-19 00:00:00,91,Error Code 2: High Pressure +2015-06-04 08:00:00,91,Error Code 2: High Pressure +2015-06-15 06:00:00,91,Error Code 1: Low Voltage +2015-06-26 21:00:00,91,Error Code 4: Invalid Rotation Angle +2015-06-27 03:00:00,91,Error Code 1: Low Voltage +2015-06-28 00:00:00,91,Error Code 2: High Pressure +2015-06-30 06:00:00,91,Error Code 2: High Pressure +2015-06-30 06:00:00,91,Error Code 3: Excessive Vibration +2015-07-12 09:00:00,91,Error Code 2: High Pressure +2015-07-19 18:00:00,91,Error Code 1: Low Voltage +2015-07-27 15:00:00,91,Error Code 4: Invalid Rotation Angle +2015-08-10 03:00:00,91,Error Code 3: Excessive Vibration +2015-08-18 19:00:00,91,Error Code 1: Low Voltage +2015-08-26 03:00:00,91,Error Code 1: Low Voltage +2015-09-12 20:00:00,91,Error Code 1: Low Voltage +2015-09-13 06:00:00,91,Error Code 1: Low Voltage +2015-09-25 14:00:00,91,Error Code 2: High Pressure +2015-09-28 18:00:00,91,Error Code 1: Low Voltage +2015-10-06 20:00:00,91,Error Code 5: Low Pressure +2015-10-20 18:00:00,91,Error Code 1: Low Voltage +2015-10-24 04:00:00,91,Error Code 3: Excessive Vibration +2015-10-28 06:00:00,91,Error Code 5: Low Pressure +2015-11-14 13:00:00,91,Error Code 3: Excessive Vibration +2015-11-16 06:00:00,91,Error Code 5: Low Pressure +2015-11-30 15:00:00,91,Error Code 4: Invalid Rotation Angle +2015-12-08 03:00:00,91,Error Code 2: High Pressure +2015-12-08 11:00:00,91,Error Code 1: Low Voltage +2015-12-16 13:00:00,91,Error Code 2: High Pressure +2015-12-19 03:00:00,91,Error Code 1: Low Voltage +2015-12-23 00:00:00,91,Error Code 4: Invalid Rotation Angle +2015-12-31 18:00:00,91,Error Code 4: Invalid Rotation Angle +2015-01-06 03:00:00,92,Error Code 1: Low Voltage +2015-01-07 14:00:00,92,Error Code 3: Excessive Vibration +2015-02-04 08:00:00,92,Error Code 5: Low Pressure +2015-02-09 06:00:00,92,Error Code 1: Low Voltage +2015-02-10 05:00:00,92,Error Code 2: High Pressure +2015-02-18 06:00:00,92,Error Code 4: Invalid Rotation Angle +2015-03-07 01:00:00,92,Error Code 1: Low Voltage +2015-03-07 20:00:00,92,Error Code 3: Excessive Vibration +2015-03-11 18:00:00,92,Error Code 4: Invalid Rotation Angle +2015-03-16 07:00:00,92,Error Code 3: Excessive Vibration +2015-03-23 13:00:00,92,Error Code 1: Low Voltage +2015-03-27 03:00:00,92,Error Code 2: High Pressure +2015-04-01 17:00:00,92,Error Code 1: Low Voltage +2015-04-09 08:00:00,92,Error Code 3: Excessive Vibration 
+2015-04-15 23:00:00,92,Error Code 1: Low Voltage +2015-04-19 06:00:00,92,Error Code 1: Low Voltage +2015-04-19 06:00:00,92,Error Code 2: High Pressure +2015-04-19 06:00:00,92,Error Code 3: Excessive Vibration +2015-05-10 18:00:00,92,Error Code 2: High Pressure +2015-05-19 06:00:00,92,Error Code 4: Invalid Rotation Angle +2015-05-27 04:00:00,92,Error Code 2: High Pressure +2015-05-30 22:00:00,92,Error Code 4: Invalid Rotation Angle +2015-06-02 13:00:00,92,Error Code 1: Low Voltage +2015-06-05 02:00:00,92,Error Code 2: High Pressure +2015-06-14 09:00:00,92,Error Code 4: Invalid Rotation Angle +2015-06-20 06:00:00,92,Error Code 1: Low Voltage +2015-06-22 19:00:00,92,Error Code 4: Invalid Rotation Angle +2015-07-03 06:00:00,92,Error Code 4: Invalid Rotation Angle +2015-07-05 23:00:00,92,Error Code 5: Low Pressure +2015-08-17 06:00:00,92,Error Code 4: Invalid Rotation Angle +2015-09-01 06:00:00,92,Error Code 2: High Pressure +2015-09-01 06:00:00,92,Error Code 3: Excessive Vibration +2015-09-10 04:00:00,92,Error Code 1: Low Voltage +2015-09-21 07:00:00,92,Error Code 1: Low Voltage +2015-10-01 06:00:00,92,Error Code 2: High Pressure +2015-10-01 06:00:00,92,Error Code 3: Excessive Vibration +2015-10-01 14:00:00,92,Error Code 3: Excessive Vibration +2015-10-10 04:00:00,92,Error Code 2: High Pressure +2015-10-17 12:00:00,92,Error Code 5: Low Pressure +2015-10-30 09:00:00,92,Error Code 3: Excessive Vibration +2015-11-23 10:00:00,92,Error Code 4: Invalid Rotation Angle +2015-11-29 12:00:00,92,Error Code 1: Low Voltage +2015-11-30 06:00:00,92,Error Code 4: Invalid Rotation Angle +2015-12-04 00:00:00,92,Error Code 4: Invalid Rotation Angle +2015-12-12 15:00:00,92,Error Code 4: Invalid Rotation Angle +2015-01-11 21:00:00,93,Error Code 4: Invalid Rotation Angle +2015-02-01 13:00:00,93,Error Code 1: Low Voltage +2015-02-12 16:00:00,93,Error Code 3: Excessive Vibration +2015-03-12 14:00:00,93,Error Code 1: Low Voltage +2015-03-16 06:00:00,93,Error Code 5: Low Pressure +2015-03-26 23:00:00,93,Error Code 4: Invalid Rotation Angle +2015-03-31 06:00:00,93,Error Code 2: High Pressure +2015-03-31 06:00:00,93,Error Code 3: Excessive Vibration +2015-04-15 06:00:00,93,Error Code 1: Low Voltage +2015-04-19 12:00:00,93,Error Code 3: Excessive Vibration +2015-04-30 06:00:00,93,Error Code 2: High Pressure +2015-04-30 06:00:00,93,Error Code 3: Excessive Vibration +2015-05-15 06:00:00,93,Error Code 5: Low Pressure +2015-05-15 07:00:00,93,Error Code 2: High Pressure +2015-05-24 09:00:00,93,Error Code 4: Invalid Rotation Angle +2015-06-02 09:00:00,93,Error Code 3: Excessive Vibration +2015-06-07 20:00:00,93,Error Code 3: Excessive Vibration +2015-06-29 06:00:00,93,Error Code 1: Low Voltage +2015-07-31 11:00:00,93,Error Code 1: Low Voltage +2015-09-02 04:00:00,93,Error Code 1: Low Voltage +2015-09-02 13:00:00,93,Error Code 2: High Pressure +2015-09-07 02:00:00,93,Error Code 2: High Pressure +2015-09-16 17:00:00,93,Error Code 3: Excessive Vibration +2015-09-20 03:00:00,93,Error Code 5: Low Pressure +2015-09-26 07:00:00,93,Error Code 3: Excessive Vibration +2015-10-06 21:00:00,93,Error Code 1: Low Voltage +2015-10-08 23:00:00,93,Error Code 3: Excessive Vibration +2015-10-30 22:00:00,93,Error Code 1: Low Voltage +2015-12-07 21:00:00,93,Error Code 1: Low Voltage +2015-12-11 21:00:00,93,Error Code 1: Low Voltage +2015-12-13 10:00:00,93,Error Code 2: High Pressure +2015-01-02 02:00:00,94,Error Code 1: Low Voltage +2015-01-02 07:00:00,94,Error Code 5: Low Pressure +2015-01-07 06:00:00,94,Error Code 2: High Pressure +2015-01-07 
06:00:00,94,Error Code 3: Excessive Vibration +2015-02-06 22:00:00,94,Error Code 4: Invalid Rotation Angle +2015-02-16 13:00:00,94,Error Code 1: Low Voltage +2015-02-21 06:00:00,94,Error Code 5: Low Pressure +2015-02-22 10:00:00,94,Error Code 2: High Pressure +2015-03-08 06:00:00,94,Error Code 4: Invalid Rotation Angle +2015-03-28 14:00:00,94,Error Code 4: Invalid Rotation Angle +2015-04-11 23:00:00,94,Error Code 3: Excessive Vibration +2015-04-29 03:00:00,94,Error Code 1: Low Voltage +2015-05-07 06:00:00,94,Error Code 4: Invalid Rotation Angle +2015-05-22 06:00:00,94,Error Code 5: Low Pressure +2015-05-22 11:00:00,94,Error Code 4: Invalid Rotation Angle +2015-05-22 14:00:00,94,Error Code 5: Low Pressure +2015-06-05 02:00:00,94,Error Code 3: Excessive Vibration +2015-06-28 21:00:00,94,Error Code 2: High Pressure +2015-07-07 11:00:00,94,Error Code 4: Invalid Rotation Angle +2015-07-14 16:00:00,94,Error Code 3: Excessive Vibration +2015-07-28 00:00:00,94,Error Code 2: High Pressure +2015-07-29 10:00:00,94,Error Code 4: Invalid Rotation Angle +2015-08-12 07:00:00,94,Error Code 4: Invalid Rotation Angle +2015-08-20 01:00:00,94,Error Code 2: High Pressure +2015-08-20 06:00:00,94,Error Code 5: Low Pressure +2015-08-20 15:00:00,94,Error Code 4: Invalid Rotation Angle +2015-08-21 03:00:00,94,Error Code 3: Excessive Vibration +2015-08-26 23:00:00,94,Error Code 4: Invalid Rotation Angle +2015-09-04 09:00:00,94,Error Code 2: High Pressure +2015-09-18 03:00:00,94,Error Code 1: Low Voltage +2015-10-19 06:00:00,94,Error Code 4: Invalid Rotation Angle +2015-11-01 19:00:00,94,Error Code 2: High Pressure +2015-11-18 06:00:00,94,Error Code 2: High Pressure +2015-11-18 06:00:00,94,Error Code 3: Excessive Vibration +2015-11-18 06:00:00,94,Error Code 5: Low Pressure +2015-11-24 20:00:00,94,Error Code 5: Low Pressure +2015-12-02 08:00:00,94,Error Code 1: Low Voltage +2015-12-05 01:00:00,94,Error Code 1: Low Voltage +2015-12-05 20:00:00,94,Error Code 2: High Pressure +2015-12-08 00:00:00,94,Error Code 3: Excessive Vibration +2015-12-18 06:00:00,94,Error Code 2: High Pressure +2015-12-18 06:00:00,94,Error Code 3: Excessive Vibration +2015-12-30 08:00:00,94,Error Code 4: Invalid Rotation Angle +2015-12-31 23:00:00,94,Error Code 3: Excessive Vibration +2015-01-02 14:00:00,95,Error Code 2: High Pressure +2015-01-25 20:00:00,95,Error Code 4: Invalid Rotation Angle +2015-01-28 23:00:00,95,Error Code 2: High Pressure +2015-02-05 14:00:00,95,Error Code 4: Invalid Rotation Angle +2015-02-18 06:00:00,95,Error Code 5: Low Pressure +2015-03-03 20:00:00,95,Error Code 4: Invalid Rotation Angle +2015-03-10 03:00:00,95,Error Code 2: High Pressure +2015-03-16 04:00:00,95,Error Code 1: Low Voltage +2015-03-16 19:00:00,95,Error Code 1: Low Voltage +2015-03-17 04:00:00,95,Error Code 3: Excessive Vibration +2015-03-20 06:00:00,95,Error Code 1: Low Voltage +2015-03-20 06:00:00,95,Error Code 2: High Pressure +2015-03-20 06:00:00,95,Error Code 3: Excessive Vibration +2015-03-21 14:00:00,95,Error Code 1: Low Voltage +2015-04-13 14:00:00,95,Error Code 5: Low Pressure +2015-04-19 06:00:00,95,Error Code 4: Invalid Rotation Angle +2015-04-23 15:00:00,95,Error Code 4: Invalid Rotation Angle +2015-05-04 06:00:00,95,Error Code 1: Low Voltage +2015-05-12 21:00:00,95,Error Code 3: Excessive Vibration +2015-05-20 19:00:00,95,Error Code 4: Invalid Rotation Angle +2015-06-03 06:00:00,95,Error Code 4: Invalid Rotation Angle +2015-06-06 12:00:00,95,Error Code 4: Invalid Rotation Angle +2015-06-24 07:00:00,95,Error Code 4: Invalid Rotation Angle 
+2015-08-13 19:00:00,95,Error Code 2: High Pressure +2015-08-17 06:00:00,95,Error Code 5: Low Pressure +2015-08-19 01:00:00,95,Error Code 2: High Pressure +2015-08-25 17:00:00,95,Error Code 1: Low Voltage +2015-09-07 08:00:00,95,Error Code 1: Low Voltage +2015-09-16 06:00:00,95,Error Code 4: Invalid Rotation Angle +2015-10-09 02:00:00,95,Error Code 1: Low Voltage +2015-10-16 06:00:00,95,Error Code 5: Low Pressure +2015-10-28 12:00:00,95,Error Code 5: Low Pressure +2015-11-07 15:00:00,95,Error Code 1: Low Voltage +2015-11-19 08:00:00,95,Error Code 2: High Pressure +2015-12-20 02:00:00,95,Error Code 1: Low Voltage +2015-12-23 16:00:00,95,Error Code 3: Excessive Vibration +2015-12-30 06:00:00,95,Error Code 1: Low Voltage +2015-01-05 10:00:00,96,Error Code 3: Excessive Vibration +2015-01-05 14:00:00,96,Error Code 1: Low Voltage +2015-02-16 06:00:00,96,Error Code 4: Invalid Rotation Angle +2015-02-17 05:00:00,96,Error Code 5: Low Pressure +2015-02-25 22:00:00,96,Error Code 3: Excessive Vibration +2015-03-08 05:00:00,96,Error Code 2: High Pressure +2015-03-11 15:00:00,96,Error Code 1: Low Voltage +2015-03-31 13:00:00,96,Error Code 1: Low Voltage +2015-04-03 15:00:00,96,Error Code 3: Excessive Vibration +2015-04-13 14:00:00,96,Error Code 4: Invalid Rotation Angle +2015-04-20 02:00:00,96,Error Code 4: Invalid Rotation Angle +2015-04-24 14:00:00,96,Error Code 4: Invalid Rotation Angle +2015-05-01 03:00:00,96,Error Code 5: Low Pressure +2015-05-02 06:00:00,96,Error Code 4: Invalid Rotation Angle +2015-05-03 22:00:00,96,Error Code 3: Excessive Vibration +2015-05-07 10:00:00,96,Error Code 1: Low Voltage +2015-05-19 06:00:00,96,Error Code 2: High Pressure +2015-05-23 12:00:00,96,Error Code 2: High Pressure +2015-05-24 15:00:00,96,Error Code 2: High Pressure +2015-06-04 22:00:00,96,Error Code 5: Low Pressure +2015-06-06 02:00:00,96,Error Code 5: Low Pressure +2015-06-17 18:00:00,96,Error Code 3: Excessive Vibration +2015-07-13 16:00:00,96,Error Code 2: High Pressure +2015-07-16 06:00:00,96,Error Code 4: Invalid Rotation Angle +2015-07-29 03:00:00,96,Error Code 4: Invalid Rotation Angle +2015-08-03 21:00:00,96,Error Code 2: High Pressure +2015-08-11 19:00:00,96,Error Code 3: Excessive Vibration +2015-08-30 06:00:00,96,Error Code 4: Invalid Rotation Angle +2015-09-11 11:00:00,96,Error Code 4: Invalid Rotation Angle +2015-09-18 17:00:00,96,Error Code 1: Low Voltage +2015-09-21 06:00:00,96,Error Code 2: High Pressure +2015-09-23 13:00:00,96,Error Code 3: Excessive Vibration +2015-10-06 15:00:00,96,Error Code 3: Excessive Vibration +2015-10-07 07:00:00,96,Error Code 2: High Pressure +2015-10-16 14:00:00,96,Error Code 2: High Pressure +2015-10-17 00:00:00,96,Error Code 4: Invalid Rotation Angle +2015-10-22 00:00:00,96,Error Code 4: Invalid Rotation Angle +2015-11-25 05:00:00,96,Error Code 2: High Pressure +2015-12-28 06:00:00,96,Error Code 4: Invalid Rotation Angle +2015-01-01 12:00:00,97,Error Code 3: Excessive Vibration +2015-01-20 06:00:00,97,Error Code 4: Invalid Rotation Angle +2015-01-20 20:00:00,97,Error Code 4: Invalid Rotation Angle +2015-01-23 06:00:00,97,Error Code 4: Invalid Rotation Angle +2015-01-23 06:00:00,97,Error Code 5: Low Pressure +2015-01-24 08:00:00,97,Error Code 2: High Pressure +2015-01-26 09:00:00,97,Error Code 4: Invalid Rotation Angle +2015-02-09 01:00:00,97,Error Code 3: Excessive Vibration +2015-02-11 05:00:00,97,Error Code 1: Low Voltage +2015-03-06 20:00:00,97,Error Code 4: Invalid Rotation Angle +2015-03-13 15:00:00,97,Error Code 3: Excessive Vibration +2015-03-29 
09:00:00,97,Error Code 1: Low Voltage +2015-04-09 09:00:00,97,Error Code 1: Low Voltage +2015-04-19 20:00:00,97,Error Code 4: Invalid Rotation Angle +2015-05-03 00:00:00,97,Error Code 1: Low Voltage +2015-05-06 00:00:00,97,Error Code 1: Low Voltage +2015-05-08 06:00:00,97,Error Code 5: Low Pressure +2015-05-28 08:00:00,97,Error Code 4: Invalid Rotation Angle +2015-06-15 06:00:00,97,Error Code 1: Low Voltage +2015-06-21 09:00:00,97,Error Code 1: Low Voltage +2015-06-29 18:00:00,97,Error Code 2: High Pressure +2015-07-07 06:00:00,97,Error Code 4: Invalid Rotation Angle +2015-07-15 11:00:00,97,Error Code 2: High Pressure +2015-07-21 11:00:00,97,Error Code 1: Low Voltage +2015-07-22 06:00:00,97,Error Code 5: Low Pressure +2015-08-05 00:00:00,97,Error Code 2: High Pressure +2015-08-08 11:00:00,97,Error Code 2: High Pressure +2015-08-11 20:00:00,97,Error Code 4: Invalid Rotation Angle +2015-08-14 07:00:00,97,Error Code 1: Low Voltage +2015-08-22 15:00:00,97,Error Code 1: Low Voltage +2015-08-30 17:00:00,97,Error Code 2: High Pressure +2015-09-02 12:00:00,97,Error Code 5: Low Pressure +2015-09-05 06:00:00,97,Error Code 4: Invalid Rotation Angle +2015-09-08 12:00:00,97,Error Code 1: Low Voltage +2015-09-22 23:00:00,97,Error Code 3: Excessive Vibration +2015-10-02 14:00:00,97,Error Code 4: Invalid Rotation Angle +2015-10-16 17:00:00,97,Error Code 4: Invalid Rotation Angle +2015-10-20 06:00:00,97,Error Code 2: High Pressure +2015-10-20 06:00:00,97,Error Code 3: Excessive Vibration +2015-10-31 22:00:00,97,Error Code 4: Invalid Rotation Angle +2015-11-03 01:00:00,97,Error Code 4: Invalid Rotation Angle +2015-11-04 08:00:00,97,Error Code 4: Invalid Rotation Angle +2015-11-19 06:00:00,97,Error Code 5: Low Pressure +2015-12-02 22:00:00,97,Error Code 2: High Pressure +2015-12-15 02:00:00,97,Error Code 1: Low Voltage +2015-01-17 06:00:00,98,Error Code 1: Low Voltage +2015-01-29 06:00:00,98,Error Code 4: Invalid Rotation Angle +2015-02-02 23:00:00,98,Error Code 5: Low Pressure +2015-02-13 06:00:00,98,Error Code 1: Low Voltage +2015-02-21 16:00:00,98,Error Code 3: Excessive Vibration +2015-02-28 06:00:00,98,Error Code 5: Low Pressure +2015-02-28 16:00:00,98,Error Code 4: Invalid Rotation Angle +2015-03-05 22:00:00,98,Error Code 4: Invalid Rotation Angle +2015-03-26 08:00:00,98,Error Code 3: Excessive Vibration +2015-03-29 14:00:00,98,Error Code 1: Low Voltage +2015-03-31 03:00:00,98,Error Code 1: Low Voltage +2015-04-02 17:00:00,98,Error Code 5: Low Pressure +2015-04-14 06:00:00,98,Error Code 4: Invalid Rotation Angle +2015-04-20 21:00:00,98,Error Code 2: High Pressure +2015-04-29 06:00:00,98,Error Code 2: High Pressure +2015-04-29 06:00:00,98,Error Code 3: Excessive Vibration +2015-04-29 06:00:00,98,Error Code 5: Low Pressure +2015-05-14 06:00:00,98,Error Code 1: Low Voltage +2015-05-29 06:00:00,98,Error Code 4: Invalid Rotation Angle +2015-06-05 18:00:00,98,Error Code 2: High Pressure +2015-06-13 06:00:00,98,Error Code 2: High Pressure +2015-06-13 06:00:00,98,Error Code 3: Excessive Vibration +2015-06-20 06:00:00,98,Error Code 2: High Pressure +2015-06-28 06:00:00,98,Error Code 1: Low Voltage +2015-06-28 06:00:00,98,Error Code 5: Low Pressure +2015-07-02 13:00:00,98,Error Code 2: High Pressure +2015-07-28 06:00:00,98,Error Code 4: Invalid Rotation Angle +2015-08-02 01:00:00,98,Error Code 4: Invalid Rotation Angle +2015-08-08 15:00:00,98,Error Code 2: High Pressure +2015-08-12 06:00:00,98,Error Code 1: Low Voltage +2015-08-24 12:00:00,98,Error Code 4: Invalid Rotation Angle +2015-08-27 06:00:00,98,Error 
Code 2: High Pressure +2015-08-27 06:00:00,98,Error Code 3: Excessive Vibration +2015-10-16 18:00:00,98,Error Code 2: High Pressure +2015-10-20 17:00:00,98,Error Code 1: Low Voltage +2015-10-21 08:00:00,98,Error Code 2: High Pressure +2015-10-23 09:00:00,98,Error Code 4: Invalid Rotation Angle +2015-10-26 04:00:00,98,Error Code 3: Excessive Vibration +2015-10-26 06:00:00,98,Error Code 2: High Pressure +2015-10-26 06:00:00,98,Error Code 3: Excessive Vibration +2015-11-03 23:00:00,98,Error Code 4: Invalid Rotation Angle +2015-11-07 10:00:00,98,Error Code 3: Excessive Vibration +2015-11-25 06:00:00,98,Error Code 4: Invalid Rotation Angle +2015-12-09 09:00:00,98,Error Code 1: Low Voltage +2015-12-27 22:00:00,98,Error Code 2: High Pressure +2015-01-17 06:00:00,99,Error Code 5: Low Pressure +2015-01-19 07:00:00,99,Error Code 2: High Pressure +2015-01-28 01:00:00,99,Error Code 2: High Pressure +2015-01-30 10:00:00,99,Error Code 2: High Pressure +2015-02-01 06:00:00,99,Error Code 1: Low Voltage +2015-02-06 02:00:00,99,Error Code 1: Low Voltage +2015-02-16 06:00:00,99,Error Code 2: High Pressure +2015-02-16 06:00:00,99,Error Code 3: Excessive Vibration +2015-02-25 22:00:00,99,Error Code 1: Low Voltage +2015-03-03 06:00:00,99,Error Code 4: Invalid Rotation Angle +2015-03-06 07:00:00,99,Error Code 2: High Pressure +2015-03-10 08:00:00,99,Error Code 1: Low Voltage +2015-03-18 06:00:00,99,Error Code 5: Low Pressure +2015-04-02 06:00:00,99,Error Code 2: High Pressure +2015-04-02 06:00:00,99,Error Code 3: Excessive Vibration +2015-04-17 06:00:00,99,Error Code 4: Invalid Rotation Angle +2015-04-17 16:00:00,99,Error Code 3: Excessive Vibration +2015-04-18 02:00:00,99,Error Code 3: Excessive Vibration +2015-04-20 11:00:00,99,Error Code 2: High Pressure +2015-04-29 10:00:00,99,Error Code 4: Invalid Rotation Angle +2015-05-01 13:00:00,99,Error Code 1: Low Voltage +2015-05-02 06:00:00,99,Error Code 2: High Pressure +2015-05-02 06:00:00,99,Error Code 3: Excessive Vibration +2015-05-02 14:00:00,99,Error Code 2: High Pressure +2015-05-12 02:00:00,99,Error Code 3: Excessive Vibration +2015-05-17 02:00:00,99,Error Code 1: Low Voltage +2015-05-17 06:00:00,99,Error Code 1: Low Voltage +2015-06-01 06:00:00,99,Error Code 5: Low Pressure +2015-06-05 03:00:00,99,Error Code 1: Low Voltage +2015-06-13 15:00:00,99,Error Code 3: Excessive Vibration +2015-06-18 16:00:00,99,Error Code 2: High Pressure +2015-07-01 06:00:00,99,Error Code 2: High Pressure +2015-07-01 06:00:00,99,Error Code 3: Excessive Vibration +2015-07-16 06:00:00,99,Error Code 4: Invalid Rotation Angle +2015-07-25 21:00:00,99,Error Code 4: Invalid Rotation Angle +2015-07-31 06:00:00,99,Error Code 2: High Pressure +2015-07-31 06:00:00,99,Error Code 3: Excessive Vibration +2015-08-13 00:00:00,99,Error Code 1: Low Voltage +2015-08-14 08:00:00,99,Error Code 2: High Pressure +2015-08-18 05:00:00,99,Error Code 2: High Pressure +2015-08-30 06:00:00,99,Error Code 4: Invalid Rotation Angle +2015-09-13 11:00:00,99,Error Code 4: Invalid Rotation Angle +2015-10-01 16:00:00,99,Error Code 2: High Pressure +2015-10-11 12:00:00,99,Error Code 3: Excessive Vibration +2015-10-13 20:00:00,99,Error Code 3: Excessive Vibration +2015-10-14 06:00:00,99,Error Code 2: High Pressure +2015-10-14 06:00:00,99,Error Code 3: Excessive Vibration +2015-10-15 09:00:00,99,Error Code 3: Excessive Vibration +2015-10-29 06:00:00,99,Error Code 5: Low Pressure +2015-11-04 05:00:00,99,Error Code 2: High Pressure +2015-11-25 13:00:00,99,Error Code 3: Excessive Vibration +2015-11-28 06:00:00,99,Error 
Code 4: Invalid Rotation Angle +2015-12-13 06:00:00,99,Error Code 5: Low Pressure +2015-12-20 02:00:00,99,Error Code 3: Excessive Vibration +2015-01-04 01:00:00,100,Error Code 2: High Pressure +2015-01-16 00:00:00,100,Error Code 4: Invalid Rotation Angle +2015-02-01 10:00:00,100,Error Code 1: Low Voltage +2015-02-11 06:00:00,100,Error Code 1: Low Voltage +2015-02-12 21:00:00,100,Error Code 1: Low Voltage +2015-03-08 15:00:00,100,Error Code 1: Low Voltage +2015-04-27 04:00:00,100,Error Code 4: Invalid Rotation Angle +2015-04-27 22:00:00,100,Error Code 5: Low Pressure +2015-05-16 23:00:00,100,Error Code 2: High Pressure +2015-05-17 13:00:00,100,Error Code 2: High Pressure +2015-05-22 02:00:00,100,Error Code 3: Excessive Vibration +2015-07-05 16:00:00,100,Error Code 3: Excessive Vibration +2015-07-19 01:00:00,100,Error Code 2: High Pressure +2015-08-14 16:00:00,100,Error Code 4: Invalid Rotation Angle +2015-08-30 15:00:00,100,Error Code 4: Invalid Rotation Angle +2015-09-09 06:00:00,100,Error Code 1: Low Voltage +2015-09-14 23:00:00,100,Error Code 3: Excessive Vibration +2015-10-03 05:00:00,100,Error Code 3: Excessive Vibration +2015-10-09 07:00:00,100,Error Code 1: Low Voltage +2015-10-17 02:00:00,100,Error Code 3: Excessive Vibration +2015-10-17 12:00:00,100,Error Code 1: Low Voltage +2015-10-24 23:00:00,100,Error Code 1: Low Voltage +2015-10-27 21:00:00,100,Error Code 2: High Pressure +2015-11-05 02:00:00,100,Error Code 3: Excessive Vibration +2015-11-07 17:00:00,100,Error Code 1: Low Voltage +2015-11-12 01:00:00,100,Error Code 1: Low Voltage +2015-11-21 08:00:00,100,Error Code 2: High Pressure +2015-12-04 02:00:00,100,Error Code 1: Low Voltage +2015-12-08 06:00:00,100,Error Code 2: High Pressure +2015-12-08 06:00:00,100,Error Code 3: Excessive Vibration +2015-12-22 03:00:00,100,Error Code 3: Excessive Vibration diff --git a/RetrievalAugmentedGeneration/examples/csv_rag/PdM_failures.csv b/RetrievalAugmentedGeneration/examples/csv_rag/PdM_failures.csv new file mode 100644 index 000000000..79b9e661e --- /dev/null +++ b/RetrievalAugmentedGeneration/examples/csv_rag/PdM_failures.csv @@ -0,0 +1,762 @@ +datetime,machineID,failure +2015-01-05 06:00:00,1,Failure Code 4: Pressure buildup +2015-03-06 06:00:00,1,Failure Code 1: Oil level below minimum threshold +2015-04-20 06:00:00,1,Failure Code 2: Unstable vibrations +2015-06-19 06:00:00,1,Failure Code 4: Pressure buildup +2015-09-02 06:00:00,1,Failure Code 4: Pressure buildup +2015-10-17 06:00:00,1,Failure Code 2: Unstable vibrations +2015-12-16 06:00:00,1,Failure Code 4: Pressure buildup +2015-03-19 06:00:00,2,Failure Code 1: Oil level below minimum threshold +2015-03-19 06:00:00,2,Failure Code 2: Unstable vibrations +2015-04-18 06:00:00,2,Failure Code 2: Unstable vibrations +2015-12-29 06:00:00,2,Failure Code 2: Unstable vibrations +2015-01-07 06:00:00,3,Failure Code 2: Unstable vibrations +2015-02-06 06:00:00,3,Failure Code 1: Oil level below minimum threshold +2015-07-21 06:00:00,3,Failure Code 2: Unstable vibrations +2015-10-04 06:00:00,3,Failure Code 2: Unstable vibrations +2015-12-03 06:00:00,3,Failure Code 2: Unstable vibrations +2015-01-17 06:00:00,4,Failure Code 2: Unstable vibrations +2015-02-16 06:00:00,4,Failure Code 1: Oil level below minimum threshold +2015-04-02 06:00:00,4,Failure Code 2: Unstable vibrations +2015-07-16 06:00:00,4,Failure Code 2: Unstable vibrations +2015-08-30 06:00:00,4,Failure Code 1: Oil level below minimum threshold +2015-10-14 06:00:00,4,Failure Code 2: Unstable vibrations +2015-01-09 06:00:00,5,Failure Code 
2: Unstable vibrations +2015-02-23 06:00:00,5,Failure Code 1: Oil level below minimum threshold +2015-04-09 06:00:00,5,Failure Code 1: Oil level below minimum threshold +2015-06-23 06:00:00,5,Failure Code 2: Unstable vibrations +2015-09-06 06:00:00,5,Failure Code 1: Oil level below minimum threshold +2015-10-06 06:00:00,5,Failure Code 2: Unstable vibrations +2015-11-20 06:00:00,5,Failure Code 1: Oil level below minimum threshold +2015-01-24 06:00:00,7,Failure Code 2: Unstable vibrations +2015-01-24 06:00:00,7,Failure Code 4: Pressure buildup +2015-02-08 06:00:00,7,Failure Code 1: Oil level below minimum threshold +2015-04-24 06:00:00,7,Failure Code 1: Oil level below minimum threshold +2015-05-09 06:00:00,7,Failure Code 4: Pressure buildup +2015-05-24 06:00:00,7,Failure Code 2: Unstable vibrations +2015-07-08 06:00:00,7,Failure Code 2: Unstable vibrations +2015-09-21 06:00:00,7,Failure Code 4: Pressure buildup +2015-10-21 06:00:00,7,Failure Code 2: Unstable vibrations +2015-12-20 06:00:00,7,Failure Code 1: Oil level below minimum threshold +2015-03-07 06:00:00,8,Failure Code 2: Unstable vibrations +2015-03-22 06:00:00,8,Failure Code 4: Pressure buildup +2015-04-21 06:00:00,8,Failure Code 2: Unstable vibrations +2015-08-04 06:00:00,8,Failure Code 4: Pressure buildup +2015-10-18 06:00:00,8,Failure Code 4: Pressure buildup +2015-03-04 06:00:00,9,Failure Code 1: Oil level below minimum threshold +2015-06-17 06:00:00,9,Failure Code 2: Unstable vibrations +2015-07-02 06:00:00,9,Failure Code 1: Oil level below minimum threshold +2015-07-17 06:00:00,9,Failure Code 2: Unstable vibrations +2015-08-31 06:00:00,9,Failure Code 2: Unstable vibrations +2015-09-15 06:00:00,9,Failure Code 1: Oil level below minimum threshold +2015-10-15 06:00:00,9,Failure Code 2: Unstable vibrations +2015-11-14 06:00:00,9,Failure Code 1: Oil level below minimum threshold +2015-12-14 06:00:00,9,Failure Code 2: Unstable vibrations +2015-01-19 06:00:00,10,Failure Code 2: Unstable vibrations +2015-04-04 06:00:00,10,Failure Code 2: Unstable vibrations +2015-05-19 06:00:00,10,Failure Code 2: Unstable vibrations +2015-06-03 06:00:00,10,Failure Code 1: Oil level below minimum threshold +2015-06-18 06:00:00,10,Failure Code 2: Unstable vibrations +2015-01-20 06:00:00,11,Failure Code 2: Unstable vibrations +2015-02-19 06:00:00,11,Failure Code 3: Fuse blown due to excess voltage +2015-04-20 06:00:00,11,Failure Code 2: Unstable vibrations +2015-04-20 06:00:00,11,Failure Code 3: Fuse blown due to excess voltage +2015-10-02 06:00:00,11,Failure Code 3: Fuse blown due to excess voltage +2015-12-16 06:00:00,11,Failure Code 3: Fuse blown due to excess voltage +2015-01-07 06:00:00,12,Failure Code 1: Oil level below minimum threshold +2015-01-07 06:00:00,12,Failure Code 2: Unstable vibrations +2015-03-23 06:00:00,12,Failure Code 2: Unstable vibrations +2015-07-06 06:00:00,12,Failure Code 1: Oil level below minimum threshold +2015-09-04 06:00:00,12,Failure Code 1: Oil level below minimum threshold +2015-10-04 06:00:00,12,Failure Code 2: Unstable vibrations +2015-04-11 06:00:00,13,Failure Code 3: Fuse blown due to excess voltage +2015-04-11 06:00:00,13,Failure Code 4: Pressure buildup +2015-06-10 06:00:00,13,Failure Code 3: Fuse blown due to excess voltage +2015-06-25 06:00:00,13,Failure Code 4: Pressure buildup +2015-07-10 06:00:00,13,Failure Code 2: Unstable vibrations +2015-08-09 06:00:00,13,Failure Code 2: Unstable vibrations +2015-08-24 06:00:00,13,Failure Code 1: Oil level below minimum threshold +2015-08-24 06:00:00,13,Failure Code 3: 
Fuse blown due to excess voltage +2015-09-23 06:00:00,13,Failure Code 4: Pressure buildup +2015-10-23 06:00:00,13,Failure Code 1: Oil level below minimum threshold +2015-12-07 06:00:00,13,Failure Code 4: Pressure buildup +2015-12-22 06:00:00,13,Failure Code 1: Oil level below minimum threshold +2015-12-22 06:00:00,13,Failure Code 2: Unstable vibrations +2015-01-31 06:00:00,14,Failure Code 2: Unstable vibrations +2015-03-02 06:00:00,14,Failure Code 1: Oil level below minimum threshold +2015-07-15 06:00:00,14,Failure Code 1: Oil level below minimum threshold +2015-07-30 06:00:00,14,Failure Code 2: Unstable vibrations +2015-01-20 06:00:00,15,Failure Code 2: Unstable vibrations +2015-01-20 06:00:00,15,Failure Code 4: Pressure buildup +2015-06-04 06:00:00,15,Failure Code 1: Oil level below minimum threshold +2015-08-03 06:00:00,15,Failure Code 1: Oil level below minimum threshold +2015-11-01 06:00:00,15,Failure Code 2: Unstable vibrations +2015-11-01 06:00:00,15,Failure Code 4: Pressure buildup +2015-12-31 06:00:00,15,Failure Code 4: Pressure buildup +2015-01-02 03:00:00,16,Failure Code 1: Oil level below minimum threshold +2015-01-02 03:00:00,16,Failure Code 3: Fuse blown due to excess voltage +2015-01-17 06:00:00,16,Failure Code 1: Oil level below minimum threshold +2015-03-03 06:00:00,16,Failure Code 3: Fuse blown due to excess voltage +2015-04-02 06:00:00,16,Failure Code 1: Oil level below minimum threshold +2015-05-17 06:00:00,16,Failure Code 3: Fuse blown due to excess voltage +2015-06-01 06:00:00,16,Failure Code 2: Unstable vibrations +2015-06-16 06:00:00,16,Failure Code 1: Oil level below minimum threshold +2015-07-01 06:00:00,16,Failure Code 2: Unstable vibrations +2015-07-01 06:00:00,16,Failure Code 3: Fuse blown due to excess voltage +2015-08-15 06:00:00,16,Failure Code 3: Fuse blown due to excess voltage +2015-10-14 06:00:00,16,Failure Code 3: Fuse blown due to excess voltage +2015-11-28 06:00:00,16,Failure Code 2: Unstable vibrations +2015-01-02 03:00:00,17,Failure Code 4: Pressure buildup +2015-01-16 06:00:00,17,Failure Code 3: Fuse blown due to excess voltage +2015-02-15 06:00:00,17,Failure Code 2: Unstable vibrations +2015-03-17 06:00:00,17,Failure Code 3: Fuse blown due to excess voltage +2015-04-01 06:00:00,17,Failure Code 2: Unstable vibrations +2015-04-01 06:00:00,17,Failure Code 4: Pressure buildup +2015-05-16 06:00:00,17,Failure Code 1: Oil level below minimum threshold +2015-06-15 06:00:00,17,Failure Code 3: Fuse blown due to excess voltage +2015-07-15 06:00:00,17,Failure Code 2: Unstable vibrations +2015-07-30 06:00:00,17,Failure Code 1: Oil level below minimum threshold +2015-08-29 06:00:00,17,Failure Code 4: Pressure buildup +2015-10-28 06:00:00,17,Failure Code 3: Fuse blown due to excess voltage +2015-10-28 06:00:00,17,Failure Code 4: Pressure buildup +2015-11-27 06:00:00,17,Failure Code 2: Unstable vibrations +2015-12-27 06:00:00,17,Failure Code 1: Oil level below minimum threshold +2015-01-30 06:00:00,18,Failure Code 4: Pressure buildup +2015-03-31 06:00:00,18,Failure Code 1: Oil level below minimum threshold +2015-05-15 06:00:00,18,Failure Code 4: Pressure buildup +2015-06-29 06:00:00,18,Failure Code 2: Unstable vibrations +2015-07-29 06:00:00,18,Failure Code 2: Unstable vibrations +2015-08-28 06:00:00,18,Failure Code 4: Pressure buildup +2015-09-12 06:00:00,18,Failure Code 2: Unstable vibrations +2015-10-27 06:00:00,18,Failure Code 4: Pressure buildup +2015-01-13 06:00:00,19,Failure Code 1: Oil level below minimum threshold +2015-02-27 06:00:00,19,Failure Code 4: 
Pressure buildup +2015-04-13 06:00:00,19,Failure Code 4: Pressure buildup +2015-06-27 06:00:00,19,Failure Code 4: Pressure buildup +2015-09-10 06:00:00,19,Failure Code 4: Pressure buildup +2015-11-24 06:00:00,19,Failure Code 4: Pressure buildup +2015-12-09 06:00:00,19,Failure Code 1: Oil level below minimum threshold +2015-01-04 06:00:00,20,Failure Code 2: Unstable vibrations +2015-01-19 06:00:00,20,Failure Code 4: Pressure buildup +2015-02-03 06:00:00,20,Failure Code 3: Fuse blown due to excess voltage +2015-03-20 06:00:00,20,Failure Code 2: Unstable vibrations +2015-04-04 06:00:00,20,Failure Code 3: Fuse blown due to excess voltage +2015-04-19 06:00:00,20,Failure Code 4: Pressure buildup +2015-07-18 06:00:00,20,Failure Code 1: Oil level below minimum threshold +2015-08-17 06:00:00,20,Failure Code 4: Pressure buildup +2015-10-01 06:00:00,20,Failure Code 2: Unstable vibrations +2015-10-31 06:00:00,20,Failure Code 3: Fuse blown due to excess voltage +2015-11-15 06:00:00,20,Failure Code 2: Unstable vibrations +2015-12-15 06:00:00,20,Failure Code 1: Oil level below minimum threshold +2015-12-30 06:00:00,20,Failure Code 4: Pressure buildup +2015-01-23 06:00:00,21,Failure Code 3: Fuse blown due to excess voltage +2015-04-08 06:00:00,21,Failure Code 4: Pressure buildup +2015-05-08 06:00:00,21,Failure Code 1: Oil level below minimum threshold +2015-08-06 06:00:00,21,Failure Code 2: Unstable vibrations +2015-08-06 06:00:00,21,Failure Code 4: Pressure buildup +2015-09-05 06:00:00,21,Failure Code 3: Fuse blown due to excess voltage +2015-09-20 06:00:00,21,Failure Code 4: Pressure buildup +2015-10-05 06:00:00,21,Failure Code 2: Unstable vibrations +2015-11-04 06:00:00,21,Failure Code 4: Pressure buildup +2015-12-04 06:00:00,21,Failure Code 2: Unstable vibrations +2015-12-19 06:00:00,21,Failure Code 3: Fuse blown due to excess voltage +2015-01-02 03:00:00,22,Failure Code 1: Oil level below minimum threshold +2015-02-06 06:00:00,22,Failure Code 3: Fuse blown due to excess voltage +2015-03-23 06:00:00,22,Failure Code 2: Unstable vibrations +2015-03-23 06:00:00,22,Failure Code 4: Pressure buildup +2015-04-07 06:00:00,22,Failure Code 3: Fuse blown due to excess voltage +2015-04-22 06:00:00,22,Failure Code 2: Unstable vibrations +2015-05-22 06:00:00,22,Failure Code 1: Oil level below minimum threshold +2015-05-22 06:00:00,22,Failure Code 3: Fuse blown due to excess voltage +2015-06-21 06:00:00,22,Failure Code 4: Pressure buildup +2015-07-06 06:00:00,22,Failure Code 2: Unstable vibrations +2015-08-05 06:00:00,22,Failure Code 4: Pressure buildup +2015-08-20 06:00:00,22,Failure Code 3: Fuse blown due to excess voltage +2015-09-19 06:00:00,22,Failure Code 2: Unstable vibrations +2015-11-03 06:00:00,22,Failure Code 3: Fuse blown due to excess voltage +2015-11-18 06:00:00,22,Failure Code 4: Pressure buildup +2015-01-05 06:00:00,23,Failure Code 2: Unstable vibrations +2015-01-20 06:00:00,23,Failure Code 4: Pressure buildup +2015-03-21 06:00:00,23,Failure Code 4: Pressure buildup +2015-04-05 06:00:00,23,Failure Code 1: Oil level below minimum threshold +2015-06-04 06:00:00,23,Failure Code 3: Fuse blown due to excess voltage +2015-07-19 06:00:00,23,Failure Code 2: Unstable vibrations +2015-08-03 06:00:00,23,Failure Code 3: Fuse blown due to excess voltage +2015-09-02 06:00:00,23,Failure Code 2: Unstable vibrations +2015-10-02 06:00:00,23,Failure Code 3: Fuse blown due to excess voltage +2015-10-17 06:00:00,23,Failure Code 4: Pressure buildup +2015-11-16 06:00:00,23,Failure Code 3: Fuse blown due to excess voltage 
+2015-12-16 06:00:00,23,Failure Code 4: Pressure buildup +2015-01-02 06:00:00,24,Failure Code 1: Oil level below minimum threshold +2015-02-16 06:00:00,24,Failure Code 1: Oil level below minimum threshold +2015-04-02 06:00:00,24,Failure Code 4: Pressure buildup +2015-04-17 06:00:00,24,Failure Code 3: Fuse blown due to excess voltage +2015-07-01 06:00:00,24,Failure Code 3: Fuse blown due to excess voltage +2015-07-31 06:00:00,24,Failure Code 4: Pressure buildup +2015-09-29 06:00:00,24,Failure Code 1: Oil level below minimum threshold +2015-10-14 06:00:00,24,Failure Code 3: Fuse blown due to excess voltage +2015-11-13 06:00:00,24,Failure Code 4: Pressure buildup +2015-11-28 06:00:00,24,Failure Code 3: Fuse blown due to excess voltage +2015-03-05 06:00:00,25,Failure Code 1: Oil level below minimum threshold +2015-05-04 06:00:00,25,Failure Code 4: Pressure buildup +2015-06-03 06:00:00,25,Failure Code 1: Oil level below minimum threshold +2015-06-03 06:00:00,25,Failure Code 2: Unstable vibrations +2015-08-17 06:00:00,25,Failure Code 2: Unstable vibrations +2015-08-17 06:00:00,25,Failure Code 4: Pressure buildup +2015-10-31 06:00:00,25,Failure Code 2: Unstable vibrations +2015-11-30 06:00:00,25,Failure Code 4: Pressure buildup +2015-04-06 06:00:00,26,Failure Code 1: Oil level below minimum threshold +2015-08-19 06:00:00,26,Failure Code 2: Unstable vibrations +2015-09-03 06:00:00,26,Failure Code 1: Oil level below minimum threshold +2015-10-18 06:00:00,26,Failure Code 2: Unstable vibrations +2015-11-02 06:00:00,26,Failure Code 1: Oil level below minimum threshold +2015-11-17 06:00:00,26,Failure Code 2: Unstable vibrations +2015-01-10 06:00:00,27,Failure Code 2: Unstable vibrations +2015-01-25 06:00:00,27,Failure Code 1: Oil level below minimum threshold +2015-02-24 06:00:00,27,Failure Code 2: Unstable vibrations +2015-03-17 06:00:00,28,Failure Code 2: Unstable vibrations +2015-05-01 06:00:00,28,Failure Code 1: Oil level below minimum threshold +2015-06-15 06:00:00,28,Failure Code 2: Unstable vibrations +2015-08-14 06:00:00,28,Failure Code 1: Oil level below minimum threshold +2015-12-27 06:00:00,28,Failure Code 1: Oil level below minimum threshold +2015-03-03 06:00:00,29,Failure Code 1: Oil level below minimum threshold +2015-11-28 06:00:00,29,Failure Code 2: Unstable vibrations +2015-01-09 06:00:00,30,Failure Code 4: Pressure buildup +2015-03-25 06:00:00,30,Failure Code 4: Pressure buildup +2015-08-07 06:00:00,30,Failure Code 2: Unstable vibrations +2015-08-07 06:00:00,30,Failure Code 4: Pressure buildup +2015-12-05 06:00:00,30,Failure Code 4: Pressure buildup +2015-03-23 06:00:00,31,Failure Code 3: Fuse blown due to excess voltage +2015-05-22 06:00:00,31,Failure Code 1: Oil level below minimum threshold +2015-07-06 06:00:00,31,Failure Code 3: Fuse blown due to excess voltage +2015-09-04 06:00:00,31,Failure Code 3: Fuse blown due to excess voltage +2015-10-19 06:00:00,31,Failure Code 2: Unstable vibrations +2015-11-18 06:00:00,31,Failure Code 1: Oil level below minimum threshold +2015-11-18 06:00:00,31,Failure Code 3: Fuse blown due to excess voltage +2015-01-10 06:00:00,32,Failure Code 1: Oil level below minimum threshold +2015-02-09 06:00:00,32,Failure Code 4: Pressure buildup +2015-03-11 06:00:00,32,Failure Code 1: Oil level below minimum threshold +2015-04-10 06:00:00,32,Failure Code 4: Pressure buildup +2015-05-10 06:00:00,32,Failure Code 2: Unstable vibrations +2015-06-24 06:00:00,32,Failure Code 2: Unstable vibrations +2015-06-24 06:00:00,32,Failure Code 4: Pressure buildup +2015-08-08 
06:00:00,32,Failure Code 1: Oil level below minimum threshold +2015-09-22 06:00:00,32,Failure Code 4: Pressure buildup +2015-11-06 06:00:00,32,Failure Code 1: Oil level below minimum threshold +2015-01-31 06:00:00,33,Failure Code 1: Oil level below minimum threshold +2015-01-31 06:00:00,33,Failure Code 4: Pressure buildup +2015-04-01 06:00:00,33,Failure Code 4: Pressure buildup +2015-05-01 06:00:00,33,Failure Code 2: Unstable vibrations +2015-05-31 06:00:00,33,Failure Code 2: Unstable vibrations +2015-06-15 06:00:00,33,Failure Code 4: Pressure buildup +2015-08-29 06:00:00,33,Failure Code 4: Pressure buildup +2015-09-28 06:00:00,33,Failure Code 2: Unstable vibrations +2015-11-27 06:00:00,33,Failure Code 1: Oil level below minimum threshold +2015-04-16 06:00:00,34,Failure Code 1: Oil level below minimum threshold +2015-10-28 06:00:00,34,Failure Code 2: Unstable vibrations +2015-11-12 06:00:00,34,Failure Code 1: Oil level below minimum threshold +2015-01-02 03:00:00,35,Failure Code 1: Oil level below minimum threshold +2015-01-06 06:00:00,35,Failure Code 4: Pressure buildup +2015-02-05 06:00:00,35,Failure Code 3: Fuse blown due to excess voltage +2015-03-22 06:00:00,35,Failure Code 4: Pressure buildup +2015-04-06 06:00:00,35,Failure Code 3: Fuse blown due to excess voltage +2015-05-06 06:00:00,35,Failure Code 4: Pressure buildup +2015-05-21 06:00:00,35,Failure Code 1: Oil level below minimum threshold +2015-07-20 06:00:00,35,Failure Code 2: Unstable vibrations +2015-08-04 06:00:00,35,Failure Code 1: Oil level below minimum threshold +2015-09-18 06:00:00,35,Failure Code 1: Oil level below minimum threshold +2015-09-18 06:00:00,35,Failure Code 4: Pressure buildup +2015-02-21 06:00:00,36,Failure Code 1: Oil level below minimum threshold +2015-06-06 06:00:00,36,Failure Code 1: Oil level below minimum threshold +2015-06-21 06:00:00,36,Failure Code 2: Unstable vibrations +2015-09-04 06:00:00,36,Failure Code 2: Unstable vibrations +2015-01-04 06:00:00,37,Failure Code 4: Pressure buildup +2015-02-03 06:00:00,37,Failure Code 3: Fuse blown due to excess voltage +2015-04-19 06:00:00,37,Failure Code 4: Pressure buildup +2015-05-19 06:00:00,37,Failure Code 3: Fuse blown due to excess voltage +2015-06-03 06:00:00,37,Failure Code 4: Pressure buildup +2015-06-18 06:00:00,37,Failure Code 1: Oil level below minimum threshold +2015-07-03 06:00:00,37,Failure Code 3: Fuse blown due to excess voltage +2015-07-18 06:00:00,37,Failure Code 4: Pressure buildup +2015-08-17 06:00:00,37,Failure Code 2: Unstable vibrations +2015-09-16 06:00:00,37,Failure Code 2: Unstable vibrations +2015-10-01 06:00:00,37,Failure Code 4: Pressure buildup +2015-11-15 06:00:00,37,Failure Code 2: Unstable vibrations +2015-11-30 06:00:00,37,Failure Code 3: Fuse blown due to excess voltage +2015-12-15 06:00:00,37,Failure Code 4: Pressure buildup +2015-01-17 06:00:00,38,Failure Code 4: Pressure buildup +2015-04-02 06:00:00,38,Failure Code 2: Unstable vibrations +2015-04-17 06:00:00,38,Failure Code 4: Pressure buildup +2015-07-01 06:00:00,38,Failure Code 2: Unstable vibrations +2015-08-30 06:00:00,38,Failure Code 4: Pressure buildup +2015-12-13 06:00:00,38,Failure Code 1: Oil level below minimum threshold +2015-12-13 06:00:00,38,Failure Code 4: Pressure buildup +2015-04-01 06:00:00,39,Failure Code 2: Unstable vibrations +2015-07-30 06:00:00,39,Failure Code 1: Oil level below minimum threshold +2015-10-13 06:00:00,39,Failure Code 2: Unstable vibrations +2015-12-27 06:00:00,39,Failure Code 2: Unstable vibrations +2015-01-04 06:00:00,40,Failure 
Code 3: Fuse blown due to excess voltage +2015-02-18 06:00:00,40,Failure Code 1: Oil level below minimum threshold +2015-04-04 06:00:00,40,Failure Code 1: Oil level below minimum threshold +2015-06-03 06:00:00,40,Failure Code 3: Fuse blown due to excess voltage +2015-06-18 06:00:00,40,Failure Code 2: Unstable vibrations +2015-07-03 06:00:00,40,Failure Code 1: Oil level below minimum threshold +2015-08-17 06:00:00,40,Failure Code 2: Unstable vibrations +2015-11-30 06:00:00,40,Failure Code 1: Oil level below minimum threshold +2015-12-15 06:00:00,40,Failure Code 3: Fuse blown due to excess voltage +2015-02-01 06:00:00,41,Failure Code 1: Oil level below minimum threshold +2015-05-02 06:00:00,41,Failure Code 1: Oil level below minimum threshold +2015-10-29 06:00:00,41,Failure Code 1: Oil level below minimum threshold +2015-01-21 06:00:00,42,Failure Code 1: Oil level below minimum threshold +2015-04-06 06:00:00,42,Failure Code 3: Fuse blown due to excess voltage +2015-05-06 06:00:00,42,Failure Code 1: Oil level below minimum threshold +2015-06-05 06:00:00,42,Failure Code 2: Unstable vibrations +2015-06-20 06:00:00,42,Failure Code 3: Fuse blown due to excess voltage +2015-08-19 06:00:00,42,Failure Code 2: Unstable vibrations +2015-09-18 06:00:00,42,Failure Code 3: Fuse blown due to excess voltage +2015-11-17 06:00:00,42,Failure Code 2: Unstable vibrations +2015-12-17 06:00:00,42,Failure Code 3: Fuse blown due to excess voltage +2015-02-05 06:00:00,43,Failure Code 1: Oil level below minimum threshold +2015-02-20 06:00:00,43,Failure Code 4: Pressure buildup +2015-03-22 06:00:00,43,Failure Code 2: Unstable vibrations +2015-05-21 06:00:00,43,Failure Code 2: Unstable vibrations +2015-06-05 06:00:00,43,Failure Code 1: Oil level below minimum threshold +2015-07-20 06:00:00,43,Failure Code 1: Oil level below minimum threshold +2015-08-04 06:00:00,43,Failure Code 2: Unstable vibrations +2015-10-03 06:00:00,43,Failure Code 4: Pressure buildup +2015-11-02 06:00:00,43,Failure Code 1: Oil level below minimum threshold +2015-11-17 06:00:00,43,Failure Code 2: Unstable vibrations +2015-03-16 06:00:00,44,Failure Code 2: Unstable vibrations +2015-06-14 06:00:00,44,Failure Code 1: Oil level below minimum threshold +2015-07-29 06:00:00,44,Failure Code 2: Unstable vibrations +2015-08-28 06:00:00,44,Failure Code 1: Oil level below minimum threshold +2015-01-02 03:00:00,45,Failure Code 1: Oil level below minimum threshold +2015-01-22 06:00:00,45,Failure Code 2: Unstable vibrations +2015-02-21 06:00:00,45,Failure Code 4: Pressure buildup +2015-05-22 06:00:00,45,Failure Code 4: Pressure buildup +2015-07-21 06:00:00,45,Failure Code 4: Pressure buildup +2015-10-19 06:00:00,45,Failure Code 4: Pressure buildup +2015-11-03 06:00:00,45,Failure Code 1: Oil level below minimum threshold +2015-02-22 06:00:00,46,Failure Code 2: Unstable vibrations +2015-11-19 06:00:00,46,Failure Code 2: Unstable vibrations +2015-01-24 06:00:00,47,Failure Code 3: Fuse blown due to excess voltage +2015-03-25 06:00:00,47,Failure Code 1: Oil level below minimum threshold +2015-04-09 06:00:00,47,Failure Code 2: Unstable vibrations +2015-06-23 06:00:00,47,Failure Code 3: Fuse blown due to excess voltage +2015-07-08 06:00:00,47,Failure Code 2: Unstable vibrations +2015-08-07 06:00:00,47,Failure Code 1: Oil level below minimum threshold +2015-09-21 06:00:00,47,Failure Code 3: Fuse blown due to excess voltage +2015-10-21 06:00:00,47,Failure Code 2: Unstable vibrations +2015-11-20 06:00:00,47,Failure Code 2: Unstable vibrations +2015-12-20 
06:00:00,47,Failure Code 2: Unstable vibrations +2015-01-11 06:00:00,48,Failure Code 2: Unstable vibrations +2015-03-12 06:00:00,48,Failure Code 1: Oil level below minimum threshold +2015-05-11 06:00:00,48,Failure Code 2: Unstable vibrations +2015-07-10 06:00:00,48,Failure Code 2: Unstable vibrations +2015-07-25 06:00:00,48,Failure Code 1: Oil level below minimum threshold +2015-09-08 06:00:00,48,Failure Code 1: Oil level below minimum threshold +2015-11-07 06:00:00,48,Failure Code 2: Unstable vibrations +2015-03-01 06:00:00,49,Failure Code 4: Pressure buildup +2015-03-16 06:00:00,49,Failure Code 3: Fuse blown due to excess voltage +2015-04-15 06:00:00,49,Failure Code 4: Pressure buildup +2015-05-15 06:00:00,49,Failure Code 2: Unstable vibrations +2015-07-14 06:00:00,49,Failure Code 4: Pressure buildup +2015-07-29 06:00:00,49,Failure Code 3: Fuse blown due to excess voltage +2015-09-12 06:00:00,49,Failure Code 2: Unstable vibrations +2015-09-12 06:00:00,49,Failure Code 3: Fuse blown due to excess voltage +2015-10-12 06:00:00,49,Failure Code 4: Pressure buildup +2015-11-11 06:00:00,49,Failure Code 3: Fuse blown due to excess voltage +2015-11-26 06:00:00,49,Failure Code 2: Unstable vibrations +2015-12-26 06:00:00,49,Failure Code 4: Pressure buildup +2015-01-30 06:00:00,50,Failure Code 1: Oil level below minimum threshold +2015-04-30 06:00:00,50,Failure Code 2: Unstable vibrations +2015-09-12 06:00:00,50,Failure Code 2: Unstable vibrations +2015-10-12 06:00:00,50,Failure Code 2: Unstable vibrations +2015-01-02 03:00:00,51,Failure Code 1: Oil level below minimum threshold +2015-03-02 06:00:00,51,Failure Code 4: Pressure buildup +2015-04-01 06:00:00,51,Failure Code 2: Unstable vibrations +2015-05-31 06:00:00,51,Failure Code 2: Unstable vibrations +2015-06-30 06:00:00,51,Failure Code 2: Unstable vibrations +2015-07-15 06:00:00,51,Failure Code 4: Pressure buildup +2015-09-13 06:00:00,51,Failure Code 2: Unstable vibrations +2015-09-28 06:00:00,51,Failure Code 4: Pressure buildup +2015-11-27 06:00:00,51,Failure Code 4: Pressure buildup +2015-12-12 06:00:00,51,Failure Code 2: Unstable vibrations +2015-01-19 06:00:00,52,Failure Code 2: Unstable vibrations +2015-01-19 06:00:00,52,Failure Code 4: Pressure buildup +2015-07-18 06:00:00,52,Failure Code 4: Pressure buildup +2015-08-02 06:00:00,52,Failure Code 2: Unstable vibrations +2015-10-31 06:00:00,52,Failure Code 1: Oil level below minimum threshold +2015-10-31 06:00:00,52,Failure Code 4: Pressure buildup +2015-12-30 06:00:00,52,Failure Code 4: Pressure buildup +2015-03-29 06:00:00,53,Failure Code 1: Oil level below minimum threshold +2015-05-13 06:00:00,53,Failure Code 2: Unstable vibrations +2015-07-12 06:00:00,53,Failure Code 1: Oil level below minimum threshold +2015-10-10 06:00:00,53,Failure Code 1: Oil level below minimum threshold +2015-11-09 06:00:00,53,Failure Code 2: Unstable vibrations +2015-12-09 06:00:00,53,Failure Code 2: Unstable vibrations +2015-01-15 06:00:00,54,Failure Code 3: Fuse blown due to excess voltage +2015-03-31 06:00:00,54,Failure Code 3: Fuse blown due to excess voltage +2015-04-30 06:00:00,54,Failure Code 2: Unstable vibrations +2015-06-29 06:00:00,54,Failure Code 3: Fuse blown due to excess voltage +2015-12-26 06:00:00,54,Failure Code 2: Unstable vibrations +2015-03-07 06:00:00,55,Failure Code 4: Pressure buildup +2015-04-21 06:00:00,55,Failure Code 2: Unstable vibrations +2015-06-05 06:00:00,55,Failure Code 4: Pressure buildup +2015-06-20 06:00:00,55,Failure Code 1: Oil level below minimum threshold +2015-08-04 
06:00:00,55,Failure Code 1: Oil level below minimum threshold +2015-08-19 06:00:00,55,Failure Code 4: Pressure buildup +2015-09-03 06:00:00,55,Failure Code 2: Unstable vibrations +2015-12-02 06:00:00,55,Failure Code 1: Oil level below minimum threshold +2015-01-02 03:00:00,56,Failure Code 3: Fuse blown due to excess voltage +2015-01-14 06:00:00,56,Failure Code 3: Fuse blown due to excess voltage +2015-03-15 06:00:00,56,Failure Code 3: Fuse blown due to excess voltage +2015-04-14 06:00:00,56,Failure Code 1: Oil level below minimum threshold +2015-04-29 06:00:00,56,Failure Code 2: Unstable vibrations +2015-06-13 06:00:00,56,Failure Code 3: Fuse blown due to excess voltage +2015-09-26 06:00:00,56,Failure Code 2: Unstable vibrations +2015-09-26 06:00:00,56,Failure Code 3: Fuse blown due to excess voltage +2015-11-10 06:00:00,56,Failure Code 1: Oil level below minimum threshold +2015-11-25 06:00:00,56,Failure Code 2: Unstable vibrations +2015-12-25 06:00:00,56,Failure Code 1: Oil level below minimum threshold +2015-02-07 06:00:00,57,Failure Code 1: Oil level below minimum threshold +2015-11-04 06:00:00,57,Failure Code 1: Oil level below minimum threshold +2015-01-02 03:00:00,58,Failure Code 2: Unstable vibrations +2015-01-31 06:00:00,58,Failure Code 3: Fuse blown due to excess voltage +2015-05-01 06:00:00,58,Failure Code 3: Fuse blown due to excess voltage +2015-05-16 06:00:00,58,Failure Code 1: Oil level below minimum threshold +2015-05-31 06:00:00,58,Failure Code 2: Unstable vibrations +2015-07-30 06:00:00,58,Failure Code 3: Fuse blown due to excess voltage +2015-09-28 06:00:00,58,Failure Code 3: Fuse blown due to excess voltage +2015-12-12 06:00:00,58,Failure Code 2: Unstable vibrations +2015-01-02 03:00:00,59,Failure Code 1: Oil level below minimum threshold +2015-02-04 06:00:00,59,Failure Code 4: Pressure buildup +2015-04-05 06:00:00,59,Failure Code 4: Pressure buildup +2015-05-20 06:00:00,59,Failure Code 4: Pressure buildup +2015-07-19 06:00:00,59,Failure Code 1: Oil level below minimum threshold +2015-08-03 06:00:00,59,Failure Code 2: Unstable vibrations +2015-08-18 06:00:00,59,Failure Code 4: Pressure buildup +2015-10-17 06:00:00,59,Failure Code 2: Unstable vibrations +2015-01-30 06:00:00,60,Failure Code 2: Unstable vibrations +2015-04-15 06:00:00,60,Failure Code 2: Unstable vibrations +2015-07-29 06:00:00,60,Failure Code 2: Unstable vibrations +2015-09-12 06:00:00,60,Failure Code 2: Unstable vibrations +2015-02-27 06:00:00,61,Failure Code 2: Unstable vibrations +2015-08-11 06:00:00,61,Failure Code 2: Unstable vibrations +2015-10-25 06:00:00,61,Failure Code 1: Oil level below minimum threshold +2015-11-24 06:00:00,61,Failure Code 2: Unstable vibrations +2015-01-13 06:00:00,62,Failure Code 1: Oil level below minimum threshold +2015-01-28 06:00:00,62,Failure Code 4: Pressure buildup +2015-05-28 06:00:00,62,Failure Code 4: Pressure buildup +2015-06-27 06:00:00,62,Failure Code 2: Unstable vibrations +2015-08-11 06:00:00,62,Failure Code 1: Oil level below minimum threshold +2015-12-09 06:00:00,62,Failure Code 4: Pressure buildup +2015-12-24 06:00:00,62,Failure Code 2: Unstable vibrations +2015-01-28 06:00:00,63,Failure Code 1: Oil level below minimum threshold +2015-01-28 06:00:00,63,Failure Code 2: Unstable vibrations +2015-03-14 06:00:00,63,Failure Code 4: Pressure buildup +2015-05-13 06:00:00,63,Failure Code 4: Pressure buildup +2015-05-28 06:00:00,63,Failure Code 2: Unstable vibrations +2015-07-27 06:00:00,63,Failure Code 4: Pressure buildup +2015-10-25 06:00:00,63,Failure Code 4: 
Pressure buildup +2015-11-09 06:00:00,63,Failure Code 2: Unstable vibrations +2015-01-05 06:00:00,64,Failure Code 4: Pressure buildup +2015-01-20 06:00:00,64,Failure Code 2: Unstable vibrations +2015-02-19 06:00:00,64,Failure Code 4: Pressure buildup +2015-04-05 06:00:00,64,Failure Code 2: Unstable vibrations +2015-05-05 06:00:00,64,Failure Code 4: Pressure buildup +2015-09-17 06:00:00,64,Failure Code 4: Pressure buildup +2015-10-17 06:00:00,64,Failure Code 2: Unstable vibrations +2015-12-31 06:00:00,64,Failure Code 1: Oil level below minimum threshold +2015-03-17 06:00:00,65,Failure Code 1: Oil level below minimum threshold +2015-05-31 06:00:00,65,Failure Code 1: Oil level below minimum threshold +2015-06-30 06:00:00,65,Failure Code 4: Pressure buildup +2015-08-14 06:00:00,65,Failure Code 1: Oil level below minimum threshold +2015-08-14 06:00:00,65,Failure Code 2: Unstable vibrations +2015-09-13 06:00:00,65,Failure Code 4: Pressure buildup +2015-02-18 06:00:00,66,Failure Code 1: Oil level below minimum threshold +2015-05-19 06:00:00,66,Failure Code 1: Oil level below minimum threshold +2015-05-19 06:00:00,66,Failure Code 2: Unstable vibrations +2015-10-01 06:00:00,66,Failure Code 1: Oil level below minimum threshold +2015-12-15 06:00:00,66,Failure Code 1: Oil level below minimum threshold +2015-01-16 06:00:00,67,Failure Code 2: Unstable vibrations +2015-03-17 06:00:00,67,Failure Code 2: Unstable vibrations +2015-05-31 06:00:00,67,Failure Code 4: Pressure buildup +2015-06-15 06:00:00,67,Failure Code 1: Oil level below minimum threshold +2015-07-15 06:00:00,67,Failure Code 2: Unstable vibrations +2015-08-29 06:00:00,67,Failure Code 4: Pressure buildup +2015-09-13 06:00:00,67,Failure Code 2: Unstable vibrations +2015-09-28 06:00:00,67,Failure Code 1: Oil level below minimum threshold +2015-10-13 06:00:00,67,Failure Code 2: Unstable vibrations +2015-04-02 06:00:00,68,Failure Code 2: Unstable vibrations +2015-08-15 06:00:00,68,Failure Code 2: Unstable vibrations +2015-11-28 06:00:00,68,Failure Code 1: Oil level below minimum threshold +2015-12-13 06:00:00,68,Failure Code 2: Unstable vibrations +2015-01-05 06:00:00,69,Failure Code 2: Unstable vibrations +2015-01-20 06:00:00,69,Failure Code 3: Fuse blown due to excess voltage +2015-02-04 06:00:00,69,Failure Code 4: Pressure buildup +2015-04-20 06:00:00,69,Failure Code 4: Pressure buildup +2015-05-20 06:00:00,69,Failure Code 2: Unstable vibrations +2015-08-03 06:00:00,69,Failure Code 3: Fuse blown due to excess voltage +2015-08-18 06:00:00,69,Failure Code 2: Unstable vibrations +2015-09-02 06:00:00,69,Failure Code 4: Pressure buildup +2015-10-17 06:00:00,69,Failure Code 4: Pressure buildup +2015-11-01 06:00:00,69,Failure Code 3: Fuse blown due to excess voltage +2015-01-28 06:00:00,70,Failure Code 1: Oil level below minimum threshold +2015-05-28 06:00:00,70,Failure Code 2: Unstable vibrations +2015-07-12 06:00:00,70,Failure Code 1: Oil level below minimum threshold +2015-11-09 06:00:00,70,Failure Code 1: Oil level below minimum threshold +2015-12-09 06:00:00,70,Failure Code 2: Unstable vibrations +2015-01-11 06:00:00,71,Failure Code 2: Unstable vibrations +2015-01-26 06:00:00,71,Failure Code 3: Fuse blown due to excess voltage +2015-01-26 06:00:00,71,Failure Code 4: Pressure buildup +2015-02-10 06:00:00,71,Failure Code 1: Oil level below minimum threshold +2015-02-25 06:00:00,71,Failure Code 2: Unstable vibrations +2015-03-12 06:00:00,71,Failure Code 3: Fuse blown due to excess voltage +2015-03-27 06:00:00,71,Failure Code 1: Oil level below 
minimum threshold +2015-04-26 06:00:00,71,Failure Code 2: Unstable vibrations +2015-06-10 06:00:00,71,Failure Code 4: Pressure buildup +2015-06-25 06:00:00,71,Failure Code 3: Fuse blown due to excess voltage +2015-07-25 06:00:00,71,Failure Code 4: Pressure buildup +2015-09-23 06:00:00,71,Failure Code 3: Fuse blown due to excess voltage +2015-10-08 06:00:00,71,Failure Code 4: Pressure buildup +2015-03-24 06:00:00,72,Failure Code 1: Oil level below minimum threshold +2015-09-05 06:00:00,72,Failure Code 1: Oil level below minimum threshold +2015-01-02 03:00:00,73,Failure Code 2: Unstable vibrations +2015-01-02 06:00:00,73,Failure Code 3: Fuse blown due to excess voltage +2015-02-16 06:00:00,73,Failure Code 4: Pressure buildup +2015-04-02 06:00:00,73,Failure Code 1: Oil level below minimum threshold +2015-04-17 06:00:00,73,Failure Code 4: Pressure buildup +2015-05-02 06:00:00,73,Failure Code 3: Fuse blown due to excess voltage +2015-05-17 06:00:00,73,Failure Code 1: Oil level below minimum threshold +2015-07-16 06:00:00,73,Failure Code 1: Oil level below minimum threshold +2015-08-30 06:00:00,73,Failure Code 3: Fuse blown due to excess voltage +2015-09-14 06:00:00,73,Failure Code 4: Pressure buildup +2015-11-13 06:00:00,73,Failure Code 3: Fuse blown due to excess voltage +2015-11-28 06:00:00,73,Failure Code 1: Oil level below minimum threshold +2015-01-23 06:00:00,74,Failure Code 1: Oil level below minimum threshold +2015-03-09 06:00:00,74,Failure Code 2: Unstable vibrations +2015-05-08 06:00:00,74,Failure Code 2: Unstable vibrations +2015-06-22 06:00:00,74,Failure Code 2: Unstable vibrations +2015-07-22 06:00:00,74,Failure Code 2: Unstable vibrations +2015-10-20 06:00:00,74,Failure Code 2: Unstable vibrations +2015-01-07 06:00:00,75,Failure Code 1: Oil level below minimum threshold +2015-02-06 06:00:00,75,Failure Code 4: Pressure buildup +2015-04-07 06:00:00,75,Failure Code 1: Oil level below minimum threshold +2015-05-07 06:00:00,75,Failure Code 4: Pressure buildup +2015-06-06 06:00:00,75,Failure Code 2: Unstable vibrations +2015-07-21 06:00:00,75,Failure Code 2: Unstable vibrations +2015-08-20 06:00:00,75,Failure Code 2: Unstable vibrations +2015-09-04 06:00:00,75,Failure Code 4: Pressure buildup +2015-10-19 06:00:00,75,Failure Code 2: Unstable vibrations +2015-01-18 06:00:00,76,Failure Code 3: Fuse blown due to excess voltage +2015-02-02 06:00:00,76,Failure Code 1: Oil level below minimum threshold +2015-03-04 06:00:00,76,Failure Code 2: Unstable vibrations +2015-03-19 06:00:00,76,Failure Code 1: Oil level below minimum threshold +2015-04-03 06:00:00,76,Failure Code 3: Fuse blown due to excess voltage +2015-05-03 06:00:00,76,Failure Code 2: Unstable vibrations +2015-08-16 06:00:00,76,Failure Code 3: Fuse blown due to excess voltage +2015-09-30 06:00:00,76,Failure Code 3: Fuse blown due to excess voltage +2015-11-29 06:00:00,76,Failure Code 3: Fuse blown due to excess voltage +2015-02-05 06:00:00,78,Failure Code 4: Pressure buildup +2015-02-20 06:00:00,78,Failure Code 2: Unstable vibrations +2015-03-07 06:00:00,78,Failure Code 1: Oil level below minimum threshold +2015-06-05 06:00:00,78,Failure Code 4: Pressure buildup +2015-07-05 06:00:00,78,Failure Code 1: Oil level below minimum threshold +2015-07-20 06:00:00,78,Failure Code 4: Pressure buildup +2015-09-03 06:00:00,78,Failure Code 4: Pressure buildup +2015-11-02 06:00:00,78,Failure Code 4: Pressure buildup +2015-12-17 06:00:00,78,Failure Code 2: Unstable vibrations +2015-01-02 03:00:00,79,Failure Code 1: Oil level below minimum threshold 
+2015-03-03 06:00:00,79,Failure Code 2: Unstable vibrations +2015-03-18 06:00:00,79,Failure Code 4: Pressure buildup +2015-04-02 06:00:00,79,Failure Code 1: Oil level below minimum threshold +2015-05-17 06:00:00,79,Failure Code 4: Pressure buildup +2015-06-16 06:00:00,79,Failure Code 2: Unstable vibrations +2015-07-01 06:00:00,79,Failure Code 1: Oil level below minimum threshold +2015-09-14 06:00:00,79,Failure Code 2: Unstable vibrations +2015-09-29 06:00:00,79,Failure Code 4: Pressure buildup +2015-12-13 06:00:00,79,Failure Code 4: Pressure buildup +2015-01-02 03:00:00,80,Failure Code 2: Unstable vibrations +2015-07-12 06:00:00,80,Failure Code 2: Unstable vibrations +2015-09-10 06:00:00,80,Failure Code 2: Unstable vibrations +2015-11-09 06:00:00,80,Failure Code 2: Unstable vibrations +2015-12-09 06:00:00,80,Failure Code 2: Unstable vibrations +2015-01-02 06:00:00,81,Failure Code 1: Oil level below minimum threshold +2015-01-17 06:00:00,81,Failure Code 2: Unstable vibrations +2015-02-16 06:00:00,81,Failure Code 1: Oil level below minimum threshold +2015-03-03 06:00:00,81,Failure Code 2: Unstable vibrations +2015-04-17 06:00:00,81,Failure Code 2: Unstable vibrations +2015-06-16 06:00:00,81,Failure Code 2: Unstable vibrations +2015-10-14 06:00:00,81,Failure Code 2: Unstable vibrations +2015-12-13 06:00:00,81,Failure Code 2: Unstable vibrations +2015-01-28 06:00:00,82,Failure Code 1: Oil level below minimum threshold +2015-03-29 06:00:00,82,Failure Code 1: Oil level below minimum threshold +2015-05-13 06:00:00,82,Failure Code 1: Oil level below minimum threshold +2015-12-09 06:00:00,82,Failure Code 2: Unstable vibrations +2015-01-02 03:00:00,83,Failure Code 4: Pressure buildup +2015-01-16 06:00:00,83,Failure Code 4: Pressure buildup +2015-03-17 06:00:00,83,Failure Code 4: Pressure buildup +2015-05-16 06:00:00,83,Failure Code 4: Pressure buildup +2015-05-31 06:00:00,83,Failure Code 2: Unstable vibrations +2015-07-15 06:00:00,83,Failure Code 1: Oil level below minimum threshold +2015-07-15 06:00:00,83,Failure Code 4: Pressure buildup +2015-08-14 06:00:00,83,Failure Code 2: Unstable vibrations +2015-08-29 06:00:00,83,Failure Code 1: Oil level below minimum threshold +2015-09-13 06:00:00,83,Failure Code 2: Unstable vibrations +2015-10-13 06:00:00,83,Failure Code 1: Oil level below minimum threshold +2015-11-27 06:00:00,83,Failure Code 4: Pressure buildup +2015-12-27 06:00:00,83,Failure Code 1: Oil level below minimum threshold +2015-12-27 06:00:00,83,Failure Code 2: Unstable vibrations +2015-01-05 06:00:00,84,Failure Code 2: Unstable vibrations +2015-02-04 06:00:00,84,Failure Code 2: Unstable vibrations +2015-02-19 06:00:00,84,Failure Code 1: Oil level below minimum threshold +2015-04-05 06:00:00,84,Failure Code 2: Unstable vibrations +2015-07-04 06:00:00,84,Failure Code 1: Oil level below minimum threshold +2015-09-02 06:00:00,84,Failure Code 2: Unstable vibrations +2015-10-02 06:00:00,84,Failure Code 1: Oil level below minimum threshold +2015-01-15 06:00:00,85,Failure Code 2: Unstable vibrations +2015-01-30 06:00:00,85,Failure Code 3: Fuse blown due to excess voltage +2015-02-14 06:00:00,85,Failure Code 4: Pressure buildup +2015-05-30 06:00:00,85,Failure Code 3: Fuse blown due to excess voltage +2015-06-29 06:00:00,85,Failure Code 1: Oil level below minimum threshold +2015-08-28 06:00:00,85,Failure Code 4: Pressure buildup +2015-09-12 06:00:00,85,Failure Code 3: Fuse blown due to excess voltage +2015-10-12 06:00:00,85,Failure Code 2: Unstable vibrations +2015-11-11 06:00:00,85,Failure Code 2: 
Unstable vibrations +2015-12-11 06:00:00,85,Failure Code 3: Fuse blown due to excess voltage +2015-12-26 06:00:00,85,Failure Code 4: Pressure buildup +2015-01-02 03:00:00,86,Failure Code 1: Oil level below minimum threshold +2015-01-23 06:00:00,86,Failure Code 2: Unstable vibrations +2015-09-05 06:00:00,86,Failure Code 1: Oil level below minimum threshold +2015-11-19 06:00:00,86,Failure Code 2: Unstable vibrations +2015-01-02 03:00:00,87,Failure Code 1: Oil level below minimum threshold +2015-03-13 06:00:00,87,Failure Code 3: Fuse blown due to excess voltage +2015-03-28 06:00:00,87,Failure Code 1: Oil level below minimum threshold +2015-04-27 06:00:00,87,Failure Code 2: Unstable vibrations +2015-06-26 06:00:00,87,Failure Code 3: Fuse blown due to excess voltage +2015-08-10 06:00:00,87,Failure Code 1: Oil level below minimum threshold +2015-08-25 06:00:00,87,Failure Code 3: Fuse blown due to excess voltage +2015-10-24 06:00:00,87,Failure Code 3: Fuse blown due to excess voltage +2015-12-23 06:00:00,87,Failure Code 3: Fuse blown due to excess voltage +2015-01-19 06:00:00,88,Failure Code 4: Pressure buildup +2015-02-03 06:00:00,88,Failure Code 2: Unstable vibrations +2015-03-05 06:00:00,88,Failure Code 2: Unstable vibrations +2015-04-04 06:00:00,88,Failure Code 2: Unstable vibrations +2015-07-03 06:00:00,88,Failure Code 1: Oil level below minimum threshold +2015-09-01 06:00:00,88,Failure Code 2: Unstable vibrations +2015-09-01 06:00:00,88,Failure Code 4: Pressure buildup +2015-11-30 06:00:00,88,Failure Code 2: Unstable vibrations +2015-11-30 06:00:00,88,Failure Code 4: Pressure buildup +2015-12-30 06:00:00,88,Failure Code 1: Oil level below minimum threshold +2015-02-07 06:00:00,89,Failure Code 1: Oil level below minimum threshold +2015-03-24 06:00:00,89,Failure Code 4: Pressure buildup +2015-05-23 06:00:00,89,Failure Code 4: Pressure buildup +2015-06-22 06:00:00,89,Failure Code 1: Oil level below minimum threshold +2015-07-22 06:00:00,89,Failure Code 2: Unstable vibrations +2015-08-21 06:00:00,89,Failure Code 2: Unstable vibrations +2015-11-19 06:00:00,89,Failure Code 2: Unstable vibrations +2015-01-20 06:00:00,90,Failure Code 2: Unstable vibrations +2015-02-19 06:00:00,90,Failure Code 2: Unstable vibrations +2015-03-21 06:00:00,90,Failure Code 3: Fuse blown due to excess voltage +2015-04-05 06:00:00,90,Failure Code 1: Oil level below minimum threshold +2015-05-20 06:00:00,90,Failure Code 2: Unstable vibrations +2015-06-04 06:00:00,90,Failure Code 3: Fuse blown due to excess voltage +2015-08-18 06:00:00,90,Failure Code 1: Oil level below minimum threshold +2015-08-18 06:00:00,90,Failure Code 3: Fuse blown due to excess voltage +2015-10-02 06:00:00,90,Failure Code 1: Oil level below minimum threshold +2015-11-01 06:00:00,90,Failure Code 3: Fuse blown due to excess voltage +2015-12-01 06:00:00,90,Failure Code 2: Unstable vibrations +2015-12-31 06:00:00,90,Failure Code 1: Oil level below minimum threshold +2015-02-01 06:00:00,91,Failure Code 2: Unstable vibrations +2015-04-02 06:00:00,91,Failure Code 1: Oil level below minimum threshold +2015-06-16 06:00:00,91,Failure Code 1: Oil level below minimum threshold +2015-07-01 06:00:00,91,Failure Code 2: Unstable vibrations +2015-09-14 06:00:00,91,Failure Code 1: Oil level below minimum threshold +2015-10-29 06:00:00,91,Failure Code 4: Pressure buildup +2015-02-19 06:00:00,92,Failure Code 3: Fuse blown due to excess voltage +2015-04-20 06:00:00,92,Failure Code 1: Oil level below minimum threshold +2015-04-20 06:00:00,92,Failure Code 2: Unstable 
vibrations +2015-05-20 06:00:00,92,Failure Code 3: Fuse blown due to excess voltage +2015-07-04 06:00:00,92,Failure Code 3: Fuse blown due to excess voltage +2015-08-18 06:00:00,92,Failure Code 3: Fuse blown due to excess voltage +2015-09-02 06:00:00,92,Failure Code 2: Unstable vibrations +2015-10-02 06:00:00,92,Failure Code 2: Unstable vibrations +2015-12-01 06:00:00,92,Failure Code 3: Fuse blown due to excess voltage +2015-03-17 06:00:00,93,Failure Code 4: Pressure buildup +2015-04-01 06:00:00,93,Failure Code 2: Unstable vibrations +2015-04-16 06:00:00,93,Failure Code 1: Oil level below minimum threshold +2015-05-01 06:00:00,93,Failure Code 2: Unstable vibrations +2015-05-16 06:00:00,93,Failure Code 4: Pressure buildup +2015-06-30 06:00:00,93,Failure Code 1: Oil level below minimum threshold +2015-01-02 03:00:00,94,Failure Code 4: Pressure buildup +2015-01-08 06:00:00,94,Failure Code 2: Unstable vibrations +2015-02-22 06:00:00,94,Failure Code 4: Pressure buildup +2015-03-09 06:00:00,94,Failure Code 3: Fuse blown due to excess voltage +2015-05-08 06:00:00,94,Failure Code 3: Fuse blown due to excess voltage +2015-05-23 06:00:00,94,Failure Code 4: Pressure buildup +2015-08-21 06:00:00,94,Failure Code 4: Pressure buildup +2015-10-20 06:00:00,94,Failure Code 3: Fuse blown due to excess voltage +2015-11-19 06:00:00,94,Failure Code 2: Unstable vibrations +2015-11-19 06:00:00,94,Failure Code 4: Pressure buildup +2015-12-19 06:00:00,94,Failure Code 2: Unstable vibrations +2015-02-19 06:00:00,95,Failure Code 4: Pressure buildup +2015-03-21 06:00:00,95,Failure Code 2: Unstable vibrations +2015-04-20 06:00:00,95,Failure Code 3: Fuse blown due to excess voltage +2015-05-05 06:00:00,95,Failure Code 1: Oil level below minimum threshold +2015-06-04 06:00:00,95,Failure Code 3: Fuse blown due to excess voltage +2015-08-18 06:00:00,95,Failure Code 4: Pressure buildup +2015-09-17 06:00:00,95,Failure Code 3: Fuse blown due to excess voltage +2015-10-17 06:00:00,95,Failure Code 4: Pressure buildup +2015-12-31 06:00:00,95,Failure Code 1: Oil level below minimum threshold +2015-02-17 06:00:00,96,Failure Code 3: Fuse blown due to excess voltage +2015-05-03 06:00:00,96,Failure Code 3: Fuse blown due to excess voltage +2015-07-17 06:00:00,96,Failure Code 3: Fuse blown due to excess voltage +2015-08-31 06:00:00,96,Failure Code 3: Fuse blown due to excess voltage +2015-12-29 06:00:00,96,Failure Code 3: Fuse blown due to excess voltage +2015-01-24 06:00:00,97,Failure Code 3: Fuse blown due to excess voltage +2015-01-24 06:00:00,97,Failure Code 4: Pressure buildup +2015-05-09 06:00:00,97,Failure Code 4: Pressure buildup +2015-07-08 06:00:00,97,Failure Code 3: Fuse blown due to excess voltage +2015-07-23 06:00:00,97,Failure Code 4: Pressure buildup +2015-09-06 06:00:00,97,Failure Code 3: Fuse blown due to excess voltage +2015-10-21 06:00:00,97,Failure Code 2: Unstable vibrations +2015-11-20 06:00:00,97,Failure Code 4: Pressure buildup +2015-01-30 06:00:00,98,Failure Code 3: Fuse blown due to excess voltage +2015-02-14 06:00:00,98,Failure Code 1: Oil level below minimum threshold +2015-03-01 06:00:00,98,Failure Code 4: Pressure buildup +2015-04-15 06:00:00,98,Failure Code 3: Fuse blown due to excess voltage +2015-04-30 06:00:00,98,Failure Code 2: Unstable vibrations +2015-04-30 06:00:00,98,Failure Code 4: Pressure buildup +2015-05-15 06:00:00,98,Failure Code 1: Oil level below minimum threshold +2015-05-30 06:00:00,98,Failure Code 3: Fuse blown due to excess voltage +2015-06-14 06:00:00,98,Failure Code 2: Unstable 
vibrations +2015-06-29 06:00:00,98,Failure Code 1: Oil level below minimum threshold +2015-06-29 06:00:00,98,Failure Code 4: Pressure buildup +2015-07-29 06:00:00,98,Failure Code 3: Fuse blown due to excess voltage +2015-08-13 06:00:00,98,Failure Code 1: Oil level below minimum threshold +2015-08-28 06:00:00,98,Failure Code 2: Unstable vibrations +2015-10-27 06:00:00,98,Failure Code 2: Unstable vibrations +2015-11-26 06:00:00,98,Failure Code 3: Fuse blown due to excess voltage +2015-01-02 03:00:00,99,Failure Code 3: Fuse blown due to excess voltage +2015-01-18 06:00:00,99,Failure Code 4: Pressure buildup +2015-02-02 06:00:00,99,Failure Code 1: Oil level below minimum threshold +2015-02-17 06:00:00,99,Failure Code 2: Unstable vibrations +2015-03-04 06:00:00,99,Failure Code 3: Fuse blown due to excess voltage +2015-03-19 06:00:00,99,Failure Code 4: Pressure buildup +2015-04-03 06:00:00,99,Failure Code 2: Unstable vibrations +2015-04-18 06:00:00,99,Failure Code 3: Fuse blown due to excess voltage +2015-05-03 06:00:00,99,Failure Code 2: Unstable vibrations +2015-05-18 06:00:00,99,Failure Code 1: Oil level below minimum threshold +2015-06-02 06:00:00,99,Failure Code 4: Pressure buildup +2015-07-02 06:00:00,99,Failure Code 2: Unstable vibrations +2015-07-17 06:00:00,99,Failure Code 3: Fuse blown due to excess voltage +2015-08-01 06:00:00,99,Failure Code 2: Unstable vibrations +2015-08-31 06:00:00,99,Failure Code 3: Fuse blown due to excess voltage +2015-10-15 06:00:00,99,Failure Code 2: Unstable vibrations +2015-10-30 06:00:00,99,Failure Code 4: Pressure buildup +2015-11-29 06:00:00,99,Failure Code 3: Fuse blown due to excess voltage +2015-12-14 06:00:00,99,Failure Code 4: Pressure buildup +2015-02-12 06:00:00,100,Failure Code 1: Oil level below minimum threshold +2015-09-10 06:00:00,100,Failure Code 1: Oil level below minimum threshold +2015-12-09 06:00:00,100,Failure Code 2: Unstable vibrations diff --git a/RetrievalAugmentedGeneration/examples/csv_rag/PdM_machines.csv b/RetrievalAugmentedGeneration/examples/csv_rag/PdM_machines.csv new file mode 100644 index 000000000..54fb08a4b --- /dev/null +++ b/RetrievalAugmentedGeneration/examples/csv_rag/PdM_machines.csv @@ -0,0 +1,101 @@ +machineID,model,age +1,model3,18 Months +2,model4,7 Months +3,model3,8 Months +4,model3,7 Months +5,model3,2 Months +6,model3,7 Months +7,model3,20 Months +8,model3,16 Months +9,model4,7 Months +10,model3,10 Months +11,model2,6 Months +12,model3,9 Months +13,model1,15 Months +14,model3,1 Months +15,model3,14 Months +16,model1,3 Months +17,model1,14 Months +18,model3,15 Months +19,model3,17 Months +20,model2,16 Months +21,model2,14 Months +22,model1,14 Months +23,model1,17 Months +24,model1,20 Months +25,model4,16 Months +26,model3,3 Months +27,model3,9 Months +28,model4,1 Months +29,model4,3 Months +30,model3,20 Months +31,model1,11 Months +32,model4,15 Months +33,model3,14 Months +34,model4,10 Months +35,model1,17 Months +36,model4,5 Months +37,model1,16 Months +38,model4,15 Months +39,model4,0 Months +40,model2,4 Months +41,model4,9 Months +42,model1,7 Months +43,model3,14 Months +44,model4,7 Months +45,model3,14 Months +46,model4,10 Months +47,model2,6 Months +48,model4,10 Months +49,model1,15 Months +50,model4,4 Months +51,model4,19 Months +52,model3,14 Months +53,model3,5 Months +54,model2,10 Months +55,model3,17 Months +56,model1,10 Months +57,model4,10 Months +58,model1,5 Months +59,model3,17 Months +60,model4,3 Months +61,model4,2 Months +62,model4,20 Months +63,model4,14 Months +64,model3,20 Months 
+65,model3,15 Months +66,model4,6 Months +67,model4,14 Months +68,model3,10 Months +69,model2,19 Months +70,model3,9 Months +71,model2,18 Months +72,model4,2 Months +73,model2,20 Months +74,model4,4 Months +75,model3,19 Months +76,model2,10 Months +77,model4,12 Months +78,model4,19 Months +79,model3,14 Months +80,model3,6 Months +81,model4,1 Months +82,model3,11 Months +83,model4,18 Months +84,model3,9 Months +85,model1,16 Months +86,model3,2 Months +87,model2,12 Months +88,model4,14 Months +89,model3,17 Months +90,model2,2 Months +91,model4,17 Months +92,model1,2 Months +93,model3,18 Months +94,model2,18 Months +95,model2,18 Months +96,model2,10 Months +97,model2,14 Months +98,model2,20 Months +99,model1,14 Months +100,model4,5 Months diff --git a/RetrievalAugmentedGeneration/examples/csv_rag/__init__.py b/RetrievalAugmentedGeneration/examples/csv_rag/__init__.py new file mode 100644 index 000000000..a08b2c204 --- /dev/null +++ b/RetrievalAugmentedGeneration/examples/csv_rag/__init__.py @@ -0,0 +1,14 @@ +# SPDX-FileCopyrightText: Copyright (c) 2024 NVIDIA CORPORATION & AFFILIATES. All rights reserved. +# SPDX-License-Identifier: Apache-2.0 +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/RetrievalAugmentedGeneration/examples/csv_rag/chains.py b/RetrievalAugmentedGeneration/examples/csv_rag/chains.py new file mode 100644 index 000000000..dfda64c52 --- /dev/null +++ b/RetrievalAugmentedGeneration/examples/csv_rag/chains.py @@ -0,0 +1,199 @@ +# SPDX-FileCopyrightText: Copyright (c) 2024 NVIDIA CORPORATION & AFFILIATES. All rights reserved. +# SPDX-License-Identifier: Apache-2.0 +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
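A note on the two CSVs added above: PdM_machines.csv stores the age column as text such as "18 Months", and the csv_prompt_config.yaml later in this patch tells the agent to strip the unit before sorting or averaging. The following is a minimal standalone sketch of that preprocessing, not part of the patch itself, assuming the file is read from the repository path used in this diff:

import pandas as pd

# Load the machines table added above (header: machineID,model,age).
machines = pd.read_csv("RetrievalAugmentedGeneration/examples/csv_rag/PdM_machines.csv")

# Keep only the numeric part of values like "18 Months" so aggregations work,
# mirroring the instruction given in csv_prompt_config.yaml.
machines["age"] = machines["age"].str.extract(r"(\d+)").astype(int)

# Example aggregation the agent is expected to handle: average machine age per model.
print(machines.groupby("model")["age"].mean())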
+ +"""LLM Chains for executing Retrival Augmented Generation.""" +import logging +import os +from typing import Generator, List + +import pandas as pd +from langchain.prompts import ( + ChatPromptTemplate, + HumanMessagePromptTemplate, + PromptTemplate, + SystemMessagePromptTemplate, +) +from langchain_core.output_parsers.string import StrOutputParser +from pandasai import Agent as PandasAI_Agent +from pandasai.responses.response_parser import ResponseParser + +from integrations.pandasai.llms.nv_aiplay import NVIDIA as PandasAI_NVIDIA +from RetrievalAugmentedGeneration.common.base import BaseExample +from RetrievalAugmentedGeneration.common.utils import get_config, get_llm + +# pylint: disable=no-name-in-module, disable=import-error +from RetrievalAugmentedGeneration.example.csv_utils import ( + extract_df_desc, + get_prompt_params, + parse_prompt_config, +) + +logger = logging.getLogger(__name__) +settings = get_config() + + +class PandasDataFrame(ResponseParser): + """Returns Pandas Dataframe instead of SmartDataFrame""" + + def __init__(self, context) -> None: + super().__init__(context) + + def format_dataframe(self, result): + return result["value"] + + +class CSVChatbot(BaseExample): + """RAG example showcasing CSV parsing using Pandas AI Agent""" + + def read_and_concatenate_csv(self, file_paths_txt): + """Reads CSVs and concatenates their data""" + + with open(file_paths_txt, "r", encoding="UTF-8") as file: + file_paths = file.read().splitlines() + + concatenated_df = pd.DataFrame() + reference_columns = None + reference_file = None + + for i, path in enumerate(file_paths): + df = pd.read_csv(path) + + if i == 0: + reference_columns = df.columns + concatenated_df = df + reference_file = path + else: + if not df.columns.equals(reference_columns): + raise ValueError( + f"Columns of the file {path} do not match the reference columns of {reference_file} file." 
+ ) + concatenated_df = pd.concat([concatenated_df, df], ignore_index=True) + + return concatenated_df + + def ingest_docs(self, data_dir: str, filename: str): + """Ingest documents to the VectorDB.""" + + if not data_dir.endswith(".csv"): + raise ValueError(f"{data_dir} is not a valid CSV file") + + with open("ingested_csv_files.txt", "a", encoding="UTF-8") as f: + f.write(data_dir + "\n") + + self.read_and_concatenate_csv(file_paths_txt="ingested_csv_files.txt") + + logger.info("Document %s ingested successfully", filename) + + def llm_chain( + self, query: str, chat_history: List["Message"], **kwargs + ) -> Generator[str, None, None]: + """Execute a simple LLM chain using the components defined above.""" + + logger.info("Using llm to generate response directly without knowledge base.") + + system_message = [("system", get_config().prompts.chat_template)] + conversation_history = [(msg.role, msg.content) for msg in chat_history] + user_input = [("user", "{input}")] + + # Checking if conversation_history is not None and not empty + prompt = ChatPromptTemplate.from_messages( + system_message + conversation_history + user_input + ) if conversation_history else ChatPromptTemplate.from_messages( + system_message + user_input + ) + + logger.info("Using prompt for response: %s", prompt) + + chain = prompt | get_llm(**kwargs) | StrOutputParser() + return chain.stream({"input": query}) + + def rag_chain(self, query: str, chat_history: List["Message"], **kwargs) -> Generator[str, None, None]: + """Execute a Retrieval Augmented Generation chain using the components defined above.""" + + logger.info("Using rag to generate response from document") + llm = get_llm(**kwargs) + + if not os.path.exists("ingested_csv_files.txt"): + return iter(["No CSV file ingested"]) + + df = self.read_and_concatenate_csv(file_paths_txt="ingested_csv_files.txt") + df = df.fillna(0) + + df_desc = extract_df_desc(df) + prompt_config = parse_prompt_config( + "RetrievalAugmentedGeneration/example/csv_prompt_config.yaml" + ) + + logger.info(prompt_config.get("csv_prompts", [])) + data_retrieval_prompt_params = get_prompt_params( + prompt_config.get("csv_prompts", []) + ) + llm_data_retrieval = PandasAI_NVIDIA(temperature=0.2, model=settings.llm.model_name_pandas_ai) + + config_data_retrieval = { + "llm": llm_data_retrieval, + "response_parser": PandasDataFrame, + } + agent_data_retrieval = PandasAI_Agent( + [df], config=config_data_retrieval, memory_size=20 + ) + data_retrieval_prompt = ChatPromptTemplate( + messages=[ + SystemMessagePromptTemplate.from_template( + prompt_config.get("csv_data_retrieval_template", []) + ), + HumanMessagePromptTemplate.from_template("{query}"), + ], + input_variables=["description", "instructions", "data_frame", "query"], + ) + conversation_history = [(msg.role, msg.content) for msg in chat_history] + conversation_history_messages = ChatPromptTemplate.from_messages(conversation_history).messages + # Insert conversation_history between data_retrieval_prompt's SystemMessage & HumanMessage (query) + if conversation_history_messages: + data_retrieval_prompt.messages[1:1] = conversation_history_messages + + result_df = agent_data_retrieval.chat( + data_retrieval_prompt.format_prompt( + description=data_retrieval_prompt_params.get("description"), + instructions=data_retrieval_prompt_params.get("instructions"), + data_frame=df_desc, + query=query, + ).to_string() + ) + logger.info("Result Data Frame: %s", result_df) + if not result_df: + logger.warning("Retrieval failed to get any relevant context") 
+ return iter(["No response generated from LLM, make sure your query is relavent to the ingested document."]) + + response_prompt_template = PromptTemplate( + template=prompt_config.get("csv_response_template", []), + input_variables=["query", "data"], + ) + response_prompt = response_prompt_template.format(query=query, data=result_df) + + logger.info("Using prompt for response: %s", response_prompt) + + chain = response_prompt_template | llm | StrOutputParser() + return chain.stream({"query": query, "data": result_df}) + + def get_documents(self): + """Retrieves filenames stored in the vector store.""" + decoded_filenames = [] + logger.error("get_documents not implemented") + return decoded_filenames + + def delete_documents(self, filenames: List[str]): + """Delete documents from the vector index.""" + logger.error("delete_documents not implemented") \ No newline at end of file diff --git a/RetrievalAugmentedGeneration/examples/csv_rag/csv_prompt_config.yaml b/RetrievalAugmentedGeneration/examples/csv_rag/csv_prompt_config.yaml new file mode 100644 index 000000000..4d8b84c04 --- /dev/null +++ b/RetrievalAugmentedGeneration/examples/csv_rag/csv_prompt_config.yaml @@ -0,0 +1,53 @@ +prompts: + + csv_data_retrieval_template: | + Provide a functional and accurate code based on the provided dataframe for the user's query. + + You have to mainly write python code that uses pandas library for extracting and processing information from the dataframe. + + The data you are provided contains information about: {description} + + Instructions: + - CSV file is already parsed, don't add any code for parsing csv. + - dfs is a list containing df a pandas dataframe. Always use the first entry from the list like df = dfs[0]. + {instructions} + + + csv_response_template: | + Provide a respone to user's queries based on the given Data point. \ + You are provided with the required data value and your job is just to form a natural language response based on the query and the data. \ + + Do not add anything extra in the response apart from the data. + + A simplest response can start with : + Here is what I found based on the data + + Althought this is a very basic response, you need to enhance it a bit. + + Query: + {query} + + Data: + {data} + + Response: + + csv_prompts: + - name: PdM_machines + description: Model type & age of the Machines + instructions: | + - If any column time period like hours, days, months or years, extract the numeric value for processing like sorting or calulating mean: + - For example if there is a age column with values like 7 months, 14 months etc, numeric value can be extracted like + df['age'] = df['age'].str.extract('(\d+)').astype(int) + - name: PdM_errors + description: These are errors encountered by the machines while in operating condition. Since, these errors don't shut down the machines, these are not considered as failures. The error date and times are rounded to the closest hour since the telemetry data is collected at an hourly rate. + instructions: | + - Convert the datetime column to pandas datetime like df['datetime'] = pd.to_datetime(df['datetime']) + - Use pandas datatime only for filtering date time columns based on date or time. Like df['datetime'].dt.day + - If year is not mentioned explicitly in queries containing dates, then consider the year to be 2015 by default. + - name: PdM_failures + description: Each record represents replacement of a component due to failure. This data is a subset of Maintenance data. 
This data is rounded to the closest hour since the telemetry data is collected at an hourly rate. + instructions: | + - Convert the datetime column to pandas datetime like df['datetime'] = pd.to_datetime(df['datetime']) + - Use pandas datetime only for filtering date time columns based on date or time. Like df['datetime'].dt.day + - If year is not mentioned explicitly in queries containing dates, then consider the year to be 2015 by default. \ No newline at end of file diff --git a/RetrievalAugmentedGeneration/examples/csv_rag/csv_utils.py b/RetrievalAugmentedGeneration/examples/csv_rag/csv_utils.py new file mode 100644 index 000000000..256728cdb --- /dev/null +++ b/RetrievalAugmentedGeneration/examples/csv_rag/csv_utils.py @@ -0,0 +1,99 @@ +# SPDX-FileCopyrightText: Copyright (c) 2024 NVIDIA CORPORATION & AFFILIATES. All rights reserved. +# SPDX-License-Identifier: Apache-2.0 +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +""" Module to provide utility functions for CSV RAG example""" + +import os +import json +from typing import Dict, List + +import yaml + + +def extract_df_desc(df) -> str: + """ + Convert a pandas DataFrame to a string with column names and up to 3 random rows. + + Args: + df (pandas.DataFrame): The DataFrame to convert. + + Returns: + str: A string representation of the DataFrame. + """ + column_names = ", ".join(df.columns) + sample_rows = df.sample(min(3, len(df))) + rows_str = sample_rows.to_string(header=False, index=False) + result = column_names + "\n" + rows_str + return result + + +def parse_prompt_config(config_path: str) -> Dict: + "Parses the CSV YAML config and returns the 'prompts' section as a dict" + # Check if the file exists + if not os.path.isfile(config_path): + raise FileNotFoundError(f"The file {config_path} does not exist") + + try: + with open(config_path, "r", encoding="UTF-8") as file: + data = yaml.safe_load(file) + + # Check if the expected key 'prompts' is in the data + if "prompts" not in data or not isinstance(data["prompts"], dict): + raise ValueError( + "Invalid YAML structure. Expected a 'prompts' key containing a dictionary." + ) + + env_prompts = None + if "CSV_PROMPTS" in os.environ: + try: + env_prompts = json.loads(os.environ["CSV_PROMPTS"]) + if env_prompts is not None: + data["prompts"]["csv_prompts"].extend(env_prompts["csv_prompts"]) + except Exception as e: + print(f"Exception in parsing CSV prompt from environment variable {e}") + + # return the dict + return data["prompts"] + except yaml.YAMLError as e: + raise ValueError(f"Error parsing YAML file: {e}") + + +def get_prompt_params(prompt_list: List) -> Dict[str, str]: + """ + Takes a list of prompt dictionaries and returns the 'description' and 'instructions' + of the entry whose 'name' matches the CSV_NAME environment variable.
+ """ + csv_name = os.getenv("CSV_NAME") + + # Check if the environment variable is not found + if csv_name is None: + raise Exception("Environment variable CSV_NAME not found.") + + # Check if the environment variable is set to an empty string + if csv_name == "": + raise ValueError("Environment variable CSV_NAME is set to an empty string.") + + if not prompt_list: + raise ValueError("Config Prompt list is empty") + + for prompt in prompt_list: + if csv_name == prompt.get("name"): + print(f"Using prompt for {csv_name}") + return { + "description": prompt.get("description"), + "instructions": prompt.get("instructions"), + } + + return {} diff --git a/RetrievalAugmentedGeneration/examples/csv_rag/requirements.txt b/RetrievalAugmentedGeneration/examples/csv_rag/requirements.txt new file mode 100644 index 000000000..cf13dfa96 --- /dev/null +++ b/RetrievalAugmentedGeneration/examples/csv_rag/requirements.txt @@ -0,0 +1,3 @@ +pandas +pandasai==1.5.13 +numexpr \ No newline at end of file diff --git a/RetrievalAugmentedGeneration/examples/developer_rag/chains.py b/RetrievalAugmentedGeneration/examples/developer_rag/chains.py index a3184bc08..21fd3e972 100644 --- a/RetrievalAugmentedGeneration/examples/developer_rag/chains.py +++ b/RetrievalAugmentedGeneration/examples/developer_rag/chains.py @@ -46,8 +46,10 @@ logger = logging.getLogger(__name__) +text_splitter = None class QAChatbot(BaseExample): + def ingest_docs(self, data_dir: str, filename: str): """Ingest documents to the VectorDB.""" @@ -73,9 +75,14 @@ def ingest_docs(self, data_dir: str, filename: str): for document in documents: document.metadata = {"filename": encoded_filename} + document.excluded_embed_metadata_keys = ["filename", "page_label"] index = get_vector_index() - node_parser = LangchainNodeParser(get_text_splitter()) + + global text_splitter + if not text_splitter: + text_splitter = get_text_splitter() + node_parser = LangchainNodeParser(text_splitter) nodes = node_parser.get_nodes_from_documents(documents) index.insert_nodes(nodes) logger.info(f"Document {filename} ingested successfully") @@ -83,48 +90,57 @@ def ingest_docs(self, data_dir: str, filename: str): logger.error(f"Failed to ingest document due to exception {e}") raise ValueError("Failed to upload document. 
Please upload an unstructured text document.") - def llm_chain(self, context: str, question: str, num_tokens: int) -> Generator[str, None, None]: + def get_documents(self): + """Retrieves filenames stored in the vector store.""" + logger.warning("get documents is not supported in developer rag") + return [] + + def delete_documents(self, filenames: List[str]): + """Delete documents from the vector index.""" + logger.warning("Delete documents is not supported in developer rag") + + def llm_chain(self, query: str, chat_history: List["Message"], **kwargs) -> Generator[str, None, None]: """Execute a simple LLM chain using the components defined above.""" logger.info("Using llm to generate response directly without knowledge base.") - set_service_context() + set_service_context(**kwargs) + # TODO Include chat_history prompt = get_config().prompts.chat_template.format( - context_str=context, query_str=question + context_str="", query_str=query ) logger.info(f"Prompt used for response generation: {prompt}") - llm = LangChainLLM(get_llm()) - response = llm.stream_complete(prompt, tokens=num_tokens) + llm = LangChainLLM(get_llm(**kwargs)) + response = llm.stream_complete(prompt, tokens=kwargs.get('max_tokens', None)) gen_response = (resp.delta for resp in response) return gen_response - def rag_chain(self, prompt: str, num_tokens: int) -> Generator[str, None, None]: + def rag_chain(self, query: str, chat_history: List["Message"], **kwargs) -> Generator[str, None, None]: """Execute a Retrieval Augmented Generation chain using the components defined above.""" logger.info("Using rag to generate response from document") - set_service_context() - llm = LangChainLLM(get_llm()) + set_service_context(**kwargs) - try: - if get_config().llm.model_engine == "triton-trt-llm" or get_config().llm.model_engine == "nemo-infer": - llm.llm.tokens = num_tokens # type: ignore - else: - llm.llm.max_tokens = num_tokens - except Exception as e: - logger.error(f"Exception in setting llm tokens: {e}") - - retriever = get_doc_retriever(num_nodes=4) + retriever = get_doc_retriever(num_nodes=get_config().retriever.top_k) qa_template = Prompt(get_config().prompts.rag_template) - + logger.info(f"Prompt used for response generation: {qa_template}") + + # Handling Retrieval failure + nodes = retriever.retrieve(query) + if not nodes: + logger.warning("Retrieval failed to get any relevant context") + return iter(["No response generated from LLM, make sure your query is relevant to the ingested document."]) + + # TODO Include chat_history query_engine = RetrieverQueryEngine.from_args( retriever, text_qa_template=qa_template, node_postprocessors=[LimitRetrievedNodesLength()], streaming=True, ) - response = query_engine.query(prompt) + response = query_engine.query(query) # Properly handle an empty response if isinstance(response, StreamingResponse): @@ -137,7 +153,7 @@ def document_search(self, content: str, num_docs: int) -> List[Dict[str, Any]]: """Search for the most relevant documents for the given search parameters.""" try: - retriever = get_doc_retriever(num_nodes=num_docs) + retriever = get_doc_retriever(num_nodes=get_config().retriever.top_k) nodes = retriever.retrieve(content) output = [] for node in nodes: diff --git a/RetrievalAugmentedGeneration/examples/multi_turn_rag/chains.py b/RetrievalAugmentedGeneration/examples/multi_turn_rag/chains.py new file mode 100644 index 000000000..853c81f22 --- /dev/null +++ b/RetrievalAugmentedGeneration/examples/multi_turn_rag/chains.py @@ -0,0 +1,264 @@ +# SPDX-FileCopyrightText:
Copyright (c) 2024 NVIDIA CORPORATION & AFFILIATES. All rights reserved. +# SPDX-License-Identifier: Apache-2.0 +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""RAG example showcasing multi-turn conversation.""" +import base64 +import os +import logging +from pathlib import Path +from typing import Generator, List, Dict, Any + +from langchain_community.document_loaders import UnstructuredFileLoader +from langchain.text_splitter import RecursiveCharacterTextSplitter +from langchain_core.prompts.prompt import PromptTemplate +from langchain_core.prompts.chat import ChatPromptTemplate +from langchain_core.output_parsers.string import StrOutputParser +from langchain_core.runnables.passthrough import RunnableAssign + +# pylint: disable=no-name-in-module, disable=import-error +from RetrievalAugmentedGeneration.common.utils import ( + get_config, + get_llm, + get_vectorstore_langchain, + get_embedding_model, + get_text_splitter, + get_docs_vectorstore_langchain, + del_docs_vectorstore_langchain +) +from RetrievalAugmentedGeneration.common.base import BaseExample +from operator import itemgetter + +DOCS_DIR = os.path.abspath("./uploaded_files") +document_embedder = get_embedding_model() +docstore = None +text_splitter = None +settings = get_config() +logger = logging.getLogger(__name__) + +class MultiTurnChatbot(BaseExample): + + def save_memory_and_get_output(self, d, vstore): + """Accepts 'input'/'output' dictionary and saves to convstore""" + vstore.add_texts( + [ + f"User previously responded with {d.get('input')}", + f"Agent previously responded with {d.get('output')}", + ] + ) + return d.get("output") + + def ingest_docs(self, file_name: str, filename: str): + """Ingest documents to the VectorDB.""" + try: + # TODO: Load embedding created in older conversation, memory persistence + # We initialize the class on every call, therefore docstore should be global + global docstore + # Load raw documents from the directory + # Data is copied to `DOCS_DIR` in common.server:upload_document + _path = os.path.join(DOCS_DIR, filename) + raw_documents = UnstructuredFileLoader(_path).load() + + if raw_documents: + global text_splitter + if not text_splitter: + text_splitter = get_text_splitter() + + documents = text_splitter.split_documents(raw_documents) + if docstore: + docstore.add_documents(documents) + else: + docstore = get_vectorstore_langchain(documents, document_embedder) + else: + logger.warning("No documents available to process!") + except Exception as e: + logger.error(f"Failed to ingest document due to exception {e}") + raise ValueError( + "Failed to upload document. Please upload an unstructured text document."
+            )
+
+    def llm_chain(
+        self, query: str, chat_history: List["Message"], **kwargs
+    ) -> Generator[str, None, None]:
+        """Execute a simple LLM chain using the components defined above."""
+
+        logger.info("Using llm to generate response directly without knowledge base.")
+        system_message = [("system", settings.prompts.chat_template)]
+        conversation_history = [(msg.role, msg.content) for msg in chat_history]
+        user_message = [("user", query)]
+
+        # Use the conversation history when it is available; otherwise prompt with just the current user message
+        prompt_template = ChatPromptTemplate.from_messages(
+            system_message + conversation_history
+        ) if conversation_history else ChatPromptTemplate.from_messages(
+            system_message + user_message
+        )
+
+        llm = get_llm(**kwargs)
+
+        chain = prompt_template | llm | StrOutputParser()
+
+        return chain.stream({"context_str": "", "query_str": query})
+
+    def rag_chain(self, query: str, chat_history: List["Message"], **kwargs) -> Generator[str, None, None]:
+        """Execute a Retrieval Augmented Generation chain using the components defined above."""
+
+        logger.info("Using rag to generate response from document")
+
+        # chat_prompt = ChatPromptTemplate.from_messages(
+        #     [
+        #         ("system", settings.prompts.multi_turn_rag_template),
+        #         ("user", "{input}"),
+        #     ]
+        # )
+
+        # Workaround prompt template: the system prompt is folded into the user turn
+        chat_prompt = ChatPromptTemplate.from_messages(
+            [
+                ("user", settings.prompts.multi_turn_rag_template + "User Query: {input}"),
+            ]
+        )
+
+        llm = get_llm(**kwargs)
+        stream_chain = chat_prompt | llm | StrOutputParser()
+
+        convstore = get_vectorstore_langchain(
+            [], document_embedder, collection_name="conv_store"
+        )
+
+        resp_str = ""
+        # TODO Integrate chat_history
+        try:
+            if docstore:
+
+                try:
+                    logger.info(f"Getting retrieved top k values: {settings.retriever.top_k} with confidence threshold: {settings.retriever.score_threshold}")
+                    retrieval_chain = (
+                        RunnableAssign(
+                            {"context": itemgetter("input") | docstore.as_retriever(search_type="similarity_score_threshold",
+                                search_kwargs={"score_threshold": settings.retriever.score_threshold, "k": settings.retriever.top_k})}
+                        )
+                        | RunnableAssign(
+                            {"history": itemgetter("input") | convstore.as_retriever(search_type="similarity_score_threshold",
+                                search_kwargs={"score_threshold": settings.retriever.score_threshold, "k": settings.retriever.top_k})}
+                        )
+                    )
+
+                    # Handle retrieval failure; the chain returns a dict, so check the retrieved context itself
+                    docs = retrieval_chain.invoke({"input": query})
+                    if not docs.get("context"):
+                        logger.warning("Retrieval failed to get any relevant context")
+                        yield "No response generated from LLM, make sure your query is relevant to the ingested document."
+                        return
+
+                    chain = retrieval_chain | stream_chain
+
+                    for chunk in chain.stream({"input": query}):
+                        yield chunk
+                        resp_str += chunk
+
+                    self.save_memory_and_get_output(
+                        {"input": query, "output": resp_str}, convstore
+                    )
+
+                    return
+
+                except NotImplementedError:
+                    # TODO: Optimize this; the error is currently raised during streaming,
+                    # check if there is a better way to handle the similarity-threshold case
+                    logger.info("Skipping similarity score threshold as it's not supported by the retriever")
+                    # Some retrievers, like Milvus, don't implement a similarity score threshold
+                    retrieval_chain = (
+                        RunnableAssign(
+                            {"context": itemgetter("input") | docstore.as_retriever()}
+                        )
+                        | RunnableAssign(
+                            {"history": itemgetter("input") | convstore.as_retriever()}
+                        )
+                    )
+
+                    # Handle retrieval failure; the chain returns a dict, so check the retrieved context itself
+                    docs = retrieval_chain.invoke({"input": query})
+                    if not docs.get("context"):
+                        logger.warning("Retrieval failed to get any relevant context")
+                        yield "No response generated from LLM, make sure your query is relevant to the ingested document."
+                        return
iter(["No response generated from LLM, make sure your query is relavent to the ingested document."]) + + chain = retrieval_chain | stream_chain + for chunk in chain.stream({"input": query}): + yield chunk + resp_str += chunk + + self.save_memory_and_get_output( + {"input": query, "output": resp_str}, convstore + ) + + return chain.stream(query) + + except Exception as e: + logger.warning(f"Failed to generate response due to exception {e}") + logger.warning( + "No response generated from LLM, make sure you've ingested document." + ) + return iter( + [ + "No response generated from LLM, make sure you have ingested document from the Knowledge Base Tab." + ] + ) + + def document_search(self, content: str, num_docs: int) -> List[Dict[str, Any]]: + """Search for the most relevant documents for the given search parameters.""" + + try: + if docstore != None: + try: + retriever = docstore.as_retriever( + search_type="similarity_score_threshold", + search_kwargs={"score_threshold": settings.retriever.score_threshold, "k": settings.retriever.top_k}, + ) + docs = retriever.invoke(content) + except NotImplementedError: + # Some retriever like milvus don't have similarity score threshold implemented + retriever = docstore.as_retriever() + docs = retriever.invoke(content) + + result = [] + for doc in docs: + result.append( + { + "source": os.path.basename(doc.metadata.get("source", "")), + "content": doc.page_content, + } + ) + return result + return [] + except Exception as e: + logger.error(f"Error from /documentSearch endpoint. Error details: {e}") + return [] + + def get_documents(self) -> List[str]: + """Retrieves filenames stored in the vector store.""" + try: + if docstore: + return get_docs_vectorstore_langchain(docstore) + except Exception as e: + logger.error(f"Vectorstore not initialized. Error details: {e}") + return [] + + + def delete_documents(self, filenames: List[str]): + """Delete documents from the vector index.""" + try: + if docstore: + return del_docs_vectorstore_langchain(docstore, filenames) + except Exception as e: + logger.error(f"Vectorstore not initialized. Error details: {e}") \ No newline at end of file diff --git a/RetrievalAugmentedGeneration/examples/multimodal_rag/__init__.py b/RetrievalAugmentedGeneration/examples/multimodal_rag/__init__.py new file mode 100644 index 000000000..a08b2c204 --- /dev/null +++ b/RetrievalAugmentedGeneration/examples/multimodal_rag/__init__.py @@ -0,0 +1,14 @@ +# SPDX-FileCopyrightText: Copyright (c) 2024 NVIDIA CORPORATION & AFFILIATES. All rights reserved. +# SPDX-License-Identifier: Apache-2.0 +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/RetrievalAugmentedGeneration/examples/multimodal_rag/chains.py b/RetrievalAugmentedGeneration/examples/multimodal_rag/chains.py new file mode 100644 index 000000000..f58e6b155 --- /dev/null +++ b/RetrievalAugmentedGeneration/examples/multimodal_rag/chains.py @@ -0,0 +1,133 @@ +# SPDX-FileCopyrightText: Copyright (c) 2024 NVIDIA CORPORATION & AFFILIATES. All rights reserved. 
+# SPDX-License-Identifier: Apache-2.0 +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import logging +import os +from typing import Generator, List, Dict, Any +from functools import lru_cache +from traceback import print_exc + +from RetrievalAugmentedGeneration.common.utils import utils_cache + +logger = logging.getLogger(__name__) + +from RetrievalAugmentedGeneration.common.base import BaseExample +from RetrievalAugmentedGeneration.example.llm.llm_client import LLMClient +from RetrievalAugmentedGeneration.example.retriever.embedder import NVIDIAEmbedders +from RetrievalAugmentedGeneration.example.retriever.vector import MilvusVectorClient +from RetrievalAugmentedGeneration.example.retriever.retriever import Retriever +from RetrievalAugmentedGeneration.example.vectorstore.vectorstore_updater import update_vectorstore +from RetrievalAugmentedGeneration.common.utils import get_config + +settings = get_config() +sources = [] +RESPONSE_PARAPHRASING_MODEL = settings.llm.model_name + +@lru_cache +def get_vector_index(embed_dim: int = 1024) -> MilvusVectorClient: + return MilvusVectorClient(hostname="milvus", port="19530", collection_name=os.getenv('COLLECTION_NAME', "vector_db"), embedding_size=embed_dim) + +@lru_cache +def get_embedder(type: str = "query") -> NVIDIAEmbedders: + if type == "query": + embedder = NVIDIAEmbedders(name=settings.embeddings.model_name, type="query") + else: + embedder = NVIDIAEmbedders(name=settings.embeddings.model_name, type="passage") + return embedder + +@lru_cache +def get_doc_retriever(type: str = "query") -> Retriever: + embedder = get_embedder(type) + embedding_size = embedder.get_embedding_size() + return Retriever(embedder=get_embedder(type) , vector_client=get_vector_index(embedding_size)) + +@utils_cache +@lru_cache() +def get_llm(model_name, is_response_generator=False, **kwargs): + return LLMClient(model_name=model_name, is_response_generator=is_response_generator, **kwargs) + + +class MultimodalRAG(BaseExample): + + def ingest_docs(self, filepath: str, filename: str): + """Ingest documents to the VectorDB.""" + + try: + embedder = get_embedder(type="passage") + embedding_size = embedder.get_embedding_size() + update_vectorstore(os.path.abspath(filepath), get_vector_index(embedding_size), embedder, os.getenv('COLLECTION_NAME', "vector_db")) + except Exception as e: + logger.error(f"Failed to ingest document due to exception {e}") + print_exc() + raise ValueError("Failed to upload document. 
Please check chain server logs for details.")
+
+
+    def llm_chain(
+        self, query: str, chat_history: List["Message"], **kwargs
+    ) -> Generator[str, None, None]:
+        """Execute a simple LLM chain using the components defined above."""
+        # TODO integrate chat_history
+        logger.info("Using llm to generate response directly without knowledge base.")
+        response = get_llm(model_name=RESPONSE_PARAPHRASING_MODEL, is_response_generator=True, **kwargs).chat_with_prompt(settings.prompts.chat_template, query)
+        return response
+
+
+    def rag_chain(self, query: str, chat_history: List["Message"], **kwargs) -> Generator[str, None, None]:
+        """Execute a Retrieval Augmented Generation chain using the components defined above."""
+
+        logger.info("Using rag to generate response from document")
+        # TODO integrate chat_history
+        try:
+            retriever = get_doc_retriever(type="query")
+            context, sources = retriever.get_relevant_docs(query, limit=settings.retriever.top_k)
+            if not context:
+                logger.warning("Retrieval failed to get any relevant context")
+                return iter(["No response generated from LLM, make sure your query is relevant to the ingested document."])
+
+            augmented_prompt = "Relevant documents:" + context + "\n\n[[QUESTION]]\n\n" + query
+            system_prompt = settings.prompts.rag_template
+            logger.info(f"Formulated prompt for RAG chain: {system_prompt}\n{augmented_prompt}")
+            response = get_llm(model_name=RESPONSE_PARAPHRASING_MODEL, is_response_generator=True, **kwargs).chat_with_prompt(settings.prompts.rag_template, augmented_prompt)
+            return response
+
+        except Exception as e:
+            logger.warning(f"Failed to generate response due to exception {e}")
+            logger.warning(
+                "No response generated from LLM, make sure you've ingested a document."
+            )
+            return iter(
+                [
+                    "No response generated from LLM, make sure you have ingested a document from the Knowledge Base Tab."
+                ]
+            )
+
+    def document_search(self, content: str, num_docs: int) -> List[Dict[str, Any]]:
+        """Search for the most relevant documents for the given search parameters."""
+
+        try:
+            logger.error("Document search is not implemented yet!")
+        except Exception as e:
+            logger.error(f"Error from POST /search endpoint. Error details: {e}")
+        return []
+
+    def get_documents(self):
+        """Retrieves filenames stored in the vector store."""
+        decoded_filenames = []
+        logger.error("get_documents is not implemented!")
+        return decoded_filenames
+
+    def delete_documents(self, filenames: List[str]):
+        """Delete documents from the vector index."""
+        logger.error("delete_documents is not implemented")
\ No newline at end of file
diff --git a/RetrievalAugmentedGeneration/examples/multimodal_rag/llm/__init__.py b/RetrievalAugmentedGeneration/examples/multimodal_rag/llm/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/RetrievalAugmentedGeneration/examples/multimodal_rag/llm/llm.py b/RetrievalAugmentedGeneration/examples/multimodal_rag/llm/llm.py
new file mode 100644
index 000000000..86e0e8ef9
--- /dev/null
+++ b/RetrievalAugmentedGeneration/examples/multimodal_rag/llm/llm.py
@@ -0,0 +1,101 @@
+# SPDX-FileCopyrightText: Copyright (c) 2023-2024 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
+# SPDX-License-Identifier: Apache-2.0
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import requests +import json +import torch +from transformers import AutoTokenizer, AutoModelForCausalLM, pipeline +from langchain_community.llms import HuggingFacePipeline + +from RetrievalAugmentedGeneration.common.utils import get_llm, get_config + + +class NvidiaLLM: + def __init__(self, model_name, is_response_generator: bool = False, **kwargs): + + # LLM is used for response generation as well as for generating description + # of images, only use llm from configuration for response generator + if is_response_generator: + self.llm = get_llm(**kwargs) + else: + settings = get_config() + # Use nv-ai-foundaion as default + if settings.llm.model_engine == "nv-api-catalog": + from integrations.langchain.llms.nv_api_catalog import ChatNVIDIA + self.llm = ChatNVIDIA(model=model_name, + temperature = kwargs.get('temperature', None), + top_p = kwargs.get('top_p', None), + max_tokens = kwargs.get('max_tokens', None)) + else: + from langchain_nvidia_ai_endpoints import ChatNVIDIA + self.llm = ChatNVIDIA(model=model_name, + temperature = kwargs.get('temperature', None), + top_p = kwargs.get('top_p', None), + max_tokens = kwargs.get('max_tokens', None)) + + +class LocalLLM: + def __init__(self, model_path, **kwargs): + tokenizer = AutoTokenizer.from_pretrained(model_path, trust_remote_code=True) + model = AutoModelForCausalLM.from_pretrained( + model_path, + torch_dtype=torch.float16, + trust_remote_code=True, + device_map="auto" + ) + + pipe = pipeline( + "text-generation", + model=model, + tokenizer=tokenizer, + max_length=kwargs.get('max_tokens',1024), + temperature=kwargs.get('temperature', 0.6), + top_p=kwargs.get('top_p', 0.3), + repetition_penalty=1.0 + ) + + self.llm = HuggingFacePipeline(pipeline=pipe) + + +def create_llm(model_name, model_type="NVIDIA", is_response_generator=False, **kwargs): + # Use LLM to generate answer + if model_type == "NVIDIA": + model = NvidiaLLM(model_name, is_response_generator, **kwargs) + elif model_type == "LOCAL": + model = LocalLLM(model_name, **kwargs) + else: + print("Error! Need model_name and model_type!") + exit() + + return model.llm + + +if __name__ == "__main__": + llm = create_llm("gpt2", "LOCAL") + + from langchain_core.output_parsers import StrOutputParser + from langchain_core.prompts import ChatPromptTemplate + from langchain import LLMChain + + system_prompt = "" + prompt = "who are you" + langchain_prompt = ChatPromptTemplate.from_messages([("system", system_prompt), ("user", "{input}")]) + chain = langchain_prompt | llm | StrOutputParser() + + response = chain.stream({"input": prompt}) + + for chunk in response: + print(chunk) + diff --git a/RetrievalAugmentedGeneration/examples/multimodal_rag/llm/llm_client.py b/RetrievalAugmentedGeneration/examples/multimodal_rag/llm/llm_client.py new file mode 100644 index 000000000..58d528eb2 --- /dev/null +++ b/RetrievalAugmentedGeneration/examples/multimodal_rag/llm/llm_client.py @@ -0,0 +1,37 @@ +# SPDX-FileCopyrightText: Copyright (c) 2023-2024 NVIDIA CORPORATION & AFFILIATES. All rights reserved. 
+# SPDX-License-Identifier: Apache-2.0 +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from RetrievalAugmentedGeneration.example.llm.llm import create_llm +from langchain_core.output_parsers import StrOutputParser +from langchain_core.prompts import ChatPromptTemplate +from langchain_core.messages import HumanMessage + +class LLMClient: + def __init__(self, model_name="mixtral_8x7b", model_type="NVIDIA", is_response_generator=False, **kwargs): + self.llm = create_llm(model_name, model_type, is_response_generator, **kwargs) + + def chat_with_prompt(self, system_prompt, prompt): + langchain_prompt = ChatPromptTemplate.from_messages([("system", system_prompt), ("user", "{input}")]) + chain = langchain_prompt | self.llm | StrOutputParser() + response = chain.stream({"input": prompt}) + return response + + def multimodal_invoke(self, b64_string, steer=False, creativity=0, quality=9, complexity=0, verbosity=8): + message = HumanMessage(content=[{"type": "text", "text": "Describe this image in detail:"}, + {"type": "image_url", "image_url": {"url": f"data:image/png;base64,{b64_string}"},}]) + if steer: + return self.llm.invoke([message], labels={"creativity": creativity, "quality": quality, "complexity": complexity, "verbosity": verbosity}) + else: + return self.llm.invoke([message]) \ No newline at end of file diff --git a/RetrievalAugmentedGeneration/examples/multimodal_rag/requirements.txt b/RetrievalAugmentedGeneration/examples/multimodal_rag/requirements.txt new file mode 100644 index 000000000..0f43b859c --- /dev/null +++ b/RetrievalAugmentedGeneration/examples/multimodal_rag/requirements.txt @@ -0,0 +1,8 @@ +pymupdf==1.23.15 +gspread==6.0.0 +pandas==2.2.0 +Pillow==10.2.0 +pydantic==2.5.3 +pymilvus==2.3.5 +python_pptx==0.6.23 +Requests==2.31.0 \ No newline at end of file diff --git a/RetrievalAugmentedGeneration/examples/multimodal_rag/retriever/__init__.py b/RetrievalAugmentedGeneration/examples/multimodal_rag/retriever/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/RetrievalAugmentedGeneration/examples/multimodal_rag/retriever/embedder.py b/RetrievalAugmentedGeneration/examples/multimodal_rag/retriever/embedder.py new file mode 100644 index 000000000..183448adb --- /dev/null +++ b/RetrievalAugmentedGeneration/examples/multimodal_rag/retriever/embedder.py @@ -0,0 +1,65 @@ +# SPDX-FileCopyrightText: Copyright (c) 2023-2024 NVIDIA CORPORATION & AFFILIATES. All rights reserved. +# SPDX-License-Identifier: Apache-2.0 +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
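
The LLMClient wrapper above is what the multimodal chain uses for both response generation and image description. As a rough usage sketch (not part of the patch), assuming the same package layout and a reachable model endpoint; the model name, system prompt, and image path below are illustrative placeholders:

import base64

from RetrievalAugmentedGeneration.example.llm.llm_client import LLMClient

# chat_with_prompt returns a stream of text chunks (StrOutputParser output); join them for a full answer.
client = LLMClient(model_name="mixtral_8x7b", is_response_generator=True)
answer = "".join(client.chat_with_prompt("You are a concise assistant.", "What is retrieval augmented generation?"))

# multimodal_invoke expects a base64-encoded image; the file path here is hypothetical.
with open("sample_chart.png", "rb") as img_file:
    b64_string = base64.b64encode(img_file.read()).decode("utf-8")
description = client.multimodal_invoke(b64_string).content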
+ +from abc import ABC, abstractmethod +from pydantic import BaseModel +from typing import Any, Optional + +from langchain_nvidia_ai_endpoints import NVIDIAEmbeddings + +from RetrievalAugmentedGeneration.common.utils import get_embedding_model + +class Embedder(ABC, BaseModel): + + + @abstractmethod + def embed_query(self, text): + ... + + @abstractmethod + def embed_documents(self, documents, batch_size): + ... + + def get_embedding_size(self): + sample_text = "This is a sample text." + sample_embedding = self.embedder.embed_query(sample_text) + return len(sample_embedding) + +class NVIDIAEmbedders(Embedder): + name : str + type : str + embedder : Optional[Any] = None + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.embedder = get_embedding_model() + + + def embed_query(self, text): + return self.embedder.embed_query(text) + + + def embed_documents(self, documents, batch_size=10): + output = [] + batch_documents = [] + for i, doc in enumerate(documents): + batch_documents.append(doc) + if len(batch_documents) == batch_size: + output.extend(self.embedder.embed_documents(batch_documents)) + batch_documents = [] + else: + if len(batch_documents) > 0: + output.extend(self.embedder.embed_documents(batch_documents)) + return output \ No newline at end of file diff --git a/RetrievalAugmentedGeneration/examples/multimodal_rag/retriever/retriever.py b/RetrievalAugmentedGeneration/examples/multimodal_rag/retriever/retriever.py new file mode 100644 index 000000000..804014bd5 --- /dev/null +++ b/RetrievalAugmentedGeneration/examples/multimodal_rag/retriever/retriever.py @@ -0,0 +1,32 @@ +# SPDX-FileCopyrightText: Copyright (c) 2023-2024 NVIDIA CORPORATION & AFFILIATES. All rights reserved. +# SPDX-License-Identifier: Apache-2.0 +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from pydantic import BaseModel +from RetrievalAugmentedGeneration.example.retriever.embedder import Embedder +from RetrievalAugmentedGeneration.example.retriever.vector import VectorClient + +class Retriever(BaseModel): + + embedder : Embedder + vector_client : VectorClient + search_limit : int = 4 + + def get_relevant_docs(self, text, limit=None): + if limit is None: + limit = self.search_limit + query_vector = self.embedder.embed_query(text) + concatdocs, sources = self.vector_client.search([query_vector], limit) + return concatdocs, sources + diff --git a/RetrievalAugmentedGeneration/examples/multimodal_rag/retriever/vector.py b/RetrievalAugmentedGeneration/examples/multimodal_rag/retriever/vector.py new file mode 100644 index 000000000..7329aaf14 --- /dev/null +++ b/RetrievalAugmentedGeneration/examples/multimodal_rag/retriever/vector.py @@ -0,0 +1,147 @@ +# SPDX-FileCopyrightText: Copyright (c) 2023-2024 NVIDIA CORPORATION & AFFILIATES. All rights reserved. +# SPDX-License-Identifier: Apache-2.0 +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from abc import ABC, abstractmethod +from typing import Any +from pydantic import BaseModel +from pymilvus import connections, Collection, FieldSchema, CollectionSchema, DataType, utility + +class VectorClient(ABC, BaseModel): + + hostname : str + port : str + collection_name : str + + @abstractmethod + def connect(self): + ... + + def disconnect(self): + ... + + @abstractmethod + def search(self, query_vectors, limit=5): + ... + + @abstractmethod + def update(self): + ... + + +class MilvusVectorClient(VectorClient): + + hostname : str = "milvus" + port : str = "19530" + metric_type : str = "L2" + index_type : str = "GPU_IVF_FLAT" + nlist : int = 100 + index_field_name : str = "embedding" + nprobe : int = 5 + vector_db : Any = None + embedding_size: int = 1024 + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.vector_db = self.connect(self.collection_name, self.hostname, self.port, embedding_size=self.embedding_size) + self._create_index(self.metric_type, self.index_type, self.index_field_name, self.nlist) + self.vector_db.load() + + def _create_index(self, metric_type, index_type, field_name, nlist=100): + + index_params = { + "metric_type": metric_type, # or "IP" depending on your requirement + "index_type": index_type, # You can choose other types like IVF_PQ based on your need + "params": {"nlist": nlist} # Adjust the nlist parameter as per your requirements + } + self.vector_db.create_index(field_name=field_name, index_params=index_params) + + def connect(self, collection_name, hostname, port, alias="default", embedding_size=1024): + connections.connect(alias, host=hostname, port=port) + try: + vector_db = Collection(name=collection_name) + return vector_db + except: + # create the vector DB using default embedding dimensions of 1024 + vector_db = self.create_collection(collection_name, embedding_size) + return self.vector_db + + def disconnect(self, alias="default"): + connections.disconnect(alias) + + def search(self, query_vectors, limit=5): + search_params = { + "metric_type": self.metric_type, + "params": {"nprobe": self.nprobe} + } + + search_results = self.vector_db.search( + data=query_vectors, + anns_field=self.index_field_name, # Replace with your vector field name + param=search_params, + output_fields=["content", "metadata"], + limit=limit + ) + concatdocs = "" + sources = {} + # return concatdocs, sources + print("Number of results: ", len(search_results[0])) + for idx, result in enumerate(search_results[0]): + hit = result + doc_content = hit.entity.get("content") + doc_metadata = hit.entity.get("metadata") + print(doc_metadata) + # Storing metadata and content in sources dictionary + sources[doc_metadata["source"]] = {"doc_content": doc_content, "doc_metadata": doc_metadata} + + # # Concatenating document content with an identifier + concatdocs += f"[[DOCUMENT {idx}]]\n\n" + doc_content + "\n\n" + + # Note: The return statement should be outside the for loop + return concatdocs, sources + + def __del__(self): + self.disconnect() + + def update(self, documents, embeddings, collection_name): + # Processing each document + 
insert_data = [] + for i, doc in enumerate(documents): + # Prepare data for insertion + example = { + "id": i, + "content": doc.page_content, + "embedding": embeddings[i], + "metadata": doc.metadata + } + insert_data.append(example) + + self.vector_db.insert(insert_data) + + def get_schema(self, embedding_size): + # Define the primary key field along with other fields + fields = [ + FieldSchema(name="id", dtype=DataType.INT64, is_primary=True), # Primary key field + FieldSchema(name="content", dtype=DataType.VARCHAR, max_length=10000), # Text field with up to 10000 characters + FieldSchema(name="embedding", dtype=DataType.FLOAT_VECTOR, dim=embedding_size), + FieldSchema(name="metadata", dtype=DataType.JSON) + ] + + schema = CollectionSchema(fields, "Collection for storing document embeddings and metadata") + return schema + + def create_collection(self, collection_name, embedding_size): + # Formulate the schema and create the collection + schema = self.get_schema(embedding_size) + self.vector_db = Collection(name=collection_name, schema=schema) diff --git a/RetrievalAugmentedGeneration/examples/multimodal_rag/vectorstore/custom_pdf_parser.py b/RetrievalAugmentedGeneration/examples/multimodal_rag/vectorstore/custom_pdf_parser.py new file mode 100644 index 000000000..70a554eac --- /dev/null +++ b/RetrievalAugmentedGeneration/examples/multimodal_rag/vectorstore/custom_pdf_parser.py @@ -0,0 +1,267 @@ +# SPDX-FileCopyrightText: Copyright (c) 2023-2024 NVIDIA CORPORATION & AFFILIATES. All rights reserved. +# SPDX-License-Identifier: Apache-2.0 +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
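
The MilvusVectorClient above pairs with the NVIDIAEmbedders and Retriever classes shown earlier; chains.py wires them together through get_vector_index() and get_doc_retriever(). A minimal sketch of that wiring, assuming a reachable Milvus instance at the default host and port; the embedding model name and query text are placeholders:

from RetrievalAugmentedGeneration.example.retriever.embedder import NVIDIAEmbedders
from RetrievalAugmentedGeneration.example.retriever.retriever import Retriever
from RetrievalAugmentedGeneration.example.retriever.vector import MilvusVectorClient

# Probe the embedder once so the Milvus collection is created with a matching dimension.
embedder = NVIDIAEmbedders(name="nvolveqa_40k", type="query")  # model name is a placeholder
vector_client = MilvusVectorClient(
    hostname="milvus", port="19530",
    collection_name="vector_db",
    embedding_size=embedder.get_embedding_size(),
)

# Retriever.get_relevant_docs embeds the query and returns (concatenated_docs, sources).
retriever = Retriever(embedder=embedder, vector_client=vector_client)
concatdocs, sources = retriever.get_relevant_docs("What does the chart on page 3 show?", limit=4)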
+ +import fitz +import pandas as pd +import os +from langchain.docstore.document import Document +from RetrievalAugmentedGeneration.example.llm.llm_client import LLMClient +from RetrievalAugmentedGeneration.common.utils import get_config +from PIL import Image +from io import BytesIO +import base64 + +def get_b64_image(image_path): + image = Image.open(image_path).convert("RGB") + buffered = BytesIO() + image.save(buffered, format="JPEG", quality=20) + b64_string = base64.b64encode(buffered.getvalue()).decode("utf-8") + return b64_string + +def is_graph(image_path): + # Placeholder function for graph detection logic + # Implement graph detection algorithm here + neva = LLMClient("ai-neva-22b") + b64_string = get_b64_image(image_path) + res = neva.multimodal_invoke(b64_string, creativity = 0, quality = 9, complexity = 0, verbosity = 9).content + print(res) + if "graph" in res or "plot" in res or "chart" in res: + return True + else: + return False + +def process_graph(image_path): + # Placeholder function for graph processing logic + # Implement graph processing algorithm here + # Call DePlot through the API + deplot = LLMClient("ai-google-deplot") + b64_string = get_b64_image(image_path) + res = deplot.multimodal_invoke(b64_string) + deplot_description = res.content + # Accessing the model name environment variable + settings = get_config() + mixtral = LLMClient(model_name=settings.llm.model_name) + response = mixtral.chat_with_prompt(system_prompt="Your responsibility is to explain charts. You are an expert in describing the responses of linearized tables into plain English text for LLMs to use.", + prompt="Explain the following linearized table. " + deplot_description) + full_response = "" + for chunk in response: + full_response += chunk + print(full_response) + return full_response + +def extract_text_around_item(text_blocks, bbox, page_height, threshold_percentage=0.1): + before_text, after_text = "", "" + vertical_threshold_distance = page_height * threshold_percentage + horizontal_threshold_distance = bbox.width * threshold_percentage # Assuming similar threshold for horizontal distance + + for block in text_blocks: + block_bbox = fitz.Rect(block[:4]) + vertical_distance = min(abs(block_bbox.y1 - bbox.y0), abs(block_bbox.y0 - bbox.y1)) + horizontal_overlap = max(0, min(block_bbox.x1, bbox.x1) - max(block_bbox.x0, bbox.x0)) + + # Check if within vertical threshold distance and has horizontal overlap or closeness + if vertical_distance <= vertical_threshold_distance and horizontal_overlap >= -horizontal_threshold_distance: + if block_bbox.y1 < bbox.y0 and not before_text: + before_text = block[4] + elif block_bbox.y0 > bbox.y1 and not after_text: + after_text = block[4] + break + + return before_text, after_text + + +def process_text_blocks(text_blocks): + char_count_threshold = 500 # Threshold for the number of characters in a group + current_group = [] + grouped_blocks = [] + current_char_count = 0 + + for block in text_blocks: + if block[-1] == 0: # Check if the block is of text type + block_text = block[4] + block_char_count = len(block_text) + + if current_char_count + block_char_count <= char_count_threshold: + current_group.append(block) + current_char_count += block_char_count + else: + if current_group: + grouped_content = "\n".join([b[4] for b in current_group]) + grouped_blocks.append((current_group[0], grouped_content)) + current_group = [block] + current_char_count = block_char_count + + # Append the last group + if current_group: + grouped_content = "\n".join([b[4] for b 
in current_group]) + grouped_blocks.append((current_group[0], grouped_content)) + + return grouped_blocks + +def parse_all_tables(filename, page, pagenum, text_blocks, ongoing_tables): + table_docs = [] + table_bboxes = [] + ctr = 1 + try: + tables = page.find_tables(horizontal_strategy = "lines_strict", vertical_strategy = "lines_strict") + except Exception as e: + print(f"Error during table extraction: {e}") + return table_docs, table_bboxes, ongoing_tables + if tables: + for tab in tables: + if tab.header.external: + # Check if this table is a continuation of a table from a previous page + previous_table = ongoing_tables.get(pagenum - 1, None) + if previous_table: + # Merge the current table with the previous part + combined_df = pd.concat([previous_table['dataframe'], tab.to_pandas()]) + ongoing_tables[pagenum] = {"dataframe": combined_df, "bbox": bbox} + continue + if not tab.header.external: + pandas_df = tab.to_pandas() + tablerefdir = os.path.join(os.getcwd(), "multimodal/table_references") + if not os.path.exists(tablerefdir): + os.makedirs(tablerefdir) + df_xlsx_path = os.path.join(tablerefdir, f"table{ctr}-page{pagenum}.xlsx") + pandas_df.to_excel(df_xlsx_path) + bbox = fitz.Rect(tab.bbox) + table_bboxes.append(bbox) + + # Find text around the table + before_text, after_text = extract_text_around_item(text_blocks, bbox, page.rect.height) + + table_img = page.get_pixmap(clip=bbox) + table_img_path = os.path.join(tablerefdir, f"table{ctr}-page{pagenum}.jpg") + table_img.save(table_img_path) + description = process_graph(table_img_path) + ctr += 1 + + caption = before_text.replace("\n", " ") + description + after_text.replace("\n", " ") + if before_text == "" and after_text == "": + caption = " ".join(tab.header.names) + + + table_metadata = { + "source": f"{filename[:-4]}-page{pagenum}-table{ctr}", + "dataframe": df_xlsx_path, + "image": table_img_path, + "caption": caption, + "type": "table", + "page_num": pagenum + } + all_cols = ", ".join(list(pandas_df.columns.values)) + doc = Document(page_content="This is a table with the caption: " + caption + f"\nThe columns are {all_cols}", metadata=table_metadata) + table_docs.append(doc) + return table_docs, table_bboxes, ongoing_tables + +def parse_all_images(filename, page, pagenum, text_blocks): + image_docs = [] + image_info_list = page.get_image_info(xrefs=True) + page_rect = page.rect # Get the dimensions of the page + + for image_info in image_info_list: + xref = image_info['xref'] + if xref == 0: + continue # Skip inline images or undetectable images + + img_bbox = fitz.Rect(image_info['bbox']) + # Check if the image size is at least 5% of the page size in any dimension + if img_bbox.width < page_rect.width / 20 or img_bbox.height < page_rect.height / 20: + continue # Skip very small images + + # Extract and save the image + extracted_image = page.parent.extract_image(xref) + image_data = extracted_image["image"] + imgrefpath = os.path.join(os.getcwd(), "multimodal/image_references") + if not os.path.exists(imgrefpath): + os.makedirs(imgrefpath) + image_path = os.path.join(imgrefpath, f"image{xref}-page{pagenum}.png") + with open(image_path, "wb") as img_file: + img_file.write(image_data) + + # Find text around the image + before_text, after_text = extract_text_around_item(text_blocks, img_bbox, page.rect.height) + # skip images without a caption, they are likely just some logo or graphics + if before_text == "" and after_text == "": + continue + + # Process the image if it's a graph + image_description = " " + if 
is_graph(image_path): + image_description = process_graph(image_path) + + # Combine the texts to form a caption + caption = before_text.replace("\n", " ") + image_description + after_text.replace("\n", " ") + + image_metadata = { + "source": f"{filename[:-4]}-page{pagenum}-image{xref}", + "image": image_path, + "caption": caption, + "type": "image", + "page_num": pagenum + } + image_docs.append(Document(page_content="This is an image with the caption: " + caption, metadata=image_metadata)) + return image_docs + +def get_pdf_documents(filepath): + all_pdf_documents = [] + ongoing_tables = {} + try: + f = fitz.open(filepath) + except Exception as e: + print(f"Error opening or processing the PDF file: {e}") + return [] + + for i in range(len(f)): + page = f[i] + page_docs = [] + + # Process text blocks + initial_text_blocks = page.get_text("blocks", sort=True) + + # Define thresholds for header and footer (10% of the page height) + page_height = page.rect.height + header_threshold = page_height * 0.1 + footer_threshold = page_height * 0.9 + + # Filter out text blocks that are likely headers or footers + text_blocks = [block for block in initial_text_blocks if block[-1] == 0 and not (block[1] < header_threshold or block[3] > footer_threshold)] + + # Group text blocks by character count + grouped_text_blocks = process_text_blocks(text_blocks) + + # Extract tables and their bounding boxes + table_docs, table_bboxes, ongoing_tables = parse_all_tables(filepath, page, i, text_blocks, ongoing_tables) + page_docs.extend(table_docs) + + # Extract and process images + image_docs = parse_all_images(filepath, page, i, text_blocks) + page_docs.extend(image_docs) + + # Process grouped text blocks + text_block_ctr = 0 + for heading_block, content in grouped_text_blocks: + text_block_ctr +=1 + heading_bbox = fitz.Rect(heading_block[:4]) + # Check if the heading or its content overlaps with table or image bounding boxes + if not any(heading_bbox.intersects(table_bbox) for table_bbox in table_bboxes): + bbox = {"x1": heading_block[0], "y1": heading_block[1], "x2": heading_block[2], "x3": heading_block[3]} + text_doc = Document(page_content=f"{heading_block[4]}\n{content}", metadata={**bbox, "type": "text", "page_num": i, "source": f"{filepath[:-4]}-page{i}-block{text_block_ctr}"}) + page_docs.append(text_doc) + + all_pdf_documents.append(page_docs) + + f.close() + return all_pdf_documents diff --git a/RetrievalAugmentedGeneration/examples/multimodal_rag/vectorstore/custom_powerpoint_parser.py b/RetrievalAugmentedGeneration/examples/multimodal_rag/vectorstore/custom_powerpoint_parser.py new file mode 100644 index 000000000..d5531a369 --- /dev/null +++ b/RetrievalAugmentedGeneration/examples/multimodal_rag/vectorstore/custom_powerpoint_parser.py @@ -0,0 +1,105 @@ +# SPDX-FileCopyrightText: Copyright (c) 2023-2024 NVIDIA CORPORATION & AFFILIATES. All rights reserved. +# SPDX-License-Identifier: Apache-2.0 +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
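
get_pdf_documents() above returns one list of LangChain Document objects per page (text blocks plus table and image captions). A rough sketch of how a caller might flatten and chunk that output, mirroring what vectorstore_updater.py does later in this patch; the PDF path is a placeholder:

from langchain.text_splitter import RecursiveCharacterTextSplitter

from RetrievalAugmentedGeneration.example.vectorstore.custom_pdf_parser import get_pdf_documents

# Flatten the per-page lists into a single list of Documents before chunking.
pages = get_pdf_documents("uploaded_files/sample_report.pdf")  # hypothetical path
raw_documents = [doc for page_docs in pages for doc in page_docs]

splitter = RecursiveCharacterTextSplitter(chunk_size=1000, chunk_overlap=100)
chunks = splitter.split_documents(raw_documents)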
+ +import os +import subprocess +from pptx import Presentation +import fitz +from langchain.docstore.document import Document +from RetrievalAugmentedGeneration.example.vectorstore.custom_pdf_parser import is_graph, process_graph + + +def convert_ppt_to_pdf(ppt_path): + """Convert a PowerPoint file to PDF using LibreOffice and save in '../../ppt_references/' folder.""" + base_name = os.path.basename(ppt_path) + ppt_name_without_ext = os.path.splitext(base_name)[0].replace(' ', '_') + + # Use the existing directory '../../ppt_references/' + new_dir_path = os.path.abspath("multimodal/ppt_references") + + # Set the new PDF path in the existing directory + pdf_path = os.path.join(new_dir_path, f"{ppt_name_without_ext}.pdf") + + # LibreOffice command to convert PPT to PDF + command = ['libreoffice', '--headless', '--convert-to', 'pdf', '--outdir', new_dir_path, ppt_path] + subprocess.run(command, check=True) + + return pdf_path + +def convert_pdf_to_images(pdf_path): + """Convert a PDF file to a series of images using PyMuPDF and save in '../../ppt_references/' folder.""" + doc = fitz.open(pdf_path) + + # Extract the base name of the PDF file and replace spaces with underscores + base_name = os.path.basename(pdf_path) + pdf_name_without_ext = os.path.splitext(base_name)[0].replace(' ', '_') + + # Use the existing directory '../../ppt_references/' + new_dir_path = os.path.join(os.getcwd(), "multimodal/ppt_references") + + image_paths = [] + + for page_num in range(len(doc)): + page = doc.load_page(page_num) + pix = page.get_pixmap() + + # Save images in the existing directory + output_image_path = os.path.join(new_dir_path, f"{pdf_name_without_ext}_{page_num:04d}.png") + pix.save(output_image_path) + image_paths.append((output_image_path, page_num)) + + doc.close() + return image_paths + +def extract_text_and_notes_from_ppt(ppt_path): + """Extract text and notes from a PowerPoint file.""" + prs = Presentation(ppt_path) + text_and_notes = [] + for slide in prs.slides: + slide_text = ' '.join([shape.text for shape in slide.shapes if hasattr(shape, "text")]) + try: + notes = slide.notes_slide.notes_text_frame.text if slide.notes_slide else '' + except: + notes = '' + text_and_notes.append((slide_text, notes)) + return text_and_notes + +def process_ppt_file(ppt_path): + """Process a PowerPoint file.""" + pdf_path = os.path.join(os.getcwd(), "multimodal/ppt_references", os.path.basename(ppt_path).replace('.pptx', '.pdf').replace('.ppt', '.pdf')) + convert_ppt_to_pdf(ppt_path) + images_data = convert_pdf_to_images(pdf_path) + slide_texts = extract_text_and_notes_from_ppt(ppt_path) + processed_data = [] + + for (image_path, page_num), (slide_text, notes) in zip(images_data, slide_texts): + if notes: + notes = "\n\nThe speaker notes for this slide are: " + notes + + # get image description with NeVA/DePlot + image_description = " " + if is_graph(image_path): + image_description = process_graph(image_path) + + image_metadata = { + "source": f"{os.path.basename(ppt_path)}", + "image": image_path, + "caption": slide_text + image_description + notes, + "type": "image", + "page_num": page_num + } + processed_data.append(Document(page_content = "This is a slide with the text: " + slide_text + image_description, metadata = image_metadata)) + + return processed_data diff --git a/RetrievalAugmentedGeneration/examples/multimodal_rag/vectorstore/vectorstore_updater.py b/RetrievalAugmentedGeneration/examples/multimodal_rag/vectorstore/vectorstore_updater.py new file mode 100644 index 000000000..b8cbf3e1d --- 
/dev/null
+++ b/RetrievalAugmentedGeneration/examples/multimodal_rag/vectorstore/vectorstore_updater.py
@@ -0,0 +1,84 @@
+# SPDX-FileCopyrightText: Copyright (c) 2023-2024 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
+# SPDX-License-Identifier: Apache-2.0
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+from langchain.text_splitter import RecursiveCharacterTextSplitter
+from langchain_community.document_loaders import UnstructuredFileLoader
+
+from RetrievalAugmentedGeneration.example.vectorstore.custom_powerpoint_parser import process_ppt_file
+from RetrievalAugmentedGeneration.example.vectorstore.custom_pdf_parser import get_pdf_documents
+
+logger = logging.getLogger(__name__)
+
+CUSTOM_PROCESSING = True
+
+def load_documents(file):
+    """Load documents from the specified file."""
+    raw_documents = []
+
+    logger.info(f"Loading document: {file}")
+
+    if file.endswith("pdf") and CUSTOM_PROCESSING:
+        # Process each page of the PDF and add its documents individually to the list
+        pdf_docs = get_pdf_documents(file)
+        for each_page in pdf_docs:
+            raw_documents.extend(each_page)
+    elif file.endswith("ppt") or file.endswith("pptx"):
+        pptx_docs = process_ppt_file(file)
+        raw_documents.extend(pptx_docs)
+    else:
+        # Load unstructured files and add them individually
+        loader = UnstructuredFileLoader(file)
+        unstructured_docs = loader.load()
+        raw_documents.extend(unstructured_docs)  # 'extend' adds the list elements individually
+    return raw_documents
+
+
+def split_text(documents):
+    """Split text documents into chunks."""
+    text_splitter = RecursiveCharacterTextSplitter(
+        # Chunk size and overlap used when splitting documents before embedding
+ chunk_size = 1000, + chunk_overlap = 100, + length_function = len, + is_separator_regex = False, + ) + split_docs = text_splitter.split_documents(documents) + return split_docs + + +def update_vectorstore(file_path, vector_client, embedder, config_name): + """Generates word embeddings for documents and updates the Milvus collection.""" + # Attempt to create collection, catch exception if it already exists + logger.info("[Step 1/4] Creating/loading vector store") + + # Create collection if it doesn't exist + logger.info("Accessing collection...") + + logger.info("[Step 2/4] Processing and splitting documents") + # load and split documents + raw_documents = load_documents(file_path) + documents = split_text(raw_documents) + + logger.info("[Step 3/4] Inserting documents into the vector store...") + # Extracting the page content from each document + document_contents = [doc.page_content for doc in documents] + + # Embedding the documents using the updated embedding function + document_embeddings = embedder.embed_documents(document_contents, batch_size=10) + + # Batch insert into Milvus collection + vector_client.update(documents, document_embeddings, config_name) + logger.info("[Step 4/4] Saved vector store!") diff --git a/RetrievalAugmentedGeneration/examples/nvidia_ai_foundation/chains.py b/RetrievalAugmentedGeneration/examples/nvidia_api_catalog/chains.py similarity index 62% rename from RetrievalAugmentedGeneration/examples/nvidia_ai_foundation/chains.py rename to RetrievalAugmentedGeneration/examples/nvidia_api_catalog/chains.py index 251208f8b..b31140e1e 100644 --- a/RetrievalAugmentedGeneration/examples/nvidia_ai_foundation/chains.py +++ b/RetrievalAugmentedGeneration/examples/nvidia_api_catalog/chains.py @@ -18,24 +18,25 @@ from functools import lru_cache from typing import Generator, List, Dict, Any -from langchain.document_loaders import UnstructuredFileLoader +from langchain_community.document_loaders import UnstructuredFileLoader from langchain.text_splitter import CharacterTextSplitter -from langchain.vectorstores import FAISS -from langchain_core.output_parsers import StrOutputParser -from langchain_core.prompts import ChatPromptTemplate +from langchain_community.vectorstores.faiss import FAISS +from langchain_core.output_parsers.string import StrOutputParser +from langchain_core.prompts.chat import ChatPromptTemplate from langchain_nvidia_ai_endpoints import ChatNVIDIA, NVIDIAEmbeddings from RetrievalAugmentedGeneration.common.base import BaseExample -from RetrievalAugmentedGeneration.common.utils import get_config, get_llm, get_embedding_model, get_vectorstore_langchain +from RetrievalAugmentedGeneration.common.utils import get_config, get_llm, get_embedding_model, get_vectorstore_langchain, get_docs_vectorstore_langchain, del_docs_vectorstore_langchain, get_text_splitter logger = logging.getLogger(__name__) DOCS_DIR = os.path.abspath("./uploaded_files") vector_store_path = "vectorstore.pkl" document_embedder = get_embedding_model() vectorstore = None +text_splitter = None settings = get_config() -class NvidiaAIFoundation(BaseExample): +class NvidiaAPICatalog(BaseExample): def ingest_docs(self, file_name: str, filename: str): """Ingest documents to the VectorDB.""" try: @@ -48,7 +49,10 @@ def ingest_docs(self, file_name: str, filename: str): raw_documents = UnstructuredFileLoader(_path).load() if raw_documents: - text_splitter = CharacterTextSplitter(chunk_size=settings.text_splitter.chunk_size, chunk_overlap=settings.text_splitter.chunk_overlap) + global text_splitter + if 
not text_splitter: + text_splitter = get_text_splitter() + documents = text_splitter.split_documents(raw_documents) if vectorstore: vectorstore.add_documents(documents) @@ -61,64 +65,70 @@ def ingest_docs(self, file_name: str, filename: str): raise ValueError("Failed to upload document. Please upload an unstructured text document.") def llm_chain( - self, context: str, question: str, num_tokens: str + self, query: str, chat_history: List["Message"], **kwargs ) -> Generator[str, None, None]: """Execute a simple LLM chain using the components defined above.""" logger.info("Using llm to generate response directly without knowledge base.") + system_message = [("system", settings.prompts.chat_template)] + conversation_history = [(msg.role, msg.content) for msg in chat_history] + user_input = [("user", "{input}")] + + # Checking if conversation_history is not None and not empty prompt_template = ChatPromptTemplate.from_messages( - [ - ( - "system", - settings.prompts.chat_template, - ), - ("user", "{input}"), - ] + system_message + conversation_history + user_input + ) if conversation_history else ChatPromptTemplate.from_messages( + system_message + user_input ) - llm = get_llm() + llm = get_llm(**kwargs) chain = prompt_template | llm | StrOutputParser() augmented_user_input = ( - "Context: " + context + "\n\nQuestion: " + question + "\n" + "\n\nQuestion: " + query + "\n" ) return chain.stream({"input": augmented_user_input}) - def rag_chain(self, prompt: str, num_tokens: int) -> Generator[str, None, None]: + def rag_chain(self, query: str, chat_history: List["Message"], **kwargs) -> Generator[str, None, None]: """Execute a Retrieval Augmented Generation chain using the components defined above.""" logger.info("Using rag to generate response from document") + system_message = [("system", settings.prompts.rag_template)] + conversation_history = [(msg.role, msg.content) for msg in chat_history] + user_input = [("user", "{input}")] + # Checking if conversation_history is not None and not empty prompt_template = ChatPromptTemplate.from_messages( - [ - ( - "system", - settings.prompts.rag_template, - ), - ("user", "{input}"), - ] + system_message + conversation_history + user_input + ) if conversation_history else ChatPromptTemplate.from_messages( + system_message + user_input ) - llm = get_llm() + + llm = get_llm(**kwargs) chain = prompt_template | llm | StrOutputParser() try: if vectorstore != None: try: - retriever = vectorstore.as_retriever(search_type="similarity_score_threshold", search_kwargs={"score_threshold": 0.25}) - docs = retriever.get_relevant_documents(prompt) + logger.info(f"Getting retrieved top k values: {settings.retriever.top_k} with confidence threshold: {settings.retriever.score_threshold}") + retriever = vectorstore.as_retriever(search_type="similarity_score_threshold", search_kwargs={"score_threshold": settings.retriever.score_threshold, "k": settings.retriever.top_k}) + docs = retriever.get_relevant_documents(query) except NotImplementedError: # Some retriever like milvus don't have similarity score threshold implemented retriever = vectorstore.as_retriever() - docs = retriever.get_relevant_documents(prompt) + docs = retriever.get_relevant_documents(query) + if not docs: + logger.warning("Retrieval failed to get any relevant context") + return iter(["No response generated from LLM, make sure your query is relavent to the ingested document."]) context = "" for doc in docs: context += doc.page_content + "\n\n" augmented_user_input = ( - "Context: " + context + "\n\nQuestion: " 
+ prompt + "\n" + "Context: " + context + "\n\nQuestion: " + query + "\n" ) return chain.stream({"input": augmented_user_input}) @@ -139,7 +149,7 @@ def document_search(self, content: str, num_docs: int) -> List[Dict[str, Any]]: try: if vectorstore != None: try: - retriever = vectorstore.as_retriever(search_type="similarity_score_threshold", search_kwargs={"score_threshold": 0.25}) + retriever = vectorstore.as_retriever(search_type="similarity_score_threshold", search_kwargs={"score_threshold": settings.retriever.score_threshold, "k": settings.retriever.top_k}) docs = retriever.get_relevant_documents(content) except NotImplementedError: # Some retriever like milvus don't have similarity score threshold implemented @@ -157,5 +167,22 @@ def document_search(self, content: str, num_docs: int) -> List[Dict[str, Any]]: return result return [] except Exception as e: - logger.error(f"Error from /documentSearch endpoint. Error details: {e}") - return [] + logger.error(f"Error from POST /search endpoint. Error details: {e}") + + def get_documents(self) -> List[str]: + """Retrieves filenames stored in the vector store.""" + try: + if vectorstore: + return get_docs_vectorstore_langchain(vectorstore) + except Exception as e: + logger.error(f"Vectorstore not initialized. Error details: {e}") + return [] + + + def delete_documents(self, filenames: List[str]): + """Delete documents from the vector index.""" + try: + if vectorstore: + return del_docs_vectorstore_langchain(vectorstore, filenames) + except Exception as e: + logger.error(f"Vectorstore not initialized. Error details: {e}") \ No newline at end of file diff --git a/RetrievalAugmentedGeneration/examples/nvidia_ai_foundation/requirements.txt b/RetrievalAugmentedGeneration/examples/nvidia_api_catalog/requirements.txt similarity index 100% rename from RetrievalAugmentedGeneration/examples/nvidia_ai_foundation/requirements.txt rename to RetrievalAugmentedGeneration/examples/nvidia_api_catalog/requirements.txt diff --git a/RetrievalAugmentedGeneration/examples/query_decomposition_rag/chains.py b/RetrievalAugmentedGeneration/examples/query_decomposition_rag/chains.py index c77b620cf..5fb4634b0 100644 --- a/RetrievalAugmentedGeneration/examples/query_decomposition_rag/chains.py +++ b/RetrievalAugmentedGeneration/examples/query_decomposition_rag/chains.py @@ -20,15 +20,16 @@ Search tool is a RAG pipeline, whereas the math tool uses an LLM call to perform mathematical calculations. 
""" -from langchain.vectorstores import FAISS -from langchain.document_loaders import UnstructuredFileLoader +from langchain_community.vectorstores.faiss import FAISS +from langchain_community.document_loaders import UnstructuredFileLoader from langchain.text_splitter import CharacterTextSplitter -from langchain_core.prompts import ChatPromptTemplate -from langchain_core.output_parsers import StrOutputParser -from langchain.chains import LLMChain -from langchain.prompts import BaseChatPromptTemplate -from langchain.schema import HumanMessage -from langchain.agents import LLMSingleActionAgent, AgentOutputParser, AgentExecutor, Tool +from langchain_core.prompts.chat import ChatPromptTemplate +from langchain_core.output_parsers.string import StrOutputParser +from langchain.chains.llm import LLMChain +from langchain_core.prompts.chat import BaseChatPromptTemplate +from langchain_core.messages.human import HumanMessage +from langchain.agents.agent import LLMSingleActionAgent, AgentOutputParser, AgentExecutor +from langchain.tools import Tool from langchain.schema.agent import AgentFinish, AgentAction from typing import List, Union, Dict, Any import json @@ -45,12 +46,13 @@ get_embedding_model, get_doc_retriever, get_vectorstore_langchain, + get_docs_vectorstore_langchain, + del_docs_vectorstore_langchain, ) from RetrievalAugmentedGeneration.common.base import BaseExample logger = logging.getLogger(__name__) -llm = get_llm() DOCS_DIR = os.path.abspath("./uploaded_files") vector_store_path = "vectorstore.pkl" document_embedder = get_embedding_model() @@ -125,6 +127,8 @@ def parse(self, llm_output: str) -> Union[AgentAction, AgentFinish]: logger.info(f"LLM Response: {llm_output}") local_state = json.loads(llm_output) + if len(local_state["Generated Sub Questions"]) == 0: + local_state["Generated Sub Questions"].append("Nil") if ( local_state["Generated Sub Questions"][0] == "Nil" or local_state["Tool_Request"] == "Nil" @@ -177,52 +181,58 @@ def ingest_docs(self, file_name: str, filename: str): def llm_chain( - self, context: str, question: str, num_tokens: str + self, query: str, chat_history: List["Message"], **kwargs ) -> Generator[str, None, None]: """Execute a simple LLM chain using the components defined above.""" logger.info("Using llm to generate response directly without knowledge base.") + system_message = [("system", settings.prompts.chat_template)] + conversation_history = [(msg.role, msg.content) for msg in chat_history] + user_input = [("user", "{input}")] + + # Checking if conversation_history is not None and not empty prompt_template = ChatPromptTemplate.from_messages( - [ - ( - "system", - settings.prompts.chat_template, - ), - ("user", "{input}"), - ] + system_message + conversation_history + user_input + ) if conversation_history else ChatPromptTemplate.from_messages( + system_message + user_input ) - + llm = get_llm(**kwargs) chain = prompt_template | llm | StrOutputParser() augmented_user_input = ( - "Context: " + context + "\n\nQuestion: " + question + "\n" + "\n\nQuestion: " + query + "\n" ) return chain.stream({"input": augmented_user_input}) - def rag_chain(self, question: str, num_tokens: int) -> Generator[str, None, None]: + def rag_chain(self, query: str, chat_history: List["Message"], **kwargs) -> Generator[str, None, None]: """Execute a Retrieval Augmented Generation chain using the components defined above.""" logger.info("Using rag to generate response from document") - set_service_context() - final_context = self.run_agent(question) + final_context = 
self.run_agent(query, **kwargs) + if not final_context: + logger.warning("Retrieval failed to get any relevant context") + return iter(["No response generated from LLM, make sure your query is relavent to the ingested document."]) + logger.info(f"Final Answer from agent: {final_context}") - + # TODO Add chat_history final_prompt_template = ChatPromptTemplate.from_messages( [ ("human", final_context) ] ) + llm = get_llm(**kwargs) chain = final_prompt_template | llm | StrOutputParser() return chain.stream({}) - def create_agent(self) -> AgentExecutor: + def create_agent(self, **kwargs) -> AgentExecutor: """ Creates the tools, chain, output parser and agent used to fetch the full context. """ self.ledger = Ledger() + self.kwargs = kwargs tools = [ Tool(name="Search tool", func=self.search, description="Searches for the answer from a given context."), @@ -232,6 +242,7 @@ def create_agent(self) -> AgentExecutor: prompt = CustomPromptTemplate(template=template, tools=tools, input_variables=["question"], ledger=self.ledger) output_parser = CustomOutputParser(ledger=self.ledger) + llm = get_llm(**kwargs) llm_chain = LLMChain(llm=llm, prompt=prompt) recursive_decomposition_agent = LLMSingleActionAgent( @@ -242,12 +253,12 @@ def create_agent(self) -> AgentExecutor: return agent_executor - def run_agent(self, question: str): + def run_agent(self, question: str, **kwargs): """ Run question on the agent """ - agent_executor = self.create_agent() + agent_executor = self.create_agent(**kwargs) agent_executor.invoke({"question": question}) ##### LLM call to get final answer ###### @@ -269,6 +280,9 @@ def retriever(self, query: str) -> List[str]: if vectorstore is None: return [] + logger.info(f"Skipping top k and confidence threshold for query decomposition rag") + # TODO: Use similarity score threshold and top k provided in config + # Currently it's raising an error during invoke. retriever = vectorstore.as_retriever() result = retriever.get_relevant_documents(query) logger.info(result) @@ -285,7 +299,7 @@ def extract_answer(self, chunks: List[str], question: str) -> str: for idx, chunk in enumerate(chunks): prompt += f"Passage {idx + 1}:\n" prompt += chunk + "\n" - + llm = get_llm(**self.kwargs) answer = llm([HumanMessage(content=prompt)]) return answer.content @@ -314,6 +328,7 @@ def math(self, sub_questions: List[str]): prompt += "Be concise and only return the answer." logger.info(f"Performing Math LLM call with prompt: {prompt}") + llm = get_llm(**self.kwargs) sub_answer = llm([HumanMessage(content=prompt)]) self.ledger.question_trace.append(sub_questions[0]) self.ledger.answer_trace.append(sub_answer.content) @@ -325,6 +340,9 @@ def document_search(self, content: str, num_docs: int) -> List[Dict[str, Any]]: try: if vectorstore != None: + logger.info(f"Skipping top k and confidence threshold for query decomposition rag") + + # TODO: Use top k and confidence threshold once retriever issue is resolved retriever = vectorstore.as_retriever() docs = retriever.get_relevant_documents(content) @@ -339,5 +357,23 @@ def document_search(self, content: str, num_docs: int) -> List[Dict[str, Any]]: return result return [] except Exception as e: - logger.error(f"Error from /documentSearch endpoint. Error details: {e}") - return [] + logger.error(f"Error from POST /search endpoint. 
Error details: {e}") + return [] + + def get_documents(self) -> List[str]: + """Retrieves filenames stored in the vector store.""" + try: + if vectorstore: + return get_docs_vectorstore_langchain(vectorstore) + except Exception as e: + logger.error(f"Vectorstore not initialized. Error details: {e}") + return [] + + + def delete_documents(self, filenames: List[str]): + """Delete documents from the vector index.""" + try: + if vectorstore: + return del_docs_vectorstore_langchain(vectorstore, filenames) + except Exception as e: + logger.error(f"Vectorstore not initialized. Error details: {e}") diff --git a/RetrievalAugmentedGeneration/frontend/Dockerfile b/RetrievalAugmentedGeneration/frontend/Dockerfile index a0fda78d7..24d85ea55 100644 --- a/RetrievalAugmentedGeneration/frontend/Dockerfile +++ b/RetrievalAugmentedGeneration/frontend/Dockerfile @@ -1,16 +1,33 @@ -FROM nvcr.io/nvidia/pytorch:23.12-py3 +ARG BASE_IMAGE_URL=nvcr.io/nvidia/base/ubuntu +ARG BASE_IMAGE_TAG=20.04_x64_2022-09-23 +FROM ${BASE_IMAGE_URL}:${BASE_IMAGE_TAG} + +ENV PYTHONDONTWRITEBYTECODE=1 +ENV DEBIAN_FRONTEND noninteractive + +# Install required ubuntu packages for setting up python 3.10 +RUN apt update && \ + apt install -y dpkg openssl libgl1 linux-libc-dev libksba8 curl software-properties-common build-essential libssl-dev libffi-dev && \ + add-apt-repository ppa:deadsnakes/ppa && \ + apt update && apt install -y python3.10 python3.10-dev python3.10-distutils + +# Install pip for python3.10 +RUN curl -sS https://bootstrap.pypa.io/get-pip.py | python3.10 + +RUN rm -rf /var/lib/apt/lists/* + +# Install all custom python dependencies RUN mkdir /app COPY requirements.txt /app RUN apt-get update; \ apt-get upgrade -y; \ - python3 -m pip --no-cache-dir install -r /app/requirements.txt; \ - python3 -m pip --no-cache-dir install nvidia-riva-client==2.14.0; \ + pip3 --no-cache-dir install -r /app/requirements.txt; \ + pip3 --no-cache-dir install nvidia-riva-client==2.14.0; \ apt-get clean -RUN apt-get remove python3-pip USER 1001 COPY frontend /app/frontend WORKDIR /app -ENTRYPOINT ["python3", "-m", "frontend"] +ENTRYPOINT ["python3.10", "-m", "frontend"] diff --git a/RetrievalAugmentedGeneration/frontend/frontend/chat_client.py b/RetrievalAugmentedGeneration/frontend/frontend/chat_client.py index c853a07eb..acc5fa869 100644 --- a/RetrievalAugmentedGeneration/frontend/frontend/chat_client.py +++ b/RetrievalAugmentedGeneration/frontend/frontend/chat_client.py @@ -17,6 +17,7 @@ import logging import mimetypes import typing +import json import requests @@ -44,12 +45,12 @@ def search( self, carrier, prompt: str ) -> typing.List[typing.Dict[str, typing.Union[str, float]]]: """Search for relevant documents and return json data.""" - data = {"content": prompt, "num_docs": 4} + data = {"query": prompt, "top_k": 4} headers = { **carrier, "accept": "application/json", "Content-Type": "application/json" } - url = f"{self.server_url}/documentSearch" + url = f"{self.server_url}/search" _LOGGER.debug( "looking up documents - %s", str({"server_url": url, "post_data": data}) ) @@ -74,10 +75,20 @@ def predict( ) -> typing.Generator[str, None, None]: """Make a model prediction.""" data = { - "question": query, - "context": "", - "use_knowledge_base": use_knowledge_base, - "num_tokens": num_tokens, + "messages": [ + { + "role": "user", + "content": query + } + ], + "use_knowledge_base": use_knowledge_base, + "temperature": 0.2, + "top_p": 0.7, + "max_tokens": num_tokens, + "seed": 42, + "bad": ["string"], + "stop": ["string"], + "stream": True 
} url = f"{self.server_url}/generate" _LOGGER.debug( @@ -87,8 +98,22 @@ def predict( try: with requests.post(url, stream=True, json=data, timeout=50, headers=carrier) as req: req.raise_for_status() - for chunk in req.iter_content(16): - yield chunk.decode("UTF-8") + for chunk in req.iter_lines(): + raw_resp = chunk.decode("UTF-8") + if not raw_resp: + continue + resp_dict = None + try: + resp_dict = json.loads(raw_resp[6:]) + resp_choices = resp_dict.get("choices", []) + if len(resp_choices): + resp_str = resp_choices[0].get("message", {}).get("content", "") + yield resp_str + else: + yield "" + except Exception as e: + raise ValueError(f"Invalid response json: {raw_resp}") from e + except Exception as e: _LOGGER.error(f"Failed to get response from /generate endpoint of chain-server. Error details: {e}. Refer to chain-server logs for details.") yield str("Failed to get response from /generate endpoint of chain-server. Check if the fastapi server in chain-server is up. Refer to chain-server logs for details.") @@ -100,7 +125,7 @@ def predict( @tracing.instrumentation_wrapper def upload_documents(self, carrier, file_paths: typing.List[str]) -> None: """Upload documents to the kb.""" - url = f"{self.server_url}/uploadDocument" + url = f"{self.server_url}/documents" headers = { **carrier, "accept": "application/json", diff --git a/RetrievalAugmentedGeneration/frontend/requirements.txt b/RetrievalAugmentedGeneration/frontend/requirements.txt index 78b1c5c8c..c94be2174 100644 --- a/RetrievalAugmentedGeneration/frontend/requirements.txt +++ b/RetrievalAugmentedGeneration/frontend/requirements.txt @@ -1,12 +1,12 @@ -dataclass_wizard==0.22.2 +PyYAML==6.0.1 +dataclass-wizard==0.22.3 gradio==4.13.0 -jinja2==3.1.2 -numpy==1.25.2 -protobuf==3.20.3 -PyYAML==6.0 -tritonclient[all]==2.36.0 -uvicorn==0.22.0 -opentelemetry-sdk==1.21.0 -opentelemetry-api==1.21.0 -opentelemetry-exporter-otlp-proto-grpc==1.21.0 +jinja2==3.1.3 +numpy==1.26.4 +opentelemetry-api==1.23.0 +opentelemetry-exporter-otlp-proto-grpc==1.23.0 +opentelemetry-sdk==1.23.0 +protobuf==4.25.3 pycountry==23.12.11 +tritonclient[all]==2.43.0 +uvicorn==0.27.1 diff --git a/RetrievalAugmentedGeneration/requirements.txt b/RetrievalAugmentedGeneration/requirements.txt index 20b1b1aa3..cb4ca36ca 100644 --- a/RetrievalAugmentedGeneration/requirements.txt +++ b/RetrievalAugmentedGeneration/requirements.txt @@ -1,22 +1,24 @@ -fastapi==0.104.1 -uvicorn[standard]==0.24.0 -python-multipart==0.0.6 -langchain==0.0.352 -unstructured[all-docs]==0.11.2 -sentence-transformers==2.2.2 +fastapi==0.110.0 +uvicorn[standard]==0.27.1 +python-multipart==0.0.9 +langchain==0.1.9 +unstructured[all-docs]==0.12.5 +sentence-transformers==2.5.1 llama-index==0.9.22 pymilvus==2.3.1 -dataclass-wizard==0.22.2 +dataclass-wizard==0.22.3 opencv-python==4.8.0.74 -minio==7.2.0 +minio==7.2.5 asyncpg==0.29.0 psycopg2-binary==2.9.9 -pgvector==0.2.4 -langchain-core==0.1.3 -langchain-nvidia-ai-endpoints==0.0.1 +pgvector==0.2.5 +langchain-core==0.1.29 +langchain-nvidia-ai-endpoints==0.0.3 langchain-nvidia-trt==0.0.1rc0 nemollm==0.3.4 -opentelemetry-sdk==1.21.0 -opentelemetry-api==1.21.0 -opentelemetry-exporter-otlp-proto-grpc==1.21.0 +opentelemetry-sdk==1.23.0 +opentelemetry-api==1.23.0 +opentelemetry-exporter-otlp-proto-grpc==1.23.0 faiss-cpu==1.7.4 +httpcore==1.0.2 +httpx==0.26.0 \ No newline at end of file diff --git a/deploy/compose/compose.env b/deploy/compose/compose.env index cac6a6640..02846b158 100644 --- a/deploy/compose/compose.env +++ b/deploy/compose/compose.env @@ -4,7 +4,7 @@ 
export MODEL_DIRECTORY="/home/nvidia/llama2_13b_chat_hf_v1/" # export MODEL_DIRECTORY="/home/nvidia/nemotron-3-8b-chat-4k-sft" # Fill this out if you dont have a GPU. Leave this empty if you have a local GPU -export NVIDIA_API_KEY="nvapi-*" +export NVIDIA_API_KEY=${NVIDIA_API_KEY} # flag to enable activation aware quantization for the LLM # export QUANTIZATION="int4_awq" @@ -13,27 +13,18 @@ export NVIDIA_API_KEY="nvapi-*" export MODEL_ARCHITECTURE="llama" -# the name of the model being used - only for displaying on frontend +# the name of the model being used - only for displaying on rag-playground export MODEL_NAME="Llama-2-13b-chat-hf" -# the name of the RAG example being used -export RAG_EXAMPLE="developer_rag" - # [OPTIONAL] the maximum number of input tokens # export MODEL_MAX_INPUT_LENGTH=3000 -# [OPTIONAL] the maximum number of output tokens -# export MODEL_MAX_OUTPUT_LENGTH=512 - # [OPTIONAL] the number of GPUs to make available to the inference server # export INFERENCE_GPU_COUNT="all" # [OPTIONAL] the base directory inside which all persistent volumes will be created # export DOCKER_VOLUME_DIRECTORY="." -# [OPTIONAL] the config file for chain server w.r.t. pwd -export APP_CONFIG_FILE=/dev/null - # parameters for PGVector, update this when using PGVector Vector store # export POSTGRES_PASSWORD=password # export POSTGRES_USER=postgres @@ -62,4 +53,4 @@ export TTS_SAMPLE_RATE=48000 # the config file for the OpenTelemetry collector export OPENTELEMETRY_CONFIG_FILE="./configs/otel-collector-config.yaml" # the config file for Jaeger -export JAEGER_CONFIG_FILE="./configs/jaeger.yaml" +export JAEGER_CONFIG_FILE="./configs/jaeger.yaml" \ No newline at end of file diff --git a/deploy/compose/config.yaml b/deploy/compose/config.yaml deleted file mode 100644 index ddf996f18..000000000 --- a/deploy/compose/config.yaml +++ /dev/null @@ -1,80 +0,0 @@ -vector_store: - # The configuration of the Vector Store connection. - - name: milvus - # The name of vector store db. Can be pgvector or milvus. - # Type: str - # ENV Variable: APP_VECTORSTORE_NAME - - url: "http://milvus:19530" - # The location of the VectorStore DB. - # Type: str - # ENV Variable: APP_VECTORSTORE_URL - -llm: - # The configuration for the server hosting the Large Language models. - - model_engine: "triton-trt-llm" - # The backend name hosting the model. Options currently supported are: triton-trt-llm, nv-ai-foundation - # Type: str - # ENV Variable: APP_LLM_MODELENGINE - - server_url: "llm:8001" - # The location of the server hosting the large language model. Use this option when model engine is - # set to triton-trt-llm, ignore this option if model_engine is set to "nv-ai-foundation" - # Type: str - # ENV Variable: APP_LLM_SERVERURL - - model_name: "ensemble" - # if model_engine is "triton-trt-llm" set this to "ensemble" - # if model_engine is "ai-plaground" options are "llama2_13b", "llama2_70b", "mistral_7b" - # The name of the hosted model. - # Type: str - # ENV Variable: APP_LLM_MODELNAME - -text_splitter: - # The configuration for the Text Splitter. - - chunk_size: 510 - # Chunk size for text splitting. - # When using a token-based text splitter, this is the number of 'tokens per chunk' - # Type: int - - chunk_overlap: 200 - # Overlapping text length for splitting. - # Type: int - -embeddings: - # The configuration embedding models. - - model_name: intfloat/e5-large-v2 - # The name embedding search model from huggingface or nv-ai-foundation. 
- # Type: str - # ENV Variable: APP_EMBEDDINGS_MODELNAME - - dimensions: 1024 - # The dimensions of the embedding search model from huggingface. - # Type: int - - model_engine: huggingface - # The backend name hosting the model, huggingface, nv-ai-foundation are supported. - # Type: str - # ENV Variable: APP_EMBEDDINGS_MODELENGINE - - server_url: "embeddings:9080" - # The server url of nemo embedding microservice - # Type: str - # ENV Variable: APP_EMBEDDINGS_SERVERURL - -prompts: - # The configuration for the prompts used for response generation. - - chat_template: - [INST] <>You are a helpful, respectful and honest assistant.Always answer as helpfully as possible, while being safe.Please ensure that your responses are positive in nature.<>[/INST] {context_str} [INST] {query_str} [/INST] - # The chat prompt template guides the model to generate responses for queries. - # Type: str - - rag_template: - "[INST] <>Use the following context to answer the user's question. If you don't know the answer,just say that you don't know, don't try to make up an answer.<>[INST] Context: {context_str} Question: {query_str} Only return the helpful answer below and nothing else. Helpful answer:[/INST]" - # The RAG prompt template instructs the model to generate responses for queries while utilizing knowledge base. - # Type: str diff --git a/deploy/compose/docker-compose-evaluation.yaml b/deploy/compose/docker-compose-evaluation.yaml index caab7e8f1..64e7a8f95 100644 --- a/deploy/compose/docker-compose-evaluation.yaml +++ b/deploy/compose/docker-compose-evaluation.yaml @@ -19,4 +19,4 @@ services: networks: default: - name: nvidia-llm \ No newline at end of file + name: nvidia-rag \ No newline at end of file diff --git a/deploy/compose/docker-compose-nemotron.yaml b/deploy/compose/docker-compose-nemotron.yaml index 3dd45d864..930555f7c 100644 --- a/deploy/compose/docker-compose-nemotron.yaml +++ b/deploy/compose/docker-compose-nemotron.yaml @@ -8,7 +8,7 @@ services: dockerfile: Dockerfile volumes: - ${MODEL_DIRECTORY:?please update the env file and source it before running}:/model - command: ${MODEL_ARCHITECTURE:?please update the env file and source it before running} --http --max-input-length ${MODEL_MAX_INPUT_LENGTH:-3000} --max-output-length ${MODEL_MAX_OUTPUT_LENGTH:-512} ${QUANTIZATION:+--quantization $QUANTIZATION} + command: ${MODEL_ARCHITECTURE:?please update the env file and source it before running} --http --max-input-length ${MODEL_MAX_INPUT_LENGTH:-3000} ${QUANTIZATION:+--quantization $QUANTIZATION} ports: - "8000:8000" - "8001:8001" @@ -111,7 +111,7 @@ services: capabilities: ["gpu"] count: 1 - query: + chain-server: container_name: chain-server image: chain-server:latest build: @@ -128,12 +128,11 @@ services: APP_LLM_SERVERURL: "llm:8001" APP_LLM_MODELNAME: ensemble APP_LLM_MODELENGINE: triton-trt-llm - APP_CONFIG_FILE: ${APP_CONFIG_FILE} OTEL_EXPORTER_OTLP_ENDPOINT: http://otel-collector:4317 OTEL_EXPORTER_OTLP_PROTOCOL: grpc ENABLE_TRACING: false - volumes: - - ${APP_CONFIG_FILE}:${APP_CONFIG_FILE} + APP_RETRIEVER_TOPK: 4 + APP_RETRIEVER_SCORETHRESHOLD: 0.25 ports: - "8081:8081" expose: @@ -155,30 +154,30 @@ services: - "milvus" - "llm" - frontend: - container_name: llm-playground - image: llm-playground:latest + rag-playground: + container_name: rag-playground + image: rag-playground:latest build: context: ../.././RetrievalAugmentedGeneration/frontend/ dockerfile: Dockerfile command: --port 8090 environment: - APP_SERVERURL: http://query + APP_SERVERURL: http://chain-server APP_SERVERPORT: 
8081 APP_MODELNAME: ${MODEL_NAME:-${MODEL_ARCHITECTURE}} OTEL_EXPORTER_OTLP_ENDPOINT: http://otel-collector:4317 OTEL_EXPORTER_OTLP_PROTOCOL: grpc ENABLE_TRACING: false - RIVA_API_URI: ${RIVA_API_URI} - RIVA_API_KEY: ${RIVA_API_KEY} - RIVA_FUNCTION_ID: ${RIVA_FUNCTION_ID} - TTS_SAMPLE_RATE: ${TTS_SAMPLE_RATE} + RIVA_API_URI: ${RIVA_API_URI:-} + RIVA_API_KEY: ${RIVA_API_KEY:-} + RIVA_FUNCTION_ID: ${RIVA_FUNCTION_ID:-} + TTS_SAMPLE_RATE: ${TTS_SAMPLE_RATE:-48000} ports: - "8090:8090" expose: - "8090" depends_on: - - query + - chain-server networks: default: diff --git a/deploy/compose/docker-compose-observability.yaml b/deploy/compose/docker-compose-observability.yaml index cc2e11a06..52fd7d2a7 100644 --- a/deploy/compose/docker-compose-observability.yaml +++ b/deploy/compose/docker-compose-observability.yaml @@ -44,4 +44,4 @@ services: networks: default: - name: nvidia-llm + name: nvidia-rag diff --git a/deploy/compose/docker-compose-vectordb.yaml b/deploy/compose/docker-compose-vectordb.yaml new file mode 100644 index 000000000..62096b6a1 --- /dev/null +++ b/deploy/compose/docker-compose-vectordb.yaml @@ -0,0 +1,83 @@ +services: + pgvector: + container_name: pgvector + image: ankane/pgvector:v0.5.1 + ports: + - 5432:5432 + expose: + - "5432" + volumes: + - ${DOCKER_VOLUME_DIRECTORY:-.}/volumes/data:/var/lib/postgresql/data + environment: + - POSTGRES_PASSWORD=${POSTGRES_PASSWORD:-password} + - POSTGRES_USER=${POSTGRES_USER:-postgres} + - POSTGRES_DB=${POSTGRES_DB:-api} + + etcd: + container_name: milvus-etcd + image: quay.io/coreos/etcd:v3.5.5 + environment: + - ETCD_AUTO_COMPACTION_MODE=revision + - ETCD_AUTO_COMPACTION_RETENTION=1000 + - ETCD_QUOTA_BACKEND_BYTES=4294967296 + - ETCD_SNAPSHOT_COUNT=50000 + volumes: + - ${DOCKER_VOLUME_DIRECTORY:-.}/volumes/etcd:/etcd + command: etcd -advertise-client-urls=http://127.0.0.1:2379 -listen-client-urls http://0.0.0.0:2379 --data-dir /etcd + healthcheck: + test: ["CMD", "etcdctl", "endpoint", "health"] + interval: 30s + timeout: 20s + retries: 3 + + minio: + container_name: milvus-minio + image: minio/minio:RELEASE.2023-03-20T20-16-18Z + environment: + MINIO_ACCESS_KEY: minioadmin + MINIO_SECRET_KEY: minioadmin + ports: + - "9011:9011" + - "9010:9010" + volumes: + - ${DOCKER_VOLUME_DIRECTORY:-.}/volumes/minio:/minio_data + command: minio server /minio_data --console-address ":9011" --address ":9010" + healthcheck: + test: ["CMD", "curl", "-f", "http://localhost:9010/minio/health/live"] + interval: 30s + timeout: 20s + retries: 3 + + milvus: + container_name: milvus-standalone + image: milvusdb/milvus:v2.4.0.1-gpu-beta + command: ["milvus", "run", "standalone"] + environment: + ETCD_ENDPOINTS: etcd:2379 + MINIO_ADDRESS: minio:9010 + KNOWHERE_GPU_MEM_POOL_SIZE: 2048;4096 + volumes: + - ${DOCKER_VOLUME_DIRECTORY:-.}/volumes/milvus:/var/lib/milvus + # healthcheck: + # test: ["CMD", "curl", "-f", "http://localhost:9091/healthz"] + # interval: 30s + # start_period: 90s + # timeout: 20s + # retries: 3 + ports: + - "19530:19530" + - "9091:9091" + depends_on: + - "etcd" + - "minio" + deploy: + resources: + reservations: + devices: + - driver: nvidia + capabilities: ["gpu"] + count: 1 + +networks: + default: + name: nvidia-rag diff --git a/deploy/compose/docker-compose.yaml b/deploy/compose/docker-compose.yaml deleted file mode 100644 index b3698a046..000000000 --- a/deploy/compose/docker-compose.yaml +++ /dev/null @@ -1,182 +0,0 @@ -services: - - llm: - container_name: llm-inference-server - image: llm-inference-server:latest - build: - context: 
../.././RetrievalAugmentedGeneration/llm-inference-server/ - dockerfile: Dockerfile - volumes: - - ${MODEL_DIRECTORY:?please update the env file and source it before running}:/model - command: ${MODEL_ARCHITECTURE:?please update the env file and source it before running} --max-input-length ${MODEL_MAX_INPUT_LENGTH:-3000} --max-output-length ${MODEL_MAX_OUTPUT_LENGTH:-512} ${QUANTIZATION:+--quantization $QUANTIZATION} - ports: - - "8000:8000" - - "8001:8001" - - "8002:8002" - expose: - - "8000" - - "8001" - - "8002" - shm_size: 20gb - deploy: - resources: - reservations: - devices: - - driver: nvidia - count: ${INFERENCE_GPU_COUNT:-all} - capabilities: [gpu] - - jupyter-server: - container_name: notebook-server - image: notebook-server:latest - build: - context: ../../ - dockerfile: ./notebooks/Dockerfile.notebooks # replace GPU enabled Dockerfile ./notebooks/Dockerfile.gpu_notebook - ports: - - "8888:8888" - expose: - - "8888" - deploy: - resources: - reservations: - devices: - - driver: nvidia - count: 1 - capabilities: [gpu] - - etcd: - container_name: milvus-etcd - image: quay.io/coreos/etcd:v3.5.5 - environment: - - ETCD_AUTO_COMPACTION_MODE=revision - - ETCD_AUTO_COMPACTION_RETENTION=1000 - - ETCD_QUOTA_BACKEND_BYTES=4294967296 - - ETCD_SNAPSHOT_COUNT=50000 - volumes: - - ${DOCKER_VOLUME_DIRECTORY:-.}/volumes/etcd:/etcd - command: etcd -advertise-client-urls=http://127.0.0.1:2379 -listen-client-urls http://0.0.0.0:2379 --data-dir /etcd - healthcheck: - test: ["CMD", "etcdctl", "endpoint", "health"] - interval: 30s - timeout: 20s - retries: 3 - - minio: - container_name: milvus-minio - image: minio/minio:RELEASE.2023-03-20T20-16-18Z - environment: - MINIO_ACCESS_KEY: minioadmin - MINIO_SECRET_KEY: minioadmin - ports: - - "9011:9011" - - "9010:9010" - volumes: - - ${DOCKER_VOLUME_DIRECTORY:-.}/volumes/minio:/minio_data - command: minio server /minio_data --console-address ":9011" --address ":9010" - healthcheck: - test: ["CMD", "curl", "-f", "http://localhost:9010/minio/health/live"] - interval: 30s - timeout: 20s - retries: 3 - - milvus: - container_name: milvus-standalone - image: milvusdb/milvus:v2.4.0.1-gpu-beta - command: ["milvus", "run", "standalone"] - environment: - ETCD_ENDPOINTS: etcd:2379 - MINIO_ADDRESS: minio:9010 - KNOWHERE_GPU_MEM_POOL_SIZE: 2048;4096 - volumes: - - ${DOCKER_VOLUME_DIRECTORY:-.}/volumes/milvus:/var/lib/milvus - healthcheck: - test: ["CMD", "curl", "-f", "http://localhost:9091/healthz"] - interval: 30s - start_period: 90s - timeout: 20s - retries: 3 - ports: - - "19530:19530" - - "9091:9091" - depends_on: - - "etcd" - - "minio" - deploy: - resources: - reservations: - devices: - - driver: nvidia - capabilities: ["gpu"] - count: 1 - - query: - container_name: chain-server - image: chain-server:latest - build: - context: ../../ - dockerfile: ./RetrievalAugmentedGeneration/Dockerfile - args: - EXAMPLE_NAME: ${RAG_EXAMPLE} - command: --port 8081 --host 0.0.0.0 - environment: - APP_VECTORSTORE_URL: "http://milvus:19530" - APP_VECTORSTORE_NAME: "milvus" - COLLECTION_NAME: ${RAG_EXAMPLE} - APP_LLM_SERVERURL: "llm:8001" - APP_LLM_MODELNAME: ensemble - APP_LLM_MODELENGINE: triton-trt-llm - APP_CONFIG_FILE: ${APP_CONFIG_FILE} - OTEL_EXPORTER_OTLP_ENDPOINT: http://otel-collector:4317 - OTEL_EXPORTER_OTLP_PROTOCOL: grpc - ENABLE_TRACING: false - volumes: - - ${APP_CONFIG_FILE}:${APP_CONFIG_FILE} - ports: - - "8081:8081" - expose: - - "8081" - shm_size: 5gb - deploy: - resources: - reservations: - devices: - - driver: nvidia - count: 1 - capabilities: [gpu] - # 
healthcheck: - # test: ["CMD", "curl", "-f", "http://localhost:8080/"] - # interval: 30s - # timeout: 20s - # retries: 3 - depends_on: - - "milvus" - - "llm" - - frontend: - container_name: llm-playground - image: llm-playground:latest - build: - context: ../.././RetrievalAugmentedGeneration/frontend/ - dockerfile: Dockerfile - command: --port 8090 - environment: - APP_SERVERURL: http://query - APP_SERVERPORT: 8081 - APP_MODELNAME: ${MODEL_NAME:-${MODEL_ARCHITECTURE}} - OTEL_EXPORTER_OTLP_ENDPOINT: http://otel-collector:4317 - OTEL_EXPORTER_OTLP_PROTOCOL: grpc - ENABLE_TRACING: false - RIVA_API_URI: ${RIVA_API_URI} - RIVA_API_KEY: ${RIVA_API_KEY} - RIVA_FUNCTION_ID: ${RIVA_FUNCTION_ID} - TTS_SAMPLE_RATE: ${TTS_SAMPLE_RATE} - ports: - - "8090:8090" - expose: - - "8090" - depends_on: - - query - -networks: - default: - name: nvidia-llm diff --git a/deploy/compose/nemotron_config.yaml b/deploy/compose/nemotron_config.yaml deleted file mode 100644 index 658408f2b..000000000 --- a/deploy/compose/nemotron_config.yaml +++ /dev/null @@ -1,65 +0,0 @@ -milvus: - # The configuration of the Milvus connection. - - url: "http://milvus:19530" - # The location of the Milvus Server. - # Type: str - # ENV Variable: APP_MILVUS_URL - -llm: - # The configuration for the server hosting the Large Language models. - - server_url: "llm:9999" - # The location of the server hosting the large language model. - # Type: str - # ENV Variable: APP_LLM_SERVERURL - - model_name: "ensemble" - # The name of the hosted model. - # Type: str - # ENV Variable: APP_LLM_MODELNAME - - model_engine: "triton-trt-llm" - # The backend name hosting the model. Right now only triton-trt-llm and nemo-infer is supported. - # Type: str - # ENV Variable: APP_LLM_MODELENGINE - -text_splitter: - # The configuration for the Text Splitter. - - chunk_size: 510 - # Chunk size for text splitting. - # When using a token-based text splitter, this is the number of 'tokens per chunk' - # Type: int - - chunk_overlap: 200 - # Overlapping text length for splitting. - # Type: int - -embeddings: - # The configuration embedding models. - - model_name: intfloat/e5-large-v2 - # The name embedding search model from huggingface. - # Type: str - - dimensions: 1024 - # The dimensions of the embedding search model from huggingface. - # Type: int - - model_engine: huggingface - # The backend name hosting the model. Right now only huggingface is supported. - # Type: str - -prompts: - # The configuration for the prompts used for response generation. - - chat_template: - "System You are a helpful, respectful and honest assistant.Always answer as helpfully as possible, while being safe.Please ensure that your responses are positive in nature.\nSystem {context_str} \n {query_str} Given context followed by query, you try to answer the query truthfully \nAssistant" - # The chat prompt template guides the model to generate responses for queries. - # Type: str - - rag_template: - "System Use the following context to answer the user's question. If you don't know the answer,just say that you don't know, don't try to make up an answer.\nSystem Context {context_str} Question {query_str} \nOnly return the helpful answer below and nothing else. Helpful answer\nAssistant" - # The RAG prompt template instructs the model to generate responses for queries while utilizing knowledge base. 
- # Type: str diff --git a/deploy/compose/rag-app-api-catalog-text-chatbot.yaml b/deploy/compose/rag-app-api-catalog-text-chatbot.yaml new file mode 100644 index 000000000..3ea3c0fd0 --- /dev/null +++ b/deploy/compose/rag-app-api-catalog-text-chatbot.yaml @@ -0,0 +1,66 @@ +services: + chain-server: + container_name: chain-server + image: chain-server:latest + build: + context: ../../ + dockerfile: ./RetrievalAugmentedGeneration/Dockerfile + args: + EXAMPLE_NAME: nvidia_api_catalog + command: --port 8081 --host 0.0.0.0 + environment: + APP_VECTORSTORE_URL: "http://milvus:19530" + APP_VECTORSTORE_NAME: "milvus" + APP_LLM_MODELNAME: ai-mixtral-8x7b-instruct + APP_LLM_MODELENGINE: nv-api-catalog + APP_EMBEDDINGS_MODELNAME: nvolveqa_40k + APP_EMBEDDINGS_MODELENGINE: nv-api-catalog + APP_TEXTSPLITTER_CHUNKSIZE: 510 + APP_TEXTSPLITTER_CHUNKOVERLAP: 200 + NVIDIA_API_KEY: ${NVIDIA_API_KEY} + APP_PROMPTS_CHATTEMPLATE: "You are a helpful, respectful and honest assistant. Always answer as helpfully as possible, while being safe. Please ensure that your responses are positive in nature." + APP_PROMPTS_RAGTEMPLATE: "You are a helpful AI assistant named Envie. You will reply to questions only based on the context that you are provided. If something is out of context, you will refrain from replying and politely decline to respond to the user." + POSTGRES_PASSWORD: ${POSTGRES_PASSWORD:-password} + POSTGRES_USER: ${POSTGRES_USER:-postgres} + POSTGRES_DB: ${POSTGRES_DB:-api} + COLLECTION_NAME: nvidia_api_catalog + APP_RETRIEVER_TOPK: 4 + APP_RETRIEVER_SCORETHRESHOLD: 0.25 + ports: + - "8081:8081" + expose: + - "8081" + shm_size: 5gb + deploy: + resources: + reservations: + devices: + - driver: nvidia + count: 1 + capabilities: [gpu] + + rag-playground: + container_name: rag-playground + image: rag-playground:latest + build: + context: ../.././RetrievalAugmentedGeneration/frontend/ + dockerfile: Dockerfile + command: --port 8090 + environment: + APP_SERVERURL: http://chain-server + APP_SERVERPORT: 8081 + APP_MODELNAME: ai-mixtral-8x7b-instruct + RIVA_API_URI: ${RIVA_API_URI:-} + RIVA_API_KEY: ${RIVA_API_KEY:-} + RIVA_FUNCTION_ID: ${RIVA_FUNCTION_ID:-} + TTS_SAMPLE_RATE: ${TTS_SAMPLE_RATE:-48000} + ports: + - "8090:8090" + expose: + - "8090" + depends_on: + - chain-server + +networks: + default: + name: nvidia-rag diff --git a/deploy/compose/rag-app-multimodal-chatbot.yaml b/deploy/compose/rag-app-multimodal-chatbot.yaml new file mode 100644 index 000000000..6cdf72038 --- /dev/null +++ b/deploy/compose/rag-app-multimodal-chatbot.yaml @@ -0,0 +1,56 @@ +services: + chain-server: + container_name: chain-server + image: chain-server:latest + build: + context: ../../ + dockerfile: ./RetrievalAugmentedGeneration/Dockerfile + args: + EXAMPLE_NAME: multimodal_rag + command: --port 8081 --host 0.0.0.0 + environment: + APP_LLM_MODELNAME: ai-mixtral-8x7b-instruct + APP_LLM_MODELENGINE: nv-api-catalog + APP_EMBEDDINGS_MODELNAME: nvolveqa_40k + APP_EMBEDDINGS_MODELENGINE: nv-api-catalog + APP_TEXTSPLITTER_CHUNKSIZE: 510 + APP_TEXTSPLITTER_CHUNKOVERLAP: 200 + NVIDIA_API_KEY: ${NVIDIA_API_KEY} + APP_RETRIEVER_TOPK: 4 + APP_RETRIEVER_SCORETHRESHOLD: 0.25 + APP_VECTORSTORE_URL: "http://milvus:19530" + APP_VECTORSTORE_NAME: "milvus" + COLLECTION_NAME: multimodal_rag + APP_PROMPTS_CHATTEMPLATE: "You are a helpful and friendly multimodal intelligent AI assistant named Multimodal Chatbot Assistant. You are an expert in the content of the document provided and can provide information using both text and images. 
The user may also provide an image input, and you will use the image description to retrieve similar images, tables and text. The context given below will provide some technical or financial documentation and whitepapers to help you answer the question. Based on this context, answer the question truthfully. If the question is not related to this, please refrain from answering. Most importantly, if the context provided does not include information about the question from the user, reply saying that you don't know. Do not utilize any information that is not provided in the documents below. All documents will be preceded by tags, for example [[DOCUMENT 1]], [[DOCUMENT 2]], and so on. You can reference them in your reply but without the brackets, so just say document 1 or 2. The question will be preceded by a [[QUESTION]] tag. Be succinct, clear, and helpful. Remember to describe everything in detail by using the knowledge provided, or reply that you don't know the answer. Do not fabricate any responses. Note that you have the ability to reference images, tables, and other multimodal elements when necessary. You can also refer to the image provided by the user, if any." + APP_PROMPTS_RAGTEMPLATE: "You are a helpful and friendly multimodal intelligent AI assistant named Multimodal Chatbot Assistant. You are an expert in the content of the document provided and can provide information using both text and images. The user may also provide an image input, and you will use the image description to retrieve similar images, tables and text. The context given below will provide some technical or financial documentation and whitepapers to help you answer the question. Based on this context, answer the question truthfully. If the question is not related to this, please refrain from answering. Most importantly, if the context provided does not include information about the question from the user, reply saying that you don't know. Do not utilize any information that is not provided in the documents below. All documents will be preceded by tags, for example [[DOCUMENT 1]], [[DOCUMENT 2]], and so on. You can reference them in your reply but without the brackets, so just say document 1 or 2. The question will be preceded by a [[QUESTION]] tag. Be succinct, clear, and helpful. Remember to describe everything in detail by using the knowledge provided, or reply that you don't know the answer. Do not fabricate any responses. Note that you have the ability to reference images, tables, and other multimodal elements when necessary. You can also refer to the image provided by the user, if any." 
+ ports: + - "8081:8081" + expose: + - "8081" + shm_size: 5gb + + rag-playground: + container_name: rag-playground + image: rag-playground:latest + build: + context: ../.././RetrievalAugmentedGeneration/frontend/ + dockerfile: Dockerfile + command: --port 8090 + environment: + APP_SERVERURL: http://chain-server + APP_SERVERPORT: 8081 + APP_MODELNAME: ai-mixtral-8x7b-instruct + RIVA_API_URI: ${RIVA_API_URI:-} + RIVA_API_KEY: ${RIVA_API_KEY:-} + RIVA_FUNCTION_ID: ${RIVA_FUNCTION_ID:-} + TTS_SAMPLE_RATE: ${TTS_SAMPLE_RATE:-48000} + ports: + - "8090:8090" + expose: + - "8090" + depends_on: + - chain-server + +networks: + default: + name: nvidia-rag diff --git a/deploy/compose/rag-app-multiturn-chatbot.yaml b/deploy/compose/rag-app-multiturn-chatbot.yaml new file mode 100644 index 000000000..e6fdf3ee1 --- /dev/null +++ b/deploy/compose/rag-app-multiturn-chatbot.yaml @@ -0,0 +1,64 @@ +services: + chain-server: + container_name: chain-server + image: chain-server:latest + build: + context: ../../ + dockerfile: ./RetrievalAugmentedGeneration/Dockerfile + args: + EXAMPLE_NAME: multi_turn_rag + command: --port 8081 --host 0.0.0.0 + environment: + APP_VECTORSTORE_URL: "http://milvus:19530" + APP_VECTORSTORE_NAME: "milvus" + APP_LLM_MODELNAME: ai-llama2-70b + APP_LLM_MODELENGINE: nv-api-catalog + APP_EMBEDDINGS_MODELNAME: nvolveqa_40k + APP_EMBEDDINGS_MODELENGINE: nv-api-catalog + APP_TEXTSPLITTER_CHUNKSIZE: 510 + APP_TEXTSPLITTER_CHUNKOVERLAP: 200 + NVIDIA_API_KEY: ${NVIDIA_API_KEY} + APP_RETRIEVER_TOPK: 4 + APP_RETRIEVER_SCORETHRESHOLD: 0.25 + POSTGRES_PASSWORD: ${POSTGRES_PASSWORD:-password} + POSTGRES_USER: ${POSTGRES_USER:-postgres} + POSTGRES_DB: ${POSTGRES_DB:-api} + COLLECTION_NAME: multi_turn_rag + ports: + - "8081:8081" + expose: + - "8081" + shm_size: 5gb + deploy: + resources: + reservations: + devices: + - driver: nvidia + count: 1 + capabilities: [gpu] + + rag-playground: + container_name: rag-playground + image: rag-playground:latest + build: + context: ../.././RetrievalAugmentedGeneration/frontend/ + dockerfile: Dockerfile + command: --port 8090 + environment: + APP_SERVERURL: http://chain-server + APP_SERVERPORT: 8081 + APP_MODELNAME: ai-llama2-70b + RIVA_API_URI: ${RIVA_API_URI:-} + RIVA_API_KEY: ${RIVA_API_KEY:-} + RIVA_FUNCTION_ID: ${RIVA_FUNCTION_ID:-} + TTS_SAMPLE_RATE: ${TTS_SAMPLE_RATE:-48000} + ports: + - "8090:8090" + expose: + - "8090" + depends_on: + - chain-server + +networks: + default: + name: nvidia-rag diff --git a/deploy/compose/rag-app-query-decomposition-agent.yaml b/deploy/compose/rag-app-query-decomposition-agent.yaml new file mode 100644 index 000000000..3c3d17b81 --- /dev/null +++ b/deploy/compose/rag-app-query-decomposition-agent.yaml @@ -0,0 +1,66 @@ +services: + chain-server: + container_name: chain-server + image: chain-server:latest + build: + context: ../../ + dockerfile: ./RetrievalAugmentedGeneration/Dockerfile + args: + EXAMPLE_NAME: query_decomposition_rag + command: --port 8081 --host 0.0.0.0 + environment: + APP_VECTORSTORE_URL: "http://milvus:19530" + APP_VECTORSTORE_NAME: "milvus" + APP_LLM_MODELNAME: ai-llama2-70b + APP_LLM_MODELENGINE: nv-api-catalog + APP_EMBEDDINGS_MODELNAME: nvolveqa_40k + APP_EMBEDDINGS_MODELENGINE: nv-api-catalog + APP_TEXTSPLITTER_CHUNKSIZE: 510 + APP_TEXTSPLITTER_CHUNKOVERLAP: 200 + NVIDIA_API_KEY: ${NVIDIA_API_KEY} + APP_PROMPTS_CHATTEMPLATE: "You are a helpful, respectful and honest assistant. Always answer as helpfully as possible, while being safe. Please ensure that your responses are positive in nature." 
+ APP_PROMPTS_RAGTEMPLATE: "You are a helpful AI assistant named Envie. You will reply to questions only based on the context that you are provided. If something is out of context, you will refrain from replying and politely decline to respond to the user." + POSTGRES_PASSWORD: ${POSTGRES_PASSWORD:-password} + POSTGRES_USER: ${POSTGRES_USER:-postgres} + POSTGRES_DB: ${POSTGRES_DB:-api} + APP_RETRIEVER_TOPK: 4 + APP_RETRIEVER_SCORETHRESHOLD: 0.25 + COLLECTION_NAME: query_decomposition + ports: + - "8081:8081" + expose: + - "8081" + shm_size: 5gb + deploy: + resources: + reservations: + devices: + - driver: nvidia + count: 1 + capabilities: [gpu] + + rag-playground: + container_name: rag-playground + image: rag-playground:latest + build: + context: ../.././RetrievalAugmentedGeneration/frontend/ + dockerfile: Dockerfile + command: --port 8090 + environment: + APP_SERVERURL: http://chain-server + APP_SERVERPORT: 8081 + APP_MODELNAME: ai-llama2-70b + RIVA_API_URI: ${RIVA_API_URI:-} + RIVA_API_KEY: ${RIVA_API_KEY:-} + RIVA_FUNCTION_ID: ${RIVA_FUNCTION_ID:-} + TTS_SAMPLE_RATE: ${TTS_SAMPLE_RATE:-48000} + ports: + - "8090:8090" + expose: + - "8090" + depends_on: + - chain-server + +networks: + default: + name: nvidia-rag diff --git a/deploy/compose/docker-compose-nv-ai-foundation.yaml b/deploy/compose/rag-app-structured-data-chatbot.yaml similarity index 56% rename from deploy/compose/docker-compose-nv-ai-foundation.yaml rename to deploy/compose/rag-app-structured-data-chatbot.yaml index ba9ae38be..409556681 100644 --- a/deploy/compose/docker-compose-nv-ai-foundation.yaml +++ b/deploy/compose/rag-app-structured-data-chatbot.yaml @@ -1,57 +1,57 @@ services: - - query: + chain-server: container_name: chain-server image: chain-server:latest build: context: ../../ dockerfile: ./RetrievalAugmentedGeneration/Dockerfile args: - EXAMPLE_NAME: ${RAG_EXAMPLE} + EXAMPLE_NAME: csv_rag command: --port 8081 --host 0.0.0.0 environment: - APP_LLM_MODELNAME: mixtral_8x7b - APP_LLM_MODELENGINE: nv-ai-foundation - APP_EMBEDDINGS_MODELNAME: nvolveqa_40k - APP_EMBEDDINGS_MODELENGINE: nv-ai-foundation - APP_TEXTSPLITTER_CHUNKSIZE: 2000 - APP_TEXTSPLITTER_CHUNKOVERLAP: 200 + APP_LLM_MODELNAME: ai-mixtral-8x7b-instruct + APP_LLM_MODELNAMEPANDASAI: ai-mixtral-8x7b-instruct + APP_LLM_MODELENGINE: nv-api-catalog APP_PROMPTS_CHATTEMPLATE: "You are a helpful, respectful and honest assistant. Always answer as helpfully as possible, while being safe. Please ensure that your responses are positive in nature." APP_PROMPTS_RAGTEMPLATE: "You are a helpful AI assistant named Envie. You will reply to questions only based on the context that you are provided. If something is out of context, you will refrain from replying and politely decline to respond to the user." 
NVIDIA_API_KEY: ${NVIDIA_API_KEY} - APP_CONFIG_FILE: ${APP_CONFIG_FILE} - APP_VECTORSTORE_NAME: "faiss" - COLLECTION_NAME: ${RAG_EXAMPLE} - volumes: - - ${APP_CONFIG_FILE}:${APP_CONFIG_FILE} + COLLECTION_NAME: csv_rag + CSV_NAME: PdM_machines ports: - "8081:8081" expose: - "8081" shm_size: 5gb + deploy: + resources: + reservations: + devices: + - driver: nvidia + count: 1 + capabilities: [gpu] - frontend: - container_name: llm-playground - image: llm-playground:latest + rag-playground: + container_name: rag-playground + image: rag-playground:latest build: context: ../.././RetrievalAugmentedGeneration/frontend/ dockerfile: Dockerfile command: --port 8090 environment: - APP_SERVERURL: http://query + APP_SERVERURL: http://chain-server APP_SERVERPORT: 8081 - APP_MODELNAME: ${MODEL_NAME:-${MODEL_ARCHITECTURE}} - RIVA_API_URI: ${RIVA_API_URI} - RIVA_API_KEY: ${RIVA_API_KEY} - RIVA_FUNCTION_ID: ${RIVA_FUNCTION_ID} - TTS_SAMPLE_RATE: ${TTS_SAMPLE_RATE} + APP_MODELNAME: ai-mixtral-8x7b-instruct + RIVA_API_URI: ${RIVA_API_URI:-} + RIVA_API_KEY: ${RIVA_API_KEY:-} + RIVA_FUNCTION_ID: ${RIVA_FUNCTION_ID:-} + TTS_SAMPLE_RATE: ${TTS_SAMPLE_RATE:-48000} ports: - "8090:8090" expose: - "8090" depends_on: - - query + - chain-server networks: default: - name: nvidia-llm + name: nvidia-rag diff --git a/deploy/compose/docker-compose-pgvector.yaml b/deploy/compose/rag-app-text-chatbot.yaml similarity index 54% rename from deploy/compose/docker-compose-pgvector.yaml rename to deploy/compose/rag-app-text-chatbot.yaml index ba18c6a36..d0b4704a6 100644 --- a/deploy/compose/docker-compose-pgvector.yaml +++ b/deploy/compose/rag-app-text-chatbot.yaml @@ -1,5 +1,4 @@ services: - llm: container_name: llm-inference-server image: llm-inference-server:latest @@ -8,7 +7,7 @@ services: dockerfile: Dockerfile volumes: - ${MODEL_DIRECTORY:?please update the env file and source it before running}:/model - command: ${MODEL_ARCHITECTURE:?please update the env file and source it before running} --max-input-length ${MODEL_MAX_INPUT_LENGTH:-3000} --max-output-length ${MODEL_MAX_OUTPUT_LENGTH:-512} ${QUANTIZATION:+--quantization $QUANTIZATION} + command: ${MODEL_ARCHITECTURE:?please update the env file and source it before running} --max-input-length ${MODEL_MAX_INPUT_LENGTH:-3000} ${QUANTIZATION:+--quantization $QUANTIZATION} ports: - "8000:8000" - "8001:8001" @@ -26,42 +25,50 @@ services: count: ${INFERENCE_GPU_COUNT:-all} capabilities: [gpu] - pgvector: - container_name: pgvector - image: ankane/pgvector:v0.5.1 + jupyter-server: + container_name: notebook-server + image: notebook-server:latest + build: + context: ../../ + dockerfile: ./notebooks/Dockerfile.notebooks # replace GPU enabled Dockerfile ./notebooks/Dockerfile.gpu_notebook ports: - - 5432:5432 + - "8888:8888" expose: - - "5432" - volumes: - - ${DOCKER_VOLUME_DIRECTORY:-.}/volumes/data:/var/lib/postgresql/data - environment: - - POSTGRES_PASSWORD=${POSTGRES_PASSWORD:-password} - - POSTGRES_USER=${POSTGRES_USER:-postgres} - - POSTGRES_DB=${POSTGRES_DB:-api} + - "8888" + deploy: + resources: + reservations: + devices: + - driver: nvidia + count: 1 + capabilities: [gpu] - query: + chain-server: container_name: chain-server image: chain-server:latest build: context: ../../ dockerfile: ./RetrievalAugmentedGeneration/Dockerfile args: - EXAMPLE_NAME: ${RAG_EXAMPLE} + EXAMPLE_NAME: developer_rag command: --port 8081 --host 0.0.0.0 environment: - APP_VECTORSTORE_URL: "${POSTGRES_HOST_IP:-pgvector}:${POSTGRES_PORT_NUMBER:-5432}" - APP_VECTORSTORE_NAME: "pgvector" + 
APP_VECTORSTORE_URL: "http://milvus:19530" + APP_VECTORSTORE_NAME: "milvus" APP_LLM_SERVERURL: "llm:8001" - APP_LLM_MODELNAME: "ensemble" - APP_LLM_MODELENGINE: "triton-trt-llm" - APP_CONFIG_FILE: ${APP_CONFIG_FILE} + APP_LLM_MODELNAME: ensemble + APP_LLM_MODELENGINE: triton-trt-llm POSTGRES_PASSWORD: ${POSTGRES_PASSWORD:-password} POSTGRES_USER: ${POSTGRES_USER:-postgres} POSTGRES_DB: ${POSTGRES_DB:-api} - COLLECTION_NAME: ${RAG_EXAMPLE} - volumes: - - ${APP_CONFIG_FILE}:${APP_CONFIG_FILE} + COLLECTION_NAME: developer_rag + APP_RETRIEVER_TOPK: 4 + APP_RETRIEVER_SCORETHRESHOLD: 0.25 + OTEL_EXPORTER_OTLP_ENDPOINT: http://otel-collector:4317 + OTEL_EXPORTER_OTLP_PROTOCOL: grpc + ENABLE_TRACING: false + APP_TEXTSPLITTER_CHUNKSIZE: 510 + APP_TEXTSPLITTER_CHUNKOVERLAP: 200 ports: - "8081:8081" expose: @@ -75,31 +82,33 @@ services: count: 1 capabilities: [gpu] depends_on: - - "pgvector" - "llm" - frontend: - container_name: llm-playground - image: llm-playground:latest + rag-playground: + container_name: rag-playground + image: rag-playground:latest build: context: ../.././RetrievalAugmentedGeneration/frontend/ dockerfile: Dockerfile command: --port 8090 environment: - APP_SERVERURL: http://query + APP_SERVERURL: http://chain-server APP_SERVERPORT: 8081 APP_MODELNAME: ${MODEL_NAME:-${MODEL_ARCHITECTURE}} - RIVA_API_URI: ${RIVA_API_URI} - RIVA_API_KEY: ${RIVA_API_KEY} - RIVA_FUNCTION_ID: ${RIVA_FUNCTION_ID} - TTS_SAMPLE_RATE: ${TTS_SAMPLE_RATE} + OTEL_EXPORTER_OTLP_ENDPOINT: http://otel-collector:4317 + OTEL_EXPORTER_OTLP_PROTOCOL: grpc + ENABLE_TRACING: false + RIVA_API_URI: ${RIVA_API_URI:-} + RIVA_API_KEY: ${RIVA_API_KEY:-} + RIVA_FUNCTION_ID: ${RIVA_FUNCTION_ID:-} + TTS_SAMPLE_RATE: ${TTS_SAMPLE_RATE:-48000} ports: - "8090:8090" expose: - "8090" depends_on: - - query + - chain-server networks: default: - name: nvidia-llm + name: nvidia-rag diff --git a/docs/Dockerfile b/docs/Dockerfile new file mode 100644 index 000000000..ace9253b7 --- /dev/null +++ b/docs/Dockerfile @@ -0,0 +1,23 @@ +FROM python:3.12-slim +RUN apt-get update && DEBIAN_FRONTEND=noninteractive \ + && apt-get install --no-install-recommends -y \ + curl \ + rsync \ + openssh-client \ + wget \ + jq \ + git \ + python3-pip \ + lsb-release \ + gpg + +RUN wget -O- https://apt.releases.hashicorp.com/gpg | gpg --dearmor -o /usr/share/keyrings/hashicorp-archive-keyring.gpg +RUN echo "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/hashicorp-archive-keyring.gpg] https://apt.releases.hashicorp.com $(lsb_release -cs) main" | tee /etc/apt/sources.list.d/hashicorp.list +RUN apt-get update && DEBIAN_FRONTEND=noninteractive \ + && apt-get install --no-install-recommends -y vault=1.10.11-1 \ + && apt-get autoremove \ + && apt-get clean \ + && rm -rf /var/lib/apt/lists/* + +RUN --mount=type=bind,source=docs,target=/docs pip install -r docs/requirements.txt +RUN --mount=type=bind,source=docs,target=/docs pip install -U --no-deps --no-cache-dir -r docs/swagger-requirements.txt \ No newline at end of file diff --git a/docs/README.md b/docs/README.md index 47dc3a83f..7d08b0780 100644 --- a/docs/README.md +++ b/docs/README.md @@ -1,48 +1,54 @@ # RAG Documentation -The RAG documentation is divided into the following sections: +The Generative AI Examples documentation is available from . 
-- [RAG Documentation](#rag-documentation) - - [Getting Started](#getting-started) - - [User Guides](#user-guides) - - [Architecture Guide](#architecture-guide) - - [Evaluation Tool](#evaluation-tool) - - [Observability Tool](#observability-tool) - - [Others](#others) -## Getting Started +## Building the Documentation -* [Getting Started guides](../RetrievalAugmentedGeneration/README.md): A series of quick start steps that will help you to understand the core concepts and start the pipeline quickly for the different examples and usecases provided in this repository. These guides also include Jupyter notebooks that you can experiment with. +1. Build the container: -## User Guides + ```bash + docker build --pull \ + --tag genai-docs:0.1.0 \ + --file docs/Dockerfile . + ``` -The user guides cover the core details of the provided sample canonical developer rag example and how to configure and use different features to make your own chains. +1. Run the container from the previous step: -* [LLM Inference Server](./rag/llm_inference_server.md): Learn about the service which accelerates LLM inference time using TRT-LLM. -* [Integration with Nvidia AI Playground](./rag/aiplayground.md): Understand how to access **NVIDIA AI Playground** on NGC which allows developers to experience state of the art LLMs and embedding models accelerated on NVIDIA DGX Cloud with NVIDIA TensorRT and Triton Inference Server. -* [Configuration Guide](./rag/configuration.md): The complete guide to all the configuration options available in the `config.yaml` file. -* [Frontend](./rag/frontend.md): Learn more about the sample playground provided as part of the workflow used by all the examples. -* [Chat Server Guide](./rag/chat_server.md): Learn about the chat server which exposes core API's for the end user. All the different examples are deployed behind these standardized API's, exposed by this server. -* [Notebooks Guide](./rag/jupyter_server.md): Learn about the different notebooks available and the server which can be used to access them. + ```bash + docker run -it --rm \ + -v $(pwd):/work -w /work \ + genai-docs:0.1.0 \ + bash + ``` -## Architecture Guide +1. Build the docs: -This guide sheds more light on the infrastructure details and the execution flow for a query when the runtime is used for the default canonical RAG example: + ```bash + sphinx-build -E -a -b html -d /tmp docs docs/_build/output + ``` -* [Architecture](./rag/architecture.md): Understand the architecture of the sample RAG workflow. + The documentation is viewable in your browser with a URL like . -## Evaluation Tool -The sample RAG worlflow provides a set of evaluation pipelines via notebooks which developers can use for benchmarking the default canonical RAG example. -There are also detailed guides on how to reproduce results and create datasets for the evaluation. -* [RAG Evaluation](./rag/evaluation.md): Understand the different notebooks available. +## Publishing Documentation -## Observability Tool +Update `docs/versions.json` and `docs/project.json` to include the new version. -Observability is a crucial aspect that facilitates the monitoring and comprehension of the internal state and behavior of a system or application. -* [Observability tool](./rag/observability.md): Understand the tool and deployment steps for the observability tool. +After the content is finalized, tag the commit to publish with `v` and a semantic version, such as `v0.6.0`. -## Others +If post-release updates are required, tag those commits like `v0.6.0-1`. 
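For example, the tagging step can be performed with plain Git commands. This is a minimal sketch that assumes the GitHub remote is named `origin` and that `v0.6.0` is the version being published; substitute the real version and remote name as needed:

```bash
# Tag the finalized commit with the version to publish and push the tag
# (assumes the remote is named "origin").
git tag -a v0.6.0 -m "Docs for v0.6.0"
git push origin v0.6.0

# For a post-release documentation update, tag the follow-up commit with a suffix.
git tag -a v0.6.0-1 -m "Docs post-release update for v0.6.0"
git push origin v0.6.0-1
```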
+
+If post-release updates to an older version are required, ensure the commit message includes `/not-latest`.
+Then tag the commit like `v0.5.0-1`.
+
+## Checking for Broken URLs
+
+Run a linkcheck build:
+
+```bash
+sphinx-build -b linkcheck -d /tmp docs docs/_build/output | grep broken
+```
+
+Artificial URLs like are reported as broken, so perfection is not possible.
-* [Support Matrix](./rag/support_matrix.md)
-* [Open API schema references](./rag/api_reference/openapi_schema.json)
diff --git a/docs/api-catalog.md b/docs/api-catalog.md
new file mode 100644
index 000000000..8fe35d2ac
--- /dev/null
+++ b/docs/api-catalog.md
@@ -0,0 +1,196 @@
+
+
+# Using the NVIDIA API Catalog
+
+```{contents}
+---
+depth: 2
+local: true
+backlinks: none
+---
+```
+
+## Example Features
+
+This example deploys a developer RAG pipeline for chat Q&A and serves inferencing from an NVIDIA API Catalog endpoint
+instead of NVIDIA Triton Inference Server, a local Llama 2 model, or local GPUs.
+
+Developers get free credits for 10K requests to any of the available models.
+
+```{list-table}
+:header-rows: 1
+
+* - Model
+  - Embedding
+  - Framework
+  - Description
+  - Multi-GPU
+  - TRT-LLM
+  - Model Location
+  - Triton
+  - Vector Database
+
+* - ai-mixtral-8x7b-instruct
+  - nvolveqa_40k
+  - Langchain
+  - QA chatbot
+  - NO
+  - NO
+  - API Catalog
+  - NO
+  - Milvus
+```
+
+The following figure shows the sample topology:
+
+- The sample chat bot web application communicates with the chain server.
+  The chain server sends inference requests to an NVIDIA API Catalog endpoint.
+- Optionally, you can deploy NVIDIA Riva. Riva can use automatic speech recognition to transcribe
+  your questions and use text-to-speech to speak the answers aloud.
+
+![Using NVIDIA API Catalog endpoints for inference instead of local components.](./images/ai-foundations-topology.png)
+
+## Prerequisites
+
+- Clone the Generative AI examples Git repository using Git LFS:
+
+  ```console
+  $ sudo apt -y install git-lfs
+  $ git clone git@github.com:NVIDIA/GenerativeAIExamples.git
+  $ cd GenerativeAIExamples/
+  $ git lfs pull
+  ```
+
+- Install Docker Engine and Docker Compose.
+  Refer to the instructions for [Ubuntu](https://docs.docker.com/engine/install/ubuntu/).
+
+- Optional: Enable NVIDIA Riva automatic speech recognition (ASR) and text to speech (TTS).
+
+  - To launch a Riva server locally, refer to the [Riva Quick Start Guide](https://docs.nvidia.com/deeplearning/riva/user-guide/docs/quick-start-guide.html).
+
+  - In the provided `config.sh` script, set `service_enabled_asr=true` and `service_enabled_tts=true`, and select the desired ASR and TTS languages by adding the appropriate language codes to `asr_language_code` and `tts_language_code`.
+
+  - After the server is running, assign its IP address (or hostname) and port (50051 by default) to `RIVA_API_URI` in `deploy/compose/compose.env`.
+
+  - Alternatively, you can use a hosted Riva API endpoint. You might need to obtain an API key and/or Function ID for access.
+
+    In `deploy/compose/compose.env`, make the following assignments as necessary:
+
+    ```bash
+    export RIVA_API_URI=":"
+    export RIVA_API_KEY=""
+    export RIVA_FUNCTION_ID=""
+    ```
+
+## Get an API Key for the Mixtral 8x7B Instruct API Endpoint
+
+% api-key-start
+
+Perform the following steps if you do not already have an API key.
+You can use different model API endpoints with the same API key.
+
+1. Navigate to .
+
+2. Find the **Mixtral 8x7B Instruct** card and click the card.
+
+   ![Mixtral 8x7B Instruct model card](./images/mixtral-8x7b-instruct.png)
+
+3. Click **Get API Key**.
+
+   ![API section of the model page.](./images/image8.png)
+
+4. Click **Generate Key**.
+
+   ![Generate key window.](./images/api-catalog-generate-api-key.png)
+
+5. Click **Copy Key** and then save the API key.
+   The key begins with the letters nvapi-.
+
+   ![Key Generated window.](./images/key-generated.png)
+
+% api-key-end
+
+
+## Build and Start the Containers
+
+1. In the Generative AI examples repository, export this variable in a terminal.
+
+   Add the API key for the model endpoint:
+
+   ```text
+   export NVIDIA_API_KEY="nvapi-<...>"
+   ```
+
+2. From the root of the repository, build the containers:
+
+   ```console
+   $ docker compose --env-file deploy/compose/compose.env -f deploy/compose/rag-app-api-catalog-text-chatbot.yaml build
+   ```
+
+3. Start the containers:
+
+   ```console
+   $ docker compose --env-file deploy/compose/compose.env -f deploy/compose/rag-app-api-catalog-text-chatbot.yaml up -d
+   ```
+
+   *Example Output*
+
+   ```output
+   ✔ Network nvidia-rag Created
+   ✔ Container chain-server Started
+   ✔ Container rag-playground Started
+   ```
+
+4. Start the Milvus vector database:
+
+   ```console
+   $ docker compose --env-file deploy/compose/compose.env -f deploy/compose/docker-compose-vectordb.yaml up -d milvus
+   ```
+
+   *Example Output*
+
+   ```output
+   ✔ Container milvus-minio Started
+   ✔ Container milvus-etcd Started
+   ✔ Container milvus-standalone Started
+   ```
+
+5. Confirm the containers are running:
+
+   ```console
+   $ docker ps --format "table {{.ID}}\t{{.Names}}\t{{.Status}}"
+   ```
+
+   *Example Output*
+
+   ```output
+   CONTAINER ID NAMES STATUS
+   39a8524829da rag-playground Up 2 minutes
+   bfbd0193dbd2 chain-server Up 2 minutes
+   ec02ff3cc58b milvus-standalone Up 3 minutes
+   6969cf5b4342 milvus-minio Up 3 minutes (healthy)
+   57a068d62fbb milvus-etcd Up 3 minutes (healthy)
+   ```
+
+## Next Steps
+
+- Access the web interface for the chat server.
+  Refer to [](./using-sample-web-application.md) for information about using the web interface.
+- [](./vector-database.md)
+- Stop the containers by running `docker compose -f deploy/compose/rag-app-api-catalog-text-chatbot.yaml down` and
+  `docker compose -f deploy/compose/docker-compose-vectordb.yaml down`.
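For reference, the chain server started above publishes its REST API on port 8081 (see the OpenAPI schema in the next file). The commands below are a minimal sketch for exercising the `/search` and `/generate` endpoints directly with `curl`; they assume the server is reachable at `localhost:8081` as mapped in the Compose files, and the query text is only a placeholder:

```bash
# Retrieve the most relevant document chunks for a query (POST /search).
curl -X POST http://localhost:8081/search \
  -H "Content-Type: application/json" \
  -d '{"query": "I am going to Paris, what should I see?", "top_k": 4}'

# Stream an answer that uses the knowledge base (POST /generate).
# The "stream": true flag mirrors the payload sent by the RAG playground client.
curl -N -X POST http://localhost:8081/generate \
  -H "Content-Type: application/json" \
  -d '{
        "messages": [{"role": "user", "content": "I am going to Paris, what should I see?"}],
        "use_knowledge_base": true,
        "temperature": 0.2,
        "top_p": 0.7,
        "max_tokens": 256,
        "stop": [],
        "stream": true
      }'
```

Judging from the updated playground client, each non-empty streamed line from `/generate` appears to carry a six-character `data: ` prefix followed by a JSON chunk whose `choices[0].message.content` field holds the generated text.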
diff --git a/docs/api_reference/openapi_schema.json b/docs/api_reference/openapi_schema.json new file mode 100644 index 000000000..ccb2cffd8 --- /dev/null +++ b/docs/api_reference/openapi_schema.json @@ -0,0 +1,467 @@ +{ + "openapi": "3.1.0", + "info": { + "title": "FastAPI", + "version": "0.1.0" + }, + "paths": { + "/documents": { + "post": { + "summary": "Upload Document", + "description": "Upload a document to the vector store.", + "operationId": "upload_document_documents_post", + "requestBody": { + "required": true, + "content": { + "multipart/form-data": { + "schema": { + "$ref": "#/components/schemas/Body_upload_document_documents_post" + } + } + } + }, + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": {} + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + } + }, + "get": { + "summary": "Get Documents", + "description": "List available documents.", + "operationId": "get_documents_documents_get", + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/DocumentsResponse" + } + } + } + } + } + }, + "delete": { + "summary": "Delete Document", + "description": "Delete a document.", + "operationId": "delete_document_documents_delete", + "parameters": [ + { + "name": "filename", + "in": "query", + "required": true, + "schema": { + "type": "string", + "title": "Filename" + } + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": {} + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + } + } + }, + "/generate": { + "post": { + "summary": "Generate Answer", + "description": "Generate and stream the response to the provided prompt.", + "operationId": "generate_answer_generate_post", + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/Prompt" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ChainResponse" + } + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + } + } + }, + "/search": { + "post": { + "summary": "Document Search", + "description": "Search for the most relevant documents for the given search parameters.", + "operationId": "document_search_search_post", + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/DocumentSearch" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/DocumentSearchResponse" + } + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + } + } + } + }, + "components": { + "schemas": { + "Body_upload_document_documents_post": { + "properties": { + "file": { + "type": "string", + "format": "binary", + "title": "File" + } + }, + "type": 
"object", + "required": [ + "file" + ], + "title": "Body_upload_document_documents_post" + }, + "ChainResponse": { + "properties": { + "id": { + "type": "string", + "maxLength": 100000, + "title": "Id", + "default": "" + }, + "choices": { + "items": { + "$ref": "#/components/schemas/ChainResponseChoices" + }, + "type": "array", + "maxItems": 256, + "title": "Choices", + "default": [] + } + }, + "type": "object", + "title": "ChainResponse", + "description": "Definition of Chain APIs resopnse data type" + }, + "ChainResponseChoices": { + "properties": { + "index": { + "type": "integer", + "maximum": 256, + "minimum": 0, + "title": "Index", + "default": 0 + }, + "message": { + "allOf": [ + { + "$ref": "#/components/schemas/Message" + } + ], + "title": "Message", + "default": { + "role": "assistant", + "content": "" + } + }, + "finish_reason": { + "type": "string", + "maxLength": 4096, + "title": "Finish Reason", + "default": "" + } + }, + "type": "object", + "title": "ChainResponseChoices", + "description": "Definition of Chain response choices" + }, + "DocumentChunk": { + "properties": { + "content": { + "type": "string", + "maxLength": 131072, + "title": "Content", + "description": "The content of the document chunk." + }, + "filename": { + "type": "string", + "maxLength": 4096, + "title": "Filename", + "description": "The name of the file the chunk belongs to." + }, + "score": { + "type": "number", + "title": "Score", + "description": "The relevance score of the chunk." + } + }, + "type": "object", + "required": [ + "content", + "filename", + "score" + ], + "title": "DocumentChunk", + "description": "Represents a chunk of a document." + }, + "DocumentSearch": { + "properties": { + "query": { + "type": "string", + "maxLength": 131072, + "title": "Query", + "description": "The content or keywords to search for within documents." + }, + "top_k": { + "type": "integer", + "maximum": 256, + "minimum": 0, + "title": "Top K", + "description": "The maximum number of documents to return in the response.", + "default": 4 + } + }, + "type": "object", + "required": [ + "query" + ], + "title": "DocumentSearch", + "description": "Definition of the DocumentSearch API data type." + }, + "DocumentSearchResponse": { + "properties": { + "chunks": { + "items": { + "$ref": "#/components/schemas/DocumentChunk" + }, + "type": "array", + "maxItems": 256, + "title": "Chunks", + "description": "List of document chunks." + } + }, + "type": "object", + "required": [ + "chunks" + ], + "title": "DocumentSearchResponse", + "description": "Represents a response from a document search." + }, + "DocumentsResponse": { + "properties": { + "documents": { + "items": { + "type": "string", + "maxLength": 131072 + }, + "type": "array", + "maxItems": 1000000, + "title": "Documents", + "description": "List of filenames." + } + }, + "type": "object", + "required": [ + "documents" + ], + "title": "DocumentsResponse", + "description": "Represents the response containing a list of documents." 
+ }, + "HTTPValidationError": { + "properties": { + "detail": { + "items": { + "$ref": "#/components/schemas/ValidationError" + }, + "type": "array", + "title": "Detail" + } + }, + "type": "object", + "title": "HTTPValidationError" + }, + "Message": { + "properties": { + "role": { + "type": "string", + "maxLength": 256, + "title": "Role", + "description": "Role for a message AI, User and System", + "default": "user" + }, + "content": { + "type": "string", + "maxLength": 131072, + "title": "Content", + "description": "The input query/prompt to the pipeline.", + "default": "I am going to Paris, what should I see?" + } + }, + "type": "object", + "title": "Message", + "description": "Definition of the Chat Message type." + }, + "Prompt": { + "properties": { + "messages": { + "items": { + "$ref": "#/components/schemas/Message" + }, + "type": "array", + "maxItems": 50000, + "title": "Messages", + "description": "A list of messages comprising the conversation so far. The roles of the messages must be alternating between user and assistant. The last input message should have role user. A message with the the system role is optional, and must be the very first message if it is present." + }, + "use_knowledge_base": { + "type": "boolean", + "title": "Use Knowledge Base", + "description": "Whether to use a knowledge base" + }, + "temperature": { + "type": "number", + "maximum": 1, + "minimum": 0.1, + "title": "Temperature", + "description": "The sampling temperature to use for text generation. The higher the temperature value is, the less deterministic the output text will be. It is not recommended to modify both temperature and top_p in the same call.", + "default": 0.2 + }, + "top_p": { + "type": "number", + "maximum": 1, + "minimum": 0.1, + "title": "Top P", + "description": "The top-p sampling mass used for text generation. The top-p value determines the probability mass that is sampled at sampling time. For example, if top_p = 0.2, only the most likely tokens (summing to 0.2 cumulative probability) will be sampled. It is not recommended to modify both temperature and top_p in the same call.", + "default": 0.7 + }, + "max_tokens": { + "type": "integer", + "maximum": 1024, + "minimum": 0, + "title": "Max Tokens", + "description": "The maximum number of tokens to generate in any given call. Note that the model is not aware of this value, and generation will simply stop at the number of tokens specified.", + "default": 1024 + }, + "stop": { + "items": { + "type": "string", + "maxLength": 256 + }, + "type": "array", + "maxItems": 256, + "title": "Stop", + "description": "A string or a list of strings where the API will stop generating further tokens. The returned text will not contain the stop sequence." + } + }, + "type": "object", + "required": [ + "messages", + "use_knowledge_base" + ], + "title": "Prompt", + "description": "Definition of the Prompt API data type." 
+ }, + "ValidationError": { + "properties": { + "loc": { + "items": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "integer" + } + ] + }, + "type": "array", + "title": "Location" + }, + "msg": { + "type": "string", + "title": "Message" + }, + "type": { + "type": "string", + "title": "Error Type" + } + }, + "type": "object", + "required": [ + "loc", + "msg", + "type" + ], + "title": "ValidationError" + } + } + } +} \ No newline at end of file diff --git a/docs/architecture.md b/docs/architecture.md new file mode 100644 index 000000000..a6c13eb81 --- /dev/null +++ b/docs/architecture.md @@ -0,0 +1,133 @@ + + +# Architecture + +```{contents} +--- +depth: 2 +local: true +backlinks: none +--- +``` + +## Overview of Software Components + +The default sample deployment contains: + +- [NVIDIA NeMo Framework Inference Server](https://docs.nvidia.com/nemo-framework/user-guide/latest/index.html) - part of NVIDIA AI Enterprise solution +- [NVIDIA TensorRT-LLM](https://developer.nvidia.com/tensorrt) - for low latency and high throughput inference for LLMs +- [LangChain](https://github.com/langchain-ai/langchain/) and [LlamaIndex](https://www.llamaindex.ai/) for combining language model components and easily constructing question-answering from a company's database +- [Sample Jupyter Notebooks](jupyter-server.md) and [chat bot web application/API calls](./frontend.md) so that you can test the chat system in an interactive manner +- [Milvus](https://milvus.io/docs/install_standalone-docker.md) - Generated embeddings are stored in a vector database. The vector DB used in this workflow is Milvus. Milvus is an open-source vector database capable of NVIDIA GPU-accelerated vector searches. +- [e5-large-v2 model](https://huggingface.co/embaas/sentence-transformers-e5-large-v2) from Hugging Face to generate the embeddings. +- [Llama2](https://github.com/facebookresearch/llama/), an open source model from Meta, to formulate natural responses. + +This sample deployment is a reference for you to build your own enterprise AI solution with minimal effort. +The software components are used to deploy models and inference pipeline, integrated together with the additional components as indicated in the following diagram: + +![Diagram](./images/image0.png) + +## NVIDIA AI Components + +The sample deployment uses a variety of NVIDIA AI components to customize and deploy the RAG-based chat bot example. + +- [NVIDIA TensorRT-LLM](https://github.com/NVIDIA/TensorRT-LLM) +- [NVIDIA NeMo Inference Container](https://developer.nvidia.com/nemo) + +### NVIDIA TensorRT-LLM Optimization + +An LLM can be optimized using TensorRT-LLM. NVIDIA NeMo uses TensorRT for LLMs (TensorRT-LLM), for deployment which accelerates and maximizes inference performance on the latest LLMs. +The sample deployment leverages a Llama 2 (13B parameters) chat model. +The foundational model is converted to TensorRT format using TensorRT-LLM for optimized inference. + +### NVIDIA NeMo Framework Inference Container + +With NeMo Framework Inference Container, the optimized LLM can be deployed for high-performance, cost-effective, and low-latency inference. NeMo Framework Inference Container contains modules and scripts to help exporting LLM models to [TensorRT-LLM](https://github.com/NVIDIA/TensorRT-LLM) and deploying them to [Triton Inference Server](https://docs.nvidia.com/deeplearning/triton-inference-server/user-guide/docs/index.html) with easy-to-use APIs. 
+ +## Inference Pipeline + +To get started with the inferencing pipeline, we connect the customized LLM to a sample proprietary data source. +This knowledge can come in many forms: product specifications, HR documents, or finance spreadsheets. +Enhancing the model’s capabilities with this knowledge can be done with RAG. + +Because foundational LLMs are not trained on your proprietary enterprise data and are only trained up to a fixed point in time, they need to be augmented with additional data. +RAG consists of two processes. +First, *retrieval* of data from document repositories, databases, or APIs that are all outside of the foundational model’s knowledge. +Second, *generation* of responses via inference. +The following graphic describes an overview of this inference pipeline: + +![Diagram](./images/image1.png) + +## Document Ingestion and Retrieval + +RAG begins with a knowledge base of relevant up-to-date information. +Because data within an enterprise is frequently updated, the ingestion of documents into a knowledge base is a recurring process and could be scheduled as a job. +Next, content from the knowledge base is passed to an embedding model such as e5-large-v2 that the sample deployment uses. +The embedding model converts the content to vectors, referred to as *embeddings*. +Generating embeddings is a critical step in RAG. +The embeddings provide dense numerical representations of textual information. +These embeddings are stored in a vector database, in this case Milvus, which is [RAFT accelerated](https://developer.nvidia.com/blog/accelerating-vector-search-using-gpu-powered-indexes-with-rapids-raft). + +## User Query and Response Generation + +When a user query is sent to the inference server, it is converted to an embedding using the embedding model. +This is the same embedding model that is used to convert the documents in the knowledge base, e5-large-v2, in the case of this sample deployment. +The database performs a similarity/semantic search to find the vectors that most closely resemble the user’s intent and provides them to the LLM as enhanced context. +Because Milvus is RAFT accelerated, the similarity serach is optimized on the GPU. +Lastly, the LLM generates a full answer that is streamed to the user. +This is all done with ease using [LangChain](https://github.com/langchain-ai/langchain/) and [LlamaIndex](https://www.llamaindex.ai). + +The following diagram illustrates the ingestion of documents and generation of responses. + +![Diagram](./images/image2.png) + +LangChain enables you to write LLM wrappers for your own custom LLMs. +NVIDIA provides a sample wrapper for streaming responses from a TensorRT-LLM Llama 2 model running on Triton Inference Server. +This wrapper enables us to leverage LangChain’s standard interface for interacting with LLMs while still achieving vast performance speedup from TensorRT-LLM and scalable and flexible inference from Triton Inference Server. + +A sample chat bot web application is provided in the sample deployment so that you can test the chat system in an interactive manner. +Requests to the chat system are wrapped in API calls, so these can be abstracted to other applications. + +An additional method of customization in the inference pipeline is possible with a prompt template. +A prompt template is a pre-defined recipe for generating prompts for language models. +The prompts can contain instructions, few-shot examples, and context that is appropriate for a given task. 
+In our sample deployment, we prompt our model to generate safe and polite responses. + + +## LLM Inference Server + +The LLM Inference Server uses models that are stored in a model repository. +This repository is available locally to serve inference requests. +After they are available in Triton Inference Server, inference requests are sent from a client application. +Python and C++ libraries provide APIs to simplify communication. +Clients send HTTP/REST requests directly to Triton Inference Server using HTTP/REST or gRPC protocols. + +Within the sample deployment, the Llama2 LLM was optimized using NVIDIA TensorRT for LLMs (TRT-LLM). +This software accelerates and maximizes inference performance on the latest LLMs. + +## Vector DB + +Milvus is an open-source vector database built to power embedding similarity search and AI applications. +The database makes unstructured data from API calls, PDFs, and other documents more accessible by storing them as embeddings. + +When content from the knowledge base is passed to an embedding model, e5-large-v2, the model converts the content to vectors--referred to as *embeddings*. +These embeddings are stored in the vector database. +The sample deployment uses Milvus as the vector database. +Milvus is an open-source vector database capable of NVIDIA GPU-accelerated vector searches. + +If needed, see Milvus's [documentation](https://milvus.io/docs/install_standalone-docker.md/) for how to configure a Docker Compose file for Milvus. diff --git a/docs/chain-server.md b/docs/chain-server.md new file mode 100644 index 000000000..db1dd6e7d --- /dev/null +++ b/docs/chain-server.md @@ -0,0 +1,63 @@ + + +# Chain Server + +```{contents} +--- +depth: 2 +local: true +backlinks: none +--- +``` + +## About the Chain Server + +The chain server is implemented as a sample FastAPI-based server so that you can experience a Q&A chat bot. +The server wraps calls made to different components and orchestrates the entire flow for all the generative AI examples. + + +## Running the Chain Server Independently + +To run the server for development purposes, run the following commands: + +- Build the container from source: + + ```console + $ source deploy/compose/compose.env + $ docker compose -f deploy/compose/rag-app-text-chatbot.yaml build chain-server + ``` + +- Start the container, which starts the server: + + ```console + $ source deploy/compose/compose.env + $ docker compose -f deploy/compose/rag-app-text-chatbot.yaml up chain-server + ``` + +- Open the swagger URL at ``http://host-ip:8081`` to try out the exposed endpoints. + +## Chain Server REST API Reference + +You can view the server REST API schema from the chain server by accessing . + +Alternatively, you can view the same documentation in the following section. + +```{eval-rst} +.. inline-swagger:: + :id: chain-server-api +``` \ No newline at end of file diff --git a/docs/conf.py b/docs/conf.py new file mode 100644 index 000000000..a5367f04d --- /dev/null +++ b/docs/conf.py @@ -0,0 +1,109 @@ +# +# Copyright (c) 2024, NVIDIA CORPORATION. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# + +# Configuration file for the Sphinx documentation builder. +# +# This file only contains a selection of the most common options. For a full +# list see the documentation: +# https://www.sphinx-doc.org/en/master/usage/configuration.html + +from datetime import date + +project = "NVIDIA Generative AI Examples" +this_year = date.today().year +copyright = f"2023-{this_year}, NVIDIA" +author = "NVIDIA" +release = "0.5.0" + +extensions = [ + "sphinx_rtd_theme", + "myst_nb", + "sphinx.ext.intersphinx", + "sphinx_copybutton", + "sphinxcontrib.mermaid", + "swagger_plugin_for_sphinx", + "sphinxcontrib.copydirs", +] + +copybutton_exclude = '.linenos, .gp, .go' + +myst_linkify_fuzzy_links = False +myst_heading_anchors = 3 +myst_enable_extensions = [ + "deflist", + "fieldlist", +] +nb_execution_mode = 'off' +nb_execution_in_temp = True + +exclude_patterns = [ + "_build/**", + "developer-llm-operator", + "enterprise-rag", + "README.md", + "**/README.md", +] + +# suppress_warnings = ["etoc.toctree", "myst.header", "misc.highlighting_failure"] + +html_theme = "sphinx_rtd_theme" +html_copy_source = False +html_show_sourcelink = False +html_show_sphinx = False + +html_theme_options = { + "logo_only": True, + "titles_only": True, +} + +html_domain_indices = False +html_use_index = False +html_extra_path = ["versions.json", "project.json"] +highlight_language = 'console' + +html_static_path = ["media", "api_reference"] +html_css_files = [ + "omni-style.css", + "custom.css" +] + +html_js_files = [ + "version.js" +] + +html_logo = "media/nvidia-logo-white.png" +html_favicon = "media/favicon.ico" + +templates_path = ["templates"] + +swagger_present_uri = "" +swagger_bundle_uri = "https://unpkg.com/swagger-ui-dist@5/swagger-ui-bundle.js" +swagger_css_uri = "https://unpkg.com/swagger-ui-dist@5/swagger-ui.css" + +swagger = [ + { + "name": "Chain Server API Reference", + "id": "chain-server-api", + "page": "chain-server-api", + "options": { + "url": "_static/openapi_schema.json" + }, + } +] + +copydirs_additional_dirs = [ + "../notebooks/", +] \ No newline at end of file diff --git a/docs/configuration.md b/docs/configuration.md new file mode 100644 index 000000000..6e15420c7 --- /dev/null +++ b/docs/configuration.md @@ -0,0 +1,112 @@ + + +# Software Component Configuration + +```{contents} +--- +depth: 2 +local: true +backlinks: none +--- +``` + +## Configuration with the Docker Compose Environment File + +The following sections identify the environment variables and parameters that are used in the `rag-app-text-chatbot.yaml` Docker Compose file in the `deploy/compose` directory of the repository. + +You can set environment variables in the `deploy/compose/compose.env` file. + +### LLM Server Configuration + +LLM Inference server hosts the Large Language Model (LLM) with Triton Inference Server backend. + +You can configure the server using the following environment variables: + +:MODEL_DIRECTORY: Specifies the path to the model directory where model checkpoints are stored. +:MODEL_ARCHITECTURE: Defines the architecture of the model used for deployment. +:MODEL_MAX_INPUT_LENGTH: Maximum allowed input length, with a default value of 3000. +:QUANTIZATION: Specifies to enable activation-aware quantization for the LLM. By default, quantization is not enabled. +:INFERENCE_GPU_COUNT: Specifies the GPUs to be used by Triton for model deployment, with the default setting being "all." 
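+
+For reference, the following is a minimal sketch of how these variables might appear in `deploy/compose/compose.env`; the path and values are illustrative only and should be adjusted for your environment.
+
+```bash
+# Illustrative values only.
+export MODEL_DIRECTORY="/path/to/llama/llama-2-13b_chat/"
+export MODEL_ARCHITECTURE="llama"
+export MODEL_MAX_INPUT_LENGTH=3000
+# Quantization is not enabled by default; set QUANTIZATION only if you want activation-aware quantization.
+export INFERENCE_GPU_COUNT="all"
+```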
+ +### Milvus + +Milvus is the default vector database server. +You can configure Milvus using the following environment variable: + +:DOCKER_VOLUME_DIRECTORY: Specifies the location of the volume mount on the host for the vector database files. + The default value is `./volumes/milvus` in the current working directory. + +### Pgvector + +Pgvector is an alternative vector database server. +You can configure pgvector using the following environment variables: + +:DOCKER_VOLUME_DIRECTORY: Specifies the location of the volume mount on the host for the vector database files. + The default value is `./volumes/data` in the current working directory. +:POSTGRES_PASSWORD: Specifies the password for authenticating to pgvector. + The default value is `password`. +:POSTGRES_USER: Specifies the user name for authenticating to pgvector. + The default value is `postgres`. +:POSTGRES_DB: Specifies the name of the database instance. + The default value is `api`. + + +### Chain Server + +The chain server is the core component that interacts with the LLM Inference Server and the Milvus server to obtain responses. +You can configure the server using the following environment variable: + +:APP_VECTORSTORE_URL: Specifies the URL of the vector database server. +:APP_VECTORSTORE_NAME: Specifies the vendor name of the vector database. Values are `milvus` or `pgvector`. +:COLLECTION_NAME: Specifies the example-specific collection in the vector database. +:APP_LLM_SERVERURL: Specifies the URL of Triton Inference Server. +:APP_LLM_MODELNAME: The model name used by the Triton server. +:APP_LLM_MODELENGINE: An enum that specifies the backend name hosting the model. Supported values are as follows: + + `triton-trt-llm` to use locally deployed LLM models. + + `nv-ai-foundation` to use models hosted from NVIDIA AI Endpoints. + + `nv-api-catalog` to use models hosted from NVIDIA API Catalog. +:APP_RETRIEVER_TOPK: Number of relevant results to retrieve. The default value is `4`. +:APP_RETRIEVER_SCORETHRESHOLD: The minimum confidence score for the retrieved values to be considered. The default value is `0.25`. +:APP_PROMPTS_CHATTEMPLATE: Specifies the instructions to provide to the model. + The prompt is combined with the user-supplied query and then presented to the model. + The chain server uses this prompt when the query does not use a knowledge base. +:APP_PROMPTS_RAGTEMPLATE: Specifies the instructions to provide to the model. + The prompt is combined with the user-supplied query and then presented to the model. + The chain server uses this prompt when the query uses a knowledge base. + + +### RAG Playground + +The RAG playground component is the user interface web application that interacts with the chain server to retrieve responses and provide a user interface to upload documents. +You can configure the server using the following environment variables: + +:APP_SERVERURL: Specifies the URL for the chain server. +:APP_SERVERPORT: Specifies the network port number for the chain server. +:APP_MODELNAME: Specifies the name of the large language model used in the deployment. + This information is for display purposes only and does not affect the inference process. +:RIVA_API_URI: Specifies the host name and port of the NVIDIA Riva server. + This field is optional and provides automatic speech recognition (ASR) and text-to-speech (TTS) functionality. +:RIVA_API_KEY: Specifies a key to access the Riva API. + This field is optional. +:RIVA_FUNCTION_ID: Specifies the function ID to access the Riva API. 
+ This field is optional. +:TTS_SAMPLE_RATE: Specifies the sample rate in hertz (Hz). + The default value is `48000`. diff --git a/docs/evaluation.md b/docs/evaluation.md new file mode 100644 index 000000000..075f3f769 --- /dev/null +++ b/docs/evaluation.md @@ -0,0 +1,244 @@ + + +# Evaluation Tool + +```{contents} +--- +depth: 2 +local: true +backlinks: none +--- +``` + +## Introduction + +Evaluation is crucial for retrieval augmented generation (RAG) pipelines because it ensures the accuracy and relevance of the information that is retrieved as well as the generated content. + +There are three components needed for evaluating the performance of a RAG pipeline: + +- Data for testing. +- Automated metrics to measure performance of both the context retrieval and response generation. +- Human-like evaluation of the generated response from the end-to-end pipeline. + +This tool provides a set of notebooks that demonstrate how to address these requirements in an automated fashion for the default developer RAG example. + +The following figure shows the sample topology: + +- The Jupyter notebooks for evaluation are served by a notebook server. +- The notebook server communicates with the chain server to ingest documents and build a knowledge base. +- The notebook server communicates NVIDIA AI Foundation Models and Endpoints for inference. + +![Evaluation example toplogy](./images/evaluation-topology.png) + + +### Synthetic Data Generation + +Using an existing knowledge base, we can generate synthetic question|answer|context triplets using an LLM. +This tool uses the Llama 2 70B model from the NVIDIA AI Foundation Models and Endpoints for data generation. + +### Automated Metrics + +[RAGAS](https://github.com/explodinggradients/ragas) is an automated metrics tool for measuring performance of both the retriever and generator. +This tool uses a LangChain wrapper to connect to NVIDIA AI Foundation Models and Endpoints to run RAGAS evaluation on our example RAG pipeline. + +### LLM-as-a-Judge + +This tool uses LLMs to provide human-like feedback and Likert evaluation scores for full end-to-end RAG pipelines. +The Llama 2 70B model is used as a judge LLM. + +## Prerequisites + +% prerequisites-start + +- Clone the Generative AI examples Git repository using Git LFS: + + ```console + $ sudo apt -y install git-lfs + $ git clone git@github.com:NVIDIA/GenerativeAIExamples.git + $ cd GenerativeAIExamples/ + $ git lfs pull + ``` + +- A host with an NVIDIA A100, H100, or L40S GPU. + +- Verify NVIDIA GPU driver version 535 or later is installed and that the GPU is in compute mode: + + ```console + $ nvidia-smi -q -d compute + ``` + + *Example Output* + + ```{code-block} output + --- + emphasize-lines: 4,9 + --- + ==============NVSMI LOG============== + + Timestamp : Sun Nov 26 21:17:25 2023 + Driver Version : 535.129.03 + CUDA Version : 12.2 + + Attached GPUs : 1 + GPU 00000000:CA:00.0 + Compute Mode : Default + ``` + + If the driver is not installed or below version 535, refer to the [*NVIDIA Driver Installation Quickstart Guide*](https://docs.nvidia.com/datacenter/tesla/tesla-installation-notes/index.html). + +- Install Docker Engine and Docker Compose. + Refer to the instructions for [Ubuntu](https://docs.docker.com/engine/install/ubuntu/). + +- Install the NVIDIA Container Toolkit. + + 1. Refer to the [installation documentation](https://docs.nvidia.com/datacenter/cloud-native/container-toolkit/latest/install-guide.html). + + 1. 
When you configure the runtime, set the NVIDIA runtime as the default: + + ```console + $ sudo nvidia-ctk runtime configure --runtime=docker --set-as-default + ``` + + If you did not set the runtime as the default, you can reconfigure the runtime by running the preceding command. + + 1. Verify the NVIDIA container toolkit is installed and configured as the default container runtime: + + ```console + $ cat /etc/docker/daemon.json + ``` + + *Example Output* + + ```json + { + "default-runtime": "nvidia", + "runtimes": { + "nvidia": { + "args": [], + "path": "nvidia-container-runtime" + } + } + } + ``` + + 1. Run the `nvidia-smi` command in a container to verify the configuration: + + ```console + $ sudo docker run --rm --runtime=nvidia --gpus all ubuntu nvidia-smi -L + ``` + + *Example Output* + + ```output + GPU 0: NVIDIA A100 80GB PCIe (UUID: GPU-d8ce95c1-12f7-3174-6395-e573163a2ace) + ``` + +% prerequisites-end + + +## Get an API Key for the Llama 2 70B API Endpoint + +Perform the following steps if you do not already have an API key. +You can use different model API endpoints with the same API key. + +1. Navigate to . + +1. Find the **Llama 2 70B** card and click **Learn More**. + + ![Llama 2 70B model card](./images/llama-2-70b-card.png) + +1. Click the **API** button and then click **Generate Key**. + + ![API section of the playground tab.](./images/llama-2-70b-api.png) + +1. Save the generated API key. + + +## Build and Start the Containers + +1. In the Generative AI Examples repository, edit the `deploy/compose/compose.env` file. + + Specify the absolute path to the model location, model architecture, and model name. + + ```text + # full path to the local copy of the model weights + # NOTE: This should be an absolute path and not relative path + export MODEL_DIRECTORY="/path/to/llama/llama-2-13b_chat/" + + # the architecture of the model. eg: llama + export MODEL_ARCHITECTURE="llama" + + # the name of the model being used - only for displaying on frontend + export MODEL_NAME="Llama-2-13b-chat" + ... + ``` + +2. Export the `NVIDIA_API_KEY` variable in terminal. + + Add the API for the model endpoint: + + ```text + export NVIDIA_API_KEY="nvapi-<...>" + ``` + + +3. From the root of the repository, build the containers: + + ```console + $ docker compose --env-file deploy/compose/compose.env -f deploy/compose/rag-app-text-chatbot.yaml build + ``` + +4. Start the milvus container: + + ```console + $ docker compose -f deploy/compose/docker-compose-vectordb.yaml up -d milvus + ``` + +5. Start the containers: + + ```console + $ docker compose --env-file deploy/compose/compose.env -f deploy/compose/rag-app-text-chatbot.yaml up -d + ``` + NVIDIA Triton Inference Server can require 5 minutes to start. The `-d` flag starts the services in the background. + + *Example Output* + + ```output + ✔ Network nvidia-rag Created + ✔ Container llm-inference-server Started + ✔ Container notebook-server Started + ✔ Container chain-server Started + ✔ Container rag-playground Started + ``` + +6. Build and deploy the evaluation service: + + ```console + $ docker compose -f deploy/compose/docker-compose-evaluation.yaml build + $ docker compose -f deploy/compose/docker-compose-evaluation.yaml up -d + ``` + +## Next Steps + +- Access the evaluation notebook server at `http://host-ip:8889` from your web browser and run the notebooks sequentially starting from `01_synthetic_data_generation.ipynb`. 
+ +- Stop the containers by running the following commands: + + - `docker compose -f deploy/compose/rag-app-text-chatbot.yaml down` + - `docker compose -f deploy/compose/docker-compose-vectordb.yaml down` + - `docker compose -f deploy/compose/docker-compose-evaluation.yaml down` diff --git a/docs/frontend.md b/docs/frontend.md new file mode 100644 index 000000000..6fe24135a --- /dev/null +++ b/docs/frontend.md @@ -0,0 +1,62 @@ + + +# RAG Playground Web Application + +```{contents} +--- +depth: 2 +local: true +backlinks: none +--- +``` + +## About the Web Application + +The web application provides a user interface to the RAG [chain server](./chain-server.md) APIs. + +- You can chat with the LLM and see responses streamed back for different examples. +- By selecting **Use knowledge base**, the chat bot returns responses that are augmented with data from documents that you uploaded and were stored in the vector database. +- To store content in the vector database, click **Knowledge Base** in the upper right corner and upload documents. + +![Diagram](./images/image4.jpg) + +## Web Application Design + +At its core, the application is a FastAPI server written in Python. This FastAPI server hosts two [Gradio](https://www.gradio.app/) applications, one for conversing with the model and another for uploading documents. These Gradio pages are wrapped in a static frame created with the NVIDIA Kaizen UI React+Next.js framework and compiled down to static pages. Iframes are used to mount the Gradio applications into the outer frame. + +## Running the Web Application Individually + +To run the web application for development purposes, run the following commands: + +- Build the container from source: + + ```console + $ source deploy/compose/compose.env + $ docker compose -f deploy/compose/rag-app-text-chatbot.yaml build frontend + ``` + +- Start the container, which starts the server: + + ```console + $ source deploy/compose/compose.env + $ docker compose -f deploy/compose/rag-app-text-chatbot.yaml up frontend + ``` + +- Open the web application at ``http://host-ip:8090``. + +If you upload multiple PDF files, the expected time of completion that is shown in the web application might not be correct. 
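+
+If you prefer to script document uploads instead of using the **Knowledge Base** page, you can post files directly to the chain server that backs the web application.
+The following is a minimal sketch that assumes the chain server is reachable on its default port, 8081, and uses a hypothetical file name; adjust both for your environment.
+
+```console
+$ curl -X POST "http://host-ip:8081/documents" -F "file=@./my-document.pdf"
+$ curl -X GET "http://host-ip:8081/documents"
+```
+
+The second command lists the documents that are currently stored in the vector database.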
diff --git a/docs/rag/hf_model_download.md b/docs/hf_model_download.md similarity index 100% rename from docs/rag/hf_model_download.md rename to docs/hf_model_download.md diff --git a/docs/images/ai-foundations-topology.png b/docs/images/ai-foundations-topology.png new file mode 100644 index 000000000..a40b7a8d4 Binary files /dev/null and b/docs/images/ai-foundations-topology.png differ diff --git a/docs/images/api-catalog-generate-api-key.png b/docs/images/api-catalog-generate-api-key.png new file mode 100644 index 000000000..c4332dc54 Binary files /dev/null and b/docs/images/api-catalog-generate-api-key.png differ diff --git a/docs/images/catalog-and-vector-db.png b/docs/images/catalog-and-vector-db.png new file mode 100644 index 000000000..84f64f41e Binary files /dev/null and b/docs/images/catalog-and-vector-db.png differ diff --git a/docs/images/chrome-flags-fix-media-device-access-error.png b/docs/images/chrome-flags-fix-media-device-access-error.png new file mode 100644 index 000000000..a0491fd71 Binary files /dev/null and b/docs/images/chrome-flags-fix-media-device-access-error.png differ diff --git a/docs/images/evaluation-topology.png b/docs/images/evaluation-topology.png new file mode 100644 index 000000000..ef186a35b Binary files /dev/null and b/docs/images/evaluation-topology.png differ diff --git a/docs/rag/images/hf/Slide1.JPG b/docs/images/hf/Slide1.JPG similarity index 100% rename from docs/rag/images/hf/Slide1.JPG rename to docs/images/hf/Slide1.JPG diff --git a/docs/rag/images/hf/Slide10.JPG b/docs/images/hf/Slide10.JPG similarity index 100% rename from docs/rag/images/hf/Slide10.JPG rename to docs/images/hf/Slide10.JPG diff --git a/docs/rag/images/hf/Slide11.JPG b/docs/images/hf/Slide11.JPG similarity index 100% rename from docs/rag/images/hf/Slide11.JPG rename to docs/images/hf/Slide11.JPG diff --git a/docs/rag/images/hf/Slide12.JPG b/docs/images/hf/Slide12.JPG similarity index 100% rename from docs/rag/images/hf/Slide12.JPG rename to docs/images/hf/Slide12.JPG diff --git a/docs/rag/images/hf/Slide13.JPG b/docs/images/hf/Slide13.JPG similarity index 100% rename from docs/rag/images/hf/Slide13.JPG rename to docs/images/hf/Slide13.JPG diff --git a/docs/rag/images/hf/Slide14.JPG b/docs/images/hf/Slide14.JPG similarity index 100% rename from docs/rag/images/hf/Slide14.JPG rename to docs/images/hf/Slide14.JPG diff --git a/docs/rag/images/hf/Slide15.JPG b/docs/images/hf/Slide15.JPG similarity index 100% rename from docs/rag/images/hf/Slide15.JPG rename to docs/images/hf/Slide15.JPG diff --git a/docs/rag/images/hf/Slide2.JPG b/docs/images/hf/Slide2.JPG similarity index 100% rename from docs/rag/images/hf/Slide2.JPG rename to docs/images/hf/Slide2.JPG diff --git a/docs/rag/images/hf/Slide3.JPG b/docs/images/hf/Slide3.JPG similarity index 100% rename from docs/rag/images/hf/Slide3.JPG rename to docs/images/hf/Slide3.JPG diff --git a/docs/rag/images/hf/Slide4.JPG b/docs/images/hf/Slide4.JPG similarity index 100% rename from docs/rag/images/hf/Slide4.JPG rename to docs/images/hf/Slide4.JPG diff --git a/docs/rag/images/hf/Slide5.JPG b/docs/images/hf/Slide5.JPG similarity index 100% rename from docs/rag/images/hf/Slide5.JPG rename to docs/images/hf/Slide5.JPG diff --git a/docs/rag/images/hf/Slide6.JPG b/docs/images/hf/Slide6.JPG similarity index 100% rename from docs/rag/images/hf/Slide6.JPG rename to docs/images/hf/Slide6.JPG diff --git a/docs/rag/images/hf/Slide7.JPG b/docs/images/hf/Slide7.JPG similarity index 100% rename from docs/rag/images/hf/Slide7.JPG rename to 
docs/images/hf/Slide7.JPG diff --git a/docs/rag/images/hf/Slide8.JPG b/docs/images/hf/Slide8.JPG similarity index 100% rename from docs/rag/images/hf/Slide8.JPG rename to docs/images/hf/Slide8.JPG diff --git a/docs/rag/images/hf/Slide9.JPG b/docs/images/hf/Slide9.JPG similarity index 100% rename from docs/rag/images/hf/Slide9.JPG rename to docs/images/hf/Slide9.JPG diff --git a/docs/rag/images/hf/download.png b/docs/images/hf/download.png similarity index 100% rename from docs/rag/images/hf/download.png rename to docs/images/hf/download.png diff --git a/docs/rag/images/image0.png b/docs/images/image0.png similarity index 100% rename from docs/rag/images/image0.png rename to docs/images/image0.png diff --git a/docs/rag/images/image1.png b/docs/images/image1.png similarity index 100% rename from docs/rag/images/image1.png rename to docs/images/image1.png diff --git a/docs/rag/images/image10.png b/docs/images/image10.png similarity index 100% rename from docs/rag/images/image10.png rename to docs/images/image10.png diff --git a/docs/rag/images/image11.png b/docs/images/image11.png similarity index 100% rename from docs/rag/images/image11.png rename to docs/images/image11.png diff --git a/docs/rag/images/image12.png b/docs/images/image12.png similarity index 100% rename from docs/rag/images/image12.png rename to docs/images/image12.png diff --git a/docs/rag/images/image2.png b/docs/images/image2.png similarity index 100% rename from docs/rag/images/image2.png rename to docs/images/image2.png diff --git a/docs/rag/images/image3.jpg b/docs/images/image3.jpg similarity index 100% rename from docs/rag/images/image3.jpg rename to docs/images/image3.jpg diff --git a/docs/rag/images/image4.jpg b/docs/images/image4.jpg similarity index 100% rename from docs/rag/images/image4.jpg rename to docs/images/image4.jpg diff --git a/docs/rag/images/image5.png b/docs/images/image5.png similarity index 100% rename from docs/rag/images/image5.png rename to docs/images/image5.png diff --git a/docs/rag/images/image6.png b/docs/images/image6.png similarity index 100% rename from docs/rag/images/image6.png rename to docs/images/image6.png diff --git a/docs/rag/images/image7.png b/docs/images/image7.png similarity index 100% rename from docs/rag/images/image7.png rename to docs/images/image7.png diff --git a/docs/images/image8.png b/docs/images/image8.png new file mode 100644 index 000000000..16baceeab Binary files /dev/null and b/docs/images/image8.png differ diff --git a/docs/rag/images/image9.png b/docs/images/image9.png similarity index 100% rename from docs/rag/images/image9.png rename to docs/images/image9.png diff --git a/docs/images/key-generated.png b/docs/images/key-generated.png new file mode 100644 index 000000000..e1f204d90 Binary files /dev/null and b/docs/images/key-generated.png differ diff --git a/docs/images/llama-2-70b-api.png b/docs/images/llama-2-70b-api.png new file mode 100644 index 000000000..de44215a5 Binary files /dev/null and b/docs/images/llama-2-70b-api.png differ diff --git a/docs/images/llama-2-70b-card.png b/docs/images/llama-2-70b-card.png new file mode 100644 index 000000000..aa4c8484d Binary files /dev/null and b/docs/images/llama-2-70b-card.png differ diff --git a/docs/images/llama-2-generate-key.png b/docs/images/llama-2-generate-key.png new file mode 100644 index 000000000..5814d31c7 Binary files /dev/null and b/docs/images/llama-2-generate-key.png differ diff --git a/docs/images/local-gpus-topology.png b/docs/images/local-gpus-topology.png new file mode 100644 index 
000000000..c33a14eda Binary files /dev/null and b/docs/images/local-gpus-topology.png differ diff --git a/docs/images/media-device-access-error.png b/docs/images/media-device-access-error.png new file mode 100644 index 000000000..e0a2d8c14 Binary files /dev/null and b/docs/images/media-device-access-error.png differ diff --git a/docs/images/mixtral-8x7b-instruct.png b/docs/images/mixtral-8x7b-instruct.png new file mode 100644 index 000000000..b12d8f67a Binary files /dev/null and b/docs/images/mixtral-8x7b-instruct.png differ diff --git a/docs/images/sample-web-application.png b/docs/images/sample-web-application.png new file mode 100644 index 000000000..3fc2d8789 Binary files /dev/null and b/docs/images/sample-web-application.png differ diff --git a/docs/index.md b/docs/index.md new file mode 100644 index 000000000..c3fbc15e4 --- /dev/null +++ b/docs/index.md @@ -0,0 +1,199 @@ + + +# NVIDIA Generative AI Examples + +Generative AI enables users to quickly generate new content based on a variety of inputs and is a powerful tool for streamlining the workflow of creatives, engineers, researchers, scientists, and more. +The use cases and possibilities span all industries and individuals. +Generative AI models can produce novel content like stories, emails, music, images, and videos. + +Generative AI starts with foundational models trained on vast quantities of unlabeled data. +Large language models (LLMs) are trained on an extensive range of textual data online. +These LLMs can understand prompts and generate novel, human-like responses. +Businesses can build applications to leverage this capability of LLMs. +Some uses are creative writing assistants for marketing, document summarization for legal teams, and code writing for software development. + +The NVIDIA Generative AI Examples use Docker Compose +run Retrieval Augmented Generation (RAG) Large Language Model (LLM) pipelines. + +All the example pipelines deploy a sample chat bot application for question and answering that is enhanced with RAG. +The chat bot also supports uploading documents to create a knowledge base. + +## Developer RAG Examples + +```{eval-rst} +.. 
list-table:: + :header-rows: 1 + + * - | Model + - | Embedding + - | Framework + - | Description + - | Multi-GPU + - | TensorRT-LLM + - | Model + | Location + - | Triton + | Inference + | Server + - | Vector + | Database + + * - ai-mixtral-8x7b-instruct + - nvolveqa_40k + - LangChain + - :doc:`api-catalog` + - NO + - NO + - API Catalog + - NO + - Milvus or pgvector + + * - llama-2 + - e5-large-v2 + - LlamaIndex + - :doc:`local-gpu` + - NO + - YES + - Local Model + - YES + - Milvus or pgvector + + * - llama-2 + - e5-large-v2 + - LlamaIndex + - :doc:`multi-gpu` + - YES + - YES + - Local Model + - YES + - Milvus or pgvector + + * - ai-llama2-70b + - nvolveqa_40k + - LangChain + - :doc:`query-decomposition` + - NO + - NO + - API Catalog + - NO + - Milvus or pgvector + + * - llama2-7b + - e5-large-v2 + - LlamaIndex + - :doc:`quantized-llm-model` + - NO + - YES + - Local Model + - YES + - Milvus or pgvector + + * - ai-mixtral-8x7b-instruct for response generation + + ai-mixtral-8x7b-instruct for PandasAI + - Not Applicable + - PandasAI + - :doc:`structured-data` + - NO + - NO + - API Catalog + - NO + - Not Applicable + + * - ai-mixtral-8x7b-instruct for response generation + + ai-google-Deplot for graph to text conversion + + ai-Neva-22B for image to text conversion + - nvolveqa_40k + - Custom Python + - :doc:`multimodal-data` + - NO + - NO + - API Catalog + - NO + - Milvus or pgvector + + * - ai-llama2-70b + - nvolveqa_40k + - LangChain + - :doc:`multi-turn` + - NO + - NO + - API Catalog + - NO + - Milvus or pgvector + +``` + +## Open Source Connectors + +```{include} ../README.md +:start-after: '## Open Source Integrations' +:end-before: '## NVIDIA support' +``` + +```{toctree} +:caption: RAG Pipelines for Developers +:titlesonly: +:hidden: + +About the RAG Pipelines +support-matrix +API Catalog Models +Local GPUs +Multi-GPU for Inference +Query Decomposition +Quantized Model +Structured Data +Multimodal Data +Multi-turn +Sample Chat Application +Alternative Vector Database +``` + +```{toctree} +:caption: Tools +:titlesonly: +:hidden: + +Evaluation +Observability +``` + +```{toctree} +:caption: Jupyter Notebooks +:titlesonly: +:hidden: +:glob: + +notebooks/* +``` + +```{toctree} +:caption: Software Components +:titlesonly: +:hidden: + +architecture +llm-inference-server +frontend +jupyter-server +chain-server +configuration +``` diff --git a/docs/jupyter-server.md b/docs/jupyter-server.md new file mode 100644 index 000000000..32a44ae67 --- /dev/null +++ b/docs/jupyter-server.md @@ -0,0 +1,126 @@ + + +# Jupyter Notebook Server + +```{contents} +--- +depth: 2 +local: true +backlinks: none +--- +``` + +## About the Notebooks + +The Jupyter notebooks provide guidance to building knowledge-augmented chat bots. + +The following Jupyter notebooks are provided with the AI workflow for the default canonical RAG example: + +- [LLM Streaming Client](../../notebooks/01-llm-streaming-client.ipynb) + + This notebook demonstrates how to use a client to stream responses from an LLM deployed to NVIDIA Triton Inference Server with NVIDIA TensorRT-LLM (TRT-LLM). This deployment format optimizes the model for low latency and high throughput inference. + +- [Document Question-Answering with LangChain](../../notebooks/02_langchain_simple.ipynb) + + This notebook demonstrates how to use LangChain to build a chat bot that references a custom knowledge base. LangChain provides a simple framework for connecting LLMs to your own data sources. It shows how to integrate a TensorRT-LLM to LangChain using a custom wrapper. 
+ +- [Document Question-Answering with LlamaIndex](../../notebooks/03_llama_index_simple.ipynb) + + This notebook demonstrates how to use LlamaIndex to build a chat bot that references a custom knowledge base. It contains the same functionality as the preceding notebook, but uses some LlamaIndex components instead of LangChain components. It also shows how the two frameworks can be used together. + +- [Advanced Document Question-Answering with LlamaIndex](../../notebooks/04_llamaindex_hier_node_parser.ipynb) + + This notebook demonstrates how to use LlamaIndex to build a more complex retrieval for a chat bot. The retrieval method shown in this notebook works well for code documentation. The method retrieves more contiguous document blocks that preserve both code snippets and explanations of code. + +- [Upload Press Releases and Interact with REST FastAPI Server](../../notebooks/05_dataloader.ipynb) + + This notebook demonstrates how to use the REST FastAPI server to upload the knowledge base and then ask a question without and with the knowledge base. + +- [NVIDIA AI Endpoint Integration with LangChain](../../notebooks/07_Option(1)_NVIDIA_AI_endpoint_simple.ipynb) + + This notebook demonstrates how to build a Retrieval Augmented Generation (RAG) example using the NVIDIA AI endpoint integrated with Langchain, with FAISS as the vector store. + +- [RAG with LangChain and local LLM model](../../notebooks/07_Option(2)_minimalistic_RAG_with_langchain_local_HF_LLM.ipynb) + + This notebook demonstrates how to plug in a local LLM from Hugging Face Hub and build a simple RAG app using LangChain. + +- [NVIDIA AI Endpoint with LlamaIndex and LangChain](../../notebooks/08_Option(1)_llama_index_with_NVIDIA_AI_endpoint.ipynb) + + This notebook demonstrates how to plug in an NVIDIA AI Endpoint mixtral_8x7b and embedding nvolveqa_40k, bind these into LlamaIndex with these customizations. + +- [Locally deployed model from Hugging Face integration with LlamaIndex and LangChain](../../notebooks/08_Option(2)_llama_index_with_HF_local_LLM.ipynb) + + This notebook demonstrates how to plug in a local LLM from Hugging Face Hub Llama-2-13b-chat-hf and all-MiniLM-L6-v2 embedding from Hugging Face, bind these to into LlamaIndex with these customizations. + +- [LangChain agent with tools plug in multiple models from NVIDIA AI Endpoints](../../notebooks/09_Agent_use_tools_leveraging_NVIDIA_AI_endpoints.ipynb) + + This notebook demonstrates how to use multiple NVIDIA AI Endpoint models such as mixtral_8x7b, Deplot, and Neva. + +- [LangChain with HTML documents and NVIDIA AI Endpoints](../../notebooks/10_RAG_for_HTML_docs_with_Langchain_NVIDIA_AI_Endpoints.html) + + This notebook demonstrates how to build a RAG using NVIDIA AI Endpoints for LangChain. + The notebook creates a vector store by downloading web pages and generating their embeddings using FAISS. + The notebook shows two different chat chains for querying the vector store. + + + +## Running JupyterLab Server Individually + +To run the JupyterLab server for development purposes, run the following commands: + +- Optional: Notebooks 7 to 9 require GPUs. + If you have a GPU and want to run one of these notebooks, update the jupyter-server service in the Docker Compose file to use `./notebooks/Dockerfile.gpu_notebook` as the Dockerfile: + + ```yaml + jupyter-server: + container_name: notebook-server + image: notebook-server:latest + build: + context: ../../ + dockerfile: ./notebooks/Dockerfile.gpu_notebook + ``` + + These notebooks can use more than one GPU. 
+ To use more than one, specify the GPU IDs in the `device_ids` field or specify `count: all`. + + ```yaml + jupyter-server: + deploy: + resources: + reservations: + devices: + - driver: nvidia + device_ids: ['0', '1'] + capabilities: [gpu] + ``` + +- Build the container from source: + + ```console + $ source deploy/compose/compose.env + $ docker compose -f deploy/compose/rag-app-text-chatbot.yaml build jupyter-server + ``` + +- Start the container, which starts the notebook server: + + ```console + $ source deploy/compose/compose.env + $ docker compose -f deploy/compose/rag-app-text-chatbot.yaml up jupyter-server + ``` + +- Open the JupyterLab server at ``http://host-ip:8888`` diff --git a/docs/llm-inference-server.md b/docs/llm-inference-server.md new file mode 100644 index 000000000..a9298be2f --- /dev/null +++ b/docs/llm-inference-server.md @@ -0,0 +1,61 @@ + + +# NeMo Framework Inference Server + +```{contents} +--- +depth: 2 +local: true +backlinks: none +--- +``` + +## About the Inference Server + +The generative AI examples use the [NeMo Framework Inference Server](https://docs.nvidia.com/nemo-framework/user-guide/latest/index.html) container. +NeMo can create an optimized LLM using TensorRT-LLM and can deploy models using NVIDIA Triton Inference Server for high-performance, cost-effective, and low-latency inference. +Many examples use Llama 2 models, and the LLM Inference Server container contains the modules and scripts that are required for TRT-LLM conversion of the Llama 2 models and deployment using NVIDIA Triton Inference Server. + +The inference server is used with examples that deploy a model on-premises. +The examples that use [NVIDIA AI foundation models](https://www.nvidia.com/en-in/ai-data-science/foundation-models/) or NVIDIA AI Endpoints do not use this component. + + +## Running the Inference Server Individually + +The following steps describe how to deploy a Llama 2 model. + +- Download the Llama 2 Chat model weights from [Meta](https://ai.meta.com/resources/models-and-libraries/llama-downloads/) or [HuggingFace](https://huggingface.co/meta-llama/Llama-2-13b-chat-hf/). You can check the [support matrix](support-matrix.md) for the GPU requirements for the deployment. + +- Update the `deploy/compose/compose.env` file with `MODEL_DIRECTORY` as the downloaded Llama 2 model path and other model parameters as needed. + +- Build the LLM inference server container from source: + + ```console + $ source deploy/compose/compose.env + $ docker compose -f deploy/compose/rag-app-text-chatbot.yaml build llm + ``` + +- Run the container. The container starts Triton Inference Server with the TRT-LLM optimized Llama 2 model: + + ```console + $ source deploy/compose/compose.env + $ docker compose -f deploy/compose/rag-app-text-chatbot.yaml up llm + ``` + +After the optimized Llama 2 model is deployed in Triton Inference Server, clients can send HTTP/REST or gRPC requests directly to the server. +A sample implementation of a client can be found in the `triton_trt_llm.py` file of the GitHub repository at [integrations/langchain/llms/](https://github.com/NVIDIA/GenerativeAIExamples/tree/main/integrations/langchain/llms).
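+
+As a quick check that the server is reachable before you wire in a client, you can query the standard Triton Inference Server HTTP health and readiness endpoints.
+This is a sketch that assumes the default Triton HTTP port, 8000, is published on the host and uses `<model-name>` as a placeholder for the name of your deployed model.
+
+```console
+$ curl -s "http://host-ip:8000/v2/health/ready"
+$ curl -s "http://host-ip:8000/v2/models/<model-name>/ready"
+```
+
+Both requests return HTTP status 200 when the server and the model are ready to serve inference requests.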
diff --git a/docs/local-gpu.md b/docs/local-gpu.md new file mode 100644 index 000000000..ae4d24e1b --- /dev/null +++ b/docs/local-gpu.md @@ -0,0 +1,328 @@ + + +# Using Local GPUs for a Q&A Chatbot + +```{contents} +--- +depth: 2 +local: true +backlinks: none +--- +``` + +## Example Features + +This example deploys a developer RAG pipeline for chat Q&A and serves inferencing with the NeMo Framework Inference container. + +This example uses a local host with an NVIDIA A100, H100, or L40S GPU. + +```{list-table} +:header-rows: 1 + +* - Model + - Embedding + - Framework + - Description + - Multi-GPU + - TRT-LLM + - Model Location + - Triton + - Vector Database + +* - llama-2 + - e5-large-v2 + - LlamaIndex + - QA chatbot + - NO + - YES + - Local Model + - YES + - Milvus + +* - llama-2 + - e5-large-v2 + - LlamaIndex + - QA chatbot + - NO + - YES + - Local Model + - YES + - pgvector +``` + +The following figure shows the sample topology: + +- The sample chat bot web application communicates with the local chain server. + +- The local chain server sends inference requests to NVIDIA Triton Inference Server (TIS). + TIS uses TensorRT-LLM and NVIDIA GPUs with the LLama 2 model for generative AI. + +- The sample chat bot supports uploading documents to create a knowledge base. + The uploaded documents are parsed by the chain server and embeddings are stored + in the vector database, Milvus or pgvector. + When you submit a question and request to use the knowledge base, the chain server + retrieves the most relevant documents and submits them with the question to + TIS to perform retrieval-augumented generation. + +- Optionally, you can deploy NVIDIA Riva. Riva can use automatic speech recognition to + transcribe your questions and use text-to-speech to speak the answers aloud. + +![Sample topology for a RAG pipeline with local GPUs and local inference.](./images/local-gpus-topology.png) + + +## Prerequisites + +- Clone the Generative AI examples Git repository using Git LFS: + + ```console + $ sudo apt -y install git-lfs + $ git clone git@github.com:NVIDIA/GenerativeAIExamples.git + $ cd GenerativeAIExamples/ + $ git lfs pull + ``` + +- A host with an NVIDIA A100, H100, or L40S GPU. + +- Verify NVIDIA GPU driver version 535 or later is installed and that the GPU is in compute mode: + + ```console + $ nvidia-smi -q -d compute + ``` + + *Example Output* + + ```{code-block} output + --- + emphasize-lines: 4,9 + --- + ==============NVSMI LOG============== + + Timestamp : Sun Nov 26 21:17:25 2023 + Driver Version : 535.129.03 + CUDA Version : 12.2 + + Attached GPUs : 1 + GPU 00000000:CA:00.0 + Compute Mode : Default + ``` + + If the driver is not installed or below version 535, refer to the [*NVIDIA Driver Installation Quickstart Guide*](https://docs.nvidia.com/datacenter/tesla/tesla-installation-notes/index.html). + +- Install Docker Engine and Docker Compose. + Refer to the instructions for [Ubuntu](https://docs.docker.com/engine/install/ubuntu/). + +- Install the NVIDIA Container Toolkit. + + 1. Refer to the [installation documentation](https://docs.nvidia.com/datacenter/cloud-native/container-toolkit/latest/install-guide.html). + + 1. When you configure the runtime, set the NVIDIA runtime as the default: + + ```console + $ sudo nvidia-ctk runtime configure --runtime=docker --set-as-default + ``` + + If you did not set the runtime as the default, you can reconfigure the runtime by running the preceding command. + + 1. 
Verify the NVIDIA container toolkit is installed and configured as the default container runtime: + + ```console + $ cat /etc/docker/daemon.json + ``` + + *Example Output* + + ```json + { + "default-runtime": "nvidia", + "runtimes": { + "nvidia": { + "args": [], + "path": "nvidia-container-runtime" + } + } + } + ``` + + 1. Run the `nvidia-smi` command in a container to verify the configuration: + + ```console + $ sudo docker run --rm --runtime=nvidia --gpus all ubuntu nvidia-smi -L + ``` + + *Example Output* + + ```output + GPU 0: NVIDIA A100 80GB PCIe (UUID: GPU-d8ce95c1-12f7-3174-6395-e573163a2ace) + ``` + +- Optional: Enable NVIDIA Riva automatic speech recognition (ASR) and text to speech (TTS). + + - To launch a Riva server locally, refer to the [Riva Quick Start Guide](https://docs.nvidia.com/deeplearning/riva/user-guide/docs/quick-start-guide.html). + + - In the provided `config.sh` script, set `service_enabled_asr=true` and `service_enabled_tts=true`, and select the desired ASR and TTS languages by adding the appropriate language codes to `asr_language_code` and `tts_language_code`. + + - After the server is running, assign its IP address (or hostname) and port (50051 by default) to `RIVA_API_URI` in `deploy/compose/compose.env`. + + - Alternatively, you can use a hosted Riva API endpoint. You might need to obtain an API key and/or Function ID for access. + + In `deploy/compose/compose.env`, make the following assignments as necessary: + + ```bash + export RIVA_API_URI=":" + export RIVA_API_KEY="" + export RIVA_FUNCTION_ID="" + ``` + +## Download the Llama 2 Model and Weights + +1. Fill out Meta's [Llama request access form](https://ai.meta.com/resources/models-and-libraries/llama-downloads/). + + - Select the **Llama 2 & Llama Chat** checkbox. + - After verifying your email, Meta will email you a download link. + +1. Clone the Llama repository: + + ```console + $ git clone https://github.com/facebookresearch/llama.git + $ cd llama/ + ``` + +1. Run the `download.sh` script. When prompted, specify `13B-chat` to download the llama-2-13b-chat model: + + ```console + $ ./download.sh + Enter the URL from email: < https://download.llamameta.net/...> + + Enter the list of models to download without spaces (7B,13B,70B,7B-chat,13B-chat,70B-chat), or press Enter for all: 13B-chat + ``` + +1. Copy the tokenizer to the model directory. + + ```console + $ mv tokenizer* llama-2-13b-chat/ + $ ls llama-2-13b-chat/ + ``` + + *Example Output* + + ```output + checklist.chk consolidated.00.pth consolidated.01.pth params.json tokenizer.model tokenizer_checklist.chk + ``` + +## Build and Start the Containers + +1. In the Generative AI Examples repository, edit the `deploy/compose/compose.env` file. + + Specify the absolute path to the model location, model architecture, and model name. + + ```bash + # full path to the local copy of the model weights + # NOTE: This should be an absolute path and not relative path + export MODEL_DIRECTORY="/path/to/llama/llama-2-13b_chat/" + + # the architecture of the model. eg: llama + export MODEL_ARCHITECTURE="llama" + + # the name of the model being used - only for displaying on frontend + export MODEL_NAME="Llama-2-13b-chat" + ... + ``` + +1. From the root of the repository, build the containers: + + ```console + $ docker compose --env-file deploy/compose/compose.env -f deploy/compose/rag-app-text-chatbot.yaml build + ``` + +1. 
Start the containers: + + ```console + $ docker compose --env-file deploy/compose/compose.env -f deploy/compose/rag-app-text-chatbot.yaml up -d + ``` + + NVIDIA Triton Inference Server can require 5 minutes to start. The `-d` flag starts the services in the background. + + *Example Output* + + ```output + ✔ Network nvidia-rag Created + ✔ Container notebook-server Started + ✔ Container llm-inference-server Started + ✔ Container chain-server Started + ✔ Container rag-playground Started + ``` + +1. Start the Milvus vector database: + + ```console + $ docker compose --env-file deploy/compose/compose.env -f deploy/compose/docker-compose-vectordb.yaml up -d milvus + ``` + + *Example Output* + + ```output + ✔ Container milvus-minio Started + ✔ Container milvus-etcd Started + ✔ Container milvus-standalone Started + ``` + +1. Confirm the containers are running: + + ```console + $ docker ps --format "table {{.ID}}\t{{.Names}}\t{{.Status}}" + ``` + + *Example Output* + + ```output + CONTAINER ID NAMES STATUS + 256da0ecdb7b rag-playground Up 48 minutes + 2974aa4fb2ce chain-server Up 48 minutes + 4a8c4aebe4ad notebook-server Up 48 minutes + 5be2b57bb5c1 milvus-standalone Up 48 minutes (healthy) + ecf674c8139c llm-inference-server Up 48 minutes (healthy) + a6609c22c171 milvus-minio Up 48 minutes (healthy) + b23c0858c4d4 milvus-etcd Up 48 minutes (healthy) + ``` + + +### Related Information + +- [Meta Llama README](https://github.com/facebookresearch/llama/blob/main/README.md) +- [Meta Llama request access form](https://ai.meta.com/resources/models-and-libraries/llama-downloads/) + + +## Stopping the Containers + +1. Stop the vector database: + + ```console + $ docker compose -f deploy/compose/docker-compose-vectordb.yaml down + ``` + +1. Stop and remove the application containers: + + ```console + $ docker compose --env-file deploy/compose/compose.env -f deploy/compose/rag-app-text-chatbot.yaml down + ``` + +## Next Steps + +- Use the [](./using-sample-web-application.md). +- [](./vector-database.md) +- Run the sample Jupyter notebooks to learn about optional features. diff --git a/docs/media/custom.css b/docs/media/custom.css new file mode 100644 index 000000000..f0fde15fb --- /dev/null +++ b/docs/media/custom.css @@ -0,0 +1,9 @@ +.swagger-ui code { + white-space: pre-wrap; +} + +.microlight code { + color: white; + background: none; + border: none; +} \ No newline at end of file diff --git a/docs/media/favicon.ico b/docs/media/favicon.ico new file mode 100644 index 000000000..424df8720 Binary files /dev/null and b/docs/media/favicon.ico differ diff --git a/docs/media/nvidia-logo-white.png b/docs/media/nvidia-logo-white.png new file mode 100644 index 000000000..b408e88fa Binary files /dev/null and b/docs/media/nvidia-logo-white.png differ diff --git a/docs/media/omni-style.css b/docs/media/omni-style.css new file mode 100644 index 000000000..a3e9ba004 --- /dev/null +++ b/docs/media/omni-style.css @@ -0,0 +1,1567 @@ +/* + * SPDX-FileCopyrightText: Copyright (c) 2021-2022 NVIDIA CORPORATION & AFFILIATES. All rights reserved. + * SPDX-License-Identifier: LicenseRef-NvidiaProprietary + * + * NVIDIA CORPORATION, its affiliates and licensors retain all intellectual + * property and proprietary rights in and to this material, related + * documentation and any modifications thereto. Any use, reproduction, + * disclosure or distribution of this material and related documentation + * without an express license agreement from NVIDIA CORPORATION or + * its affiliates is strictly prohibited. 
+ */ + +/* Set up for old browsers*/ +@supports not (font-variation-settings: normal) { + @font-face { + font-family: "NVIDIA"; + src: url("https://images.nvidia.com/etc/designs/nvidiaGDC/clientlibs_base/fonts/nvidia-sans/GLOBAL/NVIDIASans_W_Lt.woff") format("woff"), + url("https://images.nvidia.com/etc/designs/nvidiaGDC/clientlibs_base/fonts/nvidia-sans/GLOBAL/NVIDIASans_W_Lt.woff2") format("woff2"); + font-weight: 300; + font-style: normal; + } + @font-face { + font-family: "NVIDIA"; + src: url("https://images.nvidia.com/etc/designs/nvidiaGDC/clientlibs_base/fonts/nvidia-sans/GLOBAL/NVIDIASans_W_Rg.woff") format("woff"), + url("https://images.nvidia.com/etc/designs/nvidiaGDC/clientlibs_base/fonts/nvidia-sans/GLOBAL/NVIDIASans_W_Rg.woff2") format("woff2"); + font-weight: 400; + font-style: normal; + } + @font-face { + font-family: "NVIDIA"; + src: url("https://images.nvidia.com/etc/designs/nvidiaGDC/clientlibs_base/fonts/nvidia-sans/GLOBAL/NVIDIASans_W_Md.woff") format("woff"), + url("https://images.nvidia.com/etc/designs/nvidiaGDC/clientlibs_base/fonts/nvidia-sans/GLOBAL/NVIDIASans_W_Md.woff2") format("woff2"); + font-weight: 500; + font-style: normal; + } + @font-face { + font-family: "NVIDIA"; + src: url("https://images.nvidia.com/etc/designs/nvidiaGDC/clientlibs_base/fonts/nvidia-sans/GLOBAL/NVIDIASans_W_Bd.woff") format("woff"), + url("https://images.nvidia.com/etc/designs/nvidiaGDC/clientlibs_base/fonts/nvidia-sans/GLOBAL/NVIDIASans_W_Bd.woff2") format("woff2"); + font-weight: 700; + font-style: normal; + } +} + +/* Set up for modern browsers, all weights */ +@supports (font-variation-settings: normal) { + @font-face { + font-family: 'NVIDIA'; + src: url('https://images.nvidia.com/etc/designs/nvidiaGDC/clientlibs_base/fonts/nvidia-sans/GLOBAL/var/NVIDIASansVF_W_Wght.woff2') format('woff2 supports variations'), + url('https://images.nvidia.com/etc/designs/nvidiaGDC/clientlibs_base/fonts/nvidia-sans/GLOBAL/var/NVIDIASansVF_W_Wght.woff2') format('woff2-variations'); + font-weight: 100 1000; + font-stretch: 25% 151%; + font-style: normal; + } + @font-face{ + font-family:'NVIDIA'; + src:url('https://images.nvidia.com/etc/designs/nvidiaGDC/clientlibs_base/fonts/nvidia-sans/GLOBAL/var/NVIDIASansVF_Wght_W_Italic.woff2') format('woff2 supports variations'), + url('https://images.nvidia.com/etc/designs/nvidiaGDC/clientlibs_base/fonts/nvidia-sans/GLOBAL/var/NVIDIASansVF_Wght_W_Italic.woff2') format('woff2-variations'); + font-weight:100 1000; + font-stretch:25% 151%; + font-style:italic; + } +} + +/* Set up for old browsers*/ +@supports not (font-variation-settings: normal) { + @font-face { + font-family: "RobotoMono"; + src: url("https://images.nvidia.com/etc/designs/nvidiaGDC/clientlibs_base/fonts/Roboto_Mono/static/RobotoMono-Light.ttf") format("truetype"); + font-weight: 300; + font-style: normal; + } + @font-face { + font-family: "RobotoMono"; + src: url("https://images.nvidia.com/etc/designs/nvidiaGDC/clientlibs_base/fonts/Roboto_Mono/static/RobotoMono-Regular.ttf") format("truetype"); + font-weight: 400; + font-style: normal; + } + @font-face { + font-family: "RobotoMono"; + src: url("https://images.nvidia.com/etc/designs/nvidiaGDC/clientlibs_base/fonts/Roboto_Mono/static/RobotoMono-Medium.ttf") format("truetype"); + font-weight: 500; + font-style: normal; + } + @font-face { + font-family: "RobotoMono"; + src: url("https://images.nvidia.com/etc/designs/nvidiaGDC/clientlibs_base/fonts/Roboto_Mono/static/RobotoMono-Bold.ttf") format("truetype"); + font-weight: 700; + font-style: normal; + 
} +} + +/* Set up for modern browsers, all weights */ +@supports (font-variation-settings: normal) { + @font-face { + font-family: 'RobotoMono'; + src: url('https://images.nvidia.com/etc/designs/nvidiaGDC/clientlibs_base/fonts/Roboto_Mono/RobotoMono-VariableFont_wght.ttf') format('truetype supports variations'), + url('https://images.nvidia.com/etc/designs/nvidiaGDC/clientlibs_base/fonts/Roboto_Mono/RobotoMono-VariableFont_wght.ttf') format('truetype-variations'); + font-weight: 100 1000; + font-stretch: 25% 151%; + font-style: normal; + } + @font-face{ + font-family: 'RobotoMono'; + src: url('https://images.nvidia.com/etc/designs/nvidiaGDC/clientlibs_base/fonts/Roboto_Mono/RobotoMono-Italic-VariableFont_wght.ttf') format('truetype supports variations'), + url('https://images.nvidia.com/etc/designs/nvidiaGDC/clientlibs_base/fonts/Roboto_Mono/RobotoMono-Italic-VariableFont_wght.ttf') format('truetype-variations'); + font-weight:100 1000; + font-stretch:25% 151%; + font-style:italic; + } +} + +:root +{ + /* nv branding */ + --nv-green: #76b900; + --nv-green-illuminate: #76d300; /* button state - hover */ + --nv-black: #000000; + --nv-white: #ffffff; + --nv-green-2: #004831; + + --nv-success: var(--nv-green); + --nv-error: #f44336; + + --nv-font-face: NVIDIA,Arial,Helvetica,Sans-Serif; + --nv-font-face-mono: RobotoMono,SFMono-Regular,Menlo,Monaco,Consolas,Liberation Mono,Courier New,Courier,monospace; + + /* nv branding: light theme */ + --text: #1a1a1a; + --background-default: #ffffff; + --background-alternate: #eeeeee; + --ui-and-graphics: #999999; + + --white: #ffffff; + --gray-1: #f7f7f7; + --gray-2: #eeeeee; + --gray-3: #dddddd; + --gray-4: #cccccc; + + /* nv branding: light theme mobile (closely matches our old font sizes) */ + --h1-color: var(--nv-green); + --h1-font-weight: 100; + --h1-letter-spacing: -0.02em; + --h1-font-size: 36px; + --h1-line-height: 1em; + --h1-text-transform: uppercase; + + --h2-color: var(--nv-green); + --h2-font-weight: 100; + --h2-letter-spacing: -0.02em; + --h2-font-size: 24px; + --h2-line-height: 1em; + --h2-text-transform: uppercase; + + --h3-color: var(--nv-green); + --h3-font-weight: 100; + --h3-letter-spacing: -0.02em; + --h3-font-size: 21px; + --h3-line-height: 1em; + --h3-text-transform: uppercase; + + --h4-color: var(--nv-green); + --h4-font-weight: 100; + --h4-letter-spacing: -0.02em; + --h4-font-size: 18px; + --h4-line-height: 1em; + --h4-text-transform: uppercase; + + --h5-color: var(--nv-green); + --h5-font-size: var(--body-font-size); + + --h6-color: var(--nv-green); + --h6-font-weight: 400; + + --body-font-color: var(--text); + --body-font-weight: normal; + --body-font-size: 16px; + --body-line-height: 1.5em; + + --small-font-color: var(--ui-and-graphics); + --small-font-weight: normal; + --small-font-size: 12px; + --small-line-height: 1.25em; + + --ul-font-color: var(--text); + --ul-font-weight: normal; + --ul-font-size: 16px; + --ul-line-height: 2em; + --ul-marker-font-face: FontAwesome; + --ul-marker-content: '\f105 \00a0 \00a0'; + + --ol-font-color: var(--text); + --ol-font-weight: normal; + --ol-font-size: 16px; + --ol-line-height: 2em; + --ol-list-style-type: decimal; + --ol-ol-list-style-type: upper-alpha; + --ol-ol-ol-list-style-type: decimal; /* not specified in style guide */ + + --disabled-font-color: var(--gray-4); + --disabled-font-weight: normal; + --disabled-font-size: 16px; + --disabled-line-height: 1em; /* style guide says 16px */ + + --error-font-color: var(--nv-error); + --error-font-weight: normal; + --error-font-size: 16px; 
+ --error-line-height: 1em; /* style guide says 16px */ + + --success-font-color: var(--nv-success); + --success-font-weight: normal; + --success-font-size: 16px; + --success-line-height: 1em; /* style guide says 16px */ + + /* omni-style */ + --sidebar-color: #000000; + --sidebar-alt-color: #333333; + --sidebar-headline-color: var(--nv-green); + --sidebar-text-color: #cccccc; + + --table-background-header: var(--nv-black); + --table-background-alternate: var(--background-alternate); /* for alternating rows */ + --table-text: var(--text); + --table-border: var(--ui-and-graphics); + --table-border-header: var(--gray-3); + + /* this is off-brand, but `uppercase` makes headings with source code look bad. */ + --h1-text-transform: none; + --h2-text-transform: none; + --h3-text-transform: none; + --h4-text-transform: none; + + --h3-font-weight: normal; /* this is off-brand and overrides the above definition */ + + --note-background-color: var(--nv-green); + --note-background-alt-color: #cccccc; + + --important-background-color: #f44336; + --important-background-alt-color: #cccccc; + + --link-color: var(--nv-green); + --link-visited-color: var(--nv-green); + --link-hover-color: var(--nv-green-illuminate); + + --background-color: var(--background-default); + + /* template T* tryAcquireInterface(const void* pluginInterface) */ + --api-member-header-background-color: var(--gray-2); + --api-member-header-border-color: var(--sidebar-headline-color); + --api-member-header-text-color: var(--text); + --api-member-header-link-color: var(--link-color); + + --api-member-background-color: var(--gray-1); + + /* struct carb::Framework */ + --api-header-text-color: var(--nv-green); + --api-header-border-color: var(--ui-and-graphics); + + /* sphinx-design color modifications */ + --sd-color-tabs-label-active: var(--nv-green); + --sd-color-tabs-underline-active: var(--nv-green); + + --sd-color-tabs-label-hover: var(--nv-green-illuminate); + --sd-color-tabs-underline-hover: var(--nv-green-illuminate); +} + +/* Width of template */ +.wy-nav-content +{ + max-width: 1200px !important; + background-color: var(--background-color); +} + + /* affects search box */ +#rtd-search-form, +#rtd-search-form > input:nth-child(1) +{ + color-scheme: light; +} + +/**********************************************************************************************************************/ +/* Admonitions */ +/**********************************************************************************************************************/ + +/* todo, attention, caution, warning body */ +.rst-content .admonition-todo, +.rst-content .attention, +.rst-content .caution, +.rst-content .warning, +.rst-content .wy-alert-warning.admonition, +.rst-content .wy-alert-warning.danger, +.rst-content .wy-alert-warning.error, +.rst-content .wy-alert-warning.hint, +.rst-content .wy-alert-warning.important, +.rst-content .wy-alert-warning.note, +.rst-content .wy-alert-warning.seealso, +.rst-content .wy-alert-warning.tip, +.wy-alert.wy-alert-warning +{ + background: rgb(255, 244, 224); +} + +/* todo, attention, caution, warning title */ +.rst-content .admonition-todo .admonition-title, +.rst-content .admonition-todo .wy-alert-title, +.rst-content .attention .admonition-title, +.rst-content .attention .wy-alert-title, +.rst-content .caution .admonition-title, +.rst-content .caution .wy-alert-title, +.rst-content .warning .admonition-title, +.rst-content .warning .wy-alert-title, +.rst-content .wy-alert-warning.admonition .admonition-title, +.rst-content 
.wy-alert-warning.admonition .wy-alert-title, +.rst-content .wy-alert-warning.danger .admonition-title, +.rst-content .wy-alert-warning.danger .wy-alert-title, +.rst-content .wy-alert-warning.error .admonition-title, +.rst-content .wy-alert-warning.error .wy-alert-title, +.rst-content .wy-alert-warning.hint .admonition-title, +.rst-content .wy-alert-warning.hint .wy-alert-title, +.rst-content .wy-alert-warning.important .admonition-title, +.rst-content .wy-alert-warning.important .wy-alert-title, +.rst-content .wy-alert-warning.note .admonition-title, +.rst-content .wy-alert-warning.note .wy-alert-title, +.rst-content .wy-alert-warning.seealso .admonition-title, +.rst-content .wy-alert-warning.seealso .wy-alert-title, +.rst-content .wy-alert-warning.tip .admonition-title, +.rst-content .wy-alert-warning.tip .wy-alert-title, +.rst-content .wy-alert.wy-alert-warning .admonition-title, +.wy-alert.wy-alert-warning .rst-content .admonition-title, +.wy-alert.wy-alert-warning .wy-alert-title +{ + background: rgb(241, 169, 47); +} + +/* danger, error body */ +.rst-content .danger, +.rst-content .error, +.rst-content .wy-alert-danger.admonition, +.rst-content .wy-alert-danger.admonition-todo, +.rst-content .wy-alert-danger.attention, +.rst-content .wy-alert-danger.caution, +.rst-content .wy-alert-danger.hint, +.rst-content .wy-alert-danger.important, +.rst-content .wy-alert-danger.note, +.rst-content .wy-alert-danger.seealso, +.rst-content .wy-alert-danger.tip, +.rst-content .wy-alert-danger.warning, +.wy-alert.wy-alert-danger +{ + background: rgb(254, 240, 237); +} + +/* danger, error title */ +.rst-content .danger .admonition-title, +.rst-content .danger .wy-alert-title, +.rst-content .error .admonition-title, +.rst-content .error .wy-alert-title, +.rst-content .wy-alert-danger.admonition-todo .admonition-title, +.rst-content .wy-alert-danger.admonition-todo .wy-alert-title, +.rst-content .wy-alert-danger.admonition .admonition-title, +.rst-content .wy-alert-danger.admonition .wy-alert-title, +.rst-content .wy-alert-danger.attention .admonition-title, +.rst-content .wy-alert-danger.attention .wy-alert-title, +.rst-content .wy-alert-danger.caution .admonition-title, +.rst-content .wy-alert-danger.caution .wy-alert-title, +.rst-content .wy-alert-danger.hint .admonition-title, +.rst-content .wy-alert-danger.hint .wy-alert-title, +.rst-content .wy-alert-danger.important .admonition-title, +.rst-content .wy-alert-danger.important .wy-alert-title, +.rst-content .wy-alert-danger.note .admonition-title, +.rst-content .wy-alert-danger.note .wy-alert-title, +.rst-content .wy-alert-danger.seealso .admonition-title, +.rst-content .wy-alert-danger.seealso .wy-alert-title, +.rst-content .wy-alert-danger.tip .admonition-title, +.rst-content .wy-alert-danger.tip .wy-alert-title, +.rst-content .wy-alert-danger.warning .admonition-title, +.rst-content .wy-alert-danger.warning .wy-alert-title, +.rst-content .wy-alert.wy-alert-danger .admonition-title, +.wy-alert.wy-alert-danger .rst-content .admonition-title, +.wy-alert.wy-alert-danger .wy-alert-title +{ + background: var(--nv-error); +} + +/* hint, important, tip, note, see also body */ +.rst-content .hint, +.rst-content .important, +.rst-content .tip, +.rst-content .wy-alert-success.admonition, +.rst-content .wy-alert-success.admonition-todo, +.rst-content .wy-alert-success.attention, +.rst-content .wy-alert-success.caution, +.rst-content .wy-alert-success.danger, +.rst-content .wy-alert-success.error, +.rst-content .wy-alert-success.note, +.rst-content 
.wy-alert-success.seealso, +.rst-content .wy-alert-success.warning, +.wy-alert.wy-alert-success, +.rst-content .note, +.rst-content .seealso, +.rst-content .wy-alert-info.admonition, +.rst-content .wy-alert-info.admonition-todo, +.rst-content .wy-alert-info.attention, +.rst-content .wy-alert-info.caution, +.rst-content .wy-alert-info.danger, +.rst-content .wy-alert-info.error, +.rst-content .wy-alert-info.hint, +.rst-content .wy-alert-info.important, +.rst-content .wy-alert-info.tip, +.rst-content .wy-alert-info.warning, +.wy-alert.wy-alert-info +{ + background: rgb(231, 250, 223); +} + +/* hint, important, tip, note, see also title */ +.rst-content .hint .admonition-title, +.rst-content .hint .wy-alert-title, +.rst-content .important .admonition-title, +.rst-content .important .wy-alert-title, +.rst-content .tip .admonition-title, +.rst-content .tip .wy-alert-title, +.rst-content .wy-alert-success.admonition-todo .admonition-title, +.rst-content .wy-alert-success.admonition-todo .wy-alert-title, +.rst-content .wy-alert-success.admonition .admonition-title, +.rst-content .wy-alert-success.admonition .wy-alert-title, +.rst-content .wy-alert-success.attention .admonition-title, +.rst-content .wy-alert-success.attention .wy-alert-title, +.rst-content .wy-alert-success.caution .admonition-title, +.rst-content .wy-alert-success.caution .wy-alert-title, +.rst-content .wy-alert-success.danger .admonition-title, +.rst-content .wy-alert-success.danger .wy-alert-title, +.rst-content .wy-alert-success.error .admonition-title, +.rst-content .wy-alert-success.error .wy-alert-title, +.rst-content .wy-alert-success.note .admonition-title, +.rst-content .wy-alert-success.note .wy-alert-title, +.rst-content .wy-alert-success.seealso .admonition-title, +.rst-content .wy-alert-success.seealso .wy-alert-title, +.rst-content .wy-alert-success.warning .admonition-title, +.rst-content .wy-alert-success.warning .wy-alert-title, +.rst-content .wy-alert.wy-alert-success .admonition-title, +.wy-alert.wy-alert-success .rst-content .admonition-title, +.wy-alert.wy-alert-success .wy-alert-title, +.rst-content .note .admonition-title, +.rst-content .note .wy-alert-title, +.rst-content .seealso .admonition-title, +.rst-content .seealso .wy-alert-title, +.rst-content .wy-alert-info.admonition-todo .admonition-title, +.rst-content .wy-alert-info.admonition-todo .wy-alert-title, +.rst-content .wy-alert-info.admonition .admonition-title, +.rst-content .wy-alert-info.admonition .wy-alert-title, +.rst-content .wy-alert-info.attention .admonition-title, +.rst-content .wy-alert-info.attention .wy-alert-title, +.rst-content .wy-alert-info.caution .admonition-title, +.rst-content .wy-alert-info.caution .wy-alert-title, +.rst-content .wy-alert-info.danger .admonition-title, +.rst-content .wy-alert-info.danger .wy-alert-title, +.rst-content .wy-alert-info.error .admonition-title, +.rst-content .wy-alert-info.error .wy-alert-title, +.rst-content .wy-alert-info.hint .admonition-title, +.rst-content .wy-alert-info.hint .wy-alert-title, +.rst-content .wy-alert-info.important .admonition-title, +.rst-content .wy-alert-info.important .wy-alert-title, +.rst-content .wy-alert-info.tip .admonition-title, +.rst-content .wy-alert-info.tip .wy-alert-title, +.rst-content .wy-alert-info.warning .admonition-title, +.rst-content .wy-alert-info.warning .wy-alert-title, +.rst-content .wy-alert.wy-alert-info .admonition-title, +.wy-alert.wy-alert-info .rst-content .admonition-title, +.wy-alert.wy-alert-info .wy-alert-title +{ + background: 
var(--nv-success); +} + +/**********************************************************************************************************************/ +/* Standard Text Formatting */ +/**********************************************************************************************************************/ + + +/* Replace majority of Fonts in RTD */ +body +{ + font-family: var(--nv-font-face); + color: var(--body-font-color); + font-weight: var(--body-font-weight); + font-size: var(--body-font-size); + line-height: var(--body-line-height); +} + + +/* Headline Formatting */ +h1, p.rubric.rubric-h1 +{ + color: var(--h1-color); + + font-family: var(--nv-font-face); + font-weight: var(--h1-font-weight); + font-size: var(--h1-font-size); + font-style: normal; + + line-height: var(--h1-line-height); + margin-top: 0.75em; + margin-bottom: 0.75em !important; /* override RTD theme */ + + text-transform: var(--h1-text-transform); +} + +h2, p.rubric.rubric-h2 +{ + color: var(--h2-color); + + font-family: var(--nv-font-face); + font-weight: var(--h2-font-weight); + font-size: var(--h2-font-size); + font-style: normal; + + line-height: var(--h2-line-height); + margin-top: 1.25em; + margin-bottom: 0.5em !important; /* override RTD theme */ + + text-transform: var(--h2-text-transform); +} + +h3, p.rubric.rubric-h3 +{ + color: var(--h3-color); + + font-family: var(--nv-font-face); + font-weight: var(--h3-font-weight); + font-size: var(--h3-font-size); + font-style: normal; + + line-height: var(--h3-line-height); + margin-top: 1.25em; + margin-bottom: 0.5em !important; /* override RTD theme */ + + text-transform: var(--h3-text-transform); +} + +h4, p.rubric.rubric-h4 +{ + color: var(--h4-color); + + font-family: var(--nv-font-face); + font-weight: var(--h4-font-weight); + font-size: var(--h4-font-size); + font-style: normal; + + line-height: var(--h4-line-height); + margin-top: 1.25em; + margin-bottom: 0.5em !important; /* override RTD theme */ + + text-transform: var(--h4-text-transform); +} + +h5, p.rubric.rubric-h5 +{ + color: var(--h5-color); + + font-family: var(--nv-font-face); + font-size: var(--h5-font-size); +} + +h6, p.rubric.rubric-h6 +{ + color: var(--h6-color); + + font-family: var(--nv-font-face); + font-weight: var(--h6-font-weight); +} + +/* Paragraph Formatting */ +p +{ + margin-top: 15px; + margin-bottom: 15px; + margin-right: 5px; + margin-left: 0px; + font-size: var(--body-font-size); + line-height: var(--body-line-height); + color: var(--text); +} + +/* Math should inherit its color */ +span[id*=MathJax-Span] +{ + color: inherit; +} + +/* text highlighted by search */ +.rst-content .highlighted +{ + background: #f1c40f3b; + box-shadow: 0 0 0 1px #f1c40f; + display: inline; + font-weight: inherit; +} + +/* a local table-of-contents messes with heading colors. 
make sure to use the regular heading colors */ +.rst-content .toc-backref +{ + color: inherit; +} + +/* make links to function looks like other literals */ +.rst-content code.xref, +.rst-content tt.xref, +a .rst-content code, +a .rst-content tt +{ + color: #e74c3c; + font-weight: inherit; +} + +/* Link Colors */ +a +{ + color: var(--link-color); +} + +a:visited +{ + color: var(--link-visited-color); +} + +a:hover +{ + color: var(--link-hover-color); +} + +/* decorate external links with an icon */ +a.external::after +{ + font-family: "FontAwesome"; + content: "\f08e"; + margin-left: 0.3em; + font-size: 0.75em; +} + +/* follow branding guide for small footer text */ +footer p +{ + color: var(--small-font-color); + font-weight: var(--small-font-weight); + font-size: var(--small-font-size); + line-height: var(--small-line-height); +} + +/* add nvidia logo (like www.nvidia.com) */ +footer p:first-child::before +{ + content: url(../_static/NVIDIA-LogoBlack.svg); + display: block; + width: 110px; + margin: 0px; + position: relative; + left: -9px; +} + +/* fun role for self-made footnotes */ +.asterisks +{ + color: var(--nv-green); + font-size: 0.9em; + font-weight: 400; +} + +/* :download: role. without this, download link would be red. */ +.rst-content a code.download, +.rst-content a tt.download +{ + font-family: var(--nv-font-face); + color: var(--nv-green); + font-weight: var(--body-font-weight); + font-size: var(--body-font-size); + line-height: var(--body-line-height); +} + +/* set all code fonts */ +.rst-content pre.literal-block, +.rst-content .linenodiv pre, +.rst-content div[class^='highlight'] pre, +.rst-content tt, +.rst-content code, +.rst-content pre, +.rst-content kbd, +.rst-content samp, +html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.glossary):not(.simple) .descname, +html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.glossary):not(.simple) .descclassname, +html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.glossary):not(.simple) .sig-name, +html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.glossary):not(.simple) > dt:first-child +{ + font-family: var(--nv-font-face-mono); +} + +/**********************************************************************************************************************/ +/* Tables */ +/**********************************************************************************************************************/ + +/* tables */ +.rst-content table.docutils, +.wy-table-bordered-all +{ + border-color: var(--table-border); +} + +/* tables cells */ +.rst-content table.docutils td, +.rst-content table.field-list td, +.wy-table td +{ + /* line wrap cells */ + white-space: normal !important; + vertical-align: top; + + font-size: var(--body-font-size); + line-height: var(--body-line-height); + + border-color: var(--table-border); +} + +/* add left border for cells to the right of merged cells that are therefore the first in the row */ +.rst-content table.docutils td:first-child, +.rst-content table.docutils th:first-child, +.rst-content table.field-list td:first-child, +.rst-content table.field-list th:first-child, +.wy-table td:first-child, +.wy-table th:first-child +{ + border-left-width: 1px; +} + +/* odd rows have grey background */ +.rst-content table.docutils:not(.field-list) tr:nth-child(2n-1) td, +.wy-table-backed, +.wy-table-odd td, +.wy-table-striped tr:nth-child(2n-1) td +{ + 
background-color: var(--table-background-alternate); +} + +/* optional table with no stripes */ +.rst-content table.docutils.table-no-stripes:not(.field-list) tr:nth-child(2n-1) td +{ + background-color: var(--table-background); +} + +/* optional table with no borders */ +.rst-content table.docutils.table-no-borders:not(.field-list), +.rst-content table.docutils.table-no-borders:not(.field-list) td +{ + border: none; +} + +/* optional table right align first column */ +.rst-content table.docutils.table-right-align-first-column:not(.field-list) tr td:first-of-type +{ + text-align: right; +} + +.rst-content table.docutils.table-compact-cells td, +.rst-content table.docutils.table-compact-cells th +{ + padding: 4px 4px; +} + +.rst-content table.docutils.table-compact-cells td p, +.rst-content table.docutils.table-compact-cells th p +{ + margin-top: 0px; +} + +/* table header is white on black */ +html.writer-html5 .rst-content table.docutils th, +.rst-content table.docutils thead th, +.rst-content table.field-list thead th, +.wy-table thead th, +.rst-content table.docutils thead th p, +.rst-content table.field-list thead th p, +.wy-table thead th p +{ + background-color: var(--table-background-header); + color: var(--sidebar-text-color); + border-color: var(--table-border-header); +} + +/* table header text */ +html.writer-html5 .rst-content table.docutils th>p +{ + color: var(--sidebar-text-color); + font-size: var(--body-font-size); + line-height: var(--body-line-height); +} + +/* cell text */ +html.writer-html5 .rst-content table.docutils td>p, +html.writer-html5 .rst-content table.docutils th>p +{ + font-size: var(--body-font-size); + line-height: var(--body-line-height); +} + +/* table name caption text */ +.rst-content table.docutils caption, +.rst-content table.field-list caption, +.wy-table caption +{ + color: var(--text); +} + +/* remove weird top margin in first paragraph of a cell */ +.wy-table td p:first-child, +.rst-content table.docutils td p:first-child, +.rst-content table.field-list td p:first-child, +.wy-table th p:first-child, +.rst-content table.docutils th p:first-child, +.rst-content table.field-list th p:first-child +{ + margin-top: 0px; +} + +/* remove weird bottom margin in last paragraph of a cell */ +.wy-table td p:last-child, +.rst-content table.docutils td p:last-child, +.rst-content table.field-list td p:last-child, +.wy-table th p:last-child, +.rst-content table.docutils th p:last-child, +.rst-content table.field-list th p:last-child +{ + margin-bottom: 0px; +} + +/* provide a compact table style */ +.rst-content table.docutils.table-compact td, +.rst-content table.docutils.table-compact th { + padding: 8px; +} + +html.writer-html5 .rst-content table.docutils.table-compact td > p, +html.writer-html5 .rst-content table.docutils.table-compact th > p { + font-size: var(--small-font-size); + line-height: var(--small-line-height); +} + +/**********************************************************************************************************************/ +/* Lists */ +/**********************************************************************************************************************/ + +/* unordered list should have a nv-green > */ +.rst-content section ul li::marker, +.rst-content .toctree-wrapper ul li::marker, +.wy-plain-list-disc li::marker, +article ul li::marker +{ + font-family: var(--ul-marker-font-face); + content: var(--ul-marker-content); + color: var(--nv-green); + font-weight: 600; +} + +/* top-level ordered list should have a nv-green number */ 
+.rst-content section ol li::marker, +.rst-content ol.arabic li::marker, +.wy-plain-list-decimal li::marker, +article ol li::marker +{ + font-family: inherit; + content: inherit; + color: var(--nv-green); + font-weight: 600; + list-style: var(--ol-list-style-type); +} + +/* second-level ordered list should have a nv-green uppercase letter */ +.rst-content section ol ol li, +.rst-content ol.arabic ol.arabic li, +.wy-plain-list-decimal ol ol li, +article ol ol li +{ + list-style: var(--ol-ol-list-style-type); +} + +/* third-level ordered lists aren't in the branding guide. let's use numbers. */ +.rst-content section ol ol ol li, +.rst-content ol.arabic ol.arabic ol li, +.wy-plain-list-decimal ol ol ol li, +article ol ol ol li +{ + list-style: var(--ol-ol-ol-list-style-type); +} + +/* fix legacy exhale hierarchy trees */ +.rst-content section .treeView li::marker +{ + content: url(collapsible-lists/css/button.png) !important; +} + +.rst-content section .treeView li.collapsibleListClosed::marker +{ + content: url(collapsible-lists/css/button-closed.png) !important; +} + +.rst-content section .treeView li.collapsibleListOpen::marker +{ + content: url(collapsible-lists/css/button-open.png) !important; +} + +/* start the first paragraph immediately (don't add space at the top) */ +dd p:first-child +{ + margin-top: 0px; +} + +/**********************************************************************************************************************/ +/* Sidebar / Log */ +/**********************************************************************************************************************/ + +/* logo background */ +.wy-side-nav-search, +.wy-nav-top +{ + background-color: var(--sidebar-color); +} + +/* Remove padding around logo */ +.wy-side-nav-search>a +{ + padding: 0px; + margin: 0px; +} + +/* prevent changing project name color */ +.wy-side-nav-search>a:visited +{ + color: #fcfcfc; +} + +/* add padding to the project name */ +.wy-side-nav-project +{ + margin-top: 0.6em; + margin-bottom: 0.6em; +} + +/* remove rounded corner from search box */ +.wy-side-nav-search input[type=text] +{ + border-radius: 0px; +} + +/* sidebar color */ +.wy-nav-side +{ + color: var(--sidebar-headline-color); + background: var(--sidebar-color); +} + +@media screen and (min-width: 1500px) +{ + .wy-nav-side + { + margin-left: calc(0.5*100% - 0.5*1500px); /* 0.5 * (view width - max width of content) */ + } +} + +/* remove padding around sidebar logo */ +.wy-side-nav-search, +.wy-side-nav-search img +{ + padding: 0; +} + +/* sidebar headings */ +.wy-menu-vertical p.caption { + color: var(--sidebar-headline-color); + background-color: var(--sidebar-alt-color); +} + +/* everything is a link in the sidebar, make them not green */ +.wy-menu-vertical a +{ + color: var(--sidebar-text-color); +} + +.wy-menu-vertical li.toctree-l1.current li.toctree-l2 > a, +.wy-menu-vertical li.toctree-l2.current li.toctree-l3 > a, +.wy-menu-vertical li.toctree-l3.current li.toctree-l4 > a, +.wy-menu-vertical li.toctree-l4.current li.toctree-l5 > a, +.wy-menu-vertical li.toctree-l5.current li.toctree-l6 > a, +.wy-menu-vertical li.toctree-l6.current li.toctree-l7 > a, +.wy-menu-vertical li.toctree-l7.current li.toctree-l8 > a, +.wy-menu-vertical li.toctree-l8.current li.toctree-l9 > a, +.wy-menu-vertical li.toctree-l9.current li.toctree-l10 > a +{ + color: #202020; + border: none; +} +.wy-menu-vertical li.toctree-l1.current li.toctree-l2 > a{background-color: #F0F0F0;} +.wy-menu-vertical li.toctree-l2.current li.toctree-l3 > a{background-color: 
#E8E8E8;} +.wy-menu-vertical li.toctree-l3.current li.toctree-l4 > a{background-color: #E0E0E0;} +.wy-menu-vertical li.toctree-l4.current li.toctree-l5 > a{background-color: #D8D8D8;} +.wy-menu-vertical li.toctree-l5.current li.toctree-l6 > a{background-color: #D0D0D0;} +.wy-menu-vertical li.toctree-l6.current li.toctree-l7 > a{background-color: #C8C8C8;} +.wy-menu-vertical li.toctree-l7.current li.toctree-l8 > a{background-color: #C0C0C0;} +.wy-menu-vertical li.toctree-l8.current li.toctree-l9 > a{background-color: #B8B8B8;} +.wy-menu-vertical li.toctree-l9.current li.toctree-l10 > a{background-color: #B0B0B0;} + +.wy-menu-vertical li.toctree-l1.current:hover li.toctree-l2 > a:hover {background-color: #E8E8E8;} +.wy-menu-vertical li.toctree-l2.current:hover li.toctree-l3 > a:hover {background-color: #E0E0E0;} +.wy-menu-vertical li.toctree-l3.current:hover li.toctree-l4 > a:hover {background-color: #D8D8D8;} +.wy-menu-vertical li.toctree-l4.current:hover li.toctree-l5 > a:hover {background-color: #D0D0D0;} +.wy-menu-vertical li.toctree-l5.current:hover li.toctree-l6 > a:hover {background-color: #C8C8C8;} +.wy-menu-vertical li.toctree-l6.current:hover li.toctree-l7 > a:hover {background-color: #C0C0C0;} +.wy-menu-vertical li.toctree-l7.current:hover li.toctree-l8 > a:hover {background-color: #B8B8B8;} +.wy-menu-vertical li.toctree-l8.current:hover li.toctree-l9 > a:hover {background-color: #B0B0B0;} +.wy-menu-vertical li.toctree-l9.current:hover li.toctree-l10 > a:hover {background-color: #A8A8A8;} + +.wy-menu-vertical li.toctree-l1 a:hover, +.wy-menu-vertical li.toctree-l2 a:hover, +.wy-menu-vertical li.toctree-l3 a:hover, +.wy-menu-vertical li.toctree-l4 a:hover, +.wy-menu-vertical li.toctree-l5 a:hover, +.wy-menu-vertical li.toctree-l6 a:hover, +.wy-menu-vertical li.toctree-l7 a:hover, +.wy-menu-vertical li.toctree-l8 a:hover, +.wy-menu-vertical li.toctree-l9 a:hover, +.wy-menu-vertical li.toctree-l10 a:hover +{ + color: #202020; + background-color: #DDDDDD; +} + +.wy-menu-vertical .toctree-l1 a:hover, +.wy-menu-vertical .toctree-l2 a:hover, +.wy-menu-vertical .toctree-l3 a:hover, +.wy-menu-vertical .toctree-l4 a:hover, +.wy-menu-vertical .toctree-l5 a:hover, +.wy-menu-vertical .toctree-l6 a:hover, +.wy-menu-vertical .toctree-l7 a:hover, +.wy-menu-vertical .toctree-l8 a:hover, +.wy-menu-vertical .toctree-l9 a:hover, +.wy-menu-vertical .toctree-l10 a:hover +{ + color: #000000; + background-color: #EEEEEE; +} + +.wy-menu-vertical .toctree-l1.current a:hover, +.wy-menu-vertical .toctree-l2.current a:hover, +.wy-menu-vertical .toctree-l3.current a:hover, +.wy-menu-vertical .toctree-l4.current a:hover, +.wy-menu-vertical .toctree-l5.current a:hover, +.wy-menu-vertical .toctree-l6.current a:hover, +.wy-menu-vertical .toctree-l7.current a:hover, +.wy-menu-vertical .toctree-l8.current a:hover, +.wy-menu-vertical .toctree-l9.current a:hover, +.wy-menu-vertical .toctree-l10.current a:hover +{ + color: #202020; +} + +.wy-menu-vertical .toctree-l1.current, +.wy-menu-vertical .toctree-l2.current, +.wy-menu-vertical .toctree-l3.current, +.wy-menu-vertical .toctree-l4.current, +.wy-menu-vertical .toctree-l5.current, +.wy-menu-vertical .toctree-l6.current, +.wy-menu-vertical .toctree-l7.current, +.wy-menu-vertical .toctree-l8.current, +.wy-menu-vertical .toctree-l9.current, +.wy-menu-vertical .toctree-l10.current +{ + color: #202020; +} + +/* break these up into shades of grey? at different levels? 
*/ +.wy-menu-vertical .toctree-l1.current a, +.wy-menu-vertical .toctree-l2.current a, +.wy-menu-vertical .toctree-l3.current a, +.wy-menu-vertical .toctree-l4.current a, +.wy-menu-vertical .toctree-l5.current a, +.wy-menu-vertical .toctree-l6.current a, +.wy-menu-vertical .toctree-l7.current a, +.wy-menu-vertical .toctree-l8.current a, +.wy-menu-vertical .toctree-l9.current a, +.wy-menu-vertical .toctree-l10.current a, +.wy-menu-vertical .toctree-l1.on a, +.wy-menu-vertical .toctree-l2.on a, +.wy-menu-vertical .toctree-l3.on a, +.wy-menu-vertical .toctree-l4.on a, +.wy-menu-vertical .toctree-l5.on a, +.wy-menu-vertical .toctree-l6.on a, +.wy-menu-vertical .toctree-l7.on a, +.wy-menu-vertical .toctree-l8.on a, +.wy-menu-vertical .toctree-l9.on a, +.wy-menu-vertical .toctree-l10.on a +{ + color: #202020; + background-color: #EEEEEE; +} + +/* plus/minus expansion signs */ +.wy-menu-vertical li span.toctree-expand, +.wy-menu-vertical li.current > a span.toctree-expand, +.wy-menu-vertical li.current > a:hover span.toctree-expand, +.wy-menu-vertical li.current > a button.toctree-expand, +.wy-menu-vertical li.current > a:hover button.toctree-expand, +.wy-menu-vertical li.on > a span.toctree-expand, +.wy-menu-vertical li.on > a:hover span.toctree-expand, +.wy-menu-vertical li.on a button.toctree-expand +{ + color: #202020; +} + +/**********************************************************************************************************************/ +/* API Docs */ +/**********************************************************************************************************************/ + +/* documentation background for a function "block" should pop */ +.cpp.struct .cpp.function, +.cpp.class .cpp.function, +.cpp.struct .cpp.enum, +.cpp.class .cpp.enum, +.cpp.struct .cpp.var, +.cpp.class .cpp.var, +.py.class .py.method +{ + background: var(--api-member-background-color); +} + +/**********************************************************************************************************************/ +/* Struct / Class API Docs */ +/**********************************************************************************************************************/ + +/* make the class/struct name look like a heading */ +html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.glossary):not(.simple).cpp.struct>dt:first-child, +html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.glossary):not(.simple).cpp.type>dt:first-child, +html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.glossary):not(.simple).cpp.function>dt:first-child, +html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.glossary):not(.simple).cpp.macro>dt:first-child, +html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.glossary):not(.simple).cpp.enum>dt:first-child, +html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.glossary):not(.simple).cpp.class>dt:first-child, +html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.glossary):not(.simple).py.function>dt:first-child, +html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.glossary):not(.simple).py.class>dt:first-child +{ + color: var(--api-header-text-color); + border-color: var(--api-header-border-color) !important; + border-top: solid 1px; + border-bottom: solid 1px; + 
background-color: transparent; + padding-top: 5px; + padding-bottom: 5px; + padding-left: 0px; + padding-right: 0px; + margin-top: 15px; + margin-bottom: 15px; + margin-right: 0px; + margin-left: 0px; + letter-spacing: -0.5px; + font-weight: 500; + font-style: normal; + font-size: 20px; + width: 100%; +} + +dl.py +{ + font-style: normal; /* turn off italics, turning back on in api-styles.css for arg defaults*/ +} + +dl.py .sig.sig-object.py { + font-weight: 400; +} +dl.py .sig.sig-object.py span.sig-name.descname +{ + font-weight: 800; +} + +/* Add newlines-plus-whitespace to params in class/method signatures +Ensure final line does not receive tabs for final paren / type-annotation +https://github.com/sphinx-doc/sphinx/issues/1514 */ + +/* Newlines (\a) and spaces (\20) before each parameter */ +dl.py .sig-param::before { + content: "\a\20\20\20\20"; + white-space: pre; +} +/* Newline after the last parameter (so the closing bracket is on a new line) */ +dl.py dt em.sig-param:last-of-type::after { + content: "\a"; + white-space: pre; +} +/* To have blue background of width of the block (instead of width of content) */ +dl.class > dt:first-of-type { + display: block !important; +} + +.sig-param span.p, +.sig-param span.o, +.sig-param span.default_value +{ + font-style: normal; +} + +/* controls the text in the header */ +html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.glossary):not(.simple).cpp.struct>dt:first-child code, +html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.glossary):not(.simple).cpp.type>dt:first-child code, +html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.glossary):not(.simple).cpp.function>dt:first-child code, +html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.glossary):not(.simple).cpp.macro>dt:first-child code, +html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.glossary):not(.simple).cpp.enum>dt:first-child code, +html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.glossary):not(.simple).cpp.class>dt:first-child code, +html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.glossary):not(.simple).py.function>dt:first-child code, +html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.glossary):not(.simple).py.class>dt:first-child code +{ + color: var(--api-header-text-color); + padding: 0px; +} + +/* make anchor in class/struct header proper size and color */ +html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.glossary):not(.simple)>dt .headerlink +{ + color: var(--api-header-text-color); + font-size: 14px !important; +} + +/* class/struct fields/members */ +html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) dl:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) > dt +{ + margin-bottom: 6px; + border: none; + border-bottom: 2px solid var(--api-member-header-border-color) !important; + background: var(--api-member-header-background-color); + color: var(--api-member-header-text-color); + width: 100%; + font-size: 100%; + + padding: 6px; + margin-top: 0px; + margin-bottom: 6px; + margin-right: 0px; + margin-left: 0px; + + /* proper spacing hack: 
https://github.com/readthedocs/sphinx_rtd_theme/issues/694 */ + display: inline-block !important; +} + +/* properties */ +html.writer-html4 .rst-content dl:not(.docutils) .property, +html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.glossary):not(.simple) .property { + display: unset; +} + +/* module/class names */ +html.writer-html4 .rst-content dl:not(.docutils) .descclassname, html.writer-html4 .rst-content dl:not(.docutils) .descname, html.writer-html4 .rst-content dl:not(.docutils) .sig-name, html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) .descclassname, html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) .descname, html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) .sig-name +{ + color: var(--api-member-header-text-color); +} + +/* class/struct fields/members names */ +html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.glossary):not(.simple) dl:not(.field-list):not(.simple):not(.enumerator)>dt .sig-name +{ + color: var(--api-member-header-text-color); +} + + +/* class/struct fields/members header link button */ +html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.glossary):not(.simple) dl:not(.field-list):not(.simple):not(.enumerator)>dt .headerlink +{ + color: var(--api-member-header-link-color); +} + + +/* function name in fields/members header */ +html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.glossary):not(.simple) dl:not(.field-list):not(.simple):not(.enumerator)>dt>code +{ + color: var(--api-member-header-text-color); +} + + +/* Return, Parameters, Notes heading in member description */ +html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.glossary):not(.simple) dl.simple>dt +{ + border: none; + background: transparent; + color: var(--api-member-header-text-color); +} + +/**********************************************************************************************************************/ +/* Globals / Enums / Macros */ +/**********************************************************************************************************************/ + +/* members */ +html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.glossary):not(.simple)>dt +{ + margin-bottom: 6px; + border: none; + border-bottom: 2px solid var(--api-member-header-border-color); + background: var(--api-member-header-background-color); + color: var(--api-member-header-text-color); + width: 100%; + font-size: 100%; + + /* proper spacing hack: https://github.com/readthedocs/sphinx_rtd_theme/issues/694 */ + display: inline-block !important; +} + + +/* header link button */ +html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.glossary):not(.simple):not(.enumerator)>dt .headerlink +{ + color: var(--api-member-header-link-color); +} + +/* function name in fields/members header */ +html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.glossary):not(.simple):not(.enumerator)>dt>code +{ + color: var(--api-member-header-text-color); + font-family: var(--nv-font-face); +} +html.writer-html5 .rst-content 
dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.glossary):not(.simple):not(.enumerator)>dt>code>code +{ + color: var(--api-member-header-text-color); +} +html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.glossary):not(.simple) dl.cpp.enumerator>dt +{ + border: none; + background: transparent; +} + +/* center all content */ + +.wy-grid-for-nav +{ + max-width: 1500px; + margin: 0 auto; + position: static; + border-left: #c9c9c9 solid 1px; + border-right: #c9c9c9 solid 1px; + min-height: 100vh; /* ensure black side borders extend the entire length of page */ + background-color: var(--background-default); +} + +.wy-side-scroll /* the inner container */ +{ + background-color: var(--sidebar-color); +} + +.wy-nav-content-wrap +{ + background-color: var(--background-default); /* default has a silly transparency effect here */ +} + +@media screen and (min-width: 1100px) +{ + .wy-nav-content-wrap + { + background: inherit; + } +} + +/* links in extra breadcrumbs do not need padding */ +.wy-breadcrumbs li.wy-breadcrumbs-aside > a +{ + padding: 0px; +} + +/* document version selection */ + +.omni-version +{ + position: fixed; + bottom: 0; + background-color: var(--sidebar-alt-color); + width: 300px; + line-height: 32px; + padding-right: 1.618em; + color: var(--sidebar-text-color); + text-align:center +} + +.omni-version-content:before /* version icon */ +{ + font-family: var(--ul-marker-font-face); + content: '\f126'; + padding-left: 10px; + padding-right: 3px; + color: var(--sidebar-text-color); +} + +div .wy-menu.wy-menu-vertical /* make sure version selector doesn't cover toc */ +{ + margin-bottom: 2.5em; +} + +.omni-version-select /* version selection box */ +{ + padding: inherit; + background-color: var(--sidebar-alt-color); + color: inherit; + border: inherit; + box-shadow: none; + font-size: inherit; + font-family: inherit; + color: var(--sidebar-text-color); +} + +/* warning dialog when version is not the latest */ +.omni-version-warning-content /* center content in the warning box */ +{ + padding: 8px; + text-align: center; + border-radius: 4px; + color: #856404; + border-color: #ffeeba; + border-style: solid; + border-width: 1px; + background: #fff3cd repeating-linear-gradient(135deg, transparent, transparent 56px, rgba(255, 255, 255, 0.2) 56px, rgba(255, 255, 255, 0.2) 112px ); + margin: 0 0 24px; +} + +@media screen and (max-width: 768px) +{ + .omni-version-warning-content + { + width: 100%; + } +} + +.omni-version-warning-content a /* make links not green */ +{ + color: #533f03; + font-weight: 700; + text-decoration: none; +} + +.omni-version-warning-content:before /* warning icon */ +{ + font-family: var(--ul-marker-font-face); + content: '\f071'; + padding-left: 5px; +} + +/* social media icons */ + +.fa +{ + font-family: var(--ul-marker-font-face); +} + +.social-media-icon a, +.social-media-icon a:visited +{ + color: #c9c9c9; +} + +.social-media-icon a:hover +{ + color: var(--sidebar-alt-color); +} + +/* dropdown icon */ + +details.sd-dropdown .sd-summary-up svg, +details.sd-dropdown .sd-summary-down svg { + stroke: var(--nv-green); + fill: var(--nv-green); + opacity: 1.0; + stroke-width: 1.0px; +} + +/* hierarchy styling */ +.rebreather.hierarchy details.sd-dropdown { + margin-bottom: 0 !important; + border: none; +} + +.rebreather.hierarchy div.rebreather.hierarchy ul, +.rebreather.hierarchy details.sd-dropdown ul { + margin-bottom: 0; +} + +.rebreather.hierarchy details.sd-dropdown div.sd-summary-content { + padding-top: 
0; + padding-bottom: 0; +} + +.rebreather.hierarchy details.sd-dropdown>.sd-card-header { + border: none; + padding-left: 1.5em; + padding-top: 0; + padding-bottom: 0; +} + +.rebreather.hierarchy details.sd-dropdown:not([open])>.sd-card-header { + border: none; + padding-left: 1.5em; + padding-top: 0; + padding-bottom: 0; +} + +.rebreather.hierarchy details.sd-dropdown>.sd-summary-title .sd-summary-up { + right: unset; + left: 0.35em; + top: 0.35em; + font-size: 0.7em; +} + +.rebreather.hierarchy details.sd-dropdown:not([open])>.sd-summary-title .sd-summary-down { + right: unset; + left: 0.35em; + top: 0.35em; + font-size: 0.7em; +} + +/* add : to captions */ +.caption-number:after +{ + content: ': '; + margin-left: -0.25rem; +} + +/* text transformations */ +.text-rotate-90 +{ + writing-mode: vertical-rl; +} + +.text-rotate-270 +{ + writing-mode: vertical-rl; + transform: rotate(180deg); +} + +/* styling definition lists as tables for API docs */ +.dl-as-table +{ + display: table; +} + +.dl-as-table > dl +{ + display: table-row-group; +} + +.dl-as-table > dl > dt, +.dl-as-table > dl > dd +{ + display: table-row; +} + +.dl-as-table > dl > dd > p +{ + display: table-cell; +} + +.dl-as-table > dl > dd::before +{ + content: ""; /* add a fake cell */ +} + +.tag-as-table-cell +{ + display: table-cell; + width: auto; + text-align: right; + white-space: nowrap; + padding-right: 0.5em; +} diff --git a/docs/media/version.js b/docs/media/version.js new file mode 100644 index 000000000..cac70456d --- /dev/null +++ b/docs/media/version.js @@ -0,0 +1,192 @@ +// helper to grab a url and convert it to a js object +async function _fetchJson(url) +{ + console.log(`fetching: ${url}`) + let response = await fetch(url) + if (!response.ok) + { + throw Error(`unable to retrieve ${url}: ${response.status}`) + } + + return response.json() +} + +// if newer documentation has been published, this function will add an element to the DOM warning the user. +function _populateVersionWarning(project, latest, versions, root) +{ + console.log(`warning check: ${project.version} ${latest}`) + + const versionMismatch = (latest && (project.version != latest)); + if (!versionMismatch) + { + return + } + + // find the message we should display for this version of the docs. if a custom + // message wasn't provided, use a default message. + let message = `This documentation is not the latest for ${project.name}. ` + for (let i = 0; i < versions.length; i++) + { + if (versions[i].version == project.version) + { + if ("message" in versions[i]) + { + message = versions[i].message + } + break; + } + } + + // since the current version is not the latest version, update the message with a link to the latest docs. + message = `${message} ${project.name} ${latest} is the latest version.` + console.log(message) + + // try to find the main content div + const elems = document.querySelectorAll("div.wy-nav-content") + console.log(elems) + if (elems.length < 1) + { + throw Error("could not find main content") + } + + const content = elems[0] + + // we're trying to inject the following HTML: + // + //
+    // <div class="omni-version-warning">
+    //   <p class="omni-version-warning-content">
+    //     This documentation is for an older version of repo_docs.
+    //     repo_docs 0.9.1 is the latest version.
+    //   </p>
+    // </div>
+ const outer = document.createElement("div") + outer.classList.add("omni-version-warning") + + const inner = document.createElement("p") + inner.classList.add("omni-version-warning-content") + inner.innerHTML = message + + outer.appendChild(inner) + + content.insertBefore(outer, content.firstChild) +} + +// this method: +// +// - injects a document version selection box +// - injects a warning message if the docs are not the latest docs +async function _populateVersions() +{ + try + { + // sphinx happily puts a relative path to the project root in each of its html files :) + const root = document.getElementById("documentation_options").getAttribute('data-url_root') + if (!root) + { + throw new Error("unable to find data-url_root in header") + } + + // grab project information + const nocache = `?nocache=${(new Date()).getTime()}` + const project = await _fetchJson(`${root}project.json`) + let versions = await _fetchJson(`${root}../versions.json${nocache}`) + const latest = versions.latest + console.log("latest:", latest) + versions = versions.versions + console.log(versions) + + if (versions.length < 1) + { + throw new Error("versions.json does not contain any versions") + } + + // find all of the elements that look like the sidebar (there should just be 1) + let elems = document.querySelectorAll("div.wy-side-scroll") + if (elems.length > 0) + { + if (elems.length > 1) + { + console.log("warning: found more than one side navigation bars") + } + + // here we're creating the following html: + // + //
+        //   <div class="omni-version">
+        //     <span class="omni-version-content">
+        //       repo_docs
+        //       <select class="omni-version-select">...</select>
+        //     </span>
+        //   </div>
+ let sidebar = elems[0] + let div = document.createElement("div") + div.classList.add("omni-version") + + let content = document.createElement("span") + content.classList.add("omni-version-content") + content.innerHTML = `${project.name} ` + + let selector = document.createElement("select") + selector.classList.add("omni-version-select") + + function _addOption(v) + { + console.log(`adding ${v}`) + let opt = document.createElement("option") + opt.value = `../${v}/index.html` + opt.innerHTML = v + + if (v == project.version) + { + console.log(`found self: ${v}`) + opt.selected = true + foundThisVersion = true + } + + selector.appendChild(opt) + } + + let foundThisVersion = false + for (let i = 0; i < versions.length; i++) + { + _addOption(versions[i].version) + } + + if (!foundThisVersion) + { + _addOption(project.version) + } + + content.appendChild(selector) + + // if the selection changes, change the url + selector.addEventListener("input", (event) => { + console.log("selected:", event.target.value) + + let url = root + event.target.value + console.log("url:", url) + window.location.assign(url) + }) + + div.appendChild(content) + sidebar.appendChild(div) + + // inject a warning about out-of-date documentation (if needed) + _populateVersionWarning(project, latest, versions, root) + } + else + { + throw Error("unable to find navigation bar to inject version selector") + } + } + catch (e) + { + console.log("warning: failed to populate version information:", e) + } +} + +// wait until the page is loaded to modify the DOM +window.addEventListener("load", _populateVersions) diff --git a/docs/multi-gpu.md b/docs/multi-gpu.md new file mode 100644 index 000000000..fe064590b --- /dev/null +++ b/docs/multi-gpu.md @@ -0,0 +1,381 @@ + + +# Multi-GPU for Inference + +```{contents} +--- +depth: 2 +local: true +backlinks: none +--- +``` + +## Comparison with the Local GPUs Example + +This example is very similar to the example that uses local GPUs. +The key difference is that the `deploy/compose/rag-app-text-chatbot.yaml` file +is modified to identify the device IDs of the GPUs for the services. + +If you performed all the steps in [](local-gpu.md), consider skipping to +step 1 of [](#build-and-start-the-containers) on this page. + +## Example Features + +This example deploys a developer RAG pipeline for chat Q&A and serves inferencing with the NeMo Framework Inference container across multiple local GPUs. + +This example uses a local host with an NVIDIA A100, H100, or L40S GPU. + +```{list-table} +:header-rows: 1 + +* - Model + - Embedding + - Framework + - Description + - Multi-GPU + - TRT-LLM + - Model Location + - Triton + - Vector Database + +* - llama-2 + - e5-large-v2 + - LlamaIndex + - QA chatbot + - YES + - YES + - Local Model + - YES + - Milvus + +* - llama-2 + - e5-large-v2 + - LlamaIndex + - QA chatbot + - YES + - YES + - Local Model + - YES + - pgvector +``` + +The following figure shows the sample topology: + +- The sample chat bot web application communicates with the local chain server. + +- The chain server sends inference requests to NVIDIA Triton Inference Server (TIS). + TIS uses TensorRT-LLM and NVIDIA GPUs with the LLama 2 model for generative AI. + +- The sample chat bot supports uploading documents to create a knowledge base. + The uploaded documents are parsed by the chain server and embeddings are stored + in the vector database, Milvus or pgvector. 
+ When you submit a question and request to use the knowledge base, the chain server + retrieves the most relevant documents and submits them with the question to + TIS to perform retrieval-augumented generation. + +- Optionally, you can deploy NVIDIA Riva. Riva can use automatic speech recognition to + transcribe your questions and use text-to-speech to speak the answers aloud. + +![Sample topology for a RAG pipeline with local GPUs and local inference.](./images/local-gpus-topology.png) + + +## Prerequisites + +- Clone the Generative AI examples Git repository using Git LFS: + + ```console + $ sudo apt -y install git-lfs + $ git clone git@github.com:NVIDIA/GenerativeAIExamples.git + $ cd GenerativeAIExamples/ + $ git lfs pull + ``` + +- A host with one or more NVIDIA A100, H100, or L40S GPU. + +- Verify NVIDIA GPU driver version 535 or later is installed and that the GPU is in compute mode: + + ```console + $ nvidia-smi -q -d compute + ``` + + *Example Output* + + ```{code-block} output + --- + emphasize-lines: 4,9 + --- + ==============NVSMI LOG============== + + Timestamp : Sun Nov 26 21:17:25 2023 + Driver Version : 535.129.03 + CUDA Version : 12.2 + + Attached GPUs : 2 + GPU 00000000:CA:00.0 + Compute Mode : Default + + GPU 00000000:FA:00.0 + Compute Mode : Default + ``` + + If the driver is not installed or below version 535, refer to the [*NVIDIA Driver Installation Quickstart Guide*](https://docs.nvidia.com/datacenter/tesla/tesla-installation-notes/index.html). + +- Install Docker Engine and Docker Compose. + Refer to the instructions for [Ubuntu](https://docs.docker.com/engine/install/ubuntu/). + +- Install the NVIDIA Container Toolkit. + + 1. Refer to the [installation documentation](https://docs.nvidia.com/datacenter/cloud-native/container-toolkit/latest/install-guide.html). + + 1. When you configure the runtime, set the NVIDIA runtime as the default: + + ```console + $ sudo nvidia-ctk runtime configure --runtime=docker --set-as-default + ``` + + If you did not set the runtime as the default, you can reconfigure the runtime by running the preceding command. + + 1. Verify the NVIDIA container toolkit is installed and configured as the default container runtime: + + ```console + $ cat /etc/docker/daemon.json + ``` + + *Example Output* + + ```json + { + "default-runtime": "nvidia", + "runtimes": { + "nvidia": { + "args": [], + "path": "nvidia-container-runtime" + } + } + } + ``` + + 1. Run the `nvidia-smi` command in a container to verify the configuration: + + ```console + $ sudo docker run --rm --runtime=nvidia --gpus all ubuntu nvidia-smi -L + ``` + + *Example Output* + + ```output + GPU 0: NVIDIA A100 80GB PCIe (UUID: GPU-d8ce95c1-12f7-3174-6395-e573163a2ace) + GPU 1: NVIDIA A100 80GB PCIe (UUID: GPU-1d37ef30-0861-de64-a06d-73257e247a0d) + ``` + +- Optional: Enable NVIDIA Riva automatic speech recognition (ASR) and text to speech (TTS). + + - To launch a Riva server locally, refer to the [Riva Quick Start Guide](https://docs.nvidia.com/deeplearning/riva/user-guide/docs/quick-start-guide.html). + + - In the provided `config.sh` script, set `service_enabled_asr=true` and `service_enabled_tts=true`, and select the desired ASR and TTS languages by adding the appropriate language codes to `asr_language_code` and `tts_language_code`. + + - After the server is running, assign its IP address (or hostname) and port (50051 by default) to `RIVA_API_URI` in `deploy/compose/compose.env`. + + - Alternatively, you can use a hosted Riva API endpoint. 
You might need to obtain an API key and/or Function ID for access. + + In `deploy/compose/compose.env`, make the following assignments as necessary: + + ```bash + export RIVA_API_URI=":" + export RIVA_API_KEY="" + export RIVA_FUNCTION_ID="" + ``` + +## Download the Llama 2 Model and Weights + +1. Fill out Meta's [Llama request access form](https://ai.meta.com/resources/models-and-libraries/llama-downloads/). + + - Select the **Llama 2 & Llama Chat** checkbox. + - After verifying your email, Meta will email you a download link. + +1. Clone the Llama repository: + + ```console + $ git clone https://github.com/facebookresearch/llama.git + $ cd llama/ + ``` + +1. Run the `download.sh` script. When prompted, specify `13B-chat` to download the llama-2-13b-chat model: + + ```console + $ ./download.sh + Enter the URL from email: < https://download.llamameta.net/...> + + Enter the list of models to download without spaces (7B,13B,70B,7B-chat,13B-chat,70B-chat), or press Enter for all: 13B-chat + ``` + +1. Copy the tokenizer to the model directory. + + ```console + $ mv tokenizer* llama-2-13b-chat/ + $ ls llama-2-13b-chat/ + ``` + + *Example Output* + + ```output + checklist.chk consolidated.00.pth consolidated.01.pth params.json tokenizer.model tokenizer_checklist.chk + ``` + +## Build and Start the Containers + +1. In the Generative AI Examples repository, edit the `deploy/compose/rag-app-text-chatbot.yaml` file. + + Specify the GPU device IDs to assign to the services: + + ```yaml + services: + llm: + // ... + deploy: + resources: + reservations: + devices: + - driver: nvidia + # count: ${INFERENCE_GPU_COUNT:-all} # Comment this out + device_ids: ["0", "1"] + capabilities: [gpu] + + jupyter-server: + // ... + deploy: + resources: + reservations: + devices: + - driver: nvidia + # count: 1 # Comment this out + device_ids: ["2"] + capabilities: [gpu] + ``` + +1. Edit the `deploy/compose/docker-compose-vectordb.yaml` file. + + Specify the GPU device IDs to assign to the services: + + ```yaml + services: + milvus: + // ... + deploy: + resources: + reservations: + devices: + - driver: nvidia + # count: 1 # Comment this out + device_ids: ["3"] + capabilities: [gpu] + ``` + +1. Edit the `deploy/compose/compose.env` file. + + Specify the absolute path to the model location, model architecture, and model name. + + ```bash + # full path to the local copy of the model weights + # NOTE: This should be an absolute path and not relative path + export MODEL_DIRECTORY="/path/to/llama/llama-2-13b_chat/" + + # the architecture of the model. eg: llama + export MODEL_ARCHITECTURE="llama" + + # the name of the model being used - only for displaying on frontend + export MODEL_NAME="Llama-2-13b-chat" + ... + ``` + +1. From the root of the repository, build the containers: + + ```console + $ docker compose --env-file deploy/compose/compose.env -f deploy/compose/rag-app-text-chatbot.yaml build + ``` + +1. Start the containers: + + ```console + $ docker compose --env-file deploy/compose/compose.env -f deploy/compose/rag-app-text-chatbot.yaml up -d + ``` + + NVIDIA Triton Inference Server can require 5 minutes to start. The `-d` flag starts the services in the background. + + *Example Output* + + ```output + ✔ Network nvidia-rag Created + ✔ Container llm-inference-server Started + ✔ Container notebook-server Started + ✔ Container chain-server Started + ✔ Container rag-playground Started + ``` + +1. 
Start the Milvus vector database: + + ```console + $ docker compose --env-file deploy/compose/compose.env -f deploy/compose/docker-compose-vectordb.yaml up -d milvus + ``` + + *Example Output* + + ```output + ✔ Container milvus-minio Started + ✔ Container milvus-etcd Started + ✔ Container milvus-standalone Started + ``` + +1. Confirm the containers are running: + + ```console + $ docker ps --format "table {{.ID}}\t{{.Names}}\t{{.Status}}" + ``` + + *Example Output* + + ```output + CONTAINER ID NAMES STATUS + 256da0ecdb7b rag-playground Up 48 minutes + 2974aa4fb2ce chain-server Up 48 minutes + 4a8c4aebe4ad notebook-server Up 48 minutes + 5be2b57bb5c1 milvus-standalone Up 48 minutes (healthy) + ecf674c8139c llm-inference-server Up 48 minutes (healthy) + a6609c22c171 milvus-minio Up 48 minutes (healthy) + b23c0858c4d4 milvus-etcd Up 48 minutes (healthy) + ``` + +### Related Information + +- [Meta Llama README](https://github.com/facebookresearch/llama/blob/main/README.md) +- [Meta Llama request access form](https://ai.meta.com/resources/models-and-libraries/llama-downloads/) + + +## Stopping the Containers + +- To uninstall, stop and remove the running containers from the root of the Generative AI Examples repository: + + ```console + $ docker compose -f deploy/compose/rag-app-text-chatbot.yaml down + ``` + +## Next Steps + +- Use the [](./using-sample-web-application.md). +- [](./vector-database.md) +- Run the sample Jupyter notebooks to learn about optional features. diff --git a/docs/multi-turn.md b/docs/multi-turn.md new file mode 100644 index 000000000..4b336a15c --- /dev/null +++ b/docs/multi-turn.md @@ -0,0 +1,187 @@ + + +# Multi-Turn Conversational Chat Bot + +```{contents} +--- +depth: 2 +local: true +backlinks: none +--- +``` + +## Example Features + +This example showcases multi-turn conversational AI in a RAG pipeline. +The chain server stores the conversation history and knowledge base in a vector database and retrieves them at runtime to understand contextual queries. + +The example supports ingestion of PDF and text files. +The documents are ingested in a dedicated document vector store, multi_turn_rag. +The prompt for the example is tuned to act as a document chat bot. +To maintain the conversation history, the chain server stores the previously asked query and the model's generated answer as a text entry in a different and dedicated vector store for conversation history, conv_store. +Both of these vector stores are part of a LangChain [LCEL](https://python.langchain.com/docs/expression_language/) chain as LangChain Retrievers. +When the chain is invoked with a query, the query passes through both the retrievers. +The retriever retrieves context from the document vector store and the closest-matching conversation history from conversation history vector store. +Afterward, the chunks are added into the LLM prompt as part of the chain. + +Developers get free credits for 10K requests to any of the available models. + +This example uses models from the NVIDIA API Catalog. + +```{list-table} +:header-rows: 1 + +* - Model + - Embedding + - Framework + - Description + - Multi-GPU + - TRT-LLM + - Model Location + - Triton + - Vector Database + +* - ai-llama2-70b + - nvolveqa_40k + - LangChain + - QA chatbot + - NO + - NO + - API Catalog + - NO + - Milvus +``` + +The following figure shows the sample topology: + +- The sample chat bot web application communicates with the chain server. + The chain server sends inference requests to an NVIDIA API Catalog endpoint. 
+- Optionally, you can deploy NVIDIA Riva. Riva can use automatic speech recognition to transcribe + your questions and use text-to-speech to speak the answers aloud. + +![Using NVIDIA API Catalog endpoints for inference instead of local components.](./images/catalog-and-vector-db.png) + +## Prerequisites + +- Clone the Generative AI examples Git repository using Git LFS: + + ```console + $ sudo apt -y install git-lfs + $ git clone git@github.com:NVIDIA/GenerativeAIExamples.git + $ cd GenerativeAIExamples/ + $ git lfs pull + ``` + +- Install Docker Engine and Docker Compose. + Refer to the instructions for [Ubuntu](https://docs.docker.com/engine/install/ubuntu/). + +- Optional: Enable NVIDIA Riva automatic speech recognition (ASR) and text to speech (TTS). + + - To launch a Riva server locally, refer to the [Riva Quick Start Guide](https://docs.nvidia.com/deeplearning/riva/user-guide/docs/quick-start-guide.html). + + - In the provided `config.sh` script, set `service_enabled_asr=true` and `service_enabled_tts=true`, and select the desired ASR and TTS languages by adding the appropriate language codes to `asr_language_code` and `tts_language_code`. + + - After the server is running, assign its IP address (or hostname) and port (50051 by default) to `RIVA_API_URI` in `deploy/compose/compose.env`. + + - Alternatively, you can use a hosted Riva API endpoint. You might need to obtain an API key and/or Function ID for access. + + In `deploy/compose/compose.env`, make the following assignments as necessary: + + ```bash + export RIVA_API_URI=":" + export RIVA_API_KEY="" + export RIVA_FUNCTION_ID="" + ``` + +## Get an API Key for the Llama 2 70B API Endpoint + +```{include} query-decomposition.md +:start-after: api-key-start +:end-before: api-key-end +``` + +## Build and Start the Containers + +1. In the Generative AI examples repository, edit the `deploy/compose/compose.env` file. + + Add the API key for the model endpoint: + + ```text + export NVIDIA_API_KEY="nvapi-..." + ``` + +1. From the root of the repository, build the containers: + + ```console + $ docker compose --env-file deploy/compose/compose.env -f deploy/compose/rag-app-multiturn-chatbot.yaml build + ``` + +1. Start the containers: + + ```console + $ docker compose --env-file deploy/compose/compose.env -f deploy/compose/rag-app-multiturn-chatbot.yaml up -d + ``` + + *Example Output* + + ```output + ✔ Network nvidia-rag Created + ✔ Container chain-server Started + ✔ Container rag-playground Started + ``` + +1. Start the Milvus vector database: + + ```console + $ docker compose --env-file deploy/compose/compose.env -f deploy/compose/docker-compose-vectordb.yaml up -d milvus + ``` + + *Example Output* + + ```output + ✔ Container milvus-minio Started + ✔ Container milvus-etcd Started + ✔ Container milvus-standalone Started + ``` + +1. Confirm the containers are running: + + ```console + $ docker ps --format "table {{.ID}}\t{{.Names}}\t{{.Status}}" + ``` + + *Example Output* + + ```output + CONTAINER ID NAMES STATUS + f21cf089312a milvus-standalone Up 8 seconds + 44189aa5836b milvus-minio Up 9 seconds (health: starting) + 6024a6304e4b milvus-etcd Up 9 seconds (health: starting) + 4656c2e7640e rag-playground Up 20 seconds + 2e2e8f4decc9 chain-server Up 20 seconds + ``` + +## Next Steps + +- Access the web interface for the chat server. + Refer to [](./using-sample-web-application.md) for information about using the web interface. +- Upload one or more PDF and .txt files to the knowledge base. 
+- Enable the **Use knowledge base** checkbox when you submit a question. +- [](./vector-database.md) +- Stop the containers by running `docker compose -f deploy/compose/rag-app-multiturn-chatbot.yaml down` and + `docker compose -f deploy/compose/docker-compose-vectordb.yaml down`. diff --git a/docs/multimodal-data.md b/docs/multimodal-data.md new file mode 100644 index 000000000..caf182281 --- /dev/null +++ b/docs/multimodal-data.md @@ -0,0 +1,193 @@ + + +# Multimodal Data + +```{contents} +--- +depth: 2 +local: true +backlinks: none +--- +``` + +## Example Features + +This example deploys a developer RAG pipeline for chat Q&A and serves inferencing from NVIDIA API Catalog endpoints +instead of NVIDIA Triton Inference Server, a local Llama 2 model, or local GPUs. + +Developers get free credits for 10K requests to any of the available models. + +The key difference from the [](./api-catalog.md) example is that this example demonstrates how work with multimodal data. +The model works with any kind of image in PDF, such as graphs and plots, as well as text and tables. + +This example uses models from the NVIDIA API Catalog. + +```{list-table} +:header-rows: 1 +:widths: 30 10 10 10 10 10 10 10 10 + +* - Model + - Embedding + - Framework + - Description + - Multi-GPU + - TRT-LLM + - Model Location + - Triton + - Vector Database + +* - ai-mixtral-8x7b-instruct for response generation + + ai-google-Deplot for graph to text conversion + + ai-Neva-22B for image to text conversion + - nvolveqa_40k + - Custom Python + - QA chatbot + - NO + - NO + - API Catalog + - NO + - Milvus +``` + +The following figure shows the sample topology: + +- The sample chat bot web application communicates with the chain server. + The chain server sends inference requests to NVIDIA API Catalog endpoints. +- Optionally, you can deploy NVIDIA Riva. Riva can use automatic speech recognition to transcribe + your questions and use text-to-speech to speak the answers aloud. + +![Using NVIDIA API Catalog endpoints for inference instead of local components.](./images/catalog-and-vector-db.png) + + +## Limitations + +Although the AI Foundation Models endpoint uses the Neva_22B model for processing images, this example +supports uploading images that are part of PDF files only. +For example, after deploying the services, you cannot upload a PNG, JPEG, TIFF, or any other image format file. + + +## Prerequisites + +- Clone the Generative AI examples Git repository using Git LFS: + + ```console + $ sudo apt -y install git-lfs + $ git clone git@github.com:NVIDIA/GenerativeAIExamples.git + $ cd GenerativeAIExamples/ + $ git lfs pull + ``` + +- Install Docker Engine and Docker Compose. + Refer to the instructions for [Ubuntu](https://docs.docker.com/engine/install/ubuntu/). + +- Optional: Enable NVIDIA Riva automatic speech recognition (ASR) and text to speech (TTS). + + - To launch a Riva server locally, refer to the [Riva Quick Start Guide](https://docs.nvidia.com/deeplearning/riva/user-guide/docs/quick-start-guide.html). + + - In the provided `config.sh` script, set `service_enabled_asr=true` and `service_enabled_tts=true`, and select the desired ASR and TTS languages by adding the appropriate language codes to `asr_language_code` and `tts_language_code`. + + - After the server is running, assign its IP address (or hostname) and port (50051 by default) to `RIVA_API_URI` in `deploy/compose/compose.env`. + + - Alternatively, you can use a hosted Riva API endpoint. You might need to obtain an API key and/or Function ID for access. 
+ + In `deploy/compose/compose.env`, make the following assignments as necessary: + + ```bash + export RIVA_API_URI=":" + export RIVA_API_KEY="" + export RIVA_FUNCTION_ID="" + ``` + +## Get an API Key for the Mixtral 8x7B Instruct API Endpoint + +```{include} api-catalog.md +:start-after: api-key-start +:end-before: api-key-end +``` + +## Build and Start the Containers + +1. In the Generative AI examples repository, edit the `deploy/compose/compose.env` file. + + Add the API key for the model endpoint: + + ```text + export NVIDIA_API_KEY="nvapi-..." + ``` + +1. From the root of the repository, build the containers: + + ```console + $ docker compose --env-file deploy/compose/compose.env -f deploy/compose/rag-app-multimodal-chatbot.yaml build + ``` + +1. Start the containers: + + ```console + $ docker compose --env-file deploy/compose/compose.env -f deploy/compose/rag-app-multimodal-chatbot.yaml up -d + ``` + + *Example Output* + + ```output + ✔ Network nvidia-rag Created + ✔ Container chain-server Started + ✔ Container rag-playground Started + ``` + +1. Start the Milvus vector database: + + ```console + $ docker compose --env-file deploy/compose/compose.env -f deploy/compose/docker-compose-vectordb.yaml up -d milvus + ``` + + *Example Output* + + ```output + ✔ Container milvus-minio Started + ✔ Container milvus-etcd Started + ✔ Container milvus-standalone Started + ``` + +1. Confirm the containers are running: + + ```console + $ docker ps --format "table {{.ID}}\t{{.Names}}\t{{.Status}}" + ``` + + *Example Output* + + ```output + CONTAINER ID NAMES STATUS + 37dcdb4ffcb0 rag-playground Up 3 minutes + 39718f6a2a06 chain-server Up 3 minutes + 68af1e4dfb44 milvus-standalone Up 2 minutes + 522b12ec17f0 milvus-minio Up 2 minutes (healthy) + ed48988c5657 milvus-etcd Up 2 minutes (healthy) + ``` + +## Next Steps + +- Access the web interface for the chat server. + Refer to [](./using-sample-web-application.md) for information about using the web interface. +- Upload one or more PDF files with graphics, plots, and tables. +- Enable the **Use knowledge base** checkbox when you submit a question. +- Stop the containers by running `docker compose -f deploy/compose/rag-app-multimodal-chatbot.yaml down` and + `docker compose -f deploy/compose/docker-compose-vectordb.yaml down`. diff --git a/docs/observability.md b/docs/observability.md new file mode 100644 index 000000000..fe425a334 --- /dev/null +++ b/docs/observability.md @@ -0,0 +1,191 @@ + + +# Observability Tool + +```{contents} +--- +depth: 2 +local: true +backlinks: none +--- +``` + +## Introduction + +Observability is a crucial aspect that facilitates the monitoring and comprehension of the internal state and behavior of a system or application. +Applications based on RAG are intricate systems that encompass the interaction of numerous components. +To enhance the performance of these RAG-based applications, observability is an efficient mechanism for both monitoring and debugging. + +The following diagram shows high-level overview of how traces are captured. + +![RAG with Observability](./images/image9.png) + +The observability stack adds following containers on top of the RAG app containers: + +- OpenTelemetry Collector: Receives, processes, and exports the traces. +- Jaeger: Acts as an OpenTelemetry backend that provides storage, query service, and visualizer. + You can configure any other OTLP-compatible backend such as [Zipkin](https://zipkin.io/), [Prometheus](https://prometheus.io/), and so on. 
+ To configure an alternative backend, refer to [Configuration](https://opentelemetry.io/docs/collector/configuration/) in the OpenTelemetry documentation. +- Cassandra: Provides persistent storage for traces. + Jaeger supports many other [storage backends](https://www.jaegertracing.io/docs/1.18/deployment/#storage-backends) such as ElasticSearch, Kafka, and Badger. + For a large scale, production deployment, the Jaeger team recommends ElasticSearch over Cassandra. + +## Key terms + +Span +: A unit of work within a system, encapsulating information about a specific operation (Eg. LLM call, embedding generation etc). + +Traces +: The recording of a request as it goes through a system, tracking every service the request comes in contact with. + Multiple spans make a trace logically bound by parent-child relationship. + +Root Span +: The first span in a trace, denoting the beginning and end of the entire operation. + +Span Attributes +: Key-value pairs a Span may consist of to provide additional context or metadata. + +Collectors +: Components that process and export telemetry data from instrumented applications. + +Context +: Signifies current location within the trace hierarchy. + The context determines whether a new span initiates a trace or connects to an existing parent span. + +Services +: Microservices that generates telemetry data. + +The following diagram shows a typical trace for query that uses a knowledge base and identifies the spans and root span. + +![Trace for query from knowledge base](./images/image10.png) + +## Prerequisites + +```{include} evaluation.md +:start-after: prerequisites-start +:end-before: prerequisites-end +``` + +## Build and Start the Containers + +1. In the Generative AI Examples repository, edit the `deploy/compose/configs/otel-collector-config.yaml` + and `deploy/compose/configs/jaeger.yaml` files. + + Refer to [configuration](https://opentelemetry.io/docs/collector/configuration/) in the OpenTelemetry documentation + and the [Jaeger all-in-one with Cassandra](https://www.jaegertracing.io/docs/1.52/cli/#jaeger-all-in-one-cassandra) + reference in the Jaeger documentation. + +1. Edit the `deploy/compose/rag-app-text-chatbot.yaml` file. + For the rag-playground and chain-server services, set the following environment variables: + + ```yaml + environment: + OTEL_EXPORTER_OTLP_ENDPOINT: http://otel-collector:4317 + OTEL_EXPORTER_OTLP_PROTOCOL: grpc + ENABLE_TRACING: true + ``` + +1. Deploy the developer RAG example: + + ```console + $ docker compose --env-file deploy/compose/compose.env -f deploy/compose/rag-app-text-chatbot.yaml build + $ docker compose --env-file deploy/compose/compose.env -f deploy/compose/rag-app-text-chatbot.yaml up -d + ``` + +1. Start the Milvus vector database: + + ```console + $ docker compose --env-file deploy/compose/compose.env -f deploy/compose/docker-compose-vectordb.yaml up -d milvus + ``` + +1. Deploy the observability services: + + ```console + $ docker compose --env-file deploy/compose/compose.env -f deploy/compose/docker-compose-observability.yaml build + $ docker compose --env-file deploy/compose/compose.env -f deploy/compose/docker-compose-observability.yaml up -d + ``` + + *Example Output* + + ```output + ✔ Container otel-collector Started + ✔ Container cassandra Started + ✔ Container compose-cassandra-schema-1 Started + ✔ Container jaeger Started + ``` + +1. 
Optional: Confirm the services are started: + + ```console + $ docker ps --format "table {{.ID}}\t{{.Names}}\t{{.Status}}" + ``` + + *Example Output* + + ```output + CONTAINER ID NAMES STATUS + beb1582320d6 jaeger Up 5 minutes + 674c7bbb367e cassandra Up 6 minutes + d11e35ee69f4 rag-playground Up 5 minutes + 68f22b3842cb chain-server Up 5 minutes + 751dd4fd80ec milvus-standalone Up 5 minutes (healthy) + b435006c95c1 milvus-minio Up 6 minutes (healthy) + 9108253d058d notebook-server Up 6 minutes + 5315a9dc9eb4 milvus-etcd Up 6 minutes (healthy) + d314a43074c8 otel-collector Up 6 minutes + ``` + +1. Access the Jaeger web interface at `http://host-ip:16686` from your web browser. + + +## Example Traces + +The following screenshots show traces from the Jaeger web interface. + +- Upload document trace + ![upload document trace](./images/image11.png) +- User query using knowledge base trace + ![user query using knowledge base](./images/image12.png) + +## Implementation Details + +The user interface web application, named the RAG playground, and the chain server, are instrumented. + +### RAG Playground + +The [tracing.py](https://github.com/NVIDIA/GenerativeAIExamples/blob/main/RetrievalAugmentedGeneration/frontend/frontend/tracing.py) module in the frontend application code performs the instrumentation. +At high level, the code performs the following: + +- Sets up the OpenTelemetry configurations for resource name, frontend, span processor, and context propagator. +- Provides instrumentation decorator functions, `instrumentation_wrapper` and `predict_instrumentation_wrapper`, for managing trace context across different services. + This decorator function is used with the API functions in [chat_client.py](https://github.com/NVIDIA/GenerativeAIExamples/blob/main/RetrievalAugmentedGeneration/frontend/frontend/chat_client.py) to create new span contexts. + The span contexts can then be injected in the headers of the request made to the chain server. + The code also logs span attributes that are extracted from the API request. + +### Chain Server + +The [tracing.py](https://github.com/NVIDIA/GenerativeAIExamples/blob/main/RetrievalAugmentedGeneration/common/tracing.py) module in the chain server application code is responsible for instrumentation. +At high level, the code performs the following: + +- Sets up the OpenTelemetry configurations for resource name, chain-server, span processor, and context propagator. +- Initializes the [LlamaIndex OpenTelemetry callback handler](https://github.com/NVIDIA/GenerativeAIExamples/blob/main/tools/observability/llamaindex/opentelemetry_callback.py). + The callback handler uses [LlamaIndex callbacks](https://docs.llamaindex.ai/en/stable/module_guides/observability/callbacks/root.html) to track various events such as LLM calls, chunking, embedding, and so on. +- Provides an instrumentation decorator function, `instrumentation_wrapper`, for managing trace context across different services. + This decorator function is used with the API functions in [server.py](https://github.com/NVIDIA/GenerativeAIExamples/blob/main/RetrievalAugmentedGeneration/common/server.py) to extract the trace context that is present in requests from the frontend service and attach it in the new span created by the chain-server. + +The instrumentation decorator function, `instrumentation_wrapper`, can be used to instrument any LlamaIndex application as long as LlamaIndex OpenTelemetry callback handler, `opentelemetry_callback.py`, is set as global handler in the application. 
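+
+As a rough illustration only (not the exact code in `tracing.py`), such a decorator typically extracts the trace context that the frontend propagates in the request headers and runs the wrapped handler inside a child span. In the following sketch, the tracer name, handler signature, and span attribute are placeholder assumptions; only the public OpenTelemetry API calls are real:
+
+```python
+from functools import wraps
+
+from opentelemetry import trace
+from opentelemetry.trace.propagation.tracecontext import TraceContextTextMapPropagator
+
+tracer = trace.get_tracer("chain-server")
+propagator = TraceContextTextMapPropagator()
+
+
+def instrumentation_wrapper(func):
+    """Continue the caller's trace by extracting its context from the request headers."""
+
+    @wraps(func)
+    async def wrapper(request, *args, **kwargs):
+        # The frontend injects a traceparent header; extracting it links the two services.
+        ctx = propagator.extract(carrier=dict(request.headers))
+        with tracer.start_as_current_span(func.__name__, context=ctx) as span:
+            span.set_attribute("http.target", str(request.url.path))
+            return await func(request, *args, **kwargs)
+
+    return wrapper
+```
+
+The frontend performs the mirror operation, calling `propagator.inject(carrier)` with its outbound request headers as the carrier, which is what stitches the RAG playground and chain server spans into a single trace.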
diff --git a/docs/project.json b/docs/project.json new file mode 100644 index 000000000..4939983c8 --- /dev/null +++ b/docs/project.json @@ -0,0 +1 @@ +{"name": "generative-ai-examples", "version": "0.5.0"} \ No newline at end of file diff --git a/docs/quantized-llm-model.md b/docs/quantized-llm-model.md new file mode 100644 index 000000000..881a7c924 --- /dev/null +++ b/docs/quantized-llm-model.md @@ -0,0 +1,349 @@ + + +# Quantized LLM Inference Model + +```{contents} +--- +depth: 2 +local: true +backlinks: none +--- +``` + + +## Example Features + +This example deploys a developer RAG pipeline for chat Q&A and serves inferencing with the NeMo Framework Inference container across multiple local GPUs with a +quantized version of the Llama 7B chat model. + +This example uses a local host with an NVIDIA A100, H100, or L40S GPU. + +```{list-table} +:header-rows: 1 + +* - Model + - Embedding + - Framework + - Description + - Multi-GPU + - TRT-LLM + - Model Location + - Triton + - Vector Database + +* - llama-2-7b-chat + - e5-large-v2 + - LlamaIndex + - QA chatbot + - YES + - YES + - Local Model + - YES + - Milvus +``` + +## Prerequisites + +- Clone the Generative AI examples Git repository using Git LFS: + + ```console + $ sudo apt -y install git-lfs + $ git clone git@github.com:NVIDIA/GenerativeAIExamples.git + $ cd GenerativeAIExamples/ + $ git lfs pull + ``` + +- A host with one or more NVIDIA A100, H100, or L40S GPU. + +- Verify NVIDIA GPU driver version 535 or later is installed and that the GPU is in compute mode: + + ```console + $ nvidia-smi -q -d compute + ``` + + *Example Output* + + ```{code-block} output + --- + emphasize-lines: 4,9 + --- + ==============NVSMI LOG============== + + Timestamp : Sun Nov 26 21:17:25 2023 + Driver Version : 535.129.03 + CUDA Version : 12.2 + + Attached GPUs : 2 + GPU 00000000:CA:00.0 + Compute Mode : Default + + GPU 00000000:FA:00.0 + Compute Mode : Default + ``` + + If the driver is not installed or below version 535, refer to the [*NVIDIA Driver Installation Quickstart Guide*](https://docs.nvidia.com/datacenter/tesla/tesla-installation-notes/index.html). + +- Install Docker Engine and Docker Compose. + Refer to the instructions for [Ubuntu](https://docs.docker.com/engine/install/ubuntu/). + +- Install the NVIDIA Container Toolkit. + + 1. Refer to the [installation documentation](https://docs.nvidia.com/datacenter/cloud-native/container-toolkit/latest/install-guide.html). + + 1. When you configure the runtime, set the NVIDIA runtime as the default: + + ```console + $ sudo nvidia-ctk runtime configure --runtime=docker --set-as-default + ``` + + If you did not set the runtime as the default, you can reconfigure the runtime by running the preceding command. + + 1. Verify the NVIDIA container toolkit is installed and configured as the default container runtime: + + ```console + $ cat /etc/docker/daemon.json + ``` + + *Example Output* + + ```json + { + "default-runtime": "nvidia", + "runtimes": { + "nvidia": { + "args": [], + "path": "nvidia-container-runtime" + } + } + } + ``` + + 1. Run the `nvidia-smi` command in a container to verify the configuration: + + ```console + $ sudo docker run --rm --runtime=nvidia --gpus all ubuntu nvidia-smi -L + ``` + + *Example Output* + + ```output + GPU 0: NVIDIA A100 80GB PCIe (UUID: GPU-d8ce95c1-12f7-3174-6395-e573163a2ace) + GPU 1: NVIDIA A100 80GB PCIe (UUID: GPU-1d37ef30-0861-de64-a06d-73257e247a0d) + ``` + +- Optional: Enable NVIDIA Riva automatic speech recognition (ASR) and text to speech (TTS). 
+ + - To launch a Riva server locally, refer to the [Riva Quick Start Guide](https://docs.nvidia.com/deeplearning/riva/user-guide/docs/quick-start-guide.html). + + - In the provided `config.sh` script, set `service_enabled_asr=true` and `service_enabled_tts=true`, and select the desired ASR and TTS languages by adding the appropriate language codes to `asr_language_code` and `tts_language_code`. + + - After the server is running, assign its IP address (or hostname) and port (50051 by default) to `RIVA_API_URI` in `deploy/compose/compose.env`. + + - Alternatively, you can use a hosted Riva API endpoint. You might need to obtain an API key and/or Function ID for access. + + In `deploy/compose/compose.env`, make the following assignments as necessary: + + ```bash + export RIVA_API_URI=":" + export RIVA_API_KEY="" + export RIVA_FUNCTION_ID="" + ``` + +## Download the Llama 2 Model and Weights + +1. Go to . + + - Locate the model to download, such as [Llama 2 7B chat HF](https://huggingface.co/meta-llama/Llama-2-7b-chat-hf). + - Follow the information about accepting the license terms from Meta. + - Log in or sign up for an account with Hugging Face. + +1. After you are granted access, clone the repository by clicking the vertical ellipses button and selecting **Clone repository**. + + During the clone, you might be asked for your username and password multiple times. + Provide the information until the clone is complete. + + +## Download TensorRT-LLM and Quantize the Model + +The following steps summarize downloading the TensorRT-LLM repository, +building a container image, and quantizing the model. + +1. Clone the NVIDIA TensorRT-LLM repository: + + ```console + $ git clone https://github.com/NVIDIA/TensorRT-LLM.git + $ cd TensorRT-LLM + $ git checkout release/0.5.0 + $ git submodule update --init --recursive + $ git lfs install + $ git lfs pull + ``` + +1. Build the TensorRT-LLM Docker image: + + ```console + $ make -C docker release_build + ``` + + Building the image can require more than 30 minutes and requires approximately 30 GB. + The image is named tensorrt_llm/release:latest. + +1. Start the container. + Ensure that the container has one volume mount to the model directory and one volume mount to the TensorRT-LLM repository: + + ```console + $ docker run --rm -it --gpus all --ipc=host \ + -v :/model-store \ + -v $(pwd):/repo -w /repo \ + --ulimit memlock=-1 --shm-size=20g \ + tensorrt_llm/release:latest bash + ``` + +1. Install NVIDIA AMMO Toolkit in the container: + + ```console + # Obtain the cuda version from the system. Assuming nvcc is available in path. + $ cuda_version=$(nvcc --version | grep 'release' | awk '{print $6}' | awk -F'[V.]' '{print $2$3}') + # Obtain the python version from the system. + $ python_version=$(python3 --version 2>&1 | awk '{print $2}' | awk -F. '{print $1$2}') + # Download and install the AMMO package from the DevZone. + $ wget https://developer.nvidia.com/downloads/assets/cuda/files/nvidia-ammo/nvidia_ammo-0.3.0.tar.gz + $ tar -xzf nvidia_ammo-0.3.0.tar.gz + $ pip install nvidia_ammo-0.3.0/nvidia_ammo-0.3.0+cu$cuda_version-cp$python_version-cp$python_version-linux_x86_64.whl + # Install the additional requirements + $ pip install -r examples/quantization/requirements.txt + ``` + +1. Install version `0.25.0` of the accelerate Python package: + + ```console + $ pip install accelerate==0.25.0 + ``` + +1. 
Run the quantization with the container: + + ```console + $ python3 examples/llama/quantize.py --model_dir /model-store \ + --dtype float16 --qformat int4_awq \ + --export_path ./llama-2-7b-4bit-gs128-awq.pt --calib_size 32 + ``` + + Quantization can require more than 15 minutes to complete. + The sample command creates a `llama-2-7b-4bit-gs128-awq.pt` + quantized checkpoint. + +1. Copy the quantized checkpoint directory to the model directory: + + ```console + $ cp .pt + ``` + +The preceding steps summarize several documents from the NVIDIA TensorRT-LLM GitHub repository. +Refer to the repository for more detail about the following topics: + +- Building the TensorRT-LLM image, refer to the [installation.md](https://github.com/NVIDIA/TensorRT-LLM/blob/release/0.5.0/docs/source/installation.md) file in the release/0.5.0 branch. + +- Installing NVIDIA AMMO Toolkit, refer to the [README](https://github.com/NVIDIA/TensorRT-LLM/blob/release/0.5.0/examples/quantization/README.md) file in the `examples/quantization` directory. + +- Running the `quantize.py` command, refer to [AWQ](https://github.com/NVIDIA/TensorRT-LLM/blob/release/0.5.0/examples/llama/README.md#awq) in the `examples/llama` directory. + + +## Build and Start the Containers + +1. In the Generative AI Examples repository, edit the `deploy/compose/compose.env` file. + + - Update the `MODEL_DIRECTORY` variable to identify the Llama 2 model directory that contains the quantized checkpoint. + + - Uncomment the `QUANTIZATION` variable: + + ```text + export QUANTIZATION="int4_awq" + ``` + +1. From the root of the repository, build the containers: + + ```console + $ docker compose --env-file deploy/compose/compose.env -f deploy/compose/rag-app-text-chatbot.yaml build + ``` + +1. Start the containers: + + ```console + $ docker compose --env-file deploy/compose/compose.env -f deploy/compose/rag-app-text-chatbot.yaml up -d + ``` + + NVIDIA Triton Inference Server can require 5 minutes to start. The `-d` flag starts the services in the background. + + *Example Output* + + ```output + ✔ Network nvidia-rag Created + ✔ Container llm-inference-server Started + ✔ Container notebook-server Started + ✔ Container chain-server Started + ✔ Container rag-playground Started + ``` + +1. Start the Milvus vector database: + + ```console + $ docker compose --env-file deploy/compose/compose.env -f deploy/compose/docker-compose-vectordb.yaml up -d milvus + ``` + + *Example Output* + + ```output + ✔ Container milvus-minio Started + ✔ Container milvus-etcd Started + ✔ Container milvus-standalone Started + ``` + +1. Confirm the containers are running: + + ```console + $ docker ps --format "table {{.ID}}\t{{.Names}}\t{{.Status}}" + ``` + + *Example Output* + + ```output + CONTAINER ID NAMES STATUS + 256da0ecdb7b rag-playground Up 48 minutes + 2974aa4fb2ce chain-server Up 48 minutes + 4a8c4aebe4ad notebook-server Up 48 minutes + 5be2b57bb5c1 milvus-standalone Up 48 minutes (healthy) + ecf674c8139c llm-inference-server Up 48 minutes (healthy) + a6609c22c171 milvus-minio Up 48 minutes (healthy) + b23c0858c4d4 milvus-etcd Up 48 minutes (healthy) + ``` + +## Stopping the Containers + +- To uninstall, stop and remove the running containers from the root of the Generative AI Examples repository: + + ```console + $ docker compose --env-file deploy/compose/compose.env -f deploy/compose/rag-app-text-chatbot.yaml down + $ docker compose -f deploy/compose/docker-compose-vectordb.yaml down + + ``` + +## Next Steps + +- Use the [](./using-sample-web-application.md). 
+- [](./vector-database.md) +- Run the sample Jupyter notebooks to learn about optional features. diff --git a/docs/query-decomposition.md b/docs/query-decomposition.md new file mode 100644 index 000000000..68a53dda7 --- /dev/null +++ b/docs/query-decomposition.md @@ -0,0 +1,207 @@ + + +# Query Decomposition + +```{contents} +--- +depth: 2 +local: true +backlinks: none +--- +``` + +## Example Features + +This example deploys a recursive query decomposition example for chat Q&A. +The example uses the llama2-70b chat model from an NVIDIA API Catalog endpoint for inference. + +Query decomposition can perform RAG when the agent needs to access information from several different documents +(also referred to as _chunks_) or to perform some computation on the answers. +This example uses a custom LangChain agent that recursively breaks down the questions into subquestions. +The agent then attempts to answer the subquestions. + +The agent has access to two tools: + +- search: to perform standard RAG on a subquestion. +- math: to pose a math question to the LLM. + +The agent continues to break down the question into subquestions until it has the answers that it needs to form the final answer. + +```{list-table} +:header-rows: 1 + +* - Model + - Embedding + - Framework + - Description + - Multi-GPU + - TRT-LLM + - Model Location + - Triton + - Vector Database + +* - ai-llama2-70b + - nvolveqa_40k + - LangChain + - QA chatbot + - NO + - NO + - API Catalog + - NO + - Milvus +``` + +The following figure shows the sample topology: + +- The sample chat bot web application communicates with the chain server. + The chain server sends inference requests to an NVIDIA API Catalog endpoint. +- Optionally, you can deploy NVIDIA Riva. Riva can use automatic speech recognition to transcribe + your questions and use text-to-speech to speak the answers aloud. + +![Using NVIDIA API Catalog endpoints for inference instead of local components.](./images/ai-foundations-topology.png) + +## Prerequisites + +- Clone the Generative AI examples Git repository using Git LFS: + + ```console + $ sudo apt -y install git-lfs + $ git clone git@github.com:NVIDIA/GenerativeAIExamples.git + $ cd GenerativeAIExamples/ + $ git lfs pull + ``` + +- Install Docker Engine and Docker Compose. + Refer to the instructions for [Ubuntu](https://docs.docker.com/engine/install/ubuntu/). + +- Optional: Enable NVIDIA Riva automatic speech recognition (ASR) and text to speech (TTS). + + - To launch a Riva server locally, refer to the [Riva Quick Start Guide](https://docs.nvidia.com/deeplearning/riva/user-guide/docs/quick-start-guide.html). + + - In the provided `config.sh` script, set `service_enabled_asr=true` and `service_enabled_tts=true`, and select the desired ASR and TTS languages by adding the appropriate language codes to `asr_language_code` and `tts_language_code`. + + - After the server is running, assign its IP address (or hostname) and port (50051 by default) to `RIVA_API_URI` in `deploy/compose/compose.env`. + + - Alternatively, you can use a hosted Riva API endpoint. You might need to obtain an API key and/or Function ID for access. + + In `deploy/compose/compose.env`, make the following assignments as necessary: + + ```bash + export RIVA_API_URI=":" + export RIVA_API_KEY="" + export RIVA_FUNCTION_ID="" + ``` + +## Get an API Key for the Llama 2 70B API Endpoint + +% api-key-start + +Perform the following steps if you do not already have an API key. +You can use different model API endpoints with the same API key. + +1. Navigate to . 
+ +1. Find the **Llama 2 70B** card and click the card. + + ![Llama 2 70B model card](./images/llama-2-70b-card.png) + +1. Click **Get API Key**. + + ![API section of the model page.](./images/llama-2-generate-key.png) + +1. Click **Generate Key**. + + ![Generate key window.](./images/api-catalog-generate-api-key.png) + +1. Click **Copy Key** and then save the API key. + The key begins with the letters nvapi-. + + ![Key Generated window.](./images/key-generated.png) + +% api-key-end + +## Build and Start the Containers + +1. In the Generative AI examples repository, export this variable in terminal. + + Add the API key for the model endpoint: + + ```shell + export NVIDIA_API_KEY="nvapi=..." + ``` + +2. From the root of the repository, build the containers: + + ```console + $ docker compose --env-file deploy/compose/compose.env -f deploy/compose/rag-app-query-decomposition-agent.yaml build + ``` + +3. Start the containers: + + ```console + $ docker compose --env-file deploy/compose/compose.env -f deploy/compose/rag-app-query-decomposition-agent.yaml up -d + ``` + + *Example Output* + + ```output + ✔ Network nvidia-rag Created + ✔ Container chain-server Started + ✔ Container rag-playground Started + ``` + +4. Start the Milvus vector database: + + ```console + $ docker compose --env-file deploy/compose/compose.env -f deploy/compose/docker-compose-vectordb.yaml up -d milvus + ``` + + *Example Output* + + ```output + ✔ Container milvus-minio Started + ✔ Container milvus-etcd Started + ✔ Container milvus-standalone Started + ``` + +5. Confirm the containers are running: + + ```console + $ docker ps --format "table {{.ID}}\t{{.Names}}\t{{.Status}}" + ``` + + *Example Output* + + ```output + CONTAINER ID NAMES STATUS + 0be0d21b2fee rag-playground Up 33 minutes + 524905ec3870 chain-server Up 33 minutes + 14cb139a2e4a milvus-standalone Up 34 minutes + 7a807d96c113 milvus-minio Up 34 minutes (healthy) + 937e4165e875 milvus-etcd Up 34 minutes (healthy) + ``` + +## Next Steps + +- Access the web interface for the chat server. + Refer to [](./using-sample-web-application.md) for information about using the web interface. + + Ensure that you upload documents and use the knowledge base to answer queries. +- [](./vector-database.md) +- Stop the containers by running `docker compose -f deploy/compose/rag-app-query-decomposition-agent.yaml down` and + `docker compose -f deploy/compose/docker-compose-vectordb.yaml down`. diff --git a/docs/rag/aiplayground.md b/docs/rag/aiplayground.md deleted file mode 100644 index 050bc1599..000000000 --- a/docs/rag/aiplayground.md +++ /dev/null @@ -1,62 +0,0 @@ -# NVIDIA AI Foundation - -**NVIDIA AI Foundation** lets developers to experience state of the art LLMs accelerated by NVIDIA. Developers get **free credits for 10K requests** to any of the available models. - -## Prepare the environment - -1. Navigate to https://catalog.ngc.nvidia.com/ai-foundation-models. - -2. Find the Mixtral x7B model icon and click ``Learn More``. - -![Diagram](./images/image7.png) - -3. Select the ```API``` navigation bar and click on the ```Generate key``` option.. - -![Diagram](./images/image8.png) - -4. Save the generated API key. - -## Deploy - -1. Clone the Generative AI examples Git repository. - -> ⚠️ **NOTE**: This example requires Git Large File Support (LFS) - -``` -$ sudo apt -y install git-lfs -$ git clone git@github.com:NVIDIA/GenerativeAIExamples.git -Cloning into 'GenerativeAIExamples'... -$ cd GenerativeAIExamples/ -$ git lfs pull -``` - -2. 
Add your NGC API key to compose.env to use the NVIDIA endpoint. - -``` -$ cd GenerativeAIExamples - -$ grep NVIDIA_API_KEY deploy/compose/compose.env - export NVIDIA_API_KEY="nvapi-*" -``` - -3. Set the nv-ai-foundation example in compose.env. -``` - export RAG_EXAMPLE="nvidia_ai_foundation" -``` -4. Deploy the developer RAG example via Docker compose. - -``` -$ source deploy/compose/compose.env ; docker compose -f deploy/compose/docker-compose-nv-ai-foundation.yaml build - -$ docker compose -f deploy/compose/docker-compose-nv-ai-foundation.yaml up -d - -$ docker ps --format "table {{.ID}}\t{{.Names}}\t{{.Status}}" -CONTAINER ID NAMES STATUS -70ef27ae4c91 llm-playground Up 56 seconds -4aacfbe89464 chain-server Up 56 seconds -``` - -## Test - -1. Follow steps 1 - 5 in the ["Test" section of example 02](../../RetrievalAugmentedGeneration/README.md#23-test). - diff --git a/docs/rag/api_reference/openapi_schema.json b/docs/rag/api_reference/openapi_schema.json deleted file mode 100644 index ce4779156..000000000 --- a/docs/rag/api_reference/openapi_schema.json +++ /dev/null @@ -1,244 +0,0 @@ -{ - "openapi": "3.0.0", - "info": { - "title": "FastAPI", - "version": "0.1.0" - }, - "paths": { - "/uploadDocument": { - "post": { - "summary": "Upload Document", - "description": "Upload a document to the vector store.", - "operationId": "upload_document_uploadDocument_post", - "requestBody": { - "content": { - "multipart/form-data": { - "schema": { - "$ref": "#/components/schemas/Body_upload_document_uploadDocument_post" - } - } - }, - "required": true - }, - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": {} - } - } - }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/HTTPValidationError" - } - } - } - } - } - } - }, - "/generate": { - "post": { - "summary": "Generate Answer", - "description": "Generate and stream the response to the provided prompt.", - "operationId": "generate_answer_generate_post", - "requestBody": { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/Prompt" - } - } - }, - "required": true - }, - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": {} - } - } - }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/HTTPValidationError" - } - } - } - } - } - } - }, - "/documentSearch": { - "post": { - "summary": "Document Search", - "description": "Search for the most relevant documents for the given search parameters.", - "operationId": "document_search_documentSearch_post", - "requestBody": { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/DocumentSearch" - } - } - }, - "required": true - }, - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { - "items": { - "type": "object" - }, - "type": "array", - "title": "Response Document Search Documentsearch Post" - } - } - } - }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/HTTPValidationError" - } - } - } - } - } - } - } - }, - "components": { - "schemas": { - "Body_upload_document_uploadDocument_post": { - "properties": { - "file": { - "type": "string", - "format": "binary", - "title": "File" - } - }, - "type": "object", 
- "required": [ - "file" - ], - "title": "Body_upload_document_uploadDocument_post" - }, - "DocumentSearch": { - "properties": { - "content": { - "type": "string", - "title": "Content", - "description": "The content or keywords to search for within documents." - }, - "num_docs": { - "type": "integer", - "title": "Num Docs", - "description": "The maximum number of documents to return in the response.", - "default": 4 - } - }, - "type": "object", - "required": [ - "content" - ], - "title": "DocumentSearch", - "description": "Definition of the DocumentSearch API data type." - }, - "HTTPValidationError": { - "properties": { - "detail": { - "items": { - "$ref": "#/components/schemas/ValidationError" - }, - "type": "array", - "title": "Detail" - } - }, - "type": "object", - "title": "HTTPValidationError" - }, - "Prompt": { - "properties": { - "question": { - "type": "string", - "title": "Question", - "description": "The input query/prompt to the pipeline." - }, - "context": { - "type": "string", - "title": "Context", - "description": "Additional context for the question (optional)" - }, - "use_knowledge_base": { - "type": "boolean", - "title": "Use Knowledge Base", - "description": "Whether to use a knowledge base", - "default": true - }, - "num_tokens": { - "type": "integer", - "title": "Num Tokens", - "description": "The maximum number of tokens in the response.", - "default": 50 - } - }, - "type": "object", - "required": [ - "question", - "context" - ], - "title": "Prompt", - "description": "Definition of the Prompt API data type." - }, - "ValidationError": { - "properties": { - "loc": { - "items": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "integer" - } - ] - }, - "type": "array", - "title": "Location" - }, - "msg": { - "type": "string", - "title": "Message" - }, - "type": { - "type": "string", - "title": "Error Type" - } - }, - "type": "object", - "required": [ - "loc", - "msg", - "type" - ], - "title": "ValidationError" - } - } - } - } \ No newline at end of file diff --git a/docs/rag/architecture.md b/docs/rag/architecture.md deleted file mode 100644 index a498e5974..000000000 --- a/docs/rag/architecture.md +++ /dev/null @@ -1,87 +0,0 @@ - -Overview -================================= - -Generative AI enables users to quickly generate new content based on a variety of inputs and is a powerful tool for streamlining the workflow of creatives, engineers, researchers, scientists, and more. The use cases and possibilities span all industries and individuals. Generative AI models can produce novel content like stories, emails, music, images, and videos. - -Here at NVIDIA, we like to utilize our own products to make our lives easier, so we have used generative AI to create an NVIDIA chatbot enhanced with retrieval augmented generation (RAG). This chatbot is designed to assist an NVIDIA employee with answering public relations related questions. The sample dataset includes the last two years of NVIDIA press releases and corporate blog posts. Our development and deployment of that chatbot is the guide to this reference generative AI workflow. - -Generative AI starts with foundational models trained on vast quantities of unlabeled data. **Large language models (LLMs)** are trained on an extensive range of textual data online. These LLMs can understand prompts and generate novel, human-like responses. 
Businesses can build applications to leverage this capability of LLMs; for example creative writing assistants for marketing, document summarization for legal teams, and code writing for software development. - -To create true business value from LLMs, these foundational models need to be tailored to your enterprise use case. In this workflow, we use [RAG](https://blog.langchain.dev/tutorial-chatgpt-over-your-data/) with [Llama2](https://github.com/facebookresearch/llama/), an open source model from Meta, to achieve this. Augmenting an existing AI foundational model provides an advanced starting point and a low-cost solution that enterprises can leverage to generate accurate and clear responses to their specific use case. - -> ⚠️ **NOTE**: -This repository contains multiple examples. The architecture for the default canonical developer rag example is described below. - -This RAG-based reference default chatbot workflow contains: - - - [NVIDIA NeMo framework](https://docs.nvidia.com/nemo-framework/user-guide/latest/index.html) - part of NVIDIA AI Enterprise solution - - [NVIDIA TensorRT-LLM](https://developer.nvidia.com/tensorrt) - for low latency and high throughput inference for LLMs - - [LangChain](https://github.com/langchain-ai/langchain/) and [LlamaIndex](https://www.llamaindex.ai/) for combining language model components and easily constructing question-answering from a company's database - - [Sample Jupyter Notebooks](jupyter_server.md) and [chatbot web application/API calls](./frontend.md) so that you can test the chat system in an interactive manner - - [Milvus](https://milvus.io/docs/install_standalone-docker.md) - Generated embeddings are stored in a vector database. The vector DB used in this workflow is Milvus. Milvus is an open-source vector database capable of NVIDIA GPU accelerated vector searches. - - [e5-large-v2 model](https://huggingface.co/embaas/sentence-transformers-e5-large-v2) from huggingface to generate the embeddings. - - [Llama2](https://github.com/facebookresearch/llama/), an open source model from Meta, to formulate natural responses. - -This RAG chatbot workflow provides a reference for you to build your own enterprise AI solution with minimal effort. This AI workflow was designed to be deployed as a Developer experience using Docker Compose on an NVIDIA AI Enterprise-supported platform, which can be deployed on-prem or using a cloud service provider (CSP). Workflow components are used to deploy models and inference pipeline, integrated together with the additional components as indicated in the diagram below: - -![Diagram](./images/image0.png) - -NVIDIA AI Components -====================== -This reference workflow uses a variety of NVIDIA AI components to customize and deploy the RAG-based chatbot example. - - - [NVIDIA TensorRT-LLM](https://github.com/NVIDIA/TensorRT-LLM) - - [NVIDIA NeMo Inference Container](https://developer.nvidia.com/nemo) - -The following sections describe these NVIDIA AI components further. - -**NVIDIA TensorRT-LLM Optimization** - -A LLM can be optimized using TensorRT-LLM. NVIDIA NeMo uses TensorRT for LLMs (TensorRT-LLM), for deployment which accelerates and maximizes inference performance on the latest LLMs. -In this workflow, we will be leveraging a Llama 2 (13B parameters) chat model. We will convert the foundational model to TensorRT format using TensorRT-LLM for optimized inference. 
- -**NVIDIA NeMo Framework Inference Container** - -With NeMo Framework Inference Container, the optimized LLM can be deployed for high-performance, cost-effective, and low-latency inference. NeMo Framework Inference Container contains modules and scripts to help exporting LLM models to [TensorRT-LLM](https://github.com/NVIDIA/TensorRT-LLM) and deploying them to [Triton Inference Server](https://docs.nvidia.com/deeplearning/triton-inference-server/user-guide/docs/index.html) with easy-to-use APIs. - -Inference Pipeline -==================== -To get started with the inferencing pipeline, we will first connect the customized LLM to a sample proprietary data source. This knowledge can come in many forms: product specifications, HR documents, or finance spreadsheets. Enhancing the model’s capabilities with this knowledge can be done with RAG. - -Since foundational LLMs are not trained on your proprietary enterprise data and are only trained up to a fixed point in time, they need to be augmented with additional data. RAG consists of two processes. First, *retrieval* of data from document repositories, databases, or APIs that are all outside of the foundational model’s knowledge. Second, is the *generation* of responses via Inference. The example used within this workflow is a corporate communications co-pilot that could either ingest source data from storage or by scraping. The following graphic describes an overview of this inference pipeline: - -![Diagram](./images/image1.png) - -**Document Ingestion and Retrieval** - -RAG begins with a knowledge base of relevant up-to-date information. Since data within an enterprise is frequently updated, the ingestion of documents into a knowledge base should be a recurring process and scheduled as a job. Next, content from the knowledge base is passed to an embedding model (e5-large-v2, in the case of this workflow), which converts the content to vectors (referred to as “embeddings”). Generating embeddings is a critical step in RAG; it allows for the dense numerical representations of textual information. These embeddings are stored in a vector database, in this case Milvus, which is [RAFT accelerated](https://developer.nvidia.com/blog/accelerating-vector-search-using-gpu-powered-indexes-with-rapids-raft). - -**User Query and Response Generation** - -When a user query is sent to the inference server, it is converted to an embedding using the embedding model. This is the same embedding model used to convert the documents in the knowledge base (e5-large-v2, in the case of this workflow). The database performs a similarity/semantic search to find the vectors that most closely resemble the user’s intent and provides them to the LLM as enhanced context. Since Milvus is RAFT accelerated, the similarity serach is optimized on the GPU. Lastly, the LLM is used to generate a full answer that’s streamed to the user. This is all done with ease via [LangChain](https://github.com/langchain-ai/langchain/) and [LlamaIndex](https://www.llamaindex.ai) - -The following diagram illustrates the ingestion of documents and generation of responses. - -![Diagram](./images/image2.png) - -LangChain allows you to write LLM wrappers for your own custom LLMs, so we have provided a sample wrapper for streaming responses from a TensorRT-LLM Llama 2 model running on Triton Inference Server. 
This wrapper allows us to leverage LangChain’s standard interface for interacting with LLMs while still achieving vast performance speedup from TensorRT-LLM and scalable and flexible inference from Triton Inference Server. - -A sample chatbot web application is provided in the workflow so that you can test the chat system in an interactive manner. Requests to the chat system are wrapped in API calls, so these can be abstracted to other applications. - -An additional method of customization in the AI Workflow inference pipeline is via a prompt template. A prompt template is a pre-defined recipe for generating prompts for language models. They may contain instructions, few-shot examples, and context appropriate for a given task. In our example, we prompt our model to generate safe and polite responses. - - -**LLM Inference Server** - -The LLM Inference Server uses models stored in a model repository, available locally to serve inference requests. Once they are available in Triton, inference requests are sent from a client application. Python and C++ libraries provide APIs to simplify communication. Clients send HTTP/REST requests directly to Triton using HTTP/REST or gRPC protocols. - -Within this workflow, the Llama2 LLM was optimized using NVIDIA TensorRT for LLMs (TRT-LLM) which accelerates and maximizes inference performance on the latest LLMs. - -**Vector DB** - -Milvus is an open-source vector database built to power embedding similarity search and AI applications. It makes unstructured data from API calls, PDFs, and other documents more accessible by storing them as embeddings. -When content from the knowledge base is passed to an embedding model (e5-large-v2), it converts the content to vectors (referred to as “embeddings”). These embeddings are stored in a vector database. The vector DB used in this workflow is Milvus. Milvus is an open-source vector database capable of NVIDIA GPU accelerated vector searches. - -*Note:: -If needed, see Milvus's [documentation](https://milvus.io/docs/install_standalone-docker.md/) for how a Docker Compose file can be configured for Milvus.* diff --git a/docs/rag/chat_server.md b/docs/rag/chat_server.md deleted file mode 100644 index 224b92d96..000000000 --- a/docs/rag/chat_server.md +++ /dev/null @@ -1,175 +0,0 @@ - -# Chat Server -A sample fastapi based server is provided in the workflow so that you can test the chat system in an interactive manner. -This server wraps calls made to different components and orchestrates the entire flow for all the provided examples. - -This API endpoint allows for several actions: -- [Chat Server](#chat-server) - - [Upload File Endpoint](#upload-file-endpoint) - - [Answer Generation Endpoint](#answer-generation-endpoint) - - [Document Search Endpoint](#document-search-endpoint) -- [Running the chain server](#running-the-chain-server) - -The API server swagger schema can be visualized at ``host-ip:8081/docs``. -You can checkout the openapi standard compatible schema for the endpoints supported [here](./api_reference/openapi_schema.json). - -The following sections describe the API endpoint actions further with relevant examples. - -### Upload File Endpoint -**Summary:** Upload a file. This endpoint should accept a post request with the following JSON in the body: - -```json -{ - "file": (file_path, file_binary_data, mime_type), -} -``` - -The response should be in JSON form. 
It should be a dictionary with a confirmation message: - -```json -{"message": "File uploaded successfully"} -``` - -**Endpoint:** ``/uploadDocument`` - -**HTTP Method:** POST - -**Request:** - -- **Content-Type:** multipart/form-data -- **Required:** Yes - -**Request Body Parameters:** -- ``file`` (Type: File) - The file to be uploaded. - -**Responses:** -- **200 - Successful Response** - - - Description: The file was successfully uploaded. - - Response Body: Empty - -- **422 - Validation Error** - - - Description: There was a validation error with the request. - - Response Body: Details of the validation error. - - - -### Answer Generation Endpoint -**Summary:** Generate an answer to a question. This endpoint should accept a post request with the following JSON content in the body: - -```json -{ - "question": "USER PROMPT", // A string of the prompt provided by the user - "context": "Conversation context to provide to the model.", - "use_knowledge_base": false, // A boolean flag to toggle VectorDB lookups - "num_tokens": 500, // The maximum number of tokens expected in the response. -} -``` - -The response should in JSON form. It should simply be a string of the response. - -```json -"LLM response" -``` - -The chat server must also handle responses being retrieved in chunks as opposed to all at once. The client code for response streaming looks like this: - -```python -with requests.post(url, stream=True, json=data, timeout=10) as req: - for chunk in req.iter_content(16): - yield chunk.decode("UTF-8") -``` - -**Endpoint:** ``/generate`` - -**HTTP Method:** POST - -**Request:** - -- **Content-Type:** application/json -- **Required:** Yes - -**Request Body Parameters:** - -- ``question`` (Type: string) - The question you want to ask. -- ``context`` (Type: string) - Additional context for the question (optional). -- ``use_knowledge_base`` (Type: boolean, Default: true) - Whether to use a knowledge base. -- ``num_tokens`` (Type: integer, Default: 500) - The maximum number of tokens in the response. - -**Responses:** - -- **200 - Successful Response** - - - Description: The answer was successfully generated. - - Response Body: An object containing the generated answer. - -- **422 - Validation Error** - - - Description: There was a validation error with the request. - - Response Body: Details of the validation error. - -### Document Search Endpoint -**Summary:** Search for documents based on content. This endpoint should accept a post request with the following JSON content in the body: - -```json -{ - "content": "USER PROMPT", // A string of the prompt provided by the user - "num_docs": "4", // An integer indicating how many documents should be returned -} -``` - -The response should in JSON form. It should be a list of dictionaries containing the document score and content. - -```json -[ - { - "score": 0.89123, - "content": "The content of the relevant chunks from the vector db.", - }, - ... -] -``` - - -**Endpoint:** ``/documentSearch`` -**HTTP Method:** POST - -**Request:** - -- **Content-Type:** application/json -- **Required:** Yes - -**Request Body Parameters:** - -- ``content`` (Type: string) - The content or keywords to search for within documents. -- ``num_docs`` (Type: integer, Default: 4) - The maximum number of documents to return in the response. - -**Responses:** - -- **200 - Successful Response** - - - Description: Documents matching the search criteria were found. - - Response Body: An object containing the search results. 
- -- **422 - Validation Error** - - - Description: There was a validation error with the request. - - Response Body: Details of the validation error. - - -# Running the chain server -If the web frontend needs to be stood up manually for development purposes, run the following commands: - -- Build the web UI container from source -``` - source deploy/compose/compose.env - docker compose -f deploy/compose/docker-compose.yaml build query -``` -- Run the container which will start the server -``` - source deploy/compose/compose.env - docker compose -f deploy/compose/docker-compose.yaml up query -``` - -- Open the swagger URL at ``http://host-ip:8081`` to try out the exposed endpoints. diff --git a/docs/rag/configuration.md b/docs/rag/configuration.md deleted file mode 100644 index 2d7aa6b9f..000000000 --- a/docs/rag/configuration.md +++ /dev/null @@ -1,85 +0,0 @@ -## Configuration Guide - -### Chain Server Configuration - -In this section, we explore the configurations for the [Chain Server](./chat_server.md) used for the default canonical developer rag example. - -Chain server interaction with other components can be controlled by config. Chain Server interacts with components such as the `milvus` vector store and `triton` server, which hosts the Large Language Model (LLM). Additionally, we'll delve into customization options to fine-tune the behavior of the query server. These options include settings for the embedding model, chunk size, and prompts for generating responses. - -You can refer to [sample config](../../deploy/compose/config.yaml) to see the structure. - -#### Vector Database Configuration -The configuration of the solution which serves as a vector database for storing embeddings. - - url: Configure the HTTP URI where the vector database server is hosted. - -#### LLM server Configuration -LLM Inference server hosts the Large Language Model (LLM) with triton backend. - - server_url: Specify the url of the LLM Inference Server. - - model_name: Provide the name of the model hosted on the Triton server. - Note: Changing the value of this field may need code changes. - - model_engine: An enum specifying the backend name hosting the model. Options currently supported are: - 1. `triton-trt-llm` for using locally deployed LLM models. Follow steps [here](../../RetrievalAugmentedGeneration/README.md#local-llm-setup) to understand how to deploy and use on-prem deployed models. - 2. `nv-ai-foundation` for using NV AI Playground based models. Follow steps [here](../../RetrievalAugmentedGeneration/README.md#1-qa-chatbot----nvidia-ai-foundation-inference-endpoint) to understand how to deploy and use TRT-LLM optimized playground models from cloud. - -#### Text Splitter Configuration -This section covers the settings for the Text Splitter component. - - chunk_size: Define the size at which text should be split before being converted into embeddings. - - chunk_overlap: Specify the overlap between two consecutive text chunks to prevent loss of context. - -#### Embeddings Configuration -The Embeddings section contains information required for generating embeddings. - - model_name: Indicate the name of the model used to generate embeddings. - model_engine: An enum specifying the backend name hosting the model, Currently huggingface and nv-ai-foundation are supported. - dimensions: Integer value specifying the dimensions of the embedding search model from huggingface. - Note: Any change in `model_name`` may also necessitate changes in the model's `dimensions`, which can be adjusted using this field. 
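
Putting the sections above together, a chain server configuration file might look like the sketch below. Key names and nesting are illustrative and should be verified against the sample config linked above; all values (URLs, model names, sizes) are placeholders.

```yaml
# Illustrative sketch only -- verify key names and values against
# deploy/compose/config.yaml; everything below is a placeholder.
vector_store:
  url: "http://milvus:19530"        # HTTP URI where the vector database is hosted

llm:
  server_url: "llm:8001"            # URL of the LLM inference server
  model_name: "ensemble"            # name of the model hosted on Triton
  model_engine: "triton-trt-llm"    # or "nv-ai-foundation"

text_splitter:
  chunk_size: 510
  chunk_overlap: 200

embeddings:
  model_name: "intfloat/e5-large-v2"
  model_engine: "huggingface"
  dimensions: 1024                  # must match the embedding model above
```
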
- -#### Prompts Configuration -Customize prompts used for generating responses. - - chat_template: The chat prompt template guides the model to generate responses for queries. - rag_template: The RAG prompt Template instructs the model to generate responses while leveraging a knowledge base. - -You set path to use this config file to be used by chain server using enviornment variable `APP_CONFIG_FILE`. You can do the same in [compose.env](../../deploy/compose/compose.env) and source the file. - -### Configuring docker compose file for default RAG example -In this section, we will look into the environment variables and parameters that can be configured within the [Docker Compose](../../deploy/compose/docker-compose.yaml) YAML file for the default canonical example. Our system comprises multiple microservices that interact harmoniously to generate responses. These microservices include LLM Inference Server, Jupyter Server, Milvus, Query/chain server, and Frontend. - -#### LLM server Configurations -The LLM Inference Server is used for hosting the Large Language Model (LLM) with triton backend. You can configure the model information using the [compose.env](../../deploy/compose/compose.env) file or by setting the corresponding environment variables. Here is a list of environment variables utilized by the llm inference server: - - MODEL_DIRECTORY: Specifies the path to the model directory where model checkpoints are stored. - MODEL_ARCHITECTURE: Defines the architecture of the model used for deployment. - MODEL_MAX_INPUT_LENGTH: Maximum allowed input length, with a default value of 3000. - MODEL_MAX_OUTPUT_LENGTH: Maximum allowed output lenght, with a default of 512. - INFERENCE_GPU_COUNT: Specifies the GPUs to be used by Triton for model deployment, with the default setting being "all." - -#### Jupyter Server -This server hosts jupyter lab server. This contains notebook explaining the flow of chain server. - -#### Milvus -Milvus serves as a GPU-accelerated vector store database, where we store embeddings generated by the knowledge base. - -#### Query/Chain Server -The Query service is the core component responsible for interacting with the llm inference server and the Milvus server to obtain responses. The environment variables utilized by this container are described as follows: - - APP_MILVUS_URL: Specifies the URL where the Milvus server is hosted. - APP_LLM_SERVERURL: Specifies the URL where the Triton server is hosted. - APP_LLM_MODELNAME: The model name used by the Triton server. - APP_LLM_MODELENGINE: An enum specifying the backend name hosting the model. Options currently supported are: - 1. `triton-trt-llm` if you are using locally deployed LLM models. - 2. `nv-ai-foundation` if you are using NV AI Playground based models. - APP_CONFIG_FILE: Provides the path to the configuration file used by the Chain Server or this container. Defaults to /dev/null - -#### Frontend -The Frontend component is the UI server that interacts with the Query/Chain Server to retrieve responses and provide UI interface to ingest documents. The following environment variables are used by the frontend: - - APP_SERVERURL: Indicates the URL where the Query/Chain Server is hosted. - APP_SERVERPORT: Specifies the port on which the Query/Chain Server operates. - APP_MODELNAME: Name of the Large Language Model utilized for deployment. This information is for display purposes only and does not affect the inference process. 
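
As a concrete reference, the variables described in this section are typically set in `deploy/compose/compose.env` before the stack is started. The excerpt below is illustrative: every value is a placeholder, and some of these variables may instead be set directly in the Docker Compose file.

```
# Illustrative excerpt of deploy/compose/compose.env -- all values are placeholders.
export MODEL_DIRECTORY="/home/user/models/Llama-2-13b-chat-hf"
export MODEL_ARCHITECTURE="llama"
export MODEL_MAX_INPUT_LENGTH=3000
export MODEL_MAX_OUTPUT_LENGTH=512
export INFERENCE_GPU_COUNT="all"

export APP_LLM_MODELENGINE="triton-trt-llm"
export APP_CONFIG_FILE="/dev/null"
```

Source the file (`source deploy/compose/compose.env`) before running the Docker Compose commands so the services pick up these values.
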
diff --git a/docs/rag/evaluation.md b/docs/rag/evaluation.md deleted file mode 100644 index 559424636..000000000 --- a/docs/rag/evaluation.md +++ /dev/null @@ -1,34 +0,0 @@ -# Evaluation Tool - -## Introduction -Evaluation is crucial for retrieval augmented generation (RAG) pipelines as it ensures the accuracy and relevance of information retrieved as well as the generated content. - -There are 3 components needed for evaluating the performance of a RAG pipeline: -1. Data for testing. -2. Automated metrics to measure performance of both the context retrieval and response generation. -3. Human-like evaluation of the generated response from the end-to-end pipeline. - -> ⚠️ **NOTE** -This tool provides a set of notebooks that show examples of how to address these requirements in an automated fashion for the default canonical developer rag example. - -### Synthetic Data Generation -Using an existing knowledge base we can synthetically generate question|answer|context triplets using a LLM. This tool uses the Llama 2 70B model on [Nvidia AI Playground](https://www.nvidia.com/en-us/research/ai-playground/) for data generation. - -### Automated Metrics -[RAGAS](https://github.com/explodinggradients/ragas) is an automated metrics tool for measuring performance of both the retriever and generator. We utilize the Nvidia AI Playground langchain wrapper to run RAGAS evaluation on our example RAG pipeline. - -### LLM-as-a-Judge -We can use LLMs to provide human-like feedback and Likert evaluation scores for full end-to-end RAG pipelines. This tool uses Llama 2 70B as a judge LLM. - -## Deploy -1. Follow steps 1 - 5 in the ["Prepare the environment" section of example 02](../../RetrievalAugmentedGeneration/README.md#21-prepare-the-environment). - -2. Deploy the developer RAG example via Docker compose by following [these steps](../../RetrievalAugmentedGeneration/README.md#22-deploy). - -3. Build and deploy the evaluation service -``` - $ docker compose -f deploy/compose/docker-compose-evaluation.yaml build - $ docker compose -f deploy/compose/docker-compose-evaluation.yaml up -d -``` - -4. Access the notebook server at `http://host-ip:8889` from your web browser and try out the notebooks sequentially starting from [Notebook 1: Synthetic Data Generation for RAG Evaluation](../../tools/evaluation/01_synthetic_data_generation.ipynb) diff --git a/docs/rag/frontend.md b/docs/rag/frontend.md deleted file mode 100644 index 8ce78441b..000000000 --- a/docs/rag/frontend.md +++ /dev/null @@ -1,33 +0,0 @@ -# Web Frontend ------------- -The web frontend provides a UI on top of the [RAG chat server APIs](./chat_server.md). -- Users can chat with the LLM and see responses streamed back for different examples. -- By selecting “Use knowledge base,” the chatbot returns responses augmented with the data that’s been stored in the vector database. -- To store content in the vector database, change the window to “Knowledge Base” in the upper right corner and upload documents. - -![Diagram](./images/image4.jpg) - -# Frontend structure - -At its core, llm-playground is a FastAPI server written in Python. This FastAPI server hosts two [Gradio](https://www.gradio.app/) applications, one for conversing with the model and another for uploading documents. These Gradio pages are wrapped in a static frame created with the Kaizen UI React+Next.js framework and compiled down to static pages. Iframes are used to mount the Gradio applications into the outer frame. 
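
The mounting pattern described above can be sketched with a few lines of FastAPI and Gradio. This is a minimal illustration of the approach, not the actual llm-playground code; the route paths and the toy interfaces are assumptions made for the example.

```python
# Minimal sketch of the FastAPI + Gradio pattern -- not the real llm-playground code.
import gradio as gr
from fastapi import FastAPI

app = FastAPI()

# Toy stand-ins for the real conversation and document-upload UIs.
converse = gr.Interface(fn=lambda msg: f"echo: {msg}", inputs="text", outputs="text")
kb_upload = gr.Interface(fn=lambda f: "received", inputs="file", outputs="text")

# Each Gradio app is served under its own route; the outer static frame then
# embeds these routes with iframes.
app = gr.mount_gradio_app(app, converse, path="/converse")
app = gr.mount_gradio_app(app, kb_upload, path="/kb")

# Run with, for example: uvicorn main:app --host 0.0.0.0 --port 8090
```
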
- -# Running the web UI -If the web frontend needs to be stood up manually for development purposes, run the following commands: - -- Build the web UI container from source -``` - source deploy/compose/compose.env - docker compose -f deploy/compose/docker-compose.yaml build frontend -``` -- Run the container which will start the server -``` - source deploy/compose/compose.env - docker compose -f deploy/compose/docker-compose.yaml up frontend -``` - -- Open the web application at ``http://host-ip:8090`` - -Note: -- If multiple pdf files are being uploaded the expected time of completion as shown in the UI may not be correct. - - diff --git a/docs/rag/images/image8.png b/docs/rag/images/image8.png deleted file mode 100644 index 78ae35cc2..000000000 Binary files a/docs/rag/images/image8.png and /dev/null differ diff --git a/docs/rag/jupyter_server.md b/docs/rag/jupyter_server.md deleted file mode 100644 index 85cdc1684..000000000 --- a/docs/rag/jupyter_server.md +++ /dev/null @@ -1,79 +0,0 @@ -# Jupyter Notebooks -For development and experimentation purposes, the Jupyter notebooks provide guidance to building knowledge augmented chatbots. - -The following Jupyter notebooks are provided with the AI workflow for the default canonical RAG example: - -1. [**LLM Streaming Client**](../../notebooks/01-llm-streaming-client.ipynb) - -This notebook demonstrates how to use a client to stream responses from an LLM deployed to NVIDIA Triton Inference Server with NVIDIA TensorRT-LLM (TRT-LLM). This deployment format optimizes the model for low latency and high throughput inference. - -2. [**Document Question-Answering with LangChain**](../../notebooks/02_langchain_simple.ipynb) - -This notebook demonstrates how to use LangChain to build a chatbot that references a custom knowledge-base. LangChain provides a simple framework for connecting LLMs to your own data sources. It shows how to integrate a TensorRT-LLM to LangChain using a custom wrapper. - -3. [**Document Question-Answering with LlamaIndex**](../../notebooks/03_llama_index_simple.ipynb) - -This notebook demonstrates how to use LlamaIndex to build a chatbot that references a custom knowledge-base. It contains the same functionality as the notebook before, but uses some LlamaIndex components instead of LangChain components. It also shows how the two frameworks can be used together. - -4. [**Advanced Document Question-Answering with LlamaIndex**](../../notebooks/04_llamaindex_hier_node_parser.ipynb) - -This notebook demonstrates how to use LlamaIndex to build a more complex retrieval for a chatbot. The retrieval method shown in this notebook works well for code documentation; it retrieves more contiguous document blocks that preserve both code snippets and explanations of code. - -5. [**Interact with REST FastAPI Server**](../../notebooks/05_dataloader.ipynb) - -This notebook demonstrates how to use the REST FastAPI server to upload the knowledge base and then ask a question without and with the knowledge base. - -6. [**Nvidia AI Endpoint Integration with langchain**](../../notebooks/07_Option(1)_NVIDIA_AI_endpoint_simple.ipynb) -This notebook demonstrates how to build a Retrieval Augmented Generation (RAG) example using the NVIDIA AI endpoint integrated with Langchain, with FAISS as the vector store. - -7. 
[**RAG with langchain and local LLM model from**](../../notebooks/07_Option(2)_minimalistic_RAG_with_langchain_local_HF_LLM.ipynb) -This notebook demonstrates how to plug in a local llm from HuggingFace Hub and build a simple RAG app using langchain. - -8. [**Nvidia AI Endpoint with llamaIndex and Langchain**](../../notebooks/08_Option(1)_llama_index_with_NVIDIA_AI_endpoint.ipynb) -This notebook demonstrates how to plug in a NVIDIA AI Endpoint mixtral_8x7b and embedding nvolveqa_40k, bind these into LlamaIndex with these customizations. - -9. [**Locally deployed model from Hugginface integration with llamaIndex and Langchain**](../../notebooks/08_Option(2)_llama_index_with_HF_local_LLM.ipynb) -This notebook demonstrates how to plug in a local llm from HuggingFace Hub Llama-2-13b-chat-hf and all-MiniLM-L6-v2 embedding from Huggingface, bind these to into LlamaIndex with these customizations. - -10. [**Langchain agent with tools plug in multiple models from NVIDIA AI Endpoint**](../../notebooks/09_Agent_use_tools_leveraging_NVIDIA_AI_endpoints.ipynb) -This notebook demonstrates how to use multiple NVIDIA's AI endpoint's model like `mixtral_8*7b`, `Deplot` and `Neva`. - -# Running the notebooks -If a JupyterLab server needs to be compiled and stood up manually for development purposes, follow the following commands: - -- [Optional] Notebook `7 to 9` require GPUs. If you have a GPU and are trying out notebooks `7-9` update the jupyter-server service in the [docker-compose.yaml](../../deploy/compose/docker-compose.yaml) file to use `./notebooks/Dockerfile.gpu_notebook` as the Dockerfile -``` - jupyter-server: - container_name: notebook-server - image: notebook-server:latest - build: - context: ../../ - dockerfile: ./notebooks/Dockerfile.gpu_notebook -``` - -- [Optional] Notebook from `7-9` may need multiple GPUs. Update [docker-compose.yaml](../../deploy/compose/docker-compose.yaml) to use multiple gpu ids in `device_ids` field below or set `count: all` -``` - jupyter-server: - deploy: - resources: - reservations: - devices: - - driver: nvidia - device_ids: ['0', '1'] - capabilities: [gpu] -``` - -- Build the container -``` - source deploy/compose/compose.env - docker compose -f deploy/compose/docker-compose.yaml build jupyter-server - -``` -- Run the container which starts the notebook server -``` - source deploy/compose/compose.env - docker compose -f deploy/compose/docker-compose.yaml up jupyter-server -``` -- Using a web browser, type in the following URL to access the notebooks. - - ``http://host-ip:8888`` diff --git a/docs/rag/llm_inference_server.md b/docs/rag/llm_inference_server.md deleted file mode 100644 index df3d05444..000000000 --- a/docs/rag/llm_inference_server.md +++ /dev/null @@ -1,27 +0,0 @@ -# NeMo Framework Inference Server - -We use [NeMo Framework Inference Server](https://docs.nvidia.com/nemo-framework/user-guide/latest/deployingthenemoframeworkmodel.html) container which help us to create optimized LLM using TensorRT LLM and deploy using NVIDIA Triton Server for high-performance, cost-effective, and low-latency inference. Within this workflow, We use Llama2 models and LLM Inference Server container contains modules and script required for TRT-LLM conversion of the Llama2 models and deployment using NVIDIA Triton Server. - -> ⚠️ **NOTE**: LLM inference server is used by examples which deploys the model on-prem. 
There are examples in this repository which uses [Nvidia AI foundation models](https://www.nvidia.com/en-in/ai-data-science/foundation-models/) from cloud and may not use this component. - - -# Running the LLM Inference Server - -### Llama2 model deployment: - -- Download Llama2 Chat Model Weights from [Meta](https://ai.meta.com/resources/models-and-libraries/llama-downloads/) or [HuggingFace](https://huggingface.co/meta-llama/Llama-2-13b-chat-hf/). You can check [support matrix](support_matrix.md) for GPU requirements for the deployment. - -- Update [compose.env](../../deploy/compose/compose.env) with MODEL_DIRECTORY as Llama2 model downloaded path and other model parameters as needed. - -- Build the llm inference server container from source -``` - source deploy/compose/compose.env - docker compose -f deploy/compose/docker-compose.yaml build llm -``` -- Run the container which will start the triton server with TRT-LLM optimized Llama2 model -``` - source deploy/compose/compose.env - docker compose -f deploy/compose/docker-compose.yaml up llm -``` - -- Once the optimized Llama2 is deployed in Triton Server, clients can send HTTP/REST or gRPC requests directly to Triton Server. Example implmentation of the client can be found [here](../../integrations/langchain/llms/triton_trt_llm.py). diff --git a/docs/rag/observability.md b/docs/rag/observability.md deleted file mode 100644 index 27d652238..000000000 --- a/docs/rag/observability.md +++ /dev/null @@ -1,107 +0,0 @@ -# RAG Observability Tool -## Introduction -Observability is a crucial aspect that facilitates the monitoring and comprehension of the internal state and behavior of a system or application. Applications based on RAG are intricate systems encompassing the interaction of numerous components. To enhance the performance of these RAG-based applications, observability serves as an efficient mechanism for both monitoring and debugging. - -Following diagram shows high level workflow of how traces are captured in the RAG Example -![RAG with Observability](./images/image9.png) - -The observability stack adds following containers on top of the RAG app containers: -1. **OpenTelemetry Collector**: Responsible for receiving, processing and exporting the traces. -2. **Jaeger**: Acts as OpenTelemetry backend providing storage, query service and visualizer. You can also configure any other OTLP compatible backend such as [Zipkin](https://zipkin.io/), [Prometheus](https://prometheus.io/) etc. To configure any other backend refer to [OpenTelemetry Collector configuration](https://opentelemetry.io/docs/collector/configuration/). -3. **Cassandra**: Persistent storage for traces. Jaeger supports many other [storage backends](https://www.jaegertracing.io/docs/1.18/deployment/#storage-backends) like ElasticSearch, Kafka, and Badger. Please note that for large scale production deployment the Jaeger team recommends Elasticsearch backend over Cassandra . - -## Key terms -1. **Span**: A unit of work within a system, encapsulating information about a specific operation (Eg. LLM call, embedding generation etc). -2. **Traces**: The recording of a request as it goes through a system, tracking every service the request comes in contact with. Multiple spans make a trace logically bound by parent-child relationship. -3. **Root Span**: The first span in a trace, denoting the beginning and end of the entire operation. -4. **Span Attributes**: Key-value pairs a Span may consist of to provide additional context or metadata. -5. 
**Collectors**: Components that process and export telemetry data from instrumented applications. -6. **Context**: Signifies current location within the trace hierarchy. It determines whether a new span initiates a trace or connects to an existing parent span. -7. **Services**: Microservices that generates telemetry data - -Following diagram depicts a typical trace for user query from knowledge base in our RAG example. -![Trace for query from knowledge base](./images/image10.png) - -## Deploy -1. Clone the Generative AI examples Git repository. - -> ⚠️ **NOTE**: This example requires Git Large File Support (LFS) - -``` -$ sudo apt -y install git-lfs -$ git clone git@github.com:NVIDIA/GenerativeAIExamples.git -Cloning into 'GenerativeAIExamples'... -$ cd GenerativeAIExamples/ -$ git lfs pull -``` -2. Update the [OpenTelemetry collector configurations](../../deploy/compose/configs/otel-collector-config.yaml) and [jaeger configurations](../../deploy/compose/configs/jaeger.yaml). - -To know more about available configurations please refer to [OpenTelemetry Collector configurations](https://opentelemetry.io/docs/collector/configuration/) and [Jaeger configurtions](https://github.com/jaegertracing/documentation/blob/main/data/cli/1.52/jaeger-all-in-one-cassandra.yaml) - -3. Update the [compose.env](../../deploy/compose/compose.env). - -4. For the frontend and query services, set the following environment variables in the [docker compose file](../../deploy/compose/docker-compose.yaml): -``` -environment: - OTEL_EXPORTER_OTLP_ENDPOINT: http://otel-collector:4317 - OTEL_EXPORTER_OTLP_PROTOCOL: grpc - ENABLE_TRACING: true -``` - -5. Deploy the developer RAG example via Docker compose. -``` -$ source deploy/compose/compose.env; docker compose -f deploy/compose/docker-compose.yaml build - -$ docker compose -f deploy/compose/docker-compose.yaml up -d - -$ docker ps --format "table {{.ID}}\t{{.Names}}\t{{.Status}}" -CONTAINER ID NAMES STATUS -d11e35ee69f4 llm-playground Up 5 minutes -68f22b3842cb chain-server Up 5 minutes -751dd4fd80ec milvus-standalone Up 5 minutes (healthy) -b435006c95c1 milvus-minio Up 6 minutes (healthy) -9108253d058d notebook-server Up 6 minutes -5315a9dc9eb4 milvus-etcd Up 6 minutes (healthy) -``` - -6. Deploy the observability services -``` -$ docker compose -f deploy/compose/docker-compose-observability.yaml build - -$ docker compose -f deploy/compose/docker-compose-observability.yaml up -d - -$ docker ps --format "table {{.ID}}\t{{.Names}}\t{{.Status}}" -CONTAINER ID NAMES STATUS -beb1582320d6 jaeger Up 5 minutes -674c7bbb367e cassandra Up 6 minutes -d11e35ee69f4 llm-playground Up 5 minutes -68f22b3842cb chain-server Up 5 minutes -751dd4fd80ec milvus-standalone Up 5 minutes (healthy) -b435006c95c1 milvus-minio Up 6 minutes (healthy) -9108253d058d notebook-server Up 6 minutes -5315a9dc9eb4 milvus-etcd Up 6 minutes (healthy) -d314a43074c8 otel-collector Up 6 minutes -``` -7. Access the Jaeger UI at `http://host-ip:16686` from your web browser. - -Below are the screenshots showcasing trace data from the Jaeger UI. - -- Upload document trace: -![upload document trace](./images/image11.png) -- User query using knowledge base trace: -![user query using knowledge base](./images/image12.png) - -## Implementation Details -Currently 2 services viz. frontend and chain-server are instrumented. -### frontend -[tracing.py](../../RetrievalAugmentedGeneration/frontend/frontend/tracing.py) module in frontend application code is responsible for instrumentation. 
At high level it does the following: -- Set up the OpenTelemetry configurations for resource name (i.e frontend), span processor and context propagator -- Provides an instrumentation decorator functions(`instrumentation_wrapper` and `predict_instrumentation_wrapper`) for managing trace context across different services. This decorator function is used with the API functions in [chat_client.py](../../RetrievalAugmentedGeneration/frontend/frontend/chat_client.py) to create new span contexts (that can then be injected in the headers of the request made to the chain server) and log span attributes extracted from the API request. - -### chain-server -[tracing.py](../../RetrievalAugmentedGeneration/common/tracing.py) module in the chain server application code is responsible for instrumentation. At high level it does the following: -- Set up the OpenTelemetry configurations for resource name(i.e chain-server), span processor and context propagator -- Initialize the [LlamaIndex OpenTelemetry callback handler](../../tools/observability/llamaindex/opentelemetry_callback.py) which uses [LlamaIndex callbacks](https://docs.llamaindex.ai/en/stable/module_guides/observability/callbacks/root.html) to track various events like llm calls, chunking, embedding etc -- Provides an instrumentation decorator function (`instrumentation_wrapper`) for managing trace context across different services. This decorator function is used with the API functions in [server.py](../../RetrievalAugmentedGeneration/common/server.py) to extract the trace context present in requests from the frontend service and attach it in the new span created by chain-server. - -**NOTE**: Instrumentation decorator function (`instrumentation_wrapper`) can be used for instrumenting any LlamaIndex application as long as [LlamaIndex OpenTelemetry callback handler](../../tools/observability/llamaindex/opentelemetry_callback.py) is set as global handler in it. diff --git a/docs/rag/support_matrix.md b/docs/rag/support_matrix.md deleted file mode 100644 index cc4d91539..000000000 --- a/docs/rag/support_matrix.md +++ /dev/null @@ -1,36 +0,0 @@ -# GPU Requirements -Large Language Models are a heavily GPU-limited workflow. All LLMs are defined by the number of billions of parameters that make up their networks. For this workflow, we are focusing on the Llama 2 Chat models from Meta. These models come in three different sizes: 7B, 13B, and 70B. All three models perform very well, but the 13B model is a good balance of performance and GPU Memory utilization. - -Llama2-7B-Chat requires about 30GB of GPU memory. - -Llama2-13B-Chat requires about 50GB of GPU memory. - -Llama2-70B-Chat requires about 320GB of GPU memory. - -Llama2-7B-Chat AWQ quantized requires about 25GB of GPU memory. - -Nemotron-8B-Chat-SFT requires about 100GB of GPU memory. - -These resources can be provided by multiple GPUs on the same machine. - -To perform retrieval augmentation, another model must be hosted. This model is much smaller and is called an embedding model. It is responsible for converting a sequence of words to a representation in the form of a vector of numbers. This model requires an additional 2GB of GPU memory. - -In this workflow, Milvus was selected as the Vector Database. It was selected because Milvus has implemented the NVIDIA RAFT libraries that enable GPU acceleration of vector searches. For the Milvus database, allow an additional 4GB of GPU Memory. 
- -# CPU and Memory Requirements -For development purposes, we recommend that at least 10 CPU Cores and 64 GB of RAM are available. - -# Storage Requirements -There are two main drivers for storage consumption in retrieval augmented generation. The model weights and the documents in the vector database. The file size of the model varies on how large the model is. - -Llama2-7B-Chat requires about 30GB of storage. - -Llama2-13B-Chat requires about 50GB of storage. - -Llama2-70B-Chat requires about 150GB of storage. - -Nemotron-8B-Chat-SFT requires about 50GB of storage. - -The file space needed for the vector database varies by how many documents it will store. For development purposes, allocating 10 GB is plenty. - -You will need additionally about 60GB of storage for docker images. diff --git a/docs/requirements.txt b/docs/requirements.txt new file mode 100644 index 000000000..ffbb0e500 --- /dev/null +++ b/docs/requirements.txt @@ -0,0 +1,11 @@ +sphinx<=4.5.1 +sphinxcontrib-applehelp==1.0.4 +sphinxcontrib-devhelp==1.0.2 +sphinxcontrib-htmlhelp==2.0.1 +sphinxcontrib-qthelp==1.0.3 +sphinxcontrib-serializinghtml==1.1.5 +myst-nb==0.17.2 +sphinx-copybutton==0.5.2 +sphinx-rtd-theme==1.0.0 +sphinxcontrib-mermaid==0.9.2 +sphinxcontrib-copydirs@git+https://github.com/mikemckiernan/sphinxcontrib-copydirs.git diff --git a/docs/structured-data.md b/docs/structured-data.md new file mode 100644 index 000000000..55ba0b84b --- /dev/null +++ b/docs/structured-data.md @@ -0,0 +1,181 @@ + + +# Structured Data + +```{contents} +--- +depth: 2 +local: true +backlinks: none +--- +``` + +## Example Features + +This example deploys a developer RAG pipeline for chat Q&A and serves inferencing from an NVIDIA API Catalog endpoint +instead of NVIDIA Triton Inference Server, a local Llama 2 model, or local GPUs. + +Developers get free credits for 10K requests to any of the available models. + +The key difference from the [](./api-catalog.md) example is that this example demonstrates how to use RAG with structured CSV data. + +This example uses models from the NVIDIA API Catalog. +This approach does not require embedding models or vector database solutions. +Instead, the example uses [PandasAI](https://docs.pandas-ai.com/en/latest/) to manage the workflow. + +For ingestion, the query server loads the structured data from a CSV file into a Pandas dataframe. +The query server can ingest multiple CSV files, provided the files have identical columns. +Ingestion of CSV files with differing columns is not supported and results in an exception. + +The core functionality uses a PandasAI agent to extract information from the dataframe. +This agent combines the query with the structure of the dataframe into an LLM prompt. +The LLM then generates Python code to extract the required information from the dataframe. +Subsequently, this generated code is executed on the dataframe and yields an output dataframe. + +To demonstrate the example, sample CSV files are available. +These are part of the structured data example Helm chart and represent a subset of the [Microsoft Azure Predictive Maintenance](https://www.kaggle.com/datasets/arnabbiswas1/microsoft-azure-predictive-maintenance) from Kaggle. +The CSV data retrieval prompt is specifically tuned for three CSV files from this dataset: `PdM_machines.csv`, `PdM_errors.csv`, and `PdM_failures.csv`. +The CSV files to use are specified in the `rag-app-structured-data-chatbot.yaml` Docker Compose file by updating the environment variable `CSV_NAME`. 
+The default value is `PdM_machines`, but can be changed to `PdM_errors` or `PdM_failures`. +Customization of the CSV data retrieval prompt is not supported. + +```{list-table} +:header-rows: 1 + +* - Model + - Embedding + - Framework + - Description + - Multi-GPU + - TRT-LLM + - Model Location + - Triton + - Vector Database + +* - ai-mixtral-8x7b-instruct for response generation + + ai-mixtral-8x7b-instruct for PandasAI + - Not Applicable + - PandasAI + - QA chatbot + - NO + - NO + - API Catalog + - NO + - Not Applicable +``` + +The following figure shows the sample topology: + +- The sample chat bot web application communicates with the chain server. + The chain server sends inference requests to an NVIDIA API Catalog endpoint. +- Optionally, you can deploy NVIDIA Riva. Riva can use automatic speech recognition to transcribe + your questions and use text-to-speech to speak the answers aloud. + +![Using NVIDIA API Catalog endpoints for inference instead of local components.](./images/ai-foundations-topology.png) + +## Prerequisites + +- Clone the Generative AI examples Git repository using Git LFS: + + ```console + $ sudo apt -y install git-lfs + $ git clone git@github.com:NVIDIA/GenerativeAIExamples.git + $ cd GenerativeAIExamples/ + $ git lfs pull + ``` + +- Install Docker Engine and Docker Compose. + Refer to the instructions for [Ubuntu](https://docs.docker.com/engine/install/ubuntu/). + +- Optional: Enable NVIDIA Riva automatic speech recognition (ASR) and text to speech (TTS). + + - To launch a Riva server locally, refer to the [Riva Quick Start Guide](https://docs.nvidia.com/deeplearning/riva/user-guide/docs/quick-start-guide.html). + + - In the provided `config.sh` script, set `service_enabled_asr=true` and `service_enabled_tts=true`, and select the desired ASR and TTS languages by adding the appropriate language codes to `asr_language_code` and `tts_language_code`. + + - After the server is running, assign its IP address (or hostname) and port (50051 by default) to `RIVA_API_URI` in `deploy/compose/compose.env`. + + - Alternatively, you can use a hosted Riva API endpoint. You might need to obtain an API key and/or Function ID for access. + + In `deploy/compose/compose.env`, make the following assignments as necessary: + + ```bash + export RIVA_API_URI=":" + export RIVA_API_KEY="" + export RIVA_FUNCTION_ID="" + ``` + +## Get an API Key for the Mixtral 8x7B Instruct API Endpoint + +```{include} api-catalog.md +:start-after: api-key-start +:end-before: api-key-end +``` + +## Build and Start the Containers + +1. In the Generative AI examples repository, export this variable in terminal. + + Add the API key for the model endpoint: + + ```text + export NVIDIA_API_KEY="nvapi-..." + ``` + +1. From the root of the repository, build the containers: + + ```console + $ docker compose --env-file deploy/compose/compose.env -f deploy/compose/rag-app-structured-data-chatbot.yaml build + ``` + +2. Start the containers: + + ```console + $ docker compose --env-file deploy/compose/compose.env -f deploy/compose/rag-app-structured-data-chatbot.yaml up -d + ``` + + *Example Output* + + ```output + ✔ Network nvidia-rag Created + ✔ Container chain-server Started + ✔ Container rag-playground Started + ``` + +3. 
Confirm the containers are running: + + ```console + $ docker ps --format "table {{.ID}}\t{{.Names}}\t{{.Status}}" + ``` + + *Example Output* + + ```output + CONTAINER ID NAMES STATUS + 39a8524829da rag-playground Up 2 minutes + bfbd0193dbd2 chain-server Up 2 minutes + ``` + +## Next Steps + +- Access the web interface for the chat server. + Refer to [](./using-sample-web-application.md) for information about using the web interface. +- Upload a CSV from the `RetrievalAugmentedGeneration/examples/csv_rag` directory to the knowledge base. +- Enable the **Use knowledge base** checkbox when you submit a question. +- Stop the containers by running `docker compose -f deploy/compose/rag-app-structured-data-chatbot.yaml down`. diff --git a/docs/support-matrix.md b/docs/support-matrix.md new file mode 100644 index 000000000..bf8426dd7 --- /dev/null +++ b/docs/support-matrix.md @@ -0,0 +1,99 @@ + + +# Support Matrix + +```{contents} +--- +depth: 2 +local: true +backlinks: none +--- +``` + +## GPU Requirements + +Large Language Models are a heavily GPU-limited workflow. +All LLMs are defined by the number of billions of parameters that make up their networks. +These generative AI examples focus on the Llama 2 Chat models from Meta. +These models are available in three different sizes: 7B, 13B, and 70B. +All three models perform well, but the 13B model is a good balance of performance and GPU memory utilization. + +```{list-table} +:header-rows: 1 + +* - Model + - GPU Memory Requirement + +* - Llama-2-7B-Chat + - 30 GB + +* - Llama-2-13B-Chat + - 50 GB + +* - Llama-2-70B-Chat + - 320 GB + +* - Llama-2-7B-Chat AWQ Quantized + - 30 GB + +* - Nemotron-8B-Chat-SFT + - 100 GB +``` + +These resources can be provided by multiple GPUs on the same machine. + +To perform retrieval augmentation, an embedding model is required. +The embedding model converts a sequence of words to a representation in the form of a vector of numbers. +This model is much smaller and requires an additional 2GB of GPU memory. + +In the examples, Milvus is set as the default vector database. +Milvus is the default because it can use the NVIDIA RAFT libraries that enable GPU acceleration of vector searches. +For the Milvus database, allow an additional 4GB of GPU Memory. + +## CPU and Memory Requirements + +For development purposes, have at least 10 CPU cores and 64 GB of RAM. + +## Storage Requirements + +The two primary considerations for storage in retrieval augmented generation are the model weights and the documents in the vector database. +The file size of the model varies according to the number of parameters in the model: + +```{list-table} +:header-rows: 1 + +* - Model + - Disk Storage + +* - Llama-2-7B-Chat + - 30 GB + +* - Llama-2-13B-Chat + - 50 GB + +* - Llama-2-70B-Chat + - 150 GB + +* - Nemotron-8B-Chat-SFT + - 50 GB +``` + +The file space needed for the vector database varies by how many documents that you upload. +For development purposes, 10 GB is sufficient. + +You need approximately 60 GB for Docker images. 
diff --git a/docs/swagger-requirements.txt b/docs/swagger-requirements.txt new file mode 100644 index 000000000..6c6d7b073 --- /dev/null +++ b/docs/swagger-requirements.txt @@ -0,0 +1,8 @@ +attr==0.3.2 +deepmerge==1.1.0 +importlib_resources==5.12.0 +jsonschema==2.5.1 +mistune==2.0.5 +picobox==2.2.0 +sphinx_mdinclude==0.5.3 +swagger-plugin-for-sphinx==3.4.0 diff --git a/docs/templates/layout.html b/docs/templates/layout.html new file mode 100644 index 000000000..56aa59053 --- /dev/null +++ b/docs/templates/layout.html @@ -0,0 +1,58 @@ +{% extends '!layout.html' %} + +{% block extrahead %} +{{ super() }} +{% for content in extra_content_head %} +{{ content }} +{% endfor %} +{% endblock %} + + +{%- block sidebartitle %} +{% set logo_dest = pathto(root_doc) %} +{% if logo_target_url %} + {% set logo_dest = pathto(logo_target_url, 1) %} +{% endif %} + + +{%- if logo %} + {#- Not strictly valid HTML, but it's the only way to display/scale + it properly, without weird scripting or heaps of work + #} + {%- if sphinx_version_info < (4, 0) -%} + + {%- else %} + + {%- endif %} +{%- endif %} + +{%- if not theme_logo_only %} +
+ {{ project }} +
+{%- endif %} +
+ +{%- if theme_display_version %} + {%- set nav_version = version %} + {%- if READTHEDOCS and current_version %} + {%- set nav_version = current_version %} + {%- endif %} + {%- if nav_version %} +
+ {{ nav_version }} +
+ {%- endif %} +{%- endif %} + +{%- include "searchbox.html" %} + +{%- endblock %} + + +{% block footer %} +{{ super() }} +{% for content in extra_content_footer %} +{{ content }} +{% endfor %} +{% endblock %} diff --git a/docs/using-sample-web-application.md b/docs/using-sample-web-application.md new file mode 100644 index 000000000..a396a729d --- /dev/null +++ b/docs/using-sample-web-application.md @@ -0,0 +1,85 @@ + + +# Using the Sample Chat Web Application + +```{contents} +--- +depth: 2 +local: true +backlinks: none +--- +``` + +## Prerequisites + +- You deployed one of the samples, such as [](./ai-foundation-models.md) or [](./local-gpu.md). + +## Access the Web Application + +- Connect to the sample web application at `http://:8090`. + + ![Sample chat web application](./images/sample-web-application.png) + +## Use Unstructured Documents as a Knowledge Base + +1. Optional: If you configured your deployment with NVIDIA Riva, check **[X] Enable TTS output** to enable the web application to read aloud the answers to your queries. + + Select the desired ASR language (`English (en-US)` for this test), TTS language (`English (en-US)` for this test) and TTS voice from the dropdown menus below the checkboxes to use the voice-to-voice interaction capabilities. + +1. On the **Converse** tab, enter "How many cores does the Grace superchip contain?" in the chat box and click **Submit**. + + Alternatively, click on the microphone button to the right of the text box and ask the question verbally. + + ![Grace query failure](../../notebooks/imgs/grace_noanswer_with_riva.png) + +1. Upload the sample data to the knowledge base. + + Click the **Knowledge Base** tab and then click **Add File**. + + Navigate to the `dataset.zip` file that is located in the `notebooks` directory. Unzip the archive and upload the PDFs. + +1. Return to **Converse** tab and select **[X] Use knowledge base**. + +1. Reenter the question: "How many cores does the Grace superchip contain?" + + ![Grace query success](../../notebooks/imgs/grace_answer_with_riva.png) + + ```{tip} + The default prompts are optimized for Llama chat model. + If you use a completion model, then you must fine tune the prompts. + ``` + +## Troubleshooting + +If you receive the following "Media devices could not be accessed" error message when you first attempt to transcribe a voice query, perform the following steps. + +![Media device access error window.](./images/media-device-access-error.png) + +1. Open another browser tab and enter `chrome://flags` in the location field. + +1. Enter `insecure origins treated as secure` in the search field. + + ![Browser viewing the chrome://flags URL.](./images/chrome-flags-fix-media-device-access-error.png) + +1. Enter `http://:8090` in the text box and select **Enabled** from the menu. + +1. Click **Relaunch**. + +1. After the browser opens, grant `http://host-ip:8090` access to your microphone. + +1. Retry your request. diff --git a/docs/vector-database.md b/docs/vector-database.md new file mode 100644 index 000000000..85c39ac17 --- /dev/null +++ b/docs/vector-database.md @@ -0,0 +1,143 @@ + + +# Configuring an Alternative Vector Database + +```{contents} +--- +depth: 2 +local: true +backlinks: none +--- +``` + +## Supported Vector Databases + +By default, the Docker Compose files for the examples deploy Milvus as the vector database. +Alternatively, you can deploy pgvector. + +## Configuring pgvector as the Vector Database + +1. 
Edit the Docker Compose file for the example, such as `deploy/compose/rag-app-text-chatbot.yaml`. + + Update the environment variables within the chain server service: + + ```yaml + services: + chain-server: + container_name: chain-server + environment: + APP_VECTORSTORE_NAME: "pgvector" + APP_VECTORSTORE_URL: "pgvector:5432" + POSTGRES_PASSWORD: ${POSTGRES_PASSWORD:-password} + POSTGRES_USER: ${POSTGRES_USER:-postgres} + POSTGRES_DB: ${POSTGRES_DB:-api} + ``` + + The preceding example shows the default values for the database user, password, and database. + To override the defaults, edit the values in the Docker Compose file, or set the values in the `compose.env` file. + +1. Optional: If a container for a vector database is running, stop the container: + + ```console + $ docker compose -f deploy/compose/docker-compose-vectordb.yaml down + ``` + +1. Stop and then start the services: + + ```console + $ docker compose --env-file deploy/compose/compose.env -f deploy/compose/.yaml down + $ docker compose --env-file deploy/compose/compose.env -f deploy/compose/.yaml up -d --remove-orphans + ``` + +1. Start the pgvector container: + + ```console + $ docker compose -f deploy/compose/docker-compose-vectordb.yaml up -d pgvector + ``` + +1. Optional: View the chain server logs to confirm the vector database. + + 1. View the logs: + + ```console + $ docker logs -f chain-server + ``` + + 1. Upload a document to the knowledge base. + Refer to [](./using-sample-web-application.md#use-unstructured-documents-as-a-knowledge-base) for more information. + + 1. Confirm the log output includes the vector database: + + ```output + INFO:example:Ingesting .pdf in vectorDB + INFO:RetrievalAugmentedGeneration.common.utils:Using pgvector as vector store + INFO:RetrievalAugmentedGeneration.common.utils:Using PGVector collection: + ``` + +## Configuring Milvus as the Vector Database + +1. Edit the Docker Compose file for the example, such as `deploy/compose/rag-app-text-chatbot.yaml`. + + Update the environment variables within the chain server service: + + ```yaml + services: + chain-server: + container_name: chain-server + environment: + APP_VECTORSTORE_NAME: "milvus" + APP_VECTORSTORE_URL: "http://milvus:19530" + ``` + +1. Optional: If a container for a vector database is running, stop the container: + + ```console + $ docker compose -f deploy/compose/docker-compose-vectordb.yaml down + ``` + +1. Stop and then start the services: + + ```console + $ docker compose --env-file deploy/compose/compose.env -f deploy/compose/.yaml down + $ docker compose --env-file deploy/compose/compose.env -f deploy/compose/.yaml up -d --remove-orphans + ``` + +1. Start the Milvus container: + + ```console + $ docker compose -f deploy/compose/docker-compose-vectordb.yaml up -d milvus + ``` + +1. Optional: View the chain server logs to confirm the vector database. + + 1. View the logs: + + ```console + $ docker logs -f chain-server + ``` + + 1. Upload a document to the knowledge base. + Refer to [](./using-sample-web-application.md#use-unstructured-documents-as-a-knowledge-base) for more information. + + 1. 
Confirm the log output includes the vector database: + + ```output + INFO:example:Ingesting .pdf in vectorDB + INFO:RetrievalAugmentedGeneration.common.utils:Using milvus as vector store + INFO:RetrievalAugmentedGeneration.common.utils:Using milvus collection: + ``` diff --git a/docs/versions.json b/docs/versions.json new file mode 100644 index 000000000..d29a613c2 --- /dev/null +++ b/docs/versions.json @@ -0,0 +1,8 @@ +{ + "latest": "0.5.0", + "versions": [ + { + "version": "0.5.0" + } + ] +} diff --git a/examples/README.md b/examples/README.md index 773c1585a..72da5d021 100644 --- a/examples/README.md +++ b/examples/README.md @@ -22,7 +22,7 @@ This example leverages a simple [Streamlit](https://streamlit.io/) based UI and pip install -r examples/5_mins_rag_no_gpu/requirements.txt ``` -3. Set your NVIDIA_API_KEY. Follow the steps 1-4 mentioned [here](../docs/rag/aiplayground.md#prepare-the-environment) to get this. +3. Set your NVIDIA_API_KEY. Follow the steps mentioned [here](../docs/ai-foundation-models.md#get-an-api-key-for-the-mixtral-8x7b-instruct-api-endpoint) to get this. ``` export NVIDIA_API_KEY="provide_your_key" ``` @@ -32,6 +32,6 @@ This example leverages a simple [Streamlit](https://streamlit.io/) based UI and streamlit run examples/5_mins_rag_no_gpu/main.py ``` -5. Finally to test the deployed example, goto the URL `http://:8501` in a web browser. Click on `browse files` and select your knowledge source. After selecting click on `Upload!` button to complete the ingestion process. +1. Finally to test the deployed example, goto the URL `http://:8501` in a web browser. Click on `browse files` and select your knowledge source. After selecting click on `Upload!` button to complete the ingestion process. -6. You are all set now! Try out queries pertinent to the knowledge base using text from the UI. +2. You are all set now! Try out queries pertinent to the knowledge base using text from the UI. diff --git a/experimental/README.md b/experimental/README.md index 53472cae3..14a4dc9d5 100644 --- a/experimental/README.md +++ b/experimental/README.md @@ -27,6 +27,18 @@ Experimental examples are sample code and deployments for RAG pipelines that are This example demonstrate the construction of a performance-oriented pipeline that accepts a stream of heterogenous documents, divides the documents into smaller segments or chunks, computes the embedding vector for each of these chunks, and uploads the text chunks along with their associated embeddings to a Vector Database. This pipeline builds on the [Morpheus SDK](https://docs.nvidia.com/morpheus/index.html) to take advantage of end-to-end asynchronous processing. This pipeline showcases pipeline parallelism (including CPU and GPU-accelerated nodes), as well as, a mechanism to horizontally scale out data ingestion workers. +* [NVIDIA Live FM Radio ASR RAG](./fm-asr-streaming-rag) + + This example is a demonstration of a RAG workflow that ingests streaming text derived from live FM radio signals. An SDR signal processing pipeline built with [NVIDIA Holoscan](https://developer.nvidia.com/holoscan-sdk) is used to process I/Q samples sent over UDP. ASR is performed on the processed audio data using [NVIDIA Riva](https://www.nvidia.com/en-us/ai-data-science/products/riva/) and stored in a time-informed FAISS database. 
LangChain connectors are used to access the [NVIDIA AI Foundation Models Endpoint](https://www.nvidia.com/en-us/ai-data-science/foundation-models/) or models running on-prem with [NVIDIA NIM](https://developer.nvidia.com/docs/nemo-microservices/inference/overview.html).
+
+* [NVIDIA ORAN chatbot multimodal Assistant](./oran-chatbot-multimodal/)
+
+  This example is designed to make it extremely easy to set up your own retrieval-augmented generation chatbot for ORAN technical specifications and processes. The backend calls the NVIDIA NeMo Service, which makes it easy to deploy on a thin client or virtual machine (i.e., without a GPU setup).
+
+* [NVIDIA Retrieval Customization](./synthetic-data-retriever-customization/)
+
+  This example demonstrates how Large Language Models (LLMs) can be used to synthetically generate training data, which can then be used to adapt retriever models.
+
 * [NVIDIA Multimodal RAG Assistant](./multimodal_assistant)
 
   This example is able to ingest PDFs, PowerPoint slides, Word and other documents with complex data formats including text, images, slides and tables. It allows users to ask questions through a text interface and optionally with an image query, and it can respond with text and reference images, slides and tables in its response, along with source links and downloads.
diff --git a/experimental/fm-asr-streaming-rag/README.md b/experimental/fm-asr-streaming-rag/README.md
new file mode 100644
index 000000000..7a03b6658
--- /dev/null
+++ b/experimental/fm-asr-streaming-rag/README.md
@@ -0,0 +1,87 @@
+# Streaming FM Radio RAG
+This repository enables live processing of FM baseband I/Q samples, automatic speech recognition (ASR) of the resulting audio, and LLM interaction with the transcribed audio.
+
+If you don't have an SDR capable of receiving FM, that's okay. Code in the `file-replay` container will read in `.wav` audio files, perform signal processing to FM-modulate them, and send the data as UDP packets. From the perspective of the pipeline, this file replay data looks equivalent to data streamed in from an FM source.
+
+![FM chatbot](docs/imgs/chatbot.jpg)
+
+## Tools
+- [NVIDIA Holoscan SDK](https://developer.nvidia.com/holoscan-sdk) - UDP data ingest and signal processing
+- [NVIDIA Riva](https://www.nvidia.com/en-us/ai-data-science/products/riva/) - ASR
+- [NVIDIA AI Foundation Endpoint](https://www.nvidia.com/en-us/ai-data-science/foundation-models/) - Optimized LLM inference running in the cloud
+- [NVIDIA NIM](https://developer.nvidia.com/docs/nemo-microservices/inference/overview.html) - Convert a Hugging Face or NeMo checkpoint to [TRT-LLM](https://github.com/NVIDIA/TensorRT-LLM) and deploy locally with [Triton Inference Server](https://developer.nvidia.com/triton-inference-server)
+- [Docker](https://docker.com) - Tested with versions >=25.0.0
+
+## Hardware and Access Requirements
+- An NVIDIA GPU (or GPUs) capable of running, at minimum, a Riva ASR server and Holoscan signal processing. This setup has been tested with an [RTX A6000](https://www.nvidia.com/en-us/design-visualization/rtx-a6000/), which handles that workload easily. Running LLM inference on the same GPU pushes the A6000's capability, so a dedicated GPU for inference is recommended when deploying locally.
+- An [NVIDIA AI Foundation Endpoint](https://www.nvidia.com/en-us/ai-data-science/foundation-models/) key. The [LangChain integration](https://python.langchain.com/docs/integrations/chat/nvidia_ai_endpoints) is used for LLM inference.
Put your key in `NVIDIA_API_KEY` in `deploy/compose.env`.
+- NVIDIA NIM is in early access; the NIM container is not available to developers outside the EA program.
+- Access to the [NGC catalog](https://catalog.ngc.nvidia.com/).
+
+## Requirements for running live FM
+- An SDR and antenna for downconversion and A2D conversion. Tested with [RTL-SDR Blog V.3](https://www.rtl-sdr.com/rtl-sdr-blog-v-3-dongles-user-guide/).
+- [GNU Radio](https://www.gnuradio.org/) or similar software that can deliver baseband I/Q samples over UDP. See a sample companion file [sample_fm_radio.grc](docs/samples/sample_fm_radio.grc).
+
+## Future Work
+- GNU Radio container for users with an SDR
+
+## Setup
+### Riva ASR
+NVIDIA Riva is required to perform the automated transcriptions. If you have not done so already, install and configure the [NGC-CLI](https://ngc.nvidia.com/setup/installers/cli) tool to obtain the Riva container and API. The Riva installation steps can be found at [Riva-Install](https://docs.nvidia.com/deeplearning/riva/user-guide/docs/quick-start-guide.html). Note that Riva performs a TensorRT build during setup and requires access to the targeted GPU.
+
+Container-based development and deployment are supported. See our sample [sample_riva_config.sh](docs/samples/sample_riva_config.sh) file for an example of how to configure Riva.
+
+### Replay
+Move a `.wav` file into `file-replay/files` and set `REPLAY_FILE` to the file name, relative to the `file-replay/files` directory. For example, `file-replay/files/my-audio.wav` becomes `my-audio.wav`.
+
+### Containers
+The project uses Docker Compose to build and deploy the containers. The environment variables needed to run are in `deploy/compose.env`.
+
+```bash
+source deploy/compose.env
+docker compose -f deploy/docker-compose.yml up --build
+```
+
+Alternatively, use `deploy/scripts/run.sh`.
+
+### GPUs
+The GPU or GPUs used for each container can be specified in `compose.env`. By default, all containers have access to all GPUs.
+
+## NVIDIA NIM
+This repository also provides the tools and frameworks for using NVIDIA NIM to build and deploy TensorRT-LLM models on-prem. See the [documentation for NVIDIA NIM](https://developer.nvidia.com/docs/nemo-microservices/inference/overview.html).
+
+### Running without NIM
+NVIDIA NIM is currently in early access, and you must have access to pull the container referenced in `nim/Dockerfile`. If you don't have early access, or only want to run inference in the cloud, deploy the Docker Compose project without the `nim` service. You can do this in one of the following ways:
+1) Comment out the `nim` block in the compose file.
+2) Use the helper script `deploy/scripts/run-cloud-only.sh`.
+3) Call compose with all other services listed explicitly: `docker compose -f docker-compose.yml up --build sdr frontend server replay`.
+
+### Building TRT-LLM from Hugging Face checkpoint
+NVIDIA NIM uses the `model_repo_generator` command to build a TRT-LLM engine from a Hugging Face checkpoint. Use the helper script `deploy/scripts/nim-model-build.sh` to build the engine. This engine is deployed with the inference microservice command called in the main compose file (`nemollm_inference_ms`).
+
+A sample configuration file for Mistral 7B v0.2 is included in `nim/configs`. Drop your own config in that folder, set the appropriate environment variables, and build using the steps above.
+
+## Intent Detection and Planning
+By default, the Q&A pipeline is designed to infer some basic intent types from the user query, which affects how information is retrieved. [Pydantic](https://docs.pydantic.dev/latest/) is used under the hood to handle all agent-style planning and action decisions (see the sketch after this list). The 3 types of intent are:
+1. Question or comment about a specific topic – “Who do the Boston Bruins play tonight?”
+2. Summarization of recent entries – “What are the main stories from the past hour?”
+3. Ask about a topic at a specific time – “What was being discussed 15 minutes ago?”
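+
+The snippet below is a minimal, illustrative sketch of the intent and time-window types involved (the full definitions live in `chain-server/common.py`, and the classification itself is performed by LLM prompts in `chain-server/prompts.py`); it shows how a query such as “Summarize the past quarter hour” maps to a retrieval window in seconds.
+
+```python
+# Illustrative sketch only; see chain-server/common.py for the full models.
+from datetime import timedelta
+from typing import Literal
+
+from pydantic import BaseModel
+
+class UserIntent(BaseModel):
+    intentType: Literal["SpecificTopic", "RecentSummary", "TimeWindow", "Unknown"]
+
+class TimeResponse(BaseModel):
+    timeNum: float
+    timeUnit: str  # one of "seconds", "minutes", "hours", "days"
+
+    def to_seconds(self) -> float:
+        # timedelta accepts the unit name as a keyword argument
+        return timedelta(**{self.timeUnit: self.timeNum}).total_seconds()
+
+# “Summarize the past quarter hour” -> RecentSummary over the last 900 seconds
+intent = UserIntent(intentType="RecentSummary")
+window = TimeResponse(timeNum=15, timeUnit="minutes")
+assert window.to_seconds() == 900.0
+```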
+The intent of the user query changes what information needs to be retrieved from the database:
+
+![User intent affects retrieval](docs/imgs/intent-retrieval.jpg)
+
+If the user query is classified as either `RecentSummary` or `TimeWindow`, the LLM selected for inference is used to detect the time units being referenced; for example, "Summarize the past quarter hour" is classified as (`RecentSummary`, `900 seconds`).
+
+The decision tree for detection and retrieval is shown below:
+
+![Intent tree](docs/imgs/intent-tree.jpg)
+
+## Recursive summarization
+As a feature to test capability with edge deployments that use smaller models with a reduced KV-cache and smaller context windows, this code can reduce the context window via recursive summarization. When enabled, if the number of entries retrieved exceeds the max entries parameter, the context window is reduced via summarization: for each block of `max_entries` entries, the LLM summarizes the block, returns the result to the context pool, and re-chunks the summarized result.
+
+## Block Diagram Overview
+### File replay
+![Block overview with file replay source](docs/imgs/high-level-replay-overview.jpg)
+### Live FM
+![Block overview with FM source](docs/imgs/high-level-overview.jpg)
\ No newline at end of file
diff --git a/experimental/fm-asr-streaming-rag/chain-server/Dockerfile b/experimental/fm-asr-streaming-rag/chain-server/Dockerfile
new file mode 100644
index 000000000..82f0ddb0a
--- /dev/null
+++ b/experimental/fm-asr-streaming-rag/chain-server/Dockerfile
@@ -0,0 +1,34 @@
+# SPDX-FileCopyrightText: Copyright (c) 2024 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
+# SPDX-License-Identifier: Apache-2.0
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+ARG BASE_IMAGE_URL="nvcr.io/nvidia/pytorch"
+ARG BASE_IMAGE_TAG="23.12-py3"
+
+FROM ${BASE_IMAGE_URL}:${BASE_IMAGE_TAG}
+
+ENV TZ="America/New_York"
+RUN apt-get update && DEBIAN_FRONTEND=noninteractive apt-get install -y tzdata
+
+RUN --mount=type=bind,source=requirements.txt,target=/opt/requirements.txt \
+    pip install --no-cache-dir -r /opt/requirements.txt
+
+COPY . 
/opt + +# RUN apt-get update && apt-get install -y libpq-dev + +RUN apt-get remove python3-pip + +WORKDIR /opt +ENTRYPOINT ["uvicorn", "server:app"] \ No newline at end of file diff --git a/experimental/fm-asr-streaming-rag/chain-server/chains.py b/experimental/fm-asr-streaming-rag/chain-server/chains.py new file mode 100644 index 000000000..79f6097ad --- /dev/null +++ b/experimental/fm-asr-streaming-rag/chain-server/chains.py @@ -0,0 +1,198 @@ +# SPDX-FileCopyrightText: Copyright (c) 2024 NVIDIA CORPORATION & AFFILIATES. All rights reserved. +# SPDX-License-Identifier: Apache-2.0 +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import os +import logging + +from copy import copy +from datetime import datetime, timedelta +from langchain_core.output_parsers import StrOutputParser +from langchain_core.prompts import ChatPromptTemplate +from langchain.docstore.document import Document + +from database import VectorStoreInterface +from common import LLMConfig, TimeResponse, UserIntent +from utils import get_llm, classify, doc_tstamp +from prompts import RAG_PROMPT, INTENT_PROMPT, RECENCY_PROMPT, SUMMARIZATION_PROMPT + +LOG_LEVEL = logging.getLevelName(os.environ.get('CHAIN_LOG_LEVEL', 'WARN').upper()) +logger = logging.getLogger(__name__) +logger.setLevel(LOG_LEVEL) + +# Maximum number of times to attempt recursive summarization (if enabled) +MAX_SUMMARIZATION_ATTEMPTS = 3 + +class RagChain: + def __init__(self, config: LLMConfig, db: VectorStoreInterface): + self.config = config + self.db = db + self.llm = get_llm(config) + self.rag_prompt = ChatPromptTemplate.from_messages([ + ("system", RAG_PROMPT), + ("user", "Transcript: '{context}'\nUser: '{input}'\nAI:"), + ]) + self.chain = self.rag_prompt | self.llm | StrOutputParser() + + def get_chat_chain(self, template): + prompt = ChatPromptTemplate.from_messages([ + ("system", template), + ("user", "{input}"), + ]) + return prompt | self.llm | StrOutputParser() + + def generate(self, docs): + generator = self.chain.stream( + {"context": "\n".join(d.page_content for d in docs), + "input": self.config.question} + ) + for tok in generator: + yield tok + + def answer(self): + if not self.config.use_knowledge_base: + # Just chat then return + chat_prompt = ChatPromptTemplate.from_messages([("user", "{input}")]) + chat_chain = chat_prompt | self.llm | StrOutputParser() + for tok in chat_chain.stream({"input": self.config.question}): + yield tok + return + + # Determine user intent and answer accordingly + intent = classify( + self.config.question, + self.get_chat_chain(INTENT_PROMPT), + UserIntent + ) + + if intent.intentType in ['RecentSummary', 'TimeWindow']: + try: + # Determine the time units user is asking about + recency = classify( + self.config.question, + self.get_chat_chain(RECENCY_PROMPT), + TimeResponse + ) + + # Answer with a summary of the recent entries + if intent.intentType == 'RecentSummary': + yield from self.answer_by_recent(recency) + # Answer a question about entries near some point in the past + elif intent.intentType == 
'TimeWindow': + yield from self.answer_by_past(recency) + return + except Exception as e: + # If there's an exception for some reason, just fall back to basic retrieval + logger.warning( + f"Exception {e} occured trying to answer with {intent.intentType}, " + f"falling back to basic RAG" + ) + intent.intentType = 'SpecificTopic' + + # Do basic RAG with semantic similarity retrieval + if intent is None or intent.intentType != 'SpecificTopic': + logger.warning('Unknown user intent, falling back to basic RAG') + yield from self.answer_by_relevence() + return + + def answer_by_relevence(self): + # Retrieve + docs = self.db.search( + self.config.question, + max_entries=self.config.max_docs, + score_threshold=self.config.threshold + ) + yield f"*Returned {len(docs)} related entries*\n" + + # Output + if not len(docs): + yield "*Try to lower the retrieval threshold or be more specific*" + else: + yield "\n" + yield from self.generate(docs) + + def answer_by_recent(self, recency: TimeResponse): + # Retrieve + seconds = recency.to_seconds() + tstamp = datetime.now() - timedelta(seconds=seconds) + docs = self.db.recent(tstamp) + yield f"*Found {len(docs)} entries from the last {seconds:.0f}s*\n" + + # Handle case when we get too many docs + if len(docs) > self.config.max_docs: + if self.config.allow_summary: + # Use recursive summarization + yield f"*Using summarization to reduce context*\n" + for attempt in range(MAX_SUMMARIZATION_ATTEMPTS): + docs = self.summarize(docs) + yield f"*Reduced to {len(docs)} entries on attempt {attempt+1}*\n" + if len(docs) <= self.config.max_docs: + break + docs = docs[-self.config.max_docs:] + else: + # Just throw some away + docs = docs[-self.config.max_docs:] + oldest = doc_tstamp(docs[0]).second + yield f"*Reduced to last {len(docs)} entries, oldest is from {oldest}s ago*\n" + + # Output + if len(docs): + yield "\n" + yield from self.generate(docs) + + def answer_by_past(self, recency: TimeResponse, window=90): + # Retrieve + seconds = recency.to_seconds() + tstamp = datetime.now() - timedelta(seconds=seconds) + docs = self.db.past(tstamp, window=window) + yield f"*Found {len(docs)} entries from {seconds:.0f}s ago (+/- {window}s)*\n" + + # Handle case when we get too many docs + if len(docs) > self.config.max_docs: + if self.config.allow_summary: + # Use recursive summarization + yield f"*Using summarization to reduce context*\n" + for attempt in range(MAX_SUMMARIZATION_ATTEMPTS): + docs = self.summarize(docs) + yield f"*Reduced to {len(docs)} entries on attempt {attempt+1}*\n" + if len(docs) <= self.config.max_docs: + break + docs = docs[-self.config.max_docs:] + else: + # Just throw some away + sorted_docs = sorted(docs, key=lambda doc: abs(doc_tstamp(doc) - tstamp)) + docs = sorted_docs[:self.config.max_docs] + dt = abs(doc_tstamp(docs[-1]) - tstamp).seconds + yield f"*Reduced to last {len(docs)} entries, furthest is {dt}s away*\n" + + # Output + if len(docs): + yield "\n" + yield from self.generate(docs) + + def summarize(self, docs): + """ Given a set of documents, leverage the LLM to reduce context via summarization + """ + summary_chain = self.get_chat_chain(SUMMARIZATION_PROMPT) + splitter = copy(self.db._text_splitter) + splitter._chunk_overlap = 0 + + # Summarize each chunk of 'max_docs' entries + summary = "" + for i in range(0, len(docs), self.config.max_docs): + k = min(i + self.config.max_docs, len(docs)) + text = " ".join(docs[j].page_content for j in range(i, k)) + summary = f"{summary} {summary_chain.invoke({'input': text})}" + + return 
[Document(page_content=chunk) for chunk in splitter.split_text(summary)] diff --git a/experimental/fm-asr-streaming-rag/chain-server/common.py b/experimental/fm-asr-streaming-rag/chain-server/common.py new file mode 100644 index 000000000..b518f9f4e --- /dev/null +++ b/experimental/fm-asr-streaming-rag/chain-server/common.py @@ -0,0 +1,111 @@ +# SPDX-FileCopyrightText: Copyright (c) 2024 NVIDIA CORPORATION & AFFILIATES. All rights reserved. +# SPDX-License-Identifier: Apache-2.0 +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import numpy as np + +from datetime import datetime, timedelta +from pydantic import BaseModel, Field +from typing import Literal +from langchain_community.utils.math import cosine_similarity +from langchain_community.embeddings import HuggingFaceEmbeddings + +class TextEntry(BaseModel): + """ API to store text in database + """ + transcript: str = Field("Streaming text to store") + timestamp: datetime = Field("Timestamp of text") + +class SearchDocumentConfig(BaseModel): + """ API to do similarity search on database + """ + content: str = Field("Content to search database for") + max_docs: int = Field("Maximum number of documents to return") + threshold: float = Field("Minimum similarity threshold for docs") + +class RecentDocumentConfig(BaseModel): + """ API to return all documents since timestamp + """ + timestamp: datetime = Field("Timestamp of documents to retrieve up to") + max_docs: int = Field("Maximum number of documents to return") + +class PastDocumentConfig(BaseModel): + """ API to return all documents near timestamp, within window seconds + """ + timestamp: datetime = Field("Timestamp of documents to retrieve near") + max_docs: int = Field("Maximum number of documents to return") + window: int = Field( + description="Window (sec) around which documents from timestamp are returned", + default=90 + ) + +class LLMConfig(BaseModel): + """ Definition of the LLMConfig API data type + """ + # Input text + question: str = Field("The input query/prompt to the pipeline.") + context: str = Field( + description="Additional context for the question (optional)", default=None + ) + # Model choice + name: str = Field("Name of LLM instance to use") + engine: str = Field("Name of engine ['nv-ai-foundation', 'triton-trt-llm']") + # Chain parameters + use_knowledge_base: bool = Field( + description="Whether to use a knowledge base", default=True + ) + allow_summary: bool = Field("Use recursive summarization to reduce long contexts") + temperature: float = Field("Temperature of the LLM response") + threshold: float = Field("Minimum similarity threshold for docs") + max_docs: int = Field("Maximum number of documents to return") + num_tokens: int = Field("The maximum number of tokens in the response") + +""" +For cases where an LLM returns a time unit that doesn't match one of the discrete +options, find the closest with cosine similarity. 
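+The comparison is made against HuggingFace embeddings of the valid unit names ('seconds', 'minutes', 'hours', 'days').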
+ +Example: 'min' -> 'minutes' +""" +EMBEDDINGS = HuggingFaceEmbeddings() +VALID_TIME_UNITS = ["seconds", "minutes", "hours", "days"] +TIME_VECTORS = EMBEDDINGS.embed_documents(VALID_TIME_UNITS) + +def sanitize_time_unit(time_unit): + if time_unit in VALID_TIME_UNITS: + return time_unit + + unit_embedding = [EMBEDDINGS.embed_query(time_unit)] + similarity = cosine_similarity(unit_embedding, TIME_VECTORS) + return VALID_TIME_UNITS[np.argmax(similarity)] + +""" +Pydantic classes that are used to detect user intent and plan accordingly +""" +class TimeResponse(BaseModel): + timeNum: float = Field("The number of time units the user asked about") + timeUnit: str = Field("The unit of time the user asked about") + + def to_seconds(self): + """ Return the total number of seconds this represents + """ + self.timeUnit = sanitize_time_unit(self.timeUnit) + return timedelta(**{self.timeUnit: self.timeNum}).total_seconds() + +class UserIntent(BaseModel): + intentType: Literal[ + "SpecificTopic", + "RecentSummary", + "TimeWindow", + "Unknown" + ] = Field("The intent of user's query") \ No newline at end of file diff --git a/experimental/fm-asr-streaming-rag/chain-server/database.py b/experimental/fm-asr-streaming-rag/chain-server/database.py new file mode 100644 index 000000000..151a5eed5 --- /dev/null +++ b/experimental/fm-asr-streaming-rag/chain-server/database.py @@ -0,0 +1,201 @@ +# SPDX-FileCopyrightText: Copyright (c) 2024 NVIDIA CORPORATION & AFFILIATES. All rights reserved. +# SPDX-License-Identifier: Apache-2.0 +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +""" +This is an implementation of a time-informed FAISS database using Hugging Face +Instruct Embeddings. As text is streamed in, it is appended to the most recent +entry, re-chunked, and added to the database with an associated timestamp. This +allows for time-based entry retrieval for cases where recent entries or entries +from a specific point in time are requested. + +Note that this style of implementation won't scale well when using many different +data sources or users querying the database - it is intended to be an example +implementation of what is possible with this sort of streaming workflow. 
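+
+Illustrative usage (hypothetical values, mirroring the interface defined below):
+
+    db = VectorStoreInterface(chunk_size=256, chunk_overlap=32)
+    db.store_streaming_text("latest transcript text", datetime.datetime.now())
+    recent = db.recent(datetime.datetime.now() - datetime.timedelta(minutes=5))
+    similar = db.search("query text", max_entries=4, score_threshold=0.65)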
+""" + +import os +import logging +import faiss +import datetime +import numpy as np + +from typing import List + +from langchain_community.embeddings import HuggingFaceInstructEmbeddings +from langchain_community.docstore import InMemoryDocstore +from langchain_community.vectorstores import FAISS +from langchain.text_splitter import RecursiveCharacterTextSplitter + +LOG_LEVEL = logging.getLevelName(os.environ.get('CHAIN_LOG_LEVEL', 'WARN').upper()) +logger = logging.getLogger(__name__) +logger.setLevel(LOG_LEVEL) + +EMBED_INSTRUCT = "Represent the sentence for retrieval: " +EMBED_QUERY = "Represent the question for retrieving supporting texts from the sentence: " + +class TimeIndex: + """ Manages database entry indices, tying the entry index to its timestamp + """ + def __init__(self): + self.index: List[int] = [] + self.tstamp: np.ndarray = np.array([], dtype=np.datetime64) + + def size(self): + return len(self.index) + + def get(self, i): + return self.index[i], self.tstamp[i] + + def get_range(self, start=None, stop=None): + return self.index[start:stop], self.tstamp[start:stop] + + def reduce_to(self, start=None, stop=None): + self.index, self.tstamp = self.get_range(start, stop) + + def append(self, new_id, new_tstamp): + self.index.append(new_id) + self.tstamp = np.append(self.tstamp, new_tstamp) + + def time_window(self, tstart=None, tend=None): + if not tstart: + tstart = self.tstamp[0] + if not tend: + tend = self.tstamp[-1] + mask = (self.tstamp >= tstart) & (self.tstamp <= tend) + return [self.index[i] for i in np.where(mask)[0]] + + def next_id(self): + return self.index[-1] + 1 if self.size() > 0 else 0 + +class DatabaseManager: + """ Self-managed FAISS database that ties entries to when they were added + """ + def __init__(self, embedding_model, embedding_dim): + self._timeindex = TimeIndex() + self._db_index = faiss.IndexFlatL2(embedding_dim) + self._db = FAISS( + embedding_model, + self._db_index, + InMemoryDocstore({}), + {} + ) + + def size(self): + return self._timeindex.size() + + def pop_back(self): + if self.size() == 0: + return None + + # Get the last document and delete it + idx, _ = self._timeindex.get(-1) + doc = self._db.docstore._dict[idx] + self._db.delete([self._db.index_to_docstore_id[idx]]) + + # Adjust the indices and timestamps + self._timeindex.reduce_to(stop=-1) + return doc + + def pop_front(self): + if self.size() == 0: + return None + + # Get the document and delete it + idx, _ = self._timeindex.get(0) + doc = self._db.docstore._dict[idx] + self._db.delete([self._db.index_to_docstore_id[idx]]) + + # Adjust the indices and timestamps + self._timeindex.reduce_to(start=1) + return doc + + def push_back(self, entry, tstamp): + # Add the entry to the database + new_id = self._timeindex.next_id() + self._db.add_texts( + [entry], ids=[new_id], metadatas=[{'tstamp': tstamp.strftime("%Y-%m-%d %H:%M:%S")}] + ) + self._timeindex.append(new_id, tstamp) + + def as_retriever(self, search_kwargs): + return self._db.as_retriever( + search_type='similarity_score_threshold', + search_kwargs=search_kwargs + ) + + def get_by_time(self, tstart=None, tend=None): + if self.size() == 0: + return [] + indices = self._timeindex.time_window(tstart=tstart, tend=tend) + return [self._db.docstore._dict[i] for i in indices] + +class VectorStoreInterface: + """ Manages interfacing with the vector store + """ + def __init__(self, chunk_size=1024, chunk_overlap=200): + self._text_splitter = RecursiveCharacterTextSplitter( + chunk_size=chunk_size, + chunk_overlap=chunk_overlap, + 
length_function=len + ) + self._embed_model = HuggingFaceInstructEmbeddings( + embed_instruction=EMBED_INSTRUCT, + query_instruction=EMBED_QUERY + ) + embedding_pool = self._embed_model.dict()['client'][1] + self.embedding_dim = embedding_pool.get_config_dict()['word_embedding_dimension'] + self._db_mgr = DatabaseManager(self._embed_model, self.embedding_dim) + + def dbsize(self): + return self._db_mgr.size() + + def store_text(self, text, tstamp): + """ Split text into chunks and store in DB + """ + new_entries = self._text_splitter.split_text(text) + for entry in new_entries: + self._db_mgr.push_back(entry, tstamp) + return { + "status": + f"Added {len(new_entries)} entries. " + + f"Number of total database entries: {self.dbsize()}" + } + + def store_streaming_text(self, text, tstamp): + """ Assume last entry was short, delete it, append it to new text, and re-chunk + """ + prev_doc = self._db_mgr.pop_back() + if prev_doc: + text = f"{prev_doc.page_content} {text}" + return self.store_text(text, tstamp) + + def search(self, query, max_entries=4, score_threshold=0.65): + """ Search DB for similar documents + """ + search_kwargs = {'k': max_entries, 'score_threshold': score_threshold} + retriever = self._db_mgr.as_retriever(search_kwargs) + return [doc for doc in retriever.get_relevant_documents(query)] + + def recent(self, tstamp): + """ Return all entries since tstamp + """ + return self._db_mgr.get_by_time(tstart=tstamp) + + def past(self, tstamp, window=90): + """ Return entries within 'window' seconds of tstamp + """ + tstart = tstamp - datetime.timedelta(seconds=window) + tend = tstamp + datetime.timedelta(seconds=window) + return self._db_mgr.get_by_time(tstart=tstart, tend=tend) \ No newline at end of file diff --git a/experimental/fm-asr-streaming-rag/chain-server/prompts.py b/experimental/fm-asr-streaming-rag/chain-server/prompts.py new file mode 100644 index 000000000..c2c005951 --- /dev/null +++ b/experimental/fm-asr-streaming-rag/chain-server/prompts.py @@ -0,0 +1,111 @@ +# SPDX-FileCopyrightText: Copyright (c) 2024 NVIDIA CORPORATION & AFFILIATES. All rights reserved. +# SPDX-License-Identifier: Apache-2.0 +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from pydantic import BaseModel +from common import UserIntent, TimeResponse + +def format_schema(pydantic_obj: BaseModel): + return str( + pydantic_obj.model_json_schema() + ).replace("\'", "\"").replace("{", "{{").replace("}", "}}") + +def format_json(text: str): + return text.replace("{", "{{").replace("}", "}}") + +RAG_PROMPT = """\ +You are a query answering system specialized in providing accurate responses to \ +questions based on a live radio transcript. Your input includes: +- 'Transcript': a radio transcript that you're currently listening to. +- 'User': the user query that you're responding to. +Your task is to answer the user query by directly referencing and extracting \ +information from these transcripts. Ensure your responses are concise, accurate, \ +and solely based on the provided context. 
+""" + +INTENT_PROMPT = ("""\ +You are an advanced classification system designed to understand and categorize \ +user intent from natural language input. Your sole output is JSON, representing \ +the classification of the intent according to the following specification: + +""" + format_schema(UserIntent) + """\ + +There are 4 options for user intent: 'SpecificTopic', 'RecentSummary', 'TimeWindow', and 'Unknown': +- 'SpecificTopic': If the user is asking about a specific topic, such as a factual question \ +or seeking specific information. Examples: "Who is the president of the US?", "What time is the \ +game tonight?", "What is the weather forecast for tonight?". +- 'RecentSummary': If the user is asking for a summary or overview of content or news within a \ +recent timeframe. Examples: "Can you summarize the last hour of content?", "What have the main \ +topics been over the last 5 minutes?", "Tell me the main stories of the past 2 hours.". +- 'TimeWindow': If the user is asking about the focus of the conversation from a specified time in \ +the past. Examples: "What were they talking about 15 minutes ago?", "What was the focus an hour ago?". +- If the user's intent is not clear, or if the intent cannot be confidently determined, classify \ +this as 'Unknown'. + +Your response should be in JSON format and include the classification type and the \ +original query, with no additional explanations. Follow this JSON structure: + +{{"intentType": }} + +Examples: +"What were they talking about 15 minutes ago?" --> {{"intentType": "TimeWindow"}} +"Can you summarize the last hour of content?" --> {{"intentType": "RecentSummary"}} +"Who is the president of the US?" --> {{"intentType": "SpecificTopic"}} +"Hey there!" --> {{"intentType": "Unknown"}} + +Ensure accuracy in classification by carefully analyzing the user's request. \ +Provide no other information or output." +""") + +# Define recency analysis prompt +recency_examples_json = [ + {'timeNum': 5, 'timeUnit': 'minutes'}, + {'timeNum': 7, 'timeUnit': 'hours'}, + {'timeNum': 2, 'timeUnit': 'days'}, + {'timeNum': 15, 'timeUnit': 'minutes'} +] +recency_examples_obj = [TimeResponse(**ex) for ex in recency_examples_json] +recency_examples_str = [ + "Tell me what's happened in the last {timeNum} {timeUnit}.", + "In the previous {timeNum} {timeUnit}, what are the main highlights?", + "Distill the last {timeNum} {timeUnit} down to a small summary", + "What was the topic {timeNum} {timeUnit} ago?" +] +recency_examples = [ + ex_str.format(timeNum=obj.timeNum, timeUnit=obj.timeUnit) + for (ex_str, obj) in zip(recency_examples_str, recency_examples_obj) +] + +RECENCY_PROMPT = ("""\ +You are an expert at restructuring natural language input into JSON. Your input is +natural language from a user and your output is JSON and nothing else. Provide no +explanations, just JSON. You will respond in JSON per the following specification: + +""" + format_schema(TimeResponse) + """\ + +Here are some example conversions: + +""" + +f"'{recency_examples[0]}' --> '{format_json(recency_examples_obj[0].model_dump_json())}'\n" + +f"'{recency_examples[1]}' --> '{format_json(recency_examples_obj[1].model_dump_json())}'\n" + +f"'{recency_examples[2]}' --> '{format_json(recency_examples_obj[2].model_dump_json())}'\n" + """ + +Convert the user input below into this JSON format. +""") + +SUMMARIZATION_PROMPT = """\ +You are a sophisticated summarization tool designed to condense large blocks \ +of text into a concise summary. 
Given the user text, reduce the character \ +count by distilling into only the most important information. +""" \ No newline at end of file diff --git a/experimental/fm-asr-streaming-rag/chain-server/requirements.txt b/experimental/fm-asr-streaming-rag/chain-server/requirements.txt new file mode 100644 index 000000000..29e71b7ea --- /dev/null +++ b/experimental/fm-asr-streaming-rag/chain-server/requirements.txt @@ -0,0 +1,23 @@ +fastapi==0.104.1 +uvicorn[standard]==0.24.0 +python-multipart==0.0.6 +langchain==0.0.352 +unstructured[all-docs]==0.11.2 +sentence-transformers==2.2.2 +llama-index==0.9.22 +pymilvus==2.3.1 +dataclass-wizard==0.22.2 +opencv-python==4.8.0.74 +minio==7.2.0 +asyncpg==0.29.0 +psycopg2-binary==2.9.9 +pgvector==0.2.4 +langchain-core==0.1.3 +langchain-nvidia-ai-endpoints==0.0.1 +langchain-nvidia-trt==0.0.1rc0 +nemollm==0.3.4 +opentelemetry-sdk==1.21.0 +opentelemetry-api==1.21.0 +opentelemetry-exporter-otlp-proto-grpc==1.21.0 +faiss-cpu==1.7.4 +instructorembedding==1.0.1 \ No newline at end of file diff --git a/experimental/fm-asr-streaming-rag/chain-server/server.py b/experimental/fm-asr-streaming-rag/chain-server/server.py new file mode 100644 index 000000000..b9c8e3ad8 --- /dev/null +++ b/experimental/fm-asr-streaming-rag/chain-server/server.py @@ -0,0 +1,73 @@ +# SPDX-FileCopyrightText: Copyright (c) 2024 NVIDIA CORPORATION & AFFILIATES. All rights reserved. +# SPDX-License-Identifier: Apache-2.0 +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
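+
+# Chain server: a FastAPI app that exposes the streaming vector database
+# (/storeStreamingText, /searchDocuments, /recentDocuments, /pastDocuments) and
+# the RAG chain (/generate). Illustrative request, assuming the service is
+# reachable on the host/port set in deploy/compose.env (8081 by default):
+#
+#   curl -X POST http://localhost:8081/storeStreamingText \
+#        -H "Content-Type: application/json" \
+#        -d '{"transcript": "example text", "timestamp": "2024-01-01T00:00:00"}'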
+ +import os +import logging + +from database import VectorStoreInterface +from chains import RagChain +from common import ( + TextEntry, + SearchDocumentConfig, + RecentDocumentConfig, + PastDocumentConfig, + LLMConfig +) + +from fastapi import FastAPI, Request +from fastapi.responses import JSONResponse, StreamingResponse + +LOG_LEVEL = logging.getLevelName(os.environ.get('CHAIN_LOG_LEVEL', 'WARN').upper()) +logger = logging.getLogger(__name__) +logger.setLevel(LOG_LEVEL) + +app = FastAPI() + +# Create vector database +db = VectorStoreInterface( + chunk_size=int(os.environ.get('DB_CHUNK_SIZE', 256)), + chunk_overlap=int(os.environ.get('DB_CHUNK_OVERLAP', 32)) +) + + +# API for database storage and searching +@app.post("/storeStreamingText") +async def store_streaming_text(request: Request, data: TextEntry) -> JSONResponse: + return JSONResponse( + db.store_streaming_text(data.transcript, tstamp=data.timestamp) + ) + +@app.get("/searchDocuments") +async def search_documents(request: Request, data: SearchDocumentConfig) -> JSONResponse: + docs = db.search( + data.content, max_entries=data.max_docs, score_threshold=data.threshold + ) + return JSONResponse([doc.dict() for doc in docs]) + +@app.get("/recentDocuments") +async def recent_documents(request: Request, data: RecentDocumentConfig) -> JSONResponse: + docs = db.recent(data.timestamp, max_entries=data.max_docs) + return JSONResponse([doc.dict() for doc in docs]) + +@app.get("/pastDocuments") +async def past_documents(request: Request, data: PastDocumentConfig) -> JSONResponse: + docs = db.past(data.timestamp, window=data.window, max_entries=data.max_docs) + return JSONResponse([doc.dict() for doc in docs]) + +# API for LLM interaction +@app.get("/generate") +async def generate_answer(request: Request, config: LLMConfig) -> StreamingResponse: + chain = RagChain(config, db) + return StreamingResponse(chain.answer()) \ No newline at end of file diff --git a/experimental/fm-asr-streaming-rag/chain-server/utils.py b/experimental/fm-asr-streaming-rag/chain-server/utils.py new file mode 100644 index 000000000..8831d8b42 --- /dev/null +++ b/experimental/fm-asr-streaming-rag/chain-server/utils.py @@ -0,0 +1,108 @@ +# SPDX-FileCopyrightText: Copyright (c) 2024 NVIDIA CORPORATION & AFFILIATES. All rights reserved. +# SPDX-License-Identifier: Apache-2.0 +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
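+
+# Helper utilities for the chain server: get_llm() builds the LangChain chat model
+# for the configured engine ('nv-ai-foundation' cloud endpoints or a local
+# 'triton-trt-llm'/NIM OpenAI-compatible server), classify() parses LLM output into
+# a Pydantic object, and sanitize_json() recovers JSON embedded in noisy responses.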
+ +import os +import logging +import json +import re + +from datetime import datetime +from langchain_nvidia_ai_endpoints import ChatNVIDIA +from langchain_community.chat_models import ChatOpenAI +from pydantic import BaseModel + +from common import LLMConfig + +LOG_LEVEL = logging.getLevelName(os.environ.get('CHAIN_LOG_LEVEL', 'WARN').upper()) +logger = logging.getLogger(__name__) +logger.setLevel(LOG_LEVEL) + +def get_llm(config: LLMConfig): + if config.engine == "triton-trt-llm": + openai_port = os.environ.get('NIM_OPENAI_PORT', 9999) + return ChatOpenAI( + model_name=config.name, + temperature=config.temperature, + max_tokens=config.num_tokens, + openai_api_base=f"http://0.0.0.0:{openai_port}/v1/", + openai_api_key="not needed" + ) + elif config.engine == "nv-ai-foundation": + return ChatNVIDIA( + model=config.name, + temperature=config.temperature, + max_tokens=config.num_tokens + ) + else: + raise ValueError(f"Unknown engine {config.engine}") + +def classify(question, chain, pydantic_obj: BaseModel): + """ Parse a question into structured pydantic_obj + """ + output = chain.invoke({"input": question}) + try: + # Try to parse as Pydantic object + result = pydantic_obj.model_validate_json(output) + except ValueError: + try: + # If failed, look for valid JSON inside output + logger.warning(f"Failed to parse initial output {output}") + sanitized_output = sanitize_json(output) + result = pydantic_obj.model_validate_json(sanitized_output) + except ValueError: + # Neither approach worked, return None + logger.error(f"Error parsing output into {pydantic_obj}: '{output}'") + result = None + return result + +def doc_tstamp(doc): + return datetime.strptime(doc.metadata['tstamp'], "%Y-%m-%d %H:%M:%S") + +""" +These are some functions that try to fix some common mistakes LLMs might make +when outputting structured JSON. Rather than immediately giving up, we replace +some incorrect special characters and look for possible JSON matches that are +embeddeded into a wider string. + +Example: + - LLM output: Sure! Here's the JSON you asked for: {'key': value} + - Sanitized: {"key": value} +""" +def sanitize_json(text: str, pydantic_obj: BaseModel=None): + return find_first_valid_json( + replace_special(text), pydantic_obj=pydantic_obj + ) + +def replace_special(text_in: str): + text_out = text_in.replace("\'", "\"") + text_out = text_out.replace("\_", "_") + return text_out + +def find_first_valid_json(text: str, pydantic_obj: BaseModel=None): + # Regex pattern to find substrings that look like JSON objects or arrays + pattern = r'(\{.*?\}|\[.*?\])' + + # Find all substrings that match the pattern + potential_jsons = re.findall(pattern, text, re.DOTALL) + for pj in potential_jsons: + try: + # Attempt to parse the substring as JSON + json.loads(pj) + if pydantic_obj: + pydantic_obj.model_validate_json(pj) + return pj + except (json.JSONDecodeError, ValueError): + continue # if parsing fails, move on to the next substring + return None # if no valid JSON is found, return None \ No newline at end of file diff --git a/experimental/fm-asr-streaming-rag/deploy/compose.env b/experimental/fm-asr-streaming-rag/deploy/compose.env new file mode 100644 index 000000000..1261ce735 --- /dev/null +++ b/experimental/fm-asr-streaming-rag/deploy/compose.env @@ -0,0 +1,48 @@ +#!/bin/bash + +# Setup directories +export DEPLOY_DIR="$( cd "$(dirname "${BASH_SOURCE[0]}")" >/dev/null 2>&1 ; pwd -P )" +export PROJECT_DIR="${DEPLOY_DIR}/.." 
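+
+# NOTE: the deploy scripts `source` this file before calling docker compose, so the
+# values exported here are picked up by deploy/docker-compose.yml. MODEL_DIR below
+# is only used by the optional NIM containers.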
+export MODEL_DIR="/path/to/llm/checkpoints/" # where you keep your model checkpoints + +# Connections +export FRONTEND_URI="localhost:6001" +export FRONTEND_SERVER_PORT="8090" +export CHAIN_SERVER_HOST="0.0.0.0" +export CHAIN_SERVER_PORT="8081" + +# Log levels [optional, default='WARN'] +export SDR_LOG_LEVEL='INFO' +export FRONTEND_LOG_LEVEL='WARN' +export CHAIN_LOG_LEVEL='INFO' + +# LLM chain settings +export NVIDIA_API_KEY="" +export DB_CHUNK_SIZE=1024 +export DB_CHUNK_OVERLAP=128 + +# NIM settings +# Currently configured to build Mistral 7B (with example config) +export LLM="mistralai/Mistral-7B-Instruct-v0.2" # directory of checkpoint, relative to MODEL_DIR +export MODEL_CHECKPOINT="${MODEL_DIR}/${LLM}" +export NIM_MODEL_PATH="${MODEL_DIR}/nim/${LLM}" +export NIM_CONFIG_FILE="${PROJECT_DIR}/nim/configs/mistral-7b.yaml" +export NIM_OPENAI_PORT=9999 + +# File replay settings +# If a replay file is provided, the 'file-replay' container will replay a +# .wav audio file as I/Q FM samples transmitted over UDP in real-time, mimicking +# the input from a radio. +export REPLAY_FILE="" # WAV file to replay. Should be located in file-replay/files. +# export SDR_IP="0.0.0.0" # [optional] Check sdr-holoscan.params.network_rx.ip_addr +# export SDR_PORT=5005 # [optional] Check sdr-holoscan.params.network_rx.dst_port +# export SDR_MAX_PKT_SZ=1472 # [optional] Check sdr-holoscan.params.network_rx.max_payload_size +# export SDR_SAMPLE_RATE=1000000 # [optional] Check sdr-holoscan.params.sensor.sample_rate +# export REPLAY_TIME=0 # [optional] If non-zero, loops for REPLAY_TIME seconds + +# Specify GPUs (Riva by default uses 0) +# export SDR_GPU=0 # [optional, default='all'] +# export FRONTEND_GPU=0 # [optional, default='all'] +# export CHAIN_GPU=0 # [optional, default='all'] +# export REPLAY_GPU=0 # [optional, default='all'] +# export NIM_GPU=0 # [optional, default='all'] diff --git a/experimental/fm-asr-streaming-rag/deploy/docker-compose-nim-build.yml b/experimental/fm-asr-streaming-rag/deploy/docker-compose-nim-build.yml new file mode 100644 index 000000000..a05e8ed72 --- /dev/null +++ b/experimental/fm-asr-streaming-rag/deploy/docker-compose-nim-build.yml @@ -0,0 +1,54 @@ +# SPDX-FileCopyrightText: Copyright (c) 2024 NVIDIA CORPORATION & AFFILIATES. All rights reserved. +# SPDX-License-Identifier: Apache-2.0 +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
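+
+# Builds the TRT-LLM engine for NIM by running `model_repo_generator` against the
+# mounted Hugging Face checkpoint and config. Typically launched through
+# deploy/scripts/nim-model-build.sh, which is roughly equivalent to:
+#
+#   source deploy/compose.env
+#   docker compose -f deploy/docker-compose.yml -f deploy/docker-compose-nim-build.yml up --build nim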
+ +version: '3' + +services: + nim: + container_name: fm-rag-nim + image: nim:latest + build: + context: ${PROJECT_DIR?:source compose.env}/nim + dockerfile: Dockerfile + + volumes: + - ${MODEL_CHECKPOINT}:/huggingface-dir + - ${NIM_MODEL_PATH}:/model-store + - ${NIM_CONFIG_FILE}:/model_config.yaml + + environment: + NIM_OPENAI_PORT: ${NIM_OPENAI_PORT} + + ports: + - "${NIM_OPENAI_PORT}:${NIM_OPENAI_PORT}" + expose: + - "${NIM_OPENAI_PORT}" + + # Start inference server model generator + command: > + model_repo_generator llm + --verbose + --yaml_config_file=/model_config.yaml + + # Enable GPU usage + runtime: nvidia + shm_size: 8gb + deploy: + resources: + reservations: + devices: + - driver: nvidia + device_ids: ['${NIM_GPU:-all}'] + capabilities: [gpu] \ No newline at end of file diff --git a/experimental/fm-asr-streaming-rag/deploy/docker-compose.yml b/experimental/fm-asr-streaming-rag/deploy/docker-compose.yml new file mode 100644 index 000000000..977b24d25 --- /dev/null +++ b/experimental/fm-asr-streaming-rag/deploy/docker-compose.yml @@ -0,0 +1,188 @@ +# SPDX-FileCopyrightText: Copyright (c) 2024 NVIDIA CORPORATION & AFFILIATES. All rights reserved. +# SPDX-License-Identifier: Apache-2.0 +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +version: '3' + +services: + sdr: + container_name: fm-rag-sdr + image: fm-rag-sdr-holoscan:latest + build: + context: ${PROJECT_DIR?:source compose.env}/sdr-holoscan + dockerfile: Dockerfile + + restart: "no" + + environment: + TZ: ${TIMEZONE:-America/New_York} + SDR_LOG_LEVEL: ${SDR_LOG_LEVEL:-WARN} + FRONTEND_URI: ${FRONTEND_URI} + DATABASE_URI: ${CHAIN_SERVER_HOST}:${CHAIN_SERVER_PORT} + + working_dir: /workspace/ + command: "python sdr-holoscan/app.py" + + network_mode: host + devices: + - "/dev/bus/usb:/dev/bus/usb" + - "/dev/snd:/dev/snd" + + # Enable GPU usage + runtime: nvidia + shm_size: 8gb + deploy: + resources: + reservations: + devices: + - driver: nvidia + device_ids: ['${SDR_GPU:-all}'] + capabilities: [gpu] + + frontend: + container_name: fm-rag-frontend + image: fm-rag-frontend:latest + build: + context: ${PROJECT_DIR?:source compose.env}/frontend + dockerfile: Dockerfile + + command: --port ${FRONTEND_SERVER_PORT} + + environment: + TZ: ${TIMEZONE:-America/New_York} + FRONTEND_LOG_LEVEL: ${FRONTEND_LOG_LEVEL:-WARN} + FRONTEND_URI: ${FRONTEND_URI} + APP_SERVERURL: http://localhost + APP_SERVERPORT: ${CHAIN_SERVER_PORT} + + ports: + - "${CHAIN_SERVER_PORT}:${CHAIN_SERVER_PORT}" + expose: + - "${CHAIN_SERVER_PORT}" + network_mode: host + + deploy: + resources: + reservations: + devices: + - driver: nvidia + device_ids: ['${FRONTEND_GPU:-all}'] + capabilities: [gpu] + + server: + container_name: fm-rag-chain-server + image: fm-rag-chain-server:latest + build: + context: ${PROJECT_DIR?:source compose.env}/chain-server + dockerfile: Dockerfile + + command: --host ${CHAIN_SERVER_HOST} --port ${CHAIN_SERVER_PORT} + + environment: + TZ: ${TIMEZONE:-America/New_York} + NVIDIA_API_KEY: ${NVIDIA_API_KEY:-} + CHAIN_LOG_LEVEL: ${CHAIN_LOG_LEVEL:-WARN} 
+ DB_CHUNK_SIZE: ${DB_CHUNK_SIZE:-256} + DB_CHUNK_OVERLAP: ${DB_CHUNK_OVERLAP:-32} + + ports: + - "8081:8081" + expose: + - "8081" + network_mode: host + + deploy: + resources: + reservations: + devices: + - driver: nvidia + device_ids: ['${CHAIN_GPU:-all}'] + capabilities: [gpu] + + replay: + container_name: fm-rag-file-replay + image: fm-rag-file-replay:latest + build: + context: ${PROJECT_DIR?:source compose.env}/file-replay + dockerfile: Dockerfile + + environment: + TZ: ${TIMEZONE:-America/New_York} + + volumes: + - ${PROJECT_DIR}/file-replay/files:/workspace/files + + working_dir: /workspace/ + command: > + python wav_replay.py + --file-name ${REPLAY_FILE} + --dst-ip ${SDR_IP:-"0.0.0.0"} + --dst-port ${SDR_PORT:-5005} + --sample-rate ${SDR_SAMPLE_RATE:-1000000} + --packet-size ${SDR_MAX_PKT_SZ:-1472} + --total-time ${REPLAY_TIME:-0} + + network_mode: host + devices: + - "/dev/bus/usb:/dev/bus/usb" + - "/dev/snd:/dev/snd" + + # Enable GPU usage + runtime: nvidia + shm_size: 8gb + deploy: + resources: + reservations: + devices: + - driver: nvidia + device_ids: ['${REPLAY_GPU:-all}'] + capabilities: [gpu] + + nim: + container_name: fm-rag-nim + image: nim:latest + build: + context: ${PROJECT_DIR?:source compose.env}/nim + dockerfile: Dockerfile + + volumes: + - ${MODEL_CHECKPOINT}:/huggingface-dir + - ${NIM_MODEL_PATH}:/model-store + - ${NIM_CONFIG_FILE}:/model_config.yaml + + environment: + NIM_OPENAI_PORT: ${NIM_OPENAI_PORT} + + ports: + - "${NIM_OPENAI_PORT}:${NIM_OPENAI_PORT}" + expose: + - "${NIM_OPENAI_PORT}" + + # Start inference server + command: > + nemollm_inference_ms + --model mistral_7b + --openai_port=${NIM_OPENAI_PORT} + --num_gpus=1 + + # Enable GPU usage + runtime: nvidia + shm_size: 8gb + deploy: + resources: + reservations: + devices: + - driver: nvidia + device_ids: ['${NIM_GPU:-all}'] + capabilities: [gpu] \ No newline at end of file diff --git a/experimental/fm-asr-streaming-rag/deploy/scripts/nim-model-build.sh b/experimental/fm-asr-streaming-rag/deploy/scripts/nim-model-build.sh new file mode 100755 index 000000000..fb0885209 --- /dev/null +++ b/experimental/fm-asr-streaming-rag/deploy/scripts/nim-model-build.sh @@ -0,0 +1,7 @@ +#!/bin/bash +export THIS_DIR="$( cd "$(dirname "$0")" >/dev/null 2>&1 ; pwd -P )" +source $THIS_DIR/../compose.env + +docker compose \ + -f ${DEPLOY_DIR}/docker-compose.yml \ + -f ${DEPLOY_DIR}/docker-compose-nim-build.yml up --build nim \ No newline at end of file diff --git a/experimental/fm-asr-streaming-rag/deploy/scripts/run-cloud-only.sh b/experimental/fm-asr-streaming-rag/deploy/scripts/run-cloud-only.sh new file mode 100755 index 000000000..7717b602f --- /dev/null +++ b/experimental/fm-asr-streaming-rag/deploy/scripts/run-cloud-only.sh @@ -0,0 +1,7 @@ +#!/bin/bash +export THIS_DIR="$( cd "$(dirname "$0")" >/dev/null 2>&1 ; pwd -P )" +source $THIS_DIR/../compose.env + +docker compose \ + -f ${DEPLOY_DIR}/docker-compose.yml up --build \ + sdr frontend server replay \ No newline at end of file diff --git a/experimental/fm-asr-streaming-rag/deploy/scripts/run.sh b/experimental/fm-asr-streaming-rag/deploy/scripts/run.sh new file mode 100755 index 000000000..8fa84d9d8 --- /dev/null +++ b/experimental/fm-asr-streaming-rag/deploy/scripts/run.sh @@ -0,0 +1,5 @@ +#!/bin/bash +export THIS_DIR="$( cd "$(dirname "$0")" >/dev/null 2>&1 ; pwd -P )" +source $THIS_DIR/../compose.env + +docker compose -f ${DEPLOY_DIR}/docker-compose.yml up --build \ No newline at end of file diff --git a/experimental/fm-asr-streaming-rag/docs/imgs/chatbot.jpg 
b/experimental/fm-asr-streaming-rag/docs/imgs/chatbot.jpg new file mode 100755 index 000000000..f137dc5f9 Binary files /dev/null and b/experimental/fm-asr-streaming-rag/docs/imgs/chatbot.jpg differ diff --git a/experimental/fm-asr-streaming-rag/docs/imgs/high-level-overview.jpg b/experimental/fm-asr-streaming-rag/docs/imgs/high-level-overview.jpg new file mode 100755 index 000000000..e3d815db5 Binary files /dev/null and b/experimental/fm-asr-streaming-rag/docs/imgs/high-level-overview.jpg differ diff --git a/experimental/fm-asr-streaming-rag/docs/imgs/high-level-replay-overview.jpg b/experimental/fm-asr-streaming-rag/docs/imgs/high-level-replay-overview.jpg new file mode 100755 index 000000000..6902a04e7 Binary files /dev/null and b/experimental/fm-asr-streaming-rag/docs/imgs/high-level-replay-overview.jpg differ diff --git a/experimental/fm-asr-streaming-rag/docs/imgs/intent-retrieval.jpg b/experimental/fm-asr-streaming-rag/docs/imgs/intent-retrieval.jpg new file mode 100755 index 000000000..f1c218e15 Binary files /dev/null and b/experimental/fm-asr-streaming-rag/docs/imgs/intent-retrieval.jpg differ diff --git a/experimental/fm-asr-streaming-rag/docs/imgs/intent-tree.jpg b/experimental/fm-asr-streaming-rag/docs/imgs/intent-tree.jpg new file mode 100755 index 000000000..83874f9b0 Binary files /dev/null and b/experimental/fm-asr-streaming-rag/docs/imgs/intent-tree.jpg differ diff --git a/experimental/fm-asr-streaming-rag/docs/samples/sample_fm_radio.grc b/experimental/fm-asr-streaming-rag/docs/samples/sample_fm_radio.grc new file mode 100644 index 000000000..b20324425 --- /dev/null +++ b/experimental/fm-asr-streaming-rag/docs/samples/sample_fm_radio.grc @@ -0,0 +1,729 @@ +options: + parameters: + author: '' + catch_exceptions: 'True' + category: '[GRC Hier Blocks]' + cmake_opt: '' + comment: '' + copyright: '' + description: '' + gen_cmake: 'On' + gen_linking: dynamic + generate_options: qt_gui + hier_block_src_path: '.:' + id: fm_radio + max_nouts: '0' + output_language: python + placement: (0,0) + qt_qss_theme: '' + realtime_scheduling: '' + run: 'True' + run_command: '{python} -u {filename}' + run_options: prompt + sizing_mode: fixed + thread_safe_setters: '' + title: FM Radio with audio sink + window_size: (1000,1000) + states: + bus_sink: false + bus_source: false + bus_structure: null + coordinate: [8, 8] + rotation: 0 + state: enabled + +blocks: +- name: cutoff_freq + id: variable_qtgui_range + parameters: + comment: '' + gui_hint: '' + label: '' + min_len: '200' + orient: QtCore.Qt.Horizontal + rangeType: float + start: '100' + step: 10e3 + stop: 1e6 + value: 100e3 + widget: counter_slider + states: + bus_sink: false + bus_source: false + bus_structure: null + coordinate: [536, 20.0] + rotation: 0 + state: true +- name: freq + id: variable_qtgui_range + parameters: + comment: '' + gui_hint: '' + label: '' + min_len: '200' + orient: QtCore.Qt.Horizontal + rangeType: float + start: 88e6 + step: 100e3 + stop: 108e6 + value: 89.7e6 + widget: counter_slider + states: + bus_sink: false + bus_source: false + bus_structure: null + coordinate: [392, 20.0] + rotation: 0 + state: true +- name: samp_rate + id: variable_qtgui_range + parameters: + comment: '' + gui_hint: '' + label: '' + min_len: '200' + orient: QtCore.Qt.Horizontal + rangeType: float + start: 10e3 + step: 10e3 + stop: 8e6 + value: starting_sample_rate + widget: counter_slider + states: + bus_sink: false + bus_source: false + bus_structure: null + coordinate: [232, 100.0] + rotation: 0 + state: true +- name: volume + id: 
variable_qtgui_range + parameters: + comment: '' + gui_hint: '' + label: '' + min_len: '200' + orient: QtCore.Qt.Horizontal + rangeType: float + start: '0' + step: '0.01' + stop: '20.0' + value: '0.05' + widget: counter_slider + states: + bus_sink: false + bus_source: false + bus_structure: null + coordinate: [664, 20.0] + rotation: 0 + state: true +- name: analog_wfm_rcv_0 + id: analog_wfm_rcv + parameters: + affinity: '' + alias: '' + audio_decimation: '1' + comment: '' + maxoutbuf: '0' + minoutbuf: '0' + quad_rate: samp_rate + states: + bus_sink: false + bus_source: false + bus_structure: null + coordinate: [680, 428.0] + rotation: 0 + state: enabled +- name: audio_sink_0 + id: audio_sink + parameters: + affinity: '' + alias: '' + comment: '' + device_name: plughw:CARD=C3422WE,DEV=0 + num_inputs: '1' + ok_to_block: 'True' + samp_rate: '48000' + states: + bus_sink: false + bus_source: false + bus_structure: null + coordinate: [1296, 428.0] + rotation: 0 + state: enabled +- name: blocks_multiply_const_vxx_0 + id: blocks_multiply_const_vxx + parameters: + affinity: '' + alias: '' + comment: '' + const: volume + maxoutbuf: '0' + minoutbuf: '0' + type: float + vlen: '1' + states: + bus_sink: false + bus_source: false + bus_structure: null + coordinate: [1104, 436.0] + rotation: 0 + state: enabled +- name: low_pass_filter_0 + id: low_pass_filter + parameters: + affinity: '' + alias: '' + beta: '6.76' + comment: '' + cutoff_freq: cutoff_freq + decim: '1' + gain: '1' + interp: '1' + maxoutbuf: '0' + minoutbuf: '0' + samp_rate: samp_rate + type: fir_filter_ccf + width: '1000' + win: window.WIN_HAMMING + states: + bus_sink: false + bus_source: false + bus_structure: null + coordinate: [472, 388.0] + rotation: 0 + state: enabled +- name: network_udp_sink_0 + id: network_udp_sink + parameters: + addr: 127.0.0.1 + affinity: '' + alias: '' + comment: '' + header: '1' + payloadsize: '1472' + port: '5005' + send_eof: 'True' + type: complex + vlen: '1' + states: + bus_sink: false + bus_source: false + bus_structure: null + coordinate: [376, 540.0] + rotation: 0 + state: enabled +- name: note_0 + id: note + parameters: + alias: '' + comment: '' + note: Top flow works, sends output to speaker + states: + bus_sink: false + bus_source: false + bus_structure: null + coordinate: [800, 100.0] + rotation: 0 + state: true +- name: note_0_0 + id: note + parameters: + alias: '' + comment: '' + note: This needs to scale to 48KHz + states: + bus_sink: false + bus_source: false + bus_structure: null + coordinate: [1040, 116.0] + rotation: 0 + state: true +- name: qtgui_freq_sink_x_0 + id: qtgui_freq_sink_x + parameters: + affinity: '' + alias: '' + alpha1: '1.0' + alpha10: '1.0' + alpha2: '1.0' + alpha3: '1.0' + alpha4: '1.0' + alpha5: '1.0' + alpha6: '1.0' + alpha7: '1.0' + alpha8: '1.0' + alpha9: '1.0' + autoscale: 'False' + average: '1.0' + axislabels: 'True' + bw: samp_rate + color1: '"blue"' + color10: '"dark blue"' + color2: '"red"' + color3: '"green"' + color4: '"black"' + color5: '"cyan"' + color6: '"magenta"' + color7: '"yellow"' + color8: '"dark red"' + color9: '"dark green"' + comment: '' + ctrlpanel: 'False' + fc: freq + fftsize: '1024' + freqhalf: 'True' + grid: 'False' + gui_hint: '' + label: Relative Gain + label1: '' + label10: '''''' + label2: '''''' + label3: '''''' + label4: '''''' + label5: '''''' + label6: '''''' + label7: '''''' + label8: '''''' + label9: '''''' + legend: 'True' + maxoutbuf: '0' + minoutbuf: '0' + name: '""' + nconnections: '1' + norm_window: 'False' + showports: 'False' + 
tr_chan: '0' + tr_level: '0.0' + tr_mode: qtgui.TRIG_MODE_FREE + tr_tag: '""' + type: complex + units: dB + update_time: '0.10' + width1: '1' + width10: '1' + width2: '1' + width3: '1' + width4: '1' + width5: '1' + width6: '1' + width7: '1' + width8: '1' + width9: '1' + wintype: window.WIN_BLACKMAN_hARRIS + ymax: '10' + ymin: '-140' + states: + bus_sink: false + bus_source: false + bus_structure: null + coordinate: [416, 276.0] + rotation: 0 + state: enabled +- name: rational_resampler_xxx_0 + id: rational_resampler_xxx + parameters: + affinity: '' + alias: '' + comment: '' + decim: int(3*samp_rate/48e3) + fbw: '0' + interp: '3' + maxoutbuf: '0' + minoutbuf: '0' + taps: '[]' + type: fff + states: + bus_sink: false + bus_source: false + bus_structure: null + coordinate: [888, 412.0] + rotation: 0 + state: enabled +- name: rtlsdr_source_0 + id: rtlsdr_source + parameters: + affinity: '' + alias: '' + ant0: '' + ant1: '' + ant10: '' + ant11: '' + ant12: '' + ant13: '' + ant14: '' + ant15: '' + ant16: '' + ant17: '' + ant18: '' + ant19: '' + ant2: '' + ant20: '' + ant21: '' + ant22: '' + ant23: '' + ant24: '' + ant25: '' + ant26: '' + ant27: '' + ant28: '' + ant29: '' + ant3: '' + ant30: '' + ant31: '' + ant4: '' + ant5: '' + ant6: '' + ant7: '' + ant8: '' + ant9: '' + args: '""' + bb_gain0: '20' + bb_gain1: '20' + bb_gain10: '20' + bb_gain11: '20' + bb_gain12: '20' + bb_gain13: '20' + bb_gain14: '20' + bb_gain15: '20' + bb_gain16: '20' + bb_gain17: '20' + bb_gain18: '20' + bb_gain19: '20' + bb_gain2: '20' + bb_gain20: '20' + bb_gain21: '20' + bb_gain22: '20' + bb_gain23: '20' + bb_gain24: '20' + bb_gain25: '20' + bb_gain26: '20' + bb_gain27: '20' + bb_gain28: '20' + bb_gain29: '20' + bb_gain3: '20' + bb_gain30: '20' + bb_gain31: '20' + bb_gain4: '20' + bb_gain5: '20' + bb_gain6: '20' + bb_gain7: '20' + bb_gain8: '20' + bb_gain9: '20' + bw0: '12500' + bw1: '0' + bw10: '0' + bw11: '0' + bw12: '0' + bw13: '0' + bw14: '0' + bw15: '0' + bw16: '0' + bw17: '0' + bw18: '0' + bw19: '0' + bw2: '0' + bw20: '0' + bw21: '0' + bw22: '0' + bw23: '0' + bw24: '0' + bw25: '0' + bw26: '0' + bw27: '0' + bw28: '0' + bw29: '0' + bw3: '0' + bw30: '0' + bw31: '0' + bw4: '0' + bw5: '0' + bw6: '0' + bw7: '0' + bw8: '0' + bw9: '0' + clock_source0: '' + clock_source1: '' + clock_source2: '' + clock_source3: '' + clock_source4: '' + clock_source5: '' + clock_source6: '' + clock_source7: '' + comment: '' + corr0: '0' + corr1: '0' + corr10: '0' + corr11: '0' + corr12: '0' + corr13: '0' + corr14: '0' + corr15: '0' + corr16: '0' + corr17: '0' + corr18: '0' + corr19: '0' + corr2: '0' + corr20: '0' + corr21: '0' + corr22: '0' + corr23: '0' + corr24: '0' + corr25: '0' + corr26: '0' + corr27: '0' + corr28: '0' + corr29: '0' + corr3: '0' + corr30: '0' + corr31: '0' + corr4: '0' + corr5: '0' + corr6: '0' + corr7: '0' + corr8: '0' + corr9: '0' + dc_offset_mode0: '0' + dc_offset_mode1: '0' + dc_offset_mode10: '0' + dc_offset_mode11: '0' + dc_offset_mode12: '0' + dc_offset_mode13: '0' + dc_offset_mode14: '0' + dc_offset_mode15: '0' + dc_offset_mode16: '0' + dc_offset_mode17: '0' + dc_offset_mode18: '0' + dc_offset_mode19: '0' + dc_offset_mode2: '0' + dc_offset_mode20: '0' + dc_offset_mode21: '0' + dc_offset_mode22: '0' + dc_offset_mode23: '0' + dc_offset_mode24: '0' + dc_offset_mode25: '0' + dc_offset_mode26: '0' + dc_offset_mode27: '0' + dc_offset_mode28: '0' + dc_offset_mode29: '0' + dc_offset_mode3: '0' + dc_offset_mode30: '0' + dc_offset_mode31: '0' + dc_offset_mode4: '0' + dc_offset_mode5: '0' + dc_offset_mode6: '0' + 
dc_offset_mode7: '0' + dc_offset_mode8: '0' + dc_offset_mode9: '0' + freq0: freq + freq1: freq + 12500 + freq10: 100e6 + freq11: 100e6 + freq12: 100e6 + freq13: 100e6 + freq14: 100e6 + freq15: 100e6 + freq16: 100e6 + freq17: 100e6 + freq18: 100e6 + freq19: 100e6 + freq2: 100e6 + freq20: 100e6 + freq21: 100e6 + freq22: 100e6 + freq23: 100e6 + freq24: 100e6 + freq25: 100e6 + freq26: 100e6 + freq27: 100e6 + freq28: 100e6 + freq29: 100e6 + freq3: 100e6 + freq30: 100e6 + freq31: 100e6 + freq4: 100e6 + freq5: 100e6 + freq6: 100e6 + freq7: 100e6 + freq8: 100e6 + freq9: 100e6 + gain0: '30' + gain1: '10' + gain10: '10' + gain11: '10' + gain12: '10' + gain13: '10' + gain14: '10' + gain15: '10' + gain16: '10' + gain17: '10' + gain18: '10' + gain19: '10' + gain2: '10' + gain20: '10' + gain21: '10' + gain22: '10' + gain23: '10' + gain24: '10' + gain25: '10' + gain26: '10' + gain27: '10' + gain28: '10' + gain29: '10' + gain3: '10' + gain30: '10' + gain31: '10' + gain4: '10' + gain5: '10' + gain6: '10' + gain7: '10' + gain8: '10' + gain9: '10' + gain_mode0: 'True' + gain_mode1: 'False' + gain_mode10: 'False' + gain_mode11: 'False' + gain_mode12: 'False' + gain_mode13: 'False' + gain_mode14: 'False' + gain_mode15: 'False' + gain_mode16: 'False' + gain_mode17: 'False' + gain_mode18: 'False' + gain_mode19: 'False' + gain_mode2: 'False' + gain_mode20: 'False' + gain_mode21: 'False' + gain_mode22: 'False' + gain_mode23: 'False' + gain_mode24: 'False' + gain_mode25: 'False' + gain_mode26: 'False' + gain_mode27: 'False' + gain_mode28: 'False' + gain_mode29: 'False' + gain_mode3: 'False' + gain_mode30: 'False' + gain_mode31: 'False' + gain_mode4: 'False' + gain_mode5: 'False' + gain_mode6: 'False' + gain_mode7: 'False' + gain_mode8: 'False' + gain_mode9: 'False' + if_gain0: '20' + if_gain1: '20' + if_gain10: '20' + if_gain11: '20' + if_gain12: '20' + if_gain13: '20' + if_gain14: '20' + if_gain15: '20' + if_gain16: '20' + if_gain17: '20' + if_gain18: '20' + if_gain19: '20' + if_gain2: '20' + if_gain20: '20' + if_gain21: '20' + if_gain22: '20' + if_gain23: '20' + if_gain24: '20' + if_gain25: '20' + if_gain26: '20' + if_gain27: '20' + if_gain28: '20' + if_gain29: '20' + if_gain3: '20' + if_gain30: '20' + if_gain31: '20' + if_gain4: '20' + if_gain5: '20' + if_gain6: '20' + if_gain7: '20' + if_gain8: '20' + if_gain9: '20' + iq_balance_mode0: '0' + iq_balance_mode1: '0' + iq_balance_mode10: '0' + iq_balance_mode11: '0' + iq_balance_mode12: '0' + iq_balance_mode13: '0' + iq_balance_mode14: '0' + iq_balance_mode15: '0' + iq_balance_mode16: '0' + iq_balance_mode17: '0' + iq_balance_mode18: '0' + iq_balance_mode19: '0' + iq_balance_mode2: '0' + iq_balance_mode20: '0' + iq_balance_mode21: '0' + iq_balance_mode22: '0' + iq_balance_mode23: '0' + iq_balance_mode24: '0' + iq_balance_mode25: '0' + iq_balance_mode26: '0' + iq_balance_mode27: '0' + iq_balance_mode28: '0' + iq_balance_mode29: '0' + iq_balance_mode3: '0' + iq_balance_mode30: '0' + iq_balance_mode31: '0' + iq_balance_mode4: '0' + iq_balance_mode5: '0' + iq_balance_mode6: '0' + iq_balance_mode7: '0' + iq_balance_mode8: '0' + iq_balance_mode9: '0' + maxoutbuf: '0' + minoutbuf: '0' + nchan: '1' + num_mboards: '1' + sample_rate: samp_rate + sync: sync + time_source0: '' + time_source1: '' + time_source2: '' + time_source3: '' + time_source4: '' + time_source5: '' + time_source6: '' + time_source7: '' + type: fc32 + states: + bus_sink: false + bus_source: false + bus_structure: null + coordinate: [64, 348.0] + rotation: 0 + state: enabled +- name: starting_sample_rate + 
id: parameter + parameters: + alias: '' + comment: '' + hide: none + label: starting_sample_rate + short_id: '' + type: '' + value: 1e6 + states: + bus_sink: false + bus_source: false + bus_structure: null + coordinate: [232, 20.0] + rotation: 0 + state: true + +connections: +- [analog_wfm_rcv_0, '0', rational_resampler_xxx_0, '0'] +- [blocks_multiply_const_vxx_0, '0', audio_sink_0, '0'] +- [low_pass_filter_0, '0', analog_wfm_rcv_0, '0'] +- [rational_resampler_xxx_0, '0', blocks_multiply_const_vxx_0, '0'] +- [rtlsdr_source_0, '0', low_pass_filter_0, '0'] +- [rtlsdr_source_0, '0', network_udp_sink_0, '0'] +- [rtlsdr_source_0, '0', qtgui_freq_sink_x_0, '0'] + +metadata: + file_format: 1 + grc_version: 3.10.5.1 diff --git a/experimental/fm-asr-streaming-rag/docs/samples/sample_riva_config.sh b/experimental/fm-asr-streaming-rag/docs/samples/sample_riva_config.sh new file mode 100644 index 000000000..c2bee6e61 --- /dev/null +++ b/experimental/fm-asr-streaming-rag/docs/samples/sample_riva_config.sh @@ -0,0 +1,300 @@ +# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. +# +# NVIDIA CORPORATION and its licensors retain all intellectual property +# and proprietary rights in and to this software, related documentation +# and any modifications thereto. Any use, reproduction, disclosure or +# distribution of this software and related documentation without an express +# license agreement from NVIDIA CORPORATION is strictly prohibited. + +# GPU family of target platform. Supported values: tegra, non-tegra +riva_target_gpu_family="non-tegra" + +# Name of tegra platform that is being used. Supported tegra platforms: orin, xavier +riva_tegra_platform="orin" + +# Enable or Disable Riva Services +service_enabled_asr=true +service_enabled_nlp=false +service_enabled_tts=false +service_enabled_nmt=false + +# Enable Riva Enterprise +# If enrolled in Enterprise, enable Riva Enterprise by setting configuration +# here. You must explicitly acknowledge you have read and agree to the EULA. +# RIVA_API_KEY= +# RIVA_API_NGC_ORG= +# RIVA_EULA=accept + +# Language code to fetch models of a specify language +# Currently only ASR supports languages other than English +# Supported language codes: ar-AR, en-US, en-GB, de-DE, es-ES, es-US, fr-FR, hi-IN, it-IT, ja-JP, ru-RU, ko-KR, pt-BR, zh-CN +# for any language other than English, set service_enabled_nlp and service_enabled_tts to False +# for multiple languages enter space separated language codes. +language_code=("en-US") + +# ASR acoustic model architecture +# Supported values are: conformer, conformer_unified (ja-JP only), conformer_xl (en-US + amd64 only), citrinet_1024, citrinet_256 (en-US + arm64 only), jasper (en-US + amd64 only), quartznet (en-US + amd64 only) +asr_acoustic_model=("conformer") + +# Specify one or more GPUs to use +# specifying more than one GPU is currently an experimental feature, and may result in undefined behaviours. +gpus_to_use="device=0" + +# Specify the encryption key to use to deploy models +MODEL_DEPLOY_KEY="tlt_encode" + +# Locations to use for storing models artifacts +# +# If an absolute path is specified, the data will be written to that location +# Otherwise, a docker volume will be used (default). +# +# riva_init.sh will create a `rmir` and `models` directory in the volume or +# path specified. +# +# RMIR ($riva_model_loc/rmir) +# Riva uses an intermediate representation (RMIR) for models +# that are ready to deploy but not yet fully optimized for deployment. 
Pretrained +# versions can be obtained from NGC (by specifying NGC models below) and will be +# downloaded to $riva_model_loc/rmir by `riva_init.sh` +# +# Custom models produced by NeMo or TLT and prepared using riva-build +# may also be copied manually to this location $(riva_model_loc/rmir). +# +# Models ($riva_model_loc/models) +# During the riva_init process, the RMIR files in $riva_model_loc/rmir +# are inspected and optimized for deployment. The optimized versions are +# stored in $riva_model_loc/models. The riva server exclusively uses these +# optimized versions. +riva_model_loc="riva-model-repo" + +if [[ $riva_target_gpu_family == "tegra" ]]; then + riva_model_loc="`pwd`/model_repository" +fi + +# The default RMIRs are downloaded from NGC by default in the above $riva_rmir_loc directory +# If you'd like to skip the download from NGC and use the existing RMIRs in the $riva_rmir_loc +# then set the below $use_existing_rmirs flag to true. You can also deploy your set of custom +# RMIRs by keeping them in the riva_rmir_loc dir and use this quickstart script with the +# below flag to deploy them all together. +use_existing_rmirs=false + +# Ports to expose for Riva services +riva_speech_api_port="50051" + +# NGC orgs +riva_ngc_org="nvidia" +riva_ngc_team="riva" +riva_ngc_image_version="2.12.1" +riva_ngc_model_version="2.12.0" + +# Pre-built models listed below will be downloaded from NGC. If models already exist in $riva-rmir +# then models can be commented out to skip download from NGC + +########## ASR MODELS ########## + +models_asr=() + +for lang_code in ${language_code[@]}; do + modified_lang_code="${lang_code/-/_}" + modified_lang_code=${modified_lang_code,,} + + # Setting default Conformer Mandarin and Japanese models to greedy decoder due to high high latency in os2s. + decoder="" + if [[ ${asr_acoustic_model} == "conformer" ]]; then + if [[ ${lang_code} == "zh-CN" ]]; then + decoder="_gre" + fi + fi + + if [[ ${asr_acoustic_model} == "conformer_xl" && ${lang_code} != "en-US" ]]; then + echo "Conformer-XL acoustic model is only available for language code en-US." + exit 1 + fi + + if [[ ${asr_acoustic_model} == "conformer_unified" && ${lang_code} != "ja-JP" ]]; then + echo "Unified Conformer acoustic model is only available for language code ja-JP." + exit 1 + fi + + if [[ $riva_target_gpu_family == "tegra" ]]; then + + if [[ ${asr_acoustic_model} == "jasper" || \ + ${asr_acoustic_model} == "quartznet" || \ + ${asr_acoustic_model} == "conformer_xl" ]]; then + echo "Conformer-XL, Jasper and Quartznet models are not available for arm64 architecture" + exit 1 + fi + + if [[ ${asr_acoustic_model} == "citrinet_256" && ${lang_code} != "en-US" ]]; then + echo "For arm64 architecture, citrinet_256 acoustic model is only available for language code en-US." 
+ exit 1 + fi + + models_asr+=( + ### Streaming w/ CPU decoder, best latency configuration + "${riva_ngc_org}/${riva_ngc_team}/models_asr_${asr_acoustic_model}_${modified_lang_code}_str:${riva_ngc_model_version}-${riva_target_gpu_family}-${riva_tegra_platform}" + + ### Offline w/ CPU decoder + # "${riva_ngc_org}/${riva_ngc_team}/rmir_asr_${asr_acoustic_model}_${modified_lang_code}_ofl${decoder}:${riva_ngc_model_version}" + ) + else + + if [[ ${asr_acoustic_model} != "conformer" && \ + ${asr_acoustic_model} != "conformer_unified" && \ + ${asr_acoustic_model} != "conformer_xl" && \ + ${asr_acoustic_model} != "citrinet_1024" && \ + ${asr_acoustic_model} != "jasper" && \ + ${asr_acoustic_model} != "quartznet" ]]; then + echo "For amd64 architecture, valid acoustic models are conformer, conformer_unified, conformer_xl, citrinet_1024, jasper and quartznet." + exit 1 + fi + + if [[ (${asr_acoustic_model} == "jasper" || \ + ${asr_acoustic_model} == "quartznet") && \ + ${lang_code} != "en-US" ]]; then + echo "jasper and quartznet acoustic models are only available for language code en-US." + exit 1 + fi + + models_asr+=( + ### Streaming w/ CPU decoder, best latency configuration + "${riva_ngc_org}/${riva_ngc_team}/rmir_asr_${asr_acoustic_model}_${modified_lang_code}_str${decoder}:${riva_ngc_model_version}" + + ### Streaming w/ CPU decoder, best throughput configuration + # "${riva_ngc_org}/${riva_ngc_team}/rmir_asr_${asr_acoustic_model}_${modified_lang_code}_str_thr${decoder}:${riva_ngc_model_version}" + + ### Offline w/ CPU decoder + "${riva_ngc_org}/${riva_ngc_team}/rmir_asr_${asr_acoustic_model}_${modified_lang_code}_ofl${decoder}:${riva_ngc_model_version}" + ) + fi + + ### Punctuation model + if [[ ${asr_acoustic_model} != "conformer_unified" ]]; then + if [[ $riva_target_gpu_family == "tegra" ]]; then + models_asr+=( + "${riva_ngc_org}/${riva_ngc_team}/models_nlp_punctuation_bert_base_${modified_lang_code}:${riva_ngc_model_version}-${riva_target_gpu_family}-${riva_tegra_platform}" + ) + else + models_asr+=( + "${riva_ngc_org}/${riva_ngc_team}/rmir_nlp_punctuation_bert_base_${modified_lang_code}:${riva_ngc_model_version}" + ) + fi + fi +done + +### Speaker diarization model +models_asr+=( +# "${riva_ngc_org}/${riva_ngc_team}/rmir_diarizer_offline:${riva_ngc_model_version}" +) + +########## NLP MODELS ########## + +if [[ $riva_target_gpu_family == "tegra" ]]; then + models_nlp=( + ### BERT Base Intent Slot model for misty domain fine-tuned on weather, smalltalk/personality, poi/map datasets. + "${riva_ngc_org}/${riva_ngc_team}/models_nlp_intent_slot_misty_bert_base:${riva_ngc_model_version}-${riva_target_gpu_family}-${riva_tegra_platform}" + + ### DistilBERT Intent Slot model for misty domain fine-tuned on weather, smalltalk/personality, poi/map datasets. + # "${riva_ngc_org}/${riva_ngc_team}/models_nlp_intent_slot_misty_distilbert:${riva_ngc_model_version}-${riva_target_gpu_family}-${riva_tegra_platform}" + ) +else + models_nlp=( + ### Bert base Punctuation model + "${riva_ngc_org}/${riva_ngc_team}/rmir_nlp_punctuation_bert_base_en_us:${riva_ngc_model_version}" + + ### BERT base Named Entity Recognition model fine-tuned on GMB dataset with class labels LOC, PER, ORG etc. + "${riva_ngc_org}/${riva_ngc_team}/rmir_nlp_named_entity_recognition_bert_base:${riva_ngc_model_version}" + + ### BERT Base Intent Slot model fine-tuned on weather dataset. 
+ "${riva_ngc_org}/${riva_ngc_team}/rmir_nlp_intent_slot_bert_base:${riva_ngc_model_version}" + + ### BERT Base Question Answering model fine-tuned on Squad v2. + "${riva_ngc_org}/${riva_ngc_team}/rmir_nlp_question_answering_bert_base:${riva_ngc_model_version}" + + ### Megatron345M Question Answering model fine-tuned on Squad v2. + # "${riva_ngc_org}/${riva_ngc_team}/rmir_nlp_question_answering_megatron:${riva_ngc_model_version}" + + ### Bert base Text Classification model fine-tuned on 4class (weather, meteorology, personality, nomatch) domain model. + "${riva_ngc_org}/${riva_ngc_team}/rmir_nlp_text_classification_bert_base:${riva_ngc_model_version}" + ) +fi + +########## TTS MODELS ########## + +if [[ $riva_target_gpu_family == "tegra" ]]; then + models_tts=( + ### This model has been trained with energy conditioning and International Phonetic Alphabet (IPA) for inference and training. + "${riva_ngc_org}/${riva_ngc_team}/models_tts_fastpitch_hifigan_en_us_ipa:${riva_ngc_model_version}-${riva_target_gpu_family}-${riva_tegra_platform}" + + #"${riva_ngc_org}/${riva_ngc_team}/models_tts_radtts_hifigan_en_us_ipa:${riva_ngc_model_version}-${riva_target_gpu_family}-${riva_tegra_platform}" + + ### This model uses the ARPABET for inference and training. + # "${riva_ngc_org}/${riva_ngc_team}/models_tts_fastpitch_hifigan_en_us:${riva_ngc_model_version}-${riva_target_gpu_family}-${riva_tegra_platform}" + ) +else + models_tts=( + ### These models have been trained with energy conditioning and use the International Phonetic Alphabet (IPA) for inference and training. + "${riva_ngc_org}/${riva_ngc_team}/rmir_tts_fastpitch_hifigan_en_us_ipa:${riva_ngc_model_version}" + "${riva_ngc_org}/${riva_ngc_team}/rmir_tts_radtts_hifigan_en_us_ipa:${riva_ngc_model_version}" + + ### This model uses the ARPABET for inference and training. + # "${riva_ngc_org}/${riva_ngc_team}/rmir_tts_fastpitch_hifigan_en_us:${riva_ngc_model_version}" + ) +fi + +######### NMT models ############### + +# Only models specified here get loaded, commented models (preceded with #) are skipped. 
+# models follow Source language _ One or more target languages model architecture +# e.g., rmir_de_en_24x6 is a German to English 24x6 bilingual model + +models_nmt=( + ###### Bilingual models + #"${riva_ngc_org}/${riva_ngc_team}/rmir_nmt_en_de_24x6:${riva_ngc_model_version}" + #"${riva_ngc_org}/${riva_ngc_team}/rmir_nmt_en_es_24x6:${riva_ngc_model_version}" + #"${riva_ngc_org}/${riva_ngc_team}/rmir_nmt_en_zh_24x6:${riva_ngc_model_version}" + #"${riva_ngc_org}/${riva_ngc_team}/rmir_nmt_en_ru_24x6:${riva_ngc_model_version}" + #"${riva_ngc_org}/${riva_ngc_team}/rmir_nmt_en_fr_24x6:${riva_ngc_model_version}" + #"${riva_ngc_org}/${riva_ngc_team}/rmir_nmt_de_en_24x6:${riva_ngc_model_version}" + #"${riva_ngc_org}/${riva_ngc_team}/rmir_nmt_es_en_24x6:${riva_ngc_model_version}" + #"${riva_ngc_org}/${riva_ngc_team}/rmir_nmt_ru_en_24x6:${riva_ngc_model_version}" + #"${riva_ngc_org}/${riva_ngc_team}/rmir_nmt_zh_en_24x6:${riva_ngc_model_version}" + #"${riva_ngc_org}/${riva_ngc_team}/rmir_nmt_fr_en_24x6:${riva_ngc_model_version}" + + ###### Multilingual models + #"${riva_ngc_org}/${riva_ngc_team}/rmir_nmt_en_deesfr_24x6:${riva_ngc_model_version}" + #"${riva_ngc_org}/${riva_ngc_team}/rmir_nmt_en_deesfr_12x2:${riva_ngc_model_version}" + #"${riva_ngc_org}/${riva_ngc_team}/rmir_nmt_deesfr_en_24x6:${riva_ngc_model_version}" + #"${riva_ngc_org}/${riva_ngc_team}/rmir_nmt_deesfr_en_12x2:${riva_ngc_model_version}" + + ###### Megatron models + #"${riva_ngc_org}/${riva_ngc_team}/rmir_megatronnmt_any_en_500m:${riva_ngc_model_version}" + #"${riva_ngc_org}/${riva_ngc_team}/rmir_megatronnmt_en_any_500m:${riva_ngc_model_version}" +) + +NGC_TARGET=${riva_ngc_org} +if [[ ! -z ${riva_ngc_team} ]]; then + NGC_TARGET="${NGC_TARGET}/${riva_ngc_team}" +else + team="\"\"" +fi + +# Specify paths to SSL Key and Certificate files to use TLS/SSL Credentials for a secured connection. +# If either are empty, an insecure connection will be used. +# Stored within container at /ssl/servert.crt and /ssl/server.key +# Optional, one can also specify a root certificate, stored within container at /ssl/root_server.crt +ssl_server_cert="" +ssl_server_key="" +ssl_root_cert="" + +# define docker images required to run Riva +image_speech_api="nvcr.io/${NGC_TARGET}/riva-speech:${riva_ngc_image_version}" + +# define docker images required to setup Riva +image_init_speech="nvcr.io/${NGC_TARGET}/riva-speech:${riva_ngc_image_version}-servicemaker" + +# daemon names +riva_daemon_speech="riva-speech" +if [[ $riva_target_gpu_family != "tegra" ]]; then + riva_daemon_client="riva-client" +fi \ No newline at end of file diff --git a/experimental/fm-asr-streaming-rag/file-replay/Dockerfile b/experimental/fm-asr-streaming-rag/file-replay/Dockerfile new file mode 100644 index 000000000..70e95dc2e --- /dev/null +++ b/experimental/fm-asr-streaming-rag/file-replay/Dockerfile @@ -0,0 +1,32 @@ +# SPDX-FileCopyrightText: Copyright (c) 2024 NVIDIA CORPORATION & AFFILIATES. All rights reserved. +# SPDX-License-Identifier: Apache-2.0 +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +ARG BASE_IMAGE_URL="nvcr.io/nvidia/pytorch" +ARG BASE_IMAGE_TAG="23.08-py3" +FROM ${BASE_IMAGE_URL}:${BASE_IMAGE_TAG} + +ENV TZ="America/New_York" +RUN apt-get update && DEBIAN_FRONTEND=noninteractive apt-get install -y tzdata + +RUN --mount=type=bind,source=requirements.txt,target=/opt/requirements.txt \ + pip install --no-cache-dir -r /opt/requirements.txt + +# Uninstall cupy12 and install cupy13 +RUN pip uninstall -y cupy-cuda12x && \ + pip install --pre cupy-cuda12x -f https://pip.cupy.dev/pre + +COPY . /workspace + +WORKDIR /workspace \ No newline at end of file diff --git a/experimental/fm-asr-streaming-rag/file-replay/requirements.txt b/experimental/fm-asr-streaming-rag/file-replay/requirements.txt new file mode 100644 index 000000000..c3916deec --- /dev/null +++ b/experimental/fm-asr-streaming-rag/file-replay/requirements.txt @@ -0,0 +1 @@ +librosa==0.10.1 \ No newline at end of file diff --git a/experimental/fm-asr-streaming-rag/file-replay/wav_replay.py b/experimental/fm-asr-streaming-rag/file-replay/wav_replay.py new file mode 100644 index 000000000..c3771397f --- /dev/null +++ b/experimental/fm-asr-streaming-rag/file-replay/wav_replay.py @@ -0,0 +1,197 @@ +# SPDX-FileCopyrightText: Copyright (c) 2024 NVIDIA CORPORATION & AFFILIATES. All rights reserved. +# SPDX-License-Identifier: Apache-2.0 +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +""" +Given a .wav audio file, this function will convert the floating-point +audio samples to I/Q FM-modulated samples and send them as UDP packets +downstream to the Holoscan SDR application. Note that the replay parameters +used here should match up with the expected parameters in the SDR's +params.yml file. 
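+
+Each UDP datagram written by this script is an 8-byte packet counter
+(struct.pack('Q', ...), native byte order) followed by up to --packet-size
+bytes of complex64 I/Q samples. A minimal receiver-side parse might look like
+the following sketch (assumes NumPy on the receiving end; the Holoscan SDR
+application defines its own packet handling):
+
+    count = struct.unpack('Q', datagram[:8])[0]
+    iq = np.frombuffer(datagram[8:], dtype=np.complex64)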
+""" + +import time +import os +import logging +import librosa +import argparse +import struct +import socket + +import cupy as cp +import cupyx.scipy.signal as cusignal + +logger = logging.getLogger(__name__) +logger.setLevel(logging.INFO) +logger.addHandler(logging.StreamHandler()) + +def parse_args() -> argparse.Namespace: + parser = argparse.ArgumentParser( + description="Replay audio file as UDP packets with I/Q samples" + ) + parser.add_argument( + "--dst-ip", + type=str, + default="0.0.0.0", + help="IP address, should match 'network_rx.ip_addr' in sdr-holoscan/params.yml" + ) + parser.add_argument( + "--dst-port", + type=int, + default=5005, + help="Destination port, should match 'network_rx.dst_port' in sdr-holoscan/params.yml" + ) + parser.add_argument( + "--file-name", + type=str, + nargs="?", + const="", + default="", + help="Filename, should be located in file-replay/files" + ) + parser.add_argument( + "--sample-rate", + type=float, + default=1e6, + help="Output sample rate, should match 'sensor.sample_rate' in sdr-holoscan/params.yml" + ) + parser.add_argument( + "--packet-size", + type=int, + default=1472, + help="Size in bytes of each UDP packet, plus 8 counting bytes at front" + ) + parser.add_argument( + "--total-time", + type=float, + default=0, + help="Total runtime. If non-zero, loops until time is hit if .wav file is shorter." + ) + return parser.parse_args() + +def wait_for_dst(dst_ip, dst_port, wait_time=5, timeout=300): + """ Try to connect until successful + """ + sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) + sock.connect((dst_ip, dst_port)) + start_time = time.time() + curr_time = start_time + while curr_time - start_time < timeout: + try: + sock.sendto(struct.pack('Q', 0), (dst_ip, dst_port)) + logger.info(f"{dst_ip}:{dst_port} open, replaying") + return + except ConnectionRefusedError: + logger.info(f"Waiting {wait_time}s for {dst_ip}:{dst_port} to open") + time.sleep(wait_time) + logger.error(f"{dst_ip}:{dst_port} never opened") + +def fm_modulate(audio, fs_in, fs_out, deviation=100000): + """ Given audio samples in floating point""" + # Resample + nsamples = int(audio.shape[0] * fs_out / fs_in) + chunk = cusignal.resample(audio, nsamples) + + # Integrate and frequency modulate + integrated_audio = cp.cumsum(chunk) / fs_out + phase_deviation = 2 * cp.pi * deviation * integrated_audio + samples = cp.cos(phase_deviation) + 1j*cp.sin(phase_deviation) + return samples.astype(cp.complex64) + +def replay(file_name, fs_out, dst_ip, dst_port, pkt_size, chunk_time=2, total_time=0): + file_path = os.path.join("files", file_name) + fs_in = librosa.get_samplerate(file_path) + + # Setup socket + sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) + sock.connect((dst_ip, dst_port)) + + # Warm start + audio, _ = librosa.load(file_path, duration=chunk_time) + samples = fm_modulate(cp.array(audio), fs_in, fs_out) + iq_data = samples.tobytes() + + if not total_time: + total_time = librosa.get_duration(filename=file_path) + + # Stream in file + elapsed = 0 + while elapsed < total_time: + stream = librosa.stream( + file_path, + block_length=1, + frame_length=chunk_time * fs_in, + hop_length=chunk_time * fs_in + ) + for audio in stream: + # Reset stats + start_time = time.time() + prev_time = start_time + pkts_sent = 0 + bytes_sent = 0 + + # Convert WAV to I/Q samples + samples = fm_modulate(cp.array(audio), fs_in, fs_out) + iq_data = samples.tobytes() + inter_pkt_time = chunk_time / (len(iq_data) // pkt_size) + + # Form packet and send + for i in range(0, 
len(iq_data), pkt_size): + # Send + header = struct.pack('Q', pkts_sent) + pkt_data = iq_data[i:i+pkt_size] + sock.sendto(header + pkt_data, (dst_ip, dst_port)) + + pkts_sent += 1 + bytes_sent += len(pkt_data) + + # Wait allotted time + curr_time = time.time() + while (curr_time - prev_time) < inter_pkt_time: + curr_time = time.time() + prev_time = curr_time + + # Print stats + dt = curr_time - start_time + elapsed += dt + logger.info(f"Stats ({elapsed:.2f}s):") + logger.info(f" - {pkts_sent} packets") + logger.info(f" - {bytes_sent} bytes") + logger.info(f" - {bytes_sent / dt / 1e6:.2f} MB/s") + + if elapsed >= total_time: + break + +if __name__ == "__main__": + args = parse_args() + if not args.file_name: + logger.info("No file provided, exiting") + exit() + elif args.file_name.split('.')[-1].lower() != "wav": + logger.error(f"Only configured for .wav, cannot play {args.file_name}") + raise ValueError + + # Wait for other apps + time.sleep(10) + wait_for_dst(args.dst_ip, args.dst_port) + + # Do replay + replay( + args.file_name, + args.sample_rate, + args.dst_ip, + args.dst_port, + args.packet_size, + total_time=args.total_time + ) \ No newline at end of file diff --git a/experimental/fm-asr-streaming-rag/frontend/Dockerfile b/experimental/fm-asr-streaming-rag/frontend/Dockerfile new file mode 100644 index 000000000..676b664f8 --- /dev/null +++ b/experimental/fm-asr-streaming-rag/frontend/Dockerfile @@ -0,0 +1,31 @@ +# SPDX-FileCopyrightText: Copyright (c) 2024 NVIDIA CORPORATION & AFFILIATES. All rights reserved. +# SPDX-License-Identifier: Apache-2.0 +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +FROM docker.io/library/python:3.11-slim + +ENV TZ="America/New_York" +RUN apt-get update && DEBIAN_FRONTEND=noninteractive apt-get install -y tzdata + +RUN --mount=type=bind,source=requirements.txt,target=/opt/requirements.txt \ + pip install --no-cache-dir -r /opt/requirements.txt + +COPY frontend /app/frontend +RUN chown -R 1001 /app + +USER 1001 + +WORKDIR /app + +ENTRYPOINT ["python3", "-m", "frontend"] \ No newline at end of file diff --git a/experimental/fm-asr-streaming-rag/frontend/frontend/__init__.py b/experimental/fm-asr-streaming-rag/frontend/frontend/__init__.py new file mode 100644 index 000000000..c8c2813c3 --- /dev/null +++ b/experimental/fm-asr-streaming-rag/frontend/frontend/__init__.py @@ -0,0 +1,165 @@ +# SPDX-FileCopyrightText: Copyright (c) 2024 NVIDIA CORPORATION & AFFILIATES. All rights reserved. +# SPDX-License-Identifier: Apache-2.0 +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +""" +Document Retrieval Service. +Handle document ingestion and retrieval from a VectorDB. +""" + +import logging +import os +import sys +import threading +import typing + +from flask import Flask, request, jsonify, stream_with_context +from flask.views import MethodView + +if typing.TYPE_CHECKING: + from frontend.api import APIServer + +_LOG_LEVEL = logging.getLevelName(os.environ.get('FRONTEND_LOG_LEVEL', 'WARN').upper()) +flask_logger = logging.getLogger("werkzeug") +flask_logger.setLevel(logging.getLevelName(_LOG_LEVEL)) + +_LOG_FMT = f"[{os.getpid()}] %(asctime)15s [%(levelname)7s] - %(name)s - %(message)s" +_LOG_DATE_FMT = "%b %d %H:%M:%S" +_LOGGER = logging.getLogger(__name__) +app = Flask(__name__) + +def bootstrap_logging(verbosity: str = 'WARN') -> None: + """ + Configure Python's logger according to the given verbosity level. + Use string input, options are any of 'logging' module's logging levels. + """ + # determine log level + log_level = logging.getLevelName(verbosity) + + # configure python's logger + logging.basicConfig(format=_LOG_FMT, datefmt=_LOG_DATE_FMT, level=log_level) + # update existing loggers + _LOGGER.setLevel(log_level) + for logger in [ + __name__, + "uvicorn", + "uvicorn.access", + "uvicorn.error", + ]: + for handler in logging.getLogger(logger).handlers: + handler.setFormatter(logging.Formatter(fmt=_LOG_FMT, datefmt=_LOG_DATE_FMT)) + +class DemoAppAPI(MethodView): + def __init__(self, client): + self._client = client + + def post(self, action=None, device=None, state=None): + ''' + This gets called first for an HTTP POST + This functions determines which function to call based on the url + e.g /apps/control_device will map method to control_device. + Data is passed by post and processed in function itself + ''' + try: + method = getattr(self, action) + return method() + except AttributeError as e: + return jsonify(f"Error -- no POST action {action} (error: {e})") + except Exception as e: + return jsonify(f"POST error {e}") + + def get(self, action): + ''' + This gets called first for an HTTP GET + This functions determines which function to call based on the url + e.g /apps/control_device will map method to control_device. + ''' + try: + method = getattr(self, action) + return method() + except AttributeError as e: + return jsonify(f"Error -- no GET action {action} (error: {e})") + except Exception as e: + return jsonify(f"GET error {e}") + + def update_running_transcript(self): + """ Updates the Riva partial transcript """ + data = request.get_json() + transcript = data.get('transcript') + return app.response_class( + stream_with_context( + self._client.update_running_buffer(transcript) + ) + ) + + def update_finalized_transcript(self): + """ Updates the Riva final transcript """ + data = request.get_json() + transcript = data.get('transcript') + return app.response_class( + stream_with_context( + self._client.update_finalized_buffer(transcript) + ) + ) + +def create_app(client): + app.add_url_rule('/app/', + view_func=DemoAppAPI.as_view('flask_api', client), + methods=['POST','GET']) + return app + +def main() -> "APIServer": + """ + Bootstrap and Execute the application. + :returns: 0 if the application completed successfully, 1 if an error occurred. 
+ :rtype: Literal[0,1] + """ + bootstrap_logging(_LOG_LEVEL) + + # load the application libraries + # pylint: disable=import-outside-toplevel; + # this is intentional to allow for the environment to be configured before + # any of the application libraries are loaded. + from frontend import api, chat_client, configuration + + # load config + config_file = os.environ.get("APP_CONFIG_FILE", "/dev/null") + _LOGGER.info("Loading application configuration.") + config = configuration.AppConfig.from_file(config_file) + if not config: + sys.exit(1) + _LOGGER.info("Configuration: \n%s", config.to_yaml()) + + _ = threading.Lock() + + # Connect to other services + client = chat_client.ChatClient( + f"{config.server_url}:{config.server_port}", config.model_name + ) + + app_uri = os.environ.get("FRONTEND_URI", "localhost:6001") + app_port = app_uri.split(':')[-1] + flask_app = create_app(client) + threading.Thread(target=lambda: flask_app.run( + debug=True, host='0.0.0.0', use_reloader=False, port=app_port + )).start() + + # Create api server + _LOGGER.info("Instantiating the API Server.") + server = api.APIServer(client) + server.configure_routes() + + # Run until complete + _LOGGER.info("Starting the API Server.") + return server diff --git a/experimental/fm-asr-streaming-rag/frontend/frontend/__main__.py b/experimental/fm-asr-streaming-rag/frontend/frontend/__main__.py new file mode 100644 index 000000000..f11d87c60 --- /dev/null +++ b/experimental/fm-asr-streaming-rag/frontend/frontend/__main__.py @@ -0,0 +1,112 @@ +# SPDX-FileCopyrightText: Copyright (c) 2024 NVIDIA CORPORATION & AFFILIATES. All rights reserved. +# SPDX-License-Identifier: Apache-2.0 +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Entrypoint for the Conversation GUI. + +The functions in this module are responsible for bootstrapping then executing the Conversation GUI server. +""" + +import argparse +import os +import sys + +import uvicorn + + +def parse_args() -> argparse.Namespace: + """Parse command-line arguments for the program. + + :returns: A namespace containing the parsed arguments. 
+ :rtype: argparse.Namespace + """ + parser = argparse.ArgumentParser(description="Document Retrieval Service") + + parser.add_argument( + "--help-config", + action="store_true", + default=False, + help="show the configuration help text", + ) + + parser.add_argument( + "-c", + "--config", + metavar="CONFIGURATION_FILE", + default="/dev/null", + help="path to the configuration file (json or yaml)", + ) + + parser.add_argument( + "--host", + metavar="HOSTNAME", + type=str, + default="0.0.0.0", # nosec # this is intentional + help="Bind socket to this host.", + ) + parser.add_argument( + "--port", + metavar="PORT_NUM", + type=int, + default=8090, + help="Bind socket to this port.", + ) + parser.add_argument( + "--workers", + metavar="NUM_WORKERS", + type=int, + default=1, + help="Number of worker processes.", + ) + parser.add_argument( + "--ssl-keyfile", + metavar="SSL_KEY", + type=str, + default=None, + help="SSL key file" + ) + parser.add_argument( + "--ssl-certfile", + metavar="SSL_CERT", + type=str, + default=None, + help="SSL certificate file", + ) + + cliargs = parser.parse_args() + if cliargs.help_config: + # pylint: disable=import-outside-toplevel; + # this is intentional to allow for the environment to be configured + # before any of the application libraries are loaded. + from frontend.configuration import AppConfig + + sys.stdout.write("\nconfiguration file format:\n") + AppConfig.print_help(sys.stdout.write) + sys.exit(0) + + return cliargs + + +if __name__ == "__main__": + args = parse_args() + os.environ["APP_CONFIG_FILE"] = args.config + uvicorn.run( + "frontend:main", + factory=True, + host=args.host, + port=args.port, + workers=args.workers, + ssl_keyfile=args.ssl_keyfile, + ssl_certfile=args.ssl_certfile, + ) diff --git a/experimental/fm-asr-streaming-rag/frontend/frontend/api.py b/experimental/fm-asr-streaming-rag/frontend/frontend/api.py new file mode 100644 index 000000000..012566f49 --- /dev/null +++ b/experimental/fm-asr-streaming-rag/frontend/frontend/api.py @@ -0,0 +1,92 @@ +# SPDX-FileCopyrightText: Copyright (c) 2024 NVIDIA CORPORATION & AFFILIATES. All rights reserved. +# SPDX-License-Identifier: Apache-2.0 +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""This module contains the Server that will host the frontend and API.""" +import os + +import gradio as gr +from fastapi import FastAPI +from fastapi.responses import FileResponse +from fastapi.staticfiles import StaticFiles +from frontend.chat_client import ChatClient + +from frontend import pages + +STATIC_DIR = os.path.join(os.path.abspath(os.path.dirname(__file__)), "static") + + +class APIServer(FastAPI): + """A class that hosts the service api. + + :cvar title: The title of the server. + :type title: str + :cvar desc: A description of the server. + :type desc: str + """ + + title = "Chat" + desc = "This service provides a sample conversation frontend flow." 
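+
+    # Route layout, summarizing configure_routes() below: each Gradio page is
+    # mounted under /content/<page>, the bare paths (/, /converse, /kb, /stats,
+    # /waveform) return static HTML from STATIC_DIR, and everything else falls
+    # through to the StaticFiles mount at "/".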
+ + def __init__(self, client: ChatClient) -> None: + """Initialize the API server.""" + self._client = client + super().__init__(title=self.title, description=self.desc) + + def configure_routes(self) -> None: + """Configure the routes in the API Server.""" + _ = gr.mount_gradio_app( + self, + blocks=pages.converse.build_page(self._client), + path=f"/content{pages.converse.PATH}", + ) + _ = gr.mount_gradio_app( + self, + blocks=pages.kb.build_page(self._client), + path=f"/content{pages.kb.PATH}", + ) + + _ = gr.mount_gradio_app( + self, + blocks=pages.stats.build_page(), + path=f"/content{pages.stats.PATH}", + ) + + _ = gr.mount_gradio_app( + self, + blocks=pages.waveform.build_page(), + path=f"/content{pages.waveform.PATH}", + ) + + @self.get("/") + async def root_redirect() -> FileResponse: + return FileResponse(os.path.join(STATIC_DIR, "converse.html")) + + @self.get("/converse") + async def converse_redirect() -> FileResponse: + return FileResponse(os.path.join(STATIC_DIR, "converse.html")) + + @self.get("/kb") + async def kb_redirect() -> FileResponse: + return FileResponse(os.path.join(STATIC_DIR, "kb.html")) + + @self.get("/stats") + async def kb_redirect() -> FileResponse: + return FileResponse(os.path.join(STATIC_DIR, "stats.html")) + + @self.get("/waveform") + async def kb_redirect() -> FileResponse: + return FileResponse(os.path.join(STATIC_DIR, "waveform.html")) + + self.mount("/", StaticFiles(directory=STATIC_DIR, html=True)) diff --git a/experimental/fm-asr-streaming-rag/frontend/frontend/assets/__init__.py b/experimental/fm-asr-streaming-rag/frontend/frontend/assets/__init__.py new file mode 100644 index 000000000..ed8ace64b --- /dev/null +++ b/experimental/fm-asr-streaming-rag/frontend/frontend/assets/__init__.py @@ -0,0 +1,38 @@ +# SPDX-FileCopyrightText: Copyright (c) 2024 NVIDIA CORPORATION & AFFILIATES. All rights reserved. +# SPDX-License-Identifier: Apache-2.0 +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""This module contains theming assets.""" +import os.path +from typing import Tuple + +import gradio as gr + +_ASSET_DIR = os.path.dirname(__file__) + + +def load_theme(name: str) -> Tuple[gr.Theme, str]: + """Load a pre-defined frontend theme. + + :param name: The name of the theme to load. + :type name: str + :returns: A tuple containing the Gradio theme and custom CSS. 
+ :rtype: Tuple[gr.Theme, str] + """ + theme_json_path = os.path.join(_ASSET_DIR, f"{name}-theme.json") + theme_css_path = os.path.join(_ASSET_DIR, f"{name}-theme.css") + return ( + gr.themes.Default().load(theme_json_path), + open(theme_css_path, encoding="UTF-8").read(), + ) diff --git a/experimental/fm-asr-streaming-rag/frontend/frontend/assets/kaizen-theme.css b/experimental/fm-asr-streaming-rag/frontend/frontend/assets/kaizen-theme.css new file mode 100644 index 000000000..04e930498 --- /dev/null +++ b/experimental/fm-asr-streaming-rag/frontend/frontend/assets/kaizen-theme.css @@ -0,0 +1,13 @@ +.tabitem { + background-color: var(--block-background-fill); + } + + .gradio-container { + /* This needs to be !important, otherwise the breakpoint override the container being full width */ + max-width: 100% !important; + padding: 10px !important; + } + + footer { + visibility: hidden; + } diff --git a/experimental/fm-asr-streaming-rag/frontend/frontend/assets/kaizen-theme.json b/experimental/fm-asr-streaming-rag/frontend/frontend/assets/kaizen-theme.json new file mode 100644 index 000000000..a3218660b --- /dev/null +++ b/experimental/fm-asr-streaming-rag/frontend/frontend/assets/kaizen-theme.json @@ -0,0 +1,336 @@ +{ + "theme": { + "_font": [ + { + "__gradio_font__": true, + "name": "NVIDIA Sans", + "class": "font" + }, + { + "__gradio_font__": true, + "name": "ui-sans-serif", + "class": "font" + }, + { + "__gradio_font__": true, + "name": "system-ui", + "class": "font" + }, + { + "__gradio_font__": true, + "name": "sans-serif", + "class": "font" + } + ], + "_font_mono": [ + { + "__gradio_font__": true, + "name": "JetBrains Mono", + "class": "google" + }, + { + "__gradio_font__": true, + "name": "ui-monospace", + "class": "font" + }, + { + "__gradio_font__": true, + "name": "Consolas", + "class": "font" + }, + { + "__gradio_font__": true, + "name": "monospace", + "class": "font" + } + ], + "_stylesheets": [ + "https://fonts.googleapis.com/css2?family=JetBrains+Mono&family=Roboto:ital,wght@0,100;0,300;0,400;0,500;0,700;0,900;1,100;1,300;1,400;1,500;1,700;1,900&display=swap", + "https://brand-assets.cne.ngc.nvidia.com/assets/fonts/nvidia-sans/1.0.0/NVIDIASans_Lt.woff2", + "https://brand-assets.cne.ngc.nvidia.com/assets/fonts/nvidia-sans/1.0.0/NVIDIASans_LtIt.woff2", + "https://brand-assets.cne.ngc.nvidia.com/assets/fonts/nvidia-sans/1.0.0/NVIDIASans_Rg.woff2", + "https://brand-assets.cne.ngc.nvidia.com/assets/fonts/nvidia-sans/1.0.0/NVIDIASans_It.woff2", + "https://brand-assets.cne.ngc.nvidia.com/assets/fonts/nvidia-sans/1.0.0/NVIDIASans_Md.woff2", + "https://brand-assets.cne.ngc.nvidia.com/assets/fonts/nvidia-sans/1.0.0/NVIDIASans_MdIt.woff2", + "https://brand-assets.cne.ngc.nvidia.com/assets/fonts/nvidia-sans/1.0.0/NVIDIASans_Bd.woff2", + "https://brand-assets.cne.ngc.nvidia.com/assets/fonts/nvidia-sans/1.0.0/NVIDIASans_BdIt.woff2" + ], + "background_fill_primary": "#ffffff", + "background_fill_primary_dark": "#292929", + "background_fill_secondary": "*neutral_50", + "background_fill_secondary_dark": "*neutral_900", + "block_background_fill": "#ffffff", + "block_background_fill_dark": "#292929", + "block_border_color": "#d8d8d8", + "block_border_color_dark": "*border_color_primary", + "block_border_width": "1px", + "block_info_text_color": "*body_text_color_subdued", + "block_info_text_color_dark": "*body_text_color_subdued", + "block_info_text_size": "*text_sm", + "block_info_text_weight": "400", + "block_label_background_fill": "#e4fabe", + "block_label_background_fill_dark": "#e4fabe", + 
"block_label_border_color": "#e4fabe", + "block_label_border_color_dark": "#e4fabe", + "block_label_border_width": "1px", + "block_label_margin": "0", + "block_label_padding": "*spacing_sm *spacing_lg", + "block_label_radius": "calc(*radius_lg - 1px) 0 calc(*radius_lg - 1px) 0", + "block_label_right_radius": "0 calc(*radius_lg - 1px) 0 calc(*radius_lg - 1px)", + "block_label_shadow": "*block_shadow", + "block_label_text_color": "#4d6721", + "block_label_text_color_dark": "#4d6721", + "block_label_text_size": "*text_sm", + "block_label_text_weight": "400", + "block_padding": "*spacing_xl calc(*spacing_xl + 2px)", + "block_radius": "*radius_lg", + "block_shadow": "*shadow_drop", + "block_title_background_fill": "none", + "block_title_border_color": "none", + "block_title_border_width": "0px", + "block_title_padding": "0", + "block_title_radius": "none", + "block_title_text_color": "*neutral_500", + "block_title_text_color_dark": "*neutral_200", + "block_title_text_size": "*text_md", + "block_title_text_weight": "500", + "body_background_fill": "#f2f2f2", + "body_background_fill_dark": "#202020", + "body_text_color": "#202020", + "body_text_color_dark": "#f2f2f2", + "body_text_color_subdued": "*neutral_400", + "body_text_color_subdued_dark": "*neutral_400", + "body_text_size": "*text_md", + "body_text_weight": "400", + "border_color_accent": "*primary_300", + "border_color_accent_dark": "*neutral_600", + "border_color_primary": "#d8d8d8", + "border_color_primary_dark": "#343434", + "button_border_width": "1px", + "button_border_width_dark": "1px", + "button_cancel_background_fill": "#dc3528", + "button_cancel_background_fill_dark": "#dc3528", + "button_cancel_background_fill_hover": "#b6251b", + "button_cancel_background_fill_hover_dark": "#b6251b", + "button_cancel_border_color": "#dc3528", + "button_cancel_border_color_dark": "#dc3528", + "button_cancel_border_color_hover": "#b6251b", + "button_cancel_border_color_hover_dark": "#b6251b", + "button_cancel_text_color": "#ffffff", + "button_cancel_text_color_dark": "#ffffff", + "button_cancel_text_color_hover": "#ffffff", + "button_cancel_text_color_hover_dark": "#ffffff", + "button_large_padding": "*spacing_lg calc(2 * *spacing_lg)", + "button_large_radius": "*radius_lg", + "button_large_text_size": "*text_lg", + "button_large_text_weight": "500", + "button_primary_background_fill": "#76b900", + "button_primary_background_fill_dark": "#76b900", + "button_primary_background_fill_hover": "#659f00", + "button_primary_background_fill_hover_dark": "#659f00", + "button_primary_border_color": "#76b900", + "button_primary_border_color_dark": "#76b900", + "button_primary_border_color_hover": "#659f00", + "button_primary_border_color_hover_dark": "#659f00", + "button_primary_text_color": "#202020", + "button_primary_text_color_dark": "#202020", + "button_primary_text_color_hover": "#202020", + "button_primary_text_color_hover_dark": "#202020", + "button_secondary_background_fill": "#ffffff", + "button_secondary_background_fill_dark": "#292929", + "button_secondary_background_fill_hover": "#e2e2e2", + "button_secondary_background_fill_hover_dark": "#202020", + "button_secondary_border_color": "#5e5e5e", + "button_secondary_border_color_dark": "#c6c6c6", + "button_secondary_border_color_hover": "#5e5e5e", + "button_secondary_border_color_hover_dark": "#c6c6c6", + "button_secondary_text_color": "#5e5e5e", + "button_secondary_text_color_dark": "#e2e2e2", + "button_secondary_text_color_hover": "#343434", + "button_secondary_text_color_hover_dark": 
"#ffffff", + "button_shadow": "*shadow_drop", + "button_shadow_active": "*shadow_inset", + "button_shadow_hover": "*shadow_drop_lg", + "button_small_padding": "*spacing_sm calc(2 * *spacing_sm)", + "button_small_radius": "*radius_lg", + "button_small_text_size": "*text_md", + "button_small_text_weight": "400", + "button_transition": "none", + "chatbot_code_background_color": "*neutral_100", + "chatbot_code_background_color_dark": "*neutral_800", + "checkbox_background_color": "*background_fill_primary", + "checkbox_background_color_dark": "*neutral_800", + "checkbox_background_color_focus": "*checkbox_background_color", + "checkbox_background_color_focus_dark": "*checkbox_background_color", + "checkbox_background_color_hover": "*checkbox_background_color", + "checkbox_background_color_hover_dark": "*checkbox_background_color", + "checkbox_background_color_selected": "#659f00", + "checkbox_background_color_selected_dark": "#659f00", + "checkbox_border_color": "*neutral_300", + "checkbox_border_color_dark": "*neutral_700", + "checkbox_border_color_focus": "*secondary_500", + "checkbox_border_color_focus_dark": "*secondary_500", + "checkbox_border_color_hover": "*neutral_300", + "checkbox_border_color_hover_dark": "*neutral_600", + "checkbox_border_color_selected": "#659f00", + "checkbox_border_color_selected_dark": "#659f00", + "checkbox_border_radius": "*radius_sm", + "checkbox_border_width": "2px", + "checkbox_border_width_dark": "*input_border_width", + "checkbox_check": "url(\"data:image/svg+xml,%3csvg viewBox='0 0 16 16' fill='white' xmlns='http://www.w3.org/2000/svg'%3e%3cpath d='M12.207 4.793a1 1 0 010 1.414l-5 5a1 1 0 01-1.414 0l-2-2a1 1 0 011.414-1.414L6.5 9.086l4.293-4.293a1 1 0 011.414 0z'/%3e%3c/svg%3e\")", + "checkbox_label_background_fill": "#ffffff", + "checkbox_label_background_fill_dark": "#292929", + "checkbox_label_background_fill_hover": "#ffffff", + "checkbox_label_background_fill_hover_dark": "#292929", + "checkbox_label_background_fill_selected": "*checkbox_label_background_fill", + "checkbox_label_background_fill_selected_dark": "*checkbox_label_background_fill", + "checkbox_label_border_color": "#ffffff", + "checkbox_label_border_color_dark": "#292929", + "checkbox_label_border_color_hover": "*checkbox_label_border_color", + "checkbox_label_border_color_hover_dark": "*checkbox_label_border_color", + "checkbox_label_border_width": "0", + "checkbox_label_border_width_dark": "*input_border_width", + "checkbox_label_gap": "16px", + "checkbox_label_padding": "", + "checkbox_label_shadow": "none", + "checkbox_label_text_color": "*body_text_color", + "checkbox_label_text_color_dark": "*body_text_color", + "checkbox_label_text_color_selected": "*checkbox_label_text_color", + "checkbox_label_text_color_selected_dark": "*checkbox_label_text_color", + "checkbox_label_text_size": "*text_md", + "checkbox_label_text_weight": "400", + "checkbox_shadow": "*input_shadow", + "color_accent": "*primary_500", + "color_accent_soft": "*primary_50", + "color_accent_soft_dark": "*neutral_700", + "container_radius": "*radius_lg", + "embed_radius": "*radius_lg", + "error_background_fill": "#fef2f2", + "error_background_fill_dark": "*neutral_900", + "error_border_color": "#fee2e2", + "error_border_color_dark": "#ef4444", + "error_border_width": "1px", + "error_icon_color": "#b91c1c", + "error_icon_color_dark": "#ef4444", + "error_text_color": "#b91c1c", + "error_text_color_dark": "#fef2f2", + "font": "'NVIDIA Sans', 'ui-sans-serif', 'system-ui', sans-serif", + "font_mono": "'JetBrains Mono', 
'ui-monospace', 'Consolas', monospace", + "form_gap_width": "1px", + "input_background_fill": "white", + "input_background_fill_dark": "*neutral_800", + "input_background_fill_focus": "*secondary_500", + "input_background_fill_focus_dark": "*secondary_600", + "input_background_fill_hover": "*input_background_fill", + "input_background_fill_hover_dark": "*input_background_fill", + "input_border_color": "#d8d8d8", + "input_border_color_dark": "#343434", + "input_border_color_focus": "*secondary_300", + "input_border_color_focus_dark": "*neutral_700", + "input_border_color_hover": "*input_border_color", + "input_border_color_hover_dark": "*input_border_color", + "input_border_width": "2px", + "input_padding": "*spacing_xl", + "input_placeholder_color": "*neutral_400", + "input_placeholder_color_dark": "*neutral_500", + "input_radius": "*radius_lg", + "input_shadow": "0 0 0 *shadow_spread transparent, *shadow_inset", + "input_shadow_focus": "0 0 0 *shadow_spread *secondary_50, *shadow_inset", + "input_shadow_focus_dark": "0 0 0 *shadow_spread *neutral_700, *shadow_inset", + "input_text_size": "*text_md", + "input_text_weight": "400", + "layout_gap": "*spacing_xxl", + "link_text_color": "*secondary_600", + "link_text_color_active": "*secondary_600", + "link_text_color_active_dark": "*secondary_500", + "link_text_color_dark": "*secondary_500", + "link_text_color_hover": "*secondary_700", + "link_text_color_hover_dark": "*secondary_400", + "link_text_color_visited": "*secondary_500", + "link_text_color_visited_dark": "*secondary_600", + "loader_color": "*color_accent", + "name": "default", + "neutral_100": "#e2e2e2", + "neutral_200": "#d8d8d8", + "neutral_300": "#c6c6c6", + "neutral_400": "#8f8f8f", + "neutral_50": "#f2f2f2", + "neutral_500": "#767676", + "neutral_600": "#5e5e5e", + "neutral_700": "#343434", + "neutral_800": "#292929", + "neutral_900": "#202020", + "neutral_950": "#121212", + "panel_background_fill": "*background_fill_secondary", + "panel_background_fill_dark": "*background_fill_secondary", + "panel_border_color": "*border_color_primary", + "panel_border_color_dark": "*border_color_primary", + "panel_border_width": "0", + "primary_100": "#caf087", + "primary_200": "#b6e95d", + "primary_300": "#9fd73d", + "primary_400": "#76b900", + "primary_50": "#e4fabe", + "primary_500": "#659f00", + "primary_600": "#538300", + "primary_700": "#4d6721", + "primary_800": "#253a00", + "primary_900": "#1d2e00", + "primary_950": "#172400", + "prose_header_text_weight": "600", + "prose_text_size": "*text_md", + "prose_text_weight": "400", + "radio_circle": "url(\"data:image/svg+xml,%3csvg viewBox='0 0 16 16' fill='white' xmlns='http://www.w3.org/2000/svg'%3e%3ccircle cx='8' cy='8' r='3'/%3e%3c/svg%3e\")", + "radius_lg": "0px", + "radius_md": "0px", + "radius_sm": "0px", + "radius_xl": "0px", + "radius_xs": "0px", + "radius_xxl": "0px", + "radius_xxs": "0px", + "secondary_100": "#cde6fa", + "secondary_200": "#badef8", + "secondary_300": "#9accf2", + "secondary_400": "#3a96d9", + "secondary_50": "#e9f4fb", + "secondary_500": "#2378ca", + "secondary_600": "#2a63ba", + "secondary_700": "#013076", + "secondary_800": "#00265e", + "secondary_900": "#001e4b", + "secondary_950": "#00112c", + "section_header_text_size": "*text_md", + "section_header_text_weight": "500", + "shadow_drop": "none", + "shadow_drop_lg": "0 1px 3px 0 rgb(0 0 0 / 0.1), 0 1px 2px -1px rgb(0 0 0 / 0.1)", + "shadow_inset": "rgba(0,0,0,0.05) 0px 2px 4px 0px inset", + "shadow_spread": "3px", + "shadow_spread_dark": "1px", + 
"slider_color": "#9fd73d", + "spacing_lg": "8px", + "spacing_md": "6px", + "spacing_sm": "4px", + "spacing_xl": "10px", + "spacing_xs": "2px", + "spacing_xxl": "16px", + "spacing_xxs": "1px", + "stat_background_fill": "linear-gradient(to right, *primary_400, *primary_200)", + "stat_background_fill_dark": "linear-gradient(to right, *primary_400, *primary_600)", + "table_border_color": "*neutral_300", + "table_border_color_dark": "*neutral_700", + "table_even_background_fill": "white", + "table_even_background_fill_dark": "*neutral_950", + "table_odd_background_fill": "*neutral_50", + "table_odd_background_fill_dark": "*neutral_900", + "table_radius": "*radius_lg", + "table_row_focus": "*color_accent_soft", + "table_row_focus_dark": "*color_accent_soft", + "text_lg": "16px", + "text_md": "14px", + "text_sm": "12px", + "text_xl": "22px", + "text_xs": "10px", + "text_xxl": "26px", + "text_xxs": "9px" + } + } diff --git a/experimental/fm-asr-streaming-rag/frontend/frontend/chat_client.py b/experimental/fm-asr-streaming-rag/frontend/frontend/chat_client.py new file mode 100644 index 000000000..78eac1557 --- /dev/null +++ b/experimental/fm-asr-streaming-rag/frontend/frontend/chat_client.py @@ -0,0 +1,105 @@ +# SPDX-FileCopyrightText: Copyright (c) 2024 NVIDIA CORPORATION & AFFILIATES. All rights reserved. +# SPDX-License-Identifier: Apache-2.0 +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""The API client for the langchain-esque service.""" + +from collections import deque +from datetime import datetime + +import logging +import threading +import typing +import time +import os +import requests + +_LOG_LEVEL = logging.getLevelName(os.environ.get('FRONTEND_LOG_LEVEL', 'WARN').upper()) +_LOGGER = logging.getLogger(__name__) +_LOGGER.setLevel(_LOG_LEVEL) + +class ChatClient: + """ A client for connecting the the lanchain-esque service. 
+ """ + + def __init__(self, server_url: str, model_name: str) -> None: + """Initialize the client.""" + self.server_url = server_url + self._model_name = model_name + self._riva_thread = None + self._riva_output_box = None + self._buffer = deque(maxlen=50) + self._lock = threading.Lock() + self._running_buffer = "" + self._finalized_buffer = deque(maxlen=50) + self._timetag_len = len(f'[{datetime.now().strftime("%Y-%m-%d %H:%M:%S")}] ') + + @property + def model_name(self) -> str: + """Return the friendly model name.""" + return self._model_name + + def get_obj_status(self) -> str: + connected = False + max_tries = 60 + tries = 0 + while not connected: + try: + response = requests.post(f"{self.server_url}/app/get_all_devices_status") + connected = True + except Exception as e: + tries += 1 + if tries >= max_tries: + raise e + time.sleep(1) + pass + return response + + def predict(self, query: str, params: dict) -> typing.Generator[str, None, None]: + defaults = { + "question": query, + "name": "mixtral_8x7b", + "engine": "nv-ai-foundation", + "use_knowledge_base": True, + "temperature": 1.0, + "threshold": 0.65, + "max_docs": 4, + "num_tokens": 512 + } + data = {**defaults, **params} + url = (f"{self.server_url}/generate") + _LOGGER.debug("making request - %s", str({"server_url": url, "post_data": data})) + with requests.get(url, stream=True, json=data) as req: + for chunk in req.iter_content(): + yield chunk.decode("UTF-8") + + def update_running_buffer(self, transcript): + with self._lock: + # Strip datetime tag and set + self._running_buffer = transcript[self._timetag_len:] + yield "Updated transcript buffer" + + def update_finalized_buffer(self, transcript): + with self._lock: + # Insert a newline after the datetime tag + tag, text = transcript[:self._timetag_len], transcript[self._timetag_len:] + transcript = f"{tag}\n{text}" + self._finalized_buffer.append(f"{transcript}\n\n") + yield "Updated transcript buffer" + + def upload_documents(self, file_paths: typing.List[str]) -> None: + raise NotImplementedError + + def search(self, prompt: str) -> typing.List[typing.Dict[str, typing.Union[str, float]]]: + raise NotImplementedError \ No newline at end of file diff --git a/experimental/fm-asr-streaming-rag/frontend/frontend/configuration.py b/experimental/fm-asr-streaming-rag/frontend/frontend/configuration.py new file mode 100644 index 000000000..3bcf407b2 --- /dev/null +++ b/experimental/fm-asr-streaming-rag/frontend/frontend/configuration.py @@ -0,0 +1,44 @@ +# SPDX-FileCopyrightText: Copyright (c) 2024 NVIDIA CORPORATION & AFFILIATES. All rights reserved. +# SPDX-License-Identifier: Apache-2.0 +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""The definition of the application configuration.""" +from frontend.configuration_wizard import ConfigWizard, configclass, configfield + + +@configclass +class AppConfig(ConfigWizard): + """Configuration class for the application. + + :cvar triton: The configuration of the chat server. 
+        :type triton: ChatConfig
+        :cvar model: The configuration of the model.
+        :type model: ModelConfig
+    """
+
+    server_url: str = configfield(
+        "serverUrl",
+        default="http://localhost",
+        help_txt="The location of the chat server.",
+    )
+    server_port: str = configfield(
+        "serverPort",
+        default="8000",
+        help_txt="The port on which the chat server is listening for HTTP requests.",
+    )
+    model_name: str = configfield(
+        "modelName",
+        default="llama2-7B-chat",
+        help_txt="The name of the hosted LLM model.",
+    )
diff --git a/experimental/fm-asr-streaming-rag/frontend/frontend/configuration_wizard.py b/experimental/fm-asr-streaming-rag/frontend/frontend/configuration_wizard.py
new file mode 100644
index 000000000..0c60f788c
--- /dev/null
+++ b/experimental/fm-asr-streaming-rag/frontend/frontend/configuration_wizard.py
@@ -0,0 +1,413 @@
+# SPDX-FileCopyrightText: Copyright (c) 2024 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
+# SPDX-License-Identifier: Apache-2.0
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""A module containing utilities for defining application configuration.
+
+This module provides a configuration wizard class that can read configuration data from YAML, JSON, and environment
+variables. The configuration wizard is based heavily off of the JSON and YAML wizards from the `dataclass-wizard`
+Python package. That package is in-turn based heavily off of the built-in `dataclass` module.
+
+This module adds Environment Variable parsing to config file reading.
+"""
+# pylint: disable=too-many-lines; this file is meant to be portable between projects so everything is put into one file
+
+import json
+import logging
+import os
+from dataclasses import _MISSING_TYPE, dataclass
+from typing import Any, Callable, Dict, List, Optional, TextIO, Tuple, Union
+
+import yaml
+from dataclass_wizard import (
+    JSONWizard,
+    LoadMeta,
+    YAMLWizard,
+    errors,
+    fromdict,
+    json_field,
+)
+from dataclass_wizard.models import JSONField
+from dataclass_wizard.utils.string_conv import to_camel_case
+
+configclass = dataclass(frozen=True)
+ENV_BASE = "APP"
+_LOG_LEVEL = logging.getLevelName(os.environ.get('FRONTEND_LOG_LEVEL', 'WARN').upper())
+_LOGGER = logging.getLogger(__name__)
+_LOGGER.setLevel(_LOG_LEVEL)
+
+
+def configfield(
+    name: str, *, env: bool = True, help_txt: str = "", **kwargs: Any
+) -> JSONField:
+    """Create a data class field with the specified name in JSON format.
+
+    :param name: The name of the field.
+    :type name: str
+    :param env: Whether this field should be configurable from an environment variable.
+    :type env: bool
+    :param help_txt: The description of this field that is used in help docs.
+    :type help_txt: str
+    :param **kwargs: Optional keyword arguments to customize the JSON field. More information here:
+        https://dataclass-wizard.readthedocs.io/en/latest/dataclass_wizard.html#dataclass_wizard.json_field
+    :type **kwargs: Any
+    :returns: A JSONField instance with the specified name and optional parameters.
+ :rtype: JSONField + + :raises TypeError: If the provided name is not a string. + """ + # sanitize specified name + if not isinstance(name, str): + raise TypeError("Provided name must be a string.") + json_name = to_camel_case(name) + + # update metadata + meta = kwargs.get("metadata", {}) + meta["env"] = env + meta["help"] = help_txt + kwargs["metadata"] = meta + + # create the data class field + field = json_field(json_name, **kwargs) + return field + + +class _Color: + """A collection of colors used when writing output to the shell.""" + + # pylint: disable=too-few-public-methods; this class does not require methods. + + PURPLE = "\033[95m" + BLUE = "\033[94m" + GREEN = "\033[92m" + YELLOW = "\033[93m" + RED = "\033[91m" + BOLD = "\033[1m" + UNDERLINE = "\033[4m" + END = "\033[0m" + + +class ConfigWizard(JSONWizard, YAMLWizard): # type: ignore[misc] # dataclass-wizard doesn't provide stubs + """A configuration wizard class that can read configuration data from YAML, JSON, and environment variables.""" + + # pylint: disable=arguments-differ,arguments-renamed; this class intentionally reduces arguments for some methods. + + @classmethod + def print_help( + cls, + help_printer: Callable[[str], Any], + *, + env_parent: Optional[str] = None, + json_parent: Optional[Tuple[str, ...]] = None, + ) -> None: + """Print the help documentation for the application configuration with the provided `write` function. + + :param help_printer: The `write` function that will be used to output the data. + :param help_printer: Callable[[str], None] + :param env_parent: The name of the parent environment variable. Leave blank, used for recursion. + :type env_parent: Optional[str] + :param json_parent: The name of the parent JSON key. Leave blank, used for recursion. + :type json_parent: Optional[Tuple[str, ...]] + :returns: A list of tuples with one item per configuration value. Each item will have the environment variable + and a tuple to the path in configuration. + :rtype: List[Tuple[str, Tuple[str, ...]]] + """ + if not env_parent: + env_parent = "" + help_printer("---\n") + if not json_parent: + json_parent = () + + for ( + _, + val, + ) in ( + cls.__dataclass_fields__.items() # pylint: disable=no-member; false positive + ): # pylint: disable=no-member; member is added by dataclass. 
+ jsonname = val.json.keys[0] + envname = jsonname.upper() + full_envname = f"{ENV_BASE}{env_parent}_{envname}" + is_embedded_config = hasattr(val.type, "envvars") + + # print the help data + indent = len(json_parent) * 2 + if is_embedded_config: + default = "" + elif not isinstance(val.default_factory, _MISSING_TYPE): + default = val.default_factory() + elif isinstance(val.default, _MISSING_TYPE): + default = "NO-DEFAULT-VALUE" + else: + default = val.default + help_printer( + f"{_Color.BOLD}{' ' * indent}{jsonname}:{_Color.END} {default}\n" + ) + + # print comments + if is_embedded_config: + indent += 2 + if val.metadata.get("help"): + help_printer(f"{' ' * indent}# {val.metadata['help']}\n") + if not is_embedded_config: + typestr = getattr(val.type, "__name__", None) or str(val.type).replace( + "typing.", "" + ) + help_printer(f"{' ' * indent}# Type: {typestr}\n") + if val.metadata.get("env", True): + help_printer(f"{' ' * indent}# ENV Variable: {full_envname}\n") + # if not is_embedded_config: + help_printer("\n") + + if is_embedded_config: + new_env_parent = f"{env_parent}_{envname}" + new_json_parent = json_parent + (jsonname,) + val.type.print_help( + help_printer, env_parent=new_env_parent, json_parent=new_json_parent + ) + + help_printer("\n") + + @classmethod + def envvars( + cls, + env_parent: Optional[str] = None, + json_parent: Optional[Tuple[str, ...]] = None, + ) -> List[Tuple[str, Tuple[str, ...], type]]: + """Calculate valid environment variables and their config structure location. + + :param env_parent: The name of the parent environment variable. + :type env_parent: Optional[str] + :param json_parent: The name of the parent JSON key. + :type json_parent: Optional[Tuple[str, ...]] + :returns: A list of tuples with one item per configuration value. Each item will have the environment variable, + a tuple to the path in configuration, and they type of the value. + :rtype: List[Tuple[str, Tuple[str, ...], type]] + """ + if not env_parent: + env_parent = "" + if not json_parent: + json_parent = () + output = [] + + for ( + _, + val, + ) in ( + cls.__dataclass_fields__.items() # pylint: disable=no-member; false positive + ): # pylint: disable=no-member; member is added by dataclass. + jsonname = val.json.keys[0] + envname = jsonname.upper() + full_envname = f"{ENV_BASE}{env_parent}_{envname}" + is_embedded_config = hasattr(val.type, "envvars") + + # add entry to output list + if is_embedded_config: + new_env_parent = f"{env_parent}_{envname}" + new_json_parent = json_parent + (jsonname,) + output += val.type.envvars( + env_parent=new_env_parent, json_parent=new_json_parent + ) + elif val.metadata.get("env", True): + output += [(full_envname, json_parent + (jsonname,), val.type)] + + return output + + @classmethod + def from_dict(cls, data: Dict[str, Any]) -> "ConfigWizard": + """Create a ConfigWizard instance from a dictionary. + + :param data: The dictionary containing the configuration data. + :type data: Dict[str, Any] + :returns: A ConfigWizard instance created from the input dictionary. + :rtype: ConfigWizard + + :raises RuntimeError: If the configuration data is not a dictionary. 
+ """ + # sanitize data + if not data: + data = {} + if not isinstance(data, dict): + raise RuntimeError("Configuration data is not a dictionary.") + + # parse env variables + for envvar in cls.envvars(): + var_name, conf_path, var_type = envvar + var_value = os.environ.get(var_name) + if var_value: + var_value = try_json_load(var_value) + update_dict(data, conf_path, var_value) + _LOGGER.debug( + "Found EnvVar Config - %s:%s = %s", + var_name, + str(var_type), + repr(var_value), + ) + + LoadMeta(key_transform="CAMEL").bind_to(cls) + return fromdict(cls, data) # type: ignore[no-any-return] # dataclass-wizard doesn't provide stubs + + @classmethod + def from_file(cls, filepath: str) -> Optional["ConfigWizard"]: + """Load the application configuration from the specified file. + + The file must be either in JSON or YAML format. + + :returns: The fully processed configuration file contents. If the file was unreadable, None will be returned. + :rtype: Optional["ConfigWizard"] + """ + # open the file + try: + # pylint: disable-next=consider-using-with; using a with would make exception handling even more ugly + file = open(filepath, encoding="utf-8") + except FileNotFoundError: + _LOGGER.error("The configuration file cannot be found.") + file = None + except PermissionError: + _LOGGER.error( + "Permission denied when trying to read the configuration file." + ) + file = None + if not file: + return None + + # read the file + try: + data = read_json_or_yaml(file) + except ValueError as err: + _LOGGER.error( + "Configuration file must be valid JSON or YAML. The following errors occured:\n%s", + str(err), + ) + data = None + config = None + finally: + file.close() + + # parse the file + if data: + try: + config = cls.from_dict(data) + except errors.MissingFields as err: + _LOGGER.error( + "Configuration is missing required fields: \n%s", str(err) + ) + config = None + except errors.ParseError as err: + _LOGGER.error("Invalid configuration value provided:\n%s", str(err)) + config = None + else: + config = cls.from_dict({}) + + return config + + +def read_json_or_yaml(stream: TextIO) -> Dict[str, Any]: + """Read a file without knowing if it is JSON or YAML formatted. + + The file will first be assumed to be JSON formatted. If this fails, an attempt to parse the file with the YAML + parser will be made. If both of these fail, an exception will be raised that contains the exception strings returned + by both the parsers. + + :param stream: An IO stream that allows seeking. + :type stream: typing.TextIO + :returns: The parsed file contents. + :rtype: typing.Dict[str, typing.Any]: + :raises ValueError: If the IO stream is not seekable or if the file doesn't appear to be JSON or YAML formatted. 
+ """ + exceptions: Dict[str, Union[None, ValueError, yaml.error.YAMLError]] = { + "JSON": None, + "YAML": None, + } + data: Dict[str, Any] + + # ensure we can rewind the file + if not stream.seekable(): + raise ValueError("The provided stream must be seekable.") + + # attempt to read json + try: + data = json.loads(stream.read()) + except ValueError as err: + exceptions["JSON"] = err + else: + return data + finally: + stream.seek(0) + + # attempt to read yaml + try: + data = yaml.safe_load(stream.read()) + except (yaml.error.YAMLError, ValueError) as err: + exceptions["YAML"] = err + else: + return data + + # neither json nor yaml + err_msg = "\n\n".join( + [key + " Parser Errors:\n" + str(val) for key, val in exceptions.items()] + ) + raise ValueError(err_msg) + + +def try_json_load(value: str) -> Any: + """Try parsing the value as JSON and silently ignore errors. + + :param value: The value on which a JSON load should be attempted. + :type value: str + :returns: Either the parsed JSON or the provided value. + :rtype: typing.Any + """ + try: + return json.loads(value) + except json.JSONDecodeError: + return value + + +def update_dict( + data: Dict[str, Any], + path: Tuple[str, ...], + value: Any, + overwrite: bool = False, +) -> None: + """Update a dictionary with a new value at a given path. + + :param data: The dictionary to be updated. + :type data: Dict[str, Any] + :param path: The path to the key that should be updated. + :type path: Tuple[str, ...] + :param value: The new value to be set at the specified path. + :type value: Any + :param overwrite: If True, overwrite the existing value. Otherwise, don't update if the key already exists. + :type overwrite: bool + :returns: None + """ + end = len(path) + target = data + for idx, key in enumerate(path, 1): + # on the last field in path, update the dict if necessary + if idx == end: + if overwrite or not target.get(key): + target[key] = value + return + + # verify the next hop exists + if not target.get(key): + target[key] = {} + + # if the next hop is not a dict, exit + if not isinstance(target.get(key), dict): + return + + # get next hop + target = target.get(key) # type: ignore[assignment] # type has already been enforced. diff --git a/experimental/fm-asr-streaming-rag/frontend/frontend/pages/__init__.py b/experimental/fm-asr-streaming-rag/frontend/frontend/pages/__init__.py new file mode 100644 index 000000000..573269f2c --- /dev/null +++ b/experimental/fm-asr-streaming-rag/frontend/frontend/pages/__init__.py @@ -0,0 +1,19 @@ +# SPDX-FileCopyrightText: Copyright (c) 2024 NVIDIA CORPORATION & AFFILIATES. All rights reserved. +# SPDX-License-Identifier: Apache-2.0 +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""This module contains definitions for all the frontend pages.""" +from frontend.pages import converse, kb, stats, waveform + +__all__ = ["converse", "kb", "stats", "waveform"] diff --git a/experimental/fm-asr-streaming-rag/frontend/frontend/pages/converse.py b/experimental/fm-asr-streaming-rag/frontend/frontend/pages/converse.py new file mode 100644 index 000000000..727b860c4 --- /dev/null +++ b/experimental/fm-asr-streaming-rag/frontend/frontend/pages/converse.py @@ -0,0 +1,244 @@ +# SPDX-FileCopyrightText: Copyright (c) 2024 NVIDIA CORPORATION & AFFILIATES. All rights reserved. +# SPDX-License-Identifier: Apache-2.0 +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""This module contains the frontend gui for having a conversation.""" +import functools +import os +import logging +from typing import Any, Dict, List, Tuple, Union + +import gradio as gr +import pandas as pd + +from frontend import assets, chat_client + +_LOG_LEVEL = logging.getLevelName(os.environ.get('FRONTEND_LOG_LEVEL', 'WARN').upper()) +_LOGGER = logging.getLogger(__name__) +_LOGGER.setLevel(_LOG_LEVEL) +PATH = "/converse" +TITLE = "Converse" +OUTPUT_TOKENS = 250 +MAX_DOCS = 5 + +_LOCAL_CSS = """ + +#contextbox { + overflow-y: scroll !important; + max-height: 400px; +} + +.submit-button { + height: 40px; + width: 80px; +} + +""" + +BACKEND_OPTIONS = [ + "NVIDIA NIM - Mistral 7B", + "NVIDIA AI Foundation - Mistral 7B", + "NVIDIA AI Foundation - Mixtral 8x7B", + "NVIDIA AI Foundation - Llama 2 70B" +] + +BACKEND_MAPPING = { + "NVIDIA NIM": "triton-trt-llm", + "NVIDIA AI Foundation": "nv-ai-foundation" +} + +MODEL_MAPPING = { + "Mistral 7B": "mistral_7b", + "Mixtral 8x7B": "mixtral_8x7b", + "Llama 2 70B": "llama2_70b" +} + +def get_backend_and_model(option): + backend, model = option.split(' - ') + backend = BACKEND_MAPPING[backend] + model = MODEL_MAPPING[model] + return backend, model + +def build_page(client: chat_client.ChatClient) -> gr.Blocks: + """Buiild the gradio page to be mounted in the frame.""" + kui_theme, kui_styles = assets.load_theme("kaizen") + + with gr.Blocks(title=TITLE, theme=kui_theme, css=kui_styles + _LOCAL_CSS) as page: + + with gr.Row(): + + # Chat column + with gr.Column(scale=3): + gr.Markdown(value="**User Query**") + with gr.Row(equal_height=True): + # TODO: Change this model name. 
+ chatbot = gr.Chatbot(height=700) + context = gr.JSON( + label="Knowledge Base Context", + visible=False, + elem_id="contextbox", + ) + with gr.Row(): + msg = gr.TextArea( + show_label=False, + placeholder="Enter text and press ENTER", + container=False, + lines=3, + scale=7 + ) + submit_btn = gr.Button( + value="Submit", + elem_classes=["submit-button"] + ) + + with gr.Row(): + with gr.Column(): + backend_dropdown = gr.Dropdown( + choices=BACKEND_OPTIONS, + value=BACKEND_OPTIONS[1], + label="LLM Backend / Model", + interactive=True + ) + knowledge_checkbox = gr.Checkbox( + value=True, + label="Use knowledge base", + interactive=True + ) + summary_checkbox = gr.Checkbox( + value=True, + label="Allow long-form summarization", + interactive=True + ) + with gr.Column(): + temp_slider = gr.Slider( + minimum=0, + maximum=1, + value=1, + label="Temperature", + step=0.05, + interactive=True + ) + tokens_slider = gr.Slider( + minimum=32, + maximum=1024, + value=512, + label="Max Tokens", + step=32, + interactive=True + ) + similarity_slider = gr.Slider( + minimum=0, + maximum=1, + value=0.65, + label="Retrieval Similarity Threshold", + step=0.01, + interactive=True + ) + entries_slider = gr.Slider( + minimum=1, + maximum=25, + value=15, + label="Retrieval Max Entries", + step=1, + interactive=True + ) + + # Riva transcript + with gr.Column(scale=2): + + gr.Markdown(value="**Live Riva ASR**") + with gr.Row(): + _ = gr.Textbox(value=get_running_buffer_data(client), + every=0.5, + lines=10, + max_lines=10, + interactive=False, + container=False) + + gr.Markdown(value="**Transcript**") + with gr.Row(): + _ = gr.Textbox(value=get_finalized_buffer_data(client), + every=1, + lines=30, + max_lines=30, + interactive=False, + container=False) + + # form actions + _my_build_stream = functools.partial(_stream_predict, client) + inputs = [ + backend_dropdown, + knowledge_checkbox, + summary_checkbox, + temp_slider, + tokens_slider, + similarity_slider, + entries_slider, + msg, + chatbot + ] + history = [msg, chatbot, context] + msg.submit(_my_build_stream, inputs, history) + submit_btn.click(_my_build_stream, inputs, history) + + page.queue() + return page + +def get_running_buffer_data(client: chat_client.ChatClient) -> callable: + def get_data() -> str: + with client._lock: + data = "".join(client._running_buffer) + return data + return get_data + +def get_finalized_buffer_data(client: chat_client.ChatClient) -> callable: + def get_data() -> str: + with client._lock: + data = "".join(client._finalized_buffer) + return data + return get_data + +def _stream_predict( + client: chat_client.ChatClient, + backend_option: str, + knowledge_checkbox: bool, + summary_checkbox: bool, + temperature: float, + max_tokens: int, + threshold: float, + max_entries: int, + question: str, + chat_history: List[Tuple[str, str]] +) -> Any: + """Make a prediction of the response to the prompt.""" + chunks = "" + backend, model = get_backend_and_model(backend_option) + params = { + "question": question, + "name": model, + "engine": backend, + "use_knowledge_base": knowledge_checkbox, + "allow_summary": summary_checkbox, + "temperature": temperature, + "threshold": threshold, + "max_docs": max_entries, + "num_tokens": max_tokens + } + _LOGGER.info(f"processing inference request - {question} [{params}]") + + documents: Union[None, List[Dict[str, Union[str, float]]]] = None + + for chunk in client.predict(question, params): + chunks += chunk + yield "", chat_history + [[question, chunks]], documents diff --git 
a/experimental/fm-asr-streaming-rag/frontend/frontend/pages/kb.py b/experimental/fm-asr-streaming-rag/frontend/frontend/pages/kb.py new file mode 100644 index 000000000..507163c3f --- /dev/null +++ b/experimental/fm-asr-streaming-rag/frontend/frontend/pages/kb.py @@ -0,0 +1,56 @@ +# SPDX-FileCopyrightText: Copyright (c) 2024 NVIDIA CORPORATION & AFFILIATES. All rights reserved. +# SPDX-License-Identifier: Apache-2.0 +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""This module contains the frontend gui for chat.""" +from pathlib import Path +from typing import List + +import gradio as gr + +from frontend import assets, chat_client + +PATH = "/kb" +TITLE = "Knowledge Base Management" + + +def build_page(client: chat_client.ChatClient) -> gr.Blocks: + """Buiild the gradio page to be mounted in the frame.""" + kui_theme, kui_styles = assets.load_theme("kaizen") + + with gr.Blocks(title=TITLE, theme=kui_theme, css=kui_styles) as page: + # create the page header + gr.Markdown(f"# {TITLE}") + + with gr.Row(): + upload_button = gr.UploadButton( + "Add File", file_types=["pdf"], file_count="multiple" + ) + with gr.Row(): + file_output = gr.File() + + # form actions + upload_button.upload( + lambda files: upload_file(files, client), upload_button, file_output + ) + + page.queue() + return page + + +def upload_file(files: List[Path], client: chat_client.ChatClient) -> List[str]: + """Use the client to upload a file to the knowledge base.""" + file_paths = [file.name for file in files] + client.upload_documents(file_paths) + return file_paths diff --git a/experimental/fm-asr-streaming-rag/frontend/frontend/pages/stats.py b/experimental/fm-asr-streaming-rag/frontend/frontend/pages/stats.py new file mode 100644 index 000000000..af004f6ac --- /dev/null +++ b/experimental/fm-asr-streaming-rag/frontend/frontend/pages/stats.py @@ -0,0 +1,69 @@ +# SPDX-FileCopyrightText: Copyright (c) 2024 NVIDIA CORPORATION & AFFILIATES. All rights reserved. +# SPDX-License-Identifier: Apache-2.0 +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
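Before the statistics page module below, a short sketch of driving the chat client from chat_client.py directly (an annotation, not part of the diff). It mirrors the request parameters assembled by _stream_predict in pages/converse.py; the server URL and the model/engine names are simply the defaults used elsewhere in this PR.

# Sketch only: streaming a response from the chain server's /generate endpoint.
from frontend.chat_client import ChatClient

client = ChatClient(server_url="http://localhost:8000", model_name="mixtral_8x7b")
params = {
    "name": "mixtral_8x7b",        # model name, as produced by converse.MODEL_MAPPING
    "engine": "nv-ai-foundation",  # backend, as produced by converse.BACKEND_MAPPING
    "use_knowledge_base": True,
    "temperature": 1.0,
    "threshold": 0.65,
    "max_docs": 4,
    "num_tokens": 512,
}
answer = ""
for chunk in client.predict("What was discussed in the last broadcast segment?", params):
    answer += chunk                # predict() yields decoded text chunks as they stream in
print(answer)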
+
+"""This module contains the frontend gui for the statistics page."""
+from pathlib import Path
+from typing import List
+from datetime import datetime
+
+import psutil
+import gradio as gr
+
+from frontend import assets
+
+from pynvml import *
+
+PATH = "/stats"
+TITLE = "Statistics Page"
+
+
+def build_page() -> gr.Blocks:
+    """Build the gradio page to be mounted in the frame."""
+    kui_theme, kui_styles = assets.load_theme("kaizen")
+
+    with gr.Blocks(title=TITLE, theme=kui_theme, css=kui_styles) as page:
+        # create the page header
+        gr.Markdown(f"# {TITLE}")
+
+        # TODO: Code to display stats
+        _ = gr.Textbox(value=_get_cpu_stats(), label="CPU Stats", interactive=False, every=2)
+        _ = gr.Textbox(value=_get_gpu_stats(), label="GPU Stats", interactive=False, every=2)
+
+
+    page.queue()
+    return page
+
+def _get_cpu_stats() -> callable:
+    def _cpu_stats():
+        print_str = (f"| CPU Utilization: {psutil.cpu_percent()}% | Memory utilization: {psutil.virtual_memory().percent}% |")
+        return print_str
+    return _cpu_stats
+
+def _get_gpu_stats() -> callable:
+    def _gpu_stats():
+        print_str = ""
+        nvmlInit()
+        deviceCount = nvmlDeviceGetCount()
+        for i in range(deviceCount):
+            handle = nvmlDeviceGetHandleByIndex(i)
+            name = nvmlDeviceGetName(handle)
+            util = nvmlDeviceGetUtilizationRates(handle)
+            mem = nvmlDeviceGetMemoryInfo(handle)
+            # util.gpu and util.memory are integer percentages (0-100)
+            print_str += (f"| Device {i} | {name} | Mem Free: {mem.free/1024**2:5.2f}MB / {mem.total/1024**2:5.2f}MB | gpu-util: {util.gpu:3d}% | gpu-mem: {util.memory:3d}% |\n")
+        return print_str
+    return _gpu_stats
+
+
diff --git a/experimental/fm-asr-streaming-rag/frontend/frontend/pages/waveform.py b/experimental/fm-asr-streaming-rag/frontend/frontend/pages/waveform.py
new file mode 100644
index 000000000..9564742ff
--- /dev/null
+++ b/experimental/fm-asr-streaming-rag/frontend/frontend/pages/waveform.py
@@ -0,0 +1,48 @@
+# SPDX-FileCopyrightText: Copyright (c) 2024 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
+# SPDX-License-Identifier: Apache-2.0
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
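The NVML polling that _gpu_stats performs in pages/stats.py above can also be written with explicit pynvml imports; a standalone sketch (an annotation, not part of the diff) follows. Note that util.gpu and util.memory are integer percentages, while mem.free and mem.total are byte counts.

# Sketch only: the same GPU query as _gpu_stats, with explicit imports and cleanup.
from pynvml import (
    nvmlInit, nvmlShutdown, nvmlDeviceGetCount, nvmlDeviceGetHandleByIndex,
    nvmlDeviceGetName, nvmlDeviceGetUtilizationRates, nvmlDeviceGetMemoryInfo,
)

def gpu_summary() -> str:
    nvmlInit()
    try:
        rows = []
        for i in range(nvmlDeviceGetCount()):
            handle = nvmlDeviceGetHandleByIndex(i)
            util = nvmlDeviceGetUtilizationRates(handle)   # .gpu / .memory: 0-100 integers
            mem = nvmlDeviceGetMemoryInfo(handle)          # .free / .total: bytes
            rows.append(
                f"| Device {i} | {nvmlDeviceGetName(handle)} "
                f"| Mem Free: {mem.free / 1024**2:5.2f}MB / {mem.total / 1024**2:5.2f}MB "
                f"| gpu-util: {util.gpu:3d}% | gpu-mem: {util.memory:3d}% |"
            )
        return "\n".join(rows)
    finally:
        nvmlShutdown()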
+
+"""This module contains the frontend gui for the input audio waveform page."""
+import os
+from pathlib import Path
+from typing import List, Tuple
+
+import gradio as gr
+
+from frontend import assets, chat_client
+
+from math import log2, pow
+
+import numpy as np
+from scipy.fftpack import fft
+# import matplotlib.pyplot as plt
+
+PATH = "/waveform"
+TITLE = "Input Audio Waveform"
+
+def build_page() -> gr.Blocks:
+    """Build the gradio page to be mounted in the frame."""
+    kui_theme, kui_styles = assets.load_theme("kaizen")
+
+    with gr.Blocks(title=TITLE, theme=kui_theme, css=kui_styles) as page:
+        # create the page header
+        gr.Markdown(f"# {TITLE}")
+        _ = gr.Textbox(
+            value="TBD - connect frequency plot here",
+            label="Unimplemented",
+            interactive=False
+        )
+
+    page.queue()
+    return page
\ No newline at end of file
diff --git a/experimental/fm-asr-streaming-rag/frontend/frontend/static/404.html b/experimental/fm-asr-streaming-rag/frontend/frontend/static/404.html
new file mode 100644
index 000000000..44bc144c6
--- /dev/null
+++ b/experimental/fm-asr-streaming-rag/frontend/frontend/static/404.html
@@ -0,0 +1 @@
+
\ No newline at end of file diff --git a/experimental/fm-asr-streaming-rag/frontend/frontend/static/_next/static/ZuyLfTn0WWGPn0wKTmN0V/_buildManifest.js b/experimental/fm-asr-streaming-rag/frontend/frontend/static/_next/static/ZuyLfTn0WWGPn0wKTmN0V/_buildManifest.js new file mode 100644 index 000000000..fe30dcdf8 --- /dev/null +++ b/experimental/fm-asr-streaming-rag/frontend/frontend/static/_next/static/ZuyLfTn0WWGPn0wKTmN0V/_buildManifest.js @@ -0,0 +1 @@ +self.__BUILD_MANIFEST=function(s){return{__rewrites:{afterFiles:[],beforeFiles:[],fallback:[]},"/":["static/chunks/pages/index-bd877b96e2342d55.js"],"/_error":["static/chunks/pages/_error-ee5b5fb91d29d86f.js"],"/converse":[s,"static/chunks/pages/converse-bd575ea9251886b9.js"],"/kb":[s,"static/chunks/pages/kb-8e8e5f97dcec4a53.js"],"/stats":[s,"static/chunks/pages/stats-c181bdd959e91e0f.js"],"/waveform":[s,"static/chunks/pages/waveform-0d7fcdd3766fcbdb.js"],sortedPages:["/","/_app","/_error","/converse","/kb","/stats","/waveform"]}}("static/chunks/78-bc18c16297940613.js"),self.__BUILD_MANIFEST_CB&&self.__BUILD_MANIFEST_CB(); \ No newline at end of file diff --git a/experimental/fm-asr-streaming-rag/frontend/frontend/static/_next/static/ZuyLfTn0WWGPn0wKTmN0V/_ssgManifest.js b/experimental/fm-asr-streaming-rag/frontend/frontend/static/_next/static/ZuyLfTn0WWGPn0wKTmN0V/_ssgManifest.js new file mode 100644 index 000000000..0511aa895 --- /dev/null +++ b/experimental/fm-asr-streaming-rag/frontend/frontend/static/_next/static/ZuyLfTn0WWGPn0wKTmN0V/_ssgManifest.js @@ -0,0 +1 @@ +self.__SSG_MANIFEST=new Set,self.__SSG_MANIFEST_CB&&self.__SSG_MANIFEST_CB(); \ No newline at end of file diff --git a/experimental/fm-asr-streaming-rag/frontend/frontend/static/_next/static/chunks/78-bc18c16297940613.js b/experimental/fm-asr-streaming-rag/frontend/frontend/static/_next/static/chunks/78-bc18c16297940613.js new file mode 100644 index 000000000..1446824c5 --- /dev/null +++ b/experimental/fm-asr-streaming-rag/frontend/frontend/static/_next/static/chunks/78-bc18c16297940613.js @@ -0,0 +1 @@ +(self.webpackChunk_N_E=self.webpackChunk_N_E||[]).push([[78],{6649:function(e,n,t){"use strict";var i,o,r=t(5697),a=t.n(r),s=t(7294);function _extends(){return(_extends=Object.assign||function(e){for(var n=1;n=0||(o[t]=e[t]);return o}var c=(i=o={exports:{}},o.exports,function(e){if("undefined"!=typeof window){var n,t=0,o=!1,r=!1,a=7,s="[iFrameSizer]",c=s.length,d=null,l=window.requestAnimationFrame,u={max:1,scroll:1,bodyScroll:1,documentElementScroll:1},g={},f=null,m={autoResize:!0,bodyBackground:null,bodyMargin:null,bodyMarginV1:8,bodyPadding:null,checkOrigin:!0,inPageLinks:!1,enablePublicMethods:!0,heightCalculationMethod:"bodyOffset",id:"iFrameResizer",interval:32,log:!1,maxHeight:1/0,maxWidth:1/0,minHeight:0,minWidth:0,resizeFrom:"parent",scrolling:!1,sizeHeight:!0,sizeWidth:!1,warningTimeout:5e3,tolerance:0,widthCalculationMethod:"scroll",onClose:function(){return!0},onClosed:function(){},onInit:function(){},onMessage:function(){warn("onMessage function not defined")},onMouseEnter:function(){},onMouseLeave:function(){},onResized:function(){},onScroll:function(){return!0}},h={};window.jQuery&&((n=window.jQuery).fn?n.fn.iFrameResize||(n.fn.iFrameResize=function(e){return this.filter("iframe").each(function(n,t){setupIFrame(t,e)}).end()}):info("","Unable to bind to jQuery, it is not fully loaded.")),"function"==typeof e&&e.amd?e([],factory):i.exports=factory(),window.iFrameResize=window.iFrameResize||factory()}function getMutationObserver(){return 
window.MutationObserver||window.WebKitMutationObserver||window.MozMutationObserver}function addEventListener(e,n,t){e.addEventListener(n,t,!1)}function removeEventListener(e,n,t){e.removeEventListener(n,t,!1)}function isLogEnabled(e){return g[e]?g[e].log:o}function log(e,n){output("log",e,n,isLogEnabled(e))}function info(e,n){output("info",e,n,isLogEnabled(e))}function warn(e,n){output("warn",e,n,!0)}function output(e,n,t,i){if(!0===i&&"object"==typeof window.console){var o;console[e](s+"["+(o="Host page: "+n,window.top!==window.self&&(o=window.parentIFrame&&window.parentIFrame.getId?window.parentIFrame.getId()+": "+n:"Nested host page: "+n),o)+"]",t)}}function iFrameListener(e){function resizeIFrame(){ensureInRange("Height"),ensureInRange("Width"),syncResize(function(){var e;setSize(p),setPagePosition(b),chkEvent(b,"onResized",p)},p,"init")}function ensureInRange(e){var n=Number(g[b]["max"+e]),t=Number(g[b]["min"+e]),i=e.toLowerCase(),o=Number(p[i]);log(b,"Checking "+i+" is in range "+t+"-"+n),on&&(o=n,log(b,"Set "+i+" to max value")),p[i]=""+o}function getMsgBody(e){return m.substr(m.indexOf(":")+a+e)}function sendPageInfoToIframe(e,n){var t;t=function(){var t,i;trigger("Send Page Info","pageInfo:"+(t=document.body.getBoundingClientRect(),JSON.stringify({iframeHeight:(i=p.iframe.getBoundingClientRect()).height,iframeWidth:i.width,clientHeight:Math.max(document.documentElement.clientHeight,window.innerHeight||0),clientWidth:Math.max(document.documentElement.clientWidth,window.innerWidth||0),offsetTop:parseInt(i.top-t.top,10),offsetLeft:parseInt(i.left-t.left,10),scrollTop:window.pageYOffset,scrollLeft:window.pageXOffset,documentHeight:document.documentElement.clientHeight,documentWidth:document.documentElement.clientWidth,windowHeight:window.innerHeight,windowWidth:window.innerWidth})),e,n)},h[n]||(h[n]=setTimeout(function(){h[n]=null,t()},32))}function getElementPosition(e){var n=e.getBoundingClientRect();return getPagePosition(b),{x:Math.floor(Number(n.left)+Number(d.x)),y:Math.floor(Number(n.top)+Number(d.y))}}function scrollRequestFromChild(e){var n=e?getElementPosition(p.iframe):{x:0,y:0},t={x:Number(p.width)+n.x,y:Number(p.height)+n.y};log(b,"Reposition requested from iFrame (offset x:"+n.x+" y:"+n.y+")"),window.top!==window.self?window.parentIFrame?window.parentIFrame["scrollTo"+(e?"Offset":"")](t.x,t.y):warn(b,"Unable to scroll to requested position, window.parentIFrame not found"):(d=t,scrollTo(),log(b,"--"))}function scrollTo(){var e;!1!==chkEvent(b,"onScroll",d)?setPagePosition(b):d=null}function onMouse(e){var n;n={iframe:p.iframe,screenX:p.width,screenY:p.height,type:p.type},chkEvent(b,e,n)}var n,t,i,o,r,l,u,f,m=e.data,p={},b=null;"[iFrameResizerChild]Ready"===m?!function(){for(var e in g)trigger("iFrame requested init",createOutgoingMsg(e),g[e].iframe,e)}():s===(""+m).substr(0,c)&&m.substr(c).split(":")[0]in g?(t=(n=m.substr(c).split(":"))[1]?parseInt(n[1],10):0,i=g[n[0]]&&g[n[0]].iframe,o=getComputedStyle(i),g[b=(p={iframe:i,id:n[0],height:t+("border-box"!==o.boxSizing?0:(o.paddingTop?parseInt(o.paddingTop,10):0)+(o.paddingBottom?parseInt(o.paddingBottom,10):0))+("border-box"!==o.boxSizing?0:(o.borderTopWidth?parseInt(o.borderTopWidth,10):0)+(o.borderBottomWidth?parseInt(o.borderBottomWidth,10):0)),width:n[2],type:n[3]}).id]&&(g[b].loaded=!0),(r=p.type in{true:1,false:1,undefined:1})&&log(b,"Ignoring init message from meta parent page"),!r&&(u=!0,g[l=b]||(u=!1,warn(p.type+" No settings for "+l+". 
Message was: "+m)),u)&&(log(b,"Received: "+m),f=!0,null===p.iframe&&(warn(b,"IFrame ("+p.id+") not found"),f=!1),f&&function(){var n,t=e.origin,i=g[b]&&g[b].checkOrigin;if(i&&""+t!="null"&&!(i.constructor===Array?function(){var e=0,n=!1;for(log(b,"Checking connection is from allowed list of origins: "+i);eg[s]["max"+e])throw Error("Value for min"+e+" can not be greater than max"+e)}chkMinMax("Height"),chkMinMax("Width"),addStyle("maxHeight"),addStyle("minHeight"),addStyle("maxWidth"),addStyle("minWidth")}(),("number"==typeof(g[s]&&g[s].bodyMargin)||"0"===(g[s]&&g[s].bodyMargin))&&(g[s].bodyMarginV1=g[s].bodyMargin,g[s].bodyMargin=""+g[s].bodyMargin+"px"),r=createOutgoingMsg(s),(a=getMutationObserver())&&n.parentNode&&new a(function(e){e.forEach(function(e){Array.prototype.slice.call(e.removedNodes).forEach(function(e){e===n&&closeIFrame(n)})})}).observe(n.parentNode,{childList:!0}),addEventListener(n,"load",function(){var t,i;trigger("iFrame.onload",r,n,e,!0),t=g[s]&&g[s].firstRun,i=g[s]&&g[s].heightCalculationMethod in u,!t&&i&&resetIFrame({iframe:n,height:0,width:0,type:"init"})}),trigger("init",r,n,e,!0),g[s]&&(g[s].iframe.iFrameResizer={close:closeIFrame.bind(null,g[s].iframe),removeListeners:removeIframeListeners.bind(null,g[s].iframe),resize:trigger.bind(null,"Window resize","resize",g[s].iframe),moveToAnchor:function(e){trigger("Move to anchor","moveToAnchor:"+e,g[s].iframe,s)},sendMessage:function(e){trigger("Send Message","message:"+(e=JSON.stringify(e)),g[s].iframe,s)}}))}function debouce(e,n){null===f&&(f=setTimeout(function(){f=null,e()},n))}function tabVisible(){"hidden"!==document.visibilityState&&(log("document","Trigger event: Visiblity change"),debouce(function(){sendTriggerMsg("Tab Visable","resize")},16))}function sendTriggerMsg(e,n){Object.keys(g).forEach(function(t){g[t]&&"parent"===g[t].resizeFrom&&g[t].autoResize&&!g[t].firstRun&&trigger(e,n,g[t].iframe,t)})}function factory(){var n;function init(e,t){t&&(function(){if(t.tagName){if("IFRAME"!==t.tagName.toUpperCase())throw TypeError("Expected