diff --git a/gpu-requirements.txt b/gpu-requirements.txt
index 12ad074..ebdd313 100644
--- a/gpu-requirements.txt
+++ b/gpu-requirements.txt
@@ -1,15 +1,27 @@
 #
-# This file is autogenerated by pip-compile with python 3.9
-# To update, run:
+# This file is autogenerated by pip-compile with Python 3.9
+# by the following command:
 #
 #    pip-compile --output-file=gpu-requirements.txt requirements/gpu-requirements.in
 #
 --extra-index-url https://download.pytorch.org/whl/cu113
 
+aiohttp==3.8.4
+    # via
+    #   cohere
+    #   openai
+aiosignal==1.3.1
+    # via aiohttp
 anyio==3.7.0
     # via
     #   -r requirements/torch-cuda-requirements.txt
     #   starlette
+async-timeout==4.0.2
+    # via aiohttp
+attrs==23.1.0
+    # via aiohttp
+backoff==2.2.1
+    # via cohere
 blis==0.7.9
     # via thinc
 boto3==1.25.0
@@ -32,6 +44,7 @@ certifi==2023.5.7
 charset-normalizer==3.1.0
     # via
     #   -r requirements/torch-cuda-requirements.txt
+    #   aiohttp
     #   requests
 click==8.1.3
     # via
@@ -39,6 +52,8 @@ click==8.1.3
     #   nltk
     #   typer
     #   uvicorn
+cohere==4.11.2
+    # via embedders
 confection==0.0.3
     # via thinc
 cymem==2.0.7
@@ -46,7 +61,7 @@ cymem==2.0.7
     #   preshed
     #   spacy
     #   thinc
-embedders==0.0.18
+embedders==0.1.4
     # via -r requirements/gpu-requirements.in
 exceptiongroup==1.1.1
     # via
@@ -54,12 +69,16 @@ exceptiongroup==1.1.1
     #   anyio
 fastapi==0.95.2
     # via -r requirements/torch-cuda-requirements.txt
-filelock==3.12.0
+filelock==3.12.2
     # via
     #   -r requirements/torch-cuda-requirements.txt
     #   huggingface-hub
     #   transformers
-fsspec==2023.5.0
+frozenlist==1.3.3
+    # via
+    #   aiohttp
+    #   aiosignal
+fsspec==2023.6.0
     # via
     #   -r requirements/torch-cuda-requirements.txt
     #   huggingface-hub
@@ -67,7 +86,7 @@ h11==0.14.0
     # via
     #   -r requirements/torch-cuda-requirements.txt
     #   uvicorn
-huggingface-hub==0.14.1
+huggingface-hub==0.15.1
     # via
     #   -r requirements/torch-cuda-requirements.txt
     #   sentence-transformers
@@ -77,6 +96,9 @@ idna==3.4
     #   -r requirements/torch-cuda-requirements.txt
     #   anyio
     #   requests
+    #   yarl
+importlib-metadata==6.7.0
+    # via cohere
 jinja2==3.1.2
     # via spacy
 jmespath==1.0.1
@@ -94,6 +116,10 @@ markupsafe==2.1.1
     # via jinja2
 minio==7.1.12
     # via -r requirements/torch-cuda-requirements.txt
+multidict==6.0.4
+    # via
+    #   aiohttp
+    #   yarl
 murmurhash==1.0.9
     # via
     #   preshed
@@ -114,6 +140,8 @@ numpy==1.23.4
     #   thinc
     #   torchvision
     #   transformers
+openai==0.27.8
+    # via embedders
 packaging==23.1
     # via
     #   -r requirements/torch-cuda-requirements.txt
@@ -153,7 +181,7 @@ pyyaml==6.0
     #   -r requirements/torch-cuda-requirements.txt
     #   huggingface-hub
     #   transformers
-regex==2023.5.5
+regex==2023.6.3
     # via
     #   -r requirements/torch-cuda-requirements.txt
     #   nltk
@@ -161,7 +189,9 @@ regex==2023.5.5
 requests==2.31.0
     # via
     #   -r requirements/torch-cuda-requirements.txt
+    #   cohere
     #   huggingface-hub
+    #   openai
     #   spacy
     #   torchvision
     #   transformers
@@ -169,6 +199,10 @@ s3transfer==0.6.1
     # via
     #   -r requirements/torch-cuda-requirements.txt
     #   boto3
+safetensors==0.3.1
+    # via
+    #   -r requirements/torch-cuda-requirements.txt
+    #   transformers
 scikit-learn==1.1.3
     # via
     #   embedders
@@ -234,10 +268,11 @@ tqdm==4.65.0
     #   embedders
     #   huggingface-hub
     #   nltk
+    #   openai
     #   sentence-transformers
     #   spacy
     #   transformers
-transformers==4.23.1
+transformers==4.30.0
     # via
     #   -r requirements/torch-cuda-requirements.txt
     #   embedders
@@ -246,7 +281,7 @@ typer==0.4.2
     # via
     #   pathy
     #   spacy
-typing-extensions==4.6.2
+typing-extensions==4.6.3
     # via
     #   -r requirements/torch-cuda-requirements.txt
     #   huggingface-hub
@@ -267,6 +302,10 @@ wasabi==0.10.1
     #   spacy
     #   spacy-loggers
     #   thinc
+yarl==1.9.2
+    # via aiohttp
+zipp==3.15.0
+    # via importlib-metadata
 
 # The following packages are considered to be unsafe in a requirements file:
 # setuptools
diff --git a/gpu.Dockerfile b/gpu.Dockerfile
index 2cec540..80f27d5 100644
--- a/gpu.Dockerfile
+++ b/gpu.Dockerfile
@@ -1,4 +1,4 @@
-FROM kernai/refinery-parent-images:v1.11.0-torch-cuda
+FROM kernai/refinery-parent-images:v1.12.0-torch-cuda
 
 WORKDIR /program
 
diff --git a/requirements/torch-cuda-requirements.txt b/requirements/torch-cuda-requirements.txt
index 30fd7fd..e5ea914 100644
--- a/requirements/torch-cuda-requirements.txt
+++ b/requirements/torch-cuda-requirements.txt
@@ -1,6 +1,6 @@
 #
-# This file is autogenerated by pip-compile with python 3.9
-# To update, run:
+# This file is autogenerated by pip-compile with Python 3.9
+# by the following command:
 #
 #    pip-compile torch-cuda-requirements.in
 #
@@ -26,15 +26,15 @@ exceptiongroup==1.1.1
     # via anyio
 fastapi==0.95.2
     # via -r mini-requirements.in
-filelock==3.12.0
+filelock==3.12.2
     # via
     #   huggingface-hub
     #   transformers
-fsspec==2023.5.0
+fsspec==2023.6.0
     # via huggingface-hub
 h11==0.14.0
     # via uvicorn
-huggingface-hub==0.14.1
+huggingface-hub==0.15.1
     # via transformers
 idna==3.4
     # via
@@ -73,7 +73,7 @@ pyyaml==6.0
     # via
     #   huggingface-hub
     #   transformers
-regex==2023.5.5
+regex==2023.6.3
     # via transformers
 requests==2.31.0
     # via
@@ -82,6 +82,8 @@ requests==2.31.0
     #   transformers
 s3transfer==0.6.1
     # via boto3
+safetensors==0.3.1
+    # via transformers
 six==1.16.0
     # via python-dateutil
 sniffio==1.3.0
@@ -98,9 +100,9 @@ tqdm==4.65.0
     # via
     #   huggingface-hub
     #   transformers
-transformers==4.23.1
+transformers==4.30.0
     # via -r torch-cuda-requirements.in
-typing-extensions==4.6.2
+typing-extensions==4.6.3
     # via
     #   huggingface-hub
     #   pydantic
@@ -112,4 +114,4 @@ urllib3==1.26.16
     #   minio
     #   requests
 uvicorn==0.22.0
-    # via -r mini-requirements.in
\ No newline at end of file
+    # via -r mini-requirements.in