Commit
upgrade to torch2.3
nobody committed Jun 5, 2024
1 parent 87d1702 commit 0fa8e77
Showing 2 changed files with 6 additions and 12 deletions.
4 changes: 2 additions & 2 deletions .github/workflows/docker_rdma_latest.yml
@@ -26,9 +26,9 @@ jobs:
       username: ${{ secrets.DOCKERHUB_USERNAME }}
       password: ${{ secrets.DOCKERHUB_TOKEN }}
     -
-      name: Build and push cu12.1 with rdma
+      name: Build and push cu12.4 with rdma
       uses: docker/build-push-action@v3
       with:
         context: ./docker/rdma/
         push: true
-        tags: dptechnology/unicore:latest-pytorch2.1.0-cuda12.1-rdma
+        tags: dptechnology/unicore:latest-pytorch2.3.0-cuda12.4-rdma
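
As a quick sanity check once the workflow pushes the retagged image, the framework versions baked into it can be read back out of the container. A minimal sketch, assuming Docker is available locally and the tag above has been published:

    # Print the PyTorch and CUDA versions bundled in the new image.
    docker run --rm dptechnology/unicore:latest-pytorch2.3.0-cuda12.4-rdma \
        python -c "import torch; print(torch.__version__, torch.version.cuda)"

This should report a PyTorch 2.3 build against CUDA 12.4, matching the new tag.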
14 changes: 4 additions & 10 deletions docker/rdma/Dockerfile
@@ -1,4 +1,4 @@
-FROM nvcr.io/nvidia/pytorch:23.04-py3
+FROM nvcr.io/nvidia/pytorch:24.03-py3
 
 RUN APT_INSTALL="apt-get install -y --no-install-recommends" && \
     rm -rf /var/lib/apt/lists/* \
@@ -62,18 +62,12 @@ RUN cd /tmp && \
     git clone https://github.com/dptech-corp/Uni-Core && \
     cd Uni-Core && \
     python setup.py install && \
-    rm -rf /tmp/* && rm -rf ~/.cache/pip
-
-RUN pip3 uninstall flash_attn -y
-
-RUN cd /tmp && \
-    git clone https://github.com/Dao-AILab/flash-attention.git && \
-    cd flash-attention && \
-    python setup.py install && \
-    rm -rf /tmp/* && rm -rf ~/.cache/pip
+    rm -rf /tmp/* && rm -rf ~/.cache/pip
 
 RUN pip3 install --no-cache-dir tokenizers lmdb biopython ml-collections timeout-decorator urllib3 tree dm-tree && rm -rf ~/.cache/pip
 
+RUN MAX_JOBS=4 pip3 install -U 'flash-attn<2.5.0' --no-build-isolation --no-cache-dir
+
 RUN ldconfig && \
     apt-get clean && \
     apt-get autoremove && \
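
Two things change in this Dockerfile: the base image moves from the 23.04 to the 24.03 NGC PyTorch container (which ships a PyTorch 2.3 build against CUDA 12.4), and the from-source flash-attention build is replaced by a plain pip install, pinned below 2.5.0, with --no-build-isolation so the build sees the container's torch and MAX_JOBS=4 capping parallel compile jobs if pip does build from source. A minimal post-build check, assuming the image built successfully (the exact version printed depends on what pip resolves under the <2.5.0 pin):

    # Inside the built image: confirm flash-attn resolved below the 2.5.0 pin
    # and imports cleanly against the container's PyTorch.
    python -c "import flash_attn; print(flash_attn.__version__)"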
