split out rllm common from rllm-cuda #43

Workflow file for this run

name: rLLM with CUDA

on:
  push:
    branches: [ "main" ]
  pull_request:
    branches: [ "main" ]

env:
  CARGO_TERM_COLOR: always
  # Build CUDA kernels for compute capability 8.0 (Ampere).
  TORCH_CUDA_ARCH_LIST: 8.0
  CUDA_COMPUTE_CAP: 80
  # Link against the libtorch bundled with the pip-installed PyTorch,
  # skipping the strict libtorch version check.
  LIBTORCH_USE_PYTORCH: 1
  LIBTORCH_BYPASS_VERSION_CHECK: 1

jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
        with:
          submodules: true
      # Cache compiler output between runs.
      - uses: hendrikmuhs/ccache-action@v1.2
      - run: pip install torch==2.1.0
      - uses: Jimver/cuda-toolkit@v0.2.13
        id: cuda-toolkit
        with:
          cuda: '12.3.2'
      - run: echo "Installed cuda version is ${{ steps.cuda-toolkit.outputs.cuda }}"
      - run: echo "Cuda install location ${{ steps.cuda-toolkit.outputs.CUDA_PATH }}"
      - run: nvcc -V
      - name: Build rLLM
        run: cargo build --verbose --release
        working-directory: rllm-cuda
      - run: strip target/release/rllm-server
      - name: Artifact upload
        uses: actions/upload-artifact@v4
        with:
          name: rllm-cuda
          path: target/release/rllm-server
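
For reference, the build portion of this job can be approximated locally. The following is a minimal sketch, not an official script: it assumes a Linux host with a CUDA 12.3 toolkit on PATH, Python with pip available, and that it is run from the repository root, where (as in the workflow's strip and upload steps) the binary lands in the workspace-level target directory.

# Minimal local approximation of the CI job above (assumptions noted in the text).
pip install torch==2.1.0
export TORCH_CUDA_ARCH_LIST=8.0
export CUDA_COMPUTE_CAP=80
export LIBTORCH_USE_PYTORCH=1
export LIBTORCH_BYPASS_VERSION_CHECK=1
nvcc -V   # confirm the CUDA toolkit is visible
(cd rllm-cuda && cargo build --verbose --release)
strip target/release/rllm-server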