This repository was archived by the owner on Jul 4, 2025. It is now read-only.
File tree — 3 files changed: +9 −9 lines
cortex-cpp/engines/cortex.llamacpp — 3 files changed: +9 −9 lines

@@ -97,25 +97,25 @@ jobs:
9797
9898 - os : " windows"
9999 name : " amd64-avx2"
100- runs-on : " windows-latest "
100+ runs-on : " windows-cuda-12-0 "
101101 cmake-flags : " -DLLAMA_AVX2=ON -DLLAMA_NATIVE=OFF -DLLAMA_BUILD_SERVER=ON -DLLAMA_BLAS=ON -DBUILD_SHARED_LIBS=OFF -DCMAKE_BUILD_TYPE=RELEASE"
102102 run-e2e : true
103103
104104 - os : " windows"
105105 name : " amd64-avx"
106- runs-on : " windows-latest "
106+ runs-on : " windows-cuda-12-0 "
107107 cmake-flags : " -DLLAMA_AVX2=OFF -DLLAMA_NATIVE=OFF -DLLAMA_BUILD_SERVER=ON -DLLAMA_BLAS=ON -DBUILD_SHARED_LIBS=OFF -DCMAKE_BUILD_TYPE=RELEASE"
108108 run-e2e : false
109109
110110 - os : " windows"
111111 name : " amd64-avx512"
112- runs-on : " windows-latest "
112+ runs-on : " windows-cuda-12-0 "
113113 cmake-flags : " -DLLAMA_AVX512=ON -DLLAMA_NATIVE=OFF -DLLAMA_BUILD_SERVER=ON -DLLAMA_BLAS=ON -DBUILD_SHARED_LIBS=OFF -DCMAKE_BUILD_TYPE=RELEASE"
114114 run-e2e : false
115115
116116 - os : " windows"
117117 name : " amd64-vulkan"
118- runs-on : " windows-latest "
118+ runs-on : " windows-cuda-12-0 "
119119 cmake-flags : " -DLLAMA_VULKAN=ON -DLLAMA_NATIVE=OFF -DLLAMA_BUILD_SERVER=ON -DBUILD_SHARED_LIBS=OFF -DCMAKE_BUILD_TYPE=RELEASE"
120120 run-e2e : false
121121
@@ -80,28 +80,28 @@ jobs:
8080
8181 - os : " windows"
8282 name : " amd64-avx2"
83- runs-on : " windows-latest "
83+ runs-on : " windows-cuda-12-0 "
8484 cmake-flags : " -DLLAMA_AVX2=ON -DLLAMA_NATIVE=OFF -DLLAMA_BUILD_SERVER=ON -DLLAMA_BLAS=ON -DBUILD_SHARED_LIBS=OFF -DCMAKE_BUILD_TYPE=RELEASE"
8585 run-e2e : true
8686 run-python-e2e : true
8787
8888 - os : " windows"
8989 name : " amd64-avx"
90- runs-on : " windows-latest "
90+ runs-on : " windows-cuda-12-0 "
9191 cmake-flags : " -DLLAMA_AVX2=OFF -DLLAMA_NATIVE=OFF -DLLAMA_BUILD_SERVER=ON -DLLAMA_BLAS=ON -DBUILD_SHARED_LIBS=OFF -DCMAKE_BUILD_TYPE=RELEASE"
9292 run-e2e : false
9393 run-python-e2e : false
9494
9595 - os : " windows"
9696 name : " amd64-avx512"
97- runs-on : " windows-latest "
97+ runs-on : " windows-cuda-12-0 "
9898 cmake-flags : " -DLLAMA_AVX512=ON -DLLAMA_NATIVE=OFF -DLLAMA_BUILD_SERVER=ON -DLLAMA_BLAS=ON -DBUILD_SHARED_LIBS=OFF -DCMAKE_BUILD_TYPE=RELEASE"
9999 run-e2e : false
100100 run-python-e2e : false
101101
102102 - os : " windows"
103103 name : " amd64-vulkan"
104- runs-on : " windows-latest "
104+ runs-on : " windows-cuda-12-0 "
105105 cmake-flags : " -DLLAMA_VULKAN=ON -DLLAMA_NATIVE=OFF -DLLAMA_BUILD_SERVER=ON -DBUILD_SHARED_LIBS=OFF -DCMAKE_BUILD_TYPE=RELEASE"
106106 run-e2e : false
107107 run-python-e2e : false
1 1 # cortex.llamacpp release version
2- set (VERSION 0.1.15 )
2+ set (VERSION 0.1.17 )
33set (ENGINE_VERSION v${VERSION} )
44add_compile_definitions (CORTEX_LLAMACPP_VERSION="${VERSION} " )
55
You can’t perform that action at this time.
0 commit comments