Merge pull request #78 from SciSharp/rinne-dev
feat: update the llama backends.
AsakusaRinne committed Aug 6, 2023
2 parents 5e15077 + 1b15d28 commit bfe9cc8
Showing 12 changed files with 23 additions and 12 deletions.
14 changes: 7 additions & 7 deletions .github/workflows/main.yml
@@ -12,20 +12,20 @@ jobs:
strategy:
fail-fast: false
matrix:
build: [linux-debug, linux-release, macos-debug, macos-release, windows-debug, windows-release]
build: [linux-debug, linux-release, windows-debug, windows-release]
include:
- build: linux-debug
os: ubuntu-latest
config: debug
- build: linux-release
os: ubuntu-latest
config: release
- build: macos-debug
os: macos-latest
config: debug
- build: macos-release
os: macos-latest
config: release
# - build: macos-debug
# os: macos-latest
# config: debug
# - build: macos-release
# os: macos-latest
# config: release
- build: windows-debug
os: windows-2019
config: debug
14 changes: 10 additions & 4 deletions LLama/Common/FixedSizeQueue.cs
@@ -30,8 +30,11 @@ public FixedSizeQueue(int size)
/// <param name="data"></param>
public FixedSizeQueue(int size, IEnumerable<T> data)
{
#if NETCOREAPP3_0_OR_GREATER
// Try an early check on the amount of data supplied (if possible)
#if NETSTANDARD2_0
var dataCount = data.Count();
if (dataCount > size)
throw new ArgumentException($"The max size set for the queue is {size}, but got {dataCount} initial values.");
#else
if (data.TryGetNonEnumeratedCount(out var count) && count > size)
throw new ArgumentException($"The max size set for the queue is {size}, but got {count} initial values.");
#endif
@@ -42,9 +45,12 @@ public FixedSizeQueue(int size, IEnumerable<T> data)

// Now check if that list is a valid size
if (_storage.Count > _maxSize)
throw new ArgumentException($"The max size set for the queue is {size}, but got {_storage.Count} initial values.");
#if NETSTANDARD2_0
throw new ArgumentException($"The max size set for the queue is {size}, but got {dataCount} initial values.");
#else
throw new ArgumentException($"The max size set for the queue is {size}, but got {count} initial values.");
#endif
}

/// <summary>
/// Replace every item in the queue with the given value
/// </summary>
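The conditional compilation above exists because Enumerable.TryGetNonEnumeratedCount is only available on newer target frameworks (.NET 6 and later), while on netstandard2.0 the fallback Count() may have to enumerate the whole sequence. A minimal, self-contained sketch of the same early size check (the SizeCheckDemo and EnsureFits names are illustrative, not LLamaSharp code):

using System;
using System.Collections.Generic;
using System.Linq;

static class SizeCheckDemo
{
    // Mirrors the constructor's "early check (if possible)" on the supplied data.
    public static void EnsureFits<T>(IEnumerable<T> data, int maxSize)
    {
#if NETSTANDARD2_0
        // Fallback path: Count() may walk the entire sequence.
        var dataCount = data.Count();
        if (dataCount > maxSize)
            throw new ArgumentException($"The max size set for the queue is {maxSize}, but got {dataCount} initial values.");
#else
        // Cheap path (.NET 6+): succeeds only when the count is already known (arrays, lists, ...).
        if (data.TryGetNonEnumeratedCount(out var count) && count > maxSize)
            throw new ArgumentException($"The max size set for the queue is {maxSize}, but got {count} initial values.");
#endif
    }

    static void Main()
    {
        EnsureFits(new[] { 1, 2, 3 }, maxSize: 8);                  // fits, no exception
        try { EnsureFits(Enumerable.Range(0, 10).ToList(), 4); }    // a List<T> has a known count, so this throws
        catch (ArgumentException e) { Console.WriteLine(e.Message); }
    }
}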
4 changes: 4 additions & 0 deletions LLama/LLamaSharp.Runtime.targets
@@ -31,6 +31,10 @@
<CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
<Link>libllama.dylib</Link>
</None>
<None Include="$(MSBuildThisFileDirectory)runtimes/libllama-metal.dylib">
<CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
<Link>libllama-metal.dylib</Link>
</None>
<None Include="$(MSBuildThisFileDirectory)runtimes/ggml-metal.metal">
<CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
<Link>ggml-metal.metal</Link>
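The .targets entries above copy the native binaries, now including libllama-metal.dylib, next to the application output with PreserveNewest. As a rough diagnostic (not part of LLamaSharp), a sketch like the following can confirm that one of those files actually reached the output directory and is loadable on .NET Core 3.0 or later; the candidate file names are assumed from the <Link> values above:

using System;
using System.IO;
using System.Runtime.InteropServices;

class BackendProbe
{
    static void Main()
    {
        var baseDir = AppContext.BaseDirectory;
        // File names taken from the <Link> items in LLamaSharp.Runtime.targets.
        var candidates = new[] { "libllama.dll", "libllama.so", "libllama.dylib", "libllama-metal.dylib" };

        foreach (var name in candidates)
        {
            var path = Path.Combine(baseDir, name);
            if (!File.Exists(path))
                continue;

            if (NativeLibrary.TryLoad(path, out var handle))
            {
                Console.WriteLine($"Loaded native backend: {name}");
                NativeLibrary.Free(handle);
            }
            else
            {
                Console.WriteLine($"Found but could not load: {name}");
            }
        }
    }
}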
3 changes: 2 additions & 1 deletion LLama/Native/NativeApi.cs
@@ -21,7 +21,8 @@ static NativeApi()
"1. No LLamaSharp backend was installed. Please search LLamaSharp.Backend and install one of them. \n" +
"2. You are using a device with only CPU but installed cuda backend. Please install cpu backend instead. \n" +
"3. The backend is not compatible with your system cuda environment. Please check and fix it. If the environment is " +
"expected not to be changed, then consider build llama.cpp from source or submit an issue to LLamaSharp.");
"expected not to be changed, then consider build llama.cpp from source or submit an issue to LLamaSharp.\n" +
"4. One of the dependency of the native library is missed.\n");
}
NativeApi.llama_backend_init(false);
}
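The new fourth hint extends the checklist that NativeApi's static constructor throws when the native backend fails to load. A hedged sketch of that probe-and-explain pattern, using a hypothetical mylib/my_lib_version native symbol rather than the real llama.cpp export:

using System;
using System.Runtime.InteropServices;

static class MyNativeApi
{
    // Hypothetical native export; any cheap call forces the runtime to resolve the library.
    [DllImport("mylib", CallingConvention = CallingConvention.Cdecl)]
    private static extern int my_lib_version();

    static MyNativeApi()
    {
        try
        {
            _ = my_lib_version();
        }
        catch (DllNotFoundException e)
        {
            // Surface actionable guidance instead of a bare loader error.
            throw new InvalidOperationException(
                "The native library cannot be found or loaded. Typical causes:\n" +
                "1. No backend package is installed.\n" +
                "2. A CUDA backend was installed on a CPU-only machine.\n" +
                "3. The backend does not match the local CUDA environment.\n" +
                "4. A dependency of the native library is missing.\n", e);
        }
    }

    public static int Version() => my_lib_version();
}

class Program
{
    static void Main()
    {
        // The runtime wraps static-constructor failures in TypeInitializationException.
        try { Console.WriteLine(MyNativeApi.Version()); }
        catch (TypeInitializationException e) { Console.WriteLine(e.InnerException?.Message); }
    }
}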
Binary file modified LLama/runtimes/libllama-cuda11.dll
Binary file modified LLama/runtimes/libllama-cuda11.so
Binary file modified LLama/runtimes/libllama-cuda12.dll
Binary file modified LLama/runtimes/libllama-cuda12.so
Binary file added LLama/runtimes/libllama-metal.dylib
Binary file modified LLama/runtimes/libllama.dll
Binary file modified LLama/runtimes/libllama.dylib (mode changed 100755 → 100644)
Binary file modified LLama/runtimes/libllama.so
