From c963b051e25a1cf8838b3275aa542ff5321f04b1 Mon Sep 17 00:00:00 2001
From: Jason Couture
Date: Mon, 5 Feb 2024 06:16:49 -0500
Subject: [PATCH] Add nuspec for OpenCL (CLBLAST)

---
 .github/prepare_release.sh                     |  9 ++++---
 .github/workflows/compile.yml                  | 10 ++++---
 .../build/LLamaSharp.Backend.OpenCL.nuspec     | 27 +++++++++++++++++++
 3 files changed, 39 insertions(+), 7 deletions(-)
 create mode 100644 LLama/runtimes/build/LLamaSharp.Backend.OpenCL.nuspec

diff --git a/.github/prepare_release.sh b/.github/prepare_release.sh
index c9896c2c..3a5db06f 100755
--- a/.github/prepare_release.sh
+++ b/.github/prepare_release.sh
@@ -71,10 +71,11 @@ dotnet pack ./LLama.KernelMemory/LLamaSharp.KernelMemory.csproj -c Release -o ./
 
 # pack the backends
 cd temp
-nuget pack LLamaSharp.Backend.Cpu.nuspec -version $updated_version
-nuget pack LLamaSharp.Backend.Cuda11.nuspec -version $updated_version
-nuget pack LLamaSharp.Backend.Cuda12.nuspec -version $updated_version
-
+for nuspec in *.nuspec
+do
+  echo "Packing $nuspec"
+  nuget pack $nuspec -version $updated_version
+done
 cd ..
 
 exit 0

diff --git a/.github/workflows/compile.yml b/.github/workflows/compile.yml
index 2efcedcf..0400917f 100644
--- a/.github/workflows/compile.yml
+++ b/.github/workflows/compile.yml
@@ -141,6 +141,9 @@ jobs:
           cmake .. ${{ env.COMMON_DEFINE }} -DLLAMA_CLBLAST=ON -DCMAKE_PREFIX_PATH="$env:RUNNER_TEMP/clblast"
           cmake --build . --config Release -j ${env:NUMBER_OF_PROCESSORS}
           copy $env:RUNNER_TEMP/clblast/lib/clblast.dll .\bin\Release\clblast.dll
+          echo "78a8c98bcb2efe1a63318d901ab204d9ba96c3b29707b4ce0c4240bdcdc698d6  clblast.dll" >> tmp
+          sha256sum -c tmp || exit 255
+          rm tmp
           ls -R
       - name: Build
         if: ${{ matrix.os == 'ubuntu-22.04' }}
@@ -149,7 +152,8 @@
           cd build
           cmake .. ${{ env.COMMON_DEFINE }} -DLLAMA_CLBLAST=ON
           cmake --build . --config Release -j ${env:NUMBER_OF_PROCESSORS}
-          cp $(ldconfig -p | grep libclblast.so | tail -n 1 | cut -d ' ' -f 4) ./
+          # if we ever want to pull libclblast.so back into the packages, just uncomment this line, and the one below for the upload
+          # cp $(ldconfig -p | grep libclblast.so | tail -n 1 | cut -d ' ' -f 4) ./
           ls -R
       - name: Upload artifacts (Windows)
         if: ${{ matrix.os == 'windows-latest' }}
@@ -165,7 +169,7 @@
         with:
           path: |
             ./build/libllama.so
-            ./build/libclblast.so
+            # ./build/libclblast.so
           name: llama-bin-linux-clblast-x64.so
 
   compile-cublas:
@@ -305,7 +309,7 @@
           cp artifacts/llama-bin-linux-cublas-cu12.1.0-x64.so/libllama.so deps/cu12.1.0/libllama.so
 
           cp artifacts/llama-bin-win-clblast-x64.dll/{llama,clblast}.dll deps/clblast/
-          cp artifacts/llama-bin-linux-clblast-x64.so/lib{llama,clblast}.so deps/clblast/
+          cp artifacts/llama-bin-linux-clblast-x64.so/libllama.so deps/clblast/
 
       - name: Upload artifacts
         uses: actions/upload-artifact@v4

diff --git a/LLama/runtimes/build/LLamaSharp.Backend.OpenCL.nuspec b/LLama/runtimes/build/LLamaSharp.Backend.OpenCL.nuspec
new file mode 100644
index 00000000..6ea6c4b5
--- /dev/null
+++ b/LLama/runtimes/build/LLamaSharp.Backend.OpenCL.nuspec
@@ -0,0 +1,27 @@
+<?xml version="1.0" encoding="utf-8"?>
+<package>
+  <metadata>
+    <id>LLamaSharp.Backend.OpenCL</id>
+    <version>$version$</version>
+    <title>LLamaSharp.Backend.OpenCL - OpenCL Backend for LLamaSharp</title>
+    <authors>llama.cpp Authors</authors>
+    <requireLicenseAcceptance>false</requireLicenseAcceptance>
+    <license type="expression">MIT</license>
+    <icon>icon512.png</icon>
+    <projectUrl>https://github.com/SciSharp/LLamaSharp</projectUrl>
+    <description>LLamaSharp.Backend.OpenCL is a backend for LLamaSharp to use with OpenCL.</description>
+    <releaseNotes></releaseNotes>
+    <copyright>Copyright 2023 The llama.cpp Authors. All rights reserved.</copyright>
+    <tags>LLamaSharp LLama LLM GPT AI ChatBot SciSharp</tags>
+  </metadata>
+
+  <files>
+    <!-- <file src="..." target="..." /> entries for the packaged native binaries and icon; not recoverable from this copy of the patch -->
+  </files>
+</package>
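
Note on the Windows CLBLAST build step: pinning the SHA-256 of clblast.dll makes the job fail fast if the cached CLBlast build ever changes underneath it. A rough sketch of checking or refreshing the pin when CLBlast is bumped, assuming the same Git Bash environment the workflow already relies on for sha256sum:

    # verify the DLL just copied into bin/Release against the current pin
    echo "78a8c98bcb2efe1a63318d901ab204d9ba96c3b29707b4ce0c4240bdcdc698d6  clblast.dll" | sha256sum -c -

    # print a fresh hash to paste into the echo line in compile.yml after an upgrade
    sha256sum clblast.dll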
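
Note on the prepare_release.sh change: globbing *.nuspec means any backend nuspec copied into temp/, including the new LLamaSharp.Backend.OpenCL.nuspec, gets packed without further script edits. A rough local equivalent for a single package, assuming nuget.exe is on PATH and using 0.10.0 as a stand-in for $updated_version:

    cd LLama/runtimes/build
    nuget pack LLamaSharp.Backend.OpenCL.nuspec -version 0.10.0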