diff --git a/.github/prepare_release.sh b/.github/prepare_release.sh
index c9896c2c8..3a5db06f4 100755
--- a/.github/prepare_release.sh
+++ b/.github/prepare_release.sh
@@ -71,10 +71,11 @@ dotnet pack ./LLama.KernelMemory/LLamaSharp.KernelMemory.csproj -c Release -o ./
 
 # pack the backends
 cd temp
-nuget pack LLamaSharp.Backend.Cpu.nuspec -version $updated_version
-nuget pack LLamaSharp.Backend.Cuda11.nuspec -version $updated_version
-nuget pack LLamaSharp.Backend.Cuda12.nuspec -version $updated_version
-
+for nuspec in *.nuspec
+do
+  echo "Packing $nuspec"
+  nuget pack $nuspec -version $updated_version
+done
 cd ..
 
 exit 0
diff --git a/.github/workflows/compile.yml b/.github/workflows/compile.yml
index 2efcedcf8..f30fd0a1e 100644
--- a/.github/workflows/compile.yml
+++ b/.github/workflows/compile.yml
@@ -141,6 +141,9 @@ jobs:
           cmake .. ${{ env.COMMON_DEFINE }} -DLLAMA_CLBLAST=ON -DCMAKE_PREFIX_PATH="$env:RUNNER_TEMP/clblast"
           cmake --build . --config Release -j ${env:NUMBER_OF_PROCESSORS}
           copy $env:RUNNER_TEMP/clblast/lib/clblast.dll .\bin\Release\clblast.dll
+          echo "78a8c98bcb2efe1a63318d901ab204d9ba96c3b29707b4ce0c4240bdcdc698d6  clblast.dll" >> tmp
+          sha256sum -c tmp || exit 255
+          rm tmp
           ls -R
       - name: Build
         if: ${{ matrix.os == 'ubuntu-22.04' }}
@@ -149,7 +152,8 @@ jobs:
           cd build
           cmake .. ${{ env.COMMON_DEFINE }} -DLLAMA_CLBLAST=ON
           cmake --build . --config Release -j ${env:NUMBER_OF_PROCESSORS}
-          cp $(ldconfig -p | grep libclblast.so | tail -n 1 | cut -d ' ' -f 4) ./
+          # if we ever want to pull libclblast.so back into the packages, just uncomment this line, and the one below for the upload
+          # cp $(ldconfig -p | grep libclblast.so | tail -n 1 | cut -d ' ' -f 4) ./
           ls -R
       - name: Upload artifacts (Windows)
         if: ${{ matrix.os == 'windows-latest' }}
@@ -165,7 +169,7 @@ jobs:
        with:
          path: |
            ./build/libllama.so
-           ./build/libclblast.so
+           # ./build/libclblast.so
          name: llama-bin-linux-clblast-x64.so
 
  compile-cublas:
diff --git a/LLama/runtimes/build/LLamaSharp.Backend.OpenCL.nuspec b/LLama/runtimes/build/LLamaSharp.Backend.OpenCL.nuspec
new file mode 100644
index 000000000..c27505e37
--- /dev/null
+++ b/LLama/runtimes/build/LLamaSharp.Backend.OpenCL.nuspec
@@ -0,0 +1,28 @@
+<?xml version="1.0" encoding="utf-8"?>
+<package>
+  <metadata>
+    <id>LLamaSharp.Backend.OpenCL</id>
+    <version>$version$</version>
+    <title>LLamaSharp.Backend.OpenCL - OpenCL Backend for LLamaSharp</title>
+    <authors>llama.cpp Authors</authors>
+    <requireLicenseAcceptance>false</requireLicenseAcceptance>
+    <license type="expression">MIT</license>
+    <icon>icon512.png</icon>
+    <projectUrl>https://github.com/SciSharp/LLamaSharp</projectUrl>
+    <description>LLamaSharp.Backend.OpenCL is a backend for LLamaSharp to use with OpenCL.</description>
+    <releaseNotes></releaseNotes>
+    <copyright>Copyright 2023 The llama.cpp Authors. All rights reserved.</copyright>
+    <tags>LLamaSharp LLama LLM GPT AI ChatBot SciSharp</tags>
+  </metadata>
+
+  <files>
+
+
+
+
+
+
+
+
+  </files>
+</package>