diff --git a/ollama.spec b/ollama.spec
index 8b6e211..e8f2047 100644
--- a/ollama.spec
+++ b/ollama.spec
@@ -36,6 +36,8 @@
 BuildRequires: golang(API) >= 1.22
 %{sysusers_requires}
 
+Patch24300: riscv.patch
+
 %description
 Ollama is a tool for running AI models on one's own hardware. It offers a
 command-line interface and a RESTful API.
diff --git a/riscv.patch b/riscv.patch
new file mode 100644
index 0000000..8f50500
--- /dev/null
+++ b/riscv.patch
@@ -0,0 +1,38 @@
+Index: ollama-0.1.37/llm/llm.go
+===================================================================
+--- ollama-0.1.37.orig/llm/llm.go
++++ ollama-0.1.37/llm/llm.go
+@@ -7,6 +7,7 @@ package llm
+ // #cgo windows,arm64 LDFLAGS: ${SRCDIR}/build/windows/arm64_static/libllama.a -static -lstdc++
+ // #cgo linux,amd64 LDFLAGS: ${SRCDIR}/build/linux/x86_64_static/libllama.a -lstdc++
+ // #cgo linux,arm64 LDFLAGS: ${SRCDIR}/build/linux/arm64_static/libllama.a -lstdc++
++// #cgo linux,riscv64 LDFLAGS: ${SRCDIR}/build/linux/riscv64_static/libllama.a -lstdc++
+ // #include <stdlib.h>
+ // #include "llama.h"
+ import "C"
+Index: ollama-0.1.37/vendor/github.com/chewxy/math32/stubs_riscv64.s
+===================================================================
+--- /dev/null
++++ ollama-0.1.37/vendor/github.com/chewxy/math32/stubs_riscv64.s
+@@ -0,0 +1,21 @@
++#include "textflag.h"
++
++// func Exp(x float32) float32
++TEXT ·Exp(SB),NOSPLIT,$0
++	JMP ·exp(SB)
++
++// func Exp2(x float32) float32
++TEXT ·Exp2(SB),NOSPLIT,$0
++	JMP ·exp2(SB)
++
++// func Log(x float32) float32
++TEXT ·Log(SB),NOSPLIT,$0
++	JMP ·log(SB)
++
++// func Remainder(x, y float32) float32
++TEXT ·Remainder(SB),NOSPLIT,$0
++	JMP ·remainder(SB)
++
++// func Sqrt(x float32) float32
++TEXT ·Sqrt(SB),NOSPLIT,$0
++	JMP ·sqrt(SB)
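
Note on the stubs_riscv64.s file: math32 exports functions such as Exp and Sqrt whose bodies are supplied per architecture in assembly; on riscv64 no optimized kernels exist, so each stub simply JMPs to the package's lowercase pure-Go fallback (·exp, ·sqrt, ...). The sketch below shows that forwarding pattern as plain Go. It is illustrative only: the exported/fallback split mirrors the stub file above, but the fallback body (widening to float64) is an assumption, not math32's actual implementation.

    package main

    import (
    	"fmt"
    	"math"
    )

    // exp stands in for the lowercase portable fallback that the
    // "JMP ·exp(SB)" instruction in stubs_riscv64.s targets.
    // Assumed implementation: round-trip through float64.
    func exp(x float32) float32 { return float32(math.Exp(float64(x))) }

    // Exp is the exported entry point. On riscv64 the new stub file
    // provides this symbol in assembly and forwards it to exp; here the
    // forwarding is written as an ordinary Go call.
    func Exp(x float32) float32 { return exp(x) }

    func main() {
    	fmt.Println(Exp(1.0)) // prints roughly 2.7182817
    }
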