Accepting request 1150495 from home:LorenDB

I've created a package for Ollama (https://ollama.com) so that users don't have to rely on the upstream install script. I will point out that this build has neither CUDA nor ROCm support enabled; we can't package CUDA for licensing reasons, and ROCm is not yet packaged in Factory. However, for basic CPU-only use, this is better than curling a random script from the interwebs :)

OBS-URL: https://build.opensuse.org/request/show/1150495
OBS-URL: https://build.opensuse.org/package/show/science:machinelearning/ollama?expand=0&rev=1
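As a sketch of what this gives users: once the package is published, installing from the devel project should look roughly like this (the repository URL follows the usual OBS publishing pattern and is an assumption, not part of this request):

# Assumed repository URL and alias; adjust for your distribution
sudo zypper addrepo https://download.opensuse.org/repositories/science:/machinelearning/openSUSE_Tumbleweed/ science_machinelearning
sudo zypper refresh
sudo zypper install ollama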
Guillaume GARDET 2024-02-26 09:11:49 +00:00 committed by Git OBS Bridge
commit 5a3ae9ab21
10 changed files with 183 additions and 0 deletions

.gitattributes vendored Normal file

@@ -0,0 +1,23 @@
## Default LFS
*.7z filter=lfs diff=lfs merge=lfs -text
*.bsp filter=lfs diff=lfs merge=lfs -text
*.bz2 filter=lfs diff=lfs merge=lfs -text
*.gem filter=lfs diff=lfs merge=lfs -text
*.gz filter=lfs diff=lfs merge=lfs -text
*.jar filter=lfs diff=lfs merge=lfs -text
*.lz filter=lfs diff=lfs merge=lfs -text
*.lzma filter=lfs diff=lfs merge=lfs -text
*.obscpio filter=lfs diff=lfs merge=lfs -text
*.oxt filter=lfs diff=lfs merge=lfs -text
*.pdf filter=lfs diff=lfs merge=lfs -text
*.png filter=lfs diff=lfs merge=lfs -text
*.rpm filter=lfs diff=lfs merge=lfs -text
*.tbz filter=lfs diff=lfs merge=lfs -text
*.tbz2 filter=lfs diff=lfs merge=lfs -text
*.tgz filter=lfs diff=lfs merge=lfs -text
*.ttf filter=lfs diff=lfs merge=lfs -text
*.txz filter=lfs diff=lfs merge=lfs -text
*.whl filter=lfs diff=lfs merge=lfs -text
*.xz filter=lfs diff=lfs merge=lfs -text
*.zip filter=lfs diff=lfs merge=lfs -text
*.zst filter=lfs diff=lfs merge=lfs -text

.gitignore vendored Normal file

@@ -0,0 +1 @@
.osc

_service Normal file

@@ -0,0 +1,21 @@
<services>
  <service name="format_spec_file" mode="manual">
  </service>
  <service name="tar_scm" mode="manual">
    <param name="url">https://github.com/ollama/ollama.git</param>
    <param name="scm">git</param>
    <param name="revision">v0.1.27</param>
    <param name="versionformat">@PARENT_TAG@</param>
    <param name="versionrewrite-pattern">v(.*)</param>
    <param name="changesgenerate">enable</param>
    <param name="submodules">enable</param>
    <param name="package-meta">yes</param>
  </service>
  <service name="recompress" mode="manual">
    <param name="file">*.tar</param>
    <param name="compression">gz</param>
  </service>
  <service name="go_modules" mode="manual">
    <param name="compression">xz</param>
  </service>
</services>
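All of these services use mode="manual", so OBS does not re-run them on every commit; the packager refreshes the sources locally and commits the results. A minimal sketch of that workflow, assuming a checked-out package directory:

# Re-run the mode="manual" services: format_spec_file, then tar_scm (fetch the
# v0.1.27 tag), recompress (tar -> tar.gz), and go_modules (build vendor.tar.xz)
osc service manualrun
osc status    # inspect the regenerated archives before committing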

enable-lto.patch Normal file

@@ -0,0 +1,30 @@
diff -rub ollama-0.1.27/llm/generate/gen_linux.sh ollama-0.1.27-patched/llm/generate/gen_linux.sh
--- ollama-0.1.27/llm/generate/gen_linux.sh 2024-02-22 23:41:43.000000000 +0100
+++ ollama-0.1.27-patched/llm/generate/gen_linux.sh 2024-02-25 03:16:43.566940450 +0100
@@ -48,7 +48,7 @@
export CUDACXX=$(command -v nvcc)
fi
fi
-COMMON_CMAKE_DEFS="-DCMAKE_POSITION_INDEPENDENT_CODE=on -DLLAMA_NATIVE=off -DLLAMA_AVX=on -DLLAMA_AVX2=off -DLLAMA_AVX512=off -DLLAMA_FMA=off -DLLAMA_F16C=off"
+COMMON_CMAKE_DEFS="-DCMAKE_POSITION_INDEPENDENT_CODE=on -DLLAMA_LTO=on -DCMAKE_BUILD_TYPE=Release -DLLAMA_NATIVE=off -DLLAMA_AVX=on -DLLAMA_AVX2=off -DLLAMA_AVX512=off -DLLAMA_FMA=off -DLLAMA_F16C=off"
source $(dirname $0)/gen_common.sh
init_vars
git_module_setup
@@ -59,7 +59,7 @@
# llama.cpp, and we'll build only 1 CPU variant in that case as the default.
if [ -n "${OLLAMA_CUSTOM_CPU_DEFS}" ]; then
echo "OLLAMA_CUSTOM_CPU_DEFS=\"${OLLAMA_CUSTOM_CPU_DEFS}\""
- CMAKE_DEFS="${OLLAMA_CUSTOM_CPU_DEFS} -DCMAKE_POSITION_INDEPENDENT_CODE=on ${CMAKE_DEFS}"
+ CMAKE_DEFS="${OLLAMA_CUSTOM_CPU_DEFS} -DCMAKE_POSITION_INDEPENDENT_CODE=on -DLLAMA_LTO=on -DCMAKE_BUILD_TYPE=Release ${CMAKE_DEFS}"
BUILD_DIR="${LLAMACPP_DIR}/build/linux/${ARCH}/cpu"
echo "Building custom CPU"
build
@@ -75,7 +75,7 @@
# -DLLAMA_AVX512_VBMI -- 2018 Intel Cannon Lake
# -DLLAMA_AVX512_VNNI -- 2021 Intel Alder Lake
- COMMON_CPU_DEFS="-DCMAKE_POSITION_INDEPENDENT_CODE=on -DLLAMA_NATIVE=off"
+ COMMON_CPU_DEFS="-DCMAKE_POSITION_INDEPENDENT_CODE=on -DLLAMA_LTO=on -DCMAKE_BUILD_TYPE=Release -DLLAMA_NATIVE=off"
if [ -z "${OLLAMA_CPU_TARGET}" -o "${OLLAMA_CPU_TARGET}" = "cpu" ]; then
#
# CPU first for the default library, set up as lowest common denominator for maximum compatibility (including Rosetta)
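For context, the patch only injects two extra CMake definitions into each variant: -DLLAMA_LTO=on switches on link-time optimization in the vendored llama.cpp build, and -DCMAKE_BUILD_TYPE=Release ensures an optimized build. A standalone configure with the same definitions would look roughly like this (the llm/llama.cpp submodule path is an assumption about the source layout):

# Sketch only: configure the bundled llama.cpp with the flags the patch adds
cmake -S llm/llama.cpp -B build \
    -DCMAKE_POSITION_INDEPENDENT_CODE=on \
    -DLLAMA_LTO=on \
    -DCMAKE_BUILD_TYPE=Release \
    -DLLAMA_NATIVE=off -DLLAMA_AVX=on -DLLAMA_AVX2=off
cmake --build build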

ollama-0.1.27.tar.gz Normal file

@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:c9b7005256616e8161cc2800cec78b0f43ab4c05ae78b18a7337756dacf5b97a
size 63206855

ollama-user.conf Normal file

@@ -0,0 +1,2 @@
#Type Name ID GECOS Home directory Shell
u ollama - "Ollama" /usr/share/ollama -
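This is the sysusers.d(5) format: create a system user (type u) named ollama with an auto-allocated UID (the - field), GECOS string "Ollama", home directory /usr/share/ollama, and the default nologin shell (the trailing -). For illustration, a roughly equivalent manual command (the package itself relies on systemd-sysusers instead):

# Approximate useradd equivalent of the sysusers entry above
sudo useradd --system --comment "Ollama" --home-dir /usr/share/ollama \
    --no-create-home --shell /usr/sbin/nologin ollama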

ollama.changes Normal file

@@ -0,0 +1,5 @@
-------------------------------------------------------------------
Fri Feb 23 21:13:53 UTC 2024 - Loren Burkholder <computersemiexpert@outlook.com>

- Added the Ollama package
- Included a systemd service

ollama.service Normal file

@@ -0,0 +1,13 @@
[Unit]
Description=Ollama Service
After=network-online.target

[Service]
ExecStart=/usr/bin/ollama serve
User=ollama
Group=ollama
Restart=always
RestartSec=3

[Install]
WantedBy=default.target
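Once installed, starting the server and pulling a first model is the usual systemd routine (the model name here is only an example):

sudo systemctl enable --now ollama.service
systemctl status ollama.service   # "ollama serve" should run as the ollama user
ollama run llama2                 # example model; downloaded on first use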

ollama.spec Normal file

@@ -0,0 +1,82 @@
#
# spec file for package ollama
#
# Copyright (c) 2024 SUSE LLC
#
# All modifications and additions to the file contributed by third parties
# remain the property of their copyright owners, unless otherwise agreed
# upon. The license for this file, and modifications and additions to the
# file, is the same license as for the pristine package itself (unless the
# license for the pristine package is not an Open Source License, in which
# case the license is the MIT License). An "Open Source License" is a
# license that conforms to the Open Source Definition (Version 1.9)
# published by the Open Source Initiative.
# Please submit bugfixes or comments via https://bugs.opensuse.org/
#
Name: ollama
Version: 0.1.27
Release: 0
Summary: Get up and running with Llama 2, Mistral, and other large language models
License: MIT
URL: https://ollama.com
Source: %{name}-%{version}.tar.gz
Source1: vendor.tar.xz
Source2: ollama.service
Source3: %{name}-user.conf
Patch0: enable-lto.patch
BuildRequires: cmake >= 3.24
BuildRequires: gcc-c++ >= 11.4.0
BuildRequires: git
BuildRequires: sysuser-tools
BuildRequires: golang(API) >= 1.21
Requires(pre): shadow
%sysusers_requires

%description
Get up and running with Llama 2, Mistral, Gemma, and other large language models.

%prep
%autosetup -a1 -p1

%build
%sysusers_generate_pre %{SOURCE3} %{name} %{name}-user.conf
%ifnarch ppc64
export GOFLAGS="-buildmode=pie -mod=vendor"
%endif
export OLLAMA_SKIP_PATCHING=1
go generate ./...
go build .

%install
install -D -m 0755 %{name} %{buildroot}%{_bindir}/%{name}
install -D -m 0644 %{SOURCE2} %{buildroot}%{_unitdir}/%{name}.service
install -D -m 0644 %{SOURCE3} %{buildroot}%{_sysusersdir}/%{name}-user.conf
install -d %{buildroot}%{_datadir}/%{name}

%pre -f %{name}.pre
%service_add_pre %{name}.service

%post
%service_add_post %{name}.service

%preun
%service_del_preun %{name}.service

%postun
%service_del_postun %{name}.service

%files
%doc README.md
%license LICENSE
%{_bindir}/%{name}
%{_unitdir}/%{name}.service
%{_sysusersdir}/%{name}-user.conf
%attr(-, ollama, ollama) %{_datadir}/%{name}

%changelog
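For reference, the "go generate ./..." call in %build runs the llm/generate scripts (including the gen_linux.sh patched above) to compile the vendored llama.cpp, after which "go build" links the Go binary. A local test build with osc might look like this (repository name assumed):

# Sketch: check out the package and build it locally against Tumbleweed
osc checkout science:machinelearning ollama
cd science:machinelearning/ollama
osc build openSUSE_Tumbleweed x86_64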

vendor.tar.xz Normal file

@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:1668fa3db9f05fbb58eaf3e9200bd23ac93991cdff56234fac154296acc4e419
size 2995404