- Update to version 0.11.0:

* New model: OpenAI gpt-oss 20B and 120B
  * Quantization - MXFP4 format

OBS-URL: https://build.opensuse.org/package/show/science:machinelearning/ollama?expand=0&rev=111
commit 838781a04f
committed by Git OBS Bridge on 2025-08-06 12:51:20 +00:00
38 changed files with 1732 additions and 0 deletions
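
As a quick smoke test of the new model support after this update, something like the following should work, assuming the upstream library tags gpt-oss:20b and gpt-oss:120b (a sketch, not part of the package itself):

  # Pull and run the 20B variant; the 120B variant uses the gpt-oss:120b tag.
  # Both ship in the MXFP4 quantization format mentioned in the changelog.
  ollama pull gpt-oss:20b
  ollama run gpt-oss:20b "Hello"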

.gitattributes (vendored, new file, 25 lines)

@@ -0,0 +1,25 @@
## Default LFS
*.7z filter=lfs diff=lfs merge=lfs -text
*.bsp filter=lfs diff=lfs merge=lfs -text
*.bz2 filter=lfs diff=lfs merge=lfs -text
*.gem filter=lfs diff=lfs merge=lfs -text
*.gz filter=lfs diff=lfs merge=lfs -text
*.jar filter=lfs diff=lfs merge=lfs -text
*.lz filter=lfs diff=lfs merge=lfs -text
*.lzma filter=lfs diff=lfs merge=lfs -text
*.obscpio filter=lfs diff=lfs merge=lfs -text
*.oxt filter=lfs diff=lfs merge=lfs -text
*.pdf filter=lfs diff=lfs merge=lfs -text
*.png filter=lfs diff=lfs merge=lfs -text
*.rpm filter=lfs diff=lfs merge=lfs -text
*.tbz filter=lfs diff=lfs merge=lfs -text
*.tbz2 filter=lfs diff=lfs merge=lfs -text
*.tgz filter=lfs diff=lfs merge=lfs -text
*.ttf filter=lfs diff=lfs merge=lfs -text
*.txz filter=lfs diff=lfs merge=lfs -text
*.whl filter=lfs diff=lfs merge=lfs -text
*.xz filter=lfs diff=lfs merge=lfs -text
*.zip filter=lfs diff=lfs merge=lfs -text
*.zst filter=lfs diff=lfs merge=lfs -text
## Specific LFS patterns
vendor.tar.zstd filter=lfs diff=lfs merge=lfs -text
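
As a sketch, tracking another archive pattern with Git LFS follows the same form; the *.tar.zst pattern here is only a hypothetical example:

  # Appends a matching filter line to .gitattributes, like the entries above.
  git lfs track "*.tar.zst"
  git add .gitattributes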

.gitignore (vendored, new file, 1 line)

@@ -0,0 +1 @@
.osc

01-build-verbose.patch (new file, 34 lines)

@@ -0,0 +1,34 @@
diff --git a/llama/make/Makefile.default b/llama/make/Makefile.default
index 95b13a7..6a9b9ef 100644
--- a/llama/make/Makefile.default
+++ b/llama/make/Makefile.default
@@ -24,17 +24,17 @@ all: $(BUILD_RUNNERS) $(DIST_RUNNERS) $(PAYLOAD_RUNNERS)
$(RUNNERS_BUILD_DIR)/$(DEFAULT_RUNNER)/ollama_llama_server$(EXE_EXT): TARGET_CPU_FLAGS=$(CUSTOM_CPU_FLAGS)
$(RUNNERS_BUILD_DIR)/$(DEFAULT_RUNNER)/ollama_llama_server$(EXE_EXT): *.go ./runner/*.go $(COMMON_SRCS) $(COMMON_HDRS)
@-mkdir -p $(dir $@)
- GOARCH=$(ARCH) go build -buildmode=pie $(CPU_GOFLAGS) -trimpath $(if $(CUSTOM_CPU_FLAGS),-tags $(subst $(space),$(comma),$(CUSTOM_CPU_FLAGS))) -o $@ ./runner
+ GOARCH=$(ARCH) go build -buildmode=pie -v $(CPU_GOFLAGS) -trimpath $(if $(CUSTOM_CPU_FLAGS),-tags $(subst $(space),$(comma),$(CUSTOM_CPU_FLAGS))) -o $@ ./runner
$(RUNNERS_BUILD_DIR)/cpu_avx/ollama_llama_server$(EXE_EXT): TARGET_CPU_FLAGS="avx"
$(RUNNERS_BUILD_DIR)/cpu_avx/ollama_llama_server$(EXE_EXT): *.go ./runner/*.go $(COMMON_SRCS) $(COMMON_HDRS)
@-mkdir -p $(dir $@)
- GOARCH=$(ARCH) go build -buildmode=pie $(CPU_GOFLAGS) -trimpath -tags $(subst $(space),$(comma),$(TARGET_CPU_FLAGS)) -o $@ ./runner
+ GOARCH=$(ARCH) go build -buildmode=pie -v $(CPU_GOFLAGS) -trimpath -tags $(subst $(space),$(comma),$(TARGET_CPU_FLAGS)) -o $@ ./runner
$(RUNNERS_BUILD_DIR)/cpu_avx2/ollama_llama_server$(EXE_EXT): TARGET_CPU_FLAGS="avx avx2"
$(RUNNERS_BUILD_DIR)/cpu_avx2/ollama_llama_server$(EXE_EXT): *.go ./runner/*.go $(COMMON_SRCS) $(COMMON_HDRS)
@-mkdir -p $(dir $@)
- GOARCH=$(ARCH) go build -buildmode=pie $(CPU_GOFLAGS) -trimpath -tags $(subst $(space),$(comma),$(TARGET_CPU_FLAGS)) -o $@ ./runner
+ GOARCH=$(ARCH) go build -buildmode=pie -v $(CPU_GOFLAGS) -trimpath -tags $(subst $(space),$(comma),$(TARGET_CPU_FLAGS)) -o $@ ./runner
$(RUNNERS_DIST_DIR)/%: $(RUNNERS_BUILD_DIR)/%
@-mkdir -p $(dir $@)
@@ -44,7 +44,7 @@ $(RUNNERS_PAYLOAD_DIR)/%/ollama_llama_server$(EXE_EXT).gz: $(RUNNERS_BUILD_DIR)/
@-mkdir -p $(dir $@)
${GZIP} --best -c $< > $@
-clean:
+clean:
rm -f $(BUILD_RUNNERS) $(DIST_RUNNERS) $(PAYLOAD_RUNNERS)
.PHONY: clean all
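
The -v flag added by this patch makes go print package names as they are compiled, so OBS build logs show progress instead of appearing idle. A minimal stand-alone illustration of the patched invocation (flags abbreviated, paths assumed):

  # Verbose build of the runner, mirroring the patched Makefile recipe.
  go build -buildmode=pie -v -trimpath -o ollama_llama_server ./runner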

_service (new file, 8 lines)

@@ -0,0 +1,8 @@
<services>
  <service name="format_spec_file" mode="manual" />
  <service name="download_files" mode="manual" />
  <service name="go_modules" mode="manual">
    <param name="compression">zstd</param>
  </service>
</services>
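
All three services are declared with mode="manual", so they only run when triggered explicitly from an osc working copy; with a recent osc this would be (exact subcommand name depends on the osc version):

  # Re-download the upstream tarball and regenerate the zstd-compressed
  # Go vendor archive declared above.
  osc service manualrun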

_servicedata (new file, 4 lines)

@@ -0,0 +1,4 @@
<servicedata>
<service name="tar_scm">
<param name="url">https://github.com/ollama/ollama.git</param>
<param name="changesrevision">88738b357bcd25eea860b59bf7de2f6b94cfc352</param></service></servicedata>

ollama-0.1.45.obscpio (binary, stored with Git LFS, new file; contents not shown)

ollama-0.10.1.tar.gz (new file, 3 lines)

@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:19f791a5a2e27ea1fb7b438b99b2ca2de121dda8d04986928b364c7df2abbc3e
size 10426173

ollama-0.11.3.tar.gz (new file, 3 lines)

@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:635d31891a026fb056f0821c132a9091d320a7c0d4753491656f797e6bd993d6
size 10470406

ollama-0.2.6.obscpio (binary, stored with Git LFS, new file; contents not shown)

ollama-0.2.8.obscpio (binary, stored with Git LFS, new file; contents not shown)

ollama-0.3.0.obscpio (binary, stored with Git LFS, new file; contents not shown)

ollama-0.3.10.obscpio (binary, stored with Git LFS, new file; contents not shown)

ollama-0.3.11.obscpio (binary, stored with Git LFS, new file; contents not shown)

ollama-0.3.14.obscpio (binary, stored with Git LFS, new file; contents not shown)

ollama-0.3.3.obscpio (binary, stored with Git LFS, new file; contents not shown)

ollama-0.3.6.obscpio (binary, stored with Git LFS, new file; contents not shown)

ollama-0.4.0.obscpio (binary, stored with Git LFS, new file; contents not shown)

ollama-0.4.2.obscpio (binary, stored with Git LFS, new file; contents not shown)

ollama-0.5.1.obscpio (binary, stored with Git LFS, new file; contents not shown)

ollama-0.5.11.obscpio (new file, 3 lines)

@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:399e3922a8f876c2f0c5e99ff5c1b0098f5ef3edcd33d808d825ea64fbcd1a4e
size 19584013

ollama-0.5.12.obscpio (new file, 3 lines)

@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:20a2ae14bf9f4fd51ce98b621d370d78667bb2c1bdc4a26ec8cb112bfb01fbf3
size 29856781

ollama-0.5.7.obscpio (new file, 3 lines)

@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:efb1f1510c40a71f933e8b1ad3b88acb499f017e1bfd271492d9ccf5b7a69d2c
size 154515982

ollama-0.6.0.obscpio (new file, 3 lines)

@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:2e8a634f6cc6c986251416d8a1030a31c468238a5df0976b63444d17196cfcc9
size 39767053

ollama-0.6.2.obscpio (new file, 3 lines)

@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:fd36a963599e7fb0cbbbe9f57e9db6d5c7b89ac41f5288523794e249087eef6e
size 36151821

ollama-0.6.5.obscpio (new file, 3 lines)

@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:0af24923aff90ebf6a6a9589b0f888c63970cf7a86aaac5e25dd407363f25bc0
size 39645197

ollama-0.6.6.obscpio (new file, 3 lines)

@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:576b290a43eec3c90366c0ec9ae59169ee73efa85c8421d4c025d7f1caa34f87
size 39154701

ollama-0.6.8.tar.gz (new file, 3 lines)

@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:46e3953373be41fae1699eaca97eb0e5f35b9a83d9652f38901938c1a988634d
size 8330813

ollama-0.7.0.tar.gz (new file, 3 lines)

@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:a27aa521caafd24104bb7b3eb2eec7db9f46c84a66ff526900e60701382a18ae
size 8349937

ollama-0.9.0.tar.gz (new file, 3 lines)

@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:bb3f05e0610b36ceb9cb2df380077a97f47f8ffdd0d828baeb6c5c4fddef244c
size 8353746

ollama-0.9.1.tar.gz (new file, 3 lines)

@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:0057bbd69535c9818ca4beb4c262c50d44919c6d125bce730f1a320d623ac23e
size 8358070

ollama-0.9.5.tar.gz (new file, 3 lines)

@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:b67d056634498cdb81d06a210267e5b2cbcf886e2e64316c65164b7f8962fde2
size 8396484

ollama-user.conf (new file, 4 lines)

@@ -0,0 +1,4 @@
#Type Name ID GECOS Home directory Shell
u ollama - "Ollama" /var/lib/ollama -
g video
m ollama video
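
After installation, the result of this sysusers.d snippet can be checked with standard tools (a sketch; expected output shown as comments):

  # The "u" line creates the ollama system user with /var/lib/ollama as home;
  # the "g"/"m" lines ensure the video group exists and add ollama to it.
  getent passwd ollama
  id -nG ollama   # expected to include: ollama video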

ollama.changes (new file, 1368 lines; diff suppressed because it is too large)

ollama.obsinfo (new file, 4 lines)

@@ -0,0 +1,4 @@
name: ollama
version: 0.6.6
mtime: 1745025185
commit: 88738b357bcd25eea860b59bf7de2f6b94cfc352

ollama.service (new file, 14 lines)

@@ -0,0 +1,14 @@
[Unit]
Description=Ollama Service
After=network-online.target

[Service]
ExecStart=/usr/bin/ollama serve
User=ollama
Group=ollama
Restart=always
RestartSec=3
EnvironmentFile=-/etc/sysconfig/ollama

[Install]
WantedBy=default.target
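
A typical way to enable the unit and adjust it through the sysconfig file it references via EnvironmentFile= (a sketch; the 0.0.0.0 bind address is just an example):

  sudo systemctl enable --now ollama.service
  # Listen on all interfaces instead of the localhost default,
  # then restart so the unit picks up /etc/sysconfig/ollama.
  echo 'OLLAMA_HOST="http://0.0.0.0:11434"' | sudo tee -a /etc/sysconfig/ollama
  sudo systemctl restart ollama.service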

ollama.spec (new file, 131 lines)

@@ -0,0 +1,131 @@
#
# spec file for package ollama
#
# Copyright (c) 2025 SUSE LLC and contributors
#
# All modifications and additions to the file contributed by third parties
# remain the property of their copyright owners, unless otherwise agreed
# upon. The license for this file, and modifications and additions to the
# file, is the same license as for the pristine package itself (unless the
# license for the pristine package is not an Open Source License, in which
# case the license is the MIT License). An "Open Source License" is a
# license that conforms to the Open Source Definition (Version 1.9)
# published by the Open Source Initiative.
# Please submit bugfixes or comments via https://bugs.opensuse.org/
#
%if 0%{?sle_version} && 0%{?sle_version} >= 150600
%global force_gcc_version 12
%endif

Name: ollama
Version: 0.11.3
Release: 0
Summary: Tool for running AI models on-premise
License: MIT
URL: https://ollama.com
Source: https://github.com/ollama/ollama/archive/v%{version}/%{name}-%{version}.tar.gz
Source1: vendor.tar.zstd
Source2: ollama.service
Source3: %{name}-user.conf
Source4: sysconfig.ollama
BuildRequires: cmake >= 3.24
BuildRequires: git-core
BuildRequires: ninja
BuildRequires: sysuser-tools
BuildRequires: zstd
BuildRequires: golang(API) >= 1.24
Requires(pre): %fillup_prereq
# 32bit seems not to be supported anymore
ExcludeArch: %{ix86} %{arm}
%sysusers_requires
%if 0%{?force_gcc_version}
BuildRequires: gcc%{?force_gcc_version}-c++
BuildRequires: libstdc++6-gcc%{?force_gcc_version}
%else
BuildRequires: gcc-c++ >= 11.4.0
BuildRequires: libstdc++6
%endif

%description
Ollama is a tool for running AI models on one's own hardware.
It offers a command-line interface and a RESTful API.
New models can be created or existing ones modified in the
Ollama library using the Modelfile syntax.
Source model weights found on Hugging Face and similar sites
can be imported.

%prep
%autosetup -a1 -p1

%build
%define __builder ninja
%sysusers_generate_pre %{SOURCE3} %{name} %{name}-user.conf
%ifnarch ppc64
export GOFLAGS="-buildmode=pie -mod=vendor"
%endif
%if 0%{?force_gcc_version}
export CXX="g++-%{?force_gcc_version}"
export CC="gcc-%{?force_gcc_version}"
# pie doesn't work with gcc12 on leap
export GOFLAGS="-mod=vendor"
%endif
export GOFLAGS="${GOFLAGS} -v"
%cmake -UOLLAMA_INSTALL_DIR -DOLLAMA_INSTALL_DIR=%{_libdir}/ollama
%cmake_build
cd ..
go build -mod=vendor -buildmode=pie -o %{name} .

%install
%cmake_install
install -D -m 0755 %{name} %{buildroot}/%{_bindir}/%{name}
install -D -m 0644 %{SOURCE2} %{buildroot}%{_unitdir}/%{name}.service
install -D -m 0644 %{SOURCE3} %{buildroot}%{_sysusersdir}/%{name}-user.conf
install -D -m 0644 %{SOURCE4} %{buildroot}%{_fillupdir}/sysconfig.%{name}
install -d %{buildroot}%{_localstatedir}/lib/%{name}
mkdir -p "%{buildroot}/%{_docdir}/%{name}"
cp -Ra docs/* "%{buildroot}/%{_docdir}/%{name}"

%check
%if 0%{?force_gcc_version}
export CXX="g++-%{?force_gcc_version}"
export CC="gcc-%{?force_gcc_version}"
# pie doesn't work with gcc12 on leap
export GOFLAGS="-mod=vendor"
%endif
go test -v ./...

%pre -f %{name}.pre
%service_add_pre %{name}.service

%post
%service_add_post %{name}.service
%fillup_only

%preun
%service_del_preun %{name}.service

%postun
%service_del_postun %{name}.service

%files
%doc README.md
%license LICENSE
%{_docdir}/%{name}
%{_bindir}/%{name}
%{_unitdir}/%{name}.service
%{_sysusersdir}/%{name}-user.conf
%{_prefix}/lib/ollama
%{_fillupdir}/sysconfig.%{name}
%attr(-, ollama, ollama) %{_localstatedir}/lib/%{name}

%changelog
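
A local rebuild of this spec from an osc checkout would look roughly like the following; the repository and architecture names are assumptions that depend on the project configuration:

  # Build in a clean chroot against a Tumbleweed repository.
  osc build --clean openSUSE_Tumbleweed x86_64 ollama.spec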

sysconfig.ollama (new file, 58 lines)

@@ -0,0 +1,58 @@
## Path: Network/Ollama
## Description: Ollama server access
## Type: string
## Default: "http://127.0.0.1:11434"
## ServiceRestart: ollama
#
# Set the host part to 0.0.0.0 to listen on all interfaces (global network access)
#
OLLAMA_HOST="http://127.0.0.1:11434"
## Type: string
## Description: Ollama default quantization type for the K/V cache
## Default: "f16"
## ServiceRestart: ollama
OLLAMA_KV_CACHE_TYPE=f16
## Type: string
## Description: How long a model stays loaded in memory after its last request
## Default: "5m"
## ServiceRestart: ollama
OLLAMA_KEEP_ALIVE=
## Type: string
## Description: Maximum number of parallel requests handled by the server
## Default: ""
## ServiceRestart: ollama
OLLAMA_NUM_PARALLEL=
## Type: string
## Description: Maximum VRAM to be used
## Default: ""
## ServiceRestart: ollama
OLLAMA_MAX_VRAM=
## Type: string
## Description: Ollama runner directory
## Default: ""
## ServiceRestart: ollama
OLLAMA_RUNNERS_DIR=
## Type: string
## Description: Ollama temporary directory
## Default: ""
## ServiceRestart: ollama
OLLAMA_TMPDIR=
## Type: string
## Description: Directory where model files are stored
## Default: ""
## ServiceRestart: ollama
OLLAMA_MODELS=
## Type: string
## Description: Comma-separated list of allowed browser origins (CORS)
## Default: ""
## ServiceRestart: ollama
OLLAMA_ORIGINS=
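
For example, to reduce K/V cache memory use, the cache quantization variable above can be set to one of the quantized types supported upstream (q8_0 here is an assumption about the deployed ollama version):

  # In /etc/sysconfig/ollama, then: systemctl restart ollama
  OLLAMA_KV_CACHE_TYPE=q8_0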

vendor.tar.zstd (new file, 3 lines)

@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:b7f4a3b11e5272a0bb55ba9b5e6f5ba2beaf9007b82084a2c8dc5fda985bf745
size 5613298