Accepting request 1240594 from science:machinelearning

OBS-URL: https://build.opensuse.org/request/show/1240594
OBS-URL: https://build.opensuse.org/package/show/openSUSE:Factory/ollama?expand=0&rev=27
This commit is contained in:
Ana Guerrero 2025-01-29 15:10:09 +00:00 committed by Git OBS Bridge
commit 0c42aadc96
12 changed files with 140 additions and 64 deletions

View File

@ -1,34 +0,0 @@
diff --git a/llama/make/Makefile.default b/llama/make/Makefile.default
index 95b13a7..6a9b9ef 100644
--- a/llama/make/Makefile.default
+++ b/llama/make/Makefile.default
@@ -24,17 +24,17 @@ all: $(BUILD_RUNNERS) $(DIST_RUNNERS) $(PAYLOAD_RUNNERS)
$(RUNNERS_BUILD_DIR)/$(DEFAULT_RUNNER)/ollama_llama_server$(EXE_EXT): TARGET_CPU_FLAGS=$(CUSTOM_CPU_FLAGS)
$(RUNNERS_BUILD_DIR)/$(DEFAULT_RUNNER)/ollama_llama_server$(EXE_EXT): *.go ./runner/*.go $(COMMON_SRCS) $(COMMON_HDRS)
@-mkdir -p $(dir $@)
- GOARCH=$(ARCH) go build -buildmode=pie $(CPU_GOFLAGS) -trimpath $(if $(CUSTOM_CPU_FLAGS),-tags $(subst $(space),$(comma),$(CUSTOM_CPU_FLAGS))) -o $@ ./runner
+ GOARCH=$(ARCH) go build -buildmode=pie -v $(CPU_GOFLAGS) -trimpath $(if $(CUSTOM_CPU_FLAGS),-tags $(subst $(space),$(comma),$(CUSTOM_CPU_FLAGS))) -o $@ ./runner
$(RUNNERS_BUILD_DIR)/cpu_avx/ollama_llama_server$(EXE_EXT): TARGET_CPU_FLAGS="avx"
$(RUNNERS_BUILD_DIR)/cpu_avx/ollama_llama_server$(EXE_EXT): *.go ./runner/*.go $(COMMON_SRCS) $(COMMON_HDRS)
@-mkdir -p $(dir $@)
- GOARCH=$(ARCH) go build -buildmode=pie $(CPU_GOFLAGS) -trimpath -tags $(subst $(space),$(comma),$(TARGET_CPU_FLAGS)) -o $@ ./runner
+ GOARCH=$(ARCH) go build -buildmode=pie -v $(CPU_GOFLAGS) -trimpath -tags $(subst $(space),$(comma),$(TARGET_CPU_FLAGS)) -o $@ ./runner
$(RUNNERS_BUILD_DIR)/cpu_avx2/ollama_llama_server$(EXE_EXT): TARGET_CPU_FLAGS="avx avx2"
$(RUNNERS_BUILD_DIR)/cpu_avx2/ollama_llama_server$(EXE_EXT): *.go ./runner/*.go $(COMMON_SRCS) $(COMMON_HDRS)
@-mkdir -p $(dir $@)
- GOARCH=$(ARCH) go build -buildmode=pie $(CPU_GOFLAGS) -trimpath -tags $(subst $(space),$(comma),$(TARGET_CPU_FLAGS)) -o $@ ./runner
+ GOARCH=$(ARCH) go build -buildmode=pie -v $(CPU_GOFLAGS) -trimpath -tags $(subst $(space),$(comma),$(TARGET_CPU_FLAGS)) -o $@ ./runner
$(RUNNERS_DIST_DIR)/%: $(RUNNERS_BUILD_DIR)/%
@-mkdir -p $(dir $@)
@@ -44,7 +44,7 @@ $(RUNNERS_PAYLOAD_DIR)/%/ollama_llama_server$(EXE_EXT).gz: $(RUNNERS_BUILD_DIR)/
@-mkdir -p $(dir $@)
${GZIP} --best -c $< > $@
-clean:
+clean:
rm -f $(BUILD_RUNNERS) $(DIST_RUNNERS) $(PAYLOAD_RUNNERS)
.PHONY: clean all

View File

@ -3,7 +3,7 @@
<service name="obs_scm" mode="manual">
<param name="url">https://github.com/ollama/ollama.git</param>
<param name="scm">git</param>
<param name="revision">v0.5.1</param>
<param name="revision">v0.5.7</param>
<param name="versionformat">@PARENT_TAG@</param>
<param name="versionrewrite-pattern">v(.*)</param>
<param name="changesgenerate">enable</param>

View File

@ -1,4 +1,4 @@
<servicedata>
<service name="tar_scm">
<param name="url">https://github.com/ollama/ollama.git</param>
<param name="changesrevision">de52b6c2f90ff220ed9469167d51e3f5d7474fa2</param></service></servicedata>
<param name="changesrevision">a420a453b4783841e3e79c248ef0fe9548df6914</param></service></servicedata>

BIN
ollama-0.5.1.obscpio (Stored with Git LFS)

Binary file not shown.

3
ollama-0.5.7.obscpio Normal file
View File

@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:efb1f1510c40a71f933e8b1ad3b88acb499f017e1bfd271492d9ccf5b7a69d2c
size 154515982

View File

@ -1,3 +1,64 @@
-------------------------------------------------------------------
Mon Jan 27 14:21:42 UTC 2025 - Adrian Schröter <adrian@suse.de>
- Make ollama configurable by the admin via /etc/sysconfig/ollama
(boo#1236008)
- cleanup reproducible.patch
-------------------------------------------------------------------
Thu Jan 16 23:52:45 UTC 2025 - Eyad Issa <eyadlorenzo@gmail.com>
- Removed 01-build-verbose.patch: embedded GOFLAG into .spec file
- Disabled reproducible.patch: should not be needed, as .gz is
not produced anymore
- Update to version 0.5.7:
* Fixed issue where using two FROM commands in Modelfile
* Support importing Command R and Command R+ architectures
from safetensors
- Update to version 0.5.6:
* Fixed errors that would occur when running ollama create on
Windows and when using absolute paths
- Update to version 0.5.5:
* New models:
~ Phi-4
~ Command R7B
~ DeepSeek-V3
~ OLMo 2
~ Dolphin 3
~ SmallThinker
~ Granite 3.1 Dense
~ Granite 3.1 MoE
* The /api/create API endpoint that powers ollama create has
been changed to improve conversion time and also accept a JSON
object.
* Fixed runtime error that would occur when filling the model's
context window
* Fixed crash that would occur when quotes were used in /save
* Fixed errors that would occur when sending x-stainless headers
from OpenAI clients
- Update to version 0.5.4:
* New model: Falcon3
* Fixed issue where providing null to format would result in
an error
- Update to version 0.5.3:
* Fixed runtime errors on older Intel Macs
* Fixed issue where setting the format field to "" would cause
an error
- Update to version 0.5.2:
* New model: EXAONE 3.5
* Fixed issue where whitespace would get trimmed from prompt
when images were provided
* Improved memory estimation when scheduling models
* OLLAMA_ORIGINS will now check hosts in a case-insensitive
manner
-------------------------------------------------------------------
Thu Dec 12 14:00:56 UTC 2024 - Bernhard Wiedemann <bwiedemann@suse.com>

View File

@ -1,4 +1,4 @@
name: ollama
version: 0.5.1
mtime: 1733523195
commit: de52b6c2f90ff220ed9469167d51e3f5d7474fa2
version: 0.5.7
mtime: 1737018844
commit: a420a453b4783841e3e79c248ef0fe9548df6914

View File

@ -8,6 +8,7 @@ User=ollama
Group=ollama
Restart=always
RestartSec=3
EnvironmentFile=-/etc/sysconfig/ollama
[Install]
WantedBy=default.target

View File

@ -1,7 +1,7 @@
#
# spec file for package ollama
#
# Copyright (c) 2024 SUSE LLC
# Copyright (c) 2025 SUSE LLC
#
# All modifications and additions to the file contributed by third parties
# remain the property of their copyright owners, unless otherwise agreed
@ -17,7 +17,7 @@
Name: ollama
Version: 0.5.1
Version: 0.5.7
Release: 0
Summary: Tool for running AI models on-premise
License: MIT
@ -26,8 +26,7 @@ Source: %{name}-%{version}.tar
Source1: vendor.tar.zstd
Source2: ollama.service
Source3: %{name}-user.conf
Patch1: 01-build-verbose.patch
Patch2: reproducible.patch
Source4: sysconfig.ollama
BuildRequires: cmake >= 3.24
BuildRequires: git
BuildRequires: sysuser-tools
@ -42,6 +41,7 @@ BuildRequires: libstdc++6-gcc12
%else
BuildRequires: gcc-c++ >= 11.4.0
%endif
Requires(pre): %{fillup_prereq}
%description
Ollama is a tool for running AI models on one's own hardware.
@ -67,15 +67,16 @@ export CC=gcc-12
export GOFLAGS="-mod=vendor"
%endif
export OLLAMA_SKIP_PATCHING=1
export GOFLAGS="${GOFLAGS} -v"
go generate ./...
go build -v .
%make_build
%install
install -D -m 0755 %{name} %{buildroot}/%{_bindir}/%{name}
install -D -m 0644 %{SOURCE2} %{buildroot}%{_unitdir}/%{name}.service
install -D -m 0644 %{SOURCE3} %{buildroot}%{_sysusersdir}/%{name}-user.conf
install -D -m 0644 %{SOURCE4} %{buildroot}%{_fillupdir}/sysconfig.%name
install -d %{buildroot}%{_localstatedir}/lib/%{name}
mkdir -p "%{buildroot}/%{_docdir}/%{name}"
@ -95,6 +96,7 @@ go test -v ./...
%post
%service_add_post %{name}.service
%fillup_only
%preun
%service_del_preun %{name}.service
@ -109,6 +111,7 @@ go test -v ./...
%{_bindir}/%{name}
%{_unitdir}/%{name}.service
%{_sysusersdir}/%{name}-user.conf
%{_fillupdir}/sysconfig.%name
%attr(-, ollama, ollama) %{_localstatedir}/lib/%{name}
%changelog

View File

@ -1,13 +0,0 @@
diff --git a/llama/make/Makefile.default b/llama/make/Makefile.default
index 6a9b9ef..2ab7dfe 100644
--- a/llama/make/Makefile.default
+++ b/llama/make/Makefile.default
@@ -42,7 +42,7 @@ $(RUNNERS_DIST_DIR)/%: $(RUNNERS_BUILD_DIR)/%
$(RUNNERS_PAYLOAD_DIR)/%/ollama_llama_server$(EXE_EXT).gz: $(RUNNERS_BUILD_DIR)/%/ollama_llama_server$(EXE_EXT)
@-mkdir -p $(dir $@)
- ${GZIP} --best -c $< > $@
+ ${GZIP} -n --best -c $< > $@
clean:
rm -f $(BUILD_RUNNERS) $(DIST_RUNNERS) $(PAYLOAD_RUNNERS)

58
sysconfig.ollama Normal file
View File

@ -0,0 +1,58 @@
## Path: Network/Ollama
## Description: Ollama server access
## Type: string
## Default: "http://127.0.0.1:11434"
## ServiceRestart: ollama
#
# set it to 0.0.0.0 for global network access
#
OLLAMA_HOST="http://127.0.0.1:11434"
## Type: string
## Description: Ollama default quantization type for the K/V cache
## Default: "f16"
## ServiceRestart: ollama
OLLAMA_KV_CACHE_TYPE=f16
## Type: string
## Description: How long models stay loaded in memory (presumably a duration such as "5m" — confirm against ollama docs)
## Default: ""
## ServiceRestart: ollama
OLLAMA_KEEP_ALIVE=
## Type: string
## Description: Parallel processes
## Default: ""
## ServiceRestart: ollama
OLLAMA_NUM_PARALLEL=
## Type: string
## Description: Maximal memory to be used
## Default: ""
## ServiceRestart: ollama
OLLAMA_MAX_VRAM=
## Type: string
## Description: Ollama runner directory
## Default: ""
## ServiceRestart: ollama
OLLAMA_RUNNERS_DIR=
## Type: string
## Description: Ollama temporary directory
## Default: ""
## ServiceRestart: ollama
OLLAMA_TMPDIR=
## Type: string
## Description: Models to be loaded by default
## Default: ""
## ServiceRestart: ollama
OLLAMA_MODELS=
## Type: string
## Description: List of allowed remote hosts
## Default: ""
## ServiceRestart: ollama
OLLAMA_ORIGINS=

BIN
vendor.tar.zstd (Stored with Git LFS)

Binary file not shown.