Accepting request 1247774 from science:machinelearning
Automatic submission by obs-autosubmit

OBS-URL: https://build.opensuse.org/request/show/1247774
OBS-URL: https://build.opensuse.org/package/show/openSUSE:Factory/ollama?expand=0&rev=28

commit 7cb5f0b012

_service (2 changed lines)
@@ -3,7 +3,7 @@
   <service name="obs_scm" mode="manual">
     <param name="url">https://github.com/ollama/ollama.git</param>
     <param name="scm">git</param>
-    <param name="revision">v0.5.7</param>
+    <param name="revision">v0.5.11</param>
     <param name="versionformat">@PARENT_TAG@</param>
     <param name="versionrewrite-pattern">v(.*)</param>
     <param name="changesgenerate">enable</param>
_servicedata
@@ -1,4 +1,4 @@
 <servicedata>
   <service name="tar_scm">
     <param name="url">https://github.com/ollama/ollama.git</param>
-    <param name="changesrevision">a420a453b4783841e3e79c248ef0fe9548df6914</param></service></servicedata>
+    <param name="changesrevision">f8453e9d4a15f5f54b610993e8647d252cb65626</param></service></servicedata>
ollama-0.5.11.obscpio (new file, 3 lines)
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:399e3922a8f876c2f0c5e99ff5c1b0098f5ef3edcd33d808d825ea64fbcd1a4e
+size 19584013
ollama-0.5.7.obscpio (deleted file)
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:efb1f1510c40a71f933e8b1ad3b88acb499f017e1bfd271492d9ccf5b7a69d2c
-size 154515982
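Both obscpio entries are Git LFS pointer files rather than the archives themselves: the packaging repository stores only the version/oid/size stanza, while the actual cpio payload lives in LFS storage. A small sketch for pulling the real archives after cloning such a repository (assumes git-lfs is installed; the repository URL is a placeholder):

    git lfs install                        # one-time: set up the LFS smudge/clean filters
    git clone <package-repo-url> ollama-pkg && cd ollama-pkg
    git lfs pull --include="*.obscpio"     # fetch only the cpio archives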
ollama.changes
@@ -1,3 +1,27 @@
+-------------------------------------------------------------------
+Sat Feb 15 02:48:59 UTC 2025 - Eyad Issa <eyadlorenzo@gmail.com>
+
+- Use Ninja instead of Make and update the build script to
+  match the new version
+
+- Update to version 0.5.11:
+  * No notable changes for Linux
+
+- Update to version 0.5.10:
+  * Fixed issue on multi-GPU Windows and Linux machines where
+    memory estimations would be incorrect
+
+- Update to version 0.5.9:
+  * New model: DeepScaleR
+  * New model: OpenThinker
+
+- Update to version 0.5.8:
+  * Ollama will now use AVX-512 instructions where available for
+    additional CPU acceleration
+  * Fixed indexing error that would occur when downloading a model
+    with ollama run or ollama pull
+  * Fixes cases where download progress would reverse
+
 -------------------------------------------------------------------
 Mon Jan 27 14:21:42 UTC 2025 - Adrian Schröter <adrian@suse.de>
ollama.obsinfo
@@ -1,4 +1,4 @@
 name: ollama
-version: 0.5.7
-mtime: 1737018844
-commit: a420a453b4783841e3e79c248ef0fe9548df6914
+version: 0.5.11
+mtime: 1739515103
+commit: f8453e9d4a15f5f54b610993e8647d252cb65626
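The obsinfo file records the snapshot metadata produced by obs_scm: the version matches the new Version: in the spec below, commit matches the changesrevision above, and mtime is a plain Unix timestamp for the generated archive. A one-liner to read the timestamp in human form (GNU date shown; BSD date would use -r instead of -d):

    date -u -d @1739515103   # resolves to a mid-February 2025 UTC date, consistent with the changelog entry above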
ollama.spec (19 changed lines)
@@ -17,7 +17,7 @@

 Name: ollama
-Version: 0.5.7
+Version: 0.5.11
 Release: 0
 Summary: Tool for running AI models on-premise
 License: MIT
@@ -29,9 +29,11 @@ Source3: %{name}-user.conf
 Source4: sysconfig.ollama
 BuildRequires: cmake >= 3.24
 BuildRequires: git
+BuildRequires: ninja
 BuildRequires: sysuser-tools
 BuildRequires: zstd
 BuildRequires: golang(API) >= 1.22
+Requires(pre): %fillup_prereq
 # 32bit seems not to be supported anymore
 ExcludeArch: %{ix86} %{arm}
 %sysusers_requires
@@ -41,7 +43,6 @@ BuildRequires: libstdc++6-gcc12
 %else
 BuildRequires: gcc-c++ >= 11.4.0
 %endif
-Requires(pre): %{fillup_prereq}
 
 %description
 Ollama is a tool for running AI models on one's own hardware.
@@ -55,6 +56,8 @@ can be imported.
 %autosetup -a1 -p1
 
 %build
+%define __builder ninja
+
 %sysusers_generate_pre %{SOURCE3} %{name} %{name}-user.conf
 
 %ifnarch ppc64
@@ -69,14 +72,19 @@ export GOFLAGS="-mod=vendor"
 export GOFLAGS="${GOFLAGS} -v"
 
-%make_build
+%cmake -UOLLAMA_INSTALL_DIR -DOLLAMA_INSTALL_DIR=%{_libdir}/ollama
+%cmake_build
+
+cd ..
+go build -mod=vendor -buildmode=pie -o %{name} .
 
 %install
+%cmake_install
 install -D -m 0755 %{name} %{buildroot}/%{_bindir}/%{name}
 
 install -D -m 0644 %{SOURCE2} %{buildroot}%{_unitdir}/%{name}.service
 install -D -m 0644 %{SOURCE3} %{buildroot}%{_sysusersdir}/%{name}-user.conf
-install -D -m 0644 %{SOURCE4} %{buildroot}%{_fillupdir}/sysconfig.%name
+install -D -m 0644 %{SOURCE4} %{buildroot}%{_fillupdir}/sysconfig.%{name}
 install -d %{buildroot}%{_localstatedir}/lib/%{name}
 
 mkdir -p "%{buildroot}/%{_docdir}/%{name}"
@@ -111,7 +119,8 @@ go test -v ./...
 %{_bindir}/%{name}
 %{_unitdir}/%{name}.service
 %{_sysusersdir}/%{name}-user.conf
-%{_fillupdir}/sysconfig.%name
+%{_prefix}/lib/ollama
+%{_fillupdir}/sysconfig.%{name}
 %attr(-, ollama, ollama) %{_localstatedir}/lib/%{name}
 
 %changelog
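The %files additions track the new layout: the CMake-installed runtime pieces are packaged from %{_prefix}/lib/ollama, and the sysconfig entry now uses the braced %{name} form to match the install line above. A quick sanity check on a locally built result (the file name pattern is illustrative; adjust it to the actual build output):

    # confirm the new paths actually end up in the binary package
    rpm -qlp ollama-0.5.11-0.*.rpm | grep -E '/usr/lib(64)?/ollama|sysconfig'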
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:8859600b0cadcc09f5301ff86e18d25676e263146758e949aa614a5d6819e829
-size 5374849
+oid sha256:b2062aa78570afafb60f49092a5954dcbd48cbdfc39fff776b5ab031101583ea
+size 5394876