Accepting request 1190824 from science:machinelearning
OBS-URL: https://build.opensuse.org/request/show/1190824
OBS-URL: https://build.opensuse.org/package/show/openSUSE:Factory/ollama?expand=0&rev=16

Commit: 9f8f3754a4

_service (2 changed lines)
@@ -3,7 +3,7 @@
   <service name="obs_scm" mode="manual">
     <param name="url">https://github.com/ollama/ollama.git</param>
     <param name="scm">git</param>
-    <param name="revision">v0.3.0</param>
+    <param name="revision">v0.3.1</param>
     <param name="versionformat">@PARENT_TAG@</param>
     <param name="versionrewrite-pattern">v(.*)</param>
    <param name="changesgenerate">enable</param>
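The revision bump in _service drives the rest of this request: obs_scm checks out the v0.3.1 tag, versionformat/versionrewrite-pattern turn it into package version 0.3.1, and changesgenerate records the new upstream commit as changesrevision in _servicedata (next hunk). As a rough sketch, assuming a checked-out working copy of the package and a recent osc, the mode="manual" services can be re-run locally before submitting:

    # re-run the manual services: refetch the tag, regenerate the obscpio archive,
    # ollama.obsinfo and the _servicedata changesrevision
    osc service manualrun

The regenerated ollama-0.3.1.obscpio, ollama.obsinfo and _servicedata are then committed together with the spec update.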

_servicedata
@@ -1,4 +1,4 @@
 <servicedata>
 <service name="tar_scm">
   <param name="url">https://github.com/ollama/ollama.git</param>
-  <param name="changesrevision">bbf8f102ee06bd6b149e4999571c0844aa47b12f</param></service></servicedata>
+  <param name="changesrevision">1a83581a8e1063418f5f1fec14638409d0681b68</param></service></servicedata>

ollama-0.3.0.obscpio (binary, stored with Git LFS; binary file not shown)

ollama-0.3.1.obscpio (binary, stored with Git LFS, new file; binary file not shown)

ollama.changes
@@ -1,3 +1,23 @@
+-------------------------------------------------------------------
+Tue Jul 30 07:08:37 UTC 2024 - Adrian Schröter <adrian@suse.de>
+
+- Update to version 0.3.1:
+  * Added support for min_p sampling option
+  * Lowered number of requests required when downloading models
+    with ollama pull
+  * ollama create will now autodetect required stop parameters
+    when importing certain models
+  * Fixed issue where /save would cause parameters to be saved
+    incorrectly.
+  * OpenAI-compatible API will now return a finish_reason of
+    tool_calls if a tool call occurred.
+
+-------------------------------------------------------------------
+Mon Jul 29 09:59:58 UTC 2024 - Adrian Schröter <adrian@suse.de>
+
+- fix build on leap 15.6
+- exclude builds on 32bit due to build failures
+
 -------------------------------------------------------------------
 Sun Jul 28 11:32:19 UTC 2024 - Eyad Issa <eyadlorenzo@gmail.com>
 
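Two of the 0.3.1 items above are user-visible API changes. The new min_p sampling option is passed through the request's options object; a minimal sketch against a locally running ollama serve on the default port (the model name and threshold value are placeholders, not part of this changelog):

    curl http://localhost:11434/api/generate -d '{
      "model": "llama3.1",
      "prompt": "Why is the sky blue?",
      "stream": false,
      "options": { "min_p": 0.05 }
    }'

And the OpenAI-compatible endpoint under /v1 now reports finish_reason: "tool_calls" when the model responds with a tool call, which matches what OpenAI client libraries expect when deciding whether to dispatch tool invocations.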

ollama.obsinfo
@@ -1,4 +1,4 @@
 name: ollama
-version: 0.3.0
-mtime: 1721946295
-commit: bbf8f102ee06bd6b149e4999571c0844aa47b12f
+version: 0.3.1
+mtime: 1722288341
+commit: 1a83581a8e1063418f5f1fec14638409d0681b68

ollama.spec (11 changed lines)
@@ -17,7 +17,7 @@
 
 
 Name:           ollama
-Version:        0.3.0
+Version:        0.3.1
 Release:        0
 Summary:        Tool for running AI models on-premise
 License:        MIT
@@ -35,9 +35,12 @@ BuildRequires:  golang(API) >= 1.22
 %sysusers_requires
 %if 0%{?sle_version} == 150600
 BuildRequires:  gcc12-c++
+BuildRequires:  libstdc++6-gcc12
 %else
 BuildRequires:  gcc-c++ >= 11.4.0
 %endif
+# 32bit seems not to be supported anymore
+ExcludeArch:    %ix86 %arm
 
 %description
 Ollama is a tool for running AI models on one's own hardware.
@@ -78,6 +81,12 @@ mkdir -p "%{buildroot}/%{_docdir}/%{name}"
 cp -Ra docs/* "%{buildroot}/%{_docdir}/%{name}"
 
 %check
+%if 0%{?sle_version} == 150600
+export CXX=g++-12
+export CC=gcc-12
+# pie doesn't work with gcc12 on leap
+export GOFLAGS="-mod=vendor"
+%endif
 go test ./...
 
 %pre -f %{name}.pre
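The Leap 15.6 branch of the spec pins the compilers to gcc12/g++-12 and forces vendored Go modules during %check, while ExcludeArch drops the failing 32-bit x86 and arm builds mentioned in the changelog. A quick way to exercise that conditional before submitting is a local build against a Leap 15.6 target (the repository name below is an assumption; it depends on the project's configured build targets):

    # local build on x86_64 against Leap 15.6 to hit the 0%{?sle_version} == 150600 branch
    osc build openSUSE_Leap_15.6 x86_64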

vendor.tar.zstd (binary, stored with Git LFS; binary file not shown)