Accepting request 1188404 from science:machinelearning
- Fixed issue with shared libraries
- Added %check section
- Use -v when building
- Update to version 0.2.6:
  * New models: MathΣtral is a 7B model designed for math reasoning
    and scientific discovery by Mistral AI.
  * Fixed issue where uppercase roles such as USER would no longer
    work in the chat endpoints
  * Fixed issue where empty system message would be included in the
    prompt

OBS-URL: https://build.opensuse.org/request/show/1188404
OBS-URL: https://build.opensuse.org/package/show/openSUSE:Factory/ollama?expand=0&rev=13
commit e1464e1fa0

_service (2 changed lines)
@@ -3,7 +3,7 @@
 <service name="obs_scm" mode="manual">
 <param name="url">https://github.com/ollama/ollama.git</param>
 <param name="scm">git</param>
-<param name="revision">v0.2.5</param>
+<param name="revision">v0.2.6</param>
 <param name="versionformat">@PARENT_TAG@</param>
 <param name="versionrewrite-pattern">v(.*)</param>
 <param name="changesgenerate">enable</param>

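Note on the workflow: the revision bump above is the only hand-edited change needed for
the version update; the obs_scm service checks out the v0.2.6 tag and regenerates the
archive and metadata files shown further down. Since the services are declared with
mode="manual", the maintainer re-runs them locally before committing, typically with
something along these lines (assumed workflow, not part of this request):

    osc service manualrun    # re-run the mode="manual" services: fetch the tag, rebuild the .obscpio and .obsinfo

With changesgenerate enabled, the run also records the fetched commit in _servicedata,
which is why the changesrevision value changes in the next file.
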
_servicedata
@@ -1,4 +1,4 @@
 <servicedata>
 <service name="tar_scm">
 <param name="url">https://github.com/ollama/ollama.git</param>
-<param name="changesrevision">f7ee0123008dbdb3fd5954438d12196951b58b78</param></service></servicedata>
+<param name="changesrevision">b2554455572b28c0e18423d6fe6896cf7137dbd6</param></service></servicedata>

enable-lto.patch
@@ -22,7 +22,7 @@ index db2c6c3..8194cd9 100755
 # -DGGML_AVX512_VNNI -- 2021 Intel Alder Lake
 
 COMMON_CPU_DEFS="-DBUILD_SHARED_LIBS=off -DCMAKE_POSITION_INDEPENDENT_CODE=on -DGGML_NATIVE=off -DGGML_OPENMP=off"
-+ COMMON_CPU_DEFS="-DGGML_LTO=on -DCMAKE_BUILD_TYPE=Release"
++ COMMON_CPU_DEFS="${COMMON_CPU_DEFS} -DGGML_LTO=on -DCMAKE_BUILD_TYPE=Release"
 if [ -z "${OLLAMA_CPU_TARGET}" -o "${OLLAMA_CPU_TARGET}" = "cpu" ]; then
 #
 # CPU first for the default library, set up as lowest common denominator for maximum compatibility (including Rosetta)

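This hunk is the "Fixed issue with shared libraries" entry from the changelog. The
earlier revision of the patch assigned COMMON_CPU_DEFS outright, which silently dropped
-DBUILD_SHARED_LIBS=off and the other base flags; the new revision appends instead. A
minimal shell sketch of the difference (illustrative only, not taken verbatim from the
patched build script):

    COMMON_CPU_DEFS="-DBUILD_SHARED_LIBS=off -DCMAKE_POSITION_INDEPENDENT_CODE=on"

    # old patch: plain assignment replaces the variable, losing the base flags
    COMMON_CPU_DEFS="-DGGML_LTO=on -DCMAKE_BUILD_TYPE=Release"

    # new patch: append to the existing value, keeping -DBUILD_SHARED_LIBS=off
    COMMON_CPU_DEFS="${COMMON_CPU_DEFS} -DGGML_LTO=on -DCMAKE_BUILD_TYPE=Release"
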
ollama-0.2.5.obscpio (deleted)
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:0a19afdb4bd732dd717c5a97dc8baed30939f4cd74395c304876ef837d041d6f
-size 161660942

ollama-0.2.6.obscpio (new file, 3 lines)
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:391fad97bacee37e8fab00273fd5d5a0a20912fd47c51907131ee1f274c7d2bf
+size 161902606

ollama.changes
@@ -1,3 +1,22 @@
+-------------------------------------------------------------------
+Thu Jul 18 13:09:10 UTC 2024 - Eyad Issa <eyadlorenzo@gmail.com>
+
+- Fixed issue with shared libraries
+
+-------------------------------------------------------------------
+Thu Jul 18 12:27:54 UTC 2024 - Eyad Issa <eyadlorenzo@gmail.com>
+
+- Added %check section
+- Use -v when building
+
+- Update to version 0.2.6:
+  * New models: MathΣtral is a 7B model designed for math
+    reasoning and scientific discovery by Mistral AI.
+  * Fixed issue where uppercase roles such as USER would no longer
+    work in the chat endpoints
+  * Fixed issue where empty system message would be included in the
+    prompt
+
 -------------------------------------------------------------------
 Sun Jul 14 17:48:36 UTC 2024 - eyadlorenzo@gmail.com
 

ollama.obsinfo
@@ -1,4 +1,4 @@
 name: ollama
-version: 0.2.5
-mtime: 1720908480
-commit: f7ee0123008dbdb3fd5954438d12196951b58b78
+version: 0.2.6
+mtime: 1721255711
+commit: b2554455572b28c0e18423d6fe6896cf7137dbd6

ollama.spec (26 changed lines)
@@ -17,7 +17,7 @@
 
 
 Name: ollama
-Version: 0.2.5
+Version: 0.2.6
 Release: 0
 Summary: Tool for running AI models on-premise
 License: MIT
@@ -28,17 +28,16 @@ Source2: ollama.service
 Source3: %{name}-user.conf
 Patch0: enable-lto.patch
 BuildRequires: cmake >= 3.24
+BuildRequires: git
+BuildRequires: sysuser-tools
+BuildRequires: zstd
+BuildRequires: golang(API) >= 1.22
+%sysusers_requires
 %if 0%{?sle_version} == 150600
 BuildRequires: gcc12-c++
 %else
 BuildRequires: gcc-c++ >= 11.4.0
 %endif
-BuildRequires: git
-BuildRequires: sysuser-tools
-BuildRequires: zstd
-BuildRequires: golang(API) >= 1.22
-
-%{sysusers_requires}
 
 %description
 Ollama is a tool for running AI models on one's own hardware.
@@ -67,16 +66,19 @@ export GOFLAGS="-mod=vendor"
 export OLLAMA_SKIP_PATCHING=1
 
 go generate ./...
-go build .
+go build -v .
 
 %install
 install -D -m 0755 %{name} %{buildroot}/%{_bindir}/%{name}
 install -D -m 0644 %{SOURCE2} %{buildroot}%{_unitdir}/%{name}.service
 install -D -m 0644 %{SOURCE3} %{buildroot}%{_sysusersdir}/%{name}-user.conf
-install -d %{buildroot}/var/lib/%{name}
+install -d %{buildroot}%{_localstatedir}/lib/%{name}
 
-mkdir -p "%buildroot/%_docdir/%name"
-cp -Ra docs/* "%buildroot/%_docdir/%name"
+mkdir -p "%{buildroot}/%{_docdir}/%{name}"
+cp -Ra docs/* "%{buildroot}/%{_docdir}/%{name}"
 
+%check
+go test ./...
+
 %pre -f %{name}.pre
 %service_add_pre %{name}.service
@@ -97,6 +99,6 @@ cp -Ra docs/* "%buildroot/%_docdir/%name"
 %{_bindir}/%{name}
 %{_unitdir}/%{name}.service
 %{_sysusersdir}/%{name}-user.conf
-%attr(-, ollama, ollama) /var/lib/%{name}
+%attr(-, ollama, ollama) %{_localstatedir}/lib/%{name}
 
 %changelog

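Taken together, the spec changes are: the version bump, the reordered BuildRequires
block, -v on the Go build, a new %check section, %{_localstatedir} in place of the
hard-coded /var, and fully braced macro spellings in the documentation install lines.
A rough sketch of what the %build and %check stages now execute, using only commands
visible in the diff (environment and working-directory details omitted):

    export GOFLAGS="-mod=vendor"
    go generate ./...    # runs ollama's generate step for the bundled native code
    go build -v .        # -v prints each package path as it is compiled
    go test ./...        # %check: run the unit tests of every Go package at build time

%{_localstatedir} conventionally expands to /var; this can be checked with
rpm --eval '%{_localstatedir}'.
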
(git-lfs pointer for a further source archive; file name not shown in this view)
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:0a7dde5a5d4e0794b5a9b5e7dd865559a6625ef387a90d2843581d008a9c5af2
-size 5355013
+oid sha256:b9dabb1b28321cce2672e5b37eb792e904715539dad5ecabc0eee92d6b0b10e1
+size 5355343