forked from pool/ollama
Accepting request 1174682 from home:VaiTon:branches:science:machinelearning
- Update to version 0.1.38:
  * New model: Falcon 2: A new 11B parameters causal decoder-only
    model built by TII and trained over 5T tokens.
  * New model: Yi 1.5: A new high-performing version of Yi, now
    licensed as Apache 2.0. Available in 6B, 9B and 34B sizes.
  * Added ollama ps command
  * Added /clear command
  * Fixed issue where switching loaded models on Windows would take
    several seconds
  * Running /save will no longer abort the chat session if an
    incorrect name is provided
  * The /api/tags API endpoint will now correctly return an empty
    list [] instead of null if no models are provided

OBS-URL: https://build.opensuse.org/request/show/1174682
OBS-URL: https://build.opensuse.org/package/show/science:machinelearning/ollama?expand=0&rev=15
parent cf9cf2a4df
commit 32d8d25838
_service
@@ -3,7 +3,7 @@
 <service name="obs_scm" mode="manual">
   <param name="url">https://github.com/ollama/ollama.git</param>
   <param name="scm">git</param>
-  <param name="revision">v0.1.37</param>
+  <param name="revision">v0.1.38</param>
   <param name="versionformat">@PARENT_TAG@</param>
   <param name="versionrewrite-pattern">v(.*)</param>
   <param name="changesgenerate">enable</param>
_servicedata
@@ -1,4 +1,4 @@
 <servicedata>
   <service name="tar_scm">
     <param name="url">https://github.com/ollama/ollama.git</param>
-    <param name="changesrevision">41ba3017fd74dfce9a3dc00160f29befec85a41b</param></service></servicedata>
+    <param name="changesrevision">d1692fd3e0b4a80ff55ba052b430207134df4714</param></service></servicedata>
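For context, the revision bump in _service and the refreshed changesrevision in _servicedata are normally produced together by re-running the package's source services rather than by hand-editing both files. A minimal sketch of that workflow with osc, assuming a standard checkout of science:machinelearning/ollama (subcommand spellings can differ slightly between osc versions):

    # check out the package from OBS
    osc checkout science:machinelearning/ollama
    cd science:machinelearning/ollama

    # point obs_scm at the new upstream tag
    sed -i 's|>v0.1.37<|>v0.1.38<|' _service

    # run the mode="manual" services: fetch the tag, rebuild the
    # .obscpio archive, refresh _servicedata and ollama.obsinfo
    osc service manualrun

    # review and submit the update
    osc status
    osc commit -m "Update to version 0.1.38"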
ollama-0.1.37.obscpio (deleted)
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:6825400dbf9a926fb6d8ea4c03fc61f981ec03f0897ffc65f74fc3a733b562cf
-size 130026510
ollama-0.1.38.obscpio (new file)
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:6ec8a477587c138c5cd55da52d0cb8eef73e9efb50df9fd083417d286ad6edf9
+size 154565646
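The .obscpio source archives themselves are tracked with Git LFS, so the repository only stores small pointer files like the ones above: oid is the SHA-256 of the real archive and size is its length in bytes. A quick way to check a fetched archive against its pointer (a sketch, assuming git-lfs is installed in the working copy):

    # materialize the real archive behind the LFS pointer
    git lfs pull --include "ollama-0.1.38.obscpio"

    # the digest should match the oid recorded in the pointer file
    sha256sum ollama-0.1.38.obscpio
    # expected: 6ec8a477587c138c5cd55da52d0cb8eef73e9efb50df9fd083417d286ad6edf9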
ollama.changes
@@ -1,3 +1,20 @@
+-------------------------------------------------------------------
+Thu May 16 19:55:51 UTC 2024 - Eyad Issa <eyadlorenzo@gmail.com>
+
+- Update to version 0.1.38:
+  * New model: Falcon 2: A new 11B parameters causal decoder-only
+    model built by TII and trained over 5T tokens.
+  * New model: Yi 1.5: A new high-performing version of Yi, now
+    licensed as Apache 2.0. Available in 6B, 9B and 34B sizes.
+  * Added ollama ps command
+  * Added /clear command
+  * Fixed issue where switching loaded models on Windows would take
+    several seconds
+  * Running /save will no longer abort the chat session if an
+    incorrect name is provided
+  * The /api/tags API endpoint will now correctly return an empty
+    list [] instead of null if no models are provided
+
 -------------------------------------------------------------------
 Sun May 12 19:05:53 UTC 2024 - Eyad Issa <eyadlorenzo@gmail.com>
 
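One behavioural change in this changelog entry is easy to verify from the command line: with no models pulled, the models field returned by GET /api/tags should now be an empty list instead of null. A small sketch against a locally running ollama instance (default port 11434 assumed):

    # list installed models via the REST API
    curl -s http://localhost:11434/api/tags
    # 0.1.38 with no models installed is expected to print: {"models":[]}
    # earlier releases could print:                         {"models":null}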
ollama.obsinfo
@@ -1,4 +1,4 @@
 name: ollama
-version: 0.1.37
-mtime: 1715466701
-commit: 41ba3017fd74dfce9a3dc00160f29befec85a41b
+version: 0.1.38
+mtime: 1715812996
+commit: d1692fd3e0b4a80ff55ba052b430207134df4714
ollama.spec
@@ -17,7 +17,7 @@
 
 
 Name: ollama
-Version: 0.1.37
+Version: 0.1.38
 Release: 0
 Summary: Tool for running AI models on-premise
 License: MIT
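Only the Version tag changes in the spec; Release stays at 0 because OBS normally substitutes the real release counter at build time. To confirm how the updated tag expands, rpmspec can query the parsed spec (a sketch; output formatting may vary with local macro definitions):

    # print the name-version-release the spec resolves to
    rpmspec -q --qf '%{name}-%{version}-%{release}\n' ollama.spec
    # expected to report ollama-0.1.38-0 before OBS rewrites the release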
@ -1,3 +1,3 @@
|
||||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:202da226a0bad5afdb4afb42c75da9a04277e4c3a5be9bce390a049c025bd7af
|
||||
size 5330325
|
||||
oid sha256:d4ea082357c422a313e379f4fa232a1984fec80429d8a66664699e5ff3627d19
|
||||
size 5330464
|
||||
|
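The final hunk updates another LFS pointer, for a roughly 5 MB archive whose filename is not shown in this view; since ollama is a Go project and OBS builds run without network access, this is presumably the vendored Go module tree that gets refreshed alongside each source update (some packages generate it with the go_modules source service instead). A sketch of how such a vendor archive is commonly rebuilt by hand; the archive name below is only illustrative:

    # inside the unpacked ollama-0.1.38 sources
    go mod vendor                    # downloads dependencies into ./vendor
    tar -caf vendor.tar.zst vendor/  # repack; real name and compression may differ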