diff --git a/01-build-verbose.patch b/01-build-verbose.patch
new file mode 100644
index 0000000..58e8e18
--- /dev/null
+++ b/01-build-verbose.patch
@@ -0,0 +1,34 @@
+diff --git a/llama/make/Makefile.default b/llama/make/Makefile.default
+index 95b13a7..6a9b9ef 100644
+--- a/llama/make/Makefile.default
++++ b/llama/make/Makefile.default
+@@ -24,17 +24,17 @@ all: $(BUILD_RUNNERS) $(DIST_RUNNERS) $(PAYLOAD_RUNNERS)
+ $(RUNNERS_BUILD_DIR)/$(DEFAULT_RUNNER)/ollama_llama_server$(EXE_EXT): TARGET_CPU_FLAGS=$(CUSTOM_CPU_FLAGS)
+ $(RUNNERS_BUILD_DIR)/$(DEFAULT_RUNNER)/ollama_llama_server$(EXE_EXT): *.go ./runner/*.go $(COMMON_SRCS) $(COMMON_HDRS)
+ @-mkdir -p $(dir $@)
+- GOARCH=$(ARCH) go build -buildmode=pie $(CPU_GOFLAGS) -trimpath $(if $(CUSTOM_CPU_FLAGS),-tags $(subst $(space),$(comma),$(CUSTOM_CPU_FLAGS))) -o $@ ./runner
++ GOARCH=$(ARCH) go build -buildmode=pie -v $(CPU_GOFLAGS) -trimpath $(if $(CUSTOM_CPU_FLAGS),-tags $(subst $(space),$(comma),$(CUSTOM_CPU_FLAGS))) -o $@ ./runner
+
+ $(RUNNERS_BUILD_DIR)/cpu_avx/ollama_llama_server$(EXE_EXT): TARGET_CPU_FLAGS="avx"
+ $(RUNNERS_BUILD_DIR)/cpu_avx/ollama_llama_server$(EXE_EXT): *.go ./runner/*.go $(COMMON_SRCS) $(COMMON_HDRS)
+ @-mkdir -p $(dir $@)
+- GOARCH=$(ARCH) go build -buildmode=pie $(CPU_GOFLAGS) -trimpath -tags $(subst $(space),$(comma),$(TARGET_CPU_FLAGS)) -o $@ ./runner
++ GOARCH=$(ARCH) go build -buildmode=pie -v $(CPU_GOFLAGS) -trimpath -tags $(subst $(space),$(comma),$(TARGET_CPU_FLAGS)) -o $@ ./runner
+
+ $(RUNNERS_BUILD_DIR)/cpu_avx2/ollama_llama_server$(EXE_EXT): TARGET_CPU_FLAGS="avx avx2"
+ $(RUNNERS_BUILD_DIR)/cpu_avx2/ollama_llama_server$(EXE_EXT): *.go ./runner/*.go $(COMMON_SRCS) $(COMMON_HDRS)
+ @-mkdir -p $(dir $@)
+- GOARCH=$(ARCH) go build -buildmode=pie $(CPU_GOFLAGS) -trimpath -tags $(subst $(space),$(comma),$(TARGET_CPU_FLAGS)) -o $@ ./runner
++ GOARCH=$(ARCH) go build -buildmode=pie -v $(CPU_GOFLAGS) -trimpath -tags $(subst $(space),$(comma),$(TARGET_CPU_FLAGS)) -o $@ ./runner
+
+ $(RUNNERS_DIST_DIR)/%: $(RUNNERS_BUILD_DIR)/%
+ @-mkdir -p $(dir $@)
+@@ -44,7 +44,7 @@ $(RUNNERS_PAYLOAD_DIR)/%/ollama_llama_server$(EXE_EXT).gz: $(RUNNERS_BUILD_DIR)/
+ @-mkdir -p $(dir $@)
+ ${GZIP} --best -c $< > $@
+
+-clean:
++clean:
+ rm -f $(BUILD_RUNNERS) $(DIST_RUNNERS) $(PAYLOAD_RUNNERS)
+
+ .PHONY: clean all
diff --git a/_service b/_service
index de6d9b9..70e01c7 100644
--- a/_service
+++ b/_service
@@ -3,7 +3,7 @@
     <param name="url">https://github.com/ollama/ollama.git</param>
     <param name="scm">git</param>
-    <param name="revision">v0.4.0</param>
+    <param name="revision">v0.4.2</param>
     <param name="versionformat">@PARENT_TAG@</param>
     <param name="versionrewrite-pattern">v(.*)</param>
     <param name="changesgenerate">enable</param>
diff --git a/_servicedata b/_servicedata
index 71a9f1f..ff6fe6f 100644
--- a/_servicedata
+++ b/_servicedata
@@ -1,4 +1,4 @@
 <servicedata>
 <service name="tar_scm">
         <param name="url">https://github.com/ollama/ollama.git</param>
-        <param name="changesrevision">9d71bcc3e2a97c8e62d758450f43aa212346410e</param></service></servicedata>
\ No newline at end of file
+        <param name="changesrevision">d875e99e4639dc07af90b2e3ea0d175e2e692efb</param></service></servicedata>
\ No newline at end of file
diff --git a/ollama-0.4.0.obscpio b/ollama-0.4.0.obscpio
deleted file mode 100644
index ce96ac6..0000000
--- a/ollama-0.4.0.obscpio
+++ /dev/null
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:dd8818aea2a9674d68930e3c8d82899c26860ddc0bb6215720443f7d2bae3933
-size 16452109
diff --git a/ollama-0.4.2.obscpio b/ollama-0.4.2.obscpio
new file mode 100644
index 0000000..42ea222
--- /dev/null
+++ b/ollama-0.4.2.obscpio
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a64b94eab71d52a3b22424b1cb7ec98198e38f021c0bfa55c24d2ad7514be49f
+size 17815053
diff --git a/ollama.changes b/ollama.changes
index 9cc56a6..c7290b9 100644
--- a/ollama.changes
+++ b/ollama.changes
@@ -1,3 +1,49 @@
+-------------------------------------------------------------------
+Sat Nov 16 16:07:38 UTC 2024 - Eyad Issa
+
+- Update to version 0.4.2:
+ * runner.go: Propagate panics back to the user.
+ * runner.go: Increase survivability of main processing loop
+ * build: fix arm container image (#7674)
+ * add line numbers for parser errors (#7326)
+ * chore(deps): bump golang.org/x dependencies (#7655)
+ * runner.go: Don't trim whitespace from inputs
+ * runner.go: Enforce NUM_PARALLEL directly in the runner
+ * cmd: preserve exact bytes when displaying template/system layers (#7586)
+ * fix(mllama): sync backend between batches
+ * runner.go: Fix off-by-one for num predicted
+ * CI: give windows lint more time (#7635)
+ * Jetpack support for Go server (#7217)
+ * doc: capture numeric group requirement (#6941)
+ * docs: Capture docker cgroup workaround (#7519)
+ * runner.go: Make KV entry accounting more robust
+ * readme: add aichat terminal app to community integrations (#7418)
+ * api: fix typos in Go Doc comments (#7620)
+ * readme: add GoLamify to community integrations (#7521)
+ * readme: add browser extension that enables using Ollama for interacting with web pages (#5827)
+ * docs: add mentions of Llama 3.2 (#7517)
+ * api: fix typo in python ClientFromEnvironment docs (#7604)
+ * readme: add llama3.2-vision to model list (#7580)
+
+-------------------------------------------------------------------
+Mon Nov 11 13:57:46 UTC 2024 - Eyad Issa
+
+- Add patch 01-build-verbose.patch to add the -v option
+ to go build
+
+- Update to version 0.4.1:
+ * runner.go: Check for zero length images
+ * docs: update langchainpy.md with proper model name (#7527)
+ * Set macos min version for all architectures (#7579)
+ * win: remove preview title from installer (#7529)
+ * Workaround buggy P2P ROCm copy on windows (#7466)
+ * Debug logging for nvcuda init (#7532)
+ * Align rocm compiler flags (#7467)
+ * Be explicit for gpu library link dir (#7560)
+ * docs: OLLAMA_NEW_RUNNERS no longer exists
+ * runner.go: Remove unused arguments
+ * sched: Lift parallel restriction for multimodal models except mllama
+
-------------------------------------------------------------------
Thu Nov 07 12:06:09 UTC 2024 - adrian@suse.de
diff --git a/ollama.obsinfo b/ollama.obsinfo
index c8126d6..1b52041 100644
--- a/ollama.obsinfo
+++ b/ollama.obsinfo
@@ -1,4 +1,4 @@
name: ollama
-version: 0.4.0
-mtime: 1730848045
-commit: 9d71bcc3e2a97c8e62d758450f43aa212346410e
+version: 0.4.2
+mtime: 1731700345
+commit: d875e99e4639dc07af90b2e3ea0d175e2e692efb
diff --git a/ollama.spec b/ollama.spec
index ea83fca..66b3f71 100644
--- a/ollama.spec
+++ b/ollama.spec
@@ -17,7 +17,7 @@
Name: ollama
-Version: 0.4.0
+Version: 0.4.2
Release: 0
Summary: Tool for running AI models on-premise
License: MIT
@@ -26,6 +26,7 @@ Source: %{name}-%{version}.tar
Source1: vendor.tar.zstd
Source2: ollama.service
Source3: %{name}-user.conf
+Patch1: 01-build-verbose.patch
BuildRequires: cmake >= 3.24
BuildRequires: git
BuildRequires: sysuser-tools
@@ -86,7 +87,7 @@ export CC=gcc-12
# pie doesn't work with gcc12 on leap
export GOFLAGS="-mod=vendor"
%endif
-go test ./...
+go test -v ./...
%pre -f %{name}.pre
%service_add_pre %{name}.service
diff --git a/vendor.tar.zstd b/vendor.tar.zstd
index d024d9a..6984029 100644
--- a/vendor.tar.zstd
+++ b/vendor.tar.zstd
@@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
-oid sha256:a282354c7f6d327ef4ae6cb9c275e7e6596dd9989f5062302af1e42443376d45
-size 5367853
+oid sha256:dbaffc2fb10534d64b6c519d6461875fff9320d79cf27663eb05030a69b8e9cf
+size 5366874