Date:      Thu, 2 Jan 2025 00:39:32 GMT
From:      Yuri Victorovich <yuri@FreeBSD.org>
To:        ports-committers@FreeBSD.org, dev-commits-ports-all@FreeBSD.org, dev-commits-ports-main@FreeBSD.org
Subject:   git: f72088b8f67d - main - misc/llama-cpp: update 4381 → 4404
Message-ID:  <202501020039.5020dW5Y057803@gitrepo.freebsd.org>

The branch main has been updated by yuri:

URL: https://cgit.FreeBSD.org/ports/commit/?id=f72088b8f67d217861539f1e560a3e76da83279b

commit f72088b8f67d217861539f1e560a3e76da83279b
Author:     Yuri Victorovich <yuri@FreeBSD.org>
AuthorDate: 2025-01-02 00:38:51 +0000
Commit:     Yuri Victorovich <yuri@FreeBSD.org>
CommitDate: 2025-01-02 00:39:10 +0000

    misc/llama-cpp: update 4381 → 4404
---
 misc/llama-cpp/Makefile | 13 +++++++++----
 misc/llama-cpp/distinfo |  6 +++---
 2 files changed, 12 insertions(+), 7 deletions(-)

diff --git a/misc/llama-cpp/Makefile b/misc/llama-cpp/Makefile
index 6736e78555b2..59cdf9831dfc 100644
--- a/misc/llama-cpp/Makefile
+++ b/misc/llama-cpp/Makefile
@@ -1,6 +1,6 @@
 PORTNAME=	llama-cpp
 DISTVERSIONPREFIX=	b
-DISTVERSION=	4381
+DISTVERSION=	4404
 CATEGORIES=	misc # machine-learning
 
 MAINTAINER=	yuri@FreeBSD.org
@@ -27,10 +27,15 @@ CMAKE_ON=	BUILD_SHARED_LIBS
 CMAKE_OFF=	LLAMA_BUILD_TESTS
 CMAKE_TESTING_ON=	LLAMA_BUILD_TESTS
 
-OPTIONS_DEFINE=		EXAMPLES VULKAN
-OPTIONS_DEFAULT=	VULKAN
+OPTIONS_DEFINE=		CURL EXAMPLES VULKAN
+OPTIONS_DEFAULT=	CURL VULKAN
 OPTIONS_SUB=		yes
 
+CURL_DESC=		Use libcurl to download models from a URL
+CURL_CMAKE_BOOL=	LLAMA_CURL
+CURL_USES=		localbase
+CURL_LIB_DEPENDS=	libcurl.so:ftp/curl
+
 EXAMPLES_CMAKE_BOOL=	LLAMA_BUILD_EXAMPLES
 
 VULKAN_DESC=		Vulkan GPU offload support
@@ -45,6 +50,6 @@ do-test-ci: # build of tests fails, see https://github.com/ggerganov/llama.cpp/i
 	@cd ${WRKSRC} && \
 		${SETENV} ${MAKE_ENV} bash ci/run.sh ./tmp/results ./tmp/mnt
 
-# tests as of 4381: 97% tests passed, 1 tests failed out of 31
+# tests as of 4404: 97% tests passed, 1 tests failed out of 31, see https://github.com/ggerganov/llama.cpp/issues/11036
 
 .include <bsd.port.mk>
diff --git a/misc/llama-cpp/distinfo b/misc/llama-cpp/distinfo
index d9f5b907abca..d85a10abe4e6 100644
--- a/misc/llama-cpp/distinfo
+++ b/misc/llama-cpp/distinfo
@@ -1,5 +1,5 @@
-TIMESTAMP = 1734927684
-SHA256 (ggerganov-llama.cpp-b4381_GH0.tar.gz) = 9fba75d7add548cd772c8edbccd00c11143e3f2b26155e12f33348a6ad8e695d
-SIZE (ggerganov-llama.cpp-b4381_GH0.tar.gz) = 20594961
+TIMESTAMP = 1735768835
+SHA256 (ggerganov-llama.cpp-b4404_GH0.tar.gz) = ec16aa08daa58f804190e446bd010a941057391d66e5627d067f1fc3cc3f87ff
+SIZE (ggerganov-llama.cpp-b4404_GH0.tar.gz) = 20604345
 SHA256 (nomic-ai-kompute-4565194_GH0.tar.gz) = 95b52d2f0514c5201c7838348a9c3c9e60902ea3c6c9aa862193a212150b2bfc
 SIZE (nomic-ai-kompute-4565194_GH0.tar.gz) = 13540496
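For anyone tracking this update, a minimal sketch of exercising the new CURL
option after the port is rebuilt (the model URL below is a placeholder, and the
llama-cli --model-url flag comes from upstream llama.cpp, not from this commit):

	# Build the port; CURL is now in OPTIONS_DEFAULT, so it is enabled
	# unless toggled off in the options dialog:
	cd /usr/ports/misc/llama-cpp
	make config          # optional: review/toggle CURL, EXAMPLES, VULKAN
	make install clean

	# With LLAMA_CURL compiled in, the runtime can fetch a model directly
	# from a URL instead of requiring a pre-downloaded file, e.g.:
	llama-cli --model-url https://example.org/some-model.gguf -p "Hello"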


