02/09: gnu: llama-cpp: Update commit and configure flags for shared library build.


From: guix-commits
Subject: 02/09: gnu: llama-cpp: Update commit and configure flags for shared library build.
Date: Wed, 10 Jul 2024 10:13:28 -0400 (EDT)

civodul pushed a commit to branch master
in repository guix.

commit 85f1bfac0b5e3bd96f9dac63ba661f7c99263f7b
Author: Andy Tai <atai@atai.org>
AuthorDate: Tue Jun 11 05:02:13 2024 -0700

    gnu: llama-cpp: Update commit and configure flags for shared library build.
    
    * gnu/packages/machine-learning.scm (llama-cpp): Update to commit a5735e,
      which adds pkg-config support.
      [arguments](configure-flags): Add CMake configure flag for the shared
      library build and adjust the flags so that CMake finds OpenBLAS.
      (phases) 'install-python-scripts: Remove references to deleted scripts
      and add the scripts that are new upstream.
    
    Change-Id: I7c4bc219a22aa9a949e811b340c7cf745b176d14
    Signed-off-by: Ludovic Courtès <ludo@gnu.org>
---
 gnu/packages/machine-learning.scm | 40 +++++++++++++++++++++++----------------
 1 file changed, 24 insertions(+), 16 deletions(-)

diff --git a/gnu/packages/machine-learning.scm b/gnu/packages/machine-learning.scm
index 8ca9a6376e..303e123a15 100644
--- a/gnu/packages/machine-learning.scm
+++ b/gnu/packages/machine-learning.scm
@@ -549,8 +549,8 @@ Performance is achieved by using the LLVM JIT compiler.")
   (deprecated-package "guile-aiscm-next" guile-aiscm))
 
 (define-public llama-cpp
-  (let ((commit "fed0108491a3a3cbec6c6480dc8667ffff9d7659")
-        (revision "2"))
+  (let ((commit "a5735e4426b19a3ebd0c653ad8ac01420458ee95")
+        (revision "3"))
     (package
       (name "llama-cpp")
       (version (git-version "0.0.0" revision commit))
@@ -562,19 +562,27 @@ Performance is achieved by using the LLVM JIT compiler.")
                (commit commit)))
          (file-name (git-file-name name version))
          (sha256
-          (base32 "16rm9gy0chd6k07crm8rkl2j3hg7y7h0km7k6c8q7bmm2jrd64la"))))
+          (base32 "0nx55wchwf204ld6jygfn37cjrzc4lspwn5v0qk8i6p92499bv0h"))))
       (build-system cmake-build-system)
       (arguments
        (list
-        #:configure-flags #~'("-DLLAMA_BLAS=ON"
-                              "-DLLAMA_BLAS_VENDOR=OpenBLAS"
-
-                              "-DLLAMA_NATIVE=OFF" ;no '-march=native'
-                              "-DLLAMA_FMA=OFF"    ;and no '-mfma', etc.
-                              "-DLLAMA_AVX2=OFF"
-                              "-DLLAMA_AVX512=OFF"
-                              "-DLLAMA_AVX512_VBMI=OFF"
-                              "-DLLAMA_AVX512_VNNI=OFF")
+        #:configure-flags
+        #~(list "-DBUILD_SHARED_LIBS=ON"
+                "-DLLAMA_BLAS=ON"
+                "-DLLAMA_BLAS_VENDOR=OpenBLAS"
+                (string-append "-DBLAS_INCLUDE_DIRS="
+                               #$(this-package-input "openblas")
+                               "/include")
+                (string-append "-DBLAS_LIBRARIES="
+                               #$(this-package-input "openblas")
+                               "/lib/libopenblas.so")
+
+                "-DLLAMA_NATIVE=OFF" ;no '-march=native'
+                "-DLLAMA_FMA=OFF"    ;and no '-mfma', etc.
+                "-DLLAMA_AVX2=OFF"
+                "-DLLAMA_AVX512=OFF"
+                "-DLLAMA_AVX512_VBMI=OFF"
+                "-DLLAMA_AVX512_VNNI=OFF")
 
         #:modules '((ice-9 textual-ports)
                     (guix build utils)
@@ -609,14 +617,14 @@ Performance is achieved by using the LLVM JIT compiler.")
                   (mkdir-p bin)
                   (make-script "convert-hf-to-gguf")
                   (make-script "convert-llama-ggml-to-gguf")
-                  (make-script "convert-lora-to-ggml")
-                  (make-script "convert-persimmon-to-gguf")
-                  (make-script "convert"))))
+                  (make-script "convert-hf-to-gguf-update.py"))))
             (add-after 'install-python-scripts 'wrap-python-scripts
               (assoc-ref python:%standard-phases 'wrap))
             (add-after 'install 'install-main
               (lambda _
-                (copy-file "bin/main" (string-append #$output "/bin/llama")))))))
+                (with-directory-excursion (string-append #$output "/bin")
+                    (symlink "main" "llama"))))
+            )))
       (inputs (list python))
       (native-inputs (list pkg-config))
       (propagated-inputs

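For readers less familiar with G-expressions, here is a minimal sketch (not part
of the commit) of what the #:configure-flags list above looks like once
#$(this-package-input "openblas") has been substituted with a store path; the
path used here is a made-up placeholder, not a real store item:

    ;; Sketch only: the flags after gexp substitution, with a placeholder path.
    (define openblas "/gnu/store/...-openblas-0.3.20")   ;hypothetical store item

    (define configure-flags
      (list "-DBUILD_SHARED_LIBS=ON"
            "-DLLAMA_BLAS=ON"
            "-DLLAMA_BLAS_VENDOR=OpenBLAS"
            (string-append "-DBLAS_INCLUDE_DIRS=" openblas "/include")
            (string-append "-DBLAS_LIBRARIES=" openblas "/lib/libopenblas.so")))

Passing absolute include and library paths this way is what lets CMake find
OpenBLAS in the isolated build environment.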


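On the versioning side, a brief illustration (again not part of the commit) of
what bumping 'revision' to "3" means: Guix's git-version procedure combines the
base version, the revision, and the first seven characters of the commit hash,
so the package version becomes, e.g. when evaluated in guix repl:

    ;; Illustration only: how the package version string is derived.
    (use-modules (guix git-download))   ;provides git-version

    (git-version "0.0.0" "3" "a5735e4426b19a3ebd0c653ad8ac01420458ee95")
    ;; => "0.0.0-3.a5735e4"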