From d8a63bbcee616f224c10462dbfb117ec009c50d8 Mon Sep 17 00:00:00 2001
From: John Fremlin
Date: Wed, 3 Apr 2024 23:46:25 -0400
Subject: gnu: llama-cpp: Use OpenBLAS.

For faster prompt processing, OpenBLAS is recommended by
https://github.com/ggerganov/llama.cpp

* gnu/packages/machine-learning.scm (llama-cpp)[arguments]: Add
#:configure-flags.
[native-inputs]: Add pkg-config.
[propagated-inputs]: Add openblas.

Change-Id: Iaf6f22252da13e2d6f503992878b35b0da7de0aa
Signed-off-by: Christopher Baines
---
 gnu/packages/machine-learning.scm | 5 ++++-
 1 file changed, 4 insertions(+), 1 deletion(-)

diff --git a/gnu/packages/machine-learning.scm b/gnu/packages/machine-learning.scm
index e38d93ea05..e61299a5db 100644
--- a/gnu/packages/machine-learning.scm
+++ b/gnu/packages/machine-learning.scm
@@ -541,6 +541,8 @@ Performance is achieved by using the LLVM JIT compiler.")
       (build-system cmake-build-system)
       (arguments
        (list
+        #:configure-flags
+        '(list "-DLLAMA_BLAS=ON" "-DLLAMA_BLAS_VENDOR=OpenBLAS")
         #:modules '((ice-9 textual-ports)
                     (guix build utils)
                     ((guix build python-build-system) #:prefix python:)
@@ -575,8 +577,9 @@ Performance is achieved by using the LLVM JIT compiler.")
              (lambda _
                (copy-file "bin/main" (string-append #$output "/bin/llama")))))))
       (inputs (list python))
+      (native-inputs (list pkg-config))
       (propagated-inputs
-       (list python-numpy python-pytorch python-sentencepiece))
+       (list python-numpy python-pytorch python-sentencepiece openblas))
       (home-page "https://github.com/ggerganov/llama.cpp")
       (synopsis "Port of Facebook's LLaMA model in C/C++")
       (description "This package provides a port to Facebook's LLaMA collection
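
In llama.cpp's CMake build of this era, -DLLAMA_BLAS=ON enables the BLAS-backed
matrix kernels used during prompt processing, and -DLLAMA_BLAS_VENDOR=OpenBLAS
points its BLAS detection at OpenBLAS, which the build looks up via pkg-config
(the reason for the new pkg-config native input). For orientation only, here is
a condensed sketch of how the affected fields of the llama-cpp package read
after this change; it is not part of the patch, and the unchanged #:modules,
#:imported-modules and #:phases are elided with "...":

      (arguments
       (list
        #:configure-flags
        '(list "-DLLAMA_BLAS=ON" "-DLLAMA_BLAS_VENDOR=OpenBLAS")
        ;; #:modules, #:imported-modules and #:phases as before (elided).
        ...))
      (inputs (list python))
      (native-inputs (list pkg-config))   ;lets llama.cpp's CMake locate OpenBLAS
      (propagated-inputs
       (list python-numpy python-pytorch python-sentencepiece openblas))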