summaryrefslogtreecommitdiff
path: root/pkgs/development/python-modules/llama-cpp-python
diff options
context:
space:
mode:
authorMartin Weinelt <hexa@darmstadt.ccc.de>2025-01-29 16:08:41 +0100
committerMartin Weinelt <hexa@darmstadt.ccc.de>2025-01-29 16:08:53 +0100
commit909803a20e33acb659c62332198c5d9d04d6f98a (patch)
treeaeae159543352c51ad06f809b6e3ceb1b7b3ceb6 /pkgs/development/python-modules/llama-cpp-python
parentc864087bd2e466b1d3be537e0a662208f20b1886 (diff)
parent1eebcc7cdde7015109fafb284d52f78e1e273fa0 (diff)
Merge remote-tracking branch 'origin/master' into staging-next
Conflicts: - pkgs/by-name/au/automatic-timezoned/package.nix - pkgs/by-name/da/darklua/package.nix - pkgs/by-name/ki/kittycad-kcl-lsp/package.nix - pkgs/by-name/li/limbo/package.nix - pkgs/by-name/mi/minijinja/package.nix - pkgs/by-name/pa/pay-respects/package.nix - pkgs/by-name/ri/river-bsp-layout/package.nix - pkgs/by-name/sv/svgbob/package.nix - pkgs/by-name/tu/tui-journal/package.nix - pkgs/by-name/wa/waypipe/package.nix - pkgs/development/python-modules/zxcvbn-rs-py/default.nix
Diffstat (limited to 'pkgs/development/python-modules/llama-cpp-python')
-rw-r--r--pkgs/development/python-modules/llama-cpp-python/default.nix22
1 file changed, 17 insertions(+), 5 deletions(-)
diff --git a/pkgs/development/python-modules/llama-cpp-python/default.nix b/pkgs/development/python-modules/llama-cpp-python/default.nix
index 6c07005d4ddf..90e2d9f7d0cb 100644
--- a/pkgs/development/python-modules/llama-cpp-python/default.nix
+++ b/pkgs/development/python-modules/llama-cpp-python/default.nix
@@ -64,13 +64,26 @@ buildPythonPackage rec {
dontUseCmakeConfigure = true;
SKBUILD_CMAKE_ARGS = lib.strings.concatStringsSep ";" (
- lib.optionals cudaSupport [
+ # Set GGML_NATIVE=off. Otherwise, cmake attempts to build with
+ # -march=native* which is either a no-op (if cc-wrapper is able to ignore
+ # it), or an attempt to build a non-reproducible binary.
+ #
+ # This issue was spotted when cmake rules appended feature modifiers to
+ # -mcpu, breaking linux build as follows:
+ #
+ # cc1: error: unknown value ‘native+nodotprod+noi8mm+nosve’ for ‘-mcpu’
+ [ "-DGGML_NATIVE=off" ]
+ ++ lib.optionals cudaSupport [
"-DGGML_CUDA=on"
"-DCUDAToolkit_ROOT=${lib.getDev cudaPackages.cuda_nvcc}"
"-DCMAKE_CUDA_COMPILER=${lib.getExe cudaPackages.cuda_nvcc}"
]
);
+ preBuild = ''
+ export CMAKE_BUILD_PARALLEL_LEVEL="$NIX_BUILD_CORES"
+ '';
+
nativeBuildInputs = [
cmake
ninja
@@ -128,10 +141,9 @@ buildPythonPackage rec {
homepage = "https://github.com/abetlen/llama-cpp-python";
changelog = "https://github.com/abetlen/llama-cpp-python/blob/v${version}/CHANGELOG.md";
license = lib.licenses.mit;
- maintainers = with lib.maintainers; [ kirillrdy ];
- badPlatforms = [
- # cc1: error: unknown value ‘native+nodotprod+noi8mm+nosve’ for ‘-mcpu’
- "aarch64-linux"
+ maintainers = with lib.maintainers; [
+ booxter
+ kirillrdy
];
};
}