summaryrefslogtreecommitdiff
path: root/pkgs/development/python-modules/llama-cpp-python
diff options
context:
space:
mode:
authorPhilip Taron <philip.taron@gmail.com>2024-12-02 08:58:37 -0800
committerPhilip Taron <philip.taron@gmail.com>2024-12-02 08:58:37 -0800
commit898a5023f39d67208d55c0ee4d69088a60967ab9 (patch)
tree8a358bad5d8b0d00b70648a19600d32bd6ac6d44 /pkgs/development/python-modules/llama-cpp-python
parent548eb2776de07532d7ad3e92001be05be2f79190 (diff)
parent3ad0927f72a6062cd1311aaf1327132ca7c30fcb (diff)
nixos/boot: merge to maintain commit signatures
Diffstat (limited to 'pkgs/development/python-modules/llama-cpp-python')
-rw-r--r--pkgs/development/python-modules/llama-cpp-python/default.nix103
1 file changed, 103 insertions, 0 deletions
diff --git a/pkgs/development/python-modules/llama-cpp-python/default.nix b/pkgs/development/python-modules/llama-cpp-python/default.nix
new file mode 100644
index 000000000000..4c0391858e4a
--- /dev/null
+++ b/pkgs/development/python-modules/llama-cpp-python/default.nix
@@ -0,0 +1,103 @@
+{
+ lib,
+ stdenv,
+ buildPythonPackage,
+ cmake,
+ fetchFromGitHub,
+ gitUpdater,
+ ninja,
+ pathspec,
+ pyproject-metadata,
+ pytestCheckHook,
+ pythonOlder,
+ scikit-build-core,
+ llama-cpp-python, # self-reference; used below to build the CUDA variant as a passthru test
+
+ config,
+ cudaSupport ? config.cudaSupport, # follows the global nixpkgs CUDA toggle by default
+ cudaPackages ? { },
+
+ diskcache,
+ jinja2,
+ numpy,
+ typing-extensions,
+ scipy,
+ huggingface-hub,
+}:
+let
+ version = "0.3.1";
+in
+buildPythonPackage {
+ pname = "llama-cpp-python";
+ inherit version;
+ pyproject = true; # PEP 517 build via pyproject.toml (scikit-build-core backend)
+
+ disabled = pythonOlder "3.7"; # NOTE(review): presumably upstream's minimum — confirm against pyproject.toml
+
+ stdenv = if cudaSupport then cudaPackages.backendStdenv else stdenv; # RHS `stdenv` is the function argument (attrset is non-recursive); CUDA builds use the NVCC-compatible stdenv
+
+ src = fetchFromGitHub {
+ owner = "abetlen";
+ repo = "llama-cpp-python";
+ rev = "refs/tags/v${version}";
+ hash = "sha256-eO1zvNJZBE5BCnbgbh00tFIRWBCWor1lIsrLXs/HFds=";
+ fetchSubmodules = true; # the bundled llama.cpp sources live in a git submodule
+ };
+
+ dontUseCmakeConfigure = true; # scikit-build-core invokes CMake itself; skip the generic cmake configurePhase
+ SKBUILD_CMAKE_ARGS = lib.strings.concatStringsSep ";" ( # semicolon-separated CMake flags forwarded by scikit-build-core
+ lib.optionals cudaSupport [
+ "-DGGML_CUDA=on"
+ "-DCUDAToolkit_ROOT=${lib.getDev cudaPackages.cuda_nvcc}"
+ "-DCMAKE_CUDA_COMPILER=${lib.getExe cudaPackages.cuda_nvcc}"
+ ]
+ );
+
+ nativeBuildInputs = [
+ cmake
+ ninja
+ pathspec
+ pyproject-metadata
+ scikit-build-core
+ ];
+
+ buildInputs = lib.optionals cudaSupport (
+ with cudaPackages;
+ [
+ cuda_cudart # cuda_runtime.h
+ cuda_cccl # <thrust/*>
+ libcublas # cublas_v2.h
+ ]
+ );
+
+ propagatedBuildInputs = [ # runtime Python dependencies
+ diskcache
+ jinja2
+ numpy
+ typing-extensions
+ ];
+
+ nativeCheckInputs = [ # test-only dependencies
+ pytestCheckHook
+ scipy
+ huggingface-hub
+ ];
+
+ disabledTests = [
+ # tries to download model from huggingface-hub
+ "test_real_model"
+ "test_real_llama"
+ ];
+
+ pythonImportsCheck = [ "llama_cpp" ]; # smoke-test that the built module imports
+
+ passthru.updateScript = gitUpdater { rev-prefix = "v"; }; # track upstream "vX.Y.Z" tags
+ passthru.tests.llama-cpp-python = llama-cpp-python.override { cudaSupport = true; }; # keep the CUDA variant evaluable/buildable
+
+ meta = {
+ description = "Python bindings for llama.cpp";
+ homepage = "https://github.com/abetlen/llama-cpp-python";
+ license = lib.licenses.mit;
+ maintainers = with lib.maintainers; [ kirillrdy ];
+ };
+}