diff options
| author | Kirill Radzikhovskyy <kirillrdy@gmail.com> | 2024-10-19 10:10:06 +1100 |
|---|---|---|
| committer | Kirill Radzikhovskyy <kirillrdy@gmail.com> | 2024-11-03 08:05:38 +1100 |
| commit | 17c58cde1623229a7f34973ca49fb9204badddda (patch) | |
| tree | ae4e752133eaffaeefa49a492f48b5f561f0dbe1 /pkgs/development/python-modules/llama-cpp-python/default.nix | |
| parent | e4ee7324f9c1977c16a9f2426ac4aee102c5a36b (diff) | |
python312Packages.llama-cpp-python: init at 0.3.1
Diffstat (limited to 'pkgs/development/python-modules/llama-cpp-python/default.nix')
| -rw-r--r-- | pkgs/development/python-modules/llama-cpp-python/default.nix | 96 |
1 file changed, 96 insertions, 0 deletions
{
  lib,
  buildPythonPackage,
  cmake,
  fetchFromGitHub,
  gitUpdater,
  ninja,
  pathspec,
  pyproject-metadata,
  pytestCheckHook,
  pythonOlder,
  scikit-build-core,

  # CUDA support follows the global nixpkgs config; cudaPackages defaults
  # to an empty set so the CPU-only build never evaluates the CUDA tree.
  config,
  cudaSupport ? config.cudaSupport,
  cudaPackages ? { },

  # Python runtime dependencies
  diskcache,
  jinja2,
  numpy,
  typing-extensions,

  # test-only dependencies
  scipy,
  huggingface-hub,
}:

buildPythonPackage rec {
  pname = "llama-cpp-python";
  version = "0.3.1";
  pyproject = true;

  disabled = pythonOlder "3.7";

  src = fetchFromGitHub {
    owner = "abetlen";
    repo = "llama-cpp-python";
    rev = "refs/tags/v${version}";
    hash = "sha256-eO1zvNJZBE5BCnbgbh00tFIRWBCWor1lIsrLXs/HFds=";
    # the vendored llama.cpp sources live in a git submodule
    fetchSubmodules = true;
  };

  # scikit-build-core invokes CMake itself; the stdenv cmake configure
  # hook must not run a second, conflicting configure step.
  dontUseCmakeConfigure = true;

  # Extra CMake arguments forwarded through scikit-build-core; evaluates
  # to the empty string when CUDA is disabled.
  SKBUILD_CMAKE_ARGS = lib.concatStringsSep ";" (
    lib.optionals cudaSupport [
      "-DGGML_CUDA=on"
      "-DCUDAToolkit_ROOT=${lib.getDev cudaPackages.cuda_nvcc}"
      "-DCMAKE_CUDA_COMPILER=${lib.getExe cudaPackages.cuda_nvcc}"
    ]
  );

  nativeBuildInputs = [
    cmake
    ninja
    pathspec
    pyproject-metadata
    scikit-build-core
  ];

  buildInputs = lib.optionals cudaSupport [
    cudaPackages.cuda_cudart # cuda_runtime.h
    cudaPackages.cuda_cccl # <thrust/*>
    cudaPackages.libcublas # cublas_v2.h
  ];

  propagatedBuildInputs = [
    diskcache
    jinja2
    numpy
    typing-extensions
  ];

  nativeCheckInputs = [
    pytestCheckHook
    scipy
    huggingface-hub
  ];

  disabledTests = [
    # both tests download a model from huggingface-hub, which is
    # impossible inside the sandbox
    "test_real_model"
    "test_real_llama"
  ];

  pythonImportsCheck = [ "llama_cpp" ];

  # release tags look like "v0.3.1"
  passthru.updateScript = gitUpdater { rev-prefix = "v"; };

  meta = {
    description = "Python bindings for llama.cpp";
    homepage = "https://github.com/abetlen/llama-cpp-python";
    license = lib.licenses.mit;
    maintainers = with lib.maintainers; [ kirillrdy ];
  };
}
