{
  lib,
  buildPythonPackage,
  fetchFromGitHub,

  # build-system
  setuptools,

  # dependencies
  onnx,
  optimum,
  transformers,

  # optional-dependencies
  onnxruntime,
  # onnxruntime-gpu, unpackaged
  ruff,
}:

buildPythonPackage rec {
  pname = "optimum-onnx";
  version = "0.0.3";
  pyproject = true;

  src = fetchFromGitHub {
    owner = "huggingface";
    repo = "optimum-onnx";
    tag = "v${version}";
    hash = "sha256-IFXtKkJwmrcdjfXE2YccbRylU723fTG70Z6c9fIL5mE=";
  };

  build-system = [
    setuptools
  ];
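
  # Relax upstream's version constraint on transformers so the version in nixpkgs can be used.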
  pythonRelaxDeps = [
    "transformers"
  ];

  dependencies = [
    onnx
    optimum
    transformers
  ];

  optional-dependencies = {
    onnxruntime = [
      onnxruntime
    ];
    # onnxruntime-gpu = [ onnxruntime-gpu ];
    quality = [
      ruff
    ];
  };

  pythonImportsCheck = [ "optimum.onnxruntime" ];

  # Almost all tests need internet access
  doCheck = false;

  meta = {
    description = "Export your model to ONNX and run inference with ONNX Runtime";
    homepage = "https://github.com/huggingface/optimum-onnx";
    changelog = "https://github.com/huggingface/optimum-onnx/releases/tag/${src.tag}";
    license = lib.licenses.asl20;
    maintainers = with lib.maintainers; [ GaetanLepage ];
  };
}