path: root/pkgs/development/python-modules/tensorflow-datasets/default.nix
{
  lib,
  buildPythonPackage,
  fetchFromGitHub,
  fetchpatch2,

  # build system
  setuptools,

  # dependencies
  absl-py,
  array-record,
  dm-tree,
  etils,
  immutabledict,
  importlib-resources,
  numpy,
  promise,
  protobuf,
  psutil,
  pyarrow,
  requests,
  simple-parsing,
  tensorflow-metadata,
  termcolor,
  toml,
  tqdm,
  wrapt,

  # tests
  apache-beam,
  beautifulsoup4,
  click,
  cloudpickle,
  datasets,
  dill,
  ffmpeg,
  imagemagick,
  jax,
  jaxlib,
  jinja2,
  langdetect,
  lxml,
  matplotlib,
  mlcroissant,
  mwparserfromhell,
  mwxml,
  networkx,
  nltk,
  opencv4,
  pandas,
  pillow,
  pycocotools,
  pydub,
  pytest-xdist,
  pytestCheckHook,
  scikit-image,
  scipy,
  sortedcontainers,
  tensorflow,
  tifffile,
  zarr,
}:

buildPythonPackage (finalAttrs: {
  pname = "tensorflow-datasets";
  version = "4.9.9";
  pyproject = true;

  src = fetchFromGitHub {
    owner = "tensorflow";
    repo = "datasets";
    tag = "v${finalAttrs.version}";
    hash = "sha256-ZXaPYmj8aozfe6ygzKybId8RZ1TqPuIOSpd8XxnRHus=";
  };

  patches = [
    # TypeError: Cannot handle this data type: (1, 1, 4), <u2
    # Issue: https://github.com/tensorflow/datasets/issues/11148
    # PR: https://github.com/tensorflow/datasets/pull/11149
    (fetchpatch2 {
      name = "fix-pillow-12-compat";
      url = "https://github.com/tensorflow/datasets/pull/11149/commits/21062d65b33978f2263443280c03413add5c0224.patch";
      hash = "sha256-GWb+1E5lQNhFVp57sqjp+WqzZSva1AGpXe9fbvXXeIA=";
    })
  ];

  build-system = [ setuptools ];

  dependencies = [
    absl-py
    array-record
    dm-tree
    etils
    immutabledict
    importlib-resources
    numpy
    promise
    protobuf
    psutil
    pyarrow
    requests
    simple-parsing
    tensorflow-metadata
    termcolor
    toml
    tqdm
    wrapt
  ]
  ++ etils.optional-dependencies.epath
  ++ etils.optional-dependencies.etree;

  pythonImportsCheck = [ "tensorflow_datasets" ];

  nativeCheckInputs = [
    apache-beam
    beautifulsoup4
    click
    cloudpickle
    datasets
    dill
    ffmpeg
    imagemagick
    jax
    jaxlib
    jinja2
    langdetect
    lxml
    matplotlib
    mlcroissant
    mwparserfromhell
    mwxml
    networkx
    nltk
    opencv4
    pandas
    pillow
    pycocotools
    pydub
    pytest-xdist
    pytestCheckHook
    scikit-image
    scipy
    sortedcontainers
    tensorflow
    tifffile
    zarr
  ];

  disabledTests = [
    # Since updating apache-beam to 2.65.0
    # RuntimeError: Unable to pickle fn CallableWrapperDoFn...: maximum recursion depth exceeded
    # https://github.com/tensorflow/datasets/issues/11055
    "test_download_and_prepare_as_dataset"
  ];

  disabledTestPaths = [
    # Sandbox violations: network access, filesystem write attempts outside of build dir, ...
    "tensorflow_datasets/core/dataset_builder_test.py"
    "tensorflow_datasets/core/dataset_info_test.py"
    "tensorflow_datasets/core/features/features_test.py"
    "tensorflow_datasets/core/github_api/github_path_test.py"
    "tensorflow_datasets/core/registered_test.py"
    "tensorflow_datasets/core/utils/gcs_utils_test.py"
    "tensorflow_datasets/import_without_tf_test.py"
    "tensorflow_datasets/proto/build_tf_proto_test.py"
    "tensorflow_datasets/scripts/cli/build_test.py"
    "tensorflow_datasets/datasets/imagenet2012_corrupted/imagenet2012_corrupted_dataset_builder_test.py"

    # Requires `pretty_midi` which is not packaged in `nixpkgs`.
    "tensorflow_datasets/audio/groove.py"
    "tensorflow_datasets/datasets/groove/groove_dataset_builder_test.py"

    # Requires `crepe` which is not packaged in `nixpkgs`.
    "tensorflow_datasets/audio/nsynth.py"
    "tensorflow_datasets/datasets/nsynth/nsynth_dataset_builder_test.py"

    # Requires `conllu` which is not packaged in `nixpkgs`.
    "tensorflow_datasets/core/dataset_builders/conll/conllu_dataset_builder_test.py"
    "tensorflow_datasets/datasets/universal_dependencies/universal_dependencies_dataset_builder_test.py"
    "tensorflow_datasets/datasets/xtreme_pos/xtreme_pos_dataset_builder_test.py"

    # Requires `gcld3` and `pretty_midi` which are not packaged in `nixpkgs`.
    "tensorflow_datasets/core/lazy_imports_lib_test.py"

    # AttributeError: 'NoneType' object has no attribute 'Table'
    "tensorflow_datasets/core/dataset_builder_beam_test.py"
    "tensorflow_datasets/core/dataset_builders/adhoc_builder_test.py"
    "tensorflow_datasets/core/split_builder_test.py"
    "tensorflow_datasets/core/writer_test.py"

    # Requires `tensorflow_io` which is not packaged in `nixpkgs`.
    "tensorflow_datasets/core/features/audio_feature_test.py"
    "tensorflow_datasets/image/lsun_test.py"

    # Fails with `TypeError: Constant constructor takes either 0 or 2 positional arguments`
    # deep in TF AutoGraph. Doesn't reproduce in Docker with Ubuntu 22.04, so it is likely
    # caused by version differences in some of the dependencies.
    "tensorflow_datasets/rl_unplugged/rlu_atari/rlu_atari_test.py"

    # Fails with `ValueError: setting an array element with a sequence`
    "tensorflow_datasets/core/dataset_utils_test.py"
    "tensorflow_datasets/core/features/sequence_feature_test.py"

    # Requires `tensorflow_docs` which is not packaged in `nixpkgs` and the test is for documentation anyway.
    "tensorflow_datasets/scripts/documentation/build_api_docs_test.py"

    # Not a test, should not be executed.
    "tensorflow_datasets/testing/test_utils.py"

    # Requires `gcld3` and `nltk.punkt` which are not packaged in `nixpkgs`.
    "tensorflow_datasets/text/c4_test.py"
    "tensorflow_datasets/text/c4_utils_test.py"

    # AttributeError: 'NoneType' object has no attribute 'Table'
    "tensorflow_datasets/core/file_adapters_test.py::test_read_write"
    "tensorflow_datasets/text/c4_wsrs/c4_wsrs_test.py::C4WSRSTest"
  ];

  meta = {
    description = "Library of datasets ready to use with TensorFlow";
    homepage = "https://www.tensorflow.org/datasets/overview";
    changelog = "https://github.com/tensorflow/datasets/releases/tag/${finalAttrs.src.tag}";
    license = lib.licenses.asl20;
    maintainers = with lib.maintainers; [ ndl ];
  };
})
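
# A minimal usage sketch (not part of the derivation itself): the package is exposed
# like any other `python3Packages` attribute, so it can be tried in an ad-hoc shell,
# e.g.
#
#   nix-shell -p "python3.withPackages (ps: [ ps.tensorflow-datasets ps.tensorflow ])"
#
# and then exercised through the standard TFDS entry point:
#
#   import tensorflow_datasets as tfds
#   ds = tfds.load("mnist", split="train")
#
# The `mnist` dataset here is purely illustrative; any registered TFDS builder works
# the same way.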