{
  lib,
  buildPythonPackage,
  fetchFromGitHub,
  python,
  isPy3k,
}:

buildPythonPackage rec {
  pname = "jieba";
  version = "0.42.1";
  format = "setuptools";

  # The PyPI tarball ships no tests, so fetch the source from GitHub instead.
  src = fetchFromGitHub {
    owner = "fxsjy";
    repo = "jieba";
    rev = "v${version}";
    sha256 = "028vmd6sj6wn9l1ilw7qfmlpyiysnlzdgdlhwxs6j4fvq0gyrwxk";
  };

  # Tests fail with UnicodeEncodeError on Python 2
  doCheck = isPy3k;

  # Citing https://github.com/fxsjy/jieba/issues/384: "testcases is in a mess",
  # so we run only a couple of tests that currently work
  checkPhase = ''
    ${python.interpreter} test/test.py
    ${python.interpreter} test/test_tokenize.py
  '';

  meta = {
    description = "Chinese words segmentation utilities";
    homepage = "https://github.com/fxsjy/jieba";
    license = lib.licenses.mit;
    teams = [ lib.teams.tts ];
  };
}