{
  lib,
  beautifulsoup4,
  buildPythonPackage,
  fetchFromGitHub,
  fetchpatch,
  filelock,
  lxml,
  requests,
  setuptools-scm,
}:

buildPythonPackage rec {
  pname = "snscrape";
  version = "0.7.0.20230622";
  pyproject = true;

  src = fetchFromGitHub {
    owner = "JustAnotherArchivist";
    repo = "snscrape";
    tag = "v${version}";
    hash = "sha256-9xAUMr1SWFePEvIz6DFEexk9Txex3u8wPNfMAdxEUCA=";
  };

  patches = [
    # Fix find_module deprecation, https://github.com/JustAnotherArchivist/snscrape/pull/1036
    (fetchpatch {
      name = "fix-find-module.patch";
      url = "https://github.com/JustAnotherArchivist/snscrape/commit/7f4717aaaaa8d4c96fa1dbe72ded799a722732ee.patch";
      hash = "sha256-6O9bZ5GlTPuR0MML/O4DDRBcDX/CJbU54ZE551cfPHo=";
    })
  ];

  build-system = [ setuptools-scm ];

  dependencies = [
    beautifulsoup4
    filelock
    lxml
    requests
  ]
  ++ requests.optional-dependencies.socks;

  # There are no tests upstream; make sure the executable works.
  checkPhase = ''
    runHook preCheck

    export PATH=$PATH:$out/bin
    snscrape --help

    runHook postCheck
  '';

  pythonImportsCheck = [ "snscrape" ];

  meta = {
    description = "Social networking service scraper";
    homepage = "https://github.com/JustAnotherArchivist/snscrape";
    license = lib.licenses.gpl3Plus;
    maintainers = with lib.maintainers; [ ivan ];
    mainProgram = "snscrape";
  };
}