diff --git a/pkgs/development/python-modules/scrapy-deltafetch/default.nix b/pkgs/development/python-modules/scrapy-deltafetch/default.nix
new file mode 100644
index 00000000000..cf6f8a01071
--- /dev/null
+++ b/pkgs/development/python-modules/scrapy-deltafetch/default.nix
@@ -0,0 +1,22 @@
+{ stdenv, fetchPypi, buildPythonPackage, pytest, scrapy, bsddb3 }:
+
+buildPythonPackage rec {
+  pname = "scrapy-deltafetch";
+  version = "1.2.1";
+
+  src = fetchPypi {
+    inherit pname version;
+    sha256 = "1m511psddvlapg492ny36l8rzy7z4i39yx6a1agxzfz6s9b83fq8";
+  };
+
+  propagatedBuildInputs = [ bsddb3 scrapy ];
+
+  checkInputs = [ pytest ];
+
+  meta = with stdenv.lib; {
+    description = "Scrapy spider middleware to ignore requests to pages containing items seen in previous crawls";
+    homepage = "https://github.com/scrapy-plugins/scrapy-deltafetch";
+    license = licenses.bsd3;
+    maintainers = with maintainers; [ evanjs ];
+  };
+}
diff --git a/pkgs/top-level/python-packages.nix b/pkgs/top-level/python-packages.nix
index 00104604b3a..4aa900ae1a0 100644
--- a/pkgs/top-level/python-packages.nix
+++ b/pkgs/top-level/python-packages.nix
@@ -6904,6 +6904,8 @@ in {
 
   scrapy-fake-useragent = callPackage ../development/python-modules/scrapy-fake-useragent { };
 
+  scrapy-deltafetch = callPackage ../development/python-modules/scrapy-deltafetch { };
+
   pandocfilters = callPackage ../development/python-modules/pandocfilters { };
 
   pandoc-attributes = callPackage ../development/python-modules/pandoc-attributes { };
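
Not part of the patch itself: below is a minimal Nix sketch of how the new attribute could be consumed once this change is merged, assuming entries in python-packages.nix are exposed under the usual python3Packages / withPackages sets (the file name usage.nix and the binding name ps are illustrative, not from the patch).

# usage.nix -- hypothetical consumer of the new attribute
with import <nixpkgs> { };

python3.withPackages (ps: with ps; [
  scrapy             # runtime dependency, already packaged in nixpkgs
  scrapy-deltafetch  # attribute added to python-packages.nix by this change
])

Building it with `nix-build usage.nix` would exercise the new derivation, including the bsddb3 propagated dependency declared in default.nix.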