summaryrefslogtreecommitdiffstats
path: root/pkgs/development/python-modules
diff options
context:
space:
mode:
authorMario Rodas <marsam@users.noreply.github.com>2020-10-11 17:33:00 -0500
committerJon <jonringer@users.noreply.github.com>2020-10-11 20:33:56 -0700
commit8c154d05454a8c04c9b8a082d084978dd5b919ae (patch)
treef74120bc19be2cc2a9b92b57ee0f29385d939177 /pkgs/development/python-modules
parentb3dca2ae798e13d3e9a08ea5c2e398e6e7057500 (diff)
pythonPackages.scrapy: 2.3.0 -> 2.4.0
https://github.com/scrapy/scrapy/releases/tag/2.4.0
Diffstat (limited to 'pkgs/development/python-modules')
-rw-r--r--pkgs/development/python-modules/scrapy/default.nix34
1 file changed, 21 insertions, 13 deletions
diff --git a/pkgs/development/python-modules/scrapy/default.nix b/pkgs/development/python-modules/scrapy/default.nix
index db91528da632..a2db6b9cb3b2 100644
--- a/pkgs/development/python-modules/scrapy/default.nix
+++ b/pkgs/development/python-modules/scrapy/default.nix
@@ -3,7 +3,7 @@
, isPy27
, fetchPypi
, glibcLocales
-, pytest
+, pytestCheckHook
, testfixtures
, pillow
, twisted
@@ -18,7 +18,6 @@
, cssselect
, zope_interface
, protego
-, lib
, jmespath
, sybil
, pytest-twisted
@@ -28,7 +27,7 @@
}:
buildPythonPackage rec {
- version = "2.3.0";
+ version = "2.4.0";
pname = "Scrapy";
disabled = isPy27;
@@ -36,7 +35,7 @@ buildPythonPackage rec {
checkInputs = [
glibcLocales
jmespath
- pytest
+ pytestCheckHook
sybil
testfixtures
pillow
@@ -63,19 +62,28 @@ buildPythonPackage rec {
LC_ALL = "en_US.UTF-8";
- # Disable doctest plugin—enabled in the shipped pytest.ini—because it causes pytest to hang
- # Ignore proxy tests because requires mitmproxy
- # Ignore utils_display tests because it requires pygments
- # Ignore test_retry_dns_error because tries to resolve an invalid dns and weirdly fails with "Reactor was unclean"
- # Ignore xml encoding test on darwin because lxml can't find encodings https://bugs.launchpad.net/lxml/+bug/707396
- checkPhase = ''
+ # Disable doctest plugin because it causes pytest to hang
+ preCheck = ''
substituteInPlace pytest.ini --replace "--doctest-modules" ""
- pytest --ignore=tests/test_linkextractors_deprecated.py --ignore=tests/test_proxy_connect.py --ignore=tests/test_utils_display.py --deselect tests/test_crawl.py::CrawlTestCase::test_retry_dns_error ${lib.optionalString stdenv.isDarwin "--deselect tests/test_utils_iterators.py::LxmlXmliterTestCase::test_xmliter_encoding"}
'';
+ pytestFlagsArray = [
+ "--ignore=tests/test_proxy_connect.py"
+ "--ignore=tests/test_utils_display.py"
+ "--ignore=tests/test_command_check.py"
+ ];
+
+ disabledTests = [
+ "FTPFeedStorageTest"
+ "test_noconnect"
+ "test_retry_dns_error"
+ "test_custom_asyncio_loop_enabled_true"
+ "test_custom_loop_asyncio"
+ ] ++ stdenv.lib.optionals stdenv.isDarwin [ "test_xmliter_encoding" ];
+
src = fetchPypi {
inherit pname version;
- sha256 = "b4d08cdacb615563c291d053ef1ba2dc08d9d4b6d81578684eaa1cf7b832f90c";
+ sha256 = "4ea7fbc902ee0b0a79b154d07a5f4e747e2146f272a748557941946000728479";
};
postInstall = ''
@@ -84,7 +92,7 @@ buildPythonPackage rec {
install -m 644 -D extras/scrapy_zsh_completion $out/share/zsh/site-functions/_scrapy
'';
- meta = with lib; {
+ meta = with stdenv.lib; {
description = "A fast high-level web crawling and web scraping framework, used to crawl websites and extract structured data from their pages";
homepage = "https://scrapy.org/";
license = licenses.bsd3;