author     Maxim Cournoyer <maxim.cournoyer@gmail.com>  2022-05-18 10:25:14 -0400
committer  Maxim Cournoyer <maxim.cournoyer@gmail.com>  2022-05-18 10:58:19 -0400
commit     1c4241d91515b75d4dab160be6b2ad840ceb6335 (patch)
tree       86c72b21b21521d250c59899bf4e118ed25e4275
parent     894b89c17bef61eec61e96f9c5acdd749aec119b (diff)
gnu: python-scrapy: Parallelize test suite.
* gnu/packages/python-web.scm (python-scrapy)[phases]{check}: Run in
parallel.  Disable problematic (when run in parallel) tests.  Use complete
sentences for comments.
[native-inputs]: Add python-pytest-xdist.
[description]: Use double space between sentences.
-rw-r--r--  gnu/packages/python-web.scm  | 13 ++++++++++---
1 file changed, 10 insertions(+), 3 deletions(-)
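For reference, here is a sketch of the resulting 'check' phase, reassembled from the hunk below; the enclosing (replace 'check ...) scaffolding is assumed rather than shown by the patch, while the pytest options, ignored files, and deselected test names are taken verbatim from it.  The "-n" option is provided by the newly added python-pytest-xdist plugin and receives the number of build cores reported by Guix's parallel-job-count helper.

(replace 'check
  (lambda* (#:key tests? #:allow-other-keys)
    (when tests?
      (invoke "pytest"
              ;; Distribute the tests across all available cores.
              "-n" (number->string (parallel-job-count))
              ;; These tests fail when run in parallel (see:
              ;; https://github.com/scrapy/scrapy/issues/5502).
              "--ignore" "tests/test_engine.py"
              "--ignore" "tests/test_engine_stop_download_bytes.py"
              "--ignore" "tests/test_engine_stop_download_headers.py"
              ;; This test requires network access.
              "--ignore" "tests/test_command_check.py"
              "-k"
              (string-append
               ;; The following tests fail for unknown reasons.
               "not test_server_set_cookie_domain_suffix_public_private"
               " and not test_user_set_cookie_domain_suffix_public_private"
               " and not test_pformat")))))

With python-pytest-xdist added to the native inputs, rebuilding the package (for instance with ./pre-inst-env guix build python-scrapy from a Guix checkout) runs the remaining tests across parallel workers instead of serially.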
diff --git a/gnu/packages/python-web.scm b/gnu/packages/python-web.scm
index aa510b57b7..2a688b53df 100644
--- a/gnu/packages/python-web.scm
+++ b/gnu/packages/python-web.scm
@@ -6914,11 +6914,17 @@ regular expressions.")
(lambda* (#:key tests? #:allow-other-keys)
(when tests?
(invoke "pytest"
- ;; requires network access
+ "-n" (number->string (parallel-job-count))
+ ;; These tests fail when run in parallel (see:
+ ;; https://github.com/scrapy/scrapy/issues/5502).
+ "--ignore" "tests/test_engine.py"
+ "--ignore" "tests/test_engine_stop_download_bytes.py"
+ "--ignore" "tests/test_engine_stop_download_headers.py"
+ ;; This test requires network access.
"--ignore" "tests/test_command_check.py"
"-k"
(string-append
- ;; Failing for unknown reasons
+ ;; The following tests fail for unknown reasons.
"not test_server_set_cookie_domain_suffix_public_private"
" and not test_user_set_cookie_domain_suffix_public_private"
" and not test_pformat")
@@ -6943,6 +6949,7 @@ regular expressions.")
python-zope-interface))
(native-inputs
(list python-pytest
+ python-pytest-xdist
python-pyftpdlib
python-sybil
python-testfixtures
@@ -6951,6 +6958,6 @@ regular expressions.")
(synopsis "High-level Web crawling and Web scraping framework")
(description "Scrapy is a fast high-level web crawling and web
scraping framework, used to crawl websites and extract structured data
-from their pages. It can be used for a wide range of purposes, from data
+from their pages.  It can be used for a wide range of purposes, from data
mining to monitoring and automated testing.")
(license license:bsd-3)))