14
0
forked from pool/python-Scrapy

- Update to 2.0.1:

  * Python 2 support has been removed
  * Partial coroutine syntax support and experimental asyncio support
  * New Response.follow_all method
  * FTP support for media pipelines
  * New Response.certificate attribute
  * IPv6 support through DNS_RESOLVER
  * Response.follow_all now supports an empty URL iterable as input
  * Removed top-level reactor imports to prevent errors about the wrong
    Twisted reactor being installed when setting a different Twisted
    reactor using TWISTED_REACTOR
- Add zope-exception-test_crawler.patch, rewriting one testcase to pass
  with our version of Zope.
- Update BuildRequires based on test requirements.

OBS-URL: https://build.opensuse.org/package/show/devel:languages:python/python-Scrapy?expand=0&rev=12
This commit is contained in:
2020-04-02 03:41:29 +00:00
committed by Git OBS Bridge
parent 535b71edfe
commit 5577430fb1
5 changed files with 57 additions and 4 deletions

View File

@@ -1,3 +0,0 @@
version https://git-lfs.github.com/spec/v1
oid sha256:fe06576f9a4971de9dc0175c60fd92561e8275f2bad585c1cb5d65c5181b2db0
size 977658

3
Scrapy-2.0.1.tar.gz Normal file
View File

@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:85581a01f4160a103ca9906ffa4e44474f4ecd1685f0934728892c58ebf111f6
size 983545

View File

@@ -1,3 +1,21 @@
-------------------------------------------------------------------
Thu Apr 2 03:38:20 UTC 2020 - Steve Kowalik <steven.kowalik@suse.com>

- Update to 2.0.1:
* Python 2 support has been removed
* Partial coroutine syntax support and experimental asyncio support
* New Response.follow_all method
* FTP support for media pipelines
* New Response.certificate attribute
* IPv6 support through DNS_RESOLVER
* Response.follow_all now supports an empty URL iterable as input
* Removed top-level reactor imports to prevent errors about the wrong
Twisted reactor being installed when setting a different Twisted
reactor using TWISTED_REACTOR
- Add zope-exception-test_crawler.patch, rewriting one testcase to pass
with our version of Zope.
- Update BuildRequires based on test requirements.
-------------------------------------------------------------------
Thu Jan 16 15:00:50 UTC 2020 - Marketa Calabkova <mcalabkova@suse.com>

View File

@@ -17,29 +17,35 @@
%{?!python_module:%define python_module() python-%{**} python3-%{**}}
%define skip_python2 1
Name: python-Scrapy
Version: 1.8.0
Version: 2.0.1
Release: 0
Summary: A high-level Python Screen Scraping framework
License: BSD-3-Clause
Group: Development/Languages/Python
URL: https://scrapy.org
Source: https://files.pythonhosted.org/packages/source/S/Scrapy/Scrapy-%{version}.tar.gz
Patch0: zope-exception-test_crawler.patch
BuildRequires: %{python_module Pillow}
BuildRequires: %{python_module Protego >= 0.1.15}
BuildRequires: %{python_module PyDispatcher >= 2.0.5}
BuildRequires: %{python_module Twisted >= 17.9.0}
BuildRequires: %{python_module cryptography >= 2.0}
BuildRequires: %{python_module cssselect >= 0.9.1}
BuildRequires: %{python_module dbm}
BuildRequires: %{python_module jmespath}
BuildRequires: %{python_module lxml >= 3.5.0}
BuildRequires: %{python_module mock}
BuildRequires: %{python_module parsel >= 1.5.0}
BuildRequires: %{python_module pyOpenSSL >= 16.2.0}
BuildRequires: %{python_module pytest-twisted}
BuildRequires: %{python_module pytest-xdist}
BuildRequires: %{python_module pytest}
BuildRequires: %{python_module queuelib >= 1.4.2}
BuildRequires: %{python_module service_identity >= 16.0.0}
BuildRequires: %{python_module setuptools}
BuildRequires: %{python_module sybil}
BuildRequires: %{python_module testfixtures}
BuildRequires: %{python_module w3lib >= 1.17.2}
BuildRequires: fdupes
@@ -76,6 +82,7 @@ Provides documentation for %{name}.
%prep
%setup -q -n Scrapy-%{version}
sed -i -e 's:= python:= python3:g' docs/Makefile
%autopatch -p1
%build
%python_build

View File

@@ -0,0 +1,28 @@
Index: Scrapy-2.0.1/tests/test_crawler.py
===================================================================
--- Scrapy-2.0.1.orig/tests/test_crawler.py
+++ Scrapy-2.0.1/tests/test_crawler.py
@@ -20,6 +20,8 @@ from scrapy.extensions.throttle import A
from scrapy.extensions import telnet
from scrapy.utils.test import get_testenv
+from zope.interface.exceptions import MultipleInvalid
+
class BaseCrawlerTest(unittest.TestCase):
@@ -157,11 +159,9 @@ class CrawlerRunnerTestCase(BaseCrawlerT
settings = Settings({
'SPIDER_LOADER_CLASS': 'tests.test_crawler.SpiderLoaderWithWrongInterface'
})
- with warnings.catch_warnings(record=True) as w:
- self.assertRaises(AttributeError, CrawlerRunner, settings)
- self.assertEqual(len(w), 1)
- self.assertIn("SPIDER_LOADER_CLASS", str(w[0].message))
- self.assertIn("scrapy.interfaces.ISpiderLoader", str(w[0].message))
+ msg = "SpiderLoaderWithWrongInterface.*scrapy.interfaces.ISpiderLoader"
+ with self.assertRaisesRegex(MultipleInvalid, msg):
+ CrawlerRunner(settings)
def test_crawler_runner_accepts_dict(self):
runner = CrawlerRunner({'foo': 'bar'})