
Commit 6f7adf5

Remove use of the deprecated binary parameter of the Python item exporter (#71)
* Remove use of the deprecated binary parameter of the Python item exporter
* Fix a deprecation message
1 parent a55cc06 commit 6f7adf5

File tree

4 files changed: 13 additions, 21 deletions


sh_scrapy/crawl.py

Lines changed: 12 additions & 1 deletion
@@ -15,6 +15,8 @@
 # and at that point main() function doesn't completed leading to lost log
 # messages.
 
+from sh_scrapy.exceptions import SHScrapyDeprecationWarning
+
 # Keep a reference to standard output/error as they are redirected
 # at log initialization
 _sys_stderr = sys.stderr  # stderr and stoud are redirected to HS later
@@ -51,6 +53,11 @@ def ignore_warnings(**kwargs):
 
     As warnings.catch_warnings, this context manager is not thread-safe.
     """
+    warnings.warn(
+        "The sh_scrapy.crawl.ignore_warnings function is deprecated.",
+        category=SHScrapyDeprecationWarning,
+        stacklevel=2,
+    )
     _filters = warnings.filters[:]
     warnings.filterwarnings('ignore', **kwargs)
     yield
@@ -174,9 +181,13 @@ def _launch():
     _run_usercode(job['spider'], args, _get_apisettings, loghdlr)
 
 
-# TODO: deprecate
 def list_spiders():
     """ An entrypoint for list-spiders."""
+    warnings.warn(
+        "The sh_scrapy.crawl.list_spiders function is deprecated.",
+        category=SHScrapyDeprecationWarning,
+        stacklevel=2,
+    )
     try:
         from scrapy.exceptions import ScrapyDeprecationWarning
         warnings.filterwarnings(
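
Both ignore_warnings and list_spiders now warn with SHScrapyDeprecationWarning when called. For callers that relied on ignore_warnings, the standard library offers an equivalent; the following is a sketch of such a replacement, not code from this commit:

import warnings

# Equivalent of the deprecated sh_scrapy.crawl.ignore_warnings helper:
# suppress one warning category inside a block, restoring filters on exit.
with warnings.catch_warnings():
    warnings.filterwarnings("ignore", category=DeprecationWarning)
    warnings.warn("suppressed inside the block", DeprecationWarning)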

sh_scrapy/extension.py

Lines changed: 1 addition & 4 deletions
@@ -41,10 +41,7 @@ def __init__(self, crawler):
         self.crawler = crawler
         self.logger = logging.getLogger(__name__)
         self._write_item = self.pipe_writer.write_item
-        # https://github.com/scrapy/scrapy/commit/c76190d491fca9f35b6758bdc06c34d77f5d9be9
-        exporter_kwargs = {'binary': False}
-        with ignore_warnings(category=ScrapyDeprecationWarning):
-            self.exporter = PythonItemExporter(**exporter_kwargs)
+        self.exporter = PythonItemExporter()
 
     @classmethod
     def from_crawler(cls, crawler):
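
The binary=False kwarg and the ScrapyDeprecationWarning suppression around it are dropped; the plain PythonItemExporter() call presumably relies on the targeted Scrapy versions already producing non-binary (text) output by default. A minimal sketch of that behaviour, under that assumption and with a made-up field name, not taken from this repository:

from scrapy.exporters import PythonItemExporter

exporter = PythonItemExporter()                        # no deprecated binary kwarg
exported = exporter.export_item({"title": "example"})  # dict items are supported
assert isinstance(exported["title"], str)              # text output, not bytes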

tests/test_crawl.py

Lines changed: 0 additions & 6 deletions
@@ -8,7 +8,6 @@
 from scrapy.exceptions import ScrapyDeprecationWarning
 
 import sh_scrapy.crawl
-from sh_scrapy.crawl import ignore_warnings
 from sh_scrapy.crawl import _fatalerror
 from sh_scrapy.crawl import _get_apisettings
 from sh_scrapy.crawl import _run
@@ -29,11 +28,6 @@ def test_init_module():
     assert sh_scrapy.crawl.socket.getdefaulttimeout() == 60.0
 
 
-def test_ignore_warnings():
-    with ignore_warnings(category=ScrapyDeprecationWarning):
-        warnings.warn("must be suppressed", ScrapyDeprecationWarning)
-
-
 @mock.patch('traceback.print_exception')
 def test_fatal_error(trace_print):
     exception = ValueError('some exception')
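
The test for the now-deprecated ignore_warnings helper is removed rather than rewritten. If coverage for the new deprecation itself were wanted, a hypothetical pytest-style check (not part of this commit; names taken from the diff) could look like this:

import pytest

from sh_scrapy.crawl import ignore_warnings
from sh_scrapy.exceptions import SHScrapyDeprecationWarning


def test_ignore_warnings_is_deprecated():
    # Entering the deprecated context manager should emit the new warning.
    with pytest.warns(SHScrapyDeprecationWarning):
        with ignore_warnings(category=DeprecationWarning):
            pass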

tests/test_extension.py

Lines changed: 0 additions & 10 deletions
@@ -28,16 +28,6 @@ def test_hs_ext_init(hs_ext):
     assert isinstance(hs_ext.exporter, PythonItemExporter)
 
 
-@pytest.mark.skipif(sys.version_info > (2,), reason="requires python2")
-def test_hs_ext_binary_exporter_py2(hs_ext):
-    assert not hasattr(hs_ext.exporter, 'binary')
-
-
-@pytest.mark.skipif(sys.version_info < (3,), reason="requires python3")
-def test_hs_ext_binary_exporter_py3(hs_ext):
-    assert not getattr(hs_ext.exporter, 'binary')
-
-
 @pytest.mark.skipif(sys.version_info < (3, 7), reason="requires python3.7")
 def test_hs_ext_dataclass_item_scraped(hs_ext):
     from dataclasses import dataclass
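
The Python-2/Python-3 checks on the exporter's binary flag disappear together with the kwarg. A version-independent safeguard, if one were still desired, might assert on the exported output instead; this is a hypothetical sketch that reuses the module's hs_ext fixture (assumed to expose the extension instance) and is not part of this commit:

def test_hs_ext_exporter_output_is_text(hs_ext):
    # Hypothetical: export a plain dict and check values stay as str,
    # i.e. the exporter behaves like the old binary=False configuration.
    exported = hs_ext.exporter.export_item({"field": "value"})
    assert isinstance(exported["field"], str)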
