Skip to content

Commit

Permalink
Reducing amount of warnings during test run (scrapy#5162)
Browse files Browse the repository at this point in the history
* put flake8 options into separate file to remove pytest warnings

* remove ResourceLeaked warning in pypy

* suppress warnings from twisted

* ignore deprecation warnings here

* ignore deprecation warning in tests of deprecated methods

* ignore deprecation warnings here

* update test classes

* don't use deprecated method call

* ignore deprecation warnings here

* proper warning class

* more selective ignoring

* Revert "don't use deprecated method call"

This reverts commit 59216ab.
  • Loading branch information
whalebot-helmsman authored May 28, 2021
1 parent ee682af commit 23cfdb0
Show file tree
Hide file tree
Showing 10 changed files with 67 additions and 45 deletions.
19 changes: 19 additions & 0 deletions .flake8
Original file line number Diff line number Diff line change
@@ -0,0 +1,19 @@
[flake8]

max-line-length = 119
ignore = W503

exclude =
# Exclude files that are meant to provide top-level imports
# E402: Module level import not at top of file
# F401: Module imported but unused
scrapy/__init__.py E402
scrapy/core/downloader/handlers/http.py F401
scrapy/http/__init__.py F401
scrapy/linkextractors/__init__.py E402 F401
scrapy/selector/__init__.py F401
scrapy/spiders/__init__.py E402 F401

# Issues pending a review:
scrapy/utils/url.py F403 F405
tests/test_loader.py E741
2 changes: 2 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,8 @@ htmlcov/
.coverage
.pytest_cache/
.coverage.*
coverage.*
test-output.*
.cache/
.mypy_cache/
/tests/keys/localhost.crt
Expand Down
9 changes: 5 additions & 4 deletions conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,10 +21,11 @@ def _py_files(folder):
*_py_files("tests/CrawlerRunner"),
]

for line in open('tests/ignores.txt'):
file_path = line.strip()
if file_path and file_path[0] != '#':
collect_ignore.append(file_path)
with open('tests/ignores.txt') as reader:
for line in reader:
file_path = line.strip()
if file_path and file_path[0] != '#':
collect_ignore.append(file_path)

if not H2_ENABLED:
collect_ignore.extend(
Expand Down
19 changes: 2 additions & 17 deletions pytest.ini
Original file line number Diff line number Diff line change
Expand Up @@ -20,20 +20,5 @@ addopts =
--ignore=docs/utils
markers =
only_asyncio: marks tests as only enabled when --reactor=asyncio is passed
flake8-max-line-length = 119
flake8-ignore =
W503

# Exclude files that are meant to provide top-level imports
# E402: Module level import not at top of file
# F401: Module imported but unused
scrapy/__init__.py E402
scrapy/core/downloader/handlers/http.py F401
scrapy/http/__init__.py F401
scrapy/linkextractors/__init__.py E402 F401
scrapy/selector/__init__.py F401
scrapy/spiders/__init__.py E402 F401

# Issues pending a review:
scrapy/utils/url.py F403 F405
tests/test_loader.py E741
filterwarnings=
ignore::DeprecationWarning:twisted.web.test.test_webclient
12 changes: 8 additions & 4 deletions tests/test_exporters.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,12 +6,14 @@
import unittest
from io import BytesIO
from datetime import datetime
from warnings import catch_warnings, filterwarnings

import lxml.etree
from itemadapter import ItemAdapter

from scrapy.item import Item, Field
from scrapy.utils.python import to_unicode
from scrapy.exceptions import ScrapyDeprecationWarning
from scrapy.exporters import (
BaseItemExporter, PprintItemExporter, PickleItemExporter, CsvItemExporter,
XmlItemExporter, JsonLinesItemExporter, JsonItemExporter,
Expand Down Expand Up @@ -172,10 +174,12 @@ def test_export_item_dict_list(self):
self.assertEqual(type(exported['age'][0]['age'][0]), dict)

def test_export_binary(self):
exporter = PythonItemExporter(binary=True)
value = self.item_class(name='John\xa3', age='22')
expected = {b'name': b'John\xc2\xa3', b'age': b'22'}
self.assertEqual(expected, exporter.export_item(value))
with catch_warnings():
filterwarnings('ignore', category=ScrapyDeprecationWarning)
exporter = PythonItemExporter(binary=True)
value = self.item_class(name='John\xa3', age='22')
expected = {b'name': b'John\xc2\xa3', b'age': b'22'}
self.assertEqual(expected, exporter.export_item(value))

def test_nonstring_types_item(self):
item = self._get_nonstring_types_item()
Expand Down
4 changes: 2 additions & 2 deletions tests/test_feedexport.py
Original file line number Diff line number Diff line change
Expand Up @@ -515,7 +515,7 @@ def from_crawler(cls, crawler, *args, feed_options=None, **kwargs):

class DummyBlockingFeedStorage(BlockingFeedStorage):

def __init__(self, uri):
def __init__(self, uri, *args, feed_options=None):
self.path = file_uri_to_path(uri)

def _store_in_thread(self, file):
Expand All @@ -541,7 +541,7 @@ class LogOnStoreFileStorage:
It can be used to make sure `store` method is invoked.
"""

def __init__(self, uri):
def __init__(self, uri, feed_options=None):
self.path = file_uri_to_path(uri)
self.logger = getLogger()

Expand Down
12 changes: 8 additions & 4 deletions tests/test_http_response.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
import unittest
from unittest import mock
from warnings import catch_warnings
from warnings import catch_warnings, filterwarnings

from w3lib.encoding import resolve_encoding

Expand Down Expand Up @@ -134,7 +134,9 @@ def _assert_response_values(self, response, encoding, body):
assert isinstance(response.text, str)
self._assert_response_encoding(response, encoding)
self.assertEqual(response.body, body_bytes)
self.assertEqual(response.body_as_unicode(), body_unicode)
with catch_warnings():
filterwarnings("ignore", category=ScrapyDeprecationWarning)
self.assertEqual(response.body_as_unicode(), body_unicode)
self.assertEqual(response.text, body_unicode)

def _assert_response_encoding(self, response, encoding):
Expand Down Expand Up @@ -345,8 +347,10 @@ def test_unicode_body(self):
r1 = self.response_class('http://www.example.com', body=original_string, encoding='cp1251')

# check body_as_unicode
self.assertTrue(isinstance(r1.body_as_unicode(), str))
self.assertEqual(r1.body_as_unicode(), unicode_string)
with catch_warnings():
filterwarnings("ignore", category=ScrapyDeprecationWarning)
self.assertTrue(isinstance(r1.body_as_unicode(), str))
self.assertEqual(r1.body_as_unicode(), unicode_string)

# check response.text
self.assertTrue(isinstance(r1.text, str))
Expand Down
24 changes: 13 additions & 11 deletions tests/test_item.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
import unittest
from unittest import mock
from warnings import catch_warnings
from warnings import catch_warnings, filterwarnings

from scrapy.exceptions import ScrapyDeprecationWarning
from scrapy.item import ABCMeta, _BaseItem, BaseItem, DictItem, Field, Item, ItemMeta
Expand Down Expand Up @@ -328,16 +328,18 @@ class SubclassedBaseItem(BaseItem):
class SubclassedItem(Item):
pass

self.assertTrue(isinstance(BaseItem(), BaseItem))
self.assertTrue(isinstance(SubclassedBaseItem(), BaseItem))
self.assertTrue(isinstance(Item(), BaseItem))
self.assertTrue(isinstance(SubclassedItem(), BaseItem))

# make sure internal checks using private _BaseItem class succeed
self.assertTrue(isinstance(BaseItem(), _BaseItem))
self.assertTrue(isinstance(SubclassedBaseItem(), _BaseItem))
self.assertTrue(isinstance(Item(), _BaseItem))
self.assertTrue(isinstance(SubclassedItem(), _BaseItem))
with catch_warnings():
filterwarnings("ignore", category=ScrapyDeprecationWarning)
self.assertTrue(isinstance(BaseItem(), BaseItem))
self.assertTrue(isinstance(SubclassedBaseItem(), BaseItem))
self.assertTrue(isinstance(Item(), BaseItem))
self.assertTrue(isinstance(SubclassedItem(), BaseItem))

# make sure internal checks using private _BaseItem class succeed
self.assertTrue(isinstance(BaseItem(), _BaseItem))
self.assertTrue(isinstance(SubclassedBaseItem(), _BaseItem))
self.assertTrue(isinstance(Item(), _BaseItem))
self.assertTrue(isinstance(SubclassedItem(), _BaseItem))

def test_deprecation_warning(self):
"""
Expand Down
2 changes: 1 addition & 1 deletion tests/test_utils_deprecate.py
Original file line number Diff line number Diff line change
Expand Up @@ -108,7 +108,7 @@ def test_warning_on_instance(self):

# ignore subclassing warnings
with warnings.catch_warnings():
warnings.simplefilter('ignore', ScrapyDeprecationWarning)
warnings.simplefilter('ignore', MyWarning)

class UserClass(Deprecated):
pass
Expand Down
9 changes: 7 additions & 2 deletions tests/test_utils_python.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,8 +5,9 @@
import unittest
from datetime import datetime
from itertools import count
from warnings import catch_warnings
from warnings import catch_warnings, filterwarnings

from scrapy.exceptions import ScrapyDeprecationWarning
from scrapy.utils.python import (
memoizemethod_noargs, binary_is_text, equal_attributes,
WeakKeyCache, get_func_args, to_bytes, to_unicode,
Expand Down Expand Up @@ -160,7 +161,11 @@ class _Weakme:
pass

_values = count()
wk = WeakKeyCache(lambda k: next(_values))

with catch_warnings():
filterwarnings("ignore", category=ScrapyDeprecationWarning)
wk = WeakKeyCache(lambda k: next(_values))

k = _Weakme()
v = wk[k]
self.assertEqual(v, wk[k])
Expand Down

0 comments on commit 23cfdb0

Please sign in to comment.