scrapy-plugins / scrapy-splash
Showing 4 of 9 files from the diff.
Newly tracked file
scrapy_splash/utils.py changed.
Newly tracked file
scrapy_splash/cache.py changed.
Newly tracked file
scrapy_splash/request.py changed.
Other files ignored by Codecov

@@ -5,20 +5,12 @@
Loading
5 5
import six
6 6
7 7
from scrapy.http import Headers
8 -
try:
9 -
    from scrapy.utils.python import to_bytes, to_unicode, to_native_str
10 -
except ImportError:
11 -
    # scrapy < 1.1
12 -
    from scrapy.utils.python import unicode_to_str as to_bytes
13 -
    from scrapy.utils.python import str_to_unicode as to_unicode
14 -
15 -
    def to_native_str(text, encoding=None, errors='strict'):
16 -
        """ Return str representation of `text`
17 -
        (bytes in Python 2.x and unicode in Python 3.x). """
18 -
        if six.PY2:
19 -
            return to_bytes(text, encoding, errors)
20 -
        else:
21 -
            return to_unicode(text, encoding, errors)
8 +
import scrapy
9 +
if scrapy.version_info >= (2, ):
10 +
    from scrapy.utils.python import to_unicode
11 +
else:
12 +
    from scrapy.utils.python import to_native_str as to_unicode
13 +
from scrapy.utils.python import to_bytes
22 14
23 15
24 16
def dict_hash(obj, start=''):

@@ -6,11 +6,7 @@
Loading
6 6
from __future__ import absolute_import
7 7
from copy import deepcopy
8 8
9 -
try:
10 -
    from scrapy.dupefilters import RFPDupeFilter
11 -
except ImportError:
12 -
    # scrapy < 1.0
13 -
    from scrapy.dupefilter import RFPDupeFilter
9 +
from scrapy.dupefilters import RFPDupeFilter
14 10
15 11
from scrapy.utils.url import canonicalize_url
16 12
from scrapy.utils.request import request_fingerprint

@@ -8,11 +8,7 @@
Loading
8 8
from __future__ import absolute_import
9 9
import os
10 10
11 -
try:
12 -
    from scrapy.extensions.httpcache import FilesystemCacheStorage
13 -
except ImportError:
14 -
    # scrapy < 1.0
15 -
    from scrapy.contrib.httpcache import FilesystemCacheStorage
11 +
from scrapy.extensions.httpcache import FilesystemCacheStorage
16 12
17 13
from .dupefilter import splash_request_fingerprint
18 14

@@ -5,7 +5,7 @@
Loading
5 5
from scrapy.http import FormRequest
6 6
7 7
from scrapy_splash import SlotPolicy
8 -
from scrapy_splash.utils import to_native_str
8 +
from scrapy_splash.utils import to_unicode
9 9
10 10
# XXX: we can't implement SplashRequest without middleware support
11 11
# because there is no way to set Splash URL based on settings
@@ -20,7 +20,7 @@
Loading
20 20
    It requires SplashMiddleware to work.
21 21
    """
22 22
    def __init__(self,
23 -
                 url=None,
23 +
                 url,
24 24
                 callback=None,
25 25
                 method='GET',
26 26
                 endpoint='render.html',
@@ -37,9 +37,7 @@
Loading
37 37
                 meta=None,
38 38
                 **kwargs):
39 39
40 -
        if url is None:
41 -
            url = 'about:blank'
42 -
        url = to_native_str(url)
40 +
        url = to_unicode(url)
43 41
44 42
        meta = copy.deepcopy(meta) or {}
45 43
        splash_meta = meta.setdefault('splash', {})
Files Coverage
scrapy_splash 93.39%
Project Totals (9 files) 93.39%
218.4
TRAVIS_PYTHON_VERSION=3.6
TRAVIS_OS_NAME=linux
TOXENV=py36
218.1
TRAVIS_PYTHON_VERSION=2.7
TRAVIS_OS_NAME=linux
TOXENV=py27
218.3
TRAVIS_PYTHON_VERSION=3.5
TRAVIS_OS_NAME=linux
TOXENV=py35
218.2
TRAVIS_PYTHON_VERSION=3.4
TRAVIS_OS_NAME=linux
TOXENV=py34

No yaml found.

Create your codecov.yml to customize your Codecov experience

Sunburst
The inner-most circle is the entire project; moving away from the center are folders and, finally, individual files. The size and color of each slice represent the number of statements and the coverage, respectively.
Icicle
The top section represents the entire project, proceeding through folders and finally to individual files. The size and color of each slice represent the number of statements and the coverage, respectively.
Grid
Each block represents a single file in the project. The size and color of each block represent the number of statements and the coverage, respectively.
Loading