scrapy / scrapy

Compare 22bd012 ... +0 ... cc89f6b

Coverage Reach
core/downloader/handlers/http11.py core/downloader/handlers/http2.py core/downloader/handlers/ftp.py core/downloader/handlers/__init__.py core/downloader/handlers/s3.py core/downloader/handlers/http10.py core/downloader/handlers/datauri.py core/downloader/handlers/file.py core/downloader/handlers/http.py core/downloader/__init__.py core/downloader/webclient.py core/downloader/middleware.py core/downloader/contextfactory.py core/downloader/tls.py core/http2/protocol.py core/http2/stream.py core/http2/agent.py core/engine.py core/scraper.py core/scheduler.py core/spidermw.py utils/python.py utils/misc.py utils/conf.py utils/iterators.py utils/defer.py utils/log.py utils/datatypes.py utils/deprecate.py utils/test.py utils/request.py utils/console.py utils/reactor.py utils/curl.py utils/project.py utils/url.py utils/signal.py utils/response.py utils/ssl.py utils/testproc.py utils/trackref.py utils/spider.py utils/benchserver.py utils/testsite.py utils/serialize.py utils/decorators.py utils/display.py utils/sitemap.py utils/gz.py utils/ftp.py utils/engine.py utils/boto.py utils/template.py utils/ossignal.py utils/versions.py utils/reqser.py utils/httpobj.py utils/job.py utils/asyncgen.py utils/py36.py extensions/feedexport.py extensions/httpcache.py extensions/memusage.py extensions/telnet.py extensions/throttle.py extensions/closespider.py extensions/debug.py extensions/logstats.py extensions/corestats.py extensions/spiderstate.py extensions/statsmailer.py extensions/memdebug.py commands/parse.py commands/genspider.py commands/startproject.py commands/__init__.py commands/check.py commands/runspider.py commands/fetch.py commands/shell.py commands/bench.py commands/settings.py commands/edit.py commands/crawl.py commands/version.py commands/view.py commands/list.py http/request/form.py http/request/__init__.py http/request/json_request.py http/request/rpc.py http/response/text.py http/response/__init__.py http/response/html.py http/response/xml.py http/cookies.py 
http/headers.py http/__init__.py http/common.py downloadermiddlewares/httpcache.py downloadermiddlewares/cookies.py downloadermiddlewares/robotstxt.py downloadermiddlewares/httpcompression.py downloadermiddlewares/redirect.py downloadermiddlewares/retry.py downloadermiddlewares/decompression.py downloadermiddlewares/httpproxy.py downloadermiddlewares/ajaxcrawl.py downloadermiddlewares/stats.py downloadermiddlewares/httpauth.py downloadermiddlewares/useragent.py downloadermiddlewares/downloadtimeout.py downloadermiddlewares/defaultheaders.py pipelines/files.py pipelines/media.py pipelines/images.py pipelines/__init__.py settings/__init__.py settings/default_settings.py spidermiddlewares/referer.py spidermiddlewares/offsite.py spidermiddlewares/depth.py spidermiddlewares/httperror.py spidermiddlewares/urllength.py spiders/crawl.py spiders/__init__.py spiders/feed.py spiders/sitemap.py spiders/init.py exporters.py contracts/__init__.py contracts/default.py crawler.py linkextractors/lxmlhtml.py linkextractors/__init__.py shell.py cmdline.py pqueues.py squeues.py mail.py robotstxt.py item.py resolver.py responsetypes.py dupefilters.py middleware.py statscollectors.py spiderloader.py logformatter.py selector/unified.py selector/__init__.py exceptions.py loader/__init__.py loader/processors.py loader/common.py signals.py signalmanager.py __init__.py link.py extension.py interfaces.py __main__.py

No flags found

Use flags to group coverage reports by test type, project and/or folders.
Then set up custom commit statuses and notifications for each flag.

e.g., #unittest #integration

#production #enterprise

#frontend #backend

Learn more about Codecov Flags here.


@@ -4,7 +4,7 @@
Loading
4 4
5 5
See documentation in docs/topics/request-response.rst
6 6
"""
7 -
from typing import Generator
7 +
from typing import Generator, Tuple
8 8
from urllib.parse import urljoin
9 9
10 10
from scrapy.exceptions import NotSupported
@@ -16,6 +16,19 @@
Loading
16 16
17 17
18 18
class Response(object_ref):
19 +
    """An object that represents an HTTP response, which is usually
20 +
    downloaded (by the Downloader) and fed to the Spiders for processing.
21 +
    """
22 +
23 +
    attributes: Tuple[str, ...] = (
24 +
        "url", "status", "headers", "body", "flags", "request", "certificate", "ip_address", "protocol",
25 +
    )
26 +
    """A tuple of :class:`str` objects containing the name of all public
27 +
    attributes of the class that are also keyword parameters of the
28 +
    ``__init__`` method.
29 +
30 +
    Currently used by :meth:`Response.replace`.
31 +
    """
19 32
20 33
    def __init__(
21 34
        self,
@@ -97,12 +110,8 @@
Loading
97 110
        return self.replace()
98 111
99 112
    def replace(self, *args, **kwargs):
100 -
        """Create a new Response with the same attributes except for those
101 -
        given new values.
102 -
        """
103 -
        for x in [
104 -
            "url", "status", "headers", "body", "request", "flags", "certificate", "ip_address", "protocol",
105 -
        ]:
113 +
        """Create a new Response with the same attributes except for those given new values"""
114 +
        for x in self.attributes:
106 115
            kwargs.setdefault(x, getattr(self, x))
107 116
        cls = kwargs.pop('cls', self.__class__)
108 117
        return cls(*args, **kwargs)

@@ -8,7 +8,7 @@
Loading
8 8
import json
9 9
import warnings
10 10
from contextlib import suppress
11 -
from typing import Generator
11 +
from typing import Generator, Tuple
12 12
from urllib.parse import urljoin
13 13
14 14
import parsel
@@ -30,6 +30,8 @@
Loading
30 30
    _DEFAULT_ENCODING = 'ascii'
31 31
    _cached_decoded_json = _NONE
32 32
33 +
    attributes: Tuple[str, ...] = Response.attributes + ("encoding",)
34 +
33 35
    def __init__(self, *args, **kwargs):
34 36
        self._encoding = kwargs.pop('encoding', None)
35 37
        self._cached_benc = None
@@ -53,10 +55,6 @@
Loading
53 55
        else:
54 56
            super()._set_body(body)
55 57
56 -
    def replace(self, *args, **kwargs):
57 -
        kwargs.setdefault('encoding', self.encoding)
58 -
        return Response.replace(self, *args, **kwargs)
59 -
60 58
    @property
61 59
    def encoding(self):
62 60
        return self._declared_encoding() or self._body_inferred_encoding()

Everything is accounted for!

No changes detected that need to be reviewed.
What changes does Codecov check for?
Lines not adjusted in the diff whose coverage data has changed.
Files that introduced coverage data where none existed before.
Files that are missing coverage data but were previously tracked.
Files Coverage
scrapy -<.01% 88.42%
Project Totals (162 files) 88.42%
Loading