scrapy / scrapy

Compare 5a75b14 ... +1 ... d63cc68

Coverage Reach
core/downloader/handlers/http11.py core/downloader/handlers/http2.py core/downloader/handlers/ftp.py core/downloader/handlers/__init__.py core/downloader/handlers/s3.py core/downloader/handlers/http10.py core/downloader/handlers/datauri.py core/downloader/handlers/file.py core/downloader/handlers/http.py core/downloader/__init__.py core/downloader/webclient.py core/downloader/middleware.py core/downloader/contextfactory.py core/downloader/tls.py core/http2/protocol.py core/http2/stream.py core/http2/agent.py core/engine.py core/scraper.py core/scheduler.py core/spidermw.py utils/python.py utils/misc.py utils/conf.py utils/iterators.py utils/log.py utils/defer.py utils/datatypes.py utils/deprecate.py utils/test.py utils/console.py utils/reactor.py utils/curl.py utils/project.py utils/url.py utils/request.py utils/signal.py utils/response.py utils/ssl.py utils/testproc.py utils/reqser.py utils/trackref.py utils/spider.py utils/benchserver.py utils/testsite.py utils/serialize.py utils/decorators.py utils/display.py utils/sitemap.py utils/gz.py utils/ftp.py utils/engine.py utils/boto.py utils/template.py utils/ossignal.py utils/versions.py utils/httpobj.py utils/job.py utils/asyncgen.py utils/py36.py extensions/feedexport.py extensions/httpcache.py extensions/memusage.py extensions/telnet.py extensions/throttle.py extensions/closespider.py extensions/debug.py extensions/logstats.py extensions/corestats.py extensions/spiderstate.py extensions/statsmailer.py extensions/memdebug.py commands/parse.py commands/genspider.py commands/startproject.py commands/__init__.py commands/check.py commands/runspider.py commands/fetch.py commands/shell.py commands/bench.py commands/settings.py commands/edit.py commands/crawl.py commands/version.py commands/view.py commands/list.py downloadermiddlewares/httpcache.py downloadermiddlewares/cookies.py downloadermiddlewares/robotstxt.py downloadermiddlewares/httpcompression.py downloadermiddlewares/redirect.py downloadermiddlewares/retry.py 
downloadermiddlewares/decompression.py downloadermiddlewares/httpproxy.py downloadermiddlewares/ajaxcrawl.py downloadermiddlewares/stats.py downloadermiddlewares/httpauth.py downloadermiddlewares/useragent.py downloadermiddlewares/downloadtimeout.py downloadermiddlewares/defaultheaders.py http/request/form.py http/request/__init__.py http/request/json_request.py http/request/rpc.py http/response/text.py http/response/__init__.py http/response/html.py http/response/xml.py http/cookies.py http/headers.py http/__init__.py http/common.py pipelines/files.py pipelines/media.py pipelines/images.py pipelines/__init__.py settings/__init__.py settings/default_settings.py spidermiddlewares/referer.py spidermiddlewares/offsite.py spidermiddlewares/depth.py spidermiddlewares/httperror.py spidermiddlewares/urllength.py spiders/crawl.py spiders/feed.py spiders/sitemap.py spiders/__init__.py spiders/init.py exporters.py contracts/__init__.py contracts/default.py crawler.py linkextractors/lxmlhtml.py linkextractors/__init__.py shell.py cmdline.py pqueues.py mail.py robotstxt.py item.py resolver.py responsetypes.py squeues.py dupefilters.py middleware.py statscollectors.py spiderloader.py logformatter.py selector/unified.py selector/__init__.py exceptions.py loader/__init__.py loader/processors.py loader/common.py signals.py signalmanager.py __init__.py link.py extension.py interfaces.py __main__.py

No flags found

Use flags to group coverage reports by test type, project and/or folders.
Then set up custom commit statuses and notifications for each flag.

e.g., #unittest #integration

#production #enterprise

#frontend #backend

Learn more about Codecov Flags here.

Showing 1 of 2 files from the diff.
Other files ignored by Codecov

@@ -4,14 +4,12 @@
Loading
4 4
See documentation in docs/topics/spiders.rst
5 5
"""
6 6
import logging
7 -
import warnings
8 7
from typing import Optional
9 8
10 9
from scrapy import signals
11 10
from scrapy.http import Request
12 11
from scrapy.utils.trackref import object_ref
13 12
from scrapy.utils.url import url_is_from_spider
14 -
from scrapy.utils.deprecate import method_is_overridden
15 13
16 14
17 15
class Spider(object_ref):
@@ -57,34 +55,13 @@
Loading
57 55
        crawler.signals.connect(self.close, signals.spider_closed)
58 56
59 57
    def start_requests(self):
60 -
        cls = self.__class__
61 58
        if not self.start_urls and hasattr(self, 'start_url'):
62 59
            raise AttributeError(
63 60
                "Crawling could not start: 'start_urls' not found "
64 61
                "or empty (but found 'start_url' attribute instead, "
65 62
                "did you miss an 's'?)")
66 -
        if method_is_overridden(cls, Spider, 'make_requests_from_url'):
67 -
            warnings.warn(
68 -
                "Spider.make_requests_from_url method is deprecated; it "
69 -
                "won't be called in future Scrapy releases. Please "
70 -
                "override Spider.start_requests method instead "
71 -
                f"(see {cls.__module__}.{cls.__name__}).",
72 -
            )
73 -
            for url in self.start_urls:
74 -
                yield self.make_requests_from_url(url)
75 -
        else:
76 -
            for url in self.start_urls:
77 -
                yield Request(url, dont_filter=True)
78 -
79 -
    def make_requests_from_url(self, url):
80 -
        """ This method is deprecated. """
81 -
        warnings.warn(
82 -
            "Spider.make_requests_from_url method is deprecated: "
83 -
            "it will be removed and not be called by the default "
84 -
            "Spider.start_requests method in future Scrapy releases. "
85 -
            "Please override Spider.start_requests method instead."
86 -
        )
87 -
        return Request(url, dont_filter=True)
63 +
        for url in self.start_urls:
64 +
            yield Request(url, dont_filter=True)
88 65
89 66
    def _parse(self, response, **kwargs):
90 67
        return self.parse(response, **kwargs)

Everything is accounted for!

No changes detected that need to be reviewed.
What changes does Codecov check for?
Lines not adjusted in the diff that have changed coverage data.
Files that introduced coverage data that had none before.
Files that have missing coverage data that once were tracked.
Files Coverage
scrapy -0.01% 88.08%
Project Totals (162 files) 88.08%
Loading