scrapy / scrapy
Showing 1 of 2 files from the diff.
Other files ignored by Codecov

@@ -4,14 +4,12 @@
Loading
4 4
See documentation in docs/topics/spiders.rst
5 5
"""
6 6
import logging
7 -
import warnings
8 7
from typing import Optional
9 8
10 9
from scrapy import signals
11 10
from scrapy.http import Request
12 11
from scrapy.utils.trackref import object_ref
13 12
from scrapy.utils.url import url_is_from_spider
14 -
from scrapy.utils.deprecate import method_is_overridden
15 13
16 14
17 15
class Spider(object_ref):
@@ -57,34 +55,13 @@
Loading
57 55
        crawler.signals.connect(self.close, signals.spider_closed)
58 56
59 57
    def start_requests(self):
60 -
        cls = self.__class__
61 58
        if not self.start_urls and hasattr(self, 'start_url'):
62 59
            raise AttributeError(
63 60
                "Crawling could not start: 'start_urls' not found "
64 61
                "or empty (but found 'start_url' attribute instead, "
65 62
                "did you miss an 's'?)")
66 -
        if method_is_overridden(cls, Spider, 'make_requests_from_url'):
67 -
            warnings.warn(
68 -
                "Spider.make_requests_from_url method is deprecated; it "
69 -
                "won't be called in future Scrapy releases. Please "
70 -
                "override Spider.start_requests method instead "
71 -
                f"(see {cls.__module__}.{cls.__name__}).",
72 -
            )
73 -
            for url in self.start_urls:
74 -
                yield self.make_requests_from_url(url)
75 -
        else:
76 -
            for url in self.start_urls:
77 -
                yield Request(url, dont_filter=True)
78 -
79 -
    def make_requests_from_url(self, url):
80 -
        """ This method is deprecated. """
81 -
        warnings.warn(
82 -
            "Spider.make_requests_from_url method is deprecated: "
83 -
            "it will be removed and not be called by the default "
84 -
            "Spider.start_requests method in future Scrapy releases. "
85 -
            "Please override Spider.start_requests method instead."
86 -
        )
87 -
        return Request(url, dont_filter=True)
63 +
        for url in self.start_urls:
64 +
            yield Request(url, dont_filter=True)
88 65
89 66
    def _parse(self, response, **kwargs):
90 67
        return self.parse(response, **kwargs)
Files Coverage
scrapy 88.08%
Project Totals (162 files) 88.08%
Sunburst
The inner-most circle is the entire project; moving away from the center are folders and, finally, a single file. The size and color of each slice represent the number of statements and the coverage, respectively.
Icicle
The top section represents the entire project, proceeding with folders and finally individual files. The size and color of each slice represent the number of statements and the coverage, respectively.
Grid
Each block represents a single file in the project. The size and color of each block represent the number of statements and the coverage, respectively.
Loading