#4955 Remove redundant int/float conversions that were only needed in Python 2

Open Miroslav Šedivý eumiro
Coverage Reach
utils/python.py utils/misc.py utils/conf.py utils/iterators.py utils/log.py utils/defer.py utils/datatypes.py utils/deprecate.py utils/test.py utils/console.py utils/reactor.py utils/curl.py utils/project.py utils/url.py utils/signal.py utils/request.py utils/response.py utils/ssl.py utils/testproc.py utils/reqser.py utils/trackref.py utils/spider.py utils/benchserver.py utils/testsite.py utils/serialize.py utils/decorators.py utils/display.py utils/sitemap.py utils/gz.py utils/ftp.py utils/engine.py utils/boto.py utils/template.py utils/ossignal.py utils/versions.py utils/httpobj.py utils/job.py utils/asyncgen.py utils/py36.py core/downloader/handlers/http11.py core/downloader/handlers/ftp.py core/downloader/handlers/__init__.py core/downloader/handlers/s3.py core/downloader/handlers/http10.py core/downloader/handlers/datauri.py core/downloader/handlers/file.py core/downloader/handlers/http.py core/downloader/__init__.py core/downloader/webclient.py core/downloader/middleware.py core/downloader/tls.py core/downloader/contextfactory.py core/engine.py core/scraper.py core/scheduler.py core/spidermw.py extensions/feedexport.py extensions/httpcache.py extensions/memusage.py extensions/telnet.py extensions/throttle.py extensions/closespider.py extensions/debug.py extensions/logstats.py extensions/corestats.py extensions/spiderstate.py extensions/statsmailer.py extensions/memdebug.py commands/parse.py commands/genspider.py commands/startproject.py commands/__init__.py commands/check.py commands/runspider.py commands/fetch.py commands/shell.py commands/bench.py commands/settings.py commands/edit.py commands/crawl.py commands/version.py commands/view.py commands/list.py http/request/form.py http/request/__init__.py http/request/json_request.py http/request/rpc.py http/response/text.py http/response/__init__.py http/response/html.py http/response/xml.py http/cookies.py http/headers.py http/__init__.py http/common.py downloadermiddlewares/httpcache.py 
downloadermiddlewares/cookies.py downloadermiddlewares/robotstxt.py downloadermiddlewares/redirect.py downloadermiddlewares/decompression.py downloadermiddlewares/httpcompression.py downloadermiddlewares/httpproxy.py downloadermiddlewares/retry.py downloadermiddlewares/ajaxcrawl.py downloadermiddlewares/stats.py downloadermiddlewares/httpauth.py downloadermiddlewares/useragent.py downloadermiddlewares/downloadtimeout.py downloadermiddlewares/defaultheaders.py pipelines/files.py pipelines/media.py pipelines/images.py pipelines/__init__.py settings/__init__.py settings/default_settings.py spidermiddlewares/referer.py spidermiddlewares/offsite.py spidermiddlewares/depth.py spidermiddlewares/httperror.py spidermiddlewares/urllength.py spiders/crawl.py spiders/__init__.py spiders/feed.py spiders/sitemap.py spiders/init.py exporters.py contracts/__init__.py contracts/default.py crawler.py linkextractors/lxmlhtml.py linkextractors/__init__.py shell.py cmdline.py pqueues.py robotstxt.py mail.py item.py resolver.py responsetypes.py squeues.py dupefilters.py middleware.py statscollectors.py spiderloader.py logformatter.py selector/unified.py selector/__init__.py exceptions.py loader/__init__.py loader/processors.py loader/common.py signals.py signalmanager.py __init__.py link.py extension.py interfaces.py __main__.py

No flags found

Use flags to group coverage reports by test type, project and/or folders.
Then set up custom commit statuses and notifications for each flag.

e.g., #unittest #integration

#production #enterprise

#frontend #backend

Learn more about Codecov Flags here.


@@ -41,7 +41,7 @@
Loading
41 41
        if issubclass(cls, ignore):
42 42
            continue
43 43
        oldest = min(wdict.values())
44 -
        s += f"{cls.__name__:<30} {len(wdict):6}   oldest: {int(now - oldest)}s ago\n"
44 +
        s += f"{cls.__name__:<30} {len(wdict):6}   oldest: {now - oldest:.0f}s ago\n"
45 45
    return s
46 46
47 47

@@ -11,10 +11,10 @@
Loading
11 11
class LogStats:
12 12
    """Log basic scraping stats periodically"""
13 13
14 -
    def __init__(self, stats, interval=60.0):
14 +
    def __init__(self, stats, interval=60):
15 15
        self.stats = stats
16 16
        self.interval = interval
17 -
        self.multiplier = 60.0 / self.interval
17 +
        self.multiplier = 60 / self.interval
18 18
        self.task = None
19 19
20 20
    @classmethod

@@ -23,9 +23,9 @@
Loading
23 23
24 24
AUTOTHROTTLE_ENABLED = False
25 25
AUTOTHROTTLE_DEBUG = False
26 -
AUTOTHROTTLE_MAX_DELAY = 60.0
27 -
AUTOTHROTTLE_START_DELAY = 5.0
28 -
AUTOTHROTTLE_TARGET_CONCURRENCY = 1.0
26 +
AUTOTHROTTLE_MAX_DELAY = 60
27 +
AUTOTHROTTLE_START_DELAY = 5
28 +
AUTOTHROTTLE_TARGET_CONCURRENCY = 1
29 29
30 30
BOT_NAME = 'scrapybot'
31 31
@@ -211,7 +211,7 @@
Loading
211 211
212 212
SCHEDULER_DEBUG = False
213 213
214 -
LOGSTATS_INTERVAL = 60.0
214 +
LOGSTATS_INTERVAL = 60
215 215
216 216
MAIL_HOST = 'localhost'
217 217
MAIL_PORT = 25
@@ -222,7 +222,7 @@
Loading
222 222
MEMDEBUG_ENABLED = False        # enable memory debugging
223 223
MEMDEBUG_NOTIFY = []            # send memory debugging report by mail at engine shutdown
224 224
225 -
MEMUSAGE_CHECK_INTERVAL_SECONDS = 60.0
225 +
MEMUSAGE_CHECK_INTERVAL_SECONDS = 60
226 226
MEMUSAGE_ENABLED = True
227 227
MEMUSAGE_LIMIT_MB = 0
228 228
MEMUSAGE_NOTIFY_MAIL = []

@@ -74,7 +74,7 @@
Loading
74 74
        target_delay = latency / self.target_concurrency
75 75
76 76
        # Adjust the delay to make it closer to target_delay
77 -
        new_delay = (slot.delay + target_delay) / 2.0
77 +
        new_delay = (slot.delay + target_delay) / 2
78 78
79 79
        # If target delay is bigger than old delay, then use it instead of mean.
80 80
        # It works better with problematic sites.

Learn more Showing 1 file with coverage changes.

Changes in scrapy/core/downloader/__init__.py
-2
+1
+1
Loading file...
Files Coverage
scrapy -0.02% 88.00%
Project Totals (158 files) 88.00%
Loading