#4953 Add support for variable-type values in 'scrapy.FormRequest()'

Open Akshay Sharma AKSHAYSHARMAJS
Coverage Reach
utils/python.py utils/misc.py utils/conf.py utils/iterators.py utils/log.py utils/defer.py utils/datatypes.py utils/deprecate.py utils/test.py utils/console.py utils/reactor.py utils/curl.py utils/project.py utils/url.py utils/signal.py utils/request.py utils/response.py utils/ssl.py utils/testproc.py utils/reqser.py utils/trackref.py utils/spider.py utils/benchserver.py utils/testsite.py utils/serialize.py utils/decorators.py utils/display.py utils/sitemap.py utils/gz.py utils/ftp.py utils/engine.py utils/boto.py utils/template.py utils/ossignal.py utils/versions.py utils/httpobj.py utils/job.py utils/asyncgen.py utils/py36.py core/downloader/handlers/http11.py core/downloader/handlers/ftp.py core/downloader/handlers/__init__.py core/downloader/handlers/s3.py core/downloader/handlers/http10.py core/downloader/handlers/datauri.py core/downloader/handlers/file.py core/downloader/handlers/http.py core/downloader/__init__.py core/downloader/webclient.py core/downloader/middleware.py core/downloader/tls.py core/downloader/contextfactory.py core/engine.py core/scraper.py core/scheduler.py core/spidermw.py extensions/feedexport.py extensions/httpcache.py extensions/memusage.py extensions/telnet.py extensions/throttle.py extensions/closespider.py extensions/debug.py extensions/logstats.py extensions/corestats.py extensions/spiderstate.py extensions/statsmailer.py extensions/memdebug.py commands/parse.py commands/genspider.py commands/startproject.py commands/__init__.py commands/check.py commands/runspider.py commands/fetch.py commands/shell.py commands/bench.py commands/settings.py commands/edit.py commands/crawl.py commands/version.py commands/view.py commands/list.py http/request/form.py http/request/__init__.py http/request/json_request.py http/request/rpc.py http/response/text.py http/response/__init__.py http/response/html.py http/response/xml.py http/cookies.py http/headers.py http/__init__.py http/common.py downloadermiddlewares/httpcache.py 
downloadermiddlewares/cookies.py downloadermiddlewares/robotstxt.py downloadermiddlewares/redirect.py downloadermiddlewares/decompression.py downloadermiddlewares/httpcompression.py downloadermiddlewares/httpproxy.py downloadermiddlewares/retry.py downloadermiddlewares/ajaxcrawl.py downloadermiddlewares/stats.py downloadermiddlewares/httpauth.py downloadermiddlewares/useragent.py downloadermiddlewares/downloadtimeout.py downloadermiddlewares/defaultheaders.py pipelines/files.py pipelines/media.py pipelines/images.py pipelines/__init__.py settings/__init__.py settings/default_settings.py spidermiddlewares/referer.py spidermiddlewares/offsite.py spidermiddlewares/depth.py spidermiddlewares/httperror.py spidermiddlewares/urllength.py spiders/crawl.py spiders/__init__.py spiders/feed.py spiders/sitemap.py spiders/init.py exporters.py contracts/__init__.py contracts/default.py crawler.py linkextractors/lxmlhtml.py linkextractors/__init__.py shell.py cmdline.py pqueues.py robotstxt.py mail.py item.py resolver.py responsetypes.py squeues.py dupefilters.py middleware.py statscollectors.py spiderloader.py logformatter.py selector/unified.py selector/__init__.py exceptions.py loader/__init__.py loader/processors.py loader/common.py signals.py signalmanager.py __init__.py link.py extension.py interfaces.py __main__.py

No flags found

Use flags to group coverage reports by test type, project, and/or folder.
Then set up custom commit statuses and notifications for each flag.

e.g., #unittest #integration

#production #enterprise

#frontend #backend

Learn more about Codecov Flags here.

Showing 1 of 1 files from the diff.

@@ -12,7 +12,6 @@
Loading
12 12
from w3lib.html import strip_html5_whitespace
13 13
14 14
from scrapy.http.request import Request
15 -
from scrapy.utils.python import to_bytes, is_listlike
16 15
from scrapy.utils.response import get_base_url
17 16
18 17
@@ -27,8 +26,7 @@
Loading
27 26
        super().__init__(*args, **kwargs)
28 27
29 28
        if formdata:
30 -
            items = formdata.items() if isinstance(formdata, dict) else formdata
31 -
            querystr = _urlencode(items, self.encoding)
29 +
            querystr = urlencode(formdata, doseq=1, encoding=self.encoding)
32 30
            if self.method == 'POST':
33 31
                self.headers.setdefault(b'Content-Type', b'application/x-www-form-urlencoded')
34 32
                self._set_body(querystr)
@@ -67,13 +65,6 @@
Loading
67 65
    return urljoin(form.base_url, url)
68 66
69 67
70 -
def _urlencode(seq, enc):
71 -
    values = [(to_bytes(k, enc), to_bytes(v, enc))
72 -
              for k, vs in seq
73 -
              for v in (vs if is_listlike(vs) else [vs])]
74 -
    return urlencode(values, doseq=1)
75 -
76 -
77 68
def _get_form(response, formname, formid, formnumber, formxpath):
78 69
    """Find the form element """
79 70
    root = create_root_node(response.text, lxml.html.HTMLParser,

Learn more. Showing 1 file with coverage changes.

Changes in scrapy/extensions/memusage.py
-1
+1
Loading file...
Files Coverage
scrapy +<.01% 88.02%
Project Totals (158 files) 88.02%
Loading