scrapy / scrapy
Showing 2 of 4 files from the diff; other files are ignored by Codecov.

scrapy/utils/misc.py (newly tracked file, changed)
scrapy/commands/check.py (newly tracked file, changed)

scrapy/utils/misc.py:

@@ -1,6 +1,8 @@
 """Helper functions which don't fit anywhere else"""
+import os
 import re
 import hashlib
+from contextlib import contextmanager
 from importlib import import_module
 from pkgutil import iter_modules
 
@@ -142,3 +144,21 @@
         return objcls.from_settings(settings, *args, **kwargs)
     else:
         return objcls(*args, **kwargs)
+
+
+@contextmanager
+def set_environ(**kwargs):
+    """Temporarily set environment variables inside the context manager and
+    fully restore previous environment afterwards
+    """
+
+    original_env = {k: os.environ.get(k) for k in kwargs}
+    os.environ.update(kwargs)
+    try:
+        yield
+    finally:
+        for k, v in original_env.items():
+            if v is None:
+                del os.environ[k]
+            else:
+                os.environ[k] = v
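A quick usage sketch of the new helper (the MY_FLAG variable name is hypothetical, used only for illustration):

import os
from scrapy.utils.misc import set_environ

# Assuming MY_FLAG (a hypothetical variable) is not already set:
assert 'MY_FLAG' not in os.environ

with set_environ(MY_FLAG='1'):
    # The variable is visible while the block runs.
    assert os.environ['MY_FLAG'] == '1'

# On exit the previous environment is fully restored: the key is deleted
# because it did not exist before (or reset to its old value if it did).
assert 'MY_FLAG' not in os.environ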

scrapy/commands/check.py:

@@ -6,7 +6,7 @@
 
 from scrapy.commands import ScrapyCommand
 from scrapy.contracts import ContractsManager
-from scrapy.utils.misc import load_object
+from scrapy.utils.misc import load_object, set_environ
 from scrapy.utils.conf import build_component_list
 
 
@@ -68,16 +68,17 @@
 
         spider_loader = self.crawler_process.spider_loader
 
-        for spidername in args or spider_loader.list():
-            spidercls = spider_loader.load(spidername)
-            spidercls.start_requests = lambda s: conman.from_spider(s, result)
-
-            tested_methods = conman.tested_methods_from_spidercls(spidercls)
-            if opts.list:
-                for method in tested_methods:
-                    contract_reqs[spidercls.name].append(method)
-            elif tested_methods:
-                self.crawler_process.crawl(spidercls)
+        with set_environ(SCRAPY_CHECK='true'):
+            for spidername in args or spider_loader.list():
+                spidercls = spider_loader.load(spidername)
+                spidercls.start_requests = lambda s: conman.from_spider(s, result)
+
+                tested_methods = conman.tested_methods_from_spidercls(spidercls)
+                if opts.list:
+                    for method in tested_methods:
+                        contract_reqs[spidercls.name].append(method)
+                elif tested_methods:
+                    self.crawler_process.crawl(spidercls)
 
         # start checks
         if opts.list:
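The practical effect of the SCRAPY_CHECK variable is that spider code can now detect when it runs under `scrapy check` and avoid side effects. A minimal sketch (the spider and its skip logic are illustrative, not part of this diff):

import os
import scrapy

class ExampleSpider(scrapy.Spider):
    # Hypothetical spider, shown only to illustrate the new flag.
    name = 'example'
    start_urls = ['http://example.com']

    def parse(self, response):
        # SCRAPY_CHECK is set to 'true' while contract checks run, so
        # expensive side effects (e.g. database writes) can be skipped.
        if os.environ.get('SCRAPY_CHECK'):
            self.logger.info('Contract check run; skipping side effects')
        yield {'url': response.url}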
Files coverage:
scrapy  85.44%
Project totals (169 files)  85.44%
Travis CI builds:
6506.2  TRAVIS_PYTHON_VERSION=2.7  TRAVIS_OS_NAME=linux  TOXENV=jessie
6506.5  TRAVIS_PYTHON_VERSION=3.4  TRAVIS_OS_NAME=linux  TOXENV=py34
6506.6  TRAVIS_PYTHON_VERSION=3.5  TRAVIS_OS_NAME=linux  TOXENV=py35
6506.7  TRAVIS_PYTHON_VERSION=3.6  TRAVIS_OS_NAME=linux  TOXENV=py36
6506.8  TRAVIS_PYTHON_VERSION=3.7  TRAVIS_OS_NAME=linux  TOXENV=py37
Codecov YAML configuration:

comment:
  layout: "header, diff, tree"

coverage:
  status:
    project: false