import logging
import sys
import warnings
from logging.config import dictConfig

from twisted.python import log as twisted_log
from twisted.python.failure import Failure

import scrapy
from scrapy.exceptions import ScrapyDeprecationWarning
from scrapy.settings import Settings
from scrapy.utils.versions import scrapy_components_versions


logger = logging.getLogger(__name__)


def failure_to_exc_info(failure):
    """Extract exc_info from Failure instances"""
    if isinstance(failure, Failure):
        return (failure.type, failure.value, failure.getTracebackObject())
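
# Example: forwarding a Twisted Failure's traceback to the stdlib logging
# module via the ``exc_info`` argument (an illustrative errback;
# ``download_errback`` is a hypothetical name):
#
#     def download_errback(failure):
#         logger.error("Download failed",
#                      exc_info=failure_to_exc_info(failure))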


class TopLevelFormatter(logging.Filter):
    """Keep only the top-level part of loggers' names (direct children of the
    root) in records.

    This filter replaces Scrapy loggers' names with 'scrapy'. This mimics
    the old Scrapy log behaviour and helps shorten long names.

    Since it can't be set on just one logger (it wouldn't propagate to its
    children), it is set on the root handler, with a parametrized
    ``loggers`` list of the names it should act on.
    """

    def __init__(self, loggers=None):
        self.loggers = loggers or []

    def filter(self, record):
        if any(record.name.startswith(logger + '.') for logger in self.loggers):
            record.name = record.name.split('.', 1)[0]
        return True
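
# For instance, with ``TopLevelFormatter(['scrapy'])`` installed on the root
# handler, a record logged through 'scrapy.core.engine' is reported simply
# as 'scrapy', while records from unrelated loggers keep their full names.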


DEFAULT_LOGGING = {
    'version': 1,
    'disable_existing_loggers': False,
    'loggers': {
        'hpack': {
            'level': 'ERROR',
        },
        'scrapy': {
            'level': 'DEBUG',
        },
        'twisted': {
            'level': 'ERROR',
        },
    }
}
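
# These defaults are applied with ``dictConfig`` in ``configure_logging``
# and can be adjusted afterwards through the stdlib API, e.g. to silence
# Scrapy's own DEBUG records:
#
#     logging.getLogger('scrapy').setLevel(logging.WARNING)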


def configure_logging(settings=None, install_root_handler=True):
    """
    Initialize logging defaults for Scrapy.

    :param settings: settings used to create and configure a handler for the
        root logger (default: None).
    :type settings: dict, :class:`~scrapy.settings.Settings` object or ``None``

    :param install_root_handler: whether to install a root logging handler
        (default: True)
    :type install_root_handler: bool

    This function does:

    - Route warnings and Twisted logging through Python standard logging
    - Assign the DEBUG and ERROR levels to the Scrapy and Twisted loggers,
      respectively
    - Route stdout to the log if the LOG_STDOUT setting is True

    When ``install_root_handler`` is True (default), this function also
    creates a handler for the root logger according to the given settings
    (see :ref:`topics-logging-settings`). You can override the default
    options using the ``settings`` argument. When ``settings`` is empty or
    None, defaults are used.
    """
    if not sys.warnoptions:
        # Route warnings through python logging
        logging.captureWarnings(True)

    observer = twisted_log.PythonLoggingObserver('twisted')
    observer.start()

    dictConfig(DEFAULT_LOGGING)

    if isinstance(settings, dict) or settings is None:
        settings = Settings(settings)

    if settings.getbool('LOG_STDOUT'):
        sys.stdout = StreamLogger(logging.getLogger('stdout'))

    if install_root_handler:
        install_scrapy_root_handler(settings)
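
# Example: a script that wants full control over logging can skip Scrapy's
# root handler and configure the stdlib directly (a sketch, following the
# pattern from the Scrapy logging docs):
#
#     from scrapy.utils.log import configure_logging
#
#     configure_logging(install_root_handler=False)
#     logging.basicConfig(
#         filename='log.txt',
#         format='%(levelname)s: %(message)s',
#         level=logging.INFO,
#     )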


def install_scrapy_root_handler(settings):
    global _scrapy_root_handler

    if (_scrapy_root_handler is not None
            and _scrapy_root_handler in logging.root.handlers):
        logging.root.removeHandler(_scrapy_root_handler)
    logging.root.setLevel(logging.NOTSET)
    _scrapy_root_handler = _get_handler(settings)
    logging.root.addHandler(_scrapy_root_handler)


def get_scrapy_root_handler():
    return _scrapy_root_handler


_scrapy_root_handler = None


def _get_handler(settings):
    """Return a log handler object according to the settings."""
    filename = settings.get('LOG_FILE')
    if filename:
        encoding = settings.get('LOG_ENCODING')
        handler = logging.FileHandler(filename, encoding=encoding)
    elif settings.getbool('LOG_ENABLED'):
        handler = logging.StreamHandler()
    else:
        handler = logging.NullHandler()

    formatter = logging.Formatter(
        fmt=settings.get('LOG_FORMAT'),
        datefmt=settings.get('LOG_DATEFORMAT')
    )
    handler.setFormatter(formatter)
    handler.setLevel(settings.get('LOG_LEVEL'))
    if settings.getbool('LOG_SHORT_NAMES'):
        handler.addFilter(TopLevelFormatter(['scrapy']))
    return handler
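
# For example (a sketch): ``_get_handler(Settings({'LOG_FILE': 'scrapy.log'}))``
# returns a FileHandler; with no LOG_FILE and LOG_ENABLED=True it returns a
# StreamHandler writing to stderr; with LOG_ENABLED=False it returns a
# NullHandler that discards every record.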


def log_scrapy_info(settings):
    logger.info("Scrapy %(version)s started (bot: %(bot)s)",
                {'version': scrapy.__version__, 'bot': settings['BOT_NAME']})
    versions = [
        f"{name} {version}"
        for name, version in scrapy_components_versions()
        if name != "Scrapy"
    ]
    logger.info("Versions: %(versions)s", {'versions': ", ".join(versions)})
    # Imported locally so the default reactor is not installed as a side
    # effect of importing this module.
    from twisted.internet import reactor
    logger.debug("Using reactor: %s.%s", reactor.__module__, reactor.__class__.__name__)
    from twisted.internet import asyncioreactor
    if isinstance(reactor, asyncioreactor.AsyncioSelectorReactor):
        logger.debug(
            "Using asyncio event loop: %s.%s",
            reactor._asyncioEventloop.__module__,
            reactor._asyncioEventloop.__class__.__name__,
        )


class StreamLogger:
    """Fake file-like stream object that redirects writes to a logger instance

    Taken from:
        https://www.electricmonk.nl/log/2011/08/14/redirect-stdout-and-stderr-to-a-logger-in-python/
    """
    def __init__(self, logger, log_level=logging.INFO):
        self.logger = logger
        self.log_level = log_level
        self.linebuf = ''

    def write(self, buf):
        for line in buf.rstrip().splitlines():
            self.logger.log(self.log_level, line.rstrip())

    def flush(self):
        for h in self.logger.handlers:
            h.flush()
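
# ``configure_logging`` uses this class to redirect stdout when the
# LOG_STDOUT setting is enabled; the same pattern could cover stderr (an
# illustrative, hypothetical snippet):
#
#     sys.stderr = StreamLogger(logging.getLogger('stderr'), logging.ERROR)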


class LogCounterHandler(logging.Handler):
    """Record the number of log records emitted per level in the crawler stats"""

    def __init__(self, crawler, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.crawler = crawler

    def emit(self, record):
        sname = f'log_count/{record.levelname}'
        self.crawler.stats.inc_value(sname)
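
# Example: attaching this handler to the root logger so every record emitted
# during a crawl is counted under a 'log_count/<LEVEL>' stats key (a sketch
# of how a crawler could wire it up; ``crawler`` and ``settings`` are
# assumed to exist):
#
#     handler = LogCounterHandler(crawler, level=settings.get('LOG_LEVEL'))
#     logging.root.addHandler(handler)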


def logformatter_adapter(logkws):
    """
    Helper that takes the dictionary output from the methods in LogFormatter
    and adapts it into a tuple of positional arguments for logger.log calls,
    handling backward compatibility as well.
    """
    if not {'level', 'msg', 'args'} <= set(logkws):
        warnings.warn('Missing keys in LogFormatter method',
                      ScrapyDeprecationWarning)

    if 'format' in logkws:
        warnings.warn('`format` key in LogFormatter methods has been '
                      'deprecated, use `msg` instead',
                      ScrapyDeprecationWarning)

    level = logkws.get('level', logging.INFO)
    message = logkws.get('format', logkws.get('msg'))
    # NOTE: This also handles 'args' being an empty dict, that case doesn't
    # play well in logger.log calls
    args = logkws if not logkws.get('args') else logkws['args']

    return (level, message, args)
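
# Example: turning a LogFormatter result into a ``logger.log`` call (a
# sketch; ``logkws`` would come from a LogFormatter method such as
# ``crawled``):
#
#     logkws = crawler.logformatter.crawled(request, response, spider)
#     logger.log(*logformatter_adapter(logkws), extra={'spider': spider})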
