scrapy / scrapy
1 7
import os
2 7
import logging
3

4 7
from twisted.python.failure import Failure
5

6 7
from scrapy.utils.request import referer_str
7

8 7
# %-style format strings used by LogFormatter below; the placeholders are
# filled from the ``args`` dict each method returns (final text is ``msg % args``).
SCRAPEDMSG = "Scraped from %(src)s" + os.linesep + "%(item)s"
DROPPEDMSG = "Dropped: %(exception)s" + os.linesep + "%(item)s"
CRAWLEDMSG = "Crawled (%(status)s) %(request)s%(request_flags)s (referer: %(referer)s)%(response_flags)s"
ITEMERRORMSG = "Error processing %(item)s"
SPIDERERRORMSG = "Spider error processing %(request)s (referer: %(referer)s)"
# Short form is used when no error message is available (see download_error).
DOWNLOADERRORMSG_SHORT = "Error downloading %(request)s"
DOWNLOADERRORMSG_LONG = "Error downloading %(request)s: %(errmsg)s"
15

16

17 7
class LogFormatter:
    """Generate the log messages for the different crawl/pipeline events.

    Every method returns either ``None`` (to suppress logging of that event)
    or a dictionary whose entries are passed to ``logging.log``:

    *   ``level``: the severity for the event; any level from the
        `python logging library <https://docs.python.org/3/library/logging.html>`_
        (``logging.DEBUG``, ``logging.INFO``, ``logging.WARNING``,
        ``logging.ERROR``, ``logging.CRITICAL``) may be used.
    *   ``msg``: a %-style format string containing the placeholders.
    *   ``args``: a tuple or dict of placeholder values; the final message is
        computed as ``msg % args``.

    Subclass this to customize how an event is logged, or return ``None``
    from a method to omit it entirely.  For example, to lower the severity
    of dropped-item messages::

            class PoliteLogFormatter(logformatter.LogFormatter):
                def dropped(self, item, exception, response, spider):
                    return {
                        'level': logging.INFO, # lowering the level from logging.WARNING
                        'msg': "Dropped: %(exception)s" + os.linesep + "%(item)s",
                        'args': {
                            'exception': exception,
                            'item': item,
                        }
                    }
    """

    def crawled(self, request, response, spider):
        """Logs a message when the crawler finds a webpage."""
        # Flag suffixes are only rendered when the flag lists are non-empty.
        req_flags = f' {request.flags}' if request.flags else ''
        resp_flags = f' {response.flags}' if response.flags else ''
        args = {
            'status': response.status,
            'request': request,
            'request_flags': req_flags,
            'referer': referer_str(request),
            'response_flags': resp_flags,
            # backward compatibility with Scrapy logformatter below 1.4 version
            'flags': resp_flags,
        }
        return {'level': logging.DEBUG, 'msg': CRAWLEDMSG, 'args': args}

    def scraped(self, item, response, spider):
        """Logs a message when an item is scraped by a spider."""
        # The "source" may be a Failure (errback path) rather than a Response.
        src = response.getErrorMessage() if isinstance(response, Failure) else response
        return {
            'level': logging.DEBUG,
            'msg': SCRAPEDMSG,
            'args': {'src': src, 'item': item},
        }

    def dropped(self, item, exception, response, spider):
        """Logs a message when an item is dropped while it is passing through the item pipeline."""
        return {
            'level': logging.WARNING,
            'msg': DROPPEDMSG,
            'args': {'exception': exception, 'item': item},
        }

    def item_error(self, item, exception, response, spider):
        """Logs a message when an item causes an error while it is passing
        through the item pipeline.

        .. versionadded:: 2.0
        """
        return {
            'level': logging.ERROR,
            'msg': ITEMERRORMSG,
            'args': {'item': item},
        }

    def spider_error(self, failure, request, response, spider):
        """Logs an error message from a spider.

        .. versionadded:: 2.0
        """
        return {
            'level': logging.ERROR,
            'msg': SPIDERERRORMSG,
            'args': {'request': request, 'referer': referer_str(request)},
        }

    def download_error(self, failure, request, spider, errmsg=None):
        """Logs a download error message from a spider (typically coming from
        the engine).

        .. versionadded:: 2.0
        """
        # Use the long form only when a (truthy) error message is supplied.
        if errmsg:
            return {
                'level': logging.ERROR,
                'msg': DOWNLOADERRORMSG_LONG,
                'args': {'request': request, 'errmsg': errmsg},
            }
        return {
            'level': logging.ERROR,
            'msg': DOWNLOADERRORMSG_SHORT,
            'args': {'request': request},
        }

    @classmethod
    def from_crawler(cls, crawler):
        # Standard Scrapy factory hook; this formatter needs no crawler state.
        return cls()

Read our documentation on viewing source code .

Loading