/usr/lib/python2.7/dist-packages/scrapy/log.py is in python-scrapy 0.24.2-1.
This file is owned by root:root, with mode 0o644.
The actual contents of the file can be viewed below.
"""
Scrapy logging facility

See documentation in docs/topics/logging.rst
"""
import sys
import logging
import warnings

from twisted.python import log

import scrapy
from scrapy.utils.python import unicode_to_str
from scrapy.settings import overridden_settings

# Logging levels
DEBUG = logging.DEBUG
INFO = logging.INFO
WARNING = logging.WARNING
ERROR = logging.ERROR
CRITICAL = logging.CRITICAL
SILENT = CRITICAL + 1

level_names = {
    logging.DEBUG: "DEBUG",
    logging.INFO: "INFO",
    logging.WARNING: "WARNING",
    logging.ERROR: "ERROR",
    logging.CRITICAL: "CRITICAL",
    SILENT: "SILENT",
}
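
# Example (illustrative, not part of the original module): SILENT sits one
# step above CRITICAL, so an observer created with level=SILENT drops every
# event, because no standard level compares >= SILENT:
#
#   observer = ScrapyFileLogObserver(sys.stderr, level=SILENT)
#   # _adapt_eventdict() then returns None for every event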

class ScrapyFileLogObserver(log.FileLogObserver):

    def __init__(self, f, level=INFO, encoding='utf-8', crawler=None):
        self.level = level
        self.encoding = encoding
        if crawler:
            self.crawler = crawler
            self.emit = self._emit_with_crawler
        else:
            self.emit = self._emit
        log.FileLogObserver.__init__(self, f)

    def _emit(self, eventDict):
        ev = _adapt_eventdict(eventDict, self.level, self.encoding)
        if ev is not None:
            log.FileLogObserver.emit(self, ev)
        return ev

    def _emit_with_crawler(self, eventDict):
        ev = self._emit(eventDict)
        if ev:
            level = ev['logLevel']
            sname = 'log_count/%s' % level_names.get(level, level)
            self.crawler.stats.inc_value(sname)
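
# Example (illustrative, not part of the original file): wiring the observer
# into Twisted's logging by hand instead of calling start() below; the log
# file name is hypothetical:
#
#   f = open('scrapy.log', 'a')
#   sflo = ScrapyFileLogObserver(f, level=DEBUG, encoding='utf-8')
#   log.startLoggingWithObserver(sflo.emit, setStdout=False)
#   ...
#   sflo.stop()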

def _adapt_eventdict(eventDict, log_level=INFO, encoding='utf-8', prepend_level=True):
    """Adapt Twisted log eventDict making it suitable for logging with a Scrapy
    log observer. It may return None to indicate that the event should be
    ignored by a Scrapy log observer.

    `log_level` is the minimum level being logged, and `encoding` is the log
    encoding.
    """
    ev = eventDict.copy()
    if ev['isError']:
        ev.setdefault('logLevel', ERROR)
    # ignore non-error messages from outside scrapy
    if ev.get('system') != 'scrapy' and not ev['isError']:
        return
    level = ev.get('logLevel')
    if level < log_level:
        return
    spider = ev.get('spider')
    if spider:
        ev['system'] = unicode_to_str(spider.name, encoding)
    lvlname = level_names.get(level, 'NOLEVEL')
    message = ev.get('message')
    if message:
        message = [unicode_to_str(x, encoding) for x in message]
        if prepend_level:
            message[0] = "%s: %s" % (lvlname, message[0])
        ev['message'] = message
    why = ev.get('why')
    if why:
        why = unicode_to_str(why, encoding)
        if prepend_level:
            why = "%s: %s" % (lvlname, why)
        ev['why'] = why
    fmt = ev.get('format')
    if fmt:
        fmt = unicode_to_str(fmt, encoding)
        if prepend_level:
            fmt = "%s: %s" % (lvlname, fmt)
        ev['format'] = fmt
    return ev
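
# Example (illustrative): a minimal Twisted event dict as produced by
# log.msg() inside Scrapy, and what _adapt_eventdict() makes of it:
#
#   ev = {'message': (u'Spider opened',), 'system': 'scrapy',
#         'isError': 0, 'logLevel': INFO}
#   adapted = _adapt_eventdict(ev, log_level=INFO)
#   # adapted['message'] == ['INFO: Spider opened']  (encoded, level prepended)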

def _get_log_level(level_name_or_id):
    if isinstance(level_name_or_id, int):
        return level_name_or_id
    elif isinstance(level_name_or_id, basestring):
        return globals()[level_name_or_id]
    else:
        raise ValueError("Unknown log level: %r" % level_name_or_id)
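
# Example (illustrative): both calls resolve to the same numeric level,
# since names are looked up in this module's globals (DEBUG, INFO, ...):
#
#   _get_log_level('INFO')        # -> 20, via globals()['INFO']
#   _get_log_level(logging.INFO)  # -> 20, already an int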

def start(logfile=None, loglevel='INFO', logstdout=True, logencoding='utf-8', crawler=None):
    loglevel = _get_log_level(loglevel)
    file = open(logfile, 'a') if logfile else sys.stderr
    sflo = ScrapyFileLogObserver(file, loglevel, logencoding, crawler)
    _oldshowwarning = warnings.showwarning
    log.startLoggingWithObserver(sflo.emit, setStdout=logstdout)
    # restore warnings, wrongly silenced by Twisted
    warnings.showwarning = _oldshowwarning
    return sflo
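
# Example (illustrative): starting logging by hand; 'scrapy.log' is a
# hypothetical file name, and logfile=None would log to stderr instead:
#
#   sflo = start(logfile='scrapy.log', loglevel='DEBUG', logstdout=False)
#   msg("logging started")
#   sflo.stop()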

def msg(message=None, _level=INFO, **kw):
    kw['logLevel'] = kw.pop('level', _level)
    kw.setdefault('system', 'scrapy')
    if message is None:
        log.msg(**kw)
    else:
        log.msg(message, **kw)
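
# Example (illustrative): msg() wraps twisted's log.msg(), tagging events
# with a level and the 'scrapy' system name so the observer keeps them:
#
#   msg("Item scraped", level=DEBUG)
#   msg("Using default settings")  # level defaults to INFO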

def err(_stuff=None, _why=None, **kw):
    kw['logLevel'] = kw.pop('level', ERROR)
    kw.setdefault('system', 'scrapy')
    log.err(_stuff, _why, **kw)
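
# Example (illustrative): err() wraps twisted's log.err(); called with no
# _stuff inside an except block, Twisted captures the active exception:
#
#   try:
#       1 / 0
#   except ZeroDivisionError:
#       err(_why="arithmetic failed")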

def start_from_settings(settings, crawler=None):
    if settings.getbool('LOG_ENABLED'):
        return start(settings['LOG_FILE'], settings['LOG_LEVEL'], settings['LOG_STDOUT'],
            settings['LOG_ENCODING'], crawler)
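
# Example (illustrative): the LOG_* settings read above map one-to-one onto
# start()'s arguments; assuming the stock defaults (LOG_FILE=None,
# LOG_LEVEL='DEBUG', LOG_STDOUT=False, LOG_ENCODING='utf-8') this is
# roughly equivalent to:
#
#   start(logfile=None, loglevel='DEBUG', logstdout=False,
#         logencoding='utf-8', crawler=None)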

def scrapy_info(settings):
    sflo = start_from_settings(settings)
    if sflo:
        msg("Scrapy %s started (bot: %s)" % (scrapy.__version__,
            settings['BOT_NAME']))
        msg("Optional features available: %s" % ", ".join(scrapy.optional_features),
            level=INFO)
        d = dict(overridden_settings(settings))
        msg(format="Overridden settings: %(settings)r", settings=d, level=INFO)
        sflo.stop()
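
# Example (illustrative): emitting the startup banner with the project's
# settings; get_project_settings() is from scrapy.utils.project:
#
#   from scrapy.utils.project import get_project_settings
#   scrapy_info(get_project_settings())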

def start_from_crawler(crawler):
    return start_from_settings(crawler.settings, crawler)
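
# Example (illustrative): when started from a crawler, the observer counts
# emitted events in that crawler's stats under log_count/<LEVEL>:
#
#   sflo = start_from_crawler(crawler)  # 'crawler' supplied by Scrapy core
#   # ... after a run:
#   crawler.stats.get_value('log_count/INFO')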