/usr/share/pyshared/nipype/utils/logger.py is in python-nipype 0.9.2-1.
This file is owned by root:root, with mode 0o644.
The actual contents of the file can be viewed below.
# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
import logging
import os
import sys
try:
    from ..external.cloghandler import ConcurrentRotatingFileHandler as \
        RFHandler
except ImportError:
    # Next 2 lines are optional: issue a warning to the user
    from warnings import warn
    warn("ConcurrentLogHandler not installed. Using builtin log handler")
    from logging.handlers import RotatingFileHandler as RFHandler

from .misc import str2bool
from .config import NipypeConfig

class Logging(object):
    """Nipype logging class
    """
    fmt = ('%(asctime)s,%(msecs)d %(name)-2s '
           '%(levelname)-2s:\n\t %(message)s')
    datefmt = '%y%m%d-%H:%M:%S'

    def __init__(self, config):
        self._config = config
        logging.basicConfig(format=self.fmt, datefmt=self.datefmt,
                            stream=sys.stdout)
        #logging.basicConfig(stream=sys.stdout)
        self._logger = logging.getLogger('workflow')
        self._fmlogger = logging.getLogger('filemanip')
        self._iflogger = logging.getLogger('interface')
        self.loggers = {'workflow': self._logger,
                        'filemanip': self._fmlogger,
                        'interface': self._iflogger}
        self._hdlr = None
        self.update_logging(self._config)

    def enable_file_logging(self):
        config = self._config
        LOG_FILENAME = os.path.join(config.get('logging', 'log_directory'),
                                    'pypeline.log')
        hdlr = RFHandler(LOG_FILENAME,
                         maxBytes=int(config.get('logging', 'log_size')),
                         backupCount=int(config.get('logging',
                                                    'log_rotate')))
        formatter = logging.Formatter(fmt=self.fmt, datefmt=self.datefmt)
        hdlr.setFormatter(formatter)
        self._logger.addHandler(hdlr)
        self._fmlogger.addHandler(hdlr)
        self._iflogger.addHandler(hdlr)
        self._hdlr = hdlr

    def disable_file_logging(self):
        if self._hdlr:
            self._logger.removeHandler(self._hdlr)
            self._fmlogger.removeHandler(self._hdlr)
            self._iflogger.removeHandler(self._hdlr)
            self._hdlr = None

    def update_logging(self, config):
        self._config = config
        self.disable_file_logging()
        self._logger.setLevel(logging.getLevelName(config.get('logging',
                                                              'workflow_level')))
        self._fmlogger.setLevel(logging.getLevelName(config.get('logging',
                                                                'filemanip_level')))
        self._iflogger.setLevel(logging.getLevelName(config.get('logging',
                                                                'interface_level')))
        if str2bool(config.get('logging', 'log_to_file')):
            self.enable_file_logging()

    def getLogger(self, name):
        if name in self.loggers:
            return self.loggers[name]
        return None

    def getLevelName(self, name):
        return logging.getLevelName(name)

    def logdebug_dict_differences(self, dold, dnew, prefix=""):
        """Helper to log what actually changed from old to new values of
        dictionaries.

        typical use -- log difference for hashed_inputs
        """
        # Compare against hashed_inputs
        # Keys: should rarely differ
        new_keys = set(dnew.keys())
        old_keys = set(dold.keys())
        if len(new_keys - old_keys):
            self._logger.debug("%s not previously seen: %s"
                               % (prefix, new_keys - old_keys))
        if len(old_keys - new_keys):
            self._logger.debug("%s not presently seen: %s"
                               % (prefix, old_keys - new_keys))

        # Values in common keys would differ quite often,
        # so we need to join the messages together
        msgs = []
        for k in new_keys.intersection(old_keys):
            same = False
            try:
                new, old = dnew[k], dold[k]
                same = new == old
                if not same:
                    # Since JSON does not discriminate between lists and
                    # tuples, we might need to cast them into the same type
                    # as the last resort. And lets try to be more generic
                    same = old.__class__(new) == old
            except Exception, e:
                same = False
            if not same:
                msgs += ["%s: %r != %r"
                         % (k, dnew[k], dold[k])]
        if len(msgs):
            self._logger.debug("%s values differ in fields: %s" % (prefix,
                               ", ".join(msgs)))
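For reference, a minimal usage sketch of the Logging class follows. It is not part of the packaged file: it assumes that NipypeConfig (imported above from .config) exposes the ConfigParser-style get()/set() accessors that the methods in this file rely on, and that the package is importable as nipype.utils. The level and option names come from the config.get() calls in update_logging() and enable_file_logging().

    # Hedged sketch: NipypeConfig.set() is assumed to mirror ConfigParser.set();
    # every other call (Logging, update_logging, getLogger) is defined in the file above.
    from nipype.utils.config import NipypeConfig
    from nipype.utils.logger import Logging

    config = NipypeConfig()
    config.set('logging', 'workflow_level', 'DEBUG')   # one of the levels read by update_logging()
    config.set('logging', 'log_to_file', 'false')      # keep enable_file_logging() from being called

    log = Logging(config)            # sets up the workflow/filemanip/interface loggers
    log.update_logging(config)       # re-read levels and the file-logging flag after config changes
    log.getLogger('workflow').info('pipeline started')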