1# Copyright 2001-2023 by Vinay Sajip. All Rights Reserved. 2# 3# Permission to use, copy, modify, and distribute this software and its 4# documentation for any purpose and without fee is hereby granted, 5# provided that the above copyright notice appear in all copies and that 6# both that copyright notice and this permission notice appear in 7# supporting documentation, and that the name of Vinay Sajip 8# not be used in advertising or publicity pertaining to distribution 9# of the software without specific, written prior permission. 10# VINAY SAJIP DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, INCLUDING 11# ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL 12# VINAY SAJIP BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR 13# ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER 14# IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT 15# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 16 17""" 18Configuration functions for the logging package for Python. The core package 19is based on PEP 282 and comments thereto in comp.lang.python, and influenced 20by Apache's log4j system. 21 22Copyright (C) 2001-2022 Vinay Sajip. All Rights Reserved. 23 24To use, simply 'import logging' and log away! 25""" 26 27import errno 28import functools 29import io 30import logging 31import logging.handlers 32import os 33import queue 34import re 35import struct 36import threading 37import traceback 38 39from socketserver import ThreadingTCPServer, StreamRequestHandler 40 41 42DEFAULT_LOGGING_CONFIG_PORT = 9030 43 44RESET_ERROR = errno.ECONNRESET 45 46# 47# The following code implements a socket listener for on-the-fly 48# reconfiguration of logging. 49# 50# _listener holds the server object doing the listening 51_listener = None 52 53def fileConfig(fname, defaults=None, disable_existing_loggers=True, encoding=None): 54 """ 55 Read the logging configuration from a ConfigParser-format file. 56 57 This can be called several times from an application, allowing an end user 58 the ability to select from various pre-canned configurations (if the 59 developer provides a mechanism to present the choices and load the chosen 60 configuration). 61 """ 62 import configparser 63 64 if isinstance(fname, str): 65 if not os.path.exists(fname): 66 raise FileNotFoundError(f"{fname} doesn't exist") 67 elif not os.path.getsize(fname): 68 raise RuntimeError(f'{fname} is an empty file') 69 70 if isinstance(fname, configparser.RawConfigParser): 71 cp = fname 72 else: 73 try: 74 cp = configparser.ConfigParser(defaults) 75 if hasattr(fname, 'readline'): 76 cp.read_file(fname) 77 else: 78 encoding = io.text_encoding(encoding) 79 cp.read(fname, encoding=encoding) 80 except configparser.ParsingError as e: 81 raise RuntimeError(f'{fname} is invalid: {e}') 82 83 formatters = _create_formatters(cp) 84 85 # critical section 86 with logging._lock: 87 _clearExistingHandlers() 88 89 # Handlers add themselves to logging._handlers 90 handlers = _install_handlers(cp, formatters) 91 _install_loggers(cp, handlers, disable_existing_loggers) 92 93 94def _resolve(name): 95 """Resolve a dotted name to a global object.""" 96 name = name.split('.') 97 used = name.pop(0) 98 found = __import__(used) 99 for n in name: 100 used = used + '.' + n 101 try: 102 found = getattr(found, n) 103 except AttributeError: 104 __import__(used) 105 found = getattr(found, n) 106 return found 107 108def _strip_spaces(alist): 109 return map(str.strip, alist) 110 111def _create_formatters(cp): 112 """Create and return formatters""" 113 flist = cp["formatters"]["keys"] 114 if not len(flist): 115 return {} 116 flist = flist.split(",") 117 flist = _strip_spaces(flist) 118 formatters = {} 119 for form in flist: 120 sectname = "formatter_%s" % form 121 fs = cp.get(sectname, "format", raw=True, fallback=None) 122 dfs = cp.get(sectname, "datefmt", raw=True, fallback=None) 123 stl = cp.get(sectname, "style", raw=True, fallback='%') 124 defaults = cp.get(sectname, "defaults", raw=True, fallback=None) 125 126 c = logging.Formatter 127 class_name = cp[sectname].get("class") 128 if class_name: 129 c = _resolve(class_name) 130 131 if defaults is not None: 132 defaults = eval(defaults, vars(logging)) 133 f = c(fs, dfs, stl, defaults=defaults) 134 else: 135 f = c(fs, dfs, stl) 136 formatters[form] = f 137 return formatters 138 139 140def _install_handlers(cp, formatters): 141 """Install and return handlers""" 142 hlist = cp["handlers"]["keys"] 143 if not len(hlist): 144 return {} 145 hlist = hlist.split(",") 146 hlist = _strip_spaces(hlist) 147 handlers = {} 148 fixups = [] #for inter-handler references 149 for hand in hlist: 150 section = cp["handler_%s" % hand] 151 klass = section["class"] 152 fmt = section.get("formatter", "") 153 try: 154 klass = eval(klass, vars(logging)) 155 except (AttributeError, NameError): 156 klass = _resolve(klass) 157 args = section.get("args", '()') 158 args = eval(args, vars(logging)) 159 kwargs = section.get("kwargs", '{}') 160 kwargs = eval(kwargs, vars(logging)) 161 h = klass(*args, **kwargs) 162 h.name = hand 163 if "level" in section: 164 level = section["level"] 165 h.setLevel(level) 166 if len(fmt): 167 h.setFormatter(formatters[fmt]) 168 if issubclass(klass, logging.handlers.MemoryHandler): 169 target = section.get("target", "") 170 if len(target): #the target handler may not be loaded yet, so keep for later... 171 fixups.append((h, target)) 172 handlers[hand] = h 173 #now all handlers are loaded, fixup inter-handler references... 174 for h, t in fixups: 175 h.setTarget(handlers[t]) 176 return handlers 177 178def _handle_existing_loggers(existing, child_loggers, disable_existing): 179 """ 180 When (re)configuring logging, handle loggers which were in the previous 181 configuration but are not in the new configuration. There's no point 182 deleting them as other threads may continue to hold references to them; 183 and by disabling them, you stop them doing any logging. 184 185 However, don't disable children of named loggers, as that's probably not 186 what was intended by the user. Also, allow existing loggers to NOT be 187 disabled if disable_existing is false. 188 """ 189 root = logging.root 190 for log in existing: 191 logger = root.manager.loggerDict[log] 192 if log in child_loggers: 193 if not isinstance(logger, logging.PlaceHolder): 194 logger.setLevel(logging.NOTSET) 195 logger.handlers = [] 196 logger.propagate = True 197 else: 198 logger.disabled = disable_existing 199 200def _install_loggers(cp, handlers, disable_existing): 201 """Create and install loggers""" 202 203 # configure the root first 204 llist = cp["loggers"]["keys"] 205 llist = llist.split(",") 206 llist = list(_strip_spaces(llist)) 207 llist.remove("root") 208 section = cp["logger_root"] 209 root = logging.root 210 log = root 211 if "level" in section: 212 level = section["level"] 213 log.setLevel(level) 214 for h in root.handlers[:]: 215 root.removeHandler(h) 216 hlist = section["handlers"] 217 if len(hlist): 218 hlist = hlist.split(",") 219 hlist = _strip_spaces(hlist) 220 for hand in hlist: 221 log.addHandler(handlers[hand]) 222 223 #and now the others... 224 #we don't want to lose the existing loggers, 225 #since other threads may have pointers to them. 226 #existing is set to contain all existing loggers, 227 #and as we go through the new configuration we 228 #remove any which are configured. At the end, 229 #what's left in existing is the set of loggers 230 #which were in the previous configuration but 231 #which are not in the new configuration. 232 existing = list(root.manager.loggerDict.keys()) 233 #The list needs to be sorted so that we can 234 #avoid disabling child loggers of explicitly 235 #named loggers. With a sorted list it is easier 236 #to find the child loggers. 237 existing.sort() 238 #We'll keep the list of existing loggers 239 #which are children of named loggers here... 240 child_loggers = [] 241 #now set up the new ones... 242 for log in llist: 243 section = cp["logger_%s" % log] 244 qn = section["qualname"] 245 propagate = section.getint("propagate", fallback=1) 246 logger = logging.getLogger(qn) 247 if qn in existing: 248 i = existing.index(qn) + 1 # start with the entry after qn 249 prefixed = qn + "." 250 pflen = len(prefixed) 251 num_existing = len(existing) 252 while i < num_existing: 253 if existing[i][:pflen] == prefixed: 254 child_loggers.append(existing[i]) 255 i += 1 256 existing.remove(qn) 257 if "level" in section: 258 level = section["level"] 259 logger.setLevel(level) 260 for h in logger.handlers[:]: 261 logger.removeHandler(h) 262 logger.propagate = propagate 263 logger.disabled = 0 264 hlist = section["handlers"] 265 if len(hlist): 266 hlist = hlist.split(",") 267 hlist = _strip_spaces(hlist) 268 for hand in hlist: 269 logger.addHandler(handlers[hand]) 270 271 #Disable any old loggers. There's no point deleting 272 #them as other threads may continue to hold references 273 #and by disabling them, you stop them doing any logging. 274 #However, don't disable children of named loggers, as that's 275 #probably not what was intended by the user. 276 #for log in existing: 277 # logger = root.manager.loggerDict[log] 278 # if log in child_loggers: 279 # logger.level = logging.NOTSET 280 # logger.handlers = [] 281 # logger.propagate = 1 282 # elif disable_existing_loggers: 283 # logger.disabled = 1 284 _handle_existing_loggers(existing, child_loggers, disable_existing) 285 286 287def _clearExistingHandlers(): 288 """Clear and close existing handlers""" 289 logging._handlers.clear() 290 logging.shutdown(logging._handlerList[:]) 291 del logging._handlerList[:] 292 293 294IDENTIFIER = re.compile('^[a-z_][a-z0-9_]*$', re.I) 295 296 297def valid_ident(s): 298 m = IDENTIFIER.match(s) 299 if not m: 300 raise ValueError('Not a valid Python identifier: %r' % s) 301 return True 302 303 304class ConvertingMixin(object): 305 """For ConvertingXXX's, this mixin class provides common functions""" 306 307 def convert_with_key(self, key, value, replace=True): 308 result = self.configurator.convert(value) 309 #If the converted value is different, save for next time 310 if value is not result: 311 if replace: 312 self[key] = result 313 if type(result) in (ConvertingDict, ConvertingList, 314 ConvertingTuple): 315 result.parent = self 316 result.key = key 317 return result 318 319 def convert(self, value): 320 result = self.configurator.convert(value) 321 if value is not result: 322 if type(result) in (ConvertingDict, ConvertingList, 323 ConvertingTuple): 324 result.parent = self 325 return result 326 327 328# The ConvertingXXX classes are wrappers around standard Python containers, 329# and they serve to convert any suitable values in the container. The 330# conversion converts base dicts, lists and tuples to their wrapped 331# equivalents, whereas strings which match a conversion format are converted 332# appropriately. 333# 334# Each wrapper should have a configurator attribute holding the actual 335# configurator to use for conversion. 336 337class ConvertingDict(dict, ConvertingMixin): 338 """A converting dictionary wrapper.""" 339 340 def __getitem__(self, key): 341 value = dict.__getitem__(self, key) 342 return self.convert_with_key(key, value) 343 344 def get(self, key, default=None): 345 value = dict.get(self, key, default) 346 return self.convert_with_key(key, value) 347 348 def pop(self, key, default=None): 349 value = dict.pop(self, key, default) 350 return self.convert_with_key(key, value, replace=False) 351 352class ConvertingList(list, ConvertingMixin): 353 """A converting list wrapper.""" 354 def __getitem__(self, key): 355 value = list.__getitem__(self, key) 356 return self.convert_with_key(key, value) 357 358 def pop(self, idx=-1): 359 value = list.pop(self, idx) 360 return self.convert(value) 361 362class ConvertingTuple(tuple, ConvertingMixin): 363 """A converting tuple wrapper.""" 364 def __getitem__(self, key): 365 value = tuple.__getitem__(self, key) 366 # Can't replace a tuple entry. 367 return self.convert_with_key(key, value, replace=False) 368 369class BaseConfigurator(object): 370 """ 371 The configurator base class which defines some useful defaults. 372 """ 373 374 CONVERT_PATTERN = re.compile(r'^(?P<prefix>[a-z]+)://(?P<suffix>.*)$') 375 376 WORD_PATTERN = re.compile(r'^\s*(\w+)\s*') 377 DOT_PATTERN = re.compile(r'^\.\s*(\w+)\s*') 378 INDEX_PATTERN = re.compile(r'^\[([^\[\]]*)\]\s*') 379 DIGIT_PATTERN = re.compile(r'^\d+$') 380 381 value_converters = { 382 'ext' : 'ext_convert', 383 'cfg' : 'cfg_convert', 384 } 385 386 # We might want to use a different one, e.g. importlib 387 importer = staticmethod(__import__) 388 389 def __init__(self, config): 390 self.config = ConvertingDict(config) 391 self.config.configurator = self 392 393 def resolve(self, s): 394 """ 395 Resolve strings to objects using standard import and attribute 396 syntax. 397 """ 398 name = s.split('.') 399 used = name.pop(0) 400 try: 401 found = self.importer(used) 402 for frag in name: 403 used += '.' + frag 404 try: 405 found = getattr(found, frag) 406 except AttributeError: 407 self.importer(used) 408 found = getattr(found, frag) 409 return found 410 except ImportError as e: 411 v = ValueError('Cannot resolve %r: %s' % (s, e)) 412 raise v from e 413 414 def ext_convert(self, value): 415 """Default converter for the ext:// protocol.""" 416 return self.resolve(value) 417 418 def cfg_convert(self, value): 419 """Default converter for the cfg:// protocol.""" 420 rest = value 421 m = self.WORD_PATTERN.match(rest) 422 if m is None: 423 raise ValueError("Unable to convert %r" % value) 424 else: 425 rest = rest[m.end():] 426 d = self.config[m.groups()[0]] 427 #print d, rest 428 while rest: 429 m = self.DOT_PATTERN.match(rest) 430 if m: 431 d = d[m.groups()[0]] 432 else: 433 m = self.INDEX_PATTERN.match(rest) 434 if m: 435 idx = m.groups()[0] 436 if not self.DIGIT_PATTERN.match(idx): 437 d = d[idx] 438 else: 439 try: 440 n = int(idx) # try as number first (most likely) 441 d = d[n] 442 except TypeError: 443 d = d[idx] 444 if m: 445 rest = rest[m.end():] 446 else: 447 raise ValueError('Unable to convert ' 448 '%r at %r' % (value, rest)) 449 #rest should be empty 450 return d 451 452 def convert(self, value): 453 """ 454 Convert values to an appropriate type. dicts, lists and tuples are 455 replaced by their converting alternatives. Strings are checked to 456 see if they have a conversion format and are converted if they do. 457 """ 458 if not isinstance(value, ConvertingDict) and isinstance(value, dict): 459 value = ConvertingDict(value) 460 value.configurator = self 461 elif not isinstance(value, ConvertingList) and isinstance(value, list): 462 value = ConvertingList(value) 463 value.configurator = self 464 elif not isinstance(value, ConvertingTuple) and\ 465 isinstance(value, tuple) and not hasattr(value, '_fields'): 466 value = ConvertingTuple(value) 467 value.configurator = self 468 elif isinstance(value, str): # str for py3k 469 m = self.CONVERT_PATTERN.match(value) 470 if m: 471 d = m.groupdict() 472 prefix = d['prefix'] 473 converter = self.value_converters.get(prefix, None) 474 if converter: 475 suffix = d['suffix'] 476 converter = getattr(self, converter) 477 value = converter(suffix) 478 return value 479 480 def configure_custom(self, config): 481 """Configure an object with a user-supplied factory.""" 482 c = config.pop('()') 483 if not callable(c): 484 c = self.resolve(c) 485 # Check for valid identifiers 486 kwargs = {k: config[k] for k in config if (k != '.' and valid_ident(k))} 487 result = c(**kwargs) 488 props = config.pop('.', None) 489 if props: 490 for name, value in props.items(): 491 setattr(result, name, value) 492 return result 493 494 def as_tuple(self, value): 495 """Utility function which converts lists to tuples.""" 496 if isinstance(value, list): 497 value = tuple(value) 498 return value 499 500def _is_queue_like_object(obj): 501 """Check that *obj* implements the Queue API.""" 502 if isinstance(obj, (queue.Queue, queue.SimpleQueue)): 503 return True 504 # defer importing multiprocessing as much as possible 505 from multiprocessing.queues import Queue as MPQueue 506 if isinstance(obj, MPQueue): 507 return True 508 # Depending on the multiprocessing start context, we cannot create 509 # a multiprocessing.managers.BaseManager instance 'mm' to get the 510 # runtime type of mm.Queue() or mm.JoinableQueue() (see gh-119819). 511 # 512 # Since we only need an object implementing the Queue API, we only 513 # do a protocol check, but we do not use typing.runtime_checkable() 514 # and typing.Protocol to reduce import time (see gh-121723). 515 # 516 # Ideally, we would have wanted to simply use strict type checking 517 # instead of a protocol-based type checking since the latter does 518 # not check the method signatures. 519 # 520 # Note that only 'put_nowait' and 'get' are required by the logging 521 # queue handler and queue listener (see gh-124653) and that other 522 # methods are either optional or unused. 523 minimal_queue_interface = ['put_nowait', 'get'] 524 return all(callable(getattr(obj, method, None)) 525 for method in minimal_queue_interface) 526 527class DictConfigurator(BaseConfigurator): 528 """ 529 Configure logging using a dictionary-like object to describe the 530 configuration. 531 """ 532 533 def configure(self): 534 """Do the configuration.""" 535 536 config = self.config 537 if 'version' not in config: 538 raise ValueError("dictionary doesn't specify a version") 539 if config['version'] != 1: 540 raise ValueError("Unsupported version: %s" % config['version']) 541 incremental = config.pop('incremental', False) 542 EMPTY_DICT = {} 543 with logging._lock: 544 if incremental: 545 handlers = config.get('handlers', EMPTY_DICT) 546 for name in handlers: 547 if name not in logging._handlers: 548 raise ValueError('No handler found with ' 549 'name %r' % name) 550 else: 551 try: 552 handler = logging._handlers[name] 553 handler_config = handlers[name] 554 level = handler_config.get('level', None) 555 if level: 556 handler.setLevel(logging._checkLevel(level)) 557 except Exception as e: 558 raise ValueError('Unable to configure handler ' 559 '%r' % name) from e 560 loggers = config.get('loggers', EMPTY_DICT) 561 for name in loggers: 562 try: 563 self.configure_logger(name, loggers[name], True) 564 except Exception as e: 565 raise ValueError('Unable to configure logger ' 566 '%r' % name) from e 567 root = config.get('root', None) 568 if root: 569 try: 570 self.configure_root(root, True) 571 except Exception as e: 572 raise ValueError('Unable to configure root ' 573 'logger') from e 574 else: 575 disable_existing = config.pop('disable_existing_loggers', True) 576 577 _clearExistingHandlers() 578 579 # Do formatters first - they don't refer to anything else 580 formatters = config.get('formatters', EMPTY_DICT) 581 for name in formatters: 582 try: 583 formatters[name] = self.configure_formatter( 584 formatters[name]) 585 except Exception as e: 586 raise ValueError('Unable to configure ' 587 'formatter %r' % name) from e 588 # Next, do filters - they don't refer to anything else, either 589 filters = config.get('filters', EMPTY_DICT) 590 for name in filters: 591 try: 592 filters[name] = self.configure_filter(filters[name]) 593 except Exception as e: 594 raise ValueError('Unable to configure ' 595 'filter %r' % name) from e 596 597 # Next, do handlers - they refer to formatters and filters 598 # As handlers can refer to other handlers, sort the keys 599 # to allow a deterministic order of configuration 600 handlers = config.get('handlers', EMPTY_DICT) 601 deferred = [] 602 for name in sorted(handlers): 603 try: 604 handler = self.configure_handler(handlers[name]) 605 handler.name = name 606 handlers[name] = handler 607 except Exception as e: 608 if ' not configured yet' in str(e.__cause__): 609 deferred.append(name) 610 else: 611 raise ValueError('Unable to configure handler ' 612 '%r' % name) from e 613 614 # Now do any that were deferred 615 for name in deferred: 616 try: 617 handler = self.configure_handler(handlers[name]) 618 handler.name = name 619 handlers[name] = handler 620 except Exception as e: 621 raise ValueError('Unable to configure handler ' 622 '%r' % name) from e 623 624 # Next, do loggers - they refer to handlers and filters 625 626 #we don't want to lose the existing loggers, 627 #since other threads may have pointers to them. 628 #existing is set to contain all existing loggers, 629 #and as we go through the new configuration we 630 #remove any which are configured. At the end, 631 #what's left in existing is the set of loggers 632 #which were in the previous configuration but 633 #which are not in the new configuration. 634 root = logging.root 635 existing = list(root.manager.loggerDict.keys()) 636 #The list needs to be sorted so that we can 637 #avoid disabling child loggers of explicitly 638 #named loggers. With a sorted list it is easier 639 #to find the child loggers. 640 existing.sort() 641 #We'll keep the list of existing loggers 642 #which are children of named loggers here... 643 child_loggers = [] 644 #now set up the new ones... 645 loggers = config.get('loggers', EMPTY_DICT) 646 for name in loggers: 647 if name in existing: 648 i = existing.index(name) + 1 # look after name 649 prefixed = name + "." 650 pflen = len(prefixed) 651 num_existing = len(existing) 652 while i < num_existing: 653 if existing[i][:pflen] == prefixed: 654 child_loggers.append(existing[i]) 655 i += 1 656 existing.remove(name) 657 try: 658 self.configure_logger(name, loggers[name]) 659 except Exception as e: 660 raise ValueError('Unable to configure logger ' 661 '%r' % name) from e 662 663 #Disable any old loggers. There's no point deleting 664 #them as other threads may continue to hold references 665 #and by disabling them, you stop them doing any logging. 666 #However, don't disable children of named loggers, as that's 667 #probably not what was intended by the user. 668 #for log in existing: 669 # logger = root.manager.loggerDict[log] 670 # if log in child_loggers: 671 # logger.level = logging.NOTSET 672 # logger.handlers = [] 673 # logger.propagate = True 674 # elif disable_existing: 675 # logger.disabled = True 676 _handle_existing_loggers(existing, child_loggers, 677 disable_existing) 678 679 # And finally, do the root logger 680 root = config.get('root', None) 681 if root: 682 try: 683 self.configure_root(root) 684 except Exception as e: 685 raise ValueError('Unable to configure root ' 686 'logger') from e 687 688 def configure_formatter(self, config): 689 """Configure a formatter from a dictionary.""" 690 if '()' in config: 691 factory = config['()'] # for use in exception handler 692 try: 693 result = self.configure_custom(config) 694 except TypeError as te: 695 if "'format'" not in str(te): 696 raise 697 # logging.Formatter and its subclasses expect the `fmt` 698 # parameter instead of `format`. Retry passing configuration 699 # with `fmt`. 700 config['fmt'] = config.pop('format') 701 config['()'] = factory 702 result = self.configure_custom(config) 703 else: 704 fmt = config.get('format', None) 705 dfmt = config.get('datefmt', None) 706 style = config.get('style', '%') 707 cname = config.get('class', None) 708 defaults = config.get('defaults', None) 709 710 if not cname: 711 c = logging.Formatter 712 else: 713 c = _resolve(cname) 714 715 kwargs = {} 716 717 # Add defaults only if it exists. 718 # Prevents TypeError in custom formatter callables that do not 719 # accept it. 720 if defaults is not None: 721 kwargs['defaults'] = defaults 722 723 # A TypeError would be raised if "validate" key is passed in with a formatter callable 724 # that does not accept "validate" as a parameter 725 if 'validate' in config: # if user hasn't mentioned it, the default will be fine 726 result = c(fmt, dfmt, style, config['validate'], **kwargs) 727 else: 728 result = c(fmt, dfmt, style, **kwargs) 729 730 return result 731 732 def configure_filter(self, config): 733 """Configure a filter from a dictionary.""" 734 if '()' in config: 735 result = self.configure_custom(config) 736 else: 737 name = config.get('name', '') 738 result = logging.Filter(name) 739 return result 740 741 def add_filters(self, filterer, filters): 742 """Add filters to a filterer from a list of names.""" 743 for f in filters: 744 try: 745 if callable(f) or callable(getattr(f, 'filter', None)): 746 filter_ = f 747 else: 748 filter_ = self.config['filters'][f] 749 filterer.addFilter(filter_) 750 except Exception as e: 751 raise ValueError('Unable to add filter %r' % f) from e 752 753 def _configure_queue_handler(self, klass, **kwargs): 754 if 'queue' in kwargs: 755 q = kwargs.pop('queue') 756 else: 757 q = queue.Queue() # unbounded 758 759 rhl = kwargs.pop('respect_handler_level', False) 760 lklass = kwargs.pop('listener', logging.handlers.QueueListener) 761 handlers = kwargs.pop('handlers', []) 762 763 listener = lklass(q, *handlers, respect_handler_level=rhl) 764 handler = klass(q, **kwargs) 765 handler.listener = listener 766 return handler 767 768 def configure_handler(self, config): 769 """Configure a handler from a dictionary.""" 770 config_copy = dict(config) # for restoring in case of error 771 formatter = config.pop('formatter', None) 772 if formatter: 773 try: 774 formatter = self.config['formatters'][formatter] 775 except Exception as e: 776 raise ValueError('Unable to set formatter ' 777 '%r' % formatter) from e 778 level = config.pop('level', None) 779 filters = config.pop('filters', None) 780 if '()' in config: 781 c = config.pop('()') 782 if not callable(c): 783 c = self.resolve(c) 784 factory = c 785 else: 786 cname = config.pop('class') 787 if callable(cname): 788 klass = cname 789 else: 790 klass = self.resolve(cname) 791 if issubclass(klass, logging.handlers.MemoryHandler): 792 if 'flushLevel' in config: 793 config['flushLevel'] = logging._checkLevel(config['flushLevel']) 794 if 'target' in config: 795 # Special case for handler which refers to another handler 796 try: 797 tn = config['target'] 798 th = self.config['handlers'][tn] 799 if not isinstance(th, logging.Handler): 800 config.update(config_copy) # restore for deferred cfg 801 raise TypeError('target not configured yet') 802 config['target'] = th 803 except Exception as e: 804 raise ValueError('Unable to set target handler %r' % tn) from e 805 elif issubclass(klass, logging.handlers.QueueHandler): 806 # Another special case for handler which refers to other handlers 807 # if 'handlers' not in config: 808 # raise ValueError('No handlers specified for a QueueHandler') 809 if 'queue' in config: 810 qspec = config['queue'] 811 812 if isinstance(qspec, str): 813 q = self.resolve(qspec) 814 if not callable(q): 815 raise TypeError('Invalid queue specifier %r' % qspec) 816 config['queue'] = q() 817 elif isinstance(qspec, dict): 818 if '()' not in qspec: 819 raise TypeError('Invalid queue specifier %r' % qspec) 820 config['queue'] = self.configure_custom(dict(qspec)) 821 elif not _is_queue_like_object(qspec): 822 raise TypeError('Invalid queue specifier %r' % qspec) 823 824 if 'listener' in config: 825 lspec = config['listener'] 826 if isinstance(lspec, type): 827 if not issubclass(lspec, logging.handlers.QueueListener): 828 raise TypeError('Invalid listener specifier %r' % lspec) 829 else: 830 if isinstance(lspec, str): 831 listener = self.resolve(lspec) 832 if isinstance(listener, type) and\ 833 not issubclass(listener, logging.handlers.QueueListener): 834 raise TypeError('Invalid listener specifier %r' % lspec) 835 elif isinstance(lspec, dict): 836 if '()' not in lspec: 837 raise TypeError('Invalid listener specifier %r' % lspec) 838 listener = self.configure_custom(dict(lspec)) 839 else: 840 raise TypeError('Invalid listener specifier %r' % lspec) 841 if not callable(listener): 842 raise TypeError('Invalid listener specifier %r' % lspec) 843 config['listener'] = listener 844 if 'handlers' in config: 845 hlist = [] 846 try: 847 for hn in config['handlers']: 848 h = self.config['handlers'][hn] 849 if not isinstance(h, logging.Handler): 850 config.update(config_copy) # restore for deferred cfg 851 raise TypeError('Required handler %r ' 852 'is not configured yet' % hn) 853 hlist.append(h) 854 except Exception as e: 855 raise ValueError('Unable to set required handler %r' % hn) from e 856 config['handlers'] = hlist 857 elif issubclass(klass, logging.handlers.SMTPHandler) and\ 858 'mailhost' in config: 859 config['mailhost'] = self.as_tuple(config['mailhost']) 860 elif issubclass(klass, logging.handlers.SysLogHandler) and\ 861 'address' in config: 862 config['address'] = self.as_tuple(config['address']) 863 if issubclass(klass, logging.handlers.QueueHandler): 864 factory = functools.partial(self._configure_queue_handler, klass) 865 else: 866 factory = klass 867 kwargs = {k: config[k] for k in config if (k != '.' and valid_ident(k))} 868 try: 869 result = factory(**kwargs) 870 except TypeError as te: 871 if "'stream'" not in str(te): 872 raise 873 #The argument name changed from strm to stream 874 #Retry with old name. 875 #This is so that code can be used with older Python versions 876 #(e.g. by Django) 877 kwargs['strm'] = kwargs.pop('stream') 878 result = factory(**kwargs) 879 if formatter: 880 result.setFormatter(formatter) 881 if level is not None: 882 result.setLevel(logging._checkLevel(level)) 883 if filters: 884 self.add_filters(result, filters) 885 props = config.pop('.', None) 886 if props: 887 for name, value in props.items(): 888 setattr(result, name, value) 889 return result 890 891 def add_handlers(self, logger, handlers): 892 """Add handlers to a logger from a list of names.""" 893 for h in handlers: 894 try: 895 logger.addHandler(self.config['handlers'][h]) 896 except Exception as e: 897 raise ValueError('Unable to add handler %r' % h) from e 898 899 def common_logger_config(self, logger, config, incremental=False): 900 """ 901 Perform configuration which is common to root and non-root loggers. 902 """ 903 level = config.get('level', None) 904 if level is not None: 905 logger.setLevel(logging._checkLevel(level)) 906 if not incremental: 907 #Remove any existing handlers 908 for h in logger.handlers[:]: 909 logger.removeHandler(h) 910 handlers = config.get('handlers', None) 911 if handlers: 912 self.add_handlers(logger, handlers) 913 filters = config.get('filters', None) 914 if filters: 915 self.add_filters(logger, filters) 916 917 def configure_logger(self, name, config, incremental=False): 918 """Configure a non-root logger from a dictionary.""" 919 logger = logging.getLogger(name) 920 self.common_logger_config(logger, config, incremental) 921 logger.disabled = False 922 propagate = config.get('propagate', None) 923 if propagate is not None: 924 logger.propagate = propagate 925 926 def configure_root(self, config, incremental=False): 927 """Configure a root logger from a dictionary.""" 928 root = logging.getLogger() 929 self.common_logger_config(root, config, incremental) 930 931dictConfigClass = DictConfigurator 932 933def dictConfig(config): 934 """Configure logging using a dictionary.""" 935 dictConfigClass(config).configure() 936 937 938def listen(port=DEFAULT_LOGGING_CONFIG_PORT, verify=None): 939 """ 940 Start up a socket server on the specified port, and listen for new 941 configurations. 942 943 These will be sent as a file suitable for processing by fileConfig(). 944 Returns a Thread object on which you can call start() to start the server, 945 and which you can join() when appropriate. To stop the server, call 946 stopListening(). 947 948 Use the ``verify`` argument to verify any bytes received across the wire 949 from a client. If specified, it should be a callable which receives a 950 single argument - the bytes of configuration data received across the 951 network - and it should return either ``None``, to indicate that the 952 passed in bytes could not be verified and should be discarded, or a 953 byte string which is then passed to the configuration machinery as 954 normal. Note that you can return transformed bytes, e.g. by decrypting 955 the bytes passed in. 956 """ 957 958 class ConfigStreamHandler(StreamRequestHandler): 959 """ 960 Handler for a logging configuration request. 961 962 It expects a completely new logging configuration and uses fileConfig 963 to install it. 964 """ 965 def handle(self): 966 """ 967 Handle a request. 968 969 Each request is expected to be a 4-byte length, packed using 970 struct.pack(">L", n), followed by the config file. 971 Uses fileConfig() to do the grunt work. 972 """ 973 try: 974 conn = self.connection 975 chunk = conn.recv(4) 976 if len(chunk) == 4: 977 slen = struct.unpack(">L", chunk)[0] 978 chunk = self.connection.recv(slen) 979 while len(chunk) < slen: 980 chunk = chunk + conn.recv(slen - len(chunk)) 981 if self.server.verify is not None: 982 chunk = self.server.verify(chunk) 983 if chunk is not None: # verified, can process 984 chunk = chunk.decode("utf-8") 985 try: 986 import json 987 d =json.loads(chunk) 988 assert isinstance(d, dict) 989 dictConfig(d) 990 except Exception: 991 #Apply new configuration. 992 993 file = io.StringIO(chunk) 994 try: 995 fileConfig(file) 996 except Exception: 997 traceback.print_exc() 998 if self.server.ready: 999 self.server.ready.set() 1000 except OSError as e: 1001 if e.errno != RESET_ERROR: 1002 raise 1003 1004 class ConfigSocketReceiver(ThreadingTCPServer): 1005 """ 1006 A simple TCP socket-based logging config receiver. 1007 """ 1008 1009 allow_reuse_address = 1 1010 1011 def __init__(self, host='localhost', port=DEFAULT_LOGGING_CONFIG_PORT, 1012 handler=None, ready=None, verify=None): 1013 ThreadingTCPServer.__init__(self, (host, port), handler) 1014 with logging._lock: 1015 self.abort = 0 1016 self.timeout = 1 1017 self.ready = ready 1018 self.verify = verify 1019 1020 def serve_until_stopped(self): 1021 import select 1022 abort = 0 1023 while not abort: 1024 rd, wr, ex = select.select([self.socket.fileno()], 1025 [], [], 1026 self.timeout) 1027 if rd: 1028 self.handle_request() 1029 with logging._lock: 1030 abort = self.abort 1031 self.server_close() 1032 1033 class Server(threading.Thread): 1034 1035 def __init__(self, rcvr, hdlr, port, verify): 1036 super(Server, self).__init__() 1037 self.rcvr = rcvr 1038 self.hdlr = hdlr 1039 self.port = port 1040 self.verify = verify 1041 self.ready = threading.Event() 1042 1043 def run(self): 1044 server = self.rcvr(port=self.port, handler=self.hdlr, 1045 ready=self.ready, 1046 verify=self.verify) 1047 if self.port == 0: 1048 self.port = server.server_address[1] 1049 self.ready.set() 1050 global _listener 1051 with logging._lock: 1052 _listener = server 1053 server.serve_until_stopped() 1054 1055 return Server(ConfigSocketReceiver, ConfigStreamHandler, port, verify) 1056 1057def stopListening(): 1058 """ 1059 Stop the listening server which was created with a call to listen(). 1060 """ 1061 global _listener 1062 with logging._lock: 1063 if _listener: 1064 _listener.abort = 1 1065 _listener = None 1066