# Copyright 2021 The Pigweed Authors
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
#     https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
"""Python logging helper functions."""

import copy
from datetime import datetime
import json
import logging
import tempfile
from typing import Any, Dict, Iterable, Iterator, Optional


def all_loggers() -> Iterator[logging.Logger]:
    """Iterates over all loggers known to Python logging."""
    manager = logging.getLogger().manager  # type: ignore[attr-defined]

    for logger_name in manager.loggerDict:  # pylint: disable=no-member
        yield logging.getLogger(logger_name)


def create_temp_log_file(
    prefix: Optional[str] = None, add_time: bool = True
) -> str:
    """Create a unique tempfile for saving logs.

    Example format: /tmp/pw_console_2021-05-04_151807_8hem6iyq
    """
    if not prefix:
        prefix = str(__package__)

    # Grab the current system timestamp as a string.
    isotime = datetime.now().isoformat(sep="_", timespec="seconds")
    # The timestamp string should not have colons in it.
    isotime = isotime.replace(":", "")

    if add_time:
        prefix += f"_{isotime}"

    log_file_name = None
    with tempfile.NamedTemporaryFile(
        prefix=f"{prefix}_", delete=False
    ) as log_file:
        log_file_name = log_file.name

    return log_file_name


def set_logging_last_resort_file_handler(
    file_name: Optional[str] = None,
) -> None:
    """Route last resort log messages to a file instead of stderr."""
    log_file = file_name if file_name else create_temp_log_file()
    logging.lastResort = logging.FileHandler(log_file)


def disable_stdout_handlers(logger: logging.Logger) -> None:
    """Remove all stdout and stdout & stderr logger handlers."""
    for handler in copy.copy(logger.handlers):
        # Must use a type() check here since isinstance() would also match
        # logging.FileHandler, which subclasses logging.StreamHandler.
        # pylint: disable=unidiomatic-typecheck
        if type(handler) == logging.StreamHandler:
            logger.removeHandler(handler)
        # pylint: enable=unidiomatic-typecheck


def setup_python_logging(
    last_resort_filename: Optional[str] = None,
    loggers_with_no_propagation: Optional[Iterable[logging.Logger]] = None,
) -> None:
    """Disable log handlers for full screen prompt_toolkit applications.

    See the usage sketch in the comment block following this function.
    """
    if not loggers_with_no_propagation:
        loggers_with_no_propagation = []
    disable_stdout_handlers(logging.getLogger())

    if logging.lastResort is not None:
        set_logging_last_resort_file_handler(last_resort_filename)

    for logger in list(all_loggers()):
        # Prevent stdout handlers from corrupting the prompt_toolkit UI.
        disable_stdout_handlers(logger)
        if logger in loggers_with_no_propagation:
            continue
        # Make sure all known loggers propagate to the root logger.
        logger.propagate = True

    # Prevent these loggers from propagating to the root logger.
    hidden_host_loggers = [
        "pw_console",
        "pw_console.plugins",
        # prompt_toolkit triggered debug log messages
        "prompt_toolkit",
        "prompt_toolkit.buffer",
        "parso.python.diff",
        "parso.cache",
        "pw_console.serial_debug_logger",
    ]
    for logger_name in hidden_host_loggers:
        logging.getLogger(logger_name).propagate = False

    # Set the asyncio log level to WARNING.
    logging.getLogger("asyncio").setLevel(logging.WARNING)

    # Always set DEBUG level for serial debug.
    logging.getLogger("pw_console.serial_debug_logger").setLevel(logging.DEBUG)
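

# Usage sketch for setup_python_logging(); illustrative only and not executed
# by this module. The 'rpc_device' logger name and the surrounding setup are
# assumptions for the example: a host script would typically make this call
# just before launching a full screen prompt_toolkit UI.
#
#   import logging
#   import pw_console.python_logging
#
#   _DEVICE_LOG = logging.getLogger('rpc_device')
#
#   # Silence stdout handlers and keep device logs out of the root logger.
#   pw_console.python_logging.setup_python_logging(
#       loggers_with_no_propagation=[_DEVICE_LOG])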


def log_record_to_json(record: logging.LogRecord) -> str:
    """Convert a LogRecord into a JSON string."""
    log_dict: Dict[str, Any] = {}
    log_dict["message"] = record.getMessage()
    log_dict["levelno"] = record.levelno
    log_dict["levelname"] = record.levelname
    log_dict["args"] = record.args

    if hasattr(record, "extra_metadata_fields") and (
        record.extra_metadata_fields  # type: ignore
    ):
        fields = record.extra_metadata_fields  # type: ignore
        log_dict["fields"] = {}
        for key, value in fields.items():
            if key == "msg":
                log_dict["message"] = value
                continue

            log_dict["fields"][key] = str(value)

    return json.dumps(log_dict)


class JsonLogFormatter(logging.Formatter):
    """JSON Python logging Formatter.

    Use this formatter to log pw_console messages to a file in JSON
    format. Column values normally shown in table view will be populated in the
    'fields' key.

    Example log entry:

    .. code-block:: json

        {
          "message": "System init",
          "levelno": 20,
          "levelname": "INF",
          "args": [
            "0:00",
            "pw_system ",
            "System init"
          ],
          "fields": {
            "module": "pw_system",
            "file": "pw_system/init.cc",
            "timestamp": "0:00"
          }
        }

    Example usage:

    .. code-block:: python

        import logging
        import pw_console.python_logging

        _DEVICE_LOG = logging.getLogger('rpc_device')

        json_filehandler = logging.FileHandler('logs.json', encoding='utf-8')
        json_filehandler.setLevel(logging.DEBUG)
        json_filehandler.setFormatter(
            pw_console.python_logging.JsonLogFormatter())
        _DEVICE_LOG.addHandler(json_filehandler)
    """

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)

    def format(self, record: logging.LogRecord) -> str:
        return log_record_to_json(record)
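

if __name__ == "__main__":
    # Minimal demonstration sketch, not part of the pw_console API: build a
    # LogRecord by hand, attach the "extra_metadata_fields" attribute that
    # log_record_to_json() looks for (normally populated elsewhere in the log
    # pipeline), and print the resulting JSON. The logger name and field
    # values below are illustrative assumptions.
    example_record = logging.LogRecord(
        name="rpc_device",
        level=logging.INFO,
        pathname=__file__,
        lineno=0,
        msg="System init",
        args=(),
        exc_info=None,
    )
    example_record.extra_metadata_fields = {  # type: ignore[attr-defined]
        "module": "pw_system",
        "timestamp": "0:00",
    }
    print(log_record_to_json(example_record))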