import contextlib
import logging
import os
import os.path
import platform
import re
import sys

from c_common.fsutil import match_glob as _match_glob
from c_common.tables import parse_table as _parse_table
from ..source import (
    resolve as _resolve_source,
    good_file as _good_file,
)
from . import errors as _errors
from . import (
    pure as _pure,
    gcc as _gcc,
)


logger = logging.getLogger(__name__)


# Supported "source":
#  * filename (string)
#  * lines (iterable)
#  * text (string)
# Supported return values:
#  * iterator of SourceLine
#  * sequence of SourceLine
#  * text (string)
#  * something that combines all those
# XXX Add the missing support from above.
# XXX Add more low-level functions to handle permutations?

def preprocess(source, *,
               incldirs=None,
               includes=None,
               macros=None,
               samefiles=None,
               filename=None,
               cwd=None,
               tool=True,
               ):
    """...

    CWD should be the project root and "source" should be relative.
    """
    if tool:
        if not cwd:
            cwd = os.getcwd()
        logger.debug(f'CWD:       {cwd!r}')
        logger.debug(f'incldirs:  {incldirs!r}')
        logger.debug(f'includes:  {includes!r}')
        logger.debug(f'macros:    {macros!r}')
        logger.debug(f'samefiles: {samefiles!r}')
        _preprocess = _get_preprocessor(tool)
        with _good_file(source, filename) as source:
            return _preprocess(
                source,
                incldirs,
                includes,
                macros,
                samefiles,
                cwd,
            ) or ()
    else:
        source, filename = _resolve_source(source, filename)
        # We ignore "includes", "macros", etc.
        return _pure.preprocess(source, filename, cwd)

    # if _run() returns just the lines:
#        text = _run(source)
#        lines = [line + os.linesep for line in text.splitlines()]
#        lines[-1] = lines[-1].splitlines()[0]
#
#        conditions = None
#        for lno, line in enumerate(lines, 1):
#            kind = 'source'
#            directive = None
#            data = line
#            yield lno, kind, data, conditions
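
# A minimal usage sketch for preprocess(); it is not part of the module's API.
# The paths and macro values below are hypothetical, and the exact value
# formats accepted for "incldirs"/"macros" are ultimately defined by the
# selected backend (e.g. _gcc.preprocess).
def _example_preprocess():
    # With tool=True the default compiler for this platform is used
    # (see _COMPILERS / _get_default_compiler() below); tool=False falls
    # back to the pure-Python preprocessor and ignores the extra arguments.
    return preprocess(
        'Modules/_abc.c',            # hypothetical source file, relative to CWD
        incldirs=['Include'],        # hypothetical include directory
        macros=['Py_BUILD_CORE=1'],  # hypothetical macro definition
        cwd='/path/to/project',      # hypothetical project root
        tool=True,
    )
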

def get_preprocessor(*,
                     file_macros=None,
                     file_includes=None,
                     file_incldirs=None,
                     file_same=None,
                     ignore_exc=False,
                     log_err=None,
                     ):
    _preprocess = preprocess
    if file_macros:
        file_macros = tuple(_parse_macros(file_macros))
    if file_includes:
        file_includes = tuple(_parse_includes(file_includes))
    if file_incldirs:
        file_incldirs = tuple(_parse_incldirs(file_incldirs))
    if file_same:
        file_same = dict(file_same or ())
    if not callable(ignore_exc):
        ignore_exc = (lambda exc, _ig=ignore_exc: _ig)

    def get_file_preprocessor(filename):
        filename = filename.strip()
        if file_macros:
            macros = list(_resolve_file_values(filename, file_macros))
        if file_includes:
            # There's a small chance we could need to filter out any
            # includes that import "filename".  It isn't clear that it's
            # a problem any longer.  If we do end up filtering then
            # it may make sense to use c_common.fsutil.match_path_tail().
            includes = [i for i, in _resolve_file_values(filename, file_includes)]
        if file_incldirs:
            incldirs = [v for v, in _resolve_file_values(filename, file_incldirs)]
        if file_same:
            samefiles = _resolve_samefiles(filename, file_same)

        def preprocess(**kwargs):
            if file_macros and 'macros' not in kwargs:
                kwargs['macros'] = macros
            if file_includes and 'includes' not in kwargs:
                kwargs['includes'] = includes
            if file_incldirs and 'incldirs' not in kwargs:
                kwargs['incldirs'] = incldirs
            if file_same and 'samefiles' not in kwargs:
                kwargs['samefiles'] = samefiles
            kwargs.setdefault('filename', filename)
            with handling_errors(ignore_exc, log_err=log_err):
                return _preprocess(filename, **kwargs)
        return preprocess
    return get_file_preprocessor
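
# A minimal usage sketch for get_preprocessor(); the filename is hypothetical.
# The file_* arguments are omitted here because their row formats are defined
# by _parse_macros() / _parse_includes() / _parse_incldirs() below
# (tab-separated "glob\t..." rows fed to c_common.tables.parse_table).
def _example_get_preprocessor():
    get_file_preprocessor = get_preprocessor(
        ignore_exc=True,          # swallow the preprocessor errors handled below
        log_err=logger.warning,   # ...but still log them
    )
    pp = get_file_preprocessor('/path/to/project/Modules/_abc.c')
    # Returns the preprocessed source (or ()), or None if an ignored
    # preprocessor error was logged instead.
    return pp()
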

def _resolve_file_values(filename, file_values):
    # We expect the filename and all patterns to be absolute paths.
    for pattern, *value in file_values or ():
        if _match_glob(filename, pattern):
            yield value


def _parse_macros(macros):
    for row, srcfile in _parse_table(macros, '\t', 'glob\tname\tvalue', rawsep='=', default=None):
        yield row


def _parse_includes(includes):
    for row, srcfile in _parse_table(includes, '\t', 'glob\tinclude', default=None):
        yield row


def _parse_incldirs(incldirs):
    for row, srcfile in _parse_table(incldirs, '\t', 'glob\tdirname', default=None):
        glob, dirname = row
        if dirname is None:
            # Match all files.
            dirname = glob
            row = ('*', dirname.strip())
        yield row


def _resolve_samefiles(filename, file_same):
    assert '*' not in filename, (filename,)
    assert os.path.normpath(filename) == filename, (filename,)
    _, suffix = os.path.splitext(filename)
    samefiles = []
    for patterns, in _resolve_file_values(filename, file_same.items()):
        for pattern in patterns:
            same = _resolve_samefile(filename, pattern, suffix)
            if not same:
                continue
            samefiles.append(same)
    return samefiles


def _resolve_samefile(filename, pattern, suffix):
    if pattern == filename:
        return None
    if pattern.endswith(os.path.sep):
        pattern += f'*{suffix}'
    assert os.path.normpath(pattern) == pattern, (pattern,)
    if '*' in os.path.dirname(pattern):
        raise NotImplementedError((filename, pattern))
    if '*' not in os.path.basename(pattern):
        return pattern

    common = os.path.commonpath([filename, pattern])
    relpattern = pattern[len(common) + len(os.path.sep):]
    relpatterndir = os.path.dirname(relpattern)
    relfile = filename[len(common) + len(os.path.sep):]
    if os.path.basename(pattern) == '*':
        return os.path.join(common, relpatterndir, relfile)
    elif os.path.basename(relpattern) == '*' + suffix:
        return os.path.join(common, relpatterndir, relfile)
    else:
        raise NotImplementedError((filename, pattern))


@contextlib.contextmanager
def handling_errors(ignore_exc=None, *, log_err=None):
    try:
        yield
    except _errors.OSMismatchError as exc:
        if not ignore_exc(exc):
            raise  # re-raise
        if log_err is not None:
            log_err(f'<OS mismatch (expected {" or ".join(exc.expected)})>')
        return None
    except _errors.MissingDependenciesError as exc:
        if not ignore_exc(exc):
            raise  # re-raise
        if log_err is not None:
            log_err(f'<missing dependency {exc.missing}>')
        return None
    except _errors.ErrorDirectiveError as exc:
        if not ignore_exc(exc):
            raise  # re-raise
        if log_err is not None:
            log_err(exc)
        return None


##################################
# tools

_COMPILERS = {
    # matching distutils.ccompiler.compiler_class:
    'unix': _gcc.preprocess,
    'msvc': None,
    'cygwin': None,
    'mingw32': None,
    'bcpp': None,
    # aliases/extras:
    'gcc': _gcc.preprocess,
    'clang': None,
}


def _get_default_compiler():
    if re.match('cygwin.*', sys.platform) is not None:
        return 'unix'
    if os.name == 'nt':
        return 'msvc'
    if sys.platform == 'darwin' and 'clang' in platform.python_compiler():
        return 'clang'
    return 'unix'


def _get_preprocessor(tool):
    if tool is True:
        tool = _get_default_compiler()
    preprocess = _COMPILERS.get(tool)
    if preprocess is None:
        raise ValueError(f'unsupported tool {tool}')
    return preprocess


##################################
# aliases

from .errors import (
    PreprocessorError,
    PreprocessorFailure,
    ErrorDirectiveError,
    MissingDependenciesError,
    OSMismatchError,
)
from .common import FileInfo, SourceLine
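
# A minimal sketch of using handling_errors() directly; get_preprocessor()
# above wires it up the same way.  The ignore_exc callable decides, per
# exception, whether to swallow it, and the source path is hypothetical.
def _example_handling_errors():
    def ignore_exc(exc):
        return isinstance(exc, ErrorDirectiveError)
    with handling_errors(ignore_exc, log_err=logger.warning):
        return preprocess('Modules/_abc.c')
    # Only reached if an ErrorDirectiveError was swallowed by the context
    # manager; other preprocessor errors propagate out of the "with" block.
    return None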