/external/python/cpython2/Tools/scripts/ |
D | cleanfuture.py |
    166 type, token, (srow, scol), (erow, ecol), line = get()
    170 type, token, (srow, scol), (erow, ecol), line = get()
    174 type, token, (srow, scol), (erow, ecol), line = get()
    180 type, token, (srow, scol), (erow, ecol), line = get()
    185 type, token, (srow, scol), (erow, ecol), line = get()
    189 type, token, (srow, scol), (erow, ecol), line = get()
    193 type, token, (srow, scol), (erow, ecol), line = get()
    199 type, token, (srow, scol), (erow, ecol), line = get()
    203 type, token, (srow, scol), (erow, ecol), line = get()
    209 type, token, (srow, scol), (erow, ecol), line = get()
|
D | reindent.py | 287 def tokeneater(self, type, token, (sline, scol), end, line, argument
|
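The cleanfuture.py hits above (and their cpython3 twins below) all unpack the same 5-tuple from a get() call, which looks like a thin wrapper over a tokenize token stream. A minimal sketch of that unpacking pattern, assuming the stock tokenize.generate_tokens API rather than cleanfuture.py's own plumbing:

    import io
    import tokenize

    source = "x = 1\ny = 2\n"
    tokens = tokenize.generate_tokens(io.StringIO(source).readline)
    get = lambda: next(tokens)  # rough stand-in for cleanfuture.py's get()

    # Each call yields the 5-tuple shape seen in the hits: type, string,
    # (start row, start col), (end row, end col), and the raw source line.
    type_, token, (srow, scol), (erow, ecol), line = get()
    print(tokenize.tok_name[type_], repr(token), (srow, scol), (erow, ecol))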
/external/python/cpython3/Tools/scripts/ |
D | cleanfuture.py |
    165 type, token, (srow, scol), (erow, ecol), line = get()
    169 type, token, (srow, scol), (erow, ecol), line = get()
    173 type, token, (srow, scol), (erow, ecol), line = get()
    179 type, token, (srow, scol), (erow, ecol), line = get()
    184 type, token, (srow, scol), (erow, ecol), line = get()
    188 type, token, (srow, scol), (erow, ecol), line = get()
    192 type, token, (srow, scol), (erow, ecol), line = get()
    198 type, token, (srow, scol), (erow, ecol), line = get()
    202 type, token, (srow, scol), (erow, ecol), line = get()
    208 type, token, (srow, scol), (erow, ecol), line = get()
|
D | highlight.py |
    21 (srow, scol), (erow, ecol) = start, end
    23 return lines[srow-1][scol:ecol], end
    24 rows = [lines[srow-1][scol:]] + lines[srow: erow-1] + [lines[erow-1][:ecol]]
    39 tok_type, tok_str, (srow, scol), (erow, ecol), logical_lineno = tok
    47 if prev_tok_type == tokenize.INDENT or scol==0:
    59 text, written = combine_range(lines, written, (srow, scol))
|
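The highlight.py hits just above go the other direction: turning a start/end position pair back into source text. A simplified sketch of that slicing, assuming lines keeps its trailing newlines; the name text_between is made up here, and the real combine_range also returns the end position alongside the text:

    def text_between(lines, start, end):
        # Rows are 1-based and columns 0-based, following tokenize conventions.
        (srow, scol), (erow, ecol) = start, end
        if srow == erow:                      # span within a single line
            return lines[srow - 1][scol:ecol]
        rows = ([lines[srow - 1][scol:]]      # tail of the first line
                + lines[srow:erow - 1]        # full lines in between, if any
                + [lines[erow - 1][:ecol]])   # head of the last line
        return "".join(rows)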
/external/angle/src/tests/gl_tests/ |
D | CubeMapTextureTest.cpp |
    157 for (size_t scol = 0; scol < kCubeFaceSectionCountSqrt; ++scol) in runSampleCoordinateTransformTest() local
    160 size_t c = col + scol * kTextureSize / kCubeFaceSectionCountSqrt; in runSampleCoordinateTransformTest()
    161 size_t s = srow * kCubeFaceSectionCountSqrt + scol; in runSampleCoordinateTransformTest()
|
/external/chromium-trace/catapult/common/py_utils/py_utils/refactor/ |
D | offset_token.py |
    75 token_type, string, (srow, scol), _, _ = next_token
    81 offset_tokens.append(OffsetToken(token_type, string, (0, scol - ecol)))
    84 token_type, string, (srow - erow, scol)))
|
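The offset_token.py hits store each token's start relative to the previous token's end: a column delta when both sit on the same row, otherwise a row delta plus the absolute column. A minimal sketch of that bookkeeping, assuming plain tuples rather than catapult's OffsetToken class; it accepts any iterable of tokenize-style 5-tuples, e.g. the output of tokenize.generate_tokens:

    def to_offsets(tokens):
        offsets = []
        erow, ecol = 1, 0                      # assume the stream starts at row 1, column 0
        for tok_type, string, (srow, scol), (new_erow, new_ecol), _ in tokens:
            if srow == erow:                   # same row: keep only the column delta
                offsets.append((tok_type, string, (0, scol - ecol)))
            else:                              # new row: row delta, absolute column
                offsets.append((tok_type, string, (srow - erow, scol)))
            erow, ecol = new_erow, new_ecol
        return offsets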
/external/python/cpython2/Lib/lib2to3/pgen2/ |
D | tokenize.py |
    155 (srow, scol) = start
    158 (srow, scol, erow, ecol, tok_name[type], repr(token))
|
/external/python/cpython2/Lib/ |
D | tokenize.py |
    151 srow, scol = srow_scol
    154 (srow, scol, erow, ecol, tok_name[type], repr(token))
|
D | trace.py | 430 sline, scol = start
|
D | inspect.py | 641 srow, scol = srow_scol
|
/external/python/cpython3/Lib/lib2to3/pgen2/ |
D | tokenize.py |
    143 (srow, scol) = xxx_todo_changeme
    146 (srow, scol, erow, ecol, tok_name[type], repr(token)))
|
/external/rust/crates/proc-macro2/tests/ |
D | test.rs |
    640 if let Some((&(sline, scol, eline, ecol), rest)) = lines.split_first() { in check_spans_internal()
    645 assert_eq!(start.column, scol, "scol did not match for {}", i); in check_spans_internal()
|
/external/autotest/utils/ |
D | reindent.py | 270 (sline, scol) = sline_scol
|
/external/python/cpython2/Doc/library/ |
D | tokenize.rst | 37 string; a 2-tuple ``(srow, scol)`` of ints specifying the row and column
|
/external/python/cpython3/Doc/library/ |
D | tokenize.rst | 38 token string; a 2-tuple ``(srow, scol)`` of ints specifying the row and
|
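Both tokenize.rst entries describe the same 5-tuple: token type, token string, a (srow, scol) start, an (erow, ecol) end, and the raw logical line. A small self-contained illustration under Python 3 (the variable names are mine); the %d,%d-%d,%d layout mirrors the printtoken hits from Lib/tokenize.py and lib2to3 above:

    import io
    import tokenize

    code = "total = price * qty\n"
    for tok_type, tok_str, (srow, scol), (erow, ecol), _line in \
            tokenize.generate_tokens(io.StringIO(code).readline):
        print("%d,%d-%d,%d:\t%s\t%s" % (srow, scol, erow, ecol,
                                        tokenize.tok_name[tok_type], repr(tok_str)))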
/external/python/cpython3/Lib/ |
D | trace.py | 366 sline, scol = start
|