| /external/rust/crates/quote/tests/ |
| D | test.rs |
|   19 let tokens = quote! { in test_quote_impl() localVariable
|   41 let tokens = quote!(#x <#x> (#x) [#x] {#x}); in test_substitution() localVariable
|   73 let tokens = quote! { in test_advanced() localVariable
|   129 let tokens = quote! { in test_integer() localVariable
|   143 let tokens = quote! { in test_floating() localVariable
|   160 let tokens = quote! { in test_char() localVariable
|   170 let tokens = quote!(#s); in test_str() localVariable
|   178 let tokens = quote!(#s); in test_string() localVariable
|   187 let tokens = quote!(struct #foo; enum #bar {}); in test_ident() localVariable
|   196 let tokens = quote!(#ch #ch); in test_duplicate() localVariable
|   [all …]
|
| /external/minijail/tools/ |
| D | parser.py |
|   263 def _parse_constant(self, tokens): argument
|   288 def parse_value(self, tokens): argument
|   317 def _parse_atom(self, tokens): argument
|   338 def _parse_clause(self, tokens): argument
|   351 def parse_argument_expression(self, tokens): argument
|   374 def _parse_default_action(self, tokens): argument
|   401 def parse_action(self, tokens): argument
|   433 def _parse_single_filter(self, tokens): argument
|   451 def parse_filter(self, tokens): argument
|   472 def _parse_key_value_pair(self, tokens): argument
|   [all …]
|
| /external/antlr/tool/src/test/java/org/antlr/test/ |
| D | TestTokenRewriteStream.java |
|   53 TokenRewriteStream tokens = new TokenRewriteStream(lexEngine); in testInsertBeforeIndex0() local
|   68 TokenRewriteStream tokens = new TokenRewriteStream(lexEngine); in testInsertAfterLastIndex() local
|   83 TokenRewriteStream tokens = new TokenRewriteStream(lexEngine); in test2InsertBeforeAfterMiddleIndex() local
|   100 TokenRewriteStream tokens = new TokenRewriteStream(lexEngine); in testReplaceIndex0() local
|   116 TokenRewriteStream tokens = new TokenRewriteStream(lexEngine); in testReplaceLastIndex() local
|   132 TokenRewriteStream tokens = new TokenRewriteStream(lexEngine); in testReplaceMiddleIndex() local
|   153 TokenRewriteStream tokens = new TokenRewriteStream(lexEngine); in testToStringStartStop() local
|   188 TokenRewriteStream tokens = new TokenRewriteStream(lexEngine); in testToStringStartStop2() local
|   235 TokenRewriteStream tokens = new TokenRewriteStream(lexEngine); in test2ReplaceMiddleIndex() local
|   252 TokenRewriteStream tokens = new TokenRewriteStream(lexEngine); in test2ReplaceMiddleIndex1InsertBefore() local
|   [all …]
|
| D | TestCommonTokenStream.java |
|   54 BufferedTokenStream tokens = new BufferedTokenStream(lexEngine); in testFirstToken() local
|   75 BufferedTokenStream tokens = new BufferedTokenStream(lexEngine); in test2ndToken() local
|   96 BufferedTokenStream tokens = new BufferedTokenStream(lexEngine); in testCompleteBuffer() local
|   126 BufferedTokenStream tokens = new BufferedTokenStream(lexEngine); in testCompleteBufferAfterConsuming() local
|   157 BufferedTokenStream tokens = new BufferedTokenStream(lexEngine); in testLookback() local
|   204 CommonTokenStream tokens = new CommonTokenStream(lexer); in testOffChannel() local
|
| D | TestInterpretedParsing.java |
|   68 FilteringTokenStream tokens = new FilteringTokenStream(lexEngine); in testSimpleParse() local
|   103 FilteringTokenStream tokens = new FilteringTokenStream(lexEngine); in testMismatchedTokenError() local
|   138 FilteringTokenStream tokens = new FilteringTokenStream(lexEngine); in testMismatchedSetError() local
|   173 FilteringTokenStream tokens = new FilteringTokenStream(lexEngine); in testNoViableAltError() local
|
| /external/libtextclassifier/native/annotator/duration/ |
| D | duration_test.cc |
|   152 std::vector<Token> tokens = Tokenize(text); in TEST_F() local
|   171 std::vector<Token> tokens = Tokenize(text); in TEST_F() local
|   190 std::vector<Token> tokens = Tokenize(text); in TEST_F() local
|   209 std::vector<Token> tokens = Tokenize(text); in TEST_F() local
|   228 std::vector<Token> tokens = Tokenize(text); in TEST_F() local
|   247 std::vector<Token> tokens = Tokenize(text); in TEST_F() local
|   266 std::vector<Token> tokens = Tokenize(text); in TEST_F() local
|   286 std::vector<Token> tokens = Tokenize(text); in TEST_F() local
|   305 std::vector<Token> tokens = Tokenize(text); in TEST_F() local
|   323 std::vector<Token> tokens = Tokenize(text); in TEST_F() local
|   [all …]
|
| /external/pdfium/samples/ |
| D | pdfium_test_event_helper.cc |
|   19 const std::vector<std::string>& tokens) { in SendCharCodeEvent()
|   31 const std::vector<std::string>& tokens) { in SendKeyCodeEvent()
|   56 const std::vector<std::string>& tokens) { in SendMouseDownEvent()
|   76 const std::vector<std::string>& tokens) { in SendMouseUpEvent()
|   95 const std::vector<std::string>& tokens) { in SendMouseDoubleClickEvent()
|   113 const std::vector<std::string>& tokens) { in SendMouseMoveEvent()
|   126 const std::vector<std::string>& tokens) { in SendFocusEvent()
|   146 auto tokens = StringSplit(command[0], ','); in SendPageEvents() local
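The pdfium_test event helpers all receive the same pre-split token vector: SendPageEvents() breaks each scripted event line on commas (via pdfium_test's own StringSplit helper, last hit above) and dispatches to the Send*Event() functions with tokens[0] naming the event. The sketch below reproduces only that splitting step with the standard library; SplitOnComma and the sample event line are illustrative stand-ins, not pdfium APIs.

    // Illustrative stand-in for the comma-splitting step; not pdfium's StringSplit.
    #include <iostream>
    #include <sstream>
    #include <string>
    #include <vector>

    std::vector<std::string> SplitOnComma(const std::string& command) {
      std::vector<std::string> tokens;
      std::istringstream stream(command);
      std::string piece;
      while (std::getline(stream, piece, ','))  // split on ','
        tokens.push_back(piece);
      return tokens;
    }

    int main() {
      // A made-up event line of the general shape "<event>,<arg>,<arg>,...".
      for (const std::string& token : SplitOnComma("mousedown,left,100,200"))
        std::cout << token << '\n';
    }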
|
| /external/libtextclassifier/native/utils/ |
| D | tokenizer_test.cc |
|   178 std::vector<Token> tokens = tokenizer.Tokenize("Hello world!"); in TEST() local
|   349 std::vector<Token> tokens; in TEST() local
|   379 std::vector<Token> tokens = tokenizer.Tokenize("พระบาท สมเด็จ พระ ปร มิ"); in TEST() local
|   399 std::vector<Token> tokens = in TEST() local
|   427 std::vector<Token> tokens = tokenizer.Tokenize("3.1 3﹒2 3.3"); in TEST() local
|   445 std::vector<Token> tokens = tokenizer.Tokenize("พระบาทสมเด็จพระปรมิ"); in TEST() local
|   495 std::vector<Token> tokens = tokenizer.Tokenize( in TEST() local
|   548 std::vector<Token> tokens = tokenizer.Tokenize("7% -3.14 68.9#? 7% $99 .18."); in TEST() local
|   565 std::vector<Token> tokens = tokenizer.Tokenize("2 pércént 3パーセント"); in TEST() local
|   577 std::vector<Token> tokens = tokenizer.Tokenize("3 3﹒2 3.3%"); in TEST() local
|   [all …]
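These tests exercise libtextclassifier's configurable tokenizer (script boundaries, number splitting, punctuation handling) and inspect the returned Token vector. As a rough mental model only, here is a toy whitespace tokenizer that records each token's text and [start, end) offsets; the struct and function below are illustrative stand-ins, work on byte offsets, and are not the library's API, which is configured per use case and operates on Unicode codepoints.

    // Toy tokenizer sketch; NOT the libtextclassifier API.
    #include <iostream>
    #include <string>
    #include <vector>

    struct Token {
      std::string value;
      int start;  // inclusive offset (bytes in this sketch)
      int end;    // exclusive offset
    };

    std::vector<Token> Tokenize(const std::string& text) {
      std::vector<Token> tokens;
      size_t i = 0;
      while (i < text.size()) {
        if (text[i] == ' ') { ++i; continue; }          // skip separators
        size_t begin = i;
        while (i < text.size() && text[i] != ' ') ++i;  // consume one token
        tokens.push_back({text.substr(begin, i - begin),
                          static_cast<int>(begin), static_cast<int>(i)});
      }
      return tokens;
    }

    int main() {
      for (const Token& t : Tokenize("Hello world!"))
        std::cout << t.value << " [" << t.start << ", " << t.end << ")\n";
      // Prints "Hello [0, 5)" and "world! [6, 12)".
    }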
|
| D | test-utils_test.cc |
|   26 std::vector<Token> tokens = in TEST() local
|   69 std::vector<Token> tokens = TokenizeOnDelimiters( in TEST() local
|
| /external/rust/crates/syn/0.15.42/tests/ |
| D | test_generics.rs |
|   132 let tokens = quote!('a); in test_ty_param_bound() localVariable
|   139 let tokens = quote!('_); in test_ty_param_bound() localVariable
|   146 let tokens = quote!(Debug); in test_ty_param_bound() localVariable
|   161 let tokens = quote!(?Sized); in test_ty_param_bound() localVariable
|
| /external/rust/crates/syn/1.0.7/tests/ |
| D | test_generics.rs |
|   129 let tokens = quote!('a); in test_ty_param_bound() localVariable
|   136 let tokens = quote!('_); in test_ty_param_bound() localVariable
|   143 let tokens = quote!(Debug); in test_ty_param_bound() localVariable
|   158 let tokens = quote!(?Sized); in test_ty_param_bound() localVariable
|
| /external/mesa3d/src/gallium/auxiliary/tgsi/ |
| D | tgsi_parse.c |
|   36 const struct tgsi_token *tokens ) in tgsi_parse_init()
|   279 tgsi_dup_tokens(const struct tgsi_token *tokens) in tgsi_dup_tokens()
|   305 tgsi_free_tokens(const struct tgsi_token *tokens) in tgsi_free_tokens()
|   312 tgsi_dump_tokens(const struct tgsi_token *tokens) in tgsi_dump_tokens()
|   327 tgsi_get_processor_type(const struct tgsi_token *tokens) in tgsi_get_processor_type()
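tgsi_parse.c provides the iterator used throughout gallium to walk a TGSI token stream: tgsi_parse_init() and tgsi_parse_token() step over full declarations, immediates and instructions, tgsi_dup_tokens()/tgsi_free_tokens() handle ownership, and tgsi_get_processor_type() identifies the shader stage. A rough usage sketch follows, written from memory of the gallium headers, so treat the exact signatures as assumptions to verify against tgsi_parse.h.

    /* Sketch of the usual TGSI walk; requires the Mesa gallium headers. */
    #include "tgsi/tgsi_parse.h"

    static unsigned
    count_instructions(const struct tgsi_token *tokens)
    {
       struct tgsi_parse_context parse;
       unsigned count = 0;

       if (tgsi_parse_init(&parse, tokens) != TGSI_PARSE_OK)
          return 0;

       while (!tgsi_parse_end_of_tokens(&parse)) {
          tgsi_parse_token(&parse);
          /* FullToken holds the declaration/immediate/instruction just read. */
          if (parse.FullToken.Token.Type == TGSI_TOKEN_TYPE_INSTRUCTION)
             count++;
       }

       tgsi_parse_free(&parse);
       return count;
    }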
|
| /external/mesa3d/src/gallium/drivers/nouveau/ |
| D | nouveau_compiler.c |
|   38 nv30_fp(int chipset, struct tgsi_token tokens[], in nv30_fp()
|   51 nv30_vp(int chipset, struct tgsi_token tokens[], in nv30_vp()
|   65 nv30_codegen(int chipset, int type, struct tgsi_token tokens[], in nv30_codegen()
|   105 nouveau_codegen(int chipset, int type, struct tgsi_token tokens[], in nouveau_codegen()
|   142 struct tgsi_token tokens[4096]; in main() local
|
| /external/python/cpython3/Parser/pgen/ |
| D | token.py |
|   4 def generate_tokens(tokens): argument
|   21 def generate_opmap(tokens): argument
|
| /external/mesa3d/src/gallium/targets/graw-null/ |
| D | graw_util.c |
|   19 struct tgsi_token tokens[1024]; in graw_parse_geometry_shader() local
|   34 struct tgsi_token tokens[1024]; in graw_parse_vertex_shader() local
|   49 struct tgsi_token tokens[1024]; in graw_parse_fragment_shader() local
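The graw helpers (like the nouveau and radeonsi hits elsewhere in this listing) follow a common gallium pattern: reserve a fixed-size tgsi_token buffer on the stack, assemble TGSI assembly text into it, and hand the tokens to the driver. The fragment below sketches only the text-to-tokens step; it assumes tgsi_text_translate() from tgsi_text.h with the signature I recall (text, destination buffer, buffer size in tokens, false on failure), so verify against the real header before relying on it.

    /* Sketch: assemble TGSI text into a caller-provided token buffer. */
    #include "tgsi/tgsi_text.h"

    static const struct tgsi_token *
    parse_shader_text(const char *text, struct tgsi_token *tokens,
                      unsigned num_tokens)
    {
       if (!tgsi_text_translate(text, tokens, num_tokens))
          return NULL;  /* malformed text or buffer too small */
       return tokens;
    }

    /* Typical call site, mirroring the 1024-token arrays in the listing:
     *
     *    struct tgsi_token tokens[1024];
     *    if (!parse_shader_text(text, tokens, 1024))
     *       return NULL;
     */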
|
| /external/libtextclassifier/native/annotator/number/ |
| D | number.cc |
|   77 bool NumberAnnotator::TokensAreValidStart(const std::vector<Token>& tokens, in TokensAreValidStart()
|   86 const std::vector<Token>& tokens, const int prefix_end_index) const { in TokensAreValidNumberPrefix() argument
|   120 bool NumberAnnotator::TokensAreValidEnding(const std::vector<Token>& tokens, in TokensAreValidEnding()
|   144 const std::vector<Token>& tokens, const int suffix_start_index) const { in TokensAreValidNumberSuffix() argument
|   179 const std::vector<Token>& tokens, in FindPercentSuffixEndCodepoint()
|   224 const std::vector<Token> tokens = tokenizer_.Tokenize(context); in FindAll() local
|
| /external/python/pycparser/pycparser/ply/ |
| D | cpp.py |
|   24 tokens = ( variable
|   312 def tokenstrip(self,tokens): argument
|   486 def expand_macros(self,tokens,expanded=None): argument
|   552 def evalexpr(self,tokens): argument
|   742 def include(self,tokens): argument
|   791 def define(self,tokens): argument
|   861 def undef(self,tokens): argument
|
| /external/ply/ply/ply/ |
| D | cpp.py |
|   26 tokens = ( variable
|   314 def tokenstrip(self,tokens): argument
|   489 def expand_macros(self,tokens,expanded=None): argument
|   561 def evalexpr(self,tokens): argument
|   751 def include(self,tokens): argument
|   800 def define(self,tokens): argument
|   870 def undef(self,tokens): argument
|
| /external/mesa3d/src/gallium/drivers/radeonsi/ |
| D | si_shaderlib_tgsi.c |
|   481 struct tgsi_token tokens[1024]; in si_create_query_result_cs() local
|   524 struct tgsi_token tokens[1024]; in si_create_copy_image_compute_shader() local
|   561 struct tgsi_token tokens[1024]; in si_create_copy_image_compute_shader_1d_array() local
|   600 struct tgsi_token tokens[1024]; in si_create_dcc_decompress_cs() local
|   634 struct tgsi_token tokens[1024]; in si_clear_render_target_shader() local
|   669 struct tgsi_token tokens[1024]; in si_clear_render_target_shader_1d_array() local
|   700 struct tgsi_token tokens[1024]; in si_clear_12bytes_buffer_shader() local
|   983 struct tgsi_token tokens[1024]; in gfx10_create_sh_query_result_cs() local
|
| /external/tensorflow/tensorflow/compiler/xla/tools/ |
| D | interactive_graphviz.cc |
|   182 void DoBackendConfigCommand(const std::vector<string>& tokens) { in DoBackendConfigCommand()
|   197 const std::vector<string>& tokens) { in DoListComputationsCommand()
|   218 void DoListCommand(const HloModule& module, const std::vector<string>& tokens) { in DoListCommand()
|   250 void DoInfoCommand(const HloModule& module, const std::vector<string>& tokens) { in DoInfoCommand()
|   347 absl::Span<const string> tokens) { in DoExtractCommand()
|   478 const std::vector<string>& tokens) {
|   525 const std::vector<string>& tokens) {
|   612 std::vector<string> tokens = absl::StrSplit(line, ' ', absl::SkipEmpty()); local
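The last hit shows interactive_graphviz's dispatch pattern: each input line is split on spaces with absl::StrSplit, dropping empty tokens, and the Do*Command() handlers above receive the resulting vector with tokens[0] naming the command. A self-contained sketch of that splitting step (requires Abseil; the sample line and printed output are just illustration):

    #include <iostream>
    #include <string>
    #include <vector>
    #include "absl/strings/str_split.h"

    int main() {
      std::string line = "info %multiply.3";  // made-up command line
      std::vector<std::string> tokens =
          absl::StrSplit(line, ' ', absl::SkipEmpty());
      // A handler such as DoInfoCommand() would receive {"info", "%multiply.3"}.
      if (!tokens.empty())
        std::cout << "command: " << tokens[0]
                  << ", args: " << tokens.size() - 1 << "\n";
    }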
|
| /external/virglrenderer/src/gallium/auxiliary/tgsi/ |
| D | tgsi_parse.c |
|   36 const struct tgsi_token *tokens ) in tgsi_parse_init()
|   277 tgsi_dup_tokens(const struct tgsi_token *tokens) in tgsi_dup_tokens()
|   300 tgsi_dump_tokens(const struct tgsi_token *tokens) in tgsi_dump_tokens()
|
| /external/perfetto/src/trace_processor/importers/systrace/ |
| D | systrace_trace_parser.cc |
|   51 bool IsProcessDumpShortHeader(const std::vector<base::StringView>& tokens) { in IsProcessDumpShortHeader()
|   56 bool IsProcessDumpLongHeader(const std::vector<base::StringView>& tokens) { in IsProcessDumpLongHeader()
|   119 std::vector<base::StringView> tokens = in Parse() local
|
| /external/snakeyaml/src/test/java/org/pyyaml/ |
| D | PyCanonicalTest.java |
|   38 List<Token> tokens = canonicalScan(input); in testCanonicalScanner() local
|   64 List<Event> tokens = canonicalParse(input); in testCanonicalParser() local
|
| /external/chromium-trace/catapult/common/py_utils/py_utils/refactor/ |
| D | snippet.py |
|   41 def tokens(self): member in Snippet
|   99 def __init__(self, token_type, tokens): argument
|   135 def tokens(self): member in TokenSnippet
|   180 def tokens(self): member in Symbol
|   215 def _SnippetizeNode(node, tokens): argument
|
| /external/skqp/src/sksl/ |
| D | SkSLCPPUniformCTypes.cpp |
|   20 static String eval_template(const String& format, const std::vector<String>& tokens, in eval_template()
|   90 std::vector<String> tokens = { "newVar", "oldVar" }; in dirtyExpression() local
|   100 std::vector<String> tokens = { "newVar", "oldVar" }; in saveState() local
|   110 std::vector<String> tokens = { "pdman", "uniform", "var" }; in setUniform() local
|