/external/rust/crates/syn/tests/ |
D | test_item.rs |
    12 let tokens = TokenStream::from_iter(vec![ in test_macro_variable_attr() localVariable
    53 let tokens = quote! { in test_negative_impl() localVariable
    66 let tokens = quote! { in test_negative_impl() localVariable
    78 let tokens = quote! { in test_negative_impl() localVariable
    111 let tokens = quote! { in test_negative_impl() localVariable
    125 let tokens = TokenStream::from_iter(vec![ in test_macro_variable_impl() localVariable
    168 let tokens = quote!(trait Trait where {}); in test_supertraits() localVariable
    180 let tokens = quote!(trait Trait: where {}); in test_supertraits() localVariable
    193 let tokens = quote!(trait Trait: Sized where {}); in test_supertraits() localVariable
    219 let tokens = quote!(trait Trait: Sized + where {}); in test_supertraits() localVariable
    [all …]
|
D | test_ty.rs |
    22 let tokens = TokenStream::from_iter(vec![ in test_macro_variable_type() localVariable
    56 let tokens = TokenStream::from_iter(vec![ in test_macro_variable_type() localVariable
    96 let tokens = TokenStream::from_iter(vec![ in test_group_angle_brackets() localVariable
    148 let tokens = TokenStream::from_iter(vec![ in test_group_colons() localVariable
    185 let tokens = TokenStream::from_iter(vec![ in test_group_colons() localVariable
    224 let tokens = quote!(dyn for<'a> Trait<'a> + 'static); in test_trait_object() localVariable
    262 let tokens = quote!(dyn 'a + Trait); in test_trait_object() localVariable
    293 let tokens = quote!(impl Trait +); in test_trailing_plus() localVariable
    313 let tokens = quote!(dyn Trait +); in test_trailing_plus() localVariable
    334 let tokens = quote!(Trait +); in test_trailing_plus() localVariable
|
D | test_expr.rs |
    11 let tokens = quote!(..100u32); in test_expr_parse() localVariable
    21 let tokens = quote!(..100u32); in test_expr_parse() localVariable
    35 let tokens = quote!(fut.await); in test_await() localVariable
    104 let tokens = TokenStream::from_iter(vec![ in test_macro_variable_func() localVariable
    126 let tokens = TokenStream::from_iter(vec![ in test_macro_variable_func() localVariable
    182 let tokens = TokenStream::from_iter(vec![ in test_macro_variable_macro() localVariable
    209 let tokens = TokenStream::from_iter(vec![ in test_macro_variable_struct() localVariable
    231 let tokens = TokenStream::from_iter(vec![ in test_macro_variable_match_arm() localVariable
    288 let tokens = quote!(|| .. .method()); in test_closure_vs_rangefull() localVariable
|
D | test_generics.rs |
    129 let tokens = quote!('a); in test_ty_param_bound() localVariable
    136 let tokens = quote!('_); in test_ty_param_bound() localVariable
    143 let tokens = quote!(Debug); in test_ty_param_bound() localVariable
    158 let tokens = quote!(?Sized); in test_ty_param_bound() localVariable
|
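The syn test entries above share one pattern: build a proc_macro2::TokenStream, either with quote! or TokenStream::from_iter, then hand it to the parser. As a rough illustration only (not taken from the listed tests; assumes the quote crate and syn with its "full" feature enabled), that pattern looks roughly like this:

    use quote::quote;
    use syn::Item;

    fn main() {
        // Same token stream as the test_supertraits() case at line 193 above.
        let tokens = quote!(trait Trait: Sized where {});
        // Parse the stream into an item and check which syntax-tree variant came back.
        let item: Item = syn::parse2(tokens).expect("token stream should parse as an item");
        assert!(matches!(item, Item::Trait(_)));
    }
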
/external/rust/crates/quote/tests/ |
D | test.rs |
    24 let tokens = quote! { in test_quote_impl() localVariable
    46 let tokens = quote_spanned! {span=> in test_quote_spanned_impl() localVariable
    68 let tokens = quote!(#x <#x> (#x) [#x] {#x}); in test_substitution() localVariable
    121 let tokens = quote! { in test_advanced() localVariable
    177 let tokens = quote! { in test_integer() localVariable
    193 let tokens = quote! { in test_floating() localVariable
    210 let tokens = quote! { in test_char() localVariable
    220 let tokens = quote!(#s); in test_str() localVariable
    228 let tokens = quote!(#s); in test_string() localVariable
    241 let tokens = m!(1); in test_interpolated_literal() localVariable
    [all …]
|
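The quote test entries above revolve around # interpolation. A small hypothetical sketch of that pattern, assuming only the quote crate (it mirrors the shape of the test_substitution() entry rather than reproducing it):

    use quote::quote;

    fn main() {
        // Splice one token stream into several delimiter contexts.
        let x = quote!(X);
        let tokens = quote!(#x <#x> (#x) [#x] {#x});
        // proc_macro2::TokenStream implements Display, so the result can be printed.
        println!("{}", tokens);
    }
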
/external/antlr/tool/src/test/java/org/antlr/test/ |
D | TestTokenRewriteStream.java |
    53 TokenRewriteStream tokens = new TokenRewriteStream(lexEngine); in testInsertBeforeIndex0() local
    68 TokenRewriteStream tokens = new TokenRewriteStream(lexEngine); in testInsertAfterLastIndex() local
    83 TokenRewriteStream tokens = new TokenRewriteStream(lexEngine); in test2InsertBeforeAfterMiddleIndex() local
    100 TokenRewriteStream tokens = new TokenRewriteStream(lexEngine); in testReplaceIndex0() local
    116 TokenRewriteStream tokens = new TokenRewriteStream(lexEngine); in testReplaceLastIndex() local
    132 TokenRewriteStream tokens = new TokenRewriteStream(lexEngine); in testReplaceMiddleIndex() local
    153 TokenRewriteStream tokens = new TokenRewriteStream(lexEngine); in testToStringStartStop() local
    188 TokenRewriteStream tokens = new TokenRewriteStream(lexEngine); in testToStringStartStop2() local
    235 TokenRewriteStream tokens = new TokenRewriteStream(lexEngine); in test2ReplaceMiddleIndex() local
    252 TokenRewriteStream tokens = new TokenRewriteStream(lexEngine); in test2ReplaceMiddleIndex1InsertBefore() local
    [all …]
|
D | TestCommonTokenStream.java |
    54 BufferedTokenStream tokens = new BufferedTokenStream(lexEngine); in testFirstToken() local
    75 BufferedTokenStream tokens = new BufferedTokenStream(lexEngine); in test2ndToken() local
    96 BufferedTokenStream tokens = new BufferedTokenStream(lexEngine); in testCompleteBuffer() local
    126 BufferedTokenStream tokens = new BufferedTokenStream(lexEngine); in testCompleteBufferAfterConsuming() local
    157 BufferedTokenStream tokens = new BufferedTokenStream(lexEngine); in testLookback() local
    204 CommonTokenStream tokens = new CommonTokenStream(lexer); in testOffChannel() local
|
D | TestInterpretedParsing.java |
    68 FilteringTokenStream tokens = new FilteringTokenStream(lexEngine); in testSimpleParse() local
    103 FilteringTokenStream tokens = new FilteringTokenStream(lexEngine); in testMismatchedTokenError() local
    138 FilteringTokenStream tokens = new FilteringTokenStream(lexEngine); in testMismatchedSetError() local
    173 FilteringTokenStream tokens = new FilteringTokenStream(lexEngine); in testNoViableAltError() local
|
/external/minijail/tools/ |
D | parser.py |
    290 def _parse_constant(self, tokens): argument
    315 def parse_value(self, tokens): argument
    344 def _parse_atom(self, tokens): argument
    365 def _parse_clause(self, tokens): argument
    378 def parse_argument_expression(self, tokens): argument
    402 def _parse_default_action(self, tokens): argument
    432 def parse_action(self, tokens): argument
    476 def _parse_single_filter(self, tokens): argument
    494 def parse_filter(self, tokens): argument
    515 def _parse_key_value_pair(self, tokens): argument
    [all …]
|
/external/libtextclassifier/native/utils/ |
D | tokenizer-utils_test.cc |
    26 std::vector<Token> tokens = in TEST() local
    69 std::vector<Token> tokens = TokenizeOnDelimiters( in TEST() local
    100 std::vector<Token> tokens = TokenizeOnDelimiters( in TEST() local
    146 std::vector<Token> tokens = in TEST() local
    158 std::vector<Token> tokens = in TEST() local
    169 std::vector<Token> tokens = in TEST() local
    178 std::vector<Token> tokens = in TEST() local
    192 std::vector<Token> tokens = in TEST() local
|
D | tokenizer_test.cc |
    178 std::vector<Token> tokens = tokenizer.Tokenize("Hello world!"); in TEST() local
    349 std::vector<Token> tokens; in TEST() local
    379 std::vector<Token> tokens = tokenizer.Tokenize("พระบาท สมเด็จ พระ ปร มิ"); in TEST() local
    399 std::vector<Token> tokens = in TEST() local
    427 std::vector<Token> tokens = tokenizer.Tokenize("3.1 3﹒2 3.3"); in TEST() local
    445 std::vector<Token> tokens = tokenizer.Tokenize("พระบาทสมเด็จพระปรมิ"); in TEST() local
    495 std::vector<Token> tokens = tokenizer.Tokenize( in TEST() local
    548 std::vector<Token> tokens = tokenizer.Tokenize("7% -3.14 68.9#? 7% $99 .18."); in TEST() local
    565 std::vector<Token> tokens = tokenizer.Tokenize("2 pércént 3パーセント"); in TEST() local
    577 std::vector<Token> tokens = tokenizer.Tokenize("3 3﹒2 3.3%"); in TEST() local
    [all …]
|
/external/libtextclassifier/native/annotator/duration/ |
D | duration_test.cc |
    194 std::vector<Token> tokens = Tokenize(text); in TEST_F() local
    214 std::vector<Token> tokens = Tokenize(text); in TEST_F() local
    226 std::vector<Token> tokens = Tokenize(text); in TEST_F() local
    246 std::vector<Token> tokens = Tokenize(text); in TEST_F() local
    266 std::vector<Token> tokens = Tokenize(text); in TEST_F() local
    286 std::vector<Token> tokens = Tokenize(text); in TEST_F() local
    306 std::vector<Token> tokens = Tokenize(text); in TEST_F() local
    326 std::vector<Token> tokens = Tokenize(text); in TEST_F() local
    347 std::vector<Token> tokens = Tokenize(text); in TEST_F() local
    367 std::vector<Token> tokens = Tokenize(text); in TEST_F() local
    [all …]
|
/external/libtextclassifier/native/utils/grammar/parsing/ |
D | lexer_test.cc |
    88 std::vector<Token> tokens = tokenizer_.Tokenize("This is a word"); in TEST_F() local
    97 std::vector<Token> tokens = tokenizer_.Tokenize("1234This a4321cde"); in TEST_F() local
    107 std::vector<Token> tokens = tokenizer_.Tokenize("10/18/2014"); in TEST_F() local
    117 std::vector<Token> tokens = tokenizer_.Tokenize("电话:0871—6857(曹"); in TEST_F() local
    130 std::vector<Token> tokens = tokenizer_.Tokenize("电话 :0871—6857(曹"); in TEST_F() local
    143 std::vector<Token> tokens = in TEST_F() local
    158 std::vector<Token> tokens = tokenizer_.Tokenize("The+2345++the +"); in TEST_F() local
|
/external/pdfium/samples/ |
D | pdfium_test_event_helper.cc |
    32 const std::vector<std::string>& tokens) { in SendCharCodeEvent()
    44 const std::vector<std::string>& tokens) { in SendKeyCodeEvent()
    58 const std::vector<std::string>& tokens) { in SendMouseDownEvent()
    78 const std::vector<std::string>& tokens) { in SendMouseUpEvent()
    97 const std::vector<std::string>& tokens) { in SendMouseDoubleClickEvent()
    115 const std::vector<std::string>& tokens) { in SendMouseMoveEvent()
    128 const std::vector<std::string>& tokens) { in SendMouseWheelEvent()
    144 const std::vector<std::string>& tokens) { in SendFocusEvent()
    166 auto tokens = StringSplit(command[0], ','); in SendPageEvents() local
|
/external/cronet/net/third_party/quiche/src/quiche/blind_sign_auth/ |
D | cached_blind_sign_auth_test.cc |
    73 &done](absl::StatusOr<absl::Span<const std::string>> tokens) { in TEST_F()
    103 &first](absl::StatusOr<absl::Span<const std::string>> tokens) { in TEST_F()
    119 &second](absl::StatusOr<absl::Span<const std::string>> tokens) { in TEST_F()
    152 &first](absl::StatusOr<absl::Span<const std::string>> tokens) { in TEST_F()
    168 &second](absl::StatusOr<absl::Span<const std::string>> tokens) { in TEST_F()
    198 &first](absl::StatusOr<absl::Span<const std::string>> tokens) { in TEST_F()
    214 &second](absl::StatusOr<absl::Span<const std::string>> tokens) { in TEST_F()
    231 &third](absl::StatusOr<absl::Span<const std::string>> tokens) { in TEST_F()
    252 [](absl::StatusOr<absl::Span<const std::string>> tokens) { in TEST_F()
    270 [num_tokens](absl::StatusOr<absl::Span<const std::string>> tokens) { in TEST_F()
    [all …]
|
/external/libtextclassifier/native/annotator/ |
D | feature-processor_test.cc |
    87 std::vector<Token> tokens{Token("Hělló", 0, 5), in TEST_F() local
    104 std::vector<Token> tokens{Token("Hělló", 0, 5), in TEST_F() local
    120 std::vector<Token> tokens{Token("Hělló", 0, 5), in TEST_F() local
    136 std::vector<Token> tokens{Token("Hělló", 0, 5), in TEST_F() local
    152 std::vector<Token> tokens{Token("Hělló", 0, 5), in TEST_F() local
    179 std::vector<Token> tokens = {Token("Fiřst", 0, 5), in TEST_F() local
    204 std::vector<Token> tokens = {Token("Fiřst", 0, 5), in TEST_F() local
    229 std::vector<Token> tokens = {Token("Fiřst", 0, 5), in TEST_F() local
    254 std::vector<Token> tokens = {Token("Fiřst", 0, 5), in TEST_F() local
    281 std::vector<Token> tokens = {Token("Fiřst", 0, 5), in TEST_F() local
    [all …]
|
/external/jackson-databind/src/main/java/com/fasterxml/jackson/databind/type/ |
D | TypeParser.java |
    32 MyTokenizer tokens = new MyTokenizer(canonical.trim()); in parse() local
    41 protected JavaType parseType(MyTokenizer tokens) in parseType()
    63 protected List<JavaType> parseTypes(MyTokenizer tokens) in parseTypes()
    79 protected Class<?> findClass(String className, MyTokenizer tokens) in findClass()
    89 protected IllegalArgumentException _problem(MyTokenizer tokens, String msg) in _problem()
|
/external/mesa3d/src/gallium/auxiliary/tgsi/ |
D | tgsi_parse.c |
    36 const struct tgsi_token *tokens ) in tgsi_parse_init()
    279 tgsi_dup_tokens(const struct tgsi_token *tokens) in tgsi_dup_tokens()
    305 tgsi_free_tokens(const struct tgsi_token *tokens) in tgsi_free_tokens()
    312 tgsi_dump_tokens(const struct tgsi_token *tokens) in tgsi_dump_tokens()
    327 tgsi_get_processor_type(const struct tgsi_token *tokens) in tgsi_get_processor_type()
|
/external/rust/crates/syn/tests/regression/ |
D | issue1235.rs |
    8 let tokens = quote! { in main() localVariable
    20 let tokens = quote!(pub #inner;); in main() localVariable
    29 let tokens = quote!(pub #inner;); in main() localVariable
|
/external/mesa3d/src/gallium/drivers/nouveau/ |
D | nouveau_compiler.c |
    38 nv30_fp(int chipset, struct tgsi_token tokens[], in nv30_fp()
    51 nv30_vp(int chipset, struct tgsi_token tokens[], in nv30_vp()
    65 nv30_codegen(int chipset, int type, struct tgsi_token tokens[], in nv30_codegen()
    105 nouveau_codegen(int chipset, int type, struct tgsi_token tokens[], in nouveau_codegen()
    142 struct tgsi_token tokens[4096]; in main() local
|
/external/rust/crates/syn/benches/ |
D | file.rs |
    38 let tokens = get_tokens(); in baseline() localVariable
    44 let tokens = get_tokens(); in create_token_buffer() localVariable
    53 let tokens = get_tokens(); in parse_file() localVariable
|
/external/libtextclassifier/native/annotator/number/ |
D | number.cc |
    79 bool NumberAnnotator::TokensAreValidStart(const std::vector<Token>& tokens, in TokensAreValidStart()
    88 const std::vector<Token>& tokens, const int prefix_end_index) const { in TokensAreValidNumberPrefix() argument
    122 bool NumberAnnotator::TokensAreValidEnding(const std::vector<Token>& tokens, in TokensAreValidEnding()
    146 const std::vector<Token>& tokens, const int suffix_start_index) const { in TokensAreValidNumberSuffix() argument
    181 const std::vector<Token>& tokens, in FindPercentSuffixEndCodepoint()
    227 const std::vector<Token> tokens = tokenizer_.Tokenize(context); in FindAll() local
|
/external/ply/ply/ply/ |
D | cpp.py |
    26 tokens = ( variable
    314 def tokenstrip(self,tokens): argument
    489 def expand_macros(self,tokens,expanded=None): argument
    561 def evalexpr(self,tokens): argument
    751 def include(self,tokens): argument
    800 def define(self,tokens): argument
    870 def undef(self,tokens): argument
|
/external/python/pycparser/pycparser/ply/ |
D | cpp.py |
    24 tokens = ( variable
    312 def tokenstrip(self,tokens): argument
    486 def expand_macros(self,tokens,expanded=None): argument
    552 def evalexpr(self,tokens): argument
    742 def include(self,tokens): argument
    791 def define(self,tokens): argument
    861 def undef(self,tokens): argument
|
/external/mesa3d/src/gallium/drivers/radeonsi/ |
D | si_shaderlib_tgsi.c |
    481 struct tgsi_token tokens[1024]; in si_create_query_result_cs() local
    524 struct tgsi_token tokens[1024]; in si_create_copy_image_compute_shader() local
    561 struct tgsi_token tokens[1024]; in si_create_copy_image_compute_shader_1d_array() local
    600 struct tgsi_token tokens[1024]; in si_create_dcc_decompress_cs() local
    634 struct tgsi_token tokens[1024]; in si_clear_render_target_shader() local
    669 struct tgsi_token tokens[1024]; in si_clear_render_target_shader_1d_array() local
    700 struct tgsi_token tokens[1024]; in si_clear_12bytes_buffer_shader() local
    983 struct tgsi_token tokens[1024]; in gfx10_create_sh_query_result_cs() local
|