Home
last modified time | relevance | path

Searched refs:tokens (Results 1 – 25 of 36) sorted by relevance

12

/system/bt/service/ipc/
Dlinux_ipc_host.cc271 std::vector<std::string> tokens = base::SplitString( in OnMessage() local
273 switch (tokens.size()) { in OnMessage()
275 if (tokens[0] == kSetAdapterNameCommand) in OnMessage()
276 return OnSetAdapterName(tokens[1]); in OnMessage()
277 if (tokens[0] == kCreateServiceCommand) return OnCreateService(tokens[1]); in OnMessage()
278 if (tokens[0] == kDestroyServiceCommand) in OnMessage()
279 return OnDestroyService(tokens[1]); in OnMessage()
280 if (tokens[0] == kStartServiceCommand) return OnStartService(tokens[1]); in OnMessage()
281 if (tokens[0] == kStopServiceCommand) return OnStopService(tokens[1]); in OnMessage()
284 if (tokens[0] == kSetCharacteristicValueCommand) in OnMessage()
[all …]
/system/tools/hidl/docs/src/parser/elements/declarations/
DTypedefDeclarationParser.kt31 override fun parseTokens(tokens: List<Token>) { in parseTokens()
32 assert(tokens.isNotEmpty()) in parseTokens()
33 assert(tokens.first().identifier == TokenGrammar.TYPEDEF) in parseTokens()
34 assert(tokens.last().identifier == TokenGrammar.SEMICOLON) in parseTokens()
36 this.name = tokens.get(tokens.size - 2).value in parseTokens()
37 this.type = tokens.subList(1, tokens.size - 2).map { it.value }.joinToString("") in parseTokens()
DEnumDeclarationParser.kt38 override fun parseTokens(tokens: List<Token>) { in parseTokens()
39 val iter = tokens.listIterator() in parseTokens()
42 assert(tokens.last().identifier == TokenGrammar.SEMICOLON) in parseTokens()
89 class EnumMember(tokens: List<Token>) {
95 assert(tokens.isNotEmpty())
96 this.name = tokens.first().value
99 if (tokens.any { it.identifier == TokenGrammar.EQUAL }) { in <lambda>()
100 this.value = tokens.takeLastWhile { it.identifier != TokenGrammar.EQUAL } in <lambda>()
DCompoundDeclarationParser.kt39 override fun parseTokens(tokens: List<Token>) { in parseTokens()
40 val iter = tokens.listIterator() in parseTokens()
43 assert(tokens.last().identifier == TokenGrammar.SEMICOLON) in parseTokens()
85 tokens = statementTokens.subList(2, statementTokens.size-1) in parseTokens()
94 tokens = statementTokens in parseTokens()
102 tokens = statementTokens in parseTokens()
114 val tokens: List<Token> //TODO: doesn't seem needed constant
120 override val tokens: List<Token>) : IMemberDeclaration constant in MemberDeclaration
125 override val tokens: List<Token>, constant in CompoundMemberDeclaration
DInterfaceDeclarationParser.kt35 override fun parseTokens(tokens: List<Token>) { in parseTokens()
36 assert(tokens.isNotEmpty()) in parseTokens()
37 assert(tokens.first().identifier == TokenGrammar.INTERFACE) in parseTokens()
38 assert(tokens.last().identifier == TokenGrammar.SEMICOLON) in parseTokens()
41 val sigToks = tokens.takeWhile { it.identifier != TokenGrammar.BRACE_OPEN } in parseTokens()
DMethodDeclarationParser.kt40 override fun parseTokens(tokens: List<Token>) { in parseTokens()
41 assert(tokens.last().identifier == TokenGrammar.SEMICOLON) in parseTokens()
42 val iter = tokens.listIterator() in parseTokens()
/system/tools/hidl/docs/src/lexer/
DHidlLexer.kt29 val tokens = mutableListOf<Token>() in <lambda>() constant
47 … throw ParseException("Unable to find closing comment marker", tokens.lastIndex) in <lambda>()
55 tokens.add(TokenGrammar.newToken(token)) //doc_start in <lambda>()
74 tokens += DocLexer.tokenize(sb.toString()) in <lambda>()
75 tokens.add(TokenGrammar.newToken(TokenGrammar.DOC_END.value)) //doc_end in <lambda>()
79 tokens.add(TokenGrammar.newToken(token)) //'@' in <lambda>()
91 … throw ParseException("Unable to find closing annotation paren", tokens.lastIndex) in <lambda>()
96	          tokens.add(TokenGrammar.newToken(identifier = annotation, value = annotationArgs.toString())) in <lambda>()
101 else -> tokens.add(TokenGrammar.newToken(token)) in <lambda>()
105 return tokens.toList() in <lambda>()
DDocLexer.kt27 val tokens = mutableListOf<Token>() in <lambda>() constant
44 … TokenGrammar.EMPTY_LINE.value -> tokens.add(TokenGrammar.newToken("", TokenGrammar.EMPTY_LINE)) in <lambda>()
48 tokens.add(TokenGrammar.newToken(token)) //'@' in <lambda>()
52 tokens.add(TokenGrammar.newToken(scanner.next())) in <lambda>()
61 tokens.add(TokenGrammar.newToken(token, category)) in <lambda>()
68 return tokens.toList() in <lambda>()
/system/tools/hidl/docs/src/parser/elements/
DEntryCollectionParser.kt34 constructor(tokens: List<Token>) : this(tokens.listIterator())
43 val tokens = mutableListOf<Token>() in scanTokens() constant
45 tokens.add(iter.next()) in scanTokens()
47 return tokens in scanTokens()
50 override fun parseTokens(tokens: List<Token>) { in parseTokens()
51 val iter = tokens.listIterator() in parseTokens()
DAbstractParser.kt44 abstract fun parseTokens(tokens: List<Token>) in parseTokens()
52 val tokens = mutableListOf<Token>() in parseTokens() constant
64 tokens.add(iter.next()) //doc_start in parseTokens()
68 tokens.add(token) in parseTokens()
79 return tokens in parseTokens()
86 val tokens = mutableListOf<Token>() in scanDeclarationTokens() constant
93 tokens.add(token) in scanDeclarationTokens()
111 assert(tokens.last().identifier == TokenGrammar.SEMICOLON) in scanDeclarationTokens()
112 return tokens in scanDeclarationTokens()
DDocAnnotationParser.kt39 private fun formatValue(tokens: List<Token>): String { in formatValue()
40 return if (tokens.isEmpty()) { in formatValue()
43 tokens.map { in formatValue()
58 val tokens = mutableListOf<Token>() in scanTokens() constant
82 else -> tokens.add(token) in scanTokens()
85 return tokens in scanTokens()
88 override fun parseTokens(tokens: List<Token>) { in parseTokens()
89 val iter = tokens.listIterator() in parseTokens()
DAnnotationParser.kt37 val tokens = mutableListOf<Token>() in scanTokens() constant
46 tokens.add(iter.next()) in scanTokens()
47 return tokens in scanTokens()
50 override fun parseTokens(tokens: List<Token>) { in parseTokens()
51 val iter = tokens.listIterator() in parseTokens()
DDocParser.kt37 private fun formatDescription(tokens: List<Token>): String { in formatDescription()
38 return tokens in formatDescription()
56 override fun parseTokens(tokens: List<Token>) { in parseTokens()
57 val iter = tokens.listIterator() in parseTokens()
61 assert(tokens.last().identifier == TokenGrammar.DOC_END) in parseTokens()
DEntryParser.kt80 val tokens = mutableListOf<Token>() in scanTokens() constant
82 tokens += scanDocTokens(iter) in scanTokens()
84 tokens += scanDeclarationTokens(iter) in scanTokens()
85 return tokens in scanTokens()
88 override fun parseTokens(tokens: List<Token>) { in parseTokens()
89 val iter = tokens.listIterator() in parseTokens()
/system/tools/hidl/docs/src/
Dmain.kt40 val tokens = HidlLexer.tokenize(fp) in main() constant
41 val (parser, writer) = parseAndGetWriter(tokens) in main()
73 fun parseAndGetWriter(tokens: List<Token>): Pair<AbstractFileParser, AbstractParserFileWriter> { in parseAndGetWriter()
76 if (InterfaceFileParser.isInterface(tokens)) { in parseAndGetWriter()
77 parser = InterfaceFileParser(tokens) in parseAndGetWriter()
80 parser = TypesFileParser(tokens) in parseAndGetWriter()
/system/tools/hidl/docs/src/parser/files/
DAbstractFileParser.kt36 abstract class AbstractFileParser(tokens: List<Token>) {
38 private val packageInfo: PackageInfo by lazy { parsePackageInfo(tokens) } in <lambda>()
45 assert(tokens.isNotEmpty()) in <lambda>()
47 EntryCollectionParser(insertDocsForRequiredTypes(tokens)).entryParsers in <lambda>()
74 private fun insertDocsForRequiredTypes(tokens: List<Token>): List<Token> { in insertDocsForRequiredTypes()
76 val iter = tokens.listIterator() in insertDocsForRequiredTypes()
116 private fun indexInsertionPointforDocTokens(tokens: List<Token>): Int { in indexInsertionPointforDocTokens()
117 val iter = tokens.reversed().listIterator() in indexInsertionPointforDocTokens()
DInterfaceFileParser.kt27 class InterfaceFileParser(tokens: List<Token>) : AbstractFileParser(tokens) {
57 fun isInterface(tokens: List<Token>): Boolean { in isInterface()
58 val iter = tokens.listIterator() in isInterface()
DTypesFileParser.kt21 class TypesFileParser(tokens: List<Token>) : AbstractFileParser(tokens) {
Dpackage.kt30 fun parsePackageInfo(tokens: List<Token>): PackageInfo { in parsePackageInfo()
31 val iter: ListIterator<Token> = tokens.listIterator() in parsePackageInfo()
/system/core/init/test_service/
Dtest_service.cpp51 std::vector<std::string> tokens = android::base::Split(line, ":"); in main() local
52 if (tokens.size() >= 2) { in main()
53 std::string field = tokens[0]; in main()
54 std::string value = android::base::Trim(tokens[1]); in main()
/system/tools/hidl/docs/src/writer/
Dformatutils.kt32 fun tokenValues(tokens: List<Token>): String { in tokenValues()
33 return tokens.map { it.value }.joinToString("|") in tokenValues()
/system/extras/simpleperf/scripts/inferno/
Dinferno.py66 tokens = args.events.split()
67 if len(tokens) == 2:
68 num_events = tokens[0]
69 event_name = tokens[1]
/system/sepolicy/prebuilts/api/28.0/public/
Dfingerprintd.te18 # Need to add auth tokens to KeyStore
/system/sepolicy/public/
Dfingerprintd.te18 # Need to add auth tokens to KeyStore
/system/sepolicy/prebuilts/api/26.0/public/
Dfingerprintd.te18 # Need to add auth tokens to KeyStore

12