/*
 * Copyright (C) 2017 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package lexer

import java.text.ParseException
import java.util.*

object HidlLexer : ILexer {

    /**
     * Given the text of a HIDL file, return a list of tokens.
     * The scanner moves forward only, but can check queued tokens.
     */
    override fun tokenize(str: String): List<Token> {
        val tokens = mutableListOf<Token>()
        // match a line up to '*/', capturing anything after it on the same line
        val matchToDocEnd = Regex("""(.*)\s+${Regex.escape(TokenGrammar.DOC_END.value)}(.*)$""")

        // pad delimiter tokens so the scanner picks them up
        val paddedStr = ILexer.padDelimiters(str)

        Scanner(paddedStr).use { scanner ->
            while (scanner.hasNext()) {
                val token = scanner.next()

                when (token) {
                    // strip line comments
                    TokenGrammar.COMMENT_LINE.value -> scanner.nextLine()

                    // strip block comments -- jump to the comment close
                    TokenGrammar.COMMENT_START.value -> {
                        if (scanner.findWithinHorizon(Regex.escape(TokenGrammar.DOC_END.value), 0) == null) {
                            throw ParseException("Unable to find closing comment marker", tokens.lastIndex)
                        }
                    }

                    // slurp the text between /** and */ into a string,
                    // tokenize that string with the doc comment lexer,
                    // and append those tokens to the rest of the file's tokens
                    TokenGrammar.DOC_START.value -> {
                        tokens.add(TokenGrammar.newToken(token)) // doc_start
                        // slurp everything until doc_end into a string, preserving newline formatting
                        val sb = StringBuilder()
                        while (scanner.hasNextLine()) {
                            val line = scanner.nextLine()

                            val matches = matchToDocEnd.find(line)?.groups
                            if (matches != null) {
                                if (matches[2]!!.value.isNotBlank()) {
                                    throw ParseException("No text allowed after '*/' on the same line: ${line}", 0)
                                }
                                // found doc_end
                                sb.append(matches[1]!!.value)
                                break
                            } else {
                                sb.appendln(line)
                            }
                        }
                        // tokenize the comment string and append all of its tokens
                        tokens += DocLexer.tokenize(sb.toString())
                        tokens.add(TokenGrammar.newToken(TokenGrammar.DOC_END.value)) // doc_end
                    }

                    TokenGrammar.AT.value -> {
                        tokens.add(TokenGrammar.newToken(token)) // '@'
                        // determine whether this is part of an annotation tag
                        for (annotation in TokenGrammar.annotations()) {
                            if (scanner.hasNext(annotation.value)) {
                                scanner.next() // annotation tag
                                val annotationArgs = StringBuilder()
                                // capture any args: (...)
                                if (scanner.hasNext(Regex.escape(TokenGrammar.PAREN_OPEN.value))) {
                                    // stop at ')' or when the scanner runs out, so a missing
                                    // close paren is reported as a ParseException below
                                    while (scanner.hasNext() && !scanner.hasNext(Regex.escape(TokenGrammar.PAREN_CLOSE.value))) {
                                        annotationArgs.append(scanner.next()).append(" ")
                                    }
                                    if (!scanner.hasNext()) {
                                        throw ParseException("Unable to find closing annotation paren", tokens.lastIndex)
                                    }
                                    annotationArgs.append(scanner.next()) // ')'
                                }
                                // all annotation args are embedded in the token's value
                                tokens.add(TokenGrammar.newToken(identifier = annotation, value = annotationArgs.toString()))
                            }
                        }
                    }

                    else -> tokens.add(TokenGrammar.newToken(token))
                }
            }
        }
        return tokens.toList()
    }
}
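
// A minimal usage sketch, assuming the companion types in this package (ILexer,
// Token, TokenGrammar, DocLexer) are available on the classpath; the HIDL snippet
// below is a hypothetical example, not taken from a real interface.
private fun exampleTokenize() {
    val hidlSource = """
        package android.hardware.example@1.0;

        interface IExample {
            /** Returns a greeting for the caller. */
            getGreeting() generates (string greeting);
        };
    """.trimIndent()

    // Tokenize the snippet and print each token; the doc comment body is routed
    // through DocLexer by tokenize() above.
    val tokens: List<Token> = HidlLexer.tokenize(hidlSource)
    tokens.forEach { println(it) }
}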