/*
 * Copyright (C) 2017 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package parser.elements.declarations

import lexer.Token
import lexer.TokenCategory
import lexer.TokenGrammar
import parser.elements.AbstractParser
import parser.peekPreviousToken
import parser.peekToken
import java.text.ParseException

//can be a method, struct, enum, typedef
abstract class AbstractDeclarationParser(iter: ListIterator<Token>) : AbstractParser(iter) {

    abstract var name: String //user-specified name of the declaration

    //doc should already be segmented off in EntryParser; the remaining tokens are the declaration, which may be nested
    override fun scanTokens(iter: ListIterator<Token>): List<Token> {
        val token = peekPreviousToken(iter) ?: throw ParseException("No token before declaration", this.indexStart)

        if (token.category != TokenCategory.Annotation && token.identifier != TokenGrammar.DOC_END)
            throw ParseException("Invalid declaration start", this.indexStart)

        return scanDeclarationTokens(iter)
    }

    /**
     * Takes a delimiter-separated list and splits its entries into a list of List<Token>.
     * Nested lists that use the same open/close delimiters are kept inside their entry.
     * Used for method param lists, enum members, struct members, etc.
     */
    protected fun scanDelimitedList(iter: ListIterator<Token>,
                                    delimiter: TokenGrammar = TokenGrammar.COMMA,
                                    openDelimiter: TokenGrammar = TokenGrammar.PAREN_OPEN,
                                    closeDelimiter: TokenGrammar = TokenGrammar.PAREN_CLOSE): List<List<Token>> {
        val allFields = mutableListOf<List<Token>>() //top-level list
        //back up to the opening delimiter if the iterator has already passed it
        if (iter.hasPrevious() && peekPreviousToken(iter)!!.identifier == openDelimiter) {
            iter.previous()
        }
        var token = iter.next()
        if (token.identifier != openDelimiter)
            throw ParseException("Expected list start '${openDelimiter}', but got '${token.identifier}'", this.indexStart)

        // collect tokens between the open/close delimiters; fields are separated by 'delimiter'.
        // delimiters that are nested or inside doc comments are ignored
        while (iter.hasNext()) {
            token = peekToken(iter)!! //iter.next()

            if (token.identifier == closeDelimiter) {
                iter.next()
                break
            } else if (token.identifier == delimiter) {
                iter.next()
                continue //skip
            } else {
                //start field entry
                val fieldTokens = mutableListOf<Token>()
                var inDoc = false
                var nestLevel = 0

                while (iter.hasNext()) {
                    token = iter.next()

                    if (token.identifier == TokenGrammar.DOC_START) {
                        inDoc = true
                    } else if (token.identifier == TokenGrammar.DOC_END) {
                        inDoc = false
                    }

                    //check for end of field
                    if ((token.identifier == delimiter || token.identifier == closeDelimiter) && nestLevel == 0 && !inDoc) {
                        break
                    } else {
                        fieldTokens.add(token)
                    }

                    if (token.identifier == openDelimiter) {
                        nestLevel++
                    } else if (token.identifier == closeDelimiter && nestLevel > 0) {
                        nestLevel--
                    }
                }

                //add entry
                allFields.add(fieldTokens)
                //check for end of list
                if (token.identifier == closeDelimiter && nestLevel == 0) {
                    break
                }
            }
        }

        if (!iter.hasPrevious() || peekPreviousToken(iter)!!.identifier != closeDelimiter) {
            throw ParseException("Didn't find closing '${closeDelimiter.value}' for list, found '${peekPreviousToken(iter)!!.value}'", this.indexStart)
        }
        return allFields
    }
}
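
// Usage sketch: a concrete declaration parser splits a parenthesized, comma-separated region of
// the token stream into one List<Token> per entry by calling scanDelimitedList() with the default
// delimiters. The subclass name ArgumentListParser and parseArgs() below are hypothetical, and any
// further abstract members inherited from AbstractParser are omitted for brevity.
//
//    class ArgumentListParser(iter: ListIterator<Token>) : AbstractDeclarationParser(iter) {
//        override var name = "args" //hypothetical placeholder
//
//        //e.g. for "(int32_t width, vec<uint8_t> data)" this yields two token lists, one per
//        //parameter; nested '(' ... ')' and doc comments stay inside their entry
//        fun parseArgs(iter: ListIterator<Token>): List<List<Token>> =
//                scanDelimitedList(iter) //COMMA within PAREN_OPEN/PAREN_CLOSE by default
//    }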