• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
/*
 * Copyright (C) 2017 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
16 
package parser.files

import lexer.Token
import lexer.TokenCategory
import lexer.TokenGrammar
import parser.elements.EntryCollectionParser
import parser.elements.EntryParser
import parser.elements.declarations.CompoundDeclarationParser
import parser.elements.declarations.EnumDeclarationParser
import parser.elements.declarations.TypedefDeclarationParser
import parser.peekPreviousToken
import parser.peekToken
import java.text.ParseException
30 
/**
 * Parses package info and all entries (determined by doc start/end tokens).
 * Adds empty doc tokens for required types.
 * This class shouldn't be instantiated on its own.
 */
abstract class AbstractFileParser(tokens: List<Token>) {

    // Lazily parsed package declaration. NOTE(review): parsePackageInfo is
    // resolved from the surrounding package — assumed to parse name/version
    // out of the token stream; confirm against its definition.
    private val packageInfo: PackageInfo by lazy { parsePackageInfo(tokens) }

    /** Display name of the parsed file, supplied by concrete subclasses. */
    abstract val name: String
    val packageName: String get() = packageInfo.name
    val packageVersion: Float get() = packageInfo.version

    /**
     * All parsed entries, in source order. Before parsing, empty doc blocks
     * are inserted ahead of undocumented required types so they still produce
     * entries.
     */
    protected val entries: List<EntryParser> by lazy {
        assert(tokens.isNotEmpty())
        // Add empty docblocks for undocumented required types.
        EntryCollectionParser(insertDocsForRequiredTypes(tokens)).entryParsers
    }

    val enums: List<EntryParser> by lazy { getEntriesByDeclarationParser<EnumDeclarationParser>() }
    val typedefs: List<EntryParser> by lazy { getEntriesByDeclarationParser<TypedefDeclarationParser>() }
    val structs: List<EntryParser> by lazy { getEntriesByCompoundDeclarationParser(TokenGrammar.STRUCT) }
    val unions: List<EntryParser> by lazy { getEntriesByCompoundDeclarationParser(TokenGrammar.UNION) }

    /** Returns all entries whose declaration parser is of type [T]. */
    protected inline fun <reified T> getEntriesByDeclarationParser(): List<EntryParser> {
        return entries.filter { it.declarationParser is T }
    }

    /** Returns compound entries (struct/union) whose type matches [identifier]. */
    private fun getEntriesByCompoundDeclarationParser(identifier: TokenGrammar): List<EntryParser> {
        return getEntriesByDeclarationParser<CompoundDeclarationParser>()
                .filter { (it.declarationParser as CompoundDeclarationParser).type == identifier }
    }

    /**
     * Insert doc block before the undocumented types we want to show up.
     *
     * Walks [tokens] once; whenever a keyword from [REQUIRED_DOC_TYPES] is seen
     * outside a doc block and is not already documented, an empty
     * DOC_START/DOC_END pair is inserted just before the keyword (and before
     * any annotations preceding it).
     *
     * @return a new token list with the extra doc tokens inserted.
     */
    private fun insertDocsForRequiredTypes(tokens: List<Token>): List<Token> {
        val tokensCopy = mutableListOf<Token>()
        val iter = tokens.listIterator()
        var inDoc = false

        while (iter.hasNext()) {
            val token = iter.next()
            tokensCopy.add(token)

            if (token.identifier == TokenGrammar.DOC_START) {
                inDoc = true
                continue
            } else if (token.identifier == TokenGrammar.DOC_END) {
                inDoc = false
                continue

            } else if (!inDoc && token.identifier in REQUIRED_DOC_TYPES) {
                // Make sure it's not a reference to a generic: <name>
                if (peekToken(iter)?.identifier == TokenGrammar.CHEVRON_CLOSE) {
                    continue
                }

                val idx = indexInsertionPointForDocTokens(tokensCopy)
                if (idx != -1) {
                    // Peel the keyword (and any annotations) off the tail so
                    // the doc tokens land in front of them.
                    val removedTokens = mutableListOf<Token>()
                    repeat(idx) {
                        removedTokens.add(tokensCopy.removeAt(tokensCopy.lastIndex))
                    }
                    tokensCopy.add(TokenGrammar.newToken(TokenGrammar.DOC_START.value))
                    tokensCopy.add(TokenGrammar.newToken(TokenGrammar.DOC_END.value))
                    // Re-append the peeled tokens in their original order.
                    tokensCopy.addAll(removedTokens.asReversed())
                }
            }
        }
        return tokensCopy.toList()
    }

    /**
     * @return -1 if documented, otherwise the index count backwards for where
     *          to begin insertion of doc tokens (the keyword plus any
     *          annotations directly preceding it).
     * @throws ParseException if no non-annotation token precedes the keyword.
     */
    private fun indexInsertionPointForDocTokens(tokens: List<Token>): Int {
        val iter = tokens.reversed().listIterator()
        var idx = 0

        iter.next() // skip keyword token
        while (iter.hasNext()) {
            val token = iter.next()
            if (token.identifier == TokenGrammar.AT || token.category == TokenCategory.Annotation) {
                idx++
                continue // skip annotations
            } else {
                // +1 accounts for the keyword token skipped above.
                return if (token.identifier == TokenGrammar.DOC_END) -1 else idx + 1
            }
        }
        throw ParseException("Empty token list", 0)
    }

    private companion object {
        /** Type keywords that must carry a doc block to appear in the output. */
        val REQUIRED_DOC_TYPES = listOf(
                TokenGrammar.INTERFACE,
                TokenGrammar.ENUM,
                TokenGrammar.STRUCT,
                TokenGrammar.UNION,
                TokenGrammar.TYPEDEF)
    }
}