/*
 * Copyright (C) 2017 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package parser.elements

import lexer.ILexer
import lexer.Token
import lexer.TokenCategory
import lexer.TokenGrammar
import parser.peekToken

/**
 * Parses a documentation comment block (DOC_START ... DOC_END) from the token
 * stream into a plain-text description plus one [DocAnnotationParser] per
 * `@`-prefixed doc annotation found in the block.
 *
 * Parsing happens eagerly in the init block; the iterator is advanced past the
 * DOC_END token. When [shouldResetIterator] is true, the iterator is rewound
 * afterwards so callers can re-scan the same tokens.
 */
class DocParser(iter: ListIterator<Token>, var shouldResetIterator: Boolean = false) : AbstractParser(iter) {

    /** The doc block's free-text description, assembled on first access. */
    val description: String by lazy { formatDescription(this.descTokens) }

    /** One parser per doc annotation, in source order. */
    var docAnnotationParsers = mutableListOf<DocAnnotationParser>()

    // Description tokens collected between DOC_START and DOC_END (annotations
    // excluded). Only ever assigned here, so it is a val.
    private val descTokens = mutableListOf<Token>()

    init {
        parseTokens(scanTokens(iter))
        if (shouldResetIterator) resetIterator(iter)
    }

    /**
     * Joins description tokens into display text: strips the DOC_START and
     * DOC_END markers, renders EMPTY_LINE tokens as paragraph breaks ("\n\n"),
     * and normalizes spacing around delimiters via [ILexer.unpadDelimiters].
     */
    private fun formatDescription(tokens: List<Token>): String {
        return tokens
                .filterNot {
                    it.identifier == TokenGrammar.DOC_START || it.identifier == TokenGrammar.DOC_END
                }
                .joinToString(" ") { token ->
                    if (token.identifier == TokenGrammar.EMPTY_LINE) "\n\n" else token.value
                }
                .let { ILexer.unpadDelimiters(it) }
    }

    override fun scanTokens(iter: ListIterator<Token>): List<Token> {
        // Keep the doc_start and doc_end tokens: /** ... */
        return scanDocTokens(iter)
    }

    /**
     * Walks the scanned doc block. Tokens before the first annotation are
     * accumulated as description text; each `@` token followed by a
     * doc-annotation token spawns a [DocAnnotationParser], which advances the
     * iterator past the annotation's own tokens.
     *
     * @throws IllegalStateException if the block is not delimited by
     *         DOC_START/DOC_END tokens.
     */
    override fun parseTokens(tokens: List<Token>) {
        val iter = tokens.listIterator()
        var token = iter.next() // doc_start

        // check() instead of assert(): JVM assertions are disabled unless the
        // process runs with -ea, so assert() would silently skip validation.
        check(token.identifier == TokenGrammar.DOC_START) { "Doc block must begin with DOC_START" }
        check(tokens.last().identifier == TokenGrammar.DOC_END) { "Doc block must end with DOC_END" }

        loop@ while (iter.hasNext()) {
            token = iter.next()

            when {
                token.identifier == TokenGrammar.AT && peekToken(iter)?.category == TokenCategory.DocAnnotation -> {
                    docAnnotationParsers.add(DocAnnotationParser(iter)) // increments iterator
                }
                token.identifier == TokenGrammar.DOC_END -> break@loop
                else -> this.descTokens.add(token)
            }
        }
    }
}