/*
 * Copyright (C) 2017 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package lexer

import java.util.*

object DocLexer : ILexer {

    /**
     * Tokenizes the body of a docblock comment.
     *
     * @param str The string should already be padded from the file lexer.
     * @return The tokens parsed from the docblock, in source order.
     */
    override fun tokenize(str: String): List<Token> {
        val tokens = mutableListOf<Token>()

        //strip the docblock comment indentation (the leading '*' prefix) - must go before the others
        val formattedStr = str.lines()
                .map { it.replace(Regex("^\\s*\\*[^/]"), "") } //indented '*' prefix, except the doc_end '*/'
                .map { it.replace(Regex("^\\s*\\*$"), "") }    //lines that are only the '*' prefix
                //replace empty lines with something the scanner can pick out
                .map { it.replace(Regex("$\\s*^"), TokenGrammar.EMPTY_LINE.value) }
                .joinToString("\n")
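        //note: without RegexOption.MULTILINE, '^' and '$' can only coincide on an empty
        //string, so the last map above replaces only lines that are completely empty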
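        //java.util.Scanner splits on whitespace by default, so each token is a whitespace-delimited word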
        Scanner(formattedStr).use { scanner ->
            while (scanner.hasNext()) {
                val token = scanner.next()

                when (token) {
                    TokenGrammar.EMPTY_LINE.value -> tokens.add(TokenGrammar.newToken("", TokenGrammar.EMPTY_LINE))

                    //if this starts an annotation, add the following tag as well
                    TokenGrammar.AT.value -> {
                        tokens.add(TokenGrammar.newToken(token)) //'@'
                        //determine whether the '@' is part of a known annotation
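                        //Scanner.hasNext(pattern) checks the next token without consuming it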
                        for (annotation in TokenGrammar.docAnnotations()) {
                            if (scanner.hasNext(annotation.value)) {
                                tokens.add(TokenGrammar.newToken(scanner.next()))
                            }
                        }
                    }

                    //default to DocWord
                    else -> {
                        val id = TokenGrammar.getFromValueOrDefault(token)
                        val category = if (id == TokenGrammar.WORD) TokenCategory.DocWord else id.category
                        tokens.add(TokenGrammar.newToken(token, category))
                        //TODO: Perhaps make all docblock words a DocWord
                        //tokens.add(TokenGrammar.newToken(token, TokenCategory.DocWord))
                    }
                }
            }
        }
        return tokens.toList()
    }
}
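
//Usage sketch (hypothetical input; it assumes the caller has already padded the text so
//that '@' arrives as its own whitespace-delimited token, as noted in the KDoc above):
//
//  val tokens = DocLexer.tokenize(" * Returns the current state.\n *\n * @ return the state\n")
//
//Blank docblock lines come back as EMPTY_LINE tokens, '@' followed by a recognized doc
//annotation comes back as two tokens, and unrecognized words default to the DocWord category.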