//===--- PPCaching.cpp - Handle caching lexed tokens ----------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file implements pieces of the Preprocessor interface that manage the
// caching of lexed tokens.
//
//===----------------------------------------------------------------------===//

#include "clang/Lex/Preprocessor.h"
using namespace clang;

/// EnableBacktrackAtThisPos - From the point that this method is called, and
/// until CommitBacktrackedTokens() or Backtrack() is called, the Preprocessor
/// keeps track of the lexed tokens so that a subsequent Backtrack() call will
/// make the Preprocessor re-lex the same tokens.
///
/// Nested backtracks are allowed, meaning that EnableBacktrackAtThisPos can
/// be called multiple times and CommitBacktrackedTokens/Backtrack calls will
/// be combined with the EnableBacktrackAtThisPos calls in reverse order.
void Preprocessor::EnableBacktrackAtThisPos() {
  BacktrackPositions.push_back(CachedLexPos);
  EnterCachingLexMode();
}

/// CommitBacktrackedTokens - Disable the last EnableBacktrackAtThisPos call.
void Preprocessor::CommitBacktrackedTokens() {
  assert(!BacktrackPositions.empty()
         && "EnableBacktrackAtThisPos was not called!");
  BacktrackPositions.pop_back();
}

/// Backtrack - Make Preprocessor re-lex the tokens that were lexed since
/// EnableBacktrackAtThisPos() was previously called.
void Preprocessor::Backtrack() {
  assert(!BacktrackPositions.empty()
         && "EnableBacktrackAtThisPos was not called!");
  CachedLexPos = BacktrackPositions.back();
  BacktrackPositions.pop_back();
}
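
// Illustration (not part of the original file, kept out of the build): a rough
// sketch of how a client such as the parser might drive the backtracking
// interface above. The function name and the identifier test are hypothetical;
// only the Preprocessor/Token calls are taken from the existing API.
#if 0
static bool TryConsumeIdentifier(Preprocessor &PP) {
  PP.EnableBacktrackAtThisPos();   // Start recording lexed tokens.
  Token Tok;
  PP.Lex(Tok);
  if (Tok.is(tok::identifier)) {
    PP.CommitBacktrackedTokens();  // Accept: stop recording, keep the tokens consumed.
    return true;
  }
  PP.Backtrack();                  // Reject: the next Lex() returns Tok again.
  return false;
}
#endif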

void Preprocessor::CachingLex(Token &Result) {
  if (!InCachingLexMode())
    return;

  if (CachedLexPos < CachedTokens.size()) {
    Result = CachedTokens[CachedLexPos++];
    return;
  }

  ExitCachingLexMode();
  Lex(Result);

  if (!isBacktrackEnabled()) {
    // All cached tokens were consumed.
    CachedTokens.clear();
    CachedLexPos = 0;
    return;
  }

  // Cache the lexed token.
  EnterCachingLexMode();
  CachedTokens.push_back(Result);
  ++CachedLexPos;
}

void Preprocessor::EnterCachingLexMode() {
  if (InCachingLexMode())
    return;

  PushIncludeMacroStack();
}

const Token &Preprocessor::PeekAhead(unsigned N) {
  assert(CachedLexPos + N > CachedTokens.size() && "Confused caching.");
  ExitCachingLexMode();
  for (unsigned C = CachedLexPos + N - CachedTokens.size(); C > 0; --C) {
    CachedTokens.push_back(Token());
    Lex(CachedTokens.back());
  }
  EnterCachingLexMode();
  return CachedTokens.back();
}

void Preprocessor::AnnotatePreviousCachedTokens(const Token &Tok) {
  assert(Tok.isAnnotation() && "Expected annotation token");
  assert(CachedLexPos != 0 && "Expected to have some cached tokens");
  assert(CachedTokens[CachedLexPos-1].getLastLoc() == Tok.getAnnotationEndLoc()
         && "The annotation should be until the most recent cached token");

  // Start from the end of the cached tokens list and look for the token
  // that is the beginning of the annotation token.
  for (CachedTokensTy::size_type i = CachedLexPos; i != 0; --i) {
    CachedTokensTy::iterator AnnotBegin = CachedTokens.begin() + i-1;
    if (AnnotBegin->getLocation() == Tok.getLocation()) {
      assert((BacktrackPositions.empty() || BacktrackPositions.back() < i) &&
             "The backtrack pos points inside the annotated tokens!");
      // Replace the cached tokens with the single annotation token.
      if (i < CachedLexPos)
        CachedTokens.erase(AnnotBegin + 1, CachedTokens.begin() + CachedLexPos);
      *AnnotBegin = Tok;
      CachedLexPos = i;
      return;
    }
  }
}
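
// Illustration (not part of the original file, kept out of the build): one way
// a client might peek at upcoming tokens without consuming them. LookAhead(),
// the Preprocessor member declared in Preprocessor.h, returns a token straight
// from the cache when one is available and otherwise defers to PeekAhead()
// above; the helper function and its "call-like" heuristic are hypothetical.
#if 0
static bool StartsLikeCall(Preprocessor &PP) {
  const Token &Next = PP.LookAhead(0);       // Next token, not consumed.
  const Token &AfterNext = PP.LookAhead(1);  // The one after it, also cached.
  return Next.is(tok::identifier) && AfterNext.is(tok::l_paren);
}
#endif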