#define LLVM_CLANG_PTHLEXER_H
#include "clang/Lex/PreprocessorLexer.h"
+#include <vector>
namespace clang {
class PTHLexer : public PreprocessorLexer {
- /// Tokens - This is the pointer to an array of tokens that the macro is
- /// defined to, with arguments expanded for function-like macros. If this is
- /// a token stream, these are the tokens we are returning.
- const Token *Tokens;
-
- /// LastTokenIdx - The index of the last token in Tokens. This token
- /// will be an eof token.
- unsigned LastTokenIdx;
-
+ /// Tokens - The raw tokens for this file, owned by this lexer. The
+ /// vector is populated externally (via getTokens()) before lexing
+ /// begins; the last token pushed is always an eof token.
+ std::vector<Token> Tokens;
+
/// CurTokenIdx - This is the index of the next token that Lex will return.
unsigned CurTokenIdx;
PTHLexer(const PTHLexer&); // DO NOT IMPLEMENT
void operator=(const PTHLexer&); // DO NOT IMPLEMENT
-public:
+public:
- /// Create a PTHLexer for the specified token stream.
+ /// Create a PTHLexer for the specified file. The token stream is not
+ /// passed in; callers populate the vector returned by getTokens()
+ /// (ending with an eof token) before lexing starts.
- PTHLexer(Preprocessor& pp, SourceLocation fileloc,
- const Token *TokArray, unsigned NumToks);
+ PTHLexer(Preprocessor& pp, SourceLocation fileloc);
~PTHLexer() {}
/// Lex - Return the next token.
void setEOF(Token &Tok);
+ std::vector<Token>& getTokens() { return Tokens; }
+
/// DiscardToEndOfLine - Read the rest of the current preprocessor line as an
/// uninterpreted string. This switches the lexer out of directive mode.
void DiscardToEndOfLine();
private:
/// AtLastToken - Returns true if the PTHLexer is at the last token.
- bool AtLastToken() const { return CurTokenIdx == LastTokenIdx; }
+ bool AtLastToken() const { return CurTokenIdx+1 == Tokens.size(); }
/// GetToken - Returns the next token. This method does not advance the
/// PTHLexer to the next token.
Lexer *TheLexer = new Lexer(SourceLocation::getFileLoc(FileID, 0), *this);
EnterSourceFileWithLexer(TheLexer, CurDir);
#else
+ if (CurPPLexer || CurTokenLexer)
+ PushIncludeMacroStack();
+
+ CurDirLookup = CurDir;
+ SourceLocation Loc = SourceLocation::getFileLoc(FileID, 0);
+ CurPTHLexer.reset(new PTHLexer(*this, Loc));
+ CurPPLexer = CurPTHLexer.get();
+
+ // Generate the tokens: raw-lex the whole file up front, storing the
+ // tokens directly into the new PTHLexer's token vector (which is why
+ // the lexer is created before the lexing loop below).
+
const llvm::MemoryBuffer* B = getSourceManager().getBuffer(FileID);
// Create a raw lexer.
L.SetCommentRetentionState(false);
// Lex the file, populating our data structures.
- std::vector<Token>* Tokens = new std::vector<Token>();
+ std::vector<Token>& Tokens = CurPTHLexer->getTokens();
Token Tok;
do {
else if (Tok.is(tok::hash) && Tok.isAtStartOfLine()) {
// Special processing for #include. Store the '#' token and lex
// the next token.
- Tokens->push_back(Tok);
+ Tokens.push_back(Tok);
L.LexFromRawLexer(Tok);
// Did we see 'include'/'import'/'include_next'?
K == tok::pp_include_next) {
// Save the 'include' token.
- Tokens->push_back(Tok);
+ Tokens.push_back(Tok);
// Lex the next token as an include string.
L.ParsingPreprocessorDirective = true;
}
}
}
- while (Tokens->push_back(Tok), Tok.isNot(tok::eof));
-
- if (CurPPLexer || CurTokenLexer)
- PushIncludeMacroStack();
-
- CurDirLookup = CurDir;
- SourceLocation Loc = SourceLocation::getFileLoc(FileID, 0);
- CurPTHLexer.reset(new PTHLexer(*this, Loc, &(*Tokens)[0], Tokens->size()));
- CurPPLexer = CurPTHLexer.get();
+ while (Tokens.push_back(Tok), Tok.isNot(tok::eof));
// Notify the client, if desired, that we are in a new source file.
if (Callbacks) {
#include "clang/Basic/TokenKinds.h"
using namespace clang;
-PTHLexer::PTHLexer(Preprocessor& pp, SourceLocation fileloc,
- const Token *TokArray, unsigned NumTokens)
- : PreprocessorLexer(&pp, fileloc),
- Tokens(TokArray),
- LastTokenIdx(NumTokens - 1),
- CurTokenIdx(0) {
-
- assert(NumTokens >= 1);
- assert(Tokens[LastTokenIdx].is(tok::eof));
-}
+PTHLexer::PTHLexer(Preprocessor& pp, SourceLocation fileloc)
+ : PreprocessorLexer(&pp, fileloc), CurTokenIdx(0) {}
Token PTHLexer::GetToken() {
Token Tok = Tokens[CurTokenIdx];
}
void PTHLexer::setEOF(Token& Tok) {
- Tok = Tokens[LastTokenIdx];
+ assert(!Tokens.empty());
+ Tok = Tokens[Tokens.size()-1];
}
void PTHLexer::DiscardToEndOfLine() {