#include "llvm/ADT/ArrayRef.h"
#include "llvm/ADT/DenseMap.h"
#include "llvm/ADT/FoldingSet.h"
+#include "llvm/ADT/FunctionExtras.h"
#include "llvm/ADT/None.h"
#include "llvm/ADT/Optional.h"
#include "llvm/ADT/PointerUnion.h"
#include <cassert>
#include <cstddef>
#include <cstdint>
-#include <memory>
#include <map>
+#include <memory>
#include <string>
#include <utility>
#include <vector>
friend class VAOptDefinitionContext;
friend class VariadicMacroScopeGuard;
+ llvm::unique_function<void(const clang::Token &)> OnToken;
std::shared_ptr<PreprocessorOptions> PPOpts;
DiagnosticsEngine *Diags;
LangOptions &LangOpts;
}
/// \}
+ /// Register a function that will be called on each token in the final
+ /// expanded token stream.
+ /// This also reports annotation tokens produced by the parser.
+ void setTokenWatcher(llvm::unique_function<void(const clang::Token &)> F) {
+ OnToken = std::move(F);
+ }
+
bool isMacroDefined(StringRef Id) {
return isMacroDefined(&Identifiers.get(Id));
}
void EnterMacro(Token &Tok, SourceLocation ILEnd, MacroInfo *Macro,
MacroArgs *Args);
+private:
/// Add a "macro" context to the top of the include stack,
/// which will cause the lexer to start returning the specified tokens.
///
/// of tokens has a permanent owner somewhere, so they do not need to be
/// copied. If it is true, it assumes the array of tokens is allocated with
/// \c new[] and the Preprocessor will delete[] it.
-private:
+ ///
+ /// If \p IsReinject the resulting tokens will have Token::IsReinjected flag
+ /// set, see the flag documentation for details.
void EnterTokenStream(const Token *Toks, unsigned NumToks,
- bool DisableMacroExpansion, bool OwnsTokens);
+ bool DisableMacroExpansion, bool OwnsTokens,
+ bool IsReinject);
public:
void EnterTokenStream(std::unique_ptr<Token[]> Toks, unsigned NumToks,
- bool DisableMacroExpansion) {
- EnterTokenStream(Toks.release(), NumToks, DisableMacroExpansion, true);
+ bool DisableMacroExpansion, bool IsReinject) {
+ EnterTokenStream(Toks.release(), NumToks, DisableMacroExpansion, true,
+ IsReinject);
}
- void EnterTokenStream(ArrayRef<Token> Toks, bool DisableMacroExpansion) {
- EnterTokenStream(Toks.data(), Toks.size(), DisableMacroExpansion, false);
+ void EnterTokenStream(ArrayRef<Token> Toks, bool DisableMacroExpansion,
+ bool IsReinject) {
+ EnterTokenStream(Toks.data(), Toks.size(), DisableMacroExpansion, false,
+ IsReinject);
}
/// Pop the current lexer/macro exp off the top of the lexer stack.
///
/// If BackTrack() is called afterwards, the token will remain at the
/// insertion point.
- void EnterToken(const Token &Tok) {
+ /// If \p IsReinject is true, resulting token will have Token::IsReinjected
+ /// flag set. See the flag documentation for details.
+ void EnterToken(const Token &Tok, bool IsReinject) {
if (LexLevel) {
// It's not correct in general to enter caching lex mode while in the
// middle of a nested lexing action.
auto TokCopy = llvm::make_unique<Token[]>(1);
TokCopy[0] = Tok;
- EnterTokenStream(std::move(TokCopy), 1, true);
+ EnterTokenStream(std::move(TokCopy), 1, true, IsReinject);
} else {
EnterCachingLexMode();
+ assert(IsReinject && "new tokens in the middle of cached stream");
CachedTokens.insert(CachedTokens.begin()+CachedLexPos, Tok);
}
}
//===--------------------------------------------------------------------===//
// Caching stuff.
- void CachingLex(Token &Result, bool &IsNewToken);
+ void CachingLex(Token &Result);
bool InCachingLexMode() const {
// If the Lexer pointers are 0 and IncludeMacroStack is empty, it means
public:
// Various flags set per token:
enum TokenFlags {
- StartOfLine = 0x01, // At start of line or only after whitespace
- // (considering the line after macro expansion).
- LeadingSpace = 0x02, // Whitespace exists before this token (considering
- // whitespace after macro expansion).
- DisableExpand = 0x04, // This identifier may never be macro expanded.
- NeedsCleaning = 0x08, // Contained an escaped newline or trigraph.
+ StartOfLine = 0x01, // At start of line or only after whitespace
+ // (considering the line after macro expansion).
+ LeadingSpace = 0x02, // Whitespace exists before this token (considering
+ // whitespace after macro expansion).
+ DisableExpand = 0x04, // This identifier may never be macro expanded.
+ NeedsCleaning = 0x08, // Contained an escaped newline or trigraph.
LeadingEmptyMacro = 0x10, // Empty macro exists before this token.
- HasUDSuffix = 0x20, // This string or character literal has a ud-suffix.
- HasUCN = 0x40, // This identifier contains a UCN.
- IgnoredComma = 0x80, // This comma is not a macro argument separator (MS).
+ HasUDSuffix = 0x20, // This string or character literal has a ud-suffix.
+ HasUCN = 0x40, // This identifier contains a UCN.
+ IgnoredComma = 0x80, // This comma is not a macro argument separator (MS).
StringifiedInMacro = 0x100, // This string or character literal is formed by
// macro stringizing or charizing operator.
CommaAfterElided = 0x200, // The comma following this token was elided (MS).
IsEditorPlaceholder = 0x400, // This identifier is a placeholder.
+ IsReinjected = 0x800, // A phase 4 token that was produced before and
+ // re-added, e.g. via EnterTokenStream. Annotation
+ // tokens are *not* reinjected.
};
tok::TokenKind getKind() const { return Kind; }
/// should not be subject to further macro expansion.
bool DisableMacroExpansion : 1;
+ /// When true, the produced tokens have Token::IsReinjected flag set.
+ /// See the flag documentation for details.
+ bool IsReinject : 1;
+
public:
/// Create a TokenLexer for the specified macro with the specified actual
/// arguments. Note that this ctor takes ownership of the ActualArgs pointer.
/// specified, this takes ownership of the tokens and delete[]'s them when
/// the token lexer is empty.
TokenLexer(const Token *TokArray, unsigned NumToks, bool DisableExpansion,
- bool ownsTokens, Preprocessor &pp)
+ bool ownsTokens, bool isReinject, Preprocessor &pp)
: PP(pp), OwnsTokens(false) {
- Init(TokArray, NumToks, DisableExpansion, ownsTokens);
+ Init(TokArray, NumToks, DisableExpansion, ownsTokens, isReinject);
}
TokenLexer(const TokenLexer &) = delete;
///
/// DisableExpansion is true when macro expansion of tokens lexed from this
/// stream should be disabled.
- void Init(const Token *TokArray, unsigned NumToks,
- bool DisableMacroExpansion, bool OwnsTokens);
+ void Init(const Token *TokArray, unsigned NumToks, bool DisableMacroExpansion,
+ bool OwnsTokens, bool IsReinject);
/// If the next token lexed will pop this macro off the
/// expansion stack, return 2. If the next unexpanded token is a '(', return
Depth = Depth - AddedLevels + D;
AddedLevels = D;
}
-
+
unsigned getDepth() const { return Depth; }
unsigned getOriginalDepth() const { return Depth - AddedLevels; }
};
/// token the current token.
void UnconsumeToken(Token &Consumed) {
Token Next = Tok;
- PP.EnterToken(Consumed);
+ PP.EnterToken(Consumed, /*IsReinject*/true);
PP.Lex(Tok);
- PP.EnterToken(Next);
+ PP.EnterToken(Next, /*IsReinject*/true);
}
SourceLocation ConsumeAnnotationToken() {
auto Toks = llvm::make_unique<Token[]>(1);
Toks[0] = PragmaTok;
PP.EnterTokenStream(std::move(Toks), /*NumToks=*/1,
- /*DisableMacroExpansion=*/false);
+ /*DisableMacroExpansion=*/false,
+ /*IsReinject=*/false);
PP.Lex(PragmaTok);
}
Token PrevToken;
// list. With this installed, we lex expanded tokens until we hit the EOF
// token at the end of the unexp list.
PP.EnterTokenStream(AT, NumToks, false /*disable expand*/,
- false /*owns tokens*/);
+ false /*owns tokens*/, false /*is reinject*/);
// Lex all of the macro-expanded tokens into Result.
do {
recomputeCurLexerKind();
}
-void Preprocessor::CachingLex(Token &Result, bool &IsNewToken) {
+void Preprocessor::CachingLex(Token &Result) {
if (!InCachingLexMode())
return;
if (CachedLexPos < CachedTokens.size()) {
Result = CachedTokens[CachedLexPos++];
- IsNewToken = false;
+ Result.setFlag(Token::IsReinjected);
return;
}
// Enter this token stream so that we re-lex the tokens. Make sure to
// enable macro expansion, in case the token after the # is an identifier
// that is expanded.
- EnterTokenStream(std::move(Toks), 2, false);
+ EnterTokenStream(std::move(Toks), 2, false, /*IsReinject*/false);
return;
}
Tok[0].setLocation(Range.getBegin());
Tok[0].setAnnotationEndLoc(Range.getEnd());
Tok[0].setAnnotationValue(AnnotationVal);
- EnterTokenStream(std::move(Tok), 1, true);
+ EnterTokenStream(std::move(Tok), 1, true, /*IsReinject*/ false);
}
/// Produce a diagnostic informing the user that a #include or similar
/// must be freed.
///
void Preprocessor::EnterTokenStream(const Token *Toks, unsigned NumToks,
- bool DisableMacroExpansion,
- bool OwnsTokens) {
+ bool DisableMacroExpansion, bool OwnsTokens,
+ bool IsReinject) {
if (CurLexerKind == CLK_CachingLexer) {
if (CachedLexPos < CachedTokens.size()) {
+ assert(IsReinject && "new tokens in the middle of cached stream");
// We're entering tokens into the middle of our cached token stream. We
// can't represent that, so just insert the tokens into the buffer.
CachedTokens.insert(CachedTokens.begin() + CachedLexPos,
// New tokens are at the end of the cached token sequnece; insert the
// token stream underneath the caching lexer.
ExitCachingLexMode();
- EnterTokenStream(Toks, NumToks, DisableMacroExpansion, OwnsTokens);
+ EnterTokenStream(Toks, NumToks, DisableMacroExpansion, OwnsTokens,
+ IsReinject);
EnterCachingLexMode();
return;
}
std::unique_ptr<TokenLexer> TokLexer;
if (NumCachedTokenLexers == 0) {
TokLexer = llvm::make_unique<TokenLexer>(
- Toks, NumToks, DisableMacroExpansion, OwnsTokens, *this);
+ Toks, NumToks, DisableMacroExpansion, OwnsTokens, IsReinject, *this);
} else {
TokLexer = std::move(TokenLexerCache[--NumCachedTokenLexers]);
- TokLexer->Init(Toks, NumToks, DisableMacroExpansion, OwnsTokens);
+ TokLexer->Init(Toks, NumToks, DisableMacroExpansion, OwnsTokens,
+ IsReinject);
}
// Save our current state.
// Do not lose the EOF/EOD.
auto Toks = llvm::make_unique<Token[]>(1);
Toks[0] = Tok;
- EnterTokenStream(std::move(Toks), 1, true);
+ EnterTokenStream(std::move(Toks), 1, true, /*IsReinject*/ false);
break;
} else if (Tok.is(tok::r_paren)) {
// If we found the ) token, the macro arg list is done.
std::copy(Tokens.begin() + 1, Tokens.end(), Toks.get());
Toks[Tokens.size() - 1] = Tok;
Self.EnterTokenStream(std::move(Toks), Tokens.size(),
- /*DisableMacroExpansion*/ true);
+ /*DisableMacroExpansion*/ true,
+ /*IsReinject*/ true);
// ... and return the _Pragma token unchanged.
Tok = *Tokens.begin();
std::copy(PragmaToks.begin(), PragmaToks.end(), TokArray);
// Push the tokens onto the stack.
- EnterTokenStream(TokArray, PragmaToks.size(), true, true);
+ EnterTokenStream(TokArray, PragmaToks.size(), true, true,
+ /*IsReinject*/ false);
// With everything set up, lex this as a #pragma directive.
HandlePragmaDirective(PragmaLoc, PIK___pragma);
Crasher.startToken();
Crasher.setKind(tok::annot_pragma_parser_crash);
Crasher.setAnnotationRange(SourceRange(Tok.getLocation()));
- PP.EnterToken(Crasher);
+ PP.EnterToken(Crasher, /*IsReinject*/false);
} else if (II->isStr("dump")) {
Token Identifier;
PP.LexUnexpandedToken(Identifier);
SourceRange(Tok.getLocation(), Identifier.getLocation()));
DumpAnnot.setAnnotationValue(DumpII);
PP.DiscardUntilEndOfDirective();
- PP.EnterToken(DumpAnnot);
+ PP.EnterToken(DumpAnnot, /*IsReinject*/false);
} else {
PP.Diag(Identifier, diag::warn_pragma_debug_missing_argument)
<< II->getName();
Toks[0].setKind(tok::annot_pragma_captured);
Toks[0].setLocation(NameLoc);
- PP.EnterTokenStream(Toks, /*DisableMacroExpansion=*/true);
+ PP.EnterTokenStream(Toks, /*DisableMacroExpansion=*/true,
+ /*IsReinject=*/false);
}
// Disable MSVC warning about runtime stack overflow.
CurTokenLexer->Lex(Tok);
break;
case CLK_CachingLexer:
- bool IsNewToken;
- CachingLex(Tok, IsNewToken);
+ CachingLex(Tok);
break;
case CLK_LexAfterModuleImport:
LexAfterModuleImport(Tok);
// We loop here until a lex function returns a token; this avoids recursion.
bool ReturnedToken;
- bool IsNewToken = true;
do {
switch (CurLexerKind) {
case CLK_Lexer:
ReturnedToken = CurTokenLexer->Lex(Result);
break;
case CLK_CachingLexer:
- CachingLex(Result, IsNewToken);
+ CachingLex(Result);
ReturnedToken = true;
break;
case CLK_LexAfterModuleImport:
// Update ImportSeqState to track our position within a C++20 import-seq
// if this token is being produced as a result of phase 4 of translation.
- if (getLangOpts().CPlusPlusModules && LexLevel == 1 && IsNewToken) {
+ if (getLangOpts().CPlusPlusModules && LexLevel == 1 &&
+ !Result.getFlag(Token::IsReinjected)) {
switch (Result.getKind()) {
case tok::l_paren: case tok::l_square: case tok::l_brace:
ImportSeqState.handleOpenBracket();
LastTokenWasAt = Result.is(tok::at);
--LexLevel;
+ if (OnToken && LexLevel == 0 && !Result.getFlag(Token::IsReinjected))
+ OnToken(Result);
}
/// Lex a header-name token (including one formed from header-name-tokens if
auto ToksCopy = llvm::make_unique<Token[]>(Toks.size());
std::copy(Toks.begin(), Toks.end(), ToksCopy.get());
EnterTokenStream(std::move(ToksCopy), Toks.size(),
- /*DisableMacroExpansion*/ true);
+ /*DisableMacroExpansion*/ true, /*IsReinject*/ false);
};
// Check for a header-name.
Tokens = &*Macro->tokens_begin();
OwnsTokens = false;
DisableMacroExpansion = false;
+ IsReinject = false;
NumTokens = Macro->tokens_end()-Macro->tokens_begin();
MacroExpansionStart = SourceLocation();
/// Create a TokenLexer for the specified token stream. This does not
/// take ownership of the specified token vector.
void TokenLexer::Init(const Token *TokArray, unsigned NumToks,
- bool disableMacroExpansion, bool ownsTokens) {
+ bool disableMacroExpansion, bool ownsTokens,
+ bool isReinject) {
+ assert(!isReinject || disableMacroExpansion);
// If the client is reusing a TokenLexer, make sure to free any memory
// associated with it.
destroy();
Tokens = TokArray;
OwnsTokens = ownsTokens;
DisableMacroExpansion = disableMacroExpansion;
+ IsReinject = isReinject;
NumTokens = NumToks;
CurTokenIdx = 0;
ExpandLocStart = ExpandLocEnd = SourceLocation();
// Get the next token to return.
Tok = Tokens[CurTokenIdx++];
+ if (IsReinject)
+ Tok.setFlag(Token::IsReinjected);
bool TokenIsFromPaste = false;
// Parse the default argument from its saved token stream.
Toks->push_back(Tok); // So that the current token doesn't get lost
- PP.EnterTokenStream(*Toks, true);
+ PP.EnterTokenStream(*Toks, true, /*IsReinject*/ true);
// Consume the previously-pushed token.
ConsumeAnyToken();
// Parse the default argument from its saved token stream.
Toks->push_back(Tok); // So that the current token doesn't get lost
- PP.EnterTokenStream(*Toks, true);
+ PP.EnterTokenStream(*Toks, true, /*IsReinject*/true);
// Consume the previously-pushed token.
ConsumeAnyToken();
// Append the current token at the end of the new token stream so that it
// doesn't get lost.
LM.Toks.push_back(Tok);
- PP.EnterTokenStream(LM.Toks, true);
+ PP.EnterTokenStream(LM.Toks, true, /*IsReinject*/true);
// Consume the previously pushed token.
ConsumeAnyToken(/*ConsumeCodeCompletionTok=*/true);
// Append the current token at the end of the new token stream so that it
// doesn't get lost.
MI.Toks.push_back(Tok);
- PP.EnterTokenStream(MI.Toks, true);
+ PP.EnterTokenStream(MI.Toks, true, /*IsReinject*/true);
// Consume the previously pushed token.
ConsumeAnyToken(/*ConsumeCodeCompletionTok=*/true);
auto Buffer = llvm::make_unique<Token[]>(Toks.size());
std::copy(Toks.begin() + 1, Toks.end(), Buffer.get());
Buffer[Toks.size() - 1] = Self.Tok;
- Self.PP.EnterTokenStream(std::move(Buffer), Toks.size(), true);
+ Self.PP.EnterTokenStream(std::move(Buffer), Toks.size(), true,
+ /*IsReinject*/ true);
Self.Tok = Toks.front();
}
// Append the current token at the end of the new token stream so that it
// doesn't get lost.
LA.Toks.push_back(Tok);
- PP.EnterTokenStream(LA.Toks, true);
+ PP.EnterTokenStream(LA.Toks, true, /*IsReinject=*/true);
// Consume the previously pushed token.
ConsumeAnyToken(/*ConsumeCodeCompletionTok=*/true);
if (Tok.isNot(tok::semi)) {
// A semicolon was missing after this declaration. Diagnose and recover.
ExpectAndConsume(tok::semi, diag::err_expected_after, "enum");
- PP.EnterToken(Tok);
+ PP.EnterToken(Tok, /*IsReinject=*/true);
Tok.setKind(tok::semi);
}
} else {
// Push this token back into the preprocessor and change our current token
// to ';' so that the rest of the code recovers as though there were an
// ';' after the definition.
- PP.EnterToken(Tok);
+ PP.EnterToken(Tok, /*IsReinject=*/true);
Tok.setKind(tok::semi);
}
}
if (PP.isBacktrackEnabled())
PP.RevertCachedTokens(1);
else
- PP.EnterToken(Tok);
+ PP.EnterToken(Tok, /*IsReinject*/true);
Tok.setKind(tok::annot_decltype);
setExprAnnotation(Tok,
// A semicolon was missing after this declaration. Diagnose and recover.
ExpectAndConsume(tok::semi, diag::err_expected_after,
DeclSpec::getSpecifierName(TagType, PPol));
- PP.EnterToken(Tok);
+ PP.EnterToken(Tok, /*IsReinject*/true);
Tok.setKind(tok::semi);
}
} else
// Push this token back into the preprocessor and change our current token
// to ';' so that the rest of the code recovers as though there were an
// ';' after the definition.
- PP.EnterToken(Tok);
+ PP.EnterToken(Tok, /*IsReinject=*/true);
Tok.setKind(tok::semi);
}
}
if (SuggestFixIt) {
LBraceDiag << FixItHint::CreateInsertion(BraceLoc, " {");
// Try recovering from missing { after base-clause.
- PP.EnterToken(Tok);
+ PP.EnterToken(Tok, /*IsReinject*/true);
Tok.setKind(tok::l_brace);
} else {
if (TagDecl)
diag::note_missing_end_of_definition_before) << D;
// Push '};' onto the token stream to recover.
- PP.EnterToken(Tok);
+ PP.EnterToken(Tok, /*IsReinject*/ true);
Tok.startToken();
Tok.setLocation(PP.getLocForEndOfToken(PrevTokLocation));
Tok.setKind(tok::semi);
- PP.EnterToken(Tok);
+ PP.EnterToken(Tok, /*IsReinject*/ true);
Tok.setKind(tok::r_brace);
}
// We can't do this before consuming the comma, because
// isNotExpressionStart() looks at the token stream.
if (OpToken.is(tok::comma) && isNotExpressionStart()) {
- PP.EnterToken(Tok);
+ PP.EnterToken(Tok, /*IsReinject*/true);
Tok = OpToken;
return LHS;
}
if (isFoldOperator(NextTokPrec) && Tok.is(tok::ellipsis)) {
// FIXME: We can't check this via lookahead before we consume the token
// because that tickles a lexer bug.
- PP.EnterToken(Tok);
+ PP.EnterToken(Tok, /*IsReinject*/true);
Tok = OpToken;
return LHS;
}
if (getLangOpts().ObjC && getLangOpts().CPlusPlus &&
Tok.isOneOf(tok::colon, tok::r_square) &&
OpToken.getIdentifierInfo() != nullptr) {
- PP.EnterToken(Tok);
+ PP.EnterToken(Tok, /*IsReinject*/true);
Tok = OpToken;
return LHS;
}
DigraphToken.setLength(1);
// Push new tokens back to token stream.
- PP.EnterToken(ColonToken);
+ PP.EnterToken(ColonToken, /*IsReinject*/ true);
if (!AtDigraph)
- PP.EnterToken(DigraphToken);
+ PP.EnterToken(DigraphToken, /*IsReinject*/ true);
}
// Check for '<::' which should be '< ::' instead of '[:' when following
Token ColonColon;
PP.Lex(ColonColon);
ColonColon.setKind(tok::colon);
- PP.EnterToken(ColonColon);
+ PP.EnterToken(ColonColon, /*IsReinject*/ true);
break;
}
}
// mistyped '::' instead of ':'.
if (CorrectionFlagPtr && IsCorrectedToColon) {
ColonColon.setKind(tok::colon);
- PP.EnterToken(Tok);
- PP.EnterToken(ColonColon);
+ PP.EnterToken(Tok, /*IsReinject*/ true);
+ PP.EnterToken(ColonColon, /*IsReinject*/ true);
Tok = Identifier;
break;
}
Toks.push_back(Tok);
// Re-enter the stored parenthesized tokens into the token stream, so we may
// parse them now.
- PP.EnterTokenStream(Toks, true /*DisableMacroExpansion*/);
+ PP.EnterTokenStream(Toks, /*DisableMacroExpansion*/ true,
+ /*IsReinject*/ true);
// Drop the current token and bring the first cached one. It's the same token
// as when we entered this function.
ConsumeAnyToken();
Tok.setLocation(Tok.getLocation().getLocWithOffset(-1));
Tok.setKind(tok::at);
Tok.setLength(1);
- PP.EnterToken(Tok);
+ PP.EnterToken(Tok, /*IsReinject*/true);
HelperActionsForIvarDeclarations(interfaceDecl, atLoc,
T, AllIvarDecls, true);
return;
// Append the current token at the end of the new token stream so that it
// doesn't get lost.
LM.Toks.push_back(Tok);
- PP.EnterTokenStream(LM.Toks, true);
+ PP.EnterTokenStream(LM.Toks, true, /*IsReinject*/true);
// Consume the previously pushed token.
ConsumeAnyToken(/*ConsumeCodeCompletionTok=*/true);
Parser::DeclGroupPtrTy
Parser::ParseOMPDeclareSimdClauses(Parser::DeclGroupPtrTy Ptr,
CachedTokens &Toks, SourceLocation Loc) {
- PP.EnterToken(Tok);
- PP.EnterTokenStream(Toks, /*DisableMacroExpansion=*/true);
+ PP.EnterToken(Tok, /*IsReinject*/ true);
+ PP.EnterTokenStream(Toks, /*DisableMacroExpansion=*/true,
+ /*IsReinject*/ true);
// Consume the previously pushed token.
ConsumeAnyToken(/*ConsumeCodeCompletionTok=*/true);
FlushHasClause = true;
// Push copy of the current token back to stream to properly parse
// pseudo-clause OMPFlushClause.
- PP.EnterToken(Tok);
+ PP.EnterToken(Tok, /*IsReinject*/ true);
}
LLVM_FALLTHROUGH;
case OMPD_taskyield:
Toks[0].setAnnotationEndLoc(Tok.getLocation());
Toks[0].setAnnotationValue(reinterpret_cast<void*>(
static_cast<uintptr_t>(OOS)));
- PP.EnterTokenStream(Toks, /*DisableMacroExpansion=*/true);
+ PP.EnterTokenStream(Toks, /*DisableMacroExpansion=*/true,
+ /*IsReinject=*/false);
}
};
// Grab the tokens out of the annotation and enter them into the stream.
auto TheTokens =
(std::pair<std::unique_ptr<Token[]>, size_t> *)Tok.getAnnotationValue();
- PP.EnterTokenStream(std::move(TheTokens->first), TheTokens->second, true);
+ PP.EnterTokenStream(std::move(TheTokens->first), TheTokens->second, true,
+ /*IsReinject=*/true);
SourceLocation PragmaLocation = ConsumeAnnotationToken();
assert(Tok.isAnyIdentifier());
StringRef PragmaName = Tok.getIdentifierInfo()->getName();
Hint.StateLoc = IdentifierLoc::create(Actions.Context, StateLoc, StateInfo);
} else {
// Enter constant expression including eof terminator into token stream.
- PP.EnterTokenStream(Toks, /*DisableMacroExpansion=*/false);
+ PP.EnterTokenStream(Toks, /*DisableMacroExpansion=*/false,
+ /*IsReinject=*/false);
ConsumeAnnotationToken();
ExprResult R = ParseConstantExpression();
return;
}
- PP.EnterTokenStream(Info->Tokens, /*DisableMacroExpansion=*/false);
+ PP.EnterTokenStream(Info->Tokens, /*DisableMacroExpansion=*/false,
+ /*IsReinject=*/false);
ConsumeAnnotationToken();
ParsedAttributes &Attrs = Info->Attributes;
Toks[0].setLocation(VisLoc);
Toks[0].setAnnotationEndLoc(EndLoc);
Toks[0].setAnnotationValue(
- const_cast<void*>(static_cast<const void*>(VisType)));
- PP.EnterTokenStream(std::move(Toks), 1, /*DisableMacroExpansion=*/true);
+ const_cast<void *>(static_cast<const void *>(VisType)));
+ PP.EnterTokenStream(std::move(Toks), 1, /*DisableMacroExpansion=*/true,
+ /*IsReinject=*/false);
}
// #pragma pack(...) comes in the following delicious flavors:
Toks[0].setLocation(PackLoc);
Toks[0].setAnnotationEndLoc(RParenLoc);
Toks[0].setAnnotationValue(static_cast<void*>(Info));
- PP.EnterTokenStream(Toks, /*DisableMacroExpansion=*/true);
+ PP.EnterTokenStream(Toks, /*DisableMacroExpansion=*/true,
+ /*IsReinject=*/false);
}
// #pragma ms_struct on
Toks[0].setAnnotationEndLoc(EndLoc);
Toks[0].setAnnotationValue(reinterpret_cast<void*>(
static_cast<uintptr_t>(Kind)));
- PP.EnterTokenStream(Toks, /*DisableMacroExpansion=*/true);
+ PP.EnterTokenStream(Toks, /*DisableMacroExpansion=*/true,
+ /*IsReinject=*/false);
}
// #pragma clang section bss="abc" data="" rodata="def" text=""
Toks[0].setAnnotationEndLoc(EndLoc);
Toks[0].setAnnotationValue(reinterpret_cast<void*>(
static_cast<uintptr_t>(Kind)));
- PP.EnterTokenStream(Toks, /*DisableMacroExpansion=*/true);
+ PP.EnterTokenStream(Toks, /*DisableMacroExpansion=*/true,
+ /*IsReinject=*/false);
}
void PragmaAlignHandler::HandlePragma(Preprocessor &PP,
pragmaUnusedTok.setLocation(UnusedLoc);
idTok = Identifiers[i];
}
- PP.EnterTokenStream(Toks, /*DisableMacroExpansion=*/true);
+ PP.EnterTokenStream(Toks, /*DisableMacroExpansion=*/true,
+ /*IsReinject=*/false);
}
// #pragma weak identifier
pragmaUnusedTok.setAnnotationEndLoc(AliasName.getLocation());
Toks[1] = WeakName;
Toks[2] = AliasName;
- PP.EnterTokenStream(Toks, /*DisableMacroExpansion=*/true);
+ PP.EnterTokenStream(Toks, /*DisableMacroExpansion=*/true,
+ /*IsReinject=*/false);
} else {
MutableArrayRef<Token> Toks(
PP.getPreprocessorAllocator().Allocate<Token>(2), 2);
pragmaUnusedTok.setLocation(WeakLoc);
pragmaUnusedTok.setAnnotationEndLoc(WeakLoc);
Toks[1] = WeakName;
- PP.EnterTokenStream(Toks, /*DisableMacroExpansion=*/true);
+ PP.EnterTokenStream(Toks, /*DisableMacroExpansion=*/true,
+ /*IsReinject=*/false);
}
}
pragmaRedefTok.setAnnotationEndLoc(AliasName.getLocation());
Toks[1] = RedefName;
Toks[2] = AliasName;
- PP.EnterTokenStream(Toks, /*DisableMacroExpansion=*/true);
+ PP.EnterTokenStream(Toks, /*DisableMacroExpansion=*/true,
+ /*IsReinject=*/false);
}
Toks[0].setAnnotationEndLoc(Tok.getLocation());
Toks[0].setAnnotationValue(reinterpret_cast<void*>(
static_cast<uintptr_t>(OOS)));
- PP.EnterTokenStream(Toks, /*DisableMacroExpansion=*/true);
+ PP.EnterTokenStream(Toks, /*DisableMacroExpansion=*/true,
+ /*IsReinject=*/false);
}
void
Toks[0].setLocation(NameLoc);
Toks[0].setAnnotationValue(static_cast<void*>(Info));
Toks[0].setAnnotationEndLoc(StateLoc);
- PP.EnterTokenStream(Toks, /*DisableMacroExpansion=*/true);
+ PP.EnterTokenStream(Toks, /*DisableMacroExpansion=*/true,
+ /*IsReinject=*/false);
if (PP.getPPCallbacks())
PP.getPPCallbacks()->PragmaOpenCLExtension(NameLoc, Ext,
auto Toks = llvm::make_unique<Token[]>(Pragma.size());
std::copy(Pragma.begin(), Pragma.end(), Toks.get());
PP.EnterTokenStream(std::move(Toks), Pragma.size(),
- /*DisableMacroExpansion=*/false);
+ /*DisableMacroExpansion=*/false, /*IsReinject=*/false);
}
/// Handle '#pragma pointers_to_members'
AnnotTok.setAnnotationEndLoc(EndLoc);
AnnotTok.setAnnotationValue(
reinterpret_cast<void *>(static_cast<uintptr_t>(RepresentationMethod)));
- PP.EnterToken(AnnotTok);
+ PP.EnterToken(AnnotTok, /*IsReinject=*/true);
}
/// Handle '#pragma vtordisp'
AnnotTok.setAnnotationEndLoc(EndLoc);
AnnotTok.setAnnotationValue(reinterpret_cast<void *>(
static_cast<uintptr_t>((Action << 16) | (Value & 0xFFFF))));
- PP.EnterToken(AnnotTok);
+ PP.EnterToken(AnnotTok, /*IsReinject=*/false);
}
/// Handle all MS pragmas. Simply forwards the tokens after inserting
std::pair<std::unique_ptr<Token[]>, size_t>(std::move(TokenArray),
TokenVector.size());
AnnotTok.setAnnotationValue(Value);
- PP.EnterToken(AnnotTok);
+ PP.EnterToken(AnnotTok, /*IsReinject*/ false);
}
/// Handle the Microsoft \#pragma detect_mismatch extension.
std::copy(TokenList.begin(), TokenList.end(), TokenArray.get());
PP.EnterTokenStream(std::move(TokenArray), TokenList.size(),
- /*DisableMacroExpansion=*/false);
+ /*DisableMacroExpansion=*/false, /*IsReinject=*/false);
}
void Parser::HandlePragmaFP() {
std::copy(TokenList.begin(), TokenList.end(), TokenArray.get());
PP.EnterTokenStream(std::move(TokenArray), TokenList.size(),
- /*DisableMacroExpansion=*/false);
+ /*DisableMacroExpansion=*/false, /*IsReinject=*/false);
}
/// Handle the loop unroll optimization pragmas.
TokenArray[0].setAnnotationEndLoc(PragmaName.getLocation());
TokenArray[0].setAnnotationValue(static_cast<void *>(Info));
PP.EnterTokenStream(std::move(TokenArray), 1,
- /*DisableMacroExpansion=*/false);
+ /*DisableMacroExpansion=*/false, /*IsReinject=*/false);
}
/// Handle the Microsoft \#pragma intrinsic extension.
TokenArray[0].setAnnotationEndLoc(FirstToken.getLocation());
TokenArray[0].setAnnotationValue(static_cast<void *>(Info));
PP.EnterTokenStream(std::move(TokenArray), 1,
- /*DisableMacroExpansion=*/false);
+ /*DisableMacroExpansion=*/false, /*IsReinject=*/false);
}
// Also copy the current token over.
LineToks.push_back(Tok);
- PP.EnterTokenStream(LineToks, /*DisableMacroExpansions*/ true);
+ PP.EnterTokenStream(LineToks, /*DisableMacroExpansions*/ true,
+ /*IsReinject*/ true);
// Clear the current token and advance to the first token in LineToks.
ConsumeAnyToken();
PrevTokLocation = RAngleLoc;
} else {
PrevTokLocation = TokBeforeGreaterLoc;
- PP.EnterToken(Tok);
+ PP.EnterToken(Tok, /*IsReinject=*/true);
Tok = Greater;
}
// Append the current token at the end of the new token stream so that it
// doesn't get lost.
LPT.Toks.push_back(Tok);
- PP.EnterTokenStream(LPT.Toks, true);
+ PP.EnterTokenStream(LPT.Toks, true, /*IsReinject*/true);
// Consume the previously pushed token.
ConsumeAnyToken(/*ConsumeCodeCompletionTok=*/true);
if (PP.isBacktrackEnabled())
PP.RevertCachedTokens(1);
else
- PP.EnterToken(Tok);
+ PP.EnterToken(Tok, /*IsReinject=*/true);
Tok.setKind(tok::annot_cxxscope);
Tok.setAnnotationValue(Actions.SaveNestedNameSpecifierAnnotation(SS));
Tok.setAnnotationRange(SS.getRange());
Token TypedefToken;
PP.Lex(TypedefToken);
bool Result = TryAnnotateTypeOrScopeToken();
- PP.EnterToken(Tok);
+ PP.EnterToken(Tok, /*IsReinject=*/true);
Tok = TypedefToken;
if (!Result)
Diag(Tok.getLocation(), diag::warn_expected_qualified_after_typename);
// Enter the tokens we just lexed. This will cause them to be macro expanded
// but won't enter sub-files (because we removed #'s).
- TmpPP.EnterTokenStream(TokenStream, false);
+ TmpPP.EnterTokenStream(TokenStream, false, /*IsReinject=*/false);
TokenConcatenation ConcatInfo(TmpPP);