1 //===--- Preprocessor.h - C Language Family Preprocessor --------*- C++ -*-===//
3 // The LLVM Compiler Infrastructure
5 // This file is distributed under the University of Illinois Open Source
6 // License. See LICENSE.TXT for details.
8 //===----------------------------------------------------------------------===//
10 // This file defines the Preprocessor interface.
12 //===----------------------------------------------------------------------===//
14 #ifndef LLVM_CLANG_LEX_PREPROCESSOR_H
15 #define LLVM_CLANG_LEX_PREPROCESSOR_H
17 #include "clang/Lex/Lexer.h"
18 #include "clang/Lex/PTHLexer.h"
19 #include "clang/Lex/PPCallbacks.h"
20 #include "clang/Lex/TokenLexer.h"
21 #include "clang/Basic/IdentifierTable.h"
22 #include "clang/Basic/SourceLocation.h"
23 #include "llvm/ADT/DenseMap.h"
24 #include "llvm/ADT/OwningPtr.h"
32 class PragmaNamespace
;
37 class DirectoryLookup
;
39 /// Preprocessor - This object engages in a tight little dance with the lexer to
40 /// efficiently preprocess tokens. Lexers know only about tokens within a
41 /// single source file, and don't know anything about preprocessor-level issues
42 /// like the #include stack, token expansion, etc.
46 const LangOptions
&Features
;
49 SourceManager
&SourceMgr
;
50 ScratchBuffer
*ScratchBuf
;
51 HeaderSearch
&HeaderInfo
;
53 /// Identifiers for builtin macros and other builtins.
54 IdentifierInfo
*Ident__LINE__
, *Ident__FILE__
; // __LINE__, __FILE__
55 IdentifierInfo
*Ident__DATE__
, *Ident__TIME__
; // __DATE__, __TIME__
56 IdentifierInfo
*Ident__INCLUDE_LEVEL__
; // __INCLUDE_LEVEL__
57 IdentifierInfo
*Ident__BASE_FILE__
; // __BASE_FILE__
58 IdentifierInfo
*Ident__TIMESTAMP__
; // __TIMESTAMP__
59 IdentifierInfo
*Ident_Pragma
, *Ident__VA_ARGS__
; // _Pragma, __VA_ARGS__
61 SourceLocation DATELoc
, TIMELoc
;
64 /// MaxIncludeStackDepth - Maximum depth of #includes.
65 MaxAllowedIncludeStackDepth
= 200
68 // State that is set before the preprocessor begins.
69 bool KeepComments
: 1;
70 bool KeepMacroComments
: 1;
72 // State that changes while the preprocessor runs:
73 bool DisableMacroExpansion
: 1; // True if macro expansion is disabled.
74 bool InMacroArgs
: 1; // True if parsing fn macro invocation args.
76 /// Identifiers - This is mapping/lookup information for all identifiers in
77 /// the program, including program keywords.
78 IdentifierTable Identifiers
;
80 /// Selectors - This table contains all the selectors in the program. Unlike
81 /// IdentifierTable above, this table *isn't* populated by the preprocessor.
82 /// It is declared/instantiated here because its role/lifetime is
83 /// conceptually similar to the IdentifierTable. In addition, the current
84 /// control flow (in clang::ParseAST()) makes it convenient to put here.
85 /// FIXME: Make sure the lifetime of Identifiers/Selectors *isn't* tied to
86 /// the lifetime of the preprocessor.
87 SelectorTable Selectors
;
89 /// PragmaHandlers - This tracks all of the pragmas that the client registered
90 /// with this preprocessor.
91 PragmaNamespace
*PragmaHandlers
;
93 /// CurLexer - This is the current top of the stack that we're lexing from if
94 /// not expanding a macro and we are lexing directly from source code.
95 /// Only one of CurLexer, CurPTHLexer, or CurTokenLexer will be non-null.
96 llvm::OwningPtr
<Lexer
> CurLexer
;
98 /// CurPTHLexer - This is the current top of stack that we're lexing from if
99 /// not expanding from a macro and we are lexing from a PTH cache.
100 /// Only one of CurLexer, CurPTHLexer, or CurTokenLexer will be non-null.
101 llvm::OwningPtr
<PTHLexer
> CurPTHLexer
;
103 /// CurPPLexer - This is the current top of the stack that we're lexing from
104 /// if not expanding a macro. This is an alias for either CurLexer or
106 PreprocessorLexer
* CurPPLexer
;
108 /// CurLookup - The DirectoryLookup structure used to find the current
109 /// FileEntry, if CurLexer is non-null and if applicable. This allows us to
110 /// implement #include_next and find directory-specific properties.
111 const DirectoryLookup
*CurDirLookup
;
113 /// CurTokenLexer - This is the current macro we are expanding, if we are
114 /// expanding a macro. One of CurLexer and CurTokenLexer must be null.
115 llvm::OwningPtr
<TokenLexer
> CurTokenLexer
;
117 /// IncludeMacroStack - This keeps track of the stack of files currently
118 /// #included, and macros currently being expanded from, not counting
119 /// CurLexer/CurTokenLexer.
120 struct IncludeStackInfo
{
122 PTHLexer
*ThePTHLexer
;
123 PreprocessorLexer
*ThePPLexer
;
124 TokenLexer
*TheTokenLexer
;
125 const DirectoryLookup
*TheDirLookup
;
127 IncludeStackInfo(Lexer
*L
, PTHLexer
* P
, PreprocessorLexer
* PPL
,
128 TokenLexer
* TL
, const DirectoryLookup
*D
)
129 : TheLexer(L
), ThePTHLexer(P
), ThePPLexer(PPL
), TheTokenLexer(TL
),
132 std::vector
<IncludeStackInfo
> IncludeMacroStack
;
134 /// Callbacks - These are actions invoked when some preprocessor activity is
135 /// encountered (e.g. a file is #included, etc).
136 PPCallbacks
*Callbacks
;
138 /// Macros - For each IdentifierInfo with 'HasMacro' set, we keep a mapping
139 /// to the actual definition of the macro.
140 llvm::DenseMap
<IdentifierInfo
*, MacroInfo
*> Macros
;
142 // Various statistics we track for performance analysis.
143 unsigned NumDirectives
, NumIncluded
, NumDefined
, NumUndefined
, NumPragma
;
144 unsigned NumIf
, NumElse
, NumEndif
;
145 unsigned NumEnteredSourceFiles
, MaxIncludeStackDepth
;
146 unsigned NumMacroExpanded
, NumFnMacroExpanded
, NumBuiltinMacroExpanded
;
147 unsigned NumFastMacroExpanded
, NumTokenPaste
, NumFastTokenPaste
;
150 /// Predefines - This string is the predefined macros that preprocessor
151 /// should use from the command line etc.
152 std::string Predefines
;
154 /// TokenLexerCache - Cache macro expanders to reduce malloc traffic.
155 enum { TokenLexerCacheSize
= 8 };
156 unsigned NumCachedTokenLexers
;
157 TokenLexer
*TokenLexerCache
[TokenLexerCacheSize
];
159 private: // Cached tokens state.
160 typedef std::vector
<Token
> CachedTokensTy
;
162 /// CachedTokens - Cached tokens are stored here when we do backtracking or
163 /// lookahead. They are "lexed" by the CachingLex() method.
164 CachedTokensTy CachedTokens
;
166 /// CachedLexPos - The position of the cached token that CachingLex() should
167 /// "lex" next. If it points beyond the CachedTokens vector, it means that
168 /// a normal Lex() should be invoked.
169 CachedTokensTy::size_type CachedLexPos
;
171 /// BacktrackPositions - Stack of backtrack positions, allowing nested
172 /// backtracks. The EnableBacktrackAtThisPos() method pushes a position to
173 /// indicate where CachedLexPos should be set when the BackTrack() method is
174 /// invoked (at which point the last position is popped).
175 std::vector
<CachedTokensTy::size_type
> BacktrackPositions
;
178 Preprocessor(Diagnostic
&diags
, const LangOptions
&opts
, TargetInfo
&target
,
179 SourceManager
&SM
, HeaderSearch
&Headers
);
182 Diagnostic
&getDiagnostics() const { return Diags
; }
183 const LangOptions
&getLangOptions() const { return Features
; }
184 TargetInfo
&getTargetInfo() const { return Target
; }
185 FileManager
&getFileManager() const { return FileMgr
; }
186 SourceManager
&getSourceManager() const { return SourceMgr
; }
187 HeaderSearch
&getHeaderSearchInfo() const { return HeaderInfo
; }
189 IdentifierTable
&getIdentifierTable() { return Identifiers
; }
190 SelectorTable
&getSelectorTable() { return Selectors
; }
192 inline FullSourceLoc
getFullLoc(SourceLocation Loc
) const {
193 return FullSourceLoc(Loc
, getSourceManager());
196 /// SetCommentRetentionState - Control whether or not the preprocessor retains
197 /// comments in output.
198 void SetCommentRetentionState(bool KeepComments
, bool KeepMacroComments
) {
199 this->KeepComments
= KeepComments
| KeepMacroComments
;
200 this->KeepMacroComments
= KeepMacroComments
;
203 bool getCommentRetentionState() const { return KeepComments
; }
205 /// isCurrentLexer - Return true if we are lexing directly from the specified
207 bool isCurrentLexer(const Lexer
*L
) const {
208 return CurLexer
.get() == L
;
211 /// getCurrentLexer - Return the current file lexer being lexed from. Note
212 /// that this ignores any potentially active macro expansions and _Pragma
213 /// expansions going on at the time.
214 Lexer
*getCurrentFileLexer() const;
216 /// getPPCallbacks/setPPCallbacks - Accessors for preprocessor callbacks.
217 /// Note that this class takes ownership of any PPCallbacks object given to
219 PPCallbacks
*getPPCallbacks() const { return Callbacks
; }
220 void setPPCallbacks(PPCallbacks
*C
) {
225 /// getMacroInfo - Given an identifier, return the MacroInfo it is #defined to
226 /// or null if it isn't #define'd.
227 MacroInfo
*getMacroInfo(IdentifierInfo
*II
) const {
228 return II
->hasMacroDefinition() ? Macros
.find(II
)->second
: 0;
231 /// setMacroInfo - Specify a macro for this identifier.
233 void setMacroInfo(IdentifierInfo
*II
, MacroInfo
*MI
);
235 const std::string
&getPredefines() const { return Predefines
; }
236 /// setPredefines - Set the predefines for this Preprocessor. These
237 /// predefines are automatically injected when parsing the main file.
238 void setPredefines(const char *P
) { Predefines
= P
; }
239 void setPredefines(const std::string
&P
) { Predefines
= P
; }
241 /// getIdentifierInfo - Return information about the specified preprocessor
242 /// identifier token. The version of this method that takes two character
243 /// pointers is preferred unless the identifier is already available as a
244 /// string (this avoids allocation and copying of memory to construct an
246 IdentifierInfo
*getIdentifierInfo(const char *NameStart
,
247 const char *NameEnd
) {
248 return &Identifiers
.get(NameStart
, NameEnd
);
250 IdentifierInfo
*getIdentifierInfo(const char *NameStr
) {
251 return getIdentifierInfo(NameStr
, NameStr
+strlen(NameStr
));
254 /// AddPragmaHandler - Add the specified pragma handler to the preprocessor.
255 /// If 'Namespace' is non-null, then it is a token required to exist on the
256 /// pragma line before the pragma string starts, e.g. "STDC" or "GCC".
257 void AddPragmaHandler(const char *Namespace
, PragmaHandler
*Handler
);
259 /// RemovePragmaHandler - Remove the specific pragma handler from
260 /// the preprocessor. If \arg Namespace is non-null, then it should
261 /// be the namespace that \arg Handler was added to. It is an error
262 /// to remove a handler that has not been registered.
263 void RemovePragmaHandler(const char *Namespace
, PragmaHandler
*Handler
);
265 /// EnterMainSourceFile - Enter the specified FileID as the main source file,
266 /// which implicitly adds the builtin defines etc.
267 void EnterMainSourceFile();
269 /// EnterSourceFile - Add a source file to the top of the include stack and
270 /// start lexing tokens from it instead of the current buffer. If isMainFile
271 /// is true, this is the main file for the translation unit.
272 void EnterSourceFile(unsigned CurFileID
, const DirectoryLookup
*Dir
);
274 /// EnterMacro - Add a Macro to the top of the include stack and start lexing
275 /// tokens from it instead of the current buffer. Args specifies the
276 /// tokens input to a function-like macro.
277 void EnterMacro(Token
&Identifier
, MacroArgs
*Args
);
279 /// EnterTokenStream - Add a "macro" context to the top of the include stack,
280 /// which will cause the lexer to start returning the specified tokens.
282 /// If DisableMacroExpansion is true, tokens lexed from the token stream will
283 /// not be subject to further macro expansion. Otherwise, these tokens will
284 /// be re-macro-expanded when/if expansion is enabled.
286 /// If OwnsTokens is false, this method assumes that the specified stream of
287 /// tokens has a permanent owner somewhere, so they do not need to be copied.
288 /// If it is true, it assumes the array of tokens is allocated with new[] and
291 void EnterTokenStream(const Token
*Toks
, unsigned NumToks
,
292 bool DisableMacroExpansion
, bool OwnsTokens
);
294 /// RemoveTopOfLexerStack - Pop the current lexer/macro exp off the top of the
295 /// lexer stack. This should only be used in situations where the current
296 /// state of the top-of-stack lexer is known.
297 void RemoveTopOfLexerStack();
299 /// EnableBacktrackAtThisPos - From the point that this method is called, and
300 /// until CommitBacktrackedTokens() or Backtrack() is called, the Preprocessor
301 /// keeps track of the lexed tokens so that a subsequent Backtrack() call will
302 /// make the Preprocessor re-lex the same tokens.
304 /// Nested backtracks are allowed, meaning that EnableBacktrackAtThisPos can
305 /// be called multiple times and CommitBacktrackedTokens/Backtrack calls will
306 /// be combined with the EnableBacktrackAtThisPos calls in reverse order.
308 /// NOTE: *DO NOT* forget to call either CommitBacktrackedTokens or Backtrack
309 /// at some point after EnableBacktrackAtThisPos. If you don't, caching of
310 /// tokens will continue indefinitely.
312 void EnableBacktrackAtThisPos();
314 /// CommitBacktrackedTokens - Disable the last EnableBacktrackAtThisPos call.
315 void CommitBacktrackedTokens();
317 /// Backtrack - Make Preprocessor re-lex the tokens that were lexed since
318 /// EnableBacktrackAtThisPos() was previously called.
321 /// isBacktrackEnabled - True if EnableBacktrackAtThisPos() was called and
322 /// caching of tokens is on.
323 bool isBacktrackEnabled() const { return !BacktrackPositions
.empty(); }
325 /// Lex - To lex a token from the preprocessor, just pull a token from the
326 /// current lexer or macro object.
327 void Lex(Token
&Result
) {
329 CurLexer
->Lex(Result
);
330 else if (CurPTHLexer
)
331 CurPTHLexer
->Lex(Result
);
332 else if (CurTokenLexer
)
333 CurTokenLexer
->Lex(Result
);
338 /// LexNonComment - Lex a token. If it's a comment, keep lexing until we get
339 /// something not a comment. This is useful in -E -C mode where comments
340 /// would foul up preprocessor directive handling.
341 void LexNonComment(Token
&Result
) {
344 while (Result
.getKind() == tok::comment
);
347 /// LexUnexpandedToken - This is just like Lex, but this disables macro
348 /// expansion of identifier tokens.
349 void LexUnexpandedToken(Token
&Result
) {
350 // Disable macro expansion.
351 bool OldVal
= DisableMacroExpansion
;
352 DisableMacroExpansion
= true;
357 DisableMacroExpansion
= OldVal
;
360 /// LookAhead - This peeks ahead N tokens and returns that token without
361 /// consuming any tokens. LookAhead(0) returns the next token that would be
362 /// returned by Lex(), LookAhead(1) returns the token after it, etc. This
363 /// returns normal tokens after phase 5. As such, it is equivalent to using
364 /// 'Lex', not 'LexUnexpandedToken'.
365 const Token
&LookAhead(unsigned N
) {
366 if (CachedLexPos
+ N
< CachedTokens
.size())
367 return CachedTokens
[CachedLexPos
+N
];
369 return PeekAhead(N
+1);
372 /// EnterToken - Enters a token in the token stream to be lexed next. If
373 /// BackTrack() is called afterwards, the token will remain at the insertion
375 void EnterToken(const Token
&Tok
) {
376 EnterCachingLexMode();
377 CachedTokens
.insert(CachedTokens
.begin()+CachedLexPos
, Tok
);
380 /// AnnotateCachedTokens - We notify the Preprocessor that if it is caching
381 /// tokens (because backtrack is enabled) it should replace the most recent
382 /// cached tokens with the given annotation token. This function has no effect
383 /// if backtracking is not enabled.
385 /// Note that the use of this function is just for optimization; so that the
386 /// cached tokens don't get re-parsed and re-resolved after a backtrack is
388 void AnnotateCachedTokens(const Token
&Tok
) {
389 assert(Tok
.isAnnotationToken() && "Expected annotation token");
390 if (CachedLexPos
!= 0 && InCachingLexMode())
391 AnnotatePreviousCachedTokens(Tok
);
394 /// Diag - Forwarding function for diagnostics. This emits a diagnostic at
395 /// the specified Token's location, translating the token's start
396 /// position in the current buffer into a SourcePosition object for rendering.
397 DiagnosticInfo
Diag(SourceLocation Loc
, unsigned DiagID
);
398 DiagnosticInfo
Diag(const Token
&Tok
, unsigned DiagID
);
400 /// getSpelling() - Return the 'spelling' of the Tok token. The spelling of a
401 /// token is the characters used to represent the token in the source file
402 /// after trigraph expansion and escaped-newline folding. In particular, this
403 /// wants to get the true, uncanonicalized, spelling of things like digraphs
405 std::string
getSpelling(const Token
&Tok
) const;
407 /// getSpelling - This method is used to get the spelling of a token into a
408 /// preallocated buffer, instead of as an std::string. The caller is required
409 /// to allocate enough space for the token, which is guaranteed to be at least
410 /// Tok.getLength() bytes long. The length of the actual result is returned.
412 /// Note that this method may do two possible things: it may either fill in
413 /// the buffer specified with characters, or it may *change the input pointer*
414 /// to point to a constant buffer with the data already in it (avoiding a
415 /// copy). The caller is not allowed to modify the returned buffer pointer
416 /// if an internal buffer is returned.
417 unsigned getSpelling(const Token
&Tok
, const char *&Buffer
) const;
420 /// CreateString - Plop the specified string into a scratch buffer and return
421 /// a location for it. If specified, the source location provides a source
422 /// location for the token.
423 SourceLocation
CreateString(const char *Buf
, unsigned Len
,
424 SourceLocation SourceLoc
= SourceLocation());
426 /// DumpToken - Print the token to stderr, used for debugging.
428 void DumpToken(const Token
&Tok
, bool DumpFlags
= false) const;
429 void DumpLocation(SourceLocation Loc
) const;
430 void DumpMacro(const MacroInfo
&MI
) const;
432 /// AdvanceToTokenCharacter - Given a location that specifies the start of a
433 /// token, return a new location that specifies a character within the token.
434 SourceLocation
AdvanceToTokenCharacter(SourceLocation TokStart
,unsigned Char
);
436 /// IncrementPasteCounter - Increment the counters for the number of token
437 /// paste operations performed. If fast was specified, this is a 'fast paste'
440 void IncrementPasteCounter(bool isFast
) {
449 /// HandleMicrosoftCommentPaste - When the macro expander pastes together a
450 /// comment (/##/) in microsoft mode, this method handles updating the current
451 /// state, returning the token on the next source line.
452 void HandleMicrosoftCommentPaste(Token
&Tok
);
454 //===--------------------------------------------------------------------===//
455 // Preprocessor callback methods. These are invoked by a lexer as various
456 // directives and events are found.
458 /// LookUpIdentifierInfo - Given a tok::identifier token, look up the
459 /// identifier information for the token and install it into the token.
460 IdentifierInfo
*LookUpIdentifierInfo(Token
&Identifier
,
461 const char *BufPtr
= 0);
463 /// HandleIdentifier - This callback is invoked when the lexer reads an
464 /// identifier and has filled in the tokens IdentifierInfo member. This
465 /// callback potentially macro expands it or turns it into a named token (like
467 void HandleIdentifier(Token
&Identifier
);
470 /// HandleEndOfFile - This callback is invoked when the lexer hits the end of
471 /// the current file. This either returns the EOF token and returns true, or
472 /// pops a level off the include stack and returns false, at which point the
473 /// client should call lex again.
474 bool HandleEndOfFile(Token
&Result
, bool isEndOfMacro
= false);
476 /// HandleEndOfTokenLexer - This callback is invoked when the current
477 /// TokenLexer hits the end of its token stream.
478 bool HandleEndOfTokenLexer(Token
&Result
);
480 /// HandleDirective - This callback is invoked when the lexer sees a # token
481 /// at the start of a line. This consumes the directive, modifies the
482 /// lexer/preprocessor state, and advances the lexer(s) so that the next token
483 /// read is the correct one.
484 void HandleDirective(Token
&Result
);
486 /// CheckEndOfDirective - Ensure that the next token is a tok::eom token. If
487 /// not, emit a diagnostic and consume up until the eom.
488 void CheckEndOfDirective(const char *Directive
);
491 void PushIncludeMacroStack() {
492 IncludeMacroStack
.push_back(IncludeStackInfo(CurLexer
.take(),
495 CurTokenLexer
.take(),
500 void PopIncludeMacroStack() {
501 CurLexer
.reset(IncludeMacroStack
.back().TheLexer
);
502 CurPTHLexer
.reset(IncludeMacroStack
.back().ThePTHLexer
);
503 CurPPLexer
= IncludeMacroStack
.back().ThePPLexer
;
504 CurTokenLexer
.reset(IncludeMacroStack
.back().TheTokenLexer
);
505 CurDirLookup
= IncludeMacroStack
.back().TheDirLookup
;
506 IncludeMacroStack
.pop_back();
509 /// isInPrimaryFile - Return true if we're in the top-level file, not in a
511 bool isInPrimaryFile() const;
513 /// DiscardUntilEndOfDirective - Read and discard all tokens remaining on the
514 /// current line until the tok::eom token is found.
515 void DiscardUntilEndOfDirective();
517 /// ReadMacroName - Lex and validate a macro name, which occurs after a
518 /// #define or #undef. This emits a diagnostic, sets the token kind to eom,
519 /// and discards the rest of the macro line if the macro name is invalid.
520 void ReadMacroName(Token
&MacroNameTok
, char isDefineUndef
= 0);
522 /// ReadMacroDefinitionArgList - The ( starting an argument list of a macro
523 /// definition has just been read. Lex the rest of the arguments and the
524 /// closing ), updating MI with what we learn. Return true if an error occurs
525 /// parsing the arg list.
526 bool ReadMacroDefinitionArgList(MacroInfo
*MI
);
528 /// SkipExcludedConditionalBlock - We just read a #if or related directive and
529 /// decided that the subsequent tokens are in the #if'd out portion of the
530 /// file. Lex the rest of the file, until we see an #endif. If
531 /// FoundNonSkipPortion is true, then we have already emitted code for part of
532 /// this #if directive, so #else/#elif blocks should never be entered. If
533 /// FoundElse is false, then #else directives are ok, if not, then we have
534 /// already seen one so a #else directive is a duplicate. When this returns,
535 /// the caller can lex the first valid token.
536 void SkipExcludedConditionalBlock(SourceLocation IfTokenLoc
,
537 bool FoundNonSkipPortion
, bool FoundElse
);
539 /// EvaluateDirectiveExpression - Evaluate an integer constant expression that
540 /// may occur after a #if or #elif directive and return it as a bool. If the
541 /// expression is equivalent to "!defined(X)" return X in IfNDefMacro.
542 bool EvaluateDirectiveExpression(IdentifierInfo
*&IfNDefMacro
);
544 /// RegisterBuiltinPragmas - Install the standard preprocessor pragmas:
545 /// #pragma GCC poison/system_header/dependency and #pragma once.
546 void RegisterBuiltinPragmas();
548 /// RegisterBuiltinMacros - Register builtin macros, such as __LINE__ with the
549 /// identifier table.
550 void RegisterBuiltinMacros();
551 IdentifierInfo
*RegisterBuiltinMacro(const char *Name
);
553 /// HandleMacroExpandedIdentifier - If an identifier token is read that is to
554 /// be expanded as a macro, handle it and return the next token as 'Tok'. If
555 /// the macro should not be expanded return true, otherwise return false.
556 bool HandleMacroExpandedIdentifier(Token
&Tok
, MacroInfo
*MI
);
558 /// isNextPPTokenLParen - Determine whether the next preprocessor token to be
559 /// lexed is a '('. If so, consume the token and return true, if not, this
560 /// method should have no observable side-effect on the lexed tokens.
561 bool isNextPPTokenLParen();
563 /// ReadFunctionLikeMacroArgs - After reading "MACRO(", this method is
564 /// invoked to read all of the formal arguments specified for the macro
565 /// invocation. This returns null on error.
566 MacroArgs
*ReadFunctionLikeMacroArgs(Token
&MacroName
, MacroInfo
*MI
);
568 /// ExpandBuiltinMacro - If an identifier token is read that is to be expanded
569 /// as a builtin macro, handle it and return the next token as 'Tok'.
570 void ExpandBuiltinMacro(Token
&Tok
);
572 /// Handle_Pragma - Read a _Pragma directive, slice it up, process it, then
573 /// return the first token after the directive. The _Pragma token has just
574 /// been read into 'Tok'.
575 void Handle_Pragma(Token
&Tok
);
578 /// EnterSourceFileWithLexer - Add a lexer to the top of the include stack and
579 /// start lexing tokens from it instead of the current buffer.
580 void EnterSourceFileWithLexer(Lexer
*TheLexer
, const DirectoryLookup
*Dir
);
582 /// GetIncludeFilenameSpelling - Turn the specified lexer token into a fully
583 /// checked and spelled filename, e.g. as an operand of #include. This returns
584 /// true if the input filename was in <>'s or false if it was in ""'s. The
585 /// caller is expected to provide a buffer that is large enough to hold the
586 /// spelling of the filename, but is also expected to handle the case when
587 /// this method decides to use a different buffer.
588 bool GetIncludeFilenameSpelling(SourceLocation Loc
,
589 const char *&BufStart
, const char *&BufEnd
);
591 /// LookupFile - Given a "foo" or <foo> reference, look up the indicated file,
592 /// return null on failure. isAngled indicates whether the file reference is
593 /// for system #include's or not (i.e. using <> instead of "").
594 const FileEntry
*LookupFile(const char *FilenameStart
,const char *FilenameEnd
,
595 bool isAngled
, const DirectoryLookup
*FromDir
,
596 const DirectoryLookup
*&CurDir
);
598 //===--------------------------------------------------------------------===//
600 void CachingLex(Token
&Result
);
601 bool InCachingLexMode() const { return CurLexer
== 0 && CurTokenLexer
== 0; }
602 void EnterCachingLexMode();
603 void ExitCachingLexMode() {
604 if (InCachingLexMode())
605 RemoveTopOfLexerStack();
607 const Token
&PeekAhead(unsigned N
);
608 void AnnotatePreviousCachedTokens(const Token
&Tok
);
610 //===--------------------------------------------------------------------===//
611 /// Handle*Directive - implement the various preprocessor directives. These
612 /// should side-effect the current preprocessor object so that the next call
613 /// to Lex() will return the appropriate token next.
615 void HandleUserDiagnosticDirective(Token
&Tok
, bool isWarning
);
616 void HandleIdentSCCSDirective(Token
&Tok
);
619 void HandleIncludeDirective(Token
&Tok
,
620 const DirectoryLookup
*LookupFrom
= 0,
621 bool isImport
= false);
622 void HandleIncludeNextDirective(Token
&Tok
);
623 void HandleImportDirective(Token
&Tok
);
626 void HandleDefineDirective(Token
&Tok
);
627 void HandleUndefDirective(Token
&Tok
);
628 // HandleAssertDirective(Token &Tok);
629 // HandleUnassertDirective(Token &Tok);
631 // Conditional Inclusion.
632 void HandleIfdefDirective(Token
&Tok
, bool isIfndef
,
633 bool ReadAnyTokensBeforeDirective
);
634 void HandleIfDirective(Token
&Tok
, bool ReadAnyTokensBeforeDirective
);
635 void HandleEndifDirective(Token
&Tok
);
636 void HandleElseDirective(Token
&Tok
);
637 void HandleElifDirective(Token
&Tok
);
640 void HandlePragmaDirective();
642 void HandlePragmaOnce(Token
&OnceTok
);
643 void HandlePragmaMark();
644 void HandlePragmaPoison(Token
&PoisonTok
);
645 void HandlePragmaSystemHeader(Token
&SysHeaderTok
);
646 void HandlePragmaDependency(Token
&DependencyTok
);
649 /// PreprocessorFactory - A generic factory interface for lazily creating
650 /// Preprocessor objects on-demand when they are needed.
651 class PreprocessorFactory
{
653 virtual ~PreprocessorFactory();
654 virtual Preprocessor
* CreatePreprocessor() = 0;
657 } // end namespace clang