Diffstat (limited to 'clang/include/clang/Tooling/Syntax/Tokens.h')
 clang/include/clang/Tooling/Syntax/Tokens.h | 26 +++++++++++++++++++++++---
 1 file changed, 23 insertions(+), 3 deletions(-)
diff --git a/clang/include/clang/Tooling/Syntax/Tokens.h b/clang/include/clang/Tooling/Syntax/Tokens.h
index 301432d3888b..a210815d49f9 100644
--- a/clang/include/clang/Tooling/Syntax/Tokens.h
+++ b/clang/include/clang/Tooling/Syntax/Tokens.h
@@ -78,6 +78,10 @@ struct FileRange {
/// Gets the substring that this FileRange refers to.
llvm::StringRef text(const SourceManager &SM) const;
+ /// Convert to the clang range. The returned range is always a char range,
+ /// never a token range.
+ CharSourceRange toCharRange(const SourceManager &SM) const;
+
friend bool operator==(const FileRange &L, const FileRange &R) {
return std::tie(L.File, L.Begin, L.End) == std::tie(R.File, R.Begin, R.End);
}
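
A minimal usage sketch for the new FileRange::toCharRange() helper; the wrapper name fileRangeToCharRange is hypothetical, and a valid syntax::FileRange plus its SourceManager are assumed to be available:

    #include "clang/Basic/SourceManager.h"
    #include "clang/Tooling/Syntax/Tokens.h"

    // Turn a FileRange into a CharSourceRange, e.g. to highlight a range in a
    // diagnostic. The result is always a char range, never a token range.
    static clang::CharSourceRange
    fileRangeToCharRange(const clang::syntax::FileRange &R,
                         const clang::SourceManager &SM) {
      return R.toCharRange(SM);
    }
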
@@ -175,6 +179,7 @@ public:
/// All tokens produced by the preprocessor after all macro replacements,
/// directives, etc. Source locations found in the clang AST will always
/// point to one of these tokens.
+ /// Tokens are in TU order (per SourceManager::isBeforeInTranslationUnit()).
/// FIXME: figure out how to handle token splitting, e.g. '>>' can be split
/// into two '>' tokens by the parser. However, TokenBuffer currently
/// keeps it as a single '>>' token.
@@ -182,6 +187,10 @@ public:
return ExpandedTokens;
}
+ /// Returns the subrange of expandedTokens() corresponding to the closed
+ /// token range R.
+ llvm::ArrayRef<syntax::Token> expandedTokens(SourceRange R) const;
+
/// Find the subrange of spelled tokens that produced the corresponding \p
/// Expanded tokens.
///
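
A sketch of how the new expandedTokens(SourceRange) overload might be used to recover the expanded tokens covered by an AST node; expandedTokensOf is a hypothetical helper, and the TokenBuffer is assumed to have been collected for the same translation unit as the AST:

    #include "clang/AST/Stmt.h"
    #include "clang/Tooling/Syntax/Tokens.h"

    // All expanded tokens covered by the statement's (closed) source range.
    static llvm::ArrayRef<clang::syntax::Token>
    expandedTokensOf(const clang::Stmt &S,
                     const clang::syntax::TokenBuffer &Tokens) {
      return Tokens.expandedTokens(S.getSourceRange());
    }
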
@@ -231,9 +240,9 @@ public:
/// Lexed tokens of a file before preprocessing. E.g. for the following input
/// #define DECL(name) int name = 10
/// DECL(a);
- /// spelledTokens() returns {"#", "define", "DECL", "(", "name", ")", "eof"}.
- /// FIXME: we do not yet store tokens of directives, like #include, #define,
- /// #pragma, etc.
+ /// spelledTokens() returns
+ /// {"#", "define", "DECL", "(", "name", ")", "int", "name", "=", "10",
+ /// "DECL", "(", "a", ")", ";"}
llvm::ArrayRef<syntax::Token> spelledTokens(FileID FID) const;
/// Get all tokens that expand a macro in \p FID. For the following input
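
A small sketch of iterating spelled tokens; dumpSpelledTokens is a hypothetical helper, and the TokenBuffer and SourceManager are assumed to belong to the same compilation. For the DECL example above it would print the directive tokens and the macro invocation, matching the updated comment:

    #include "clang/Basic/SourceManager.h"
    #include "clang/Tooling/Syntax/Tokens.h"
    #include "llvm/Support/raw_ostream.h"

    // Print every spelled token of the main file, before preprocessing.
    static void dumpSpelledTokens(const clang::syntax::TokenBuffer &Tokens,
                                  const clang::SourceManager &SM) {
      for (const clang::syntax::Token &T :
           Tokens.spelledTokens(SM.getMainFileID()))
        llvm::outs() << T.text(SM) << "\n";
    }
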
@@ -304,6 +313,17 @@ private:
const SourceManager *SourceMgr;
};
+/// The spelled tokens that overlap or touch a spelling location Loc.
+/// This always returns 0-2 tokens.
+llvm::ArrayRef<syntax::Token>
+spelledTokensTouching(SourceLocation Loc, const syntax::TokenBuffer &Tokens);
+
+/// The identifier token that overlaps or touches a spelling location Loc.
+/// If there is none, returns nullptr.
+const syntax::Token *
+spelledIdentifierTouching(SourceLocation Loc,
+ const syntax::TokenBuffer &Tokens);
+
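
A minimal sketch of using the two new helpers to find the token under a cursor, as an editor tool might; tokenAtCursor is a hypothetical wrapper, and Loc is assumed to be a spelling location inside a file covered by Tokens:

    #include "clang/Basic/SourceLocation.h"
    #include "clang/Tooling/Syntax/Tokens.h"

    // Prefer an identifier touching the cursor; otherwise fall back to any
    // touching spelled token (there are at most two).
    static const clang::syntax::Token *
    tokenAtCursor(clang::SourceLocation Loc,
                  const clang::syntax::TokenBuffer &Tokens) {
      if (const clang::syntax::Token *Id =
              clang::syntax::spelledIdentifierTouching(Loc, Tokens))
        return Id;
      llvm::ArrayRef<clang::syntax::Token> Touching =
          clang::syntax::spelledTokensTouching(Loc, Tokens);
      return Touching.empty() ? nullptr : &Touching.front();
    }
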
/// Lex the text buffer, corresponding to \p FID, in raw mode and record the
/// resulting spelled tokens. Does minimal post-processing on raw identifiers,
/// setting the appropriate token kind (instead of the raw_identifier reported