diff options
Diffstat (limited to 'lib/Tooling/Syntax')
-rw-r--r-- | lib/Tooling/Syntax/BuildTree.cpp | 12
-rw-r--r-- | lib/Tooling/Syntax/Tokens.cpp | 17
2 files changed, 24 insertions, 5 deletions
diff --git a/lib/Tooling/Syntax/BuildTree.cpp b/lib/Tooling/Syntax/BuildTree.cpp index 03c439c59e39d..a0b653df133d4 100644 --- a/lib/Tooling/Syntax/BuildTree.cpp +++ b/lib/Tooling/Syntax/BuildTree.cpp @@ -58,8 +58,11 @@ public: /// Finish building the tree and consume the root node. syntax::TranslationUnit *finalize() && { auto Tokens = Arena.tokenBuffer().expandedTokens(); + assert(!Tokens.empty()); + assert(Tokens.back().kind() == tok::eof); + // Build the root of the tree, consuming all the children. - Pending.foldChildren(Tokens, + Pending.foldChildren(Tokens.drop_back(), new (Arena.allocator()) syntax::TranslationUnit); return cast<syntax::TranslationUnit>(std::move(Pending).finalize()); @@ -96,10 +99,11 @@ private: /// Ensures that added nodes properly nest and cover the whole token stream. struct Forest { Forest(syntax::Arena &A) { - // FIXME: do not add 'eof' to the tree. - + assert(!A.tokenBuffer().expandedTokens().empty()); + assert(A.tokenBuffer().expandedTokens().back().kind() == tok::eof); // Create all leaf nodes. - for (auto &T : A.tokenBuffer().expandedTokens()) + // Note that we do not have 'eof' in the tree. 
+ for (auto &T : A.tokenBuffer().expandedTokens().drop_back()) Trees.insert(Trees.end(), {&T, NodeAndRole{new (A.allocator()) syntax::Leaf(&T)}}); } diff --git a/lib/Tooling/Syntax/Tokens.cpp b/lib/Tooling/Syntax/Tokens.cpp index d82dc1f35c944..a2c3bc137d6ba 100644 --- a/lib/Tooling/Syntax/Tokens.cpp +++ b/lib/Tooling/Syntax/Tokens.cpp @@ -232,6 +232,21 @@ TokenBuffer::expansionStartingAt(const syntax::Token *Spelled) const { return E; } +std::vector<const syntax::Token *> +TokenBuffer::macroExpansions(FileID FID) const { + auto FileIt = Files.find(FID); + assert(FileIt != Files.end() && "file not tracked by token buffer"); + auto &File = FileIt->second; + std::vector<const syntax::Token *> Expansions; + auto &Spelled = File.SpelledTokens; + for (auto Mapping : File.Mappings) { + const syntax::Token *Token = &Spelled[Mapping.BeginSpelled]; + if (Token->kind() == tok::TokenKind::identifier) + Expansions.push_back(Token); + } + return Expansions; +} + std::vector<syntax::Token> syntax::tokenize(FileID FID, const SourceManager &SM, const LangOptions &LO) { std::vector<syntax::Token> Tokens; @@ -321,7 +336,7 @@ TokenCollector::TokenCollector(Preprocessor &PP) : PP(PP) { }); // And locations of macro calls, to properly recover boundaries of those in // case of empty expansions. - auto CB = llvm::make_unique<CollectPPExpansions>(*this); + auto CB = std::make_unique<CollectPPExpansions>(*this); this->Collector = CB.get(); PP.addPPCallbacks(std::move(CB)); } |