#include "Protocol.h"
#include "Selection.h"
#include "SourceCode.h"
+#include "clang-pseudo/Bracket.h"
+#include "clang-pseudo/DirectiveTree.h"
+#include "clang-pseudo/Token.h"
#include "clang/AST/DeclBase.h"
#include "clang/Basic/SourceLocation.h"
#include "clang/Basic/SourceManager.h"
return collectFoldingRanges(SyntaxTree, TM);
}
+// FIXME(kirillbobyrev): Collect comments, PP conditional regions, includes and
+// other code regions (e.g. public/private/protected sections of classes,
+// control flow statement bodies).
+// Related issue: https://github.com/clangd/clangd/issues/310
+// Computes folding ranges for Code without a compile command or AST: the
+// pseudoparser lexes the source, resolves PP conditionals, strips directives,
+// pairs brackets, and every bracket pair spanning more than one line becomes
+// a folding range.
+llvm::Expected<std::vector<FoldingRange>>
+getFoldingRanges(const std::string &Code) {
+  auto OrigStream = clang::pseudo::lex(Code, clang::pseudo::genericLangOpts());
+
+  auto DirectiveStructure = clang::pseudo::DirectiveTree::parse(OrigStream);
+  clang::pseudo::chooseConditionalBranches(DirectiveStructure, OrigStream);
+
+  // FIXME: Provide ranges in the disabled-PP regions as well.
+  auto Preprocessed = DirectiveStructure.stripDirectives(OrigStream);
+
+  auto ParseableStream =
+      clang::pseudo::cook(Preprocessed, clang::pseudo::genericLangOpts());
+  clang::pseudo::pairBrackets(ParseableStream);
+
+  std::vector<FoldingRange> Result;
+  for (const auto &Tok : ParseableStream.tokens()) {
+    if (auto *Paired = Tok.pair()) {
+      // Process only the token at the start of the range. Avoid ranges on a
+      // single line.
+      if (Tok.Line < Paired->Line) {
+        // Map both brackets back to byte offsets in the original source via
+        // OriginalIndex into the raw (uncooked) token stream.
+        Position Start = offsetToPosition(
+            Code,
+            OrigStream.tokens()[Tok.OriginalIndex].text().data() - Code.data());
+        Position End = offsetToPosition(
+            Code, OrigStream.tokens()[Paired->OriginalIndex].text().data() -
+                      Code.data());
+        FoldingRange FR;
+        FR.startLine = Start.line;
+        // Start folding just after the opening bracket and stop just before
+        // the closing one, so both brackets stay visible when folded.
+        FR.startCharacter = Start.character + 1;
+        FR.endLine = End.line;
+        FR.endCharacter = End.character;
+        Result.push_back(FR);
+      }
+    }
+  }
+  return Result;
+}
+
} // namespace clangd
} // namespace clang
return arg.Line == (unsigned)Line && arg.Indent == (unsigned)Indent;
}
+// Matches a token whose OriginalIndex (its position in the stream this
+// stream was derived from) equals the expected index.
+MATCHER_P(originalIndex, index, "") {
+ return arg.OriginalIndex == (Token::Index)index;
+}
+
TEST(TokenTest, Lex) {
LangOptions Opts;
std::string Code = R"cpp(
Raw.tokens(),
ElementsAre(AllOf(token("one_\\\ntoken", tok::raw_identifier),
hasFlag(LexFlags::StartsPPLine),
- hasFlag(LexFlags::NeedsCleaning), lineIndent(1, 0)),
+ hasFlag(LexFlags::NeedsCleaning), lineIndent(1, 0),
+ originalIndex(0)),
AllOf(token("two", tok::raw_identifier),
hasFlag(LexFlags::StartsPPLine),
- Not(hasFlag(LexFlags::NeedsCleaning))),
+ Not(hasFlag(LexFlags::NeedsCleaning)),
+ originalIndex(1)),
AllOf(token("\\\ntokens", tok::raw_identifier),
Not(hasFlag(LexFlags::StartsPPLine)),
- hasFlag(LexFlags::NeedsCleaning))));
+ hasFlag(LexFlags::NeedsCleaning), originalIndex(2))));
TokenStream Cooked = cook(Raw, Opts);
EXPECT_THAT(
Cooked.tokens(),
- ElementsAre(AllOf(token("one_token", tok::identifier), lineIndent(1, 0)),
- token("two", tok::identifier),
- token("tokens", tok::identifier)));
+ ElementsAre(AllOf(token("one_token", tok::identifier), lineIndent(1, 0),
+ originalIndex(0)),
+ AllOf(token("two", tok::identifier), originalIndex(1)),
+ AllOf(token("tokens", tok::identifier), originalIndex(2))));
}
TEST(TokenTest, EncodedCharacters) {
)cpp";
TokenStream Cook = cook(lex(Code, Opts), Opts);
TokenStream Split = stripComments(Cook);
- EXPECT_THAT(Split.tokens(), ElementsAreArray({
- token(">", tok::greater),
- token(">", tok::greater),
- token(">", tok::greater),
- token(">", tok::greater),
- token(">>=", tok::greatergreaterequal),
- }));
+ EXPECT_THAT(Split.tokens(),
+ ElementsAre(AllOf(token(">", tok::greater), originalIndex(0)),
+ AllOf(token(">", tok::greater), originalIndex(0)),
+ // Token 1 and 2 are comments.
+ AllOf(token(">", tok::greater), originalIndex(3)),
+ AllOf(token(">", tok::greater), originalIndex(3)),
+ AllOf(token(">>=", tok::greatergreaterequal),
+ originalIndex(4))));
}
TEST(TokenTest, DropComments) {
)cpp";
TokenStream Raw = cook(lex(Code, Opts), Opts);
TokenStream Stripped = stripComments(Raw);
- EXPECT_THAT(Raw.tokens(),
- ElementsAreArray(
- {token("// comment", tok::comment), token("int", tok::kw_int),
- token("/*abc*/", tok::comment), token(";", tok::semi)}));
-
- EXPECT_THAT(Stripped.tokens(), ElementsAreArray({token("int", tok::kw_int),
- token(";", tok::semi)}));
+ EXPECT_THAT(
+ Raw.tokens(),
+ ElementsAre(AllOf(token("// comment", tok::comment), originalIndex(0)),
+ AllOf(token("int", tok::kw_int), originalIndex(1)),
+ AllOf(token("/*abc*/", tok::comment), originalIndex(2)),
+ AllOf(token(";", tok::semi), originalIndex(3))));
+
+ EXPECT_THAT(Stripped.tokens(),
+ ElementsAre(AllOf(token("int", tok::kw_int), originalIndex(1)),
+ AllOf(token(";", tok::semi), originalIndex(3))));
}
} // namespace