    return token_.type_;
  token_.pos_ = cur_;
- if (cur_indent_ == -1) {
+ if (whitespace_significant_ && cur_indent_ == -1) {
    cur_indent_ = cur_ - cur_line_;
    if (cur_indent_ != last_indent_) {
      if (cur_indent_ > last_indent_) {
token_.Clear();
}
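+// Whitespace (indentation) is not significant when parsing makefile-style output.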
+MakefileParser::MakefileParser() : tokenizer_(false) {}
+
bool MakefileParser::Parse(const string& input, string* err) {
  tokenizer_.Start(input.data(), input.data() + input.size());
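+ // Skip any leading whitespace before reading the output filename.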
+ tokenizer_.SkipWhitespace(true);
+
  if (!tokenizer_.ReadIdent(&out_))
    return tokenizer_.ErrorExpected("output filename", err);
  if (!tokenizer_.ExpectToken(Token::COLON, err))
}
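+// Ninja manifest syntax is indentation-sensitive, so whitespace is significant.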
ManifestParser::ManifestParser(State* state, FileReader* file_reader)
- : state_(state), file_reader_(file_reader) {
+ : state_(state), file_reader_(file_reader), tokenizer_(true) {
  env_ = &state->bindings_;
}
bool ManifestParser::Load(const string& filename, string* err) {
};
struct Tokenizer {
- Tokenizer()
- : token_(Token::NONE), line_number_(1),
+ Tokenizer(bool whitespace_significant)
+ : whitespace_significant_(whitespace_significant),
+ token_(Token::NONE), line_number_(1),
      last_indent_(0), cur_indent_(-1) {}
  void Start(const char* start, const char* end);
  Token::Type PeekToken();
  void ConsumeToken();
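+ // When true, changes in leading whitespace are tracked as indentation.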
+ bool whitespace_significant_;
+
  const char* cur_;
  const char* end_;
};
struct MakefileParser {
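+  // Sets up tokenizer_ with whitespace_significant=false.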
+ MakefileParser();
  bool Parse(const string& input, string* err);
  Tokenizer tokenizer_;