h *= m;
h ^= h >> r;
return h;
-}
+}
#undef BIG_CONSTANT
// static
uint64_t BuildLog::LogEntry::HashCommand(StringPiece command) {
- return MurmurHash64A(command.str_, command.len_);
+ return MurmurHash64A(command.str(), command.len());
}
BuildLog::BuildLog()
if (!is_target) {
ins_.push_back(StringPiece(filename, len));
- } else if (!out_.str_) {
+ } else if (!out_.str()) {
out_ = StringPiece(filename, len);
} else if (out_ != StringPiece(filename, len)) {
*err = "depfile has multiple output paths.";
// Although the algorithm is typically described using an m x n
// array, only two rows are used at a time, so this implementation
// just keeps two separate vectors for those two rows.
- int m = s1.len_;
- int n = s2.len_;
+ int m = s1.len();
+ int n = s2.len();
std::vector<int> previous(n + 1);
std::vector<int> current(n + 1);
for (int x = 1; x <= n; ++x) {
if (allow_replacements) {
- current[x] = min(previous[x-1] + (s1.str_[y-1] == s2.str_[x-1] ? 0 : 1),
- min(current[x-1], previous[x])+1);
+ current[x] = min(previous[x-1] + (s1.str()[y-1] == s2.str()[x-1] ?
+ 0 : 1), min(current[x-1], previous[x]) + 1);
}
else {
- if (s1.str_[y-1] == s2.str_[x-1])
+ if (s1.str()[y-1] == s2.str()[x-1])
current[x] = previous[x-1];
else
current[x] = min(current[x-1], previous[x]) + 1;
void EvalString::AddText(StringPiece text) {
// Add it to the end of an existing RAW token if possible.
if (!parsed_.empty() && parsed_.back().second == RAW) {
- parsed_.back().first.append(text.str_, text.len_);
+ parsed_.back().first.append(text.str(), text.len());
} else {
parsed_.push_back(make_pair(text.AsString(), RAW));
}
// Add all its in-edges.
for (vector<StringPiece>::iterator i = depfile.ins_.begin();
     i != depfile.ins_.end(); ++i, ++implicit_dep) {
-    if (!CanonicalizePath(const_cast<char*>(i->str_), &i->len_, err))
+    // CanonicalizePath shortens the path in place and reports the new
+    // length through its out-parameter.  Since len_ is now private we
+    // must round-trip through a local and write the result back, or the
+    // StringPiece keeps the stale pre-canonicalization length.
+    int length = i->len();
+    if (!CanonicalizePath(const_cast<char*>(i->str()), &length, err))
      return false;
+    *i = StringPiece(i->str(), length);
    Node* node = state->GetNode(*i);
template<>
struct hash<StringPiece> {
size_t operator()(StringPiece key) const {
- return MurmurHash2(key.str_, key.len_);
+ return MurmurHash2(key.str(), key.len());
}
};
bool Lexer::Error(const string& message, string* err) {
// Compute line/column.
int line = 1;
- const char* context = input_.str_;
- for (const char* p = input_.str_; p < last_token_; ++p) {
+ const char* context = input_.str();
+ for (const char* p = input_.str(); p < last_token_; ++p) {
if (*p == '\n') {
++line;
context = p + 1;
void Lexer::Start(StringPiece filename, StringPiece input) {
filename_ = filename;
input_ = input;
- ofs_ = input_.str_;
+ ofs_ = input_.str();
last_token_ = NULL;
}
return len_ ? string(str_, len_) : string();
}
+ const char* str() const { return str_; }
+ int len() const { return len_; }
+
+ private:
const char* str_;
int len_;
};