1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
7 #include <cmath> // For isfinite.
8 #include "src/builtins.h"
9 #include "src/code-stubs.h"
10 #include "src/contexts.h"
11 #include "src/conversions.h"
12 #include "src/hashmap.h"
13 #include "src/parser.h"
14 #include "src/property.h"
15 #include "src/property-details.h"
16 #include "src/scopes.h"
17 #include "src/string-stream.h"
18 #include "src/type-info.h"
23 // ----------------------------------------------------------------------------
24 // All the Accept member functions for each syntax tree node type.
26 #define DECL_ACCEPT(type) \
27 void type::Accept(AstVisitor* v) { v->Visit##type(this); }
28 AST_NODE_LIST(DECL_ACCEPT)
32 // ----------------------------------------------------------------------------
33 // Implementation of other node functionality.
36 bool Expression::IsSmiLiteral() const {
37 return IsLiteral() && AsLiteral()->value()->IsSmi();
41 bool Expression::IsStringLiteral() const {
42 return IsLiteral() && AsLiteral()->value()->IsString();
46 bool Expression::IsNullLiteral() const {
47 return IsLiteral() && AsLiteral()->value()->IsNull();
51 bool Expression::IsUndefinedLiteral(Isolate* isolate) const {
52 const VariableProxy* var_proxy = AsVariableProxy();
53 if (var_proxy == NULL) return false;
54 Variable* var = var_proxy->var();
55 // The global identifier "undefined" is immutable. Everything
56 // else could be reassigned.
57 return var != NULL && var->location() == Variable::UNALLOCATED &&
58 var_proxy->raw_name()->IsOneByteEqualTo("undefined");
// Constructs a proxy for an already-resolved Variable, copying its name,
// this-ness and interface. var_ starts NULL and is set by BindTo().
// NOTE(review): this copy is truncated — original line 67 (an initializer
// between is_this_ and interface_) and the constructor body/closing brace
// are missing; do not assume the initializer list shown here is complete.
62 VariableProxy::VariableProxy(Zone* zone, Variable* var, int position)
63 : Expression(zone, position),
64 name_(var->raw_name()),
65 var_(NULL), // Will be set by the call to BindTo.
66 is_this_(var->is_this()),
68 interface_(var->interface()),
69 variable_feedback_slot_(kInvalidFeedbackSlot) {
// Constructs an unresolved proxy from a raw name; resolution happens later
// via BindTo(). NOTE(review): truncated copy — several parameter lines
// (original 76-78) and initializers (80-83) plus the body are missing here.
74 VariableProxy::VariableProxy(Zone* zone,
75 const AstRawString* name,
79 : Expression(zone, position),
84 interface_(interface),
85 variable_feedback_slot_(kInvalidFeedbackSlot) {
89 void VariableProxy::BindTo(Variable* var) {
90 DCHECK(var_ == NULL); // must be bound only once
91 DCHECK(var != NULL); // must bind
92 DCHECK(!FLAG_harmony_modules || interface_->IsUnified(var->interface()));
93 DCHECK((is_this() && var->is_this()) || name_ == var->raw_name());
94 // Ideally CONST-ness should match. However, this is very hard to achieve
95 // because we don't know the exact semantics of conflicting (const and
96 // non-const) multiple variable declarations, const vars introduced via
97 // eval() etc. Const-ness and variable declarations are a complete mess
// Assignment constructor: records the (compound) op, target and value, and
// reserves a bailout id. binary_operation_ is lazily filled in for compound
// assignments. NOTE(review): truncated copy — parameter lines (original
// 105-108) and several initializers (110-112, e.g. op/target/value) are
// missing from this view.
104 Assignment::Assignment(Zone* zone,
109 : Expression(zone, pos),
113 binary_operation_(NULL),
114 assignment_id_(GetNextId(zone)),
115 is_uninitialized_(false),
116 store_mode_(STANDARD_STORE) { }
119 Token::Value Assignment::binary_op() const {
121 case Token::ASSIGN_BIT_OR: return Token::BIT_OR;
122 case Token::ASSIGN_BIT_XOR: return Token::BIT_XOR;
123 case Token::ASSIGN_BIT_AND: return Token::BIT_AND;
124 case Token::ASSIGN_SHL: return Token::SHL;
125 case Token::ASSIGN_SAR: return Token::SAR;
126 case Token::ASSIGN_SHR: return Token::SHR;
127 case Token::ASSIGN_ADD: return Token::ADD;
128 case Token::ASSIGN_SUB: return Token::SUB;
129 case Token::ASSIGN_MUL: return Token::MUL;
130 case Token::ASSIGN_DIV: return Token::DIV;
131 case Token::ASSIGN_MOD: return Token::MOD;
132 default: UNREACHABLE();
134 return Token::ILLEGAL;
138 bool FunctionLiteral::AllowsLazyCompilation() {
139 return scope()->AllowsLazyCompilation();
143 bool FunctionLiteral::AllowsLazyCompilationWithoutContext() {
144 return scope()->AllowsLazyCompilationWithoutContext();
148 int FunctionLiteral::start_position() const {
149 return scope()->start_position();
153 int FunctionLiteral::end_position() const {
154 return scope()->end_position();
158 StrictMode FunctionLiteral::strict_mode() const {
159 return scope()->strict_mode();
163 void FunctionLiteral::InitializeSharedInfo(
164 Handle<Code> unoptimized_code) {
165 for (RelocIterator it(*unoptimized_code); !it.done(); it.next()) {
166 RelocInfo* rinfo = it.rinfo();
167 if (rinfo->rmode() != RelocInfo::EMBEDDED_OBJECT) continue;
168 Object* obj = rinfo->target_object();
169 if (obj->IsSharedFunctionInfo()) {
170 SharedFunctionInfo* shared = SharedFunctionInfo::cast(obj);
171 if (shared->start_position() == start_position()) {
172 shared_info_ = Handle<SharedFunctionInfo>(shared);
// Classifies a key/value object-literal property: __proto__ assignments,
// materialized literals, constants, or computed properties.
// NOTE(review): truncated copy — the member assignments (original 183-185),
// the PROTOTYPE branch body (187), and the trailing CONSTANT/COMPUTED
// branches plus closing braces (191-195) are missing from this view.
180 ObjectLiteralProperty::ObjectLiteralProperty(Zone* zone,
181 AstValueFactory* ast_value_factory,
182 Literal* key, Expression* value) {
186 if (key->raw_value()->EqualsString(ast_value_factory->proto_string())) {
188 } else if (value_->AsMaterializedLiteral() != NULL) {
189 kind_ = MATERIALIZED_LITERAL;
190 } else if (value_->IsLiteral()) {
// Accessor-property constructor (getter/setter backed by a FunctionLiteral).
// NOTE(review): truncated copy — member assignments (original 200-201) and
// the closing brace are missing from this view.
198 ObjectLiteralProperty::ObjectLiteralProperty(
199 Zone* zone, bool is_getter, FunctionLiteral* value) {
202 kind_ = is_getter ? GETTER : SETTER;
206 bool ObjectLiteral::Property::IsCompileTimeValue() {
207 return kind_ == CONSTANT ||
208 (kind_ == MATERIALIZED_LITERAL &&
209 CompileTimeValue::IsCompileTimeValue(value_));
213 void ObjectLiteral::Property::set_emit_store(bool emit_store) {
214 emit_store_ = emit_store;
218 bool ObjectLiteral::Property::emit_store() {
223 void ObjectLiteral::CalculateEmitStore(Zone* zone) {
224 ZoneAllocationPolicy allocator(zone);
226 ZoneHashMap table(Literal::Match, ZoneHashMap::kDefaultHashMapCapacity,
228 for (int i = properties()->length() - 1; i >= 0; i--) {
229 ObjectLiteral::Property* property = properties()->at(i);
230 Literal* literal = property->key();
231 if (literal->value()->IsNull()) continue;
232 uint32_t hash = literal->Hash();
233 // If the key of a computed property is in the table, do not emit
234 // a store for the property later.
235 if ((property->kind() == ObjectLiteral::Property::MATERIALIZED_LITERAL ||
236 property->kind() == ObjectLiteral::Property::COMPUTED) &&
237 table.Lookup(literal, hash, false, allocator) != NULL) {
238 property->set_emit_store(false);
240 // Add key to the table.
241 table.Lookup(literal, hash, true, allocator);
247 bool ObjectLiteral::IsBoilerplateProperty(ObjectLiteral::Property* property) {
248 return property != NULL &&
249 property->kind() != ObjectLiteral::Property::PROTOTYPE;
// Builds the flat (key, value) FixedArray of boilerplate properties and
// computes is_simple / depth / fast-elements heuristics. Idempotent: bails
// out if constant_properties_ was already built.
// NOTE(review): this copy is heavily truncated — among others, the
// declarations of `position` and `depth_acc` (original 259-263), the
// `continue` and brace lines inside the loop, the `key->IsInternalizedString()`
// and `Smi::IsValid(...)` conditions (301, 308), the elements++ bookkeeping,
// and the fast_elements_ assignment head (321) are missing. Do not treat the
// visible lines as a complete function body.
253 void ObjectLiteral::BuildConstantProperties(Isolate* isolate) {
254 if (!constant_properties_.is_null()) return;
256 // Allocate a fixed array to hold all the constant properties.
257 Handle<FixedArray> constant_properties = isolate->factory()->NewFixedArray(
258 boilerplate_properties_ * 2, TENURED);
261 // Accumulate the value in local variables and store it at the end.
262 bool is_simple = true;
264 uint32_t max_element_index = 0;
265 uint32_t elements = 0;
266 for (int i = 0; i < properties()->length(); i++) {
267 ObjectLiteral::Property* property = properties()->at(i);
268 if (!IsBoilerplateProperty(property)) {
272 MaterializedLiteral* m_literal = property->value()->AsMaterializedLiteral();
273 if (m_literal != NULL) {
274 m_literal->BuildConstants(isolate);
275 if (m_literal->depth() >= depth_acc) depth_acc = m_literal->depth() + 1;
278 // Add CONSTANT and COMPUTED properties to boilerplate. Use undefined
279 // value for COMPUTED properties, the real value is filled in at
280 // runtime. The enumeration order is maintained.
281 Handle<Object> key = property->key()->value();
282 Handle<Object> value = GetBoilerplateValue(property->value(), isolate);
284 // Ensure objects that may, at any point in time, contain fields with double
285 // representation are always treated as nested objects. This is true for
286 // computed fields (value is undefined), and smi and double literals
287 // (value->IsNumber()).
288 // TODO(verwaest): Remove once we can store them inline.
289 if (FLAG_track_double_fields &&
290 (value->IsNumber() || value->IsUninitialized())) {
291 may_store_doubles_ = true;
294 is_simple = is_simple && !value->IsUninitialized();
296 // Keep track of the number of elements in the object literal and
297 // the largest element index. If the largest element index is
298 // much larger than the number of elements, creating an object
299 // literal with fast elements will be a waste of space.
300 uint32_t element_index = 0;
302 && Handle<String>::cast(key)->AsArrayIndex(&element_index)
303 && element_index > max_element_index) {
304 max_element_index = element_index;
306 } else if (key->IsSmi()) {
307 int key_value = Smi::cast(*key)->value();
309 && static_cast<uint32_t>(key_value) > max_element_index) {
310 max_element_index = key_value;
315 // Add name, value pair to the fixed array.
316 constant_properties->set(position++, *key);
317 constant_properties->set(position++, *value);
320 constant_properties_ = constant_properties;
322 (max_element_index <= 32) || ((2 * elements) >= max_element_index);
323 set_is_simple(is_simple);
324 set_depth(depth_acc);
// Builds the constant-elements pair [ElementsKind, FixedArray of values]
// for this array literal, marking holes, tracking depth/simplicity, and
// converting simple shallow arrays to copy-on-write storage. Idempotent.
// NOTE(review): truncated copy — the `depth_acc` declaration (original
// 338), the is_holey/is_simple assignments in the hole/uninitialized
// branches (351-353), several brace/else lines, and the
// `DCHECK_EQ(...)`-style checks are missing from this view.
328 void ArrayLiteral::BuildConstantElements(Isolate* isolate) {
329 if (!constant_elements_.is_null()) return;
331 // Allocate a fixed array to hold all the object literals.
332 Handle<JSArray> array =
333 isolate->factory()->NewJSArray(0, FAST_HOLEY_SMI_ELEMENTS);
334 JSArray::Expand(array, values()->length());
336 // Fill in the literals.
337 bool is_simple = true;
339 bool is_holey = false;
340 for (int i = 0, n = values()->length(); i < n; i++) {
341 Expression* element = values()->at(i);
342 MaterializedLiteral* m_literal = element->AsMaterializedLiteral();
343 if (m_literal != NULL) {
344 m_literal->BuildConstants(isolate);
345 if (m_literal->depth() + 1 > depth_acc) {
346 depth_acc = m_literal->depth() + 1;
349 Handle<Object> boilerplate_value = GetBoilerplateValue(element, isolate);
350 if (boilerplate_value->IsTheHole()) {
352 } else if (boilerplate_value->IsUninitialized()) {
354 JSObject::SetOwnElement(
355 array, i, handle(Smi::FromInt(0), isolate), SLOPPY).Assert();
357 JSObject::SetOwnElement(array, i, boilerplate_value, SLOPPY).Assert();
361 Handle<FixedArrayBase> element_values(array->elements());
363 // Simple and shallow arrays can be lazily copied, we transform the
364 // elements array to a copy-on-write array.
365 if (is_simple && depth_acc == 1 && values()->length() > 0 &&
366 array->HasFastSmiOrObjectElements()) {
367 element_values->set_map(isolate->heap()->fixed_cow_array_map());
370 // Remember both the literal's constant values as well as the ElementsKind
371 // in a 2-element FixedArray.
372 Handle<FixedArray> literals = isolate->factory()->NewFixedArray(2, TENURED);
374 ElementsKind kind = array->GetElementsKind();
375 kind = is_holey ? GetHoleyElementsKind(kind) : GetPackedElementsKind(kind);
377 literals->set(0, Smi::FromInt(kind));
378 literals->set(1, *element_values);
380 constant_elements_ = literals;
381 set_is_simple(is_simple);
382 set_depth(depth_acc);
386 Handle<Object> MaterializedLiteral::GetBoilerplateValue(Expression* expression,
388 if (expression->IsLiteral()) {
389 return expression->AsLiteral()->value();
391 if (CompileTimeValue::IsCompileTimeValue(expression)) {
392 return CompileTimeValue::GetValue(isolate, expression);
394 return isolate->factory()->uninitialized_value();
398 void MaterializedLiteral::BuildConstants(Isolate* isolate) {
399 if (IsArrayLiteral()) {
400 return AsArrayLiteral()->BuildConstantElements(isolate);
402 if (IsObjectLiteral()) {
403 return AsObjectLiteral()->BuildConstantProperties(isolate);
405 DCHECK(IsRegExpLiteral());
406 DCHECK(depth() >= 1); // Depth should be initialized.
410 void TargetCollector::AddTarget(Label* target, Zone* zone) {
411 // Add the label to the collector, but discard duplicates.
412 int length = targets_.length();
413 for (int i = 0; i < length; i++) {
414 if (targets_[i] == target) return;
416 targets_.Add(target, zone);
420 void UnaryOperation::RecordToBooleanTypeFeedback(TypeFeedbackOracle* oracle) {
421 // TODO(olivf) If this Operation is used in a test context, then the
422 // expression has a ToBoolean stub and we want to collect the type
423 // information. However the GraphBuilder expects it to be on the instruction
424 // corresponding to the TestContext, therefore we have to store it here and
425 // not on the operand.
426 set_to_boolean_types(oracle->ToBooleanTypes(expression()->test_id()));
430 void BinaryOperation::RecordToBooleanTypeFeedback(TypeFeedbackOracle* oracle) {
431 // TODO(olivf) If this Operation is used in a test context, then the right
432 // hand side has a ToBoolean stub and we want to collect the type information.
433 // However the GraphBuilder expects it to be on the instruction corresponding
434 // to the TestContext, therefore we have to store it here and not on the
435 // right hand operand.
436 set_to_boolean_types(oracle->ToBooleanTypes(right()->test_id()));
440 bool BinaryOperation::ResultOverwriteAllowed() const {
465 static bool IsTypeof(Expression* expr) {
466 UnaryOperation* maybe_unary = expr->AsUnaryOperation();
467 return maybe_unary != NULL && maybe_unary->op() == Token::TYPEOF;
471 // Check for the pattern: typeof <expression> equals <string literal>.
472 static bool MatchLiteralCompareTypeof(Expression* left,
476 Handle<String>* check) {
477 if (IsTypeof(left) && right->IsStringLiteral() && Token::IsEqualityOp(op)) {
478 *expr = left->AsUnaryOperation()->expression();
479 *check = Handle<String>::cast(right->AsLiteral()->value());
486 bool CompareOperation::IsLiteralCompareTypeof(Expression** expr,
487 Handle<String>* check) {
488 return MatchLiteralCompareTypeof(left_, op_, right_, expr, check) ||
489 MatchLiteralCompareTypeof(right_, op_, left_, expr, check);
493 static bool IsVoidOfLiteral(Expression* expr) {
494 UnaryOperation* maybe_unary = expr->AsUnaryOperation();
495 return maybe_unary != NULL &&
496 maybe_unary->op() == Token::VOID &&
497 maybe_unary->expression()->IsLiteral();
501 // Check for the pattern: void <literal> equals <expression> or
502 // undefined equals <expression>
503 static bool MatchLiteralCompareUndefined(Expression* left,
508 if (IsVoidOfLiteral(left) && Token::IsEqualityOp(op)) {
512 if (left->IsUndefinedLiteral(isolate) && Token::IsEqualityOp(op)) {
520 bool CompareOperation::IsLiteralCompareUndefined(
521 Expression** expr, Isolate* isolate) {
522 return MatchLiteralCompareUndefined(left_, op_, right_, expr, isolate) ||
523 MatchLiteralCompareUndefined(right_, op_, left_, expr, isolate);
527 // Check for the pattern: null equals <expression>
528 static bool MatchLiteralCompareNull(Expression* left,
532 if (left->IsNullLiteral() && Token::IsEqualityOp(op)) {
540 bool CompareOperation::IsLiteralCompareNull(Expression** expr) {
541 return MatchLiteralCompareNull(left_, op_, right_, expr) ||
542 MatchLiteralCompareNull(right_, op_, left_, expr);
546 // ----------------------------------------------------------------------------
549 bool Declaration::IsInlineable() const {
550 return proxy()->var()->IsStackAllocated();
553 bool FunctionDeclaration::IsInlineable() const {
558 // ----------------------------------------------------------------------------
559 // Recording of type feedback
561 // TODO(rossberg): all RecordTypeFeedback functions should disappear
562 // once we use the common type field in the AST consistently.
564 void Expression::RecordToBooleanTypeFeedback(TypeFeedbackOracle* oracle) {
565 to_boolean_types_ = oracle->ToBooleanTypes(test_id());
569 bool Call::IsUsingCallFeedbackSlot(Isolate* isolate) const {
570 CallType call_type = GetCallType(isolate);
571 return (call_type != POSSIBLY_EVAL_CALL);
575 Call::CallType Call::GetCallType(Isolate* isolate) const {
576 VariableProxy* proxy = expression()->AsVariableProxy();
578 if (proxy->var()->is_possibly_eval(isolate)) {
579 return POSSIBLY_EVAL_CALL;
580 } else if (proxy->var()->IsUnallocated()) {
582 } else if (proxy->var()->IsLookupSlot()) {
583 return LOOKUP_SLOT_CALL;
587 Property* property = expression()->AsProperty();
588 return property != NULL ? PROPERTY_CALL : OTHER_CALL;
592 bool Call::ComputeGlobalTarget(Handle<GlobalObject> global,
593 LookupResult* lookup) {
594 target_ = Handle<JSFunction>::null();
595 cell_ = Handle<Cell>::null();
596 DCHECK(lookup->IsFound() &&
597 lookup->type() == NORMAL &&
598 lookup->holder() == *global);
599 cell_ = Handle<Cell>(global->GetPropertyCell(lookup));
600 if (cell_->value()->IsJSFunction()) {
601 Handle<JSFunction> candidate(JSFunction::cast(cell_->value()));
602 // If the function is in new space we assume it's more likely to
603 // change and thus prefer the general IC code.
604 if (!lookup->isolate()->heap()->InNewSpace(*candidate)) {
613 void CallNew::RecordTypeFeedback(TypeFeedbackOracle* oracle) {
614 int allocation_site_feedback_slot = FLAG_pretenuring_call_new
615 ? AllocationSiteFeedbackSlot()
616 : CallNewFeedbackSlot();
618 oracle->GetCallNewAllocationSite(allocation_site_feedback_slot);
619 is_monomorphic_ = oracle->CallNewIsMonomorphic(CallNewFeedbackSlot());
620 if (is_monomorphic_) {
621 target_ = oracle->GetCallNewTarget(CallNewFeedbackSlot());
622 if (!allocation_site_.is_null()) {
623 elements_kind_ = allocation_site_->GetElementsKind();
629 void ObjectLiteral::Property::RecordTypeFeedback(TypeFeedbackOracle* oracle) {
630 TypeFeedbackId id = key()->LiteralFeedbackId();
632 oracle->CollectReceiverTypes(id, &maps);
633 receiver_type_ = maps.length() == 1 ? maps.at(0)
634 : Handle<Map>::null();
638 // ----------------------------------------------------------------------------
639 // Implementation of AstVisitor
641 void AstVisitor::VisitDeclarations(ZoneList<Declaration*>* declarations) {
642 for (int i = 0; i < declarations->length(); i++) {
643 Visit(declarations->at(i));
648 void AstVisitor::VisitStatements(ZoneList<Statement*>* statements) {
649 for (int i = 0; i < statements->length(); i++) {
650 Statement* stmt = statements->at(i);
652 if (stmt->IsJump()) break;
657 void AstVisitor::VisitExpressions(ZoneList<Expression*>* expressions) {
658 for (int i = 0; i < expressions->length(); i++) {
659 // The variable statement visiting code may pass NULL expressions
660 // to this code. Maybe this should be handled by introducing an
661 // undefined expression or literal? Revisit this code if this
663 Expression* expression = expressions->at(i);
664 if (expression != NULL) Visit(expression);
669 // ----------------------------------------------------------------------------
670 // Regular expressions
// Macro batteries generating RegExp tree boilerplate: Accept() dispatchers
// and the As##Name()/Is##Name() downcast predicates (base returns
// NULL/false; each derived class returns this/true).
// NOTE(review): truncated copy — macro body lines (e.g. the `return this;`
// and NULL-returning lines, original 675, 681-682, 689-690) and the
// `#undef MAKE_ACCEPT` are missing; the visible `\`-continuations do not
// form complete macros as shown.
672 #define MAKE_ACCEPT(Name) \
673 void* RegExp##Name::Accept(RegExpVisitor* visitor, void* data) { \
674 return visitor->Visit##Name(this, data); \
676 FOR_EACH_REG_EXP_TREE_TYPE(MAKE_ACCEPT)
679 #define MAKE_TYPE_CASE(Name) \
680 RegExp##Name* RegExpTree::As##Name() { \
683 bool RegExpTree::Is##Name() { return false; }
684 FOR_EACH_REG_EXP_TREE_TYPE(MAKE_TYPE_CASE)
685 #undef MAKE_TYPE_CASE
687 #define MAKE_TYPE_CASE(Name) \
688 RegExp##Name* RegExp##Name::As##Name() { \
691 bool RegExp##Name::Is##Name() { return true; }
692 FOR_EACH_REG_EXP_TREE_TYPE(MAKE_TYPE_CASE)
693 #undef MAKE_TYPE_CASE
696 static Interval ListCaptureRegisters(ZoneList<RegExpTree*>* children) {
697 Interval result = Interval::Empty();
698 for (int i = 0; i < children->length(); i++)
699 result = result.Union(children->at(i)->CaptureRegisters());
704 Interval RegExpAlternative::CaptureRegisters() {
705 return ListCaptureRegisters(nodes());
709 Interval RegExpDisjunction::CaptureRegisters() {
710 return ListCaptureRegisters(alternatives());
714 Interval RegExpLookahead::CaptureRegisters() {
715 return body()->CaptureRegisters();
719 Interval RegExpCapture::CaptureRegisters() {
720 Interval self(StartRegister(index()), EndRegister(index()));
721 return self.Union(body()->CaptureRegisters());
725 Interval RegExpQuantifier::CaptureRegisters() {
726 return body()->CaptureRegisters();
730 bool RegExpAssertion::IsAnchoredAtStart() {
731 return assertion_type() == RegExpAssertion::START_OF_INPUT;
735 bool RegExpAssertion::IsAnchoredAtEnd() {
736 return assertion_type() == RegExpAssertion::END_OF_INPUT;
740 bool RegExpAlternative::IsAnchoredAtStart() {
741 ZoneList<RegExpTree*>* nodes = this->nodes();
742 for (int i = 0; i < nodes->length(); i++) {
743 RegExpTree* node = nodes->at(i);
744 if (node->IsAnchoredAtStart()) { return true; }
745 if (node->max_match() > 0) { return false; }
751 bool RegExpAlternative::IsAnchoredAtEnd() {
752 ZoneList<RegExpTree*>* nodes = this->nodes();
753 for (int i = nodes->length() - 1; i >= 0; i--) {
754 RegExpTree* node = nodes->at(i);
755 if (node->IsAnchoredAtEnd()) { return true; }
756 if (node->max_match() > 0) { return false; }
762 bool RegExpDisjunction::IsAnchoredAtStart() {
763 ZoneList<RegExpTree*>* alternatives = this->alternatives();
764 for (int i = 0; i < alternatives->length(); i++) {
765 if (!alternatives->at(i)->IsAnchoredAtStart())
772 bool RegExpDisjunction::IsAnchoredAtEnd() {
773 ZoneList<RegExpTree*>* alternatives = this->alternatives();
774 for (int i = 0; i < alternatives->length(); i++) {
775 if (!alternatives->at(i)->IsAnchoredAtEnd())
782 bool RegExpLookahead::IsAnchoredAtStart() {
783 return is_positive() && body()->IsAnchoredAtStart();
787 bool RegExpCapture::IsAnchoredAtStart() {
788 return body()->IsAnchoredAtStart();
792 bool RegExpCapture::IsAnchoredAtEnd() {
793 return body()->IsAnchoredAtEnd();
797 // Convert regular expression trees to a simple sexp representation.
798 // This representation should be different from the input grammar
799 // in as many cases as possible, to make it more difficult for incorrect
800 // parses to look as correct ones which is likely if the input and
801 // output formats are alike.
// Visitor that prints a RegExp tree as an s-expression onto os_, used for
// debugging/testing output of the regexp parser.
// NOTE(review): truncated class declaration — the `public:` label,
// `#undef MAKE_CASE`, the private members (os_, zone_) and the closing
// `};` are missing from this view.
802 class RegExpUnparser V8_FINAL : public RegExpVisitor {
804 RegExpUnparser(OStream& os, Zone* zone) : os_(os), zone_(zone) {}
805 void VisitCharacterRange(CharacterRange that);
806 #define MAKE_CASE(Name) virtual void* Visit##Name(RegExp##Name*, \
807 void* data) V8_OVERRIDE;
808 FOR_EACH_REG_EXP_TREE_TYPE(MAKE_CASE)
// RegExpUnparser visitor implementations: each emits an s-expression form
// of its node onto os_ and returns NULL.
// NOTE(review): heavily truncated — most of the literal output strings
// (e.g. the "(|"/"(:"/assertion token emissions), the `return NULL;`
// statements and closing braces are missing from this view; the exact
// printed format cannot be recovered from what is shown, so the visible
// lines must not be treated as complete method bodies.
816 void* RegExpUnparser::VisitDisjunction(RegExpDisjunction* that, void* data) {
818 for (int i = 0; i < that->alternatives()->length(); i++) {
820 that->alternatives()->at(i)->Accept(this, data);
827 void* RegExpUnparser::VisitAlternative(RegExpAlternative* that, void* data) {
829 for (int i = 0; i < that->nodes()->length(); i++) {
831 that->nodes()->at(i)->Accept(this, data);
838 void RegExpUnparser::VisitCharacterRange(CharacterRange that) {
839 os_ << AsUC16(that.from());
840 if (!that.IsSingleton()) {
841 os_ << "-" << AsUC16(that.to());
847 void* RegExpUnparser::VisitCharacterClass(RegExpCharacterClass* that,
849 if (that->is_negated()) os_ << "^";
851 for (int i = 0; i < that->ranges(zone_)->length(); i++) {
852 if (i > 0) os_ << " ";
853 VisitCharacterRange(that->ranges(zone_)->at(i));
860 void* RegExpUnparser::VisitAssertion(RegExpAssertion* that, void* data) {
861 switch (that->assertion_type()) {
862 case RegExpAssertion::START_OF_INPUT:
865 case RegExpAssertion::END_OF_INPUT:
868 case RegExpAssertion::START_OF_LINE:
871 case RegExpAssertion::END_OF_LINE:
874 case RegExpAssertion::BOUNDARY:
877 case RegExpAssertion::NON_BOUNDARY:
885 void* RegExpUnparser::VisitAtom(RegExpAtom* that, void* data) {
887 Vector<const uc16> chardata = that->data();
888 for (int i = 0; i < chardata.length(); i++) {
889 os_ << AsUC16(chardata[i]);
896 void* RegExpUnparser::VisitText(RegExpText* that, void* data) {
897 if (that->elements()->length() == 1) {
898 that->elements()->at(0).tree()->Accept(this, data);
901 for (int i = 0; i < that->elements()->length(); i++) {
903 that->elements()->at(i).tree()->Accept(this, data);
911 void* RegExpUnparser::VisitQuantifier(RegExpQuantifier* that, void* data) {
912 os_ << "(# " << that->min() << " ";
913 if (that->max() == RegExpTree::kInfinity) {
916 os_ << that->max() << " ";
918 os_ << (that->is_greedy() ? "g " : that->is_possessive() ? "p " : "n ");
919 that->body()->Accept(this, data);
925 void* RegExpUnparser::VisitCapture(RegExpCapture* that, void* data) {
927 that->body()->Accept(this, data);
933 void* RegExpUnparser::VisitLookahead(RegExpLookahead* that, void* data) {
934 os_ << "(-> " << (that->is_positive() ? "+ " : "- ");
935 that->body()->Accept(this, data);
941 void* RegExpUnparser::VisitBackReference(RegExpBackReference* that,
943 os_ << "(<- " << that->index() << ")";
948 void* RegExpUnparser::VisitEmpty(RegExpEmpty* that, void* data) {
954 OStream& RegExpTree::Print(OStream& os, Zone* zone) { // NOLINT
955 RegExpUnparser unparser(os, zone);
956 Accept(&unparser, NULL);
961 RegExpDisjunction::RegExpDisjunction(ZoneList<RegExpTree*>* alternatives)
962 : alternatives_(alternatives) {
963 DCHECK(alternatives->length() > 1);
964 RegExpTree* first_alternative = alternatives->at(0);
965 min_match_ = first_alternative->min_match();
966 max_match_ = first_alternative->max_match();
967 for (int i = 1; i < alternatives->length(); i++) {
968 RegExpTree* alternative = alternatives->at(i);
969 min_match_ = Min(min_match_, alternative->min_match());
970 max_match_ = Max(max_match_, alternative->max_match());
975 static int IncreaseBy(int previous, int increase) {
976 if (RegExpTree::kInfinity - previous < increase) {
977 return RegExpTree::kInfinity;
979 return previous + increase;
983 RegExpAlternative::RegExpAlternative(ZoneList<RegExpTree*>* nodes)
985 DCHECK(nodes->length() > 1);
988 for (int i = 0; i < nodes->length(); i++) {
989 RegExpTree* node = nodes->at(i);
990 int node_min_match = node->min_match();
991 min_match_ = IncreaseBy(min_match_, node_min_match);
992 int node_max_match = node->max_match();
993 max_match_ = IncreaseBy(max_match_, node_max_match);
998 CaseClause::CaseClause(Zone* zone,
1000 ZoneList<Statement*>* statements,
1002 : Expression(zone, pos),
1004 statements_(statements),
1005 compare_type_(Type::None(zone)),
1006 compare_id_(AstNode::GetNextId(zone)),
1007 entry_id_(AstNode::GetNextId(zone)) {
// Macro battery generating AstConstructionVisitor::Visit* methods that
// classify nodes while the AST is built: count nodes, reserve feedback
// slots, and record dont-optimize / dont-self-optimize / dont-cache flags.
// NOTE(review): truncated copy — each macro's closing `}` line is missing
// here, so the visible `\`-continuations do not form complete macros.
1011 #define REGULAR_NODE(NodeType) \
1012 void AstConstructionVisitor::Visit##NodeType(NodeType* node) { \
1013 increase_node_count(); \
1015 #define REGULAR_NODE_WITH_FEEDBACK_SLOTS(NodeType) \
1016 void AstConstructionVisitor::Visit##NodeType(NodeType* node) { \
1017 increase_node_count(); \
1018 add_slot_node(node); \
1020 #define DONT_OPTIMIZE_NODE(NodeType) \
1021 void AstConstructionVisitor::Visit##NodeType(NodeType* node) { \
1022 increase_node_count(); \
1023 set_dont_optimize_reason(k##NodeType); \
1024 add_flag(kDontSelfOptimize); \
1026 #define DONT_OPTIMIZE_NODE_WITH_FEEDBACK_SLOTS(NodeType) \
1027 void AstConstructionVisitor::Visit##NodeType(NodeType* node) { \
1028 increase_node_count(); \
1029 add_slot_node(node); \
1030 set_dont_optimize_reason(k##NodeType); \
1031 add_flag(kDontSelfOptimize); \
1033 #define DONT_SELFOPTIMIZE_NODE(NodeType) \
1034 void AstConstructionVisitor::Visit##NodeType(NodeType* node) { \
1035 increase_node_count(); \
1036 add_flag(kDontSelfOptimize); \
1038 #define DONT_SELFOPTIMIZE_NODE_WITH_FEEDBACK_SLOTS(NodeType) \
1039 void AstConstructionVisitor::Visit##NodeType(NodeType* node) { \
1040 increase_node_count(); \
1041 add_slot_node(node); \
1042 add_flag(kDontSelfOptimize); \
1044 #define DONT_CACHE_NODE(NodeType) \
1045 void AstConstructionVisitor::Visit##NodeType(NodeType* node) { \
1046 increase_node_count(); \
1047 set_dont_optimize_reason(k##NodeType); \
1048 add_flag(kDontSelfOptimize); \
1049 add_flag(kDontCache); \
// Per-node-type classification: which AST nodes are ordinary, which need
// feedback slots, and which disable Crankshaft optimization, self
// optimization, or AST caching.
// NOTE(review): truncated copy — some invocation lines (e.g. original 1054
// Block, 1070 Throw) are missing from this view.
1052 REGULAR_NODE(VariableDeclaration)
1053 REGULAR_NODE(FunctionDeclaration)
1055 REGULAR_NODE(ExpressionStatement)
1056 REGULAR_NODE(EmptyStatement)
1057 REGULAR_NODE(IfStatement)
1058 REGULAR_NODE(ContinueStatement)
1059 REGULAR_NODE(BreakStatement)
1060 REGULAR_NODE(ReturnStatement)
1061 REGULAR_NODE(SwitchStatement)
1062 REGULAR_NODE(CaseClause)
1063 REGULAR_NODE(Conditional)
1064 REGULAR_NODE(Literal)
1065 REGULAR_NODE(ArrayLiteral)
1066 REGULAR_NODE(ObjectLiteral)
1067 REGULAR_NODE(RegExpLiteral)
1068 REGULAR_NODE(FunctionLiteral)
1069 REGULAR_NODE(Assignment)
1071 REGULAR_NODE(UnaryOperation)
1072 REGULAR_NODE(CountOperation)
1073 REGULAR_NODE(BinaryOperation)
1074 REGULAR_NODE(CompareOperation)
1075 REGULAR_NODE(ThisFunction)
1077 REGULAR_NODE_WITH_FEEDBACK_SLOTS(Call)
1078 REGULAR_NODE_WITH_FEEDBACK_SLOTS(CallNew)
1079 REGULAR_NODE_WITH_FEEDBACK_SLOTS(Property)
1080 // In theory, for VariableProxy we'd have to add:
1081 // if (node->var()->IsLookupSlot())
1082 // set_dont_optimize_reason(kReferenceToAVariableWhichRequiresDynamicLookup);
1083 // But node->var() is usually not bound yet at VariableProxy creation time, and
1084 // LOOKUP variables only result from constructs that cannot be inlined anyway.
1085 REGULAR_NODE_WITH_FEEDBACK_SLOTS(VariableProxy)
1087 // We currently do not optimize any modules.
1088 DONT_OPTIMIZE_NODE(ModuleDeclaration)
1089 DONT_OPTIMIZE_NODE(ImportDeclaration)
1090 DONT_OPTIMIZE_NODE(ExportDeclaration)
1091 DONT_OPTIMIZE_NODE(ModuleVariable)
1092 DONT_OPTIMIZE_NODE(ModulePath)
1093 DONT_OPTIMIZE_NODE(ModuleUrl)
1094 DONT_OPTIMIZE_NODE(ModuleStatement)
1095 DONT_OPTIMIZE_NODE(WithStatement)
1096 DONT_OPTIMIZE_NODE(TryCatchStatement)
1097 DONT_OPTIMIZE_NODE(TryFinallyStatement)
1098 DONT_OPTIMIZE_NODE(DebuggerStatement)
1099 DONT_OPTIMIZE_NODE(NativeFunctionLiteral)
1101 DONT_OPTIMIZE_NODE_WITH_FEEDBACK_SLOTS(Yield)
1103 DONT_SELFOPTIMIZE_NODE(DoWhileStatement)
1104 DONT_SELFOPTIMIZE_NODE(WhileStatement)
1105 DONT_SELFOPTIMIZE_NODE(ForStatement)
1106 DONT_SELFOPTIMIZE_NODE(ForOfStatement)
1108 DONT_SELFOPTIMIZE_NODE_WITH_FEEDBACK_SLOTS(ForInStatement)
1110 DONT_CACHE_NODE(ModuleLiteral)
1113 void AstConstructionVisitor::VisitCallRuntime(CallRuntime* node) {
1114 increase_node_count();
1115 add_slot_node(node);
1116 if (node->is_jsruntime()) {
1117 // Don't try to optimize JS runtime calls because we bailout on them.
1118 set_dont_optimize_reason(kCallToAJavaScriptRuntimeFunction);
1123 #undef DONT_OPTIMIZE_NODE
1124 #undef DONT_SELFOPTIMIZE_NODE
1125 #undef DONT_CACHE_NODE
1128 Handle<String> Literal::ToString() {
1129 if (value_->IsString()) return value_->AsString()->string();
1130 DCHECK(value_->IsNumber());
1132 Vector<char> buffer(arr, ARRAY_SIZE(arr));
1134 if (value()->IsSmi()) {
1135 // Optimization only, the heap number case would subsume this.
1136 SNPrintF(buffer, "%d", Smi::cast(*value())->value());
1139 str = DoubleToCString(value()->Number(), buffer);
1141 return isolate_->factory()->NewStringFromAsciiChecked(str);
1145 } } // namespace v8::internal