FixedArray::kHeaderSize + node->literal_index() * kPointerSize;
__ ldr(literal, FieldMemOperand(tmp, literal_offset));
- JumpTarget done;
+ JumpTarget materialized;
__ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
__ cmp(literal, ip);
- // This branch locks the virtual frame at the done label to match the
- // one we have here, where the literal register is not on the stack and
- // nothing is spilled.
+ // This branch locks the virtual frame at the materialized label to match
+ // the one we have here, where the literal register is not on the stack
+ // and nothing is spilled.
- done.Branch(ne);
+ materialized.Branch(ne);
// If the entry is undefined we call the runtime system to compute
// the literal.
frame_->CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
__ Move(literal, r0);
- // This call to bind will get us back to the virtual frame we had before
- // where things are not spilled and the literal register is not on the stack.
- done.Bind();
- // Push the literal.
+ materialized.Bind();
+
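+ // Each evaluation of a regexp literal must now yield a fresh clone of
+ // the materialized boilerplate, so allocate a JSRegExp-sized block and
+ // copy the boilerplate into it.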
frame_->EmitPush(literal);
+ int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
+ frame_->EmitPush(Operand(Smi::FromInt(size)));
+ frame_->CallRuntime(Runtime::kAllocateInNewSpace, 1);
+ // TODO(lrn): Use AllocateInNewSpace macro with fallback to runtime.
+ // r0 is newly allocated space.
+
+ // Reuse literal variable with (possibly) a new register, still holding
+ // the materialized boilerplate.
+ literal = frame_->PopToRegister(r0);
+
+ __ CopyFields(r0, literal, tmp.bit(), size / kPointerSize);
+
+ // Push the clone.
+ frame_->EmitPush(r0);
ASSERT_EQ(original_height + 1, frame_->height());
}
}
+void CodeGenerator::GenerateIsRegExpEquivalent(ZoneList<Expression*>* args) {
+ ASSERT(args->length() == 2);
+
+ // Load the two objects into registers and perform the comparison.
+ Load(args->at(0));
+ Load(args->at(1));
+ Register right = frame_->PopToRegister();
+ Register left = frame_->PopToRegister(right);
+ Register tmp = frame_->scratch0();
+ Register tmp2 = frame_->scratch1();
+
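+ // Two regexp objects are considered equivalent when they are the same
+ // object, or when both are JSRegExps with the same map and the same
+ // data array (clones of one boilerplate share pattern, flags and data).
+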
+ // Jumps to done must have the eq flag set if the test is successful
+ // and clear if the test has failed.
+ Label done;
+
+ // Succeed if they are the same object.
+ __ cmp(left, Operand(right));
+ __ b(eq, &done);
+ // Fail if either is a non-HeapObject.
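+ // Smis have 0 in the low (tag) bit, so left AND right has the bit set
+ // only when both are heap objects; eor with kSmiTagMask inverts it, and
+ // tst then sets ne exactly when either operand is a smi.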
+ __ and_(tmp, left, Operand(right));
+ __ eor(tmp, tmp, Operand(kSmiTagMask));
+ __ tst(tmp, Operand(kSmiTagMask));
+ __ b(ne, &done);
+ __ ldr(tmp, FieldMemOperand(left, HeapObject::kMapOffset));
+ __ ldrb(tmp2, FieldMemOperand(tmp, Map::kInstanceTypeOffset));
+ __ cmp(tmp2, Operand(JS_REGEXP_TYPE));
+ __ b(ne, &done);
+ __ ldr(tmp2, FieldMemOperand(right, HeapObject::kMapOffset));
+ __ cmp(tmp, Operand(tmp2));
+ __ b(ne, &done);
+ __ ldr(tmp, FieldMemOperand(left, JSRegExp::kDataOffset));
+ __ ldr(tmp2, FieldMemOperand(right, JSRegExp::kDataOffset));
+ __ cmp(tmp, tmp2);
+ __ bind(&done);
+ cc_reg_ = eq;
+}
+
+
void CodeGenerator::VisitCallRuntime(CallRuntime* node) {
#ifdef DEBUG
int original_height = frame_->height();
__ str(r2, FieldMemOperand(r0, JSArray::kElementsOffset));
// Copy the elements array.
- for (int i = 0; i < elements_size; i += kPointerSize) {
- __ ldr(r1, FieldMemOperand(r3, i));
- __ str(r1, FieldMemOperand(r2, i));
- }
+ __ CopyFields(r2, r3, r1.bit(), elements_size / kPointerSize);
}
// Return and remove the on-stack parameters.
__ ldr(r4, MemOperand(r4, offset));
// Copy the JS object part.
- for (int i = 0; i < JSObject::kHeaderSize; i += kPointerSize) {
- __ ldr(r3, FieldMemOperand(r4, i));
- __ str(r3, FieldMemOperand(r0, i));
- }
+ __ CopyFields(r0, r4, r3.bit(), JSObject::kHeaderSize / kPointerSize);
// Setup the callee in-object property.
STATIC_ASSERT(Heap::arguments_callee_index == 0);
void GenerateMathCos(ZoneList<Expression*>* args);
void GenerateMathSqrt(ZoneList<Expression*>* args);
+ void GenerateIsRegExpEquivalent(ZoneList<Expression*>* args);
+
// Simple condition analysis.
enum ConditionAnalysis {
ALWAYS_TRUE,
void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
Comment cmnt(masm_, "[ RegExpLiteral");
- Label done;
+ Label materialized;
// Registers will be used as follows:
// r4 = JS function, literals array
// r3 = literal index
// r2 = RegExp pattern
// r1 = RegExp flags
- // r0 = temp + return value (RegExp literal)
+ // r0 = temp + materialized value (RegExp literal)
__ ldr(r0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
__ ldr(r4, FieldMemOperand(r0, JSFunction::kLiteralsOffset));
int literal_offset =
__ ldr(r0, FieldMemOperand(r4, literal_offset));
__ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
__ cmp(r0, ip);
- __ b(ne, &done);
+ __ b(ne, &materialized);
__ mov(r3, Operand(Smi::FromInt(expr->literal_index())));
__ mov(r2, Operand(expr->pattern()));
__ mov(r1, Operand(expr->flags()));
__ Push(r4, r3, r2, r1);
__ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
- __ bind(&done);
+ __ bind(&materialized);
+ int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
+ __ push(r0);
+ __ mov(r0, Operand(Smi::FromInt(size)));
+ __ push(r0);
+ __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
+ // After this, registers are used as follows:
+ // r0: Newly allocated regexp.
+ // r1: Materialized regexp.
+ // r2: Temp.
+ __ pop(r1);
+ __ CopyFields(r0, r1, r2.bit(), size / kPointerSize);
Apply(context_, r0);
}
}
+void FullCodeGenerator::EmitIsRegExpEquivalent(ZoneList<Expression*>* args) {
+ ASSERT_EQ(2, args->length());
+
+ Register right = r0;
+ Register left = r1;
+ Register tmp = r2;
+ Register tmp2 = r3;
+
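+ // Argument 0 is evaluated onto the stack and argument 1 into the
+ // accumulator (r0), so right must alias the accumulator and left is
+ // popped from the stack below.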
+ VisitForValue(args->at(0), kStack);
+ VisitForValue(args->at(1), kAccumulator);
+ __ pop(left);
+
+ Label done, fail, ok;
+ __ cmp(left, Operand(right));
+ __ b(eq, &ok);
+ // Fail if either is a non-HeapObject.
+ __ and_(tmp, left, Operand(right));
+ __ tst(tmp, Operand(kSmiTagMask));
+ __ b(eq, &fail);
+ __ ldr(tmp, FieldMemOperand(left, HeapObject::kMapOffset));
+ __ ldrb(tmp2, FieldMemOperand(tmp, Map::kInstanceTypeOffset));
+ __ cmp(tmp2, Operand(JS_REGEXP_TYPE));
+ __ b(ne, &fail);
+ __ ldr(tmp2, FieldMemOperand(right, HeapObject::kMapOffset));
+ __ cmp(tmp, Operand(tmp2));
+ __ b(ne, &fail);
+ __ ldr(tmp, FieldMemOperand(left, JSRegExp::kDataOffset));
+ __ ldr(tmp2, FieldMemOperand(right, JSRegExp::kDataOffset));
+ __ cmp(tmp, tmp2);
+ __ b(eq, &ok);
+ __ bind(&fail);
+ __ LoadRoot(r0, Heap::kFalseValueRootIndex);
+ __ jmp(&done);
+ __ bind(&ok);
+ __ LoadRoot(r0, Heap::kTrueValueRootIndex);
+ __ bind(&done);
+
+ Apply(context_, r0);
+}
+
+
void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
Handle<String> name = expr->name();
if (name->length() > 0 && name->Get(0) == '_') {
}
+// Copies a fixed number of fields of heap objects from src to dst.
+void MacroAssembler::CopyFields(Register dst,
+ Register src,
+ RegList temps,
+ int field_count) {
+ // At least one bit set in the first 15 registers.
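+ // (r15 is the pc and must not be used as a scratch register.)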
+ ASSERT((temps & ((1 << 15) - 1)) != 0);
+ ASSERT((temps & dst.bit()) == 0);
+ ASSERT((temps & src.bit()) == 0);
+ // Primitive implementation using only one temporary register.
+
+ Register tmp = no_reg;
+ // Find a temp register in temps list.
+ for (int i = 0; i < 15; i++) {
+ if ((temps & (1 << i)) != 0) {
+ tmp.set_code(i);
+ break;
+ }
+ }
+ ASSERT(!tmp.is(no_reg));
+
+ for (int i = 0; i < field_count; i++) {
+ ldr(tmp, FieldMemOperand(src, i * kPointerSize));
+ str(tmp, FieldMemOperand(dst, i * kPointerSize));
+ }
+}
+
+
void MacroAssembler::CountLeadingZeros(Register zeros, // Answer.
Register source, // Input.
Register scratch) {
Register heap_number_map,
Label* gc_required);
+ // Copies a fixed number of fields of heap objects from src to dst.
+ void CopyFields(Register dst, Register src, RegList temps, int field_count);
// ---------------------------------------------------------------------------
// Support functions.
// Types of uncatchable exceptions.
enum UncatchableExceptionType { OUT_OF_MEMORY, TERMINATION };
-
-#if V8_TARGET_ARCH_IA32
-#include "ia32/codegen-ia32.h"
-#elif V8_TARGET_ARCH_X64
-#include "x64/codegen-x64.h"
-#elif V8_TARGET_ARCH_ARM
-#include "arm/codegen-arm.h"
-#elif V8_TARGET_ARCH_MIPS
-#include "mips/codegen-mips.h"
-#else
-#error Unsupported target architecture.
-#endif
-
-#include "register-allocator.h"
-
-namespace v8 {
-namespace internal {
-
-
#define INLINE_RUNTIME_FUNCTION_LIST(F) \
F(IsSmi, 1, 1) \
F(IsNonNegativeSmi, 1, 1) \
F(MathPow, 2, 1) \
F(MathSin, 1, 1) \
F(MathCos, 1, 1) \
- F(MathSqrt, 1, 1)
+ F(MathSqrt, 1, 1) \
+ F(IsRegExpEquivalent, 2, 1)
+
+
+#if V8_TARGET_ARCH_IA32
+#include "ia32/codegen-ia32.h"
+#elif V8_TARGET_ARCH_X64
+#include "x64/codegen-x64.h"
+#elif V8_TARGET_ARCH_ARM
+#include "arm/codegen-arm.h"
+#elif V8_TARGET_ARCH_MIPS
+#include "mips/codegen-mips.h"
+#else
+#error Unsupported target architecture.
+#endif
+
+#include "register-allocator.h"
+
+namespace v8 {
+namespace internal {
// Support for "structured" code comments.
#ifdef DEBUG
EmitSwapElements(expr->arguments());
} else if (strcmp("_GetFromCache", *name->ToCString()) == 0) {
EmitGetFromCache(expr->arguments());
+ } else if (strcmp("_IsRegExpEquivalent", *name->ToCString()) == 0) {
+ EmitIsRegExpEquivalent(expr->arguments());
} else {
UNREACHABLE();
}
void EmitRegExpConstructResult(ZoneList<Expression*>* arguments);
void EmitSwapElements(ZoneList<Expression*>* arguments);
void EmitGetFromCache(ZoneList<Expression*>* arguments);
+ void EmitIsRegExpEquivalent(ZoneList<Expression*>* arguments);
// Platform-specific code for loading variables.
void EmitVariableLoad(Variable* expr, Expression::Context context);
owner_->set_state(previous_);
}
-
// -------------------------------------------------------------------------
// CodeGenerator implementation.
}
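+// Deferred code object that calls the runtime allocator when the inline
+// new-space allocation of the regexp clone fails.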
+class DeferredAllocateInNewSpace: public DeferredCode {
+ public:
+ DeferredAllocateInNewSpace(int size, Register target)
+ : size_(size), target_(target) {
+ ASSERT(size >= kPointerSize && size <= Heap::MaxObjectSizeInNewSpace());
+ set_comment("[ DeferredAllocateInNewSpace");
+ }
+ void Generate();
+
+ private:
+ int size_;
+ Register target_;
+};
+
+
+void DeferredAllocateInNewSpace::Generate() {
+ __ push(Immediate(Smi::FromInt(size_)));
+ __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
+ if (!target_.is(eax)) {
+ __ mov(target_, eax);
+ }
+}
+
+
void CodeGenerator::VisitRegExpLiteral(RegExpLiteral* node) {
ASSERT(!in_safe_int32_mode());
Comment cmnt(masm_, "[ RegExp Literal");
__ cmp(boilerplate.reg(), Factory::undefined_value());
deferred->Branch(equal);
deferred->BindExit();
- literals.Unuse();
- // Push the boilerplate object.
+ // The boilerplate register now contains the RegExp object.
+
+ Result tmp = allocator()->Allocate();
+ ASSERT(tmp.is_valid());
+
+ int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
+
+ DeferredAllocateInNewSpace* allocate_fallback =
+ new DeferredAllocateInNewSpace(size, literals.reg());
frame_->Push(&boilerplate);
+ frame_->SpillTop();
+ __ AllocateInNewSpace(size,
+ literals.reg(),
+ tmp.reg(),
+ no_reg,
+ allocate_fallback->entry_label(),
+ TAG_OBJECT);
+ allocate_fallback->BindExit();
+ boilerplate = frame_->Pop();
+ // Copy from boilerplate to clone and return clone.
+
+ for (int i = 0; i < size; i += kPointerSize) {
+ __ mov(tmp.reg(), FieldOperand(boilerplate.reg(), i));
+ __ mov(FieldOperand(literals.reg(), i), tmp.reg());
+ }
+ frame_->Push(&literals);
}
}
+void CodeGenerator::GenerateIsRegExpEquivalent(ZoneList<Expression*>* args) {
+ ASSERT_EQ(2, args->length());
+ Load(args->at(0));
+ Load(args->at(1));
+ Result right_res = frame_->Pop();
+ Result left_res = frame_->Pop();
+ right_res.ToRegister();
+ left_res.ToRegister();
+ Result tmp_res = allocator()->Allocate();
+ ASSERT(tmp_res.is_valid());
+ Register right = right_res.reg();
+ Register left = left_res.reg();
+ Register tmp = tmp_res.reg();
+ right_res.Unuse();
+ left_res.Unuse();
+ tmp_res.Unuse();
+ __ cmp(left, Operand(right));
+ destination()->true_target()->Branch(equal);
+ // Fail if either is a non-HeapObject.
+ __ mov(tmp, left);
+ __ and_(Operand(tmp), right);
+ __ test(Operand(tmp), Immediate(kSmiTagMask));
+ destination()->false_target()->Branch(equal);
+ __ mov(tmp, FieldOperand(left, HeapObject::kMapOffset));
+ __ cmpb(FieldOperand(tmp, Map::kInstanceTypeOffset),
+ static_cast<int8_t>(JS_REGEXP_TYPE));
+ destination()->false_target()->Branch(not_equal);
+ __ cmp(tmp, FieldOperand(right, HeapObject::kMapOffset));
+ destination()->false_target()->Branch(not_equal);
+ __ mov(tmp, FieldOperand(left, JSRegExp::kDataOffset));
+ __ cmp(tmp, FieldOperand(right, JSRegExp::kDataOffset));
+ destination()->Split(equal);
+}
+
+
void CodeGenerator::VisitCallRuntime(CallRuntime* node) {
ASSERT(!in_safe_int32_mode());
if (CheckForInlineRuntimeCall(node)) {
// Instantiate the function based on the shared function info.
Result InstantiateFunction(Handle<SharedFunctionInfo> function_info);
// Support for type checks.
void GenerateIsSmi(ZoneList<Expression*>* args);
void GenerateIsNonNegativeSmi(ZoneList<Expression*>* args);
void GenerateIsArray(ZoneList<Expression*>* args);
void GenerateMathCos(ZoneList<Expression*>* args);
void GenerateMathSqrt(ZoneList<Expression*>* args);
+ // Check whether two RegExps are equivalent.
+ void GenerateIsRegExpEquivalent(ZoneList<Expression*>* args);
+
// Simple condition analysis.
enum ConditionAnalysis {
ALWAYS_TRUE,
void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
Comment cmnt(masm_, "[ RegExpLiteral");
- Label done;
+ Label materialized;
// Registers will be used as follows:
// edi = JS function.
- // ebx = literals array.
- // eax = regexp literal.
+ // ecx = literals array.
+ // ebx = regexp literal.
+ // eax = regexp literal clone.
__ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
- __ mov(ebx, FieldOperand(edi, JSFunction::kLiteralsOffset));
+ __ mov(ecx, FieldOperand(edi, JSFunction::kLiteralsOffset));
int literal_offset =
FixedArray::kHeaderSize + expr->literal_index() * kPointerSize;
- __ mov(eax, FieldOperand(ebx, literal_offset));
- __ cmp(eax, Factory::undefined_value());
- __ j(not_equal, &done);
+ __ mov(ebx, FieldOperand(ecx, literal_offset));
+ __ cmp(ebx, Factory::undefined_value());
+ __ j(not_equal, &materialized);
+
// Create regexp literal using runtime function
// Result will be in eax.
- __ push(ebx);
+ __ push(ecx);
__ push(Immediate(Smi::FromInt(expr->literal_index())));
__ push(Immediate(expr->pattern()));
__ push(Immediate(expr->flags()));
__ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
- // Label done:
- __ bind(&done);
+ __ mov(ebx, eax);
+
+ __ bind(&materialized);
+ int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
+ Label allocated, runtime_allocate;
+ __ AllocateInNewSpace(size, eax, ecx, edx, &runtime_allocate, TAG_OBJECT);
+ __ jmp(&allocated);
+
+ __ bind(&runtime_allocate);
+ __ push(ebx);
+ __ push(Immediate(Smi::FromInt(size)));
+ __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
+ __ pop(ebx);
+
+ __ bind(&allocated);
+ // Copy the content into the newly allocated memory.
+ // (Unroll copy loop once for better throughput).
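+ // Two fields are moved per iteration; the tail below copies the final
+ // field when the total field count is odd.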
+ for (int i = 0; i < size - kPointerSize; i += 2 * kPointerSize) {
+ __ mov(edx, FieldOperand(ebx, i));
+ __ mov(ecx, FieldOperand(ebx, i + kPointerSize));
+ __ mov(FieldOperand(eax, i), edx);
+ __ mov(FieldOperand(eax, i + kPointerSize), ecx);
+ }
+ if ((size % (2 * kPointerSize)) != 0) {
+ __ mov(edx, FieldOperand(ebx, size - kPointerSize));
+ __ mov(FieldOperand(eax, size - kPointerSize), edx);
+ }
Apply(context_, eax);
}
}
+void FullCodeGenerator::EmitIsRegExpEquivalent(ZoneList<Expression*>* args) {
+ ASSERT_EQ(2, args->length());
+
+ Register right = eax;
+ Register left = ebx;
+ Register tmp = ecx;
+
+ VisitForValue(args->at(0), kStack);
+ VisitForValue(args->at(1), kAccumulator);
+ __ pop(left);
+
+ Label done, fail, ok;
+ __ cmp(left, Operand(right));
+ __ j(equal, &ok);
+ // Fail if either is a non-HeapObject.
+ __ mov(tmp, left);
+ __ and_(Operand(tmp), right);
+ __ test(Operand(tmp), Immediate(kSmiTagMask));
+ __ j(zero, &fail);
+ __ mov(tmp, FieldOperand(left, HeapObject::kMapOffset));
+ __ cmpb(FieldOperand(tmp, Map::kInstanceTypeOffset),
+ static_cast<int8_t>(JS_REGEXP_TYPE));
+ __ j(not_equal, &fail);
+ __ cmp(tmp, FieldOperand(right, HeapObject::kMapOffset));
+ __ j(not_equal, &fail);
+ __ mov(tmp, FieldOperand(left, JSRegExp::kDataOffset));
+ __ cmp(tmp, FieldOperand(right, JSRegExp::kDataOffset));
+ __ j(equal, &ok);
+ __ bind(&fail);
+ __ mov(eax, Immediate(Factory::false_value()));
+ __ jmp(&done);
+ __ bind(&ok);
+ __ mov(eax, Immediate(Factory::true_value()));
+ __ bind(&done);
+
+ Apply(context_, eax);
+}
+
+
void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
Handle<String> name = expr->name();
if (name->length() > 0 && name->Get(0) == '_') {
// Load address of new object into result.
LoadAllocationTopHelper(result, result_end, scratch, flags);
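+
+ // result_end may be passed as no_reg; result then doubles as the
+ // allocation top register and is rewound to the object start below.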
+ Register top_reg = result_end.is_valid() ? result_end : result;
+
// Calculate new top and bail out if new space is exhausted.
ExternalReference new_space_allocation_limit =
ExternalReference::new_space_allocation_limit_address();
- lea(result_end, Operand(result, object_size));
- cmp(result_end, Operand::StaticVariable(new_space_allocation_limit));
- j(above, gc_required, not_taken);
- // Tag result if requested.
- if ((flags & TAG_OBJECT) != 0) {
- lea(result, Operand(result, kHeapObjectTag));
+ if (top_reg.is(result)) {
+ add(Operand(top_reg), Immediate(object_size));
+ } else {
+ lea(top_reg, Operand(result, object_size));
}
+ cmp(top_reg, Operand::StaticVariable(new_space_allocation_limit));
+ j(above, gc_required, not_taken);
// Update allocation top.
- UpdateAllocationTopHelper(result_end, scratch);
+ UpdateAllocationTopHelper(top_reg, scratch);
+
+ // Tag result if requested.
+ if (top_reg.is(result)) {
+ if ((flags & TAG_OBJECT) != 0) {
+ sub(Operand(result), Immediate(object_size - kHeapObjectTag));
+ } else {
+ sub(Operand(result), Immediate(object_size));
+ }
+ } else if ((flags & TAG_OBJECT) != 0) {
+ add(Operand(result), Immediate(kHeapObjectTag));
+ }
}
static const int kIgnoreCaseFieldIndex = 2;
static const int kMultilineFieldIndex = 3;
static const int kLastIndexFieldIndex = 4;
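+ // Number of in-object fields (source, global, ignoreCase, multiline,
+ // lastIndex), used to compute the size of a regexp clone.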
+ static const int kInObjectFieldCount = 5;
};
this.regExp = 0;
this.subject = 0;
this.replaceString = 0;
- this.lastIndex = 0;
+ this.lastIndex = 0; // Also used for splitLimit when type is "split".
this.answer = 0;
// answerSaved marks whether the contents of answer is valid for a cache
// hit in RegExpExec, StringMatch and StringSplit.
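+ // Regexp literals now produce a fresh clone on every evaluation, so the
+ // cached regexp is compared by equivalence (shared data) rather than by
+ // identity; identity would never hit for literals.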
if (%_ObjectEquals(cache.type, 'exec') &&
%_ObjectEquals(cache.lastIndex, this.lastIndex) &&
- %_ObjectEquals(cache.regExp, this) &&
+ %_IsRegExpEquivalent(cache.regExp, this) &&
%_ObjectEquals(cache.subject, string)) {
if (cache.answerSaved) {
return CloneRegExpResult(cache.answer);
var lastIndex = this.lastIndex;
var cache = regExpCache;
if (%_ObjectEquals(cache.type, 'test') &&
- %_ObjectEquals(cache.regExp, this) &&
+ %_IsRegExpEquivalent(cache.regExp, this) &&
%_ObjectEquals(cache.subject, string) &&
%_ObjectEquals(cache.lastIndex, lastIndex)) {
return cache.answer;
}
+static Object* Runtime_AllocateInNewSpace(Arguments args) {
+ // Allocate a block of memory in NewSpace (filled with a filler).
+ // Use as fallback for allocation in generated code when NewSpace
+ // is full.
+ ASSERT(args.length() == 1);
+ CONVERT_ARG_CHECKED(Smi, size_smi, 0);
+ int size = size_smi->value();
+ RUNTIME_ASSERT(IsAligned(size, kPointerSize));
+ RUNTIME_ASSERT(size > 0);
+ static const int kMinFreeNewSpaceAfterGC =
+ Heap::InitialSemiSpaceSize() * 3/4;
+ RUNTIME_ASSERT(size <= kMinFreeNewSpaceAfterGC);
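+ // The bound ensures that a retry of this allocation after a new-space
+ // GC can succeed.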
+ Object* allocation = Heap::new_space()->AllocateRaw(size);
+ if (!allocation->IsFailure()) {
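+ // Fill the block so the heap stays iterable until the generated code
+ // that requested the memory initializes it.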
+ Heap::CreateFillerObjectAt(HeapObject::cast(allocation)->address(), size);
+ }
+ return allocation;
+}
+
+
// Push an array onto an array of arrays if it is not already in the
// array. Returns true if the element was pushed on the stack and
// false otherwise.
F(NewArgumentsFast, 3, 1) \
F(LazyCompile, 1, 1) \
F(SetNewFunctionAttributes, 1, 1) \
+ F(AllocateInNewSpace, 1, 1) \
\
/* Array join support */ \
F(PushIfAbsent, 2, 1) \
var saveAnswer = false;
if (%_ObjectEquals(cache.type, 'match') &&
- %_ObjectEquals(cache.regExp, regexp) &&
+ %_IsRegExpEquivalent(cache.regExp, regexp) &&
%_ObjectEquals(cache.subject, subject)) {
if (cache.answerSaved) {
return CloneDenseArray(cache.answer);
// Helper function for regular expressions in String.prototype.replace.
function StringReplaceRegExp(subject, regexp, replace) {
var cache = regExpCache;
- if (%_ObjectEquals(cache.regExp, regexp) &&
- %_ObjectEquals(cache.type, 'replace') &&
+ if (%_ObjectEquals(cache.type, 'replace') &&
+ %_IsRegExpEquivalent(cache.regExp, regexp) &&
%_ObjectEquals(cache.replaceString, replace) &&
%_ObjectEquals(cache.subject, subject)) {
return cache.answer;
var saveAnswer = false;
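+ // The split limit participates in the cache key (stored in the
+ // lastIndex field) so splits of one subject with different limits do
+ // not collide.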
if (%_ObjectEquals(cache.type, 'split') &&
- %_ObjectEquals(cache.regExp, separator) &&
- %_ObjectEquals(cache.subject, subject)) {
+ %_IsRegExpEquivalent(cache.regExp, separator) &&
+ %_ObjectEquals(cache.subject, subject) &&
+ %_ObjectEquals(cache.lastIndex, limit)) {
if (cache.answerSaved) {
return CloneDenseArray(cache.answer);
} else {
cache.type = 'split';
cache.regExp = separator;
cache.subject = subject;
+ // Reuse lastIndex field for split limit when type is "split".
+ cache.lastIndex = limit;
%_Log('regexp', 'regexp-split,%0S,%1r', [subject, separator]);
}
+class DeferredAllocateInNewSpace: public DeferredCode {
+ public:
+ DeferredAllocateInNewSpace(int size, Register target)
+ : size_(size), target_(target) {
+ ASSERT(size >= kPointerSize && size <= Heap::MaxObjectSizeInNewSpace());
+ set_comment("[ DeferredAllocateInNewSpace");
+ }
+ void Generate();
+
+ private:
+ int size_;
+ Register target_;
+};
+
+
+void DeferredAllocateInNewSpace::Generate() {
+ __ Push(Smi::FromInt(size_));
+ __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
+ if (!target_.is(rax)) {
+ __ movq(target_, rax);
+ }
+}
+
+
void CodeGenerator::VisitRegExpLiteral(RegExpLiteral* node) {
Comment cmnt(masm_, "[ RegExp Literal");
__ CompareRoot(boilerplate.reg(), Heap::kUndefinedValueRootIndex);
deferred->Branch(equal);
deferred->BindExit();
- literals.Unuse();
- // Push the boilerplate object.
+ // The boilerplate register now contains the RegExp object.
+
+ Result tmp = allocator()->Allocate();
+ ASSERT(tmp.is_valid());
+
+ int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
+
+ DeferredAllocateInNewSpace* allocate_fallback =
+ new DeferredAllocateInNewSpace(size, literals.reg());
frame_->Push(&boilerplate);
+ frame_->SpillTop();
+ __ AllocateInNewSpace(size,
+ literals.reg(),
+ tmp.reg(),
+ no_reg,
+ allocate_fallback->entry_label(),
+ TAG_OBJECT);
+ allocate_fallback->BindExit();
+ boilerplate = frame_->Pop();
+ // Copy from boilerplate to clone and return clone.
+
+ for (int i = 0; i < size; i += kPointerSize) {
+ __ movq(tmp.reg(), FieldOperand(boilerplate.reg(), i));
+ __ movq(FieldOperand(literals.reg(), i), tmp.reg());
+ }
+ frame_->Push(&literals);
}
}
+void CodeGenerator::GenerateIsRegExpEquivalent(ZoneList<Expression*>* args) {
+ ASSERT_EQ(2, args->length());
+ Load(args->at(0));
+ Load(args->at(1));
+ Result right_res = frame_->Pop();
+ Result left_res = frame_->Pop();
+ right_res.ToRegister();
+ left_res.ToRegister();
+ Result tmp_res = allocator()->Allocate();
+ ASSERT(tmp_res.is_valid());
+ Register right = right_res.reg();
+ Register left = left_res.reg();
+ Register tmp = tmp_res.reg();
+ right_res.Unuse();
+ left_res.Unuse();
+ tmp_res.Unuse();
+ __ cmpq(left, right);
+ destination()->true_target()->Branch(equal);
+ // Fail if either is a non-HeapObject.
+ Condition either_smi =
+ masm()->CheckEitherSmi(left, right, tmp);
+ destination()->false_target()->Branch(either_smi);
+ __ movq(tmp, FieldOperand(left, HeapObject::kMapOffset));
+ __ cmpb(FieldOperand(tmp, Map::kInstanceTypeOffset),
+ Immediate(JS_REGEXP_TYPE));
+ destination()->false_target()->Branch(not_equal);
+ __ cmpq(tmp, FieldOperand(right, HeapObject::kMapOffset));
+ destination()->false_target()->Branch(not_equal);
+ __ movq(tmp, FieldOperand(left, JSRegExp::kDataOffset));
+ __ cmpq(tmp, FieldOperand(right, JSRegExp::kDataOffset));
+ destination()->Split(equal);
+}
+
+
void CodeGenerator::VisitCallRuntime(CallRuntime* node) {
if (CheckForInlineRuntimeCall(node)) {
return;
void GenerateMathCos(ZoneList<Expression*>* args);
void GenerateMathSqrt(ZoneList<Expression*>* args);
+ void GenerateIsRegExpEquivalent(ZoneList<Expression*>* args);
+
// Simple condition analysis.
enum ConditionAnalysis {
ALWAYS_TRUE,
void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
Comment cmnt(masm_, "[ RegExpLiteral");
- Label done;
+ Label materialized;
// Registers will be used as follows:
// rdi = JS function.
- // rbx = literals array.
- // rax = regexp literal.
+ // rcx = literals array.
+ // rbx = regexp literal.
+ // rax = regexp literal clone.
__ movq(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
- __ movq(rbx, FieldOperand(rdi, JSFunction::kLiteralsOffset));
+ __ movq(rcx, FieldOperand(rdi, JSFunction::kLiteralsOffset));
int literal_offset =
FixedArray::kHeaderSize + expr->literal_index() * kPointerSize;
- __ movq(rax, FieldOperand(rbx, literal_offset));
- __ CompareRoot(rax, Heap::kUndefinedValueRootIndex);
- __ j(not_equal, &done);
+ __ movq(rbx, FieldOperand(rcx, literal_offset));
+ __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex);
+ __ j(not_equal, &materialized);
+
// Create regexp literal using runtime function
// Result will be in rax.
- __ push(rbx);
+ __ push(rcx);
__ Push(Smi::FromInt(expr->literal_index()));
__ Push(expr->pattern());
__ Push(expr->flags());
__ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
- __ bind(&done);
+ __ movq(rbx, rax);
+
+ __ bind(&materialized);
+ int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
+ Label allocated, runtime_allocate;
+ __ AllocateInNewSpace(size, rax, rcx, rdx, &runtime_allocate, TAG_OBJECT);
+ __ jmp(&allocated);
+
+ __ bind(&runtime_allocate);
+ __ push(rbx);
+ __ Push(Smi::FromInt(size));
+ __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
+ __ pop(rbx);
+
+ __ bind(&allocated);
+ // Copy the content into the newly allocated memory.
+ // (Unroll copy loop once for better throughput).
+ for (int i = 0; i < size - kPointerSize; i += 2 * kPointerSize) {
+ __ movq(rdx, FieldOperand(rbx, i));
+ __ movq(rcx, FieldOperand(rbx, i + kPointerSize));
+ __ movq(FieldOperand(rax, i), rdx);
+ __ movq(FieldOperand(rax, i + kPointerSize), rcx);
+ }
+ if ((size % (2 * kPointerSize)) != 0) {
+ __ movq(rdx, FieldOperand(rbx, size - kPointerSize));
+ __ movq(FieldOperand(rax, size - kPointerSize), rdx);
+ }
Apply(context_, rax);
}
}
+void FullCodeGenerator::EmitIsRegExpEquivalent(ZoneList<Expression*>* args) {
+ ASSERT_EQ(2, args->length());
+
+ Register right = rax;
+ Register left = rbx;
+ Register tmp = rcx;
+
+ VisitForValue(args->at(0), kStack);
+ VisitForValue(args->at(1), kAccumulator);
+ __ pop(left);
+
+ Label done, fail, ok;
+ __ cmpq(left, right);
+ __ j(equal, &ok);
+ // Fail if either is a non-HeapObject.
+ Condition either_smi = masm()->CheckEitherSmi(left, right, tmp);
+ __ j(either_smi, &fail);
+ __ movq(tmp, FieldOperand(left, HeapObject::kMapOffset));
+ __ cmpb(FieldOperand(tmp, Map::kInstanceTypeOffset),
+ Immediate(JS_REGEXP_TYPE));
+ __ j(not_equal, &fail);
+ __ cmpq(tmp, FieldOperand(right, HeapObject::kMapOffset));
+ __ j(not_equal, &fail);
+ __ movq(tmp, FieldOperand(left, JSRegExp::kDataOffset));
+ __ cmpq(tmp, FieldOperand(right, JSRegExp::kDataOffset));
+ __ j(equal, &ok);
+ __ bind(&fail);
+ __ Move(rax, Factory::false_value());
+ __ jmp(&done);
+ __ bind(&ok);
+ __ Move(rax, Factory::true_value());
+ __ bind(&done);
+
+ Apply(context_, rax);
+}
+
+
void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
Handle<String> name = expr->name();
if (name->length() > 0 && name->Get(0) == '_') {
}
-Condition MacroAssembler::CheckEitherSmi(Register first, Register second) {
+Condition MacroAssembler::CheckEitherSmi(Register first,
+ Register second,
+ Register scratch) {
if (first.is(second)) {
return CheckSmi(first);
}
- movl(kScratchRegister, first);
- andl(kScratchRegister, second);
- testb(kScratchRegister, Immediate(kSmiTagMask));
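+ // Work in the caller-supplied scratch register and skip the initial
+ // move when scratch already aliases one of the inputs.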
+ if (scratch.is(second)) {
+ andl(scratch, first);
+ } else {
+ if (!scratch.is(first)) {
+ movl(scratch, first);
+ }
+ andl(scratch, second);
+ }
+ testb(scratch, Immediate(kSmiTagMask));
return zero;
}
Condition CheckBothPositiveSmi(Register first, Register second);
// Is either value a tagged smi.
- Condition CheckEitherSmi(Register first, Register second);
+ Condition CheckEitherSmi(Register first,
+ Register second,
+ Register scratch = kScratchRegister);
// Is the value the minimum smi value (since we are using
// two's complement numbers, negating the value is known to yield
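+// Regexp literals now evaluate to a fresh clone of the boilerplate, so
+// the two evaluations no longer yield the same object.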
a = makeRegexpInArray();
var b = makeRegexpInArray();
-assertTrue(a[0][0] === b[0][0]);
+assertFalse(a[0][0] === b[0][0]);
assertFalse(a[0][1] === b[0][1]);
function makeRegexpInObject() { return { a: { b: /b*/, c: {} } }; }
a = makeRegexpInObject();
b = makeRegexpInObject();
-assertTrue(a.a.b === b.a.b);
+assertFalse(a.a.b === b.a.b);
assertFalse(a.a.c === b.a.c);