}
+void GenerateFastPixelArrayLoad(MacroAssembler* masm,
+ Register receiver,
+ Register key,
+ Register elements_map,
+ Register elements,
+ Register scratch1,
+ Register scratch2,
+ Register result,
+ Label* not_pixel_array,
+ Label* key_not_smi,
+ Label* out_of_range) {
+ // Register use:
+ //
+ // receiver - holds the receiver on entry.
+ // Unchanged unless 'result' is the same register.
+ //
+ // key - holds the smi key on entry.
+ // Unchanged unless 'result' is the same register.
+ //
+ // elements - set to be the receiver's elements on exit.
+ //
+ // elements_map - not written by the current implementation; kept in
+ // the interface for compatibility with callers.
+ //
+ // result - holds the result of the pixel array load on exit,
+ // tagged as a smi if successful.
+ //
+ // Scratch registers:
+ //
+ // scratch1 - used as a scratch register in the map check; if the
+ // map check is successful, contains the length of the
+ // pixel array, the pointer to external elements and
+ // the untagged result.
+ //
+ // scratch2 - holds the untagged key.
+
+ // Some callers already have verified that the key is a smi. key_not_smi is
+ // set to NULL as a sentinel for that case. Otherwise, an explicit check
+ // is generated to ensure that the key is a smi.
+ if (key_not_smi != NULL) {
+ __ JumpIfNotSmi(key, key_not_smi);
+ } else {
+ if (FLAG_debug_code) {
+ __ AbortIfNotSmi(key);
+ }
+ }
+ __ SmiUntag(scratch2, key);
+
+ // Verify that the receiver has pixel array elements.
+ __ ldr(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));
+ __ CheckMap(elements, scratch1, Heap::kPixelArrayMapRootIndex,
+ not_pixel_array, true);
+
+ // Key must be in range of the pixel array.
+ __ ldr(scratch1, FieldMemOperand(elements, PixelArray::kLengthOffset));
+ __ cmp(scratch2, scratch1);
+ __ b(hs, out_of_range); // unsigned check handles negative keys.
+
+ // Perform the indexed load and tag the result as a smi. Tag into 'result'
+ // rather than hard-coded r0 so the register contract above holds for any
+ // caller-chosen result register (current callers all pass r0).
+ __ ldr(scratch1,
+ FieldMemOperand(elements, PixelArray::kExternalPointerOffset));
+ __ ldrb(scratch1, MemOperand(scratch1, scratch2));
+ __ SmiTag(result, scratch1);
+ __ Ret();
+}
+
+
#undef __
} } // namespace v8::internal
};
+// Generate code to load an element from a pixel array. The receiver is
+// assumed to not be a smi and to have elements; the caller must guarantee
+// this precondition. If the receiver does not have elements that are pixel
+// arrays, the generated code jumps to not_pixel_array. If key is not a smi,
+// then the generated code branches to key_not_smi. Callers can specify NULL
+// for key_not_smi to signal that a smi check has already been performed on
+// key so that the smi check is not generated. If key is not a valid index
+// within the bounds of the pixel array, the generated code jumps to
+// out_of_range.
+void GenerateFastPixelArrayLoad(MacroAssembler* masm,
+ Register receiver,
+ Register key,
+ Register elements_map,
+ Register elements,
+ Register scratch1,
+ Register scratch2,
+ Register result,
+ Label* not_pixel_array,
+ Label* key_not_smi,
+ Label* out_of_range);
+
+
} } // namespace v8::internal
#endif // V8_ARM_CODE_STUBS_ARM_H_
// r0: key
// r1: receiver
__ bind(&check_pixel_array);
- __ ldr(r4, FieldMemOperand(r1, JSObject::kElementsOffset));
- __ ldr(r3, FieldMemOperand(r4, HeapObject::kMapOffset));
- __ LoadRoot(ip, Heap::kPixelArrayMapRootIndex);
- __ cmp(r3, ip);
- __ b(ne, &check_number_dictionary);
- __ ldr(ip, FieldMemOperand(r4, PixelArray::kLengthOffset));
- __ mov(r2, Operand(key, ASR, kSmiTagSize));
- __ cmp(r2, ip);
- __ b(hs, &slow);
- __ ldr(ip, FieldMemOperand(r4, PixelArray::kExternalPointerOffset));
- __ ldrb(r2, MemOperand(ip, r2));
- __ mov(r0, Operand(r2, LSL, kSmiTagSize)); // Tag result as smi.
- __ Ret();
+
+ GenerateFastPixelArrayLoad(masm,
+ r1,
+ r0,
+ r3,
+ r4,
+ r2,
+ r5,
+ r0,
+ &check_number_dictionary,
+ NULL,
+ &slow);
__ bind(&check_number_dictionary);
// Check whether the elements is a number dictionary.
}
+MaybeObject* KeyedLoadStubCompiler::CompileLoadPixelArray(JSObject* receiver) {
+ // ----------- S t a t e -------------
+ // -- lr : return address
+ // -- r0 : key
+ // -- r1 : receiver
+ // -----------------------------------
+ Label miss;
+
+ // Check that the map matches.
+ __ CheckMap(r1, r2, Handle<Map>(receiver->map()), &miss, false);
+
+ // Generate the fast path; every failure label jumps to miss.
+ GenerateFastPixelArrayLoad(masm(),
+ r1,
+ r0,
+ r2,
+ r3,
+ r4,
+ r5,
+ r0,
+ &miss,
+ &miss,
+ &miss);
+
+ // Handle load cache miss.
+ __ bind(&miss);
+ Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Miss));
+ __ Jump(ic, RelocInfo::CODE_TARGET);
+
+ // Return the generated code.
+ return GetCode(NORMAL, NULL);
+}
+
+
MaybeObject* KeyedStoreStubCompiler::CompileStoreField(JSObject* object,
int index,
Map* transition,
V(InitializeConstGlobal_symbol, "InitializeConstGlobal") \
V(KeyedLoadSpecialized_symbol, "KeyedLoadSpecialized") \
V(KeyedStoreSpecialized_symbol, "KeyedStoreSpecialized") \
+ V(KeyedLoadPixelArray_symbol, "KeyedLoadPixelArray") \
V(stack_overflow_symbol, "kStackOverflowBoilerplate") \
V(illegal_access_symbol, "illegal access") \
V(out_of_memory_symbol, "out-of-memory") \
}
+// Loads an indexed element from a pixel array.
+void GenerateFastPixelArrayLoad(MacroAssembler* masm,
+ Register receiver,
+ Register key,
+ Register elements,
+ Register untagged_key,
+ Register result,
+ Label* not_pixel_array,
+ Label* key_not_smi,
+ Label* out_of_range) {
+ // Register use:
+ // receiver - holds the receiver and is unchanged.
+ // key - holds the key and is unchanged (must be a smi).
+ // elements - is set to the receiver's elements if
+ // the receiver doesn't have a pixel array or the
+ // key is not a smi, otherwise it's the elements'
+ // external pointer.
+ // untagged_key - is set to the untagged key.
+
+ // Some callers already have verified that the key is a smi. key_not_smi is
+ // set to NULL as a sentinel for that case. Otherwise, an explicit check
+ // is generated to ensure that the key is a smi.
+ if (key_not_smi != NULL) {
+ __ JumpIfNotSmi(key, key_not_smi);
+ } else {
+ if (FLAG_debug_code) {
+ __ AbortIfNotSmi(key);
+ }
+ }
+ __ mov(untagged_key, key);
+ __ SmiUntag(untagged_key);
+
+ // Verify that the receiver has pixel array elements.
+ __ mov(elements, FieldOperand(receiver, JSObject::kElementsOffset));
+ __ CheckMap(elements, Factory::pixel_array_map(), not_pixel_array, true);
+
+ // Key must be in range.
+ __ cmp(untagged_key, FieldOperand(elements, PixelArray::kLengthOffset));
+ __ j(above_equal, out_of_range); // unsigned check handles negative keys.
+
+ // Perform the indexed load and tag the result as a smi.
+ __ mov(elements, FieldOperand(elements, PixelArray::kExternalPointerOffset));
+ __ movzx_b(result, Operand(elements, untagged_key, times_1, 0));
+ __ SmiTag(result);
+ __ ret(0);
+}
+
+
#undef __
} } // namespace v8::internal
};
+// Generate code to load an element from a pixel array. The receiver is
+// assumed to not be a smi and to have elements; the caller must guarantee
+// this precondition. If the receiver does not have elements that are pixel
+// arrays, the generated code jumps to not_pixel_array. If key is not a smi,
+// then the generated code branches to key_not_smi. Callers can specify NULL
+// for key_not_smi to signal that a smi check has already been performed on
+// key so that the smi check is not generated. If key is not a valid index
+// within the bounds of the pixel array, the generated code jumps to
+// out_of_range.
+void GenerateFastPixelArrayLoad(MacroAssembler* masm,
+ Register receiver,
+ Register key,
+ Register elements,
+ Register untagged_key,
+ Register result,
+ Label* not_pixel_array,
+ Label* key_not_smi,
+ Label* out_of_range);
+
+
} } // namespace v8::internal
#endif // V8_IA32_CODE_STUBS_IA32_H_
__ ret(0);
__ bind(&check_pixel_array);
- // Check whether the elements is a pixel array.
- // edx: receiver
- // eax: key
- __ mov(ecx, FieldOperand(edx, JSObject::kElementsOffset));
- __ mov(ebx, eax);
- __ SmiUntag(ebx);
- __ CheckMap(ecx, Factory::pixel_array_map(), &check_number_dictionary, true);
- __ cmp(ebx, FieldOperand(ecx, PixelArray::kLengthOffset));
- __ j(above_equal, &slow);
- __ mov(eax, FieldOperand(ecx, PixelArray::kExternalPointerOffset));
- __ movzx_b(eax, Operand(eax, ebx, times_1, 0));
- __ SmiTag(eax);
- __ ret(0);
+ GenerateFastPixelArrayLoad(masm,
+ edx,
+ eax,
+ ecx,
+ ebx,
+ eax,
+ &check_number_dictionary,
+ NULL,
+ &slow);
__ bind(&check_number_dictionary);
// Check whether the elements is a number dictionary.
j(not_carry, is_smi);
}
+ // Jump if the register contains a smi.
+ inline void JumpIfSmi(Register value, Label* smi_label) {
+ test(value, Immediate(kSmiTagMask));
+ j(zero, smi_label, not_taken);
+ }
+ // Jump if the register contains a non-smi.
+ inline void JumpIfNotSmi(Register value, Label* not_smi_label) {
+ test(value, Immediate(kSmiTagMask));
+ j(not_zero, not_smi_label, not_taken);
+ }
+
// Assumes input is a heap object.
void JumpIfNotNumber(Register reg, TypeInfo info, Label* on_not_number);
}
+MaybeObject* KeyedLoadStubCompiler::CompileLoadPixelArray(JSObject* receiver) {
+ // ----------- S t a t e -------------
+ // -- eax : key
+ // -- edx : receiver
+ // -- esp[0] : return address
+ // -----------------------------------
+ Label miss;
+
+ // Check that the map matches.
+ __ CheckMap(edx, Handle<Map>(receiver->map()), &miss, false);
+
+ // Generate the fast path; every failure label jumps to miss.
+ GenerateFastPixelArrayLoad(masm(),
+ edx,
+ eax,
+ ecx,
+ ebx,
+ eax,
+ &miss,
+ &miss,
+ &miss);
+
+ // Handle load cache miss.
+ __ bind(&miss);
+ Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Miss));
+ __ jmp(ic, RelocInfo::CODE_TARGET);
+
+ // Return the generated code.
+ return GetCode(NORMAL, NULL);
+}
+
+
// Specialized stub for constructing objects from functions which only have only
// simple assignments of the form this.x = ...; in their body.
MaybeObject* ConstructStubCompiler::CompileConstructStub(JSFunction* function) {
if (use_ic) {
Code* stub = generic_stub();
- if (object->IsString() && key->IsNumber()) {
- stub = string_stub();
- } else if (object->IsJSObject()) {
- Handle<JSObject> receiver = Handle<JSObject>::cast(object);
- if (receiver->HasExternalArrayElements()) {
- MaybeObject* probe =
- StubCache::ComputeKeyedLoadOrStoreExternalArray(*receiver, false);
- stub =
- probe->IsFailure() ? NULL : Code::cast(probe->ToObjectUnchecked());
- } else if (receiver->HasIndexedInterceptor()) {
- stub = indexed_interceptor_stub();
- } else if (state == UNINITIALIZED &&
- key->IsSmi() &&
- receiver->map()->has_fast_elements()) {
- MaybeObject* probe = StubCache::ComputeKeyedLoadSpecialized(*receiver);
- stub =
- probe->IsFailure() ? NULL : Code::cast(probe->ToObjectUnchecked());
+ if (state == UNINITIALIZED) {
+ if (object->IsString() && key->IsNumber()) {
+ stub = string_stub();
+ } else if (object->IsJSObject()) {
+ Handle<JSObject> receiver = Handle<JSObject>::cast(object);
+ if (receiver->HasExternalArrayElements()) {
+ MaybeObject* probe =
+ StubCache::ComputeKeyedLoadOrStoreExternalArray(*receiver,
+ false);
+ stub = probe->IsFailure() ?
+ NULL : Code::cast(probe->ToObjectUnchecked());
+ } else if (receiver->HasIndexedInterceptor()) {
+ stub = indexed_interceptor_stub();
+ } else if (receiver->HasPixelElements()) {
+ MaybeObject* probe =
+ StubCache::ComputeKeyedLoadPixelArray(*receiver);
+ stub = probe->IsFailure() ?
+ NULL : Code::cast(probe->ToObjectUnchecked());
+ } else if (key->IsSmi() &&
+ receiver->map()->has_fast_elements()) {
+ MaybeObject* probe =
+ StubCache::ComputeKeyedLoadSpecialized(*receiver);
+ stub = probe->IsFailure() ?
+ NULL : Code::cast(probe->ToObjectUnchecked());
+ }
}
}
if (stub != NULL) set_target(stub);
HandleScope scope;
Handle<String> str = args.at<String>(0);
int index = Smi::cast(args[1])->value();
- Handle<Object> result = GetCharAt(str, index);
- return *result;
+ if (index >= 0 && index < str->length()) {
+ Handle<Object> result = GetCharAt(str, index);
+ return *result;
+ }
}
// Fall back to GetObjectProperty.
MaybeObject* StubCache::ComputeKeyedLoadSpecialized(JSObject* receiver) {
+ // Using NORMAL as the PropertyType for array element loads is a misuse. The
+ // generated stub always accesses fast elements, not slow-mode fields, but
+ // some property type is required for the stub lookup. Note that overloading
+ // the NORMAL PropertyType is only safe as long as no stubs are generated for
+ // other keyed field loads. This is guaranteed to be the case since all field
+ // keyed loads that are not array elements go through a generic builtin stub.
Code::Flags flags =
Code::ComputeMonomorphicFlags(Code::KEYED_LOAD_IC, NORMAL);
String* name = Heap::KeyedLoadSpecialized_symbol();
}
+MaybeObject* StubCache::ComputeKeyedLoadPixelArray(JSObject* receiver) {
+ // Using NORMAL as the PropertyType for array element loads is a misuse. The
+ // generated stub always accesses fast elements, not slow-mode fields, but
+ // some property type is required for the stub lookup. Note that overloading
+ // the NORMAL PropertyType is only safe as long as no stubs are generated for
+ // other keyed field loads. This is guaranteed to be the case since all field
+ // keyed loads that are not array elements go through a generic builtin stub.
+ Code::Flags flags =
+ Code::ComputeMonomorphicFlags(Code::KEYED_LOAD_IC, NORMAL);
+ String* name = Heap::KeyedLoadPixelArray_symbol();
+ // Probe the receiver map's code cache; only compile a new stub on a miss.
+ Object* code = receiver->map()->FindInCodeCache(name, flags);
+ if (code->IsUndefined()) {
+ KeyedLoadStubCompiler compiler;
+ { MaybeObject* maybe_code = compiler.CompileLoadPixelArray(receiver);
+ if (!maybe_code->ToObject(&code)) return maybe_code;
+ }
+ PROFILE(CodeCreateEvent(Logger::KEYED_LOAD_IC_TAG, Code::cast(code), 0));
+ // Remember the stub in the map's code cache for future loads.
+ Object* result;
+ { MaybeObject* maybe_result =
+ receiver->UpdateMapCodeCache(name, Code::cast(code));
+ if (!maybe_result->ToObject(&result)) return maybe_result;
+ }
+ }
+ return code;
+}
+
+
MaybeObject* StubCache::ComputeStoreField(String* name,
JSObject* receiver,
int field_index,
MUST_USE_RESULT static MaybeObject* ComputeKeyedLoadSpecialized(
JSObject* receiver);
+ MUST_USE_RESULT static MaybeObject* ComputeKeyedLoadPixelArray(
+ JSObject* receiver);
+
// ---
MUST_USE_RESULT static MaybeObject* ComputeStoreField(String* name,
MUST_USE_RESULT MaybeObject* CompileLoadFunctionPrototype(String* name);
MUST_USE_RESULT MaybeObject* CompileLoadSpecialized(JSObject* receiver);
+ MUST_USE_RESULT MaybeObject* CompileLoadPixelArray(JSObject* receiver);
private:
MaybeObject* GetCode(PropertyType type, String* name);
__ jmp(rdi);
}
+
+void GenerateFastPixelArrayLoad(MacroAssembler* masm,
+ Register receiver,
+ Register key,
+ Register elements,
+ Register untagged_key,
+ Register result,
+ Label* not_pixel_array,
+ Label* key_not_smi,
+ Label* out_of_range) {
+ // Register use:
+ // receiver - holds the receiver and is unchanged.
+ // key - holds the key and is unchanged (must be a smi).
+ // elements - is set to the receiver's elements if
+ // the receiver doesn't have a pixel array or the
+ // key is not a smi, otherwise it's the elements'
+ // external pointer.
+ // untagged_key - is set to the untagged key.
+
+ // Some callers already have verified that the key is a smi. key_not_smi is
+ // set to NULL as a sentinel for that case. Otherwise, an explicit check
+ // is generated to ensure that the key is a smi.
+ if (key_not_smi != NULL) {
+ __ JumpIfNotSmi(key, key_not_smi);
+ } else {
+ if (FLAG_debug_code) {
+ __ AbortIfNotSmi(key);
+ }
+ }
+ __ SmiToInteger32(untagged_key, key);
+
+ // Verify that the receiver has pixel array elements.
+ __ movq(elements, FieldOperand(receiver, JSObject::kElementsOffset));
+ __ CheckMap(elements, Factory::pixel_array_map(), not_pixel_array, true);
+
+ // Check that the smi is in range.
+ __ cmpl(untagged_key, FieldOperand(elements, PixelArray::kLengthOffset));
+ __ j(above_equal, out_of_range); // unsigned check handles negative keys.
+
+ // Load and tag the element as a smi.
+ __ movq(elements, FieldOperand(elements, PixelArray::kExternalPointerOffset));
+ __ movzxbq(result, Operand(elements, untagged_key, times_1, 0));
+ __ Integer32ToSmi(result, result);
+ __ ret(0);
+}
+
+
#undef __
} } // namespace v8::internal
};
+// Generate code to load an element from a pixel array. The receiver is
+// assumed to not be a smi and to have elements; the caller must guarantee
+// this precondition. If the receiver does not have elements that are pixel
+// arrays, the generated code jumps to not_pixel_array. If key is not a smi,
+// then the generated code branches to key_not_smi. Callers can specify NULL
+// for key_not_smi to signal that a smi check has already been performed on
+// key so that the smi check is not generated. If key is not a valid index
+// within the bounds of the pixel array, the generated code jumps to
+// out_of_range.
+void GenerateFastPixelArrayLoad(MacroAssembler* masm,
+ Register receiver,
+ Register key,
+ Register elements,
+ Register untagged_key,
+ Register result,
+ Label* not_pixel_array,
+ Label* key_not_smi,
+ Label* out_of_range);
+
+
} } // namespace v8::internal
#endif // V8_X64_CODE_STUBS_X64_H_
__ ret(0);
__ bind(&check_pixel_array);
- // Check whether the elements object is a pixel array.
- // rdx: receiver
- // rax: key
- __ movq(rcx, FieldOperand(rdx, JSObject::kElementsOffset));
- __ SmiToInteger32(rbx, rax); // Used on both directions of next branch.
- __ CompareRoot(FieldOperand(rcx, HeapObject::kMapOffset),
- Heap::kPixelArrayMapRootIndex);
- __ j(not_equal, &check_number_dictionary);
- __ cmpl(rbx, FieldOperand(rcx, PixelArray::kLengthOffset));
- __ j(above_equal, &slow);
- __ movq(rax, FieldOperand(rcx, PixelArray::kExternalPointerOffset));
- __ movzxbq(rax, Operand(rax, rbx, times_1, 0));
- __ Integer32ToSmi(rax, rax);
- __ ret(0);
+ GenerateFastPixelArrayLoad(masm,
+ rdx,
+ rax,
+ rcx,
+ rbx,
+ rax,
+ &check_number_dictionary,
+ NULL,
+ &slow);
__ bind(&check_number_dictionary);
// Check whether the elements is a number dictionary.
}
+MaybeObject* KeyedLoadStubCompiler::CompileLoadPixelArray(JSObject* receiver) {
+ // ----------- S t a t e -------------
+ // -- rax : key
+ // -- rdx : receiver
+ // -- rsp[0] : return address
+ // -----------------------------------
+ Label miss;
+
+ // Check that the map matches.
+ __ CheckMap(rdx, Handle<Map>(receiver->map()), &miss, false);
+
+ // Generate the fast path; every failure label jumps to miss.
+ GenerateFastPixelArrayLoad(masm(),
+ rdx,
+ rax,
+ rbx,
+ rcx,
+ rax,
+ &miss,
+ &miss,
+ &miss);
+
+ // Handle load cache miss.
+ __ bind(&miss);
+ GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
+
+ // Return the generated code.
+ return GetCode(NORMAL, NULL);
+}
+
+
// Specialized stub for constructing objects from functions which only have only
// simple assignments of the form this.x = ...; in their body.
MaybeObject* ConstructStubCompiler::CompileConstructStub(JSFunction* function) {
"i");
CHECK_EQ(255, result->Int32Value());
+ // Make sure that pixel array ICs recognize when a non-pixel array
+ // is passed to them.
+ result = CompileRun("function pa_load(p) {"
+ " var sum = 0;"
+ " for (var j = 0; j < 256; j++) { sum += p[j]; }"
+ " return sum;"
+ "}"
+ "for (var i = 0; i < 256; ++i) { pixels[i] = i; }"
+ "for (var i = 0; i < 10; ++i) { pa_load(pixels); }"
+ "just_ints = new Object();"
+ "for (var i = 0; i < 256; ++i) { just_ints[i] = i; }"
+ "for (var i = 0; i < 10; ++i) {"
+ " result = pa_load(just_ints);"
+ "}"
+ "result");
+ CHECK_EQ(32640, result->Int32Value());
+
+ // Make sure that pixel array ICs recognize out-of-bound accesses.
+ result = CompileRun("function pa_load(p, start) {"
+ " var sum = 0;"
+ " for (var j = start; j < 256; j++) { sum += p[j]; }"
+ " return sum;"
+ "}"
+ "for (var i = 0; i < 256; ++i) { pixels[i] = i; }"
+ "for (var i = 0; i < 10; ++i) { pa_load(pixels,0); }"
+ "for (var i = 0; i < 10; ++i) {"
+ " result = pa_load(pixels,-10);"
+ "}"
+ "result");
+ CHECK_EQ(0, result->Int32Value());
+
+ // Make sure that generic ICs properly handle a pixel array.
+ result = CompileRun("function pa_load(p) {"
+ " var sum = 0;"
+ " for (var j = 0; j < 256; j++) { sum += p[j]; }"
+ " return sum;"
+ "}"
+ "for (var i = 0; i < 256; ++i) { pixels[i] = i; }"
+ "just_ints = new Object();"
+ "for (var i = 0; i < 256; ++i) { just_ints[i] = i; }"
+ "for (var i = 0; i < 10; ++i) { pa_load(just_ints); }"
+ "for (var i = 0; i < 10; ++i) {"
+ " result = pa_load(pixels);"
+ "}"
+ "result");
+ CHECK_EQ(32640, result->Int32Value());
+
+ // Make sure that generic load ICs recognize out-of-bound accesses in
+ // pixel arrays.
+ result = CompileRun("function pa_load(p, start) {"
+ " var sum = 0;"
+ " for (var j = start; j < 256; j++) { sum += p[j]; }"
+ " return sum;"
+ "}"
+ "for (var i = 0; i < 256; ++i) { pixels[i] = i; }"
+ "just_ints = new Object();"
+ "for (var i = 0; i < 256; ++i) { just_ints[i] = i; }"
+ "for (var i = 0; i < 10; ++i) { pa_load(just_ints,0); }"
+ "for (var i = 0; i < 10; ++i) { pa_load(pixels,0); }"
+ "for (var i = 0; i < 10; ++i) {"
+ " result = pa_load(pixels,-10);"
+ "}"
+ "result");
+ CHECK_EQ(0, result->Int32Value());
+
+ // Make sure that generic ICs properly handle types other than pixel
+ // arrays (that the inlined fast pixel array test leaves the right information
+ // in the right registers).
+ result = CompileRun("function pa_load(p) {"
+ " var sum = 0;"
+ " for (var j = 0; j < 256; j++) { sum += p[j]; }"
+ " return sum;"
+ "}"
+ "for (var i = 0; i < 256; ++i) { pixels[i] = i; }"
+ "just_ints = new Object();"
+ "for (var i = 0; i < 256; ++i) { just_ints[i] = i; }"
+ "for (var i = 0; i < 10; ++i) { pa_load(just_ints); }"
+ "for (var i = 0; i < 10; ++i) { pa_load(pixels); }"
+ "sparse_array = new Object();"
+ "for (var i = 0; i < 256; ++i) { sparse_array[i] = i; }"
+ "sparse_array[1000000] = 3;"
+ "for (var i = 0; i < 10; ++i) {"
+ " result = pa_load(sparse_array);"
+ "}"
+ "result");
+ CHECK_EQ(32640, result->Int32Value());
+
free(pixel_data);
}
}
f();
f();
+f();
+f();
assertTrue(2[11] === undefined);
Number.prototype[11] = 'y';