1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
28 #ifndef V8_CODE_STUBS_H_
29 #define V8_CODE_STUBS_H_
31 #include "allocation.h"
// List of code stubs used on all platforms.
// NOTE(review): several V(...) entries are elided in this view; each
// trailing backslash continues the macro onto the next line.
#define CODE_STUB_LIST_ALL_PLATFORMS(V)  \
  V(StoreBufferOverflow)                 \
  V(TranscendentalCache)                 \
  V(WriteInt32ToHeapNumber)              \
  V(FastNewBlockContext)                 \
  V(FastCloneShallowArray)               \
  V(FastCloneShallowObject)              \
  V(RegExpConstructResult)               \
  V(KeyedStoreElement)                   \
  V(DebuggerStatement)                   \
  V(StringDictionaryLookup)              \
  V(ElementsTransitionAndStore)          \
  V(StoreArrayLiteralElement)

// List of code stubs only used on ARM platforms.
// NOTE(review): the #else/#endif lines of this conditional (and the ARM
// stub entries) are elided in this view; the second #define is the empty
// non-ARM fallback.
#ifdef V8_TARGET_ARCH_ARM
#define CODE_STUB_LIST_ARM(V) \
#define CODE_STUB_LIST_ARM(V)

// List of code stubs only used on MIPS platforms.
// NOTE(review): the #else/#endif lines of this conditional are elided in
// this view; the second #define is the empty non-MIPS fallback.
#ifdef V8_TARGET_ARCH_MIPS
#define CODE_STUB_LIST_MIPS(V) \
#define CODE_STUB_LIST_MIPS(V)

// Combined list of code stubs: the platform-independent stubs plus any
// stubs specific to the current target architecture.
#define CODE_STUB_LIST(V)            \
  CODE_STUB_LIST_ALL_PLATFORMS(V)    \
  CODE_STUB_LIST_ARM(V)              \
  CODE_STUB_LIST_MIPS(V)
// Mode to overwrite BinaryExpression values: which operand (if any) may be
// reused to hold the result.
enum OverwriteMode { NO_OVERWRITE, OVERWRITE_LEFT, OVERWRITE_RIGHT };
// Same idea for unary expressions: may the single operand be overwritten?
enum UnaryOverwriteMode { UNARY_OVERWRITE, UNARY_NO_OVERWRITE };
// CodeStub is the base class of all stubs.
class CodeStub BASE_EMBEDDED {
// Expands every CODE_STUB_LIST entry into an enumerator of the major-key
// enum declared here (the enum header itself is elided in this view).
#define DEF_ENUM(name) name,
  CODE_STUB_LIST(DEF_ENUM)
  NoCache,  // marker for stubs that do custom caching
  // Retrieve the code for the stub. Generate the code if needed.
  Handle<Code> GetCode();
  // Decodes the Major enum value from an encoded 32-bit stub key.
  static Major MajorKeyFromKey(uint32_t key) {
    return static_cast<Major>(MajorKeyBits::decode(key));
  // Decodes the stub-specific minor key from an encoded 32-bit stub key.
  static int MinorKeyFromKey(uint32_t key) {
    return MinorKeyBits::decode(key);
  // Gets the major key from a code object that is a code stub or binary op IC.
  static Major GetMajorKey(Code* code_stub) {
    return static_cast<Major>(code_stub->major_key());
  // Returns a printable name for the given major key (logging/debugging).
  static const char* MajorName(Major major_key, bool allow_unknown_keys);
  virtual ~CodeStub() {}
  // Calls to a stub are only GC-safe to compile if the stub is
  // pregenerated; a pregenerated stub must already be in the cache.
  bool CompilingCallsToThisStubIsGCSafe() {
    bool is_pregenerated = IsPregenerated();
    CHECK(!is_pregenerated || FindCodeInCache(&code));
    return is_pregenerated;
  // See comment above, where Instanceof is defined.
  virtual bool IsPregenerated() { return false; }
  // Generate the pregenerated stubs (and FP variants) ahead of time.
  static void GenerateStubsAheadOfTime();
  static void GenerateFPStubs();
  // Some stubs put untagged junk on the stack that cannot be scanned by the
  // GC. This means that we must be statically sure that no GC can occur while
  // they are running. If that is the case they should override this to return
  // true, which will cause an assertion if we try to call something that can
  // GC or if we try to put a stack frame on top of the junk, which would not
  // result in a traversable stack.
  virtual bool SometimesSetsUpAFrame() { return true; }
  // Lookup the code in the (possibly custom) cache.
  bool FindCodeInCache(Code** code_out);
  // Width of the major-key field; the minor key gets all remaining
  // non-smi-tag bits of the 32-bit key.
  static const int kMajorBits = 6;
  static const int kMinorBits = kBitsPerInt - kSmiTagSize - kMajorBits;
  // Nonvirtual wrapper around the stub-specific Generate function. Call
  // this function to set up the macro assembler and generate the code.
  void GenerateCode(MacroAssembler* masm);
  // Generates the assembler code for the stub.
  virtual void Generate(MacroAssembler* masm) = 0;
  // Perform bookkeeping required after code generation when stub code is
  // initially generated.
  void RecordCodeGeneration(Code* code, MacroAssembler* masm);
  // Finish the code object after it has been generated.
  virtual void FinishCode(Handle<Code> code) { }
  // Activate newly generated stub. Is called after
  // registering stub in the stub cache.
  virtual void Activate(Code* code) { }
  // Returns information for computing the number key.
  virtual Major MajorKey() = 0;
  virtual int MinorKey() = 0;
  // BinaryOpStub needs to override this.
  virtual int GetCodeKind();
  // BinaryOpStub needs to override this.
  virtual InlineCacheState GetICState() {
    return UNINITIALIZED;
  // Add the code to a specialized cache, specific to an individual
  // stub type. Please note, this method must add the code object to a
  // roots object, otherwise we will remove the code during GC.
  virtual void AddToSpecialCache(Handle<Code> new_object) { }
  // Find code in a specialized cache, work is delegated to the specific stub.
  virtual bool FindCodeInSpecialCache(Code** code_out) { return false; }
  // If a stub uses a special cache override this.
  virtual bool UseSpecialCache() { return false; }
  // Returns a name for logging/debugging purposes.
  SmartArrayPointer<const char> GetName();
  virtual void PrintName(StringStream* stream);
  // Returns whether the code generated for this stub needs to be allocated as
  // a fixed (non-moveable) code object.
  virtual bool NeedsImmovableCode() { return false; }
  // Computes the key based on major and minor.
    ASSERT(static_cast<int>(MajorKey()) < NUMBER_OF_IDS);
    return MinorKeyBits::encode(MinorKey()) |
           MajorKeyBits::encode(MajorKey());
  // Bit-field layout of the 32-bit stub key: major key in the low bits,
  // minor key in the bits above it.
  class MajorKeyBits: public BitField<uint32_t, 0, kMajorBits> {};
  class MinorKeyBits: public BitField<uint32_t, kMajorBits, kMinorBits> {};
  friend class BreakPointIterator;
// Helper interface to prepare to/restore after making runtime calls.
class RuntimeCallHelper {
  virtual ~RuntimeCallHelper() {}
  // Called before the runtime call to do any required setup.
  virtual void BeforeCall(MacroAssembler* masm) const = 0;
  // Called after the runtime call to undo whatever BeforeCall did.
  virtual void AfterCall(MacroAssembler* masm) const = 0;
  RuntimeCallHelper() {}
  DISALLOW_COPY_AND_ASSIGN(RuntimeCallHelper);
248 } } // namespace v8::internal
250 #if V8_TARGET_ARCH_IA32
251 #include "ia32/code-stubs-ia32.h"
252 #elif V8_TARGET_ARCH_X64
253 #include "x64/code-stubs-x64.h"
254 #elif V8_TARGET_ARCH_ARM
255 #include "arm/code-stubs-arm.h"
256 #elif V8_TARGET_ARCH_MIPS
257 #include "mips/code-stubs-mips.h"
259 #error Unsupported target architecture.
266 // RuntimeCallHelper implementation used in stubs: enters/leaves a
267 // newly created internal frame before/after the runtime call.
// Enters a newly created internal frame before the runtime call and
// leaves it afterwards (see comment above the class).
class StubRuntimeCallHelper : public RuntimeCallHelper {
  StubRuntimeCallHelper() {}
  // Enters the internal frame.
  virtual void BeforeCall(MacroAssembler* masm) const;
  // Leaves the internal frame.
  virtual void AfterCall(MacroAssembler* masm) const;
278 // Trivial RuntimeCallHelper implementation.
// Trivial RuntimeCallHelper: no setup or teardown around the call.
class NopRuntimeCallHelper : public RuntimeCallHelper {
  NopRuntimeCallHelper() {}
  virtual void BeforeCall(MacroAssembler* masm) const {}
  virtual void AfterCall(MacroAssembler* masm) const {}
class StackCheckStub : public CodeStub {
  // Emits the stub's assembly; implementation is platform-specific.
  void Generate(MacroAssembler* masm);
  Major MajorKey() { return StackCheck; }
  int MinorKey() { return 0; }  // single variant, no minor-key state
class InterruptStub : public CodeStub {
  // Emits the stub's assembly; implementation is platform-specific.
  void Generate(MacroAssembler* masm);
  Major MajorKey() { return Interrupt; }
  int MinorKey() { return 0; }  // single variant, no minor-key state
class ToNumberStub: public CodeStub {
  // Emits the stub's assembly; implementation is platform-specific.
  void Generate(MacroAssembler* masm);
  Major MajorKey() { return ToNumber; }
  int MinorKey() { return 0; }  // single variant, no minor-key state
class FastNewClosureStub : public CodeStub {
  explicit FastNewClosureStub(LanguageMode language_mode)
      : language_mode_(language_mode) { }
  void Generate(MacroAssembler* masm);
  Major MajorKey() { return FastNewClosure; }
  // Two variants: classic-mode and strict-mode closures.
  int MinorKey() { return language_mode_ == CLASSIC_MODE
        ? kNonStrictMode : kStrictMode; }
  // Language mode of the closure being created.
  LanguageMode language_mode_;
class FastNewContextStub : public CodeStub {
  // Largest slot count the fast-path stub supports.
  static const int kMaximumSlots = 64;
  explicit FastNewContextStub(int slots) : slots_(slots) {
    ASSERT(slots_ > 0 && slots_ <= kMaximumSlots);
  void Generate(MacroAssembler* masm);
  Major MajorKey() { return FastNewContext; }
  int MinorKey() { return slots_; }  // one stub variant per slot count
class FastNewBlockContextStub : public CodeStub {
  // Largest slot count the fast-path stub supports.
  static const int kMaximumSlots = 64;
  explicit FastNewBlockContextStub(int slots) : slots_(slots) {
    ASSERT(slots_ > 0 && slots_ <= kMaximumSlots);
  void Generate(MacroAssembler* masm);
  Major MajorKey() { return FastNewBlockContext; }
  int MinorKey() { return slots_; }  // one stub variant per slot count
class FastCloneShallowArrayStub : public CodeStub {
  // Maximum length of copied elements array.
  static const int kMaximumClonedLength = 8;
  // NOTE(review): the Mode enum's header and remaining enumerators are
  // elided in this view.
  CLONE_DOUBLE_ELEMENTS,
  COPY_ON_WRITE_ELEMENTS,
  FastCloneShallowArrayStub(Mode mode, int length)
      // COW arrays need no element copy, so the length is forced to 0.
      length_((mode == COPY_ON_WRITE_ELEMENTS) ? 0 : length) {
    ASSERT_GE(length_, 0);
    ASSERT_LE(length_, kMaximumClonedLength);
  void Generate(MacroAssembler* masm);
  Major MajorKey() { return FastCloneShallowArray; }
    // Minor key packs (length, mode); mode occupies the low two bits.
    ASSERT(mode_ == 0 || mode_ == 1 || mode_ == 2 || mode_ == 3);
    return length_ * 4 + mode_;
class FastCloneShallowObjectStub : public CodeStub {
  // Maximum number of properties in copied object.
  static const int kMaximumClonedProperties = 6;
  explicit FastCloneShallowObjectStub(int length) : length_(length) {
    ASSERT_GE(length_, 0);
    ASSERT_LE(length_, kMaximumClonedProperties);
  void Generate(MacroAssembler* masm);
  Major MajorKey() { return FastCloneShallowObject; }
  int MinorKey() { return length_; }  // one stub variant per property count
class InstanceofStub: public CodeStub {
  // Combinable flag bits controlling how the stub is specialized
  // (enum header elided in this view).
  kArgsInRegisters = 1 << 0,
  kCallSiteInlineCheck = 1 << 1,
  kReturnTrueFalseObject = 1 << 2
  explicit InstanceofStub(Flags flags) : flags_(flags) { }
  // Registers used for the left/right operands.
  static Register left();
  static Register right();
  void Generate(MacroAssembler* masm);
  Major MajorKey() { return Instanceof; }
  int MinorKey() { return static_cast<int>(flags_); }  // flags identify variant
  // True if the operands arrive in registers rather than on the stack.
  bool HasArgsInRegisters() const {
    return (flags_ & kArgsInRegisters) != 0;
  // True if the call site performs an inline map check.
  bool HasCallSiteInlineCheck() const {
    return (flags_ & kCallSiteInlineCheck) != 0;
  // True if the stub returns the true/false objects instead of a flag.
  bool ReturnTrueFalseObject() const {
    return (flags_ & kReturnTrueFalseObject) != 0;
  virtual void PrintName(StringStream* stream);
class MathPowStub: public CodeStub {
  // How the exponent is supplied to the stub.
  enum ExponentType { INTEGER, DOUBLE, TAGGED, ON_STACK};
  explicit MathPowStub(ExponentType exponent_type)
      : exponent_type_(exponent_type) { }
  virtual void Generate(MacroAssembler* masm);
  virtual CodeStub::Major MajorKey() { return MathPow; }
  virtual int MinorKey() { return exponent_type_; }  // one variant per type
  ExponentType exponent_type_;
class ICCompareStub: public CodeStub {
  ICCompareStub(Token::Value op, CompareIC::State state)
      : op_(op), state_(state) {
    ASSERT(Token::IsCompareOp(op));
  virtual void Generate(MacroAssembler* masm);
  // Map used by KNOWN_OBJECTS comparisons (see UseSpecialCache below).
  void set_known_map(Handle<Map> map) { known_map_ = map; }
  // Minor-key encoding: token op in the low 3 bits, IC state above it.
  class OpField: public BitField<int, 0, 3> { };
  class StateField: public BitField<int, 3, 5> { };
  // Record the compare state and operation on the generated code object.
  virtual void FinishCode(Handle<Code> code) {
    code->set_compare_state(state_);
    code->set_compare_operation(op_);
  virtual CodeStub::Major MajorKey() { return CompareIC; }
  virtual int MinorKey();
  virtual int GetCodeKind() { return Code::COMPARE_IC; }
  // Code generators for the individual CompareIC states.
  void GenerateSmis(MacroAssembler* masm);
  void GenerateHeapNumbers(MacroAssembler* masm);
  void GenerateSymbols(MacroAssembler* masm);
  void GenerateStrings(MacroAssembler* masm);
  void GenerateObjects(MacroAssembler* masm);
  void GenerateMiss(MacroAssembler* masm);
  void GenerateKnownObjects(MacroAssembler* masm);
  bool strict() const { return op_ == Token::EQ_STRICT; }
  Condition GetCondition() const { return CompareIC::ComputeCondition(op_); }
  // Only KNOWN_OBJECTS stubs use the special cache.
  virtual void AddToSpecialCache(Handle<Code> new_object);
  virtual bool FindCodeInSpecialCache(Code** code_out);
  virtual bool UseSpecialCache() { return state_ == CompareIC::KNOWN_OBJECTS; }
  CompareIC::State state_;
  Handle<Map> known_map_;
// Flags that control the compare stub code generation.
// NOTE(review): the enum header/closer lines are elided in this view;
// these are the CompareFlags enumerators (combinable bit flags).
  NO_COMPARE_FLAGS = 0,
  NO_SMI_COMPARE_IN_STUB = 1 << 0,
  NO_NUMBER_COMPARE_IN_STUB = 1 << 1,
  CANT_BOTH_BE_NAN = 1 << 2

// NOTE(review): the NaNInformation enumerators are elided in this view.
enum NaNInformation {
class CompareStub: public CodeStub {
  // NOTE(review): parts of both constructors (parameter lists, some
  // initializers, bodies) are elided in this view.
  CompareStub(Condition cc,
        never_nan_nan_((flags & CANT_BOTH_BE_NAN) != 0),
        include_number_compare_((flags & NO_NUMBER_COMPARE_IN_STUB) == 0),
        include_smi_compare_((flags & NO_SMI_COMPARE_IN_STUB) == 0),
  CompareStub(Condition cc,
              CompareFlags flags) :
        never_nan_nan_((flags & CANT_BOTH_BE_NAN) != 0),
        include_number_compare_((flags & NO_NUMBER_COMPARE_IN_STUB) == 0),
        include_smi_compare_((flags & NO_SMI_COMPARE_IN_STUB) == 0),
  void Generate(MacroAssembler* masm);
  // Only used for 'equal' comparisons. Tells the stub that we already know
  // that at least one side of the comparison is not NaN. This allows the
  // stub to use object identity in the positive case. We ignore it when
  // generating the minor key for other comparisons to avoid creating more
  // stubs than necessary.
  // Do generate the number comparison code in the stub. Stubs without number
  // comparison code are used when the number comparison has been inlined, and
  // the stub will be called if one of the operands is not a number.
  bool include_number_compare_;
  // Generate the comparison code for two smi operands in the stub.
  bool include_smi_compare_;
  // Register holding the left hand side of the comparison if the stub gives
  // a choice, no_reg otherwise.
  // Register holding the right hand side of the comparison if the stub gives
  // a choice, no_reg otherwise.
  // Encoding of the minor key in 16 bits.
  class StrictField: public BitField<bool, 0, 1> {};
  class NeverNanNanField: public BitField<bool, 1, 1> {};
  class IncludeNumberCompareField: public BitField<bool, 2, 1> {};
  class IncludeSmiCompareField: public BitField<bool, 3, 1> {};
  class RegisterField: public BitField<bool, 4, 1> {};
  class ConditionField: public BitField<int, 5, 11> {};
  Major MajorKey() { return Compare; }
  virtual int GetCodeKind() { return Code::COMPARE_IC; }
  // This stub always represents the GENERIC compare-IC state.
  virtual void FinishCode(Handle<Code> code) {
    code->set_compare_state(CompareIC::GENERIC);
  // Branch to the label if the given object isn't a symbol.
  void BranchIfNonSymbol(MacroAssembler* masm,
  // Unfortunately you have to run without snapshots to see most of these
  // names in the profile since most compare stubs end up in the snapshot.
  virtual void PrintName(StringStream* stream);
class CEntryStub : public CodeStub {
  explicit CEntryStub(int result_size,
                      SaveFPRegsMode save_doubles = kDontSaveFPRegs)
      : result_size_(result_size), save_doubles_(save_doubles) { }
  void Generate(MacroAssembler* masm);
  // The version of this stub that doesn't save doubles is generated ahead of
  // time, so it's OK to call it from other stubs that can't cope with GC during
  // their code generation. On machines that always have gp registers (x64) we
  // can generate both variants ahead of time.
  virtual bool IsPregenerated();
  static void GenerateAheadOfTime();
  // Core code generation; control transfers to the given labels when the
  // corresponding exceptional condition is detected.
  void GenerateCore(MacroAssembler* masm,
                    Label* throw_normal_exception,
                    Label* throw_termination_exception,
                    Label* throw_out_of_memory_exception,
                    bool always_allocate_scope);
  // Number of pointers/values returned.
  const int result_size_;
  SaveFPRegsMode save_doubles_;
  Major MajorKey() { return CEntry; }
  // Overrides CodeStub::NeedsImmovableCode (declaration only here).
  bool NeedsImmovableCode();
class JSEntryStub : public CodeStub {
  // Entry trampoline for a normal (non-construct) call into JS.
  void Generate(MacroAssembler* masm) { GenerateBody(masm, false); }
  // Shared body; is_construct selects the construct-call variant.
  void GenerateBody(MacroAssembler* masm, bool is_construct);
  Major MajorKey() { return JSEntry; }
  int MinorKey() { return 0; }
  virtual void FinishCode(Handle<Code> code);
class JSConstructEntryStub : public JSEntryStub {
  JSConstructEntryStub() { }
  // Same entry body as JSEntryStub, but for construct calls.
  void Generate(MacroAssembler* masm) { GenerateBody(masm, true); }
  // Distinguishes this stub from the plain JSEntryStub (minor key 0).
  int MinorKey() { return 1; }
  virtual void PrintName(StringStream* stream) {
    stream->Add("JSConstructEntryStub");
class ArgumentsAccessStub: public CodeStub {
  // NOTE(review): the Type enum declared here is elided in this view.
  explicit ArgumentsAccessStub(Type type) : type_(type) { }
  Major MajorKey() { return ArgumentsAccess; }
  int MinorKey() { return type_; }  // one stub variant per Type
  // Dispatches to one of the per-type generators below.
  void Generate(MacroAssembler* masm);
  void GenerateReadElement(MacroAssembler* masm);
  void GenerateNewStrict(MacroAssembler* masm);
  void GenerateNewNonStrictFast(MacroAssembler* masm);
  void GenerateNewNonStrictSlow(MacroAssembler* masm);
  virtual void PrintName(StringStream* stream);
class RegExpExecStub: public CodeStub {
  Major MajorKey() { return RegExpExec; }
  int MinorKey() { return 0; }  // single variant, no minor-key state
  void Generate(MacroAssembler* masm);
class RegExpConstructResultStub: public CodeStub {
  RegExpConstructResultStub() { }
  Major MajorKey() { return RegExpConstructResult; }
  int MinorKey() { return 0; }  // single variant, no minor-key state
  void Generate(MacroAssembler* masm);
class CallFunctionStub: public CodeStub {
  CallFunctionStub(int argc, CallFunctionFlags flags)
      : argc_(argc), flags_(flags) { }
  void Generate(MacroAssembler* masm);
  // Mark the generated code as having a function cache when the stub
  // records call targets.
  virtual void FinishCode(Handle<Code> code) {
    code->set_has_function_cache(RecordCallTarget());
  // Recovers the argument count from an encoded minor key.
  static int ExtractArgcFromMinorKey(int minor_key) {
    return ArgcBits::decode(minor_key);
  CallFunctionFlags flags_;
  virtual void PrintName(StringStream* stream);
  // Minor key encoding in 32 bits with Bitfield <Type, shift, size>.
  class FlagBits: public BitField<CallFunctionFlags, 0, 2> {};
  class ArgcBits: public BitField<unsigned, 2, 32 - 2> {};
  Major MajorKey() { return CallFunction; }
  // Encode the parameters in a unique 32 bit value.
    return FlagBits::encode(flags_) | ArgcBits::encode(argc_);
  // True if the receiver may be implicitly supplied by the call site.
  bool ReceiverMightBeImplicit() {
    return (flags_ & RECEIVER_MIGHT_BE_IMPLICIT) != 0;
  // True if the stub records the call target in a function cache.
  bool RecordCallTarget() {
    return (flags_ & RECORD_CALL_TARGET) != 0;
class CallConstructStub: public CodeStub {
  explicit CallConstructStub(CallFunctionFlags flags) : flags_(flags) {}
  void Generate(MacroAssembler* masm);
  // Mark the generated code as having a function cache when the stub
  // records call targets.
  virtual void FinishCode(Handle<Code> code) {
    code->set_has_function_cache(RecordCallTarget());
  CallFunctionFlags flags_;
  virtual void PrintName(StringStream* stream);
  Major MajorKey() { return CallConstruct; }
  int MinorKey() { return flags_; }  // flags alone identify the variant
  // True if the stub records the call target in a function cache.
  bool RecordCallTarget() {
    return (flags_ & RECORD_CALL_TARGET) != 0;
// How the string-char generators below interpret their index argument.
enum StringIndexFlags {
  // Accepts smis or heap numbers.
  STRING_INDEX_IS_NUMBER,
  // Accepts smis or heap numbers that are valid array indices
  // (ECMA-262 15.4). Invalid indices are reported as being out of
  // range.
  STRING_INDEX_IS_ARRAY_INDEX
822 // Generates code implementing String.prototype.charCodeAt.
824 // Only supports the case when the receiver is a string and the index
825 // is a number (smi or heap number) that is a valid index into the
826 // string. Additional index constraints are specified by the
827 // flags. Otherwise, bails out to the provided labels.
829 // Register usage: |object| may be changed to another string in a way
830 // that doesn't affect charCodeAt/charAt semantics, |index| is
831 // preserved, |scratch| and |result| are clobbered.
class StringCharCodeAtGenerator {
  // NOTE(review): some constructor parameters and initializers are elided
  // in this view.
  StringCharCodeAtGenerator(Register object,
                            Label* receiver_not_string,
                            Label* index_not_number,
                            Label* index_out_of_range,
                            StringIndexFlags index_flags)
        receiver_not_string_(receiver_not_string),
        index_not_number_(index_not_number),
        index_out_of_range_(index_out_of_range),
        index_flags_(index_flags) {
    // The result register must not alias the inputs it is computed from.
    ASSERT(!result_.is(object_));
    ASSERT(!result_.is(index_));
  // Generates the fast case code. On the fallthrough path |result|
  // register contains the result.
  void GenerateFast(MacroAssembler* masm);
  // Generates the slow case code. Must not be naturally
  // reachable. Expected to be put after a ret instruction (e.g., in
  // deferred code). Always jumps back to the fast case.
  void GenerateSlow(MacroAssembler* masm,
                    const RuntimeCallHelper& call_helper);
  // Bail-out targets for the unsupported cases.
  Label* receiver_not_string_;
  Label* index_not_number_;
  Label* index_out_of_range_;
  StringIndexFlags index_flags_;
  // Internal labels connecting the fast and slow paths.
  Label index_not_smi_;
  Label got_smi_index_;
  DISALLOW_COPY_AND_ASSIGN(StringCharCodeAtGenerator);
882 // Generates code for creating a one-char string from a char code.
class StringCharFromCodeGenerator {
  // NOTE(review): remaining constructor parameters and initializers are
  // elided in this view.
  StringCharFromCodeGenerator(Register code,
    // The result register must not alias the char-code input.
    ASSERT(!code_.is(result_));
  // Generates the fast case code. On the fallthrough path |result|
  // register contains the result.
  void GenerateFast(MacroAssembler* masm);
  // Generates the slow case code. Must not be naturally
  // reachable. Expected to be put after a ret instruction (e.g., in
  // deferred code). Always jumps back to the fast case.
  void GenerateSlow(MacroAssembler* masm,
                    const RuntimeCallHelper& call_helper);
  DISALLOW_COPY_AND_ASSIGN(StringCharFromCodeGenerator);
913 // Generates code implementing String.prototype.charAt.
915 // Only supports the case when the receiver is a string and the index
916 // is a number (smi or heap number) that is a valid index into the
917 // string. Additional index constraints are specified by the
918 // flags. Otherwise, bails out to the provided labels.
920 // Register usage: |object| may be changed to another string in a way
921 // that doesn't affect charCodeAt/charAt semantics, |index| is
922 // preserved, |scratch1|, |scratch2|, and |result| are clobbered.
class StringCharAtGenerator {
  // NOTE(review): some constructor parameters and forwarded arguments are
  // elided in this view.
  StringCharAtGenerator(Register object,
                        Label* receiver_not_string,
                        Label* index_not_number,
                        Label* index_out_of_range,
                        StringIndexFlags index_flags)
      : char_code_at_generator_(object,
        char_from_code_generator_(scratch, result) {}
  // Generates the fast case code. On the fallthrough path |result|
  // register contains the result.
  void GenerateFast(MacroAssembler* masm);
  // Generates the slow case code. Must not be naturally
  // reachable. Expected to be put after a ret instruction (e.g., in
  // deferred code). Always jumps back to the fast case.
  void GenerateSlow(MacroAssembler* masm,
                    const RuntimeCallHelper& call_helper);
  // charAt is composed of charCodeAt followed by fromCharCode.
  StringCharCodeAtGenerator char_code_at_generator_;
  StringCharFromCodeGenerator char_from_code_generator_;
  DISALLOW_COPY_AND_ASSIGN(StringCharAtGenerator);
// RAII scope that sets MacroAssembler::allow_stub_calls for its lifetime
// and restores the previous value on destruction.
class AllowStubCallsScope {
  AllowStubCallsScope(MacroAssembler* masm, bool allow)
      : masm_(masm), previous_allow_(masm->allow_stub_calls()) {
    masm_->set_allow_stub_calls(allow);
  ~AllowStubCallsScope() {
    masm_->set_allow_stub_calls(previous_allow_);
  MacroAssembler* masm_;
  bool previous_allow_;  // value to restore on scope exit
  DISALLOW_COPY_AND_ASSIGN(AllowStubCallsScope);
class KeyedLoadElementStub : public CodeStub {
  explicit KeyedLoadElementStub(ElementsKind elements_kind)
      : elements_kind_(elements_kind)
  Major MajorKey() { return KeyedLoadElement; }
  int MinorKey() { return elements_kind_; }  // one variant per elements kind
  void Generate(MacroAssembler* masm);
  ElementsKind elements_kind_;
  DISALLOW_COPY_AND_ASSIGN(KeyedLoadElementStub);
class KeyedStoreElementStub : public CodeStub {
  KeyedStoreElementStub(bool is_js_array,
                        ElementsKind elements_kind,
                        KeyedAccessGrowMode grow_mode)
      : is_js_array_(is_js_array),
        elements_kind_(elements_kind),
        grow_mode_(grow_mode) { }
  Major MajorKey() { return KeyedStoreElement; }
    // Minor key packs elements kind, grow mode and the JS-array flag.
    return ElementsKindBits::encode(elements_kind_) |
           IsJSArrayBits::encode(is_js_array_) |
           GrowModeBits::encode(grow_mode_);
  void Generate(MacroAssembler* masm);
  // Bit-field layout of the minor key.
  class ElementsKindBits: public BitField<ElementsKind, 0, 8> {};
  class GrowModeBits: public BitField<KeyedAccessGrowMode, 8, 1> {};
  class IsJSArrayBits: public BitField<bool, 9, 1> {};
  ElementsKind elements_kind_;
  KeyedAccessGrowMode grow_mode_;
  DISALLOW_COPY_AND_ASSIGN(KeyedStoreElementStub);
class ToBooleanStub: public CodeStub {
  // NOTE(review): the Type enum and the opening of the nested Types class
  // are elided in this view.
  // At most 8 different types can be distinguished, because the Code object
  // only has room for a single byte to hold a set of these types. :-P
  STATIC_ASSERT(NUMBER_OF_TYPES <= 8);
  explicit Types(byte bits) : set_(bits) {}
  bool IsEmpty() const { return set_.IsEmpty(); }
  bool Contains(Type type) const { return set_.Contains(type); }
  void Add(Type type) { set_.Add(type); }
  // The whole type set fits in one byte (see STATIC_ASSERT above).
  byte ToByte() const { return set_.ToIntegral(); }
  void Print(StringStream* stream) const;
  void TraceTransition(Types to) const;
  bool Record(Handle<Object> object);
  bool NeedsMap() const;
  bool CanBeUndetectable() const;
  EnumSet<Type, byte> set_;
  static Types no_types() { return Types(); }
  static Types all_types() { return Types((1 << NUMBER_OF_TYPES) - 1); }
  explicit ToBooleanStub(Register tos, Types types = Types())
      : tos_(tos), types_(types) { }
  void Generate(MacroAssembler* masm);
  virtual int GetCodeKind() { return Code::TO_BOOLEAN_IC; }
  virtual void PrintName(StringStream* stream);
  virtual bool SometimesSetsUpAFrame() { return false; }
  Major MajorKey() { return ToBoolean; }
  // Minor key packs the result register code above the type-set byte.
  int MinorKey() { return (tos_.code() << NUMBER_OF_TYPES) | types_.ToByte(); }
  // Record the observed type set on the generated code object.
  virtual void FinishCode(Handle<Code> code) {
    code->set_to_boolean_state(types_.ToByte());
  // NOTE(review): some parameters of CheckOddball are elided in this view.
  void CheckOddball(MacroAssembler* masm,
                    Heap::RootListIndex value,
  void GenerateTypeTransition(MacroAssembler* masm);
class ElementsTransitionAndStoreStub : public CodeStub {
  // NOTE(review): some constructor parameters and initializers are elided
  // in this view.
  ElementsTransitionAndStoreStub(ElementsKind from,
                                 StrictModeFlag strict_mode,
                                 KeyedAccessGrowMode grow_mode)
        is_jsarray_(is_jsarray),
        strict_mode_(strict_mode),
        grow_mode_(grow_mode) {}
  // Bit-field layout of the minor key: source/target elements kinds in the
  // low 16 bits, flags above them.
  class FromBits: public BitField<ElementsKind, 0, 8> {};
  class ToBits: public BitField<ElementsKind, 8, 8> {};
  class IsJSArrayBits: public BitField<bool, 16, 1> {};
  class StrictModeBits: public BitField<StrictModeFlag, 17, 1> {};
  class GrowModeBits: public BitField<KeyedAccessGrowMode, 18, 1> {};
  Major MajorKey() { return ElementsTransitionAndStore; }
    return FromBits::encode(from_) |
           ToBits::encode(to_) |
           IsJSArrayBits::encode(is_jsarray_) |
           StrictModeBits::encode(strict_mode_) |
           GrowModeBits::encode(grow_mode_);
  void Generate(MacroAssembler* masm);
  StrictModeFlag strict_mode_;
  KeyedAccessGrowMode grow_mode_;
  DISALLOW_COPY_AND_ASSIGN(ElementsTransitionAndStoreStub);
class StoreArrayLiteralElementStub : public CodeStub {
  explicit StoreArrayLiteralElementStub() {}
  Major MajorKey() { return StoreArrayLiteralElement; }
  int MinorKey() { return 0; }  // single variant, no minor-key state
  void Generate(MacroAssembler* masm);
  DISALLOW_COPY_AND_ASSIGN(StoreArrayLiteralElementStub);
1148 } } // namespace v8::internal
1150 #endif // V8_CODE_STUBS_H_