1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
28 #ifndef V8_CODE_STUBS_H_
29 #define V8_CODE_STUBS_H_
31 #include "allocation.h"
38 // List of code stubs used on all platforms.
// Each V(name) entry is expanded by the macro argument V; e.g. CodeStub uses
// DEF_ENUM to turn each entry into a Major enum value.
// NOTE(review): several V(...) entries of the original list are not visible in
// this excerpt — confirm against the full file before editing.
39 #define CODE_STUB_LIST_ALL_PLATFORMS(V) \
51 V(StoreBufferOverflow) \
53 V(TranscendentalCache) \
56 V(WriteInt32ToHeapNumber) \
61 V(FastNewBlockContext) \
62 V(FastCloneShallowArray) \
63 V(FastCloneShallowObject) \
67 V(RegExpConstructResult) \
72 V(KeyedStoreElement) \
73 V(DebuggerStatement) \
74 V(StringDictionaryLookup) \
75 V(ElementsTransitionAndStore) \
76 V(StoreArrayLiteralElement)
// NOTE(review): the #else/#endif lines of the two arch guards below are not
// visible in this excerpt; do not insert comments between the continued
// #define lines — a // comment would splice into the macro body.
78 // List of code stubs only used on ARM platforms.
79 #ifdef V8_TARGET_ARCH_ARM
80 #define CODE_STUB_LIST_ARM(V) \
87 #define CODE_STUB_LIST_ARM(V)
90 // List of code stubs only used on MIPS platforms.
91 #ifdef V8_TARGET_ARCH_MIPS
92 #define CODE_STUB_LIST_MIPS(V) \
96 #define CODE_STUB_LIST_MIPS(V)
99 // Combined list of code stubs.
100 #define CODE_STUB_LIST(V) \
101 CODE_STUB_LIST_ALL_PLATFORMS(V) \
102 CODE_STUB_LIST_ARM(V) \
103 CODE_STUB_LIST_MIPS(V)
105 // Mode to overwrite BinaryExpression values.
106 enum OverwriteMode { NO_OVERWRITE, OVERWRITE_LEFT, OVERWRITE_RIGHT };
107 enum UnaryOverwriteMode { UNARY_OVERWRITE, UNARY_NO_OVERWRITE };
110 // CodeStub is the base class of all stubs.
// A stub is identified by a 32-bit key combining a Major key (which stub
// kind, from CODE_STUB_LIST) and a stub-specific Minor key (its parameters).
// NOTE(review): access specifiers, the Major enum braces and several member
// declarations are elided in this excerpt.
111 class CodeStub BASE_EMBEDDED {
114 #define DEF_ENUM(name) name,
115 CODE_STUB_LIST(DEF_ENUM)
117 NoCache, // marker for stubs that do custom caching
121 // Retrieve the code for the stub. Generate the code if needed.
122 Handle<Code> GetCode();
// Decode the major/minor halves of a combined stub key.
124 static Major MajorKeyFromKey(uint32_t key) {
125 return static_cast<Major>(MajorKeyBits::decode(key));
127 static int MinorKeyFromKey(uint32_t key) {
128 return MinorKeyBits::decode(key);
131 // Gets the major key from a code object that is a code stub or binary op IC.
132 static Major GetMajorKey(Code* code_stub) {
133 return static_cast<Major>(code_stub->major_key());
136 static const char* MajorName(Major major_key, bool allow_unknown_keys);
138 virtual ~CodeStub() {}
// A pregenerated stub must already be in the cache, so compiling a call to
// it cannot trigger GC by generating the stub lazily.
140 bool CompilingCallsToThisStubIsGCSafe() {
141 bool is_pregenerated = IsPregenerated();
143 CHECK(!is_pregenerated || FindCodeInCache(&code));
144 return is_pregenerated;
147 // See comment above, where Instanceof is defined.
148 virtual bool IsPregenerated() { return false; }
150 static void GenerateStubsAheadOfTime();
151 static void GenerateFPStubs();
153 // Some stubs put untagged junk on the stack that cannot be scanned by the
154 // GC. This means that we must be statically sure that no GC can occur while
155 // they are running. If that is the case they should override this to return
156 // true, which will cause an assertion if we try to call something that can
157 // GC or if we try to put a stack frame on top of the junk, which would not
158 // result in a traversable stack.
159 virtual bool SometimesSetsUpAFrame() { return true; }
161 // Lookup the code in the (possibly custom) cache.
162 bool FindCodeInCache(Code** code_out);
// Bit widths for the key encoding: 6 bits of major key, the remainder
// (minus the smi tag) for the minor key.
165 static const int kMajorBits = 6;
166 static const int kMinorBits = kBitsPerInt - kSmiTagSize - kMajorBits;
169 // Nonvirtual wrapper around the stub-specific Generate function. Call
170 // this function to set up the macro assembler and generate the code.
171 void GenerateCode(MacroAssembler* masm);
173 // Generates the assembler code for the stub.
174 virtual void Generate(MacroAssembler* masm) = 0;
176 // Perform bookkeeping required after code generation when stub code is
177 // initially generated.
178 void RecordCodeGeneration(Code* code, MacroAssembler* masm);
180 // Finish the code object after it has been generated.
181 virtual void FinishCode(Handle<Code> code) { }
183 // Activate newly generated stub. Is called after
184 // registering stub in the stub cache.
185 virtual void Activate(Code* code) { }
187 // Returns information for computing the number key.
188 virtual Major MajorKey() = 0;
189 virtual int MinorKey() = 0;
191 // BinaryOpStub needs to override this.
192 virtual int GetCodeKind();
194 // BinaryOpStub needs to override this.
195 virtual InlineCacheState GetICState() {
196 return UNINITIALIZED;
199 // Add the code to a specialized cache, specific to an individual
200 // stub type. Please note, this method must add the code object to a
201 // roots object, otherwise we will remove the code during GC.
202 virtual void AddToSpecialCache(Handle<Code> new_object) { }
204 // Find code in a specialized cache, work is delegated to the specific stub.
205 virtual bool FindCodeInSpecialCache(Code** code_out) { return false; }
207 // If a stub uses a special cache override this.
208 virtual bool UseSpecialCache() { return false; }
210 // Returns a name for logging/debugging purposes.
211 SmartArrayPointer<const char> GetName();
212 virtual void PrintName(StringStream* stream);
214 // Returns whether the code generated for this stub needs to be allocated as
215 // a fixed (non-moveable) code object.
216 virtual bool NeedsImmovableCode() { return false; }
218 // Computes the key based on major and minor.
220 ASSERT(static_cast<int>(MajorKey()) < NUMBER_OF_IDS);
221 return MinorKeyBits::encode(MinorKey()) |
222 MajorKeyBits::encode(MajorKey());
// BitField helpers that pack major/minor into one uint32_t key.
225 class MajorKeyBits: public BitField<uint32_t, 0, kMajorBits> {};
226 class MinorKeyBits: public BitField<uint32_t, kMajorBits, kMinorBits> {};
228 friend class BreakPointIterator;
232 // Helper interface to prepare to/restore after making runtime calls.
// Abstract interface: code generators invoke BeforeCall/AfterCall around a
// call into the runtime so the implementation can set up/tear down state
// (e.g. a frame — see StubRuntimeCallHelper below).
233 class RuntimeCallHelper {
235 virtual ~RuntimeCallHelper() {}
237 virtual void BeforeCall(MacroAssembler* masm) const = 0;
239 virtual void AfterCall(MacroAssembler* masm) const = 0;
// Construction restricted to subclasses; copying disallowed.
242 RuntimeCallHelper() {}
245 DISALLOW_COPY_AND_ASSIGN(RuntimeCallHelper);
248 } } // namespace v8::internal
250 #if V8_TARGET_ARCH_IA32
251 #include "ia32/code-stubs-ia32.h"
252 #elif V8_TARGET_ARCH_X64
253 #include "x64/code-stubs-x64.h"
254 #elif V8_TARGET_ARCH_ARM
255 #include "arm/code-stubs-arm.h"
256 #elif V8_TARGET_ARCH_MIPS
257 #include "mips/code-stubs-mips.h"
259 #error Unsupported target architecture.
266 // RuntimeCallHelper implementation used in stubs: enters/leaves a
267 // newly created internal frame before/after the runtime call.
// BeforeCall/AfterCall are implemented per platform (see code-stubs-<arch>).
268 class StubRuntimeCallHelper : public RuntimeCallHelper {
270 StubRuntimeCallHelper() {}
272 virtual void BeforeCall(MacroAssembler* masm) const;
274 virtual void AfterCall(MacroAssembler* masm) const;
278 // Trivial RuntimeCallHelper implementation.
// Does nothing before or after the call — used when no frame setup is needed.
279 class NopRuntimeCallHelper : public RuntimeCallHelper {
281 NopRuntimeCallHelper() {}
283 virtual void BeforeCall(MacroAssembler* masm) const {}
285 virtual void AfterCall(MacroAssembler* masm) const {}
// Keyed solely by Major::StackCheck; there is a single variant (MinorKey 0).
289 class StackCheckStub : public CodeStub {
293 void Generate(MacroAssembler* masm);
296 Major MajorKey() { return StackCheck; }
297 int MinorKey() { return 0; }
// Keyed solely by Major::Interrupt; a single variant (MinorKey 0).
301 class InterruptStub : public CodeStub {
305 void Generate(MacroAssembler* masm);
308 Major MajorKey() { return Interrupt; }
309 int MinorKey() { return 0; }
// Keyed solely by Major::ToNumber; a single variant (MinorKey 0).
313 class ToNumberStub: public CodeStub {
317 void Generate(MacroAssembler* masm);
320 Major MajorKey() { return ToNumber; }
321 int MinorKey() { return 0; }
// Stub parameterized by the closure's LanguageMode. The minor key collapses
// all non-classic modes to kStrictMode, so there are exactly two variants.
325 class FastNewClosureStub : public CodeStub {
327 explicit FastNewClosureStub(LanguageMode language_mode)
328 : language_mode_(language_mode) { }
330 void Generate(MacroAssembler* masm);
333 Major MajorKey() { return FastNewClosure; }
334 int MinorKey() { return language_mode_ == CLASSIC_MODE
335 ? kNonStrictMode : kStrictMode; }
337 LanguageMode language_mode_;
// Stub parameterized by the number of context slots (1..kMaximumSlots);
// the slot count is the minor key, so each size gets its own code object.
341 class FastNewContextStub : public CodeStub {
343 static const int kMaximumSlots = 64;
345 explicit FastNewContextStub(int slots) : slots_(slots) {
346 ASSERT(slots_ > 0 && slots_ <= kMaximumSlots);
349 void Generate(MacroAssembler* masm);
354 Major MajorKey() { return FastNewContext; }
355 int MinorKey() { return slots_; }
// Block-context analogue of FastNewContextStub: parameterized by slot count
// (1..kMaximumSlots), which doubles as the minor key.
359 class FastNewBlockContextStub : public CodeStub {
361 static const int kMaximumSlots = 64;
363 explicit FastNewBlockContextStub(int slots) : slots_(slots) {
364 ASSERT(slots_ > 0 && slots_ <= kMaximumSlots);
367 void Generate(MacroAssembler* masm);
372 Major MajorKey() { return FastNewBlockContext; }
373 int MinorKey() { return slots_; }
// Clones a shallow array literal. COPY_ON_WRITE_ELEMENTS variants record
// length 0 (nothing is copied); otherwise length is at most
// kMaximumClonedLength. NOTE(review): the Mode enum's opening brace and the
// first enumerator(s) are elided in this excerpt.
377 class FastCloneShallowArrayStub : public CodeStub {
379 // Maximum length of copied elements array.
380 static const int kMaximumClonedLength = 8;
384 CLONE_DOUBLE_ELEMENTS,
385 COPY_ON_WRITE_ELEMENTS,
389 FastCloneShallowArrayStub(Mode mode, int length)
391 length_((mode == COPY_ON_WRITE_ELEMENTS) ? 0 : length) {
392 ASSERT_GE(length_, 0);
393 ASSERT_LE(length_, kMaximumClonedLength);
396 void Generate(MacroAssembler* masm);
402 Major MajorKey() { return FastCloneShallowArray; }
// Minor key packs the mode into the two low bits (mode_ < 4 is asserted)
// and the length above them.
404 ASSERT(mode_ == 0 || mode_ == 1 || mode_ == 2 || mode_ == 3);
405 return length_ * 4 + mode_;
// Clones a shallow object literal with up to kMaximumClonedProperties
// properties; the property count is the minor key.
410 class FastCloneShallowObjectStub : public CodeStub {
412 // Maximum number of properties in copied object.
413 static const int kMaximumClonedProperties = 6;
415 explicit FastCloneShallowObjectStub(int length) : length_(length) {
416 ASSERT_GE(length_, 0);
417 ASSERT_LE(length_, kMaximumClonedProperties);
420 void Generate(MacroAssembler* masm);
425 Major MajorKey() { return FastCloneShallowObject; }
426 int MinorKey() { return length_; }
// Implements the instanceof operator. Behavior is controlled by a bitmask
// of Flags (the minor key): whether args arrive in registers, whether the
// map check is inlined at the call site, and whether the stub returns the
// true/false objects rather than a condition. NOTE(review): the Flags enum's
// opening lines are elided in this excerpt.
430 class InstanceofStub: public CodeStub {
434 kArgsInRegisters = 1 << 0,
435 kCallSiteInlineCheck = 1 << 1,
436 kReturnTrueFalseObject = 1 << 2
439 explicit InstanceofStub(Flags flags) : flags_(flags) { }
// Platform-defined registers holding the left/right operands.
441 static Register left();
442 static Register right();
444 void Generate(MacroAssembler* masm);
447 Major MajorKey() { return Instanceof; }
448 int MinorKey() { return static_cast<int>(flags_); }
// Flag accessors — each tests one bit of flags_.
450 bool HasArgsInRegisters() const {
451 return (flags_ & kArgsInRegisters) != 0;
454 bool HasCallSiteInlineCheck() const {
455 return (flags_ & kCallSiteInlineCheck) != 0;
458 bool ReturnTrueFalseObject() const {
459 return (flags_ & kReturnTrueFalseObject) != 0;
462 virtual void PrintName(StringStream* stream);
// Computes Math.pow. Parameterized by how the exponent is supplied
// (integer, double, tagged value, or on the stack); the ExponentType is
// the minor key.
468 class MathPowStub: public CodeStub {
470 enum ExponentType { INTEGER, DOUBLE, TAGGED, ON_STACK};
472 explicit MathPowStub(ExponentType exponent_type)
473 : exponent_type_(exponent_type) { }
474 virtual void Generate(MacroAssembler* masm);
477 virtual CodeStub::Major MajorKey() { return MathPow; }
478 virtual int MinorKey() { return exponent_type_; }
480 ExponentType exponent_type_;
// Compare inline-cache stub: specialized per comparison operator (op_) and
// IC state (state_). KNOWN_OBJECTS variants are additionally specialized on
// a concrete map (known_map_) and live in the special cache.
484 class ICCompareStub: public CodeStub {
486 ICCompareStub(Token::Value op, CompareIC::State state)
487 : op_(op), state_(state) {
488 ASSERT(Token::IsCompareOp(op));
491 virtual void Generate(MacroAssembler* masm);
493 void set_known_map(Handle<Map> map) { known_map_ = map; }
// Minor-key layout: op in bits 0-2, state in bits 3-7.
496 class OpField: public BitField<int, 0, 3> { };
497 class StateField: public BitField<int, 3, 5> { };
// Record the IC state on the generated code object.
499 virtual void FinishCode(Handle<Code> code) {
500 code->set_compare_state(state_);
503 virtual CodeStub::Major MajorKey() { return CompareIC; }
504 virtual int MinorKey();
506 virtual int GetCodeKind() { return Code::COMPARE_IC; }
// One generator per IC state; GenerateMiss handles the transition path.
508 void GenerateSmis(MacroAssembler* masm);
509 void GenerateHeapNumbers(MacroAssembler* masm);
510 void GenerateSymbols(MacroAssembler* masm);
511 void GenerateStrings(MacroAssembler* masm);
512 void GenerateObjects(MacroAssembler* masm);
513 void GenerateMiss(MacroAssembler* masm);
514 void GenerateKnownObjects(MacroAssembler* masm);
516 bool strict() const { return op_ == Token::EQ_STRICT; }
517 Condition GetCondition() const { return CompareIC::ComputeCondition(op_); }
// KNOWN_OBJECTS stubs use the map-keyed special cache instead of the
// ordinary stub cache.
519 virtual void AddToSpecialCache(Handle<Code> new_object);
520 virtual bool FindCodeInSpecialCache(Code** code_out);
521 virtual bool UseSpecialCache() { return state_ == CompareIC::KNOWN_OBJECTS; }
524 CompareIC::State state_;
525 Handle<Map> known_map_;
529 // Flags that control the compare stub code generation.
// Each flag suppresses part of the generated code or asserts a fact the
// generator may exploit (CANT_BOTH_BE_NAN). NOTE(review): the enum header
// line and the NaNInformation enumerators are elided in this excerpt.
531 NO_COMPARE_FLAGS = 0,
532 NO_SMI_COMPARE_IN_STUB = 1 << 0,
533 NO_NUMBER_COMPARE_IN_STUB = 1 << 1,
534 CANT_BOTH_BE_NAN = 1 << 2
538 enum NaNInformation {
// Generic (non-IC) comparison stub, configured from CompareFlags. Two
// constructors are visible; both decode flags into the boolean members
// below. NOTE(review): several constructor parameter lines, initializer
// lines and the register members (lhs_/rhs_) are elided in this excerpt.
544 class CompareStub: public CodeStub {
546 CompareStub(Condition cc,
553 never_nan_nan_((flags & CANT_BOTH_BE_NAN) != 0),
554 include_number_compare_((flags & NO_NUMBER_COMPARE_IN_STUB) == 0),
555 include_smi_compare_((flags & NO_SMI_COMPARE_IN_STUB) == 0),
559 CompareStub(Condition cc,
561 CompareFlags flags) :
564 never_nan_nan_((flags & CANT_BOTH_BE_NAN) != 0),
565 include_number_compare_((flags & NO_NUMBER_COMPARE_IN_STUB) == 0),
566 include_smi_compare_((flags & NO_SMI_COMPARE_IN_STUB) == 0),
570 void Generate(MacroAssembler* masm);
575 // Only used for 'equal' comparisons. Tells the stub that we already know
576 // that at least one side of the comparison is not NaN. This allows the
577 // stub to use object identity in the positive case. We ignore it when
578 // generating the minor key for other comparisons to avoid creating more
581 // Do generate the number comparison code in the stub. Stubs without number
582 // comparison code is used when the number comparison has been inlined, and
583 // the stub will be called if one of the operands is not a number.
584 bool include_number_compare_;
586 // Generate the comparison code for two smi operands in the stub.
587 bool include_smi_compare_;
589 // Register holding the left hand side of the comparison if the stub gives
590 // a choice, no_reg otherwise.
593 // Register holding the right hand side of the comparison if the stub gives
594 // a choice, no_reg otherwise.
597 // Encoding of the minor key in 16 bits.
598 class StrictField: public BitField<bool, 0, 1> {};
599 class NeverNanNanField: public BitField<bool, 1, 1> {};
600 class IncludeNumberCompareField: public BitField<bool, 2, 1> {};
601 class IncludeSmiCompareField: public BitField<bool, 3, 1> {};
602 class RegisterField: public BitField<bool, 4, 1> {};
603 class ConditionField: public BitField<int, 5, 11> {};
605 Major MajorKey() { return Compare; }
// Reported as a COMPARE_IC in the GENERIC state so the IC machinery can
// treat it uniformly with ICCompareStub.
609 virtual int GetCodeKind() { return Code::COMPARE_IC; }
610 virtual void FinishCode(Handle<Code> code) {
611 code->set_compare_state(CompareIC::GENERIC);
614 // Branch to the label if the given object isn't a symbol.
615 void BranchIfNonSymbol(MacroAssembler* masm,
620 // Unfortunately you have to run without snapshots to see most of these
621 // names in the profile since most compare stubs end up in the snapshot.
622 virtual void PrintName(StringStream* stream);
// Trampoline from generated code into the C++ runtime. Parameterized by
// the number of values returned and whether double registers are saved
// across the call.
626 class CEntryStub : public CodeStub {
628 explicit CEntryStub(int result_size,
629 SaveFPRegsMode save_doubles = kDontSaveFPRegs)
630 : result_size_(result_size), save_doubles_(save_doubles) { }
632 void Generate(MacroAssembler* masm);
634 // The version of this stub that doesn't save doubles is generated ahead of
635 // time, so it's OK to call it from other stubs that can't cope with GC during
636 // their code generation. On machines that always have gp registers (x64) we
637 // can generate both variants ahead of time.
638 virtual bool IsPregenerated();
639 static void GenerateAheadOfTime();
// Core call sequence plus the exception dispatch labels used on failure.
642 void GenerateCore(MacroAssembler* masm,
643 Label* throw_normal_exception,
644 Label* throw_termination_exception,
645 Label* throw_out_of_memory_exception,
647 bool always_allocate_scope);
649 // Number of pointers/values returned.
650 const int result_size_;
651 SaveFPRegsMode save_doubles_;
653 Major MajorKey() { return CEntry; }
// Platform-defined: some targets require this stub to be immovable.
656 bool NeedsImmovableCode();
// Entry trampoline from C++ into JavaScript code. GenerateBody is shared
// with JSConstructEntryStub, which passes is_construct == true.
660 class JSEntryStub : public CodeStub {
664 void Generate(MacroAssembler* masm) { GenerateBody(masm, false); }
667 void GenerateBody(MacroAssembler* masm, bool is_construct);
670 Major MajorKey() { return JSEntry; }
671 int MinorKey() { return 0; }
673 virtual void FinishCode(Handle<Code> code);
// Construct-call variant of JSEntryStub: same body generator with
// is_construct == true, distinguished by MinorKey 1.
679 class JSConstructEntryStub : public JSEntryStub {
681 JSConstructEntryStub() { }
683 void Generate(MacroAssembler* masm) { GenerateBody(masm, true); }
686 int MinorKey() { return 1; }
688 virtual void PrintName(StringStream* stream) {
689 stream->Add("JSConstructEntryStub");
// Access to the arguments object: one stub variant per Type (the minor
// key), dispatched in Generate to the per-case generators below.
// NOTE(review): the Type enum itself is elided in this excerpt.
694 class ArgumentsAccessStub: public CodeStub {
703 explicit ArgumentsAccessStub(Type type) : type_(type) { }
708 Major MajorKey() { return ArgumentsAccess; }
709 int MinorKey() { return type_; }
711 void Generate(MacroAssembler* masm);
712 void GenerateReadElement(MacroAssembler* masm);
713 void GenerateNewStrict(MacroAssembler* masm);
714 void GenerateNewNonStrictFast(MacroAssembler* masm);
715 void GenerateNewNonStrictSlow(MacroAssembler* masm);
717 virtual void PrintName(StringStream* stream);
// Keyed solely by Major::RegExpExec; a single variant (MinorKey 0).
721 class RegExpExecStub: public CodeStub {
726 Major MajorKey() { return RegExpExec; }
727 int MinorKey() { return 0; }
729 void Generate(MacroAssembler* masm);
// Keyed solely by Major::RegExpConstructResult; a single variant.
733 class RegExpConstructResultStub: public CodeStub {
735 RegExpConstructResultStub() { }
738 Major MajorKey() { return RegExpConstructResult; }
739 int MinorKey() { return 0; }
741 void Generate(MacroAssembler* masm);
// Calls a function with a fixed argument count. Parameterized by argc and
// CallFunctionFlags; both are packed into the 32-bit minor key (flags in
// bits 0-1, argc above).
745 class CallFunctionStub: public CodeStub {
747 CallFunctionStub(int argc, CallFunctionFlags flags)
748 : argc_(argc), flags_(flags) { }
750 void Generate(MacroAssembler* masm);
// Mark the code object if this stub records call targets in a cache.
752 virtual void FinishCode(Handle<Code> code) {
753 code->set_has_function_cache(RecordCallTarget());
756 static int ExtractArgcFromMinorKey(int minor_key) {
757 return ArgcBits::decode(minor_key);
762 CallFunctionFlags flags_;
764 virtual void PrintName(StringStream* stream);
766 // Minor key encoding in 32 bits with Bitfield <Type, shift, size>.
767 class FlagBits: public BitField<CallFunctionFlags, 0, 2> {};
768 class ArgcBits: public BitField<unsigned, 2, 32 - 2> {};
770 Major MajorKey() { return CallFunction; }
772 // Encode the parameters in a unique 32 bit value.
773 return FlagBits::encode(flags_) | ArgcBits::encode(argc_);
// Flag accessors — each tests one bit of flags_.
776 bool ReceiverMightBeImplicit() {
777 return (flags_ & RECEIVER_MIGHT_BE_IMPLICIT) != 0;
780 bool RecordCallTarget() {
781 return (flags_ & RECORD_CALL_TARGET) != 0;
// Construct-call counterpart of CallFunctionStub: parameterized only by
// CallFunctionFlags, which serve directly as the minor key.
786 class CallConstructStub: public CodeStub {
788 explicit CallConstructStub(CallFunctionFlags flags) : flags_(flags) {}
790 void Generate(MacroAssembler* masm);
// Mark the code object if this stub records call targets in a cache.
792 virtual void FinishCode(Handle<Code> code) {
793 code->set_has_function_cache(RecordCallTarget());
797 CallFunctionFlags flags_;
799 virtual void PrintName(StringStream* stream);
801 Major MajorKey() { return CallConstruct; }
802 int MinorKey() { return flags_; }
804 bool RecordCallTarget() {
805 return (flags_ & RECORD_CALL_TARGET) != 0;
// Controls how the char-at generators below validate their index argument.
810 enum StringIndexFlags {
811 // Accepts smis or heap numbers.
812 STRING_INDEX_IS_NUMBER,
814 // Accepts smis or heap numbers that are valid array indices
815 // (ECMA-262 15.4). Invalid indices are reported as being out of
817 STRING_INDEX_IS_ARRAY_INDEX
821 // Generates code implementing String.prototype.charCodeAt.
823 // Only supports the case when the receiver is a string and the index
824 // is a number (smi or heap number) that is a valid index into the
825 // string. Additional index constraints are specified by the
826 // flags. Otherwise, bails out to the provided labels.
828 // Register usage: |object| may be changed to another string in a way
829 // that doesn't affect charCodeAt/charAt semantics, |index| is
830 // preserved, |scratch| and |result| are clobbered.
// NOTE(review): some constructor parameters/initializers and the register
// members are elided in this excerpt.
831 class StringCharCodeAtGenerator {
833 StringCharCodeAtGenerator(Register object,
836 Label* receiver_not_string,
837 Label* index_not_number,
838 Label* index_out_of_range,
839 StringIndexFlags index_flags)
843 receiver_not_string_(receiver_not_string),
844 index_not_number_(index_not_number),
845 index_out_of_range_(index_out_of_range),
846 index_flags_(index_flags) {
847 ASSERT(!result_.is(object_));
848 ASSERT(!result_.is(index_));
851 // Generates the fast case code. On the fallthrough path |result|
852 // register contains the result.
853 void GenerateFast(MacroAssembler* masm);
855 // Generates the slow case code. Must not be naturally
856 // reachable. Expected to be put after a ret instruction (e.g., in
857 // deferred code). Always jumps back to the fast case.
858 void GenerateSlow(MacroAssembler* masm,
859 const RuntimeCallHelper& call_helper);
// Bailout labels supplied by the caller.
866 Label* receiver_not_string_;
867 Label* index_not_number_;
868 Label* index_out_of_range_;
870 StringIndexFlags index_flags_;
// Internal labels linking the fast and slow paths.
873 Label index_not_smi_;
874 Label got_smi_index_;
877 DISALLOW_COPY_AND_ASSIGN(StringCharCodeAtGenerator);
881 // Generates code for creating a one-char string from a char code.
// NOTE(review): the remaining constructor parameters, initializer list and
// register members are elided in this excerpt.
882 class StringCharFromCodeGenerator {
884 StringCharFromCodeGenerator(Register code,
888 ASSERT(!code_.is(result_));
891 // Generates the fast case code. On the fallthrough path |result|
892 // register contains the result.
893 void GenerateFast(MacroAssembler* masm);
895 // Generates the slow case code. Must not be naturally
896 // reachable. Expected to be put after a ret instruction (e.g., in
897 // deferred code). Always jumps back to the fast case.
898 void GenerateSlow(MacroAssembler* masm,
899 const RuntimeCallHelper& call_helper);
908 DISALLOW_COPY_AND_ASSIGN(StringCharFromCodeGenerator);
912 // Generates code implementing String.prototype.charAt.
914 // Only supports the case when the receiver is a string and the index
915 // is a number (smi or heap number) that is a valid index into the
916 // string. Additional index constraints are specified by the
917 // flags. Otherwise, bails out to the provided labels.
919 // Register usage: |object| may be changed to another string in a way
920 // that doesn't affect charCodeAt/charAt semantics, |index| is
921 // preserved, |scratch1|, |scratch2|, and |result| are clobbered.
// Composes the two generators above: charCodeAt to get the code, then
// char-from-code to build the one-character string.
922 class StringCharAtGenerator {
924 StringCharAtGenerator(Register object,
928 Label* receiver_not_string,
929 Label* index_not_number,
930 Label* index_out_of_range,
931 StringIndexFlags index_flags)
932 : char_code_at_generator_(object,
939 char_from_code_generator_(scratch, result) {}
941 // Generates the fast case code. On the fallthrough path |result|
942 // register contains the result.
943 void GenerateFast(MacroAssembler* masm);
945 // Generates the slow case code. Must not be naturally
946 // reachable. Expected to be put after a ret instruction (e.g., in
947 // deferred code). Always jumps back to the fast case.
948 void GenerateSlow(MacroAssembler* masm,
949 const RuntimeCallHelper& call_helper);
952 StringCharCodeAtGenerator char_code_at_generator_;
953 StringCharFromCodeGenerator char_from_code_generator_;
955 DISALLOW_COPY_AND_ASSIGN(StringCharAtGenerator);
// RAII scope that overrides the assembler's allow-stub-calls flag for its
// lifetime and restores the previous value on destruction.
959 class AllowStubCallsScope {
961 AllowStubCallsScope(MacroAssembler* masm, bool allow)
962 : masm_(masm), previous_allow_(masm->allow_stub_calls()) {
963 masm_->set_allow_stub_calls(allow);
965 ~AllowStubCallsScope() {
966 masm_->set_allow_stub_calls(previous_allow_);
970 MacroAssembler* masm_;
971 bool previous_allow_;
973 DISALLOW_COPY_AND_ASSIGN(AllowStubCallsScope);
// Keyed-load stub specialized per ElementsKind; the kind is the minor key.
977 class KeyedLoadElementStub : public CodeStub {
979 explicit KeyedLoadElementStub(ElementsKind elements_kind)
980 : elements_kind_(elements_kind)
983 Major MajorKey() { return KeyedLoadElement; }
984 int MinorKey() { return elements_kind_; }
986 void Generate(MacroAssembler* masm);
989 ElementsKind elements_kind_;
991 DISALLOW_COPY_AND_ASSIGN(KeyedLoadElementStub);
// Keyed-store stub specialized on elements kind, whether the receiver is a
// JSArray, and the grow mode; all three are packed into the minor key via
// the BitField helpers below.
995 class KeyedStoreElementStub : public CodeStub {
997 KeyedStoreElementStub(bool is_js_array,
998 ElementsKind elements_kind,
999 KeyedAccessGrowMode grow_mode)
1000 : is_js_array_(is_js_array),
1001 elements_kind_(elements_kind),
1002 grow_mode_(grow_mode) { }
1004 Major MajorKey() { return KeyedStoreElement; }
1006 return ElementsKindBits::encode(elements_kind_) |
1007 IsJSArrayBits::encode(is_js_array_) |
1008 GrowModeBits::encode(grow_mode_);
1011 void Generate(MacroAssembler* masm);
// Minor-key layout: elements kind (0-7), grow mode (8), is-JSArray (9).
1014 class ElementsKindBits: public BitField<ElementsKind, 0, 8> {};
1015 class GrowModeBits: public BitField<KeyedAccessGrowMode, 8, 1> {};
1016 class IsJSArrayBits: public BitField<bool, 9, 1> {};
1019 ElementsKind elements_kind_;
1020 KeyedAccessGrowMode grow_mode_;
1022 DISALLOW_COPY_AND_ASSIGN(KeyedStoreElementStub);
// ToBoolean IC stub: specialized on the register holding the value (tos_)
// and the set of input types seen so far (Types, an EnumSet packed into one
// byte). NOTE(review): the Type enum and parts of the Types class header
// are elided in this excerpt.
1026 class ToBooleanStub: public CodeStub {
1039 // At most 8 different types can be distinguished, because the Code object
1040 // only has room for a single byte to hold a set of these types. :-P
1041 STATIC_ASSERT(NUMBER_OF_TYPES <= 8);
1046 explicit Types(byte bits) : set_(bits) {}
1048 bool IsEmpty() const { return set_.IsEmpty(); }
1049 bool Contains(Type type) const { return set_.Contains(type); }
1050 void Add(Type type) { set_.Add(type); }
1051 byte ToByte() const { return set_.ToIntegral(); }
1052 void Print(StringStream* stream) const;
1053 void TraceTransition(Types to) const;
// Record |object|'s type in the set; see implementation for the return
// value's meaning.
1054 bool Record(Handle<Object> object);
1055 bool NeedsMap() const;
1056 bool CanBeUndetectable() const;
1059 EnumSet<Type, byte> set_;
1062 static Types no_types() { return Types(); }
1063 static Types all_types() { return Types((1 << NUMBER_OF_TYPES) - 1); }
1065 explicit ToBooleanStub(Register tos, Types types = Types())
1066 : tos_(tos), types_(types) { }
1068 void Generate(MacroAssembler* masm);
1069 virtual int GetCodeKind() { return Code::TO_BOOLEAN_IC; }
1070 virtual void PrintName(StringStream* stream);
1072 virtual bool SometimesSetsUpAFrame() { return false; }
1075 Major MajorKey() { return ToBoolean; }
// Minor key: register code above the 8 type bits.
1076 int MinorKey() { return (tos_.code() << NUMBER_OF_TYPES) | types_.ToByte(); }
1078 virtual void FinishCode(Handle<Code> code) {
1079 code->set_to_boolean_state(types_.ToByte());
1082 void CheckOddball(MacroAssembler* masm,
1084 Heap::RootListIndex value,
1086 void GenerateTypeTransition(MacroAssembler* masm);
// Stub that transitions a receiver's elements kind from 'from' to 'to' and
// then performs the keyed store. All five parameters are packed into the
// minor key via the BitField helpers below. NOTE(review): some constructor
// parameters/initializers and the from_/to_ members are elided in this
// excerpt.
1093 class ElementsTransitionAndStoreStub : public CodeStub {
1095 ElementsTransitionAndStoreStub(ElementsKind from,
1098 StrictModeFlag strict_mode,
1099 KeyedAccessGrowMode grow_mode)
1102 is_jsarray_(is_jsarray),
1103 strict_mode_(strict_mode),
1104 grow_mode_(grow_mode) {}
// Minor-key layout: from (0-7), to (8-15), is-JSArray (16), strict (17),
// grow mode (18).
1107 class FromBits: public BitField<ElementsKind, 0, 8> {};
1108 class ToBits: public BitField<ElementsKind, 8, 8> {};
1109 class IsJSArrayBits: public BitField<bool, 16, 1> {};
1110 class StrictModeBits: public BitField<StrictModeFlag, 17, 1> {};
1111 class GrowModeBits: public BitField<KeyedAccessGrowMode, 18, 1> {};
1113 Major MajorKey() { return ElementsTransitionAndStore; }
1115 return FromBits::encode(from_) |
1116 ToBits::encode(to_) |
1117 IsJSArrayBits::encode(is_jsarray_) |
1118 StrictModeBits::encode(strict_mode_) |
1119 GrowModeBits::encode(grow_mode_);
1122 void Generate(MacroAssembler* masm);
1127 StrictModeFlag strict_mode_;
1128 KeyedAccessGrowMode grow_mode_;
1130 DISALLOW_COPY_AND_ASSIGN(ElementsTransitionAndStoreStub);
// Keyed solely by Major::StoreArrayLiteralElement; a single variant.
1134 class StoreArrayLiteralElementStub : public CodeStub {
1136 explicit StoreArrayLiteralElementStub() {}
1139 Major MajorKey() { return StoreArrayLiteralElement; }
1140 int MinorKey() { return 0; }
1142 void Generate(MacroAssembler* masm);
1144 DISALLOW_COPY_AND_ASSIGN(StoreArrayLiteralElementStub);
1147 } } // namespace v8::internal
1149 #endif // V8_CODE_STUBS_H_