// Copyright 2010 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#ifndef V8_ARM_CODEGEN_ARM_H_
#define V8_ARM_CODEGEN_ARM_H_

#include "ast.h"
#include "code-stubs-arm.h"

namespace v8 {
namespace internal {

// Forward declarations
class CompilationInfo;
class DeferredCode;
class JumpTarget;
class RegisterAllocator;
class RegisterFile;

enum InitState { CONST_INIT, NOT_CONST_INIT };
enum TypeofState { INSIDE_TYPEOF, NOT_INSIDE_TYPEOF };
enum GenerateInlineSmi { DONT_GENERATE_INLINE_SMI, GENERATE_INLINE_SMI };
enum WriteBarrierCharacter { UNLIKELY_SMI, LIKELY_SMI, NEVER_NEWSPACE };


// -------------------------------------------------------------------------
// Reference support

// A reference is a C++ stack-allocated object that puts a
// reference on the virtual frame. The reference may be consumed
// by GetValue, TakeValue, SetValue, and CodeGenerator::UnloadReference.
// When the lifetime (scope) of a valid reference ends, it must have
// been consumed, and be in state UNLOADED.
class Reference BASE_EMBEDDED {
 public:
  // The values of the types are important; see size().
  enum Type { UNLOADED = -2, ILLEGAL = -1, SLOT = 0, NAMED = 1, KEYED = 2 };

  Reference(CodeGenerator* cgen,
            Expression* expression,
            bool persist_after_get = false);
  ~Reference();

  Expression* expression() const { return expression_; }
  Type type() const { return type_; }
  void set_type(Type value) {
    ASSERT_EQ(ILLEGAL, type_);
    type_ = value;
  }

  void set_unloaded() {
    ASSERT_NE(ILLEGAL, type_);
    ASSERT_NE(UNLOADED, type_);
    type_ = UNLOADED;
  }

  // The size the reference takes up on the stack.
  int size() const {
    return (type_ < SLOT) ? 0 : type_;
  }
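
  // Illustrative note (not part of the original header): given the enum
  // values above, a SLOT reference keeps nothing extra on the frame (size 0),
  // a NAMED reference keeps the receiver (size 1), and a KEYED reference
  // keeps the receiver and the key (size 2).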

  bool is_illegal() const { return type_ == ILLEGAL; }
  bool is_slot() const { return type_ == SLOT; }
  bool is_property() const { return type_ == NAMED || type_ == KEYED; }
  bool is_unloaded() const { return type_ == UNLOADED; }

  // Return the name. Only valid for named property references.
  Handle<String> GetName();

  // Generate code to push the value of the reference on top of the
  // expression stack. The reference is expected to be already on top of
  // the expression stack, and it is consumed by the call unless the
  // reference is for a compound assignment.
  // If the reference is not consumed, it is left in place under its value.
  void GetValue();

  // Generate code to store the value on top of the expression stack in the
  // reference. The reference is expected to be immediately below the value
  // on the expression stack. The value is stored in the location specified
  // by the reference, and is left on top of the stack, after the reference
  // is popped from beneath it (unloaded).
  void SetValue(InitState init_state, WriteBarrierCharacter wb);

  // This is in preparation for something that uses the reference on the stack.
  // If we need this reference after the get, dup it now; otherwise mark it
  // as used.
  inline void DupIfPersist();

 private:
  CodeGenerator* cgen_;
  Expression* expression_;
  Type type_;
  // Keep the reference on the stack after get, so it can be used by set later.
  bool persist_after_get_;
};
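

// A minimal usage sketch (illustrative only, not part of the original header;
// it assumes a live CodeGenerator* named cgen and an Expression* named expr):
//
//   {
//     Reference ref(cgen, expr);
//     if (!ref.is_illegal()) ref.GetValue();  // pushes the value, consumes ref
//   }  // by the end of the scope the reference must be in state UNLOADED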


// -------------------------------------------------------------------------
// Code generation state

// The state is passed down the AST by the code generator (and back up, in
// the form of the state of the label pair). It is threaded through the
// call stack. Constructing a state implicitly pushes it on the owning code
// generator's stack of states, and destroying one implicitly pops it.

class CodeGenState BASE_EMBEDDED {
 public:
  // Create an initial code generator state. Destroying the initial state
  // leaves the code generator with a NULL state.
  explicit CodeGenState(CodeGenerator* owner);

  // Destroy a code generator state and restore the owning code generator's
  // previous state.
  virtual ~CodeGenState();

  virtual JumpTarget* true_target() const { return NULL; }
  virtual JumpTarget* false_target() const { return NULL; }

 protected:
  inline CodeGenerator* owner() { return owner_; }
  inline CodeGenState* previous() const { return previous_; }

 private:
  CodeGenerator* owner_;
  CodeGenState* previous_;
};


class ConditionCodeGenState : public CodeGenState {
 public:
  // Create a code generator state based on a code generator's current
  // state. The new state has its own pair of branch labels.
  ConditionCodeGenState(CodeGenerator* owner,
                        JumpTarget* true_target,
                        JumpTarget* false_target);

  virtual JumpTarget* true_target() const { return true_target_; }
  virtual JumpTarget* false_target() const { return false_target_; }

 private:
  JumpTarget* true_target_;
  JumpTarget* false_target_;
};
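
// Illustrative sketch (not part of the original header): because constructing
// a state pushes it and destroying it pops it, a visitor that needs its own
// pair of branch targets can simply introduce a scoped state:
//
//   {
//     ConditionCodeGenState new_state(this, &then_target, &else_target);
//     // ... code generated here sees the new true_target()/false_target() ...
//   }  // the destructor restores the previous state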


class TypeInfoCodeGenState : public CodeGenState {
 public:
  TypeInfoCodeGenState(CodeGenerator* owner,
                       Slot* slot,
                       TypeInfo type_info);
  ~TypeInfoCodeGenState();

  virtual JumpTarget* true_target() const { return previous()->true_target(); }
  virtual JumpTarget* false_target() const {
    return previous()->false_target();
  }

 private:
  Slot* slot_;
  TypeInfo old_type_info_;
};
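
// Illustrative note (not part of the original header): judging from the
// destructor and the old_type_info_ member, this state follows the same
// push/pop discipline and temporarily overrides the TypeInfo recorded for a
// slot, restoring the previous value when it goes out of scope.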


// -------------------------------------------------------------------------
// Arguments allocation mode

enum ArgumentsAllocationMode {
  NO_ARGUMENTS_ALLOCATION,
  EAGER_ARGUMENTS_ALLOCATION,
  LAZY_ARGUMENTS_ALLOCATION
};


// Different nop operations are used by the code generator to detect certain
// states of the generated code.
enum NopMarkerTypes {
  NON_MARKING_NOP = 0,
  PROPERTY_ACCESS_INLINED
};


// -------------------------------------------------------------------------
// CodeGenerator

class CodeGenerator: public AstVisitor {
 public:
  static bool MakeCode(CompilationInfo* info);

  // Printing of AST, etc. as requested by flags.
  static void MakeCodePrologue(CompilationInfo* info);

  // Allocate and install the code.
  static Handle<Code> MakeCodeEpilogue(MacroAssembler* masm,
                                       CompilationInfo* info);

#ifdef ENABLE_LOGGING_AND_PROFILING
  static bool ShouldGenerateLog(Expression* type);
#endif

  static void SetFunctionInfo(Handle<JSFunction> fun,
                              FunctionLiteral* lit,
                              Handle<Script> script);

  static bool RecordPositions(MacroAssembler* masm,
                              bool right_here = false);

  MacroAssembler* masm() { return masm_; }
  VirtualFrame* frame() const { return frame_; }
  inline Handle<Script> script();

  bool has_valid_frame() const { return frame_ != NULL; }

  // Set the virtual frame to be new_frame, with non-frame register
  // reference counts given by non_frame_registers. The non-frame
  // register reference counts of the old frame are returned in
  // non_frame_registers.
  void SetFrame(VirtualFrame* new_frame, RegisterFile* non_frame_registers);

  RegisterAllocator* allocator() const { return allocator_; }

  CodeGenState* state() { return state_; }
  void set_state(CodeGenState* state) { state_ = state; }

  TypeInfo type_info(Slot* slot) {
    int index = NumberOfSlot(slot);
    if (index == kInvalidSlotNumber) return TypeInfo::Unknown();
    return (*type_info_)[index];
  }

  TypeInfo set_type_info(Slot* slot, TypeInfo info) {
    int index = NumberOfSlot(slot);
    ASSERT(index >= kInvalidSlotNumber);
    if (index != kInvalidSlotNumber) {
      TypeInfo previous_value = (*type_info_)[index];
      (*type_info_)[index] = info;
      return previous_value;
    }
    return TypeInfo::Unknown();
  }

  void AddDeferred(DeferredCode* code) { deferred_.Add(code); }

  // Constants related to patching of inlined load/store.
  static int GetInlinedKeyedLoadInstructionsAfterPatch() {
    return FLAG_debug_code ? 32 : 13;
  }
  static const int kInlinedKeyedStoreInstructionsAfterPatch = 5;
  static int GetInlinedNamedStoreInstructionsAfterPatch() {
    ASSERT(inlined_write_barrier_size_ != -1);
    return inlined_write_barrier_size_ + 4;
  }
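
  // Illustrative sketch (not part of the original header): a patcher that
  // needs the size of the inlined keyed load in bytes rather than in
  // instructions could compute it as
  //
  //   int size_in_bytes =
  //       GetInlinedKeyedLoadInstructionsAfterPatch() * Assembler::kInstrSize;
  //
  // where Assembler::kInstrSize is the fixed 4-byte ARM instruction width.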

  static MemOperand ContextOperand(Register context, int index) {
    return MemOperand(context, Context::SlotOffset(index));
  }

 private:
  // Type of a member function that generates inline code for a native function.
  typedef void (CodeGenerator::*InlineFunctionGenerator)
      (ZoneList<Expression*>*);

  static const InlineFunctionGenerator kInlineFunctionGenerators[];

  // Construction/Destruction
  explicit CodeGenerator(MacroAssembler* masm);

  inline bool is_eval();
  inline Scope* scope();

  // Generating deferred code.
  void ProcessDeferred();

  static const int kInvalidSlotNumber = -1;

  int NumberOfSlot(Slot* slot);

  bool has_cc() const { return cc_reg_ != al; }
  JumpTarget* true_target() const { return state_->true_target(); }
  JumpTarget* false_target() const { return state_->false_target(); }

  // Track loop nesting level.
  int loop_nesting() const { return loop_nesting_; }
  void IncrementLoopNesting() { loop_nesting_++; }
  void DecrementLoopNesting() { loop_nesting_--; }

  void VisitStatements(ZoneList<Statement*>* statements);

#define DEF_VISIT(type) \
  void Visit##type(type* node);
  AST_NODE_LIST(DEF_VISIT)
#undef DEF_VISIT
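
  // For example (illustrative only), AST_NODE_LIST(DEF_VISIT) expands into
  // one declaration per AST node type, such as:
  //
  //   void VisitBinaryOperation(BinaryOperation* node);
  //   void VisitCallRuntime(CallRuntime* node);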

  // Main code generation function
  void Generate(CompilationInfo* info);

  // Generate the return sequence code. Should be called no more than
  // once per compiled function, immediately after binding the return
  // target (which cannot be done more than once). The return value should
  // be in r0.
  void GenerateReturnSequence();

  // Returns the arguments allocation mode.
  ArgumentsAllocationMode ArgumentsMode();

  // Store the arguments object and allocate it if necessary.
  void StoreArgumentsObject(bool initial);

  // The following are used by class Reference.
  void LoadReference(Reference* ref);
  void UnloadReference(Reference* ref);

  MemOperand SlotOperand(Slot* slot, Register tmp);

  MemOperand ContextSlotOperandCheckExtensions(Slot* slot,
                                               Register tmp,
                                               Register tmp2,
                                               JumpTarget* slow);

  static MemOperand GlobalObject() {
    return ContextOperand(cp, Context::GLOBAL_INDEX);
  }

  void LoadCondition(Expression* x,
                     JumpTarget* true_target,
                     JumpTarget* false_target,
                     bool force_cc);
  void Load(Expression* expr);
  void LoadGlobalReceiver(Register scratch);

  // Read a value from a slot and leave it on top of the expression stack.
  void LoadFromSlot(Slot* slot, TypeofState typeof_state);
  void LoadFromSlotCheckForArguments(Slot* slot, TypeofState state);

  // Store the value on top of the stack to a slot.
  void StoreToSlot(Slot* slot, InitState init_state);

  // Support for compiling assignment expressions.
  void EmitSlotAssignment(Assignment* node);
  void EmitNamedPropertyAssignment(Assignment* node);
  void EmitKeyedPropertyAssignment(Assignment* node);

  // Load a named property, returning it in r0. The receiver is passed on the
  // stack, and remains there.
  void EmitNamedLoad(Handle<String> name, bool is_contextual);

  // Store to a named property. If the store is contextual, value is passed on
  // the frame and consumed. Otherwise, receiver and value are passed on the
  // frame and consumed. The result is returned in r0.
  void EmitNamedStore(Handle<String> name, bool is_contextual);

  // Load a keyed property, leaving it in r0. The receiver and key are
  // passed on the stack, and remain there.
  void EmitKeyedLoad();

  // Store a keyed property. Key and receiver are on the stack and the value is
  // in r0. Result is returned in r0.
  void EmitKeyedStore(StaticType* key_type, WriteBarrierCharacter wb_info);

  void LoadFromGlobalSlotCheckExtensions(Slot* slot,
                                         TypeofState typeof_state,
                                         JumpTarget* slow);

  // Support for loading from local/global variables and arguments
  // whose location is known unless they are shadowed by
  // eval-introduced bindings. Generates no code for unsupported slot
  // types and therefore expects to fall through to the slow jump target.
  void EmitDynamicLoadFromSlotFastCase(Slot* slot,
                                       TypeofState typeof_state,
                                       JumpTarget* slow,
                                       JumpTarget* done);

  // Special code for typeof expressions: Unfortunately, we must
  // be careful when loading the expression in 'typeof'
  // expressions. We are not allowed to throw reference errors for
  // non-existing properties of the global object, so we must make it
  // look like an explicit property access, instead of an access
  // through the context chain.
  void LoadTypeofExpression(Expression* x);

  void ToBoolean(JumpTarget* true_target, JumpTarget* false_target);

  // Generate code that computes a shortcutting logical operation.
  void GenerateLogicalBooleanOperation(BinaryOperation* node);

  void GenericBinaryOperation(Token::Value op,
                              OverwriteMode overwrite_mode,
                              GenerateInlineSmi inline_smi,
                              int constant_rhs =
                                  GenericBinaryOpStub::kUnknownIntValue);

  void Comparison(Condition cc,
                  Expression* left,
                  Expression* right,
                  bool strict = false);

  void SmiOperation(Token::Value op,
                    Handle<Object> value,
                    bool reversed,
                    OverwriteMode mode);

  void CallWithArguments(ZoneList<Expression*>* arguments,
                         CallFunctionFlags flags,
                         int position);

  // An optimized implementation of expressions of the form
  // x.apply(y, arguments). We call x the applicand and y the receiver.
  // The optimization avoids allocating an arguments object if possible.
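  //
  // For example (illustrative only), this targets delegation patterns such as
  //
  //   function delegate() { return target.apply(this, arguments); }
  //
  // where the arguments object never needs to be materialized.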
  void CallApplyLazy(Expression* applicand,
                     Expression* receiver,
                     VariableProxy* arguments,
                     int position);

  void Branch(bool if_true, JumpTarget* target);

  static InlineFunctionGenerator FindInlineFunctionGenerator(
      Runtime::FunctionId function_id);

  bool CheckForInlineRuntimeCall(CallRuntime* node);
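
  // Illustrative note (not part of the original header): runtime calls written
  // with a '%_' prefix in the JS builtins, e.g. %_IsSmi(x), are candidates for
  // inlining. CheckForInlineRuntimeCall looks such a call up and, when an
  // InlineFunctionGenerator exists for it, emits one of the specialized
  // Generate* routines below instead of a real runtime call.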

  static Handle<Code> ComputeLazyCompile(int argc);
  void ProcessDeclarations(ZoneList<Declaration*>* declarations);

  static Handle<Code> ComputeCallInitialize(int argc, InLoopFlag in_loop);

  static Handle<Code> ComputeKeyedCallInitialize(int argc, InLoopFlag in_loop);

  // Declare global variables and functions in the given array of
  // name/value pairs.
  void DeclareGlobals(Handle<FixedArray> pairs);

  // Instantiate the function based on the shared function info.
  void InstantiateFunction(Handle<SharedFunctionInfo> function_info);

  // Support for type checks.
  void GenerateIsSmi(ZoneList<Expression*>* args);
  void GenerateIsNonNegativeSmi(ZoneList<Expression*>* args);
  void GenerateIsArray(ZoneList<Expression*>* args);
  void GenerateIsRegExp(ZoneList<Expression*>* args);
  void GenerateIsObject(ZoneList<Expression*>* args);
  void GenerateIsSpecObject(ZoneList<Expression*>* args);
  void GenerateIsFunction(ZoneList<Expression*>* args);
  void GenerateIsUndetectableObject(ZoneList<Expression*>* args);
  void GenerateIsStringWrapperSafeForDefaultValueOf(
      ZoneList<Expression*>* args);

  // Support for construct call checks.
  void GenerateIsConstructCall(ZoneList<Expression*>* args);

  // Support for arguments.length and arguments[?].
  void GenerateArgumentsLength(ZoneList<Expression*>* args);
  void GenerateArguments(ZoneList<Expression*>* args);

  // Support for accessing the class and value fields of an object.
  void GenerateClassOf(ZoneList<Expression*>* args);
  void GenerateValueOf(ZoneList<Expression*>* args);
  void GenerateSetValueOf(ZoneList<Expression*>* args);

  // Fast support for charCodeAt(n).
  void GenerateStringCharCodeAt(ZoneList<Expression*>* args);

  // Fast support for string.charAt(n) and string[n].
  void GenerateStringCharFromCode(ZoneList<Expression*>* args);

  // Fast support for string.charAt(n) and string[n].
  void GenerateStringCharAt(ZoneList<Expression*>* args);

  // Fast support for object equality testing.
  void GenerateObjectEquals(ZoneList<Expression*>* args);

  void GenerateLog(ZoneList<Expression*>* args);

  // Fast support for Math.random().
  void GenerateRandomHeapNumber(ZoneList<Expression*>* args);

  // Fast support for StringAdd.
  void GenerateStringAdd(ZoneList<Expression*>* args);

  // Fast support for SubString.
  void GenerateSubString(ZoneList<Expression*>* args);

  // Fast support for StringCompare.
  void GenerateStringCompare(ZoneList<Expression*>* args);

  // Support for direct calls from JavaScript to native RegExp code.
  void GenerateRegExpExec(ZoneList<Expression*>* args);

  void GenerateRegExpConstructResult(ZoneList<Expression*>* args);

  void GenerateRegExpCloneResult(ZoneList<Expression*>* args);

  // Support for fast native caches.
  void GenerateGetFromCache(ZoneList<Expression*>* args);

  // Fast support for number to string.
  void GenerateNumberToString(ZoneList<Expression*>* args);

  // Fast swapping of elements.
  void GenerateSwapElements(ZoneList<Expression*>* args);

  // Fast call for custom callbacks.
  void GenerateCallFunction(ZoneList<Expression*>* args);

  // Fast call to math functions.
  void GenerateMathPow(ZoneList<Expression*>* args);
  void GenerateMathSin(ZoneList<Expression*>* args);
  void GenerateMathCos(ZoneList<Expression*>* args);
  void GenerateMathSqrt(ZoneList<Expression*>* args);

  void GenerateIsRegExpEquivalent(ZoneList<Expression*>* args);

  void GenerateHasCachedArrayIndex(ZoneList<Expression*>* args);
  void GenerateGetCachedArrayIndex(ZoneList<Expression*>* args);

  // Simple condition analysis.
  enum ConditionAnalysis {
    ALWAYS_TRUE,
    ALWAYS_FALSE,
    DONT_KNOW
  };
  ConditionAnalysis AnalyzeCondition(Expression* cond);

  // Methods used to indicate which source code is generated for. Source
  // positions are collected by the assembler and emitted with the relocation
  // information.
  void CodeForFunctionPosition(FunctionLiteral* fun);
  void CodeForReturnPosition(FunctionLiteral* fun);
  void CodeForStatementPosition(Statement* node);
  void CodeForDoWhileConditionPosition(DoWhileStatement* stmt);
  void CodeForSourcePosition(int pos);

  // True if the registers are valid for entry to a block.
  bool HasValidEntryRegisters();

  List<DeferredCode*> deferred_;

  MacroAssembler* masm_;  // to generate code

  CompilationInfo* info_;

  // Code generation state
  VirtualFrame* frame_;
  RegisterAllocator* allocator_;
  Condition cc_reg_;
  CodeGenState* state_;
  int loop_nesting_;

  Vector<TypeInfo>* type_info_;

  BreakTarget function_return_;

  // True if the function return is shadowed (i.e., jumping to the target
  // function_return_ does not jump to the true function return, but rather
  // to some unlinking code).
  bool function_return_is_shadowed_;

  // Size of inlined write barriers generated by EmitNamedStore.
  static int inlined_write_barrier_size_;

  friend class VirtualFrame;
  friend class JumpTarget;
  friend class Reference;
  friend class FastCodeGenerator;
  friend class FullCodeGenerator;
  friend class FullCodeGenSyntaxChecker;

  DISALLOW_COPY_AND_ASSIGN(CodeGenerator);
};


} }  // namespace v8::internal

#endif  // V8_ARM_CODEGEN_ARM_H_