// Copyright 2010 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#ifndef V8_ARM_CODEGEN_ARM_H_
#define V8_ARM_CODEGEN_ARM_H_

#include "ast.h"
#include "code-stubs-arm.h"
#include "ic-inl.h"

namespace v8 {
namespace internal {
// Forward declarations
class CompilationInfo;
class DeferredCode;
class JumpTarget;
class RegisterAllocator;
class RegisterFile;

enum InitState { CONST_INIT, NOT_CONST_INIT };
enum TypeofState { INSIDE_TYPEOF, NOT_INSIDE_TYPEOF };
enum GenerateInlineSmi { DONT_GENERATE_INLINE_SMI, GENERATE_INLINE_SMI };
enum WriteBarrierCharacter { UNLIKELY_SMI, LIKELY_SMI, NEVER_NEWSPACE };
// -------------------------------------------------------------------------
// Reference support

// A reference is a C++ stack-allocated object that puts a
// reference on the virtual frame. The reference may be consumed
// by GetValue, SetValue, and CodeGenerator::UnloadReference.
// When the lifetime (scope) of a valid reference ends, it must have
// been consumed, and be in state UNLOADED.
class Reference BASE_EMBEDDED {
 public:
  // The values of the types are important, see size().
  enum Type { UNLOADED = -2, ILLEGAL = -1, SLOT = 0, NAMED = 1, KEYED = 2 };

  Reference(CodeGenerator* cgen,
            Expression* expression,
            bool persist_after_get = false);
  ~Reference();

  Expression* expression() const { return expression_; }
  Type type() const { return type_; }
  void set_type(Type value) {
    ASSERT_EQ(ILLEGAL, type_);
    type_ = value;
  }

  void set_unloaded() {
    ASSERT_NE(ILLEGAL, type_);
    ASSERT_NE(UNLOADED, type_);
    type_ = UNLOADED;
  }

  // The size the reference takes up on the stack.
  int size() const {
    return (type_ < SLOT) ? 0 : type_;
  }

  bool is_illegal() const { return type_ == ILLEGAL; }
  bool is_slot() const { return type_ == SLOT; }
  bool is_property() const { return type_ == NAMED || type_ == KEYED; }
  bool is_unloaded() const { return type_ == UNLOADED; }

  // Return the name. Only valid for named property references.
  Handle<String> GetName();

  // Generate code to push the value of the reference on top of the
  // expression stack. The reference is expected to be already on top of
  // the expression stack, and it is consumed by the call unless the
  // reference is for a compound assignment.
  // If the reference is not consumed, it is left in place under its value.
  void GetValue();

  // Generate code to store the value on top of the expression stack in the
  // reference. The reference is expected to be immediately below the value
  // on the expression stack. The value is stored in the location specified
  // by the reference, and is left on top of the stack, after the reference
  // is popped from beneath it (unloaded).
  void SetValue(InitState init_state, WriteBarrierCharacter wb);

  // Called in preparation for something that uses the reference on the
  // stack. If the reference is needed after the get, dup it now; otherwise
  // mark it as used.
  inline void DupIfPersist();

 private:
  CodeGenerator* cgen_;
  Expression* expression_;
  Type type_;
  // Keep the reference on the stack after get, so it can be used by set later.
  bool persist_after_get_;
};
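
// Illustrative usage sketch for the Reference class above (not part of the
// original header; the visitor context and the expression accessor used here
// are hypothetical):
//
//   {
//     Reference ref(cgen, node->target());
//     if (!ref.is_illegal()) {
//       ref.GetValue();                           // push the current value
//       // ... leave the new value on top of the frame ...
//       ref.SetValue(NOT_CONST_INIT, LIKELY_SMI);
//     }
//   }  // A valid reference must be consumed (UNLOADED) by this point.
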
// -------------------------------------------------------------------------
// Code generation state

// The state is passed down the AST by the code generator (and back up, in
// the form of the state of the label pair). It is threaded through the
// call stack. Constructing a state implicitly pushes it on the owning code
// generator's stack of states, and destroying one implicitly pops it.

class CodeGenState BASE_EMBEDDED {
 public:
  // Create an initial code generator state. Destroying the initial state
  // leaves the code generator with a NULL state.
  explicit CodeGenState(CodeGenerator* owner);

  // Destroy a code generator state and restore the owning code generator's
  // previous state.
  virtual ~CodeGenState();

  virtual JumpTarget* true_target() const { return NULL; }
  virtual JumpTarget* false_target() const { return NULL; }

 protected:
  inline CodeGenerator* owner() { return owner_; }
  inline CodeGenState* previous() const { return previous_; }

 private:
  CodeGenerator* owner_;
  CodeGenState* previous_;
};
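
// Illustrative sketch of the implicit push/pop behaviour (not part of the
// original header): constructing a derived state, such as the
// ConditionCodeGenState declared below, installs it for the dynamic extent
// of a visit, and its destructor restores the previous state.
//
//   {
//     ConditionCodeGenState new_state(this, &then_target, &else_target);
//     Visit(condition);  // generated code may branch to either target
//   }                    // previous CodeGenState is restored here
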
class ConditionCodeGenState : public CodeGenState {
 public:
  // Create a code generator state based on a code generator's current
  // state. The new state has its own pair of branch labels.
  ConditionCodeGenState(CodeGenerator* owner,
                        JumpTarget* true_target,
                        JumpTarget* false_target);

  virtual JumpTarget* true_target() const { return true_target_; }
  virtual JumpTarget* false_target() const { return false_target_; }

 private:
  JumpTarget* true_target_;
  JumpTarget* false_target_;
};
class TypeInfoCodeGenState : public CodeGenState {
 public:
  TypeInfoCodeGenState(CodeGenerator* owner,
                       Slot* slot,
                       TypeInfo info);
  ~TypeInfoCodeGenState();

  virtual JumpTarget* true_target() const { return previous()->true_target(); }
  virtual JumpTarget* false_target() const {
    return previous()->false_target();
  }

 private:
  Slot* slot_;
  TypeInfo old_type_info_;
};
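
// Illustrative sketch (not part of the original header): a
// TypeInfoCodeGenState is expected to record the slot's previous TypeInfo in
// old_type_info_, install the new one via its owner, and restore the old
// value in its destructor, so a type assumption stays scoped:
//
//   {
//     TypeInfoCodeGenState temp(this, slot, TypeInfo::Smi());
//     // ... emit code that may assume the slot holds a smi ...
//   }  // the slot's previous TypeInfo is restored here
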
// -------------------------------------------------------------------------
// Arguments allocation mode

enum ArgumentsAllocationMode {
  NO_ARGUMENTS_ALLOCATION,
  EAGER_ARGUMENTS_ALLOCATION,
  LAZY_ARGUMENTS_ALLOCATION
};
// -------------------------------------------------------------------------
// CodeGenerator

class CodeGenerator: public AstVisitor {
 public:
  static bool MakeCode(CompilationInfo* info);

  // Printing of AST, etc. as requested by flags.
  static void MakeCodePrologue(CompilationInfo* info);

  // Allocate and install the code.
  static Handle<Code> MakeCodeEpilogue(MacroAssembler* masm,
                                       Code::Flags flags,
                                       CompilationInfo* info);

  // Print the code after compiling it.
  static void PrintCode(Handle<Code> code, CompilationInfo* info);

#ifdef ENABLE_LOGGING_AND_PROFILING
  static bool ShouldGenerateLog(Expression* type);
#endif

  static void SetFunctionInfo(Handle<JSFunction> fun,
                              FunctionLiteral* lit,
                              bool is_toplevel,
                              Handle<Script> script);

  static bool RecordPositions(MacroAssembler* masm,
                              int pos,
                              bool right_here = false);
  MacroAssembler* masm() { return masm_; }
  VirtualFrame* frame() const { return frame_; }
  inline Handle<Script> script();

  bool has_valid_frame() const { return frame_ != NULL; }

  // Set the virtual frame to be new_frame, with non-frame register
  // reference counts given by non_frame_registers. The non-frame
  // register reference counts of the old frame are returned in
  // non_frame_registers.
  void SetFrame(VirtualFrame* new_frame, RegisterFile* non_frame_registers);
  RegisterAllocator* allocator() const { return allocator_; }

  CodeGenState* state() { return state_; }
  void set_state(CodeGenState* state) { state_ = state; }

  TypeInfo type_info(Slot* slot) {
    int index = NumberOfSlot(slot);
    if (index == kInvalidSlotNumber) return TypeInfo::Unknown();
    return (*type_info_)[index];
  }

  TypeInfo set_type_info(Slot* slot, TypeInfo info) {
    int index = NumberOfSlot(slot);
    ASSERT(index >= kInvalidSlotNumber);
    if (index != kInvalidSlotNumber) {
      TypeInfo previous_value = (*type_info_)[index];
      (*type_info_)[index] = info;
      return previous_value;
    }
    return TypeInfo::Unknown();
  }
  void AddDeferred(DeferredCode* code) { deferred_.Add(code); }

  // Constants related to patching of inlined load/store.
  static int GetInlinedKeyedLoadInstructionsAfterPatch() {
    return FLAG_debug_code ? 32 : 13;
  }
  static const int kInlinedKeyedStoreInstructionsAfterPatch = 8;
  static int GetInlinedNamedStoreInstructionsAfterPatch() {
    ASSERT(inlined_write_barrier_size_ != -1);
    return inlined_write_barrier_size_ + 4;
  }
 private:
  // Type of a member function that generates inline code for a native
  // function.
  typedef void (CodeGenerator::*InlineFunctionGenerator)
      (ZoneList<Expression*>*);

  static const InlineFunctionGenerator kInlineFunctionGenerators[];

  // Construction/Destruction
  explicit CodeGenerator(MacroAssembler* masm);

  // Accessors
  inline bool is_eval();
  inline Scope* scope();
  inline bool is_strict_mode();
  inline StrictModeFlag strict_mode_flag();

  // Generating deferred code.
  void ProcessDeferred();

  static const int kInvalidSlotNumber = -1;

  int NumberOfSlot(Slot* slot);
  bool has_cc() const { return cc_reg_ != al; }
  JumpTarget* true_target() const { return state_->true_target(); }
  JumpTarget* false_target() const { return state_->false_target(); }

  // Track loop nesting level.
  int loop_nesting() const { return loop_nesting_; }
  void IncrementLoopNesting() { loop_nesting_++; }
  void DecrementLoopNesting() { loop_nesting_--; }

  void VisitStatements(ZoneList<Statement*>* statements);

  virtual void VisitSlot(Slot* node);
#define DEF_VISIT(type) \
  virtual void Visit##type(type* node);
  AST_NODE_LIST(DEF_VISIT)
#undef DEF_VISIT
  // Main code generation function
  void Generate(CompilationInfo* info);

  // Generate the return sequence code. Should be called no more than
  // once per compiled function, immediately after binding the return
  // target (which cannot be done more than once). The return value should
  // be in r0.
  void GenerateReturnSequence();
  // Returns the arguments allocation mode.
  ArgumentsAllocationMode ArgumentsMode();

  // Store the arguments object and allocate it if necessary.
  void StoreArgumentsObject(bool initial);

  // The following are used by class Reference.
  void LoadReference(Reference* ref);
  void UnloadReference(Reference* ref);

  MemOperand SlotOperand(Slot* slot, Register tmp);

  MemOperand ContextSlotOperandCheckExtensions(Slot* slot,
                                               Register tmp,
                                               Register tmp2,
                                               JumpTarget* slow);

  // Expressions
  void LoadCondition(Expression* x,
                     JumpTarget* true_target,
                     JumpTarget* false_target,
                     bool force_cc);
  void Load(Expression* expr);
  void LoadGlobalReceiver(Register scratch);

  // Read a value from a slot and leave it on top of the expression stack.
  void LoadFromSlot(Slot* slot, TypeofState typeof_state);
  void LoadFromSlotCheckForArguments(Slot* slot, TypeofState state);

  // Store the value on top of the stack to a slot.
  void StoreToSlot(Slot* slot, InitState init_state);

  // Support for compiling assignment expressions.
  void EmitSlotAssignment(Assignment* node);
  void EmitNamedPropertyAssignment(Assignment* node);
  void EmitKeyedPropertyAssignment(Assignment* node);

  // Load a named property, returning it in r0. The receiver is passed on the
  // stack, and remains there.
  void EmitNamedLoad(Handle<String> name, bool is_contextual);

  // Store to a named property. If the store is contextual, value is passed on
  // the frame and consumed. Otherwise, receiver and value are passed on the
  // frame and consumed. The result is returned in r0.
  void EmitNamedStore(Handle<String> name, bool is_contextual);

  // Load a keyed property, leaving it in r0. The receiver and key are
  // passed on the stack, and remain there.
  void EmitKeyedLoad();

  // Store a keyed property. Key and receiver are on the stack and the value is
  // in r0. Result is returned in r0.
  void EmitKeyedStore(StaticType* key_type, WriteBarrierCharacter wb_info);
  void LoadFromGlobalSlotCheckExtensions(Slot* slot,
                                         TypeofState typeof_state,
                                         JumpTarget* slow);

  // Support for loading from local/global variables and arguments
  // whose location is known unless they are shadowed by
  // eval-introduced bindings. Generates no code for unsupported slot
  // types and therefore expects to fall through to the slow jump target.
  void EmitDynamicLoadFromSlotFastCase(Slot* slot,
                                       TypeofState typeof_state,
                                       JumpTarget* slow,
                                       JumpTarget* done);
  // Special code for typeof expressions: Unfortunately, we must
  // be careful when loading the expression in 'typeof'
  // expressions. We are not allowed to throw reference errors for
  // non-existing properties of the global object, so we must make it
  // look like an explicit property access, instead of an access
  // through the context chain.
  void LoadTypeofExpression(Expression* x);
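
  // Illustrative JavaScript example for LoadTypeofExpression above (not part
  // of the original header):
  //
  //   typeof undeclared_variable   // must yield "undefined", not throw
  //
  // A plain lookup through the context chain would throw a ReferenceError
  // for the undeclared name, so the load is emitted as a property access on
  // the global object instead.
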
  void ToBoolean(JumpTarget* true_target, JumpTarget* false_target);

  // Generate code that computes a shortcutting logical operation.
  void GenerateLogicalBooleanOperation(BinaryOperation* node);

  void GenericBinaryOperation(Token::Value op,
                              OverwriteMode overwrite_mode,
                              GenerateInlineSmi inline_smi,
                              int constant_rhs =
                                  GenericBinaryOpStub::kUnknownIntValue);
  void Comparison(Condition cc,
                  Expression* left,
                  Expression* right,
                  bool strict = false);

  void SmiOperation(Token::Value op,
                    Handle<Object> value,
                    bool reversed,
                    OverwriteMode mode);

  void CallWithArguments(ZoneList<Expression*>* arguments,
                         CallFunctionFlags flags,
                         int position);
  // An optimized implementation of expressions of the form
  // x.apply(y, arguments). We call x the applicand and y the receiver.
  // The optimization avoids allocating an arguments object if possible.
  void CallApplyLazy(Expression* applicand,
                     Expression* receiver,
                     VariableProxy* arguments,
                     int position);
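
  // Illustrative JavaScript example for CallApplyLazy above (not part of the
  // original header):
  //
  //   function forward() { return handler.apply(this, arguments); }
  //
  // Here 'handler' is the applicand and 'this' is the receiver; the
  // arguments object need not be materialized when it is only forwarded to
  // apply.
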
  void Branch(bool if_true, JumpTarget* target);

  bool CheckForInlineRuntimeCall(CallRuntime* node);

  static Handle<Code> ComputeLazyCompile(int argc);
  void ProcessDeclarations(ZoneList<Declaration*>* declarations);

  // Declare global variables and functions in the given array of
  // name/value pairs.
  void DeclareGlobals(Handle<FixedArray> pairs);

  // Instantiate the function based on the shared function info.
  void InstantiateFunction(Handle<SharedFunctionInfo> function_info,
                           bool pretenure);
  // Support for type checks.
  void GenerateIsSmi(ZoneList<Expression*>* args);
  void GenerateIsNonNegativeSmi(ZoneList<Expression*>* args);
  void GenerateIsArray(ZoneList<Expression*>* args);
  void GenerateIsRegExp(ZoneList<Expression*>* args);
  void GenerateIsObject(ZoneList<Expression*>* args);
  void GenerateIsSpecObject(ZoneList<Expression*>* args);
  void GenerateIsFunction(ZoneList<Expression*>* args);
  void GenerateIsUndetectableObject(ZoneList<Expression*>* args);
  void GenerateIsStringWrapperSafeForDefaultValueOf(
      ZoneList<Expression*>* args);

  // Support for construct call checks.
  void GenerateIsConstructCall(ZoneList<Expression*>* args);

  // Support for arguments.length and arguments[?].
  void GenerateArgumentsLength(ZoneList<Expression*>* args);
  void GenerateArguments(ZoneList<Expression*>* args);

  // Support for accessing the class and value fields of an object.
  void GenerateClassOf(ZoneList<Expression*>* args);
  void GenerateValueOf(ZoneList<Expression*>* args);
  void GenerateSetValueOf(ZoneList<Expression*>* args);

  // Fast support for charCodeAt(n).
  void GenerateStringCharCodeAt(ZoneList<Expression*>* args);

  // Fast support for String.fromCharCode(n).
  void GenerateStringCharFromCode(ZoneList<Expression*>* args);

  // Fast support for string.charAt(n) and string[n].
  void GenerateStringCharAt(ZoneList<Expression*>* args);

  // Fast support for object equality testing.
  void GenerateObjectEquals(ZoneList<Expression*>* args);

  void GenerateLog(ZoneList<Expression*>* args);

  // Fast support for Math.random().
  void GenerateRandomHeapNumber(ZoneList<Expression*>* args);

  // Fast support for StringAdd.
  void GenerateStringAdd(ZoneList<Expression*>* args);

  // Fast support for SubString.
  void GenerateSubString(ZoneList<Expression*>* args);

  // Fast support for StringCompare.
  void GenerateStringCompare(ZoneList<Expression*>* args);

  // Support for direct calls from JavaScript to native RegExp code.
  void GenerateRegExpExec(ZoneList<Expression*>* args);

  void GenerateRegExpConstructResult(ZoneList<Expression*>* args);

  // Support for fast native caches.
  void GenerateGetFromCache(ZoneList<Expression*>* args);

  // Fast support for number to string.
  void GenerateNumberToString(ZoneList<Expression*>* args);

  // Fast swapping of elements.
  void GenerateSwapElements(ZoneList<Expression*>* args);

  // Fast call for custom callbacks.
  void GenerateCallFunction(ZoneList<Expression*>* args);

  // Fast call to math functions.
  void GenerateMathPow(ZoneList<Expression*>* args);
  void GenerateMathSin(ZoneList<Expression*>* args);
  void GenerateMathCos(ZoneList<Expression*>* args);
  void GenerateMathSqrt(ZoneList<Expression*>* args);
  void GenerateMathLog(ZoneList<Expression*>* args);

  void GenerateIsRegExpEquivalent(ZoneList<Expression*>* args);

  void GenerateHasCachedArrayIndex(ZoneList<Expression*>* args);
  void GenerateGetCachedArrayIndex(ZoneList<Expression*>* args);
  void GenerateFastAsciiArrayJoin(ZoneList<Expression*>* args);
  // Simple condition analysis.
  enum ConditionAnalysis {
    ALWAYS_TRUE,
    ALWAYS_FALSE,
    DONT_KNOW
  };
  ConditionAnalysis AnalyzeCondition(Expression* cond);
  // Methods used to indicate which source positions code is generated for.
  // Source positions are collected by the assembler and emitted with the
  // relocation information.
  void CodeForFunctionPosition(FunctionLiteral* fun);
  void CodeForReturnPosition(FunctionLiteral* fun);
  void CodeForStatementPosition(Statement* node);
  void CodeForDoWhileConditionPosition(DoWhileStatement* stmt);
  void CodeForSourcePosition(int pos);
  // True if the registers are valid for entry to a block.
  bool HasValidEntryRegisters();

  List<DeferredCode*> deferred_;

  // Assembler
  MacroAssembler* masm_;  // to generate code

  CompilationInfo* info_;

  // Code generation state
  VirtualFrame* frame_;
  RegisterAllocator* allocator_;
  Condition cc_reg_;
  CodeGenState* state_;
  int loop_nesting_;

  Vector<TypeInfo>* type_info_;

  // Jump targets
  BreakTarget function_return_;

  // True if the function return is shadowed (i.e., jumping to the target
  // function_return_ does not jump to the true function return, but rather
  // to some unlinking code).
  bool function_return_is_shadowed_;

  // Size of inlined write barriers generated by EmitNamedStore.
  static int inlined_write_barrier_size_;
  friend class VirtualFrame;
  friend class JumpTarget;
  friend class Reference;
  friend class FastCodeGenerator;
  friend class FullCodeGenerator;
  friend class FullCodeGenSyntaxChecker;
  friend class LCodeGen;

  DISALLOW_COPY_AND_ASSIGN(CodeGenerator);
};


} }  // namespace v8::internal

#endif  // V8_ARM_CODEGEN_ARM_H_