// Copyright 2010 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#ifndef V8_ARM_CODEGEN_ARM_H_
#define V8_ARM_CODEGEN_ARM_H_

#include "code-stubs-arm.h"

namespace v8 {
namespace internal {

// Forward declarations
class CompilationInfo;
class DeferredCode;
class JumpTarget;
class RegisterAllocator;
class RegisterFile;

enum InitState { CONST_INIT, NOT_CONST_INIT };
enum TypeofState { INSIDE_TYPEOF, NOT_INSIDE_TYPEOF };
enum GenerateInlineSmi { DONT_GENERATE_INLINE_SMI, GENERATE_INLINE_SMI };
enum WriteBarrierCharacter { UNLIKELY_SMI, LIKELY_SMI, NEVER_NEWSPACE };

// -------------------------------------------------------------------------
// Reference support

// A reference is a C++ stack-allocated object that puts a
// reference on the virtual frame. The reference may be consumed
// by GetValue, TakeValue, SetValue, and CodeGenerator::UnloadReference.
// When the lifetime (scope) of a valid reference ends, it must have
// been consumed, and be in state UNLOADED. A usage sketch follows the
// class definition.

class Reference BASE_EMBEDDED {
 public:
  // The values of the types are important; see size().
  enum Type { UNLOADED = -2, ILLEGAL = -1, SLOT = 0, NAMED = 1, KEYED = 2 };

  Reference(CodeGenerator* cgen,
            Expression* expression,
            bool persist_after_get = false);
  ~Reference();

  Expression* expression() const { return expression_; }
  Type type() const { return type_; }
  void set_type(Type value) {
    ASSERT_EQ(ILLEGAL, type_);
    type_ = value;
  }

  void set_unloaded() {
    ASSERT_NE(ILLEGAL, type_);
    ASSERT_NE(UNLOADED, type_);
    type_ = UNLOADED;
  }

  // The size the reference takes up on the stack.
  int size() const {
    return (type_ < SLOT) ? 0 : type_;
  }

  bool is_illegal() const { return type_ == ILLEGAL; }
  bool is_slot() const { return type_ == SLOT; }
  bool is_property() const { return type_ == NAMED || type_ == KEYED; }
  bool is_unloaded() const { return type_ == UNLOADED; }

  // Return the name. Only valid for named property references.
  Handle<String> GetName();

  // Generate code to push the value of the reference on top of the
  // expression stack. The reference is expected to be already on top of
  // the expression stack, and it is consumed by the call unless the
  // reference is for a compound assignment.
  // If the reference is not consumed, it is left in place under its value.
  void GetValue();

  // Generate code to store the value on top of the expression stack in the
  // reference. The reference is expected to be immediately below the value
  // on the expression stack. The value is stored in the location specified
  // by the reference, and is left on top of the stack, after the reference
  // is popped from beneath it (unloaded).
  void SetValue(InitState init_state, WriteBarrierCharacter wb);

  // Call this in preparation for an operation that uses the reference on the
  // stack. If the reference is needed after the get, dup it now; otherwise
  // mark it as used.
  inline void DupIfPersist();

 private:
  CodeGenerator* cgen_;
  Expression* expression_;
  Type type_;
  // Keep the reference on the stack after get, so it can be used by set later.
  bool persist_after_get_;
};
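
// Illustrative usage sketch (an assumption for exposition, not code from this
// header): inside the code generator, an assignment through a Reference looks
// roughly like the following; the AST accessors and the write barrier hint are
// hypothetical.
//
//   {
//     Reference target(this, node->target());
//     if (target.is_illegal()) return;
//     Load(node->value());                          // value on top of stack
//     target.SetValue(NOT_CONST_INIT, LIKELY_SMI);  // stores and unloads
//   }  // 'target' must be in state UNLOADED when it goes out of scope.
//
// For a compound assignment, GetValue() pushes the current value while
// leaving the reference in place beneath it.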

// -------------------------------------------------------------------------
// Code generation state

// The state is passed down the AST by the code generator (and back up, in
// the form of the state of the label pair). It is threaded through the
// call stack. Constructing a state implicitly pushes it on the owning code
// generator's stack of states, and destroying one implicitly pops it.
// See the sketch after ConditionCodeGenState below.

class CodeGenState BASE_EMBEDDED {
 public:
  // Create an initial code generator state. Destroying the initial state
  // leaves the code generator with a NULL state.
  explicit CodeGenState(CodeGenerator* owner);

  // Destroy a code generator state and restore the owning code generator's
  // previous state.
  virtual ~CodeGenState();

  virtual JumpTarget* true_target() const { return NULL; }
  virtual JumpTarget* false_target() const { return NULL; }

 protected:
  inline CodeGenerator* owner() { return owner_; }
  inline CodeGenState* previous() const { return previous_; }

 private:
  CodeGenerator* owner_;
  CodeGenState* previous_;
};

class ConditionCodeGenState : public CodeGenState {
 public:
  // Create a code generator state based on a code generator's current
  // state. The new state has its own pair of branch labels.
  ConditionCodeGenState(CodeGenerator* owner,
                        JumpTarget* true_target,
                        JumpTarget* false_target);

  virtual JumpTarget* true_target() const { return true_target_; }
  virtual JumpTarget* false_target() const { return false_target_; }

 private:
  JumpTarget* true_target_;
  JumpTarget* false_target_;
};
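
// Illustrative sketch (an assumption, not code from this header): because
// constructing a state pushes it and destroying it pops it, control-flow code
// can thread branch targets through the visitors with plain C++ scoping,
// roughly:
//
//   void CodeGenerator::LoadCondition(Expression* x,
//                                     JumpTarget* true_target,
//                                     JumpTarget* false_target,
//                                     bool force_cc) {
//     ConditionCodeGenState new_state(this, true_target, false_target);
//     Visit(x);  // visitors reach the targets via state()->true_target() etc.
//   }            // leaving the scope restores the previous state
//
// The signature mirrors the LoadCondition declaration later in this header;
// the body is a simplified assumption.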

class TypeInfoCodeGenState : public CodeGenState {
 public:
  TypeInfoCodeGenState(CodeGenerator* owner,
                       Slot* slot,
                       TypeInfo type_info);
  ~TypeInfoCodeGenState();

  virtual JumpTarget* true_target() const { return previous()->true_target(); }
  virtual JumpTarget* false_target() const {
    return previous()->false_target();
  }

 private:
  Slot* slot_;
  TypeInfo old_type_info_;
};

// -------------------------------------------------------------------------
// Arguments allocation mode

enum ArgumentsAllocationMode {
  NO_ARGUMENTS_ALLOCATION,
  EAGER_ARGUMENTS_ALLOCATION,
  LAZY_ARGUMENTS_ALLOCATION
};
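
// Rough intuition (an assumption for exposition; the exact rules are decided
// by CodeGenerator::ArgumentsMode() below):
//
//   function f(a) { return a + 1; }            // never touches 'arguments'
//                                              //   -> NO_ARGUMENTS_ALLOCATION
//   function g() { return arguments.length; }  // needs the object up front
//                                              //   -> EAGER_ARGUMENTS_ALLOCATION
//   function h(x) { return x.apply(this, arguments); }
//                                              // object may never be needed
//                                              //   -> LAZY_ARGUMENTS_ALLOCATION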

// -------------------------------------------------------------------------
// CodeGenerator

class CodeGenerator: public AstVisitor {
 public:
  static bool MakeCode(CompilationInfo* info);

  // Printing of AST, etc. as requested by flags.
  static void MakeCodePrologue(CompilationInfo* info);

  // Allocate and install the code.
  static Handle<Code> MakeCodeEpilogue(MacroAssembler* masm,
                                       CompilationInfo* info);

  // Print the code after compiling it.
  static void PrintCode(Handle<Code> code, CompilationInfo* info);

#ifdef ENABLE_LOGGING_AND_PROFILING
  static bool ShouldGenerateLog(Expression* type);
#endif

  static void SetFunctionInfo(Handle<JSFunction> fun,
                              FunctionLiteral* lit,
                              Handle<Script> script);

  static bool RecordPositions(MacroAssembler* masm,
                              bool right_here = false);

  MacroAssembler* masm() { return masm_; }
  VirtualFrame* frame() const { return frame_; }
  inline Handle<Script> script();

  bool has_valid_frame() const { return frame_ != NULL; }

  // Set the virtual frame to be new_frame, with non-frame register
  // reference counts given by non_frame_registers. The non-frame
  // register reference counts of the old frame are returned in
  // non_frame_registers.
  void SetFrame(VirtualFrame* new_frame, RegisterFile* non_frame_registers);

  RegisterAllocator* allocator() const { return allocator_; }

  CodeGenState* state() { return state_; }
  void set_state(CodeGenState* state) { state_ = state; }

  TypeInfo type_info(Slot* slot) {
    int index = NumberOfSlot(slot);
    if (index == kInvalidSlotNumber) return TypeInfo::Unknown();
    return (*type_info_)[index];
  }

  TypeInfo set_type_info(Slot* slot, TypeInfo info) {
    int index = NumberOfSlot(slot);
    ASSERT(index >= kInvalidSlotNumber);
    if (index != kInvalidSlotNumber) {
      TypeInfo previous_value = (*type_info_)[index];
      (*type_info_)[index] = info;
      return previous_value;
    }
    return TypeInfo::Unknown();
  }
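
  // Illustrative sketch (an assumption, not code from this header): a visitor
  // that learns something about a slot can record it for nested code and
  // restore the old information afterwards, roughly:
  //
  //   TypeInfo old = set_type_info(slot, TypeInfo::Smi());  // Smi() assumed
  //   ...generate code that may consult type_info(slot)...
  //   set_type_info(slot, old);
  //
  // TypeInfoCodeGenState above appears to scope this save/restore pattern.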

  void AddDeferred(DeferredCode* code) { deferred_.Add(code); }

  // Constants related to patching of inlined load/store.
  static int GetInlinedKeyedLoadInstructionsAfterPatch() {
    return FLAG_debug_code ? 32 : 13;
  }
  static const int kInlinedKeyedStoreInstructionsAfterPatch = 8;
  static int GetInlinedNamedStoreInstructionsAfterPatch() {
    ASSERT(inlined_write_barrier_size_ != -1);
    return inlined_write_barrier_size_ + 4;
  }

 private:
  // Type of a member function that generates inline code for a native
  // function.
  typedef void (CodeGenerator::*InlineFunctionGenerator)
      (ZoneList<Expression*>*);

  static const InlineFunctionGenerator kInlineFunctionGenerators[];

  // Construction/Destruction
  explicit CodeGenerator(MacroAssembler* masm);

  inline bool is_eval();
  inline Scope* scope();
  inline StrictModeFlag strict_mode_flag();

  // Generating deferred code.
  void ProcessDeferred();

  static const int kInvalidSlotNumber = -1;

  // Returns the index used to look up a slot's type info in type_info_, or
  // kInvalidSlotNumber if the slot is not tracked.
  int NumberOfSlot(Slot* slot);

  bool has_cc() const { return cc_reg_ != al; }
  JumpTarget* true_target() const { return state_->true_target(); }
  JumpTarget* false_target() const { return state_->false_target(); }

  // Track loop nesting level.
  int loop_nesting() const { return loop_nesting_; }
  void IncrementLoopNesting() { loop_nesting_++; }
  void DecrementLoopNesting() { loop_nesting_--; }

  void VisitStatements(ZoneList<Statement*>* statements);

  virtual void VisitSlot(Slot* node);
#define DEF_VISIT(type) \
  virtual void Visit##type(type* node);
  AST_NODE_LIST(DEF_VISIT)
#undef DEF_VISIT

  // Main code generation function
  void Generate(CompilationInfo* info);

  // Generate the return sequence code. Should be called no more than
  // once per compiled function, immediately after binding the return
  // target (which cannot be done more than once). The return value should
  // be in r0.
  void GenerateReturnSequence();

  // Returns the arguments allocation mode.
  ArgumentsAllocationMode ArgumentsMode();

  // Store the arguments object and allocate it if necessary.
  void StoreArgumentsObject(bool initial);

  // The following are used by class Reference.
  void LoadReference(Reference* ref);
  void UnloadReference(Reference* ref);

  MemOperand SlotOperand(Slot* slot, Register tmp);

  MemOperand ContextSlotOperandCheckExtensions(Slot* slot,
                                               Register tmp,
                                               JumpTarget* slow);

  void LoadCondition(Expression* x,
                     JumpTarget* true_target,
                     JumpTarget* false_target,
                     bool force_cc);
  void Load(Expression* expr);

  void LoadGlobalReceiver(Register scratch);

  // Read a value from a slot and leave it on top of the expression stack.
  void LoadFromSlot(Slot* slot, TypeofState typeof_state);
  void LoadFromSlotCheckForArguments(Slot* slot, TypeofState state);

  // Store the value on top of the stack to a slot.
  void StoreToSlot(Slot* slot, InitState init_state);

  // Support for compiling assignment expressions.
  void EmitSlotAssignment(Assignment* node);
  void EmitNamedPropertyAssignment(Assignment* node);
  void EmitKeyedPropertyAssignment(Assignment* node);

  // Load a named property, returning it in r0. The receiver is passed on the
  // stack, and remains there.
  void EmitNamedLoad(Handle<String> name, bool is_contextual);

  // Store to a named property. If the store is contextual, value is passed on
  // the frame and consumed. Otherwise, receiver and value are passed on the
  // frame and consumed. The result is returned in r0.
  void EmitNamedStore(Handle<String> name, bool is_contextual);

  // Load a keyed property, leaving it in r0. The receiver and key are
  // passed on the stack, and remain there.
  void EmitKeyedLoad();

  // Store a keyed property. Key and receiver are on the stack and the value
  // is in r0. Result is returned in r0.
  void EmitKeyedStore(StaticType* key_type, WriteBarrierCharacter wb_info);
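
  // Illustrative sketch (an assumption, not code from this header): the
  // comments above describe a stack/register convention. Compiling
  // 'obj[key] = value' would, roughly, push the receiver and key on the
  // virtual frame, place the value in r0, and then emit the store; the
  // frame helpers and the StaticType argument below are assumptions.
  //
  //   frame_->Push(receiver);
  //   frame_->Push(key);
  //   // ...value ends up in r0...
  //   EmitKeyedStore(key_type, LIKELY_SMI);  // result left in r0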

  void LoadFromGlobalSlotCheckExtensions(Slot* slot,
                                         TypeofState typeof_state,
                                         JumpTarget* slow);

  // Support for loading from local/global variables and arguments
  // whose location is known unless they are shadowed by
  // eval-introduced bindings. Generates no code for unsupported slot
  // types and therefore expects to fall through to the slow jump target.
  void EmitDynamicLoadFromSlotFastCase(Slot* slot,
                                       TypeofState typeof_state,
                                       JumpTarget* slow,
                                       JumpTarget* done);

  // Special code for typeof expressions: Unfortunately, we must
  // be careful when loading the expression in 'typeof'
  // expressions. We are not allowed to throw reference errors for
  // non-existing properties of the global object, so we must make it
  // look like an explicit property access, instead of an access
  // through the context chain.
  void LoadTypeofExpression(Expression* x);
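
  // For example (an illustration, not code from this header): for
  //
  //   typeof undeclared_name
  //
  // a normal variable load would throw a ReferenceError when the name is not
  // defined, but typeof must yield "undefined" instead, so the load is
  // emitted as if it were a property access on the global object.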

  void ToBoolean(JumpTarget* true_target, JumpTarget* false_target);

  // Generate code that computes a shortcutting logical operation.
  void GenerateLogicalBooleanOperation(BinaryOperation* node);

  void GenericBinaryOperation(Token::Value op,
                              OverwriteMode overwrite_mode,
                              GenerateInlineSmi inline_smi,
                              int constant_rhs =
                                  GenericBinaryOpStub::kUnknownIntValue);
  void Comparison(Condition cc,
                  Expression* left,
                  Expression* right,
                  bool strict = false);

  void SmiOperation(Token::Value op,
                    Handle<Object> value,
                    bool reversed,
                    OverwriteMode mode);

  void CallWithArguments(ZoneList<Expression*>* arguments,
                         CallFunctionFlags flags,
                         int position);

  // An optimized implementation of expressions of the form
  // x.apply(y, arguments). We call x the applicand and y the receiver.
  // The optimization avoids allocating an arguments object if possible.
  void CallApplyLazy(Expression* applicand,
                     Expression* receiver,
                     VariableProxy* arguments,
                     int position);
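
  // For example (an illustration, not code from this header): the common
  // forwarding pattern
  //
  //   function delegate() {
  //     return target.apply(this, arguments);
  //   }
  //
  // has this shape ('target' is the applicand, 'this' the receiver), so the
  // call can be compiled without materializing an arguments object.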

  void Branch(bool if_true, JumpTarget* target);

  bool CheckForInlineRuntimeCall(CallRuntime* node);

  static Handle<Code> ComputeLazyCompile(int argc);
  void ProcessDeclarations(ZoneList<Declaration*>* declarations);

  // Declare global variables and functions in the given array of
  // name/value pairs.
  void DeclareGlobals(Handle<FixedArray> pairs);

  // Instantiate the function based on the shared function info.
  void InstantiateFunction(Handle<SharedFunctionInfo> function_info,
                           bool pretenure);

  // Support for type checks.
  void GenerateIsSmi(ZoneList<Expression*>* args);
  void GenerateIsNonNegativeSmi(ZoneList<Expression*>* args);
  void GenerateIsArray(ZoneList<Expression*>* args);
  void GenerateIsRegExp(ZoneList<Expression*>* args);
  void GenerateIsObject(ZoneList<Expression*>* args);
  void GenerateIsSpecObject(ZoneList<Expression*>* args);
  void GenerateIsFunction(ZoneList<Expression*>* args);
  void GenerateIsUndetectableObject(ZoneList<Expression*>* args);
  void GenerateIsStringWrapperSafeForDefaultValueOf(
      ZoneList<Expression*>* args);

  // Support for construct call checks.
  void GenerateIsConstructCall(ZoneList<Expression*>* args);

  // Support for arguments.length and arguments[?].
  void GenerateArgumentsLength(ZoneList<Expression*>* args);
  void GenerateArguments(ZoneList<Expression*>* args);

  // Support for accessing the class and value fields of an object.
  void GenerateClassOf(ZoneList<Expression*>* args);
  void GenerateValueOf(ZoneList<Expression*>* args);
  void GenerateSetValueOf(ZoneList<Expression*>* args);

  // Fast support for charCodeAt(n).
  void GenerateStringCharCodeAt(ZoneList<Expression*>* args);

  // Fast support for String.fromCharCode(n).
  void GenerateStringCharFromCode(ZoneList<Expression*>* args);

  // Fast support for string.charAt(n) and string[n].
  void GenerateStringCharAt(ZoneList<Expression*>* args);

  // Fast support for object equality testing.
  void GenerateObjectEquals(ZoneList<Expression*>* args);

  void GenerateLog(ZoneList<Expression*>* args);

  // Fast support for Math.random().
  void GenerateRandomHeapNumber(ZoneList<Expression*>* args);

  // Fast support for StringAdd.
  void GenerateStringAdd(ZoneList<Expression*>* args);

  // Fast support for SubString.
  void GenerateSubString(ZoneList<Expression*>* args);

  // Fast support for StringCompare.
  void GenerateStringCompare(ZoneList<Expression*>* args);

  // Support for direct calls from JavaScript to native RegExp code.
  void GenerateRegExpExec(ZoneList<Expression*>* args);

  void GenerateRegExpConstructResult(ZoneList<Expression*>* args);

  // Support for fast native caches.
  void GenerateGetFromCache(ZoneList<Expression*>* args);

  // Fast support for number to string.
  void GenerateNumberToString(ZoneList<Expression*>* args);

  // Fast swapping of elements.
  void GenerateSwapElements(ZoneList<Expression*>* args);

  // Fast call for custom callbacks.
  void GenerateCallFunction(ZoneList<Expression*>* args);

  // Fast call to math functions.
  void GenerateMathPow(ZoneList<Expression*>* args);
  void GenerateMathSin(ZoneList<Expression*>* args);
  void GenerateMathCos(ZoneList<Expression*>* args);
  void GenerateMathSqrt(ZoneList<Expression*>* args);
  void GenerateMathLog(ZoneList<Expression*>* args);

  void GenerateIsRegExpEquivalent(ZoneList<Expression*>* args);

  void GenerateHasCachedArrayIndex(ZoneList<Expression*>* args);
  void GenerateGetCachedArrayIndex(ZoneList<Expression*>* args);
  void GenerateFastAsciiArrayJoin(ZoneList<Expression*>* args);

  // Simple condition analysis.
  enum ConditionAnalysis {
    ALWAYS_TRUE,
    ALWAYS_FALSE,
    DONT_KNOW
  };
  ConditionAnalysis AnalyzeCondition(Expression* cond);

  // Methods used to indicate which source code the generated code
  // corresponds to. Source positions are collected by the assembler and
  // emitted with the relocation information.
  void CodeForFunctionPosition(FunctionLiteral* fun);
  void CodeForReturnPosition(FunctionLiteral* fun);
  void CodeForStatementPosition(Statement* node);
  void CodeForDoWhileConditionPosition(DoWhileStatement* stmt);
  void CodeForSourcePosition(int pos);

  // True if the registers are valid for entry to a block.
  bool HasValidEntryRegisters();

  List<DeferredCode*> deferred_;

  MacroAssembler* masm_;  // to generate code

  CompilationInfo* info_;

  // Code generation state
  VirtualFrame* frame_;
  RegisterAllocator* allocator_;
  Condition cc_reg_;
  CodeGenState* state_;
  int loop_nesting_;

  Vector<TypeInfo>* type_info_;

  BreakTarget function_return_;

  // True if the function return is shadowed (i.e., jumping to the target
  // function_return_ does not jump to the true function return, but rather
  // to some unlinking code).
  bool function_return_is_shadowed_;

  // Size of inlined write barriers generated by EmitNamedStore.
  static int inlined_write_barrier_size_;

  friend class VirtualFrame;
  friend class JumpTarget;
  friend class Reference;
  friend class FastCodeGenerator;
  friend class FullCodeGenerator;
  friend class FullCodeGenSyntaxChecker;
  friend class LCodeGen;

  DISALLOW_COPY_AND_ASSIGN(CodeGenerator);
};


} }  // namespace v8::internal

#endif  // V8_ARM_CODEGEN_ARM_H_