// Upstream version 9.38.198.0
// Origin: [platform/framework/web/crosswalk.git] / src / v8 / src / compiler / code-generator.cc
1 // Copyright 2013 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #include "src/compiler/code-generator.h"
6
7 #include "src/compiler/code-generator-impl.h"
8 #include "src/compiler/linkage.h"
9 #include "src/compiler/pipeline.h"
10
11 namespace v8 {
12 namespace internal {
13 namespace compiler {
14
// Constructs a code generator for |code|. All auxiliary containers
// (safepoint table, deoptimization bookkeeping, translations) are
// allocated in the instruction sequence's zone and share its lifetime.
15 CodeGenerator::CodeGenerator(InstructionSequence* code)
16     : code_(code),
17       current_block_(NULL),
18       current_source_position_(SourcePosition::Invalid()),
19       masm_(code->zone()->isolate(), NULL, 0),
20       resolver_(this),
21       safepoints_(code->zone()),
22       lazy_deoptimization_entries_(
23           LazyDeoptimizationEntries::allocator_type(code->zone())),
24       deoptimization_states_(
25           DeoptimizationStates::allocator_type(code->zone())),
26       deoptimization_literals_(Literals::allocator_type(code->zone())),
27       translations_(code->zone()) {
  // One slot per deoptimization entry, initially NULL; each slot is
  // populated later by BuildTranslation().
28   deoptimization_states_.resize(code->GetDeoptimizationEntryCount(), NULL);
29 }
30
31
// Drives final code generation: emits the prologue, assembles every
// instruction in the sequence, emits the safepoint table, packages the
// assembler buffer into a Code object, and attaches deoptimization data.
// Returns the finished Code handle.
32 Handle<Code> CodeGenerator::GenerateCode() {
33   CompilationInfo* info = linkage()->info();
34
35   // Emit a code line info recording start event.
36   PositionsRecorder* recorder = masm()->positions_recorder();
37   LOG_CODE_EVENT(isolate(), CodeStartLinePosInfoRecordEvent(recorder));
38
39   // Place function entry hook if requested to do so.
40   if (linkage()->GetIncomingDescriptor()->IsJSFunctionCall()) {
41     ProfileEntryHookStub::MaybeCallEntryHook(masm());
42   }
43
44   // Architecture-specific, linkage-specific prologue.
45   info->set_prologue_offset(masm()->pc_offset());
46   AssemblePrologue();
47
48   // Assemble all instructions.
49   for (InstructionSequence::const_iterator i = code()->begin();
50        i != code()->end(); ++i) {
51     AssembleInstruction(*i);
52   }
53
54   FinishCode(masm());
55
  // The safepoint table follows the instruction stream; its offset is
  // stored on the Code object below.
56   safepoints()->Emit(masm(), frame()->GetSpillSlotCount());
57
58   // TODO(titzer): what are the right code flags here?
  // JS-calling-convention code is marked OPTIMIZED_FUNCTION; everything
  // else is emitted as a STUB.
59   Code::Kind kind = Code::STUB;
60   if (linkage()->GetIncomingDescriptor()->IsJSFunctionCall()) {
61     kind = Code::OPTIMIZED_FUNCTION;
62   }
63   Handle<Code> result = v8::internal::CodeGenerator::MakeCodeEpilogue(
64       masm(), Code::ComputeFlags(kind), info);
65   result->set_is_turbofanned(true);
66   result->set_stack_slots(frame()->GetSpillSlotCount());
67   result->set_safepoint_table_offset(safepoints()->GetCodeOffset());
68
69   PopulateDeoptimizationData(result);
70
71   // Emit a code line info recording stop event.
72   void* line_info = recorder->DetachJITHandlerData();
73   LOG_CODE_EVENT(isolate(), CodeEndLinePosInfoRecordEvent(*result, line_info));
74
75   return result;
76 }
77
78
79 void CodeGenerator::RecordSafepoint(PointerMap* pointers, Safepoint::Kind kind,
80                                     int arguments,
81                                     Safepoint::DeoptMode deopt_mode) {
82   const ZoneList<InstructionOperand*>* operands =
83       pointers->GetNormalizedOperands();
84   Safepoint safepoint =
85       safepoints()->DefineSafepoint(masm(), kind, arguments, deopt_mode);
86   for (int i = 0; i < operands->length(); i++) {
87     InstructionOperand* pointer = operands->at(i);
88     if (pointer->IsStackSlot()) {
89       safepoint.DefinePointerSlot(pointer->index(), zone());
90     } else if (pointer->IsRegister() && (kind & Safepoint::kWithRegisters)) {
91       Register reg = Register::FromAllocationIndex(pointer->index());
92       safepoint.DefinePointerRegister(reg, zone());
93     }
94   }
95 }
96
97
// Assembles a single instruction. Block-start markers bind the block's
// label, gap instructions resolve their parallel moves, source-position
// markers update the recorder, and everything else is handed to the
// architecture backend followed by any flags continuation.
98 void CodeGenerator::AssembleInstruction(Instruction* instr) {
99   if (instr->IsBlockStart()) {
100     // Bind a label for a block start and handle parallel moves.
101     BlockStartInstruction* block_start = BlockStartInstruction::cast(instr);
102     current_block_ = block_start->block();
103     if (FLAG_code_comments) {
104       // TODO(titzer): these code comments are a giant memory leak.
105       Vector<char> buffer = Vector<char>::New(32);
106       SNPrintF(buffer, "-- B%d start --", block_start->block()->id());
107       masm()->RecordComment(buffer.start());
108     }
109     masm()->bind(block_start->label());
110   }
111   if (instr->IsGapMoves()) {
112     // Handle parallel moves associated with the gap instruction.
113     AssembleGap(GapInstruction::cast(instr));
114   } else if (instr->IsSourcePosition()) {
115     AssembleSourcePosition(SourcePositionInstruction::cast(instr));
116   } else {
117     // Assemble architecture-specific code for the instruction.
118     AssembleArchInstruction(instr);
119
120     // Assemble branches or boolean materializations after this instruction.
121     FlagsMode mode = FlagsModeField::decode(instr->opcode());
122     FlagsCondition condition = FlagsConditionField::decode(instr->opcode());
123     switch (mode) {
124       case kFlags_none:
125         return;
126       case kFlags_set:
127         return AssembleArchBoolean(instr, condition);
128       case kFlags_branch:
129         return AssembleArchBranch(instr, condition);
130     }
    // All FlagsMode values are handled above; falling out of the switch
    // means the opcode carried an unknown mode.
131     UNREACHABLE();
132   }
133 }
134
135
// Records a change of source position with the assembler's position
// recorder. Redundant markers (same position as the current one) are
// skipped; unknown positions update the cache but emit nothing.
136 void CodeGenerator::AssembleSourcePosition(SourcePositionInstruction* instr) {
137   SourcePosition source_position = instr->source_position();
138   if (source_position == current_source_position_) return;
139   DCHECK(!source_position.IsInvalid());
140   if (!source_position.IsUnknown()) {
141     int code_pos = source_position.raw();
142     masm()->positions_recorder()->RecordPosition(source_position.raw());
143     masm()->positions_recorder()->WriteRecordedPositions();
144     if (FLAG_code_comments) {
      // NOTE(review): like the block-start comments above, this buffer is
      // never freed — same leak flagged by the TODO in AssembleInstruction.
145       Vector<char> buffer = Vector<char>::New(256);
146       CompilationInfo* info = linkage()->info();
147       int ln = Script::GetLineNumber(info->script(), code_pos);
148       int cn = Script::GetColumnNumber(info->script(), code_pos);
149       if (info->script()->name()->IsString()) {
150         Handle<String> file(String::cast(info->script()->name()));
151         base::OS::SNPrintF(buffer.start(), buffer.length(), "-- %s:%d:%d --",
152                            file->ToCString().get(), ln, cn);
153       } else {
154         base::OS::SNPrintF(buffer.start(), buffer.length(),
155                            "-- <unknown>:%d:%d --", ln, cn);
156       }
157       masm()->RecordComment(buffer.start());
158     }
159   }
160   current_source_position_ = source_position;
161 }
162
163
164 void CodeGenerator::AssembleGap(GapInstruction* instr) {
165   for (int i = GapInstruction::FIRST_INNER_POSITION;
166        i <= GapInstruction::LAST_INNER_POSITION; i++) {
167     GapInstruction::InnerPosition inner_pos =
168         static_cast<GapInstruction::InnerPosition>(i);
169     ParallelMove* move = instr->GetParallelMove(inner_pos);
170     if (move != NULL) resolver()->Resolve(move);
171   }
172 }
173
174
// Builds the DeoptimizationInputData for |code_object| from the recorded
// translations, literals, deoptimization entries and lazy-deopt patch
// points, and attaches it to the code object. No-op when there is nothing
// to record.
175 void CodeGenerator::PopulateDeoptimizationData(Handle<Code> code_object) {
176   CompilationInfo* info = linkage()->info();
177   int deopt_count = code()->GetDeoptimizationEntryCount();
178   int patch_count = static_cast<int>(lazy_deoptimization_entries_.size());
179   if (patch_count == 0 && deopt_count == 0) return;
180   Handle<DeoptimizationInputData> data = DeoptimizationInputData::New(
181       isolate(), deopt_count, patch_count, TENURED);
182
183   Handle<ByteArray> translation_array =
184       translations_.CreateByteArray(isolate()->factory());
185
186   data->SetTranslationByteArray(*translation_array);
187   data->SetInlinedFunctionCount(Smi::FromInt(0));
188   data->SetOptimizationId(Smi::FromInt(info->optimization_id()));
189   // TODO(jarin) The following code was copied over from Lithium, not sure
190   // whether the scope or the IsOptimizing condition are really needed.
191   if (info->IsOptimizing()) {
192     // Reference to shared function info does not change between phases.
193     AllowDeferredHandleDereference allow_handle_dereference;
194     data->SetSharedFunctionInfo(*info->shared_info());
195   } else {
196     data->SetSharedFunctionInfo(Smi::FromInt(0));
197   }
198
  // Copy the accumulated deoptimization literals into a tenured array.
199   Handle<FixedArray> literals = isolate()->factory()->NewFixedArray(
200       static_cast<int>(deoptimization_literals_.size()), TENURED);
201   {
202     AllowDeferredHandleDereference copy_handles;
203     for (unsigned i = 0; i < deoptimization_literals_.size(); i++) {
204       literals->set(i, *deoptimization_literals_[i]);
205     }
206     data->SetLiteralArray(*literals);
207   }
208
209   // No OSR in Turbofan yet...
210   BailoutId osr_ast_id = BailoutId::None();
211   data->SetOsrAstId(Smi::FromInt(osr_ast_id.ToInt()));
212   data->SetOsrPcOffset(Smi::FromInt(-1));
213
214   // Populate deoptimization entries.
215   for (int i = 0; i < deopt_count; i++) {
216     FrameStateDescriptor* descriptor = code()->GetDeoptimizationEntry(i);
217     data->SetAstId(i, descriptor->bailout_id());
    // BuildTranslation() must have filled every state slot by now.
218     CHECK_NE(NULL, deoptimization_states_[i]);
219     data->SetTranslationIndex(
220         i, Smi::FromInt(deoptimization_states_[i]->translation_id_));
221     data->SetArgumentsStackHeight(i, Smi::FromInt(0));
222     data->SetPc(i, Smi::FromInt(-1));
223   }
224
225   // Populate the return address patcher entries.
226   for (int i = 0; i < patch_count; ++i) {
227     LazyDeoptimizationEntry entry = lazy_deoptimization_entries_[i];
    // The recorded after-call position must either be the continuation
    // itself or be separated from it only by the smi-inlining nop.
228     DCHECK(entry.position_after_call() == entry.continuation()->pos() ||
229            IsNopForSmiCodeInlining(code_object, entry.position_after_call(),
230                                    entry.continuation()->pos()));
231     data->SetReturnAddressPc(i, Smi::FromInt(entry.position_after_call()));
232     data->SetPatchedAddressPc(i, Smi::FromInt(entry.deoptimization()->pos()));
233   }
234
235   code_object->set_deoptimization_data(*data);
236 }
237
238
// Records a lazy deoptimization entry for a call instruction: captures the
// pc right after the call together with the labels of the continuation and
// deoptimization blocks (the instruction's last two inputs).
239 void CodeGenerator::RecordLazyDeoptimizationEntry(Instruction* instr) {
240   InstructionOperandConverter i(this, instr);
241
  // Binding here pins the label to the current pc, i.e. the return address
  // of the call just emitted.
242   Label after_call;
243   masm()->bind(&after_call);
244
245   // The continuation and deoptimization are the last two inputs:
246   BasicBlock* cont_block =
247       i.InputBlock(static_cast<int>(instr->InputCount()) - 2);
248   BasicBlock* deopt_block =
249       i.InputBlock(static_cast<int>(instr->InputCount()) - 1);
250
251   Label* cont_label = code_->GetLabel(cont_block);
252   Label* deopt_label = code_->GetLabel(deopt_block);
253
254   lazy_deoptimization_entries_.push_back(
255       LazyDeoptimizationEntry(after_call.pos(), cont_label, deopt_label));
256 }
257
258
259 int CodeGenerator::DefineDeoptimizationLiteral(Handle<Object> literal) {
260   int result = static_cast<int>(deoptimization_literals_.size());
261   for (unsigned i = 0; i < deoptimization_literals_.size(); ++i) {
262     if (deoptimization_literals_[i].is_identical_to(literal)) return i;
263   }
264   deoptimization_literals_.push_back(literal);
265   return result;
266 }
267
268
// Builds the deoptimization translation for the frame state consumed by
// |instr| and stores its index in the state slot for |deoptimization_id|.
// Must be called at most once per deoptimization id.
269 void CodeGenerator::BuildTranslation(Instruction* instr,
270                                      int deoptimization_id) {
271   // We should build translation only once.
272   DCHECK_EQ(NULL, deoptimization_states_[deoptimization_id]);
273
274   FrameStateDescriptor* descriptor =
275       code()->GetDeoptimizationEntry(deoptimization_id);
  // One frame, one JS frame: TurboFan emits a single (non-inlined) frame
  // here, with the function itself as the closure literal.
276   Translation translation(&translations_, 1, 1, zone());
277   translation.BeginJSFrame(descriptor->bailout_id(),
278                            Translation::kSelfLiteralId,
279                            descriptor->size() - descriptor->parameters_count());
280
  // The instruction's inputs carry the frame state values in order.
281   for (int i = 0; i < descriptor->size(); i++) {
282     AddTranslationForOperand(&translation, instr, instr->InputAt(i));
283   }
284
285   deoptimization_states_[deoptimization_id] =
286       new (zone()) DeoptimizationState(translation.index());
287 }
288
289
// Appends one frame-state value to |translation|, dispatching on the kind
// of |op|: stack slot, double stack slot, register, double register, or an
// immediate (which is interned as a deoptimization literal).
290 void CodeGenerator::AddTranslationForOperand(Translation* translation,
291                                              Instruction* instr,
292                                              InstructionOperand* op) {
293   if (op->IsStackSlot()) {
294     translation->StoreStackSlot(op->index());
295   } else if (op->IsDoubleStackSlot()) {
296     translation->StoreDoubleStackSlot(op->index());
297   } else if (op->IsRegister()) {
298     InstructionOperandConverter converter(this, instr);
299     translation->StoreRegister(converter.ToRegister(op));
300   } else if (op->IsDoubleRegister()) {
301     InstructionOperandConverter converter(this, instr);
302     translation->StoreDoubleRegister(converter.ToDoubleRegister(op));
303   } else if (op->IsImmediate()) {
    // Materialize the immediate as a heap object and record it in the
    // literal table so the deoptimizer can reconstruct the value.
304     InstructionOperandConverter converter(this, instr);
305     Constant constant = converter.ToConstant(op);
306     Handle<Object> constant_object;
307     switch (constant.type()) {
308       case Constant::kInt32:
309         constant_object =
310             isolate()->factory()->NewNumberFromInt(constant.ToInt32());
311         break;
312       case Constant::kFloat64:
313         constant_object =
314             isolate()->factory()->NewHeapNumber(constant.ToFloat64());
315         break;
316       case Constant::kHeapObject:
317         constant_object = constant.ToHeapObject();
318         break;
319       default:
        // Other constant kinds (e.g. external references) have no heap
        // representation and cannot appear in a frame state.
320         UNREACHABLE();
321     }
322     int literal_id = DefineDeoptimizationLiteral(constant_object);
323     translation->StoreLiteral(literal_id);
324   } else {
325     UNREACHABLE();
326   }
327 }
328
329 #if !V8_TURBOFAN_BACKEND
330
// Placeholder implementations for platforms without a TurboFan backend:
// every architecture-specific hook simply aborts via UNIMPLEMENTED().
331 void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
332   UNIMPLEMENTED();
333 }
334
335
336 void CodeGenerator::AssembleArchBranch(Instruction* instr,
337                                        FlagsCondition condition) {
338   UNIMPLEMENTED();
339 }
340
341
342 void CodeGenerator::AssembleArchBoolean(Instruction* instr,
343                                         FlagsCondition condition) {
344   UNIMPLEMENTED();
345 }
346
347
348 void CodeGenerator::AssemblePrologue() { UNIMPLEMENTED(); }
349
350
351 void CodeGenerator::AssembleReturn() { UNIMPLEMENTED(); }
352
353
354 void CodeGenerator::AssembleMove(InstructionOperand* source,
355                                  InstructionOperand* destination) {
356   UNIMPLEMENTED();
357 }
358
359
360 void CodeGenerator::AssembleSwap(InstructionOperand* source,
361                                  InstructionOperand* destination) {
362   UNIMPLEMENTED();
363 }
364
365
366 void CodeGenerator::AddNopForSmiCodeInlining() { UNIMPLEMENTED(); }
367
368
369 #ifdef DEBUG
// Debug-only check used by PopulateDeoptimizationData; never reached on
// backend-less platforms.
370 bool CodeGenerator::IsNopForSmiCodeInlining(Handle<Code> code, int start_pc,
371                                             int end_pc) {
372   UNIMPLEMENTED();
373   return false;
374 }
375 #endif
376
377 #endif  // !V8_TURBOFAN_BACKEND
378
379 }  // namespace compiler
380 }  // namespace internal
381 }  // namespace v8