PROFILE(isolate,
CodeCreateEvent(Logger::BUILTIN_TAG, *code, functions[i].s_name));
builtins_[i] = *code;
+ if (code->kind() == Code::BUILTIN) code->set_builtin_index(i);
#ifdef ENABLE_DISASSEMBLER
if (FLAG_print_builtin_code) {
CodeTracer::Scope trace_scope(isolate->GetCodeTracer());
}
+// Returns the Builtins::Name index recorded for this code object.
+// Valid only for code of kind BUILTIN (checked below); the index is
+// stored in the kind-specific-flags-1 word, which repurposes that
+// slot for builtins (see the [builtin_index] declaration comment).
+int Code::builtin_index() {
+  ASSERT_EQ(BUILTIN, kind());
+  return READ_INT32_FIELD(this, kKindSpecificFlags1Offset);
+}
+
+
+// Records |index| (a Builtins::Name value) on this BUILTIN code object.
+// Counterpart of builtin_index(): writes the kind-specific-flags-1 word.
+// NOTE(review): no range check here — callers are expected to pass a
+// value in [0, Builtins::builtin_count); the serializer asserts this.
+void Code::set_builtin_index(int index) {
+  ASSERT_EQ(BUILTIN, kind());
+  WRITE_INT32_FIELD(this, kKindSpecificFlags1Offset, index);
+}
+
+
unsigned Code::stack_slots() {
ASSERT(is_crankshafted());
return StackSlotsField::decode(
inline int profiler_ticks();
inline void set_profiler_ticks(int ticks);
+ // [builtin_index]: For BUILTIN kind, tells which builtin index it has.
+ inline int builtin_index();
+ inline void set_builtin_index(int id);
+
// [stack_slots]: For kind OPTIMIZED_FUNCTION, the number of stack slots
// reserved in the code prologue.
inline unsigned stack_slots();
SerializeBuiltin(code_object, how_to_code, where_to_point, skip);
return;
}
+ // TODO(yangguo) figure out whether other code kinds can be handled smarter.
}
if (heap_object == source_) {
ASSERT((how_to_code == kPlain && where_to_point == kStartOfObject) ||
(how_to_code == kFromCode && where_to_point == kInnerPointer));
- int id = 0;
- do { // Look for existing builtins in the list.
- Code* b = isolate()->builtins()->builtin(static_cast<Builtins::Name>(id));
- if (builtin == b) break;
- } while (++id < Builtins::builtin_count);
- ASSERT(id < Builtins::builtin_count); // We must have found a one.
-
+ int builtin_index = builtin->builtin_index();
+ ASSERT_LT(builtin_index, Builtins::builtin_count);
+ ASSERT_LE(0, builtin_index);
sink_->Put(kBuiltin + how_to_code + where_to_point, "Builtin");
- sink_->PutInt(id, "builtin_index");
+ sink_->PutInt(builtin_index, "builtin_index");
}