heap_stats.new_space_size = &new_space_size;
int new_space_capacity;
heap_stats.new_space_capacity = &new_space_capacity;
- int old_pointer_space_size;
+ intptr_t old_pointer_space_size;
heap_stats.old_pointer_space_size = &old_pointer_space_size;
- int old_pointer_space_capacity;
+ intptr_t old_pointer_space_capacity;
heap_stats.old_pointer_space_capacity = &old_pointer_space_capacity;
- int old_data_space_size;
+ intptr_t old_data_space_size;
heap_stats.old_data_space_size = &old_data_space_size;
- int old_data_space_capacity;
+ intptr_t old_data_space_capacity;
heap_stats.old_data_space_capacity = &old_data_space_capacity;
- int code_space_size;
+ intptr_t code_space_size;
heap_stats.code_space_size = &code_space_size;
- int code_space_capacity;
+ intptr_t code_space_capacity;
heap_stats.code_space_capacity = &code_space_capacity;
- int map_space_size;
+ intptr_t map_space_size;
heap_stats.map_space_size = &map_space_size;
- int map_space_capacity;
+ intptr_t map_space_capacity;
heap_stats.map_space_capacity = &map_space_capacity;
- int cell_space_size;
+ intptr_t cell_space_size;
heap_stats.cell_space_size = &cell_space_size;
- int cell_space_capacity;
+ intptr_t cell_space_capacity;
heap_stats.cell_space_capacity = &cell_space_capacity;
- int lo_space_size;
+ intptr_t lo_space_size;
heap_stats.lo_space_size = &lo_space_size;
int global_handle_count;
heap_stats.global_handle_count = &global_handle_count;
heap_stats.near_death_global_handle_count = &near_death_global_handle_count;
int destroyed_global_handle_count;
heap_stats.destroyed_global_handle_count = &destroyed_global_handle_count;
- int memory_allocator_size;
+ intptr_t memory_allocator_size;
heap_stats.memory_allocator_size = &memory_allocator_size;
- int memory_allocator_capacity;
+ intptr_t memory_allocator_capacity;
heap_stats.memory_allocator_capacity = &memory_allocator_capacity;
int objects_per_type[LAST_TYPE + 1] = {0};
heap_stats.objects_per_type = objects_per_type;
} else if (GetVFPSingleValue(arg1, &svalue)) {
PrintF("%s: %f \n", arg1, svalue);
} else if (GetVFPDoubleValue(arg1, &dvalue)) {
- PrintF("%s: %lf \n", arg1, dvalue);
+ PrintF("%s: %f \n", arg1, dvalue);
} else {
PrintF("%s unrecognized\n", arg1);
}
end = cur + words;
while (cur < end) {
- PrintF(" 0x%08x: 0x%08x %10d\n", cur, *cur, *cur);
+ PrintF(" 0x%08x: 0x%08x %10d\n",
+ reinterpret_cast<intptr_t>(cur), *cur, *cur);
cur++;
}
} else if (strcmp(cmd, "disasm") == 0) {
while (cur < end) {
dasm.InstructionDecode(buffer, cur);
- PrintF(" 0x%08x %s\n", cur, buffer.start());
+ PrintF(" 0x%08x %s\n",
+ reinterpret_cast<intptr_t>(cur), buffer.start());
cur += Instr::kInstrSize;
}
} else if (strcmp(cmd, "gdb") == 0) {
// Unsupported instructions use Format to print an error and stop execution.
// |instr| is the offending instruction; |format| is the disassembly text.
void Simulator::Format(Instr* instr, const char* format) {
  // Cast the pointer to an integer so it matches the %x conversion.
  // NOTE(review): %08x assumes 32-bit pointers — valid for the 32-bit ARM
  // simulator this lives in, but would truncate on a 64-bit host; confirm.
  PrintF("Simulator found unsupported instruction:\n 0x%08x: %s\n",
         reinterpret_cast<intptr_t>(instr), format);
  UNIMPLEMENTED();
}
v8::internal::EmbeddedVector<char, 256> buffer;
dasm.InstructionDecode(buffer,
reinterpret_cast<byte*>(instr));
- PrintF(" 0x%08x %s\n", instr, buffer.start());
+ PrintF(" 0x%08x %s\n", reinterpret_cast<intptr_t>(instr), buffer.start());
}
if (instr->ConditionField() == special_condition) {
DecodeUnconditional(instr);
void RelocInfo::Print() {
PrintF("%p %s", pc_, RelocModeName(rmode_));
if (IsComment(rmode_)) {
- PrintF(" (%s)", data_);
+ PrintF(" (%s)", reinterpret_cast<char*>(data_));
} else if (rmode_ == EMBEDDED_OBJECT) {
PrintF(" (");
target_object()->ShortPrint();
Code* code = Code::GetCodeFromTargetAddress(target_address());
PrintF(" (%s) (%p)", Code::Kind2String(code->kind()), target_address());
} else if (IsPosition(rmode_)) {
- PrintF(" (%d)", data());
+ PrintF(" (%" V8_PTR_PREFIX "d)", data());
}
PrintF("\n");
do {
if (FLAG_trace_contexts) {
- PrintF(" - looking in context %p", *context);
+ PrintF(" - looking in context %p", reinterpret_cast<void*>(*context));
if (context->IsGlobalContext()) PrintF(" (global context)");
PrintF("\n");
}
if (*attributes != ABSENT) {
// property found
if (FLAG_trace_contexts) {
- PrintF("=> found property in context object %p\n", *extension);
+ PrintF("=> found property in context object %p\n",
+ reinterpret_cast<void*>(*extension));
}
return extension;
}
if (Contains(i)) {
if (!first) PrintF(",");
first = false;
- PrintF("%d");
+ PrintF("%d", i);
}
}
PrintF("}");
void Disassembler::Dump(FILE* f, byte* begin, byte* end) {
for (byte* pc = begin; pc < end; pc++) {
if (f == NULL) {
- PrintF("%" V8PRIxPTR " %4" V8PRIdPTR " %02x\n", pc, pc - begin, *pc);
+ PrintF("%" V8PRIxPTR " %4" V8PRIdPTR " %02x\n",
+ reinterpret_cast<intptr_t>(pc),
+ pc - begin,
+ *pc);
} else {
fprintf(f, "%" V8PRIxPTR " %4" V8PRIdPTR " %02x\n",
reinterpret_cast<uintptr_t>(pc), pc - begin, *pc);
"always inline smi code in non-opt code")
// heap.cc
-DEFINE_int(max_new_space_size, 0, "max size of the new generation")
-DEFINE_int(max_old_space_size, 0, "max size of the old generation")
+DEFINE_int(max_new_space_size, 0, "max size of the new generation (in kBytes)")
+DEFINE_int(max_old_space_size, 0, "max size of the old generation (in Mbytes)")
DEFINE_bool(gc_global, false, "always perform global GCs")
DEFINE_int(gc_interval, -1, "garbage collect after <n> allocations")
DEFINE_bool(trace_gc, false,
}
PrintF("Global Handle Statistics:\n");
- PrintF(" allocated memory = %dB\n", sizeof(Node) * total);
+ PrintF(" allocated memory = %" V8_PTR_PREFIX "dB\n", sizeof(Node) * total);
PrintF(" # weak = %d\n", weak);
PrintF(" # pending = %d\n", pending);
PrintF(" # near_death = %d\n", near_death);
void GlobalHandles::Print() {
PrintF("Global handles:\n");
for (Node* current = head_; current != NULL; current = current->next()) {
- PrintF(" handle %p to %p (weak=%d)\n", current->handle().location(),
- *current->handle(), current->state_ == Node::WEAK);
+ PrintF(" handle %p to %p (weak=%d)\n",
+ reinterpret_cast<void*>(current->handle().location()),
+ reinterpret_cast<void*>(*current->handle()),
+ current->state_ == Node::WEAK);
}
}
namespace internal {
void Heap::UpdateOldSpaceLimits() {
- int old_gen_size = PromotedSpaceSize();
+ intptr_t old_gen_size = PromotedSpaceSize();
old_gen_promotion_limit_ =
old_gen_size + Max(kMinimumPromotionLimit, old_gen_size / 3);
old_gen_allocation_limit_ =
CellSpace* Heap::cell_space_ = NULL;
LargeObjectSpace* Heap::lo_space_ = NULL;
-int Heap::old_gen_promotion_limit_ = kMinimumPromotionLimit;
-int Heap::old_gen_allocation_limit_ = kMinimumAllocationLimit;
+intptr_t Heap::old_gen_promotion_limit_ = kMinimumPromotionLimit;
+intptr_t Heap::old_gen_allocation_limit_ = kMinimumAllocationLimit;
int Heap::old_gen_exhausted_ = false;
// a multiple of Page::kPageSize.
#if defined(ANDROID)
int Heap::max_semispace_size_ = 2*MB;
-int Heap::max_old_generation_size_ = 192*MB;
+intptr_t Heap::max_old_generation_size_ = 192*MB;
int Heap::initial_semispace_size_ = 128*KB;
-size_t Heap::code_range_size_ = 0;
+intptr_t Heap::code_range_size_ = 0;
#elif defined(V8_TARGET_ARCH_X64)
int Heap::max_semispace_size_ = 16*MB;
-int Heap::max_old_generation_size_ = 1*GB;
+intptr_t Heap::max_old_generation_size_ = 1*GB;
int Heap::initial_semispace_size_ = 1*MB;
-size_t Heap::code_range_size_ = 512*MB;
+intptr_t Heap::code_range_size_ = 512*MB;
#else
int Heap::max_semispace_size_ = 8*MB;
-int Heap::max_old_generation_size_ = 512*MB;
+intptr_t Heap::max_old_generation_size_ = 512*MB;
int Heap::initial_semispace_size_ = 512*KB;
-size_t Heap::code_range_size_ = 0;
+intptr_t Heap::code_range_size_ = 0;
#endif
// The snapshot semispace size will be the default semispace size if
// Will be 4 * reserved_semispace_size_ to ensure that young
// generation can be aligned to its size.
int Heap::survived_since_last_expansion_ = 0;
-int Heap::external_allocation_limit_ = 0;
+intptr_t Heap::external_allocation_limit_ = 0;
Heap::HeapState Heap::gc_state_ = NOT_IN_GC;
bool Heap::disallow_allocation_failure_ = false;
#endif // DEBUG
-int GCTracer::alive_after_last_gc_ = 0;
+intptr_t GCTracer::alive_after_last_gc_ = 0;
double GCTracer::last_gc_end_timestamp_ = 0.0;
int GCTracer::max_gc_pause_ = 0;
-int GCTracer::max_alive_after_gc_ = 0;
+intptr_t GCTracer::max_alive_after_gc_ = 0;
int GCTracer::min_in_mutator_ = kMaxInt;
-int Heap::Capacity() {
+intptr_t Heap::Capacity() {
if (!HasBeenSetup()) return 0;
return new_space_.Capacity() +
}
-int Heap::CommittedMemory() {
+intptr_t Heap::CommittedMemory() {
if (!HasBeenSetup()) return 0;
return new_space_.CommittedMemory() +
}
-int Heap::Available() {
+intptr_t Heap::Available() {
if (!HasBeenSetup()) return 0;
return new_space_.Available() +
#if defined(ENABLE_LOGGING_AND_PROFILING)
void Heap::PrintShortHeapStatistics() {
if (!FLAG_trace_gc_verbose) return;
- PrintF("Memory allocator, used: %8d, available: %8d\n",
+ PrintF("Memory allocator, used: %8" V8_PTR_PREFIX "d"
+ ", available: %8" V8_PTR_PREFIX "d\n",
MemoryAllocator::Size(),
MemoryAllocator::Available());
- PrintF("New space, used: %8d, available: %8d\n",
+ PrintF("New space, used: %8" V8_PTR_PREFIX "d"
+ ", available: %8" V8_PTR_PREFIX "d\n",
Heap::new_space_.Size(),
new_space_.Available());
- PrintF("Old pointers, used: %8d, available: %8d, waste: %8d\n",
+ PrintF("Old pointers, used: %8" V8_PTR_PREFIX "d"
+ ", available: %8" V8_PTR_PREFIX "d"
+ ", waste: %8" V8_PTR_PREFIX "d\n",
old_pointer_space_->Size(),
old_pointer_space_->Available(),
old_pointer_space_->Waste());
- PrintF("Old data space, used: %8d, available: %8d, waste: %8d\n",
+ PrintF("Old data space, used: %8" V8_PTR_PREFIX "d"
+ ", available: %8" V8_PTR_PREFIX "d"
+ ", waste: %8" V8_PTR_PREFIX "d\n",
old_data_space_->Size(),
old_data_space_->Available(),
old_data_space_->Waste());
- PrintF("Code space, used: %8d, available: %8d, waste: %8d\n",
+ PrintF("Code space, used: %8" V8_PTR_PREFIX "d"
+ ", available: %8" V8_PTR_PREFIX "d"
+ ", waste: %8" V8_PTR_PREFIX "d\n",
code_space_->Size(),
code_space_->Available(),
code_space_->Waste());
- PrintF("Map space, used: %8d, available: %8d, waste: %8d\n",
+ PrintF("Map space, used: %8" V8_PTR_PREFIX "d"
+ ", available: %8" V8_PTR_PREFIX "d"
+ ", waste: %8" V8_PTR_PREFIX "d\n",
map_space_->Size(),
map_space_->Available(),
map_space_->Waste());
- PrintF("Cell space, used: %8d, available: %8d, waste: %8d\n",
+ PrintF("Cell space, used: %8" V8_PTR_PREFIX "d"
+ ", available: %8" V8_PTR_PREFIX "d"
+ ", waste: %8" V8_PTR_PREFIX "d\n",
cell_space_->Size(),
cell_space_->Available(),
cell_space_->Waste());
- PrintF("Large object space, used: %8d, avaialble: %8d\n",
+ PrintF("Large object space, used: %8" V8_PTR_PREFIX "d"
+ ", available: %8" V8_PTR_PREFIX "d\n",
lo_space_->Size(),
lo_space_->Available());
}
#endif
}
-int Heap::SizeOfObjects() {
- int total = 0;
+intptr_t Heap::SizeOfObjects() {
+ intptr_t total = 0;
AllSpaces spaces;
for (Space* space = spaces.next(); space != NULL; space = spaces.next()) {
total += space->Size();
if (FLAG_code_stats) ReportCodeStatistics("After GC");
#endif
- Counters::alive_after_last_gc.Set(SizeOfObjects());
+ Counters::alive_after_last_gc.Set(static_cast<int>(SizeOfObjects()));
Counters::symbol_table_capacity.Set(symbol_table()->Capacity());
Counters::number_of_symbols.Set(symbol_table()->NumberOfElements());
EnsureFromSpaceIsCommitted();
- int start_new_space_size = Heap::new_space()->Size();
+ int start_new_space_size = Heap::new_space()->SizeAsInt();
if (collector == MARK_COMPACTOR) {
// Perform mark-sweep with optional compaction.
DescriptorLookupCache::Clear();
// Used for updating survived_since_last_expansion_ at function end.
- int survived_watermark = PromotedSpaceSize();
+ intptr_t survived_watermark = PromotedSpaceSize();
CheckNewSpaceExpansionCriteria();
new_space_.set_age_mark(new_space_.top());
// Update how much has survived scavenge.
- IncrementYoungSurvivorsCounter(
- (PromotedSpaceSize() - survived_watermark) + new_space_.Size());
+ IncrementYoungSurvivorsCounter(static_cast<int>(
+ (PromotedSpaceSize() - survived_watermark) + new_space_.Size()));
LOG(ResourceEvent("scavenge", "end"));
PrintF(">>>>>> =============== %s (%d) =============== >>>>>>\n",
title, gc_count_);
PrintF("mark-compact GC : %d\n", mc_count_);
- PrintF("old_gen_promotion_limit_ %d\n", old_gen_promotion_limit_);
- PrintF("old_gen_allocation_limit_ %d\n", old_gen_allocation_limit_);
+ PrintF("old_gen_promotion_limit_ %" V8_PTR_PREFIX "d\n",
+ old_gen_promotion_limit_);
+ PrintF("old_gen_allocation_limit_ %" V8_PTR_PREFIX "d\n",
+ old_gen_allocation_limit_);
PrintF("\n");
PrintF("Number of handles : %d\n", HandleScope::NumberOfHandles());
bool Heap::ConfigureHeapDefault() {
- return ConfigureHeap(FLAG_max_new_space_size / 2, FLAG_max_old_space_size);
+ return ConfigureHeap(
+ FLAG_max_new_space_size * (KB / 2), FLAG_max_old_space_size * MB);
}
void Heap::RecordStats(HeapStats* stats, bool take_snapshot) {
*stats->start_marker = HeapStats::kStartMarker;
*stats->end_marker = HeapStats::kEndMarker;
- *stats->new_space_size = new_space_.Size();
- *stats->new_space_capacity = new_space_.Capacity();
+ *stats->new_space_size = new_space_.SizeAsInt();
+ *stats->new_space_capacity = static_cast<int>(new_space_.Capacity());
*stats->old_pointer_space_size = old_pointer_space_->Size();
*stats->old_pointer_space_capacity = old_pointer_space_->Capacity();
*stats->old_data_space_size = old_data_space_->Size();
}
-int Heap::PromotedSpaceSize() {
+intptr_t Heap::PromotedSpaceSize() {
return old_pointer_space_->Size()
+ old_data_space_->Size()
+ code_space_->Size()
if (!CreateInitialObjects()) return false;
}
- LOG(IntEvent("heap-capacity", Capacity()));
- LOG(IntEvent("heap-available", Available()));
+ LOG(IntPtrTEvent("heap-capacity", Capacity()));
+ LOG(IntPtrTEvent("heap-available", Available()));
#ifdef ENABLE_LOGGING_AND_PROFILING
// This should be called only after initial objects have been created.
PrintF("mark_compact_count=%d ", mc_count_);
PrintF("max_gc_pause=%d ", GCTracer::get_max_gc_pause());
PrintF("min_in_mutator=%d ", GCTracer::get_min_in_mutator());
- PrintF("max_alive_after_gc=%d ", GCTracer::get_max_alive_after_gc());
+ PrintF("max_alive_after_gc=%" V8_PTR_PREFIX "d ",
+ GCTracer::get_max_alive_after_gc());
PrintF("\n\n");
}
public:
void VisitPointers(Object** start, Object** end) {
for (Object** p = start; p < end; p++)
- PrintF(" handle %p to %p\n", p, *p);
+ PrintF(" handle %p to %p\n",
+ reinterpret_cast<void*>(p),
+ reinterpret_cast<void*>(*p));
}
};
#endif
-static int CountTotalHolesSize() {
- int holes_size = 0;
+static intptr_t CountTotalHolesSize() {
+ intptr_t holes_size = 0;
OldSpaces spaces;
for (OldSpace* space = spaces.next();
space != NULL;
PrintF("sweepns=%d ", static_cast<int>(scopes_[Scope::MC_SWEEP_NEWSPACE]));
PrintF("compact=%d ", static_cast<int>(scopes_[Scope::MC_COMPACT]));
- PrintF("total_size_before=%d ", start_size_);
- PrintF("total_size_after=%d ", Heap::SizeOfObjects());
- PrintF("holes_size_before=%d ", in_free_list_or_wasted_before_gc_);
- PrintF("holes_size_after=%d ", CountTotalHolesSize());
+ PrintF("total_size_before=%" V8_PTR_PREFIX "d ", start_size_);
+ PrintF("total_size_after=%" V8_PTR_PREFIX "d ", Heap::SizeOfObjects());
+ PrintF("holes_size_before=%" V8_PTR_PREFIX "d ",
+ in_free_list_or_wasted_before_gc_);
+ PrintF("holes_size_after=%" V8_PTR_PREFIX "d ", CountTotalHolesSize());
- PrintF("allocated=%d ", allocated_since_last_gc_);
- PrintF("promoted=%d ", promoted_objects_size_);
+ PrintF("allocated=%" V8_PTR_PREFIX "d ", allocated_since_last_gc_);
+ PrintF("promoted=%" V8_PTR_PREFIX "d ", promoted_objects_size_);
PrintF("\n");
}
// semi space. The young generation consists of two semi spaces and
// we reserve twice the amount needed for those in order to ensure
// that new space can be aligned to its size.
- static int MaxReserved() {
+ static intptr_t MaxReserved() {
return 4 * reserved_semispace_size_ + max_old_generation_size_;
}
static int MaxSemiSpaceSize() { return max_semispace_size_; }
static int ReservedSemiSpaceSize() { return reserved_semispace_size_; }
static int InitialSemiSpaceSize() { return initial_semispace_size_; }
- static int MaxOldGenerationSize() { return max_old_generation_size_; }
+ static intptr_t MaxOldGenerationSize() { return max_old_generation_size_; }
// Returns the capacity of the heap in bytes w/o growing. Heap grows when
// more spaces are needed until it reaches the limit.
- static int Capacity();
+ static intptr_t Capacity();
// Returns the amount of memory currently committed for the heap.
- static int CommittedMemory();
+ static intptr_t CommittedMemory();
// Returns the available bytes in space w/o growing.
// Heap doesn't guarantee that it can allocate an object that requires
// all available bytes. Check MaxHeapObjectSize() instead.
- static int Available();
+ static intptr_t Available();
// Returns the maximum object size in paged space.
static inline int MaxObjectSizeInPagedSpace();
// Returns of size of all objects residing in the heap.
- static int SizeOfObjects();
+ static intptr_t SizeOfObjects();
// Return the starting address and a mask for the new space. And-masking an
// address with the mask will result in the start address of the new space
static int reserved_semispace_size_;
static int max_semispace_size_;
static int initial_semispace_size_;
- static int max_old_generation_size_;
- static size_t code_range_size_;
+ static intptr_t max_old_generation_size_;
+ static intptr_t code_range_size_;
// For keeping track of how much data has survived
// scavenge since last new space expansion.
static HeapState gc_state_;
// Returns the size of object residing in non new spaces.
- static int PromotedSpaceSize();
+ static intptr_t PromotedSpaceSize();
// Returns the amount of external memory registered since last global gc.
static int PromotedExternalMemorySize();
// Limit that triggers a global GC on the next (normally caused) GC. This
// is checked when we have already decided to do a GC to help determine
// which collector to invoke.
- static int old_gen_promotion_limit_;
+ static intptr_t old_gen_promotion_limit_;
// Limit that triggers a global GC as soon as is reasonable. This is
// checked before expanding a paged space in the old generation and on
// every allocation in large object space.
- static int old_gen_allocation_limit_;
+ static intptr_t old_gen_allocation_limit_;
// Limit on the amount of externally allocated memory allowed
// between global GCs. If reached a global GC is forced.
- static int external_allocation_limit_;
+ static intptr_t external_allocation_limit_;
// The amount of external memory registered through the API kept alive
// by global handles
GCTracer* tracer,
CollectionPolicy collectionPolicy);
- static const int kMinimumPromotionLimit = 2 * MB;
- static const int kMinimumAllocationLimit = 8 * MB;
+ static const intptr_t kMinimumPromotionLimit = 2 * MB;
+ static const intptr_t kMinimumAllocationLimit = 8 * MB;
inline static void UpdateOldSpaceLimits();
int* start_marker; // 0
int* new_space_size; // 1
int* new_space_capacity; // 2
- int* old_pointer_space_size; // 3
- int* old_pointer_space_capacity; // 4
- int* old_data_space_size; // 5
- int* old_data_space_capacity; // 6
- int* code_space_size; // 7
- int* code_space_capacity; // 8
- int* map_space_size; // 9
- int* map_space_capacity; // 10
- int* cell_space_size; // 11
- int* cell_space_capacity; // 12
- int* lo_space_size; // 13
+ intptr_t* old_pointer_space_size; // 3
+ intptr_t* old_pointer_space_capacity; // 4
+ intptr_t* old_data_space_size; // 5
+ intptr_t* old_data_space_capacity; // 6
+ intptr_t* code_space_size; // 7
+ intptr_t* code_space_capacity; // 8
+ intptr_t* map_space_size; // 9
+ intptr_t* map_space_capacity; // 10
+ intptr_t* cell_space_size; // 11
+ intptr_t* cell_space_capacity; // 12
+ intptr_t* lo_space_size; // 13
int* global_handle_count; // 14
int* weak_global_handle_count; // 15
int* pending_global_handle_count; // 16
int* near_death_global_handle_count; // 17
int* destroyed_global_handle_count; // 18
- int* memory_allocator_size; // 19
- int* memory_allocator_capacity; // 20
+ intptr_t* memory_allocator_size; // 19
+ intptr_t* memory_allocator_capacity; // 20
int* objects_per_type; // 21
int* size_per_type; // 22
int* os_error; // 23
static int get_max_gc_pause() { return max_gc_pause_; }
// Returns maximum size of objects alive after GC.
- static int get_max_alive_after_gc() { return max_alive_after_gc_; }
+ static intptr_t get_max_alive_after_gc() { return max_alive_after_gc_; }
// Returns minimal interval between two subsequent collections.
static int get_min_in_mutator() { return min_in_mutator_; }
}
double start_time_; // Timestamp set in the constructor.
- int start_size_; // Size of objects in heap set in constructor.
+ intptr_t start_size_; // Size of objects in heap set in constructor.
GarbageCollector collector_; // Type of collector.
// A count (including this one, eg, the first collection is 1) of the
// Total amount of space either wasted or contained in one of free lists
// before the current GC.
- int in_free_list_or_wasted_before_gc_;
+ intptr_t in_free_list_or_wasted_before_gc_;
// Difference between space used in the heap at the beginning of the current
// collection and the end of the previous collection.
- int allocated_since_last_gc_;
+ intptr_t allocated_since_last_gc_;
// Amount of time spent in mutator that is time elapsed between end of the
// previous collection and the beginning of the current one.
double spent_in_mutator_;
// Size of objects promoted during the current collection.
- int promoted_objects_size_;
+ intptr_t promoted_objects_size_;
// Maximum GC pause.
static int max_gc_pause_;
// Maximum size of objects alive after GC.
- static int max_alive_after_gc_;
+ static intptr_t max_alive_after_gc_;
// Minimal interval between two subsequent collections.
static int min_in_mutator_;
// Size of objects alive after last GC.
- static int alive_after_last_gc_;
+ static intptr_t alive_after_last_gc_;
static double last_gc_end_timestamp_;
};
}
+void Logger::IntPtrTEvent(const char* name, intptr_t value) {
+#ifdef ENABLE_LOGGING_AND_PROFILING
+ if (FLAG_log) UncheckedIntPtrTEvent(name, value);
+#endif
+}
+
+
#ifdef ENABLE_LOGGING_AND_PROFILING
void Logger::UncheckedIntEvent(const char* name, int value) {
if (!Log::IsEnabled()) return;
#endif
#ifdef ENABLE_LOGGING_AND_PROFILING
// Writes "name,value" to the log regardless of FLAG_log (mirrors
// UncheckedIntEvent); bails out early if the log itself is disabled.
void Logger::UncheckedIntPtrTEvent(const char* name, intptr_t value) {
  if (!Log::IsEnabled()) return;
  LogMessageBuilder msg;
  // V8_PTR_PREFIX selects the correct length modifier for intptr_t
  // on both 32-bit and 64-bit targets.
  msg.Append("%s,%" V8_PTR_PREFIX "d\n", name, value);
  msg.WriteToLogFile();
}
#endif
+
+
void Logger::HandleEvent(const char* name, Object** location) {
#ifdef ENABLE_LOGGING_AND_PROFILING
if (!Log::IsEnabled() || !FLAG_log_handles) return;
void Logger::HeapSampleStats(const char* space, const char* kind,
- int capacity, int used) {
+ intptr_t capacity, intptr_t used) {
#ifdef ENABLE_LOGGING_AND_PROFILING
if (!Log::IsEnabled() || !FLAG_log_gc) return;
LogMessageBuilder msg;
- msg.Append("heap-sample-stats,\"%s\",\"%s\",%d,%d\n",
+ msg.Append("heap-sample-stats,\"%s\",\"%s\","
+ "%" V8_PTR_PREFIX "d,%" V8_PTR_PREFIX "d\n",
space, kind, capacity, used);
msg.WriteToLogFile();
#endif
// Emits an event with an int value -> (name, value).
static void IntEvent(const char* name, int value);
+ static void IntPtrTEvent(const char* name, intptr_t value);
// Emits an event with an handle value -> (name, location).
static void HandleEvent(const char* name, Object** location);
static void HeapSampleJSProducerEvent(const char* constructor,
Address* stack);
static void HeapSampleStats(const char* space, const char* kind,
- int capacity, int used);
+ intptr_t capacity, intptr_t used);
static void SharedLibraryEvent(const char* library_path,
uintptr_t start,
// Logs an IntEvent regardless of whether FLAG_log is true.
static void UncheckedIntEvent(const char* name, int value);
+ static void UncheckedIntPtrTEvent(const char* name, intptr_t value);
// Stops logging and profiling in case of insufficient resources.
static void StopLoggingAndProfiling();
// reclaiming the waste and free list blocks).
static const int kFragmentationLimit = 15; // Percent.
static const int kFragmentationAllowed = 1 * MB; // Absolute.
- int old_gen_recoverable = 0;
- int old_gen_used = 0;
+ intptr_t old_gen_recoverable = 0;
+ intptr_t old_gen_used = 0;
OldSpaces spaces;
for (OldSpace* space = spaces.next(); space != NULL; space = spaces.next()) {
#ifdef DEBUG
if (FLAG_gc_verbose) {
- PrintF("update %p : %p -> %p\n", obj->address(),
- map, new_map);
+ PrintF("update %p : %p -> %p\n",
+ obj->address(),
+ reinterpret_cast<void*>(map),
+ reinterpret_cast<void*>(new_map));
}
#endif
}
&UpdatePointerToNewGen,
Heap::WATERMARK_SHOULD_BE_VALID);
- int live_maps_size = Heap::map_space()->Size();
- int live_maps = live_maps_size / Map::kSize;
+ intptr_t live_maps_size = Heap::map_space()->Size();
+ int live_maps = static_cast<int>(live_maps_size / Map::kSize);
ASSERT(live_map_objects_size_ == live_maps_size);
if (Heap::map_space()->NeedsCompaction(live_maps)) {
// Prints "address: [id]" for this object; the explicit void* cast keeps
// %p well-defined (passing an object pointer to %p is technically UB).
void HeapObject::PrintHeader(const char* id) {
  PrintF("%p: [%s]\n", reinterpret_cast<void*>(this), id);
}
void JSObject::JSObjectPrint() {
- PrintF("%p: [JSObject]\n", this);
- PrintF(" - map = %p\n", map());
- PrintF(" - prototype = %p\n", GetPrototype());
+ PrintF("%p: [JSObject]\n", reinterpret_cast<void*>(this));
+ PrintF(" - map = %p\n", reinterpret_cast<void*>(map()));
+ PrintF(" - prototype = %p\n", reinterpret_cast<void*>(GetPrototype()));
PrintF(" {\n");
PrintProperties();
PrintElements();
void JSFunction::JSFunctionPrint() {
HeapObject::PrintHeader("Function");
- PrintF(" - map = 0x%p\n", map());
+ PrintF(" - map = 0x%p\n", reinterpret_cast<void*>(map()));
PrintF(" - initial_map = ");
if (has_initial_map()) {
initial_map()->ShortPrint();
// Debug-prints this BreakPointInfo: the three position fields followed by
// the short-printed break point objects.
void BreakPointInfo::BreakPointInfoPrint() {
  HeapObject::PrintHeader("BreakPointInfo");
  // The position accessors return boxed values; ->value() unwraps them to
  // plain ints so they match the %d conversion.
  PrintF("\n - code_position: %d", code_position()->value());
  PrintF("\n - source_position: %d", source_position()->value());
  PrintF("\n - statement_position: %d", statement_position()->value());
  PrintF("\n - break_point_objects: ");
  break_point_objects()->ShortPrint();
}
// Copyright 2006-2010 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
ScriptDataImpl(const char* backing_store, int length)
: store_(reinterpret_cast<unsigned*>(const_cast<char*>(backing_store)),
- length / sizeof(unsigned)),
+ length / static_cast<int>(sizeof(unsigned))),
owns_store_(false) {
- ASSERT_EQ(0, reinterpret_cast<intptr_t>(backing_store) % sizeof(unsigned));
+ ASSERT_EQ(0, static_cast<int>(
+ reinterpret_cast<intptr_t>(backing_store) % sizeof(unsigned)));
}
// Read strings written by ParserRecorder::WriteString.
}
+// This is used for printing out debugging information. It makes an integer
+// that is closely related to the address of an object.
+static int LabelToInt(Label* label) {
+ return static_cast<int>(reinterpret_cast<intptr_t>(label));
+}
+
+
// Traces the Bind call, then forwards it to the wrapped assembler.
void RegExpMacroAssemblerTracer::Bind(Label* label) {
  PrintF("label[%08x]: (Bind)\n", LabelToInt(label));
  assembler_->Bind(label);
}
// Traces the CheckGreedyLoop call, then forwards it to the wrapped assembler.
void RegExpMacroAssemblerTracer::CheckGreedyLoop(Label* label) {
  PrintF(" CheckGreedyLoop(label[%08x]);\n\n", LabelToInt(label));
  assembler_->CheckGreedyLoop(label);
}
// Traces the GoTo call, then forwards it to the wrapped assembler.
void RegExpMacroAssemblerTracer::GoTo(Label* label) {
  PrintF(" GoTo(label[%08x]);\n\n", LabelToInt(label));
  assembler_->GoTo(label);
}
// Traces the PushBacktrack call, then forwards it to the wrapped assembler.
void RegExpMacroAssemblerTracer::PushBacktrack(Label* label) {
  PrintF(" PushBacktrack(label[%08x]);\n", LabelToInt(label));
  assembler_->PushBacktrack(label);
}
const char* check_msg = check_bounds ? "" : " (unchecked)";
PrintF(" LoadCurrentCharacter(cp_offset=%d, label[%08x]%s (%d chars));\n",
cp_offset,
- on_end_of_input,
+ LabelToInt(on_end_of_input),
check_msg,
characters);
assembler_->LoadCurrentCharacter(cp_offset,
// Traces the CheckCharacterLT call, then forwards it to the wrapped assembler.
void RegExpMacroAssemblerTracer::CheckCharacterLT(uc16 limit, Label* on_less) {
  PrintF(" CheckCharacterLT(c='u%04x', label[%08x]);\n",
         limit, LabelToInt(on_less));
  assembler_->CheckCharacterLT(limit, on_less);
}
// Traces the CheckCharacterGT call, then forwards it to the wrapped assembler.
void RegExpMacroAssemblerTracer::CheckCharacterGT(uc16 limit,
                                                  Label* on_greater) {
  PrintF(" CheckCharacterGT(c='u%04x', label[%08x]);\n",
         limit, LabelToInt(on_greater));
  assembler_->CheckCharacterGT(limit, on_greater);
}
// Traces the CheckCharacter call, then forwards it to the wrapped assembler.
void RegExpMacroAssemblerTracer::CheckCharacter(uint32_t c, Label* on_equal) {
  PrintF(" CheckCharacter(c='u%04x', label[%08x]);\n",
         c, LabelToInt(on_equal));
  assembler_->CheckCharacter(c, on_equal);
}
// Traces the CheckAtStart call, then forwards it to the wrapped assembler.
void RegExpMacroAssemblerTracer::CheckAtStart(Label* on_at_start) {
  PrintF(" CheckAtStart(label[%08x]);\n", LabelToInt(on_at_start));
  assembler_->CheckAtStart(on_at_start);
}
// Traces the CheckNotAtStart call, then forwards it to the wrapped assembler.
void RegExpMacroAssemblerTracer::CheckNotAtStart(Label* on_not_at_start) {
  PrintF(" CheckNotAtStart(label[%08x]);\n", LabelToInt(on_not_at_start));
  assembler_->CheckNotAtStart(on_not_at_start);
}
// Traces the CheckNotCharacter call, then forwards it to the wrapped
// assembler.
void RegExpMacroAssemblerTracer::CheckNotCharacter(uint32_t c,
                                                   Label* on_not_equal) {
  PrintF(" CheckNotCharacter(c='u%04x', label[%08x]);\n",
         c, LabelToInt(on_not_equal));
  assembler_->CheckNotCharacter(c, on_not_equal);
}
PrintF(" CheckCharacterAfterAnd(c='u%04x', mask=0x%04x, label[%08x]);\n",
c,
mask,
- on_equal);
+ LabelToInt(on_equal));
assembler_->CheckCharacterAfterAnd(c, mask, on_equal);
}
PrintF(" CheckNotCharacterAfterAnd(c='u%04x', mask=0x%04x, label[%08x]);\n",
c,
mask,
- on_not_equal);
+ LabelToInt(on_not_equal));
assembler_->CheckNotCharacterAfterAnd(c, mask, on_not_equal);
}
c,
minus,
mask,
- on_not_equal);
+ LabelToInt(on_not_equal));
assembler_->CheckNotCharacterAfterMinusAnd(c, minus, mask, on_not_equal);
}
// Traces the CheckNotBackReference call, then forwards it to the wrapped
// assembler.
void RegExpMacroAssemblerTracer::CheckNotBackReference(int start_reg,
                                                       Label* on_no_match) {
  PrintF(" CheckNotBackReference(register=%d, label[%08x]);\n", start_reg,
         LabelToInt(on_no_match));
  assembler_->CheckNotBackReference(start_reg, on_no_match);
}
int start_reg,
Label* on_no_match) {
PrintF(" CheckNotBackReferenceIgnoreCase(register=%d, label[%08x]);\n",
- start_reg, on_no_match);
+ start_reg, LabelToInt(on_no_match));
assembler_->CheckNotBackReferenceIgnoreCase(start_reg, on_no_match);
}
PrintF(" CheckNotRegistersEqual(reg1=%d, reg2=%d, label[%08x]);\n",
reg1,
reg2,
- on_not_equal);
+ LabelToInt(on_not_equal));
assembler_->CheckNotRegistersEqual(reg1, reg2, on_not_equal);
}
for (int i = 0; i < str.length(); i++) {
PrintF("u%04x", str[i]);
}
- PrintF("\", cp_offset=%d, label[%08x])\n", cp_offset, on_failure);
+ PrintF("\", cp_offset=%d, label[%08x])\n",
+ cp_offset, LabelToInt(on_failure));
assembler_->CheckCharacters(str, cp_offset, on_failure, check_end_of_string);
}
on_no_match);
PrintF(" CheckSpecialCharacterClass(type='%c', label[%08x]): %s;\n",
type,
- on_no_match,
+ LabelToInt(on_no_match),
supported ? "true" : "false");
return supported;
}
// Traces the IfRegisterLT call, then forwards it to the wrapped assembler.
void RegExpMacroAssemblerTracer::IfRegisterLT(int register_index,
                                              int comparand, Label* if_lt) {
  PrintF(" IfRegisterLT(register=%d, number=%d, label[%08x]);\n",
         register_index, comparand, LabelToInt(if_lt));
  assembler_->IfRegisterLT(register_index, comparand, if_lt);
}
void RegExpMacroAssemblerTracer::IfRegisterEqPos(int register_index,
Label* if_eq) {
PrintF(" IfRegisterEqPos(register=%d, label[%08x]);\n",
- register_index, if_eq);
+ register_index, LabelToInt(if_eq));
assembler_->IfRegisterEqPos(register_index, if_eq);
}
void RegExpMacroAssemblerTracer::IfRegisterGE(int register_index,
int comparand, Label* if_ge) {
PrintF(" IfRegisterGE(register=%d, number=%d, label[%08x]);\n",
- register_index, comparand, if_ge);
+ register_index, comparand, LabelToInt(if_ge));
assembler_->IfRegisterGE(register_index, comparand, if_ge);
}
} else if (obj->IsFalse()) {
PrintF("<false>");
} else {
- PrintF("%p", obj);
+ PrintF("%p", reinterpret_cast<void*>(obj));
}
}
// -----------------------------------------------------------------------------
// MemoryAllocator
//
-int MemoryAllocator::capacity_ = 0;
-int MemoryAllocator::size_ = 0;
-int MemoryAllocator::size_executable_ = 0;
+intptr_t MemoryAllocator::capacity_ = 0;
+intptr_t MemoryAllocator::size_ = 0;
+intptr_t MemoryAllocator::size_executable_ = 0;
List<MemoryAllocator::MemoryAllocationCallbackRegistration>
MemoryAllocator::memory_allocation_callbacks_;
}
-bool MemoryAllocator::Setup(int capacity) {
+bool MemoryAllocator::Setup(intptr_t capacity) {
capacity_ = RoundUp(capacity, Page::kPageSize);
// Over-estimate the size of chunks_ array. It assumes the expansion of old
//
// Reserve two chunk ids for semispaces, one for map space, one for old
// space, and one for code space.
- max_nof_chunks_ = (capacity_ / (kChunkSize - Page::kPageSize)) + 5;
+ max_nof_chunks_ =
+ static_cast<int>((capacity_ / (kChunkSize - Page::kPageSize))) + 5;
if (max_nof_chunks_ > kMaxNofChunks) return false;
size_ = 0;
#ifdef DEBUG
void MemoryAllocator::ReportStatistics() {
float pct = static_cast<float>(capacity_ - size_) / capacity_;
- PrintF(" capacity: %d, used: %d, available: %%%d\n\n",
+ PrintF(" capacity: %" V8_PTR_PREFIX "d"
+ ", used: %" V8_PTR_PREFIX "d"
+ ", available: %%%d\n\n",
capacity_, size_, static_cast<int>(pct*100));
}
#endif
// -----------------------------------------------------------------------------
// PagedSpace implementation
-PagedSpace::PagedSpace(int max_capacity,
+PagedSpace::PagedSpace(intptr_t max_capacity,
AllocationSpace id,
Executability executable)
: Space(id, executable) {
Page::kPageSize * pages_in_chunk,
this, &num_pages);
} else {
- int requested_pages = Min(MemoryAllocator::kPagesPerChunk,
- max_capacity_ / Page::kObjectAreaSize);
+ int requested_pages =
+ Min(MemoryAllocator::kPagesPerChunk,
+ static_cast<int>(max_capacity_ / Page::kObjectAreaSize));
first_page_ =
MemoryAllocator::AllocatePages(requested_pages, &num_pages, this);
if (!first_page_->is_valid()) return false;
// Last page must be valid and its next page is invalid.
ASSERT(last_page->is_valid() && !last_page->next_page()->is_valid());
- int available_pages = (max_capacity_ - Capacity()) / Page::kObjectAreaSize;
+ int available_pages =
+ static_cast<int>((max_capacity_ - Capacity()) / Page::kObjectAreaSize);
if (available_pages <= 0) return false;
int desired_pages = Min(available_pages, MemoryAllocator::kPagesPerChunk);
void NewSpace::Shrink() {
- int new_capacity = Max(InitialCapacity(), 2 * Size());
+ int new_capacity = Max(InitialCapacity(), 2 * SizeAsInt());
int rounded_new_capacity =
RoundUp(new_capacity, static_cast<int>(OS::AllocateAlignment()));
if (rounded_new_capacity < Capacity() &&
#ifdef DEBUG
if (FLAG_heap_stats) {
float pct = static_cast<float>(Available()) / Capacity();
- PrintF(" capacity: %d, available: %d, %%%d\n",
+ PrintF(" capacity: %" V8_PTR_PREFIX "d"
+ ", available: %" V8_PTR_PREFIX "d, %%%d\n",
Capacity(), Available(), static_cast<int>(pct*100));
PrintF("\n Object Histogram:\n");
for (int i = 0; i <= LAST_TYPE; i++) {
void OldSpace::ReportStatistics() {
- int pct = Available() * 100 / Capacity();
- PrintF(" capacity: %d, waste: %d, available: %d, %%%d\n",
+ int pct = static_cast<int>(Available() * 100 / Capacity());
+ PrintF(" capacity: %" V8_PTR_PREFIX "d"
+ ", waste: %" V8_PTR_PREFIX "d"
+ ", available: %" V8_PTR_PREFIX "d, %%%d\n",
Capacity(), Waste(), Available(), pct);
ClearHistograms();
#ifdef DEBUG
void FixedSpace::ReportStatistics() {
- int pct = Available() * 100 / Capacity();
- PrintF(" capacity: %d, waste: %d, available: %d, %%%d\n",
+ int pct = static_cast<int>(Available() * 100 / Capacity());
+ PrintF(" capacity: %" V8_PTR_PREFIX "d"
+ ", waste: %" V8_PTR_PREFIX "d"
+ ", available: %" V8_PTR_PREFIX "d, %%%d\n",
Capacity(), Waste(), Available(), pct);
ClearHistograms();
void LargeObjectSpace::ReportStatistics() {
- PrintF(" size: %d\n", size_);
+ PrintF(" size: %" V8_PTR_PREFIX "d\n", size_);
int num_objects = 0;
ClearHistograms();
LargeObjectIterator it(this);
// Identity used in error reporting.
AllocationSpace identity() { return id_; }
- virtual int Size() = 0;
+ virtual intptr_t Size() = 0;
#ifdef ENABLE_HEAP_PROTECTION
// Protect/unprotect the space by marking it read-only/writable.
public:
// Initializes its internal bookkeeping structures.
// Max capacity of the total space.
- static bool Setup(int max_capacity);
+ static bool Setup(intptr_t max_capacity);
// Deletes valid chunks.
static void TearDown();
MemoryAllocationCallback callback);
// Returns the maximum available bytes of heaps.
- static int Available() { return capacity_ < size_ ? 0 : capacity_ - size_; }
+ static intptr_t Available() {
+ return capacity_ < size_ ? 0 : capacity_ - size_;
+ }
// Returns allocated spaces in bytes.
- static int Size() { return size_; }
+ static intptr_t Size() { return size_; }
// Returns allocated executable spaces in bytes.
- static int SizeExecutable() { return size_executable_; }
+ static intptr_t SizeExecutable() { return size_executable_; }
// Returns maximum available bytes that the old space can have.
- static int MaxAvailable() {
+ static intptr_t MaxAvailable() {
return (Available() / Page::kPageSize) * Page::kObjectAreaSize;
}
private:
// Maximum space size in bytes.
- static int capacity_;
+ static intptr_t capacity_;
// Allocated space size in bytes.
- static int size_;
+ static intptr_t size_;
// Allocated executable space size in bytes.
- static int size_executable_;
+ static intptr_t size_executable_;
struct MemoryAllocationCallbackRegistration {
MemoryAllocationCallbackRegistration(MemoryAllocationCallback callback,
}
// Accessors for the allocation statistics.
- int Capacity() { return capacity_; }
- int Available() { return available_; }
- int Size() { return size_; }
- int Waste() { return waste_; }
+ intptr_t Capacity() { return capacity_; }
+ intptr_t Available() { return available_; }
+ intptr_t Size() { return size_; }
+ intptr_t Waste() { return waste_; }
// Grow the space by adding available bytes.
void ExpandSpace(int size_in_bytes) {
}
// Allocate from available bytes (available -> size).
- void AllocateBytes(int size_in_bytes) {
+ void AllocateBytes(intptr_t size_in_bytes) {
available_ -= size_in_bytes;
size_ += size_in_bytes;
}
// Free allocated bytes, making them available (size -> available).
- void DeallocateBytes(int size_in_bytes) {
+ void DeallocateBytes(intptr_t size_in_bytes) {
size_ -= size_in_bytes;
available_ += size_in_bytes;
}
// Consider the wasted bytes to be allocated, as they contain filler
// objects (waste -> size).
- void FillWastedBytes(int size_in_bytes) {
+ void FillWastedBytes(intptr_t size_in_bytes) {
waste_ -= size_in_bytes;
size_ += size_in_bytes;
}
private:
- int capacity_;
- int available_;
- int size_;
- int waste_;
+ intptr_t capacity_;
+ intptr_t available_;
+ intptr_t size_;
+ intptr_t waste_;
};
class PagedSpace : public Space {
public:
// Creates a space with a maximum capacity, and an id.
- PagedSpace(int max_capacity, AllocationSpace id, Executability executable);
+ PagedSpace(intptr_t max_capacity,
+ AllocationSpace id,
+ Executability executable);
virtual ~PagedSpace() {}
}
// Current capacity without growing (Size() + Available() + Waste()).
- int Capacity() { return accounting_stats_.Capacity(); }
+ intptr_t Capacity() { return accounting_stats_.Capacity(); }
// Total amount of memory committed for this space. For paged
// spaces this equals the capacity.
- int CommittedMemory() { return Capacity(); }
+ intptr_t CommittedMemory() { return Capacity(); }
// Available bytes without growing.
- int Available() { return accounting_stats_.Available(); }
+ intptr_t Available() { return accounting_stats_.Available(); }
// Allocated bytes in this space.
- virtual int Size() { return accounting_stats_.Size(); }
+ virtual intptr_t Size() { return accounting_stats_.Size(); }
// Wasted bytes due to fragmentation and not recoverable until the
// next GC of this space.
- int Waste() { return accounting_stats_.Waste(); }
+ intptr_t Waste() { return accounting_stats_.Waste(); }
// Returns the address of the first object in this space.
Address bottom() { return first_page_->ObjectAreaStart(); }
protected:
// Maximum capacity of this space.
- int max_capacity_;
+ intptr_t max_capacity_;
// Accounting information for this space.
AllocationStats accounting_stats_;
// If we don't have these here then SemiSpace will be abstract. However
// they should never be called.
- virtual int Size() {
+ virtual intptr_t Size() {
UNREACHABLE();
return 0;
}
}
// Return the allocated bytes in the active semispace.
- virtual int Size() { return static_cast<int>(top() - bottom()); }
+ virtual intptr_t Size() { return static_cast<int>(top() - bottom()); }
+ // The same, but returning an int. We have to have the one that returns
+ // intptr_t because it is inherited, but if we know we are dealing with the
+ // new space, which can't get as big as the other spaces then this is useful:
+ int SizeAsInt() { return static_cast<int>(Size()); }
// Return the current capacity of a semispace.
- int Capacity() {
+ intptr_t Capacity() {
ASSERT(to_space_.Capacity() == from_space_.Capacity());
return to_space_.Capacity();
}
// Return the total amount of memory committed for new space.
- int CommittedMemory() {
+ intptr_t CommittedMemory() {
if (from_space_.is_committed()) return 2 * Capacity();
return Capacity();
}
// Return the available bytes without growing in the active semispace.
- int Available() { return Capacity() - Size(); }
+ intptr_t Available() { return Capacity() - Size(); }
// Return the maximum capacity of a semispace.
int MaximumCapacity() {
void Reset();
// Return the number of bytes available on the free list.
- int available() { return available_; }
+ intptr_t available() { return available_; }
// Place a node on the free list. The block of size 'size_in_bytes'
// starting at 'start' is placed on the free list. The return value is the
void Reset();
// Return the number of bytes available on the free list.
- int available() { return available_; }
+ intptr_t available() { return available_; }
// Place a node on the free list. The block starting at 'start' (assumed to
// have size object_size_) is placed on the free list. Bookkeeping
private:
// Available bytes on the free list.
- int available_;
+ intptr_t available_;
// The head of the free list.
Address head_;
public:
// Creates an old space object with a given maximum capacity.
// The constructor does not allocate pages from OS.
- explicit OldSpace(int max_capacity,
+ explicit OldSpace(intptr_t max_capacity,
AllocationSpace id,
Executability executable)
: PagedSpace(max_capacity, id, executable), free_list_(id) {
// The bytes available on the free list (ie, not above the linear allocation
// pointer).
- int AvailableFree() { return free_list_.available(); }
+ intptr_t AvailableFree() { return free_list_.available(); }
// The limit of allocation for a page in this space.
virtual Address PageAllocationLimit(Page* page) {
class FixedSpace : public PagedSpace {
public:
- FixedSpace(int max_capacity,
+ FixedSpace(intptr_t max_capacity,
AllocationSpace id,
int object_size_in_bytes,
const char* name)
class MapSpace : public FixedSpace {
public:
// Creates a map space object with a maximum capacity.
- MapSpace(int max_capacity, int max_map_space_pages, AllocationSpace id)
+ MapSpace(intptr_t max_capacity, int max_map_space_pages, AllocationSpace id)
: FixedSpace(max_capacity, id, Map::kSize, "map"),
max_map_space_pages_(max_map_space_pages) {
ASSERT(max_map_space_pages < kMaxMapPageIndex);
class CellSpace : public FixedSpace {
public:
// Creates a property cell space object with a maximum capacity.
- CellSpace(int max_capacity, AllocationSpace id)
+ CellSpace(intptr_t max_capacity, AllocationSpace id)
: FixedSpace(max_capacity, id, JSGlobalPropertyCell::kSize, "cell") {}
protected:
// Given a chunk size, returns the object size it can accommodate. Used by
// LargeObjectSpace::Available.
- static int ObjectSizeFor(int chunk_size) {
+ static intptr_t ObjectSizeFor(intptr_t chunk_size) {
if (chunk_size <= (Page::kPageSize + Page::kObjectStartOffset)) return 0;
return chunk_size - Page::kPageSize - Page::kObjectStartOffset;
}
Object* AllocateRawFixedArray(int size_in_bytes);
// Available bytes for objects in this space.
- int Available() {
+ intptr_t Available() {
return LargeObjectChunk::ObjectSizeFor(MemoryAllocator::Available());
}
- virtual int Size() {
+ virtual intptr_t Size() {
return size_;
}
private:
// The head of the linked list of large object chunks.
LargeObjectChunk* first_chunk_;
- int size_; // allocated bytes
+ intptr_t size_; // allocated bytes
int page_count_; // number of chunks
// ----------------------------------------------------------------------------
// I/O support.
-// Our version of printf(). Avoids compilation errors that we get
-// with standard printf when attempting to print pointers, etc.
-// (the errors are due to the extra compilation flags, which we
-// want elsewhere).
-void PrintF(const char* format, ...);
+#if __GNUC__ >= 4
+// On gcc we can ask the compiler to check the types of %d-style format
+// specifiers and their associated arguments. TODO(erikcorry) fix this
+// so it works on MacOSX.
+#if defined(__MACH__) && defined(__APPLE__)
+#define PRINTF_CHECKING
+#else // MacOsX.
+#define PRINTF_CHECKING __attribute__ ((format (printf, 1, 2)))
+#endif
+#else
+#define PRINTF_CHECKING
+#endif
+
+// Our version of printf().
+void PRINTF_CHECKING PrintF(const char* format, ...);
// Our version of fflush.
void Flush();
NewSpace* new_space = Heap::new_space();
static const int kNewSpaceFillerSize = ByteArray::SizeFor(0);
while (new_space->Available() > kNewSpaceFillerSize) {
- int available_before = new_space->Available();
+ int available_before = static_cast<int>(new_space->Available());
CHECK(!Heap::AllocateByteArray(0)->IsFailure());
if (available_before == new_space->Available()) {
// It seems that we are avoiding new space allocations when
char* data = new char[100];
void* aligned = data;
- CHECK_EQ(0, reinterpret_cast<uintptr_t>(aligned) & 0x1);
+ CHECK_EQ(0, static_cast<int>(reinterpret_cast<uintptr_t>(aligned) & 0x1));
void* unaligned = data + 1;
- CHECK_EQ(1, reinterpret_cast<uintptr_t>(unaligned) & 0x1);
+ CHECK_EQ(1, static_cast<int>(reinterpret_cast<uintptr_t>(unaligned) & 0x1));
// Check reading and writing aligned pointers.
obj->SetPointerInInternalField(0, aligned);
char* data = new char[100];
void* aligned = data;
- CHECK_EQ(0, reinterpret_cast<uintptr_t>(aligned) & 0x1);
+ CHECK_EQ(0, static_cast<int>(reinterpret_cast<uintptr_t>(aligned) & 0x1));
void* unaligned = data + 1;
- CHECK_EQ(1, reinterpret_cast<uintptr_t>(unaligned) & 0x1);
+ CHECK_EQ(1, static_cast<int>(reinterpret_cast<uintptr_t>(unaligned) & 0x1));
obj->SetPointerInInternalField(0, aligned);
i::Heap::CollectAllGarbage(false);
args.This()->Set(v8_str("low_bits"), v8_num(low_bits >> 1));
#elif defined(V8_HOST_ARCH_64_BIT)
uint64_t fp = reinterpret_cast<uint64_t>(calling_frame->fp());
- int32_t low_bits = fp & 0xffffffff;
- int32_t high_bits = fp >> 32;
+ int32_t low_bits = static_cast<int32_t>(fp & 0xffffffff);
+ int32_t high_bits = static_cast<int32_t>(fp >> 32);
args.This()->Set(v8_str("low_bits"), v8_num(low_bits));
args.This()->Set(v8_str("high_bits"), v8_num(high_bits));
#else
CHECK(lo->Contains(ho));
while (true) {
- int available = lo->Available();
+ intptr_t available = lo->Available();
obj = lo->AllocateRaw(lo_size);
if (obj->IsFailure()) break;
HeapObject::cast(obj)->set_map(faked_map);
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-// Flags: --max-new-space-size=262144
+// Flags: --max-new-space-size=256
// Check that a mod where the stub code hits a failure in heap number
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-// Flags: --max-new-space-size=262144
+// Flags: --max-new-space-size=256
function zero() {
var x = 0.5;
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-// Flags: --max-new-space-size=262144
+// Flags: --max-new-space-size=256
function zero() {
var x = 0.5;