From 4465836c8a615ed6c15c8d80bb414ce8d9f0624c Mon Sep 17 00:00:00 2001
From: bmeurer <bmeurer@chromium.org>
Date: Thu, 12 Feb 2015 04:46:58 -0800
Subject: [PATCH] Fix invalid use of int in Zone.

Review URL: https://codereview.chromium.org/924453002

Cr-Commit-Position: refs/heads/master@{#26609}
---
 src/ast.h                                     |  4 +-
 src/compiler.cc                               |  2 +-
 src/compiler.h                                |  2 +-
 src/compiler/instruction-selector.cc          |  2 +-
 src/compiler/node-cache.cc                    |  4 +-
 src/compiler/raw-machine-assembler.cc         |  3 +-
 src/compiler/register-allocator-verifier.cc   |  3 +-
 src/compiler/register-allocator.cc            |  2 +-
 src/compiler/scheduler.cc                     |  5 +-
 src/compiler/value-numbering-reducer.cc       |  2 +-
 src/compiler/x64/code-generator-x64.cc        |  2 +-
 src/hydrogen.cc                               |  8 +--
 src/hydrogen.h                                | 10 ++--
 src/lithium-allocator.cc                      |  4 +-
 src/lithium-allocator.h                       |  2 +-
 src/typing.h                                  |  4 +-
 src/zone.cc                                   | 49 +++++++++----------
 src/zone.h                                    | 45 +++++++----------
 test/unittests/compiler/zone-pool-unittest.cc |  6 +--
 19 files changed, 70 insertions(+), 89 deletions(-)

diff --git a/src/ast.h b/src/ast.h
index e457e66a2..0229880b7 100644
--- a/src/ast.h
+++ b/src/ast.h
@@ -200,9 +200,7 @@ class AstNode: public ZoneObject {
   };
 #undef DECLARE_TYPE_ENUM
 
-  void* operator new(size_t size, Zone* zone) {
-    return zone->New(static_cast<int>(size));
-  }
+  void* operator new(size_t size, Zone* zone) { return zone->New(size); }
 
   explicit AstNode(int position): position_(position) {}
   virtual ~AstNode() {}
diff --git a/src/compiler.cc b/src/compiler.cc
index 86dbe8fcf..73f601a56 100644
--- a/src/compiler.cc
+++ b/src/compiler.cc
@@ -1565,7 +1565,7 @@ CompilationPhase::CompilationPhase(const char* name, CompilationInfo* info)
 
 CompilationPhase::~CompilationPhase() {
   if (FLAG_hydrogen_stats) {
-    unsigned size = zone()->allocation_size();
+    size_t size = zone()->allocation_size();
     size += info_->zone()->allocation_size() - info_zone_start_allocation_size_;
     isolate()->GetHStatistics()->SaveTiming(name_, timer_.Elapsed(), size);
   }
diff --git a/src/compiler.h b/src/compiler.h
index 076467a7f..6af3c3539 100644
--- a/src/compiler.h
+++ b/src/compiler.h
@@ -773,7 +773,7 @@ class CompilationPhase BASE_EMBEDDED {
   const char* name_;
   CompilationInfo* info_;
   Zone zone_;
-  unsigned info_zone_start_allocation_size_;
+  size_t info_zone_start_allocation_size_;
   base::ElapsedTimer timer_;
 
   DISALLOW_COPY_AND_ASSIGN(CompilationPhase);
diff --git a/src/compiler/instruction-selector.cc b/src/compiler/instruction-selector.cc
index 105e25504..e8a52a61e 100644
--- a/src/compiler/instruction-selector.cc
+++ b/src/compiler/instruction-selector.cc
@@ -1064,7 +1064,7 @@ void InstructionSelector::VisitSwitch(Node* node, BasicBlock** branches,
   Node* const value = node->InputAt(0);
   size_t const input_count = branch_count + 1;
   InstructionOperand* const inputs =
-      zone()->NewArray<InstructionOperand>(static_cast<int>(input_count));
+      zone()->NewArray<InstructionOperand>(input_count);
   inputs[0] = g.UseRegister(value);
   for (size_t index = 0; index < branch_count; ++index) {
     inputs[index + 1] = g.Label(branches[index]);
diff --git a/src/compiler/node-cache.cc b/src/compiler/node-cache.cc
index 92a3fa078..79c342b44 100644
--- a/src/compiler/node-cache.cc
+++ b/src/compiler/node-cache.cc
@@ -36,7 +36,7 @@ bool NodeCache<Key, Hash, Pred>::Resize(Zone* zone) {
   size_t old_size = size_ + kLinearProbe;
   size_ *= 4;
   size_t num_entries = size_ + kLinearProbe;
-  entries_ = zone->NewArray<Entry>(static_cast<int>(num_entries));
+  entries_ = zone->NewArray<Entry>(num_entries);
   memset(entries_, 0, sizeof(Entry) * num_entries);
 
   // Insert the old entries into the new block.
@@ -66,7 +66,7 @@ Node** NodeCache<Key, Hash, Pred>::Find(Zone* zone, Key key) {
   if (!entries_) {
     // Allocate the initial entries and insert the first entry.
     size_t num_entries = kInitialSize + kLinearProbe;
-    entries_ = zone->NewArray<Entry>(static_cast<int>(num_entries));
+    entries_ = zone->NewArray<Entry>(num_entries);
     size_ = kInitialSize;
     memset(entries_, 0, sizeof(Entry) * num_entries);
     Entry* entry = &entries_[hash & (kInitialSize - 1)];
diff --git a/src/compiler/raw-machine-assembler.cc b/src/compiler/raw-machine-assembler.cc
index fc1df1ecb..562f77d14 100644
--- a/src/compiler/raw-machine-assembler.cc
+++ b/src/compiler/raw-machine-assembler.cc
@@ -80,8 +80,7 @@ void RawMachineAssembler::Switch(Node* index, Label** succ_labels,
                                  size_t succ_count) {
   DCHECK_NE(schedule()->end(), current_block_);
   Node* sw = NewNode(common()->Switch(succ_count), index);
-  BasicBlock** succ_blocks =
-      zone()->NewArray<BasicBlock*>(static_cast<int>(succ_count));
+  BasicBlock** succ_blocks = zone()->NewArray<BasicBlock*>(succ_count);
   for (size_t index = 0; index < succ_count; ++index) {
     succ_blocks[index] = Use(succ_labels[index]);
   }
diff --git a/src/compiler/register-allocator-verifier.cc b/src/compiler/register-allocator-verifier.cc
index 45bf5ecc5..434e965bf 100644
--- a/src/compiler/register-allocator-verifier.cc
+++ b/src/compiler/register-allocator-verifier.cc
@@ -61,8 +61,7 @@ RegisterAllocatorVerifier::RegisterAllocatorVerifier(
   // kSameAsFirst along the way.
   for (const auto* instr : sequence->instructions()) {
     const size_t operand_count = OperandCount(instr);
-    auto* op_constraints =
-        zone->NewArray<OperandConstraint>(static_cast<int>(operand_count));
+    auto* op_constraints = zone->NewArray<OperandConstraint>(operand_count);
     size_t count = 0;
     for (size_t i = 0; i < instr->InputCount(); ++i, ++count) {
       BuildConstraint(instr->InputAt(i), &op_constraints[count]);
diff --git a/src/compiler/register-allocator.cc b/src/compiler/register-allocator.cc
index 0d53c06f5..1de5773e7 100644
--- a/src/compiler/register-allocator.cc
+++ b/src/compiler/register-allocator.cc
@@ -1514,7 +1514,7 @@ class LiveRangeBoundArray {
   void Initialize(Zone* zone, const LiveRange* const range) {
     size_t length = 0;
     for (auto i = range; i != nullptr; i = i->next()) length++;
-    start_ = zone->NewArray<LiveRangeBound>(static_cast<int>(length));
+    start_ = zone->NewArray<LiveRangeBound>(length);
     length_ = length;
     auto curr = start_;
     for (auto i = range; i != nullptr; i = i->next(), ++curr) {
diff --git a/src/compiler/scheduler.cc b/src/compiler/scheduler.cc
index 555625691..25bdba496 100644
--- a/src/compiler/scheduler.cc
+++ b/src/compiler/scheduler.cc
@@ -365,8 +365,7 @@ class CFGBuilder : public ZoneObject {
 
   void BuildBlocksForSuccessors(Node* node) {
     size_t const successor_count = node->op()->ControlOutputCount();
-    Node** successors =
-        zone_->NewArray<Node*>(static_cast<int>(successor_count));
+    Node** successors = zone_->NewArray<Node*>(successor_count);
     CollectSuccessorProjections(node, successors, successor_count);
     for (size_t index = 0; index < successor_count; ++index) {
       BuildBlockForNode(successors[index]);
@@ -457,7 +456,7 @@ class CFGBuilder : public ZoneObject {
   void ConnectSwitch(Node* sw) {
     size_t const successor_count = sw->op()->ControlOutputCount();
     BasicBlock** successor_blocks =
-        zone_->NewArray<BasicBlock*>(static_cast<int>(successor_count));
+        zone_->NewArray<BasicBlock*>(successor_count);
     CollectSuccessorBlocks(sw, successor_blocks, successor_count);
 
     if (sw == component_entry_) {
diff --git a/src/compiler/value-numbering-reducer.cc b/src/compiler/value-numbering-reducer.cc
index 5e924d7a3..555570d22 100644
--- a/src/compiler/value-numbering-reducer.cc
+++ b/src/compiler/value-numbering-reducer.cc
@@ -135,7 +135,7 @@ void ValueNumberingReducer::Grow() {
   Node** const old_entries = entries_;
   size_t const old_capacity = capacity_;
   capacity_ *= kCapacityToSizeRatio;
-  entries_ = zone()->NewArray<Node*>(static_cast<int>(capacity_));
+  entries_ = zone()->NewArray<Node*>(capacity_);
   memset(entries_, 0, sizeof(*entries_) * capacity_);
   size_ = 0;
   size_t const mask = capacity_ - 1;
diff --git a/src/compiler/x64/code-generator-x64.cc b/src/compiler/x64/code-generator-x64.cc
index bad87b2ff..927dbef49 100644
--- a/src/compiler/x64/code-generator-x64.cc
+++ b/src/compiler/x64/code-generator-x64.cc
@@ -1073,7 +1073,7 @@ void CodeGenerator::AssembleArchJump(BasicBlock::RpoNumber target) {
 void CodeGenerator::AssembleArchSwitch(Instruction* instr) {
   X64OperandConverter i(this, instr);
   size_t const label_count = instr->InputCount() - 1;
-  Label** labels = zone()->NewArray<Label*>(static_cast<int>(label_count));
+  Label** labels = zone()->NewArray<Label*>(label_count);
   for (size_t index = 0; index < label_count; ++index) {
     labels[index] = GetLabel(i.InputRpo(static_cast<int>(index + 1)));
   }
diff --git a/src/hydrogen.cc b/src/hydrogen.cc
index 7e5880a0a..902c43c13 100644
--- a/src/hydrogen.cc
+++ b/src/hydrogen.cc
@@ -13452,9 +13452,9 @@ void HStatistics::Print() {
     double percent = times_[i].PercentOf(sum);
     PrintF(" %8.3f ms / %4.1f %% ", ms, percent);
 
-    unsigned size = sizes_[i];
+    size_t size = sizes_[i];
     double size_percent = static_cast<double>(size) * 100 / total_size_;
-    PrintF(" %9u bytes / %4.1f %%\n", size, size_percent);
+    PrintF(" %9zu bytes / %4.1f %%\n", size, size_percent);
   }
 
   PrintF(
@@ -13470,7 +13470,7 @@ void HStatistics::Print() {
   PrintF(
       "----------------------------------------"
      "----------------------------------------\n");
-  PrintF("%33s %8.3f ms %9u bytes\n", "Total",
+  PrintF("%33s %8.3f ms %9zu bytes\n", "Total",
          total.InMillisecondsF(), total_size_);
   PrintF("%33s (%.1f times slower than full code gen)\n", "",
          total.TimesOf(full_code_gen_));
@@ -13489,7 +13489,7 @@ void HStatistics::Print() {
 
 
 void HStatistics::SaveTiming(const char* name, base::TimeDelta time,
-                             unsigned size) {
+                             size_t size) {
   total_size_ += size;
   for (int i = 0; i < names_.length(); ++i) {
     if (strcmp(names_[i], name) == 0) {
diff --git a/src/hydrogen.h b/src/hydrogen.h
index 0ca78e9f5..ae16e5a34 100644
--- a/src/hydrogen.h
+++ b/src/hydrogen.h
@@ -2134,9 +2134,7 @@ class HOptimizedGraphBuilder : public HGraphBuilder, public AstVisitor {
 
   void VisitDeclarations(ZoneList<Declaration*>* declarations) OVERRIDE;
 
-  void* operator new(size_t size, Zone* zone) {
-    return zone->New(static_cast<int>(size));
-  }
+  void* operator new(size_t size, Zone* zone) { return zone->New(size); }
   void operator delete(void* pointer, Zone* zone) { }
   void operator delete(void* pointer) { }
 
@@ -2800,7 +2798,7 @@ class HStatistics FINAL: public Malloced {
 
   void Initialize(CompilationInfo* info);
   void Print();
-  void SaveTiming(const char* name, base::TimeDelta time, unsigned size);
+  void SaveTiming(const char* name, base::TimeDelta time, size_t size);
 
   void IncrementFullCodeGen(base::TimeDelta full_code_gen) {
     full_code_gen_ += full_code_gen;
@@ -2825,11 +2823,11 @@ class HStatistics FINAL: public Malloced {
  private:
   List<base::TimeDelta> times_;
   List<const char*> names_;
-  List<unsigned> sizes_;
+  List<size_t> sizes_;
   base::TimeDelta create_graph_;
   base::TimeDelta optimize_graph_;
   base::TimeDelta generate_code_;
-  unsigned total_size_;
+  size_t total_size_;
   base::TimeDelta full_code_gen_;
   double source_size_;
 };
diff --git a/src/lithium-allocator.cc b/src/lithium-allocator.cc
index 3b9285689..36c8c7d90 100644
--- a/src/lithium-allocator.cc
+++ b/src/lithium-allocator.cc
@@ -2174,8 +2174,8 @@ LAllocatorPhase::LAllocatorPhase(const char* name, LAllocator* allocator)
 
 LAllocatorPhase::~LAllocatorPhase() {
   if (FLAG_hydrogen_stats) {
-    unsigned size = allocator_->zone()->allocation_size() -
-                    allocator_zone_start_allocation_size_;
+    size_t size = allocator_->zone()->allocation_size() -
+                  allocator_zone_start_allocation_size_;
     isolate()->GetHStatistics()->SaveTiming(name(), base::TimeDelta(), size);
   }
 
diff --git a/src/lithium-allocator.h b/src/lithium-allocator.h
index f63077e19..2a8080011 100644
--- a/src/lithium-allocator.h
+++ b/src/lithium-allocator.h
@@ -564,7 +564,7 @@ class LAllocatorPhase : public CompilationPhase {
 
  private:
   LAllocator* allocator_;
-  unsigned allocator_zone_start_allocation_size_;
+  size_t allocator_zone_start_allocation_size_;
 
   DISALLOW_COPY_AND_ASSIGN(LAllocatorPhase);
 };
diff --git a/src/typing.h b/src/typing.h
index 0ca92f3a4..34649775f 100644
--- a/src/typing.h
+++ b/src/typing.h
@@ -23,9 +23,7 @@ class AstTyper: public AstVisitor {
  public:
   static void Run(CompilationInfo* info);
 
-  void* operator new(size_t size, Zone* zone) {
-    return zone->New(static_cast<int>(size));
-  }
+  void* operator new(size_t size, Zone* zone) { return zone->New(size); }
   void operator delete(void* pointer, Zone* zone) { }
   void operator delete(void* pointer) { }
 
diff --git a/src/zone.cc b/src/zone.cc
index e3f559aac..5a7245bd4 100644
--- a/src/zone.cc
+++ b/src/zone.cc
@@ -19,7 +19,7 @@ namespace {
 
 #if V8_USE_ADDRESS_SANITIZER
 
-const int kASanRedzoneBytes = 24;  // Must be a multiple of 8.
+const size_t kASanRedzoneBytes = 24;  // Must be a multiple of 8.
 
 #else
 
@@ -35,7 +35,7 @@ const int kASanRedzoneBytes = 24;  // Must be a multiple of 8.
     USE(size);                                           \
   } while (false)
 
-const int kASanRedzoneBytes = 0;
+const size_t kASanRedzoneBytes = 0;
 
 #endif  // V8_USE_ADDRESS_SANITIZER
 
@@ -50,7 +50,7 @@ const int kASanRedzoneBytes = 0;
 
 class Segment {
  public:
-  void Initialize(Segment* next, int size) {
+  void Initialize(Segment* next, size_t size) {
     next_ = next;
     size_ = size;
   }
@@ -58,20 +58,18 @@ class Segment {
   Segment* next() const { return next_; }
   void clear_next() { next_ = nullptr; }
 
-  int size() const { return size_; }
-  int capacity() const { return size_ - sizeof(Segment); }
+  size_t size() const { return size_; }
+  size_t capacity() const { return size_ - sizeof(Segment); }
 
   Address start() const { return address(sizeof(Segment)); }
   Address end() const { return address(size_); }
 
  private:
   // Computes the address of the nth byte in this segment.
-  Address address(int n) const {
-    return Address(this) + n;
-  }
+  Address address(size_t n) const { return Address(this) + n; }
 
   Segment* next_;
-  int size_;
+  size_t size_;
 };
 
 
@@ -91,7 +89,7 @@ Zone::~Zone() {
 }
 
 
-void* Zone::New(int size) {
+void* Zone::New(size_t size) {
   // Round up the requested size to fit the alignment.
   size = RoundUp(size, kAlignment);
 
@@ -106,8 +104,8 @@ void* Zone::New(int size) {
 
   // Check if the requested size is available without expanding.
   Address result = position_;
 
-  const int size_with_redzone = size + kASanRedzoneBytes;
-  if (size_with_redzone > limit_ - position_) {
+  const size_t size_with_redzone = size + kASanRedzoneBytes;
+  if (limit_ < position_ + size_with_redzone) {
     result = NewExpand(size_with_redzone);
   } else {
     position_ += size_with_redzone;
@@ -141,7 +139,7 @@ void Zone::DeleteAll() {
       keep = current;
       keep->clear_next();
     } else {
-      int size = current->size();
+      size_t size = current->size();
 #ifdef DEBUG
       // Un-poison first so the zapping doesn't trigger ASan complaints.
       ASAN_UNPOISON_MEMORY_REGION(current, size);
@@ -185,7 +183,7 @@ void Zone::DeleteKeptSegment() {
 
   DCHECK(segment_head_ == nullptr || segment_head_->next() == nullptr);
   if (segment_head_ != nullptr) {
-    int size = segment_head_->size();
+    size_t size = segment_head_->size();
 #ifdef DEBUG
     // Un-poison first so the zapping doesn't trigger ASan complaints.
     ASAN_UNPOISON_MEMORY_REGION(segment_head_, size);
@@ -202,7 +200,7 @@ void Zone::DeleteKeptSegment() {
 
 // Creates a new segment, sets it size, and pushes it to the front
 // of the segment chain. Returns the new segment.
-Segment* Zone::NewSegment(int size) {
+Segment* Zone::NewSegment(size_t size) {
   Segment* result = reinterpret_cast<Segment*>(Malloced::New(size));
   segment_bytes_allocated_ += size;
   if (result != nullptr) {
@@ -214,17 +212,17 @@ Segment* Zone::NewSegment(int size) {
 
 
 // Deletes the given segment. Does not touch the segment chain.
-void Zone::DeleteSegment(Segment* segment, int size) {
+void Zone::DeleteSegment(Segment* segment, size_t size) {
   segment_bytes_allocated_ -= size;
   Malloced::Delete(segment);
 }
 
 
-Address Zone::NewExpand(int size) {
+Address Zone::NewExpand(size_t size) {
   // Make sure the requested size is already properly aligned and that
   // there isn't enough room in the Zone to satisfy the request.
-  DCHECK(size == RoundDown(size, kAlignment));
-  DCHECK(size > limit_ - position_);
+  DCHECK_EQ(size, RoundDown(size, kAlignment));
+  DCHECK_LT(limit_, position_ + size);
 
   // Compute the new segment size. We use a 'high water mark'
   // strategy, where we increase the segment size every time we expand
@@ -235,27 +233,26 @@ Address Zone::NewExpand(int size) {
   static const size_t kSegmentOverhead = sizeof(Segment) + kAlignment;
   const size_t new_size_no_overhead = size + (old_size << 1);
   size_t new_size = kSegmentOverhead + new_size_no_overhead;
-  const size_t min_new_size = kSegmentOverhead + static_cast<size_t>(size);
+  const size_t min_new_size = kSegmentOverhead + size;
   // Guard against integer overflow.
-  if (new_size_no_overhead < static_cast<size_t>(size) ||
-      new_size < static_cast<size_t>(kSegmentOverhead)) {
+  if (new_size_no_overhead < size || new_size < kSegmentOverhead) {
    V8::FatalProcessOutOfMemory("Zone");
     return nullptr;
   }
-  if (new_size < static_cast<size_t>(kMinimumSegmentSize)) {
+  if (new_size < kMinimumSegmentSize) {
     new_size = kMinimumSegmentSize;
-  } else if (new_size > static_cast<size_t>(kMaximumSegmentSize)) {
+  } else if (new_size > kMaximumSegmentSize) {
     // Limit the size of new segments to avoid growing the segment size
     // exponentially, thus putting pressure on contiguous virtual address space.
     // All the while making sure to allocate a segment large enough to hold the
     // requested size.
-    new_size = Max(min_new_size, static_cast<size_t>(kMaximumSegmentSize));
+    new_size = Max(min_new_size, kMaximumSegmentSize);
   }
   if (new_size > INT_MAX) {
     V8::FatalProcessOutOfMemory("Zone");
     return nullptr;
   }
-  Segment* segment = NewSegment(static_cast<int>(new_size));
+  Segment* segment = NewSegment(new_size);
   if (segment == nullptr) {
     V8::FatalProcessOutOfMemory("Zone");
     return nullptr;
diff --git a/src/zone.h b/src/zone.h
index da657155f..a3511cdaa 100644
--- a/src/zone.h
+++ b/src/zone.h
@@ -40,12 +40,11 @@ class Zone FINAL {
 
   // Allocate 'size' bytes of memory in the Zone; expands the Zone by
   // allocating new segments of memory on demand using malloc().
-  void* New(int size);
+  void* New(size_t size);
 
   template <typename T>
-  T* NewArray(int length) {
-    DCHECK(std::numeric_limits<int>::max() / static_cast<int>(sizeof(T)) >
-           length);
+  T* NewArray(size_t length) {
+    DCHECK_LT(length, std::numeric_limits<size_t>::max() / sizeof(T));
     return static_cast<T*>(New(length * sizeof(T)));
   }
 
@@ -63,51 +62,51 @@ class Zone FINAL {
     return segment_bytes_allocated_ > kExcessLimit;
   }
 
-  unsigned allocation_size() const { return allocation_size_; }
+  size_t allocation_size() const { return allocation_size_; }
 
  private:
   // All pointers returned from New() have this alignment. In addition, if the
   // object being allocated has a size that is divisible by 8 then its alignment
   // will be 8. ASan requires 8-byte alignment.
 #ifdef V8_USE_ADDRESS_SANITIZER
-  static const int kAlignment = 8;
+  static const size_t kAlignment = 8;
   STATIC_ASSERT(kPointerSize <= 8);
 #else
-  static const int kAlignment = kPointerSize;
+  static const size_t kAlignment = kPointerSize;
 #endif
 
   // Never allocate segments smaller than this size in bytes.
-  static const int kMinimumSegmentSize = 8 * KB;
+  static const size_t kMinimumSegmentSize = 8 * KB;
 
   // Never allocate segments larger than this size in bytes.
-  static const int kMaximumSegmentSize = 1 * MB;
+  static const size_t kMaximumSegmentSize = 1 * MB;
 
   // Never keep segments larger than this size in bytes around.
-  static const int kMaximumKeptSegmentSize = 64 * KB;
+  static const size_t kMaximumKeptSegmentSize = 64 * KB;
 
   // Report zone excess when allocation exceeds this limit.
-  static const int kExcessLimit = 256 * MB;
+  static const size_t kExcessLimit = 256 * MB;
 
   // The number of bytes allocated in this zone so far.
-  unsigned allocation_size_;
+  size_t allocation_size_;
 
   // The number of bytes allocated in segments. Note that this number
   // includes memory allocated from the OS but not yet allocated from
   // the zone.
-  int segment_bytes_allocated_;
+  size_t segment_bytes_allocated_;
 
   // Expand the Zone to hold at least 'size' more bytes and allocate
   // the bytes. Returns the address of the newly allocated chunk of
   // memory in the Zone. Should only be called if there isn't enough
   // room in the Zone already.
-  Address NewExpand(int size);
+  Address NewExpand(size_t size);
 
   // Creates a new segment, sets it size, and pushes it to the front
   // of the segment chain. Returns the new segment.
-  inline Segment* NewSegment(int size);
+  inline Segment* NewSegment(size_t size);
 
   // Deletes the given segment. Does not touch the segment chain.
-  inline void DeleteSegment(Segment* segment, int size);
+  inline void DeleteSegment(Segment* segment, size_t size);
 
   // The free region in the current (front) segment is represented as
   // the half-open interval [position, limit). The 'position' variable
@@ -124,9 +123,7 @@ class Zone FINAL {
 class ZoneObject {
  public:
   // Allocate a new ZoneObject of 'size' bytes in the Zone.
-  void* operator new(size_t size, Zone* zone) {
-    return zone->New(static_cast<int>(size));
-  }
+  void* operator new(size_t size, Zone* zone) { return zone->New(size); }
 
   // Ideally, the delete operator should be private instead of
   // public, but unfortunately the compiler sometimes synthesizes
@@ -160,7 +157,7 @@ class ZoneScope FINAL {
 class ZoneAllocationPolicy FINAL {
  public:
   explicit ZoneAllocationPolicy(Zone* zone) : zone_(zone) { }
-  void* New(size_t size) { return zone()->New(static_cast<int>(size)); }
+  void* New(size_t size) { return zone()->New(size); }
   static void Delete(void* pointer) {}
 
   Zone* zone() const { return zone_; }
@@ -181,9 +178,7 @@ class ZoneList FINAL : public List<T, ZoneAllocationPolicy> {
   ZoneList(int capacity, Zone* zone)
       : List<T, ZoneAllocationPolicy>(capacity, ZoneAllocationPolicy(zone)) { }
 
-  void* operator new(size_t size, Zone* zone) {
-    return zone->New(static_cast<int>(size));
-  }
+  void* operator new(size_t size, Zone* zone) { return zone->New(size); }
 
   // Construct a new ZoneList by copying the elements of the given ZoneList.
   ZoneList(const ZoneList<T>& other, Zone* zone)
@@ -239,9 +234,7 @@ class ZoneSplayTree FINAL : public SplayTree<Config, ZoneAllocationPolicy> {
     SplayTree<Config, ZoneAllocationPolicy>::ResetRoot();
   }
 
-  void* operator new(size_t size, Zone* zone) {
-    return zone->New(static_cast<int>(size));
-  }
+  void* operator new(size_t size, Zone* zone) { return zone->New(size); }
 
   void operator delete(void* pointer) { UNREACHABLE(); }
   void operator delete(void* pointer, Zone* zone) { UNREACHABLE(); }
diff --git a/test/unittests/compiler/zone-pool-unittest.cc b/test/unittests/compiler/zone-pool-unittest.cc
index b005d2cba..3bfde4bdc 100644
--- a/test/unittests/compiler/zone-pool-unittest.cc
+++ b/test/unittests/compiler/zone-pool-unittest.cc
@@ -32,9 +32,9 @@ class ZonePoolTest : public TestWithIsolate {
 
   size_t Allocate(Zone* zone) {
     size_t bytes = rng.NextInt(25) + 7;
-    int size_before = zone->allocation_size();
-    zone->New(static_cast<int>(bytes));
-    return static_cast<size_t>(zone->allocation_size() - size_before);
+    size_t size_before = zone->allocation_size();
+    zone->New(bytes);
+    return zone->allocation_size() - size_before;
   }
 
  private:
-- 
2.34.1
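
The pattern the patch applies throughout is simple: byte counts move from int/unsigned to size_t, and the array helper keeps an explicit guard against length * sizeof(T) wrapping around. The following is a minimal stand-alone sketch of that pattern, not V8 code; ToyArena and its members are hypothetical names, and it uses one fixed malloc'ed buffer instead of the chained segments that Zone::NewExpand/NewSegment manage in the patch.

// Hypothetical bump allocator illustrating the size_t conventions above.
#include <cassert>
#include <cstddef>
#include <cstdlib>
#include <limits>

class ToyArena {
 public:
  explicit ToyArena(size_t capacity)
      : base_(static_cast<char*>(std::malloc(capacity))),
        position_(base_),
        limit_(base_ != nullptr ? base_ + capacity : base_) {}
  ~ToyArena() { std::free(base_); }

  // Bump-allocate 'size' bytes; returns nullptr when the buffer is exhausted.
  void* New(size_t size) {
    if (size > static_cast<size_t>(limit_ - position_)) return nullptr;
    void* result = position_;
    position_ += size;
    return result;
  }

  // Typed array allocation with an overflow guard in the same form as the
  // DCHECK_LT the patch puts into Zone::NewArray.
  template <typename T>
  T* NewArray(size_t length) {
    assert(length < std::numeric_limits<size_t>::max() / sizeof(T));
    return static_cast<T*>(New(length * sizeof(T)));
  }

 private:
  char* base_;
  char* position_;
  char* limit_;
};

int main() {
  ToyArena arena(1024);
  int* ints = arena.NewArray<int>(16);  // 64 bytes from the arena
  return ints != nullptr ? 0 : 1;
}

The real Zone grows on demand by allocating new segments when a request does not fit; the sketch stops at a single buffer purely to keep the example short.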