1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are met:
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
28 #ifndef V8_ZONE_INL_H_
29 #define V8_ZONE_INL_H_
36 #include "v8-counters.h"
42 inline void* Zone::New(int size) {
43 // Round up the requested size to fit the alignment.
44 size = RoundUp(size, kAlignment);
46 // If the allocation size is divisible by 8 then we return an 8-byte aligned
48 if (kPointerSize == 4 && kAlignment == 4) {
49 position_ += ((~size) & 4) & (reinterpret_cast<intptr_t>(position_) & 4);
51 ASSERT(kAlignment >= kPointerSize);
54 // Check if the requested size is available without expanding.
55 Address result = position_;
57 if (size > limit_ - position_) {
58 result = NewExpand(size);
63 // Check that the result has the proper alignment and return it.
64 ASSERT(IsAddressAligned(result, kAlignment, 0));
65 allocation_size_ += size;
66 return reinterpret_cast<void*>(result);
71 T* Zone::NewArray(int length) {
72 return static_cast<T*>(New(length * sizeof(T)));
76 bool Zone::excess_allocation() {
77 return segment_bytes_allocated_ > kExcessLimit;
81 void Zone::adjust_segment_bytes_allocated(int delta) {
82 segment_bytes_allocated_ += delta;
83 isolate_->counters()->zone_segment_bytes()->Set(segment_bytes_allocated_);
87 template <typename Config>
88 ZoneSplayTree<Config>::~ZoneSplayTree() {
89 // Reset the root to avoid unneeded iteration over all tree nodes
90 // in the destructor. For a zone-allocated tree, nodes will be
92 SplayTree<Config, ZoneAllocationPolicy>::ResetRoot();
96 void* ZoneObject::operator new(size_t size, Zone* zone) {
97 return zone->New(static_cast<int>(size));
100 inline void* ZoneAllocationPolicy::New(size_t size) {
102 return zone_->New(static_cast<int>(size));
106 template <typename T>
107 void* ZoneList<T>::operator new(size_t size, Zone* zone) {
108 return zone->New(static_cast<int>(size));
112 template <typename T>
113 void* ZoneSplayTree<T>::operator new(size_t size, Zone* zone) {
114 return zone->New(static_cast<int>(size));
118 } } // namespace v8::internal
120 #endif // V8_ZONE_INL_H_