ee60086ed2407d75684d9116d03eed708094686a
[profile/ivi/qtjsbackend.git] / src / 3rdparty / v8 / test / cctest / test-spaces.cc
1 // Copyright 2011 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
4 // met:
5 //
6 //     * Redistributions of source code must retain the above copyright
7 //       notice, this list of conditions and the following disclaimer.
8 //     * Redistributions in binary form must reproduce the above
9 //       copyright notice, this list of conditions and the following
10 //       disclaimer in the documentation and/or other materials provided
11 //       with the distribution.
12 //     * Neither the name of Google Inc. nor the names of its
13 //       contributors may be used to endorse or promote products derived
14 //       from this software without specific prior written permission.
15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
28 #include <stdlib.h>
29
30 #include "v8.h"
31 #include "cctest.h"
32
33 using namespace v8::internal;
34
35 #if 0
// Verifies the card-marking write-barrier bookkeeping for a single page:
// after clearing all region marks, every pointer-aligned slot in the
// object area must read back as clean; after marking every slot's region
// dirty, each must read back as dirty.
// NOTE(review): dead code — the enclosing #if 0 disables it, and the body
// is additionally guarded by ENABLE_CARDMARKING_WRITE_BARRIER.
static void VerifyRegionMarking(Address page_start) {
#ifdef ENABLE_CARDMARKING_WRITE_BARRIER
  Page* p = Page::FromAddress(page_start);

  // Start from a fully clean page.
  p->SetRegionMarks(Page::kAllRegionsCleanMarks);

  // Every pointer-sized slot must initially be clean.
  for (Address addr = p->ObjectAreaStart();
       addr < p->ObjectAreaEnd();
       addr += kPointerSize) {
    CHECK(!Page::FromAddress(addr)->IsRegionDirty(addr));
  }

  // Dirty the region of every slot...
  for (Address addr = p->ObjectAreaStart();
       addr < p->ObjectAreaEnd();
       addr += kPointerSize) {
    Page::FromAddress(addr)->MarkRegionDirty(addr);
  }

  // ...and verify each one now reads back as dirty.
  for (Address addr = p->ObjectAreaStart();
       addr < p->ObjectAreaEnd();
       addr += kPointerSize) {
    CHECK(Page::FromAddress(addr)->IsRegionDirty(addr));
  }
#endif
}
61 #endif
62
63
64 // TODO(gc) you can no longer allocate pages like this. Details are hidden.
65 #if 0
// Exercises basic Page accessors (address, validity, object-area bounds,
// Offset/OffsetToAddress round-trips) on a page carved manually out of a
// two-page byte buffer.
// NOTE(review): dead code — disabled by the enclosing #if 0 because pages
// can no longer be allocated this way (see TODO above).
TEST(Page) {
  byte* mem = NewArray<byte>(2*Page::kPageSize);
  CHECK(mem != NULL);

  // Round up to the first page boundary inside the 2-page buffer so a
  // whole aligned page fits.
  Address start = reinterpret_cast<Address>(mem);
  Address page_start = RoundUp(start, Page::kPageSize);

  Page* p = Page::FromAddress(page_start);
  // Initialized Page has heap pointer, normally set by memory_allocator.
  p->heap_ = HEAP;
  CHECK(p->address() == page_start);
  CHECK(p->is_valid());

  p->opaque_header = 0;
  p->SetIsLargeObjectPage(false);
  CHECK(!p->next_page()->is_valid());

  CHECK(p->ObjectAreaStart() == page_start + Page::kObjectStartOffset);
  CHECK(p->ObjectAreaEnd() == page_start + Page::kPageSize);

  // Offset() and OffsetToAddress() must be inverses over the object area.
  CHECK(p->Offset(page_start + Page::kObjectStartOffset) ==
        Page::kObjectStartOffset);
  CHECK(p->Offset(page_start + Page::kPageSize) == Page::kPageSize);

  CHECK(p->OffsetToAddress(Page::kObjectStartOffset) == p->ObjectAreaStart());
  CHECK(p->OffsetToAddress(Page::kPageSize) == p->ObjectAreaEnd());

  // test region marking
  VerifyRegionMarking(page_start);

  DeleteArray(mem);
}
98 #endif
99
100
101 namespace v8 {
102 namespace internal {
103
104 // Temporarily sets a given allocator in an isolate.
105 class TestMemoryAllocatorScope {
106  public:
107   TestMemoryAllocatorScope(Isolate* isolate, MemoryAllocator* allocator)
108       : isolate_(isolate),
109         old_allocator_(isolate->memory_allocator_) {
110     isolate->memory_allocator_ = allocator;
111   }
112
113   ~TestMemoryAllocatorScope() {
114     isolate_->memory_allocator_ = old_allocator_;
115   }
116
117  private:
118   Isolate* isolate_;
119   MemoryAllocator* old_allocator_;
120
121   DISALLOW_COPY_AND_ASSIGN(TestMemoryAllocatorScope);
122 };
123
124 } }  // namespace v8::internal
125
126
127 TEST(MemoryAllocator) {
128   OS::Setup();
129   Isolate* isolate = Isolate::Current();
130   isolate->InitializeLoggingAndCounters();
131   Heap* heap = isolate->heap();
132   CHECK(isolate->heap()->ConfigureHeapDefault());
133
134   MemoryAllocator* memory_allocator = new MemoryAllocator(isolate);
135   CHECK(memory_allocator->Setup(heap->MaxReserved(),
136                                 heap->MaxExecutableSize()));
137
138   int total_pages = 0;
139   OldSpace faked_space(heap,
140                        heap->MaxReserved(),
141                        OLD_POINTER_SPACE,
142                        NOT_EXECUTABLE);
143   Page* first_page =
144       memory_allocator->AllocatePage(&faked_space, NOT_EXECUTABLE);
145
146   first_page->InsertAfter(faked_space.anchor()->prev_page());
147   CHECK(first_page->is_valid());
148   CHECK(first_page->next_page() == faked_space.anchor());
149   total_pages++;
150
151   for (Page* p = first_page; p != faked_space.anchor(); p = p->next_page()) {
152     CHECK(p->owner() == &faked_space);
153   }
154
155   // Again, we should get n or n - 1 pages.
156   Page* other =
157       memory_allocator->AllocatePage(&faked_space, NOT_EXECUTABLE);
158   CHECK(other->is_valid());
159   total_pages++;
160   other->InsertAfter(first_page);
161   int page_count = 0;
162   for (Page* p = first_page; p != faked_space.anchor(); p = p->next_page()) {
163     CHECK(p->owner() == &faked_space);
164     page_count++;
165   }
166   CHECK(total_pages == page_count);
167
168   Page* second_page = first_page->next_page();
169   CHECK(second_page->is_valid());
170   memory_allocator->Free(first_page);
171   memory_allocator->Free(second_page);
172   memory_allocator->TearDown();
173   delete memory_allocator;
174 }
175
176
177 TEST(NewSpace) {
178   OS::Setup();
179   Isolate* isolate = Isolate::Current();
180   isolate->InitializeLoggingAndCounters();
181   Heap* heap = isolate->heap();
182   CHECK(heap->ConfigureHeapDefault());
183   MemoryAllocator* memory_allocator = new MemoryAllocator(isolate);
184   CHECK(memory_allocator->Setup(heap->MaxReserved(),
185                                 heap->MaxExecutableSize()));
186   TestMemoryAllocatorScope test_scope(isolate, memory_allocator);
187
188   NewSpace new_space(heap);
189
190   CHECK(new_space.Setup(HEAP->ReservedSemiSpaceSize(),
191                         HEAP->ReservedSemiSpaceSize()));
192   CHECK(new_space.HasBeenSetup());
193
194   while (new_space.Available() >= Page::kMaxHeapObjectSize) {
195     Object* obj =
196         new_space.AllocateRaw(Page::kMaxHeapObjectSize)->ToObjectUnchecked();
197     CHECK(new_space.Contains(HeapObject::cast(obj)));
198   }
199
200   new_space.TearDown();
201   memory_allocator->TearDown();
202   delete memory_allocator;
203 }
204
205
206 TEST(OldSpace) {
207   OS::Setup();
208   Isolate* isolate = Isolate::Current();
209   isolate->InitializeLoggingAndCounters();
210   Heap* heap = isolate->heap();
211   CHECK(heap->ConfigureHeapDefault());
212   MemoryAllocator* memory_allocator = new MemoryAllocator(isolate);
213   CHECK(memory_allocator->Setup(heap->MaxReserved(),
214                                 heap->MaxExecutableSize()));
215   TestMemoryAllocatorScope test_scope(isolate, memory_allocator);
216
217   OldSpace* s = new OldSpace(heap,
218                              heap->MaxOldGenerationSize(),
219                              OLD_POINTER_SPACE,
220                              NOT_EXECUTABLE);
221   CHECK(s != NULL);
222
223   CHECK(s->Setup());
224
225   while (s->Available() > 0) {
226     s->AllocateRaw(Page::kMaxHeapObjectSize)->ToObjectUnchecked();
227   }
228
229   s->TearDown();
230   delete s;
231   memory_allocator->TearDown();
232   delete memory_allocator;
233 }
234
235
236 TEST(LargeObjectSpace) {
237   v8::V8::Initialize();
238
239   LargeObjectSpace* lo = HEAP->lo_space();
240   CHECK(lo != NULL);
241
242   int lo_size = Page::kPageSize;
243
244   Object* obj = lo->AllocateRaw(lo_size, NOT_EXECUTABLE)->ToObjectUnchecked();
245   CHECK(obj->IsHeapObject());
246
247   HeapObject* ho = HeapObject::cast(obj);
248
249   CHECK(lo->Contains(HeapObject::cast(obj)));
250
251   CHECK(lo->FindObject(ho->address()) == obj);
252
253   CHECK(lo->Contains(ho));
254
255   while (true) {
256     intptr_t available = lo->Available();
257     { MaybeObject* maybe_obj = lo->AllocateRaw(lo_size, NOT_EXECUTABLE);
258       if (!maybe_obj->ToObject(&obj)) break;
259     }
260     CHECK(lo->Available() < available);
261   };
262
263   CHECK(!lo->IsEmpty());
264
265   CHECK(lo->AllocateRaw(lo_size, NOT_EXECUTABLE)->IsFailure());
266 }