// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include <utility>

#include "src/v8.h"

#include "src/global-handles.h"
#include "src/snapshot.h"
#include "test/cctest/cctest.h"

using namespace v8::internal;


static Isolate* GetIsolateFrom(LocalContext* context) {
  return reinterpret_cast<Isolate*>((*context)->GetIsolate());
}


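// Allocates a JSWeakMap without going through its JS constructor: a fresh
// JS_WEAK_MAP_TYPE map plus an empty ObjectHashTable as the backing store.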
static Handle<JSWeakMap> AllocateJSWeakMap(Isolate* isolate) {
  Factory* factory = isolate->factory();
  Handle<Map> map = factory->NewMap(JS_WEAK_MAP_TYPE, JSWeakMap::kSize);
  Handle<JSObject> weakmap_obj = factory->NewJSObjectFromMap(map);
  Handle<JSWeakMap> weakmap(JSWeakMap::cast(*weakmap_obj));
  // Do not leak handles for the hash table; that would make its entries strong.
  {
    HandleScope scope(isolate);
    Handle<ObjectHashTable> table = ObjectHashTable::New(isolate, 1);
    weakmap->set_table(*table);
  }
  return weakmap;
}

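// ObjectHashTable::Put may allocate a new, larger table; the returned handle
// is therefore written back so the weak map always points at the live table.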
static void PutIntoWeakMap(Handle<JSWeakMap> weakmap,
                           Handle<JSObject> key,
                           Handle<Object> value) {
  Handle<ObjectHashTable> table = ObjectHashTable::Put(
      Handle<ObjectHashTable>(ObjectHashTable::cast(weakmap->table())),
      Handle<JSObject>(JSObject::cast(*key)),
      value);
  weakmap->set_table(*table);
}

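// Weak callback for the global handles created below: checks that the id
// stored next to the handle round-trips, counts the invocation so the tests
// can assert how many handles were reclaimed, and clears the handle itself.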
static int NumberOfWeakCalls = 0;
static void WeakPointerCallback(
    const v8::WeakCallbackData<v8::Value, void>& data) {
  std::pair<v8::Persistent<v8::Value>*, int>* p =
      reinterpret_cast<std::pair<v8::Persistent<v8::Value>*, int>*>(
          data.GetParameter());
  DCHECK_EQ(1234, p->second);
  NumberOfWeakCalls++;
  p->first->Reset();
}


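// Checks that a weak map entry is dropped only after its key becomes otherwise
// unreachable: the entry survives a full GC while a strong global handle to
// the key exists, and is cleared (and eventually removed from the table) once
// that handle has been made weak. Incremental marking is disabled, presumably
// so that the exact per-GC expectations below stay deterministic.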
TEST(Weakness) {
  FLAG_incremental_marking = false;
  LocalContext context;
  Isolate* isolate = GetIsolateFrom(&context);
  Factory* factory = isolate->factory();
  Heap* heap = isolate->heap();
  HandleScope scope(isolate);
  Handle<JSWeakMap> weakmap = AllocateJSWeakMap(isolate);
  GlobalHandles* global_handles = isolate->global_handles();

  // Keep global reference to the key.
  Handle<Object> key;
  {
    HandleScope scope(isolate);
    Handle<Map> map = factory->NewMap(JS_OBJECT_TYPE, JSObject::kHeaderSize);
    Handle<JSObject> object = factory->NewJSObjectFromMap(map);
    key = global_handles->Create(*object);
  }
  CHECK(!global_handles->IsWeak(key.location()));

  // Put entry into weak map.
  {
    HandleScope scope(isolate);
    PutIntoWeakMap(weakmap,
                   Handle<JSObject>(JSObject::cast(*key)),
                   Handle<Smi>(Smi::FromInt(23), isolate));
  }
  CHECK_EQ(1, ObjectHashTable::cast(weakmap->table())->NumberOfElements());

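  // The key is still strongly reachable through the global handle, so the GC
  // below must neither fire the weak callback nor clear the weak map entry.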
  // Force a full GC.
  heap->CollectAllGarbage(Heap::kNoGCFlags);
  CHECK_EQ(0, NumberOfWeakCalls);
  CHECK_EQ(1, ObjectHashTable::cast(weakmap->table())->NumberOfElements());
  CHECK_EQ(
      0, ObjectHashTable::cast(weakmap->table())->NumberOfDeletedElements());

  // Make the global reference to the key weak.
  {
    HandleScope scope(isolate);
    std::pair<Handle<Object>*, int> handle_and_id(&key, 1234);
    GlobalHandles::MakeWeak(key.location(),
                            reinterpret_cast<void*>(&handle_and_id),
                            &WeakPointerCallback);
  }
  CHECK(global_handles->IsWeak(key.location()));

  // Force a full GC.
  // Perform two consecutive GCs because the first one will only clear
  // weak references whereas the second one will also clear weak maps.
  heap->CollectAllGarbage(Heap::kNoGCFlags);
  CHECK_EQ(1, NumberOfWeakCalls);
  CHECK_EQ(1, ObjectHashTable::cast(weakmap->table())->NumberOfElements());
  CHECK_EQ(
      0, ObjectHashTable::cast(weakmap->table())->NumberOfDeletedElements());
  heap->CollectAllGarbage(Heap::kNoGCFlags);
  CHECK_EQ(1, NumberOfWeakCalls);
  CHECK_EQ(0, ObjectHashTable::cast(weakmap->table())->NumberOfElements());
  CHECK_EQ(
      1, ObjectHashTable::cast(weakmap->table())->NumberOfDeletedElements());
}


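// Exercises growth and shrink-on-GC of the weak map's backing ObjectHashTable:
// 32 insertions grow it from capacity 32 to 128, and a full GC that clears the
// now-unreachable keys shrinks it back to 32.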
TEST(Shrinking) {
  LocalContext context;
  Isolate* isolate = GetIsolateFrom(&context);
  Factory* factory = isolate->factory();
  Heap* heap = isolate->heap();
  HandleScope scope(isolate);
  Handle<JSWeakMap> weakmap = AllocateJSWeakMap(isolate);

  // Check initial capacity.
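  // (The table was created with room for a single element, but hash tables
  // are allocated with a minimum capacity, which is what is asserted here.)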
  CHECK_EQ(32, ObjectHashTable::cast(weakmap->table())->Capacity());

  // Fill up weak map to trigger capacity change.
  {
    HandleScope scope(isolate);
    Handle<Map> map = factory->NewMap(JS_OBJECT_TYPE, JSObject::kHeaderSize);
    for (int i = 0; i < 32; i++) {
      Handle<JSObject> object = factory->NewJSObjectFromMap(map);
      PutIntoWeakMap(weakmap, object, Handle<Smi>(Smi::FromInt(i), isolate));
    }
  }

  // Check increased capacity.
  CHECK_EQ(128, ObjectHashTable::cast(weakmap->table())->Capacity());

  // Force a full GC.
  CHECK_EQ(32, ObjectHashTable::cast(weakmap->table())->NumberOfElements());
  CHECK_EQ(
      0, ObjectHashTable::cast(weakmap->table())->NumberOfDeletedElements());
  heap->CollectAllGarbage(Heap::kNoGCFlags);
  CHECK_EQ(0, ObjectHashTable::cast(weakmap->table())->NumberOfElements());
  CHECK_EQ(
      32, ObjectHashTable::cast(weakmap->table())->NumberOfDeletedElements());

  // Check shrunk capacity.
  CHECK_EQ(32, ObjectHashTable::cast(weakmap->table())->Capacity());
}


// Test that weak map values on an evacuation candidate which are not reachable
// by other paths are correctly recorded in the slots buffer.
TEST(Regress2060a) {
  if (i::FLAG_never_compact) return;
  FLAG_always_compact = true;
  LocalContext context;
  Isolate* isolate = GetIsolateFrom(&context);
  Factory* factory = isolate->factory();
  Heap* heap = isolate->heap();
  HandleScope scope(isolate);
  Handle<JSFunction> function = factory->NewFunction(
      factory->function_string());
  Handle<JSObject> key = factory->NewJSObject(function);
  Handle<JSWeakMap> weakmap = AllocateJSWeakMap(isolate);

  // Start second old-space page so that values land on evacuation candidate.
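  // (The ~900 KB filler below is presumably sized to exhaust the roughly 1 MB
  // first old-space page, so that the objects allocated afterwards land on a
  // fresh page; the CHECKs in the loop verify they are not on the first page.)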
  Page* first_page = heap->old_pointer_space()->anchor()->next_page();
  factory->NewFixedArray(900 * KB / kPointerSize, TENURED);

  // Fill up weak map with values on an evacuation candidate.
  {
    HandleScope scope(isolate);
    for (int i = 0; i < 32; i++) {
      Handle<JSObject> object = factory->NewJSObject(function, TENURED);
      CHECK(!heap->InNewSpace(object->address()));
      CHECK(!first_page->Contains(object->address()));
      PutIntoWeakMap(weakmap, key, object);
    }
  }

  // Force compacting garbage collection.
  CHECK(FLAG_always_compact);
  heap->CollectAllGarbage(Heap::kNoGCFlags);
}


// Test that weak map keys on an evacuation candidate which are reachable by
// other strong paths are correctly recorded in the slots buffer.
TEST(Regress2060b) {
  if (i::FLAG_never_compact) return;
  FLAG_always_compact = true;
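  // When heap verification is compiled in, enable it so the GCs below also
  // verify the heap, which should catch a key slot left stale by evacuation.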
#ifdef VERIFY_HEAP
  FLAG_verify_heap = true;
#endif

  LocalContext context;
  Isolate* isolate = GetIsolateFrom(&context);
  Factory* factory = isolate->factory();
  Heap* heap = isolate->heap();
  HandleScope scope(isolate);
  Handle<JSFunction> function = factory->NewFunction(
      factory->function_string());

  // Start second old-space page so that keys land on evacuation candidate.
  Page* first_page = heap->old_pointer_space()->anchor()->next_page();
  factory->NewFixedArray(900 * KB / kPointerSize, TENURED);

  // Fill up weak map with keys on an evacuation candidate.
  Handle<JSObject> keys[32];
  for (int i = 0; i < 32; i++) {
    keys[i] = factory->NewJSObject(function, TENURED);
    CHECK(!heap->InNewSpace(keys[i]->address()));
    CHECK(!first_page->Contains(keys[i]->address()));
  }
  Handle<JSWeakMap> weakmap = AllocateJSWeakMap(isolate);
  for (int i = 0; i < 32; i++) {
    PutIntoWeakMap(weakmap,
                   keys[i],
                   Handle<Smi>(Smi::FromInt(i), isolate));
  }

  // Force compacting garbage collection. The subsequent collections are used
  // to verify that key references were actually updated.
  CHECK(FLAG_always_compact);
  heap->CollectAllGarbage(Heap::kNoGCFlags);
  heap->CollectAllGarbage(Heap::kNoGCFlags);
  heap->CollectAllGarbage(Heap::kNoGCFlags);
}


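// Regression test: allocate a weak map, mark it black via incremental marking,
// drop the last handle to it, and then abort incremental marking; see the
// comment at the end of the test for why this combination is interesting.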
TEST(Regress399527) {
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  {
    HandleScope scope(isolate);
    AllocateJSWeakMap(isolate);
    SimulateIncrementalMarking(heap);
  }
  // The weak map is marked black here but leaving the handle scope will make
  // the object unreachable. Aborting incremental marking will clear all the
  // marking bits which makes the weak map garbage.
  heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
}