Upstream version 7.36.149.0
[platform/framework/web/crosswalk.git] / src / v8 / test / cctest / test-weakmaps.cc
1 // Copyright 2011 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
4 // met:
5 //
6 //     * Redistributions of source code must retain the above copyright
7 //       notice, this list of conditions and the following disclaimer.
8 //     * Redistributions in binary form must reproduce the above
9 //       copyright notice, this list of conditions and the following
10 //       disclaimer in the documentation and/or other materials provided
11 //       with the distribution.
12 //     * Neither the name of Google Inc. nor the names of its
13 //       contributors may be used to endorse or promote products derived
14 //       from this software without specific prior written permission.
15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
28 #include <utility>
29
30 #include "v8.h"
31
32 #include "global-handles.h"
33 #include "snapshot.h"
34 #include "cctest.h"
35
36 using namespace v8::internal;
37
38
39 static Isolate* GetIsolateFrom(LocalContext* context) {
40   return reinterpret_cast<Isolate*>((*context)->GetIsolate());
41 }
42
43
44 static Handle<JSWeakMap> AllocateJSWeakMap(Isolate* isolate) {
45   Factory* factory = isolate->factory();
46   Handle<Map> map = factory->NewMap(JS_WEAK_MAP_TYPE, JSWeakMap::kSize);
47   Handle<JSObject> weakmap_obj = factory->NewJSObjectFromMap(map);
48   Handle<JSWeakMap> weakmap(JSWeakMap::cast(*weakmap_obj));
49   // Do not use handles for the hash table, it would make entries strong.
50   Handle<ObjectHashTable> table = ObjectHashTable::New(isolate, 1);
51   weakmap->set_table(*table);
52   weakmap->set_next(Smi::FromInt(0));
53   return weakmap;
54 }
55
56 static void PutIntoWeakMap(Handle<JSWeakMap> weakmap,
57                            Handle<JSObject> key,
58                            Handle<Object> value) {
59   Handle<ObjectHashTable> table = ObjectHashTable::Put(
60       Handle<ObjectHashTable>(ObjectHashTable::cast(weakmap->table())),
61       Handle<JSObject>(JSObject::cast(*key)),
62       value);
63   weakmap->set_table(*table);
64 }
65
66 static int NumberOfWeakCalls = 0;
67 static void WeakPointerCallback(
68     const v8::WeakCallbackData<v8::Value, void>& data) {
69   std::pair<v8::Persistent<v8::Value>*, int>* p =
70       reinterpret_cast<std::pair<v8::Persistent<v8::Value>*, int>*>(
71           data.GetParameter());
72   ASSERT_EQ(1234, p->second);
73   NumberOfWeakCalls++;
74   p->first->Reset();
75 }
76
77
78 TEST(Weakness) {
79   FLAG_incremental_marking = false;
80   LocalContext context;
81   Isolate* isolate = GetIsolateFrom(&context);
82   Factory* factory = isolate->factory();
83   Heap* heap = isolate->heap();
84   HandleScope scope(isolate);
85   Handle<JSWeakMap> weakmap = AllocateJSWeakMap(isolate);
86   GlobalHandles* global_handles = isolate->global_handles();
87
88   // Keep global reference to the key.
89   Handle<Object> key;
90   {
91     HandleScope scope(isolate);
92     Handle<Map> map = factory->NewMap(JS_OBJECT_TYPE, JSObject::kHeaderSize);
93     Handle<JSObject> object = factory->NewJSObjectFromMap(map);
94     key = global_handles->Create(*object);
95   }
96   CHECK(!global_handles->IsWeak(key.location()));
97
98   // Put entry into weak map.
99   {
100     HandleScope scope(isolate);
101     PutIntoWeakMap(weakmap,
102                    Handle<JSObject>(JSObject::cast(*key)),
103                    Handle<Smi>(Smi::FromInt(23), isolate));
104   }
105   CHECK_EQ(1, ObjectHashTable::cast(weakmap->table())->NumberOfElements());
106
107   // Force a full GC.
108   heap->CollectAllGarbage(false);
109   CHECK_EQ(0, NumberOfWeakCalls);
110   CHECK_EQ(1, ObjectHashTable::cast(weakmap->table())->NumberOfElements());
111   CHECK_EQ(
112       0, ObjectHashTable::cast(weakmap->table())->NumberOfDeletedElements());
113
114   // Make the global reference to the key weak.
115   {
116     HandleScope scope(isolate);
117     std::pair<Handle<Object>*, int> handle_and_id(&key, 1234);
118     GlobalHandles::MakeWeak(key.location(),
119                             reinterpret_cast<void*>(&handle_and_id),
120                             &WeakPointerCallback);
121   }
122   CHECK(global_handles->IsWeak(key.location()));
123
124   // Force a full GC.
125   // Perform two consecutive GCs because the first one will only clear
126   // weak references whereas the second one will also clear weak maps.
127   heap->CollectAllGarbage(false);
128   CHECK_EQ(1, NumberOfWeakCalls);
129   CHECK_EQ(1, ObjectHashTable::cast(weakmap->table())->NumberOfElements());
130   CHECK_EQ(
131       0, ObjectHashTable::cast(weakmap->table())->NumberOfDeletedElements());
132   heap->CollectAllGarbage(false);
133   CHECK_EQ(1, NumberOfWeakCalls);
134   CHECK_EQ(0, ObjectHashTable::cast(weakmap->table())->NumberOfElements());
135   CHECK_EQ(
136       1, ObjectHashTable::cast(weakmap->table())->NumberOfDeletedElements());
137 }
138
139
// Checks that the weak map's backing ObjectHashTable grows when it is
// filled past its load limit and shrinks back after a GC clears all of
// its (unreachable) entries.
TEST(Shrinking) {
  LocalContext context;
  Isolate* isolate = GetIsolateFrom(&context);
  Factory* factory = isolate->factory();
  Heap* heap = isolate->heap();
  HandleScope scope(isolate);
  Handle<JSWeakMap> weakmap = AllocateJSWeakMap(isolate);

  // Check initial capacity.
  CHECK_EQ(32, ObjectHashTable::cast(weakmap->table())->Capacity());

  // Fill up weak map to trigger capacity change. The keys are only
  // reachable via the inner HandleScope, so after it closes nothing but
  // the weak map refers to them.
  {
    HandleScope scope(isolate);
    Handle<Map> map = factory->NewMap(JS_OBJECT_TYPE, JSObject::kHeaderSize);
    for (int i = 0; i < 32; i++) {
      Handle<JSObject> object = factory->NewJSObjectFromMap(map);
      PutIntoWeakMap(weakmap, object, Handle<Smi>(Smi::FromInt(i), isolate));
    }
  }

  // Check increased capacity.
  CHECK_EQ(128, ObjectHashTable::cast(weakmap->table())->Capacity());

  // Force a full GC. All 32 keys are unreachable by now, so the collection
  // turns every live entry into a deleted one.
  CHECK_EQ(32, ObjectHashTable::cast(weakmap->table())->NumberOfElements());
  CHECK_EQ(
      0, ObjectHashTable::cast(weakmap->table())->NumberOfDeletedElements());
  heap->CollectAllGarbage(false);
  CHECK_EQ(0, ObjectHashTable::cast(weakmap->table())->NumberOfElements());
  CHECK_EQ(
      32, ObjectHashTable::cast(weakmap->table())->NumberOfDeletedElements());

  // Check shrunk capacity.
  CHECK_EQ(32, ObjectHashTable::cast(weakmap->table())->Capacity());
}
176
177
// Test that weak map values on an evacuation candidate which are not reachable
// by other paths are correctly recorded in the slots buffer.
TEST(Regress2060a) {
  if (i::FLAG_never_compact) return;
  FLAG_always_compact = true;
  LocalContext context;
  Isolate* isolate = GetIsolateFrom(&context);
  Factory* factory = isolate->factory();
  Heap* heap = isolate->heap();
  HandleScope scope(isolate);
  Handle<JSFunction> function = factory->NewFunctionWithPrototype(
      factory->function_string(), factory->null_value());
  // |key| stays strongly held by this handle scope, so the weak map entries
  // remain live across the GC below.
  Handle<JSObject> key = factory->NewJSObject(function);
  Handle<JSWeakMap> weakmap = AllocateJSWeakMap(isolate);

  // Start second old-space page so that values land on evacuation candidate.
  // The 900KB tenured filler presumably pushes subsequent tenured
  // allocations off the first page — TODO confirm against page size.
  Page* first_page = heap->old_pointer_space()->anchor()->next_page();
  factory->NewFixedArray(900 * KB / kPointerSize, TENURED);

  // Fill up weak map with values on an evacuation candidate. The CHECKs
  // assert the setup assumption: values are tenured and not on first_page.
  {
    HandleScope scope(isolate);
    for (int i = 0; i < 32; i++) {
      Handle<JSObject> object = factory->NewJSObject(function, TENURED);
      CHECK(!heap->InNewSpace(object->address()));
      CHECK(!first_page->Contains(object->address()));
      PutIntoWeakMap(weakmap, key, object);
    }
  }

  // Force compacting garbage collection. Passing the test means no crash /
  // stale slot when the values are evacuated.
  CHECK(FLAG_always_compact);
  heap->CollectAllGarbage(Heap::kNoGCFlags);
}
212
213
// Test that weak map keys on an evacuation candidate which are reachable by
// other strong paths are correctly recorded in the slots buffer.
TEST(Regress2060b) {
  if (i::FLAG_never_compact) return;
  FLAG_always_compact = true;
#ifdef VERIFY_HEAP
  FLAG_verify_heap = true;
#endif

  LocalContext context;
  Isolate* isolate = GetIsolateFrom(&context);
  Factory* factory = isolate->factory();
  Heap* heap = isolate->heap();
  HandleScope scope(isolate);
  Handle<JSFunction> function = factory->NewFunctionWithPrototype(
      factory->function_string(), factory->null_value());

  // Start second old-space page so that keys land on evacuation candidate.
  // The 900KB tenured filler presumably pushes subsequent tenured
  // allocations off the first page — TODO confirm against page size.
  Page* first_page = heap->old_pointer_space()->anchor()->next_page();
  factory->NewFixedArray(900 * KB / kPointerSize, TENURED);

  // Fill up weak map with keys on an evacuation candidate. The keys array
  // itself keeps the keys strongly reachable throughout the test.
  Handle<JSObject> keys[32];
  for (int i = 0; i < 32; i++) {
    keys[i] = factory->NewJSObject(function, TENURED);
    CHECK(!heap->InNewSpace(keys[i]->address()));
    CHECK(!first_page->Contains(keys[i]->address()));
  }
  Handle<JSWeakMap> weakmap = AllocateJSWeakMap(isolate);
  for (int i = 0; i < 32; i++) {
    PutIntoWeakMap(weakmap,
                   keys[i],
                   Handle<Smi>(Smi::FromInt(i), isolate));
  }

  // Force compacting garbage collection. The subsequent collections are used
  // to verify that key references were actually updated.
  CHECK(FLAG_always_compact);
  heap->CollectAllGarbage(Heap::kNoGCFlags);
  heap->CollectAllGarbage(Heap::kNoGCFlags);
  heap->CollectAllGarbage(Heap::kNoGCFlags);
}