// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#include "global-handles.h"
#include "snapshot.h"
#include "cctest.h"

using namespace v8::internal;


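// Extracts the internal Isolate from the v8::Isolate owned by the context.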
static Isolate* GetIsolateFrom(LocalContext* context) {
  return reinterpret_cast<Isolate*>((*context)->GetIsolate());
}


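// Allocates a JSWeakMap with an empty backing ObjectHashTable directly
// through the factory, bypassing the JavaScript constructor.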
static Handle<JSWeakMap> AllocateJSWeakMap(Isolate* isolate) {
  Factory* factory = isolate->factory();
  Heap* heap = isolate->heap();
  Handle<Map> map = factory->NewMap(JS_WEAK_MAP_TYPE, JSWeakMap::kSize);
  Handle<JSObject> weakmap_obj = factory->NewJSObjectFromMap(map);
  Handle<JSWeakMap> weakmap(JSWeakMap::cast(*weakmap_obj));
  // Do not use handles for the hash table, it would make entries strong.
  Object* table_obj = ObjectHashTable::Allocate(heap, 1)->ToObjectChecked();
  ObjectHashTable* table = ObjectHashTable::cast(table_obj);
  weakmap->set_table(table);
  weakmap->set_next(Smi::FromInt(0));
  return weakmap;
}

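// Inserts a key/value pair into the weak map's backing hash table and
// installs the (potentially reallocated) table back on the weak map.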
static void PutIntoWeakMap(Handle<JSWeakMap> weakmap,
                           Handle<JSObject> key,
                           Handle<Object> value) {
  Handle<ObjectHashTable> table = ObjectHashTable::Put(
      Handle<ObjectHashTable>(ObjectHashTable::cast(weakmap->table())),
      Handle<JSObject>(JSObject::cast(*key)),
      value);
  weakmap->set_table(*table);
}

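// Weak-handle callback used by the tests below; it counts its invocations
// and clears the handle that became weak.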
static int NumberOfWeakCalls = 0;
static void WeakPointerCallback(v8::Isolate* isolate,
                                v8::Persistent<v8::Value>* handle,
                                void* id) {
  ASSERT(id == reinterpret_cast<void*>(1234));
  NumberOfWeakCalls++;
  handle->Reset();
}


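// Checks that a weak map entry is retained while its key is strongly held
// through a global handle, and is removed once that handle has been made
// weak and the key has been collected.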
TEST(Weakness) {
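  // Incremental marking is disabled, presumably so that only the explicit
  // full GCs below affect the weak-call and element counts checked here.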
  FLAG_incremental_marking = false;
  LocalContext context;
  Isolate* isolate = GetIsolateFrom(&context);
  Factory* factory = isolate->factory();
  Heap* heap = isolate->heap();
  HandleScope scope(isolate);
  Handle<JSWeakMap> weakmap = AllocateJSWeakMap(isolate);
  GlobalHandles* global_handles = isolate->global_handles();

  // Keep global reference to the key.
  Handle<Object> key;
  {
    HandleScope scope(isolate);
    Handle<Map> map = factory->NewMap(JS_OBJECT_TYPE, JSObject::kHeaderSize);
    Handle<JSObject> object = factory->NewJSObjectFromMap(map);
    key = global_handles->Create(*object);
  }
  CHECK(!global_handles->IsWeak(key.location()));

  // Put entry into weak map.
  {
    HandleScope scope(isolate);
    PutIntoWeakMap(weakmap,
                   Handle<JSObject>(JSObject::cast(*key)),
                   Handle<Smi>(Smi::FromInt(23), isolate));
  }
  CHECK_EQ(1, ObjectHashTable::cast(weakmap->table())->NumberOfElements());

  // Force a full GC.
  heap->CollectAllGarbage(false);
  CHECK_EQ(0, NumberOfWeakCalls);
  CHECK_EQ(1, ObjectHashTable::cast(weakmap->table())->NumberOfElements());
  CHECK_EQ(
      0, ObjectHashTable::cast(weakmap->table())->NumberOfDeletedElements());

  // Make the global reference to the key weak.
  {
    HandleScope scope(isolate);
    global_handles->MakeWeak(key.location(),
                             reinterpret_cast<void*>(1234),
                             &WeakPointerCallback);
  }
  CHECK(global_handles->IsWeak(key.location()));

  // Force a full GC.
  // Perform two consecutive GCs because the first one will only clear
  // weak references whereas the second one will also clear weak maps.
  heap->CollectAllGarbage(false);
  CHECK_EQ(1, NumberOfWeakCalls);
  CHECK_EQ(1, ObjectHashTable::cast(weakmap->table())->NumberOfElements());
  CHECK_EQ(
      0, ObjectHashTable::cast(weakmap->table())->NumberOfDeletedElements());
  heap->CollectAllGarbage(false);
  CHECK_EQ(1, NumberOfWeakCalls);
  CHECK_EQ(0, ObjectHashTable::cast(weakmap->table())->NumberOfElements());
  CHECK_EQ(
      1, ObjectHashTable::cast(weakmap->table())->NumberOfDeletedElements());
}


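// Checks that the backing hash table grows while entries are added and
// shrinks again after the (otherwise unreferenced) keys have been collected.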
TEST(Shrinking) {
  LocalContext context;
  Isolate* isolate = GetIsolateFrom(&context);
  Factory* factory = isolate->factory();
  Heap* heap = isolate->heap();
  HandleScope scope(isolate);
  Handle<JSWeakMap> weakmap = AllocateJSWeakMap(isolate);

  // Check initial capacity.
  CHECK_EQ(32, ObjectHashTable::cast(weakmap->table())->Capacity());

  // Fill up weak map to trigger capacity change.
  {
    HandleScope scope(isolate);
    Handle<Map> map = factory->NewMap(JS_OBJECT_TYPE, JSObject::kHeaderSize);
    for (int i = 0; i < 32; i++) {
      Handle<JSObject> object = factory->NewJSObjectFromMap(map);
      PutIntoWeakMap(weakmap, object, Handle<Smi>(Smi::FromInt(i), isolate));
    }
  }

  // Check increased capacity.
  CHECK_EQ(128, ObjectHashTable::cast(weakmap->table())->Capacity());

  // Force a full GC.
  CHECK_EQ(32, ObjectHashTable::cast(weakmap->table())->NumberOfElements());
  CHECK_EQ(
      0, ObjectHashTable::cast(weakmap->table())->NumberOfDeletedElements());
  heap->CollectAllGarbage(false);
  CHECK_EQ(0, ObjectHashTable::cast(weakmap->table())->NumberOfElements());
  CHECK_EQ(
      32, ObjectHashTable::cast(weakmap->table())->NumberOfDeletedElements());

  // Check shrunk capacity.
  CHECK_EQ(32, ObjectHashTable::cast(weakmap->table())->Capacity());
}


// Test that weak map values on an evacuation candidate which are not reachable
// by other paths are correctly recorded in the slots buffer.
TEST(Regress2060a) {
  FLAG_always_compact = true;
  LocalContext context;
  Isolate* isolate = GetIsolateFrom(&context);
  Factory* factory = isolate->factory();
  Heap* heap = isolate->heap();
  HandleScope scope(isolate);
  Handle<JSFunction> function =
      factory->NewFunction(factory->function_string(), factory->null_value());
  Handle<JSObject> key = factory->NewJSObject(function);
  Handle<JSWeakMap> weakmap = AllocateJSWeakMap(isolate);

  // Start second old-space page so that values land on evacuation candidate.
  Page* first_page = heap->old_pointer_space()->anchor()->next_page();
  factory->NewFixedArray(900 * KB / kPointerSize, TENURED);

  // Fill up weak map with values on an evacuation candidate.
  {
    HandleScope scope(isolate);
    for (int i = 0; i < 32; i++) {
      Handle<JSObject> object = factory->NewJSObject(function, TENURED);
      CHECK(!heap->InNewSpace(object->address()));
      CHECK(!first_page->Contains(object->address()));
      PutIntoWeakMap(weakmap, key, object);
    }
  }

  // Force compacting garbage collection.
  CHECK(FLAG_always_compact);
  heap->CollectAllGarbage(Heap::kNoGCFlags);
}


// Test that weak map keys on an evacuation candidate which are reachable by
// other strong paths are correctly recorded in the slots buffer.
TEST(Regress2060b) {
  FLAG_always_compact = true;
#ifdef VERIFY_HEAP
  FLAG_verify_heap = true;
#endif

  LocalContext context;
  Isolate* isolate = GetIsolateFrom(&context);
  Factory* factory = isolate->factory();
  Heap* heap = isolate->heap();
  HandleScope scope(isolate);
  Handle<JSFunction> function =
      factory->NewFunction(factory->function_string(), factory->null_value());

  // Start second old-space page so that keys land on evacuation candidate.
  Page* first_page = heap->old_pointer_space()->anchor()->next_page();
  factory->NewFixedArray(900 * KB / kPointerSize, TENURED);

  // Fill up weak map with keys on an evacuation candidate.
  Handle<JSObject> keys[32];
  for (int i = 0; i < 32; i++) {
    keys[i] = factory->NewJSObject(function, TENURED);
    CHECK(!heap->InNewSpace(keys[i]->address()));
    CHECK(!first_page->Contains(keys[i]->address()));
  }
  Handle<JSWeakMap> weakmap = AllocateJSWeakMap(isolate);
  for (int i = 0; i < 32; i++) {
    PutIntoWeakMap(weakmap,
                   keys[i],
                   Handle<Smi>(Smi::FromInt(i), isolate));
  }

  // Force compacting garbage collection. The subsequent collections are used
  // to verify that key references were actually updated.
  CHECK(FLAG_always_compact);
  heap->CollectAllGarbage(Heap::kNoGCFlags);
  heap->CollectAllGarbage(Heap::kNoGCFlags);
  heap->CollectAllGarbage(Heap::kNoGCFlags);
}