1 // Copyright 2006-2008 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
4 // met:
5 //
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
28 #include <stdlib.h>
29
30 #include "v8.h"
31
32 #include "global-handles.h"
33 #include "snapshot.h"
34 #include "top.h"
35 #include "cctest.h"
36
37 using namespace v8::internal;
38
// Shared test context; created lazily by InitializeVM() and reused by
// every test in this file.
static v8::Persistent<v8::Context> env;
40
InitializeVM()41 static void InitializeVM() {
42 if (env.IsEmpty()) env = v8::Context::New();
43 v8::HandleScope scope;
44 env->Enter();
45 }
46
47
// Fills the marking stack to capacity with synthetic heap-object
// addresses, then pops everything back off, verifying LIFO order and
// that the full round-trip ends where it started.
TEST(MarkingStack) {
  int mem_size = 20 * kPointerSize;
  // Use mem_size here; the original duplicated the 20*kPointerSize
  // expression, which could silently diverge from mem_size.
  byte* mem = NewArray<byte>(mem_size);
  Address low = reinterpret_cast<Address>(mem);
  Address high = low + mem_size;
  MarkingStack s;
  s.Initialize(low, high);

  Address address = NULL;
  while (!s.is_full()) {
    s.Push(HeapObject::FromAddress(address));
    address += kPointerSize;
  }

  while (!s.is_empty()) {
    Address value = s.Pop()->address();
    address -= kPointerSize;
    CHECK_EQ(address, value);
  }

  // Every pushed entry must have been popped.
  CHECK_EQ(NULL, address);
  DeleteArray(mem);
}
71
72
// Checks that a new-space array is promoted to old space by a
// compacting mark-compact collection.
TEST(Promotion) {
  // Ensure that we get a compacting collection so that objects are promoted
  // from new space.  Flags must be set before the heap is configured.
  FLAG_gc_global = true;
  FLAG_always_compact = true;
  Heap::ConfigureHeap(2*256*KB, 4*MB);

  InitializeVM();

  v8::HandleScope sc;

  // Allocate a fixed array in the new space.  Size is chosen well below
  // the paged-space object limit so promotion can succeed.
  int array_size =
      (Heap::MaxObjectSizeInPagedSpace() - FixedArray::kHeaderSize) /
      (kPointerSize * 4);
  Object* obj = Heap::AllocateFixedArray(array_size);
  CHECK(!obj->IsFailure());

  Handle<FixedArray> array(FixedArray::cast(obj));

  // Array should be in the new space.
  CHECK(Heap::InSpace(*array, NEW_SPACE));

  // Call the m-c collector, so array becomes an old object.
  CHECK(Heap::CollectGarbage(0, OLD_POINTER_SPACE));

  // Array now sits in the old space
  CHECK(Heap::InSpace(*array, OLD_POINTER_SPACE));
}
102
103
// Checks that a new-space array is NOT promoted when old space has no
// room to receive it, and that the mark-compact GC still succeeds.
TEST(NoPromotion) {
  Heap::ConfigureHeap(2*256*KB, 4*MB);

  // Test the situation that some objects in new space are promoted to
  // the old space
  InitializeVM();

  v8::HandleScope sc;

  // Do a mark compact GC to shrink the heap.
  CHECK(Heap::CollectGarbage(0, OLD_POINTER_SPACE));

  // Allocate a big Fixed array in the new space.
  int size = (Heap::MaxObjectSizeInPagedSpace() - FixedArray::kHeaderSize) /
      kPointerSize;
  Object* obj = Heap::AllocateFixedArray(size);
  // Guard the cast below against an allocation failure, consistent with
  // the identical check in TEST(Promotion).
  CHECK(!obj->IsFailure());

  Handle<FixedArray> array(FixedArray::cast(obj));

  // Array still stays in the new space.
  CHECK(Heap::InSpace(*array, NEW_SPACE));

  // Allocate objects in the old space until out of memory.
  FixedArray* host = *array;
  while (true) {
    // Renamed from 'obj' to avoid shadowing the outer local.
    Object* next = Heap::AllocateFixedArray(100, TENURED);
    if (next->IsFailure()) break;

    // Link each new array from the previous one to keep it reachable.
    host->set(0, next);
    host = FixedArray::cast(next);
  }

  // Call mark compact GC, and it should pass.
  CHECK(Heap::CollectGarbage(0, OLD_POINTER_SPACE));

  // array should not be promoted because the old space is full.
  CHECK(Heap::InSpace(*array, NEW_SPACE));
}
142
143
// End-to-end mark-compact exercise: collect an empty heap, collect after
// exhausting new space, collect after exhausting map space, then verify
// that objects reachable from the global object survive full GCs.
TEST(MarkCompactCollector) {
  InitializeVM();

  v8::HandleScope sc;
  // call mark-compact when heap is empty
  CHECK(Heap::CollectGarbage(0, OLD_POINTER_SPACE));

  // keep allocating garbage in new space until it fails
  const int ARRAY_SIZE = 100;
  Object* array;
  do {
    array = Heap::AllocateFixedArray(ARRAY_SIZE);
  } while (!array->IsFailure());
  CHECK(Heap::CollectGarbage(0, NEW_SPACE));

  // After the collection the same allocation must succeed again.
  array = Heap::AllocateFixedArray(ARRAY_SIZE);
  CHECK(!array->IsFailure());

  // keep allocating maps until it fails
  Object* mapp;
  do {
    mapp = Heap::AllocateMap(JS_OBJECT_TYPE, JSObject::kHeaderSize);
  } while (!mapp->IsFailure());
  CHECK(Heap::CollectGarbage(0, MAP_SPACE));
  mapp = Heap::AllocateMap(JS_OBJECT_TYPE, JSObject::kHeaderSize);
  CHECK(!mapp->IsFailure());

  // allocate a garbage
  String* func_name = String::cast(Heap::LookupAsciiSymbol("theFunction"));
  SharedFunctionInfo* function_share =
      SharedFunctionInfo::cast(Heap::AllocateSharedFunctionInfo(func_name));
  JSFunction* function =
      JSFunction::cast(Heap::AllocateFunction(*Top::function_map(),
                                              function_share,
                                              Heap::undefined_value()));
  Map* initial_map =
      Map::cast(Heap::AllocateMap(JS_OBJECT_TYPE, JSObject::kHeaderSize));
  function->set_initial_map(initial_map);
  // Anchor the function in the global object so it survives GC; the
  // JSObject allocated below is deliberately left unreferenced (garbage).
  Top::context()->global()->SetProperty(func_name, function, NONE);

  JSObject* obj = JSObject::cast(Heap::AllocateJSObject(function));
  CHECK(Heap::CollectGarbage(0, OLD_POINTER_SPACE));

  // The function must still be reachable via its global property.
  func_name = String::cast(Heap::LookupAsciiSymbol("theFunction"));
  CHECK(Top::context()->global()->HasLocalProperty(func_name));
  Object* func_value = Top::context()->global()->GetProperty(func_name);
  CHECK(func_value->IsJSFunction());
  function = JSFunction::cast(func_value);

  // Create an object reachable from the global object and give it a
  // Smi-valued property to check after the next collection.
  obj = JSObject::cast(Heap::AllocateJSObject(function));
  String* obj_name = String::cast(Heap::LookupAsciiSymbol("theObject"));
  Top::context()->global()->SetProperty(obj_name, obj, NONE);
  String* prop_name = String::cast(Heap::LookupAsciiSymbol("theSlot"));
  obj->SetProperty(prop_name, Smi::FromInt(23), NONE);

  CHECK(Heap::CollectGarbage(0, OLD_POINTER_SPACE));

  // The object and its property value must have survived the GC.
  obj_name = String::cast(Heap::LookupAsciiSymbol("theObject"));
  CHECK(Top::context()->global()->HasLocalProperty(obj_name));
  CHECK(Top::context()->global()->GetProperty(obj_name)->IsJSObject());
  obj = JSObject::cast(Top::context()->global()->GetProperty(obj_name));
  prop_name = String::cast(Heap::LookupAsciiSymbol("theSlot"));
  CHECK(obj->GetProperty(prop_name) == Smi::FromInt(23));
}
208
209
CreateMap()210 static Handle<Map> CreateMap() {
211 return Factory::NewMap(JS_OBJECT_TYPE, JSObject::kHeaderSize);
212 }
213
214
// Checks that compacting the map space restores map-pointer
// encodability after the space grows past the encodable limit.
TEST(MapCompact) {
  // Small map space so the encodable limit is reached quickly.
  FLAG_max_map_space_pages = 16;
  InitializeVM();

  {
    v8::HandleScope sc;
    // keep allocating maps while pointers are still encodable and thus
    // mark compact is permitted.
    Handle<JSObject> root = Factory::NewJSObjectFromMap(CreateMap());
    do {
      Handle<Map> map = CreateMap();
      map->set_prototype(*root);
      root = Factory::NewJSObjectFromMap(map);
    } while (Heap::map_space()->MapPointersEncodable());
  }
  // Now, as we don't have any handles to just allocated maps, we should
  // be able to trigger map compaction.
  // To give an additional chance to fail, try to force compaction which
  // should be impossible right now.
  Heap::CollectAllGarbage(true);
  // And now map pointers should be encodable again.
  CHECK(Heap::map_space()->MapPointersEncodable());
}
238
239
// Counters bumped by the GC prologue/epilogue callbacks below; the
// GCCallback test verifies they stay in balanced pairs.
static int gc_starts = 0;
static int gc_ends = 0;
242
GCPrologueCallbackFunc()243 static void GCPrologueCallbackFunc() {
244 CHECK(gc_starts == gc_ends);
245 gc_starts++;
246 }
247
248
GCEpilogueCallbackFunc()249 static void GCEpilogueCallbackFunc() {
250 CHECK(gc_starts == gc_ends + 1);
251 gc_ends++;
252 }
253
254
// Checks that global GC prologue/epilogue callbacks fire for full GCs
// but not for scavenges.
TEST(GCCallback) {
  InitializeVM();

  Heap::SetGlobalGCPrologueCallback(&GCPrologueCallbackFunc);
  Heap::SetGlobalGCEpilogueCallback(&GCEpilogueCallbackFunc);

  // Scavenge does not call GC callback functions.
  Heap::PerformScavenge();

  CHECK_EQ(0, gc_starts);
  CHECK_EQ(gc_ends, gc_starts);

  // A full collection must invoke both callbacks exactly once.
  CHECK(Heap::CollectGarbage(0, OLD_POINTER_SPACE));
  CHECK_EQ(1, gc_starts);
  CHECK_EQ(gc_ends, gc_starts);
}
271
272
// Number of weak-handle callbacks fired; reset by each test that uses it.
static int NumberOfWeakCalls = 0;
// Weak-handle callback: only counts invocations; handle and id are ignored.
static void WeakPointerCallback(v8::Persistent<v8::Value> handle, void* id) {
  NumberOfWeakCalls++;
}
277
// Checks object-group semantics for weak global handles: grouped weak
// handles stay alive while any member is reachable from a root, and all
// die together once the root itself is weakened.
TEST(ObjectGroups) {
  InitializeVM();

  NumberOfWeakCalls = 0;
  v8::HandleScope handle_scope;

  // Group 1: two weak global handles to fresh arrays.
  Handle<Object> g1s1 =
      GlobalHandles::Create(Heap::AllocateFixedArray(1));
  Handle<Object> g1s2 =
      GlobalHandles::Create(Heap::AllocateFixedArray(1));
  GlobalHandles::MakeWeak(g1s1.location(),
                          reinterpret_cast<void*>(1234),
                          &WeakPointerCallback);
  GlobalHandles::MakeWeak(g1s2.location(),
                          reinterpret_cast<void*>(1234),
                          &WeakPointerCallback);

  // Group 2: likewise.
  Handle<Object> g2s1 =
      GlobalHandles::Create(Heap::AllocateFixedArray(1));
  Handle<Object> g2s2 =
      GlobalHandles::Create(Heap::AllocateFixedArray(1));
  GlobalHandles::MakeWeak(g2s1.location(),
                          reinterpret_cast<void*>(1234),
                          &WeakPointerCallback);
  GlobalHandles::MakeWeak(g2s2.location(),
                          reinterpret_cast<void*>(1234),
                          &WeakPointerCallback);

  Handle<Object> root = GlobalHandles::Create(*g1s1);  // make a root.

  // Connect group 1 and 2, make a cycle.
  Handle<FixedArray>::cast(g1s2)->set(0, *g2s2);
  Handle<FixedArray>::cast(g2s1)->set(0, *g1s1);

  {
    Object** g1_objects[] = { g1s1.location(), g1s2.location() };
    Object** g2_objects[] = { g2s1.location(), g2s2.location() };
    GlobalHandles::AddGroup(g1_objects, 2);
    GlobalHandles::AddGroup(g2_objects, 2);
  }
  // Do a full GC
  CHECK(Heap::CollectGarbage(0, OLD_POINTER_SPACE));

  // All object should be alive.
  CHECK_EQ(0, NumberOfWeakCalls);

  // Weaken the root.
  GlobalHandles::MakeWeak(root.location(),
                          reinterpret_cast<void*>(1234),
                          &WeakPointerCallback);

  // Groups are deleted, rebuild groups.
  {
    Object** g1_objects[] = { g1s1.location(), g1s2.location() };
    Object** g2_objects[] = { g2s1.location(), g2s2.location() };
    GlobalHandles::AddGroup(g1_objects, 2);
    GlobalHandles::AddGroup(g2_objects, 2);
  }

  CHECK(Heap::CollectGarbage(0, OLD_POINTER_SPACE));

  // All objects should be gone. 5 global handles in total.
  CHECK_EQ(5, NumberOfWeakCalls);
}
342