1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
4 // met:
5 //
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
28 #include <stdlib.h>
29
30 #ifdef __linux__
31 #include <sys/types.h>
32 #include <sys/stat.h>
33 #include <fcntl.h>
34 #include <unistd.h>
35 #include <errno.h>
36 #endif
37
38 #include "v8.h"
39
40 #include "global-handles.h"
41 #include "snapshot.h"
42 #include "cctest.h"
43
44 using namespace v8::internal;
45
// Shared V8 context, lazily created by InitializeVM() and reused by all tests.
static v8::Persistent<v8::Context> env;
47
// Creates the shared context on first use and enters it.  Safe to call
// multiple times; subsequent calls just re-enter the existing context.
static void InitializeVM() {
  if (env.IsEmpty()) env = v8::Context::New();
  // Scope only covers handles created during Context::New/Enter.
  v8::HandleScope scope;
  env->Enter();
}
53
54
// Checks the LIFO (stack) behavior of MarkingDeque backed by a raw buffer:
// addresses pushed in increasing order must pop back in decreasing order,
// and the deque must report full/empty at the right times.
TEST(MarkingDeque) {
  int mem_size = 20 * kPointerSize;
  // Allocate exactly mem_size bytes (was a duplicated 20*kPointerSize
  // literal, which could silently desync from mem_size above).
  byte* mem = NewArray<byte>(mem_size);
  Address low = reinterpret_cast<Address>(mem);
  Address high = low + mem_size;
  MarkingDeque s;
  s.Initialize(low, high);

  // Fill the deque with fake heap objects at ascending addresses.
  Address address = NULL;
  while (!s.IsFull()) {
    s.PushBlack(HeapObject::FromAddress(address));
    address += kPointerSize;
  }

  // Popping must return the addresses in reverse (LIFO) order.
  while (!s.IsEmpty()) {
    Address value = s.Pop()->address();
    address -= kPointerSize;
    CHECK_EQ(address, value);
  }

  // Every pushed address was popped exactly once.
  CHECK_EQ(NULL, address);
  DeleteArray(mem);
}
78
79
// Checks that a new-space array survives a compacting full GC by being
// promoted into old pointer space.
TEST(Promotion) {
  // This test requires compaction. If compaction is turned off, we
  // skip the entire test.
  if (FLAG_never_compact) return;

  // Ensure that we get a compacting collection so that objects are promoted
  // from new space.
  FLAG_gc_global = true;
  FLAG_always_compact = true;
  HEAP->ConfigureHeap(2*256*KB, 8*MB, 8*MB);

  InitializeVM();

  v8::HandleScope sc;

  // Allocate a fixed array in the new space.  The divisor keeps the array
  // well under the per-page object size limit.
  int array_size =
      (Page::kMaxNonCodeHeapObjectSize - FixedArray::kHeaderSize) /
      (kPointerSize * 4);
  Object* obj = HEAP->AllocateFixedArray(array_size)->ToObjectChecked();

  Handle<FixedArray> array(FixedArray::cast(obj));

  // Array should be in the new space.
  CHECK(HEAP->InSpace(*array, NEW_SPACE));

  // Call the m-c collector, so array becomes an old object.
  HEAP->CollectGarbage(OLD_POINTER_SPACE);

  // Array now sits in the old space
  CHECK(HEAP->InSpace(*array, OLD_POINTER_SPACE));
}
112
113
// Checks that mark-compact still succeeds when old space is (nearly) full,
// so objects that would normally be promoted from new space cannot be.
TEST(NoPromotion) {
  HEAP->ConfigureHeap(2*256*KB, 8*MB, 8*MB);

  // Test the situation that some objects in new space are promoted to
  // the old space
  InitializeVM();

  v8::HandleScope sc;

  // Do a mark compact GC to shrink the heap.
  HEAP->CollectGarbage(OLD_POINTER_SPACE);

  // Allocate a big fixed array in the new space.
  int max_size =
      Min(Page::kMaxNonCodeHeapObjectSize, HEAP->MaxObjectSizeInNewSpace());

  int length = (max_size - FixedArray::kHeaderSize) / (2*kPointerSize);
  Object* obj = i::Isolate::Current()->heap()->AllocateFixedArray(length)->
      ToObjectChecked();

  Handle<FixedArray> array(FixedArray::cast(obj));

  // Array still stays in the new space.
  CHECK(HEAP->InSpace(*array, NEW_SPACE));

  // Allocate objects in the old space until out of memory, chaining each
  // new array off the previous one so they all stay reachable.
  FixedArray* host = *array;
  while (true) {
    // Renamed from 'obj' so it no longer shadows the outer 'obj' above.
    Object* next;
    { MaybeObject* maybe_next = HEAP->AllocateFixedArray(100, TENURED);
      if (!maybe_next->ToObject(&next)) break;
    }

    host->set(0, next);
    host = FixedArray::cast(next);
  }

  // Call mark compact GC, and it should pass.
  HEAP->CollectGarbage(OLD_POINTER_SPACE);
}
154
155
// Exercises mark-compact under several conditions: empty heap, exhausted
// new space, exhausted map space, and with live objects reachable from the
// global object that must survive repeated collections.
TEST(MarkCompactCollector) {
  InitializeVM();

  v8::HandleScope sc;
  // call mark-compact when heap is empty
  HEAP->CollectGarbage(OLD_POINTER_SPACE);

  // keep allocating garbage in new space until it fails
  const int ARRAY_SIZE = 100;
  Object* array;
  MaybeObject* maybe_array;
  do {
    maybe_array = HEAP->AllocateFixedArray(ARRAY_SIZE);
  } while (maybe_array->ToObject(&array));
  HEAP->CollectGarbage(NEW_SPACE);

  // After the scavenge the same allocation must succeed again.
  array = HEAP->AllocateFixedArray(ARRAY_SIZE)->ToObjectChecked();

  // keep allocating maps until it fails
  Object* mapp;
  MaybeObject* maybe_mapp;
  do {
    maybe_mapp = HEAP->AllocateMap(JS_OBJECT_TYPE, JSObject::kHeaderSize);
  } while (maybe_mapp->ToObject(&mapp));
  HEAP->CollectGarbage(MAP_SPACE);
  // After collecting map space, map allocation must succeed again.
  mapp = HEAP->AllocateMap(JS_OBJECT_TYPE,
                           JSObject::kHeaderSize)->ToObjectChecked();

  // allocate a garbage
  String* func_name =
      String::cast(HEAP->LookupAsciiSymbol("theFunction")->ToObjectChecked());
  SharedFunctionInfo* function_share = SharedFunctionInfo::cast(
      HEAP->AllocateSharedFunctionInfo(func_name)->ToObjectChecked());
  JSFunction* function = JSFunction::cast(
      HEAP->AllocateFunction(*Isolate::Current()->function_map(),
                             function_share,
                             HEAP->undefined_value())->ToObjectChecked());
  Map* initial_map =
      Map::cast(HEAP->AllocateMap(JS_OBJECT_TYPE,
                                  JSObject::kHeaderSize)->ToObjectChecked());
  function->set_initial_map(initial_map);
  // Anchor the function on the global object so it survives GC below.
  Isolate::Current()->context()->global()->SetProperty(
      func_name, function, NONE, kNonStrictMode)->ToObjectChecked();

  JSObject* obj = JSObject::cast(
      HEAP->AllocateJSObject(function)->ToObjectChecked());
  HEAP->CollectGarbage(OLD_POINTER_SPACE);

  // The function must still be reachable through the global object.
  func_name =
      String::cast(HEAP->LookupAsciiSymbol("theFunction")->ToObjectChecked());
  CHECK(Isolate::Current()->context()->global()->HasLocalProperty(func_name));
  Object* func_value = Isolate::Current()->context()->global()->
      GetProperty(func_name)->ToObjectChecked();
  CHECK(func_value->IsJSFunction());
  function = JSFunction::cast(func_value);

  // Create an object with a Smi property, also anchored on the global.
  obj = JSObject::cast(HEAP->AllocateJSObject(function)->ToObjectChecked());
  String* obj_name =
      String::cast(HEAP->LookupAsciiSymbol("theObject")->ToObjectChecked());
  Isolate::Current()->context()->global()->SetProperty(
      obj_name, obj, NONE, kNonStrictMode)->ToObjectChecked();
  String* prop_name =
      String::cast(HEAP->LookupAsciiSymbol("theSlot")->ToObjectChecked());
  obj->SetProperty(prop_name,
                   Smi::FromInt(23),
                   NONE,
                   kNonStrictMode)->ToObjectChecked();

  HEAP->CollectGarbage(OLD_POINTER_SPACE);

  // The object and its property must survive the collection intact.
  obj_name =
      String::cast(HEAP->LookupAsciiSymbol("theObject")->ToObjectChecked());
  CHECK(Isolate::Current()->context()->global()->HasLocalProperty(obj_name));
  CHECK(Isolate::Current()->context()->global()->
        GetProperty(obj_name)->ToObjectChecked()->IsJSObject());
  obj = JSObject::cast(Isolate::Current()->context()->global()->
                       GetProperty(obj_name)->ToObjectChecked());
  prop_name =
      String::cast(HEAP->LookupAsciiSymbol("theSlot")->ToObjectChecked());
  CHECK(obj->GetProperty(prop_name) == Smi::FromInt(23));
}
237
238
// TODO(1600): compaction of map space is temporary removed from GC.
// The whole test below is compiled out (#if 0) until map-space compaction
// is reintroduced; it is kept for reference.
#if 0
static Handle<Map> CreateMap() {
  return FACTORY->NewMap(JS_OBJECT_TYPE, JSObject::kHeaderSize);
}


TEST(MapCompact) {
  FLAG_max_map_space_pages = 16;
  InitializeVM();

  {
    v8::HandleScope sc;
    // keep allocating maps while pointers are still encodable and thus
    // mark compact is permitted.
    Handle<JSObject> root = FACTORY->NewJSObjectFromMap(CreateMap());
    do {
      Handle<Map> map = CreateMap();
      map->set_prototype(*root);
      root = FACTORY->NewJSObjectFromMap(map);
    } while (HEAP->map_space()->MapPointersEncodable());
  }
  // Now, as we don't have any handles to just allocated maps, we should
  // be able to trigger map compaction.
  // To give an additional chance to fail, try to force compaction which
  // should be impossible right now.
  HEAP->CollectAllGarbage(Heap::kForceCompactionMask);
  // And now map pointers should be encodable again.
  CHECK(HEAP->map_space()->MapPointersEncodable());
}
#endif
270
271 static int gc_starts = 0;
272 static int gc_ends = 0;
273
GCPrologueCallbackFunc()274 static void GCPrologueCallbackFunc() {
275 CHECK(gc_starts == gc_ends);
276 gc_starts++;
277 }
278
279
GCEpilogueCallbackFunc()280 static void GCEpilogueCallbackFunc() {
281 CHECK(gc_starts == gc_ends + 1);
282 gc_ends++;
283 }
284
285
// Checks that the global GC prologue/epilogue callbacks fire for a full
// collection but not for a scavenge.
TEST(GCCallback) {
  InitializeVM();

  HEAP->SetGlobalGCPrologueCallback(&GCPrologueCallbackFunc);
  HEAP->SetGlobalGCEpilogueCallback(&GCEpilogueCallbackFunc);

  // Scavenge does not call GC callback functions.
  HEAP->PerformScavenge();

  CHECK_EQ(0, gc_starts);
  CHECK_EQ(gc_ends, gc_starts);

  // A full GC fires each callback exactly once.
  HEAP->CollectGarbage(OLD_POINTER_SPACE);
  CHECK_EQ(1, gc_starts);
  CHECK_EQ(gc_ends, gc_starts);
}
302
303
// Number of weak-handle callbacks observed so far; reset by each test.
static int NumberOfWeakCalls = 0;
// Weak-handle callback: verifies the embedder data passed at MakeWeak time
// and disposes the now-dead handle.
static void WeakPointerCallback(v8::Persistent<v8::Value> handle, void* id) {
  ASSERT(id == reinterpret_cast<void*>(1234));
  NumberOfWeakCalls++;
  handle.Dispose();
}
310
// Builds two object groups connected in a cycle, with one implicit child
// each, and checks that group/implicit-reference semantics keep the right
// handles alive across full GCs.  Groups must be re-registered after every
// collection because the GC consumes them.
TEST(ObjectGroups) {
  InitializeVM();
  GlobalHandles* global_handles = Isolate::Current()->global_handles();

  NumberOfWeakCalls = 0;
  v8::HandleScope handle_scope;

  // Group 1: two weak members plus one weak child.
  Handle<Object> g1s1 =
      global_handles->Create(HEAP->AllocateFixedArray(1)->ToObjectChecked());
  Handle<Object> g1s2 =
      global_handles->Create(HEAP->AllocateFixedArray(1)->ToObjectChecked());
  Handle<Object> g1c1 =
      global_handles->Create(HEAP->AllocateFixedArray(1)->ToObjectChecked());
  global_handles->MakeWeak(g1s1.location(),
                           reinterpret_cast<void*>(1234),
                           &WeakPointerCallback);
  global_handles->MakeWeak(g1s2.location(),
                           reinterpret_cast<void*>(1234),
                           &WeakPointerCallback);
  global_handles->MakeWeak(g1c1.location(),
                           reinterpret_cast<void*>(1234),
                           &WeakPointerCallback);

  // Group 2: same shape as group 1.
  Handle<Object> g2s1 =
      global_handles->Create(HEAP->AllocateFixedArray(1)->ToObjectChecked());
  Handle<Object> g2s2 =
      global_handles->Create(HEAP->AllocateFixedArray(1)->ToObjectChecked());
  Handle<Object> g2c1 =
      global_handles->Create(HEAP->AllocateFixedArray(1)->ToObjectChecked());
  global_handles->MakeWeak(g2s1.location(),
                           reinterpret_cast<void*>(1234),
                           &WeakPointerCallback);
  global_handles->MakeWeak(g2s2.location(),
                           reinterpret_cast<void*>(1234),
                           &WeakPointerCallback);
  global_handles->MakeWeak(g2c1.location(),
                           reinterpret_cast<void*>(1234),
                           &WeakPointerCallback);

  Handle<Object> root = global_handles->Create(*g1s1);  // make a root.

  // Connect group 1 and 2, make a cycle.
  Handle<FixedArray>::cast(g1s2)->set(0, *g2s2);
  Handle<FixedArray>::cast(g2s1)->set(0, *g1s1);

  {
    Object** g1_objects[] = { g1s1.location(), g1s2.location() };
    Object** g1_children[] = { g1c1.location() };
    Object** g2_objects[] = { g2s1.location(), g2s2.location() };
    Object** g2_children[] = { g2c1.location() };
    global_handles->AddObjectGroup(g1_objects, 2, NULL);
    global_handles->AddImplicitReferences(
        Handle<HeapObject>::cast(g1s1).location(), g1_children, 1);
    global_handles->AddObjectGroup(g2_objects, 2, NULL);
    global_handles->AddImplicitReferences(
        Handle<HeapObject>::cast(g2s2).location(), g2_children, 1);
  }
  // Do a full GC
  HEAP->CollectGarbage(OLD_POINTER_SPACE);

  // All object should be alive.
  CHECK_EQ(0, NumberOfWeakCalls);

  // Weaken the root.
  global_handles->MakeWeak(root.location(),
                           reinterpret_cast<void*>(1234),
                           &WeakPointerCallback);
  // But make children strong roots---all the objects (except for children)
  // should be collectable now.
  global_handles->ClearWeakness(g1c1.location());
  global_handles->ClearWeakness(g2c1.location());

  // Groups are deleted, rebuild groups.
  {
    Object** g1_objects[] = { g1s1.location(), g1s2.location() };
    Object** g1_children[] = { g1c1.location() };
    Object** g2_objects[] = { g2s1.location(), g2s2.location() };
    Object** g2_children[] = { g2c1.location() };
    global_handles->AddObjectGroup(g1_objects, 2, NULL);
    global_handles->AddImplicitReferences(
        Handle<HeapObject>::cast(g1s1).location(), g1_children, 1);
    global_handles->AddObjectGroup(g2_objects, 2, NULL);
    global_handles->AddImplicitReferences(
        Handle<HeapObject>::cast(g2s2).location(), g2_children, 1);
  }

  HEAP->CollectGarbage(OLD_POINTER_SPACE);

  // All objects should be gone. 5 global handles in total.
  CHECK_EQ(5, NumberOfWeakCalls);

  // And now make children weak again and collect them.
  global_handles->MakeWeak(g1c1.location(),
                           reinterpret_cast<void*>(1234),
                           &WeakPointerCallback);
  global_handles->MakeWeak(g2c1.location(),
                           reinterpret_cast<void*>(1234),
                           &WeakPointerCallback);

  HEAP->CollectGarbage(OLD_POINTER_SPACE);
  CHECK_EQ(7, NumberOfWeakCalls);
}
413
414
415 class TestRetainedObjectInfo : public v8::RetainedObjectInfo {
416 public:
TestRetainedObjectInfo()417 TestRetainedObjectInfo() : has_been_disposed_(false) {}
418
has_been_disposed()419 bool has_been_disposed() { return has_been_disposed_; }
420
Dispose()421 virtual void Dispose() {
422 ASSERT(!has_been_disposed_);
423 has_been_disposed_ = true;
424 }
425
IsEquivalent(v8::RetainedObjectInfo * other)426 virtual bool IsEquivalent(v8::RetainedObjectInfo* other) {
427 return other == this;
428 }
429
GetHash()430 virtual intptr_t GetHash() { return 0; }
431
GetLabel()432 virtual const char* GetLabel() { return "whatever"; }
433
434 private:
435 bool has_been_disposed_;
436 };
437
438
// Checks that empty object groups and empty implicit-reference lists are
// accepted, and that the supplied RetainedObjectInfo is disposed even when
// the group is empty (otherwise the embedder would leak it).
TEST(EmptyObjectGroups) {
  InitializeVM();
  GlobalHandles* global_handles = Isolate::Current()->global_handles();

  v8::HandleScope handle_scope;

  Handle<Object> object =
      global_handles->Create(HEAP->AllocateFixedArray(1)->ToObjectChecked());

  TestRetainedObjectInfo info;
  global_handles->AddObjectGroup(NULL, 0, &info);
  // The empty group is dropped immediately, but info must still be disposed.
  ASSERT(info.has_been_disposed());

  global_handles->AddImplicitReferences(
      Handle<HeapObject>::cast(object).location(), NULL, 0);
}
455
456
457 // Here is a memory use test that uses /proc, and is therefore Linux-only. We
458 // do not care how much memory the simulator uses, since it is only there for
459 // debugging purposes.
460 #if defined(__linux__) && !defined(USE_SIMULATOR)
461
462
ReadLong(char * buffer,intptr_t * position,int base)463 static uintptr_t ReadLong(char* buffer, intptr_t* position, int base) {
464 char* end_address = buffer + *position;
465 uintptr_t result = strtoul(buffer + *position, &end_address, base);
466 CHECK(result != ULONG_MAX || errno != ERANGE);
467 CHECK(end_address > buffer + *position);
468 *position = end_address - buffer;
469 return result;
470 }
471
472
// Returns the total size in bytes of this process's accessible, private,
// anonymous mappings, computed by parsing /proc/self/maps.  Returns -1 if
// /proc is unavailable (then the caller should skip its test).
static intptr_t MemoryInUse() {
  intptr_t memory_use = 0;

  int fd = open("/proc/self/maps", O_RDONLY);
  if (fd < 0) return -1;

  const int kBufSize = 10000;
  char buffer[kBufSize];
  int length = read(fd, buffer, kBufSize);
  intptr_t line_start = 0;
  CHECK_LT(length, kBufSize);  // Make the buffer bigger.
  CHECK_GT(length, 0);  // We have to find some data in the file.
  // Each maps line has the form:
  //   start-end perms offset major:minor inode [path]
  while (line_start < length) {
    if (buffer[line_start] == '\n') {
      line_start++;
      continue;
    }
    intptr_t position = line_start;
    uintptr_t start = ReadLong(buffer, &position, 16);
    CHECK_EQ(buffer[position++], '-');
    uintptr_t end = ReadLong(buffer, &position, 16);
    CHECK_EQ(buffer[position++], ' ');
    // Permission field is four characters, each either a letter or '-'.
    CHECK(buffer[position] == '-' || buffer[position] == 'r');
    bool read_permission = (buffer[position++] == 'r');
    CHECK(buffer[position] == '-' || buffer[position] == 'w');
    bool write_permission = (buffer[position++] == 'w');
    CHECK(buffer[position] == '-' || buffer[position] == 'x');
    bool execute_permission = (buffer[position++] == 'x');
    CHECK(buffer[position] == '-' || buffer[position] == 'p');
    bool private_mapping = (buffer[position++] == 'p');
    CHECK_EQ(buffer[position++], ' ');
    uintptr_t offset = ReadLong(buffer, &position, 16);
    USE(offset);
    CHECK_EQ(buffer[position++], ' ');
    uintptr_t major = ReadLong(buffer, &position, 16);
    USE(major);
    CHECK_EQ(buffer[position++], ':');
    uintptr_t minor = ReadLong(buffer, &position, 16);
    USE(minor);
    CHECK_EQ(buffer[position++], ' ');
    uintptr_t inode = ReadLong(buffer, &position, 10);
    // Skip the rest of the line (the mapped file name, if any).
    while (position < length && buffer[position] != '\n') position++;
    // Count mappings that are accessible, private, and anonymous
    // (inode 0 means the mapping is not backed by a file).
    if ((read_permission || write_permission || execute_permission) &&
        private_mapping && inode == 0) {
      memory_use += (end - start);
    }

    line_start = position;
  }
  close(fd);
  return memory_use;
}
525
526
// Checks that booting the VM stays under fixed memory-use thresholds.
// Limits are split by pointer width and by snapshot availability; the
// trailing comments record recently measured values (in KB).
TEST(BootUpMemoryUse) {
  intptr_t initial_memory = MemoryInUse();
  FLAG_crankshaft = false;  // Avoid flakiness.
  // Only Linux has the proc filesystem and only if it is mapped. If it's not
  // there we just skip the test.
  if (initial_memory >= 0) {
    InitializeVM();
    intptr_t booted_memory = MemoryInUse();
    if (sizeof(initial_memory) == 8) {  // 64-bit build.
      if (v8::internal::Snapshot::IsEnabled()) {
        CHECK_LE(booted_memory - initial_memory, 6686 * 1024);  // 6476.
      } else {
        CHECK_LE(booted_memory - initial_memory, 6809 * 1024);  // 6628.
      }
    } else {  // 32-bit build.
      if (v8::internal::Snapshot::IsEnabled()) {
        CHECK_LE(booted_memory - initial_memory, 6532 * 1024);  // 6388.
      } else {
        CHECK_LE(booted_memory - initial_memory, 6940 * 1024);  // 6456
      }
    }
  }
}
550
551 #endif // __linux__ and !USE_SIMULATOR
552