1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
4 // met:
5 //
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
28 #include <stdlib.h>
29
30 #include "v8.h"
31
32 #include "compilation-cache.h"
33 #include "execution.h"
34 #include "factory.h"
35 #include "macro-assembler.h"
36 #include "global-handles.h"
37 #include "stub-cache.h"
38 #include "cctest.h"
39
40 using namespace v8::internal;
41
42
43 // Go through all incremental marking steps in one swoop.
SimulateIncrementalMarking()44 static void SimulateIncrementalMarking() {
45 MarkCompactCollector* collector = CcTest::heap()->mark_compact_collector();
46 IncrementalMarking* marking = CcTest::heap()->incremental_marking();
47 if (collector->IsConcurrentSweepingInProgress()) {
48 collector->WaitUntilSweepingCompleted();
49 }
50 CHECK(marking->IsMarking() || marking->IsStopped());
51 if (marking->IsStopped()) {
52 marking->Start();
53 }
54 CHECK(marking->IsMarking());
55 while (!marking->IsComplete()) {
56 marking->Step(MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD);
57 }
58 CHECK(marking->IsComplete());
59 }
60
61
CheckMap(Map * map,int type,int instance_size)62 static void CheckMap(Map* map, int type, int instance_size) {
63 CHECK(map->IsHeapObject());
64 #ifdef DEBUG
65 CHECK(CcTest::heap()->Contains(map));
66 #endif
67 CHECK_EQ(CcTest::heap()->meta_map(), map->map());
68 CHECK_EQ(type, map->instance_type());
69 CHECK_EQ(instance_size, map->instance_size());
70 }
71
72
// Spot-check a handful of well-known root maps.
TEST(HeapMaps) {
  CcTest::InitializeVM();
  Heap* heap = CcTest::heap();
  // Fixed-size instances report a concrete size.
  CheckMap(heap->meta_map(), MAP_TYPE, Map::kSize);
  CheckMap(heap->heap_number_map(), HEAP_NUMBER_TYPE, HeapNumber::kSize);
  // Variable-size instances report the sentinel instead.
  CheckMap(heap->string_map(), STRING_TYPE, kVariableSizeSentinel);
  CheckMap(heap->fixed_array_map(), FIXED_ARRAY_TYPE, kVariableSizeSentinel);
}
81
82
CheckOddball(Isolate * isolate,Object * obj,const char * string)83 static void CheckOddball(Isolate* isolate, Object* obj, const char* string) {
84 CHECK(obj->IsOddball());
85 bool exc;
86 Handle<Object> handle(obj, isolate);
87 Object* print_string =
88 *Execution::ToString(isolate, handle, &exc);
89 CHECK(String::cast(print_string)->IsUtf8EqualTo(CStrVector(string)));
90 }
91
92
CheckSmi(Isolate * isolate,int value,const char * string)93 static void CheckSmi(Isolate* isolate, int value, const char* string) {
94 bool exc;
95 Handle<Object> handle(Smi::FromInt(value), isolate);
96 Object* print_string =
97 *Execution::ToString(isolate, handle, &exc);
98 CHECK(String::cast(print_string)->IsUtf8EqualTo(CStrVector(string)));
99 }
100
101
CheckNumber(Isolate * isolate,double value,const char * string)102 static void CheckNumber(Isolate* isolate, double value, const char* string) {
103 Object* obj = CcTest::heap()->NumberFromDouble(value)->ToObjectChecked();
104 CHECK(obj->IsNumber());
105 bool exc;
106 Handle<Object> handle(obj, isolate);
107 Object* print_string =
108 *Execution::ToString(isolate, handle, &exc);
109 CHECK(String::cast(print_string)->IsUtf8EqualTo(CStrVector(string)));
110 }
111
112
// Allocate a minimal Code object and verify that Isolate::FindCodeObject
// resolves every pointer-aligned interior address back to it, while an
// address inside a second, distinct Code object does not resolve to it.
static void CheckFindCodeObject(Isolate* isolate) {
  // Test FindCodeObject
#define __ assm.

  Assembler assm(isolate, NULL, 0);

  __ nop();  // supported on all architectures

  CodeDesc desc;
  assm.GetCode(&desc);
  Heap* heap = isolate->heap();
  // Materialize the assembled bytes as a heap-allocated Code object.
  Object* code = heap->CreateCode(
      desc,
      Code::ComputeFlags(Code::STUB),
      Handle<Code>())->ToObjectChecked();
  CHECK(code->IsCode());

  HeapObject* obj = HeapObject::cast(code);
  Address obj_addr = obj->address();

  // Every pointer-aligned address within the object must map to it.
  for (int i = 0; i < obj->Size(); i += kPointerSize) {
    Object* found = isolate->FindCodeObject(obj_addr + i);
    CHECK_EQ(code, found);
  }

  // Create a second Code object from the same descriptor; an address in
  // its interior must not be attributed to the first object.
  Object* copy = heap->CreateCode(
      desc,
      Code::ComputeFlags(Code::STUB),
      Handle<Code>())->ToObjectChecked();
  CHECK(copy->IsCode());
  HeapObject* obj_copy = HeapObject::cast(copy);
  Object* not_right = isolate->FindCodeObject(obj_copy->address() +
                                              obj_copy->Size() / 2);
  CHECK(not_right != code);
}
148
149
// Exercises Smi/HeapNumber allocation at the representation boundaries,
// oddball/Smi/number ToString conversion, and code-object lookup.
TEST(HeapObjects) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  Heap* heap = isolate->heap();

  HandleScope sc(isolate);
  // A non-integral double must be boxed as a HeapNumber.
  Object* value = heap->NumberFromDouble(1.000123)->ToObjectChecked();
  CHECK(value->IsHeapNumber());
  CHECK(value->IsNumber());
  CHECK_EQ(1.000123, value->Number());

  // An integral double in Smi range is represented as a Smi.
  value = heap->NumberFromDouble(1.0)->ToObjectChecked();
  CHECK(value->IsSmi());
  CHECK(value->IsNumber());
  CHECK_EQ(1.0, value->Number());

  value = heap->NumberFromInt32(1024)->ToObjectChecked();
  CHECK(value->IsSmi());
  CHECK(value->IsNumber());
  CHECK_EQ(1024.0, value->Number());

  // The Smi boundary values themselves still fit in a Smi.
  value = heap->NumberFromInt32(Smi::kMinValue)->ToObjectChecked();
  CHECK(value->IsSmi());
  CHECK(value->IsNumber());
  CHECK_EQ(Smi::kMinValue, Smi::cast(value)->value());

  value = heap->NumberFromInt32(Smi::kMaxValue)->ToObjectChecked();
  CHECK(value->IsSmi());
  CHECK(value->IsNumber());
  CHECK_EQ(Smi::kMaxValue, Smi::cast(value)->value());

#ifndef V8_TARGET_ARCH_X64
  // TODO(lrn): We need a NumberFromIntptr function in order to test this.
  // One below Smi::kMinValue no longer fits in a Smi.
  value = heap->NumberFromInt32(Smi::kMinValue - 1)->ToObjectChecked();
  CHECK(value->IsHeapNumber());
  CHECK(value->IsNumber());
  CHECK_EQ(static_cast<double>(Smi::kMinValue - 1), value->Number());
#endif

  // One above Smi::kMaxValue (as uint32) requires a HeapNumber.
  MaybeObject* maybe_value =
      heap->NumberFromUint32(static_cast<uint32_t>(Smi::kMaxValue) + 1);
  value = maybe_value->ToObjectChecked();
  CHECK(value->IsHeapNumber());
  CHECK(value->IsNumber());
  CHECK_EQ(static_cast<double>(static_cast<uint32_t>(Smi::kMaxValue) + 1),
           value->Number());

  maybe_value = heap->NumberFromUint32(static_cast<uint32_t>(1) << 31);
  value = maybe_value->ToObjectChecked();
  CHECK(value->IsHeapNumber());
  CHECK(value->IsNumber());
  CHECK_EQ(static_cast<double>(static_cast<uint32_t>(1) << 31),
           value->Number());

  // nan oddball checks
  CHECK(heap->nan_value()->IsNumber());
  CHECK(std::isnan(heap->nan_value()->Number()));

  Handle<String> s = factory->NewStringFromAscii(CStrVector("fisk hest "));
  CHECK(s->IsString());
  CHECK_EQ(10, s->length());

  // The global object must expose the "Object" property.
  Handle<String> object_string = Handle<String>::cast(factory->Object_string());
  Handle<GlobalObject> global(CcTest::i_isolate()->context()->global_object());
  CHECK(JSReceiver::HasLocalProperty(global, object_string));

  // Check ToString for oddballs
  CheckOddball(isolate, heap->true_value(), "true");
  CheckOddball(isolate, heap->false_value(), "false");
  CheckOddball(isolate, heap->null_value(), "null");
  CheckOddball(isolate, heap->undefined_value(), "undefined");

  // Check ToString for Smis
  CheckSmi(isolate, 0, "0");
  CheckSmi(isolate, 42, "42");
  CheckSmi(isolate, -42, "-42");

  // Check ToString for Numbers
  CheckNumber(isolate, 1.1, "1.1");

  CheckFindCodeObject(isolate);
}
233
234
// Exercise pointer tagging: object alignment, Smi encoding at the range
// boundaries, and Failure encoding round-trips.
TEST(Tagging) {
  CcTest::InitializeVM();
  const int kRequestSize = 24;
  // An already-aligned request size must be unchanged by alignment.
  CHECK_EQ(kRequestSize,
           static_cast<int>(OBJECT_POINTER_ALIGN(kRequestSize)));
  CHECK(Smi::FromInt(42)->IsSmi());
  CHECK(Smi::FromInt(Smi::kMinValue)->IsSmi());
  CHECK(Smi::FromInt(Smi::kMaxValue)->IsSmi());
  // RetryAfterGC failures remember the space they were requested for.
  CHECK(Failure::RetryAfterGC(NEW_SPACE)->IsFailure());
  CHECK_EQ(NEW_SPACE,
           Failure::RetryAfterGC(NEW_SPACE)->allocation_space());
  CHECK_EQ(OLD_POINTER_SPACE,
           Failure::RetryAfterGC(OLD_POINTER_SPACE)->allocation_space());
  CHECK(Failure::Exception()->IsFailure());
}
249
250
// Verifies that objects reachable from the global object survive a
// new-space GC, while handles created in inner scopes do not keep
// otherwise-unreachable objects alive.
TEST(GarbageCollection) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  Factory* factory = isolate->factory();

  HandleScope sc(isolate);
  // Check GC.
  heap->CollectGarbage(NEW_SPACE);

  Handle<GlobalObject> global(CcTest::i_isolate()->context()->global_object());
  Handle<String> name = factory->InternalizeUtf8String("theFunction");
  Handle<String> prop_name = factory->InternalizeUtf8String("theSlot");
  Handle<String> prop_namex = factory->InternalizeUtf8String("theSlotx");
  Handle<String> obj_name = factory->InternalizeUtf8String("theObject");
  Handle<Smi> twenty_three(Smi::FromInt(23), isolate);
  Handle<Smi> twenty_four(Smi::FromInt(24), isolate);

  {
    HandleScope inner_scope(isolate);
    // Allocate a function and keep it in global object's property.
    Handle<JSFunction> function =
        factory->NewFunction(name, factory->undefined_value());
    Handle<Map> initial_map =
        factory->NewMap(JS_OBJECT_TYPE, JSObject::kHeaderSize);
    function->set_initial_map(*initial_map);
    JSReceiver::SetProperty(global, name, function, NONE, kNonStrictMode);
    // Allocate an object. Unrooted after leaving the scope.
    Handle<JSObject> obj = factory->NewJSObject(function);
    JSReceiver::SetProperty(obj, prop_name, twenty_three, NONE, kNonStrictMode);
    JSReceiver::SetProperty(obj, prop_namex, twenty_four, NONE, kNonStrictMode);

    CHECK_EQ(Smi::FromInt(23), obj->GetProperty(*prop_name));
    CHECK_EQ(Smi::FromInt(24), obj->GetProperty(*prop_namex));
  }

  heap->CollectGarbage(NEW_SPACE);

  // Function should be alive.
  CHECK(JSReceiver::HasLocalProperty(global, name));
  // Check function is retained.
  Object* func_value = CcTest::i_isolate()->context()->global_object()->
      GetProperty(*name)->ToObjectChecked();
  CHECK(func_value->IsJSFunction());
  Handle<JSFunction> function(JSFunction::cast(func_value));

  {
    HandleScope inner_scope(isolate);
    // Allocate another object, make it reachable from global.
    Handle<JSObject> obj = factory->NewJSObject(function);
    JSReceiver::SetProperty(global, obj_name, obj, NONE, kNonStrictMode);
    JSReceiver::SetProperty(obj, prop_name, twenty_three, NONE, kNonStrictMode);
  }

  // After gc, it should survive.
  heap->CollectGarbage(NEW_SPACE);

  CHECK(JSReceiver::HasLocalProperty(global, obj_name));
  CHECK(CcTest::i_isolate()->context()->global_object()->
        GetProperty(*obj_name)->ToObjectChecked()->IsJSObject());
  Object* obj = CcTest::i_isolate()->context()->global_object()->
      GetProperty(*obj_name)->ToObjectChecked();
  JSObject* js_obj = JSObject::cast(obj);
  // The property written inside the inner scope must still be present.
  CHECK_EQ(Smi::FromInt(23), js_obj->GetProperty(*prop_name));
}
316
317
VerifyStringAllocation(Isolate * isolate,const char * string)318 static void VerifyStringAllocation(Isolate* isolate, const char* string) {
319 HandleScope scope(isolate);
320 Handle<String> s = isolate->factory()->NewStringFromUtf8(CStrVector(string));
321 CHECK_EQ(StrLength(string), s->length());
322 for (int index = 0; index < s->length(); index++) {
323 CHECK_EQ(static_cast<uint16_t>(string[index]), s->Get(index));
324 }
325 }
326
327
// Round-trips a few ASCII strings of increasing length through heap
// allocation (see VerifyStringAllocation).
TEST(String) {
  CcTest::InitializeVM();
  // Use the canonical internal-isolate accessor rather than
  // reinterpret_cast'ing the external isolate, consistent with the rest
  // of this file.
  Isolate* isolate = CcTest::i_isolate();

  VerifyStringAllocation(isolate, "a");
  VerifyStringAllocation(isolate, "ab");
  VerifyStringAllocation(isolate, "abc");
  VerifyStringAllocation(isolate, "abcd");
  VerifyStringAllocation(isolate, "fiskerdrengen er paa havet");
}
338
339
// A local handle created inside a scope must expose the allocated
// string with the expected length.
TEST(LocalHandles) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();

  v8::HandleScope scope(CcTest::isolate());
  const char* kName = "Kasper the spunky";
  Handle<String> handle = factory->NewStringFromAscii(CStrVector(kName));
  CHECK_EQ(StrLength(kName), handle->length());
}
350
351
// Global handles must keep their targets alive across a GC even after
// the local handles they were created from have gone out of scope, and
// handles created from the same object must stay identical.
TEST(GlobalHandles) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  Factory* factory = isolate->factory();
  GlobalHandles* global_handles = isolate->global_handles();

  Handle<Object> h1;
  Handle<Object> h2;
  Handle<Object> h3;
  Handle<Object> h4;

  {
    HandleScope scope(isolate);

    // Local handles die with this scope; only the global handles root
    // the two objects afterwards.
    Handle<Object> i = factory->NewStringFromAscii(CStrVector("fisk"));
    Handle<Object> u = factory->NewNumber(1.12344);

    h1 = global_handles->Create(*i);
    h2 = global_handles->Create(*u);
    h3 = global_handles->Create(*i);
    h4 = global_handles->Create(*u);
  }

  // after gc, it should survive
  heap->CollectGarbage(NEW_SPACE);

  CHECK((*h1)->IsString());
  CHECK((*h2)->IsHeapNumber());
  CHECK((*h3)->IsString());
  CHECK((*h4)->IsHeapNumber());

  // Handles created from the same object still refer to the same object.
  CHECK_EQ(*h3, *h1);
  global_handles->Destroy(h1.location());
  global_handles->Destroy(h3.location());

  CHECK_EQ(*h4, *h2);
  global_handles->Destroy(h2.location());
  global_handles->Destroy(h4.location());
}
392
393
// Set by TestWeakGlobalHandleCallback when the weak handle registered
// with id 1234 is cleared; each weak-handle test resets it beforehand.
static bool WeakPointerCleared = false;
395
// Weak-handle callback: records (via WeakPointerCleared) that the
// handle registered with parameter id 1234 was collected, then
// disposes the handle.
static void TestWeakGlobalHandleCallback(v8::Isolate* isolate,
                                         v8::Persistent<v8::Value>* handle,
                                         void* id) {
  if (1234 == reinterpret_cast<intptr_t>(id)) WeakPointerCleared = true;
  handle->Reset();
}
402
403
// A scavenge must NOT clear weak global handles: it treats weak
// pointers as normal roots, so both handles survive intact.
TEST(WeakGlobalHandlesScavenge) {
  i::FLAG_stress_compaction = false;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  Factory* factory = isolate->factory();
  GlobalHandles* global_handles = isolate->global_handles();

  WeakPointerCleared = false;

  Handle<Object> h1;
  Handle<Object> h2;

  {
    HandleScope scope(isolate);

    Handle<Object> i = factory->NewStringFromAscii(CStrVector("fisk"));
    Handle<Object> u = factory->NewNumber(1.12344);

    h1 = global_handles->Create(*i);
    h2 = global_handles->Create(*u);
  }

  // Only h2 is made weak; h1 stays a strong root.
  global_handles->MakeWeak(h2.location(),
                           reinterpret_cast<void*>(1234),
                           &TestWeakGlobalHandleCallback);

  // Scavenge treats weak pointers as normal roots.
  heap->PerformScavenge();

  CHECK((*h1)->IsString());
  CHECK((*h2)->IsHeapNumber());

  // The weak callback must not have fired.
  CHECK(!WeakPointerCleared);
  CHECK(!global_handles->IsNearDeath(h2.location()));
  CHECK(!global_handles->IsNearDeath(h1.location()));

  global_handles->Destroy(h1.location());
  global_handles->Destroy(h2.location());
}
444
445
// A full mark-compact GC must clear a weak global handle whose target
// is otherwise unreachable, while leaving strong handles alone — even
// when the objects were first promoted to old space.
TEST(WeakGlobalHandlesMark) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  Factory* factory = isolate->factory();
  GlobalHandles* global_handles = isolate->global_handles();

  WeakPointerCleared = false;

  Handle<Object> h1;
  Handle<Object> h2;

  {
    HandleScope scope(isolate);

    Handle<Object> i = factory->NewStringFromAscii(CStrVector("fisk"));
    Handle<Object> u = factory->NewNumber(1.12344);

    h1 = global_handles->Create(*i);
    h2 = global_handles->Create(*u);
  }

  // Make sure the objects are promoted.
  heap->CollectGarbage(OLD_POINTER_SPACE);
  heap->CollectGarbage(NEW_SPACE);
  CHECK(!heap->InNewSpace(*h1) && !heap->InNewSpace(*h2));

  // Only h2 is made weak; h1 stays a strong root.
  global_handles->MakeWeak(h2.location(),
                           reinterpret_cast<void*>(1234),
                           &TestWeakGlobalHandleCallback);
  CHECK(!GlobalHandles::IsNearDeath(h1.location()));
  CHECK(!GlobalHandles::IsNearDeath(h2.location()));

  // Incremental marking potentially marked handles before they turned weak.
  heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);

  CHECK((*h1)->IsString());

  // The weak handle's callback must have fired; the strong one survives.
  CHECK(WeakPointerCleared);
  CHECK(!GlobalHandles::IsNearDeath(h1.location()));

  global_handles->Destroy(h1.location());
}
489
490
// A weak global handle survives a scavenge but is cleared (its callback
// fired) by a full mark-compact collection.
TEST(DeleteWeakGlobalHandle) {
  i::FLAG_stress_compaction = false;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  Factory* factory = isolate->factory();
  GlobalHandles* global_handles = isolate->global_handles();

  WeakPointerCleared = false;

  Handle<Object> h;

  {
    HandleScope scope(isolate);

    Handle<Object> i = factory->NewStringFromAscii(CStrVector("fisk"));
    h = global_handles->Create(*i);
  }

  global_handles->MakeWeak(h.location(),
                           reinterpret_cast<void*>(1234),
                           &TestWeakGlobalHandleCallback);

  // Scavenge does not recognize weak reference.
  heap->PerformScavenge();

  CHECK(!WeakPointerCleared);

  // Mark-compact treats weak reference properly.
  heap->CollectGarbage(OLD_POINTER_SPACE);

  CHECK(WeakPointerCleared);
}
524
525
// NULL-terminated table of keyword-like strings used to exercise the
// string (internalization) table; see CheckInternalizedStrings.
static const char* not_so_random_string_table[] = {
  "abstract",
  "boolean",
  "break",
  "byte",
  "case",
  "catch",
  "char",
  "class",
  "const",
  "continue",
  "debugger",
  "default",
  "delete",
  "do",
  "double",
  "else",
  "enum",
  "export",
  "extends",
  "false",
  "final",
  "finally",
  "float",
  "for",
  "function",
  "goto",
  "if",
  "implements",
  "import",
  "in",
  "instanceof",
  "int",
  "interface",
  "long",
  "native",
  "new",
  "null",
  "package",
  "private",
  "protected",
  "public",
  "return",
  "short",
  "static",
  "super",
  "switch",
  "synchronized",
  "this",
  "throw",
  "throws",
  "transient",
  "true",
  "try",
  "typeof",
  "var",
  "void",
  "volatile",
  "while",
  "with",
  0
};
588
589
CheckInternalizedStrings(const char ** strings)590 static void CheckInternalizedStrings(const char** strings) {
591 for (const char* string = *strings; *strings != 0; string = *strings++) {
592 Object* a;
593 MaybeObject* maybe_a = CcTest::heap()->InternalizeUtf8String(string);
594 // InternalizeUtf8String may return a failure if a GC is needed.
595 if (!maybe_a->ToObject(&a)) continue;
596 CHECK(a->IsInternalizedString());
597 Object* b;
598 MaybeObject* maybe_b = CcTest::heap()->InternalizeUtf8String(string);
599 if (!maybe_b->ToObject(&b)) continue;
600 CHECK_EQ(b, a);
601 CHECK(String::cast(b)->IsUtf8EqualTo(CStrVector(string)));
602 }
603 }
604
605
// Internalize the keyword table twice; the second pass must find the
// entries already present in the string table.
TEST(StringTable) {
  CcTest::InitializeVM();

  for (int pass = 0; pass < 2; pass++) {
    CheckInternalizedStrings(not_so_random_string_table);
  }
}
612
613
// Allocates a function, instantiates an object from it, and verifies
// that properties can be added both to the instance and to the function
// object itself.
TEST(FunctionAllocation) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();

  v8::HandleScope sc(CcTest::isolate());
  Handle<String> name = factory->InternalizeUtf8String("theFunction");
  Handle<JSFunction> function =
      factory->NewFunction(name, factory->undefined_value());
  // Give the function an initial map so it can be used as a constructor.
  Handle<Map> initial_map =
      factory->NewMap(JS_OBJECT_TYPE, JSObject::kHeaderSize);
  function->set_initial_map(*initial_map);

  Handle<Smi> twenty_three(Smi::FromInt(23), isolate);
  Handle<Smi> twenty_four(Smi::FromInt(24), isolate);

  Handle<String> prop_name = factory->InternalizeUtf8String("theSlot");
  Handle<JSObject> obj = factory->NewJSObject(function);
  JSReceiver::SetProperty(obj, prop_name, twenty_three, NONE, kNonStrictMode);
  CHECK_EQ(Smi::FromInt(23), obj->GetProperty(*prop_name));
  // Check that we can add properties to function objects.
  JSReceiver::SetProperty(function, prop_name, twenty_four, NONE,
                          kNonStrictMode);
  CHECK_EQ(Smi::FromInt(24), function->GetProperty(*prop_name));
}
639
640
// Exercises add/delete property sequences in both orders, and verifies
// that plain strings and internalized strings name the same property.
TEST(ObjectProperties) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();

  v8::HandleScope sc(CcTest::isolate());
  // Instantiate a plain object via the global Object constructor.
  String* object_string = String::cast(CcTest::heap()->Object_string());
  Object* raw_object = CcTest::i_isolate()->context()->global_object()->
      GetProperty(object_string)->ToObjectChecked();
  JSFunction* object_function = JSFunction::cast(raw_object);
  Handle<JSFunction> constructor(object_function);
  Handle<JSObject> obj = factory->NewJSObject(constructor);
  Handle<String> first = factory->InternalizeUtf8String("first");
  Handle<String> second = factory->InternalizeUtf8String("second");

  Handle<Smi> one(Smi::FromInt(1), isolate);
  Handle<Smi> two(Smi::FromInt(2), isolate);

  // check for empty
  CHECK(!JSReceiver::HasLocalProperty(obj, first));

  // add first
  JSReceiver::SetProperty(obj, first, one, NONE, kNonStrictMode);
  CHECK(JSReceiver::HasLocalProperty(obj, first));

  // delete first
  JSReceiver::DeleteProperty(obj, first, JSReceiver::NORMAL_DELETION);
  CHECK(!JSReceiver::HasLocalProperty(obj, first));

  // add first and then second
  JSReceiver::SetProperty(obj, first, one, NONE, kNonStrictMode);
  JSReceiver::SetProperty(obj, second, two, NONE, kNonStrictMode);
  CHECK(JSReceiver::HasLocalProperty(obj, first));
  CHECK(JSReceiver::HasLocalProperty(obj, second));

  // delete first and then second
  JSReceiver::DeleteProperty(obj, first, JSReceiver::NORMAL_DELETION);
  CHECK(JSReceiver::HasLocalProperty(obj, second));
  JSReceiver::DeleteProperty(obj, second, JSReceiver::NORMAL_DELETION);
  CHECK(!JSReceiver::HasLocalProperty(obj, first));
  CHECK(!JSReceiver::HasLocalProperty(obj, second));

  // add first and then second
  JSReceiver::SetProperty(obj, first, one, NONE, kNonStrictMode);
  JSReceiver::SetProperty(obj, second, two, NONE, kNonStrictMode);
  CHECK(JSReceiver::HasLocalProperty(obj, first));
  CHECK(JSReceiver::HasLocalProperty(obj, second));

  // delete second and then first
  JSReceiver::DeleteProperty(obj, second, JSReceiver::NORMAL_DELETION);
  CHECK(JSReceiver::HasLocalProperty(obj, first));
  JSReceiver::DeleteProperty(obj, first, JSReceiver::NORMAL_DELETION);
  CHECK(!JSReceiver::HasLocalProperty(obj, first));
  CHECK(!JSReceiver::HasLocalProperty(obj, second));

  // check string and internalized string match
  const char* string1 = "fisk";
  Handle<String> s1 = factory->NewStringFromAscii(CStrVector(string1));
  JSReceiver::SetProperty(obj, s1, one, NONE, kNonStrictMode);
  Handle<String> s1_string = factory->InternalizeUtf8String(string1);
  CHECK(JSReceiver::HasLocalProperty(obj, s1_string));

  // check internalized string and string match
  const char* string2 = "fugl";
  Handle<String> s2_string = factory->InternalizeUtf8String(string2);
  JSReceiver::SetProperty(obj, s2_string, one, NONE, kNonStrictMode);
  Handle<String> s2 = factory->NewStringFromAscii(CStrVector(string2));
  CHECK(JSReceiver::HasLocalProperty(obj, s2));
}
710
711
// Adding a property to a fresh object must transition it away from the
// constructor's initial map.
TEST(JSObjectMaps) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();

  v8::HandleScope sc(CcTest::isolate());
  Handle<String> name = factory->InternalizeUtf8String("theFunction");
  Handle<JSFunction> function =
      factory->NewFunction(name, factory->undefined_value());
  Handle<Map> initial_map =
      factory->NewMap(JS_OBJECT_TYPE, JSObject::kHeaderSize);
  function->set_initial_map(*initial_map);

  Handle<String> prop_name = factory->InternalizeUtf8String("theSlot");
  Handle<JSObject> obj = factory->NewJSObject(function);

  // Set a property
  Handle<Smi> twenty_three(Smi::FromInt(23), isolate);
  JSReceiver::SetProperty(obj, prop_name, twenty_three, NONE, kNonStrictMode);
  CHECK_EQ(Smi::FromInt(23), obj->GetProperty(*prop_name));

  // Check the map has changed
  CHECK(*initial_map != obj->map());
}
736
737
// Exercises JSArray length updates: element stores extending the
// length, and setting a length beyond Smi range, which switches the
// elements backing store from fast to dictionary mode.
TEST(JSArray) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();

  v8::HandleScope sc(CcTest::isolate());
  Handle<String> name = factory->InternalizeUtf8String("Array");
  Object* raw_object = CcTest::i_isolate()->context()->global_object()->
      GetProperty(*name)->ToObjectChecked();
  Handle<JSFunction> function = Handle<JSFunction>(
      JSFunction::cast(raw_object));

  // Allocate the object.
  Handle<JSObject> object = factory->NewJSObject(function);
  Handle<JSArray> array = Handle<JSArray>::cast(object);
  // We just initialized the VM, no heap allocation failure yet.
  array->Initialize(0)->ToObjectChecked();

  // Set array length to 0.
  array->SetElementsLength(Smi::FromInt(0))->ToObjectChecked();
  CHECK_EQ(Smi::FromInt(0), array->length());
  // Must be in fast mode.
  CHECK(array->HasFastSmiOrObjectElements());

  // array[length] = name.
  JSReceiver::SetElement(array, 0, name, NONE, kNonStrictMode);
  CHECK_EQ(Smi::FromInt(1), array->length());
  CHECK_EQ(array->GetElement(isolate, 0), *name);

  // Set array length with larger than smi value.
  Handle<Object> length =
      factory->NewNumberFromUint(static_cast<uint32_t>(Smi::kMaxValue) + 1);
  array->SetElementsLength(*length)->ToObjectChecked();

  uint32_t int_length = 0;
  CHECK(length->ToArrayIndex(&int_length));
  CHECK_EQ(*length, array->length());
  CHECK(array->HasDictionaryElements());  // Must be in slow mode.

  // array[length] = name.
  // Storing at index |int_length| must grow the length by one.
  JSReceiver::SetElement(array, int_length, name, NONE, kNonStrictMode);
  uint32_t new_int_length = 0;
  CHECK(array->length()->ToArrayIndex(&new_int_length));
  CHECK_EQ(static_cast<double>(int_length), new_int_length - 1);
  CHECK_EQ(array->GetElement(isolate, int_length), *name);
  CHECK_EQ(array->GetElement(isolate, 0), *name);
}
785
786
// JSObject::Copy must produce an independent clone: it starts with the
// same properties and elements, and mutating the clone must not affect
// the original.
TEST(JSObjectCopy) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();

  v8::HandleScope sc(CcTest::isolate());
  String* object_string = String::cast(CcTest::heap()->Object_string());
  Object* raw_object = CcTest::i_isolate()->context()->global_object()->
      GetProperty(object_string)->ToObjectChecked();
  JSFunction* object_function = JSFunction::cast(raw_object);
  Handle<JSFunction> constructor(object_function);
  Handle<JSObject> obj = factory->NewJSObject(constructor);
  Handle<String> first = factory->InternalizeUtf8String("first");
  Handle<String> second = factory->InternalizeUtf8String("second");

  Handle<Smi> one(Smi::FromInt(1), isolate);
  Handle<Smi> two(Smi::FromInt(2), isolate);

  // Populate both named properties and indexed elements.
  JSReceiver::SetProperty(obj, first, one, NONE, kNonStrictMode);
  JSReceiver::SetProperty(obj, second, two, NONE, kNonStrictMode);

  JSReceiver::SetElement(obj, 0, first, NONE, kNonStrictMode);
  JSReceiver::SetElement(obj, 1, second, NONE, kNonStrictMode);

  // Make the clone.
  Handle<JSObject> clone = JSObject::Copy(obj);
  CHECK(!clone.is_identical_to(obj));

  CHECK_EQ(obj->GetElement(isolate, 0), clone->GetElement(isolate, 0));
  CHECK_EQ(obj->GetElement(isolate, 1), clone->GetElement(isolate, 1));

  CHECK_EQ(obj->GetProperty(*first), clone->GetProperty(*first));
  CHECK_EQ(obj->GetProperty(*second), clone->GetProperty(*second));

  // Flip the values.
  JSReceiver::SetProperty(clone, first, two, NONE, kNonStrictMode);
  JSReceiver::SetProperty(clone, second, one, NONE, kNonStrictMode);

  JSReceiver::SetElement(clone, 0, second, NONE, kNonStrictMode);
  JSReceiver::SetElement(clone, 1, first, NONE, kNonStrictMode);

  // The original must still hold the pre-flip values.
  CHECK_EQ(obj->GetElement(isolate, 1), clone->GetElement(isolate, 0));
  CHECK_EQ(obj->GetElement(isolate, 0), clone->GetElement(isolate, 1));

  CHECK_EQ(obj->GetProperty(*second), clone->GetProperty(*first));
  CHECK_EQ(obj->GetProperty(*first), clone->GetProperty(*second));
}
834
835
// Allocates ASCII and non-ASCII (3-byte UTF-8 sequence per character)
// strings of lengths 0..99 via both internalization and plain factory
// paths, checking the decoded character length each time.
TEST(StringAllocation) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();

  // One 3-byte UTF-8 sequence (0xE5 0xA4 0xA7) per logical character.
  const unsigned char chars[] = { 0xe5, 0xa4, 0xa7 };
  for (int length = 0; length < 100; length++) {
    v8::HandleScope scope(CcTest::isolate());
    char* non_ascii = NewArray<char>(3 * length + 1);
    char* ascii = NewArray<char>(length + 1);
    non_ascii[3 * length] = 0;
    ascii[length] = 0;
    for (int i = 0; i < length; i++) {
      ascii[i] = 'a';
      non_ascii[3 * i] = chars[0];
      non_ascii[3 * i + 1] = chars[1];
      non_ascii[3 * i + 2] = chars[2];
    }
    // The UTF-8 byte count is 3 * length; the string length must be the
    // decoded character count, i.e. |length|.
    Handle<String> non_ascii_sym =
        factory->InternalizeUtf8String(
            Vector<const char>(non_ascii, 3 * length));
    CHECK_EQ(length, non_ascii_sym->length());
    Handle<String> ascii_sym =
        factory->InternalizeOneByteString(OneByteVector(ascii, length));
    CHECK_EQ(length, ascii_sym->length());
    Handle<String> non_ascii_str =
        factory->NewStringFromUtf8(Vector<const char>(non_ascii, 3 * length));
    non_ascii_str->Hash();
    CHECK_EQ(length, non_ascii_str->length());
    Handle<String> ascii_str =
        factory->NewStringFromUtf8(Vector<const char>(ascii, length));
    ascii_str->Hash();
    CHECK_EQ(length, ascii_str->length());
    DeleteArray(non_ascii);
    DeleteArray(ascii);
  }
}
873
874
ObjectsFoundInHeap(Heap * heap,Handle<Object> objs[],int size)875 static int ObjectsFoundInHeap(Heap* heap, Handle<Object> objs[], int size) {
876 // Count the number of objects found in the heap.
877 int found_count = 0;
878 heap->EnsureHeapIsIterable();
879 HeapIterator iterator(heap);
880 for (HeapObject* obj = iterator.next(); obj != NULL; obj = iterator.next()) {
881 for (int i = 0; i < size; i++) {
882 if (*objs[i] == obj) {
883 found_count++;
884 }
885 }
886 }
887 return found_count;
888 }
889
890
// Allocates objects across new space, old pointer/data space, and large
// object space, then checks a full heap walk finds every one of them.
TEST(Iteration) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  v8::HandleScope scope(CcTest::isolate());

  // Array of objects to scan heap for.
  const int objs_count = 6;
  Handle<Object> objs[objs_count];
  int next_objs_index = 0;

  // Allocate a JS array to OLD_POINTER_SPACE and NEW_SPACE
  objs[next_objs_index++] = factory->NewJSArray(10);
  objs[next_objs_index++] = factory->NewJSArray(10,
                                                FAST_HOLEY_ELEMENTS,
                                                TENURED);

  // Allocate a small string to OLD_DATA_SPACE and NEW_SPACE
  objs[next_objs_index++] =
      factory->NewStringFromAscii(CStrVector("abcdefghij"));
  objs[next_objs_index++] =
      factory->NewStringFromAscii(CStrVector("abcdefghij"), TENURED);

  // Allocate a large string (for large object space).
  int large_size = Page::kMaxNonCodeHeapObjectSize + 1;
  char* str = new char[large_size];
  for (int i = 0; i < large_size - 1; ++i) str[i] = 'a';
  str[large_size - 1] = '\0';
  objs[next_objs_index++] =
      factory->NewStringFromAscii(CStrVector(str), TENURED);
  delete[] str;

  // Add a Map object to look for.
  objs[next_objs_index++] = Handle<Map>(HeapObject::cast(*objs[0])->map());

  CHECK_EQ(objs_count, next_objs_index);
  CHECK_EQ(objs_count, ObjectsFoundInHeap(CcTest::heap(), objs, objs_count));
}
929
930
// Escaping an empty (null) handle from a nested handle scope must
// produce a handle that is still null, not a dangling location.
TEST(EmptyHandleEscapeFrom) {
  CcTest::InitializeVM();

  v8::HandleScope scope(CcTest::isolate());
  Handle<JSObject> runaway;

  {
    v8::EscapableHandleScope nested(CcTest::isolate());
    Handle<JSObject> empty;  // Default-constructed, i.e. null handle.
    runaway = empty.EscapeFrom(&nested);
  }

  CHECK(runaway.is_null());
}
945
946
LenFromSize(int size)947 static int LenFromSize(int size) {
948 return (size - FixedArray::kHeaderSize) / kPointerSize;
949 }
950
951
// Regression test for crbug.com/39128: cloning a JSObject that holds a
// reference to a new-space object into old pointer space must update
// the region dirty marks (write barrier bookkeeping) correctly.
TEST(Regression39128) {
  // Test case for crbug.com/39128.
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  Heap* heap = isolate->heap();

  // Increase the chance of 'bump-the-pointer' allocation in old space.
  heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);

  v8::HandleScope scope(CcTest::isolate());

  // The plan: create JSObject which references objects in new space.
  // Then clone this object (forcing it to go into old space) and check
  // that region dirty marks are updated correctly.

  // Step 1: prepare a map for the object. We add 1 inobject property to it.
  Handle<JSFunction> object_ctor(
      CcTest::i_isolate()->native_context()->object_function());
  CHECK(object_ctor->has_initial_map());
  Handle<Map> object_map(object_ctor->initial_map());
  // Create a map with single inobject property.
  Handle<Map> my_map = factory->CopyMap(object_map, 1);
  int n_properties = my_map->inobject_properties();
  CHECK_GT(n_properties, 0);

  int object_size = my_map->instance_size();

  // Step 2: allocate a lot of objects so to almost fill new space: we need
  // just enough room to allocate JSObject and thus fill the newspace.

  int allocation_amount = Min(FixedArray::kMaxSize,
                              Page::kMaxNonCodeHeapObjectSize + kPointerSize);
  int allocation_len = LenFromSize(allocation_amount);
  NewSpace* new_space = heap->new_space();
  Address* top_addr = new_space->allocation_top_address();
  Address* limit_addr = new_space->allocation_limit_address();
  // Fill new space with fixed arrays until less than one more
  // allocation_amount of room remains.
  while ((*limit_addr - *top_addr) > allocation_amount) {
    CHECK(!heap->always_allocate());
    Object* array = heap->AllocateFixedArray(allocation_len)->ToObjectChecked();
    CHECK(!array->IsFailure());
    CHECK(new_space->Contains(array));
  }

  // Step 3: now allocate fixed array and JSObject to fill the whole new space.
  int to_fill = static_cast<int>(*limit_addr - *top_addr - object_size);
  int fixed_array_len = LenFromSize(to_fill);
  CHECK(fixed_array_len < FixedArray::kMaxLength);

  CHECK(!heap->always_allocate());
  Object* array = heap->AllocateFixedArray(fixed_array_len)->ToObjectChecked();
  CHECK(!array->IsFailure());
  CHECK(new_space->Contains(array));

  Object* object = heap->AllocateJSObjectFromMap(*my_map)->ToObjectChecked();
  CHECK(new_space->Contains(object));
  JSObject* jsobject = JSObject::cast(object);
  CHECK_EQ(0, FixedArray::cast(jsobject->elements())->length());
  CHECK_EQ(0, jsobject->properties()->length());
  // Create a reference to object in new space in jsobject.
  jsobject->FastPropertyAtPut(-1, array);

  // New space must now be completely full.
  CHECK_EQ(0, static_cast<int>(*limit_addr - *top_addr));

  // Step 4: clone jsobject, but force always allocate first to create a clone
  // in old pointer space.
  Address old_pointer_space_top = heap->old_pointer_space()->top();
  AlwaysAllocateScope aa_scope;
  Object* clone_obj = heap->CopyJSObject(jsobject)->ToObjectChecked();
  JSObject* clone = JSObject::cast(clone_obj);
  if (clone->address() != old_pointer_space_top) {
    // Alas, got allocated from free list, we cannot do checks.
    return;
  }
  CHECK(heap->old_pointer_space()->Contains(clone->address()));
}
1028
1029
// Checks that code for a function that is no longer referenced from
// the compilation cache gets flushed after enough full GCs, and that
// calling the function again recompiles it.
TEST(TestCodeFlushing) {
  // If we do not flush code this test is invalid.
  if (!FLAG_flush_code) return;
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_optimize_for_size = false;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  v8::HandleScope scope(CcTest::isolate());
  const char* source = "function foo() {"
                       " var x = 42;"
                       " var y = 42;"
                       " var z = x + y;"
                       "};"
                       "foo()";
  Handle<String> foo_name = factory->InternalizeUtf8String("foo");

  // This compile will add the code to the compilation cache.
  { v8::HandleScope scope(CcTest::isolate());
    CompileRun(source);
  }

  // Check function is compiled.
  Object* func_value = CcTest::i_isolate()->context()->global_object()->
      GetProperty(*foo_name)->ToObjectChecked();
  CHECK(func_value->IsJSFunction());
  Handle<JSFunction> function(JSFunction::cast(func_value));
  CHECK(function->shared()->is_compiled());

  // The code will survive at least two GCs.
  CcTest::heap()->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
  CcTest::heap()->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
  CHECK(function->shared()->is_compiled());

  // Simulate several GCs that use full marking.
  const int kAgingThreshold = 6;
  for (int i = 0; i < kAgingThreshold; i++) {
    CcTest::heap()->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
  }

  // foo should no longer be in the compilation cache
  CHECK(!function->shared()->is_compiled() || function->IsOptimized());
  CHECK(!function->is_compiled() || function->IsOptimized());
  // Call foo to get it recompiled.
  CompileRun("foo()");
  CHECK(function->shared()->is_compiled());
  CHECK(function->is_compiled());
}
1078
1079
// With --optimize-for-size, code that has only run once is pre-aged and
// flushed after a single extra GC; re-running the function resets its
// age. Mirrors TestCodeFlushing but for the pre-aging path.
TEST(TestCodeFlushingPreAged) {
  // If we do not flush code this test is invalid.
  if (!FLAG_flush_code) return;
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_optimize_for_size = true;
  CcTest::InitializeVM();
  // Use CcTest::i_isolate() for consistency with the other tests in this
  // file (the original mixed in Isolate::Current(), which is equivalent
  // here but inconsistent).
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  v8::HandleScope scope(CcTest::isolate());
  const char* source = "function foo() {"
                       " var x = 42;"
                       " var y = 42;"
                       " var z = x + y;"
                       "};"
                       "foo()";
  Handle<String> foo_name = factory->InternalizeUtf8String("foo");

  // Compile foo, but don't run it.
  { v8::HandleScope scope(CcTest::isolate());
    CompileRun(source);
  }

  // Check function is compiled.
  Object* func_value = CcTest::i_isolate()->context()->global_object()->
      GetProperty(*foo_name)->ToObjectChecked();
  CHECK(func_value->IsJSFunction());
  Handle<JSFunction> function(JSFunction::cast(func_value));
  CHECK(function->shared()->is_compiled());

  // The code has been run so will survive at least one GC.
  CcTest::heap()->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
  CHECK(function->shared()->is_compiled());

  // The code was only run once, so it should be pre-aged and collected on the
  // next GC.
  CcTest::heap()->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
  CHECK(!function->shared()->is_compiled() || function->IsOptimized());

  // Execute the function again twice, and ensure it is reset to the young age.
  { v8::HandleScope scope(CcTest::isolate());
    CompileRun("foo();"
               "foo();");
  }

  // The code will survive at least two GC now that it is young again.
  CcTest::heap()->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
  CcTest::heap()->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
  CHECK(function->shared()->is_compiled());

  // Simulate several GCs that use full marking.
  const int kAgingThreshold = 6;
  for (int i = 0; i < kAgingThreshold; i++) {
    CcTest::heap()->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
  }

  // foo should no longer be in the compilation cache
  CHECK(!function->shared()->is_compiled() || function->IsOptimized());
  CHECK(!function->is_compiled() || function->IsOptimized());
  // Call foo to get it recompiled.
  CompileRun("foo()");
  CHECK(function->shared()->is_compiled());
  CHECK(function->is_compiled());
}
1143
1144
// Exercises code flushing driven by incremental marking: the function
// must be flushed after aging via incremental GCs, and optimizing it
// while it sits in the flushing-candidate queue must not corrupt the
// queue.
TEST(TestCodeFlushingIncremental) {
  // If we do not flush code this test is invalid.
  if (!FLAG_flush_code || !FLAG_flush_code_incrementally) return;
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_optimize_for_size = false;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  v8::HandleScope scope(CcTest::isolate());
  const char* source = "function foo() {"
                       " var x = 42;"
                       " var y = 42;"
                       " var z = x + y;"
                       "};"
                       "foo()";
  Handle<String> foo_name = factory->InternalizeUtf8String("foo");

  // This compile will add the code to the compilation cache.
  { v8::HandleScope scope(CcTest::isolate());
    CompileRun(source);
  }

  // Check function is compiled.
  Object* func_value = CcTest::i_isolate()->context()->global_object()->
      GetProperty(*foo_name)->ToObjectChecked();
  CHECK(func_value->IsJSFunction());
  Handle<JSFunction> function(JSFunction::cast(func_value));
  CHECK(function->shared()->is_compiled());

  // The code will survive at least two GCs.
  CcTest::heap()->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
  CcTest::heap()->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
  CHECK(function->shared()->is_compiled());

  // Simulate several GCs that use incremental marking.
  const int kAgingThreshold = 6;
  for (int i = 0; i < kAgingThreshold; i++) {
    SimulateIncrementalMarking();
    CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
  }
  CHECK(!function->shared()->is_compiled() || function->IsOptimized());
  CHECK(!function->is_compiled() || function->IsOptimized());

  // This compile will compile the function again.
  { v8::HandleScope scope(CcTest::isolate());
    CompileRun("foo();");
  }

  // Simulate several GCs that use incremental marking but make sure
  // the loop breaks once the function is enqueued as a candidate.
  for (int i = 0; i < kAgingThreshold; i++) {
    SimulateIncrementalMarking();
    // A non-undefined next_function_link means foo is now in the
    // candidates list.
    if (!function->next_function_link()->IsUndefined()) break;
    CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
  }

  // Force optimization while incremental marking is active and while
  // the function is enqueued as a candidate.
  { v8::HandleScope scope(CcTest::isolate());
    CompileRun("%OptimizeFunctionOnNextCall(foo); foo();");
  }

  // Simulate one final GC to make sure the candidate queue is sane.
  CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
  CHECK(function->shared()->is_compiled() || !function->IsOptimized());
  CHECK(function->is_compiled() || !function->IsOptimized());
}
1212
1213
// Checks that a scavenge running in the middle of incremental marking
// copes with a code-flushing candidate dying: one of two enqueued
// functions is killed before the scavenge, and the candidate queue must
// stay sane through the final full GC.
TEST(TestCodeFlushingIncrementalScavenge) {
  // If we do not flush code this test is invalid.
  if (!FLAG_flush_code || !FLAG_flush_code_incrementally) return;
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_optimize_for_size = false;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  v8::HandleScope scope(CcTest::isolate());
  const char* source = "var foo = function() {"
                       " var x = 42;"
                       " var y = 42;"
                       " var z = x + y;"
                       "};"
                       "foo();"
                       "var bar = function() {"
                       " var x = 23;"
                       "};"
                       "bar();";
  Handle<String> foo_name = factory->InternalizeUtf8String("foo");
  Handle<String> bar_name = factory->InternalizeUtf8String("bar");

  // Perform one initial GC to enable code flushing.
  CcTest::heap()->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);

  // This compile will add the code to the compilation cache.
  { v8::HandleScope scope(CcTest::isolate());
    CompileRun(source);
  }

  // Check functions are compiled.
  Object* func_value = CcTest::i_isolate()->context()->global_object()->
      GetProperty(*foo_name)->ToObjectChecked();
  CHECK(func_value->IsJSFunction());
  Handle<JSFunction> function(JSFunction::cast(func_value));
  CHECK(function->shared()->is_compiled());
  Object* func_value2 = CcTest::i_isolate()->context()->global_object()->
      GetProperty(*bar_name)->ToObjectChecked();
  CHECK(func_value2->IsJSFunction());
  Handle<JSFunction> function2(JSFunction::cast(func_value2));
  CHECK(function2->shared()->is_compiled());

  // Clear references to functions so that one of them can die.
  { v8::HandleScope scope(CcTest::isolate());
    CompileRun("foo = 0; bar = 0;");
  }

  // Bump the code age so that flushing is triggered while the function
  // object is still located in new-space.
  const int kAgingThreshold = 6;
  for (int i = 0; i < kAgingThreshold; i++) {
    function->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
    function2->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
  }

  // Simulate incremental marking so that the functions are enqueued as
  // code flushing candidates. Then kill one of the functions. Finally
  // perform a scavenge while incremental marking is still running.
  SimulateIncrementalMarking();
  // Overwrite the handle's slot directly to simulate bar dying.
  *function2.location() = NULL;
  CcTest::heap()->CollectGarbage(NEW_SPACE, "test scavenge while marking");

  // Simulate one final GC to make sure the candidate queue is sane.
  CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
  CHECK(!function->shared()->is_compiled() || function->IsOptimized());
  CHECK(!function->is_compiled() || function->IsOptimized());
}
1281
1282
// Checks that aborting incremental marking (here via debugger
// breakpoint activity, which disables code flushing) leaves the
// code-flushing candidate queue in a sane state.
TEST(TestCodeFlushingIncrementalAbort) {
  // If we do not flush code this test is invalid.
  if (!FLAG_flush_code || !FLAG_flush_code_incrementally) return;
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_optimize_for_size = false;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  Heap* heap = isolate->heap();
  v8::HandleScope scope(CcTest::isolate());
  const char* source = "function foo() {"
                       " var x = 42;"
                       " var y = 42;"
                       " var z = x + y;"
                       "};"
                       "foo()";
  Handle<String> foo_name = factory->InternalizeUtf8String("foo");

  // This compile will add the code to the compilation cache.
  { v8::HandleScope scope(CcTest::isolate());
    CompileRun(source);
  }

  // Check function is compiled.
  Object* func_value = CcTest::i_isolate()->context()->global_object()->
      GetProperty(*foo_name)->ToObjectChecked();
  CHECK(func_value->IsJSFunction());
  Handle<JSFunction> function(JSFunction::cast(func_value));
  CHECK(function->shared()->is_compiled());

  // The code will survive at least two GCs.
  heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
  heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
  CHECK(function->shared()->is_compiled());

  // Bump the code age so that flushing is triggered.
  const int kAgingThreshold = 6;
  for (int i = 0; i < kAgingThreshold; i++) {
    function->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
  }

  // Simulate incremental marking so that the function is enqueued as
  // code flushing candidate.
  SimulateIncrementalMarking();

#ifdef ENABLE_DEBUGGER_SUPPORT
  // Enable the debugger and add a breakpoint while incremental marking
  // is running so that incremental marking aborts and code flushing is
  // disabled.
  int position = 0;
  Handle<Object> breakpoint_object(Smi::FromInt(0), isolate);
  isolate->debug()->SetBreakPoint(function, breakpoint_object, &position);
  isolate->debug()->ClearAllBreakPoints();
#endif  // ENABLE_DEBUGGER_SUPPORT

  // Force optimization now that code flushing is disabled.
  { v8::HandleScope scope(CcTest::isolate());
    CompileRun("%OptimizeFunctionOnNextCall(foo); foo();");
  }

  // Simulate one final GC to make sure the candidate queue is sane.
  heap->CollectAllGarbage(Heap::kNoGCFlags);
  CHECK(function->shared()->is_compiled() || !function->IsOptimized());
  CHECK(function->is_compiled() || !function->IsOptimized());
}
1348
1349
1350 // Count the number of native contexts in the weak list of native contexts.
CountNativeContexts()1351 int CountNativeContexts() {
1352 int count = 0;
1353 Object* object = CcTest::heap()->native_contexts_list();
1354 while (!object->IsUndefined()) {
1355 count++;
1356 object = Context::cast(object)->get(Context::NEXT_CONTEXT_LINK);
1357 }
1358 return count;
1359 }
1360
1361
1362 // Count the number of user functions in the weak list of optimized
1363 // functions attached to a native context.
CountOptimizedUserFunctions(v8::Handle<v8::Context> context)1364 static int CountOptimizedUserFunctions(v8::Handle<v8::Context> context) {
1365 int count = 0;
1366 Handle<Context> icontext = v8::Utils::OpenHandle(*context);
1367 Object* object = icontext->get(Context::OPTIMIZED_FUNCTIONS_LIST);
1368 while (object->IsJSFunction() && !JSFunction::cast(object)->IsBuiltin()) {
1369 count++;
1370 object = JSFunction::cast(object)->next_function_link();
1371 }
1372 return count;
1373 }
1374
1375
// Verifies the two internal weak lists: the heap-wide list of native
// contexts and each context's list of optimized functions. Scavenges
// must treat the links as strong; mark-compact must clear dead entries.
TEST(TestInternalWeakLists) {
  v8::V8::Initialize();

  // Some flags turn Scavenge collections into Mark-sweep collections
  // and hence are incompatible with this test case.
  if (FLAG_gc_global || FLAG_stress_compaction) return;

  static const int kNumTestContexts = 10;

  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  HandleScope scope(isolate);
  v8::Handle<v8::Context> ctx[kNumTestContexts];

  CHECK_EQ(0, CountNativeContexts());

  // Create a number of global contexts which get linked together.
  for (int i = 0; i < kNumTestContexts; i++) {
    ctx[i] = v8::Context::New(CcTest::isolate());

    // Collect garbage that might have been created by one of the
    // installed extensions.
    isolate->compilation_cache()->Clear();
    heap->CollectAllGarbage(Heap::kNoGCFlags);

    // With --always-opt every executed function lands on the
    // optimized-functions list.
    bool opt = (FLAG_always_opt && isolate->use_crankshaft());

    CHECK_EQ(i + 1, CountNativeContexts());

    ctx[i]->Enter();

    // Create a handle scope so no function objects get stuck in the outer
    // handle scope.
    HandleScope scope(isolate);
    const char* source = "function f1() { };"
                         "function f2() { };"
                         "function f3() { };"
                         "function f4() { };"
                         "function f5() { };";
    CompileRun(source);
    CHECK_EQ(0, CountOptimizedUserFunctions(ctx[i]));
    CompileRun("f1()");
    CHECK_EQ(opt ? 1 : 0, CountOptimizedUserFunctions(ctx[i]));
    CompileRun("f2()");
    CHECK_EQ(opt ? 2 : 0, CountOptimizedUserFunctions(ctx[i]));
    CompileRun("f3()");
    CHECK_EQ(opt ? 3 : 0, CountOptimizedUserFunctions(ctx[i]));
    CompileRun("f4()");
    CHECK_EQ(opt ? 4 : 0, CountOptimizedUserFunctions(ctx[i]));
    CompileRun("f5()");
    CHECK_EQ(opt ? 5 : 0, CountOptimizedUserFunctions(ctx[i]));

    // Remove the only reference to function f1.
    CompileRun("f1=null");

    // Scavenge treats these references as strong.
    for (int j = 0; j < 10; j++) {
      CcTest::heap()->PerformScavenge();
      CHECK_EQ(opt ? 5 : 0, CountOptimizedUserFunctions(ctx[i]));
    }

    // Mark compact handles the weak references.
    isolate->compilation_cache()->Clear();
    heap->CollectAllGarbage(Heap::kNoGCFlags);
    CHECK_EQ(opt ? 4 : 0, CountOptimizedUserFunctions(ctx[i]));

    // Get rid of f3 and f5 in the same way.
    CompileRun("f3=null");
    for (int j = 0; j < 10; j++) {
      CcTest::heap()->PerformScavenge();
      CHECK_EQ(opt ? 4 : 0, CountOptimizedUserFunctions(ctx[i]));
    }
    CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
    CHECK_EQ(opt ? 3 : 0, CountOptimizedUserFunctions(ctx[i]));
    CompileRun("f5=null");
    for (int j = 0; j < 10; j++) {
      CcTest::heap()->PerformScavenge();
      CHECK_EQ(opt ? 3 : 0, CountOptimizedUserFunctions(ctx[i]));
    }
    CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
    CHECK_EQ(opt ? 2 : 0, CountOptimizedUserFunctions(ctx[i]));

    ctx[i]->Exit();
  }

  // Force compilation cache cleanup.
  CcTest::heap()->NotifyContextDisposed();
  CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);

  // Dispose the native contexts one by one.
  for (int i = 0; i < kNumTestContexts; i++) {
    // TODO(dcarney): is there a better way to do this?
    i::Object** unsafe = reinterpret_cast<i::Object**>(*ctx[i]);
    *unsafe = CcTest::heap()->undefined_value();
    ctx[i].Clear();

    // Scavenge treats these references as strong.
    for (int j = 0; j < 10; j++) {
      CcTest::heap()->PerformScavenge();
      CHECK_EQ(kNumTestContexts - i, CountNativeContexts());
    }

    // Mark compact handles the weak references.
    CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
    CHECK_EQ(kNumTestContexts - i - 1, CountNativeContexts());
  }

  CHECK_EQ(0, CountNativeContexts());
}
1485
1486
1487 // Count the number of native contexts in the weak list of native contexts
1488 // causing a GC after the specified number of elements.
// Count the number of native contexts in the weak list of native contexts,
// triggering a full GC after the n-th element has been visited. The
// traversal uses handles (not raw Object*) so the current position
// survives the GC that may move or update the list.
static int CountNativeContextsWithGC(Isolate* isolate, int n) {
  Heap* heap = isolate->heap();
  int count = 0;
  Handle<Object> object(heap->native_contexts_list(), isolate);
  while (!object->IsUndefined()) {
    count++;
    if (count == n) heap->CollectAllGarbage(Heap::kNoGCFlags);
    // Re-wrap the next link in a fresh handle before continuing.
    object =
        Handle<Object>(Context::cast(*object)->get(Context::NEXT_CONTEXT_LINK),
                       isolate);
  }
  return count;
}
1502
1503
1504 // Count the number of user functions in the weak list of optimized
1505 // functions attached to a native context causing a GC after the
1506 // specified number of elements.
CountOptimizedUserFunctionsWithGC(v8::Handle<v8::Context> context,int n)1507 static int CountOptimizedUserFunctionsWithGC(v8::Handle<v8::Context> context,
1508 int n) {
1509 int count = 0;
1510 Handle<Context> icontext = v8::Utils::OpenHandle(*context);
1511 Isolate* isolate = icontext->GetIsolate();
1512 Handle<Object> object(icontext->get(Context::OPTIMIZED_FUNCTIONS_LIST),
1513 isolate);
1514 while (object->IsJSFunction() &&
1515 !Handle<JSFunction>::cast(object)->IsBuiltin()) {
1516 count++;
1517 if (count == n) isolate->heap()->CollectAllGarbage(Heap::kNoGCFlags);
1518 object = Handle<Object>(
1519 Object::cast(JSFunction::cast(*object)->next_function_link()),
1520 isolate);
1521 }
1522 return count;
1523 }
1524
1525
// Same weak lists as TestInternalWeakLists, but verifies that the list
// lengths stay correct even when a GC is triggered in the middle of
// traversing them.
TEST(TestInternalWeakListsTraverseWithGC) {
  v8::V8::Initialize();
  Isolate* isolate = CcTest::i_isolate();

  static const int kNumTestContexts = 10;

  HandleScope scope(isolate);
  v8::Handle<v8::Context> ctx[kNumTestContexts];

  CHECK_EQ(0, CountNativeContexts());

  // Create a number of contexts and check the length of the weak list both
  // with and without GCs while iterating the list.
  for (int i = 0; i < kNumTestContexts; i++) {
    ctx[i] = v8::Context::New(CcTest::isolate());
    CHECK_EQ(i + 1, CountNativeContexts());
    CHECK_EQ(i + 1, CountNativeContextsWithGC(isolate, i / 2 + 1));
  }

  // With --always-opt every executed function lands on the
  // optimized-functions list.
  bool opt = (FLAG_always_opt && isolate->use_crankshaft());

  // Compile a number of functions and check the length of the weak list of
  // optimized functions both with and without GCs while iterating the list.
  ctx[0]->Enter();
  const char* source = "function f1() { };"
                       "function f2() { };"
                       "function f3() { };"
                       "function f4() { };"
                       "function f5() { };";
  CompileRun(source);
  CHECK_EQ(0, CountOptimizedUserFunctions(ctx[0]));
  CompileRun("f1()");
  CHECK_EQ(opt ? 1 : 0, CountOptimizedUserFunctions(ctx[0]));
  CHECK_EQ(opt ? 1 : 0, CountOptimizedUserFunctionsWithGC(ctx[0], 1));
  CompileRun("f2()");
  CHECK_EQ(opt ? 2 : 0, CountOptimizedUserFunctions(ctx[0]));
  CHECK_EQ(opt ? 2 : 0, CountOptimizedUserFunctionsWithGC(ctx[0], 1));
  CompileRun("f3()");
  CHECK_EQ(opt ? 3 : 0, CountOptimizedUserFunctions(ctx[0]));
  CHECK_EQ(opt ? 3 : 0, CountOptimizedUserFunctionsWithGC(ctx[0], 1));
  CompileRun("f4()");
  CHECK_EQ(opt ? 4 : 0, CountOptimizedUserFunctions(ctx[0]));
  CHECK_EQ(opt ? 4 : 0, CountOptimizedUserFunctionsWithGC(ctx[0], 2));
  CompileRun("f5()");
  CHECK_EQ(opt ? 5 : 0, CountOptimizedUserFunctions(ctx[0]));
  CHECK_EQ(opt ? 5 : 0, CountOptimizedUserFunctionsWithGC(ctx[0], 4));

  ctx[0]->Exit();
}
1575
1576
// Checks that Heap::SizeOfObjects stays accurate while objects are
// allocated and after a full GC, including while lazy sweeping is
// advanced step-wise.
TEST(TestSizeOfObjects) {
  v8::V8::Initialize();

  // Get initial heap size after several full GCs, which will stabilize
  // the heap size and return with sweeping finished completely.
  CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
  CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
  CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
  CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
  CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
  CHECK(CcTest::heap()->old_pointer_space()->IsLazySweepingComplete());
  int initial_size = static_cast<int>(CcTest::heap()->SizeOfObjects());

  {
    // Allocate objects on several different old-space pages so that
    // lazy sweeping kicks in for subsequent GC runs.
    AlwaysAllocateScope always_allocate;
    int filler_size = static_cast<int>(FixedArray::SizeFor(8192));
    for (int i = 1; i <= 100; i++) {
      CcTest::heap()->AllocateFixedArray(8192, TENURED)->ToObjectChecked();
      // Size must grow by exactly one filler per iteration.
      CHECK_EQ(initial_size + i * filler_size,
               static_cast<int>(CcTest::heap()->SizeOfObjects()));
    }
  }

  // The heap size should go back to initial size after a full GC, even
  // though sweeping didn't finish yet.
  CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);

  // Normally sweeping would not be complete here, but no guarantees.

  CHECK_EQ(initial_size, static_cast<int>(CcTest::heap()->SizeOfObjects()));

  // Advancing the sweeper step-wise should not change the heap size.
  while (!CcTest::heap()->old_pointer_space()->IsLazySweepingComplete()) {
    CcTest::heap()->old_pointer_space()->AdvanceSweeper(KB);
    CHECK_EQ(initial_size, static_cast<int>(CcTest::heap()->SizeOfObjects()));
  }
}
1616
1617
// Compares Heap::SizeOfObjects against the sum of object sizes found by
// a heap iteration; the two may legitimately disagree, but only within
// 5% of the larger value.
TEST(TestSizeOfObjectsVsHeapIteratorPrecision) {
  CcTest::InitializeVM();
  CcTest::heap()->EnsureHeapIsIterable();
  intptr_t size_of_objects_1 = CcTest::heap()->SizeOfObjects();
  HeapIterator iterator(CcTest::heap());
  intptr_t size_of_objects_2 = 0;
  for (HeapObject* obj = iterator.next();
       obj != NULL;
       obj = iterator.next()) {
    // Free-space filler objects are not "real" objects; skip them.
    if (!obj->IsFreeSpace()) {
      size_of_objects_2 += obj->Size();
    }
  }
  // Delta must be within 5% of the larger result.
  // TODO(gc): Tighten this up by distinguishing between byte
  // arrays that are real and those that merely mark free space
  // on the heap.
  if (size_of_objects_1 > size_of_objects_2) {
    intptr_t delta = size_of_objects_1 - size_of_objects_2;
    PrintF("Heap::SizeOfObjects: %" V8_PTR_PREFIX "d, "
           "Iterator: %" V8_PTR_PREFIX "d, "
           "delta: %" V8_PTR_PREFIX "d\n",
           size_of_objects_1, size_of_objects_2, delta);
    CHECK_GT(size_of_objects_1 / 20, delta);
  } else {
    intptr_t delta = size_of_objects_2 - size_of_objects_1;
    PrintF("Heap::SizeOfObjects: %" V8_PTR_PREFIX "d, "
           "Iterator: %" V8_PTR_PREFIX "d, "
           "delta: %" V8_PTR_PREFIX "d\n",
           size_of_objects_1, size_of_objects_2, delta);
    CHECK_GT(size_of_objects_2 / 20, delta);
  }
}
1651
1652
FillUpNewSpace(NewSpace * new_space)1653 static void FillUpNewSpace(NewSpace* new_space) {
1654 // Fill up new space to the point that it is completely full. Make sure
1655 // that the scavenger does not undo the filling.
1656 Heap* heap = new_space->heap();
1657 Isolate* isolate = heap->isolate();
1658 Factory* factory = isolate->factory();
1659 HandleScope scope(isolate);
1660 AlwaysAllocateScope always_allocate;
1661 intptr_t available = new_space->EffectiveCapacity() - new_space->Size();
1662 intptr_t number_of_fillers = (available / FixedArray::SizeFor(32)) - 1;
1663 for (intptr_t i = 0; i < number_of_fillers; i++) {
1664 CHECK(heap->InNewSpace(*factory->NewFixedArray(32, NOT_TENURED)));
1665 }
1666 }
1667
1668
// Exercises NewSpace::Grow and NewSpace::Shrink: growing doubles the
// capacity, shrinking halves it only when the space is empty enough,
// and repeated shrinks below the minimum are no-ops.
TEST(GrowAndShrinkNewSpace) {
  CcTest::InitializeVM();
  Heap* heap = CcTest::heap();
  NewSpace* new_space = heap->new_space();

  if (heap->ReservedSemiSpaceSize() == heap->InitialSemiSpaceSize() ||
      heap->MaxSemiSpaceSize() == heap->InitialSemiSpaceSize()) {
    // The max size cannot exceed the reserved size, since semispaces must be
    // always within the reserved space. We can't test new space growing and
    // shrinking if the reserved size is the same as the minimum (initial) size.
    return;
  }

  // Explicitly growing should double the space capacity.
  intptr_t old_capacity, new_capacity;
  old_capacity = new_space->Capacity();
  new_space->Grow();
  new_capacity = new_space->Capacity();
  CHECK(2 * old_capacity == new_capacity);

  // Filling up the space must not change its capacity.
  old_capacity = new_space->Capacity();
  FillUpNewSpace(new_space);
  new_capacity = new_space->Capacity();
  CHECK(old_capacity == new_capacity);

  // Explicitly shrinking should not affect space capacity.
  // (The space is still full, so it cannot shrink.)
  old_capacity = new_space->Capacity();
  new_space->Shrink();
  new_capacity = new_space->Capacity();
  CHECK(old_capacity == new_capacity);

  // Let the scavenger empty the new space.
  heap->CollectGarbage(NEW_SPACE);
  CHECK_LE(new_space->Size(), old_capacity);

  // Explicitly shrinking should halve the space capacity.
  old_capacity = new_space->Capacity();
  new_space->Shrink();
  new_capacity = new_space->Capacity();
  CHECK(old_capacity == 2 * new_capacity);

  // Consecutive shrinking should not affect space capacity.
  old_capacity = new_space->Capacity();
  new_space->Shrink();
  new_space->Shrink();
  new_space->Shrink();
  new_capacity = new_space->Capacity();
  CHECK(old_capacity == new_capacity);
}
1718
1719
// After growing and filling new space, CollectAllAvailableGarbage must
// shrink the capacity back down to its original value.
TEST(CollectingAllAvailableGarbageShrinksNewSpace) {
  CcTest::InitializeVM();
  Heap* heap = CcTest::heap();
  if (heap->ReservedSemiSpaceSize() == heap->InitialSemiSpaceSize() ||
      heap->MaxSemiSpaceSize() == heap->InitialSemiSpaceSize()) {
    // The max size cannot exceed the reserved size, since semispaces must be
    // always within the reserved space. We can't test new space growing and
    // shrinking if the reserved size is the same as the minimum (initial) size.
    return;
  }

  v8::HandleScope scope(CcTest::isolate());
  NewSpace* new_space = heap->new_space();
  intptr_t old_capacity, new_capacity;
  old_capacity = new_space->Capacity();
  new_space->Grow();
  new_capacity = new_space->Capacity();
  CHECK(2 * old_capacity == new_capacity);
  FillUpNewSpace(new_space);
  heap->CollectAllAvailableGarbage();
  new_capacity = new_space->Capacity();
  // Capacity is back to where it started before the Grow().
  CHECK(old_capacity == new_capacity);
}
1743
1744
NumberOfGlobalObjects()1745 static int NumberOfGlobalObjects() {
1746 int count = 0;
1747 HeapIterator iterator(CcTest::heap());
1748 for (HeapObject* obj = iterator.next(); obj != NULL; obj = iterator.next()) {
1749 if (obj->IsGlobalObject()) count++;
1750 }
1751 return count;
1752 }
1753
1754
1755 // Test that we don't embed maps from foreign contexts into
1756 // optimized code.
TEST(LeakNativeContextViaMap)1757 TEST(LeakNativeContextViaMap) {
1758 i::FLAG_allow_natives_syntax = true;
1759 v8::Isolate* isolate = CcTest::isolate();
1760 v8::HandleScope outer_scope(isolate);
1761 v8::Persistent<v8::Context> ctx1p;
1762 v8::Persistent<v8::Context> ctx2p;
1763 {
1764 v8::HandleScope scope(isolate);
1765 ctx1p.Reset(isolate, v8::Context::New(isolate));
1766 ctx2p.Reset(isolate, v8::Context::New(isolate));
1767 v8::Local<v8::Context>::New(isolate, ctx1p)->Enter();
1768 }
1769
1770 CcTest::heap()->CollectAllAvailableGarbage();
1771 CHECK_EQ(4, NumberOfGlobalObjects());
1772
1773 {
1774 v8::HandleScope inner_scope(isolate);
1775 CompileRun("var v = {x: 42}");
1776 v8::Local<v8::Context> ctx1 = v8::Local<v8::Context>::New(isolate, ctx1p);
1777 v8::Local<v8::Context> ctx2 = v8::Local<v8::Context>::New(isolate, ctx2p);
1778 v8::Local<v8::Value> v = ctx1->Global()->Get(v8_str("v"));
1779 ctx2->Enter();
1780 ctx2->Global()->Set(v8_str("o"), v);
1781 v8::Local<v8::Value> res = CompileRun(
1782 "function f() { return o.x; }"
1783 "for (var i = 0; i < 10; ++i) f();"
1784 "%OptimizeFunctionOnNextCall(f);"
1785 "f();");
1786 CHECK_EQ(42, res->Int32Value());
1787 ctx2->Global()->Set(v8_str("o"), v8::Int32::New(0));
1788 ctx2->Exit();
1789 v8::Local<v8::Context>::New(isolate, ctx1)->Exit();
1790 ctx1p.Reset();
1791 v8::V8::ContextDisposedNotification();
1792 }
1793 CcTest::heap()->CollectAllAvailableGarbage();
1794 CHECK_EQ(2, NumberOfGlobalObjects());
1795 ctx2p.Reset();
1796 CcTest::heap()->CollectAllAvailableGarbage();
1797 CHECK_EQ(0, NumberOfGlobalObjects());
1798 }
1799
1800
// Test that we don't embed functions from foreign contexts into
// optimized code.
TEST(LeakNativeContextViaFunction) {
  i::FLAG_allow_natives_syntax = true;
  v8::Isolate* isolate = CcTest::isolate();
  v8::HandleScope outer_scope(isolate);
  v8::Persistent<v8::Context> ctx1p;
  v8::Persistent<v8::Context> ctx2p;
  {
    v8::HandleScope scope(isolate);
    ctx1p.Reset(isolate, v8::Context::New(isolate));
    ctx2p.Reset(isolate, v8::Context::New(isolate));
    v8::Local<v8::Context>::New(isolate, ctx1p)->Enter();
  }

  // Two fresh contexts account for four global objects.
  CcTest::heap()->CollectAllAvailableGarbage();
  CHECK_EQ(4, NumberOfGlobalObjects());

  {
    v8::HandleScope inner_scope(isolate);
    CompileRun("var v = function() { return 42; }");
    v8::Local<v8::Context> ctx1 = v8::Local<v8::Context>::New(isolate, ctx1p);
    v8::Local<v8::Context> ctx2 = v8::Local<v8::Context>::New(isolate, ctx2p);
    v8::Local<v8::Value> v = ctx1->Global()->Get(v8_str("v"));
    ctx2->Enter();
    ctx2->Global()->Set(v8_str("o"), v);
    // Optimize f in ctx2 while it calls the ctx1 function.
    v8::Local<v8::Value> res = CompileRun(
        "function f(x) { return x(); }"
        "for (var i = 0; i < 10; ++i) f(o);"
        "%OptimizeFunctionOnNextCall(f);"
        "f(o);");
    CHECK_EQ(42, res->Int32Value());
    // Sever the only reference from ctx2 to the ctx1 function, then drop ctx1.
    ctx2->Global()->Set(v8_str("o"), v8::Int32::New(0));
    ctx2->Exit();
    ctx1->Exit();
    ctx1p.Reset();
    v8::V8::ContextDisposedNotification();
  }
  // ctx1's global objects must be collectible now; the optimized code in ctx2
  // must not retain a hidden reference to the foreign context.
  CcTest::heap()->CollectAllAvailableGarbage();
  CHECK_EQ(2, NumberOfGlobalObjects());
  ctx2p.Reset();
  CcTest::heap()->CollectAllAvailableGarbage();
  CHECK_EQ(0, NumberOfGlobalObjects());
}
1845
1846
// Like LeakNativeContextViaMap, but the optimized code in ctx2 performs a
// keyed (element) load from the ctx1 array.
TEST(LeakNativeContextViaMapKeyed) {
  i::FLAG_allow_natives_syntax = true;
  v8::Isolate* isolate = CcTest::isolate();
  v8::HandleScope outer_scope(isolate);
  v8::Persistent<v8::Context> ctx1p;
  v8::Persistent<v8::Context> ctx2p;
  {
    v8::HandleScope scope(isolate);
    ctx1p.Reset(isolate, v8::Context::New(isolate));
    ctx2p.Reset(isolate, v8::Context::New(isolate));
    v8::Local<v8::Context>::New(isolate, ctx1p)->Enter();
  }

  // Two fresh contexts account for four global objects.
  CcTest::heap()->CollectAllAvailableGarbage();
  CHECK_EQ(4, NumberOfGlobalObjects());

  {
    v8::HandleScope inner_scope(isolate);
    CompileRun("var v = [42, 43]");
    v8::Local<v8::Context> ctx1 = v8::Local<v8::Context>::New(isolate, ctx1p);
    v8::Local<v8::Context> ctx2 = v8::Local<v8::Context>::New(isolate, ctx2p);
    v8::Local<v8::Value> v = ctx1->Global()->Get(v8_str("v"));
    ctx2->Enter();
    ctx2->Global()->Set(v8_str("o"), v);
    v8::Local<v8::Value> res = CompileRun(
        "function f() { return o[0]; }"
        "for (var i = 0; i < 10; ++i) f();"
        "%OptimizeFunctionOnNextCall(f);"
        "f();");
    CHECK_EQ(42, res->Int32Value());
    // Sever the only reference from ctx2 to the ctx1 array, then drop ctx1.
    ctx2->Global()->Set(v8_str("o"), v8::Int32::New(0));
    ctx2->Exit();
    ctx1->Exit();
    ctx1p.Reset();
    v8::V8::ContextDisposedNotification();
  }
  // ctx1's global objects must be collected; otherwise the optimized code
  // leaked the foreign context.
  CcTest::heap()->CollectAllAvailableGarbage();
  CHECK_EQ(2, NumberOfGlobalObjects());
  ctx2p.Reset();
  CcTest::heap()->CollectAllAvailableGarbage();
  CHECK_EQ(0, NumberOfGlobalObjects());
}
1889
1890
// Like LeakNativeContextViaMap, but the optimized code in ctx2 reaches the
// ctx1 object through the prototype chain (p.__proto__ = o).
TEST(LeakNativeContextViaMapProto) {
  i::FLAG_allow_natives_syntax = true;
  v8::Isolate* isolate = CcTest::isolate();
  v8::HandleScope outer_scope(isolate);
  v8::Persistent<v8::Context> ctx1p;
  v8::Persistent<v8::Context> ctx2p;
  {
    v8::HandleScope scope(isolate);
    ctx1p.Reset(isolate, v8::Context::New(isolate));
    ctx2p.Reset(isolate, v8::Context::New(isolate));
    v8::Local<v8::Context>::New(isolate, ctx1p)->Enter();
  }

  // Two fresh contexts account for four global objects.
  CcTest::heap()->CollectAllAvailableGarbage();
  CHECK_EQ(4, NumberOfGlobalObjects());

  {
    v8::HandleScope inner_scope(isolate);
    CompileRun("var v = { y: 42}");
    v8::Local<v8::Context> ctx1 = v8::Local<v8::Context>::New(isolate, ctx1p);
    v8::Local<v8::Context> ctx2 = v8::Local<v8::Context>::New(isolate, ctx2p);
    v8::Local<v8::Value> v = ctx1->Global()->Get(v8_str("v"));
    ctx2->Enter();
    ctx2->Global()->Set(v8_str("o"), v);
    v8::Local<v8::Value> res = CompileRun(
        "function f() {"
        " var p = {x: 42};"
        " p.__proto__ = o;"
        " return p.x;"
        "}"
        "for (var i = 0; i < 10; ++i) f();"
        "%OptimizeFunctionOnNextCall(f);"
        "f();");
    CHECK_EQ(42, res->Int32Value());
    // Sever the only reference from ctx2 to the ctx1 object, then drop ctx1.
    ctx2->Global()->Set(v8_str("o"), v8::Int32::New(0));
    ctx2->Exit();
    ctx1->Exit();
    ctx1p.Reset();
    v8::V8::ContextDisposedNotification();
  }
  // ctx1's global objects must be collected; otherwise the optimized code
  // leaked the foreign context.
  CcTest::heap()->CollectAllAvailableGarbage();
  CHECK_EQ(2, NumberOfGlobalObjects());
  ctx2p.Reset();
  CcTest::heap()->CollectAllAvailableGarbage();
  CHECK_EQ(0, NumberOfGlobalObjects());
}
1937
1938
// Runs optimized code containing an instanceof check while incremental
// marking is in progress, then finishes the GC.  With VERIFY_HEAP enabled,
// heap verification would presumably catch a missing write barrier in the
// InstanceOf stub (hence the test name) -- TODO confirm against the stub.
TEST(InstanceOfStubWriteBarrier) {
  i::FLAG_allow_natives_syntax = true;
#ifdef VERIFY_HEAP
  i::FLAG_verify_heap = true;
#endif

  CcTest::InitializeVM();
  if (!CcTest::i_isolate()->use_crankshaft()) return;
  if (i::FLAG_force_marking_deque_overflows) return;
  v8::HandleScope outer_scope(CcTest::isolate());

  {
    v8::HandleScope scope(CcTest::isolate());
    // g() feeds f() an object from an anonymous constructor, exercising the
    // instanceof path in optimized code.
    CompileRun(
        "function foo () { }"
        "function mkbar () { return new (new Function(\"\")) (); }"
        "function f (x) { return (x instanceof foo); }"
        "function g () { f(mkbar()); }"
        "f(new foo()); f(new foo());"
        "%OptimizeFunctionOnNextCall(f);"
        "f(new foo()); g();");
  }

  // Restart incremental marking from a clean state.
  IncrementalMarking* marking = CcTest::heap()->incremental_marking();
  marking->Abort();
  marking->Start();

  Handle<JSFunction> f =
      v8::Utils::OpenHandle(
          *v8::Handle<v8::Function>::Cast(
              CcTest::global()->Get(v8_str("f"))));

  CHECK(f->IsOptimized());

  // Step the marker until f's code object has been marked black (or marking
  // finishes on its own).
  while (!Marking::IsBlack(Marking::MarkBitFrom(f->code())) &&
         !marking->IsStopped()) {
    // Discard any pending GC requests otherwise we will get GC when we enter
    // code below.
    marking->Step(MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD);
  }

  CHECK(marking->IsMarking());

  {
    // Call g() while marking is still active, so the black code object's
    // writes go through the write barrier.
    v8::HandleScope scope(CcTest::isolate());
    v8::Handle<v8::Object> global = CcTest::global();
    v8::Handle<v8::Function> g =
        v8::Handle<v8::Function>::Cast(global->Get(v8_str("g")));
    g->Call(global, 0, NULL);
  }

  // Finish the GC; with FLAG_verify_heap set this verifies heap consistency.
  CcTest::heap()->incremental_marking()->set_should_hurry(true);
  CcTest::heap()->CollectGarbage(OLD_POINTER_SPACE);
}
1993
1994
// Tests that GC clears only dead prototype transitions from a map, compacts
// the transition array afterwards, and records slots correctly when a new
// prototype lives on an old-space evacuation candidate.
TEST(PrototypeTransitionClearing) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  v8::HandleScope scope(CcTest::isolate());

  // Create 10 prototype transitions on |base|'s map; only the pairs for
  // i >= 3 (7 of them) are kept alive via |live|.
  CompileRun(
      "var base = {};"
      "var live = [];"
      "for (var i = 0; i < 10; i++) {"
      " var object = {};"
      " var prototype = {};"
      " object.__proto__ = prototype;"
      " if (i >= 3) live.push(object, prototype);"
      "}");

  Handle<JSObject> baseObject =
      v8::Utils::OpenHandle(
          *v8::Handle<v8::Object>::Cast(
              CcTest::global()->Get(v8_str("base"))));

  // Verify that only dead prototype transitions are cleared.
  CHECK_EQ(10, baseObject->map()->NumberOfProtoTransitions());
  CcTest::heap()->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
  const int transitions = 10 - 3;
  CHECK_EQ(transitions, baseObject->map()->NumberOfProtoTransitions());

  // Verify that prototype transitions array was compacted.
  FixedArray* trans = baseObject->map()->GetPrototypeTransitions();
  for (int i = 0; i < transitions; i++) {
    int j = Map::kProtoTransitionHeaderSize +
        i * Map::kProtoTransitionElementsPerEntry;
    CHECK(trans->get(j + Map::kProtoTransitionMapOffset)->IsMap());
    Object* proto = trans->get(j + Map::kProtoTransitionPrototypeOffset);
    CHECK(proto->IsTheHole() || proto->IsJSObject());
  }

  // Make sure next prototype is placed on an old-space evacuation candidate.
  Handle<JSObject> prototype;
  PagedSpace* space = CcTest::heap()->old_pointer_space();
  {
    // AlwaysAllocateScope prevents the allocation below from triggering a GC
    // after SimulateFullSpace has exhausted the space.
    AlwaysAllocateScope always_allocate;
    SimulateFullSpace(space);
    prototype = factory->NewJSArray(32 * KB, FAST_HOLEY_ELEMENTS, TENURED);
  }

  // Add a prototype on an evacuation candidate and verify that transition
  // clearing correctly records slots in prototype transition array.
  i::FLAG_always_compact = true;
  Handle<Map> map(baseObject->map());
  CHECK(!space->LastPage()->Contains(
      map->GetPrototypeTransitions()->address()));
  CHECK(space->LastPage()->Contains(prototype->address()));
  JSObject::SetPrototype(baseObject, prototype, false);
  CHECK(Map::GetPrototypeTransition(map, prototype)->IsMap());
  // The transition must still resolve after compacting GC moved |prototype|.
  CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
  CHECK(Map::GetPrototypeTransition(map, prototype)->IsMap());
}
2053
2054
// Tests that a function's SharedFunctionInfo counters (ic_age, opt_count,
// profiler ticks) are reset when the global IC age is bumped while
// incremental marking is in progress.
TEST(ResetSharedFunctionInfoCountersDuringIncrementalMarking) {
  i::FLAG_stress_compaction = false;
  i::FLAG_allow_natives_syntax = true;
#ifdef VERIFY_HEAP
  i::FLAG_verify_heap = true;
#endif

  CcTest::InitializeVM();
  if (!CcTest::i_isolate()->use_crankshaft()) return;
  v8::HandleScope outer_scope(CcTest::isolate());

  {
    v8::HandleScope scope(CcTest::isolate());
    CompileRun(
        "function f () {"
        " var s = 0;"
        " for (var i = 0; i < 100; i++) s += i;"
        " return s;"
        "}"
        "f(); f();"
        "%OptimizeFunctionOnNextCall(f);"
        "f();");
  }
  Handle<JSFunction> f =
      v8::Utils::OpenHandle(
          *v8::Handle<v8::Function>::Cast(
              CcTest::global()->Get(v8_str("f"))));
  CHECK(f->IsOptimized());

  // Start incremental marking from a clean state.
  IncrementalMarking* marking = CcTest::heap()->incremental_marking();
  marking->Abort();
  marking->Start();

  // The following two calls will increment CcTest::heap()->global_ic_age().
  const int kLongIdlePauseInMs = 1000;
  v8::V8::ContextDisposedNotification();
  v8::V8::IdleNotification(kLongIdlePauseInMs);

  // Drive marking to completion manually.
  while (!marking->IsStopped() && !marking->IsComplete()) {
    marking->Step(1 * MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD);
  }
  if (!marking->IsStopped() || marking->should_hurry()) {
    // We don't normally finish a GC via Step(), we normally finish by
    // setting the stack guard and then do the final steps in the stack
    // guard interrupt. But here we didn't ask for that, and there is no
    // JS code running to trigger the interrupt, so we explicitly finalize
    // here.
    CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags,
                                      "Test finalizing incremental mark-sweep");
  }

  // The stale optimization/profiling counters must have been reset.
  CHECK_EQ(CcTest::heap()->global_ic_age(), f->shared()->ic_age());
  CHECK_EQ(0, f->shared()->opt_count());
  CHECK_EQ(0, f->shared()->code()->profiler_ticks());
}
2110
2111
// Tests that SharedFunctionInfo counters are also reset by a full
// (non-incremental) mark-sweep triggered via IdleNotification.
TEST(ResetSharedFunctionInfoCountersDuringMarkSweep) {
  i::FLAG_stress_compaction = false;
  i::FLAG_allow_natives_syntax = true;
#ifdef VERIFY_HEAP
  i::FLAG_verify_heap = true;
#endif

  CcTest::InitializeVM();
  if (!CcTest::i_isolate()->use_crankshaft()) return;
  v8::HandleScope outer_scope(CcTest::isolate());

  {
    v8::HandleScope scope(CcTest::isolate());
    CompileRun(
        "function f () {"
        " var s = 0;"
        " for (var i = 0; i < 100; i++) s += i;"
        "  return s;"
        "}"
        "f(); f();"
        "%OptimizeFunctionOnNextCall(f);"
        "f();");
  }
  Handle<JSFunction> f =
      v8::Utils::OpenHandle(
          *v8::Handle<v8::Function>::Cast(
              CcTest::global()->Get(v8_str("f"))));
  CHECK(f->IsOptimized());

  // Ensure no incremental marking is running.
  CcTest::heap()->incremental_marking()->Abort();

  // The following two calls will increment CcTest::heap()->global_ic_age().
  // Since incremental marking is off, IdleNotification will do full GC.
  const int kLongIdlePauseInMs = 1000;
  v8::V8::ContextDisposedNotification();
  v8::V8::IdleNotification(kLongIdlePauseInMs);

  // The stale optimization/profiling counters must have been reset.
  CHECK_EQ(CcTest::heap()->global_ic_age(), f->shared()->ic_age());
  CHECK_EQ(0, f->shared()->opt_count());
  CHECK_EQ(0, f->shared()->code()->profiler_ticks());
}
2153
2154
// Test that HAllocateObject will always return an object in new-space.
TEST(OptimizedAllocationAlwaysInNewSpace) {
  i::FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
  if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
  v8::HandleScope scope(CcTest::isolate());

  // Fill up new space first; AlwaysAllocateScope keeps the allocations below
  // from triggering a GC.
  SimulateFullSpace(CcTest::heap()->new_space());
  AlwaysAllocateScope always_allocate;
  v8::Local<v8::Value> res = CompileRun(
      "function c(x) {"
      " this.x = x;"
      " for (var i = 0; i < 32; i++) {"
      " this['x' + i] = x;"
      " }"
      "}"
      "function f(x) { return new c(x); };"
      "f(1); f(2); f(3);"
      "%OptimizeFunctionOnNextCall(f);"
      "f(4);");
  CHECK_EQ(4, res->ToObject()->GetRealNamedProperty(v8_str("x"))->Int32Value());

  Handle<JSObject> o =
      v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));

  // The optimized allocation must still land in new space.
  CHECK(CcTest::heap()->InNewSpace(*o));
}
2183
2184
// Tests pretenuring with allocation folding: alternating double and object
// fields of an optimized allocation must end up in the matching old spaces
// (data space for unboxed doubles, pointer space for arrays).
TEST(OptimizedPretenuringAllocationFolding) {
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_allocation_site_pretenuring = false;
  CcTest::InitializeVM();
  if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
  if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
  v8::HandleScope scope(CcTest::isolate());
  // Force pretenuring of new allocations.
  CcTest::heap()->SetNewSpaceHighPromotionModeActive(true);

  v8::Local<v8::Value> res = CompileRun(
      "function DataObject() {"
      " this.a = 1.1;"
      " this.b = [{}];"
      " this.c = 1.2;"
      " this.d = [{}];"
      " this.e = 1.3;"
      " this.f = [{}];"
      "}"
      "function f() {"
      " return new DataObject();"
      "};"
      "f(); f(); f();"
      "%OptimizeFunctionOnNextCall(f);"
      "f();");

  Handle<JSObject> o =
      v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));

  // Even-indexed properties hold doubles, odd-indexed hold arrays.
  CHECK(CcTest::heap()->InOldDataSpace(o->RawFastPropertyAt(0)));
  CHECK(CcTest::heap()->InOldPointerSpace(o->RawFastPropertyAt(1)));
  CHECK(CcTest::heap()->InOldDataSpace(o->RawFastPropertyAt(2)));
  CHECK(CcTest::heap()->InOldPointerSpace(o->RawFastPropertyAt(3)));
  CHECK(CcTest::heap()->InOldDataSpace(o->RawFastPropertyAt(4)));
  CHECK(CcTest::heap()->InOldPointerSpace(o->RawFastPropertyAt(5)));
}
2220
2221
// Same shape of test as OptimizedPretenuringAllocationFolding, but with the
// double and object fields grouped in blocks rather than alternating.
TEST(OptimizedPretenuringAllocationFoldingBlocks) {
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_allocation_site_pretenuring = false;
  CcTest::InitializeVM();
  if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
  if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
  v8::HandleScope scope(CcTest::isolate());
  // Force pretenuring of new allocations.
  CcTest::heap()->SetNewSpaceHighPromotionModeActive(true);

  v8::Local<v8::Value> res = CompileRun(
      "function DataObject() {"
      " this.a = [{}];"
      " this.b = [{}];"
      " this.c = 1.1;"
      " this.d = 1.2;"
      " this.e = [{}];"
      " this.f = 1.3;"
      "}"
      "function f() {"
      " return new DataObject();"
      "};"
      "f(); f(); f();"
      "%OptimizeFunctionOnNextCall(f);"
      "f();");

  Handle<JSObject> o =
      v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));

  // Array-valued properties land in old pointer space, doubles in data space.
  CHECK(CcTest::heap()->InOldPointerSpace(o->RawFastPropertyAt(0)));
  CHECK(CcTest::heap()->InOldPointerSpace(o->RawFastPropertyAt(1)));
  CHECK(CcTest::heap()->InOldDataSpace(o->RawFastPropertyAt(2)));
  CHECK(CcTest::heap()->InOldDataSpace(o->RawFastPropertyAt(3)));
  CHECK(CcTest::heap()->InOldPointerSpace(o->RawFastPropertyAt(4)));
  CHECK(CcTest::heap()->InOldDataSpace(o->RawFastPropertyAt(5)));
}
2257
2258
// Tests that an optimized array literal of objects is pretenured, both the
// array itself and its elements backing store.
TEST(OptimizedPretenuringObjectArrayLiterals) {
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_allocation_site_pretenuring = false;
  CcTest::InitializeVM();
  if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
  if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
  v8::HandleScope scope(CcTest::isolate());
  // Force pretenuring of new allocations.
  CcTest::heap()->SetNewSpaceHighPromotionModeActive(true);

  v8::Local<v8::Value> res = CompileRun(
      "function f() {"
      " var numbers = [{}, {}, {}];"
      " return numbers;"
      "};"
      "f(); f(); f();"
      "%OptimizeFunctionOnNextCall(f);"
      "f();");

  Handle<JSObject> o =
      v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));

  CHECK(CcTest::heap()->InOldPointerSpace(o->elements()));
  CHECK(CcTest::heap()->InOldPointerSpace(*o));
}
2283
2284
// Tests pretenuring of an object with mixed in-object properties: nested
// object values go to old pointer space, doubles to old data space, and the
// nesting is checked one level deep.
TEST(OptimizedPretenuringMixedInObjectProperties) {
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_allocation_site_pretenuring = false;
  CcTest::InitializeVM();
  if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
  if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
  v8::HandleScope scope(CcTest::isolate());
  // Force pretenuring of new allocations.
  CcTest::heap()->SetNewSpaceHighPromotionModeActive(true);

  v8::Local<v8::Value> res = CompileRun(
      "function f() {"
      " var numbers = {a: {c: 2.2, d: {}}, b: 1.1};"
      " return numbers;"
      "};"
      "f(); f(); f();"
      "%OptimizeFunctionOnNextCall(f);"
      "f();");

  Handle<JSObject> o =
      v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));

  CHECK(CcTest::heap()->InOldPointerSpace(*o));
  CHECK(CcTest::heap()->InOldPointerSpace(o->RawFastPropertyAt(0)));
  CHECK(CcTest::heap()->InOldDataSpace(o->RawFastPropertyAt(1)));

  // Property 0 is the nested object {c: 2.2, d: {}}; check its own
  // properties as well.
  JSObject* inner_object = reinterpret_cast<JSObject*>(o->RawFastPropertyAt(0));
  CHECK(CcTest::heap()->InOldPointerSpace(inner_object));
  CHECK(CcTest::heap()->InOldDataSpace(inner_object->RawFastPropertyAt(0)));
  CHECK(CcTest::heap()->InOldPointerSpace(inner_object->RawFastPropertyAt(1)));
}
2315
2316
// Tests pretenuring of an object holding only double-valued properties:
// the object goes to old pointer space, its properties store to data space.
TEST(OptimizedPretenuringDoubleArrayProperties) {
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_allocation_site_pretenuring = false;
  CcTest::InitializeVM();
  if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
  if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
  v8::HandleScope scope(CcTest::isolate());
  // Force pretenuring of new allocations.
  CcTest::heap()->SetNewSpaceHighPromotionModeActive(true);

  v8::Local<v8::Value> res = CompileRun(
      "function f() {"
      " var numbers = {a: 1.1, b: 2.2};"
      " return numbers;"
      "};"
      "f(); f(); f();"
      "%OptimizeFunctionOnNextCall(f);"
      "f();");

  Handle<JSObject> o =
      v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));

  CHECK(CcTest::heap()->InOldPointerSpace(*o));
  CHECK(CcTest::heap()->InOldDataSpace(o->properties()));
}
2341
2342
// Tests pretenuring of a double array literal: the array object goes to old
// pointer space and its (unboxed double) elements to old data space.
TEST(OptimizedPretenuringdoubleArrayLiterals) {
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_allocation_site_pretenuring = false;
  CcTest::InitializeVM();
  if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
  if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
  v8::HandleScope scope(CcTest::isolate());
  // Force pretenuring of new allocations.
  CcTest::heap()->SetNewSpaceHighPromotionModeActive(true);

  v8::Local<v8::Value> res = CompileRun(
      "function f() {"
      " var numbers = [1.1, 2.2, 3.3];"
      " return numbers;"
      "};"
      "f(); f(); f();"
      "%OptimizeFunctionOnNextCall(f);"
      "f();");

  Handle<JSObject> o =
      v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));

  CHECK(CcTest::heap()->InOldDataSpace(o->elements()));
  CHECK(CcTest::heap()->InOldPointerSpace(*o));
}
2367
2368
// Tests pretenuring of a nested literal mixing an object array and a double
// array: each sub-array and its elements must land in the right old space.
TEST(OptimizedPretenuringNestedMixedArrayLiterals) {
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_allocation_site_pretenuring = false;
  CcTest::InitializeVM();
  if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
  if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
  v8::HandleScope scope(CcTest::isolate());
  // Force pretenuring of new allocations.
  CcTest::heap()->SetNewSpaceHighPromotionModeActive(true);

  v8::Local<v8::Value> res = CompileRun(
      "function f() {"
      " var numbers = [[{}, {}, {}],[1.1, 2.2, 3.3]];"
      " return numbers;"
      "};"
      "f(); f(); f();"
      "%OptimizeFunctionOnNextCall(f);"
      "f();");

  // Element 0 is the object array, element 1 is the double array.
  v8::Local<v8::Value> int_array = v8::Object::Cast(*res)->Get(v8_str("0"));
  Handle<JSObject> int_array_handle =
      v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(int_array));
  v8::Local<v8::Value> double_array = v8::Object::Cast(*res)->Get(v8_str("1"));
  Handle<JSObject> double_array_handle =
      v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(double_array));

  Handle<JSObject> o =
      v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
  CHECK(CcTest::heap()->InOldPointerSpace(*o));
  CHECK(CcTest::heap()->InOldPointerSpace(*int_array_handle));
  CHECK(CcTest::heap()->InOldPointerSpace(int_array_handle->elements()));
  CHECK(CcTest::heap()->InOldPointerSpace(*double_array_handle));
  CHECK(CcTest::heap()->InOldDataSpace(double_array_handle->elements()));
}
2402
2403
// Tests pretenuring of a nested literal of two object arrays: outer array,
// both inner arrays, and their elements must all be in old pointer space.
TEST(OptimizedPretenuringNestedObjectLiterals) {
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_allocation_site_pretenuring = false;
  CcTest::InitializeVM();
  if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
  if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
  v8::HandleScope scope(CcTest::isolate());
  // Force pretenuring of new allocations.
  CcTest::heap()->SetNewSpaceHighPromotionModeActive(true);

  v8::Local<v8::Value> res = CompileRun(
      "function f() {"
      " var numbers = [[{}, {}, {}],[{}, {}, {}]];"
      " return numbers;"
      "};"
      "f(); f(); f();"
      "%OptimizeFunctionOnNextCall(f);"
      "f();");

  v8::Local<v8::Value> int_array_1 = v8::Object::Cast(*res)->Get(v8_str("0"));
  Handle<JSObject> int_array_handle_1 =
      v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(int_array_1));
  v8::Local<v8::Value> int_array_2 = v8::Object::Cast(*res)->Get(v8_str("1"));
  Handle<JSObject> int_array_handle_2 =
      v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(int_array_2));

  Handle<JSObject> o =
      v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
  CHECK(CcTest::heap()->InOldPointerSpace(*o));
  CHECK(CcTest::heap()->InOldPointerSpace(*int_array_handle_1));
  CHECK(CcTest::heap()->InOldPointerSpace(int_array_handle_1->elements()));
  CHECK(CcTest::heap()->InOldPointerSpace(*int_array_handle_2));
  CHECK(CcTest::heap()->InOldPointerSpace(int_array_handle_2->elements()));
}
2437
2438
// Tests pretenuring of a nested literal of two double arrays: the array
// objects go to old pointer space, their elements to old data space.
TEST(OptimizedPretenuringNestedDoubleLiterals) {
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_allocation_site_pretenuring = false;
  CcTest::InitializeVM();
  if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
  if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
  v8::HandleScope scope(CcTest::isolate());
  // Force pretenuring of new allocations.
  CcTest::heap()->SetNewSpaceHighPromotionModeActive(true);

  v8::Local<v8::Value> res = CompileRun(
      "function f() {"
      " var numbers = [[1.1, 1.2, 1.3],[2.1, 2.2, 2.3]];"
      " return numbers;"
      "};"
      "f(); f(); f();"
      "%OptimizeFunctionOnNextCall(f);"
      "f();");

  v8::Local<v8::Value> double_array_1 =
      v8::Object::Cast(*res)->Get(v8_str("0"));
  Handle<JSObject> double_array_handle_1 =
      v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(double_array_1));
  v8::Local<v8::Value> double_array_2 =
      v8::Object::Cast(*res)->Get(v8_str("1"));
  Handle<JSObject> double_array_handle_2 =
      v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(double_array_2));

  Handle<JSObject> o =
      v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
  CHECK(CcTest::heap()->InOldPointerSpace(*o));
  CHECK(CcTest::heap()->InOldPointerSpace(*double_array_handle_1));
  CHECK(CcTest::heap()->InOldDataSpace(double_array_handle_1->elements()));
  CHECK(CcTest::heap()->InOldPointerSpace(*double_array_handle_2));
  CHECK(CcTest::heap()->InOldDataSpace(double_array_handle_2->elements()));
}
2474
2475
// Test regular array literals allocation.
TEST(OptimizedAllocationArrayLiterals) {
  i::FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
  if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
  v8::HandleScope scope(CcTest::isolate());

  v8::Local<v8::Value> res = CompileRun(
      "function f() {"
      " var numbers = new Array(1, 2, 3);"
      " numbers[0] = 3.14;"
      " return numbers;"
      "};"
      "f(); f(); f();"
      "%OptimizeFunctionOnNextCall(f);"
      "f();");
  // Int32Value() truncates the stored 3.14 to 3.
  CHECK_EQ(static_cast<int>(3.14),
           v8::Object::Cast(*res)->Get(v8_str("0"))->Int32Value());

  Handle<JSObject> o =
      v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));

  // Without pretenuring, the literal's elements stay in new space.
  CHECK(CcTest::heap()->InNewSpace(o->elements()));
}
2501
2502
// Tests that, with FLAG_pretenuring_call_new, objects created by an optimized
// 'new' call are allocated in old pointer space.
TEST(OptimizedPretenuringCallNew) {
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_allocation_site_pretenuring = false;
  i::FLAG_pretenuring_call_new = true;
  CcTest::InitializeVM();
  if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
  if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
  v8::HandleScope scope(CcTest::isolate());
  // Force pretenuring of new allocations.
  CcTest::heap()->SetNewSpaceHighPromotionModeActive(true);

  // AlwaysAllocateScope prevents GCs from interfering with the allocations.
  AlwaysAllocateScope always_allocate;
  v8::Local<v8::Value> res = CompileRun(
      "function g() { this.a = 0; }"
      "function f() {"
      " return new g();"
      "};"
      "f(); f(); f();"
      "%OptimizeFunctionOnNextCall(f);"
      "f();");

  Handle<JSObject> o =
      v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
  CHECK(CcTest::heap()->InOldPointerSpace(*o));
}
2527
2528
// Returns the number of transitions recorded in |map|'s transition array.
static int CountMapTransitions(Map* map) {
  return map->transitions()->number_of_transitions();
}
2532
2533
// Test that map transitions are cleared and maps are collected with
// incremental marking as well.
TEST(Regress1465) {
  i::FLAG_stress_compaction = false;
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_trace_incremental_marking = true;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  static const int transitions_count = 256;

  // Build 256 map transitions from the initial object map; each iteration
  // overwrites 'o', so all but the last transition target become garbage.
  {
    AlwaysAllocateScope always_allocate;
    for (int i = 0; i < transitions_count; i++) {
      EmbeddedVector<char, 64> buffer;
      OS::SNPrintF(buffer, "var o = new Object; o.prop%d = %d;", i, i);
      CompileRun(buffer.start());
    }
    CompileRun("var root = new Object;");
  }

  Handle<JSObject> root =
      v8::Utils::OpenHandle(
          *v8::Handle<v8::Object>::Cast(
              CcTest::global()->Get(v8_str("root"))));

  // Count number of live transitions before marking.
  int transitions_before = CountMapTransitions(root->map());
  CompileRun("%DebugPrint(root);");
  CHECK_EQ(transitions_count, transitions_before);

  // Complete an incremental-marking cycle, then finalize with a full GC.
  SimulateIncrementalMarking();
  CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);

  // Count number of live transitions after marking. Note that one transition
  // is left, because 'o' still holds an instance of one transition target.
  int transitions_after = CountMapTransitions(root->map());
  CompileRun("%DebugPrint(root);");
  CHECK_EQ(1, transitions_after);
}
2573
2574
// Regression test: a map transition performed by a StoreIC while incremental
// marking is running must leave the transitioned object and its map intact.
TEST(Regress2143a) {
  i::FLAG_collect_maps = true;
  i::FLAG_incremental_marking = true;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());

  // Prepare a map transition from the root object together with a yet
  // untransitioned root object.
  CompileRun("var root = new Object;"
             "root.foo = 0;"
             "root = new Object;");

  SimulateIncrementalMarking();

  // Compile a StoreIC that performs the prepared map transition. This
  // will restart incremental marking and should make sure the root is
  // marked grey again.
  CompileRun("function f(o) {"
             " o.foo = 0;"
             "}"
             "f(new Object);"
             "f(root);");

  // This bug only triggers with aggressive IC clearing.
  CcTest::heap()->AgeInlineCaches();

  // Explicitly request GC to perform final marking step and sweeping.
  CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);

  Handle<JSObject> root =
      v8::Utils::OpenHandle(
          *v8::Handle<v8::Object>::Cast(
              CcTest::global()->Get(v8_str("root"))));

  // The root object should be in a sane state.
  CHECK(root->IsJSObject());
  CHECK(root->map()->IsMap());
}
2613
2614
// Regression test (issue 2143, variant b): same scenario as Regress2143a,
// but the transitioning store is performed by optimized code
// (LStoreNamedField) rather than a StoreIC. Requires natives syntax for
// %OptimizeFunctionOnNextCall / %DeoptimizeFunction.
TEST(Regress2143b)2615 TEST(Regress2143b) {
2616 i::FLAG_collect_maps = true;
2617 i::FLAG_incremental_marking = true;
2618 i::FLAG_allow_natives_syntax = true;
2619 CcTest::InitializeVM();
2620 v8::HandleScope scope(CcTest::isolate());
2621
2622 // Prepare a map transition from the root object together with a yet
2623 // untransitioned root object.
2624 CompileRun("var root = new Object;"
2625 "root.foo = 0;"
2626 "root = new Object;");
2627
// Start and advance incremental marking before the optimized store runs.
2628 SimulateIncrementalMarking();
2629
2630 // Compile an optimized LStoreNamedField that performs the prepared
2631 // map transition. This will restart incremental marking and should
2632 // make sure the root is marked grey again.
2633 CompileRun("function f(o) {"
2634 " o.foo = 0;"
2635 "}"
2636 "f(new Object);"
2637 "f(new Object);"
2638 "%OptimizeFunctionOnNextCall(f);"
2639 "f(root);"
2640 "%DeoptimizeFunction(f);");
2641
2642 // This bug only triggers with aggressive IC clearing.
2643 CcTest::heap()->AgeInlineCaches();
2644
2645 // Explicitly request GC to perform final marking step and sweeping.
2646 CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
2647
2648 Handle<JSObject> root =
2649 v8::Utils::OpenHandle(
2650 *v8::Handle<v8::Object>::Cast(
2651 CcTest::global()->Get(v8_str("root"))));
2652
2653 // The root object should be in a sane state.
2654 CHECK(root->IsJSObject());
2655 CHECK(root->map()->IsMap());
2656 }
2657
2658
// Checks that the old pointer space releases over-reserved pages back to
// the OS: after filling 20 pages with mostly-garbage, repeated GCs must
// shrink the page count (at least half after two cycles) and a last-resort
// GC must get back down to a single page.
TEST(ReleaseOverReservedPages)2659 TEST(ReleaseOverReservedPages) {
2660 i::FLAG_trace_gc = true;
2661 // The optimizer can allocate stuff, messing up the test.
2662 i::FLAG_crankshaft = false;
2663 i::FLAG_always_opt = false;
2664 CcTest::InitializeVM();
2665 Isolate* isolate = CcTest::i_isolate();
2666 Factory* factory = isolate->factory();
2667 Heap* heap = isolate->heap();
2668 v8::HandleScope scope(CcTest::isolate());
2669 static const int number_of_test_pages = 20;
2670
2671 // Prepare many pages with low live-bytes count.
2672 PagedSpace* old_pointer_space = heap->old_pointer_space();
2673 CHECK_EQ(1, old_pointer_space->CountTotalPages());
2674 for (int i = 0; i < number_of_test_pages; i++) {
2675 AlwaysAllocateScope always_allocate;
// Fill the current page, then allocate a tiny tenured array so the next
// iteration starts a fresh page with almost no live data.
2676 SimulateFullSpace(old_pointer_space);
2677 factory->NewFixedArray(1, TENURED);
2678 }
2679 CHECK_EQ(number_of_test_pages + 1, old_pointer_space->CountTotalPages());
2680
2681 // Triggering one GC will cause a lot of garbage to be discovered but
2682 // even spread across all allocated pages.
2683 heap->CollectAllGarbage(Heap::kNoGCFlags, "triggered for preparation");
2684 CHECK_GE(number_of_test_pages + 1, old_pointer_space->CountTotalPages());
2685
2686 // Triggering subsequent GCs should cause at least half of the pages
2687 // to be released to the OS after at most two cycles.
2688 heap->CollectAllGarbage(Heap::kNoGCFlags, "triggered by test 1");
2689 CHECK_GE(number_of_test_pages + 1, old_pointer_space->CountTotalPages());
2690 heap->CollectAllGarbage(Heap::kNoGCFlags, "triggered by test 2");
2691 CHECK_GE(number_of_test_pages + 1, old_pointer_space->CountTotalPages() * 2);
2692
2693 // Triggering a last-resort GC should cause all pages to be released to the
2694 // OS so that other processes can seize the memory. If we get a failure here
2695 // where there are 2 pages left instead of 1, then we should increase the
2696 // size of the first page a little in SizeOfFirstPage in spaces.cc. The
2697 // first page should be small in order to reduce memory used when the VM
2698 // boots, but if the 20 small arrays don't fit on the first page then that's
2699 // an indication that it is too small.
2700 heap->CollectAllAvailableGarbage("triggered really hard");
2701 CHECK_EQ(1, old_pointer_space->CountTotalPages());
2702 }
2703
2704
// Regression test (issue 2237): a sliced string living in old space must
// keep its new-space sequential parent alive (and sequential) across a
// full GC.
TEST(Regress2237)2705 TEST(Regress2237) {
2706 i::FLAG_stress_compaction = false;
2707 CcTest::InitializeVM();
2708 Isolate* isolate = CcTest::i_isolate();
2709 Factory* factory = isolate->factory();
2710 v8::HandleScope scope(CcTest::isolate());
// Placeholder handle in the outer scope; its slot is overwritten below so
// the slice stays reachable after the inner scope dies.
2711 Handle<String> slice(CcTest::heap()->empty_string());
2712
2713 {
2714 // Generate a parent that lives in new-space.
2715 v8::HandleScope inner_scope(CcTest::isolate());
2716 const char* c = "This text is long enough to trigger sliced strings.";
2717 Handle<String> s = factory->NewStringFromAscii(CStrVector(c));
2718 CHECK(s->IsSeqOneByteString());
2719 CHECK(CcTest::heap()->InNewSpace(*s));
2720
2721 // Generate a sliced string that is based on the above parent and
2722 // lives in old-space.
2723 SimulateFullSpace(CcTest::heap()->new_space());
2724 AlwaysAllocateScope always_allocate;
2725 Handle<String> t = factory->NewProperSubString(s, 5, 35);
2726 CHECK(t->IsSlicedString());
2727 CHECK(!CcTest::heap()->InNewSpace(*t));
// Overwrite the outer handle's location directly so 't' survives the
// inner handle scope without allocating a new handle.
2728 *slice.location() = *t.location();
2729 }
2730
2731 CHECK(SlicedString::cast(*slice)->parent()->IsSeqOneByteString());
2732 CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
2733 CHECK(SlicedString::cast(*slice)->parent()->IsSeqOneByteString());
2734 }
2735
2736
2737 #ifdef OBJECT_PRINT
// Smoke test (OBJECT_PRINT builds only): printing a SharedFunctionInfo
// must not crash or allocate while printing.
TEST(PrintSharedFunctionInfo)2738 TEST(PrintSharedFunctionInfo) {
2739 CcTest::InitializeVM();
2740 v8::HandleScope scope(CcTest::isolate());
2741 const char* source = "f = function() { return 987654321; }\n"
2742 "g = function() { return 123456789; }\n";
2743 CompileRun(source);
2744 Handle<JSFunction> g =
2745 v8::Utils::OpenHandle(
2746 *v8::Handle<v8::Function>::Cast(
2747 CcTest::global()->Get(v8_str("g"))));
2748
// Printing must not trigger allocation; enforce that explicitly.
2749 DisallowHeapAllocation no_allocation;
2750 g->shared()->PrintLn();
2751 }
2752 #endif // OBJECT_PRINT
2753
2754
// Regression test (issue 2211): storing the identity hash and a hidden
// property in either order must leave both retrievable, and must not blow
// up the size of the object's hidden-properties hash table.
TEST(Regress2211)2755 TEST(Regress2211) {
2756 CcTest::InitializeVM();
2757 v8::HandleScope scope(CcTest::isolate());
2758
2759 v8::Handle<v8::String> value = v8_str("val string");
2760 Smi* hash = Smi::FromInt(321);
2761 Heap* heap = CcTest::heap();
2762
// Two iterations cover both insertion orders (see comments below).
2763 for (int i = 0; i < 2; i++) {
2764 // Store identity hash first and common hidden property second.
2765 v8::Handle<v8::Object> obj = v8::Object::New();
2766 Handle<JSObject> internal_obj = v8::Utils::OpenHandle(*obj);
2767 CHECK(internal_obj->HasFastProperties());
2768
2769 // In the first iteration, set hidden value first and identity hash second.
2770 // In the second iteration, reverse the order.
2771 if (i == 0) obj->SetHiddenValue(v8_str("key string"), value);
2772 JSObject::SetIdentityHash(internal_obj, handle(hash, CcTest::i_isolate()));
2773 if (i == 1) obj->SetHiddenValue(v8_str("key string"), value);
2774
2775 // Check values.
2776 CHECK_EQ(hash,
2777 internal_obj->GetHiddenProperty(heap->identity_hash_string()));
2778 CHECK(value->Equals(obj->GetHiddenValue(v8_str("key string"))));
2779
2780 // Check size.
// The hidden-properties hash table lives in the object's first fast
// property slot; assert it stayed at its initial capacity.
2781 DescriptorArray* descriptors = internal_obj->map()->instance_descriptors();
2782 ObjectHashTable* hashtable = ObjectHashTable::cast(
2783 internal_obj->RawFastPropertyAt(descriptors->GetFieldIndex(0)));
2784 // HashTable header (5) and 4 initial entries (8).
2785 CHECK_LE(hashtable->SizeFor(hashtable->length()), 13 * kPointerSize);
2786 }
2787 }
2788
2789
// Checks that type feedback cells referencing closures from dead native
// contexts are cleared (set to the hole) by a GC that finishes an
// incremental marking cycle, while the cell array itself stays intact.
TEST(IncrementalMarkingClearsTypeFeedbackCells)2790 TEST(IncrementalMarkingClearsTypeFeedbackCells) {
2791 if (i::FLAG_always_opt) return;
2792 CcTest::InitializeVM();
2793 v8::HandleScope scope(CcTest::isolate());
2794 v8::Local<v8::Value> fun1, fun2;
2795
// Each LocalContext below is a separate native context; the closures
// outlive their contexts via the local handles.
2796 {
2797 LocalContext env;
2798 CompileRun("function fun() {};");
2799 fun1 = env->Global()->Get(v8_str("fun"));
2800 }
2801
2802 {
2803 LocalContext env;
2804 CompileRun("function fun() {};");
2805 fun2 = env->Global()->Get(v8_str("fun"));
2806 }
2807
2808 // Prepare function f that contains type feedback for closures
2809 // originating from two different native contexts.
2810 CcTest::global()->Set(v8_str("fun1"), fun1);
2811 CcTest::global()->Set(v8_str("fun2"), fun2);
2812 CompileRun("function f(a, b) { a(); b(); } f(fun1, fun2);");
2813 Handle<JSFunction> f =
2814 v8::Utils::OpenHandle(
2815 *v8::Handle<v8::Function>::Cast(
2816 CcTest::global()->Get(v8_str("f"))));
2817 Handle<TypeFeedbackCells> cells(TypeFeedbackInfo::cast(
2818 f->shared()->code()->type_feedback_info())->type_feedback_cells());
2819
// Before GC: both cells hold the recorded closures.
2820 CHECK_EQ(2, cells->CellCount());
2821 CHECK(cells->GetCell(0)->value()->IsJSFunction());
2822 CHECK(cells->GetCell(1)->value()->IsJSFunction());
2823
2824 SimulateIncrementalMarking();
2825 CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
2826
// After GC: cell count is unchanged but both values are cleared.
2827 CHECK_EQ(2, cells->CellCount());
2828 CHECK(cells->GetCell(0)->value()->IsTheHole());
2829 CHECK(cells->GetCell(1)->value()->IsTheHole());
2830 }
2831
2832
FindFirstIC(Code * code,Code::Kind kind)2833 static Code* FindFirstIC(Code* code, Code::Kind kind) {
2834 int mask = RelocInfo::ModeMask(RelocInfo::CODE_TARGET) |
2835 RelocInfo::ModeMask(RelocInfo::CONSTRUCT_CALL) |
2836 RelocInfo::ModeMask(RelocInfo::CODE_TARGET_WITH_ID) |
2837 RelocInfo::ModeMask(RelocInfo::CODE_TARGET_CONTEXT);
2838 for (RelocIterator it(code, mask); !it.done(); it.next()) {
2839 RelocInfo* info = it.rinfo();
2840 Code* target = Code::GetCodeFromTargetAddress(info->target_address());
2841 if (target->is_inline_cache_stub() && target->kind() == kind) {
2842 return target;
2843 }
2844 }
2845 return NULL;
2846 }
2847
2848
// Checks that a monomorphic LoadIC whose target map belongs to the *same*
// native context survives an incremental-marking GC unchanged.
TEST(IncrementalMarkingPreservesMonomorhpicIC)2849 TEST(IncrementalMarkingPreservesMonomorhpicIC) {
2850 if (i::FLAG_always_opt) return;
2851 CcTest::InitializeVM();
2852 v8::HandleScope scope(CcTest::isolate());
2853
2854 // Prepare function f that contains a monomorphic IC for object
2855 // originating from the same native context.
2856 CompileRun("function fun() { this.x = 1; }; var obj = new fun();"
2857 "function f(o) { return o.x; } f(obj); f(obj);");
2858 Handle<JSFunction> f =
2859 v8::Utils::OpenHandle(
2860 *v8::Handle<v8::Function>::Cast(
2861 CcTest::global()->Get(v8_str("f"))));
2862
2863 Code* ic_before = FindFirstIC(f->shared()->code(), Code::LOAD_IC);
2864 CHECK(ic_before->ic_state() == MONOMORPHIC);
2865
2866 SimulateIncrementalMarking();
2867 CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
2868
// Same-context feedback must not be cleared by the GC.
2869 Code* ic_after = FindFirstIC(f->shared()->code(), Code::LOAD_IC);
2870 CHECK(ic_after->ic_state() == MONOMORPHIC);
2871 }
2872
2873
// Checks that a monomorphic LoadIC whose target map belongs to a *disposed*
// native context is cleared by an incremental-marking GC.
TEST(IncrementalMarkingClearsMonomorhpicIC)2874 TEST(IncrementalMarkingClearsMonomorhpicIC) {
2875 if (i::FLAG_always_opt) return;
2876 CcTest::InitializeVM();
2877 v8::HandleScope scope(CcTest::isolate());
2878 v8::Local<v8::Value> obj1;
2879
// Create the receiver in a separate native context that dies with this
// scope; only the object itself stays reachable.
2880 {
2881 LocalContext env;
2882 CompileRun("function fun() { this.x = 1; }; var obj = new fun();");
2883 obj1 = env->Global()->Get(v8_str("obj"));
2884 }
2885
2886 // Prepare function f that contains a monomorphic IC for object
2887 // originating from a different native context.
2888 CcTest::global()->Set(v8_str("obj1"), obj1);
2889 CompileRun("function f(o) { return o.x; } f(obj1); f(obj1);");
2890 Handle<JSFunction> f =
2891 v8::Utils::OpenHandle(
2892 *v8::Handle<v8::Function>::Cast(
2893 CcTest::global()->Get(v8_str("f"))));
2894
2895 Code* ic_before = FindFirstIC(f->shared()->code(), Code::LOAD_IC);
2896 CHECK(ic_before->ic_state() == MONOMORPHIC);
2897
2898 // Fire context dispose notification.
2899 v8::V8::ContextDisposedNotification();
2900 SimulateIncrementalMarking();
2901 CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
2902
// Cross-context feedback must have been dropped.
2903 Code* ic_after = FindFirstIC(f->shared()->code(), Code::LOAD_IC);
2904 CHECK(IC::IsCleared(ic_after));
2905 }
2906
2907
// Checks that a polymorphic LoadIC holding maps from *disposed* native
// contexts is cleared by an incremental-marking GC.
TEST(IncrementalMarkingClearsPolymorhpicIC)2908 TEST(IncrementalMarkingClearsPolymorhpicIC) {
2909 if (i::FLAG_always_opt) return;
2910 CcTest::InitializeVM();
2911 v8::HandleScope scope(CcTest::isolate());
2912 v8::Local<v8::Value> obj1, obj2;
2913
// Two receivers, each created in its own short-lived native context.
2914 {
2915 LocalContext env;
2916 CompileRun("function fun() { this.x = 1; }; var obj = new fun();");
2917 obj1 = env->Global()->Get(v8_str("obj"));
2918 }
2919
2920 {
2921 LocalContext env;
2922 CompileRun("function fun() { this.x = 2; }; var obj = new fun();");
2923 obj2 = env->Global()->Get(v8_str("obj"));
2924 }
2925
2926 // Prepare function f that contains a polymorphic IC for objects
2927 // originating from two different native contexts.
2928 CcTest::global()->Set(v8_str("obj1"), obj1);
2929 CcTest::global()->Set(v8_str("obj2"), obj2);
2930 CompileRun("function f(o) { return o.x; } f(obj1); f(obj1); f(obj2);");
2931 Handle<JSFunction> f =
2932 v8::Utils::OpenHandle(
2933 *v8::Handle<v8::Function>::Cast(
2934 CcTest::global()->Get(v8_str("f"))));
2935
2936 Code* ic_before = FindFirstIC(f->shared()->code(), Code::LOAD_IC);
2937 CHECK(ic_before->ic_state() == POLYMORPHIC);
2938
2939 // Fire context dispose notification.
2940 v8::V8::ContextDisposedNotification();
2941 SimulateIncrementalMarking();
2942 CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
2943
// Feedback from the dead contexts must have been dropped.
2944 Code* ic_after = FindFirstIC(f->shared()->code(), Code::LOAD_IC);
2945 CHECK(IC::IsCleared(ic_after));
2946 }
2947
2948
2949 class SourceResource: public v8::String::ExternalAsciiStringResource {
2950 public:
SourceResource(const char * data)2951 explicit SourceResource(const char* data)
2952 : data_(data), length_(strlen(data)) { }
2953
Dispose()2954 virtual void Dispose() {
2955 i::DeleteArray(data_);
2956 data_ = NULL;
2957 }
2958
data() const2959 const char* data() const { return data_; }
2960
length() const2961 size_t length() const { return length_; }
2962
IsDisposed()2963 bool IsDisposed() { return data_ == NULL; }
2964
2965 private:
2966 const char* data_;
2967 size_t length_;
2968 };
2969
2970
// Driver shared by the ReleaseStackTraceData test cases below.
// |source| creates an error object; |accessor| touches error.stack.
ReleaseStackTraceDataTest(const char * source,const char * accessor)2971 void ReleaseStackTraceDataTest(const char* source, const char* accessor) {
2972 // Test that the data retained by the Error.stack accessor is released
2973 // after the first time the accessor is fired. We use external string
2974 // to check whether the data is being released since the external string
2975 // resource's callback is fired when the external string is GC'ed.
2976 v8::HandleScope scope(CcTest::isolate());
2977 SourceResource* resource = new SourceResource(i::StrDup(source));
2978 {
2979 v8::HandleScope scope(CcTest::isolate());
2980 v8::Handle<v8::String> source_string =
2981 v8::String::NewExternal(CcTest::isolate(), resource);
2982 CcTest::heap()->CollectAllAvailableGarbage();
2983 v8::Script::Compile(source_string)->Run();
2984 CHECK(!resource->IsDisposed());
2985 }
// NOTE(review): the GC call below is commented out; the resource is
// expected to stay alive here because the error's stack trace still
// references the script source -- confirm this is the intended check.
2986 // CcTest::heap()->CollectAllAvailableGarbage();
2987 CHECK(!resource->IsDisposed());
2988
// Firing the accessor releases the retained stack trace data, so the
// next full GC can collect the external source string.
2989 CompileRun(accessor);
2990 CcTest::heap()->CollectAllAvailableGarbage();
2991
2992 // External source has been released.
2993 CHECK(resource->IsDisposed());
2994 delete resource;
2995 }
2996
2997
// Exercises ReleaseStackTraceDataTest() with four error shapes (normal
// error, stack overflow, and both used as a prototype) and two accessors
// (reading and writing error.stack).
TEST(ReleaseStackTraceData)2998 TEST(ReleaseStackTraceData) {
2999 FLAG_use_ic = false; // ICs retain objects.
3000 FLAG_concurrent_recompilation = false;
3001 CcTest::InitializeVM();
3002 static const char* source1 = "var error = null; "
3003 /* Normal Error */ "try { "
3004 " throw new Error(); "
3005 "} catch (e) { "
3006 " error = e; "
3007 "} ";
3008 static const char* source2 = "var error = null; "
3009 /* Stack overflow */ "try { "
3010 " (function f() { f(); })(); "
3011 "} catch (e) { "
3012 " error = e; "
3013 "} ";
3014 static const char* source3 = "var error = null; "
3015 /* Normal Error */ "try { "
3016 /* as prototype */ " throw new Error(); "
3017 "} catch (e) { "
3018 " error = {}; "
3019 " error.__proto__ = e; "
3020 "} ";
3021 static const char* source4 = "var error = null; "
3022 /* Stack overflow */ "try { "
3023 /* as prototype */ " (function f() { f(); })(); "
3024 "} catch (e) { "
3025 " error = {}; "
3026 " error.__proto__ = e; "
3027 "} ";
3028 static const char* getter = "error.stack";
3029 static const char* setter = "error.stack = 0";
3030
3031 ReleaseStackTraceDataTest(source1, setter);
3032 ReleaseStackTraceDataTest(source2, setter);
3033 // We do not test source3 and source4 with setter, since the setter is
3034 // supposed to (untypically) write to the receiver, not the holder. This is
3035 // to emulate the behavior of a data property.
3036
3037 ReleaseStackTraceDataTest(source1, getter);
3038 ReleaseStackTraceDataTest(source2, getter);
3039 ReleaseStackTraceDataTest(source3, getter);
3040 ReleaseStackTraceDataTest(source4, getter);
3041 }
3042
3043
// Regression test (issue 144230): arranges code-space pages so that a
// CallIC's code is evacuated while the IC is cleared mid-cycle, then runs
// the call again -- heap verification or the call itself would catch the
// stale IC target.
TEST(Regression144230)3044 TEST(Regression144230) {
3045 i::FLAG_stress_compaction = false;
3046 CcTest::InitializeVM();
3047 Isolate* isolate = CcTest::i_isolate();
3048 Heap* heap = isolate->heap();
3049 HandleScope scope(isolate);
3050
3051 // First make sure that the uninitialized CallIC stub is on a single page
3052 // that will later be selected as an evacuation candidate.
3053 {
3054 HandleScope inner_scope(isolate);
3055 AlwaysAllocateScope always_allocate;
3056 SimulateFullSpace(heap->code_space());
3057 isolate->stub_cache()->ComputeCallInitialize(9, RelocInfo::CODE_TARGET);
3058 }
3059
3060 // Second compile a CallIC and execute it once so that it gets patched to
3061 // the pre-monomorphic stub. These code objects are on yet another page.
3062 {
3063 HandleScope inner_scope(isolate);
3064 AlwaysAllocateScope always_allocate;
3065 SimulateFullSpace(heap->code_space());
3066 CompileRun("var o = { f:function(a,b,c,d,e,f,g,h,i) {}};"
3067 "function call() { o.f(1,2,3,4,5,6,7,8,9); };"
3068 "call();");
3069 }
3070
3071 // Third we fill up the last page of the code space so that it does not get
3072 // chosen as an evacuation candidate.
3073 {
3074 HandleScope inner_scope(isolate);
3075 AlwaysAllocateScope always_allocate;
3076 CompileRun("for (var i = 0; i < 2000; i++) {"
3077 " eval('function f' + i + '() { return ' + i +'; };' +"
3078 " 'f' + i + '();');"
3079 "}");
3080 }
3081 heap->CollectAllGarbage(Heap::kNoGCFlags);
3082
3083 // Fourth is the tricky part. Make sure the code containing the CallIC is
3084 // visited first without clearing the IC. The shared function info is then
3085 // visited later, causing the CallIC to be cleared.
3086 Handle<String> name = isolate->factory()->InternalizeUtf8String("call");
3087 Handle<GlobalObject> global(isolate->context()->global_object());
3088 Handle<Smi> zero(Smi::FromInt(0), isolate);
3089 MaybeObject* maybe_call = global->GetProperty(*name);
3090 JSFunction* call = JSFunction::cast(maybe_call->ToObjectChecked());
// Unhook 'call' from the global object and age its SFI so its code is a
// flushing candidate, while still holding it via local handles.
3091 JSReceiver::SetProperty(global, name, zero, NONE, kNonStrictMode);
3092 isolate->compilation_cache()->Clear();
3093 call->shared()->set_ic_age(heap->global_ic_age() + 1);
3094 Handle<Object> call_code(call->code(), isolate);
3095 Handle<Object> call_function(call, isolate);
3096
3097 // Now we are ready to mess up the heap.
3098 heap->CollectAllGarbage(Heap::kReduceMemoryFootprintMask);
3099
3100 // Either heap verification caught the problem already or we go kaboom once
3101 // the CallIC is executed the next time.
3102 JSReceiver::SetProperty(global, name, call_function, NONE, kNonStrictMode);
3103 CompileRun("call();");
3104 }
3105
3106
// Regression test (issue 159140): incremental code flushing must not flush
// the unoptimized code of a function that gets optimized mid-cycle --
// otherwise deoptimizing it afterwards has no code to fall back to.
TEST(Regress159140)3107 TEST(Regress159140) {
3108 i::FLAG_allow_natives_syntax = true;
3109 i::FLAG_flush_code_incrementally = true;
3110 CcTest::InitializeVM();
3111 Isolate* isolate = CcTest::i_isolate();
3112 Heap* heap = isolate->heap();
3113 HandleScope scope(isolate);
3114
3115 // Perform one initial GC to enable code flushing.
3116 heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
3117
3118 // Prepare several closures that are all eligible for code flushing
3119 // because all reachable ones are not optimized. Make sure that the
3120 // optimized code object is directly reachable through a handle so
3121 // that it is marked black during incremental marking.
3122 Handle<Code> code;
3123 {
3124 HandleScope inner_scope(isolate);
3125 CompileRun("function h(x) {}"
3126 "function mkClosure() {"
3127 " return function(x) { return x + 1; };"
3128 "}"
3129 "var f = mkClosure();"
3130 "var g = mkClosure();"
3131 "f(1); f(2);"
3132 "g(1); g(2);"
3133 "h(1); h(2);"
3134 "%OptimizeFunctionOnNextCall(f); f(3);"
3135 "%OptimizeFunctionOnNextCall(h); h(3);");
3136
3137 Handle<JSFunction> f =
3138 v8::Utils::OpenHandle(
3139 *v8::Handle<v8::Function>::Cast(
3140 CcTest::global()->Get(v8_str("f"))));
3141 CHECK(f->is_compiled());
3142 CompileRun("f = null;");
3143
3144 Handle<JSFunction> g =
3145 v8::Utils::OpenHandle(
3146 *v8::Handle<v8::Function>::Cast(
3147 CcTest::global()->Get(v8_str("g"))));
3148 CHECK(g->is_compiled());
// Age g's code past the flushing threshold so it becomes a candidate.
3149 const int kAgingThreshold = 6;
3150 for (int i = 0; i < kAgingThreshold; i++) {
3151 g->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
3152 }
3153
// Keep f's (optimized) code alive via an escaping handle only.
3154 code = inner_scope.CloseAndEscape(Handle<Code>(f->code()));
3155 }
3156
3157 // Simulate incremental marking so that the functions are enqueued as
3158 // code flushing candidates. Then optimize one function. Finally
3159 // finish the GC to complete code flushing.
3160 SimulateIncrementalMarking();
3161 CompileRun("%OptimizeFunctionOnNextCall(g); g(3);");
3162 heap->CollectAllGarbage(Heap::kNoGCFlags);
3163
3164 // Unoptimized code is missing and the deoptimizer will go ballistic.
3165 CompileRun("g('bozo');");
3166 }
3167
3168
// Regression test (issue 165495): flushing unoptimized code that is still
// cached in the optimized code map must not break a new closure that
// later installs code from that map and then needs to deoptimize.
TEST(Regress165495)3169 TEST(Regress165495) {
3170 i::FLAG_allow_natives_syntax = true;
3171 i::FLAG_flush_code_incrementally = true;
3172 CcTest::InitializeVM();
3173 Isolate* isolate = CcTest::i_isolate();
3174 Heap* heap = isolate->heap();
3175 HandleScope scope(isolate);
3176
3177 // Perform one initial GC to enable code flushing.
3178 heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
3179
3180 // Prepare an optimized closure that the optimized code map will get
3181 // populated. Then age the unoptimized code to trigger code flushing
3182 // but make sure the optimized code is unreachable.
3183 {
3184 HandleScope inner_scope(isolate);
3185 CompileRun("function mkClosure() {"
3186 " return function(x) { return x + 1; };"
3187 "}"
3188 "var f = mkClosure();"
3189 "f(1); f(2);"
3190 "%OptimizeFunctionOnNextCall(f); f(3);");
3191
3192 Handle<JSFunction> f =
3193 v8::Utils::OpenHandle(
3194 *v8::Handle<v8::Function>::Cast(
3195 CcTest::global()->Get(v8_str("f"))));
3196 CHECK(f->is_compiled());
// Age the unoptimized code past the flushing threshold.
3197 const int kAgingThreshold = 6;
3198 for (int i = 0; i < kAgingThreshold; i++) {
3199 f->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
3200 }
3201
3202 CompileRun("f = null;");
3203 }
3204
3205 // Simulate incremental marking so that unoptimized code is flushed
3206 // even though it still is cached in the optimized code map.
3207 SimulateIncrementalMarking();
3208 heap->CollectAllGarbage(Heap::kNoGCFlags);
3209
3210 // Make a new closure that will get code installed from the code map.
3211 // Unoptimized code is missing and the deoptimizer will go ballistic.
3212 CompileRun("var g = mkClosure(); g('bozo');");
3213 }
3214
3215
// Regression test (issue 169209): replacing a shared function info's
// unoptimized code while it sits on the code-flushing candidate list must
// not corrupt the list (checked via gc_metadata before/after the cycle).
TEST(Regress169209)3216 TEST(Regress169209) {
3217 i::FLAG_stress_compaction = false;
3218 i::FLAG_allow_natives_syntax = true;
3219 i::FLAG_flush_code_incrementally = true;
3220
3221 CcTest::InitializeVM();
3222 Isolate* isolate = CcTest::i_isolate();
3223 Heap* heap = isolate->heap();
3224 HandleScope scope(isolate);
3225
3226 // Perform one initial GC to enable code flushing.
3227 heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
3228
3229 // Prepare a shared function info eligible for code flushing for which
3230 // the unoptimized code will be replaced during optimization.
3231 Handle<SharedFunctionInfo> shared1;
3232 {
3233 HandleScope inner_scope(isolate);
3234 CompileRun("function f() { return 'foobar'; }"
3235 "function g(x) { if (x) f(); }"
3236 "f();"
3237 "g(false);"
3238 "g(false);");
3239
3240 Handle<JSFunction> f =
3241 v8::Utils::OpenHandle(
3242 *v8::Handle<v8::Function>::Cast(
3243 CcTest::global()->Get(v8_str("f"))));
3244 CHECK(f->is_compiled());
3245 const int kAgingThreshold = 6;
3246 for (int i = 0; i < kAgingThreshold; i++) {
3247 f->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
3248 }
3249
3250 shared1 = inner_scope.CloseAndEscape(handle(f->shared(), isolate));
3251 }
3252
3253 // Prepare a shared function info eligible for code flushing that will
3254 // represent the dangling tail of the candidate list.
3255 Handle<SharedFunctionInfo> shared2;
3256 {
3257 HandleScope inner_scope(isolate);
3258 CompileRun("function flushMe() { return 0; }"
3259 "flushMe(1);");
3260
3261 Handle<JSFunction> f =
3262 v8::Utils::OpenHandle(
3263 *v8::Handle<v8::Function>::Cast(
3264 CcTest::global()->Get(v8_str("flushMe"))));
3265 CHECK(f->is_compiled());
3266 const int kAgingThreshold = 6;
3267 for (int i = 0; i < kAgingThreshold; i++) {
3268 f->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
3269 }
3270
3271 shared2 = inner_scope.CloseAndEscape(handle(f->shared(), isolate));
3272 }
3273
3274 // Simulate incremental marking and collect code flushing candidates.
3275 SimulateIncrementalMarking();
// Non-NULL gc_metadata indicates shared1 is enqueued on the candidate list.
3276 CHECK(shared1->code()->gc_metadata() != NULL);
3277
3278 // Optimize function and make sure the unoptimized code is replaced.
3279 #ifdef DEBUG
3280 FLAG_stop_at = "f";
3281 #endif
3282 CompileRun("%OptimizeFunctionOnNextCall(g);"
3283 "g(false);");
3284
3285 // Finish garbage collection cycle.
3286 heap->CollectAllGarbage(Heap::kNoGCFlags);
// After the cycle the candidate link must have been cleaned up.
3287 CHECK(shared1->code()->gc_metadata() == NULL);
3288 }
3289
3290
3291 // Helper function that simulates a fill new-space in the heap.
AllocateAllButNBytes(v8::internal::NewSpace * space,int extra_bytes)3292 static inline void AllocateAllButNBytes(v8::internal::NewSpace* space,
3293 int extra_bytes) {
3294 int space_remaining = static_cast<int>(
3295 *space->allocation_limit_address() - *space->allocation_top_address());
3296 CHECK(space_remaining >= extra_bytes);
3297 int new_linear_size = space_remaining - extra_bytes;
3298 v8::internal::MaybeObject* maybe = space->AllocateRaw(new_linear_size);
3299 v8::internal::FreeListNode* node = v8::internal::FreeListNode::cast(maybe);
3300 node->set_size(space->heap(), new_linear_size);
3301 }
3302
3303
// Regression test (issue 169928): carefully lays out new space so that an
// array literal's backing store is allocated right before the space fills
// up, then checks that the missing AllocationMemento after it does not
// crash the fast-literal transition path.
TEST(Regress169928)3304 TEST(Regress169928) {
3305 i::FLAG_allow_natives_syntax = true;
3306 i::FLAG_crankshaft = false;
3307 CcTest::InitializeVM();
3308 Isolate* isolate = CcTest::i_isolate();
3309 Factory* factory = isolate->factory();
3310 v8::HandleScope scope(CcTest::isolate());
3311
3312 // Some flags turn Scavenge collections into Mark-sweep collections
3313 // and hence are incompatible with this test case.
3314 if (FLAG_gc_global || FLAG_stress_compaction) return;
3315
3316 // Prepare the environment
3317 CompileRun("function fastliteralcase(literal, value) {"
3318 " literal[0] = value;"
3319 " return literal;"
3320 "}"
3321 "function get_standard_literal() {"
3322 " var literal = [1, 2, 3];"
3323 " return literal;"
3324 "}"
3325 "obj = fastliteralcase(get_standard_literal(), 1);"
3326 "obj = fastliteralcase(get_standard_literal(), 1.5);"
3327 "obj = fastliteralcase(get_standard_literal(), 2);");
3328
3329 // prepare the heap
3330 v8::Local<v8::String> mote_code_string =
3331 v8_str("fastliteralcase(mote, 2.5);");
3332
3333 v8::Local<v8::String> array_name = v8_str("mote");
3334 CcTest::global()->Set(array_name, v8::Int32::New(0));
3335
3336 // First make sure we flip spaces
3337 CcTest::heap()->CollectGarbage(NEW_SPACE);
3338
3339 // Allocate the object.
3340 Handle<FixedArray> array_data = factory->NewFixedArray(2, NOT_TENURED);
3341 array_data->set(0, Smi::FromInt(1));
3342 array_data->set(1, Smi::FromInt(2));
3343
// Leave exactly enough room for the JSArray plus the memento slot, so the
// array lands at the very end of the usable linear area.
3344 AllocateAllButNBytes(CcTest::heap()->new_space(),
3345 JSArray::kSize + AllocationMemento::kSize +
3346 kPointerSize);
3347
3348 Handle<JSArray> array = factory->NewJSArrayWithElements(array_data,
3349 FAST_SMI_ELEMENTS,
3350 NOT_TENURED);
3351
3352 CHECK_EQ(Smi::FromInt(2), array->length());
3353 CHECK(array->HasFastSmiOrObjectElements());
3354
3355 // We need filler the size of AllocationMemento object, plus an extra
3356 // fill pointer value.
3357 MaybeObject* maybe_object = CcTest::heap()->AllocateRaw(
3358 AllocationMemento::kSize + kPointerSize, NEW_SPACE, OLD_POINTER_SPACE);
3359 Object* obj = NULL;
3360 CHECK(maybe_object->ToObject(&obj));
3361 Address addr_obj = reinterpret_cast<Address>(
3362 reinterpret_cast<byte*>(obj - kHeapObjectTag));
// Overwrite the would-be memento slot with a filler object.
3363 CcTest::heap()->CreateFillerObjectAt(addr_obj,
3364 AllocationMemento::kSize + kPointerSize);
3365
3366 // Give the array a name, making sure not to allocate strings.
3367 v8::Handle<v8::Object> array_obj = v8::Utils::ToLocal(array);
3368 CcTest::global()->Set(array_name, array_obj);
3369
3370 // This should crash with a protection violation if we are running a build
3371 // with the bug.
3372 AlwaysAllocateScope aa_scope;
3373 v8::Script::Compile(mote_code_string)->Run();
3374 }
3375
3376
// Regression test (issue 168801): a function removed from the code
// flushing candidate list by optimization, while its code sits on an
// evacuation candidate page, must not leave stale pointers behind when
// compaction runs.
TEST(Regress168801)3377 TEST(Regress168801) {
3378 i::FLAG_always_compact = true;
3379 i::FLAG_cache_optimized_code = false;
3380 i::FLAG_allow_natives_syntax = true;
3381 i::FLAG_flush_code_incrementally = true;
3382 CcTest::InitializeVM();
3383 Isolate* isolate = CcTest::i_isolate();
3384 Heap* heap = isolate->heap();
3385 HandleScope scope(isolate);
3386
3387 // Perform one initial GC to enable code flushing.
3388 heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
3389
3390 // Ensure the code ends up on an evacuation candidate.
3391 SimulateFullSpace(heap->code_space());
3392
3393 // Prepare an unoptimized function that is eligible for code flushing.
3394 Handle<JSFunction> function;
3395 {
3396 HandleScope inner_scope(isolate);
3397 CompileRun("function mkClosure() {"
3398 " return function(x) { return x + 1; };"
3399 "}"
3400 "var f = mkClosure();"
3401 "f(1); f(2);");
3402
3403 Handle<JSFunction> f =
3404 v8::Utils::OpenHandle(
3405 *v8::Handle<v8::Function>::Cast(
3406 CcTest::global()->Get(v8_str("f"))));
3407 CHECK(f->is_compiled());
// Age the code past the flushing threshold.
3408 const int kAgingThreshold = 6;
3409 for (int i = 0; i < kAgingThreshold; i++) {
3410 f->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
3411 }
3412
3413 function = inner_scope.CloseAndEscape(handle(*f, isolate));
3414 }
3415
3416 // Simulate incremental marking so that unoptimized function is enqueued as a
3417 // candidate for code flushing. The shared function info however will not be
3418 // explicitly enqueued.
3419 SimulateIncrementalMarking();
3420
3421 // Now optimize the function so that it is taken off the candidate list.
3422 {
3423 HandleScope inner_scope(isolate);
3424 CompileRun("%OptimizeFunctionOnNextCall(f); f(3);");
3425 }
3426
3427 // This cycle will bust the heap and subsequent cycles will go ballistic.
3428 heap->CollectAllGarbage(Heap::kNoGCFlags);
3429 heap->CollectAllGarbage(Heap::kNoGCFlags);
3430 }
3431
3432
// Regression test (issue 173458): same setup as Regress168801, but the
// candidate list is invalidated by loading the debugger (which disables
// code flushing) instead of by optimizing the function.
TEST(Regress173458)3433 TEST(Regress173458) {
3434 i::FLAG_always_compact = true;
3435 i::FLAG_cache_optimized_code = false;
3436 i::FLAG_allow_natives_syntax = true;
3437 i::FLAG_flush_code_incrementally = true;
3438 CcTest::InitializeVM();
3439 Isolate* isolate = CcTest::i_isolate();
3440 Heap* heap = isolate->heap();
3441 HandleScope scope(isolate);
3442
3443 // Perform one initial GC to enable code flushing.
3444 heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
3445
3446 // Ensure the code ends up on an evacuation candidate.
3447 SimulateFullSpace(heap->code_space());
3448
3449 // Prepare an unoptimized function that is eligible for code flushing.
3450 Handle<JSFunction> function;
3451 {
3452 HandleScope inner_scope(isolate);
3453 CompileRun("function mkClosure() {"
3454 " return function(x) { return x + 1; };"
3455 "}"
3456 "var f = mkClosure();"
3457 "f(1); f(2);");
3458
3459 Handle<JSFunction> f =
3460 v8::Utils::OpenHandle(
3461 *v8::Handle<v8::Function>::Cast(
3462 CcTest::global()->Get(v8_str("f"))));
3463 CHECK(f->is_compiled());
// Age the code past the flushing threshold.
3464 const int kAgingThreshold = 6;
3465 for (int i = 0; i < kAgingThreshold; i++) {
3466 f->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
3467 }
3468
3469 function = inner_scope.CloseAndEscape(handle(*f, isolate));
3470 }
3471
3472 // Simulate incremental marking so that unoptimized function is enqueued as a
3473 // candidate for code flushing. The shared function info however will not be
3474 // explicitly enqueued.
3475 SimulateIncrementalMarking();
3476
3477 #ifdef ENABLE_DEBUGGER_SUPPORT
3478 // Now enable the debugger which in turn will disable code flushing.
3479 CHECK(isolate->debug()->Load());
3480 #endif // ENABLE_DEBUGGER_SUPPORT
3481
3482 // This cycle will bust the heap and subsequent cycles will go ballistic.
3483 heap->CollectAllGarbage(Heap::kNoGCFlags);
3484 heap->CollectAllGarbage(Heap::kNoGCFlags);
3485 }
3486
3487
3488 class DummyVisitor : public ObjectVisitor {
3489 public:
VisitPointers(Object ** start,Object ** end)3490 void VisitPointers(Object** start, Object** end) { }
3491 };
3492
3493
TEST(DeferredHandles) {
  // Exercises DeferredHandleScope at the exact boundary where the current
  // handle block is completely full, then iterates all handles to make sure
  // the handle-scope implementer copes with that state.
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  v8::HandleScope scope(reinterpret_cast<v8::Isolate*>(isolate));
  v8::ImplementationUtilities::HandleScopeData* data =
      isolate->handle_scope_data();
  Handle<Object> init(heap->empty_string(), isolate);
  // Allocate handles until the current block is exactly exhausted.
  while (data->next < data->limit) {
    Handle<Object> obj(heap->empty_string(), isolate);
  }
  // An entire block of handles has been filled.
  // Next handle would require a new block.
  ASSERT(data->next == data->limit);

  DeferredHandleScope deferred(isolate);
  DummyVisitor visitor;
  // Iterating must not crash even though the block boundary was just hit.
  isolate->handle_scope_implementer()->Iterate(&visitor);
  deferred.Detach();
}
3514
3515
TEST(IncrementalMarkingStepMakesBigProgressWithLargeObjects) {
  // A single huge incremental-marking step should be able to finish marking
  // a very large object (a 10M-element array) in one go.
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  CompileRun("function f(n) {"
             "    var a = new Array(n);"
             "    for (var i = 0; i < n; i += 100) a[i] = i;"
             "};"
             "f(10 * 1024 * 1024);");
  IncrementalMarking* marking = CcTest::heap()->incremental_marking();
  if (marking->IsStopped()) marking->Start();
  // This big step should be sufficient to mark the whole array.
  marking->Step(100 * MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD);
  ASSERT(marking->IsComplete());
}
3530
3531
TEST(DisableInlineAllocation) {
  // Runs the same allocating, optimize-then-deoptimize workload under all
  // heap allocation modes: inline allocation on, off, off + pretenuring,
  // and re-enabled.  None of the mode switches should break execution.
  i::FLAG_allow_natives_syntax = true;       // Needed for % runtime calls.
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  CompileRun("function test() {"
             "  var x = [];"
             "  for (var i = 0; i < 10; i++) {"
             "    x[i] = [ {}, [1,2,3], [1,x,3] ];"
             "  }"
             "}"
             "function run() {"
             "  %OptimizeFunctionOnNextCall(test);"
             "  test();"
             "  %DeoptimizeFunction(test);"
             "}");

  // Warm-up with inline allocation enabled.
  CompileRun("test(); test(); run();");

  // Run test with inline allocation disabled.
  CcTest::heap()->DisableInlineAllocation();
  CompileRun("run()");

  // Run test with inline allocation disabled and pretenuring.
  CcTest::heap()->SetNewSpaceHighPromotionModeActive(true);
  CompileRun("run()");

  // Run test with inline allocation re-enabled.
  CcTest::heap()->EnableInlineAllocation();
  CompileRun("run()");
}
3563
3564
AllocationSitesCount(Heap * heap)3565 static int AllocationSitesCount(Heap* heap) {
3566 int count = 0;
3567 for (Object* site = heap->allocation_sites_list();
3568 !(site->IsUndefined());
3569 site = AllocationSite::cast(site)->weak_next()) {
3570 count++;
3571 }
3572 return count;
3573 }
3574
3575
TEST(EnsureAllocationSiteDependentCodesProcessed) {
  // Verifies that code registered as dependent on an AllocationSite is
  // dropped from the site's dependent_code() once the optimized code dies,
  // even while the site itself is kept alive by a global handle.
  if (i::FLAG_always_opt || !i::FLAG_crankshaft) return;
  i::FLAG_allow_natives_syntax = true;       // Needed for % runtime calls.
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  v8::internal::Heap* heap = CcTest::heap();
  GlobalHandles* global_handles = isolate->global_handles();

  if (!isolate->use_crankshaft()) return;

  // The allocation site at the head of the list is ours.
  Handle<AllocationSite> site;
  {
    LocalContext context;
    v8::HandleScope scope(context->GetIsolate());

    int count = AllocationSitesCount(heap);
    CompileRun("var bar = function() { return (new Array()); };"
               "var a = bar();"
               "bar();"
               "bar();");

    // One allocation site should have been created.
    int new_count = AllocationSitesCount(heap);
    CHECK_EQ(new_count, (count + 1));
    // Anchor the site in a global handle so it survives the GCs below.
    site = Handle<AllocationSite>::cast(
        global_handles->Create(
            AllocationSite::cast(heap->allocation_sites_list())));

    CompileRun("%OptimizeFunctionOnNextCall(bar); bar();");

    // Optimizing 'bar' must have registered its code on the site.
    DependentCode::GroupStartIndexes starts(site->dependent_code());
    CHECK_GE(starts.number_of_entries(), 1);
    int index = starts.at(DependentCode::kAllocationSiteTransitionChangedGroup);
    CHECK(site->dependent_code()->is_code_at(index));
    Code* function_bar = site->dependent_code()->code_at(index);
    Handle<JSFunction> bar_handle =
        v8::Utils::OpenHandle(
            *v8::Handle<v8::Function>::Cast(
                CcTest::global()->Get(v8_str("bar"))));
    CHECK_EQ(bar_handle->code(), function_bar);
  }

  // Now make sure that a gc should get rid of the function, even though we
  // still have the allocation site alive.
  for (int i = 0; i < 4; i++) {
    // Fixed: pass the explicit GC flag constant rather than 'false', which
    // relied on implicit bool->int conversion (same value, 0); this matches
    // every other CollectAllGarbage call site in this file.
    heap->CollectAllGarbage(Heap::kNoGCFlags);
  }

  // The site still exists because of our global handle, but the code is no
  // longer referred to by dependent_code().
  DependentCode::GroupStartIndexes starts(site->dependent_code());
  int index = starts.at(DependentCode::kAllocationSiteTransitionChangedGroup);
  CHECK(!(site->dependent_code()->is_code_at(index)));
}
3631