// (Removed: code-browser navigation chrome — "Home / Line# / Scopes /
// Navigate / Raw / Download" — scraping residue, not part of the source.)
1 // Copyright 2011 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
4 // met:
5 //
6 //     * Redistributions of source code must retain the above copyright
7 //       notice, this list of conditions and the following disclaimer.
8 //     * Redistributions in binary form must reproduce the above
9 //       copyright notice, this list of conditions and the following
10 //       disclaimer in the documentation and/or other materials provided
11 //       with the distribution.
12 //     * Neither the name of Google Inc. nor the names of its
13 //       contributors may be used to endorse or promote products derived
14 //       from this software without specific prior written permission.
15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 
28 #include <stdlib.h>
29 
30 #include "v8.h"
31 #include "cctest.h"
32 
33 using namespace v8::internal;
34 
#if 0
// Exercises the per-region dirty bits of the old card-marking write
// barrier on one page: all regions must start clean, accept a dirty mark
// for every pointer-aligned slot, and read the marks back.
// NOTE(review): disabled (#if 0) and additionally guarded by
// ENABLE_CARDMARKING_WRITE_BARRIER — kept for historical reference only.
static void VerifyRegionMarking(Address page_start) {
#ifdef ENABLE_CARDMARKING_WRITE_BARRIER
  Page* p = Page::FromAddress(page_start);

  // Reset the page to an all-clean region bitmap.
  p->SetRegionMarks(Page::kAllRegionsCleanMarks);

  // Every pointer-aligned slot in the object area starts clean.
  for (Address addr = p->ObjectAreaStart();
       addr < p->ObjectAreaEnd();
       addr += kPointerSize) {
    CHECK(!Page::FromAddress(addr)->IsRegionDirty(addr));
  }

  // Dirty every slot...
  for (Address addr = p->ObjectAreaStart();
       addr < p->ObjectAreaEnd();
       addr += kPointerSize) {
    Page::FromAddress(addr)->MarkRegionDirty(addr);
  }

  // ...and verify that every mark stuck.
  for (Address addr = p->ObjectAreaStart();
       addr < p->ObjectAreaEnd();
       addr += kPointerSize) {
    CHECK(Page::FromAddress(addr)->IsRegionDirty(addr));
  }
#endif
}
#endif
62 
63 
64 // TODO(gc) you can no longer allocate pages like this. Details are hidden.
#if 0
// Smoke-test of the Page layout accessors (address, object-area bounds,
// Offset()/OffsetToAddress() round-trip) on a page carved out of a raw
// NewArray allocation.
// NOTE(review): disabled via #if 0 — pages can no longer be fabricated
// from raw memory like this (allocation details are hidden).
TEST(Page) {
  // Reserve two pages so a page-aligned address is guaranteed to exist
  // inside the allocation.
  byte* mem = NewArray<byte>(2*Page::kPageSize);
  CHECK(mem != NULL);

  Address start = reinterpret_cast<Address>(mem);
  Address page_start = RoundUp(start, Page::kPageSize);

  Page* p = Page::FromAddress(page_start);
  // Initialized Page has heap pointer, normally set by memory_allocator.
  p->heap_ = HEAP;
  CHECK(p->address() == page_start);
  CHECK(p->is_valid());

  p->opaque_header = 0;
  p->SetIsLargeObjectPage(false);
  CHECK(!p->next_page()->is_valid());

  // The object area spans from the fixed start offset to the page end.
  CHECK(p->ObjectAreaStart() == page_start + Page::kObjectStartOffset);
  CHECK(p->ObjectAreaEnd() == page_start + Page::kPageSize);

  // Offset() and OffsetToAddress() are inverse mappings.
  CHECK(p->Offset(page_start + Page::kObjectStartOffset) ==
        Page::kObjectStartOffset);
  CHECK(p->Offset(page_start + Page::kPageSize) == Page::kPageSize);

  CHECK(p->OffsetToAddress(Page::kObjectStartOffset) == p->ObjectAreaStart());
  CHECK(p->OffsetToAddress(Page::kPageSize) == p->ObjectAreaEnd());

  // test region marking
  VerifyRegionMarking(page_start);

  DeleteArray(mem);
}
#endif
99 
100 
101 namespace v8 {
102 namespace internal {
103 
104 // Temporarily sets a given allocator in an isolate.
105 class TestMemoryAllocatorScope {
106  public:
TestMemoryAllocatorScope(Isolate * isolate,MemoryAllocator * allocator)107   TestMemoryAllocatorScope(Isolate* isolate, MemoryAllocator* allocator)
108       : isolate_(isolate),
109         old_allocator_(isolate->memory_allocator_) {
110     isolate->memory_allocator_ = allocator;
111   }
112 
~TestMemoryAllocatorScope()113   ~TestMemoryAllocatorScope() {
114     isolate_->memory_allocator_ = old_allocator_;
115   }
116 
117  private:
118   Isolate* isolate_;
119   MemoryAllocator* old_allocator_;
120 
121   DISALLOW_COPY_AND_ASSIGN(TestMemoryAllocatorScope);
122 };
123 
124 } }  // namespace v8::internal
125 
126 
// Allocates two pages through a fresh MemoryAllocator into a faked old
// space, verifies page linkage and ownership, then frees everything.
TEST(MemoryAllocator) {
  OS::SetUp();
  Isolate* isolate = Isolate::Current();
  isolate->InitializeLoggingAndCounters();
  Heap* heap = isolate->heap();
  // Use the cached |heap| pointer consistently, as the other tests in
  // this file do (was: isolate->heap()->ConfigureHeapDefault()).
  CHECK(heap->ConfigureHeapDefault());

  MemoryAllocator* memory_allocator = new MemoryAllocator(isolate);
  CHECK(memory_allocator->SetUp(heap->MaxReserved(),
                                heap->MaxExecutableSize()));

  int total_pages = 0;
  // A space object that is never SetUp(); it only serves as the owner
  // passed to AllocatePage().
  OldSpace faked_space(heap,
                       heap->MaxReserved(),
                       OLD_POINTER_SPACE,
                       NOT_EXECUTABLE);
  Page* first_page =
      memory_allocator->AllocatePage(&faked_space, NOT_EXECUTABLE);

  first_page->InsertAfter(faked_space.anchor()->prev_page());
  CHECK(first_page->is_valid());
  CHECK(first_page->next_page() == faked_space.anchor());
  total_pages++;

  // Every page reachable from the anchor must be owned by the faked space.
  for (Page* p = first_page; p != faked_space.anchor(); p = p->next_page()) {
    CHECK(p->owner() == &faked_space);
  }

  // Allocate a second page and link it in after the first; the walk must
  // now see exactly |total_pages| pages.
  Page* other =
      memory_allocator->AllocatePage(&faked_space, NOT_EXECUTABLE);
  CHECK(other->is_valid());
  total_pages++;
  other->InsertAfter(first_page);
  int page_count = 0;
  for (Page* p = first_page; p != faked_space.anchor(); p = p->next_page()) {
    CHECK(p->owner() == &faked_space);
    page_count++;
  }
  CHECK(total_pages == page_count);

  Page* second_page = first_page->next_page();
  CHECK(second_page->is_valid());
  // Release the pages before tearing down the allocator itself.
  memory_allocator->Free(first_page);
  memory_allocator->Free(second_page);
  memory_allocator->TearDown();
  delete memory_allocator;
}
175 
176 
// Sets up a NewSpace and fills it with maximally-sized objects; every
// allocation must land inside the space.
TEST(NewSpace) {
  OS::SetUp();
  Isolate* isolate = Isolate::Current();
  isolate->InitializeLoggingAndCounters();
  Heap* heap = isolate->heap();
  CHECK(heap->ConfigureHeapDefault());
  MemoryAllocator* memory_allocator = new MemoryAllocator(isolate);
  CHECK(memory_allocator->SetUp(heap->MaxReserved(),
                                heap->MaxExecutableSize()));
  // Install the test allocator for the duration of this test.
  TestMemoryAllocatorScope test_scope(isolate, memory_allocator);

  NewSpace new_space(heap);

  // Use the cached |heap| consistently instead of mixing in the HEAP
  // macro — both name the current isolate's heap here.
  CHECK(new_space.SetUp(heap->ReservedSemiSpaceSize(),
                        heap->ReservedSemiSpaceSize()));
  CHECK(new_space.HasBeenSetUp());

  // Exhaust the semispace with maximally-sized raw allocations.
  while (new_space.Available() >= Page::kMaxNonCodeHeapObjectSize) {
    Object* obj =
        new_space.AllocateRaw(Page::kMaxNonCodeHeapObjectSize)->
        ToObjectUnchecked();
    CHECK(new_space.Contains(HeapObject::cast(obj)));
  }

  new_space.TearDown();
  memory_allocator->TearDown();
  delete memory_allocator;
}
205 
206 
// Sets up an OldSpace sized to the maximum old generation and exhausts
// it with raw allocations, then tears everything down.
TEST(OldSpace) {
  OS::SetUp();
  Isolate* isolate = Isolate::Current();
  isolate->InitializeLoggingAndCounters();
  Heap* heap = isolate->heap();
  CHECK(heap->ConfigureHeapDefault());
  MemoryAllocator* memory_allocator = new MemoryAllocator(isolate);
  CHECK(memory_allocator->SetUp(heap->MaxReserved(),
                                heap->MaxExecutableSize()));
  // Route all page allocations through the test allocator.
  TestMemoryAllocatorScope test_scope(isolate, memory_allocator);

  OldSpace* old_space = new OldSpace(heap,
                                     heap->MaxOldGenerationSize(),
                                     OLD_POINTER_SPACE,
                                     NOT_EXECUTABLE);
  CHECK(old_space != NULL);
  CHECK(old_space->SetUp());

  // Keep allocating maximally-sized chunks until the space is full.
  while (old_space->Available() > 0) {
    old_space->AllocateRaw(Page::kMaxNonCodeHeapObjectSize)->
        ToObjectUnchecked();
  }

  old_space->TearDown();
  delete old_space;
  memory_allocator->TearDown();
  delete memory_allocator;
}
235 
236 
// Allocates page-sized objects in the large-object space until it is
// exhausted, checking containment, lookup, and that Available() shrinks
// strictly with each successful allocation.
TEST(LargeObjectSpace) {
  v8::V8::Initialize();

  LargeObjectSpace* lo = HEAP->lo_space();
  CHECK(lo != NULL);

  int lo_size = Page::kPageSize;

  // The first page-sized allocation must succeed.
  Object* obj = lo->AllocateRaw(lo_size, NOT_EXECUTABLE)->ToObjectUnchecked();
  CHECK(obj->IsHeapObject());

  HeapObject* ho = HeapObject::cast(obj);

  // The object is contained in, and findable through, the space.
  // (The original checked Contains() twice with the identical argument;
  // one check suffices.)
  CHECK(lo->Contains(ho));
  CHECK(lo->FindObject(ho->address()) == obj);

  // Allocate until the space refuses; available memory must decrease
  // strictly with every successful allocation.
  while (true) {
    intptr_t available = lo->Available();
    { MaybeObject* maybe_obj = lo->AllocateRaw(lo_size, NOT_EXECUTABLE);
      if (!maybe_obj->ToObject(&obj)) break;
    }
    CHECK(lo->Available() < available);
  }  // (was: stray ';' after the loop body)

  CHECK(!lo->IsEmpty());

  // Once exhausted, further allocation requests must fail.
  CHECK(lo->AllocateRaw(lo_size, NOT_EXECUTABLE)->IsFailure());
}
268