// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following
// disclaimer in the documentation and/or other materials provided
// with the distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived
// from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"
#include "accessors.h"

#include "cctest.h"


using namespace v8::internal;


// Stress helper for the heap's allocation-retry machinery.  The first two
// calls report a retryable failure; the third call fills each heap space in
// turn and checks that allocations of several kinds still succeed.  Returns
// Smi::FromInt(42) on success so the caller can verify the value made it
// back through the retry loop.
static MaybeObject* AllocateAfterFailures() {
  // Simulate transient allocation failure: CALL_HEAP_FUNCTION must cope
  // with two consecutive retry requests before the real work runs.
  static int attempts = 0;
  if (++attempts < 3) return Failure::RetryAfterGC();
  Heap* heap = Isolate::Current()->heap();

  // New space: fill it with minimal byte arrays, then allocate on top.
  NewSpace* new_space = heap->new_space();
  static const int kNewSpaceFillerSize = ByteArray::SizeFor(0);
  while (new_space->Available() > kNewSpaceFillerSize) {
    int available_before = static_cast<int>(new_space->Available());
    CHECK(!heap->AllocateByteArray(0)->IsFailure());
    if (available_before == new_space->Available()) {
      // It seems that we are avoiding new space allocations when
      // allocation is forced, so no need to fill up new space
      // in order to make the test harder.
      break;
    }
  }
  CHECK(!heap->AllocateByteArray(100)->IsFailure());
  CHECK(!heap->AllocateFixedArray(100, NOT_TENURED)->IsFailure());

  // Make sure we can allocate through optimized allocation functions
  // for specific kinds.
  CHECK(!heap->AllocateFixedArray(100)->IsFailure());
  CHECK(!heap->AllocateHeapNumber(0.42)->IsFailure());
  CHECK(!heap->AllocateArgumentsObject(Smi::FromInt(87), 10)->IsFailure());
  Object* object = heap->AllocateJSObject(
      *Isolate::Current()->object_function())->ToObjectChecked();
  CHECK(!heap->CopyJSObject(JSObject::cast(object))->IsFailure());

  // Old data space: fill with tenured byte arrays, then allocate a string.
  OldSpace* old_data_space = heap->old_data_space();
  static const int kOldDataSpaceFillerSize = ByteArray::SizeFor(0);
  while (old_data_space->Available() > kOldDataSpaceFillerSize) {
    CHECK(!heap->AllocateByteArray(0, TENURED)->IsFailure());
  }
  CHECK(!heap->AllocateRawAsciiString(100, TENURED)->IsFailure());

  // Old pointer space: fill with tenured fixed arrays.
  OldSpace* old_pointer_space = heap->old_pointer_space();
  static const int kOldPointerSpaceFillerLength = 10000;
  static const int kOldPointerSpaceFillerSize = FixedArray::SizeFor(
      kOldPointerSpaceFillerLength);
  while (old_pointer_space->Available() > kOldPointerSpaceFillerSize) {
    CHECK(!heap->AllocateFixedArray(kOldPointerSpaceFillerLength, TENURED)->
          IsFailure());
  }
  CHECK(!heap->AllocateFixedArray(kOldPointerSpaceFillerLength, TENURED)->
        IsFailure());

  // Large object space.  The filler must exceed the regular page area so
  // the allocations below really land in large object space.
  static const int kLargeObjectSpaceFillerLength = 300000;
  static const int kLargeObjectSpaceFillerSize = FixedArray::SizeFor(
      kLargeObjectSpaceFillerLength);
  ASSERT(kLargeObjectSpaceFillerSize > heap->old_pointer_space()->AreaSize());
  while (heap->OldGenerationSpaceAvailable() > kLargeObjectSpaceFillerSize) {
    CHECK(!heap->AllocateFixedArray(kLargeObjectSpaceFillerLength, TENURED)->
          IsFailure());
  }
  CHECK(!heap->AllocateFixedArray(kLargeObjectSpaceFillerLength, TENURED)->
        IsFailure());

  // Map space: fill with maps for plain JS objects.
  MapSpace* map_space = heap->map_space();
  static const int kMapSpaceFillerSize = Map::kSize;
  InstanceType instance_type = JS_OBJECT_TYPE;
  int instance_size = JSObject::kHeaderSize;
  while (map_space->Available() > kMapSpaceFillerSize) {
    CHECK(!heap->AllocateMap(instance_type, instance_size)->IsFailure());
  }
  CHECK(!heap->AllocateMap(instance_type, instance_size)->IsFailure());

  // Test that we can allocate in old pointer space and code space.
  CHECK(!heap->AllocateFixedArray(100, TENURED)->IsFailure());
  CHECK(!heap->CopyCode(Isolate::Current()->builtins()->builtin(
      Builtins::kIllegal))->IsFailure());

  // Return success.
  return Smi::FromInt(42);
}
117
118
// Runs AllocateAfterFailures() through the CALL_HEAP_FUNCTION retry loop:
// the helper fails twice with RetryAfterGC, so the macro's GC-and-retry
// handling is what makes this return a live handle.
static Handle<Object> Test() {
  CALL_HEAP_FUNCTION(ISOLATE, AllocateAfterFailures(), Object);
}
122
123
// Exercises the handle-based allocation path under simulated allocation
// failures and verifies that the expected Smi survives the retries.
TEST(StressHandles) {
  v8::Persistent<v8::Context> env = v8::Context::New();
  v8::HandleScope scope;
  env->Enter();
  Handle<Object> result = Test();
  CHECK(result->IsSmi());
  CHECK_EQ(42, Smi::cast(*result)->value());
  env->Exit();
}
132
133
// Native accessor callback used by the StressJS test below: delegates to
// the allocation-stress helper, so reading the property triggers the same
// fail-twice-then-succeed allocation sequence from inside a JS call.
static MaybeObject* TestAccessorGet(Object* object, void*) {
  return AllocateAfterFailures();
}
137
138
// Accessor descriptor wiring TestAccessorGet as the getter, with no setter
// and no per-accessor data.
const AccessorDescriptor kDescriptor = {
  TestAccessorGet,
  0,
  0
};
144
145
// Installs a native accessor (backed by AllocateAfterFailures) on a fresh
// constructor's instance map and reads it from JavaScript, verifying that
// allocation retries are handled correctly under a real JS property load.
TEST(StressJS) {
  v8::Persistent<v8::Context> env = v8::Context::New();
  v8::HandleScope scope;
  env->Enter();
  Handle<JSFunction> fun =
      FACTORY->NewFunction(FACTORY->function_symbol(), FACTORY->null_value());
  // Force the creation of an initial map and set the code to
  // something empty.
  FACTORY->NewJSObject(fun);
  fun->ReplaceCode(Isolate::Current()->builtins()->builtin(
      Builtins::kEmptyFunction));
  // Patch the map to have an accessor for "get".
  Handle<Map> initial_map(fun->initial_map());
  Handle<DescriptorArray> descriptors(initial_map->instance_descriptors());
  Handle<Foreign> getter = FACTORY->NewForeign(&kDescriptor);
  descriptors = FACTORY->CopyAppendForeignDescriptor(
      descriptors,
      FACTORY->NewStringFromAscii(Vector<const char>("get", 3)),
      getter,
      static_cast<PropertyAttributes>(0));
  initial_map->set_instance_descriptors(*descriptors);
  // Add the Foo constructor to the global object.
  env->Global()->Set(v8::String::New("Foo"), v8::Utils::ToLocal(fun));
  // Call the accessor through JavaScript.
  v8::Handle<v8::Value> result =
      v8::Script::Compile(v8::String::New("(new Foo).get"))->Run();
  CHECK_EQ(42, result->Int32Value());
  env->Exit();
}


// CodeRange test.
// Tests memory management in a CodeRange by allocating and freeing blocks,
// using a pseudorandom generator to choose block sizes geometrically
// distributed between 2 * Page::kPageSize and (2^5 + 1) * Page::kPageSize.
// Ensure that the freed chunks are collected and reused by allocating (in
// total) more than the size of the CodeRange.

// Cheap deterministic pseudorandom source for the CodeRange test below.
// This generator does not need to be particularly good; it reuses the
// lower half of the V8::Random() multiply-with-carry recurrence.
unsigned int Pseudorandom() {
  static uint32_t state = 2345;
  const uint32_t carry = state >> 16;
  state = 18273 * (state & 0xFFFF) + carry;  // Provably never reaches 0.
  return state & 0xFFFF;
}
191
192
193 // Plain old data class. Represents a block of allocated memory.
194 class Block {
195 public:
Block(Address base_arg,int size_arg)196 Block(Address base_arg, int size_arg)
197 : base(base_arg), size(size_arg) {}
198
199 Address base;
200 int size;
201 };
202
203
// Stress-tests CodeRange memory management: repeatedly allocates and frees
// pseudorandomly sized blocks until the cumulative total is several times
// the range's capacity, which can only succeed if freed chunks are
// reclaimed and reused.
TEST(CodeRange) {
  const int code_range_size = 32*MB;
  OS::SetUp();
  Isolate::Current()->InitializeLoggingAndCounters();
  CodeRange* code_range = new CodeRange(Isolate::Current());
  code_range->SetUp(code_range_size);
  int current_allocated = 0;  // Bytes currently held in |blocks|.
  int total_allocated = 0;    // Cumulative bytes allocated so far.
  List<Block> blocks(1000);

  while (total_allocated < 5 * code_range_size) {
    if (current_allocated < code_range_size / 10) {
      // Allocate a block.
      // Geometrically distributed sizes, greater than
      // Page::kMaxNonCodeHeapObjectSize (which is greater than code page area).
      // TODO(gc): instead of using 3 use some constant based on
      // code_range_size kMaxHeapObjectSize.
      size_t requested =
          (Page::kMaxNonCodeHeapObjectSize << (Pseudorandom() % 3)) +
          Pseudorandom() % 5000 + 1;
      size_t allocated = 0;
      Address base = code_range->AllocateRawMemory(requested, &allocated);
      CHECK(base != NULL);
      blocks.Add(Block(base, static_cast<int>(allocated)));
      current_allocated += static_cast<int>(allocated);
      total_allocated += static_cast<int>(allocated);
    } else {
      // Free a pseudorandomly chosen block and compact the list by moving
      // the last element into the freed slot (order is irrelevant here).
      int index = Pseudorandom() % blocks.length();
      code_range->FreeRawMemory(blocks[index].base, blocks[index].size);
      current_allocated -= blocks[index].size;
      if (index < blocks.length() - 1) {
        blocks[index] = blocks.RemoveLast();
      } else {
        blocks.RemoveLast();
      }
    }
  }

  code_range->TearDown();
  delete code_range;
}
246