// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"
#include "accessors.h"

#include "cctest.h"


using namespace v8::internal;

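// Helper for the stress tests below. The first two calls report a retryable
// allocation failure; from the third call on it fills each heap space in turn
// and checks that raw allocation into that space still succeeds, finally
// returning the Smi 42 as its result.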
static MaybeObject* AllocateAfterFailures() {
  static int attempts = 0;
  if (++attempts < 3) return Failure::RetryAfterGC();
  Heap* heap = CcTest::heap();

  // New space.
  SimulateFullSpace(heap->new_space());
  CHECK(!heap->AllocateByteArray(100)->IsFailure());
  CHECK(!heap->AllocateFixedArray(100, NOT_TENURED)->IsFailure());

  // Make sure we can allocate through optimized allocation functions
  // for specific kinds.
  CHECK(!heap->AllocateFixedArray(100)->IsFailure());
  CHECK(!heap->AllocateHeapNumber(0.42)->IsFailure());
  CHECK(!heap->AllocateArgumentsObject(Smi::FromInt(87), 10)->IsFailure());
  Object* object = heap->AllocateJSObject(
      *CcTest::i_isolate()->object_function())->ToObjectChecked();
  CHECK(!heap->CopyJSObject(JSObject::cast(object))->IsFailure());

  // Old data space.
  SimulateFullSpace(heap->old_data_space());
  CHECK(!heap->AllocateRawOneByteString(100, TENURED)->IsFailure());

  // Old pointer space.
  SimulateFullSpace(heap->old_pointer_space());
  CHECK(!heap->AllocateFixedArray(10000, TENURED)->IsFailure());

  // Large object space.
  static const int kLargeObjectSpaceFillerLength = 300000;
  static const int kLargeObjectSpaceFillerSize = FixedArray::SizeFor(
      kLargeObjectSpaceFillerLength);
  ASSERT(kLargeObjectSpaceFillerSize > heap->old_pointer_space()->AreaSize());
  while (heap->OldGenerationSpaceAvailable() > kLargeObjectSpaceFillerSize) {
    CHECK(!heap->AllocateFixedArray(kLargeObjectSpaceFillerLength, TENURED)->
          IsFailure());
  }
  CHECK(!heap->AllocateFixedArray(kLargeObjectSpaceFillerLength, TENURED)->
        IsFailure());

  // Map space.
  SimulateFullSpace(heap->map_space());
  int instance_size = JSObject::kHeaderSize;
  CHECK(!heap->AllocateMap(JS_OBJECT_TYPE, instance_size)->IsFailure());

  // Test that we can allocate in old pointer space and code space.
  SimulateFullSpace(heap->code_space());
  CHECK(!heap->AllocateFixedArray(100, TENURED)->IsFailure());
  CHECK(!heap->CopyCode(CcTest::i_isolate()->builtins()->builtin(
      Builtins::kIllegal))->IsFailure());

  // Return success.
  return Smi::FromInt(42);
}

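// Wraps the stress helper in CALL_HEAP_FUNCTION, which reruns the allocation
// (collecting garbage in between) whenever it reports a retryable failure and
// hands the final result back as a Handle<Object>.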
static Handle<Object> Test() {
  CALL_HEAP_FUNCTION(CcTest::i_isolate(), AllocateAfterFailures(), Object);
}

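// Checks that the retrying allocation path above still produces the expected
// Smi result when run inside a fresh context.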
TEST(StressHandles) {
  v8::HandleScope scope(CcTest::isolate());
  v8::Handle<v8::Context> env = v8::Context::New(CcTest::isolate());
  env->Enter();
  Handle<Object> o = Test();
  CHECK(o->IsSmi() && Smi::cast(*o)->value() == 42);
  env->Exit();
}

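// Native getter used by StressJS below; every property read goes through the
// same stress-allocation helper.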
static MaybeObject* TestAccessorGet(Isolate* isolate, Object* object, void*) {
  return AllocateAfterFailures();
}


const AccessorDescriptor kDescriptor = {
  TestAccessorGet,
  0,
  0
};

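// Builds a constructor function, installs the native "get" accessor on its
// initial map, and then reads the property from JavaScript. The read funnels
// through TestAccessorGet and therefore through AllocateAfterFailures, so the
// script is expected to evaluate to 42.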
TEST(StressJS) {
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  v8::HandleScope scope(CcTest::isolate());
  v8::Handle<v8::Context> env = v8::Context::New(CcTest::isolate());
  env->Enter();
  Handle<JSFunction> function =
      factory->NewFunction(factory->function_string(), factory->null_value());
  // Force the creation of an initial map and set the code to
  // something empty.
  factory->NewJSObject(function);
  function->ReplaceCode(CcTest::i_isolate()->builtins()->builtin(
      Builtins::kEmptyFunction));
  // Patch the map to have an accessor for "get".
  Handle<Map> map(function->initial_map());
  Handle<DescriptorArray> instance_descriptors(map->instance_descriptors());
  Handle<Foreign> foreign = factory->NewForeign(&kDescriptor);
  Handle<String> name =
      factory->NewStringFromAscii(Vector<const char>("get", 3));
  ASSERT(instance_descriptors->IsEmpty());

  Handle<DescriptorArray> new_descriptors = factory->NewDescriptorArray(0, 1);

  v8::internal::DescriptorArray::WhitenessWitness witness(*new_descriptors);
  map->set_instance_descriptors(*new_descriptors);

  CallbacksDescriptor d(*name,
                        *foreign,
                        static_cast<PropertyAttributes>(0));
  map->AppendDescriptor(&d, witness);
  // Add the Foo constructor to the global object.
  env->Global()->Set(v8::String::NewFromUtf8(CcTest::isolate(), "Foo"),
                     v8::Utils::ToLocal(function));
  // Call the accessor through JavaScript.
  v8::Handle<v8::Value> result = v8::Script::Compile(
      v8::String::NewFromUtf8(CcTest::isolate(), "(new Foo).get"))->Run();
  CHECK_EQ(42, result->Int32Value());
  env->Exit();
}


// CodeRange test.
// Tests memory management in a CodeRange by allocating and freeing blocks,
// using a pseudorandom generator to choose block sizes geometrically
// distributed between 2 * Page::kPageSize and (2^5 + 1) * Page::kPageSize.
// Ensure that the freed chunks are collected and reused by allocating (in
// total) more than the size of the CodeRange.

// This pseudorandom generator does not need to be particularly good.
// Use the lower half of the V8::Random() generator.
unsigned int Pseudorandom() {
  static uint32_t lo = 2345;
  lo = 18273 * (lo & 0xFFFF) + (lo >> 16);  // Provably not 0.
  return lo & 0xFFFF;
}


// Plain old data class. Represents a block of allocated memory.
class Block {
 public:
  Block(Address base_arg, int size_arg)
      : base(base_arg), size(size_arg) {}

  Address base;
  int size;
};

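// Keeps roughly code_range_size / 10 bytes of blocks live at any time while
// cycling a total of 5 * code_range_size through the CodeRange, so the test
// only passes if freed chunks are actually returned to the range and reused.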
TEST(CodeRange) {
  const int code_range_size = 32*MB;
  CcTest::InitializeVM();
  CodeRange code_range(reinterpret_cast<Isolate*>(CcTest::isolate()));
  code_range.SetUp(code_range_size);
  int current_allocated = 0;
  int total_allocated = 0;
  List<Block> blocks(1000);

  while (total_allocated < 5 * code_range_size) {
    if (current_allocated < code_range_size / 10) {
      // Allocate a block.
      // Geometrically distributed sizes, greater than
      // Page::kMaxNonCodeHeapObjectSize (which is greater than code page area).
      // TODO(gc): instead of using 3 use some constant based on
      // code_range_size and kMaxHeapObjectSize.
      size_t requested =
          (Page::kMaxNonCodeHeapObjectSize << (Pseudorandom() % 3)) +
          Pseudorandom() % 5000 + 1;
      size_t allocated = 0;
      Address base = code_range.AllocateRawMemory(requested,
                                                  requested,
                                                  &allocated);
      CHECK(base != NULL);
      blocks.Add(Block(base, static_cast<int>(allocated)));
      current_allocated += static_cast<int>(allocated);
      total_allocated += static_cast<int>(allocated);
    } else {
      // Free a block.
      int index = Pseudorandom() % blocks.length();
      code_range.FreeRawMemory(blocks[index].base, blocks[index].size);
      current_allocated -= blocks[index].size;
      if (index < blocks.length() - 1) {
        blocks[index] = blocks.RemoveLast();
      } else {
        blocks.RemoveLast();
      }
    }
  }

  code_range.TearDown();
}