// Copyright 2009 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#include "cfg.h"
#include "codegen-inl.h"
#include "codegen-arm.h"  // Include after codegen-inl.h.
#include "macro-assembler-arm.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm)
void InstructionBlock::Compile(MacroAssembler* masm) {
  ASSERT(!is_marked());
  is_marked_ = true;
  {
    Comment cmt(masm, "[ InstructionBlock");
    for (int i = 0, len = instructions_.length(); i < len; i++) {
      // If the location of the current instruction is a temp, then the
      // instruction cannot be in tail position in the block. Allocate the
      // temp based on peeking ahead to the next instruction.
      Instruction* instr = instructions_[i];
      Location* loc = instr->location();
      if (loc->is_temporary()) {
        instructions_[i + 1]->FastAllocate(TempLocation::cast(loc));
      }
      instructions_[i]->Compile(masm);
    }
  }
  successor_->Compile(masm);
}
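
// Note: the ASSERT(!is_marked()) / is_marked_ pair above acts as a check
// that every node is compiled exactly once as compilation walks the
// successor chain from the entry node; the nodes themselves unconditionally
// recurse into their successor.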


void EntryNode::Compile(MacroAssembler* masm) {
  ASSERT(!is_marked());
  is_marked_ = true;
  {
    Comment cmnt(masm, "[ EntryNode");
    __ stm(db_w, sp, r1.bit() | cp.bit() | fp.bit() | lr.bit());
    __ add(fp, sp, Operand(2 * kPointerSize));
    int count = CfgGlobals::current()->fun()->scope()->num_stack_slots();
    if (count > 0) {
      __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
      for (int i = 0; i < count; i++) {
        __ push(ip);
      }
    }
    if (FLAG_trace) {
      __ CallRuntime(Runtime::kTraceEnter, 0);
    }
    if (FLAG_check_stack) {
      StackCheckStub stub;
      __ CallStub(&stub);
    }
  }
  successor_->Compile(masm);
}
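
// Note: a sketch of the frame the code above establishes, assuming that
// stm(db_w, ...) stores the lowest-numbered register at the lowest address
// and that r1 holds the function being entered (V8's usual ARM convention):
//
//   fp + 2 * kPointerSize and up : parameters and receiver (pushed by caller)
//   fp + 1 * kPointerSize        : saved lr (return address)
//   fp                           : caller's fp
//   fp - 1 * kPointerSize        : context (cp)
//   fp - 2 * kPointerSize        : function (r1)
//   below that                   : num_stack_slots() locals, all undefined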


void ExitNode::Compile(MacroAssembler* masm) {
  ASSERT(!is_marked());
  is_marked_ = true;
  Comment cmnt(masm, "[ ExitNode");
  if (FLAG_trace) {
    __ push(r0);
    __ CallRuntime(Runtime::kTraceExit, 1);
  }
  __ mov(sp, fp);
  __ ldm(ia_w, sp, fp.bit() | lr.bit());
  int count = CfgGlobals::current()->fun()->scope()->num_parameters();
  __ add(sp, sp, Operand((count + 1) * kPointerSize));
  __ Jump(lr);
}
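
// Note: the exit sequence above undoes EntryNode::Compile in reverse:
// mov(sp, fp) discards the locals, context and function slots,
// ldm(ia_w, ...) restores the caller's fp and the return address, and the
// final add drops the 'count' parameters plus (presumably) the receiver
// before returning through lr.  The return value is expected in r0
// (see ReturnInstr below).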


void PropLoadInstr::Compile(MacroAssembler* masm) {
  // The key should not be on the stack---if it is a compiler-generated
  // temporary it is in the accumulator.
  ASSERT(!key()->is_on_stack());

  Comment cmnt(masm, "[ Load from Property");
  // If the key is known at compile-time we may be able to use a load IC.
  bool is_keyed_load = true;
  if (key()->is_constant()) {
    // Still use the keyed load IC if the key can be parsed as an integer so
    // we will get into the case that handles [] on string objects.
    Handle<Object> key_val = Constant::cast(key())->handle();
    uint32_t ignored;
    if (key_val->IsSymbol() &&
        !String::cast(*key_val)->AsArrayIndex(&ignored)) {
      is_keyed_load = false;
    }
  }

  if (!object()->is_on_stack()) object()->Push(masm);

  if (is_keyed_load) {
    key()->Push(masm);
    Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize));
    __ Call(ic, RelocInfo::CODE_TARGET);
    // Discard key and receiver.
    __ add(sp, sp, Operand(2 * kPointerSize));
  } else {
    key()->Get(masm, r2);
    Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize));
    __ Call(ic, RelocInfo::CODE_TARGET);
    __ pop();  // Discard receiver.
  }
  location()->Set(masm, r0);
}
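
// Note: as used above (a reading of this code, not a full specification of
// the ICs), the keyed load IC takes both receiver and key on the stack and
// leaves them there for the caller to discard, while the named load IC takes
// the receiver on the stack and the property name in r2.  In both cases the
// result arrives in r0, which is then stored into this instruction's
// location.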


void BinaryOpInstr::Compile(MacroAssembler* masm) {
  // The right-hand value should not be on the stack---if it is a
  // compiler-generated temporary it is in the accumulator.
  ASSERT(!right()->is_on_stack());

  Comment cmnt(masm, "[ BinaryOpInstr");
  // We can overwrite one of the operands if it is a temporary.
  OverwriteMode mode = NO_OVERWRITE;
  if (left()->is_temporary()) {
    mode = OVERWRITE_LEFT;
  } else if (right()->is_temporary()) {
    mode = OVERWRITE_RIGHT;
  }

  // Move left to r1 and right to r0.
  left()->Get(masm, r1);
  right()->Get(masm, r0);
  GenericBinaryOpStub stub(op(), mode);
  __ CallStub(&stub);
  location()->Set(masm, r0);
}
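
// Note: OverwriteMode is an optimization hint for GenericBinaryOpStub;
// telling the stub that one operand is a compiler temporary lets it reuse
// that operand's heap number for the result instead of allocating a fresh
// one (assuming the standard behavior of the generic binary op stub).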


void ReturnInstr::Compile(MacroAssembler* masm) {
  // The location should be 'Effect'. As a side effect, move the value to
  // the accumulator.
  Comment cmnt(masm, "[ ReturnInstr");
  value()->Get(masm, r0);
}


void Constant::Get(MacroAssembler* masm, Register reg) {
  __ mov(reg, Operand(handle_));
}


void Constant::Push(MacroAssembler* masm) {
  __ mov(ip, Operand(handle_));
  __ push(ip);
}


static MemOperand ToMemOperand(SlotLocation* loc) {
  switch (loc->type()) {
    case Slot::PARAMETER: {
      int count = CfgGlobals::current()->fun()->scope()->num_parameters();
      return MemOperand(fp, (1 + count - loc->index()) * kPointerSize);
    }
    case Slot::LOCAL: {
      const int kOffset = JavaScriptFrameConstants::kLocal0Offset;
      return MemOperand(fp, kOffset - loc->index() * kPointerSize);
    }
    default:
      UNREACHABLE();
      return MemOperand(r0);
  }
}
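
// Worked example of the addressing above (illustrative only): for a function
// with num_parameters() == 2, parameter 0 lives at fp + 3 * kPointerSize and
// parameter 1 at fp + 2 * kPointerSize, just above the saved return address,
// while local i lives at fp + kLocal0Offset - i * kPointerSize, below the
// saved context and function slots set up by EntryNode::Compile.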


void Constant::MoveToSlot(MacroAssembler* masm, SlotLocation* loc) {
  __ mov(ip, Operand(handle_));
  __ str(ip, ToMemOperand(loc));
}


void SlotLocation::Get(MacroAssembler* masm, Register reg) {
  __ ldr(reg, ToMemOperand(this));
}


void SlotLocation::Set(MacroAssembler* masm, Register reg) {
  __ str(reg, ToMemOperand(this));
}


void SlotLocation::Push(MacroAssembler* masm) {
  __ ldr(ip, ToMemOperand(this));
  __ push(ip);  // Push will not destroy ip.
}


void SlotLocation::Move(MacroAssembler* masm, Value* value) {
  // Double dispatch.
  value->MoveToSlot(masm, this);
}


void SlotLocation::MoveToSlot(MacroAssembler* masm, SlotLocation* loc) {
  __ ldr(ip, ToMemOperand(this));
  __ str(ip, ToMemOperand(loc));
}
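
// Note on the "double dispatch" above: SlotLocation::Move only knows the
// destination slot, so it bounces to value->MoveToSlot(), which each value
// class in this file (Constant, SlotLocation, TempLocation) implements with
// the store sequence appropriate to where its data actually lives.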


void TempLocation::Get(MacroAssembler* masm, Register reg) {
  switch (where_) {
    case ACCUMULATOR:
      if (!reg.is(r0)) __ mov(reg, r0);
      break;
    case STACK:
      __ pop(reg);
      break;
    case NOT_ALLOCATED:
      UNREACHABLE();
  }
}
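
// Note: once allocated, a temporary lives in exactly one of two places,
// either the accumulator (r0) or the top of the stack.  As the STACK case
// above shows, reading a stack-allocated temp pops it, which suggests each
// temp is consumed by a single use.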


void TempLocation::Set(MacroAssembler* masm, Register reg) {
  switch (where_) {
    case ACCUMULATOR:
      if (!reg.is(r0)) __ mov(r0, reg);
      break;
    case STACK:
      __ push(reg);
      break;
    case NOT_ALLOCATED:
      UNREACHABLE();
  }
}


void TempLocation::Push(MacroAssembler* masm) {
  switch (where_) {
    case ACCUMULATOR:
      __ push(r0);
      break;
    case STACK:
    case NOT_ALLOCATED:
      UNREACHABLE();
  }
}
void TempLocation::Move(MacroAssembler* masm, Value* value) {
  switch (where_) {
    case ACCUMULATOR:
      value->Get(masm, r0);
      break;
    case STACK:
      value->Push(masm);
      break;
    case NOT_ALLOCATED:
      UNREACHABLE();
  }
}


void TempLocation::MoveToSlot(MacroAssembler* masm, SlotLocation* loc) {
  switch (where_) {
    case ACCUMULATOR:
      __ str(r0, ToMemOperand(loc));
      break;
    case STACK:
      __ pop(ip);
      __ str(ip, ToMemOperand(loc));
      break;
    case NOT_ALLOCATED:
      UNREACHABLE();
  }
}

#undef __

} }  // namespace v8::internal