• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 // Copyright 2010 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
4 // met:
5 //
6 //     * Redistributions of source code must retain the above copyright
7 //       notice, this list of conditions and the following disclaimer.
8 //     * Redistributions in binary form must reproduce the above
9 //       copyright notice, this list of conditions and the following
10 //       disclaimer in the documentation and/or other materials provided
11 //       with the distribution.
12 //     * Neither the name of Google Inc. nor the names of its
13 //       contributors may be used to endorse or promote products derived
14 //       from this software without specific prior written permission.
15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 
28 #include "v8.h"
29 
30 #include "codegen-inl.h"
31 #include "fast-codegen.h"
32 
33 namespace v8 {
34 namespace internal {
35 
36 #define __ ACCESS_MASM(masm())
37 
// Fixed register assignments for the fast code generator on ARM.
// accumulator0() is the main value register (also the JS return register,
// r0); for binary operations the left operand is in accumulator1() and the
// right in accumulator0() (see EmitBitOr).  scratch0()/scratch1() are
// temporaries, receiver_reg() holds the receiver ('this'), and
// context_reg() aliases cp, the context register.
Register FastCodeGenerator::accumulator0() { return r0; }
Register FastCodeGenerator::accumulator1() { return r1; }
Register FastCodeGenerator::scratch0() { return r3; }
Register FastCodeGenerator::scratch1() { return r4; }
Register FastCodeGenerator::receiver_reg() { return r2; }
Register FastCodeGenerator::context_reg() { return cp; }
44 
45 
EmitLoadReceiver()46 void FastCodeGenerator::EmitLoadReceiver() {
47   // Offset 2 is due to return address and saved frame pointer.
48   int index = 2 + scope()->num_parameters();
49   __ ldr(receiver_reg(), MemOperand(sp, index * kPointerSize));
50 }
51 
52 
EmitGlobalVariableLoad(Handle<Object> cell)53 void FastCodeGenerator::EmitGlobalVariableLoad(Handle<Object> cell) {
54   ASSERT(!destination().is(no_reg));
55   ASSERT(cell->IsJSGlobalPropertyCell());
56 
57   __ mov(destination(), Operand(cell));
58   __ ldr(destination(),
59          FieldMemOperand(destination(), JSGlobalPropertyCell::kValueOffset));
60   if (FLAG_debug_code) {
61     __ mov(ip, Operand(Factory::the_hole_value()));
62     __ cmp(destination(), ip);
63     __ Check(ne, "DontDelete cells can't contain the hole");
64   }
65 
66   // The loaded value is not known to be a smi.
67   clear_as_smi(destination());
68 }
69 
70 
// Store accumulator0() into the named property of the receiver ('this').
// The property lookup is resolved at compile time; the property must be
// a FIELD found directly on the receiver.  If the destination is
// accumulator1(), the stored value is also copied there.
void FastCodeGenerator::EmitThisPropertyStore(Handle<String> name) {
  LookupResult lookup;
  info()->receiver()->Lookup(*name, &lookup);

  // The fast compiler only handles fields found directly on the receiver.
  ASSERT(lookup.holder() == *info()->receiver());
  ASSERT(lookup.type() == FIELD);
  Handle<Map> map(Handle<HeapObject>::cast(info()->receiver())->map());
  // Field indices below inobject_properties() live inside the object
  // itself; the rest live in the properties backing array.
  int index = lookup.GetFieldIndex() - map->inobject_properties();
  int offset = index * kPointerSize;

  // We will emit the write barrier unless the stored value is statically
  // known to be a smi.
  bool needs_write_barrier = !is_smi(accumulator0());

  // Negative offsets are inobject properties.
  if (offset < 0) {
    offset += map->instance_size();
    __ str(accumulator0(), FieldMemOperand(receiver_reg(), offset));
    if (needs_write_barrier) {
      // Preserve receiver from write barrier.
      __ mov(scratch0(), receiver_reg());
    }
  } else {
    // Out-of-object property: store through the properties array.
    offset += FixedArray::kHeaderSize;
    __ ldr(scratch0(),
           FieldMemOperand(receiver_reg(), JSObject::kPropertiesOffset));
    __ str(accumulator0(), FieldMemOperand(scratch0(), offset));
  }

  // In both branches above scratch0() now holds the object that was
  // written to (the receiver or its properties array).
  if (needs_write_barrier) {
    __ mov(scratch1(), Operand(offset));
    __ RecordWrite(scratch0(), scratch1(), ip);
  }

  // If the expression result is also wanted in accumulator1(), copy the
  // stored value there together with its statically-known smi-ness.
  if (destination().is(accumulator1())) {
    __ mov(accumulator1(), accumulator0());
    if (is_smi(accumulator0())) {
      set_as_smi(accumulator1());
    } else {
      clear_as_smi(accumulator1());
    }
  }
}
114 
115 
EmitThisPropertyLoad(Handle<String> name)116 void FastCodeGenerator::EmitThisPropertyLoad(Handle<String> name) {
117   ASSERT(!destination().is(no_reg));
118   LookupResult lookup;
119   info()->receiver()->Lookup(*name, &lookup);
120 
121   ASSERT(lookup.holder() == *info()->receiver());
122   ASSERT(lookup.type() == FIELD);
123   Handle<Map> map(Handle<HeapObject>::cast(info()->receiver())->map());
124   int index = lookup.GetFieldIndex() - map->inobject_properties();
125   int offset = index * kPointerSize;
126 
127   // Perform the load.  Negative offsets are inobject properties.
128   if (offset < 0) {
129     offset += map->instance_size();
130     __ ldr(destination(), FieldMemOperand(receiver_reg(), offset));
131   } else {
132     offset += FixedArray::kHeaderSize;
133     __ ldr(scratch0(),
134            FieldMemOperand(receiver_reg(), JSObject::kPropertiesOffset));
135     __ ldr(destination(), FieldMemOperand(scratch0(), offset));
136   }
137 
138   // The loaded value is not known to be a smi.
139   clear_as_smi(destination());
140 }
141 
142 
EmitBitOr()143 void FastCodeGenerator::EmitBitOr() {
144   if (is_smi(accumulator0()) && is_smi(accumulator1())) {
145     // If both operands are known to be a smi then there is no need to check
146     // the operands or result.  There is no need to perform the operation in
147     // an effect context.
148     if (!destination().is(no_reg)) {
149       __ orr(destination(), accumulator1(), Operand(accumulator0()));
150     }
151   } else {
152     // Left is in accumulator1, right in accumulator0.
153     if (destination().is(accumulator0())) {
154       __ mov(scratch0(), accumulator0());
155       __ orr(destination(), accumulator1(), Operand(accumulator1()));
156       Label* bailout =
157           info()->AddBailout(accumulator1(), scratch0());  // Left, right.
158       __ BranchOnNotSmi(destination(), bailout);
159     } else if (destination().is(accumulator1())) {
160       __ mov(scratch0(), accumulator1());
161       __ orr(destination(), accumulator1(), Operand(accumulator0()));
162       Label* bailout = info()->AddBailout(scratch0(), accumulator0());
163       __ BranchOnNotSmi(destination(), bailout);
164     } else {
165       ASSERT(destination().is(no_reg));
166       __ orr(scratch0(), accumulator1(), Operand(accumulator0()));
167       Label* bailout = info()->AddBailout(accumulator1(), accumulator0());
168       __ BranchOnNotSmi(scratch0(), bailout);
169     }
170   }
171 
172   // If we didn't bailout, the result (in fact, both inputs too) is known to
173   // be a smi.
174   set_as_smi(accumulator0());
175   set_as_smi(accumulator1());
176 }
177 
178 
// Entry point: generate fast-mode code for the function described by
// compilation_info.  Map checks at the top bail out to the beginning of
// the full compiler's code whenever a compile-time assumption (receiver
// map, global object map) no longer holds.
void FastCodeGenerator::Generate(CompilationInfo* compilation_info) {
  ASSERT(info_ == NULL);
  info_ = compilation_info;

  // Save the caller's frame pointer and set up our own.
  Comment prologue_cmnt(masm(), ";; Prologue");
  // Push r1, cp, fp and lr; fp then points at the saved-fp slot, two
  // words below the new stack top.
  __ stm(db_w, sp, r1.bit() | cp.bit() | fp.bit() | lr.bit());
  __ add(fp, sp, Operand(2 * kPointerSize));
  // Note that we keep a live register reference to cp (context) at
  // this point.

  Label* bailout_to_beginning = info()->AddBailout();
  // Receiver (this) is allocated to a fixed register.
  if (info()->has_this_properties()) {
    Comment cmnt(masm(), ";; MapCheck(this)");
    if (FLAG_print_ir) {
      PrintF("MapCheck(this)\n");
    }
    // The receiver's map was recorded at lazy-compilation time; bail
    // out if it has changed since.
    ASSERT(info()->has_receiver() && info()->receiver()->IsHeapObject());
    Handle<HeapObject> object = Handle<HeapObject>::cast(info()->receiver());
    Handle<Map> map(object->map());
    EmitLoadReceiver();
    __ CheckMap(receiver_reg(), scratch0(), map, bailout_to_beginning, false);
  }

  // If there is a global variable access check if the global object is the
  // same as at lazy-compilation time.
  if (info()->has_globals()) {
    Comment cmnt(masm(), ";; MapCheck(GLOBAL)");
    if (FLAG_print_ir) {
      PrintF("MapCheck(GLOBAL)\n");
    }
    ASSERT(info()->has_global_object());
    Handle<Map> map(info()->global_object()->map());
    __ ldr(scratch0(), CodeGenerator::GlobalObject());
    __ CheckMap(scratch0(), scratch1(), map, bailout_to_beginning, true);
  }

  // Generate code for the function body.
  VisitStatements(function()->body());

  // Epilogue: return undefined, restore the caller's frame and drop the
  // receiver plus parameters from the stack.
  Comment return_cmnt(masm(), ";; Return(<undefined>)");
  if (FLAG_print_ir) {
    PrintF("Return(<undefined>)\n");
  }
  __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
  __ mov(sp, fp);
  __ ldm(ia_w, sp, fp.bit() | lr.bit());
  // +1 accounts for the receiver slot below the parameters.
  int32_t sp_delta = (scope()->num_parameters() + 1) * kPointerSize;
  __ add(sp, sp, Operand(sp_delta));
  __ Jump(lr);
}
230 
231 
232 #undef __
233 
234 
235 } }  // namespace v8::internal
236