• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 /*
2  * Copyright (C) 2011 The Android Open Source Project
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  *      http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 
17 #include "assembler_x86.h"
18 
19 #include "base/casts.h"
20 #include "entrypoints/quick/quick_entrypoints.h"
21 #include "memory_region.h"
22 #include "thread.h"
23 
24 namespace art {
25 namespace x86 {
26 
class DirectCallRelocation : public AssemblerFixup {
 public:
  // Fixup that rewrites an absolute call target stored at |position| into the
  // rel32 displacement form x86 direct calls use: the displacement is taken
  // relative to the end of the 4-byte operand (i.e. the next instruction).
  void Process(const MemoryRegion& region, int position) {
    // Direct calls are relative to the following instruction on x86.
    int32_t pointer = region.Load<int32_t>(position);
    // NOTE(review): reinterpret_cast of a pointer to int32_t truncates on a
    // 64-bit host build; this assumes 32-bit pointers — confirm build config.
    int32_t start = reinterpret_cast<int32_t>(region.start());
    int32_t delta = start + position + sizeof(int32_t);
    region.Store<int32_t>(position, pointer - delta);
  }
};
37 
// Prints an XMM register as "XMM<n>" (e.g. XMM0) for debugging/disassembly.
std::ostream& operator<<(std::ostream& os, const XmmRegister& reg) {
  return os << "XMM" << static_cast<int>(reg);
}
41 
// Prints an x87 stack register as "ST<n>" (e.g. ST0) for debugging/disassembly.
std::ostream& operator<<(std::ostream& os, const X87Register& reg) {
  return os << "ST" << static_cast<int>(reg);
}
45 
void X86Assembler::call(Register reg) {
  // call reg — FF /2.
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xFF);
  EmitRegisterOperand(2, reg);
}
51 
52 
void X86Assembler::call(const Address& address) {
  // call m32 — FF /2.
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xFF);
  EmitOperand(2, address);
}
58 
59 
void X86Assembler::call(Label* label) {
  // call rel32 — E8 cd; total instruction size is 5 bytes, which EmitLabel
  // needs to compute the relative displacement.
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xE8);
  static const int kSize = 5;
  EmitLabel(label, kSize);
}
66 
67 
void X86Assembler::pushl(Register reg) {
  // push r32 — 50+rd.
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x50 + reg);
}
72 
73 
void X86Assembler::pushl(const Address& address) {
  // push m32 — FF /6.
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xFF);
  EmitOperand(6, address);
}
79 
80 
void X86Assembler::pushl(const Immediate& imm) {
  // push imm8 — 6A ib (sign-extended) when it fits, else push imm32 — 68 id.
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  if (imm.is_int8()) {
    EmitUint8(0x6A);
    EmitUint8(imm.value() & 0xFF);
  } else {
    EmitUint8(0x68);
    EmitImmediate(imm);
  }
}
91 
92 
void X86Assembler::popl(Register reg) {
  // pop r32 — 58+rd.
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x58 + reg);
}
97 
98 
void X86Assembler::popl(const Address& address) {
  // pop m32 — 8F /0.
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x8F);
  EmitOperand(0, address);
}
104 
105 
void X86Assembler::movl(Register dst, const Immediate& imm) {
  // mov r32, imm32 — B8+rd id.
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xB8 + dst);
  EmitImmediate(imm);
}
111 
112 
void X86Assembler::movl(Register dst, Register src) {
  // mov r/m32, r32 — 89 /r (src in the reg field, dst in r/m).
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x89);
  EmitRegisterOperand(src, dst);
}
118 
119 
void X86Assembler::movl(Register dst, const Address& src) {
  // mov r32, r/m32 — 8B /r.
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x8B);
  EmitOperand(dst, src);
}
125 
126 
void X86Assembler::movl(const Address& dst, Register src) {
  // mov r/m32, r32 — 89 /r.
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x89);
  EmitOperand(src, dst);
}
132 
133 
void X86Assembler::movl(const Address& dst, const Immediate& imm) {
  // mov m32, imm32 — C7 /0 id.
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xC7);
  EmitOperand(0, dst);
  EmitImmediate(imm);
}
140 
void X86Assembler::movl(const Address& dst, Label* lbl) {
  // mov m32, imm32 — C7 /0 with the label's position as the immediate.
  // dst.length_ + 5 is presumably the full instruction size (opcode +
  // modrm/sib/disp + 4 imm bytes) needed by EmitLabel — TODO confirm.
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xC7);
  EmitOperand(0, dst);
  EmitLabel(lbl, dst.length_ + 5);
}
147 
void X86Assembler::movzxb(Register dst, ByteRegister src) {
  // movzx r32, r8 — 0F B6 /r.
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0xB6);
  EmitRegisterOperand(dst, src);
}
154 
155 
void X86Assembler::movzxb(Register dst, const Address& src) {
  // movzx r32, m8 — 0F B6 /r.
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0xB6);
  EmitOperand(dst, src);
}
162 
163 
void X86Assembler::movsxb(Register dst, ByteRegister src) {
  // movsx r32, r8 — 0F BE /r.
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0xBE);
  EmitRegisterOperand(dst, src);
}
170 
171 
void X86Assembler::movsxb(Register dst, const Address& src) {
  // movsx r32, m8 — 0F BE /r.
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0xBE);
  EmitOperand(dst, src);
}
178 
179 
// Deliberately unsupported: a plain byte load leaves the upper 24 bits of the
// destination unspecified, so callers must pick zero- or sign-extension.
void X86Assembler::movb(Register /*dst*/, const Address& /*src*/) {
  LOG(FATAL) << "Use movzxb or movsxb instead.";
}
183 
184 
void X86Assembler::movb(const Address& dst, ByteRegister src) {
  // mov m8, r8 — 88 /r.
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x88);
  EmitOperand(src, dst);
}
190 
191 
void X86Assembler::movb(const Address& dst, const Immediate& imm) {
  // mov m8, imm8 — C6 /0 ib. EAX (== 0) is only used here as the /0 opcode
  // extension in the ModRM reg field, not as a register operand.
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xC6);
  EmitOperand(EAX, dst);
  CHECK(imm.is_int8());
  EmitUint8(imm.value() & 0xFF);
}
199 
200 
void X86Assembler::movzxw(Register dst, Register src) {
  // movzx r32, r16 — 0F B7 /r.
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0xB7);
  EmitRegisterOperand(dst, src);
}
207 
208 
void X86Assembler::movzxw(Register dst, const Address& src) {
  // movzx r32, m16 — 0F B7 /r.
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0xB7);
  EmitOperand(dst, src);
}
215 
216 
void X86Assembler::movsxw(Register dst, Register src) {
  // movsx r32, r16 — 0F BF /r.
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0xBF);
  EmitRegisterOperand(dst, src);
}
223 
224 
void X86Assembler::movsxw(Register dst, const Address& src) {
  // movsx r32, m16 — 0F BF /r.
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0xBF);
  EmitOperand(dst, src);
}
231 
232 
// Deliberately unsupported: a 16-bit load leaves the upper 16 bits of the
// destination unspecified, so callers must pick zero- or sign-extension.
void X86Assembler::movw(Register /*dst*/, const Address& /*src*/) {
  LOG(FATAL) << "Use movzxw or movsxw instead.";
}
236 
237 
void X86Assembler::movw(const Address& dst, Register src) {
  // mov m16, r16 — 66 (operand-size override) 89 /r.
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOperandSizeOverride();
  EmitUint8(0x89);
  EmitOperand(src, dst);
}
244 
245 
void X86Assembler::leal(Register dst, const Address& src) {
  // lea r32, m — 8D /r.
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x8D);
  EmitOperand(dst, src);
}
251 
252 
void X86Assembler::cmovl(Condition condition, Register dst, Register src) {
  // cmovcc r32, r/m32 — 0F 40+cc /r.
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0x40 + condition);
  EmitRegisterOperand(dst, src);
}
259 
260 
void X86Assembler::setb(Condition condition, Register dst) {
  // setcc r/m8 — 0F 90+cc; the ModRM reg field is ignored, hence /0.
  // NOTE(review): for dst > 3 this r/m encoding selects AH/CH/DH/BH on x86-32;
  // presumably callers only pass EAX..EBX — confirm.
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0x90 + condition);
  EmitOperand(0, Operand(dst));
}
267 
268 
void X86Assembler::movss(XmmRegister dst, const Address& src) {
  // movss xmm, m32 — F3 0F 10 /r.
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitUint8(0x0F);
  EmitUint8(0x10);
  EmitOperand(dst, src);
}
276 
277 
void X86Assembler::movss(const Address& dst, XmmRegister src) {
  // movss m32, xmm — F3 0F 11 /r.
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitUint8(0x0F);
  EmitUint8(0x11);
  EmitOperand(src, dst);
}
285 
286 
void X86Assembler::movss(XmmRegister dst, XmmRegister src) {
  // movss xmm, xmm via the store form — F3 0F 11 /r with operands swapped
  // (src in the reg field, dst in r/m); equivalent to the 0F 10 load form.
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitUint8(0x0F);
  EmitUint8(0x11);
  EmitXmmRegisterOperand(src, dst);
}
294 
295 
void X86Assembler::movd(XmmRegister dst, Register src) {
  // movd xmm, r/m32 — 66 0F 6E /r.
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitUint8(0x0F);
  EmitUint8(0x6E);
  EmitOperand(dst, Operand(src));
}
303 
304 
void X86Assembler::movd(Register dst, XmmRegister src) {
  // movd r/m32, xmm — 66 0F 7E /r.
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitUint8(0x0F);
  EmitUint8(0x7E);
  EmitOperand(src, Operand(dst));
}
312 
313 
void X86Assembler::addss(XmmRegister dst, XmmRegister src) {
  // addss xmm, xmm — F3 0F 58 /r.
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitUint8(0x0F);
  EmitUint8(0x58);
  EmitXmmRegisterOperand(dst, src);
}
321 
322 
void X86Assembler::addss(XmmRegister dst, const Address& src) {
  // addss xmm, m32 — F3 0F 58 /r.
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitUint8(0x0F);
  EmitUint8(0x58);
  EmitOperand(dst, src);
}
330 
331 
void X86Assembler::subss(XmmRegister dst, XmmRegister src) {
  // subss xmm, xmm — F3 0F 5C /r.
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitUint8(0x0F);
  EmitUint8(0x5C);
  EmitXmmRegisterOperand(dst, src);
}
339 
340 
void X86Assembler::subss(XmmRegister dst, const Address& src) {
  // subss xmm, m32 — F3 0F 5C /r.
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitUint8(0x0F);
  EmitUint8(0x5C);
  EmitOperand(dst, src);
}
348 
349 
void X86Assembler::mulss(XmmRegister dst, XmmRegister src) {
  // mulss xmm, xmm — F3 0F 59 /r.
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitUint8(0x0F);
  EmitUint8(0x59);
  EmitXmmRegisterOperand(dst, src);
}
357 
358 
void X86Assembler::mulss(XmmRegister dst, const Address& src) {
  // mulss xmm, m32 — F3 0F 59 /r.
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitUint8(0x0F);
  EmitUint8(0x59);
  EmitOperand(dst, src);
}
366 
367 
void X86Assembler::divss(XmmRegister dst, XmmRegister src) {
  // divss xmm, xmm — F3 0F 5E /r.
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitUint8(0x0F);
  EmitUint8(0x5E);
  EmitXmmRegisterOperand(dst, src);
}
375 
376 
void X86Assembler::divss(XmmRegister dst, const Address& src) {
  // divss xmm, m32 — F3 0F 5E /r.
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitUint8(0x0F);
  EmitUint8(0x5E);
  EmitOperand(dst, src);
}
384 
385 
void X86Assembler::flds(const Address& src) {
  // fld m32fp — D9 /0: push single-precision value onto the x87 stack.
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xD9);
  EmitOperand(0, src);
}
391 
392 
void X86Assembler::fstps(const Address& dst) {
  // fstp m32fp — D9 /3: store single-precision and pop the x87 stack.
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xD9);
  EmitOperand(3, dst);
}
398 
399 
void X86Assembler::movsd(XmmRegister dst, const Address& src) {
  // movsd xmm, m64 — F2 0F 10 /r.
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  EmitUint8(0x0F);
  EmitUint8(0x10);
  EmitOperand(dst, src);
}
407 
408 
void X86Assembler::movsd(const Address& dst, XmmRegister src) {
  // movsd m64, xmm — F2 0F 11 /r.
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  EmitUint8(0x0F);
  EmitUint8(0x11);
  EmitOperand(src, dst);
}
416 
417 
void X86Assembler::movsd(XmmRegister dst, XmmRegister src) {
  // movsd xmm, xmm via the store form — F2 0F 11 /r with operands swapped
  // (src in the reg field, dst in r/m); equivalent to the 0F 10 load form.
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  EmitUint8(0x0F);
  EmitUint8(0x11);
  EmitXmmRegisterOperand(src, dst);
}
425 
426 
void X86Assembler::addsd(XmmRegister dst, XmmRegister src) {
  // addsd xmm, xmm — F2 0F 58 /r.
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  EmitUint8(0x0F);
  EmitUint8(0x58);
  EmitXmmRegisterOperand(dst, src);
}
434 
435 
void X86Assembler::addsd(XmmRegister dst, const Address& src) {
  // addsd xmm, m64 — F2 0F 58 /r.
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  EmitUint8(0x0F);
  EmitUint8(0x58);
  EmitOperand(dst, src);
}
443 
444 
void X86Assembler::subsd(XmmRegister dst, XmmRegister src) {
  // subsd xmm, xmm — F2 0F 5C /r.
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  EmitUint8(0x0F);
  EmitUint8(0x5C);
  EmitXmmRegisterOperand(dst, src);
}
452 
453 
void X86Assembler::subsd(XmmRegister dst, const Address& src) {
  // subsd xmm, m64 — F2 0F 5C /r.
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  EmitUint8(0x0F);
  EmitUint8(0x5C);
  EmitOperand(dst, src);
}
461 
462 
void X86Assembler::mulsd(XmmRegister dst, XmmRegister src) {
  // mulsd xmm, xmm — F2 0F 59 /r.
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  EmitUint8(0x0F);
  EmitUint8(0x59);
  EmitXmmRegisterOperand(dst, src);
}
470 
471 
void X86Assembler::mulsd(XmmRegister dst, const Address& src) {
  // mulsd xmm, m64 — F2 0F 59 /r.
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  EmitUint8(0x0F);
  EmitUint8(0x59);
  EmitOperand(dst, src);
}
479 
480 
void X86Assembler::divsd(XmmRegister dst, XmmRegister src) {
  // divsd xmm, xmm — F2 0F 5E /r.
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  EmitUint8(0x0F);
  EmitUint8(0x5E);
  EmitXmmRegisterOperand(dst, src);
}
488 
489 
void X86Assembler::divsd(XmmRegister dst, const Address& src) {
  // divsd xmm, m64 — F2 0F 5E /r.
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  EmitUint8(0x0F);
  EmitUint8(0x5E);
  EmitOperand(dst, src);
}
497 
498 
void X86Assembler::cvtsi2ss(XmmRegister dst, Register src) {
  // cvtsi2ss xmm, r/m32 — F3 0F 2A /r.
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitUint8(0x0F);
  EmitUint8(0x2A);
  EmitOperand(dst, Operand(src));
}
506 
507 
void X86Assembler::cvtsi2sd(XmmRegister dst, Register src) {
  // cvtsi2sd xmm, r/m32 — F2 0F 2A /r.
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  EmitUint8(0x0F);
  EmitUint8(0x2A);
  EmitOperand(dst, Operand(src));
}
515 
516 
void X86Assembler::cvtss2si(Register dst, XmmRegister src) {
  // cvtss2si r32, xmm — F3 0F 2D /r (rounding per MXCSR).
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitUint8(0x0F);
  EmitUint8(0x2D);
  EmitXmmRegisterOperand(dst, src);
}
524 
525 
void X86Assembler::cvtss2sd(XmmRegister dst, XmmRegister src) {
  // cvtss2sd xmm, xmm — F3 0F 5A /r.
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitUint8(0x0F);
  EmitUint8(0x5A);
  EmitXmmRegisterOperand(dst, src);
}
533 
534 
void X86Assembler::cvtsd2si(Register dst, XmmRegister src) {
  // cvtsd2si r32, xmm — F2 0F 2D /r (rounding per MXCSR).
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  EmitUint8(0x0F);
  EmitUint8(0x2D);
  EmitXmmRegisterOperand(dst, src);
}
542 
543 
void X86Assembler::cvttss2si(Register dst, XmmRegister src) {
  // cvttss2si r32, xmm — F3 0F 2C /r (truncating conversion).
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitUint8(0x0F);
  EmitUint8(0x2C);
  EmitXmmRegisterOperand(dst, src);
}
551 
552 
void X86Assembler::cvttsd2si(Register dst, XmmRegister src) {
  // cvttsd2si r32, xmm — F2 0F 2C /r (truncating conversion).
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  EmitUint8(0x0F);
  EmitUint8(0x2C);
  EmitXmmRegisterOperand(dst, src);
}
560 
561 
void X86Assembler::cvtsd2ss(XmmRegister dst, XmmRegister src) {
  // cvtsd2ss xmm, xmm — F2 0F 5A /r.
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  EmitUint8(0x0F);
  EmitUint8(0x5A);
  EmitXmmRegisterOperand(dst, src);
}
569 
570 
void X86Assembler::cvtdq2pd(XmmRegister dst, XmmRegister src) {
  // cvtdq2pd xmm, xmm — F3 0F E6 /r.
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitUint8(0x0F);
  EmitUint8(0xE6);
  EmitXmmRegisterOperand(dst, src);
}
578 
579 
void X86Assembler::comiss(XmmRegister a, XmmRegister b) {
  // comiss xmm, xmm — 0F 2F /r: ordered compare, sets EFLAGS.
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0x2F);
  EmitXmmRegisterOperand(a, b);
}
586 
587 
void X86Assembler::comisd(XmmRegister a, XmmRegister b) {
  // comisd xmm, xmm — 66 0F 2F /r: ordered compare, sets EFLAGS.
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitUint8(0x0F);
  EmitUint8(0x2F);
  EmitXmmRegisterOperand(a, b);
}
595 
596 
void X86Assembler::sqrtsd(XmmRegister dst, XmmRegister src) {
  // sqrtsd xmm, xmm — F2 0F 51 /r.
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  EmitUint8(0x0F);
  EmitUint8(0x51);
  EmitXmmRegisterOperand(dst, src);
}
604 
605 
void X86Assembler::sqrtss(XmmRegister dst, XmmRegister src) {
  // sqrtss xmm, xmm — F3 0F 51 /r.
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitUint8(0x0F);
  EmitUint8(0x51);
  EmitXmmRegisterOperand(dst, src);
}
613 
614 
void X86Assembler::xorpd(XmmRegister dst, const Address& src) {
  // xorpd xmm, m128 — 66 0F 57 /r.
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitUint8(0x0F);
  EmitUint8(0x57);
  EmitOperand(dst, src);
}
622 
623 
void X86Assembler::xorpd(XmmRegister dst, XmmRegister src) {
  // xorpd xmm, xmm — 66 0F 57 /r.
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitUint8(0x0F);
  EmitUint8(0x57);
  EmitXmmRegisterOperand(dst, src);
}
631 
632 
void X86Assembler::xorps(XmmRegister dst, const Address& src) {
  // xorps xmm, m128 — 0F 57 /r.
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0x57);
  EmitOperand(dst, src);
}
639 
640 
void X86Assembler::xorps(XmmRegister dst, XmmRegister src) {
  // xorps xmm, xmm — 0F 57 /r.
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0x57);
  EmitXmmRegisterOperand(dst, src);
}
647 
648 
void X86Assembler::andpd(XmmRegister dst, const Address& src) {
  // andpd xmm, m128 — 66 0F 54 /r.
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitUint8(0x0F);
  EmitUint8(0x54);
  EmitOperand(dst, src);
}
656 
657 
void X86Assembler::fldl(const Address& src) {
  // fld m64fp — DD /0: push double-precision value onto the x87 stack.
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xDD);
  EmitOperand(0, src);
}
663 
664 
void X86Assembler::fstpl(const Address& dst) {
  // fstp m64fp — DD /3: store double-precision and pop the x87 stack.
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xDD);
  EmitOperand(3, dst);
}
670 
671 
void X86Assembler::fnstcw(const Address& dst) {
  // fnstcw m16 — D9 /7: store the x87 control word (no-wait form, no FWAIT).
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xD9);
  EmitOperand(7, dst);
}
677 
678 
void X86Assembler::fldcw(const Address& src) {
  // fldcw m16 — D9 /5: load the x87 control word.
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xD9);
  EmitOperand(5, src);
}
684 
685 
void X86Assembler::fistpl(const Address& dst) {
  // fistp m64int — DF /7: store 64-bit integer and pop ("l" = long here).
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xDF);
  EmitOperand(7, dst);
}
691 
692 
void X86Assembler::fistps(const Address& dst) {
  // fistp m32int — DB /3: store 32-bit integer and pop ("s" = 32-bit here).
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xDB);
  EmitOperand(3, dst);
}
698 
699 
void X86Assembler::fildl(const Address& src) {
  // fild m64int — DF /5: push 64-bit integer onto the x87 stack.
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xDF);
  EmitOperand(5, src);
}
705 
706 
void X86Assembler::fincstp() {
  // fincstp — D9 F7: increment the x87 stack-top pointer.
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xD9);
  EmitUint8(0xF7);
}
712 
713 
ffree(const Immediate & index)714 void X86Assembler::ffree(const Immediate& index) {
715   CHECK_LT(index.value(), 7);
716   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
717   EmitUint8(0xDD);
718   EmitUint8(0xC0 + index.value());
719 }
720 
721 
void X86Assembler::fsin() {
  // fsin — D9 FE: replace ST0 with sin(ST0).
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xD9);
  EmitUint8(0xFE);
}
727 
728 
void X86Assembler::fcos() {
  // fcos — D9 FF: replace ST0 with cos(ST0).
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xD9);
  EmitUint8(0xFF);
}
734 
735 
void X86Assembler::fptan() {
  // fptan — D9 F2: replace ST0 with tan(ST0), then push 1.0.
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xD9);
  EmitUint8(0xF2);
}
741 
742 
void X86Assembler::xchgl(Register dst, Register src) {
  // xchg r32, r/m32 — 87 /r.
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x87);
  EmitRegisterOperand(dst, src);
}
748 
void X86Assembler::xchgl(Register reg, const Address& address) {
  // xchg r32, m32 — 87 /r (implicit LOCK when a memory operand is used).
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x87);
  EmitOperand(reg, address);
}
754 
755 
void X86Assembler::cmpl(Register reg, const Immediate& imm) {
  // cmp r/m32, imm — opcode extension /7 via EmitComplex (81/83 forms).
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitComplex(7, Operand(reg), imm);
}
760 
761 
void X86Assembler::cmpl(Register reg0, Register reg1) {
  // cmp r32, r/m32 — 3B /r.
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x3B);
  EmitOperand(reg0, Operand(reg1));
}
767 
768 
void X86Assembler::cmpl(Register reg, const Address& address) {
  // cmp r32, m32 — 3B /r.
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x3B);
  EmitOperand(reg, address);
}
774 
775 
void X86Assembler::addl(Register dst, Register src) {
  // add r32, r/m32 — 03 /r.
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x03);
  EmitRegisterOperand(dst, src);
}
781 
782 
void X86Assembler::addl(Register reg, const Address& address) {
  // add r32, m32 — 03 /r.
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x03);
  EmitOperand(reg, address);
}
788 
789 
void X86Assembler::cmpl(const Address& address, Register reg) {
  // cmp r/m32, r32 — 39 /r.
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x39);
  EmitOperand(reg, address);
}
795 
796 
void X86Assembler::cmpl(const Address& address, const Immediate& imm) {
  // cmp m32, imm — opcode extension /7 via EmitComplex (81/83 forms).
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitComplex(7, address, imm);
}
801 
802 
void X86Assembler::testl(Register reg1, Register reg2) {
  // test r/m32, r32 — 85 /r.
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x85);
  EmitRegisterOperand(reg1, reg2);
}
808 
809 
void X86Assembler::testl(Register reg, const Immediate& immediate) {
  // test with immediate, choosing the shortest encoding:
  //   AL imm8 (A8), r8 imm8 (F6 /0), EAX imm32 (A9), r/m32 imm32 (F7 /0).
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  // For registers that have a byte variant (EAX, EBX, ECX, and EDX)
  // we only test the byte register to keep the encoding short.
  if (immediate.is_uint8() && reg < 4) {
    // Use zero-extended 8-bit immediate.
    if (reg == EAX) {
      EmitUint8(0xA8);
    } else {
      // test r8, imm8 — F6 /0 with ModRM C0+reg.
      EmitUint8(0xF6);
      EmitUint8(0xC0 + reg);
    }
    EmitUint8(immediate.value() & 0xFF);
  } else if (reg == EAX) {
    // Use short form if the destination is EAX.
    EmitUint8(0xA9);
    EmitImmediate(immediate);
  } else {
    EmitUint8(0xF7);
    EmitOperand(0, Operand(reg));
    EmitImmediate(immediate);
  }
}
833 
834 
void X86Assembler::andl(Register dst, Register src) {
  // and r32, r/m32 — 23 /r.
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x23);
  EmitOperand(dst, Operand(src));
}
840 
841 
void X86Assembler::andl(Register dst, const Immediate& imm) {
  // and r/m32, imm — opcode extension /4 via EmitComplex (81/83 forms).
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitComplex(4, Operand(dst), imm);
}
846 
847 
void X86Assembler::orl(Register dst, Register src) {
  // or r32, r/m32 — 0B /r.
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0B);
  EmitOperand(dst, Operand(src));
}
853 
854 
void X86Assembler::orl(Register dst, const Immediate& imm) {
  // or r/m32, imm — opcode extension /1 via EmitComplex (81/83 forms).
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitComplex(1, Operand(dst), imm);
}
859 
860 
void X86Assembler::xorl(Register dst, Register src) {
  // xor r32, r/m32 — 33 /r.
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x33);
  EmitOperand(dst, Operand(src));
}
866 
867 
void X86Assembler::addl(Register reg, const Immediate& imm) {
  // add r/m32, imm — opcode extension /0 via EmitComplex (81/83 forms).
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitComplex(0, Operand(reg), imm);
}
872 
873 
void X86Assembler::addl(const Address& address, Register reg) {
  // add r/m32, r32 — 01 /r.
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x01);
  EmitOperand(reg, address);
}
879 
880 
void X86Assembler::addl(const Address& address, const Immediate& imm) {
  // add m32, imm — opcode extension /0 via EmitComplex (81/83 forms).
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitComplex(0, address, imm);
}
885 
886 
void X86Assembler::adcl(Register reg, const Immediate& imm) {
  // adc r/m32, imm — opcode extension /2 via EmitComplex (81/83 forms).
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitComplex(2, Operand(reg), imm);
}
891 
892 
void X86Assembler::adcl(Register dst, Register src) {
  // adc r32, r/m32 — 13 /r.
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x13);
  EmitOperand(dst, Operand(src));
}
898 
899 
void X86Assembler::adcl(Register dst, const Address& address) {
  // adc r32, m32 — 13 /r.
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x13);
  EmitOperand(dst, address);
}
905 
906 
void X86Assembler::subl(Register dst, Register src) {
  // sub r32, r/m32 — 2B /r.
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x2B);
  EmitOperand(dst, Operand(src));
}
912 
913 
void X86Assembler::subl(Register reg, const Immediate& imm) {
  // sub r/m32, imm — opcode extension /5 via EmitComplex (81/83 forms).
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitComplex(5, Operand(reg), imm);
}
918 
919 
void X86Assembler::subl(Register reg, const Address& address) {
  // sub r32, m32 — 2B /r.
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x2B);
  EmitOperand(reg, address);
}
925 
926 
void X86Assembler::cdq() {
  // cdq — 99: sign-extend EAX into EDX:EAX.
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x99);
}
931 
932 
void X86Assembler::idivl(Register reg) {
  // idiv r/m32 — F7 /7; 0xF8 | reg is ModRM 11 111 reg (register direct).
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF7);
  EmitUint8(0xF8 | reg);
}
938 
939 
void X86Assembler::imull(Register dst, Register src) {
  // imul r32, r/m32 — 0F AF /r.
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0xAF);
  EmitOperand(dst, Operand(src));
}
946 
947 
void X86Assembler::imull(Register reg, const Immediate& imm) {
  // imul r32, r/m32, imm32 — 69 /r id, with reg as both dst and src
  // (reg = reg * imm). Always uses the imm32 form, never the shorter 6B ib.
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x69);
  EmitOperand(reg, Operand(reg));
  EmitImmediate(imm);
}
954 
955 
void X86Assembler::imull(Register reg, const Address& address) {
  // imul r32, m32 — 0F AF /r.
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0xAF);
  EmitOperand(reg, address);
}
962 
963 
void X86Assembler::imull(Register reg) {
  // imul r/m32 — F7 /5: EDX:EAX = EAX * reg (signed).
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF7);
  EmitOperand(5, Operand(reg));
}
969 
970 
void X86Assembler::imull(const Address& address) {
  // imul m32 — F7 /5: EDX:EAX = EAX * [mem] (signed).
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF7);
  EmitOperand(5, address);
}
976 
977 
void X86Assembler::mull(Register reg) {
  // mul r/m32 — F7 /4: EDX:EAX = EAX * reg (unsigned).
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF7);
  EmitOperand(4, Operand(reg));
}
983 
984 
void X86Assembler::mull(const Address& address) {
  // mul m32 — F7 /4: EDX:EAX = EAX * [mem] (unsigned).
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF7);
  EmitOperand(4, address);
}
990 
991 
void X86Assembler::sbbl(Register dst, Register src) {
  // sbb r32, r/m32 — 1B /r.
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x1B);
  EmitOperand(dst, Operand(src));
}
997 
998 
void X86Assembler::sbbl(Register reg, const Immediate& imm) {
  // sbb r/m32, imm — opcode extension /3 via EmitComplex (81/83 forms).
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitComplex(3, Operand(reg), imm);
}
1003 
1004 
// sbb dst, [address]: subtract memory operand with borrow.
void X86Assembler::sbbl(Register dst, const Address& address) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x1B);  // SBB r32, r/m32.
  EmitOperand(dst, address);
}
1010 
1011 
// inc reg: single-byte register increment (0x40 + reg encoding).
void X86Assembler::incl(Register reg) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x40 + reg);
}
1016 
1017 
// inc dword ptr [address].
void X86Assembler::incl(const Address& address) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xFF);
  EmitOperand(0, address);  // /0 selects INC in the 0xFF group.
}
1023 
1024 
// dec reg: single-byte register decrement (0x48 + reg encoding).
void X86Assembler::decl(Register reg) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x48 + reg);
}
1029 
1030 
// dec dword ptr [address].
void X86Assembler::decl(const Address& address) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xFF);
  EmitOperand(1, address);  // /1 selects DEC in the 0xFF group.
}
1036 
1037 
// shl reg, imm8: left shift by an immediate. /4 selects SHL.
void X86Assembler::shll(Register reg, const Immediate& imm) {
  EmitGenericShift(4, reg, imm);
}
1041 
1042 
// shl operand, CL: left shift by the count in CL. /4 selects SHL.
void X86Assembler::shll(Register operand, Register shifter) {
  EmitGenericShift(4, operand, shifter);
}
1046 
1047 
// shr reg, imm8: logical (unsigned) right shift. /5 selects SHR.
void X86Assembler::shrl(Register reg, const Immediate& imm) {
  EmitGenericShift(5, reg, imm);
}
1051 
1052 
// shr operand, CL: logical right shift by the count in CL. /5 selects SHR.
void X86Assembler::shrl(Register operand, Register shifter) {
  EmitGenericShift(5, operand, shifter);
}
1056 
1057 
// sar reg, imm8: arithmetic (sign-preserving) right shift. /7 selects SAR.
void X86Assembler::sarl(Register reg, const Immediate& imm) {
  EmitGenericShift(7, reg, imm);
}
1061 
1062 
// sar operand, CL: arithmetic right shift by the count in CL. /7 selects SAR.
void X86Assembler::sarl(Register operand, Register shifter) {
  EmitGenericShift(7, operand, shifter);
}
1066 
1067 
// shld dst, src, CL: shift dst left, filling vacated bits from src.
void X86Assembler::shld(Register dst, Register src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0xA5);  // SHLD r/m32, r32, CL.
  EmitRegisterOperand(src, dst);  // src goes in the reg field, dst in r/m.
}
1074 
1075 
// neg reg: two's-complement negation.
void X86Assembler::negl(Register reg) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF7);
  EmitOperand(3, Operand(reg));  // /3 selects NEG in the 0xF7 group.
}
1081 
1082 
// not reg: one's-complement (bitwise) negation.
void X86Assembler::notl(Register reg) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF7);
  EmitUint8(0xD0 | reg);  // ModRM with mod=11, /2 (NOT), r/m=reg.
}
1088 
1089 
// enter imm16, 0: allocate a stack frame of imm bytes, nesting level 0.
void X86Assembler::enter(const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xC8);
  CHECK(imm.is_uint16());  // ENTER only takes a 16-bit frame size.
  EmitUint8(imm.value() & 0xFF);         // Frame size, little-endian.
  EmitUint8((imm.value() >> 8) & 0xFF);
  EmitUint8(0x00);                       // Nesting level 0.
}
1098 
1099 
// leave: tear down the current stack frame (mov esp, ebp; pop ebp).
void X86Assembler::leave() {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xC9);
}
1104 
1105 
// ret: near return.
void X86Assembler::ret() {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xC3);
}
1110 
1111 
// ret imm16: near return, popping imm extra bytes of arguments.
void X86Assembler::ret(const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xC2);
  CHECK(imm.is_uint16());  // RET only takes a 16-bit pop count.
  EmitUint8(imm.value() & 0xFF);         // Little-endian imm16.
  EmitUint8((imm.value() >> 8) & 0xFF);
}
1119 
1120 
1121 
// nop: one-byte no-operation.
void X86Assembler::nop() {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x90);
}
1126 
1127 
// int3: software breakpoint trap.
void X86Assembler::int3() {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xCC);
}
1132 
1133 
// hlt: halt the processor (privileged; traps in user mode).
void X86Assembler::hlt() {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF4);
}
1138 
1139 
// Conditional jump to |label|. Bound labels (backward jumps) get the
// shortest encoding that fits; unbound labels always use the long form so
// the 32-bit displacement can be patched later via the link chain.
void X86Assembler::j(Condition condition, Label* label) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  if (label->IsBound()) {
    static const int kShortSize = 2;  // Jcc rel8: opcode + disp8.
    static const int kLongSize = 6;   // 0F Jcc rel32: 2 opcode bytes + disp32.
    int offset = label->Position() - buffer_.Size();
    CHECK_LE(offset, 0);  // Bound labels are always behind us.
    // Displacements are relative to the end of the instruction, hence the
    // subtraction of the instruction size.
    if (IsInt(8, offset - kShortSize)) {
      EmitUint8(0x70 + condition);
      EmitUint8((offset - kShortSize) & 0xFF);
    } else {
      EmitUint8(0x0F);
      EmitUint8(0x80 + condition);
      EmitInt32(offset - kLongSize);
    }
  } else {
    // Forward jump: emit long form and chain it on the label for patching.
    EmitUint8(0x0F);
    EmitUint8(0x80 + condition);
    EmitLabelLink(label);
  }
}
1161 
1162 
// jmp reg: indirect jump through a register.
void X86Assembler::jmp(Register reg) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xFF);
  EmitRegisterOperand(4, reg);  // /4 selects JMP in the 0xFF group.
}
1168 
// jmp [address]: indirect jump through memory.
void X86Assembler::jmp(const Address& address) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xFF);
  EmitOperand(4, address);  // /4 selects JMP in the 0xFF group.
}
1174 
// Unconditional jump to |label|. Same short/long selection strategy as
// j(): bound (backward) targets use rel8 when it fits; unbound targets
// always use the rel32 form so they can be patched when bound.
void X86Assembler::jmp(Label* label) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  if (label->IsBound()) {
    static const int kShortSize = 2;  // EB rel8.
    static const int kLongSize = 5;   // E9 rel32.
    int offset = label->Position() - buffer_.Size();
    CHECK_LE(offset, 0);  // Bound labels are always behind us.
    if (IsInt(8, offset - kShortSize)) {
      EmitUint8(0xEB);
      EmitUint8((offset - kShortSize) & 0xFF);
    } else {
      EmitUint8(0xE9);
      EmitInt32(offset - kLongSize);
    }
  } else {
    // Forward jump: emit long form and chain it on the label for patching.
    EmitUint8(0xE9);
    EmitLabelLink(label);
  }
}
1194 
1195 
// Emits the LOCK prefix; returns |this| so callers can chain the prefixed
// instruction, e.g. lock()->cmpxchgl(...).
X86Assembler* X86Assembler::lock() {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF0);
  return this;
}
1201 
1202 
// cmpxchg [address], reg: compare-and-exchange with EAX as the comparand.
void X86Assembler::cmpxchgl(const Address& address, Register reg) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0xB1);  // CMPXCHG r/m32, r32.
  EmitOperand(reg, address);
}
1209 
// mfence: full memory fence (0F AE F0).
void X86Assembler::mfence() {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0xAE);
  EmitUint8(0xF0);
}
1216 
// Emits the FS segment-override prefix; returns |this| so callers can chain
// the prefixed instruction, e.g. fs()->movl(...). Used for thread-local
// accesses via Address::Absolute(ThreadOffset).
X86Assembler* X86Assembler::fs() {
  // TODO: fs is a prefix and not an instruction
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x64);
  return this;
}
1223 
AddImmediate(Register reg,const Immediate & imm)1224 void X86Assembler::AddImmediate(Register reg, const Immediate& imm) {
1225   int value = imm.value();
1226   if (value > 0) {
1227     if (value == 1) {
1228       incl(reg);
1229     } else if (value != 0) {
1230       addl(reg, imm);
1231     }
1232   } else if (value < 0) {
1233     value = -value;
1234     if (value == 1) {
1235       decl(reg);
1236     } else if (value != 0) {
1237       subl(reg, Immediate(value));
1238     }
1239   }
1240 }
1241 
1242 
// Materializes a double constant into |dst| by pushing its two 32-bit
// halves onto the stack, loading with movsd, then popping the slots.
void X86Assembler::LoadDoubleConstant(XmmRegister dst, double value) {
  // TODO: Need to have a code constants table.
  int64_t constant = bit_cast<int64_t, double>(value);
  pushl(Immediate(High32Bits(constant)));  // Push high word first so the
  pushl(Immediate(Low32Bits(constant)));   // low word ends up at [ESP].
  movsd(dst, Address(ESP, 0));
  addl(ESP, Immediate(2 * kWordSize));     // Pop both words.
}
1251 
1252 
// Negates the float in |f| by XOR-ing its sign bit with a 16-byte-aligned
// static sign-mask constant.
void X86Assembler::FloatNegate(XmmRegister f) {
  static const struct {
    uint32_t a;
    uint32_t b;
    uint32_t c;
    uint32_t d;
  } float_negate_constant __attribute__((aligned(16))) =
      { 0x80000000, 0x00000000, 0x80000000, 0x00000000 };
  xorps(f, Address::Absolute(reinterpret_cast<uword>(&float_negate_constant)));
}
1263 
1264 
// Negates the double in |d| by XOR-ing its sign bit with a 16-byte-aligned
// static sign-mask constant.
void X86Assembler::DoubleNegate(XmmRegister d) {
  static const struct {
    uint64_t a;
    uint64_t b;
  } double_negate_constant __attribute__((aligned(16))) =
      {0x8000000000000000LL, 0x8000000000000000LL};
  xorpd(d, Address::Absolute(reinterpret_cast<uword>(&double_negate_constant)));
}
1273 
1274 
// Computes |reg| = abs(reg) for a double by AND-ing away the sign bit with
// a 16-byte-aligned static mask constant.
void X86Assembler::DoubleAbs(XmmRegister reg) {
  static const struct {
    uint64_t a;
    uint64_t b;
  } double_abs_constant __attribute__((aligned(16))) =
      {0x7FFFFFFFFFFFFFFFLL, 0x7FFFFFFFFFFFFFFFLL};
  andpd(reg, Address::Absolute(reinterpret_cast<uword>(&double_abs_constant)));
}
1283 
1284 
// Pads the buffer with nops until (offset + current position) is a
// multiple of |alignment| (which must be a power of two).
void X86Assembler::Align(int alignment, int offset) {
  CHECK(IsPowerOfTwo(alignment));
  // Emit nop instruction until the real position is aligned.
  while (((offset + buffer_.GetPosition()) & (alignment-1)) != 0) {
    nop();
  }
}
1292 
1293 
// Binds |label| to the current buffer position and back-patches every
// forward reference chained through it. Each link slot holds the position
// of the next link; it is overwritten with the final rel32 displacement
// (relative to the end of the 4-byte field).
void X86Assembler::Bind(Label* label) {
  int bound = buffer_.Size();
  CHECK(!label->IsBound());  // Labels can only be bound once.
  while (label->IsLinked()) {
    int position = label->LinkPosition();
    int next = buffer_.Load<int32_t>(position);
    buffer_.Store<int32_t>(position, bound - (position + 4));
    label->position_ = next;
  }
  label->BindTo(bound);
}
1305 
1306 
// Debugging aid: embeds |message|'s address in a harmless test instruction
// and traps, so the message pointer is visible at the breakpoint.
void X86Assembler::Stop(const char* message) {
  // Emit the message address as immediate operand in the test rax instruction,
  // followed by the int3 instruction.
  // Execution can be resumed with the 'cont' command in gdb.
  testl(EAX, Immediate(reinterpret_cast<int32_t>(message)));
  int3();
}
1314 
1315 
// Emits a pre-encoded operand (ModRM + optional SIB/displacement), patching
// |reg_or_opcode| (a register number or an opcode-extension /digit) into
// the reg field of the ModRM byte.
void X86Assembler::EmitOperand(int reg_or_opcode, const Operand& operand) {
  CHECK_GE(reg_or_opcode, 0);
  CHECK_LT(reg_or_opcode, 8);  // The ModRM reg field is 3 bits.
  const int length = operand.length_;
  CHECK_GT(length, 0);
  // Emit the ModRM byte updated with the given reg value.
  CHECK_EQ(operand.encoding_[0] & 0x38, 0);  // Reg field must start empty.
  EmitUint8(operand.encoding_[0] + (reg_or_opcode << 3));
  // Emit the rest of the encoded operand.
  for (int i = 1; i < length; i++) {
    EmitUint8(operand.encoding_[i]);
  }
}
1329 
1330 
// Emits a 32-bit immediate operand.
void X86Assembler::EmitImmediate(const Immediate& imm) {
  EmitInt32(imm.value());
}
1334 
1335 
// Emits an ALU instruction from the immediate group (add/or/adc/sbb/and/
// sub/xor/cmp, selected by |reg_or_opcode|), choosing the shortest of the
// sign-extended imm8 form, the EAX short form, or the general imm32 form.
void X86Assembler::EmitComplex(int reg_or_opcode,
                               const Operand& operand,
                               const Immediate& immediate) {
  CHECK_GE(reg_or_opcode, 0);
  CHECK_LT(reg_or_opcode, 8);  // /digit opcode extension is 3 bits.
  if (immediate.is_int8()) {
    // Use sign-extended 8-bit immediate.
    EmitUint8(0x83);
    EmitOperand(reg_or_opcode, operand);
    EmitUint8(immediate.value() & 0xFF);
  } else if (operand.IsRegister(EAX)) {
    // Use short form if the destination is eax.
    EmitUint8(0x05 + (reg_or_opcode << 3));
    EmitImmediate(immediate);
  } else {
    EmitUint8(0x81);
    EmitOperand(reg_or_opcode, operand);
    EmitImmediate(immediate);
  }
}
1356 
1357 
// Emits a rel32 displacement to |label|: the final displacement if the
// label is already bound (backward reference), otherwise a link in the
// label's patch chain. |instruction_size| is the full instruction length,
// needed because displacements are relative to the next instruction.
void X86Assembler::EmitLabel(Label* label, int instruction_size) {
  if (label->IsBound()) {
    int offset = label->Position() - buffer_.Size();
    CHECK_LE(offset, 0);  // Bound labels are always behind us.
    EmitInt32(offset - instruction_size);
  } else {
    EmitLabelLink(label);
  }
}
1367 
1368 
// Emits a placeholder int32 that chains this site onto |label|'s list of
// positions to patch; the stored value is the previous head of the chain.
void X86Assembler::EmitLabelLink(Label* label) {
  CHECK(!label->IsBound());
  int position = buffer_.Size();
  EmitInt32(label->position_);
  label->LinkTo(position);
}
1375 
1376 
// Emits a shift-by-immediate instruction; |reg_or_opcode| is the /digit
// selecting SHL/SHR/SAR etc. Uses the dedicated shift-by-1 opcode (0xD1)
// when possible, otherwise the imm8 form (0xC1).
void X86Assembler::EmitGenericShift(int reg_or_opcode,
                                    Register reg,
                                    const Immediate& imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  CHECK(imm.is_int8());  // Shift counts are at most 8 bits.
  if (imm.value() == 1) {
    EmitUint8(0xD1);  // Shift-by-1 form, no immediate byte.
    EmitOperand(reg_or_opcode, Operand(reg));
  } else {
    EmitUint8(0xC1);  // Shift-by-imm8 form.
    EmitOperand(reg_or_opcode, Operand(reg));
    EmitUint8(imm.value() & 0xFF);
  }
}
1391 
1392 
// Emits a shift-by-CL instruction (0xD3); the shift count register must be
// ECX, as required by the x86 encoding.
void X86Assembler::EmitGenericShift(int reg_or_opcode,
                                    Register operand,
                                    Register shifter) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  CHECK_EQ(shifter, ECX);  // x86 only shifts by CL.
  EmitUint8(0xD3);
  EmitOperand(reg_or_opcode, Operand(operand));
}
1401 
// Emits the managed-method prologue: pushes callee-save |spill_regs|,
// allocates the remainder of the frame, pushes the method pointer, then
// spills incoming argument registers to their stack slots.
void X86Assembler::BuildFrame(size_t frame_size, ManagedRegister method_reg,
                              const std::vector<ManagedRegister>& spill_regs,
                              const std::vector<ManagedRegister>& entry_spills) {
  CHECK_ALIGNED(frame_size, kStackAlignment);
  // Push in reverse order so spill_regs[0] ends up deepest on the stack.
  for (int i = spill_regs.size() - 1; i >= 0; --i) {
    pushl(spill_regs.at(i).AsX86().AsCpuRegister());
  }
  // return address then method on stack
  // Grow the stack by frame_size minus what the pushes above, the method
  // push below, and the already-present return address account for.
  addl(ESP, Immediate(-frame_size + (spill_regs.size() * kPointerSize) +
                      kPointerSize /*method*/ + kPointerSize /*return address*/));
  pushl(method_reg.AsX86().AsCpuRegister());
  // Spill incoming register arguments to the caller's out-args area, which
  // sits just above this frame's return address.
  for (size_t i = 0; i < entry_spills.size(); ++i) {
    movl(Address(ESP, frame_size + kPointerSize + (i * kPointerSize)),
         entry_spills.at(i).AsX86().AsCpuRegister());
  }
}
1418 
// Emits the managed-method epilogue: releases the frame (leaving the spill
// area), restores callee-save |spill_regs|, and returns.
void X86Assembler::RemoveFrame(size_t frame_size,
                            const std::vector<ManagedRegister>& spill_regs) {
  CHECK_ALIGNED(frame_size, kStackAlignment);
  // -kPointerSize skips the saved method pointer pushed by BuildFrame.
  addl(ESP, Immediate(frame_size - (spill_regs.size() * kPointerSize) - kPointerSize));
  for (size_t i = 0; i < spill_regs.size(); ++i) {
    popl(spill_regs.at(i).AsX86().AsCpuRegister());
  }
  ret();
}
1428 
// Grows the current frame by |adjust| bytes (stack grows downward).
void X86Assembler::IncreaseFrameSize(size_t adjust) {
  CHECK_ALIGNED(adjust, kStackAlignment);
  addl(ESP, Immediate(-adjust));
}
1433 
// Shrinks the current frame by |adjust| bytes.
void X86Assembler::DecreaseFrameSize(size_t adjust) {
  CHECK_ALIGNED(adjust, kStackAlignment);
  addl(ESP, Immediate(adjust));
}
1438 
// Stores |msrc| into the frame slot at |offs|, dispatching on the kind of
// register: CPU (4 bytes), register pair (8 bytes), X87 (pops the FP
// stack), or XMM. A no-register source is only legal with size 0.
void X86Assembler::Store(FrameOffset offs, ManagedRegister msrc, size_t size) {
  X86ManagedRegister src = msrc.AsX86();
  if (src.IsNoRegister()) {
    CHECK_EQ(0u, size);
  } else if (src.IsCpuRegister()) {
    CHECK_EQ(4u, size);
    movl(Address(ESP, offs), src.AsCpuRegister());
  } else if (src.IsRegisterPair()) {
    CHECK_EQ(8u, size);
    // Low word first, high word at offs + 4.
    movl(Address(ESP, offs), src.AsRegisterPairLow());
    movl(Address(ESP, FrameOffset(offs.Int32Value()+4)),
         src.AsRegisterPairHigh());
  } else if (src.IsX87Register()) {
    // fstps/fstpl pop the X87 stack after storing.
    if (size == 4) {
      fstps(Address(ESP, offs));
    } else {
      fstpl(Address(ESP, offs));
    }
  } else {
    CHECK(src.IsXmmRegister());
    if (size == 4) {
      movss(Address(ESP, offs), src.AsXmmRegister());
    } else {
      movsd(Address(ESP, offs), src.AsXmmRegister());
    }
  }
}
1466 
// Stores an object reference (must be in a CPU register) to a frame slot.
void X86Assembler::StoreRef(FrameOffset dest, ManagedRegister msrc) {
  X86ManagedRegister src = msrc.AsX86();
  CHECK(src.IsCpuRegister());
  movl(Address(ESP, dest), src.AsCpuRegister());
}
1472 
// Stores a raw pointer (must be in a CPU register) to a frame slot.
void X86Assembler::StoreRawPtr(FrameOffset dest, ManagedRegister msrc) {
  X86ManagedRegister src = msrc.AsX86();
  CHECK(src.IsCpuRegister());
  movl(Address(ESP, dest), src.AsCpuRegister());
}
1478 
// Stores a 32-bit immediate to a frame slot; no scratch register needed
// on x86, so the scratch parameter is unnamed and unused.
void X86Assembler::StoreImmediateToFrame(FrameOffset dest, uint32_t imm,
                                         ManagedRegister) {
  movl(Address(ESP, dest), Immediate(imm));
}
1483 
// Stores a 32-bit immediate into the current Thread at |dest|, via the FS
// segment; no scratch register needed on x86.
void X86Assembler::StoreImmediateToThread(ThreadOffset dest, uint32_t imm,
                                          ManagedRegister) {
  fs()->movl(Address::Absolute(dest), Immediate(imm));
}
1488 
// Computes the absolute address of frame slot |fr_offs| into |mscratch|
// and stores it into the current Thread at |thr_offs|.
void X86Assembler::StoreStackOffsetToThread(ThreadOffset thr_offs,
                                            FrameOffset fr_offs,
                                            ManagedRegister mscratch) {
  X86ManagedRegister scratch = mscratch.AsX86();
  CHECK(scratch.IsCpuRegister());
  leal(scratch.AsCpuRegister(), Address(ESP, fr_offs));  // Address, not value.
  fs()->movl(Address::Absolute(thr_offs), scratch.AsCpuRegister());
}
1497 
// Records the current ESP into the Thread structure at |thr_offs|.
void X86Assembler::StoreStackPointerToThread(ThreadOffset thr_offs) {
  fs()->movl(Address::Absolute(thr_offs), ESP);
}
1501 
// Stores the (eventual) address of |lbl| into the Thread at |thr_offs|.
void X86Assembler::StoreLabelToThread(ThreadOffset thr_offs, Label* lbl) {
  fs()->movl(Address::Absolute(thr_offs), lbl);
}
1505 
// Not needed on x86; the spanning-store case only arises on ARM.
void X86Assembler::StoreSpanning(FrameOffset /*dst*/, ManagedRegister /*src*/,
                                 FrameOffset /*in_off*/, ManagedRegister /*scratch*/) {
  UNIMPLEMENTED(FATAL);  // this case only currently exists for ARM
}
1510 
// Loads |size| bytes from frame slot |src| into |mdest|, dispatching on
// the destination register kind (mirror of Store above).
void X86Assembler::Load(ManagedRegister mdest, FrameOffset src, size_t size) {
  X86ManagedRegister dest = mdest.AsX86();
  if (dest.IsNoRegister()) {
    CHECK_EQ(0u, size);
  } else if (dest.IsCpuRegister()) {
    CHECK_EQ(4u, size);
    movl(dest.AsCpuRegister(), Address(ESP, src));
  } else if (dest.IsRegisterPair()) {
    CHECK_EQ(8u, size);
    // Low word from src, high word from src + 4.
    movl(dest.AsRegisterPairLow(), Address(ESP, src));
    movl(dest.AsRegisterPairHigh(), Address(ESP, FrameOffset(src.Int32Value()+4)));
  } else if (dest.IsX87Register()) {
    // flds/fldl push onto the X87 stack.
    if (size == 4) {
      flds(Address(ESP, src));
    } else {
      fldl(Address(ESP, src));
    }
  } else {
    CHECK(dest.IsXmmRegister());
    if (size == 4) {
      movss(dest.AsXmmRegister(), Address(ESP, src));
    } else {
      movsd(dest.AsXmmRegister(), Address(ESP, src));
    }
  }
}
1537 
// Loads |size| bytes from the current Thread at |src| (via the FS segment)
// into |mdest|; same register-kind dispatch as the frame-offset overload.
void X86Assembler::Load(ManagedRegister mdest, ThreadOffset src, size_t size) {
  X86ManagedRegister dest = mdest.AsX86();
  if (dest.IsNoRegister()) {
    CHECK_EQ(0u, size);
  } else if (dest.IsCpuRegister()) {
    CHECK_EQ(4u, size);
    fs()->movl(dest.AsCpuRegister(), Address::Absolute(src));
  } else if (dest.IsRegisterPair()) {
    CHECK_EQ(8u, size);
    // Low word from src, high word from src + 4.
    fs()->movl(dest.AsRegisterPairLow(), Address::Absolute(src));
    fs()->movl(dest.AsRegisterPairHigh(), Address::Absolute(ThreadOffset(src.Int32Value()+4)));
  } else if (dest.IsX87Register()) {
    if (size == 4) {
      fs()->flds(Address::Absolute(src));
    } else {
      fs()->fldl(Address::Absolute(src));
    }
  } else {
    CHECK(dest.IsXmmRegister());
    if (size == 4) {
      fs()->movss(dest.AsXmmRegister(), Address::Absolute(src));
    } else {
      fs()->movsd(dest.AsXmmRegister(), Address::Absolute(src));
    }
  }
}
1564 
// Loads an object reference from frame slot |src| into a CPU register.
void X86Assembler::LoadRef(ManagedRegister mdest, FrameOffset  src) {
  X86ManagedRegister dest = mdest.AsX86();
  CHECK(dest.IsCpuRegister());
  movl(dest.AsCpuRegister(), Address(ESP, src));
}
1570 
LoadRef(ManagedRegister mdest,ManagedRegister base,MemberOffset offs)1571 void X86Assembler::LoadRef(ManagedRegister mdest, ManagedRegister base,
1572                            MemberOffset offs) {
1573   X86ManagedRegister dest = mdest.AsX86();
1574   CHECK(dest.IsCpuRegister() && dest.IsCpuRegister());
1575   movl(dest.AsCpuRegister(), Address(base.AsX86().AsCpuRegister(), offs));
1576 }
1577 
LoadRawPtr(ManagedRegister mdest,ManagedRegister base,Offset offs)1578 void X86Assembler::LoadRawPtr(ManagedRegister mdest, ManagedRegister base,
1579                               Offset offs) {
1580   X86ManagedRegister dest = mdest.AsX86();
1581   CHECK(dest.IsCpuRegister() && dest.IsCpuRegister());
1582   movl(dest.AsCpuRegister(), Address(base.AsX86().AsCpuRegister(), offs));
1583 }
1584 
// Loads a raw pointer from the current Thread at |offs| (via FS segment).
void X86Assembler::LoadRawPtrFromThread(ManagedRegister mdest,
                                        ThreadOffset offs) {
  X86ManagedRegister dest = mdest.AsX86();
  CHECK(dest.IsCpuRegister());
  fs()->movl(dest.AsCpuRegister(), Address::Absolute(offs));
}
1591 
// Sign-extends the low |size| bytes (1 or 2) of |mreg| to 32 bits in place.
void X86Assembler::SignExtend(ManagedRegister mreg, size_t size) {
  X86ManagedRegister reg = mreg.AsX86();
  CHECK(size == 1 || size == 2) << size;
  CHECK(reg.IsCpuRegister()) << reg;
  if (size == 1) {
    // Byte source must be encodable as a byte register (EAX..EBX).
    movsxb(reg.AsCpuRegister(), reg.AsByteRegister());
  } else {
    movsxw(reg.AsCpuRegister(), reg.AsCpuRegister());
  }
}
1602 
// Zero-extends the low |size| bytes (1 or 2) of |mreg| to 32 bits in place.
void X86Assembler::ZeroExtend(ManagedRegister mreg, size_t size) {
  X86ManagedRegister reg = mreg.AsX86();
  CHECK(size == 1 || size == 2) << size;
  CHECK(reg.IsCpuRegister()) << reg;
  if (size == 1) {
    // Byte source must be encodable as a byte register (EAX..EBX).
    movzxb(reg.AsCpuRegister(), reg.AsByteRegister());
  } else {
    movzxw(reg.AsCpuRegister(), reg.AsCpuRegister());
  }
}
1613 
// Moves |msrc| to |mdest|. Supports CPU-to-CPU moves and X87-to-XMM moves
// (the latter bounce through a temporary stack slot, popping ST0); other
// combinations are not yet implemented. Equal registers are a no-op.
void X86Assembler::Move(ManagedRegister mdest, ManagedRegister msrc, size_t size) {
  X86ManagedRegister dest = mdest.AsX86();
  X86ManagedRegister src = msrc.AsX86();
  if (!dest.Equals(src)) {
    if (dest.IsCpuRegister() && src.IsCpuRegister()) {
      movl(dest.AsCpuRegister(), src.AsCpuRegister());
    } else if (src.IsX87Register() && dest.IsXmmRegister()) {
      // Pass via stack and pop X87 register
      subl(ESP, Immediate(16));
      if (size == 4) {
        CHECK_EQ(src.AsX87Register(), ST0);  // Only the FP stack top can be popped.
        fstps(Address(ESP, 0));
        movss(dest.AsXmmRegister(), Address(ESP, 0));
      } else {
        CHECK_EQ(src.AsX87Register(), ST0);
        fstpl(Address(ESP, 0));
        movsd(dest.AsXmmRegister(), Address(ESP, 0));
      }
      addl(ESP, Immediate(16));
    } else {
      // TODO: x87, SSE
      UNIMPLEMENTED(FATAL) << ": Move " << dest << ", " << src;
    }
  }
}
1639 
// Copies an object reference between frame slots via a scratch register.
void X86Assembler::CopyRef(FrameOffset dest, FrameOffset src,
                           ManagedRegister mscratch) {
  X86ManagedRegister scratch = mscratch.AsX86();
  CHECK(scratch.IsCpuRegister());
  movl(scratch.AsCpuRegister(), Address(ESP, src));
  movl(Address(ESP, dest), scratch.AsCpuRegister());
}
1647 
// Copies a raw pointer from the current Thread (|thr_offs|, via FS) into
// frame slot |fr_offs|, through a scratch register.
void X86Assembler::CopyRawPtrFromThread(FrameOffset fr_offs,
                                        ThreadOffset thr_offs,
                                        ManagedRegister mscratch) {
  X86ManagedRegister scratch = mscratch.AsX86();
  CHECK(scratch.IsCpuRegister());
  fs()->movl(scratch.AsCpuRegister(), Address::Absolute(thr_offs));
  Store(fr_offs, scratch, 4);
}
1656 
// Copies a raw pointer from frame slot |fr_offs| into the current Thread
// at |thr_offs| (via FS), through a scratch register.
void X86Assembler::CopyRawPtrToThread(ThreadOffset thr_offs,
                                      FrameOffset fr_offs,
                                      ManagedRegister mscratch) {
  X86ManagedRegister scratch = mscratch.AsX86();
  CHECK(scratch.IsCpuRegister());
  Load(scratch, fr_offs, 4);
  fs()->movl(Address::Absolute(thr_offs), scratch.AsCpuRegister());
}
1665 
// Copies |size| bytes between frame slots via |mscratch|. An 8-byte copy
// through a single CPU register is split into two 4-byte copies.
void X86Assembler::Copy(FrameOffset dest, FrameOffset src,
                        ManagedRegister mscratch,
                        size_t size) {
  X86ManagedRegister scratch = mscratch.AsX86();
  if (scratch.IsCpuRegister() && size == 8) {
    Load(scratch, src, 4);
    Store(dest, scratch, 4);
    Load(scratch, FrameOffset(src.Int32Value() + 4), 4);
    Store(FrameOffset(dest.Int32Value() + 4), scratch, 4);
  } else {
    Load(scratch, src, size);
    Store(dest, scratch, size);
  }
}
1680 
// Copy from [src_base + src_offset] to a frame slot: not yet implemented.
void X86Assembler::Copy(FrameOffset /*dst*/, ManagedRegister /*src_base*/, Offset /*src_offset*/,
                        ManagedRegister /*scratch*/, size_t /*size*/) {
  UNIMPLEMENTED(FATAL);
}
1685 
// Copies 4 bytes from frame slot |src| to [dest_base + dest_offset] using
// push/pop, so no scratch register is required (and none may be passed).
void X86Assembler::Copy(ManagedRegister dest_base, Offset dest_offset, FrameOffset src,
                        ManagedRegister scratch, size_t size) {
  CHECK(scratch.IsNoRegister());
  CHECK_EQ(size, 4u);
  pushl(Address(ESP, src));
  popl(Address(dest_base.AsX86().AsCpuRegister(), dest_offset));
}
1693 
// Copies 4 bytes from [*(ESP + src_base) + src_offset] to frame slot
// |dest|, dereferencing the base pointer stored in the frame first.
void X86Assembler::Copy(FrameOffset dest, FrameOffset src_base, Offset src_offset,
                        ManagedRegister mscratch, size_t size) {
  Register scratch = mscratch.AsX86().AsCpuRegister();
  CHECK_EQ(size, 4u);
  movl(scratch, Address(ESP, src_base));      // Load the base pointer.
  movl(scratch, Address(scratch, src_offset));  // Load the value.
  movl(Address(ESP, dest), scratch);
}
1702 
// Copies 4 bytes from [src + src_offset] to [dest + dest_offset] using
// push/pop, so no scratch register is required (and none may be passed).
void X86Assembler::Copy(ManagedRegister dest, Offset dest_offset,
                        ManagedRegister src, Offset src_offset,
                        ManagedRegister scratch, size_t size) {
  CHECK_EQ(size, 4u);
  CHECK(scratch.IsNoRegister());
  pushl(Address(src.AsX86().AsCpuRegister(), src_offset));
  popl(Address(dest.AsX86().AsCpuRegister(), dest_offset));
}
1711 
// Copies 4 bytes between two offsets off the same frame-held base pointer
// (dest and src frame slots must coincide) via push/pop.
void X86Assembler::Copy(FrameOffset dest, Offset dest_offset, FrameOffset src, Offset src_offset,
                        ManagedRegister mscratch, size_t size) {
  Register scratch = mscratch.AsX86().AsCpuRegister();
  CHECK_EQ(size, 4u);
  CHECK_EQ(dest.Int32Value(), src.Int32Value());  // Same base pointer slot.
  movl(scratch, Address(ESP, src));               // Load the base pointer.
  pushl(Address(scratch, src_offset));
  popl(Address(scratch, dest_offset));
}
1721 
// Full memory barrier; compiled away entirely on non-SMP builds.
void X86Assembler::MemoryBarrier(ManagedRegister) {
#if ANDROID_SMP != 0
  mfence();
#endif
}
1727 
// Computes a SIRT (stack indirect reference table) entry address into
// |mout_reg| for the reference currently in |min_reg|. When null is
// allowed, a null input produces a null output instead of a SIRT address.
void X86Assembler::CreateSirtEntry(ManagedRegister mout_reg,
                                   FrameOffset sirt_offset,
                                   ManagedRegister min_reg, bool null_allowed) {
  X86ManagedRegister out_reg = mout_reg.AsX86();
  X86ManagedRegister in_reg = min_reg.AsX86();
  CHECK(in_reg.IsCpuRegister());
  CHECK(out_reg.IsCpuRegister());
  VerifyObject(in_reg, null_allowed);
  if (null_allowed) {
    Label null_arg;
    // Pre-zero the output so a null input falls through with null.
    if (!out_reg.Equals(in_reg)) {
      xorl(out_reg.AsCpuRegister(), out_reg.AsCpuRegister());
    }
    testl(in_reg.AsCpuRegister(), in_reg.AsCpuRegister());
    j(kZero, &null_arg);
    leal(out_reg.AsCpuRegister(), Address(ESP, sirt_offset));
    Bind(&null_arg);
  } else {
    leal(out_reg.AsCpuRegister(), Address(ESP, sirt_offset));
  }
}
1749 
// Frame-slot variant of CreateSirtEntry: writes either the SIRT entry
// address or null (if the entry holds null and null is allowed) to
// |out_off|, using |mscratch| as the intermediate register.
void X86Assembler::CreateSirtEntry(FrameOffset out_off,
                                   FrameOffset sirt_offset,
                                   ManagedRegister mscratch,
                                   bool null_allowed) {
  X86ManagedRegister scratch = mscratch.AsX86();
  CHECK(scratch.IsCpuRegister());
  if (null_allowed) {
    Label null_arg;
    // Load the stored reference; if it is null, store null (already in
    // scratch) rather than the entry's address.
    movl(scratch.AsCpuRegister(), Address(ESP, sirt_offset));
    testl(scratch.AsCpuRegister(), scratch.AsCpuRegister());
    j(kZero, &null_arg);
    leal(scratch.AsCpuRegister(), Address(ESP, sirt_offset));
    Bind(&null_arg);
  } else {
    leal(scratch.AsCpuRegister(), Address(ESP, sirt_offset));
  }
  Store(out_off, scratch, 4);
}
1768 
1769 // Given a SIRT entry, load the associated reference.
// Given a SIRT entry, load the associated reference.
// A null entry pointer yields a null reference (out is pre-zeroed and the
// dereference is skipped).
void X86Assembler::LoadReferenceFromSirt(ManagedRegister mout_reg,
                                         ManagedRegister min_reg) {
  X86ManagedRegister out_reg = mout_reg.AsX86();
  X86ManagedRegister in_reg = min_reg.AsX86();
  CHECK(out_reg.IsCpuRegister());
  CHECK(in_reg.IsCpuRegister());
  Label null_arg;
  if (!out_reg.Equals(in_reg)) {
    xorl(out_reg.AsCpuRegister(), out_reg.AsCpuRegister());
  }
  testl(in_reg.AsCpuRegister(), in_reg.AsCpuRegister());
  j(kZero, &null_arg);
  movl(out_reg.AsCpuRegister(), Address(in_reg.AsCpuRegister(), 0));
  Bind(&null_arg);
}
1785 
// Reference validation is currently a no-op on x86.
void X86Assembler::VerifyObject(ManagedRegister /*src*/, bool /*could_be_null*/) {
  // TODO: not validating references
}
1789 
// Reference validation is currently a no-op on x86.
void X86Assembler::VerifyObject(FrameOffset /*src*/, bool /*could_be_null*/) {
  // TODO: not validating references
}
1793 
// Emits an indirect call through [base + offset]; no scratch needed.
void X86Assembler::Call(ManagedRegister mbase, Offset offset, ManagedRegister) {
  X86ManagedRegister base = mbase.AsX86();
  CHECK(base.IsCpuRegister());
  call(Address(base.AsCpuRegister(), offset.Int32Value()));
  // TODO: place reference map on call
}
1800 
Call(FrameOffset base,Offset offset,ManagedRegister mscratch)1801 void X86Assembler::Call(FrameOffset base, Offset offset, ManagedRegister mscratch) {
1802   Register scratch = mscratch.AsX86().AsCpuRegister();
1803   movl(scratch, Address(ESP, base));
1804   call(Address(scratch, offset));
1805 }
1806 
// Emit a call through an entry in the current thread, addressed fs-relative
// (the fs segment base points at the Thread object on x86).
void X86Assembler::Call(ThreadOffset offset, ManagedRegister /*mscratch*/) {
  fs()->call(Address::Absolute(offset));
}
1810 
GetCurrentThread(ManagedRegister tr)1811 void X86Assembler::GetCurrentThread(ManagedRegister tr) {
1812   fs()->movl(tr.AsX86().AsCpuRegister(),
1813              Address::Absolute(Thread::SelfOffset()));
1814 }
1815 
GetCurrentThread(FrameOffset offset,ManagedRegister mscratch)1816 void X86Assembler::GetCurrentThread(FrameOffset offset,
1817                                     ManagedRegister mscratch) {
1818   X86ManagedRegister scratch = mscratch.AsX86();
1819   fs()->movl(scratch.AsCpuRegister(), Address::Absolute(Thread::SelfOffset()));
1820   movl(Address(ESP, offset), scratch.AsCpuRegister());
1821 }
1822 
ExceptionPoll(ManagedRegister,size_t stack_adjust)1823 void X86Assembler::ExceptionPoll(ManagedRegister /*scratch*/, size_t stack_adjust) {
1824   X86ExceptionSlowPath* slow = new X86ExceptionSlowPath(stack_adjust);
1825   buffer_.EnqueueSlowPath(slow);
1826   fs()->cmpl(Address::Absolute(Thread::ExceptionOffset()), Immediate(0));
1827   j(kNotEqual, slow->Entry());
1828 }
1829 
Emit(Assembler * sasm)1830 void X86ExceptionSlowPath::Emit(Assembler *sasm) {
1831   X86Assembler* sp_asm = down_cast<X86Assembler*>(sasm);
1832 #define __ sp_asm->
1833   __ Bind(&entry_);
1834   // Note: the return value is dead
1835   if (stack_adjust_ != 0) {  // Fix up the frame.
1836     __ DecreaseFrameSize(stack_adjust_);
1837   }
1838   // Pass exception as argument in EAX
1839   __ fs()->movl(EAX, Address::Absolute(Thread::ExceptionOffset()));
1840   __ fs()->call(Address::Absolute(QUICK_ENTRYPOINT_OFFSET(pDeliverException)));
1841   // this call should never return
1842   __ int3();
1843 #undef __
1844 }
1845 
1846 }  // namespace x86
1847 }  // namespace art
1848