• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 /*
2  * Tiny Code Generator for QEMU
3  *
4  * Copyright (c) 2008 Fabrice Bellard
5  *
6  * Permission is hereby granted, free of charge, to any person obtaining a copy
7  * of this software and associated documentation files (the "Software"), to deal
8  * in the Software without restriction, including without limitation the rights
9  * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
10  * copies of the Software, and to permit persons to whom the Software is
11  * furnished to do so, subject to the following conditions:
12  *
13  * The above copyright notice and this permission notice shall be included in
14  * all copies or substantial portions of the Software.
15  *
16  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
17  * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
18  * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
19  * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
20  * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
21  * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
22  * THE SOFTWARE.
23  */
/* Printable register names, indexed by TCG register number (which
   matches the hardware encoding rax=0 .. r15=15); used for debug
   dumps of generated code.  */
const char *tcg_target_reg_names[TCG_TARGET_NB_REGS] = {
    "%rax",
    "%rcx",
    "%rdx",
    "%rbx",
    "%rsp",
    "%rbp",
    "%rsi",
    "%rdi",
    "%r8",
    "%r9",
    "%r10",
    "%r11",
    "%r12",
    "%r13",
    "%r14",
    "%r15",
};
42 
/* Order in which the register allocator tries registers.  The first
   group (rdi..r11) are the SysV AMD64 call-clobbered argument and
   scratch registers; the second group (rbp..r15) are call-saved.  */
int tcg_target_reg_alloc_order[] = {
    TCG_REG_RDI,
    TCG_REG_RSI,
    TCG_REG_RDX,
    TCG_REG_RCX,
    TCG_REG_R8,
    TCG_REG_R9,
    TCG_REG_RAX,
    TCG_REG_R10,
    TCG_REG_R11,

    /* call-saved registers */
    TCG_REG_RBP,
    TCG_REG_RBX,
    TCG_REG_R12,
    TCG_REG_R13,
    TCG_REG_R14,
    TCG_REG_R15,
};
61 
/* Registers carrying the first six integer function arguments, in
   SysV AMD64 ABI order.  */
const int tcg_target_call_iarg_regs[6] = {
    TCG_REG_RDI,
    TCG_REG_RSI,
    TCG_REG_RDX,
    TCG_REG_RCX,
    TCG_REG_R8,
    TCG_REG_R9,
};
70 
/* Registers carrying an integer function result: low part in rax,
   second/high part in rdx.  */
const int tcg_target_call_oarg_regs[2] = {
    TCG_REG_RAX,
    TCG_REG_RDX
};
75 
/* Target of the "jmp tb_ret_addr" emitted for INDEX_op_exit_tb;
   presumably set when the TB prologue/epilogue is generated (outside
   this chunk) -- TODO confirm.  */
static uint8_t *tb_ret_addr;
77 
/* Resolve a relocation of the given TYPE at CODE_PTR, patching the
   32-bit field there with VALUE + ADDEND.  Aborts if the resolved
   value does not fit the relocation's range, or on an unknown type.  */
static void patch_reloc(uint8_t *code_ptr, int type,
                        tcg_target_long value, tcg_target_long addend)
{
    tcg_target_long v = value + addend;

    switch (type) {
    case R_X86_64_32:
        /* 32-bit absolute, zero-extended */
        if (v != (uint32_t)v) {
            tcg_abort();
        }
        *(uint32_t *)code_ptr = v;
        break;
    case R_X86_64_32S:
        /* 32-bit absolute, sign-extended */
        if (v != (int32_t)v) {
            tcg_abort();
        }
        *(uint32_t *)code_ptr = v;
        break;
    case R_386_PC32:
        /* 32-bit pc-relative (relative to the patched field itself) */
        v -= (long)code_ptr;
        if (v != (int32_t)v) {
            tcg_abort();
        }
        *(uint32_t *)code_ptr = v;
        break;
    default:
        tcg_abort();
    }
}
103 
/* Maximum number of registers used for input function arguments.
   The SysV AMD64 ABI always provides 6 integer argument registers;
   FLAGS is ignored.  */
static inline int tcg_target_get_call_iarg_regs_count(int flags)
{
    return 6;
}
109 
/* Parse target specific constraints.  Consumes one constraint letter
   from *pct_str, records it in *ct, and advances *pct_str past it.
   Returns 0 on success, -1 on an unknown letter.  */
static int target_parse_constraint(TCGArgConstraint *ct, const char **pct_str)
{
    const char *ct_str;

    ct_str = *pct_str;
    switch(ct_str[0]) {
    case 'a': /* exactly %rax */
        ct->ct |= TCG_CT_REG;
        tcg_regset_set_reg(ct->u.regs, TCG_REG_RAX);
        break;
    case 'b': /* exactly %rbx */
        ct->ct |= TCG_CT_REG;
        tcg_regset_set_reg(ct->u.regs, TCG_REG_RBX);
        break;
    case 'c': /* exactly %rcx */
        ct->ct |= TCG_CT_REG;
        tcg_regset_set_reg(ct->u.regs, TCG_REG_RCX);
        break;
    case 'd': /* exactly %rdx */
        ct->ct |= TCG_CT_REG;
        tcg_regset_set_reg(ct->u.regs, TCG_REG_RDX);
        break;
    case 'S': /* exactly %rsi */
        ct->ct |= TCG_CT_REG;
        tcg_regset_set_reg(ct->u.regs, TCG_REG_RSI);
        break;
    case 'D': /* exactly %rdi */
        ct->ct |= TCG_CT_REG;
        tcg_regset_set_reg(ct->u.regs, TCG_REG_RDI);
        break;
    case 'q': /* one of the four low registers rax/rcx/rdx/rbx (mask 0xf) */
        ct->ct |= TCG_CT_REG;
        tcg_regset_set32(ct->u.regs, 0, 0xf);
        break;
    case 'r': /* any of the 16 general-purpose registers */
        ct->ct |= TCG_CT_REG;
        tcg_regset_set32(ct->u.regs, 0, 0xffff);
        break;
    case 'L': /* qemu_ld/st constraint */
        /* Any register except rsi/rdi, which the softmmu slow path
           uses as scratch/argument registers (see tcg_out_qemu_ld/st).  */
        ct->ct |= TCG_CT_REG;
        tcg_regset_set32(ct->u.regs, 0, 0xffff);
        tcg_regset_reset_reg(ct->u.regs, TCG_REG_RSI);
        tcg_regset_reset_reg(ct->u.regs, TCG_REG_RDI);
        break;
    case 'e': /* constant that fits a sign-extended 32-bit immediate */
        ct->ct |= TCG_CT_CONST_S32;
        break;
    case 'Z': /* constant that fits a zero-extended 32-bit immediate */
        ct->ct |= TCG_CT_CONST_U32;
        break;
    default:
        return -1;
    }
    ct_str++;
    *pct_str = ct_str;
    return 0;
}
168 
169 /* test if a constant matches the constraint */
tcg_target_const_match(tcg_target_long val,const TCGArgConstraint * arg_ct)170 static inline int tcg_target_const_match(tcg_target_long val,
171                                          const TCGArgConstraint *arg_ct)
172 {
173     int ct;
174     ct = arg_ct->ct;
175     if (ct & TCG_CT_CONST)
176         return 1;
177     else if ((ct & TCG_CT_CONST_S32) && val == (int32_t)val)
178         return 1;
179     else if ((ct & TCG_CT_CONST_U32) && val == (uint32_t)val)
180         return 1;
181     else
182         return 0;
183 }
184 
/* Group-1 ALU operations: the value is the ModRM reg field (/digit)
   for the 0x81/0x83 immediate forms, and selects the opcode row for
   the register forms (0x01 | (op << 3)).  */
#define ARITH_ADD 0
#define ARITH_OR  1
#define ARITH_ADC 2
#define ARITH_SBB 3
#define ARITH_AND 4
#define ARITH_SUB 5
#define ARITH_XOR 6
#define ARITH_CMP 7

/* Shift-group operations: ModRM reg field (/digit) for opcode 0xc1.  */
#define SHIFT_SHL 4
#define SHIFT_SHR 5
#define SHIFT_SAR 7

/* x86 condition codes for jcc (0x70+cc short form, 0x0f 0x80+cc near
   form); JCC_JMP denotes an unconditional jump.  */
#define JCC_JMP (-1)
#define JCC_JO  0x0
#define JCC_JNO 0x1
#define JCC_JB  0x2
#define JCC_JAE 0x3
#define JCC_JE  0x4
#define JCC_JNE 0x5
#define JCC_JBE 0x6
#define JCC_JA  0x7
#define JCC_JS  0x8
#define JCC_JNS 0x9
#define JCC_JP  0xa
#define JCC_JNP 0xb
#define JCC_JL  0xc
#define JCC_JGE 0xd
#define JCC_JLE 0xe
#define JCC_JG  0xf

/* Flags or'ed into the opcode argument of tcg_out_opc().  */
#define P_EXT   0x100 /* 0x0f opcode prefix */
#define P_REXW  0x200 /* set rex.w = 1 */
#define P_REXB  0x400 /* force rex use for byte registers */
219 
/* Map TCG comparison conditions to x86 jcc condition codes.  */
static const uint8_t tcg_cond_to_jcc[10] = {
    [TCG_COND_EQ] = JCC_JE,
    [TCG_COND_NE] = JCC_JNE,
    [TCG_COND_LT] = JCC_JL,
    [TCG_COND_GE] = JCC_JGE,
    [TCG_COND_LE] = JCC_JLE,
    [TCG_COND_GT] = JCC_JG,
    [TCG_COND_LTU] = JCC_JB,
    [TCG_COND_GEU] = JCC_JAE,
    [TCG_COND_LEU] = JCC_JBE,
    [TCG_COND_GTU] = JCC_JA,
};
232 
/* Emit an opcode byte, preceded by a REX prefix when required and by
   the 0x0f escape byte when P_EXT is set.  R, RM and X are register
   numbers whose bit 3 supplies REX.R, REX.B and REX.X respectively;
   P_REXW in OPC supplies REX.W, and P_REXB forces a REX prefix even
   when no bits are set (needed to address sil/dil/spl/bpl).  */
static inline void tcg_out_opc(TCGContext *s, int opc, int r, int rm, int x)
{
    int rex;
    /* W = opc bit 9 -> rex bit 3; R = r bit 3 -> rex bit 2;
       X = x bit 3 -> rex bit 1; B = rm bit 3 -> rex bit 0.  */
    rex = ((opc >> 6) & 0x8) | ((r >> 1) & 0x4) |
        ((x >> 2) & 2) | ((rm >> 3) & 1);
    if (rex || (opc & P_REXB)) {
        tcg_out8(s, rex | 0x40);
    }
    if (opc & P_EXT)
        tcg_out8(s, 0x0f);
    tcg_out8(s, opc);
}
245 
/* Emit OPC followed by a ModRM byte in register-direct form
   (mod == 3): reg field = R, r/m field = RM.  */
static inline void tcg_out_modrm(TCGContext *s, int opc, int r, int rm)
{
    tcg_out_opc(s, opc, r, rm, 0);
    tcg_out8(s, 0xc0 | ((r & 7) << 3) | (rm & 7));
}
251 
/* rm < 0 means no register index plus (-rm - 1 immediate bytes) */
/* Emit OPC with a memory operand OFFSET(RM), using the shortest
   displacement encoding.  For rm < 0, OFFSET is an absolute address
   encoded rip-relative when it is in range (the -rm - 1 trailing
   immediate bytes are accounted for in the displacement), otherwise
   as a 32-bit absolute address via a SIB byte.  */
static inline void tcg_out_modrm_offset(TCGContext *s, int opc, int r, int rm,
                                        tcg_target_long offset)
{
    if (rm < 0) {
        tcg_target_long val;
        tcg_out_opc(s, opc, r, 0, 0);
        /* displacement is relative to the end of the instruction:
           opcode so far + modrm + disp32 (5 bytes) + immediates */
        val = offset - ((tcg_target_long)s->code_ptr + 5 + (-rm - 1));
        if (val == (int32_t)val) {
            /* eip relative */
            tcg_out8(s, 0x05 | ((r & 7) << 3));
            tcg_out32(s, val);
        } else if (offset == (int32_t)offset) {
            /* absolute 32-bit address: mod=00, rm=100 with SIB 0x25 */
            tcg_out8(s, 0x04 | ((r & 7) << 3));
            tcg_out8(s, 0x25); /* sib */
            tcg_out32(s, offset);
        } else {
            tcg_abort();
        }
    } else if (offset == 0 && (rm & 7) != TCG_REG_RBP) {
        /* mod=00: no displacement (rbp/r13 cannot use this form) */
        tcg_out_opc(s, opc, r, rm, 0);
        if ((rm & 7) == TCG_REG_RSP) {
            /* rsp/r12 as base always needs a SIB byte (0x24 = no index) */
            tcg_out8(s, 0x04 | ((r & 7) << 3));
            tcg_out8(s, 0x24);
        } else {
            tcg_out8(s, 0x00 | ((r & 7) << 3) | (rm & 7));
        }
    } else if ((int8_t)offset == offset) {
        /* mod=01: 8-bit displacement */
        tcg_out_opc(s, opc, r, rm, 0);
        if ((rm & 7) == TCG_REG_RSP) {
            tcg_out8(s, 0x44 | ((r & 7) << 3));
            tcg_out8(s, 0x24);
        } else {
            tcg_out8(s, 0x40 | ((r & 7) << 3) | (rm & 7));
        }
        tcg_out8(s, offset);
    } else {
        /* mod=10: 32-bit displacement */
        tcg_out_opc(s, opc, r, rm, 0);
        if ((rm & 7) == TCG_REG_RSP) {
            tcg_out8(s, 0x84 | ((r & 7) << 3));
            tcg_out8(s, 0x24);
        } else {
            tcg_out8(s, 0x80 | ((r & 7) << 3) | (rm & 7));
        }
        tcg_out32(s, offset);
    }
}
299 
#if defined(CONFIG_SOFTMMU)
/* XXX: incomplete. index must be different from ESP */
/* Emit OPC with a memory operand OFFSET(RM, INDEX, 1 << SHIFT).
   INDEX == -1 means no index register; RM must be a valid base
   (rm == -1 aborts).  Chooses the smallest displacement encoding.  */
static void tcg_out_modrm_offset2(TCGContext *s, int opc, int r, int rm,
                                  int index, int shift,
                                  tcg_target_long offset)
{
    int mod;
    if (rm == -1)
        tcg_abort();
    /* Select mod field from the displacement size.  */
    if (offset == 0 && (rm & 7) != TCG_REG_RBP) {
        mod = 0;
    } else if (offset == (int8_t)offset) {
        mod = 0x40;
    } else if (offset == (int32_t)offset) {
        mod = 0x80;
    } else {
        tcg_abort();
    }
    if (index == -1) {
        tcg_out_opc(s, opc, r, rm, 0);
        if ((rm & 7) == TCG_REG_RSP) {
            /* rsp/r12 as base needs a SIB byte.  0x24 encodes
               "no index, base = rsp/r12", matching tcg_out_modrm_offset();
               the previous 0x04 | (rm & 7) wrongly selected rax as the
               index register (encoding [base + rax]).  */
            tcg_out8(s, mod | ((r & 7) << 3) | 0x04);
            tcg_out8(s, 0x24);
        } else {
            tcg_out8(s, mod | ((r & 7) << 3) | (rm & 7));
        }
    } else {
        /* SIB form: scale = SHIFT, index = INDEX, base = RM */
        tcg_out_opc(s, opc, r, rm, index);
        tcg_out8(s, mod | ((r & 7) << 3) | 0x04);
        tcg_out8(s, (shift << 6) | ((index & 7) << 3) | (rm & 7));
    }
    /* Trailing displacement, if any.  */
    if (mod == 0x40) {
        tcg_out8(s, offset);
    } else if (mod == 0x80) {
        tcg_out32(s, offset);
    }
}
#endif
338 
/* 64-bit register-to-register move: mov arg, ret.  */
static inline void tcg_out_mov(TCGContext *s, int ret, int arg)
{
    tcg_out_modrm(s, 0x8b | P_REXW, ret, arg);
}
343 
/* Load the constant ARG into register RET, picking the shortest
   encoding that produces the right value.  Note the zero case emits
   xor, which also clobbers the flags.  */
static inline void tcg_out_movi(TCGContext *s, TCGType type,
                                int ret, tcg_target_long arg)
{
    if (arg == 0) {
        tcg_out_modrm(s, 0x01 | (ARITH_XOR << 3), ret, ret); /* xor r0,r0 */
    } else if (arg == (uint32_t)arg || type == TCG_TYPE_I32) {
        /* 32-bit mov immediate; zero-extends into the 64-bit register */
        tcg_out_opc(s, 0xb8 + (ret & 7), 0, ret, 0);
        tcg_out32(s, arg);
    } else if (arg == (int32_t)arg) {
        /* movq with sign-extended 32-bit immediate (0xc7 /0) */
        tcg_out_modrm(s, 0xc7 | P_REXW, 0, ret);
        tcg_out32(s, arg);
    } else {
        /* full 64-bit immediate (movabs) */
        tcg_out_opc(s, (0xb8 + (ret & 7)) | P_REXW, 0, ret, 0);
        tcg_out32(s, arg);
        tcg_out32(s, arg >> 32);
    }
}
361 
/* Load a 32- or 64-bit value from arg2(arg1) into register RET.  */
static inline void tcg_out_ld(TCGContext *s, TCGType type, int ret,
                              int arg1, tcg_target_long arg2)
{
    int opc = 0x8b;                 /* mov r, r/m */
    if (type != TCG_TYPE_I32) {
        opc |= P_REXW;              /* 64-bit operand size */
    }
    tcg_out_modrm_offset(s, opc, ret, arg1, arg2);
}
370 
/* Store the 32- or 64-bit register ARG to arg2(arg1).  */
static inline void tcg_out_st(TCGContext *s, TCGType type, int arg,
                              int arg1, tcg_target_long arg2)
{
    int opc = 0x89;                 /* mov r/m, r */
    if (type != TCG_TYPE_I32) {
        opc |= P_REXW;              /* 64-bit operand size */
    }
    tcg_out_modrm_offset(s, opc, arg, arg1, arg2);
}
379 
/* Emit a 32-bit ALU operation C (one of the ARITH_* group-1 codes)
   of immediate VAL on register R0.  Small immediates use the
   sign-extended 8-bit form; AND with 0xff/0xffff is strength-reduced
   to a zero-extending move.  */
static inline void tgen_arithi32(TCGContext *s, int c, int r0, int32_t val)
{
    if (val == (int8_t)val) {
        /* group-1 opcode 0x83: sign-extended 8-bit immediate */
        tcg_out_modrm(s, 0x83, c, r0);
        tcg_out8(s, val);
    } else if (c == ARITH_AND && val == 0xffu) {
        /* movzbl */
        tcg_out_modrm(s, 0xb6 | P_EXT | P_REXB, r0, r0);
    } else if (c == ARITH_AND && val == 0xffffu) {
        /* movzwl */
        tcg_out_modrm(s, 0xb7 | P_EXT, r0, r0);
    } else {
        /* group-1 opcode 0x81: 32-bit immediate */
        tcg_out_modrm(s, 0x81, c, r0);
        tcg_out32(s, val);
    }
}
396 
/* Emit a 64-bit ALU operation C of immediate VAL on register R0.
   Like tgen_arithi32, but also exploits the fact that 32-bit
   operations zero-extend, and aborts when the immediate cannot be
   encoded (only AND tolerates a full unsigned 32-bit value).  */
static inline void tgen_arithi64(TCGContext *s, int c, int r0, int64_t val)
{
    if (val == (int8_t)val) {
        /* sign-extended 8-bit immediate */
        tcg_out_modrm(s, 0x83 | P_REXW, c, r0);
        tcg_out8(s, val);
    } else if (c == ARITH_AND && val == 0xffu) {
        /* movzbl */
        tcg_out_modrm(s, 0xb6 | P_EXT | P_REXW, r0, r0);
    } else if (c == ARITH_AND && val == 0xffffu) {
        /* movzwl */
        tcg_out_modrm(s, 0xb7 | P_EXT | P_REXW, r0, r0);
    } else if (c == ARITH_AND && val == 0xffffffffu) {
        /* 32-bit mov zero extends */
        tcg_out_modrm(s, 0x8b, r0, r0);
    } else if (val == (int32_t)val) {
        /* sign-extended 32-bit immediate */
        tcg_out_modrm(s, 0x81 | P_REXW, c, r0);
        tcg_out32(s, val);
    } else if (c == ARITH_AND && val == (uint32_t)val) {
        /* 32-bit AND: result is zero-extended anyway */
        tcg_out_modrm(s, 0x81, c, r0);
        tcg_out32(s, val);
    } else {
        tcg_abort();
    }
}
421 
/* Add the immediate VAL to 64-bit register REG; adding zero emits
   no code at all.  */
static void tcg_out_addi(TCGContext *s, int reg, tcg_target_long val)
{
    if (val == 0) {
        return;
    }
    tgen_arithi64(s, ARITH_ADD, reg, val);
}
427 
/* Emit a jump to TCG label LABEL_INDEX.  OPC is either JCC_JMP (-1)
   for an unconditional jump or an x86 condition code.  Jumps to an
   already-resolved label use the short 8-bit form when it fits;
   forward jumps always use the 32-bit form plus a relocation.  */
static void tcg_out_jxx(TCGContext *s, int opc, int label_index)
{
    int32_t val, val1;
    TCGLabel *l = &s->labels[label_index];

    if (l->has_value) {
        val = l->u.value - (tcg_target_long)s->code_ptr;
        val1 = val - 2; /* relative to the end of the 2-byte short form */
        if ((int8_t)val1 == val1) {
            if (opc == -1)
                tcg_out8(s, 0xeb);       /* jmp rel8 */
            else
                tcg_out8(s, 0x70 + opc); /* jcc rel8 */
            tcg_out8(s, val1);
        } else {
            if (opc == -1) {
                tcg_out8(s, 0xe9);       /* jmp rel32: 5 bytes total */
                tcg_out32(s, val - 5);
            } else {
                tcg_out8(s, 0x0f);       /* jcc rel32: 6 bytes total */
                tcg_out8(s, 0x80 + opc);
                tcg_out32(s, val - 6);
            }
        }
    } else {
        /* Label not yet resolved: emit the 32-bit form and record a
           pc-relative relocation for the displacement field.  */
        if (opc == -1) {
            tcg_out8(s, 0xe9);
        } else {
            tcg_out8(s, 0x0f);
            tcg_out8(s, 0x80 + opc);
        }
        tcg_out_reloc(s, s->code_ptr, R_386_PC32, label_index, -4);
        s->code_ptr += 4;
    }
}
463 
/* Emit a comparison of ARG1 against ARG2 (immediate if CONST_ARG2),
   followed by a conditional jump to LABEL_INDEX.  REXW selects 32-
   versus 64-bit comparison width.  */
static void tcg_out_brcond(TCGContext *s, int cond,
                           TCGArg arg1, TCGArg arg2, int const_arg2,
                           int label_index, int rexw)
{
    if (!const_arg2) {
        /* cmp arg2, arg1 */
        tcg_out_modrm(s, 0x01 | (ARITH_CMP << 3) | rexw, arg2, arg1);
    } else if (arg2 == 0) {
        /* comparison against zero: test r, r is shorter */
        tcg_out_modrm(s, 0x85 | rexw, arg1, arg1);
    } else if (rexw) {
        tgen_arithi64(s, ARITH_CMP, arg1, arg2);
    } else {
        tgen_arithi32(s, ARITH_CMP, arg1, arg2);
    }
    tcg_out_jxx(s, tcg_cond_to_jcc[cond], label_index);
}
483 
#if defined(CONFIG_SOFTMMU)

#include "../../softmmu_defs.h"

/* Slow-path load helpers, indexed by log2 of the access size.  */
static void *qemu_ld_helpers[4] = {
    __ldb_mmu,
    __ldw_mmu,
    __ldl_mmu,
    __ldq_mmu,
};

/* Slow-path store helpers, indexed by log2 of the access size.  */
static void *qemu_st_helpers[4] = {
    __stb_mmu,
    __stw_mmu,
    __stl_mmu,
    __stq_mmu,
};
#endif
502 
/* Generate code for a guest memory load.  args = { data_reg, addr_reg,
   mem_index }; the low two bits of OPC are log2 of the access size and
   bit 2 requests sign extension.  With CONFIG_SOFTMMU an inline TLB
   lookup is emitted; on a miss the appropriate qemu_ld helper is
   called and its rax result moved into data_reg.  */
static void tcg_out_qemu_ld(TCGContext *s, const TCGArg *args,
                            int opc)
{
    int addr_reg, data_reg, r0, r1, mem_index, s_bits, bswap, rexw;
#if defined(CONFIG_SOFTMMU)
    uint8_t *label1_ptr, *label2_ptr;
#endif

    data_reg = *args++;
    addr_reg = *args++;
    mem_index = *args;
    s_bits = opc & 3;   /* log2 of the access size */

    /* Scratch registers; rdi/rsi are also the helper's first two
       argument registers (the 'L' constraint keeps operands out).  */
    r0 = TCG_REG_RDI;
    r1 = TCG_REG_RSI;

#if TARGET_LONG_BITS == 32
    rexw = 0;
#else
    rexw = P_REXW;
#endif
#if defined(CONFIG_SOFTMMU)
    /* mov */
    tcg_out_modrm(s, 0x8b | rexw, r1, addr_reg);

    /* mov */
    tcg_out_modrm(s, 0x8b | rexw, r0, addr_reg);

    /* r1 = TLB index, r0 = page-aligned address (plus alignment bits) */
    tcg_out_modrm(s, 0xc1 | rexw, 5, r1); /* shr $x, r1 */
    tcg_out8(s, TARGET_PAGE_BITS - CPU_TLB_ENTRY_BITS);

    tcg_out_modrm(s, 0x81 | rexw, 4, r0); /* andl $x, r0 */
    tcg_out32(s, TARGET_PAGE_MASK | ((1 << s_bits) - 1));

    tcg_out_modrm(s, 0x81, 4, r1); /* andl $x, r1 */
    tcg_out32(s, (CPU_TLB_SIZE - 1) << CPU_TLB_ENTRY_BITS);

    /* lea offset(r1, env), r1 */
    tcg_out_modrm_offset2(s, 0x8d | P_REXW, r1, r1, TCG_AREG0, 0,
                          offsetof(CPUState, tlb_table[mem_index][0].addr_read));

    /* cmp 0(r1), r0 */
    tcg_out_modrm_offset(s, 0x3b | rexw, r0, r1, 0);

    /* reload the untruncated address into r0 for the slow path */
    /* mov */
    tcg_out_modrm(s, 0x8b | rexw, r0, addr_reg);

    /* je label1 */
    tcg_out8(s, 0x70 + JCC_JE);
    label1_ptr = s->code_ptr;
    s->code_ptr++;

    /* TLB miss: call the helper (address already in rdi).  */
    /* XXX: move that code at the end of the TB */
    tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_RSI, mem_index);
    tcg_out8(s, 0xe8);
    tcg_out32(s, (tcg_target_long)qemu_ld_helpers[s_bits] -
              (tcg_target_long)s->code_ptr - 4);

    /* Move/extend the helper result from rax into data_reg.  */
    switch(opc) {
    case 0 | 4:
        /* movsbq */
        tcg_out_modrm(s, 0xbe | P_EXT | P_REXW, data_reg, TCG_REG_RAX);
        break;
    case 1 | 4:
        /* movswq */
        tcg_out_modrm(s, 0xbf | P_EXT | P_REXW, data_reg, TCG_REG_RAX);
        break;
    case 2 | 4:
        /* movslq */
        tcg_out_modrm(s, 0x63 | P_REXW, data_reg, TCG_REG_RAX);
        break;
    case 0:
    case 1:
    case 2:
    default:
        /* movl */
        tcg_out_modrm(s, 0x8b, data_reg, TCG_REG_RAX);
        break;
    case 3:
        tcg_out_mov(s, data_reg, TCG_REG_RAX);
        break;
    }

    /* jmp label2 */
    tcg_out8(s, 0xeb);
    label2_ptr = s->code_ptr;
    s->code_ptr++;

    /* label1: TLB hit -- patch the je displacement */
    *label1_ptr = s->code_ptr - label1_ptr - 1;

    /* add x(r1), r0 : turn the guest address into a host address */
    tcg_out_modrm_offset(s, 0x03 | P_REXW, r0, r1, offsetof(CPUTLBEntry, addend) -
                         offsetof(CPUTLBEntry, addr_read));
#else
    r0 = addr_reg;
#endif

#ifdef TARGET_WORDS_BIGENDIAN
    bswap = 1;
#else
    bswap = 0;
#endif
    /* Fast-path load from the host address in r0, with byte swapping
       when the guest endianness differs from the host's.  */
    switch(opc) {
    case 0:
        /* movzbl */
        tcg_out_modrm_offset(s, 0xb6 | P_EXT, data_reg, r0, 0);
        break;
    case 0 | 4:
        /* movsbX */
        tcg_out_modrm_offset(s, 0xbe | P_EXT | rexw, data_reg, r0, 0);
        break;
    case 1:
        /* movzwl */
        tcg_out_modrm_offset(s, 0xb7 | P_EXT, data_reg, r0, 0);
        if (bswap) {
            /* rolw $8, data_reg */
            tcg_out8(s, 0x66);
            tcg_out_modrm(s, 0xc1, 0, data_reg);
            tcg_out8(s, 8);
        }
        break;
    case 1 | 4:
        if (bswap) {
            /* movzwl */
            tcg_out_modrm_offset(s, 0xb7 | P_EXT, data_reg, r0, 0);
            /* rolw $8, data_reg */
            tcg_out8(s, 0x66);
            tcg_out_modrm(s, 0xc1, 0, data_reg);
            tcg_out8(s, 8);

            /* movswX data_reg, data_reg */
            tcg_out_modrm(s, 0xbf | P_EXT | rexw, data_reg, data_reg);
        } else {
            /* movswX */
            tcg_out_modrm_offset(s, 0xbf | P_EXT | rexw, data_reg, r0, 0);
        }
        break;
    case 2:
        /* movl (r0), data_reg */
        tcg_out_modrm_offset(s, 0x8b, data_reg, r0, 0);
        if (bswap) {
            /* bswap */
            tcg_out_opc(s, (0xc8 + (data_reg & 7)) | P_EXT, 0, data_reg, 0);
        }
        break;
    case 2 | 4:
        if (bswap) {
            /* movl (r0), data_reg */
            tcg_out_modrm_offset(s, 0x8b, data_reg, r0, 0);
            /* bswap */
            tcg_out_opc(s, (0xc8 + (data_reg & 7)) | P_EXT, 0, data_reg, 0);
            /* movslq */
            tcg_out_modrm(s, 0x63 | P_REXW, data_reg, data_reg);
        } else {
            /* movslq */
            tcg_out_modrm_offset(s, 0x63 | P_REXW, data_reg, r0, 0);
        }
        break;
    case 3:
        /* movq (r0), data_reg */
        tcg_out_modrm_offset(s, 0x8b | P_REXW, data_reg, r0, 0);
        if (bswap) {
            /* bswap */
            tcg_out_opc(s, (0xc8 + (data_reg & 7)) | P_EXT | P_REXW, 0, data_reg, 0);
        }
        break;
    default:
        tcg_abort();
    }

#if defined(CONFIG_SOFTMMU)
    /* label2: patch the jmp displacement past the fast path */
    *label2_ptr = s->code_ptr - label2_ptr - 1;
#endif
}
679 
/* Generate code for a guest memory store.  args = { data_reg, addr_reg,
   mem_index }; OPC is log2 of the access size.  Mirrors
   tcg_out_qemu_ld, but probes addr_write and calls a qemu_st helper
   with the value in rsi and mem_index in rdx.  */
static void tcg_out_qemu_st(TCGContext *s, const TCGArg *args,
                            int opc)
{
    int addr_reg, data_reg, r0, r1, mem_index, s_bits, bswap, rexw;
#if defined(CONFIG_SOFTMMU)
    uint8_t *label1_ptr, *label2_ptr;
#endif

    data_reg = *args++;
    addr_reg = *args++;
    mem_index = *args;

    s_bits = opc;   /* log2 of the access size */

    /* Scratch registers; also the helper's argument registers.  */
    r0 = TCG_REG_RDI;
    r1 = TCG_REG_RSI;

#if TARGET_LONG_BITS == 32
    rexw = 0;
#else
    rexw = P_REXW;
#endif
#if defined(CONFIG_SOFTMMU)
    /* mov */
    tcg_out_modrm(s, 0x8b | rexw, r1, addr_reg);

    /* mov */
    tcg_out_modrm(s, 0x8b | rexw, r0, addr_reg);

    /* r1 = TLB index, r0 = page-aligned address (plus alignment bits) */
    tcg_out_modrm(s, 0xc1 | rexw, 5, r1); /* shr $x, r1 */
    tcg_out8(s, TARGET_PAGE_BITS - CPU_TLB_ENTRY_BITS);

    tcg_out_modrm(s, 0x81 | rexw, 4, r0); /* andl $x, r0 */
    tcg_out32(s, TARGET_PAGE_MASK | ((1 << s_bits) - 1));

    tcg_out_modrm(s, 0x81, 4, r1); /* andl $x, r1 */
    tcg_out32(s, (CPU_TLB_SIZE - 1) << CPU_TLB_ENTRY_BITS);

    /* lea offset(r1, env), r1 */
    tcg_out_modrm_offset2(s, 0x8d | P_REXW, r1, r1, TCG_AREG0, 0,
                          offsetof(CPUState, tlb_table[mem_index][0].addr_write));

    /* cmp 0(r1), r0 */
    tcg_out_modrm_offset(s, 0x3b | rexw, r0, r1, 0);

    /* reload the untruncated address into r0 for the slow path */
    /* mov */
    tcg_out_modrm(s, 0x8b | rexw, r0, addr_reg);

    /* je label1 */
    tcg_out8(s, 0x70 + JCC_JE);
    label1_ptr = s->code_ptr;
    s->code_ptr++;

    /* TLB miss: put the (extended) value in rsi, mem_index in rdx,
       then call the store helper (address already in rdi).  */
    /* XXX: move that code at the end of the TB */
    switch(opc) {
    case 0:
        /* movzbl */
        tcg_out_modrm(s, 0xb6 | P_EXT | P_REXB, TCG_REG_RSI, data_reg);
        break;
    case 1:
        /* movzwl */
        tcg_out_modrm(s, 0xb7 | P_EXT, TCG_REG_RSI, data_reg);
        break;
    case 2:
        /* movl */
        tcg_out_modrm(s, 0x8b, TCG_REG_RSI, data_reg);
        break;
    default:
    case 3:
        tcg_out_mov(s, TCG_REG_RSI, data_reg);
        break;
    }
    tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_RDX, mem_index);
    tcg_out8(s, 0xe8);
    tcg_out32(s, (tcg_target_long)qemu_st_helpers[s_bits] -
              (tcg_target_long)s->code_ptr - 4);

    /* jmp label2 */
    tcg_out8(s, 0xeb);
    label2_ptr = s->code_ptr;
    s->code_ptr++;

    /* label1: TLB hit -- patch the je displacement */
    *label1_ptr = s->code_ptr - label1_ptr - 1;

    /* add x(r1), r0 : turn the guest address into a host address */
    tcg_out_modrm_offset(s, 0x03 | P_REXW, r0, r1, offsetof(CPUTLBEntry, addend) -
                         offsetof(CPUTLBEntry, addr_write));
#else
    r0 = addr_reg;
#endif

#ifdef TARGET_WORDS_BIGENDIAN
    bswap = 1;
#else
    bswap = 0;
#endif
    /* Fast-path store to the host address in r0; when byte swapping is
       needed the value is first swapped into the r1 scratch register.  */
    switch(opc) {
    case 0:
        /* movb */
        tcg_out_modrm_offset(s, 0x88 | P_REXB, data_reg, r0, 0);
        break;
    case 1:
        if (bswap) {
            tcg_out_modrm(s, 0x8b, r1, data_reg); /* movl */
            tcg_out8(s, 0x66); /* rolw $8, %ecx */
            tcg_out_modrm(s, 0xc1, 0, r1);
            tcg_out8(s, 8);
            data_reg = r1;
        }
        /* movw */
        tcg_out8(s, 0x66);
        tcg_out_modrm_offset(s, 0x89, data_reg, r0, 0);
        break;
    case 2:
        if (bswap) {
            tcg_out_modrm(s, 0x8b, r1, data_reg); /* movl */
            /* bswap data_reg */
            tcg_out_opc(s, (0xc8 + r1) | P_EXT, 0, r1, 0);
            data_reg = r1;
        }
        /* movl */
        tcg_out_modrm_offset(s, 0x89, data_reg, r0, 0);
        break;
    case 3:
        if (bswap) {
            tcg_out_mov(s, r1, data_reg);
            /* bswap data_reg */
            tcg_out_opc(s, (0xc8 + r1) | P_EXT | P_REXW, 0, r1, 0);
            data_reg = r1;
        }
        /* movq */
        tcg_out_modrm_offset(s, 0x89 | P_REXW, data_reg, r0, 0);
        break;
    default:
        tcg_abort();
    }

#if defined(CONFIG_SOFTMMU)
    /* label2: patch the jmp displacement past the fast path */
    *label2_ptr = s->code_ptr - label2_ptr - 1;
#endif
}
823 
tcg_out_op(TCGContext * s,int opc,const TCGArg * args,const int * const_args)824 static inline void tcg_out_op(TCGContext *s, int opc, const TCGArg *args,
825                               const int *const_args)
826 {
827     int c;
828 
829     switch(opc) {
830     case INDEX_op_exit_tb:
831         tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_RAX, args[0]);
832         tcg_out8(s, 0xe9); /* jmp tb_ret_addr */
833         tcg_out32(s, tb_ret_addr - s->code_ptr - 4);
834         break;
835     case INDEX_op_goto_tb:
836         if (s->tb_jmp_offset) {
837             /* direct jump method */
838             tcg_out8(s, 0xe9); /* jmp im */
839             s->tb_jmp_offset[args[0]] = s->code_ptr - s->code_buf;
840             tcg_out32(s, 0);
841         } else {
842             /* indirect jump method */
843             /* jmp Ev */
844             tcg_out_modrm_offset(s, 0xff, 4, -1,
845                                  (tcg_target_long)(s->tb_next +
846                                                    args[0]));
847         }
848         s->tb_next_offset[args[0]] = s->code_ptr - s->code_buf;
849         break;
850     case INDEX_op_call:
851         if (const_args[0]) {
852             tcg_out8(s, 0xe8);
853             tcg_out32(s, args[0] - (tcg_target_long)s->code_ptr - 4);
854         } else {
855             tcg_out_modrm(s, 0xff, 2, args[0]);
856         }
857         break;
858     case INDEX_op_jmp:
859         if (const_args[0]) {
860             tcg_out8(s, 0xe9);
861             tcg_out32(s, args[0] - (tcg_target_long)s->code_ptr - 4);
862         } else {
863             tcg_out_modrm(s, 0xff, 4, args[0]);
864         }
865         break;
866     case INDEX_op_br:
867         tcg_out_jxx(s, JCC_JMP, args[0]);
868         break;
869     case INDEX_op_movi_i32:
870         tcg_out_movi(s, TCG_TYPE_I32, args[0], (uint32_t)args[1]);
871         break;
872     case INDEX_op_movi_i64:
873         tcg_out_movi(s, TCG_TYPE_I64, args[0], args[1]);
874         break;
875     case INDEX_op_ld8u_i32:
876     case INDEX_op_ld8u_i64:
877         /* movzbl */
878         tcg_out_modrm_offset(s, 0xb6 | P_EXT, args[0], args[1], args[2]);
879         break;
880     case INDEX_op_ld8s_i32:
881         /* movsbl */
882         tcg_out_modrm_offset(s, 0xbe | P_EXT, args[0], args[1], args[2]);
883         break;
884     case INDEX_op_ld8s_i64:
885         /* movsbq */
886         tcg_out_modrm_offset(s, 0xbe | P_EXT | P_REXW, args[0], args[1], args[2]);
887         break;
888     case INDEX_op_ld16u_i32:
889     case INDEX_op_ld16u_i64:
890         /* movzwl */
891         tcg_out_modrm_offset(s, 0xb7 | P_EXT, args[0], args[1], args[2]);
892         break;
893     case INDEX_op_ld16s_i32:
894         /* movswl */
895         tcg_out_modrm_offset(s, 0xbf | P_EXT, args[0], args[1], args[2]);
896         break;
897     case INDEX_op_ld16s_i64:
898         /* movswq */
899         tcg_out_modrm_offset(s, 0xbf | P_EXT | P_REXW, args[0], args[1], args[2]);
900         break;
901     case INDEX_op_ld_i32:
902     case INDEX_op_ld32u_i64:
903         /* movl */
904         tcg_out_modrm_offset(s, 0x8b, args[0], args[1], args[2]);
905         break;
906     case INDEX_op_ld32s_i64:
907         /* movslq */
908         tcg_out_modrm_offset(s, 0x63 | P_REXW, args[0], args[1], args[2]);
909         break;
910     case INDEX_op_ld_i64:
911         /* movq */
912         tcg_out_modrm_offset(s, 0x8b | P_REXW, args[0], args[1], args[2]);
913         break;
914 
915     case INDEX_op_st8_i32:
916     case INDEX_op_st8_i64:
917         /* movb */
918         tcg_out_modrm_offset(s, 0x88 | P_REXB, args[0], args[1], args[2]);
919         break;
920     case INDEX_op_st16_i32:
921     case INDEX_op_st16_i64:
922         /* movw */
923         tcg_out8(s, 0x66);
924         tcg_out_modrm_offset(s, 0x89, args[0], args[1], args[2]);
925         break;
926     case INDEX_op_st_i32:
927     case INDEX_op_st32_i64:
928         /* movl */
929         tcg_out_modrm_offset(s, 0x89, args[0], args[1], args[2]);
930         break;
931     case INDEX_op_st_i64:
932         /* movq */
933         tcg_out_modrm_offset(s, 0x89 | P_REXW, args[0], args[1], args[2]);
934         break;
935 
936     case INDEX_op_sub_i32:
937         c = ARITH_SUB;
938         goto gen_arith32;
939     case INDEX_op_and_i32:
940         c = ARITH_AND;
941         goto gen_arith32;
942     case INDEX_op_or_i32:
943         c = ARITH_OR;
944         goto gen_arith32;
945     case INDEX_op_xor_i32:
946         c = ARITH_XOR;
947         goto gen_arith32;
948     case INDEX_op_add_i32:
949         c = ARITH_ADD;
950     gen_arith32:
951         if (const_args[2]) {
952             tgen_arithi32(s, c, args[0], args[2]);
953         } else {
954             tcg_out_modrm(s, 0x01 | (c << 3), args[2], args[0]);
955         }
956         break;
957 
958     case INDEX_op_sub_i64:
959         c = ARITH_SUB;
960         goto gen_arith64;
961     case INDEX_op_and_i64:
962         c = ARITH_AND;
963         goto gen_arith64;
964     case INDEX_op_or_i64:
965         c = ARITH_OR;
966         goto gen_arith64;
967     case INDEX_op_xor_i64:
968         c = ARITH_XOR;
969         goto gen_arith64;
970     case INDEX_op_add_i64:
971         c = ARITH_ADD;
972     gen_arith64:
973         if (const_args[2]) {
974             tgen_arithi64(s, c, args[0], args[2]);
975         } else {
976             tcg_out_modrm(s, 0x01 | (c << 3) | P_REXW, args[2], args[0]);
977         }
978         break;
979 
980     case INDEX_op_mul_i32:
981         if (const_args[2]) {
982             int32_t val;
983             val = args[2];
984             if (val == (int8_t)val) {
985                 tcg_out_modrm(s, 0x6b, args[0], args[0]);
986                 tcg_out8(s, val);
987             } else {
988                 tcg_out_modrm(s, 0x69, args[0], args[0]);
989                 tcg_out32(s, val);
990             }
991         } else {
992             tcg_out_modrm(s, 0xaf | P_EXT, args[0], args[2]);
993         }
994         break;
995     case INDEX_op_mul_i64:
996         if (const_args[2]) {
997             int32_t val;
998             val = args[2];
999             if (val == (int8_t)val) {
1000                 tcg_out_modrm(s, 0x6b | P_REXW, args[0], args[0]);
1001                 tcg_out8(s, val);
1002             } else {
1003                 tcg_out_modrm(s, 0x69 | P_REXW, args[0], args[0]);
1004                 tcg_out32(s, val);
1005             }
1006         } else {
1007             tcg_out_modrm(s, 0xaf | P_EXT | P_REXW, args[0], args[2]);
1008         }
1009         break;
1010     case INDEX_op_div2_i32:
1011         tcg_out_modrm(s, 0xf7, 7, args[4]);
1012         break;
1013     case INDEX_op_divu2_i32:
1014         tcg_out_modrm(s, 0xf7, 6, args[4]);
1015         break;
1016     case INDEX_op_div2_i64:
1017         tcg_out_modrm(s, 0xf7 | P_REXW, 7, args[4]);
1018         break;
1019     case INDEX_op_divu2_i64:
1020         tcg_out_modrm(s, 0xf7 | P_REXW, 6, args[4]);
1021         break;
1022 
1023     case INDEX_op_shl_i32:
1024         c = SHIFT_SHL;
1025     gen_shift32:
1026         if (const_args[2]) {
1027             if (args[2] == 1) {
1028                 tcg_out_modrm(s, 0xd1, c, args[0]);
1029             } else {
1030                 tcg_out_modrm(s, 0xc1, c, args[0]);
1031                 tcg_out8(s, args[2]);
1032             }
1033         } else {
1034             tcg_out_modrm(s, 0xd3, c, args[0]);
1035         }
1036         break;
1037     case INDEX_op_shr_i32:
1038         c = SHIFT_SHR;
1039         goto gen_shift32;
1040     case INDEX_op_sar_i32:
1041         c = SHIFT_SAR;
1042         goto gen_shift32;
1043 
1044     case INDEX_op_shl_i64:
1045         c = SHIFT_SHL;
1046     gen_shift64:
1047         if (const_args[2]) {
1048             if (args[2] == 1) {
1049                 tcg_out_modrm(s, 0xd1 | P_REXW, c, args[0]);
1050             } else {
1051                 tcg_out_modrm(s, 0xc1 | P_REXW, c, args[0]);
1052                 tcg_out8(s, args[2]);
1053             }
1054         } else {
1055             tcg_out_modrm(s, 0xd3 | P_REXW, c, args[0]);
1056         }
1057         break;
1058     case INDEX_op_shr_i64:
1059         c = SHIFT_SHR;
1060         goto gen_shift64;
1061     case INDEX_op_sar_i64:
1062         c = SHIFT_SAR;
1063         goto gen_shift64;
1064 
1065     case INDEX_op_brcond_i32:
1066         tcg_out_brcond(s, args[2], args[0], args[1], const_args[1],
1067                        args[3], 0);
1068         break;
1069     case INDEX_op_brcond_i64:
1070         tcg_out_brcond(s, args[2], args[0], args[1], const_args[1],
1071                        args[3], P_REXW);
1072         break;
1073 
1074     case INDEX_op_bswap_i32:
1075         tcg_out_opc(s, (0xc8 + (args[0] & 7)) | P_EXT, 0, args[0], 0);
1076         break;
1077     case INDEX_op_bswap_i64:
1078         tcg_out_opc(s, (0xc8 + (args[0] & 7)) | P_EXT | P_REXW, 0, args[0], 0);
1079         break;
1080 
1081     case INDEX_op_neg_i32:
1082         tcg_out_modrm(s, 0xf7, 3, args[0]);
1083         break;
1084     case INDEX_op_neg_i64:
1085         tcg_out_modrm(s, 0xf7 | P_REXW, 3, args[0]);
1086         break;
1087 
1088     case INDEX_op_ext8s_i32:
1089         tcg_out_modrm(s, 0xbe | P_EXT | P_REXB, args[0], args[1]);
1090         break;
1091     case INDEX_op_ext16s_i32:
1092         tcg_out_modrm(s, 0xbf | P_EXT, args[0], args[1]);
1093         break;
1094     case INDEX_op_ext8s_i64:
1095         tcg_out_modrm(s, 0xbe | P_EXT | P_REXW, args[0], args[1]);
1096         break;
1097     case INDEX_op_ext16s_i64:
1098         tcg_out_modrm(s, 0xbf | P_EXT | P_REXW, args[0], args[1]);
1099         break;
1100     case INDEX_op_ext32s_i64:
1101         tcg_out_modrm(s, 0x63 | P_REXW, args[0], args[1]);
1102         break;
1103 
1104     case INDEX_op_qemu_ld8u:
1105         tcg_out_qemu_ld(s, args, 0);
1106         break;
1107     case INDEX_op_qemu_ld8s:
1108         tcg_out_qemu_ld(s, args, 0 | 4);
1109         break;
1110     case INDEX_op_qemu_ld16u:
1111         tcg_out_qemu_ld(s, args, 1);
1112         break;
1113     case INDEX_op_qemu_ld16s:
1114         tcg_out_qemu_ld(s, args, 1 | 4);
1115         break;
1116     case INDEX_op_qemu_ld32u:
1117         tcg_out_qemu_ld(s, args, 2);
1118         break;
1119     case INDEX_op_qemu_ld32s:
1120         tcg_out_qemu_ld(s, args, 2 | 4);
1121         break;
1122     case INDEX_op_qemu_ld64:
1123         tcg_out_qemu_ld(s, args, 3);
1124         break;
1125 
1126     case INDEX_op_qemu_st8:
1127         tcg_out_qemu_st(s, args, 0);
1128         break;
1129     case INDEX_op_qemu_st16:
1130         tcg_out_qemu_st(s, args, 1);
1131         break;
1132     case INDEX_op_qemu_st32:
1133         tcg_out_qemu_st(s, args, 2);
1134         break;
1135     case INDEX_op_qemu_st64:
1136         tcg_out_qemu_st(s, args, 3);
1137         break;
1138 
1139     default:
1140         tcg_abort();
1141     }
1142 }
1143 
/* Registers saved/restored around generated code by the prologue and
   epilogue below (see tcg_target_qemu_prologue).  These match the
   callee-saved set of the x86-64 calling convention in use here. */
static int tcg_target_callee_save_regs[] = {
    TCG_REG_RBP,
    TCG_REG_RBX,
    TCG_REG_R12,
    TCG_REG_R13,
    /*    TCG_REG_R14, */ /* currently used for the global env, so no
                             need to save */
    TCG_REG_R15,
};
1153 
/* Emit a one-byte "push reg": opcode 0x50 plus the low 3 bits of the
   register number; the full register number is also handed to
   tcg_out_opc (presumably so high registers get a REX prefix — see
   its definition). */
static inline void tcg_out_push(TCGContext *s, int reg)
{
    int opc = 0x50 + (reg & 7);
    tcg_out_opc(s, opc, 0, reg, 0);
}
1158 
/* Emit a one-byte "pop reg": opcode 0x58 plus the low 3 bits of the
   register number; the full register number is also handed to
   tcg_out_opc (presumably so high registers get a REX prefix — see
   its definition). */
static inline void tcg_out_pop(TCGContext *s, int reg)
{
    int opc = 0x58 + (reg & 7);
    tcg_out_opc(s, opc, 0, reg, 0);
}
1163 
1164 /* Generate global QEMU prologue and epilogue code */
tcg_target_qemu_prologue(TCGContext * s)1165 void tcg_target_qemu_prologue(TCGContext *s)
1166 {
1167     int i, frame_size, push_size, stack_addend;
1168 
1169     /* TB prologue */
1170     /* save all callee saved registers */
1171     for(i = 0; i < ARRAY_SIZE(tcg_target_callee_save_regs); i++) {
1172         tcg_out_push(s, tcg_target_callee_save_regs[i]);
1173 
1174     }
1175     /* reserve some stack space */
1176     push_size = 8 + ARRAY_SIZE(tcg_target_callee_save_regs) * 8;
1177     frame_size = push_size + TCG_STATIC_CALL_ARGS_SIZE;
1178     frame_size = (frame_size + TCG_TARGET_STACK_ALIGN - 1) &
1179         ~(TCG_TARGET_STACK_ALIGN - 1);
1180     stack_addend = frame_size - push_size;
1181     tcg_out_addi(s, TCG_REG_RSP, -stack_addend);
1182 
1183     tcg_out_modrm(s, 0xff, 4, TCG_REG_RDI); /* jmp *%rdi */
1184 
1185     /* TB epilogue */
1186     tb_ret_addr = s->code_ptr;
1187     tcg_out_addi(s, TCG_REG_RSP, stack_addend);
1188     for(i = ARRAY_SIZE(tcg_target_callee_save_regs) - 1; i >= 0; i--) {
1189         tcg_out_pop(s, tcg_target_callee_save_regs[i]);
1190     }
1191     tcg_out8(s, 0xc3); /* ret */
1192 }
1193 
/* Per-opcode operand constraints for the register allocator.  Each
   entry pairs a TCG opcode with one constraint string per operand;
   the letters ("r", "a", "d", "c", "e", "i", "L", "Z", or a digit
   aliasing an earlier operand) are interpreted by the target's
   constraint-parsing code elsewhere in this file — see it for the
   exact meanings.  The list is terminated by the { -1 } sentinel. */
static const TCGTargetOpDef x86_64_op_defs[] = {
    { INDEX_op_exit_tb, { } },
    { INDEX_op_goto_tb, { } },
    { INDEX_op_call, { "ri" } }, /* XXX: might need a specific constant constraint */
    { INDEX_op_jmp, { "ri" } }, /* XXX: might need a specific constant constraint */
    { INDEX_op_br, { } },

    { INDEX_op_mov_i32, { "r", "r" } },
    { INDEX_op_movi_i32, { "r" } },
    { INDEX_op_ld8u_i32, { "r", "r" } },
    { INDEX_op_ld8s_i32, { "r", "r" } },
    { INDEX_op_ld16u_i32, { "r", "r" } },
    { INDEX_op_ld16s_i32, { "r", "r" } },
    { INDEX_op_ld_i32, { "r", "r" } },
    { INDEX_op_st8_i32, { "r", "r" } },
    { INDEX_op_st16_i32, { "r", "r" } },
    { INDEX_op_st_i32, { "r", "r" } },

    { INDEX_op_add_i32, { "r", "0", "ri" } },
    { INDEX_op_mul_i32, { "r", "0", "ri" } },
    { INDEX_op_div2_i32, { "a", "d", "0", "1", "r" } },
    { INDEX_op_divu2_i32, { "a", "d", "0", "1", "r" } },
    { INDEX_op_sub_i32, { "r", "0", "ri" } },
    { INDEX_op_and_i32, { "r", "0", "ri" } },
    { INDEX_op_or_i32, { "r", "0", "ri" } },
    { INDEX_op_xor_i32, { "r", "0", "ri" } },

    { INDEX_op_shl_i32, { "r", "0", "ci" } },
    { INDEX_op_shr_i32, { "r", "0", "ci" } },
    { INDEX_op_sar_i32, { "r", "0", "ci" } },

    { INDEX_op_brcond_i32, { "r", "ri" } },

    { INDEX_op_mov_i64, { "r", "r" } },
    { INDEX_op_movi_i64, { "r" } },
    { INDEX_op_ld8u_i64, { "r", "r" } },
    { INDEX_op_ld8s_i64, { "r", "r" } },
    { INDEX_op_ld16u_i64, { "r", "r" } },
    { INDEX_op_ld16s_i64, { "r", "r" } },
    { INDEX_op_ld32u_i64, { "r", "r" } },
    { INDEX_op_ld32s_i64, { "r", "r" } },
    { INDEX_op_ld_i64, { "r", "r" } },
    { INDEX_op_st8_i64, { "r", "r" } },
    { INDEX_op_st16_i64, { "r", "r" } },
    { INDEX_op_st32_i64, { "r", "r" } },
    { INDEX_op_st_i64, { "r", "r" } },

    { INDEX_op_add_i64, { "r", "0", "re" } },
    { INDEX_op_mul_i64, { "r", "0", "re" } },
    { INDEX_op_div2_i64, { "a", "d", "0", "1", "r" } },
    { INDEX_op_divu2_i64, { "a", "d", "0", "1", "r" } },
    { INDEX_op_sub_i64, { "r", "0", "re" } },
    { INDEX_op_and_i64, { "r", "0", "reZ" } },
    { INDEX_op_or_i64, { "r", "0", "re" } },
    { INDEX_op_xor_i64, { "r", "0", "re" } },

    { INDEX_op_shl_i64, { "r", "0", "ci" } },
    { INDEX_op_shr_i64, { "r", "0", "ci" } },
    { INDEX_op_sar_i64, { "r", "0", "ci" } },

    { INDEX_op_brcond_i64, { "r", "re" } },

    { INDEX_op_bswap_i32, { "r", "0" } },
    { INDEX_op_bswap_i64, { "r", "0" } },

    { INDEX_op_neg_i32, { "r", "0" } },
    { INDEX_op_neg_i64, { "r", "0" } },

    { INDEX_op_ext8s_i32, { "r", "r"} },
    { INDEX_op_ext16s_i32, { "r", "r"} },
    { INDEX_op_ext8s_i64, { "r", "r"} },
    { INDEX_op_ext16s_i64, { "r", "r"} },
    { INDEX_op_ext32s_i64, { "r", "r"} },

    { INDEX_op_qemu_ld8u, { "r", "L" } },
    { INDEX_op_qemu_ld8s, { "r", "L" } },
    { INDEX_op_qemu_ld16u, { "r", "L" } },
    { INDEX_op_qemu_ld16s, { "r", "L" } },
    { INDEX_op_qemu_ld32u, { "r", "L" } },
    { INDEX_op_qemu_ld32s, { "r", "L" } },
    { INDEX_op_qemu_ld64, { "r", "L" } },

    { INDEX_op_qemu_st8, { "L", "L" } },
    { INDEX_op_qemu_st16, { "L", "L" } },
    { INDEX_op_qemu_st32, { "L", "L" } },
    { INDEX_op_qemu_st64, { "L", "L", "L" } },

    { -1 },
};
1283 
/* One-time initialization of the x86-64 backend: register sets,
   call-clobbered mask, reserved registers, and operand constraints. */
void tcg_target_init(TCGContext *s)
{
    unsigned clobbers;

    /* fail safe */
    if ((1 << CPU_TLB_ENTRY_BITS) != sizeof(CPUTLBEntry))
        tcg_abort();

    /* All 16 general-purpose registers are usable for both i32 and
       i64 values. */
    tcg_regset_set32(tcg_target_available_regs[TCG_TYPE_I32], 0, 0xffff);
    tcg_regset_set32(tcg_target_available_regs[TCG_TYPE_I64], 0, 0xffff);

    /* Registers not preserved across a call in this calling
       convention (matches tcg_target_reg_alloc_order's first group). */
    clobbers = (1 << TCG_REG_RDI)
             | (1 << TCG_REG_RSI)
             | (1 << TCG_REG_RDX)
             | (1 << TCG_REG_RCX)
             | (1 << TCG_REG_R8)
             | (1 << TCG_REG_R9)
             | (1 << TCG_REG_RAX)
             | (1 << TCG_REG_R10)
             | (1 << TCG_REG_R11);
    tcg_regset_set32(tcg_target_call_clobber_regs, 0, clobbers);

    /* The stack pointer is never available to the allocator. */
    tcg_regset_clear(s->reserved_regs);
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_RSP);

    tcg_add_target_add_op_defs(x86_64_op_defs);
}
1308