• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 /*
2  * Tiny Code Generator for QEMU
3  *
4  * Copyright (c) 2008 Fabrice Bellard
5  *
6  * Permission is hereby granted, free of charge, to any person obtaining a copy
7  * of this software and associated documentation files (the "Software"), to deal
8  * in the Software without restriction, including without limitation the rights
9  * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
10  * copies of the Software, and to permit persons to whom the Software is
11  * furnished to do so, subject to the following conditions:
12  *
13  * The above copyright notice and this permission notice shall be included in
14  * all copies or substantial portions of the Software.
15  *
16  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
17  * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
18  * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
19  * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
20  * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
21  * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
22  * THE SOFTWARE.
23  */
24 
#ifndef NDEBUG
/* Register names for debug output, indexed by TCG register number
   (hardware encoding order: rax..rdi, then r8..r15). */
static const char * const tcg_target_reg_names[TCG_TARGET_NB_REGS] = {
    "%rax",
    "%rcx",
    "%rdx",
    "%rbx",
    "%rsp",
    "%rbp",
    "%rsi",
    "%rdi",
    "%r8",
    "%r9",
    "%r10",
    "%r11",
    "%r12",
    "%r13",
    "%r14",
    "%r15",
};
#endif
45 
/* Register allocation preference order: rbp, rbx, r12-r15 first
   (callee-saved in the SysV AMD64 ABI), then caller-saved scratch
   registers, with the argument/return registers tried last. */
static const int tcg_target_reg_alloc_order[] = {
    TCG_REG_RBP,
    TCG_REG_RBX,
    TCG_REG_R12,
    TCG_REG_R13,
    TCG_REG_R14,
    TCG_REG_R15,
    TCG_REG_R10,
    TCG_REG_R11,
    TCG_REG_R9,
    TCG_REG_R8,
    TCG_REG_RCX,
    TCG_REG_RDX,
    TCG_REG_RSI,
    TCG_REG_RDI,
    TCG_REG_RAX,
};
63 
/* Integer function-argument registers, in SysV AMD64 ABI order. */
static const int tcg_target_call_iarg_regs[6] = {
    TCG_REG_RDI,
    TCG_REG_RSI,
    TCG_REG_RDX,
    TCG_REG_RCX,
    TCG_REG_R8,
    TCG_REG_R9,
};
72 
/* Integer function-return registers (rax; rdx holds the second word of
   a two-word result in the SysV AMD64 ABI). */
static const int tcg_target_call_oarg_regs[2] = {
    TCG_REG_RAX,
    TCG_REG_RDX
};
77 
/* Address jumped to when a TB exits; target of INDEX_op_exit_tb
   (set elsewhere, presumably by the prologue generator — not visible here). */
static uint8_t *tb_ret_addr;
79 
/* Resolve a relocation recorded at code_ptr: range-check the final value
   for the given relocation type, then store it into the code buffer. */
static void patch_reloc(uint8_t *code_ptr, int type,
                        tcg_target_long value, tcg_target_long addend)
{
    tcg_target_long v = value + addend;

    switch (type) {
    case R_X86_64_32:
        /* absolute 32-bit, zero-extended: must fit in unsigned 32 bits */
        if (v != (uint32_t)v) {
            tcg_abort();
        }
        break;
    case R_X86_64_32S:
        /* absolute 32-bit, sign-extended: must fit in signed 32 bits */
        if (v != (int32_t)v) {
            tcg_abort();
        }
        break;
    case R_386_PC32:
        /* 32-bit PC-relative: rebase against the patch location */
        v -= (long)code_ptr;
        if (v != (int32_t)v) {
            tcg_abort();
        }
        break;
    default:
        tcg_abort();
    }
    *(uint32_t *)code_ptr = v;
}
105 
/* maximum number of register used for input function arguments */
static inline int tcg_target_get_call_iarg_regs_count(int flags)
{
    (void)flags;  /* every call type uses the same six ABI registers */
    return 6;
}
111 
112 /* parse target specific constraints */
target_parse_constraint(TCGArgConstraint * ct,const char ** pct_str)113 static int target_parse_constraint(TCGArgConstraint *ct, const char **pct_str)
114 {
115     const char *ct_str;
116 
117     ct_str = *pct_str;
118     switch(ct_str[0]) {
119     case 'a':
120         ct->ct |= TCG_CT_REG;
121         tcg_regset_set_reg(ct->u.regs, TCG_REG_RAX);
122         break;
123     case 'b':
124         ct->ct |= TCG_CT_REG;
125         tcg_regset_set_reg(ct->u.regs, TCG_REG_RBX);
126         break;
127     case 'c':
128         ct->ct |= TCG_CT_REG;
129         tcg_regset_set_reg(ct->u.regs, TCG_REG_RCX);
130         break;
131     case 'd':
132         ct->ct |= TCG_CT_REG;
133         tcg_regset_set_reg(ct->u.regs, TCG_REG_RDX);
134         break;
135     case 'S':
136         ct->ct |= TCG_CT_REG;
137         tcg_regset_set_reg(ct->u.regs, TCG_REG_RSI);
138         break;
139     case 'D':
140         ct->ct |= TCG_CT_REG;
141         tcg_regset_set_reg(ct->u.regs, TCG_REG_RDI);
142         break;
143     case 'q':
144         ct->ct |= TCG_CT_REG;
145         tcg_regset_set32(ct->u.regs, 0, 0xf);
146         break;
147     case 'r':
148         ct->ct |= TCG_CT_REG;
149         tcg_regset_set32(ct->u.regs, 0, 0xffff);
150         break;
151     case 'L': /* qemu_ld/st constraint */
152         ct->ct |= TCG_CT_REG;
153         tcg_regset_set32(ct->u.regs, 0, 0xffff);
154         tcg_regset_reset_reg(ct->u.regs, TCG_REG_RSI);
155         tcg_regset_reset_reg(ct->u.regs, TCG_REG_RDI);
156         break;
157     case 'e':
158         ct->ct |= TCG_CT_CONST_S32;
159         break;
160     case 'Z':
161         ct->ct |= TCG_CT_CONST_U32;
162         break;
163     default:
164         return -1;
165     }
166     ct_str++;
167     *pct_str = ct_str;
168     return 0;
169 }
170 
171 /* test if a constant matches the constraint */
tcg_target_const_match(tcg_target_long val,const TCGArgConstraint * arg_ct)172 static inline int tcg_target_const_match(tcg_target_long val,
173                                          const TCGArgConstraint *arg_ct)
174 {
175     int ct;
176     ct = arg_ct->ct;
177     if (ct & TCG_CT_CONST)
178         return 1;
179     else if ((ct & TCG_CT_CONST_S32) && val == (int32_t)val)
180         return 1;
181     else if ((ct & TCG_CT_CONST_U32) && val == (uint32_t)val)
182         return 1;
183     else
184         return 0;
185 }
186 
/* ModRM reg-field values for the immediate-ALU opcode group
   (0x81/0x83; see tgen_arithi32/tgen_arithi64). */
#define ARITH_ADD 0
#define ARITH_OR  1
#define ARITH_ADC 2
#define ARITH_SBB 3
#define ARITH_AND 4
#define ARITH_SUB 5
#define ARITH_XOR 6
#define ARITH_CMP 7

/* ModRM reg-field values for the shift/rotate opcode group (0xc1). */
#define SHIFT_ROL 0
#define SHIFT_ROR 1
#define SHIFT_SHL 4
#define SHIFT_SHR 5
#define SHIFT_SAR 7

/* Condition-code nibbles used with Jcc (0x70+cc / 0x0f 0x80+cc) and
   SETcc (0x0f 0x90+cc).  JCC_JMP marks an unconditional jump. */
#define JCC_JMP (-1)
#define JCC_JO  0x0
#define JCC_JNO 0x1
#define JCC_JB  0x2
#define JCC_JAE 0x3
#define JCC_JE  0x4
#define JCC_JNE 0x5
#define JCC_JBE 0x6
#define JCC_JA  0x7
#define JCC_JS  0x8
#define JCC_JNS 0x9
#define JCC_JP  0xa
#define JCC_JNP 0xb
#define JCC_JL  0xc
#define JCC_JGE 0xd
#define JCC_JLE 0xe
#define JCC_JG  0xf

/* Flags OR'ed into the one-byte opcode passed to tcg_out_opc;
   stripped before the opcode byte itself is emitted. */
#define P_EXT		0x100		/* 0x0f opcode prefix */
#define P_REXW		0x200		/* set rex.w = 1 */
#define P_REXB_R	0x400		/* REG field as byte register */
#define P_REXB_RM	0x800		/* R/M field as byte register */
224 
/* Map TCG comparison conditions to x86 Jcc/SETcc condition codes. */
static const uint8_t tcg_cond_to_jcc[10] = {
    [TCG_COND_EQ] = JCC_JE,
    [TCG_COND_NE] = JCC_JNE,
    [TCG_COND_LT] = JCC_JL,
    [TCG_COND_GE] = JCC_JGE,
    [TCG_COND_LE] = JCC_JLE,
    [TCG_COND_GT] = JCC_JG,
    [TCG_COND_LTU] = JCC_JB,
    [TCG_COND_GEU] = JCC_JAE,
    [TCG_COND_LEU] = JCC_JBE,
    [TCG_COND_GTU] = JCC_JA,
};
237 
/* Emit the REX prefix (if needed), the optional 0x0f escape, and the
   opcode byte.  r/rm/x are the registers destined for the ModRM REG,
   R/M and SIB index fields; their high bits select REX.R/B/X. */
static void tcg_out_opc(TCGContext *s, int opc, int r, int rm, int x)
{
    int rex;

    rex  = (opc & P_REXW) >> 6;		/* REX.W */
    rex |= (r & 8) >> 1;		/* REX.R */
    rex |= (x & 8) >> 2;		/* REX.X */
    rex |= (rm & 8) >> 3;		/* REX.B */

    /* P_REXB_{R,RM} indicates that the given register is the low byte.
       For %[abcd]l we need no REX prefix, but for %{si,di,bp,sp}l we do,
       as otherwise the encoding indicates %[abcd]h.  Note that the values
       that are ORed in merely indicate that the REX byte must be present;
       those bits get discarded in output.  */
    if (r >= 4) {
        rex |= opc & P_REXB_R;
    }
    if (rm >= 4) {
        rex |= opc & P_REXB_RM;
    }

    if (rex) {
        /* the cast drops the marker bits, leaving 0x40 | wrxb */
        tcg_out8(s, (uint8_t)(rex | 0x40));
    }
    if (opc & P_EXT) {
        tcg_out8(s, 0x0f);
    }
    tcg_out8(s, opc & 0xff);
}
263 
/* Emit opc with a register-direct ModRM byte (mod = 11): REG=r, R/M=rm. */
static inline void tcg_out_modrm(TCGContext *s, int opc, int r, int rm)
{
    tcg_out_opc(s, opc, r, rm, 0);
    tcg_out8(s, 0xc0 | ((r & 7) << 3) | (rm & 7));
}
269 
/* rm < 0 means no register index plus (-rm - 1 immediate bytes) */
/* Emit opc with a memory operand: either [rm + offset] (rm >= 0), or an
   absolute/RIP-relative address (rm < 0).  Picks the smallest
   displacement encoding that fits. */
static inline void tcg_out_modrm_offset(TCGContext *s, int opc, int r, int rm,
                                        tcg_target_long offset)
{
    if (rm < 0) {
        tcg_target_long val;
        tcg_out_opc(s, opc, r, 0, 0);
        /* Displacement is measured from the end of the instruction:
           +5 covers opcode + modrm + disp32, and (-rm - 1) accounts for
           immediate bytes that follow the displacement. */
        val = offset - ((tcg_target_long)s->code_ptr + 5 + (-rm - 1));
        if (val == (int32_t)val) {
            /* eip relative */
            tcg_out8(s, 0x05 | ((r & 7) << 3));
            tcg_out32(s, val);
        } else if (offset == (int32_t)offset) {
            /* absolute 32-bit address, via SIB with no base/index */
            tcg_out8(s, 0x04 | ((r & 7) << 3));
            tcg_out8(s, 0x25); /* sib */
            tcg_out32(s, offset);
        } else {
            /* address fits in neither encoding */
            tcg_abort();
        }
    } else if (offset == 0 && (rm & 7) != TCG_REG_RBP) {
        /* mod = 00, no displacement (RBP/R13 base would mean disp32) */
        tcg_out_opc(s, opc, r, rm, 0);
        if ((rm & 7) == TCG_REG_RSP) {
            /* RSP/R12 as base requires a SIB byte */
            tcg_out8(s, 0x04 | ((r & 7) << 3));
            tcg_out8(s, 0x24);
        } else {
            tcg_out8(s, 0x00 | ((r & 7) << 3) | (rm & 7));
        }
    } else if ((int8_t)offset == offset) {
        /* mod = 01, 8-bit signed displacement */
        tcg_out_opc(s, opc, r, rm, 0);
        if ((rm & 7) == TCG_REG_RSP) {
            tcg_out8(s, 0x44 | ((r & 7) << 3));
            tcg_out8(s, 0x24);
        } else {
            tcg_out8(s, 0x40 | ((r & 7) << 3) | (rm & 7));
        }
        tcg_out8(s, offset);
    } else {
        /* mod = 10, 32-bit displacement */
        tcg_out_opc(s, opc, r, rm, 0);
        if ((rm & 7) == TCG_REG_RSP) {
            tcg_out8(s, 0x84 | ((r & 7) << 3));
            tcg_out8(s, 0x24);
        } else {
            tcg_out8(s, 0x80 | ((r & 7) << 3) | (rm & 7));
        }
        tcg_out32(s, offset);
    }
}
317 
318 #if defined(CONFIG_SOFTMMU)
319 /* XXX: incomplete. index must be different from ESP */
tcg_out_modrm_offset2(TCGContext * s,int opc,int r,int rm,int index,int shift,tcg_target_long offset)320 static void tcg_out_modrm_offset2(TCGContext *s, int opc, int r, int rm,
321                                   int index, int shift,
322                                   tcg_target_long offset)
323 {
324     int mod;
325     if (rm == -1)
326         tcg_abort();
327     if (offset == 0 && (rm & 7) != TCG_REG_RBP) {
328         mod = 0;
329     } else if (offset == (int8_t)offset) {
330         mod = 0x40;
331     } else if (offset == (int32_t)offset) {
332         mod = 0x80;
333     } else {
334         tcg_abort();
335     }
336     if (index == -1) {
337         tcg_out_opc(s, opc, r, rm, 0);
338         if ((rm & 7) == TCG_REG_RSP) {
339             tcg_out8(s, mod | ((r & 7) << 3) | 0x04);
340             tcg_out8(s, 0x04 | (rm & 7));
341         } else {
342             tcg_out8(s, mod | ((r & 7) << 3) | (rm & 7));
343         }
344     } else {
345         tcg_out_opc(s, opc, r, rm, index);
346         tcg_out8(s, mod | ((r & 7) << 3) | 0x04);
347         tcg_out8(s, (shift << 6) | ((index & 7) << 3) | (rm & 7));
348     }
349     if (mod == 0x40) {
350         tcg_out8(s, offset);
351     } else if (mod == 0x80) {
352         tcg_out32(s, offset);
353     }
354 }
355 #endif
356 
/* 64-bit register-to-register move: movq arg -> ret. */
static inline void tcg_out_mov(TCGContext *s, int ret, int arg)
{
    tcg_out_modrm(s, 0x8b | P_REXW, ret, arg);
}
361 
/* Load the constant arg into register ret, using the shortest encoding:
   xor for zero, 32-bit mov (which zero-extends) when possible, movq with
   sign-extended imm32, or the full 10-byte movabs. */
static inline void tcg_out_movi(TCGContext *s, TCGType type,
                                int ret, tcg_target_long arg)
{
    if (arg == 0) {
        tcg_out_modrm(s, 0x01 | (ARITH_XOR << 3), ret, ret); /* xor r0,r0 */
    } else if (arg == (uint32_t)arg || type == TCG_TYPE_I32) {
        /* 32-bit mov; high 32 bits of the destination become zero */
        tcg_out_opc(s, 0xb8 + (ret & 7), 0, ret, 0);
        tcg_out32(s, arg);
    } else if (arg == (int32_t)arg) {
        /* movq with sign-extended 32-bit immediate */
        tcg_out_modrm(s, 0xc7 | P_REXW, 0, ret);
        tcg_out32(s, arg);
    } else {
        /* full 64-bit immediate (movabs), low word first */
        tcg_out_opc(s, (0xb8 + (ret & 7)) | P_REXW, 0, ret, 0);
        tcg_out32(s, arg);
        tcg_out32(s, arg >> 32);
    }
}
379 
/* Emit a call (call != 0) or jump to target: direct rel32 when the
   displacement fits in 32 bits, otherwise an indirect call/jump through
   the r10 scratch register. */
static void tcg_out_goto(TCGContext *s, int call, uint8_t *target)
{
    tcg_target_long diff = target - s->code_ptr - 5; /* rel32 insn is 5 bytes */

    if (diff == (int32_t)diff) {
        tcg_out8(s, call ? 0xe8 : 0xe9);
        tcg_out32(s, diff);
    } else {
        /* movabs target, %r10; then call/jmp *%r10 */
        tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_R10, (tcg_target_long) target);
        tcg_out_modrm(s, 0xff, call ? 2 : 4, TCG_REG_R10);
    }
}
393 
/* Load from [arg1 + arg2] into ret: movl for 32-bit, movq otherwise. */
static inline void tcg_out_ld(TCGContext *s, TCGType type, int ret,
                              int arg1, tcg_target_long arg2)
{
    int opc = 0x8b;

    if (type != TCG_TYPE_I32) {
        opc |= P_REXW;  /* 64-bit operand size */
    }
    tcg_out_modrm_offset(s, opc, ret, arg1, arg2);
}
402 
/* Store arg to [arg1 + arg2]: movl for 32-bit, movq otherwise. */
static inline void tcg_out_st(TCGContext *s, TCGType type, int arg,
                              int arg1, tcg_target_long arg2)
{
    int opc = 0x89;

    if (type != TCG_TYPE_I32) {
        opc |= P_REXW;  /* 64-bit operand size */
    }
    tcg_out_modrm_offset(s, opc, arg, arg1, arg2);
}
411 
/* Emit a 32-bit ALU operation c (an ARITH_* group value) with an
   immediate operand on register r0, picking the shortest encoding. */
static inline void tgen_arithi32(TCGContext *s, int c, int r0, int32_t val)
{
    if ((c == ARITH_ADD && val == 1) || (c == ARITH_SUB && val == -1)) {
        /* inc */
        tcg_out_modrm(s, 0xff, 0, r0);
    } else if ((c == ARITH_ADD && val == -1) || (c == ARITH_SUB && val == 1)) {
        /* dec */
        tcg_out_modrm(s, 0xff, 1, r0);
    } else if (val == (int8_t)val) {
        /* sign-extended 8-bit immediate form (0x83 /c ib) */
        tcg_out_modrm(s, 0x83, c, r0);
        tcg_out8(s, val);
    } else if (c == ARITH_AND && val == 0xffu) {
        /* movzbl */
        tcg_out_modrm(s, 0xb6 | P_EXT | P_REXB_RM, r0, r0);
    } else if (c == ARITH_AND && val == 0xffffu) {
        /* movzwl */
        tcg_out_modrm(s, 0xb7 | P_EXT, r0, r0);
    } else {
        /* full 32-bit immediate form (0x81 /c id) */
        tcg_out_modrm(s, 0x81, c, r0);
        tcg_out32(s, val);
    }
}
434 
/* Emit a 64-bit ALU operation c (an ARITH_* group value) with an
   immediate operand on register r0.  Aborts if the immediate fits in
   neither the sign-extended 8-bit nor the sign-extended 32-bit form. */
static inline void tgen_arithi64(TCGContext *s, int c, int r0, int64_t val)
{
    if ((c == ARITH_ADD && val == 1) || (c == ARITH_SUB && val == -1)) {
        /* inc */
        tcg_out_modrm(s, 0xff | P_REXW, 0, r0);
    } else if ((c == ARITH_ADD && val == -1) || (c == ARITH_SUB && val == 1)) {
        /* dec */
        tcg_out_modrm(s, 0xff | P_REXW, 1, r0);
    } else if (c == ARITH_AND && val == 0xffffffffu) {
        /* 32-bit mov zero extends */
        tcg_out_modrm(s, 0x8b, r0, r0);
    } else if (c == ARITH_AND && val == (uint32_t)val) {
        /* AND with no high bits set can use a 32-bit operation.  */
        tgen_arithi32(s, c, r0, (uint32_t)val);
    } else if (val == (int8_t)val) {
        /* sign-extended 8-bit immediate form */
        tcg_out_modrm(s, 0x83 | P_REXW, c, r0);
        tcg_out8(s, val);
    } else if (val == (int32_t)val) {
        /* sign-extended 32-bit immediate form */
        tcg_out_modrm(s, 0x81 | P_REXW, c, r0);
        tcg_out32(s, val);
    } else {
        /* no 64-bit immediate ALU form exists */
        tcg_abort();
    }
}
459 
/* Add the constant val to register reg (no code emitted when val == 0). */
static void tcg_out_addi(TCGContext *s, int reg, tcg_target_long val)
{
    if (val == 0) {
        return;
    }
    tgen_arithi64(s, ARITH_ADD, reg, val);
}
465 
/* Emit a jump to a TCG label: conditional when opc is a JCC_* code,
   unconditional when opc == -1 (JCC_JMP).  Uses the 2-byte short form
   when the target is known and in range, the long form otherwise;
   unresolved labels get the long form plus a relocation. */
static void tcg_out_jxx(TCGContext *s, int opc, int label_index)
{
    int32_t val, val1;
    TCGLabel *l = &s->labels[label_index];

    if (l->has_value) {
        /* displacement from the current output position */
        val = l->u.value - (tcg_target_long)s->code_ptr;
        val1 = val - 2;  /* short jump is 2 bytes long */
        if ((int8_t)val1 == val1) {
            if (opc == -1)
                tcg_out8(s, 0xeb);
            else
                tcg_out8(s, 0x70 + opc);
            tcg_out8(s, val1);
        } else {
            if (opc == -1) {
                tcg_out8(s, 0xe9);
                tcg_out32(s, val - 5);  /* jmp rel32 is 5 bytes */
            } else {
                tcg_out8(s, 0x0f);
                tcg_out8(s, 0x80 + opc);
                tcg_out32(s, val - 6);  /* 0f 8x rel32 is 6 bytes */
            }
        }
    } else {
        /* forward reference: emit the long form, record a relocation for
           the 4 displacement bytes (addend -4: rel. to insn end) */
        if (opc == -1) {
            tcg_out8(s, 0xe9);
        } else {
            tcg_out8(s, 0x0f);
            tcg_out8(s, 0x80 + opc);
        }
        tcg_out_reloc(s, s->code_ptr, R_386_PC32, label_index, -4);
        s->code_ptr += 4;
    }
}
501 
/* Emit a comparison of arg1 against arg2 (register, or a constant when
   const_arg2).  rexw selects 64-bit operand size.  Comparison against
   zero uses "test r,r"; other constants go through the ALU-immediate
   emitters. */
static void tcg_out_cmp(TCGContext *s, TCGArg arg1, TCGArg arg2,
                        int const_arg2, int rexw)
{
    if (!const_arg2) {
        /* cmp arg2, arg1 */
        tcg_out_modrm(s, 0x01 | (ARITH_CMP << 3) | rexw, arg2, arg1);
        return;
    }
    if (arg2 == 0) {
        /* test r, r */
        tcg_out_modrm(s, 0x85 | rexw, arg1, arg1);
    } else if (rexw) {
        tgen_arithi64(s, ARITH_CMP, arg1, arg2);
    } else {
        tgen_arithi32(s, ARITH_CMP, arg1, arg2);
    }
}
520 
/* Compare arg1 with arg2 and branch to label_index when cond holds. */
static void tcg_out_brcond(TCGContext *s, int cond,
                           TCGArg arg1, TCGArg arg2, int const_arg2,
                           int label_index, int rexw)
{
    tcg_out_cmp(s, arg1, arg2, const_arg2, rexw);
    tcg_out_jxx(s, tcg_cond_to_jcc[cond], label_index);
}
528 
/* Set dest to 1 if cond holds over (arg1, arg2), else 0. */
static void tcg_out_setcond(TCGContext *s, int cond, TCGArg dest,
                            TCGArg arg1, TCGArg arg2, int const_arg2, int rexw)
{
    tcg_out_cmp(s, arg1, arg2, const_arg2, rexw);
    /* setcc */
    tcg_out_modrm(s, 0x90 | tcg_cond_to_jcc[cond] | P_EXT | P_REXB_RM, 0, dest);
    /* setcc only writes the low byte; clear the rest of the register */
    tgen_arithi32(s, ARITH_AND, dest, 0xff);
}
537 
538 #if defined(CONFIG_SOFTMMU)
539 
540 #include "../../softmmu_defs.h"
541 
/* Slow-path MMU load helpers, indexed by log2 of the access size
   (presumably declared in softmmu_defs.h above — confirm). */
static void *qemu_ld_helpers[4] = {
    __ldb_mmu,
    __ldw_mmu,
    __ldl_mmu,
    __ldq_mmu,
};

/* Slow-path MMU store helpers, indexed by log2 of the access size. */
static void *qemu_st_helpers[4] = {
    __stb_mmu,
    __stw_mmu,
    __stl_mmu,
    __stq_mmu,
};
555 #endif
556 
/* Emit a guest memory load.  args = { data_reg, addr_reg, mem_index };
   opc bits 0-1 give log2 of the access size, bit 2 requests sign
   extension.  With CONFIG_SOFTMMU an inline TLB lookup is emitted, with
   a helper call on the slow path; otherwise the access is made directly
   through GUEST_BASE. */
static void tcg_out_qemu_ld(TCGContext *s, const TCGArg *args,
                            int opc)
{
    int addr_reg, data_reg, r0, r1, mem_index, s_bits, bswap, rexw;
    int32_t offset;
#if defined(CONFIG_SOFTMMU)
    uint8_t *label1_ptr, *label2_ptr;
#endif

    data_reg = *args++;
    addr_reg = *args++;
    mem_index = *args;
    s_bits = opc & 3;

    /* fixed scratch registers; rdi/rsi double as helper argument regs */
    r0 = TCG_REG_RDI;
    r1 = TCG_REG_RSI;

    /* rexw selects the operand size for guest-address arithmetic */
#if TARGET_LONG_BITS == 32
    rexw = 0;
#else
    rexw = P_REXW;
#endif
#if defined(CONFIG_SOFTMMU)
    /* mov */
    tcg_out_modrm(s, 0x8b | rexw, r1, addr_reg);

    /* mov */
    tcg_out_modrm(s, 0x8b | rexw, r0, addr_reg);

    /* r1 = TLB index-scaled page number */
    tcg_out_modrm(s, 0xc1 | rexw, 5, r1); /* shr $x, r1 */
    tcg_out8(s, TARGET_PAGE_BITS - CPU_TLB_ENTRY_BITS);

    /* r0 = page-aligned address (alignment bits kept for the check) */
    tcg_out_modrm(s, 0x81 | rexw, 4, r0); /* andl $x, r0 */
    tcg_out32(s, TARGET_PAGE_MASK | ((1 << s_bits) - 1));

    tcg_out_modrm(s, 0x81, 4, r1); /* andl $x, r1 */
    tcg_out32(s, (CPU_TLB_SIZE - 1) << CPU_TLB_ENTRY_BITS);

    /* lea offset(r1, env), r1 */
    tcg_out_modrm_offset2(s, 0x8d | P_REXW, r1, r1, TCG_AREG0, 0,
                          offsetof(CPUState, tlb_table[mem_index][0].addr_read));

    /* cmp 0(r1), r0 */
    tcg_out_modrm_offset(s, 0x3b | rexw, r0, r1, 0);

    /* mov */
    /* reload the unmasked address into r0 (= rdi, the helper's arg 0) */
    tcg_out_modrm(s, 0x8b | rexw, r0, addr_reg);

    /* je label1 */
    tcg_out8(s, 0x70 + JCC_JE);
    label1_ptr = s->code_ptr;
    s->code_ptr++;

    /* XXX: move that code at the end of the TB */
    /* TLB miss: call the load helper (address already in rdi) */
    tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_RSI, mem_index);
    tcg_out_goto(s, 1, qemu_ld_helpers[s_bits]);

    /* extend the helper's result (in rax) into data_reg */
    switch(opc) {
    case 0 | 4:
        /* movsbq */
        tcg_out_modrm(s, 0xbe | P_EXT | P_REXW, data_reg, TCG_REG_RAX);
        break;
    case 1 | 4:
        /* movswq */
        tcg_out_modrm(s, 0xbf | P_EXT | P_REXW, data_reg, TCG_REG_RAX);
        break;
    case 2 | 4:
        /* movslq */
        tcg_out_modrm(s, 0x63 | P_REXW, data_reg, TCG_REG_RAX);
        break;
    case 0:
        /* movzbq */
        tcg_out_modrm(s, 0xb6 | P_EXT | P_REXW, data_reg, TCG_REG_RAX);
        break;
    case 1:
        /* movzwq */
        tcg_out_modrm(s, 0xb7 | P_EXT | P_REXW, data_reg, TCG_REG_RAX);
        break;
    case 2:
    default:
        /* movl */
        tcg_out_modrm(s, 0x8b, data_reg, TCG_REG_RAX);
        break;
    case 3:
        tcg_out_mov(s, data_reg, TCG_REG_RAX);
        break;
    }

    /* jmp label2 */
    tcg_out8(s, 0xeb);
    label2_ptr = s->code_ptr;
    s->code_ptr++;

    /* label1: */
    /* TLB hit: back-patch the je displacement to land here */
    *label1_ptr = s->code_ptr - label1_ptr - 1;

    /* add x(r1), r0 */
    /* turn the guest address into a host address via the TLB addend */
    tcg_out_modrm_offset(s, 0x03 | P_REXW, r0, r1, offsetof(CPUTLBEntry, addend) -
                         offsetof(CPUTLBEntry, addr_read));
    offset = 0;
#else
    /* user-mode: address the guest memory directly through GUEST_BASE */
    if (GUEST_BASE == (int32_t)GUEST_BASE) {
        r0 = addr_reg;
        offset = GUEST_BASE;
    } else {
        offset = 0;
        /* movq $GUEST_BASE, r0 */
        tcg_out_opc(s, (0xb8 + (r0 & 7)) | P_REXW, 0, r0, 0);
        tcg_out32(s, GUEST_BASE);
        tcg_out32(s, GUEST_BASE >> 32);
        /* addq addr_reg, r0 */
        tcg_out_modrm(s, 0x01 | P_REXW, addr_reg, r0);
    }
#endif

#ifdef TARGET_WORDS_BIGENDIAN
    bswap = 1;
#else
    bswap = 0;
#endif
    /* fast-path load from [r0 + offset], with byte swap when the guest
       endianness differs from the host's */
    switch(opc) {
    case 0:
        /* movzbl */
        tcg_out_modrm_offset(s, 0xb6 | P_EXT, data_reg, r0, offset);
        break;
    case 0 | 4:
        /* movsbX */
        tcg_out_modrm_offset(s, 0xbe | P_EXT | rexw, data_reg, r0, offset);
        break;
    case 1:
        /* movzwl */
        tcg_out_modrm_offset(s, 0xb7 | P_EXT, data_reg, r0, offset);
        if (bswap) {
            /* rolw $8, data_reg */
            tcg_out8(s, 0x66);
            tcg_out_modrm(s, 0xc1, 0, data_reg);
            tcg_out8(s, 8);
        }
        break;
    case 1 | 4:
        if (bswap) {
            /* load zero-extended, swap, then sign-extend in place */
            /* movzwl */
            tcg_out_modrm_offset(s, 0xb7 | P_EXT, data_reg, r0, offset);
            /* rolw $8, data_reg */
            tcg_out8(s, 0x66);
            tcg_out_modrm(s, 0xc1, 0, data_reg);
            tcg_out8(s, 8);

            /* movswX data_reg, data_reg */
            tcg_out_modrm(s, 0xbf | P_EXT | rexw, data_reg, data_reg);
        } else {
            /* movswX */
            tcg_out_modrm_offset(s, 0xbf | P_EXT | rexw, data_reg, r0, offset);
        }
        break;
    case 2:
        /* movl (r0), data_reg */
        tcg_out_modrm_offset(s, 0x8b, data_reg, r0, offset);
        if (bswap) {
            /* bswap */
            tcg_out_opc(s, (0xc8 + (data_reg & 7)) | P_EXT, 0, data_reg, 0);
        }
        break;
    case 2 | 4:
        if (bswap) {
            /* movl (r0), data_reg */
            tcg_out_modrm_offset(s, 0x8b, data_reg, r0, offset);
            /* bswap */
            tcg_out_opc(s, (0xc8 + (data_reg & 7)) | P_EXT, 0, data_reg, 0);
            /* movslq */
            tcg_out_modrm(s, 0x63 | P_REXW, data_reg, data_reg);
        } else {
            /* movslq */
            tcg_out_modrm_offset(s, 0x63 | P_REXW, data_reg, r0, offset);
        }
        break;
    case 3:
        /* movq (r0), data_reg */
        tcg_out_modrm_offset(s, 0x8b | P_REXW, data_reg, r0, offset);
        if (bswap) {
            /* bswap */
            tcg_out_opc(s, (0xc8 + (data_reg & 7)) | P_EXT | P_REXW, 0, data_reg, 0);
        }
        break;
    default:
        tcg_abort();
    }

#if defined(CONFIG_SOFTMMU)
    /* label2: */
    /* back-patch the slow path's jmp to skip the fast-path code */
    *label2_ptr = s->code_ptr - label2_ptr - 1;
#endif
}
750 
/* Emit a guest memory store.  args = { data_reg, addr_reg, mem_index };
   opc gives log2 of the access size.  Mirrors tcg_out_qemu_ld: inline
   TLB lookup plus slow-path helper call under CONFIG_SOFTMMU, direct
   GUEST_BASE access otherwise. */
static void tcg_out_qemu_st(TCGContext *s, const TCGArg *args,
                            int opc)
{
    int addr_reg, data_reg, r0, r1, mem_index, s_bits, bswap, rexw;
    int32_t offset;
#if defined(CONFIG_SOFTMMU)
    uint8_t *label1_ptr, *label2_ptr;
#endif

    data_reg = *args++;
    addr_reg = *args++;
    mem_index = *args;

    /* stores have no sign-extension variants: opc is the size directly */
    s_bits = opc;

    /* fixed scratch registers; rdi/rsi double as helper argument regs */
    r0 = TCG_REG_RDI;
    r1 = TCG_REG_RSI;

    /* rexw selects the operand size for guest-address arithmetic */
#if TARGET_LONG_BITS == 32
    rexw = 0;
#else
    rexw = P_REXW;
#endif
#if defined(CONFIG_SOFTMMU)
    /* mov */
    tcg_out_modrm(s, 0x8b | rexw, r1, addr_reg);

    /* mov */
    tcg_out_modrm(s, 0x8b | rexw, r0, addr_reg);

    /* r1 = TLB index-scaled page number */
    tcg_out_modrm(s, 0xc1 | rexw, 5, r1); /* shr $x, r1 */
    tcg_out8(s, TARGET_PAGE_BITS - CPU_TLB_ENTRY_BITS);

    /* r0 = page-aligned address (alignment bits kept for the check) */
    tcg_out_modrm(s, 0x81 | rexw, 4, r0); /* andl $x, r0 */
    tcg_out32(s, TARGET_PAGE_MASK | ((1 << s_bits) - 1));

    tcg_out_modrm(s, 0x81, 4, r1); /* andl $x, r1 */
    tcg_out32(s, (CPU_TLB_SIZE - 1) << CPU_TLB_ENTRY_BITS);

    /* lea offset(r1, env), r1 */
    tcg_out_modrm_offset2(s, 0x8d | P_REXW, r1, r1, TCG_AREG0, 0,
                          offsetof(CPUState, tlb_table[mem_index][0].addr_write));

    /* cmp 0(r1), r0 */
    tcg_out_modrm_offset(s, 0x3b | rexw, r0, r1, 0);

    /* mov */
    /* reload the unmasked address into r0 (= rdi, the helper's arg 0) */
    tcg_out_modrm(s, 0x8b | rexw, r0, addr_reg);

    /* je label1 */
    tcg_out8(s, 0x70 + JCC_JE);
    label1_ptr = s->code_ptr;
    s->code_ptr++;

    /* XXX: move that code at the end of the TB */
    /* TLB miss: move the value into rsi (helper arg 1), sized to opc */
    switch(opc) {
    case 0:
        /* movzbl */
        tcg_out_modrm(s, 0xb6 | P_EXT | P_REXB_RM, TCG_REG_RSI, data_reg);
        break;
    case 1:
        /* movzwl */
        tcg_out_modrm(s, 0xb7 | P_EXT, TCG_REG_RSI, data_reg);
        break;
    case 2:
        /* movl */
        tcg_out_modrm(s, 0x8b, TCG_REG_RSI, data_reg);
        break;
    default:
    case 3:
        tcg_out_mov(s, TCG_REG_RSI, data_reg);
        break;
    }
    tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_RDX, mem_index);
    tcg_out_goto(s, 1, qemu_st_helpers[s_bits]);

    /* jmp label2 */
    tcg_out8(s, 0xeb);
    label2_ptr = s->code_ptr;
    s->code_ptr++;

    /* label1: */
    /* TLB hit: back-patch the je displacement to land here */
    *label1_ptr = s->code_ptr - label1_ptr - 1;

    /* add x(r1), r0 */
    /* turn the guest address into a host address via the TLB addend */
    tcg_out_modrm_offset(s, 0x03 | P_REXW, r0, r1, offsetof(CPUTLBEntry, addend) -
                         offsetof(CPUTLBEntry, addr_write));
    offset = 0;
#else
    /* user-mode: address the guest memory directly through GUEST_BASE */
    if (GUEST_BASE == (int32_t)GUEST_BASE) {
        r0 = addr_reg;
        offset = GUEST_BASE;
    } else {
        offset = 0;
        /* movq $GUEST_BASE, r0 */
        tcg_out_opc(s, (0xb8 + (r0 & 7)) | P_REXW, 0, r0, 0);
        tcg_out32(s, GUEST_BASE);
        tcg_out32(s, GUEST_BASE >> 32);
        /* addq addr_reg, r0 */
        tcg_out_modrm(s, 0x01 | P_REXW, addr_reg, r0);
    }
#endif

#ifdef TARGET_WORDS_BIGENDIAN
    bswap = 1;
#else
    bswap = 0;
#endif
    /* fast-path store to [r0 + offset]; byte swap goes through the r1
       scratch register so data_reg is left untouched */
    switch(opc) {
    case 0:
        /* movb */
        tcg_out_modrm_offset(s, 0x88 | P_REXB_R, data_reg, r0, offset);
        break;
    case 1:
        if (bswap) {
            tcg_out_modrm(s, 0x8b, r1, data_reg); /* movl */
            tcg_out8(s, 0x66); /* rolw $8, %ecx */
            tcg_out_modrm(s, 0xc1, 0, r1);
            tcg_out8(s, 8);
            data_reg = r1;
        }
        /* movw */
        tcg_out8(s, 0x66);
        tcg_out_modrm_offset(s, 0x89, data_reg, r0, offset);
        break;
    case 2:
        if (bswap) {
            tcg_out_modrm(s, 0x8b, r1, data_reg); /* movl */
            /* bswap data_reg */
            tcg_out_opc(s, (0xc8 + r1) | P_EXT, 0, r1, 0);
            data_reg = r1;
        }
        /* movl */
        tcg_out_modrm_offset(s, 0x89, data_reg, r0, offset);
        break;
    case 3:
        if (bswap) {
            tcg_out_mov(s, r1, data_reg);
            /* bswap data_reg */
            tcg_out_opc(s, (0xc8 + r1) | P_EXT | P_REXW, 0, r1, 0);
            data_reg = r1;
        }
        /* movq */
        tcg_out_modrm_offset(s, 0x89 | P_REXW, data_reg, r0, offset);
        break;
    default:
        tcg_abort();
    }

#if defined(CONFIG_SOFTMMU)
    /* label2: */
    /* back-patch the slow path's jmp to skip the fast-path code */
    *label2_ptr = s->code_ptr - label2_ptr - 1;
#endif
}
905 
/*
 * Translate a single TCG opcode into x86-64 machine code.
 *
 * @s:          TCG context holding the output code buffer
 * @opc:        the INDEX_op_* opcode to emit
 * @args:       opcode operands (registers, constants or label ids)
 * @const_args: non-zero entries mark the corresponding args[] slot as a
 *              constant value rather than a register number
 *
 * Any opcode not handled below aborts code generation.
 */
static inline void tcg_out_op(TCGContext *s, int opc, const TCGArg *args,
                              const int *const_args)
{
    int c;  /* ALU/shift sub-opcode selected before jumping to a shared emitter */

    switch(opc) {
    case INDEX_op_exit_tb:
        /* Put the return value in %rax and jump to the common epilogue. */
        tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_RAX, args[0]);
        tcg_out_goto(s, 0, tb_ret_addr);
        break;
    case INDEX_op_goto_tb:
        if (s->tb_jmp_offset) {
            /* direct jump method: emit "jmp rel32" with a zero
               displacement and record its offset so it can be patched
               once the destination TB is known */
            tcg_out8(s, 0xe9); /* jmp im */
            s->tb_jmp_offset[args[0]] = s->code_ptr - s->code_buf;
            tcg_out32(s, 0);
        } else {
            /* indirect jump method: jump through the tb_next[] slot */
            /* jmp Ev */
            tcg_out_modrm_offset(s, 0xff, 4, -1,
                                 (tcg_target_long)(s->tb_next +
                                                   args[0]));
        }
        s->tb_next_offset[args[0]] = s->code_ptr - s->code_buf;
        break;
    case INDEX_op_call:
        if (const_args[0]) {
            /* direct call to an absolute address */
            tcg_out_goto(s, 1, (void *) args[0]);
        } else {
            /* call *reg (0xff /2) */
            tcg_out_modrm(s, 0xff, 2, args[0]);
        }
        break;
    case INDEX_op_jmp:
        if (const_args[0]) {
            tcg_out_goto(s, 0, (void *) args[0]);
        } else {
            /* jmp *reg (0xff /4) */
            tcg_out_modrm(s, 0xff, 4, args[0]);
        }
        break;
    case INDEX_op_br:
        tcg_out_jxx(s, JCC_JMP, args[0]);
        break;
    case INDEX_op_movi_i32:
        tcg_out_movi(s, TCG_TYPE_I32, args[0], (uint32_t)args[1]);
        break;
    case INDEX_op_movi_i64:
        tcg_out_movi(s, TCG_TYPE_I64, args[0], args[1]);
        break;

    /* Host loads: args[0] = dest reg, args[1] = base reg, args[2] = offset. */
    case INDEX_op_ld8u_i32:
    case INDEX_op_ld8u_i64:
        /* movzbl */
        tcg_out_modrm_offset(s, 0xb6 | P_EXT, args[0], args[1], args[2]);
        break;
    case INDEX_op_ld8s_i32:
        /* movsbl */
        tcg_out_modrm_offset(s, 0xbe | P_EXT, args[0], args[1], args[2]);
        break;
    case INDEX_op_ld8s_i64:
        /* movsbq */
        tcg_out_modrm_offset(s, 0xbe | P_EXT | P_REXW, args[0], args[1], args[2]);
        break;
    case INDEX_op_ld16u_i32:
    case INDEX_op_ld16u_i64:
        /* movzwl */
        tcg_out_modrm_offset(s, 0xb7 | P_EXT, args[0], args[1], args[2]);
        break;
    case INDEX_op_ld16s_i32:
        /* movswl */
        tcg_out_modrm_offset(s, 0xbf | P_EXT, args[0], args[1], args[2]);
        break;
    case INDEX_op_ld16s_i64:
        /* movswq */
        tcg_out_modrm_offset(s, 0xbf | P_EXT | P_REXW, args[0], args[1], args[2]);
        break;
    case INDEX_op_ld_i32:
    case INDEX_op_ld32u_i64:
        /* movl (a 32-bit load zero-extends into the 64-bit register) */
        tcg_out_modrm_offset(s, 0x8b, args[0], args[1], args[2]);
        break;
    case INDEX_op_ld32s_i64:
        /* movslq */
        tcg_out_modrm_offset(s, 0x63 | P_REXW, args[0], args[1], args[2]);
        break;
    case INDEX_op_ld_i64:
        /* movq */
        tcg_out_modrm_offset(s, 0x8b | P_REXW, args[0], args[1], args[2]);
        break;

    /* Host stores: args[0] = source reg, args[1] = base reg, args[2] = offset. */
    case INDEX_op_st8_i32:
    case INDEX_op_st8_i64:
        /* movb */
        tcg_out_modrm_offset(s, 0x88 | P_REXB_R, args[0], args[1], args[2]);
        break;
    case INDEX_op_st16_i32:
    case INDEX_op_st16_i64:
        /* movw (0x66 operand-size prefix selects the 16-bit form) */
        tcg_out8(s, 0x66);
        tcg_out_modrm_offset(s, 0x89, args[0], args[1], args[2]);
        break;
    case INDEX_op_st_i32:
    case INDEX_op_st32_i64:
        /* movl */
        tcg_out_modrm_offset(s, 0x89, args[0], args[1], args[2]);
        break;
    case INDEX_op_st_i64:
        /* movq */
        tcg_out_modrm_offset(s, 0x89 | P_REXW, args[0], args[1], args[2]);
        break;

    /* 32-bit ALU ops.  The "0" operand constraint guarantees that the
       destination args[0] is also the first source; args[2] is the second
       source, either a register or (const_args[2]) an immediate. */
    case INDEX_op_sub_i32:
        c = ARITH_SUB;
        goto gen_arith32;
    case INDEX_op_and_i32:
        c = ARITH_AND;
        goto gen_arith32;
    case INDEX_op_or_i32:
        c = ARITH_OR;
        goto gen_arith32;
    case INDEX_op_xor_i32:
        c = ARITH_XOR;
        goto gen_arith32;
    case INDEX_op_add_i32:
        c = ARITH_ADD;
    gen_arith32:
        if (const_args[2]) {
            tgen_arithi32(s, c, args[0], args[2]);
        } else {
            /* 0x01 | (c << 3) selects the "op Ev,Gv" encoding of each
               ALU group member (add/or/and/sub/xor) */
            tcg_out_modrm(s, 0x01 | (c << 3), args[2], args[0]);
        }
        break;

    /* 64-bit ALU ops: same scheme as above plus a REX.W prefix. */
    case INDEX_op_sub_i64:
        c = ARITH_SUB;
        goto gen_arith64;
    case INDEX_op_and_i64:
        c = ARITH_AND;
        goto gen_arith64;
    case INDEX_op_or_i64:
        c = ARITH_OR;
        goto gen_arith64;
    case INDEX_op_xor_i64:
        c = ARITH_XOR;
        goto gen_arith64;
    case INDEX_op_add_i64:
        c = ARITH_ADD;
    gen_arith64:
        if (const_args[2]) {
            tgen_arithi64(s, c, args[0], args[2]);
        } else {
            tcg_out_modrm(s, 0x01 | (c << 3) | P_REXW, args[2], args[0]);
        }
        break;

    case INDEX_op_mul_i32:
        if (const_args[2]) {
            int32_t val;
            val = args[2];
            if (val == (int8_t)val) {
                /* imul Gv,Ev,Ib: short form for 8-bit immediates */
                tcg_out_modrm(s, 0x6b, args[0], args[0]);
                tcg_out8(s, val);
            } else {
                /* imul Gv,Ev,Iz */
                tcg_out_modrm(s, 0x69, args[0], args[0]);
                tcg_out32(s, val);
            }
        } else {
            /* imul Gv,Ev (0F AF) */
            tcg_out_modrm(s, 0xaf | P_EXT, args[0], args[2]);
        }
        break;
    case INDEX_op_mul_i64:
        if (const_args[2]) {
            int32_t val;
            val = args[2];
            if (val == (int8_t)val) {
                tcg_out_modrm(s, 0x6b | P_REXW, args[0], args[0]);
                tcg_out8(s, val);
            } else {
                tcg_out_modrm(s, 0x69 | P_REXW, args[0], args[0]);
                tcg_out32(s, val);
            }
        } else {
            tcg_out_modrm(s, 0xaf | P_EXT | P_REXW, args[0], args[2]);
        }
        break;
    /* Division: idiv/div (0xf7 /7 and /6) with implicit rax:rdx operands
       pinned by the "a"/"d" constraints; args[4] is the divisor. */
    case INDEX_op_div2_i32:
        tcg_out_modrm(s, 0xf7, 7, args[4]);
        break;
    case INDEX_op_divu2_i32:
        tcg_out_modrm(s, 0xf7, 6, args[4]);
        break;
    case INDEX_op_div2_i64:
        tcg_out_modrm(s, 0xf7 | P_REXW, 7, args[4]);
        break;
    case INDEX_op_divu2_i64:
        tcg_out_modrm(s, 0xf7 | P_REXW, 6, args[4]);
        break;

    /* Shifts/rotates: 0xd1 = by 1, 0xc1 = by imm8, 0xd3 = by %cl
       (the variable count is pinned to %rcx by the "c" constraint). */
    case INDEX_op_shl_i32:
        c = SHIFT_SHL;
    gen_shift32:
        if (const_args[2]) {
            if (args[2] == 1) {
                tcg_out_modrm(s, 0xd1, c, args[0]);
            } else {
                tcg_out_modrm(s, 0xc1, c, args[0]);
                tcg_out8(s, args[2]);
            }
        } else {
            tcg_out_modrm(s, 0xd3, c, args[0]);
        }
        break;
    case INDEX_op_shr_i32:
        c = SHIFT_SHR;
        goto gen_shift32;
    case INDEX_op_sar_i32:
        c = SHIFT_SAR;
        goto gen_shift32;
    case INDEX_op_rotl_i32:
        c = SHIFT_ROL;
        goto gen_shift32;
    case INDEX_op_rotr_i32:
        c = SHIFT_ROR;
        goto gen_shift32;

    case INDEX_op_shl_i64:
        c = SHIFT_SHL;
    gen_shift64:
        if (const_args[2]) {
            if (args[2] == 1) {
                tcg_out_modrm(s, 0xd1 | P_REXW, c, args[0]);
            } else {
                tcg_out_modrm(s, 0xc1 | P_REXW, c, args[0]);
                tcg_out8(s, args[2]);
            }
        } else {
            tcg_out_modrm(s, 0xd3 | P_REXW, c, args[0]);
        }
        break;
    case INDEX_op_shr_i64:
        c = SHIFT_SHR;
        goto gen_shift64;
    case INDEX_op_sar_i64:
        c = SHIFT_SAR;
        goto gen_shift64;
    case INDEX_op_rotl_i64:
        c = SHIFT_ROL;
        goto gen_shift64;
    case INDEX_op_rotr_i64:
        c = SHIFT_ROR;
        goto gen_shift64;

    case INDEX_op_brcond_i32:
        tcg_out_brcond(s, args[2], args[0], args[1], const_args[1],
                       args[3], 0);
        break;
    case INDEX_op_brcond_i64:
        tcg_out_brcond(s, args[2], args[0], args[1], const_args[1],
                       args[3], P_REXW);
        break;

    case INDEX_op_bswap16_i32:
    case INDEX_op_bswap16_i64:
        /* swap the low 16 bits with "rolw $8, reg" */
        tcg_out8(s, 0x66);
        tcg_out_modrm(s, 0xc1, SHIFT_ROL, args[0]);
        tcg_out8(s, 8);
        break;
    case INDEX_op_bswap32_i32:
    case INDEX_op_bswap32_i64:
        /* bswap reg (0F C8+r); the low 3 register bits live in the opcode
           byte, tcg_out_opc gets the full number for the REX prefix */
        tcg_out_opc(s, (0xc8 + (args[0] & 7)) | P_EXT, 0, args[0], 0);
        break;
    case INDEX_op_bswap64_i64:
        tcg_out_opc(s, (0xc8 + (args[0] & 7)) | P_EXT | P_REXW, 0, args[0], 0);
        break;

    case INDEX_op_neg_i32:
        /* negl (0xf7 /3) */
        tcg_out_modrm(s, 0xf7, 3, args[0]);
        break;
    case INDEX_op_neg_i64:
        /* negq */
        tcg_out_modrm(s, 0xf7 | P_REXW, 3, args[0]);
        break;

    case INDEX_op_not_i32:
        /* notl (0xf7 /2) */
        tcg_out_modrm(s, 0xf7, 2, args[0]);
        break;
    case INDEX_op_not_i64:
        /* notq */
        tcg_out_modrm(s, 0xf7 | P_REXW, 2, args[0]);
        break;

    case INDEX_op_ext8s_i32:
        /* movsbl */
        tcg_out_modrm(s, 0xbe | P_EXT | P_REXB_RM, args[0], args[1]);
        break;
    case INDEX_op_ext16s_i32:
        /* movswl */
        tcg_out_modrm(s, 0xbf | P_EXT, args[0], args[1]);
        break;
    case INDEX_op_ext8s_i64:
        /* movsbq */
        tcg_out_modrm(s, 0xbe | P_EXT | P_REXW, args[0], args[1]);
        break;
    case INDEX_op_ext16s_i64:
        /* movswq */
        tcg_out_modrm(s, 0xbf | P_EXT | P_REXW, args[0], args[1]);
        break;
    case INDEX_op_ext32s_i64:
        /* movslq */
        tcg_out_modrm(s, 0x63 | P_REXW, args[0], args[1]);
        break;
    case INDEX_op_ext8u_i32:
    case INDEX_op_ext8u_i64:
        /* movzbl */
        tcg_out_modrm(s, 0xb6 | P_EXT | P_REXB_RM, args[0], args[1]);
        break;
    case INDEX_op_ext16u_i32:
    case INDEX_op_ext16u_i64:
        /* movzwl */
        tcg_out_modrm(s, 0xb7 | P_EXT, args[0], args[1]);
        break;
    case INDEX_op_ext32u_i64:
        /* movl: a 32-bit register move implicitly zero-extends to 64 bits */
        tcg_out_modrm(s, 0x8b, args[0], args[1]);
        break;

    case INDEX_op_setcond_i32:
        tcg_out_setcond(s, args[3], args[0], args[1], args[2],
                        const_args[2], 0);
        break;
    case INDEX_op_setcond_i64:
        tcg_out_setcond(s, args[3], args[0], args[1], args[2],
                        const_args[2], P_REXW);
        break;

    /* Guest memory accesses.  The size code's low two bits are
       log2(access size); bit 2 requests sign extension. */
    case INDEX_op_qemu_ld8u:
        tcg_out_qemu_ld(s, args, 0);
        break;
    case INDEX_op_qemu_ld8s:
        tcg_out_qemu_ld(s, args, 0 | 4);
        break;
    case INDEX_op_qemu_ld16u:
        tcg_out_qemu_ld(s, args, 1);
        break;
    case INDEX_op_qemu_ld16s:
        tcg_out_qemu_ld(s, args, 1 | 4);
        break;
    case INDEX_op_qemu_ld32u:
        tcg_out_qemu_ld(s, args, 2);
        break;
    case INDEX_op_qemu_ld32s:
        tcg_out_qemu_ld(s, args, 2 | 4);
        break;
    case INDEX_op_qemu_ld64:
        tcg_out_qemu_ld(s, args, 3);
        break;

    case INDEX_op_qemu_st8:
        tcg_out_qemu_st(s, args, 0);
        break;
    case INDEX_op_qemu_st16:
        tcg_out_qemu_st(s, args, 1);
        break;
    case INDEX_op_qemu_st32:
        tcg_out_qemu_st(s, args, 2);
        break;
    case INDEX_op_qemu_st64:
        tcg_out_qemu_st(s, args, 3);
        break;

    default:
        tcg_abort();
    }
}
1268 
1269 static int tcg_target_callee_save_regs[] = {
1270     TCG_REG_RBP,
1271     TCG_REG_RBX,
1272     TCG_REG_R12,
1273     TCG_REG_R13,
1274     /*    TCG_REG_R14, */ /* currently used for the global env, so no
1275                              need to save */
1276     TCG_REG_R15,
1277 };
1278 
/* Emit a one-byte "push reg".  The register's low three bits are folded
   into the opcode byte (0x50+r); the full register number is also handed
   to tcg_out_opc so it can add a REX.B prefix for %r8-%r15 if needed. */
static inline void tcg_out_push(TCGContext *s, int reg)
{
    int opcode = 0x50 + (reg & 7);

    tcg_out_opc(s, opcode, 0, reg, 0);
}
1283 
/* Emit a one-byte "pop reg" (0x58+r); mirrors tcg_out_push above. */
static inline void tcg_out_pop(TCGContext *s, int reg)
{
    int opcode = 0x58 + (reg & 7);

    tcg_out_opc(s, opcode, 0, reg, 0);
}
1288 
/* Generate global QEMU prologue and epilogue code.
   The prologue saves the callee-saved registers, aligns the stack and
   jumps into the translated block whose address arrives as the first
   C argument (%rdi under the SysV AMD64 ABI).  exit_tb lands on the
   epilogue at tb_ret_addr, which undoes the prologue and returns. */
void tcg_target_qemu_prologue(TCGContext *s)
{
    int i, frame_size, push_size, stack_addend;

    /* TB prologue */
    /* save all callee saved registers */
    for(i = 0; i < ARRAY_SIZE(tcg_target_callee_save_regs); i++) {
        tcg_out_push(s, tcg_target_callee_save_regs[i]);

    }
    /* reserve some stack space */
    /* the extra 8 presumably accounts for the caller-pushed return
       address -- TODO confirm */
    push_size = 8 + ARRAY_SIZE(tcg_target_callee_save_regs) * 8;
    frame_size = push_size + TCG_STATIC_CALL_ARGS_SIZE;
    /* round the frame up to the required stack alignment */
    frame_size = (frame_size + TCG_TARGET_STACK_ALIGN - 1) &
        ~(TCG_TARGET_STACK_ALIGN - 1);
    stack_addend = frame_size - push_size;
    tcg_out_addi(s, TCG_REG_RSP, -stack_addend);

    /* enter the translated block: its address is the first argument */
    tcg_out_modrm(s, 0xff, 4, TCG_REG_RDI); /* jmp *%rdi */

    /* TB epilogue */
    /* INDEX_op_exit_tb jumps here (see tcg_out_op) */
    tb_ret_addr = s->code_ptr;
    tcg_out_addi(s, TCG_REG_RSP, stack_addend);
    /* restore callee-saved registers in reverse order of the pushes */
    for(i = ARRAY_SIZE(tcg_target_callee_save_regs) - 1; i >= 0; i--) {
        tcg_out_pop(s, tcg_target_callee_save_regs[i]);
    }
    tcg_out8(s, 0xc3); /* ret */
}
1318 
/*
 * Operand constraints for every opcode this backend implements, consumed
 * by the TCG register allocator.
 *
 * NOTE(review): from their use here, the constraint letters appear to be:
 * "r" = any register, "i" = any immediate, "0"/"1" = must alias output
 * operand 0/1, "L" = register usable by the qemu_ld/st slow path, and
 * "a"/"d"/"c" pin %rax/%rdx/%rcx (div2 uses rax:rdx implicitly, variable
 * shift counts live in %cl -- see tcg_out_op).  "e" and "Z" look like
 * restricted 64-bit immediates (sign-/zero-extended 32-bit) -- confirm
 * against this target's constraint parser.
 */
static const TCGTargetOpDef x86_64_op_defs[] = {
    { INDEX_op_exit_tb, { } },
    { INDEX_op_goto_tb, { } },
    { INDEX_op_call, { "ri" } }, /* XXX: might need a specific constant constraint */
    { INDEX_op_jmp, { "ri" } }, /* XXX: might need a specific constant constraint */
    { INDEX_op_br, { } },

    { INDEX_op_mov_i32, { "r", "r" } },
    { INDEX_op_movi_i32, { "r" } },
    { INDEX_op_ld8u_i32, { "r", "r" } },
    { INDEX_op_ld8s_i32, { "r", "r" } },
    { INDEX_op_ld16u_i32, { "r", "r" } },
    { INDEX_op_ld16s_i32, { "r", "r" } },
    { INDEX_op_ld_i32, { "r", "r" } },
    { INDEX_op_st8_i32, { "r", "r" } },
    { INDEX_op_st16_i32, { "r", "r" } },
    { INDEX_op_st_i32, { "r", "r" } },

    { INDEX_op_add_i32, { "r", "0", "ri" } },
    { INDEX_op_mul_i32, { "r", "0", "ri" } },
    { INDEX_op_div2_i32, { "a", "d", "0", "1", "r" } },
    { INDEX_op_divu2_i32, { "a", "d", "0", "1", "r" } },
    { INDEX_op_sub_i32, { "r", "0", "ri" } },
    { INDEX_op_and_i32, { "r", "0", "ri" } },
    { INDEX_op_or_i32, { "r", "0", "ri" } },
    { INDEX_op_xor_i32, { "r", "0", "ri" } },

    { INDEX_op_shl_i32, { "r", "0", "ci" } },
    { INDEX_op_shr_i32, { "r", "0", "ci" } },
    { INDEX_op_sar_i32, { "r", "0", "ci" } },
    { INDEX_op_rotl_i32, { "r", "0", "ci" } },
    { INDEX_op_rotr_i32, { "r", "0", "ci" } },

    { INDEX_op_brcond_i32, { "r", "ri" } },

    { INDEX_op_mov_i64, { "r", "r" } },
    { INDEX_op_movi_i64, { "r" } },
    { INDEX_op_ld8u_i64, { "r", "r" } },
    { INDEX_op_ld8s_i64, { "r", "r" } },
    { INDEX_op_ld16u_i64, { "r", "r" } },
    { INDEX_op_ld16s_i64, { "r", "r" } },
    { INDEX_op_ld32u_i64, { "r", "r" } },
    { INDEX_op_ld32s_i64, { "r", "r" } },
    { INDEX_op_ld_i64, { "r", "r" } },
    { INDEX_op_st8_i64, { "r", "r" } },
    { INDEX_op_st16_i64, { "r", "r" } },
    { INDEX_op_st32_i64, { "r", "r" } },
    { INDEX_op_st_i64, { "r", "r" } },

    { INDEX_op_add_i64, { "r", "0", "re" } },
    { INDEX_op_mul_i64, { "r", "0", "re" } },
    { INDEX_op_div2_i64, { "a", "d", "0", "1", "r" } },
    { INDEX_op_divu2_i64, { "a", "d", "0", "1", "r" } },
    { INDEX_op_sub_i64, { "r", "0", "re" } },
    { INDEX_op_and_i64, { "r", "0", "reZ" } },
    { INDEX_op_or_i64, { "r", "0", "re" } },
    { INDEX_op_xor_i64, { "r", "0", "re" } },

    { INDEX_op_shl_i64, { "r", "0", "ci" } },
    { INDEX_op_shr_i64, { "r", "0", "ci" } },
    { INDEX_op_sar_i64, { "r", "0", "ci" } },
    { INDEX_op_rotl_i64, { "r", "0", "ci" } },
    { INDEX_op_rotr_i64, { "r", "0", "ci" } },

    { INDEX_op_brcond_i64, { "r", "re" } },

    { INDEX_op_bswap16_i32, { "r", "0" } },
    { INDEX_op_bswap16_i64, { "r", "0" } },
    { INDEX_op_bswap32_i32, { "r", "0" } },
    { INDEX_op_bswap32_i64, { "r", "0" } },
    { INDEX_op_bswap64_i64, { "r", "0" } },

    { INDEX_op_neg_i32, { "r", "0" } },
    { INDEX_op_neg_i64, { "r", "0" } },

    { INDEX_op_not_i32, { "r", "0" } },
    { INDEX_op_not_i64, { "r", "0" } },

    { INDEX_op_ext8s_i32, { "r", "r"} },
    { INDEX_op_ext16s_i32, { "r", "r"} },
    { INDEX_op_ext8s_i64, { "r", "r"} },
    { INDEX_op_ext16s_i64, { "r", "r"} },
    { INDEX_op_ext32s_i64, { "r", "r"} },
    { INDEX_op_ext8u_i32, { "r", "r"} },
    { INDEX_op_ext16u_i32, { "r", "r"} },
    { INDEX_op_ext8u_i64, { "r", "r"} },
    { INDEX_op_ext16u_i64, { "r", "r"} },
    { INDEX_op_ext32u_i64, { "r", "r"} },

    { INDEX_op_setcond_i32, { "r", "r", "ri" } },
    { INDEX_op_setcond_i64, { "r", "r", "re" } },

    { INDEX_op_qemu_ld8u, { "r", "L" } },
    { INDEX_op_qemu_ld8s, { "r", "L" } },
    { INDEX_op_qemu_ld16u, { "r", "L" } },
    { INDEX_op_qemu_ld16s, { "r", "L" } },
    { INDEX_op_qemu_ld32u, { "r", "L" } },
    { INDEX_op_qemu_ld32s, { "r", "L" } },
    { INDEX_op_qemu_ld64, { "r", "L" } },

    { INDEX_op_qemu_st8, { "L", "L" } },
    { INDEX_op_qemu_st16, { "L", "L" } },
    { INDEX_op_qemu_st32, { "L", "L" } },
    { INDEX_op_qemu_st64, { "L", "L" } },

    { -1 }, /* end-of-table marker */
};
1426 
/* Initialize the x86-64 backend: available/clobbered register sets,
   reserved registers, and the operand-constraint table above. */
void tcg_target_init(TCGContext *s)
{
    unsigned int clobbers;

    /* fail safe: the TLB entry size must match the compile-time constant */
    if ((1 << CPU_TLB_ENTRY_BITS) != sizeof(CPUTLBEntry)) {
        tcg_abort();
    }

    /* all sixteen general-purpose registers hold 32- and 64-bit values */
    tcg_regset_set32(tcg_target_available_regs[TCG_TYPE_I32], 0, 0xffff);
    tcg_regset_set32(tcg_target_available_regs[TCG_TYPE_I64], 0, 0xffff);

    /* registers not preserved across a function call */
    clobbers = (1 << TCG_REG_RAX)
             | (1 << TCG_REG_RCX)
             | (1 << TCG_REG_RDX)
             | (1 << TCG_REG_RSI)
             | (1 << TCG_REG_RDI)
             | (1 << TCG_REG_R8)
             | (1 << TCG_REG_R9)
             | (1 << TCG_REG_R10)
             | (1 << TCG_REG_R11);
    tcg_regset_set32(tcg_target_call_clobber_regs, 0, clobbers);

    /* the stack pointer is never handed to the register allocator */
    tcg_regset_clear(s->reserved_regs);
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_RSP);

    tcg_add_target_add_op_defs(x86_64_op_defs);
}
1451