/*
 * i386 translation
 *
 * Copyright (c) 2003 Fabrice Bellard
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */
#include <stdarg.h>
#include <stdlib.h>
#include <stdio.h>
#include <string.h>
#include <inttypes.h>
#include <signal.h>

#include "cpu.h"
#include "exec-all.h"
#include "disas.h"
#include "tcg-op.h"

#include "helper.h"
#define GEN_HELPER 1
#include "helper.h"
#include "hax.h"

#define PREFIX_REPZ 0x01
#define PREFIX_REPNZ 0x02
#define PREFIX_LOCK 0x04
#define PREFIX_DATA 0x08
#define PREFIX_ADR 0x10

#ifdef TARGET_X86_64
#define X86_64_ONLY(x) x
#define X86_64_DEF(...) __VA_ARGS__
#define CODE64(s) ((s)->code64)
#define REX_X(s) ((s)->rex_x)
#define REX_B(s) ((s)->rex_b)
/* XXX: gcc generates push/pop in some opcodes, so we cannot use them */
#if 1
#define BUGGY_64(x) NULL
#endif
#else
#define X86_64_ONLY(x) NULL
#define X86_64_DEF(...)
#define CODE64(s) 0
#define REX_X(s) 0
#define REX_B(s) 0
#endif
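
/*
 * Note on X86_64_DEF(): it splices its arguments into an expression only
 * when TARGET_X86_64 is defined, so a test such as the one used by
 * gen_op_mov_reg_v() below,
 *
 *     if (reg < 4 X86_64_DEF( || reg >= 8 || x86_64_hregs))
 *
 * reads "reg < 4" on 32-bit targets and gains the REX byte-register
 * clauses on 64-bit targets.
 */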

//#define MACRO_TEST   1

/* global register indexes */
static TCGv_ptr cpu_env;
static TCGv cpu_A0, cpu_cc_src, cpu_cc_dst, cpu_cc_tmp;
static TCGv_i32 cpu_cc_op;
/* local temps */
static TCGv cpu_T[2], cpu_T3;
/* local register indexes (only used inside old micro ops) */
static TCGv cpu_tmp0, cpu_tmp4;
static TCGv_ptr cpu_ptr0, cpu_ptr1;
static TCGv_i32 cpu_tmp2_i32, cpu_tmp3_i32;
static TCGv_i64 cpu_tmp1_i64;
static TCGv cpu_tmp5, cpu_tmp6;

#include "gen-icount.h"

#ifdef TARGET_X86_64
static int x86_64_hregs;
#endif

typedef struct DisasContext {
    /* current insn context */
    int override; /* -1 if no override */
    int prefix;
    int aflag, dflag;
    target_ulong pc; /* pc = eip + cs_base */
    int is_jmp; /* 1 = means jump (stop translation), 2 means CPU
                   static state change (stop translation) */
    /* current block context */
    target_ulong cs_base; /* base of CS segment */
    int pe;     /* protected mode */
    int code32; /* 32 bit code segment */
#ifdef TARGET_X86_64
    int lma;    /* long mode active */
    int code64; /* 64 bit code segment */
    int rex_x, rex_b;
#endif
    int ss32;   /* 32 bit stack segment */
    int cc_op;  /* current CC operation */
    int addseg; /* non zero if either DS/ES/SS have a non zero base */
    int f_st;   /* currently unused */
    int vm86;   /* vm86 mode */
    int cpl;
    int iopl;
    int tf;     /* TF cpu flag */
    int singlestep_enabled; /* "hardware" single step enabled */
    int jmp_opt; /* use direct block chaining for direct jumps */
    int mem_index; /* select memory access functions */
    uint64_t flags; /* all execution flags */
    struct TranslationBlock *tb;
    int popl_esp_hack; /* for correct popl with esp base handling */
    int rip_offset; /* only used in x86_64, but left for simplicity */
    int cpuid_features;
    int cpuid_ext_features;
    int cpuid_ext2_features;
    int cpuid_ext3_features;
} DisasContext;

static void gen_eob(DisasContext *s);
static void gen_jmp(DisasContext *s, target_ulong eip);
static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num);

/* i386 arith/logic operations */
enum {
    OP_ADDL,
    OP_ORL,
    OP_ADCL,
    OP_SBBL,
    OP_ANDL,
    OP_SUBL,
    OP_XORL,
    OP_CMPL,
};

/* i386 shift ops */
enum {
    OP_ROL,
    OP_ROR,
    OP_RCL,
    OP_RCR,
    OP_SHL,
    OP_SHR,
    OP_SHL1, /* undocumented */
    OP_SAR = 7,
};

enum {
    JCC_O,
    JCC_B,
    JCC_Z,
    JCC_BE,
    JCC_S,
    JCC_P,
    JCC_L,
    JCC_LE,
};

/* operand size */
enum {
    OT_BYTE = 0,
    OT_WORD,
    OT_LONG,
    OT_QUAD,
};

enum {
    /* I386 int registers */
    OR_EAX, /* MUST be even numbered */
    OR_ECX,
    OR_EDX,
    OR_EBX,
    OR_ESP,
    OR_EBP,
    OR_ESI,
    OR_EDI,

    OR_TMP0 = 16, /* temporary operand register */
    OR_TMP1,
    OR_A0, /* temporary register used when doing address evaluation */
};

static inline void gen_op_movl_T0_0(void)
{
    tcg_gen_movi_tl(cpu_T[0], 0);
}

static inline void gen_op_movl_T0_im(int32_t val)
{
    tcg_gen_movi_tl(cpu_T[0], val);
}

static inline void gen_op_movl_T0_imu(uint32_t val)
{
    tcg_gen_movi_tl(cpu_T[0], val);
}

static inline void gen_op_movl_T1_im(int32_t val)
{
    tcg_gen_movi_tl(cpu_T[1], val);
}

static inline void gen_op_movl_T1_imu(uint32_t val)
{
    tcg_gen_movi_tl(cpu_T[1], val);
}

static inline void gen_op_movl_A0_im(uint32_t val)
{
    tcg_gen_movi_tl(cpu_A0, val);
}

#ifdef TARGET_X86_64
static inline void gen_op_movq_A0_im(int64_t val)
{
    tcg_gen_movi_tl(cpu_A0, val);
}
#endif

static inline void gen_movtl_T0_im(target_ulong val)
{
    tcg_gen_movi_tl(cpu_T[0], val);
}

static inline void gen_movtl_T1_im(target_ulong val)
{
    tcg_gen_movi_tl(cpu_T[1], val);
}

static inline void gen_op_andl_T0_ffff(void)
{
    tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xffff);
}

static inline void gen_op_andl_T0_im(uint32_t val)
{
    tcg_gen_andi_tl(cpu_T[0], cpu_T[0], val);
}

static inline void gen_op_movl_T0_T1(void)
{
    tcg_gen_mov_tl(cpu_T[0], cpu_T[1]);
}

static inline void gen_op_andl_A0_ffff(void)
{
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffff);
}

#ifdef TARGET_X86_64

#define NB_OP_SIZES 4

#else /* !TARGET_X86_64 */

#define NB_OP_SIZES 3

#endif /* !TARGET_X86_64 */

#if defined(WORDS_BIGENDIAN)
#define REG_B_OFFSET (sizeof(target_ulong) - 1)
#define REG_H_OFFSET (sizeof(target_ulong) - 2)
#define REG_W_OFFSET (sizeof(target_ulong) - 2)
#define REG_L_OFFSET (sizeof(target_ulong) - 4)
#define REG_LH_OFFSET (sizeof(target_ulong) - 8)
#else
#define REG_B_OFFSET 0
#define REG_H_OFFSET 1
#define REG_W_OFFSET 0
#define REG_L_OFFSET 0
#define REG_LH_OFFSET 4
#endif
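
/*
 * These offsets locate a sub-register inside the host target_ulong slot.
 * For example, on a little-endian host the x86 AH register lives at
 * offsetof(CPUState, regs[R_EAX]) + REG_H_OFFSET, i.e. byte 1; on a
 * big-endian host the same byte sits at the opposite end of the slot,
 * which is what the WORDS_BIGENDIAN branch above accounts for.
 */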

static inline void gen_op_mov_reg_v(int ot, int reg, TCGv t0)
{
    switch(ot) {
    case OT_BYTE:
        if (reg < 4 X86_64_DEF( || reg >= 8 || x86_64_hregs)) {
            tcg_gen_st8_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_B_OFFSET);
        } else {
            tcg_gen_st8_tl(t0, cpu_env, offsetof(CPUState, regs[reg - 4]) + REG_H_OFFSET);
        }
        break;
    case OT_WORD:
        tcg_gen_st16_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
        break;
#ifdef TARGET_X86_64
    case OT_LONG:
        tcg_gen_st32_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        /* high part of register set to zero */
        tcg_gen_movi_tl(cpu_tmp0, 0);
        tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
        break;
    default:
    case OT_QUAD:
        tcg_gen_st_tl(t0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
#else
    default:
    case OT_LONG:
        tcg_gen_st32_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        break;
#endif
    }
}

static inline void gen_op_mov_reg_T0(int ot, int reg)
{
    gen_op_mov_reg_v(ot, reg, cpu_T[0]);
}

static inline void gen_op_mov_reg_T1(int ot, int reg)
{
    gen_op_mov_reg_v(ot, reg, cpu_T[1]);
}

static inline void gen_op_mov_reg_A0(int size, int reg)
{
    switch(size) {
    case 0:
        tcg_gen_st16_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
        break;
#ifdef TARGET_X86_64
    case 1:
        tcg_gen_st32_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        /* high part of register set to zero */
        tcg_gen_movi_tl(cpu_tmp0, 0);
        tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
        break;
    default:
    case 2:
        tcg_gen_st_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
#else
    default:
    case 1:
        tcg_gen_st32_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        break;
#endif
    }
}

static inline void gen_op_mov_v_reg(int ot, TCGv t0, int reg)
{
    switch(ot) {
    case OT_BYTE:
        if (reg < 4 X86_64_DEF( || reg >= 8 || x86_64_hregs)) {
            goto std_case;
        } else {
            tcg_gen_ld8u_tl(t0, cpu_env, offsetof(CPUState, regs[reg - 4]) + REG_H_OFFSET);
        }
        break;
    default:
    std_case:
        tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
    }
}

static inline void gen_op_mov_TN_reg(int ot, int t_index, int reg)
{
    gen_op_mov_v_reg(ot, cpu_T[t_index], reg);
}

static inline void gen_op_movl_A0_reg(int reg)
{
    tcg_gen_ld32u_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
}

static inline void gen_op_addl_A0_im(int32_t val)
{
    tcg_gen_addi_tl(cpu_A0, cpu_A0, val);
#ifdef TARGET_X86_64
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
#endif
}

#ifdef TARGET_X86_64
static inline void gen_op_addq_A0_im(int64_t val)
{
    tcg_gen_addi_tl(cpu_A0, cpu_A0, val);
}
#endif

static void gen_add_A0_im(DisasContext *s, int val)
{
#ifdef TARGET_X86_64
    if (CODE64(s))
        gen_op_addq_A0_im(val);
    else
#endif
        gen_op_addl_A0_im(val);
}

static inline void gen_op_addl_T0_T1(void)
{
    tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
}

static inline void gen_op_jmp_T0(void)
{
    tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUState, eip));
}

static inline void gen_op_add_reg_im(int size, int reg, int32_t val)
{
    switch(size) {
    case 0:
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
        tcg_gen_st16_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
        break;
    case 1:
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
#ifdef TARGET_X86_64
        tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffff);
#endif
        tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
#ifdef TARGET_X86_64
    case 2:
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
        tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
#endif
    }
}

static inline void gen_op_add_reg_T0(int size, int reg)
{
    switch(size) {
    case 0:
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        tcg_gen_add_tl(cpu_tmp0, cpu_tmp0, cpu_T[0]);
        tcg_gen_st16_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
        break;
    case 1:
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        tcg_gen_add_tl(cpu_tmp0, cpu_tmp0, cpu_T[0]);
#ifdef TARGET_X86_64
        tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffff);
#endif
        tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
#ifdef TARGET_X86_64
    case 2:
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        tcg_gen_add_tl(cpu_tmp0, cpu_tmp0, cpu_T[0]);
        tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
#endif
    }
}

static inline void gen_op_set_cc_op(int32_t val)
{
    tcg_gen_movi_i32(cpu_cc_op, val);
}

static inline void gen_op_addl_A0_reg_sN(int shift, int reg)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
    if (shift != 0)
        tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, shift);
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
#ifdef TARGET_X86_64
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
#endif
}

static inline void gen_op_movl_A0_seg(int reg)
{
    tcg_gen_ld32u_tl(cpu_A0, cpu_env, offsetof(CPUState, segs[reg].base) + REG_L_OFFSET);
}

static inline void gen_op_addl_A0_seg(int reg)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, segs[reg].base));
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
#ifdef TARGET_X86_64
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
#endif
}

#ifdef TARGET_X86_64
static inline void gen_op_movq_A0_seg(int reg)
{
    tcg_gen_ld_tl(cpu_A0, cpu_env, offsetof(CPUState, segs[reg].base));
}

static inline void gen_op_addq_A0_seg(int reg)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, segs[reg].base));
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
}

static inline void gen_op_movq_A0_reg(int reg)
{
    tcg_gen_ld_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]));
}

static inline void gen_op_addq_A0_reg_sN(int shift, int reg)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
    if (shift != 0)
        tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, shift);
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
}
#endif

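/*
 * The 'idx' argument of the memory helpers below packs two values, as the
 * "ot + s->mem_index" expressions at the call sites suggest: the low two
 * bits carry the operand size (OT_BYTE..OT_QUAD) and the upper bits carry
 * s->mem_index, which is kept as a multiple of four, so "(idx >> 2) - 1"
 * recovers the softmmu memory access index.
 */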
static inline void gen_op_lds_T0_A0(int idx)
{
    int mem_index = (idx >> 2) - 1;
    switch(idx & 3) {
    case 0:
        tcg_gen_qemu_ld8s(cpu_T[0], cpu_A0, mem_index);
        break;
    case 1:
        tcg_gen_qemu_ld16s(cpu_T[0], cpu_A0, mem_index);
        break;
    default:
    case 2:
        tcg_gen_qemu_ld32s(cpu_T[0], cpu_A0, mem_index);
        break;
    }
}

static inline void gen_op_ld_v(int idx, TCGv t0, TCGv a0)
{
    int mem_index = (idx >> 2) - 1;
    switch(idx & 3) {
    case 0:
        tcg_gen_qemu_ld8u(t0, a0, mem_index);
        break;
    case 1:
        tcg_gen_qemu_ld16u(t0, a0, mem_index);
        break;
    case 2:
        tcg_gen_qemu_ld32u(t0, a0, mem_index);
        break;
    default:
    case 3:
        /* Should never happen on 32-bit targets. */
#ifdef TARGET_X86_64
        tcg_gen_qemu_ld64(t0, a0, mem_index);
#endif
        break;
    }
}

/* XXX: always use ldu or lds */
static inline void gen_op_ld_T0_A0(int idx)
{
    gen_op_ld_v(idx, cpu_T[0], cpu_A0);
}

static inline void gen_op_ldu_T0_A0(int idx)
{
    gen_op_ld_v(idx, cpu_T[0], cpu_A0);
}

static inline void gen_op_ld_T1_A0(int idx)
{
    gen_op_ld_v(idx, cpu_T[1], cpu_A0);
}

static inline void gen_op_st_v(int idx, TCGv t0, TCGv a0)
{
    int mem_index = (idx >> 2) - 1;
    switch(idx & 3) {
    case 0:
        tcg_gen_qemu_st8(t0, a0, mem_index);
        break;
    case 1:
        tcg_gen_qemu_st16(t0, a0, mem_index);
        break;
    case 2:
        tcg_gen_qemu_st32(t0, a0, mem_index);
        break;
    default:
    case 3:
        /* Should never happen on 32-bit targets. */
#ifdef TARGET_X86_64
        tcg_gen_qemu_st64(t0, a0, mem_index);
#endif
        break;
    }
}

static inline void gen_op_st_T0_A0(int idx)
{
    gen_op_st_v(idx, cpu_T[0], cpu_A0);
}

static inline void gen_op_st_T1_A0(int idx)
{
    gen_op_st_v(idx, cpu_T[1], cpu_A0);
}

static inline void gen_jmp_im(target_ulong pc)
{
    tcg_gen_movi_tl(cpu_tmp0, pc);
    tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, eip));
}

static inline void gen_string_movl_A0_ESI(DisasContext *s)
{
    int override;

    override = s->override;
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        if (override >= 0) {
            gen_op_movq_A0_seg(override);
            gen_op_addq_A0_reg_sN(0, R_ESI);
        } else {
            gen_op_movq_A0_reg(R_ESI);
        }
    } else
#endif
    if (s->aflag) {
        /* 32 bit address */
        if (s->addseg && override < 0)
            override = R_DS;
        if (override >= 0) {
            gen_op_movl_A0_seg(override);
            gen_op_addl_A0_reg_sN(0, R_ESI);
        } else {
            gen_op_movl_A0_reg(R_ESI);
        }
    } else {
        /* 16 bit address, always override */
        if (override < 0)
            override = R_DS;
        gen_op_movl_A0_reg(R_ESI);
        gen_op_andl_A0_ffff();
        gen_op_addl_A0_seg(override);
    }
}

static inline void gen_string_movl_A0_EDI(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_movq_A0_reg(R_EDI);
    } else
#endif
    if (s->aflag) {
        if (s->addseg) {
            gen_op_movl_A0_seg(R_ES);
            gen_op_addl_A0_reg_sN(0, R_EDI);
        } else {
            gen_op_movl_A0_reg(R_EDI);
        }
    } else {
        gen_op_movl_A0_reg(R_EDI);
        gen_op_andl_A0_ffff();
        gen_op_addl_A0_seg(R_ES);
    }
}

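/*
 * env->df caches the direction flag as +1 or -1 (rather than as an EFLAGS
 * bit), so shifting it left by 'ot' yields the signed element stride for
 * string operations: with DF clear and ot == OT_LONG the helper below
 * leaves +4 in T0, with DF set it leaves -4.
 */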
static inline void gen_op_movl_T0_Dshift(int ot)
{
    tcg_gen_ld32s_tl(cpu_T[0], cpu_env, offsetof(CPUState, df));
    tcg_gen_shli_tl(cpu_T[0], cpu_T[0], ot);
}

static void gen_extu(int ot, TCGv reg)
{
    switch(ot) {
    case OT_BYTE:
        tcg_gen_ext8u_tl(reg, reg);
        break;
    case OT_WORD:
        tcg_gen_ext16u_tl(reg, reg);
        break;
    case OT_LONG:
        tcg_gen_ext32u_tl(reg, reg);
        break;
    default:
        break;
    }
}

static void gen_exts(int ot, TCGv reg)
{
    switch(ot) {
    case OT_BYTE:
        tcg_gen_ext8s_tl(reg, reg);
        break;
    case OT_WORD:
        tcg_gen_ext16s_tl(reg, reg);
        break;
    case OT_LONG:
        tcg_gen_ext32s_tl(reg, reg);
        break;
    default:
        break;
    }
}

static inline void gen_op_jnz_ecx(int size, int label1)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ECX]));
    gen_extu(size + 1, cpu_tmp0);
    tcg_gen_brcondi_tl(TCG_COND_NE, cpu_tmp0, 0, label1);
}

static inline void gen_op_jz_ecx(int size, int label1)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ECX]));
    gen_extu(size + 1, cpu_tmp0);
    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, label1);
}

static void gen_helper_in_func(int ot, TCGv v, TCGv_i32 n)
{
    switch (ot) {
    case 0: gen_helper_inb(v, n); break;
    case 1: gen_helper_inw(v, n); break;
    case 2: gen_helper_inl(v, n); break;
    }
}

static void gen_helper_out_func(int ot, TCGv_i32 v, TCGv_i32 n)
{
    switch (ot) {
    case 0: gen_helper_outb(v, n); break;
    case 1: gen_helper_outw(v, n); break;
    case 2: gen_helper_outl(v, n); break;
    }
}

static void gen_check_io(DisasContext *s, int ot, target_ulong cur_eip,
                         uint32_t svm_flags)
{
    int state_saved;
    target_ulong next_eip;

    state_saved = 0;
    if (s->pe && (s->cpl > s->iopl || s->vm86)) {
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_jmp_im(cur_eip);
        state_saved = 1;
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
        switch (ot) {
        case 0: gen_helper_check_iob(cpu_tmp2_i32); break;
        case 1: gen_helper_check_iow(cpu_tmp2_i32); break;
        case 2: gen_helper_check_iol(cpu_tmp2_i32); break;
        }
    }
    if (s->flags & HF_SVMI_MASK) {
        if (!state_saved) {
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_jmp_im(cur_eip);
            state_saved = 1;
        }
        svm_flags |= (1 << (4 + ot));
        next_eip = s->pc - s->cs_base;
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
        gen_helper_svm_check_io(cpu_tmp2_i32, tcg_const_i32(svm_flags),
                                tcg_const_i32(next_eip - cur_eip));
    }
}

static inline void gen_movs(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_string_movl_A0_EDI(s);
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_ESI);
    gen_op_add_reg_T0(s->aflag, R_EDI);
}

static inline void gen_update_cc_op(DisasContext *s)
{
    if (s->cc_op != CC_OP_DYNAMIC) {
        gen_op_set_cc_op(s->cc_op);
        s->cc_op = CC_OP_DYNAMIC;
    }
}

static void gen_op_update1_cc(void)
{
    tcg_gen_discard_tl(cpu_cc_src);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
}

static void gen_op_update2_cc(void)
{
    tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
}

static inline void gen_op_cmpl_T0_T1_cc(void)
{
    tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_T[0], cpu_T[1]);
}

static inline void gen_op_testl_T0_T1_cc(void)
{
    tcg_gen_discard_tl(cpu_cc_src);
    tcg_gen_and_tl(cpu_cc_dst, cpu_T[0], cpu_T[1]);
}

static void gen_op_update_neg_cc(void)
{
    tcg_gen_neg_tl(cpu_cc_src, cpu_T[0]);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
}

/* compute eflags.C to reg */
static void gen_compute_eflags_c(TCGv reg)
{
    gen_helper_cc_compute_c(cpu_tmp2_i32, cpu_cc_op);
    tcg_gen_extu_i32_tl(reg, cpu_tmp2_i32);
}

/* compute all eflags to cc_src */
static void gen_compute_eflags(TCGv reg)
{
    gen_helper_cc_compute_all(cpu_tmp2_i32, cpu_cc_op);
    tcg_gen_extu_i32_tl(reg, cpu_tmp2_i32);
}

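/*
 * The shift counts used in gen_setcc_slow_T0() below follow the EFLAGS
 * bit layout: CF is bit 0, PF bit 2, ZF bit 6, SF bit 7 and OF bit 11,
 * which is why e.g. JCC_Z extracts bit 6 and JCC_O extracts bit 11 of
 * the computed flags.
 */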
static inline void gen_setcc_slow_T0(DisasContext *s, int jcc_op)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    switch(jcc_op) {
    case JCC_O:
        gen_compute_eflags(cpu_T[0]);
        tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 11);
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
        break;
    case JCC_B:
        gen_compute_eflags_c(cpu_T[0]);
        break;
    case JCC_Z:
        gen_compute_eflags(cpu_T[0]);
        tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 6);
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
        break;
    case JCC_BE:
        gen_compute_eflags(cpu_tmp0);
        tcg_gen_shri_tl(cpu_T[0], cpu_tmp0, 6);
        tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
        break;
    case JCC_S:
        gen_compute_eflags(cpu_T[0]);
        tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 7);
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
        break;
    case JCC_P:
        gen_compute_eflags(cpu_T[0]);
        tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 2);
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
        break;
    case JCC_L:
        gen_compute_eflags(cpu_tmp0);
        tcg_gen_shri_tl(cpu_T[0], cpu_tmp0, 11); /* CC_O */
        tcg_gen_shri_tl(cpu_tmp0, cpu_tmp0, 7); /* CC_S */
        tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
        break;
    default:
    case JCC_LE:
        gen_compute_eflags(cpu_tmp0);
        tcg_gen_shri_tl(cpu_T[0], cpu_tmp0, 11); /* CC_O */
        tcg_gen_shri_tl(cpu_tmp4, cpu_tmp0, 7); /* CC_S */
        tcg_gen_shri_tl(cpu_tmp0, cpu_tmp0, 6); /* CC_Z */
        tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_tmp4);
        tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
        break;
    }
}

/* return true if setcc_slow is not needed (WARNING: must be kept in
   sync with gen_jcc1) */
static int is_fast_jcc_case(DisasContext *s, int b)
{
    int jcc_op;
    jcc_op = (b >> 1) & 7;
    switch(s->cc_op) {
        /* we optimize the cmp/jcc case */
    case CC_OP_SUBB:
    case CC_OP_SUBW:
    case CC_OP_SUBL:
    case CC_OP_SUBQ:
        if (jcc_op == JCC_O || jcc_op == JCC_P)
            goto slow_jcc;
        break;

        /* some jumps are easy to compute */
    case CC_OP_ADDB:
    case CC_OP_ADDW:
    case CC_OP_ADDL:
    case CC_OP_ADDQ:

    case CC_OP_LOGICB:
    case CC_OP_LOGICW:
    case CC_OP_LOGICL:
    case CC_OP_LOGICQ:

    case CC_OP_INCB:
    case CC_OP_INCW:
    case CC_OP_INCL:
    case CC_OP_INCQ:

    case CC_OP_DECB:
    case CC_OP_DECW:
    case CC_OP_DECL:
    case CC_OP_DECQ:

    case CC_OP_SHLB:
    case CC_OP_SHLW:
    case CC_OP_SHLL:
    case CC_OP_SHLQ:
        if (jcc_op != JCC_Z && jcc_op != JCC_S)
            goto slow_jcc;
        break;
    default:
    slow_jcc:
        return 0;
    }
    return 1;
}

/* generate a conditional jump to label 'l1' according to jump opcode
   value 'b'. In the fast case, T0 is guaranteed not to be used. */
static inline void gen_jcc1(DisasContext *s, int cc_op, int b, int l1)
{
    int inv, jcc_op, size, cond;
    TCGv t0;

    inv = b & 1;
    jcc_op = (b >> 1) & 7;

    switch(cc_op) {
        /* we optimize the cmp/jcc case */
    case CC_OP_SUBB:
    case CC_OP_SUBW:
    case CC_OP_SUBL:
    case CC_OP_SUBQ:

        size = cc_op - CC_OP_SUBB;
        switch(jcc_op) {
        case JCC_Z:
        fast_jcc_z:
            switch(size) {
            case 0:
                tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0xff);
                t0 = cpu_tmp0;
                break;
            case 1:
                tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0xffff);
                t0 = cpu_tmp0;
                break;
#ifdef TARGET_X86_64
            case 2:
                tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0xffffffff);
                t0 = cpu_tmp0;
                break;
#endif
            default:
                t0 = cpu_cc_dst;
                break;
            }
            tcg_gen_brcondi_tl(inv ? TCG_COND_NE : TCG_COND_EQ, t0, 0, l1);
            break;
        case JCC_S:
        fast_jcc_s:
            switch(size) {
            case 0:
                tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0x80);
                tcg_gen_brcondi_tl(inv ? TCG_COND_EQ : TCG_COND_NE, cpu_tmp0,
                                   0, l1);
                break;
            case 1:
                tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0x8000);
                tcg_gen_brcondi_tl(inv ? TCG_COND_EQ : TCG_COND_NE, cpu_tmp0,
                                   0, l1);
                break;
#ifdef TARGET_X86_64
            case 2:
                tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0x80000000);
                tcg_gen_brcondi_tl(inv ? TCG_COND_EQ : TCG_COND_NE, cpu_tmp0,
                                   0, l1);
                break;
#endif
            default:
                tcg_gen_brcondi_tl(inv ? TCG_COND_GE : TCG_COND_LT, cpu_cc_dst,
                                   0, l1);
                break;
            }
            break;

        case JCC_B:
            cond = inv ? TCG_COND_GEU : TCG_COND_LTU;
            goto fast_jcc_b;
        case JCC_BE:
            cond = inv ? TCG_COND_GTU : TCG_COND_LEU;
        fast_jcc_b:
            tcg_gen_add_tl(cpu_tmp4, cpu_cc_dst, cpu_cc_src);
            switch(size) {
            case 0:
                t0 = cpu_tmp0;
                tcg_gen_andi_tl(cpu_tmp4, cpu_tmp4, 0xff);
                tcg_gen_andi_tl(t0, cpu_cc_src, 0xff);
                break;
            case 1:
                t0 = cpu_tmp0;
                tcg_gen_andi_tl(cpu_tmp4, cpu_tmp4, 0xffff);
                tcg_gen_andi_tl(t0, cpu_cc_src, 0xffff);
                break;
#ifdef TARGET_X86_64
            case 2:
                t0 = cpu_tmp0;
                tcg_gen_andi_tl(cpu_tmp4, cpu_tmp4, 0xffffffff);
                tcg_gen_andi_tl(t0, cpu_cc_src, 0xffffffff);
                break;
#endif
            default:
                t0 = cpu_cc_src;
                break;
            }
            tcg_gen_brcond_tl(cond, cpu_tmp4, t0, l1);
            break;

        case JCC_L:
            cond = inv ? TCG_COND_GE : TCG_COND_LT;
            goto fast_jcc_l;
        case JCC_LE:
            cond = inv ? TCG_COND_GT : TCG_COND_LE;
        fast_jcc_l:
            tcg_gen_add_tl(cpu_tmp4, cpu_cc_dst, cpu_cc_src);
            switch(size) {
            case 0:
                t0 = cpu_tmp0;
                tcg_gen_ext8s_tl(cpu_tmp4, cpu_tmp4);
                tcg_gen_ext8s_tl(t0, cpu_cc_src);
                break;
            case 1:
                t0 = cpu_tmp0;
                tcg_gen_ext16s_tl(cpu_tmp4, cpu_tmp4);
                tcg_gen_ext16s_tl(t0, cpu_cc_src);
                break;
#ifdef TARGET_X86_64
            case 2:
                t0 = cpu_tmp0;
                tcg_gen_ext32s_tl(cpu_tmp4, cpu_tmp4);
                tcg_gen_ext32s_tl(t0, cpu_cc_src);
                break;
#endif
            default:
                t0 = cpu_cc_src;
                break;
            }
            tcg_gen_brcond_tl(cond, cpu_tmp4, t0, l1);
            break;

        default:
            goto slow_jcc;
        }
        break;

        /* some jumps are easy to compute */
    case CC_OP_ADDB:
    case CC_OP_ADDW:
    case CC_OP_ADDL:
    case CC_OP_ADDQ:

    case CC_OP_ADCB:
    case CC_OP_ADCW:
    case CC_OP_ADCL:
    case CC_OP_ADCQ:

    case CC_OP_SBBB:
    case CC_OP_SBBW:
    case CC_OP_SBBL:
    case CC_OP_SBBQ:

    case CC_OP_LOGICB:
    case CC_OP_LOGICW:
    case CC_OP_LOGICL:
    case CC_OP_LOGICQ:

    case CC_OP_INCB:
    case CC_OP_INCW:
    case CC_OP_INCL:
    case CC_OP_INCQ:

    case CC_OP_DECB:
    case CC_OP_DECW:
    case CC_OP_DECL:
    case CC_OP_DECQ:

    case CC_OP_SHLB:
    case CC_OP_SHLW:
    case CC_OP_SHLL:
    case CC_OP_SHLQ:

    case CC_OP_SARB:
    case CC_OP_SARW:
    case CC_OP_SARL:
    case CC_OP_SARQ:
        switch(jcc_op) {
        case JCC_Z:
            size = (cc_op - CC_OP_ADDB) & 3;
            goto fast_jcc_z;
        case JCC_S:
            size = (cc_op - CC_OP_ADDB) & 3;
            goto fast_jcc_s;
        default:
            goto slow_jcc;
        }
        break;
    default:
    slow_jcc:
        gen_setcc_slow_T0(s, jcc_op);
        tcg_gen_brcondi_tl(inv ? TCG_COND_EQ : TCG_COND_NE,
                           cpu_T[0], 0, l1);
        break;
    }
}

/* XXX: does not work with gdbstub "ice" single step - not a
   serious problem */
static int gen_jz_ecx_string(DisasContext *s, target_ulong next_eip)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    gen_op_jnz_ecx(s->aflag, l1);
    gen_set_label(l2);
    gen_jmp_tb(s, next_eip, 1);
    gen_set_label(l1);
    return l2;
}

static inline void gen_stos(DisasContext *s, int ot)
{
    gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
    gen_string_movl_A0_EDI(s);
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_EDI);
}

static inline void gen_lods(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_op_mov_reg_T0(ot, R_EAX);
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_ESI);
}

static inline void gen_scas(DisasContext *s, int ot)
{
    gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
    gen_string_movl_A0_EDI(s);
    gen_op_ld_T1_A0(ot + s->mem_index);
    gen_op_cmpl_T0_T1_cc();
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_EDI);
}

static inline void gen_cmps(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_string_movl_A0_EDI(s);
    gen_op_ld_T1_A0(ot + s->mem_index);
    gen_op_cmpl_T0_T1_cc();
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_ESI);
    gen_op_add_reg_T0(s->aflag, R_EDI);
}

static inline void gen_ins(DisasContext *s, int ot)
{
    if (use_icount)
        gen_io_start();
    gen_string_movl_A0_EDI(s);
    /* Note: we must do this dummy write first to be restartable in
       case of page fault. */
    gen_op_movl_T0_0();
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_mov_TN_reg(OT_WORD, 1, R_EDX);
    tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[1]);
    tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
    gen_helper_in_func(ot, cpu_T[0], cpu_tmp2_i32);
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_EDI);
    if (use_icount)
        gen_io_end();
}

static inline void gen_outs(DisasContext *s, int ot)
{
    if (use_icount)
        gen_io_start();
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);

    gen_op_mov_TN_reg(OT_WORD, 1, R_EDX);
    tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[1]);
    tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
    tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[0]);
    gen_helper_out_func(ot, cpu_tmp2_i32, cpu_tmp3_i32);

    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_ESI);
    if (use_icount)
        gen_io_end();
}

/* same method as Valgrind: we generate jumps to the current or the next
   instruction */
#define GEN_REPZ(op) \
static inline void gen_repz_ ## op(DisasContext *s, int ot, \
                                   target_ulong cur_eip, target_ulong next_eip) \
{ \
    int l2; \
    gen_update_cc_op(s); \
    l2 = gen_jz_ecx_string(s, next_eip); \
    gen_ ## op(s, ot); \
    gen_op_add_reg_im(s->aflag, R_ECX, -1); \
    /* a loop would cause two single step exceptions if ECX = 1 \
       before rep string_insn */ \
    if (!s->jmp_opt) \
        gen_op_jz_ecx(s->aflag, l2); \
    gen_jmp(s, cur_eip); \
}

#define GEN_REPZ2(op) \
static inline void gen_repz_ ## op(DisasContext *s, int ot, \
                                   target_ulong cur_eip, \
                                   target_ulong next_eip, \
                                   int nz) \
{ \
    int l2; \
    gen_update_cc_op(s); \
    l2 = gen_jz_ecx_string(s, next_eip); \
    gen_ ## op(s, ot); \
    gen_op_add_reg_im(s->aflag, R_ECX, -1); \
    gen_op_set_cc_op(CC_OP_SUBB + ot); \
    gen_jcc1(s, CC_OP_SUBB + ot, (JCC_Z << 1) | (nz ^ 1), l2); \
    if (!s->jmp_opt) \
        gen_op_jz_ecx(s->aflag, l2); \
    gen_jmp(s, cur_eip); \
}
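
/*
 * Rough shape of the code emitted for a rep-prefixed string instruction
 * by the macros above (each iteration jumps back and re-enters the
 * instruction at cur_eip rather than looping inside one TB, the
 * Valgrind-style trick noted above):
 *
 *     if (ECX == 0) goto next_insn;   // gen_jz_ecx_string()
 *     <one movs/stos/... step>
 *     ECX--;
 *     <GEN_REPZ2 only: exit to next_insn when ZF no longer matches nz>
 *     goto cur_eip;                   // re-execute this instruction
 */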

GEN_REPZ(movs)
GEN_REPZ(stos)
GEN_REPZ(lods)
GEN_REPZ(ins)
GEN_REPZ(outs)
GEN_REPZ2(scas)
GEN_REPZ2(cmps)

static void gen_helper_fp_arith_ST0_FT0(int op)
{
    switch (op) {
    case 0: gen_helper_fadd_ST0_FT0(); break;
    case 1: gen_helper_fmul_ST0_FT0(); break;
    case 2: gen_helper_fcom_ST0_FT0(); break;
    case 3: gen_helper_fcom_ST0_FT0(); break;
    case 4: gen_helper_fsub_ST0_FT0(); break;
    case 5: gen_helper_fsubr_ST0_FT0(); break;
    case 6: gen_helper_fdiv_ST0_FT0(); break;
    case 7: gen_helper_fdivr_ST0_FT0(); break;
    }
}

/* NOTE: the "r" (reversed) ops are swapped here relative to
   gen_helper_fp_arith_ST0_FT0() above */
static void gen_helper_fp_arith_STN_ST0(int op, int opreg)
{
    TCGv_i32 tmp = tcg_const_i32(opreg);
    switch (op) {
    case 0: gen_helper_fadd_STN_ST0(tmp); break;
    case 1: gen_helper_fmul_STN_ST0(tmp); break;
    case 4: gen_helper_fsubr_STN_ST0(tmp); break;
    case 5: gen_helper_fsub_STN_ST0(tmp); break;
    case 6: gen_helper_fdivr_STN_ST0(tmp); break;
    case 7: gen_helper_fdiv_STN_ST0(tmp); break;
    }
}

/* if d == OR_TMP0, it means memory operand (address in A0) */
static void gen_op(DisasContext *s1, int op, int ot, int d)
{
    if (d != OR_TMP0) {
        gen_op_mov_TN_reg(ot, 0, d);
    } else {
        gen_op_ld_T0_A0(ot + s1->mem_index);
    }
    switch(op) {
    case OP_ADCL:
        if (s1->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s1->cc_op);
        gen_compute_eflags_c(cpu_tmp4);
        tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_tmp4);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
        tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
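        /* The carry-in is only known at run time, so cc_op must be
           computed dynamically: assuming the cc_op enum keeps the
           CC_OP_ADC* group four entries after CC_OP_ADD*, adding
           (carry << 2) to CC_OP_ADDB + ot yields the ADD variant when
           the carry-in was 0 and the ADC variant when it was 1.  The
           OP_SBBL case below plays the same trick with the SUB/SBB
           groups. */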
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_tmp4);
        tcg_gen_shli_i32(cpu_tmp2_i32, cpu_tmp2_i32, 2);
        tcg_gen_addi_i32(cpu_cc_op, cpu_tmp2_i32, CC_OP_ADDB + ot);
        s1->cc_op = CC_OP_DYNAMIC;
        break;
    case OP_SBBL:
        if (s1->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s1->cc_op);
        gen_compute_eflags_c(cpu_tmp4);
        tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_tmp4);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
        tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_tmp4);
        tcg_gen_shli_i32(cpu_tmp2_i32, cpu_tmp2_i32, 2);
        tcg_gen_addi_i32(cpu_cc_op, cpu_tmp2_i32, CC_OP_SUBB + ot);
        s1->cc_op = CC_OP_DYNAMIC;
        break;
    case OP_ADDL:
        gen_op_addl_T0_T1();
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        gen_op_update2_cc();
        s1->cc_op = CC_OP_ADDB + ot;
        break;
    case OP_SUBL:
        tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        gen_op_update2_cc();
        s1->cc_op = CC_OP_SUBB + ot;
        break;
    default:
    case OP_ANDL:
        tcg_gen_and_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        gen_op_update1_cc();
        s1->cc_op = CC_OP_LOGICB + ot;
        break;
    case OP_ORL:
        tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        gen_op_update1_cc();
        s1->cc_op = CC_OP_LOGICB + ot;
        break;
    case OP_XORL:
        tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        gen_op_update1_cc();
        s1->cc_op = CC_OP_LOGICB + ot;
        break;
    case OP_CMPL:
        gen_op_cmpl_T0_T1_cc();
        s1->cc_op = CC_OP_SUBB + ot;
        break;
    }
}

/* if d == OR_TMP0, it means memory operand (address in A0) */
static void gen_inc(DisasContext *s1, int ot, int d, int c)
{
    if (d != OR_TMP0)
        gen_op_mov_TN_reg(ot, 0, d);
    else
        gen_op_ld_T0_A0(ot + s1->mem_index);
    if (s1->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s1->cc_op);
    if (c > 0) {
        tcg_gen_addi_tl(cpu_T[0], cpu_T[0], 1);
        s1->cc_op = CC_OP_INCB + ot;
    } else {
        tcg_gen_addi_tl(cpu_T[0], cpu_T[0], -1);
        s1->cc_op = CC_OP_DECB + ot;
    }
    if (d != OR_TMP0)
        gen_op_mov_reg_T0(ot, d);
    else
        gen_op_st_T0_A0(ot + s1->mem_index);
    gen_compute_eflags_c(cpu_cc_src);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
}

static void gen_shift_rm_T1(DisasContext *s, int ot, int op1,
                            int is_right, int is_arith)
{
    target_ulong mask;
    int shift_label;
    TCGv t0, t1;

    if (ot == OT_QUAD)
        mask = 0x3f;
    else
        mask = 0x1f;

    /* load */
    if (op1 == OR_TMP0)
        gen_op_ld_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_TN_reg(ot, 0, op1);

    tcg_gen_andi_tl(cpu_T[1], cpu_T[1], mask);

    tcg_gen_addi_tl(cpu_tmp5, cpu_T[1], -1);

    if (is_right) {
        if (is_arith) {
            gen_exts(ot, cpu_T[0]);
            tcg_gen_sar_tl(cpu_T3, cpu_T[0], cpu_tmp5);
            tcg_gen_sar_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        } else {
            gen_extu(ot, cpu_T[0]);
            tcg_gen_shr_tl(cpu_T3, cpu_T[0], cpu_tmp5);
            tcg_gen_shr_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        }
    } else {
        tcg_gen_shl_tl(cpu_T3, cpu_T[0], cpu_tmp5);
        tcg_gen_shl_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
    }

    /* store */
    if (op1 == OR_TMP0)
        gen_op_st_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_reg_T0(ot, op1);

    /* update eflags if non zero shift */
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);

    /* XXX: inefficient */
    t0 = tcg_temp_local_new();
    t1 = tcg_temp_local_new();

    tcg_gen_mov_tl(t0, cpu_T[0]);
    tcg_gen_mov_tl(t1, cpu_T3);

    shift_label = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_T[1], 0, shift_label);

    tcg_gen_mov_tl(cpu_cc_src, t1);
    tcg_gen_mov_tl(cpu_cc_dst, t0);
    if (is_right)
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SARB + ot);
    else
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SHLB + ot);

    gen_set_label(shift_label);
    s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */

    tcg_temp_free(t0);
    tcg_temp_free(t1);
}

static void gen_shift_rm_im(DisasContext *s, int ot, int op1, int op2,
                            int is_right, int is_arith)
{
    int mask;

    if (ot == OT_QUAD)
        mask = 0x3f;
    else
        mask = 0x1f;

    /* load */
    if (op1 == OR_TMP0)
        gen_op_ld_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_TN_reg(ot, 0, op1);

    op2 &= mask;
    if (op2 != 0) {
        if (is_right) {
            if (is_arith) {
                gen_exts(ot, cpu_T[0]);
                tcg_gen_sari_tl(cpu_tmp4, cpu_T[0], op2 - 1);
                tcg_gen_sari_tl(cpu_T[0], cpu_T[0], op2);
            } else {
                gen_extu(ot, cpu_T[0]);
                tcg_gen_shri_tl(cpu_tmp4, cpu_T[0], op2 - 1);
                tcg_gen_shri_tl(cpu_T[0], cpu_T[0], op2);
            }
        } else {
            tcg_gen_shli_tl(cpu_tmp4, cpu_T[0], op2 - 1);
            tcg_gen_shli_tl(cpu_T[0], cpu_T[0], op2);
        }
    }

    /* store */
    if (op1 == OR_TMP0)
        gen_op_st_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_reg_T0(ot, op1);

    /* update eflags if non zero shift */
    if (op2 != 0) {
        tcg_gen_mov_tl(cpu_cc_src, cpu_tmp4);
        tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
        if (is_right)
            s->cc_op = CC_OP_SARB + ot;
        else
            s->cc_op = CC_OP_SHLB + ot;
    }
}

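/* A shift helper with a signed count: a positive arg2 shifts left, a
   negative one shifts right.  The rotate code below uses it as
   tcg_gen_lshift(dst, src, 11 - (data_bits - 1)) to move the result's
   top bit into the OF position (bit 11) whichever direction that
   requires. */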
static inline void tcg_gen_lshift(TCGv ret, TCGv arg1, target_long arg2)
{
    if (arg2 >= 0)
        tcg_gen_shli_tl(ret, arg1, arg2);
    else
        tcg_gen_shri_tl(ret, arg1, -arg2);
}

static void gen_rot_rm_T1(DisasContext *s, int ot, int op1,
                          int is_right)
{
    target_ulong mask;
    int label1, label2, data_bits;
    TCGv t0, t1, t2, a0;

    /* XXX: inefficient, but we must use local temps */
    t0 = tcg_temp_local_new();
    t1 = tcg_temp_local_new();
    t2 = tcg_temp_local_new();
    a0 = tcg_temp_local_new();

    if (ot == OT_QUAD)
        mask = 0x3f;
    else
        mask = 0x1f;

    /* load */
    if (op1 == OR_TMP0) {
        tcg_gen_mov_tl(a0, cpu_A0);
        gen_op_ld_v(ot + s->mem_index, t0, a0);
    } else {
        gen_op_mov_v_reg(ot, t0, op1);
    }

    tcg_gen_mov_tl(t1, cpu_T[1]);

    tcg_gen_andi_tl(t1, t1, mask);

    /* Must test zero case to avoid using undefined behaviour in TCG
       shifts. */
    label1 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, label1);

    if (ot <= OT_WORD)
        tcg_gen_andi_tl(cpu_tmp0, t1, (1 << (3 + ot)) - 1);
    else
        tcg_gen_mov_tl(cpu_tmp0, t1);

    gen_extu(ot, t0);
    tcg_gen_mov_tl(t2, t0);

    data_bits = 8 << ot;
    /* XXX: rely on behaviour of shifts when operand 2 overflows (XXX:
       fix TCG definition) */
    if (is_right) {
        tcg_gen_shr_tl(cpu_tmp4, t0, cpu_tmp0);
        tcg_gen_sub_tl(cpu_tmp0, tcg_const_tl(data_bits), cpu_tmp0);
        tcg_gen_shl_tl(t0, t0, cpu_tmp0);
    } else {
        tcg_gen_shl_tl(cpu_tmp4, t0, cpu_tmp0);
        tcg_gen_sub_tl(cpu_tmp0, tcg_const_tl(data_bits), cpu_tmp0);
        tcg_gen_shr_tl(t0, t0, cpu_tmp0);
    }
    tcg_gen_or_tl(t0, t0, cpu_tmp4);

    gen_set_label(label1);
    /* store */
    if (op1 == OR_TMP0) {
        gen_op_st_v(ot + s->mem_index, t0, a0);
    } else {
        gen_op_mov_reg_v(ot, op1, t0);
    }

    /* update eflags */
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);

    label2 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, label2);

    gen_compute_eflags(cpu_cc_src);
    tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, ~(CC_O | CC_C));
    tcg_gen_xor_tl(cpu_tmp0, t2, t0);
    tcg_gen_lshift(cpu_tmp0, cpu_tmp0, 11 - (data_bits - 1));
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, CC_O);
    tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_tmp0);
    if (is_right) {
        tcg_gen_shri_tl(t0, t0, data_bits - 1);
    }
    tcg_gen_andi_tl(t0, t0, CC_C);
    tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, t0);

    tcg_gen_discard_tl(cpu_cc_dst);
    tcg_gen_movi_i32(cpu_cc_op, CC_OP_EFLAGS);

    gen_set_label(label2);
    s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */

    tcg_temp_free(t0);
    tcg_temp_free(t1);
    tcg_temp_free(t2);
    tcg_temp_free(a0);
}

static void gen_rot_rm_im(DisasContext *s, int ot, int op1, int op2,
                          int is_right)
{
    int mask;
    int data_bits;
    TCGv t0, t1, a0;

    /* XXX: inefficient, but we must use local temps */
    t0 = tcg_temp_local_new();
    t1 = tcg_temp_local_new();
    a0 = tcg_temp_local_new();

    if (ot == OT_QUAD)
        mask = 0x3f;
    else
        mask = 0x1f;

    /* load */
    if (op1 == OR_TMP0) {
        tcg_gen_mov_tl(a0, cpu_A0);
        gen_op_ld_v(ot + s->mem_index, t0, a0);
    } else {
        gen_op_mov_v_reg(ot, t0, op1);
    }

    gen_extu(ot, t0);
    tcg_gen_mov_tl(t1, t0);

    op2 &= mask;
    data_bits = 8 << ot;
    if (op2 != 0) {
        int shift = op2 & ((1 << (3 + ot)) - 1);
        if (is_right) {
            tcg_gen_shri_tl(cpu_tmp4, t0, shift);
            tcg_gen_shli_tl(t0, t0, data_bits - shift);
        } else {
            tcg_gen_shli_tl(cpu_tmp4, t0, shift);
            tcg_gen_shri_tl(t0, t0, data_bits - shift);
        }
        tcg_gen_or_tl(t0, t0, cpu_tmp4);
    }

    /* store */
    if (op1 == OR_TMP0) {
        gen_op_st_v(ot + s->mem_index, t0, a0);
    } else {
        gen_op_mov_reg_v(ot, op1, t0);
    }

    if (op2 != 0) {
        /* update eflags */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);

        gen_compute_eflags(cpu_cc_src);
        tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, ~(CC_O | CC_C));
        tcg_gen_xor_tl(cpu_tmp0, t1, t0);
        tcg_gen_lshift(cpu_tmp0, cpu_tmp0, 11 - (data_bits - 1));
        tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, CC_O);
        tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_tmp0);
        if (is_right) {
            tcg_gen_shri_tl(t0, t0, data_bits - 1);
        }
        tcg_gen_andi_tl(t0, t0, CC_C);
        tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, t0);

        tcg_gen_discard_tl(cpu_cc_dst);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_EFLAGS);
        s->cc_op = CC_OP_EFLAGS;
    }

    tcg_temp_free(t0);
    tcg_temp_free(t1);
    tcg_temp_free(a0);
}

/* XXX: add faster immediate = 1 case */
static void gen_rotc_rm_T1(DisasContext *s, int ot, int op1,
                           int is_right)
{
    int label1;

    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);

    /* load */
    if (op1 == OR_TMP0)
        gen_op_ld_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_TN_reg(ot, 0, op1);

    if (is_right) {
        switch (ot) {
        case 0: gen_helper_rcrb(cpu_T[0], cpu_T[0], cpu_T[1]); break;
        case 1: gen_helper_rcrw(cpu_T[0], cpu_T[0], cpu_T[1]); break;
        case 2: gen_helper_rcrl(cpu_T[0], cpu_T[0], cpu_T[1]); break;
#ifdef TARGET_X86_64
        case 3: gen_helper_rcrq(cpu_T[0], cpu_T[0], cpu_T[1]); break;
#endif
        }
    } else {
        switch (ot) {
        case 0: gen_helper_rclb(cpu_T[0], cpu_T[0], cpu_T[1]); break;
        case 1: gen_helper_rclw(cpu_T[0], cpu_T[0], cpu_T[1]); break;
        case 2: gen_helper_rcll(cpu_T[0], cpu_T[0], cpu_T[1]); break;
#ifdef TARGET_X86_64
        case 3: gen_helper_rclq(cpu_T[0], cpu_T[0], cpu_T[1]); break;
#endif
        }
    }
    /* store */
    if (op1 == OR_TMP0)
        gen_op_st_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_reg_T0(ot, op1);

    /* update eflags */
    label1 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_cc_tmp, -1, label1);

    tcg_gen_mov_tl(cpu_cc_src, cpu_cc_tmp);
    tcg_gen_discard_tl(cpu_cc_dst);
    tcg_gen_movi_i32(cpu_cc_op, CC_OP_EFLAGS);

    gen_set_label(label1);
    s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
}

/* XXX: add faster immediate case */
static void gen_shiftd_rm_T1_T3(DisasContext *s, int ot, int op1,
                                int is_right)
{
    int label1, label2, data_bits;
    target_ulong mask;
    TCGv t0, t1, t2, a0;

    t0 = tcg_temp_local_new();
    t1 = tcg_temp_local_new();
    t2 = tcg_temp_local_new();
    a0 = tcg_temp_local_new();

    if (ot == OT_QUAD)
        mask = 0x3f;
    else
        mask = 0x1f;

    /* load */
    if (op1 == OR_TMP0) {
        tcg_gen_mov_tl(a0, cpu_A0);
        gen_op_ld_v(ot + s->mem_index, t0, a0);
    } else {
        gen_op_mov_v_reg(ot, t0, op1);
    }

    tcg_gen_andi_tl(cpu_T3, cpu_T3, mask);

    tcg_gen_mov_tl(t1, cpu_T[1]);
    tcg_gen_mov_tl(t2, cpu_T3);

    /* Must test zero case to avoid using undefined behaviour in TCG
       shifts. */
    label1 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, label1);

    tcg_gen_addi_tl(cpu_tmp5, t2, -1);
    if (ot == OT_WORD) {
        /* Note: we implement the Intel behaviour for shift count > 16 */
        if (is_right) {
            tcg_gen_andi_tl(t0, t0, 0xffff);
            tcg_gen_shli_tl(cpu_tmp0, t1, 16);
            tcg_gen_or_tl(t0, t0, cpu_tmp0);
            tcg_gen_ext32u_tl(t0, t0);

            tcg_gen_shr_tl(cpu_tmp4, t0, cpu_tmp5);

            /* only needed if count > 16, but a test here would only
               complicate the code */
            tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(32), t2);
            tcg_gen_shl_tl(cpu_tmp0, t0, cpu_tmp5);

            tcg_gen_shr_tl(t0, t0, t2);

            tcg_gen_or_tl(t0, t0, cpu_tmp0);
        } else {
            /* XXX: not optimal */
            tcg_gen_andi_tl(t0, t0, 0xffff);
            tcg_gen_shli_tl(t1, t1, 16);
            tcg_gen_or_tl(t1, t1, t0);
            tcg_gen_ext32u_tl(t1, t1);

            tcg_gen_shl_tl(cpu_tmp4, t0, cpu_tmp5);
            tcg_gen_sub_tl(cpu_tmp0, tcg_const_tl(32), cpu_tmp5);
            tcg_gen_shr_tl(cpu_tmp6, t1, cpu_tmp0);
            tcg_gen_or_tl(cpu_tmp4, cpu_tmp4, cpu_tmp6);

            tcg_gen_shl_tl(t0, t0, t2);
            tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(32), t2);
            tcg_gen_shr_tl(t1, t1, cpu_tmp5);
            tcg_gen_or_tl(t0, t0, t1);
        }
    } else {
        data_bits = 8 << ot;
        if (is_right) {
            if (ot == OT_LONG)
                tcg_gen_ext32u_tl(t0, t0);

            tcg_gen_shr_tl(cpu_tmp4, t0, cpu_tmp5);

            tcg_gen_shr_tl(t0, t0, t2);
            tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(data_bits), t2);
            tcg_gen_shl_tl(t1, t1, cpu_tmp5);
            tcg_gen_or_tl(t0, t0, t1);

        } else {
            if (ot == OT_LONG)
                tcg_gen_ext32u_tl(t1, t1);

            tcg_gen_shl_tl(cpu_tmp4, t0, cpu_tmp5);

            tcg_gen_shl_tl(t0, t0, t2);
            tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(data_bits), t2);
            tcg_gen_shr_tl(t1, t1, cpu_tmp5);
            tcg_gen_or_tl(t0, t0, t1);
        }
    }
    tcg_gen_mov_tl(t1, cpu_tmp4);

    gen_set_label(label1);
    /* store */
    if (op1 == OR_TMP0) {
        gen_op_st_v(ot + s->mem_index, t0, a0);
    } else {
        gen_op_mov_reg_v(ot, op1, t0);
    }

    /* update eflags */
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);

    label2 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, label2);

    tcg_gen_mov_tl(cpu_cc_src, t1);
    tcg_gen_mov_tl(cpu_cc_dst, t0);
    if (is_right) {
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SARB + ot);
    } else {
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SHLB + ot);
    }
    gen_set_label(label2);
    s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */

    tcg_temp_free(t0);
    tcg_temp_free(t1);
    tcg_temp_free(t2);
    tcg_temp_free(a0);
}

static void gen_shift(DisasContext *s1, int op, int ot, int d, int s)
{
    if (s != OR_TMP1)
        gen_op_mov_TN_reg(ot, 1, s);
    switch(op) {
    case OP_ROL:
        gen_rot_rm_T1(s1, ot, d, 0);
        break;
    case OP_ROR:
        gen_rot_rm_T1(s1, ot, d, 1);
        break;
    case OP_SHL:
    case OP_SHL1:
        gen_shift_rm_T1(s1, ot, d, 0, 0);
        break;
    case OP_SHR:
        gen_shift_rm_T1(s1, ot, d, 1, 0);
        break;
    case OP_SAR:
        gen_shift_rm_T1(s1, ot, d, 1, 1);
        break;
    case OP_RCL:
        gen_rotc_rm_T1(s1, ot, d, 0);
        break;
    case OP_RCR:
        gen_rotc_rm_T1(s1, ot, d, 1);
        break;
    }
}

static void gen_shifti(DisasContext *s1, int op, int ot, int d, int c)
{
    switch(op) {
    case OP_ROL:
        gen_rot_rm_im(s1, ot, d, c, 0);
        break;
    case OP_ROR:
        gen_rot_rm_im(s1, ot, d, c, 1);
        break;
    case OP_SHL:
    case OP_SHL1:
        gen_shift_rm_im(s1, ot, d, c, 0, 0);
        break;
    case OP_SHR:
        gen_shift_rm_im(s1, ot, d, c, 1, 0);
        break;
    case OP_SAR:
        gen_shift_rm_im(s1, ot, d, c, 1, 1);
        break;
    default:
        /* currently not optimized */
        gen_op_movl_T1_im(c);
        gen_shift(s1, op, ot, d, OR_TMP1);
        break;
    }
}

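/*
 * ModRM decoding used below: the modrm byte is (mod << 6) | (reg << 3) | rm;
 * with 32-bit addressing, rm == 4 announces a SIB byte laid out as
 * (scale << 6) | (index << 3) | base, and mod selects the displacement
 * size (none, 8-bit, or 16/32-bit).
 */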
gen_lea_modrm(DisasContext * s,int modrm,int * reg_ptr,int * offset_ptr)1965 static void gen_lea_modrm(DisasContext *s, int modrm, int *reg_ptr, int *offset_ptr)
1966 {
1967 target_long disp;
1968 int havesib;
1969 int base;
1970 int index;
1971 int scale;
1972 int opreg;
1973 int mod, rm, code, override, must_add_seg;
1974
1975 override = s->override;
1976 must_add_seg = s->addseg;
1977 if (override >= 0)
1978 must_add_seg = 1;
1979 mod = (modrm >> 6) & 3;
1980 rm = modrm & 7;
1981
1982 if (s->aflag) {
1983
1984 havesib = 0;
1985 base = rm;
1986 index = 0;
1987 scale = 0;
1988
1989 if (base == 4) {
1990 havesib = 1;
1991 code = ldub_code(s->pc++);
1992 scale = (code >> 6) & 3;
1993 index = ((code >> 3) & 7) | REX_X(s);
1994 base = (code & 7);
1995 }
1996 base |= REX_B(s);
1997
1998 switch (mod) {
1999 case 0:
2000 if ((base & 7) == 5) {
2001 base = -1;
2002 disp = (int32_t)ldl_code(s->pc);
2003 s->pc += 4;
2004 if (CODE64(s) && !havesib) {
2005 disp += s->pc + s->rip_offset;
2006 }
2007 } else {
2008 disp = 0;
2009 }
2010 break;
2011 case 1:
2012 disp = (int8_t)ldub_code(s->pc++);
2013 break;
2014 default:
2015 case 2:
2016 disp = ldl_code(s->pc);
2017 s->pc += 4;
2018 break;
2019 }
2020
2021 if (base >= 0) {
2022 /* for correct popl handling with esp */
2023 if (base == 4 && s->popl_esp_hack)
2024 disp += s->popl_esp_hack;
2025 #ifdef TARGET_X86_64
2026 if (s->aflag == 2) {
2027 gen_op_movq_A0_reg(base);
2028 if (disp != 0) {
2029 gen_op_addq_A0_im(disp);
2030 }
2031 } else
2032 #endif
2033 {
2034 gen_op_movl_A0_reg(base);
2035 if (disp != 0)
2036 gen_op_addl_A0_im(disp);
2037 }
2038 } else {
2039 #ifdef TARGET_X86_64
2040 if (s->aflag == 2) {
2041 gen_op_movq_A0_im(disp);
2042 } else
2043 #endif
2044 {
2045 gen_op_movl_A0_im(disp);
2046 }
2047 }
2048 /* XXX: index == 4 is always invalid */
2049 if (havesib && (index != 4 || scale != 0)) {
2050 #ifdef TARGET_X86_64
2051 if (s->aflag == 2) {
2052 gen_op_addq_A0_reg_sN(scale, index);
2053 } else
2054 #endif
2055 {
2056 gen_op_addl_A0_reg_sN(scale, index);
2057 }
2058 }
2059 if (must_add_seg) {
2060 if (override < 0) {
2061 if (base == R_EBP || base == R_ESP)
2062 override = R_SS;
2063 else
2064 override = R_DS;
2065 }
2066 #ifdef TARGET_X86_64
2067 if (s->aflag == 2) {
2068 gen_op_addq_A0_seg(override);
2069 } else
2070 #endif
2071 {
2072 gen_op_addl_A0_seg(override);
2073 }
2074 }
2075 } else {
2076 switch (mod) {
2077 case 0:
2078 if (rm == 6) {
2079 disp = lduw_code(s->pc);
2080 s->pc += 2;
2081 gen_op_movl_A0_im(disp);
2082 rm = 0; /* avoid SS override */
2083 goto no_rm;
2084 } else {
2085 disp = 0;
2086 }
2087 break;
2088 case 1:
2089 disp = (int8_t)ldub_code(s->pc++);
2090 break;
2091 default:
2092 case 2:
2093 disp = lduw_code(s->pc);
2094 s->pc += 2;
2095 break;
2096 }
2097 switch(rm) {
2098 case 0:
2099 gen_op_movl_A0_reg(R_EBX);
2100 gen_op_addl_A0_reg_sN(0, R_ESI);
2101 break;
2102 case 1:
2103 gen_op_movl_A0_reg(R_EBX);
2104 gen_op_addl_A0_reg_sN(0, R_EDI);
2105 break;
2106 case 2:
2107 gen_op_movl_A0_reg(R_EBP);
2108 gen_op_addl_A0_reg_sN(0, R_ESI);
2109 break;
2110 case 3:
2111 gen_op_movl_A0_reg(R_EBP);
2112 gen_op_addl_A0_reg_sN(0, R_EDI);
2113 break;
2114 case 4:
2115 gen_op_movl_A0_reg(R_ESI);
2116 break;
2117 case 5:
2118 gen_op_movl_A0_reg(R_EDI);
2119 break;
2120 case 6:
2121 gen_op_movl_A0_reg(R_EBP);
2122 break;
2123 default:
2124 case 7:
2125 gen_op_movl_A0_reg(R_EBX);
2126 break;
2127 }
2128 if (disp != 0)
2129 gen_op_addl_A0_im(disp);
2130 gen_op_andl_A0_ffff();
2131 no_rm:
2132 if (must_add_seg) {
2133 if (override < 0) {
2134 if (rm == 2 || rm == 3 || rm == 6)
2135 override = R_SS;
2136 else
2137 override = R_DS;
2138 }
2139 gen_op_addl_A0_seg(override);
2140 }
2141 }
2142
2143 opreg = OR_A0;
2144 disp = 0;
2145 *reg_ptr = opreg;
2146 *offset_ptr = disp;
2147 }
2148
2149 static void gen_nop_modrm(DisasContext *s, int modrm)
2150 {
2151 int mod, rm, base, code;
2152
2153 mod = (modrm >> 6) & 3;
2154 if (mod == 3)
2155 return;
2156 rm = modrm & 7;
2157
2158 if (s->aflag) {
2159
2160 base = rm;
2161
2162 if (base == 4) {
2163 code = ldub_code(s->pc++);
2164 base = (code & 7);
2165 }
2166
2167 switch (mod) {
2168 case 0:
2169 if (base == 5) {
2170 s->pc += 4;
2171 }
2172 break;
2173 case 1:
2174 s->pc++;
2175 break;
2176 default:
2177 case 2:
2178 s->pc += 4;
2179 break;
2180 }
2181 } else {
2182 switch (mod) {
2183 case 0:
2184 if (rm == 6) {
2185 s->pc += 2;
2186 }
2187 break;
2188 case 1:
2189 s->pc++;
2190 break;
2191 default:
2192 case 2:
2193 s->pc += 2;
2194 break;
2195 }
2196 }
2197 }
2198
2199 /* used for LEA and MOV AX, mem */
2200 static void gen_add_A0_ds_seg(DisasContext *s)
2201 {
2202 int override, must_add_seg;
2203 must_add_seg = s->addseg;
2204 override = R_DS;
2205 if (s->override >= 0) {
2206 override = s->override;
2207 must_add_seg = 1;
2208 } else {
2209 override = R_DS;
2210 }
2211 if (must_add_seg) {
2212 #ifdef TARGET_X86_64
2213 if (CODE64(s)) {
2214 gen_op_addq_A0_seg(override);
2215 } else
2216 #endif
2217 {
2218 gen_op_addl_A0_seg(override);
2219 }
2220 }
2221 }
2222
2223 /* generate modrm memory load or store of 'reg'. TMP0 is used if reg ==
2224 OR_TMP0 */
2225 static void gen_ldst_modrm(DisasContext *s, int modrm, int ot, int reg, int is_store)
2226 {
2227 int mod, rm, opreg, disp;
2228
2229 mod = (modrm >> 6) & 3;
2230 rm = (modrm & 7) | REX_B(s);
2231 if (mod == 3) {
2232 if (is_store) {
2233 if (reg != OR_TMP0)
2234 gen_op_mov_TN_reg(ot, 0, reg);
2235 gen_op_mov_reg_T0(ot, rm);
2236 } else {
2237 gen_op_mov_TN_reg(ot, 0, rm);
2238 if (reg != OR_TMP0)
2239 gen_op_mov_reg_T0(ot, reg);
2240 }
2241 } else {
2242 gen_lea_modrm(s, modrm, &opreg, &disp);
2243 if (is_store) {
2244 if (reg != OR_TMP0)
2245 gen_op_mov_TN_reg(ot, 0, reg);
2246 gen_op_st_T0_A0(ot + s->mem_index);
2247 } else {
2248 gen_op_ld_T0_A0(ot + s->mem_index);
2249 if (reg != OR_TMP0)
2250 gen_op_mov_reg_T0(ot, reg);
2251 }
2252 }
2253 }
2254
2255 static inline uint32_t insn_get(DisasContext *s, int ot)
2256 {
2257 uint32_t ret;
2258
2259 switch(ot) {
2260 case OT_BYTE:
2261 ret = ldub_code(s->pc);
2262 s->pc++;
2263 break;
2264 case OT_WORD:
2265 ret = lduw_code(s->pc);
2266 s->pc += 2;
2267 break;
2268 default:
2269 case OT_LONG:
2270 ret = ldl_code(s->pc);
2271 s->pc += 4;
2272 break;
2273 }
2274 return ret;
2275 }
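/* For example, insn_get(s, OT_WORD) fetches a 16-bit little-endian
   immediate from the code stream and advances s->pc by 2; OT_BYTE and
   OT_LONG advance it by 1 and 4 bytes respectively. */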
2276
2277 static inline int insn_const_size(unsigned int ot)
2278 {
2279 if (ot <= OT_LONG)
2280 return 1 << ot;
2281 else
2282 return 4;
2283 }
2284
2285 static inline void gen_goto_tb(DisasContext *s, int tb_num, target_ulong eip)
2286 {
2287 TranslationBlock *tb;
2288 target_ulong pc;
2289
2290 pc = s->cs_base + eip;
2291 tb = s->tb;
2292 /* NOTE: we handle the case where the TB spans two pages here */
2293 if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) ||
2294 (pc & TARGET_PAGE_MASK) == ((s->pc - 1) & TARGET_PAGE_MASK)) {
2295 /* jump to same page: we can use a direct jump */
2296 tcg_gen_goto_tb(tb_num);
2297 gen_jmp_im(eip);
2298 tcg_gen_exit_tb((long)tb + tb_num);
2299 } else {
2300 /* jump to another page: currently not optimized */
2301 gen_jmp_im(eip);
2302 gen_eob(s);
2303 }
2304 }
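/* The value passed to tcg_gen_exit_tb() is the TB pointer with the jump
   slot index (0 or 1) encoded in its low bits; the execution loop uses
   it to patch the corresponding direct jump and chain the two blocks. */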
2305
2306 static inline void gen_jcc(DisasContext *s, int b,
2307 target_ulong val, target_ulong next_eip)
2308 {
2309 int l1, l2, cc_op;
2310
2311 cc_op = s->cc_op;
2312 if (s->cc_op != CC_OP_DYNAMIC) {
2313 gen_op_set_cc_op(s->cc_op);
2314 s->cc_op = CC_OP_DYNAMIC;
2315 }
2316 if (s->jmp_opt) {
2317 l1 = gen_new_label();
2318 gen_jcc1(s, cc_op, b, l1);
2319
2320 gen_goto_tb(s, 0, next_eip);
2321
2322 gen_set_label(l1);
2323 gen_goto_tb(s, 1, val);
2324 s->is_jmp = 3;
2325 } else {
2326
2327 l1 = gen_new_label();
2328 l2 = gen_new_label();
2329 gen_jcc1(s, cc_op, b, l1);
2330
2331 gen_jmp_im(next_eip);
2332 tcg_gen_br(l2);
2333
2334 gen_set_label(l1);
2335 gen_jmp_im(val);
2336 gen_set_label(l2);
2337 gen_eob(s);
2338 }
2339 }
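/* In the optimized path above, each arm of the conditional gets its own
   chaining slot: goto_tb slot 0 links the fall-through (not taken)
   successor and slot 1 the taken successor, so both can be patched
   independently. */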
2340
2341 static void gen_setcc(DisasContext *s, int b)
2342 {
2343 int inv, jcc_op, l1;
2344 TCGv t0;
2345
2346 if (is_fast_jcc_case(s, b)) {
2347 /* nominal case: we use a jump */
2348 /* XXX: make it faster by adding new instructions in TCG */
2349 t0 = tcg_temp_local_new();
2350 tcg_gen_movi_tl(t0, 0);
2351 l1 = gen_new_label();
2352 gen_jcc1(s, s->cc_op, b ^ 1, l1);
2353 tcg_gen_movi_tl(t0, 1);
2354 gen_set_label(l1);
2355 tcg_gen_mov_tl(cpu_T[0], t0);
2356 tcg_temp_free(t0);
2357 } else {
2358 /* slow case: it is more efficient not to generate a jump,
2359 although it is questionable whether this optimization is
2360 worthwhile */
2361 inv = b & 1;
2362 jcc_op = (b >> 1) & 7;
2363 gen_setcc_slow_T0(s, jcc_op);
2364 if (inv) {
2365 tcg_gen_xori_tl(cpu_T[0], cpu_T[0], 1);
2366 }
2367 }
2368 }
2369
2370 static inline void gen_op_movl_T0_seg(int seg_reg)
2371 {
2372 tcg_gen_ld32u_tl(cpu_T[0], cpu_env,
2373 offsetof(CPUX86State,segs[seg_reg].selector));
2374 }
2375
2376 static inline void gen_op_movl_seg_T0_vm(int seg_reg)
2377 {
2378 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xffff);
2379 tcg_gen_st32_tl(cpu_T[0], cpu_env,
2380 offsetof(CPUX86State,segs[seg_reg].selector));
2381 tcg_gen_shli_tl(cpu_T[0], cpu_T[0], 4);
2382 tcg_gen_st_tl(cpu_T[0], cpu_env,
2383 offsetof(CPUX86State,segs[seg_reg].base));
2384 }
2385
2386 /* move T0 to seg_reg and determine whether the CPU state may change.
2387 Never call this function with seg_reg == R_CS */
2388 static void gen_movl_seg_T0(DisasContext *s, int seg_reg, target_ulong cur_eip)
2389 {
2390 if (s->pe && !s->vm86) {
2391 /* XXX: optimize by finding processor state dynamically */
2392 if (s->cc_op != CC_OP_DYNAMIC)
2393 gen_op_set_cc_op(s->cc_op);
2394 gen_jmp_im(cur_eip);
2395 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
2396 gen_helper_load_seg(tcg_const_i32(seg_reg), cpu_tmp2_i32);
2397 /* abort translation because the addseg value may change or
2398 because ss32 may change. For R_SS, translation must always
2399 stop as special handling is needed to disable hardware
2400 interrupts for the next instruction */
2401 if (seg_reg == R_SS || (s->code32 && seg_reg < R_FS))
2402 s->is_jmp = 3;
2403 } else {
2404 gen_op_movl_seg_T0_vm(seg_reg);
2405 if (seg_reg == R_SS)
2406 s->is_jmp = 3;
2407 }
2408 }
2409
2410 static inline int svm_is_rep(int prefixes)
2411 {
2412 return ((prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) ? 8 : 0);
2413 }
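/* The value 8 matches the REP-prefix bit of the SVM IOIO exit
   information field, which is presumably why it is returned as-is. */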
2414
2415 static inline void
2416 gen_svm_check_intercept_param(DisasContext *s, target_ulong pc_start,
2417 uint32_t type, uint64_t param)
2418 {
2419 /* no SVM activated; fast case */
2420 if (likely(!(s->flags & HF_SVMI_MASK)))
2421 return;
2422 if (s->cc_op != CC_OP_DYNAMIC)
2423 gen_op_set_cc_op(s->cc_op);
2424 gen_jmp_im(pc_start - s->cs_base);
2425 gen_helper_svm_check_intercept_param(tcg_const_i32(type),
2426 tcg_const_i64(param));
2427 }
2428
2429 static inline void
2430 gen_svm_check_intercept(DisasContext *s, target_ulong pc_start, uint64_t type)
2431 {
2432 gen_svm_check_intercept_param(s, pc_start, type, 0);
2433 }
2434
2435 static inline void gen_stack_update(DisasContext *s, int addend)
2436 {
2437 #ifdef TARGET_X86_64
2438 if (CODE64(s)) {
2439 gen_op_add_reg_im(2, R_ESP, addend);
2440 } else
2441 #endif
2442 if (s->ss32) {
2443 gen_op_add_reg_im(1, R_ESP, addend);
2444 } else {
2445 gen_op_add_reg_im(0, R_ESP, addend);
2446 }
2447 }
2448
2449 /* generate a push. It depends on ss32, addseg and dflag */
2450 static void gen_push_T0(DisasContext *s)
2451 {
2452 #ifdef TARGET_X86_64
2453 if (CODE64(s)) {
2454 gen_op_movq_A0_reg(R_ESP);
2455 if (s->dflag) {
2456 gen_op_addq_A0_im(-8);
2457 gen_op_st_T0_A0(OT_QUAD + s->mem_index);
2458 } else {
2459 gen_op_addq_A0_im(-2);
2460 gen_op_st_T0_A0(OT_WORD + s->mem_index);
2461 }
2462 gen_op_mov_reg_A0(2, R_ESP);
2463 } else
2464 #endif
2465 {
2466 gen_op_movl_A0_reg(R_ESP);
2467 if (!s->dflag)
2468 gen_op_addl_A0_im(-2);
2469 else
2470 gen_op_addl_A0_im(-4);
2471 if (s->ss32) {
2472 if (s->addseg) {
2473 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
2474 gen_op_addl_A0_seg(R_SS);
2475 }
2476 } else {
2477 gen_op_andl_A0_ffff();
2478 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
2479 gen_op_addl_A0_seg(R_SS);
2480 }
2481 gen_op_st_T0_A0(s->dflag + 1 + s->mem_index);
2482 if (s->ss32 && !s->addseg)
2483 gen_op_mov_reg_A0(1, R_ESP);
2484 else
2485 gen_op_mov_reg_T1(s->ss32 + 1, R_ESP);
2486 }
2487 }
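/* The store width follows the operand size: s->dflag + 1 yields OT_WORD
   (2 bytes) for 16-bit pushes and OT_LONG (4 bytes) for 32-bit pushes,
   while s->ss32 selects the width of the final ESP/SP update. */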
2488
2489 /* generate a push. It depends on ss32, addseg and dflag */
2490 /* slower version for T1, only used for call Ev */
2491 static void gen_push_T1(DisasContext *s)
2492 {
2493 #ifdef TARGET_X86_64
2494 if (CODE64(s)) {
2495 gen_op_movq_A0_reg(R_ESP);
2496 if (s->dflag) {
2497 gen_op_addq_A0_im(-8);
2498 gen_op_st_T1_A0(OT_QUAD + s->mem_index);
2499 } else {
2500 gen_op_addq_A0_im(-2);
2501 gen_op_st_T1_A0(OT_WORD + s->mem_index);
2502 }
2503 gen_op_mov_reg_A0(2, R_ESP);
2504 } else
2505 #endif
2506 {
2507 gen_op_movl_A0_reg(R_ESP);
2508 if (!s->dflag)
2509 gen_op_addl_A0_im(-2);
2510 else
2511 gen_op_addl_A0_im(-4);
2512 if (s->ss32) {
2513 if (s->addseg) {
2514 gen_op_addl_A0_seg(R_SS);
2515 }
2516 } else {
2517 gen_op_andl_A0_ffff();
2518 gen_op_addl_A0_seg(R_SS);
2519 }
2520 gen_op_st_T1_A0(s->dflag + 1 + s->mem_index);
2521
2522 if (s->ss32 && !s->addseg)
2523 gen_op_mov_reg_A0(1, R_ESP);
2524 else
2525 gen_stack_update(s, (-2) << s->dflag);
2526 }
2527 }
2528
2529 /* a two-step pop is necessary for precise exceptions */
2530 static void gen_pop_T0(DisasContext *s)
2531 {
2532 #ifdef TARGET_X86_64
2533 if (CODE64(s)) {
2534 gen_op_movq_A0_reg(R_ESP);
2535 gen_op_ld_T0_A0((s->dflag ? OT_QUAD : OT_WORD) + s->mem_index);
2536 } else
2537 #endif
2538 {
2539 gen_op_movl_A0_reg(R_ESP);
2540 if (s->ss32) {
2541 if (s->addseg)
2542 gen_op_addl_A0_seg(R_SS);
2543 } else {
2544 gen_op_andl_A0_ffff();
2545 gen_op_addl_A0_seg(R_SS);
2546 }
2547 gen_op_ld_T0_A0(s->dflag + 1 + s->mem_index);
2548 }
2549 }
2550
2551 static void gen_pop_update(DisasContext *s)
2552 {
2553 #ifdef TARGET_X86_64
2554 if (CODE64(s) && s->dflag) {
2555 gen_stack_update(s, 8);
2556 } else
2557 #endif
2558 {
2559 gen_stack_update(s, 2 << s->dflag);
2560 }
2561 }
2562
2563 static void gen_stack_A0(DisasContext *s)
2564 {
2565 gen_op_movl_A0_reg(R_ESP);
2566 if (!s->ss32)
2567 gen_op_andl_A0_ffff();
2568 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
2569 if (s->addseg)
2570 gen_op_addl_A0_seg(R_SS);
2571 }
2572
2573 /* NOTE: wrap-around in 16-bit mode is not fully handled */
2574 static void gen_pusha(DisasContext *s)
2575 {
2576 int i;
2577 gen_op_movl_A0_reg(R_ESP);
2578 gen_op_addl_A0_im(-16 << s->dflag);
2579 if (!s->ss32)
2580 gen_op_andl_A0_ffff();
2581 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
2582 if (s->addseg)
2583 gen_op_addl_A0_seg(R_SS);
2584 for(i = 0;i < 8; i++) {
2585 gen_op_mov_TN_reg(OT_LONG, 0, 7 - i);
2586 gen_op_st_T0_A0(OT_WORD + s->dflag + s->mem_index);
2587 gen_op_addl_A0_im(2 << s->dflag);
2588 }
2589 gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
2590 }
2591
2592 /* NOTE: wrap-around in 16-bit mode is not fully handled */
2593 static void gen_popa(DisasContext *s)
2594 {
2595 int i;
2596 gen_op_movl_A0_reg(R_ESP);
2597 if (!s->ss32)
2598 gen_op_andl_A0_ffff();
2599 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
2600 tcg_gen_addi_tl(cpu_T[1], cpu_T[1], 16 << s->dflag);
2601 if (s->addseg)
2602 gen_op_addl_A0_seg(R_SS);
2603 for(i = 0;i < 8; i++) {
2604 /* ESP is not reloaded */
2605 if (i != 3) {
2606 gen_op_ld_T0_A0(OT_WORD + s->dflag + s->mem_index);
2607 gen_op_mov_reg_T0(OT_WORD + s->dflag, 7 - i);
2608 }
2609 gen_op_addl_A0_im(2 << s->dflag);
2610 }
2611 gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
2612 }
2613
2614 static void gen_enter(DisasContext *s, int esp_addend, int level)
2615 {
2616 int ot, opsize;
2617
2618 level &= 0x1f;
2619 #ifdef TARGET_X86_64
2620 if (CODE64(s)) {
2621 ot = s->dflag ? OT_QUAD : OT_WORD;
2622 opsize = 1 << ot;
2623
2624 gen_op_movl_A0_reg(R_ESP);
2625 gen_op_addq_A0_im(-opsize);
2626 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
2627
2628 /* push bp */
2629 gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
2630 gen_op_st_T0_A0(ot + s->mem_index);
2631 if (level) {
2632 /* XXX: must save state */
2633 gen_helper_enter64_level(tcg_const_i32(level),
2634 tcg_const_i32((ot == OT_QUAD)),
2635 cpu_T[1]);
2636 }
2637 gen_op_mov_reg_T1(ot, R_EBP);
2638 tcg_gen_addi_tl(cpu_T[1], cpu_T[1], -esp_addend + (-opsize * level));
2639 gen_op_mov_reg_T1(OT_QUAD, R_ESP);
2640 } else
2641 #endif
2642 {
2643 ot = s->dflag + OT_WORD;
2644 opsize = 2 << s->dflag;
2645
2646 gen_op_movl_A0_reg(R_ESP);
2647 gen_op_addl_A0_im(-opsize);
2648 if (!s->ss32)
2649 gen_op_andl_A0_ffff();
2650 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
2651 if (s->addseg)
2652 gen_op_addl_A0_seg(R_SS);
2653 /* push bp */
2654 gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
2655 gen_op_st_T0_A0(ot + s->mem_index);
2656 if (level) {
2657 /* XXX: must save state */
2658 gen_helper_enter_level(tcg_const_i32(level),
2659 tcg_const_i32(s->dflag),
2660 cpu_T[1]);
2661 }
2662 gen_op_mov_reg_T1(ot, R_EBP);
2663 tcg_gen_addi_tl(cpu_T[1], cpu_T[1], -esp_addend + (-opsize * level));
2664 gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
2665 }
2666 }
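/* As a rough sketch of the semantics emitted here, "enter 16, 0"
   behaves like:
       push  ebp
       mov   ebp, esp
       sub   esp, 16
   and a non-zero level additionally copies up to 31 enclosing frame
   pointers via the enter_level helpers used above. */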
2667
2668 static void gen_exception(DisasContext *s, int trapno, target_ulong cur_eip)
2669 {
2670 if (s->cc_op != CC_OP_DYNAMIC)
2671 gen_op_set_cc_op(s->cc_op);
2672 gen_jmp_im(cur_eip);
2673 gen_helper_raise_exception(tcg_const_i32(trapno));
2674 s->is_jmp = 3;
2675 }
2676
2677 /* an interrupt is different from an exception because of the
2678 privilege checks */
2679 static void gen_interrupt(DisasContext *s, int intno,
2680 target_ulong cur_eip, target_ulong next_eip)
2681 {
2682 if (s->cc_op != CC_OP_DYNAMIC)
2683 gen_op_set_cc_op(s->cc_op);
2684 gen_jmp_im(cur_eip);
2685 gen_helper_raise_interrupt(tcg_const_i32(intno),
2686 tcg_const_i32(next_eip - cur_eip));
2687 s->is_jmp = 3;
2688 }
2689
2690 static void gen_debug(DisasContext *s, target_ulong cur_eip)
2691 {
2692 if (s->cc_op != CC_OP_DYNAMIC)
2693 gen_op_set_cc_op(s->cc_op);
2694 gen_jmp_im(cur_eip);
2695 gen_helper_debug();
2696 s->is_jmp = 3;
2697 }
2698
2699 /* generate a generic end of block. A trace exception is also
2700 generated if needed */
2701 static void gen_eob(DisasContext *s)
2702 {
2703 if (s->cc_op != CC_OP_DYNAMIC)
2704 gen_op_set_cc_op(s->cc_op);
2705 if (s->tb->flags & HF_INHIBIT_IRQ_MASK) {
2706 gen_helper_reset_inhibit_irq();
2707 }
2708 if (s->tb->flags & HF_RF_MASK) {
2709 gen_helper_reset_rf();
2710 }
2711 if (s->singlestep_enabled) {
2712 gen_helper_debug();
2713 } else if (s->tf) {
2714 gen_helper_single_step();
2715 } else {
2716 tcg_gen_exit_tb(0);
2717 }
2718 s->is_jmp = 3;
2719 }
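/* Setting s->is_jmp = 3 marks the end of the translation block, so the
   main disassembly loop stops emitting further instructions. */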
2720
2721 /* generate a jump to eip. No segment change must happen before this,
2722 as a direct jump to the next block may occur */
2723 static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num)
2724 {
2725 if (s->jmp_opt) {
2726 if (s->cc_op != CC_OP_DYNAMIC) {
2727 gen_op_set_cc_op(s->cc_op);
2728 s->cc_op = CC_OP_DYNAMIC;
2729 }
2730 gen_goto_tb(s, tb_num, eip);
2731 s->is_jmp = 3;
2732 } else {
2733 gen_jmp_im(eip);
2734 gen_eob(s);
2735 }
2736 }
2737
2738 static void gen_jmp(DisasContext *s, target_ulong eip)
2739 {
2740 gen_jmp_tb(s, eip, 0);
2741 }
2742
2743 static inline void gen_ldq_env_A0(int idx, int offset)
2744 {
2745 int mem_index = (idx >> 2) - 1;
2746 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0, mem_index);
2747 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset);
2748 }
2749
2750 static inline void gen_stq_env_A0(int idx, int offset)
2751 {
2752 int mem_index = (idx >> 2) - 1;
2753 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset);
2754 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0, mem_index);
2755 }
2756
2757 static inline void gen_ldo_env_A0(int idx, int offset)
2758 {
2759 int mem_index = (idx >> 2) - 1;
2760 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0, mem_index);
2761 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(0)));
2762 tcg_gen_addi_tl(cpu_tmp0, cpu_A0, 8);
2763 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_tmp0, mem_index);
2764 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(1)));
2765 }
2766
2767 static inline void gen_sto_env_A0(int idx, int offset)
2768 {
2769 int mem_index = (idx >> 2) - 1;
2770 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(0)));
2771 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0, mem_index);
2772 tcg_gen_addi_tl(cpu_tmp0, cpu_A0, 8);
2773 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(1)));
2774 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_tmp0, mem_index);
2775 }
2776
2777 static inline void gen_op_movo(int d_offset, int s_offset)
2778 {
2779 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset);
2780 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
2781 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset + 8);
2782 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset + 8);
2783 }
2784
2785 static inline void gen_op_movq(int d_offset, int s_offset)
2786 {
2787 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset);
2788 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
2789 }
2790
2791 static inline void gen_op_movl(int d_offset, int s_offset)
2792 {
2793 tcg_gen_ld_i32(cpu_tmp2_i32, cpu_env, s_offset);
2794 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, d_offset);
2795 }
2796
2797 static inline void gen_op_movq_env_0(int d_offset)
2798 {
2799 tcg_gen_movi_i64(cpu_tmp1_i64, 0);
2800 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
2801 }
2802
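/* sse_op_table1 below is indexed by the second opcode byte and by the
   mandatory prefix: column 0 = no prefix, 1 = 0x66, 2 = 0xF3, 3 = 0xF2.
   For example, row 0x58 (SSE_FOP(add)) resolves 0F 58 to addps,
   66 0F 58 to addpd, F3 0F 58 to addss and F2 0F 58 to addsd. */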
2803 #define SSE_SPECIAL ((void *)1)
2804 #define SSE_DUMMY ((void *)2)
2805
2806 #define MMX_OP2(x) { gen_helper_ ## x ## _mmx, gen_helper_ ## x ## _xmm }
2807 #define SSE_FOP(x) { gen_helper_ ## x ## ps, gen_helper_ ## x ## pd, \
2808 gen_helper_ ## x ## ss, gen_helper_ ## x ## sd, }
2809
2810 static void *sse_op_table1[256][4] = {
2811 /* 3DNow! extensions */
2812 [0x0e] = { SSE_DUMMY }, /* femms */
2813 [0x0f] = { SSE_DUMMY }, /* pf... */
2814 /* pure SSE operations */
2815 [0x10] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
2816 [0x11] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
2817 [0x12] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movlps, movlpd, movsldup, movddup */
2818 [0x13] = { SSE_SPECIAL, SSE_SPECIAL }, /* movlps, movlpd */
2819 [0x14] = { gen_helper_punpckldq_xmm, gen_helper_punpcklqdq_xmm },
2820 [0x15] = { gen_helper_punpckhdq_xmm, gen_helper_punpckhqdq_xmm },
2821 [0x16] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movhps, movhpd, movshdup */
2822 [0x17] = { SSE_SPECIAL, SSE_SPECIAL }, /* movhps, movhpd */
2823
2824 [0x28] = { SSE_SPECIAL, SSE_SPECIAL }, /* movaps, movapd */
2825 [0x29] = { SSE_SPECIAL, SSE_SPECIAL }, /* movaps, movapd */
2826 [0x2a] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtpi2ps, cvtpi2pd, cvtsi2ss, cvtsi2sd */
2827 [0x2b] = { SSE_SPECIAL, SSE_SPECIAL }, /* movntps, movntpd */
2828 [0x2c] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvttps2pi, cvttpd2pi, cvttsd2si, cvttss2si */
2829 [0x2d] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtps2pi, cvtpd2pi, cvtsd2si, cvtss2si */
2830 [0x2e] = { gen_helper_ucomiss, gen_helper_ucomisd },
2831 [0x2f] = { gen_helper_comiss, gen_helper_comisd },
2832 [0x50] = { SSE_SPECIAL, SSE_SPECIAL }, /* movmskps, movmskpd */
2833 [0x51] = SSE_FOP(sqrt),
2834 [0x52] = { gen_helper_rsqrtps, NULL, gen_helper_rsqrtss, NULL },
2835 [0x53] = { gen_helper_rcpps, NULL, gen_helper_rcpss, NULL },
2836 [0x54] = { gen_helper_pand_xmm, gen_helper_pand_xmm }, /* andps, andpd */
2837 [0x55] = { gen_helper_pandn_xmm, gen_helper_pandn_xmm }, /* andnps, andnpd */
2838 [0x56] = { gen_helper_por_xmm, gen_helper_por_xmm }, /* orps, orpd */
2839 [0x57] = { gen_helper_pxor_xmm, gen_helper_pxor_xmm }, /* xorps, xorpd */
2840 [0x58] = SSE_FOP(add),
2841 [0x59] = SSE_FOP(mul),
2842 [0x5a] = { gen_helper_cvtps2pd, gen_helper_cvtpd2ps,
2843 gen_helper_cvtss2sd, gen_helper_cvtsd2ss },
2844 [0x5b] = { gen_helper_cvtdq2ps, gen_helper_cvtps2dq, gen_helper_cvttps2dq },
2845 [0x5c] = SSE_FOP(sub),
2846 [0x5d] = SSE_FOP(min),
2847 [0x5e] = SSE_FOP(div),
2848 [0x5f] = SSE_FOP(max),
2849
2850 [0xc2] = SSE_FOP(cmpeq),
2851 [0xc6] = { gen_helper_shufps, gen_helper_shufpd },
2852
2853 [0x38] = { SSE_SPECIAL, SSE_SPECIAL, NULL, SSE_SPECIAL }, /* SSSE3/SSE4 */
2854 [0x3a] = { SSE_SPECIAL, SSE_SPECIAL }, /* SSSE3/SSE4 */
2855
2856 /* MMX ops and their SSE extensions */
2857 [0x60] = MMX_OP2(punpcklbw),
2858 [0x61] = MMX_OP2(punpcklwd),
2859 [0x62] = MMX_OP2(punpckldq),
2860 [0x63] = MMX_OP2(packsswb),
2861 [0x64] = MMX_OP2(pcmpgtb),
2862 [0x65] = MMX_OP2(pcmpgtw),
2863 [0x66] = MMX_OP2(pcmpgtl),
2864 [0x67] = MMX_OP2(packuswb),
2865 [0x68] = MMX_OP2(punpckhbw),
2866 [0x69] = MMX_OP2(punpckhwd),
2867 [0x6a] = MMX_OP2(punpckhdq),
2868 [0x6b] = MMX_OP2(packssdw),
2869 [0x6c] = { NULL, gen_helper_punpcklqdq_xmm },
2870 [0x6d] = { NULL, gen_helper_punpckhqdq_xmm },
2871 [0x6e] = { SSE_SPECIAL, SSE_SPECIAL }, /* movd mm, ea */
2872 [0x6f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, , movdqu */
2873 [0x70] = { gen_helper_pshufw_mmx,
2874 gen_helper_pshufd_xmm,
2875 gen_helper_pshufhw_xmm,
2876 gen_helper_pshuflw_xmm },
2877 [0x71] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftw */
2878 [0x72] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftd */
2879 [0x73] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftq */
2880 [0x74] = MMX_OP2(pcmpeqb),
2881 [0x75] = MMX_OP2(pcmpeqw),
2882 [0x76] = MMX_OP2(pcmpeql),
2883 [0x77] = { SSE_DUMMY }, /* emms */
2884 [0x7c] = { NULL, gen_helper_haddpd, NULL, gen_helper_haddps },
2885 [0x7d] = { NULL, gen_helper_hsubpd, NULL, gen_helper_hsubps },
2886 [0x7e] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movd, movd, , movq */
2887 [0x7f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, movdqu */
2888 [0xc4] = { SSE_SPECIAL, SSE_SPECIAL }, /* pinsrw */
2889 [0xc5] = { SSE_SPECIAL, SSE_SPECIAL }, /* pextrw */
2890 [0xd0] = { NULL, gen_helper_addsubpd, NULL, gen_helper_addsubps },
2891 [0xd1] = MMX_OP2(psrlw),
2892 [0xd2] = MMX_OP2(psrld),
2893 [0xd3] = MMX_OP2(psrlq),
2894 [0xd4] = MMX_OP2(paddq),
2895 [0xd5] = MMX_OP2(pmullw),
2896 [0xd6] = { NULL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL },
2897 [0xd7] = { SSE_SPECIAL, SSE_SPECIAL }, /* pmovmskb */
2898 [0xd8] = MMX_OP2(psubusb),
2899 [0xd9] = MMX_OP2(psubusw),
2900 [0xda] = MMX_OP2(pminub),
2901 [0xdb] = MMX_OP2(pand),
2902 [0xdc] = MMX_OP2(paddusb),
2903 [0xdd] = MMX_OP2(paddusw),
2904 [0xde] = MMX_OP2(pmaxub),
2905 [0xdf] = MMX_OP2(pandn),
2906 [0xe0] = MMX_OP2(pavgb),
2907 [0xe1] = MMX_OP2(psraw),
2908 [0xe2] = MMX_OP2(psrad),
2909 [0xe3] = MMX_OP2(pavgw),
2910 [0xe4] = MMX_OP2(pmulhuw),
2911 [0xe5] = MMX_OP2(pmulhw),
2912 [0xe6] = { NULL, gen_helper_cvttpd2dq, gen_helper_cvtdq2pd, gen_helper_cvtpd2dq },
2913 [0xe7] = { SSE_SPECIAL, SSE_SPECIAL }, /* movntq, movntdq */
2914 [0xe8] = MMX_OP2(psubsb),
2915 [0xe9] = MMX_OP2(psubsw),
2916 [0xea] = MMX_OP2(pminsw),
2917 [0xeb] = MMX_OP2(por),
2918 [0xec] = MMX_OP2(paddsb),
2919 [0xed] = MMX_OP2(paddsw),
2920 [0xee] = MMX_OP2(pmaxsw),
2921 [0xef] = MMX_OP2(pxor),
2922 [0xf0] = { NULL, NULL, NULL, SSE_SPECIAL }, /* lddqu */
2923 [0xf1] = MMX_OP2(psllw),
2924 [0xf2] = MMX_OP2(pslld),
2925 [0xf3] = MMX_OP2(psllq),
2926 [0xf4] = MMX_OP2(pmuludq),
2927 [0xf5] = MMX_OP2(pmaddwd),
2928 [0xf6] = MMX_OP2(psadbw),
2929 [0xf7] = MMX_OP2(maskmov),
2930 [0xf8] = MMX_OP2(psubb),
2931 [0xf9] = MMX_OP2(psubw),
2932 [0xfa] = MMX_OP2(psubl),
2933 [0xfb] = MMX_OP2(psubq),
2934 [0xfc] = MMX_OP2(paddb),
2935 [0xfd] = MMX_OP2(paddw),
2936 [0xfe] = MMX_OP2(paddl),
2937 };
2938
2939 static void *sse_op_table2[3 * 8][2] = {
2940 [0 + 2] = MMX_OP2(psrlw),
2941 [0 + 4] = MMX_OP2(psraw),
2942 [0 + 6] = MMX_OP2(psllw),
2943 [8 + 2] = MMX_OP2(psrld),
2944 [8 + 4] = MMX_OP2(psrad),
2945 [8 + 6] = MMX_OP2(pslld),
2946 [16 + 2] = MMX_OP2(psrlq),
2947 [16 + 3] = { NULL, gen_helper_psrldq_xmm },
2948 [16 + 6] = MMX_OP2(psllq),
2949 [16 + 7] = { NULL, gen_helper_pslldq_xmm },
2950 };
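/* This table serves the immediate-form shifts 0x71-0x73: the
   ((b - 1) & 3) * 8 row block selects the element width (w/d/q) and the
   ModRM reg field selects the operation (/2 = shift right logical,
   /4 = arithmetic right, /6 = left). The /3 and /7 rows are the 128-bit
   byte shifts psrldq/pslldq, which only exist in the 0x66-prefixed
   (xmm) column. */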
2951
2952 static void *sse_op_table3[4 * 3] = {
2953 gen_helper_cvtsi2ss,
2954 gen_helper_cvtsi2sd,
2955 X86_64_ONLY(gen_helper_cvtsq2ss),
2956 X86_64_ONLY(gen_helper_cvtsq2sd),
2957
2958 gen_helper_cvttss2si,
2959 gen_helper_cvttsd2si,
2960 X86_64_ONLY(gen_helper_cvttss2sq),
2961 X86_64_ONLY(gen_helper_cvttsd2sq),
2962
2963 gen_helper_cvtss2si,
2964 gen_helper_cvtsd2si,
2965 X86_64_ONLY(gen_helper_cvtss2sq),
2966 X86_64_ONLY(gen_helper_cvtsd2sq),
2967 };
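/* Layout: rows of four, {cvtsi2ss, cvtsi2sd, cvtsq2ss, cvtsq2sd}, with
   the truncating integer conversions at offset 4 and the rounding ones
   at offset 8; inside each row the REX.W variants occupy the upper
   pair, matching the (s->dflag == 2) * 2 indexing used below. */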
2968
2969 static void *sse_op_table4[8][4] = {
2970 SSE_FOP(cmpeq),
2971 SSE_FOP(cmplt),
2972 SSE_FOP(cmple),
2973 SSE_FOP(cmpunord),
2974 SSE_FOP(cmpneq),
2975 SSE_FOP(cmpnlt),
2976 SSE_FOP(cmpnle),
2977 SSE_FOP(cmpord),
2978 };
2979
2980 static void *sse_op_table5[256] = {
2981 [0x0c] = gen_helper_pi2fw,
2982 [0x0d] = gen_helper_pi2fd,
2983 [0x1c] = gen_helper_pf2iw,
2984 [0x1d] = gen_helper_pf2id,
2985 [0x8a] = gen_helper_pfnacc,
2986 [0x8e] = gen_helper_pfpnacc,
2987 [0x90] = gen_helper_pfcmpge,
2988 [0x94] = gen_helper_pfmin,
2989 [0x96] = gen_helper_pfrcp,
2990 [0x97] = gen_helper_pfrsqrt,
2991 [0x9a] = gen_helper_pfsub,
2992 [0x9e] = gen_helper_pfadd,
2993 [0xa0] = gen_helper_pfcmpgt,
2994 [0xa4] = gen_helper_pfmax,
2995 [0xa6] = gen_helper_movq, /* pfrcpit1; no need to actually increase precision */
2996 [0xa7] = gen_helper_movq, /* pfrsqit1 */
2997 [0xaa] = gen_helper_pfsubr,
2998 [0xae] = gen_helper_pfacc,
2999 [0xb0] = gen_helper_pfcmpeq,
3000 [0xb4] = gen_helper_pfmul,
3001 [0xb6] = gen_helper_movq, /* pfrcpit2 */
3002 [0xb7] = gen_helper_pmulhrw_mmx,
3003 [0xbb] = gen_helper_pswapd,
3004 [0xbf] = gen_helper_pavgb_mmx /* pavgusb */
3005 };
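/* 3DNow! encodes its operation in a trailing suffix byte fetched after
   the operands, and this table is indexed directly by that byte; the
   entries mapped to gen_helper_movq intentionally approximate the
   iterative reciprocal refinement steps, as noted above. */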
3006
3007 struct sse_op_helper_s {
3008 void *op[2]; uint32_t ext_mask;
3009 };
3010 #define SSSE3_OP(x) { MMX_OP2(x), CPUID_EXT_SSSE3 }
3011 #define SSE41_OP(x) { { NULL, gen_helper_ ## x ## _xmm }, CPUID_EXT_SSE41 }
3012 #define SSE42_OP(x) { { NULL, gen_helper_ ## x ## _xmm }, CPUID_EXT_SSE42 }
3013 #define SSE41_SPECIAL { { NULL, SSE_SPECIAL }, CPUID_EXT_SSE41 }
3014 static struct sse_op_helper_s sse_op_table6[256] = {
3015 [0x00] = SSSE3_OP(pshufb),
3016 [0x01] = SSSE3_OP(phaddw),
3017 [0x02] = SSSE3_OP(phaddd),
3018 [0x03] = SSSE3_OP(phaddsw),
3019 [0x04] = SSSE3_OP(pmaddubsw),
3020 [0x05] = SSSE3_OP(phsubw),
3021 [0x06] = SSSE3_OP(phsubd),
3022 [0x07] = SSSE3_OP(phsubsw),
3023 [0x08] = SSSE3_OP(psignb),
3024 [0x09] = SSSE3_OP(psignw),
3025 [0x0a] = SSSE3_OP(psignd),
3026 [0x0b] = SSSE3_OP(pmulhrsw),
3027 [0x10] = SSE41_OP(pblendvb),
3028 [0x14] = SSE41_OP(blendvps),
3029 [0x15] = SSE41_OP(blendvpd),
3030 [0x17] = SSE41_OP(ptest),
3031 [0x1c] = SSSE3_OP(pabsb),
3032 [0x1d] = SSSE3_OP(pabsw),
3033 [0x1e] = SSSE3_OP(pabsd),
3034 [0x20] = SSE41_OP(pmovsxbw),
3035 [0x21] = SSE41_OP(pmovsxbd),
3036 [0x22] = SSE41_OP(pmovsxbq),
3037 [0x23] = SSE41_OP(pmovsxwd),
3038 [0x24] = SSE41_OP(pmovsxwq),
3039 [0x25] = SSE41_OP(pmovsxdq),
3040 [0x28] = SSE41_OP(pmuldq),
3041 [0x29] = SSE41_OP(pcmpeqq),
3042 [0x2a] = SSE41_SPECIAL, /* movntdqa */
3043 [0x2b] = SSE41_OP(packusdw),
3044 [0x30] = SSE41_OP(pmovzxbw),
3045 [0x31] = SSE41_OP(pmovzxbd),
3046 [0x32] = SSE41_OP(pmovzxbq),
3047 [0x33] = SSE41_OP(pmovzxwd),
3048 [0x34] = SSE41_OP(pmovzxwq),
3049 [0x35] = SSE41_OP(pmovzxdq),
3050 [0x37] = SSE42_OP(pcmpgtq),
3051 [0x38] = SSE41_OP(pminsb),
3052 [0x39] = SSE41_OP(pminsd),
3053 [0x3a] = SSE41_OP(pminuw),
3054 [0x3b] = SSE41_OP(pminud),
3055 [0x3c] = SSE41_OP(pmaxsb),
3056 [0x3d] = SSE41_OP(pmaxsd),
3057 [0x3e] = SSE41_OP(pmaxuw),
3058 [0x3f] = SSE41_OP(pmaxud),
3059 [0x40] = SSE41_OP(pmulld),
3060 [0x41] = SSE41_OP(phminposuw),
3061 };
3062
3063 static struct sse_op_helper_s sse_op_table7[256] = {
3064 [0x08] = SSE41_OP(roundps),
3065 [0x09] = SSE41_OP(roundpd),
3066 [0x0a] = SSE41_OP(roundss),
3067 [0x0b] = SSE41_OP(roundsd),
3068 [0x0c] = SSE41_OP(blendps),
3069 [0x0d] = SSE41_OP(blendpd),
3070 [0x0e] = SSE41_OP(pblendw),
3071 [0x0f] = SSSE3_OP(palignr),
3072 [0x14] = SSE41_SPECIAL, /* pextrb */
3073 [0x15] = SSE41_SPECIAL, /* pextrw */
3074 [0x16] = SSE41_SPECIAL, /* pextrd/pextrq */
3075 [0x17] = SSE41_SPECIAL, /* extractps */
3076 [0x20] = SSE41_SPECIAL, /* pinsrb */
3077 [0x21] = SSE41_SPECIAL, /* insertps */
3078 [0x22] = SSE41_SPECIAL, /* pinsrd/pinsrq */
3079 [0x40] = SSE41_OP(dpps),
3080 [0x41] = SSE41_OP(dppd),
3081 [0x42] = SSE41_OP(mpsadbw),
3082 [0x60] = SSE42_OP(pcmpestrm),
3083 [0x61] = SSE42_OP(pcmpestri),
3084 [0x62] = SSE42_OP(pcmpistrm),
3085 [0x63] = SSE42_OP(pcmpistri),
3086 };
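/* Each entry in the two tables above pairs its generators with an
   ext_mask; gen_sse() checks that mask against s->cpuid_ext_features,
   so e.g. an SSE42_OP entry raises an illegal-opcode exception on CPUs
   without CPUID_EXT_SSE42. */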
3087
3088 static void gen_sse(DisasContext *s, int b, target_ulong pc_start, int rex_r)
3089 {
3090 int b1, op1_offset, op2_offset, is_xmm, val, ot;
3091 int modrm, mod, rm, reg, reg_addr, offset_addr;
3092 void *sse_op2;
3093
3094 b &= 0xff;
3095 if (s->prefix & PREFIX_DATA)
3096 b1 = 1;
3097 else if (s->prefix & PREFIX_REPZ)
3098 b1 = 2;
3099 else if (s->prefix & PREFIX_REPNZ)
3100 b1 = 3;
3101 else
3102 b1 = 0;
3103 sse_op2 = sse_op_table1[b][b1];
3104 if (!sse_op2)
3105 goto illegal_op;
3106 if ((b <= 0x5f && b >= 0x10) || b == 0xc6 || b == 0xc2) {
3107 is_xmm = 1;
3108 } else {
3109 if (b1 == 0) {
3110 /* MMX case */
3111 is_xmm = 0;
3112 } else {
3113 is_xmm = 1;
3114 }
3115 }
3116 /* simple MMX/SSE operation */
3117 if (s->flags & HF_TS_MASK) {
3118 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
3119 return;
3120 }
3121 if (s->flags & HF_EM_MASK) {
3122 illegal_op:
3123 gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
3124 return;
3125 }
3126 if (is_xmm && !(s->flags & HF_OSFXSR_MASK))
3127 if ((b != 0x38 && b != 0x3a) || (s->prefix & PREFIX_DATA))
3128 goto illegal_op;
3129 if (b == 0x0e) {
3130 if (!(s->cpuid_ext2_features & CPUID_EXT2_3DNOW))
3131 goto illegal_op;
3132 /* femms */
3133 gen_helper_emms();
3134 return;
3135 }
3136 if (b == 0x77) {
3137 /* emms */
3138 gen_helper_emms();
3139 return;
3140 }
3141 /* prepare MMX state (XXX: optimize by storing fptt and fptags in
3142 the static cpu state) */
3143 if (!is_xmm) {
3144 gen_helper_enter_mmx();
3145 }
3146
3147 modrm = ldub_code(s->pc++);
3148 reg = ((modrm >> 3) & 7);
3149 if (is_xmm)
3150 reg |= rex_r;
3151 mod = (modrm >> 6) & 3;
3152 if (sse_op2 == SSE_SPECIAL) {
3153 b |= (b1 << 8);
3154 switch(b) {
3155 case 0x0e7: /* movntq */
3156 if (mod == 3)
3157 goto illegal_op;
3158 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3159 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
3160 break;
3161 case 0x1e7: /* movntdq */
3162 case 0x02b: /* movntps */
3163 case 0x12b: /* movntpd */
3164 case 0x3f0: /* lddqu */
3165 if (mod == 3)
3166 goto illegal_op;
3167 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3168 gen_sto_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3169 break;
3170 case 0x6e: /* movd mm, ea */
3171 #ifdef TARGET_X86_64
3172 if (s->dflag == 2) {
3173 gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
3174 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,fpregs[reg].mmx));
3175 } else
3176 #endif
3177 {
3178 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
3179 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3180 offsetof(CPUX86State,fpregs[reg].mmx));
3181 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
3182 gen_helper_movl_mm_T0_mmx(cpu_ptr0, cpu_tmp2_i32);
3183 }
3184 break;
3185 case 0x16e: /* movd xmm, ea */
3186 #ifdef TARGET_X86_64
3187 if (s->dflag == 2) {
3188 gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
3189 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3190 offsetof(CPUX86State,xmm_regs[reg]));
3191 gen_helper_movq_mm_T0_xmm(cpu_ptr0, cpu_T[0]);
3192 } else
3193 #endif
3194 {
3195 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
3196 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3197 offsetof(CPUX86State,xmm_regs[reg]));
3198 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
3199 gen_helper_movl_mm_T0_xmm(cpu_ptr0, cpu_tmp2_i32);
3200 }
3201 break;
3202 case 0x6f: /* movq mm, ea */
3203 if (mod != 3) {
3204 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3205 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
3206 } else {
3207 rm = (modrm & 7);
3208 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env,
3209 offsetof(CPUX86State,fpregs[rm].mmx));
3210 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env,
3211 offsetof(CPUX86State,fpregs[reg].mmx));
3212 }
3213 break;
3214 case 0x010: /* movups */
3215 case 0x110: /* movupd */
3216 case 0x028: /* movaps */
3217 case 0x128: /* movapd */
3218 case 0x16f: /* movdqa xmm, ea */
3219 case 0x26f: /* movdqu xmm, ea */
3220 if (mod != 3) {
3221 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3222 gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3223 } else {
3224 rm = (modrm & 7) | REX_B(s);
3225 gen_op_movo(offsetof(CPUX86State,xmm_regs[reg]),
3226 offsetof(CPUX86State,xmm_regs[rm]));
3227 }
3228 break;
3229 case 0x210: /* movss xmm, ea */
3230 if (mod != 3) {
3231 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3232 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
3233 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3234 gen_op_movl_T0_0();
3235 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
3236 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
3237 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
3238 } else {
3239 rm = (modrm & 7) | REX_B(s);
3240 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
3241 offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
3242 }
3243 break;
3244 case 0x310: /* movsd xmm, ea */
3245 if (mod != 3) {
3246 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3247 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3248 gen_op_movl_T0_0();
3249 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
3250 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
3251 } else {
3252 rm = (modrm & 7) | REX_B(s);
3253 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3254 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3255 }
3256 break;
3257 case 0x012: /* movlps */
3258 case 0x112: /* movlpd */
3259 if (mod != 3) {
3260 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3261 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3262 } else {
3263 /* movhlps */
3264 rm = (modrm & 7) | REX_B(s);
3265 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3266 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
3267 }
3268 break;
3269 case 0x212: /* movsldup */
3270 if (mod != 3) {
3271 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3272 gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3273 } else {
3274 rm = (modrm & 7) | REX_B(s);
3275 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
3276 offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
3277 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
3278 offsetof(CPUX86State,xmm_regs[rm].XMM_L(2)));
3279 }
3280 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
3281 offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3282 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
3283 offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
3284 break;
3285 case 0x312: /* movddup */
3286 if (mod != 3) {
3287 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3288 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3289 } else {
3290 rm = (modrm & 7) | REX_B(s);
3291 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3292 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3293 }
3294 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
3295 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3296 break;
3297 case 0x016: /* movhps */
3298 case 0x116: /* movhpd */
3299 if (mod != 3) {
3300 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3301 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
3302 } else {
3303 /* movlhps */
3304 rm = (modrm & 7) | REX_B(s);
3305 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
3306 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3307 }
3308 break;
3309 case 0x216: /* movshdup */
3310 if (mod != 3) {
3311 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3312 gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3313 } else {
3314 rm = (modrm & 7) | REX_B(s);
3315 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
3316 offsetof(CPUX86State,xmm_regs[rm].XMM_L(1)));
3317 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
3318 offsetof(CPUX86State,xmm_regs[rm].XMM_L(3)));
3319 }
3320 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
3321 offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
3322 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
3323 offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
3324 break;
3325 case 0x7e: /* movd ea, mm */
3326 #ifdef TARGET_X86_64
3327 if (s->dflag == 2) {
3328 tcg_gen_ld_i64(cpu_T[0], cpu_env,
3329 offsetof(CPUX86State,fpregs[reg].mmx));
3330 gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
3331 } else
3332 #endif
3333 {
3334 tcg_gen_ld32u_tl(cpu_T[0], cpu_env,
3335 offsetof(CPUX86State,fpregs[reg].mmx.MMX_L(0)));
3336 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
3337 }
3338 break;
3339 case 0x17e: /* movd ea, xmm */
3340 #ifdef TARGET_X86_64
3341 if (s->dflag == 2) {
3342 tcg_gen_ld_i64(cpu_T[0], cpu_env,
3343 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3344 gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
3345 } else
3346 #endif
3347 {
3348 tcg_gen_ld32u_tl(cpu_T[0], cpu_env,
3349 offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3350 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
3351 }
3352 break;
3353 case 0x27e: /* movq xmm, ea */
3354 if (mod != 3) {
3355 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3356 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3357 } else {
3358 rm = (modrm & 7) | REX_B(s);
3359 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3360 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3361 }
3362 gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
3363 break;
3364 case 0x7f: /* movq ea, mm */
3365 if (mod != 3) {
3366 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3367 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
3368 } else {
3369 rm = (modrm & 7);
3370 gen_op_movq(offsetof(CPUX86State,fpregs[rm].mmx),
3371 offsetof(CPUX86State,fpregs[reg].mmx));
3372 }
3373 break;
3374 case 0x011: /* movups */
3375 case 0x111: /* movupd */
3376 case 0x029: /* movaps */
3377 case 0x129: /* movapd */
3378 case 0x17f: /* movdqa ea, xmm */
3379 case 0x27f: /* movdqu ea, xmm */
3380 if (mod != 3) {
3381 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3382 gen_sto_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3383 } else {
3384 rm = (modrm & 7) | REX_B(s);
3385 gen_op_movo(offsetof(CPUX86State,xmm_regs[rm]),
3386 offsetof(CPUX86State,xmm_regs[reg]));
3387 }
3388 break;
3389 case 0x211: /* movss ea, xmm */
3390 if (mod != 3) {
3391 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3392 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3393 gen_op_st_T0_A0(OT_LONG + s->mem_index);
3394 } else {
3395 rm = (modrm & 7) | REX_B(s);
3396 gen_op_movl(offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)),
3397 offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3398 }
3399 break;
3400 case 0x311: /* movsd ea, xmm */
3401 if (mod != 3) {
3402 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3403 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3404 } else {
3405 rm = (modrm & 7) | REX_B(s);
3406 gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
3407 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3408 }
3409 break;
3410 case 0x013: /* movlps */
3411 case 0x113: /* movlpd */
3412 if (mod != 3) {
3413 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3414 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3415 } else {
3416 goto illegal_op;
3417 }
3418 break;
3419 case 0x017: /* movhps */
3420 case 0x117: /* movhpd */
3421 if (mod != 3) {
3422 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3423 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
3424 } else {
3425 goto illegal_op;
3426 }
3427 break;
3428 case 0x71: /* shift mm, im */
3429 case 0x72:
3430 case 0x73:
3431 case 0x171: /* shift xmm, im */
3432 case 0x172:
3433 case 0x173:
3434 val = ldub_code(s->pc++);
3435 if (is_xmm) {
3436 gen_op_movl_T0_im(val);
3437 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(0)));
3438 gen_op_movl_T0_0();
3439 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(1)));
3440 op1_offset = offsetof(CPUX86State,xmm_t0);
3441 } else {
3442 gen_op_movl_T0_im(val);
3443 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,mmx_t0.MMX_L(0)));
3444 gen_op_movl_T0_0();
3445 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,mmx_t0.MMX_L(1)));
3446 op1_offset = offsetof(CPUX86State,mmx_t0);
3447 }
3448 sse_op2 = sse_op_table2[((b - 1) & 3) * 8 + (((modrm >> 3)) & 7)][b1];
3449 if (!sse_op2)
3450 goto illegal_op;
3451 if (is_xmm) {
3452 rm = (modrm & 7) | REX_B(s);
3453 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3454 } else {
3455 rm = (modrm & 7);
3456 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
3457 }
3458 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op2_offset);
3459 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op1_offset);
3460 ((void (*)(TCGv_ptr, TCGv_ptr))sse_op2)(cpu_ptr0, cpu_ptr1);
3461 break;
3462 case 0x050: /* movmskps */
3463 rm = (modrm & 7) | REX_B(s);
3464 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3465 offsetof(CPUX86State,xmm_regs[rm]));
3466 gen_helper_movmskps(cpu_tmp2_i32, cpu_ptr0);
3467 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
3468 gen_op_mov_reg_T0(OT_LONG, reg);
3469 break;
3470 case 0x150: /* movmskpd */
3471 rm = (modrm & 7) | REX_B(s);
3472 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3473 offsetof(CPUX86State,xmm_regs[rm]));
3474 gen_helper_movmskpd(cpu_tmp2_i32, cpu_ptr0);
3475 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
3476 gen_op_mov_reg_T0(OT_LONG, reg);
3477 break;
3478 case 0x02a: /* cvtpi2ps */
3479 case 0x12a: /* cvtpi2pd */
3480 gen_helper_enter_mmx();
3481 if (mod != 3) {
3482 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3483 op2_offset = offsetof(CPUX86State,mmx_t0);
3484 gen_ldq_env_A0(s->mem_index, op2_offset);
3485 } else {
3486 rm = (modrm & 7);
3487 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
3488 }
3489 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
3490 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3491 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3492 switch(b >> 8) {
3493 case 0x0:
3494 gen_helper_cvtpi2ps(cpu_ptr0, cpu_ptr1);
3495 break;
3496 default:
3497 case 0x1:
3498 gen_helper_cvtpi2pd(cpu_ptr0, cpu_ptr1);
3499 break;
3500 }
3501 break;
3502 case 0x22a: /* cvtsi2ss */
3503 case 0x32a: /* cvtsi2sd */
3504 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
3505 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
3506 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
3507 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3508 sse_op2 = sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2)];
3509 if (ot == OT_LONG) {
3510 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
3511 ((void (*)(TCGv_ptr, TCGv_i32))sse_op2)(cpu_ptr0, cpu_tmp2_i32);
3512 } else {
3513 ((void (*)(TCGv_ptr, TCGv))sse_op2)(cpu_ptr0, cpu_T[0]);
3514 }
3515 break;
3516 case 0x02c: /* cvttps2pi */
3517 case 0x12c: /* cvttpd2pi */
3518 case 0x02d: /* cvtps2pi */
3519 case 0x12d: /* cvtpd2pi */
3520 gen_helper_enter_mmx();
3521 if (mod != 3) {
3522 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3523 op2_offset = offsetof(CPUX86State,xmm_t0);
3524 gen_ldo_env_A0(s->mem_index, op2_offset);
3525 } else {
3526 rm = (modrm & 7) | REX_B(s);
3527 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3528 }
3529 op1_offset = offsetof(CPUX86State,fpregs[reg & 7].mmx);
3530 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3531 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3532 switch(b) {
3533 case 0x02c:
3534 gen_helper_cvttps2pi(cpu_ptr0, cpu_ptr1);
3535 break;
3536 case 0x12c:
3537 gen_helper_cvttpd2pi(cpu_ptr0, cpu_ptr1);
3538 break;
3539 case 0x02d:
3540 gen_helper_cvtps2pi(cpu_ptr0, cpu_ptr1);
3541 break;
3542 case 0x12d:
3543 gen_helper_cvtpd2pi(cpu_ptr0, cpu_ptr1);
3544 break;
3545 }
3546 break;
3547 case 0x22c: /* cvttss2si */
3548 case 0x32c: /* cvttsd2si */
3549 case 0x22d: /* cvtss2si */
3550 case 0x32d: /* cvtsd2si */
3551 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
3552 if (mod != 3) {
3553 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3554 if ((b >> 8) & 1) {
3555 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_t0.XMM_Q(0)));
3556 } else {
3557 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
3558 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(0)));
3559 }
3560 op2_offset = offsetof(CPUX86State,xmm_t0);
3561 } else {
3562 rm = (modrm & 7) | REX_B(s);
3563 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3564 }
3565 sse_op2 = sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2) + 4 +
3566 (b & 1) * 4];
3567 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op2_offset);
3568 if (ot == OT_LONG) {
3569 ((void (*)(TCGv_i32, TCGv_ptr))sse_op2)(cpu_tmp2_i32, cpu_ptr0);
3570 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
3571 } else {
3572 ((void (*)(TCGv, TCGv_ptr))sse_op2)(cpu_T[0], cpu_ptr0);
3573 }
3574 gen_op_mov_reg_T0(ot, reg);
3575 break;
3576 case 0xc4: /* pinsrw */
3577 case 0x1c4:
3578 s->rip_offset = 1;
3579 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
3580 val = ldub_code(s->pc++);
3581 if (b1) {
3582 val &= 7;
3583 tcg_gen_st16_tl(cpu_T[0], cpu_env,
3584 offsetof(CPUX86State,xmm_regs[reg].XMM_W(val)));
3585 } else {
3586 val &= 3;
3587 tcg_gen_st16_tl(cpu_T[0], cpu_env,
3588 offsetof(CPUX86State,fpregs[reg].mmx.MMX_W(val)));
3589 }
3590 break;
3591 case 0xc5: /* pextrw */
3592 case 0x1c5:
3593 if (mod != 3)
3594 goto illegal_op;
3595 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
3596 val = ldub_code(s->pc++);
3597 if (b1) {
3598 val &= 7;
3599 rm = (modrm & 7) | REX_B(s);
3600 tcg_gen_ld16u_tl(cpu_T[0], cpu_env,
3601 offsetof(CPUX86State,xmm_regs[rm].XMM_W(val)));
3602 } else {
3603 val &= 3;
3604 rm = (modrm & 7);
3605 tcg_gen_ld16u_tl(cpu_T[0], cpu_env,
3606 offsetof(CPUX86State,fpregs[rm].mmx.MMX_W(val)));
3607 }
3608 reg = ((modrm >> 3) & 7) | rex_r;
3609 gen_op_mov_reg_T0(ot, reg);
3610 break;
3611 case 0x1d6: /* movq ea, xmm */
3612 if (mod != 3) {
3613 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3614 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3615 } else {
3616 rm = (modrm & 7) | REX_B(s);
3617 gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
3618 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3619 gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
3620 }
3621 break;
3622 case 0x2d6: /* movq2dq */
3623 gen_helper_enter_mmx();
3624 rm = (modrm & 7);
3625 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3626 offsetof(CPUX86State,fpregs[rm].mmx));
3627 gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
3628 break;
3629 case 0x3d6: /* movdq2q */
3630 gen_helper_enter_mmx();
3631 rm = (modrm & 7) | REX_B(s);
3632 gen_op_movq(offsetof(CPUX86State,fpregs[reg & 7].mmx),
3633 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3634 break;
3635 case 0xd7: /* pmovmskb */
3636 case 0x1d7:
3637 if (mod != 3)
3638 goto illegal_op;
3639 if (b1) {
3640 rm = (modrm & 7) | REX_B(s);
3641 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, offsetof(CPUX86State,xmm_regs[rm]));
3642 gen_helper_pmovmskb_xmm(cpu_tmp2_i32, cpu_ptr0);
3643 } else {
3644 rm = (modrm & 7);
3645 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, offsetof(CPUX86State,fpregs[rm].mmx));
3646 gen_helper_pmovmskb_mmx(cpu_tmp2_i32, cpu_ptr0);
3647 }
3648 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
3649 reg = ((modrm >> 3) & 7) | rex_r;
3650 gen_op_mov_reg_T0(OT_LONG, reg);
3651 break;
3652 case 0x138:
3653 if (s->prefix & PREFIX_REPNZ)
3654 goto crc32;
3655 case 0x038:
3656 b = modrm;
3657 modrm = ldub_code(s->pc++);
3658 rm = modrm & 7;
3659 reg = ((modrm >> 3) & 7) | rex_r;
3660 mod = (modrm >> 6) & 3;
3661
3662 sse_op2 = sse_op_table6[b].op[b1];
3663 if (!sse_op2)
3664 goto illegal_op;
3665 if (!(s->cpuid_ext_features & sse_op_table6[b].ext_mask))
3666 goto illegal_op;
3667
3668 if (b1) {
3669 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
3670 if (mod == 3) {
3671 op2_offset = offsetof(CPUX86State,xmm_regs[rm | REX_B(s)]);
3672 } else {
3673 op2_offset = offsetof(CPUX86State,xmm_t0);
3674 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3675 switch (b) {
3676 case 0x20: case 0x30: /* pmovsxbw, pmovzxbw */
3677 case 0x23: case 0x33: /* pmovsxwd, pmovzxwd */
3678 case 0x25: case 0x35: /* pmovsxdq, pmovzxdq */
3679 gen_ldq_env_A0(s->mem_index, op2_offset +
3680 offsetof(XMMReg, XMM_Q(0)));
3681 break;
3682 case 0x21: case 0x31: /* pmovsxbd, pmovzxbd */
3683 case 0x24: case 0x34: /* pmovsxwq, pmovzxwq */
3684 tcg_gen_qemu_ld32u(cpu_tmp0, cpu_A0,
3685 (s->mem_index >> 2) - 1);
3686 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_tmp0);
3687 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, op2_offset +
3688 offsetof(XMMReg, XMM_L(0)));
3689 break;
3690 case 0x22: case 0x32: /* pmovsxbq, pmovzxbq */
3691 tcg_gen_qemu_ld16u(cpu_tmp0, cpu_A0,
3692 (s->mem_index >> 2) - 1);
3693 tcg_gen_st16_tl(cpu_tmp0, cpu_env, op2_offset +
3694 offsetof(XMMReg, XMM_W(0)));
3695 break;
3696 case 0x2a: /* movntdqa */
3697 gen_ldo_env_A0(s->mem_index, op1_offset);
3698 return;
3699 default:
3700 gen_ldo_env_A0(s->mem_index, op2_offset);
3701 }
3702 }
3703 } else {
3704 op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
3705 if (mod == 3) {
3706 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
3707 } else {
3708 op2_offset = offsetof(CPUX86State,mmx_t0);
3709 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3710 gen_ldq_env_A0(s->mem_index, op2_offset);
3711 }
3712 }
3713 if (sse_op2 == SSE_SPECIAL)
3714 goto illegal_op;
3715
3716 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3717 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3718 ((void (*)(TCGv_ptr, TCGv_ptr))sse_op2)(cpu_ptr0, cpu_ptr1);
3719
3720 if (b == 0x17)
3721 s->cc_op = CC_OP_EFLAGS;
3722 break;
3723 case 0x338: /* crc32 */
3724 crc32:
3725 b = modrm;
3726 modrm = ldub_code(s->pc++);
3727 reg = ((modrm >> 3) & 7) | rex_r;
3728
3729 if (b != 0xf0 && b != 0xf1)
3730 goto illegal_op;
3731 if (!(s->cpuid_ext_features & CPUID_EXT_SSE42))
3732 goto illegal_op;
3733
3734 if (b == 0xf0)
3735 ot = OT_BYTE;
3736 else if (b == 0xf1 && s->dflag != 2)
3737 if (s->prefix & PREFIX_DATA)
3738 ot = OT_WORD;
3739 else
3740 ot = OT_LONG;
3741 else
3742 ot = OT_QUAD;
3743
3744 gen_op_mov_TN_reg(OT_LONG, 0, reg);
3745 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
3746 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
3747 gen_helper_crc32(cpu_T[0], cpu_tmp2_i32,
3748 cpu_T[0], tcg_const_i32(8 << ot));
3749
3750 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
3751 gen_op_mov_reg_T0(ot, reg);
3752 break;
3753 case 0x03a:
3754 case 0x13a:
3755 b = modrm;
3756 modrm = ldub_code(s->pc++);
3757 rm = modrm & 7;
3758 reg = ((modrm >> 3) & 7) | rex_r;
3759 mod = (modrm >> 6) & 3;
3760
3761 sse_op2 = sse_op_table7[b].op[b1];
3762 if (!sse_op2)
3763 goto illegal_op;
3764 if (!(s->cpuid_ext_features & sse_op_table7[b].ext_mask))
3765 goto illegal_op;
3766
3767 if (sse_op2 == SSE_SPECIAL) {
3768 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
3769 rm = (modrm & 7) | REX_B(s);
3770 if (mod != 3)
3771 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3772 reg = ((modrm >> 3) & 7) | rex_r;
3773 val = ldub_code(s->pc++);
3774 switch (b) {
3775 case 0x14: /* pextrb */
3776 tcg_gen_ld8u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,
3777 xmm_regs[reg].XMM_B(val & 15)));
3778 if (mod == 3)
3779 gen_op_mov_reg_T0(ot, rm);
3780 else
3781 tcg_gen_qemu_st8(cpu_T[0], cpu_A0,
3782 (s->mem_index >> 2) - 1);
3783 break;
3784 case 0x15: /* pextrw */
3785 tcg_gen_ld16u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,
3786 xmm_regs[reg].XMM_W(val & 7)));
3787 if (mod == 3)
3788 gen_op_mov_reg_T0(ot, rm);
3789 else
3790 tcg_gen_qemu_st16(cpu_T[0], cpu_A0,
3791 (s->mem_index >> 2) - 1);
3792 break;
3793 case 0x16:
3794 if (ot == OT_LONG) { /* pextrd */
3795 tcg_gen_ld_i32(cpu_tmp2_i32, cpu_env,
3796 offsetof(CPUX86State,
3797 xmm_regs[reg].XMM_L(val & 3)));
3798 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
3799 if (mod == 3)
3800 gen_op_mov_reg_v(ot, rm, cpu_T[0]);
3801 else
3802 tcg_gen_qemu_st32(cpu_T[0], cpu_A0,
3803 (s->mem_index >> 2) - 1);
3804 } else { /* pextrq */
3805 #ifdef TARGET_X86_64
3806 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env,
3807 offsetof(CPUX86State,
3808 xmm_regs[reg].XMM_Q(val & 1)));
3809 if (mod == 3)
3810 gen_op_mov_reg_v(ot, rm, cpu_tmp1_i64);
3811 else
3812 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
3813 (s->mem_index >> 2) - 1);
3814 #else
3815 goto illegal_op;
3816 #endif
3817 }
3818 break;
3819 case 0x17: /* extractps */
3820 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,
3821 xmm_regs[reg].XMM_L(val & 3)));
3822 if (mod == 3)
3823 gen_op_mov_reg_T0(ot, rm);
3824 else
3825 tcg_gen_qemu_st32(cpu_T[0], cpu_A0,
3826 (s->mem_index >> 2) - 1);
3827 break;
3828 case 0x20: /* pinsrb */
3829 if (mod == 3)
3830 gen_op_mov_TN_reg(OT_LONG, 0, rm);
3831 else
3832 tcg_gen_qemu_ld8u(cpu_tmp0, cpu_A0,
3833 (s->mem_index >> 2) - 1);
3834 tcg_gen_st8_tl(cpu_tmp0, cpu_env, offsetof(CPUX86State,
3835 xmm_regs[reg].XMM_B(val & 15)));
3836 break;
3837 case 0x21: /* insertps */
3838 if (mod == 3) {
3839 tcg_gen_ld_i32(cpu_tmp2_i32, cpu_env,
3840 offsetof(CPUX86State,xmm_regs[rm]
3841 .XMM_L((val >> 6) & 3)));
3842 } else {
3843 tcg_gen_qemu_ld32u(cpu_tmp0, cpu_A0,
3844 (s->mem_index >> 2) - 1);
3845 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_tmp0);
3846 }
3847 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env,
3848 offsetof(CPUX86State,xmm_regs[reg]
3849 .XMM_L((val >> 4) & 3)));
3850 if ((val >> 0) & 1)
3851 tcg_gen_st_i32(tcg_const_i32(0 /*float32_zero*/),
3852 cpu_env, offsetof(CPUX86State,
3853 xmm_regs[reg].XMM_L(0)));
3854 if ((val >> 1) & 1)
3855 tcg_gen_st_i32(tcg_const_i32(0 /*float32_zero*/),
3856 cpu_env, offsetof(CPUX86State,
3857 xmm_regs[reg].XMM_L(1)));
3858 if ((val >> 2) & 1)
3859 tcg_gen_st_i32(tcg_const_i32(0 /*float32_zero*/),
3860 cpu_env, offsetof(CPUX86State,
3861 xmm_regs[reg].XMM_L(2)));
3862 if ((val >> 3) & 1)
3863 tcg_gen_st_i32(tcg_const_i32(0 /*float32_zero*/),
3864 cpu_env, offsetof(CPUX86State,
3865 xmm_regs[reg].XMM_L(3)));
3866 break;
3867 case 0x22:
3868 if (ot == OT_LONG) { /* pinsrd */
3869 if (mod == 3)
3870 gen_op_mov_v_reg(ot, cpu_tmp0, rm);
3871 else
3872 tcg_gen_qemu_ld32u(cpu_tmp0, cpu_A0,
3873 (s->mem_index >> 2) - 1);
3874 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_tmp0);
3875 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env,
3876 offsetof(CPUX86State,
3877 xmm_regs[reg].XMM_L(val & 3)));
3878 } else { /* pinsrq */
3879 #ifdef TARGET_X86_64
3880 if (mod == 3)
3881 gen_op_mov_v_reg(ot, cpu_tmp1_i64, rm);
3882 else
3883 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
3884 (s->mem_index >> 2) - 1);
3885 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env,
3886 offsetof(CPUX86State,
3887 xmm_regs[reg].XMM_Q(val & 1)));
3888 #else
3889 goto illegal_op;
3890 #endif
3891 }
3892 break;
3893 }
3894 return;
3895 }
3896
3897 if (b1) {
3898 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
3899 if (mod == 3) {
3900 op2_offset = offsetof(CPUX86State,xmm_regs[rm | REX_B(s)]);
3901 } else {
3902 op2_offset = offsetof(CPUX86State,xmm_t0);
3903 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3904 gen_ldo_env_A0(s->mem_index, op2_offset);
3905 }
3906 } else {
3907 op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
3908 if (mod == 3) {
3909 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
3910 } else {
3911 op2_offset = offsetof(CPUX86State,mmx_t0);
3912 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3913 gen_ldq_env_A0(s->mem_index, op2_offset);
3914 }
3915 }
3916 val = ldub_code(s->pc++);
3917
3918 if ((b & 0xfc) == 0x60) { /* pcmpXstrX */
3919 s->cc_op = CC_OP_EFLAGS;
3920
3921 if (s->dflag == 2)
3922 /* The helper must use entire 64-bit gp registers */
3923 val |= 1 << 8;
3924 }
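/* The 32-bit immediate handed to the pcmpXstrX helper therefore carries
   the architectural imm8 control byte in bits 0..7 and, in bit 8, a
   private "use the full 64-bit RAX/RDX lengths" flag; the split is a
   convention between this translator and its helper, not part of the
   instruction encoding. */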
3925
3926 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3927 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3928 ((void (*)(TCGv_ptr, TCGv_ptr, TCGv_i32))sse_op2)(cpu_ptr0, cpu_ptr1, tcg_const_i32(val));
3929 break;
3930 default:
3931 goto illegal_op;
3932 }
3933 } else {
3934 /* generic MMX or SSE operation */
3935 switch(b) {
3936 case 0x70: /* pshufx insn */
3937 case 0xc6: /* pshufx insn */
3938 case 0xc2: /* compare insns */
3939 s->rip_offset = 1;
3940 break;
3941 default:
3942 break;
3943 }
3944 if (is_xmm) {
3945 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
3946 if (mod != 3) {
3947 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3948 op2_offset = offsetof(CPUX86State,xmm_t0);
3949 if (b1 >= 2 && ((b >= 0x50 && b <= 0x5f && b != 0x5b) ||
3950 b == 0xc2)) {
3951 /* specific case for SSE single instructions */
3952 if (b1 == 2) {
3953 /* 32 bit access */
3954 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
3955 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(0)));
3956 } else {
3957 /* 64 bit access */
3958 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_t0.XMM_D(0)));
3959 }
3960 } else {
3961 gen_ldo_env_A0(s->mem_index, op2_offset);
3962 }
3963 } else {
3964 rm = (modrm & 7) | REX_B(s);
3965 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3966 }
3967 } else {
3968 op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
3969 if (mod != 3) {
3970 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3971 op2_offset = offsetof(CPUX86State,mmx_t0);
3972 gen_ldq_env_A0(s->mem_index, op2_offset);
3973 } else {
3974 rm = (modrm & 7);
3975 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
3976 }
3977 }
3978 switch(b) {
3979 case 0x0f: /* 3DNow! data insns */
3980 if (!(s->cpuid_ext2_features & CPUID_EXT2_3DNOW))
3981 goto illegal_op;
3982 val = ldub_code(s->pc++);
3983 sse_op2 = sse_op_table5[val];
3984 if (!sse_op2)
3985 goto illegal_op;
3986 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3987 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3988 ((void (*)(TCGv_ptr, TCGv_ptr))sse_op2)(cpu_ptr0, cpu_ptr1);
3989 break;
3990 case 0x70: /* pshufx insn */
3991 case 0xc6: /* pshufx insn */
3992 val = ldub_code(s->pc++);
3993 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3994 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3995 ((void (*)(TCGv_ptr, TCGv_ptr, TCGv_i32))sse_op2)(cpu_ptr0, cpu_ptr1, tcg_const_i32(val));
3996 break;
3997 case 0xc2:
3998 /* compare insns */
3999 val = ldub_code(s->pc++);
4000 if (val >= 8)
4001 goto illegal_op;
4002 sse_op2 = sse_op_table4[val][b1];
4003 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
4004 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
4005 ((void (*)(TCGv_ptr, TCGv_ptr))sse_op2)(cpu_ptr0, cpu_ptr1);
4006 break;
4007 case 0xf7:
4008 /* maskmov : we must prepare A0 */
4009 if (mod != 3)
4010 goto illegal_op;
4011 #ifdef TARGET_X86_64
4012 if (s->aflag == 2) {
4013 gen_op_movq_A0_reg(R_EDI);
4014 } else
4015 #endif
4016 {
4017 gen_op_movl_A0_reg(R_EDI);
4018 if (s->aflag == 0)
4019 gen_op_andl_A0_ffff();
4020 }
4021 gen_add_A0_ds_seg(s);
4022
4023 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
4024 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
4025 ((void (*)(TCGv_ptr, TCGv_ptr, TCGv))sse_op2)(cpu_ptr0, cpu_ptr1, cpu_A0);
4026 break;
4027 default:
4028 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
4029 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
4030 ((void (*)(TCGv_ptr, TCGv_ptr))sse_op2)(cpu_ptr0, cpu_ptr1);
4031 break;
4032 }
4033 if (b == 0x2e || b == 0x2f) {
4034 s->cc_op = CC_OP_EFLAGS;
4035 }
4036 }
4037 }
4038
4039 /* convert one instruction. s->is_jmp is set if the translation must
4040 be stopped. Return the next pc value */
4041 static target_ulong disas_insn(DisasContext *s, target_ulong pc_start)
4042 {
4043 int b, prefixes, aflag, dflag;
4044 int shift, ot;
4045 int modrm, reg, rm, mod, reg_addr, op, opreg, offset_addr, val;
4046 target_ulong next_eip, tval;
4047 int rex_w, rex_r;
4048
4049 if (unlikely(qemu_loglevel_mask(CPU_LOG_TB_OP)))
4050 tcg_gen_debug_insn_start(pc_start);
4051 s->pc = pc_start;
4052 prefixes = 0;
4053 aflag = s->code32;
4054 dflag = s->code32;
4055 s->override = -1;
4056 rex_w = -1;
4057 rex_r = 0;
4058 #ifdef TARGET_X86_64
4059 s->rex_x = 0;
4060 s->rex_b = 0;
4061 x86_64_hregs = 0;
4062 #endif
4063 s->rip_offset = 0; /* for relative ip address */
4064 next_byte:
4065 b = ldub_code(s->pc);
4066 s->pc++;
4067 /* check prefixes */
4068 #ifdef TARGET_X86_64
4069 if (CODE64(s)) {
4070 switch (b) {
4071 case 0xf3:
4072 prefixes |= PREFIX_REPZ;
4073 goto next_byte;
4074 case 0xf2:
4075 prefixes |= PREFIX_REPNZ;
4076 goto next_byte;
4077 case 0xf0:
4078 prefixes |= PREFIX_LOCK;
4079 goto next_byte;
4080 case 0x2e:
4081 s->override = R_CS;
4082 goto next_byte;
4083 case 0x36:
4084 s->override = R_SS;
4085 goto next_byte;
4086 case 0x3e:
4087 s->override = R_DS;
4088 goto next_byte;
4089 case 0x26:
4090 s->override = R_ES;
4091 goto next_byte;
4092 case 0x64:
4093 s->override = R_FS;
4094 goto next_byte;
4095 case 0x65:
4096 s->override = R_GS;
4097 goto next_byte;
4098 case 0x66:
4099 prefixes |= PREFIX_DATA;
4100 goto next_byte;
4101 case 0x67:
4102 prefixes |= PREFIX_ADR;
4103 goto next_byte;
4104 case 0x40 ... 0x4f:
4105 /* REX prefix */
4106 rex_w = (b >> 3) & 1;
4107 rex_r = (b & 0x4) << 1;
4108 s->rex_x = (b & 0x2) << 2;
4109 REX_B(s) = (b & 0x1) << 3;
4110 x86_64_hregs = 1; /* select uniform byte register addressing */
4111 goto next_byte;
4112 }
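/* REX is 0100WRXB: W selects 64-bit operand size while R, X and B extend
   the ModRM reg field, the SIB index and the ModRM rm/SIB base.  The
   shifts above park R, X and B directly in bit position 3 so they can be
   OR'ed straight into the 3-bit register numbers later, e.g.:
       reg = ((modrm >> 3) & 7) | rex_r;    (yields 0..15) */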
4113 if (rex_w == 1) {
4114 /* 0x66 is ignored if rex.w is set */
4115 dflag = 2;
4116 } else {
4117 if (prefixes & PREFIX_DATA)
4118 dflag ^= 1;
4119 }
4120 if (!(prefixes & PREFIX_ADR))
4121 aflag = 2;
4122 } else
4123 #endif
4124 {
4125 switch (b) {
4126 case 0xf3:
4127 prefixes |= PREFIX_REPZ;
4128 goto next_byte;
4129 case 0xf2:
4130 prefixes |= PREFIX_REPNZ;
4131 goto next_byte;
4132 case 0xf0:
4133 prefixes |= PREFIX_LOCK;
4134 goto next_byte;
4135 case 0x2e:
4136 s->override = R_CS;
4137 goto next_byte;
4138 case 0x36:
4139 s->override = R_SS;
4140 goto next_byte;
4141 case 0x3e:
4142 s->override = R_DS;
4143 goto next_byte;
4144 case 0x26:
4145 s->override = R_ES;
4146 goto next_byte;
4147 case 0x64:
4148 s->override = R_FS;
4149 goto next_byte;
4150 case 0x65:
4151 s->override = R_GS;
4152 goto next_byte;
4153 case 0x66:
4154 prefixes |= PREFIX_DATA;
4155 goto next_byte;
4156 case 0x67:
4157 prefixes |= PREFIX_ADR;
4158 goto next_byte;
4159 }
4160 if (prefixes & PREFIX_DATA)
4161 dflag ^= 1;
4162 if (prefixes & PREFIX_ADR)
4163 aflag ^= 1;
4164 }
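/* From here on, aflag and dflag encode the effective address and operand
   sizes as 0 = 16 bit, 1 = 32 bit, 2 = 64 bit.  Outside long mode the
   0x66/0x67 prefixes just toggle between 16 and 32 bit; in long mode
   addressing defaults to 64 bit (only 0x67 drops it to 32) and REX.W
   forces 64-bit operands, overriding any 0x66 prefix. */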
4165
4166 s->prefix = prefixes;
4167 s->aflag = aflag;
4168 s->dflag = dflag;
4169
4170 /* lock generation */
4171 if (prefixes & PREFIX_LOCK)
4172 gen_helper_lock();
4173
4174 /* now check op code */
4175 reswitch:
4176 switch(b) {
4177 case 0x0f:
4178 /**************************/
4179 /* extended op code */
4180 b = ldub_code(s->pc++) | 0x100;
4181 goto reswitch;
4182
4183 /**************************/
4184 /* arith & logic */
4185 case 0x00 ... 0x05:
4186 case 0x08 ... 0x0d:
4187 case 0x10 ... 0x15:
4188 case 0x18 ... 0x1d:
4189 case 0x20 ... 0x25:
4190 case 0x28 ... 0x2d:
4191 case 0x30 ... 0x35:
4192 case 0x38 ... 0x3d:
4193 {
4194 int op, f, val;
4195 op = (b >> 3) & 7;
4196 f = (b >> 1) & 3;
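/* The classic arith block packs everything into the opcode byte:
   bits 5..3 select the operation (ADD, OR, ADC, SBB, AND, SUB, XOR,
   CMP), bits 2..1 the form handled by the switch below (0: Ev,Gv
   1: Gv,Ev  2: AL/eAX,Iv) and bit 0 the byte/full-size distinction. */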
4197
4198 if ((b & 1) == 0)
4199 ot = OT_BYTE;
4200 else
4201 ot = dflag + OT_WORD;
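/* "dflag + OT_WORD" is the recurring idiom that maps dflag 0/1/2 onto
   OT_WORD/OT_LONG/OT_QUAD; like the CRC32 note above, it assumes the
   OT_* enumerators are consecutive. */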
4202
4203 switch(f) {
4204 case 0: /* OP Ev, Gv */
4205 modrm = ldub_code(s->pc++);
4206 reg = ((modrm >> 3) & 7) | rex_r;
4207 mod = (modrm >> 6) & 3;
4208 rm = (modrm & 7) | REX_B(s);
4209 if (mod != 3) {
4210 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4211 opreg = OR_TMP0;
4212 } else if (op == OP_XORL && rm == reg) {
4213 xor_zero:
4214 /* xor reg, reg optimisation */
4215 gen_op_movl_T0_0();
4216 s->cc_op = CC_OP_LOGICB + ot;
4217 gen_op_mov_reg_T0(ot, reg);
4218 gen_op_update1_cc();
4219 break;
4220 } else {
4221 opreg = rm;
4222 }
4223 gen_op_mov_TN_reg(ot, 1, reg);
4224 gen_op(s, op, ot, opreg);
4225 break;
4226 case 1: /* OP Gv, Ev */
4227 modrm = ldub_code(s->pc++);
4228 mod = (modrm >> 6) & 3;
4229 reg = ((modrm >> 3) & 7) | rex_r;
4230 rm = (modrm & 7) | REX_B(s);
4231 if (mod != 3) {
4232 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4233 gen_op_ld_T1_A0(ot + s->mem_index);
4234 } else if (op == OP_XORL && rm == reg) {
4235 goto xor_zero;
4236 } else {
4237 gen_op_mov_TN_reg(ot, 1, rm);
4238 }
4239 gen_op(s, op, ot, reg);
4240 break;
4241 case 2: /* OP A, Iv */
4242 val = insn_get(s, ot);
4243 gen_op_movl_T1_im(val);
4244 gen_op(s, op, ot, OR_EAX);
4245 break;
4246 }
4247 }
4248 break;
4249
4250 case 0x82:
4251 if (CODE64(s))
4252 goto illegal_op;
4253 case 0x80: /* GRP1 */
4254 case 0x81:
4255 case 0x83:
4256 {
4257 int val;
4258
4259 if ((b & 1) == 0)
4260 ot = OT_BYTE;
4261 else
4262 ot = dflag + OT_WORD;
4263
4264 modrm = ldub_code(s->pc++);
4265 mod = (modrm >> 6) & 3;
4266 rm = (modrm & 7) | REX_B(s);
4267 op = (modrm >> 3) & 7;
4268
4269 if (mod != 3) {
4270 if (b == 0x83)
4271 s->rip_offset = 1;
4272 else
4273 s->rip_offset = insn_const_size(ot);
4274 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4275 opreg = OR_TMP0;
4276 } else {
4277 opreg = rm;
4278 }
4279
4280 switch(b) {
4281 default:
4282 case 0x80:
4283 case 0x81:
4284 case 0x82:
4285 val = insn_get(s, ot);
4286 break;
4287 case 0x83:
4288 val = (int8_t)insn_get(s, OT_BYTE);
4289 break;
4290 }
4291 gen_op_movl_T1_im(val);
4292 gen_op(s, op, ot, opreg);
4293 }
4294 break;
4295
4296 /**************************/
4297 /* inc, dec, and other misc arith */
4298 case 0x40 ... 0x47: /* inc Gv */
4299 ot = dflag ? OT_LONG : OT_WORD;
4300 gen_inc(s, ot, OR_EAX + (b & 7), 1);
4301 break;
4302 case 0x48 ... 0x4f: /* dec Gv */
4303 ot = dflag ? OT_LONG : OT_WORD;
4304 gen_inc(s, ot, OR_EAX + (b & 7), -1);
4305 break;
4306 case 0xf6: /* GRP3 */
4307 case 0xf7:
4308 if ((b & 1) == 0)
4309 ot = OT_BYTE;
4310 else
4311 ot = dflag + OT_WORD;
4312
4313 modrm = ldub_code(s->pc++);
4314 mod = (modrm >> 6) & 3;
4315 rm = (modrm & 7) | REX_B(s);
4316 op = (modrm >> 3) & 7;
4317 if (mod != 3) {
4318 if (op == 0)
4319 s->rip_offset = insn_const_size(ot);
4320 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4321 gen_op_ld_T0_A0(ot + s->mem_index);
4322 } else {
4323 gen_op_mov_TN_reg(ot, 0, rm);
4324 }
4325
4326 switch(op) {
4327 case 0: /* test */
4328 val = insn_get(s, ot);
4329 gen_op_movl_T1_im(val);
4330 gen_op_testl_T0_T1_cc();
4331 s->cc_op = CC_OP_LOGICB + ot;
4332 break;
4333 case 2: /* not */
4334 tcg_gen_not_tl(cpu_T[0], cpu_T[0]);
4335 if (mod != 3) {
4336 gen_op_st_T0_A0(ot + s->mem_index);
4337 } else {
4338 gen_op_mov_reg_T0(ot, rm);
4339 }
4340 break;
4341 case 3: /* neg */
4342 tcg_gen_neg_tl(cpu_T[0], cpu_T[0]);
4343 if (mod != 3) {
4344 gen_op_st_T0_A0(ot + s->mem_index);
4345 } else {
4346 gen_op_mov_reg_T0(ot, rm);
4347 }
4348 gen_op_update_neg_cc();
4349 s->cc_op = CC_OP_SUBB + ot;
4350 break;
4351 case 4: /* mul */
4352 switch(ot) {
4353 case OT_BYTE:
4354 gen_op_mov_TN_reg(OT_BYTE, 1, R_EAX);
4355 tcg_gen_ext8u_tl(cpu_T[0], cpu_T[0]);
4356 tcg_gen_ext8u_tl(cpu_T[1], cpu_T[1]);
4357 /* XXX: use 32 bit mul which could be faster */
4358 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
4359 gen_op_mov_reg_T0(OT_WORD, R_EAX);
4360 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4361 tcg_gen_andi_tl(cpu_cc_src, cpu_T[0], 0xff00);
4362 s->cc_op = CC_OP_MULB;
4363 break;
4364 case OT_WORD:
4365 gen_op_mov_TN_reg(OT_WORD, 1, R_EAX);
4366 tcg_gen_ext16u_tl(cpu_T[0], cpu_T[0]);
4367 tcg_gen_ext16u_tl(cpu_T[1], cpu_T[1]);
4368 /* XXX: use 32 bit mul which could be faster */
4369 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
4370 gen_op_mov_reg_T0(OT_WORD, R_EAX);
4371 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4372 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 16);
4373 gen_op_mov_reg_T0(OT_WORD, R_EDX);
4374 tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
4375 s->cc_op = CC_OP_MULW;
4376 break;
4377 default:
4378 case OT_LONG:
4379 #ifdef TARGET_X86_64
4380 gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
4381 tcg_gen_ext32u_tl(cpu_T[0], cpu_T[0]);
4382 tcg_gen_ext32u_tl(cpu_T[1], cpu_T[1]);
4383 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
4384 gen_op_mov_reg_T0(OT_LONG, R_EAX);
4385 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4386 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 32);
4387 gen_op_mov_reg_T0(OT_LONG, R_EDX);
4388 tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
4389 #else
4390 {
4391 TCGv_i64 t0, t1;
4392 t0 = tcg_temp_new_i64();
4393 t1 = tcg_temp_new_i64();
4394 gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
4395 tcg_gen_extu_i32_i64(t0, cpu_T[0]);
4396 tcg_gen_extu_i32_i64(t1, cpu_T[1]);
4397 tcg_gen_mul_i64(t0, t0, t1);
4398 tcg_gen_trunc_i64_i32(cpu_T[0], t0);
4399 gen_op_mov_reg_T0(OT_LONG, R_EAX);
4400 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4401 tcg_gen_shri_i64(t0, t0, 32);
4402 tcg_gen_trunc_i64_i32(cpu_T[0], t0);
4403 gen_op_mov_reg_T0(OT_LONG, R_EDX);
4404 tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
4405 }
4406 #endif
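/* When TARGET_X86_64 is not defined, target_ulong is only 32 bits wide,
   so the 32x32->64 unsigned product is built in an i64 temporary: the
   truncated low half goes to EAX and the high half, obtained by a
   32-bit right shift, to EDX. */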
4407 s->cc_op = CC_OP_MULL;
4408 break;
4409 #ifdef TARGET_X86_64
4410 case OT_QUAD:
4411 gen_helper_mulq_EAX_T0(cpu_T[0]);
4412 s->cc_op = CC_OP_MULQ;
4413 break;
4414 #endif
4415 }
4416 break;
4417 case 5: /* imul */
4418 switch(ot) {
4419 case OT_BYTE:
4420 gen_op_mov_TN_reg(OT_BYTE, 1, R_EAX);
4421 tcg_gen_ext8s_tl(cpu_T[0], cpu_T[0]);
4422 tcg_gen_ext8s_tl(cpu_T[1], cpu_T[1]);
4423 /* XXX: use 32 bit mul which could be faster */
4424 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
4425 gen_op_mov_reg_T0(OT_WORD, R_EAX);
4426 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4427 tcg_gen_ext8s_tl(cpu_tmp0, cpu_T[0]);
4428 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
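/* For signed imul the CF/OF condition is "the product does not fit the
   narrow type": cc_src is the full result minus its 8-bit sign
   extension, i.e. non-zero exactly when the flags must be set.  The
   16- and 32-bit cases below apply the same trick with ext16s/ext32s. */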
4429 s->cc_op = CC_OP_MULB;
4430 break;
4431 case OT_WORD:
4432 gen_op_mov_TN_reg(OT_WORD, 1, R_EAX);
4433 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
4434 tcg_gen_ext16s_tl(cpu_T[1], cpu_T[1]);
4435 /* XXX: use 32 bit mul which could be faster */
4436 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
4437 gen_op_mov_reg_T0(OT_WORD, R_EAX);
4438 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4439 tcg_gen_ext16s_tl(cpu_tmp0, cpu_T[0]);
4440 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
4441 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 16);
4442 gen_op_mov_reg_T0(OT_WORD, R_EDX);
4443 s->cc_op = CC_OP_MULW;
4444 break;
4445 default:
4446 case OT_LONG:
4447 #ifdef TARGET_X86_64
4448 gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
4449 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
4450 tcg_gen_ext32s_tl(cpu_T[1], cpu_T[1]);
4451 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
4452 gen_op_mov_reg_T0(OT_LONG, R_EAX);
4453 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4454 tcg_gen_ext32s_tl(cpu_tmp0, cpu_T[0]);
4455 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
4456 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 32);
4457 gen_op_mov_reg_T0(OT_LONG, R_EDX);
4458 #else
4459 {
4460 TCGv_i64 t0, t1;
4461 t0 = tcg_temp_new_i64();
4462 t1 = tcg_temp_new_i64();
4463 gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
4464 tcg_gen_ext_i32_i64(t0, cpu_T[0]);
4465 tcg_gen_ext_i32_i64(t1, cpu_T[1]);
4466 tcg_gen_mul_i64(t0, t0, t1);
4467 tcg_gen_trunc_i64_i32(cpu_T[0], t0);
4468 gen_op_mov_reg_T0(OT_LONG, R_EAX);
4469 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4470 tcg_gen_sari_tl(cpu_tmp0, cpu_T[0], 31);
4471 tcg_gen_shri_i64(t0, t0, 32);
4472 tcg_gen_trunc_i64_i32(cpu_T[0], t0);
4473 gen_op_mov_reg_T0(OT_LONG, R_EDX);
4474 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
4475 }
4476 #endif
4477 s->cc_op = CC_OP_MULL;
4478 break;
4479 #ifdef TARGET_X86_64
4480 case OT_QUAD:
4481 gen_helper_imulq_EAX_T0(cpu_T[0]);
4482 s->cc_op = CC_OP_MULQ;
4483 break;
4484 #endif
4485 }
4486 break;
4487 case 6: /* div */
4488 switch(ot) {
4489 case OT_BYTE:
4490 gen_jmp_im(pc_start - s->cs_base);
4491 gen_helper_divb_AL(cpu_T[0]);
4492 break;
4493 case OT_WORD:
4494 gen_jmp_im(pc_start - s->cs_base);
4495 gen_helper_divw_AX(cpu_T[0]);
4496 break;
4497 default:
4498 case OT_LONG:
4499 gen_jmp_im(pc_start - s->cs_base);
4500 gen_helper_divl_EAX(cpu_T[0]);
4501 break;
4502 #ifdef TARGET_X86_64
4503 case OT_QUAD:
4504 gen_jmp_im(pc_start - s->cs_base);
4505 gen_helper_divq_EAX(cpu_T[0]);
4506 break;
4507 #endif
4508 }
4509 break;
4510 case 7: /* idiv */
4511 switch(ot) {
4512 case OT_BYTE:
4513 gen_jmp_im(pc_start - s->cs_base);
4514 gen_helper_idivb_AL(cpu_T[0]);
4515 break;
4516 case OT_WORD:
4517 gen_jmp_im(pc_start - s->cs_base);
4518 gen_helper_idivw_AX(cpu_T[0]);
4519 break;
4520 default:
4521 case OT_LONG:
4522 gen_jmp_im(pc_start - s->cs_base);
4523 gen_helper_idivl_EAX(cpu_T[0]);
4524 break;
4525 #ifdef TARGET_X86_64
4526 case OT_QUAD:
4527 gen_jmp_im(pc_start - s->cs_base);
4528 gen_helper_idivq_EAX(cpu_T[0]);
4529 break;
4530 #endif
4531 }
4532 break;
4533 default:
4534 goto illegal_op;
4535 }
4536 break;
4537
4538 case 0xfe: /* GRP4 */
4539 case 0xff: /* GRP5 */
4540 if ((b & 1) == 0)
4541 ot = OT_BYTE;
4542 else
4543 ot = dflag + OT_WORD;
4544
4545 modrm = ldub_code(s->pc++);
4546 mod = (modrm >> 6) & 3;
4547 rm = (modrm & 7) | REX_B(s);
4548 op = (modrm >> 3) & 7;
4549 if (op >= 2 && b == 0xfe) {
4550 goto illegal_op;
4551 }
4552 if (CODE64(s)) {
4553 if (op == 2 || op == 4) {
4554 /* operand size for jumps is 64 bit */
4555 ot = OT_QUAD;
4556 } else if (op == 3 || op == 5) {
4557 /* for far calls and jumps (lcall/ljmp), the operand is 16 or
4558 32 bit, even in long mode */
4559 ot = dflag ? OT_LONG : OT_WORD;
4560 } else if (op == 6) {
4561 /* default push size is 64 bit */
4562 ot = dflag ? OT_QUAD : OT_WORD;
4563 }
4564 }
4565 if (mod != 3) {
4566 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4567 if (op >= 2 && op != 3 && op != 5)
4568 gen_op_ld_T0_A0(ot + s->mem_index);
4569 } else {
4570 gen_op_mov_TN_reg(ot, 0, rm);
4571 }
4572
4573 switch(op) {
4574 case 0: /* inc Ev */
4575 if (mod != 3)
4576 opreg = OR_TMP0;
4577 else
4578 opreg = rm;
4579 gen_inc(s, ot, opreg, 1);
4580 break;
4581 case 1: /* dec Ev */
4582 if (mod != 3)
4583 opreg = OR_TMP0;
4584 else
4585 opreg = rm;
4586 gen_inc(s, ot, opreg, -1);
4587 break;
4588 case 2: /* call Ev */
4589 /* XXX: optimize if memory (no 'and' is necessary) */
4590 if (s->dflag == 0)
4591 gen_op_andl_T0_ffff();
4592 next_eip = s->pc - s->cs_base;
4593 gen_movtl_T1_im(next_eip);
4594 gen_push_T1(s);
4595 gen_op_jmp_T0();
4596 gen_eob(s);
4597 break;
4598 case 3: /* lcall Ev */
4599 gen_op_ld_T1_A0(ot + s->mem_index);
4600 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
4601 gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
4602 do_lcall:
4603 if (s->pe && !s->vm86) {
4604 if (s->cc_op != CC_OP_DYNAMIC)
4605 gen_op_set_cc_op(s->cc_op);
4606 gen_jmp_im(pc_start - s->cs_base);
4607 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4608 gen_helper_lcall_protected(cpu_tmp2_i32, cpu_T[1],
4609 tcg_const_i32(dflag),
4610 tcg_const_i32(s->pc - pc_start));
4611 } else {
4612 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4613 gen_helper_lcall_real(cpu_tmp2_i32, cpu_T[1],
4614 tcg_const_i32(dflag),
4615 tcg_const_i32(s->pc - s->cs_base));
4616 }
4617 gen_eob(s);
4618 break;
4619 case 4: /* jmp Ev */
4620 if (s->dflag == 0)
4621 gen_op_andl_T0_ffff();
4622 gen_op_jmp_T0();
4623 gen_eob(s);
4624 break;
4625 case 5: /* ljmp Ev */
4626 gen_op_ld_T1_A0(ot + s->mem_index);
4627 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
4628 gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
4629 do_ljmp:
4630 if (s->pe && !s->vm86) {
4631 if (s->cc_op != CC_OP_DYNAMIC)
4632 gen_op_set_cc_op(s->cc_op);
4633 gen_jmp_im(pc_start - s->cs_base);
4634 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4635 gen_helper_ljmp_protected(cpu_tmp2_i32, cpu_T[1],
4636 tcg_const_i32(s->pc - pc_start));
4637 } else {
4638 gen_op_movl_seg_T0_vm(R_CS);
4639 gen_op_movl_T0_T1();
4640 gen_op_jmp_T0();
4641 }
4642 gen_eob(s);
4643 break;
4644 case 6: /* push Ev */
4645 gen_push_T0(s);
4646 break;
4647 default:
4648 goto illegal_op;
4649 }
4650 break;
4651
4652 case 0x84: /* test Ev, Gv */
4653 case 0x85:
4654 if ((b & 1) == 0)
4655 ot = OT_BYTE;
4656 else
4657 ot = dflag + OT_WORD;
4658
4659 modrm = ldub_code(s->pc++);
4660 mod = (modrm >> 6) & 3;
4661 rm = (modrm & 7) | REX_B(s);
4662 reg = ((modrm >> 3) & 7) | rex_r;
4663
4664 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
4665 gen_op_mov_TN_reg(ot, 1, reg);
4666 gen_op_testl_T0_T1_cc();
4667 s->cc_op = CC_OP_LOGICB + ot;
4668 break;
4669
4670 case 0xa8: /* test eAX, Iv */
4671 case 0xa9:
4672 if ((b & 1) == 0)
4673 ot = OT_BYTE;
4674 else
4675 ot = dflag + OT_WORD;
4676 val = insn_get(s, ot);
4677
4678 gen_op_mov_TN_reg(ot, 0, OR_EAX);
4679 gen_op_movl_T1_im(val);
4680 gen_op_testl_T0_T1_cc();
4681 s->cc_op = CC_OP_LOGICB + ot;
4682 break;
4683
4684 case 0x98: /* CWDE/CBW */
4685 #ifdef TARGET_X86_64
4686 if (dflag == 2) {
4687 gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
4688 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
4689 gen_op_mov_reg_T0(OT_QUAD, R_EAX);
4690 } else
4691 #endif
4692 if (dflag == 1) {
4693 gen_op_mov_TN_reg(OT_WORD, 0, R_EAX);
4694 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
4695 gen_op_mov_reg_T0(OT_LONG, R_EAX);
4696 } else {
4697 gen_op_mov_TN_reg(OT_BYTE, 0, R_EAX);
4698 tcg_gen_ext8s_tl(cpu_T[0], cpu_T[0]);
4699 gen_op_mov_reg_T0(OT_WORD, R_EAX);
4700 }
4701 break;
4702 case 0x99: /* CDQ/CWD */
4703 #ifdef TARGET_X86_64
4704 if (dflag == 2) {
4705 gen_op_mov_TN_reg(OT_QUAD, 0, R_EAX);
4706 tcg_gen_sari_tl(cpu_T[0], cpu_T[0], 63);
4707 gen_op_mov_reg_T0(OT_QUAD, R_EDX);
4708 } else
4709 #endif
4710 if (dflag == 1) {
4711 gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
4712 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
4713 tcg_gen_sari_tl(cpu_T[0], cpu_T[0], 31);
4714 gen_op_mov_reg_T0(OT_LONG, R_EDX);
4715 } else {
4716 gen_op_mov_TN_reg(OT_WORD, 0, R_EAX);
4717 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
4718 tcg_gen_sari_tl(cpu_T[0], cpu_T[0], 15);
4719 gen_op_mov_reg_T0(OT_WORD, R_EDX);
4720 }
4721 break;
4722 case 0x1af: /* imul Gv, Ev */
4723 case 0x69: /* imul Gv, Ev, I */
4724 case 0x6b:
4725 ot = dflag + OT_WORD;
4726 modrm = ldub_code(s->pc++);
4727 reg = ((modrm >> 3) & 7) | rex_r;
4728 if (b == 0x69)
4729 s->rip_offset = insn_const_size(ot);
4730 else if (b == 0x6b)
4731 s->rip_offset = 1;
4732 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
4733 if (b == 0x69) {
4734 val = insn_get(s, ot);
4735 gen_op_movl_T1_im(val);
4736 } else if (b == 0x6b) {
4737 val = (int8_t)insn_get(s, OT_BYTE);
4738 gen_op_movl_T1_im(val);
4739 } else {
4740 gen_op_mov_TN_reg(ot, 1, reg);
4741 }
4742
4743 #ifdef TARGET_X86_64
4744 if (ot == OT_QUAD) {
4745 gen_helper_imulq_T0_T1(cpu_T[0], cpu_T[0], cpu_T[1]);
4746 } else
4747 #endif
4748 if (ot == OT_LONG) {
4749 #ifdef TARGET_X86_64
4750 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
4751 tcg_gen_ext32s_tl(cpu_T[1], cpu_T[1]);
4752 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
4753 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4754 tcg_gen_ext32s_tl(cpu_tmp0, cpu_T[0]);
4755 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
4756 #else
4757 {
4758 TCGv_i64 t0, t1;
4759 t0 = tcg_temp_new_i64();
4760 t1 = tcg_temp_new_i64();
4761 tcg_gen_ext_i32_i64(t0, cpu_T[0]);
4762 tcg_gen_ext_i32_i64(t1, cpu_T[1]);
4763 tcg_gen_mul_i64(t0, t0, t1);
4764 tcg_gen_trunc_i64_i32(cpu_T[0], t0);
4765 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4766 tcg_gen_sari_tl(cpu_tmp0, cpu_T[0], 31);
4767 tcg_gen_shri_i64(t0, t0, 32);
4768 tcg_gen_trunc_i64_i32(cpu_T[1], t0);
4769 tcg_gen_sub_tl(cpu_cc_src, cpu_T[1], cpu_tmp0);
4770 }
4771 #endif
4772 } else {
4773 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
4774 tcg_gen_ext16s_tl(cpu_T[1], cpu_T[1]);
4775 /* XXX: use 32 bit mul which could be faster */
4776 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
4777 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4778 tcg_gen_ext16s_tl(cpu_tmp0, cpu_T[0]);
4779 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
4780 }
4781 gen_op_mov_reg_T0(ot, reg);
4782 s->cc_op = CC_OP_MULB + ot;
4783 break;
4784 case 0x1c0:
4785 case 0x1c1: /* xadd Ev, Gv */
4786 if ((b & 1) == 0)
4787 ot = OT_BYTE;
4788 else
4789 ot = dflag + OT_WORD;
4790 modrm = ldub_code(s->pc++);
4791 reg = ((modrm >> 3) & 7) | rex_r;
4792 mod = (modrm >> 6) & 3;
4793 if (mod == 3) {
4794 rm = (modrm & 7) | REX_B(s);
4795 gen_op_mov_TN_reg(ot, 0, reg);
4796 gen_op_mov_TN_reg(ot, 1, rm);
4797 gen_op_addl_T0_T1();
4798 gen_op_mov_reg_T1(ot, reg);
4799 gen_op_mov_reg_T0(ot, rm);
4800 } else {
4801 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4802 gen_op_mov_TN_reg(ot, 0, reg);
4803 gen_op_ld_T1_A0(ot + s->mem_index);
4804 gen_op_addl_T0_T1();
4805 gen_op_st_T0_A0(ot + s->mem_index);
4806 gen_op_mov_reg_T1(ot, reg);
4807 }
4808 gen_op_update2_cc();
4809 s->cc_op = CC_OP_ADDB + ot;
4810 break;
4811 case 0x1b0:
4812 case 0x1b1: /* cmpxchg Ev, Gv */
4813 {
4814 int label1, label2;
4815 TCGv t0, t1, t2, a0;
4816
4817 if ((b & 1) == 0)
4818 ot = OT_BYTE;
4819 else
4820 ot = dflag + OT_WORD;
4821 modrm = ldub_code(s->pc++);
4822 reg = ((modrm >> 3) & 7) | rex_r;
4823 mod = (modrm >> 6) & 3;
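/* Translation scheme for cmpxchg: t0 holds the destination value, t1 the
   source register and t2 = EAX - t0, which also feeds the flags.  For
   the memory form a store is always emitted, writing either the new
   value or the old one back, so the architectural unconditional-write
   behaviour and any MMU fault are preserved. */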
4824 t0 = tcg_temp_local_new();
4825 t1 = tcg_temp_local_new();
4826 t2 = tcg_temp_local_new();
4827 a0 = tcg_temp_local_new();
4828 gen_op_mov_v_reg(ot, t1, reg);
4829 if (mod == 3) {
4830 rm = (modrm & 7) | REX_B(s);
4831 gen_op_mov_v_reg(ot, t0, rm);
4832 } else {
4833 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4834 tcg_gen_mov_tl(a0, cpu_A0);
4835 gen_op_ld_v(ot + s->mem_index, t0, a0);
4836 rm = 0; /* avoid warning */
4837 }
4838 label1 = gen_new_label();
4839 tcg_gen_ld_tl(t2, cpu_env, offsetof(CPUState, regs[R_EAX]));
4840 tcg_gen_sub_tl(t2, t2, t0);
4841 gen_extu(ot, t2);
4842 tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, label1);
4843 if (mod == 3) {
4844 label2 = gen_new_label();
4845 gen_op_mov_reg_v(ot, R_EAX, t0);
4846 tcg_gen_br(label2);
4847 gen_set_label(label1);
4848 gen_op_mov_reg_v(ot, rm, t1);
4849 gen_set_label(label2);
4850 } else {
4851 tcg_gen_mov_tl(t1, t0);
4852 gen_op_mov_reg_v(ot, R_EAX, t0);
4853 gen_set_label(label1);
4854 /* always store */
4855 gen_op_st_v(ot + s->mem_index, t1, a0);
4856 }
4857 tcg_gen_mov_tl(cpu_cc_src, t0);
4858 tcg_gen_mov_tl(cpu_cc_dst, t2);
4859 s->cc_op = CC_OP_SUBB + ot;
4860 tcg_temp_free(t0);
4861 tcg_temp_free(t1);
4862 tcg_temp_free(t2);
4863 tcg_temp_free(a0);
4864 }
4865 break;
4866 case 0x1c7: /* cmpxchg8b */
4867 modrm = ldub_code(s->pc++);
4868 mod = (modrm >> 6) & 3;
4869 if ((mod == 3) || ((modrm & 0x38) != 0x8))
4870 goto illegal_op;
4871 #ifdef TARGET_X86_64
4872 if (dflag == 2) {
4873 if (!(s->cpuid_ext_features & CPUID_EXT_CX16))
4874 goto illegal_op;
4875 gen_jmp_im(pc_start - s->cs_base);
4876 if (s->cc_op != CC_OP_DYNAMIC)
4877 gen_op_set_cc_op(s->cc_op);
4878 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4879 gen_helper_cmpxchg16b(cpu_A0);
4880 } else
4881 #endif
4882 {
4883 if (!(s->cpuid_features & CPUID_CX8))
4884 goto illegal_op;
4885 gen_jmp_im(pc_start - s->cs_base);
4886 if (s->cc_op != CC_OP_DYNAMIC)
4887 gen_op_set_cc_op(s->cc_op);
4888 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4889 gen_helper_cmpxchg8b(cpu_A0);
4890 }
4891 s->cc_op = CC_OP_EFLAGS;
4892 break;
4893
4894 /**************************/
4895 /* push/pop */
4896 case 0x50 ... 0x57: /* push */
4897 gen_op_mov_TN_reg(OT_LONG, 0, (b & 7) | REX_B(s));
4898 gen_push_T0(s);
4899 break;
4900 case 0x58 ... 0x5f: /* pop */
4901 if (CODE64(s)) {
4902 ot = dflag ? OT_QUAD : OT_WORD;
4903 } else {
4904 ot = dflag + OT_WORD;
4905 }
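/* In 64-bit mode there is no 32-bit push/pop: the default stack operand
   size is 64 bits and only a 0x66 prefix (dflag == 0) narrows it to 16,
   hence OT_QUAD/OT_WORD with no OT_LONG case. */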
4906 gen_pop_T0(s);
4907 /* NOTE: order is important for pop %sp */
4908 gen_pop_update(s);
4909 gen_op_mov_reg_T0(ot, (b & 7) | REX_B(s));
4910 break;
4911 case 0x60: /* pusha */
4912 if (CODE64(s))
4913 goto illegal_op;
4914 gen_pusha(s);
4915 break;
4916 case 0x61: /* popa */
4917 if (CODE64(s))
4918 goto illegal_op;
4919 gen_popa(s);
4920 break;
4921 case 0x68: /* push Iv */
4922 case 0x6a:
4923 if (CODE64(s)) {
4924 ot = dflag ? OT_QUAD : OT_WORD;
4925 } else {
4926 ot = dflag + OT_WORD;
4927 }
4928 if (b == 0x68)
4929 val = insn_get(s, ot);
4930 else
4931 val = (int8_t)insn_get(s, OT_BYTE);
4932 gen_op_movl_T0_im(val);
4933 gen_push_T0(s);
4934 break;
4935 case 0x8f: /* pop Ev */
4936 if (CODE64(s)) {
4937 ot = dflag ? OT_QUAD : OT_WORD;
4938 } else {
4939 ot = dflag + OT_WORD;
4940 }
4941 modrm = ldub_code(s->pc++);
4942 mod = (modrm >> 6) & 3;
4943 gen_pop_T0(s);
4944 if (mod == 3) {
4945 /* NOTE: order is important for pop %sp */
4946 gen_pop_update(s);
4947 rm = (modrm & 7) | REX_B(s);
4948 gen_op_mov_reg_T0(ot, rm);
4949 } else {
4950 /* NOTE: order is important too for MMU exceptions */
4951 s->popl_esp_hack = 1 << ot;
4952 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
4953 s->popl_esp_hack = 0;
4954 gen_pop_update(s);
4955 }
4956 break;
4957 case 0xc8: /* enter */
4958 {
4959 int level;
4960 val = lduw_code(s->pc);
4961 s->pc += 2;
4962 level = ldub_code(s->pc++);
4963 gen_enter(s, val, level);
4964 }
4965 break;
4966 case 0xc9: /* leave */
4967 /* XXX: exception not precise (ESP is updated before potential exception) */
4968 if (CODE64(s)) {
4969 gen_op_mov_TN_reg(OT_QUAD, 0, R_EBP);
4970 gen_op_mov_reg_T0(OT_QUAD, R_ESP);
4971 } else if (s->ss32) {
4972 gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
4973 gen_op_mov_reg_T0(OT_LONG, R_ESP);
4974 } else {
4975 gen_op_mov_TN_reg(OT_WORD, 0, R_EBP);
4976 gen_op_mov_reg_T0(OT_WORD, R_ESP);
4977 }
4978 gen_pop_T0(s);
4979 if (CODE64(s)) {
4980 ot = dflag ? OT_QUAD : OT_WORD;
4981 } else {
4982 ot = dflag + OT_WORD;
4983 }
4984 gen_op_mov_reg_T0(ot, R_EBP);
4985 gen_pop_update(s);
4986 break;
4987 case 0x06: /* push es */
4988 case 0x0e: /* push cs */
4989 case 0x16: /* push ss */
4990 case 0x1e: /* push ds */
4991 if (CODE64(s))
4992 goto illegal_op;
4993 gen_op_movl_T0_seg(b >> 3);
4994 gen_push_T0(s);
4995 break;
4996 case 0x1a0: /* push fs */
4997 case 0x1a8: /* push gs */
4998 gen_op_movl_T0_seg((b >> 3) & 7);
4999 gen_push_T0(s);
5000 break;
5001 case 0x07: /* pop es */
5002 case 0x17: /* pop ss */
5003 case 0x1f: /* pop ds */
5004 if (CODE64(s))
5005 goto illegal_op;
5006 reg = b >> 3;
5007 gen_pop_T0(s);
5008 gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
5009 gen_pop_update(s);
5010 if (reg == R_SS) {
5011 /* if reg == SS, inhibit interrupts/trace. */
5012 /* If several instructions disable interrupts, only the
5013 _first_ does it */
5014 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
5015 gen_helper_set_inhibit_irq();
5016 s->tf = 0;
5017 }
5018 if (s->is_jmp) {
5019 gen_jmp_im(s->pc - s->cs_base);
5020 gen_eob(s);
5021 }
5022 break;
5023 case 0x1a1: /* pop fs */
5024 case 0x1a9: /* pop gs */
5025 gen_pop_T0(s);
5026 gen_movl_seg_T0(s, (b >> 3) & 7, pc_start - s->cs_base);
5027 gen_pop_update(s);
5028 if (s->is_jmp) {
5029 gen_jmp_im(s->pc - s->cs_base);
5030 gen_eob(s);
5031 }
5032 break;
5033
5034 /**************************/
5035 /* mov */
5036 case 0x88:
5037 case 0x89: /* mov Gv, Ev */
5038 if ((b & 1) == 0)
5039 ot = OT_BYTE;
5040 else
5041 ot = dflag + OT_WORD;
5042 modrm = ldub_code(s->pc++);
5043 reg = ((modrm >> 3) & 7) | rex_r;
5044
5045 /* generate a generic store */
5046 gen_ldst_modrm(s, modrm, ot, reg, 1);
5047 break;
5048 case 0xc6:
5049 case 0xc7: /* mov Ev, Iv */
5050 if ((b & 1) == 0)
5051 ot = OT_BYTE;
5052 else
5053 ot = dflag + OT_WORD;
5054 modrm = ldub_code(s->pc++);
5055 mod = (modrm >> 6) & 3;
5056 if (mod != 3) {
5057 s->rip_offset = insn_const_size(ot);
5058 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5059 }
5060 val = insn_get(s, ot);
5061 gen_op_movl_T0_im(val);
5062 if (mod != 3)
5063 gen_op_st_T0_A0(ot + s->mem_index);
5064 else
5065 gen_op_mov_reg_T0(ot, (modrm & 7) | REX_B(s));
5066 break;
5067 case 0x8a:
5068 case 0x8b: /* mov Ev, Gv */
5069 if ((b & 1) == 0)
5070 ot = OT_BYTE;
5071 else
5072 ot = OT_WORD + dflag;
5073 modrm = ldub_code(s->pc++);
5074 reg = ((modrm >> 3) & 7) | rex_r;
5075
5076 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
5077 gen_op_mov_reg_T0(ot, reg);
5078 break;
5079 case 0x8e: /* mov seg, Gv */
5080 modrm = ldub_code(s->pc++);
5081 reg = (modrm >> 3) & 7;
5082 if (reg >= 6 || reg == R_CS)
5083 goto illegal_op;
5084 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
5085 gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
5086 if (reg == R_SS) {
5087 /* if reg == SS, inhibit interrupts/trace */
5088 /* If several instructions disable interrupts, only the
5089 _first_ does it */
5090 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
5091 gen_helper_set_inhibit_irq();
5092 s->tf = 0;
5093 }
5094 if (s->is_jmp) {
5095 gen_jmp_im(s->pc - s->cs_base);
5096 gen_eob(s);
5097 }
5098 break;
5099 case 0x8c: /* mov Gv, seg */
5100 modrm = ldub_code(s->pc++);
5101 reg = (modrm >> 3) & 7;
5102 mod = (modrm >> 6) & 3;
5103 if (reg >= 6)
5104 goto illegal_op;
5105 gen_op_movl_T0_seg(reg);
5106 if (mod == 3)
5107 ot = OT_WORD + dflag;
5108 else
5109 ot = OT_WORD;
5110 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
5111 break;
5112
5113 case 0x1b6: /* movzbS Gv, Eb */
5114 case 0x1b7: /* movzwS Gv, Ew */
5115 case 0x1be: /* movsbS Gv, Eb */
5116 case 0x1bf: /* movswS Gv, Ew */
5117 {
5118 int d_ot;
5119 /* d_ot is the size of destination */
5120 d_ot = dflag + OT_WORD;
5121 /* ot is the size of source */
5122 ot = (b & 1) + OT_BYTE;
5123 modrm = ldub_code(s->pc++);
5124 reg = ((modrm >> 3) & 7) | rex_r;
5125 mod = (modrm >> 6) & 3;
5126 rm = (modrm & 7) | REX_B(s);
5127
5128 if (mod == 3) {
5129 gen_op_mov_TN_reg(ot, 0, rm);
5130 switch(ot | (b & 8)) {
5131 case OT_BYTE:
5132 tcg_gen_ext8u_tl(cpu_T[0], cpu_T[0]);
5133 break;
5134 case OT_BYTE | 8:
5135 tcg_gen_ext8s_tl(cpu_T[0], cpu_T[0]);
5136 break;
5137 case OT_WORD:
5138 tcg_gen_ext16u_tl(cpu_T[0], cpu_T[0]);
5139 break;
5140 default:
5141 case OT_WORD | 8:
5142 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
5143 break;
5144 }
5145 gen_op_mov_reg_T0(d_ot, reg);
5146 } else {
5147 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5148 if (b & 8) {
5149 gen_op_lds_T0_A0(ot + s->mem_index);
5150 } else {
5151 gen_op_ldu_T0_A0(ot + s->mem_index);
5152 }
5153 gen_op_mov_reg_T0(d_ot, reg);
5154 }
5155 }
5156 break;
5157
5158 case 0x8d: /* lea */
5159 ot = dflag + OT_WORD;
5160 modrm = ldub_code(s->pc++);
5161 mod = (modrm >> 6) & 3;
5162 if (mod == 3)
5163 goto illegal_op;
5164 reg = ((modrm >> 3) & 7) | rex_r;
5165 /* we must ensure that no segment is added */
5166 s->override = -1;
5167 val = s->addseg;
5168 s->addseg = 0;
5169 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5170 s->addseg = val;
5171 gen_op_mov_reg_A0(ot - OT_WORD, reg);
5172 break;
5173
5174 case 0xa0: /* mov EAX, Ov */
5175 case 0xa1:
5176 case 0xa2: /* mov Ov, EAX */
5177 case 0xa3:
5178 {
5179 target_ulong offset_addr;
5180
5181 if ((b & 1) == 0)
5182 ot = OT_BYTE;
5183 else
5184 ot = dflag + OT_WORD;
5185 #ifdef TARGET_X86_64
5186 if (s->aflag == 2) {
5187 offset_addr = ldq_code(s->pc);
5188 s->pc += 8;
5189 gen_op_movq_A0_im(offset_addr);
5190 } else
5191 #endif
5192 {
5193 if (s->aflag) {
5194 offset_addr = insn_get(s, OT_LONG);
5195 } else {
5196 offset_addr = insn_get(s, OT_WORD);
5197 }
5198 gen_op_movl_A0_im(offset_addr);
5199 }
5200 gen_add_A0_ds_seg(s);
5201 if ((b & 2) == 0) {
5202 gen_op_ld_T0_A0(ot + s->mem_index);
5203 gen_op_mov_reg_T0(ot, R_EAX);
5204 } else {
5205 gen_op_mov_TN_reg(ot, 0, R_EAX);
5206 gen_op_st_T0_A0(ot + s->mem_index);
5207 }
5208 }
5209 break;
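/* xlat: AL = mem[seg:(E/R)BX + zero-extended AL]; the masking below
   reproduces the 16/32/64-bit address wrap for each aflag value. */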
5210 case 0xd7: /* xlat */
5211 #ifdef TARGET_X86_64
5212 if (s->aflag == 2) {
5213 gen_op_movq_A0_reg(R_EBX);
5214 gen_op_mov_TN_reg(OT_QUAD, 0, R_EAX);
5215 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xff);
5216 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_T[0]);
5217 } else
5218 #endif
5219 {
5220 gen_op_movl_A0_reg(R_EBX);
5221 gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
5222 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xff);
5223 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_T[0]);
5224 if (s->aflag == 0)
5225 gen_op_andl_A0_ffff();
5226 else
5227 tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
5228 }
5229 gen_add_A0_ds_seg(s);
5230 gen_op_ldu_T0_A0(OT_BYTE + s->mem_index);
5231 gen_op_mov_reg_T0(OT_BYTE, R_EAX);
5232 break;
5233 case 0xb0 ... 0xb7: /* mov R, Ib */
5234 val = insn_get(s, OT_BYTE);
5235 gen_op_movl_T0_im(val);
5236 gen_op_mov_reg_T0(OT_BYTE, (b & 7) | REX_B(s));
5237 break;
5238 case 0xb8 ... 0xbf: /* mov R, Iv */
5239 #ifdef TARGET_X86_64
5240 if (dflag == 2) {
5241 uint64_t tmp;
5242 /* 64 bit case */
5243 tmp = ldq_code(s->pc);
5244 s->pc += 8;
5245 reg = (b & 7) | REX_B(s);
5246 gen_movtl_T0_im(tmp);
5247 gen_op_mov_reg_T0(OT_QUAD, reg);
5248 } else
5249 #endif
5250 {
5251 ot = dflag ? OT_LONG : OT_WORD;
5252 val = insn_get(s, ot);
5253 reg = (b & 7) | REX_B(s);
5254 gen_op_movl_T0_im(val);
5255 gen_op_mov_reg_T0(ot, reg);
5256 }
5257 break;
5258
5259 case 0x91 ... 0x97: /* xchg R, EAX */
5260 ot = dflag + OT_WORD;
5261 reg = (b & 7) | REX_B(s);
5262 rm = R_EAX;
5263 goto do_xchg_reg;
5264 case 0x86:
5265 case 0x87: /* xchg Ev, Gv */
5266 if ((b & 1) == 0)
5267 ot = OT_BYTE;
5268 else
5269 ot = dflag + OT_WORD;
5270 modrm = ldub_code(s->pc++);
5271 reg = ((modrm >> 3) & 7) | rex_r;
5272 mod = (modrm >> 6) & 3;
5273 if (mod == 3) {
5274 rm = (modrm & 7) | REX_B(s);
5275 do_xchg_reg:
5276 gen_op_mov_TN_reg(ot, 0, reg);
5277 gen_op_mov_TN_reg(ot, 1, rm);
5278 gen_op_mov_reg_T0(ot, rm);
5279 gen_op_mov_reg_T1(ot, reg);
5280 } else {
5281 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5282 gen_op_mov_TN_reg(ot, 0, reg);
5283 /* for xchg, lock is implicit */
5284 if (!(prefixes & PREFIX_LOCK))
5285 gen_helper_lock();
5286 gen_op_ld_T1_A0(ot + s->mem_index);
5287 gen_op_st_T0_A0(ot + s->mem_index);
5288 if (!(prefixes & PREFIX_LOCK))
5289 gen_helper_unlock();
5290 gen_op_mov_reg_T1(ot, reg);
5291 }
5292 break;
5293 case 0xc4: /* les Gv */
5294 if (CODE64(s))
5295 goto illegal_op;
5296 op = R_ES;
5297 goto do_lxx;
5298 case 0xc5: /* lds Gv */
5299 if (CODE64(s))
5300 goto illegal_op;
5301 op = R_DS;
5302 goto do_lxx;
5303 case 0x1b2: /* lss Gv */
5304 op = R_SS;
5305 goto do_lxx;
5306 case 0x1b4: /* lfs Gv */
5307 op = R_FS;
5308 goto do_lxx;
5309 case 0x1b5: /* lgs Gv */
5310 op = R_GS;
5311 do_lxx:
5312 ot = dflag ? OT_LONG : OT_WORD;
5313 modrm = ldub_code(s->pc++);
5314 reg = ((modrm >> 3) & 7) | rex_r;
5315 mod = (modrm >> 6) & 3;
5316 if (mod == 3)
5317 goto illegal_op;
5318 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5319 gen_op_ld_T1_A0(ot + s->mem_index);
5320 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
5321 /* load the segment first to handle exceptions properly */
5322 gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
5323 gen_movl_seg_T0(s, op, pc_start - s->cs_base);
5324 /* then put the data */
5325 gen_op_mov_reg_T1(ot, reg);
5326 if (s->is_jmp) {
5327 gen_jmp_im(s->pc - s->cs_base);
5328 gen_eob(s);
5329 }
5330 break;
5331
5332 /************************/
5333 /* shifts */
5334 case 0xc0:
5335 case 0xc1:
5336 /* shift Ev,Ib */
5337 shift = 2;
5338 GRP2:
5339 {
5340 if ((b & 1) == 0)
5341 ot = OT_BYTE;
5342 else
5343 ot = dflag + OT_WORD;
5344
5345 modrm = ldub_code(s->pc++);
5346 mod = (modrm >> 6) & 3;
5347 op = (modrm >> 3) & 7;
5348
5349 if (mod != 3) {
5350 if (shift == 2) {
5351 s->rip_offset = 1;
5352 }
5353 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5354 opreg = OR_TMP0;
5355 } else {
5356 opreg = (modrm & 7) | REX_B(s);
5357 }
5358
5359 /* simpler op */
5360 if (shift == 0) {
5361 gen_shift(s, op, ot, opreg, OR_ECX);
5362 } else {
5363 if (shift == 2) {
5364 shift = ldub_code(s->pc++);
5365 }
5366 gen_shifti(s, op, ot, opreg, shift);
5367 }
5368 }
5369 break;
5370 case 0xd0:
5371 case 0xd1:
5372 /* shift Ev,1 */
5373 shift = 1;
5374 goto GRP2;
5375 case 0xd2:
5376 case 0xd3:
5377 /* shift Ev,cl */
5378 shift = 0;
5379 goto GRP2;
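/* For the GRP2 cases above, "shift" encodes where the count comes from:
   0 = the CL register, 1 = the constant 1, 2 = an imm8 fetched after the
   addressing bytes (which is why rip_offset is set to 1 for the memory
   forms, so RIP-relative addressing accounts for the trailing byte). */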
5380
5381 case 0x1a4: /* shld imm */
5382 op = 0;
5383 shift = 1;
5384 goto do_shiftd;
5385 case 0x1a5: /* shld cl */
5386 op = 0;
5387 shift = 0;
5388 goto do_shiftd;
5389 case 0x1ac: /* shrd imm */
5390 op = 1;
5391 shift = 1;
5392 goto do_shiftd;
5393 case 0x1ad: /* shrd cl */
5394 op = 1;
5395 shift = 0;
5396 do_shiftd:
5397 ot = dflag + OT_WORD;
5398 modrm = ldub_code(s->pc++);
5399 mod = (modrm >> 6) & 3;
5400 rm = (modrm & 7) | REX_B(s);
5401 reg = ((modrm >> 3) & 7) | rex_r;
5402 if (mod != 3) {
5403 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5404 opreg = OR_TMP0;
5405 } else {
5406 opreg = rm;
5407 }
5408 gen_op_mov_TN_reg(ot, 1, reg);
5409
5410 if (shift) {
5411 val = ldub_code(s->pc++);
5412 tcg_gen_movi_tl(cpu_T3, val);
5413 } else {
5414 tcg_gen_ld_tl(cpu_T3, cpu_env, offsetof(CPUState, regs[R_ECX]));
5415 }
5416 gen_shiftd_rm_T1_T3(s, ot, opreg, op);
5417 break;
5418
5419 /************************/
5420 /* floats */
5421 case 0xd8 ... 0xdf:
5422 if (s->flags & (HF_EM_MASK | HF_TS_MASK)) {
5423 /* if CR0.EM or CR0.TS is set, generate an FPU exception */
5424 /* XXX: what to do if illegal op ? */
5425 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
5426 break;
5427 }
5428 modrm = ldub_code(s->pc++);
5429 mod = (modrm >> 6) & 3;
5430 rm = modrm & 7;
5431 op = ((b & 7) << 3) | ((modrm >> 3) & 7);
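/* The x87 decoder folds the low 3 bits of the D8..DF opcode byte and the
   ModRM reg field into one 6-bit index, so e.g. op 0x0c handled below is
   D9 /4 ((b & 7) == 1, reg == 4). */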
5432 if (mod != 3) {
5433 /* memory op */
5434 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5435 switch(op) {
5436 case 0x00 ... 0x07: /* fxxxs */
5437 case 0x10 ... 0x17: /* fixxxl */
5438 case 0x20 ... 0x27: /* fxxxl */
5439 case 0x30 ... 0x37: /* fixxx */
5440 {
5441 int op1;
5442 op1 = op & 7;
5443
5444 switch(op >> 4) {
5445 case 0:
5446 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
5447 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5448 gen_helper_flds_FT0(cpu_tmp2_i32);
5449 break;
5450 case 1:
5451 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
5452 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5453 gen_helper_fildl_FT0(cpu_tmp2_i32);
5454 break;
5455 case 2:
5456 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
5457 (s->mem_index >> 2) - 1);
5458 gen_helper_fldl_FT0(cpu_tmp1_i64);
5459 break;
5460 case 3:
5461 default:
5462 gen_op_lds_T0_A0(OT_WORD + s->mem_index);
5463 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5464 gen_helper_fildl_FT0(cpu_tmp2_i32);
5465 break;
5466 }
5467
5468 gen_helper_fp_arith_ST0_FT0(op1);
5469 if (op1 == 3) {
5470 /* fcomp needs pop */
5471 gen_helper_fpop();
5472 }
5473 }
5474 break;
5475 case 0x08: /* flds */
5476 case 0x0a: /* fsts */
5477 case 0x0b: /* fstps */
5478 case 0x18 ... 0x1b: /* fildl, fisttpl, fistl, fistpl */
5479 case 0x28 ... 0x2b: /* fldl, fisttpll, fstl, fstpl */
5480 case 0x38 ... 0x3b: /* filds, fisttps, fists, fistps */
5481 switch(op & 7) {
5482 case 0:
5483 switch(op >> 4) {
5484 case 0:
5485 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
5486 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5487 gen_helper_flds_ST0(cpu_tmp2_i32);
5488 break;
5489 case 1:
5490 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
5491 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5492 gen_helper_fildl_ST0(cpu_tmp2_i32);
5493 break;
5494 case 2:
5495 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
5496 (s->mem_index >> 2) - 1);
5497 gen_helper_fldl_ST0(cpu_tmp1_i64);
5498 break;
5499 case 3:
5500 default:
5501 gen_op_lds_T0_A0(OT_WORD + s->mem_index);
5502 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5503 gen_helper_fildl_ST0(cpu_tmp2_i32);
5504 break;
5505 }
5506 break;
5507 case 1:
5508 /* XXX: the corresponding CPUID bit must be tested ! */
5509 switch(op >> 4) {
5510 case 1:
5511 gen_helper_fisttl_ST0(cpu_tmp2_i32);
5512 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
5513 gen_op_st_T0_A0(OT_LONG + s->mem_index);
5514 break;
5515 case 2:
5516 gen_helper_fisttll_ST0(cpu_tmp1_i64);
5517 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
5518 (s->mem_index >> 2) - 1);
5519 break;
5520 case 3:
5521 default:
5522 gen_helper_fistt_ST0(cpu_tmp2_i32);
5523 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
5524 gen_op_st_T0_A0(OT_WORD + s->mem_index);
5525 break;
5526 }
5527 gen_helper_fpop();
5528 break;
5529 default:
5530 switch(op >> 4) {
5531 case 0:
5532 gen_helper_fsts_ST0(cpu_tmp2_i32);
5533 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
5534 gen_op_st_T0_A0(OT_LONG + s->mem_index);
5535 break;
5536 case 1:
5537 gen_helper_fistl_ST0(cpu_tmp2_i32);
5538 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
5539 gen_op_st_T0_A0(OT_LONG + s->mem_index);
5540 break;
5541 case 2:
5542 gen_helper_fstl_ST0(cpu_tmp1_i64);
5543 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
5544 (s->mem_index >> 2) - 1);
5545 break;
5546 case 3:
5547 default:
5548 gen_helper_fist_ST0(cpu_tmp2_i32);
5549 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
5550 gen_op_st_T0_A0(OT_WORD + s->mem_index);
5551 break;
5552 }
5553 if ((op & 7) == 3)
5554 gen_helper_fpop();
5555 break;
5556 }
5557 break;
5558 case 0x0c: /* fldenv mem */
5559 if (s->cc_op != CC_OP_DYNAMIC)
5560 gen_op_set_cc_op(s->cc_op);
5561 gen_jmp_im(pc_start - s->cs_base);
5562 gen_helper_fldenv(
5563 cpu_A0, tcg_const_i32(s->dflag));
5564 break;
5565 case 0x0d: /* fldcw mem */
5566 gen_op_ld_T0_A0(OT_WORD + s->mem_index);
5567 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5568 gen_helper_fldcw(cpu_tmp2_i32);
5569 break;
5570 case 0x0e: /* fnstenv mem */
5571 if (s->cc_op != CC_OP_DYNAMIC)
5572 gen_op_set_cc_op(s->cc_op);
5573 gen_jmp_im(pc_start - s->cs_base);
5574 gen_helper_fstenv(cpu_A0, tcg_const_i32(s->dflag));
5575 break;
5576 case 0x0f: /* fnstcw mem */
5577 gen_helper_fnstcw(cpu_tmp2_i32);
5578 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
5579 gen_op_st_T0_A0(OT_WORD + s->mem_index);
5580 break;
5581 case 0x1d: /* fldt mem */
5582 if (s->cc_op != CC_OP_DYNAMIC)
5583 gen_op_set_cc_op(s->cc_op);
5584 gen_jmp_im(pc_start - s->cs_base);
5585 gen_helper_fldt_ST0(cpu_A0);
5586 break;
5587 case 0x1f: /* fstpt mem */
5588 if (s->cc_op != CC_OP_DYNAMIC)
5589 gen_op_set_cc_op(s->cc_op);
5590 gen_jmp_im(pc_start - s->cs_base);
5591 gen_helper_fstt_ST0(cpu_A0);
5592 gen_helper_fpop();
5593 break;
5594 case 0x2c: /* frstor mem */
5595 if (s->cc_op != CC_OP_DYNAMIC)
5596 gen_op_set_cc_op(s->cc_op);
5597 gen_jmp_im(pc_start - s->cs_base);
5598 gen_helper_frstor(cpu_A0, tcg_const_i32(s->dflag));
5599 break;
5600 case 0x2e: /* fnsave mem */
5601 if (s->cc_op != CC_OP_DYNAMIC)
5602 gen_op_set_cc_op(s->cc_op);
5603 gen_jmp_im(pc_start - s->cs_base);
5604 gen_helper_fsave(cpu_A0, tcg_const_i32(s->dflag));
5605 break;
5606 case 0x2f: /* fnstsw mem */
5607 gen_helper_fnstsw(cpu_tmp2_i32);
5608 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
5609 gen_op_st_T0_A0(OT_WORD + s->mem_index);
5610 break;
5611 case 0x3c: /* fbld */
5612 if (s->cc_op != CC_OP_DYNAMIC)
5613 gen_op_set_cc_op(s->cc_op);
5614 gen_jmp_im(pc_start - s->cs_base);
5615 gen_helper_fbld_ST0(cpu_A0);
5616 break;
5617 case 0x3e: /* fbstp */
5618 if (s->cc_op != CC_OP_DYNAMIC)
5619 gen_op_set_cc_op(s->cc_op);
5620 gen_jmp_im(pc_start - s->cs_base);
5621 gen_helper_fbst_ST0(cpu_A0);
5622 gen_helper_fpop();
5623 break;
5624 case 0x3d: /* fildll */
5625 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
5626 (s->mem_index >> 2) - 1);
5627 gen_helper_fildll_ST0(cpu_tmp1_i64);
5628 break;
5629 case 0x3f: /* fistpll */
5630 gen_helper_fistll_ST0(cpu_tmp1_i64);
5631 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
5632 (s->mem_index >> 2) - 1);
5633 gen_helper_fpop();
5634 break;
5635 default:
5636 goto illegal_op;
5637 }
5638 } else {
5639 /* register float ops */
5640 opreg = rm;
5641
5642 switch(op) {
5643 case 0x08: /* fld sti */
5644 gen_helper_fpush();
5645 gen_helper_fmov_ST0_STN(tcg_const_i32((opreg + 1) & 7));
5646 break;
5647 case 0x09: /* fxchg sti */
5648 case 0x29: /* fxchg4 sti, undocumented op */
5649 case 0x39: /* fxchg7 sti, undocumented op */
5650 gen_helper_fxchg_ST0_STN(tcg_const_i32(opreg));
5651 break;
5652 case 0x0a: /* grp d9/2 */
5653 switch(rm) {
5654 case 0: /* fnop */
5655 /* check exceptions (FreeBSD FPU probe) */
5656 if (s->cc_op != CC_OP_DYNAMIC)
5657 gen_op_set_cc_op(s->cc_op);
5658 gen_jmp_im(pc_start - s->cs_base);
5659 gen_helper_fwait();
5660 break;
5661 default:
5662 goto illegal_op;
5663 }
5664 break;
5665 case 0x0c: /* grp d9/4 */
5666 switch(rm) {
5667 case 0: /* fchs */
5668 gen_helper_fchs_ST0();
5669 break;
5670 case 1: /* fabs */
5671 gen_helper_fabs_ST0();
5672 break;
5673 case 4: /* ftst */
5674 gen_helper_fldz_FT0();
5675 gen_helper_fcom_ST0_FT0();
5676 break;
5677 case 5: /* fxam */
5678 gen_helper_fxam_ST0();
5679 break;
5680 default:
5681 goto illegal_op;
5682 }
5683 break;
5684 case 0x0d: /* grp d9/5 */
5685 {
5686 switch(rm) {
5687 case 0:
5688 gen_helper_fpush();
5689 gen_helper_fld1_ST0();
5690 break;
5691 case 1:
5692 gen_helper_fpush();
5693 gen_helper_fldl2t_ST0();
5694 break;
5695 case 2:
5696 gen_helper_fpush();
5697 gen_helper_fldl2e_ST0();
5698 break;
5699 case 3:
5700 gen_helper_fpush();
5701 gen_helper_fldpi_ST0();
5702 break;
5703 case 4:
5704 gen_helper_fpush();
5705 gen_helper_fldlg2_ST0();
5706 break;
5707 case 5:
5708 gen_helper_fpush();
5709 gen_helper_fldln2_ST0();
5710 break;
5711 case 6:
5712 gen_helper_fpush();
5713 gen_helper_fldz_ST0();
5714 break;
5715 default:
5716 goto illegal_op;
5717 }
5718 }
5719 break;
5720 case 0x0e: /* grp d9/6 */
5721 switch(rm) {
5722 case 0: /* f2xm1 */
5723 gen_helper_f2xm1();
5724 break;
5725 case 1: /* fyl2x */
5726 gen_helper_fyl2x();
5727 break;
5728 case 2: /* fptan */
5729 gen_helper_fptan();
5730 break;
5731 case 3: /* fpatan */
5732 gen_helper_fpatan();
5733 break;
5734 case 4: /* fxtract */
5735 gen_helper_fxtract();
5736 break;
5737 case 5: /* fprem1 */
5738 gen_helper_fprem1();
5739 break;
5740 case 6: /* fdecstp */
5741 gen_helper_fdecstp();
5742 break;
5743 default:
5744 case 7: /* fincstp */
5745 gen_helper_fincstp();
5746 break;
5747 }
5748 break;
5749 case 0x0f: /* grp d9/7 */
5750 switch(rm) {
5751 case 0: /* fprem */
5752 gen_helper_fprem();
5753 break;
5754 case 1: /* fyl2xp1 */
5755 gen_helper_fyl2xp1();
5756 break;
5757 case 2: /* fsqrt */
5758 gen_helper_fsqrt();
5759 break;
5760 case 3: /* fsincos */
5761 gen_helper_fsincos();
5762 break;
5763 case 5: /* fscale */
5764 gen_helper_fscale();
5765 break;
5766 case 4: /* frndint */
5767 gen_helper_frndint();
5768 break;
5769 case 6: /* fsin */
5770 gen_helper_fsin();
5771 break;
5772 default:
5773 case 7: /* fcos */
5774 gen_helper_fcos();
5775 break;
5776 }
5777 break;
5778 case 0x00: case 0x01: case 0x04 ... 0x07: /* fxxx st, sti */
5779 case 0x20: case 0x21: case 0x24 ... 0x27: /* fxxx sti, st */
5780 case 0x30: case 0x31: case 0x34 ... 0x37: /* fxxxp sti, st */
5781 {
5782 int op1;
5783
5784 op1 = op & 7;
5785 if (op >= 0x20) {
5786 gen_helper_fp_arith_STN_ST0(op1, opreg);
5787 if (op >= 0x30)
5788 gen_helper_fpop();
5789 } else {
5790 gen_helper_fmov_FT0_STN(tcg_const_i32(opreg));
5791 gen_helper_fp_arith_ST0_FT0(op1);
5792 }
5793 }
5794 break;
5795 case 0x02: /* fcom */
5796 case 0x22: /* fcom2, undocumented op */
5797 gen_helper_fmov_FT0_STN(tcg_const_i32(opreg));
5798 gen_helper_fcom_ST0_FT0();
5799 break;
5800 case 0x03: /* fcomp */
5801 case 0x23: /* fcomp3, undocumented op */
5802 case 0x32: /* fcomp5, undocumented op */
5803 gen_helper_fmov_FT0_STN(tcg_const_i32(opreg));
5804 gen_helper_fcom_ST0_FT0();
5805 gen_helper_fpop();
5806 break;
5807 case 0x15: /* da/5 */
5808 switch(rm) {
5809 case 1: /* fucompp */
5810 gen_helper_fmov_FT0_STN(tcg_const_i32(1));
5811 gen_helper_fucom_ST0_FT0();
5812 gen_helper_fpop();
5813 gen_helper_fpop();
5814 break;
5815 default:
5816 goto illegal_op;
5817 }
5818 break;
5819 case 0x1c:
5820 switch(rm) {
5821 case 0: /* feni (287 only, just do nop here) */
5822 break;
5823 case 1: /* fdisi (287 only, just do nop here) */
5824 break;
5825 case 2: /* fclex */
5826 gen_helper_fclex();
5827 break;
5828 case 3: /* fninit */
5829 gen_helper_fninit();
5830 break;
5831 case 4: /* fsetpm (287 only, just do nop here) */
5832 break;
5833 default:
5834 goto illegal_op;
5835 }
5836 break;
5837 case 0x1d: /* fucomi */
5838 if (s->cc_op != CC_OP_DYNAMIC)
5839 gen_op_set_cc_op(s->cc_op);
5840 gen_helper_fmov_FT0_STN(tcg_const_i32(opreg));
5841 gen_helper_fucomi_ST0_FT0();
5842 s->cc_op = CC_OP_EFLAGS;
5843 break;
5844 case 0x1e: /* fcomi */
5845 if (s->cc_op != CC_OP_DYNAMIC)
5846 gen_op_set_cc_op(s->cc_op);
5847 gen_helper_fmov_FT0_STN(tcg_const_i32(opreg));
5848 gen_helper_fcomi_ST0_FT0();
5849 s->cc_op = CC_OP_EFLAGS;
5850 break;
5851 case 0x28: /* ffree sti */
5852 gen_helper_ffree_STN(tcg_const_i32(opreg));
5853 break;
5854 case 0x2a: /* fst sti */
5855 gen_helper_fmov_STN_ST0(tcg_const_i32(opreg));
5856 break;
5857 case 0x2b: /* fstp sti */
5858 case 0x0b: /* fstp1 sti, undocumented op */
5859 case 0x3a: /* fstp8 sti, undocumented op */
5860 case 0x3b: /* fstp9 sti, undocumented op */
5861 gen_helper_fmov_STN_ST0(tcg_const_i32(opreg));
5862 gen_helper_fpop();
5863 break;
5864 case 0x2c: /* fucom st(i) */
5865 gen_helper_fmov_FT0_STN(tcg_const_i32(opreg));
5866 gen_helper_fucom_ST0_FT0();
5867 break;
5868 case 0x2d: /* fucomp st(i) */
5869 gen_helper_fmov_FT0_STN(tcg_const_i32(opreg));
5870 gen_helper_fucom_ST0_FT0();
5871 gen_helper_fpop();
5872 break;
5873 case 0x33: /* de/3 */
5874 switch(rm) {
5875 case 1: /* fcompp */
5876 gen_helper_fmov_FT0_STN(tcg_const_i32(1));
5877 gen_helper_fcom_ST0_FT0();
5878 gen_helper_fpop();
5879 gen_helper_fpop();
5880 break;
5881 default:
5882 goto illegal_op;
5883 }
5884 break;
5885 case 0x38: /* ffreep sti, undocumented op */
5886 gen_helper_ffree_STN(tcg_const_i32(opreg));
5887 gen_helper_fpop();
5888 break;
5889 case 0x3c: /* df/4 */
5890 switch(rm) {
5891 case 0:
5892 gen_helper_fnstsw(cpu_tmp2_i32);
5893 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
5894 gen_op_mov_reg_T0(OT_WORD, R_EAX);
5895 break;
5896 default:
5897 goto illegal_op;
5898 }
5899 break;
5900 case 0x3d: /* fucomip */
5901 if (s->cc_op != CC_OP_DYNAMIC)
5902 gen_op_set_cc_op(s->cc_op);
5903 gen_helper_fmov_FT0_STN(tcg_const_i32(opreg));
5904 gen_helper_fucomi_ST0_FT0();
5905 gen_helper_fpop();
5906 s->cc_op = CC_OP_EFLAGS;
5907 break;
5908 case 0x3e: /* fcomip */
5909 if (s->cc_op != CC_OP_DYNAMIC)
5910 gen_op_set_cc_op(s->cc_op);
5911 gen_helper_fmov_FT0_STN(tcg_const_i32(opreg));
5912 gen_helper_fcomi_ST0_FT0();
5913 gen_helper_fpop();
5914 s->cc_op = CC_OP_EFLAGS;
5915 break;
5916 case 0x10 ... 0x13: /* fcmovxx */
5917 case 0x18 ... 0x1b:
5918 {
5919 int op1, l1;
5920 static const uint8_t fcmov_cc[8] = {
5921 (JCC_B << 1),
5922 (JCC_Z << 1),
5923 (JCC_BE << 1),
5924 (JCC_P << 1),
5925 };
5926 op1 = fcmov_cc[op & 3] | (((op >> 3) & 1) ^ 1);
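/* op1 is the *negated* fcmov predicate (bits 0..1 of op pick B/Z/BE/P,
   the 0x18 row holds the inverted forms), so the branch to l1 skips the
   fmov exactly when the condition is false. */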
5927 l1 = gen_new_label();
5928 gen_jcc1(s, s->cc_op, op1, l1);
5929 gen_helper_fmov_ST0_STN(tcg_const_i32(opreg));
5930 gen_set_label(l1);
5931 }
5932 break;
5933 default:
5934 goto illegal_op;
5935 }
5936 }
5937 break;
5938 /************************/
5939 /* string ops */
5940
5941 case 0xa4: /* movsS */
5942 case 0xa5:
5943 if ((b & 1) == 0)
5944 ot = OT_BYTE;
5945 else
5946 ot = dflag + OT_WORD;
5947
5948 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
5949 gen_repz_movs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
5950 } else {
5951 gen_movs(s, ot);
5952 }
5953 break;
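/* For movs (and stos/lods below) either rep prefix behaves the same,
   since there is no termination condition to test: REPZ and REPNZ both
   route through the gen_repz_* expanders, which wrap the one-iteration
   body in the (E)CX-driven loop.  scas/cmps do distinguish the two. */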
5954
5955 case 0xaa: /* stosS */
5956 case 0xab:
5957 if ((b & 1) == 0)
5958 ot = OT_BYTE;
5959 else
5960 ot = dflag + OT_WORD;
5961
5962 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
5963 gen_repz_stos(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
5964 } else {
5965 gen_stos(s, ot);
5966 }
5967 break;
5968 case 0xac: /* lodsS */
5969 case 0xad:
5970 if ((b & 1) == 0)
5971 ot = OT_BYTE;
5972 else
5973 ot = dflag + OT_WORD;
5974 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
5975 gen_repz_lods(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
5976 } else {
5977 gen_lods(s, ot);
5978 }
5979 break;
5980 case 0xae: /* scasS */
5981 case 0xaf:
5982 if ((b & 1) == 0)
5983 ot = OT_BYTE;
5984 else
5985 ot = dflag + OT_WORD;
5986 if (prefixes & PREFIX_REPNZ) {
5987 gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
5988 } else if (prefixes & PREFIX_REPZ) {
5989 gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
5990 } else {
5991 gen_scas(s, ot);
5992 s->cc_op = CC_OP_SUBB + ot;
5993 }
5994 break;
5995
5996 case 0xa6: /* cmpsS */
5997 case 0xa7:
5998 if ((b & 1) == 0)
5999 ot = OT_BYTE;
6000 else
6001 ot = dflag + OT_WORD;
6002 if (prefixes & PREFIX_REPNZ) {
6003 gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
6004 } else if (prefixes & PREFIX_REPZ) {
6005 gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
6006 } else {
6007 gen_cmps(s, ot);
6008 s->cc_op = CC_OP_SUBB + ot;
6009 }
6010 break;
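/* For INS/OUTS below, gen_check_io() both checks the I/O permission
   bitmap/IOPL and builds the SVM IOIO intercept info passed as the
   third argument: SVM_IOIO_TYPE_MASK marks an IN (read) access,
   svm_is_rep() encodes a REP prefix, and the literal 4 appears to be
   the string-operation bit of the exit info. */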
6011 case 0x6c: /* insS */
6012 case 0x6d:
6013 if ((b & 1) == 0)
6014 ot = OT_BYTE;
6015 else
6016 ot = dflag ? OT_LONG : OT_WORD;
6017 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
6018 gen_op_andl_T0_ffff();
6019 gen_check_io(s, ot, pc_start - s->cs_base,
6020 SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes) | 4);
6021 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
6022 gen_repz_ins(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
6023 } else {
6024 gen_ins(s, ot);
6025 if (use_icount) {
6026 gen_jmp(s, s->pc - s->cs_base);
6027 }
6028 }
6029 break;
6030 case 0x6e: /* outsS */
6031 case 0x6f:
6032 if ((b & 1) == 0)
6033 ot = OT_BYTE;
6034 else
6035 ot = dflag ? OT_LONG : OT_WORD;
6036 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
6037 gen_op_andl_T0_ffff();
6038 gen_check_io(s, ot, pc_start - s->cs_base,
6039 svm_is_rep(prefixes) | 4);
6040 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
6041 gen_repz_outs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
6042 } else {
6043 gen_outs(s, ot);
6044 if (use_icount) {
6045 gen_jmp(s, s->pc - s->cs_base);
6046 }
6047 }
6048 break;
6049
6050 /************************/
6051 /* port I/O */
6052
6053 case 0xe4:
6054 case 0xe5:
6055 if ((b & 1) == 0)
6056 ot = OT_BYTE;
6057 else
6058 ot = dflag ? OT_LONG : OT_WORD;
6059 val = ldub_code(s->pc++);
6060 gen_op_movl_T0_im(val);
6061 gen_check_io(s, ot, pc_start - s->cs_base,
6062 SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes));
6063 if (use_icount)
6064 gen_io_start();
6065 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6066 gen_helper_in_func(ot, cpu_T[1], cpu_tmp2_i32);
6067 gen_op_mov_reg_T1(ot, R_EAX);
6068 if (use_icount) {
6069 gen_io_end();
6070 gen_jmp(s, s->pc - s->cs_base);
6071 }
6072 break;
6073 case 0xe6:
6074 case 0xe7:
6075 if ((b & 1) == 0)
6076 ot = OT_BYTE;
6077 else
6078 ot = dflag ? OT_LONG : OT_WORD;
6079 val = ldub_code(s->pc++);
6080 gen_op_movl_T0_im(val);
6081 gen_check_io(s, ot, pc_start - s->cs_base,
6082 svm_is_rep(prefixes));
6083 gen_op_mov_TN_reg(ot, 1, R_EAX);
6084
6085 if (use_icount)
6086 gen_io_start();
6087 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6088 tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
6089 tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[1]);
6090 gen_helper_out_func(ot, cpu_tmp2_i32, cpu_tmp3_i32);
6091 if (use_icount) {
6092 gen_io_end();
6093 gen_jmp(s, s->pc - s->cs_base);
6094 }
6095 break;
6096 case 0xec:
6097 case 0xed:
6098 if ((b & 1) == 0)
6099 ot = OT_BYTE;
6100 else
6101 ot = dflag ? OT_LONG : OT_WORD;
6102 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
6103 gen_op_andl_T0_ffff();
6104 gen_check_io(s, ot, pc_start - s->cs_base,
6105 SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes));
6106 if (use_icount)
6107 gen_io_start();
6108 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6109 gen_helper_in_func(ot, cpu_T[1], cpu_tmp2_i32);
6110 gen_op_mov_reg_T1(ot, R_EAX);
6111 if (use_icount) {
6112 gen_io_end();
6113 gen_jmp(s, s->pc - s->cs_base);
6114 }
6115 break;
6116 case 0xee:
6117 case 0xef:
6118 if ((b & 1) == 0)
6119 ot = OT_BYTE;
6120 else
6121 ot = dflag ? OT_LONG : OT_WORD;
6122 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
6123 gen_op_andl_T0_ffff();
6124 gen_check_io(s, ot, pc_start - s->cs_base,
6125 svm_is_rep(prefixes));
6126 gen_op_mov_TN_reg(ot, 1, R_EAX);
6127
6128 if (use_icount)
6129 gen_io_start();
6130 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6131 tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
6132 tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[1]);
6133 gen_helper_out_func(ot, cpu_tmp2_i32, cpu_tmp3_i32);
6134 if (use_icount) {
6135 gen_io_end();
6136 gen_jmp(s, s->pc - s->cs_base);
6137 }
6138 break;
6139
6140 /************************/
6141 /* control */
6142 case 0xc2: /* ret im */
6143 val = ldsw_code(s->pc);
6144 s->pc += 2;
6145 gen_pop_T0(s);
6146 if (CODE64(s) && s->dflag)
6147 s->dflag = 2;
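/* dflag is 0/1/2 for 16/32/64-bit operands, so (2 << dflag) is the
   size in bytes of the return address that was just popped */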
6148 gen_stack_update(s, val + (2 << s->dflag));
6149 if (s->dflag == 0)
6150 gen_op_andl_T0_ffff();
6151 gen_op_jmp_T0();
6152 gen_eob(s);
6153 break;
6154 case 0xc3: /* ret */
6155 gen_pop_T0(s);
6156 gen_pop_update(s);
6157 if (s->dflag == 0)
6158 gen_op_andl_T0_ffff();
6159 gen_op_jmp_T0();
6160 gen_eob(s);
6161 break;
6162 case 0xca: /* lret im */
6163 val = ldsw_code(s->pc);
6164 s->pc += 2;
6165 do_lret:
6166 if (s->pe && !s->vm86) {
6167 if (s->cc_op != CC_OP_DYNAMIC)
6168 gen_op_set_cc_op(s->cc_op);
6169 gen_jmp_im(pc_start - s->cs_base);
6170 gen_helper_lret_protected(tcg_const_i32(s->dflag),
6171 tcg_const_i32(val));
6172 } else {
6173 gen_stack_A0(s);
6174 /* pop offset */
6175 gen_op_ld_T0_A0(1 + s->dflag + s->mem_index);
6176 if (s->dflag == 0)
6177 gen_op_andl_T0_ffff();
6178 /* NOTE: keeping EIP updated is not a problem in case of
6179 exception */
6180 gen_op_jmp_T0();
6181 /* pop selector */
6182 gen_op_addl_A0_im(2 << s->dflag);
6183 gen_op_ld_T0_A0(1 + s->dflag + s->mem_index);
6184 gen_op_movl_seg_T0_vm(R_CS);
6185 /* add stack offset */
6186 gen_stack_update(s, val + (4 << s->dflag));
6187 }
6188 gen_eob(s);
6189 break;
6190 case 0xcb: /* lret */
6191 val = 0;
6192 goto do_lret;
6193 case 0xcf: /* iret */
6194 gen_svm_check_intercept(s, pc_start, SVM_EXIT_IRET);
6195 if (!s->pe) {
6196 /* real mode */
6197 gen_helper_iret_real(tcg_const_i32(s->dflag));
6198 s->cc_op = CC_OP_EFLAGS;
6199 } else if (s->vm86) {
6200 if (s->iopl != 3) {
6201 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6202 } else {
6203 gen_helper_iret_real(tcg_const_i32(s->dflag));
6204 s->cc_op = CC_OP_EFLAGS;
6205 }
6206 } else {
6207 if (s->cc_op != CC_OP_DYNAMIC)
6208 gen_op_set_cc_op(s->cc_op);
6209 gen_jmp_im(pc_start - s->cs_base);
6210 gen_helper_iret_protected(tcg_const_i32(s->dflag),
6211 tcg_const_i32(s->pc - s->cs_base));
6212 s->cc_op = CC_OP_EFLAGS;
6213 }
6214 gen_eob(s);
6215 break;
6216 case 0xe8: /* call im */
6217 {
6218 if (dflag)
6219 tval = (int32_t)insn_get(s, OT_LONG);
6220 else
6221 tval = (int16_t)insn_get(s, OT_WORD);
6222 next_eip = s->pc - s->cs_base;
6223 tval += next_eip;
6224 if (s->dflag == 0)
6225 tval &= 0xffff;
6226 gen_movtl_T0_im(next_eip);
6227 gen_push_T0(s);
6228 gen_jmp(s, tval);
6229 }
6230 break;
6231 case 0x9a: /* lcall im */
6232 {
6233 unsigned int selector, offset;
6234
6235 if (CODE64(s))
6236 goto illegal_op;
6237 ot = dflag ? OT_LONG : OT_WORD;
6238 offset = insn_get(s, ot);
6239 selector = insn_get(s, OT_WORD);
6240
6241 gen_op_movl_T0_im(selector);
6242 gen_op_movl_T1_imu(offset);
6243 }
6244 goto do_lcall;
6245 case 0xe9: /* jmp im */
6246 if (dflag)
6247 tval = (int32_t)insn_get(s, OT_LONG);
6248 else
6249 tval = (int16_t)insn_get(s, OT_WORD);
6250 tval += s->pc - s->cs_base;
6251 if (s->dflag == 0)
6252 tval &= 0xffff;
6253 else if(!CODE64(s))
6254 tval &= 0xffffffff;
6255 gen_jmp(s, tval);
6256 break;
6257 case 0xea: /* ljmp im */
6258 {
6259 unsigned int selector, offset;
6260
6261 if (CODE64(s))
6262 goto illegal_op;
6263 ot = dflag ? OT_LONG : OT_WORD;
6264 offset = insn_get(s, ot);
6265 selector = insn_get(s, OT_WORD);
6266
6267 gen_op_movl_T0_im(selector);
6268 gen_op_movl_T1_imu(offset);
6269 }
6270 goto do_ljmp;
6271 case 0xeb: /* jmp Jb */
6272 tval = (int8_t)insn_get(s, OT_BYTE);
6273 tval += s->pc - s->cs_base;
6274 if (s->dflag == 0)
6275 tval &= 0xffff;
6276 gen_jmp(s, tval);
6277 break;
6278 case 0x70 ... 0x7f: /* jcc Jb */
6279 tval = (int8_t)insn_get(s, OT_BYTE);
6280 goto do_jcc;
6281 case 0x180 ... 0x18f: /* jcc Jv */
6282 if (dflag) {
6283 tval = (int32_t)insn_get(s, OT_LONG);
6284 } else {
6285 tval = (int16_t)insn_get(s, OT_WORD);
6286 }
6287 do_jcc:
6288 next_eip = s->pc - s->cs_base;
6289 tval += next_eip;
6290 if (s->dflag == 0)
6291 tval &= 0xffff;
6292 gen_jcc(s, b, tval, next_eip);
6293 break;
6294
6295 case 0x190 ... 0x19f: /* setcc Gv */
6296 modrm = ldub_code(s->pc++);
6297 gen_setcc(s, b);
6298 gen_ldst_modrm(s, modrm, OT_BYTE, OR_TMP0, 1);
6299 break;
6300 case 0x140 ... 0x14f: /* cmov Gv, Ev */
6301 {
6302 int l1;
6303 TCGv t0;
6304
6305 ot = dflag + OT_WORD;
6306 modrm = ldub_code(s->pc++);
6307 reg = ((modrm >> 3) & 7) | rex_r;
6308 mod = (modrm >> 6) & 3;
6309 t0 = tcg_temp_local_new();
6310 if (mod != 3) {
6311 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6312 gen_op_ld_v(ot + s->mem_index, t0, cpu_A0);
6313 } else {
6314 rm = (modrm & 7) | REX_B(s);
6315 gen_op_mov_v_reg(ot, t0, rm);
6316 }
6317 #ifdef TARGET_X86_64
6318 if (ot == OT_LONG) {
6319 /* XXX: specific Intel behaviour ? */
6320 l1 = gen_new_label();
6321 gen_jcc1(s, s->cc_op, b ^ 1, l1);
6322 tcg_gen_st32_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
6323 gen_set_label(l1);
6324 tcg_gen_movi_tl(cpu_tmp0, 0);
6325 tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
6326 } else
6327 #endif
6328 {
6329 l1 = gen_new_label();
6330 gen_jcc1(s, s->cc_op, b ^ 1, l1);
6331 gen_op_mov_reg_v(ot, reg, t0);
6332 gen_set_label(l1);
6333 }
6334 tcg_temp_free(t0);
6335 }
6336 break;
6337
6338 /************************/
6339 /* flags */
6340 case 0x9c: /* pushf */
6341 gen_svm_check_intercept(s, pc_start, SVM_EXIT_PUSHF);
6342 if (s->vm86 && s->iopl != 3) {
6343 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6344 } else {
6345 if (s->cc_op != CC_OP_DYNAMIC)
6346 gen_op_set_cc_op(s->cc_op);
6347 gen_helper_read_eflags(cpu_T[0]);
6348 gen_push_T0(s);
6349 }
6350 break;
6351 case 0x9d: /* popf */
6352 gen_svm_check_intercept(s, pc_start, SVM_EXIT_POPF);
6353 if (s->vm86 && s->iopl != 3) {
6354 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6355 } else {
6356 gen_pop_T0(s);
6357 if (s->cpl == 0) {
6358 if (s->dflag) {
6359 gen_helper_write_eflags(cpu_T[0],
6360 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK | IOPL_MASK)));
6361 } else {
6362 gen_helper_write_eflags(cpu_T[0],
6363 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK | IOPL_MASK) & 0xffff));
6364 }
6365 } else {
6366 if (s->cpl <= s->iopl) {
6367 if (s->dflag) {
6368 gen_helper_write_eflags(cpu_T[0],
6369 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK)));
6370 } else {
6371 gen_helper_write_eflags(cpu_T[0],
6372 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK) & 0xffff));
6373 }
6374 } else {
6375 if (s->dflag) {
6376 gen_helper_write_eflags(cpu_T[0],
6377 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK)));
6378 } else {
6379 gen_helper_write_eflags(cpu_T[0],
6380 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK) & 0xffff));
6381 }
6382 }
6383 }
6384 gen_pop_update(s);
6385 s->cc_op = CC_OP_EFLAGS;
6386 /* abort translation because TF flag may change */
6387 gen_jmp_im(s->pc - s->cs_base);
6388 gen_eob(s);
6389 }
6390 break;
6391 case 0x9e: /* sahf */
6392 if (CODE64(s) && !(s->cpuid_ext3_features & CPUID_EXT3_LAHF_LM))
6393 goto illegal_op;
6394 gen_op_mov_TN_reg(OT_BYTE, 0, R_AH);
6395 if (s->cc_op != CC_OP_DYNAMIC)
6396 gen_op_set_cc_op(s->cc_op);
6397 gen_compute_eflags(cpu_cc_src);
6398 tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, CC_O);
6399 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], CC_S | CC_Z | CC_A | CC_P | CC_C);
6400 tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_T[0]);
6401 s->cc_op = CC_OP_EFLAGS;
6402 break;
6403 case 0x9f: /* lahf */
6404 if (CODE64(s) && !(s->cpuid_ext3_features & CPUID_EXT3_LAHF_LM))
6405 goto illegal_op;
6406 if (s->cc_op != CC_OP_DYNAMIC)
6407 gen_op_set_cc_op(s->cc_op);
6408 gen_compute_eflags(cpu_T[0]);
6409 /* Note: gen_compute_eflags() only gives the condition codes */
6410 tcg_gen_ori_tl(cpu_T[0], cpu_T[0], 0x02);
6411 gen_op_mov_reg_T0(OT_BYTE, R_AH);
6412 break;
6413 case 0xf5: /* cmc */
6414 if (s->cc_op != CC_OP_DYNAMIC)
6415 gen_op_set_cc_op(s->cc_op);
6416 gen_compute_eflags(cpu_cc_src);
6417 tcg_gen_xori_tl(cpu_cc_src, cpu_cc_src, CC_C);
6418 s->cc_op = CC_OP_EFLAGS;
6419 break;
6420 case 0xf8: /* clc */
6421 if (s->cc_op != CC_OP_DYNAMIC)
6422 gen_op_set_cc_op(s->cc_op);
6423 gen_compute_eflags(cpu_cc_src);
6424 tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, ~CC_C);
6425 s->cc_op = CC_OP_EFLAGS;
6426 break;
6427 case 0xf9: /* stc */
6428 if (s->cc_op != CC_OP_DYNAMIC)
6429 gen_op_set_cc_op(s->cc_op);
6430 gen_compute_eflags(cpu_cc_src);
6431 tcg_gen_ori_tl(cpu_cc_src, cpu_cc_src, CC_C);
6432 s->cc_op = CC_OP_EFLAGS;
6433 break;
6434 case 0xfc: /* cld */
6435 tcg_gen_movi_i32(cpu_tmp2_i32, 1);
6436 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, offsetof(CPUState, df));
6437 break;
6438 case 0xfd: /* std */
6439 tcg_gen_movi_i32(cpu_tmp2_i32, -1);
6440 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, offsetof(CPUState, df));
6441 break;
6442
6443 /************************/
6444 /* bit operations */
6445 case 0x1ba: /* bt/bts/btr/btc Gv, im */
6446 ot = dflag + OT_WORD;
6447 modrm = ldub_code(s->pc++);
6448 op = (modrm >> 3) & 7;
6449 mod = (modrm >> 6) & 3;
6450 rm = (modrm & 7) | REX_B(s);
6451 if (mod != 3) {
6452 s->rip_offset = 1;
6453 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6454 gen_op_ld_T0_A0(ot + s->mem_index);
6455 } else {
6456 gen_op_mov_TN_reg(ot, 0, rm);
6457 }
6458 /* load shift */
6459 val = ldub_code(s->pc++);
6460 gen_op_movl_T1_im(val);
6461 if (op < 4)
6462 goto illegal_op;
6463 op -= 4;
6464 goto bt_op;
6465 case 0x1a3: /* bt Gv, Ev */
6466 op = 0;
6467 goto do_btx;
6468 case 0x1ab: /* bts */
6469 op = 1;
6470 goto do_btx;
6471 case 0x1b3: /* btr */
6472 op = 2;
6473 goto do_btx;
6474 case 0x1bb: /* btc */
6475 op = 3;
6476 do_btx:
6477 ot = dflag + OT_WORD;
6478 modrm = ldub_code(s->pc++);
6479 reg = ((modrm >> 3) & 7) | rex_r;
6480 mod = (modrm >> 6) & 3;
6481 rm = (modrm & 7) | REX_B(s);
6482 gen_op_mov_TN_reg(OT_LONG, 1, reg);
6483 if (mod != 3) {
6484 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6485 /* specific case: we need to add a displacement */
6486 gen_exts(ot, cpu_T[1]);
6487 tcg_gen_sari_tl(cpu_tmp0, cpu_T[1], 3 + ot);
6488 tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, ot);
6489 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
6490 gen_op_ld_T0_A0(ot + s->mem_index);
6491 } else {
6492 gen_op_mov_TN_reg(ot, 0, rm);
6493 }
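/* for a memory operand, the signed bit index was shifted right by
   (3 + ot) to get a word offset, then left by ot to convert it back
   to bytes before being added to A0; only the low (3 + ot) bits of
   the index remain meaningful once we reach bt_op below */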
6494 bt_op:
6495 tcg_gen_andi_tl(cpu_T[1], cpu_T[1], (1 << (3 + ot)) - 1);
6496 switch(op) {
6497 case 0:
6498 tcg_gen_shr_tl(cpu_cc_src, cpu_T[0], cpu_T[1]);
6499 tcg_gen_movi_tl(cpu_cc_dst, 0);
6500 break;
6501 case 1:
6502 tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_T[1]);
6503 tcg_gen_movi_tl(cpu_tmp0, 1);
6504 tcg_gen_shl_tl(cpu_tmp0, cpu_tmp0, cpu_T[1]);
6505 tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
6506 break;
6507 case 2:
6508 tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_T[1]);
6509 tcg_gen_movi_tl(cpu_tmp0, 1);
6510 tcg_gen_shl_tl(cpu_tmp0, cpu_tmp0, cpu_T[1]);
6511 tcg_gen_not_tl(cpu_tmp0, cpu_tmp0);
6512 tcg_gen_and_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
6513 break;
6514 default:
6515 case 3:
6516 tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_T[1]);
6517 tcg_gen_movi_tl(cpu_tmp0, 1);
6518 tcg_gen_shl_tl(cpu_tmp0, cpu_tmp0, cpu_T[1]);
6519 tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
6520 break;
6521 }
6522 s->cc_op = CC_OP_SARB + ot;
6523 if (op != 0) {
6524 if (mod != 3)
6525 gen_op_st_T0_A0(ot + s->mem_index);
6526 else
6527 gen_op_mov_reg_T0(ot, rm);
6528 tcg_gen_mov_tl(cpu_cc_src, cpu_tmp4);
6529 tcg_gen_movi_tl(cpu_cc_dst, 0);
6530 }
6531 break;
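/* BSF/BSR: cc_dst is left at 0 when the source is zero and set to 1
   otherwise; with cc_op = CC_OP_LOGICB + ot this makes ZF reflect a
   zero source, while the remaining flags are architecturally
   undefined anyway. */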
6532 case 0x1bc: /* bsf */
6533 case 0x1bd: /* bsr */
6534 {
6535 int label1;
6536 TCGv t0;
6537
6538 ot = dflag + OT_WORD;
6539 modrm = ldub_code(s->pc++);
6540 reg = ((modrm >> 3) & 7) | rex_r;
6541 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
6542 gen_extu(ot, cpu_T[0]);
6543 label1 = gen_new_label();
6544 tcg_gen_movi_tl(cpu_cc_dst, 0);
6545 t0 = tcg_temp_local_new();
6546 tcg_gen_mov_tl(t0, cpu_T[0]);
6547 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, label1);
6548 if (b & 1) {
6549 gen_helper_bsr(cpu_T[0], t0);
6550 } else {
6551 gen_helper_bsf(cpu_T[0], t0);
6552 }
6553 gen_op_mov_reg_T0(ot, reg);
6554 tcg_gen_movi_tl(cpu_cc_dst, 1);
6555 gen_set_label(label1);
6556 tcg_gen_discard_tl(cpu_cc_src);
6557 s->cc_op = CC_OP_LOGICB + ot;
6558 tcg_temp_free(t0);
6559 }
6560 break;
6561 /************************/
6562 /* bcd */
6563 case 0x27: /* daa */
6564 if (CODE64(s))
6565 goto illegal_op;
6566 if (s->cc_op != CC_OP_DYNAMIC)
6567 gen_op_set_cc_op(s->cc_op);
6568 gen_helper_daa();
6569 s->cc_op = CC_OP_EFLAGS;
6570 break;
6571 case 0x2f: /* das */
6572 if (CODE64(s))
6573 goto illegal_op;
6574 if (s->cc_op != CC_OP_DYNAMIC)
6575 gen_op_set_cc_op(s->cc_op);
6576 gen_helper_das();
6577 s->cc_op = CC_OP_EFLAGS;
6578 break;
6579 case 0x37: /* aaa */
6580 if (CODE64(s))
6581 goto illegal_op;
6582 if (s->cc_op != CC_OP_DYNAMIC)
6583 gen_op_set_cc_op(s->cc_op);
6584 gen_helper_aaa();
6585 s->cc_op = CC_OP_EFLAGS;
6586 break;
6587 case 0x3f: /* aas */
6588 if (CODE64(s))
6589 goto illegal_op;
6590 if (s->cc_op != CC_OP_DYNAMIC)
6591 gen_op_set_cc_op(s->cc_op);
6592 gen_helper_aas();
6593 s->cc_op = CC_OP_EFLAGS;
6594 break;
6595 case 0xd4: /* aam */
6596 if (CODE64(s))
6597 goto illegal_op;
6598 val = ldub_code(s->pc++);
6599 if (val == 0) {
6600 gen_exception(s, EXCP00_DIVZ, pc_start - s->cs_base);
6601 } else {
6602 gen_helper_aam(tcg_const_i32(val));
6603 s->cc_op = CC_OP_LOGICB;
6604 }
6605 break;
6606 case 0xd5: /* aad */
6607 if (CODE64(s))
6608 goto illegal_op;
6609 val = ldub_code(s->pc++);
6610 gen_helper_aad(tcg_const_i32(val));
6611 s->cc_op = CC_OP_LOGICB;
6612 break;
6613 /************************/
6614 /* misc */
6615 case 0x90: /* nop */
6616 /* XXX: xchg + rex handling */
6617 /* XXX: correct lock test for all insn */
6618 if (prefixes & PREFIX_LOCK)
6619 goto illegal_op;
6620 if (prefixes & PREFIX_REPZ) {
6621 gen_svm_check_intercept(s, pc_start, SVM_EXIT_PAUSE);
6622 }
6623 break;
6624 case 0x9b: /* fwait */
6625 if ((s->flags & (HF_MP_MASK | HF_TS_MASK)) ==
6626 (HF_MP_MASK | HF_TS_MASK)) {
6627 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
6628 } else {
6629 if (s->cc_op != CC_OP_DYNAMIC)
6630 gen_op_set_cc_op(s->cc_op);
6631 gen_jmp_im(pc_start - s->cs_base);
6632 gen_helper_fwait();
6633 }
6634 break;
6635 case 0xcc: /* int3 */
6636 gen_interrupt(s, EXCP03_INT3, pc_start - s->cs_base, s->pc - s->cs_base);
6637 break;
6638 case 0xcd: /* int N */
6639 val = ldub_code(s->pc++);
6640 if (s->vm86 && s->iopl != 3) {
6641 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6642 } else {
6643 gen_interrupt(s, val, pc_start - s->cs_base, s->pc - s->cs_base);
6644 }
6645 break;
6646 case 0xce: /* into */
6647 if (CODE64(s))
6648 goto illegal_op;
6649 if (s->cc_op != CC_OP_DYNAMIC)
6650 gen_op_set_cc_op(s->cc_op);
6651 gen_jmp_im(pc_start - s->cs_base);
6652 gen_helper_into(tcg_const_i32(s->pc - pc_start));
6653 break;
6654 #ifdef WANT_ICEBP
6655 case 0xf1: /* icebp (undocumented, exits to external debugger) */
6656 gen_svm_check_intercept(s, pc_start, SVM_EXIT_ICEBP);
6657 #if 1
6658 gen_debug(s, pc_start - s->cs_base);
6659 #else
6660 /* start debug */
6661 tb_flush(cpu_single_env);
6662 cpu_set_log(CPU_LOG_INT | CPU_LOG_TB_IN_ASM);
6663 #endif
6664 break;
6665 #endif
6666 case 0xfa: /* cli */
6667 if (!s->vm86) {
6668 if (s->cpl <= s->iopl) {
6669 gen_helper_cli();
6670 } else {
6671 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6672 }
6673 } else {
6674 if (s->iopl == 3) {
6675 gen_helper_cli();
6676 } else {
6677 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6678 }
6679 }
6680 break;
6681 case 0xfb: /* sti */
6682 if (!s->vm86) {
6683 if (s->cpl <= s->iopl) {
6684 gen_sti:
6685 gen_helper_sti();
6686 /* interrupts are recognized only after the insn following sti */
6687 /* if several consecutive insns inhibit interrupts, only the
6688 _first_ one sets the inhibit flag */
6689 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
6690 gen_helper_set_inhibit_irq();
6691 /* give a chance to handle pending irqs */
6692 gen_jmp_im(s->pc - s->cs_base);
6693 gen_eob(s);
6694 } else {
6695 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6696 }
6697 } else {
6698 if (s->iopl == 3) {
6699 goto gen_sti;
6700 } else {
6701 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6702 }
6703 }
6704 break;
6705 case 0x62: /* bound */
6706 if (CODE64(s))
6707 goto illegal_op;
6708 ot = dflag ? OT_LONG : OT_WORD;
6709 modrm = ldub_code(s->pc++);
6710 reg = (modrm >> 3) & 7;
6711 mod = (modrm >> 6) & 3;
6712 if (mod == 3)
6713 goto illegal_op;
6714 gen_op_mov_TN_reg(ot, 0, reg);
6715 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6716 gen_jmp_im(pc_start - s->cs_base);
6717 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6718 if (ot == OT_WORD)
6719 gen_helper_boundw(cpu_A0, cpu_tmp2_i32);
6720 else
6721 gen_helper_boundl(cpu_A0, cpu_tmp2_i32);
6722 break;
6723 case 0x1c8 ... 0x1cf: /* bswap reg */
6724 reg = (b & 7) | REX_B(s);
6725 #ifdef TARGET_X86_64
6726 if (dflag == 2) {
6727 gen_op_mov_TN_reg(OT_QUAD, 0, reg);
6728 tcg_gen_bswap64_i64(cpu_T[0], cpu_T[0]);
6729 gen_op_mov_reg_T0(OT_QUAD, reg);
6730 } else
6731 #endif
6732 {
6733 gen_op_mov_TN_reg(OT_LONG, 0, reg);
6734 tcg_gen_ext32u_tl(cpu_T[0], cpu_T[0]);
6735 tcg_gen_bswap32_tl(cpu_T[0], cpu_T[0]);
6736 gen_op_mov_reg_T0(OT_LONG, reg);
6737 }
6738 break;
6739 case 0xd6: /* salc */
6740 if (CODE64(s))
6741 goto illegal_op;
6742 if (s->cc_op != CC_OP_DYNAMIC)
6743 gen_op_set_cc_op(s->cc_op);
6744 gen_compute_eflags_c(cpu_T[0]);
6745 tcg_gen_neg_tl(cpu_T[0], cpu_T[0]);
6746 gen_op_mov_reg_T0(OT_BYTE, R_EAX);
6747 break;
6748 case 0xe0: /* loopnz */
6749 case 0xe1: /* loopz */
6750 case 0xe2: /* loop */
6751 case 0xe3: /* jecxz */
6752 {
6753 int l1, l2, l3;
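/* l1 = branch target when the loop is taken (jump to tval),
   l3 = early exit for loopz/loopnz once ECX reaches zero,
   l2 = common join point before gen_eob() */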
6754
6755 tval = (int8_t)insn_get(s, OT_BYTE);
6756 next_eip = s->pc - s->cs_base;
6757 tval += next_eip;
6758 if (s->dflag == 0)
6759 tval &= 0xffff;
6760
6761 l1 = gen_new_label();
6762 l2 = gen_new_label();
6763 l3 = gen_new_label();
6764 b &= 3;
6765 switch(b) {
6766 case 0: /* loopnz */
6767 case 1: /* loopz */
6768 if (s->cc_op != CC_OP_DYNAMIC)
6769 gen_op_set_cc_op(s->cc_op);
6770 gen_op_add_reg_im(s->aflag, R_ECX, -1);
6771 gen_op_jz_ecx(s->aflag, l3);
6772 gen_compute_eflags(cpu_tmp0);
6773 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, CC_Z);
6774 if (b == 0) {
6775 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, l1);
6776 } else {
6777 tcg_gen_brcondi_tl(TCG_COND_NE, cpu_tmp0, 0, l1);
6778 }
6779 break;
6780 case 2: /* loop */
6781 gen_op_add_reg_im(s->aflag, R_ECX, -1);
6782 gen_op_jnz_ecx(s->aflag, l1);
6783 break;
6784 default:
6785 case 3: /* jcxz */
6786 gen_op_jz_ecx(s->aflag, l1);
6787 break;
6788 }
6789
6790 gen_set_label(l3);
6791 gen_jmp_im(next_eip);
6792 tcg_gen_br(l2);
6793
6794 gen_set_label(l1);
6795 gen_jmp_im(tval);
6796 gen_set_label(l2);
6797 gen_eob(s);
6798 }
6799 break;
6800 case 0x130: /* wrmsr */
6801 case 0x132: /* rdmsr */
6802 if (s->cpl != 0) {
6803 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6804 } else {
6805 if (s->cc_op != CC_OP_DYNAMIC)
6806 gen_op_set_cc_op(s->cc_op);
6807 gen_jmp_im(pc_start - s->cs_base);
6808 if (b & 2) {
6809 gen_helper_rdmsr();
6810 } else {
6811 gen_helper_wrmsr();
6812 }
6813 }
6814 break;
6815 case 0x131: /* rdtsc */
6816 if (s->cc_op != CC_OP_DYNAMIC)
6817 gen_op_set_cc_op(s->cc_op);
6818 gen_jmp_im(pc_start - s->cs_base);
6819 if (use_icount)
6820 gen_io_start();
6821 gen_helper_rdtsc();
6822 if (use_icount) {
6823 gen_io_end();
6824 gen_jmp(s, s->pc - s->cs_base);
6825 }
6826 break;
6827 case 0x133: /* rdpmc */
6828 if (s->cc_op != CC_OP_DYNAMIC)
6829 gen_op_set_cc_op(s->cc_op);
6830 gen_jmp_im(pc_start - s->cs_base);
6831 gen_helper_rdpmc();
6832 break;
6833 case 0x134: /* sysenter */
6834 /* on Intel CPUs, SYSENTER remains valid in 64-bit mode */
6835 if (CODE64(s) && cpu_single_env->cpuid_vendor1 != CPUID_VENDOR_INTEL_1)
6836 goto illegal_op;
6837 if (!s->pe) {
6838 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6839 } else {
6840 if (s->cc_op != CC_OP_DYNAMIC) {
6841 gen_op_set_cc_op(s->cc_op);
6842 s->cc_op = CC_OP_DYNAMIC;
6843 }
6844 gen_jmp_im(pc_start - s->cs_base);
6845 gen_helper_sysenter();
6846 gen_eob(s);
6847 }
6848 break;
6849 case 0x135: /* sysexit */
6850 /* on Intel CPUs, SYSEXIT remains valid in 64-bit mode */
6851 if (CODE64(s) && cpu_single_env->cpuid_vendor1 != CPUID_VENDOR_INTEL_1)
6852 goto illegal_op;
6853 if (!s->pe) {
6854 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6855 } else {
6856 if (s->cc_op != CC_OP_DYNAMIC) {
6857 gen_op_set_cc_op(s->cc_op);
6858 s->cc_op = CC_OP_DYNAMIC;
6859 }
6860 gen_jmp_im(pc_start - s->cs_base);
6861 gen_helper_sysexit(tcg_const_i32(dflag));
6862 gen_eob(s);
6863 }
6864 break;
6865 #ifdef TARGET_X86_64
6866 case 0x105: /* syscall */
6867 /* XXX: is it usable in real mode ? */
6868 if (s->cc_op != CC_OP_DYNAMIC) {
6869 gen_op_set_cc_op(s->cc_op);
6870 s->cc_op = CC_OP_DYNAMIC;
6871 }
6872 gen_jmp_im(pc_start - s->cs_base);
6873 gen_helper_syscall(tcg_const_i32(s->pc - pc_start));
6874 gen_eob(s);
6875 break;
6876 case 0x107: /* sysret */
6877 if (!s->pe) {
6878 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6879 } else {
6880 if (s->cc_op != CC_OP_DYNAMIC) {
6881 gen_op_set_cc_op(s->cc_op);
6882 s->cc_op = CC_OP_DYNAMIC;
6883 }
6884 gen_jmp_im(pc_start - s->cs_base);
6885 gen_helper_sysret(tcg_const_i32(s->dflag));
6886 /* condition codes are modified only in long mode */
6887 if (s->lma)
6888 s->cc_op = CC_OP_EFLAGS;
6889 gen_eob(s);
6890 }
6891 break;
6892 #endif
6893 case 0x1a2: /* cpuid */
6894 if (s->cc_op != CC_OP_DYNAMIC)
6895 gen_op_set_cc_op(s->cc_op);
6896 gen_jmp_im(pc_start - s->cs_base);
6897 gen_helper_cpuid();
6898 break;
6899 case 0xf4: /* hlt */
6900 if (s->cpl != 0) {
6901 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6902 } else {
6903 if (s->cc_op != CC_OP_DYNAMIC)
6904 gen_op_set_cc_op(s->cc_op);
6905 gen_jmp_im(pc_start - s->cs_base);
6906 gen_helper_hlt(tcg_const_i32(s->pc - pc_start));
6907 s->is_jmp = 3;
6908 }
6909 break;
6910 case 0x100:
6911 modrm = ldub_code(s->pc++);
6912 mod = (modrm >> 6) & 3;
6913 op = (modrm >> 3) & 7;
6914 switch(op) {
6915 case 0: /* sldt */
6916 if (!s->pe || s->vm86)
6917 goto illegal_op;
6918 gen_svm_check_intercept(s, pc_start, SVM_EXIT_LDTR_READ);
6919 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,ldt.selector));
6920 ot = OT_WORD;
6921 if (mod == 3)
6922 ot += s->dflag;
6923 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
6924 break;
6925 case 2: /* lldt */
6926 if (!s->pe || s->vm86)
6927 goto illegal_op;
6928 if (s->cpl != 0) {
6929 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6930 } else {
6931 gen_svm_check_intercept(s, pc_start, SVM_EXIT_LDTR_WRITE);
6932 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
6933 gen_jmp_im(pc_start - s->cs_base);
6934 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6935 gen_helper_lldt(cpu_tmp2_i32);
6936 }
6937 break;
6938 case 1: /* str */
6939 if (!s->pe || s->vm86)
6940 goto illegal_op;
6941 gen_svm_check_intercept(s, pc_start, SVM_EXIT_TR_READ);
6942 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,tr.selector));
6943 ot = OT_WORD;
6944 if (mod == 3)
6945 ot += s->dflag;
6946 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
6947 break;
6948 case 3: /* ltr */
6949 if (!s->pe || s->vm86)
6950 goto illegal_op;
6951 if (s->cpl != 0) {
6952 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6953 } else {
6954 gen_svm_check_intercept(s, pc_start, SVM_EXIT_TR_WRITE);
6955 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
6956 gen_jmp_im(pc_start - s->cs_base);
6957 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6958 gen_helper_ltr(cpu_tmp2_i32);
6959 }
6960 break;
6961 case 4: /* verr */
6962 case 5: /* verw */
6963 if (!s->pe || s->vm86)
6964 goto illegal_op;
6965 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
6966 if (s->cc_op != CC_OP_DYNAMIC)
6967 gen_op_set_cc_op(s->cc_op);
6968 if (op == 4)
6969 gen_helper_verr(cpu_T[0]);
6970 else
6971 gen_helper_verw(cpu_T[0]);
6972 s->cc_op = CC_OP_EFLAGS;
6973 break;
6974 default:
6975 goto illegal_op;
6976 }
6977 break;
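/* 0F 01 is "group 7": the reg field of the modrm byte selects
   sgdt/sidt/lgdt/lidt/smsw/lmsw/invlpg, while the mod == 3
   encodings are reused for monitor/mwait, the SVM instructions
   and (in 64-bit mode) swapgs. */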
6978 case 0x101:
6979 modrm = ldub_code(s->pc++);
6980 mod = (modrm >> 6) & 3;
6981 op = (modrm >> 3) & 7;
6982 rm = modrm & 7;
6983 switch(op) {
6984 case 0: /* sgdt */
6985 if (mod == 3)
6986 goto illegal_op;
6987 gen_svm_check_intercept(s, pc_start, SVM_EXIT_GDTR_READ);
6988 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6989 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, gdt.limit));
6990 gen_op_st_T0_A0(OT_WORD + s->mem_index);
6991 gen_add_A0_im(s, 2);
6992 tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, gdt.base));
6993 if (!s->dflag)
6994 gen_op_andl_T0_im(0xffffff);
6995 gen_op_st_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
6996 break;
6997 case 1:
6998 if (mod == 3) {
6999 switch (rm) {
7000 case 0: /* monitor */
7001 if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
7002 s->cpl != 0)
7003 goto illegal_op;
7004 if (s->cc_op != CC_OP_DYNAMIC)
7005 gen_op_set_cc_op(s->cc_op);
7006 gen_jmp_im(pc_start - s->cs_base);
7007 #ifdef TARGET_X86_64
7008 if (s->aflag == 2) {
7009 gen_op_movq_A0_reg(R_EAX);
7010 } else
7011 #endif
7012 {
7013 gen_op_movl_A0_reg(R_EAX);
7014 if (s->aflag == 0)
7015 gen_op_andl_A0_ffff();
7016 }
7017 gen_add_A0_ds_seg(s);
7018 gen_helper_monitor(cpu_A0);
7019 break;
7020 case 1: /* mwait */
7021 if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
7022 s->cpl != 0)
7023 goto illegal_op;
7024 if (s->cc_op != CC_OP_DYNAMIC) {
7025 gen_op_set_cc_op(s->cc_op);
7026 s->cc_op = CC_OP_DYNAMIC;
7027 }
7028 gen_jmp_im(pc_start - s->cs_base);
7029 gen_helper_mwait(tcg_const_i32(s->pc - pc_start));
7030 gen_eob(s);
7031 break;
7032 default:
7033 goto illegal_op;
7034 }
7035 } else { /* sidt */
7036 gen_svm_check_intercept(s, pc_start, SVM_EXIT_IDTR_READ);
7037 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7038 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, idt.limit));
7039 gen_op_st_T0_A0(OT_WORD + s->mem_index);
7040 gen_add_A0_im(s, 2);
7041 tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, idt.base));
7042 if (!s->dflag)
7043 gen_op_andl_T0_im(0xffffff);
7044 gen_op_st_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
7045 }
7046 break;
7047 case 2: /* lgdt */
7048 case 3: /* lidt */
7049 if (mod == 3) {
7050 if (s->cc_op != CC_OP_DYNAMIC)
7051 gen_op_set_cc_op(s->cc_op);
7052 gen_jmp_im(pc_start - s->cs_base);
7053 switch(rm) {
7054 case 0: /* VMRUN */
7055 if (!(s->flags & HF_SVME_MASK) || !s->pe)
7056 goto illegal_op;
7057 if (s->cpl != 0) {
7058 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7059 break;
7060 } else {
7061 gen_helper_vmrun(tcg_const_i32(s->aflag),
7062 tcg_const_i32(s->pc - pc_start));
7063 tcg_gen_exit_tb(0);
7064 s->is_jmp = 3;
7065 }
7066 break;
7067 case 1: /* VMMCALL */
7068 if (!(s->flags & HF_SVME_MASK))
7069 goto illegal_op;
7070 gen_helper_vmmcall();
7071 break;
7072 case 2: /* VMLOAD */
7073 if (!(s->flags & HF_SVME_MASK) || !s->pe)
7074 goto illegal_op;
7075 if (s->cpl != 0) {
7076 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7077 break;
7078 } else {
7079 gen_helper_vmload(tcg_const_i32(s->aflag));
7080 }
7081 break;
7082 case 3: /* VMSAVE */
7083 if (!(s->flags & HF_SVME_MASK) || !s->pe)
7084 goto illegal_op;
7085 if (s->cpl != 0) {
7086 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7087 break;
7088 } else {
7089 gen_helper_vmsave(tcg_const_i32(s->aflag));
7090 }
7091 break;
7092 case 4: /* STGI */
7093 if ((!(s->flags & HF_SVME_MASK) &&
7094 !(s->cpuid_ext3_features & CPUID_EXT3_SKINIT)) ||
7095 !s->pe)
7096 goto illegal_op;
7097 if (s->cpl != 0) {
7098 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7099 break;
7100 } else {
7101 gen_helper_stgi();
7102 }
7103 break;
7104 case 5: /* CLGI */
7105 if (!(s->flags & HF_SVME_MASK) || !s->pe)
7106 goto illegal_op;
7107 if (s->cpl != 0) {
7108 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7109 break;
7110 } else {
7111 gen_helper_clgi();
7112 }
7113 break;
7114 case 6: /* SKINIT */
7115 if ((!(s->flags & HF_SVME_MASK) &&
7116 !(s->cpuid_ext3_features & CPUID_EXT3_SKINIT)) ||
7117 !s->pe)
7118 goto illegal_op;
7119 gen_helper_skinit();
7120 break;
7121 case 7: /* INVLPGA */
7122 if (!(s->flags & HF_SVME_MASK) || !s->pe)
7123 goto illegal_op;
7124 if (s->cpl != 0) {
7125 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7126 break;
7127 } else {
7128 gen_helper_invlpga(tcg_const_i32(s->aflag));
7129 }
7130 break;
7131 default:
7132 goto illegal_op;
7133 }
7134 } else if (s->cpl != 0) {
7135 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7136 } else {
7137 gen_svm_check_intercept(s, pc_start,
7138 op==2 ? SVM_EXIT_GDTR_WRITE : SVM_EXIT_IDTR_WRITE);
7139 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7140 gen_op_ld_T1_A0(OT_WORD + s->mem_index);
7141 gen_add_A0_im(s, 2);
7142 gen_op_ld_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
7143 if (!s->dflag)
7144 gen_op_andl_T0_im(0xffffff);
7145 if (op == 2) {
7146 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,gdt.base));
7147 tcg_gen_st32_tl(cpu_T[1], cpu_env, offsetof(CPUX86State,gdt.limit));
7148 } else {
7149 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,idt.base));
7150 tcg_gen_st32_tl(cpu_T[1], cpu_env, offsetof(CPUX86State,idt.limit));
7151 }
7152 }
7153 break;
7154 case 4: /* smsw */
7155 gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_CR0);
7156 #if defined TARGET_X86_64 && defined WORDS_BIGENDIAN
7157 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,cr[0]) + 4);
7158 #else
7159 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,cr[0]));
7160 #endif
7161 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 1);
7162 break;
7163 case 6: /* lmsw */
7164 if (s->cpl != 0) {
7165 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7166 } else {
7167 gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0);
7168 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
7169 gen_helper_lmsw(cpu_T[0]);
7170 gen_jmp_im(s->pc - s->cs_base);
7171 gen_eob(s);
7172 }
7173 break;
7174 case 7: /* invlpg */
7175 if (s->cpl != 0) {
7176 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7177 } else {
7178 if (mod == 3) {
7179 #ifdef TARGET_X86_64
7180 if (CODE64(s) && rm == 0) {
7181 /* swapgs */
7182 tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,segs[R_GS].base));
7183 tcg_gen_ld_tl(cpu_T[1], cpu_env, offsetof(CPUX86State,kernelgsbase));
7184 tcg_gen_st_tl(cpu_T[1], cpu_env, offsetof(CPUX86State,segs[R_GS].base));
7185 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,kernelgsbase));
7186 } else
7187 #endif
7188 {
7189 goto illegal_op;
7190 }
7191 } else {
7192 if (s->cc_op != CC_OP_DYNAMIC)
7193 gen_op_set_cc_op(s->cc_op);
7194 gen_jmp_im(pc_start - s->cs_base);
7195 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7196 gen_helper_invlpg(cpu_A0);
7197 gen_jmp_im(s->pc - s->cs_base);
7198 gen_eob(s);
7199 }
7200 }
7201 break;
7202 default:
7203 goto illegal_op;
7204 }
7205 break;
7206 case 0x108: /* invd */
7207 case 0x109: /* wbinvd */
7208 if (s->cpl != 0) {
7209 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7210 } else {
7211 gen_svm_check_intercept(s, pc_start, (b & 2) ? SVM_EXIT_INVD : SVM_EXIT_WBINVD);
7212 /* nothing to do */
7213 }
7214 break;
7215 case 0x63: /* arpl or movslS (x86_64) */
7216 #ifdef TARGET_X86_64
7217 if (CODE64(s)) {
7218 int d_ot;
7219 /* d_ot is the size of the destination operand */
7220 d_ot = dflag + OT_WORD;
7221
7222 modrm = ldub_code(s->pc++);
7223 reg = ((modrm >> 3) & 7) | rex_r;
7224 mod = (modrm >> 6) & 3;
7225 rm = (modrm & 7) | REX_B(s);
7226
7227 if (mod == 3) {
7228 gen_op_mov_TN_reg(OT_LONG, 0, rm);
7229 /* sign extend */
7230 if (d_ot == OT_QUAD)
7231 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
7232 gen_op_mov_reg_T0(d_ot, reg);
7233 } else {
7234 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7235 if (d_ot == OT_QUAD) {
7236 gen_op_lds_T0_A0(OT_LONG + s->mem_index);
7237 } else {
7238 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
7239 }
7240 gen_op_mov_reg_T0(d_ot, reg);
7241 }
7242 } else
7243 #endif
7244 {
7245 int label1;
7246 TCGv t0, t1, t2;
7247
7248 if (!s->pe || s->vm86)
7249 goto illegal_op;
7250 t0 = tcg_temp_local_new();
7251 t1 = tcg_temp_local_new();
7252 t2 = tcg_temp_local_new();
7253 ot = OT_WORD;
7254 modrm = ldub_code(s->pc++);
7255 reg = (modrm >> 3) & 7;
7256 mod = (modrm >> 6) & 3;
7257 rm = modrm & 7;
7258 if (mod != 3) {
7259 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7260 gen_op_ld_v(ot + s->mem_index, t0, cpu_A0);
7261 } else {
7262 gen_op_mov_v_reg(ot, t0, rm);
7263 }
7264 gen_op_mov_v_reg(ot, t1, reg);
7265 tcg_gen_andi_tl(cpu_tmp0, t0, 3);
7266 tcg_gen_andi_tl(t1, t1, 3);
7267 tcg_gen_movi_tl(t2, 0);
7268 label1 = gen_new_label();
7269 tcg_gen_brcond_tl(TCG_COND_GE, cpu_tmp0, t1, label1);
7270 tcg_gen_andi_tl(t0, t0, ~3);
7271 tcg_gen_or_tl(t0, t0, t1);
7272 tcg_gen_movi_tl(t2, CC_Z);
7273 gen_set_label(label1);
7274 if (mod != 3) {
7275 gen_op_st_v(ot + s->mem_index, t0, cpu_A0);
7276 } else {
7277 gen_op_mov_reg_v(ot, rm, t0);
7278 }
7279 if (s->cc_op != CC_OP_DYNAMIC)
7280 gen_op_set_cc_op(s->cc_op);
7281 gen_compute_eflags(cpu_cc_src);
7282 tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, ~CC_Z);
7283 tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, t2);
7284 s->cc_op = CC_OP_EFLAGS;
7285 tcg_temp_free(t0);
7286 tcg_temp_free(t1);
7287 tcg_temp_free(t2);
7288 }
7289 break;
7290 case 0x102: /* lar */
7291 case 0x103: /* lsl */
7292 {
7293 int label1;
7294 TCGv t0;
7295 if (!s->pe || s->vm86)
7296 goto illegal_op;
7297 ot = dflag ? OT_LONG : OT_WORD;
7298 modrm = ldub_code(s->pc++);
7299 reg = ((modrm >> 3) & 7) | rex_r;
7300 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
7301 t0 = tcg_temp_local_new();
7302 if (s->cc_op != CC_OP_DYNAMIC)
7303 gen_op_set_cc_op(s->cc_op);
7304 if (b == 0x102)
7305 gen_helper_lar(t0, cpu_T[0]);
7306 else
7307 gen_helper_lsl(t0, cpu_T[0]);
7308 tcg_gen_andi_tl(cpu_tmp0, cpu_cc_src, CC_Z);
7309 label1 = gen_new_label();
7310 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, label1);
7311 gen_op_mov_reg_v(ot, reg, t0);
7312 gen_set_label(label1);
7313 s->cc_op = CC_OP_EFLAGS;
7314 tcg_temp_free(t0);
7315 }
7316 break;
7317 case 0x118:
7318 modrm = ldub_code(s->pc++);
7319 mod = (modrm >> 6) & 3;
7320 op = (modrm >> 3) & 7;
7321 switch(op) {
7322 case 0: /* prefetchnta */
7323 case 1: /* prefetcht0 */
7324 case 2: /* prefetcht1 */
7325 case 3: /* prefetcht2 */
7326 if (mod == 3)
7327 goto illegal_op;
7328 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7329 /* nothing more to do */
7330 break;
7331 default: /* nop (multi byte) */
7332 gen_nop_modrm(s, modrm);
7333 break;
7334 }
7335 break;
7336 case 0x119 ... 0x11f: /* nop (multi byte) */
7337 modrm = ldub_code(s->pc++);
7338 gen_nop_modrm(s, modrm);
7339 break;
7340 case 0x120: /* mov reg, crN */
7341 case 0x122: /* mov crN, reg */
7342 if (s->cpl != 0) {
7343 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7344 } else {
7345 modrm = ldub_code(s->pc++);
7346 if ((modrm & 0xc0) != 0xc0)
7347 goto illegal_op;
7348 rm = (modrm & 7) | REX_B(s);
7349 reg = ((modrm >> 3) & 7) | rex_r;
7350 if (CODE64(s))
7351 ot = OT_QUAD;
7352 else
7353 ot = OT_LONG;
7354 switch(reg) {
7355 case 0:
7356 case 2:
7357 case 3:
7358 case 4:
7359 case 8:
7360 if (s->cc_op != CC_OP_DYNAMIC)
7361 gen_op_set_cc_op(s->cc_op);
7362 gen_jmp_im(pc_start - s->cs_base);
7363 if (b & 2) {
7364 gen_op_mov_TN_reg(ot, 0, rm);
7365 gen_helper_write_crN(tcg_const_i32(reg), cpu_T[0]);
7366 gen_jmp_im(s->pc - s->cs_base);
7367 gen_eob(s);
7368 } else {
7369 gen_helper_read_crN(cpu_T[0], tcg_const_i32(reg));
7370 gen_op_mov_reg_T0(ot, rm);
7371 }
7372 break;
7373 default:
7374 goto illegal_op;
7375 }
7376 }
7377 break;
7378 case 0x121: /* mov reg, drN */
7379 case 0x123: /* mov drN, reg */
7380 if (s->cpl != 0) {
7381 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7382 } else {
7383 modrm = ldub_code(s->pc++);
7384 if ((modrm & 0xc0) != 0xc0)
7385 goto illegal_op;
7386 rm = (modrm & 7) | REX_B(s);
7387 reg = ((modrm >> 3) & 7) | rex_r;
7388 if (CODE64(s))
7389 ot = OT_QUAD;
7390 else
7391 ot = OT_LONG;
7392 /* XXX: do it dynamically with CR4.DE bit */
7393 if (reg == 4 || reg == 5 || reg >= 8)
7394 goto illegal_op;
7395 if (b & 2) {
7396 gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_DR0 + reg);
7397 gen_op_mov_TN_reg(ot, 0, rm);
7398 gen_helper_movl_drN_T0(tcg_const_i32(reg), cpu_T[0]);
7399 gen_jmp_im(s->pc - s->cs_base);
7400 gen_eob(s);
7401 } else {
7402 gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_DR0 + reg);
7403 tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,dr[reg]));
7404 gen_op_mov_reg_T0(ot, rm);
7405 }
7406 }
7407 break;
7408 case 0x106: /* clts */
7409 if (s->cpl != 0) {
7410 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7411 } else {
7412 gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0);
7413 gen_helper_clts();
7414 /* abort block because static cpu state changed */
7415 gen_jmp_im(s->pc - s->cs_base);
7416 gen_eob(s);
7417 }
7418 break;
7419 /* MMX/3DNow!/SSE/SSE2/SSE3/SSSE3/SSE4 support */
7420 case 0x1c3: /* MOVNTI reg, mem */
7421 if (!(s->cpuid_features & CPUID_SSE2))
7422 goto illegal_op;
7423 ot = s->dflag == 2 ? OT_QUAD : OT_LONG;
7424 modrm = ldub_code(s->pc++);
7425 mod = (modrm >> 6) & 3;
7426 if (mod == 3)
7427 goto illegal_op;
7428 reg = ((modrm >> 3) & 7) | rex_r;
7429 /* generate a generic store */
7430 gen_ldst_modrm(s, modrm, ot, reg, 1);
7431 break;
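/* 0F AE is "group 15": the reg field selects fxsave/fxrstor,
   ldmxcsr/stmxcsr and the fence instructions; for reg == 7,
   mod == 3 means sfence and a memory operand means clflush. */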
7432 case 0x1ae:
7433 modrm = ldub_code(s->pc++);
7434 mod = (modrm >> 6) & 3;
7435 op = (modrm >> 3) & 7;
7436 switch(op) {
7437 case 0: /* fxsave */
7438 if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
7439 (s->flags & HF_EM_MASK))
7440 goto illegal_op;
7441 if (s->flags & HF_TS_MASK) {
7442 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
7443 break;
7444 }
7445 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7446 if (s->cc_op != CC_OP_DYNAMIC)
7447 gen_op_set_cc_op(s->cc_op);
7448 gen_jmp_im(pc_start - s->cs_base);
7449 gen_helper_fxsave(cpu_A0, tcg_const_i32((s->dflag == 2)));
7450 break;
7451 case 1: /* fxrstor */
7452 if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
7453 (s->flags & HF_EM_MASK))
7454 goto illegal_op;
7455 if (s->flags & HF_TS_MASK) {
7456 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
7457 break;
7458 }
7459 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7460 if (s->cc_op != CC_OP_DYNAMIC)
7461 gen_op_set_cc_op(s->cc_op);
7462 gen_jmp_im(pc_start - s->cs_base);
7463 gen_helper_fxrstor(cpu_A0, tcg_const_i32((s->dflag == 2)));
7464 break;
7465 case 2: /* ldmxcsr */
7466 case 3: /* stmxcsr */
7467 if (s->flags & HF_TS_MASK) {
7468 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
7469 break;
7470 }
7471 if ((s->flags & HF_EM_MASK) || !(s->flags & HF_OSFXSR_MASK) ||
7472 mod == 3)
7473 goto illegal_op;
7474 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7475 if (op == 2) {
7476 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
7477 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, mxcsr));
7478 } else {
7479 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, mxcsr));
7480 gen_op_st_T0_A0(OT_LONG + s->mem_index);
7481 }
7482 break;
7483 case 5: /* lfence */
7484 case 6: /* mfence */
7485 if ((modrm & 0xc7) != 0xc0 || !(s->cpuid_features & CPUID_SSE))
7486 goto illegal_op;
7487 break;
7488 case 7: /* sfence / clflush */
7489 if ((modrm & 0xc7) == 0xc0) {
7490 /* sfence */
7491 /* XXX: also check for cpuid_ext2_features & CPUID_EXT2_EMMX */
7492 if (!(s->cpuid_features & CPUID_SSE))
7493 goto illegal_op;
7494 } else {
7495 /* clflush */
7496 if (!(s->cpuid_features & CPUID_CLFLUSH))
7497 goto illegal_op;
7498 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7499 }
7500 break;
7501 default:
7502 goto illegal_op;
7503 }
7504 break;
7505 case 0x10d: /* 3DNow! prefetch(w) */
7506 modrm = ldub_code(s->pc++);
7507 mod = (modrm >> 6) & 3;
7508 if (mod == 3)
7509 goto illegal_op;
7510 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7511 /* ignore for now */
7512 break;
7513 case 0x1aa: /* rsm */
7514 gen_svm_check_intercept(s, pc_start, SVM_EXIT_RSM);
7515 if (!(s->flags & HF_SMM_MASK))
7516 goto illegal_op;
7517 if (s->cc_op != CC_OP_DYNAMIC) {
7518 gen_op_set_cc_op(s->cc_op);
7519 s->cc_op = CC_OP_DYNAMIC;
7520 }
7521 gen_jmp_im(s->pc - s->cs_base);
7522 gen_helper_rsm();
7523 gen_eob(s);
7524 break;
7525 case 0x1b8: /* SSE4.2 popcnt */
7526 if ((prefixes & (PREFIX_REPZ | PREFIX_LOCK | PREFIX_REPNZ)) !=
7527 PREFIX_REPZ)
7528 goto illegal_op;
7529 if (!(s->cpuid_ext_features & CPUID_EXT_POPCNT))
7530 goto illegal_op;
7531
7532 modrm = ldub_code(s->pc++);
7533 reg = ((modrm >> 3) & 7);
7534
7535 if (s->prefix & PREFIX_DATA)
7536 ot = OT_WORD;
7537 else if (s->dflag != 2)
7538 ot = OT_LONG;
7539 else
7540 ot = OT_QUAD;
7541
7542 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
7543 gen_helper_popcnt(cpu_T[0], cpu_T[0], tcg_const_i32(ot));
7544 gen_op_mov_reg_T0(ot, reg);
7545
7546 s->cc_op = CC_OP_EFLAGS;
7547 break;
7548 case 0x10e ... 0x10f:
7549 /* 3DNow! instructions, ignore prefixes */
7550 s->prefix &= ~(PREFIX_REPZ | PREFIX_REPNZ | PREFIX_DATA);
7551 case 0x110 ... 0x117:
7552 case 0x128 ... 0x12f:
7553 case 0x138 ... 0x13a:
7554 case 0x150 ... 0x177:
7555 case 0x17c ... 0x17f:
7556 case 0x1c2:
7557 case 0x1c4 ... 0x1c6:
7558 case 0x1d0 ... 0x1fe:
7559 gen_sse(s, b, pc_start, rex_r);
7560 break;
7561 default:
7562 goto illegal_op;
7563 }
7564 /* lock generation */
7565 if (s->prefix & PREFIX_LOCK)
7566 gen_helper_unlock();
7567 return s->pc;
7568 illegal_op:
7569 if (s->prefix & PREFIX_LOCK)
7570 gen_helper_unlock();
7571 /* XXX: ensure that no lock was generated */
7572 gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
7573 return s->pc;
7574 }
7575
7576 void optimize_flags_init(void)
7577 {
7578 #if TCG_TARGET_REG_BITS == 32
7579 assert(sizeof(CCTable) == (1 << 3));
7580 #else
7581 assert(sizeof(CCTable) == (1 << 4));
7582 #endif
7583 cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
7584 cpu_cc_op = tcg_global_mem_new_i32(TCG_AREG0,
7585 offsetof(CPUState, cc_op), "cc_op");
7586 cpu_cc_src = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cc_src),
7587 "cc_src");
7588 cpu_cc_dst = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cc_dst),
7589 "cc_dst");
7590 cpu_cc_tmp = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cc_tmp),
7591 "cc_tmp");
7592
7593 /* register helpers */
7594 #define GEN_HELPER 2
7595 #include "helper.h"
7596 }
7597
7598 /* generate intermediate code in gen_opc_buf and gen_opparam_buf for
7599 basic block 'tb'. If search_pc is TRUE, also generate PC
7600 information for each intermediate instruction. */
7601 static inline void gen_intermediate_code_internal(CPUState *env,
7602 TranslationBlock *tb,
7603 int search_pc)
7604 {
7605 DisasContext dc1, *dc = &dc1;
7606 target_ulong pc_ptr;
7607 uint16_t *gen_opc_end;
7608 CPUBreakpoint *bp;
7609 int j, lj, cflags;
7610 uint64_t flags;
7611 target_ulong pc_start;
7612 target_ulong cs_base;
7613 int num_insns;
7614 int max_insns;
7615
7616 /* generate intermediate code */
7617 pc_start = tb->pc;
7618 cs_base = tb->cs_base;
7619 flags = tb->flags;
7620 cflags = tb->cflags;
7621
7622 dc->pe = (flags >> HF_PE_SHIFT) & 1;
7623 dc->code32 = (flags >> HF_CS32_SHIFT) & 1;
7624 dc->ss32 = (flags >> HF_SS32_SHIFT) & 1;
7625 dc->addseg = (flags >> HF_ADDSEG_SHIFT) & 1;
7626 dc->f_st = 0;
7627 dc->vm86 = (flags >> VM_SHIFT) & 1;
7628 dc->cpl = (flags >> HF_CPL_SHIFT) & 3;
7629 dc->iopl = (flags >> IOPL_SHIFT) & 3;
7630 dc->tf = (flags >> TF_SHIFT) & 1;
7631 dc->singlestep_enabled = env->singlestep_enabled;
7632 dc->cc_op = CC_OP_DYNAMIC;
7633 dc->cs_base = cs_base;
7634 dc->tb = tb;
7635 dc->popl_esp_hack = 0;
7636 /* select memory access functions */
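/* (mem_index presumably indexes the per-MMU-mode softmmu helper
   tables: 0 = no softmmu, 1 * 4 = kernel mode, 2 * 4 = CPL 3
   user code) */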
7637 dc->mem_index = 0;
7638 if (flags & HF_SOFTMMU_MASK) {
7639 if (dc->cpl == 3)
7640 dc->mem_index = 2 * 4;
7641 else
7642 dc->mem_index = 1 * 4;
7643 }
7644 dc->cpuid_features = env->cpuid_features;
7645 dc->cpuid_ext_features = env->cpuid_ext_features;
7646 dc->cpuid_ext2_features = env->cpuid_ext2_features;
7647 dc->cpuid_ext3_features = env->cpuid_ext3_features;
7648 #ifdef TARGET_X86_64
7649 dc->lma = (flags >> HF_LMA_SHIFT) & 1;
7650 dc->code64 = (flags >> HF_CS64_SHIFT) & 1;
7651 #endif
7652 dc->flags = flags;
7653 dc->jmp_opt = !(dc->tf || env->singlestep_enabled ||
7654 (flags & HF_INHIBIT_IRQ_MASK)
7655 #ifndef CONFIG_SOFTMMU
7656 || (flags & HF_SOFTMMU_MASK)
7657 #endif
7658 );
7659 #if 0
7660 /* check addseg logic */
7661 if (!dc->addseg && (dc->vm86 || !dc->pe || !dc->code32))
7662 printf("ERROR addseg\n");
7663 #endif
7664
7665 cpu_T[0] = tcg_temp_new();
7666 cpu_T[1] = tcg_temp_new();
7667 cpu_A0 = tcg_temp_new();
7668 cpu_T3 = tcg_temp_new();
7669
7670 cpu_tmp0 = tcg_temp_new();
7671 cpu_tmp1_i64 = tcg_temp_new_i64();
7672 cpu_tmp2_i32 = tcg_temp_new_i32();
7673 cpu_tmp3_i32 = tcg_temp_new_i32();
7674 cpu_tmp4 = tcg_temp_new();
7675 cpu_tmp5 = tcg_temp_new();
7676 cpu_tmp6 = tcg_temp_new();
7677 cpu_ptr0 = tcg_temp_new_ptr();
7678 cpu_ptr1 = tcg_temp_new_ptr();
7679
7680 gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
7681
7682 dc->is_jmp = DISAS_NEXT;
7683 pc_ptr = pc_start;
7684 lj = -1;
7685 num_insns = 0;
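/* with icount, tb->cflags carries the instruction budget for this
   block; a budget of zero seems to mean "unlimited", so fall back
   to the mask value */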
7686 max_insns = tb->cflags & CF_COUNT_MASK;
7687 if (max_insns == 0)
7688 max_insns = CF_COUNT_MASK;
7689
7690 gen_icount_start();
7691 for(;;) {
7692 if (unlikely(!QTAILQ_EMPTY(&env->breakpoints))) {
7693 QTAILQ_FOREACH(bp, &env->breakpoints, entry) {
7694 if (bp->pc == pc_ptr &&
7695 !((bp->flags & BP_CPU) && (tb->flags & HF_RF_MASK))) {
7696 gen_debug(dc, pc_ptr - dc->cs_base);
7697 break;
7698 }
7699 }
7700 }
7701 if (search_pc) {
7702 j = gen_opc_ptr - gen_opc_buf;
7703 if (lj < j) {
7704 lj++;
7705 while (lj < j)
7706 gen_opc_instr_start[lj++] = 0;
7707 }
7708 gen_opc_pc[lj] = pc_ptr;
7709 gen_opc_cc_op[lj] = dc->cc_op;
7710 gen_opc_instr_start[lj] = 1;
7711 gen_opc_icount[lj] = num_insns;
7712 }
7713 if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
7714 gen_io_start();
7715
7716 pc_ptr = disas_insn(dc, pc_ptr);
7717 num_insns++;
7718 #ifdef CONFIG_HAX
7719 if (hax_enabled() && hax_stop_translate(env))
7720 {
7721 gen_jmp_im(pc_ptr - dc->cs_base);
7722 gen_eob(dc);
7723 break;
7724 }
7725 #endif
7726 /* stop translation if indicated */
7727 if (dc->is_jmp)
7728 break;
7729 /* in single-step mode, we generate only one instruction and
7730 then raise an exception */
7731 /* if irqs were inhibited with HF_INHIBIT_IRQ_MASK, we clear
7732 the flag and abort the translation to give the irqs a
7733 chance to happen */
7734 if (dc->tf || dc->singlestep_enabled ||
7735 (flags & HF_INHIBIT_IRQ_MASK)) {
7736 gen_jmp_im(pc_ptr - dc->cs_base);
7737 gen_eob(dc);
7738 break;
7739 }
7740 /* if too long translation, stop generation too */
7741 if (gen_opc_ptr >= gen_opc_end ||
7742 (pc_ptr - pc_start) >= (TARGET_PAGE_SIZE - 32) ||
7743 num_insns >= max_insns) {
7744 gen_jmp_im(pc_ptr - dc->cs_base);
7745 gen_eob(dc);
7746 break;
7747 }
7748 if (singlestep) {
7749 gen_jmp_im(pc_ptr - dc->cs_base);
7750 gen_eob(dc);
7751 break;
7752 }
7753 }
7754 if (tb->cflags & CF_LAST_IO)
7755 gen_io_end();
7756 gen_icount_end(tb, num_insns);
7757 *gen_opc_ptr = INDEX_op_end;
7758 /* don't forget to fill in the last values */
7759 if (search_pc) {
7760 j = gen_opc_ptr - gen_opc_buf;
7761 lj++;
7762 while (lj <= j)
7763 gen_opc_instr_start[lj++] = 0;
7764 }
7765
7766 #ifdef DEBUG_DISAS
7767 log_cpu_state_mask(CPU_LOG_TB_CPU, env, X86_DUMP_CCOP);
7768 if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
7769 int disas_flags;
7770 qemu_log("----------------\n");
7771 qemu_log("IN: %s\n", lookup_symbol(pc_start));
7772 #ifdef TARGET_X86_64
7773 if (dc->code64)
7774 disas_flags = 2;
7775 else
7776 #endif
7777 disas_flags = !dc->code32;
7778 log_target_disas(pc_start, pc_ptr - pc_start, disas_flags);
7779 qemu_log("\n");
7780 }
7781 #endif
7782
7783 if (!search_pc) {
7784 tb->size = pc_ptr - pc_start;
7785 tb->icount = num_insns;
7786 }
7787 }
7788
7789 void gen_intermediate_code(CPUState *env, TranslationBlock *tb)
7790 {
7791 gen_intermediate_code_internal(env, tb, 0);
7792 }
7793
7794 void gen_intermediate_code_pc(CPUState *env, TranslationBlock *tb)
7795 {
7796 gen_intermediate_code_internal(env, tb, 1);
7797 }
7798
7799 void restore_state_to_opc(CPUState *env, TranslationBlock *tb, int pc_pos)
7800 {
7801 int cc_op;
7802 #ifdef DEBUG_DISAS
7803 if (qemu_loglevel_mask(CPU_LOG_TB_OP)) {
7804 int i;
7805 qemu_log("RESTORE:\n");
7806 for(i = 0;i <= pc_pos; i++) {
7807 if (gen_opc_instr_start[i]) {
7808 qemu_log("0x%04x: " TARGET_FMT_lx "\n", i, gen_opc_pc[i]);
7809 }
7810 }
7811 qemu_log("pc_pos=0x%x eip=" TARGET_FMT_lx " cs_base=%x\n",
7812 pc_pos, gen_opc_pc[pc_pos] - tb->cs_base,
7813 (uint32_t)tb->cs_base);
7814 }
7815 #endif
7816 env->eip = gen_opc_pc[pc_pos] - tb->cs_base;
7817 cc_op = gen_opc_cc_op[pc_pos];
7818 if (cc_op != CC_OP_DYNAMIC)
7819 env->cc_op = cc_op;
7820 }
7821