Searched refs:subopc (Results 1 – 7 of 7) sorted by relevance
/external/valgrind/VEX/priv/

host_x86_defs.c
  2075  Int subopc;    in do_fop2_st() local
  2077  case Xfp_ADD: subopc = 0; break;    in do_fop2_st()
  2078  case Xfp_SUB: subopc = 4; break;    in do_fop2_st()
  2079  case Xfp_MUL: subopc = 1; break;    in do_fop2_st()
  2080  case Xfp_DIV: subopc = 6; break;    in do_fop2_st()
  2084  p = doAMode_R_enc_enc(p, subopc, i);    in do_fop2_st()
  2132  UInt irno, opc, opc_rr, subopc_imm, opc_imma, opc_cl, opc_imm, subopc;    in emit_X86Instr() local
  2300  opc_cl = opc_imm = subopc = 0;    in emit_X86Instr()
  2302  case Xsh_SHR: opc_cl = 0xD3; opc_imm = 0xC1; subopc = 5; break;    in emit_X86Instr()
  2303  case Xsh_SAR: opc_cl = 0xD3; opc_imm = 0xC1; subopc = 7; break;    in emit_X86Instr()
  [all …]

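Note (editorial): the do_fop2_st() cases above match the x87 sub-opcodes of the D8/DC opcode group (/0 FADD, /1 FMUL, /4 FSUB, /6 FDIV), and the Xsh_* cases are the group-2 shift sub-opcodes within opcodes 0xC1 (shift by imm8) and 0xD3 (shift by %cl); in both, subopc evidently ends up in the reg field of the ModRM byte via the doAMode_* helpers. A minimal standalone sketch of that encoding, assuming nothing about VEX's real helpers:

/* Sketch only (not VEX's doAMode_R_enc_enc): how a "sub-opcode" selects
   the operation within x86 opcode 0xC1 (shift r/m32 by imm8).  The
   sub-opcode occupies the ModRM reg field, bits 5:3; the target register
   goes in the r/m field, bits 2:0.  Values follow the excerpt above:
   /4 = SHL, /5 = SHR, /7 = SAR. */
#include <stdio.h>

static unsigned char modrm(unsigned mod, unsigned subopc, unsigned rm)
{
   return (unsigned char)(((mod & 3) << 6) | ((subopc & 7) << 3) | (rm & 7));
}

int main(void)
{
   unsigned subopc = 5;                 /* SHR, as in case Xsh_SHR above */
   unsigned ecx    = 1;                 /* register number of %ecx */
   unsigned char code[3];
   code[0] = 0xC1;                      /* opc_imm in the excerpt */
   code[1] = modrm(3, subopc, ecx);     /* mod=11: register-direct */
   code[2] = 4;                         /* immediate shift amount */
   printf("%02x %02x %02x  ; shr $4, %%ecx\n", code[0], code[1], code[2]);
   return 0;
}
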
host_arm_defs.c
  3085  UInt instr, subopc;    in emit_ARMInstr() local
  3091  case ARMalu_ADD: subopc = X0100; break;    in emit_ARMInstr()
  3092  case ARMalu_ADC: subopc = X0101; break;    in emit_ARMInstr()
  3094  case ARMalu_SUB: subopc = X0010; break;    in emit_ARMInstr()
  3095  case ARMalu_SBC: subopc = X0110; break;    in emit_ARMInstr()
  3096  case ARMalu_AND: subopc = X0000; break;    in emit_ARMInstr()
  3097  case ARMalu_BIC: subopc = X1110; break;    in emit_ARMInstr()
  3098  case ARMalu_OR:  subopc = X1100; break;    in emit_ARMInstr()
  3099  case ARMalu_XOR: subopc = X0001; break;    in emit_ARMInstr()
  3103  instr |= XXXXX___(X1110, (1 & (subopc >> 3)),    in emit_ARMInstr()
  [all …]

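Note (editorial): the ARMalu_* values are the 4-bit opcode field of an A32 data-processing instruction (bits 24:21); the XXXXX___ line splits that field across two nibbles when the instruction word is assembled. A minimal standalone sketch of where the sub-opcode lands, assuming a plain register-operand form and none of VEX's helpers:

/* Sketch only (not VEX's XXXXX___): the 4-bit ALU sub-opcode occupies
   bits 24:21 of an ARM (A32) data-processing instruction.  Values follow
   the excerpt: ADD=0100, SUB=0010, AND=0000, ORR=1100, EOR=0001, ... */
#include <stdio.h>
#include <stdint.h>

static uint32_t arm_dp_reg(unsigned cond, unsigned subopc, unsigned s,
                           unsigned rn, unsigned rd, unsigned rm)
{
   return ((uint32_t)(cond   & 0xF) << 28)
        | ((uint32_t)(subopc & 0xF) << 21)
        | ((uint32_t)(s      & 1)   << 20)
        | ((uint32_t)(rn     & 0xF) << 16)
        | ((uint32_t)(rd     & 0xF) << 12)
        |  (uint32_t)(rm     & 0xF);       /* register operand, no shift */
}

int main(void)
{
   /* add r0, r1, r2  ->  cond=AL(1110), subopc=0100 (ADD), s=0 */
   printf("%08x\n", (unsigned)arm_dp_reg(0xE, 0x4, 0, 1, 0, 2));  /* e0810002 */
   return 0;
}
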
host_amd64_defs.c
  2455  UInt /*irno,*/ opc, opc_rr, subopc_imm, opc_imma, opc_cl, opc_imm, subopc;    in emit_AMD64Instr() local
  2683  opc_cl = opc_imm = subopc = 0;    in emit_AMD64Instr()
  2685  case Ash_SHR: opc_cl = 0xD3; opc_imm = 0xC1; subopc = 5; break;    in emit_AMD64Instr()
  2686  case Ash_SAR: opc_cl = 0xD3; opc_imm = 0xC1; subopc = 7; break;    in emit_AMD64Instr()
  2687  case Ash_SHL: opc_cl = 0xD3; opc_imm = 0xC1; subopc = 4; break;    in emit_AMD64Instr()
  2693  p = doAMode_R_enc_reg(p, subopc, i->Ain.Sh64.dst);    in emit_AMD64Instr()
  2698  p = doAMode_R_enc_reg(p, subopc, i->Ain.Sh64.dst);    in emit_AMD64Instr()
  2797  subopc = i->Ain.MulL.syned ? 5 : 4;    in emit_AMD64Instr()
  2802  p = doAMode_M_enc(p, subopc, i->Ain.MulL.src->Arm.Mem.am);    in emit_AMD64Instr()
  2807  p = doAMode_R_enc_reg(p, subopc, i->Ain.MulL.src->Arm.Reg.reg);    in emit_AMD64Instr()
  [all …]

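Note (editorial): the Sh64 cases mirror the 32-bit backend above; the MulL case picks between the group-3 sub-opcodes of opcode 0xF7, /4 = MUL (unsigned) vs /5 = IMUL (signed), which is what "subopc = i->Ain.MulL.syned ? 5 : 4" selects. A tiny sketch of the two encodings, assuming a register source and REX.W for 64-bit width (hypothetical code, not VEX's emitters):

/* Sketch only: MUL and IMUL differ purely in the ModRM reg-field
   sub-opcode under opcode 0xF7.  Prints "48 f7 e1" (mul rcx) and
   "48 f7 e9" (imul rcx). */
#include <stdio.h>

int main(void)
{
   unsigned rcx = 1;
   for (unsigned subopc = 4; subopc <= 5; subopc++) {
      unsigned char rex   = 0x48;                               /* REX.W */
      unsigned char opc   = 0xF7;
      unsigned char modrm = (unsigned char)(0xC0 | (subopc << 3) | rcx);
      printf("%02x %02x %02x  ; %s rcx\n", rex, opc, modrm,
             subopc == 4 ? "mul" : "imul");
   }
   return 0;
}
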
guest_x86_toIR.c
  6093  UChar byte2, subopc;    in dis_MMX() local
  6097  subopc = toUChar( (byte2 >> 3) & 7 );    in dis_MMX()
  6103  if (subopc == 2 /*SRL*/ && opc == 0x71)    in dis_MMX()
  6105  else if (subopc == 2 /*SRL*/ && opc == 0x72)    in dis_MMX()
  6107  else if (subopc == 2 /*SRL*/ && opc == 0x73)    in dis_MMX()
  6110  else if (subopc == 4 /*SAR*/ && opc == 0x71)    in dis_MMX()
  6112  else if (subopc == 4 /*SAR*/ && opc == 0x72)    in dis_MMX()
  6115  else if (subopc == 6 /*SHL*/ && opc == 0x71)    in dis_MMX()
  6117  else if (subopc == 6 /*SHL*/ && opc == 0x72)    in dis_MMX()
  6119  else if (subopc == 6 /*SHL*/ && opc == 0x73)    in dis_MMX()

guest_amd64_toIR.c
  7765  UChar byte2, subopc;    in dis_MMX() local
  7769  subopc = toUChar( (byte2 >> 3) & 7 );    in dis_MMX()
  7775  if (subopc == 2 /*SRL*/ && opc == 0x71)    in dis_MMX()
  7777  else if (subopc == 2 /*SRL*/ && opc == 0x72)    in dis_MMX()
  7779  else if (subopc == 2 /*SRL*/ && opc == 0x73)    in dis_MMX()
  7782  else if (subopc == 4 /*SAR*/ && opc == 0x71)    in dis_MMX()
  7784  else if (subopc == 4 /*SAR*/ && opc == 0x72)    in dis_MMX()
  7787  else if (subopc == 6 /*SHL*/ && opc == 0x71)    in dis_MMX()
  7789  else if (subopc == 6 /*SHL*/ && opc == 0x72)    in dis_MMX()
  7791  else if (subopc == 6 /*SHL*/ && opc == 0x73)    in dis_MMX()

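Note (editorial): both guest front ends (the x86 and amd64 entries above) decode the MMX shift-by-immediate group the same way: byte2 is the modrm byte of 0F 71/72/73, so (byte2 >> 3) & 7 recovers the reg-field sub-opcode (2 = logical right, 4 = arithmetic right, 6 = left), while the low opcode byte picks the element size (0x71 = 16-bit, 0x72 = 32-bit, 0x73 = 64-bit). A small decoder sketch along those lines (hypothetical helper name, not VEX code):

/* Sketch only: map (opcode byte, modrm byte) of the MMX shift-by-immediate
   group 0F 71/72/73 /r ib to a mnemonic, dispatching on the same sub-opcode
   that dis_MMX extracts above. */
#include <stdio.h>

static const char *mmx_shift_name(unsigned char opc, unsigned char modrm)
{
   unsigned subopc = (modrm >> 3) & 7;
   switch (subopc) {
      case 2: return opc == 0x71 ? "psrlw" : opc == 0x72 ? "psrld" : "psrlq";
      case 4: return opc == 0x71 ? "psraw" : opc == 0x72 ? "psrad" : "???";
      case 6: return opc == 0x71 ? "psllw" : opc == 0x72 ? "pslld" : "psllq";
      default: return "???";   /* other sub-opcodes are not MMX shifts */
   }
}

int main(void)
{
   /* 0f 71 d0 04  ->  psrlw $4, %mm0   (modrm 0xd0: mod=11, reg=2, rm=0) */
   printf("%s\n", mmx_shift_name(0x71, 0xD0));
   return 0;
}
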
guest_arm_toIR.c
  17733  UInt subopc = INSN(27,20) & BITS8(0,0,0,0,0, 1,1,1);    in disInstr_ARM_WRK() local
  17734  if (subopc != BITS4(0,0,0,1) && subopc != BITS4(0,1,0,1)) {    in disInstr_ARM_WRK()
  17744  switch (subopc) {    in disInstr_ARM_WRK()

guest_arm64_toIR.c
  2557  UInt subopc = INSN(30,29);    in dis_ARM64_data_processing_immediate() local
  2561  if (subopc == BITS2(0,1) || (!is64 && hw >= 2)) {    in dis_ARM64_data_processing_immediate()
  2567  switch (subopc) {    in dis_ARM64_data_processing_immediate()

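Note (editorial): in the arm64 guest, INSN(30,29) is the opc field of the "move wide immediate" group, so subopc distinguishes MOVN (00), MOVZ (10) and MOVK (11), with 01 unallocated (the value the excerpt rejects); the (!is64 && hw >= 2) test likewise rejects 32/48-bit shifts in the 32-bit variant. A minimal decoder sketch under those assumptions (hypothetical helper, not VEX code):

/* Sketch only: classify an AArch64 "move wide immediate" instruction.
   Bits 30:29 are the sub-opcode, bit 31 selects 32/64-bit, and bits 22:21
   ("hw") give the shift in units of 16 bits. */
#include <stdio.h>
#include <stdint.h>

static const char *movw_name(uint32_t insn)
{
   unsigned subopc = (insn >> 29) & 3;
   unsigned is64   = (insn >> 31) & 1;
   unsigned hw     = (insn >> 21) & 3;
   if (subopc == 1 || (!is64 && hw >= 2))
      return "UNALLOCATED";
   switch (subopc) {
      case 0:  return "movn";
      case 2:  return "movz";
      default: return "movk";
   }
}

int main(void)
{
   printf("%s\n", movw_name(0xD2800020));  /* movz x0, #1 */
   return 0;
}
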