# CS_ARCH_ARM, CS_MODE_ARM, None
0xa0,0x0b,0x71,0xee = vadd.f64 d16, d17, d16
0x80,0x0a,0x30,0xee = vadd.f32 s0, s1, s0
0xe0,0x0b,0x71,0xee = vsub.f64 d16, d17, d16
0xc0,0x0a,0x30,0xee = vsub.f32 s0, s1, s0
0xa0,0x0b,0xc1,0xee = vdiv.f64 d16, d17, d16
0x80,0x0a,0x80,0xee = vdiv.f32 s0, s1, s0
0xa3,0x2a,0xc2,0xee = vdiv.f32 s5, s5, s7
0x07,0x5b,0x85,0xee = vdiv.f64 d5, d5, d7
0xa0,0x0b,0x61,0xee = vmul.f64 d16, d17, d16
0xa1,0x4b,0x64,0xee = vmul.f64 d20, d20, d17
0x80,0x0a,0x20,0xee = vmul.f32 s0, s1, s0
0xaa,0x5a,0x65,0xee = vmul.f32 s11, s11, s21
0xe0,0x0b,0x61,0xee = vnmul.f64 d16, d17, d16
0xc0,0x0a,0x20,0xee = vnmul.f32 s0, s1, s0
0xe0,0x1b,0xf4,0xee = vcmpe.f64 d17, d16
0xc0,0x0a,0xf4,0xee = vcmpe.f32 s1, s0
0xc0,0x0b,0xf5,0xee = vcmpe.f64 d16, #0
0xc0,0x0a,0xb5,0xee = vcmpe.f32 s0, #0
0xe0,0x0b,0xf0,0xee = vabs.f64 d16, d16
0xc0,0x0a,0xb0,0xee = vabs.f32 s0, s0
0xe0,0x0b,0xb7,0xee = vcvt.f32.f64 s0, d16
0xc0,0x0a,0xf7,0xee = vcvt.f64.f32 d16, s0
0x60,0x0b,0xf1,0xee = vneg.f64 d16, d16
0x40,0x0a,0xb1,0xee = vneg.f32 s0, s0
0xe0,0x0b,0xf1,0xee = vsqrt.f64 d16, d16
0xc0,0x0a,0xb1,0xee = vsqrt.f32 s0, s0
0xc0,0x0b,0xf8,0xee = vcvt.f64.s32 d16, s0
0xc0,0x0a,0xb8,0xee = vcvt.f32.s32 s0, s0
0x40,0x0b,0xf8,0xee = vcvt.f64.u32 d16, s0
0x40,0x0a,0xb8,0xee = vcvt.f32.u32 s0, s0
0xe0,0x0b,0xbd,0xee = vcvt.s32.f64 s0, d16
0xc0,0x0a,0xbd,0xee = vcvt.s32.f32 s0, s0
0xe0,0x0b,0xbc,0xee = vcvt.u32.f64 s0, d16
0xc0,0x0a,0xbc,0xee = vcvt.u32.f32 s0, s0
0xa1,0x0b,0x42,0xee = vmla.f64 d16, d18, d17
0x00,0x0a,0x41,0xee = vmla.f32 s1, s2, s0
0xe1,0x0b,0x42,0xee = vmls.f64 d16, d18, d17
0x40,0x0a,0x41,0xee = vmls.f32 s1, s2, s0
0xe1,0x0b,0x52,0xee = vnmla.f64 d16, d18, d17
0x40,0x0a,0x51,0xee = vnmla.f32 s1, s2, s0
0xa1,0x0b,0x52,0xee = vnmls.f64 d16, d18, d17
0x00,0x0a,0x51,0xee = vnmls.f32 s1, s2, s0
0x10,0xfa,0xf1,0xee = vmrs APSR_nzcv, fpscr
0x10,0xfa,0xf1,0xee = vmrs APSR_nzcv, fpscr
0x10,0xfa,0xf1,0xee = vmrs APSR_nzcv, fpscr
0x10,0x2a,0xf0,0xee = vmrs r2, fpsid
0x10,0x3a,0xf0,0xee = vmrs r3, fpsid
0x10,0x4a,0xf7,0xee = vmrs r4, mvfr0
0x10,0x5a,0xf6,0xee = vmrs r5, mvfr1
0x60,0x0b,0xf1,0x1e = vnegne.f64 d16, d16
0x10,0x0a,0x00,0x1e = vmovne s0, r0
0x10,0x1a,0x00,0x0e = vmoveq s0, r1
0x10,0x1a,0x11,0xee = vmov r1, s2
0x10,0x3a,0x02,0xee = vmov s4, r3
0x12,0x1b,0x55,0xec = vmov r1, r5, d2
0x14,0x3b,0x49,0xec = vmov d4, r3, r9
0x10,0x0a,0xf1,0xee = vmrs r0, fpscr
0x10,0x0a,0xf8,0xee = vmrs r0, fpexc
0x10,0x0a,0xf0,0xee = vmrs r0, fpsid
0x10,0x1a,0xf9,0xee = vmrs r1, fpinst
0x10,0x8a,0xfa,0xee = vmrs r8, fpinst2
0x10,0x0a,0xe1,0xee = vmsr fpscr, r0
0x10,0x0a,0xe8,0xee = vmsr fpexc, r0
0x10,0x0a,0xe0,0xee = vmsr fpsid, r0
0x10,0x3a,0xe9,0xee = vmsr fpinst, r3
0x10,0x4a,0xea,0xee = vmsr fpinst2, r4
0x08,0x0b,0xf0,0xee = vmov.f64 d16, #3.000000e+00
0x08,0x0a,0xb0,0xee = vmov.f32 s0, #3.000000e+00
0x08,0x0b,0xf8,0xee = vmov.f64 d16, #-3.000000e+00
0x08,0x0a,0xb8,0xee = vmov.f32 s0, #-3.000000e+00
0x10,0x0a,0x00,0xee = vmov s0, r0
0x90,0x1a,0x00,0xee = vmov s1, r1
0x10,0x2a,0x01,0xee = vmov s2, r2
0x90,0x3a,0x01,0xee = vmov s3, r3
0x10,0x0a,0x10,0xee = vmov r0, s0
0x90,0x1a,0x10,0xee = vmov r1, s1
0x10,0x2a,0x11,0xee = vmov r2, s2
0x90,0x3a,0x11,0xee = vmov r3, s3
0x30,0x0b,0x51,0xec = vmov r0, r1, d16
0x31,0x1a,0x42,0xec = vmov s3, s4, r1, r2
0x11,0x1a,0x42,0xec = vmov s2, s3, r1, r2
0x31,0x1a,0x52,0xec = vmov r1, r2, s3, s4
0x11,0x1a,0x52,0xec = vmov r1, r2, s2, s3
0x1f,0x1b,0x42,0xec = vmov d15, r1, r2
0x30,0x1b,0x42,0xec = vmov d16, r1, r2
0x1f,0x1b,0x52,0xec = vmov r1, r2, d15
0x30,0x1b,0x52,0xec = vmov r1, r2, d16
0x00,0x1b,0xd0,0xed = vldr d17, [r0]
0x00,0x0a,0x9e,0xed = vldr s0, [lr]
0x00,0x0b,0x9e,0xed = vldr d0, [lr]
0x08,0x1b,0x92,0xed = vldr d1, [r2, #32]
0x08,0x1b,0x12,0xed = vldr d1, [r2, #-32]
0x00,0x2b,0x93,0xed = vldr d2, [r3]
0x00,0x3b,0x9f,0xed = vldr d3, [pc]
0x00,0x3b,0x9f,0xed = vldr d3, [pc]
0x00,0x3b,0x1f,0xed = vldr d3, [pc, #-0]
0x00,0x6a,0xd0,0xed = vldr s13, [r0]
0x08,0x0a,0xd2,0xed = vldr s1, [r2, #32]
0x08,0x0a,0x52,0xed = vldr s1, [r2, #-32]
0x00,0x1a,0x93,0xed = vldr s2, [r3]
0x00,0x2a,0xdf,0xed = vldr s5, [pc]
0x00,0x2a,0xdf,0xed = vldr s5, [pc]
0x00,0x2a,0x5f,0xed = vldr s5, [pc, #-0]
0x00,0x4b,0x81,0xed = vstr d4, [r1]
0x06,0x4b,0x81,0xed = vstr d4, [r1, #24]
0x06,0x4b,0x01,0xed = vstr d4, [r1, #-24]
0x00,0x0a,0x8e,0xed = vstr s0, [lr]
0x00,0x0b,0x8e,0xed = vstr d0, [lr]
0x00,0x2a,0x81,0xed = vstr s4, [r1]
0x06,0x2a,0x81,0xed = vstr s4, [r1, #24]
0x06,0x2a,0x01,0xed = vstr s4, [r1, #-24]
0x0c,0x2b,0x91,0xec = vldmia r1, {d2, d3, d4, d5, d6, d7}
0x06,0x1a,0x91,0xec = vldmia r1, {s2, s3, s4, s5, s6, s7}
0x0c,0x2b,0x81,0xec = vstmia r1, {d2, d3, d4, d5, d6, d7}
0x06,0x1a,0x81,0xec = vstmia r1, {s2, s3, s4, s5, s6, s7}
0x10,0x8b,0x2d,0xed = vpush {d8, d9, d10, d11, d12, d13, d14, d15}
0x07,0x0b,0xb5,0xec = fldmiax r5!, {d0, d1, d2}
0x05,0x4b,0x90,0x0c = fldmiaxeq r0, {d4, d5}
0x07,0x4b,0x35,0x1d = fldmdbxne r5!, {d4, d5, d6}
0x11,0x0b,0xa5,0xec = fstmiax r5!, {d0, d1, d2, d3, d4, d5, d6, d7}
0x05,0x8b,0x84,0x0c = fstmiaxeq r4, {d8, d9}
0x07,0x2b,0x27,0x1d = fstmdbxne r7!, {d2, d3, d4}
0x40,0x0b,0xbd,0xee = vcvtr.s32.f64 s0, d0
0x60,0x0a,0xbd,0xee = vcvtr.s32.f32 s0, s1
0x40,0x0b,0xbc,0xee = vcvtr.u32.f64 s0, d0
0x60,0x0a,0xbc,0xee = vcvtr.u32.f32 s0, s1
0x90,0x8a,0x00,0xee = vmov s1, r8
0x10,0x4a,0x01,0xee = vmov s2, r4
0x90,0x6a,0x01,0xee = vmov s3, r6
0x10,0x1a,0x02,0xee = vmov s4, r1
0x90,0x2a,0x02,0xee = vmov s5, r2
0x10,0x3a,0x03,0xee = vmov s6, r3
0x10,0x1a,0x14,0xee = vmov r1, s8
0x10,0x2a,0x12,0xee = vmov r2, s4
0x10,0x3a,0x13,0xee = vmov r3, s6
0x90,0x4a,0x10,0xee = vmov r4, s1
0x10,0x5a,0x11,0xee = vmov r5, s2
0x90,0x6a,0x11,0xee = vmov r6, s3
0xc6,0x0a,0xbb,0xee = vcvt.f32.u32 s0, s0, #20
0xc0,0x0b,0xba,0xee = vcvt.f64.s32 d0, d0, #32
0x67,0x0a,0xbb,0xee = vcvt.f32.u16 s0, s0, #1
0x40,0x0b,0xba,0xee = vcvt.f64.s16 d0, d0, #16
0xc6,0x0a,0xfa,0xee = vcvt.f32.s32 s1, s1, #20
0xc0,0x4b,0xfb,0xee = vcvt.f64.u32 d20, d20, #32
0x67,0x8a,0xfa,0xee = vcvt.f32.s16 s17, s17, #1
0x40,0x7b,0xfb,0xee = vcvt.f64.u16 d23, d23, #16
0xc6,0x6a,0xbf,0xee = vcvt.u32.f32 s12, s12, #20
0xc0,0x2b,0xbe,0xee = vcvt.s32.f64 d2, d2, #32
0x67,0xea,0xbf,0xee = vcvt.u16.f32 s28, s28, #1
0x40,0xfb,0xbe,0xee = vcvt.s16.f64 d15, d15, #16
0xc6,0x0a,0xfe,0xee = vcvt.s32.f32 s1, s1, #20
0xc0,0x4b,0xff,0xee = vcvt.u32.f64 d20, d20, #32
0x67,0x8a,0xfe,0xee = vcvt.s16.f32 s17, s17, #1
0x40,0x7b,0xff,0xee = vcvt.u16.f64 d23, d23, #16
0x10,0x40,0x80,0xf2 = vmov.i32 d4, #0x0
0x12,0x46,0x84,0xf2 = vmov.i32 d4, #0x42000000