/external/valgrind/main/VEX/priv/: occurrences of vassert(), grouped by source file (per-file listings are truncated, marked "[all …]").

main_main.c
   128  vassert(!vex_initdone);  in LibVEX_Init()
   129  vassert(failure_exit);  in LibVEX_Init()
   130  vassert(log_bytes);  in LibVEX_Init()
   131  vassert(debuglevel >= 0);  in LibVEX_Init()
   133  vassert(vcon->iropt_verbosity >= 0);  in LibVEX_Init()
   134  vassert(vcon->iropt_level >= 0);  in LibVEX_Init()
   135  vassert(vcon->iropt_level <= 2);  in LibVEX_Init()
   136  vassert(vcon->iropt_unroll_thresh >= 0);  in LibVEX_Init()
   137  vassert(vcon->iropt_unroll_thresh <= 400);  in LibVEX_Init()
   138  vassert(vcon->guest_max_insns >= 1);  in LibVEX_Init()
   [all …]

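The checks above show LibVEX_Init() refusing out-of-range tuning parameters before anything else runs. Below is a minimal sketch of the same validate-at-init pattern; the MyControl struct and my_assert macro are hypothetical stand-ins for VEX's VexControl and vassert, with the bounds taken from the listing.

   #include <stdio.h>
   #include <stdlib.h>

   /* Hypothetical stand-in for VEX's vassert/vpanic machinery. */
   #define my_assert(expr) \
      do { if (!(expr)) { fprintf(stderr, "assertion failed: %s\n", #expr); \
                          abort(); } } while (0)

   typedef struct {
      int iropt_verbosity;     /* >= 0          */
      int iropt_level;         /* 0 .. 2        */
      int iropt_unroll_thresh; /* 0 .. 400      */
      int guest_max_insns;     /* >= 1          */
   } MyControl;

   /* Validate caller-supplied configuration once, at init time, so later
      passes can rely on the ranges without re-checking them. */
   static void my_init(const MyControl* con)
   {
      my_assert(con != NULL);
      my_assert(con->iropt_verbosity >= 0);
      my_assert(con->iropt_level >= 0 && con->iropt_level <= 2);
      my_assert(con->iropt_unroll_thresh >= 0 && con->iropt_unroll_thresh <= 400);
      my_assert(con->guest_max_insns >= 1);
   }

   int main(void)
   {
      MyControl con = { 0, 2, 120, 50 };
      my_init(&con);
      puts("config accepted");
      return 0;
   }
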
host_generic_reg_alloc2.c
   188  vassert(search_from_instr >= 0);  in findMostDistantlyMentionedVReg()
   192  vassert(state[k].disp == Bound);  in findMostDistantlyMentionedVReg()
   212  vassert(0 == ((UShort)vreg->spill_offset % 16)); break;  in sanity_check_spill_offset()
   214  vassert(0 == ((UShort)vreg->spill_offset % 8)); break;  in sanity_check_spill_offset()
   227  vassert(used == *size);  in ensureRRLRspace()
   249  vassert(size >= 0);  in sortRRLRarray()
   402  vassert(0 == (guest_sizeB % 16));  in doRegisterAllocation()
   403  vassert(0 == (LibVEX_N_SPILL_BYTES % 16));  in doRegisterAllocation()
   404  vassert(0 == (N_SPILL64S % 2));  in doRegisterAllocation()
   409  vassert(instrs_in->arr_used <= 15000);  in doRegisterAllocation()
   [all …]

main_util.c
   74  vassert(temporary_first == &temporary[0]);  in vexAllocSanityCheck()
   75  vassert(temporary_last == &temporary[N_TEMPORARY_BYTES-1]);  in vexAllocSanityCheck()
   76  vassert(permanent_first == &permanent[0]);  in vexAllocSanityCheck()
   77  vassert(permanent_last == &permanent[N_PERMANENT_BYTES-1]);  in vexAllocSanityCheck()
   78  vassert(temporary_first <= temporary_curr);  in vexAllocSanityCheck()
   79  vassert(temporary_curr <= temporary_last);  in vexAllocSanityCheck()
   80  vassert(permanent_first <= permanent_curr);  in vexAllocSanityCheck()
   81  vassert(permanent_curr <= permanent_last);  in vexAllocSanityCheck()
   82  vassert(private_LibVEX_alloc_first <= private_LibVEX_alloc_curr);  in vexAllocSanityCheck()
   83  vassert(private_LibVEX_alloc_curr <= private_LibVEX_alloc_last);  in vexAllocSanityCheck()
   [all …]

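vexAllocSanityCheck() verifies that the arena bookkeeping pointers still bracket their arrays and stay ordered first <= curr <= last. A toy bump allocator with the same kind of sanity check is sketched below; all names and sizes are illustrative, not VEX's.

   #include <assert.h>
   #include <stddef.h>
   #include <stdio.h>

   #define ARENA_BYTES 1024

   /* A toy bump allocator with the same first/curr/last bookkeeping that
      the listed sanity check guards. */
   static char  arena[ARENA_BYTES];
   static char* arena_first = arena;
   static char* arena_curr  = arena;
   static char* arena_last  = arena + ARENA_BYTES - 1;

   static void arena_sanity_check(void)
   {
      assert(arena_first == &arena[0]);
      assert(arena_last  == &arena[ARENA_BYTES - 1]);
      assert(arena_first <= arena_curr);
      assert(arena_curr  <= arena_last);
   }

   static void* arena_alloc(size_t n)
   {
      arena_sanity_check();
      assert(arena_curr + n <= arena_last + 1);   /* request must still fit */
      void* p = arena_curr;
      arena_curr += n;
      return p;
   }

   int main(void)
   {
      char* p = arena_alloc(16);
      printf("allocated 16 bytes at %p\n", (void*)p);
      return 0;
   }
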
host_arm64_defs.c
   61  vassert(r >= 0 && r < 31);  in ppHRegARM64()
   66  vassert(r >= 0 && r < 32);  in ppHRegARM64()
   71  vassert(r >= 0 && r < 32);  in ppHRegARM64()
   199  vassert(i == *nregs);  in getAllocableRegs_ARM64()
   235  vassert(-256 <= simm9 && simm9 <= 255);  in ARM64AMode_RI9()
   245  vassert(uimm12 >= 0 && uimm12 <= 4095);  in ARM64AMode_RI12()
   248  default: vassert(0);  in ARM64AMode_RI12()
   282  vassert(0);  in ppARM64AMode()
   470  vassert(imm12 < 4096);  in ARM64RIA_I12()
   471  vassert(shift == 0 || shift == 12);  in ARM64RIA_I12()
   [all …]

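The ARM64AMode_RI9() and ARM64RIA_I12() constructors assert that an immediate actually fits its instruction encoding (signed 9-bit, unsigned 12-bit, shift of 0 or 12). A sketch of such a constructor-time range check follows, using a made-up record type rather than the real ARM64AMode.

   #include <assert.h>
   #include <stdio.h>

   /* Hypothetical addressing-mode record; the real ARM64AMode carries more. */
   typedef struct { int reg; int simm9; } AModeRI9;

   /* Refuse immediates a signed 9-bit offset field cannot hold: -256 .. 255. */
   static AModeRI9 mk_AModeRI9(int reg, int simm9)
   {
      assert(-256 <= simm9 && simm9 <= 255);
      AModeRI9 am = { reg, simm9 };
      return am;
   }

   int main(void)
   {
      AModeRI9 am = mk_AModeRI9(21, -8);
      printf("x%d, #%d\n", am.reg, am.simm9);
      return 0;
   }
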
host_arm64_isel.c
   134  vassert(tmp >= 0);  in lookupIRTemp()
   135  vassert(tmp < env->n_vregmap);  in lookupIRTemp()
   265  vassert(off < (8 << 12)); /* otherwise it's unrepresentable */  in mk_baseblock_64bit_access_amode()
   266  vassert((off & 7) == 0); /* ditto */  in mk_baseblock_64bit_access_amode()
   273  vassert(off < (4 << 12)); /* otherwise it's unrepresentable */  in mk_baseblock_32bit_access_amode()
   274  vassert((off & 3) == 0); /* ditto */  in mk_baseblock_32bit_access_amode()
   281  vassert(off < (2 << 12)); /* otherwise it's unrepresentable */  in mk_baseblock_16bit_access_amode()
   282  vassert((off & 1) == 0); /* ditto */  in mk_baseblock_16bit_access_amode()
   289  vassert(off < (1 << 12)); /* otherwise it's unrepresentable */  in mk_baseblock_8bit_access_amode()
   295  vassert(off < (1<<12));  in mk_baseblock_128bit_access_addr()
   [all …]

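mk_baseblock_64bit_access_amode() requires the guest-state offset to be 8-aligned and below 8 << 12, i.e. expressible as a scaled unsigned 12-bit offset. A small sketch of that representability test; the function name and interface are assumptions.

   #include <assert.h>
   #include <stdio.h>

   /* Is `off` usable as a scaled, unsigned 12-bit offset for an access of
      `szB` bytes (szB = 1, 2, 4 or 8)?  The encoded field stores off / szB. */
   static int representable_as_scaled_uimm12(unsigned off, unsigned szB)
   {
      assert(szB == 1 || szB == 2 || szB == 4 || szB == 8);
      return (off % szB) == 0 && (off / szB) < 4096;
   }

   int main(void)
   {
      printf("%d\n", representable_as_scaled_uimm12(32760, 8)); /* 1: 4095*8    */
      printf("%d\n", representable_as_scaled_uimm12(32768, 8)); /* 0: too big   */
      printf("%d\n", representable_as_scaled_uimm12(12, 8));    /* 0: misaligned */
      return 0;
   }
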
host_ppc_defs.c
   68  vassert(r >= 0 && r < 32);  in ppHRegPPC()
   73  vassert(r >= 0 && r < 32);  in ppHRegPPC()
   78  vassert(r >= 0 && r < 32);  in ppHRegPPC()
   83  vassert(r >= 0 && r < 32);  in ppHRegPPC()
   270  vassert(i == *nregs);  in getAllocableRegs_PPC()
   302  vassert(flag == Pcf_NONE);  in mk_PPCCondCode()
   304  vassert(flag != Pcf_NONE);  in mk_PPCCondCode()
   312  vassert(ct != Pct_ALWAYS);  in invertCondTest()
   321  vassert(idx >= -0x8000 && idx < 0x8000);  in PPCAMode_IR()
   404  vassert(imm16 != 0x8000);  in PPCRH_Imm()
   [all …]

host_s390_defs.c
   102  vassert(r < 16);  in s390_hreg_as_string()
   210  vassert(fits_unsigned_12bit(d));  in s390_amode_b12()
   227  vassert(fits_signed_20bit(d));  in s390_amode_b20()
   244  vassert(fits_unsigned_12bit(d));  in s390_amode_bx12()
   245  vassert(hregNumber(b) != 0);  in s390_amode_bx12()
   246  vassert(hregNumber(x) != 0);  in s390_amode_bx12()
   263  vassert(fits_signed_20bit(d));  in s390_amode_bx20()
   264  vassert(hregNumber(b) != 0);  in s390_amode_bx20()
   265  vassert(hregNumber(x) != 0);  in s390_amode_bx20()
   467  vassert(offsetB >= 0);  in genSpill_S390()
   [all …]

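The s390 amode constructors gate displacements through fits_unsigned_12bit()/fits_signed_20bit() before building a b12/b20 mode. One plausible way to write such predicates is shown below; the names mirror the listing, but the bodies are an assumption rather than the actual VEX source.

   #include <assert.h>

   /* Does d fit in an unsigned 12-bit field?  0 .. 4095. */
   static int fits_unsigned_12bit(long d)
   {
      return d >= 0 && d <= 0xFFF;
   }

   /* Does d fit in a signed 20-bit field?  -2^19 .. 2^19 - 1. */
   static int fits_signed_20bit(long d)
   {
      return d >= -(1L << 19) && d <= (1L << 19) - 1;
   }

   int main(void)
   {
      assert( fits_unsigned_12bit(4095));
      assert(!fits_unsigned_12bit(4096));
      assert( fits_signed_20bit(-524288));
      assert(!fits_signed_20bit( 524288));
      return 0;
   }
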
host_mips_defs.c
   83  vassert(hregClass(reg) == HRcInt32 || hregClass(reg) == HRcInt64 ||  in ppHRegMIPS()
   90  vassert(r >= 0 && r < 32);  in ppHRegMIPS()
   95  vassert (r >= 0 && r < 32);  in ppHRegMIPS()
   100  vassert(r >= 0 && r < 32);  in ppHRegMIPS()
   105  vassert(r >= 0 && r < 32);  in ppHRegMIPS()
   610  vassert(i == *nregs);  in getAllocableRegs_MIPS()
   1014  vassert(imm16 != 0x8000);  in MIPSRH_Imm()
   1015  vassert(syned == True || syned == False);  in MIPSRH_Imm()
   1122  vassert(immR == False); /*there's no nor with an immediate operand!? */  in showMIPSAluOp()
   1305  vassert(0 == (argiregs & ~mask));  in MIPSInstr_Call()
   [all …]

guest_generic_bb_to_IR.c
   222  vassert(sizeof(HWord) == sizeof(void*));  in bb_to_IR()
   223  vassert(vex_control.guest_max_insns >= 1);  in bb_to_IR()
   224  vassert(vex_control.guest_max_insns < 100);  in bb_to_IR()
   225  vassert(vex_control.guest_chase_thresh >= 0);  in bb_to_IR()
   226  vassert(vex_control.guest_chase_thresh < vex_control.guest_max_insns);  in bb_to_IR()
   227  vassert(guest_word_type == Ity_I32 || guest_word_type == Ity_I64);  in bb_to_IR()
   230  vassert(szB_GUEST_IP == 4);  in bb_to_IR()
   231  vassert((offB_GUEST_IP % 4) == 0);  in bb_to_IR()
   233  vassert(szB_GUEST_IP == 8);  in bb_to_IR()
   234  vassert((offB_GUEST_IP % 8) == 0);  in bb_to_IR()
   [all …]

host_arm_isel.c
   131  vassert(tmp >= 0);  in lookupIRTemp()
   132  vassert(tmp < env->n_vregmap);  in lookupIRTemp()
   138  vassert(tmp >= 0);  in lookupIRTemp64()
   139  vassert(tmp < env->n_vregmap);  in lookupIRTemp64()
   140  vassert(! hregIsInvalid(env->vregmapHI[tmp]));  in lookupIRTemp64()
   265  vassert(sh >= 0 && sh < 32);  in ROR32()
   285  vassert(i == 16);  in fitsIn8x4()
   292  vassert(hregClass(src) == HRcInt32);  in mk_iMOVds_RR()
   293  vassert(hregClass(dst) == HRcInt32);  in mk_iMOVds_RR()
   401  vassert(ARM_N_ARGREGS == 4);  in doHelperCall()
   [all …]

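lookupIRTemp() and friends, which recur across all the isel files in this listing, only bounds-check an IRTemp index into the vregmap arrays and insist the mapped register is valid. A minimal sketch of that guarded lookup, with simplified types in place of HReg and the instruction-selection environment:

   #include <assert.h>
   #include <stdio.h>

   typedef int HRegLike;                 /* stand-in for VEX's HReg */
   #define INVALID_HREG (-1)

   typedef struct {
      HRegLike* vregmap;
      int       n_vregmap;
   } Env;

   /* Map an IR temporary number to its pre-assigned virtual register,
      refusing out-of-range temps and unmapped slots. */
   static HRegLike lookup_temp(const Env* env, int tmp)
   {
      assert(tmp >= 0);
      assert(tmp < env->n_vregmap);
      assert(env->vregmap[tmp] != INVALID_HREG);
      return env->vregmap[tmp];
   }

   int main(void)
   {
      HRegLike map[3] = { 7, 8, 9 };
      Env env = { map, 3 };
      printf("t1 -> vreg %d\n", lookup_temp(&env, 1));
      return 0;
   }
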
host_mips_isel.c
   135  vassert(tmp >= 0);  in lookupIRTemp()
   136  vassert(tmp < env->n_vregmap);  in lookupIRTemp()
   142  vassert(tmp >= 0);  in lookupIRTemp64()
   143  vassert(tmp < env->n_vregmap);  in lookupIRTemp64()
   144  vassert(! hregIsInvalid(env->vregmapHI[tmp]));  in lookupIRTemp64()
   152  vassert(env->mode64);  in lookupIRTempPair()
   153  vassert(tmp >= 0);  in lookupIRTempPair()
   154  vassert(tmp < env->n_vregmap);  in lookupIRTempPair()
   155  vassert(! hregIsInvalid(env->vregmapHI[tmp]));  in lookupIRTempPair()
   195  vassert(n < 256 && (n % 8) == 0);  in add_to_sp()
   [all …]

ir_opt.c
   254  vassert(h->used < h->size);  in addToHHW()
   486  vassert(d2->mAddr == NULL);  in flatten_Stmt()
   560  vassert((*minoff & ~0xFFFF) == 0);  in getArrayBounds()
   561  vassert((*maxoff & ~0xFFFF) == 0);  in getArrayBounds()
   562  vassert(*minoff <= *maxoff);  in getArrayBounds()
   572  vassert((minoff & ~0xFFFF) == 0);  in mk_key_GetPut()
   573  vassert((maxoff & ~0xFFFF) == 0);  in mk_key_GetPut()
   581  vassert((minoff & ~0xFFFF) == 0);  in mk_key_GetIPutI()
   582  vassert((maxoff & ~0xFFFF) == 0);  in mk_key_GetIPutI()
   594  vassert(k_lo <= k_hi);  in invalidateOverlaps()
   [all …]

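mk_key_GetPut() insists both offsets fit in 16 bits before they are combined into a single lookup key. A hedged sketch of packing a (minoff, maxoff) pair into one 32-bit key follows; the exact layout ir_opt.c uses is not visible in the listing, so treat the packing below as an assumption for illustration.

   #include <assert.h>
   #include <stdint.h>
   #include <stdio.h>

   /* Pack two byte offsets into one 32-bit key: high half = minoff,
      low half = maxoff.  Both must fit in 16 bits, and the range must
      be non-empty.  (Assumed layout, not necessarily VEX's.) */
   static uint32_t mk_key(uint32_t minoff, uint32_t maxoff)
   {
      assert((minoff & ~0xFFFFu) == 0);
      assert((maxoff & ~0xFFFFu) == 0);
      assert(minoff <= maxoff);
      return (minoff << 16) | maxoff;
   }

   int main(void)
   {
      uint32_t k = mk_key(16, 23);            /* an 8-byte guest-state slot */
      printf("key = 0x%08x\n", (unsigned)k);
      printf("min = %u, max = %u\n", (unsigned)(k >> 16), (unsigned)(k & 0xFFFF));
      return 0;
   }
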
host_amd64_defs.c
   62  vassert(r >= 0 && r < 16);  in ppHRegAMD64()
   67  vassert(r >= 0 && r < 6);  in ppHRegAMD64()
   72  vassert(r >= 0 && r < 16);  in ppHRegAMD64()
   96  vassert(r >= 0 && r < 16);  in ppHRegAMD64_lo32()
   219  vassert(shift >= 0 && shift <= 3);  in AMD64AMode_IRRS()
   629  vassert(op != Aalu_MUL);  in AMD64Instr_Alu64M()
   670  default: vassert(0);  in AMD64Instr_Alu32R()
   687  vassert(sz == 4 || sz == 8);  in AMD64Instr_Div()
   704  vassert(regparms >= 0 && regparms <= 6);  in AMD64Instr_Call()
   705  vassert(is_sane_RetLoc(rloc));  in AMD64Instr_Call()
   [all …]

guest_arm64_toIR.c
   168  vassert(n > 1 && n < 64);  in sx_to_64()
   273  vassert(i < 256);  in mkU8()
   364  vassert(isPlausibleIRType(ty));  in newTemp()
   547  vassert(ty == Ity_I32);  in mathROR()
   550  vassert(w != 0);  in mathROR()
   551  vassert(imm < w);  in mathROR()
   570  vassert(ty == Ity_I32);  in mathREPLICATE()
   573  vassert(w != 0);  in mathREPLICATE()
   574  vassert(imm < w);  in mathREPLICATE()
   741  default: vassert(0);  in offsetIReg64()
   [all …]

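sx_to_64() accepts widths strictly between 1 and 64 and sign-extends the low n bits to a full 64-bit value. A sketch of the usual shift-up/shift-down idiom for this is below; it relies on arithmetic right shift of signed values, which is implementation-defined in C but the common behaviour, and it is not claimed to be the exact VEX code.

   #include <assert.h>
   #include <stdint.h>
   #include <stdio.h>

   /* Sign-extend the low n bits of x to 64 bits, 1 < n < 64.  Shifting the
      value up and back down as a signed quantity drags the sign bit across
      the upper bits (assumes arithmetic right shift on int64_t). */
   static uint64_t sx_to_64(uint64_t x, unsigned n)
   {
      assert(n > 1 && n < 64);
      int64_t t = (int64_t)(x << (64 - n));
      return (uint64_t)(t >> (64 - n));
   }

   int main(void)
   {
      printf("0x%016llx\n", (unsigned long long) sx_to_64(0x1FF, 9)); /* all ones */
      printf("0x%016llx\n", (unsigned long long) sx_to_64(0x0FF, 9)); /* 0xff     */
      return 0;
   }
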
host_x86_isel.c
   195  vassert(tmp >= 0);  in lookupIRTemp()
   196  vassert(tmp < env->n_vregmap);  in lookupIRTemp()
   202  vassert(tmp >= 0);  in lookupIRTemp64()
   203  vassert(tmp < env->n_vregmap);  in lookupIRTemp64()
   204  vassert(! hregIsInvalid(env->vregmapHI[tmp]));  in lookupIRTemp64()
   291  vassert(hregClass(src) == HRcInt32);  in mk_iMOVsd_RR()
   292  vassert(hregClass(dst) == HRcInt32);  in mk_iMOVsd_RR()
   301  vassert(hregClass(src) == HRcVec128);  in mk_vMOVsd_RR()
   302  vassert(hregClass(dst) == HRcVec128);  in mk_vMOVsd_RR()
   310  vassert(n > 0 && n < 256 && (n%4) == 0);  in add_to_esp()
   [all …]

host_ppc_isel.c
   304  vassert(tmp >= 0);  in lookupIRTemp()
   305  vassert(tmp < env->n_vregmap);  in lookupIRTemp()
   312  vassert(tmp >= 0);  in lookupIRTempPair()
   313  vassert(tmp < env->n_vregmap);  in lookupIRTempPair()
   314  vassert(! hregIsInvalid(env->vregmapMedLo[tmp]));  in lookupIRTempPair()
   323  vassert(!env->mode64);  in lookupIRTempQuad()
   324  vassert(tmp >= 0);  in lookupIRTempQuad()
   325  vassert(tmp < env->n_vregmap);  in lookupIRTempQuad()
   326  vassert(! hregIsInvalid(env->vregmapMedLo[tmp]));  in lookupIRTempQuad()
   482  vassert(hregClass(r_dst) == hregClass(r_src));  in mk_iMOVds_RR()
   [all …]

host_x86_defs.c
   61  vassert(r >= 0 && r < 8);  in ppHRegX86()
   66  vassert(r >= 0 && r < 6);  in ppHRegX86()
   71  vassert(r >= 0 && r < 8);  in ppHRegX86()
   175  vassert(shift >= 0 && shift <= 3);  in X86AMode_IRRS()
   579  vassert(op != Xalu_MUL);  in X86Instr_Alu32M()
   632  vassert(op == Xsh_SHL || op == Xsh_SHR);  in X86Instr_Sh3232()
   649  vassert(regparms >= 0 && regparms <= 3);  in X86Instr_Call()
   650  vassert(is_sane_RetLoc(rloc));  in X86Instr_Call()
   688  vassert(cond != Xcc_ALWAYS);  in X86Instr_CMov32()
   699  vassert(szSmall == 1 || szSmall == 2);  in X86Instr_LoadEX()
   [all …]

guest_arm64_helpers.c
   101  vassert( ((UInt)(_cc_op)) < ARM64G_CC_OP_NUMBER); \
   102  vassert( ((UInt)(_cond)) < 16); \
   161  vassert((oldC & ~1) == 0);  in arm64g_calculate_flag_n()
   171  vassert((oldC & ~1) == 0);  in arm64g_calculate_flag_n()
   181  vassert((oldC & ~1) == 0);  in arm64g_calculate_flag_n()
   191  vassert((oldC & ~1) == 0);  in arm64g_calculate_flag_n()
   279  vassert((oldC & ~1) == 0);  in arm64g_calculate_flag_z()
   289  vassert((oldC & ~1) == 0);  in arm64g_calculate_flag_z()
   299  vassert((oldC & ~1) == 0);  in arm64g_calculate_flag_z()
   309  vassert((oldC & ~1) == 0);  in arm64g_calculate_flag_z()
   [all …]

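The repeated (oldC & ~1) == 0 assertions require the saved carry to be exactly 0 or 1 before it feeds a new flag computation. A simplified sketch of an add-with-carry N-flag calculation guarded the same way is shown below; the real helpers work from the cc_dep/cc_op thunk fields, which are omitted here, so this is an illustration of the check, not VEX's flag logic.

   #include <assert.h>
   #include <stdint.h>
   #include <stdio.h>

   /* Compute the N (negative) flag for a 64-bit add-with-carry, insisting
      that the incoming carry is a single bit. */
   static uint64_t flag_n_adc64(uint64_t argL, uint64_t argR, uint64_t oldC)
   {
      assert((oldC & ~1ULL) == 0);        /* carry must be 0 or 1 */
      uint64_t res = argL + argR + oldC;
      return res >> 63;                   /* N = sign bit of the result */
   }

   int main(void)
   {
      printf("N = %llu\n",
             (unsigned long long) flag_n_adc64(0x7FFFFFFFFFFFFFFFULL, 0, 1));
      return 0;
   }
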
host_arm_defs.c
   64  vassert(r >= 0 && r < 16);  in ppHRegARM()
   69  vassert(r >= 0 && r < 32);  in ppHRegARM()
   74  vassert(r >= 0 && r < 32);  in ppHRegARM()
   79  vassert(r >= 0 && r < 16);  in ppHRegARM()
   187  vassert(i == *nregs);  in getAllocableRegs_ARM()
   224  vassert(-4095 <= simm13 && simm13 <= 4095);  in ARMAMode1_RI()
   233  vassert(0 <= shift && shift <= 3);  in ARMAMode1_RRS()
   252  vassert(0);  in ppARMAMode1()
   292  vassert(-255 <= simm9 && simm9 <= 255);  in ARMAMode2_RI()
   318  vassert(0);  in ppARMAMode2()
   [all …]

guest_arm_helpers.c
   98  vassert( ((UInt)(_cc_op)) < ARMG_CC_OP_NUMBER); \
   145  vassert((oldC & ~1) == 0);  in armg_calculate_flag_n()
   155  vassert((oldC & ~1) == 0);  in armg_calculate_flag_n()
   225  vassert((oldC & ~1) == 0);  in armg_calculate_flag_z()
   235  vassert((oldC & ~1) == 0);  in armg_calculate_flag_z()
   305  vassert((oldC & ~1) == 0);  in armg_calculate_flag_c()
   315  vassert((oldC & ~1) == 0);  in armg_calculate_flag_c()
   322  vassert((shco & ~1) == 0);  in armg_calculate_flag_c()
   329  vassert((cc_dep3 & ~3) == 0);  in armg_calculate_flag_c()
   336  vassert((cc_dep3 & ~3) == 0);  in armg_calculate_flag_c()
   [all …]

guest_arm_toIR.c
   180  do { vassert(__curr_is_Thumb); } while (0)
   183  do { vassert(! __curr_is_Thumb); } while (0)
   214  vassert(sh >= 0 && sh < 32);  in ROR32()
   289  vassert(i < 256);  in mkU8()
   364  vassert(0);  in loadGuardedLE()
   366  vassert(loaded != NULL);  in loadGuardedLE()
   380  vassert(isPlausibleIRType(ty));  in newTemp()
   394  vassert(rot >= 0 && rot < 32);  in genROR32()
   516  default: vassert(0);  in integerGuestRegOffset()
   523  vassert(iregNo < 16);  in llGetIReg()
   [all …]

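ROR32() asserts its rotate count is in 0..31; the trap is that a rotate by 0 must not turn into a shift by 32, which is undefined behaviour in C. A sketch of a rotate-right honouring those preconditions:

   #include <assert.h>
   #include <stdint.h>
   #include <stdio.h>

   /* Rotate a 32-bit value right by sh places, 0 <= sh < 32.  The sh == 0
      case is handled separately to avoid a shift by 32. */
   static uint32_t ror32(uint32_t x, unsigned sh)
   {
      assert(sh < 32);
      return sh == 0 ? x : (x >> sh) | (x << (32 - sh));
   }

   int main(void)
   {
      printf("0x%08x\n", ror32(0x80000001u, 1));   /* 0xc0000000 */
      printf("0x%08x\n", ror32(0x12345678u, 0));   /* unchanged  */
      return 0;
   }
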
host_amd64_isel.c
   170  vassert(tmp >= 0);  in lookupIRTemp()
   171  vassert(tmp < env->n_vregmap);  in lookupIRTemp()
   178  vassert(tmp >= 0);  in lookupIRTempPair()
   179  vassert(tmp < env->n_vregmap);  in lookupIRTempPair()
   180  vassert(! hregIsInvalid(env->vregmapHI[tmp]));  in lookupIRTempPair()
   313  vassert(hregClass(src) == HRcInt64);  in mk_iMOVsd_RR()
   314  vassert(hregClass(dst) == HRcInt64);  in mk_iMOVsd_RR()
   322  vassert(hregClass(src) == HRcVec128);  in mk_vMOVsd_RR()
   323  vassert(hregClass(dst) == HRcVec128);  in mk_vMOVsd_RR()
   331  vassert(n > 0 && n < 256 && (n%8) == 0);  in add_to_rsp()
   [all …]

host_s390_isel.c
   198  vassert(tmp < env->n_vregmap);  in lookupIRTemp()
   199  vassert(! hregIsInvalid(env->vregmap[tmp]));  in lookupIRTemp()
   209  vassert(tmp < env->n_vregmap);  in lookupIRTemp128()
   210  vassert(! hregIsInvalid(env->vregmapHI[tmp]));  in lookupIRTemp128()
   332  vassert(typeOfIRExpr(env->type_env, expr) == Ity_I64);  in s390_isel_amode()
   337  vassert(s390_amode_is_sane(am));  in s390_isel_amode()
   538  vassert(nBBPTRs == 0 || nBBPTRs == 1);  in doHelperCall()
   545  vassert(0);  in doHelperCall()
   547  vassert(retTy == Ity_V128 || retTy == Ity_V256);  in doHelperCall()
   548  vassert(retTy != Ity_V256); // we don't handle that yet (if ever)  in doHelperCall()
   [all …]

s390_disasm.c
   45  vassert(vex_strlen(mnm) <= S390_MAX_MNEMONIC_LEN);  in mnemonic()
   66  vassert(archreg < 16);  in gpr_operand()
   83  vassert(archreg < 16);  in fpr_operand()
   100  vassert(archreg < 16);  in ar_operand()
   122  vassert(vex_strlen(base) + sizeof suffix[0] <= sizeof buf);  in cab_operand()
   156  vassert(vex_strlen(prefix) + vex_strlen(suffix) +  in construct_mnemonic()
   470  vassert(p < buf + sizeof buf); /* detect buffer overwrite */  in s390_disasm()

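s390_disasm() and cab_operand() assert that the text they have built still fits its buffer, catching overruns instead of truncating silently. A small sketch of that style of check around a manual string build; the buffer size and helper name are made up.

   #include <assert.h>
   #include <stdio.h>
   #include <string.h>

   /* Append `suffix` to `buf` (capacity `cap`, already NUL-terminated),
      asserting before and after that we never run past the end. */
   static void append_checked(char* buf, size_t cap, const char* suffix)
   {
      assert(strlen(buf) + strlen(suffix) < cap);   /* room incl. NUL */
      strcat(buf, suffix);
      assert(strlen(buf) < cap);                    /* detect overwrite */
   }

   int main(void)
   {
      char buf[32] = "lr";
      append_checked(buf, sizeof buf, "   %r1,%r2");
      puts(buf);
      return 0;
   }
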
guest_x86_toIR.c
   321  vassert(isPlausibleIRType(ty));  in newTemp()
   452  vassert(archreg < 8);  in integerGuestRegOffset()
   455  vassert(!host_is_bigendian);  in integerGuestRegOffset()
   471  vassert(archreg >= 4 && archreg < 8 && sz == 1);  in integerGuestRegOffset()
   518  vassert(!host_is_bigendian);  in xmmGuestRegLane16offset()
   519  vassert(laneno >= 0 && laneno < 8);  in xmmGuestRegLane16offset()
   526  vassert(!host_is_bigendian);  in xmmGuestRegLane32offset()
   527  vassert(laneno >= 0 && laneno < 4);  in xmmGuestRegLane32offset()
   534  vassert(!host_is_bigendian);  in xmmGuestRegLane64offset()
   535  vassert(laneno >= 0 && laneno < 2);  in xmmGuestRegLane64offset()
   [all …]

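xmmGuestRegLane{16,32,64}offset() check the lane number against how many lanes of that width fit in a 128-bit register. Below is a sketch of the corresponding lane-offset arithmetic for a little-endian layout; the base offset and the formula are assumptions for illustration, not the real guest_x86 state layout.

   #include <assert.h>
   #include <stdio.h>

   #define XMM0_BASE_OFFSET 200   /* hypothetical guest-state offset of XMM0 */

   /* Byte offset of lane `laneno` of width `laneSzB` bytes within XMM register
      `xmmreg`, assuming lane 0 sits at the lowest address (little-endian). */
   static int xmm_lane_offset(int xmmreg, int laneSzB, int laneno)
   {
      assert(xmmreg >= 0 && xmmreg < 8);
      assert(laneSzB == 2 || laneSzB == 4 || laneSzB == 8);
      assert(laneno >= 0 && laneno < 16 / laneSzB);
      return XMM0_BASE_OFFSET + 16 * xmmreg + laneSzB * laneno;
   }

   int main(void)
   {
      printf("%d\n", xmm_lane_offset(1, 4, 3));  /* lane 3 of XMM1, 32-bit lanes */
      return 0;
   }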