Searched refs:VG_IS_8_ALIGNED (Results 1 – 9 of 9) sorted by relevance
121 CHECK( VG_IS_8_ALIGNED(0x0) ); in test_VG_IS_XYZ_ALIGNED()
122 CHECK( ! VG_IS_8_ALIGNED(0x1) ); in test_VG_IS_XYZ_ALIGNED()
123 CHECK( ! VG_IS_8_ALIGNED(0x2) ); in test_VG_IS_XYZ_ALIGNED()
124 CHECK( ! VG_IS_8_ALIGNED(0x3) ); in test_VG_IS_XYZ_ALIGNED()
125 CHECK( ! VG_IS_8_ALIGNED(0x4) ); in test_VG_IS_XYZ_ALIGNED()
126 CHECK( ! VG_IS_8_ALIGNED(0x5) ); in test_VG_IS_XYZ_ALIGNED()
127 CHECK( ! VG_IS_8_ALIGNED(0x6) ); in test_VG_IS_XYZ_ALIGNED()
128 CHECK( ! VG_IS_8_ALIGNED(0x7) ); in test_VG_IS_XYZ_ALIGNED()
129 CHECK( VG_IS_8_ALIGNED(0x8) ); in test_VG_IS_XYZ_ALIGNED()
130 CHECK( ! VG_IS_8_ALIGNED(0x9) ); in test_VG_IS_XYZ_ALIGNED()
[all …]
766 vg_assert(VG_IS_8_ALIGNED(offsetof(VexGuestX86State,guest_FPREG))); in do_pre_run_checks()
781 vg_assert(VG_IS_8_ALIGNED(offsetof(VexGuestAMD64State,guest_FPREG))); in do_pre_run_checks()
783 vg_assert(VG_IS_8_ALIGNED(offsetof(VexGuestAMD64State,guest_RAX))); in do_pre_run_checks()
784 vg_assert(VG_IS_8_ALIGNED(offsetof(VexGuestAMD64State,guest_RIP))); in do_pre_run_checks()
806 vg_assert(VG_IS_8_ALIGNED(& tst->arch.vex.guest_D1)); in do_pre_run_checks()
807 vg_assert(VG_IS_8_ALIGNED(& tst->arch.vex_shadow1.guest_D1)); in do_pre_run_checks()
808 vg_assert(VG_IS_8_ALIGNED(& tst->arch.vex_shadow2.guest_D1)); in do_pre_run_checks()
812 vg_assert(VG_IS_8_ALIGNED(& tst->arch.vex.guest_X0)); in do_pre_run_checks()
813 vg_assert(VG_IS_8_ALIGNED(& tst->arch.vex_shadow1.guest_X0)); in do_pre_run_checks()
814 vg_assert(VG_IS_8_ALIGNED(& tst->arch.vex_shadow2.guest_X0)); in do_pre_run_checks()
1331 && nBits == 64 && VG_IS_8_ALIGNED(a))) { in mc_LOADVn_slow()
1484 && nBits == 64 && VG_IS_8_ALIGNED(a))) { in mc_STOREVn_slow()
1682 if (VG_IS_8_ALIGNED(a)) break; in set_address_range_perms()
2827 if (VG_IS_8_ALIGNED( -VG_STACK_REDZONE_SZB + new_SP )) { in mc_new_mem_stack_8_w_ECU()
2841 if (VG_IS_8_ALIGNED( -VG_STACK_REDZONE_SZB + new_SP )) { in mc_new_mem_stack_8()
2855 if (VG_IS_8_ALIGNED( -VG_STACK_REDZONE_SZB + new_SP )) { in mc_die_mem_stack_8()
2872 if (VG_IS_8_ALIGNED( -VG_STACK_REDZONE_SZB + new_SP )) { in mc_new_mem_stack_12_w_ECU()
2890 if (VG_IS_8_ALIGNED( -VG_STACK_REDZONE_SZB + new_SP )) { in mc_new_mem_stack_12()
2909 if (VG_IS_8_ALIGNED( -VG_STACK_REDZONE_SZB + new_SP-12 )) { in mc_die_mem_stack_12()
2932 if (VG_IS_8_ALIGNED( -VG_STACK_REDZONE_SZB + new_SP )) { in mc_new_mem_stack_16_w_ECU()
[all …]
187 #define VG_IS_8_ALIGNED(aaa_p) (0 == (((Addr)(aaa_p)) & ((Addr)0x7))) macro
154 vg_assert(VG_IS_8_ALIGNED(sp)); in VG_()
163 vg_assert(VG_IS_8_ALIGNED(sp)); in VG_()
3602 while (UNLIKELY(!VG_IS_8_ALIGNED(a)) && LIKELY(len > 0)) { in Filter__clear_range_SLOW()
3662 if (UNLIKELY(!VG_IS_8_ALIGNED(c))) { in Filter__clear_range()
3729 tl_assert(VG_IS_8_ALIGNED(c)); in Filter__clear_range()
3771 if (UNLIKELY( !VG_IS_8_ALIGNED(a) )) in Filter__ok_to_skip_crd64()
3886 if (UNLIKELY( !VG_IS_8_ALIGNED(a) )) in Filter__ok_to_skip_cwr64()
515 if (!VG_IS_8_ALIGNED((*sym_avmas_out).main)) { in get_elf_symbol_info()
6044 if (!VG_IS_8_ALIGNED(ehdr->e_phentsize)) {