Searched refs:LibVEX_GUEST_STATE_ALIGN (Results 1 – 8 of 8) sorted by relevance
100 VexGuestArchState vex __attribute__((aligned(LibVEX_GUEST_STATE_ALIGN)));
104 __attribute__((aligned(LibVEX_GUEST_STATE_ALIGN)));
106 __attribute__((aligned(LibVEX_GUEST_STATE_ALIGN)));
110 __attribute__((aligned(LibVEX_GUEST_STATE_ALIGN)));
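These hits are the per-thread fields that hold the guest register state and its shadow copies; each one is pinned to a LibVEX_GUEST_STATE_ALIGN boundary with GCC's aligned attribute. Below is a minimal sketch of that pattern, not the real declarations: every type and field name except vex and the macro itself is a placeholder.

    /* Sketch of the aligned-field pattern seen above.  GuestStateSketch and
       ArchStateSketch are placeholder types, not real VEX/Valgrind types. */
    #include <stddef.h>

    #define LibVEX_GUEST_STATE_ALIGN 16

    typedef struct { unsigned char bytes[1024]; } GuestStateSketch;

    typedef struct {
       /* Each guest-state block starts on a 16-byte boundary. */
       GuestStateSketch vex     __attribute__((aligned(LibVEX_GUEST_STATE_ALIGN)));
       GuestStateSketch shadow1 __attribute__((aligned(LibVEX_GUEST_STATE_ALIGN)));
       GuestStateSketch shadow2 __attribute__((aligned(LibVEX_GUEST_STATE_ALIGN)));
    } ArchStateSketch;

    /* The attribute makes every field offset a multiple of 16. */
    _Static_assert(offsetof(ArchStateSketch, shadow1)
                   % LibVEX_GUEST_STATE_ALIGN == 0, "shadow1 misaligned");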
61 LibVEX_GUEST_STATE_ALIGN, in VG_()
1041 vg_assert(0 == sizeof(VexGuestX86State) % LibVEX_GUEST_STATE_ALIGN); in VG_()
1063 vg_assert(0 == sizeof(VexGuestAMD64State) % LibVEX_GUEST_STATE_ALIGN); in VG_()
1078 vg_assert(0 == sizeof(VexGuestPPC32State) % LibVEX_GUEST_STATE_ALIGN); in VG_()
1093 vg_assert(0 == sizeof(VexGuestPPC64State) % LibVEX_GUEST_STATE_ALIGN); in VG_()
1139 vg_assert(0 == sizeof(VexGuestS390XState) % LibVEX_GUEST_STATE_ALIGN); in VG_()
1171 vg_assert(0 == sizeof(VexGuestMIPS32State) % LibVEX_GUEST_STATE_ALIGN); in VG_()
1189 vg_assert(0 == sizeof(VexGuestMIPS64State) % LibVEX_GUEST_STATE_ALIGN); in VG_()
978 vg_assert(0 == sizeof(VexGuestX86State) % LibVEX_GUEST_STATE_ALIGN); in VG_()
1007 vg_assert(0 == sizeof(VexGuestAMD64State) % LibVEX_GUEST_STATE_ALIGN); in VG_()
375 vassert(0 == sizeof(VexGuestX86State) % LibVEX_GUEST_STATE_ALIGN); in LibVEX_FrontEnd()
392 vassert(0 == sizeof(VexGuestAMD64State) % LibVEX_GUEST_STATE_ALIGN); in LibVEX_FrontEnd()
409 vassert(0 == sizeof(VexGuestPPC32State) % LibVEX_GUEST_STATE_ALIGN); in LibVEX_FrontEnd()
427 vassert(0 == sizeof(VexGuestPPC64State) % LibVEX_GUEST_STATE_ALIGN); in LibVEX_FrontEnd()
445 vassert(0 == sizeof(VexGuestS390XState) % LibVEX_GUEST_STATE_ALIGN); in LibVEX_FrontEnd()
462 vassert(0 == sizeof(VexGuestARMState) % LibVEX_GUEST_STATE_ALIGN); in LibVEX_FrontEnd()
479 vassert(0 == sizeof(VexGuestARM64State) % LibVEX_GUEST_STATE_ALIGN); in LibVEX_FrontEnd()
497 vassert(0 == sizeof(VexGuestMIPS32State) % LibVEX_GUEST_STATE_ALIGN); in LibVEX_FrontEnd()
515 vassert(0 == sizeof(VexGuestMIPS64State) % LibVEX_GUEST_STATE_ALIGN); in LibVEX_FrontEnd()
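The vg_assert and vassert hits above all enforce the same invariant: the size of each architecture's guest state struct must be an exact multiple of LibVEX_GUEST_STATE_ALIGN. The following is a hedged stand-alone sketch of why that matters, using a placeholder type and size: when the size is a multiple of the alignment, copies of the state laid out contiguously (primary plus shadows) stay aligned too.

    /* Sketch of the sizeof-is-a-multiple-of-alignment invariant.
       StateSketch is a placeholder; the real checks apply to the
       VexGuest*State structs listed above. */
    #include <assert.h>
    #include <stdint.h>

    #define LibVEX_GUEST_STATE_ALIGN 16

    typedef struct { unsigned char pad[1024]; } StateSketch;  /* placeholder size */

    int main(void)
    {
       /* Mirrors: vassert(0 == sizeof(VexGuestX86State) % LibVEX_GUEST_STATE_ALIGN). */
       assert(0 == sizeof(StateSketch) % LibVEX_GUEST_STATE_ALIGN);

       /* Because the element size is a multiple of the alignment, every
          element of a contiguous run of states is also aligned. */
       StateSketch states[3] __attribute__((aligned(LibVEX_GUEST_STATE_ALIGN)));
       assert((uintptr_t)&states[1] % LibVEX_GUEST_STATE_ALIGN == 0);
       return 0;
    }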
450 vassert(0 == (guest_sizeB % LibVEX_GUEST_STATE_ALIGN)); in doRegisterAllocation()
451 vassert(0 == (LibVEX_N_SPILL_BYTES % LibVEX_GUEST_STATE_ALIGN)); in doRegisterAllocation()
733 vg_assert(sz_vex % LibVEX_GUEST_STATE_ALIGN == 0); in do_pre_run_checks()
734 vg_assert(sz_vexsh1 % LibVEX_GUEST_STATE_ALIGN == 0); in do_pre_run_checks()
735 vg_assert(sz_vexsh2 % LibVEX_GUEST_STATE_ALIGN == 0); in do_pre_run_checks()
736 vg_assert(sz_spill % LibVEX_GUEST_STATE_ALIGN == 0); in do_pre_run_checks()
738 vg_assert(a_vex % LibVEX_GUEST_STATE_ALIGN == 0); in do_pre_run_checks()
739 vg_assert(a_vexsh1 % LibVEX_GUEST_STATE_ALIGN == 0); in do_pre_run_checks()
740 vg_assert(a_vexsh2 % LibVEX_GUEST_STATE_ALIGN == 0); in do_pre_run_checks()
741 vg_assert(a_spill % LibVEX_GUEST_STATE_ALIGN == 0); in do_pre_run_checks()
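do_pre_run_checks goes further than the size asserts: just before a thread runs, it also checks that the addresses (a_vex, a_vexsh1, a_vexsh2, a_spill) of the guest state, its two shadows, and the spill area are aligned, not just their sizes (sz_*). Below is a minimal sketch of that pair of checks, with hypothetical parameter names rather than the actual ThreadArchState plumbing.

    /* Sketch of the size + address checks seen in do_pre_run_checks().
       The addr/size parameters are hypothetical stand-ins. */
    #include <assert.h>
    #include <stdint.h>
    #include <stddef.h>

    #define LibVEX_GUEST_STATE_ALIGN 16

    void check_state_block(const void* addr, size_t size)
    {
       /* sz_* style check: the block is a whole number of alignment units. */
       assert(size % LibVEX_GUEST_STATE_ALIGN == 0);
       /* a_* style check: the block starts on an aligned address. */
       assert((uintptr_t)addr % LibVEX_GUEST_STATE_ALIGN == 0);
    }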
580 #define LibVEX_GUEST_STATE_ALIGN 16 macro
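The definition itself fixes the alignment at 16 bytes. As one hedged illustration of how the size and address invariants fit together: a dynamically allocated state block could be obtained with C11 aligned_alloc, which itself requires the size to be a multiple of the requested alignment. The 1024 below is a placeholder, not a real sizeof, and alloc_guest_state_sketch is not a Valgrind function.

    /* Sketch only: allocating a block that satisfies LibVEX_GUEST_STATE_ALIGN
       with C11 aligned_alloc.  1024 stands in for sizeof(VexGuest...State). */
    #include <stdlib.h>

    #define LibVEX_GUEST_STATE_ALIGN 16

    void* alloc_guest_state_sketch(void)
    {
       /* aligned_alloc requires the size to be a multiple of the alignment,
          the same invariant the vg_assert/vassert checks above enforce. */
       return aligned_alloc(LibVEX_GUEST_STATE_ALIGN, 1024);
    }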