/art/runtime/quick/
inline_method_analyser.cc:
  38: COMPILE_ASSERT(InlineMethodAnalyser::IsInstructionIGet(Instruction::IGET),
  40: COMPILE_ASSERT(InlineMethodAnalyser::IsInstructionIGet(Instruction::IGET_WIDE),
  42: COMPILE_ASSERT(InlineMethodAnalyser::IsInstructionIGet(Instruction::IGET_OBJECT),
  44: COMPILE_ASSERT(InlineMethodAnalyser::IsInstructionIGet(Instruction::IGET_BOOLEAN),
  46: COMPILE_ASSERT(InlineMethodAnalyser::IsInstructionIGet(Instruction::IGET_BYTE),
  48: COMPILE_ASSERT(InlineMethodAnalyser::IsInstructionIGet(Instruction::IGET_CHAR),
  50: COMPILE_ASSERT(InlineMethodAnalyser::IsInstructionIGet(Instruction::IGET_SHORT),
  53: COMPILE_ASSERT(InlineMethodAnalyser::IsInstructionIPut(Instruction::IPUT),
  55: COMPILE_ASSERT(InlineMethodAnalyser::IsInstructionIPut(Instruction::IPUT_WIDE),
  57: COMPILE_ASSERT(InlineMethodAnalyser::IsInstructionIPut(Instruction::IPUT_OBJECT),
  [all …]

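These asserts pin a compile-time predicate to every opcode it must accept, so reordering the opcode enum breaks the build rather than the inliner. A minimal sketch of the pattern using C++11 static_assert; the enum and names here are illustrative, not ART's real Instruction::Code:

#include <cstdint>

// Hypothetical opcode enum; ART's real one is far larger.
enum class Opcode : uint8_t {
  IGET, IGET_WIDE, IGET_OBJECT, IGET_BOOLEAN, IGET_BYTE, IGET_CHAR, IGET_SHORT,
};

// A constexpr predicate can be evaluated inside a compile-time assertion.
constexpr bool IsInstructionIGet(Opcode opcode) {
  return Opcode::IGET <= opcode && opcode <= Opcode::IGET_SHORT;
}

// One assertion per variant catches anyone reordering the enum later.
static_assert(IsInstructionIGet(Opcode::IGET), "IGET must classify as an iget");
static_assert(IsInstructionIGet(Opcode::IGET_SHORT), "IGET_SHORT must classify as an iget");
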
inline_method_analyser.h:
  121: COMPILE_ASSERT(sizeof(InlineIGetIPutData) == sizeof(uint64_t), InvalidSizeOfInlineIGetIPutData);
  130: COMPILE_ASSERT(sizeof(InlineReturnArgData) == sizeof(uint64_t), InvalidSizeOfInlineReturnArgData);

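Both asserts keep a bit-packed payload struct at exactly one 64-bit word. A sketch of the idea with made-up fields (not ART's actual InlineIGetIPutData layout), assuming typical bit-field packing:

#include <cstdint>

// Invented payload; callers reserve exactly one uint64_t slot for it.
struct InlinePayload {
  uint16_t opcode;
  uint16_t field_idx;
  uint32_t object_arg : 4;
  uint32_t src_arg : 4;
  uint32_t flags : 24;
};

// Growing any field past the 64-bit budget breaks the build here instead
// of silently corrupting whatever is stored next to the payload.
static_assert(sizeof(InlinePayload) == sizeof(uint64_t),
              "InlinePayload must stay exactly 64 bits");
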
/art/compiler/dex/quick/
resource_mask.cc:
  36: COMPILE_ASSERT(kNoRegMasks[127-ResourceMask::kHeapRef].Equals(
  38: COMPILE_ASSERT(kNoRegMasks[127-ResourceMask::kLiteral].Equals(
  40: COMPILE_ASSERT(kNoRegMasks[127-ResourceMask::kDalvikReg].Equals(
  42: COMPILE_ASSERT(kNoRegMasks[127-ResourceMask::kFPStatus].Equals(
  44: COMPILE_ASSERT(kNoRegMasks[127-ResourceMask::kCCode].Equals(
  77: COMPILE_ASSERT(kSingleRegMasks[SingleRegMaskIndex(127-ResourceMask::kHeapRef, 0)].Equals(
  79: COMPILE_ASSERT(kSingleRegMasks[SingleRegMaskIndex(127-ResourceMask::kLiteral, 0)].Equals(
  81: COMPILE_ASSERT(kSingleRegMasks[SingleRegMaskIndex(127-ResourceMask::kDalvikReg, 0)].Equals(
  83: COMPILE_ASSERT(kSingleRegMasks[SingleRegMaskIndex(127-ResourceMask::kFPStatus, 0)].Equals(
  85: COMPILE_ASSERT(kSingleRegMasks[SingleRegMaskIndex(127-ResourceMask::kCCode, 0)].Equals(
  [all …]

dex_file_method_inliner.cc:
  69: COMPILE_ASSERT(arraysize(kIntrinsicIsStatic) == kInlineOpNop, check_arraysize_kIntrinsicIsStatic);
  70: COMPILE_ASSERT(kIntrinsicIsStatic[kIntrinsicDoubleCvt], DoubleCvt_must_be_static);
  71: COMPILE_ASSERT(kIntrinsicIsStatic[kIntrinsicFloatCvt], FloatCvt_must_be_static);
  72: COMPILE_ASSERT(kIntrinsicIsStatic[kIntrinsicReverseBits], ReverseBits_must_be_static);
  73: COMPILE_ASSERT(kIntrinsicIsStatic[kIntrinsicReverseBytes], ReverseBytes_must_be_static);
  74: COMPILE_ASSERT(kIntrinsicIsStatic[kIntrinsicAbsInt], AbsInt_must_be_static);
  75: COMPILE_ASSERT(kIntrinsicIsStatic[kIntrinsicAbsLong], AbsLong_must_be_static);
  76: COMPILE_ASSERT(kIntrinsicIsStatic[kIntrinsicAbsFloat], AbsFloat_must_be_static);
  77: COMPILE_ASSERT(kIntrinsicIsStatic[kIntrinsicAbsDouble], AbsDouble_must_be_static);
  78: COMPILE_ASSERT(kIntrinsicIsStatic[kIntrinsicMinMaxInt], MinMaxInt_must_be_static);
  [all …]

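The arraysize check plus the per-intrinsic checks keep a bool table in lock-step with its enum. A trimmed-down sketch of that pattern; the enum values and table contents are illustrative, not ART's intrinsic list:

enum Intrinsic {
  kIntrinsicDoubleCvt,
  kIntrinsicFloatCvt,
  kIntrinsicAbsInt,
  kIntrinsicCount,  // Sentinel; must stay last.
};

static constexpr bool kIntrinsicIsStatic[] = {
    true,  // kIntrinsicDoubleCvt
    true,  // kIntrinsicFloatCvt
    true,  // kIntrinsicAbsInt
};

// Adding an enumerator without a table row (or vice versa) fails here.
static_assert(sizeof(kIntrinsicIsStatic) / sizeof(kIntrinsicIsStatic[0]) == kIntrinsicCount,
              "table must have one entry per intrinsic");
// Individual rows can be pinned too, mirroring the *_must_be_static asserts.
static_assert(kIntrinsicIsStatic[kIntrinsicDoubleCvt], "DoubleCvt must be static");
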
mir_to_lir.h:
  1189: COMPILE_ASSERT((kArg1 == kArg0 + 1) && (kArg2 == kArg1 + 1) && (kArg3 == kArg2 + 1) && in TargetReg()
  1192: COMPILE_ASSERT((kFArg1 == kFArg0 + 1) && (kFArg2 == kFArg1 + 1) && (kFArg3 == kFArg2 + 1) && in TargetReg()
  1195: COMPILE_ASSERT(kRet1 == kRet0 + 1, kret_range_unexpected); in TargetReg()

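TargetReg() computes registers as kArg0 + i, which is only sound if the enumerators are contiguous; the asserts make that assumption explicit. A sketch with placeholder names standing in for ART's SpecialTargetRegister values:

enum SpecialReg { kArg0, kArg1, kArg2, kArg3 };

// Arithmetic like TargetReg(kArg0 + i) silently breaks if someone inserts
// an enumerator in the middle, so contiguity is asserted up front.
static_assert(kArg1 == kArg0 + 1 && kArg2 == kArg1 + 1 && kArg3 == kArg2 + 1,
              "kArg registers must be consecutive");
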
/art/compiler/optimizing/
locations.h:
  57: COMPILE_ASSERT((kInvalid & kLocationTagMask) != kConstant, TagError); in Location()
  58: COMPILE_ASSERT((kUnallocated & kLocationTagMask) != kConstant, TagError); in Location()
  59: COMPILE_ASSERT((kStackSlot & kLocationTagMask) != kConstant, TagError); in Location()
  60: COMPILE_ASSERT((kDoubleStackSlot & kLocationTagMask) != kConstant, TagError); in Location()
  61: COMPILE_ASSERT((kRegister & kLocationTagMask) != kConstant, TagError); in Location()
  62: COMPILE_ASSERT((kConstant & kLocationTagMask) == kConstant, TagError); in Location()
  63: COMPILE_ASSERT((kQuickParameter & kLocationTagMask) == kConstant, TagError); in Location()

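Location packs a kind tag into the low bits of a single word; the asserts prove that only the constant-like kinds decode with the kConstant tag. A sketch of a subset of the checks, with invented bit values rather than the real art::Location encoding:

#include <cstdint>

constexpr uintptr_t kLocationTagMask = 0x3;
constexpr uintptr_t kConstant = 0x3;
constexpr uintptr_t kInvalid = 0x0;
constexpr uintptr_t kStackSlot = 0x4;  // Low bits must not read as kConstant.
constexpr uintptr_t kRegister = 0x8;

// Non-constant kinds must never alias the constant tag, or a packed
// Location would be misclassified when decoded.
static_assert((kInvalid & kLocationTagMask) != kConstant, "TagError");
static_assert((kStackSlot & kLocationTagMask) != kConstant, "TagError");
static_assert((kRegister & kLocationTagMask) != kConstant, "TagError");
static_assert((kConstant & kLocationTagMask) == kConstant, "TagError");
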
/art/compiler/dex/
mir_method_info.h:
  63: COMPILE_ASSERT(kMethodInfoBitEnd <= 16, too_many_flags);
  168: COMPILE_ASSERT(kMethodLoweringInfoEnd <= 16, too_many_flags);
  172: COMPILE_ASSERT((1u << (kBitInvokeTypeEnd - kBitInvokeTypeBegin)) - 1u == kInvokeTypeMask,
  174: COMPILE_ASSERT((1u << (kBitSharpTypeEnd - kBitSharpTypeBegin)) - 1u == kInvokeTypeMask,

frontend.cc:
  84: COMPILE_ASSERT(0U == static_cast<size_t>(kNone), kNone_not_0);
  85: COMPILE_ASSERT(1U == static_cast<size_t>(kArm), kArm_not_1);
  86: COMPILE_ASSERT(2U == static_cast<size_t>(kArm64), kArm64_not_2);
  87: COMPILE_ASSERT(3U == static_cast<size_t>(kThumb2), kThumb2_not_3);
  88: COMPILE_ASSERT(4U == static_cast<size_t>(kX86), kX86_not_4);
  89: COMPILE_ASSERT(5U == static_cast<size_t>(kX86_64), kX86_64_not_5);
  90: COMPILE_ASSERT(6U == static_cast<size_t>(kMips), kMips_not_6);
  91: COMPILE_ASSERT(7U == static_cast<size_t>(kMips64), kMips64_not_7);
  124: COMPILE_ASSERT(sizeof(kDisabledOptimizationsPerISA) == 8 * sizeof(uint32_t), kDisabledOpts_unexp);
  155: COMPILE_ASSERT(sizeof(kSupportedTypes) == 8 * sizeof(char*), kSupportedTypes_unexp);
  [all …]

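The numbering asserts exist because per-ISA tables later in the file are indexed by the InstructionSet enum, so its values are load-bearing. A sketch; the enumerator names follow the listing above, but the table is illustrative:

enum InstructionSet { kNone, kArm, kArm64, kThumb2, kX86, kX86_64, kMips, kMips64 };

// Pin the numbering the tables below depend on.
static_assert(kNone == 0 && kArm == 1 && kArm64 == 2 && kMips64 == 7,
              "InstructionSet numbering is load-bearing");

// Illustrative per-ISA table; the real ones hold disabled optimizations etc.
static constexpr const char* kIsaNames[] = {
    "none", "arm", "arm64", "thumb2", "x86", "x86_64", "mips", "mips64",
};
// Mirrors the sizeof(...) == 8 * sizeof(...) checks: one row per ISA.
static_assert(sizeof(kIsaNames) == 8 * sizeof(char*), "one name per ISA");
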
mir_field_info.h:
  133: COMPILE_ASSERT(kIFieldLoweringInfoBitEnd <= 16, too_many_flags);
  195: COMPILE_ASSERT(kSFieldLoweringInfoBitEnd <= 16, too_many_flags);

mir_optimization.cc:
  212: COMPILE_ASSERT(arraysize(kIfCcZConditionCodes) == Instruction::IF_LEZ - Instruction::IF_EQZ + 1,
  223: COMPILE_ASSERT(ConditionCodeForIfCcZ(Instruction::IF_EQZ) == kCondEq, check_if_eqz_ccode);
  224: COMPILE_ASSERT(ConditionCodeForIfCcZ(Instruction::IF_NEZ) == kCondNe, check_if_nez_ccode);
  225: COMPILE_ASSERT(ConditionCodeForIfCcZ(Instruction::IF_LTZ) == kCondLt, check_if_ltz_ccode);
  226: COMPILE_ASSERT(ConditionCodeForIfCcZ(Instruction::IF_GEZ) == kCondGe, check_if_gez_ccode);
  227: COMPILE_ASSERT(ConditionCodeForIfCcZ(Instruction::IF_GTZ) == kCondGt, check_if_gtz_ccode);
  228: COMPILE_ASSERT(ConditionCodeForIfCcZ(Instruction::IF_LEZ) == kCondLe, check_if_lez_ccode);

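Here a constexpr opcode-to-condition-code mapping is verified row by row at build time. A sketch of the shape this could take; the enums are stand-ins for ART's Instruction::Code and ConditionCode, and the real function may be written differently:

enum IfOp { IF_EQZ, IF_NEZ, IF_LTZ, IF_GEZ, IF_GTZ, IF_LEZ };
enum Cond { kCondEq, kCondNe, kCondLt, kCondGe, kCondGt, kCondLe };

static constexpr Cond kIfCcZConditionCodes[] = {
    kCondEq, kCondNe, kCondLt, kCondGe, kCondGt, kCondLe,
};

// Mirrors the arraysize check at line 212: one table row per IF_xxZ opcode.
static_assert(sizeof(kIfCcZConditionCodes) / sizeof(kIfCcZConditionCodes[0]) ==
                  IF_LEZ - IF_EQZ + 1,
              "table must cover every if-ccz opcode");

constexpr Cond ConditionCodeForIfCcZ(IfOp opcode) {
  return kIfCcZConditionCodes[opcode - IF_EQZ];
}

// Each row of the mapping is then pinned individually, as at lines 223-228.
static_assert(ConditionCodeForIfCcZ(IF_EQZ) == kCondEq, "check_if_eqz_ccode");
static_assert(ConditionCodeForIfCcZ(IF_LEZ) == kCondLe, "check_if_lez_ccode");
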
local_value_numbering.h:
  358: COMPILE_ASSERT(sizeof(BasicBlockId) == sizeof(uint16_t), BasicBlockId_must_be_16_bit);

/art/runtime/
atomic.h:
  296: COMPILE_ASSERT(sizeof(AtomicInteger) == sizeof(int32_t), weird_atomic_int_size);
  297: COMPILE_ASSERT(alignof(AtomicInteger) == alignof(int32_t),
  299: COMPILE_ASSERT(sizeof(Atomic<int64_t>) == sizeof(int64_t), weird_atomic_int64_size);
  305: COMPILE_ASSERT(alignof(Atomic<int64_t>) == alignof(int64_t),

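These checks pin the atomic wrapper to the size and alignment of the underlying integer, so it can be overlaid on plain memory. A sketch of the same guarantees stated against std::atomic; ART's Atomic<> wrapper is assumed to be layout-compatible on its target platforms:

#include <atomic>
#include <cstdint>

// Neither property is guaranteed by the standard in general; asserting
// them turns a platform assumption into a build-time check.
static_assert(sizeof(std::atomic<int32_t>) == sizeof(int32_t),
              "weird_atomic_int_size");
static_assert(alignof(std::atomic<int32_t>) == alignof(int32_t),
              "weird_atomic_int_alignment");
static_assert(sizeof(std::atomic<int64_t>) == sizeof(int64_t),
              "weird_atomic_int64_size");
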
utils.h:
  91: COMPILE_ASSERT((n & (n - 1)) == 0, n_not_power_of_two); in IsAligned()
  253: COMPILE_ASSERT(sizeof(U) <= sizeof(V), size_of_u_not_le_size_of_v);

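IsAligned<n>() relies on the x & (n - 1) mask trick, which is only correct when n is a power of two; the assert at line 91 enforces that on the template parameter itself. A minimal sketch:

#include <cstdint>

template <int n, typename T>
constexpr bool IsAligned(T x) {
  // The mask trick below is meaningless unless n is a power of two.
  static_assert((n & (n - 1)) == 0, "n_not_power_of_two");
  return (x & (n - 1)) == 0;
}

// Usage: IsAligned<8>(value) compiles; IsAligned<12>(value) does not.
static_assert(IsAligned<8>(uint64_t{64}), "64 is 8-byte aligned");
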
thread.h:
  929: COMPILE_ASSERT(sizeof(StateAndFlags) == sizeof(int32_t), weird_state_and_flags_size);
  965: COMPILE_ASSERT(sizeof(union StateAndFlags) == sizeof(int32_t),

dex_file.h:
  208: COMPILE_ASSERT((kAccValidClassFlags & kAccJavaFlagsMask) == kAccValidClassFlags, in GetJavaAccessFlags()
  210: COMPILE_ASSERT((kAccValidInterfaceFlags & kAccJavaFlagsMask) == kAccValidInterfaceFlags, in GetJavaAccessFlags()

/art/runtime/gc/accounting/
card_table.cc:
  69: COMPILE_ASSERT(kCardClean == 0, card_clean_must_be_0); in Create()
  101: COMPILE_ASSERT(kCardClean == 0, clean_card_must_be_0); in ClearCardTable()

/art/runtime/base/
casts.h:
  85: COMPILE_ASSERT(sizeof(Dest) == sizeof(Source), verify_sizes_are_equal); in bit_cast()

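bit_cast() reinterprets an object's bytes via memcpy, which only makes sense for equal-sized types; that is what the line-85 assert guards. A sketch of the well-known pattern (C++20 offers std::bit_cast directly):

#include <cstring>

template <class Dest, class Source>
Dest bit_cast(const Source& source) {
  // Copying sizeof(dest) bytes is only a faithful reinterpretation if
  // both types occupy exactly the same number of bytes.
  static_assert(sizeof(Dest) == sizeof(Source), "verify_sizes_are_equal");
  Dest dest;
  std::memcpy(&dest, &source, sizeof(dest));
  return dest;
}

// Usage: reinterpret a float's bits as an integer without UB:
//   uint32_t bits = bit_cast<uint32_t>(1.0f);
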
mutex.h:
  435: #define MutexLock(x) COMPILE_ASSERT(0, mutex_lock_declaration_missing_variable_name)
  457: #define ReaderMutexLock(x) COMPILE_ASSERT(0, reader_mutex_lock_declaration_missing_variable_name)
  479: #define WriterMutexLock(x) COMPILE_ASSERT(0, writer_mutex_lock_declaration_missing_variable_name)

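These three defines are not assertions about data at all: they poison the one-argument form of the scoped-lock constructors, because MutexLock(mu); declares an unnamed temporary that locks and immediately unlocks. A sketch of the trick with a stand-in lock class (ScopedLock is hypothetical, playing the role of ART's MutexLock):

struct Mutex {
  void Lock() {}
  void Unlock() {}
};

// Stand-in for ART's MutexLock RAII guard.
struct ScopedLock {
  explicit ScopedLock(Mutex& mu) : mu_(mu) { mu_.Lock(); }
  ~ScopedLock() { mu_.Unlock(); }
  Mutex& mu_;
};

// A function-like macro only expands when the name is directly followed
// by '(', which is exactly the misuse case; correct uses name a variable.
#define ScopedLock(x) static_assert(false, "declaration missing variable name")

void Example(Mutex& mu) {
  ScopedLock lock(mu);  // OK: 'ScopedLock' not followed by '(', macro inert.
  // ScopedLock(mu);    // Would expand to the failing static_assert.
}
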
bit_vector.cc:
  47: COMPILE_ASSERT(sizeof(*storage_) == kWordBytes, check_word_bytes); in BitVector()
  48: COMPILE_ASSERT(sizeof(*storage_) * 8u == kWordBits, check_word_bits); in BitVector()

macros.h:
  63: #define COMPILE_ASSERT(expr, msg) \  (macro definition)

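The definition is truncated above at the line continuation. It follows the classic pre-C++11 Google pattern: a typedef of an array whose size is -1 when the condition is false, which no compiler accepts. Reconstructed from the common form of that idiom, not copied verbatim from ART's macros.h:

template <bool>
struct CompileAssert {};

#define COMPILE_ASSERT(expr, msg) \
  typedef CompileAssert<(bool(expr))> msg[bool(expr) ? 1 : -1]  // NOLINT

// Usage: COMPILE_ASSERT(sizeof(int) == 4, int_must_be_32_bits);

The msg argument doubles as the typedef name, which is why every use site in this listing passes an identifier like weird_atomic_int_size rather than a string literal.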
allocator.h:
  111: COMPILE_ASSERT(kTag < kAllocatorTagCount, must_be_less_than_count); in throw()

/art/compiler/
oat_writer.h:
  217: COMPILE_ASSERT(mirror::Class::Status::kStatusMax < (2 ^ 16), class_status_wont_fit_in_16bits);
  220: COMPILE_ASSERT(OatClassType::kOatClassMax < (2 ^ 16), oat_class_type_wont_fit_in_16bits);

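One detail worth flagging in both asserts above: in C++, ^ is bitwise XOR, not exponentiation, so (2 ^ 16) evaluates to 18 rather than 65536. The checks would still pass as long as the enum maxima stay below 18, but the intended 16-bit bound is presumably 1 << 16:

// XOR versus the intended power of two.
static_assert((2 ^ 16) == 18, "caret is XOR, not exponentiation");
static_assert((1 << 16) == 65536, "a shift expresses the intended 16-bit bound");
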
/art/runtime/gc/space/
bump_pointer_space.h:
  189: COMPILE_ASSERT(sizeof(BlockHeader) % kAlignment == 0,

/art/compiler/utils/
arena_allocator.cc:
  105: COMPILE_ASSERT(arraysize(kAllocNames) == kNumArenaAllocKinds, check_arraysize_kAllocNames); in Dump()

/art/compiler/dex/quick/arm64/
assemble_arm64.cc:
  763: COMPILE_ASSERT((rxzr & 0x1f) == 0x1f, rzr_register_number_must_be_31); in EncodeLIRs()
  764: COMPILE_ASSERT((rsp & 0x1f) == 0x1f, rsp_register_number_must_be_31); in EncodeLIRs()