/external/v8/src/arm64/ |
D | assembler-arm64-inl.h |
     67   inline bool CPURegister::Is64Bits() const {  in Is64Bits() function
     335  DCHECK(reg.Is64Bits() || (shift_amount < kWRegSizeInBits));
     352  DCHECK(reg.Is64Bits() || ((extend != SXTX) && (extend != UXTX)));
     383  return Operand(reg_, reg_.Is64Bits() ? UXTX : UXTW, shift_amount_);
     426  DCHECK(smi.Is64Bits());
     434  DCHECK(smi.Is64Bits());
     454  DCHECK(base.Is64Bits() && !base.IsZero());
     464  DCHECK(base.Is64Bits() && !base.IsZero());
     469  DCHECK(regoffset.Is64Bits() || (extend != SXTX));
     479  DCHECK(base.Is64Bits() && !base.IsZero());
     [all …]
|
D | assembler-arm64.cc |
     978   DCHECK(xn.Is64Bits());  in br()
     984   DCHECK(xn.Is64Bits());  in blr()
     993   DCHECK(xn.Is64Bits());  in ret()
     1055  DCHECK(rt.Is64Bits() || (rt.Is32Bits() && (bit_pos < kWRegSizeInBits)));  in tbz()
     1070  DCHECK(rt.Is64Bits() || (rt.Is32Bits() && (bit_pos < kWRegSizeInBits)));  in tbnz()
     1083  DCHECK(rd.Is64Bits());  in adr()
     1299  DCHECK(rd.Is64Bits() || rn.Is32Bits());  in sbfm()
     1469  DCHECK(rd.Is64Bits() && ra.Is64Bits());  in smaddl()
     1479  DCHECK(rd.Is64Bits() && ra.Is64Bits());  in smsubl()
     1489  DCHECK(rd.Is64Bits() && ra.Is64Bits());  in umaddl()
     [all …]
|
D | macro-assembler-arm64-inl.h |
     460   DCHECK(!rd.IsSP() && rd.Is64Bits());  in CzeroX()
     473   DCHECK(rd.Is64Bits() && rn.Is64Bits());  in CmovX()
     726   if (!fd.Is(fn) || !fd.Is64Bits()) {  in Fmov()
     745   DCHECK(fd.Is64Bits());  in Fmov()
     758   if (fd.Is64Bits()) {  in Fmov()
     896   DCHECK(rt.Is64Bits());  in Ldr()
     962   if (!rd.Is(rn) || !rd.Is64Bits()) {  in Mov()
     1299  DCHECK(dst.Is64Bits() && src.Is64Bits());  in SmiTag()
     1310  DCHECK(dst.Is64Bits() && src.Is64Bits());  in SmiUntag()
     1324  DCHECK(dst.Is64Bits() && src.Is64Bits());  in SmiUntagToDouble()
     [all …]
|
D | macro-assembler-arm64.cc |
     84    DCHECK(rd.Is64Bits() || is_uint32(immediate));  in LogicalMacro()
     102   } else if ((rd.Is64Bits() && (immediate == -1L)) ||  in LogicalMacro()
     146   DCHECK(operand.reg().Is64Bits() ||  in LogicalMacro()
     163   DCHECK(is_uint32(imm) || is_int32(imm) || rd.Is64Bits());  in Mov()
     427   movn(dst, dst.Is64Bits() ? ~imm : (~imm & kWRegMask));  in TryOneInstrMoveImmediate()
     480   if (operand.IsZero() && rd.Is(rn) && rd.Is64Bits() && rn.Is64Bits() &&  in AddSubMacro()
     548   DCHECK(operand.reg().Is64Bits() ||  in AddSubWithCarryMacro()
     643   DCHECK(rt.Is64Bits());  in Load()
     661   DCHECK(rt.Is64Bits());  in Store()
     1694  int sign_bit = value.Is64Bits() ? kXSignBit : kWSignBit;  in AssertPositiveOrZero()
     [all …]
|
D | code-stubs-arm64.h |
     81  DCHECK(object.Is64Bits());  in RecordWriteStub()
     82  DCHECK(value.Is64Bits());  in RecordWriteStub()
     83  DCHECK(address.Is64Bits());  in RecordWriteStub()
|
D | codegen-arm64.cc | 101 DCHECK(string.Is64Bits() && index.Is32Bits() && result.Is64Bits()); in Generate()
|
D | macro-assembler-arm64.h |
     58    V(Ldrsb, Register&, rt, rt.Is64Bits() ? LDRSB_x : LDRSB_w) \
     61    V(Ldrsh, Register&, rt, rt.Is64Bits() ? LDRSH_x : LDRSH_w) \
     1089  DCHECK(as_int.Is64Bits());
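
The DCHECKs listed above all enforce the same contract: instructions that only accept X registers assert Is64Bits() on their operands, and bit-position operands must fit inside a W register. A minimal stand-alone sketch of that contract; the CPURegisterSketch type and helper names are illustrative, not V8's actual classes:

    // Stand-in for V8's CPURegister width checks; not the real class.
    #include <cassert>

    struct CPURegisterSketch {
      unsigned code;       // register number, e.g. 1 for x1/w1
      unsigned size_bits;  // 32 for w<n>, 64 for x<n>
      bool Is64Bits() const { return size_bits == 64; }
      bool Is32Bits() const { return size_bits == 32; }
    };

    constexpr unsigned kWRegSizeInBits = 32;

    // Same shape as the tbz/tbnz preconditions (assembler-arm64.cc lines 1055/1070):
    // a 32-bit register can only test bit positions 0..31.
    void CheckTbzOperands(const CPURegisterSketch& rt, unsigned bit_pos) {
      assert(rt.Is64Bits() || (rt.Is32Bits() && (bit_pos < kWRegSizeInBits)));
    }

    // Same shape as br/blr/ret (lines 978/984/993): branch targets must be X registers.
    void CheckBranchTarget(const CPURegisterSketch& xn) {
      assert(xn.Is64Bits());
    }

    int main() {
      CPURegisterSketch x1{1, 64}, w1{1, 32};
      CheckTbzOperands(w1, 17);  // OK: bit 17 exists in a W register
      CheckTbzOperands(x1, 52);  // OK: only X registers have bits >= 32
      CheckBranchTarget(x1);     // OK
      // CheckBranchTarget(w1);  // would trip the assertion, like the DCHECK
      return 0;
    }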
|
/external/vixl/examples/aarch64/ |
D | custom-disassembler.cc |
     41  AppendToOutput(reg.Is64Bits() ? "ip0" : "wip0");  in AppendRegisterNameToOutput()
     44  AppendToOutput(reg.Is64Bits() ? "ip1" : "wip1");  in AppendRegisterNameToOutput()
     47  AppendToOutput(reg.Is64Bits() ? "lr" : "w30");  in AppendRegisterNameToOutput()
     50  AppendToOutput(reg.Is64Bits() ? "x_stack_pointer" : "w_stack_pointer");  in AppendRegisterNameToOutput()
     53  AppendToOutput(reg.Is64Bits() ? "x_zero_reg" : "w_zero_reg");  in AppendRegisterNameToOutput()
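
The example above overrides register-name printing so aliased registers get friendly names whose prefix depends on Is64Bits(). A stand-alone sketch of that selection logic, assuming AArch64's usual aliases (x16 = ip0, x17 = ip1, x30 = lr); it does not use the real vixl::aarch64::Disassembler API:

    // Stand-alone model of the friendly-name selection; not the VIXL API.
    #include <cstdio>

    struct RegSketch {
      unsigned code;       // architectural register number
      unsigned size_bits;  // 32 or 64
      bool Is64Bits() const { return size_bits == 64; }
    };

    const char* FriendlyName(const RegSketch& reg) {
      switch (reg.code) {
        case 16: return reg.Is64Bits() ? "ip0" : "wip0";
        case 17: return reg.Is64Bits() ? "ip1" : "wip1";
        case 30: return reg.Is64Bits() ? "lr" : "w30";
        default: return reg.Is64Bits() ? "x<n>" : "w<n>";
      }
    }

    int main() {
      RegSketch x16{16, 64}, w30{30, 32};
      std::printf("%s %s\n", FriendlyName(x16), FriendlyName(w30));  // prints "ip0 w30"
      return 0;
    }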
|
/external/vixl/src/aarch64/ |
D | operands-aarch64.cc |
     306  VIXL_ASSERT(reg.Is64Bits() || (shift_amount < kWRegSize));  in Operand()
     322  VIXL_ASSERT(reg.Is64Bits() || ((extend != SXTX) && (extend != UXTX)));  in Operand()
     368  return Operand(reg_, reg_.Is64Bits() ? UXTX : UXTW, shift_amount_);  in ToExtendedRegister()
     390  VIXL_ASSERT(base.Is64Bits() && !base.IsZero());  in MemOperand()
     405  VIXL_ASSERT(base.Is64Bits() && !base.IsZero());  in MemOperand()
     410  VIXL_ASSERT(regoffset.Is64Bits() || (extend != SXTX));  in MemOperand()
     425  VIXL_ASSERT(base.Is64Bits() && !base.IsZero());  in MemOperand()
     426  VIXL_ASSERT(regoffset.Is64Bits() && !regoffset.IsSP());  in MemOperand()
     438  VIXL_ASSERT(base.Is64Bits() && !base.IsZero());  in MemOperand()
     453  VIXL_ASSERT(regoffset_.Is64Bits() && !regoffset_.IsSP());  in MemOperand()
     [all …]
|
D | assembler-aarch64.cc |
     181  VIXL_ASSERT(xn.Is64Bits());  in br()
     187  VIXL_ASSERT(xn.Is64Bits());  in blr()
     193  VIXL_ASSERT(xn.Is64Bits());  in ret()
     352  VIXL_ASSERT(rt.Is64Bits() || (rt.Is32Bits() && (bit_pos < kWRegSize)));  in tbz()
     365  VIXL_ASSERT(rt.Is64Bits() || (rt.Is32Bits() && (bit_pos < kWRegSize)));  in tbnz()
     378  VIXL_ASSERT(xd.Is64Bits());  in adr()
     389  VIXL_ASSERT(xd.Is64Bits());  in adrp()
     606  VIXL_ASSERT(rd.Is64Bits() || rn.Is32Bits());  in sbfm()
     763  VIXL_ASSERT(wd.Is32Bits() && wn.Is32Bits() && xm.Is64Bits());  in crc32x()
     795  VIXL_ASSERT(wd.Is32Bits() && wn.Is32Bits() && xm.Is64Bits());  in crc32cx()
     [all …]
|
D | operands-aarch64.h |
     121  bool Is64Bits() const {  in Is64Bits() function
     195  bool IsX() const { return IsValidRegister() && Is64Bits(); }  in IsX()
     207  bool IsD() const { return IsV() && Is64Bits(); }  in IsD()
     370  bool Is8B() const { return (Is64Bits() && (lanes_ == 8)); }  in Is8B()
     372  bool Is4H() const { return (Is64Bits() && (lanes_ == 4)); }  in Is4H()
     374  bool Is2S() const { return (Is64Bits() && (lanes_ == 2)); }  in Is2S()
     376  bool Is1D() const { return (Is64Bits() && (lanes_ == 1)); }  in Is1D()
|
D | macro-assembler-aarch64.cc |
     417   VIXL_ASSERT(IsUint32(imm) || IsInt32(imm) || rd.Is64Bits());  in MoveImmediateHelper()
     525   masm->movn(dst, dst.Is64Bits() ? ~imm : (~imm & kWRegMask));  in OneInstrMoveImmediateHelper()
     829   VIXL_ASSERT(rd.Is64Bits() || IsUint32(immediate));  in LogicalMacro()
     849   } else if ((rd.Is64Bits() && (immediate == UINT64_C(0xffffffffffffffff))) ||  in LogicalMacro()
     898   operand.GetRegister().Is64Bits() ||  in LogicalMacro()
     952   movi(vd.Is64Bits() ? vd.V8B() : vd.V16B(), byte1);  in Movi16bitHelper()
     987   movi(vd.Is64Bits() ? vd.V1D() : vd.V2D(), ((imm << 32) | imm));  in Movi32bitHelper()
     1034  Movi16bitHelper(vd.Is64Bits() ? vd.V4H() : vd.V8H(), imm & 0xffff);  in Movi32bitHelper()
     1067  Movi32bitHelper(vd.Is64Bits() ? vd.V2S() : vd.V4S(), imm & 0xffffffff);  in Movi64bitHelper()
     1688  if (operand.IsZero() && rd.Is(rn) && rd.Is64Bits() && rn.Is64Bits() &&  in AddSubMacro()
     [all …]
|
D | assembler-aarch64.h |
     2692  return rd.Is64Bits() ? SixtyFourBits : ThirtyTwoBits;  in SF()
     2868  static Instr FPType(FPRegister fd) { return fd.Is64Bits() ? FP64 : FP32; }  in FPType()
     2893  if (vd.Is64Bits()) {  in VFormat()
     2926  VIXL_ASSERT(vd.Is32Bits() || vd.Is64Bits());  in FPFormat()
     2927  return vd.Is64Bits() ? FP64 : FP32;  in FPFormat()
     2932  VIXL_ASSERT(vd.Is64Bits() || vd.Is128Bits());  in FPFormat()
     2943  if (vd.Is64Bits()) {  in LSVFormat()
     3086  return reg.Is64Bits() ? Register(xzr) : Register(wzr);  in AppropriateZeroRegFor()
|
D | macro-assembler-aarch64.h |
     49    V(Ldrsb, Register&, rt, rt.Is64Bits() ? LDRSB_x : LDRSB_w) \
     52    V(Ldrsh, Register&, rt, rt.Is64Bits() ? LDRSH_x : LDRSH_w) \
     1350  if (!vd.Is(vn) || !vd.Is64Bits()) {  in Fmov()
     1549  if (rt.Is64Bits()) {  in Ldr()
|
D | debugger-aarch64.cc | 94 return value().Is64Bits(); in CanAddressMemory()
|
D | disasm-aarch64.cc |
     4025  reg_char = reg.Is64Bits() ? 'x' : 'w';  in AppendRegisterNameToOutput()
     4052  AppendToOutput("%s", reg.Is64Bits() ? "sp" : "wsp");  in AppendRegisterNameToOutput()
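
In operands-aarch64.h above, Is64Bits() is also combined with a lane count to classify vector formats (Is8B/Is4H/Is2S/Is1D). A minimal model of that classification; this is not the real vixl::aarch64::VRegister:

    // Stand-alone model of the vector-format predicates; not vixl::aarch64::VRegister.
    #include <cassert>

    struct VRegSketch {
      unsigned size_bits;  // total register width: 64 (D) or 128 (Q)
      unsigned lanes_;     // lane count of the current format
      bool Is64Bits() const { return size_bits == 64; }
      bool Is8B() const { return Is64Bits() && (lanes_ == 8); }  // 8 x 8-bit lanes
      bool Is4H() const { return Is64Bits() && (lanes_ == 4); }  // 4 x 16-bit lanes
      bool Is2S() const { return Is64Bits() && (lanes_ == 2); }  // 2 x 32-bit lanes
      bool Is1D() const { return Is64Bits() && (lanes_ == 1); }  // 1 x 64-bit lane
    };

    int main() {
      VRegSketch v0_8b{64, 8};
      VRegSketch v1_16b{128, 16};
      assert(v0_8b.Is8B());
      assert(!v1_16b.Is8B());  // a 128-bit register never matches a 64-bit format
      return 0;
    }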
|
/external/vixl/test/aarch64/ |
D | test-utils-aarch64.cc |
     151  VIXL_ASSERT(reg.Is64Bits());  in Equal64()
     190  VIXL_ASSERT(fpreg.Is64Bits());  in EqualFP64()
     198  VIXL_ASSERT(reg0.Is64Bits() && reg1.Is64Bits());  in Equal64()
     208  VIXL_ASSERT(vreg.Is64Bits());  in Equal64()
|
/external/llvm/include/llvm/Object/ |
D | ELFTypes.h |
     46   static const bool Is64Bits = Is64;  member
     140  LLVM_ELF_IMPORT_TYPES(ELFT::TargetEndianness, ELFT::Is64Bits)
     365  typedef typename std::conditional<ELFT::Is64Bits,
     367  typedef typename std::conditional<ELFT::Is64Bits,
|
D | MachO.h |
     197  create(MemoryBufferRef Object, bool IsLittleEndian, bool Is64Bits);
     445  MachOObjectFile(MemoryBufferRef Object, bool IsLittleEndian, bool Is64Bits,
|
D | ELFObjectFile.h |
     346  ELFT::Is64Bits);  in classof()
     754  getELFType(ELFT::TargetEndianness == support::little, ELFT::Is64Bits),  in ELFObjectFile()
     830  return ELFT::Is64Bits ? 8 : 4;  in getBytesInAddress()
|
D | ELF.h | 40 typedef typename std::conditional<ELFT::Is64Bits,
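
On the LLVM side, Is64Bits is not a method but a compile-time constant on the ELF type traits, and std::conditional selects 32- or 64-bit field types from it (ELFTypes.h line 46, ELF.h line 40). A sketch of that pattern under assumed names; ElfTraitsSketch, HeaderSketch, and AddrType are illustrative, not LLVM's ELFType definitions:

    // Illustrative names only; not LLVM's ELFType/Elf_Ehdr definitions.
    #include <cstdint>
    #include <type_traits>

    template <bool Is64>
    struct ElfTraitsSketch {
      static const bool Is64Bits = Is64;  // like ELFTypes.h line 46
    };

    template <class ELFT>
    struct HeaderSketch {
      // Like ELF.h line 40: pick the address width from ELFT::Is64Bits.
      typedef typename std::conditional<ELFT::Is64Bits, uint64_t, uint32_t>::type AddrType;
      AddrType entry_point;
    };

    static_assert(sizeof(HeaderSketch<ElfTraitsSketch<true> >::AddrType) == 8,
                  "ELF64 addresses are 8 bytes");
    static_assert(sizeof(HeaderSketch<ElfTraitsSketch<false> >::AddrType) == 4,
                  "ELF32 addresses are 4 bytes");

    int main() { return 0; }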
|
/external/llvm/tools/llvm-objdump/ |
D | ELFDump.cpp | 54 const char *Fmt = ELFT::Is64Bits ? "0x%016" PRIx64 " " : "0x%08" PRIx64 " "; in printProgramHeaders()
|
/external/llvm/tools/llvm-readobj/ |
D | ELFDumper.cpp |
     1771  bool Is64 = ELFT::Is64Bits;  in printDynamicTable()
     2462  unsigned Width = ELFT::Is64Bits ? 16 : 8;  in printRelocation()
     2463  unsigned Bias = ELFT::Is64Bits ? 8 : 0;  in printRelocation()
     2530  printRelocHeader(OS, ELFT::Is64Bits, (Sec.sh_type == ELF::SHT_RELA));  in printRelocations()
     2640  if (ELFT::Is64Bits) {  in printSections()
     2721  if (ELFT::Is64Bits)  in printSymtabMessage()
     2779  if (ELFT::Is64Bits) {  in printSymbol()
     2894  unsigned Bias = ELFT::Is64Bits ? 8 : 0;  in printProgramHeaders()
     2895  unsigned Width = ELFT::Is64Bits ? 18 : 10;  in printProgramHeaders()
     2896  unsigned SizeWidth = ELFT::Is64Bits ? 8 : 7;  in printProgramHeaders()
     [all …]
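
The dumpers pick printing widths from the same constant, so ELF64 values get 16 hex digits and ELF32 values get 8 (ELFDump.cpp line 54, ELFDumper.cpp lines 2462 and 2894-2896). A small sketch of that formatting choice, not the actual llvm-objdump/llvm-readobj code:

    // Illustrative only; not the actual dumper code.
    #include <cinttypes>
    #include <cstdint>
    #include <cstdio>

    template <bool Is64Bits>
    void PrintAddress(uint64_t addr) {
      // Same shape as ELFDump.cpp line 54: 16 hex digits for ELF64, 8 for ELF32.
      const char* fmt = Is64Bits ? "0x%016" PRIx64 "\n" : "0x%08" PRIx64 "\n";
      std::printf(fmt, addr);
    }

    int main() {
      PrintAddress<true>(0x400000);   // 0x0000000000400000
      PrintAddress<false>(0x400000);  // 0x00400000
      return 0;
    }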
|
/external/llvm/tools/yaml2obj/ |
D | yaml2elf.cpp | 166 Header.e_ident[EI_CLASS] = ELFT::Is64Bits ? ELFCLASS64 : ELFCLASS32; in initELFHeader()
|
/external/llvm/lib/Object/ |
D | MachOObjectFile.cpp |
     253  bool Is64Bits) {  in create() argument
     257  Is64Bits, Err));
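
Unlike the ELF templates, Mach-O's Is64Bits is a plain runtime flag threaded through the factory and constructor (MachO.h lines 197 and 445, MachOObjectFile.cpp line 253). A sketch of that shape with illustrative names only:

    // Illustrative class; not llvm::object::MachOObjectFile.
    #include <cstdint>

    class MachOFileSketch {
     public:
      MachOFileSketch(bool is_little_endian, bool is_64_bits)
          : is_little_endian_(is_little_endian), is_64_bits_(is_64_bits) {}

      // mach_header is 28 bytes, mach_header_64 is 32 (extra reserved field),
      // so parsing offsets depend on the runtime flag rather than on a template.
      uint32_t HeaderSize() const { return is_64_bits_ ? 32 : 28; }

     private:
      bool is_little_endian_;
      bool is_64_bits_;
    };

    int main() {
      MachOFileSketch file(/*is_little_endian=*/true, /*is_64_bits=*/true);
      return file.HeaderSize() == 32 ? 0 : 1;
    }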
|