/third_party/vixl/test/aarch64/ |
D | test-utils-aarch64.h |
      62  size_t lane_size = sizeof(T);  in GetLane() local
      64  VIXL_CHECK(kSizeInBytes >= ((lane + 1) * lane_size));  in GetLane()
      66  memcpy(&result, bytes + (lane * lane_size), lane_size);  in GetLane()
      72  size_t lane_size = sizeof(value);  in SetLane() local
      73  VIXL_CHECK(kSizeInBytes >= ((lane + 1) * lane_size));  in SetLane()
      74  memcpy(bytes + (lane * lane_size), &value, lane_size);  in SetLane()
     466  int lane_size = result.GetLaneSizeInBits();  in EqualSVE() local
     467  for (int lane = 0; lane < core->GetSVELaneCount(lane_size); ++lane) {  in EqualSVE()
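
The GetLane()/SetLane() hits above are a plain byte-buffer lane accessor: compute the lane's byte offset as lane * lane_size, bounds-check it against the register width, and memcpy the element in or out. A minimal, self-contained sketch of that pattern, assuming a 16-byte (Q-sized) buffer; kSizeInBytes and the buffer type are illustrative here, not the VIXL definitions:

    #include <cassert>
    #include <cstddef>
    #include <cstdint>
    #include <cstring>

    constexpr size_t kSizeInBytes = 16;  // assumed register width for this sketch

    template <typename T>
    T GetLane(const uint8_t (&bytes)[kSizeInBytes], size_t lane) {
      T result;
      size_t lane_size = sizeof(T);
      assert(kSizeInBytes >= ((lane + 1) * lane_size));  // lane must lie inside the buffer
      memcpy(&result, bytes + (lane * lane_size), lane_size);
      return result;
    }

    template <typename T>
    void SetLane(uint8_t (&bytes)[kSizeInBytes], size_t lane, T value) {
      size_t lane_size = sizeof(value);
      assert(kSizeInBytes >= ((lane + 1) * lane_size));
      memcpy(bytes + (lane * lane_size), &value, lane_size);
    }
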
|
D | test-utils-aarch64.cc |
     361  unsigned lane_size = reg.GetLaneSizeInBits();  in EqualSVELane() local
     366  VIXL_ASSERT(IsUintN(lane_size, expected) ||  in EqualSVELane()
     367  IsIntN(lane_size, RawbitsToInt64(expected)));  in EqualSVELane()
     368  expected &= GetUintMask(lane_size);  in EqualSVELane()
     370  uint64_t result = core->zreg_lane(reg.GetCode(), lane_size, lane);  in EqualSVELane()
     372  unsigned lane_size_in_hex_chars = lane_size / 4;  in EqualSVELane()
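
EqualSVELane() masks the expected value down to the lane width and sizes its hex output as lane_size / 4 characters per lane. A stand-in for the GetUintMask() helper it relies on (same name as VIXL's helper, body only illustrative):

    #include <cstdint>

    // Mask with the low `bits` bits set; bits is 8, 16, 32 or 64 for SVE lanes.
    uint64_t GetUintMask(unsigned bits) {
      return (bits >= 64) ? ~UINT64_C(0) : ((UINT64_C(1) << bits) - 1);
    }

    // e.g. GetUintMask(16) == 0xffff, and a 16-bit lane prints as 16 / 4 = 4 hex digits.
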
|
D | test-assembler-sve-aarch64.cc |
   17891  int lane_size = lane_sizes[i];  in TEST_SVE() local
   17893  TestFpCompareHelper(config, lane_size, gt, zn, zm, pd_fcm_gt);  in TEST_SVE()
   17894  TestFpCompareHelper(config, lane_size, lt, zn, zm, pd_fcm_lt);  in TEST_SVE()
   17895  TestFpCompareHelper(config, lane_size, ge, zn, zm, pd_fcm_ge);  in TEST_SVE()
   17896  TestFpCompareHelper(config, lane_size, le, zn, zm, pd_fcm_le);  in TEST_SVE()
   17897  TestFpCompareHelper(config, lane_size, eq, zn, zm, pd_fcm_eq);  in TEST_SVE()
   17898  TestFpCompareHelper(config, lane_size, ne, zn, zm, pd_fcm_ne);  in TEST_SVE()
   17899  TestFpCompareHelper(config, lane_size, uo, zn, zm, pd_fcm_uo);  in TEST_SVE()
   17902  TestFpCompareHelper(config, lane_size, gt, zn, zm, pd_fac_gt, true);  in TEST_SVE()
   17903  TestFpCompareHelper(config, lane_size, lt, zn, zm, pd_fac_lt, true);  in TEST_SVE()
   [all …]
|
/third_party/vixl/src/aarch64/ |
D | simulator-aarch64.cc |
    1045  unsigned reg_size, unsigned lane_size) {  in Simulator() argument
    1046  VIXL_ASSERT(reg_size >= lane_size);  in Simulator()
    1049  if (reg_size != lane_size) {  in Simulator()
    1063  switch (lane_size) {  in Simulator()
    1246  int lane_size = GetPrintRegLaneSizeInBytes(format);  in Simulator() local
    1251  bool access = (lane_mask & (1 << (i * lane_size))) != 0;  in Simulator()
    1257  switch (lane_size) {  in Simulator()
    1261  memcpy(&element_fp16, &value[i * lane_size], sizeof(element_fp16));  in Simulator()
    1267  memcpy(&element_fp32, &value[i * lane_size], sizeof(element_fp32));  in Simulator()
    1272  memcpy(&element, &value[i * lane_size], sizeof(element));  in Simulator()
    [all …]
|
D | logic-aarch64.cc |
     565  int lane_size = LaneSizeInBitsFromFormat(vform);  in add() local
     586  dst.SetInt(vform, i, ur >> (64 - lane_size));  in add()
     595  int lane_size = LaneSizeInBitsFromFormat(vform);  in add_uint() local
     596  VIXL_ASSERT(IsUintN(lane_size, value));  in add_uint()
     599  uint64_t ub = value << (64 - lane_size);  in add_uint()
     615  dst.SetInt(vform, i, ur >> (64 - lane_size));  in add_uint()
     957  int lane_size = LaneSizeInBitsFromFormat(vform);  in sub() local
     978  dst.SetInt(vform, i, ur >> (64 - lane_size));  in sub()
     987  int lane_size = LaneSizeInBitsFromFormat(vform);  in sub_uint() local
     988  VIXL_ASSERT(IsUintN(lane_size, value));  in sub_uint()
     [all …]
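
The add()/sub() hits all end with ur >> (64 - lane_size): the simulator left-aligns each lane at the top of a 64-bit word, does the arithmetic there so that carries and overflow fall out of bit 63, and then shifts the result back down. A sketch of that idea for an unsigned saturating add; this shows the general technique, not VIXL's exact code:

    #include <cstdint>

    // a and b are lane_size-bit unsigned values held in the low bits of 64-bit words.
    uint64_t SatAddUnsigned(uint64_t a, uint64_t b, int lane_size) {
      uint64_t ua = a << (64 - lane_size);  // left-align the lane
      uint64_t ub = b << (64 - lane_size);
      uint64_t ur = ua + ub;
      if (ur < ua) ur = ~UINT64_C(0);       // carry out of the lane: saturate to all ones
      return ur >> (64 - lane_size);        // re-align the result to the low bits
    }
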
|
D | assembler-aarch64.cc |
    2329  unsigned lane_size = vt.GetLaneSizeInBytes();  in LoadStoreStructSingle() local
    2330  VIXL_ASSERT(lane_size > 0);  in LoadStoreStructSingle()
    2331  VIXL_ASSERT(lane < (kQRegSizeInBytes / lane_size));  in LoadStoreStructSingle()
    2335  lane *= lane_size;  in LoadStoreStructSingle()
    2336  if (lane_size == 8) lane++;  in LoadStoreStructSingle()
    2343  switch (lane_size) {  in LoadStoreStructSingle()
    2354  VIXL_ASSERT(lane_size == 8);  in LoadStoreStructSingle()
    4480  int lane_size = vn.GetLaneSizeInBytes();  in dup() local
    4482  switch (lane_size) {  in dup()
    4493  VIXL_ASSERT(lane_size == 8);  in dup()
    [all …]
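
LoadStoreStructSingle() checks the lane index against the number of lanes that fit in a 128-bit Q register and then scales it by the lane size before folding it into the encoding. A sketch of just that check and conversion; the encoding adjustments in the real code are omitted, and kQRegSizeInBytes = 16 is the Q-register width:

    #include <cassert>
    #include <cstddef>

    constexpr std::size_t kQRegSizeInBytes = 16;  // 128-bit Q register

    std::size_t LaneByteOffset(std::size_t lane, std::size_t lane_size) {
      assert(lane_size > 0);
      assert(lane < kQRegSizeInBytes / lane_size);  // index must address a lane inside Q
      return lane * lane_size;                      // byte offset of the selected lane
    }
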
|
D | instructions-aarch64.cc |
    1404  int lane_size = LaneSizeInBitsFromFormat(vform);  in MaxIntFromFormat() local
    1405  return static_cast<int64_t>(GetUintMask(lane_size) >> 1);  in MaxIntFromFormat()
|
D | disasm-aarch64.cc |
    5225  int lane_size = instr->GetSVEBitwiseImmLaneSizeInBytesLog2();  in Disassembler() local
    5226  mnemonic = SVEMoveMaskPreferred(imm, lane_size) ? "mov" : "dupm";  in Disassembler()
    7617  unsigned lane_size = instr->GetSVESize();  in Disassembler() local
    7649  if (lane_size <= kSRegSizeInBytesLog2) {  in Disassembler()
    8973  int lane_size = shift_and_lane_size.second;  in Disassembler() local
    8976  shift_dist = (8 << lane_size) - shift_dist;  in Disassembler()
    8977  if ((lane_size >= static_cast<int>(kBRegSizeInBytesLog2)) &&  in Disassembler()
    8978  (lane_size <= static_cast<int>(kSRegSizeInBytesLog2)) &&  in Disassembler()
    9011  int lane_size = shift_and_lane_size.second;  in Disassembler() local
    9012  if ((lane_size >= static_cast<int>(kBRegSizeInBytesLog2)) &&  in Disassembler()
    [all …]
|
D | registers-aarch64.h |
     535  EncodedSize lane_size,
     541  lane_size_(lane_size) {}  in code_()
|
D | macro-assembler-sve-aarch64.cc |
     481  unsigned lane_size = zd.GetLaneSizeInBits();  in Dup() local
     488  } else if (IsImmLogical(imm.AsUintN(lane_size), lane_size)) {  in Dup()
     490  dupm(zd, imm.AsUintN(lane_size));  in Dup()
|
D | assembler-sve-aarch64.cc |
      56  int lane_size = zd.GetLaneSizeInBits();  in adr() local
      57  VIXL_ASSERT((lane_size == kSRegSize) || (lane_size == kDRegSize));  in adr()
      67  VIXL_ASSERT(lane_size == kDRegSize);  in adr()
      71  VIXL_ASSERT(lane_size == kDRegSize);  in adr()
      76  op = (lane_size == kSRegSize) ? ADR_z_az_s_same_scaled  in adr()
      90  unsigned lane_size = zdn.GetLaneSizeInBits();  in SVELogicalImmediate() local
      92  if (IsImmLogical(imm, lane_size, &bit_n, &imm_s, &imm_r)) {  in SVELogicalImmediate()
      93  Emit(op | Rd(zdn) | SVEBitN(bit_n) | SVEImmRotate(imm_r, lane_size) |  in SVELogicalImmediate()
      94  SVEImmSetBits(imm_s, lane_size));  in SVELogicalImmediate()
|
D | assembler-aarch64.h |
    7117  static Instr SVEImmSetBits(unsigned imms, unsigned lane_size) {  in SVEImmSetBits() argument
    7119  VIXL_ASSERT((lane_size == kDRegSize) || IsUint6(imms + 3));  in SVEImmSetBits()
    7120  USE(lane_size);  in SVEImmSetBits()
    7124  static Instr SVEImmRotate(unsigned immr, unsigned lane_size) {  in SVEImmRotate() argument
    7125  VIXL_ASSERT(IsUintN(WhichPowerOf2(lane_size), immr));  in SVEImmRotate()
    7126  USE(lane_size);  in SVEImmRotate()
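
SVEImmRotate() asserts that the rotate amount fits in log2(lane_size) bits, e.g. 6 bits for a 64-bit (kDRegSize) lane. Stand-ins for the two helpers that assertion uses; the names match VIXL's, but the bodies here are only illustrative:

    #include <cassert>
    #include <cstdint>

    bool IsUintN(unsigned n, uint64_t value) {
      return (n >= 64) || (value < (UINT64_C(1) << n));
    }

    unsigned WhichPowerOf2(uint64_t value) {
      assert(value != 0 && (value & (value - 1)) == 0);  // must be a power of two
      unsigned log2 = 0;
      while ((value >>= 1) != 0) ++log2;
      return log2;
    }

    // WhichPowerOf2(64) == 6, so for a 64-bit lane the check is IsUintN(6, immr).
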
|
D | macro-assembler-aarch64.h |
    4487  int lane_size = std::max(zd.GetLaneSizeInBits(), zn.GetLaneSizeInBits());  in Fcvt() local
    4489  zd.WithLaneSize(lane_size),  in Fcvt()
    4491  zn.WithLaneSize(lane_size));  in Fcvt()
|
D | simulator-aarch64.h |
    2108  unsigned lane_size);
|
/third_party/node/deps/v8/src/codegen/arm64/ |
D | assembler-arm64.cc |
    1881  int lane_size = vd.LaneSizeInBytes();  in ins() local
    1883  switch (lane_size) {  in ins()
    1897  DCHECK_EQ(lane_size, 8);  in ins()
    1916  int lane_size = vn.LaneSizeInBytes();  in smov() local
    1919  switch (lane_size) {  in smov()
    1927  DCHECK_EQ(lane_size, 4);  in smov()
    2067  int lane_size = vn.LaneSizeInBytes();  in umov() local
    2070  switch (lane_size) {  in umov()
    2084  DCHECK_EQ(lane_size, 8);  in umov()
    2113  int lane_size = vd.LaneSizeInBytes();  in ins() local
    [all …]
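
ins()/smov()/umov() all switch on the lane size in bytes to pick the vector arrangement, with the largest size handled in the default branch behind a DCHECK. A sketch of that dispatch shape; the Format names are illustrative, not V8's VectorFormat values:

    #include <cassert>

    enum class Format { kB, kH, kS, kD };  // 1-, 2-, 4- and 8-byte lanes

    Format FormatForLaneSize(int lane_size) {
      switch (lane_size) {
        case 1: return Format::kB;
        case 2: return Format::kH;
        case 4: return Format::kS;
        default:
          assert(lane_size == 8);  // mirrors the DCHECK_EQ(lane_size, 8) above
          return Format::kD;
      }
    }
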
|
/third_party/node/deps/v8/src/execution/arm64/ |
D | simulator-arm64.cc |
    1196  size_t reg_size, size_t lane_size) {  in GetPrintRegisterFormatForSize() argument
    1197  DCHECK_GE(reg_size, lane_size);  in GetPrintRegisterFormatForSize()
    1200  if (reg_size != lane_size) {  in GetPrintRegisterFormatForSize()
    1213  switch (lane_size) {  in GetPrintRegisterFormatForSize()
    1573  int lane_size = 1 << lane_size_log2;  in PrintVRegister() local
    1584  PrintVRegisterFPHelper(code, lane_size, lane_count);  in PrintVRegister()
    1661  int lane_size = GetPrintRegLaneSizeInBytes(format);  in PrintVWrite() local
    1663  PrintVRegisterRawHelper(reg_code, reg_size, lane_size * lane);  in PrintVWrite()
    1665  PrintVRegisterFPHelper(reg_code, lane_size, lane_count, lane);  in PrintVWrite()
    4959  int lane_size = LaneSizeInBytesFromFormat(vf);  in NEONLoadStoreMultiStructHelper() local
    [all …]
|
D | simulator-logic-arm64.cc |
     647  int lane_size = LaneSizeInBitsFromFormat(vform);  in add() local
     668  dst.SetInt(vform, i, ur >> (64 - lane_size));  in add()
     965  int lane_size = LaneSizeInBitsFromFormat(vform);  in sub() local
     986  dst.SetInt(vform, i, ur >> (64 - lane_size));  in sub()
|
D | simulator-arm64.h |
    1293  size_t lane_size);
|
/third_party/node/deps/v8/src/compiler/backend/arm64/ |
D | instruction-selector-arm64.cc |
    3803  bool ShraHelper(InstructionSelector* selector, Node* node, int lane_size,  in ShraHelper() argument
    3812  if (g.GetIntegerConstantValue(m.left()->InputAt(1)) % lane_size == 0) {  in ShraHelper()
    3817  selector->Emit(shra_code | LaneSizeField::encode(lane_size),  in ShraHelper()
    3825  bool AdalpHelper(InstructionSelector* selector, Node* node, int lane_size,  in AdalpHelper() argument
    3830  selector->Emit(adalp_code | LaneSizeField::encode(lane_size),  in AdalpHelper()
    3847  bool SmlalHelper(InstructionSelector* selector, Node* node, int lane_size,  in SmlalHelper() argument
    3853  selector->Emit(smlal_code | LaneSizeField::encode(lane_size),  in SmlalHelper()
    4239  Node* node, int lane_size) {  in VisitSignExtendLong() argument
    4241  code |= LaneSizeField::encode(lane_size);  in VisitSignExtendLong()
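
ShraHelper() pattern-matches an add whose operand is a constant vector shift and emits a fused shift-right-accumulate, special-casing shift amounts that are a multiple of the lane size. A behavioural sketch of what the fused operation computes per lane, shown here for 32-bit lanes of a 128-bit vector; this illustrates the semantics being matched, not the selector code:

    #include <cstdint>

    void ShiftRightAccumulate(uint32_t acc[4], const uint32_t src[4], int shift) {
      for (int i = 0; i < 4; ++i) {
        acc[i] += src[i] >> shift;  // shift each source lane right, then accumulate
      }
    }
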
|
/third_party/node/deps/v8/src/codegen/s390/ |
D | macro-assembler-s390.cc |
    5875  #define EXT_ADD_PAIRWISE(dst, src, scratch1, scratch2, lane_size, mul_even, \  in CallRecordWriteStub() argument
    5878  vrepi(scratch2, Operand(1), Condition(lane_size)); \  in CallRecordWriteStub()
    5880  Condition(lane_size)); \  in CallRecordWriteStub()
    5882  Condition(lane_size)); \  in CallRecordWriteStub()
    5884  Condition(lane_size + 1));  in CallRecordWriteStub()
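
The EXT_ADD_PAIRWISE macro replicates the constant 1 across lanes and uses the widening multiply-even/multiply-odd operations it is given to implement extended pairwise addition. A behavioural sketch of the operation being implemented, instantiated here for signed 8-bit lanes widened to 16 bits (one example lane width, not the only instantiation):

    #include <cstdint>

    void ExtAddPairwiseI8ToI16(const int8_t src[16], int16_t dst[8]) {
      for (int i = 0; i < 8; ++i) {
        // Sum each adjacent pair of narrow lanes into one wider destination lane.
        dst[i] = static_cast<int16_t>(src[2 * i]) + static_cast<int16_t>(src[2 * i + 1]);
      }
    }
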
|
/third_party/node/deps/v8/src/execution/ppc/ |
D | simulator-ppc.cc |
    4469  size_t lane_size = sizeof(input_type); \  in ExecuteGeneric()
    4472  j = lane_size; \  in ExecuteGeneric()
    4474  for (; j < kSimd128Size; i += 2, j += lane_size * 2, k++) { \  in ExecuteGeneric()
|
/third_party/node/deps/v8/src/execution/s390/ |
D | simulator-s390.cc |
    3350  size_t lane_size = sizeof(input_type); \
    3353  j = lane_size; \
    3355  for (; j < kSimd128Size; i += 2, j += lane_size * 2, k++) { \
|