Lines Matching refs:zd (lines in VIXL's AArch64 SVE MacroAssembler that reference the destination Z register zd)

33                                   const ZRegister& zd,  in AddSubHelper()  argument
36 VIXL_ASSERT(imm.FitsInLane(zd)); in AddSubHelper()
39 if (TrySingleAddSub(option, zd, zn, imm)) return; in AddSubHelper()
47 IntegerOperand signed_imm(imm.AsIntN(zd.GetLaneSizeInBits())); in AddSubHelper()
53 if (TrySingleAddSub(n_option, zd, zn, n_imm)) return; in AddSubHelper()
63 add(zd, zn, scratch); in AddSubHelper()
65 sub(zd, zn, scratch); in AddSubHelper()
70 const ZRegister& zd, in TrySingleAddSub() argument
73 VIXL_ASSERT(imm.FitsInLane(zd)); in TrySingleAddSub()
77 if (imm.TryEncodeAsShiftedUintNForLane<8, 0>(zd, &imm8, &shift) || in TrySingleAddSub()
78 imm.TryEncodeAsShiftedUintNForLane<8, 8>(zd, &imm8, &shift)) { in TrySingleAddSub()
79 MovprfxHelperScope guard(this, zd, zn); in TrySingleAddSub()
82 add(zd, zd, imm8, shift); in TrySingleAddSub()
85 sub(zd, zd, imm8, shift); in TrySingleAddSub()
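
The TryEncodeAsShiftedUintNForLane<8, 0> / <8, 8> probes above suggest the usual SVE ADD/SUB (immediate) constraint: an unsigned 8-bit value, optionally shifted left by 8, with anything else falling back to the add(zd, zn, scratch) / sub(zd, zn, scratch) lines shown earlier. A minimal standalone sketch of that check, assuming this reading of the fragments (the function name and signature below are illustrative, not VIXL's):

    #include <cstdint>
    #include <cstdio>

    // Accepts imm if it fits the "uint8, LSL #0 or #8" pattern; otherwise the
    // caller is expected to materialise the value in a scratch register.
    static bool EncodeAsShiftedUint8(uint64_t imm, unsigned lane_bits,
                                     int* imm8, int* shift) {
      if (imm <= 0xff) {  // Fits directly: LSL #0.
        *imm8 = static_cast<int>(imm);
        *shift = 0;
        return true;
      }
      // LSL #8 form needs at least 16-bit lanes and a clear low byte.
      if ((lane_bits >= 16) && ((imm & 0xff) == 0) && ((imm >> 8) <= 0xff)) {
        *imm8 = static_cast<int>(imm >> 8);
        *shift = 8;
        return true;
      }
      return false;
    }

    int main() {
      int imm8, shift;
      printf("%d\n", EncodeAsShiftedUint8(0x2a00, 32, &imm8, &shift));  // 1: 42, LSL #8
      printf("%d\n", EncodeAsShiftedUint8(0x1234, 32, &imm8, &shift));  // 0: needs scratch
      return 0;
    }
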
94 const ZRegister& zd, in IntWideImmHelper() argument
101 MovprfxHelperScope guard(this, zd, zn); in IntWideImmHelper()
102 (this->*imm_fn)(zd, zd, imm.AsInt8()); in IntWideImmHelper()
108 MovprfxHelperScope guard(this, zd, zn); in IntWideImmHelper()
109 (this->*imm_fn)(zd, zd, imm.AsUint8()); in IntWideImmHelper()
116 Ptrue(pg.WithSameLaneSizeAs(zd)); in IntWideImmHelper()
120 zd.Aliases(zn) ? temps.AcquireZ().WithLaneSize(zn.GetLaneSizeInBits()) in IntWideImmHelper()
121 : zd; in IntWideImmHelper()
126 (this->*reg_macro)(zd, pg.Merging(), zn, scratch); in IntWideImmHelper()
129 void MacroAssembler::Mul(const ZRegister& zd, in Mul() argument
135 IntWideImmHelper(imm_fn, reg_fn, zd, zn, imm, true); in Mul()
138 void MacroAssembler::Smin(const ZRegister& zd, in Smin() argument
142 VIXL_ASSERT(imm.FitsInSignedLane(zd)); in Smin()
145 IntWideImmHelper(imm_fn, reg_fn, zd, zn, imm, true); in Smin()
148 void MacroAssembler::Smax(const ZRegister& zd, in Smax() argument
152 VIXL_ASSERT(imm.FitsInSignedLane(zd)); in Smax()
155 IntWideImmHelper(imm_fn, reg_fn, zd, zn, imm, true); in Smax()
158 void MacroAssembler::Umax(const ZRegister& zd, in Umax() argument
162 VIXL_ASSERT(imm.FitsInUnsignedLane(zd)); in Umax()
165 IntWideImmHelper(imm_fn, reg_fn, zd, zn, imm, false); in Umax()
168 void MacroAssembler::Umin(const ZRegister& zd, in Umin() argument
172 VIXL_ASSERT(imm.FitsInUnsignedLane(zd)); in Umin()
175 IntWideImmHelper(imm_fn, reg_fn, zd, zn, imm, false); in Umin()
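
The Mul/Smin/Smax/Umax/Umin wrappers above all funnel into IntWideImmHelper with a final signedness flag, and the helper either uses an 8-bit immediate form or builds a Ptrue predicate plus a scratch Z register for the predicated register form. A hedged standalone sketch of that dispatch (the enum and function names are mine, not VIXL's):

    #include <cstdint>
    #include <cstdio>

    // Three outcomes, mirroring the branches visible above: a signed 8-bit
    // immediate (Mul/Smin/Smax pass is_signed = true), an unsigned 8-bit
    // immediate (Umin/Umax pass false), or the fallback sequence
    // Ptrue pg; Dup scratch, imm; op zd, pg/m, zn, scratch.
    enum class WideImmPlan { kSignedImm8, kUnsignedImm8, kScratchRegister };

    static WideImmPlan PlanIntWideImm(int64_t imm, bool is_signed) {
      if (is_signed && (imm >= -128) && (imm <= 127)) return WideImmPlan::kSignedImm8;
      if (!is_signed && (imm >= 0) && (imm <= 255)) return WideImmPlan::kUnsignedImm8;
      return WideImmPlan::kScratchRegister;
    }

    int main() {
      printf("%d\n", static_cast<int>(PlanIntWideImm(-5, true)));    // 0: imm8 form
      printf("%d\n", static_cast<int>(PlanIntWideImm(200, false)));  // 1: imm8 form
      printf("%d\n", static_cast<int>(PlanIntWideImm(1000, true)));  // 2: scratch Z register
      return 0;
    }

The zd.Aliases(zn) check visible in the helper additionally routes the Dup through a separate scratch register when the destination overlaps an input, so duplicating the immediate does not clobber zn.
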
363 void MacroAssembler::Cpy(const ZRegister& zd, in Cpy() argument
367 VIXL_ASSERT(imm.FitsInLane(zd)); in Cpy()
370 if (imm.TryEncodeAsShiftedIntNForLane<8, 0>(zd, &imm8, &shift) || in Cpy()
371 imm.TryEncodeAsShiftedIntNForLane<8, 8>(zd, &imm8, &shift)) { in Cpy()
373 cpy(zd, pg, imm8, shift); in Cpy()
381 dup(zd, 0); in Cpy()
386 VIXL_ASSERT(imm.FitsInLane(zd)); in Cpy()
387 if (zd.GetLaneSizeInBits() >= kHRegSize) { in Cpy()
389 switch (zd.GetLaneSizeInBits()) { in Cpy()
408 fcpy(zd, pg_m, fp_imm); in Cpy()
415 Register scratch = temps.AcquireRegisterToHoldLane(zd); in Cpy()
419 cpy(zd, pg_m, scratch); in Cpy()
427 void MacroAssembler::Fcpy(const ZRegister& zd, in Fcpy() argument
435 fcpy(zd, pg, imm); in Fcpy()
441 Cpy(zd, pg, FPToRawbitsWithSize(zd.GetLaneSizeInBits(), imm)); in Fcpy()
444 void MacroAssembler::Fcpy(const ZRegister& zd, in Fcpy() argument
452 fcpy(zd, pg, imm); in Fcpy()
458 Cpy(zd, pg, FPToRawbitsWithSize(zd.GetLaneSizeInBits(), imm)); in Fcpy()
461 void MacroAssembler::Fcpy(const ZRegister& zd, in Fcpy() argument
469 fcpy(zd, pg, imm); in Fcpy()
475 Cpy(zd, pg, FPToRawbitsWithSize(zd.GetLaneSizeInBits(), imm)); in Fcpy()
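
All three Fcpy overloads above share a fallback: when the floating-point immediate cannot be encoded by fcpy directly, it is handed to Cpy as raw bits sized to the destination lane (the FPToRawbitsWithSize calls). A minimal sketch of what that conversion plausibly does for 32-bit and 64-bit lanes, with half precision omitted for brevity (the function below is illustrative, not VIXL's helper):

    #include <cstdint>
    #include <cstring>
    #include <cstdio>

    // Reinterpret a double immediate as the raw IEEE-754 bit pattern of the
    // lane-sized floating-point format, narrowing first where necessary.
    static uint64_t FPToRawbitsWithSizeSketch(unsigned lane_bits, double imm) {
      if (lane_bits == 64) {
        uint64_t bits;
        std::memcpy(&bits, &imm, sizeof(bits));
        return bits;
      }
      if (lane_bits == 32) {
        float f = static_cast<float>(imm);  // Narrow, then take the bits.
        uint32_t bits;
        std::memcpy(&bits, &f, sizeof(bits));
        return bits;
      }
      return 0;  // 16-bit lanes would need a float-to-half conversion.
    }

    int main() {
      printf("%llx\n", (unsigned long long)FPToRawbitsWithSizeSketch(64, 1.0));  // 3ff0000000000000
      printf("%llx\n", (unsigned long long)FPToRawbitsWithSizeSketch(32, 1.0));  // 3f800000
      return 0;
    }
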
478 void MacroAssembler::Dup(const ZRegister& zd, IntegerOperand imm) { in Dup() argument
480 VIXL_ASSERT(imm.FitsInLane(zd)); in Dup()
481 unsigned lane_size = zd.GetLaneSizeInBits(); in Dup()
484 if (imm.TryEncodeAsShiftedIntNForLane<8, 0>(zd, &imm8, &shift) || in Dup()
485 imm.TryEncodeAsShiftedIntNForLane<8, 8>(zd, &imm8, &shift)) { in Dup()
487 dup(zd, imm8, shift); in Dup()
490 dupm(zd, imm.AsUintN(lane_size)); in Dup()
493 Register scratch = temps.AcquireRegisterToHoldLane(zd); in Dup()
497 dup(zd, scratch); in Dup()
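
The Dup matches above show a three-tier plan: a shifted signed 8-bit immediate for dup, the bitwise-immediate form dupm, and finally a scratch general register. A standalone sketch of that triage, with a deliberately conservative stand-in for the dupm eligibility test (everything below is illustrative, not VIXL's encoding logic):

    #include <bitset>
    #include <cstdint>
    #include <cstdio>

    // Conservative stand-in for the DUPM test: accept only a rotated
    // contiguous run of ones at full lane width, a strict subset of the real
    // bitmask-immediate encoding (which also allows replicated narrower
    // patterns).
    static bool IsRotatedRunOfOnes(uint64_t v, unsigned lane_bits) {
      uint64_t mask = (lane_bits == 64) ? ~UINT64_C(0)
                                        : ((UINT64_C(1) << lane_bits) - 1);
      v &= mask;
      if ((v == 0) || (v == mask)) return false;  // Not a logical immediate.
      uint64_t rot = ((v >> 1) | (v << (lane_bits - 1))) & mask;
      return std::bitset<64>(v ^ rot).count() == 2;  // Exactly one circular block of ones.
    }

    static const char* PlanDup(int64_t imm, unsigned lane_bits) {
      if ((imm >= -128) && (imm <= 127)) return "dup (imm8)";
      if ((lane_bits >= 16) && (imm % 256 == 0) &&
          (imm / 256 >= -128) && (imm / 256 <= 127)) return "dup (imm8, lsl #8)";
      if (IsRotatedRunOfOnes(static_cast<uint64_t>(imm), lane_bits)) return "dupm";
      return "mov scratch, imm; dup (register)";
    }

    int main() {
      printf("%s\n", PlanDup(0x7f, 32));        // dup (imm8)
      printf("%s\n", PlanDup(0x0ffffff0, 32));  // dupm
      printf("%s\n", PlanDup(0x12345678, 32));  // scratch register
      return 0;
    }
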
502 const ZRegister& zd, in NoncommutativeArithmeticHelper() argument
508 if (zd.Aliases(zn)) { in NoncommutativeArithmeticHelper()
511 (this->*fn)(zd, pg, zn, zm); in NoncommutativeArithmeticHelper()
512 } else if (zd.Aliases(zm)) { in NoncommutativeArithmeticHelper()
515 (this->*rev_fn)(zd, pg, zm, zn); in NoncommutativeArithmeticHelper()
518 MovprfxHelperScope guard(this, zd, pg, zn); in NoncommutativeArithmeticHelper()
519 (this->*fn)(zd, pg, zd, zm); in NoncommutativeArithmeticHelper()
524 const ZRegister& zd, in FPCommutativeArithmeticHelper() argument
532 if (zd.Aliases(zn)) { in FPCommutativeArithmeticHelper()
534 (this->*fn)(zd, pg, zd, zm); in FPCommutativeArithmeticHelper()
535 } else if (zd.Aliases(zm)) { in FPCommutativeArithmeticHelper()
540 (this->*fn)(zd, pg, zd, zn); in FPCommutativeArithmeticHelper()
552 Mov(zd, scratch); in FPCommutativeArithmeticHelper()
560 MovprfxHelperScope guard(this, zd, pg, zn); in FPCommutativeArithmeticHelper()
561 (this->*fn)(zd, pg, zd, zm); in FPCommutativeArithmeticHelper()
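
The two helpers above appear to centralise how a three-operand request is mapped onto SVE's destructive predicated encodings (op zd, pg/m, zd, zm). A minimal standalone model of that decision, assuming this reading of the fragments (register indices stand in for ZRegister objects; nothing below is VIXL API):

    #include <cstdio>

    // - zd aliases zn: emit the instruction as-is.
    // - zd aliases zm: use the reversed opcode (e.g. SUBR, FSUBR) when the
    //   operation is not commutative; otherwise just swap the inputs.
    // - otherwise: movprfx zd from zn, then the destructive instruction.
    static const char* PlanDestructiveOp(int zd, int zn, int zm, bool commutative) {
      if (zd == zn) return "op zd, pg/m, zd, zm";
      if (zd == zm) return commutative ? "op zd, pg/m, zd, zn"
                                       : "rev_op zd, pg/m, zd, zn";
      return "movprfx zd, pg/m, zn; op zd, pg/m, zd, zm";
    }

    int main() {
      printf("%s\n", PlanDestructiveOp(0, 0, 1, false));  // already destructive
      printf("%s\n", PlanDestructiveOp(1, 0, 1, false));  // reversed form
      printf("%s\n", PlanDestructiveOp(2, 0, 1, false));  // movprfx + op
      return 0;
    }

The FP variant above also shows a scratch path (the Mov(zd, scratch) line), which presumably covers cases where simply swapping the inputs is not acceptable, such as strict NaN propagation; that case is not modelled here.
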
565 void MacroAssembler::Asr(const ZRegister& zd, in Asr() argument
570 NoncommutativeArithmeticHelper(zd, in Asr()
580 void MacroAssembler::Lsl(const ZRegister& zd, in Lsl() argument
585 NoncommutativeArithmeticHelper(zd, in Lsl()
595 void MacroAssembler::Lsr(const ZRegister& zd, in Lsr() argument
600 NoncommutativeArithmeticHelper(zd, in Lsr()
610 void MacroAssembler::Fdiv(const ZRegister& zd, in Fdiv() argument
615 NoncommutativeArithmeticHelper(zd, in Fdiv()
625 void MacroAssembler::Fsub(const ZRegister& zd, in Fsub() argument
630 NoncommutativeArithmeticHelper(zd, in Fsub()
640 void MacroAssembler::Fadd(const ZRegister& zd, in Fadd() argument
646 FPCommutativeArithmeticHelper(zd, in Fadd()
655 void MacroAssembler::Fabd(const ZRegister& zd, in Fabd() argument
661 FPCommutativeArithmeticHelper(zd, in Fabd()
670 void MacroAssembler::Fmul(const ZRegister& zd, in Fmul() argument
676 FPCommutativeArithmeticHelper(zd, in Fmul()
685 void MacroAssembler::Fmulx(const ZRegister& zd, in Fmulx() argument
691 FPCommutativeArithmeticHelper(zd, in Fmulx()
700 void MacroAssembler::Fmax(const ZRegister& zd, in Fmax() argument
706 FPCommutativeArithmeticHelper(zd, in Fmax()
715 void MacroAssembler::Fmin(const ZRegister& zd, in Fmin() argument
721 FPCommutativeArithmeticHelper(zd, in Fmin()
730 void MacroAssembler::Fmaxnm(const ZRegister& zd, in Fmaxnm() argument
736 FPCommutativeArithmeticHelper(zd, in Fmaxnm()
745 void MacroAssembler::Fminnm(const ZRegister& zd, in Fminnm() argument
751 FPCommutativeArithmeticHelper(zd, in Fminnm()
760 void MacroAssembler::Fdup(const ZRegister& zd, double imm) { in Fdup() argument
763 switch (zd.GetLaneSizeInBits()) { in Fdup()
765 Fdup(zd, Float16(imm)); in Fdup()
768 Fdup(zd, static_cast<float>(imm)); in Fdup()
773 fdup(zd, imm); in Fdup()
775 Dup(zd, DoubleToRawbits(imm)); in Fdup()
781 void MacroAssembler::Fdup(const ZRegister& zd, float imm) { in Fdup() argument
784 switch (zd.GetLaneSizeInBits()) { in Fdup()
786 Fdup(zd, Float16(imm)); in Fdup()
791 fdup(zd, imm); in Fdup()
793 Dup(zd, FloatToRawbits(imm)); in Fdup()
797 Fdup(zd, static_cast<double>(imm)); in Fdup()
802 void MacroAssembler::Fdup(const ZRegister& zd, Float16 imm) { in Fdup() argument
805 switch (zd.GetLaneSizeInBits()) { in Fdup()
809 fdup(zd, imm); in Fdup()
811 Dup(zd, Float16ToRawbits(imm)); in Fdup()
815 Fdup(zd, FPToFloat(imm, kIgnoreDefaultNaN)); in Fdup()
818 Fdup(zd, FPToDouble(imm, kIgnoreDefaultNaN)); in Fdup()
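
The Fdup matches above split between lanes whose immediate fdup can encode directly and lanes that fall back to Dup of the raw bit pattern, with double immediates narrowed for smaller lanes. Presumably an encodability test of roughly this shape gates the fdup path; the sketch below checks the standard AArch64 FP-immediate form for 64-bit lanes, namely +/-(16..31)/16 * 2^n with n in [-3, 4] (the function name is mine, not VIXL's):

    #include <cstdint>
    #include <cstring>
    #include <cstdio>

    // Sign free, exponent bits 61..54 all equal and opposite to bit 62,
    // fraction restricted to its top four bits, everything below zero.
    static bool IsEncodableFPImm64(double imm) {
      uint64_t bits;
      std::memcpy(&bits, &imm, sizeof(bits));
      if ((bits & UINT64_C(0x0000ffffffffffff)) != 0) return false;  // frac[47:0] == 0
      uint64_t mid = (bits >> 54) & 0xff;  // exponent bits 61..54
      if ((mid != 0) && (mid != 0xff)) return false;
      return ((bits >> 62) & 1) != ((bits >> 61) & 1);
    }

    int main() {
      printf("%d %d %d %d\n",
             IsEncodableFPImm64(1.0),    // 1
             IsEncodableFPImm64(0.5),    // 1
             IsEncodableFPImm64(100.0),  // 0: falls back to Dup of the raw bits
             IsEncodableFPImm64(0.1));   // 0
      return 0;
    }
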
823 void MacroAssembler::Index(const ZRegister& zd, in Index() argument
831 const ZRegister& zd) { in Index() argument
835 if (IntegerOperand(op).TryEncodeAsIntNForLane<5>(zd, &imm)) { in Index()
838 Register scratch = temps->AcquireRegisterToHoldLane(zd); in Index()
860 IndexOperand start_enc = IndexOperand::Prepare(this, &temps, start, zd); in Index()
861 IndexOperand step_enc = IndexOperand::Prepare(this, &temps, step, zd); in Index()
866 index(zd, start_enc.GetImm5(), step_enc.GetImm5()); in Index()
868 index(zd, start_enc.GetImm5(), step_enc.GetRegister()); in Index()
872 index(zd, start_enc.GetRegister(), step_enc.GetImm5()); in Index()
874 index(zd, start_enc.GetRegister(), step_enc.GetRegister()); in Index()
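
The four index(...) forms above cover every combination of immediate and register operands; IndexOperand::Prepare evidently decides per operand whether the value fits INDEX's signed 5-bit immediate field. A small sketch of that triage, assuming this reading (names below are illustrative):

    #include <cstdint>
    #include <cstdio>

    // Each operand independently either fits the signed 5-bit immediate field
    // or is first moved into a scratch general register.
    static bool FitsInImm5(int64_t v) { return (v >= -16) && (v <= 15); }

    static const char* PlanIndex(int64_t start, int64_t step) {
      if (FitsInImm5(start) && FitsInImm5(step)) return "index zd, #start, #step";
      if (FitsInImm5(start)) return "index zd, #start, xstep";
      if (FitsInImm5(step)) return "index zd, xstart, #step";
      return "index zd, xstart, xstep";
    }

    int main() {
      printf("%s\n", PlanIndex(0, 1));     // immediate / immediate
      printf("%s\n", PlanIndex(100, 1));   // register / immediate
      printf("%s\n", PlanIndex(100, 64));  // register / register
      return 0;
    }
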
901 void MacroAssembler::Mla(const ZRegister& zd, in Mla() argument
907 if (zd.Aliases(za)) { in Mla()
910 mla(zd, pg, zn, zm); in Mla()
911 } else if (zd.Aliases(zn)) { in Mla()
914 mad(zd, pg, zm, za); in Mla()
915 } else if (zd.Aliases(zm)) { in Mla()
919 mad(zd, pg, zn, za); in Mla()
923 movprfx(zd, pg, za); in Mla()
924 mla(zd, pg, zn, zm); in Mla()
928 void MacroAssembler::Mls(const ZRegister& zd, in Mls() argument
934 if (zd.Aliases(za)) { in Mls()
937 mls(zd, pg, zn, zm); in Mls()
938 } else if (zd.Aliases(zn)) { in Mls()
941 msb(zd, pg, zm, za); in Mls()
942 } else if (zd.Aliases(zm)) { in Mls()
946 msb(zd, pg, zn, za); in Mls()
950 movprfx(zd, pg, za); in Mls()
951 mls(zd, pg, zn, zm); in Mls()
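
The Mla matches above pick between MLA and MAD purely by which operand the destination already aliases, and only pay for a movprfx when zd is distinct from all three inputs: MLA is destructive in the accumulator (mla zda, pg/m, zn, zm) while MAD is destructive in a multiplicand (mad zdn, pg/m, zm, za). A standalone model of that selection (register indices stand in for ZRegister objects):

    #include <cstdio>

    // Mirrors the four branches visible in the Mla fragments above.
    static const char* PlanMla(int zd, int za, int zn, int zm) {
      if (zd == za) return "mla zd, pg/m, zn, zm";
      if (zd == zn) return "mad zd, pg/m, zm, za";  // zd = za + zd * zm
      if (zd == zm) return "mad zd, pg/m, zn, za";  // zd = za + zd * zn
      return "movprfx zd, pg/m, za; mla zd, pg/m, zn, zm";
    }

    int main() {
      printf("%s\n", PlanMla(0, 0, 1, 2));  // accumulator already in place
      printf("%s\n", PlanMla(1, 0, 1, 2));  // mad, destination is a multiplicand
      printf("%s\n", PlanMla(3, 0, 1, 2));  // movprfx + mla
      return 0;
    }

Mls follows the same shape with mls/msb, as the matches above show.
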
1025 void MacroAssembler::Sdiv(const ZRegister& zd, in Sdiv() argument
1030 NoncommutativeArithmeticHelper(zd, in Sdiv()
1040 void MacroAssembler::Sub(const ZRegister& zd, in Sub() argument
1047 if (imm.TryEncodeAsShiftedUintNForLane<8, 0>(zd, &imm8, &shift) || in Sub()
1048 imm.TryEncodeAsShiftedUintNForLane<8, 8>(zd, &imm8, &shift)) { in Sub()
1049 MovprfxHelperScope guard(this, zd, zm); in Sub()
1050 subr(zd, zd, imm8, shift); in Sub()
1057 sub(zd, scratch, zm); in Sub()
1061 void MacroAssembler::Sub(const ZRegister& zd, in Sub() argument
1066 NoncommutativeArithmeticHelper(zd, in Sub()
1076 void MacroAssembler::Udiv(const ZRegister& zd, in Udiv() argument
1081 NoncommutativeArithmeticHelper(zd, in Udiv()
1634 const ZRegister& zd, in SVESdotUdotIndexHelper() argument
1639 if (zd.Aliases(za)) { in SVESdotUdotIndexHelper()
1642 (this->*fn)(zd, zn, zm, index); in SVESdotUdotIndexHelper()
1644 } else if (zd.Aliases(zn) || zd.Aliases(zm)) { in SVESdotUdotIndexHelper()
1649 ZRegister scratch = temps.AcquireZ().WithSameLaneSizeAs(zd); in SVESdotUdotIndexHelper()
1655 Mov(zd, scratch); in SVESdotUdotIndexHelper()
1658 MovprfxHelperScope guard(this, zd, za); in SVESdotUdotIndexHelper()
1659 (this->*fn)(zd, zn, zm, index); in SVESdotUdotIndexHelper()
1664 const ZRegister& zd, in SVESdotUdotHelper() argument
1668 if (zd.Aliases(za)) { in SVESdotUdotHelper()
1671 (this->*fn)(zd, zn, zm); in SVESdotUdotHelper()
1673 } else if (zd.Aliases(zn) || zd.Aliases(zm)) { in SVESdotUdotHelper()
1678 ZRegister scratch = temps.AcquireZ().WithSameLaneSizeAs(zd); in SVESdotUdotHelper()
1684 Mov(zd, scratch); in SVESdotUdotHelper()
1687 MovprfxHelperScope guard(this, zd, za); in SVESdotUdotHelper()
1688 (this->*fn)(zd, zn, zm); in SVESdotUdotHelper()
1692 void MacroAssembler::Fscale(const ZRegister& zd, in Fscale() argument
1697 if (zd.Aliases(zm) && !zd.Aliases(zn)) { in Fscale()
1701 MovprfxHelperScope guard(this, zd, pg, zn); in Fscale()
1702 fscale(zd, pg, zd, scratch); in Fscale()
1704 MovprfxHelperScope guard(this, zd, pg, zn); in Fscale()
1705 fscale(zd, pg, zd, zm); in Fscale()
1709 void MacroAssembler::Sdot(const ZRegister& zd, in Sdot() argument
1714 SVESdotUdotHelper(&Assembler::sdot, zd, za, zn, zm); in Sdot()
1717 void MacroAssembler::Sdot(const ZRegister& zd, in Sdot() argument
1723 SVESdotUdotIndexHelper(&Assembler::sdot, zd, za, zn, zm, index); in Sdot()
1726 void MacroAssembler::Udot(const ZRegister& zd, in Udot() argument
1731 SVESdotUdotHelper(&Assembler::udot, zd, za, zn, zm); in Udot()
1734 void MacroAssembler::Udot(const ZRegister& zd, in Udot() argument
1740 SVESdotUdotIndexHelper(&Assembler::udot, zd, za, zn, zm, index); in Udot()
1743 void MacroAssembler::FPMulAddHelper(const ZRegister& zd, in FPMulAddHelper() argument
1753 if (zd.Aliases(za)) { in FPMulAddHelper()
1756 (this->*fn_zda)(zd, pg, zn, zm); in FPMulAddHelper()
1757 } else if (zd.Aliases(zn)) { in FPMulAddHelper()
1760 (this->*fn_zdn)(zd, pg, zm, za); in FPMulAddHelper()
1761 } else if (zd.Aliases(zm)) { in FPMulAddHelper()
1768 (this->*fn_zdn)(zd, pg, zn, za); in FPMulAddHelper()
1781 Mov(zd, scratch); in FPMulAddHelper()
1790 MovprfxHelperScope guard(this, zd, pg, za); in FPMulAddHelper()
1791 (this->*fn_zda)(zd, pg, zn, zm); in FPMulAddHelper()
1796 const ZRegister& zd, in FPMulAddIndexHelper() argument
1801 if (zd.Aliases(za)) { in FPMulAddIndexHelper()
1804 (this->*fn)(zd, zn, zm, index); in FPMulAddIndexHelper()
1806 } else if (zd.Aliases(zn) || zd.Aliases(zm)) { in FPMulAddIndexHelper()
1811 ZRegister scratch = temps.AcquireZ().WithSameLaneSizeAs(zd); in FPMulAddIndexHelper()
1816 Mov(zd, scratch); in FPMulAddIndexHelper()
1819 MovprfxHelperScope guard(this, zd, za); in FPMulAddIndexHelper()
1820 (this->*fn)(zd, zn, zm, index); in FPMulAddIndexHelper()
1824 void MacroAssembler::Fmla(const ZRegister& zd, in Fmla() argument
1831 FPMulAddHelper(zd, in Fmla()
1841 void MacroAssembler::Fmla(const ZRegister& zd, in Fmla() argument
1847 FPMulAddIndexHelper(&Assembler::fmla, zd, za, zn, zm, index); in Fmla()
1850 void MacroAssembler::Fmls(const ZRegister& zd, in Fmls() argument
1857 FPMulAddHelper(zd, in Fmls()
1867 void MacroAssembler::Fmls(const ZRegister& zd, in Fmls() argument
1873 FPMulAddIndexHelper(&Assembler::fmls, zd, za, zn, zm, index); in Fmls()
1876 void MacroAssembler::Fnmla(const ZRegister& zd, in Fnmla() argument
1883 FPMulAddHelper(zd, in Fnmla()
1893 void MacroAssembler::Fnmls(const ZRegister& zd, in Fnmls() argument
1900 FPMulAddHelper(zd, in Fnmls()
1910 void MacroAssembler::Ftmad(const ZRegister& zd, in Ftmad() argument
1915 if (zd.Aliases(zm) && !zd.Aliases(zn)) { in Ftmad()
1919 MovprfxHelperScope guard(this, zd, zn); in Ftmad()
1920 ftmad(zd, zd, scratch, imm3); in Ftmad()
1922 MovprfxHelperScope guard(this, zd, zn); in Ftmad()
1923 ftmad(zd, zd, zm, imm3); in Ftmad()
1927 void MacroAssembler::Fcadd(const ZRegister& zd, in Fcadd() argument
1933 if (zd.Aliases(zm) && !zd.Aliases(zn)) { in Fcadd()
1935 ZRegister scratch = temps.AcquireZ().WithSameLaneSizeAs(zd); in Fcadd()
1940 Mov(zd, scratch); in Fcadd()
1942 MovprfxHelperScope guard(this, zd, pg, zn); in Fcadd()
1943 fcadd(zd, pg, zd, zm, rot); in Fcadd()
1947 void MacroAssembler::Ext(const ZRegister& zd, in Ext() argument
1952 if (zd.Aliases(zm) && !zd.Aliases(zn)) { in Ext()
1955 ZRegister scratch = temps.AcquireZ().WithSameLaneSizeAs(zd); in Ext()
1960 Mov(zd, scratch); in Ext()
1964 MovprfxHelperScope guard(this, zd, zn); in Ext()
1965 ext(zd, zd, zm, offset); in Ext()
1969 void MacroAssembler::Splice(const ZRegister& zd, in Splice() argument
1974 if (zd.Aliases(zm) && !zd.Aliases(zn)) { in Splice()
1976 ZRegister scratch = temps.AcquireZ().WithSameLaneSizeAs(zd); in Splice()
1981 Mov(zd, scratch); in Splice()
1983 MovprfxHelperScope guard(this, zd, zn); in Splice()
1984 splice(zd, pg, zd, zm); in Splice()
1988 void MacroAssembler::Clasta(const ZRegister& zd, in Clasta() argument
1993 if (zd.Aliases(zm) && !zd.Aliases(zn)) { in Clasta()
1995 ZRegister scratch = temps.AcquireZ().WithSameLaneSizeAs(zd); in Clasta()
2000 Mov(zd, scratch); in Clasta()
2002 MovprfxHelperScope guard(this, zd, zn); in Clasta()
2003 clasta(zd, pg, zd, zm); in Clasta()
2007 void MacroAssembler::Clastb(const ZRegister& zd, in Clastb() argument
2012 if (zd.Aliases(zm) && !zd.Aliases(zn)) { in Clastb()
2014 ZRegister scratch = temps.AcquireZ().WithSameLaneSizeAs(zd); in Clastb()
2019 Mov(zd, scratch); in Clastb()
2021 MovprfxHelperScope guard(this, zd, zn); in Clastb()
2022 clastb(zd, pg, zd, zm); in Clastb()
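
Ftmad, Fcadd, Ext, Splice, Clasta and Clastb above all share one more alias case: these instructions destroy their first source (op zd, zd, zm), so a request in which the destination aliases zm but not zn cannot simply be prefixed with movprfx(zd, zn), because zm would be clobbered first. The fragments route that case through a scratch Z register, either computing into the scratch and Mov-ing the result back or substituting the scratch for zm. A minimal model of the decision (illustrative only, not VIXL API):

    #include <cstdio>

    static const char* PlanDestructivePair(int zd, int zn, int zm) {
      if ((zd == zm) && (zd != zn)) return "use scratch Z register, then mov/op";
      return "movprfx zd, zn; op zd, zd, zm";
    }

    int main() {
      printf("%s\n", PlanDestructivePair(1, 0, 1));  // scratch needed
      printf("%s\n", PlanDestructivePair(2, 0, 1));  // movprfx + op
      return 0;
    }
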