
Searched refs:Xi (Results 1 – 25 of 226) sorted by relevance

/external/boringssl/src/crypto/fipsmodule/modes/asm/
ghash-x86_64.pl
125 $Xi="%rdi";
247 movzb 15($Xi),$Zlo
250 &loop ($Xi);
252 mov $Zlo,8($Xi)
253 mov $Zhi,($Xi)
328 &mov ($Zlo,"8($Xi)");
329 &mov ($Zhi,"0($Xi)");
339 &mov ("($Xi)",$Zhi);
340 &mov ("8($Xi)","%rdx");
375 &mov ($dat,"$j($Xi)") if (--$j%4==0);
[all …]
ghash-x86.pl
611 ($Xi,$Xhi)=("xmm0","xmm1"); $Hkey="xmm2";
618 my ($Xhi,$Xi,$Hkey,$HK)=@_;
620 &movdqa ($Xhi,$Xi); #
621 &pshufd ($T1,$Xi,0b01001110);
623 &pxor ($T1,$Xi); #
627 &pclmulqdq ($Xi,$Hkey,0x00); #######
630 &xorps ($T1,$Xi); #
637 &pxor ($Xi,$T2); #
646 my ($Xhi,$Xi,$Hkey)=@_;
648 &movdqa ($T1,$Xi); #
[all …]
aesni-gcm-x86_64.pl
77 $Z0,$Z1,$Z2,$Z3,$Xi) = map("%xmm$_",(0..8));
152 vpxor $Z0,$Xi,$Xi # modulo-scheduled
163 vpxor 16+8(%rsp),$Xi,$Xi # modulo-scheduled [vpxor $Z3,$Xi,$Xi]
227 vpxor 0x70+8(%rsp),$Xi,$Xi # accumulate I[0]
241 vpclmulqdq \$0x10,$Hkey,$Xi,$Z1
244 vpclmulqdq \$0x01,$Hkey,$Xi,$T1
248 vpclmulqdq \$0x00,$Hkey,$Xi,$T2
251 vpclmulqdq \$0x11,$Hkey,$Xi,$Xi
266 vpxor $Xi,$Z3,$Z3
356 vmovdqu $Z3,16+8(%rsp) # postpone vpxor $Z3,$Xi,$Xi
[all …]
ghash-armv4.pl
98 $Xi="r0"; # argument block
126 str $_,[$Xi,#$i]
128 str $_,[$Xi,#$i]
131 strb $_,[$Xi,#$i+3]
133 strb $Tlh,[$Xi,#$i+2]
135 strb $Thl,[$Xi,#$i+1]
136 strb $Thh,[$Xi,#$i]
201 ldrb $nhi,[$Xi,#15]
219 ldrb $nhi,[$Xi,#14]
258 ldrplb $Tll,[$Xi,$cnt]
[all …]
ghash-ssse3-x86.pl
80 my ($Xi, $Htable, $in, $len) = ("edi", "esi", "edx", "ecx");
157 &mov($Xi, &wparam(0));
160 &movdqu("xmm0", &QWP(0, $Xi));
190 &movdqu(&QWP(0, $Xi), "xmm2");
208 &mov($Xi, &wparam(0));
213 &movdqu("xmm0", &QWP(0, $Xi));
266 &movdqu(&QWP(0, $Xi), "xmm0");
ghash-ssse3-x86_64.pl
90 my ($Xi, $Htable, $in, $len) = $win64 ? ("%rcx", "%rdx", "%r8", "%r9") :
118 movdqu ($Xi), %xmm0
215 movdqu %xmm2, ($Xi)
263 movdqu ($Xi), %xmm0
313 movdqu %xmm0, ($Xi)
ghashv8-armx.pl
51 $Xi="x0"; # argument block
148 vld1.64 {$t1},[$Xi] @ load Xi
181 vst1.64 {$Xl},[$Xi] @ write out Xi
205 vld1.64 {$Xl},[$Xi] @ load [rotated] Xi
338 vst1.64 {$Xl},[$Xi] @ write out Xi
ghash-neon-armv8.pl
72 my ($Xi, $Htbl, $inp, $len) = map("x$_", (0..3)); # argument block
190 ld1 {$INlo.16b}, [$Xi] // load Xi
208 ld1 {$Xl.16b}, [$Xi] // load Xi
268 st1 {$Xl.16b}, [$Xi]
/external/boringssl/src/crypto/fipsmodule/modes/
internal.h
140 typedef void (*gmult_func)(uint64_t Xi[2], const u128 Htable[16]);
145 typedef void (*ghash_func)(uint64_t Xi[2], const u128 Htable[16],
175 } Yi, EKi, EK0, len, Xi; member
272 void gcm_gmult_4bit(uint64_t Xi[2], const u128 Htable[16]);
273 void gcm_ghash_4bit(uint64_t Xi[2], const u128 Htable[16], const uint8_t *inp,
280 void gcm_init_clmul(u128 Htable[16], const uint64_t Xi[2]);
281 void gcm_gmult_clmul(uint64_t Xi[2], const u128 Htable[16]);
282 void gcm_ghash_clmul(uint64_t Xi[2], const u128 Htable[16], const uint8_t *inp,
291 void gcm_init_ssse3(u128 Htable[16], const uint64_t Xi[2]);
292 void gcm_gmult_ssse3(uint64_t Xi[2], const u128 Htable[16]);
[all …]
gcm.c
124 void gcm_gmult_4bit(uint64_t Xi[2], const u128 Htable[16]) { in gcm_gmult_4bit()
129 nlo = ((const uint8_t *)Xi)[15]; in gcm_gmult_4bit()
153 nlo = ((const uint8_t *)Xi)[cnt]; in gcm_gmult_4bit()
170 Xi[0] = CRYPTO_bswap8(Z.hi); in gcm_gmult_4bit()
171 Xi[1] = CRYPTO_bswap8(Z.lo); in gcm_gmult_4bit()
179 void gcm_ghash_4bit(uint64_t Xi[2], const u128 Htable[16], const uint8_t *inp, in gcm_ghash_4bit()
187 nlo = ((const uint8_t *)Xi)[15]; in gcm_ghash_4bit()
212 nlo = ((const uint8_t *)Xi)[cnt]; in gcm_ghash_4bit()
230 Xi[0] = CRYPTO_bswap8(Z.hi); in gcm_ghash_4bit()
231 Xi[1] = CRYPTO_bswap8(Z.lo); in gcm_ghash_4bit()
[all …]
gcm_test.cc
184 memcpy(&gcm.Xi, X, sizeof(X)); in TEST()
190 gcm.Xi.u); in TEST()
192 gcm.Xi.u); in TEST()
197 gcm.Xi.u); in TEST()
199 gcm.Xi.u); in TEST()
/external/boringssl/src/crypto/fipsmodule/sha/asm/
sha1-armv4-large.pl
108 $Xi="r14";
114 ldr $t0,[$Xi,#15*4]
115 ldr $t1,[$Xi,#13*4]
116 ldr $t2,[$Xi,#7*4]
118 ldr $t3,[$Xi,#2*4]
125 str $t0,[$Xi,#-4]!
158 str $t0,[$Xi,#-4]!
223 mov $Xi,sp
236 teq $Xi,$t3
238 teq $Xi,sp
[all …]
sha1-x86_64.pl
470 my $Xi=4;
607 &movdqa (eval(16*(($Xi-1)&3))."(%rsp)",@Tx[1]); # X[]+K xfer to IALU
642 &movdqa (@Tx[2],eval(2*16*(($Xi)/5)-64)."($K_XX_XX)"); # K_XX_XX
648 &pshufd (@Tx[1],@X[-1&7],0xee) if ($Xi==7); # was &movdqa (@Tx[0],@X[-1&7]) in Xupdate_ssse3_32_79
652 $Xi++; push(@X,shift(@X)); # "rotate" X[]
662 eval(shift(@insns)) if ($Xi==8);
664 eval(shift(@insns)) if ($Xi==8);
676 if ($Xi%5) {
679 &movdqa (@Tx[2],eval(2*16*($Xi/5)-64)."($K_XX_XX)");
695 &movdqa (eval(16*(($Xi-1)&3))."(%rsp)",@Tx[1]); # X[]+K xfer to IALU
[all …]
sha1-586.pl
560 my $Xi=4; # 4xSIMD Xupdate round, start pre-seeded
689 &movdqa (&QWP(64+16*(($Xi-4)%3),"esp"),@X[-4&7]);# save X[] to backtrace buffer
707 &movdqa (&QWP(0+16*(($Xi-1)&3),"esp"),@X[3]); # X[]+K xfer to IALU
736 &movdqa (@X[2],&QWP(64+16*(($Xi-6)%3),"esp")) if ($Xi>5); # restore X[] from backtrace buffer
744 &movdqa (@X[4],&QWP(112-16+16*(($Xi)/5),"esp")); # K_XX_XX
749 &pshufd (@X[1],@X[-3&7],0xee) if ($Xi<7); # was &movdqa (@X[1],@X[-2&7])
750 &pshufd (@X[3],@X[-1&7],0xee) if ($Xi==7);
756 $Xi++; push(@X,shift(@X)); # "rotate" X[]
773 &movdqa (&QWP(64+16*(($Xi-4)%3),"esp"),@X[-4&7]); # save X[] to backtrace buffer
777 if ($Xi%5) {
[all …]
/external/boringssl/ios-arm/crypto/fipsmodule/
ghashv8-armx32.S
81 vld1.64 {q9},[r0] @ load Xi
90 .byte 0x86,0x0e,0xa8,0xf2 @ pmull q0,q12,q3 @ H.lo·Xi.lo
92 .byte 0x87,0x4e,0xa9,0xf2 @ pmull2 q2,q12,q3 @ H.hi·Xi.hi
93 .byte 0xa2,0x2e,0xaa,0xf2 @ pmull q1,q13,q9 @ (H.lo+H.hi)·(Xi.lo+Xi.hi)
114 vst1.64 {q0},[r0] @ write out Xi
126 vld1.64 {q0},[r0] @ load [rotated] Xi
146 vext.8 q0,q0,q0,#8 @ rotate Xi
160 veor q3,q3,q0 @ I[i]^=Xi
170 .byte 0x86,0x0e,0xac,0xf2 @ pmull q0,q14,q3 @ H^2.lo·Xi.lo
175 .byte 0x87,0x4e,0xad,0xf2 @ pmull2 q2,q14,q3 @ H^2.hi·Xi.hi
[all …]
ghash-armv4.S
410 vld1.64 d7,[r0]! @ load Xi
431 vld1.64 d1,[r0]! @ load Xi
448 veor q3,q0 @ inp^=Xi
591 vst1.64 d1,[r0]! @ write out Xi
/external/boringssl/linux-arm/crypto/fipsmodule/
ghashv8-armx32.S
78 vld1.64 {q9},[r0] @ load Xi
87 .byte 0x86,0x0e,0xa8,0xf2 @ pmull q0,q12,q3 @ H.lo·Xi.lo
89 .byte 0x87,0x4e,0xa9,0xf2 @ pmull2 q2,q12,q3 @ H.hi·Xi.hi
90 .byte 0xa2,0x2e,0xaa,0xf2 @ pmull q1,q13,q9 @ (H.lo+H.hi)·(Xi.lo+Xi.hi)
111 vst1.64 {q0},[r0] @ write out Xi
121 vld1.64 {q0},[r0] @ load [rotated] Xi
141 vext.8 q0,q0,q0,#8 @ rotate Xi
155 veor q3,q3,q0 @ I[i]^=Xi
165 .byte 0x86,0x0e,0xac,0xf2 @ pmull q0,q14,q3 @ H^2.lo·Xi.lo
170 .byte 0x87,0x4e,0xad,0xf2 @ pmull2 q2,q14,q3 @ H^2.hi·Xi.hi
[all …]
ghash-armv4.S
401 vld1.64 d7,[r0]! @ load Xi
420 vld1.64 d1,[r0]! @ load Xi
437 veor q3,q0 @ inp^=Xi
580 vst1.64 d1,[r0]! @ write out Xi
/external/cldr/tools/cldr-unittest/src/org/unicode/cldr/unittest/
TestCasingTransforms.txt
22 …̇́ xi̇̃ XI XÏ XJ XJ̈ XĮ XĮ̈; lt-Title; I Ï J J̈ Į Į̈ Ì Í Ĩ Xi̇̈ Xj̇̈ Xį̇̈ Xi̇̀ Xi̇́ Xi̇̃ Xi Xi̇̈ X…
/external/eigen/Eigen/src/Core/products/
SelfadjointMatrixVector.h
121 Packet Xi = pload <Packet>(resIt); in run() local
123 Xi = pcj0.pmadd(A0i,ptmp0, pcj0.pmadd(A1i,ptmp1,Xi)); in run()
126 pstore(resIt,Xi); resIt += PacketSize; in run()
/external/icu/icu4c/source/data/translit/
Latin_ConjoiningJamo.txt
170 # separator between the first and second "x" if XXf, Xf, and Xi all
172 # transliteration to A XXf Xi.
207 # where Xi also exists, must be transliterated as "ax-e" to prevent
208 # the round trip conversion to A Xi E.
240 # Split doubles. Text of the form A Xi Xf E, where XXi also occurs,
290 # to transliterate as A Xi E rather than A Xf IEUNG E.
305 # (XXi), and also Xi and Xf exist (true of all digraphs XX), we want
306 # to transliterate as A XXi E, rather than split to A Xf Xi E.
/external/cldr/common/testData/transforms/
ru-t-zh-Latn-pinyin.txt
499 Xi'an Сиань
502 Xiánníng Сяньнин
/external/skqp/third_party/angle2/
BUILD.gn
102 "Xi",
/external/skia/third_party/angle2/
BUILD.gn
106 "Xi",
/external/honggfuzz/examples/apache-httpd/corpus_http2/
7699715d5b342304e92afe39f1dc3901.00001736.honggfuzz.cov
12 (binary fuzzing-corpus data containing the byte sequence "Xi"; not reproducible as text)
