1 /* Copyright (C) 1995-1998 Eric Young (eay@cryptsoft.com)
2  * All rights reserved.
3  *
4  * This package is an SSL implementation written
5  * by Eric Young (eay@cryptsoft.com).
6  * The implementation was written so as to conform with Netscapes SSL.
7  *
8  * This library is free for commercial and non-commercial use as long as
9  * the following conditions are aheared to.  The following conditions
10  * apply to all code found in this distribution, be it the RC4, RSA,
11  * lhash, DES, etc., code; not just the SSL code.  The SSL documentation
12  * included with this distribution is covered by the same copyright terms
13  * except that the holder is Tim Hudson (tjh@cryptsoft.com).
14  *
15  * Copyright remains Eric Young's, and as such any Copyright notices in
16  * the code are not to be removed.
17  * If this package is used in a product, Eric Young should be given attribution
18  * as the author of the parts of the library used.
19  * This can be in the form of a textual message at program startup or
20  * in documentation (online or textual) provided with the package.
21  *
22  * Redistribution and use in source and binary forms, with or without
23  * modification, are permitted provided that the following conditions
24  * are met:
25  * 1. Redistributions of source code must retain the copyright
26  *    notice, this list of conditions and the following disclaimer.
27  * 2. Redistributions in binary form must reproduce the above copyright
28  *    notice, this list of conditions and the following disclaimer in the
29  *    documentation and/or other materials provided with the distribution.
30  * 3. All advertising materials mentioning features or use of this software
31  *    must display the following acknowledgement:
32  *    "This product includes cryptographic software written by
33  *     Eric Young (eay@cryptsoft.com)"
34  *    The word 'cryptographic' can be left out if the rouines from the library
35  *    being used are not cryptographic related :-).
36  * 4. If you include any Windows specific code (or a derivative thereof) from
37  *    the apps directory (application code) you must include an acknowledgement:
38  *    "This product includes software written by Tim Hudson (tjh@cryptsoft.com)"
39  *
40  * THIS SOFTWARE IS PROVIDED BY ERIC YOUNG ``AS IS'' AND
41  * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
42  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
43  * ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
44  * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
45  * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
46  * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
47  * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
48  * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
49  * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
50  * SUCH DAMAGE.
51  *
52  * The licence and distribution terms for any publically available version or
53  * derivative of this code cannot be changed.  i.e. this code cannot simply be
54  * copied and put under another distribution licence
55  * [including the GNU Public Licence.] */
56 
57 #include <openssl/ripemd.h>
58 
59 #include <string.h>
60 
61 #include "../../crypto/internal.h"
62 #include "../../crypto/fipsmodule/digest/md32_common.h"
63 
64 
// Initial chaining values for the RIPEMD-160 state h[0..4], as fixed by the
// algorithm's designers (Dobbertin, Bosselaers, Preneel). The first four match
// the MD4/MD5 initialization vector.
#define RIPEMD160_A 0x67452301L
#define RIPEMD160_B 0xEFCDAB89L
#define RIPEMD160_C 0x98BADCFEL
#define RIPEMD160_D 0x10325476L
#define RIPEMD160_E 0xC3D2E1F0L
70 
// RIPEMD160_Init zeroes |ctx| and loads the standard RIPEMD-160 initial
// chaining values into the state. Always returns 1.
int RIPEMD160_Init(RIPEMD160_CTX *ctx) {
  static const uint32_t kInitialState[5] = {RIPEMD160_A, RIPEMD160_B,
                                            RIPEMD160_C, RIPEMD160_D,
                                            RIPEMD160_E};
  OPENSSL_memset(ctx, 0, sizeof(*ctx));
  for (size_t i = 0; i < 5; i++) {
    ctx->h[i] = kInitialState[i];
  }
  return 1;
}
80 
// ripemd160_block_data_order is the RIPEMD-160 compression function: it
// absorbs |num| consecutive 64-byte blocks from |data| into the 160-bit
// chaining state |h|. Defined below, after the round-constant tables.
static void ripemd160_block_data_order(uint32_t h[5], const uint8_t *data,
                                       size_t num);
83 
// RIPEMD160_Transform runs the compression function over exactly one 64-byte
// block of |data|, updating the chaining state in |c|. It does not touch the
// buffered partial block or the length counters.
void RIPEMD160_Transform(RIPEMD160_CTX *c,
                         const uint8_t data[RIPEMD160_CBLOCK]) {
  ripemd160_block_data_order(c->h, data, 1);
}
88 
// RIPEMD160_Update absorbs |len| bytes from |data| into the hash, delegating
// block buffering and the 64-bit length bookkeeping (c->Nh:c->Nl) to the
// shared Merkle-Damgard driver. Always returns 1.
int RIPEMD160_Update(RIPEMD160_CTX *c, const void *data, size_t len) {
  crypto_md32_update(&ripemd160_block_data_order, c->h, c->data,
                     RIPEMD160_CBLOCK, &c->num, &c->Nh, &c->Nl, data, len);
  return 1;
}
94 
// RIPEMD160_Final applies the Merkle-Damgard padding (little-endian length
// encoding), runs the last compression, and serializes the five state words
// to |out| in little-endian order. Always returns 1.
int RIPEMD160_Final(uint8_t out[RIPEMD160_DIGEST_LENGTH], RIPEMD160_CTX *c) {
  crypto_md32_final(&ripemd160_block_data_order, c->h, c->data,
                    RIPEMD160_CBLOCK, &c->num, c->Nh, c->Nl,
                    /*is_big_endian=*/0);

  for (size_t i = 0; i < 5; i++) {
    CRYPTO_store_u32_le(out + 4 * i, c->h[i]);
  }
  return 1;
}
107 
// The five per-round boolean functions. F1, F3, and F5 are the textbook
// forms; the transformed F2 and F4 (equivalent to the spec's selection
// functions but with fewer operations) are courtesy of Wei Dai
// <weidai@eskimo.com>.
#define F1(x, y, z) ((x) ^ (y) ^ (z))
#define F2(x, y, z) ((((y) ^ (z)) & (x)) ^ (z))
#define F3(x, y, z) (((~(y)) | (x)) ^ (z))
#define F4(x, y, z) ((((x) ^ (y)) & (z)) ^ (y))
#define F5(x, y, z) (((~(z)) | (y)) ^ (x))

// 32-bit rotate-left. The 0xffffffff mask keeps the shifted-right operand
// correct even if the expression is evaluated in a wider integer type.
#define ROTATE(a, n) (((a) << (n)) | (((a)&0xffffffff) >> (32 - (n))))

// One RIPEMD-160 step per round: a += F(b,c,d) + X[w] (+ K);
// a = rol(a, s) + e; c = rol(c, 10). RIP1 (round 1 left / round 5 right)
// uses no additive constant.
#define RIP1(a, b, c, d, e, w, s) \
  {                               \
    a += F1(b, c, d) + X(w);      \
    a = ROTATE(a, s) + e;         \
    c = ROTATE(c, 10);            \
  }

#define RIP2(a, b, c, d, e, w, s, K) \
  {                                  \
    a += F2(b, c, d) + X(w) + K;     \
    a = ROTATE(a, s) + e;            \
    c = ROTATE(c, 10);               \
  }

#define RIP3(a, b, c, d, e, w, s, K) \
  {                                  \
    a += F3(b, c, d) + X(w) + K;     \
    a = ROTATE(a, s) + e;            \
    c = ROTATE(c, 10);               \
  }

#define RIP4(a, b, c, d, e, w, s, K) \
  {                                  \
    a += F4(b, c, d) + X(w) + K;     \
    a = ROTATE(a, s) + e;            \
    c = ROTATE(c, 10);               \
  }

#define RIP5(a, b, c, d, e, w, s, K) \
  {                                  \
    a += F5(b, c, d) + X(w) + K;     \
    a = ROTATE(a, s) + e;            \
    c = ROTATE(c, 10);               \
  }
151 
// Per-round additive constants from the RIPEMD-160 specification: KL* for the
// five rounds of the left line, KR* for the right line. Note round 1 of the
// left line and round 5 of the right line add no constant.
#define KL0 0x00000000L
#define KL1 0x5A827999L
#define KL2 0x6ED9EBA1L
#define KL3 0x8F1BBCDCL
#define KL4 0xA953FD4EL

#define KR0 0x50A28BE6L
#define KR1 0x5C4DD124L
#define KR2 0x6D703EF3L
#define KR3 0x7A6D76E9L
#define KR4 0x00000000L
163 
164 #define WL00  0
165 #define SL00 11
166 #define WL01  1
167 #define SL01 14
168 #define WL02  2
169 #define SL02 15
170 #define WL03  3
171 #define SL03 12
172 #define WL04  4
173 #define SL04  5
174 #define WL05  5
175 #define SL05  8
176 #define WL06  6
177 #define SL06  7
178 #define WL07  7
179 #define SL07  9
180 #define WL08  8
181 #define SL08 11
182 #define WL09  9
183 #define SL09 13
184 #define WL10 10
185 #define SL10 14
186 #define WL11 11
187 #define SL11 15
188 #define WL12 12
189 #define SL12  6
190 #define WL13 13
191 #define SL13  7
192 #define WL14 14
193 #define SL14  9
194 #define WL15 15
195 #define SL15  8
196 
197 #define WL16  7
198 #define SL16  7
199 #define WL17  4
200 #define SL17  6
201 #define WL18 13
202 #define SL18  8
203 #define WL19  1
204 #define SL19 13
205 #define WL20 10
206 #define SL20 11
207 #define WL21  6
208 #define SL21  9
209 #define WL22 15
210 #define SL22  7
211 #define WL23  3
212 #define SL23 15
213 #define WL24 12
214 #define SL24  7
215 #define WL25  0
216 #define SL25 12
217 #define WL26  9
218 #define SL26 15
219 #define WL27  5
220 #define SL27  9
221 #define WL28  2
222 #define SL28 11
223 #define WL29 14
224 #define SL29  7
225 #define WL30 11
226 #define SL30 13
227 #define WL31  8
228 #define SL31 12
229 
230 #define WL32  3
231 #define SL32 11
232 #define WL33 10
233 #define SL33 13
234 #define WL34 14
235 #define SL34  6
236 #define WL35  4
237 #define SL35  7
238 #define WL36  9
239 #define SL36 14
240 #define WL37 15
241 #define SL37  9
242 #define WL38  8
243 #define SL38 13
244 #define WL39  1
245 #define SL39 15
246 #define WL40  2
247 #define SL40 14
248 #define WL41  7
249 #define SL41  8
250 #define WL42  0
251 #define SL42 13
252 #define WL43  6
253 #define SL43  6
254 #define WL44 13
255 #define SL44  5
256 #define WL45 11
257 #define SL45 12
258 #define WL46  5
259 #define SL46  7
260 #define WL47 12
261 #define SL47  5
262 
263 #define WL48  1
264 #define SL48 11
265 #define WL49  9
266 #define SL49 12
267 #define WL50 11
268 #define SL50 14
269 #define WL51 10
270 #define SL51 15
271 #define WL52  0
272 #define SL52 14
273 #define WL53  8
274 #define SL53 15
275 #define WL54 12
276 #define SL54  9
277 #define WL55  4
278 #define SL55  8
279 #define WL56 13
280 #define SL56  9
281 #define WL57  3
282 #define SL57 14
283 #define WL58  7
284 #define SL58  5
285 #define WL59 15
286 #define SL59  6
287 #define WL60 14
288 #define SL60  8
289 #define WL61  5
290 #define SL61  6
291 #define WL62  6
292 #define SL62  5
293 #define WL63  2
294 #define SL63 12
295 
296 #define WL64  4
297 #define SL64  9
298 #define WL65  0
299 #define SL65 15
300 #define WL66  5
301 #define SL66  5
302 #define WL67  9
303 #define SL67 11
304 #define WL68  7
305 #define SL68  6
306 #define WL69 12
307 #define SL69  8
308 #define WL70  2
309 #define SL70 13
310 #define WL71 10
311 #define SL71 12
312 #define WL72 14
313 #define SL72  5
314 #define WL73  1
315 #define SL73 12
316 #define WL74  3
317 #define SL74 13
318 #define WL75  8
319 #define SL75 14
320 #define WL76 11
321 #define SL76 11
322 #define WL77  6
323 #define SL77  8
324 #define WL78 15
325 #define SL78  5
326 #define WL79 13
327 #define SL79  6
328 
329 #define WR00  5
330 #define SR00  8
331 #define WR01 14
332 #define SR01  9
333 #define WR02  7
334 #define SR02  9
335 #define WR03  0
336 #define SR03 11
337 #define WR04  9
338 #define SR04 13
339 #define WR05  2
340 #define SR05 15
341 #define WR06 11
342 #define SR06 15
343 #define WR07  4
344 #define SR07  5
345 #define WR08 13
346 #define SR08  7
347 #define WR09  6
348 #define SR09  7
349 #define WR10 15
350 #define SR10  8
351 #define WR11  8
352 #define SR11 11
353 #define WR12  1
354 #define SR12 14
355 #define WR13 10
356 #define SR13 14
357 #define WR14  3
358 #define SR14 12
359 #define WR15 12
360 #define SR15  6
361 
362 #define WR16  6
363 #define SR16  9
364 #define WR17 11
365 #define SR17 13
366 #define WR18  3
367 #define SR18 15
368 #define WR19  7
369 #define SR19  7
370 #define WR20  0
371 #define SR20 12
372 #define WR21 13
373 #define SR21  8
374 #define WR22  5
375 #define SR22  9
376 #define WR23 10
377 #define SR23 11
378 #define WR24 14
379 #define SR24  7
380 #define WR25 15
381 #define SR25  7
382 #define WR26  8
383 #define SR26 12
384 #define WR27 12
385 #define SR27  7
386 #define WR28  4
387 #define SR28  6
388 #define WR29  9
389 #define SR29 15
390 #define WR30  1
391 #define SR30 13
392 #define WR31  2
393 #define SR31 11
394 
395 #define WR32 15
396 #define SR32  9
397 #define WR33  5
398 #define SR33  7
399 #define WR34  1
400 #define SR34 15
401 #define WR35  3
402 #define SR35 11
403 #define WR36  7
404 #define SR36  8
405 #define WR37 14
406 #define SR37  6
407 #define WR38  6
408 #define SR38  6
409 #define WR39  9
410 #define SR39 14
411 #define WR40 11
412 #define SR40 12
413 #define WR41  8
414 #define SR41 13
415 #define WR42 12
416 #define SR42  5
417 #define WR43  2
418 #define SR43 14
419 #define WR44 10
420 #define SR44 13
421 #define WR45  0
422 #define SR45 13
423 #define WR46  4
424 #define SR46  7
425 #define WR47 13
426 #define SR47  5
427 
428 #define WR48  8
429 #define SR48 15
430 #define WR49  6
431 #define SR49  5
432 #define WR50  4
433 #define SR50  8
434 #define WR51  1
435 #define SR51 11
436 #define WR52  3
437 #define SR52 14
438 #define WR53 11
439 #define SR53 14
440 #define WR54 15
441 #define SR54  6
442 #define WR55  0
443 #define SR55 14
444 #define WR56  5
445 #define SR56  6
446 #define WR57 12
447 #define SR57  9
448 #define WR58  2
449 #define SR58 12
450 #define WR59 13
451 #define SR59  9
452 #define WR60  9
453 #define SR60 12
454 #define WR61  7
455 #define SR61  5
456 #define WR62 10
457 #define SR62 15
458 #define WR63 14
459 #define SR63  8
460 
461 #define WR64 12
462 #define SR64  8
463 #define WR65 15
464 #define SR65  5
465 #define WR66 10
466 #define SR66 12
467 #define WR67  4
468 #define SR67  9
469 #define WR68  1
470 #define SR68 12
471 #define WR69  5
472 #define SR69  5
473 #define WR70  8
474 #define SR70 14
475 #define WR71  7
476 #define SR71  6
477 #define WR72  6
478 #define SR72  8
479 #define WR73  2
480 #define SR73 13
481 #define WR74 13
482 #define SR74  6
483 #define WR75 14
484 #define SR75  5
485 #define WR76  0
486 #define SR76 15
487 #define WR77  3
488 #define SR77 13
489 #define WR78  9
490 #define SR78 11
491 #define WR79 11
492 #define SR79 11
493 
// ripemd160_block_data_order absorbs |num| consecutive 64-byte blocks from
// |data| into the 160-bit chaining state |h|. Each block is processed by two
// independent 80-step "lines" (left and right) over the same 16 little-endian
// message words; the two results are then cross-mixed back into |h|. The
// WLnn/SLnn and WRnn/SRnn macros above give each step's message-word index
// and rotation amount.
static void ripemd160_block_data_order(uint32_t h[5], const uint8_t *data,
                                       size_t num) {
  uint32_t A, B, C, D, E;  // working registers for the line currently running
  uint32_t a, b, c, d, e;  // left-line result, saved while the right line runs
  // The 16 message words of the current block, as individual locals (rather
  // than an array) so X(i) accesses can be kept in registers.
  uint32_t XX0, XX1, XX2, XX3, XX4, XX5, XX6, XX7, XX8, XX9, XX10, XX11, XX12,
      XX13, XX14, XX15;
#define X(i) XX##i

  for (; num--;) {
    // Left line starts from the current chaining state.
    A = h[0];
    B = h[1];
    C = h[2];
    D = h[3];
    E = h[4];

    // Round 1 of the left line (F1, no constant); loading of the message
    // words is interleaved with the first steps.
    X(0) = CRYPTO_load_u32_le(data);
    data += 4;
    X(1) = CRYPTO_load_u32_le(data);
    data += 4;
    RIP1(A, B, C, D, E, WL00, SL00);
    X(2) = CRYPTO_load_u32_le(data);
    data += 4;
    RIP1(E, A, B, C, D, WL01, SL01);
    X(3) = CRYPTO_load_u32_le(data);
    data += 4;
    RIP1(D, E, A, B, C, WL02, SL02);
    X(4) = CRYPTO_load_u32_le(data);
    data += 4;
    RIP1(C, D, E, A, B, WL03, SL03);
    X(5) = CRYPTO_load_u32_le(data);
    data += 4;
    RIP1(B, C, D, E, A, WL04, SL04);
    X(6) = CRYPTO_load_u32_le(data);
    data += 4;
    RIP1(A, B, C, D, E, WL05, SL05);
    X(7) = CRYPTO_load_u32_le(data);
    data += 4;
    RIP1(E, A, B, C, D, WL06, SL06);
    X(8) = CRYPTO_load_u32_le(data);
    data += 4;
    RIP1(D, E, A, B, C, WL07, SL07);
    X(9) = CRYPTO_load_u32_le(data);
    data += 4;
    RIP1(C, D, E, A, B, WL08, SL08);
    X(10) = CRYPTO_load_u32_le(data);
    data += 4;
    RIP1(B, C, D, E, A, WL09, SL09);
    X(11) = CRYPTO_load_u32_le(data);
    data += 4;
    RIP1(A, B, C, D, E, WL10, SL10);
    X(12) = CRYPTO_load_u32_le(data);
    data += 4;
    RIP1(E, A, B, C, D, WL11, SL11);
    X(13) = CRYPTO_load_u32_le(data);
    data += 4;
    RIP1(D, E, A, B, C, WL12, SL12);
    X(14) = CRYPTO_load_u32_le(data);
    data += 4;
    RIP1(C, D, E, A, B, WL13, SL13);
    X(15) = CRYPTO_load_u32_le(data);
    data += 4;
    RIP1(B, C, D, E, A, WL14, SL14);
    RIP1(A, B, C, D, E, WL15, SL15);

    // Left line, round 2 (F2, KL1).
    RIP2(E, A, B, C, D, WL16, SL16, KL1);
    RIP2(D, E, A, B, C, WL17, SL17, KL1);
    RIP2(C, D, E, A, B, WL18, SL18, KL1);
    RIP2(B, C, D, E, A, WL19, SL19, KL1);
    RIP2(A, B, C, D, E, WL20, SL20, KL1);
    RIP2(E, A, B, C, D, WL21, SL21, KL1);
    RIP2(D, E, A, B, C, WL22, SL22, KL1);
    RIP2(C, D, E, A, B, WL23, SL23, KL1);
    RIP2(B, C, D, E, A, WL24, SL24, KL1);
    RIP2(A, B, C, D, E, WL25, SL25, KL1);
    RIP2(E, A, B, C, D, WL26, SL26, KL1);
    RIP2(D, E, A, B, C, WL27, SL27, KL1);
    RIP2(C, D, E, A, B, WL28, SL28, KL1);
    RIP2(B, C, D, E, A, WL29, SL29, KL1);
    RIP2(A, B, C, D, E, WL30, SL30, KL1);
    RIP2(E, A, B, C, D, WL31, SL31, KL1);

    // Left line, round 3 (F3, KL2).
    RIP3(D, E, A, B, C, WL32, SL32, KL2);
    RIP3(C, D, E, A, B, WL33, SL33, KL2);
    RIP3(B, C, D, E, A, WL34, SL34, KL2);
    RIP3(A, B, C, D, E, WL35, SL35, KL2);
    RIP3(E, A, B, C, D, WL36, SL36, KL2);
    RIP3(D, E, A, B, C, WL37, SL37, KL2);
    RIP3(C, D, E, A, B, WL38, SL38, KL2);
    RIP3(B, C, D, E, A, WL39, SL39, KL2);
    RIP3(A, B, C, D, E, WL40, SL40, KL2);
    RIP3(E, A, B, C, D, WL41, SL41, KL2);
    RIP3(D, E, A, B, C, WL42, SL42, KL2);
    RIP3(C, D, E, A, B, WL43, SL43, KL2);
    RIP3(B, C, D, E, A, WL44, SL44, KL2);
    RIP3(A, B, C, D, E, WL45, SL45, KL2);
    RIP3(E, A, B, C, D, WL46, SL46, KL2);
    RIP3(D, E, A, B, C, WL47, SL47, KL2);

    // Left line, round 4 (F4, KL3).
    RIP4(C, D, E, A, B, WL48, SL48, KL3);
    RIP4(B, C, D, E, A, WL49, SL49, KL3);
    RIP4(A, B, C, D, E, WL50, SL50, KL3);
    RIP4(E, A, B, C, D, WL51, SL51, KL3);
    RIP4(D, E, A, B, C, WL52, SL52, KL3);
    RIP4(C, D, E, A, B, WL53, SL53, KL3);
    RIP4(B, C, D, E, A, WL54, SL54, KL3);
    RIP4(A, B, C, D, E, WL55, SL55, KL3);
    RIP4(E, A, B, C, D, WL56, SL56, KL3);
    RIP4(D, E, A, B, C, WL57, SL57, KL3);
    RIP4(C, D, E, A, B, WL58, SL58, KL3);
    RIP4(B, C, D, E, A, WL59, SL59, KL3);
    RIP4(A, B, C, D, E, WL60, SL60, KL3);
    RIP4(E, A, B, C, D, WL61, SL61, KL3);
    RIP4(D, E, A, B, C, WL62, SL62, KL3);
    RIP4(C, D, E, A, B, WL63, SL63, KL3);

    // Left line, round 5 (F5, KL4).
    RIP5(B, C, D, E, A, WL64, SL64, KL4);
    RIP5(A, B, C, D, E, WL65, SL65, KL4);
    RIP5(E, A, B, C, D, WL66, SL66, KL4);
    RIP5(D, E, A, B, C, WL67, SL67, KL4);
    RIP5(C, D, E, A, B, WL68, SL68, KL4);
    RIP5(B, C, D, E, A, WL69, SL69, KL4);
    RIP5(A, B, C, D, E, WL70, SL70, KL4);
    RIP5(E, A, B, C, D, WL71, SL71, KL4);
    RIP5(D, E, A, B, C, WL72, SL72, KL4);
    RIP5(C, D, E, A, B, WL73, SL73, KL4);
    RIP5(B, C, D, E, A, WL74, SL74, KL4);
    RIP5(A, B, C, D, E, WL75, SL75, KL4);
    RIP5(E, A, B, C, D, WL76, SL76, KL4);
    RIP5(D, E, A, B, C, WL77, SL77, KL4);
    RIP5(C, D, E, A, B, WL78, SL78, KL4);
    RIP5(B, C, D, E, A, WL79, SL79, KL4);

    // Save the left-line result, then run the right line from the same
    // starting state. The right line applies the boolean functions in the
    // opposite order (F5 first, F1 last).
    a = A;
    b = B;
    c = C;
    d = D;
    e = E;
    // Do other half
    A = h[0];
    B = h[1];
    C = h[2];
    D = h[3];
    E = h[4];

    // Right line, round 1 (F5, KR0).
    RIP5(A, B, C, D, E, WR00, SR00, KR0);
    RIP5(E, A, B, C, D, WR01, SR01, KR0);
    RIP5(D, E, A, B, C, WR02, SR02, KR0);
    RIP5(C, D, E, A, B, WR03, SR03, KR0);
    RIP5(B, C, D, E, A, WR04, SR04, KR0);
    RIP5(A, B, C, D, E, WR05, SR05, KR0);
    RIP5(E, A, B, C, D, WR06, SR06, KR0);
    RIP5(D, E, A, B, C, WR07, SR07, KR0);
    RIP5(C, D, E, A, B, WR08, SR08, KR0);
    RIP5(B, C, D, E, A, WR09, SR09, KR0);
    RIP5(A, B, C, D, E, WR10, SR10, KR0);
    RIP5(E, A, B, C, D, WR11, SR11, KR0);
    RIP5(D, E, A, B, C, WR12, SR12, KR0);
    RIP5(C, D, E, A, B, WR13, SR13, KR0);
    RIP5(B, C, D, E, A, WR14, SR14, KR0);
    RIP5(A, B, C, D, E, WR15, SR15, KR0);

    // Right line, round 2 (F4, KR1).
    RIP4(E, A, B, C, D, WR16, SR16, KR1);
    RIP4(D, E, A, B, C, WR17, SR17, KR1);
    RIP4(C, D, E, A, B, WR18, SR18, KR1);
    RIP4(B, C, D, E, A, WR19, SR19, KR1);
    RIP4(A, B, C, D, E, WR20, SR20, KR1);
    RIP4(E, A, B, C, D, WR21, SR21, KR1);
    RIP4(D, E, A, B, C, WR22, SR22, KR1);
    RIP4(C, D, E, A, B, WR23, SR23, KR1);
    RIP4(B, C, D, E, A, WR24, SR24, KR1);
    RIP4(A, B, C, D, E, WR25, SR25, KR1);
    RIP4(E, A, B, C, D, WR26, SR26, KR1);
    RIP4(D, E, A, B, C, WR27, SR27, KR1);
    RIP4(C, D, E, A, B, WR28, SR28, KR1);
    RIP4(B, C, D, E, A, WR29, SR29, KR1);
    RIP4(A, B, C, D, E, WR30, SR30, KR1);
    RIP4(E, A, B, C, D, WR31, SR31, KR1);

    // Right line, round 3 (F3, KR2).
    RIP3(D, E, A, B, C, WR32, SR32, KR2);
    RIP3(C, D, E, A, B, WR33, SR33, KR2);
    RIP3(B, C, D, E, A, WR34, SR34, KR2);
    RIP3(A, B, C, D, E, WR35, SR35, KR2);
    RIP3(E, A, B, C, D, WR36, SR36, KR2);
    RIP3(D, E, A, B, C, WR37, SR37, KR2);
    RIP3(C, D, E, A, B, WR38, SR38, KR2);
    RIP3(B, C, D, E, A, WR39, SR39, KR2);
    RIP3(A, B, C, D, E, WR40, SR40, KR2);
    RIP3(E, A, B, C, D, WR41, SR41, KR2);
    RIP3(D, E, A, B, C, WR42, SR42, KR2);
    RIP3(C, D, E, A, B, WR43, SR43, KR2);
    RIP3(B, C, D, E, A, WR44, SR44, KR2);
    RIP3(A, B, C, D, E, WR45, SR45, KR2);
    RIP3(E, A, B, C, D, WR46, SR46, KR2);
    RIP3(D, E, A, B, C, WR47, SR47, KR2);

    // Right line, round 4 (F2, KR3).
    RIP2(C, D, E, A, B, WR48, SR48, KR3);
    RIP2(B, C, D, E, A, WR49, SR49, KR3);
    RIP2(A, B, C, D, E, WR50, SR50, KR3);
    RIP2(E, A, B, C, D, WR51, SR51, KR3);
    RIP2(D, E, A, B, C, WR52, SR52, KR3);
    RIP2(C, D, E, A, B, WR53, SR53, KR3);
    RIP2(B, C, D, E, A, WR54, SR54, KR3);
    RIP2(A, B, C, D, E, WR55, SR55, KR3);
    RIP2(E, A, B, C, D, WR56, SR56, KR3);
    RIP2(D, E, A, B, C, WR57, SR57, KR3);
    RIP2(C, D, E, A, B, WR58, SR58, KR3);
    RIP2(B, C, D, E, A, WR59, SR59, KR3);
    RIP2(A, B, C, D, E, WR60, SR60, KR3);
    RIP2(E, A, B, C, D, WR61, SR61, KR3);
    RIP2(D, E, A, B, C, WR62, SR62, KR3);
    RIP2(C, D, E, A, B, WR63, SR63, KR3);

    // Right line, round 5 (F1, no constant).
    RIP1(B, C, D, E, A, WR64, SR64);
    RIP1(A, B, C, D, E, WR65, SR65);
    RIP1(E, A, B, C, D, WR66, SR66);
    RIP1(D, E, A, B, C, WR67, SR67);
    RIP1(C, D, E, A, B, WR68, SR68);
    RIP1(B, C, D, E, A, WR69, SR69);
    RIP1(A, B, C, D, E, WR70, SR70);
    RIP1(E, A, B, C, D, WR71, SR71);
    RIP1(D, E, A, B, C, WR72, SR72);
    RIP1(C, D, E, A, B, WR73, SR73);
    RIP1(B, C, D, E, A, WR74, SR74);
    RIP1(A, B, C, D, E, WR75, SR75);
    RIP1(E, A, B, C, D, WR76, SR76);
    RIP1(D, E, A, B, C, WR77, SR77);
    RIP1(C, D, E, A, B, WR78, SR78);
    RIP1(B, C, D, E, A, WR79, SR79);

    // Cross-mix both lines back into the chaining state. D is reused as a
    // temporary for the new h[0] so that the old h[0] is still available for
    // the h[4] computation.
    D = h[1] + c + D;
    h[1] = h[2] + d + E;
    h[2] = h[3] + e + A;
    h[3] = h[4] + a + B;
    h[4] = h[0] + b + C;
    h[0] = D;
  }

#undef X
}
733 
// RIPEMD160 computes the RIPEMD-160 digest of |len| bytes at |data|, writes
// the 20-byte result to |out|, and returns |out|. Returns NULL only if
// initialization fails.
uint8_t *RIPEMD160(const uint8_t *data, size_t len,
                   uint8_t out[RIPEMD160_DIGEST_LENGTH]) {
  RIPEMD160_CTX ctx;
  if (RIPEMD160_Init(&ctx) == 0) {
    return NULL;
  }
  RIPEMD160_Update(&ctx, data, len);
  RIPEMD160_Final(out, &ctx);
  return out;
}
746