/* Copyright (c) 2014, Intel Corporation.
 *
 * Permission to use, copy, modify, and/or distribute this software for any
 * purpose with or without fee is hereby granted, provided that the above
 * copyright notice and this permission notice appear in all copies.
 *
 * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
 * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
 * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY
 * SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
 * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION
 * OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN
 * CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. */

/* Developers and authors:
 * Shay Gueron (1, 2), and Vlad Krasnov (1)
 * (1) Intel Corporation, Israel Development Center
 * (2) University of Haifa
 * Reference:
 *   Shay Gueron and Vlad Krasnov
 *   "Fast Prime Field Elliptic Curve Cryptography with 256 Bit Primes"
 *   http://eprint.iacr.org/2013/816 */

#include "ecp_nistz.h"

#if defined(__GNUC__)
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wsign-conversion"
#endif

/* Point double: r = 2*a */
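/* This is the standard Jacobian doubling, specialized for a = -3 curves:
 *   S = 4*X*Y^2, M = 3*(X + Z^2)*(X - Z^2),
 *   X' = M^2 - 2*S, Y' = M*(S - X') - 8*Y^4, Z' = 2*Y*Z. */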
void GFp_nistz384_point_double(P384_POINT *r, const P384_POINT *a) {
  BN_ULONG S[P384_LIMBS];
  BN_ULONG M[P384_LIMBS];
  BN_ULONG Zsqr[P384_LIMBS];
  BN_ULONG tmp0[P384_LIMBS];

  const BN_ULONG *in_x = a->X;
  const BN_ULONG *in_y = a->Y;
  const BN_ULONG *in_z = a->Z;

  BN_ULONG *res_x = r->X;
  BN_ULONG *res_y = r->Y;
  BN_ULONG *res_z = r->Z;

  elem_mul_by_2(S, in_y);

  elem_sqr_mont(Zsqr, in_z);

  elem_sqr_mont(S, S);

  elem_mul_mont(res_z, in_z, in_y);
  elem_mul_by_2(res_z, res_z);

  elem_add(M, in_x, Zsqr);
  elem_sub(Zsqr, in_x, Zsqr);

  elem_sqr_mont(res_y, S);
  elem_div_by_2(res_y, res_y);

  elem_mul_mont(M, M, Zsqr);
  elem_mul_by_3(M, M);

  elem_mul_mont(S, S, in_x);
  elem_mul_by_2(tmp0, S);

  elem_sqr_mont(res_x, M);

  elem_sub(res_x, res_x, tmp0);
  elem_sub(S, S, res_x);

  elem_mul_mont(S, S, M);
  elem_sub(res_y, S, res_y);
}

/* Point addition: r = a+b */
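/* This is the standard Jacobian addition:
 *   U1 = X1*Z2^2, U2 = X2*Z1^2, S1 = Y1*Z2^3, S2 = Y2*Z1^3,
 *   H = U2 - U1, R = S2 - S1,
 *   X3 = R^2 - H^3 - 2*U1*H^2, Y3 = R*(U1*H^2 - X3) - S1*H^3, Z3 = H*Z1*Z2.
 * The doubling and point-at-infinity cases are handled separately below. */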
void GFp_nistz384_point_add(P384_POINT *r, const P384_POINT *a,
                            const P384_POINT *b) {
  BN_ULONG U2[P384_LIMBS], S2[P384_LIMBS];
  BN_ULONG U1[P384_LIMBS], S1[P384_LIMBS];
  BN_ULONG Z1sqr[P384_LIMBS];
  BN_ULONG Z2sqr[P384_LIMBS];
  BN_ULONG H[P384_LIMBS], R[P384_LIMBS];
  BN_ULONG Hsqr[P384_LIMBS];
  BN_ULONG Rsqr[P384_LIMBS];
  BN_ULONG Hcub[P384_LIMBS];

  BN_ULONG res_x[P384_LIMBS];
  BN_ULONG res_y[P384_LIMBS];
  BN_ULONG res_z[P384_LIMBS];

  const BN_ULONG *in1_x = a->X;
  const BN_ULONG *in1_y = a->Y;
  const BN_ULONG *in1_z = a->Z;

  const BN_ULONG *in2_x = b->X;
  const BN_ULONG *in2_y = b->Y;
  const BN_ULONG *in2_z = b->Z;

  BN_ULONG in1infty = is_zero(a->Z);
  BN_ULONG in2infty = is_zero(b->Z);

  elem_sqr_mont(Z2sqr, in2_z); /* Z2^2 */
  elem_sqr_mont(Z1sqr, in1_z); /* Z1^2 */

  elem_mul_mont(S1, Z2sqr, in2_z); /* S1 = Z2^3 */
  elem_mul_mont(S2, Z1sqr, in1_z); /* S2 = Z1^3 */

  elem_mul_mont(S1, S1, in1_y); /* S1 = Y1*Z2^3 */
  elem_mul_mont(S2, S2, in2_y); /* S2 = Y2*Z1^3 */
  elem_sub(R, S2, S1);          /* R = S2 - S1 */

  elem_mul_mont(U1, in1_x, Z2sqr); /* U1 = X1*Z2^2 */
  elem_mul_mont(U2, in2_x, Z1sqr); /* U2 = X2*Z1^2 */
  elem_sub(H, U2, U1);             /* H = U2 - U1 */

  BN_ULONG is_exceptional = is_equal(U1, U2) & ~in1infty & ~in2infty;
  if (is_exceptional) {
    if (is_equal(S1, S2)) {
      GFp_nistz384_point_double(r, a);
    } else {
      limbs_zero(r->X, P384_LIMBS);
      limbs_zero(r->Y, P384_LIMBS);
      limbs_zero(r->Z, P384_LIMBS);
    }
    return;
  }

  elem_sqr_mont(Rsqr, R);             /* R^2 */
  elem_mul_mont(res_z, H, in1_z);     /* Z3 = H*Z1*Z2 */
  elem_sqr_mont(Hsqr, H);             /* H^2 */
  elem_mul_mont(res_z, res_z, in2_z); /* Z3 = H*Z1*Z2 */
  elem_mul_mont(Hcub, Hsqr, H);       /* H^3 */

  elem_mul_mont(U2, U1, Hsqr); /* U1*H^2 */
  elem_mul_by_2(Hsqr, U2);     /* 2*U1*H^2 */

  elem_sub(res_x, Rsqr, Hsqr);
  elem_sub(res_x, res_x, Hcub);

  elem_sub(res_y, U2, res_x);

  elem_mul_mont(S2, S1, Hcub);
  elem_mul_mont(res_y, R, res_y);
  elem_sub(res_y, res_y, S2);

  copy_conditional(res_x, in2_x, in1infty);
  copy_conditional(res_y, in2_y, in1infty);
  copy_conditional(res_z, in2_z, in1infty);

  copy_conditional(res_x, in1_x, in2infty);
  copy_conditional(res_y, in1_y, in2infty);
  copy_conditional(res_z, in1_z, in2infty);

  limbs_copy(r->X, res_x, P384_LIMBS);
  limbs_copy(r->Y, res_y, P384_LIMBS);
  limbs_copy(r->Z, res_z, P384_LIMBS);
}

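/* Adds one precomputed table entry to |r|: |wvalue| is Booth-recoded into a
 * sign and a magnitude, the entry for that magnitude is selected from |table|,
 * and its Y coordinate is conditionally negated when the recoded digit is
 * negative. */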
static void add_precomputed_w5(P384_POINT *r, crypto_word wvalue,
                               const P384_POINT table[16]) {
  crypto_word recoded_is_negative;
  crypto_word recoded;
  booth_recode(&recoded_is_negative, &recoded, wvalue, 5);

  alignas(64) P384_POINT h;
  gfp_p384_point_select_w5(&h, table, recoded);

  alignas(64) BN_ULONG tmp[P384_LIMBS];
  GFp_p384_elem_neg(tmp, h.Y);
  copy_conditional(h.Y, tmp, recoded_is_negative);

  GFp_nistz384_point_add(r, r, &h);
}

/* r = p * p_scalar */
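/* Computes r = p_scalar * (p_x, p_y) with a signed, width-5 windowed method:
 * the scalar is converted to little-endian bytes, a table of 1*P through 16*P
 * is built, and the scalar is then processed from the most significant window
 * down, doubling five times per window and adding the Booth-selected entry. */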
void GFp_nistz384_point_mul(P384_POINT *r, const BN_ULONG p_scalar[P384_LIMBS],
                            const BN_ULONG p_x[P384_LIMBS],
                            const BN_ULONG p_y[P384_LIMBS]) {
  static const size_t kWindowSize = 5;
  static const crypto_word kMask = (1 << (5 /* kWindowSize */ + 1)) - 1;

  uint8_t p_str[(P384_LIMBS * sizeof(Limb)) + 1];
  gfp_little_endian_bytes_from_scalar(p_str, sizeof(p_str) / sizeof(p_str[0]),
                                      p_scalar, P384_LIMBS);

  /* A |P384_POINT| is (3 * 48) = 144 bytes, and the 64-byte alignment should
   * add no more than 63 bytes of overhead. Thus, |table| should require
   * ~2367 ((144 * 16) + 63) bytes of stack space. */
  alignas(64) P384_POINT table[16];

  /* table[0] is implicitly (0,0,0) (the point at infinity), therefore it is
   * not stored. All other values are actually stored with an offset of -1 in
   * table. */
  P384_POINT *row = table;

  limbs_copy(row[1 - 1].X, p_x, P384_LIMBS);
  limbs_copy(row[1 - 1].Y, p_y, P384_LIMBS);
  limbs_copy(row[1 - 1].Z, ONE, P384_LIMBS);

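  /* Fill the rest of the table: even multiples by doubling (2k*P = 2*(k*P)),
   * odd multiples by adding P ((k+1)*P = k*P + P). */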
  GFp_nistz384_point_double(&row[2 - 1], &row[1 - 1]);
  GFp_nistz384_point_add(&row[3 - 1], &row[2 - 1], &row[1 - 1]);
  GFp_nistz384_point_double(&row[4 - 1], &row[2 - 1]);
  GFp_nistz384_point_double(&row[6 - 1], &row[3 - 1]);
  GFp_nistz384_point_double(&row[8 - 1], &row[4 - 1]);
  GFp_nistz384_point_double(&row[12 - 1], &row[6 - 1]);
  GFp_nistz384_point_add(&row[5 - 1], &row[4 - 1], &row[1 - 1]);
  GFp_nistz384_point_add(&row[7 - 1], &row[6 - 1], &row[1 - 1]);
  GFp_nistz384_point_add(&row[9 - 1], &row[8 - 1], &row[1 - 1]);
  GFp_nistz384_point_add(&row[13 - 1], &row[12 - 1], &row[1 - 1]);
  GFp_nistz384_point_double(&row[14 - 1], &row[7 - 1]);
  GFp_nistz384_point_double(&row[10 - 1], &row[5 - 1]);
  GFp_nistz384_point_add(&row[15 - 1], &row[14 - 1], &row[1 - 1]);
  GFp_nistz384_point_add(&row[11 - 1], &row[10 - 1], &row[1 - 1]);
  GFp_nistz384_point_double(&row[16 - 1], &row[8 - 1]);

  static const size_t START_INDEX = 384 - 4;
  size_t index = START_INDEX;

  crypto_word recoded_is_negative;
  crypto_word recoded;

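  /* The most significant window is handled outside the loop. Only scalar bits
   * 379..383 are read here, so the top (sign) bit of the 6-bit Booth window is
   * zero and the recoded digit is never negative. */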
  crypto_word wvalue = p_str[(index - 1) / 8];
  wvalue = (wvalue >> ((index - 1) % 8)) & kMask;

  booth_recode(&recoded_is_negative, &recoded, wvalue, 5);
  dev_assert_secret(!recoded_is_negative);

  gfp_p384_point_select_w5(r, table, recoded);

  while (index >= kWindowSize) {
    if (index != START_INDEX) {
      size_t off = (index - 1) / 8;

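      /* A window may straddle a byte boundary, so read two bytes. */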
      wvalue = p_str[off] | p_str[off + 1] << 8;
      wvalue = (wvalue >> ((index - 1) % 8)) & kMask;
      add_precomputed_w5(r, wvalue, table);
    }

    index -= kWindowSize;

    GFp_nistz384_point_double(r, r);
    GFp_nistz384_point_double(r, r);
    GFp_nistz384_point_double(r, r);
    GFp_nistz384_point_double(r, r);
    GFp_nistz384_point_double(r, r);
  }

  /* Final window */
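  /* The lowest Booth window needs an implicit zero bit below bit 0 of the
   * scalar; shifting left by one supplies it. */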
  wvalue = p_str[0];
  wvalue = (wvalue << 1) & kMask;
  add_precomputed_w5(r, wvalue, table);
}

#if defined(__GNUC__)
#pragma GCC diagnostic pop
#endif
