1*f0865ec9SKyle Evans /* Some common helpers useful for many algorithms */
2*f0865ec9SKyle Evans #ifndef __COMMON_H__
3*f0865ec9SKyle Evans #define __COMMON_H__
4*f0865ec9SKyle Evans
5*f0865ec9SKyle Evans /* Include our arithmetic layer */
6*f0865ec9SKyle Evans #include <libecc/libarith.h>
7*f0865ec9SKyle Evans
8*f0865ec9SKyle Evans /* I2OSP and OS2IP internal primitives */
_i2osp(nn_src_t x,u8 * buf,u16 buflen)9*f0865ec9SKyle Evans ATTRIBUTE_WARN_UNUSED_RET static inline int _i2osp(nn_src_t x, u8 *buf, u16 buflen)
10*f0865ec9SKyle Evans {
11*f0865ec9SKyle Evans int ret;
12*f0865ec9SKyle Evans bitcnt_t blen;
13*f0865ec9SKyle Evans
14*f0865ec9SKyle Evans /* Sanity checks */
15*f0865ec9SKyle Evans MUST_HAVE((buf != NULL), ret, err);
16*f0865ec9SKyle Evans ret = nn_check_initialized(x); EG(ret, err);
17*f0865ec9SKyle Evans
18*f0865ec9SKyle Evans /* If x >= 256^xLen (the integer does not fit in the buffer),
19*f0865ec9SKyle Evans * return an error.
20*f0865ec9SKyle Evans */
21*f0865ec9SKyle Evans ret = nn_bitlen(x, &blen); EG(ret, err);
22*f0865ec9SKyle Evans MUST_HAVE(((8 * buflen) >= blen), ret, err);
23*f0865ec9SKyle Evans
24*f0865ec9SKyle Evans /* Export to the buffer */
25*f0865ec9SKyle Evans ret = nn_export_to_buf(buf, buflen, x);
26*f0865ec9SKyle Evans
27*f0865ec9SKyle Evans err:
28*f0865ec9SKyle Evans return ret;
29*f0865ec9SKyle Evans }
30*f0865ec9SKyle Evans
_os2ip(nn_t x,const u8 * buf,u16 buflen)31*f0865ec9SKyle Evans ATTRIBUTE_WARN_UNUSED_RET static inline int _os2ip(nn_t x, const u8 *buf, u16 buflen)
32*f0865ec9SKyle Evans {
33*f0865ec9SKyle Evans int ret;
34*f0865ec9SKyle Evans
35*f0865ec9SKyle Evans /* We do not want to exceed our computation compatible
36*f0865ec9SKyle Evans * size.
37*f0865ec9SKyle Evans */
38*f0865ec9SKyle Evans MUST_HAVE((buflen <= NN_USABLE_MAX_BYTE_LEN), ret, err);
39*f0865ec9SKyle Evans
40*f0865ec9SKyle Evans /* Import the NN */
41*f0865ec9SKyle Evans ret = nn_init_from_buf(x, buf, buflen);
42*f0865ec9SKyle Evans
43*f0865ec9SKyle Evans err:
44*f0865ec9SKyle Evans return ret;
45*f0865ec9SKyle Evans }
46*f0865ec9SKyle Evans
/* Reverses the endianness of a buffer in place */
_reverse_endianness(u8 * buf,u16 buf_size)48*f0865ec9SKyle Evans ATTRIBUTE_WARN_UNUSED_RET static inline int _reverse_endianness(u8 *buf, u16 buf_size)
49*f0865ec9SKyle Evans {
50*f0865ec9SKyle Evans u16 i;
51*f0865ec9SKyle Evans u8 tmp;
52*f0865ec9SKyle Evans int ret;
53*f0865ec9SKyle Evans
54*f0865ec9SKyle Evans MUST_HAVE((buf != NULL), ret, err);
55*f0865ec9SKyle Evans
56*f0865ec9SKyle Evans if(buf_size > 1){
57*f0865ec9SKyle Evans for(i = 0; i < (buf_size / 2); i++){
58*f0865ec9SKyle Evans tmp = buf[i];
59*f0865ec9SKyle Evans buf[i] = buf[buf_size - 1 - i];
60*f0865ec9SKyle Evans buf[buf_size - 1 - i] = tmp;
61*f0865ec9SKyle Evans }
62*f0865ec9SKyle Evans }
63*f0865ec9SKyle Evans
64*f0865ec9SKyle Evans ret = 0;
65*f0865ec9SKyle Evans
66*f0865ec9SKyle Evans err:
67*f0865ec9SKyle Evans return ret;
68*f0865ec9SKyle Evans }
69*f0865ec9SKyle Evans
/* Helper to fix the MSB of a scalar using the trick in
 * https://eprint.iacr.org/2011/232.pdf
 *
 * We distinguish three situations:
 * - The scalar m is < q (the order), in this case we compute:
 *   -
 *   | m' = m + (2 * q) if [log(m + q)] == [log(q)],
 *   | m' = m + q otherwise.
 *   -
 * - The scalar m is >= q and < q**2, in this case we compute:
 *   -
 *   | m' = m + (2 * (q**2)) if [log(m + (q**2))] == [log(q**2)],
 *   | m' = m + (q**2) otherwise.
 *   -
 * - The scalar m is >= (q**2), in this case m == m'
 * We only deal with 0 <= m < (q**2) using the countermeasure. When m >= (q**2),
 * we stick with m' = m, accepting MSB issues (not much can be done in this case
 * anyways).
 */
/*
 * Compute the adjusted scalar m' (in m_msb_fixed) from m and the order q,
 * following the algorithm described in the comment above. m_msb_fixed may
 * alias m: the result is accumulated in the local _m_msb_fixed first.
 * Returns 0 on success, non-zero on error.
 */
ATTRIBUTE_WARN_UNUSED_RET static inline int _fix_scalar_msb(nn_src_t m, nn_src_t q, nn_t m_msb_fixed)
{
	int ret, cmp;
	/* _m_msb_fixed to handle aliasing */
	nn q_square, _m_msb_fixed;
	/* Mark locals as uninitialized so the err path can safely uninit them */
	q_square.magic = _m_msb_fixed.magic = WORD(0);

	/* Sanity checks */
	ret = nn_check_initialized(m); EG(ret, err);
	ret = nn_check_initialized(q); EG(ret, err);
	ret = nn_check_initialized(m_msb_fixed); EG(ret, err);

	ret = nn_init(&q_square, 0); EG(ret, err);
	ret = nn_init(&_m_msb_fixed, 0); EG(ret, err);

	/* First compute q**2 */
	ret = nn_sqr(&q_square, q); EG(ret, err);
	/* Then compute m' depending on m size */
	ret = nn_cmp(m, q, &cmp); EG(ret, err);
	if (cmp < 0){
		bitcnt_t msb_bit_len, q_bitlen;

		/* Case where m < q:
		 * compute m + q, then conditionally add q a second time when
		 * bitlen(m + q) == bitlen(q), per the algorithm above, so the
		 * result is m + q or m + 2*q.
		 */
		ret = nn_add(&_m_msb_fixed, m, q); EG(ret, err);
		ret = nn_bitlen(&_m_msb_fixed, &msb_bit_len); EG(ret, err);
		ret = nn_bitlen(q, &q_bitlen); EG(ret, err);
		ret = nn_cnd_add((msb_bit_len == q_bitlen), m_msb_fixed,
				  &_m_msb_fixed, q); EG(ret, err);
	} else {
		ret = nn_cmp(m, &q_square, &cmp); EG(ret, err);
		if (cmp < 0) {
			bitcnt_t msb_bit_len, q_square_bitlen;

			/* Case where m >= q and m < (q**2):
			 * same construction with q**2 in place of q, yielding
			 * m + (q**2) or m + 2*(q**2).
			 */
			ret = nn_add(&_m_msb_fixed, m, &q_square); EG(ret, err);
			ret = nn_bitlen(&_m_msb_fixed, &msb_bit_len); EG(ret, err);
			ret = nn_bitlen(&q_square, &q_square_bitlen); EG(ret, err);
			ret = nn_cnd_add((msb_bit_len == q_square_bitlen),
					  m_msb_fixed, &_m_msb_fixed, &q_square); EG(ret, err);
		} else {
			/* Case where m >= (q**2): no fix, m' = m */
			ret = nn_copy(m_msb_fixed, m); EG(ret, err);
		}
	}

err:
	nn_uninit(&q_square);
	nn_uninit(&_m_msb_fixed);

	return ret;
}
140*f0865ec9SKyle Evans
141*f0865ec9SKyle Evans /* Helper to blind the scalar.
142*f0865ec9SKyle Evans * Compute m_blind = m + (b * q) where b is a random value modulo q.
143*f0865ec9SKyle Evans * Aliasing is supported.
144*f0865ec9SKyle Evans */
_blind_scalar(nn_src_t m,nn_src_t q,nn_t m_blind)145*f0865ec9SKyle Evans ATTRIBUTE_WARN_UNUSED_RET static inline int _blind_scalar(nn_src_t m, nn_src_t q, nn_t m_blind)
146*f0865ec9SKyle Evans {
147*f0865ec9SKyle Evans int ret;
148*f0865ec9SKyle Evans nn tmp;
149*f0865ec9SKyle Evans tmp.magic = WORD(0);
150*f0865ec9SKyle Evans
151*f0865ec9SKyle Evans /* Sanity checks */
152*f0865ec9SKyle Evans ret = nn_check_initialized(m); EG(ret, err);
153*f0865ec9SKyle Evans ret = nn_check_initialized(q); EG(ret, err);
154*f0865ec9SKyle Evans ret = nn_check_initialized(m_blind); EG(ret, err);
155*f0865ec9SKyle Evans
156*f0865ec9SKyle Evans ret = nn_get_random_mod(&tmp, q); EG(ret, err);
157*f0865ec9SKyle Evans
158*f0865ec9SKyle Evans ret = nn_mul(&tmp, &tmp, q); EG(ret, err);
159*f0865ec9SKyle Evans ret = nn_add(m_blind, &tmp, m);
160*f0865ec9SKyle Evans
161*f0865ec9SKyle Evans err:
162*f0865ec9SKyle Evans nn_uninit(&tmp);
163*f0865ec9SKyle Evans
164*f0865ec9SKyle Evans return ret;
165*f0865ec9SKyle Evans }
166*f0865ec9SKyle Evans
167*f0865ec9SKyle Evans /*
168*f0865ec9SKyle Evans * NOT constant time at all and not secure against side-channels. This is
169*f0865ec9SKyle Evans * an internal function only used for DSA verification on public data.
170*f0865ec9SKyle Evans *
171*f0865ec9SKyle Evans * Compute (base ** exp) mod (mod) using a square and multiply algorithm.
172*f0865ec9SKyle Evans * Internally, this computes Montgomery coefficients and uses the redc
173*f0865ec9SKyle Evans * function.
174*f0865ec9SKyle Evans *
175*f0865ec9SKyle Evans * Returns 0 on success, -1 on error.
176*f0865ec9SKyle Evans */
ATTRIBUTE_WARN_UNUSED_RET static inline int _nn_mod_pow_insecure(nn_t out, nn_src_t base,
					  nn_src_t exp, nn_src_t mod)
{
	int ret, isodd, cmp;
	bitcnt_t explen;
	u8 expbit;
	nn r, r_square, _base, one;
	word_t mpinv;
	/* Mark locals as uninitialized so the err path can safely uninit them */
	r.magic = r_square.magic = _base.magic = one.magic = WORD(0);

	/* Aliasing is not supported for this internal helper */
	MUST_HAVE((out != base) && (out != exp) && (out != mod), ret, err);

	/* Check initializations */
	ret = nn_check_initialized(base); EG(ret, err);
	ret = nn_check_initialized(exp); EG(ret, err);
	ret = nn_check_initialized(mod); EG(ret, err);

	ret = nn_bitlen(exp, &explen); EG(ret, err);
	/* Sanity check: a zero exponent (bitlen 0) is rejected */
	MUST_HAVE((explen > 0), ret, err);

	/* Check that the modulo is indeed odd (required for redc/Montgomery) */
	ret = nn_isodd(mod, &isodd); EG(ret, err);
	MUST_HAVE(isodd, ret, err);

	/* Compute the Montgomery coefficients: r (1 in Montgomery form),
	 * r_square (used to enter the Montgomery domain) and mpinv.
	 */
	ret = nn_compute_redc1_coefs(&r, &r_square, mod, &mpinv); EG(ret, err);

	/* Reduce the base if necessary so that _base < mod */
	ret = nn_cmp(base, mod, &cmp); EG(ret, err);
	if(cmp >= 0){
		ret = nn_mod(&_base, base, mod); EG(ret, err);
	}
	else{
		ret = nn_copy(&_base, base); EG(ret, err);
	}

	/* Bring the base into the Montgomery domain: _base <- _base * r mod mod */
	ret = nn_mul_redc1(&_base, &_base, &r_square, mod, mpinv); EG(ret, err);
	/* Start the accumulator at 1 in Montgomery form */
	ret = nn_copy(out, &r); EG(ret, err);

	ret = nn_init(&one, 0); EG(ret, err);
	ret = nn_one(&one); EG(ret, err);

	/* Left-to-right square and multiply over the bits of exp */
	while (explen > 0) {
		explen = (bitcnt_t)(explen - 1);

		/* Get the bit */
		ret = nn_getbit(exp, explen, &expbit); EG(ret, err);

		/* Square */
		ret = nn_mul_redc1(out, out, out, mod, mpinv); EG(ret, err);

		if(expbit){
			/* Multiply */
			ret = nn_mul_redc1(out, out, &_base, mod, mpinv); EG(ret, err);
		}
	}
	/* Unredcify the output (leave the Montgomery domain by multiplying by 1) */
	ret = nn_mul_redc1(out, out, &one, mod, mpinv);

err:
	nn_uninit(&r);
	nn_uninit(&r_square);
	nn_uninit(&_base);
	nn_uninit(&one);

	return ret;
}
246*f0865ec9SKyle Evans
247*f0865ec9SKyle Evans
248*f0865ec9SKyle Evans #endif /* __COMMON_H__ */
249