Embedded SDK — cx_aes_gcm.c (AES in Galois/Counter Mode)
1 
2 /*******************************************************************************
3  * Ledger Nano S - Secure firmware
4  * (c) 2022 Ledger
5  *
6  * Licensed under the Apache License, Version 2.0 (the "License");
7  * you may not use this file except in compliance with the License.
8  * You may obtain a copy of the License at
9  *
10  * http://www.apache.org/licenses/LICENSE-2.0
11  *
12  * Unless required by applicable law or agreed to in writing, software
13  * distributed under the License is distributed on an "AS IS" BASIS,
14  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15  * See the License for the specific language governing permissions and
16  * limitations under the License.
17  ********************************************************************************/
18 
19 #if defined(HAVE_AES_GCM) && defined(HAVE_AES)
20 
21 #include "cx_aes_gcm.h"
22 #include "lcx_aes.h"
23 #include "lcx_aes_gcm.h"
24 #if defined(HAVE_AEAD)
25 #include "lcx_aead.h"
26 #endif // HAVE_AEAD
27 #include "cx_utils.h"
28 #include "os_math.h"
29 #include "ox_bn.h"
30 #include <stddef.h>
31 #include <string.h>
32 
33 #define GF2_128_MPI_BYTES (2 * 16)
34 #define AES_BLOCK_BYTES (16)
35 #define AES_GCM_TAG_LEN (16)
36 
#if defined(HAVE_AEAD)
// Dispatch table binding the generic AEAD interface to the AES-GCM
// implementation in this file. Each cast adapts a cx_aes_gcm_* function,
// which takes a cx_aes_gcm_context_t *, to the generic void *ctx slot of
// cx_aead_base_t.
static const cx_aead_base_t cx_aes_gcm_functions = {
    (void (*)(void *ctx)) cx_aes_gcm_init,
    (cx_err_t(*)(void *ctx, const uint8_t *key, size_t key_len)) cx_aes_gcm_set_key,
    (cx_err_t(*)(void *ctx, uint32_t mode, const uint8_t *iv, size_t iv_len)) cx_aes_gcm_start,
    (cx_err_t(*)(void *ctx, const uint8_t *aad, size_t aad_len)) cx_aes_gcm_update_aad,
    (cx_err_t(*)(void *ctx, const uint8_t *input, uint8_t *output, size_t len)) cx_aes_gcm_update,
    (cx_err_t(*)(void *ctx, uint8_t *tag, size_t tag_len)) cx_aes_gcm_finish,
    (cx_err_t(*)(void *ctx,
                 const uint8_t *input,
                 size_t len,
                 const uint8_t *iv,
                 size_t iv_len,
                 const uint8_t *aad,
                 size_t aad_len,
                 uint8_t *output,
                 uint8_t *tag,
                 size_t tag_len)) cx_aes_gcm_encrypt_and_tag,
    (cx_err_t(*)(void *ctx,
                 const uint8_t *input,
                 size_t len,
                 const uint8_t *iv,
                 size_t iv_len,
                 const uint8_t *aad,
                 size_t aad_len,
                 uint8_t *output,
                 const uint8_t *tag,
                 size_t tag_len)) cx_aes_gcm_decrypt_and_auth,
    (cx_err_t(*)(void *ctx, const uint8_t *tag, size_t tag_len)) cx_aes_gcm_check_tag};

// AEAD descriptors: algorithm identifier, key size in bits, tag size in
// bits, and the shared function table. Only the key size differs between
// the three variants.
const cx_aead_info_t cx_aes128_gcm_info = {CX_AEAD_AES128_GCM, 128, 128, &cx_aes_gcm_functions};

const cx_aead_info_t cx_aes192_gcm_info = {CX_AEAD_AES192_GCM, 192, 128, &cx_aes_gcm_functions};

const cx_aead_info_t cx_aes256_gcm_info = {CX_AEAD_AES256_GCM, 256, 128, &cx_aes_gcm_functions};
#endif // HAVE_AEAD
73 
// The irreducible polynomial N(x) = x^128 + x^7 + x^2 + x + 1 defining
// GF(2^128) for the GHASH multiplication, stored big-endian over 17 bytes
// (leading 0x01 is the x^128 term, trailing 0x87 encodes x^7+x^2+x+1).
const uint8_t N[17] = {0x01,
                       0x00,
                       0x00,
                       0x00,
                       0x00,
                       0x00,
                       0x00,
                       0x00,
                       0x00,
                       0x00,
                       0x00,
                       0x00,
                       0x00,
                       0x00,
                       0x00,
                       0x00,
                       0x87};

// 2nd Montgomery constant: R2 = x^(2*t*128) mod N(x)
// t = 2 since the number of bytes of R is 17.
// Passed to the bn engine alongside N (see cx_gcm_mul).
const uint8_t R2[4] = {0x10, 0x00, 0x01, 0x11};
96 
/**
 * Increments the 32-bit big-endian counter held in the last 4 bytes of a
 * 16-byte GCM counter block, wrapping modulo 2^32.
 *
 * @param in 16-byte counter block; only bytes 12..15 are modified.
 */
static void cx_gcm_increment(uint8_t *in)
{
    int pos;

    // Walk from the least significant byte upward, stopping as soon as
    // an increment does not carry.
    for (pos = 15; pos >= 12; pos--) {
        if (++in[pos] != 0) {
            return;
        }
    }
}
112 
/**
 * Byte-wise XOR of two buffers: r[i] = a[i] ^ b[i] for len bytes.
 * r may alias a or b (callers XOR in place into the GHASH accumulator).
 *
 * @param r   Destination buffer.
 * @param a   First operand.
 * @param b   Second operand.
 * @param len Number of bytes to combine.
 */
static void cx_gcm_xor_block(uint8_t *r, const uint8_t *a, const uint8_t *b, size_t len)
{
    while (len-- > 0) {
        *r++ = *a++ ^ *b++;
    }
}
128 
/**
 * Mirrors the bit order of a byte (bit 0 <-> bit 7, bit 1 <-> bit 6, ...).
 * Used to convert between GHASH bit order and the bn engine's bit order.
 *
 * @param value Byte to reverse.
 * @return      Bit-reversed byte.
 */
static uint8_t cx_reverse_8bits(uint8_t value)
{
    uint8_t reversed = 0;
    int bit;

    // Shift bits out of value LSB-first and into reversed MSB-first.
    for (bit = 0; bit < 8; bit++) {
        reversed = (uint8_t) ((reversed << 1) | ((value >> bit) & 1u));
    }

    return reversed;
}
140 
/**
 * Writes buf into r in reverse byte order, bit-reversing each byte —
 * i.e. a full bit reversal of the buffer. The read/write order is the
 * same as the original loop, so aliasing behavior is unchanged; callers
 * in this file pass distinct source and destination buffers.
 *
 * @param r       Destination buffer, buf_len bytes.
 * @param buf     Source buffer.
 * @param buf_len Number of bytes.
 */
static void cx_reverse_buffer(uint8_t *r, uint8_t *buf, size_t buf_len)
{
    uint8_t *dst = r;
    uint8_t *src = buf + buf_len;

    while (src != buf) {
        *dst++ = cx_reverse_8bits(*--src);
    }
}
152 
/**
 * GHASH multiplication in GF(2^128): r = a * b modulo N(x), computed with
 * the hardware big-number engine.
 *
 * GHASH treats bit 0 of byte 0 as the most significant polynomial
 * coefficient, while the bn engine works in the opposite bit order, so
 * both operands are bit-reversed on the way in and the product on the
 * way out.
 *
 * @param r 16-byte result buffer; may alias a or b (operands are copied
 *          into bn registers before r is written).
 * @param a First 16-byte operand.
 * @param b Second 16-byte operand.
 * @return  CX_OK on success, otherwise the first error reported by the
 *          bn primitives (the bn lock is released in every case).
 */
static cx_err_t cx_gcm_mul(uint8_t *r, uint8_t *a, uint8_t *b)
{
    cx_err_t error;
    cx_bn_t bn_n, bn_c, bn_a, bn_b, bn_r;
    uint8_t tmp[AES_BLOCK_BYTES];

    CX_CHECK(cx_bn_lock(GF2_128_MPI_BYTES, 0));
    CX_CHECK(cx_bn_alloc(&bn_r, GF2_128_MPI_BYTES));
    // Bit-reverse operands into the bn engine's bit order.
    cx_reverse_buffer(tmp, a, AES_BLOCK_BYTES);
    CX_CHECK(cx_bn_alloc_init(&bn_a, GF2_128_MPI_BYTES, tmp, AES_BLOCK_BYTES));
    cx_reverse_buffer(tmp, b, AES_BLOCK_BYTES);
    CX_CHECK(cx_bn_alloc_init(&bn_b, GF2_128_MPI_BYTES, tmp, AES_BLOCK_BYTES));
    // Modulus N(x) and its 2nd Montgomery constant R2 (see file-level
    // definitions above).
    CX_CHECK(cx_bn_alloc_init(&bn_n, GF2_128_MPI_BYTES, N, sizeof(N)));
    CX_CHECK(cx_bn_alloc_init(&bn_c, GF2_128_MPI_BYTES, R2, sizeof(R2)));
    CX_CHECK(cx_bn_gf2_n_mul(bn_r, bn_b, bn_a, bn_n, bn_c));
    CX_CHECK(cx_bn_export(bn_r, tmp, AES_BLOCK_BYTES));
    // Bit-reverse the product back into GHASH order.
    cx_reverse_buffer(r, tmp, AES_BLOCK_BYTES);

end:
    cx_bn_unlock();
    return error;
}
178 
179 void cx_aes_gcm_init(cx_aes_gcm_context_t *ctx)
180 {
181  memset(ctx, 0, sizeof(cx_aes_gcm_context_t));
182 }
183 
184 cx_err_t cx_aes_gcm_set_key(cx_aes_gcm_context_t *ctx, const uint8_t *raw_key, size_t key_len)
185 {
186  return cx_aes_init_key_no_throw(raw_key, key_len, &ctx->key);
187 }
188 
/**
 * Starts a GCM computation: records the cipher direction and, on the
 * first call only, derives the hash subkey H = AES_K(0^128) and the
 * pre-counter block J0 from the IV (NIST SP 800-38D, section 7.1).
 *
 * @param ctx    AES-GCM context; the key must have been set beforehand.
 * @param mode   CX_ENCRYPT or CX_DECRYPT (validated later, in
 *               cx_aes_gcm_update).
 * @param iv     Initialization vector.
 * @param iv_len IV length in bytes, must be at least 1.
 * @return       CX_OK on success, CX_INVALID_PARAMETER for a NULL/empty
 *               IV, or an error propagated from the AES/bn primitives.
 */
cx_err_t cx_aes_gcm_start(cx_aes_gcm_context_t *ctx,
                          uint32_t mode,
                          const uint8_t *iv,
                          size_t iv_len)
{
    cx_err_t error = CX_INVALID_PARAMETER;
    size_t i, block_len;

    if ((NULL == iv) || (iv_len < 1)) {
        return CX_INVALID_PARAMETER;
    }

    ctx->mode = mode;
    // ctx->flag guards the one-time derivation of H and J0 so repeated
    // start calls do not recompute them.
    if (!ctx->flag) {
        memset(ctx->enc_block, 0, AES_BLOCK_BYTES);

        // Compute H = AES_K(0)
        CX_CHECK(cx_aes_enc_block(&ctx->key, ctx->enc_block, ctx->hash_key));
        memset(ctx->J0, 0, 16);

        // J0 = (IV|0|1)
        if (12 == iv_len) {
            memcpy(ctx->J0, iv, iv_len);
            STORE32BE(1, ctx->J0 + 12);
        }
        // J0 = GHASH_H(IV|0^(s+64)|[len(IV)]_64)
        else {
            // GHASH the IV one 16-byte block at a time (the last block
            // may be short; the XOR only touches block_len bytes).
            i = iv_len;
            while (i > 0) {
                block_len = MIN(i, AES_BLOCK_BYTES);
                cx_gcm_xor_block(ctx->J0, ctx->J0, iv, block_len);
                CX_CHECK(cx_gcm_mul(ctx->J0, ctx->hash_key, ctx->J0));
                iv += block_len;
                i -= block_len;
            }
            // Final GHASH block: 64 zero bits || [bit length of IV]_64.
            memset(ctx->enc_block, 0, 8);
            STORE64BE(iv_len * 8, ctx->enc_block + 8);
            cx_gcm_xor_block(ctx->J0, ctx->J0, ctx->enc_block, AES_BLOCK_BYTES);
            CX_CHECK(cx_gcm_mul(ctx->J0, ctx->hash_key, ctx->J0));
        }
        // Cache E_K(J0) in ctx->enc_block: cx_aes_gcm_finish XORs it with
        // the GHASH result to produce the tag.
        CX_CHECK(cx_aes_enc_block(&ctx->key, ctx->J0, ctx->enc_block));
        ctx->flag = 1;
    }

    error = CX_OK;
end:
    return error;
}
238 
/**
 * Absorbs additional authenticated data (AAD) into the GHASH state.
 * The AAD is authenticated but not encrypted. The cumulative AAD byte
 * count is accumulated in ctx->aad_len for the length block built by
 * cx_aes_gcm_finish.
 *
 * @param ctx     AES-GCM context (cx_aes_gcm_start must have been called).
 * @param aad     Additional authenticated data.
 * @param aad_len AAD length in bytes.
 * @return        CX_OK on success, or an error from the GF(2^128)
 *                multiplication.
 */
cx_err_t cx_aes_gcm_update_aad(cx_aes_gcm_context_t *ctx, const uint8_t *aad, size_t aad_len)
{
    size_t i, block_len;
    cx_err_t error = CX_OK;
    // Process additional data
    // Save for cx_aes_gcm_finish
    ctx->aad_len += aad_len;
    i = aad_len;
    // GHASH the AAD one 16-byte block at a time; a short final block is
    // XORed over only block_len bytes (implicit zero padding).
    while (i > 0) {
        block_len = MIN(i, AES_BLOCK_BYTES);
        cx_gcm_xor_block(ctx->processed, ctx->processed, aad, block_len);
        CX_CHECK(cx_gcm_mul(ctx->processed, ctx->hash_key, ctx->processed));
        aad += block_len;
        i -= block_len;
    }
end:
    return error;
}
257 
/**
 * Encrypts or decrypts len bytes in CTR mode and folds the ciphertext
 * into the GHASH state (the ciphertext is `out` when encrypting and `in`
 * when decrypting, which is the only difference between the two arms).
 *
 * NOTE(review): each 16-byte block consumes one counter increment, so
 * intermediate calls are presumably expected to pass multiples of the
 * AES block size — a short block in the middle of a stream would restart
 * keystream alignment on the next call. TODO confirm against callers.
 *
 * @param ctx AES-GCM context (cx_aes_gcm_start must have been called).
 * @param in  Input (plaintext for CX_ENCRYPT, ciphertext for CX_DECRYPT).
 * @param out Output buffer of at least len bytes.
 * @param len Number of bytes to process.
 * @return    CX_OK on success, CX_INVALID_PARAMETER_VALUE if ctx->mode
 *            is neither CX_ENCRYPT nor CX_DECRYPT, or an AES/bn error.
 */
cx_err_t cx_aes_gcm_update(cx_aes_gcm_context_t *ctx, const uint8_t *in, uint8_t *out, size_t len)
{
    cx_err_t error = CX_INVALID_PARAMETER;
    size_t i, block_len;
    uint8_t tmp[AES_BLOCK_BYTES];

    // Running ciphertext length, used in the length block of
    // cx_aes_gcm_finish.
    ctx->len += len;
    i = len;
    switch (ctx->mode) {
        case CX_ENCRYPT:
            while (i > 0) {
                block_len = MIN(i, AES_BLOCK_BYTES);
                // Keystream block: E_K(incremented counter).
                cx_gcm_increment(ctx->J0);
                CX_CHECK(cx_aes_enc_block(&ctx->key, ctx->J0, tmp));
                cx_gcm_xor_block(out, in, tmp, block_len);
                // GHASH absorbs the ciphertext, i.e. out.
                cx_gcm_xor_block(ctx->processed, ctx->processed, out, block_len);
                CX_CHECK(cx_gcm_mul(ctx->processed, ctx->hash_key, ctx->processed));
                if (i - block_len > 0) {
                    in += block_len;
                    out += block_len;
                }
                i -= block_len;
            }
            break;
        case CX_DECRYPT:
            while (i > 0) {
                block_len = MIN(i, AES_BLOCK_BYTES);
                // GHASH absorbs the ciphertext, i.e. in, before decrypting.
                cx_gcm_xor_block(ctx->processed, ctx->processed, in, block_len);
                CX_CHECK(cx_gcm_mul(ctx->processed, ctx->hash_key, ctx->processed));
                cx_gcm_increment(ctx->J0);
                CX_CHECK(cx_aes_enc_block(&ctx->key, ctx->J0, tmp));
                cx_gcm_xor_block(out, in, tmp, block_len);
                if (i - block_len > 0) {
                    out += block_len;
                    in += block_len;
                }
                i -= block_len;
            }
            break;
        default:
            return CX_INVALID_PARAMETER_VALUE;
    }
    error = CX_OK;
end:
    return error;
}
304 
305 cx_err_t cx_aes_gcm_finish(cx_aes_gcm_context_t *ctx, uint8_t *tag, size_t tag_len)
306 {
307  cx_err_t error;
308 
309  STORE64BE(ctx->aad_len * 8, ctx->J0);
310  STORE64BE(ctx->len * 8, ctx->J0 + 8);
311  cx_gcm_xor_block(ctx->processed, ctx->processed, ctx->J0, AES_BLOCK_BYTES);
312  CX_CHECK(cx_gcm_mul(ctx->processed, ctx->hash_key, ctx->processed));
313  cx_gcm_xor_block(tag, ctx->enc_block, ctx->processed, tag_len);
314 
315 end:
316  return error;
317 }
318 
/**
 * One-shot helper shared by cx_aes_gcm_encrypt_and_tag and
 * cx_aes_gcm_decrypt_and_auth: runs the full start / AAD / data / finish
 * sequence in the requested cipher direction, stopping at the first
 * failing step.
 *
 * @param ctx     AES-GCM context with the key already set.
 * @param mode    CX_ENCRYPT or CX_DECRYPT.
 * @param in      Input data (plaintext or ciphertext, per mode).
 * @param len     Input length in bytes.
 * @param iv      Initialization vector.
 * @param iv_len  IV length in bytes.
 * @param aad     Additional authenticated data.
 * @param aad_len AAD length in bytes.
 * @param out     Output buffer of at least len bytes.
 * @param tag     Output buffer receiving the tag_len-byte tag.
 * @param tag_len Tag length in bytes.
 * @return        CX_OK on success, or the first error encountered.
 */
static cx_err_t cx_aes_gcm_process_and_tag(cx_aes_gcm_context_t *ctx,
                                           uint32_t mode,
                                           uint8_t *in,
                                           size_t len,
                                           const uint8_t *iv,
                                           size_t iv_len,
                                           const uint8_t *aad,
                                           size_t aad_len,
                                           uint8_t *out,
                                           uint8_t *tag,
                                           size_t tag_len)
{
    cx_err_t error;

    CX_CHECK(cx_aes_gcm_start(ctx, mode, iv, iv_len));
    CX_CHECK(cx_aes_gcm_update_aad(ctx, aad, aad_len));
    CX_CHECK(cx_aes_gcm_update(ctx, in, out, len));
    CX_CHECK(cx_aes_gcm_finish(ctx, tag, tag_len));

end:
    return error;
}
341 
342 cx_err_t cx_aes_gcm_encrypt_and_tag(cx_aes_gcm_context_t *ctx,
343  uint8_t *in,
344  size_t len,
345  const uint8_t *iv,
346  size_t iv_len,
347  const uint8_t *aad,
348  size_t aad_len,
349  uint8_t *out,
350  uint8_t *tag,
351  size_t tag_len)
352 {
353  return cx_aes_gcm_process_and_tag(
354  ctx, CX_ENCRYPT, in, len, iv, iv_len, aad, aad_len, out, tag, tag_len);
355 }
356 
357 cx_err_t cx_aes_gcm_decrypt_and_auth(cx_aes_gcm_context_t *ctx,
358  uint8_t *in,
359  size_t len,
360  const uint8_t *iv,
361  size_t iv_len,
362  const uint8_t *aad,
363  size_t aad_len,
364  uint8_t *out,
365  const uint8_t *tag,
366  size_t tag_len)
367 {
368  cx_err_t error;
369  uint8_t check_tag[AES_BLOCK_BYTES];
370  uint8_t diff;
371 
372  CX_CHECK(cx_aes_gcm_process_and_tag(
373  ctx, CX_DECRYPT, in, len, iv, iv_len, aad, aad_len, out, check_tag, tag_len));
374  diff = cx_constant_time_eq(tag, check_tag, tag_len);
375  error = ((diff == 0) ? CX_OK : CX_INVALID_PARAMETER_VALUE);
376 
377  // The output buffer is set to 0 if the tag verification fails.
378  if (error) {
379  memset(out, 0, len);
380  }
381 
382 end:
383  return error;
384 }
385 
/**
 * Verifies the expected tag against the one computed from the current
 * context state — the streaming-API counterpart of the verification
 * performed by cx_aes_gcm_decrypt_and_auth.
 *
 * @param ctx     AES-GCM context after the last cx_aes_gcm_update call.
 * @param tag     Expected authentication tag.
 * @param tag_len Tag length in bytes; must not exceed AES_GCM_TAG_LEN.
 * @return        CX_OK if the tags match, CX_INVALID_PARAMETER_VALUE if
 *                they differ, CX_INVALID_PARAMETER_SIZE if tag_len is
 *                too large, or an error from cx_aes_gcm_finish.
 */
cx_err_t cx_aes_gcm_check_tag(cx_aes_gcm_context_t *ctx, const uint8_t *tag, size_t tag_len)
{
    cx_err_t error;
    uint8_t diff;
    uint8_t check_tag[AES_GCM_TAG_LEN];

    // Bound tag_len so cx_aes_gcm_finish cannot write past check_tag.
    if (tag_len > AES_GCM_TAG_LEN) {
        return CX_INVALID_PARAMETER_SIZE;
    }
    CX_CHECK(cx_aes_gcm_finish(ctx, check_tag, tag_len));

    diff = cx_constant_time_eq(tag, check_tag, tag_len);
    // Branch-free selection of the result code. NOTE(review): this
    // arithmetic assumes cx_constant_time_eq returns exactly 0 (equal)
    // or 1 (different) — confirm it never returns other non-zero values.
    error = diff * CX_INVALID_PARAMETER_VALUE + (1 - diff) * CX_OK;
end:
    return error;
}
402 
403 #endif // HAVE_AES_GCM && AES
#define STORE32BE(a, p)
Definition: cx_aes_gcm.h:28
#define STORE64BE(a, p)
Definition: cx_aes_gcm.h:35
uint8_t cx_constant_time_eq(const uint8_t *buf1, uint8_t *buf2, size_t len)
Definition: cx_utils.c:181
Authenticated Encryption with Associated Data (AEAD)
AES (Advanced Encryption Standard).
AES in Galois/Counter Mode (AES-GCM)
#define CX_ENCRYPT
Definition: lcx_common.h:126
#define CX_DECRYPT
Definition: lcx_common.h:127
#define MIN(x, y)
Definition: nbgl_types.h:79
unsigned char uint8_t
Definition: usbd_conf.h:53