FD.io VPP  v20.09-64-g4f7b92f0a
Vector Packet Processing
ipsecmb.c
Go to the documentation of this file.
1 /*
2  * ipsecmb.c - Intel IPSec Multi-buffer library Crypto Engine
3  *
4  * Copyright (c) 2019 Cisco Systems
5  * Licensed under the Apache License, Version 2.0 (the "License");
6  * you may not use this file except in compliance with the License.
7  * You may obtain a copy of the License at:
8  *
9  * http://www.apache.org/licenses/LICENSE-2.0
10  *
11  * Unless required by applicable law or agreed to in writing, software
12  * distributed under the License is distributed on an "AS IS" BASIS,
13  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14  * See the License for the specific language governing permissions and
15  * limitations under the License.
16  */
17 
18 #include <fcntl.h>
19 
20 #include <intel-ipsec-mb.h>
21 
22 #include <vnet/vnet.h>
23 #include <vnet/plugin/plugin.h>
24 #include <vpp/app/version.h>
25 #include <vnet/crypto/crypto.h>
26 #include <vppinfra/cpu.h>
27 
28 #define HMAC_MAX_BLOCK_SIZE SHA_512_BLOCK_SIZE
29 #define EXPANDED_KEY_N_BYTES (16 * 15)
30 
31 typedef struct
32 {
33  CLIB_CACHE_LINE_ALIGN_MARK (cacheline0);
34  MB_MGR *mgr;
35  __m128i cbc_iv;
37 
38 typedef struct
39 {
42  aes_gcm_pre_t aes_gcm_pre;
43  keyexp_t keyexp;
44  hash_one_block_t hash_one_block;
45  hash_fn_t hash_fn;
47 
48 typedef struct ipsecmb_main_t_
49 {
52  void **key_data;
54 
55 typedef struct
56 {
57  u8 enc_key_exp[EXPANDED_KEY_N_BYTES];
58  u8 dec_key_exp[EXPANDED_KEY_N_BYTES];
60 
61 static ipsecmb_main_t ipsecmb_main = { };
62 
63 /*
64  * (Alg, JOB_HASH_ALG, fn, block-size-bytes, hash-size-bytes, digest-size-bytes)
65  */
66 #define foreach_ipsecmb_hmac_op \
67  _(SHA1, SHA1, sha1, 64, 20, 20) \
68  _(SHA224, SHA_224, sha224, 64, 32, 28) \
69  _(SHA256, SHA_256, sha256, 64, 32, 32) \
70  _(SHA384, SHA_384, sha384, 128, 64, 48) \
71  _(SHA512, SHA_512, sha512, 128, 64, 64)
72 
73 /*
74  * (Alg, key-len-bits)
75  */
76 #define foreach_ipsecmb_cbc_cipher_op \
77  _(AES_128_CBC, 128) \
78  _(AES_192_CBC, 192) \
79  _(AES_256_CBC, 256)
80 
81 /*
82  * (Alg, key-len-bytes, iv-len-bytes)
83  */
84 #define foreach_ipsecmb_gcm_cipher_op \
85  _(AES_128_GCM, 128) \
86  _(AES_192_GCM, 192) \
87  _(AES_256_GCM, 256)
88 
89 always_inline void
90 ipsecmb_retire_hmac_job (JOB_AES_HMAC * job, u32 * n_fail, u32 digest_size)
91 {
92  vnet_crypto_op_t *op = job->user_data;
93  u32 len = op->digest_len ? op->digest_len : digest_size;
94 
95  if (STS_COMPLETED != job->status)
96  {
97  op->status = VNET_CRYPTO_OP_STATUS_FAIL_BAD_HMAC;
98  *n_fail = *n_fail + 1;
99  return;
100  }
101 
103  {
104  if ((memcmp (op->digest, job->auth_tag_output, len)))
105  {
106  *n_fail = *n_fail + 1;
107  op->status = VNET_CRYPTO_OP_STATUS_FAIL_BAD_HMAC;
108  return;
109  }
110  }
111  else if (len == digest_size)
112  clib_memcpy_fast (op->digest, job->auth_tag_output, digest_size);
113  else
114  clib_memcpy_fast (op->digest, job->auth_tag_output, len);
115 
116  op->status = VNET_CRYPTO_OP_STATUS_COMPLETED;
117 }
118 
121  u32 n_ops, u32 block_size, u32 hash_size,
122  u32 digest_size, JOB_HASH_ALG alg)
123 {
124  ipsecmb_main_t *imbm = &ipsecmb_main;
126  vm->thread_index);
127  JOB_AES_HMAC *job;
128  u32 i, n_fail = 0;
129  u8 scratch[n_ops][digest_size];
130 
131  /*
132  * queue all the jobs first ...
133  */
134  for (i = 0; i < n_ops; i++)
135  {
136  vnet_crypto_op_t *op = ops[i];
137  u8 *kd = (u8 *) imbm->key_data[op->key_index];
138 
139  job = IMB_GET_NEXT_JOB (ptd->mgr);
140 
141  job->src = op->src;
142  job->hash_start_src_offset_in_bytes = 0;
143  job->msg_len_to_hash_in_bytes = op->len;
144  job->hash_alg = alg;
145  job->auth_tag_output_len_in_bytes = digest_size;
146  job->auth_tag_output = scratch[i];
147 
148  job->cipher_mode = NULL_CIPHER;
149  job->cipher_direction = DECRYPT;
150  job->chain_order = HASH_CIPHER;
151 
152  job->u.HMAC._hashed_auth_key_xor_ipad = kd;
153  job->u.HMAC._hashed_auth_key_xor_opad = kd + hash_size;
154  job->user_data = op;
155 
156  job = IMB_SUBMIT_JOB (ptd->mgr);
157 
158  if (job)
159  ipsecmb_retire_hmac_job (job, &n_fail, digest_size);
160  }
161 
162  while ((job = IMB_FLUSH_JOB (ptd->mgr)))
163  ipsecmb_retire_hmac_job (job, &n_fail, digest_size);
164 
165  return n_ops - n_fail;
166 }
167 
/* Instantiate one thin wrapper per HMAC algorithm:
 * (Alg, JOB_HASH_ALG, fn, block-size, hash-size, digest-size) */
#define _(a, b, c, d, e, f)                                             \
static_always_inline u32                                                \
ipsecmb_ops_hmac_##a (vlib_main_t * vm,                                 \
                      vnet_crypto_op_t * ops[],                         \
                      u32 n_ops)                                        \
{ return ipsecmb_ops_hmac_inline (vm, ops, n_ops, d, e, f, b); }        \

foreach_ipsecmb_hmac_op;
#undef _
177 
178 always_inline void
179 ipsecmb_retire_cipher_job (JOB_AES_HMAC * job, u32 * n_fail)
180 {
181  vnet_crypto_op_t *op = job->user_data;
182 
183  if (STS_COMPLETED != job->status)
184  {
185  op->status = VNET_CRYPTO_OP_STATUS_FAIL_BAD_HMAC;
186  *n_fail = *n_fail + 1;
187  }
188  else
189  op->status = VNET_CRYPTO_OP_STATUS_COMPLETED;
190 }
191 
194  u32 n_ops, u32 key_len,
195  JOB_CIPHER_DIRECTION direction)
196 {
197  ipsecmb_main_t *imbm = &ipsecmb_main;
199  vm->thread_index);
200  JOB_AES_HMAC *job;
201  u32 i, n_fail = 0;
202 
203  for (i = 0; i < n_ops; i++)
204  {
206  vnet_crypto_op_t *op = ops[i];
207  kd = (ipsecmb_aes_cbc_key_data_t *) imbm->key_data[op->key_index];
208  __m128i iv;
209 
210  job = IMB_GET_NEXT_JOB (ptd->mgr);
211 
212  job->src = op->src;
213  job->dst = op->dst;
214  job->msg_len_to_cipher_in_bytes = op->len;
215  job->cipher_start_src_offset_in_bytes = 0;
216 
217  job->hash_alg = NULL_HASH;
218  job->cipher_mode = CBC;
219  job->cipher_direction = direction;
220  job->chain_order = (direction == ENCRYPT ? CIPHER_HASH : HASH_CIPHER);
221 
222  if ((direction == ENCRYPT) && (op->flags & VNET_CRYPTO_OP_FLAG_INIT_IV))
223  {
224  iv = ptd->cbc_iv;
225  _mm_storeu_si128 ((__m128i *) op->iv, iv);
226  ptd->cbc_iv = _mm_aesenc_si128 (iv, iv);
227  }
228 
229  job->aes_key_len_in_bytes = key_len / 8;
230  job->aes_enc_key_expanded = kd->enc_key_exp;
231  job->aes_dec_key_expanded = kd->dec_key_exp;
232  job->iv = op->iv;
233  job->iv_len_in_bytes = AES_BLOCK_SIZE;
234 
235  job->user_data = op;
236 
237  job = IMB_SUBMIT_JOB (ptd->mgr);
238 
239  if (job)
240  ipsecmb_retire_cipher_job (job, &n_fail);
241  }
242 
243  while ((job = IMB_FLUSH_JOB (ptd->mgr)))
244  ipsecmb_retire_cipher_job (job, &n_fail);
245 
246  return n_ops - n_fail;
247 }
248 
/* Instantiate enc/dec wrappers per AES-CBC key size: (Alg, key-len-bits) */
#define _(a, b)                                                              \
static_always_inline u32                                                     \
ipsecmb_ops_cbc_cipher_enc_##a (vlib_main_t * vm,                            \
                                vnet_crypto_op_t * ops[],                    \
                                u32 n_ops)                                   \
{ return ipsecmb_ops_cbc_cipher_inline (vm, ops, n_ops, b, ENCRYPT); }       \
                                                                             \
static_always_inline u32                                                     \
ipsecmb_ops_cbc_cipher_dec_##a (vlib_main_t * vm,                            \
                                vnet_crypto_op_t * ops[],                    \
                                u32 n_ops)                                   \
{ return ipsecmb_ops_cbc_cipher_inline (vm, ops, n_ops, b, DECRYPT); }       \

foreach_ipsecmb_cbc_cipher_op;
#undef _
264 
/* Instantiate AES-GCM enc/dec handlers (flat and chained-buffer variants)
 * per key size: (Alg, key-len-bits).  Decrypt paths compute the tag into a
 * scratch buffer and compare against op->tag; mismatch marks the op failed. */
#define _(a, b)                                                              \
static_always_inline u32                                                     \
ipsecmb_ops_gcm_cipher_enc_##a##_chained (vlib_main_t * vm,                  \
    vnet_crypto_op_t * ops[], vnet_crypto_op_chunk_t *chunks, u32 n_ops)     \
{                                                                            \
  ipsecmb_main_t *imbm = &ipsecmb_main;                                      \
  ipsecmb_per_thread_data_t *ptd = vec_elt_at_index (imbm->per_thread_data,  \
                                                     vm->thread_index);      \
  MB_MGR *m = ptd->mgr;                                                      \
  vnet_crypto_op_chunk_t *chp;                                               \
  u32 i, j;                                                                  \
                                                                             \
  for (i = 0; i < n_ops; i++)                                                \
    {                                                                        \
      struct gcm_key_data *kd;                                               \
      struct gcm_context_data ctx;                                           \
      vnet_crypto_op_t *op = ops[i];                                         \
                                                                             \
      kd = (struct gcm_key_data *) imbm->key_data[op->key_index];            \
      ASSERT (op->flags & VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS);              \
      IMB_AES##b##_GCM_INIT (m, kd, &ctx, op->iv, op->aad, op->aad_len);     \
      chp = chunks + op->chunk_index;                                        \
      for (j = 0; j < op->n_chunks; j++)                                     \
        {                                                                    \
          IMB_AES##b##_GCM_ENC_UPDATE (m, kd, &ctx, chp->dst, chp->src,      \
                                       chp->len);                            \
          chp += 1;                                                          \
        }                                                                    \
      IMB_AES##b##_GCM_ENC_FINALIZE (m, kd, &ctx, op->tag, op->tag_len);     \
                                                                             \
      op->status = VNET_CRYPTO_OP_STATUS_COMPLETED;                          \
    }                                                                        \
                                                                             \
  return n_ops;                                                              \
}                                                                            \
                                                                             \
static_always_inline u32                                                     \
ipsecmb_ops_gcm_cipher_enc_##a (vlib_main_t * vm, vnet_crypto_op_t * ops[],  \
                                u32 n_ops)                                   \
{                                                                            \
  ipsecmb_main_t *imbm = &ipsecmb_main;                                      \
  ipsecmb_per_thread_data_t *ptd = vec_elt_at_index (imbm->per_thread_data,  \
                                                     vm->thread_index);      \
  MB_MGR *m = ptd->mgr;                                                      \
  u32 i;                                                                     \
                                                                             \
  for (i = 0; i < n_ops; i++)                                                \
    {                                                                        \
      struct gcm_key_data *kd;                                               \
      struct gcm_context_data ctx;                                           \
      vnet_crypto_op_t *op = ops[i];                                         \
                                                                             \
      kd = (struct gcm_key_data *) imbm->key_data[op->key_index];            \
      IMB_AES##b##_GCM_ENC (m, kd, &ctx, op->dst, op->src, op->len, op->iv,  \
                            op->aad, op->aad_len, op->tag, op->tag_len);     \
                                                                             \
      op->status = VNET_CRYPTO_OP_STATUS_COMPLETED;                          \
    }                                                                        \
                                                                             \
  return n_ops;                                                              \
}                                                                            \
                                                                             \
static_always_inline u32                                                     \
ipsecmb_ops_gcm_cipher_dec_##a##_chained (vlib_main_t * vm,                  \
    vnet_crypto_op_t * ops[], vnet_crypto_op_chunk_t *chunks, u32 n_ops)     \
{                                                                            \
  ipsecmb_main_t *imbm = &ipsecmb_main;                                      \
  ipsecmb_per_thread_data_t *ptd = vec_elt_at_index (imbm->per_thread_data,  \
                                                     vm->thread_index);      \
  MB_MGR *m = ptd->mgr;                                                      \
  vnet_crypto_op_chunk_t *chp;                                               \
  u32 i, j, n_failed = 0;                                                    \
                                                                             \
  for (i = 0; i < n_ops; i++)                                                \
    {                                                                        \
      struct gcm_key_data *kd;                                               \
      struct gcm_context_data ctx;                                           \
      vnet_crypto_op_t *op = ops[i];                                         \
      u8 scratch[64];                                                        \
                                                                             \
      kd = (struct gcm_key_data *) imbm->key_data[op->key_index];            \
      ASSERT (op->flags & VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS);              \
      IMB_AES##b##_GCM_INIT (m, kd, &ctx, op->iv, op->aad, op->aad_len);     \
      chp = chunks + op->chunk_index;                                        \
      for (j = 0; j < op->n_chunks; j++)                                     \
        {                                                                    \
          IMB_AES##b##_GCM_DEC_UPDATE (m, kd, &ctx, chp->dst, chp->src,      \
                                       chp->len);                            \
          chp += 1;                                                          \
        }                                                                    \
      IMB_AES##b##_GCM_DEC_FINALIZE (m, kd, &ctx, scratch, op->tag_len);     \
                                                                             \
      if ((memcmp (op->tag, scratch, op->tag_len)))                          \
        {                                                                    \
          op->status = VNET_CRYPTO_OP_STATUS_FAIL_BAD_HMAC;                  \
          n_failed++;                                                        \
        }                                                                    \
      else                                                                   \
        op->status = VNET_CRYPTO_OP_STATUS_COMPLETED;                        \
    }                                                                        \
                                                                             \
  return n_ops - n_failed;                                                   \
}                                                                            \
                                                                             \
static_always_inline u32                                                     \
ipsecmb_ops_gcm_cipher_dec_##a (vlib_main_t * vm, vnet_crypto_op_t * ops[],  \
                                u32 n_ops)                                   \
{                                                                            \
  ipsecmb_main_t *imbm = &ipsecmb_main;                                      \
  ipsecmb_per_thread_data_t *ptd = vec_elt_at_index (imbm->per_thread_data,  \
                                                     vm->thread_index);      \
  MB_MGR *m = ptd->mgr;                                                      \
  u32 i, n_failed = 0;                                                       \
                                                                             \
  for (i = 0; i < n_ops; i++)                                                \
    {                                                                        \
      struct gcm_key_data *kd;                                               \
      struct gcm_context_data ctx;                                           \
      vnet_crypto_op_t *op = ops[i];                                         \
      u8 scratch[64];                                                        \
                                                                             \
      kd = (struct gcm_key_data *) imbm->key_data[op->key_index];            \
      IMB_AES##b##_GCM_DEC (m, kd, &ctx, op->dst, op->src, op->len, op->iv,  \
                            op->aad, op->aad_len, scratch, op->tag_len);     \
                                                                             \
      if ((memcmp (op->tag, scratch, op->tag_len)))                          \
        {                                                                    \
          op->status = VNET_CRYPTO_OP_STATUS_FAIL_BAD_HMAC;                  \
          n_failed++;                                                        \
        }                                                                    \
      else                                                                   \
        op->status = VNET_CRYPTO_OP_STATUS_COMPLETED;                        \
    }                                                                        \
                                                                             \
  return n_ops - n_failed;                                                   \
}

foreach_ipsecmb_gcm_cipher_op;
#undef _
404 
405 clib_error_t *
407 {
409  clib_error_t *err = 0;
410  int fd;
411 
412  if ((fd = open ("/dev/urandom", O_RDONLY)) < 0)
413  return clib_error_return_unix (0, "failed to open '/dev/urandom'");
414 
415  vec_foreach (ptd, imbm->per_thread_data)
416  {
417  if (read (fd, &ptd->cbc_iv, sizeof (ptd->cbc_iv)) != sizeof (ptd->cbc_iv))
418  {
419  err = clib_error_return_unix (0, "'/dev/urandom' read failure");
420  close (fd);
421  return (err);
422  }
423  }
424 
425  close (fd);
426  return (NULL);
427 }
428 
429 static void
432 {
433  ipsecmb_main_t *imbm = &ipsecmb_main;
435  ipsecmb_alg_data_t *ad = imbm->alg_data + key->alg;
436  u32 i;
437  void *kd;
438 
439  /** TODO: add linked alg support **/
440  if (key->type == VNET_CRYPTO_KEY_TYPE_LINK)
441  return;
442 
443  if (kop == VNET_CRYPTO_KEY_OP_DEL)
444  {
445  if (idx >= vec_len (imbm->key_data))
446  return;
447 
448  if (imbm->key_data[idx] == 0)
449  return;
450 
451  clib_mem_free_s (imbm->key_data[idx]);
452  imbm->key_data[idx] = 0;
453  return;
454  }
455 
456  if (ad->data_size == 0)
457  return;
458 
460 
461  if (kop == VNET_CRYPTO_KEY_OP_MODIFY && imbm->key_data[idx])
462  {
463  clib_mem_free_s (imbm->key_data[idx]);
464  }
465 
466  kd = imbm->key_data[idx] = clib_mem_alloc_aligned (ad->data_size,
468 
469  /* AES CBC key expansion */
470  if (ad->keyexp)
471  {
472  ad->keyexp (key->data, ((ipsecmb_aes_cbc_key_data_t *) kd)->enc_key_exp,
473  ((ipsecmb_aes_cbc_key_data_t *) kd)->dec_key_exp);
474  return;
475  }
476 
477  /* AES GCM */
478  if (ad->aes_gcm_pre)
479  {
480  ad->aes_gcm_pre (key->data, (struct gcm_key_data *) kd);
481  return;
482  }
483 
484  /* HMAC */
485  if (ad->hash_one_block)
486  {
487  const int block_qw = HMAC_MAX_BLOCK_SIZE / sizeof (u64);
488  u64 pad[block_qw], key_hash[block_qw];
489 
490  clib_memset_u8 (key_hash, 0, HMAC_MAX_BLOCK_SIZE);
491  if (vec_len (key->data) <= ad->block_size)
492  clib_memcpy_fast (key_hash, key->data, vec_len (key->data));
493  else
494  ad->hash_fn (key->data, vec_len (key->data), key_hash);
495 
496  for (i = 0; i < block_qw; i++)
497  pad[i] = key_hash[i] ^ 0x3636363636363636;
498  ad->hash_one_block (pad, kd);
499 
500  for (i = 0; i < block_qw; i++)
501  pad[i] = key_hash[i] ^ 0x5c5c5c5c5c5c5c5c;
502  ad->hash_one_block (pad, ((u8 *) kd) + (ad->data_size / 2));
503 
504  return;
505  }
506 }
507 
508 static clib_error_t *
510 {
511  ipsecmb_main_t *imbm = &ipsecmb_main;
512  ipsecmb_alg_data_t *ad;
515  clib_error_t *error;
516  MB_MGR *m = 0;
517  u32 eidx;
518  u8 *name;
519 
520  if (!clib_cpu_supports_aes ())
521  return 0;
522 
523  /*
524  * A priority that is better than OpenSSL but worse than VPP natvie
525  */
526  name = format (0, "Intel(R) Multi-Buffer Crypto for IPsec Library %s%c",
527  IMB_VERSION_STR, 0);
528  eidx = vnet_crypto_register_engine (vm, "ipsecmb", 80, (char *) name);
529 
532 
533  /* *INDENT-OFF* */
534  vec_foreach (ptd, imbm->per_thread_data)
535  {
536  ptd->mgr = alloc_mb_mgr (0);
537  if (clib_cpu_supports_avx512f ())
538  init_mb_mgr_avx512 (ptd->mgr);
539  else if (clib_cpu_supports_avx2 ())
540  init_mb_mgr_avx2 (ptd->mgr);
541  else
542  init_mb_mgr_sse (ptd->mgr);
543 
544  if (ptd == imbm->per_thread_data)
545  m = ptd->mgr;
546  }
547  /* *INDENT-ON* */
548 
549  if (clib_cpu_supports_x86_aes () && (error = crypto_ipsecmb_iv_init (imbm)))
550  return (error);
551 
552 #define _(a, b, c, d, e, f) \
553  vnet_crypto_register_ops_handler (vm, eidx, VNET_CRYPTO_OP_##a##_HMAC, \
554  ipsecmb_ops_hmac_##a); \
555  ad = imbm->alg_data + VNET_CRYPTO_ALG_HMAC_##a; \
556  ad->block_size = d; \
557  ad->data_size = e * 2; \
558  ad->hash_one_block = m-> c##_one_block; \
559  ad->hash_fn = m-> c; \
560 
562 #undef _
563 #define _(a, b) \
564  vnet_crypto_register_ops_handler (vm, eidx, VNET_CRYPTO_OP_##a##_ENC, \
565  ipsecmb_ops_cbc_cipher_enc_##a); \
566  vnet_crypto_register_ops_handler (vm, eidx, VNET_CRYPTO_OP_##a##_DEC, \
567  ipsecmb_ops_cbc_cipher_dec_##a); \
568  ad = imbm->alg_data + VNET_CRYPTO_ALG_##a; \
569  ad->data_size = sizeof (ipsecmb_aes_cbc_key_data_t); \
570  ad->keyexp = m->keyexp_##b; \
571 
573 #undef _
574 #define _(a, b) \
575  vnet_crypto_register_ops_handler (vm, eidx, VNET_CRYPTO_OP_##a##_ENC, \
576  ipsecmb_ops_gcm_cipher_enc_##a); \
577  vnet_crypto_register_ops_handler (vm, eidx, VNET_CRYPTO_OP_##a##_DEC, \
578  ipsecmb_ops_gcm_cipher_dec_##a); \
579  vnet_crypto_register_chained_ops_handler \
580  (vm, eidx, VNET_CRYPTO_OP_##a##_ENC, \
581  ipsecmb_ops_gcm_cipher_enc_##a##_chained); \
582  vnet_crypto_register_chained_ops_handler \
583  (vm, eidx, VNET_CRYPTO_OP_##a##_DEC, \
584  ipsecmb_ops_gcm_cipher_dec_##a##_chained); \
585  ad = imbm->alg_data + VNET_CRYPTO_ALG_##a; \
586  ad->data_size = sizeof (struct gcm_key_data); \
587  ad->aes_gcm_pre = m->gcm##b##_pre; \
588 
590 #undef _
591 
593  return (NULL);
594 }
595 
596 /* *INDENT-OFF* */
598 {
599  .runs_after = VLIB_INITS ("vnet_crypto_init"),
600 };
601 /* *INDENT-ON* */
602 
603 /* *INDENT-OFF* */
605 {
606  .version = VPP_BUILD_VER,
607  .description = "Intel IPSEC Multi-buffer Crypto Engine",
608 };
609 /* *INDENT-ON* */
610 
611 /*
612  * fd.io coding-style-patch-verification: ON
613  *
614  * Local Variables:
615  * eval: (c-set-style "gnu")
616  * End:
617  */
u8 pad[3]
log2 (size of the packing page block)
Definition: bihash_doc.h:61
#define CLIB_CACHE_LINE_ALIGN_MARK(mark)
Definition: cache.h:60
#define VNET_CRYPTO_KEY_TYPE_LINK
Definition: crypto.h:192
#define EXPANDED_KEY_N_BYTES
Definition: ipsecmb.c:29
unsigned long u64
Definition: types.h:89
#define clib_memcpy_fast(a, b, c)
Definition: string.h:81
u32 thread_index
Definition: main.h:249
static void ipsecmb_retire_hmac_job(JOB_AES_HMAC *job, u32 *n_fail, u32 digest_size)
Definition: ipsecmb.c:90
u16 key_len
Definition: ikev2_types.api:95
vlib_main_t * vm
Definition: in2out_ed.c:1582
u8 * format(u8 *s, const char *fmt,...)
Definition: format.c:424
#define vec_validate_aligned(V, I, A)
Make sure vector is long enough for given index (no header, specified alignment)
Definition: vec.h:520
VLIB_PLUGIN_REGISTER()
#define foreach_ipsecmb_hmac_op
Definition: ipsecmb.c:66
unsigned char u8
Definition: types.h:56
static ipsecmb_main_t ipsecmb_main
Definition: ipsecmb.c:61
u8 dec_key_exp[EXPANDED_KEY_N_BYTES]
Definition: ipsecmb.c:58
void vnet_crypto_register_key_handler(vlib_main_t *vm, u32 engine_index, vnet_crypto_key_handler_t *key_handler)
Definition: crypto.c:307
clib_error_t * crypto_ipsecmb_iv_init(ipsecmb_main_t *imbm)
Definition: ipsecmb.c:406
#define static_always_inline
Definition: clib.h:108
vnet_crypto_key_op_t
Definition: crypto.h:108
#define VLIB_INIT_FUNCTION(x)
Definition: init.h:173
struct ipsecmb_main_t_ ipsecmb_main_t
static clib_error_t * crypto_ipsecmb_init(vlib_main_t *vm)
Definition: ipsecmb.c:509
void ** key_data
Definition: ipsecmb.c:52
#define vec_elt_at_index(v, i)
Get vector value at index i checking that i is in bounds.
unsigned int u32
Definition: types.h:88
static void ipsecmb_retire_cipher_job(JOB_AES_HMAC *job, u32 *n_fail)
Definition: ipsecmb.c:179
static_always_inline u32 ipsecmb_ops_hmac_inline(vlib_main_t *vm, vnet_crypto_op_t *ops[], u32 n_ops, u32 block_size, u32 hash_size, u32 digest_size, JOB_HASH_ALG alg)
Definition: ipsecmb.c:120
ipsecmb_per_thread_data_t * per_thread_data
Definition: ipsecmb.c:50
static u8 iv[]
Definition: aes_cbc.c:24
uword user_data
Definition: crypto.h:233
hash_fn_t hash_fn
Definition: ipsecmb.c:45
unsigned short u16
Definition: types.h:57
#define clib_error_return_unix(e, args...)
Definition: error.h:102
u16 block_size
Definition: ikev2_types.api:97
#define always_inline
Definition: ipsec.h:28
vnet_crypto_alg_t alg
Definition: crypto.h:182
#define VNET_CRYPTO_OP_FLAG_HMAC_CHECK
Definition: crypto.h:238
u8 len
Definition: ip_types.api:92
#define foreach_ipsecmb_cbc_cipher_op
Definition: ipsecmb.c:76
#define VNET_CRYPTO_OP_FLAG_INIT_IV
Definition: crypto.h:237
sll srl srl sll sra u16x4 i
Definition: vector_sse42.h:317
#define foreach_ipsecmb_gcm_cipher_op
Definition: ipsecmb.c:84
string name[64]
Definition: ip.api:44
static void crypto_ipsecmb_key_handler(vlib_main_t *vm, vnet_crypto_key_op_t kop, vnet_crypto_key_index_t idx)
Definition: ipsecmb.c:430
ipsecmb_alg_data_t alg_data[VNET_CRYPTO_N_ALGS]
Definition: ipsecmb.c:51
u32 vnet_crypto_key_index_t
Definition: crypto.h:346
keyexp_t keyexp
Definition: ipsecmb.c:43
u8 enc_key_exp[EXPANDED_KEY_N_BYTES]
Definition: ipsecmb.c:57
static void clib_mem_free_s(void *p)
Definition: mem.h:253
static_always_inline void clib_memset_u8(void *p, u8 val, uword count)
Definition: string.h:424
hash_one_block_t hash_one_block
Definition: ipsecmb.c:44
typedef key
Definition: ipsec_types.api:85
static foreach_aarch64_flags int clib_cpu_supports_aes()
Definition: cpu.h:239
static_always_inline vnet_crypto_key_t * vnet_crypto_get_key(vnet_crypto_key_index_t index)
Definition: crypto.h:516
#define HMAC_MAX_BLOCK_SIZE
Definition: ipsecmb.c:28
#define vec_len(v)
Number of elements in vector (rvalue-only, NULL tolerant)
aes_gcm_pre_t aes_gcm_pre
Definition: ipsecmb.c:42
vnet_crypto_op_status_t status
Definition: crypto.h:235
static void * clib_mem_alloc_aligned(uword size, uword align)
Definition: mem.h:165
static_always_inline u32 ipsecmb_ops_cbc_cipher_inline(vlib_main_t *vm, vnet_crypto_op_t *ops[], u32 n_ops, u32 key_len, JOB_CIPHER_DIRECTION direction)
Definition: ipsecmb.c:193
static vlib_thread_main_t * vlib_get_thread_main()
Definition: global_funcs.h:32
#define vec_foreach(var, vec)
Vector iterator.
#define CLIB_CACHE_LINE_BYTES
Definition: cache.h:59
u32 vnet_crypto_register_engine(vlib_main_t *vm, char *name, int prio, char *desc)
Definition: crypto.c:112
#define VLIB_INITS(...)
Definition: init.h:357