FD.io VPP  v21.06
Vector Packet Processing
main.c
Go to the documentation of this file.
1 /*
2  *------------------------------------------------------------------
3  * Copyright (c) 2019 Cisco and/or its affiliates.
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at:
7  *
8  * http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  *------------------------------------------------------------------
16  */
17 
18 #include <openssl/evp.h>
19 #include <openssl/hmac.h>
20 #include <openssl/rand.h>
21 #include <openssl/sha.h>
22 
23 #include <vlib/vlib.h>
24 #include <vnet/plugin/plugin.h>
25 #include <vnet/crypto/crypto.h>
26 #include <vpp/app/version.h>
27 
/* Per-worker-thread OpenSSL context state, cache-line aligned so workers
   never share a line.  One element per VPP thread (see init below).
   NOTE(review): the closing "} openssl_per_thread_data_t;" line (37) is
   missing from this listing — dropped by the doc generator. */
28 typedef struct
29 {
30  CLIB_CACHE_LINE_ALIGN_MARK (cacheline0);
31  EVP_CIPHER_CTX *evp_cipher_ctx;
32  HMAC_CTX *hmac_ctx;
33  EVP_MD_CTX *hash_ctx;
34 #if OPENSSL_VERSION_NUMBER < 0x10100000L
/* Pre-1.1.0 OpenSSL has no HMAC_CTX_new(); embed the context by value
   and point hmac_ctx at it (see the #else branch of the init loop). */
35  HMAC_CTX _hmac_ctx;
36 #endif
38 
/* Vector of per-thread contexts, indexed by vm->thread_index. */
39 static openssl_per_thread_data_t *per_thread_data = 0;
40 
/* X-macro tables: _(mode-handler, VNET crypto alg suffix, EVP cipher ctor,
   iv length in bytes).  "cbc" rows (incl. CTR) go through the cbc handlers,
   "gcm"/"chacha20_poly1305" rows through the AEAD handlers. */
41 #define foreach_openssl_aes_evp_op \
42  _ (cbc, DES_CBC, EVP_des_cbc, 8) \
43  _ (cbc, 3DES_CBC, EVP_des_ede3_cbc, 8) \
44  _ (cbc, AES_128_CBC, EVP_aes_128_cbc, 16) \
45  _ (cbc, AES_192_CBC, EVP_aes_192_cbc, 16) \
46  _ (cbc, AES_256_CBC, EVP_aes_256_cbc, 16) \
47  _ (gcm, AES_128_GCM, EVP_aes_128_gcm, 8) \
48  _ (gcm, AES_192_GCM, EVP_aes_192_gcm, 8) \
49  _ (gcm, AES_256_GCM, EVP_aes_256_gcm, 8) \
50  _ (cbc, AES_128_CTR, EVP_aes_128_ctr, 8) \
51  _ (cbc, AES_192_CTR, EVP_aes_192_ctr, 8) \
52  _ (cbc, AES_256_CTR, EVP_aes_256_ctr, 8)
53 
54 #define foreach_openssl_chacha20_evp_op \
55  _ (chacha20_poly1305, CHACHA20_POLY1305, EVP_chacha20_poly1305, 8)
56 
/* chacha20-poly1305 requires OpenSSL >= 1.1.0; omit it on older libs. */
57 #if OPENSSL_VERSION_NUMBER >= 0x10100000L
58 #define foreach_openssl_evp_op foreach_openssl_aes_evp_op \
59  foreach_openssl_chacha20_evp_op
60 #else
61 #define foreach_openssl_evp_op foreach_openssl_aes_evp_op
62 #endif
63 
/* Older OpenSSL only has the GCM-specific tag ctrl names; alias the
   generic AEAD names onto them so the code below compiles everywhere. */
64 #ifndef EVP_CTRL_AEAD_GET_TAG
65 #define EVP_CTRL_AEAD_GET_TAG EVP_CTRL_GCM_GET_TAG
66 #endif
67 
68 #ifndef EVP_CTRL_AEAD_SET_TAG
69 #define EVP_CTRL_AEAD_SET_TAG EVP_CTRL_GCM_SET_TAG
70 #endif
71 
/* X-macro tables for plain hashes and HMACs: _(alg suffix, EVP md ctor). */
72 #define foreach_openssl_hash_op \
73  _ (SHA1, EVP_sha1) \
74  _ (SHA224, EVP_sha224) \
75  _ (SHA256, EVP_sha256) \
76  _ (SHA384, EVP_sha384) \
77  _ (SHA512, EVP_sha512)
78 
79 #define foreach_openssl_hmac_op \
80  _(MD5, EVP_md5) \
81  _(SHA1, EVP_sha1) \
82  _(SHA224, EVP_sha224) \
83  _(SHA256, EVP_sha256) \
84  _(SHA384, EVP_sha384) \
85  _(SHA512, EVP_sha512)
86 
/* openssl_ops_enc_cbc: encrypt a batch of CBC/CTR ops via the EVP API.
   Returns the number of ops processed (always n_ops; no failure path).
   NOTE(review): this listing is missing several linked lines — the
   signature line (88), the key lookup (~102, presumably
   vnet_crypto_get_key (op->key_index)), the chunk-pointer declaration,
   and the conditionals guarding lines 106/111/114 (presumably
   VNET_CRYPTO_OP_FLAG_INIT_IV and VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS
   flag tests per the cross references) — confirm against upstream. */
89  vnet_crypto_op_chunk_t *chunks, u32 n_ops,
90  const EVP_CIPHER *cipher, const int iv_len)
91 {
92  openssl_per_thread_data_t *ptd = vec_elt_at_index (per_thread_data,
93  vm->thread_index);
94  EVP_CIPHER_CTX *ctx = ptd->evp_cipher_ctx;
96  u32 i, j, curr_len = 0;
/* Scratch output for chained (multi-chunk) ops; sized for 5 buffers. */
97  u8 out_buf[VLIB_BUFFER_DEFAULT_DATA_SIZE * 5];
98 
99  for (i = 0; i < n_ops; i++)
100  {
101  vnet_crypto_op_t *op = ops[i];
103  int out_len = 0;
104 
/* Generate a fresh random IV in place (guarded by a missing flag test). */
106  RAND_bytes (op->iv, iv_len);
107 
108  EVP_EncryptInit_ex (ctx, cipher, NULL, key->data, op->iv);
109 
/* Caller provides block-aligned data; disable PKCS padding. */
111  EVP_CIPHER_CTX_set_padding (ctx, 0);
112 
114  {
/* Chained case: encrypt chunk-by-chunk into out_buf, then copy back. */
115  chp = chunks + op->chunk_index;
116  u32 offset = 0;
117  for (j = 0; j < op->n_chunks; j++)
118  {
119  EVP_EncryptUpdate (ctx, out_buf + offset, &out_len, chp->src,
120  chp->len);
121  curr_len = chp->len;
122  offset += out_len;
123  chp += 1;
124  }
/* If the last update held data back (partial block), flush it. */
125  if (out_len < curr_len)
126  EVP_EncryptFinal_ex (ctx, out_buf + offset, &out_len);
127 
/* Scatter ciphertext back into the caller's chunk buffers. */
128  offset = 0;
129  chp = chunks + op->chunk_index;
130  for (j = 0; j < op->n_chunks; j++)
131  {
132  clib_memcpy_fast (chp->dst, out_buf + offset, chp->len);
133  offset += chp->len;
134  chp += 1;
135  }
136  }
137  else
138  {
/* Single-buffer case: encrypt src -> dst directly. */
139  EVP_EncryptUpdate (ctx, op->dst, &out_len, op->src, op->len);
140  if (out_len < op->len)
141  EVP_EncryptFinal_ex (ctx, op->dst + out_len, &out_len);
142  }
143  op->status = VNET_CRYPTO_OP_STATUS_COMPLETED;
144  }
145  return n_ops;
146 }
147 
/* openssl_ops_dec_cbc: decrypt a batch of CBC/CTR ops; mirror image of
   openssl_ops_enc_cbc minus IV generation (IV comes in with the op).
   Returns n_ops unconditionally.
   NOTE(review): signature line (149), key lookup, chunk-pointer
   declaration and the chained-buffers flag test before line 172 are
   missing from this listing — confirm against upstream. */
150  vnet_crypto_op_chunk_t *chunks, u32 n_ops,
151  const EVP_CIPHER *cipher, const int iv_len)
152 {
153  openssl_per_thread_data_t *ptd = vec_elt_at_index (per_thread_data,
154  vm->thread_index);
155  EVP_CIPHER_CTX *ctx = ptd->evp_cipher_ctx;
157  u32 i, j, curr_len = 0;
/* Scratch output for chained (multi-chunk) ops. */
158  u8 out_buf[VLIB_BUFFER_DEFAULT_DATA_SIZE * 5];
159 
160  for (i = 0; i < n_ops; i++)
161  {
162  vnet_crypto_op_t *op = ops[i];
164  int out_len = 0;
165 
166  EVP_DecryptInit_ex (ctx, cipher, NULL, key->data, op->iv);
167 
/* No PKCS padding: ciphertext length is taken as exact. */
169  EVP_CIPHER_CTX_set_padding (ctx, 0);
170 
172  {
/* Chained case: decrypt chunk-by-chunk into out_buf, then copy back. */
173  chp = chunks + op->chunk_index;
174  u32 offset = 0;
175  for (j = 0; j < op->n_chunks; j++)
176  {
177  EVP_DecryptUpdate (ctx, out_buf + offset, &out_len, chp->src,
178  chp->len);
179  curr_len = chp->len;
180  offset += out_len;
181  chp += 1;
182  }
183  if (out_len < curr_len)
184  EVP_DecryptFinal_ex (ctx, out_buf + offset, &out_len);
185 
186  offset = 0;
187  chp = chunks + op->chunk_index;
188  for (j = 0; j < op->n_chunks; j++)
189  {
190  clib_memcpy_fast (chp->dst, out_buf + offset, chp->len);
191  offset += chp->len;
192  chp += 1;
193  }
194  }
195  else
196  {
197  EVP_DecryptUpdate (ctx, op->dst, &out_len, op->src, op->len);
198  if (out_len < op->len)
199  EVP_DecryptFinal_ex (ctx, op->dst + out_len, &out_len);
200  }
201  op->status = VNET_CRYPTO_OP_STATUS_COMPLETED;
202  }
203  return n_ops;
204 }
205 
/* openssl_ops_enc_aead: shared AEAD encrypt path for GCM and
   chacha20-poly1305.  Processes AAD, payload (linear or chained), then
   extracts the authentication tag into op->tag.  Returns n_ops.
   NOTE(review): signature line (206-207), key lookup, chunk-pointer
   declaration, the IV-init flag test before line 223 and the
   chained-buffers flag test before line 232 are missing from this
   listing — confirm against upstream. */
208  vnet_crypto_op_chunk_t *chunks, u32 n_ops,
209  const EVP_CIPHER *cipher, int is_gcm, const int iv_len)
210 {
211  openssl_per_thread_data_t *ptd = vec_elt_at_index (per_thread_data,
212  vm->thread_index);
213  EVP_CIPHER_CTX *ctx = ptd->evp_cipher_ctx;
215  u32 i, j;
216  for (i = 0; i < n_ops; i++)
217  {
218  vnet_crypto_op_t *op = ops[i];
220  int len = 0;
221 
/* Random 8-byte IV portion; note the literal 8 here, not iv_len —
   presumably the remaining 4 bytes of the 12-byte GCM nonce are a
   salt/counter managed elsewhere — TODO confirm. */
223  RAND_bytes (op->iv, 8);
224 
/* Two-stage init: select cipher first so SET_IVLEN is accepted, then
   supply key + IV. */
225  EVP_EncryptInit_ex (ctx, cipher, 0, 0, 0);
226  if (is_gcm)
227  EVP_CIPHER_CTX_ctrl (ctx, EVP_CTRL_GCM_SET_IVLEN, 12, NULL);
228  EVP_EncryptInit_ex (ctx, 0, 0, key->data, op->iv);
/* AAD pass: NULL output buffer means "authenticate only". */
229  if (op->aad_len)
230  EVP_EncryptUpdate (ctx, NULL, &len, op->aad, op->aad_len);
232  {
233  chp = chunks + op->chunk_index;
234  for (j = 0; j < op->n_chunks; j++)
235  {
236  EVP_EncryptUpdate (ctx, chp->dst, &len, chp->src, chp->len);
237  chp += 1;
238  }
239  }
240  else
241  EVP_EncryptUpdate (ctx, op->dst, &len, op->src, op->len);
242  EVP_EncryptFinal_ex (ctx, op->dst + len, &len);
243  EVP_CIPHER_CTX_ctrl (ctx, EVP_CTRL_AEAD_GET_TAG, op->tag_len, op->tag);
244  op->status = VNET_CRYPTO_OP_STATUS_COMPLETED;
245  }
246  return n_ops;
247 }
248 
/* Thin wrappers selecting the AEAD flavor: is_gcm=1 for GCM (needs the
   SET_IVLEN ctrl), is_gcm=0 for chacha20-poly1305.
   NOTE(review): the signature name lines (249-250, 259) are missing from
   this listing. */
251  vnet_crypto_op_chunk_t *chunks, u32 n_ops,
252  const EVP_CIPHER *cipher, const int iv_len)
253 {
254  return openssl_ops_enc_aead (vm, ops, chunks, n_ops, cipher,
255  /* is_gcm */ 1, iv_len);
256 }
257 
/* __clib_unused: only referenced when OpenSSL >= 1.1.0 includes the
   chacha20-poly1305 row in foreach_openssl_evp_op. */
258 static_always_inline __clib_unused u32
260  vnet_crypto_op_chunk_t *chunks, u32 n_ops,
261  const EVP_CIPHER *cipher, const int iv_len)
262 {
263  return openssl_ops_enc_aead (vm, ops, chunks, n_ops, cipher,
264  /* is_gcm */ 0, iv_len);
265 }
266 
/* openssl_ops_dec_aead: shared AEAD decrypt path for GCM and
   chacha20-poly1305.  Sets the expected tag before finalizing; a failed
   EVP_DecryptFinal_ex means tag mismatch and the op is marked
   FAIL_BAD_HMAC.  Returns the count of successful ops (n_ops - n_fail).
   NOTE(review): signature lines (267-268), key lookup, chunk-pointer
   declaration and the chained-buffers flag test before line 290 are
   missing from this listing — confirm against upstream. */
269  vnet_crypto_op_chunk_t *chunks, u32 n_ops,
270  const EVP_CIPHER *cipher, int is_gcm, const int iv_len)
271 {
272  openssl_per_thread_data_t *ptd = vec_elt_at_index (per_thread_data,
273  vm->thread_index);
274  EVP_CIPHER_CTX *ctx = ptd->evp_cipher_ctx;
276  u32 i, j, n_fail = 0;
277  for (i = 0; i < n_ops; i++)
278  {
279  vnet_crypto_op_t *op = ops[i];
281  int len = 0;
282 
/* Two-stage init, mirroring the encrypt side. */
283  EVP_DecryptInit_ex (ctx, cipher, 0, 0, 0);
284  if (is_gcm)
285  EVP_CIPHER_CTX_ctrl (ctx, EVP_CTRL_GCM_SET_IVLEN, 12, 0);
286  EVP_DecryptInit_ex (ctx, 0, 0, key->data, op->iv);
287  if (op->aad_len)
288  EVP_DecryptUpdate (ctx, 0, &len, op->aad, op->aad_len);
290  {
291  chp = chunks + op->chunk_index;
292  for (j = 0; j < op->n_chunks; j++)
293  {
294  EVP_DecryptUpdate (ctx, chp->dst, &len, chp->src, chp->len);
295  chp += 1;
296  }
297  }
298  else
299  EVP_DecryptUpdate (ctx, op->dst, &len, op->src, op->len);
/* Provide the received tag; Final below verifies it. */
300  EVP_CIPHER_CTX_ctrl (ctx, EVP_CTRL_AEAD_SET_TAG, op->tag_len, op->tag);
301 
302  if (EVP_DecryptFinal_ex (ctx, op->dst + len, &len) > 0)
303  op->status = VNET_CRYPTO_OP_STATUS_COMPLETED;
304  else
305  {
306  n_fail++;
307  op->status = VNET_CRYPTO_OP_STATUS_FAIL_BAD_HMAC;
308  }
309  }
310  return n_ops - n_fail;
311 }
312 
/* Decrypt-side wrappers mirroring the encrypt ones above.
   NOTE(review): the signature name lines (313-314, 323) are missing from
   this listing. */
315  vnet_crypto_op_chunk_t *chunks, u32 n_ops,
316  const EVP_CIPHER *cipher, const int iv_len)
317 {
318  return openssl_ops_dec_aead (vm, ops, chunks, n_ops, cipher,
319  /* is_gcm */ 1, iv_len);
320 }
321 
/* __clib_unused: only referenced when OpenSSL >= 1.1.0 includes the
   chacha20-poly1305 row in foreach_openssl_evp_op. */
322 static_always_inline __clib_unused u32
324  vnet_crypto_op_chunk_t *chunks, u32 n_ops,
325  const EVP_CIPHER *cipher, const int iv_len)
326 {
327  return openssl_ops_dec_aead (vm, ops, chunks, n_ops, cipher,
328  /* is_gcm */ 0, iv_len);
329 }
330 
/* openssl_ops_hash: plain digest (SHA-1/224/256/384/512) over each op's
   data, writing the digest and its length back into the op.  n_fail is
   never incremented here, so this effectively returns n_ops.
   NOTE(review): signature line (332), the ptd declaration start (335),
   chunk-pointer declaration and the chained-buffers flag test before
   line 347 are missing from this listing — confirm against upstream. */
333  vnet_crypto_op_chunk_t *chunks, u32 n_ops, const EVP_MD *md)
334 {
336  vec_elt_at_index (per_thread_data, vm->thread_index);
337  EVP_MD_CTX *ctx = ptd->hash_ctx;
339  u32 md_len, i, j, n_fail = 0;
340 
341  for (i = 0; i < n_ops; i++)
342  {
343  vnet_crypto_op_t *op = ops[i];
344 
345  EVP_DigestInit_ex (ctx, md, NULL);
347  {
/* Chained case: feed each chunk into the digest in order. */
348  chp = chunks + op->chunk_index;
349  for (j = 0; j < op->n_chunks; j++)
350  {
351  EVP_DigestUpdate (ctx, chp->src, chp->len);
352  chp += 1;
353  }
354  }
355  else
356  EVP_DigestUpdate (ctx, op->src, op->len);
357 
358  EVP_DigestFinal_ex (ctx, op->digest, &md_len);
359  op->digest_len = md_len;
360  op->status = VNET_CRYPTO_OP_STATUS_COMPLETED;
361  }
362  return n_ops - n_fail;
363 }
364 
/* openssl_ops_hmac: HMAC over each op's data.  In check mode the
   computed MAC is compared (truncated to sz) against op->digest and a
   mismatch marks the op FAIL_BAD_HMAC; otherwise the MAC is written to
   op->digest.  Returns n_ops - n_fail.
   NOTE(review): signature line (366), key lookup, chunk-pointer
   declaration, the chained-buffers flag test before line 385 and the
   HMAC_CHECK flag test before line 398 are missing from this listing —
   confirm against upstream.  The memcmp at 399 is presumably
   non-constant-time; upstream relies on it — verify before changing. */
367  vnet_crypto_op_chunk_t * chunks, u32 n_ops,
368  const EVP_MD * md)
369 {
/* 64 bytes covers the largest supported digest (SHA-512). */
370  u8 buffer[64];
371  openssl_per_thread_data_t *ptd = vec_elt_at_index (per_thread_data,
372  vm->thread_index);
373  HMAC_CTX *ctx = ptd->hmac_ctx;
375  u32 i, j, n_fail = 0;
376  for (i = 0; i < n_ops; i++)
377  {
378  vnet_crypto_op_t *op = ops[i];
380  unsigned int out_len = 0;
/* Truncated-MAC support: honor op->digest_len if set, else full size. */
381  size_t sz = op->digest_len ? op->digest_len : EVP_MD_size (md);
382 
383  HMAC_Init_ex (ctx, key->data, vec_len (key->data), md, NULL);
385  {
386  chp = chunks + op->chunk_index;
387  for (j = 0; j < op->n_chunks; j++)
388  {
389  HMAC_Update (ctx, chp->src, chp->len);
390  chp += 1;
391  }
392  }
393  else
394  HMAC_Update (ctx, op->src, op->len);
395  HMAC_Final (ctx, buffer, &out_len);
396 
398  {
/* Verify mode: compare against the expected MAC supplied by caller. */
399  if ((memcmp (op->digest, buffer, sz)))
400  {
401  n_fail++;
402  op->status = VNET_CRYPTO_OP_STATUS_FAIL_BAD_HMAC;
403  continue;
404  }
405  }
406  else
/* Generate mode: emit the (possibly truncated) MAC. */
407  clib_memcpy_fast (op->digest, buffer, sz);
408  op->status = VNET_CRYPTO_OP_STATUS_COMPLETED;
409  }
410  return n_ops - n_fail;
411 }
412 
/* Instantiate the four per-algorithm entry points (enc/dec, linear and
   chained) for every row of foreach_openssl_evp_op.  m = mode handler
   (cbc/gcm/chacha20_poly1305), a = VNET alg suffix, b = EVP cipher
   constructor, iv = IV length.  NOTE(review): the
   "foreach_openssl_evp_op" invocation line (440) between the #define and
   the #undef is missing from this listing.  openssl_ops_dec_##a is
   non-static — presumably referenced externally; verify before changing. */
413 #define _(m, a, b, iv) \
414  static u32 openssl_ops_enc_##a (vlib_main_t *vm, vnet_crypto_op_t *ops[], \
415  u32 n_ops) \
416  { \
417  return openssl_ops_enc_##m (vm, ops, 0, n_ops, b (), iv); \
418  } \
419  \
420  u32 openssl_ops_dec_##a (vlib_main_t *vm, vnet_crypto_op_t *ops[], \
421  u32 n_ops) \
422  { \
423  return openssl_ops_dec_##m (vm, ops, 0, n_ops, b (), iv); \
424  } \
425  \
426  static u32 openssl_ops_enc_chained_##a ( \
427  vlib_main_t *vm, vnet_crypto_op_t *ops[], vnet_crypto_op_chunk_t *chunks, \
428  u32 n_ops) \
429  { \
430  return openssl_ops_enc_##m (vm, ops, chunks, n_ops, b (), iv); \
431  } \
432  \
433  static u32 openssl_ops_dec_chained_##a ( \
434  vlib_main_t *vm, vnet_crypto_op_t *ops[], vnet_crypto_op_chunk_t *chunks, \
435  u32 n_ops) \
436  { \
437  return openssl_ops_dec_##m (vm, ops, chunks, n_ops, b (), iv); \
438  }
439 
441 #undef _
442 
/* Instantiate linear + chained wrappers for each hash and HMAC algorithm.
   NOTE(review): the "foreach_openssl_hash_op" (456) and
   "foreach_openssl_hmac_op" (468) invocation lines between each #define
   and its #undef are missing from this listing. */
443 #define _(a, b) \
444  static u32 openssl_ops_hash_##a (vlib_main_t *vm, vnet_crypto_op_t *ops[], \
445  u32 n_ops) \
446  { \
447  return openssl_ops_hash (vm, ops, 0, n_ops, b ()); \
448  } \
449  static u32 openssl_ops_hash_chained_##a ( \
450  vlib_main_t *vm, vnet_crypto_op_t *ops[], vnet_crypto_op_chunk_t *chunks, \
451  u32 n_ops) \
452  { \
453  return openssl_ops_hash (vm, ops, chunks, n_ops, b ()); \
454  }
455 
457 #undef _
458 
459 #define _(a, b) \
460 static u32 \
461 openssl_ops_hmac_##a (vlib_main_t * vm, vnet_crypto_op_t * ops[], u32 n_ops) \
462 { return openssl_ops_hmac (vm, ops, 0, n_ops, b ()); } \
463 static u32 \
464 openssl_ops_hmac_chained_##a (vlib_main_t * vm, vnet_crypto_op_t * ops[], \
465  vnet_crypto_op_chunk_t *chunks, u32 n_ops) \
466 { return openssl_ops_hmac (vm, ops, chunks, n_ops, b ()); } \
467 
469 #undef _
470 
471 
/* crypto_openssl_init: register the OpenSSL engine (priority 50) and all
   its op handlers with the VNET crypto layer, allocate one context set
   per VPP thread, and seed OpenSSL's RNG.  Returns 0 (no error path).
   NOTE(review): the function-name line (473), the vlib_thread_main_t /
   ptd local declarations (475-476), the foreach invocation lines
   (491/499/507) and the alignment argument line (511,
   presumably CLIB_CACHE_LINE_BYTES) are missing from this listing —
   confirm against upstream. */
472 clib_error_t *
474 {
477  u8 *seed_data = 0;
478  time_t t;
479  pid_t pid;
480 
481  u32 eidx = vnet_crypto_register_engine (vm, "openssl", 50, "OpenSSL");
482 
/* Register enc/dec handlers (linear + chained) for every cipher row. */
483 #define _(m, a, b, iv) \
484  vnet_crypto_register_ops_handlers (vm, eidx, VNET_CRYPTO_OP_##a##_ENC, \
485  openssl_ops_enc_##a, \
486  openssl_ops_enc_chained_##a); \
487  vnet_crypto_register_ops_handlers (vm, eidx, VNET_CRYPTO_OP_##a##_DEC, \
488  openssl_ops_dec_##a, \
489  openssl_ops_dec_chained_##a);
490 
492 #undef _
493 
/* Register HMAC handlers. */
494 #define _(a, b) \
495  vnet_crypto_register_ops_handlers (vm, eidx, VNET_CRYPTO_OP_##a##_HMAC, \
496  openssl_ops_hmac_##a, \
497  openssl_ops_hmac_chained_##a); \
498 
500 #undef _
501 
/* Register plain-hash handlers. */
502 #define _(a, b) \
503  vnet_crypto_register_ops_handlers (vm, eidx, VNET_CRYPTO_OP_##a##_HASH, \
504  openssl_ops_hash_##a, \
505  openssl_ops_hash_chained_##a);
506 
508 #undef _
509 
/* One context set per thread (main + workers). */
510  vec_validate_aligned (per_thread_data, tm->n_vlib_mains - 1,
512 
513  vec_foreach (ptd, per_thread_data)
514  {
515  ptd->evp_cipher_ctx = EVP_CIPHER_CTX_new ();
516 #if OPENSSL_VERSION_NUMBER >= 0x10100000L
517  ptd->hmac_ctx = HMAC_CTX_new ();
518  ptd->hash_ctx = EVP_MD_CTX_create ();
519 #else
/* Pre-1.1.0: no HMAC_CTX_new(); use the embedded context. */
520  HMAC_CTX_init (&(ptd->_hmac_ctx));
521  ptd->hmac_ctx = &ptd->_hmac_ctx;
522 #endif
523  }
524 
/* Seed the OpenSSL RNG with time, pid, and the heap address of the
   seed vector itself (sizeof (seed_data) is the pointer size here —
   presumably intentional ASLR-derived entropy, not the vector length). */
525  t = time (NULL);
526  pid = getpid ();
527  vec_add (seed_data, &t, sizeof (t));
528  vec_add (seed_data, &pid, sizeof (pid));
529  vec_add (seed_data, seed_data, sizeof (seed_data));
530 
531  RAND_seed ((const void *) seed_data, vec_len (seed_data));
532 
533  vec_free (seed_data);
534 
535  return 0;
536 }
537 
/* Init-function and plugin registrations.  NOTE(review): the
   VLIB_INIT_FUNCTION (crypto_openssl_init) line (539) and the
   VLIB_PLUGIN_REGISTER () line (547) are missing from this listing —
   these brace-initializer bodies belong to those macros. */
538 /* *INDENT-OFF* */
540 {
/* Run after the core crypto subsystem has initialized. */
541  .runs_after = VLIB_INITS ("vnet_crypto_init"),
542 };
543 /* *INDENT-ON* */
544 
545 
546 /* *INDENT-OFF* */
548  .version = VPP_BUILD_VER,
549  .description = "OpenSSL Crypto Engine",
550 };
551 /* *INDENT-ON* */
552 
553 /*
554  * fd.io coding-style-patch-verification: ON
555  *
556  * Local Variables:
557  * eval: (c-set-style "gnu")
558  * End:
559  */
#define CLIB_CACHE_LINE_ALIGN_MARK(mark)
Definition: cache.h:60
static_always_inline u32 openssl_ops_enc_gcm(vlib_main_t *vm, vnet_crypto_op_t *ops[], vnet_crypto_op_chunk_t *chunks, u32 n_ops, const EVP_CIPHER *cipher, const int iv_len)
Definition: main.c:250
static_always_inline u32 openssl_ops_hmac(vlib_main_t *vm, vnet_crypto_op_t *ops[], vnet_crypto_op_chunk_t *chunks, u32 n_ops, const EVP_MD *md)
Definition: main.c:366
EVP_MD_CTX * hash_ctx
Definition: main.c:33
static_always_inline u32 openssl_ops_enc_cbc(vlib_main_t *vm, vnet_crypto_op_t *ops[], vnet_crypto_op_chunk_t *chunks, u32 n_ops, const EVP_CIPHER *cipher, const int iv_len)
Definition: main.c:88
VLIB_PLUGIN_REGISTER()
#define VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS
Definition: crypto.h:264
static_always_inline __clib_unused u32 openssl_ops_dec_chacha20_poly1305(vlib_main_t *vm, vnet_crypto_op_t *ops[], vnet_crypto_op_chunk_t *chunks, u32 n_ops, const EVP_CIPHER *cipher, const int iv_len)
Definition: main.c:323
u32 thread_index
Definition: main.h:213
static_always_inline u32 openssl_ops_dec_gcm(vlib_main_t *vm, vnet_crypto_op_t *ops[], vnet_crypto_op_chunk_t *chunks, u32 n_ops, const EVP_CIPHER *cipher, const int iv_len)
Definition: main.c:314
#define vec_validate_aligned(V, I, A)
Make sure vector is long enough for given index (no header, specified alignment)
Definition: vec.h:535
HMAC_CTX * hmac_ctx
Definition: main.c:32
unsigned char u8
Definition: types.h:56
static_always_inline u32 openssl_ops_dec_cbc(vlib_main_t *vm, vnet_crypto_op_t *ops[], vnet_crypto_op_chunk_t *chunks, u32 n_ops, const EVP_CIPHER *cipher, const int iv_len)
Definition: main.c:149
unsigned int u32
Definition: types.h:88
#define vec_add(V, E, N)
Add N elements to end of vector V (no header, unspecified alignment)
Definition: vec.h:689
#define static_always_inline
Definition: clib.h:112
#define VLIB_INIT_FUNCTION(x)
Definition: init.h:172
#define vec_elt_at_index(v, i)
Get vector value at index i checking that i is in bounds.
long ctx[MAX_CONNS]
Definition: main.c:144
static_always_inline u32 openssl_ops_dec_aead(vlib_main_t *vm, vnet_crypto_op_t *ops[], vnet_crypto_op_chunk_t *chunks, u32 n_ops, const EVP_CIPHER *cipher, int is_gcm, const int iv_len)
Definition: main.c:268
vlib_main_t * vm
X-connect all packets from the HOST to the PHY.
Definition: nat44_ei.c:3047
clib_error_t * crypto_openssl_init(vlib_main_t *vm)
Definition: main.c:473
#define VNET_CRYPTO_OP_FLAG_HMAC_CHECK
Definition: crypto.h:263
EVP_CIPHER_CTX * evp_cipher_ctx
Definition: main.c:31
u8 len
Definition: ip_types.api:103
#define foreach_openssl_hmac_op
Definition: main.c:79
#define VNET_CRYPTO_OP_FLAG_INIT_IV
Definition: crypto.h:262
sll srl srl sll sra u16x4 i
Definition: vector_sse42.h:261
#define vec_free(V)
Free vector&#39;s memory (no header).
Definition: vec.h:395
char * buffer
Definition: cJSON.h:163
#define foreach_openssl_hash_op
Definition: main.c:72
static_always_inline __clib_unused u32 openssl_ops_enc_chacha20_poly1305(vlib_main_t *vm, vnet_crypto_op_t *ops[], vnet_crypto_op_chunk_t *chunks, u32 n_ops, const EVP_CIPHER *cipher, const int iv_len)
Definition: main.c:259
static_always_inline void * clib_memcpy_fast(void *restrict dst, const void *restrict src, size_t n)
Definition: string.h:92
typedef key
Definition: ipsec_types.api:88
template key/value backing page structure
Definition: bihash_doc.h:44
static_always_inline vnet_crypto_key_t * vnet_crypto_get_key(vnet_crypto_key_index_t index)
Definition: crypto.h:548
#define foreach_openssl_evp_op
Definition: main.c:58
#define VLIB_BUFFER_DEFAULT_DATA_SIZE
Definition: buffer.h:53
#define vec_len(v)
Number of elements in vector (rvalue-only, NULL tolerant)
u32 pid
Definition: dhcp.api:164
static_always_inline u32 openssl_ops_hash(vlib_main_t *vm, vnet_crypto_op_t *ops[], vnet_crypto_op_chunk_t *chunks, u32 n_ops, const EVP_MD *md)
Definition: main.c:332
vnet_crypto_op_status_t status
Definition: crypto.h:260
static vlib_thread_main_t * vlib_get_thread_main()
Definition: global_funcs.h:56
#define vec_foreach(var, vec)
Vector iterator.
#define CLIB_CACHE_LINE_BYTES
Definition: cache.h:59
u32 vnet_crypto_register_engine(vlib_main_t *vm, char *name, int prio, char *desc)
Definition: crypto.c:112
static_always_inline u32 openssl_ops_enc_aead(vlib_main_t *vm, vnet_crypto_op_t *ops[], vnet_crypto_op_chunk_t *chunks, u32 n_ops, const EVP_CIPHER *cipher, int is_gcm, const int iv_len)
Definition: main.c:207
#define VLIB_INITS(...)
Definition: init.h:352