FD.io VPP  v20.01-48-g3e0dafb74
Vector Packet Processing
ipsecmb.c
1 /*
2  * ipsecmb.c - Intel IPSec Multi-buffer library Crypto Engine
3  *
4  * Copyright (c) 2019 Cisco Systems
5  * Licensed under the Apache License, Version 2.0 (the "License");
6  * you may not use this file except in compliance with the License.
7  * You may obtain a copy of the License at:
8  *
9  * http://www.apache.org/licenses/LICENSE-2.0
10  *
11  * Unless required by applicable law or agreed to in writing, software
12  * distributed under the License is distributed on an "AS IS" BASIS,
13  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14  * See the License for the specific language governing permissions and
15  * limitations under the License.
16  */
17 
18 #include <fcntl.h>
19 
20 #include <intel-ipsec-mb.h>
21 
22 #include <vnet/vnet.h>
23 #include <vnet/plugin/plugin.h>
24 #include <vpp/app/version.h>
25 #include <vnet/crypto/crypto.h>
26 #include <vppinfra/cpu.h>
27 
28 #define HMAC_MAX_BLOCK_SIZE SHA_512_BLOCK_SIZE
29 #define EXPANDED_KEY_N_BYTES (16 * 15)
30 
31 typedef struct
32 {
33  MB_MGR *mgr;
34  __m128i cbc_iv;
35 } ipsecmb_per_thread_data_t;
36 
37 typedef struct
38 {
39  u16 data_size;
40  u8 block_size;
41  aes_gcm_pre_t aes_gcm_pre;
42  keyexp_t keyexp;
43  hash_one_block_t hash_one_block;
44  hash_fn_t hash_fn;
45 } ipsecmb_alg_data_t;
46 
47 typedef struct ipsecmb_main_t_
48 {
49  ipsecmb_per_thread_data_t *per_thread_data;
50  ipsecmb_alg_data_t alg_data[VNET_CRYPTO_N_ALGS];
51  void **key_data;
52 } ipsecmb_main_t;
53 
54 typedef struct
55 {
56  u8 enc_key_exp[EXPANDED_KEY_N_BYTES];
57  u8 dec_key_exp[EXPANDED_KEY_N_BYTES];
58 } ipsecmb_aes_cbc_key_data_t;
59 
60 static ipsecmb_main_t ipsecmb_main = { };
61 
62 /*
63  * (Alg, JOB_HASH_ALG, fn, block-size-bytes, hash-size-bytes, digest-size-bytes)
64  */
65 #define foreach_ipsecmb_hmac_op \
66  _(SHA1, SHA1, sha1, 64, 20, 20) \
67  _(SHA224, SHA_224, sha224, 64, 32, 28) \
68  _(SHA256, SHA_256, sha256, 64, 32, 32) \
69  _(SHA384, SHA_384, sha384, 128, 64, 48) \
70  _(SHA512, SHA_512, sha512, 128, 64, 64)
71 
72 /*
73  * (Alg, key-len-bits)
74  */
75 #define foreach_ipsecmb_cbc_cipher_op \
76  _(AES_128_CBC, 128) \
77  _(AES_192_CBC, 192) \
78  _(AES_256_CBC, 256)
79 
80 /*
81  * (Alg, key-len-bytes, iv-len-bytes)
82  */
83 #define foreach_ipsecmb_gcm_cipher_op \
84  _(AES_128_GCM, 128) \
85  _(AES_192_GCM, 192) \
86  _(AES_256_GCM, 256)
87 
88 always_inline void
89 ipsecmb_retire_hmac_job (JOB_AES_HMAC * job, u32 * n_fail, u32 digest_size)
90 {
91  vnet_crypto_op_t *op = job->user_data;
92  u32 len = op->digest_len ? op->digest_len : digest_size;
93 
94  if (STS_COMPLETED != job->status)
95  {
96  op->status = VNET_CRYPTO_OP_STATUS_FAIL_BAD_HMAC;
97  *n_fail = *n_fail + 1;
98  return;
99  }
100 
101  if (op->flags & VNET_CRYPTO_OP_FLAG_HMAC_CHECK)
102  {
103  if ((memcmp (op->digest, job->auth_tag_output, len)))
104  {
105  *n_fail = *n_fail + 1;
106  op->status = VNET_CRYPTO_OP_STATUS_FAIL_BAD_HMAC;
107  return;
108  }
109  }
110  else if (len == digest_size)
111  clib_memcpy_fast (op->digest, job->auth_tag_output, digest_size);
112  else
113  clib_memcpy_fast (op->digest, job->auth_tag_output, len);
114 
115  op->status = VNET_CRYPTO_OP_STATUS_COMPLETED;
116 }
117 
118 static_always_inline u32
119 ipsecmb_ops_hmac_inline (vlib_main_t * vm, vnet_crypto_op_t * ops[],
120  u32 n_ops, u32 block_size, u32 hash_size,
121  u32 digest_size, JOB_HASH_ALG alg)
122 {
123  ipsecmb_main_t *imbm = &ipsecmb_main;
124  ipsecmb_per_thread_data_t *ptd = vec_elt_at_index (imbm->per_thread_data,
125  vm->thread_index);
126  JOB_AES_HMAC *job;
127  u32 i, n_fail = 0;
128  u8 scratch[n_ops][digest_size];
129 
130  /*
131  * queue all the jobs first ...
132  */
133  for (i = 0; i < n_ops; i++)
134  {
135  vnet_crypto_op_t *op = ops[i];
136  u8 *kd = (u8 *) imbm->key_data[op->key_index];
137 
138  job = IMB_GET_NEXT_JOB (ptd->mgr);
139 
140  job->src = op->src;
141  job->hash_start_src_offset_in_bytes = 0;
142  job->msg_len_to_hash_in_bytes = op->len;
143  job->hash_alg = alg;
144  job->auth_tag_output_len_in_bytes = digest_size;
145  job->auth_tag_output = scratch[i];
146 
147  job->cipher_mode = NULL_CIPHER;
148  job->cipher_direction = DECRYPT;
149  job->chain_order = HASH_CIPHER;
150 
151  job->u.HMAC._hashed_auth_key_xor_ipad = kd;
152  job->u.HMAC._hashed_auth_key_xor_opad = kd + hash_size;
153  job->user_data = op;
154 
155  job = IMB_SUBMIT_JOB (ptd->mgr);
156 
157  if (job)
158  ipsecmb_retire_hmac_job (job, &n_fail, digest_size);
159  }
160 
161  while ((job = IMB_FLUSH_JOB (ptd->mgr)))
162  ipsecmb_retire_hmac_job (job, &n_fail, digest_size);
163 
164  return n_ops - n_fail;
165 }
166 
167 #define _(a, b, c, d, e, f) \
168 static_always_inline u32 \
169 ipsecmb_ops_hmac_##a (vlib_main_t * vm, \
170  vnet_crypto_op_t * ops[], \
171  u32 n_ops) \
172 { return ipsecmb_ops_hmac_inline (vm, ops, n_ops, d, e, f, b); } \
173 
174 foreach_ipsecmb_hmac_op;
175 #undef _
176 
177 always_inline void
178 ipsecmb_retire_cipher_job (JOB_AES_HMAC * job, u32 * n_fail)
179 {
180  vnet_crypto_op_t *op = job->user_data;
181 
182  if (STS_COMPLETED != job->status)
183  {
184  op->status = VNET_CRYPTO_OP_STATUS_FAIL_BAD_HMAC;
185  *n_fail = *n_fail + 1;
186  }
187  else
188  op->status = VNET_CRYPTO_OP_STATUS_COMPLETED;
189 }
190 
191 static_always_inline u32
192 ipsecmb_ops_cbc_cipher_inline (vlib_main_t * vm, vnet_crypto_op_t * ops[],
193  u32 n_ops, u32 key_len,
194  JOB_CIPHER_DIRECTION direction)
195 {
196  ipsecmb_main_t *imbm = &ipsecmb_main;
197  ipsecmb_per_thread_data_t *ptd = vec_elt_at_index (imbm->per_thread_data,
198  vm->thread_index);
199  JOB_AES_HMAC *job;
200  u32 i, n_fail = 0;
201 
202  for (i = 0; i < n_ops; i++)
203  {
204  ipsecmb_aes_cbc_key_data_t *kd;
205  vnet_crypto_op_t *op = ops[i];
206  kd = (ipsecmb_aes_cbc_key_data_t *) imbm->key_data[op->key_index];
207  __m128i iv;
208 
209  job = IMB_GET_NEXT_JOB (ptd->mgr);
210 
211  job->src = op->src;
212  job->dst = op->dst;
213  job->msg_len_to_cipher_in_bytes = op->len;
214  job->cipher_start_src_offset_in_bytes = 0;
215 
216  job->hash_alg = NULL_HASH;
217  job->cipher_mode = CBC;
218  job->cipher_direction = direction;
219  job->chain_order = (direction == ENCRYPT ? CIPHER_HASH : HASH_CIPHER);
220 
221  if ((direction == ENCRYPT) && (op->flags & VNET_CRYPTO_OP_FLAG_INIT_IV))
222  {
223  iv = ptd->cbc_iv;
224  _mm_storeu_si128 ((__m128i *) op->iv, iv);
225  ptd->cbc_iv = _mm_aesenc_si128 (iv, iv);
226  }
227 
228  job->aes_key_len_in_bytes = key_len / 8;
229  job->aes_enc_key_expanded = kd->enc_key_exp;
230  job->aes_dec_key_expanded = kd->dec_key_exp;
231  job->iv = op->iv;
232  job->iv_len_in_bytes = AES_BLOCK_SIZE;
233 
234  job->user_data = op;
235 
236  job = IMB_SUBMIT_JOB (ptd->mgr);
237 
238  if (job)
239  ipsecmb_retire_cipher_job (job, &n_fail);
240  }
241 
242  while ((job = IMB_FLUSH_JOB (ptd->mgr)))
243  ipsecmb_retire_cipher_job (job, &n_fail);
244 
245  return n_ops - n_fail;
246 }
247 
248 #define _(a, b) \
249 static_always_inline u32 \
250 ipsecmb_ops_cbc_cipher_enc_##a (vlib_main_t * vm, \
251  vnet_crypto_op_t * ops[], \
252  u32 n_ops) \
253 { return ipsecmb_ops_cbc_cipher_inline (vm, ops, n_ops, b, ENCRYPT); } \
254  \
255 static_always_inline u32 \
256 ipsecmb_ops_cbc_cipher_dec_##a (vlib_main_t * vm, \
257  vnet_crypto_op_t * ops[], \
258  u32 n_ops) \
259 { return ipsecmb_ops_cbc_cipher_inline (vm, ops, n_ops, b, DECRYPT); } \
260 
261 foreach_ipsecmb_cbc_cipher_op;
262 #undef _
263 
264 #define _(a, b) \
265 static_always_inline u32 \
266 ipsecmb_ops_gcm_cipher_enc_##a (vlib_main_t * vm, vnet_crypto_op_t * ops[], \
267  u32 n_ops) \
268 { \
269  ipsecmb_main_t *imbm = &ipsecmb_main; \
270  ipsecmb_per_thread_data_t *ptd = vec_elt_at_index (imbm->per_thread_data, \
271  vm->thread_index); \
272  MB_MGR *m = ptd->mgr; \
273  u32 i; \
274  \
275  for (i = 0; i < n_ops; i++) \
276  { \
277  struct gcm_key_data *kd; \
278  struct gcm_context_data ctx; \
279  vnet_crypto_op_t *op = ops[i]; \
280  \
281  kd = (struct gcm_key_data *) imbm->key_data[op->key_index]; \
282  IMB_AES##b##_GCM_ENC (m, kd, &ctx, op->dst, op->src, op->len, op->iv, \
283  op->aad, op->aad_len, op->tag, op->tag_len); \
284  \
285  op->status = VNET_CRYPTO_OP_STATUS_COMPLETED; \
286  } \
287  \
288  return n_ops; \
289 } \
290  \
291 static_always_inline u32 \
292 ipsecmb_ops_gcm_cipher_dec_##a (vlib_main_t * vm, vnet_crypto_op_t * ops[], \
293  u32 n_ops) \
294 { \
295  ipsecmb_main_t *imbm = &ipsecmb_main; \
296  ipsecmb_per_thread_data_t *ptd = vec_elt_at_index (imbm->per_thread_data, \
297  vm->thread_index); \
298  MB_MGR *m = ptd->mgr; \
299  u32 i, n_failed = 0; \
300  \
301  for (i = 0; i < n_ops; i++) \
302  { \
303  struct gcm_key_data *kd; \
304  struct gcm_context_data ctx; \
305  vnet_crypto_op_t *op = ops[i]; \
306  u8 scratch[64]; \
307  \
308  kd = (struct gcm_key_data *) imbm->key_data[op->key_index]; \
309  IMB_AES##b##_GCM_DEC (m, kd, &ctx, op->dst, op->src, op->len, op->iv, \
310  op->aad, op->aad_len, scratch, op->tag_len); \
311  \
312  if ((memcmp (op->tag, scratch, op->tag_len))) \
313  { \
314  op->status = VNET_CRYPTO_OP_STATUS_FAIL_BAD_HMAC; \
315  n_failed++; \
316  } \
317  else \
318  op->status = VNET_CRYPTO_OP_STATUS_COMPLETED; \
319  } \
320  \
321  return n_ops - n_failed; \
322 }
323 
324 foreach_ipsecmb_gcm_cipher_op;
325 #undef _
326 
327 clib_error_t *
328 crypto_ipsecmb_iv_init (ipsecmb_main_t * imbm)
329 {
330  ipsecmb_per_thread_data_t *ptd;
331  clib_error_t *err = 0;
332  int fd;
333 
334  if ((fd = open ("/dev/urandom", O_RDONLY)) < 0)
335  return clib_error_return_unix (0, "failed to open '/dev/urandom'");
336 
337  vec_foreach (ptd, imbm->per_thread_data)
338  {
339  if (read (fd, &ptd->cbc_iv, sizeof (ptd->cbc_iv)) != sizeof (ptd->cbc_iv))
340  {
341  err = clib_error_return_unix (0, "'/dev/urandom' read failure");
342  close (fd);
343  return (err);
344  }
345  }
346 
347  close (fd);
348  return (NULL);
349 }
350 
351 static void
352 crypto_ipsecmb_key_handler (vlib_main_t * vm, vnet_crypto_key_op_t kop,
353  vnet_crypto_key_index_t idx)
354 {
355  ipsecmb_main_t *imbm = &ipsecmb_main;
356  vnet_crypto_key_t *key = vnet_crypto_get_key (idx);
357  ipsecmb_alg_data_t *ad = imbm->alg_data + key->alg;
358  u32 i;
359  void *kd;
360 
361  if (kop == VNET_CRYPTO_KEY_OP_DEL)
362  {
363  if (idx >= vec_len (imbm->key_data))
364  return;
365 
366  if (imbm->key_data[idx] == 0)
367  return;
368 
369  clib_mem_free_s (imbm->key_data[idx]);
370  imbm->key_data[idx] = 0;
371  return;
372  }
373 
374  if (ad->data_size == 0)
375  return;
376 
378 
379  if (kop == VNET_CRYPTO_KEY_OP_MODIFY && imbm->key_data[idx])
380  {
381  clib_mem_free_s (imbm->key_data[idx]);
382  }
383 
384  kd = imbm->key_data[idx] = clib_mem_alloc_aligned (ad->data_size,
385  CLIB_CACHE_LINE_BYTES);
386 
387  /* AES CBC key expansion */
388  if (ad->keyexp)
389  {
390  ad->keyexp (key->data, ((ipsecmb_aes_cbc_key_data_t *) kd)->enc_key_exp,
391  ((ipsecmb_aes_cbc_key_data_t *) kd)->dec_key_exp);
392  return;
393  }
394 
395  /* AES GCM */
396  if (ad->aes_gcm_pre)
397  {
398  ad->aes_gcm_pre (key->data, (struct gcm_key_data *) kd);
399  return;
400  }
401 
402  /* HMAC */
403  if (ad->hash_one_block)
404  {
405  const int block_qw = HMAC_MAX_BLOCK_SIZE / sizeof (u64);
406  u64 pad[block_qw], key_hash[block_qw];
407 
408  clib_memset_u8 (key_hash, 0, HMAC_MAX_BLOCK_SIZE);
409  if (vec_len (key->data) <= ad->block_size)
410  clib_memcpy_fast (key_hash, key->data, vec_len (key->data));
411  else
412  ad->hash_fn (key->data, vec_len (key->data), key_hash);
413 
414  for (i = 0; i < block_qw; i++)
415  pad[i] = key_hash[i] ^ 0x3636363636363636;
416  ad->hash_one_block (pad, kd);
417 
418  for (i = 0; i < block_qw; i++)
419  pad[i] = key_hash[i] ^ 0x5c5c5c5c5c5c5c5c;
420  ad->hash_one_block (pad, ((u8 *) kd) + (ad->data_size / 2));
421 
422  return;
423  }
424 }
425 
426 static clib_error_t *
427 crypto_ipsecmb_init (vlib_main_t * vm)
428 {
429  ipsecmb_main_t *imbm = &ipsecmb_main;
430  ipsecmb_alg_data_t *ad;
431  ipsecmb_per_thread_data_t *ptd;
432  vlib_thread_main_t *tm = vlib_get_thread_main ();
433  clib_error_t *error;
434  MB_MGR *m = 0;
435  u32 eidx;
436  u8 *name;
437 
438  if (!clib_cpu_supports_aes ())
439  return 0;
440 
441  /*
442  * A priority that is better than OpenSSL but worse than VPP native
443  */
444  name = format (0, "Intel(R) Multi-Buffer Crypto for IPsec Library %s%c",
445  IMB_VERSION_STR, 0);
446  eidx = vnet_crypto_register_engine (vm, "ipsecmb", 80, (char *) name);
447 
448  vec_validate (imbm->per_thread_data, tm->n_vlib_mains - 1);
449 
450  /* *INDENT-OFF* */
451  vec_foreach (ptd, imbm->per_thread_data)
452  {
453  ptd->mgr = alloc_mb_mgr (0);
454  if (clib_cpu_supports_avx512f ())
455  init_mb_mgr_avx512 (ptd->mgr);
456  else if (clib_cpu_supports_avx2 ())
457  init_mb_mgr_avx2 (ptd->mgr);
458  else
459  init_mb_mgr_sse (ptd->mgr);
460 
461  if (ptd == imbm->per_thread_data)
462  m = ptd->mgr;
463  }
464  /* *INDENT-ON* */
465 
466  if (clib_cpu_supports_x86_aes () && (error = crypto_ipsecmb_iv_init (imbm)))
467  return (error);
468 
469 #define _(a, b, c, d, e, f) \
470  vnet_crypto_register_ops_handler (vm, eidx, VNET_CRYPTO_OP_##a##_HMAC, \
471  ipsecmb_ops_hmac_##a); \
472  ad = imbm->alg_data + VNET_CRYPTO_ALG_HMAC_##a; \
473  ad->block_size = d; \
474  ad->data_size = e * 2; \
475  ad->hash_one_block = m-> c##_one_block; \
476  ad->hash_fn = m-> c; \
477 
478 foreach_ipsecmb_hmac_op;
479 #undef _
480 #define _(a, b) \
481  vnet_crypto_register_ops_handler (vm, eidx, VNET_CRYPTO_OP_##a##_ENC, \
482  ipsecmb_ops_cbc_cipher_enc_##a); \
483  vnet_crypto_register_ops_handler (vm, eidx, VNET_CRYPTO_OP_##a##_DEC, \
484  ipsecmb_ops_cbc_cipher_dec_##a); \
485  ad = imbm->alg_data + VNET_CRYPTO_ALG_##a; \
486  ad->data_size = sizeof (ipsecmb_aes_cbc_key_data_t); \
487  ad->keyexp = m->keyexp_##b; \
488 
489 foreach_ipsecmb_cbc_cipher_op;
490 #undef _
491 #define _(a, b) \
492  vnet_crypto_register_ops_handler (vm, eidx, VNET_CRYPTO_OP_##a##_ENC, \
493  ipsecmb_ops_gcm_cipher_enc_##a); \
494  vnet_crypto_register_ops_handler (vm, eidx, VNET_CRYPTO_OP_##a##_DEC, \
495  ipsecmb_ops_gcm_cipher_dec_##a); \
496  ad = imbm->alg_data + VNET_CRYPTO_ALG_##a; \
497  ad->data_size = sizeof (struct gcm_key_data); \
498  ad->aes_gcm_pre = m->gcm##b##_pre; \
499 
500 foreach_ipsecmb_gcm_cipher_op;
501 #undef _
502 
503  vnet_crypto_register_key_handler (vm, eidx, crypto_ipsecmb_key_handler);
504  return (NULL);
505 }
506 
507 /* *INDENT-OFF* */
508 VLIB_INIT_FUNCTION (crypto_ipsecmb_init) =
509 {
510  .runs_after = VLIB_INITS ("vnet_crypto_init"),
511 };
512 /* *INDENT-ON* */
513 
514 /* *INDENT-OFF* */
515 VLIB_PLUGIN_REGISTER () =
516 {
517  .version = VPP_BUILD_VER,
518  .description = "Intel IPSEC Multi-buffer Crypto Engine",
519 };
520 /* *INDENT-ON* */
521 
522 /*
523  * fd.io coding-style-patch-verification: ON
524  *
525  * Local Variables:
526  * eval: (c-set-style "gnu")
527  * End:
528  */