#define EMPTY_STRUCT {0}
#define NUM_CRYPTO_MBUFS 16384

/* static void algos_init (u32 n_mains): per-algorithm name/disabled init,
   expanded once per entry of foreach_ipsec_crypto_alg */
#define _(v, f, str)                                          \
  dcm->cipher_algs[IPSEC_CRYPTO_ALG_##f].name = str;          \
  dcm->cipher_algs[IPSEC_CRYPTO_ALG_##f].disabled = n_mains;
  foreach_ipsec_crypto_alg
#undef _

  a = &dcm->cipher_algs[IPSEC_CRYPTO_ALG_NONE];
  a->type = RTE_CRYPTO_SYM_XFORM_CIPHER;
  a->alg = RTE_CRYPTO_CIPHER_NULL;
  a = &dcm->cipher_algs[IPSEC_CRYPTO_ALG_AES_CBC_128];
  a->type = RTE_CRYPTO_SYM_XFORM_CIPHER;
  a->alg = RTE_CRYPTO_CIPHER_AES_CBC;

  a = &dcm->cipher_algs[IPSEC_CRYPTO_ALG_AES_CBC_192];
  a->type = RTE_CRYPTO_SYM_XFORM_CIPHER;
  a->alg = RTE_CRYPTO_CIPHER_AES_CBC;

  a = &dcm->cipher_algs[IPSEC_CRYPTO_ALG_AES_CBC_256];
  a->type = RTE_CRYPTO_SYM_XFORM_CIPHER;
  a->alg = RTE_CRYPTO_CIPHER_AES_CBC;

  a = &dcm->cipher_algs[IPSEC_CRYPTO_ALG_AES_CTR_128];
  a->type = RTE_CRYPTO_SYM_XFORM_CIPHER;
  a->alg = RTE_CRYPTO_CIPHER_AES_CTR;

  a = &dcm->cipher_algs[IPSEC_CRYPTO_ALG_AES_CTR_192];
  a->type = RTE_CRYPTO_SYM_XFORM_CIPHER;
  a->alg = RTE_CRYPTO_CIPHER_AES_CTR;

  a = &dcm->cipher_algs[IPSEC_CRYPTO_ALG_AES_CTR_256];
  a->type = RTE_CRYPTO_SYM_XFORM_CIPHER;
  a->alg = RTE_CRYPTO_CIPHER_AES_CTR;
#define AES_GCM_TYPE RTE_CRYPTO_SYM_XFORM_AEAD
#define AES_GCM_ALG RTE_CRYPTO_AEAD_AES_GCM

  a = &dcm->cipher_algs[IPSEC_CRYPTO_ALG_AES_GCM_128];
  a->type = AES_GCM_TYPE;
  a->alg = AES_GCM_ALG;

  a = &dcm->cipher_algs[IPSEC_CRYPTO_ALG_AES_GCM_192];
  a->type = AES_GCM_TYPE;
  a->alg = AES_GCM_ALG;

  a = &dcm->cipher_algs[IPSEC_CRYPTO_ALG_AES_GCM_256];
  a->type = AES_GCM_TYPE;
  a->alg = AES_GCM_ALG;
/* same pattern for the integrity algorithms, expanded once per entry of
   foreach_ipsec_integ_alg */
#define _(v, f, str)                                          \
  dcm->auth_algs[IPSEC_INTEG_ALG_##f].name = str;             \
  dcm->auth_algs[IPSEC_INTEG_ALG_##f].disabled = n_mains;
  foreach_ipsec_integ_alg
#undef _

  a = &dcm->auth_algs[IPSEC_INTEG_ALG_NONE];
  a->type = RTE_CRYPTO_SYM_XFORM_AUTH;
  a->alg = RTE_CRYPTO_AUTH_NULL;
  a = &dcm->auth_algs[IPSEC_INTEG_ALG_MD5_96];
  a->type = RTE_CRYPTO_SYM_XFORM_AUTH;
  a->alg = RTE_CRYPTO_AUTH_MD5_HMAC;

  a = &dcm->auth_algs[IPSEC_INTEG_ALG_SHA1_96];
  a->type = RTE_CRYPTO_SYM_XFORM_AUTH;
  a->alg = RTE_CRYPTO_AUTH_SHA1_HMAC;

  a = &dcm->auth_algs[IPSEC_INTEG_ALG_SHA_256_96];
  a->type = RTE_CRYPTO_SYM_XFORM_AUTH;
  a->alg = RTE_CRYPTO_AUTH_SHA256_HMAC;

  a = &dcm->auth_algs[IPSEC_INTEG_ALG_SHA_256_128];
  a->type = RTE_CRYPTO_SYM_XFORM_AUTH;
  a->alg = RTE_CRYPTO_AUTH_SHA256_HMAC;

  a = &dcm->auth_algs[IPSEC_INTEG_ALG_SHA_384_192];
  a->type = RTE_CRYPTO_SYM_XFORM_AUTH;
  a->alg = RTE_CRYPTO_AUTH_SHA384_HMAC;

  a = &dcm->auth_algs[IPSEC_INTEG_ALG_SHA_512_256];
  a->type = RTE_CRYPTO_SYM_XFORM_AUTH;
  a->alg = RTE_CRYPTO_AUTH_SHA512_HMAC;
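/* Consumers index these tables with the algorithms negotiated on the
   SA; a minimal lookup sketch using the accessors seen elsewhere in
   this file: */
  crypto_alg_t *c = vec_elt_at_index (dcm->cipher_algs, sa->crypto_alg);
  crypto_alg_t *i = vec_elt_at_index (dcm->auth_algs, sa->integ_alg);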
/* static crypto_alg_t *
   cipher_cap_to_alg (const struct rte_cryptodev_capabilities *cap,
                      u8 key_len) */
  if (cap->op != RTE_CRYPTO_OP_TYPE_SYMMETRIC)
    return NULL;

  vec_foreach (alg, dcm->cipher_algs)
    {
      if ((cap->sym.xform_type == RTE_CRYPTO_SYM_XFORM_CIPHER) &&
          (alg->type == RTE_CRYPTO_SYM_XFORM_CIPHER) &&
          (cap->sym.cipher.algo == alg->alg) &&
          (alg->key_len == key_len))
        return alg;

      if ((cap->sym.xform_type == RTE_CRYPTO_SYM_XFORM_AEAD) &&
          (alg->type == RTE_CRYPTO_SYM_XFORM_AEAD) &&
          (cap->sym.aead.algo == alg->alg) &&
          (alg->key_len == key_len))
        return alg;
    }

  return NULL;
/* static crypto_alg_t *
   auth_cap_to_alg (const struct rte_cryptodev_capabilities *cap,
                    u8 trunc_size) */
  if ((cap->op != RTE_CRYPTO_OP_TYPE_SYMMETRIC) ||
      (cap->sym.xform_type != RTE_CRYPTO_SYM_XFORM_AUTH))
    return NULL;

  vec_foreach (alg, dcm->auth_algs)
    {
      if ((cap->sym.auth.algo == alg->alg) &&
          (alg->trunc_size == trunc_size))
        return alg;
    }

  return NULL;
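/* Hypothetical usage sketch (not from this file): walk a device's
   advertised capabilities and test one key size against the table.
   crypto_parse_capabilities () below does the real, exhaustive walk. */
  u8 dev_id = 0;		/* device to probe (assumed) */
  struct rte_cryptodev_info info;
  rte_cryptodev_info_get (dev_id, &info);
  const struct rte_cryptodev_capabilities *cp;
  for (cp = info.capabilities; cp->op != RTE_CRYPTO_OP_TYPE_UNDEFINED; cp++)
    if (cipher_cap_to_alg (cp, 16 /* AES-128 */))
      ;				/* device supports this cipher at a 16B key */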
/* static void
   crypto_set_aead_xform (struct rte_crypto_sym_xform *xform,
                          ipsec_sa_t *sa, u8 is_outbound) */
  ASSERT (c->type == RTE_CRYPTO_SYM_XFORM_AEAD);

  xform->type = RTE_CRYPTO_SYM_XFORM_AEAD;
  xform->aead.algo = c->alg;
  xform->aead.key.length = c->key_len;
  xform->aead.iv.offset = /* ... elided in this excerpt ... */;
  xform->aead.iv.length = 12;
  /* extended sequence numbers enlarge the ESP AAD from 8 to 12 bytes */
  xform->aead.aad_length = ipsec_sa_is_set_USE_ESN (sa) ? 12 : 8;

  if (is_outbound)
    xform->aead.op = RTE_CRYPTO_AEAD_OP_ENCRYPT;
  else
    xform->aead.op = RTE_CRYPTO_AEAD_OP_DECRYPT;
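/* Sketch of how the AEAD path is taken when building a session for an
   AES-GCM SA (mirrors create_sym_session () below): */
  struct rte_crypto_sym_xform aead_xform = { 0 };
  crypto_set_aead_xform (&aead_xform, sa, 1 /* outbound */);
  /* aead_xform.next stays NULL: AEAD needs no cipher+auth chain */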
/* static void
   crypto_set_cipher_xform (struct rte_crypto_sym_xform *xform,
                            ipsec_sa_t *sa, u8 is_outbound) */
  ASSERT (c->type == RTE_CRYPTO_SYM_XFORM_CIPHER);

  xform->type = RTE_CRYPTO_SYM_XFORM_CIPHER;
  xform->cipher.algo = c->alg;
  xform->cipher.key.length = c->key_len;
  xform->cipher.iv.offset = /* ... elided in this excerpt ... */;
  xform->cipher.iv.length = c->iv_len;

  if (is_outbound)
    xform->cipher.op = RTE_CRYPTO_CIPHER_OP_ENCRYPT;
  else
    xform->cipher.op = RTE_CRYPTO_CIPHER_OP_DECRYPT;
/* static void
   crypto_set_auth_xform (struct rte_crypto_sym_xform *xform,
                          ipsec_sa_t *sa, u8 is_outbound) */
  ASSERT (a->type == RTE_CRYPTO_SYM_XFORM_AUTH);

  xform->type = RTE_CRYPTO_SYM_XFORM_AUTH;
  xform->auth.algo = a->alg;
  xform->auth.key.length = a->key_len;

  if (is_outbound)
    xform->auth.op = RTE_CRYPTO_AUTH_OP_GENERATE;
  else
    xform->auth.op = RTE_CRYPTO_AUTH_OP_VERIFY;
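/* ESP applies the two operations in opposite orders per direction,
   which is why create_sym_session () below chains the xforms both ways
   round; a condensed sketch of that logic: */
  if (is_outbound)
    {
      cipher_xform.next = &auth_xform;	/* encrypt, then generate ICV */
      xfs = &cipher_xform;
    }
  else
    {
      auth_xform.next = &cipher_xform;	/* verify ICV, then decrypt */
      xfs = &auth_xform;
    }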
/* clib_error_t *
   create_sym_session (struct rte_cryptodev_sym_session **session,
                       u32 sa_idx, crypto_resource_t *res,
                       crypto_worker_main_t *cwm, u8 is_outbound) */
  struct rte_crypto_sym_xform cipher_xform = { 0 };
  struct rte_crypto_sym_xform auth_xform = { 0 };
  struct rte_crypto_sym_xform *xfs;
  struct rte_cryptodev_sym_session **s;

  if ((sa->crypto_alg == IPSEC_CRYPTO_ALG_AES_GCM_128) |
      (sa->crypto_alg == IPSEC_CRYPTO_ALG_AES_GCM_192) |
      (sa->crypto_alg == IPSEC_CRYPTO_ALG_AES_GCM_256))
    /* ... single AEAD xform (crypto_set_aead_xform) ... */

  /* non-AEAD, outbound: */
      cipher_xform.next = &auth_xform;
  /* non-AEAD, inbound: */
      auth_xform.next = &cipher_xform;

  session[0] = rte_cryptodev_sym_session_create (data->session_h);
  struct rte_mempool **mp;

  rte_cryptodev_sym_session_init (res->dev_id, session[0], xfs, mp[0]);
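/* The two-step DPDK session API used above: the session header comes
   from the per-NUMA header pool (data->session_h) and the per-driver
   private data from a per-driver pool (mp[0], one of data->session_drv).
   A condensed sketch of the whole flow, with the error paths assumed: */
  session[0] = rte_cryptodev_sym_session_create (data->session_h);
  if (!session[0])
    return clib_error_return (0, "failed to create crypto session");
  if (rte_cryptodev_sym_session_init (res->dev_id, session[0], xfs, mp[0]) < 0)
    return clib_error_return (0, "failed to init session on device %u",
                              res->dev_id);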
/* static void clear_and_free_obj (void *obj) */
  struct rte_mempool *mp = rte_mempool_from_obj (obj);
  /* ... zero the object ... */
  rte_mempool_put (mp, obj);
/* static void *
   get_session_private_data (const struct rte_cryptodev_sym_session *sess,
                             uint8_t driver_id) */
#if RTE_VERSION < RTE_VERSION_NUM(19, 2, 0, 0)
  return sess->sess_private_data[driver_id];
#else
  if (unlikely (sess->nb_drivers <= driver_id))
    return 0;

  return sess->sess_data[driver_id].data;
#endif
/* static void
   set_session_private_data (struct rte_cryptodev_sym_session *sess,
                             uint8_t driver_id, void *private_data) */
#if RTE_VERSION < RTE_VERSION_NUM(19, 2, 0, 0)
  sess->sess_private_data[driver_id] = private_data;
#else
  if (unlikely (sess->nb_drivers <= driver_id))
    return;

  sess->sess_data[driver_id].data = private_data;
#endif
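/* These two helpers track accessors from DPDK's rte_cryptodev_pmd.h:
   before 19.02 the session header held a fixed per-driver pointer array
   (sess_private_data[]); from 19.02 it is a flexible sess_data[] array
   sized by nb_drivers, hence the bounds check on the newer layout. */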
/* static clib_error_t *
   dpdk_crypto_session_disposal (crypto_session_disposal_t *v, u64 ts) */
  if (rte_mempool_from_obj (s->session))
    {
      ret = rte_cryptodev_sym_session_free (s->session);
    }
/* static clib_error_t * add_del_sa_session (u32 sa_index, u8 is_add) */
  struct rte_cryptodev_sym_session *s;

    case IPSEC_CRYPTO_ALG_AES_GCM_128:
    case IPSEC_CRYPTO_ALG_AES_GCM_192:
    case IPSEC_CRYPTO_ALG_AES_GCM_256:

  s = (struct rte_cryptodev_sym_session *) val[0];
/* static clib_error_t * dpdk_ipsec_check_support (ipsec_sa_t *sa) */
  if (sa->integ_alg == IPSEC_INTEG_ALG_NONE)
    switch (sa->crypto_alg)
      {
      case IPSEC_CRYPTO_ALG_NONE:
      case IPSEC_CRYPTO_ALG_AES_GCM_128:
      case IPSEC_CRYPTO_ALG_AES_GCM_192:
      case IPSEC_CRYPTO_ALG_AES_GCM_256:
        break;		/* only AEAD modes may omit the integrity algorithm */
      default:
        /* ... clib_error_return (): unsupported combination ... */
      }

  if (sa->crypto_alg != IPSEC_CRYPTO_ALG_NONE &&
      /* ... the negotiated cipher is disabled ... */)
    /* ... error ... */

  if (sa->integ_alg != IPSEC_INTEG_ALG_NONE &&
      /* ... the negotiated integrity algorithm is disabled ... */)
    /* ... error ... */
/* static void
   crypto_parse_capabilities (crypto_dev_t *dev,
                              const struct rte_cryptodev_capabilities *cap,
                              u32 n_mains) */
  for (; cap->op != RTE_CRYPTO_OP_TYPE_UNDEFINED; cap++)
    {
      switch (cap->sym.xform_type)
        {
        case RTE_CRYPTO_SYM_XFORM_AEAD:
        case RTE_CRYPTO_SYM_XFORM_CIPHER:
          inc = cap->sym.cipher.key_size.increment;
          /* ... */
          for (len = cap->sym.cipher.key_size.min;
               len <= cap->sym.cipher.key_size.max; len += inc)
            /* ... cipher_cap_to_alg (cap, len) marks the alg supported ... */
          break;
        case RTE_CRYPTO_SYM_XFORM_AUTH:
          inc = cap->sym.auth.digest_size.increment;
          /* ... */
          for (len = cap->sym.auth.digest_size.min;
               len <= cap->sym.auth.digest_size.max; len += inc)
            /* ... auth_cap_to_alg (cap, len) marks the alg supported ... */
          break;
        }
    }
#define DPDK_CRYPTO_N_QUEUE_DESC 2048
#define DPDK_CRYPTO_NB_SESS_OBJS 20000

/* static clib_error_t * crypto_dev_conf (u8 dev, u16 n_qp, u8 numa) */
  struct rte_cryptodev_config dev_conf = { 0 };
  struct rte_cryptodev_qp_conf qp_conf = { 0 };

  dev_conf.socket_id = numa;
  dev_conf.nb_queue_pairs = n_qp;

  error_str = "failed to configure crypto device %u";
  ret = rte_cryptodev_configure (dev, &dev_conf);

  error_str = "failed to setup crypto device %u queue pair %u";
  qp_conf.nb_descriptors = DPDK_CRYPTO_N_QUEUE_DESC;
  for (qp = 0; qp < n_qp; qp++)
    {
#if RTE_VERSION < RTE_VERSION_NUM(19, 2, 0, 0)
      ret = rte_cryptodev_queue_pair_setup (dev, qp, &qp_conf, numa, NULL);
#else
      ret = rte_cryptodev_queue_pair_setup (dev, qp, &qp_conf, numa);
#endif
    }

  error_str = "failed to start crypto device %u";
  if (rte_cryptodev_start (dev))
    /* ... clib_error_return (0, error_str, dev) ... */
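/* Hypothetical usage sketch (device id and sizing are assumptions):
   bring up device 0 with one queue pair per worker on its NUMA node. */
  u8 dev_id = 0;
  clib_error_t *err = crypto_dev_conf (dev_id, (u16) vlib_num_workers (),
                                       rte_cryptodev_socket_id (dev_id));
  if (err)
    clib_error_report (err);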
/* static void crypto_scan_devs (u32 n_mains) */
  struct rte_cryptodev *cryptodev;
  struct rte_cryptodev_info info = { 0 };
  u16 max_res_idx, res_idx, j;

  for (i = 0; i < rte_cryptodev_count (); i++)
    {
      cryptodev = &rte_cryptodevs[i];
      rte_cryptodev_info_get (i, &info);

      dev->name = cryptodev->data->name;
      dev->numa = rte_cryptodev_socket_id (i);
      dev->max_qp = info.max_nb_queue_pairs;
      drv_id = info.driver_id;

      /* only devices that can chain cipher and auth ops are usable */
      if (!(info.feature_flags & RTE_CRYPTODEV_FF_SYM_OPERATION_CHAINING))
        continue;

      max_res_idx = dev->max_qp - 1;

      for (j = 0; j <= max_res_idx; j++)
        /* ... queue pair j becomes crypto resource res_idx + j ... */
    }
/* void crypto_auto_placement (void) */
  u32 thread_idx, skip_master;

      if (thread_idx < skip_master)
        /* ... skip the main thread when workers are present ... */
/* static void
   crypto_op_init (struct rte_mempool *mempool,
                   void *_arg __attribute__ ((unused)),
                   void *_obj, unsigned i __attribute__ ((unused))) */
  struct rte_crypto_op *op = _obj;

  op->sess_type = RTE_CRYPTO_OP_WITH_SESSION;
  op->type = RTE_CRYPTO_OP_TYPE_SYMMETRIC;
  op->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
  op->phys_addr = rte_mempool_virt2iova (_obj);
  op->mempool = mempool;
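/* crypto_op_init () runs once per element when the pool is populated.
   A minimal sketch of wiring it into rte_mempool_create (); the pool
   name, cache size and socket are assumptions, the real sizing lives in
   crypto_create_crypto_op_pool () below: */
  struct rte_mempool *op_mp =
    rte_mempool_create ("crypto_ops_sketch", NUM_CRYPTO_MBUFS,
                        crypto_op_len (), 512 /* per-core cache */,
                        sizeof (struct rte_crypto_op_pool_private),
                        NULL, NULL,		/* no pool-level init */
                        crypto_op_init, NULL,	/* per-object init */
                        0 /* socket */, 0 /* flags */);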
/* static clib_error_t *
   crypto_create_crypto_op_pool (vlib_main_t *vm, u8 numa) */
  u32 pool_priv_size = sizeof (struct rte_crypto_op_pool_private);
  struct rte_crypto_op_pool_private *priv;
  struct rte_mempool *mp;

  pool_name = format (0, "crypto_pool_numa%u%c", numa, 0);

  priv = rte_mempool_get_priv (mp);
  priv->priv_size = pool_priv_size;
  priv->type = RTE_CRYPTO_OP_TYPE_SYMMETRIC;
/* static clib_error_t *
   crypto_create_session_h_pool (vlib_main_t *vm, u8 numa) */
  struct rte_mempool *mp;

  pool_name = format (0, "session_h_pool_numa%u%c", numa, 0);
  elt_size = rte_cryptodev_sym_get_header_session_size ();

#if RTE_VERSION < RTE_VERSION_NUM(19, 2, 0, 0)
  /* ... plain rte_mempool_create () on pre-19.02 DPDK ... */
#else
  mp = rte_cryptodev_sym_session_pool_create ((char *) pool_name,
                                              DPDK_CRYPTO_NB_SESS_OBJS,
                                              elt_size, 512, 0, numa);
#endif
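/* From DPDK 19.02 on, session header pools are expected to come from
   rte_cryptodev_sym_session_pool_create (), which also records the
   session layout in the pool's private data; older releases used a
   plain mempool sized by rte_cryptodev_sym_get_header_session_size (). */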
/* static clib_error_t *
   crypto_create_session_drv_pool (vlib_main_t *vm, crypto_dev_t *dev) */
  struct rte_mempool *mp;

  pool_name = format (0, "session_drv%u_pool_numa%u%c", dev->drv_id, numa, 0);
  elt_size = rte_cryptodev_sym_get_private_session_size (dev->id);
/* static uword
   dpdk_ipsec_process (vlib_main_t *vm, vlib_node_runtime_t *rt,
                       vlib_frame_t *f) */
  u32 i, skip_master, n_mains;

  clib_warning ("not enough DPDK crypto resources, default to OpenSSL");

  /* ESP graph node names passed to ipsec_register_esp_backend (): */
      "dpdk-esp4-encrypt",
      "dpdk-esp4-encrypt-tun",
      "dpdk-esp4-decrypt",
      "dpdk-esp6-encrypt",
      "dpdk-esp6-encrypt-tun",
      "dpdk-esp6-decrypt",

  for (i = skip_master; i < n_mains; i++)
    /* ... vlib_node_set_state (vlib_mains[i], ..., VLIB_NODE_STATE_POLLING):
       start the crypto polling node on each worker ... */

/* process node registration: */
  .name = "dpdk-ipsec-process",
  .process_log2_n_stack_bytes = 17,