22 #include <vpp/app/version.h> 28 #include <rte_bus_vdev.h> 29 #include <rte_cryptodev.h> 30 #include <rte_crypto_sym.h> 31 #include <rte_crypto.h> 32 #include <rte_cryptodev_pmd.h> 33 #include <rte_config.h> 38 #define always_inline static inline 40 #define always_inline static inline __attribute__ ((__always_inline__)) 50 struct rte_crypto_aead_xform *aead_xform = &xform->aead;
51 memset (xform, 0,
sizeof (*xform));
52 xform->type = RTE_CRYPTO_SYM_XFORM_AEAD;
55 if (key->
alg != VNET_CRYPTO_ALG_AES_128_GCM &&
56 key->
alg != VNET_CRYPTO_ALG_AES_192_GCM &&
57 key->
alg != VNET_CRYPTO_ALG_AES_256_GCM)
60 aead_xform->algo = RTE_CRYPTO_AEAD_AES_GCM;
62 RTE_CRYPTO_AEAD_OP_ENCRYPT : RTE_CRYPTO_AEAD_OP_DECRYPT;
63 aead_xform->aad_length = aad_len;
64 aead_xform->digest_length = 16;
66 aead_xform->iv.length = 12;
67 aead_xform->key.data = key->
data;
78 struct rte_crypto_sym_xform *xform_cipher, *xform_auth;
80 enum rte_crypto_cipher_algorithm cipher_algo = ~0;
81 enum rte_crypto_auth_algorithm auth_algo = ~0;
86 if (!key_cipher || !key_auth)
91 xform_cipher = xforms;
92 xform_auth = xforms + 1;
93 xform_cipher->cipher.op = RTE_CRYPTO_CIPHER_OP_ENCRYPT;
94 xform_auth->auth.op = RTE_CRYPTO_AUTH_OP_GENERATE;
98 xform_cipher = xforms + 1;
100 xform_cipher->cipher.op = RTE_CRYPTO_CIPHER_OP_DECRYPT;
101 xform_auth->auth.op = RTE_CRYPTO_AUTH_OP_VERIFY;
104 xform_cipher->type = RTE_CRYPTO_SYM_XFORM_CIPHER;
105 xform_auth->type = RTE_CRYPTO_SYM_XFORM_AUTH;
106 xforms->next = xforms + 1;
110 #define _(a, b, c, d, e) \ 111 case VNET_CRYPTO_ALG_##a##_##d##_TAG##e: \ 112 cipher_algo = RTE_CRYPTO_CIPHER_##b; \ 113 auth_algo = RTE_CRYPTO_AUTH_##d##_HMAC; \ 123 xform_cipher->cipher.algo = cipher_algo;
124 xform_cipher->cipher.key.data = key_cipher->
data;
125 xform_cipher->cipher.key.length =
vec_len (key_cipher->
data);
126 xform_cipher->cipher.iv.length = 16;
129 xform_auth->auth.algo = auth_algo;
130 xform_auth->auth.digest_length = digest_len;
131 xform_auth->auth.key.data = key_auth->
data;
132 xform_auth->auth.key.length =
vec_len (key_auth->
data);
145 n_devs = rte_cryptodev_count ();
147 for (i = 0; i < n_devs; i++)
148 rte_cryptodev_sym_session_clear (i, sess);
150 rte_cryptodev_sym_session_free (sess);
162 if (vcap->
xform_type != RTE_CRYPTO_SYM_XFORM_CIPHER)
164 if (vcap->
cipher.algo != algo)
183 if (vcap->
xform_type != RTE_CRYPTO_SYM_XFORM_AUTH)
185 if (vcap->
auth.algo != algo)
188 if (*s == digest_size)
197 u32 digest_size,
u32 aad_size)
202 u32 key_match = 0, digest_match = 0, aad_match = 0;
206 if (vcap->
xform_type != RTE_CRYPTO_SYM_XFORM_AEAD)
208 if (vcap->
aead.algo != algo)
211 if (*s == digest_size)
230 if (key_match == 1 && digest_match == 1 && aad_match == 1)
245 #define _(a, b, c, d, e) \ 246 case VNET_CRYPTO_ALG_##a##_##d##_TAG##e: \ 247 if (check_cipher_support (RTE_CRYPTO_CIPHER_##b, c) && \ 248 check_auth_support (RTE_CRYPTO_AUTH_##d##_HMAC, e)) \ 257 #define _(a, b, c, d, e, f, g) \ 258 if (key->alg == VNET_CRYPTO_ALG_##a) \ 260 if (check_aead_support (RTE_CRYPTO_AEAD_##c, g, e, f)) \ 266 if (matched < 2)
return 0;
333 struct rte_mempool *sess_pool, *sess_priv_pool;
335 struct rte_crypto_sym_xform xforms_enc[2] = { { 0 } };
336 struct rte_crypto_sym_xform xforms_dec[2] = { { 0 } };
346 rte_cryptodev_sym_session_create (sess_pool);
354 rte_cryptodev_sym_session_create (sess_pool);
377 struct rte_cryptodev *cdev = rte_cryptodev_pmd_get_dev (dev_id);
378 u32 driver_id = cdev->driver_id;
382 if (sessions[CRYPTODEV_OP_TYPE_ENCRYPT]->sess_data[driver_id].
data &&
383 sessions[CRYPTODEV_OP_TYPE_DECRYPT]->sess_data[driver_id].
data)
386 ret = rte_cryptodev_sym_session_init (
387 dev_id, sessions[CRYPTODEV_OP_TYPE_ENCRYPT], xforms_enc,
389 ret = rte_cryptodev_sym_session_init (
390 dev_id, sessions[CRYPTODEV_OP_TYPE_DECRYPT], xforms_dec,
429 u32 cryptodev_inst_index,
474 cryptodev_inst_index, 1);
490 u32 inst = va_arg (*args,
u32);
493 struct rte_cryptodev_info info;
495 rte_cryptodev_info_get (cit->
dev_id, &info);
496 s =
format (s,
"%-25s%-10u", info.device->name, cit->
q_id);
506 s =
format (s,
"%u (%v)\n", thread_index,
513 s =
format (s,
"%s\n",
"free");
537 vlib_cli_output (vm,
"Cryptodev Data Path API used: RAW Data Path API");
539 vlib_cli_output (vm,
"Cryptodev Data Path API used: crypto operation API");
544 .path =
"show cryptodev assignment",
545 .short_help =
"show cryptodev assignment",
557 u32 thread_present = 0, inst_present = 0;
567 if (
unformat (line_input,
"thread %u", &thread_index))
569 else if (
unformat (line_input,
"resource %u", &inst_index))
579 if (!thread_present || !inst_present)
613 .path =
"set cryptodev assignment",
614 .short_help =
"set cryptodev assignment thread <thread_index> " 615 "resource <inst_index>",
622 struct rte_cryptodev_info info;
623 u32 n_cryptodev = rte_cryptodev_count ();
626 for (i = 0; i < n_cryptodev; i++)
628 rte_cryptodev_info_get (i, &info);
629 q_count += info.max_nb_queue_pairs;
638 struct rte_cryptodev_config cfg;
639 struct rte_cryptodev_info info;
644 rte_cryptodev_info_get (cryptodev_id, &info);
646 if (!(info.feature_flags & RTE_CRYPTODEV_FF_SYMMETRIC_CRYPTO))
649 cfg.socket_id = info.device->numa_node;
650 cfg.nb_queue_pairs = info.max_nb_queue_pairs;
652 rte_cryptodev_configure (cryptodev_id, &cfg);
654 for (i = 0; i < info.max_nb_queue_pairs; i++)
656 struct rte_cryptodev_qp_conf qp_cfg;
658 qp_cfg.mp_session = 0;
659 qp_cfg.mp_session_private = 0;
662 ret = rte_cryptodev_queue_pair_setup (cryptodev_id, i, &qp_cfg,
663 info.device->numa_node);
666 clib_warning (
"Cryptodev: Configure device %u queue %u failed %d",
667 cryptodev_id, i, ret);
672 if (i != info.max_nb_queue_pairs)
676 rte_cryptodev_start (cryptodev_id);
678 for (i = 0; i < info.max_nb_queue_pairs; i++)
681 vec_add2(cmt->cryptodev_inst, cdev_inst, 1);
682 cdev_inst->
desc =
vec_new (
char, strlen (info.device->name) + 10);
683 cdev_inst->
dev_id = cryptodev_id;
686 snprintf (cdev_inst->
desc, strlen (info.device->name) + 9,
687 "%s_q%u", info.device->name, i);
712 if (*value == param_value)
720 u32 key_size,
u32 digest_size,
u32 aad_size)
730 if (idx->type == RTE_CRYPTO_SYM_XFORM_AUTH &&
731 cap->
auth.algo == idx->algo.auth &&
735 if (idx->type == RTE_CRYPTO_SYM_XFORM_CIPHER &&
736 cap->
cipher.algo == idx->algo.cipher &&
740 if (idx->type == RTE_CRYPTO_SYM_XFORM_AEAD &&
741 cap->
aead.algo == idx->algo.aead &&
753 u32 param_size_max,
u32 increment)
758 while (i <
vec_len (*param_sizes))
761 for (cap_param_size = param_size_min; cap_param_size <= param_size_max;
762 cap_param_size += increment)
764 if ((*param_sizes)[i] == cap_param_size)
787 case RTE_CRYPTO_SYM_XFORM_AUTH:
790 case RTE_CRYPTO_SYM_XFORM_CIPHER:
793 case RTE_CRYPTO_SYM_XFORM_AEAD:
807 const struct rte_cryptodev_capabilities *dev_caps)
810 const struct rte_cryptodev_capabilities *cap = &dev_caps[0];
812 while (cap->op != RTE_CRYPTO_OP_TYPE_UNDEFINED)
814 if (cap->sym.xform_type == temp_cap->
xform_type)
815 switch (cap->sym.xform_type)
817 case RTE_CRYPTO_SYM_XFORM_CIPHER:
818 if (cap->sym.cipher.algo == temp_cap->
cipher.algo)
821 &temp_cap->
cipher.key_sizes, cap->sym.cipher.key_size.min,
822 cap->sym.cipher.key_size.max,
823 cap->sym.cipher.key_size.increment);
828 case RTE_CRYPTO_SYM_XFORM_AUTH:
829 if (cap->sym.auth.algo == temp_cap->
auth.algo)
832 &temp_cap->
auth.digest_sizes, cap->sym.auth.digest_size.min,
833 cap->sym.auth.digest_size.max,
834 cap->sym.auth.digest_size.increment);
839 case RTE_CRYPTO_SYM_XFORM_AEAD:
840 if (cap->sym.aead.algo == temp_cap->
aead.algo)
843 &temp_cap->
aead.key_sizes, cap->sym.aead.key_size.min,
844 cap->sym.aead.key_size.max,
845 cap->sym.aead.key_size.increment);
847 &temp_cap->
aead.aad_sizes, cap->sym.aead.aad_size.min,
848 cap->sym.aead.aad_size.max,
849 cap->sym.aead.aad_size.increment);
851 &temp_cap->
aead.digest_sizes, cap->sym.aead.digest_size.min,
852 cap->sym.aead.digest_size.max,
853 cap->sym.aead.digest_size.increment);
876 struct rte_cryptodev_info dev_info;
877 u32 previous_dev_id, dev_id;
881 const struct rte_cryptodev_capabilities *cap;
882 const struct rte_cryptodev_capabilities *dev_caps;
888 rte_cryptodev_info_get (dev_inst->
dev_id, &dev_info);
889 cap = &dev_info.capabilities[0];
892 while (cap->op != RTE_CRYPTO_OP_TYPE_UNDEFINED)
894 if (cap->op != RTE_CRYPTO_OP_TYPE_SYMMETRIC)
901 switch (cap->sym.xform_type)
903 case RTE_CRYPTO_SYM_XFORM_CIPHER:
904 tmp_cap.
cipher.key_sizes = 0;
905 tmp_cap.
cipher.algo = cap->sym.cipher.algo;
906 for (param = cap->sym.cipher.key_size.min;
907 param <= cap->sym.cipher.key_size.max;
908 param += cap->sym.cipher.key_size.increment)
911 if (cap->sym.cipher.key_size.increment == 0)
915 case RTE_CRYPTO_SYM_XFORM_AUTH:
916 tmp_cap.
auth.algo = cap->sym.auth.algo;
917 tmp_cap.
auth.digest_sizes = 0;
918 for (param = cap->sym.auth.digest_size.min;
919 param <= cap->sym.auth.digest_size.max;
920 param += cap->sym.auth.digest_size.increment)
923 if (cap->sym.auth.digest_size.increment == 0)
927 case RTE_CRYPTO_SYM_XFORM_AEAD:
928 tmp_cap.
aead.key_sizes = 0;
929 tmp_cap.
aead.aad_sizes = 0;
930 tmp_cap.
aead.digest_sizes = 0;
931 tmp_cap.
aead.algo = cap->sym.aead.algo;
932 for (param = cap->sym.aead.key_size.min;
933 param <= cap->sym.aead.key_size.max;
934 param += cap->sym.aead.key_size.increment)
937 if (cap->sym.aead.key_size.increment == 0)
940 for (param = cap->sym.aead.aad_size.min;
941 param <= cap->sym.aead.aad_size.max;
942 param += cap->sym.aead.aad_size.increment)
945 if (cap->sym.aead.aad_size.increment == 0)
948 for (param = cap->sym.aead.digest_size.min;
949 param <= cap->sym.aead.digest_size.max;
950 param += cap->sym.aead.digest_size.increment)
953 if (cap->sym.aead.digest_size.increment == 0)
967 u32 cap_is_supported = 1;
972 dev_id = dev_inst->
dev_id;
973 if (previous_dev_id != dev_id)
975 previous_dev_id = dev_id;
976 rte_cryptodev_info_get (dev_id, &dev_info);
977 dev_caps = &dev_info.capabilities[0];
980 if (!cap_is_supported)
989 if (cap_is_supported)
1001 if (n_queues < n_workers)
1004 for (i = 0; i < rte_cryptodev_count (); i++)
1024 u32 max_sess = 0, max_dp = 0;
1028 u32 sess_sz = rte_cryptodev_sym_get_private_session_size (cinst->
dev_id);
1029 u32 dp_sz = rte_cryptodev_get_raw_dp_ctx_size (cinst->
dev_id);
1031 max_sess =
clib_max (sess_sz, max_sess);
1035 *max_sess_sz = max_sess;
1036 *max_dp_sz = max_dp;
1054 rte_mempool_free (numa_data->
sess_pool);
1067 struct rte_mempool *mp;
1106 name =
format (0,
"vcryptodev_sess_pool_%u%c", numa, 0);
1107 mp = rte_cryptodev_sym_session_pool_create (
1120 name =
format (0,
"cryptodev_sess_pool_%u%c", numa, 0);
1123 0, 0, NULL, NULL, NULL, NULL, numa, 0);
1142 "DPDK Cryptodev Engine");
#define vec_validate(V, I)
Make sure vector is long enough for given index (no header, unspecified alignment) ...
#define CRYPTODEV_IV_OFFSET
#define vec_foreach_index(var, v)
Iterate over vector indices.
enum rte_iova_mode iova_mode
static_always_inline int cryptodev_check_supported_vnet_alg(vnet_crypto_key_t *key)
static u32 cryptodev_remove_unsupported_param_sizes(cryptodev_capability_t *temp_cap, const struct rte_cryptodev_capabilities *dev_caps)
static_always_inline void clib_spinlock_unlock(clib_spinlock_t *p)
static_always_inline void clib_spinlock_lock(clib_spinlock_t *p)
static void remove_unsupported_param_size(u32 **param_sizes, u32 param_size_min, u32 param_size_max, u32 increment)
cryptodev_resource_assign_op_t
static void cryptodev_get_common_capabilities()
static int check_auth_support(enum rte_crypto_auth_algorithm algo, u32 digest_size)
static clib_error_t * cryptodev_set_assignment_fn(vlib_main_t *vm, unformat_input_t *input, vlib_cli_command_t *cmd)
#define VNET_CRYPTO_KEY_TYPE_LINK
#define CLIB_MEMORY_STORE_BARRIER()
clib_memset(h->entries, 0, sizeof(h->entries[0]) *entries)
void cryptodev_key_handler(vlib_main_t *vm, vnet_crypto_key_op_t kop, vnet_crypto_key_index_t idx)
static u32 cryptodev_count_queue(u32 numa)
#define foreach_cryptodev_link_async_alg
crypto (alg, cryptodev_alg, key_size), hash (alg, digest_size)
void vnet_crypto_request_async_mode(int is_enable)
#define vec_add1(V, E)
Add 1 element to end of vector (unspecified alignment).
enum rte_crypto_sym_xform_type xform_type
#define vec_add2(V, P, N)
Add N elements to end of vector V, return pointer to new elements in P.
static uword * clib_bitmap_set(uword *ai, uword i, uword value)
Sets the ith bit of a bitmap to new_value Removes trailing zeros from the bitmap. ...
#define vec_validate_aligned(V, I, A)
Make sure vector is long enough for given index (no header, specified alignment)
clib_bitmap_t * active_cdev_inst_mask
static uword clib_bitmap_set_no_check(uword *a, uword i, uword new_value)
Sets the ith bit of a bitmap to new_value.
clib_error_t * cryptodev_register_cop_hdl(vlib_main_t *vm, u32 eidx)
static int cryptodev_supports_param_value(u32 *params, u32 param_value)
void vnet_crypto_register_key_handler(vlib_main_t *vm, u32 engine_index, vnet_crypto_key_handler_t *key_handler)
int cryptodev_session_create(vlib_main_t *vm, vnet_crypto_key_index_t idx, u32 aad_len)
#define static_always_inline
#define CRYPTODEV_NB_SESSION
#define vec_new(T, N)
Create new vector of given type and length (unspecified alignment, no header).
(description unavailable — documentation-extraction artifact)
struct rte_mempool * sess_pool
struct rte_cryptodev_sym_session *** keys
#define vec_elt_at_index(v, i)
Get vector value at index i checking that i is in bounds.
#define clib_error_return(e, args...)
static int check_cipher_support(enum rte_crypto_cipher_algorithm algo, u32 key_size)
static_always_inline int check_aead_support(enum rte_crypto_aead_algorithm algo, u32 key_size, u32 digest_size, u32 aad_size)
struct cryptodev_capability_t::@641::@643 auth
struct cryptodev_capability_t::@641::@645 aead
static void clib_spinlock_init(clib_spinlock_t *p)
vlib_worker_thread_t * vlib_worker_threads
cryptodev_engine_thread_t * per_thread_data
clib_error_t *__clib_weak cryptodev_register_raw_hdl(vlib_main_t *vm, u32 eidx)
static void cryptodev_get_max_sz(u32 *max_sess_sz, u32 *max_dp_sz)
static int cryptodev_configure(vlib_main_t *vm, u32 cryptodev_id)
static_always_inline int cryptodev_assign_resource(cryptodev_engine_thread_t *cet, u32 cryptodev_inst_index, cryptodev_resource_assign_op_t op)
Assign a cryptodev resource to a worker.
static_always_inline int prepare_linked_xform(struct rte_crypto_sym_xform *xforms, cryptodev_op_type_t op_type, const vnet_crypto_key_t *key)
vlib_main_t * vm
X-connect all packets from the HOST to the PHY.
static void cryptodev_delete_cap(cryptodev_capability_t **temp_caps, u32 temp_cap_id)
#define foreach_vnet_aead_crypto_conversion
clib_error_t * dpdk_cryptodev_init(vlib_main_t *vm)
vnet_crypto_async_alg_t async_alg
int cryptodev_check_cap_support(struct rte_cryptodev_sym_capability_idx *idx, u32 key_size, u32 digest_size, u32 aad_size)
sll srl srl sll sra u16x4 i
cryptodev_main_t cryptodev_main
#define vec_free(V)
Free vector's memory (no header).
#define clib_warning(format, args...)
#define CRYPTODEV_NB_CRYPTO_OPS
static uword clib_bitmap_get(uword *ai, uword i)
Gets the ith bit value from a bitmap.
cryptodev_numa_data_t * per_numa_data
cryptodev_inst_t * cryptodev_inst
#define clib_bitmap_vec_validate(v, i)
static int cryptodev_probe(vlib_main_t *vm, u32 n_workers)
#define VLIB_CLI_COMMAND(x,...)
void vlib_cli_output(vlib_main_t *vm, char *fmt,...)
static_always_inline void cryptodev_session_del(struct rte_cryptodev_sym_session *sess)
#define vec_delete(V, N, M)
Delete N elements starting at element M.
static u8 * format_cryptodev_inst(u8 *s, va_list *args)
static int cryptodev_cmp(void *v1, void *v2)
static clib_error_t * cryptodev_show_assignment_fn(vlib_main_t *vm, unformat_input_t *input, vlib_cli_command_t *cmd)
u32 vnet_crypto_key_index_t
struct rte_mempool * sess_priv_pool
static uword clib_bitmap_count_set_bits(uword *ai)
Return the number of set bits in a bitmap.
static vlib_main_t * vlib_get_main_by_index(u32 thread_index)
static_always_inline vnet_crypto_key_t * vnet_crypto_get_key(vnet_crypto_key_index_t index)
#define vec_len(v)
Number of elements in vector (rvalue-only, NULL tolerant)
cryptodev_capability_t * supported_caps
#define vec_sort_with_function(vec, f)
Sort a vector using the supplied element comparison function.
void cryptodev_sess_handler(vlib_main_t *vm, vnet_crypto_key_op_t kop, vnet_crypto_key_index_t idx, u32 aad_len)
static void dpdk_disable_cryptodev_engine(vlib_main_t *vm)
struct cryptodev_capability_t::@641::@644 cipher
static vlib_thread_main_t * vlib_get_thread_main()
static u32 vlib_num_workers()
#define vec_foreach(var, vec)
Vector iterator.
void ipsec_set_async_mode(u32 is_enabled)
static uword clib_bitmap_first_clear(uword *ai)
Return the lowest numbered clear bit in a bitmap.
#define CLIB_CACHE_LINE_BYTES
u32 vnet_crypto_register_engine(vlib_main_t *vm, char *name, int prio, char *desc)
static_always_inline int prepare_aead_xform(struct rte_crypto_sym_xform *xform, cryptodev_op_type_t op_type, const vnet_crypto_key_t *key, u32 aad_len)