FD.io VPP  v17.10-9-gd594711
Vector Packet Processing
esp.h
Go to the documentation of this file.
1 /*
2  * Copyright (c) 2016 Intel and/or its affiliates.
3  * Licensed under the Apache License, Version 2.0 (the "License");
4  * you may not use this file except in compliance with the License.
5  * You may obtain a copy of the License at:
6  *
7  * http://www.apache.org/licenses/LICENSE-2.0
8  *
9  * Unless required by applicable law or agreed to in writing, software
10  * distributed under the License is distributed on an "AS IS" BASIS,
11  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12  * See the License for the specific language governing permissions and
13  * limitations under the License.
14  */
15 #ifndef __DPDK_ESP_H__
16 #define __DPDK_ESP_H__
17 
18 #include <dpdk/ipsec/ipsec.h>
19 #include <vnet/ipsec/ipsec.h>
20 #include <vnet/ipsec/esp.h>
21 
/* Per-cipher capability entry consumed by the DPDK ESP nodes.
   NOTE(review): this extraction is truncated — the embedded numbering jumps
   27 -> 31, so the members set as c->key_len / c->iv_len in dpdk_esp_init
   below, and the closing "} dpdk_esp_crypto_alg_t;", are missing here. */
22 typedef struct
23 {
24  enum rte_crypto_cipher_algorithm algo;
25 #if ! DPDK_NO_AEAD
 /* Newer DPDK (>= 17.08) models GCM as a distinct AEAD transform. */
26  enum rte_crypto_aead_algorithm aead_algo;
27 #endif
31 
/* Per-integrity-algorithm entry.
   NOTE(review): numbering jumps 34 -> 37 — the i->trunc_size member used in
   dpdk_esp_init and the closing "} dpdk_esp_integ_alg_t;" are missing. */
32 typedef struct
33 {
34  enum rte_crypto_auth_algorithm algo;
37 
/* Main ESP state: per-algorithm capability vectors.
   NOTE(review): numbering jumps 39 -> 43 -> 45 — the esp_crypto_algs /
   esp_integ_algs vector members (see generated cross-references), the
   closing "} dpdk_esp_main_t;" and the dpdk_esp_main global definition at
   original line 44 are missing from this extraction. */
38 typedef struct
39 {
43 
45 
48 {
52 
54 
55  c = &em->esp_crypto_algs[IPSEC_CRYPTO_ALG_AES_CBC_128];
56  c->algo = RTE_CRYPTO_CIPHER_AES_CBC;
57  c->key_len = 16;
58  c->iv_len = 16;
59 
60  c = &em->esp_crypto_algs[IPSEC_CRYPTO_ALG_AES_CBC_192];
61  c->algo = RTE_CRYPTO_CIPHER_AES_CBC;
62  c->key_len = 24;
63  c->iv_len = 16;
64 
65  c = &em->esp_crypto_algs[IPSEC_CRYPTO_ALG_AES_CBC_256];
66  c->algo = RTE_CRYPTO_CIPHER_AES_CBC;
67  c->key_len = 32;
68  c->iv_len = 16;
69 
70  c = &em->esp_crypto_algs[IPSEC_CRYPTO_ALG_AES_GCM_128];
71 #if DPDK_NO_AEAD
72  c->algo = RTE_CRYPTO_CIPHER_AES_GCM;
73 #else
74  c->aead_algo = RTE_CRYPTO_AEAD_AES_GCM;
75 #endif
76  c->key_len = 16;
77  c->iv_len = 8;
78 
80 
81  i = &em->esp_integ_algs[IPSEC_INTEG_ALG_SHA1_96];
82  i->algo = RTE_CRYPTO_AUTH_SHA1_HMAC;
83  i->trunc_size = 12;
84 
85  i = &em->esp_integ_algs[IPSEC_INTEG_ALG_SHA_256_96];
86  i->algo = RTE_CRYPTO_AUTH_SHA256_HMAC;
87  i->trunc_size = 12;
88 
89  i = &em->esp_integ_algs[IPSEC_INTEG_ALG_SHA_256_128];
90  i->algo = RTE_CRYPTO_AUTH_SHA256_HMAC;
91  i->trunc_size = 16;
92 
93  i = &em->esp_integ_algs[IPSEC_INTEG_ALG_SHA_384_192];
94  i->algo = RTE_CRYPTO_AUTH_SHA384_HMAC;
95  i->trunc_size = 24;
96 
97  i = &em->esp_integ_algs[IPSEC_INTEG_ALG_SHA_512_256];
98  i->algo = RTE_CRYPTO_AUTH_SHA512_HMAC;
99  i->trunc_size = 32;
100 #if DPDK_NO_AEAD
101  i = &em->esp_integ_algs[IPSEC_INTEG_ALG_AES_GCM_128];
102  i->algo = RTE_CRYPTO_AUTH_AES_GCM;
103  i->trunc_size = 16;
104 #endif
105 }
106 
109  struct rte_crypto_sym_xform *xform, u8 use_esn)
110 {
111 #if ! DPDK_NO_AEAD
112  const u16 iv_off =
113  sizeof (struct rte_crypto_op) + sizeof (struct rte_crypto_sym_op) +
114  offsetof (dpdk_cop_priv_t, cb);
115 #endif
116 
117  xform->type = RTE_CRYPTO_SYM_XFORM_CIPHER;
118 
119  switch (crypto_algo)
120  {
121  case IPSEC_CRYPTO_ALG_NONE:
122 #if ! DPDK_NO_AEAD
123  xform->cipher.iv.offset = iv_off;
124  xform->cipher.iv.length = 0;
125 #endif
126  xform->cipher.algo = RTE_CRYPTO_CIPHER_NULL;
127  break;
128  case IPSEC_CRYPTO_ALG_AES_CBC_128:
129  case IPSEC_CRYPTO_ALG_AES_CBC_192:
130  case IPSEC_CRYPTO_ALG_AES_CBC_256:
131 #if ! DPDK_NO_AEAD
132  xform->cipher.iv.offset = iv_off;
133  xform->cipher.iv.length = 16;
134 #endif
135  xform->cipher.algo = RTE_CRYPTO_CIPHER_AES_CBC;
136  break;
137  case IPSEC_CRYPTO_ALG_AES_GCM_128:
138 #if DPDK_NO_AEAD
139  xform->cipher.algo = RTE_CRYPTO_CIPHER_AES_GCM;
140 #else
141  xform->type = RTE_CRYPTO_SYM_XFORM_AEAD;
142  xform->aead.algo = RTE_CRYPTO_AEAD_AES_GCM;
143  xform->aead.iv.offset = iv_off;
144  xform->aead.iv.length = 12; /* GCM IV, not ESP IV */
145  xform->aead.digest_length = 16;
146  xform->aead.aad_length = use_esn ? 12 : 8;
147 #endif
148  break;
149  default:
150  return -1;
151  }
152 
153  return 0;
154 }
155 
158  struct rte_crypto_sym_xform *auth_xform, u8 use_esn)
159 {
160  auth_xform->type = RTE_CRYPTO_SYM_XFORM_AUTH;
161 
162  switch (integ_alg)
163  {
164  case IPSEC_INTEG_ALG_NONE:
165  auth_xform->auth.algo = RTE_CRYPTO_AUTH_NULL;
166  auth_xform->auth.digest_length = 0;
167  break;
168  case IPSEC_INTEG_ALG_SHA1_96:
169  auth_xform->auth.algo = RTE_CRYPTO_AUTH_SHA1_HMAC;
170  auth_xform->auth.digest_length = 12;
171  break;
172  case IPSEC_INTEG_ALG_SHA_256_96:
173  auth_xform->auth.algo = RTE_CRYPTO_AUTH_SHA256_HMAC;
174  auth_xform->auth.digest_length = 12;
175  break;
176  case IPSEC_INTEG_ALG_SHA_256_128:
177  auth_xform->auth.algo = RTE_CRYPTO_AUTH_SHA256_HMAC;
178  auth_xform->auth.digest_length = 16;
179  break;
180  case IPSEC_INTEG_ALG_SHA_384_192:
181  auth_xform->auth.algo = RTE_CRYPTO_AUTH_SHA384_HMAC;
182  auth_xform->auth.digest_length = 24;
183  break;
184  case IPSEC_INTEG_ALG_SHA_512_256:
185  auth_xform->auth.algo = RTE_CRYPTO_AUTH_SHA512_HMAC;
186  auth_xform->auth.digest_length = 32;
187  break;
188 #if DPDK_NO_AEAD
189  case IPSEC_INTEG_ALG_AES_GCM_128:
190  auth_xform->auth.algo = RTE_CRYPTO_AUTH_AES_GCM;
191  auth_xform->auth.digest_length = 16;
192  auth_xform->auth.add_auth_data_length = use_esn ? 12 : 8;
193  break;
194 #endif
195  default:
196  return -1;
197  }
198 
199  return 0;
200 }
201 
/*
 * create_sym_sess: build a DPDK cryptodev symmetric session for an IPsec SA
 * and bind it to a queue pair of the calling worker thread.
 * Per the generated cross-reference the full (truncated here) signature is:
 *   static_always_inline i32 create_sym_sess (ipsec_sa_t *sa,
 *       crypto_sa_session_t *sa_sess, u8 is_outbound)
 * Returns 0 on success, -1 on any failure.
 */
204  u8 is_outbound)
205 {
206  u32 thread_index = vlib_get_thread_index ();
 /* NOTE(review): extraction skips original line 207 — 'dcm', used below,
    is presumably: dpdk_crypto_main_t *dcm = &dpdk_crypto_main; — confirm. */
208  crypto_worker_main_t *cwm = &dcm->workers_main[thread_index];
209  struct rte_crypto_sym_xform cipher_xform = { 0 };
210  struct rte_crypto_sym_xform auth_xform = { 0 };
211  struct rte_crypto_sym_xform *xfs;
 /* 'key' doubles as the algo_qp_map hash key, written through 'p_key'. */
212  uword key = 0, *data;
 /* NOTE(review): extraction skips original line 213 — the declaration of
    'p_key', apparently a struct view over 'key' (cipher_algo, auth_algo,
    is_aead, is_outbound fields) — confirm against upstream. */
214 #if ! DPDK_NO_AEAD
215  i32 socket_id = rte_socket_id ();
216  i32 ret;
217 #endif
218 
 /* For AES-GCM the last 4 key bytes are the nonce salt (RFC 4106 style);
    split them off the key. Otherwise pick a random salt. */
219  if (sa->crypto_alg == IPSEC_CRYPTO_ALG_AES_GCM_128)
220  {
221  sa->crypto_key_len -= 4;
222  clib_memcpy (&sa->salt, &sa->crypto_key[sa->crypto_key_len], 4);
223  }
224  else
225  {
226  u32 seed = (u32) clib_cpu_time_now ();
227  sa->salt = random_u32 (&seed);
228  }
229 
 /* Map the SA's VPP algorithm enums onto DPDK transforms; the resulting
    algo ids become part of the queue-pair lookup key. */
230  if (translate_crypto_algo (sa->crypto_alg, &cipher_xform, sa->use_esn) < 0)
231  return -1;
232  p_key->cipher_algo = cipher_xform.cipher.algo;
233 
234  if (translate_integ_algo (sa->integ_alg, &auth_xform, sa->use_esn) < 0)
235  return -1;
236  p_key->auth_algo = auth_xform.auth.algo;
237 
238 #if ! DPDK_NO_AEAD
239  if (sa->crypto_alg == IPSEC_CRYPTO_ALG_AES_GCM_128)
240  {
 /* AEAD: one combined transform covers both cipher and authentication. */
241  cipher_xform.aead.key.data = sa->crypto_key;
242  cipher_xform.aead.key.length = sa->crypto_key_len;
243 
244  if (is_outbound)
245  cipher_xform.cipher.op =
246  (enum rte_crypto_cipher_operation) RTE_CRYPTO_AEAD_OP_ENCRYPT;
247  else
248  cipher_xform.cipher.op =
249  (enum rte_crypto_cipher_operation) RTE_CRYPTO_AEAD_OP_DECRYPT;
250  cipher_xform.next = NULL;
251  xfs = &cipher_xform;
252  p_key->is_aead = 1;
253  }
254  else /* Cipher + Auth */
255 #endif
256  {
257  cipher_xform.cipher.key.data = sa->crypto_key;
258  cipher_xform.cipher.key.length = sa->crypto_key_len;
259 
260  auth_xform.auth.key.data = sa->integ_key;
261  auth_xform.auth.key.length = sa->integ_key_len;
262 
 /* Chain order encodes processing order: outbound encrypts then generates
    the digest; inbound verifies the digest then decrypts. */
263  if (is_outbound)
264  {
265  cipher_xform.cipher.op = RTE_CRYPTO_CIPHER_OP_ENCRYPT;
266  auth_xform.auth.op = RTE_CRYPTO_AUTH_OP_GENERATE;
267  cipher_xform.next = &auth_xform;
268  xfs = &cipher_xform;
269  }
270  else
271  {
272  cipher_xform.cipher.op = RTE_CRYPTO_CIPHER_OP_DECRYPT;
273  auth_xform.auth.op = RTE_CRYPTO_AUTH_OP_VERIFY;
274  auth_xform.next = &cipher_xform;
275  xfs = &auth_xform;
276  }
277  p_key->is_aead = 0;
278  }
279 
280  p_key->is_outbound = is_outbound;
281 
 /* Find the queue pair pre-assigned to this algorithm combination. */
282  data = hash_get (cwm->algo_qp_map, key);
283  if (!data)
284  return -1;
285 
286 #if DPDK_NO_AEAD
 /* Old DPDK API: session is created directly on the device. */
287  sa_sess->sess =
288  rte_cryptodev_sym_session_create (cwm->qp_data[*data].dev_id, xfs);
289  if (!sa_sess->sess)
290  return -1;
291 #else
 /* New DPDK API: allocate a session header from the per-socket pool, then
    initialize the device-private data separately. */
292  sa_sess->sess =
293  rte_cryptodev_sym_session_create (dcm->sess_h_pools[socket_id]);
294  if (!sa_sess->sess)
295  return -1;
296 
297  ret =
298  rte_cryptodev_sym_session_init (cwm->qp_data[*data].dev_id, sa_sess->sess,
299  xfs, dcm->sess_pools[socket_id]);
300  if (ret)
301  return -1;
302 #endif
303 
304  sa_sess->qp_index = (u8) * data;
305 
306  return 0;
307 }
308 
310 crypto_set_icb (dpdk_gcm_cnt_blk * icb, u32 salt, u32 seq, u32 seq_hi)
311 {
312  icb->salt = salt;
313  icb->iv[0] = seq;
314  icb->iv[1] = seq_hi;
315 #if DPDK_NO_AEAD
316  icb->cnt = clib_host_to_net_u32 (1);
317 #endif
318 }
319 
320 #define __unused __attribute__((unused))
322 crypto_op_setup (u8 is_aead, struct rte_mbuf *mb0,
323  struct rte_crypto_op *cop, void *session,
324  u32 cipher_off, u32 cipher_len,
325  u8 * icb __unused, u32 iv_size __unused,
326  u32 auth_off, u32 auth_len,
327  u8 * aad __unused, u32 aad_size __unused,
328  u8 * digest, u64 digest_paddr, u32 digest_size __unused)
329 {
330  struct rte_crypto_sym_op *sym_cop;
331 
332  sym_cop = (struct rte_crypto_sym_op *) (cop + 1);
333 
334  sym_cop->m_src = mb0;
335  rte_crypto_op_attach_sym_session (cop, session);
336 
337 #if DPDK_NO_AEAD
338  sym_cop->cipher.data.offset = cipher_off;
339  sym_cop->cipher.data.length = cipher_len;
340 
341  sym_cop->cipher.iv.data = icb;
342  sym_cop->cipher.iv.phys_addr =
343  cop->phys_addr + (uintptr_t) icb - (uintptr_t) cop;
344  sym_cop->cipher.iv.length = iv_size;
345 
346  if (is_aead)
347  {
348  sym_cop->auth.aad.data = aad;
349  sym_cop->auth.aad.phys_addr =
350  cop->phys_addr + (uintptr_t) aad - (uintptr_t) cop;
351  sym_cop->auth.aad.length = aad_size;
352  }
353  else
354  {
355  sym_cop->auth.data.offset = auth_off;
356  sym_cop->auth.data.length = auth_len;
357  }
358 
359  sym_cop->auth.digest.data = digest;
360  sym_cop->auth.digest.phys_addr = digest_paddr;
361  sym_cop->auth.digest.length = digest_size;
362 #else /* ! DPDK_NO_AEAD */
363  if (is_aead)
364  {
365  sym_cop->aead.data.offset = cipher_off;
366  sym_cop->aead.data.length = cipher_len;
367 
368  sym_cop->aead.aad.data = aad;
369  sym_cop->aead.aad.phys_addr =
370  cop->phys_addr + (uintptr_t) aad - (uintptr_t) cop;
371 
372  sym_cop->aead.digest.data = digest;
373  sym_cop->aead.digest.phys_addr = digest_paddr;
374  }
375  else
376  {
377  sym_cop->cipher.data.offset = cipher_off;
378  sym_cop->cipher.data.length = cipher_len;
379 
380  sym_cop->auth.data.offset = auth_off;
381  sym_cop->auth.data.length = auth_len;
382 
383  sym_cop->auth.digest.data = digest;
384  sym_cop->auth.digest.phys_addr = digest_paddr;
385  }
386 #endif /* DPDK_NO_AEAD */
387 }
388 
389 #undef __unused
390 
391 #endif /* __DPDK_ESP_H__ */
392 
393 /*
394  * fd.io coding-style-patch-verification: ON
395  *
396  * Local Variables:
397  * eval: (c-set-style "gnu")
398  * End:
399  */
#define vec_validate(V, I)
Make sure vector is long enough for given index (no header, unspecified alignment) ...
Definition: vec.h:432
sll srl srl sll sra u16x4 i
Definition: vector_sse2.h:337
enum rte_crypto_aead_algorithm aead_algo
Definition: esp.h:26
#define NULL
Definition: clib.h:55
static_always_inline i32 create_sym_sess(ipsec_sa_t *sa, crypto_sa_session_t *sa_sess, u8 is_outbound)
Definition: esp.h:203
ipsec_integ_alg_t integ_alg
Definition: ipsec.h:110
static u64 clib_cpu_time_now(void)
Definition: time.h:73
struct rte_mempool ** sess_pools
Definition: ipsec.h:86
u8 crypto_key[128]
Definition: ipsec.h:108
dpdk_crypto_main_t dpdk_crypto_main
Definition: ipsec.h:92
static_always_inline void dpdk_esp_init()
Definition: esp.h:47
uword * algo_qp_map
Definition: ipsec.h:80
#define static_always_inline
Definition: clib.h:85
u8 integ_key[128]
Definition: ipsec.h:112
u8 use_esn
Definition: ipsec.h:114
int i32
Definition: types.h:81
dpdk_esp_main_t dpdk_esp_main
Definition: esp.h:44
unsigned long u64
Definition: types.h:89
dpdk_esp_integ_alg_t * esp_integ_algs
Definition: esp.h:41
dpdk_esp_crypto_alg_t * esp_crypto_algs
Definition: esp.h:40
u32 iv[2]
Definition: ipsec.h:37
#define hash_get(h, key)
Definition: hash.h:248
ipsec_integ_alg_t
Definition: ipsec.h:86
static_always_inline int translate_crypto_algo(ipsec_crypto_alg_t crypto_algo, struct rte_crypto_sym_xform *xform, u8 use_esn)
Definition: esp.h:108
u32 salt
Definition: ipsec.h:122
static_always_inline void crypto_op_setup(u8 is_aead, struct rte_mbuf *mb0, struct rte_crypto_op *cop, void *session, u32 cipher_off, u32 cipher_len, u8 *icb __unused, u32 iv_size __unused, u32 auth_off, u32 auth_len, u8 *aad __unused, u32 aad_size __unused, u8 *digest, u64 digest_paddr, u32 digest_size __unused)
Definition: esp.h:322
svmdb_client_t * c
static_always_inline uword vlib_get_thread_index(void)
Definition: threads.h:221
#define clib_memcpy(a, b, c)
Definition: string.h:69
enum rte_crypto_auth_algorithm algo
Definition: esp.h:34
unsigned int u32
Definition: types.h:88
static_always_inline int translate_integ_algo(ipsec_integ_alg_t integ_alg, struct rte_crypto_sym_xform *auth_xform, u8 use_esn)
Definition: esp.h:157
static_always_inline void crypto_set_icb(dpdk_gcm_cnt_blk *icb, u32 salt, u32 seq, u32 seq_hi)
Definition: esp.h:310
ipsec_crypto_alg_t
Definition: ipsec.h:68
crypto_worker_main_t * workers_main
Definition: ipsec.h:88
u8 integ_key_len
Definition: ipsec.h:111
crypto_qp_data_t * qp_data
Definition: ipsec.h:79
u64 uword
Definition: types.h:112
u8 crypto_key_len
Definition: ipsec.h:107
unsigned short u16
Definition: types.h:57
unsigned char u8
Definition: types.h:56
static u32 random_u32(u32 *seed)
32-bit random number generator
Definition: random.h:69
ipsec_crypto_alg_t crypto_alg
Definition: ipsec.h:106
enum rte_crypto_cipher_algorithm algo
Definition: esp.h:24
struct rte_mempool ** sess_h_pools
Definition: ipsec.h:85