FD.io VPP  v18.10-32-g1161dda
Vector Packet Processing
buffer_funcs.h
1 /*
2  * Copyright (c) 2015 Cisco and/or its affiliates.
3  * Licensed under the Apache License, Version 2.0 (the "License");
4  * you may not use this file except in compliance with the License.
5  * You may obtain a copy of the License at:
6  *
7  * http://www.apache.org/licenses/LICENSE-2.0
8  *
9  * Unless required by applicable law or agreed to in writing, software
10  * distributed under the License is distributed on an "AS IS" BASIS,
11  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12  * See the License for the specific language governing permissions and
13  * limitations under the License.
14  */
15 /*
16  * buffer_funcs.h: VLIB buffer related functions/inlines
17  *
18  * Copyright (c) 2008 Eliot Dresselhaus
19  *
20  * Permission is hereby granted, free of charge, to any person obtaining
21  * a copy of this software and associated documentation files (the
22  * "Software"), to deal in the Software without restriction, including
23  * without limitation the rights to use, copy, modify, merge, publish,
24  * distribute, sublicense, and/or sell copies of the Software, and to
25  * permit persons to whom the Software is furnished to do so, subject to
26  * the following conditions:
27  *
28  * The above copyright notice and this permission notice shall be
29  * included in all copies or substantial portions of the Software.
30  *
31  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
32  * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
33  * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
34  * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
35  * LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
36  * OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
37  * WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
38  */
39 
40 #ifndef included_vlib_buffer_funcs_h
41 #define included_vlib_buffer_funcs_h
42 
43 #include <vppinfra/hash.h>
44 #include <vppinfra/fifo.h>
45 
46 /** \file
47  vlib buffer access methods.
48 */
49 
50 
51 /** \brief Translate buffer index into buffer pointer
52 
53  @param vm - (vlib_main_t *) vlib main data structure pointer
54  @param buffer_index - (u32) buffer index
55  @return - (vlib_buffer_t *) buffer pointer
56 */
57 always_inline vlib_buffer_t *
58 vlib_get_buffer (vlib_main_t * vm, u32 buffer_index)
59 {
60  vlib_buffer_main_t *bm = &buffer_main;
61  uword offset = ((uword) buffer_index) << CLIB_LOG2_CACHE_LINE_BYTES;
62  ASSERT (offset < bm->buffer_mem_size);
63 
64  return uword_to_pointer (bm->buffer_mem_start + offset, void *);
65 }
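/* Editor's example (illustrative sketch, not part of the original header):
 * given a buffer index obtained elsewhere, fetch the buffer metadata and the
 * start of the packet data.  vlib_buffer_get_current() and current_length are
 * declared in vlib/buffer.h.
 *
 *   vlib_buffer_t *b = vlib_get_buffer (vm, bi);
 *   u8 *data = vlib_buffer_get_current (b);
 *   u16 len = b->current_length;
 */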
66 
67 /** \brief Translate array of buffer indices into buffer pointers with offset
68 
69  @param vm - (vlib_main_t *) vlib main data structure pointer
70  @param bi - (u32 *) array of buffer indices
71  @param b - (void **) array to store buffer pointers
72  @param count - (uword) number of elements
73  @param offset - (i32) offset applied to each pointer
74 */
75 static_always_inline void
76 vlib_get_buffers_with_offset (vlib_main_t * vm, u32 * bi, void **b, int count,
77  i32 offset)
78 {
79 #ifdef CLIB_HAVE_VEC256
80  u64x4 off = u64x4_splat (buffer_main.buffer_mem_start + offset);
81  /* if count is not const, compiler will not unroll while loop
82  so we maintain two-in-parallel variant */
83  while (count >= 8)
84  {
85  u64x4 b0 = u32x4_extend_to_u64x4 (u32x4_load_unaligned (bi));
86  u64x4 b1 = u32x4_extend_to_u64x4 (u32x4_load_unaligned (bi + 4));
87  /* shift and add to get vlib_buffer_t pointer */
88  u64x4_store_unaligned ((b0 << CLIB_LOG2_CACHE_LINE_BYTES) + off, b);
89  u64x4_store_unaligned ((b1 << CLIB_LOG2_CACHE_LINE_BYTES) + off, b + 4);
90  b += 8;
91  bi += 8;
92  count -= 8;
93  }
94 #endif
95  while (count >= 4)
96  {
97 #ifdef CLIB_HAVE_VEC256
98  u64x4 b0 = u32x4_extend_to_u64x4 (u32x4_load_unaligned (bi));
99  /* shift and add to get vlib_buffer_t pointer */
100  u64x4_store_unaligned ((b0 << CLIB_LOG2_CACHE_LINE_BYTES) + off, b);
101 #elif defined (CLIB_HAVE_VEC128)
102  u64x2 off = u64x2_splat (buffer_main.buffer_mem_start + offset);
103  u32x4 bi4 = u32x4_load_unaligned (bi);
104  u64x2 b0 = u32x4_extend_to_u64x2 ((u32x4) bi4);
105 #if defined (__aarch64__)
106  u64x2 b1 = u32x4_extend_to_u64x2_high ((u32x4) bi4);
107 #else
108  bi4 = u32x4_shuffle (bi4, 2, 3, 0, 1);
109  u64x2 b1 = u32x4_extend_to_u64x2 ((u32x4) bi4);
110 #endif
111  u64x2_store_unaligned ((b0 << CLIB_LOG2_CACHE_LINE_BYTES) + off, b);
112  u64x2_store_unaligned ((b1 << CLIB_LOG2_CACHE_LINE_BYTES) + off, b + 2);
113 #else
114  b[0] = ((u8 *) vlib_get_buffer (vm, bi[0])) + offset;
115  b[1] = ((u8 *) vlib_get_buffer (vm, bi[1])) + offset;
116  b[2] = ((u8 *) vlib_get_buffer (vm, bi[2])) + offset;
117  b[3] = ((u8 *) vlib_get_buffer (vm, bi[3])) + offset;
118 #endif
119  b += 4;
120  bi += 4;
121  count -= 4;
122  }
123  while (count)
124  {
125  b[0] = ((u8 *) vlib_get_buffer (vm, bi[0])) + offset;
126  b += 1;
127  bi += 1;
128  count -= 1;
129  }
130 }
131 
132 /** \brief Translate array of buffer indices into buffer pointers
133 
134  @param vm - (vlib_main_t *) vlib main data structure pointer
135  @param bi - (u32 *) array of buffer indices
136  @param b - (vlib_buffer_t **) array to store buffer pointers
137  @param count - (uword) number of elements
138 */
139 
140 static_always_inline void
141 vlib_get_buffers (vlib_main_t * vm, u32 * bi, vlib_buffer_t ** b, int count)
142 {
143  vlib_get_buffers_with_offset (vm, bi, (void **) b, count, 0);
144 }
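/* Editor's example (sketch, not from the original source): translating a whole
 * frame's worth of indices at once, as a node dispatch loop typically does.
 * `from`, `n_left` and `total_bytes` are assumed to come from the caller;
 * VLIB_FRAME_SIZE is defined in vlib/node.h.
 *
 *   vlib_buffer_t *bufs[VLIB_FRAME_SIZE], **b = bufs;
 *   vlib_get_buffers (vm, from, bufs, n_left);
 *   while (n_left > 0)
 *     {
 *       total_bytes += b[0]->current_length;
 *       b += 1;
 *       n_left -= 1;
 *     }
 */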
145 
146 /** \brief Translate buffer pointer into buffer index
147 
148  @param vm - (vlib_main_t *) vlib main data structure pointer
149  @param p - (void *) buffer pointer
150  @return - (u32) buffer index
151 */
152 
153 always_inline u32
154 vlib_get_buffer_index (vlib_main_t * vm, void *p)
155 {
156  vlib_buffer_main_t *bm = &buffer_main;
157  uword offset = pointer_to_uword (p) - bm->buffer_mem_start;
158  ASSERT (pointer_to_uword (p) >= bm->buffer_mem_start);
159  ASSERT (offset < bm->buffer_mem_size);
160  ASSERT ((offset % (1 << CLIB_LOG2_CACHE_LINE_BYTES)) == 0);
161  return offset >> CLIB_LOG2_CACHE_LINE_BYTES;
162 }
163 
164 /** \brief Translate array of buffer pointers into buffer indices with offset
165 
166  @param vm - (vlib_main_t *) vlib main data structure pointer
167  @param b - (void **) array of buffer pointers
168  @param bi - (u32 *) array to store buffer indices
169  @param count - (uword) number of elements
170  @param offset - (i32) offset applied to each pointer
171 */
172 static_always_inline void
173 vlib_get_buffer_indices_with_offset (vlib_main_t * vm, void **b, u32 * bi,
174  uword count, i32 offset)
175 {
176 #ifdef CLIB_HAVE_VEC256
177  u32x8 mask = { 0, 2, 4, 6, 1, 3, 5, 7 };
178  u64x4 off4 = u64x4_splat (buffer_main.buffer_mem_start - offset);
179 
180  while (count >= 8)
181  {
182  /* load 4 pointers into 256-bit register */
183  u64x4 v0 = u64x4_load_unaligned (b);
184  u64x4 v1 = u64x4_load_unaligned (b + 4);
185  u32x8 v2, v3;
186 
187  v0 -= off4;
188  v1 -= off4;
189 
190  v0 >>= CLIB_LOG2_CACHE_LINE_BYTES;
191  v1 >>= CLIB_LOG2_CACHE_LINE_BYTES;
192 
193  /* permute 256-bit register so lower u32s of each buffer index are
194  * placed into lower 128-bits */
195  v2 = u32x8_permute ((u32x8) v0, mask);
196  v3 = u32x8_permute ((u32x8) v1, mask);
197 
198  /* extract lower 128-bits and save them to the array of buffer indices */
199  u32x4_store_unaligned (u32x8_extract_lo (v2), bi);
200  u32x4_store_unaligned (u32x8_extract_lo (v3), bi + 4);
201  bi += 8;
202  b += 8;
203  count -= 8;
204  }
205 #endif
206  while (count >= 4)
207  {
208  /* equivalent non-vector implementation */
209  bi[0] = vlib_get_buffer_index (vm, ((u8 *) b[0]) + offset);
210  bi[1] = vlib_get_buffer_index (vm, ((u8 *) b[1]) + offset);
211  bi[2] = vlib_get_buffer_index (vm, ((u8 *) b[2]) + offset);
212  bi[3] = vlib_get_buffer_index (vm, ((u8 *) b[3]) + offset);
213  bi += 4;
214  b += 4;
215  count -= 4;
216  }
217  while (count)
218  {
219  bi[0] = vlib_get_buffer_index (vm, ((u8 *) b[0]) + offset);
220  bi += 1;
221  b += 1;
222  count -= 1;
223  }
224 }
225 
226 /** \brief Translate array of buffer pointers into buffer indices
227 
228  @param vm - (vlib_main_t *) vlib main data structure pointer
229  @param b - (vlib_buffer_t **) array of buffer pointers
230  @param bi - (u32 *) array to store buffer indices
231  @param count - (uword) number of elements
232 */
233 static_always_inline void
234 vlib_get_buffer_indices (vlib_main_t * vm, vlib_buffer_t ** b, u32 * bi,
235  uword count)
236 {
237  vlib_get_buffer_indices_with_offset (vm, (void **) b, bi, count, 0);
238 }
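/* Editor's example (sketch): the pointer-to-index helpers are the inverse of
 * vlib_get_buffers(); a round trip through both returns the original indices.
 * `bi` is assumed to hold at least four valid buffer indices.
 *
 *   vlib_buffer_t *bufs[4];
 *   u32 back[4];
 *   vlib_get_buffers (vm, bi, bufs, 4);
 *   vlib_get_buffer_indices (vm, bufs, back, 4);
 *   ASSERT (back[0] == bi[0]);
 */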
239 
240 /** \brief Get next buffer in the buffer linked list, or zero for end of list.
241 
242  @param vm - (vlib_main_t *) vlib main data structure pointer
243  @param b - (void *) buffer pointer
244  @return - (vlib_buffer_t *) next buffer, or NULL
245 */
246 always_inline vlib_buffer_t *
247 vlib_get_next_buffer (vlib_main_t * vm, vlib_buffer_t * b)
248 {
249  return (b->flags & VLIB_BUFFER_NEXT_PRESENT
250  ? vlib_get_buffer (vm, b->next_buffer) : 0);
251 }
252 
253 uword vlib_buffer_length_in_chain_slow_path (vlib_main_t * vm,
254  vlib_buffer_t * b_first);
255 
256 /** \brief Get length in bytes of the buffer chain
257 
258  @param vm - (vlib_main_t *) vlib main data structure pointer
259  @param b - (void *) buffer pointer
260  @return - (uword) length of buffer chain
261 */
262 always_inline uword
263 vlib_buffer_length_in_chain (vlib_main_t * vm, vlib_buffer_t * b)
264 {
265  uword len = b->current_length;
266 
267  if (PREDICT_TRUE ((b->flags & VLIB_BUFFER_NEXT_PRESENT) == 0))
268  return len;
269 
270  if (PREDICT_TRUE (b->flags & VLIB_BUFFER_TOTAL_LENGTH_VALID))
271  return len + b->total_length_not_including_first_buffer;
272 
273  return vlib_buffer_length_in_chain_slow_path (vm, b);
274 }
275 
276 /** \brief Get length in bytes of the buffer index buffer chain
277 
278  @param vm - (vlib_main_t *) vlib main data structure pointer
279  @param bi - (u32) buffer index
280  @return - (uword) length of buffer chain
281 */
282 always_inline uword
283 vlib_buffer_index_length_in_chain (vlib_main_t * vm, u32 bi)
284 {
285  vlib_buffer_t *b = vlib_get_buffer (vm, bi);
286  return vlib_buffer_length_in_chain (vm, b);
287 }
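/* Editor's example (sketch): summing payload bytes for a vector of buffer
 * indices; the TOTAL_LENGTH_VALID fast path above keeps this cheap for
 * well-formed chains.  `bi` and `n` are assumed to come from the caller.
 *
 *   uword i, total = 0;
 *   for (i = 0; i < n; i++)
 *     total += vlib_buffer_index_length_in_chain (vm, bi[i]);
 */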
288 
289 /** \brief Copy buffer contents to memory
290 
291  @param vm - (vlib_main_t *) vlib main data structure pointer
292  @param buffer_index - (u32) buffer index
293  @param contents - (u8 *) memory, <strong>must be large enough</strong>
294  @return - (uword) length of buffer chain
295 */
296 always_inline uword
297 vlib_buffer_contents (vlib_main_t * vm, u32 buffer_index, u8 * contents)
298 {
299  uword content_len = 0;
300  uword l;
301  vlib_buffer_t *b;
302 
303  while (1)
304  {
305  b = vlib_get_buffer (vm, buffer_index);
306  l = b->current_length;
307  clib_memcpy (contents + content_len, b->data + b->current_data, l);
308  content_len += l;
309  if (!(b->flags & VLIB_BUFFER_NEXT_PRESENT))
310  break;
311  buffer_index = b->next_buffer;
312  }
313 
314  return content_len;
315 }
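/* Editor's example (sketch): flattening a chained packet into one contiguous
 * byte array.  The destination must hold at least the chain length, as the
 * warning above says; a vppinfra vector (vec.h) is used here for sizing.
 *
 *   uword len = vlib_buffer_index_length_in_chain (vm, bi);
 *   u8 *flat = 0;
 *   vec_validate (flat, len - 1);
 *   uword copied = vlib_buffer_contents (vm, bi, flat);
 *   ASSERT (copied == len);
 *   vec_free (flat);
 */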
316 
317 /* Return physical address of buffer->data start. */
318 always_inline u64
319 vlib_get_buffer_data_physical_address (vlib_main_t * vm, u32 buffer_index)
320 {
321  vlib_buffer_main_t *bm = &buffer_main;
322  vlib_buffer_t *b = vlib_get_buffer (vm, buffer_index);
323  vlib_buffer_pool_t *pool = vec_elt_at_index (bm->buffer_pools,
324  b->buffer_pool_index);
325 
326  return vlib_physmem_virtual_to_physical (vm, pool->physmem_region, b->data);
327 }
328 
329 /** \brief Prefetch buffer metadata by buffer index
330  The first 64 bytes of the buffer contain most of the header information
331 
332  @param vm - (vlib_main_t *) vlib main data structure pointer
333  @param bi - (u32) buffer index
334  @param type - LOAD, STORE. In most cases, STORE is the right answer
335 */
336 /* Prefetch buffer header given index. */
337 #define vlib_prefetch_buffer_with_index(vm,bi,type) \
338  do { \
339  vlib_buffer_t * _b = vlib_get_buffer (vm, bi); \
340  vlib_prefetch_buffer_header (_b, type); \
341  } while (0)
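/* Editor's example (sketch): the usual pattern is to prefetch the header of a
 * buffer that will be processed a couple of iterations ahead of the current
 * one.  `from`, `n_left` and `bytes` are assumed to come from the caller.
 *
 *   while (n_left > 0)
 *     {
 *       if (n_left > 2)
 *         vlib_prefetch_buffer_with_index (vm, from[2], STORE);
 *       vlib_buffer_t *b0 = vlib_get_buffer (vm, from[0]);
 *       bytes += b0->current_length;
 *       from += 1;
 *       n_left -= 1;
 *     }
 */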
342 
343 #if 0
344 /* Iterate over known allocated vlib bufs. You probably do not want
345  * to do this!
346  @param vm the vlib_main_t
347  @param bi found allocated buffer index
348  @param body operation to perform on buffer index
349  function executes body for each allocated buffer index
350  */
351 #define vlib_buffer_foreach_allocated(vm,bi,body) \
352 do { \
353  vlib_main_t * _vmain = (vm); \
354  vlib_buffer_main_t * _bmain = &_vmain->buffer_main; \
355  hash_pair_t * _vbpair; \
356  hash_foreach_pair(_vbpair, _bmain->buffer_known_hash, ({ \
357  if (VLIB_BUFFER_KNOWN_ALLOCATED == _vbpair->value[0]) { \
358  (bi) = _vbpair->key; \
359  body; \
360  } \
361  })); \
362 } while (0)
363 #endif
364 
365 typedef enum
366 {
367  /* Index is unknown. */
368  VLIB_BUFFER_UNKNOWN,
369 
370  /* Index is known and free/allocated. */
371  VLIB_BUFFER_KNOWN_FREE,
372  VLIB_BUFFER_KNOWN_ALLOCATED,
373 } vlib_buffer_known_state_t;
374 
375 void vlib_buffer_validate_alloc_free (vlib_main_t * vm, u32 * buffers,
376  uword n_buffers,
377  vlib_buffer_known_state_t
378  expected_state);
379 
380 always_inline vlib_buffer_known_state_t
381 vlib_buffer_is_known (u32 buffer_index)
382 {
383  vlib_buffer_main_t *bm = &buffer_main;
384 
385  clib_spinlock_lock (&bm->buffer_known_hash_lockp);
386  uword *p = hash_get (bm->buffer_known_hash, buffer_index);
387  clib_spinlock_unlock (&bm->buffer_known_hash_lockp);
388  return p ? p[0] : VLIB_BUFFER_UNKNOWN;
389 }
390 
391 always_inline void
392 vlib_buffer_set_known_state (u32 buffer_index,
393  vlib_buffer_known_state_t state)
394 {
395  vlib_buffer_main_t *bm = &buffer_main;
396 
397  clib_spinlock_lock (&bm->buffer_known_hash_lockp);
398  hash_set (bm->buffer_known_hash, buffer_index, state);
399  clib_spinlock_unlock (&bm->buffer_known_hash_lockp);
400 }
401 
402 /* Validates sanity of a single buffer.
403  Returns format'ed vector with error message if any. */
404 u8 *vlib_validate_buffer (vlib_main_t * vm, u32 buffer_index,
405  uword follow_chain);
406 
407 always_inline u32
408 vlib_buffer_round_size (u32 size)
409 {
410  return round_pow2 (size, sizeof (vlib_buffer_t));
411 }
412 
413 always_inline vlib_buffer_free_list_index_t
414 vlib_buffer_get_free_list_index (vlib_buffer_t * b)
415 {
416  if (PREDICT_FALSE (b->flags & VLIB_BUFFER_NON_DEFAULT_FREELIST))
417  return b->free_list_index;
418 
419  return 0;
420 }
421 
422 always_inline void
423 vlib_buffer_set_free_list_index (vlib_buffer_t * b,
424  vlib_buffer_free_list_index_t index)
425 {
426  if (PREDICT_FALSE (index))
427  {
428  b->flags |= VLIB_BUFFER_NON_DEFAULT_FREELIST;
429  b->free_list_index = index;
430  }
431  else
432  b->flags &= ~VLIB_BUFFER_NON_DEFAULT_FREELIST;
433 }
434 
435 /** \brief Allocate buffers from specific freelist into supplied array
436 
437  @param vm - (vlib_main_t *) vlib main data structure pointer
438  @param buffers - (u32 * ) buffer index array
439  @param n_buffers - (u32) number of buffers requested
440  @return - (u32) number of buffers actually allocated, may be
441  less than the number requested or zero
442 */
443 always_inline u32
444 vlib_buffer_alloc_from_free_list (vlib_main_t * vm,
445  u32 * buffers,
446  u32 n_buffers,
447  vlib_buffer_free_list_index_t index)
448 {
449  vlib_buffer_main_t *bm = &buffer_main;
450  vlib_buffer_free_list_t *fl;
451  u32 *src;
452  uword len;
453 
454  ASSERT (bm->cb.vlib_buffer_fill_free_list_cb);
455 
456  fl = pool_elt_at_index (vm->buffer_free_list_pool, index);
457 
458  len = vec_len (fl->buffers);
459 
460  if (PREDICT_FALSE (len < n_buffers))
461  {
462  bm->cb.vlib_buffer_fill_free_list_cb (vm, fl, n_buffers);
463  if (PREDICT_FALSE ((len = vec_len (fl->buffers)) == 0))
464  return 0;
465 
466  /* even if the fill callback didn't manage to fully refill the free
467  list, we should give what we have */
468  n_buffers = clib_min (len, n_buffers);
469 
470  /* the following code is intentionally duplicated to allow the compiler
471  to optimize the fast path when n_buffers is a constant value */
472  src = fl->buffers + len - n_buffers;
473  clib_memcpy (buffers, src, n_buffers * sizeof (u32));
474  _vec_len (fl->buffers) -= n_buffers;
475 
476  /* Verify that buffers are known free. */
477  vlib_buffer_validate_alloc_free (vm, buffers, n_buffers,
478  VLIB_BUFFER_KNOWN_FREE);
479 
480  return n_buffers;
481  }
482 
483  src = fl->buffers + len - n_buffers;
484  clib_memcpy (buffers, src, n_buffers * sizeof (u32));
485  _vec_len (fl->buffers) -= n_buffers;
486 
487  /* Verify that buffers are known free. */
488  vlib_buffer_validate_alloc_free (vm, buffers, n_buffers,
489  VLIB_BUFFER_KNOWN_FREE);
490 
491  return n_buffers;
492 }
493 
494 /** \brief Allocate buffers into supplied array
495 
496  @param vm - (vlib_main_t *) vlib main data structure pointer
497  @param buffers - (u32 * ) buffer index array
498  @param n_buffers - (u32) number of buffers requested
499  @return - (u32) number of buffers actually allocated, may be
500  less than the number requested or zero
501 */
502 always_inline u32
503 vlib_buffer_alloc (vlib_main_t * vm, u32 * buffers, u32 n_buffers)
504 {
505  return vlib_buffer_alloc_from_free_list (vm, buffers, n_buffers,
506  VLIB_BUFFER_DEFAULT_FREE_LIST_INDEX);
507 }
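/* Editor's example (sketch): allocation can return fewer buffers than asked
 * for, so callers check the count and either proceed with what they got or
 * give the buffers back with vlib_buffer_free() (declared further below).
 *
 *   u32 bis[32];
 *   u32 n_alloc = vlib_buffer_alloc (vm, bis, 32);
 *   if (PREDICT_FALSE (n_alloc < 32))
 *     {
 *       vlib_buffer_free (vm, bis, n_alloc);
 *       n_alloc = 0;                      // treat a partial result as failure
 *     }
 */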
508 
509 /** \brief Allocate buffers into ring
510 
511  @param vm - (vlib_main_t *) vlib main data structure pointer
512  @param buffers - (u32 * ) buffer index ring
513  @param start - (u32) first slot in the ring
514  @param ring_size - (u32) ring size
515  @param n_buffers - (u32) number of buffers requested
516  @return - (u32) number of buffers actually allocated, may be
517  less than the number requested or zero
518 */
519 always_inline u32
520 vlib_buffer_alloc_to_ring (vlib_main_t * vm, u32 * ring, u32 start,
521  u32 ring_size, u32 n_buffers)
522 {
523  u32 n_alloc;
524 
525  ASSERT (n_buffers <= ring_size);
526 
527  if (PREDICT_TRUE (start + n_buffers <= ring_size))
528  return vlib_buffer_alloc (vm, ring + start, n_buffers);
529 
530  n_alloc = vlib_buffer_alloc (vm, ring + start, ring_size - start);
531 
532  if (PREDICT_TRUE (n_alloc == ring_size - start))
533  n_alloc += vlib_buffer_alloc (vm, ring, n_buffers - n_alloc);
534 
535  return n_alloc;
536 }
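/* Editor's example (sketch): refilling a fixed-size RX ring whose free slots
 * may wrap past the end of the ring array.  `ring`, `tail` and `n_free_slots`
 * are hypothetical caller state; the helper splits the allocation at the wrap.
 *
 *   u32 slot = tail & 255;                // ring of 256 entries
 *   u32 n = vlib_buffer_alloc_to_ring (vm, ring, slot, 256, n_free_slots);
 *   tail += n;
 */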
537 
538 /** \brief Free buffers
539  Frees the entire buffer chain for each buffer
540 
541  @param vm - (vlib_main_t *) vlib main data structure pointer
542  @param buffers - (u32 * ) buffer index array
543  @param n_buffers - (u32) number of buffers to free
544 
545 */
546 always_inline void
547 vlib_buffer_free (vlib_main_t * vm,
548  /* pointer to first buffer */
549  u32 * buffers,
550  /* number of buffers to free */
551  u32 n_buffers)
552 {
553  vlib_buffer_main_t *bm = &buffer_main;
554 
555  ASSERT (bm->cb.vlib_buffer_free_cb);
556 
557  return bm->cb.vlib_buffer_free_cb (vm, buffers, n_buffers);
558 }
559 
560 /** \brief Free buffers, does not free the buffer chain for each buffer
561 
562  @param vm - (vlib_main_t *) vlib main data structure pointer
563  @param buffers - (u32 * ) buffer index array
564  @param n_buffers - (u32) number of buffers to free
565 
566 */
567 always_inline void
568 vlib_buffer_free_no_next (vlib_main_t * vm,
569  /* pointer to first buffer */
570  u32 * buffers,
571  /* number of buffers to free */
572  u32 n_buffers)
573 {
574  vlib_buffer_main_t *bm = &buffer_main;
575 
576  ASSERT (bm->cb.vlib_buffer_free_no_next_cb);
577 
578  return bm->cb.vlib_buffer_free_no_next_cb (vm, buffers, n_buffers);
579 }
580 
581 /** \brief Free one buffer
582  Shorthand to free a single buffer chain.
583 
584  @param vm - (vlib_main_t *) vlib main data structure pointer
585  @param buffer_index - (u32) buffer index to free
586 */
587 always_inline void
588 vlib_buffer_free_one (vlib_main_t * vm, u32 buffer_index)
589 {
590  vlib_buffer_free (vm, &buffer_index, /* n_buffers */ 1);
591 }
592 
593 /** \brief Free buffers from ring
594 
595  @param vm - (vlib_main_t *) vlib main data structure pointer
596  @param buffers - (u32 * ) buffer index ring
597  @param start - (u32) first slot in the ring
598  @param ring_size - (u32) ring size
599  @param n_buffers - (u32) number of buffers
600 */
601 always_inline void
602 vlib_buffer_free_from_ring (vlib_main_t * vm, u32 * ring, u32 start,
603  u32 ring_size, u32 n_buffers)
604 {
605  ASSERT (n_buffers <= ring_size);
606 
607  if (PREDICT_TRUE (start + n_buffers <= ring_size))
608  {
609  vlib_buffer_free (vm, ring + start, n_buffers);
610  }
611  else
612  {
613  vlib_buffer_free (vm, ring + start, ring_size - start);
614  vlib_buffer_free (vm, ring, n_buffers - (ring_size - start));
615  }
616 }
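/* Editor's example (sketch): releasing `n_done` completed slots of a TX ring,
 * letting the helper above handle the wrap-around split.  `ring`, `head` and
 * `ring_size` (a power of two) are hypothetical caller state.
 *
 *   vlib_buffer_free_from_ring (vm, ring, head & (ring_size - 1),
 *                               ring_size, n_done);
 *   head += n_done;
 */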
617 
618 /** \brief Free buffers from ring without freeing tail buffers
619 
620  @param vm - (vlib_main_t *) vlib main data structure pointer
621  @param buffers - (u32 * ) buffer index ring
622  @param start - (u32) first slot in the ring
623  @param ring_size - (u32) ring size
624  @param n_buffers - (u32) number of buffers
625 */
626 always_inline void
627 vlib_buffer_free_from_ring_no_next (vlib_main_t * vm, u32 * ring, u32 start,
628  u32 ring_size, u32 n_buffers)
629 {
630  ASSERT (n_buffers <= ring_size);
631 
632  if (PREDICT_TRUE (start + n_buffers <= ring_size))
633  {
634  vlib_buffer_free_no_next (vm, ring + start, n_buffers);
635  }
636  else
637  {
638  vlib_buffer_free_no_next (vm, ring + start, ring_size - start);
639  vlib_buffer_free_no_next (vm, ring, n_buffers - (ring_size - start));
640  }
641 }
642 
643 /* Add/delete buffer free lists. */
644 vlib_buffer_free_list_index_t vlib_buffer_create_free_list (vlib_main_t * vm,
645  u32 n_data_bytes,
646  char *fmt, ...);
647 always_inline void
648 vlib_buffer_delete_free_list (vlib_main_t * vm,
649  vlib_buffer_free_list_index_t free_list_index)
650 {
651  vlib_buffer_main_t *bm = &buffer_main;
652 
653  ASSERT (bm->cb.vlib_buffer_delete_free_list_cb);
654 
655  bm->cb.vlib_buffer_delete_free_list_cb (vm, free_list_index);
656 }
657 
658 /* Make sure we have at least given number of unaligned buffers. */
659 void vlib_buffer_free_list_fill_unaligned (vlib_main_t * vm,
660  vlib_buffer_free_list_t *
661  free_list,
662  uword n_unaligned_buffers);
663 
664 always_inline vlib_buffer_free_list_t *
665 vlib_buffer_get_buffer_free_list (vlib_main_t * vm, vlib_buffer_t * b,
666  vlib_buffer_free_list_index_t * index)
667 {
668  vlib_buffer_free_list_index_t i;
669 
670  *index = i = vlib_buffer_get_free_list_index (b);
671  return pool_elt_at_index (vm->buffer_free_list_pool, i);
672 }
673 
674 always_inline vlib_buffer_free_list_t *
675 vlib_buffer_get_free_list (vlib_main_t * vm,
676  vlib_buffer_free_list_index_t free_list_index)
677 {
678  vlib_buffer_free_list_t *f;
679 
680  f = pool_elt_at_index (vm->buffer_free_list_pool, free_list_index);
681 
682  /* Sanity: indices must match. */
683  ASSERT (f->index == free_list_index);
684 
685  return f;
686 }
687 
688 always_inline u32
689 vlib_buffer_free_list_buffer_size (vlib_main_t * vm,
690  vlib_buffer_free_list_index_t index)
691 {
692  vlib_buffer_free_list_t *f = vlib_buffer_get_free_list (vm, index);
693  return f->n_data_bytes;
694 }
695 
696 void vlib_aligned_memcpy (void *_dst, void *_src, int n_bytes);
697 
698 /* Reasonably fast buffer copy routine. */
699 always_inline void
700 vlib_copy_buffers (u32 * dst, u32 * src, u32 n)
701 {
702  while (n >= 4)
703  {
704  dst[0] = src[0];
705  dst[1] = src[1];
706  dst[2] = src[2];
707  dst[3] = src[3];
708  dst += 4;
709  src += 4;
710  n -= 4;
711  }
712  while (n > 0)
713  {
714  dst[0] = src[0];
715  dst += 1;
716  src += 1;
717  n -= 1;
718  }
719 }
720 
721 /* Append given data to end of buffer, possibly allocating new buffers. */
722 u32 vlib_buffer_add_data (vlib_main_t * vm,
723  vlib_buffer_free_list_index_t free_list_index,
724  u32 buffer_index, void *data, u32 n_data_bytes);
725 
726 /* duplicate all buffers in chain */
727 always_inline vlib_buffer_t *
728 vlib_buffer_copy (vlib_main_t * vm, vlib_buffer_t * b)
729 {
730  vlib_buffer_t *s, *d, *fd;
731  uword n_alloc, n_buffers = 1;
732  u32 flag_mask = VLIB_BUFFER_NEXT_PRESENT | VLIB_BUFFER_TOTAL_LENGTH_VALID;
733  int i;
734 
735  s = b;
736  while (s->flags & VLIB_BUFFER_NEXT_PRESENT)
737  {
738  n_buffers++;
739  s = vlib_get_buffer (vm, s->next_buffer);
740  }
741  u32 new_buffers[n_buffers];
742 
743  n_alloc = vlib_buffer_alloc (vm, new_buffers, n_buffers);
744 
745  /* No guarantee that we'll get all the buffers we asked for */
746  if (PREDICT_FALSE (n_alloc < n_buffers))
747  {
748  if (n_alloc > 0)
749  vlib_buffer_free (vm, new_buffers, n_alloc);
750  return 0;
751  }
752 
753  /* 1st segment */
754  s = b;
755  fd = d = vlib_get_buffer (vm, new_buffers[0]);
756  d->current_data = s->current_data;
757  d->current_length = s->current_length;
758  d->flags = s->flags & flag_mask;
759  d->total_length_not_including_first_buffer =
760  s->total_length_not_including_first_buffer;
761  clib_memcpy (d->opaque, s->opaque, sizeof (s->opaque));
762  clib_memcpy (d->opaque2, s->opaque2, sizeof (s->opaque2));
763  clib_memcpy (vlib_buffer_get_current (d), vlib_buffer_get_current (s),
764  s->current_length);
765 
766  /* next segments */
767  for (i = 1; i < n_buffers; i++)
768  {
769  /* previous */
770  d->next_buffer = new_buffers[i];
771  /* current */
772  s = vlib_get_buffer (vm, s->next_buffer);
773  d = vlib_get_buffer (vm, new_buffers[i]);
774  d->current_data = s->current_data;
775  d->current_length = s->current_length;
776  clib_memcpy (vlib_buffer_get_current (d),
777  vlib_buffer_get_current (s), s->current_length);
778  d->flags = s->flags & flag_mask;
779  }
780 
781  return fd;
782 }
783 
784 /** \brief Create a maximum of 256 clones of buffer and store them
785  in the supplied array
786 
787  @param vm - (vlib_main_t *) vlib main data structure pointer
788  @param src_buffer - (u32) source buffer index
789  @param buffers - (u32 * ) buffer index array
790  @param n_buffers - (u16) number of buffer clones requested (<=256)
791  @param head_end_offset - (u16) offset relative to current position
792  where packet head ends
793  @return - (u16) number of buffers actually cloned, may be
794  less than the number requested or zero
795 */
796 always_inline u16
797 vlib_buffer_clone_256 (vlib_main_t * vm, u32 src_buffer, u32 * buffers,
798  u16 n_buffers, u16 head_end_offset)
799 {
800  u16 i;
801  vlib_buffer_t *s = vlib_get_buffer (vm, src_buffer);
802 
803  ASSERT (s->n_add_refs == 0);
804  ASSERT (n_buffers);
805  ASSERT (n_buffers <= 256);
806 
807  if (s->current_length <= head_end_offset + CLIB_CACHE_LINE_BYTES * 2)
808  {
809  buffers[0] = src_buffer;
810  for (i = 1; i < n_buffers; i++)
811  {
812  vlib_buffer_t *d;
813  d = vlib_buffer_copy (vm, s);
814  if (d == 0)
815  return i;
816  buffers[i] = vlib_get_buffer_index (vm, d);
817 
818  }
819  return n_buffers;
820  }
821 
822  if (PREDICT_FALSE (n_buffers == 1))
823  {
824  buffers[0] = src_buffer;
825  return 1;
826  }
827 
828  n_buffers = vlib_buffer_alloc_from_free_list (vm, buffers, n_buffers,
829  vlib_buffer_get_free_list_index
830  (s));
831 
832  for (i = 0; i < n_buffers; i++)
833  {
834  vlib_buffer_t *d = vlib_get_buffer (vm, buffers[i]);
835  d->current_data = s->current_data;
836  d->current_length = head_end_offset;
837  vlib_buffer_set_free_list_index (d,
838  vlib_buffer_get_free_list_index (s));
839 
840  d->total_length_not_including_first_buffer = s->current_length -
841  head_end_offset;
842  if (PREDICT_FALSE (s->flags & VLIB_BUFFER_NEXT_PRESENT))
843  {
844  d->total_length_not_including_first_buffer +=
845  s->total_length_not_including_first_buffer;
846  }
847  d->flags = s->flags | VLIB_BUFFER_NEXT_PRESENT;
848  d->flags &= ~VLIB_BUFFER_EXT_HDR_VALID;
849  clib_memcpy (d->opaque, s->opaque, sizeof (s->opaque));
850  clib_memcpy (d->opaque2, s->opaque2, sizeof (s->opaque2));
851  clib_memcpy (vlib_buffer_get_current (d), vlib_buffer_get_current (s),
852  head_end_offset);
853  d->next_buffer = src_buffer;
854  }
855  vlib_buffer_advance (s, head_end_offset);
856  s->n_add_refs = n_buffers - 1;
857  while (s->flags & VLIB_BUFFER_NEXT_PRESENT)
858  {
859  s = vlib_get_buffer (vm, s->next_buffer);
860  s->n_add_refs = n_buffers - 1;
861  }
862 
863  return n_buffers;
864 }
865 
866 /** \brief Create multiple clones of buffer and store them
867  in the supplied array
868 
869  @param vm - (vlib_main_t *) vlib main data structure pointer
870  @param src_buffer - (u32) source buffer index
871  @param buffers - (u32 * ) buffer index array
872  @param n_buffers - (u16) number of buffer clones requested
873  @param head_end_offset - (u16) offset relative to current position
874  where packet head ends
875  @return - (u16) number of buffers actually cloned, may be
876  less than the number requested or zero
877 */
878 always_inline u16
879 vlib_buffer_clone (vlib_main_t * vm, u32 src_buffer, u32 * buffers,
880  u16 n_buffers, u16 head_end_offset)
881 {
882  vlib_buffer_t *s = vlib_get_buffer (vm, src_buffer);
883  u16 n_cloned = 0;
884 
885  while (n_buffers > 256)
886  {
887  vlib_buffer_t *copy;
888  copy = vlib_buffer_copy (vm, s);
889  n_cloned += vlib_buffer_clone_256 (vm,
890  vlib_get_buffer_index (vm, copy),
891  (buffers + n_cloned),
892  256, head_end_offset);
893  n_buffers -= 256;
894  }
895  n_cloned += vlib_buffer_clone_256 (vm, src_buffer,
896  buffers + n_cloned,
897  n_buffers, head_end_offset);
898 
899  return n_cloned;
900 }
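/* Editor's example (sketch): replicating one packet to several destinations,
 * e.g. for multicast.  Each clone shares the tail of the source chain, so only
 * the private head area (the first head_end_offset bytes) should be rewritten
 * per clone afterwards.  `bi0` and `head_end_offset` come from the caller.
 *
 *   u32 clones[8];
 *   u16 i, n = vlib_buffer_clone (vm, bi0, clones, 8, head_end_offset);
 *   for (i = 0; i < n; i++)
 *     {
 *       vlib_buffer_t *c = vlib_get_buffer (vm, clones[i]);
 *       vlib_buffer_advance (c, -4);   // e.g. prepend a per-copy header
 *     }
 */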
901 
902 /** \brief Attach cloned tail to the buffer
903 
904  @param vm - (vlib_main_t *) vlib main data structure pointer
905  @param head - (vlib_buffer_t *) head buffer
906  @param tail - (vlib_buffer_t *) tail buffer to clone and attach to head
907 */
908 
909 always_inline void
910 vlib_buffer_attach_clone (vlib_main_t * vm, vlib_buffer_t * head,
911  vlib_buffer_t * tail)
912 {
913  ASSERT ((head->flags & VLIB_BUFFER_NEXT_PRESENT) == 0);
914  ASSERT (vlib_buffer_get_free_list_index (head) ==
915  vlib_buffer_get_free_list_index (tail));
916 
917  head->flags |= VLIB_BUFFER_NEXT_PRESENT;
918  head->flags &= ~VLIB_BUFFER_TOTAL_LENGTH_VALID;
919  head->flags &= ~VLIB_BUFFER_EXT_HDR_VALID;
920  head->flags |= (tail->flags & VLIB_BUFFER_TOTAL_LENGTH_VALID);
921  head->next_buffer = vlib_get_buffer_index (vm, tail);
922  head->total_length_not_including_first_buffer = tail->current_length +
923  tail->total_length_not_including_first_buffer;
924 
925 next_segment:
926  __sync_add_and_fetch (&tail->n_add_refs, 1);
927 
928  if (tail->flags & VLIB_BUFFER_NEXT_PRESENT)
929  {
930  tail = vlib_get_buffer (vm, tail->next_buffer);
931  goto next_segment;
932  }
933 }
934 
935 /* Initializes the buffer as an empty packet with no chained buffers. */
936 always_inline void
937 vlib_buffer_chain_init (vlib_buffer_t * first)
938 {
939  first->total_length_not_including_first_buffer = 0;
940  first->current_length = 0;
941  first->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
942  first->flags |= VLIB_BUFFER_TOTAL_LENGTH_VALID;
943 }
944 
945 /* The provided next_bi buffer index is appended to the end of the packet. */
946 always_inline vlib_buffer_t *
947 vlib_buffer_chain_buffer (vlib_main_t * vm,
948  vlib_buffer_t * first,
949  vlib_buffer_t * last, u32 next_bi)
950 {
951  vlib_buffer_t *next_buffer = vlib_get_buffer (vm, next_bi);
952  last->next_buffer = next_bi;
953  last->flags |= VLIB_BUFFER_NEXT_PRESENT;
954  next_buffer->current_length = 0;
955  next_buffer->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
956  return next_buffer;
957 }
958 
959 /* Increases or decreases the packet length.
960  * It does not allocate or deallocate new buffers.
961  * Therefore, the added length must be compatible
962  * with the last buffer. */
963 always_inline void
964 vlib_buffer_chain_increase_length (vlib_buffer_t * first,
965  vlib_buffer_t * last, i32 len)
966 {
967  last->current_length += len;
968  if (first != last)
969  first->total_length_not_including_first_buffer += len;
970 }
971 
972 /* Copy data to the end of the packet and increases its length.
973  * It does not allocate new buffers.
974  * Returns the number of copied bytes. */
975 always_inline u16
976 vlib_buffer_chain_append_data (vlib_main_t * vm,
977  vlib_buffer_free_list_index_t free_list_index,
978  vlib_buffer_t * first,
979  vlib_buffer_t * last, void *data, u16 data_len)
980 {
981  u32 n_buffer_bytes =
982  vlib_buffer_free_list_buffer_size (vm, free_list_index);
983  ASSERT (n_buffer_bytes >= last->current_length + last->current_data);
984  u16 len = clib_min (data_len,
985  n_buffer_bytes - last->current_length -
986  last->current_data);
987  clib_memcpy (vlib_buffer_get_current (last) + last->current_length, data,
988  len);
989  vlib_buffer_chain_increase_length (first, last, len);
990  return len;
991 }
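/* Editor's example (sketch): building a two-buffer chain by hand with the
 * helpers above, assuming `bi0` and `bi1` were just allocated and `payload` /
 * `payload_len` are caller-provided.
 *
 *   vlib_buffer_t *first = vlib_get_buffer (vm, bi0);
 *   vlib_buffer_chain_init (first);
 *   vlib_buffer_t *last = vlib_buffer_chain_buffer (vm, first, first, bi1);
 *   u16 copied =
 *     vlib_buffer_chain_append_data (vm, VLIB_BUFFER_DEFAULT_FREE_LIST_INDEX,
 *                                    first, last, payload, payload_len);
 */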
992 
993 /* Copy data to the end of the packet and increases its length.
994  * Allocates additional buffers from the free list if necessary.
995  * Returns the number of copied bytes.
996  * 'last' value is modified whenever new buffers are allocated and
997  * chained and points to the last buffer in the chain. */
998 u16
999 vlib_buffer_chain_append_data_with_alloc (vlib_main_t * vm,
1000  vlib_buffer_free_list_index_t
1001  free_list_index,
1002  vlib_buffer_t * first,
1003  vlib_buffer_t ** last, void *data,
1004  u16 data_len);
1005 void vlib_buffer_chain_validate (vlib_main_t * vm, vlib_buffer_t * first);
1006 
1007 format_function_t format_vlib_buffer, format_vlib_buffer_and_data,
1008  format_vlib_buffer_contents;
1009 
1010 typedef struct
1011 {
1012  /* Vector of packet data. */
1013  u8 *packet_data;
1014 
1015  /* Number of buffers to allocate in each call to allocator. */
1016  u32 min_n_buffers_each_alloc;
1017 
1018  /* Buffer free list for this template. */
1019  vlib_buffer_free_list_index_t free_list_index;
1020 
1021  u32 *free_buffers;
1022 } vlib_packet_template_t;
1023 
1024 void vlib_packet_template_get_packet_helper (vlib_main_t * vm,
1025  vlib_packet_template_t * t);
1026 
1027 void vlib_packet_template_init (vlib_main_t * vm,
1028  vlib_packet_template_t * t,
1029  void *packet_data,
1030  uword n_packet_data_bytes,
1031  uword min_n_buffers_each_alloc,
1032  char *fmt, ...);
1033 
1034 void *vlib_packet_template_get_packet (vlib_main_t * vm,
1035  vlib_packet_template_t * t,
1036  u32 * bi_result);
1037 
1038 always_inline void
1039 vlib_packet_template_free (vlib_main_t * vm, vlib_packet_template_t * t)
1040 {
1041  vec_free (t->packet_data);
1042 }
1043 
1044 always_inline u32
1045 unserialize_vlib_buffer_n_bytes (serialize_main_t * m)
1046 {
1047  serialize_stream_t *s = &m->stream;
1048  vlib_serialize_buffer_main_t *sm
1049  = uword_to_pointer (m->stream.data_function_opaque,
1050  vlib_serialize_buffer_main_t *);
1051  vlib_main_t *vm = sm->vlib_main;
1052  u32 n, *f;
1053 
1054  n = s->n_buffer_bytes - s->current_buffer_index;
1055  if (sm->last_buffer != ~0)
1056  {
1057  vlib_buffer_t *b = vlib_get_buffer (vm, sm->last_buffer);
1058  while (b->flags & VLIB_BUFFER_NEXT_PRESENT)
1059  {
1060  b = vlib_get_buffer (vm, b->next_buffer);
1061  n += b->current_length;
1062  }
1063  }
1064 
1065  /* *INDENT-OFF* */
1066  clib_fifo_foreach (f, sm->rx.buffer_fifo, ({
1067  n += vlib_buffer_index_length_in_chain (vm, f[0]);
1068  }));
1069 /* *INDENT-ON* */
1070 
1071  return n;
1072 }
1073 
1074 /* Set a buffer quickly into "uninitialized" state. We want this to
1075  be extremely cheap and arrange for all fields that need to be
1076  initialized to be in the first 128 bits of the buffer. */
1077 always_inline void
1078 vlib_buffer_init_for_free_list (vlib_buffer_t * dst,
1079  vlib_buffer_free_list_t * fl)
1080 {
1081  vlib_buffer_t *src = &fl->buffer_init_template;
1082 
1083  /* Make sure vlib_buffer_t is cacheline aligned and sized */
1084  ASSERT (STRUCT_OFFSET_OF (vlib_buffer_t, cacheline0) == 0);
1085  ASSERT (STRUCT_OFFSET_OF (vlib_buffer_t, cacheline1) ==
1086  CLIB_CACHE_LINE_BYTES);
1087  ASSERT (STRUCT_OFFSET_OF (vlib_buffer_t, cacheline2) ==
1088  CLIB_CACHE_LINE_BYTES * 2);
1089 
1090  /* Make sure buffer template is sane. */
1091  ASSERT (fl->index == vlib_buffer_get_free_list_index (src));
1092 
1093  clib_memcpy (STRUCT_MARK_PTR (dst, template_start),
1094  STRUCT_MARK_PTR (src, template_start),
1095  STRUCT_OFFSET_OF (vlib_buffer_t, template_end) -
1096  STRUCT_OFFSET_OF (vlib_buffer_t, template_start));
1097 
1098  /* Not in the first 16 octets. */
1099  dst->n_add_refs = src->n_add_refs;
1100  vlib_buffer_set_free_list_index (dst, fl->index);
1101 
1102  /* Make sure it really worked. */
1103 #define _(f) ASSERT (dst->f == src->f);
1104  _(current_data);
1105  _(current_length);
1106  _(flags);
1107 #undef _
1108  /* ASSERT (dst->total_length_not_including_first_buffer == 0); */
1109  /* total_length_not_including_first_buffer is not in the template anymore
1110  * so it may actually not zeroed for some buffers. One option is to
1111  * uncomment the line lower (comes at a cost), the other, is to just not
1112  * care */
1113  /* dst->total_length_not_including_first_buffer = 0; */
1114  ASSERT (dst->n_add_refs == 0);
1115 }
1116 
1117 always_inline void
1118 vlib_buffer_add_to_free_list (vlib_main_t * vm,
1119  vlib_buffer_free_list_t * f,
1120  u32 buffer_index, u8 do_init)
1121 {
1122  vlib_buffer_pool_t *bp = vlib_buffer_pool_get (f->buffer_pool_index);
1123  vlib_buffer_t *b;
1124  b = vlib_get_buffer (vm, buffer_index);
1125  if (PREDICT_TRUE (do_init))
1126  vlib_buffer_init_for_free_list (b, f);
1127  vec_add1_aligned (f->buffers, buffer_index, CLIB_CACHE_LINE_BYTES);
1128 
1129  if (vec_len (f->buffers) > 4 * VLIB_FRAME_SIZE)
1130  {
1131  clib_spinlock_lock (&bp->lock);
1132  /* keep last stored buffers, as they are more likely hot in the cache */
1133  vec_add_aligned (bp->buffers, f->buffers, VLIB_FRAME_SIZE,
1134  CLIB_CACHE_LINE_BYTES);
1135  vec_delete (f->buffers, VLIB_FRAME_SIZE, 0);
1136  f->n_alloc -= VLIB_FRAME_SIZE;
1137  clib_spinlock_unlock (&bp->lock);
1138  }
1139 }
1140 
1141 #if CLIB_DEBUG > 0
1142 extern u32 *vlib_buffer_state_validation_lock;
1143 extern uword *vlib_buffer_state_validation_hash;
1144 extern void *vlib_buffer_state_heap;
1145 #endif
1146 
1147 static inline void
1148 vlib_validate_buffer_in_use (vlib_buffer_t * b, u32 expected)
1149 {
1150 #if CLIB_DEBUG > 0
1151  uword *p;
1152  void *oldheap;
1153 
1154  oldheap = clib_mem_set_heap (vlib_buffer_state_heap);
1155 
1156  while (__sync_lock_test_and_set (vlib_buffer_state_validation_lock, 1))
1157  ;
1158 
1159  p = hash_get (vlib_buffer_state_validation_hash, b);
1160 
1161  /* If we don't know about b, declare it to be in the expected state */
1162  if (!p)
1163  {
1164  hash_set (vlib_buffer_state_validation_hash, b, expected);
1165  goto out;
1166  }
1167 
1168  if (p[0] != expected)
1169  {
1170  void cj_stop (void);
1171  u32 bi;
1172  vlib_main_t *vm = &vlib_global_main;
1173 
1174  cj_stop ();
1175 
1176  bi = vlib_get_buffer_index (vm, b);
1177 
1178  clib_mem_set_heap (oldheap);
1179  clib_warning ("%.6f buffer %llx (%d): %s, not %s",
1180  vlib_time_now (vm), bi,
1181  p[0] ? "busy" : "free", expected ? "busy" : "free");
1182  os_panic ();
1183  }
1184 out:
1185  CLIB_MEMORY_BARRIER ();
1186  *vlib_buffer_state_validation_lock = 0;
1187  clib_mem_set_heap (oldheap);
1188 #endif
1189 }
1190 
1191 static inline void
1192 vlib_validate_buffer_set_in_use (vlib_buffer_t * b, u32 expected)
1193 {
1194 #if CLIB_DEBUG > 0
1195  void *oldheap;
1196 
1197  oldheap = clib_mem_set_heap (vlib_buffer_state_heap);
1198 
1199  while (__sync_lock_test_and_set (vlib_buffer_state_validation_lock, 1))
1200  ;
1201 
1202  hash_set (vlib_buffer_state_validation_hash, b, expected);
1203 
1204  CLIB_MEMORY_BARRIER ();
1205  *vlib_buffer_state_validation_lock = 0;
1206  clib_mem_set_heap (oldheap);
1207 #endif
1208 }
1209 
1210 /** minimum data size of first buffer in a buffer chain */
1211 #define VLIB_BUFFER_CHAIN_MIN_FIRST_DATA_SIZE (256)
1212 
1213 /**
1214  * @brief compress buffer chain in a way where the first buffer is at least
1215  * VLIB_BUFFER_CHAIN_MIN_FIRST_DATA_SIZE long
1216  *
1217  * @param[in] vm - vlib_main
1218  * @param[in,out] first - first buffer in chain
1219  * @param[in,out] discard_vector - vector of buffer indexes which were removed
1220  * from the chain
1221  */
1222 always_inline void
1223 vlib_buffer_chain_compress (vlib_main_t * vm,
1224  vlib_buffer_t * first, u32 ** discard_vector)
1225 {
1226  if (first->current_length >= VLIB_BUFFER_CHAIN_MIN_FIRST_DATA_SIZE ||
1227  !(first->flags & VLIB_BUFFER_NEXT_PRESENT))
1228  {
1229  /* this is already big enough or not a chain */
1230  return;
1231  }
1232  /* probe free list to find allocated buffer size to avoid overfill */
1233  vlib_buffer_free_list_index_t index;
1234  vlib_buffer_free_list_t *free_list =
1235  vlib_buffer_get_buffer_free_list (vm, first, &index);
1236 
1237  u32 want_first_size = clib_min (VLIB_BUFFER_CHAIN_MIN_FIRST_DATA_SIZE,
1238  free_list->n_data_bytes -
1239  first->current_data);
1240  do
1241  {
1242  vlib_buffer_t *second = vlib_get_buffer (vm, first->next_buffer);
1243  u32 need = want_first_size - first->current_length;
1244  u32 amount_to_copy = clib_min (need, second->current_length);
1245  clib_memcpy (((u8 *) vlib_buffer_get_current (first)) +
1246  first->current_length,
1247  vlib_buffer_get_current (second), amount_to_copy);
1248  first->current_length += amount_to_copy;
1249  vlib_buffer_advance (second, amount_to_copy);
1250  if (first->flags & VLIB_BUFFER_TOTAL_LENGTH_VALID)
1251  {
1252  first->total_length_not_including_first_buffer -= amount_to_copy;
1253  }
1254  if (!second->current_length)
1255  {
1256  vec_add1 (*discard_vector, first->next_buffer);
1257  if (second->flags & VLIB_BUFFER_NEXT_PRESENT)
1258  {
1259  first->next_buffer = second->next_buffer;
1260  }
1261  else
1262  {
1263  first->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
1264  }
1265  second->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
1266  }
1267  }
1268  while ((first->current_length < want_first_size) &&
1269  (first->flags & VLIB_BUFFER_NEXT_PRESENT));
1270 }
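/* Editor's example (sketch): compressing a freshly assembled chain and then
 * freeing whatever buffers became empty in the process.  `first` is the head
 * buffer of the chain; the discard vector is a vppinfra vector.
 *
 *   u32 *discard = 0;
 *   vlib_buffer_chain_compress (vm, first, &discard);
 *   if (vec_len (discard))
 *     vlib_buffer_free (vm, discard, vec_len (discard));
 *   vec_free (discard);
 */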
1271 
1272 always_inline u32
1273 vlib_buffer_space_left_at_end (vlib_main_t * vm, vlib_buffer_t * b)
1274 {
1275  return b->data + VLIB_BUFFER_DATA_SIZE -
1276  ((u8 *) vlib_buffer_get_current (b) + b->current_length);
1277 }
1278 
1279 always_inline u32
1280 vlib_buffer_chain_linearize (vlib_main_t * vm, vlib_buffer_t * b)
1281 {
1282  vlib_buffer_t *db = b, *sb, *first = b;
1283  int is_cloned = 0;
1284  u32 bytes_left = 0, data_size;
1285  u16 src_left, dst_left, n_buffers = 1;
1286  u8 *dp, *sp;
1287  u32 to_free = 0;
1288 
1289  if (PREDICT_TRUE ((b->flags & VLIB_BUFFER_NEXT_PRESENT) == 0))
1290  return 1;
1291 
1292  data_size = VLIB_BUFFER_DATA_SIZE;
1293 
1294  dst_left = vlib_buffer_space_left_at_end (vm, b);
1295 
1296  while (b->flags & VLIB_BUFFER_NEXT_PRESENT)
1297  {
1298  b = vlib_get_buffer (vm, b->next_buffer);
1299  if (b->n_add_refs > 0)
1300  is_cloned = 1;
1301  bytes_left += b->current_length;
1302  n_buffers++;
1303  }
1304 
1305  /* if buffer is cloned, create completely new chain - unless everything fits
1306  * into one buffer */
1307  if (is_cloned && bytes_left >= dst_left)
1308  {
1309  u32 len = 0;
1310  u32 space_needed = bytes_left - dst_left;
1311  u32 tail;
1312 
1313  if (vlib_buffer_alloc (vm, &tail, 1) == 0)
1314  return 0;
1315 
1316  ++n_buffers;
1317  len += data_size;
1318  b = vlib_get_buffer (vm, tail);
1319 
1320  while (len < space_needed)
1321  {
1322  u32 bi;
1323  if (vlib_buffer_alloc (vm, &bi, 1) == 0)
1324  {
1325  vlib_buffer_free_one (vm, tail);
1326  return 0;
1327  }
1328  b->flags = VLIB_BUFFER_NEXT_PRESENT;
1329  b->next_buffer = bi;
1330  b = vlib_get_buffer (vm, bi);
1331  len += data_size;
1332  n_buffers++;
1333  }
1334  sb = vlib_get_buffer (vm, first->next_buffer);
1335  to_free = first->next_buffer;
1336  first->next_buffer = tail;
1337  }
1338  else
1339  sb = vlib_get_buffer (vm, first->next_buffer);
1340 
1341  src_left = sb->current_length;
1342  sp = vlib_buffer_get_current (sb);
1343  dp = vlib_buffer_get_tail (db);
1344 
1345  while (bytes_left)
1346  {
1347  u16 bytes_to_copy;
1348 
1349  if (dst_left == 0)
1350  {
1351  if (db != first)
1352  db->current_data = 0;
1353  db->current_length = dp - (u8 *) vlib_buffer_get_current (db);
1354  ASSERT (db->flags & VLIB_BUFFER_NEXT_PRESENT);
1355  db = vlib_get_buffer (vm, db->next_buffer);
1356  dst_left = data_size;
1357  dp = db->data;
1358  }
1359 
1360  while (src_left == 0)
1361  {
1362  ASSERT (sb->flags & VLIB_BUFFER_NEXT_PRESENT);
1363  sb = vlib_get_buffer (vm, sb->next_buffer);
1364  src_left = sb->current_length;
1365  sp = vlib_buffer_get_current (sb);
1366  }
1367 
1368  bytes_to_copy = clib_min (dst_left, src_left);
1369 
1370  if (dp != sp)
1371  {
1372  if (sb == db)
1373  bytes_to_copy = clib_min (bytes_to_copy, sp - dp);
1374 
1375  clib_memcpy (dp, sp, bytes_to_copy);
1376  }
1377 
1378  src_left -= bytes_to_copy;
1379  dst_left -= bytes_to_copy;
1380  dp += bytes_to_copy;
1381  sp += bytes_to_copy;
1382  bytes_left -= bytes_to_copy;
1383  }
1384  if (db != first)
1385  db->current_data = 0;
1386  db->current_length = dp - (u8 *) vlib_buffer_get_current (db);
1387 
1388  if (is_cloned && to_free)
1389  vlib_buffer_free_one (vm, to_free);
1390  else
1391  {
1392  if (db->flags & VLIB_BUFFER_NEXT_PRESENT)
1393  vlib_buffer_free_one (vm, db->next_buffer);
1394  db->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
1395  b = first;
1396  n_buffers = 1;
1397  while (b->flags & VLIB_BUFFER_NEXT_PRESENT)
1398  {
1399  b = vlib_get_buffer (vm, b->next_buffer);
1400  ++n_buffers;
1401  }
1402  }
1403 
1404  first->flags &= ~VLIB_BUFFER_TOTAL_LENGTH_VALID;
1405 
1406  return n_buffers;
1407 }
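/* Editor's example (sketch): forcing a contiguous packet before handing it to
 * code that cannot walk buffer chains.  A return value of 0 means the extra
 * buffer allocation failed.  `b0` and `error` are caller state.
 *
 *   if (PREDICT_FALSE (b0->flags & VLIB_BUFFER_NEXT_PRESENT))
 *     {
 *       u32 n_bufs = vlib_buffer_chain_linearize (vm, b0);
 *       if (n_bufs == 0)
 *         error = 1;                      // allocation failed, drop the packet
 *     }
 */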
1408 
1409 #endif /* included_vlib_buffer_funcs_h */
1410 
1411 /*
1412  * fd.io coding-style-patch-verification: ON
1413  *
1414  * Local Variables:
1415  * eval: (c-set-style "gnu")
1416  * End:
1417  */