FD.io VPP v20.09-64-g4f7b92f0a
Vector Packet Processing
buffer_funcs.h
1 /*
2  * Copyright (c) 2015 Cisco and/or its affiliates.
3  * Licensed under the Apache License, Version 2.0 (the "License");
4  * you may not use this file except in compliance with the License.
5  * You may obtain a copy of the License at:
6  *
7  * http://www.apache.org/licenses/LICENSE-2.0
8  *
9  * Unless required by applicable law or agreed to in writing, software
10  * distributed under the License is distributed on an "AS IS" BASIS,
11  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12  * See the License for the specific language governing permissions and
13  * limitations under the License.
14  */
15 /*
16  * buffer_funcs.h: VLIB buffer related functions/inlines
17  *
18  * Copyright (c) 2008 Eliot Dresselhaus
19  *
20  * Permission is hereby granted, free of charge, to any person obtaining
21  * a copy of this software and associated documentation files (the
22  * "Software"), to deal in the Software without restriction, including
23  * without limitation the rights to use, copy, modify, merge, publish,
24  * distribute, sublicense, and/or sell copies of the Software, and to
25  * permit persons to whom the Software is furnished to do so, subject to
26  * the following conditions:
27  *
28  * The above copyright notice and this permission notice shall be
29  * included in all copies or substantial portions of the Software.
30  *
31  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
32  * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
33  * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
34  * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
35  * LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
36  * OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
37  * WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
38  */
39 
40 #ifndef included_vlib_buffer_funcs_h
41 #define included_vlib_buffer_funcs_h
42 
43 #include <vppinfra/hash.h>
44 #include <vppinfra/fifo.h>
45 #include <vlib/buffer.h>
46 #include <vlib/physmem_funcs.h>
47 #include <vlib/main.h>
48 #include <vlib/node.h>
49 
50 /** \file
51  vlib buffer access methods.
52 */
53 
54 always_inline void
55 vlib_buffer_validate (vlib_main_t * vm, vlib_buffer_t * b)
56 {
57  vlib_buffer_main_t *bm = vm->buffer_main;
58  vlib_buffer_pool_t *bp;
59 
60  /* reference count in allocated buffer always must be 1 or higher */
61  ASSERT (b->ref_count > 0);
62 
63  /* verify that buffer pool index is valid */
64  bp = vec_elt_at_index (bm->buffer_pools, b->buffer_pool_index);
65  ASSERT (pointer_to_uword (b) >= bp->start);
66  ASSERT (pointer_to_uword (b) < bp->start + bp->size -
67  (bp->data_size + sizeof (vlib_buffer_t)));
68 }
69 
70 always_inline void *
71 vlib_buffer_ptr_from_index (uword buffer_mem_start, u32 buffer_index,
72  uword offset)
73 {
74  offset += ((uword) buffer_index) << CLIB_LOG2_CACHE_LINE_BYTES;
75  return uword_to_pointer (buffer_mem_start + offset, vlib_buffer_t *);
76 }
77 
78 /** \brief Translate buffer index into buffer pointer
79 
80  @param vm - (vlib_main_t *) vlib main data structure pointer
81  @param buffer_index - (u32) buffer index
82  @return - (vlib_buffer_t *) buffer pointer
83 */
84 always_inline vlib_buffer_t *
85 vlib_get_buffer (vlib_main_t * vm, u32 buffer_index)
86 {
87  vlib_buffer_main_t *bm = vm->buffer_main;
88  vlib_buffer_t *b;
89 
90  b = vlib_buffer_ptr_from_index (bm->buffer_mem_start, buffer_index, 0);
91  vlib_buffer_validate (vm, b);
92  return b;
93 }
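/* Example (editor's sketch, not part of the original header): translating a
 * buffer index carried in a frame into a metadata pointer, assuming a valid
 * vlib_main_t *vm and a caller-owned index bi:
 *
 *   vlib_buffer_t *b = vlib_get_buffer (vm, bi);
 *   u8 *payload = vlib_buffer_get_current (b);
 *   u16 len = b->current_length;
 */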
94 
95 static_always_inline u32
96 vlib_buffer_get_default_data_size (vlib_main_t * vm)
97 {
98  return vm->buffer_main->default_data_size;
99 }
100 
101 static_always_inline void
102 vlib_buffer_copy_indices (u32 * dst, u32 * src, u32 n_indices)
103 {
104 #if defined(CLIB_HAVE_VEC512)
105  while (n_indices >= 16)
106  {
107  u32x16_store_unaligned (u32x16_load_unaligned (src), dst);
108  dst += 16;
109  src += 16;
110  n_indices -= 16;
111  }
112 #endif
113 
114 #if defined(CLIB_HAVE_VEC256)
115  while (n_indices >= 8)
116  {
117  u32x8_store_unaligned (u32x8_load_unaligned (src), dst);
118  dst += 8;
119  src += 8;
120  n_indices -= 8;
121  }
122 #endif
123 
124 #if defined(CLIB_HAVE_VEC128)
125  while (n_indices >= 4)
126  {
127  u32x4_store_unaligned (u32x4_load_unaligned (src), dst);
128  dst += 4;
129  src += 4;
130  n_indices -= 4;
131  }
132 #endif
133 
134  while (n_indices)
135  {
136  dst[0] = src[0];
137  dst += 1;
138  src += 1;
139  n_indices -= 1;
140  }
141 }
142 
143 always_inline void
144 vlib_buffer_copy_indices_from_ring (u32 * dst, u32 * ring, u32 start,
145  u32 ring_size, u32 n_buffers)
146 {
147  ASSERT (n_buffers <= ring_size);
148 
149  if (PREDICT_TRUE (start + n_buffers <= ring_size))
150  {
151  vlib_buffer_copy_indices (dst, ring + start, n_buffers);
152  }
153  else
154  {
155  u32 n = ring_size - start;
156  vlib_buffer_copy_indices (dst, ring + start, n);
157  vlib_buffer_copy_indices (dst + n, ring, n_buffers - n);
158  }
159 }
160 
161 always_inline void
162 vlib_buffer_copy_indices_to_ring (u32 * ring, u32 * src, u32 start,
163  u32 ring_size, u32 n_buffers)
164 {
165  ASSERT (n_buffers <= ring_size);
166 
167  if (PREDICT_TRUE (start + n_buffers <= ring_size))
168  {
169  vlib_buffer_copy_indices (ring + start, src, n_buffers);
170  }
171  else
172  {
173  u32 n = ring_size - start;
174  vlib_buffer_copy_indices (ring + start, src, n);
175  vlib_buffer_copy_indices (ring, src + n, n_buffers - n);
176  }
177 }
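/* Example (editor's sketch): draining n entries from a circular index ring
 * into a linear array; the helper performs the copy in two pieces when the
 * range wraps. ring[], RING_SZ, head and n are assumed caller-maintained:
 *
 *   u32 tmp[64];
 *   vlib_buffer_copy_indices_from_ring (tmp, ring, head, RING_SZ, n);
 *   head = (head + n) % RING_SZ;
 */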
178 
179 STATIC_ASSERT_OFFSET_OF (vlib_buffer_t, template_end, 64);
180 static_always_inline void
181 vlib_buffer_copy_template (vlib_buffer_t * b, vlib_buffer_t * bt)
182 {
183 #if defined CLIB_HAVE_VEC512
184  b->as_u8x64[0] = bt->as_u8x64[0];
185 #elif defined (CLIB_HAVE_VEC256)
186  b->as_u8x32[0] = bt->as_u8x32[0];
187  b->as_u8x32[1] = bt->as_u8x32[1];
188 #elif defined (CLIB_HAVE_VEC128)
189  b->as_u8x16[0] = bt->as_u8x16[0];
190  b->as_u8x16[1] = bt->as_u8x16[1];
191  b->as_u8x16[2] = bt->as_u8x16[2];
192  b->as_u8x16[3] = bt->as_u8x16[3];
193 #else
194  clib_memcpy_fast (b, bt, 64);
195 #endif
196 }
197 
198 static_always_inline u8
199 vlib_buffer_pool_get_default_for_numa (vlib_main_t * vm, u32 numa_node)
200 {
201  ASSERT (numa_node < VLIB_BUFFER_MAX_NUMA_NODES);
202  return vm->buffer_main->default_buffer_pool_index_for_numa[numa_node];
203 }
204 
205 /** \brief Translate array of buffer indices into buffer pointers with offset
206 
207  @param vm - (vlib_main_t *) vlib main data structure pointer
208  @param bi - (u32 *) array of buffer indices
209  @param b - (void **) array to store buffer pointers
210  @param count - (uword) number of elements
211  @param offset - (i32) offset applied to each pointer
212 */
213 static_always_inline void
214 vlib_get_buffers_with_offset (vlib_main_t * vm, u32 * bi, void **b, int count,
215  i32 offset)
216 {
217  uword buffer_mem_start = vm->buffer_main->buffer_mem_start;
218 #ifdef CLIB_HAVE_VEC256
219  u64x4 off = u64x4_splat (buffer_mem_start + offset);
220  /* if count is not const, compiler will not unroll while loop
221  so we maintain two-in-parallel variant */
222  while (count >= 8)
223  {
224  u64x4 b0 = u64x4_from_u32x4 (u32x4_load_unaligned (bi));
225  u64x4 b1 = u64x4_from_u32x4 (u32x4_load_unaligned (bi + 4));
226  /* shift and add to get vlib_buffer_t pointer */
227  u64x4_store_unaligned ((b0 << CLIB_LOG2_CACHE_LINE_BYTES) + off, b);
228  u64x4_store_unaligned ((b1 << CLIB_LOG2_CACHE_LINE_BYTES) + off, b + 4);
229  b += 8;
230  bi += 8;
231  count -= 8;
232  }
233 #endif
234  while (count >= 4)
235  {
236 #ifdef CLIB_HAVE_VEC256
237  u64x4 b0 = u64x4_from_u32x4 (u32x4_load_unaligned (bi));
238  /* shift and add to get vlib_buffer_t pointer */
239  u64x4_store_unaligned ((b0 << CLIB_LOG2_CACHE_LINE_BYTES) + off, b);
240 #elif defined (CLIB_HAVE_VEC128)
241  u64x2 off = u64x2_splat (buffer_mem_start + offset);
242  u32x4 bi4 = u32x4_load_unaligned (bi);
243  u64x2 b0 = u64x2_from_u32x4 ((u32x4) bi4);
244 #if defined (__aarch64__)
245  u64x2 b1 = u64x2_from_u32x4_high ((u32x4) bi4);
246 #else
247  bi4 = u32x4_shuffle (bi4, 2, 3, 0, 1);
248  u64x2 b1 = u64x2_from_u32x4 ((u32x4) bi4);
249 #endif
250  u64x2_store_unaligned ((b0 << CLIB_LOG2_CACHE_LINE_BYTES) + off, b);
251  u64x2_store_unaligned ((b1 << CLIB_LOG2_CACHE_LINE_BYTES) + off, b + 2);
252 #else
253  b[0] = vlib_buffer_ptr_from_index (buffer_mem_start, bi[0], offset);
254  b[1] = vlib_buffer_ptr_from_index (buffer_mem_start, bi[1], offset);
255  b[2] = vlib_buffer_ptr_from_index (buffer_mem_start, bi[2], offset);
256  b[3] = vlib_buffer_ptr_from_index (buffer_mem_start, bi[3], offset);
257 #endif
258  b += 4;
259  bi += 4;
260  count -= 4;
261  }
262  while (count)
263  {
264  b[0] = vlib_buffer_ptr_from_index (buffer_mem_start, bi[0], offset);
265  b += 1;
266  bi += 1;
267  count -= 1;
268  }
269 }
270 
271 /** \brief Translate array of buffer indices into buffer pointers
272 
273  @param vm - (vlib_main_t *) vlib main data structure pointer
274  @param bi - (u32 *) array of buffer indices
275  @param b - (vlib_buffer_t **) array to store buffer pointers
276  @param count - (uword) number of elements
277 */
278 
279 static_always_inline void
280 vlib_get_buffers (vlib_main_t * vm, u32 * bi, vlib_buffer_t ** b, int count)
281 {
282  vlib_get_buffers_with_offset (vm, bi, (void **) b, count, 0);
283 }
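/* Example (editor's sketch): the usual node pattern of translating a whole
 * frame of indices at once rather than calling vlib_get_buffer () per
 * packet, assuming vm, node and frame as in a node function:
 *
 *   u32 *from = vlib_frame_vector_args (frame);
 *   vlib_buffer_t *bufs[VLIB_FRAME_SIZE];
 *   vlib_get_buffers (vm, from, bufs, frame->n_vectors);
 */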
284 
285 /** \brief Translate buffer pointer into buffer index
286 
287  @param vm - (vlib_main_t *) vlib main data structure pointer
288  @param p - (void *) buffer pointer
289  @return - (u32) buffer index
290 */
291 
292 always_inline u32
293 vlib_get_buffer_index (vlib_main_t * vm, void *p)
294 {
295  vlib_buffer_main_t *bm = vm->buffer_main;
296  uword offset = pointer_to_uword (p) - bm->buffer_mem_start;
297  ASSERT (pointer_to_uword (p) > bm->buffer_mem_start);
298  ASSERT (offset < bm->buffer_mem_size);
299  ASSERT ((offset % (1 << CLIB_LOG2_CACHE_LINE_BYTES)) == 0);
300  return offset >> CLIB_LOG2_CACHE_LINE_BYTES;
301 }
302 
303 /** \brief Translate array of buffer pointers into buffer indices with offset
304 
305  @param vm - (vlib_main_t *) vlib main data structure pointer
306  @param b - (void **) array of buffer pointers
307  @param bi - (u32 *) array to store buffer indices
308  @param count - (uword) number of elements
309  @param offset - (i32) offset applied to each pointer
310 */
311 static_always_inline void
312 vlib_get_buffer_indices_with_offset (vlib_main_t * vm, void **b, u32 * bi,
313  uword count, i32 offset)
314 {
315 #ifdef CLIB_HAVE_VEC256
316  u32x8 mask = { 0, 2, 4, 6, 1, 3, 5, 7 };
317  u64x4 off4 = u64x4_splat (vm->buffer_main->buffer_mem_start - offset);
318 
319  while (count >= 8)
320  {
321  /* load 4 pointers into 256-bit register */
322  u64x4 v0 = u64x4_load_unaligned (b);
323  u64x4 v1 = u64x4_load_unaligned (b + 4);
324  u32x8 v2, v3;
325 
326  v0 -= off4;
327  v1 -= off4;
328 
329  v0 >>= CLIB_LOG2_CACHE_LINE_BYTES;
330  v1 >>= CLIB_LOG2_CACHE_LINE_BYTES;
331 
332  /* permute 256-bit register so lower u32s of each buffer index are
333  * placed into lower 128-bits */
334  v2 = u32x8_permute ((u32x8) v0, mask);
335  v3 = u32x8_permute ((u32x8) v1, mask);
336 
337  /* extract lower 128-bits and save them to the array of buffer indices */
338  u32x4_store_unaligned (u32x8_extract_lo (v2), bi);
339  u32x4_store_unaligned (u32x8_extract_lo (v3), bi + 4);
340  bi += 8;
341  b += 8;
342  count -= 8;
343  }
344 #endif
345  while (count >= 4)
346  {
347  /* equivalent non-vector implementation */
348  bi[0] = vlib_get_buffer_index (vm, ((u8 *) b[0]) + offset);
349  bi[1] = vlib_get_buffer_index (vm, ((u8 *) b[1]) + offset);
350  bi[2] = vlib_get_buffer_index (vm, ((u8 *) b[2]) + offset);
351  bi[3] = vlib_get_buffer_index (vm, ((u8 *) b[3]) + offset);
352  bi += 4;
353  b += 4;
354  count -= 4;
355  }
356  while (count)
357  {
358  bi[0] = vlib_get_buffer_index (vm, ((u8 *) b[0]) + offset);
359  bi += 1;
360  b += 1;
361  count -= 1;
362  }
363 }
364 
365 /** \brief Translate array of buffer pointers into buffer indices
366 
367  @param vm - (vlib_main_t *) vlib main data structure pointer
368  @param b - (vlib_buffer_t **) array of buffer pointers
369  @param bi - (u32 *) array to store buffer indices
370  @param count - (uword) number of elements
371 */
372 static_always_inline void
373 vlib_get_buffer_indices (vlib_main_t * vm, vlib_buffer_t ** b, u32 * bi,
374  uword count)
375 {
376  vlib_get_buffer_indices_with_offset (vm, (void **) b, bi, count, 0);
377 }
378 
379 /** \brief Get next buffer in the buffer linked list, or zero for end of list.
380 
381  @param vm - (vlib_main_t *) vlib main data structure pointer
382  @param b - (void *) buffer pointer
383  @return - (vlib_buffer_t *) next buffer, or NULL
384 */
385 always_inline vlib_buffer_t *
386 vlib_get_next_buffer (vlib_main_t * vm, vlib_buffer_t * b)
387 {
388  return (b->flags & VLIB_BUFFER_NEXT_PRESENT
389  ? vlib_get_buffer (vm, b->next_buffer) : 0);
390 }
391 
392 uword vlib_buffer_length_in_chain_slow_path (vlib_main_t * vm,
393  vlib_buffer_t * b_first);
394 
395 /** \brief Get length in bytes of the buffer chain
396 
397  @param vm - (vlib_main_t *) vlib main data structure pointer
398  @param b - (void *) buffer pointer
399  @return - (uword) length of buffer chain
400 */
401 always_inline uword
402 vlib_buffer_length_in_chain (vlib_main_t * vm, vlib_buffer_t * b)
403 {
404  uword len = b->current_length;
405 
406  if (PREDICT_TRUE ((b->flags & VLIB_BUFFER_NEXT_PRESENT) == 0))
407  return len;
408 
409  if (PREDICT_TRUE (b->flags & VLIB_BUFFER_TOTAL_LENGTH_VALID))
410  return len + b->total_length_not_including_first_buffer;
411 
412  return vlib_buffer_length_in_chain_slow_path (vm, b);
413 }
414 
415 /** \brief Get length in bytes of the buffer chain starting at the given buffer index
416 
417  @param vm - (vlib_main_t *) vlib main data structure pointer
418  @param bi - (u32) buffer index
419  @return - (uword) length of buffer chain
420 */
421 always_inline uword
422 vlib_buffer_index_length_in_chain (vlib_main_t * vm, u32 bi)
423 {
424  vlib_buffer_t *b = vlib_get_buffer (vm, bi);
425  return vlib_buffer_length_in_chain (vm, b);
426 }
427 
428 /** \brief Copy buffer contents to memory
429 
430  @param vm - (vlib_main_t *) vlib main data structure pointer
431  @param buffer_index - (u32) buffer index
432  @param contents - (u8 *) memory, <strong>must be large enough</strong>
433  @return - (uword) length of buffer chain
434 */
435 always_inline uword
436 vlib_buffer_contents (vlib_main_t * vm, u32 buffer_index, u8 * contents)
437 {
438  uword content_len = 0;
439  uword l;
440  vlib_buffer_t *b;
441 
442  while (1)
443  {
444  b = vlib_get_buffer (vm, buffer_index);
445  l = b->current_length;
446  clib_memcpy_fast (contents + content_len, b->data + b->current_data, l);
447  content_len += l;
448  if (!(b->flags & VLIB_BUFFER_NEXT_PRESENT))
449  break;
450  buffer_index = b->next_buffer;
451  }
452 
453  return content_len;
454 }
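/* Example (editor's sketch): flattening a chain for inspection; the
 * destination is sized from the chain length first. bi is assumed to be the
 * head buffer index:
 *
 *   uword n = vlib_buffer_index_length_in_chain (vm, bi);
 *   u8 *flat = clib_mem_alloc (n);
 *   uword copied = vlib_buffer_contents (vm, bi, flat);
 *   ASSERT (copied == n);
 */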
455 
456 always_inline uword
457 vlib_buffer_get_pa (vlib_main_t * vm, vlib_buffer_t * b)
458 {
459  return vlib_physmem_get_pa (vm, b->data);
460 }
461 
462 always_inline uword
463 vlib_buffer_get_current_pa (vlib_main_t * vm, vlib_buffer_t * b)
464 {
465  return vlib_buffer_get_pa (vm, b) + b->current_data;
466 }
467 
468 /** \brief Prefetch buffer metadata by buffer index
469  The first 64 bytes of the buffer contain most header information
470 
471  @param vm - (vlib_main_t *) vlib main data structure pointer
472  @param bi - (u32) buffer index
473  @param type - LOAD, STORE. In most cases, STORE is the right answer
474 */
475 /* Prefetch buffer header given index. */
476 #define vlib_prefetch_buffer_with_index(vm,bi,type) \
477  do { \
478  vlib_buffer_t * _b = vlib_get_buffer (vm, bi); \
479  vlib_prefetch_buffer_header (_b, type); \
480  } while (0)
481 
482 typedef enum
483 {
484  /* Index is unknown. */
485  VLIB_BUFFER_UNKNOWN,
486 
487  /* Index is known and free/allocated. */
488  VLIB_BUFFER_KNOWN_FREE,
489  VLIB_BUFFER_KNOWN_ALLOCATED,
490 } vlib_buffer_known_state_t;
491 
492 void vlib_buffer_validate_alloc_free (vlib_main_t * vm, u32 * buffers,
493  uword n_buffers,
494  vlib_buffer_known_state_t
495  expected_state);
496 
497 always_inline vlib_buffer_known_state_t
498 vlib_buffer_is_known (vlib_main_t * vm, u32 buffer_index)
499 {
500  vlib_buffer_main_t *bm = vm->buffer_main;
501 
502  clib_spinlock_lock (&bm->buffer_known_hash_lockp);
503  uword *p = hash_get (bm->buffer_known_hash, buffer_index);
504  clib_spinlock_unlock (&bm->buffer_known_hash_lockp);
505  return p ? p[0] : VLIB_BUFFER_UNKNOWN;
506 }
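/* Example (editor's sketch): a debug-image sanity check before operating on
 * an index of uncertain provenance; the known-state hash is maintained by
 * the allocation/free validation paths:
 *
 *   if (CLIB_DEBUG > 0)
 *     ASSERT (vlib_buffer_is_known (vm, bi) == VLIB_BUFFER_KNOWN_ALLOCATED);
 */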
507 
508 /* Validates sanity of a single buffer.
509  Returns format'ed vector with error message if any. */
510 u8 *vlib_validate_buffer (vlib_main_t * vm, u32 buffer_index,
511  uword follow_chain);
512 
513 u8 *vlib_validate_buffers (vlib_main_t * vm,
514  u32 * buffers,
515  uword next_buffer_stride,
516  uword n_buffers,
517  vlib_buffer_known_state_t known_state,
518  uword follow_buffer_next);
519 
520 static_always_inline vlib_buffer_pool_t *
521 vlib_get_buffer_pool (vlib_main_t * vm, u8 buffer_pool_index)
522 {
523  vlib_buffer_main_t *bm = vm->buffer_main;
524  return vec_elt_at_index (bm->buffer_pools, buffer_pool_index);
525 }
526 
527 static_always_inline __clib_warn_unused_result uword
528 vlib_buffer_pool_get (vlib_main_t * vm, u8 buffer_pool_index, u32 * buffers,
529  u32 n_buffers)
530 {
531  vlib_buffer_pool_t *bp = vlib_get_buffer_pool (vm, buffer_pool_index);
532  u32 len;
533 
534  ASSERT (bp->buffers);
535 
536  clib_spinlock_lock (&bp->lock);
537  len = bp->n_avail;
538  if (PREDICT_TRUE (n_buffers < len))
539  {
540  len -= n_buffers;
541  vlib_buffer_copy_indices (buffers, bp->buffers + len, n_buffers);
542  bp->n_avail = len;
543  clib_spinlock_unlock (&bp->lock);
544  return n_buffers;
545  }
546  else
547  {
548  vlib_buffer_copy_indices (buffers, bp->buffers, len);
549  bp->n_avail = 0;
550  clib_spinlock_unlock (&bp->lock);
551  return len;
552  }
553 }
554 
555 
556 /** \brief Allocate buffers from specific pool into supplied array
557 
558  @param vm - (vlib_main_t *) vlib main data structure pointer
559  @param buffers - (u32 * ) buffer index array
560  @param n_buffers - (u32) number of buffers requested
561  @return - (u32) number of buffers actually allocated, may be
562  less than the number requested or zero
563 */
564 
565 always_inline __clib_warn_unused_result u32
566 vlib_buffer_alloc_from_pool (vlib_main_t * vm, u32 * buffers, u32 n_buffers,
567  u8 buffer_pool_index)
568 {
569  vlib_buffer_main_t *bm = vm->buffer_main;
570  vlib_buffer_pool_t *bp;
571  vlib_buffer_pool_thread_t *bpt;
572  u32 *src, *dst, len, n_left;
573 
574  /* If buffer allocation fault injection is configured */
575  if (VLIB_BUFFER_ALLOC_FAULT_INJECTOR > 0)
576  {
577  u32 vlib_buffer_alloc_may_fail (vlib_main_t *, u32);
578 
579  /* See how many buffers we're willing to allocate */
580  n_buffers = vlib_buffer_alloc_may_fail (vm, n_buffers);
581  if (n_buffers == 0)
582  return (n_buffers);
583  }
584 
585  bp = vec_elt_at_index (bm->buffer_pools, buffer_pool_index);
586  bpt = vec_elt_at_index (bp->threads, vm->thread_index);
587 
588  dst = buffers;
589  n_left = n_buffers;
590  len = bpt->n_cached;
591 
592  /* per-thread cache contains enough buffers */
593  if (len >= n_buffers)
594  {
595  src = bpt->cached_buffers + len - n_buffers;
596  vlib_buffer_copy_indices (dst, src, n_buffers);
597  bpt->n_cached -= n_buffers;
598 
599  if (CLIB_DEBUG > 0)
600  vlib_buffer_validate_alloc_free (vm, buffers, n_buffers,
601  VLIB_BUFFER_KNOWN_FREE);
602  return n_buffers;
603  }
604 
605  /* alloc bigger than cache - take buffers directly from main pool */
606  if (n_buffers >= VLIB_BUFFER_POOL_PER_THREAD_CACHE_SZ)
607  {
608  n_buffers = vlib_buffer_pool_get (vm, buffer_pool_index, buffers,
609  n_buffers);
610 
611  if (CLIB_DEBUG > 0)
612  vlib_buffer_validate_alloc_free (vm, buffers, n_buffers,
613  VLIB_BUFFER_KNOWN_FREE);
614  return n_buffers;
615  }
616 
617  /* take everything available in the cache */
618  if (len)
619  {
620  vlib_buffer_copy_indices (dst, bpt->cached_buffers, len);
621  bpt->n_cached = 0;
622  dst += len;
623  n_left -= len;
624  }
625 
626  len = round_pow2 (n_left, 32);
627  len = vlib_buffer_pool_get (vm, buffer_pool_index, bpt->cached_buffers,
628  len);
629  bpt->n_cached = len;
630 
631  if (len)
632  {
633  u32 n_copy = clib_min (len, n_left);
634  src = bpt->cached_buffers + len - n_copy;
635  vlib_buffer_copy_indices (dst, src, n_copy);
636  bpt->n_cached -= n_copy;
637  n_left -= n_copy;
638  }
639 
640  n_buffers -= n_left;
641 
642  /* Verify that buffers are known free. */
643  if (CLIB_DEBUG > 0)
644  vlib_buffer_validate_alloc_free (vm, buffers, n_buffers,
645  VLIB_BUFFER_KNOWN_FREE);
646 
647  return n_buffers;
648 }
649 
650 /** \brief Allocate buffers from specific numa node into supplied array
651 
652  @param vm - (vlib_main_t *) vlib main data structure pointer
653  @param buffers - (u32 * ) buffer index array
654  @param n_buffers - (u32) number of buffers requested
655  @param numa_node - (u32) numa node
656  @return - (u32) number of buffers actually allocated, may be
657  less than the number requested or zero
658 */
659 always_inline __clib_warn_unused_result u32
660 vlib_buffer_alloc_on_numa (vlib_main_t * vm, u32 * buffers, u32 n_buffers,
661  u32 numa_node)
662 {
663  u8 index = vlib_buffer_pool_get_default_for_numa (vm, numa_node);
664  return vlib_buffer_alloc_from_pool (vm, buffers, n_buffers, index);
665 }
666 
667 /** \brief Allocate buffers into supplied array
668 
669  @param vm - (vlib_main_t *) vlib main data structure pointer
670  @param buffers - (u32 * ) buffer index array
671  @param n_buffers - (u32) number of buffers requested
672  @return - (u32) number of buffers actually allocated, may be
673  less than the number requested or zero
674 */
675 
676 always_inline __clib_warn_unused_result u32
677 vlib_buffer_alloc (vlib_main_t * vm, u32 * buffers, u32 n_buffers)
678 {
679  return vlib_buffer_alloc_on_numa (vm, buffers, n_buffers, vm->numa_node);
680 }
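/* Example (editor's sketch): allocating a burst and returning it on partial
 * failure, since the allocator may deliver fewer indices than requested:
 *
 *   u32 bi[32];
 *   u32 n = vlib_buffer_alloc (vm, bi, 32);
 *   if (n < 32)
 *     {
 *       vlib_buffer_free (vm, bi, n); // give back the partial burst
 *       return;
 *     }
 */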
681 
682 /** \brief Allocate buffers into ring
683 
684  @param vm - (vlib_main_t *) vlib main data structure pointer
685  @param buffers - (u32 * ) buffer index ring
686  @param start - (u32) first slot in the ring
687  @param ring_size - (u32) ring size
688  @param n_buffers - (u32) number of buffers requested
689  @return - (u32) number of buffers actually allocated, may be
690  less than the number requested or zero
691 */
692 always_inline __clib_warn_unused_result u32
693 vlib_buffer_alloc_to_ring (vlib_main_t * vm, u32 * ring, u32 start,
694  u32 ring_size, u32 n_buffers)
695 {
696  u32 n_alloc;
697 
698  ASSERT (n_buffers <= ring_size);
699 
700  if (PREDICT_TRUE (start + n_buffers <= ring_size))
701  return vlib_buffer_alloc (vm, ring + start, n_buffers);
702 
703  n_alloc = vlib_buffer_alloc (vm, ring + start, ring_size - start);
704 
705  if (PREDICT_TRUE (n_alloc == ring_size - start))
706  n_alloc += vlib_buffer_alloc (vm, ring, n_buffers - n_alloc);
707 
708  return n_alloc;
709 }
710 
711 /** \brief Allocate buffers into ring from specific buffer pool
712 
713  @param vm - (vlib_main_t *) vlib main data structure pointer
714  @param buffers - (u32 * ) buffer index ring
715  @param start - (u32) first slot in the ring
716  @param ring_size - (u32) ring size
717  @param n_buffers - (u32) number of buffers requested
718  @return - (u32) number of buffers actually allocated, may be
719  less than the number requested or zero
720 */
721 always_inline __clib_warn_unused_result u32
722 vlib_buffer_alloc_to_ring_from_pool (vlib_main_t * vm, u32 * ring, u32 start,
723  u32 ring_size, u32 n_buffers,
724  u8 buffer_pool_index)
725 {
726  u32 n_alloc;
727 
728  ASSERT (n_buffers <= ring_size);
729 
730  if (PREDICT_TRUE (start + n_buffers <= ring_size))
731  return vlib_buffer_alloc_from_pool (vm, ring + start, n_buffers,
732  buffer_pool_index);
733 
734  n_alloc = vlib_buffer_alloc_from_pool (vm, ring + start, ring_size - start,
735  buffer_pool_index);
736 
737  if (PREDICT_TRUE (n_alloc == ring_size - start))
738  n_alloc += vlib_buffer_alloc_from_pool (vm, ring, n_buffers - n_alloc,
739  buffer_pool_index);
740 
741  return n_alloc;
742 }
743 
744 static_always_inline void
745 vlib_buffer_pool_put (vlib_main_t * vm, u8 buffer_pool_index,
746  u32 * buffers, u32 n_buffers)
747 {
748  vlib_buffer_pool_t *bp = vlib_get_buffer_pool (vm, buffer_pool_index);
749  vlib_buffer_pool_thread_t *bpt = vec_elt_at_index (bp->threads,
750  vm->thread_index);
751  u32 n_cached, n_empty;
752 
753  if (CLIB_DEBUG > 0)
754  vlib_buffer_validate_alloc_free (vm, buffers, n_buffers,
755  VLIB_BUFFER_KNOWN_ALLOCATED);
756 
757  n_cached = bpt->n_cached;
758  n_empty = VLIB_BUFFER_POOL_PER_THREAD_CACHE_SZ - n_cached;
759  if (n_buffers <= n_empty)
760  {
761  vlib_buffer_copy_indices (bpt->cached_buffers + n_cached,
762  buffers, n_buffers);
763  bpt->n_cached = n_cached + n_buffers;
764  return;
765  }
766 
767  vlib_buffer_copy_indices (bpt->cached_buffers + n_cached,
768  buffers + n_buffers - n_empty, n_empty);
769  bpt->n_cached = VLIB_BUFFER_POOL_PER_THREAD_CACHE_SZ;
770 
771  clib_spinlock_lock (&bp->lock);
772  vlib_buffer_copy_indices (bp->buffers + bp->n_avail, buffers,
773  n_buffers - n_empty);
774  bp->n_avail += n_buffers - n_empty;
775  clib_spinlock_unlock (&bp->lock);
776 }
777 
778 static_always_inline void
779 vlib_buffer_free_inline (vlib_main_t * vm, u32 * buffers, u32 n_buffers,
780  int maybe_next)
781 {
782  const int queue_size = 128;
783  vlib_buffer_pool_t *bp = 0;
784  u8 buffer_pool_index = ~0;
785  u32 n_queue = 0, queue[queue_size + 4];
786  vlib_buffer_t bt = { };
787 #if defined(CLIB_HAVE_VEC128)
788  vlib_buffer_t bpi_mask = {.buffer_pool_index = ~0 };
789  vlib_buffer_t bpi_vec = {.buffer_pool_index = ~0 };
790  vlib_buffer_t flags_refs_mask = {
791  .flags = VLIB_BUFFER_NEXT_PRESENT,
792  .ref_count = ~1
793  };
794 #endif
795 
796  while (n_buffers)
797  {
798  vlib_buffer_t *b[8];
799  u32 bi, sum = 0, flags, next;
800 
801  if (n_buffers < 12)
802  goto one_by_one;
803 
804  vlib_get_buffers (vm, buffers, b, 4);
805  vlib_get_buffers (vm, buffers + 8, b + 4, 4);
806 
807  vlib_prefetch_buffer_header (b[4], LOAD);
808  vlib_prefetch_buffer_header (b[5], LOAD);
809  vlib_prefetch_buffer_header (b[6], LOAD);
810  vlib_prefetch_buffer_header (b[7], LOAD);
811 
812 #if defined(CLIB_HAVE_VEC128)
813  u8x16 p0, p1, p2, p3, r;
814  p0 = u8x16_load_unaligned (b[0]);
815  p1 = u8x16_load_unaligned (b[1]);
816  p2 = u8x16_load_unaligned (b[2]);
817  p3 = u8x16_load_unaligned (b[3]);
818 
819  r = p0 ^ bpi_vec.as_u8x16[0];
820  r |= p1 ^ bpi_vec.as_u8x16[0];
821  r |= p2 ^ bpi_vec.as_u8x16[0];
822  r |= p3 ^ bpi_vec.as_u8x16[0];
823  r &= bpi_mask.as_u8x16[0];
824  r |= (p0 | p1 | p2 | p3) & flags_refs_mask.as_u8x16[0];
825 
826  sum = !u8x16_is_all_zero (r);
827 #else
828  sum |= b[0]->flags;
829  sum |= b[1]->flags;
830  sum |= b[2]->flags;
831  sum |= b[3]->flags;
832  sum &= VLIB_BUFFER_NEXT_PRESENT;
833  sum += b[0]->ref_count - 1;
834  sum += b[1]->ref_count - 1;
835  sum += b[2]->ref_count - 1;
836  sum += b[3]->ref_count - 1;
837  sum |= b[0]->buffer_pool_index ^ buffer_pool_index;
838  sum |= b[1]->buffer_pool_index ^ buffer_pool_index;
839  sum |= b[2]->buffer_pool_index ^ buffer_pool_index;
840  sum |= b[3]->buffer_pool_index ^ buffer_pool_index;
841 #endif
842 
843  if (sum)
844  goto one_by_one;
845 
846  vlib_buffer_copy_indices (queue + n_queue, buffers, 4);
847  vlib_buffer_copy_template (b[0], &bt);
848  vlib_buffer_copy_template (b[1], &bt);
849  vlib_buffer_copy_template (b[2], &bt);
850  vlib_buffer_copy_template (b[3], &bt);
851  n_queue += 4;
852 
853  vlib_buffer_validate (vm, b[0]);
854  vlib_buffer_validate (vm, b[1]);
855  vlib_buffer_validate (vm, b[2]);
856  vlib_buffer_validate (vm, b[3]);
857 
857 
858  VLIB_BUFFER_TRACE_TRAJECTORY_INIT (b[0]);
859  VLIB_BUFFER_TRACE_TRAJECTORY_INIT (b[1]);
860  VLIB_BUFFER_TRACE_TRAJECTORY_INIT (b[2]);
861  VLIB_BUFFER_TRACE_TRAJECTORY_INIT (b[3]);
862 
863  if (n_queue >= queue_size)
864  {
865  vlib_buffer_pool_put (vm, buffer_pool_index, queue, n_queue);
866  n_queue = 0;
867  }
868  buffers += 4;
869  n_buffers -= 4;
870  continue;
871 
872  one_by_one:
873  bi = buffers[0];
874 
875  next_in_chain:
876  b[0] = vlib_get_buffer (vm, bi);
877  flags = b[0]->flags;
878  next = b[0]->next_buffer;
879 
880  if (PREDICT_FALSE (buffer_pool_index != b[0]->buffer_pool_index))
881  {
882 
883  if (n_queue)
884  {
885  vlib_buffer_pool_put (vm, buffer_pool_index, queue, n_queue);
886  n_queue = 0;
887  }
888 
889  buffer_pool_index = b[0]->buffer_pool_index;
890 #if defined(CLIB_HAVE_VEC128)
891  bpi_vec.buffer_pool_index = buffer_pool_index;
892 #endif
893  bp = vlib_get_buffer_pool (vm, buffer_pool_index);
894  vlib_buffer_copy_template (&bt, &bp->buffer_template);
895  }
896 
897  vlib_buffer_validate (vm, b[0]);
898 
899  VLIB_BUFFER_TRACE_TRAJECTORY_INIT (b[0]);
900 
901  if (clib_atomic_sub_fetch (&b[0]->ref_count, 1) == 0)
902  {
903  vlib_buffer_copy_template (b[0], &bt);
904  queue[n_queue++] = bi;
905  }
906 
907  if (n_queue == queue_size)
908  {
909  vlib_buffer_pool_put (vm, buffer_pool_index, queue, queue_size);
910  n_queue = 0;
911  }
912 
913  if (maybe_next && (flags & VLIB_BUFFER_NEXT_PRESENT))
914  {
915  bi = next;
916  goto next_in_chain;
917  }
918 
919  buffers++;
920  n_buffers--;
921  }
922 
923  if (n_queue)
924  vlib_buffer_pool_put (vm, buffer_pool_index, queue, n_queue);
925 }
926 
927 
928 /** \brief Free buffers
929  Frees the entire buffer chain for each buffer
930 
931  @param vm - (vlib_main_t *) vlib main data structure pointer
932  @param buffers - (u32 * ) buffer index array
933  @param n_buffers - (u32) number of buffers to free
934 
935 */
936 always_inline void
937 vlib_buffer_free (vlib_main_t * vm,
938  /* pointer to first buffer */
939  u32 * buffers,
940  /* number of buffers to free */
941  u32 n_buffers)
942 {
943  vlib_buffer_free_inline (vm, buffers, n_buffers, /* maybe next */ 1);
944 }
945 
946 /** \brief Free buffers, does not free the buffer chain for each buffer
947 
948  @param vm - (vlib_main_t *) vlib main data structure pointer
949  @param buffers - (u32 * ) buffer index array
950  @param n_buffers - (u32) number of buffers to free
951 
952 */
953 always_inline void
954 vlib_buffer_free_no_next (vlib_main_t * vm,
955  /* pointer to first buffer */
956  u32 * buffers,
957  /* number of buffers to free */
958  u32 n_buffers)
959 {
960  vlib_buffer_free_inline (vm, buffers, n_buffers, /* maybe next */ 0);
961 }
962 
963 /** \brief Free one buffer
964  Shorthand to free a single buffer chain.
965 
966  @param vm - (vlib_main_t *) vlib main data structure pointer
967  @param buffer_index - (u32) buffer index to free
968 */
969 always_inline void
970 vlib_buffer_free_one (vlib_main_t * vm, u32 buffer_index)
971 {
972  vlib_buffer_free_inline (vm, &buffer_index, 1, /* maybe next */ 1);
973 }
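/* Example (editor's sketch): dropping a single packet from within a node,
 * releasing the whole chain behind the head index:
 *
 *   vlib_buffer_free_one (vm, bi);
 *
 * For bursts, vlib_buffer_free () is preferable, since indices are returned
 * to the per-thread cache in batches.
 */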
974 
975 /** \brief Free buffers from ring
976 
977  @param vm - (vlib_main_t *) vlib main data structure pointer
978  @param buffers - (u32 * ) buffer index ring
979  @param start - (u32) first slot in the ring
980  @param ring_size - (u32) ring size
981  @param n_buffers - (u32) number of buffers
982 */
983 always_inline void
984 vlib_buffer_free_from_ring (vlib_main_t * vm, u32 * ring, u32 start,
985  u32 ring_size, u32 n_buffers)
986 {
987  ASSERT (n_buffers <= ring_size);
988 
989  if (PREDICT_TRUE (start + n_buffers <= ring_size))
990  {
991  vlib_buffer_free (vm, ring + start, n_buffers);
992  }
993  else
994  {
995  vlib_buffer_free (vm, ring + start, ring_size - start);
996  vlib_buffer_free (vm, ring, n_buffers - (ring_size - start));
997  }
998 }
999 
1000 /** \brief Free buffers from ring without freeing tail buffers
1001 
1002  @param vm - (vlib_main_t *) vlib main data structure pointer
1003  @param buffers - (u32 * ) buffer index ring
1004  @param start - (u32) first slot in the ring
1005  @param ring_size - (u32) ring size
1006  @param n_buffers - (u32) number of buffers
1007 */
1008 always_inline void
1009 vlib_buffer_free_from_ring_no_next (vlib_main_t * vm, u32 * ring, u32 start,
1010  u32 ring_size, u32 n_buffers)
1011 {
1012  ASSERT (n_buffers <= ring_size);
1013 
1014  if (PREDICT_TRUE (start + n_buffers <= ring_size))
1015  {
1016  vlib_buffer_free_no_next (vm, ring + start, n_buffers);
1017  }
1018  else
1019  {
1020  vlib_buffer_free_no_next (vm, ring + start, ring_size - start);
1021  vlib_buffer_free_no_next (vm, ring, n_buffers - (ring_size - start));
1022  }
1023 }
1024 
1025 /* Append given data to end of buffer, possibly allocating new buffers. */
1026 int vlib_buffer_add_data (vlib_main_t * vm, u32 * buffer_index, void *data,
1027  u32 n_data_bytes);
1028 
1029 /* Define vlib_buffer and vnet_buffer flags bits preserved for copy/clone */
1030 #define VLIB_BUFFER_COPY_CLONE_FLAGS_MASK \
1031  (VLIB_BUFFER_NEXT_PRESENT | VLIB_BUFFER_TOTAL_LENGTH_VALID | \
1032  VLIB_BUFFER_IS_TRACED | ~VLIB_BUFFER_FLAGS_ALL)
1033 
1034 /* duplicate all buffers in chain */
1035 always_inline vlib_buffer_t *
1036 vlib_buffer_copy (vlib_main_t * vm, vlib_buffer_t * b)
1037 {
1038  vlib_buffer_t *s, *d, *fd;
1039  uword n_alloc, n_buffers = 1;
1040  u32 flag_mask = VLIB_BUFFER_COPY_CLONE_FLAGS_MASK;
1041  int i;
1042 
1043  s = b;
1044  while (s->flags & VLIB_BUFFER_NEXT_PRESENT)
1045  {
1046  n_buffers++;
1047  s = vlib_get_buffer (vm, s->next_buffer);
1048  }
1049  u32 new_buffers[n_buffers];
1050 
1051  n_alloc = vlib_buffer_alloc (vm, new_buffers, n_buffers);
1052 
1053  /* No guarantee that we'll get all the buffers we asked for */
1054  if (PREDICT_FALSE (n_alloc < n_buffers))
1055  {
1056  if (n_alloc > 0)
1057  vlib_buffer_free (vm, new_buffers, n_alloc);
1058  return 0;
1059  }
1060 
1061  /* 1st segment */
1062  s = b;
1063  fd = d = vlib_get_buffer (vm, new_buffers[0]);
1064  d->current_data = s->current_data;
1065  d->current_length = s->current_length;
1066  d->flags = s->flags & flag_mask;
1067  d->trace_handle = s->trace_handle;
1068  d->total_length_not_including_first_buffer =
1069  s->total_length_not_including_first_buffer;
1070  clib_memcpy_fast (d->opaque, s->opaque, sizeof (s->opaque));
1071  clib_memcpy_fast (d->opaque2, s->opaque2, sizeof (s->opaque2));
1072  clib_memcpy_fast (vlib_buffer_get_current (d),
1073  vlib_buffer_get_current (s), s->current_length);
1074 
1075  /* next segments */
1076  for (i = 1; i < n_buffers; i++)
1077  {
1078  /* previous */
1079  d->next_buffer = new_buffers[i];
1080  /* current */
1081  s = vlib_get_buffer (vm, s->next_buffer);
1082  d = vlib_get_buffer (vm, new_buffers[i]);
1083  d->current_data = s->current_data;
1084  d->current_length = s->current_length;
1085  clib_memcpy_fast (vlib_buffer_get_current (d),
1086  vlib_buffer_get_current (s), s->current_length);
1087  d->flags = s->flags & flag_mask;
1088  }
1089 
1090  return fd;
1091 }
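/* Example (editor's sketch): deep-copying a packet before modifying the
 * original in place. Allocation can fail, so the null return must be
 * handled:
 *
 *   vlib_buffer_t *c = vlib_buffer_copy (vm, b);
 *   if (c == 0)
 *     return; // drop or count the failure
 *   u32 ci = vlib_get_buffer_index (vm, c);
 */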
1092 
1093 /* duplicate first buffer in chain */
1094 always_inline vlib_buffer_t *
1095 vlib_buffer_copy_no_chain (vlib_main_t * vm, vlib_buffer_t * b, u32 * di)
1096 {
1097  vlib_buffer_t *d;
1098 
1099  if ((vlib_buffer_alloc (vm, di, 1)) != 1)
1100  return 0;
1101 
1102  d = vlib_get_buffer (vm, *di);
1103  /* 1st segment */
1104  d->current_data = b->current_data;
1105  d->current_length = b->current_length;
1106  clib_memcpy_fast (d->opaque, b->opaque, sizeof (b->opaque));
1107  clib_memcpy_fast (d->opaque2, b->opaque2, sizeof (b->opaque2));
1108  clib_memcpy_fast (vlib_buffer_get_current (d),
1109  vlib_buffer_get_current (b), b->current_length);
1110 
1111  return d;
1112 }
1113 
1114 /* \brief Move packet from current position to offset position in buffer.
1115  Only works for small packets using one buffer with room to fit the move
1116  @param vm - (vlib_main_t *) vlib main data structure pointer
1117  @param b - (vlib_buffer_t *) pointer to buffer
1118  @param offset - (i16) position to move the packet in buffer
1119  */
1120 always_inline void
1121 vlib_buffer_move (vlib_main_t * vm, vlib_buffer_t * b, i16 offset)
1122 {
1123  ASSERT ((b->flags & VLIB_BUFFER_NEXT_PRESENT) == 0);
1124  ASSERT (offset + VLIB_BUFFER_PRE_DATA_SIZE >= 0);
1125  ASSERT (offset + b->current_length <
1126  vlib_buffer_get_default_data_size (vm));
1127 
1128  u8 *source = vlib_buffer_get_current (b);
1129  b->current_data = offset;
1130  u8 *destination = vlib_buffer_get_current (b);
1131  u16 length = b->current_length;
1132 
1133  if (source + length <= destination) /* no overlap */
1134  clib_memcpy_fast (destination, source, length);
1135  else
1136  memmove (destination, source, length);
1137 }
1138 
1139 /** \brief Create a maximum of 256 clones of buffer and store them
1140  in the supplied array
1141 
1142  @param vm - (vlib_main_t *) vlib main data structure pointer
1143  @param src_buffer - (u32) source buffer index
1144  @param buffers - (u32 * ) buffer index array
1145  @param n_buffers - (u16) number of buffer clones requested (<=256)
1146  @param head_end_offset - (u16) offset relative to current position
1147  where packet head ends
1148  @param offset - (i16) copy packet head at current position if 0,
1149  else at offset position to change headroom space as specified
1150  @return - (u16) number of buffers actually cloned, may be
1151  less than the number requested or zero
1152 */
1153 always_inline u16
1154 vlib_buffer_clone_256 (vlib_main_t * vm, u32 src_buffer, u32 * buffers,
1155  u16 n_buffers, u16 head_end_offset, i16 offset)
1156 {
1157  u16 i;
1158  vlib_buffer_t *s = vlib_get_buffer (vm, src_buffer);
1159 
1160  ASSERT (s->ref_count == 1);
1161  ASSERT (n_buffers);
1162  ASSERT (n_buffers <= 256);
1163  ASSERT (offset + VLIB_BUFFER_PRE_DATA_SIZE >= 0);
1164  ASSERT ((offset + head_end_offset) <
1165  (VLIB_BUFFER_PRE_DATA_SIZE + vlib_buffer_get_default_data_size (vm)));
1166 
1167  if (s->current_length <= head_end_offset + CLIB_CACHE_LINE_BYTES * 2)
1168  {
1169  buffers[0] = src_buffer;
1170  if (offset)
1171  vlib_buffer_move (vm, s, offset);
1172 
1173  for (i = 1; i < n_buffers; i++)
1174  {
1175  vlib_buffer_t *d;
1176  d = vlib_buffer_copy (vm, s);
1177  if (d == 0)
1178  return i;
1179  buffers[i] = vlib_get_buffer_index (vm, d);
1180 
1181  }
1182  return n_buffers;
1183  }
1184 
1185  if (PREDICT_FALSE ((n_buffers == 1) && (offset == 0)))
1186  {
1187  buffers[0] = src_buffer;
1188  return 1;
1189  }
1190 
1191  n_buffers = vlib_buffer_alloc_from_pool (vm, buffers, n_buffers,
1192  s->buffer_pool_index);
1193 
1194  for (i = 0; i < n_buffers; i++)
1195  {
1196  vlib_buffer_t *d = vlib_get_buffer (vm, buffers[i]);
1197  if (offset)
1198  d->current_data = offset;
1199  else
1200  d->current_data = s->current_data;
1201 
1202  d->current_length = head_end_offset;
1203  ASSERT (d->buffer_pool_index == s->buffer_pool_index);
1204 
1205  d->total_length_not_including_first_buffer = s->current_length -
1206  head_end_offset;
1207  if (PREDICT_FALSE (s->flags & VLIB_BUFFER_NEXT_PRESENT))
1208  {
1209  d->total_length_not_including_first_buffer +=
1210  s->total_length_not_including_first_buffer;
1211  }
1212  d->flags = (s->flags & VLIB_BUFFER_COPY_CLONE_FLAGS_MASK) |
1213  VLIB_BUFFER_NEXT_PRESENT;
1214  d->trace_handle = s->trace_handle;
1215  clib_memcpy_fast (d->opaque, s->opaque, sizeof (s->opaque));
1216  clib_memcpy_fast (d->opaque2, s->opaque2, sizeof (s->opaque2));
1217  clib_memcpy_fast (vlib_buffer_get_current (d),
1218  vlib_buffer_get_current (s), head_end_offset);
1219  d->next_buffer = src_buffer;
1220  }
1221  vlib_buffer_advance (s, head_end_offset);
1222  s->ref_count = n_buffers ? n_buffers : s->ref_count;
1223  while (s->flags & VLIB_BUFFER_NEXT_PRESENT)
1224  {
1225  s = vlib_get_buffer (vm, s->next_buffer);
1226  s->ref_count = n_buffers ? n_buffers : s->ref_count;
1227  }
1228 
1229  return n_buffers;
1230 }
1231 
1232 /** \brief Create multiple clones of buffer and store them
1233  in the supplied array
1234 
1235  @param vm - (vlib_main_t *) vlib main data structure pointer
1236  @param src_buffer - (u32) source buffer index
1237  @param buffers - (u32 * ) buffer index array
1238  @param n_buffers - (u16) number of buffer clones requested
1239  @param head_end_offset - (u16) offset relative to current position
1240  where packet head ends
1241  @param offset - (i16) copy packet head at current position if 0,
1242  else at offset position to change headroom space as specified
1243  @return - (u16) number of buffers actually cloned, may be
1244  less than the number requested or zero
1245 */
1246 always_inline u16
1247 vlib_buffer_clone_at_offset (vlib_main_t * vm, u32 src_buffer, u32 * buffers,
1248  u16 n_buffers, u16 head_end_offset, i16 offset)
1249 {
1250  vlib_buffer_t *s = vlib_get_buffer (vm, src_buffer);
1251  u16 n_cloned = 0;
1252 
1253  while (n_buffers > 256)
1254  {
1255  vlib_buffer_t *copy;
1256  copy = vlib_buffer_copy (vm, s);
1257  n_cloned += vlib_buffer_clone_256 (vm,
1258  vlib_get_buffer_index (vm, copy),
1259  (buffers + n_cloned),
1260  256, head_end_offset, offset);
1261  n_buffers -= 256;
1262  }
1263  n_cloned += vlib_buffer_clone_256 (vm, src_buffer,
1264  buffers + n_cloned,
1265  n_buffers, head_end_offset, offset);
1266 
1267  return n_cloned;
1268 }
1269 
1270 /** \brief Create multiple clones of buffer and store them
1271  in the supplied array
1272 
1273  @param vm - (vlib_main_t *) vlib main data structure pointer
1274  @param src_buffer - (u32) source buffer index
1275  @param buffers - (u32 * ) buffer index array
1276  @param n_buffers - (u16) number of buffer clones requested
1277  @param head_end_offset - (u16) offset relative to current position
1278  where packet head ends
1279  @return - (u16) number of buffers actually cloned, may be
1280  less than the number requested or zero
1281 */
1282 always_inline u16
1283 vlib_buffer_clone (vlib_main_t * vm, u32 src_buffer, u32 * buffers,
1284  u16 n_buffers, u16 head_end_offset)
1285 {
1286  return vlib_buffer_clone_at_offset (vm, src_buffer, buffers, n_buffers,
1287  head_end_offset, 0);
1288 }
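/* Example (editor's sketch): replicating one packet for several outputs, as
 * in multicast replication. Each clone shares the tail via reference counts
 * and receives a private copy of the first head_end_offset bytes for header
 * rewrite; n may come back smaller than requested on allocation failure:
 *
 *   u32 clones[4];
 *   u16 n = vlib_buffer_clone (vm, bi, clones, 4, 64);
 */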
1289 
1290 /** \brief Attach cloned tail to the buffer
1291 
1292  @param vm - (vlib_main_t *) vlib main data structure pointer
1293  @param head - (vlib_buffer_t *) head buffer
1294  @param tail - (vlib_buffer_t *) tail buffer to clone and attach to head
1295 */
1296 
1297 always_inline void
1298 vlib_buffer_attach_clone (vlib_main_t * vm, vlib_buffer_t * head,
1299  vlib_buffer_t * tail)
1300 {
1301  ASSERT ((head->flags & VLIB_BUFFER_NEXT_PRESENT) == 0);
1302  ASSERT (head->buffer_pool_index == tail->buffer_pool_index);
1303 
1304  head->flags |= VLIB_BUFFER_NEXT_PRESENT;
1305  head->flags &= ~VLIB_BUFFER_TOTAL_LENGTH_VALID;
1306  head->flags &= ~VLIB_BUFFER_EXT_HDR_VALID;
1307  head->flags |= (tail->flags & VLIB_BUFFER_TOTAL_LENGTH_VALID);
1308  head->next_buffer = vlib_get_buffer_index (vm, tail);
1309  head->total_length_not_including_first_buffer = tail->current_length +
1310  tail->total_length_not_including_first_buffer;
1311 
1312 next_segment:
1313  clib_atomic_add_fetch (&tail->ref_count, 1);
1314 
1315  if (tail->flags & VLIB_BUFFER_NEXT_PRESENT)
1316  {
1317  tail = vlib_get_buffer (vm, tail->next_buffer);
1318  goto next_segment;
1319  }
1320 }
1321 
1322 /* Initializes the buffer as an empty packet with no chained buffers. */
1323 always_inline void
1324 vlib_buffer_chain_init (vlib_buffer_t * first)
1325 {
1326  first->total_length_not_including_first_buffer = 0;
1327  first->current_length = 0;
1328  first->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
1329  first->flags |= VLIB_BUFFER_TOTAL_LENGTH_VALID;
1330 }
1331 
1332 /* The provided next_bi buffer index is appended to the end of the packet. */
1333 always_inline vlib_buffer_t *
1334 vlib_buffer_chain_buffer (vlib_main_t * vm, vlib_buffer_t * last, u32 next_bi)
1335 {
1336  vlib_buffer_t *next_buffer = vlib_get_buffer (vm, next_bi);
1337  last->next_buffer = next_bi;
1338  last->flags |= VLIB_BUFFER_NEXT_PRESENT;
1339  next_buffer->current_length = 0;
1340  next_buffer->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
1341  return next_buffer;
1342 }
1343 
1344 /* Increases or decreases the packet length.
1345  * It does not allocate or deallocate new buffers.
1346  * Therefore, the added length must be compatible
1347  * with the last buffer. */
1348 always_inline void
1349 vlib_buffer_chain_increase_length (vlib_buffer_t * first,
1350  vlib_buffer_t * last, i32 len)
1351 {
1352  last->current_length += len;
1353  if (first != last)
1354  first->total_length_not_including_first_buffer += len;
1355 }
1356 
1357 /* Copy data to the end of the packet and increases its length.
1358  * It does not allocate new buffers.
1359  * Returns the number of copied bytes. */
1360 always_inline u16
1361 vlib_buffer_chain_append_data (vlib_main_t * vm,
1362  vlib_buffer_t * first,
1363  vlib_buffer_t * last, void *data, u16 data_len)
1364 {
1365  u32 n_buffer_bytes = vlib_buffer_get_default_data_size (vm);
1366  ASSERT (n_buffer_bytes >= last->current_length + last->current_data);
1367  u16 len = clib_min (data_len,
1368  n_buffer_bytes - last->current_length -
1369  last->current_data);
1370  clib_memcpy_fast (vlib_buffer_get_current (last) + last->current_length,
1371  data, len);
1372  vlib_buffer_chain_increase_length (first, last, len);
1373  return len;
1374 }
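/* Example (editor's sketch): appending into the space left in the last
 * buffer; the return value is how much actually fit, so the caller loops or
 * falls back to vlib_buffer_chain_append_data_with_alloc () declared below:
 *
 *   u16 done = vlib_buffer_chain_append_data (vm, first, last, data, len);
 *   if (done < len)
 *     ; // allocate a new tail buffer and append the remainder
 */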
1375 
1376 /* Copy data to the end of the packet and increases its length.
1377  * Allocates additional buffers from the free list if necessary.
1378  * Returns the number of copied bytes.
1379  * 'last' value is modified whenever new buffers are allocated and
1380  * chained and points to the last buffer in the chain. */
1381 u16
1382 vlib_buffer_chain_append_data_with_alloc (vlib_main_t * vm,
1383  vlib_buffer_t * first,
1384  vlib_buffer_t ** last, void *data,
1385  u16 data_len);
1386 void vlib_buffer_chain_validate (vlib_main_t * vm, vlib_buffer_t * first);
1387 
1388 format_function_t format_vlib_buffer, format_vlib_buffer_and_data,
1389  format_vlib_buffer_contents, format_vlib_buffer_no_chain;
1390 
1391 typedef struct
1392 {
1393  /* Vector of packet data. */
1394  u8 *packet_data;
1395 
1396  /* Number of buffers to allocate in each call to allocator. */
1397  uword min_n_buffers_each_alloc;
1398 
1399  char *name;
1400 } vlib_packet_template_t;
1401 
1402 void vlib_packet_template_init (vlib_main_t * vm,
1403  vlib_packet_template_t * t,
1404  void *packet_data,
1405  uword n_packet_data_bytes,
1406  uword min_n_buffers_each_alloc,
1407  char *fmt, ...);
1408 
1409 void *vlib_packet_template_get_packet (vlib_main_t * vm,
1410  vlib_packet_template_t * t,
1411  u32 * bi_result);
1412 
1413 always_inline void
1414 vlib_packet_template_free (vlib_main_t * vm, vlib_packet_template_t * t)
1415 {
1416  vec_free (t->packet_data);
1417 }
1418 
1419 always_inline u32
1420 vlib_buffer_space_left_at_end (vlib_main_t * vm, vlib_buffer_t * b)
1421 {
1422  return b->data + vlib_buffer_get_default_data_size (vm) -
1423  ((u8 *) vlib_buffer_get_current (b) + b->current_length);
1424 }
1425 
1426 always_inline u32
1427 vlib_buffer_chain_linearize (vlib_main_t * vm, vlib_buffer_t * b)
1428 {
1429  vlib_buffer_t *db = b, *sb, *first = b;
1430  int is_cloned = 0;
1431  u32 bytes_left = 0, data_size;
1432  u16 src_left, dst_left, n_buffers = 1;
1433  u8 *dp, *sp;
1434  u32 to_free = 0;
1435 
1436  if (PREDICT_TRUE ((b->flags & VLIB_BUFFER_NEXT_PRESENT) == 0))
1437  return 1;
1438 
1439  data_size = vlib_buffer_get_default_data_size (vm);
1440 
1441  dst_left = vlib_buffer_space_left_at_end (vm, b);
1442 
1443  while (b->flags & VLIB_BUFFER_NEXT_PRESENT)
1444  {
1445  b = vlib_get_buffer (vm, b->next_buffer);
1446  if (b->ref_count > 1)
1447  is_cloned = 1;
1448  bytes_left += b->current_length;
1449  n_buffers++;
1450  }
1451 
1452  /* if buffer is cloned, create completely new chain - unless everything fits
1453  * into one buffer */
1454  if (is_cloned && bytes_left >= dst_left)
1455  {
1456  u32 len = 0;
1457  u32 space_needed = bytes_left - dst_left;
1458  u32 tail;
1459 
1460  if (vlib_buffer_alloc (vm, &tail, 1) == 0)
1461  return 0;
1462 
1463  ++n_buffers;
1464  len += data_size;
1465  b = vlib_get_buffer (vm, tail);
1466 
1467  while (len < space_needed)
1468  {
1469  u32 bi;
1470  if (vlib_buffer_alloc (vm, &bi, 1) == 0)
1471  {
1472  vlib_buffer_free_one (vm, tail);
1473  return 0;
1474  }
1475  b->flags = VLIB_BUFFER_NEXT_PRESENT;
1476  b->next_buffer = bi;
1477  b = vlib_get_buffer (vm, bi);
1478  len += data_size;
1479  n_buffers++;
1480  }
1481  sb = vlib_get_buffer (vm, first->next_buffer);
1482  to_free = first->next_buffer;
1483  first->next_buffer = tail;
1484  }
1485  else
1486  sb = vlib_get_buffer (vm, first->next_buffer);
1487 
1488  src_left = sb->current_length;
1489  sp = vlib_buffer_get_current (sb);
1490  dp = vlib_buffer_get_tail (db);
1491 
1492  while (bytes_left)
1493  {
1494  u16 bytes_to_copy;
1495 
1496  if (dst_left == 0)
1497  {
1498  db->current_length = dp - (u8 *) vlib_buffer_get_current (db);
1499  ASSERT (db->flags & VLIB_BUFFER_NEXT_PRESENT);
1500  db = vlib_get_buffer (vm, db->next_buffer);
1501  dst_left = data_size;
1502  if (db->current_data > 0)
1503  {
1504  db->current_data = 0;
1505  }
1506  else
1507  {
1508  dst_left += -db->current_data;
1509  }
1510  dp = vlib_buffer_get_current (db);
1511  }
1512 
1513  while (src_left == 0)
1514  {
1515  ASSERT (sb->flags & VLIB_BUFFER_NEXT_PRESENT);
1516  sb = vlib_get_buffer (vm, sb->next_buffer);
1517  src_left = sb->current_length;
1518  sp = vlib_buffer_get_current (sb);
1519  }
1520 
1521  bytes_to_copy = clib_min (dst_left, src_left);
1522 
1523  if (dp != sp)
1524  {
1525  if (sb == db)
1526  bytes_to_copy = clib_min (bytes_to_copy, sp - dp);
1527 
1528  clib_memcpy_fast (dp, sp, bytes_to_copy);
1529  }
1530 
1531  src_left -= bytes_to_copy;
1532  dst_left -= bytes_to_copy;
1533  dp += bytes_to_copy;
1534  sp += bytes_to_copy;
1535  bytes_left -= bytes_to_copy;
1536  }
1537  if (db != first)
1538  db->current_data = 0;
1539  db->current_length = dp - (u8 *) vlib_buffer_get_current (db);
1540 
1541  if (is_cloned && to_free)
1542  vlib_buffer_free_one (vm, to_free);
1543  else
1544  {
1545  if (db->flags & VLIB_BUFFER_NEXT_PRESENT)
1546  vlib_buffer_free_one (vm, db->next_buffer);
1547  db->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
1548  b = first;
1549  n_buffers = 1;
1550  while (b->flags & VLIB_BUFFER_NEXT_PRESENT)
1551  {
1552  b = vlib_get_buffer (vm, b->next_buffer);
1553  ++n_buffers;
1554  }
1555  }
1556 
1557  first->flags &= ~VLIB_BUFFER_TOTAL_LENGTH_VALID;
1558 
1559  return n_buffers;
1560 }
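/* Example (editor's sketch): compacting a chain before handing the packet
 * to code that expects contiguous data. The return value is the new number
 * of buffers in the chain, or 0 if allocation failed while rebuilding a
 * cloned chain:
 *
 *   if (vlib_buffer_chain_linearize (vm, b) == 0)
 *     ; // drop the packet, linearization could not complete
 */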
1561 
1562 #endif /* included_vlib_buffer_funcs_h */
1563 
1564 /*
1565  * fd.io coding-style-patch-verification: ON
1566  *
1567  * Local Variables:
1568  * eval: (c-set-style "gnu")
1569  * End:
1570  */