40 #ifndef included_vlib_buffer_funcs_h 41 #define included_vlib_buffer_funcs_h 104 #if defined(CLIB_HAVE_VEC512) 105 while (n_indices >= 16)
107 u32x16_store_unaligned (u32x16_load_unaligned (src), dst);
114 #if defined(CLIB_HAVE_VEC256) 115 while (n_indices >= 8)
117 u32x8_store_unaligned (u32x8_load_unaligned (src), dst);
124 #if defined(CLIB_HAVE_VEC128) 125 while (n_indices >= 4)
127 u32x4_store_unaligned (u32x4_load_unaligned (src), dst);
147 #if defined CLIB_HAVE_VEC512 148 b->as_u8x64[0] = bt->as_u8x64[0];
149 #elif defined (CLIB_HAVE_VEC256) 150 b->as_u8x32[0] = bt->as_u8x32[0];
151 b->as_u8x32[1] = bt->as_u8x32[1];
152 #elif defined (CLIB_HAVE_VEC128) 153 b->as_u8x16[0] = bt->as_u8x16[0];
154 b->as_u8x16[1] = bt->as_u8x16[1];
155 b->as_u8x16[2] = bt->as_u8x16[2];
156 b->as_u8x16[3] = bt->as_u8x16[3];
182 #ifdef CLIB_HAVE_VEC256 183 u64x4 off = u64x4_splat (buffer_mem_start + offset);
188 u64x4 b0 = u32x4_extend_to_u64x4 (u32x4_load_unaligned (bi));
189 u64x4 b1 = u32x4_extend_to_u64x4 (u32x4_load_unaligned (bi + 4));
200 #ifdef CLIB_HAVE_VEC256 201 u64x4 b0 = u32x4_extend_to_u64x4 (u32x4_load_unaligned (bi));
204 #elif defined (CLIB_HAVE_VEC128) 205 u64x2 off = u64x2_splat (buffer_mem_start + offset);
206 u32x4 bi4 = u32x4_load_unaligned (bi);
208 #if defined (__aarch64__) 279 #ifdef CLIB_HAVE_VEC256 280 u32x8 mask = { 0, 2, 4, 6, 1, 3, 5, 7 };
286 u64x4 v0 = u64x4_load_unaligned (b);
287 u64x4 v1 = u64x4_load_unaligned (b + 4);
302 u32x4_store_unaligned (u32x8_extract_lo (v2), bi);
303 u32x4_store_unaligned (u32x8_extract_lo (v3), bi + 4);
352 return (b->
flags & VLIB_BUFFER_NEXT_PRESENT
402 uword content_len = 0;
412 if (!(b->
flags & VLIB_BUFFER_NEXT_PRESENT))
440 #define vlib_prefetch_buffer_with_index(vm,bi,type) \ 442 vlib_buffer_t * _b = vlib_get_buffer (vm, bi); \ 443 vlib_prefetch_buffer_header (_b, type); \ 524 u8 buffer_pool_index)
539 if (len >= n_buffers)
629 u32 ring_size,
u32 n_buffers)
633 ASSERT (n_buffers <= ring_size);
658 u32 ring_size,
u32 n_buffers,
659 u8 buffer_pool_index)
663 ASSERT (n_buffers <= ring_size);
681 u32 * buffers,
u32 n_buffers)
710 const int queue_size = 128;
712 u8 buffer_pool_index = ~0;
713 u32 n_queue = 0, queue[queue_size + 4];
715 #if defined(CLIB_HAVE_VEC128) 719 .
flags = VLIB_BUFFER_NEXT_PRESENT,
740 #if defined(CLIB_HAVE_VEC128) 741 u8x16 p0, p1, p2, p3, r;
742 p0 = u8x16_load_unaligned (b[0]);
743 p1 = u8x16_load_unaligned (b[1]);
744 p2 = u8x16_load_unaligned (b[2]);
745 p3 = u8x16_load_unaligned (b[3]);
747 r = p0 ^ bpi_vec.as_u8x16[0];
748 r |= p1 ^ bpi_vec.as_u8x16[0];
749 r |= p2 ^ bpi_vec.as_u8x16[0];
750 r |= p3 ^ bpi_vec.as_u8x16[0];
751 r &= bpi_mask.as_u8x16[0];
752 r |= (p0 | p1 | p2 | p3) & flags_refs_mask.as_u8x16[0];
754 sum = !u8x16_is_all_zero (r);
760 sum &= VLIB_BUFFER_NEXT_PRESENT;
791 if (n_queue >= queue_size)
808 if (
PREDICT_FALSE (buffer_pool_index != b[0]->buffer_pool_index))
818 #if defined(CLIB_HAVE_VEC128) 832 queue[n_queue++] = bi;
835 if (n_queue == queue_size)
841 if (
flags & VLIB_BUFFER_NEXT_PRESENT)
913 u32 ring_size,
u32 n_buffers)
915 ASSERT (n_buffers <= ring_size);
938 u32 ring_size,
u32 n_buffers)
940 ASSERT (n_buffers <= ring_size);
962 uword n_alloc, n_buffers = 1;
963 u32 flag_mask = VLIB_BUFFER_NEXT_PRESENT | VLIB_BUFFER_TOTAL_LENGTH_VALID;
967 while (s->
flags & VLIB_BUFFER_NEXT_PRESENT)
972 u32 new_buffers[n_buffers];
998 for (i = 1; i < n_buffers; i++)
1045 ASSERT ((b->
flags & VLIB_BUFFER_NEXT_PRESENT) == 0);
1055 if (source + length <= destination)
1058 memmove (destination, source, length);
1084 ASSERT (n_buffers <= 256);
1086 ASSERT ((offset + head_end_offset) <
1091 buffers[0] = src_buffer;
1095 for (i = 1; i < n_buffers; i++)
1109 buffers[0] = src_buffer;
1116 for (i = 0; i < n_buffers; i++)
1134 d->
flags = s->
flags | VLIB_BUFFER_NEXT_PRESENT;
1135 d->
flags &= ~VLIB_BUFFER_EXT_HDR_VALID;
1144 while (s->
flags & VLIB_BUFFER_NEXT_PRESENT)
1174 while (n_buffers > 256)
1180 (buffers + n_cloned),
1181 256, head_end_offset, offset);
1186 n_buffers, head_end_offset, offset);
1205 u16 n_buffers,
u16 head_end_offset)
1208 head_end_offset, 0);
1222 ASSERT ((head->
flags & VLIB_BUFFER_NEXT_PRESENT) == 0);
1225 head->
flags |= VLIB_BUFFER_NEXT_PRESENT;
1226 head->
flags &= ~VLIB_BUFFER_TOTAL_LENGTH_VALID;
1227 head->
flags &= ~VLIB_BUFFER_EXT_HDR_VALID;
1228 head->
flags |= (tail->
flags & VLIB_BUFFER_TOTAL_LENGTH_VALID);
1236 if (tail->
flags & VLIB_BUFFER_NEXT_PRESENT)
1249 first->
flags &= ~VLIB_BUFFER_NEXT_PRESENT;
1250 first->
flags |= VLIB_BUFFER_TOTAL_LENGTH_VALID;
1259 last->
flags |= VLIB_BUFFER_NEXT_PRESENT;
1261 next_buffer->
flags &= ~VLIB_BUFFER_NEXT_PRESENT;
1326 uword n_packet_data_bytes,
1327 uword min_n_buffers_each_alloc,
1352 u32 bytes_left = 0, data_size;
1353 u16 src_left, dst_left, n_buffers = 1;
1364 while (b->
flags & VLIB_BUFFER_NEXT_PRESENT)
1375 if (is_cloned && bytes_left >= dst_left)
1378 u32 space_needed = bytes_left - dst_left;
1388 while (len < space_needed)
1396 b->
flags = VLIB_BUFFER_NEXT_PRESENT;
1403 to_free = first->next_buffer;
1404 first->next_buffer = tail;
1409 src_left = sb->current_length;
1424 dst_left = data_size;
1428 while (src_left == 0)
1430 ASSERT (sb->flags & VLIB_BUFFER_NEXT_PRESENT);
1432 src_left = sb->current_length;
1436 bytes_to_copy =
clib_min (dst_left, src_left);
1441 bytes_to_copy =
clib_min (bytes_to_copy, sp - dp);
1446 src_left -= bytes_to_copy;
1447 dst_left -= bytes_to_copy;
1448 dp += bytes_to_copy;
1449 sp += bytes_to_copy;
1450 bytes_left -= bytes_to_copy;
1456 if (is_cloned && to_free)
1460 if (db->
flags & VLIB_BUFFER_NEXT_PRESENT)
1462 db->
flags &= ~VLIB_BUFFER_NEXT_PRESENT;
1465 while (b->
flags & VLIB_BUFFER_NEXT_PRESENT)
1472 first->flags &= ~VLIB_BUFFER_TOTAL_LENGTH_VALID;
u32 flags
buffer flags: VLIB_BUFFER_FREE_LIST_INDEX_MASK: bits used to store free list index, VLIB_BUFFER_IS_TRACED: trace this buffer.
static uword vlib_buffer_get_current_pa(vlib_main_t *vm, vlib_buffer_t *b)
static u8 * vlib_buffer_get_tail(vlib_buffer_t *b)
Get pointer to the end of buffer's data.
static_always_inline void clib_spinlock_unlock(clib_spinlock_t *p)
static_always_inline void clib_spinlock_lock(clib_spinlock_t *p)
uword vlib_buffer_length_in_chain_slow_path(vlib_main_t *vm, vlib_buffer_t *b_first)
static void vlib_buffer_free(vlib_main_t *vm, u32 *buffers, u32 n_buffers)
Free buffers. Frees the entire buffer chain for each buffer.
#define clib_atomic_add_fetch(a, b)
format_function_t format_vlib_buffer_contents
static uword vlib_buffer_get_pa(vlib_main_t *vm, vlib_buffer_t *b)
static_always_inline u64x2 u32x4_extend_to_u64x2_high(u32x4 v)
format_function_t format_vlib_buffer
i16 current_data
Signed offset in data[], pre_data[] that we are currently processing.
static_always_inline vlib_buffer_pool_t * vlib_get_buffer_pool(vlib_main_t *vm, u8 buffer_pool_index)
#define clib_memcpy_fast(a, b, c)
u16 vlib_buffer_chain_append_data_with_alloc(vlib_main_t *vm, vlib_buffer_t *first, vlib_buffer_t **last, void *data, u16 data_len)
static void vlib_buffer_attach_clone(vlib_main_t *vm, vlib_buffer_t *head, vlib_buffer_t *tail)
Attach cloned tail to the buffer.
static void vlib_buffer_chain_increase_length(vlib_buffer_t *first, vlib_buffer_t *last, i32 len)
static void vlib_buffer_move(vlib_main_t *vm, vlib_buffer_t *b, i16 offset)
#define VLIB_BUFFER_PRE_DATA_SIZE
int vlib_buffer_add_data(vlib_main_t *vm, u32 *buffer_index, void *data, u32 n_data_bytes)
u16 current_length
Nbytes between current data and the end of this buffer.
#define CLIB_LOG2_CACHE_LINE_BYTES
vlib_buffer_main_t * buffer_main
foreach_avx2_vec256i static foreach_avx2_vec256u u32x8 u32x8_permute(u32x8 v, u32x8 idx)
static_always_inline void vlib_get_buffers_with_offset(vlib_main_t *vm, u32 *bi, void **b, int count, i32 offset)
Translate array of buffer indices into buffer pointers with offset.
#define vec_validate_aligned(V, I, A)
Make sure vector is long enough for given index (no header, specified alignment)
static vlib_buffer_known_state_t vlib_buffer_is_known(vlib_main_t *vm, u32 buffer_index)
static u32 vlib_buffer_alloc_to_ring_from_pool(vlib_main_t *vm, u32 *ring, u32 start, u32 ring_size, u32 n_buffers, u8 buffer_pool_index)
Allocate buffers into ring from specific buffer pool.
static uword vlib_buffer_length_in_chain(vlib_main_t *vm, vlib_buffer_t *b)
Get length in bytes of the buffer chain.
static u32 vlib_buffer_alloc_on_numa(vlib_main_t *vm, u32 *buffers, u32 n_buffers, u32 numa_node)
Allocate buffers from specific numa node into supplied array.
static vlib_buffer_t * vlib_buffer_copy(vlib_main_t *vm, vlib_buffer_t *b)
u8 default_buffer_pool_index_for_numa[VLIB_BUFFER_MAX_NUMA_NODES]
STATIC_ASSERT_OFFSET_OF(vlib_buffer_t, template_end, 64)
u8 buffer_pool_index
index of buffer pool this buffer belongs.
static_always_inline void vlib_get_buffer_indices(vlib_main_t *vm, vlib_buffer_t **b, u32 *bi, uword count)
Translate array of buffer pointers into buffer indices.
vlib_buffer_pool_thread_t * threads
#define static_always_inline
#define vlib_prefetch_buffer_header(b, type)
Prefetch buffer metadata.
#define vec_elt_at_index(v, i)
Get vector value at index i checking that i is in bounds.
static u32 vlib_get_buffer_index(vlib_main_t *vm, void *p)
Translate buffer pointer into buffer index.
static u16 vlib_buffer_chain_append_data(vlib_main_t *vm, vlib_buffer_t *first, vlib_buffer_t *last, void *data, u16 data_len)
epu8_epi32 epu16_epi32 u64x2
static void vlib_buffer_free_from_ring(vlib_main_t *vm, u32 *ring, u32 start, u32 ring_size, u32 n_buffers)
Free buffers from ring.
static heap_elt_t * first(heap_header_t *h)
static u32 vlib_buffer_chain_linearize(vlib_main_t *vm, vlib_buffer_t *b)
static vlib_buffer_t * vlib_buffer_copy_no_chain(vlib_main_t *vm, vlib_buffer_t *b, u32 *di)
u32 min_n_buffers_each_alloc
u8 * vlib_validate_buffer(vlib_main_t *vm, u32 buffer_index, uword follow_chain)
static_always_inline void vlib_buffer_copy_template(vlib_buffer_t *b, vlib_buffer_t *bt)
format_function_t format_vlib_buffer_and_data
static void vlib_buffer_validate(vlib_main_t *vm, vlib_buffer_t *b)
vlib_buffer_t buffer_template
static void * vlib_buffer_get_current(vlib_buffer_t *b)
Get pointer to current data to process.
static_always_inline uword vlib_buffer_pool_get(vlib_main_t *vm, u8 buffer_pool_index, u32 *buffers, u32 n_buffers)
static_always_inline u32x4 u32x4_shuffle(u32x4 v, const int a, const int b, const int c, const int d)
vlib_buffer_pool_t * buffer_pools
static_always_inline void vlib_get_buffer_indices_with_offset(vlib_main_t *vm, void **b, u32 *bi, uword count, i32 offset)
Translate array of buffer pointers into buffer indices with offset.
static uword vlib_buffer_contents(vlib_main_t *vm, u32 buffer_index, u8 *contents)
Copy buffer contents to memory.
static void vlib_buffer_chain_init(vlib_buffer_t *first)
#define clib_atomic_sub_fetch(a, b)
static_always_inline u32 vlib_buffer_get_default_data_size(vlib_main_t *vm)
#define vec_add_aligned(V, E, N, A)
Add N elements to end of vector V (no header, specified alignment)
clib_spinlock_t buffer_known_hash_lockp
static void vlib_buffer_free_no_next(vlib_main_t *vm, u32 *buffers, u32 n_buffers)
Free buffers, does not free the buffer chain for each buffer.
static u16 vlib_buffer_clone(vlib_main_t *vm, u32 src_buffer, u32 *buffers, u16 n_buffers, u16 head_end_offset)
Create multiple clones of buffer and store them in the supplied array.
static_always_inline void vlib_buffer_pool_put(vlib_main_t *vm, u8 buffer_pool_index, u32 *buffers, u32 n_buffers)
void vlib_buffer_chain_validate(vlib_main_t *vm, vlib_buffer_t *first)
#define vec_free(V)
Free vector's memory (no header).
#define VLIB_BUFFER_MAX_NUMA_NODES
static vlib_buffer_t * vlib_get_next_buffer(vlib_main_t *vm, vlib_buffer_t *b)
Get next buffer in the buffer linked list, or zero for end of list.
void vlib_buffer_validate_alloc_free(vlib_main_t *vm, u32 *buffers, uword n_buffers, vlib_buffer_known_state_t expected_state)
static u32 vlib_buffer_space_left_at_end(vlib_main_t *vm, vlib_buffer_t *b)
static uword round_pow2(uword x, uword pow2)
vlib_buffer_known_state_t
static_always_inline u64x2 u32x4_extend_to_u64x2(u32x4 v)
static u32 vlib_buffer_alloc_from_pool(vlib_main_t *vm, u32 *buffers, u32 n_buffers, u8 buffer_pool_index)
Allocate buffers from specific pool into supplied array.
#define uword_to_pointer(u, type)
vlib buffer structure definition and a few select access methods.
#define vec_delete(V, N, M)
Delete N elements starting at element M.
static u16 vlib_buffer_clone_at_offset(vlib_main_t *vm, u32 src_buffer, u32 *buffers, u16 n_buffers, u16 head_end_offset, i16 offset)
Create multiple clones of buffer and store them in the supplied array.
static void vlib_buffer_advance(vlib_buffer_t *b, word l)
Advance current data pointer by the supplied (signed!) amount.
static uword vlib_buffer_index_length_in_chain(vlib_main_t *vm, u32 bi)
Get length in bytes of the buffer index buffer chain.
static void * vlib_buffer_ptr_from_index(uword buffer_mem_start, u32 buffer_index, uword offset)
static_always_inline void vlib_buffer_free_inline(vlib_main_t *vm, u32 *buffers, u32 n_buffers, int maybe_next)
static uword pointer_to_uword(const void *p)
template key/value backing page structure
static u64 vlib_physmem_get_pa(vlib_main_t *vm, void *mem)
static u32 vlib_buffer_alloc_to_ring(vlib_main_t *vm, u32 *ring, u32 start, u32 ring_size, u32 n_buffers)
Allocate buffers into ring.
static_always_inline void vlib_buffer_copy_indices(u32 *dst, u32 *src, u32 n_indices)
static vlib_buffer_t * vlib_buffer_chain_buffer(vlib_main_t *vm, vlib_buffer_t *last, u32 next_bi)
#define vec_len(v)
Number of elements in vector (rvalue-only, NULL tolerant)
u32 next_buffer
Next buffer for this linked-list of buffers.
#define VLIB_BUFFER_TRACE_TRAJECTORY_INIT(b)
VLIB buffer representation.
uword * buffer_known_hash
struct clib_bihash_value offset
template key/value backing page structure
void * vlib_packet_template_get_packet(vlib_main_t *vm, vlib_packet_template_t *t, u32 *bi_result)
static u16 vlib_buffer_clone_256(vlib_main_t *vm, u32 src_buffer, u32 *buffers, u16 n_buffers, u16 head_end_offset, i16 offset)
Create a maximum of 256 clones of buffer and store them in the supplied array.
static void vlib_packet_template_free(vlib_main_t *vm, vlib_packet_template_t *t)
static void vlib_buffer_free_one(vlib_main_t *vm, u32 buffer_index)
Free one buffer. Shorthand to free a single buffer chain.
static_always_inline void vlib_get_buffers(vlib_main_t *vm, u32 *bi, vlib_buffer_t **b, int count)
Translate array of buffer indices into buffer pointers.
#define CLIB_CACHE_LINE_BYTES
u32 total_length_not_including_first_buffer
Only valid for first buffer in chain.
void vlib_packet_template_init(vlib_main_t *vm, vlib_packet_template_t *t, void *packet_data, uword n_packet_data_bytes, uword min_n_buffers_each_alloc, char *fmt,...)
static u32 vlib_buffer_alloc(vlib_main_t *vm, u32 *buffers, u32 n_buffers)
Allocate buffers into supplied array.
static u8 vlib_buffer_pool_get_default_for_numa(vlib_main_t *vm, u32 numa_node)
static void vlib_buffer_free_from_ring_no_next(vlib_main_t *vm, u32 *ring, u32 start, u32 ring_size, u32 n_buffers)
Free buffers from ring without freeing tail buffers.
volatile u8 ref_count
Reference count for this buffer.
static vlib_buffer_t * vlib_get_buffer(vlib_main_t *vm, u32 buffer_index)
Translate buffer index into buffer pointer.
u32 opaque[10]
Opaque data used by sub-graphs for their own purposes.