40 #ifndef included_vlib_buffer_node_h 41 #define included_vlib_buffer_node_h 70 #define vlib_validate_buffer_enqueue_x2(vm,node,next_index,to_next,n_left_to_next,bi0,bi1,next0,next1) \ 74 int enqueue_code = (next0 != next_index) + 2*(next1 != next_index); \ 76 if (PREDICT_FALSE (enqueue_code != 0)) \ 78 switch (enqueue_code) \ 84 n_left_to_next += 1; \ 85 vlib_set_next_frame_buffer (vm, node, next0, bi0); \ 91 n_left_to_next += 1; \ 92 vlib_set_next_frame_buffer (vm, node, next1, bi1); \ 98 n_left_to_next += 2; \ 99 vlib_set_next_frame_buffer (vm, node, next0, bi0); \ 100 vlib_set_next_frame_buffer (vm, node, next1, bi1); \ 101 if (next0 == next1) \ 103 vlib_put_next_frame (vm, node, next_index, \ 105 next_index = next1; \ 106 vlib_get_next_frame (vm, node, next_index, to_next, n_left_to_next); \ 140 #define vlib_validate_buffer_enqueue_x4(vm,node,next_index,to_next,n_left_to_next,bi0,bi1,bi2,bi3,next0,next1,next2,next3) \ 147 u32 fix_speculation = (next_index ^ next0) | (next_index ^ next1) \ 148 | (next_index ^ next2) | (next_index ^ next3); \ 149 if (PREDICT_FALSE(fix_speculation)) \ 153 n_left_to_next += 4; \ 156 if (next_index == next0) \ 163 vlib_set_next_frame_buffer (vm, node, next0, bi0); \ 165 if (next_index == next1) \ 172 vlib_set_next_frame_buffer (vm, node, next1, bi1); \ 174 if (next_index == next2) \ 181 vlib_set_next_frame_buffer (vm, node, next2, bi2); \ 183 if (next_index == next3) \ 191 vlib_set_next_frame_buffer (vm, node, next3, bi3); \ 194 if (next2 == next3) \ 196 vlib_put_next_frame (vm, node, next_index, n_left_to_next); \ 197 next_index = next3; \ 198 vlib_get_next_frame (vm, node, next_index, to_next, n_left_to_next); \ 224 #define vlib_validate_buffer_enqueue_x1(vm,node,next_index,to_next,n_left_to_next,bi0,next0) \ 227 if (PREDICT_FALSE (next0 != next_index)) \ 229 vlib_put_next_frame (vm, node, next_index, n_left_to_next + 1); \ 230 next_index = next0; \ 231 vlib_get_next_frame (vm, node, next_index, to_next, n_left_to_next); \ 235 
n_left_to_next -= 1; \ 251 u32 * next0,
u32 * next1),
253 void *opaque1,
uword opaque2,
268 while (n_left_from > 0)
274 while (n_left_from >= 4 && n_left_to_next >= 2)
294 pi0 = to_next[0] = from[0];
295 pi1 = to_next[1] = from[1];
304 two_buffers (vm, opaque1, opaque2, p0, p1, &next0, &next1);
307 to_next, n_left_to_next,
308 pi0, pi1, next0, next1);
311 while (n_left_from > 0 && n_left_to_next > 0)
325 one_buffer (vm, opaque1, opaque2, p0, &next0);
328 to_next, n_left_to_next,
344 (fn) (vm, node, buffers, nexts, count);
354 (fn) (vm, node, buffers, next_index, count);
359 u32 frame_queue_index,
u32 *buffer_indices,
360 u16 *thread_indices,
u32 n_packets,
361 int drop_on_congestion)
365 return (fn) (
vm,
node, frame_queue_index, buffer_indices, thread_indices,
366 n_packets, drop_on_congestion);
void() vlib_buffer_enqueue_to_single_next_fn_t(vlib_main_t *vm, vlib_node_runtime_t *node, u32 *buffers, u16 next_index, u32 count)
vlib_buffer_enqueue_to_single_next_fn_t * buffer_enqueue_to_single_next_fn
u16 nexts[VLIB_FRAME_SIZE]
#define static_always_inline
vlib_buffer_func_main_t vlib_buffer_func_main
#define vlib_prefetch_buffer_header(b, type)
Prefetch buffer metadata.
static_always_inline void vlib_buffer_enqueue_to_single_next(vlib_main_t *vm, vlib_node_runtime_t *node, u32 *buffers, u16 next_index, u32 count)
vlib_buffer_enqueue_to_next_fn_t * buffer_enqueue_to_next_fn
vlib_main_t * vm
X-connect all packets from the HOST to the PHY.
#define vlib_validate_buffer_enqueue_x2(vm, node, next_index, to_next, n_left_to_next, bi0, bi1, next0, next1)
Finish enqueueing two buffers forward in the graph.
#define vlib_validate_buffer_enqueue_x1(vm, node, next_index, to_next, n_left_to_next, bi0, next0)
Finish enqueueing one buffer forward in the graph.
#define vlib_get_next_frame(vm, node, next_index, vectors, n_vectors_left)
Get pointer to next frame vector data by (vlib_node_runtime_t, next_index).
u32() vlib_buffer_enqueue_to_thread_fn_t(vlib_main_t *vm, vlib_node_runtime_t *node, u32 frame_queue_index, u32 *buffer_indices, u16 *thread_indices, u32 n_packets, int drop_on_congestion)
void() vlib_buffer_enqueue_to_next_fn_t(vlib_main_t *vm, vlib_node_runtime_t *node, u32 *buffers, u16 *nexts, uword count)
#define CLIB_PREFETCH(addr, size, type)
static_always_inline void vlib_buffer_enqueue_to_next(vlib_main_t *vm, vlib_node_runtime_t *node, u32 *buffers, u16 *nexts, uword count)
u16 cached_next_index
Next frame index that vector arguments were enqueued to the last time this node ran.
vlib_put_next_frame(vm, node, next_index, 0)
nat44_ei_hairpin_src_next_t next_index
static uword generic_buffer_node_inline(vlib_main_t *vm, vlib_node_runtime_t *node, vlib_frame_t *frame, uword sizeof_trace, void *opaque1, uword opaque2, void(*two_buffers)(vlib_main_t *vm, void *opaque1, uword opaque2, vlib_buffer_t *b0, vlib_buffer_t *b1, u32 *next0, u32 *next1), void(*one_buffer)(vlib_main_t *vm, void *opaque1, uword opaque2, vlib_buffer_t *b0, u32 *next0))
void vlib_trace_frame_buffers_only(vlib_main_t *vm, vlib_node_runtime_t *node, u32 *buffers, uword n_buffers, uword next_buffer_stride, uword n_buffer_data_bytes_in_trace)
vlib_node_runtime_t * node
VLIB buffer representation.
static void * vlib_frame_vector_args(vlib_frame_t *f)
Get pointer to frame vector data.
vlib_buffer_enqueue_to_thread_fn_t * buffer_enqueue_to_thread_fn
u16 flags
Copy of main node flags.
#define VLIB_NODE_FLAG_TRACE
static_always_inline u32 vlib_buffer_enqueue_to_thread(vlib_main_t *vm, vlib_node_runtime_t *node, u32 frame_queue_index, u32 *buffer_indices, u16 *thread_indices, u32 n_packets, int drop_on_congestion)
static vlib_buffer_t * vlib_get_buffer(vlib_main_t *vm, u32 buffer_index)
Translate buffer index into buffer pointer.