FD.io VPP  v19.01.1-17-ge106252
Vector Packet Processing
ip_classify.c
/*
 * Copyright (c) 2015 Cisco and/or its affiliates.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at:
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#include <vnet/ip/ip.h>
#include <vnet/ethernet/ethernet.h>	/* for ethernet_header_t */
#include <vnet/classify/vnet_classify.h>
#include <vnet/dpo/classify_dpo.h>

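/*
 * ip4-classify / ip6-classify graph nodes.
 *
 * Each packet carries a classify DPO index in
 * vnet_buffer (b)->ip.adj_index[VLIB_TX]; the DPO names the classify
 * table to match the packet against.  On a hit the packet goes to the
 * matching entry's next node; on a miss the chain of linked tables is
 * walked and the final table's miss_next_index decides the disposition
 * (default: drop).  Packets typically arrive here when a FIB entry
 * resolves via a classify DPO; the control-plane side of that is
 * outside this file.
 */
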
typedef struct
{
  u32 next_index;
  u32 table_index;
  u32 entry_index;
} ip_classify_trace_t;

/* packet trace format function */
static u8 *
format_ip_classify_trace (u8 * s, va_list * args)
{
  CLIB_UNUSED (vlib_main_t * vm) = va_arg (*args, vlib_main_t *);
  CLIB_UNUSED (vlib_node_t * node) = va_arg (*args, vlib_node_t *);
  ip_classify_trace_t *t = va_arg (*args, ip_classify_trace_t *);

  s = format (s, "IP_CLASSIFY: next_index %d, table %d, entry %d",
	      t->next_index, t->table_index, t->entry_index);
  return s;
}

vlib_node_registration_t ip4_classify_node;
vlib_node_registration_t ip6_classify_node;

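/*
 * Per-node error counters: hits in the first table, misses, and hits
 * found only after walking a chain of linked tables.  These show up in
 * "show errors" output.
 */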
#define foreach_ip_classify_error                \
_(MISS, "Classify misses")                       \
_(HIT, "Classify hits")                          \
_(CHAIN_HIT, "Classify hits after chain walk")

typedef enum
{
#define _(sym,str) IP_CLASSIFY_ERROR_##sym,
  foreach_ip_classify_error
#undef _
    IP_CLASSIFY_N_ERROR,
} ip_classify_error_t;

static char *ip_classify_error_strings[] = {
#define _(sym,string) string,
  foreach_ip_classify_error
#undef _
};

static inline uword
ip_classify_inline (vlib_main_t * vm,
		    vlib_node_runtime_t * node,
		    vlib_frame_t * frame, int is_ip4)
{
  u32 n_left_from, *from, *to_next;
  ip_lookup_next_t next_index;
  vnet_classify_main_t *vcm = &vnet_classify_main;
  f64 now = vlib_time_now (vm);
  u32 hits = 0;
  u32 misses = 0;
  u32 chain_hits = 0;
  u32 n_next;

  if (is_ip4)
    {
      n_next = IP4_LOOKUP_N_NEXT;
    }
  else
    {
      n_next = IP6_LOOKUP_N_NEXT;
    }

  from = vlib_frame_vector_args (frame);
  n_left_from = frame->n_vectors;

  /* First pass: compute hashes */

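  /*
   * The frame is handled in two passes.  This first pass resolves the
   * classify DPO to a table, computes each packet's hash and prefetches
   * the corresponding bucket, so the lookups in the second pass below
   * hit warm cache lines.
   */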
  while (n_left_from > 2)
    {
      vlib_buffer_t *b0, *b1;
      u32 bi0, bi1;
      u8 *h0, *h1;
      u32 cd_index0, cd_index1;
      classify_dpo_t *cd0, *cd1;
      u32 table_index0, table_index1;
      vnet_classify_table_t *t0, *t1;

      /* prefetch next iteration */
      {
        vlib_buffer_t *p1, *p2;

        p1 = vlib_get_buffer (vm, from[1]);
        p2 = vlib_get_buffer (vm, from[2]);

        vlib_prefetch_buffer_header (p1, STORE);
        CLIB_PREFETCH (p1->data, CLIB_CACHE_LINE_BYTES, STORE);
        vlib_prefetch_buffer_header (p2, STORE);
        CLIB_PREFETCH (p2->data, CLIB_CACHE_LINE_BYTES, STORE);
      }

      bi0 = from[0];
      b0 = vlib_get_buffer (vm, bi0);
      h0 = (void *) vlib_buffer_get_current (b0) -
        ethernet_buffer_header_size (b0);

      bi1 = from[1];
      b1 = vlib_get_buffer (vm, bi1);
      h1 = (void *) vlib_buffer_get_current (b1) -
        ethernet_buffer_header_size (b1);

      cd_index0 = vnet_buffer (b0)->ip.adj_index[VLIB_TX];
      cd0 = classify_dpo_get (cd_index0);
      table_index0 = cd0->cd_table_index;

      cd_index1 = vnet_buffer (b1)->ip.adj_index[VLIB_TX];
      cd1 = classify_dpo_get (cd_index1);
      table_index1 = cd1->cd_table_index;

      t0 = pool_elt_at_index (vcm->tables, table_index0);

      t1 = pool_elt_at_index (vcm->tables, table_index1);

      vnet_buffer (b0)->l2_classify.hash =
        vnet_classify_hash_packet (t0, (u8 *) h0);

      vnet_classify_prefetch_bucket (t0, vnet_buffer (b0)->l2_classify.hash);

      vnet_buffer (b1)->l2_classify.hash =
        vnet_classify_hash_packet (t1, (u8 *) h1);

      vnet_classify_prefetch_bucket (t1, vnet_buffer (b1)->l2_classify.hash);

      vnet_buffer (b0)->l2_classify.table_index = table_index0;

      vnet_buffer (b1)->l2_classify.table_index = table_index1;

      from += 2;
      n_left_from -= 2;
    }

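  /* Same hash/prefetch work for any packets the dual loop left over */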
  while (n_left_from > 0)
    {
      vlib_buffer_t *b0;
      u32 bi0;
      u8 *h0;
      u32 cd_index0;
      classify_dpo_t *cd0;
      u32 table_index0;
      vnet_classify_table_t *t0;

      bi0 = from[0];
      b0 = vlib_get_buffer (vm, bi0);
      h0 = (void *) vlib_buffer_get_current (b0) -
        ethernet_buffer_header_size (b0);

      cd_index0 = vnet_buffer (b0)->ip.adj_index[VLIB_TX];
      cd0 = classify_dpo_get (cd_index0);
      table_index0 = cd0->cd_table_index;

      t0 = pool_elt_at_index (vcm->tables, table_index0);
      vnet_buffer (b0)->l2_classify.hash =
        vnet_classify_hash_packet (t0, (u8 *) h0);

      vnet_buffer (b0)->l2_classify.table_index = table_index0;
      vnet_classify_prefetch_bucket (t0, vnet_buffer (b0)->l2_classify.hash);

      from++;
      n_left_from--;
    }

  next_index = node->cached_next_index;
  from = vlib_frame_vector_args (frame);
  n_left_from = frame->n_vectors;

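  /*
   * Second pass: speculatively enqueue each packet to the last-used
   * next node, look up the pre-computed hash in its table, and patch
   * the enqueue if the matching entry (or a table miss) picks a
   * different next node.
   */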
  while (n_left_from > 0)
    {
      u32 n_left_to_next;

      vlib_get_next_frame (vm, node, next_index, to_next, n_left_to_next);

      /* Not enough load/store slots to dual loop... */
      while (n_left_from > 0 && n_left_to_next > 0)
        {
          u32 bi0;
          vlib_buffer_t *b0;
          u32 next0 = IP_LOOKUP_NEXT_DROP;
          u32 table_index0;
          vnet_classify_table_t *t0;
          vnet_classify_entry_t *e0;
          u64 hash0;
          u8 *h0;

          /* Stride 3 seems to work best */
          if (PREDICT_TRUE (n_left_from > 3))
            {
              vlib_buffer_t *p1 = vlib_get_buffer (vm, from[3]);
              vnet_classify_table_t *tp1;
              u32 table_index1;
              u64 phash1;

              table_index1 = vnet_buffer (p1)->l2_classify.table_index;

              if (PREDICT_TRUE (table_index1 != ~0))
                {
                  tp1 = pool_elt_at_index (vcm->tables, table_index1);
                  phash1 = vnet_buffer (p1)->l2_classify.hash;
                  vnet_classify_prefetch_entry (tp1, phash1);
                }
            }

          /* speculatively enqueue b0 to the current next frame */
          bi0 = from[0];
          to_next[0] = bi0;
          from += 1;
          to_next += 1;
          n_left_from -= 1;
          n_left_to_next -= 1;

          b0 = vlib_get_buffer (vm, bi0);
          h0 = b0->data;
          table_index0 = vnet_buffer (b0)->l2_classify.table_index;
          e0 = 0;
          t0 = 0;
          vnet_buffer (b0)->l2_classify.opaque_index = ~0;

          if (PREDICT_TRUE (table_index0 != ~0))
            {
              hash0 = vnet_buffer (b0)->l2_classify.hash;
              t0 = pool_elt_at_index (vcm->tables, table_index0);

              e0 = vnet_classify_find_entry (t0, (u8 *) h0, hash0, now);
              if (e0)
                {
                  vnet_buffer (b0)->l2_classify.opaque_index
                    = e0->opaque_index;
                  vlib_buffer_advance (b0, e0->advance);
                  next0 = (e0->next_index < node->n_next_nodes) ?
                    e0->next_index : next0;
                  hits++;
                }
              else
                {
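                  /*
                   * Miss in the first table: follow next_table_index
                   * through the chain of linked tables until an entry
                   * matches or the chain ends, in which case the last
                   * table's miss_next_index decides where the packet
                   * goes.
                   */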
                  while (1)
                    {
                      if (t0->next_table_index != ~0)
                        t0 = pool_elt_at_index (vcm->tables,
                                                t0->next_table_index);
                      else
                        {
                          next0 = (t0->miss_next_index < n_next) ?
                            t0->miss_next_index : next0;
                          misses++;
                          break;
                        }

                      hash0 = vnet_classify_hash_packet (t0, (u8 *) h0);
                      e0 = vnet_classify_find_entry
                        (t0, (u8 *) h0, hash0, now);
                      if (e0)
                        {
                          vnet_buffer (b0)->l2_classify.opaque_index
                            = e0->opaque_index;
                          vlib_buffer_advance (b0, e0->advance);
                          next0 = (e0->next_index < node->n_next_nodes) ?
                            e0->next_index : next0;
                          hits++;
                          chain_hits++;
                          break;
                        }
                    }
                }
            }

          if (PREDICT_FALSE ((node->flags & VLIB_NODE_FLAG_TRACE)
                             && (b0->flags & VLIB_BUFFER_IS_TRACED)))
            {
              ip_classify_trace_t *t =
                vlib_add_trace (vm, node, b0, sizeof (*t));
              t->next_index = next0;
              t->table_index = t0 ? t0 - vcm->tables : ~0;
              t->entry_index = e0 ? e0 - t0->entries : ~0;
            }

          /* verify speculative enqueue, maybe switch current next frame */
          vlib_validate_buffer_enqueue_x1 (vm, node, next_index,
                                           to_next, n_left_to_next,
                                           bi0, next0);
        }

      vlib_put_next_frame (vm, node, next_index, n_left_to_next);
    }

  vlib_node_increment_counter (vm, node->node_index,
                               IP_CLASSIFY_ERROR_MISS, misses);
  vlib_node_increment_counter (vm, node->node_index,
                               IP_CLASSIFY_ERROR_HIT, hits);
  vlib_node_increment_counter (vm, node->node_index,
                               IP_CLASSIFY_ERROR_CHAIN_HIT, chain_hits);
  return frame->n_vectors;
}

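/* Thin per-address-family wrappers around the shared inline */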
static uword
ip4_classify (vlib_main_t * vm,
              vlib_node_runtime_t * node, vlib_frame_t * frame)
{
  return ip_classify_inline (vm, node, frame, 1 /* is_ip4 */ );
}

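/*
 * Both nodes are registered as siblings of the corresponding lookup
 * node, so they inherit the ip4-lookup / ip6-lookup next-node arcs;
 * that is why n_next_nodes is 0 here and why the inline bounds
 * miss_next_index by IP4/IP6_LOOKUP_N_NEXT.
 */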
/* *INDENT-OFF* */
VLIB_REGISTER_NODE (ip4_classify_node) = {
  .function = ip4_classify,
  .name = "ip4-classify",
  .vector_size = sizeof (u32),
  .sibling_of = "ip4-lookup",
  .format_trace = format_ip_classify_trace,
  .n_errors = ARRAY_LEN (ip_classify_error_strings),
  .error_strings = ip_classify_error_strings,

  .n_next_nodes = 0,
};
/* *INDENT-ON* */

VLIB_NODE_FUNCTION_MULTIARCH (ip4_classify_node, ip4_classify)

static uword
ip6_classify (vlib_main_t * vm,
              vlib_node_runtime_t * node, vlib_frame_t * frame)
{
  return ip_classify_inline (vm, node, frame, 0 /* is_ip4 */ );
}

/* *INDENT-OFF* */
VLIB_REGISTER_NODE (ip6_classify_node) = {
  .function = ip6_classify,
  .name = "ip6-classify",
  .vector_size = sizeof (u32),
  .sibling_of = "ip6-lookup",
  .format_trace = format_ip_classify_trace,
  .n_errors = ARRAY_LEN (ip_classify_error_strings),
  .error_strings = ip_classify_error_strings,

  .n_next_nodes = 0,
};
/* *INDENT-ON* */

VLIB_NODE_FUNCTION_MULTIARCH (ip6_classify_node, ip6_classify)

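/* Nothing to initialize at the moment */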
static clib_error_t *
ip_classify_init (vlib_main_t * vm)
{
  return 0;
}

VLIB_INIT_FUNCTION (ip_classify_init);

/*
 * fd.io coding-style-patch-verification: ON
 *
 * Local Variables:
 * eval: (c-set-style "gnu")
 * End:
 */