#ifdef CLIB_HAVE_VEC128

/* Overflow search buckets carry an extra u32x4 of saved key hashes so
   that main search buckets can be refilled from overflow when space
   becomes available. */
typedef struct
{
  /* 4 results for this bucket. */
  u32x4_union_t result;

  /* 4 key hashes for this bucket. */
  u32x4_union_t key_hash;

  /* n_key_u32s u32x4s of key data follow. */
  u32x4_union_t key[0];
} vhash_overflow_search_bucket_t;
always_inline void
set_overflow_result (vhash_overflow_search_bucket_t * b,
                     u32 i, u32 result, u32 key_hash)
{
  b->result.as_u32[i] = result;
  b->key_hash.as_u32[i] = key_hash;
}
always_inline void
free_overflow_bucket (vhash_overflow_buckets_t * ob,
                      vhash_overflow_search_bucket_t * b, u32 i)
{
  u32 o = (u32x4_union_t *) b - ob->search_buckets;
  vec_add1 (ob->free_indices, 4 * o + i);
}
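
/* Note: the free list stores one u32 per slot: the bucket's u32x4 offset
   within ob->search_buckets scaled by 4, plus the lane (0-3) within the
   bucket.  vhash_set_overflow below decodes this as i & 3 (lane) and
   i / 4 (bucket offset). */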
always_inline vhash_overflow_search_bucket_t *
get_overflow_search_bucket (vhash_overflow_buckets_t * obs, u32 i,
                            u32 n_key_u32s)
{
  return ((vhash_overflow_search_bucket_t *)
          vec_elt_at_index (obs->search_buckets, i));
}
always_inline vhash_overflow_search_bucket_t *
next_overflow_bucket (vhash_overflow_search_bucket_t * b, u32 n_key_u32s)
{
  return (vhash_overflow_search_bucket_t *) & b->key[n_key_u32s];
}
#define foreach_vhash_overflow_bucket(b,ob,n_key_u32s) \
  for ((b) = (vhash_overflow_search_bucket_t *) ob->search_buckets; \
       (u32x4_union_t *) (b) < vec_end (ob->search_buckets); \
       b = next_overflow_bucket (b, n_key_u32s))

u32
vhash_get_overflow (vhash_t * h, u32 key_hash, u32 vi, u32 n_key_u32s)
{
  vhash_overflow_buckets_t *ob = vhash_get_overflow_buckets (h, key_hash);
  vhash_overflow_search_bucket_t *b;
  u32 i, result = 0;

  foreach_vhash_overflow_bucket (b, ob, n_key_u32s)
  {
    u32x4 r = b->result.as_u32x4;

    for (i = 0; i < n_key_u32s; i++)
      r &= vhash_bucket_compare (h, &b->key[0], i, vi);

    result = vhash_merge_results (r);
    if (result)
      break;
  }

  return result;
}
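
/* How the 4-wide probe works: each bucket stores 4 candidate keys in
   transposed (structure-of-arrays) form, so key word w of all 4
   candidates sits in one u32x4.  vhash_bucket_compare yields a per-lane
   all-ones/all-zeros mask; ANDing the masks across all key words leaves
   non-zero lanes only where every word matched.  A scalar sketch of the
   same idea (bucket_key_word/search_key_word are hypothetical helpers,
   for illustration only):

     u32 lane, w, match, result = 0;
     for (lane = 0; lane < 4; lane++)
       {
         match = 1;
         for (w = 0; w < n_key_u32s; w++)
           match &= (bucket_key_word (b, w, lane) == search_key_word (w));
         if (match)
           result = b->result.as_u32[lane];
       }
*/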
u32
vhash_set_overflow (vhash_t * h,
                    u32 key_hash, u32 vi, u32 new_result, u32 n_key_u32s)
{
  vhash_overflow_buckets_t *ob = vhash_get_overflow_buckets (h, key_hash);
  vhash_overflow_search_bucket_t *b;
  u32 i_set, i, old_result;

  foreach_vhash_overflow_bucket (b, ob, n_key_u32s)
  {
    u32x4 r;

    r = b->result.as_u32x4;
    for (i = 0; i < n_key_u32s; i++)
      r &= vhash_bucket_compare (h, &b->key[0], i, vi);

    /* Key already present in overflow?  Replace the result in place. */
    old_result = vhash_merge_results (r);
    if (old_result)
      {
        i_set = vhash_non_empty_result_index (r);
        set_overflow_result (b, i_set, new_result, key_hash);
        return old_result;
      }
  }
  /* Check free list. */
  if (vec_len (ob->free_indices) == 0)
    {
      /* Out of free overflow buckets.  Grow by one bucket (4 slots). */
      u32 j, *p;
      i = vec_len (ob->search_buckets);
      vec_resize_aligned (ob->search_buckets,
                          sizeof (b[0]) / sizeof (u32x4) + n_key_u32s,
                          CLIB_CACHE_LINE_BYTES);
      vec_add2 (ob->free_indices, p, 4);
      for (j = 0; j < 4; j++)
        p[j] = 4 * i + j;
    }

  i = vec_pop (ob->free_indices);

  i_set = i & 3;
  b = ((vhash_overflow_search_bucket_t *)
       vec_elt_at_index (ob->search_buckets, i / 4));

  /* Insert result. */
  set_overflow_result (b, i_set, new_result, key_hash);

  /* Insert key. */
  for (i = 0; i < n_key_u32s; i++)
    b->key[i].as_u32[i_set] = vhash_get_key_word (h, i, vi);

  ob->n_overflow++;
  h->n_elts++;

  return /* old result was invalid */ 0;
}
u32
vhash_unset_overflow (vhash_t * h, u32 key_hash, u32 vi, u32 n_key_u32s)
{
  vhash_overflow_buckets_t *ob = vhash_get_overflow_buckets (h, key_hash);
  vhash_overflow_search_bucket_t *b;
  u32 i_set, i, old_result;

  foreach_vhash_overflow_bucket (b, ob, n_key_u32s)
  {
    u32x4 r;

    r = b->result.as_u32x4;
    for (i = 0; i < n_key_u32s; i++)
      r &= vhash_bucket_compare (h, &b->key[0], i, vi);

    old_result = vhash_merge_results (r);
    if (old_result)
      {
        i_set = vhash_non_empty_result_index (r);

        /* Invalidate the result and invert the saved key hash so that
           this slot can never match again: every live key in this
           overflow bucket has a matching key hash. */
        set_overflow_result (b, i_set, 0, ~key_hash);

        free_overflow_bucket (ob, b, i_set);

        ASSERT (ob->n_overflow > 0);
        ob->n_overflow--;
        h->n_elts--;
        return old_result;
      }
  }

  /* Key not found. */
  return 0;
}
void
vhash_unset_refill_from_overflow (vhash_t * h,
                                  vhash_search_bucket_t * sb,
                                  u32 key_hash, u32 n_key_u32s)
{
  vhash_overflow_buckets_t *obs = vhash_get_overflow_buckets (h, key_hash);
  vhash_overflow_search_bucket_t *ob;
  u32 i, j, i_refill, bucket_mask = h->bucket_mask.as_u32[0];

  /* Find overflow element with matching key hash. */
  foreach_vhash_overflow_bucket (ob, obs, n_key_u32s)
  {
    for (i = 0; i < 4; i++)
      {
        if (!ob->result.as_u32[i])
          continue;
        if ((ob->key_hash.as_u32[i] & bucket_mask)
            != (key_hash & bucket_mask))
          continue;

        i_refill = vhash_empty_result_index (sb->result.as_u32x4);
        sb->result.as_u32[i_refill] = ob->result.as_u32[i];
        for (j = 0; j < n_key_u32s; j++)
          sb->key[j].as_u32[i_refill] = ob->key[j].as_u32[i];
        set_overflow_result (ob, i, 0, ~key_hash);
        free_overflow_bucket (obs, ob, i);
        return;
      }
  }
}
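
/* Why refill matters: an unset frees a slot in a main search bucket.
   Moving an overflow entry whose saved key_hash maps to the same main
   bucket into the freed slot keeps main buckets as full as possible, so
   the slower overflow probe is taken less often.  The key_hash u32x4
   saved per overflow bucket makes this check possible without
   rehashing the stored keys. */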
void
vhash_init (vhash_t * h, u32 log2_n_keys, u32 n_key_u32, u32 * hash_seeds)
{
  uword i, j, m;
  vhash_search_bucket_t *b;

  clib_memset (h, 0, sizeof (h[0]));

  /* Must have at least 4 keys (e.g. one search bucket). */
  log2_n_keys = clib_max (log2_n_keys, 2);

  h->log2_n_keys = log2_n_keys;
  h->n_key_u32 = n_key_u32;
  m = pow2_mask (h->log2_n_keys) & ~3;
  for (i = 0; i < VECTOR_WORD_TYPE_LEN (u32); i++)
    h->bucket_mask.as_u32[i] = m;

  /* Allocate and zero search buckets. */
  i = (sizeof (b[0]) / sizeof (u32x4) + n_key_u32) << (log2_n_keys - 2);
  vec_validate_aligned (h->search_buckets, i - 1, CLIB_CACHE_LINE_BYTES);

  for (i = 0; i < ARRAY_LEN (h->find_first_zero_table); i++)
    h->find_first_zero_table[i] = min_log2 (first_set (~i));

  for (i = 0; i < ARRAY_LEN (h->hash_seeds); i++)
    for (j = 0; j < VECTOR_WORD_TYPE_LEN (u32); j++)
      h->hash_seeds[i].as_u32[j] = hash_seeds[i];
}
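
/* A minimal usage sketch (hypothetical values).  The table below holds
   2-word (64-bit) keys and at least 2^10 entries; three seed words are
   supplied, matching ARRAY_LEN (h->hash_seeds) in this implementation:

     vhash_t h;
     u32 seeds[3] = { 0x2f3a4b5c, 0x6d7e8f90, 0x1a2b3c4d };  // arbitrary
     vhash_init (&h, 10, 2, seeds);
*/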
static_always_inline u32
vhash_main_key_gather (void *_vm, u32 vi, u32 wi, u32 n_key_u32)
{
  vhash_main_t *vm = _vm;
  return vec_elt (vm->keys, vi * n_key_u32 + wi);
}
static_always_inline u32x4
vhash_main_4key_gather (void *_vm, u32 vi, u32 wi, u32 n_key_u32s)
{
  vhash_main_t *vm = _vm;
  u32x4_union_t x;

  ASSERT (n_key_u32s == vm->n_key_u32);
  ASSERT (wi < n_key_u32s);

  x.as_u32[0] = vec_elt (vm->keys, (vi + 0) * n_key_u32s + wi);
  x.as_u32[1] = vec_elt (vm->keys, (vi + 1) * n_key_u32s + wi);
  x.as_u32[2] = vec_elt (vm->keys, (vi + 2) * n_key_u32s + wi);
  x.as_u32[3] = vec_elt (vm->keys, (vi + 3) * n_key_u32s + wi);
  return x.as_u32x4;
}
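
/* The gather is a 4-key transpose: user keys are laid out row-major
   (key 0's words, then key 1's words, ...), while the hash stages want
   word wi of four consecutive keys side by side in one u32x4.  E.g. for
   n_key_u32s == 2, keys vi..vi+3 occupy the u32s

     k0w0 k0w1 | k1w0 k1w1 | k2w0 k2w1 | k3w0 k3w1

   and a gather with wi == 1 returns { k0w1, k1w1, k2w1, k3w1 }. */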
static_always_inline u32
vhash_main_set_result (void *_vm, u32 vi, u32 old_result, u32 n_key_u32)
{
  vhash_main_t *vm = _vm;
  u32 *p = vec_elt_at_index (vm->results, vi);
  u32 new_result = p[0];
  p[0] = old_result;
  return new_result;
}
static_always_inline u32
vhash_main_get_result (void *_vm, u32 vi, u32 old_result, u32 n_key_u32)
{
  vhash_main_t *vm = _vm;
  vec_elt (vm->results, vi) = old_result;
  return old_result;
}
static_always_inline u32x4
vhash_main_get_4result (void *_vm, u32 vi, u32x4 old_result, u32 n_key_u32)
{
  vhash_main_t *vm = _vm;
  u32x4 *p = (u32x4 *) vec_elt_at_index (vm->results, vi);
  p[0] = old_result;
  return old_result;
}
#define _(N_KEY_U32) \
  static_always_inline u32 \
  vhash_main_key_gather_##N_KEY_U32 (void * _vm, u32 vi, u32 i) \
  { return vhash_main_key_gather (_vm, vi, i, N_KEY_U32); } \
 \
  static_always_inline u32x4 \
  vhash_main_4key_gather_##N_KEY_U32 (void * _vm, u32 vi, u32 i) \
  { return vhash_main_4key_gather (_vm, vi, i, N_KEY_U32); } \
 \
  clib_pipeline_stage_static \
  (vhash_main_gather_keys_stage_##N_KEY_U32, \
   vhash_main_t *, vm, i, \
   { \
     vhash_gather_4key_stage \
       (vm->vhash, \
        /* vector_index */ i, \
        vhash_main_4key_gather_##N_KEY_U32, \
        vm, \
        N_KEY_U32); \
   }) \
 \
  clib_pipeline_stage_no_inline \
  (vhash_main_gather_keys_mod_stage_##N_KEY_U32, \
   vhash_main_t *, vm, i, \
   { \
     vhash_gather_key_stage \
       (vm->vhash, \
        vm->n_vectors_div_4, \
        vm->n_vectors_mod_4, \
        vhash_main_key_gather_##N_KEY_U32, \
        vm, \
        N_KEY_U32); \
   }) \
 \
  clib_pipeline_stage \
  (vhash_main_hash_finalize_stage_##N_KEY_U32, \
   vhash_main_t *, vm, i, \
   { \
     vhash_finalize_stage (vm->vhash, i, N_KEY_U32); \
   }) \
 \
  clib_pipeline_stage_no_inline \
  (vhash_main_hash_finalize_mod_stage_##N_KEY_U32, \
   vhash_main_t *, vm, i, \
   { \
     vhash_finalize_stage (vm->vhash, vm->n_vectors_div_4, N_KEY_U32); \
   }) \
 \
  clib_pipeline_stage_static \
  (vhash_main_get_stage_##N_KEY_U32, \
   vhash_main_t *, vm, i, \
   { \
     vhash_get_4_stage (vm->vhash, \
                        /* vector_index */ i, \
                        vhash_main_get_4result, \
                        vm, N_KEY_U32); \
   }) \
 \
  clib_pipeline_stage_no_inline \
  (vhash_main_get_mod_stage_##N_KEY_U32, \
   vhash_main_t *, vm, i, \
   { \
     vhash_get_stage (vm->vhash, \
                      vm->n_vectors_div_4, \
                      vm->n_vectors_mod_4, \
                      vhash_main_get_result, \
                      vm, N_KEY_U32); \
   }) \
 \
  clib_pipeline_stage_static \
  (vhash_main_set_stage_##N_KEY_U32, \
   vhash_main_t *, vm, i, \
   { \
     vhash_set_stage (vm->vhash, \
                      /* vector_index */ i, \
                      /* n_vectors */ VECTOR_WORD_TYPE_LEN (u32), \
                      vhash_main_set_result, \
                      vm, N_KEY_U32); \
   }) \
 \
  clib_pipeline_stage_no_inline \
  (vhash_main_set_mod_stage_##N_KEY_U32, \
   vhash_main_t *, vm, i, \
   { \
     vhash_set_stage (vm->vhash, \
                      vm->n_vectors_div_4, \
                      vm->n_vectors_mod_4, \
                      vhash_main_set_result, \
                      vm, N_KEY_U32); \
   }) \
 \
  clib_pipeline_stage_static \
  (vhash_main_unset_stage_##N_KEY_U32, \
   vhash_main_t *, vm, i, \
   { \
     vhash_unset_stage (vm->vhash, \
                        /* vector_index */ i, \
                        /* n_vectors */ VECTOR_WORD_TYPE_LEN (u32), \
                        vhash_main_get_result, \
                        vm, N_KEY_U32); \
   }) \
 \
  clib_pipeline_stage_no_inline \
  (vhash_main_unset_mod_stage_##N_KEY_U32, \
   vhash_main_t *, vm, i, \
   { \
     vhash_unset_stage (vm->vhash, \
                        vm->n_vectors_div_4, \
                        vm->n_vectors_mod_4, \
                        vhash_main_get_result, \
                        vm, N_KEY_U32); \
   })

_(1);
_(2);
_(3);
_(4);
_(5);
_(6);

#undef _

#define _(N_KEY_U32) \
  clib_pipeline_stage \
  (vhash_main_hash_mix_stage_##N_KEY_U32, \
   vhash_main_t *, vm, i, \
   { \
     vhash_mix_stage (vm->vhash, i, N_KEY_U32); \
   }) \
 \
  clib_pipeline_stage_no_inline \
  (vhash_main_hash_mix_mod_stage_##N_KEY_U32, \
   vhash_main_t *, vm, i, \
   { \
     vhash_mix_stage (vm->vhash, vm->n_vectors_div_4, N_KEY_U32); \
   })

/* Mix stages are only needed for keys of more than 3 u32s. */
_(4);
_(5);
_(6);

#undef _

typedef enum
{
  GET, SET, UNSET,
} vhash_main_op_t;

static void
vhash_main_op (vhash_main_t * vm, vhash_main_op_t op)
{
  u32 n_keys = vec_len (vm->results);

  vm->n_key_u32 = vm->vhash->n_key_u32;

  vhash_validate_sizes (vm->vhash, vm->n_key_u32, n_keys);

  vm->n_vectors_div_4 = n_keys / 4;
  vm->n_vectors_mod_4 = n_keys % 4;

  if (vm->n_vectors_div_4 > 0)
    {
      switch (vm->n_key_u32)
        {
        default:
          ASSERT (0);
          break;
#define _(N_KEY_U32) \
        case N_KEY_U32: \
          if (op == GET) \
            clib_pipeline_run_3_stage \
              (vm->n_vectors_div_4, \
               vm, \
               vhash_main_gather_keys_stage_##N_KEY_U32, \
               vhash_main_hash_finalize_stage_##N_KEY_U32, \
               vhash_main_get_stage_##N_KEY_U32); \
          else if (op == SET) \
            clib_pipeline_run_3_stage \
              (vm->n_vectors_div_4, \
               vm, \
               vhash_main_gather_keys_stage_##N_KEY_U32, \
               vhash_main_hash_finalize_stage_##N_KEY_U32, \
               vhash_main_set_stage_##N_KEY_U32); \
          else \
            clib_pipeline_run_3_stage \
              (vm->n_vectors_div_4, \
               vm, \
               vhash_main_gather_keys_stage_##N_KEY_U32, \
               vhash_main_hash_finalize_stage_##N_KEY_U32, \
               vhash_main_unset_stage_##N_KEY_U32); \
          break;

          _(1);
          _(2);
          _(3);

#undef _

#define _(N_KEY_U32) \
        case N_KEY_U32: \
          if (op == GET) \
            clib_pipeline_run_4_stage \
              (vm->n_vectors_div_4, \
               vm, \
               vhash_main_gather_keys_stage_##N_KEY_U32, \
               vhash_main_hash_mix_stage_##N_KEY_U32, \
               vhash_main_hash_finalize_stage_##N_KEY_U32, \
               vhash_main_get_stage_##N_KEY_U32); \
          else if (op == SET) \
            clib_pipeline_run_4_stage \
              (vm->n_vectors_div_4, \
               vm, \
               vhash_main_gather_keys_stage_##N_KEY_U32, \
               vhash_main_hash_mix_stage_##N_KEY_U32, \
               vhash_main_hash_finalize_stage_##N_KEY_U32, \
               vhash_main_set_stage_##N_KEY_U32); \
          else \
            clib_pipeline_run_4_stage \
              (vm->n_vectors_div_4, \
               vm, \
               vhash_main_gather_keys_stage_##N_KEY_U32, \
               vhash_main_hash_mix_stage_##N_KEY_U32, \
               vhash_main_hash_finalize_stage_##N_KEY_U32, \
               vhash_main_unset_stage_##N_KEY_U32); \
          break;

          _(4);
          _(5);
          _(6);

#undef _
        }
    }

  if (vm->n_vectors_mod_4 > 0)
    {
      switch (vm->n_key_u32)
        {
        default:
          ASSERT (0);
          break;
#define _(N_KEY_U32) \
        case N_KEY_U32: \
          if (op == GET) \
            clib_pipeline_run_3_stage \
              (/* n_vectors */ 1, \
               vm, \
               vhash_main_gather_keys_mod_stage_##N_KEY_U32, \
               vhash_main_hash_finalize_mod_stage_##N_KEY_U32, \
               vhash_main_get_mod_stage_##N_KEY_U32); \
          else if (op == SET) \
            clib_pipeline_run_3_stage \
              (/* n_vectors */ 1, \
               vm, \
               vhash_main_gather_keys_mod_stage_##N_KEY_U32, \
               vhash_main_hash_finalize_mod_stage_##N_KEY_U32, \
               vhash_main_set_mod_stage_##N_KEY_U32); \
          else \
            clib_pipeline_run_3_stage \
              (/* n_vectors */ 1, \
               vm, \
               vhash_main_gather_keys_mod_stage_##N_KEY_U32, \
               vhash_main_hash_finalize_mod_stage_##N_KEY_U32, \
               vhash_main_unset_mod_stage_##N_KEY_U32); \
          break;

          _(1);
          _(2);
          _(3);

#undef _

#define _(N_KEY_U32) \
        case N_KEY_U32: \
          if (op == GET) \
            clib_pipeline_run_4_stage \
              (/* n_vectors */ 1, \
               vm, \
               vhash_main_gather_keys_mod_stage_##N_KEY_U32, \
               vhash_main_hash_mix_mod_stage_##N_KEY_U32, \
               vhash_main_hash_finalize_mod_stage_##N_KEY_U32, \
               vhash_main_get_mod_stage_##N_KEY_U32); \
          else if (op == SET) \
            clib_pipeline_run_4_stage \
              (/* n_vectors */ 1, \
               vm, \
               vhash_main_gather_keys_mod_stage_##N_KEY_U32, \
               vhash_main_hash_mix_mod_stage_##N_KEY_U32, \
               vhash_main_hash_finalize_mod_stage_##N_KEY_U32, \
               vhash_main_set_mod_stage_##N_KEY_U32); \
          else \
            clib_pipeline_run_4_stage \
              (/* n_vectors */ 1, \
               vm, \
               vhash_main_gather_keys_mod_stage_##N_KEY_U32, \
               vhash_main_hash_mix_mod_stage_##N_KEY_U32, \
               vhash_main_hash_finalize_mod_stage_##N_KEY_U32, \
               vhash_main_unset_mod_stage_##N_KEY_U32); \
          break;

          _(4);
          _(5);
          _(6);

#undef _
        }
    }
}

void
vhash_main_get (vhash_main_t * vm)
{
  vhash_main_op (vm, GET);
}
void
vhash_main_set (vhash_main_t * vm)
{
  vhash_main_op (vm, SET);
}
void
vhash_main_unset (vhash_main_t * vm)
{
  vhash_main_op (vm, UNSET);
}
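
/* Sketch of a batched lookup through vhash_main_t, continuing the
   vhash_init sketch above (hypothetical setup; n_lookups and key_words
   are illustration-only, and fields beyond vhash/keys/results are
   assumed zero-initialized).  Keys are packed as n_key_u32 consecutive
   u32s per lookup, and vec_len (vm.results) sets how many lookups run:

     vhash_main_t vm = { 0 };
     u32 li, w;
     vm.vhash = &h;
     for (li = 0; li < n_lookups; li++)
       {
         for (w = 0; w < h.n_key_u32; w++)
           vec_add1 (vm.keys, key_words[li * h.n_key_u32 + w]);
         vec_add1 (vm.results, 0);
       }
     vhash_main_get (&vm);
     // vm.results[li] now holds the result stored for key li (0 = not found).
*/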
u32
vhash_resize_incremental (vhash_resize_t * vr,
                          u32 vector_index, u32 n_keys_this_call)
{
  vhash_t *old = vr->old;
  vhash_main_t *vm = &vr->new;
  vhash_t *new = vm->vhash;
  uword i, j, n_key_u32;

  n_key_u32 = old->n_key_u32;

  if (vector_index == 0)
    {
      u32 hash_seeds[3];
      hash_seeds[0] = old->hash_seeds[0].as_u32[0];
      hash_seeds[1] = old->hash_seeds[1].as_u32[0];
      hash_seeds[2] = old->hash_seeds[2].as_u32[0];
      vhash_init (new, old->log2_n_keys + 1, n_key_u32, hash_seeds);
    }

  vec_reset_length (vm->keys);
  vec_reset_length (vm->results);
  if (0 == (vector_index >> old->log2_n_keys))
    {
      for (i = vector_index; 0 == (i >> (old->log2_n_keys - 2)); i++)
        {
          vhash_search_bucket_t *b =
            vhash_get_search_bucket_with_index (old, 4 * i, n_key_u32);
          u32 r, *k;

          /* Stored results are biased by +1 so that 0 can mean empty;
             subtract the bias when carrying results over. */
#define _(I) \
  if ((r = b->result.as_u32[I]) != 0) \
    { \
      vec_add1 (vm->results, r - 1); \
      vec_add2 (vm->keys, k, n_key_u32); \
      for (j = 0; j < n_key_u32; j++) \
        k[j] = b->key[j].as_u32[I]; \
    }

          _(0);
          _(1);
          _(2);
          _(3);

#undef _

          /* Copy in batches so each call stays bounded. */
          if (vec_len (vm->results) >= n_keys_this_call)
            {
              vhash_main_op (vm, SET);
              return 1 + i;
            }
        }
    }
  /* All main search buckets copied; now copy overflow entries. */
  {
    vhash_overflow_buckets_t *ob;
    vhash_overflow_search_bucket_t *b;

    for (ob = old->overflow_buckets;
         ob < old->overflow_buckets + ARRAY_LEN (old->overflow_buckets);
         ob++)
      {
        foreach_vhash_overflow_bucket (b, ob, old->n_key_u32)
        {
          u32 r, *k;

#define _(I) \
  if ((r = b->result.as_u32[I]) != 0) \
    { \
      vec_add1 (vm->results, r - 1); \
      vec_add2 (vm->keys, k, n_key_u32); \
      for (j = 0; j < n_key_u32; j++) \
        k[j] = b->key[j].as_u32[I]; \
    }

          _(0);
          _(1);
          _(2);
          _(3);

#undef _
        }
      }
  }

  vhash_main_op (vm, SET);

  /* Let caller know we are done. */
  return ~0;
}
void
vhash_resize (vhash_t * old, u32 log2_n_keys)
{
  static vhash_resize_t vr;
  vhash_t new;
  u32 i = 0;

  vr.old = old;
  vr.new.vhash = &new;

  while (1)
    {
      i = vhash_resize_incremental (&vr, i, 1024);
      if (i == ~0)
        break;
    }

  vhash_free (old);
  *old = new;
}
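
/* vhash_resize blocks until the whole table is copied.  A caller that
   must bound per-call latency can drive the incremental version itself
   (sketch; the cursor and vr must persist across calls):

     u32 cursor = 0;          // 0 starts a fresh resize
     ...
     cursor = vhash_resize_incremental (&vr, cursor, 256);
     if (cursor == ~0)
       ;  // done: adopt vr.new.vhash, free the old table
*/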
#endif /* CLIB_HAVE_VEC128 */

/* Reference: vppinfra primitives used above.

   vec_add1 (V, E) - add 1 element to end of vector (unspecified alignment).
   vec_add2 (V, P, N) - add N elements to end of vector V; return pointer to
     the new elements in P.
   vec_validate_aligned (V, I, A) - make sure vector is long enough for given
     index (no header, specified alignment).
   vec_resize_aligned (V, N, A) - resize a vector (no header, alignment
     specified).
   vec_pop (V) - return last element of a vector and decrement its length.
   vec_reset_length (v) - reset vector length to zero (NULL-pointer tolerant).
   vec_elt (v, i) - get vector value at index i.
   vec_elt_at_index (v, i) - get vector value at index i, checking that i is
     in bounds.
   vec_len (v) - number of elements in vector (rvalue-only, NULL tolerant).
   min_log2 (x) - static uword min_log2 (uword x); floor of base-2 logarithm.
   pow2_mask (x) - static uword pow2_mask (uword x); mask of x low-order ones.
   first_set (x) - static uword first_set (uword x); least-significant set
     bit of x.
   static_always_inline - static function attribute forcing inlining.
   VECTOR_WORD_TYPE_LEN (t) - number of elements of type t in one vector word.
   CLIB_CACHE_LINE_BYTES - cache line size in bytes, used for alignment. */