Lines matching full:map in ck_rhs.c (Concurrency Kit's Robin Hood hash set)

103     struct ck_rhs_map *map,
135 ck_rhs_entry(struct ck_rhs_map *map, long offset) in ck_rhs_entry() argument
138 if (map->read_mostly) in ck_rhs_entry()
139 return (map->entries.no_entries.entries[offset]); in ck_rhs_entry()
141 return (map->entries.descs[offset].entry); in ck_rhs_entry()
145 ck_rhs_entry_addr(struct ck_rhs_map *map, long offset) in ck_rhs_entry_addr() argument
148 if (map->read_mostly) in ck_rhs_entry_addr()
149 return (&map->entries.no_entries.entries[offset]); in ck_rhs_entry_addr()
151 return (&map->entries.descs[offset].entry); in ck_rhs_entry_addr()
155 ck_rhs_desc(struct ck_rhs_map *map, long offset) in ck_rhs_desc() argument
158 if (CK_CC_UNLIKELY(map->read_mostly)) in ck_rhs_desc()
159 return ((struct ck_rhs_entry_desc *)(void *)&map->entries.no_entries.descs[offset]); in ck_rhs_desc()
161 return (&map->entries.descs[offset]); in ck_rhs_desc()
165 ck_rhs_wanted_inc(struct ck_rhs_map *map, long offset) in ck_rhs_wanted_inc() argument
168 if (CK_CC_UNLIKELY(map->read_mostly)) in ck_rhs_wanted_inc()
169 map->entries.no_entries.descs[offset].wanted++; in ck_rhs_wanted_inc()
171 map->entries.descs[offset].wanted++; in ck_rhs_wanted_inc()
175 ck_rhs_probes(struct ck_rhs_map *map, long offset) in ck_rhs_probes() argument
178 if (CK_CC_UNLIKELY(map->read_mostly)) in ck_rhs_probes()
179 return (map->entries.no_entries.descs[offset].probes); in ck_rhs_probes()
181 return (map->entries.descs[offset].probes); in ck_rhs_probes()
185 ck_rhs_set_probes(struct ck_rhs_map *map, long offset, unsigned int value) in ck_rhs_set_probes() argument
188 if (CK_CC_UNLIKELY(map->read_mostly)) in ck_rhs_set_probes()
189 map->entries.no_entries.descs[offset].probes = value; in ck_rhs_set_probes()
191 map->entries.descs[offset].probes = value; in ck_rhs_set_probes()
195 ck_rhs_probe_bound(struct ck_rhs_map *map, long offset) in ck_rhs_probe_bound() argument
198 if (CK_CC_UNLIKELY(map->read_mostly)) in ck_rhs_probe_bound()
199 return (map->entries.no_entries.descs[offset].probe_bound); in ck_rhs_probe_bound()
201 return (map->entries.descs[offset].probe_bound); in ck_rhs_probe_bound()
205 ck_rhs_probe_bound_addr(struct ck_rhs_map *map, long offset) in ck_rhs_probe_bound_addr() argument
208 if (CK_CC_UNLIKELY(map->read_mostly)) in ck_rhs_probe_bound_addr()
209 return (&map->entries.no_entries.descs[offset].probe_bound); in ck_rhs_probe_bound_addr()
211 return (&map->entries.descs[offset].probe_bound); in ck_rhs_probe_bound_addr()
216 ck_rhs_in_rh(struct ck_rhs_map *map, long offset) in ck_rhs_in_rh() argument
219 if (CK_CC_UNLIKELY(map->read_mostly)) in ck_rhs_in_rh()
220 return (map->entries.no_entries.descs[offset].in_rh); in ck_rhs_in_rh()
222 return (map->entries.descs[offset].in_rh); in ck_rhs_in_rh()
226 ck_rhs_set_rh(struct ck_rhs_map *map, long offset) in ck_rhs_set_rh() argument
229 if (CK_CC_UNLIKELY(map->read_mostly)) in ck_rhs_set_rh()
230 map->entries.no_entries.descs[offset].in_rh = true; in ck_rhs_set_rh()
232 map->entries.descs[offset].in_rh = true; in ck_rhs_set_rh()
236 ck_rhs_unset_rh(struct ck_rhs_map *map, long offset) in ck_rhs_unset_rh() argument
239 if (CK_CC_UNLIKELY(map->read_mostly)) in ck_rhs_unset_rh()
240 map->entries.no_entries.descs[offset].in_rh = false; in ck_rhs_unset_rh()
242 map->entries.descs[offset].in_rh = false; in ck_rhs_unset_rh()
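
The accessors above (ck_rhs_entry, ck_rhs_desc, ck_rhs_wanted_inc, ck_rhs_probes, ck_rhs_probe_bound, ck_rhs_in_rh and friends) all branch on map->read_mostly: in the default mode each slot pointer lives inline in its ck_rhs_entry_desc, while in CK_RHS_MODE_READ_MOSTLY the pointers are packed into a dense array kept separate from the per-slot metadata, so lookups touch fewer cache lines. A minimal sketch of the two layouts, with illustrative type and field names rather than the exact ck_rhs definitions:

#include <stdbool.h>

/* Hedged sketch of the two slot layouts; type and field names are
 * illustrative, not the exact ck_rhs structure definitions. */
struct toy_entry_desc {          /* default mode: pointer and metadata interleaved */
	const void *entry;
	unsigned int probes;
	unsigned int probe_bound;
	unsigned int wanted;
	bool in_rh;
};

struct toy_no_entry_desc {       /* read-mostly mode: metadata only */
	unsigned int probes;
	unsigned int probe_bound;
	unsigned int wanted;
	bool in_rh;
};

struct toy_map {
	bool read_mostly;
	union {
		struct toy_entry_desc *descs;            /* default layout */
		struct {
			const void **entries;            /* dense pointer array */
			struct toy_no_entry_desc *descs; /* parallel metadata array */
		} no_entries;                            /* CK_RHS_MODE_READ_MOSTLY layout */
	} entries;
};

/* Mirrors what ck_rhs_entry() does: fetch the slot pointer from whichever
 * layout is active for this map. */
static inline const void *
toy_entry(struct toy_map *map, long offset)
{

	if (map->read_mostly)
		return map->entries.no_entries.entries[offset];
	return map->entries.descs[offset].entry;
}
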
254 struct ck_rhs_map *map = hs->map; in ck_rhs_set_load_factor() local
260 map->max_entries = (map->capacity * (unsigned long)hs->load_factor) / 100; in ck_rhs_set_load_factor()
261 while (map->n_entries > map->max_entries) { in ck_rhs_set_load_factor()
262 if (ck_rhs_grow(hs, map->capacity << 1) == false) in ck_rhs_set_load_factor()
264 map = hs->map; in ck_rhs_set_load_factor()
281 struct ck_rhs_map *map = hs->map; in ck_rhs_next() local
284 if (i->offset >= map->capacity) in ck_rhs_next()
288 value = CK_CC_DECONST_PTR(ck_rhs_entry(map, i->offset)); in ck_rhs_next()
298 } while (++i->offset < map->capacity); in ck_rhs_next()
306 struct ck_rhs_map *map = hs->map; in ck_rhs_stat() local
308 st->n_entries = map->n_entries; in ck_rhs_stat()
309 st->probe_maximum = map->probe_maximum; in ck_rhs_stat()
317 return hs->map->n_entries; in ck_rhs_count()
321 ck_rhs_map_destroy(struct ck_malloc *m, struct ck_rhs_map *map, bool defer) in ck_rhs_map_destroy() argument
324 m->free(map, map->size, defer); in ck_rhs_map_destroy()
332 ck_rhs_map_destroy(hs->m, hs->map, false); in ck_rhs_destroy()
339 struct ck_rhs_map *map; in ck_rhs_map_create() local
355 map = hs->m->malloc(size); in ck_rhs_map_create()
356 if (map == NULL) in ck_rhs_map_create()
358 map->read_mostly = !!(hs->mode & CK_RHS_MODE_READ_MOSTLY); in ck_rhs_map_create()
360 map->size = size; in ck_rhs_map_create()
366 map->probe_limit = (unsigned int)limit; in ck_rhs_map_create()
367 map->probe_maximum = 0; in ck_rhs_map_create()
368 map->capacity = n_entries; in ck_rhs_map_create()
369 map->step = ck_cc_ffsl(n_entries); in ck_rhs_map_create()
370 map->mask = n_entries - 1; in ck_rhs_map_create()
371 map->n_entries = 0; in ck_rhs_map_create()
373 map->max_entries = (map->capacity * (unsigned long)hs->load_factor) / 100; in ck_rhs_map_create()
374 /* Align map allocation to cache line. */ in ck_rhs_map_create()
375 if (map->read_mostly) { in ck_rhs_map_create()
376 map->entries.no_entries.entries = (void *)(((uintptr_t)&map[1] + in ck_rhs_map_create()
378 map->entries.no_entries.descs = (void *)(((uintptr_t)map->entries.no_entries.entries + (sizeof(voi… in ck_rhs_map_create()
379 memset(map->entries.no_entries.entries, 0, in ck_rhs_map_create()
381 memset(map->entries.no_entries.descs, 0, in ck_rhs_map_create()
383 map->offset_mask = (CK_MD_CACHELINE / sizeof(void *)) - 1; in ck_rhs_map_create()
384 map->probe_func = ck_rhs_map_probe_rm; in ck_rhs_map_create()
387 map->entries.descs = (void *)(((uintptr_t)&map[1] + in ck_rhs_map_create()
389 memset(map->entries.descs, 0, sizeof(struct ck_rhs_entry_desc) * n_entries); in ck_rhs_map_create()
390 map->offset_mask = (CK_MD_CACHELINE / sizeof(struct ck_rhs_entry_desc)) - 1; in ck_rhs_map_create()
391 map->probe_func = ck_rhs_map_probe; in ck_rhs_map_create()
393 memset(map->generation, 0, sizeof map->generation); in ck_rhs_map_create()
395 /* Commit entries purge with respect to map publication. */ in ck_rhs_map_create()
397 return map; in ck_rhs_map_create()
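
As the lines above show, ck_rhs_map_create() makes one allocation and carves the slot array (or, in read-mostly mode, the pointer and metadata arrays) out of the space following the struct ck_rhs_map header, rounding the start address up to the next cache-line boundary. A minimal, self-contained sketch of that alignment trick, assuming a 64-byte cache line in place of CK_MD_CACHELINE and a toy header type:

#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>

/* Hedged sketch, not the ck_rhs allocator: places a slot array after the
 * map header and rounds it up to a cache-line boundary, as
 * ck_rhs_map_create() does.  CACHELINE stands in for CK_MD_CACHELINE. */
#define CACHELINE 64UL

struct toy_map {
	unsigned long capacity;
	/* ... other header fields ... */
};

int
main(void)
{
	unsigned long n_entries = 16;
	size_t size = sizeof(struct toy_map) +
	    sizeof(void *) * n_entries + CACHELINE - 1;
	struct toy_map *map = malloc(size);
	void **entries;

	if (map == NULL)
		return 1;

	/* Round the address just past the header up to a cache-line boundary. */
	entries = (void *)(((uintptr_t)&map[1] + CACHELINE - 1) &
	    ~(uintptr_t)(CACHELINE - 1));

	printf("header at %p, entries at %p (aligned: %s)\n",
	    (void *)map, (void *)entries,
	    ((uintptr_t)entries % CACHELINE) == 0 ? "yes" : "no");

	free(map);
	return 0;
}

The extra CACHELINE - 1 bytes added to the allocation size are what make the round-up safe.
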
403 struct ck_rhs_map *map, *previous; in ck_rhs_reset_size() local
405 previous = hs->map; in ck_rhs_reset_size()
406 map = ck_rhs_map_create(hs, capacity); in ck_rhs_reset_size()
407 if (map == NULL) in ck_rhs_reset_size()
410 ck_pr_store_ptr(&hs->map, map); in ck_rhs_reset_size()
420 previous = hs->map; in ck_rhs_reset()
425 ck_rhs_map_probe_next(struct ck_rhs_map *map, in ck_rhs_map_probe_next() argument
430 if (probes & map->offset_mask) { in ck_rhs_map_probe_next()
431 offset = (offset &~ map->offset_mask) + in ck_rhs_map_probe_next()
432 ((offset + 1) & map->offset_mask); in ck_rhs_map_probe_next()
435 return (offset + probes) & map->mask; in ck_rhs_map_probe_next()
439 ck_rhs_map_probe_prev(struct ck_rhs_map *map, unsigned long offset, in ck_rhs_map_probe_prev() argument
443 if (probes & map->offset_mask) { in ck_rhs_map_probe_prev()
444 offset = (offset &~ map->offset_mask) + ((offset - 1) & in ck_rhs_map_probe_prev()
445 map->offset_mask); in ck_rhs_map_probe_prev()
448 return ((offset - probes) & map->mask); in ck_rhs_map_probe_prev()
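
ck_rhs_map_probe_next() and ck_rhs_map_probe_prev() keep consecutive probes inside one cache line: while the probe count still has low bits set (probes & map->offset_mask), the next slot is the neighbouring slot within the same line (wrapping inside it), and only once every offset_mask + 1 probes does the sequence jump ahead by the probe count to a new line. A small runnable sketch of the forward step, using example parameters (64-byte cache line, 8-byte slots, 64-slot table) rather than real map fields:

#include <stdio.h>

/* Hedged sketch of the cache-line-bounded probe step computed by
 * ck_rhs_map_probe_next(); the constants below are example values,
 * not taken from a real map. */
#define CACHELINE 64UL
#define SLOT_SIZE 8UL            /* sizeof(void *) on the assumed target */
#define CAPACITY  64UL

static unsigned long
probe_next(unsigned long offset, unsigned long probes)
{
	unsigned long offset_mask = CACHELINE / SLOT_SIZE - 1;    /* 7 */
	unsigned long mask = CAPACITY - 1;                        /* 63 */

	if (probes & offset_mask) {
		/* Stay inside the current cache line, wrapping within it. */
		offset = (offset & ~offset_mask) + ((offset + 1) & offset_mask);
		return offset;
	}
	/* Every offset_mask + 1 probes, jump ahead by the probe count. */
	return (offset + probes) & mask;
}

int
main(void)
{
	unsigned long offset = 13, probes;

	/* Print which slot each successive probe examines, starting at slot 13. */
	for (probes = 1; probes <= 16; probes++) {
		printf("probe %2lu examines slot %2lu\n", probes, offset);
		offset = probe_next(offset, probes);
	}
	return 0;
}
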
497 struct ck_rhs_map *map, *update; in ck_rhs_grow() local
502 map = hs->map; in ck_rhs_grow()
503 if (map->capacity > capacity) in ck_rhs_grow()
510 for (k = 0; k < map->capacity; k++) { in ck_rhs_grow()
513 prev_saved = previous = ck_rhs_entry(map, k); in ck_rhs_grow()
531 * We have hit the probe limit, map needs to be even larger. in ck_rhs_grow()
567 ck_pr_store_ptr(&hs->map, update); in ck_rhs_grow()
568 ck_rhs_map_destroy(hs->m, map, true); in ck_rhs_grow()
576 return ck_rhs_grow(hs, hs->map->capacity); in ck_rhs_rebuild()
581 struct ck_rhs_map *map, in ck_rhs_map_probe_rm() argument
611 offset = h & map->mask; in ck_rhs_map_probe_rm()
615 offset = ck_rhs_map_probe_next(map, *priority, in ck_rhs_map_probe_rm()
632 k = ck_pr_load_ptr(&map->entries.no_entries.entries[offset]); in ck_rhs_map_probe_rm()
637 struct ck_rhs_entry_desc *desc = (void *)&map->entries.no_entries.descs[offset]; in ck_rhs_map_probe_rm()
656 offset = ck_rhs_map_probe_next(map, offset, probes); in ck_rhs_map_probe_rm()
668 offset = ck_rhs_map_probe_next(map, offset, probes); in ck_rhs_map_probe_rm()
675 offset = ck_rhs_map_probe_next(map, offset, probes); in ck_rhs_map_probe_rm()
693 struct ck_rhs_map *map, in ck_rhs_map_probe() argument
724 offset = h & map->mask; in ck_rhs_map_probe()
728 offset = ck_rhs_map_probe_next(map, *priority, in ck_rhs_map_probe()
733 probe_limit = ck_rhs_map_bound_get(map, h); in ck_rhs_map_probe()
747 k = ck_pr_load_ptr(&map->entries.descs[offset].entry); in ck_rhs_map_probe()
751 struct ck_rhs_entry_desc *desc = &map->entries.descs[offset]; in ck_rhs_map_probe()
770 offset = ck_rhs_map_probe_next(map, offset, probes); in ck_rhs_map_probe()
782 offset = ck_rhs_map_probe_next(map, offset, probes); in ck_rhs_map_probe()
789 offset = ck_rhs_map_probe_next(map, offset, probes); in ck_rhs_map_probe()
830 struct ck_rhs_map *map = hs->map; in ck_rhs_gc() local
833 for (i = 0; i < map->capacity; i++) { in ck_rhs_gc()
834 if (ck_rhs_probes(map, i) > max_probes) in ck_rhs_gc()
835 max_probes = ck_rhs_probes(map, i); in ck_rhs_gc()
837 map->probe_maximum = max_probes; in ck_rhs_gc()
845 struct ck_rhs_map *map = hs->map; in ck_rhs_add_wanted() local
851 offset = h & map->mask; in ck_rhs_add_wanted()
859 desc = ck_rhs_desc(map, offset); in ck_rhs_add_wanted()
863 offset = ck_rhs_map_probe_next(map, offset, probes); in ck_rhs_add_wanted()
871 struct ck_rhs_map *map = hs->map; in ck_rhs_remove_wanted() local
872 int probes = ck_rhs_probes(map, offset); in ck_rhs_remove_wanted()
878 offset = ck_rhs_map_probe_prev(map, offset, probes); in ck_rhs_remove_wanted()
882 desc = ck_rhs_desc(map, offset); in ck_rhs_remove_wanted()
891 ck_rhs_get_first_offset(struct ck_rhs_map *map, unsigned long offset, unsigned int probes) in ck_rhs_get_first_offset() argument
893 while (probes > (unsigned long)map->offset_mask + 1) { in ck_rhs_get_first_offset()
894 offset -= ((probes - 1) &~ map->offset_mask); in ck_rhs_get_first_offset()
895 offset &= map->mask; in ck_rhs_get_first_offset()
896 offset = (offset &~ map->offset_mask) + in ck_rhs_get_first_offset()
897 ((offset - map->offset_mask) & map->offset_mask); in ck_rhs_get_first_offset()
898 probes -= map->offset_mask + 1; in ck_rhs_get_first_offset()
900 return ((offset &~ map->offset_mask) + ((offset - (probes - 1)) & map->offset_mask)); in ck_rhs_get_first_offset()
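
ck_rhs_get_first_offset() runs that sequence backwards: given the slot where an entry ended up and the number of probes it took to get there, it recovers the slot of the first probe, h & map->mask. Using the same example parameters as the sketch above (offset_mask = 7, mask = 63): an entry whose home slot is 13 is probed at slots 13, 14, 15, 8, 9, 10, 11, 12 and then jumps to (12 + 8) & 63 = 20 on the ninth probe; feeding offset 20 and probes 9 back in, one loop iteration subtracts (9 - 1) & ~7 = 8 to get 12, remaps it within the line to (12 & ~7) + ((12 - 7) & 7) = 13, and drops probes to 1, so the final return gives (13 & ~7) + ((13 - 0) & 7) = 13, the home slot again. This inverse is what lets call sites such as ck_rhs_put_robin_hood() and the delete path recompute h for ck_rhs_map_bound_set() and the generation counter from nothing but a slot and a probe count.
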
912 struct ck_rhs_map *map; in ck_rhs_put_robin_hood() local
920 map = hs->map; in ck_rhs_put_robin_hood()
924 key = CK_CC_DECONST_PTR(ck_rhs_entry(map, first)); in ck_rhs_put_robin_hood()
931 ck_rhs_set_rh(map, orig_slot); in ck_rhs_put_robin_hood()
933 slot = map->probe_func(hs, map, &n_probes, &first, h, key, &object, in ck_rhs_put_robin_hood()
934 map->probe_limit, prevs_nb == CK_RHS_MAX_RH ? in ck_rhs_put_robin_hood()
938 if (ck_rhs_grow(hs, map->capacity << 1) == false) { in ck_rhs_put_robin_hood()
942 ck_rhs_unset_rh(map, prevs[i]); in ck_rhs_put_robin_hood()
951 desc = ck_rhs_desc(map, first); in ck_rhs_put_robin_hood()
955 h = ck_rhs_get_first_offset(map, first, n_probes); in ck_rhs_put_robin_hood()
956 ck_rhs_map_bound_set(map, h, n_probes); in ck_rhs_put_robin_hood()
963 h = ck_rhs_get_first_offset(map, slot, n_probes); in ck_rhs_put_robin_hood()
964 ck_rhs_map_bound_set(map, h, n_probes); in ck_rhs_put_robin_hood()
965 ck_pr_store_ptr(ck_rhs_entry_addr(map, slot), insert); in ck_rhs_put_robin_hood()
966 ck_pr_inc_uint(&map->generation[h & CK_RHS_G_MASK]); in ck_rhs_put_robin_hood()
968 ck_rhs_set_probes(map, slot, n_probes); in ck_rhs_put_robin_hood()
974 ck_pr_store_ptr(ck_rhs_entry_addr(map, orig_slot), in ck_rhs_put_robin_hood()
975 ck_rhs_entry(map, prev)); in ck_rhs_put_robin_hood()
976 h = ck_rhs_get_first_offset(map, orig_slot, in ck_rhs_put_robin_hood()
979 ck_pr_inc_uint(&map->generation[h & CK_RHS_G_MASK]); in ck_rhs_put_robin_hood()
983 desc = ck_rhs_desc(map, orig_slot); in ck_rhs_put_robin_hood()
991 struct ck_rhs_map *map = hs->map; in ck_rhs_do_backward_shift_delete() local
995 desc = ck_rhs_desc(map, slot); in ck_rhs_do_backward_shift_delete()
1004 while (wanted_probes < map->probe_maximum) { in ck_rhs_do_backward_shift_delete()
1006 offset = ck_rhs_map_probe_next(map, slot, probe); in ck_rhs_do_backward_shift_delete()
1007 while (probe < map->probe_maximum) { in ck_rhs_do_backward_shift_delete()
1008 new_desc = ck_rhs_desc(map, offset); in ck_rhs_do_backward_shift_delete()
1012 offset = ck_rhs_map_probe_next(map, offset, in ck_rhs_do_backward_shift_delete()
1015 if (probe < map->probe_maximum) in ck_rhs_do_backward_shift_delete()
1019 if (!(wanted_probes < map->probe_maximum)) { in ck_rhs_do_backward_shift_delete()
1025 ck_pr_store_ptr(ck_rhs_entry_addr(map, slot), in ck_rhs_do_backward_shift_delete()
1026 ck_rhs_entry(map, offset)); in ck_rhs_do_backward_shift_delete()
1027 ck_pr_inc_uint(&map->generation[h & CK_RHS_G_MASK]); in ck_rhs_do_backward_shift_delete()
1030 struct ck_rhs_entry_desc *hdesc = ck_rhs_desc(map, h); in ck_rhs_do_backward_shift_delete()
1038 max_probes = map->probe_maximum; in ck_rhs_do_backward_shift_delete()
1043 tmp_offset = ck_rhs_map_probe_next(map, offset, in ck_rhs_do_backward_shift_delete()
1046 if (h == (unsigned long)ck_rhs_get_first_offset(map, tmp_offset, probe)) in ck_rhs_do_backward_shift_delete()
1049 tmp_offset = ck_rhs_map_probe_next(map, tmp_offset, probe); in ck_rhs_do_backward_shift_delete()
1061 ck_pr_store_ptr(ck_rhs_entry_addr(map, slot), CK_RHS_EMPTY); in ck_rhs_do_backward_shift_delete()
1063 CK_RHS_STORE(ck_rhs_probe_bound_addr(map, h), in ck_rhs_do_backward_shift_delete()
1078 struct ck_rhs_map *map = hs->map; in ck_rhs_fas() local
1083 slot = map->probe_func(hs, map, &n_probes, &first, h, key, &object, in ck_rhs_fas()
1084 ck_rhs_map_bound_get(map, h), CK_RHS_PROBE); in ck_rhs_fas()
1095 desc = ck_rhs_desc(map, slot); in ck_rhs_fas()
1096 desc2 = ck_rhs_desc(map, first); in ck_rhs_fas()
1104 ck_pr_store_ptr(ck_rhs_entry_addr(map, first), insert); in ck_rhs_fas()
1105 ck_pr_inc_uint(&map->generation[h & CK_RHS_G_MASK]); in ck_rhs_fas()
1111 ck_pr_store_ptr(ck_rhs_entry_addr(map, slot), insert); in ck_rhs_fas()
1112 ck_rhs_set_probes(map, slot, n_probes); in ck_rhs_fas()
1141 struct ck_rhs_map *map; in ck_rhs_apply() local
1145 map = hs->map; in ck_rhs_apply()
1147 …slot = map->probe_func(hs, map, &n_probes, &first, h, key, &object, map->probe_limit, CK_RHS_PROBE… in ck_rhs_apply()
1149 if (ck_rhs_grow(hs, map->capacity << 1) == false) in ck_rhs_apply()
1177 ck_rhs_map_bound_set(map, h, n_probes); in ck_rhs_apply()
1187 desc = ck_rhs_desc(map, slot); in ck_rhs_apply()
1190 desc2 = ck_rhs_desc(map, first); in ck_rhs_apply()
1200 ck_pr_store_ptr(ck_rhs_entry_addr(map, first), insert); in ck_rhs_apply()
1211 ck_pr_inc_uint(&map->generation[h & CK_RHS_G_MASK]); in ck_rhs_apply()
1220 ck_pr_store_ptr(ck_rhs_entry_addr(map, slot), insert); in ck_rhs_apply()
1221 ck_rhs_set_probes(map, slot, n_probes); in ck_rhs_apply()
1227 map->n_entries++; in ck_rhs_apply()
1228 if ((map->n_entries) > map->max_entries) in ck_rhs_apply()
1229 ck_rhs_grow(hs, map->capacity << 1); in ck_rhs_apply()
1244 struct ck_rhs_map *map; in ck_rhs_set() local
1249 map = hs->map; in ck_rhs_set()
1251 …slot = map->probe_func(hs, map, &n_probes, &first, h, key, &object, map->probe_limit, CK_RHS_PROBE… in ck_rhs_set()
1253 if (ck_rhs_grow(hs, map->capacity << 1) == false) in ck_rhs_set()
1258 ck_rhs_map_bound_set(map, h, n_probes); in ck_rhs_set()
1264 desc = ck_rhs_desc(map, slot); in ck_rhs_set()
1267 desc2 = ck_rhs_desc(map, first); in ck_rhs_set()
1277 ck_pr_store_ptr(ck_rhs_entry_addr(map, first), insert); in ck_rhs_set()
1288 ck_pr_inc_uint(&map->generation[h & CK_RHS_G_MASK]); in ck_rhs_set()
1298 ck_pr_store_ptr(ck_rhs_entry_addr(map, slot), insert); in ck_rhs_set()
1299 ck_rhs_set_probes(map, slot, n_probes); in ck_rhs_set()
1305 map->n_entries++; in ck_rhs_set()
1306 if ((map->n_entries) > map->max_entries) in ck_rhs_set()
1307 ck_rhs_grow(hs, map->capacity << 1); in ck_rhs_set()
1324 struct ck_rhs_map *map; in ck_rhs_put_internal() local
1327 map = hs->map; in ck_rhs_put_internal()
1329 slot = map->probe_func(hs, map, &n_probes, &first, h, key, &object, in ck_rhs_put_internal()
1330 map->probe_limit, behavior); in ck_rhs_put_internal()
1333 if (ck_rhs_grow(hs, map->capacity << 1) == false) in ck_rhs_put_internal()
1343 ck_rhs_map_bound_set(map, h, n_probes); in ck_rhs_put_internal()
1347 struct ck_rhs_entry_desc *desc = ck_rhs_desc(map, first); in ck_rhs_put_internal()
1354 ck_pr_store_ptr(ck_rhs_entry_addr(map, first), insert); in ck_rhs_put_internal()
1359 ck_pr_store_ptr(ck_rhs_entry_addr(map, slot), insert); in ck_rhs_put_internal()
1360 ck_rhs_set_probes(map, slot, n_probes); in ck_rhs_put_internal()
1364 map->n_entries++; in ck_rhs_put_internal()
1365 if ((map->n_entries) > map->max_entries) in ck_rhs_put_internal()
1366 ck_rhs_grow(hs, map->capacity << 1); in ck_rhs_put_internal()
1395 struct ck_rhs_map *map; in ck_rhs_get() local
1401 map = ck_pr_load_ptr(&hs->map); in ck_rhs_get()
1402 generation = &map->generation[h & CK_RHS_G_MASK]; in ck_rhs_get()
1404 probe = ck_rhs_map_bound_get(map, h); in ck_rhs_get()
1408 map->probe_func(hs, map, &n_probes, &first, h, key, &object, probe, CK_RHS_PROBE_NO_RH); in ck_rhs_get()
1424 struct ck_rhs_map *map = hs->map; in ck_rhs_remove() local
1427 slot = map->probe_func(hs, map, &n_probes, &first, h, key, &object, in ck_rhs_remove()
1428 ck_rhs_map_bound_get(map, h), CK_RHS_PROBE_NO_RH); in ck_rhs_remove()
1432 map->n_entries--; in ck_rhs_remove()
1450 hs->map = source->map; in ck_rhs_move()
1478 hs->map = ck_rhs_map_create(hs, n_entries); in ck_rhs_init()
1479 return hs->map != NULL; in ck_rhs_init()