Lines Matching +full:m +full:- +full:mode (Concurrency Kit, ck_hs.c)
2 * Copyright 2012-2015 Samy Al Bahra.
43 #define CK_HS_PROBE_L1_MASK (CK_HS_PROBE_L1 - 1)
49 #define CK_HS_VMA_MASK ((uintptr_t)((1ULL << CK_MD_VMA_BITS) - 1))
56 #define CK_HS_G_MASK (CK_HS_G - 1)
79 CK_HS_PROBE_TOMBSTONE, /* Short-circuit on tombstone. */
80 CK_HS_PROBE_INSERT /* Short-circuit on probe bound if tombstone found. */
102 ck_pr_store_uint(&map->generation[h], in ck_hs_map_signal()
103 map->generation[h] + 1); in ck_hs_map_signal()
114 if (i->offset >= map->capacity) in _ck_hs_next()
118 value = CK_CC_DECONST_PTR(map->entries[i->offset]); in _ck_hs_next()
121 if (hs->mode & CK_HS_MODE_OBJECT) in _ck_hs_next()
126 i->offset++; in _ck_hs_next()
130 } while (++i->offset < map->capacity); in _ck_hs_next()
139 iterator->cursor = NULL; in ck_hs_iterator_init()
140 iterator->offset = 0; in ck_hs_iterator_init()
141 iterator->map = NULL; in ck_hs_iterator_init()
149 return _ck_hs_next(hs, hs->map, i, key); in ck_hs_next()
155 struct ck_hs_map *m = i->map; in ck_hs_next_spmc() local
157 if (m == NULL) { in ck_hs_next_spmc()
158 m = i->map = ck_pr_load_ptr(&hs->map); in ck_hs_next_spmc()
161 return _ck_hs_next(hs, m, i, key); in ck_hs_next_spmc()
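The iterator fragments above (ck_hs_iterator_init, ck_hs_next, ck_hs_next_spmc) are the whole traversal API. A minimal sketch of a full walk, assuming the set stores C strings and was initialized as in the ck_hs_init sketch at the end of this listing:

#include <ck_hs.h>
#include <stdio.h>

/* Print every key currently in the set. */
static void
set_dump(ck_hs_t *hs)
{
        ck_hs_iterator_t it;
        void *entry;

        ck_hs_iterator_init(&it);
        while (ck_hs_next(hs, &it, &entry) == true)
                printf("%s\n", (const char *)entry);

        return;
}

ck_hs_next_spmc is the variant for readers running concurrently with the single writer under CK_HS_MODE_SPMC; it snapshots hs->map with ck_pr_load_ptr, as line 158 shows.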
167 struct ck_hs_map *map = hs->map; in ck_hs_stat()
169 st->n_entries = map->n_entries; in ck_hs_stat()
170 st->tombstones = map->tombstones; in ck_hs_stat()
171 st->probe_maximum = map->probe_maximum; in ck_hs_stat()
179 return hs->map->n_entries; in ck_hs_count()
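ck_hs_stat and ck_hs_count are read-only introspection; struct ck_hs_stat mirrors the map fields copied at lines 169-171. A small sketch, assuming a single-writer context:

#include <ck_hs.h>
#include <stdio.h>

static void
set_report(ck_hs_t *hs)
{
        struct ck_hs_stat st;

        ck_hs_stat(hs, &st);
        printf("%lu keys, %lu tombstones, probe maximum %lu\n",
            (unsigned long)ck_hs_count(hs),
            (unsigned long)st.tombstones,
            (unsigned long)st.probe_maximum);

        return;
}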
183 ck_hs_map_destroy(struct ck_malloc *m, struct ck_hs_map *map, bool defer) in ck_hs_map_destroy() argument
186 m->free(map, map->size, defer); in ck_hs_map_destroy()
194 ck_hs_map_destroy(hs->m, hs->map, false); in ck_hs_destroy()
208 size = sizeof(struct ck_hs_map) + (sizeof(void *) * n_entries + CK_MD_CACHELINE - 1); in ck_hs_map_create()
210 if (hs->mode & CK_HS_MODE_DELETE) { in ck_hs_map_create()
217 map = hs->m->malloc(size); in ck_hs_map_create()
221 map->size = size; in ck_hs_map_create()
228 map->probe_limit = (unsigned int)limit; in ck_hs_map_create()
229 map->probe_maximum = 0; in ck_hs_map_create()
230 map->capacity = n_entries; in ck_hs_map_create()
231 map->step = ck_cc_ffsl(n_entries); in ck_hs_map_create()
232 map->mask = n_entries - 1; in ck_hs_map_create()
233 map->n_entries = 0; in ck_hs_map_create()
236 map->entries = (void *)(((uintptr_t)&map[1] + prefix + in ck_hs_map_create()
237 CK_MD_CACHELINE - 1) & ~(CK_MD_CACHELINE - 1)); in ck_hs_map_create()
239 memset(map->entries, 0, sizeof(void *) * n_entries); in ck_hs_map_create()
240 memset(map->generation, 0, sizeof map->generation); in ck_hs_map_create()
242 if (hs->mode & CK_HS_MODE_DELETE) { in ck_hs_map_create()
243 map->probe_bound = (CK_HS_WORD *)&map[1]; in ck_hs_map_create()
244 memset(map->probe_bound, 0, prefix); in ck_hs_map_create()
246 map->probe_bound = NULL; in ck_hs_map_create()
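The ck_hs_map_create fragments show that a single allocation carries the map header, an optional probe-bound array (CK_HS_MODE_DELETE, lines 242-244), and the slot array, with the slots rounded up to a cache-line boundary at lines 236-237. A standalone sketch of that round-up arithmetic; ALIGN_UP is an illustrative macro, not part of ck:

#include <stdint.h>
#include <stdio.h>

/* Round x up to the next multiple of a, where a is a power of two. */
#define ALIGN_UP(x, a) (((uintptr_t)(x) + ((a) - 1)) & ~((uintptr_t)(a) - 1))

int
main(void)
{

        printf("%lu\n", (unsigned long)ALIGN_UP(100, 64)); /* 128 */
        printf("%lu\n", (unsigned long)ALIGN_UP(128, 64)); /* 128 */
        return 0;
}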
259 previous = hs->map; in ck_hs_reset_size()
264 ck_pr_store_ptr(&hs->map, map); in ck_hs_reset_size()
265 ck_hs_map_destroy(hs->m, previous, true); in ck_hs_reset_size()
274 previous = hs->map; in ck_hs_reset()
275 return ck_hs_reset_size(hs, previous->capacity); in ck_hs_reset()
287 r = (h >> map->step) >> level; in ck_hs_map_probe_next()
291 (stride | CK_HS_PROBE_L1)) & map->mask; in ck_hs_map_probe_next()
295 ck_hs_map_bound_set(struct ck_hs_map *m, in ck_hs_map_bound_set() argument
299 unsigned long offset = h & m->mask; in ck_hs_map_bound_set()
301 if (n_probes > m->probe_maximum) in ck_hs_map_bound_set()
302 ck_pr_store_uint(&m->probe_maximum, n_probes); in ck_hs_map_bound_set()
304 if (m->probe_bound != NULL && m->probe_bound[offset] < n_probes) { in ck_hs_map_bound_set()
308 CK_HS_STORE(&m->probe_bound[offset], n_probes); in ck_hs_map_bound_set()
316 ck_hs_map_bound_get(struct ck_hs_map *m, unsigned long h) in ck_hs_map_bound_get() argument
318 unsigned long offset = h & m->mask; in ck_hs_map_bound_get()
321 if (m->probe_bound != NULL) { in ck_hs_map_bound_get()
322 r = CK_HS_LOAD(&m->probe_bound[offset]); in ck_hs_map_bound_get()
324 r = ck_pr_load_uint(&m->probe_maximum); in ck_hs_map_bound_get()
326 r = ck_pr_load_uint(&m->probe_maximum); in ck_hs_map_bound_get()
341 map = hs->map; in ck_hs_grow()
342 if (map->capacity > capacity) in ck_hs_grow()
349 for (k = 0; k < map->capacity; k++) { in ck_hs_grow()
352 previous = map->entries[k]; in ck_hs_grow()
357 if (hs->mode & CK_HS_MODE_OBJECT) in ck_hs_grow()
361 h = hs->hf(previous, hs->seed); in ck_hs_grow()
362 offset = h & update->mask; in ck_hs_grow()
366 bucket = (const void **)((uintptr_t)&update->entries[offset] & ~(CK_MD_CACHELINE - 1)); in ck_hs_grow()
369 const void **cursor = bucket + ((j + offset) & (CK_HS_PROBE_L1 - 1)); in ck_hs_grow()
371 if (probes++ == update->probe_limit) in ck_hs_grow()
375 *cursor = map->entries[k]; in ck_hs_grow()
376 update->n_entries++; in ck_hs_grow()
389 if (probes > update->probe_limit) { in ck_hs_grow()
393 ck_hs_map_destroy(hs->m, update, false); in ck_hs_grow()
400 ck_pr_store_ptr(&hs->map, update); in ck_hs_grow()
401 ck_hs_map_destroy(hs->m, map, true); in ck_hs_grow()
409 map->n_entries++; in ck_hs_map_postinsert()
410 if ((map->n_entries << 1) > map->capacity) in ck_hs_map_postinsert()
411 ck_hs_grow(hs, map->capacity << 1); in ck_hs_map_postinsert()
420 return ck_hs_grow(hs, hs->map->capacity); in ck_hs_rebuild()
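ck_hs_grow (lines 341-401) rehashes every live slot into a freshly allocated map and publishes it with ck_pr_store_ptr, while ck_hs_rebuild (line 420) regrows at the current capacity, which discards accumulated tombstones. A usage sketch, assuming a single writer; set_maintain is an illustrative helper:

#include <ck_hs.h>
#include <stdbool.h>

static bool
set_maintain(ck_hs_t *hs, unsigned long expected)
{
        bool ok = true;

        /* Pre-size before a bulk load; a false return is non-fatal and the
         * existing map stays usable. */
        ok &= ck_hs_grow(hs, expected);

        /* Rebuild at the present capacity to shed tombstones. */
        ok &= ck_hs_rebuild(hs);

        return ok;
}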
442 if (hs->mode & CK_HS_MODE_OBJECT) { in ck_hs_map_probe()
452 offset = h & map->mask; in ck_hs_map_probe()
461 bucket = (const void **)((uintptr_t)&map->entries[offset] & ~(CK_MD_CACHELINE - 1)); in ck_hs_map_probe()
464 cursor = bucket + ((j + offset) & (CK_HS_PROBE_L1 - 1)); in ck_hs_map_probe()
498 if (hs->mode & CK_HS_MODE_OBJECT) { in ck_hs_map_probe()
509 if (hs->compare == NULL) in ck_hs_map_probe()
512 if (hs->compare(k, key) == true) in ck_hs_map_probe()
534 ck_hs_marshal(unsigned int mode, const void *key, unsigned long h) in ck_hs_marshal() argument
539 if (mode & CK_HS_MODE_OBJECT) { in ck_hs_marshal()
548 (void)mode; in ck_hs_marshal()
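The CK_HS_MODE_OBJECT branches above (ck_hs_map_probe at line 498, ck_hs_marshal at line 539) implement pointer packing: a slice of the hash is kept in the slot bits above CK_MD_VMA_BITS (see CK_HS_VMA_MASK at line 49), so most mismatching slots can be rejected without dereferencing the stored pointer. A standalone sketch of the idea, assuming 48 usable address bits on a 64-bit platform; the 16-bit tag layout here is illustrative, not ck's exact encoding:

#include <assert.h>
#include <stdint.h>

#define VMA_BITS 48
#define VMA_MASK (((uintptr_t)1 << VMA_BITS) - 1)

static uintptr_t
slot_pack(const void *p, unsigned long hash)
{

        return ((uintptr_t)p & VMA_MASK) |
            ((uintptr_t)(hash & 0xffff) << VMA_BITS);
}

static const void *
slot_pointer(uintptr_t slot)
{

        return (const void *)(slot & VMA_MASK);
}

static unsigned int
slot_tag(uintptr_t slot)
{

        return (unsigned int)(slot >> VMA_BITS);
}

int
main(void)
{
        int object = 42;
        uintptr_t slot = slot_pack(&object, 0xdeadbeefUL);

        /* A probe can compare the tag before touching the object itself. */
        assert(slot_tag(slot) == 0xbeef);
        assert(slot_pointer(slot) == (const void *)&object);
        return 0;
}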
560 struct ck_hs_map *map = hs->map; in ck_hs_gc()
564 if (map->n_entries == 0) { in ck_hs_gc()
565 ck_pr_store_uint(&map->probe_maximum, 0); in ck_hs_gc()
566 if (map->probe_bound != NULL) in ck_hs_gc()
567 memset(map->probe_bound, 0, sizeof(CK_HS_WORD) * map->capacity); in ck_hs_gc()
575 if (map->probe_bound != NULL) { in ck_hs_gc()
576 size = sizeof(CK_HS_WORD) * map->capacity; in ck_hs_gc()
577 bounds = hs->m->malloc(size); in ck_hs_gc()
584 maximum = map->probe_maximum; in ck_hs_gc()
587 for (i = 0; i < map->capacity; i++) { in ck_hs_gc()
591 entry = map->entries[(i + seed) & map->mask]; in ck_hs_gc()
596 if (hs->mode & CK_HS_MODE_OBJECT) in ck_hs_gc()
600 h = hs->hf(entry, hs->seed); in ck_hs_gc()
601 offset = h & map->mask; in ck_hs_gc()
607 const void *insert = ck_hs_marshal(hs->mode, entry, h); in ck_hs_gc()
623 } else if (--cycles == 0) in ck_hs_gc()
631 if (maximum != map->probe_maximum) in ck_hs_gc()
632 ck_pr_store_uint(&map->probe_maximum, maximum); in ck_hs_gc()
635 for (i = 0; i < map->capacity; i++) in ck_hs_gc()
636 CK_HS_STORE(&map->probe_bound[i], bounds[i]); in ck_hs_gc()
638 hs->m->free(bounds, size, false); in ck_hs_gc()
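ck_hs_gc (lines 560-638) walks the map, re-inserting live entries closer to their home slot and recomputing probe bounds, which relieves tombstone pressure without a full ck_hs_rebuild. A usage sketch; the signature assumed here is ck_hs_gc(hs, cycles, seed), where a cycles value of 0 asks for a full pass and seed randomizes the starting slot (line 591):

#include <ck_hs.h>
#include <stdbool.h>

static bool
set_maybe_gc(ck_hs_t *hs)
{
        struct ck_hs_stat st;

        ck_hs_stat(hs, &st);

        /* Only bother once tombstones outnumber live entries. */
        if (st.tombstones <= st.n_entries)
                return true;

        return ck_hs_gc(hs, 0, 0);
}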
651 struct ck_hs_map *map = hs->map; in ck_hs_fas()
662 insert = ck_hs_marshal(hs->mode, key, h); in ck_hs_fas()
678 * pre-existing object. The second argument is a pointer to the fifth argument
679 * passed to ck_hs_apply. If a non-NULL pointer is passed to the first argument
680 * and the return value of the apply function is NULL, then the pre-existing
683 * argument is non-NULL and the return pointer is different than that passed to
684 * the apply function, then the pre-existing value is replaced. For
700 map = hs->map; in ck_hs_apply()
702 …slot = ck_hs_map_probe(hs, map, &n_probes, &first, h, key, &object, map->probe_limit, CK_HS_PROBE_… in ck_hs_apply()
704 if (ck_hs_grow(hs, map->capacity << 1) == false) in ck_hs_apply()
721 map->n_entries--; in ck_hs_apply()
722 map->tombstones++; in ck_hs_apply()
733 insert = ck_hs_marshal(hs->mode, delta, h); in ck_hs_apply()
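The comment at lines 678-684 (only its matching lines are shown) describes the apply protocol: the callback receives the current value for the key, or NULL if the key is absent, plus the closure passed as ck_hs_apply's fifth argument; returning NULL deletes an existing entry (lines 721-722), returning the same pointer leaves the set untouched, and returning a different pointer is stored in its place (line 733). A minimal insert-if-absent sketch; the helper names are illustrative, and h must be computed with the hash callback and seed given to ck_hs_init:

#include <ck_hs.h>
#include <stdbool.h>
#include <stdint.h>

/* Apply callback: keep an existing entry as-is, otherwise insert the key
 * carried in the closure. Returning NULL here would delete the entry. */
static void *
insert_if_absent_fn(void *existing, void *closure)
{

        if (existing != NULL)
                return existing;

        return closure;
}

static bool
set_insert_if_absent(ck_hs_t *hs, unsigned long h, const char *key)
{

        return ck_hs_apply(hs, h, key, insert_if_absent_fn,
            (void *)(uintptr_t)key);
}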
772 map = hs->map; in ck_hs_set()
774 …slot = ck_hs_map_probe(hs, map, &n_probes, &first, h, key, &object, map->probe_limit, CK_HS_PROBE_… in ck_hs_set()
776 if (ck_hs_grow(hs, map->capacity << 1) == false) in ck_hs_set()
783 insert = ck_hs_marshal(hs->mode, key, h); in ck_hs_set()
826 map = hs->map; in ck_hs_put_internal()
829 map->probe_limit, behavior); in ck_hs_put_internal()
832 if (ck_hs_grow(hs, map->capacity << 1) == false) in ck_hs_put_internal()
843 insert = ck_hs_marshal(hs->mode, key, h); in ck_hs_put_internal()
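ck_hs_set (line 772) and the put variants that share ck_hs_put_internal (line 826) are the writer-side insertion paths: put only inserts, while set inserts or replaces and reports the displaced value. A usage sketch; h must come from the same hash callback and seed passed to ck_hs_init:

#include <ck_hs.h>
#include <stdbool.h>

static bool
set_upsert(ck_hs_t *hs, unsigned long h, const void *key)
{
        void *previous;

        /* ck_hs_put() returns false if the key already exists or the map
         * could not be grown. */
        if (ck_hs_put(hs, h, key) == true)
                return true;

        /* ck_hs_set() inserts or replaces; the displaced value (NULL when
         * the key was absent) comes back through the last argument. */
        return ck_hs_set(hs, h, key, &previous);
}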
887 map = ck_pr_load_ptr(&hs->map); in ck_hs_get()
888 generation = &map->generation[h & CK_HS_G_MASK]; in ck_hs_get()
908 struct ck_hs_map *map = hs->map; in ck_hs_remove()
917 map->n_entries--; in ck_hs_remove()
918 map->tombstones++; in ck_hs_remove()
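ck_hs_get (line 887) retries on generation-counter changes so it can run concurrently with the single writer under CK_HS_MODE_SPMC, while ck_hs_remove (line 908) tombstones the slot and must be called from the writer. A lookup-then-delete sketch; h must match the hash callback and seed from ck_hs_init:

#include <ck_hs.h>
#include <stddef.h>

static void *
set_take(ck_hs_t *hs, unsigned long h, const void *key)
{

        if (ck_hs_get(hs, h, key) == NULL)
                return NULL;

        /* ck_hs_remove() hands back the stored pointer, or NULL when the
         * key is absent. */
        return ck_hs_remove(hs, h, key);
}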
927 struct ck_malloc *m) in ck_hs_move() argument
930 if (m == NULL || m->malloc == NULL || m->free == NULL || hf == NULL) in ck_hs_move()
933 hs->mode = source->mode; in ck_hs_move()
934 hs->seed = source->seed; in ck_hs_move()
935 hs->map = source->map; in ck_hs_move()
936 hs->m = m; in ck_hs_move()
937 hs->hf = hf; in ck_hs_move()
938 hs->compare = compare; in ck_hs_move()
944 unsigned int mode, in ck_hs_init() argument
947 struct ck_malloc *m, in ck_hs_init() argument
952 if (m == NULL || m->malloc == NULL || m->free == NULL || hf == NULL) in ck_hs_init()
955 hs->m = m; in ck_hs_init()
956 hs->mode = mode; in ck_hs_init()
957 hs->seed = seed; in ck_hs_init()
958 hs->hf = hf; in ck_hs_init()
959 hs->compare = compare; in ck_hs_init()
961 hs->map = ck_hs_map_create(hs, n_entries); in ck_hs_init()
962 return hs->map != NULL; in ck_hs_init()
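The ck_hs_init fragments give the argument order: mode, hash callback, comparator, allocator, initial capacity, seed. A complete minimal program tying the pieces together for a set of C strings; hs_hash, hs_compare, and the allocator wrappers are illustrative stand-ins, not part of ck:

#include <ck_hs.h>
#include <stdbool.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

/* Simple string hash; any hash that honors the seed works here. */
static unsigned long
hs_hash(const void *object, unsigned long seed)
{
        const char *c;
        unsigned long h = seed;

        for (c = object; *c != '\0'; c++)
                h = h * 31 + (unsigned char)*c;

        return h;
}

static bool
hs_compare(const void *previous, const void *compare)
{

        return strcmp(previous, compare) == 0;
}

/* ck_hs_init() only requires malloc and free from struct ck_malloc. */
static void *
hs_malloc(size_t r)
{

        return malloc(r);
}

static void
hs_free(void *p, size_t b, bool defer)
{

        (void)b;
        (void)defer;
        free(p);
        return;
}

static struct ck_malloc allocator = {
        .malloc = hs_malloc,
        .free = hs_free
};

int
main(void)
{
        ck_hs_t hs;
        unsigned long h;
        const char *key = "example";

        if (ck_hs_init(&hs, CK_HS_MODE_OBJECT | CK_HS_MODE_SPMC,
            hs_hash, hs_compare, &allocator, 64, 6602834) == false) {
                fprintf(stderr, "ck_hs_init failed\n");
                return 1;
        }

        h = CK_HS_HASH(&hs, hs_hash, key);
        if (ck_hs_put(&hs, h, key) == true)
                printf("stored: %s\n", (const char *)ck_hs_get(&hs, h, key));

        ck_hs_destroy(&hs);
        return 0;
}

CK_HS_HASH(&hs, hs_hash, key) simply applies the registered callback with the seed stored at init time, which is why every operation above takes a precomputed h.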