Lines matching "da", the struct dxr_aux build-state pointer used throughout the DXR lookup module. Matches are grouped by function below; lines that did not match are elided and marked "...".

/*
 * initheap(): seed the heap with the route covering address dst_u32,
 * looked up in FIB da->fibnum, before a chunk walk begins.
 */
initheap(struct dxr_aux *da, uint32_t dst_u32, uint32_t chunk)
    struct heap_entry *fhp = &da->heap[0];
    ...
    da->heap_index = 0;
    da->dst.sin_addr.s_addr = htonl(dst_u32);
    rt = fib4_lookup_rt(da->fibnum, da->dst.sin_addr, 0, NHR_UNLOCKED,
        ...
    fhp->nexthop = fib_get_nhop_idx(da->fd, rnd.rnd_nhop);

/*
 * chunk_size(): a chunk's size in range_tbl entries; in the branch
 * shown, the fragment count is read from the chunk's first entry
 * (cf. the FRAGS_MARK_XL path in update_chunk() below).
 */
chunk_size(struct dxr_aux *da, struct direct_entry *fdesc)
    ...
    return (da->range_tbl[fdesc->base].fragments + 2);

/*
 * chunk_hash(): hash the chunk's range_tbl contents word by word, for
 * duplicate-chunk lookups in chunk_hashtbl.
 */
chunk_hash(struct dxr_aux *da, struct direct_entry *fdesc)
    uint32_t size = chunk_size(da, fdesc);
    uint32_t *p = (uint32_t *) &da->range_tbl[fdesc->base];
    uint32_t *l = (uint32_t *) &da->range_tbl[fdesc->base + size];
    ...
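
The mixing loop between the p and l bounds did not match and is elided.
For orientation, a generic word-folding hash over the same [p, l) span
could look like the sketch below; the constants and mixing steps are
illustrative, not the module's actual ones.

#include <stdint.h>

/* Fold every 32-bit word in [p, l) into a single hash value. */
static uint32_t
words_hash(const uint32_t *p, const uint32_t *l)
{
    uint32_t hash = 0;

    for (; p < l; p++)
        hash = (hash << 7) + (hash >> 13) + *p;
    return (hash);
}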

/*
 * chunk_ref(): commit a freshly built chunk into range_tbl.  An
 * identical chunk already in the table (same hash, same contents) is
 * reused; otherwise space is taken from an unused-chunks list or from
 * the table top, and range_tbl is grown by RTBL_SIZE_INCR as it
 * approaches the BASE_MAX structural limit.
 */
chunk_ref(struct dxr_aux *da, uint32_t chunk)
    struct direct_entry *fdesc = &da->direct_tbl[chunk];
    ...
    uint32_t size = chunk_size(da, fdesc);
    uint32_t hash = chunk_hash(da, fdesc);
    ...
    LIST_FOREACH(cdp, &da->chunk_hashtbl[hash & CHUNK_HASH_MASK],
        ...
    memcmp(&da->range_tbl[base], &da->range_tbl[cdp->cd_base],
        ...
    da->rtbl_top = fdesc->base;
    ...
    cdp = LIST_FIRST(&da->unused_chunks[i]);
    ...
    LIST_FOREACH(empty_cdp, &da->unused_chunks[0], cd_hash_le)
    ...
    bcopy(&da->range_tbl[fdesc->base], &da->range_tbl[cdp->cd_base],
        ...
    da->rtbl_top -= size;
    da->unused_chunks_size -= cdp->cd_max_size;
    ...
    LIST_INSERT_HEAD(&da->unused_chunks[i], empty_cdp,
        ...
    da->unused_chunks_size += empty_cdp->cd_max_size;
    ...
    LIST_INSERT_HEAD(&da->all_chunks, cdp, cd_all_le);
    MPASS(cdp->cd_base + cdp->cd_max_size == da->rtbl_top);
    ...
    LIST_INSERT_HEAD(&da->chunk_hashtbl[hash & CHUNK_HASH_MASK], cdp,
        ...
    if (da->rtbl_top >= da->rtbl_size) {
        if (da->rtbl_top >= BASE_MAX) {
            FIB_PRINTF(LOG_ERR, da->fd,
                ...
                "range table elements", da->rtbl_top);
            ...
        da->rtbl_size += RTBL_SIZE_INCR;
        /* Scale the syslog severity with how full the table is. */
        i = (BASE_MAX - da->rtbl_top) * LOG_DEBUG / BASE_MAX;
        FIB_PRINTF(i, da->fd, "range table at %d%% structural limit",
            da->rtbl_top * 100 / BASE_MAX);
        da->range_tbl = realloc(da->range_tbl,
            sizeof(*da->range_tbl) * da->rtbl_size + FRAGS_PREF_SHORT,
            ...
        if (da->range_tbl == NULL) {
            FIB_PRINTF(LOG_NOTICE, da->fd,
                ...
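
One detail worth noting in the growth path above: the realloc(9) result
is assigned straight back to da->range_tbl and only afterwards checked
for NULL, so a failed M_NOWAIT allocation forces the build to abort
rather than continue with the old table.  The same grow-by-increment
pattern in self-contained userland form (grow_table() is an
illustrative helper, not part of the module):

#include <stdint.h>
#include <stdlib.h>

/* Grow *tbl by incr elements; on failure, leave *tbl and *size intact. */
static int
grow_table(void **tbl, uint32_t *size, size_t elem_size, uint32_t incr)
{
    void *new_tbl = realloc(*tbl, elem_size * (*size + incr));

    if (new_tbl == NULL)
        return (-1);
    *tbl = new_tbl;
    *size += incr;
    return (0);
}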

/*
 * chunk_unref(): drop a reference to a chunk, found via the same
 * hash-and-memcmp lookup.  Freed space is coalesced with an adjacent
 * unused chunk, trimmed off the table top when it ends there, or kept
 * on an unused_chunks list for later reuse.
 */
chunk_unref(struct dxr_aux *da, uint32_t chunk)
    struct direct_entry *fdesc = &da->direct_tbl[chunk];
    ...
    uint32_t size = chunk_size(da, fdesc);
    uint32_t hash = chunk_hash(da, fdesc);
    ...
    LIST_FOREACH(cdp, &da->chunk_hashtbl[hash & CHUNK_HASH_MASK],
        ...
    memcmp(&da->range_tbl[base], &da->range_tbl[cdp->cd_base],
        ...
    da->unused_chunks_size += cdp->cd_max_size;
    ...
    cdp2 = LIST_PREV(cdp, &da->all_chunks, chunk_desc, cd_all_le);
    ...
    if (cdp->cd_base + cdp->cd_max_size == da->rtbl_top) {
        ...
        MPASS(cdp == LIST_FIRST(&da->all_chunks));
        da->rtbl_top -= cdp->cd_max_size;
        da->unused_chunks_size -= cdp->cd_max_size;
        ...
    LIST_INSERT_HEAD(&da->unused_chunks[i], cdp, cd_hash_le);

trie_hash(struct dxr_aux *da, uint32_t dxr_x, uint32_t index)
    ...
        (void *) &da->direct_tbl[(index << dxr_x) + i];
    ...

/*
 * trie_ref(): intern one block of 1 << dxr_x direct_tbl entries into
 * x_tbl, reusing an identical block when the hash and memcmp match,
 * and growing x_tbl by XTBL_SIZE_INCR when it fills up.
 */
trie_ref(struct dxr_aux *da, uint32_t index)
    ...
    uint32_t dxr_d = da->d_bits;
    ...
    uint32_t hash = trie_hash(da, dxr_x, index);
    ...
    LIST_FOREACH(tp, &da->trie_hashtbl[hash & TRIE_HASH_MASK], td_hash_le)
        ...
        memcmp(&da->direct_tbl[index << dxr_x],
            &da->x_tbl[tp->td_index << dxr_x],
            sizeof(*da->x_tbl) << dxr_x) == 0) {
            ...
            da->trietbl[index] = tp;
    ...
    tp = LIST_FIRST(&da->unused_trie);
    ...
    da->unused_trie_cnt--;
    ...
    LIST_INSERT_HEAD(&da->all_trie, tp, td_all_le);
    tp->td_index = da->all_trie_cnt++;
    ...
    LIST_INSERT_HEAD(&da->trie_hashtbl[hash & TRIE_HASH_MASK], tp,
        ...
    memcpy(&da->x_tbl[tp->td_index << dxr_x],
        &da->direct_tbl[index << dxr_x], sizeof(*da->x_tbl) << dxr_x);
    da->trietbl[index] = tp;
    if (da->all_trie_cnt >= da->xtbl_size >> dxr_x) {
        da->xtbl_size += XTBL_SIZE_INCR;
        da->x_tbl = realloc(da->x_tbl,
            sizeof(*da->x_tbl) * da->xtbl_size, M_DXRAUX, M_NOWAIT);
        if (da->x_tbl == NULL) {
            FIB_PRINTF(LOG_NOTICE, da->fd,
                ...
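
direct_tbl is carved into blocks of 1 << dxr_x entries, and the
memcmp/memcpy above address a block by shifting its index left by
dxr_x.  A minimal self-contained illustration of that block addressing
(block_equal() is a hypothetical helper; uint32_t entries are assumed):

#include <stdint.h>
#include <string.h>

/* Compare block i of src with interned block t of pool, both arrays
 * being carved into blocks of 1 << xbits entries each. */
static int
block_equal(const uint32_t *src, uint32_t i, const uint32_t *pool,
    uint32_t t, uint32_t xbits)
{
    return (memcmp(&src[i << xbits], &pool[t << xbits],
        sizeof(uint32_t) << xbits) == 0);
}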

/*
 * trie_unref(): release a trie block.  Blocks not at the end of x_tbl
 * go on the unused list; the last allocated block, and any unused
 * blocks exposed behind it, are trimmed off the end instead.
 */
trie_unref(struct dxr_aux *da, uint32_t index)
    struct trie_desc *tp = da->trietbl[index];
    ...
    da->trietbl[index] = NULL;
    ...
    da->unused_trie_cnt++;
    if (tp->td_index != da->all_trie_cnt - 1) {
        LIST_INSERT_HEAD(&da->unused_trie, tp, td_hash_le);
        ...
    da->all_trie_cnt--;
    da->unused_trie_cnt--;
    ...
    LIST_FOREACH(tp, &da->unused_trie, td_hash_le)
        if (tp->td_index == da->all_trie_cnt - 1) {
            ...

/*
 * heap_inject(): insert a prefix's address range into da->heap, which
 * is kept ordered by ascending prefix length.
 */
heap_inject(struct dxr_aux *da, uint32_t start, uint32_t end, uint32_t preflen,
    ...
    for (i = da->heap_index; i >= 0; i--) {
        if (preflen > da->heap[i].preflen)
            ...
        else if (preflen < da->heap[i].preflen)
            da->heap[i + 1] = da->heap[i];
        ...
    fhp = &da->heap[i + 1];
    ...
    da->heap_index++;
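
The loop above is a straight insertion-sort step: entries with longer
prefixes are shifted up until the new entry's slot is found, which
keeps da->heap[da->heap_index] the most specific prefix covering the
current address.  A self-contained sketch (simplified entry type; the
equal-preflen case, which the original handles separately, is omitted):

#include <stdint.h>

struct hent {
    uint32_t start, end, preflen, nexthop;
};

/* Insert e into h[0..*top], kept sorted by ascending preflen. */
static void
ordered_insert(struct hent *h, int *top, struct hent e)
{
    int i;

    for (i = *top; i >= 0 && h[i].preflen > e.preflen; i--)
        h[i + 1] = h[i];    /* shift more specific entries up */
    h[i + 1] = e;
    (*top)++;
}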

/*
 * dxr_walk(): per-route callback invoked by rib_walk_from() while a
 * chunk is being rebuilt; emits range fragments and keeps the heap of
 * covering prefixes current.
 */
    struct dxr_aux *da = arg;
    uint32_t chunk = da->work_chunk;
    ...
        &da->range_tbl[da->rtbl_top + da->rtbl_work_frags].re;
    struct heap_entry *fhp = &da->heap[da->heap_index];
    ...
    nh = fib_get_nhop_idx(da->fd, rt_get_raw_nhop(rt));
    ...
    heap_inject(da, start, end, preflen, nh);
    ...
    if (da->heap_index > 0) {
        ...
        da->heap_index--;
        ...
        initheap(da, fhp->end + 1, chunk);
        ...
    da->rtbl_work_frags++;
    ...
    da->rtbl_work_frags++;
    ...
    } else if (da->rtbl_work_frags) {
        ...
        da->rtbl_work_frags--;
        ...
    heap_inject(da, start, end, preflen, nh);
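
The start/end pair passed to heap_inject() describes the contiguous
address range a visited prefix covers: a /preflen IPv4 prefix spans
2^(32 - preflen) addresses.  A small sketch of that mapping
(illustrative, not the walker's literal code):

#include <stdint.h>

/* Compute the first and last address covered by addr/preflen. */
static void
prefix_range(uint32_t addr, unsigned preflen, uint32_t *start, uint32_t *end)
{
    uint32_t hostmask = (preflen == 0) ?
        0xffffffffU : (0xffffffffU >> preflen);

    *start = addr & ~hostmask;
    *end = *start | hostmask;
}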

/*
 * update_chunk(): rebuild the range fragments for one direct-table
 * chunk: unref the old contents, seed the heap, walk the RIB across
 * the chunk's address range, then pick an encoding and commit the
 * result via chunk_ref().
 */
update_chunk(struct dxr_aux *da, uint32_t chunk)
    ...
    if (da->direct_tbl[chunk].fragments != FRAGS_MARK_HIT)
        chunk_unref(da, chunk);
    ...
    initheap(da, first, chunk);
    ...
    fp = &da->range_tbl[da->rtbl_top].re;
    da->rtbl_work_frags = 0;
    ...
    fp->nexthop = da->heap[0].nexthop;
    ...
    da->dst.sin_addr.s_addr = htonl(first);
    da->mask.sin_addr.s_addr = htonl(~DXR_RANGE_MASK);
    ...
    da->work_chunk = chunk;
    rib_walk_from(da->fibnum, AF_INET, RIB_FLAG_LOCKED,
        (struct sockaddr *) &da->dst, (struct sockaddr *) &da->mask,
        dxr_walk, da);
    ...
    fp = &da->range_tbl[da->rtbl_top + da->rtbl_work_frags].re;
    fhp = &da->heap[da->heap_index];
    ...
    if (da->heap_index > 0) {
        ...
        da->heap_index--;
        ...
        initheap(da, fhp->end + 1, chunk);
        ...
    da->rtbl_work_frags++;
    ...
    if (da->rtbl_work_frags == 0) {
        da->direct_tbl[chunk].base = fp->nexthop;
        da->direct_tbl[chunk].fragments = FRAGS_MARK_HIT;
        ...
    da->direct_tbl[chunk].base = da->rtbl_top;
    da->direct_tbl[chunk].fragments = da->rtbl_work_frags;
    ...
    fp = &da->range_tbl[da->rtbl_top].re;
    for (i = 0; i <= da->rtbl_work_frags; i++, fp++)
        ...
    if (i == da->rtbl_work_frags + 1) {
        fp = &da->range_tbl[da->rtbl_top].re;
        ...
        for (i = 0; i <= da->rtbl_work_frags; i++, fp++, fps++) {
            ...
        da->rtbl_work_frags >>= 1;
        da->direct_tbl[chunk].fragments =
            da->rtbl_work_frags | FRAGS_PREF_SHORT;
    ...
    if (da->rtbl_work_frags >= FRAGS_MARK_HIT) {
        da->direct_tbl[chunk].fragments = FRAGS_MARK_XL;
        memmove(&da->range_tbl[da->rtbl_top + 1],
            &da->range_tbl[da->rtbl_top],
            (da->rtbl_work_frags + 1) * sizeof(*da->range_tbl));
        da->range_tbl[da->rtbl_top].fragments = da->rtbl_work_frags;
        da->rtbl_work_frags++;
        ...
    da->rtbl_top += (da->rtbl_work_frags + 1);
    return (chunk_ref(da, chunk));
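
Taken together, the tail of update_chunk() shows the per-chunk encoding
choice: zero fragments collapse to FRAGS_MARK_HIT with the nexthop
stored directly in the base field; a fragment list where every entry
fits the short format is packed two per range_tbl slot and flagged
FRAGS_PREF_SHORT; a fragment count reaching FRAGS_MARK_HIT gets an
extra leading entry holding the count, marked FRAGS_MARK_XL; and
chunk_ref() then deduplicates and commits whatever was built.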

/*
 * dxr_build(): construct or incrementally update the lookup structures.
 * On first use the aux state is allocated and initialized; a full
 * rebuild resets all chunk bookkeeping and marks every chunk dirty,
 * after which only chunks flagged in updates_mask are rebuilt.
 */
    struct dxr_aux *da = dxr->aux;
    ...
    if (da == NULL) {
        da = malloc(sizeof(*dxr->aux), M_DXRAUX, M_NOWAIT);
        if (da == NULL) {
            ...
        dxr->aux = da;
        da->fibnum = dxr->fibnum;
        da->fd = dxr->fd;
        da->refcnt = 1;
        LIST_INIT(&da->all_chunks);
        LIST_INIT(&da->all_trie);
        da->rtbl_size = RTBL_SIZE_INCR;
        da->range_tbl = NULL;
        da->xtbl_size = XTBL_SIZE_INCR;
        da->x_tbl = NULL;
        bzero(&da->dst, sizeof(da->dst));
        bzero(&da->mask, sizeof(da->mask));
        da->dst.sin_len = sizeof(da->dst);
        da->mask.sin_len = sizeof(da->mask);
        da->dst.sin_family = AF_INET;
        da->mask.sin_family = AF_INET;
    ...
    if (da->range_tbl == NULL) {
        da->range_tbl = malloc(sizeof(*da->range_tbl) * da->rtbl_size
            ...
        if (da->range_tbl == NULL) {
            FIB_PRINTF(LOG_NOTICE, da->fd,
                ...
    if (da->x_tbl == NULL) {
        da->x_tbl = malloc(sizeof(*da->x_tbl) * da->xtbl_size,
            ...
        if (da->x_tbl == NULL) {
            FIB_PRINTF(LOG_NOTICE, da->fd,
                ...
    dxr->nh_tbl = fib_get_nhop_array(da->fd);
    fib_get_rtable_info(fib_get_rh(da->fd), &rinfo);
    ...
    if (da->updates_low > da->updates_high)
        ...
    /* Full rebuild: reset chunk state and mark everything dirty. */
    bzero(da->chunk_hashtbl, sizeof(da->chunk_hashtbl));
    while ((cdp = LIST_FIRST(&da->all_chunks)) != NULL) {
        ...
    LIST_INIT(&da->unused_chunks[i]);
    da->unused_chunks_size = 0;
    da->rtbl_top = 0;
    da->updates_low = 0;
    da->updates_high = DIRECT_TBL_SIZE - 1;
    memset(da->updates_mask, 0xff, sizeof(da->updates_mask));
    ...
    da->direct_tbl[i].fragments = FRAGS_MARK_HIT;
    da->direct_tbl[i].base = 0;
    ...
    da->prefixes = rinfo.num_prefixes;
    ...
    /* Rebuild only the chunks flagged dirty in updates_mask. */
    for (i = da->updates_low; i <= da->updates_high; i++) {
        m = da->updates_mask[i >> 5] >> (i & 0x1f);
        ...
        else if (m & 1 && update_chunk(da, i) != 0)
            ...
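
Because the mask is consumed one 32-bit word at a time, a word that
shifts down to zero means the remaining chunks in that word are all
clean and can be skipped at once.  One way to write that skip,
consistent with the shift above (a sketch; rebuild_chunk() stands in
for update_chunk()):

#include <stdint.h>

extern int rebuild_chunk(uint32_t chunk);    /* stand-in for update_chunk() */

static void
scan_dirty(const uint32_t *mask, uint32_t low, uint32_t high)
{
    uint32_t i, m;

    for (i = low; i <= high; i++) {
        m = mask[i >> 5] >> (i & 0x1f);
        if (m == 0)
            i |= 0x1f;    /* rest of this word is clean */
        else if (m & 1)
            rebuild_chunk(i);
    }
}

dxr_build() continues: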

    /* Fragmentation ratios are in hundredths of a percent. */
    if (da->rtbl_top)
        range_frag = da->unused_chunks_size * 10000ULL / da->rtbl_top;
    ...
    r_size = sizeof(*da->range_tbl) * da->rtbl_top;
    ...
        abs(fls(da->prefixes) - fls(da->trie_rebuilt_prefixes)) > 1)
    ...
    /* Rebuild the D/X trie from scratch. */
    da->trie_rebuilt_prefixes = da->prefixes;
    da->d_bits = DXR_D;
    da->updates_low = 0;
    da->updates_high = DIRECT_TBL_SIZE - 1;
    ...
    memset(da->updates_mask, 0xff,
        sizeof(da->updates_mask));
    ...
    bzero(da->trietbl, sizeof(da->trietbl));
    bzero(da->trie_hashtbl, sizeof(da->trie_hashtbl));
    while ((tp = LIST_FIRST(&da->all_trie)) != NULL) {
        ...
    LIST_INIT(&da->unused_trie);
    da->all_trie_cnt = da->unused_trie_cnt = 0;
    ...
    dxr_x = DXR_TRIE_BITS - da->d_bits;
    d_tbl_size = (1 << da->d_bits);
    ...
    /* Re-intern the direct_tbl blocks touched by this update. */
    for (i = da->updates_low >> dxr_x; i <= da->updates_high >> dxr_x;
        ...
        m |= da->updates_mask[((i << dxr_x) + j) >> 5];
        ...
        trie_unref(da, i);
        ...
        ti = trie_ref(da, i);
        ...
        da->d_tbl[i] = ti;
    ...
    if (da->all_trie_cnt)
        trie_frag = da->unused_trie_cnt * 10000ULL / da->all_trie_cnt;
    ...
    d_size = sizeof(*da->d_tbl) * d_tbl_size;
    x_size = sizeof(*da->x_tbl) * DIRECT_TBL_SIZE / d_tbl_size
        * da->all_trie_cnt;
    ...
    da->d_bits--;
    ...
    da->d_bits++;
    ...
    FIB_PRINTF(LOG_NOTICE, da->fd,
        ...
    /* Publish the finished snapshot. */
    memcpy(dxr->d, da->d_tbl, d_size);
    ...
    memcpy(dxr->x, da->x_tbl, x_size);
    ...
    dxr->d_shift = 32 - da->d_bits;
    ...
    memcpy(dxr->r, da->range_tbl, r_size);
    ...
    /* Clear the dirty window and reset the watermarks. */
    if (da->updates_low <= da->updates_high)
        bzero(&da->updates_mask[da->updates_low / 32],
            (da->updates_high - da->updates_low) / 8 + 1);
    da->updates_low = DIRECT_TBL_SIZE - 1;
    da->updates_high = 0;
    ...
    FIB_PRINTF(LOG_INFO, da->fd, "D%dX%dR, %d prefixes, %d nhops (max)",
        da->d_bits, dxr_x, rinfo.num_prefixes, rinfo.num_nhops);
    ...
    FIB_PRINTF(LOG_INFO, da->fd, "%d.%02d KBytes, %d.%02d Bytes/prefix",
        ...
    FIB_PRINTF(LOG_INFO, da->fd,
        ...
    FIB_PRINTF(LOG_INFO, da->fd, "range table %s in %u.%03u ms",
        ...
    FIB_PRINTF(LOG_INFO, da->fd, "trie %s in %u.%03u ms",
        ...
    FIB_PRINTF(LOG_INFO, da->fd, "snapshot forked in %u.%03u ms",
        ...
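
The d_bits decrement/increment above indicates the build probes
neighbouring D values, trading a larger direct table against
duplication in x_tbl.  The size arithmetic matches the d_size/x_size
expressions shown; here it is as a standalone cost model (an
illustrative helper, parameter names are mine):

#include <stddef.h>
#include <stdint.h>

/* Total D + X table footprint for a given split of trie_bits. */
static size_t
dxr_tbl_size(unsigned d_bits, unsigned trie_bits, size_t d_elem,
    size_t x_elem, uint32_t uniq_blocks)
{
    size_t d_size = d_elem << d_bits;
    size_t x_size = (x_elem << (trie_bits - d_bits)) * uniq_blocks;

    return (d_size + x_size);
}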

/*
 * dxr_init(): a new dxr instance shares the existing aux build state,
 * bumping its refcount.
 */
    struct dxr_aux *da = NULL;
    ...
    da = old_dxr->aux;
    atomic_add_int(&da->refcnt, 1);
    ...
    dxr->aux = da;

/*
 * dxr_destroy(): the last reference tears down all chunk and trie
 * descriptors and frees the tables.
 */
    struct dxr_aux *da = dxr->aux;
    ...
    if (da == NULL || atomic_fetchadd_int(&da->refcnt, -1) > 1)
        ...
    while ((cdp = LIST_FIRST(&da->all_chunks)) != NULL) {
        ...
    while ((tp = LIST_FIRST(&da->all_trie)) != NULL) {
        ...
    free(da->range_tbl, M_DXRAUX);
    free(da->x_tbl, M_DXRAUX);
    free(da, M_DXRAUX);

choose_lookup_fn(struct dxr_aux *da)
    ...
    switch (da->d_bits) {
    ...
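
Switching on d_bits to pick a lookup function lets each variant
hard-code its shift (compare dxr->d_shift = 32 - da->d_bits in
dxr_build() above).  A toy version of such dispatch (hypothetical
functions that compute only the direct-table index):

#include <stdint.h>

typedef uint32_t (*lookup_fn_t)(uint32_t dst);

/* With d_bits fixed per function, the shift is a compile-time constant. */
static uint32_t lookup_d10(uint32_t dst) { return (dst >> (32 - 10)); }
static uint32_t lookup_d11(uint32_t dst) { return (dst >> (32 - 11)); }

static lookup_fn_t
pick_lookup(unsigned d_bits)
{

    switch (d_bits) {
    case 10:
        return (lookup_d10);
    case 11:
        return (lookup_d11);
    default:
        return (NULL);
    }
}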

dxr_dump_end():
    struct dxr_aux *da;
    ...
    da = dxr->aux;
    if (da == NULL || dxr->d == NULL)
        ...
    if (da->rtbl_top >= BASE_MAX)
        ...
    dp->f = choose_lookup_fn(da);

/*
 * dxr_change_rib_batch(): apply a batch of RIB changes by marking the
 * affected direct-table chunks dirty and widening the low/high rescan
 * window; the actual rebuild happens in dxr_build().
 */
    struct dxr_aux *da;
    ...
    da = dxr->aux;
    MPASS(da != NULL);
    MPASS(da->fd == dxr->fd);
    MPASS(da->refcnt > 0);
    ...
    FIB_PRINTF(LOG_INFO, da->fd, "processing %d update(s)", q->count);
    ...
    da->updates_mask[i] = 0xffffffffU;
    ...
    da->updates_mask[i >> 5] |= (1 << (i & 0x1f));
    if (start < da->updates_low)
        da->updates_low = start;
    if (end > da->updates_high)
        da->updates_high = end;
    ...
    fib_get_rtable_info(fib_get_rh(da->fd), &rinfo);
    MPASS(da->prefixes + update_delta == rinfo.num_prefixes);
    ...
    if (da->rtbl_top >= BASE_MAX) {
        ...
    new_dp.f = choose_lookup_fn(da);
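
Each changed prefix dirties every direct-table chunk its address range
spans, and the watermarks bound the rescan that dxr_build() performs
later.  The same bookkeeping in self-contained form (the table size is
assumed for the sketch):

#include <stdint.h>

#define NCHUNKS        (1 << 16)    /* assumed direct-table size */

static uint32_t dirty_mask[NCHUNKS / 32];
static uint32_t dirty_low = NCHUNKS - 1;
static uint32_t dirty_high = 0;

/* Mark chunks [start, end] dirty and widen the rescan window. */
static void
mark_dirty(uint32_t start, uint32_t end)
{
    uint32_t i;

    for (i = start; i <= end; i++)
        dirty_mask[i >> 5] |= 1U << (i & 0x1f);
    if (start < dirty_low)
        dirty_low = start;
    if (end > dirty_high)
        dirty_high = end;
}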