Lines Matching refs:r_ctl
602 tim = rack->r_ctl.lt_bw_time; in rack_get_lt_bw()
603 bytes = rack->r_ctl.lt_bw_bytes; in rack_get_lt_bw()
607 bytes += (rack->rc_tp->snd_una - rack->r_ctl.lt_seq); in rack_get_lt_bw()
608 tim += (tcp_tv_to_lusectick(&tv) - rack->r_ctl.lt_timemark); in rack_get_lt_bw()
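The rack_get_lt_bw() fragments above read two accumulators (lt_bw_bytes, lt_bw_time) and extend them with the still-open measurement interval: bytes acked past lt_seq and microseconds elapsed since lt_timemark. A minimal, self-contained sketch of that bytes-over-time estimate follows; the struct, field, and function names mirror the fragments but are illustrative, not the rack.c definitions, and the bytes/second conversion assumes the time accumulator is in microseconds.

#include <stdint.h>

/* Illustrative accumulator state; field names mirror the fragments above. */
struct lt_bw_state {
	uint64_t lt_bw_bytes;	/* bytes delivered in closed intervals */
	uint64_t lt_bw_time;	/* microseconds covered by closed intervals */
	uint32_t lt_seq;	/* snd_una when the open interval started */
	uint64_t lt_timemark;	/* usec timestamp when the open interval started */
};

/*
 * Long-term bandwidth in bytes/second, folding in the interval that is
 * still open. Returns 0 until at least one microsecond has accumulated.
 */
static uint64_t
lt_bw_estimate(const struct lt_bw_state *s, uint32_t snd_una, uint64_t now_usec)
{
	uint64_t bytes = s->lt_bw_bytes;
	uint64_t tim = s->lt_bw_time;

	bytes += (uint64_t)(snd_una - s->lt_seq);
	if (now_usec > s->lt_timemark)
		tim += now_usec - s->lt_timemark;
	if (tim == 0)
		return (0);
	return ((bytes * 1000000) / tim);	/* usec accumulator -> per-second rate */
}

The rack_ack_received() and rack_enter_persist() fragments further down show the matching accumulation step (lt_bw_bytes += snd_max - lt_seq; lt_bw_time += tmark - lt_timemark) before the marks are advanced.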
664 opt.val = rack->r_ctl.rc_saved_beta.beta; in rack_swap_beta_values()
671 opt.val = rack->r_ctl.rc_saved_beta.beta_ecn; in rack_swap_beta_values()
678 memcpy(&rack->r_ctl.rc_saved_beta, &old, sizeof(struct newreno)); in rack_swap_beta_values()
691 log.u_bbr.flex4 = rack->r_ctl.rc_saved_beta.beta; in rack_swap_beta_values()
692 log.u_bbr.flex5 = rack->r_ctl.rc_saved_beta.beta_ecn; in rack_swap_beta_values()
699 log.u_bbr.pkts_out = rack->r_ctl.rc_prr_sndcnt; in rack_swap_beta_values()
737 if (rack->r_ctl.pacing_method & RACK_REG_PACING) in rack_remove_pacing()
739 if (rack->r_ctl.pacing_method & RACK_DGP_PACING) in rack_remove_pacing()
742 rack->r_ctl.pacing_method = RACK_PACING_NONE; in rack_remove_pacing()
765 log.u_bbr.rttProp = rack->r_ctl.rc_gp_cumack_ts; in rack_log_gpset()
766 log.u_bbr.delRate = rack->r_ctl.rc_gp_output_ts; in rack_log_gpset()
769 log.u_bbr.pkt_epoch = rack->r_ctl.rc_app_limited_cnt; in rack_log_gpset()
770 log.u_bbr.epoch = rack->r_ctl.current_round; in rack_log_gpset()
771 log.u_bbr.lt_epoch = rack->r_ctl.rc_considered_lost; in rack_log_gpset()
1987 return (rack->r_ctl.rc_fixed_pacing_rate_rec); in rack_get_fixed_pacing_bw()
1988 else if (rack->r_ctl.cwnd_to_use < rack->rc_tp->snd_ssthresh) in rack_get_fixed_pacing_bw()
1989 return (rack->r_ctl.rc_fixed_pacing_rate_ss); in rack_get_fixed_pacing_bw()
1991 return (rack->r_ctl.rc_fixed_pacing_rate_ca); in rack_get_fixed_pacing_bw()
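The rack_get_fixed_pacing_bw() fragments pick one of three operator-configured rates: a recovery rate, a slow-start rate while cwnd_to_use is below ssthresh, and a congestion-avoidance rate otherwise. A hedged sketch of that selection (the recovery test itself is not visible in the listing and is passed in here as a flag):

/* Illustrative: choose a fixed pacing rate by connection phase. */
static uint64_t
fixed_pacing_rate(int in_recovery, uint32_t cwnd_to_use, uint32_t ssthresh,
    uint64_t rate_rec, uint64_t rate_ss, uint64_t rate_ca)
{
	if (in_recovery)
		return (rate_rec);	/* fixed recovery rate */
	else if (cwnd_to_use < ssthresh)
		return (rate_ss);	/* fixed slow-start rate */
	return (rate_ca);		/* fixed congestion-avoidance rate */
}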
2048 cur = rack->r_ctl.rc_last_sft; in rack_log_hybrid_bw()
2050 if (rack->r_ctl.rack_rs.rs_flags != RACK_RTT_EMPTY) in rack_log_hybrid_bw()
2051 log.u_bbr.inflight = rack->r_ctl.rack_rs.rs_us_rtt; in rack_log_hybrid_bw()
2174 log.u_bbr.pkt_epoch = (uint32_t)(rack->r_ctl.last_tmit_time_acked & 0x00000000ffffffff); in rack_log_hybrid_sends()
2175 log.u_bbr.flex5 = (uint32_t)((rack->r_ctl.last_tmit_time_acked >> 32) & 0x00000000ffffffff); in rack_log_hybrid_sends()
2219 u_segsiz = (uint64_t)min(ctf_fixed_maxseg(rack->rc_tp), rack->r_ctl.rc_pace_min_segs); in rack_compensate_for_linerate()
2234 if (rack->r_ctl.bw_rate_cap == 0) in rack_rate_cap_bw()
2238 (rack->r_ctl.rc_last_sft != NULL)) { in rack_rate_cap_bw()
2246 ent = rack->r_ctl.rc_last_sft; in rack_rate_cap_bw()
2253 rack->r_ctl.bw_rate_cap = 0; in rack_rate_cap_bw()
2257 timeleft = rack->r_ctl.rc_last_sft->deadline - timenow; in rack_rate_cap_bw()
2262 rack->r_ctl.bw_rate_cap = 0; in rack_rate_cap_bw()
2278 rack->r_ctl.bw_rate_cap = 0; in rack_rate_cap_bw()
2297 rack->r_ctl.bw_rate_cap = 0; in rack_rate_cap_bw()
2305 if (rack->r_ctl.bw_rate_cap) in rack_rate_cap_bw()
2315 rack->r_ctl.bw_rate_cap = calcbw; in rack_rate_cap_bw()
2316 if ((rack->r_ctl.rc_last_sft->hybrid_flags & TCP_HYBRID_PACING_S_MSS) && in rack_rate_cap_bw()
2318 ((rack->r_ctl.rc_last_sft->hybrid_flags & TCP_HYBRID_PACING_SETMSS) == 0)) { in rack_rate_cap_bw()
2322 orig_max = rack->r_ctl.rc_pace_max_segs; in rack_rate_cap_bw()
2323 rack->r_ctl.rc_last_sft->hybrid_flags |= TCP_HYBRID_PACING_SETMSS; in rack_rate_cap_bw()
2324 rack->r_ctl.rc_pace_max_segs = rack_get_pacing_len(rack, calcbw, ctf_fixed_maxseg(rack->rc_tp)); in rack_rate_cap_bw()
2325 …rack_log_type_pacing_sizes(rack->rc_tp, rack, rack->r_ctl.client_suggested_maxseg, orig_max, __LIN… in rack_rate_cap_bw()
2339 if ((rack->r_ctl.bw_rate_cap > 0) && (*bw > rack->r_ctl.bw_rate_cap)) { in rack_rate_cap_bw()
2343 (rack->r_ctl.rc_last_sft != NULL) && in rack_rate_cap_bw()
2344 (rack->r_ctl.rc_last_sft->hybrid_flags & TCP_HYBRID_PACING_S_MSS) && in rack_rate_cap_bw()
2346 ((rack->r_ctl.rc_last_sft->hybrid_flags & TCP_HYBRID_PACING_SETMSS) == 0)) { in rack_rate_cap_bw()
2350 orig_max = rack->r_ctl.rc_pace_max_segs; in rack_rate_cap_bw()
2351 rack->r_ctl.rc_last_sft->hybrid_flags |= TCP_HYBRID_PACING_SETMSS; in rack_rate_cap_bw()
2352 …rack->r_ctl.rc_pace_max_segs = rack_get_pacing_len(rack, rack->r_ctl.bw_rate_cap, ctf_fixed_maxseg… in rack_rate_cap_bw()
2353 …rack_log_type_pacing_sizes(rack->rc_tp, rack, rack->r_ctl.client_suggested_maxseg, orig_max, __LIN… in rack_rate_cap_bw()
2357 *bw = rack->r_ctl.bw_rate_cap; in rack_rate_cap_bw()
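Through rack_rate_cap_bw() the recurring pattern is: when a non-zero bw_rate_cap survives the hybrid-pacing deadline checks, the caller's bandwidth is clamped to it, and the pacing burst size may be recomputed via rack_get_pacing_len(). A minimal sketch of the clamp alone, with the deadline and segment-size handling deliberately omitted:

#include <stdint.h>

/*
 * Illustrative clamp: cap *bw at rate_cap when a cap is configured.
 * The real function can also zero the cap, derive it from a deadline,
 * and resize pacing segments; none of that is shown here.
 */
static void
rate_cap_bw(uint64_t *bw, uint64_t rate_cap)
{
	if (rate_cap > 0 && *bw > rate_cap)
		*bw = rate_cap;
}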
2395 if (rack->r_ctl.init_rate) in rack_get_gp_est()
2396 return (rack->r_ctl.init_rate); in rack_get_gp_est()
2415 if (rack->r_ctl.num_measurements >= RACK_REQ_AVG) { in rack_get_gp_est()
2417 bw = rack->r_ctl.gp_bw; in rack_get_gp_est()
2420 bw = rack->r_ctl.gp_bw / max(rack->r_ctl.num_measurements, 1); in rack_get_gp_est()
2430 lt_bw = rack->r_ctl.gp_bw; in rack_get_gp_est()
2478 return (rack->r_ctl.rack_per_of_gp_probertt); in rack_get_output_gain()
2480 rack->r_ctl.rack_per_of_gp_rec)) { in rack_get_output_gain()
2483 return (rack->r_ctl.rack_per_of_gp_rec); in rack_get_output_gain()
2488 (rack->r_ctl.rack_per_of_gp_rec > 100)) { in rack_get_output_gain()
2497 return (rack->r_ctl.rack_per_of_gp_rec); in rack_get_output_gain()
2502 if (rack->r_ctl.cwnd_to_use < rack->rc_tp->snd_ssthresh) in rack_get_output_gain()
2503 return (rack->r_ctl.rack_per_of_gp_ss); in rack_get_output_gain()
2505 return (rack->r_ctl.rack_per_of_gp_ca); in rack_get_output_gain()
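rack_get_output_gain() returns a percentage applied to the pacing rate, and the fragments show four sources in priority order: the probe-RTT gain, the recovery gain, then slow-start versus congestion-avoidance gains keyed on cwnd_to_use against ssthresh. A sketch of that ordering; the guard conditions are only partially visible above, so they are reduced to flags here:

/* Illustrative gain selection, in percent (100 == 1.0x pacing). */
static uint16_t
output_gain(int in_probe_rtt, int in_recovery, uint32_t cwnd_to_use,
    uint32_t ssthresh, uint16_t gp_probertt, uint16_t gp_rec,
    uint16_t gp_ss, uint16_t gp_ca)
{
	if (in_probe_rtt)
		return (gp_probertt);	/* draining toward probe-RTT target */
	if (in_recovery)
		return (gp_rec);	/* loss-recovery gain */
	if (cwnd_to_use < ssthresh)
		return (gp_ss);		/* slow-start gain */
	return (gp_ca);			/* congestion-avoidance gain */
}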
2530 log.u_bbr.flex2 = rack->r_ctl.dsack_round_end; in rack_log_dsack_event()
2531 log.u_bbr.flex3 = rack->r_ctl.num_dsack; in rack_log_dsack_event()
2535 log.u_bbr.flex7 = rack->r_ctl.dsack_persist; in rack_log_dsack_event()
2538 log.u_bbr.epoch = rack->r_ctl.current_round; in rack_log_dsack_event()
2539 log.u_bbr.lt_epoch = rack->r_ctl.rc_considered_lost; in rack_log_dsack_event()
2562 if (rack->r_ctl.crte) { in rack_log_hdwr_pacing()
2563 ifp = rack->r_ctl.crte->ptbl->rs_ifp; in rack_log_hdwr_pacing()
2579 log.u_bbr.applimited = rack->r_ctl.rc_pace_max_segs; in rack_log_hdwr_pacing()
2584 log.u_bbr.delRate = rack->r_ctl.crte_prev_rate; in rack_log_hdwr_pacing()
2585 if (rack->r_ctl.crte) in rack_log_hdwr_pacing()
2586 log.u_bbr.cur_del_rate = rack->r_ctl.crte->rate; in rack_log_hdwr_pacing()
2589 log.u_bbr.rttProp = rack->r_ctl.last_hw_bw_req; in rack_log_hdwr_pacing()
2590 log.u_bbr.epoch = rack->r_ctl.current_round; in rack_log_hdwr_pacing()
2591 log.u_bbr.lt_epoch = rack->r_ctl.rc_considered_lost; in rack_log_hdwr_pacing()
2617 if (rack->r_ctl.crte != NULL) { in rack_get_output_bw()
2619 high_rate = tcp_hw_highest_rate(rack->r_ctl.crte); in rack_get_output_bw()
2690 log.u_bbr.inflight = ctf_flight_size(rack->rc_tp, rack->r_ctl.rc_sacked); in rack_log_retran_reason()
2691 log.u_bbr.pkts_out = rack->r_ctl.rc_out_at_rto; in rack_log_retran_reason()
2692 log.u_bbr.delivered = rack->r_ctl.rc_snd_max_at_rto; in rack_log_retran_reason()
2694 log.u_bbr.epoch = rack->r_ctl.current_round; in rack_log_retran_reason()
2695 log.u_bbr.lt_epoch = rack->r_ctl.rc_considered_lost; in rack_log_retran_reason()
2714 log.u_bbr.flex3 = rack->r_ctl.rc_hpts_flags; in rack_log_to_start()
2723 log.u_bbr.pkts_out = rack->r_ctl.rc_prr_sndcnt; in rack_log_to_start()
2726 log.u_bbr.inflight = ctf_flight_size(rack->rc_tp, rack->r_ctl.rc_sacked); in rack_log_to_start()
2727 log.u_bbr.pkts_out = rack->r_ctl.rc_out_at_rto; in rack_log_to_start()
2728 log.u_bbr.delivered = rack->r_ctl.rc_snd_max_at_rto; in rack_log_to_start()
2734 log.u_bbr.epoch = rack->r_ctl.roundends; in rack_log_to_start()
2735 log.u_bbr.bw_inuse = rack->r_ctl.current_round; in rack_log_to_start()
2737 log.u_bbr.bw_inuse |= rack->r_ctl.rc_considered_lost; in rack_log_to_start()
2757 log.u_bbr.flex1 = rack->r_ctl.rc_rack_min_rtt; in rack_log_to_event()
2766 log.u_bbr.flex5 = rack->r_ctl.rc_prr_sndcnt; in rack_log_to_event()
2768 log.u_bbr.inflight = ctf_flight_size(rack->rc_tp, rack->r_ctl.rc_sacked); in rack_log_to_event()
2769 log.u_bbr.pkts_out = rack->r_ctl.rc_out_at_rto; in rack_log_to_event()
2770 log.u_bbr.delivered = rack->r_ctl.rc_snd_max_at_rto; in rack_log_to_event()
2772 log.u_bbr.bw_inuse = rack->r_ctl.current_round; in rack_log_to_event()
2774 log.u_bbr.bw_inuse |= rack->r_ctl.rc_considered_lost; in rack_log_to_event()
2819 log.u_bbr.inflight = ctf_flight_size(rack->rc_tp, rack->r_ctl.rc_sacked); in rack_log_map_chg()
2823 log.u_bbr.lost = rack->r_ctl.rc_prr_sndcnt; in rack_log_map_chg()
2824 log.u_bbr.bw_inuse = rack->r_ctl.current_round; in rack_log_map_chg()
2826 log.u_bbr.bw_inuse |= rack->r_ctl.rc_considered_lost; in rack_log_map_chg()
2846 log.u_bbr.flex3 = rack->r_ctl.rc_rack_min_rtt; in rack_log_rtt_upd()
2847 log.u_bbr.flex4 = rack->r_ctl.rack_rs.rs_rtt_lowest; in rack_log_rtt_upd()
2848 log.u_bbr.flex5 = rack->r_ctl.rack_rs.rs_rtt_highest; in rack_log_rtt_upd()
2849 log.u_bbr.flex6 = rack->r_ctl.rack_rs.rs_us_rtrcnt; in rack_log_rtt_upd()
2851 log.u_bbr.rttProp = (uint64_t)rack->r_ctl.rack_rs.rs_rtt_tot; in rack_log_rtt_upd()
2852 log.u_bbr.flex8 = rack->r_ctl.rc_rate_sample_method; in rack_log_rtt_upd()
2854 log.u_bbr.delivered = rack->r_ctl.rack_rs.rs_us_rtrcnt; in rack_log_rtt_upd()
2855 log.u_bbr.pkts_out = rack->r_ctl.rack_rs.rs_flags; in rack_log_rtt_upd()
2856 log.u_bbr.inflight = ctf_flight_size(rack->rc_tp, rack->r_ctl.rc_sacked); in rack_log_rtt_upd()
2886 log.u_bbr.applimited = rack->r_ctl.rc_target_probertt_flight; in rack_log_rtt_upd()
2887 log.u_bbr.epoch = rack->r_ctl.rc_time_probertt_starts; in rack_log_rtt_upd()
2888 log.u_bbr.lt_epoch = rack->r_ctl.rc_time_probertt_entered; in rack_log_rtt_upd()
2889 log.u_bbr.cur_del_rate = rack->r_ctl.rc_lower_rtt_us_cts; in rack_log_rtt_upd()
2890 log.u_bbr.delRate = rack->r_ctl.rc_gp_srtt; in rack_log_rtt_upd()
2891 log.u_bbr.bw_inuse = tcp_tv_to_usectick(&rack->r_ctl.act_rcv_time); in rack_log_rtt_upd()
2923 log.u_bbr.inflight = ctf_flight_size(rack->rc_tp, rack->r_ctl.rc_sacked); in rack_log_rtt_sample()
2924 log.u_bbr.pkts_out = rack->r_ctl.rc_out_at_rto; in rack_log_rtt_sample()
2925 log.u_bbr.delivered = rack->r_ctl.rc_snd_max_at_rto; in rack_log_rtt_sample()
2933 log.u_bbr.delRate = rack->r_ctl.rack_rs.confidence; in rack_log_rtt_sample()
2935 log.u_bbr.delRate |= rack->r_ctl.rack_rs.rs_us_rtt; in rack_log_rtt_sample()
2939 log.u_bbr.lt_epoch = rack->r_ctl.timer_slop; in rack_log_rtt_sample()
2943 log.u_bbr.bw_inuse = rack->r_ctl.act_rcv_time.tv_sec; in rack_log_rtt_sample()
2945 log.u_bbr.bw_inuse += rack->r_ctl.act_rcv_time.tv_usec; in rack_log_rtt_sample()
2969 log.u_bbr.bw_inuse = rack->r_ctl.current_round; in rack_log_rtt_sample_calc()
2971 log.u_bbr.bw_inuse |= rack->r_ctl.rc_considered_lost; in rack_log_rtt_sample_calc()
2996 log.u_bbr.bw_inuse = rack->r_ctl.current_round; in rack_log_rtt_sendmap()
2998 log.u_bbr.bw_inuse |= rack->r_ctl.rc_considered_lost; in rack_log_rtt_sendmap()
3023 log.u_bbr.inflight = ctf_flight_size(rack->rc_tp, rack->r_ctl.rc_sacked); in rack_log_progress_event()
3024 log.u_bbr.pkts_out = rack->r_ctl.rc_out_at_rto; in rack_log_progress_event()
3025 log.u_bbr.delivered = rack->r_ctl.rc_snd_max_at_rto; in rack_log_progress_event()
3027 log.u_bbr.bw_inuse = rack->r_ctl.current_round; in rack_log_progress_event()
3029 log.u_bbr.bw_inuse |= rack->r_ctl.rc_considered_lost; in rack_log_progress_event()
3050 log.u_bbr.flex2 = rack->r_ctl.rc_prr_sndcnt; in rack_log_type_bbrsnd()
3051 log.u_bbr.flex4 = rack->r_ctl.rc_hpts_flags; in rack_log_type_bbrsnd()
3053 log.u_bbr.flex7 = (0x0000ffff & rack->r_ctl.rc_hpts_flags); in rack_log_type_bbrsnd()
3056 log.u_bbr.inflight = ctf_flight_size(rack->rc_tp, rack->r_ctl.rc_sacked); in rack_log_type_bbrsnd()
3057 log.u_bbr.pkts_out = rack->r_ctl.rc_out_at_rto; in rack_log_type_bbrsnd()
3058 log.u_bbr.delivered = rack->r_ctl.rc_snd_max_at_rto; in rack_log_type_bbrsnd()
3079 log.u_bbr.flex4 = rack->r_ctl.rc_hpts_flags; in rack_log_doseg_done()
3083 log.u_bbr.flex5 = rack->r_ctl.rc_prr_sndcnt; in rack_log_doseg_done()
3085 log.u_bbr.applimited = rack->r_ctl.rc_pace_min_segs; in rack_log_doseg_done()
3094 log.u_bbr.inflight = ctf_flight_size(rack->rc_tp, rack->r_ctl.rc_sacked); in rack_log_doseg_done()
3098 log.u_bbr.pkts_out = rack->r_ctl.rc_out_at_rto; in rack_log_doseg_done()
3099 log.u_bbr.delivered = rack->r_ctl.rc_snd_max_at_rto; in rack_log_doseg_done()
3101 log.u_bbr.bw_inuse = rack->r_ctl.current_round; in rack_log_doseg_done()
3103 log.u_bbr.bw_inuse |= rack->r_ctl.rc_considered_lost; in rack_log_doseg_done()
3124 log.u_bbr.flex1 = rack->r_ctl.rc_pace_min_segs; in rack_log_type_pacing_sizes()
3125 log.u_bbr.flex3 = rack->r_ctl.rc_pace_max_segs; in rack_log_type_pacing_sizes()
3128 log.u_bbr.flex7 = rack->r_ctl.rc_user_set_min_segs; in rack_log_type_pacing_sizes()
3132 log.u_bbr.inflight = ctf_flight_size(rack->rc_tp, rack->r_ctl.rc_sacked); in rack_log_type_pacing_sizes()
3133 log.u_bbr.pkts_out = rack->r_ctl.rc_out_at_rto; in rack_log_type_pacing_sizes()
3134 log.u_bbr.applimited = rack->r_ctl.rc_sacked; in rack_log_type_pacing_sizes()
3135 log.u_bbr.delivered = rack->r_ctl.rc_snd_max_at_rto; in rack_log_type_pacing_sizes()
3154 log.u_bbr.flex2 = rack->r_ctl.rc_hpts_flags; in rack_log_type_just_return()
3159 log.u_bbr.flex5 = rack->r_ctl.rc_prr_sndcnt; in rack_log_type_just_return()
3164 log.u_bbr.inflight = ctf_flight_size(rack->rc_tp, rack->r_ctl.rc_sacked); in rack_log_type_just_return()
3165 log.u_bbr.pkts_out = rack->r_ctl.rc_out_at_rto; in rack_log_type_just_return()
3166 log.u_bbr.delivered = rack->r_ctl.rc_snd_max_at_rto; in rack_log_type_just_return()
3169 log.u_bbr.bw_inuse = rack->r_ctl.current_round; in rack_log_type_just_return()
3171 log.u_bbr.bw_inuse |= rack->r_ctl.rc_considered_lost; in rack_log_type_just_return()
3190 log.u_bbr.flex2 = rack->r_ctl.rc_last_output_to; in rack_log_to_cancel()
3196 log.u_bbr.flex5 = rack->r_ctl.rc_prr_sndcnt; in rack_log_to_cancel()
3200 log.u_bbr.applimited = rack->r_ctl.rc_hpts_flags; in rack_log_to_cancel()
3202 log.u_bbr.inflight = ctf_flight_size(rack->rc_tp, rack->r_ctl.rc_sacked); in rack_log_to_cancel()
3203 log.u_bbr.pkts_out = rack->r_ctl.rc_out_at_rto; in rack_log_to_cancel()
3204 log.u_bbr.delivered = rack->r_ctl.rc_snd_max_at_rto; in rack_log_to_cancel()
3206 log.u_bbr.bw_inuse = rack->r_ctl.current_round; in rack_log_to_cancel()
3208 log.u_bbr.bw_inuse |= rack->r_ctl.rc_considered_lost; in rack_log_to_cancel()
3260 log.u_bbr.flex3 = rack->r_ctl.rc_timer_exp; in rack_log_to_processing()
3261 log.u_bbr.flex4 = rack->r_ctl.rc_hpts_flags; in rack_log_to_processing()
3266 log.u_bbr.flex6 = rack->r_ctl.rc_prr_sndcnt; in rack_log_to_processing()
3267 log.u_bbr.pkts_out = rack->r_ctl.rc_out_at_rto; in rack_log_to_processing()
3268 log.u_bbr.delivered = rack->r_ctl.rc_snd_max_at_rto; in rack_log_to_processing()
3271 log.u_bbr.inflight = ctf_flight_size(rack->rc_tp, rack->r_ctl.rc_sacked); in rack_log_to_processing()
3288 log.u_bbr.flex1 = rack->r_ctl.rc_prr_out; in rack_log_to_prr()
3289 log.u_bbr.flex2 = rack->r_ctl.rc_prr_recovery_fs; in rack_log_to_prr()
3293 log.u_bbr.flex3 = rack->r_ctl.rc_prr_sndcnt; in rack_log_to_prr()
3294 log.u_bbr.flex4 = rack->r_ctl.rc_prr_delivered; in rack_log_to_prr()
3295 log.u_bbr.flex5 = rack->r_ctl.rc_sacked; in rack_log_to_prr()
3296 log.u_bbr.flex6 = rack->r_ctl.rc_holes_rxt; in rack_log_to_prr()
3301 log.u_bbr.inflight = ctf_flight_size(rack->rc_tp, rack->r_ctl.rc_sacked); in rack_log_to_prr()
3386 rsm = TAILQ_FIRST(&rack->r_ctl.rc_free); in rack_alloc()
3387 TAILQ_REMOVE(&rack->r_ctl.rc_free, rsm, r_tnext); in rack_alloc()
3399 rack->r_ctl.rc_num_maps_alloced++; in rack_alloc()
3409 rsm = TAILQ_FIRST(&rack->r_ctl.rc_free); in rack_alloc()
3410 TAILQ_REMOVE(&rack->r_ctl.rc_free, rsm, r_tnext); in rack_alloc()
3421 (rack->r_ctl.rc_num_maps_alloced >= V_tcp_map_entries_limit)) { in rack_alloc_full_limit()
3440 if (rack->r_ctl.rc_split_limit > 0 && in rack_alloc_limit()
3441 rack->r_ctl.rc_num_split_allocs >= rack->r_ctl.rc_split_limit) { in rack_alloc_limit()
3455 rack->r_ctl.rc_num_split_allocs++; in rack_alloc_limit()
3470 rsm = TAILQ_LAST(&rack->r_ctl.rc_free, rack_head); in rack_free_trim()
3471 TAILQ_REMOVE(&rack->r_ctl.rc_free, rsm, r_tnext); in rack_free_trim()
3473 rack->r_ctl.rc_num_maps_alloced--; in rack_free_trim()
3482 if (rack->r_ctl.rc_app_limited_cnt > 0) { in rack_free()
3483 rack->r_ctl.rc_app_limited_cnt--; in rack_free()
3488 rack->r_ctl.rc_num_split_allocs--; in rack_free()
3490 if (rsm == rack->r_ctl.rc_first_appl) { in rack_free()
3491 rack->r_ctl.cleared_app_ack_seq = rsm->r_start + (rsm->r_end - rsm->r_start); in rack_free()
3492 rack->r_ctl.cleared_app_ack = 1; in rack_free()
3493 if (rack->r_ctl.rc_app_limited_cnt == 0) in rack_free()
3494 rack->r_ctl.rc_first_appl = NULL; in rack_free()
3496 rack->r_ctl.rc_first_appl = tqhash_find(rack->r_ctl.tqh, rsm->r_nseq_appl); in rack_free()
3498 if (rsm == rack->r_ctl.rc_resend) in rack_free()
3499 rack->r_ctl.rc_resend = NULL; in rack_free()
3500 if (rsm == rack->r_ctl.rc_end_appl) in rack_free()
3501 rack->r_ctl.rc_end_appl = NULL; in rack_free()
3502 if (rack->r_ctl.rc_tlpsend == rsm) in rack_free()
3503 rack->r_ctl.rc_tlpsend = NULL; in rack_free()
3504 if (rack->r_ctl.rc_sacklast == rsm) in rack_free()
3505 rack->r_ctl.rc_sacklast = NULL; in rack_free()
3511 TAILQ_INSERT_HEAD(&rack->r_ctl.rc_free, rsm, r_tnext); in rack_free()
3521 segsiz = min(ctf_fixed_maxseg(tp), rack->r_ctl.rc_pace_min_segs); in rack_get_measure_window()
3631 segsiz = min(ctf_fixed_maxseg(tp), rack->r_ctl.rc_pace_min_segs); in rack_enough_for_measurement()
3637 if (rack->r_ctl.rc_first_appl && in rack_enough_for_measurement()
3638 (SEQ_GEQ(th_ack, rack->r_ctl.rc_first_appl->r_end))) { in rack_enough_for_measurement()
3647 srtts = (rack->r_ctl.rc_gp_srtt * rack_min_srtts); in rack_enough_for_measurement()
3648 tim = tcp_tv_to_usectick(&rack->r_ctl.act_rcv_time) - tp->gput_ts; in rack_enough_for_measurement()
3682 log.u_bbr.flex4 = rack->r_ctl.rack_per_of_gp_ss; in rack_log_timely()
3683 log.u_bbr.flex5 = rack->r_ctl.rack_per_of_gp_ca; in rack_log_timely()
3684 log.u_bbr.flex6 = rack->r_ctl.rack_per_of_gp_rec; in rack_log_timely()
3692 log.u_bbr.pkts_out = rack->r_ctl.rc_rtt_diff; in rack_log_timely()
3694 log.u_bbr.inflight = ctf_flight_size(rack->rc_tp, rack->r_ctl.rc_sacked); in rack_log_timely()
3695 log.u_bbr.epoch = rack->r_ctl.rc_gp_srtt; in rack_log_timely()
3696 log.u_bbr.lt_epoch = rack->r_ctl.rc_prev_gp_srtt; in rack_log_timely()
3704 log.u_bbr.lost = rack->r_ctl.rc_loss_count; in rack_log_timely()
3794 if (rack->r_ctl.rack_per_of_gp_rec < 100) { in rack_validate_multipliers_at_or_above100()
3796 rack->r_ctl.rack_per_of_gp_rec = 100; in rack_validate_multipliers_at_or_above100()
3798 if (rack->r_ctl.rack_per_of_gp_ca < 100) { in rack_validate_multipliers_at_or_above100()
3799 rack->r_ctl.rack_per_of_gp_ca = 100; in rack_validate_multipliers_at_or_above100()
3801 if (rack->r_ctl.rack_per_of_gp_ss < 100) { in rack_validate_multipliers_at_or_above100()
3802 rack->r_ctl.rack_per_of_gp_ss = 100; in rack_validate_multipliers_at_or_above100()
3809 if (rack->r_ctl.rack_per_of_gp_ca > 100) { in rack_validate_multipliers_at_or_below_100()
3810 rack->r_ctl.rack_per_of_gp_ca = 100; in rack_validate_multipliers_at_or_below_100()
3812 if (rack->r_ctl.rack_per_of_gp_ss > 100) { in rack_validate_multipliers_at_or_below_100()
3813 rack->r_ctl.rack_per_of_gp_ss = 100; in rack_validate_multipliers_at_or_below_100()
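The two validators above clamp the per-phase gain percentages against 100 (1.0x): rack_validate_multipliers_at_or_above100() raises rec/ca/ss gains that fell below 100, while rack_validate_multipliers_at_or_below_100() lowers ca/ss gains that rose above it. The same clamping, as a pair of illustrative helpers:

/* Illustrative clamps; 100 means a 1.0x pacing multiplier. */
static inline uint16_t
clamp_at_or_above_100(uint16_t gain)
{
	return (gain < 100 ? 100 : gain);
}

static inline uint16_t
clamp_at_or_below_100(uint16_t gain)
{
	return (gain > 100 ? 100 : gain);
}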
3849 ((rack->r_ctl.rc_rtt_diff <= 0) || (timely_says <= 0))) in rack_increase_bw_mul()
3854 rack->r_ctl.rack_per_of_gp_rec)) { in rack_increase_bw_mul()
3856 calc = rack->r_ctl.rack_per_of_gp_rec + plus; in rack_increase_bw_mul()
3860 rack->r_ctl.rack_per_of_gp_rec = (uint16_t)calc; in rack_increase_bw_mul()
3861 if (rack->r_ctl.rack_per_upper_bound_ca && in rack_increase_bw_mul()
3863 (rack->r_ctl.rack_per_of_gp_rec > rack->r_ctl.rack_per_upper_bound_ca)) in rack_increase_bw_mul()
3864 rack->r_ctl.rack_per_of_gp_rec = rack->r_ctl.rack_per_upper_bound_ca; in rack_increase_bw_mul()
3869 rack->r_ctl.rack_per_of_gp_ca)) { in rack_increase_bw_mul()
3871 calc = rack->r_ctl.rack_per_of_gp_ca + plus; in rack_increase_bw_mul()
3875 rack->r_ctl.rack_per_of_gp_ca = (uint16_t)calc; in rack_increase_bw_mul()
3876 if (rack->r_ctl.rack_per_upper_bound_ca && in rack_increase_bw_mul()
3878 (rack->r_ctl.rack_per_of_gp_ca > rack->r_ctl.rack_per_upper_bound_ca)) in rack_increase_bw_mul()
3879 rack->r_ctl.rack_per_of_gp_ca = rack->r_ctl.rack_per_upper_bound_ca; in rack_increase_bw_mul()
3883 rack->r_ctl.rack_per_of_gp_ss)) { in rack_increase_bw_mul()
3885 calc = rack->r_ctl.rack_per_of_gp_ss + plus; in rack_increase_bw_mul()
3888 rack->r_ctl.rack_per_of_gp_ss = (uint16_t)calc; in rack_increase_bw_mul()
3889 if (rack->r_ctl.rack_per_upper_bound_ss && in rack_increase_bw_mul()
3891 (rack->r_ctl.rack_per_of_gp_ss > rack->r_ctl.rack_per_upper_bound_ss)) in rack_increase_bw_mul()
3892 rack->r_ctl.rack_per_of_gp_ss = rack->r_ctl.rack_per_upper_bound_ss; in rack_increase_bw_mul()
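rack_increase_bw_mul() raises each applicable gain by a computed plus, stores the result back as a uint16_t, and caps it at an optional per-phase upper bound (rack_per_upper_bound_ca / _ss). A hedged sketch of one such raise; the calculation of plus and the timely/rtt_diff gating are not reproduced:

/* Illustrative raise of one gain with an optional upper bound. */
static uint16_t
raise_gain(uint16_t gain, uint16_t plus, uint16_t upper_bound)
{
	uint32_t calc = (uint32_t)gain + plus;	/* widen to avoid wrap */

	if (calc > 0xffff)
		calc = 0xffff;			/* keep within uint16_t */
	if (upper_bound && calc > upper_bound)
		calc = upper_bound;		/* honor configured ceiling */
	return ((uint16_t)calc);
}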
3943 (uint64_t)get_filter_value_small(&rack->r_ctl.rc_gp_min_rtt)))/ in rack_get_decrease()
3967 highrttthresh = get_filter_value_small(&rack->r_ctl.rc_gp_min_rtt) * rack_gp_rtt_maxmul; in rack_decrease_highrtt()
4013 new_per = rack_decrease_highrtt(rack, rack->r_ctl.rack_per_of_gp_rec, rtt); in rack_decrease_bw_mul()
4014 alt = rack_get_decrease(rack, rack->r_ctl.rack_per_of_gp_rec, rtt_diff); in rack_decrease_bw_mul()
4020 val = new_per = alt = rack_get_decrease(rack, rack->r_ctl.rack_per_of_gp_rec, rtt_diff); in rack_decrease_bw_mul()
4021 if (rack->r_ctl.rack_per_of_gp_rec > val) { in rack_decrease_bw_mul()
4022 rec_red = (rack->r_ctl.rack_per_of_gp_rec - val); in rack_decrease_bw_mul()
4023 rack->r_ctl.rack_per_of_gp_rec = (uint16_t)val; in rack_decrease_bw_mul()
4025 rack->r_ctl.rack_per_of_gp_rec = rack_per_lower_bound; in rack_decrease_bw_mul()
4028 if (rack_per_lower_bound > rack->r_ctl.rack_per_of_gp_rec) in rack_decrease_bw_mul()
4029 rack->r_ctl.rack_per_of_gp_rec = rack_per_lower_bound; in rack_decrease_bw_mul()
4035 new_per = rack_decrease_highrtt(rack, rack->r_ctl.rack_per_of_gp_ss, rtt); in rack_decrease_bw_mul()
4036 alt = rack_get_decrease(rack, rack->r_ctl.rack_per_of_gp_ss, rtt_diff); in rack_decrease_bw_mul()
4042 val = new_per = alt = rack_get_decrease(rack, rack->r_ctl.rack_per_of_gp_ss, rtt_diff); in rack_decrease_bw_mul()
4043 if (rack->r_ctl.rack_per_of_gp_ss > new_per) { in rack_decrease_bw_mul()
4044 ss_red = rack->r_ctl.rack_per_of_gp_ss - val; in rack_decrease_bw_mul()
4045 rack->r_ctl.rack_per_of_gp_ss = (uint16_t)val; in rack_decrease_bw_mul()
4048 rack->r_ctl.rack_per_of_gp_ss = rack_per_lower_bound; in rack_decrease_bw_mul()
4057 logvar3 |= get_filter_value_small(&rack->r_ctl.rc_gp_min_rtt); in rack_decrease_bw_mul()
4062 if (rack_per_lower_bound > rack->r_ctl.rack_per_of_gp_ss) in rack_decrease_bw_mul()
4063 rack->r_ctl.rack_per_of_gp_ss = rack_per_lower_bound; in rack_decrease_bw_mul()
4068 new_per = rack_decrease_highrtt(rack, rack->r_ctl.rack_per_of_gp_ca, rtt); in rack_decrease_bw_mul()
4069 alt = rack_get_decrease(rack, rack->r_ctl.rack_per_of_gp_ca, rtt_diff); in rack_decrease_bw_mul()
4075 val = new_per = alt = rack_get_decrease(rack, rack->r_ctl.rack_per_of_gp_ca, rtt_diff); in rack_decrease_bw_mul()
4076 if (rack->r_ctl.rack_per_of_gp_ca > val) { in rack_decrease_bw_mul()
4077 ca_red = rack->r_ctl.rack_per_of_gp_ca - val; in rack_decrease_bw_mul()
4078 rack->r_ctl.rack_per_of_gp_ca = (uint16_t)val; in rack_decrease_bw_mul()
4080 rack->r_ctl.rack_per_of_gp_ca = rack_per_lower_bound; in rack_decrease_bw_mul()
4090 logvar3 |= get_filter_value_small(&rack->r_ctl.rc_gp_min_rtt); in rack_decrease_bw_mul()
4095 if (rack_per_lower_bound > rack->r_ctl.rack_per_of_gp_ca) in rack_decrease_bw_mul()
4096 rack->r_ctl.rack_per_of_gp_ca = rack_per_lower_bound; in rack_decrease_bw_mul()
4122 log.u_bbr.flex2 = rack->r_ctl.rc_time_probertt_starts; in rack_log_rtt_shrinks()
4123 log.u_bbr.flex3 = rack->r_ctl.rc_lower_rtt_us_cts; in rack_log_rtt_shrinks()
4124 log.u_bbr.flex4 = rack->r_ctl.rack_per_of_gp_ss; in rack_log_rtt_shrinks()
4135 log.u_bbr.flex7 = rack->r_ctl.rack_per_of_gp_probertt; in rack_log_rtt_shrinks()
4136 log.u_bbr.pacing_gain = rack->r_ctl.rack_per_of_gp_ca; in rack_log_rtt_shrinks()
4137 log.u_bbr.cwnd_gain = rack->r_ctl.rack_per_of_gp_rec; in rack_log_rtt_shrinks()
4141 log.u_bbr.cur_del_rate = rack->r_ctl.rc_highest_us_rtt; in rack_log_rtt_shrinks()
4143 log.u_bbr.cur_del_rate |= rack->r_ctl.rc_lowest_us_rtt; in rack_log_rtt_shrinks()
4144 log.u_bbr.applimited = rack->r_ctl.rc_time_probertt_entered; in rack_log_rtt_shrinks()
4145 log.u_bbr.pkts_out = rack->r_ctl.rc_rtt_diff; in rack_log_rtt_shrinks()
4146 log.u_bbr.inflight = ctf_flight_size(rack->rc_tp, rack->r_ctl.rc_sacked); in rack_log_rtt_shrinks()
4147 log.u_bbr.epoch = rack->r_ctl.rc_gp_srtt; in rack_log_rtt_shrinks()
4148 log.u_bbr.lt_epoch = rack->r_ctl.rc_prev_gp_srtt; in rack_log_rtt_shrinks()
4149 log.u_bbr.pkt_epoch = rack->r_ctl.rc_lower_rtt_us_cts; in rack_log_rtt_shrinks()
4150 log.u_bbr.delivered = rack->r_ctl.rc_target_probertt_flight; in rack_log_rtt_shrinks()
4151 log.u_bbr.lost = get_filter_value_small(&rack->r_ctl.rc_gp_min_rtt); in rack_log_rtt_shrinks()
4154 log.u_bbr.rttProp |= rack->r_ctl.rc_entry_gp_rtt; in rack_log_rtt_shrinks()
4159 0, &log, false, &rack->r_ctl.act_rcv_time); in rack_log_rtt_shrinks()
4171 rack->r_ctl.rc_target_probertt_flight = roundup((uint32_t)bwdp, segsiz); in rack_set_prtt_target()
4172 if (rack->r_ctl.rc_target_probertt_flight < (segsiz * rack_timely_min_segs)) { in rack_set_prtt_target()
4178 rack->r_ctl.rc_target_probertt_flight = (segsiz * rack_timely_min_segs); in rack_set_prtt_target()
4199 rack->r_ctl.rc_lower_rtt_us_cts = us_cts; in rack_enter_probertt()
4220 rack->r_ctl.rack_per_of_gp_probertt = rack_per_of_gp_probertt; in rack_enter_probertt()
4221 rack->r_ctl.rc_time_probertt_entered = us_cts; in rack_enter_probertt()
4223 rack->r_ctl.rc_pace_min_segs); in rack_enter_probertt()
4226 rack->r_ctl.rc_time_probertt_starts = 0; in rack_enter_probertt()
4227 rack->r_ctl.rc_entry_gp_rtt = rack->r_ctl.rc_gp_srtt; in rack_enter_probertt()
4229 rack_set_prtt_target(rack, segsiz, get_filter_value_small(&rack->r_ctl.rc_gp_min_rtt)); in rack_enter_probertt()
4231 rack_set_prtt_target(rack, segsiz, rack->r_ctl.rc_gp_srtt); in rack_enter_probertt()
4232 rack_log_rtt_shrinks(rack, us_cts, get_filter_value_small(&rack->r_ctl.rc_gp_min_rtt), in rack_enter_probertt()
4243 rack->r_ctl.rc_pace_min_segs); in rack_exit_probertt()
4273 rsm = tqhash_max(rack->r_ctl.tqh); in rack_exit_probertt()
4275 if (rack->r_ctl.rc_app_limited_cnt == 0) in rack_exit_probertt()
4276 rack->r_ctl.rc_end_appl = rack->r_ctl.rc_first_appl = rsm; in rack_exit_probertt()
4283 if (rack->r_ctl.rc_end_appl) in rack_exit_probertt()
4284 rack->r_ctl.rc_end_appl->r_nseq_appl = rsm->r_start; in rack_exit_probertt()
4285 rack->r_ctl.rc_end_appl = rsm; in rack_exit_probertt()
4288 rack->r_ctl.rc_app_limited_cnt++; in rack_exit_probertt()
4307 rack->r_ctl.rack_per_of_gp_ca = rack_atexit_prtt_hbp; in rack_exit_probertt()
4308 rack->r_ctl.rack_per_of_gp_ss = rack_atexit_prtt_hbp; in rack_exit_probertt()
4311 rack->r_ctl.rack_per_of_gp_ca = rack_atexit_prtt; in rack_exit_probertt()
4312 rack->r_ctl.rack_per_of_gp_ss = rack_atexit_prtt; in rack_exit_probertt()
4318 rack->r_ctl.rc_rtt_diff = 0; in rack_exit_probertt()
4335 get_filter_value_small(&rack->r_ctl.rc_gp_min_rtt)); in rack_exit_probertt()
4339 rack->r_ctl.rc_gp_srtt); in rack_exit_probertt()
4343 rack->r_ctl.rc_entry_gp_rtt); in rack_exit_probertt()
4348 sum = rack->r_ctl.rc_entry_gp_rtt; in rack_exit_probertt()
4350 sum /= (uint64_t)(max(1, rack->r_ctl.rc_gp_srtt)); in rack_exit_probertt()
4358 setval = rack->r_ctl.rc_entry_gp_rtt; in rack_exit_probertt()
4365 setval = rack->r_ctl.rc_gp_srtt; in rack_exit_probertt()
4366 if (setval > rack->r_ctl.rc_entry_gp_rtt) in rack_exit_probertt()
4367 setval = rack->r_ctl.rc_entry_gp_rtt; in rack_exit_probertt()
4374 setval = get_filter_value_small(&rack->r_ctl.rc_gp_min_rtt); in rack_exit_probertt()
4381 ebdp = rack->r_ctl.rc_target_probertt_flight; in rack_exit_probertt()
4384 setto = rack->r_ctl.rc_target_probertt_flight + ebdp; in rack_exit_probertt()
4386 setto = rack->r_ctl.rc_target_probertt_flight; in rack_exit_probertt()
4396 get_filter_value_small(&rack->r_ctl.rc_gp_min_rtt), in rack_exit_probertt()
4399 rack->r_ctl.rc_probertt_sndmax_atexit = rack->rc_tp->snd_max; in rack_exit_probertt()
4400 rack->r_ctl.rc_time_probertt_entered = us_cts; in rack_exit_probertt()
4401 rack->r_ctl.rc_time_probertt_starts = rack->r_ctl.rc_lower_rtt_us_cts = us_cts; in rack_exit_probertt()
4402 rack->r_ctl.rc_time_of_last_probertt = us_cts; in rack_exit_probertt()
4418 if (rack->r_ctl.rc_went_idle_time && in rack_check_probe_rtt()
4419 ((us_cts - rack->r_ctl.rc_went_idle_time) > rack_min_probertt_hold)) { in rack_check_probe_rtt()
4425 TSTMP_GT(us_cts, rack->r_ctl.rc_time_probertt_entered) && in rack_check_probe_rtt()
4426 ((us_cts - rack->r_ctl.rc_time_probertt_entered) > rack_probe_rtt_safety_val)) { in rack_check_probe_rtt()
4431 get_filter_value_small(&rack->r_ctl.rc_gp_min_rtt), in rack_check_probe_rtt()
4436 endtime = rack->r_ctl.rc_time_probertt_entered + (rack->r_ctl.rc_gp_srtt * rack_max_drain_wait); in rack_check_probe_rtt()
4438 endtime += (rack->r_ctl.rc_gp_srtt * rack_max_drain_hbp); in rack_check_probe_rtt()
4440 must_stay = rack->r_ctl.rc_time_probertt_entered + (rack->r_ctl.rc_gp_srtt * rack_must_drain); in rack_check_probe_rtt()
4441 …if ((ctf_flight_size(rack->rc_tp, rack->r_ctl.rc_sacked) > rack->r_ctl.rc_target_probertt_flight) … in rack_check_probe_rtt()
4446 if (TSTMP_GT(us_cts, rack->r_ctl.rc_time_probertt_entered)) in rack_check_probe_rtt()
4447 calc = us_cts - rack->r_ctl.rc_time_probertt_entered; in rack_check_probe_rtt()
4450 calc /= max(rack->r_ctl.rc_gp_srtt, 1); in rack_check_probe_rtt()
4455 rack->r_ctl.rack_per_of_gp_probertt = rack_per_of_gp_lowthresh; in rack_check_probe_rtt()
4457 rack->r_ctl.rack_per_of_gp_probertt = rack_per_of_gp_probertt - calc; in rack_check_probe_rtt()
4459 if (rack->r_ctl.rack_per_of_gp_probertt < rack_per_of_gp_lowthresh) in rack_check_probe_rtt()
4460 rack->r_ctl.rack_per_of_gp_probertt = rack_per_of_gp_lowthresh; in rack_check_probe_rtt()
4465 if (rack->r_ctl.rc_time_probertt_starts == 0) { in rack_check_probe_rtt()
4468 (ctf_flight_size(rack->rc_tp, rack->r_ctl.rc_sacked) > in rack_check_probe_rtt()
4469 rack->r_ctl.rc_target_probertt_flight)) { in rack_check_probe_rtt()
4474 get_filter_value_small(&rack->r_ctl.rc_gp_min_rtt), in rack_check_probe_rtt()
4476 rack->r_ctl.rc_time_probertt_starts = us_cts; in rack_check_probe_rtt()
4477 if (rack->r_ctl.rc_time_probertt_starts == 0) in rack_check_probe_rtt()
4478 rack->r_ctl.rc_time_probertt_starts = 1; in rack_check_probe_rtt()
4480 rack->r_ctl.rack_per_of_gp_probertt = rack_per_of_gp_probertt; in rack_check_probe_rtt()
4485 no_overflow = ((uint64_t)rack->r_ctl.rc_gp_srtt * in rack_check_probe_rtt()
4492 endtime += rack->r_ctl.rc_time_probertt_starts; in rack_check_probe_rtt()
4499 (TSTMP_GT(us_cts, rack->r_ctl.rc_lower_rtt_us_cts)) && in rack_check_probe_rtt()
4500 ((us_cts - rack->r_ctl.rc_lower_rtt_us_cts) >= rack_time_between_probertt)) { in rack_check_probe_rtt()
4520 losses = rack->r_ctl.rc_loss_count - rack->r_ctl.rc_loss_at_start; in rack_update_multiplier()
4523 up_bnd = rack->r_ctl.last_gp_comp_bw * (uint64_t)rack_gp_per_bw_mul_up; in rack_update_multiplier()
4525 up_bnd += rack->r_ctl.last_gp_comp_bw; in rack_update_multiplier()
4527 subfr = (uint64_t)rack->r_ctl.last_gp_comp_bw * (uint64_t)rack_gp_per_bw_mul_down; in rack_update_multiplier()
4529 low_bnd = rack->r_ctl.last_gp_comp_bw - subfr; in rack_update_multiplier()
4530 if ((timely_says == 2) && (rack->r_ctl.rc_no_push_at_mrtt)) { in rack_update_multiplier()
4543 if (rack->r_ctl.rc_no_push_at_mrtt > 1) in rack_update_multiplier()
4562 rack->r_ctl.last_gp_comp_bw = cur_bw; in rack_update_multiplier()
4574 if ((rack->rc_gp_saw_ca && rack->r_ctl.rack_per_of_gp_ca <= rack_down_raise_thresh) || in rack_update_multiplier()
4575 (rack->rc_gp_saw_ss && rack->r_ctl.rack_per_of_gp_ss <= rack_down_raise_thresh) || in rack_update_multiplier()
4612 rack->r_ctl.last_gp_comp_bw = cur_bw; in rack_update_multiplier()
4614 rack->r_ctl.rack_per_upper_bound_ss && in rack_update_multiplier()
4615 (rack->r_ctl.rack_per_of_gp_ss == rack->r_ctl.rack_per_upper_bound_ss)) { in rack_update_multiplier()
4623 rack->r_ctl.rack_per_upper_bound_ca && in rack_update_multiplier()
4624 (rack->r_ctl.rack_per_of_gp_ca == rack->r_ctl.rack_per_upper_bound_ca)) { in rack_update_multiplier()
4681 if (rtt >= (get_filter_value_small(&rack->r_ctl.rc_gp_min_rtt) * in rack_make_timely_judgement()
4685 log_mult = get_filter_value_small(&rack->r_ctl.rc_gp_min_rtt) * rack_gp_rtt_maxmul; in rack_make_timely_judgement()
4689 get_filter_value_small(&rack->r_ctl.rc_gp_min_rtt), in rack_make_timely_judgement()
4691 } else if (rtt <= (get_filter_value_small(&rack->r_ctl.rc_gp_min_rtt) + in rack_make_timely_judgement()
4692 ((get_filter_value_small(&rack->r_ctl.rc_gp_min_rtt) * rack_gp_rtt_minmul) / in rack_make_timely_judgement()
4695 log_mult = get_filter_value_small(&rack->r_ctl.rc_gp_min_rtt) + in rack_make_timely_judgement()
4696 ((get_filter_value_small(&rack->r_ctl.rc_gp_min_rtt) * rack_gp_rtt_minmul) / in rack_make_timely_judgement()
4702 get_filter_value_small(&rack->r_ctl.rc_gp_min_rtt), in rack_make_timely_judgement()
4725 get_filter_value_small(&rack->r_ctl.rc_gp_min_rtt), log_rtt_a_diff, __LINE__, 6); in rack_make_timely_judgement()
4730 get_filter_value_small(&rack->r_ctl.rc_gp_min_rtt), log_rtt_a_diff, __LINE__, 7); in rack_make_timely_judgement()
4796 rsm = tqhash_find(rack->r_ctl.tqh, tp->gput_seq); in rack_clear_gp_marks()
4798 rsm = tqhash_min(rack->r_ctl.tqh); in rack_clear_gp_marks()
4803 rsm = tqhash_next(rack->r_ctl.tqh, rsm); in rack_clear_gp_marks()
4824 rsm = tqhash_min(rack->r_ctl.tqh); in rack_tend_gp_marks()
4829 rsm = tqhash_next(rack->r_ctl.tqh, rsm); in rack_tend_gp_marks()
4837 rsm = tqhash_find(rack->r_ctl.tqh, tp->gput_seq); in rack_tend_gp_marks()
4850 rsm = tqhash_next(rack->r_ctl.tqh, rsm); in rack_tend_gp_marks()
4855 rsm = tqhash_next(rack->r_ctl.tqh, rsm); in rack_tend_gp_marks()
4874 log.u_bbr.delRate = rack->r_ctl.gp_bw; in rack_log_gp_calc()
4881 0, &log, false, &rack->r_ctl.act_rcv_time); in rack_log_gp_calc()
4895 us_cts = tcp_tv_to_usectick(&rack->r_ctl.act_rcv_time); in rack_do_goodput_measurement()
4896 segsiz = min(ctf_fixed_maxseg(tp), rack->r_ctl.rc_pace_min_segs); in rack_do_goodput_measurement()
4901 if (rack->r_ctl.rc_gp_cumack_ts > rack->r_ctl.rc_gp_output_ts) in rack_do_goodput_measurement()
4902 stim = rack->r_ctl.rc_gp_cumack_ts - rack->r_ctl.rc_gp_output_ts; in rack_do_goodput_measurement()
4917 rack_log_gpset(rack, th_ack, us_cts, rack->r_ctl.rc_gp_cumack_ts, __LINE__, 3, NULL); in rack_do_goodput_measurement()
4929 if (rack->r_ctl.rc_gp_lowrtt == 0xffffffff) { in rack_do_goodput_measurement()
4963 rack->r_ctl.last_max_bw = rack->r_ctl.rc_gp_high_rwnd; in rack_do_goodput_measurement()
4964 rack->r_ctl.last_max_bw *= HPTS_USEC_IN_SEC; in rack_do_goodput_measurement()
4965 rack->r_ctl.last_max_bw /= rack->r_ctl.rc_gp_lowrtt; in rack_do_goodput_measurement()
4998 rack->r_ctl.rc_prev_gp_srtt = rack->r_ctl.rc_gp_srtt; in rack_do_goodput_measurement()
5002 rack->r_ctl.rc_app_limited_cnt, in rack_do_goodput_measurement()
5010 new_rtt_diff = (int32_t)rack->r_ctl.rc_gp_srtt - (int32_t)rack->r_ctl.rc_prev_gp_srtt; in rack_do_goodput_measurement()
5013 rack->r_ctl.rc_rtt_diff = new_rtt_diff; in rack_do_goodput_measurement()
5022 rack->r_ctl.rc_rtt_diff -= (rack->r_ctl.rc_rtt_diff / 8); in rack_do_goodput_measurement()
5023 rack->r_ctl.rc_rtt_diff += (new_rtt_diff / 8); in rack_do_goodput_measurement()
5027 rack->r_ctl.rc_gp_srtt, in rack_do_goodput_measurement()
5028 rack->r_ctl.rc_rtt_diff, in rack_do_goodput_measurement()
5029 rack->r_ctl.rc_prev_gp_srtt in rack_do_goodput_measurement()
5033 if (bytes_ps > rack->r_ctl.last_max_bw) { in rack_do_goodput_measurement()
5044 bytes_ps, rack->r_ctl.last_max_bw, 0, in rack_do_goodput_measurement()
5046 bytes_ps = rack->r_ctl.last_max_bw; in rack_do_goodput_measurement()
5052 rack->r_ctl.gp_bw = bytes_ps; in rack_do_goodput_measurement()
5054 rack->r_ctl.num_measurements = 1; in rack_do_goodput_measurement()
5058 rack->r_ctl.rc_app_limited_cnt, in rack_do_goodput_measurement()
5062 (rack->r_ctl.rc_hpts_flags & PACE_PKT_OUTPUT)) { in rack_do_goodput_measurement()
5072 rack->r_ctl.rc_hpts_flags = 0; in rack_do_goodput_measurement()
5073 rack->r_ctl.rc_last_output_to = 0; in rack_do_goodput_measurement()
5076 } else if (rack->r_ctl.num_measurements < RACK_REQ_AVG) { in rack_do_goodput_measurement()
5078 rack->r_ctl.gp_bw += bytes_ps; in rack_do_goodput_measurement()
5079 addpart = rack->r_ctl.num_measurements; in rack_do_goodput_measurement()
5080 rack->r_ctl.num_measurements++; in rack_do_goodput_measurement()
5081 if (rack->r_ctl.num_measurements >= RACK_REQ_AVG) { in rack_do_goodput_measurement()
5083 rack->r_ctl.gp_bw /= (uint64_t)rack->r_ctl.num_measurements; in rack_do_goodput_measurement()
5102 if (rack->r_ctl.num_measurements < 0xff) { in rack_do_goodput_measurement()
5103 rack->r_ctl.num_measurements++; in rack_do_goodput_measurement()
5110 if (rack->r_ctl.rc_rack_min_rtt) in rack_do_goodput_measurement()
5111 srtt = rack->r_ctl.rc_rack_min_rtt; in rack_do_goodput_measurement()
5132 subpart = rack->r_ctl.gp_bw * utim; in rack_do_goodput_measurement()
5134 if (subpart < (rack->r_ctl.gp_bw / 2)) { in rack_do_goodput_measurement()
5153 subpart = rack->r_ctl.gp_bw / 2; in rack_do_goodput_measurement()
5158 resid_bw = rack->r_ctl.gp_bw - subpart; in rack_do_goodput_measurement()
5159 rack->r_ctl.gp_bw = resid_bw + addpart; in rack_do_goodput_measurement()
5171 subpart = rack->r_ctl.gp_bw * utim; in rack_do_goodput_measurement()
5182 subpart = rack->r_ctl.gp_bw / rack_wma_divisor; in rack_do_goodput_measurement()
5187 (bytes_ps > rack->r_ctl.gp_bw)) { in rack_do_goodput_measurement()
5195 resid_bw = rack->r_ctl.gp_bw - subpart; in rack_do_goodput_measurement()
5196 rack->r_ctl.gp_bw = resid_bw + addpart; in rack_do_goodput_measurement()
5208 (rack->r_ctl.num_measurements >= RACK_REQ_AVG)) { in rack_do_goodput_measurement()
5218 log.u_bbr.flex1 = rack->r_ctl.current_round; in rack_do_goodput_measurement()
5219 log.u_bbr.flex2 = rack->r_ctl.last_rnd_of_gp_rise; in rack_do_goodput_measurement()
5221 log.u_bbr.cur_del_rate = rack->r_ctl.last_gpest; in rack_do_goodput_measurement()
5226 if ((rack->r_ctl.num_measurements == RACK_REQ_AVG) || in rack_do_goodput_measurement()
5227 (rack->r_ctl.last_gpest == 0)) { in rack_do_goodput_measurement()
5234 rack->r_ctl.last_rnd_of_gp_rise = rack->r_ctl.current_round; in rack_do_goodput_measurement()
5235 rack->r_ctl.last_gpest = rack->r_ctl.gp_bw; in rack_do_goodput_measurement()
5236 } else if (gp_est >= rack->r_ctl.last_gpest) { in rack_do_goodput_measurement()
5243 gp_est /= rack->r_ctl.last_gpest; in rack_do_goodput_measurement()
5244 if ((uint32_t)gp_est > rack->r_ctl.gp_gain_req) { in rack_do_goodput_measurement()
5254 log.u_bbr.flex1 = rack->r_ctl.current_round; in rack_do_goodput_measurement()
5256 log.u_bbr.flex3 = rack->r_ctl.gp_gain_req; in rack_do_goodput_measurement()
5258 log.u_bbr.cur_del_rate = rack->r_ctl.last_gpest; in rack_do_goodput_measurement()
5263 rack->r_ctl.last_rnd_of_gp_rise = rack->r_ctl.current_round; in rack_do_goodput_measurement()
5264 if (rack->r_ctl.use_gp_not_last == 1) in rack_do_goodput_measurement()
5265 rack->r_ctl.last_gpest = rack->r_ctl.gp_bw; in rack_do_goodput_measurement()
5267 rack->r_ctl.last_gpest = bytes_ps; in rack_do_goodput_measurement()
5272 (rack->r_ctl.num_measurements >= rack->r_ctl.req_measurements)) { in rack_do_goodput_measurement()
5289 rack->r_ctl.rc_gp_srtt, in rack_do_goodput_measurement()
5290 rack->r_ctl.rc_rtt_diff); in rack_do_goodput_measurement()
5299 rack->r_ctl.gp_bw, /* delRate */ in rack_do_goodput_measurement()
5303 rack->r_ctl.rc_prev_gp_srtt = rack->r_ctl.rc_gp_srtt; in rack_do_goodput_measurement()
5305 rack->r_ctl.rc_loss_at_start = rack->r_ctl.rc_loss_count; in rack_do_goodput_measurement()
5346 if (rack->r_ctl.rc_first_appl && in rack_do_goodput_measurement()
5348 rack->r_ctl.rc_app_limited_cnt && in rack_do_goodput_measurement()
5349 (SEQ_GT(rack->r_ctl.rc_first_appl->r_start, th_ack)) && in rack_do_goodput_measurement()
5350 ((rack->r_ctl.rc_first_appl->r_end - th_ack) > in rack_do_goodput_measurement()
5357 rack->r_ctl.rc_gp_lowrtt = 0xffffffff; in rack_do_goodput_measurement()
5358 rack->r_ctl.rc_gp_high_rwnd = rack->rc_tp->snd_wnd; in rack_do_goodput_measurement()
5359 tp->gput_ts = tcp_tv_to_usectick(&rack->r_ctl.act_rcv_time); in rack_do_goodput_measurement()
5365 (SEQ_GEQ(tp->gput_seq, rack->r_ctl.rc_probertt_sndmax_atexit))) in rack_do_goodput_measurement()
5367 if ((rack->r_ctl.rc_first_appl->r_end - th_ack) >= rack_get_measure_window(tp, rack)) { in rack_do_goodput_measurement()
5372 tp->gput_ack = tp->gput_seq + (rack->r_ctl.rc_first_appl->r_end - th_ack); in rack_do_goodput_measurement()
5399 rack->r_ctl.rc_gp_cumack_ts = 0; in rack_do_goodput_measurement()
5400 rsm = tqhash_find(rack->r_ctl.tqh, tp->gput_seq); in rack_do_goodput_measurement()
5417 nrsm = tqhash_next(rack->r_ctl.tqh, rsm); in rack_do_goodput_measurement()
5426 rack->r_ctl.rc_gp_output_ts = rsm->r_tim_lastsent[0]; in rack_do_goodput_measurement()
5436 rack->r_ctl.rc_gp_output_ts = rack_to_usec_ts(&tv); in rack_do_goodput_measurement()
5444 (((uint64_t)rack->r_ctl.rc_app_limited_cnt << 32) | (uint64_t)rack->r_ctl.rc_gp_output_ts), in rack_do_goodput_measurement()
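Within rack_do_goodput_measurement(), the gp_bw fragments show the estimate being seeded from the first sample, summed and then divided once RACK_REQ_AVG samples exist, and afterwards updated as a weighted moving average in which a portion of the old estimate (subpart) is replaced by a portion of the new sample (addpart). The sketch below reproduces only that shape: the per-sample weighting by how much of an SRTT the measurement covered is omitted, and REQ_AVG / WMA_DIVISOR are assumed stand-ins for RACK_REQ_AVG and rack_wma_divisor, not their real values.

#include <stdint.h>

#define REQ_AVG		4	/* assumed stand-in for RACK_REQ_AVG */
#define WMA_DIVISOR	8	/* assumed stand-in for rack_wma_divisor */

struct gp_est {
	uint64_t gp_bw;			/* running goodput estimate, bytes/sec */
	uint16_t num_measurements;	/* samples folded in so far */
};

/* Illustrative goodput update: straight average first, then a WMA. */
static void
gp_update(struct gp_est *e, uint64_t bytes_ps)
{
	if (e->num_measurements == 0) {
		e->gp_bw = bytes_ps;		/* first sample seeds the estimate */
		e->num_measurements = 1;
	} else if (e->num_measurements < REQ_AVG) {
		e->gp_bw += bytes_ps;		/* accumulate early samples */
		e->num_measurements++;
		if (e->num_measurements == REQ_AVG)
			e->gp_bw /= REQ_AVG;	/* switch to the averaged form */
	} else {
		uint64_t subpart = e->gp_bw / WMA_DIVISOR;	/* share of old estimate replaced */
		uint64_t addpart = bytes_ps / WMA_DIVISOR;	/* share of new sample folded in */

		e->gp_bw = (e->gp_bw - subpart) + addpart;
	}
}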
5471 if ((post_recovery) && (rack->r_ctl.rc_early_recovery_segs)) { in rack_ack_received()
5474 max = rack->r_ctl.rc_early_recovery_segs * ctf_fixed_maxseg(tp); in rack_ack_received()
5481 ((int32_t)rack->r_ctl.cwnd_to_use) - tp->snd_wnd); in rack_ack_received()
5492 rack->r_ctl.lt_bw_bytes += (tp->snd_max - rack->r_ctl.lt_seq); in rack_ack_received()
5493 rack->r_ctl.lt_seq = tp->snd_max; in rack_ack_received()
5494 tmark = tcp_tv_to_lusectick(&rack->r_ctl.act_rcv_time); in rack_ack_received()
5495 if (tmark >= rack->r_ctl.lt_timemark) { in rack_ack_received()
5496 rack->r_ctl.lt_bw_time += (tmark - rack->r_ctl.lt_timemark); in rack_ack_received()
5498 rack->r_ctl.lt_timemark = tmark; in rack_ack_received()
5516 if (tp->t_bytes_acked >= rack->r_ctl.cwnd_to_use) { in rack_ack_received()
5517 tp->t_bytes_acked -= rack->r_ctl.cwnd_to_use; in rack_ack_received()
5559 if (SEQ_GEQ(th_ack, rack->r_ctl.rc_snd_max_at_rto)) { in rack_ack_received()
5564 rack->r_ctl.rc_out_at_rto = 0; in rack_ack_received()
5573 if (acked <= rack->r_ctl.rc_out_at_rto){ in rack_ack_received()
5574 rack->r_ctl.rc_out_at_rto -= acked; in rack_ack_received()
5576 rack->r_ctl.rc_out_at_rto = 0; in rack_ack_received()
5581 stats_voi_update_abs_ulong(tp->t_stats, VOI_TCP_LCWIN, rack->r_ctl.cwnd_to_use); in rack_ack_received()
5583 if (rack->r_ctl.rc_rack_largest_cwnd < rack->r_ctl.cwnd_to_use) { in rack_ack_received()
5584 rack->r_ctl.rc_rack_largest_cwnd = rack->r_ctl.cwnd_to_use; in rack_ack_received()
5610 if ((rack->r_ctl.rc_prr_sndcnt > 0) || in tcp_rack_partialack()
5659 log.u_bbr.pkts_out = rack->r_ctl.rc_prr_sndcnt; in rack_post_recovery()
5666 (rack->r_ctl.rc_prr_sndcnt > 0)) { in rack_post_recovery()
5682 rack->r_ctl.rc_prr_sndcnt); in rack_post_recovery()
5684 rack->r_ctl.rc_prr_sndcnt = 0; in rack_post_recovery()
5689 if (rack->r_ctl.dsack_persist) { in rack_post_recovery()
5690 rack->r_ctl.dsack_persist--; in rack_post_recovery()
5691 if (rack->r_ctl.num_dsack && (rack->r_ctl.dsack_persist == 0)) { in rack_post_recovery()
5692 rack->r_ctl.num_dsack = 0; in rack_post_recovery()
5698 if (rack->r_ctl.rto_ssthresh > tp->snd_ssthresh) in rack_post_recovery()
5699 tp->snd_ssthresh = rack->r_ctl.rto_ssthresh; in rack_post_recovery()
5731 rack->r_ctl.rc_prr_delivered = 0; in rack_cong_signal()
5732 rack->r_ctl.rc_prr_out = 0; in rack_cong_signal()
5735 rack->r_ctl.rc_prr_sndcnt = ctf_fixed_maxseg(tp); in rack_cong_signal()
5738 rack->r_ctl.rc_prr_recovery_fs = tp->snd_max - tp->snd_una; in rack_cong_signal()
5770 min(tp->snd_wnd, rack->r_ctl.cwnd_to_use) / 2 / in rack_cong_signal()
5802 rack->r_ctl.dsack_byte_cnt = 0; in rack_cong_signal()
5803 rack->r_ctl.retran_during_recovery = 0; in rack_cong_signal()
5804 rack->r_ctl.rc_cwnd_at_erec = cwnd_enter; in rack_cong_signal()
5805 rack->r_ctl.rc_ssthresh_at_erec = ssthresh_enter; in rack_cong_signal()
5862 TAILQ_FOREACH(rsm, &rack->r_ctl.rc_tmap, r_tnext) { in rack_find_lowest_rsm()
5884 TQHASH_FOREACH_REVERSE_FROM(prsm, rack->r_ctl.tqh) { in rack_find_high_nonack()
5917 if (rack->r_ctl.rc_reorder_ts) { in rack_calc_thresh_rack()
5918 if (rack->r_ctl.rc_reorder_fade) { in rack_calc_thresh_rack()
5919 if (SEQ_GEQ(cts, rack->r_ctl.rc_reorder_ts)) { in rack_calc_thresh_rack()
5920 lro = cts - rack->r_ctl.rc_reorder_ts; in rack_calc_thresh_rack()
5932 if (lro > rack->r_ctl.rc_reorder_fade) { in rack_calc_thresh_rack()
5934 rack->r_ctl.rc_reorder_ts = 0; in rack_calc_thresh_rack()
5945 thresh = srtt + rack->r_ctl.rc_pkt_delay; in rack_calc_thresh_rack()
5952 if (rack->r_ctl.rc_reorder_shift) in rack_calc_thresh_rack()
5953 thresh += (srtt >> rack->r_ctl.rc_reorder_shift); in rack_calc_thresh_rack()
5959 (rack->r_ctl.num_dsack > 0)) { in rack_calc_thresh_rack()
5964 thresh += rack->r_ctl.num_dsack * (srtt >> 2); in rack_calc_thresh_rack()
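The rack_calc_thresh_rack() fragments assemble the RACK reordering threshold: start from SRTT plus the configured packet delay (rc_pkt_delay), add an SRTT fraction shifted by rc_reorder_shift when reordering has been observed and has not yet faded out (rc_reorder_fade / rc_reorder_ts), and pad further by a quarter SRTT per observed DSACK. A hedged sketch with the fade bookkeeping collapsed into a flag; the fallback shift of 2 when rc_reorder_shift is zero is an assumption:

#include <stdint.h>

/*
 * Illustrative reordering threshold; srtt, pkt_delay and the return
 * value share the same time unit (microseconds in rack.c).
 */
static uint32_t
calc_thresh_rack(uint32_t srtt, uint32_t pkt_delay, int reorder_seen,
    uint32_t reorder_shift, uint32_t num_dsack)
{
	uint32_t thresh = srtt + pkt_delay;

	if (reorder_seen)	/* extra allowance while reordering is live */
		thresh += srtt >> (reorder_shift ? reorder_shift : 2);
	if (num_dsack > 0)	/* DSACKs hint at spurious marking; pad srtt/4 each */
		thresh += num_dsack * (srtt >> 2);
	return (thresh);
}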
5991 if (rack->r_ctl.rc_tlp_threshold) in rack_calc_thresh_tlp()
5992 thresh = srtt + (srtt / rack->r_ctl.rc_tlp_threshold); in rack_calc_thresh_tlp()
5997 segsiz = min(ctf_fixed_maxseg(tp), rack->r_ctl.rc_pace_min_segs); in rack_calc_thresh_tlp()
6001 if (((tp->snd_max - tp->snd_una) - rack->r_ctl.rc_sacked + rack->r_ctl.rc_holes_rxt) <= segsiz) { in rack_calc_thresh_tlp()
6097 if (tqhash_empty(rack->r_ctl.tqh)) { in rack_check_recovery_mode()
6100 rsm = TAILQ_FIRST(&rack->r_ctl.rc_tmap); in rack_check_recovery_mode()
6133 rack_persist_min, rack_persist_max, rack->r_ctl.timer_slop); in rack_get_persists_timer_val()
6134 rack->r_ctl.rc_hpts_flags |= PACE_TMR_PERSIT; in rack_get_persists_timer_val()
6167 rsm = TAILQ_FIRST(&rack->r_ctl.rc_tmap); in rack_timer_start()
6172 rsm = TAILQ_FIRST(&rack->r_ctl.rc_tmap); in rack_timer_start()
6185 if (TSTMP_GEQ(rack->r_ctl.rc_tlp_rxt_last_time, ((uint32_t)rsm->r_tim_lastsent[idx]))) in rack_timer_start()
6186 tstmp_touse = (uint32_t)rack->r_ctl.rc_tlp_rxt_last_time; in rack_timer_start()
6194 rack->r_ctl.rc_hpts_flags |= PACE_TMR_RXT; in rack_timer_start()
6199 to = rack->r_ctl.rc_min_to; in rack_timer_start()
6251 (rack->r_ctl.rc_prr_sndcnt < ctf_fixed_maxseg(tp))) { in rack_timer_start()
6268 if (to < rack->r_ctl.rc_min_to) { in rack_timer_start()
6269 to = rack->r_ctl.rc_min_to; in rack_timer_start()
6274 to = rack->r_ctl.rc_min_to; in rack_timer_start()
6282 (rack->r_ctl.rc_tlp_cnt_out >= rack_tlp_limit)) { in rack_timer_start()
6289 rsm = TAILQ_LAST_FAST(&rack->r_ctl.rc_tmap, rack_sendmap, r_tnext); in rack_timer_start()
6301 if (TSTMP_GEQ(((uint32_t)rsm->r_tim_lastsent[idx]), rack->r_ctl.rc_tlp_rxt_last_time)) in rack_timer_start()
6304 tstmp_touse = (uint32_t)rack->r_ctl.rc_tlp_rxt_last_time; in rack_timer_start()
6336 to = rack->r_ctl.rc_min_to; in rack_timer_start()
6341 rack->r_ctl.rc_tlp_rxt_last_time, /* flex4 */ in rack_timer_start()
6358 rack->r_ctl.rc_hpts_flags |= PACE_TMR_RACK; in rack_timer_start()
6360 rack->r_ctl.rc_hpts_flags |= PACE_TMR_TLP; in rack_timer_start()
6380 if (rack->r_ctl.rc_scw) { in rack_enter_persist()
6381 tcp_shared_cwnd_idle(rack->r_ctl.rc_scw, rack->r_ctl.rc_scw_index); in rack_enter_persist()
6385 rack->r_ctl.rc_went_idle_time = cts; in rack_enter_persist()
6386 if (rack->r_ctl.rc_went_idle_time == 0) in rack_enter_persist()
6387 rack->r_ctl.rc_went_idle_time = 1; in rack_enter_persist()
6392 rack->r_ctl.lt_bw_bytes += (snd_una - rack->r_ctl.lt_seq); in rack_enter_persist()
6393 rack->r_ctl.lt_seq = snd_una; in rack_enter_persist()
6394 tmark = tcp_tv_to_lusectick(&rack->r_ctl.act_rcv_time); in rack_enter_persist()
6395 if (tmark >= rack->r_ctl.lt_timemark) { in rack_enter_persist()
6396 rack->r_ctl.lt_bw_time += (tmark - rack->r_ctl.lt_timemark); in rack_enter_persist()
6398 rack->r_ctl.lt_timemark = tmark; in rack_enter_persist()
6403 rack->r_ctl.persist_lost_ends = 0; in rack_enter_persist()
6408 rack_rto_min, rack_rto_max, rack->r_ctl.timer_slop); in rack_enter_persist()
6418 rack->r_ctl.rc_hpts_flags = 0; in rack_exit_persist()
6421 if (rack->r_ctl.rc_scw) { in rack_exit_persist()
6422 tcp_shared_cwnd_active(rack->r_ctl.rc_scw, rack->r_ctl.rc_scw_index); in rack_exit_persist()
6435 time_idle = cts - rack->r_ctl.rc_went_idle_time; in rack_exit_persist()
6439 extra = (uint64_t)rack->r_ctl.rc_gp_srtt * in rack_exit_persist()
6450 rack->r_ctl.rc_lower_rtt_us_cts = us_cts; in rack_exit_persist()
6451 rack->r_ctl.rc_time_probertt_entered = rack->r_ctl.rc_lower_rtt_us_cts; in rack_exit_persist()
6452 rack->r_ctl.rc_time_probertt_starts = rack->r_ctl.rc_lower_rtt_us_cts; in rack_exit_persist()
6453 rack->r_ctl.rc_time_of_last_probertt = rack->r_ctl.rc_lower_rtt_us_cts; in rack_exit_persist()
6461 rack->r_ctl.lt_timemark = tcp_get_u64_usecs(NULL); in rack_exit_persist()
6466 rack->r_ctl.rc_went_idle_time = 0; in rack_exit_persist()
6469 rack_rto_min, rack_rto_max, rack->r_ctl.timer_slop); in rack_exit_persist()
6470 rack->r_ctl.rc_agg_delayed = 0; in rack_exit_persist()
6473 rack->r_ctl.rc_agg_early = 0; in rack_exit_persist()
6559 if (stopped && TSTMP_GT(rack->r_ctl.rc_timer_exp, cts)) { in rack_start_hpts_timer()
6560 left = rack->r_ctl.rc_timer_exp - cts; in rack_start_hpts_timer()
6562 rack->r_ctl.rc_timer_exp = 0; in rack_start_hpts_timer()
6563 rack->r_ctl.rc_hpts_flags = 0; in rack_start_hpts_timer()
6577 slot += rack->r_ctl.rc_agg_early; in rack_start_hpts_timer()
6579 rack->r_ctl.rc_agg_early = 0; in rack_start_hpts_timer()
6589 if (rack->r_ctl.rc_agg_delayed >= slot) { in rack_start_hpts_timer()
6598 rack->r_ctl.rc_agg_delayed += (HPTS_TICKS_PER_SLOT - slot); in rack_start_hpts_timer()
6602 rack->r_ctl.rc_agg_delayed -= (slot - HPTS_TICKS_PER_SLOT); in rack_start_hpts_timer()
6606 slot -= rack->r_ctl.rc_agg_delayed; in rack_start_hpts_timer()
6607 rack->r_ctl.rc_agg_delayed = 0; in rack_start_hpts_timer()
6610 rack->r_ctl.rc_agg_delayed = HPTS_TICKS_PER_SLOT - slot; in rack_start_hpts_timer()
6613 if (rack->r_ctl.rc_agg_delayed == 0) in rack_start_hpts_timer()
6620 max_red = (slot * rack->r_ctl.max_reduction) / 100; in rack_start_hpts_timer()
6621 if (max_red >= rack->r_ctl.rc_agg_delayed) { in rack_start_hpts_timer()
6622 slot -= rack->r_ctl.rc_agg_delayed; in rack_start_hpts_timer()
6623 rack->r_ctl.rc_agg_delayed = 0; in rack_start_hpts_timer()
6626 rack->r_ctl.rc_agg_delayed -= max_red; in rack_start_hpts_timer()
6646 rack->r_ctl.rc_hpts_flags |= PACE_TMR_DELACK; in rack_start_hpts_timer()
6652 rack->r_ctl.rc_hpts_flags &= ~PACE_TMR_DELACK; in rack_start_hpts_timer()
6679 rack->r_ctl.rc_hpts_flags |= PACE_TMR_KEEP; in rack_start_hpts_timer()
6693 (rack->r_ctl.rc_hpts_flags & PACE_TMR_MASK)) { in rack_start_hpts_timer()
6713 rack->r_ctl.rc_timer_exp = cts + hpts_timeout; in rack_start_hpts_timer()
6719 (rack->r_ctl.rc_hpts_flags & (PACE_TMR_TLP|PACE_TMR_RXT))) { in rack_start_hpts_timer()
6761 rack->r_ctl.rc_hpts_flags |= PACE_PKT_OUTPUT; in rack_start_hpts_timer()
6762 rack->r_ctl.rc_last_output_to = us_cts + slot; in rack_start_hpts_timer()
6777 if ((rack->r_ctl.rc_hpts_flags & PACE_TMR_RACK) || in rack_start_hpts_timer()
6829 rack->r_ctl.rc_hpts_flags &= ~PACE_PKT_OUTPUT; in rack_start_hpts_timer()
6857 TAILQ_FOREACH_FROM(nrsm, &rack->r_ctl.rc_tmap, r_tnext) { in rack_mark_lost()
6867 rack->r_ctl.rc_considered_lost += nrsm->r_end - nrsm->r_start; in rack_mark_lost()
6904 rack->r_ctl.rc_resend = rsm; in rack_timeout_rack()
6915 rack->r_ctl.rc_hpts_flags &= ~PACE_PKT_OUTPUT; in rack_timeout_rack()
6918 rack->r_ctl.rc_hpts_flags &= ~PACE_TMR_RACK; in rack_timeout_rack()
7033 tqhash_update_end(rack->r_ctl.tqh, rsm, nrsm->r_start); in rack_clone_rsm()
7080 tqhash_update_end(rack->r_ctl.tqh, l_rsm, r_rsm->r_end); in rack_merge_rsm()
7087 TAILQ_REMOVE(&rack->r_ctl.rc_tmap, r_rsm, r_tnext); in rack_merge_rsm()
7107 if (r_rsm == rack->r_ctl.rc_first_appl) in rack_merge_rsm()
7108 rack->r_ctl.rc_first_appl = l_rsm; in rack_merge_rsm()
7110 tqhash_remove(rack->r_ctl.tqh, r_rsm, REMOVE_TYPE_MERGE); in rack_merge_rsm()
7168 if (TSTMP_LT(cts, rack->r_ctl.rc_timer_exp)) { in rack_timeout_tlp()
7181 rack->r_ctl.retran_during_recovery = 0; in rack_timeout_tlp()
7183 rack->r_ctl.dsack_byte_cnt = 0; in rack_timeout_tlp()
7194 if (rack->r_ctl.dsack_persist && (rack->r_ctl.rc_tlp_cnt_out >= 1)) { in rack_timeout_tlp()
7195 rack->r_ctl.dsack_persist--; in rack_timeout_tlp()
7196 if (rack->r_ctl.num_dsack && (rack->r_ctl.dsack_persist == 0)) { in rack_timeout_tlp()
7197 rack->r_ctl.num_dsack = 0; in rack_timeout_tlp()
7202 (rack->r_ctl.rc_tlp_cnt_out == 1)) { in rack_timeout_tlp()
7213 rack->r_ctl.rc_gp_srtt /*flex1*/, in rack_timeout_tlp()
7221 if (rack_always_send_oldest && (TAILQ_EMPTY(&rack->r_ctl.rc_tmap) == 0)) in rack_timeout_tlp()
7241 rack->r_ctl.rc_prr_sndcnt = amm; in rack_timeout_tlp()
7242 rack->r_ctl.rc_tlp_new_data = amm; in rack_timeout_tlp()
7250 rack->r_ctl.rc_tlp_new_data = amm; in rack_timeout_tlp()
7254 rack->r_ctl.rc_tlpsend = NULL; in rack_timeout_tlp()
7265 rsm = TAILQ_FIRST(&rack->r_ctl.rc_tmap); in rack_timeout_tlp()
7267 rsm = tqhash_max(rack->r_ctl.tqh); in rack_timeout_tlp()
7283 if (SEQ_GT((rack->r_ctl.last_collapse_point - 1), rack->rc_tp->snd_una)) in rack_timeout_tlp()
7284 rsm = tqhash_find(rack->r_ctl.tqh, (rack->r_ctl.last_collapse_point - 1)); in rack_timeout_tlp()
7286 rsm = tqhash_min(rack->r_ctl.tqh); in rack_timeout_tlp()
7311 (void)tqhash_insert(rack->r_ctl.tqh, nrsm); in rack_timeout_tlp()
7313 if ((insret = tqhash_insert(rack->r_ctl.tqh, nrsm)) != 0) { in rack_timeout_tlp()
7319 TAILQ_INSERT_AFTER(&rack->r_ctl.rc_tmap, rsm, nrsm, r_tnext); in rack_timeout_tlp()
7324 rack->r_ctl.rc_tlpsend = rsm; in rack_timeout_tlp()
7329 rack->r_ctl.rc_hpts_flags &= ~PACE_TMR_TLP; in rack_timeout_tlp()
7332 rack->r_ctl.rc_hpts_flags &= ~PACE_TMR_TLP; in rack_timeout_tlp()
7352 rack->r_ctl.rc_hpts_flags &= ~PACE_TMR_DELACK; in rack_timeout_delack()
7365 rack->r_ctl.forced_ack_ts = tcp_get_usecs(NULL); in rack_send_ack_challange()
7400 counter_u64_add(rack_persists_lost_ends, rack->r_ctl.persist_lost_ends); in rack_timeout_persist()
7419 counter_u64_add(rack_persists_lost_ends, rack->r_ctl.persist_lost_ends); in rack_timeout_persist()
7426 rack->r_ctl.rc_hpts_flags &= ~PACE_TMR_PERSIT; in rack_timeout_persist()
7435 counter_u64_add(rack_persists_lost_ends, rack->r_ctl.persist_lost_ends); in rack_timeout_persist()
7443 rack->r_ctl.persist_lost_ends++; in rack_timeout_persist()
7468 rack->r_ctl.rc_hpts_flags &= ~PACE_TMR_KEEP; in rack_timeout_keepalive()
7520 rack->r_ctl.rc_snd_max_at_rto = tp->snd_max; in rack_remxt_tmr()
7521 rack->r_ctl.rc_last_timeout_snduna = tp->snd_una; in rack_remxt_tmr()
7524 rack->r_ctl.rc_agg_delayed = 0; in rack_remxt_tmr()
7525 rack->r_ctl.rc_agg_early = 0; in rack_remxt_tmr()
7533 rack->r_ctl.rc_resend = TAILQ_FIRST(&rack->r_ctl.rc_tmap); in rack_remxt_tmr()
7534 if (rack->r_ctl.rc_resend != NULL) in rack_remxt_tmr()
7535 rack->r_ctl.rc_resend->r_flags |= RACK_TO_REXT; in rack_remxt_tmr()
7555 TAILQ_INIT(&rack->r_ctl.rc_tmap); in rack_remxt_tmr()
7557 TQHASH_FOREACH(rsm, rack->r_ctl.tqh) { in rack_remxt_tmr()
7563 TAILQ_INSERT_HEAD(&rack->r_ctl.rc_tmap, rsm, r_tnext); in rack_remxt_tmr()
7565 TAILQ_INSERT_AFTER(&rack->r_ctl.rc_tmap, trsm, rsm, r_tnext); in rack_remxt_tmr()
7575 rack->r_ctl.rc_considered_lost = 0; in rack_remxt_tmr()
7577 rack->r_ctl.rc_sacked = 0; in rack_remxt_tmr()
7578 rack->r_ctl.rc_sacklast = NULL; in rack_remxt_tmr()
7580 rack->r_ctl.rc_resend = tqhash_min(rack->r_ctl.tqh); in rack_remxt_tmr()
7581 if (rack->r_ctl.rc_resend != NULL) in rack_remxt_tmr()
7582 rack->r_ctl.rc_resend->r_flags |= RACK_TO_REXT; in rack_remxt_tmr()
7583 rack->r_ctl.rc_prr_sndcnt = 0; in rack_remxt_tmr()
7585 rack->r_ctl.rc_resend = tqhash_min(rack->r_ctl.tqh); in rack_remxt_tmr()
7586 if (rack->r_ctl.rc_resend != NULL) in rack_remxt_tmr()
7587 rack->r_ctl.rc_resend->r_flags |= RACK_TO_REXT; in rack_remxt_tmr()
7596 rack->r_ctl.rc_out_at_rto = ctf_flight_size(rack->rc_tp, in rack_remxt_tmr()
7597 rack->r_ctl.rc_sacked); in rack_remxt_tmr()
7668 rack->r_ctl.rc_gp_srtt /*flex1*/, in rack_timeout_rxt()
7677 rack->r_ctl.rc_hpts_flags &= ~PACE_TMR_RXT; in rack_timeout_rxt()
7678 rack->r_ctl.retran_during_recovery = 0; in rack_timeout_rxt()
7680 rack->r_ctl.dsack_byte_cnt = 0; in rack_timeout_rxt()
7690 rack->r_ctl.rto_ssthresh = tp->snd_ssthresh; in rack_timeout_rxt()
7705 if (rack->r_ctl.dsack_persist) { in rack_timeout_rxt()
7706 rack->r_ctl.dsack_persist--; in rack_timeout_rxt()
7707 if (rack->r_ctl.num_dsack && (rack->r_ctl.dsack_persist == 0)) { in rack_timeout_rxt()
7708 rack->r_ctl.num_dsack = 0; in rack_timeout_rxt()
7724 rsm = tqhash_min(rack->r_ctl.tqh); in rack_timeout_rxt()
7740 if ((rack->r_ctl.rc_resend == NULL) || in rack_timeout_rxt()
7741 ((rack->r_ctl.rc_resend->r_flags & RACK_RWND_COLLAPSED) == 0)) { in rack_timeout_rxt()
7792 max(rack_rto_min, rexmt), rack_rto_max, rack->r_ctl.timer_slop); in rack_timeout_rxt()
7927 sack_filter_clear(&rack->r_ctl.rack_sf, tp->snd_una); in rack_timeout_rxt()
7940 int32_t timers = (rack->r_ctl.rc_hpts_flags & PACE_TMR_MASK); in rack_process_timers()
7962 rack->r_ctl.rc_gp_srtt /*flex1*/, in rack_process_timers()
7973 if (rack->r_ctl.rc_hpts_flags & PACE_PKT_OUTPUT) in rack_process_timers()
7991 if (TSTMP_LT(cts, rack->r_ctl.rc_timer_exp)) { in rack_process_timers()
7994 if (rack->r_ctl.rc_hpts_flags & PACE_PKT_OUTPUT) { in rack_process_timers()
8018 left = rack->r_ctl.rc_timer_exp - cts; in rack_process_timers()
8025 rack->r_ctl.rc_hpts_flags &= ~PACE_TMR_MASK; in rack_process_timers()
8029 rack->r_ctl.rc_tlp_rxt_last_time = cts; in rack_process_timers()
8033 rack->r_ctl.rc_tlp_rxt_last_time = cts; in rack_process_timers()
8036 rack->r_ctl.rc_tlp_rxt_last_time = cts; in rack_process_timers()
8055 flags_on_entry = rack->r_ctl.rc_hpts_flags; in rack_timer_cancel()
8057 if ((rack->r_ctl.rc_hpts_flags & PACE_PKT_OUTPUT) && in rack_timer_cancel()
8058 ((TSTMP_GEQ(us_cts, rack->r_ctl.rc_last_output_to)) || in rack_timer_cancel()
8064 rack->r_ctl.rc_hpts_flags &= ~PACE_PKT_OUTPUT; in rack_timer_cancel()
8067 if (rack->r_ctl.rc_hpts_flags & PACE_TMR_MASK) { in rack_timer_cancel()
8068 rack->rc_tmr_stopped = rack->r_ctl.rc_hpts_flags & PACE_TMR_MASK; in rack_timer_cancel()
8070 ((rack->r_ctl.rc_hpts_flags & PACE_PKT_OUTPUT) == 0)) { in rack_timer_cancel()
8079 rack->r_ctl.rc_hpts_flags &= ~(PACE_TMR_MASK); in rack_timer_cancel()
8129 rack->r_ctl.rc_holes_rxt += (rsm->r_end - rsm->r_start); in rack_update_rsm()
8139 KASSERT((rack->r_ctl.rc_considered_lost >= (rsm->r_end - rsm->r_start)), in rack_update_rsm()
8141 if (rack->r_ctl.rc_considered_lost >= (rsm->r_end - rsm->r_start)) in rack_update_rsm()
8142 rack->r_ctl.rc_considered_lost -= rsm->r_end - rsm->r_start; in rack_update_rsm()
8144 rack->r_ctl.rc_considered_lost = 0; in rack_update_rsm()
8153 rack->r_ctl.rc_sacked); in rack_update_rsm()
8157 rack->r_ctl.rc_sacked -= (rsm->r_end - rsm->r_start); in rack_update_rsm()
8160 TAILQ_REMOVE(&rack->r_ctl.rc_tmap, rsm, r_tnext); in rack_update_rsm()
8165 TAILQ_INSERT_TAIL(&rack->r_ctl.rc_tmap, rsm, r_tnext); in rack_update_rsm()
8171 rack->r_ctl.rc_out_at_rto -= (rsm->r_end - rsm->r_start); in rack_update_rsm()
8172 if (SEQ_GEQ(rsm->r_end, rack->r_ctl.rc_snd_max_at_rto)) { in rack_update_rsm()
8178 rack->r_ctl.rc_out_at_rto = 0; in rack_update_rsm()
8248 (void)tqhash_insert(rack->r_ctl.tqh, nrsm); in rack_update_entry()
8250 if ((insret = tqhash_insert(rack->r_ctl.tqh, nrsm)) != 0) { in rack_update_entry()
8256 TAILQ_INSERT_AFTER(&rack->r_ctl.rc_tmap, rsm, nrsm, r_tnext); in rack_update_entry()
8350 rack->r_ctl.rc_prr_out += len; in rack_log_output()
8396 rack->r_ctl.rc_sacked) + in rack_log_output()
8399 (rack->r_ctl.ss_hi_fs < rsm->r_fas)) { in rack_log_output()
8400 rack->r_ctl.ss_hi_fs = rsm->r_fas; in rack_log_output()
8436 (void)tqhash_insert(rack->r_ctl.tqh, rsm); in rack_log_output()
8438 if ((insret = tqhash_insert(rack->r_ctl.tqh, rsm)) != 0) { in rack_log_output()
8443 TAILQ_INSERT_TAIL(&rack->r_ctl.rc_tmap, rsm, r_tnext); in rack_log_output()
8446 rack->r_ctl.pcm_i.send_time = cts; in rack_log_output()
8447 rack->r_ctl.pcm_i.eseq = rsm->r_end; in rack_log_output()
8450 rack->r_ctl.pcm_i.sseq = rsm->r_start; in rack_log_output()
8459 (ctf_flight_size(rack->rc_tp, rack->r_ctl.rc_sacked) == ctf_fixed_maxseg(tp))) { in rack_log_output()
8462 prsm = tqhash_prev(rack->r_ctl.tqh, rsm); in rack_log_output()
8489 rsm = tqhash_find(rack->r_ctl.tqh, seq_out); in rack_log_output()
8517 (void)tqhash_insert(rack->r_ctl.tqh, nrsm); in rack_log_output()
8519 if ((insret = tqhash_insert(rack->r_ctl.tqh, nrsm)) != 0) { in rack_log_output()
8525 TAILQ_INSERT_AFTER(&rack->r_ctl.rc_tmap, rsm, nrsm, r_tnext); in rack_log_output()
8547 TQHASH_FOREACH(rsm, rack->r_ctl.tqh) { in rack_log_output()
8576 if ((rack->r_ctl.rack_rs.rs_flags & RACK_RTT_EMPTY) || in tcp_rack_xmit_timer()
8577 (rack->r_ctl.rack_rs.rs_rtt_lowest > rtt)) { in tcp_rack_xmit_timer()
8578 rack->r_ctl.rack_rs.rs_rtt_lowest = rtt; in tcp_rack_xmit_timer()
8580 if ((rack->r_ctl.rack_rs.rs_flags & RACK_RTT_EMPTY) || in tcp_rack_xmit_timer()
8581 (rack->r_ctl.rack_rs.rs_rtt_highest < rtt)) { in tcp_rack_xmit_timer()
8582 rack->r_ctl.rack_rs.rs_rtt_highest = rtt; in tcp_rack_xmit_timer()
8585 if (us_rtt < rack->r_ctl.rc_gp_lowrtt) in tcp_rack_xmit_timer()
8586 rack->r_ctl.rc_gp_lowrtt = us_rtt; in tcp_rack_xmit_timer()
8587 if (rack->rc_tp->snd_wnd > rack->r_ctl.rc_gp_high_rwnd) in tcp_rack_xmit_timer()
8588 rack->r_ctl.rc_gp_high_rwnd = rack->rc_tp->snd_wnd; in tcp_rack_xmit_timer()
8608 if ((rack->r_ctl.rack_rs.rs_flags & RACK_RTT_EMPTY) || in tcp_rack_xmit_timer()
8609 (rack->r_ctl.rack_rs.rs_us_rtt > us_rtt)) { in tcp_rack_xmit_timer()
8610 if (rack->r_ctl.rack_rs.confidence == 0) { in tcp_rack_xmit_timer()
8615 rack->r_ctl.rack_rs.rs_us_rtt = us_rtt; in tcp_rack_xmit_timer()
8616 rack->r_ctl.rack_rs.confidence = confidence; in tcp_rack_xmit_timer()
8617 rack->r_ctl.rack_rs.rs_us_rtrcnt = rtrcnt; in tcp_rack_xmit_timer()
8626 rack->r_ctl.rack_rs.rs_us_rtt = us_rtt; in tcp_rack_xmit_timer()
8627 rack->r_ctl.rack_rs.confidence = confidence; in tcp_rack_xmit_timer()
8628 rack->r_ctl.rack_rs.rs_us_rtrcnt = rtrcnt; in tcp_rack_xmit_timer()
8632 rack->r_ctl.rack_rs.rs_flags = RACK_RTT_VALID; in tcp_rack_xmit_timer()
8633 rack->r_ctl.rack_rs.rs_rtt_tot += rtt; in tcp_rack_xmit_timer()
8634 rack->r_ctl.rack_rs.rs_rtt_cnt++; in tcp_rack_xmit_timer()
8647 if (rack->r_ctl.rack_rs.rs_flags & RACK_RTT_EMPTY) in tcp_rack_xmit_timer_commit()
8650 if (rack->r_ctl.rc_rate_sample_method == USE_RTT_LOW) { in tcp_rack_xmit_timer_commit()
8652 rtt = rack->r_ctl.rack_rs.rs_rtt_lowest; in tcp_rack_xmit_timer_commit()
8653 } else if (rack->r_ctl.rc_rate_sample_method == USE_RTT_HIGH) { in tcp_rack_xmit_timer_commit()
8655 rtt = rack->r_ctl.rack_rs.rs_rtt_highest; in tcp_rack_xmit_timer_commit()
8656 } else if (rack->r_ctl.rc_rate_sample_method == USE_RTT_AVG) { in tcp_rack_xmit_timer_commit()
8658 rtt = (int32_t)(rack->r_ctl.rack_rs.rs_rtt_tot / in tcp_rack_xmit_timer_commit()
8659 (uint64_t)rack->r_ctl.rack_rs.rs_rtt_cnt); in tcp_rack_xmit_timer_commit()
8662 panic("Unknown rtt variant %d", rack->r_ctl.rc_rate_sample_method); in tcp_rack_xmit_timer_commit()
8673 rack->r_ctl.rc_gp_srtt = rack->r_ctl.rack_rs.rs_us_rtt; in tcp_rack_xmit_timer_commit()
8675 } else if (rack->r_ctl.rack_rs.confidence) { in tcp_rack_xmit_timer_commit()
8677 rack->r_ctl.rc_gp_srtt -= (rack->r_ctl.rc_gp_srtt/8); in tcp_rack_xmit_timer_commit()
8678 rack->r_ctl.rc_gp_srtt += rack->r_ctl.rack_rs.rs_us_rtt / 8; in tcp_rack_xmit_timer_commit()
8680 if (rack->r_ctl.rack_rs.confidence) { in tcp_rack_xmit_timer_commit()
8685 if (rack->r_ctl.rc_highest_us_rtt < rack->r_ctl.rack_rs.rs_us_rtt) { in tcp_rack_xmit_timer_commit()
8686 rack->r_ctl.rc_highest_us_rtt = rack->r_ctl.rack_rs.rs_us_rtt; in tcp_rack_xmit_timer_commit()
8694 if ((rack->r_ctl.rc_highest_us_rtt / rack->r_ctl.rc_lowest_us_rtt) > rack_hbp_thresh) { in tcp_rack_xmit_timer_commit()
8695 rack_log_rtt_shrinks(rack, rack->r_ctl.rack_rs.rs_us_rtt, in tcp_rack_xmit_timer_commit()
8696 rack->r_ctl.rc_highest_us_rtt, in tcp_rack_xmit_timer_commit()
8697 rack->r_ctl.rc_lowest_us_rtt, in tcp_rack_xmit_timer_commit()
8703 if ((rack->r_ctl.rack_rs.confidence) || in tcp_rack_xmit_timer_commit()
8704 (rack->r_ctl.rack_rs.rs_us_rtrcnt == 1)) { in tcp_rack_xmit_timer_commit()
8709 rack->r_ctl.rc_last_us_rtt = rack->r_ctl.rack_rs.rs_us_rtt; in tcp_rack_xmit_timer_commit()
8711 if (rack->r_ctl.rc_lowest_us_rtt > rack->r_ctl.rack_rs.rs_us_rtt) { in tcp_rack_xmit_timer_commit()
8712 rack->r_ctl.rc_lowest_us_rtt = rack->r_ctl.rack_rs.rs_us_rtt; in tcp_rack_xmit_timer_commit()
8713 if (rack->r_ctl.rc_lowest_us_rtt == 0) in tcp_rack_xmit_timer_commit()
8714 rack->r_ctl.rc_lowest_us_rtt = 1; in tcp_rack_xmit_timer_commit()
8772 ms_rtt = (rack->r_ctl.rack_rs.rs_us_rtt + HPTS_USEC_IN_MSEC - 1) / HPTS_USEC_IN_MSEC; in tcp_rack_xmit_timer_commit()
8776 stats_voi_update_abs_u32(tp->t_stats, VOI_TCP_RTT, imax(0, rack->r_ctl.rack_rs.rs_us_rtt)); in tcp_rack_xmit_timer_commit()
8778 stats_voi_update_abs_u32(tp->t_stats, VOI_TCP_PATHRTT, imax(0, rack->r_ctl.rack_rs.rs_us_rtt)); in tcp_rack_xmit_timer_commit()
8780 rack->r_ctl.last_rcv_tstmp_for_rtt = tcp_tv_to_mssectick(&rack->r_ctl.act_rcv_time); in tcp_rack_xmit_timer_commit()
8793 max(rack_rto_min, rtt + 2), rack_rto_max, rack->r_ctl.timer_slop); in tcp_rack_xmit_timer_commit()
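The tcp_rack_xmit_timer_commit() references above show the goodput SRTT being smoothed with a 1/8 gain when a confident RTT sample is committed. A minimal, self-contained sketch of that update, using illustrative names rather than the driver's fields:

#include <stdint.h>

/*
 * Illustrative sketch only: the 7/8-old, 1/8-new EWMA applied to the
 * goodput SRTT above.  The name and signature are assumptions for the
 * example, not kernel interfaces.
 */
static uint32_t
example_gp_srtt_update(uint32_t gp_srtt, uint32_t us_rtt)
{
	gp_srtt -= (gp_srtt / 8);	/* drop 1/8 of the old estimate */
	gp_srtt += (us_rtt / 8);	/* blend in 1/8 of the new sample */
	return (gp_srtt);
}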
8807 old_rtt = get_filter_value_small(&rack->r_ctl.rc_gp_min_rtt); in rack_apply_updated_usrtt()
8808 apply_filter_min_small(&rack->r_ctl.rc_gp_min_rtt, in rack_apply_updated_usrtt()
8835 ((us_cts - rack->r_ctl.rc_lower_rtt_us_cts) >= (rack_time_between_probertt - val))) { in rack_apply_updated_usrtt()
8839 rack->r_ctl.rc_lower_rtt_us_cts = us_cts; in rack_apply_updated_usrtt()
8879 if (!rack->r_ctl.rc_rack_min_rtt || in rack_update_rtt()
8880 SEQ_LT(t, rack->r_ctl.rc_rack_min_rtt)) { in rack_update_rtt()
8881 rack->r_ctl.rc_rack_min_rtt = t; in rack_update_rtt()
8882 if (rack->r_ctl.rc_rack_min_rtt == 0) { in rack_update_rtt()
8883 rack->r_ctl.rc_rack_min_rtt = 1; in rack_update_rtt()
8886 …if (TSTMP_GT(tcp_tv_to_usectick(&rack->r_ctl.act_rcv_time), rsm->r_tim_lastsent[(rsm->r_rtr_cnt-1)… in rack_update_rtt()
8887 …us_rtt = tcp_tv_to_usectick(&rack->r_ctl.act_rcv_time) - (uint32_t)rsm->r_tim_lastsent[(rsm->r_rtr… in rack_update_rtt()
8896 rack_apply_updated_usrtt(rack, us_rtt, tcp_tv_to_usectick(&rack->r_ctl.act_rcv_time)); in rack_update_rtt()
8946 if (rack->r_ctl.rc_tlp_cwnd_reduce) { in rack_update_rtt()
8950 if ((rack->r_ctl.rc_rack_tmit_time == 0) || in rack_update_rtt()
8951 (SEQ_LT(rack->r_ctl.rc_rack_tmit_time, in rack_update_rtt()
8954 rack->r_ctl.rc_rack_tmit_time = (uint32_t)rsm->r_tim_lastsent[(rsm->r_rtr_cnt - 1)]; in rack_update_rtt()
8955 if (rack->r_ctl.rc_rack_tmit_time == 0) in rack_update_rtt()
8956 rack->r_ctl.rc_rack_tmit_time = 1; in rack_update_rtt()
8968 rack_rto_min, rack_rto_max, rack->r_ctl.timer_slop); in rack_update_rtt()
8991 if (TSTMP_GT(tcp_tv_to_usectick(&rack->r_ctl.act_rcv_time), rsm->r_tim_lastsent[i])) in rack_update_rtt()
8992 us_rtt = tcp_tv_to_usectick(&rack->r_ctl.act_rcv_time) - (uint32_t)rsm->r_tim_lastsent[i]; in rack_update_rtt()
9011 if (!rack->r_ctl.rc_rack_min_rtt || SEQ_LT(t, rack->r_ctl.rc_rack_min_rtt)) { in rack_update_rtt()
9012 rack->r_ctl.rc_rack_min_rtt = t; in rack_update_rtt()
9013 if (rack->r_ctl.rc_rack_min_rtt == 0) { in rack_update_rtt()
9014 rack->r_ctl.rc_rack_min_rtt = 1; in rack_update_rtt()
9017 if ((rack->r_ctl.rc_rack_tmit_time == 0) || in rack_update_rtt()
9018 (SEQ_LT(rack->r_ctl.rc_rack_tmit_time, in rack_update_rtt()
9021 rack->r_ctl.rc_rack_tmit_time = (uint32_t)rsm->r_tim_lastsent[(rsm->r_rtr_cnt - 1)]; in rack_update_rtt()
9022 if (rack->r_ctl.rc_rack_tmit_time == 0) in rack_update_rtt()
9023 rack->r_ctl.rc_rack_tmit_time = 1; in rack_update_rtt()
9051 if (rack->r_ctl.rc_rack_min_rtt && SEQ_LT(t, rack->r_ctl.rc_rack_min_rtt)) { in rack_update_rtt()
9062 } else if (rack->r_ctl.rc_rack_min_rtt) { in rack_update_rtt()
9067 if (!rack->r_ctl.rc_rack_min_rtt || in rack_update_rtt()
9068 SEQ_LT(t, rack->r_ctl.rc_rack_min_rtt)) { in rack_update_rtt()
9069 rack->r_ctl.rc_rack_min_rtt = t; in rack_update_rtt()
9070 if (rack->r_ctl.rc_rack_min_rtt == 0) { in rack_update_rtt()
9071 rack->r_ctl.rc_rack_min_rtt = 1; in rack_update_rtt()
9074 if ((rack->r_ctl.rc_rack_tmit_time == 0) || in rack_update_rtt()
9075 (SEQ_LT(rack->r_ctl.rc_rack_tmit_time, in rack_update_rtt()
9078 rack->r_ctl.rc_rack_tmit_time = (uint32_t)rsm->r_tim_lastsent[i]; in rack_update_rtt()
9079 if (rack->r_ctl.rc_rack_tmit_time == 0) in rack_update_rtt()
9080 rack->r_ctl.rc_rack_tmit_time = 1; in rack_update_rtt()
9103 TAILQ_FOREACH_REVERSE_FROM(nrsm, &rack->r_ctl.rc_tmap, in rack_log_sack_passed()
9132 rack->r_ctl.rc_considered_lost += nrsm->r_end - nrsm->r_start; in rack_log_sack_passed()
9185 tp->gput_ts = tcp_tv_to_usectick(&rack->r_ctl.act_rcv_time); in rack_need_set_test()
9226 s_rsm = tqhash_find(rack->r_ctl.tqh, tp->gput_seq); in rack_need_set_test()
9240 rack->r_ctl.rc_gp_output_ts = s_rsm->r_tim_lastsent[0]; in rack_need_set_test()
9243 rack->r_ctl.rc_gp_output_ts = rsm->r_tim_lastsent[0]; in rack_need_set_test()
9264 (SEQ_GEQ(tp->gput_seq, rack->r_ctl.rc_probertt_sndmax_atexit))) in rack_need_set_test()
9268 (((uint64_t)rack->r_ctl.rc_app_limited_cnt << 32) | in rack_need_set_test()
9269 (uint64_t)rack->r_ctl.rc_gp_output_ts), in rack_need_set_test()
9288 (((uint64_t)rack->r_ctl.rc_app_limited_cnt << 32) | in rack_need_set_test()
9289 (uint64_t)rack->r_ctl.rc_gp_output_ts), in rack_need_set_test()
9306 if (SEQ_LT(rsm->r_end, rack->r_ctl.last_tlp_acked_start)) { in is_rsm_inside_declared_tlp_block()
9310 if (SEQ_GT(rsm->r_start, rack->r_ctl.last_tlp_acked_end)) { in is_rsm_inside_declared_tlp_block()
9344 rsm = tqhash_find(rack->r_ctl.tqh, start); in rack_proc_sack_blk()
9375 if (SEQ_LT(rsm->r_start, rack->r_ctl.last_tlp_acked_start)) { in rack_proc_sack_blk()
9376 rack->r_ctl.last_tlp_acked_start = rsm->r_start; in rack_proc_sack_blk()
9377 rack_log_dsack_event(rack, 11, __LINE__, rack->r_ctl.last_tlp_acked_start, in rack_proc_sack_blk()
9378 rack->r_ctl.last_tlp_acked_end); in rack_proc_sack_blk()
9380 if (SEQ_GT(rsm->r_end, rack->r_ctl.last_tlp_acked_end)) { in rack_proc_sack_blk()
9381 rack->r_ctl.last_tlp_acked_end = rsm->r_end; in rack_proc_sack_blk()
9382 rack_log_dsack_event(rack, 11, __LINE__, rack->r_ctl.last_tlp_acked_start, in rack_proc_sack_blk()
9383 rack->r_ctl.last_tlp_acked_end); in rack_proc_sack_blk()
9386 rack->r_ctl.last_tlp_acked_start = rsm->r_start; in rack_proc_sack_blk()
9387 rack->r_ctl.last_tlp_acked_end = rsm->r_end; in rack_proc_sack_blk()
9414 next = tqhash_next(rack->r_ctl.tqh, rsm); in rack_proc_sack_blk()
9449 tqhash_update_end(rack->r_ctl.tqh, rsm, start); in rack_proc_sack_blk()
9480 rack_to_usec_ts(&rack->r_ctl.act_rcv_time)) in rack_proc_sack_blk()
9481 next->r_ack_arrival = rack_to_usec_ts(&rack->r_ctl.act_rcv_time); in rack_proc_sack_blk()
9496 rack->r_ctl.rc_sacked += (nrsm->r_end - nrsm->r_start); in rack_proc_sack_blk()
9501 KASSERT((rack->r_ctl.rc_considered_lost >= my_chg), in rack_proc_sack_blk()
9503 if (my_chg <= rack->r_ctl.rc_considered_lost) in rack_proc_sack_blk()
9504 rack->r_ctl.rc_considered_lost -= my_chg; in rack_proc_sack_blk()
9506 rack->r_ctl.rc_considered_lost = 0; in rack_proc_sack_blk()
9509 rack->r_ctl.rc_reorder_ts = cts; in rack_proc_sack_blk()
9510 if (rack->r_ctl.rc_reorder_ts == 0) in rack_proc_sack_blk()
9511 rack->r_ctl.rc_reorder_ts = 1; in rack_proc_sack_blk()
9542 rsm = tqhash_next(rack->r_ctl.tqh, next); in rack_proc_sack_blk()
9575 (void)tqhash_insert(rack->r_ctl.tqh, nrsm); in rack_proc_sack_blk()
9577 if ((insret = tqhash_insert(rack->r_ctl.tqh, nrsm)) != 0) { in rack_proc_sack_blk()
9583 TAILQ_INSERT_AFTER(&rack->r_ctl.rc_tmap, rsm, nrsm, r_tnext); in rack_proc_sack_blk()
9596 rsm = tqhash_next(rack->r_ctl.tqh, rsm); in rack_proc_sack_blk()
9600 rsm = tqhash_next(rack->r_ctl.tqh, rsm); in rack_proc_sack_blk()
9609 rsm = tqhash_next(rack->r_ctl.tqh, rsm); in rack_proc_sack_blk()
9645 if (SEQ_LT(rsm->r_start, rack->r_ctl.last_tlp_acked_start)) { in rack_proc_sack_blk()
9646 rack->r_ctl.last_tlp_acked_start = rsm->r_start; in rack_proc_sack_blk()
9647 rack_log_dsack_event(rack, 11, __LINE__, rack->r_ctl.last_tlp_acked_start, in rack_proc_sack_blk()
9648 rack->r_ctl.last_tlp_acked_end); in rack_proc_sack_blk()
9650 if (SEQ_GT(rsm->r_end, rack->r_ctl.last_tlp_acked_end)) { in rack_proc_sack_blk()
9651 rack->r_ctl.last_tlp_acked_end = rsm->r_end; in rack_proc_sack_blk()
9652 rack_log_dsack_event(rack, 11, __LINE__, rack->r_ctl.last_tlp_acked_start, in rack_proc_sack_blk()
9653 rack->r_ctl.last_tlp_acked_end); in rack_proc_sack_blk()
9656 rack->r_ctl.last_tlp_acked_start = rsm->r_start; in rack_proc_sack_blk()
9657 rack->r_ctl.last_tlp_acked_end = rsm->r_end; in rack_proc_sack_blk()
9671 KASSERT((rack->r_ctl.rc_considered_lost >= my_chg), in rack_proc_sack_blk()
9673 if (my_chg <= rack->r_ctl.rc_considered_lost) in rack_proc_sack_blk()
9674 rack->r_ctl.rc_considered_lost -= my_chg; in rack_proc_sack_blk()
9676 rack->r_ctl.rc_considered_lost = 0; in rack_proc_sack_blk()
9678 rack->r_ctl.rc_sacked += (rsm->r_end - rsm->r_start); in rack_proc_sack_blk()
9684 rack->r_ctl.rc_reorder_ts = cts; in rack_proc_sack_blk()
9685 if (rack->r_ctl.rc_reorder_ts == 0) in rack_proc_sack_blk()
9686 rack->r_ctl.rc_reorder_ts = 1; in rack_proc_sack_blk()
9690 rsm->r_ack_arrival = rack_to_usec_ts(&rack->r_ctl.act_rcv_time); in rack_proc_sack_blk()
9694 TAILQ_REMOVE(&rack->r_ctl.rc_tmap, rsm, r_tnext); in rack_proc_sack_blk()
9709 nrsm = tqhash_next(rack->r_ctl.tqh, rsm); in rack_proc_sack_blk()
9743 if (SEQ_LT(rsm->r_start, rack->r_ctl.last_tlp_acked_start)) { in rack_proc_sack_blk()
9744 rack->r_ctl.last_tlp_acked_start = rsm->r_start; in rack_proc_sack_blk()
9745 rack_log_dsack_event(rack, 11, __LINE__, rack->r_ctl.last_tlp_acked_start, in rack_proc_sack_blk()
9746 rack->r_ctl.last_tlp_acked_end); in rack_proc_sack_blk()
9748 if (SEQ_GT(rsm->r_end, rack->r_ctl.last_tlp_acked_end)) { in rack_proc_sack_blk()
9749 rack->r_ctl.last_tlp_acked_end = rsm->r_end; in rack_proc_sack_blk()
9750 rack_log_dsack_event(rack, 11, __LINE__, rack->r_ctl.last_tlp_acked_start, in rack_proc_sack_blk()
9751 rack->r_ctl.last_tlp_acked_end); in rack_proc_sack_blk()
9754 rack->r_ctl.last_tlp_acked_start = rsm->r_start; in rack_proc_sack_blk()
9755 rack->r_ctl.last_tlp_acked_end = rsm->r_end; in rack_proc_sack_blk()
9766 prev = tqhash_prev(rack->r_ctl.tqh, rsm); in rack_proc_sack_blk()
9799 tqhash_update_end(rack->r_ctl.tqh, prev, end); in rack_proc_sack_blk()
9836 rack_to_usec_ts(&rack->r_ctl.act_rcv_time)) in rack_proc_sack_blk()
9837 prev->r_ack_arrival = rack_to_usec_ts(&rack->r_ctl.act_rcv_time); in rack_proc_sack_blk()
9855 rack->r_ctl.rc_sacked += (nrsm->r_end - nrsm->r_start); in rack_proc_sack_blk()
9860 KASSERT((rack->r_ctl.rc_considered_lost >= my_chg), in rack_proc_sack_blk()
9862 if (my_chg <= rack->r_ctl.rc_considered_lost) in rack_proc_sack_blk()
9863 rack->r_ctl.rc_considered_lost -= my_chg; in rack_proc_sack_blk()
9865 rack->r_ctl.rc_considered_lost = 0; in rack_proc_sack_blk()
9868 rack->r_ctl.rc_reorder_ts = cts; in rack_proc_sack_blk()
9869 if (rack->r_ctl.rc_reorder_ts == 0) in rack_proc_sack_blk()
9870 rack->r_ctl.rc_reorder_ts = 1; in rack_proc_sack_blk()
9903 if (SEQ_LT(rsm->r_start, rack->r_ctl.last_tlp_acked_start)) { in rack_proc_sack_blk()
9904 rack->r_ctl.last_tlp_acked_start = rsm->r_start; in rack_proc_sack_blk()
9905 rack_log_dsack_event(rack, 11, __LINE__, rack->r_ctl.last_tlp_acked_start, in rack_proc_sack_blk()
9906 rack->r_ctl.last_tlp_acked_end); in rack_proc_sack_blk()
9908 if (SEQ_GT(rsm->r_end, rack->r_ctl.last_tlp_acked_end)) { in rack_proc_sack_blk()
9909 rack->r_ctl.last_tlp_acked_end = rsm->r_end; in rack_proc_sack_blk()
9910 rack_log_dsack_event(rack, 11, __LINE__, rack->r_ctl.last_tlp_acked_start, in rack_proc_sack_blk()
9911 rack->r_ctl.last_tlp_acked_end); in rack_proc_sack_blk()
9914 rack->r_ctl.last_tlp_acked_start = rsm->r_start; in rack_proc_sack_blk()
9915 rack->r_ctl.last_tlp_acked_end = rsm->r_end; in rack_proc_sack_blk()
9944 (void)tqhash_insert(rack->r_ctl.tqh, nrsm); in rack_proc_sack_blk()
9946 if ((insret = tqhash_insert(rack->r_ctl.tqh, nrsm)) != 0) { in rack_proc_sack_blk()
9952 TAILQ_INSERT_AFTER(&rack->r_ctl.rc_tmap, rsm, nrsm, r_tnext); in rack_proc_sack_blk()
9964 KASSERT((rack->r_ctl.rc_considered_lost >= my_chg), in rack_proc_sack_blk()
9966 if (my_chg <= rack->r_ctl.rc_considered_lost) in rack_proc_sack_blk()
9967 rack->r_ctl.rc_considered_lost -= my_chg; in rack_proc_sack_blk()
9969 rack->r_ctl.rc_considered_lost = 0; in rack_proc_sack_blk()
9971 rack->r_ctl.rc_sacked += (rsm->r_end - rsm->r_start); in rack_proc_sack_blk()
9978 rack->r_ctl.rc_reorder_ts = cts; in rack_proc_sack_blk()
9979 if (rack->r_ctl.rc_reorder_ts == 0) in rack_proc_sack_blk()
9980 rack->r_ctl.rc_reorder_ts = 1; in rack_proc_sack_blk()
9984 rsm->r_ack_arrival = rack_to_usec_ts(&rack->r_ctl.act_rcv_time); in rack_proc_sack_blk()
9989 TAILQ_REMOVE(&rack->r_ctl.rc_tmap, rsm, r_tnext); in rack_proc_sack_blk()
10008 next = tqhash_next(rack->r_ctl.tqh, rsm); in rack_proc_sack_blk()
10034 next = tqhash_next(rack->r_ctl.tqh, rsm); in rack_proc_sack_blk()
10039 prev = tqhash_prev(rack->r_ctl.tqh, rsm); in rack_proc_sack_blk()
10065 prev = tqhash_prev(rack->r_ctl.tqh, rsm); in rack_proc_sack_blk()
10076 nrsm = tqhash_find(rack->r_ctl.tqh, end); in rack_proc_sack_blk()
10077 *prsm = rack->r_ctl.rc_sacklast = nrsm; in rack_proc_sack_blk()
10089 rack->r_ctl.rc_sacked -= (rsm->r_end - rsm->r_start); in rack_peer_reneges()
10099 TAILQ_INSERT_HEAD(&rack->r_ctl.rc_tmap, rsm, r_tnext); in rack_peer_reneges()
10102 TAILQ_INSERT_AFTER(&rack->r_ctl.rc_tmap, tmap, rsm, r_tnext); in rack_peer_reneges()
10106 rsm = tqhash_next(rack->r_ctl.tqh, rsm); in rack_peer_reneges()
10112 sack_filter_clear(&rack->r_ctl.rack_sf, th_ack); in rack_peer_reneges()
10199 rack->r_ctl.rc_gp_cumack_ts) in rack_rsm_sender_update()
10202 rack->r_ctl.rc_gp_cumack_ts = ts; in rack_rsm_sender_update()
10218 if (sack_filter_blks_used(&rack->r_ctl.rack_sf)) { in rack_process_to_cumack()
10223 sack_filter_blks(tp, &rack->r_ctl.rack_sf, NULL, 0, th_ack); in rack_process_to_cumack()
10227 rack->r_ctl.cleared_app_ack = 0; in rack_process_to_cumack()
10231 rack->r_ctl.last_cumack_advance = acktime; in rack_process_to_cumack()
10236 (SEQ_GT(rack->r_ctl.last_tlp_acked_start, th_ack))) { in rack_process_to_cumack()
10251 rack->r_ctl.last_tlp_acked_start, in rack_process_to_cumack()
10252 rack->r_ctl.last_tlp_acked_end); in rack_process_to_cumack()
10257 (SEQ_GEQ(th_ack, rack->r_ctl.last_tlp_acked_end))) { in rack_process_to_cumack()
10266 (SEQ_GT(rack->r_ctl.last_sent_tlp_seq, th_ack))) { in rack_process_to_cumack()
10268 rack->r_ctl.last_sent_tlp_seq, in rack_process_to_cumack()
10269 (rack->r_ctl.last_sent_tlp_seq + in rack_process_to_cumack()
10270 rack->r_ctl.last_sent_tlp_len)); in rack_process_to_cumack()
10275 (SEQ_GEQ(th_ack, rack->r_ctl.last_sent_tlp_seq))) { in rack_process_to_cumack()
10282 rsm = tqhash_min(rack->r_ctl.tqh); in rack_process_to_cumack()
10339 if (SEQ_LT(rsm->r_start, rack->r_ctl.last_tlp_acked_start)) { in rack_process_to_cumack()
10340 rack->r_ctl.last_tlp_acked_start = rsm->r_start; in rack_process_to_cumack()
10341 rack_log_dsack_event(rack, 11, __LINE__, rack->r_ctl.last_tlp_acked_start, in rack_process_to_cumack()
10342 rack->r_ctl.last_tlp_acked_end); in rack_process_to_cumack()
10344 if (SEQ_GT(rsm->r_end, rack->r_ctl.last_tlp_acked_end)) { in rack_process_to_cumack()
10345 rack->r_ctl.last_tlp_acked_end = rsm->r_end; in rack_process_to_cumack()
10346 rack_log_dsack_event(rack, 11, __LINE__, rack->r_ctl.last_tlp_acked_start, in rack_process_to_cumack()
10347 rack->r_ctl.last_tlp_acked_end); in rack_process_to_cumack()
10351 rack->r_ctl.last_tlp_acked_start = rsm->r_start; in rack_process_to_cumack()
10352 rack->r_ctl.last_tlp_acked_end = rsm->r_end; in rack_process_to_cumack()
10358 rack->r_ctl.last_tmit_time_acked = rsm->r_tim_lastsent[(rsm->r_rtr_cnt - 1)]; in rack_process_to_cumack()
10371 KASSERT((rack->r_ctl.rc_considered_lost >= (rsm->r_end - rsm->r_start)), in rack_process_to_cumack()
10373 if (rack->r_ctl.rc_considered_lost >= (rsm->r_end - rsm->r_start)) in rack_process_to_cumack()
10374 rack->r_ctl.rc_considered_lost -= rsm->r_end - rsm->r_start; in rack_process_to_cumack()
10376 rack->r_ctl.rc_considered_lost = 0; in rack_process_to_cumack()
10379 rack->r_ctl.rc_holes_rxt -= rsm->r_rtr_bytes; in rack_process_to_cumack()
10386 tqhash_remove(rack->r_ctl.tqh, rsm, REMOVE_TYPE_CUMACK); in rack_process_to_cumack()
10388 TAILQ_REMOVE(&rack->r_ctl.rc_tmap, rsm, r_tnext); in rack_process_to_cumack()
10397 rack->r_ctl.rc_sacked -= (rsm->r_end - rsm->r_start); in rack_process_to_cumack()
10406 rsm->r_ack_arrival = rack_to_usec_ts(&rack->r_ctl.act_rcv_time); in rack_process_to_cumack()
10408 rack->r_ctl.rc_reorder_ts = cts; in rack_process_to_cumack()
10409 if (rack->r_ctl.rc_reorder_ts == 0) in rack_process_to_cumack()
10410 rack->r_ctl.rc_reorder_ts = 1; in rack_process_to_cumack()
10447 rsm = tqhash_min(rack->r_ctl.tqh); in rack_process_to_cumack()
10472 rack->r_ctl.rc_sacked -= (th_ack - rsm->r_start); in rack_process_to_cumack()
10484 KASSERT((rack->r_ctl.rc_considered_lost >= (th_ack - rsm->r_start)), in rack_process_to_cumack()
10486 if (rack->r_ctl.rc_considered_lost >= (th_ack - rsm->r_start)) in rack_process_to_cumack()
10487 rack->r_ctl.rc_considered_lost -= th_ack - rsm->r_start; in rack_process_to_cumack()
10489 rack->r_ctl.rc_considered_lost = 0; in rack_process_to_cumack()
10506 rack->r_ctl.rc_holes_rxt -= ack_am; in rack_process_to_cumack()
10526 tqhash_trim(rack->r_ctl.tqh, th_ack); in rack_process_to_cumack()
10588 TAILQ_FOREACH(rsm, &rack->r_ctl.rc_tmap, r_tnext) { in rack_handle_might_revert()
10603 tp->snd_ssthresh = rack->r_ctl.rc_ssthresh_at_erec; in rack_handle_might_revert()
10620 if (rack->r_ctl.rto_ssthresh > tp->snd_ssthresh) in rack_handle_might_revert()
10621 tp->snd_ssthresh = rack->r_ctl.rto_ssthresh; in rack_handle_might_revert()
10642 (SEQ_GEQ(start, rack->r_ctl.last_tlp_acked_start)) && in rack_note_dsack()
10643 (SEQ_LEQ(end, rack->r_ctl.last_tlp_acked_end))) { in rack_note_dsack()
10655 l_end = rack->r_ctl.last_sent_tlp_seq + rack->r_ctl.last_sent_tlp_len; in rack_note_dsack()
10656 if (SEQ_GEQ(start, rack->r_ctl.last_sent_tlp_seq) && in rack_note_dsack()
10669 rack->r_ctl.dsack_round_end = rack->rc_tp->snd_max; in rack_note_dsack()
10670 rack->r_ctl.num_dsack++; in rack_note_dsack()
10671 rack->r_ctl.dsack_persist = 16; /* 16 is from the standard */ in rack_note_dsack()
10679 rack->r_ctl.dsack_byte_cnt += am; in rack_note_dsack()
10681 rack->r_ctl.retran_during_recovery && in rack_note_dsack()
10682 (rack->r_ctl.dsack_byte_cnt >= rack->r_ctl.retran_during_recovery)) { in rack_note_dsack()
10690 rack->r_ctl.retran_during_recovery = 0; in rack_note_dsack()
10691 rack->r_ctl.dsack_byte_cnt = 0; in rack_note_dsack()
10700 (rack->r_ctl.rc_sacked + rack->r_ctl.rc_considered_lost)) + rack->r_ctl.rc_holes_rxt); in do_rack_compute_pipe()
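The do_rack_compute_pipe() reference combines the flight size (less SACKed and written-off bytes) with retransmitted hole bytes. A hedged sketch of that style of pipe estimate, assuming ctf_flight_size() subtracts the bytes passed to it from the outstanding window; names are illustrative, not the driver's:

#include <stdint.h>

/*
 * Sketch under assumptions: "pipe" as outstanding bytes minus data
 * already SACKed or considered lost, plus hole retransmissions that
 * are back in the network.  Not the actual ctf_flight_size() helper.
 */
static uint32_t
example_compute_pipe(uint32_t snd_max, uint32_t snd_una,
    uint32_t sacked, uint32_t considered_lost, uint32_t holes_rxt)
{
	uint32_t flight = (snd_max - snd_una) - (sacked + considered_lost);

	return (flight + holes_rxt);
}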
10717 rack->r_ctl.rc_prr_delivered += changed; in rack_update_prr()
10726 rack->r_ctl.rc_prr_sndcnt = 0; in rack_update_prr()
10739 sndcnt = rack->r_ctl.rc_prr_delivered * tp->snd_ssthresh; in rack_update_prr()
10740 if (rack->r_ctl.rc_prr_recovery_fs > 0) in rack_update_prr()
10741 sndcnt /= (long)rack->r_ctl.rc_prr_recovery_fs; in rack_update_prr()
10743 rack->r_ctl.rc_prr_sndcnt = 0; in rack_update_prr()
10748 if (sndcnt > (long)rack->r_ctl.rc_prr_out) in rack_update_prr()
10749 sndcnt -= rack->r_ctl.rc_prr_out; in rack_update_prr()
10752 rack->r_ctl.rc_prr_sndcnt = sndcnt; in rack_update_prr()
10757 if (rack->r_ctl.rc_prr_delivered > rack->r_ctl.rc_prr_out) in rack_update_prr()
10758 limit = (rack->r_ctl.rc_prr_delivered - rack->r_ctl.rc_prr_out); in rack_update_prr()
10765 rack->r_ctl.rc_prr_sndcnt = min((tp->snd_ssthresh - pipe), limit); in rack_update_prr()
10768 rack->r_ctl.rc_prr_sndcnt = min(0, limit); in rack_update_prr()
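The rack_update_prr() references follow the Proportional Rate Reduction shape of RFC 6937: while the pipe is still above ssthresh, scale delivered data by ssthresh/RecoverFS and subtract what has already been sent; otherwise bound the send count by what was newly delivered. A simplified sketch with assumed names (the real routine also handles the send-a-lot and maximum-segment adjustments):

#include <stdint.h>

/* Simplified PRR send-count sketch; illustrative names only. */
static uint32_t
example_prr_sndcnt(uint64_t prr_delivered, uint64_t prr_out,
    uint32_t ssthresh, uint32_t recovery_fs, uint32_t pipe)
{
	long sndcnt, limit;

	if (pipe > ssthresh) {
		/* Proportional phase: pace the reduction over one RTT. */
		if (recovery_fs == 0)
			return (0);
		sndcnt = (long)((prr_delivered * ssthresh) / recovery_fs);
		sndcnt -= (long)prr_out;
	} else {
		/* Bounded growth back toward ssthresh. */
		limit = (prr_delivered > prr_out) ?
		    (long)(prr_delivered - prr_out) : 0;
		sndcnt = (long)(ssthresh - pipe);
		if (sndcnt > limit)
			sndcnt = limit;
	}
	return ((sndcnt > 0) ? (uint32_t)sndcnt : 0);
}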
10797 rsm = tqhash_min(rack->r_ctl.tqh); in rack_log_ack()
10820 tcp_tv_to_lusectick(&rack->r_ctl.act_rcv_time)); in rack_log_ack()
10867 if (SEQ_GEQ(th_ack, rack->r_ctl.dsack_round_end)) { in rack_log_ack()
10877 num_sack_blks = sack_filter_blks(tp, &rack->r_ctl.rack_sf, sack_blocks, in rack_log_ack()
10946 rsm = TAILQ_FIRST(&rack->r_ctl.rc_tmap); in rack_log_ack()
10982 rsm = rack->r_ctl.rc_sacklast; in rack_log_ack()
11005 rack_timer_cancel(tp, rack, rack->r_ctl.rc_rcvtime, __LINE__); in rack_log_ack()
11020 rack->r_ctl.rc_prr_sndcnt = ctf_fixed_maxseg(tp); in rack_log_ack()
11025 rack->r_ctl.rc_agg_early = 0; in rack_log_ack()
11034 rack->r_ctl.rc_hpts_flags &= ~PACE_PKT_OUTPUT; in rack_log_ack()
11035 rack->r_ctl.rc_resend = rsm; in rack_log_ack()
11041 if ((rsm && (rack->r_ctl.rc_prr_sndcnt >= ctf_fixed_maxseg(tp)) && in rack_log_ack()
11043 ((rack->r_ctl.rc_hpts_flags & PACE_PKT_OUTPUT) == 0)))) { in rack_log_ack()
11049 rack->r_ctl.rc_agg_early = 0; in rack_log_ack()
11060 rsm = TAILQ_FIRST(&rack->r_ctl.rc_tmap); in rack_strike_dupack()
11086 rack->r_ctl.rc_resend = tcp_rack_output(rack->rc_tp, rack, cts); in rack_strike_dupack()
11087 if (rack->r_ctl.rc_resend != NULL) { in rack_strike_dupack()
11146 if ((rack->r_ctl.rack_rs.rs_flags & RACK_RTT_VALID) && in rack_check_bottom_drag()
11166 rack->r_ctl.rc_rtt_diff = 0; in rack_check_bottom_drag()
11167 rack->r_ctl.gp_bw = lt_bw; in rack_check_bottom_drag()
11169 if (rack->r_ctl.num_measurements < RACK_REQ_AVG) in rack_check_bottom_drag()
11170 rack->r_ctl.num_measurements = RACK_REQ_AVG; in rack_check_bottom_drag()
11172 } else if (lt_bw > rack->r_ctl.gp_bw) { in rack_check_bottom_drag()
11173 rack->r_ctl.rc_rtt_diff = 0; in rack_check_bottom_drag()
11174 if (rack->r_ctl.num_measurements < RACK_REQ_AVG) in rack_check_bottom_drag()
11175 rack->r_ctl.num_measurements = RACK_REQ_AVG; in rack_check_bottom_drag()
11176 rack->r_ctl.gp_bw = lt_bw; in rack_check_bottom_drag()
11181 (rack->r_ctl.num_measurements >= rack->r_ctl.req_measurements)) { in rack_check_bottom_drag()
11199 (rack->r_ctl.cwnd_to_use > max((segsiz * (rack_req_segs + 2)), minseg)) && in rack_check_bottom_drag()
11201 (ctf_flight_size(rack->rc_tp, rack->r_ctl.rc_sacked) <= in rack_check_bottom_drag()
11291 log.u_bbr.delRate = rack->r_ctl.bw_rate_cap; in rack_log_hybrid()
11292 log.u_bbr.bbr_substate = rack->r_ctl.client_suggested_maxseg; in rack_log_hybrid()
11293 log.u_bbr.inflight = ctf_flight_size(rack->rc_tp, rack->r_ctl.rc_sacked); in rack_log_hybrid()
11314 orig_ent = rack->r_ctl.rc_last_sft; in rack_set_dgp_hybrid_mode()
11328 rack->r_ctl.client_suggested_maxseg = 0; in rack_set_dgp_hybrid_mode()
11331 rack->r_ctl.bw_rate_cap = 0; in rack_set_dgp_hybrid_mode()
11333 rack->r_ctl.fillcw_cap = rack_fillcw_bw_cap; in rack_set_dgp_hybrid_mode()
11338 if (rack->r_ctl.rc_last_sft) { in rack_set_dgp_hybrid_mode()
11339 rack->r_ctl.rc_last_sft = NULL; in rack_set_dgp_hybrid_mode()
11346 rack->r_ctl.client_suggested_maxseg = 0; in rack_set_dgp_hybrid_mode()
11348 rack->r_ctl.bw_rate_cap = 0; in rack_set_dgp_hybrid_mode()
11350 if (rack->r_ctl.rc_last_sft) { in rack_set_dgp_hybrid_mode()
11351 rack->r_ctl.rc_last_sft = NULL; in rack_set_dgp_hybrid_mode()
11369 if ((rack->r_ctl.rc_last_sft != NULL) && in rack_set_dgp_hybrid_mode()
11370 (rack->r_ctl.rc_last_sft == rc_cur)) { in rack_set_dgp_hybrid_mode()
11377 rack->r_ctl.rc_last_sft = rc_cur; in rack_set_dgp_hybrid_mode()
11389 rack->r_ctl.bw_rate_cap = rack_compensate_for_linerate(rack, rc_cur->cspr); in rack_set_dgp_hybrid_mode()
11391 rack->r_ctl.fillcw_cap = rack_compensate_for_linerate(rack, rc_cur->cspr); in rack_set_dgp_hybrid_mode()
11395 rack->r_ctl.bw_rate_cap = 0; in rack_set_dgp_hybrid_mode()
11397 rack->r_ctl.fillcw_cap = rack_fillcw_bw_cap; in rack_set_dgp_hybrid_mode()
11401 rack->r_ctl.client_suggested_maxseg = rc_cur->hint_maxseg; in rack_set_dgp_hybrid_mode()
11403 rack->r_ctl.client_suggested_maxseg = 0; in rack_set_dgp_hybrid_mode()
11404 if (rc_cur->timestamp == rack->r_ctl.last_tm_mark) { in rack_set_dgp_hybrid_mode()
11461 if (rack->r_ctl.client_suggested_maxseg != 0) { in rack_set_dgp_hybrid_mode()
11475 rack->r_ctl.rc_last_sft = rc_cur; in rack_set_dgp_hybrid_mode()
11476 rack->r_ctl.last_tm_mark = rc_cur->timestamp; in rack_set_dgp_hybrid_mode()
11486 ent = rack->r_ctl.rc_last_sft; in rack_chk_req_and_hybrid_on_out()
11492 ent = rack->r_ctl.rc_last_sft; in rack_chk_req_and_hybrid_on_out()
11547 new_total = acked_amount + rack->r_ctl.fsb.left_to_send; in rack_gain_for_fastoutput()
11553 rack->r_ctl.fsb.left_to_send = new_total; in rack_gain_for_fastoutput()
11554 …KASSERT((rack->r_ctl.fsb.left_to_send <= (sbavail(&rack->rc_inp->inp_socket->so_snd) - (tp->snd_ma… in rack_gain_for_fastoutput()
11556 rack, rack->r_ctl.fsb.left_to_send, in rack_gain_for_fastoutput()
11600 rsm = tqhash_min(rack->r_ctl.tqh); in rack_adjust_sendmap_head()
11654 rsm = tqhash_next(rack->r_ctl.tqh, rsm); in rack_adjust_sendmap_head()
11698 laa = tcp_tv_to_lusectick(&rack->r_ctl.act_rcv_time); in rack_req_check_for_comp()
11723 if (ent == rack->r_ctl.rc_last_sft) { in rack_req_check_for_comp()
11724 rack->r_ctl.rc_last_sft = NULL; in rack_req_check_for_comp()
11728 rack->r_ctl.bw_rate_cap = 0; in rack_req_check_for_comp()
11730 rack->r_ctl.fillcw_cap = rack_fillcw_bw_cap; in rack_req_check_for_comp()
11731 rack->r_ctl.client_suggested_maxseg = 0; in rack_req_check_for_comp()
11812 (rack->r_ctl.rc_hpts_flags & PACE_PKT_OUTPUT)) { in rack_process_ack()
11823 rack_rto_min, rack_rto_max, rack->r_ctl.timer_slop); in rack_process_ack()
11844 rack->r_ctl.rc_reorder_ts = tcp_tv_to_usectick(&rack->r_ctl.act_rcv_time); in rack_process_ack()
11845 if (rack->r_ctl.rc_reorder_ts == 0) in rack_process_ack()
11846 rack->r_ctl.rc_reorder_ts = 1; in rack_process_ack()
11900 rack_rto_min, rack_rto_max, rack->r_ctl.timer_slop); in rack_process_ack()
11902 rack->r_ctl.rc_tlp_cnt_out = 0; in rack_process_ack()
11907 if (rack->r_ctl.rc_hpts_flags & PACE_TMR_RXT) in rack_process_ack()
11908 rack_timer_cancel(tp, rack, rack->r_ctl.rc_rcvtime, __LINE__); in rack_process_ack()
11948 p_cwnd = min(ctf_fixed_maxseg(tp), rack->r_ctl.rc_pace_min_segs); in rack_process_ack()
12014 if (rack->r_ctl.rc_went_idle_time == 0) in rack_process_ack()
12015 rack->r_ctl.rc_went_idle_time = 1; in rack_process_ack()
12016 rack->r_ctl.retran_during_recovery = 0; in rack_process_ack()
12017 rack->r_ctl.dsack_byte_cnt = 0; in rack_process_ack()
12021 rack_timer_cancel(tp, rack, rack->r_ctl.rc_rcvtime, __LINE__); in rack_process_ack()
12025 sack_filter_clear(&rack->r_ctl.rack_sf, tp->snd_una); in rack_process_ack()
12073 log.u_bbr.inflight = ctf_flight_size(rack->rc_tp, rack->r_ctl.rc_sacked); in rack_log_collapse()
12093 (rack->r_ctl.last_collapse_point != (th_ack + rack->rc_tp->snd_wnd))) in rack_collapsed_window()
12095 rack->r_ctl.last_collapse_point = th_ack + rack->rc_tp->snd_wnd; in rack_collapsed_window()
12096 rack->r_ctl.high_collapse_point = rack->rc_tp->snd_max; in rack_collapsed_window()
12099 rack_log_collapse(rack, 0, th_ack, rack->r_ctl.last_collapse_point, line, 1, 0, NULL); in rack_collapsed_window()
12112 rsm = tqhash_find(rack->r_ctl.tqh, rack->r_ctl.last_collapse_point); in rack_un_collapse_window()
12119 if (SEQ_GT(rack->r_ctl.last_collapse_point, rsm->r_start)) { in rack_un_collapse_window()
12121 rack->r_ctl.last_collapse_point, line, 3, rsm->r_flags, rsm); in rack_un_collapse_window()
12130 rack_clone_rsm(rack, nrsm, rsm, rack->r_ctl.last_collapse_point); in rack_un_collapse_window()
12132 (void)tqhash_insert(rack->r_ctl.tqh, nrsm); in rack_un_collapse_window()
12134 if ((insret = tqhash_insert(rack->r_ctl.tqh, nrsm)) != 0) { in rack_un_collapse_window()
12140 rack->r_ctl.last_collapse_point, __LINE__); in rack_un_collapse_window()
12142 TAILQ_INSERT_AFTER(&rack->r_ctl.rc_tmap, rsm, nrsm, r_tnext); in rack_un_collapse_window()
12153 TQHASH_FOREACH_FROM(nrsm, rack->r_ctl.tqh, rsm) { in rack_un_collapse_window()
12171 rack->r_ctl.rc_rcvtime, __LINE__); in rack_handle_delayed_ack()
12195 if ((out + rack->r_ctl.fsb.left_to_send) > tp->snd_wnd) { in rack_validate_fo_sendwin_up()
12202 rack->r_ctl.fsb.left_to_send = tp->snd_wnd - out; in rack_validate_fo_sendwin_up()
12203 if (rack->r_ctl.fsb.left_to_send < ctf_fixed_maxseg(tp)) { in rack_validate_fo_sendwin_up()
12263 (SEQ_GT(th->th_ack, rack->r_ctl.high_collapse_point))) in rack_process_data()
12267 (tp->snd_wnd >= min((rack->r_ctl.rc_high_rwnd/2), in rack_process_data()
12268 rack->r_ctl.rc_pace_min_segs))) { in rack_process_data()
12269 rack_exit_persist(tp, rack, rack->r_ctl.rc_rcvtime); in rack_process_data()
12276 (tp->snd_wnd < min((rack->r_ctl.rc_high_rwnd/2), rack->r_ctl.rc_pace_min_segs)) && in rack_process_data()
12287 rack_enter_persist(tp, rack, rack->r_ctl.rc_rcvtime, tp->snd_una); in rack_process_data()
12456 rack->r_ctl.rc_rcvtime, __LINE__); in rack_process_data()
12473 rack->r_ctl.rc_rcvtime, __LINE__); in rack_process_data()
12483 rack->r_ctl.rc_rcvtime, __LINE__); in rack_process_data()
12494 rack->r_ctl.rc_rcvtime, __LINE__); in rack_process_data()
12628 sack_filter_clear(&rack->r_ctl.rack_sf, tp->snd_una); in rack_do_fastnewdata()
12676 if (rack->r_ctl.rc_sacked) { in rack_fastack()
12682 (rack->r_ctl.rc_hpts_flags & PACE_PKT_OUTPUT)) { in rack_fastack()
12697 (tp->snd_wnd >= min((rack->r_ctl.rc_high_rwnd/2), in rack_fastack()
12698 rack->r_ctl.rc_pace_min_segs))) { in rack_fastack()
12703 (tp->snd_wnd < min((rack->r_ctl.rc_high_rwnd/2), rack->r_ctl.rc_pace_min_segs)) && in rack_fastack()
12714 rack_enter_persist(tp, rack, rack->r_ctl.rc_rcvtime, th->th_ack); in rack_fastack()
12771 rack_rto_min, rack_rto_max, rack->r_ctl.timer_slop); in rack_fastack()
12773 rack->r_ctl.rc_tlp_cnt_out = 0; in rack_fastack()
12778 if (rack->r_ctl.rc_hpts_flags & PACE_TMR_RXT) in rack_fastack()
12779 rack_timer_cancel(tp, rack, rack->r_ctl.rc_rcvtime, __LINE__); in rack_fastack()
12796 (SEQ_GT(tp->snd_una, rack->r_ctl.high_collapse_point))) in rack_fastack()
12823 rack->r_ctl.retran_during_recovery = 0; in rack_fastack()
12825 rack->r_ctl.dsack_byte_cnt = 0; in rack_fastack()
12826 rack->r_ctl.rc_went_idle_time = tcp_get_usecs(NULL); in rack_fastack()
12827 if (rack->r_ctl.rc_went_idle_time == 0) in rack_fastack()
12828 rack->r_ctl.rc_went_idle_time = 1; in rack_fastack()
12832 rack_timer_cancel(tp, rack, rack->r_ctl.rc_rcvtime, __LINE__); in rack_fastack()
12928 rack->r_ctl.rc_rcvtime, __LINE__); in rack_do_syn_sent()
12960 rsm = tqhash_min(rack->r_ctl.tqh); in rack_do_syn_sent()
12966 rack->r_ctl.rc_resend = rsm; in rack_do_syn_sent()
13107 if ((rack->r_ctl.rc_hpts_flags & PACE_TMR_RXT) || in rack_do_syn_recv()
13108 (rack->r_ctl.rc_hpts_flags & PACE_TMR_TLP) || in rack_do_syn_recv()
13109 (rack->r_ctl.rc_hpts_flags & PACE_TMR_RACK)) { in rack_do_syn_recv()
13315 tiwin, nxt_pkt, rack->r_ctl.rc_rcvtime)) { in rack_do_established()
13964 rack->r_ctl.rack_rs.rs_flags = RACK_RTT_EMPTY; in rack_clear_rate_sample()
13965 rack->r_ctl.rack_rs.rs_rtt_cnt = 0; in rack_clear_rate_sample()
13966 rack->r_ctl.rack_rs.rs_rtt_tot = 0; in rack_clear_rate_sample()
13978 (rack->r_ctl.rc_pace_max_segs != 0) && in rack_set_pace_segments()
13980 (rack->r_ctl.rc_last_sft != NULL)) { in rack_set_pace_segments()
13981 rack->r_ctl.rc_last_sft->hybrid_flags &= ~TCP_HYBRID_PACING_SETMSS; in rack_set_pace_segments()
13985 orig_min = rack->r_ctl.rc_pace_min_segs; in rack_set_pace_segments()
13986 orig_max = rack->r_ctl.rc_pace_max_segs; in rack_set_pace_segments()
13988 if (ctf_fixed_maxseg(tp) != rack->r_ctl.rc_pace_min_segs) in rack_set_pace_segments()
13990 rack->r_ctl.rc_pace_min_segs = ctf_fixed_maxseg(tp); in rack_set_pace_segments()
13992 if (user_max != rack->r_ctl.rc_pace_max_segs) in rack_set_pace_segments()
13996 rack->r_ctl.rc_pace_max_segs = user_max; in rack_set_pace_segments()
13999 if ((rack->r_ctl.crte == NULL) || in rack_set_pace_segments()
14000 (bw_est != rack->r_ctl.crte->rate)) { in rack_set_pace_segments()
14001 rack->r_ctl.rc_pace_max_segs = user_max; in rack_set_pace_segments()
14007 (rack->r_ctl.rc_user_set_min_segs == 1)) in rack_set_pace_segments()
14012 rack->r_ctl.rc_pace_min_segs); in rack_set_pace_segments()
14013 rack->r_ctl.rc_pace_max_segs = tcp_get_pacing_burst_size_w_divisor( in rack_set_pace_segments()
14015 rack->r_ctl.crte, NULL, rack->r_ctl.pace_len_divisor); in rack_set_pace_segments()
14018 if (rack->r_ctl.gp_bw || in rack_set_pace_segments()
14019 rack->r_ctl.init_rate) { in rack_set_pace_segments()
14024 orig = rack->r_ctl.rc_pace_max_segs; in rack_set_pace_segments()
14031 rack->r_ctl.rc_pace_max_segs = rack_get_pacing_len(rack, in rack_set_pace_segments()
14035 rack->r_ctl.rc_pace_max_segs = rack->r_ctl.rc_pace_min_segs; in rack_set_pace_segments()
14036 if (orig != rack->r_ctl.rc_pace_max_segs) in rack_set_pace_segments()
14038 } else if ((rack->r_ctl.gp_bw == 0) && in rack_set_pace_segments()
14039 (rack->r_ctl.rc_pace_max_segs == 0)) { in rack_set_pace_segments()
14045 rack->r_ctl.rc_pace_max_segs = rc_init_window(rack); in rack_set_pace_segments()
14048 if (rack->r_ctl.rc_pace_max_segs > PACE_MAX_IP_BYTES) { in rack_set_pace_segments()
14050 rack->r_ctl.rc_pace_max_segs = PACE_MAX_IP_BYTES; in rack_set_pace_segments()
14071 rack->r_ctl.fsb.tcp_ip_hdr_len = sizeof(struct ip6_hdr) + sizeof(struct tcphdr); in rack_init_fsb_block()
14072 ip6 = (struct ip6_hdr *)rack->r_ctl.fsb.tcp_ip_hdr; in rack_init_fsb_block()
14074 rack->r_ctl.fsb.tcp_ip_hdr_len += sizeof(struct udphdr); in rack_init_fsb_block()
14078 rack->r_ctl.fsb.udp = udp; in rack_init_fsb_block()
14079 rack->r_ctl.fsb.th = (struct tcphdr *)(udp + 1); in rack_init_fsb_block()
14082 rack->r_ctl.fsb.th = (struct tcphdr *)(ip6 + 1); in rack_init_fsb_block()
14083 rack->r_ctl.fsb.udp = NULL; in rack_init_fsb_block()
14087 ip6, rack->r_ctl.fsb.th); in rack_init_fsb_block()
14088 rack->r_ctl.fsb.hoplimit = in6_selecthlim(rack->rc_inp, NULL); in rack_init_fsb_block()
14093 rack->r_ctl.fsb.tcp_ip_hdr_len = sizeof(struct tcpiphdr); in rack_init_fsb_block()
14094 ip = (struct ip *)rack->r_ctl.fsb.tcp_ip_hdr; in rack_init_fsb_block()
14096 rack->r_ctl.fsb.tcp_ip_hdr_len += sizeof(struct udphdr); in rack_init_fsb_block()
14100 rack->r_ctl.fsb.udp = udp; in rack_init_fsb_block()
14101 rack->r_ctl.fsb.th = (struct tcphdr *)(udp + 1); in rack_init_fsb_block()
14104 rack->r_ctl.fsb.udp = NULL; in rack_init_fsb_block()
14105 rack->r_ctl.fsb.th = (struct tcphdr *)(ip + 1); in rack_init_fsb_block()
14109 ip, rack->r_ctl.fsb.th); in rack_init_fsb_block()
14110 rack->r_ctl.fsb.hoplimit = tptoinpcb(tp)->inp_ip_ttl; in rack_init_fsb_block()
14113 rack->r_ctl.fsb.recwin = lmin(lmax(sbspace(&tptosocket(tp)->so_rcv), 0), in rack_init_fsb_block()
14126 …rack->r_ctl.fsb.tcp_ip_hdr_len = sizeof(struct ip6_hdr) + sizeof(struct tcphdr) + sizeof(struct ud… in rack_init_fsb()
14128 rack->r_ctl.fsb.tcp_ip_hdr_len = sizeof(struct tcpiphdr) + sizeof(struct udphdr); in rack_init_fsb()
14130 rack->r_ctl.fsb.tcp_ip_hdr = malloc(rack->r_ctl.fsb.tcp_ip_hdr_len, in rack_init_fsb()
14132 if (rack->r_ctl.fsb.tcp_ip_hdr == NULL) { in rack_init_fsb()
14155 log.u_bbr.flex1 = rack->r_ctl.current_round; in rack_log_hystart_event()
14156 log.u_bbr.flex2 = rack->r_ctl.roundends; in rack_log_hystart_event()
14175 rack->r_ctl.roundends = tp->snd_max; in rack_deferred_init()
14176 rack->r_ctl.rc_high_rwnd = tp->snd_wnd; in rack_deferred_init()
14177 rack->r_ctl.cwnd_to_use = tp->snd_cwnd; in rack_deferred_init()
14256 rsm = tqhash_find(rack->r_ctl.tqh, reqr->req_param); in rack_chg_query()
14280 if (rack->r_ctl.rc_hpts_flags == 0) { in rack_chg_query()
14284 reqr->timer_hpts_flags = rack->r_ctl.rc_hpts_flags; in rack_chg_query()
14285 if (rack->r_ctl.rc_hpts_flags & PACE_PKT_OUTPUT) { in rack_chg_query()
14286 reqr->timer_pacing_to = rack->r_ctl.rc_last_output_to; in rack_chg_query()
14288 if (rack->r_ctl.rc_hpts_flags & PACE_TMR_MASK) { in rack_chg_query()
14289 reqr->timer_timer_exp = rack->r_ctl.rc_timer_exp; in rack_chg_query()
14292 rack->r_ctl.rc_hpts_flags, in rack_chg_query()
14293 rack->r_ctl.rc_last_output_to, in rack_chg_query()
14294 rack->r_ctl.rc_timer_exp); in rack_chg_query()
14299 reqr->rack_num_dsacks = rack->r_ctl.num_dsack; in rack_chg_query()
14300 reqr->rack_reorder_ts = rack->r_ctl.rc_reorder_ts; in rack_chg_query()
14302 reqr->rack_rxt_last_time = rack->r_ctl.rc_tlp_rxt_last_time; in rack_chg_query()
14303 reqr->rack_min_rtt = rack->r_ctl.rc_rack_min_rtt; in rack_chg_query()
14305 reqr->rack_tmit_time = rack->r_ctl.rc_rack_tmit_time; in rack_chg_query()
14308 reqr->rack_sacked = rack->r_ctl.rc_sacked; in rack_chg_query()
14309 reqr->rack_holes_rxt = rack->r_ctl.rc_holes_rxt; in rack_chg_query()
14310 reqr->rack_prr_delivered = rack->r_ctl.rc_prr_delivered; in rack_chg_query()
14311 reqr->rack_prr_recovery_fs = rack->r_ctl.rc_prr_recovery_fs; in rack_chg_query()
14312 reqr->rack_prr_sndcnt = rack->r_ctl.rc_prr_sndcnt; in rack_chg_query()
14313 reqr->rack_prr_out = rack->r_ctl.rc_prr_out; in rack_chg_query()
14316 reqr->rack_tlp_cnt_out = rack->r_ctl.rc_tlp_cnt_out; in rack_chg_query()
14318 reqr->rack_time_went_idle = rack->r_ctl.rc_went_idle_time; in rack_chg_query()
14363 if (rack->r_ctl.rc_hpts_flags & PACE_PKT_OUTPUT) { in rack_switch_failed()
14364 if (TSTMP_GT(rack->r_ctl.rc_last_output_to, cts)) { in rack_switch_failed()
14365 toval = rack->r_ctl.rc_last_output_to - cts; in rack_switch_failed()
14370 } else if (rack->r_ctl.rc_hpts_flags & PACE_TMR_MASK) { in rack_switch_failed()
14371 if (TSTMP_GT(rack->r_ctl.rc_timer_exp, cts)) { in rack_switch_failed()
14372 toval = rack->r_ctl.rc_timer_exp - cts; in rack_switch_failed()
14406 rsm->r_tim_lastsent[0] = rack_to_usec_ts(&rack->r_ctl.act_rcv_time); in rack_init_outstanding()
14440 if ((insret = tqhash_insert(rack->r_ctl.tqh, rsm)) != 0) { in rack_init_outstanding()
14445 (void)tqhash_insert(rack->r_ctl.tqh, rsm); in rack_init_outstanding()
14447 TAILQ_INSERT_TAIL(&rack->r_ctl.rc_tmap, rsm, r_tnext); in rack_init_outstanding()
14500 if ((insret = tqhash_insert(rack->r_ctl.tqh, rsm)) != 0) { in rack_init_outstanding()
14505 (void)tqhash_insert(rack->r_ctl.tqh, rsm); in rack_init_outstanding()
14508 TAILQ_FOREACH(ersm, &rack->r_ctl.rc_tmap, r_tnext) { in rack_init_outstanding()
14526 TAILQ_INSERT_TAIL(&rack->r_ctl.rc_tmap, rsm, r_tnext); in rack_init_outstanding()
14530 if ((rack->r_ctl.rc_sacklast == NULL) || in rack_init_outstanding()
14531 (SEQ_GT(rsm->r_end, rack->r_ctl.rc_sacklast->r_end))) { in rack_init_outstanding()
14532 rack->r_ctl.rc_sacklast = rsm; in rack_init_outstanding()
14579 rack->r_ctl.tqh = malloc(sizeof(struct tailq_hash), M_TCPFSB, M_NOWAIT); in rack_init()
14580 if (rack->r_ctl.tqh == NULL) { in rack_init()
14584 tqhash_init(rack->r_ctl.tqh); in rack_init()
14585 TAILQ_INIT(&rack->r_ctl.rc_free); in rack_init()
14586 TAILQ_INIT(&rack->r_ctl.rc_tmap); in rack_init()
14608 rack->r_ctl.rc_split_limit = V_tcp_map_split_limit; in rack_init()
14611 rack->r_ctl.rc_saved_beta.newreno_flags |= CC_NEWRENO_BETA_ECN_ENABLED; in rack_init()
14612 rack->r_ctl.rc_reorder_fade = rack_reorder_fade; in rack_init()
14614 rack->r_ctl.rc_tlp_threshold = rack_tlp_thresh; in rack_init()
14618 rack->r_ctl.rc_user_set_min_segs = rack_pacing_min_seg; in rack_init()
14633 rack->r_ctl.pcm_i.cnt_alloc = RACK_DEFAULT_PCM_ARRAY; in rack_init()
14634 sz = (sizeof(struct rack_pcm_stats) * rack->r_ctl.pcm_i.cnt_alloc); in rack_init()
14635 rack->r_ctl.pcm_s = malloc(sz,M_TCPPCM, M_NOWAIT); in rack_init()
14636 if (rack->r_ctl.pcm_s == NULL) { in rack_init()
14637 rack->r_ctl.pcm_i.cnt_alloc = 0; in rack_init()
14640 rack->r_ctl.side_chan_dis_mask = tcp_sidechannel_disable_mask; in rack_init()
14642 rack->r_ctl.rack_per_upper_bound_ss = (uint8_t)rack_per_upper_bound_ss; in rack_init()
14643 rack->r_ctl.rack_per_upper_bound_ca = (uint8_t)rack_per_upper_bound_ca; in rack_init()
14646 rack->r_ctl.pace_len_divisor = rack_default_pacing_divisor; in rack_init()
14648 rack->r_ctl.max_reduction = rack_max_reduce; in rack_init()
14650 TAILQ_INIT(&rack->r_ctl.opt_list); in rack_init()
14651 rack->r_ctl.rc_saved_beta.beta = V_newreno_beta_ecn; in rack_init()
14652 rack->r_ctl.rc_saved_beta.beta_ecn = V_newreno_beta_ecn; in rack_init()
14657 rack->r_ctl.rc_saved_beta.beta = rack_hibeta_setting; in rack_init()
14658 rack->r_ctl.saved_hibeta = rack_hibeta_setting; in rack_init()
14661 rack->r_ctl.saved_hibeta = 50; in rack_init()
14668 rack->r_ctl.last_tm_mark = 0xffffffffffffffff; in rack_init()
14669 rack->r_ctl.rc_reorder_shift = rack_reorder_thresh; in rack_init()
14670 rack->r_ctl.rc_pkt_delay = rack_pkt_delay; in rack_init()
14671 rack->r_ctl.rc_tlp_cwnd_reduce = rack_lower_cwnd_at_tlp; in rack_init()
14672 rack->r_ctl.rc_lowest_us_rtt = 0xffffffff; in rack_init()
14673 rack->r_ctl.rc_highest_us_rtt = 0; in rack_init()
14674 rack->r_ctl.bw_rate_cap = rack_bw_rate_cap; in rack_init()
14677 rack->r_ctl.fillcw_cap = rack_fillcw_bw_cap; in rack_init()
14678 rack->r_ctl.timer_slop = TICKS_2_USEC(tcp_rexmit_slop); in rack_init()
14686 rack->r_ctl.pacing_method |= RACK_REG_PACING; in rack_init()
14721 rack->r_ctl.rc_rate_sample_method = rack_rate_sample_method; in rack_init()
14723 rack->r_ctl.rc_prr_sendalot = rack_send_a_lot_in_prr; in rack_init()
14724 rack->r_ctl.rc_min_to = rack_min_to; in rack_init()
14725 microuptime(&rack->r_ctl.act_rcv_time); in rack_init()
14726 rack->r_ctl.rc_last_time_decay = rack->r_ctl.act_rcv_time; in rack_init()
14727 rack->r_ctl.rack_per_of_gp_ss = rack_per_of_gp_ss; in rack_init()
14734 rack->r_ctl.rack_per_of_gp_ca = rack_do_dyn_mul; in rack_init()
14736 rack->r_ctl.rack_per_of_gp_ca = rack_per_of_gp_ca; in rack_init()
14737 rack->r_ctl.rack_per_of_gp_rec = rack_per_of_gp_rec; in rack_init()
14742 rack->r_ctl.rack_per_of_gp_rec = 90; in rack_init()
14743 rack->r_ctl.rack_per_of_gp_ca = 100; in rack_init()
14744 rack->r_ctl.rack_per_of_gp_ss = 250; in rack_init()
14746 rack->r_ctl.rack_per_of_gp_probertt = rack_per_of_gp_probertt; in rack_init()
14747 rack->r_ctl.rc_tlp_rxt_last_time = tcp_tv_to_mssectick(&rack->r_ctl.act_rcv_time); in rack_init()
14748 rack->r_ctl.last_rcv_tstmp_for_rtt = tcp_tv_to_mssectick(&rack->r_ctl.act_rcv_time); in rack_init()
14750 setup_time_filter_small(&rack->r_ctl.rc_gp_min_rtt, FILTER_TYPE_MIN, in rack_init()
14752 us_cts = tcp_tv_to_usectick(&rack->r_ctl.act_rcv_time); in rack_init()
14753 rack->r_ctl.rc_lower_rtt_us_cts = us_cts; in rack_init()
14754 rack->r_ctl.rc_time_of_last_probertt = us_cts; in rack_init()
14755 rack->r_ctl.rc_went_idle_time = us_cts; in rack_init()
14756 rack->r_ctl.rc_time_probertt_starts = 0; in rack_init()
14758 rack->r_ctl.gp_rnd_thresh = rack_rnd_cnt_req & 0xff; in rack_init()
14760 rack->r_ctl.gate_to_fs = 1; in rack_init()
14761 rack->r_ctl.gp_gain_req = rack_gp_gain_req; in rack_init()
14775 rack->r_ctl.req_measurements = rack_req_measurements; in rack_init()
14777 rack->r_ctl.req_measurements = 1; in rack_init()
14884 rack_log_hystart_event(rack, rack->r_ctl.roundends, 20); in rack_init()
14899 rack->r_ctl.rc_reorder_ts = qr.rack_reorder_ts; in rack_init()
14900 rack->r_ctl.num_dsack = qr.rack_num_dsacks; in rack_init()
14901 rack->r_ctl.rc_tlp_rxt_last_time = qr.rack_rxt_last_time; in rack_init()
14902 rack->r_ctl.rc_rack_min_rtt = qr.rack_min_rtt; in rack_init()
14904 rack->r_ctl.rc_rack_tmit_time = qr.rack_tmit_time; in rack_init()
14905 rack->r_ctl.rc_sacked = qr.rack_sacked; in rack_init()
14906 rack->r_ctl.rc_holes_rxt = qr.rack_holes_rxt; in rack_init()
14907 rack->r_ctl.rc_prr_delivered = qr.rack_prr_delivered; in rack_init()
14908 rack->r_ctl.rc_prr_recovery_fs = qr.rack_prr_recovery_fs; in rack_init()
14909 rack->r_ctl.rc_prr_sndcnt = qr.rack_prr_sndcnt; in rack_init()
14910 rack->r_ctl.rc_prr_out = qr.rack_prr_out; in rack_init()
14913 rack->r_ctl.rc_tlp_cnt_out = qr.rack_tlp_cnt_out; in rack_init()
14916 rack->r_ctl.rc_tlp_cnt_out = 0; in rack_init()
14921 rack->r_ctl.rc_went_idle_time = qr.rack_time_went_idle; in rack_init()
14923 if (rack->r_ctl.rc_scw) { in rack_init()
14924 tcp_shared_cwnd_idle(rack->r_ctl.rc_scw, rack->r_ctl.rc_scw_index); in rack_init()
14928 rack->r_ctl.persist_lost_ends = 0; in rack_init()
14934 rack_rto_min, rack_rto_max, rack->r_ctl.timer_slop); in rack_init()
14955 rack->r_ctl.rc_hpts_flags = qr.timer_hpts_flags; in rack_init()
14957 rack->r_ctl.rc_last_output_to = qr.timer_pacing_to; in rack_init()
14964 rack->r_ctl.rc_timer_exp = qr.timer_timer_exp; in rack_init()
14973 rack->r_ctl.rc_hpts_flags, in rack_init()
14974 rack->r_ctl.rc_last_output_to, in rack_init()
14975 rack->r_ctl.rc_timer_exp); in rack_init()
14981 rack_log_hpts_diag(rack, us_cts, &diag, &rack->r_ctl.act_rcv_time); in rack_init()
15050 if (rack->r_ctl.rc_scw) { in rack_fini()
15054 limit = max(1, rack->r_ctl.rc_lowest_us_rtt); in rack_fini()
15057 tcp_shared_cwnd_free_full(tp, rack->r_ctl.rc_scw, in rack_fini()
15058 rack->r_ctl.rc_scw_index, in rack_fini()
15060 rack->r_ctl.rc_scw = NULL; in rack_fini()
15063 if (rack->r_ctl.fsb.tcp_ip_hdr) { in rack_fini()
15064 free(rack->r_ctl.fsb.tcp_ip_hdr, M_TCPFSB); in rack_fini()
15065 rack->r_ctl.fsb.tcp_ip_hdr = NULL; in rack_fini()
15066 rack->r_ctl.fsb.th = NULL; in rack_fini()
15072 while (!TAILQ_EMPTY(&rack->r_ctl.opt_list)) { in rack_fini()
15075 dol = TAILQ_FIRST(&rack->r_ctl.opt_list); in rack_fini()
15076 TAILQ_REMOVE(&rack->r_ctl.opt_list, dol, next); in rack_fini()
15080 if (rack->r_ctl.crte != NULL) { in rack_fini()
15081 tcp_rel_pacing_rate(rack->r_ctl.crte, tp); in rack_fini()
15083 rack->r_ctl.crte = NULL; in rack_fini()
15093 rsm = tqhash_min(rack->r_ctl.tqh); in rack_fini()
15095 tqhash_remove(rack->r_ctl.tqh, rsm, REMOVE_TYPE_CUMACK); in rack_fini()
15096 rack->r_ctl.rc_num_maps_alloced--; in rack_fini()
15098 rsm = tqhash_min(rack->r_ctl.tqh); in rack_fini()
15100 rsm = TAILQ_FIRST(&rack->r_ctl.rc_free); in rack_fini()
15102 TAILQ_REMOVE(&rack->r_ctl.rc_free, rsm, r_tnext); in rack_fini()
15103 rack->r_ctl.rc_num_maps_alloced--; in rack_fini()
15107 rsm = TAILQ_FIRST(&rack->r_ctl.rc_free); in rack_fini()
15109 if (rack->r_ctl.pcm_s != NULL) { in rack_fini()
15110 free(rack->r_ctl.pcm_s, M_TCPPCM); in rack_fini()
15111 rack->r_ctl.pcm_s = NULL; in rack_fini()
15112 rack->r_ctl.pcm_i.cnt_alloc = 0; in rack_fini()
15113 rack->r_ctl.pcm_i.cnt = 0; in rack_fini()
15115 if ((rack->r_ctl.rc_num_maps_alloced > 0) && in rack_fini()
15122 log.u_bbr.flex1 = rack->r_ctl.rc_num_maps_alloced; in rack_fini()
15125 log.u_bbr.inflight = ctf_flight_size(rack->rc_tp, rack->r_ctl.rc_sacked); in rack_fini()
15126 rsm = tqhash_min(rack->r_ctl.tqh); in rack_fini()
15128 rsm = TAILQ_FIRST(&rack->r_ctl.rc_free); in rack_fini()
15135 KASSERT((rack->r_ctl.rc_num_maps_alloced == 0), in rack_fini()
15138 rack->r_ctl.rc_num_maps_alloced)); in rack_fini()
15140 free(rack->r_ctl.tqh, M_TCPFSB); in rack_fini()
15141 rack->r_ctl.tqh = NULL; in rack_fini()
15215 tmr_up = rack->r_ctl.rc_hpts_flags & PACE_TMR_MASK; in rack_timer_audit()
15222 rack_timer_cancel(tp, rack, rack->r_ctl.rc_rcvtime, __LINE__); in rack_timer_audit()
15228 rsm = TAILQ_FIRST(&rack->r_ctl.rc_tmap); in rack_timer_audit()
15274 if (rack->r_ctl.rc_hpts_flags & PACE_PKT_OUTPUT) { in rack_timer_audit()
15278 if (TSTMP_GT(rack->r_ctl.rc_last_output_to, us_cts)) { in rack_timer_audit()
15280 rack->r_ctl.rc_agg_early += (rack->r_ctl.rc_last_output_to - us_cts); in rack_timer_audit()
15282 rack->r_ctl.rc_hpts_flags &= ~PACE_PKT_OUTPUT; in rack_timer_audit()
15286 rack_timer_cancel(tp, rack, rack->r_ctl.rc_rcvtime, __LINE__); in rack_timer_audit()
15320 (tp->snd_wnd >= min((rack->r_ctl.rc_high_rwnd/2), in rack_do_win_updates()
15321 rack->r_ctl.rc_pace_min_segs))) { in rack_do_win_updates()
15326 (tp->snd_wnd < min((rack->r_ctl.rc_high_rwnd/2), rack->r_ctl.rc_pace_min_segs)) && in rack_do_win_updates()
15337 rack_enter_persist(tp, rack, rack->r_ctl.rc_rcvtime, ack); in rack_do_win_updates()
15367 log.u_bbr.flex1 = rack->r_ctl.rc_prr_sndcnt; in rack_log_input_packet()
15373 log.u_bbr.flex2 = rack->r_ctl.rc_num_maps_alloced; in rack_log_input_packet()
15375 log.u_bbr.inflight = ctf_flight_size(tp, rack->r_ctl.rc_sacked); in rack_log_input_packet()
15377 log.u_bbr.flex4 = rack->r_ctl.rc_hpts_flags; in rack_log_input_packet()
15527 us_rtt = us_cts - rack->r_ctl.forced_ack_ts; in rack_handle_probe_response()
15552 rack->r_ctl.roundends = tp->snd_max; in rack_new_round_starts()
15569 log.u_bbr.inflight = ctf_flight_size(rack->rc_tp, rack->r_ctl.rc_sacked); in rack_log_pcm()
15575 log.u_bbr.flex5 = rack->r_ctl.pcm_idle_rounds; in rack_log_pcm()
15596 rack->r_ctl.current_round++; in rack_new_round_setup()
15609 rnds = rack->r_ctl.current_round - rack->r_ctl.last_pcm_round; in rack_new_round_setup()
15610 if ((rnds + rack->r_ctl.pcm_idle_rounds) >= rack_pcm_every_n_rounds) { in rack_new_round_setup()
15612 …rack_log_pcm(rack, 3, rack->r_ctl.last_pcm_round, rack_pcm_every_n_rounds, rack->r_ctl.current_rou… in rack_new_round_setup()
15614 …rack_log_pcm(rack, 3, rack->r_ctl.last_pcm_round, rack_pcm_every_n_rounds, rack->r_ctl.current_rou… in rack_new_round_setup()
15620 CC_ALGO(tp)->newround(&tp->t_ccv, rack->r_ctl.current_round); in rack_new_round_setup()
15632 (rack->r_ctl.num_measurements >= RACK_REQ_AVG) && in rack_new_round_setup()
15633 (rack->r_ctl.gp_rnd_thresh > 0) && in rack_new_round_setup()
15634 ((rack->r_ctl.current_round - rack->r_ctl.last_rnd_of_gp_rise) >= rack->r_ctl.gp_rnd_thresh)) { in rack_new_round_setup()
15652 log.u_bbr.flex1 = rack->r_ctl.current_round; in rack_new_round_setup()
15653 log.u_bbr.flex2 = rack->r_ctl.last_rnd_of_gp_rise; in rack_new_round_setup()
15654 log.u_bbr.flex3 = rack->r_ctl.gp_rnd_thresh; in rack_new_round_setup()
15655 log.u_bbr.flex4 = rack->r_ctl.gate_to_fs; in rack_new_round_setup()
15656 log.u_bbr.flex5 = rack->r_ctl.ss_hi_fs; in rack_new_round_setup()
15661 if ((rack->r_ctl.gate_to_fs == 1) && in rack_new_round_setup()
15662 (tp->snd_cwnd > rack->r_ctl.ss_hi_fs)) { in rack_new_round_setup()
15663 tp->snd_cwnd = rack->r_ctl.ss_hi_fs; in rack_new_round_setup()
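
The rack_new_round_setup() references show the round bookkeeping: current_round advances, the rounds since the last path-capacity measurement are counted, and a new measurement is scheduled once that count plus any idle rounds reaches rack_pcm_every_n_rounds; the congestion-control module's newround() hook is also called with the round number. A small sketch of the trigger test alone, with illustrative names:

    /*
     * Decide whether a path-capacity measurement is due this round.
     * Idle rounds count toward the threshold, per the listing above.
     */
    #include <stdbool.h>
    #include <stdint.h>

    struct round_state {
            uint32_t current_round;    /* ~ r_ctl.current_round */
            uint32_t last_pcm_round;   /* ~ r_ctl.last_pcm_round */
            uint32_t pcm_idle_rounds;  /* ~ r_ctl.pcm_idle_rounds */
    };

    static bool
    pcm_due(const struct round_state *rs, uint32_t every_n_rounds)
    {
            uint32_t rnds;

            rnds = rs->current_round - rs->last_pcm_round;
            return ((rnds + rs->pcm_idle_rounds) >= every_n_rounds);
    }
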
15710 (rack->r_ctl.rc_hpts_flags & PACE_PKT_OUTPUT)) in rack_do_compressed_ack_processing()
15735 rack->r_ctl.rc_gp_srtt /*flex1*/, in rack_do_compressed_ack_processing()
15753 rack->r_ctl.rc_rcvtime = cts; in rack_do_compressed_ack_processing()
15777 if (tiwin > rack->r_ctl.rc_high_rwnd) in rack_do_compressed_ack_processing()
15778 rack->r_ctl.rc_high_rwnd = tiwin; in rack_do_compressed_ack_processing()
15868 rack->r_ctl.rc_reorder_ts = tcp_tv_to_usectick(&rack->r_ctl.act_rcv_time); in rack_do_compressed_ack_processing()
15869 if (rack->r_ctl.rc_reorder_ts == 0) in rack_do_compressed_ack_processing()
15870 rack->r_ctl.rc_reorder_ts = 1; in rack_do_compressed_ack_processing()
15880 rack->r_ctl.act_rcv_time.tv_sec = ts.tv_sec; in rack_do_compressed_ack_processing()
15881 rack->r_ctl.act_rcv_time.tv_usec = ts.tv_nsec/1000; in rack_do_compressed_ack_processing()
15883 rack->r_ctl.act_rcv_time = *tv; in rack_do_compressed_ack_processing()
15887 tcp_tv_to_usectick(&rack->r_ctl.act_rcv_time)); in rack_do_compressed_ack_processing()
15928 rack->r_ctl.act_rcv_time.tv_sec = ts.tv_sec; in rack_do_compressed_ack_processing()
15929 rack->r_ctl.act_rcv_time.tv_usec = ts.tv_nsec/1000; in rack_do_compressed_ack_processing()
15931 rack->r_ctl.act_rcv_time = *tv; in rack_do_compressed_ack_processing()
15934 tcp_tv_to_lusectick(&rack->r_ctl.act_rcv_time)); in rack_do_compressed_ack_processing()
15940 if (SEQ_GEQ(ae->ack, rack->r_ctl.dsack_round_end)) { in rack_do_compressed_ack_processing()
15971 (SEQ_GT(high_seq, rack->r_ctl.high_collapse_point))) in rack_do_compressed_ack_processing()
15986 if (SEQ_GEQ(high_seq, rack->r_ctl.roundends) && in rack_do_compressed_ack_processing()
16074 p_cwnd = min(ctf_fixed_maxseg(tp), rack->r_ctl.rc_pace_min_segs); in rack_do_compressed_ack_processing()
16100 rack_rto_min, rack_rto_max, rack->r_ctl.timer_slop); in rack_do_compressed_ack_processing()
16102 rack->r_ctl.rc_tlp_cnt_out = 0; in rack_do_compressed_ack_processing()
16112 if (rack->r_ctl.rc_hpts_flags & PACE_TMR_RXT) in rack_do_compressed_ack_processing()
16113 rack_timer_cancel(tp, rack, rack->r_ctl.rc_rcvtime, __LINE__); in rack_do_compressed_ack_processing()
16126 rack->r_ctl.retran_during_recovery = 0; in rack_do_compressed_ack_processing()
16128 rack->r_ctl.dsack_byte_cnt = 0; in rack_do_compressed_ack_processing()
16129 rack->r_ctl.rc_went_idle_time = tcp_get_usecs(NULL); in rack_do_compressed_ack_processing()
16130 if (rack->r_ctl.rc_went_idle_time == 0) in rack_do_compressed_ack_processing()
16131 rack->r_ctl.rc_went_idle_time = 1; in rack_do_compressed_ack_processing()
16137 rack_timer_cancel(tp, rack, rack->r_ctl.rc_rcvtime, __LINE__); in rack_do_compressed_ack_processing()
16138 sack_filter_clear(&rack->r_ctl.rack_sf, tp->snd_una); in rack_do_compressed_ack_processing()
16147 rack_timer_cancel(tp, rack, rack->r_ctl.rc_rcvtime, __LINE__); in rack_do_compressed_ack_processing()
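
Several of the rack_do_compressed_ack_processing() references record the packet arrival time: when a hardware/driver timestamp is available as a timespec (nanoseconds), it is folded into the stored timeval by dividing the nanosecond field by 1000; otherwise the plain timeval from the input path is copied. A minimal sketch of that conversion, with an assumed have_hw_ts selector:

    /* Convert a nanosecond timespec arrival stamp to the microsecond
     * timeval kept in r_ctl.act_rcv_time, falling back to the given tv. */
    #include <stdbool.h>
    #include <sys/time.h>
    #include <time.h>

    static void
    set_act_rcv_time(struct timeval *act_rcv_time, bool have_hw_ts,
        const struct timespec *ts, const struct timeval *tv)
    {
            if (have_hw_ts) {
                    act_rcv_time->tv_sec = ts->tv_sec;
                    act_rcv_time->tv_usec = ts->tv_nsec / 1000;
            } else
                    *act_rcv_time = *tv;
    }
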
16414 (rack->r_ctl.rc_hpts_flags & PACE_PKT_OUTPUT) && in rack_do_segment_nounlock()
16415 (TSTMP_LT(us_cts, rack->r_ctl.rc_last_output_to))) { in rack_do_segment_nounlock()
16422 slot_remaining = rack->r_ctl.rc_last_output_to - us_cts; in rack_do_segment_nounlock()
16461 rack->r_ctl.gp_bw, in rack_do_segment_nounlock()
16482 rack->r_ctl.act_rcv_time.tv_sec = ts.tv_sec; in rack_do_segment_nounlock()
16483 rack->r_ctl.act_rcv_time.tv_usec = ts.tv_nsec/1000; in rack_do_segment_nounlock()
16485 rack->r_ctl.act_rcv_time = *tv; in rack_do_segment_nounlock()
16555 rack->r_ctl.rc_gp_srtt /*flex1*/, in rack_do_segment_nounlock()
16576 log.u_bbr.flex1 = rack->r_ctl.rc_prr_sndcnt; in rack_do_segment_nounlock()
16582 log.u_bbr.flex2 = rack->r_ctl.rc_num_maps_alloced; in rack_do_segment_nounlock()
16584 log.u_bbr.inflight = ctf_flight_size(rack->rc_tp, rack->r_ctl.rc_sacked); in rack_do_segment_nounlock()
16587 log.u_bbr.flex4 = rack->r_ctl.rc_hpts_flags; in rack_do_segment_nounlock()
16692 if (tiwin > rack->r_ctl.rc_high_rwnd) in rack_do_segment_nounlock()
16693 rack->r_ctl.rc_high_rwnd = tiwin; in rack_do_segment_nounlock()
16715 (TSTMP_GEQ(to.to_tsecr, rack->r_ctl.last_rcv_tstmp_for_rtt))) { in rack_do_segment_nounlock()
16726 if (TSTMP_GT(cts, rack->r_ctl.last_time_of_arm_rcv)) in rack_do_segment_nounlock()
16727 rtt = (cts - rack->r_ctl.last_time_of_arm_rcv); in rack_do_segment_nounlock()
16819 sack_filter_clear(&rack->r_ctl.rack_sf, th->th_ack); in rack_do_segment_nounlock()
16823 us_cts = tcp_tv_to_usectick(&rack->r_ctl.act_rcv_time); in rack_do_segment_nounlock()
16840 rack->r_ctl.rc_rcvtime = cts; in rack_do_segment_nounlock()
16844 (rsm = tqhash_min(rack->r_ctl.tqh)) != NULL) in rack_do_segment_nounlock()
16867 (rack->r_ctl.rc_time_probertt_starts == 0)) { in rack_do_segment_nounlock()
16944 if (SEQ_GEQ(tp->snd_una, rack->r_ctl.roundends) && in rack_do_segment_nounlock()
16954 ((rack->r_ctl.rc_hpts_flags & PACE_TMR_MASK) == 0) && in rack_do_segment_nounlock()
16963 (rack->r_ctl.rc_hpts_flags & PACE_PKT_OUTPUT)) { in rack_do_segment_nounlock()
16969 if (rack->r_ctl.rc_hpts_flags & PACE_PKT_OUTPUT) { in rack_do_segment_nounlock()
16971 if (TSTMP_GT(rack->r_ctl.rc_last_output_to, us_cts)) { in rack_do_segment_nounlock()
16973 rack->r_ctl.rc_agg_early += (rack->r_ctl.rc_last_output_to - us_cts); in rack_do_segment_nounlock()
16976 rack->r_ctl.rc_hpts_flags &= ~PACE_PKT_OUTPUT; in rack_do_segment_nounlock()
17041 if (tqhash_empty(rack->r_ctl.tqh)) { in tcp_rack_output()
17049 rsm = TAILQ_FIRST(&rack->r_ctl.rc_tmap); in tcp_rack_output()
17133 log.u_bbr.flex3 = rack->r_ctl.rc_pace_min_segs; in rack_log_pacing_delay_calc()
17134 log.u_bbr.flex4 = rack->r_ctl.rc_pace_max_segs; in rack_log_pacing_delay_calc()
17135 log.u_bbr.flex5 = rack->r_ctl.rack_per_of_gp_ss; in rack_log_pacing_delay_calc()
17136 log.u_bbr.flex6 = rack->r_ctl.rack_per_of_gp_ca; in rack_log_pacing_delay_calc()
17153 log.u_bbr.epoch = rack->r_ctl.rc_agg_delayed; in rack_log_pacing_delay_calc()
17154 log.u_bbr.lt_epoch = rack->r_ctl.rc_agg_early; in rack_log_pacing_delay_calc()
17155 log.u_bbr.applimited = rack->r_ctl.rack_per_of_gp_rec; in rack_log_pacing_delay_calc()
17158 if (rack->r_ctl.gp_bw == 0) in rack_log_pacing_delay_calc()
17163 log.u_bbr.pkts_out = rack->r_ctl.rc_rack_min_rtt; in rack_log_pacing_delay_calc()
17164 log.u_bbr.lost = rack->r_ctl.rc_probertt_sndmax_atexit; in rack_log_pacing_delay_calc()
17166 if (rack->r_ctl.cwnd_to_use < rack->rc_tp->snd_ssthresh) { in rack_log_pacing_delay_calc()
17175 log.u_bbr.inflight = ctf_flight_size(rack->rc_tp, rack->r_ctl.rc_sacked); in rack_log_pacing_delay_calc()
17204 ((rack->r_ctl.crte == NULL) || in rack_get_pacing_len()
17205 (bw != rack->r_ctl.crte->rate))) { in rack_get_pacing_len()
17210 (rack->r_ctl.rc_user_set_min_segs == 1)) in rack_get_pacing_len()
17216 pace_one, rack->r_ctl.crte, NULL, rack->r_ctl.pace_len_divisor); in rack_get_pacing_len()
17219 if (rack->rc_hybrid_mode && rack->r_ctl.client_suggested_maxseg) { in rack_get_pacing_len()
17220 if (((uint32_t)rack->r_ctl.client_suggested_maxseg * mss) > new_tso) in rack_get_pacing_len()
17221 new_tso = (uint32_t)rack->r_ctl.client_suggested_maxseg * mss; in rack_get_pacing_len()
17223 if (rack->r_ctl.rc_user_set_min_segs && in rack_get_pacing_len()
17224 ((rack->r_ctl.rc_user_set_min_segs * mss) > new_tso)) in rack_get_pacing_len()
17225 new_tso = rack->r_ctl.rc_user_set_min_segs * mss; in rack_get_pacing_len()
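
The tail of the rack_get_pacing_len() references shows the computed burst size being clamped upward: in hybrid mode it is raised to at least client_suggested_maxseg segments, and a user-set minimum segment count is honored the same way. A sketch of just that clamping step (the real path also consults the hardware rate entry and a length divisor, which are omitted here):

    /* Raise a computed pacing burst (new_tso, in bytes) to honor the
     * hybrid client suggestion and the user-set minimum segment count. */
    #include <stdint.h>

    static uint32_t
    clamp_pacing_len(uint32_t new_tso, uint32_t mss,
        uint32_t client_suggested_maxseg, uint32_t user_set_min_segs)
    {
            if (client_suggested_maxseg &&
                (client_suggested_maxseg * mss) > new_tso)
                    new_tso = client_suggested_maxseg * mss;
            if (user_set_min_segs &&
                (user_set_min_segs * mss) > new_tso)
                    new_tso = user_set_min_segs * mss;
            return (new_tso);
    }
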
17254 reduced_win = window_input * rack->r_ctl.saved_hibeta; in rack_arrive_at_discounted_rate()
17256 gain = rack->r_ctl.saved_hibeta; in rack_arrive_at_discounted_rate()
17289 if (ctf_flight_size(rack->rc_tp, rack->r_ctl.rc_sacked) > rack->r_ctl.cwnd_to_use) in pace_to_fill_cwnd()
17293 if (rack->r_ctl.rc_last_us_rtt == 0) in pace_to_fill_cwnd()
17296 (rack->r_ctl.rc_last_us_rtt >= in pace_to_fill_cwnd()
17297 (get_filter_value_small(&rack->r_ctl.rc_gp_min_rtt) * rack->rtt_limit_mul))) { in pace_to_fill_cwnd()
17301 if (rack->r_ctl.fillcw_cap && *rate_wanted >= rack->r_ctl.fillcw_cap) in pace_to_fill_cwnd()
17307 fill_bw = min(rack->rc_tp->snd_cwnd, rack->r_ctl.cwnd_to_use); in pace_to_fill_cwnd()
17321 fill_bw /= (uint64_t)rack->r_ctl.rc_last_us_rtt; in pace_to_fill_cwnd()
17323 if (rack->r_ctl.fillcw_cap && fill_bw >= rack->r_ctl.fillcw_cap) in pace_to_fill_cwnd()
17324 fill_bw = rack->r_ctl.fillcw_cap; in pace_to_fill_cwnd()
17371 (rack->r_ctl.crte != NULL)) { in pace_to_fill_cwnd()
17374 high_rate = tcp_hw_highest_rate(rack->r_ctl.crte); in pace_to_fill_cwnd()
17388 } else if ((rack->r_ctl.crte == NULL) && in pace_to_fill_cwnd()
17410 if (rack->r_ctl.bw_rate_cap && (fill_bw > rack->r_ctl.bw_rate_cap)) { in pace_to_fill_cwnd()
17413 fill_bw = rack->r_ctl.bw_rate_cap; in pace_to_fill_cwnd()
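
The pace_to_fill_cwnd() references sketch a fill-the-window rate: the usable window (min of snd_cwnd and cwnd_to_use) is turned into a bytes-per-second figure over the last measured RTT, then bounded by fillcw_cap and the hard bw_rate_cap. Only the division by the RTT and the two caps are visible above; the microsecond-to-second scaling constant below is an assumption:

    /* Derive a fill-cwnd pacing rate in bytes/sec from the usable window
     * and the last RTT in microseconds, then apply the two caps. */
    #include <stdint.h>

    #define USECS_PER_SEC 1000000ULL  /* assumed scaling factor */

    static uint64_t
    fill_cwnd_rate(uint64_t usable_window_bytes, uint64_t last_us_rtt,
        uint64_t fillcw_cap, uint64_t bw_rate_cap)
    {
            uint64_t fill_bw;

            if (last_us_rtt == 0)
                    return (0);
            fill_bw = usable_window_bytes * USECS_PER_SEC;
            fill_bw /= last_us_rtt;
            if (fillcw_cap && fill_bw >= fillcw_cap)
                    fill_bw = fillcw_cap;
            if (bw_rate_cap && fill_bw > bw_rate_cap)
                    fill_bw = bw_rate_cap;
            return (fill_bw);
    }
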
17441 (rack->r_ctl.rc_user_set_min_segs == 1)) in rack_get_pacing_delay()
17461 if (rack->r_ctl.rc_rack_min_rtt) in rack_get_pacing_delay()
17462 srtt = rack->r_ctl.rc_rack_min_rtt; in rack_get_pacing_delay()
17465 if (rack->r_ctl.rc_rack_largest_cwnd) in rack_get_pacing_delay()
17466 cwnd = rack->r_ctl.rc_rack_largest_cwnd; in rack_get_pacing_delay()
17468 cwnd = rack->r_ctl.cwnd_to_use; in rack_get_pacing_delay()
17512 return (rack->r_ctl.rc_min_to); in rack_get_pacing_delay()
17516 } else if ((rack->r_ctl.init_rate == 0) && in rack_get_pacing_delay()
17517 (rack->r_ctl.gp_bw == 0)) { in rack_get_pacing_delay()
17526 rate_wanted = min(rack->rc_tp->snd_cwnd, rack->r_ctl.cwnd_to_use); in rack_get_pacing_delay()
17533 rate_wanted /= (uint64_t)rack->r_ctl.rc_last_us_rtt; in rack_get_pacing_delay()
17537 rack->r_ctl.cwnd_to_use, in rack_get_pacing_delay()
17539 rack->r_ctl.rc_last_us_rtt, in rack_get_pacing_delay()
17579 if (rack->r_ctl.crte) { in rack_get_pacing_delay()
17584 tcp_rel_pacing_rate(rack->r_ctl.crte, rack->rc_tp); in rack_get_pacing_delay()
17585 rack->r_ctl.crte = NULL; in rack_get_pacing_delay()
17590 if (rack->r_ctl.crte && in rack_get_pacing_delay()
17591 (tcp_hw_highest_rate(rack->r_ctl.crte) < rate_wanted)) { in rack_get_pacing_delay()
17604 tcp_rel_pacing_rate(rack->r_ctl.crte, rack->rc_tp); in rack_get_pacing_delay()
17605 rack->r_ctl.crte = NULL; in rack_get_pacing_delay()
17610 if ((rack->r_ctl.crte != NULL) && (rack->rc_inp->inp_snd_tag == NULL)) { in rack_get_pacing_delay()
17615 tcp_rel_pacing_rate(rack->r_ctl.crte, rack->rc_tp); in rack_get_pacing_delay()
17616 rack->r_ctl.crte = NULL; in rack_get_pacing_delay()
17637 if ((rack->r_ctl.crte != NULL) && in rack_get_pacing_delay()
17638 (tcp_hw_highest_rate(rack->r_ctl.crte) < rate_wanted)) { in rack_get_pacing_delay()
17651 tcp_rel_pacing_rate(rack->r_ctl.crte, rack->rc_tp); in rack_get_pacing_delay()
17652 rack->r_ctl.crte = NULL; in rack_get_pacing_delay()
17670 rack->r_ctl.crte = tcp_set_pacing_rate(rack->rc_tp, in rack_get_pacing_delay()
17674 &err, &rack->r_ctl.crte_prev_rate); in rack_get_pacing_delay()
17675 if (rack->r_ctl.crte) { in rack_get_pacing_delay()
17677 rack->r_ctl.rc_pace_max_segs = tcp_get_pacing_burst_size_w_divisor(tp, rate_wanted, segsiz, in rack_get_pacing_delay()
17678 pace_one, rack->r_ctl.crte, in rack_get_pacing_delay()
17679 NULL, rack->r_ctl.pace_len_divisor); in rack_get_pacing_delay()
17681 rate_wanted, rack->r_ctl.crte->rate, __LINE__, in rack_get_pacing_delay()
17683 rack->r_ctl.last_hw_bw_req = rate_wanted; in rack_get_pacing_delay()
17688 (rack->r_ctl.last_hw_bw_req != rate_wanted)) { in rack_get_pacing_delay()
17693 (rate_wanted < rack->r_ctl.crte->rate)) { in rack_get_pacing_delay()
17712 if ((rate_wanted > rack->r_ctl.crte->rate) || in rack_get_pacing_delay()
17713 (rate_wanted <= rack->r_ctl.crte_prev_rate)) { in rack_get_pacing_delay()
17721 bw_est, rack->r_ctl.crte->rate, __LINE__, in rack_get_pacing_delay()
17723 tcp_rel_pacing_rate(rack->r_ctl.crte, rack->rc_tp); in rack_get_pacing_delay()
17724 rack->r_ctl.crte = NULL; in rack_get_pacing_delay()
17730 nrte = tcp_chg_pacing_rate(rack->r_ctl.crte, in rack_get_pacing_delay()
17735 &err, &rack->r_ctl.crte_prev_rate); in rack_get_pacing_delay()
17742 rack->r_ctl.crte = NULL; in rack_get_pacing_delay()
17748 } else if (nrte != rack->r_ctl.crte) { in rack_get_pacing_delay()
17749 rack->r_ctl.crte = nrte; in rack_get_pacing_delay()
17750 rack->r_ctl.rc_pace_max_segs = tcp_get_pacing_burst_size_w_divisor(tp, rate_wanted, in rack_get_pacing_delay()
17751 segsiz, pace_one, rack->r_ctl.crte, in rack_get_pacing_delay()
17752 NULL, rack->r_ctl.pace_len_divisor); in rack_get_pacing_delay()
17754 rate_wanted, rack->r_ctl.crte->rate, __LINE__, in rack_get_pacing_delay()
17756 rack->r_ctl.last_hw_bw_req = rate_wanted; in rack_get_pacing_delay()
17762 rate_wanted, rack->r_ctl.crte->rate, __LINE__, in rack_get_pacing_delay()
17764 rack->r_ctl.last_hw_bw_req = rate_wanted; in rack_get_pacing_delay()
17796 if (rack->r_ctl.crte && (rack->r_ctl.crte->rs_num_enobufs > 0)) { in rack_get_pacing_delay()
17806 hw_boost_delay = rack->r_ctl.crte->time_between * rack_enobuf_hw_boost_mult; in rack_get_pacing_delay()
17844 rack->r_ctl.rc_gp_cumack_ts = 0; in rack_start_gp_measurement()
17845 rack->r_ctl.rc_gp_lowrtt = 0xffffffff; in rack_start_gp_measurement()
17846 rack->r_ctl.rc_gp_high_rwnd = rack->rc_tp->snd_wnd; in rack_start_gp_measurement()
17852 (SEQ_GEQ(tp->gput_seq, rack->r_ctl.rc_probertt_sndmax_atexit))) in rack_start_gp_measurement()
17855 tp->gput_ts = rack->r_ctl.last_cumack_advance; in rack_start_gp_measurement()
17861 rack->r_ctl.rc_gp_output_ts = rack_to_usec_ts(&tv); in rack_start_gp_measurement()
17871 if ((rack->rc_gp_filled == 0) && (rack->r_ctl.init_rate == 0)) { in rack_start_gp_measurement()
17880 (((uint64_t)rack->r_ctl.rc_app_limited_cnt << 32) | (uint64_t)rack->r_ctl.rc_gp_output_ts), in rack_start_gp_measurement()
17893 if (rack->r_ctl.rc_app_limited_cnt == 0) { in rack_start_gp_measurement()
17900 my_rsm = tqhash_min(rack->r_ctl.tqh); in rack_start_gp_measurement()
17907 if (rack->r_ctl.rc_first_appl == NULL) { in rack_start_gp_measurement()
17928 my_rsm = tqhash_next(rack->r_ctl.tqh, rack->r_ctl.rc_first_appl); in rack_start_gp_measurement()
17932 my_rsm = tqhash_next(rack->r_ctl.tqh, my_rsm); in rack_start_gp_measurement()
17967 nrsm = tqhash_next(rack->r_ctl.tqh, my_rsm); in rack_start_gp_measurement()
17978 rack->r_ctl.rc_gp_output_ts = my_rsm->r_tim_lastsent[0]; in rack_start_gp_measurement()
17980 rack->r_ctl.rc_gp_cumack_ts = 0; in rack_start_gp_measurement()
17981 if ((rack->r_ctl.cleared_app_ack == 1) && in rack_start_gp_measurement()
17982 (SEQ_GEQ(rack->r_ctl.cleared_app_ack, tp->gput_seq))) { in rack_start_gp_measurement()
17989 rack->r_ctl.cleared_app_ack = 0; in rack_start_gp_measurement()
17996 (((uint64_t)rack->r_ctl.rc_app_limited_cnt << 32) | (uint64_t)rack->r_ctl.rc_gp_output_ts), in rack_start_gp_measurement()
18015 rack->r_ctl.rc_gp_cumack_ts = 0; in rack_start_gp_measurement()
18017 my_rsm = tqhash_find(rack->r_ctl.tqh, startseq); in rack_start_gp_measurement()
18019 rack->r_ctl.rc_gp_output_ts = my_rsm->r_tim_lastsent[0]; in rack_start_gp_measurement()
18041 rack->r_ctl.rc_gp_output_ts = rack_to_usec_ts(&tv); in rack_start_gp_measurement()
18049 (((uint64_t)rack->r_ctl.rc_app_limited_cnt << 32) | (uint64_t)rack->r_ctl.rc_gp_output_ts), in rack_start_gp_measurement()
18071 flight = ctf_flight_size(tp, rack->r_ctl.rc_sacked); in rack_what_can_we_send()
18112 log.u_bbr.flex6 = rack->r_ctl.rc_agg_early; in rack_log_fsb()
18123 log.u_bbr.inflight = ctf_flight_size(rack->rc_tp, rack->r_ctl.rc_sacked); in rack_log_fsb()
18301 m = rack->r_ctl.fsb.m; in rack_fo_m_copym()
18302 if (M_TRAILINGROOM(m) != rack->r_ctl.fsb.o_t_len) { in rack_fo_m_copym()
18309 KASSERT((rack->r_ctl.fsb.o_t_len > M_TRAILINGROOM(m)), in rack_fo_m_copym()
18314 rack->r_ctl.fsb.o_t_len, in rack_fo_m_copym()
18315 rack->r_ctl.fsb.o_m_len, in rack_fo_m_copym()
18317 rack->r_ctl.fsb.o_m_len += (rack->r_ctl.fsb.o_t_len - M_TRAILINGROOM(m)); in rack_fo_m_copym()
18318 rack->r_ctl.fsb.o_t_len = M_TRAILINGROOM(m); in rack_fo_m_copym()
18320 if (m->m_len < rack->r_ctl.fsb.o_m_len) { in rack_fo_m_copym()
18325 KASSERT((rack->r_ctl.fsb.off >= (rack->r_ctl.fsb.o_m_len - m->m_len)), in rack_fo_m_copym()
18328 rack, rack->r_ctl.fsb.o_m_len, in rack_fo_m_copym()
18329 rack->r_ctl.fsb.off)); in rack_fo_m_copym()
18331 if (rack->r_ctl.fsb.off >= (rack->r_ctl.fsb.o_m_len - m->m_len)) in rack_fo_m_copym()
18332 rack->r_ctl.fsb.off -= (rack->r_ctl.fsb.o_m_len - m->m_len); in rack_fo_m_copym()
18334 rack->r_ctl.fsb.off = 0; in rack_fo_m_copym()
18335 rack->r_ctl.fsb.o_m_len = m->m_len; in rack_fo_m_copym()
18337 } else if (m->m_len > rack->r_ctl.fsb.o_m_len) { in rack_fo_m_copym()
18342 soff = rack->r_ctl.fsb.off; in rack_fo_m_copym()
18350 *s_mb = rack->r_ctl.fsb.m; in rack_fo_m_copym()
18352 &rack->r_ctl.fsb, in rack_fo_m_copym()
18353 seglimit, segsize, rack->r_ctl.fsb.hw_tls); in rack_fo_m_copym()
18374 log.u_bbr.flex4 = (uint32_t)rack->r_ctl.crte->using; in rack_log_queue_level()
18375 log.u_bbr.flex5 = (uint32_t)rack->r_ctl.crte->rs_num_enobufs; in rack_log_queue_level()
18376 log.u_bbr.flex6 = rack->r_ctl.crte->time_between; in rack_log_queue_level()
18380 log.u_bbr.delRate = rack->r_ctl.crte->rate; in rack_log_queue_level()
18382 log.u_bbr.inflight = ctf_flight_size(rack->rc_tp, rack->r_ctl.rc_sacked); in rack_log_queue_level()
18425 lentime = (rack->r_ctl.rc_pace_max_segs / segsiz); in rack_check_queue_level()
18439 log.u_bbr.flex4 = (uint32_t)rack->r_ctl.crte->using; in rack_check_queue_level()
18440 log.u_bbr.flex5 = (uint32_t)rack->r_ctl.crte->rs_num_enobufs; in rack_check_queue_level()
18441 log.u_bbr.flex6 = rack->r_ctl.crte->time_between; in rack_check_queue_level()
18445 log.u_bbr.delRate = rack->r_ctl.crte->rate; in rack_check_queue_level()
18448 log.u_bbr.inflight = ctf_flight_size(rack->rc_tp, rack->r_ctl.rc_sacked); in rack_check_queue_level()
18492 ip6 = (struct ip6_hdr *)rack->r_ctl.fsb.tcp_ip_hdr; in rack_fast_rsm_output()
18497 ip = (struct ip *)rack->r_ctl.fsb.tcp_ip_hdr; in rack_fast_rsm_output()
18511 segsiz = min(ctf_fixed_maxseg(tp), rack->r_ctl.rc_pace_min_segs); in rack_fast_rsm_output()
18538 udp = rack->r_ctl.fsb.udp; in rack_fast_rsm_output()
18541 if (rack->r_ctl.rc_pace_max_segs) in rack_fast_rsm_output()
18542 max_val = rack->r_ctl.rc_pace_max_segs; in rack_fast_rsm_output()
18562 th = rack->r_ctl.fsb.th; in rack_fast_rsm_output()
18617 th->th_win = htons((u_short)(rack->r_ctl.fsb.recwin >> tp->rcv_scale)); in rack_fast_rsm_output()
18686 if (rack->r_ctl.crte != NULL) { in rack_fast_rsm_output()
18767 ip6->ip6_hlim = rack->r_ctl.fsb.hoplimit; in rack_fast_rsm_output()
18781 ip->ip_ttl = rack->r_ctl.fsb.hoplimit; in rack_fast_rsm_output()
18807 memcpy(cpto, rack->r_ctl.fsb.tcp_ip_hdr, rack->r_ctl.fsb.tcp_ip_hdr_len); in rack_fast_rsm_output()
18808 th = (struct tcphdr *)(cpto + ((uint8_t *)rack->r_ctl.fsb.th - rack->r_ctl.fsb.tcp_ip_hdr)); in rack_fast_rsm_output()
18828 log.u_bbr.flex1 = rack->r_ctl.rc_prr_sndcnt; in rack_fast_rsm_output()
18829 log.u_bbr.flex2 = rack->r_ctl.rc_pace_min_segs; in rack_fast_rsm_output()
18830 log.u_bbr.flex3 = rack->r_ctl.rc_pace_max_segs; in rack_fast_rsm_output()
18833 log.u_bbr.flex6 = rack->r_ctl.rc_agg_early; in rack_fast_rsm_output()
18834 log.u_bbr.applimited = rack->r_ctl.rc_agg_delayed; in rack_fast_rsm_output()
18836 log.u_bbr.cur_del_rate = rack->r_ctl.gp_bw; in rack_fast_rsm_output()
18845 log.u_bbr.inflight = ctf_flight_size(rack->rc_tp, rack->r_ctl.rc_sacked); in rack_fast_rsm_output()
18861 log.u_bbr.lt_epoch = rack->r_ctl.cwnd_to_use; in rack_fast_rsm_output()
18874 if ((rack->r_ctl.crte != NULL) && in rack_fast_rsm_output()
18904 rack->r_ctl.rc_agg_delayed = 0; in rack_fast_rsm_output()
18907 rack->r_ctl.rc_agg_early = 0; in rack_fast_rsm_output()
18913 rack->r_ctl.rc_tlp_cnt_out++; in rack_fast_rsm_output()
18921 rack->r_ctl.last_sent_tlp_seq = rsm->r_start; in rack_fast_rsm_output()
18922 rack->r_ctl.last_sent_tlp_len = rsm->r_end - rsm->r_start; in rack_fast_rsm_output()
18924 if (rack->r_ctl.rc_prr_sndcnt >= len) in rack_fast_rsm_output()
18925 rack->r_ctl.rc_prr_sndcnt -= len; in rack_fast_rsm_output()
18927 rack->r_ctl.rc_prr_sndcnt = 0; in rack_fast_rsm_output()
18932 rack->r_ctl.retran_during_recovery += len; in rack_fast_rsm_output()
18949 if (rack->r_ctl.crte != NULL) { in rack_fast_rsm_output()
18960 if (rack->r_ctl.crte != NULL) { in rack_fast_rsm_output()
18962 tcp_rl_log_enobuf(rack->r_ctl.crte); in rack_fast_rsm_output()
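
The end of the rack_fast_rsm_output() references shows the PRR send-credit accounting: the bytes just (re)sent are subtracted from rc_prr_sndcnt, saturating at zero rather than wrapping (the same idiom reappears later in rack_output()). A one-function sketch:

    /* Consume PRR send credit for a send of len bytes, never going
     * below zero.  prr_sndcnt stands in for r_ctl.rc_prr_sndcnt. */
    #include <stdint.h>

    static uint32_t
    prr_consume(uint32_t prr_sndcnt, uint32_t len)
    {
            if (prr_sndcnt >= len)
                    prr_sndcnt -= len;
            else
                    prr_sndcnt = 0;
            return (prr_sndcnt);
    }
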
19017 sendwin = min(rack->r_ctl.cwnd_to_use, tp->snd_wnd); in rack_sndbuf_autoscale()
19081 ip6 = (struct ip6_hdr *)rack->r_ctl.fsb.tcp_ip_hdr; in rack_fast_output()
19087 ip = (struct ip *)rack->r_ctl.fsb.tcp_ip_hdr; in rack_fast_output()
19095 rack->r_ctl.cwnd_to_use = tp->snd_cwnd; in rack_fast_output()
19097 segsiz = min(ctf_fixed_maxseg(tp), rack->r_ctl.rc_pace_min_segs); in rack_fast_output()
19099 len = rack->r_ctl.fsb.left_to_send; in rack_fast_output()
19101 flags = rack->r_ctl.fsb.tcp_flags; in rack_fast_output()
19114 udp = rack->r_ctl.fsb.udp; in rack_fast_output()
19117 if (rack->r_ctl.rc_pace_max_segs) in rack_fast_output()
19118 max_val = rack->r_ctl.rc_pace_max_segs; in rack_fast_output()
19139 th = rack->r_ctl.fsb.th; in rack_fast_output()
19185 th->th_win = htons((u_short)(rack->r_ctl.fsb.recwin >> tp->rcv_scale)); in rack_fast_output()
19198 if (rack->r_ctl.fsb.m == NULL) in rack_fast_output()
19213 if (rack->r_ctl.fsb.rfo_apply_push && in rack_fast_output()
19214 (len == rack->r_ctl.fsb.left_to_send)) { in rack_fast_output()
19323 ip6->ip6_hlim = rack->r_ctl.fsb.hoplimit; in rack_fast_output()
19337 ip->ip_ttl = rack->r_ctl.fsb.hoplimit; in rack_fast_output()
19357 memcpy(cpto, rack->r_ctl.fsb.tcp_ip_hdr, rack->r_ctl.fsb.tcp_ip_hdr_len); in rack_fast_output()
19358 th = (struct tcphdr *)(cpto + ((uint8_t *)rack->r_ctl.fsb.th - rack->r_ctl.fsb.tcp_ip_hdr)); in rack_fast_output()
19365 if ((rack->r_ctl.crte != NULL) && in rack_fast_output()
19377 log.u_bbr.flex1 = rack->r_ctl.rc_prr_sndcnt; in rack_fast_output()
19378 log.u_bbr.flex2 = rack->r_ctl.rc_pace_min_segs; in rack_fast_output()
19379 log.u_bbr.flex3 = rack->r_ctl.rc_pace_max_segs; in rack_fast_output()
19382 log.u_bbr.flex6 = rack->r_ctl.rc_agg_early; in rack_fast_output()
19383 log.u_bbr.applimited = rack->r_ctl.rc_agg_delayed; in rack_fast_output()
19385 log.u_bbr.cur_del_rate = rack->r_ctl.gp_bw; in rack_fast_output()
19391 log.u_bbr.inflight = ctf_flight_size(rack->rc_tp, rack->r_ctl.rc_sacked); in rack_fast_output()
19393 log.u_bbr.lt_epoch = rack->r_ctl.cwnd_to_use; in rack_fast_output()
19433 rack->r_ctl.rc_agg_delayed = 0; in rack_fast_output()
19436 rack->r_ctl.rc_agg_early = 0; in rack_fast_output()
19440 rack->r_ctl.lt_timemark = tcp_tv_to_lusectick(tv); in rack_fast_output()
19441 rack->r_ctl.lt_seq = tp->snd_una; in rack_fast_output()
19444 (((tp->snd_max + len) - rack->r_ctl.lt_seq) > 0x7fffffff)) { in rack_fast_output()
19452 rack->r_ctl.lt_bw_bytes += (tp->snd_una - rack->r_ctl.lt_seq); in rack_fast_output()
19453 rack->r_ctl.lt_seq = tp->snd_una; in rack_fast_output()
19455 if (tmark > rack->r_ctl.lt_timemark) { in rack_fast_output()
19456 rack->r_ctl.lt_bw_time += (tmark - rack->r_ctl.lt_timemark); in rack_fast_output()
19457 rack->r_ctl.lt_timemark = tmark; in rack_fast_output()
19461 NULL, add_flag, s_mb, s_soff, rack->r_ctl.fsb.hw_tls, segsiz); in rack_fast_output()
19463 rack->r_ctl.rc_tlp_rxt_last_time = cts; in rack_fast_output()
19468 tcp_account_for_send(tp, len, 0, 0, rack->r_ctl.fsb.hw_tls); in rack_fast_output()
19488 if (len <= rack->r_ctl.fsb.left_to_send) in rack_fast_output()
19489 rack->r_ctl.fsb.left_to_send -= len; in rack_fast_output()
19491 rack->r_ctl.fsb.left_to_send = 0; in rack_fast_output()
19492 if (rack->r_ctl.fsb.left_to_send < segsiz) { in rack_fast_output()
19494 rack->r_ctl.fsb.left_to_send = 0; in rack_fast_output()
19505 if ((rack->r_ctl.fsb.left_to_send >= segsiz) && in rack_fast_output()
19510 th = rack->r_ctl.fsb.th; in rack_fast_output()
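
The rack_fast_output() references also show the long-term bandwidth sampling: newly acknowledged bytes (snd_una - lt_seq) and the microseconds elapsed since the last time mark are folded into running totals, and both markers advance. A sketch of the accumulation step, with field names mirroring the r_ctl.lt_* fields but an illustrative struct:

    /* Fold one sample into the long-term bandwidth accumulators. */
    #include <stdint.h>

    struct lt_bw_sample {
            uint64_t lt_bw_bytes;  /* accumulated bytes */
            uint64_t lt_bw_time;   /* accumulated microseconds */
            uint64_t lt_timemark;  /* time mark of last sample (us) */
            uint32_t lt_seq;       /* snd_una at last sample */
    };

    static void
    lt_bw_accumulate(struct lt_bw_sample *lt, uint32_t snd_una, uint64_t tmark)
    {
            lt->lt_bw_bytes += (uint64_t)(snd_una - lt->lt_seq);
            lt->lt_seq = snd_una;
            if (tmark > lt->lt_timemark) {
                    lt->lt_bw_time += (tmark - lt->lt_timemark);
                    lt->lt_timemark = tmark;
            }
    }
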
19545 rack->r_ctl.fsb.m = sbsndmbuf(sb, (tp->snd_max - tp->snd_una), &rack->r_ctl.fsb.off); in rack_setup_fast_output()
19546 rack->r_ctl.fsb.o_m_len = rack->r_ctl.fsb.m->m_len; in rack_setup_fast_output()
19547 rack->r_ctl.fsb.o_t_len = M_TRAILINGROOM(rack->r_ctl.fsb.m); in rack_setup_fast_output()
19548 rack->r_ctl.fsb.tcp_flags = flags; in rack_setup_fast_output()
19549 rack->r_ctl.fsb.left_to_send = orig_len - len; in rack_setup_fast_output()
19550 if (rack->r_ctl.fsb.left_to_send < pace_max_seg) { in rack_setup_fast_output()
19556 rack->r_ctl.fsb.left_to_send = rounddown(rack->r_ctl.fsb.left_to_send, pace_max_seg); in rack_setup_fast_output()
19559 rack->r_ctl.fsb.hw_tls = 1; in rack_setup_fast_output()
19561 rack->r_ctl.fsb.hw_tls = 0; in rack_setup_fast_output()
19562 KASSERT((rack->r_ctl.fsb.left_to_send <= (sbavail(sb) - (tp->snd_max - tp->snd_una))), in rack_setup_fast_output()
19564 rack, rack->r_ctl.fsb.left_to_send, sbavail(sb), in rack_setup_fast_output()
19566 if (rack->r_ctl.fsb.left_to_send < segsiz) in rack_setup_fast_output()
19569 if (rack->r_ctl.fsb.left_to_send == (sbavail(sb) - (tp->snd_max - tp->snd_una))) in rack_setup_fast_output()
19570 rack->r_ctl.fsb.rfo_apply_push = 1; in rack_setup_fast_output()
19572 rack->r_ctl.fsb.rfo_apply_push = 0; in rack_setup_fast_output()
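
The rack_setup_fast_output() references show how the fast-send block is sized: the remainder after the current send becomes left_to_send, it is rounded down to a whole number of pacing bursts, and rfo_apply_push is set only when the block will drain the socket buffer. The branch taken when less than one burst remains is not visible above, so this sketch simply leaves such a remainder unrounded; names and parameters are illustrative:

    /* Plan the remaining fast-output block after sending len of orig_len. */
    #include <stdbool.h>
    #include <stdint.h>

    #define ROUNDDOWN(x, y) (((x) / (y)) * (y))

    struct fsb_plan {
            uint32_t left_to_send;   /* ~ r_ctl.fsb.left_to_send */
            bool rfo_apply_push;     /* ~ r_ctl.fsb.rfo_apply_push */
    };

    static void
    plan_fast_output(struct fsb_plan *p, uint32_t orig_len, uint32_t len,
        uint32_t pace_max_seg, uint32_t sb_remaining)
    {
            p->left_to_send = orig_len - len;
            if (p->left_to_send >= pace_max_seg)
                    p->left_to_send = ROUNDDOWN(p->left_to_send, pace_max_seg);
            /* PUSH only when this block empties the socket buffer. */
            p->rfo_apply_push = (p->left_to_send == sb_remaining);
    }
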
19583 maxlen = (uint32_t)((rack->r_ctl.gp_bw * min_time) / (uint64_t)HPTS_USEC_IN_SEC); in rack_get_hpts_pacing_min_for_bw()
19595 rsm = tqhash_find(rack->r_ctl.tqh, rack->r_ctl.last_collapse_point); in rack_check_collapsed()
19614 rack->r_ctl.last_collapse_point = rsm->r_end; in rack_check_collapsed()
19616 if (SEQ_GEQ(rack->r_ctl.last_collapse_point, in rack_check_collapsed()
19617 rack->r_ctl.high_collapse_point)) { in rack_check_collapsed()
19766 (rack->r_ctl.rc_resend == NULL)) { /* not a retransmit */ in rack_output()
19783 if (((rack->r_ctl.rc_hpts_flags & PACE_PKT_OUTPUT) == 0) && in rack_output()
19792 if ((rack->r_ctl.rc_hpts_flags & PACE_PKT_OUTPUT) && in rack_output()
19793 TSTMP_GEQ(cts, rack->r_ctl.rc_last_output_to)) { in rack_output()
19795 delayed = cts - rack->r_ctl.rc_last_output_to; in rack_output()
19800 if (rack->r_ctl.rc_hpts_flags & PACE_TMR_MASK) { in rack_output()
19863 if ((rack->r_ctl.rc_hpts_flags & PACE_PKT_OUTPUT) && in rack_output()
19864 TSTMP_GT(rack->r_ctl.rc_last_output_to, cts)) { in rack_output()
19865 early = rack->r_ctl.rc_last_output_to - cts; in rack_output()
19869 rack->r_ctl.rc_agg_delayed += delayed; in rack_output()
19872 rack->r_ctl.rc_agg_early += early; in rack_output()
19876 rack->r_ctl.rc_agg_delayed = rack->r_ctl.rc_agg_early = 0; in rack_output()
19880 rack->r_ctl.rc_hpts_flags &= ~PACE_PKT_OUTPUT; in rack_output()
19939 rack->r_ctl.rc_went_idle_time && in rack_output()
19940 (cts > rack->r_ctl.rc_went_idle_time)) { in rack_output()
19941 tot_idle = (cts - rack->r_ctl.rc_went_idle_time); in rack_output()
19945 rack->r_ctl.rc_lower_rtt_us_cts = cts; in rack_output()
19946 rack->r_ctl.rc_time_probertt_entered = rack->r_ctl.rc_lower_rtt_us_cts; in rack_output()
19947 rack->r_ctl.rc_time_probertt_starts = rack->r_ctl.rc_lower_rtt_us_cts; in rack_output()
19948 rack->r_ctl.rc_time_of_last_probertt = rack->r_ctl.rc_lower_rtt_us_cts; in rack_output()
19956 (rack->r_ctl.fsb.tcp_ip_hdr) && in rack_output()
19994 rnds = rack->r_ctl.current_round - rack->r_ctl.last_pcm_round; in rack_output()
19995 rack->r_ctl.pcm_idle_rounds += rtts_idle; in rack_output()
19996 if ((rnds + rack->r_ctl.pcm_idle_rounds) >= rack_pcm_every_n_rounds) { in rack_output()
19998 rack_log_pcm(rack, 8, rack->r_ctl.last_pcm_round, rtts_idle, rack->r_ctl.current_round ); in rack_output()
20007 segsiz = min(ctf_fixed_maxseg(tp), rack->r_ctl.rc_pace_min_segs); in rack_output()
20009 if (rack->r_ctl.rc_pace_max_segs == 0) in rack_output()
20012 pace_max_seg = rack->r_ctl.rc_pace_max_segs; in rack_output()
20014 (rack->r_ctl.pcm_max_seg == 0)) { in rack_output()
20020 rack->r_ctl.pcm_max_seg = rc_init_window(rack); in rack_output()
20021 if (rack->r_ctl.pcm_max_seg < (ctf_fixed_maxseg(tp) * 10)) { in rack_output()
20025 rack->r_ctl.pcm_max_seg = ctf_fixed_maxseg(tp) * 10; in rack_output()
20028 if ((rack->r_ctl.pcm_max_seg != 0) && (rack->pcm_needed == 1)) { in rack_output()
20035 if (tp->snd_cwnd > ctf_flight_size(rack->rc_tp, rack->r_ctl.rc_sacked)) in rack_output()
20036 cwa = tp->snd_cwnd - ctf_flight_size(rack->rc_tp, rack->r_ctl.rc_sacked); in rack_output()
20039 if ((cwa >= rack->r_ctl.pcm_max_seg) && in rack_output()
20040 (rw_avail > rack->r_ctl.pcm_max_seg)) { in rack_output()
20042 pace_max_seg = rack->r_ctl.pcm_max_seg; in rack_output()
20048 cwa, rack->r_ctl.pcm_max_seg, rw_avail); in rack_output()
20052 cwnd_to_use = rack->r_ctl.cwnd_to_use = tp->snd_cwnd; in rack_output()
20064 TAILQ_INSERT_TAIL(&rack->r_ctl.rc_free, rsm, r_tnext); in rack_output()
20077 if (rack->r_ctl.rc_resend) { in rack_output()
20079 rsm = rack->r_ctl.rc_resend; in rack_output()
20080 rack->r_ctl.rc_resend = NULL; in rack_output()
20098 rack->r_ctl.last_collapse_point = rsm->r_end; in rack_output()
20100 if (SEQ_GEQ(rack->r_ctl.last_collapse_point, in rack_output()
20101 rack->r_ctl.high_collapse_point)) in rack_output()
20138 } else if (rack->r_ctl.rc_tlpsend) { in rack_output()
20149 rsm = rack->r_ctl.rc_tlpsend; in rack_output()
20152 rack->r_ctl.rc_tlpsend = NULL; in rack_output()
20187 flight = ctf_flight_size(tp, rack->r_ctl.rc_out_at_rto); in rack_output()
20201 rsm = TAILQ_FIRST(&rack->r_ctl.rc_tmap); in rack_output()
20205 rack->r_ctl.rc_out_at_rto = 0; in rack_output()
20216 rack->r_ctl.rc_out_at_rto = 0; in rack_output()
20250 (rack->r_ctl.rc_num_maps_alloced >= V_tcp_map_entries_limit)) { in rack_output()
20292 (rack->r_ctl.rc_scw == NULL) && in rack_output()
20297 rack->r_ctl.rc_scw = tcp_shared_cwnd_alloc(tp, in rack_output()
20298 &rack->r_ctl.rc_scw_index, in rack_output()
20301 if (rack->r_ctl.rc_scw && in rack_output()
20305 tcp_shared_cwnd_active(rack->r_ctl.rc_scw, rack->r_ctl.rc_scw_index); in rack_output()
20308 if (rack->r_ctl.rc_scw) { in rack_output()
20310 rack->r_ctl.cwnd_to_use = cwnd_to_use = tcp_shared_cwnd_update(rack->r_ctl.rc_scw, in rack_output()
20311 rack->r_ctl.rc_scw_index, in rack_output()
20326 end_rsm = TAILQ_LAST_FAST(&rack->r_ctl.rc_tmap, rack_sendmap, r_tnext); in rack_output()
20349 if (rack->r_ctl.rc_tlp_new_data) { in rack_output()
20351 if (rack->r_ctl.rc_tlp_new_data > (uint32_t) (avail - sb_offset)) { in rack_output()
20352 rack->r_ctl.rc_tlp_new_data = (uint32_t) (avail - sb_offset); in rack_output()
20354 if ((rack->r_ctl.rc_tlp_new_data + sb_offset) > tp->snd_wnd) { in rack_output()
20360 len = rack->r_ctl.rc_tlp_new_data; in rack_output()
20362 rack->r_ctl.rc_tlp_new_data = 0; in rack_output()
20366 if ((rack->r_ctl.crte == NULL) && in rack_output()
20399 if ((rack->r_ctl.rc_prr_sndcnt + outstanding) > tp->snd_wnd) { in rack_output()
20419 if (len > rack->r_ctl.rc_prr_sndcnt) { in rack_output()
20420 len = rack->r_ctl.rc_prr_sndcnt; in rack_output()
20434 if (rack->r_ctl.rc_prr_sendalot == 0) { in rack_output()
20545 if ((tp->snd_wnd < min((rack->r_ctl.rc_high_rwnd/2), minseg)) && in rack_output()
20566 (ctf_flight_size(tp, rack->r_ctl.rc_sacked) > (2 * segsiz)) && in rack_output()
20581 min((rack->r_ctl.rc_high_rwnd/2), minseg)) && in rack_output()
20582 (ctf_flight_size(tp, rack->r_ctl.rc_sacked) > (2 * segsiz)) && in rack_output()
20594 } else if ((rack->r_ctl.crte != NULL) && in rack_output()
20597 (ctf_flight_size(tp, rack->r_ctl.rc_sacked) >= (2 * segsiz)) && in rack_output()
20865 rack->r_ctl.fsb.recwin = recwin; in rack_output()
20896 rsm = tqhash_max(rack->r_ctl.tqh); in rack_output()
20905 rack->r_ctl.rc_agg_delayed = 0; in rack_output()
20908 rack->r_ctl.rc_agg_early = 0; in rack_output()
20910 min(max(segsiz, (rack->r_ctl.rc_high_rwnd/2)), in rack_output()
20915 rack->r_ctl.rc_prr_sndcnt = 0; in rack_output()
20920 rack->r_ctl.rc_prr_sndcnt = 0; in rack_output()
20945 (rack->r_ctl.rc_prr_sndcnt < segsiz)) { in rack_output()
21009 rack->r_ctl.rc_gp_srtt /*flex1*/, in rack_output()
21016 rsm = tqhash_max(rack->r_ctl.tqh); in rack_output()
21018 if (rack->r_ctl.rc_app_limited_cnt == 0) in rack_output()
21019 rack->r_ctl.rc_end_appl = rack->r_ctl.rc_first_appl = rsm; in rack_output()
21026 if (rack->r_ctl.rc_end_appl) in rack_output()
21027 rack->r_ctl.rc_end_appl->r_nseq_appl = rsm->r_start; in rack_output()
21028 rack->r_ctl.rc_end_appl = rsm; in rack_output()
21031 rack->r_ctl.rc_app_limited_cnt++; in rack_output()
21035 rack->r_ctl.rc_app_limited_cnt, seq, in rack_output()
21044 (tp->snd_wnd < min((rack->r_ctl.rc_high_rwnd/2), minseg))) { in rack_output()
21046 rack_enter_persist(tp, rack, rack->r_ctl.rc_rcvtime, tp->snd_una); in rack_output()
21053 rack->r_ctl.rc_scw) { in rack_output()
21054 tcp_shared_cwnd_idle(rack->r_ctl.rc_scw, rack->r_ctl.rc_scw_index); in rack_output()
21078 if ((rack->r_ctl.crte != NULL) && in rack_output()
21090 rack->r_ctl.rc_agg_delayed = 0; in rack_output()
21091 rack->r_ctl.rc_agg_early = 0; in rack_output()
21136 (rack->r_ctl.pcm_max_seg > 0) && in rack_output()
21137 (len >= rack->r_ctl.pcm_max_seg)) { in rack_output()
21140 rack_log_pcm(rack, 5, len, rack->r_ctl.pcm_max_seg, add_flag); in rack_output()
21142 rack_log_pcm(rack, 6, len, rack->r_ctl.pcm_max_seg, add_flag); in rack_output()
21243 (ms_cts == rack->r_ctl.last_rcv_tstmp_for_rtt)) { in rack_output()
21262 ((ms_cts - rack->r_ctl.last_rcv_tstmp_for_rtt) > RCV_PATH_RTT_MS) && in rack_output()
21266 (rack->r_ctl.current_round != 0) && in rack_output()
21269 rack->r_ctl.last_rcv_tstmp_for_rtt = ms_cts; in rack_output()
21270 rack->r_ctl.last_time_of_arm_rcv = cts; in rack_output()
21627 if ((ipoptlen == 0) && (rack->r_ctl.fsb.tcp_ip_hdr) && rack->r_fsb_inited) { in rack_output()
21630 ip6 = (struct ip6_hdr *)rack->r_ctl.fsb.tcp_ip_hdr; in rack_output()
21634 ip = (struct ip *)rack->r_ctl.fsb.tcp_ip_hdr; in rack_output()
21636 th = rack->r_ctl.fsb.th; in rack_output()
21637 udp = rack->r_ctl.fsb.udp; in rack_output()
21756 if ((ipoptlen == 0) && (rack->r_ctl.fsb.tcp_ip_hdr) && rack->r_fsb_inited) { in rack_output()
21760 memcpy(cpto, rack->r_ctl.fsb.tcp_ip_hdr, rack->r_ctl.fsb.tcp_ip_hdr_len); in rack_output()
21780 th = (struct tcphdr *)(cpto + ((uint8_t *)rack->r_ctl.fsb.th - rack->r_ctl.fsb.tcp_ip_hdr)); in rack_output()
21783 udp = (struct udphdr *)(cpto + ((uint8_t *)rack->r_ctl.fsb.udp - rack->r_ctl.fsb.tcp_ip_hdr)); in rack_output()
21880 if ((rack->r_ctl.crte != NULL) && in rack_output()
21894 log.u_bbr.flex1 = rack->r_ctl.rc_prr_sndcnt; in rack_output()
21895 log.u_bbr.flex2 = rack->r_ctl.rc_pace_min_segs; in rack_output()
21896 log.u_bbr.flex3 = rack->r_ctl.rc_pace_max_segs; in rack_output()
21899 log.u_bbr.flex6 = rack->r_ctl.rc_agg_early; in rack_output()
21900 log.u_bbr.applimited = rack->r_ctl.rc_agg_delayed; in rack_output()
21902 log.u_bbr.cur_del_rate = rack->r_ctl.gp_bw; in rack_output()
21924 log.u_bbr.inflight = ctf_flight_size(rack->rc_tp, rack->r_ctl.rc_sacked); in rack_output()
21978 rack->r_ctl.fsb.hoplimit = ip6->ip6_hlim = in6_selecthlim(inp, NULL); in rack_output()
22017 rack->r_ctl.fsb.hoplimit = ip->ip_ttl; in rack_output()
22074 rack_log_pcm(rack, 7, len, rack->r_ctl.pcm_max_seg, add_flag); in rack_output()
22078 rack->r_ctl.lt_timemark = tcp_tv_to_lusectick(&tv); in rack_output()
22079 rack->r_ctl.lt_seq = tp->snd_una; in rack_output()
22081 } else if (((rack_seq + len) - rack->r_ctl.lt_seq) > 0x7fffffff) { in rack_output()
22088 rack->r_ctl.lt_bw_bytes += (tp->snd_una - rack->r_ctl.lt_seq); in rack_output()
22089 rack->r_ctl.lt_seq = tp->snd_una; in rack_output()
22091 if (tmark > rack->r_ctl.lt_timemark) { in rack_output()
22092 rack->r_ctl.lt_bw_time += (tmark - rack->r_ctl.lt_timemark); in rack_output()
22093 rack->r_ctl.lt_timemark = tmark; in rack_output()
22103 rack->r_ctl.last_sent_tlp_seq = rsm->r_start; in rack_output()
22104 rack->r_ctl.last_sent_tlp_len = rsm->r_end - rsm->r_start; in rack_output()
22108 rack->r_ctl.rc_agg_delayed = 0; in rack_output()
22111 rack->r_ctl.rc_agg_early = 0; in rack_output()
22145 if (rack->r_ctl.rc_prr_sndcnt >= len) in rack_output()
22146 rack->r_ctl.rc_prr_sndcnt -= len; in rack_output()
22148 rack->r_ctl.rc_prr_sndcnt = 0; in rack_output()
22163 rack->r_ctl.rc_tlp_rxt_last_time = cts; in rack_output()
22174 rack->r_ctl.rc_loss_count += rsm->r_end - rsm->r_start; in rack_output()
22186 rack->r_ctl.rc_tlp_cnt_out = 0; in rack_output()
22195 rack->r_ctl.rc_tlp_cnt_out++; in rack_output()
22254 if (rack->r_ctl.fsb.left_to_send > len) in rack_output()
22255 rack->r_ctl.fsb.left_to_send -= len; in rack_output()
22257 rack->r_ctl.fsb.left_to_send = 0; in rack_output()
22258 if (rack->r_ctl.fsb.left_to_send < segsiz) in rack_output()
22261 rack->r_ctl.fsb.m = sbsndmbuf(sb, (tp->snd_max - tp->snd_una), &rack->r_ctl.fsb.off); in rack_output()
22262 rack->r_ctl.fsb.o_m_len = rack->r_ctl.fsb.m->m_len; in rack_output()
22263 rack->r_ctl.fsb.o_t_len = M_TRAILINGROOM(rack->r_ctl.fsb.m); in rack_output()
22310 rack->r_ctl.rc_agg_delayed = 0; in rack_output()
22313 rack->r_ctl.rc_agg_early = 0; in rack_output()
22343 if (rack->r_ctl.crte != NULL) { in rack_output()
22354 if (rack->r_ctl.crte != NULL) { in rack_output()
22356 tcp_rl_log_enobuf(rack->r_ctl.crte); in rack_output()
22418 rack->r_ctl.retran_during_recovery += len; in rack_output()
22464 rack->r_ctl.rc_out_at_rto -= (rsm->r_end - rsm->r_start); in rack_output()
22465 if (SEQ_GEQ(rsm->r_end, rack->r_ctl.rc_snd_max_at_rto)) { in rack_output()
22470 rack->r_ctl.rc_out_at_rto = 0; in rack_output()
22472 } else if (SEQ_GEQ(tp->snd_max, rack->r_ctl.rc_snd_max_at_rto)) { in rack_output()
22478 rack->r_ctl.rc_out_at_rto = 0; in rack_output()
22481 rack->r_ctl.fsb.recwin = recwin; in rack_output()
22483 SEQ_GT(tp->snd_max, rack->r_ctl.rc_snd_max_at_rto)) { in rack_output()
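
Near the top of the rack_output() references the pacer's slot slippage is accounted: if output runs at cts relative to the scheduled rc_last_output_to, the miss is classified as delayed (late) or early and folded into rc_agg_delayed / rc_agg_early, which later sends use to adjust their slots. The kernel uses wrap-aware TSTMP_GEQ/TSTMP_GT comparisons; the sketch below uses plain comparisons for brevity:

    /* Classify one pacer firing as late or early and accumulate it. */
    #include <stdint.h>

    struct pace_slip {
            uint32_t agg_delayed;  /* ~ r_ctl.rc_agg_delayed */
            uint32_t agg_early;    /* ~ r_ctl.rc_agg_early */
    };

    static void
    account_pacing_slip(struct pace_slip *ps, uint32_t cts,
        uint32_t last_output_to)
    {
            if (cts >= last_output_to)
                    ps->agg_delayed += (cts - last_output_to);  /* fired late */
            else
                    ps->agg_early += (last_output_to - cts);    /* fired early */
    }
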
22580 orig_val = rack->r_ctl.rc_pace_max_segs; in rack_update_seg()
22582 if (orig_val != rack->r_ctl.rc_pace_max_segs) in rack_update_seg()
22596 if (rack->r_ctl.rc_pace_min_segs != ctf_fixed_maxseg(tp)) { in rack_mtu_change()
22606 rack->r_ctl.rc_out_at_rto = ctf_flight_size(tp, in rack_mtu_change()
22607 rack->r_ctl.rc_sacked); in rack_mtu_change()
22608 rack->r_ctl.rc_snd_max_at_rto = tp->snd_max; in rack_mtu_change()
22611 TAILQ_FOREACH(rsm, &rack->r_ctl.rc_tmap, r_tnext) { in rack_mtu_change()
22615 sack_filter_clear(&rack->r_ctl.rack_sf, tp->snd_una); in rack_mtu_change()
22638 rack->r_ctl.pacing_method |= RACK_DGP_PACING; in rack_set_dgp()
22661 rack->r_ctl.rack_per_of_gp_ca = 100; in rack_set_dgp()
22665 rack->r_ctl.rc_no_push_at_mrtt = 2; in rack_set_dgp()
22675 rack->r_ctl.rack_per_of_gp_rec = 90; in rack_set_dgp()
22718 rack->r_ctl.pacing_method |= RACK_REG_PACING; in rack_set_profile()
22761 rack->r_ctl.rack_per_of_gp_ca = rack_do_dyn_mul; in rack_set_profile()
22763 rack->r_ctl.rack_per_of_gp_ca = rack_per_of_gp_ca; in rack_set_profile()
22767 rack->r_ctl.rc_no_push_at_mrtt = 0; in rack_set_profile()
22804 TAILQ_INSERT_TAIL(&rack->r_ctl.opt_list, dol, next); in rack_add_deferred_option()
22821 rack->r_ctl.rc_fixed_pacing_rate_rec = 0; in process_hybrid_pacing()
22822 rack->r_ctl.rc_fixed_pacing_rate_ca = 0; in process_hybrid_pacing()
22823 rack->r_ctl.rc_fixed_pacing_rate_ss = 0; in process_hybrid_pacing()
22866 rack->r_ctl.pacing_method |= RACK_REG_PACING; in process_hybrid_pacing()
22871 if (rack->r_ctl.pacing_method & RACK_DGP_PACING) { in process_hybrid_pacing()
22876 rack->r_ctl.pacing_method &= ~RACK_DGP_PACING; in process_hybrid_pacing()
22947 rack->r_ctl.pace_len_divisor = rack_default_pacing_divisor; in rack_process_option()
22950 rack->r_ctl.pace_len_divisor = RL_MIN_DIVISOR; in rack_process_option()
22952 rack->r_ctl.pace_len_divisor = optval; in rack_process_option()
22964 rack->r_ctl.saved_hibeta = optval; in rack_process_option()
22967 rack->r_ctl.rc_saved_beta.beta = optval; in rack_process_option()
22982 rack->r_ctl.timer_slop = optval; in rack_process_option()
22990 rack->r_ctl.timer_slop); in rack_process_option()
23018 rack->r_ctl.rc_saved_beta.beta_ecn = optval; in rack_process_option()
23019 rack->r_ctl.rc_saved_beta.newreno_flags = CC_NEWRENO_BETA_ECN_ENABLED; in rack_process_option()
23037 rack->r_ctl.req_measurements = optval; in rack_process_option()
23064 rack->r_ctl.fillcw_cap = loptval; in rack_process_option()
23069 (rack->r_ctl.pacing_method & RACK_DGP_PACING)) { in rack_process_option()
23081 rack->r_ctl.pacing_method |= RACK_REG_PACING; in rack_process_option()
23083 rack->r_ctl.pacing_method &= ~RACK_DGP_PACING; in rack_process_option()
23085 rack->r_ctl.bw_rate_cap = loptval; in rack_process_option()
23092 if (rack->r_ctl.side_chan_dis_mask & HYBRID_DIS_MASK) { in rack_process_option()
23100 rack->r_ctl.side_chan_dis_mask = optval; in rack_process_option()
23102 rack->r_ctl.side_chan_dis_mask = 0; in rack_process_option()
23151 rack->r_ctl.rc_no_push_at_mrtt = 0; in rack_process_option()
23153 rack->r_ctl.rc_no_push_at_mrtt = optval; in rack_process_option()
23211 rack->r_ctl.rack_per_of_gp_ca = optval; in rack_process_option()
23229 rack->r_ctl.rc_tlp_cwnd_reduce = optval; in rack_process_option()
23238 if (rack->r_ctl.side_chan_dis_mask & CCSP_DIS_MASK) { in rack_process_option()
23247 rack->r_ctl.pacing_method |= RACK_REG_PACING; in rack_process_option()
23274 rack->r_ctl.init_rate = val; in rack_process_option()
23290 rack->r_ctl.rc_user_set_min_segs = (0x0000ffff & optval); in rack_process_option()
23297 (rack->r_ctl.pacing_method & RACK_DGP_PACING)) { in rack_process_option()
23310 rack->r_ctl.pacing_method |= RACK_REG_PACING; in rack_process_option()
23312 rack->r_ctl.pacing_method &= ~RACK_DGP_PACING; in rack_process_option()
23323 if (rack->r_ctl.side_chan_dis_mask & CCSP_DIS_MASK) { in rack_process_option()
23335 rack->r_ctl.rc_fixed_pacing_rate_rec = optval; in rack_process_option()
23336 if (rack->r_ctl.rc_fixed_pacing_rate_ca == 0) in rack_process_option()
23337 rack->r_ctl.rc_fixed_pacing_rate_ca = optval; in rack_process_option()
23338 if (rack->r_ctl.rc_fixed_pacing_rate_ss == 0) in rack_process_option()
23339 rack->r_ctl.rc_fixed_pacing_rate_ss = optval; in rack_process_option()
23344 rack->r_ctl.rc_fixed_pacing_rate_ss, in rack_process_option()
23345 rack->r_ctl.rc_fixed_pacing_rate_ca, in rack_process_option()
23346 rack->r_ctl.rc_fixed_pacing_rate_rec, 0, 0, 8, in rack_process_option()
23353 if (rack->r_ctl.side_chan_dis_mask & CCSP_DIS_MASK) { in rack_process_option()
23365 rack->r_ctl.rc_fixed_pacing_rate_ss = optval; in rack_process_option()
23366 if (rack->r_ctl.rc_fixed_pacing_rate_ca == 0) in rack_process_option()
23367 rack->r_ctl.rc_fixed_pacing_rate_ca = optval; in rack_process_option()
23368 if (rack->r_ctl.rc_fixed_pacing_rate_rec == 0) in rack_process_option()
23369 rack->r_ctl.rc_fixed_pacing_rate_rec = optval; in rack_process_option()
23374 rack->r_ctl.rc_fixed_pacing_rate_ss, in rack_process_option()
23375 rack->r_ctl.rc_fixed_pacing_rate_ca, in rack_process_option()
23376 rack->r_ctl.rc_fixed_pacing_rate_rec, 0, 0, 8, in rack_process_option()
23383 if (rack->r_ctl.side_chan_dis_mask & CCSP_DIS_MASK) { in rack_process_option()
23395 rack->r_ctl.rc_fixed_pacing_rate_ca = optval; in rack_process_option()
23396 if (rack->r_ctl.rc_fixed_pacing_rate_ss == 0) in rack_process_option()
23397 rack->r_ctl.rc_fixed_pacing_rate_ss = optval; in rack_process_option()
23398 if (rack->r_ctl.rc_fixed_pacing_rate_rec == 0) in rack_process_option()
23399 rack->r_ctl.rc_fixed_pacing_rate_rec = optval; in rack_process_option()
23404 rack->r_ctl.rc_fixed_pacing_rate_ss, in rack_process_option()
23405 rack->r_ctl.rc_fixed_pacing_rate_ca, in rack_process_option()
23406 rack->r_ctl.rc_fixed_pacing_rate_rec, 0, 0, 8, in rack_process_option()
23411 rack->r_ctl.rack_per_of_gp_rec = optval; in rack_process_option()
23413 rack->r_ctl.rack_per_of_gp_ss, in rack_process_option()
23414 rack->r_ctl.rack_per_of_gp_ca, in rack_process_option()
23415 rack->r_ctl.rack_per_of_gp_rec, 0, 0, 1, in rack_process_option()
23429 rack->r_ctl.rack_per_of_gp_ca = ca; in rack_process_option()
23431 rack->r_ctl.rack_per_of_gp_ss, in rack_process_option()
23432 rack->r_ctl.rack_per_of_gp_ca, in rack_process_option()
23433 rack->r_ctl.rack_per_of_gp_rec, 0, 0, 1, in rack_process_option()
23447 rack->r_ctl.rack_per_of_gp_ss = ss; in rack_process_option()
23449 rack->r_ctl.rack_per_of_gp_ss, in rack_process_option()
23450 rack->r_ctl.rack_per_of_gp_ca, in rack_process_option()
23451 rack->r_ctl.rack_per_of_gp_rec, 0, 0, 1, in rack_process_option()
23482 rack->r_ctl.rack_per_upper_bound_ca = val; in rack_process_option()
23484 rack->r_ctl.rack_per_upper_bound_ss = val; in rack_process_option()
23489 rack->r_ctl.gp_rnd_thresh = optval & 0x0ff; in rack_process_option()
23491 rack->r_ctl.gate_to_fs = 1; in rack_process_option()
23493 rack->r_ctl.gate_to_fs = 0; in rack_process_option()
23496 rack->r_ctl.use_gp_not_last = 1; in rack_process_option()
23498 rack->r_ctl.use_gp_not_last = 0; in rack_process_option()
23505 rack->r_ctl.gp_gain_req = v; in rack_process_option()
23510 rack->r_ctl.gp_rnd_thresh = 0; in rack_process_option()
23515 rack->r_ctl.rc_split_limit = optval; in rack_process_option()
23528 if (rack->r_ctl.crte != NULL) { in rack_process_option()
23531 tcp_rel_pacing_rate(rack->r_ctl.crte, tp); in rack_process_option()
23532 rack->r_ctl.crte = NULL; in rack_process_option()
23541 rack->r_ctl.rc_prr_sendalot = optval; in rack_process_option()
23546 rack->r_ctl.rc_min_to = optval; in rack_process_option()
23551 rack->r_ctl.rc_early_recovery_segs = optval; in rack_process_option()
23570 rack->r_ctl.rc_reorder_shift = optval; in rack_process_option()
23577 rack->r_ctl.rc_reorder_fade = optval; in rack_process_option()
23583 rack->r_ctl.rc_tlp_threshold = optval; in rack_process_option()
23597 rack->r_ctl.rc_pkt_delay = optval; in rack_process_option()
23621 rack->r_ctl.rc_rate_sample_method = optval; in rack_process_option()
23632 rack->r_ctl.max_reduction = optval; in rack_process_option()
23648 rack->r_ctl.rack_per_of_gp_rec = 90; in rack_process_option()
23649 rack->r_ctl.rack_per_of_gp_ca = 100; in rack_process_option()
23650 rack->r_ctl.rack_per_of_gp_ss = 250; in rack_process_option()
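
The fixed-pacing-rate cases in the rack_process_option() references share one idiom: setting the rate for one phase (recovery, slow start, or congestion avoidance) also seeds the other two phases if they are still zero, so a single socket option yields a complete fixed-rate configuration. A sketch of the recovery-rate case with illustrative types:

    /* Set the recovery fixed rate and default the other phases if unset. */
    #include <stdint.h>

    struct fixed_rates {
            uint64_t rec;  /* ~ rc_fixed_pacing_rate_rec */
            uint64_t ss;   /* ~ rc_fixed_pacing_rate_ss */
            uint64_t ca;   /* ~ rc_fixed_pacing_rate_ca */
    };

    static void
    set_fixed_rate_rec(struct fixed_rates *fr, uint64_t optval)
    {
            fr->rec = optval;
            if (fr->ca == 0)
                    fr->ca = optval;
            if (fr->ss == 0)
                    fr->ss = optval;
    }
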
23737 if (dest->r_ctl.pace_len_divisor != src->r_ctl.pace_len_divisor) { in rack_inherit()
23738 dest->r_ctl.pace_len_divisor = src->r_ctl.pace_len_divisor; in rack_inherit()
23745 dest->r_ctl.rc_saved_beta.beta = src->r_ctl.rc_saved_beta.beta; in rack_inherit()
23752 if (dest->r_ctl.timer_slop != src->r_ctl.timer_slop) { in rack_inherit()
23753 dest->r_ctl.timer_slop = src->r_ctl.timer_slop; in rack_inherit()
23757 if (dest->r_ctl.rc_saved_beta.beta_ecn != src->r_ctl.rc_saved_beta.beta_ecn) { in rack_inherit()
23758 dest->r_ctl.rc_saved_beta.beta_ecn = src->r_ctl.rc_saved_beta.beta_ecn; in rack_inherit()
23761 if (dest->r_ctl.rc_saved_beta.newreno_flags != src->r_ctl.rc_saved_beta.newreno_flags) { in rack_inherit()
23762 dest->r_ctl.rc_saved_beta.newreno_flags = src->r_ctl.rc_saved_beta.newreno_flags; in rack_inherit()
23767 if (dest->r_ctl.req_measurements != src->r_ctl.req_measurements) { in rack_inherit()
23768 dest->r_ctl.req_measurements = src->r_ctl.req_measurements; in rack_inherit()
23777 if (dest->r_ctl.fillcw_cap != src->r_ctl.fillcw_cap) { in rack_inherit()
23778 dest->r_ctl.fillcw_cap = src->r_ctl.fillcw_cap; in rack_inherit()
23782 if (dest->r_ctl.bw_rate_cap != src->r_ctl.bw_rate_cap) { in rack_inherit()
23783 dest->r_ctl.bw_rate_cap = src->r_ctl.bw_rate_cap; in rack_inherit()
23788 if (dest->r_ctl.side_chan_dis_mask != src->r_ctl.side_chan_dis_mask) { in rack_inherit()
23789 dest->r_ctl.side_chan_dis_mask = src->r_ctl.side_chan_dis_mask; in rack_inherit()
23811 if (dest->r_ctl.rc_no_push_at_mrtt != src->r_ctl.rc_no_push_at_mrtt) { in rack_inherit()
23812 dest->r_ctl.rc_no_push_at_mrtt = src->r_ctl.rc_no_push_at_mrtt; in rack_inherit()
23867 if (dest->r_ctl.rack_per_of_gp_ca != src->r_ctl.rack_per_of_gp_ca) { in rack_inherit()
23868 dest->r_ctl.rack_per_of_gp_ca = src->r_ctl.rack_per_of_gp_ca; in rack_inherit()
23878 if (dest->r_ctl.init_rate != src->r_ctl.init_rate) { in rack_inherit()
23879 dest->r_ctl.init_rate = src->r_ctl.init_rate; in rack_inherit()
23888 if (dest->r_ctl.rc_user_set_min_segs != src->r_ctl.rc_user_set_min_segs) { in rack_inherit()
23889 dest->r_ctl.rc_user_set_min_segs = src->r_ctl.rc_user_set_min_segs; in rack_inherit()
23894 if (dest->r_ctl.rc_fixed_pacing_rate_ca != src->r_ctl.rc_fixed_pacing_rate_ca) { in rack_inherit()
23895 dest->r_ctl.rc_fixed_pacing_rate_ca = src->r_ctl.rc_fixed_pacing_rate_ca; in rack_inherit()
23898 if (dest->r_ctl.rc_fixed_pacing_rate_ss != src->r_ctl.rc_fixed_pacing_rate_ss) { in rack_inherit()
23899 dest->r_ctl.rc_fixed_pacing_rate_ss = src->r_ctl.rc_fixed_pacing_rate_ss; in rack_inherit()
23902 if (dest->r_ctl.rc_fixed_pacing_rate_rec != src->r_ctl.rc_fixed_pacing_rate_rec) { in rack_inherit()
23903 dest->r_ctl.rc_fixed_pacing_rate_rec = src->r_ctl.rc_fixed_pacing_rate_rec; in rack_inherit()
23907 if (dest->r_ctl.rack_per_of_gp_rec != src->r_ctl.rack_per_of_gp_rec) { in rack_inherit()
23908 dest->r_ctl.rack_per_of_gp_rec = src->r_ctl.rack_per_of_gp_rec; in rack_inherit()
23911 if (dest->r_ctl.rack_per_of_gp_ca != src->r_ctl.rack_per_of_gp_ca) { in rack_inherit()
23912 dest->r_ctl.rack_per_of_gp_ca = src->r_ctl.rack_per_of_gp_ca; in rack_inherit()
23916 if (dest->r_ctl.rack_per_of_gp_ss != src->r_ctl.rack_per_of_gp_ss) { in rack_inherit()
23917 dest->r_ctl.rack_per_of_gp_ss = src->r_ctl.rack_per_of_gp_ss; in rack_inherit()
23936 if (dest->r_ctl.rack_per_upper_bound_ca != src->r_ctl.rack_per_upper_bound_ca) { in rack_inherit()
23937 dest->r_ctl.rack_per_upper_bound_ca = src->r_ctl.rack_per_upper_bound_ca; in rack_inherit()
23940 if (dest->r_ctl.rack_per_upper_bound_ss != src->r_ctl.rack_per_upper_bound_ss) { in rack_inherit()
23941 dest->r_ctl.rack_per_upper_bound_ss = src->r_ctl.rack_per_upper_bound_ss; in rack_inherit()
23945 if (dest->r_ctl.gp_rnd_thresh != src->r_ctl.gp_rnd_thresh) { in rack_inherit()
23946 dest->r_ctl.gp_rnd_thresh = src->r_ctl.gp_rnd_thresh; in rack_inherit()
23949 if (dest->r_ctl.gate_to_fs != src->r_ctl.gate_to_fs) { in rack_inherit()
23950 dest->r_ctl.gate_to_fs = src->r_ctl.gate_to_fs; in rack_inherit()
23953 if (dest->r_ctl.use_gp_not_last != src->r_ctl.use_gp_not_last) { in rack_inherit()
23954 dest->r_ctl.use_gp_not_last = src->r_ctl.use_gp_not_last; in rack_inherit()
23957 if (dest->r_ctl.gp_gain_req != src->r_ctl.gp_gain_req) { in rack_inherit()
23958 dest->r_ctl.gp_gain_req = src->r_ctl.gp_gain_req; in rack_inherit()
23971 if (dest->r_ctl.rc_prr_sendalot != src->r_ctl.rc_prr_sendalot) { in rack_inherit()
23972 dest->r_ctl.rc_prr_sendalot = src->r_ctl.rc_prr_sendalot; in rack_inherit()
23976 if (dest->r_ctl.rc_min_to != src->r_ctl.rc_min_to) { in rack_inherit()
23977 dest->r_ctl.rc_min_to = src->r_ctl.rc_min_to; in rack_inherit()
23981 if (dest->r_ctl.rc_early_recovery_segs != src->r_ctl.rc_early_recovery_segs) { in rack_inherit()
23982 dest->r_ctl.rc_early_recovery_segs = src->r_ctl.rc_early_recovery_segs; in rack_inherit()
23999 if (dest->r_ctl.rc_reorder_shift != src->r_ctl.rc_reorder_shift) { in rack_inherit()
24000 dest->r_ctl.rc_reorder_shift = src->r_ctl.rc_reorder_shift; in rack_inherit()
24004 if (dest->r_ctl.rc_reorder_fade != src->r_ctl.rc_reorder_fade) { in rack_inherit()
24005 dest->r_ctl.rc_reorder_fade = src->r_ctl.rc_reorder_fade; in rack_inherit()
24009 if (dest->r_ctl.rc_tlp_threshold != src->r_ctl.rc_tlp_threshold) { in rack_inherit()
24010 dest->r_ctl.rc_tlp_threshold = src->r_ctl.rc_tlp_threshold; in rack_inherit()
24019 if (dest->r_ctl.rc_pkt_delay != src->r_ctl.rc_pkt_delay) { in rack_inherit()
24020 dest->r_ctl.rc_pkt_delay = src->r_ctl.rc_pkt_delay; in rack_inherit()
24025 if (dest->r_ctl.rc_rate_sample_method != src->r_ctl.rc_rate_sample_method) { in rack_inherit()
24026 dest->r_ctl.rc_rate_sample_method = src->r_ctl.rc_rate_sample_method; in rack_inherit()
24034 if (dest->r_ctl.max_reduction != src->r_ctl.max_reduction) { in rack_inherit()
24035 dest->r_ctl.max_reduction = src->r_ctl.max_reduction; in rack_inherit()
24071 TAILQ_FOREACH_SAFE(dol, &rack->r_ctl.opt_list, next, sdol) { in rack_apply_deferred_options()
24072 TAILQ_REMOVE(&rack->r_ctl.opt_list, dol, next); in rack_apply_deferred_options()
24088 rack->r_ctl.fsb.hw_tls = 1; in rack_hw_tls_change()
24090 rack->r_ctl.fsb.hw_tls = 0; in rack_hw_tls_change()
24109 if (rack->r_ctl.rc_hpts_flags) { in rack_wake_check()
24111 if ((rack->r_ctl.rc_hpts_flags & PACE_PKT_OUTPUT) == PACE_PKT_OUTPUT){ in rack_wake_check()
24115 if (TSTMP_GEQ(cts, rack->r_ctl.rc_last_output_to)) in rack_wake_check()
24117 } else if ((rack->r_ctl.rc_hpts_flags & PACE_TMR_MASK) != 0) { in rack_wake_check()
24121 if (TSTMP_GEQ(cts, rack->r_ctl.rc_timer_exp)) in rack_wake_check()
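
The rack_wake_check() references show the wake test: with a pacer slot armed (PACE_PKT_OUTPUT) the connection is runnable once the clock reaches rc_last_output_to; with a timer armed instead (PACE_TMR_MASK) once it reaches rc_timer_exp. The flag values and the wrap-aware TSTMP_GEQ comparison are simplified in this sketch:

    /* Return true if the armed pacer slot or timer has expired at cts. */
    #include <stdbool.h>
    #include <stdint.h>

    #define PACE_PKT_OUTPUT 0x01u   /* illustrative flag values */
    #define PACE_TMR_MASK   0xf0u

    static bool
    rack_should_wake(uint32_t hpts_flags, uint32_t cts,
        uint32_t last_output_to, uint32_t timer_exp)
    {
            if (hpts_flags == 0)
                    return (false);
            if ((hpts_flags & PACE_PKT_OUTPUT) == PACE_PKT_OUTPUT)
                    return (cts >= last_output_to);
            if ((hpts_flags & PACE_TMR_MASK) != 0)
                    return (cts >= timer_exp);
            return (false);
    }
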
24177 ip = (struct ip *)rack->r_ctl.fsb.tcp_ip_hdr; in rack_set_sockopt()
24440 optval = rack->r_ctl.rc_saved_beta.beta; in rack_get_sockopt()
24464 optval = rack->r_ctl.rc_saved_beta.beta_ecn; in rack_get_sockopt()
24509 optval = rack->r_ctl.req_measurements; in rack_get_sockopt()
24521 loptval = rack->r_ctl.fillcw_cap; in rack_get_sockopt()
24524 loptval = rack->r_ctl.bw_rate_cap; in rack_get_sockopt()
24531 optval = rack->r_ctl.side_chan_dis_mask; in rack_get_sockopt()
24544 optval = rack->r_ctl.rc_no_push_at_mrtt; in rack_get_sockopt()
24590 optval = rack->r_ctl.rc_tlp_cwnd_reduce; in rack_get_sockopt()
24593 val = rack->r_ctl.init_rate; in rack_get_sockopt()
24603 optval = rack->r_ctl.rc_user_set_min_segs; in rack_get_sockopt()
24615 optval = rack->r_ctl.rc_prr_sendalot; in rack_get_sockopt()
24619 optval = rack->r_ctl.rc_min_to; in rack_get_sockopt()
24622 optval = rack->r_ctl.rc_split_limit; in rack_get_sockopt()
24626 optval = rack->r_ctl.rc_early_recovery_segs; in rack_get_sockopt()
24630 optval = rack->r_ctl.rc_reorder_shift; in rack_get_sockopt()
24633 if (rack->r_ctl.gp_rnd_thresh) { in rack_get_sockopt()
24636 v = rack->r_ctl.gp_gain_req; in rack_get_sockopt()
24638 optval = v | (rack->r_ctl.gp_rnd_thresh & 0xff); in rack_get_sockopt()
24639 if (rack->r_ctl.gate_to_fs == 1) in rack_get_sockopt()
24646 optval = rack->r_ctl.rc_reorder_fade; in rack_get_sockopt()
24663 optval = rack->r_ctl.rc_tlp_threshold; in rack_get_sockopt()
24667 optval = rack->r_ctl.rc_pkt_delay; in rack_get_sockopt()
24676 optval = rack->r_ctl.rc_fixed_pacing_rate_ca; in rack_get_sockopt()
24679 optval = rack->r_ctl.rc_fixed_pacing_rate_ss; in rack_get_sockopt()
24682 optval = rack->r_ctl.rc_fixed_pacing_rate_rec; in rack_get_sockopt()
24685 optval = rack->r_ctl.rack_per_upper_bound_ss; in rack_get_sockopt()
24687 optval |= rack->r_ctl.rack_per_upper_bound_ca; in rack_get_sockopt()
24690 optval = rack->r_ctl.rack_per_of_gp_ca; in rack_get_sockopt()
24693 optval = rack->r_ctl.rack_per_of_gp_ss; in rack_get_sockopt()
24696 optval = rack->r_ctl.pace_len_divisor; in rack_get_sockopt()
24699 optval = rack->r_ctl.rc_rate_sample_method; in rack_get_sockopt()
24712 optval = rack->r_ctl.max_reduction; in rack_get_sockopt()
24723 optval = rack->r_ctl.timer_slop; in rack_get_sockopt()