xref: /linux/drivers/net/wireless/ath/ath12k/dp_mon.c (revision 8be4d31cb8aaeea27bde4b7ddb26e28a89062ebf)
1 // SPDX-License-Identifier: BSD-3-Clause-Clear
2 /*
3  * Copyright (c) 2019-2021 The Linux Foundation. All rights reserved.
4  * Copyright (c) 2021-2025 Qualcomm Innovation Center, Inc. All rights reserved.
5  */
6 
7 #include "dp_mon.h"
8 #include "debug.h"
9 #include "dp_rx.h"
10 #include "dp_tx.h"
11 #include "peer.h"
12 
13 #define ATH12K_LE32_DEC_ENC(value, dec_bits, enc_bits)	\
14 		u32_encode_bits(le32_get_bits(value, dec_bits), enc_bits)
15 
16 #define ATH12K_LE64_DEC_ENC(value, dec_bits, enc_bits) \
17 		u32_encode_bits(le64_get_bits(value, dec_bits), enc_bits)
18 
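/* Note: the two helper macros above bridge little-endian HW TLV words and
 * host-order radiotap fields. As a straightforward expansion (shown only
 * for illustration):
 *
 *   ATH12K_LE32_DEC_ENC(cmn->info0,
 *                       HAL_RX_USIG_CMN_INFO0_BSS_COLOR,
 *                       IEEE80211_RADIOTAP_EHT_USIG_COMMON_BSS_COLOR)
 *
 * becomes
 *
 *   u32_encode_bits(le32_get_bits(cmn->info0,
 *                                 HAL_RX_USIG_CMN_INFO0_BSS_COLOR),
 *                   IEEE80211_RADIOTAP_EHT_USIG_COMMON_BSS_COLOR)
 *
 * i.e. a bitfield is decoded from a __le32 TLV word and re-encoded at the
 * position expected by the radiotap definition.
 */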
19 static void
20 ath12k_dp_mon_rx_handle_ofdma_info(const struct hal_rx_ppdu_end_user_stats *ppdu_end_user,

21 				   struct hal_rx_user_status *rx_user_status)
22 {
23 	rx_user_status->ul_ofdma_user_v0_word0 =
24 		__le32_to_cpu(ppdu_end_user->usr_resp_ref);
25 	rx_user_status->ul_ofdma_user_v0_word1 =
26 		__le32_to_cpu(ppdu_end_user->usr_resp_ref_ext);
27 }
28 
29 static void
30 ath12k_dp_mon_rx_populate_byte_count(const struct hal_rx_ppdu_end_user_stats *stats,
31 				     void *ppduinfo,
32 				     struct hal_rx_user_status *rx_user_status)
33 {
34 	rx_user_status->mpdu_ok_byte_count =
35 		le32_get_bits(stats->info7,
36 			      HAL_RX_PPDU_END_USER_STATS_INFO7_MPDU_OK_BYTE_COUNT);
37 	rx_user_status->mpdu_err_byte_count =
38 		le32_get_bits(stats->info8,
39 			      HAL_RX_PPDU_END_USER_STATS_INFO8_MPDU_ERR_BYTE_COUNT);
40 }
41 
42 static void
43 ath12k_dp_mon_rx_populate_mu_user_info(const struct hal_rx_ppdu_end_user_stats *rx_tlv,
44 				       struct hal_rx_mon_ppdu_info *ppdu_info,
45 				       struct hal_rx_user_status *rx_user_status)
46 {
47 	rx_user_status->ast_index = ppdu_info->ast_index;
48 	rx_user_status->tid = ppdu_info->tid;
49 	rx_user_status->tcp_ack_msdu_count =
50 		ppdu_info->tcp_ack_msdu_count;
51 	rx_user_status->tcp_msdu_count =
52 		ppdu_info->tcp_msdu_count;
53 	rx_user_status->udp_msdu_count =
54 		ppdu_info->udp_msdu_count;
55 	rx_user_status->other_msdu_count =
56 		ppdu_info->other_msdu_count;
57 	rx_user_status->frame_control = ppdu_info->frame_control;
58 	rx_user_status->frame_control_info_valid =
59 		ppdu_info->frame_control_info_valid;
60 	rx_user_status->data_sequence_control_info_valid =
61 		ppdu_info->data_sequence_control_info_valid;
62 	rx_user_status->first_data_seq_ctrl =
63 		ppdu_info->first_data_seq_ctrl;
64 	rx_user_status->preamble_type = ppdu_info->preamble_type;
65 	rx_user_status->ht_flags = ppdu_info->ht_flags;
66 	rx_user_status->vht_flags = ppdu_info->vht_flags;
67 	rx_user_status->he_flags = ppdu_info->he_flags;
68 	rx_user_status->rs_flags = ppdu_info->rs_flags;
69 
70 	rx_user_status->mpdu_cnt_fcs_ok =
71 		ppdu_info->num_mpdu_fcs_ok;
72 	rx_user_status->mpdu_cnt_fcs_err =
73 		ppdu_info->num_mpdu_fcs_err;
74 	memcpy(&rx_user_status->mpdu_fcs_ok_bitmap[0], &ppdu_info->mpdu_fcs_ok_bitmap[0],
75 	       HAL_RX_NUM_WORDS_PER_PPDU_BITMAP *
76 	       sizeof(ppdu_info->mpdu_fcs_ok_bitmap[0]));
77 
78 	ath12k_dp_mon_rx_populate_byte_count(rx_tlv, ppdu_info, rx_user_status);
79 }
80 
81 static void ath12k_dp_mon_parse_vht_sig_a(const struct hal_rx_vht_sig_a_info *vht_sig,
82 					  struct hal_rx_mon_ppdu_info *ppdu_info)
83 {
84 	u32 nsts, info0, info1;
85 	u8 gi_setting;
86 
87 	info0 = __le32_to_cpu(vht_sig->info0);
88 	info1 = __le32_to_cpu(vht_sig->info1);
89 
90 	ppdu_info->ldpc = u32_get_bits(info1, HAL_RX_VHT_SIG_A_INFO_INFO1_SU_MU_CODING);
91 	ppdu_info->mcs = u32_get_bits(info1, HAL_RX_VHT_SIG_A_INFO_INFO1_MCS);
92 	gi_setting = u32_get_bits(info1, HAL_RX_VHT_SIG_A_INFO_INFO1_GI_SETTING);
93 	switch (gi_setting) {
94 	case HAL_RX_VHT_SIG_A_NORMAL_GI:
95 		ppdu_info->gi = HAL_RX_GI_0_8_US;
96 		break;
97 	case HAL_RX_VHT_SIG_A_SHORT_GI:
98 	case HAL_RX_VHT_SIG_A_SHORT_GI_AMBIGUITY:
99 		ppdu_info->gi = HAL_RX_GI_0_4_US;
100 		break;
101 	}
102 
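	/* Note: VHT-SIG-A carries NSTS (space-time streams minus one). With
	 * STBC each spatial stream presumably occupies two space-time
	 * streams, so the conversion below halves the count before the NSS
	 * value is derived from it.
	 */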
103 	ppdu_info->is_stbc = u32_get_bits(info0, HAL_RX_VHT_SIG_A_INFO_INFO0_STBC);
104 	nsts = u32_get_bits(info0, HAL_RX_VHT_SIG_A_INFO_INFO0_NSTS);
105 	if (ppdu_info->is_stbc && nsts > 0)
106 		nsts = ((nsts + 1) >> 1) - 1;
107 
108 	ppdu_info->nss = u32_get_bits(nsts, VHT_SIG_SU_NSS_MASK);
109 	ppdu_info->bw = u32_get_bits(info0, HAL_RX_VHT_SIG_A_INFO_INFO0_BW);
110 	ppdu_info->beamformed = u32_get_bits(info1,
111 					     HAL_RX_VHT_SIG_A_INFO_INFO1_BEAMFORMED);
112 	ppdu_info->vht_flag_values5 = u32_get_bits(info0,
113 						   HAL_RX_VHT_SIG_A_INFO_INFO0_GROUP_ID);
114 	ppdu_info->vht_flag_values3[0] = (((ppdu_info->mcs) << 4) |
115 					    ppdu_info->nss);
116 	ppdu_info->vht_flag_values2 = ppdu_info->bw;
117 	ppdu_info->vht_flag_values4 =
118 		u32_get_bits(info1, HAL_RX_VHT_SIG_A_INFO_INFO1_SU_MU_CODING);
119 }
120 
121 static void ath12k_dp_mon_parse_ht_sig(const struct hal_rx_ht_sig_info *ht_sig,
122 				       struct hal_rx_mon_ppdu_info *ppdu_info)
123 {
124 	u32 info0 = __le32_to_cpu(ht_sig->info0);
125 	u32 info1 = __le32_to_cpu(ht_sig->info1);
126 
127 	ppdu_info->mcs = u32_get_bits(info0, HAL_RX_HT_SIG_INFO_INFO0_MCS);
128 	ppdu_info->bw = u32_get_bits(info0, HAL_RX_HT_SIG_INFO_INFO0_BW);
129 	ppdu_info->is_stbc = u32_get_bits(info1, HAL_RX_HT_SIG_INFO_INFO1_STBC);
130 	ppdu_info->ldpc = u32_get_bits(info1, HAL_RX_HT_SIG_INFO_INFO1_FEC_CODING);
131 	ppdu_info->gi = u32_get_bits(info1, HAL_RX_HT_SIG_INFO_INFO1_GI);
132 	ppdu_info->nss = (ppdu_info->mcs >> 3);
133 }
134 
135 static void ath12k_dp_mon_parse_l_sig_b(const struct hal_rx_lsig_b_info *lsigb,
136 					struct hal_rx_mon_ppdu_info *ppdu_info)
137 {
138 	u32 info0 = __le32_to_cpu(lsigb->info0);
139 	u8 rate;
140 
141 	rate = u32_get_bits(info0, HAL_RX_LSIG_B_INFO_INFO0_RATE);
142 	switch (rate) {
143 	case 1:
144 		rate = HAL_RX_LEGACY_RATE_1_MBPS;
145 		break;
146 	case 2:
147 	case 5:
148 		rate = HAL_RX_LEGACY_RATE_2_MBPS;
149 		break;
150 	case 3:
151 	case 6:
152 		rate = HAL_RX_LEGACY_RATE_5_5_MBPS;
153 		break;
154 	case 4:
155 	case 7:
156 		rate = HAL_RX_LEGACY_RATE_11_MBPS;
157 		break;
158 	default:
159 		rate = HAL_RX_LEGACY_RATE_INVALID;
160 	}
161 
162 	ppdu_info->rate = rate;
163 	ppdu_info->cck_flag = 1;
164 }
165 
166 static void ath12k_dp_mon_parse_l_sig_a(const struct hal_rx_lsig_a_info *lsiga,
167 					struct hal_rx_mon_ppdu_info *ppdu_info)
168 {
169 	u32 info0 = __le32_to_cpu(lsiga->info0);
170 	u8 rate;
171 
172 	rate = u32_get_bits(info0, HAL_RX_LSIG_A_INFO_INFO0_RATE);
173 	switch (rate) {
174 	case 8:
175 		rate = HAL_RX_LEGACY_RATE_48_MBPS;
176 		break;
177 	case 9:
178 		rate = HAL_RX_LEGACY_RATE_24_MBPS;
179 		break;
180 	case 10:
181 		rate = HAL_RX_LEGACY_RATE_12_MBPS;
182 		break;
183 	case 11:
184 		rate = HAL_RX_LEGACY_RATE_6_MBPS;
185 		break;
186 	case 12:
187 		rate = HAL_RX_LEGACY_RATE_54_MBPS;
188 		break;
189 	case 13:
190 		rate = HAL_RX_LEGACY_RATE_36_MBPS;
191 		break;
192 	case 14:
193 		rate = HAL_RX_LEGACY_RATE_18_MBPS;
194 		break;
195 	case 15:
196 		rate = HAL_RX_LEGACY_RATE_9_MBPS;
197 		break;
198 	default:
199 		rate = HAL_RX_LEGACY_RATE_INVALID;
200 	}
201 
202 	ppdu_info->rate = rate;
203 }
204 
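/* Note: the ppdu_info->he_data1..he_data6 words filled in by the HE SIG
 * parsers below appear to mirror the IEEE80211_RADIOTAP_HE data words; the
 * HE_*_KNOWN bits record which sub-fields have actually been populated.
 */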
205 static void
206 ath12k_dp_mon_parse_he_sig_b2_ofdma(const struct hal_rx_he_sig_b2_ofdma_info *ofdma,
207 				    struct hal_rx_mon_ppdu_info *ppdu_info)
208 {
209 	u32 info0, value;
210 
211 	info0 = __le32_to_cpu(ofdma->info0);
212 
213 	ppdu_info->he_data1 |= HE_MCS_KNOWN | HE_DCM_KNOWN | HE_CODING_KNOWN;
214 
215 	/* HE-data2 */
216 	ppdu_info->he_data2 |= HE_TXBF_KNOWN;
217 
218 	ppdu_info->mcs = u32_get_bits(info0, HAL_RX_HE_SIG_B2_OFDMA_INFO_INFO0_STA_MCS);
219 	value = ppdu_info->mcs << HE_TRANSMIT_MCS_SHIFT;
220 	ppdu_info->he_data3 |= value;
221 
222 	value = u32_get_bits(info0, HAL_RX_HE_SIG_B2_OFDMA_INFO_INFO0_STA_DCM);
223 	value = value << HE_DCM_SHIFT;
224 	ppdu_info->he_data3 |= value;
225 
226 	value = u32_get_bits(info0, HAL_RX_HE_SIG_B2_OFDMA_INFO_INFO0_STA_CODING);
227 	ppdu_info->ldpc = value;
228 	value = value << HE_CODING_SHIFT;
229 	ppdu_info->he_data3 |= value;
230 
231 	/* HE-data4 */
232 	value = u32_get_bits(info0, HAL_RX_HE_SIG_B2_OFDMA_INFO_INFO0_STA_ID);
233 	value = value << HE_STA_ID_SHIFT;
234 	ppdu_info->he_data4 |= value;
235 
236 	ppdu_info->nss = u32_get_bits(info0, HAL_RX_HE_SIG_B2_OFDMA_INFO_INFO0_STA_NSTS);
237 	ppdu_info->beamformed = u32_get_bits(info0,
238 					     HAL_RX_HE_SIG_B2_OFDMA_INFO_INFO0_STA_TXBF);
239 }
240 
241 static void
242 ath12k_dp_mon_parse_he_sig_b2_mu(const struct hal_rx_he_sig_b2_mu_info *he_sig_b2_mu,
243 				 struct hal_rx_mon_ppdu_info *ppdu_info)
244 {
245 	u32 info0, value;
246 
247 	info0 = __le32_to_cpu(he_sig_b2_mu->info0);
248 
249 	ppdu_info->he_data1 |= HE_MCS_KNOWN | HE_CODING_KNOWN;
250 
251 	ppdu_info->mcs = u32_get_bits(info0, HAL_RX_HE_SIG_B2_MU_INFO_INFO0_STA_MCS);
252 	value = ppdu_info->mcs << HE_TRANSMIT_MCS_SHIFT;
253 	ppdu_info->he_data3 |= value;
254 
255 	value = u32_get_bits(info0, HAL_RX_HE_SIG_B2_MU_INFO_INFO0_STA_CODING);
256 	ppdu_info->ldpc = value;
257 	value = value << HE_CODING_SHIFT;
258 	ppdu_info->he_data3 |= value;
259 
260 	value = u32_get_bits(info0, HAL_RX_HE_SIG_B2_MU_INFO_INFO0_STA_ID);
261 	value = value << HE_STA_ID_SHIFT;
262 	ppdu_info->he_data4 |= value;
263 
264 	ppdu_info->nss = u32_get_bits(info0, HAL_RX_HE_SIG_B2_MU_INFO_INFO0_STA_NSTS);
265 }
266 
267 static void
268 ath12k_dp_mon_parse_he_sig_b1_mu(const struct hal_rx_he_sig_b1_mu_info *he_sig_b1_mu,
269 				 struct hal_rx_mon_ppdu_info *ppdu_info)
270 {
271 	u32 info0 = __le32_to_cpu(he_sig_b1_mu->info0);
272 	u16 ru_tones;
273 
274 	ru_tones = u32_get_bits(info0,
275 				HAL_RX_HE_SIG_B1_MU_INFO_INFO0_RU_ALLOCATION);
276 	ppdu_info->ru_alloc = ath12k_he_ru_tones_to_nl80211_he_ru_alloc(ru_tones);
277 	ppdu_info->he_RU[0] = ru_tones;
278 }
279 
280 static void
281 ath12k_dp_mon_parse_he_sig_mu(const struct hal_rx_he_sig_a_mu_dl_info *he_sig_a_mu_dl,
282 			      struct hal_rx_mon_ppdu_info *ppdu_info)
283 {
284 	u32 info0, info1, value;
285 	u16 he_gi = 0, he_ltf = 0;
286 
287 	info0 = __le32_to_cpu(he_sig_a_mu_dl->info0);
288 	info1 = __le32_to_cpu(he_sig_a_mu_dl->info1);
289 
290 	ppdu_info->he_mu_flags = 1;
291 
292 	ppdu_info->he_data1 = HE_MU_FORMAT_TYPE;
293 	ppdu_info->he_data1 |=
294 			HE_BSS_COLOR_KNOWN |
295 			HE_DL_UL_KNOWN |
296 			HE_LDPC_EXTRA_SYMBOL_KNOWN |
297 			HE_STBC_KNOWN |
298 			HE_DATA_BW_RU_KNOWN |
299 			HE_DOPPLER_KNOWN;
300 
301 	ppdu_info->he_data2 =
302 			HE_GI_KNOWN |
303 			HE_LTF_SYMBOLS_KNOWN |
304 			HE_PRE_FEC_PADDING_KNOWN |
305 			HE_PE_DISAMBIGUITY_KNOWN |
306 			HE_TXOP_KNOWN |
307 			HE_MIDABLE_PERIODICITY_KNOWN;
308 
309 	/* data3 */
310 	ppdu_info->he_data3 = u32_get_bits(info0, HAL_RX_HE_SIG_A_MU_DL_INFO0_BSS_COLOR);
311 	value = u32_get_bits(info0, HAL_RX_HE_SIG_A_MU_DL_INFO0_UL_FLAG);
312 	value = value << HE_DL_UL_SHIFT;
313 	ppdu_info->he_data3 |= value;
314 
315 	value = u32_get_bits(info1, HAL_RX_HE_SIG_A_MU_DL_INFO1_LDPC_EXTRA);
316 	value = value << HE_LDPC_EXTRA_SYMBOL_SHIFT;
317 	ppdu_info->he_data3 |= value;
318 
319 	value = u32_get_bits(info1, HAL_RX_HE_SIG_A_MU_DL_INFO1_STBC);
320 	value = value << HE_STBC_SHIFT;
321 	ppdu_info->he_data3 |= value;
322 
323 	/* data4 */
324 	value = u32_get_bits(info0,
325 			     HAL_RX_HE_SIG_A_MU_DL_INFO0_SPATIAL_REUSE);
326 	ppdu_info->he_data4 = value;
327 
328 	/* data5 */
329 	value = u32_get_bits(info0, HAL_RX_HE_SIG_A_MU_DL_INFO0_TRANSMIT_BW);
330 	ppdu_info->he_data5 = value;
331 	ppdu_info->bw = value;
332 
333 	value = u32_get_bits(info0, HAL_RX_HE_SIG_A_MU_DL_INFO0_CP_LTF_SIZE);
334 	switch (value) {
335 	case 0:
336 		he_gi = HE_GI_0_8;
337 		he_ltf = HE_LTF_4_X;
338 		break;
339 	case 1:
340 		he_gi = HE_GI_0_8;
341 		he_ltf = HE_LTF_2_X;
342 		break;
343 	case 2:
344 		he_gi = HE_GI_1_6;
345 		he_ltf = HE_LTF_2_X;
346 		break;
347 	case 3:
348 		he_gi = HE_GI_3_2;
349 		he_ltf = HE_LTF_4_X;
350 		break;
351 	}
352 
353 	ppdu_info->gi = he_gi;
354 	value = he_gi << HE_GI_SHIFT;
355 	ppdu_info->he_data5 |= value;
356 
357 	value = he_ltf << HE_LTF_SIZE_SHIFT;
358 	ppdu_info->he_data5 |= value;
359 
360 	value = u32_get_bits(info1, HAL_RX_HE_SIG_A_MU_DL_INFO1_NUM_LTF_SYMB);
361 	value = (value << HE_LTF_SYM_SHIFT);
362 	ppdu_info->he_data5 |= value;
363 
364 	value = u32_get_bits(info1, HAL_RX_HE_SIG_A_MU_DL_INFO1_PKT_EXT_FACTOR);
365 	value = value << HE_PRE_FEC_PAD_SHIFT;
366 	ppdu_info->he_data5 |= value;
367 
368 	value = u32_get_bits(info1, HAL_RX_HE_SIG_A_MU_DL_INFO1_PKT_EXT_PE_DISAM);
369 	value = value << HE_PE_DISAMBIGUITY_SHIFT;
370 	ppdu_info->he_data5 |= value;
371 
372 	/*data6*/
373 	value = u32_get_bits(info0, HAL_RX_HE_SIG_A_MU_DL_INFO0_DOPPLER_INDICATION);
374 	value = value << HE_DOPPLER_SHIFT;
375 	ppdu_info->he_data6 |= value;
376 
377 	value = u32_get_bits(info1, HAL_RX_HE_SIG_A_MU_DL_INFO1_TXOP_DURATION);
378 	value = value << HE_TXOP_SHIFT;
379 	ppdu_info->he_data6 |= value;
380 
381 	/* HE-MU Flags */
382 	/* HE-MU-flags1 */
383 	ppdu_info->he_flags1 =
384 		HE_SIG_B_MCS_KNOWN |
385 		HE_SIG_B_DCM_KNOWN |
386 		HE_SIG_B_COMPRESSION_FLAG_1_KNOWN |
387 		HE_SIG_B_SYM_NUM_KNOWN |
388 		HE_RU_0_KNOWN;
389 
390 	value = u32_get_bits(info0, HAL_RX_HE_SIG_A_MU_DL_INFO0_MCS_OF_SIGB);
391 	ppdu_info->he_flags1 |= value;
392 	value = u32_get_bits(info0, HAL_RX_HE_SIG_A_MU_DL_INFO0_DCM_OF_SIGB);
393 	value = value << HE_DCM_FLAG_1_SHIFT;
394 	ppdu_info->he_flags1 |= value;
395 
396 	/* HE-MU-flags2 */
397 	ppdu_info->he_flags2 = HE_BW_KNOWN;
398 
399 	value = u32_get_bits(info0, HAL_RX_HE_SIG_A_MU_DL_INFO0_TRANSMIT_BW);
400 	ppdu_info->he_flags2 |= value;
401 	value = u32_get_bits(info0, HAL_RX_HE_SIG_A_MU_DL_INFO0_COMP_MODE_SIGB);
402 	value = value << HE_SIG_B_COMPRESSION_FLAG_2_SHIFT;
403 	ppdu_info->he_flags2 |= value;
404 	value = u32_get_bits(info0, HAL_RX_HE_SIG_A_MU_DL_INFO0_NUM_SIGB_SYMB);
405 	value = value - 1;
406 	value = value << HE_NUM_SIG_B_SYMBOLS_SHIFT;
407 	ppdu_info->he_flags2 |= value;
408 
409 	ppdu_info->is_stbc = info1 &
410 			     HAL_RX_HE_SIG_A_MU_DL_INFO1_STBC;
411 }
412 
413 static void ath12k_dp_mon_parse_he_sig_su(const struct hal_rx_he_sig_a_su_info *he_sig_a,
414 					  struct hal_rx_mon_ppdu_info *ppdu_info)
415 {
416 	u32 info0, info1, value;
417 	u32 dcm;
418 	u8 he_dcm = 0, he_stbc = 0;
419 	u16 he_gi = 0, he_ltf = 0;
420 
421 	ppdu_info->he_flags = 1;
422 
423 	info0 = __le32_to_cpu(he_sig_a->info0);
424 	info1 = __le32_to_cpu(he_sig_a->info1);
425 
426 	value = u32_get_bits(info0, HAL_RX_HE_SIG_A_SU_INFO_INFO0_FORMAT_IND);
427 	if (value == 0)
428 		ppdu_info->he_data1 = HE_TRIG_FORMAT_TYPE;
429 	else
430 		ppdu_info->he_data1 = HE_SU_FORMAT_TYPE;
431 
432 	ppdu_info->he_data1 |=
433 			HE_BSS_COLOR_KNOWN |
434 			HE_BEAM_CHANGE_KNOWN |
435 			HE_DL_UL_KNOWN |
436 			HE_MCS_KNOWN |
437 			HE_DCM_KNOWN |
438 			HE_CODING_KNOWN |
439 			HE_LDPC_EXTRA_SYMBOL_KNOWN |
440 			HE_STBC_KNOWN |
441 			HE_DATA_BW_RU_KNOWN |
442 			HE_DOPPLER_KNOWN;
443 
444 	ppdu_info->he_data2 |=
445 			HE_GI_KNOWN |
446 			HE_TXBF_KNOWN |
447 			HE_PE_DISAMBIGUITY_KNOWN |
448 			HE_TXOP_KNOWN |
449 			HE_LTF_SYMBOLS_KNOWN |
450 			HE_PRE_FEC_PADDING_KNOWN |
451 			HE_MIDABLE_PERIODICITY_KNOWN;
452 
453 	ppdu_info->he_data3 = u32_get_bits(info0,
454 					   HAL_RX_HE_SIG_A_SU_INFO_INFO0_BSS_COLOR);
455 	value = u32_get_bits(info0, HAL_RX_HE_SIG_A_SU_INFO_INFO0_BEAM_CHANGE);
456 	value = value << HE_BEAM_CHANGE_SHIFT;
457 	ppdu_info->he_data3 |= value;
458 	value = u32_get_bits(info0, HAL_RX_HE_SIG_A_SU_INFO_INFO0_DL_UL_FLAG);
459 	value = value << HE_DL_UL_SHIFT;
460 	ppdu_info->he_data3 |= value;
461 
462 	value = u32_get_bits(info0, HAL_RX_HE_SIG_A_SU_INFO_INFO0_TRANSMIT_MCS);
463 	ppdu_info->mcs = value;
464 	value = value << HE_TRANSMIT_MCS_SHIFT;
465 	ppdu_info->he_data3 |= value;
466 
467 	value = u32_get_bits(info0, HAL_RX_HE_SIG_A_SU_INFO_INFO0_DCM);
468 	he_dcm = value;
469 	value = value << HE_DCM_SHIFT;
470 	ppdu_info->he_data3 |= value;
471 	value = u32_get_bits(info1, HAL_RX_HE_SIG_A_SU_INFO_INFO1_CODING);
472 	value = value << HE_CODING_SHIFT;
473 	ppdu_info->he_data3 |= value;
474 	value = u32_get_bits(info1, HAL_RX_HE_SIG_A_SU_INFO_INFO1_LDPC_EXTRA);
475 	value = value << HE_LDPC_EXTRA_SYMBOL_SHIFT;
476 	ppdu_info->he_data3 |= value;
477 	value = u32_get_bits(info1, HAL_RX_HE_SIG_A_SU_INFO_INFO1_STBC);
478 	he_stbc = value;
479 	value = value << HE_STBC_SHIFT;
480 	ppdu_info->he_data3 |= value;
481 
482 	/* data4 */
483 	ppdu_info->he_data4 = u32_get_bits(info0,
484 					   HAL_RX_HE_SIG_A_SU_INFO_INFO0_SPATIAL_REUSE);
485 
486 	/* data5 */
487 	value = u32_get_bits(info0,
488 			     HAL_RX_HE_SIG_A_SU_INFO_INFO0_TRANSMIT_BW);
489 	ppdu_info->he_data5 = value;
490 	ppdu_info->bw = value;
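	/* Note: the 2-bit CP_LTF_SIZE field selects one of four GI/LTF
	 * combinations; for value 3 the DCM+STBC case is special-cased
	 * below to 0.8 us GI with 4x LTF.
	 */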
491 	value = u32_get_bits(info0, HAL_RX_HE_SIG_A_SU_INFO_INFO0_CP_LTF_SIZE);
492 	switch (value) {
493 	case 0:
494 		he_gi = HE_GI_0_8;
495 		he_ltf = HE_LTF_1_X;
496 		break;
497 	case 1:
498 		he_gi = HE_GI_0_8;
499 		he_ltf = HE_LTF_2_X;
500 		break;
501 	case 2:
502 		he_gi = HE_GI_1_6;
503 		he_ltf = HE_LTF_2_X;
504 		break;
505 	case 3:
506 		if (he_dcm && he_stbc) {
507 			he_gi = HE_GI_0_8;
508 			he_ltf = HE_LTF_4_X;
509 		} else {
510 			he_gi = HE_GI_3_2;
511 			he_ltf = HE_LTF_4_X;
512 		}
513 		break;
514 	}
515 	ppdu_info->gi = he_gi;
516 	value = he_gi << HE_GI_SHIFT;
517 	ppdu_info->he_data5 |= value;
518 	value = he_ltf << HE_LTF_SIZE_SHIFT;
519 	ppdu_info->ltf_size = he_ltf;
520 	ppdu_info->he_data5 |= value;
521 
522 	value = u32_get_bits(info0, HAL_RX_HE_SIG_A_SU_INFO_INFO0_NSTS);
523 	value = (value << HE_LTF_SYM_SHIFT);
524 	ppdu_info->he_data5 |= value;
525 
526 	value = u32_get_bits(info1, HAL_RX_HE_SIG_A_SU_INFO_INFO1_PKT_EXT_FACTOR);
527 	value = value << HE_PRE_FEC_PAD_SHIFT;
528 	ppdu_info->he_data5 |= value;
529 
530 	value = u32_get_bits(info1, HAL_RX_HE_SIG_A_SU_INFO_INFO1_TXBF);
531 	value = value << HE_TXBF_SHIFT;
532 	ppdu_info->he_data5 |= value;
533 	value = u32_get_bits(info1, HAL_RX_HE_SIG_A_SU_INFO_INFO1_PKT_EXT_PE_DISAM);
534 	value = value << HE_PE_DISAMBIGUITY_SHIFT;
535 	ppdu_info->he_data5 |= value;
536 
537 	/* data6 */
538 	value = u32_get_bits(info0, HAL_RX_HE_SIG_A_SU_INFO_INFO0_NSTS);
539 	value++;
540 	ppdu_info->he_data6 = value;
541 	value = u32_get_bits(info1, HAL_RX_HE_SIG_A_SU_INFO_INFO1_DOPPLER_IND);
542 	value = value << HE_DOPPLER_SHIFT;
543 	ppdu_info->he_data6 |= value;
544 	value = u32_get_bits(info1, HAL_RX_HE_SIG_A_SU_INFO_INFO1_TXOP_DURATION);
545 	value = value << HE_TXOP_SHIFT;
546 	ppdu_info->he_data6 |= value;
547 
548 	ppdu_info->mcs =
549 		u32_get_bits(info0, HAL_RX_HE_SIG_A_SU_INFO_INFO0_TRANSMIT_MCS);
550 	ppdu_info->bw =
551 		u32_get_bits(info0, HAL_RX_HE_SIG_A_SU_INFO_INFO0_TRANSMIT_BW);
552 	ppdu_info->ldpc = u32_get_bits(info1, HAL_RX_HE_SIG_A_SU_INFO_INFO1_CODING);
553 	ppdu_info->is_stbc = u32_get_bits(info1, HAL_RX_HE_SIG_A_SU_INFO_INFO1_STBC);
554 	ppdu_info->beamformed = u32_get_bits(info1, HAL_RX_HE_SIG_A_SU_INFO_INFO1_TXBF);
555 	dcm = u32_get_bits(info0, HAL_RX_HE_SIG_A_SU_INFO_INFO0_DCM);
556 	ppdu_info->nss = u32_get_bits(info0, HAL_RX_HE_SIG_A_SU_INFO_INFO0_NSTS);
557 	ppdu_info->dcm = dcm;
558 }
559 
560 static void
561 ath12k_dp_mon_hal_rx_parse_u_sig_cmn(const struct hal_mon_usig_cmn *cmn,
562 				     struct hal_rx_mon_ppdu_info *ppdu_info)
563 {
564 	u32 common;
565 
566 	ppdu_info->u_sig_info.bw = le32_get_bits(cmn->info0,
567 						 HAL_RX_USIG_CMN_INFO0_BW);
568 	ppdu_info->u_sig_info.ul_dl = le32_get_bits(cmn->info0,
569 						    HAL_RX_USIG_CMN_INFO0_UL_DL);
570 
571 	common = __le32_to_cpu(ppdu_info->u_sig_info.usig.common);
572 	common |= IEEE80211_RADIOTAP_EHT_USIG_COMMON_PHY_VER_KNOWN |
573 		  IEEE80211_RADIOTAP_EHT_USIG_COMMON_BW_KNOWN |
574 		  IEEE80211_RADIOTAP_EHT_USIG_COMMON_UL_DL_KNOWN |
575 		  IEEE80211_RADIOTAP_EHT_USIG_COMMON_BSS_COLOR_KNOWN |
576 		  IEEE80211_RADIOTAP_EHT_USIG_COMMON_TXOP_KNOWN |
577 		  ATH12K_LE32_DEC_ENC(cmn->info0,
578 				      HAL_RX_USIG_CMN_INFO0_PHY_VERSION,
579 				      IEEE80211_RADIOTAP_EHT_USIG_COMMON_PHY_VER) |
580 		  u32_encode_bits(ppdu_info->u_sig_info.bw,
581 				  IEEE80211_RADIOTAP_EHT_USIG_COMMON_BW) |
582 		  u32_encode_bits(ppdu_info->u_sig_info.ul_dl,
583 				  IEEE80211_RADIOTAP_EHT_USIG_COMMON_UL_DL) |
584 		  ATH12K_LE32_DEC_ENC(cmn->info0,
585 				      HAL_RX_USIG_CMN_INFO0_BSS_COLOR,
586 				      IEEE80211_RADIOTAP_EHT_USIG_COMMON_BSS_COLOR) |
587 		  ATH12K_LE32_DEC_ENC(cmn->info0,
588 				      HAL_RX_USIG_CMN_INFO0_TXOP,
589 				      IEEE80211_RADIOTAP_EHT_USIG_COMMON_TXOP);
590 	ppdu_info->u_sig_info.usig.common = cpu_to_le32(common);
591 
592 	switch (ppdu_info->u_sig_info.bw) {
593 	default:
594 		fallthrough;
595 	case HAL_EHT_BW_20:
596 		ppdu_info->bw = HAL_RX_BW_20MHZ;
597 		break;
598 	case HAL_EHT_BW_40:
599 		ppdu_info->bw = HAL_RX_BW_40MHZ;
600 		break;
601 	case HAL_EHT_BW_80:
602 		ppdu_info->bw = HAL_RX_BW_80MHZ;
603 		break;
604 	case HAL_EHT_BW_160:
605 		ppdu_info->bw = HAL_RX_BW_160MHZ;
606 		break;
607 	case HAL_EHT_BW_320_1:
608 	case HAL_EHT_BW_320_2:
609 		ppdu_info->bw = HAL_RX_BW_320MHZ;
610 		break;
611 	}
612 }
613 
614 static void
615 ath12k_dp_mon_hal_rx_parse_u_sig_tb(const struct hal_mon_usig_tb *usig_tb,
616 				    struct hal_rx_mon_ppdu_info *ppdu_info)
617 {
618 	struct ieee80211_radiotap_eht_usig *usig = &ppdu_info->u_sig_info.usig;
619 	enum ieee80211_radiotap_eht_usig_tb spatial_reuse1, spatial_reuse2;
620 	u32 common, value, mask;
621 
622 	spatial_reuse1 = IEEE80211_RADIOTAP_EHT_USIG2_TB_B3_B6_SPATIAL_REUSE_1;
623 	spatial_reuse2 = IEEE80211_RADIOTAP_EHT_USIG2_TB_B7_B10_SPATIAL_REUSE_2;
624 
625 	common = __le32_to_cpu(usig->common);
626 	value = __le32_to_cpu(usig->value);
627 	mask = __le32_to_cpu(usig->mask);
628 
629 	ppdu_info->u_sig_info.ppdu_type_comp_mode =
630 				le32_get_bits(usig_tb->info0,
631 					      HAL_RX_USIG_TB_INFO0_PPDU_TYPE_COMP_MODE);
632 
633 	common |= ATH12K_LE32_DEC_ENC(usig_tb->info0,
634 				      HAL_RX_USIG_TB_INFO0_RX_INTEG_CHECK_PASS,
635 				      IEEE80211_RADIOTAP_EHT_USIG_COMMON_BAD_USIG_CRC);
636 
637 	value |= IEEE80211_RADIOTAP_EHT_USIG1_TB_B20_B25_DISREGARD |
638 		 u32_encode_bits(ppdu_info->u_sig_info.ppdu_type_comp_mode,
639 				 IEEE80211_RADIOTAP_EHT_USIG2_TB_B0_B1_PPDU_TYPE) |
640 		 IEEE80211_RADIOTAP_EHT_USIG2_TB_B2_VALIDATE |
641 		 ATH12K_LE32_DEC_ENC(usig_tb->info0,
642 				     HAL_RX_USIG_TB_INFO0_SPATIAL_REUSE_1,
643 				     spatial_reuse1) |
644 		 ATH12K_LE32_DEC_ENC(usig_tb->info0,
645 				     HAL_RX_USIG_TB_INFO0_SPATIAL_REUSE_2,
646 				     spatial_reuse2) |
647 		 IEEE80211_RADIOTAP_EHT_USIG2_TB_B11_B15_DISREGARD |
648 		 ATH12K_LE32_DEC_ENC(usig_tb->info0,
649 				     HAL_RX_USIG_TB_INFO0_CRC,
650 				     IEEE80211_RADIOTAP_EHT_USIG2_TB_B16_B19_CRC) |
651 		 ATH12K_LE32_DEC_ENC(usig_tb->info0,
652 				     HAL_RX_USIG_TB_INFO0_TAIL,
653 				     IEEE80211_RADIOTAP_EHT_USIG2_TB_B20_B25_TAIL);
654 
655 	mask |= IEEE80211_RADIOTAP_EHT_USIG1_TB_B20_B25_DISREGARD |
656 		IEEE80211_RADIOTAP_EHT_USIG2_TB_B0_B1_PPDU_TYPE |
657 		IEEE80211_RADIOTAP_EHT_USIG2_TB_B2_VALIDATE |
658 		spatial_reuse1 | spatial_reuse2 |
659 		IEEE80211_RADIOTAP_EHT_USIG2_TB_B11_B15_DISREGARD |
660 		IEEE80211_RADIOTAP_EHT_USIG2_TB_B16_B19_CRC |
661 		IEEE80211_RADIOTAP_EHT_USIG2_TB_B20_B25_TAIL;
662 
663 	usig->common = cpu_to_le32(common);
664 	usig->value = cpu_to_le32(value);
665 	usig->mask = cpu_to_le32(mask);
666 }
667 
668 static void
669 ath12k_dp_mon_hal_rx_parse_u_sig_mu(const struct hal_mon_usig_mu *usig_mu,
670 				    struct hal_rx_mon_ppdu_info *ppdu_info)
671 {
672 	struct ieee80211_radiotap_eht_usig *usig = &ppdu_info->u_sig_info.usig;
673 	enum ieee80211_radiotap_eht_usig_mu sig_symb, punc;
674 	u32 common, value, mask;
675 
676 	sig_symb = IEEE80211_RADIOTAP_EHT_USIG2_MU_B11_B15_EHT_SIG_SYMBOLS;
677 	punc = IEEE80211_RADIOTAP_EHT_USIG2_MU_B3_B7_PUNCTURED_INFO;
678 
679 	common = __le32_to_cpu(usig->common);
680 	value = __le32_to_cpu(usig->value);
681 	mask = __le32_to_cpu(usig->mask);
682 
683 	ppdu_info->u_sig_info.ppdu_type_comp_mode =
684 				le32_get_bits(usig_mu->info0,
685 					      HAL_RX_USIG_MU_INFO0_PPDU_TYPE_COMP_MODE);
686 	ppdu_info->u_sig_info.eht_sig_mcs =
687 				le32_get_bits(usig_mu->info0,
688 					      HAL_RX_USIG_MU_INFO0_EHT_SIG_MCS);
689 	ppdu_info->u_sig_info.num_eht_sig_sym =
690 				le32_get_bits(usig_mu->info0,
691 					      HAL_RX_USIG_MU_INFO0_NUM_EHT_SIG_SYM);
692 
693 	common |= ATH12K_LE32_DEC_ENC(usig_mu->info0,
694 				      HAL_RX_USIG_MU_INFO0_RX_INTEG_CHECK_PASS,
695 				      IEEE80211_RADIOTAP_EHT_USIG_COMMON_BAD_USIG_CRC);
696 
697 	value |= IEEE80211_RADIOTAP_EHT_USIG1_MU_B20_B24_DISREGARD |
698 		 IEEE80211_RADIOTAP_EHT_USIG1_MU_B25_VALIDATE |
699 		 u32_encode_bits(ppdu_info->u_sig_info.ppdu_type_comp_mode,
700 				 IEEE80211_RADIOTAP_EHT_USIG2_MU_B0_B1_PPDU_TYPE) |
701 		 IEEE80211_RADIOTAP_EHT_USIG2_MU_B2_VALIDATE |
702 		 ATH12K_LE32_DEC_ENC(usig_mu->info0,
703 				     HAL_RX_USIG_MU_INFO0_PUNC_CH_INFO,
704 				     punc) |
705 		 IEEE80211_RADIOTAP_EHT_USIG2_MU_B8_VALIDATE |
706 		 u32_encode_bits(ppdu_info->u_sig_info.eht_sig_mcs,
707 				 IEEE80211_RADIOTAP_EHT_USIG2_MU_B9_B10_SIG_MCS) |
708 		 u32_encode_bits(ppdu_info->u_sig_info.num_eht_sig_sym,
709 				 sig_symb) |
710 		 ATH12K_LE32_DEC_ENC(usig_mu->info0,
711 				     HAL_RX_USIG_MU_INFO0_CRC,
712 				     IEEE80211_RADIOTAP_EHT_USIG2_MU_B16_B19_CRC) |
713 		 ATH12K_LE32_DEC_ENC(usig_mu->info0,
714 				     HAL_RX_USIG_MU_INFO0_TAIL,
715 				     IEEE80211_RADIOTAP_EHT_USIG2_MU_B20_B25_TAIL);
716 
717 	mask |= IEEE80211_RADIOTAP_EHT_USIG1_MU_B20_B24_DISREGARD |
718 		IEEE80211_RADIOTAP_EHT_USIG1_MU_B25_VALIDATE |
719 		IEEE80211_RADIOTAP_EHT_USIG2_MU_B0_B1_PPDU_TYPE |
720 		IEEE80211_RADIOTAP_EHT_USIG2_MU_B2_VALIDATE |
721 		punc |
722 		IEEE80211_RADIOTAP_EHT_USIG2_MU_B8_VALIDATE |
723 		IEEE80211_RADIOTAP_EHT_USIG2_MU_B9_B10_SIG_MCS |
724 		sig_symb |
725 		IEEE80211_RADIOTAP_EHT_USIG2_MU_B16_B19_CRC |
726 		IEEE80211_RADIOTAP_EHT_USIG2_MU_B20_B25_TAIL;
727 
728 	usig->common = cpu_to_le32(common);
729 	usig->value = cpu_to_le32(value);
730 	usig->mask = cpu_to_le32(mask);
731 }
732 
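/* Note: U-SIG dispatch — a PPDU type/compression mode of 0 together with the
 * UL flag set is treated as a trigger-based (TB) PPDU and parsed with the TB
 * layout; everything else is parsed with the MU layout.
 */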
733 static void
734 ath12k_dp_mon_hal_rx_parse_u_sig_hdr(const struct hal_mon_usig_hdr *usig,
735 				     struct hal_rx_mon_ppdu_info *ppdu_info)
736 {
737 	u8 comp_mode;
738 
739 	ppdu_info->eht_usig = true;
740 
741 	ath12k_dp_mon_hal_rx_parse_u_sig_cmn(&usig->cmn, ppdu_info);
742 
743 	comp_mode = le32_get_bits(usig->non_cmn.mu.info0,
744 				  HAL_RX_USIG_MU_INFO0_PPDU_TYPE_COMP_MODE);
745 
746 	if (comp_mode == 0 && ppdu_info->u_sig_info.ul_dl)
747 		ath12k_dp_mon_hal_rx_parse_u_sig_tb(&usig->non_cmn.tb, ppdu_info);
748 	else
749 		ath12k_dp_mon_hal_rx_parse_u_sig_mu(&usig->non_cmn.mu, ppdu_info);
750 }
751 
752 static void
753 ath12k_dp_mon_hal_aggr_tlv(struct hal_rx_mon_ppdu_info *ppdu_info,
754 			   u16 tlv_len, const void *tlv_data)
755 {
756 	if (tlv_len <= HAL_RX_MON_MAX_AGGR_SIZE - ppdu_info->tlv_aggr.cur_len) {
757 		memcpy(ppdu_info->tlv_aggr.buf + ppdu_info->tlv_aggr.cur_len,
758 		       tlv_data, tlv_len);
759 		ppdu_info->tlv_aggr.cur_len += tlv_len;
760 	}
761 }
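/* Note: this aggregation helper is apparently used for EHT-SIG content that
 * is split across several status TLVs; each fragment is appended to
 * ppdu_info->tlv_aggr.buf (bounded by HAL_RX_MON_MAX_AGGR_SIZE) and the
 * aggregate is handed to ath12k_dp_mon_parse_eht_sig_hdr() once a TLV with a
 * different tag arrives (see ath12k_dp_mon_rx_parse_status_tlv() below).
 */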
762 
763 static inline bool
764 ath12k_dp_mon_hal_rx_is_frame_type_ndp(const struct hal_rx_u_sig_info *usig_info)
765 {
766 	if (usig_info->ppdu_type_comp_mode == 1 &&
767 	    usig_info->eht_sig_mcs == 0 &&
768 	    usig_info->num_eht_sig_sym == 0)
769 		return true;
770 
771 	return false;
772 }
773 
774 static inline bool
775 ath12k_dp_mon_hal_rx_is_non_ofdma(const struct hal_rx_u_sig_info *usig_info)
776 {
777 	u32 ppdu_type_comp_mode = usig_info->ppdu_type_comp_mode;
778 	u32 ul_dl = usig_info->ul_dl;
779 
780 	if ((ppdu_type_comp_mode == HAL_RX_RECEPTION_TYPE_MU_MIMO && ul_dl == 0) ||
781 	    (ppdu_type_comp_mode == HAL_RX_RECEPTION_TYPE_MU_OFDMA && ul_dl == 0) ||
782 	    (ppdu_type_comp_mode == HAL_RX_RECEPTION_TYPE_MU_MIMO  && ul_dl == 1))
783 		return true;
784 
785 	return false;
786 }
787 
788 static inline bool
789 ath12k_dp_mon_hal_rx_is_ofdma(const struct hal_rx_u_sig_info *usig_info)
790 {
791 	if (usig_info->ppdu_type_comp_mode == 0 && usig_info->ul_dl == 0)
792 		return true;
793 
794 	return false;
795 }
796 
797 static void
798 ath12k_dp_mon_hal_rx_parse_eht_sig_ndp(const struct hal_eht_sig_ndp_cmn_eb *eht_sig_ndp,
799 				       struct hal_rx_mon_ppdu_info *ppdu_info)
800 {
801 	struct hal_rx_radiotap_eht *eht = &ppdu_info->eht_info.eht;
802 	u32 known, data;
803 
804 	known = __le32_to_cpu(eht->known);
805 	known |= IEEE80211_RADIOTAP_EHT_KNOWN_SPATIAL_REUSE |
806 		 IEEE80211_RADIOTAP_EHT_KNOWN_EHT_LTF |
807 		 IEEE80211_RADIOTAP_EHT_KNOWN_NSS_S |
808 		 IEEE80211_RADIOTAP_EHT_KNOWN_BEAMFORMED_S |
809 		 IEEE80211_RADIOTAP_EHT_KNOWN_DISREGARD_S |
810 		 IEEE80211_RADIOTAP_EHT_KNOWN_CRC1 |
811 		 IEEE80211_RADIOTAP_EHT_KNOWN_TAIL1;
812 	eht->known = cpu_to_le32(known);
813 
814 	data = __le32_to_cpu(eht->data[0]);
815 	data |= ATH12K_LE32_DEC_ENC(eht_sig_ndp->info0,
816 				    HAL_RX_EHT_SIG_NDP_CMN_INFO0_SPATIAL_REUSE,
817 				    IEEE80211_RADIOTAP_EHT_DATA0_SPATIAL_REUSE);
818 	/* GI and LTF size are separately indicated in radiotap header
819 	 * and hence will be parsed from other TLV
820 	 */
821 	data |=	ATH12K_LE32_DEC_ENC(eht_sig_ndp->info0,
822 				    HAL_RX_EHT_SIG_NDP_CMN_INFO0_NUM_LTF_SYM,
823 				    IEEE80211_RADIOTAP_EHT_DATA0_EHT_LTF);
824 
825 	data |=	ATH12K_LE32_DEC_ENC(eht_sig_ndp->info0,
826 				    HAL_RX_EHT_SIG_NDP_CMN_INFO0_CRC,
827 				    IEEE80211_RADIOTAP_EHT_DATA0_CRC1_O);
828 
829 	data |= ATH12K_LE32_DEC_ENC(eht_sig_ndp->info0,
830 				    HAL_RX_EHT_SIG_NDP_CMN_INFO0_DISREGARD,
831 				    IEEE80211_RADIOTAP_EHT_DATA0_DISREGARD_S);
832 	eht->data[0] = cpu_to_le32(data);
833 
834 	data = __le32_to_cpu(eht->data[7]);
835 	data |=	ATH12K_LE32_DEC_ENC(eht_sig_ndp->info0,
836 				    HAL_RX_EHT_SIG_NDP_CMN_INFO0_NSS,
837 				    IEEE80211_RADIOTAP_EHT_DATA7_NSS_S);
838 
839 	data |=	ATH12K_LE32_DEC_ENC(eht_sig_ndp->info0,
840 				    HAL_RX_EHT_SIG_NDP_CMN_INFO0_BEAMFORMED,
841 				    IEEE80211_RADIOTAP_EHT_DATA7_BEAMFORMED_S);
842 	eht->data[7] = cpu_to_le32(data);
843 }
844 
845 static void
846 ath12k_dp_mon_hal_rx_parse_usig_overflow(const struct hal_eht_sig_usig_overflow *ovflow,
847 					 struct hal_rx_mon_ppdu_info *ppdu_info)
848 {
849 	struct hal_rx_radiotap_eht *eht = &ppdu_info->eht_info.eht;
850 	u32 known, data;
851 
852 	known = __le32_to_cpu(eht->known);
853 	known |= IEEE80211_RADIOTAP_EHT_KNOWN_SPATIAL_REUSE |
854 		 IEEE80211_RADIOTAP_EHT_KNOWN_EHT_LTF |
855 		 IEEE80211_RADIOTAP_EHT_KNOWN_LDPC_EXTRA_SYM_OM |
856 		 IEEE80211_RADIOTAP_EHT_KNOWN_PRE_PADD_FACOR_OM |
857 		 IEEE80211_RADIOTAP_EHT_KNOWN_PE_DISAMBIGUITY_OM |
858 		 IEEE80211_RADIOTAP_EHT_KNOWN_DISREGARD_O;
859 	eht->known = cpu_to_le32(known);
860 
861 	data = __le32_to_cpu(eht->data[0]);
862 	data |=	ATH12K_LE32_DEC_ENC(ovflow->info0,
863 				    HAL_RX_EHT_SIG_OVERFLOW_INFO0_SPATIAL_REUSE,
864 				    IEEE80211_RADIOTAP_EHT_DATA0_SPATIAL_REUSE);
865 
866 	/* GI and LTF size are separately indicated in radiotap header
867 	 * and hence will be parsed from other TLV
868 	 */
869 	data |=	ATH12K_LE32_DEC_ENC(ovflow->info0,
870 				    HAL_RX_EHT_SIG_OVERFLOW_INFO0_NUM_LTF_SYM,
871 				    IEEE80211_RADIOTAP_EHT_DATA0_EHT_LTF);
872 
873 	data |=	ATH12K_LE32_DEC_ENC(ovflow->info0,
874 				    HAL_RX_EHT_SIG_OVERFLOW_INFO0_LDPC_EXTA_SYM,
875 				    IEEE80211_RADIOTAP_EHT_DATA0_LDPC_EXTRA_SYM_OM);
876 
877 	data |=	ATH12K_LE32_DEC_ENC(ovflow->info0,
878 				    HAL_RX_EHT_SIG_OVERFLOW_INFO0_PRE_FEC_PAD_FACTOR,
879 				    IEEE80211_RADIOTAP_EHT_DATA0_PRE_PADD_FACOR_OM);
880 
881 	data |=	ATH12K_LE32_DEC_ENC(ovflow->info0,
882 				    HAL_RX_EHT_SIG_OVERFLOW_INFO0_DISAMBIGUITY,
883 				    IEEE80211_RADIOTAP_EHT_DATA0_PE_DISAMBIGUITY_OM);
884 
885 	data |=	ATH12K_LE32_DEC_ENC(ovflow->info0,
886 				    HAL_RX_EHT_SIG_OVERFLOW_INFO0_DISREGARD,
887 				    IEEE80211_RADIOTAP_EHT_DATA0_DISREGARD_O);
888 	eht->data[0] = cpu_to_le32(data);
889 }
890 
891 static void
892 ath12k_dp_mon_hal_rx_parse_non_ofdma_users(const struct hal_eht_sig_non_ofdma_cmn_eb *eb,
893 					   struct hal_rx_mon_ppdu_info *ppdu_info)
894 {
895 	struct hal_rx_radiotap_eht *eht = &ppdu_info->eht_info.eht;
896 	u32 known, data;
897 
898 	known = __le32_to_cpu(eht->known);
899 	known |= IEEE80211_RADIOTAP_EHT_KNOWN_NR_NON_OFDMA_USERS_M;
900 	eht->known = cpu_to_le32(known);
901 
902 	data = __le32_to_cpu(eht->data[7]);
903 	data |=	ATH12K_LE32_DEC_ENC(eb->info0,
904 				    HAL_RX_EHT_SIG_NON_OFDMA_INFO0_NUM_USERS,
905 				    IEEE80211_RADIOTAP_EHT_DATA7_NUM_OF_NON_OFDMA_USERS);
906 	eht->data[7] = cpu_to_le32(data);
907 }
908 
909 static void
910 ath12k_dp_mon_hal_rx_parse_eht_mumimo_user(const struct hal_eht_sig_mu_mimo *user,
911 					   struct hal_rx_mon_ppdu_info *ppdu_info)
912 {
913 	struct hal_rx_eht_info *eht_info = &ppdu_info->eht_info;
914 	u32 user_idx;
915 
916 	if (eht_info->num_user_info >= ARRAY_SIZE(eht_info->user_info))
917 		return;
918 
919 	user_idx = eht_info->num_user_info++;
920 
921 	eht_info->user_info[user_idx] |=
922 		IEEE80211_RADIOTAP_EHT_USER_INFO_STA_ID_KNOWN |
923 		IEEE80211_RADIOTAP_EHT_USER_INFO_MCS_KNOWN |
924 		IEEE80211_RADIOTAP_EHT_USER_INFO_CODING_KNOWN |
925 		IEEE80211_RADIOTAP_EHT_USER_INFO_SPATIAL_CONFIG_KNOWN_M |
926 		ATH12K_LE32_DEC_ENC(user->info0,
927 				    HAL_RX_EHT_SIG_MUMIMO_USER_INFO0_STA_ID,
928 				    IEEE80211_RADIOTAP_EHT_USER_INFO_STA_ID) |
929 		ATH12K_LE32_DEC_ENC(user->info0,
930 				    HAL_RX_EHT_SIG_MUMIMO_USER_INFO0_CODING,
931 				    IEEE80211_RADIOTAP_EHT_USER_INFO_CODING) |
932 		ATH12K_LE32_DEC_ENC(user->info0,
933 				    HAL_RX_EHT_SIG_MUMIMO_USER_INFO0_MCS,
934 				    IEEE80211_RADIOTAP_EHT_USER_INFO_MCS) |
935 		ATH12K_LE32_DEC_ENC(user->info0,
936 				    HAL_RX_EHT_SIG_MUMIMO_USER_INFO0_SPATIAL_CODING,
937 				    IEEE80211_RADIOTAP_EHT_USER_INFO_SPATIAL_CONFIG_M);
938 
939 	ppdu_info->mcs = le32_get_bits(user->info0,
940 				       HAL_RX_EHT_SIG_MUMIMO_USER_INFO0_MCS);
941 }
942 
943 static void
944 ath12k_dp_mon_hal_rx_parse_eht_non_mumimo_user(const struct hal_eht_sig_non_mu_mimo *user,
945 					       struct hal_rx_mon_ppdu_info *ppdu_info)
946 {
947 	struct hal_rx_eht_info *eht_info = &ppdu_info->eht_info;
948 	u32 user_idx;
949 
950 	if (eht_info->num_user_info >= ARRAY_SIZE(eht_info->user_info))
951 		return;
952 
953 	user_idx = eht_info->num_user_info++;
954 
955 	eht_info->user_info[user_idx] |=
956 		IEEE80211_RADIOTAP_EHT_USER_INFO_STA_ID_KNOWN |
957 		IEEE80211_RADIOTAP_EHT_USER_INFO_MCS_KNOWN |
958 		IEEE80211_RADIOTAP_EHT_USER_INFO_CODING_KNOWN |
959 		IEEE80211_RADIOTAP_EHT_USER_INFO_NSS_KNOWN_O |
960 		IEEE80211_RADIOTAP_EHT_USER_INFO_BEAMFORMING_KNOWN_O |
961 		ATH12K_LE32_DEC_ENC(user->info0,
962 				    HAL_RX_EHT_SIG_NON_MUMIMO_USER_INFO0_STA_ID,
963 				    IEEE80211_RADIOTAP_EHT_USER_INFO_STA_ID) |
964 		ATH12K_LE32_DEC_ENC(user->info0,
965 				    HAL_RX_EHT_SIG_NON_MUMIMO_USER_INFO0_CODING,
966 				    IEEE80211_RADIOTAP_EHT_USER_INFO_CODING) |
967 		ATH12K_LE32_DEC_ENC(user->info0,
968 				    HAL_RX_EHT_SIG_NON_MUMIMO_USER_INFO0_MCS,
969 				    IEEE80211_RADIOTAP_EHT_USER_INFO_MCS) |
970 		ATH12K_LE32_DEC_ENC(user->info0,
971 				    HAL_RX_EHT_SIG_NON_MUMIMO_USER_INFO0_NSS,
972 				    IEEE80211_RADIOTAP_EHT_USER_INFO_NSS_O) |
973 		ATH12K_LE32_DEC_ENC(user->info0,
974 				    HAL_RX_EHT_SIG_NON_MUMIMO_USER_INFO0_BEAMFORMED,
975 				    IEEE80211_RADIOTAP_EHT_USER_INFO_BEAMFORMING_O);
976 
977 	ppdu_info->mcs = le32_get_bits(user->info0,
978 				       HAL_RX_EHT_SIG_NON_MUMIMO_USER_INFO0_MCS);
979 
980 	ppdu_info->nss = le32_get_bits(user->info0,
981 				       HAL_RX_EHT_SIG_NON_MUMIMO_USER_INFO0_NSS) + 1;
982 }
983 
984 static inline bool
985 ath12k_dp_mon_hal_rx_is_mu_mimo_user(const struct hal_rx_u_sig_info *usig_info)
986 {
987 	if (usig_info->ppdu_type_comp_mode == HAL_RX_RECEPTION_TYPE_SU &&
988 	    usig_info->ul_dl == 1)
989 		return true;
990 
991 	return false;
992 }
993 
994 static void
995 ath12k_dp_mon_hal_rx_parse_eht_sig_non_ofdma(const void *tlv,
996 					     struct hal_rx_mon_ppdu_info *ppdu_info)
997 {
998 	const struct hal_eht_sig_non_ofdma_cmn_eb *eb = tlv;
999 
1000 	ath12k_dp_mon_hal_rx_parse_usig_overflow(tlv, ppdu_info);
1001 	ath12k_dp_mon_hal_rx_parse_non_ofdma_users(eb, ppdu_info);
1002 
1003 	if (ath12k_dp_mon_hal_rx_is_mu_mimo_user(&ppdu_info->u_sig_info))
1004 		ath12k_dp_mon_hal_rx_parse_eht_mumimo_user(&eb->user_field.mu_mimo,
1005 							   ppdu_info);
1006 	else
1007 		ath12k_dp_mon_hal_rx_parse_eht_non_mumimo_user(&eb->user_field.n_mu_mimo,
1008 							       ppdu_info);
1009 }
1010 
1011 static void
1012 ath12k_dp_mon_hal_rx_parse_ru_allocation(const struct hal_eht_sig_ofdma_cmn_eb *eb,
1013 					 struct hal_rx_mon_ppdu_info *ppdu_info)
1014 {
1015 	const struct hal_eht_sig_ofdma_cmn_eb1 *ofdma_cmn_eb1 = &eb->eb1;
1016 	const struct hal_eht_sig_ofdma_cmn_eb2 *ofdma_cmn_eb2 = &eb->eb2;
1017 	struct hal_rx_radiotap_eht *eht = &ppdu_info->eht_info.eht;
1018 	enum ieee80211_radiotap_eht_data ru_123, ru_124, ru_125, ru_126;
1019 	enum ieee80211_radiotap_eht_data ru_121, ru_122, ru_112, ru_111;
1020 	u32 data;
1021 
1022 	ru_123 = IEEE80211_RADIOTAP_EHT_DATA4_RU_ALLOC_CC_1_2_3;
1023 	ru_124 = IEEE80211_RADIOTAP_EHT_DATA5_RU_ALLOC_CC_1_2_4;
1024 	ru_125 = IEEE80211_RADIOTAP_EHT_DATA5_RU_ALLOC_CC_1_2_5;
1025 	ru_126 = IEEE80211_RADIOTAP_EHT_DATA6_RU_ALLOC_CC_1_2_6;
1026 	ru_121 = IEEE80211_RADIOTAP_EHT_DATA3_RU_ALLOC_CC_1_2_1;
1027 	ru_122 = IEEE80211_RADIOTAP_EHT_DATA3_RU_ALLOC_CC_1_2_2;
1028 	ru_112 = IEEE80211_RADIOTAP_EHT_DATA2_RU_ALLOC_CC_1_1_2;
1029 	ru_111 = IEEE80211_RADIOTAP_EHT_DATA1_RU_ALLOC_CC_1_1_1;
1030 
1031 	switch (ppdu_info->u_sig_info.bw) {
1032 	case HAL_EHT_BW_320_2:
1033 	case HAL_EHT_BW_320_1:
1034 		data = __le32_to_cpu(eht->data[4]);
1035 		/* CC1 2::3 */
1036 		data |=	IEEE80211_RADIOTAP_EHT_DATA4_RU_ALLOC_CC_1_2_3_KNOWN |
1037 			ATH12K_LE64_DEC_ENC(ofdma_cmn_eb2->info0,
1038 					    HAL_RX_EHT_SIG_OFDMA_EB2_RU_ALLOC_2_3,
1039 					    ru_123);
1040 		eht->data[4] = cpu_to_le32(data);
1041 
1042 		data = __le32_to_cpu(eht->data[5]);
1043 		/* CC1 2::4 */
1044 		data |=	IEEE80211_RADIOTAP_EHT_DATA5_RU_ALLOC_CC_1_2_4_KNOWN |
1045 			ATH12K_LE64_DEC_ENC(ofdma_cmn_eb2->info0,
1046 					    HAL_RX_EHT_SIG_OFDMA_EB2_RU_ALLOC_2_4,
1047 					    ru_124);
1048 
1049 		/* CC1 2::5 */
1050 		data |=	IEEE80211_RADIOTAP_EHT_DATA5_RU_ALLOC_CC_1_2_5_KNOWN |
1051 			ATH12K_LE64_DEC_ENC(ofdma_cmn_eb2->info0,
1052 					    HAL_RX_EHT_SIG_OFDMA_EB2_RU_ALLOC_2_5,
1053 					    ru_125);
1054 		eht->data[5] = cpu_to_le32(data);
1055 
1056 		data = __le32_to_cpu(eht->data[6]);
1057 		/* CC1 2::6 */
1058 		data |=	IEEE80211_RADIOTAP_EHT_DATA6_RU_ALLOC_CC_1_2_6_KNOWN |
1059 			ATH12K_LE64_DEC_ENC(ofdma_cmn_eb2->info0,
1060 					    HAL_RX_EHT_SIG_OFDMA_EB2_RU_ALLOC_2_6,
1061 					    ru_126);
1062 		eht->data[6] = cpu_to_le32(data);
1063 
1064 		fallthrough;
1065 	case HAL_EHT_BW_160:
1066 		data = __le32_to_cpu(eht->data[3]);
1067 		/* CC1 2::1 */
1068 		data |=	IEEE80211_RADIOTAP_EHT_DATA3_RU_ALLOC_CC_1_2_1_KNOWN |
1069 			ATH12K_LE64_DEC_ENC(ofdma_cmn_eb2->info0,
1070 					    HAL_RX_EHT_SIG_OFDMA_EB2_RU_ALLOC_2_1,
1071 					    ru_121);
1072 		/* CC1 2::2 */
1073 		data |=	IEEE80211_RADIOTAP_EHT_DATA3_RU_ALLOC_CC_1_2_2_KNOWN |
1074 			ATH12K_LE64_DEC_ENC(ofdma_cmn_eb2->info0,
1075 					    HAL_RX_EHT_SIG_OFDMA_EB2_RU_ALLOC_2_2,
1076 					    ru_122);
1077 		eht->data[3] = cpu_to_le32(data);
1078 
1079 		fallthrough;
1080 	case HAL_EHT_BW_80:
1081 		data = __le32_to_cpu(eht->data[2]);
1082 		/* CC1 1::2 */
1083 		data |=	IEEE80211_RADIOTAP_EHT_DATA2_RU_ALLOC_CC_1_1_2_KNOWN |
1084 			ATH12K_LE64_DEC_ENC(ofdma_cmn_eb1->info0,
1085 					    HAL_RX_EHT_SIG_OFDMA_EB1_RU_ALLOC_1_2,
1086 					    ru_112);
1087 		eht->data[2] = cpu_to_le32(data);
1088 
1089 		fallthrough;
1090 	case HAL_EHT_BW_40:
1091 		fallthrough;
1092 	case HAL_EHT_BW_20:
1093 		data = __le32_to_cpu(eht->data[1]);
1094 		/* CC1 1::1 */
1095 		data |=	IEEE80211_RADIOTAP_EHT_DATA1_RU_ALLOC_CC_1_1_1_KNOWN |
1096 			ATH12K_LE64_DEC_ENC(ofdma_cmn_eb1->info0,
1097 					    HAL_RX_EHT_SIG_OFDMA_EB1_RU_ALLOC_1_1,
1098 					    ru_111);
1099 		eht->data[1] = cpu_to_le32(data);
1100 		break;
1101 	default:
1102 		break;
1103 	}
1104 }
1105 
1106 static void
1107 ath12k_dp_mon_hal_rx_parse_eht_sig_ofdma(const void *tlv,
1108 					 struct hal_rx_mon_ppdu_info *ppdu_info)
1109 {
1110 	const struct hal_eht_sig_ofdma_cmn_eb *ofdma = tlv;
1111 
1112 	ath12k_dp_mon_hal_rx_parse_usig_overflow(tlv, ppdu_info);
1113 	ath12k_dp_mon_hal_rx_parse_ru_allocation(ofdma, ppdu_info);
1114 
1115 	ath12k_dp_mon_hal_rx_parse_eht_non_mumimo_user(&ofdma->user_field.n_mu_mimo,
1116 						       ppdu_info);
1117 }
1118 
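/* Note: EHT-SIG dispatch — based on the previously parsed U-SIG fields the
 * aggregated EHT-SIG buffer is interpreted as NDP, non-OFDMA (SU / MU-MIMO)
 * or OFDMA content; the exact conditions are in the three
 * ath12k_dp_mon_hal_rx_is_*() helpers above.
 */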
1119 static void
1120 ath12k_dp_mon_parse_eht_sig_hdr(struct hal_rx_mon_ppdu_info *ppdu_info,
1121 				const void *tlv_data)
1122 {
1123 	ppdu_info->is_eht = true;
1124 
1125 	if (ath12k_dp_mon_hal_rx_is_frame_type_ndp(&ppdu_info->u_sig_info))
1126 		ath12k_dp_mon_hal_rx_parse_eht_sig_ndp(tlv_data, ppdu_info);
1127 	else if (ath12k_dp_mon_hal_rx_is_non_ofdma(&ppdu_info->u_sig_info))
1128 		ath12k_dp_mon_hal_rx_parse_eht_sig_non_ofdma(tlv_data, ppdu_info);
1129 	else if (ath12k_dp_mon_hal_rx_is_ofdma(&ppdu_info->u_sig_info))
1130 		ath12k_dp_mon_hal_rx_parse_eht_sig_ofdma(tlv_data, ppdu_info);
1131 }
1132 
1133 static inline enum ath12k_eht_ru_size
1134 hal_rx_mon_hal_ru_size_to_ath12k_ru_size(u32 hal_ru_size)
1135 {
1136 	switch (hal_ru_size) {
1137 	case HAL_EHT_RU_26:
1138 		return ATH12K_EHT_RU_26;
1139 	case HAL_EHT_RU_52:
1140 		return ATH12K_EHT_RU_52;
1141 	case HAL_EHT_RU_78:
1142 		return ATH12K_EHT_RU_52_26;
1143 	case HAL_EHT_RU_106:
1144 		return ATH12K_EHT_RU_106;
1145 	case HAL_EHT_RU_132:
1146 		return ATH12K_EHT_RU_106_26;
1147 	case HAL_EHT_RU_242:
1148 		return ATH12K_EHT_RU_242;
1149 	case HAL_EHT_RU_484:
1150 		return ATH12K_EHT_RU_484;
1151 	case HAL_EHT_RU_726:
1152 		return ATH12K_EHT_RU_484_242;
1153 	case HAL_EHT_RU_996:
1154 		return ATH12K_EHT_RU_996;
1155 	case HAL_EHT_RU_996x2:
1156 		return ATH12K_EHT_RU_996x2;
1157 	case HAL_EHT_RU_996x3:
1158 		return ATH12K_EHT_RU_996x3;
1159 	case HAL_EHT_RU_996x4:
1160 		return ATH12K_EHT_RU_996x4;
1161 	case HAL_EHT_RU_NONE:
1162 		return ATH12K_EHT_RU_INVALID;
1163 	case HAL_EHT_RU_996_484:
1164 		return ATH12K_EHT_RU_996_484;
1165 	case HAL_EHT_RU_996x2_484:
1166 		return ATH12K_EHT_RU_996x2_484;
1167 	case HAL_EHT_RU_996x3_484:
1168 		return ATH12K_EHT_RU_996x3_484;
1169 	case HAL_EHT_RU_996_484_242:
1170 		return ATH12K_EHT_RU_996_484_242;
1171 	default:
1172 		return ATH12K_EHT_RU_INVALID;
1173 	}
1174 }
1175 
1176 static inline u32
1177 hal_rx_ul_ofdma_ru_size_to_width(enum ath12k_eht_ru_size ru_size)
1178 {
1179 	switch (ru_size) {
1180 	case ATH12K_EHT_RU_26:
1181 		return RU_26;
1182 	case ATH12K_EHT_RU_52:
1183 		return RU_52;
1184 	case ATH12K_EHT_RU_52_26:
1185 		return RU_52_26;
1186 	case ATH12K_EHT_RU_106:
1187 		return RU_106;
1188 	case ATH12K_EHT_RU_106_26:
1189 		return RU_106_26;
1190 	case ATH12K_EHT_RU_242:
1191 		return RU_242;
1192 	case ATH12K_EHT_RU_484:
1193 		return RU_484;
1194 	case ATH12K_EHT_RU_484_242:
1195 		return RU_484_242;
1196 	case ATH12K_EHT_RU_996:
1197 		return RU_996;
1198 	case ATH12K_EHT_RU_996_484:
1199 		return RU_996_484;
1200 	case ATH12K_EHT_RU_996_484_242:
1201 		return RU_996_484_242;
1202 	case ATH12K_EHT_RU_996x2:
1203 		return RU_2X996;
1204 	case ATH12K_EHT_RU_996x2_484:
1205 		return RU_2X996_484;
1206 	case ATH12K_EHT_RU_996x3:
1207 		return RU_3X996;
1208 	case ATH12K_EHT_RU_996x3_484:
1209 		return RU_3X996_484;
1210 	case ATH12K_EHT_RU_996x4:
1211 		return RU_4X996;
1212 	default:
1213 		return RU_INVALID;
1214 	}
1215 }
1216 
1217 static void
1218 ath12k_dp_mon_hal_rx_parse_user_info(const struct hal_receive_user_info *rx_usr_info,
1219 				     u16 user_id,
1220 				     struct hal_rx_mon_ppdu_info *ppdu_info)
1221 {
1222 	struct hal_rx_user_status *mon_rx_user_status = NULL;
1223 	struct hal_rx_radiotap_eht *eht = &ppdu_info->eht_info.eht;
1224 	enum ath12k_eht_ru_size rtap_ru_size = ATH12K_EHT_RU_INVALID;
1225 	u32 ru_width, reception_type, ru_index = HAL_EHT_RU_INVALID;
1226 	u32 ru_type_80_0, ru_start_index_80_0;
1227 	u32 ru_type_80_1, ru_start_index_80_1;
1228 	u32 ru_type_80_2, ru_start_index_80_2;
1229 	u32 ru_type_80_3, ru_start_index_80_3;
1230 	u32 ru_size = 0, num_80mhz_with_ru = 0;
1231 	u64 ru_index_320mhz = 0;
1232 	u32 ru_index_per80mhz;
1233 
1234 	reception_type = le32_get_bits(rx_usr_info->info0,
1235 				       HAL_RX_USR_INFO0_RECEPTION_TYPE);
1236 
1237 	switch (reception_type) {
1238 	case HAL_RECEPTION_TYPE_SU:
1239 		ppdu_info->reception_type = HAL_RX_RECEPTION_TYPE_SU;
1240 		break;
1241 	case HAL_RECEPTION_TYPE_DL_MU_MIMO:
1242 	case HAL_RECEPTION_TYPE_UL_MU_MIMO:
1243 		ppdu_info->reception_type = HAL_RX_RECEPTION_TYPE_MU_MIMO;
1244 		break;
1245 	case HAL_RECEPTION_TYPE_DL_MU_OFMA:
1246 	case HAL_RECEPTION_TYPE_UL_MU_OFDMA:
1247 		ppdu_info->reception_type = HAL_RX_RECEPTION_TYPE_MU_OFDMA;
1248 		break;
1249 	case HAL_RECEPTION_TYPE_DL_MU_OFDMA_MIMO:
1250 	case HAL_RECEPTION_TYPE_UL_MU_OFDMA_MIMO:
1251 		ppdu_info->reception_type = HAL_RX_RECEPTION_TYPE_MU_OFDMA_MIMO;
1252 	}
1253 
1254 	ppdu_info->is_stbc = le32_get_bits(rx_usr_info->info0, HAL_RX_USR_INFO0_STBC);
1255 	ppdu_info->ldpc = le32_get_bits(rx_usr_info->info2, HAL_RX_USR_INFO2_LDPC);
1256 	ppdu_info->dcm = le32_get_bits(rx_usr_info->info2, HAL_RX_USR_INFO2_STA_DCM);
1257 	ppdu_info->bw = le32_get_bits(rx_usr_info->info1, HAL_RX_USR_INFO1_RX_BW);
1258 	ppdu_info->mcs = le32_get_bits(rx_usr_info->info1, HAL_RX_USR_INFO1_MCS);
1259 	ppdu_info->nss = le32_get_bits(rx_usr_info->info2, HAL_RX_USR_INFO2_NSS) + 1;
1260 
1261 	if (user_id < HAL_MAX_UL_MU_USERS) {
1262 		mon_rx_user_status = &ppdu_info->userstats[user_id];
1263 		mon_rx_user_status->mcs = ppdu_info->mcs;
1264 		mon_rx_user_status->nss = ppdu_info->nss;
1265 	}
1266 
1267 	if (!(ppdu_info->reception_type == HAL_RX_RECEPTION_TYPE_MU_MIMO ||
1268 	      ppdu_info->reception_type == HAL_RX_RECEPTION_TYPE_MU_OFDMA ||
1269 	      ppdu_info->reception_type == HAL_RX_RECEPTION_TYPE_MU_OFDMA_MIMO))
1270 		return;
1271 
1272 	/* RU allocation present only for OFDMA reception */
1273 	ru_type_80_0 = le32_get_bits(rx_usr_info->info2, HAL_RX_USR_INFO2_RU_TYPE_80_0);
1274 	ru_start_index_80_0 = le32_get_bits(rx_usr_info->info3,
1275 					    HAL_RX_USR_INFO3_RU_START_IDX_80_0);
1276 	if (ru_type_80_0 != HAL_EHT_RU_NONE) {
1277 		ru_size += ru_type_80_0;
1278 		ru_index_per80mhz = ru_start_index_80_0;
1279 		ru_index = ru_index_per80mhz;
1280 		ru_index_320mhz |= HAL_RU_PER80(ru_type_80_0, 0, ru_index_per80mhz);
1281 		num_80mhz_with_ru++;
1282 	}
1283 
1284 	ru_type_80_1 = le32_get_bits(rx_usr_info->info2, HAL_RX_USR_INFO2_RU_TYPE_80_1);
1285 	ru_start_index_80_1 = le32_get_bits(rx_usr_info->info3,
1286 					    HAL_RX_USR_INFO3_RU_START_IDX_80_1);
1287 	if (ru_type_80_1 != HAL_EHT_RU_NONE) {
1288 		ru_size += ru_type_80_1;
1289 		ru_index_per80mhz = ru_start_index_80_1;
1290 		ru_index = ru_index_per80mhz;
1291 		ru_index_320mhz |= HAL_RU_PER80(ru_type_80_1, 1, ru_index_per80mhz);
1292 		num_80mhz_with_ru++;
1293 	}
1294 
1295 	ru_type_80_2 = le32_get_bits(rx_usr_info->info2, HAL_RX_USR_INFO2_RU_TYPE_80_2);
1296 	ru_start_index_80_2 = le32_get_bits(rx_usr_info->info3,
1297 					    HAL_RX_USR_INFO3_RU_START_IDX_80_2);
1298 	if (ru_type_80_2 != HAL_EHT_RU_NONE) {
1299 		ru_size += ru_type_80_2;
1300 		ru_index_per80mhz = ru_start_index_80_2;
1301 		ru_index = ru_index_per80mhz;
1302 		ru_index_320mhz |= HAL_RU_PER80(ru_type_80_2, 2, ru_index_per80mhz);
1303 		num_80mhz_with_ru++;
1304 	}
1305 
1306 	ru_type_80_3 = le32_get_bits(rx_usr_info->info2, HAL_RX_USR_INFO2_RU_TYPE_80_3);
1307 	ru_start_index_80_3 = le32_get_bits(rx_usr_info->info3,
1308 					    HAL_RX_USR_INFO3_RU_START_IDX_80_3);
1309 	if (ru_type_80_3 != HAL_EHT_RU_NONE) {
1310 		ru_size += ru_type_80_3;
1311 		ru_index_per80mhz = ru_start_index_80_3;
1312 		ru_index = ru_index_per80mhz;
1313 		ru_index_320mhz |= HAL_RU_PER80(ru_type_80_3, 3, ru_index_per80mhz);
1314 		num_80mhz_with_ru++;
1315 	}
1316 
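	/* Note: when more than one 80 MHz segment carries an RU, the combined
	 * HAL_RU_PER80() value is mapped to a multi-RU (MRU) index, and the
	 * accumulated ru_size is offset by 4 so that it selects the MRU
	 * portion of the HAL RU size enumeration (an assumption based on the
	 * enum ordering, not documented here).
	 */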
1317 	if (num_80mhz_with_ru > 1) {
1318 		/* Calculate the MRU index */
1319 		switch (ru_index_320mhz) {
1320 		case HAL_EHT_RU_996_484_0:
1321 		case HAL_EHT_RU_996x2_484_0:
1322 		case HAL_EHT_RU_996x3_484_0:
1323 			ru_index = 0;
1324 			break;
1325 		case HAL_EHT_RU_996_484_1:
1326 		case HAL_EHT_RU_996x2_484_1:
1327 		case HAL_EHT_RU_996x3_484_1:
1328 			ru_index = 1;
1329 			break;
1330 		case HAL_EHT_RU_996_484_2:
1331 		case HAL_EHT_RU_996x2_484_2:
1332 		case HAL_EHT_RU_996x3_484_2:
1333 			ru_index = 2;
1334 			break;
1335 		case HAL_EHT_RU_996_484_3:
1336 		case HAL_EHT_RU_996x2_484_3:
1337 		case HAL_EHT_RU_996x3_484_3:
1338 			ru_index = 3;
1339 			break;
1340 		case HAL_EHT_RU_996_484_4:
1341 		case HAL_EHT_RU_996x2_484_4:
1342 		case HAL_EHT_RU_996x3_484_4:
1343 			ru_index = 4;
1344 			break;
1345 		case HAL_EHT_RU_996_484_5:
1346 		case HAL_EHT_RU_996x2_484_5:
1347 		case HAL_EHT_RU_996x3_484_5:
1348 			ru_index = 5;
1349 			break;
1350 		case HAL_EHT_RU_996_484_6:
1351 		case HAL_EHT_RU_996x2_484_6:
1352 		case HAL_EHT_RU_996x3_484_6:
1353 			ru_index = 6;
1354 			break;
1355 		case HAL_EHT_RU_996_484_7:
1356 		case HAL_EHT_RU_996x2_484_7:
1357 		case HAL_EHT_RU_996x3_484_7:
1358 			ru_index = 7;
1359 			break;
1360 		case HAL_EHT_RU_996x2_484_8:
1361 			ru_index = 8;
1362 			break;
1363 		case HAL_EHT_RU_996x2_484_9:
1364 			ru_index = 9;
1365 			break;
1366 		case HAL_EHT_RU_996x2_484_10:
1367 			ru_index = 10;
1368 			break;
1369 		case HAL_EHT_RU_996x2_484_11:
1370 			ru_index = 11;
1371 			break;
1372 		default:
1373 			ru_index = HAL_EHT_RU_INVALID;
1374 			break;
1375 		}
1376 
1377 		ru_size += 4;
1378 	}
1379 
1380 	rtap_ru_size = hal_rx_mon_hal_ru_size_to_ath12k_ru_size(ru_size);
1381 	if (rtap_ru_size != ATH12K_EHT_RU_INVALID) {
1382 		u32 known, data;
1383 
1384 		known = __le32_to_cpu(eht->known);
1385 		known |= IEEE80211_RADIOTAP_EHT_KNOWN_RU_MRU_SIZE_OM;
1386 		eht->known = cpu_to_le32(known);
1387 
1388 		data = __le32_to_cpu(eht->data[1]);
1389 		data |=	u32_encode_bits(rtap_ru_size,
1390 					IEEE80211_RADIOTAP_EHT_DATA1_RU_SIZE);
1391 		eht->data[1] = cpu_to_le32(data);
1392 	}
1393 
1394 	if (ru_index != HAL_EHT_RU_INVALID) {
1395 		u32 known, data;
1396 
1397 		known = __le32_to_cpu(eht->known);
1398 		known |= IEEE80211_RADIOTAP_EHT_KNOWN_RU_MRU_INDEX_OM;
1399 		eht->known = cpu_to_le32(known);
1400 
1401 		data = __le32_to_cpu(eht->data[1]);
1402 		data |=	u32_encode_bits(ru_index,
1403 					IEEE80211_RADIOTAP_EHT_DATA1_RU_INDEX);
1404 		eht->data[1] = cpu_to_le32(data);
1405 	}
1406 
1407 	if (mon_rx_user_status && ru_index != HAL_EHT_RU_INVALID &&
1408 	    rtap_ru_size != ATH12K_EHT_RU_INVALID) {
1409 		mon_rx_user_status->ul_ofdma_ru_start_index = ru_index;
1410 		mon_rx_user_status->ul_ofdma_ru_size = rtap_ru_size;
1411 
1412 		ru_width = hal_rx_ul_ofdma_ru_size_to_width(rtap_ru_size);
1413 
1414 		mon_rx_user_status->ul_ofdma_ru_width = ru_width;
1415 		mon_rx_user_status->ofdma_info_valid = 1;
1416 	}
1417 }
1418 
1419 static void ath12k_dp_mon_parse_rx_msdu_end_err(u32 info, u32 *errmap)
1420 {
1421 	if (info & RX_MSDU_END_INFO13_FCS_ERR)
1422 		*errmap |= HAL_RX_MPDU_ERR_FCS;
1423 
1424 	if (info & RX_MSDU_END_INFO13_DECRYPT_ERR)
1425 		*errmap |= HAL_RX_MPDU_ERR_DECRYPT;
1426 
1427 	if (info & RX_MSDU_END_INFO13_TKIP_MIC_ERR)
1428 		*errmap |= HAL_RX_MPDU_ERR_TKIP_MIC;
1429 
1430 	if (info & RX_MSDU_END_INFO13_A_MSDU_ERROR)
1431 		*errmap |= HAL_RX_MPDU_ERR_AMSDU_ERR;
1432 
1433 	if (info & RX_MSDU_END_INFO13_OVERFLOW_ERR)
1434 		*errmap |= HAL_RX_MPDU_ERR_OVERFLOW;
1435 
1436 	if (info & RX_MSDU_END_INFO13_MSDU_LEN_ERR)
1437 		*errmap |= HAL_RX_MPDU_ERR_MSDU_LEN;
1438 
1439 	if (info & RX_MSDU_END_INFO13_MPDU_LEN_ERR)
1440 		*errmap |= HAL_RX_MPDU_ERR_MPDU_LEN;
1441 }
1442 
1443 static void
1444 ath12k_dp_mon_parse_status_msdu_end(struct ath12k_mon_data *pmon,
1445 				    const struct hal_rx_msdu_end *msdu_end)
1446 {
1447 	ath12k_dp_mon_parse_rx_msdu_end_err(__le32_to_cpu(msdu_end->info2),
1448 					    &pmon->err_bitmap);
1449 	pmon->decap_format = le32_get_bits(msdu_end->info1,
1450 					   RX_MSDU_END_INFO11_DECAP_FORMAT);
1451 }
1452 
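/* Note: main monitor-status TLV walker. Each 64-bit TLV header encodes tag,
 * length and user id; an in-progress EHT-SIG aggregation is flushed to
 * ath12k_dp_mon_parse_eht_sig_hdr() as soon as a TLV with a different tag
 * shows up, then the TLV itself is decoded in the switch below.
 */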
1453 static enum hal_rx_mon_status
1454 ath12k_dp_mon_rx_parse_status_tlv(struct ath12k *ar,
1455 				  struct ath12k_mon_data *pmon,
1456 				  const struct hal_tlv_64_hdr *tlv)
1457 {
1458 	struct hal_rx_mon_ppdu_info *ppdu_info = &pmon->mon_ppdu_info;
1459 	const void *tlv_data = tlv->value;
1460 	u32 info[7], userid;
1461 	u16 tlv_tag, tlv_len;
1462 
1463 	tlv_tag = le64_get_bits(tlv->tl, HAL_TLV_64_HDR_TAG);
1464 	tlv_len = le64_get_bits(tlv->tl, HAL_TLV_64_HDR_LEN);
1465 	userid = le64_get_bits(tlv->tl, HAL_TLV_64_USR_ID);
1466 
1467 	if (ppdu_info->tlv_aggr.in_progress && ppdu_info->tlv_aggr.tlv_tag != tlv_tag) {
1468 		ath12k_dp_mon_parse_eht_sig_hdr(ppdu_info, ppdu_info->tlv_aggr.buf);
1469 
1470 		ppdu_info->tlv_aggr.in_progress = false;
1471 		ppdu_info->tlv_aggr.cur_len = 0;
1472 	}
1473 
1474 	switch (tlv_tag) {
1475 	case HAL_RX_PPDU_START: {
1476 		const struct hal_rx_ppdu_start *ppdu_start = tlv_data;
1477 
1478 		u64 ppdu_ts = ath12k_le32hilo_to_u64(ppdu_start->ppdu_start_ts_63_32,
1479 						     ppdu_start->ppdu_start_ts_31_0);
1480 
1481 		info[0] = __le32_to_cpu(ppdu_start->info0);
1482 
1483 		ppdu_info->ppdu_id = u32_get_bits(info[0],
1484 						  HAL_RX_PPDU_START_INFO0_PPDU_ID);
1485 
1486 		info[1] = __le32_to_cpu(ppdu_start->info1);
1487 		ppdu_info->chan_num = u32_get_bits(info[1],
1488 						   HAL_RX_PPDU_START_INFO1_CHAN_NUM);
1489 		ppdu_info->freq = u32_get_bits(info[1],
1490 					       HAL_RX_PPDU_START_INFO1_CHAN_FREQ);
1491 		ppdu_info->ppdu_ts = ppdu_ts;
1492 
1493 		if (ppdu_info->ppdu_id != ppdu_info->last_ppdu_id) {
1494 			ppdu_info->last_ppdu_id = ppdu_info->ppdu_id;
1495 			ppdu_info->num_users = 0;
1496 			memset(&ppdu_info->mpdu_fcs_ok_bitmap, 0,
1497 			       HAL_RX_NUM_WORDS_PER_PPDU_BITMAP *
1498 			       sizeof(ppdu_info->mpdu_fcs_ok_bitmap[0]));
1499 		}
1500 		break;
1501 	}
1502 	case HAL_RX_PPDU_END_USER_STATS: {
1503 		const struct hal_rx_ppdu_end_user_stats *eu_stats = tlv_data;
1504 		u32 tid_bitmap;
1505 
1506 		info[0] = __le32_to_cpu(eu_stats->info0);
1507 		info[1] = __le32_to_cpu(eu_stats->info1);
1508 		info[2] = __le32_to_cpu(eu_stats->info2);
1509 		info[4] = __le32_to_cpu(eu_stats->info4);
1510 		info[5] = __le32_to_cpu(eu_stats->info5);
1511 		info[6] = __le32_to_cpu(eu_stats->info6);
1512 
1513 		ppdu_info->ast_index =
1514 			u32_get_bits(info[2], HAL_RX_PPDU_END_USER_STATS_INFO2_AST_INDEX);
1515 		ppdu_info->fc_valid =
1516 			u32_get_bits(info[1], HAL_RX_PPDU_END_USER_STATS_INFO1_FC_VALID);
1517 		tid_bitmap = u32_get_bits(info[6],
1518 					  HAL_RX_PPDU_END_USER_STATS_INFO6_TID_BITMAP);
1519 		ppdu_info->tid = ffs(tid_bitmap) - 1;
1520 		ppdu_info->tcp_msdu_count =
1521 			u32_get_bits(info[4],
1522 				     HAL_RX_PPDU_END_USER_STATS_INFO4_TCP_MSDU_CNT);
1523 		ppdu_info->udp_msdu_count =
1524 			u32_get_bits(info[4],
1525 				     HAL_RX_PPDU_END_USER_STATS_INFO4_UDP_MSDU_CNT);
1526 		ppdu_info->other_msdu_count =
1527 			u32_get_bits(info[5],
1528 				     HAL_RX_PPDU_END_USER_STATS_INFO5_OTHER_MSDU_CNT);
1529 		ppdu_info->tcp_ack_msdu_count =
1530 			u32_get_bits(info[5],
1531 				     HAL_RX_PPDU_END_USER_STATS_INFO5_TCP_ACK_MSDU_CNT);
1532 		ppdu_info->preamble_type =
1533 			u32_get_bits(info[1],
1534 				     HAL_RX_PPDU_END_USER_STATS_INFO1_PKT_TYPE);
1535 		ppdu_info->num_mpdu_fcs_ok =
1536 			u32_get_bits(info[1],
1537 				     HAL_RX_PPDU_END_USER_STATS_INFO1_MPDU_CNT_FCS_OK);
1538 		ppdu_info->num_mpdu_fcs_err =
1539 			u32_get_bits(info[0],
1540 				     HAL_RX_PPDU_END_USER_STATS_INFO0_MPDU_CNT_FCS_ERR);
1541 		ppdu_info->peer_id =
1542 			u32_get_bits(info[0], HAL_RX_PPDU_END_USER_STATS_INFO0_PEER_ID);
1543 
1544 		switch (ppdu_info->preamble_type) {
1545 		case HAL_RX_PREAMBLE_11N:
1546 			ppdu_info->ht_flags = 1;
1547 			break;
1548 		case HAL_RX_PREAMBLE_11AC:
1549 			ppdu_info->vht_flags = 1;
1550 			break;
1551 		case HAL_RX_PREAMBLE_11AX:
1552 			ppdu_info->he_flags = 1;
1553 			break;
1554 		case HAL_RX_PREAMBLE_11BE:
1555 			ppdu_info->is_eht = true;
1556 			break;
1557 		default:
1558 			break;
1559 		}
1560 
1561 		if (userid < HAL_MAX_UL_MU_USERS) {
1562 			struct hal_rx_user_status *rxuser_stats =
1563 				&ppdu_info->userstats[userid];
1564 
1565 			if (ppdu_info->num_mpdu_fcs_ok > 1 ||
1566 			    ppdu_info->num_mpdu_fcs_err > 1)
1567 				ppdu_info->userstats[userid].ampdu_present = true;
1568 
1569 			ppdu_info->num_users += 1;
1570 
1571 			ath12k_dp_mon_rx_handle_ofdma_info(eu_stats, rxuser_stats);
1572 			ath12k_dp_mon_rx_populate_mu_user_info(eu_stats, ppdu_info,
1573 							       rxuser_stats);
1574 		}
1575 		ppdu_info->mpdu_fcs_ok_bitmap[0] = __le32_to_cpu(eu_stats->rsvd1[0]);
1576 		ppdu_info->mpdu_fcs_ok_bitmap[1] = __le32_to_cpu(eu_stats->rsvd1[1]);
1577 		break;
1578 	}
1579 	case HAL_RX_PPDU_END_USER_STATS_EXT: {
1580 		const struct hal_rx_ppdu_end_user_stats_ext *eu_stats = tlv_data;
1581 
1582 		ppdu_info->mpdu_fcs_ok_bitmap[2] = __le32_to_cpu(eu_stats->info1);
1583 		ppdu_info->mpdu_fcs_ok_bitmap[3] = __le32_to_cpu(eu_stats->info2);
1584 		ppdu_info->mpdu_fcs_ok_bitmap[4] = __le32_to_cpu(eu_stats->info3);
1585 		ppdu_info->mpdu_fcs_ok_bitmap[5] = __le32_to_cpu(eu_stats->info4);
1586 		ppdu_info->mpdu_fcs_ok_bitmap[6] = __le32_to_cpu(eu_stats->info5);
1587 		ppdu_info->mpdu_fcs_ok_bitmap[7] = __le32_to_cpu(eu_stats->info6);
1588 		break;
1589 	}
1590 	case HAL_PHYRX_HT_SIG:
1591 		ath12k_dp_mon_parse_ht_sig(tlv_data, ppdu_info);
1592 		break;
1593 
1594 	case HAL_PHYRX_L_SIG_B:
1595 		ath12k_dp_mon_parse_l_sig_b(tlv_data, ppdu_info);
1596 		break;
1597 
1598 	case HAL_PHYRX_L_SIG_A:
1599 		ath12k_dp_mon_parse_l_sig_a(tlv_data, ppdu_info);
1600 		break;
1601 
1602 	case HAL_PHYRX_VHT_SIG_A:
1603 		ath12k_dp_mon_parse_vht_sig_a(tlv_data, ppdu_info);
1604 		break;
1605 
1606 	case HAL_PHYRX_HE_SIG_A_SU:
1607 		ath12k_dp_mon_parse_he_sig_su(tlv_data, ppdu_info);
1608 		break;
1609 
1610 	case HAL_PHYRX_HE_SIG_A_MU_DL:
1611 		ath12k_dp_mon_parse_he_sig_mu(tlv_data, ppdu_info);
1612 		break;
1613 
1614 	case HAL_PHYRX_HE_SIG_B1_MU:
1615 		ath12k_dp_mon_parse_he_sig_b1_mu(tlv_data, ppdu_info);
1616 		break;
1617 
1618 	case HAL_PHYRX_HE_SIG_B2_MU:
1619 		ath12k_dp_mon_parse_he_sig_b2_mu(tlv_data, ppdu_info);
1620 		break;
1621 
1622 	case HAL_PHYRX_HE_SIG_B2_OFDMA:
1623 		ath12k_dp_mon_parse_he_sig_b2_ofdma(tlv_data, ppdu_info);
1624 		break;
1625 
1626 	case HAL_PHYRX_RSSI_LEGACY: {
1627 		const struct hal_rx_phyrx_rssi_legacy_info *rssi = tlv_data;
1628 
1629 		info[0] = __le32_to_cpu(rssi->info0);
1630 		info[1] = __le32_to_cpu(rssi->info1);
1631 
1632 		/* TODO: Note that the combined RSSI will not be accurate in the
1633 		 * MU case; the RSSI in MU needs to be retrieved from the
1634 		 * PHYRX_OTHER_RECEIVE_INFO TLV.
1635 		 */
1636 		ppdu_info->rssi_comb =
1637 			u32_get_bits(info[1],
1638 				     HAL_RX_PHYRX_RSSI_LEGACY_INFO_INFO1_RSSI_COMB);
1639 
1640 		ppdu_info->bw = u32_get_bits(info[0],
1641 					     HAL_RX_PHYRX_RSSI_LEGACY_INFO_INFO0_RX_BW);
1642 		break;
1643 	}
1644 	case HAL_PHYRX_OTHER_RECEIVE_INFO: {
1645 		const struct hal_phyrx_common_user_info *cmn_usr_info = tlv_data;
1646 
1647 		ppdu_info->gi = le32_get_bits(cmn_usr_info->info0,
1648 					      HAL_RX_PHY_CMN_USER_INFO0_GI);
1649 		break;
1650 	}
1651 	case HAL_RX_PPDU_START_USER_INFO:
1652 		ath12k_dp_mon_hal_rx_parse_user_info(tlv_data, userid, ppdu_info);
1653 		break;
1654 
1655 	case HAL_RXPCU_PPDU_END_INFO: {
1656 		const struct hal_rx_ppdu_end_duration *ppdu_rx_duration = tlv_data;
1657 
1658 		info[0] = __le32_to_cpu(ppdu_rx_duration->info0);
1659 		ppdu_info->rx_duration =
1660 			u32_get_bits(info[0], HAL_RX_PPDU_END_DURATION);
1661 		ppdu_info->tsft = __le32_to_cpu(ppdu_rx_duration->rsvd0[1]);
1662 		ppdu_info->tsft = (ppdu_info->tsft << 32) |
1663 				   __le32_to_cpu(ppdu_rx_duration->rsvd0[0]);
1664 		break;
1665 	}
1666 	case HAL_RX_MPDU_START: {
1667 		const struct hal_rx_mpdu_start *mpdu_start = tlv_data;
1668 		u16 peer_id;
1669 
1670 		info[1] = __le32_to_cpu(mpdu_start->info1);
1671 		peer_id = u32_get_bits(info[1], HAL_RX_MPDU_START_INFO1_PEERID);
1672 		if (peer_id)
1673 			ppdu_info->peer_id = peer_id;
1674 
1675 		ppdu_info->mpdu_len += u32_get_bits(info[1],
1676 						    HAL_RX_MPDU_START_INFO2_MPDU_LEN);
1677 		if (userid < HAL_MAX_UL_MU_USERS) {
1678 			info[0] = __le32_to_cpu(mpdu_start->info0);
1679 			ppdu_info->userid = userid;
1680 			ppdu_info->userstats[userid].ampdu_id =
1681 				u32_get_bits(info[0], HAL_RX_MPDU_START_INFO0_PPDU_ID);
1682 		}
1683 
1684 		return HAL_RX_MON_STATUS_MPDU_START;
1685 	}
1686 	case HAL_RX_MSDU_START:
1687 		/* TODO: add msdu start parsing logic */
1688 		break;
1689 	case HAL_MON_BUF_ADDR:
1690 		return HAL_RX_MON_STATUS_BUF_ADDR;
1691 	case HAL_RX_MSDU_END:
1692 		ath12k_dp_mon_parse_status_msdu_end(pmon, tlv_data);
1693 		return HAL_RX_MON_STATUS_MSDU_END;
1694 	case HAL_RX_MPDU_END:
1695 		return HAL_RX_MON_STATUS_MPDU_END;
1696 	case HAL_PHYRX_GENERIC_U_SIG:
1697 		ath12k_dp_mon_hal_rx_parse_u_sig_hdr(tlv_data, ppdu_info);
1698 		break;
1699 	case HAL_PHYRX_GENERIC_EHT_SIG:
1700 		/* Handle the case where aggregation is in progress
1701 		 * or the current TLV is one of the TLVs which should be
1702 		 * aggregated
1703 		 */
1704 		if (!ppdu_info->tlv_aggr.in_progress) {
1705 			ppdu_info->tlv_aggr.in_progress = true;
1706 			ppdu_info->tlv_aggr.tlv_tag = tlv_tag;
1707 			ppdu_info->tlv_aggr.cur_len = 0;
1708 		}
1709 
1710 		ppdu_info->is_eht = true;
1711 
1712 		ath12k_dp_mon_hal_aggr_tlv(ppdu_info, tlv_len, tlv_data);
1713 		break;
1714 	case HAL_DUMMY:
1715 		return HAL_RX_MON_STATUS_BUF_DONE;
1716 	case HAL_RX_PPDU_END_STATUS_DONE:
1717 	case 0:
1718 		return HAL_RX_MON_STATUS_PPDU_DONE;
1719 	default:
1720 		break;
1721 	}
1722 
1723 	return HAL_RX_MON_STATUS_PPDU_NOT_DONE;
1724 }
1725 
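/* Seed the mac80211 rx_status with the PPDU-level defaults (frequency,
 * bandwidth, NSS, legacy encoding, no signal) and derive the band from
 * the centre frequency; NUM_NL80211_BANDS is used as an "unknown band"
 * marker when the frequency is out of range.
 */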
1726 static void
1727 ath12k_dp_mon_fill_rx_stats_info(struct ath12k *ar,
1728 				 struct hal_rx_mon_ppdu_info *ppdu_info,
1729 				 struct ieee80211_rx_status *rx_status)
1730 {
1731 	u32 center_freq = ppdu_info->freq;
1732 
1733 	rx_status->freq = center_freq;
1734 	rx_status->bw = ath12k_mac_bw_to_mac80211_bw(ppdu_info->bw);
1735 	rx_status->nss = ppdu_info->nss;
1736 	rx_status->rate_idx = 0;
1737 	rx_status->encoding = RX_ENC_LEGACY;
1738 	rx_status->flag |= RX_FLAG_NO_SIGNAL_VAL;
1739 
1740 	if (center_freq >= ATH12K_MIN_6GHZ_FREQ &&
1741 	    center_freq <= ATH12K_MAX_6GHZ_FREQ) {
1742 		rx_status->band = NL80211_BAND_6GHZ;
1743 	} else if (center_freq >= ATH12K_MIN_2GHZ_FREQ &&
1744 		   center_freq <= ATH12K_MAX_2GHZ_FREQ) {
1745 		rx_status->band = NL80211_BAND_2GHZ;
1746 	} else if (center_freq >= ATH12K_MIN_5GHZ_FREQ &&
1747 		   center_freq <= ATH12K_MAX_5GHZ_FREQ) {
1748 		rx_status->band = NL80211_BAND_5GHZ;
1749 	} else {
1750 		rx_status->band = NUM_NL80211_BANDS;
1751 	}
1752 }
1753 
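/* Allocate and DMA-map one skb for the monitor status ring, align its
 * data pointer to RX_MON_STATUS_BUF_ALIGN and track it in the ring IDR
 * so the buf_id placed in the descriptor cookie can be resolved back to
 * the skb later. Returns NULL (with full unwind) on any failure.
 */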
1754 static struct sk_buff
1755 *ath12k_dp_rx_alloc_mon_status_buf(struct ath12k_base *ab,
1756 				   struct dp_rxdma_mon_ring *rx_ring,
1757 				   int *buf_id)
1758 {
1759 	struct sk_buff *skb;
1760 	dma_addr_t paddr;
1761 
1762 	skb = dev_alloc_skb(RX_MON_STATUS_BUF_SIZE);
1763 
1764 	if (!skb)
1765 		goto fail_alloc_skb;
1766 
1767 	if (!IS_ALIGNED((unsigned long)skb->data,
1768 			RX_MON_STATUS_BUF_ALIGN)) {
1769 		skb_pull(skb, PTR_ALIGN(skb->data, RX_MON_STATUS_BUF_ALIGN) -
1770 			 skb->data);
1771 	}
1772 
1773 	paddr = dma_map_single(ab->dev, skb->data,
1774 			       skb->len + skb_tailroom(skb),
1775 			       DMA_FROM_DEVICE);
1776 	if (unlikely(dma_mapping_error(ab->dev, paddr)))
1777 		goto fail_free_skb;
1778 
1779 	spin_lock_bh(&rx_ring->idr_lock);
1780 	*buf_id = idr_alloc(&rx_ring->bufs_idr, skb, 0,
1781 			    rx_ring->bufs_max, GFP_ATOMIC);
1782 	spin_unlock_bh(&rx_ring->idr_lock);
1783 	if (*buf_id < 0)
1784 		goto fail_dma_unmap;
1785 
1786 	ATH12K_SKB_RXCB(skb)->paddr = paddr;
1787 	return skb;
1788 
1789 fail_dma_unmap:
1790 	dma_unmap_single(ab->dev, paddr, skb->len + skb_tailroom(skb),
1791 			 DMA_FROM_DEVICE);
1792 fail_free_skb:
1793 	dev_kfree_skb_any(skb);
1794 fail_alloc_skb:
1795 	return NULL;
1796 }
1797 
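/* Peek at the next status ring entry and check whether hardware has
 * finished writing the corresponding buffer by testing for the
 * HAL_RX_STATUS_BUFFER_DONE tag at its head. Returns
 * DP_MON_STATUS_REPLINISH when the buffer is complete and
 * DP_MON_STATUS_NO_DMA otherwise.
 */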
1798 static enum dp_mon_status_buf_state
1799 ath12k_dp_rx_mon_buf_done(struct ath12k_base *ab, struct hal_srng *srng,
1800 			  struct dp_rxdma_mon_ring *rx_ring)
1801 {
1802 	struct ath12k_skb_rxcb *rxcb;
1803 	struct hal_tlv_64_hdr *tlv;
1804 	struct sk_buff *skb;
1805 	void *status_desc;
1806 	dma_addr_t paddr;
1807 	u32 cookie;
1808 	int buf_id;
1809 	u8 rbm;
1810 
1811 	status_desc = ath12k_hal_srng_src_next_peek(ab, srng);
1812 	if (!status_desc)
1813 		return DP_MON_STATUS_NO_DMA;
1814 
1815 	ath12k_hal_rx_buf_addr_info_get(status_desc, &paddr, &cookie, &rbm);
1816 
1817 	buf_id = u32_get_bits(cookie, DP_RXDMA_BUF_COOKIE_BUF_ID);
1818 
1819 	spin_lock_bh(&rx_ring->idr_lock);
1820 	skb = idr_find(&rx_ring->bufs_idr, buf_id);
1821 	spin_unlock_bh(&rx_ring->idr_lock);
1822 
1823 	if (!skb)
1824 		return DP_MON_STATUS_NO_DMA;
1825 
1826 	rxcb = ATH12K_SKB_RXCB(skb);
1827 	dma_sync_single_for_cpu(ab->dev, rxcb->paddr,
1828 				skb->len + skb_tailroom(skb),
1829 				DMA_FROM_DEVICE);
1830 
1831 	tlv = (struct hal_tlv_64_hdr *)skb->data;
1832 	if (le64_get_bits(tlv->tl, HAL_TLV_HDR_TAG) != HAL_RX_STATUS_BUFFER_DONE)
1833 		return DP_MON_STATUS_NO_DMA;
1834 
1835 	return DP_MON_STATUS_REPLINISH;
1836 }
1837 
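/* Track the destination-ring PPDU id: move *ppdu_id forward when the
 * MSDU's PPDU id is ahead of it (or when the id has wrapped around) and
 * return the new id, otherwise return 0 to indicate no change.
 */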
1838 static u32 ath12k_dp_mon_comp_ppduid(u32 msdu_ppdu_id, u32 *ppdu_id)
1839 {
1840 	u32 ret = 0;
1841 
1842 	if ((*ppdu_id < msdu_ppdu_id) &&
1843 	    ((msdu_ppdu_id - *ppdu_id) < DP_NOT_PPDU_ID_WRAP_AROUND)) {
1844 		/* Hold on mon dest ring, and reap mon status ring. */
1845 		*ppdu_id = msdu_ppdu_id;
1846 		ret = msdu_ppdu_id;
1847 	} else if ((*ppdu_id > msdu_ppdu_id) &&
1848 		((*ppdu_id - msdu_ppdu_id) > DP_NOT_PPDU_ID_WRAP_AROUND)) {
1849 		/* PPDU ID has exceeded the maximum value and will
1850 		 * restart from 0.
1851 		 */
1852 		*ppdu_id = msdu_ppdu_id;
1853 		ret = msdu_ppdu_id;
1854 	}
1855 	return ret;
1856 }
1857 
1858 static
1859 void ath12k_dp_mon_next_link_desc_get(struct hal_rx_msdu_link *msdu_link,
1860 				      dma_addr_t *paddr, u32 *sw_cookie, u8 *rbm,
1861 				      struct ath12k_buffer_addr **pp_buf_addr_info)
1862 {
1863 	struct ath12k_buffer_addr *buf_addr_info;
1864 
1865 	buf_addr_info = &msdu_link->buf_addr_info;
1866 
1867 	ath12k_hal_rx_buf_addr_info_get(buf_addr_info, paddr, sw_cookie, rbm);
1868 
1869 	*pp_buf_addr_info = buf_addr_info;
1870 }
1871 
1872 static void
1873 ath12k_dp_mon_fill_rx_rate(struct ath12k *ar,
1874 			   struct hal_rx_mon_ppdu_info *ppdu_info,
1875 			   struct ieee80211_rx_status *rx_status)
1876 {
1877 	struct ieee80211_supported_band *sband;
1878 	enum rx_msdu_start_pkt_type pkt_type;
1879 	u8 rate_mcs, nss, sgi;
1880 	bool is_cck;
1881 
1882 	pkt_type = ppdu_info->preamble_type;
1883 	rate_mcs = ppdu_info->rate;
1884 	nss = ppdu_info->nss;
1885 	sgi = ppdu_info->gi;
1886 
1887 	switch (pkt_type) {
1888 	case RX_MSDU_START_PKT_TYPE_11A:
1889 	case RX_MSDU_START_PKT_TYPE_11B:
1890 		is_cck = (pkt_type == RX_MSDU_START_PKT_TYPE_11B);
1891 		if (rx_status->band < NUM_NL80211_BANDS) {
1892 			sband = &ar->mac.sbands[rx_status->band];
1893 			rx_status->rate_idx = ath12k_mac_hw_rate_to_idx(sband, rate_mcs,
1894 									is_cck);
1895 		}
1896 		break;
1897 	case RX_MSDU_START_PKT_TYPE_11N:
1898 		rx_status->encoding = RX_ENC_HT;
1899 		if (rate_mcs > ATH12K_HT_MCS_MAX) {
1900 			ath12k_warn(ar->ab,
1901 				    "Received with invalid mcs in HT mode %d\n",
1902 				     rate_mcs);
1903 			break;
1904 		}
1905 		rx_status->rate_idx = rate_mcs + (8 * (nss - 1));
1906 		if (sgi)
1907 			rx_status->enc_flags |= RX_ENC_FLAG_SHORT_GI;
1908 		break;
1909 	case RX_MSDU_START_PKT_TYPE_11AC:
1910 		rx_status->encoding = RX_ENC_VHT;
1911 		rx_status->rate_idx = rate_mcs;
1912 		if (rate_mcs > ATH12K_VHT_MCS_MAX) {
1913 			ath12k_warn(ar->ab,
1914 				    "Received with invalid mcs in VHT mode %d\n",
1915 				     rate_mcs);
1916 			break;
1917 		}
1918 		if (sgi)
1919 			rx_status->enc_flags |= RX_ENC_FLAG_SHORT_GI;
1920 		break;
1921 	case RX_MSDU_START_PKT_TYPE_11AX:
1922 		rx_status->rate_idx = rate_mcs;
1923 		if (rate_mcs > ATH12K_HE_MCS_MAX) {
1924 			ath12k_warn(ar->ab,
1925 				    "Received with invalid mcs in HE mode %d\n",
1926 				    rate_mcs);
1927 			break;
1928 		}
1929 		rx_status->encoding = RX_ENC_HE;
1930 		rx_status->he_gi = ath12k_he_gi_to_nl80211_he_gi(sgi);
1931 		break;
1932 	case RX_MSDU_START_PKT_TYPE_11BE:
1933 		rx_status->rate_idx = rate_mcs;
1934 		if (rate_mcs > ATH12K_EHT_MCS_MAX) {
1935 			ath12k_warn(ar->ab,
1936 				    "Received with invalid mcs in EHT mode %d\n",
1937 				    rate_mcs);
1938 			break;
1939 		}
1940 		rx_status->encoding = RX_ENC_EHT;
1941 		rx_status->he_gi = ath12k_he_gi_to_nl80211_he_gi(sgi);
1942 		break;
1943 	default:
1944 		ath12k_dbg(ar->ab, ATH12K_DBG_DATA,
1945 			   "monitor receives invalid preamble type %d",
1946 			    pkt_type);
1947 		break;
1948 	}
1949 }
1950 
1951 static void ath12k_dp_mon_rx_msdus_set_payload(struct ath12k *ar,
1952 					       struct sk_buff *head_msdu,
1953 					       struct sk_buff *tail_msdu)
1954 {
1955 	u32 rx_pkt_offset, l2_hdr_offset, total_offset;
1956 
1957 	rx_pkt_offset = ar->ab->hal.hal_desc_sz;
1958 	l2_hdr_offset =
1959 		ath12k_dp_rx_h_l3pad(ar->ab, (struct hal_rx_desc *)tail_msdu->data);
1960 
1961 	if (ar->ab->hw_params->rxdma1_enable)
1962 		total_offset = ATH12K_MON_RX_PKT_OFFSET;
1963 	else
1964 		total_offset = rx_pkt_offset + l2_hdr_offset;
1965 
1966 	skb_pull(head_msdu, total_offset);
1967 }
1968 
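/* Stitch the MSDUs of one monitor MPDU into a single skb. For raw decap
 * the remaining buffers are chained through the head skb's frag_list;
 * for native-wifi decap the code appears to re-apply the 802.11/QoS
 * header found in the rx descriptor in front of each MSDU. Band,
 * frequency and rate fields of the rx status are filled on the way.
 * Returns the head skb, or NULL on an unsupported decap format or an
 * incomplete chain.
 */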
1969 static struct sk_buff *
1970 ath12k_dp_mon_rx_merg_msdus(struct ath12k *ar,
1971 			    struct dp_mon_mpdu *mon_mpdu,
1972 			    struct hal_rx_mon_ppdu_info *ppdu_info,
1973 			    struct ieee80211_rx_status *rxs)
1974 {
1975 	struct ath12k_base *ab = ar->ab;
1976 	struct sk_buff *msdu, *mpdu_buf, *prev_buf, *head_frag_list;
1977 	struct sk_buff *head_msdu, *tail_msdu;
1978 	struct hal_rx_desc *rx_desc;
1979 	u8 *hdr_desc, *dest, decap_format = mon_mpdu->decap_format;
1980 	struct ieee80211_hdr_3addr *wh;
1981 	struct ieee80211_channel *channel;
1982 	u32 frag_list_sum_len = 0;
1983 	u8 channel_num = ppdu_info->chan_num;
1984 
1985 	mpdu_buf = NULL;
1986 	head_msdu = mon_mpdu->head;
1987 	tail_msdu = mon_mpdu->tail;
1988 
1989 	if (!head_msdu || !tail_msdu)
1990 		goto err_merge_fail;
1991 
1992 	ath12k_dp_mon_fill_rx_stats_info(ar, ppdu_info, rxs);
1993 
1994 	if (unlikely(rxs->band == NUM_NL80211_BANDS ||
1995 		     !ath12k_ar_to_hw(ar)->wiphy->bands[rxs->band])) {
1996 		ath12k_dbg(ar->ab, ATH12K_DBG_DATA,
1997 			   "sband is NULL for status band %d channel_num %d center_freq %d pdev_id %d\n",
1998 			   rxs->band, channel_num, ppdu_info->freq, ar->pdev_idx);
1999 
2000 		spin_lock_bh(&ar->data_lock);
2001 		channel = ar->rx_channel;
2002 		if (channel) {
2003 			rxs->band = channel->band;
2004 			channel_num =
2005 				ieee80211_frequency_to_channel(channel->center_freq);
2006 		}
2007 		spin_unlock_bh(&ar->data_lock);
2008 	}
2009 
2010 	if (rxs->band < NUM_NL80211_BANDS)
2011 		rxs->freq = ieee80211_channel_to_frequency(channel_num,
2012 							   rxs->band);
2013 
2014 	ath12k_dp_mon_fill_rx_rate(ar, ppdu_info, rxs);
2015 
2016 	if (decap_format == DP_RX_DECAP_TYPE_RAW) {
2017 		ath12k_dp_mon_rx_msdus_set_payload(ar, head_msdu, tail_msdu);
2018 
2019 		prev_buf = head_msdu;
2020 		msdu = head_msdu->next;
2021 		head_frag_list = NULL;
2022 
2023 		while (msdu) {
2024 			ath12k_dp_mon_rx_msdus_set_payload(ar, head_msdu, tail_msdu);
2025 
2026 			if (!head_frag_list)
2027 				head_frag_list = msdu;
2028 
2029 			frag_list_sum_len += msdu->len;
2030 			prev_buf = msdu;
2031 			msdu = msdu->next;
2032 		}
2033 
2034 		prev_buf->next = NULL;
2035 
2036 		skb_trim(prev_buf, prev_buf->len);
2037 		if (head_frag_list) {
2038 			skb_shinfo(head_msdu)->frag_list = head_frag_list;
2039 			head_msdu->data_len = frag_list_sum_len;
2040 			head_msdu->len += head_msdu->data_len;
2041 			head_msdu->next = NULL;
2042 		}
2043 	} else if (decap_format == DP_RX_DECAP_TYPE_NATIVE_WIFI) {
2044 		u8 qos_pkt = 0;
2045 
2046 		rx_desc = (struct hal_rx_desc *)head_msdu->data;
2047 		hdr_desc =
2048 			ab->hal_rx_ops->rx_desc_get_msdu_payload(rx_desc);
2049 
2050 		/* Base 3-address 802.11 header at the start of the MSDU payload */
2051 		wh = (struct ieee80211_hdr_3addr *)hdr_desc;
2052 
2053 		if (ieee80211_is_data_qos(wh->frame_control))
2054 			qos_pkt = 1;
2055 
2056 		msdu = head_msdu;
2057 
2058 		while (msdu) {
2059 			ath12k_dp_mon_rx_msdus_set_payload(ar, head_msdu, tail_msdu);
2060 			if (qos_pkt) {
2061 				dest = skb_push(msdu, sizeof(__le16));
2062 				if (!dest)
2063 					goto err_merge_fail;
2064 				memcpy(dest, hdr_desc, sizeof(struct ieee80211_qos_hdr));
2065 			}
2066 			prev_buf = msdu;
2067 			msdu = msdu->next;
2068 		}
2069 		dest = skb_put(prev_buf, HAL_RX_FCS_LEN);
2070 		if (!dest)
2071 			goto err_merge_fail;
2072 
2073 		ath12k_dbg(ab, ATH12K_DBG_DATA,
2074 			   "prev_buf %p prev_buf->len %u",
2075 			   prev_buf, prev_buf->len);
2076 	} else {
2077 		ath12k_dbg(ab, ATH12K_DBG_DATA,
2078 			   "decap format %d is not supported!\n",
2079 			   decap_format);
2080 		goto err_merge_fail;
2081 	}
2082 
2083 	return head_msdu;
2084 
2085 err_merge_fail:
2086 	if (mpdu_buf && decap_format != DP_RX_DECAP_TYPE_RAW) {
2087 		ath12k_dbg(ab, ATH12K_DBG_DATA,
2088 			   "err_merge_fail mpdu_buf %p", mpdu_buf);
2089 		/* Free the head buffer */
2090 		dev_kfree_skb_any(mpdu_buf);
2091 	}
2092 	return NULL;
2093 }
2094 
2095 static void
2096 ath12k_dp_mon_rx_update_radiotap_he(struct hal_rx_mon_ppdu_info *rx_status,
2097 				    u8 *rtap_buf)
2098 {
2099 	u32 rtap_len = 0;
2100 
2101 	put_unaligned_le16(rx_status->he_data1, &rtap_buf[rtap_len]);
2102 	rtap_len += 2;
2103 
2104 	put_unaligned_le16(rx_status->he_data2, &rtap_buf[rtap_len]);
2105 	rtap_len += 2;
2106 
2107 	put_unaligned_le16(rx_status->he_data3, &rtap_buf[rtap_len]);
2108 	rtap_len += 2;
2109 
2110 	put_unaligned_le16(rx_status->he_data4, &rtap_buf[rtap_len]);
2111 	rtap_len += 2;
2112 
2113 	put_unaligned_le16(rx_status->he_data5, &rtap_buf[rtap_len]);
2114 	rtap_len += 2;
2115 
2116 	put_unaligned_le16(rx_status->he_data6, &rtap_buf[rtap_len]);
2117 }
2118 
2119 static void
2120 ath12k_dp_mon_rx_update_radiotap_he_mu(struct hal_rx_mon_ppdu_info *rx_status,
2121 				       u8 *rtap_buf)
2122 {
2123 	u32 rtap_len = 0;
2124 
2125 	put_unaligned_le16(rx_status->he_flags1, &rtap_buf[rtap_len]);
2126 	rtap_len += 2;
2127 
2128 	put_unaligned_le16(rx_status->he_flags2, &rtap_buf[rtap_len]);
2129 	rtap_len += 2;
2130 
2131 	rtap_buf[rtap_len] = rx_status->he_RU[0];
2132 	rtap_len += 1;
2133 
2134 	rtap_buf[rtap_len] = rx_status->he_RU[1];
2135 	rtap_len += 1;
2136 
2137 	rtap_buf[rtap_len] = rx_status->he_RU[2];
2138 	rtap_len += 1;
2139 
2140 	rtap_buf[rtap_len] = rx_status->he_RU[3];
2141 }
2142 
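/* Attach the radiotap metadata for the merged monitor skb: EHT and
 * U-SIG TLVs for 11be PPDUs, HE / HE-MU headers for 11ax, or plain
 * HT/VHT/legacy rate information otherwise. Also fills signal (RSSI +
 * noise floor), NSS, A-MPDU reference and the TSF-based mactime.
 */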
2143 static void ath12k_dp_mon_update_radiotap(struct ath12k *ar,
2144 					  struct hal_rx_mon_ppdu_info *ppduinfo,
2145 					  struct sk_buff *mon_skb,
2146 					  struct ieee80211_rx_status *rxs)
2147 {
2148 	struct ieee80211_supported_band *sband;
2149 	s32 noise_floor;
2150 	u8 *ptr = NULL;
2151 
2152 	spin_lock_bh(&ar->data_lock);
2153 	noise_floor = ath12k_pdev_get_noise_floor(ar);
2154 	spin_unlock_bh(&ar->data_lock);
2155 
2156 	rxs->flag |= RX_FLAG_MACTIME_START;
2157 	rxs->signal = ppduinfo->rssi_comb + noise_floor;
2158 	rxs->nss = ppduinfo->nss + 1;
2159 
2160 	if (ppduinfo->userstats[ppduinfo->userid].ampdu_present) {
2161 		rxs->flag |= RX_FLAG_AMPDU_DETAILS;
2162 		rxs->ampdu_reference = ppduinfo->userstats[ppduinfo->userid].ampdu_id;
2163 	}
2164 
2165 	if (ppduinfo->is_eht || ppduinfo->eht_usig) {
2166 		struct ieee80211_radiotap_tlv *tlv;
2167 		struct ieee80211_radiotap_eht *eht;
2168 		struct ieee80211_radiotap_eht_usig *usig;
2169 		u16 len = 0, i, eht_len, usig_len;
2170 		u8 user;
2171 
2172 		if (ppduinfo->is_eht) {
2173 			eht_len = struct_size(eht,
2174 					      user_info,
2175 					      ppduinfo->eht_info.num_user_info);
2176 			len += sizeof(*tlv) + eht_len;
2177 		}
2178 
2179 		if (ppduinfo->eht_usig) {
2180 			usig_len = sizeof(*usig);
2181 			len += sizeof(*tlv) + usig_len;
2182 		}
2183 
2184 		rxs->flag |= RX_FLAG_RADIOTAP_TLV_AT_END;
2185 		rxs->encoding = RX_ENC_EHT;
2186 
2187 		skb_reset_mac_header(mon_skb);
2188 
2189 		tlv = skb_push(mon_skb, len);
2190 
2191 		if (ppduinfo->is_eht) {
2192 			tlv->type = cpu_to_le16(IEEE80211_RADIOTAP_EHT);
2193 			tlv->len = cpu_to_le16(eht_len);
2194 
2195 			eht = (struct ieee80211_radiotap_eht *)tlv->data;
2196 			eht->known = ppduinfo->eht_info.eht.known;
2197 
2198 			for (i = 0;
2199 			     i < ARRAY_SIZE(eht->data) &&
2200 			     i < ARRAY_SIZE(ppduinfo->eht_info.eht.data);
2201 			     i++)
2202 				eht->data[i] = ppduinfo->eht_info.eht.data[i];
2203 
2204 			for (user = 0; user < ppduinfo->eht_info.num_user_info; user++)
2205 				put_unaligned_le32(ppduinfo->eht_info.user_info[user],
2206 						   &eht->user_info[user]);
2207 
2208 			tlv = (struct ieee80211_radiotap_tlv *)&tlv->data[eht_len];
2209 		}
2210 
2211 		if (ppduinfo->eht_usig) {
2212 			tlv->type = cpu_to_le16(IEEE80211_RADIOTAP_EHT_USIG);
2213 			tlv->len = cpu_to_le16(usig_len);
2214 
2215 			usig = (struct ieee80211_radiotap_eht_usig *)tlv->data;
2216 			*usig = ppduinfo->u_sig_info.usig;
2217 		}
2218 	} else if (ppduinfo->he_mu_flags) {
2219 		rxs->flag |= RX_FLAG_RADIOTAP_HE_MU;
2220 		rxs->encoding = RX_ENC_HE;
2221 		ptr = skb_push(mon_skb, sizeof(struct ieee80211_radiotap_he_mu));
2222 		ath12k_dp_mon_rx_update_radiotap_he_mu(ppduinfo, ptr);
2223 	} else if (ppduinfo->he_flags) {
2224 		rxs->flag |= RX_FLAG_RADIOTAP_HE;
2225 		rxs->encoding = RX_ENC_HE;
2226 		ptr = skb_push(mon_skb, sizeof(struct ieee80211_radiotap_he));
2227 		ath12k_dp_mon_rx_update_radiotap_he(ppduinfo, ptr);
2228 		rxs->rate_idx = ppduinfo->rate;
2229 	} else if (ppduinfo->vht_flags) {
2230 		rxs->encoding = RX_ENC_VHT;
2231 		rxs->rate_idx = ppduinfo->rate;
2232 	} else if (ppduinfo->ht_flags) {
2233 		rxs->encoding = RX_ENC_HT;
2234 		rxs->rate_idx = ppduinfo->rate;
2235 	} else {
2236 		rxs->encoding = RX_ENC_LEGACY;
2237 		sband = &ar->mac.sbands[rxs->band];
2238 		rxs->rate_idx = ath12k_mac_hw_rate_to_idx(sband, ppduinfo->rate,
2239 							  ppduinfo->cck_flag);
2240 	}
2241 
2242 	rxs->mactime = ppduinfo->tsft;
2243 }
2244 
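/* Hand one monitor MSDU to mac80211. A minimal HE radiotap header is
 * prepended when HE encoding was detected but no radiotap HE data is
 * present yet, the peer is looked up to populate the MLO link id, and
 * the frame is finally passed up via ieee80211_rx_napi().
 */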
2245 static void ath12k_dp_mon_rx_deliver_msdu(struct ath12k *ar, struct napi_struct *napi,
2246 					  struct sk_buff *msdu,
2247 					  struct ieee80211_rx_status *status,
2248 					  u8 decap)
2249 {
2250 	static const struct ieee80211_radiotap_he known = {
2251 		.data1 = cpu_to_le16(IEEE80211_RADIOTAP_HE_DATA1_DATA_MCS_KNOWN |
2252 				     IEEE80211_RADIOTAP_HE_DATA1_BW_RU_ALLOC_KNOWN),
2253 		.data2 = cpu_to_le16(IEEE80211_RADIOTAP_HE_DATA2_GI_KNOWN),
2254 	};
2255 	struct ieee80211_rx_status *rx_status;
2256 	struct ieee80211_radiotap_he *he = NULL;
2257 	struct ieee80211_sta *pubsta = NULL;
2258 	struct ath12k_peer *peer;
2259 	struct ath12k_skb_rxcb *rxcb = ATH12K_SKB_RXCB(msdu);
2260 	struct ath12k_dp_rx_info rx_info;
2261 	bool is_mcbc = rxcb->is_mcbc;
2262 	bool is_eapol_tkip = rxcb->is_eapol;
2263 
2264 	status->link_valid = 0;
2265 
2266 	if ((status->encoding == RX_ENC_HE) && !(status->flag & RX_FLAG_RADIOTAP_HE) &&
2267 	    !(status->flag & RX_FLAG_SKIP_MONITOR)) {
2268 		he = skb_push(msdu, sizeof(known));
2269 		memcpy(he, &known, sizeof(known));
2270 		status->flag |= RX_FLAG_RADIOTAP_HE;
2271 	}
2272 
2273 	spin_lock_bh(&ar->ab->base_lock);
2274 	rx_info.addr2_present = false;
2275 	peer = ath12k_dp_rx_h_find_peer(ar->ab, msdu, &rx_info);
2276 	if (peer && peer->sta) {
2277 		pubsta = peer->sta;
2278 		if (pubsta->valid_links) {
2279 			status->link_valid = 1;
2280 			status->link_id = peer->link_id;
2281 		}
2282 	}
2283 
2284 	spin_unlock_bh(&ar->ab->base_lock);
2285 
2286 	ath12k_dbg(ar->ab, ATH12K_DBG_DATA,
2287 		   "rx skb %p len %u peer %pM %u %s %s%s%s%s%s%s%s%s %srate_idx %u vht_nss %u freq %u band %u flag 0x%x fcs-err %i mic-err %i amsdu-more %i\n",
2288 		   msdu,
2289 		   msdu->len,
2290 		   peer ? peer->addr : NULL,
2291 		   rxcb->tid,
2292 		   (is_mcbc) ? "mcast" : "ucast",
2293 		   (status->encoding == RX_ENC_LEGACY) ? "legacy" : "",
2294 		   (status->encoding == RX_ENC_HT) ? "ht" : "",
2295 		   (status->encoding == RX_ENC_VHT) ? "vht" : "",
2296 		   (status->encoding == RX_ENC_HE) ? "he" : "",
2297 		   (status->bw == RATE_INFO_BW_40) ? "40" : "",
2298 		   (status->bw == RATE_INFO_BW_80) ? "80" : "",
2299 		   (status->bw == RATE_INFO_BW_160) ? "160" : "",
2300 		   (status->bw == RATE_INFO_BW_320) ? "320" : "",
2301 		   status->enc_flags & RX_ENC_FLAG_SHORT_GI ? "sgi " : "",
2302 		   status->rate_idx,
2303 		   status->nss,
2304 		   status->freq,
2305 		   status->band, status->flag,
2306 		   !!(status->flag & RX_FLAG_FAILED_FCS_CRC),
2307 		   !!(status->flag & RX_FLAG_MMIC_ERROR),
2308 		   !!(status->flag & RX_FLAG_AMSDU_MORE));
2309 
2310 	ath12k_dbg_dump(ar->ab, ATH12K_DBG_DP_RX, NULL, "dp rx msdu: ",
2311 			msdu->data, msdu->len);
2312 	rx_status = IEEE80211_SKB_RXCB(msdu);
2313 	*rx_status = *status;
2314 
2315 	/* TODO: trace rx packet */
2316 
2317 	/* The PN for multicast packets is not validated in HW,
2318 	 * so skip the 802.3 rx path.
2319 	 * Also, fast_rx expects the STA to be authorized, hence
2320 	 * EAPOL packets are sent via the slow path.
2321 	 */
2322 	if (decap == DP_RX_DECAP_TYPE_ETHERNET2_DIX && !is_eapol_tkip &&
2323 	    !(is_mcbc && rx_status->flag & RX_FLAG_DECRYPTED))
2324 		rx_status->flag |= RX_FLAG_8023;
2325 
2326 	ieee80211_rx_napi(ath12k_ar_to_hw(ar), pubsta, msdu, napi);
2327 }
2328 
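/* Deliver all MSDUs of a monitor MPDU: merge them, set the FCS error
 * and A-MSDU flags per buffer, update the radiotap information and push
 * each skb to mac80211. On merge failure the whole MSDU chain is freed
 * and -EINVAL is returned.
 */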
2329 static int ath12k_dp_mon_rx_deliver(struct ath12k *ar,
2330 				    struct dp_mon_mpdu *mon_mpdu,
2331 				    struct hal_rx_mon_ppdu_info *ppduinfo,
2332 				    struct napi_struct *napi)
2333 {
2334 	struct ath12k_pdev_dp *dp = &ar->dp;
2335 	struct sk_buff *mon_skb, *skb_next, *header;
2336 	struct ieee80211_rx_status *rxs = &dp->rx_status;
2337 	u8 decap = DP_RX_DECAP_TYPE_RAW;
2338 
2339 	mon_skb = ath12k_dp_mon_rx_merg_msdus(ar, mon_mpdu, ppduinfo, rxs);
2340 	if (!mon_skb)
2341 		goto mon_deliver_fail;
2342 
2343 	header = mon_skb;
2344 	rxs->flag = 0;
2345 
2346 	if (mon_mpdu->err_bitmap & HAL_RX_MPDU_ERR_FCS)
2347 		rxs->flag = RX_FLAG_FAILED_FCS_CRC;
2348 
2349 	do {
2350 		skb_next = mon_skb->next;
2351 		if (!skb_next)
2352 			rxs->flag &= ~RX_FLAG_AMSDU_MORE;
2353 		else
2354 			rxs->flag |= RX_FLAG_AMSDU_MORE;
2355 
2356 		if (mon_skb == header) {
2357 			header = NULL;
2358 			rxs->flag &= ~RX_FLAG_ALLOW_SAME_PN;
2359 		} else {
2360 			rxs->flag |= RX_FLAG_ALLOW_SAME_PN;
2361 		}
2362 		rxs->flag |= RX_FLAG_ONLY_MONITOR;
2363 
2364 		if (!(rxs->flag & RX_FLAG_ONLY_MONITOR))
2365 			decap = mon_mpdu->decap_format;
2366 
2367 		ath12k_dp_mon_update_radiotap(ar, ppduinfo, mon_skb, rxs);
2368 		ath12k_dp_mon_rx_deliver_msdu(ar, napi, mon_skb, rxs, decap);
2369 		mon_skb = skb_next;
2370 	} while (mon_skb);
2371 	rxs->flag = 0;
2372 
2373 	return 0;
2374 
2375 mon_deliver_fail:
2376 	mon_skb = mon_mpdu->head;
2377 	while (mon_skb) {
2378 		skb_next = mon_skb->next;
2379 		dev_kfree_skb_any(mon_skb);
2380 		mon_skb = skb_next;
2381 	}
2382 	return -EINVAL;
2383 }
2384 
2385 static int ath12k_dp_pkt_set_pktlen(struct sk_buff *skb, u32 len)
2386 {
2387 	if (skb->len > len) {
2388 		skb_trim(skb, len);
2389 	} else {
2390 		if (skb_tailroom(skb) < len - skb->len) {
2391 			if ((pskb_expand_head(skb, 0,
2392 					      len - skb->len - skb_tailroom(skb),
2393 					      GFP_ATOMIC))) {
2394 				return -ENOMEM;
2395 			}
2396 		}
2397 		skb_put(skb, (len - skb->len));
2398 	}
2399 
2400 	return 0;
2401 }
2402 
2403 /* The hardware fills the buffer in 128-byte aligned chunks, so it must
2404  * be reaped at 128-byte alignment as well.
2405  */
2406 #define RXDMA_DATA_DMA_BLOCK_SIZE 128
2407 
2408 static void
2409 ath12k_dp_mon_get_buf_len(struct hal_rx_msdu_desc_info *info,
2410 			  bool *is_frag, u32 *total_len,
2411 			  u32 *frag_len, u32 *msdu_cnt)
2412 {
2413 	if (info->msdu_flags & RX_MSDU_DESC_INFO0_MSDU_CONTINUATION) {
2414 		*is_frag = true;
2415 		*frag_len = (RX_MON_STATUS_BASE_BUF_SIZE -
2416 			     sizeof(struct hal_rx_desc)) &
2417 			     ~(RXDMA_DATA_DMA_BLOCK_SIZE - 1);
2418 		*total_len += *frag_len;
2419 	} else {
2420 		if (*is_frag)
2421 			*frag_len = info->msdu_len - *total_len;
2422 		else
2423 			*frag_len = info->msdu_len;
2424 
2425 		*msdu_cnt -= 1;
2426 	}
2427 }
2428 
2429 static int
2430 ath12k_dp_mon_parse_status_buf(struct ath12k *ar,
2431 			       struct ath12k_mon_data *pmon,
2432 			       const struct dp_mon_packet_info *packet_info)
2433 {
2434 	struct ath12k_base *ab = ar->ab;
2435 	struct dp_rxdma_mon_ring *buf_ring = &ab->dp.rxdma_mon_buf_ring;
2436 	struct sk_buff *msdu;
2437 	int buf_id;
2438 	u32 offset;
2439 
2440 	buf_id = u32_get_bits(packet_info->cookie, DP_RXDMA_BUF_COOKIE_BUF_ID);
2441 
2442 	spin_lock_bh(&buf_ring->idr_lock);
2443 	msdu = idr_remove(&buf_ring->bufs_idr, buf_id);
2444 	spin_unlock_bh(&buf_ring->idr_lock);
2445 
2446 	if (unlikely(!msdu)) {
2447 		ath12k_warn(ab, "mon dest desc with inval buf_id %d\n", buf_id);
2448 		return 0;
2449 	}
2450 
2451 	dma_unmap_single(ab->dev, ATH12K_SKB_RXCB(msdu)->paddr,
2452 			 msdu->len + skb_tailroom(msdu),
2453 			 DMA_FROM_DEVICE);
2454 
2455 	offset = packet_info->dma_length + ATH12K_MON_RX_DOT11_OFFSET;
2456 	if (ath12k_dp_pkt_set_pktlen(msdu, offset)) {
2457 		dev_kfree_skb_any(msdu);
2458 		goto dest_replenish;
2459 	}
2460 
2461 	if (!pmon->mon_mpdu->head)
2462 		pmon->mon_mpdu->head = msdu;
2463 	else
2464 		pmon->mon_mpdu->tail->next = msdu;
2465 
2466 	pmon->mon_mpdu->tail = msdu;
2467 
2468 dest_replenish:
2469 	ath12k_dp_mon_buf_replenish(ab, buf_ring, 1);
2470 
2471 	return 0;
2472 }
2473 
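/* React to the status codes returned by the rx status TLV parser to
 * assemble per-PPDU MPDUs: allocate a dp_mon_mpdu on MPDU_START, attach
 * the reaped packet buffers on BUF_ADDR, latch the decap format and
 * error bitmap on MSDU_END, and queue the MPDU (or free it when empty)
 * on MPDU_END.
 */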
2474 static int
2475 ath12k_dp_mon_parse_rx_dest_tlv(struct ath12k *ar,
2476 				struct ath12k_mon_data *pmon,
2477 				enum hal_rx_mon_status hal_status,
2478 				const void *tlv_data)
2479 {
2480 	switch (hal_status) {
2481 	case HAL_RX_MON_STATUS_MPDU_START:
2482 		if (WARN_ON_ONCE(pmon->mon_mpdu))
2483 			break;
2484 
2485 		pmon->mon_mpdu = kzalloc(sizeof(*pmon->mon_mpdu), GFP_ATOMIC);
2486 		if (!pmon->mon_mpdu)
2487 			return -ENOMEM;
2488 		break;
2489 	case HAL_RX_MON_STATUS_BUF_ADDR:
2490 		return ath12k_dp_mon_parse_status_buf(ar, pmon, tlv_data);
2491 	case HAL_RX_MON_STATUS_MPDU_END:
2492 		/* Queue MPDUs that carry at least one MSDU; free empty ones */
2493 		if (pmon->mon_mpdu->tail) {
2494 			pmon->mon_mpdu->tail->next = NULL;
2495 			list_add_tail(&pmon->mon_mpdu->list, &pmon->dp_rx_mon_mpdu_list);
2496 		} else {
2497 			kfree(pmon->mon_mpdu);
2498 		}
2499 		pmon->mon_mpdu = NULL;
2500 		break;
2501 	case HAL_RX_MON_STATUS_MSDU_END:
2502 		pmon->mon_mpdu->decap_format = pmon->decap_format;
2503 		pmon->mon_mpdu->err_bitmap = pmon->err_bitmap;
2504 		break;
2505 	default:
2506 		break;
2507 	}
2508 
2509 	return 0;
2510 }
2511 
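/* Walk the TLVs in one monitor status buffer, honouring the 64-bit TLV
 * alignment, until the PPDU is complete, the buffer is exhausted, or a
 * destination-TLV handling error forces an early PPDU_DONE.
 */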
2512 static enum hal_rx_mon_status
2513 ath12k_dp_mon_parse_rx_dest(struct ath12k *ar, struct ath12k_mon_data *pmon,
2514 			    struct sk_buff *skb)
2515 {
2516 	struct hal_tlv_64_hdr *tlv;
2517 	struct ath12k_skb_rxcb *rxcb;
2518 	enum hal_rx_mon_status hal_status;
2519 	u16 tlv_tag, tlv_len;
2520 	u8 *ptr = skb->data;
2521 
2522 	do {
2523 		tlv = (struct hal_tlv_64_hdr *)ptr;
2524 		tlv_tag = le64_get_bits(tlv->tl, HAL_TLV_64_HDR_TAG);
2525 
2526 		/* The actual length of PPDU_END is the combined length of many PHY
2527 		 * TLVs that follow. Skip the TLV header and
2528 		 * rx_rxpcu_classification_overview that follows the header to get to
2529 		 * the next TLV.
2530 		 */
2531 
2532 		if (tlv_tag == HAL_RX_PPDU_END)
2533 			tlv_len = sizeof(struct hal_rx_rxpcu_classification_overview);
2534 		else
2535 			tlv_len = le64_get_bits(tlv->tl, HAL_TLV_64_HDR_LEN);
2536 
2537 		hal_status = ath12k_dp_mon_rx_parse_status_tlv(ar, pmon, tlv);
2538 
2539 		if (ar->monitor_started && ar->ab->hw_params->rxdma1_enable &&
2540 		    ath12k_dp_mon_parse_rx_dest_tlv(ar, pmon, hal_status, tlv->value))
2541 			return HAL_RX_MON_STATUS_PPDU_DONE;
2542 
2543 		ptr += sizeof(*tlv) + tlv_len;
2544 		ptr = PTR_ALIGN(ptr, HAL_TLV_64_ALIGN);
2545 
2546 		if ((ptr - skb->data) > skb->len)
2547 			break;
2548 
2549 	} while ((hal_status == HAL_RX_MON_STATUS_PPDU_NOT_DONE) ||
2550 		 (hal_status == HAL_RX_MON_STATUS_BUF_ADDR) ||
2551 		 (hal_status == HAL_RX_MON_STATUS_MPDU_START) ||
2552 		 (hal_status == HAL_RX_MON_STATUS_MPDU_END) ||
2553 		 (hal_status == HAL_RX_MON_STATUS_MSDU_END));
2554 
2555 	rxcb = ATH12K_SKB_RXCB(skb);
2556 	if (rxcb->is_end_of_ppdu)
2557 		hal_status = HAL_RX_MON_STATUS_PPDU_DONE;
2558 
2559 	return hal_status;
2560 }
2561 
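/* Entry point for one monitor status buffer: parse it, and once a full
 * PPDU has been seen, deliver every completed MPDU collected on
 * dp_rx_mon_mpdu_list to mac80211 and free the list entries.
 */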
2562 enum hal_rx_mon_status
2563 ath12k_dp_mon_rx_parse_mon_status(struct ath12k *ar,
2564 				  struct ath12k_mon_data *pmon,
2565 				  struct sk_buff *skb,
2566 				  struct napi_struct *napi)
2567 {
2568 	struct hal_rx_mon_ppdu_info *ppdu_info = &pmon->mon_ppdu_info;
2569 	struct dp_mon_mpdu *tmp;
2570 	struct dp_mon_mpdu *mon_mpdu = pmon->mon_mpdu;
2571 	enum hal_rx_mon_status hal_status;
2572 
2573 	hal_status = ath12k_dp_mon_parse_rx_dest(ar, pmon, skb);
2574 	if (hal_status != HAL_RX_MON_STATUS_PPDU_DONE)
2575 		return hal_status;
2576 
2577 	list_for_each_entry_safe(mon_mpdu, tmp, &pmon->dp_rx_mon_mpdu_list, list) {
2578 		list_del(&mon_mpdu->list);
2579 
2580 		if (mon_mpdu->head && mon_mpdu->tail)
2581 			ath12k_dp_mon_rx_deliver(ar, mon_mpdu, ppdu_info, napi);
2582 
2583 		kfree(mon_mpdu);
2584 	}
2585 
2586 	return hal_status;
2587 }
2588 
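/* Post @req_entries freshly allocated, DMA-mapped skbs to the monitor
 * buffer ring, recording each buffer in the ring IDR so its cookie can
 * be resolved on reap. Any failure rolls back the current buffer and
 * stops the refill with -ENOMEM.
 */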
2589 int ath12k_dp_mon_buf_replenish(struct ath12k_base *ab,
2590 				struct dp_rxdma_mon_ring *buf_ring,
2591 				int req_entries)
2592 {
2593 	struct hal_mon_buf_ring *mon_buf;
2594 	struct sk_buff *skb;
2595 	struct hal_srng *srng;
2596 	dma_addr_t paddr;
2597 	u32 cookie;
2598 	int buf_id;
2599 
2600 	srng = &ab->hal.srng_list[buf_ring->refill_buf_ring.ring_id];
2601 	spin_lock_bh(&srng->lock);
2602 	ath12k_hal_srng_access_begin(ab, srng);
2603 
2604 	while (req_entries > 0) {
2605 		skb = dev_alloc_skb(DP_RX_BUFFER_SIZE + DP_RX_BUFFER_ALIGN_SIZE);
2606 		if (unlikely(!skb))
2607 			goto fail_alloc_skb;
2608 
2609 		if (!IS_ALIGNED((unsigned long)skb->data, DP_RX_BUFFER_ALIGN_SIZE)) {
2610 			skb_pull(skb,
2611 				 PTR_ALIGN(skb->data, DP_RX_BUFFER_ALIGN_SIZE) -
2612 				 skb->data);
2613 		}
2614 
2615 		paddr = dma_map_single(ab->dev, skb->data,
2616 				       skb->len + skb_tailroom(skb),
2617 				       DMA_FROM_DEVICE);
2618 
2619 		if (unlikely(dma_mapping_error(ab->dev, paddr)))
2620 			goto fail_free_skb;
2621 
2622 		spin_lock_bh(&buf_ring->idr_lock);
2623 		buf_id = idr_alloc(&buf_ring->bufs_idr, skb, 0,
2624 				   buf_ring->bufs_max * 3, GFP_ATOMIC);
2625 		spin_unlock_bh(&buf_ring->idr_lock);
2626 
2627 		if (unlikely(buf_id < 0))
2628 			goto fail_dma_unmap;
2629 
2630 		mon_buf = ath12k_hal_srng_src_get_next_entry(ab, srng);
2631 		if (unlikely(!mon_buf))
2632 			goto fail_idr_remove;
2633 
2634 		ATH12K_SKB_RXCB(skb)->paddr = paddr;
2635 
2636 		cookie = u32_encode_bits(buf_id, DP_RXDMA_BUF_COOKIE_BUF_ID);
2637 
2638 		mon_buf->paddr_lo = cpu_to_le32(lower_32_bits(paddr));
2639 		mon_buf->paddr_hi = cpu_to_le32(upper_32_bits(paddr));
2640 		mon_buf->cookie = cpu_to_le64(cookie);
2641 
2642 		req_entries--;
2643 	}
2644 
2645 	ath12k_hal_srng_access_end(ab, srng);
2646 	spin_unlock_bh(&srng->lock);
2647 	return 0;
2648 
2649 fail_idr_remove:
2650 	spin_lock_bh(&buf_ring->idr_lock);
2651 	idr_remove(&buf_ring->bufs_idr, buf_id);
2652 	spin_unlock_bh(&buf_ring->idr_lock);
2653 fail_dma_unmap:
2654 	dma_unmap_single(ab->dev, paddr, skb->len + skb_tailroom(skb),
2655 			 DMA_FROM_DEVICE);
2656 fail_free_skb:
2657 	dev_kfree_skb_any(skb);
2658 fail_alloc_skb:
2659 	ath12k_hal_srng_access_end(ab, srng);
2660 	spin_unlock_bh(&srng->lock);
2661 	return -ENOMEM;
2662 }
2663 
2664 int ath12k_dp_mon_status_bufs_replenish(struct ath12k_base *ab,
2665 					struct dp_rxdma_mon_ring *rx_ring,
2666 					int req_entries)
2667 {
2668 	enum hal_rx_buf_return_buf_manager mgr =
2669 		ab->hw_params->hal_params->rx_buf_rbm;
2670 	int num_free, num_remain, buf_id;
2671 	struct ath12k_buffer_addr *desc;
2672 	struct hal_srng *srng;
2673 	struct sk_buff *skb;
2674 	dma_addr_t paddr;
2675 	u32 cookie;
2676 
2677 	req_entries = min(req_entries, rx_ring->bufs_max);
2678 
2679 	srng = &ab->hal.srng_list[rx_ring->refill_buf_ring.ring_id];
2680 
2681 	spin_lock_bh(&srng->lock);
2682 
2683 	ath12k_hal_srng_access_begin(ab, srng);
2684 
2685 	num_free = ath12k_hal_srng_src_num_free(ab, srng, true);
2686 	if (!req_entries && (num_free > (rx_ring->bufs_max * 3) / 4))
2687 		req_entries = num_free;
2688 
2689 	req_entries = min(num_free, req_entries);
2690 	num_remain = req_entries;
2691 
2692 	while (num_remain > 0) {
2693 		skb = dev_alloc_skb(RX_MON_STATUS_BUF_SIZE);
2694 		if (!skb)
2695 			break;
2696 
2697 		if (!IS_ALIGNED((unsigned long)skb->data,
2698 				RX_MON_STATUS_BUF_ALIGN)) {
2699 			skb_pull(skb,
2700 				 PTR_ALIGN(skb->data, RX_MON_STATUS_BUF_ALIGN) -
2701 				 skb->data);
2702 		}
2703 
2704 		paddr = dma_map_single(ab->dev, skb->data,
2705 				       skb->len + skb_tailroom(skb),
2706 				       DMA_FROM_DEVICE);
2707 		if (dma_mapping_error(ab->dev, paddr))
2708 			goto fail_free_skb;
2709 
2710 		spin_lock_bh(&rx_ring->idr_lock);
2711 		buf_id = idr_alloc(&rx_ring->bufs_idr, skb, 0,
2712 				   rx_ring->bufs_max * 3, GFP_ATOMIC);
2713 		spin_unlock_bh(&rx_ring->idr_lock);
2714 		if (buf_id < 0)
2715 			goto fail_dma_unmap;
2716 		cookie = u32_encode_bits(buf_id, DP_RXDMA_BUF_COOKIE_BUF_ID);
2717 
2718 		desc = ath12k_hal_srng_src_get_next_entry(ab, srng);
2719 		if (!desc)
2720 			goto fail_buf_unassign;
2721 
2722 		ATH12K_SKB_RXCB(skb)->paddr = paddr;
2723 
2724 		num_remain--;
2725 
2726 		ath12k_hal_rx_buf_addr_info_set(desc, paddr, cookie, mgr);
2727 	}
2728 
2729 	ath12k_hal_srng_access_end(ab, srng);
2730 
2731 	spin_unlock_bh(&srng->lock);
2732 
2733 	return req_entries - num_remain;
2734 
2735 fail_buf_unassign:
2736 	spin_lock_bh(&rx_ring->idr_lock);
2737 	idr_remove(&rx_ring->bufs_idr, buf_id);
2738 	spin_unlock_bh(&rx_ring->idr_lock);
2739 fail_dma_unmap:
2740 	dma_unmap_single(ab->dev, paddr, skb->len + skb_tailroom(skb),
2741 			 DMA_FROM_DEVICE);
2742 fail_free_skb:
2743 	dev_kfree_skb_any(skb);
2744 
2745 	ath12k_hal_srng_access_end(ab, srng);
2746 
2747 	spin_unlock_bh(&srng->lock);
2748 
2749 	return req_entries - num_remain;
2750 }
2751 
2752 static struct dp_mon_tx_ppdu_info *
2753 ath12k_dp_mon_tx_get_ppdu_info(struct ath12k_mon_data *pmon,
2754 			       unsigned int ppdu_id,
2755 			       enum dp_mon_tx_ppdu_info_type type)
2756 {
2757 	struct dp_mon_tx_ppdu_info *tx_ppdu_info;
2758 
2759 	if (type == DP_MON_TX_PROT_PPDU_INFO) {
2760 		tx_ppdu_info = pmon->tx_prot_ppdu_info;
2761 
2762 		if (tx_ppdu_info && !tx_ppdu_info->is_used)
2763 			return tx_ppdu_info;
2764 		kfree(tx_ppdu_info);
2765 	} else {
2766 		tx_ppdu_info = pmon->tx_data_ppdu_info;
2767 
2768 		if (tx_ppdu_info && !tx_ppdu_info->is_used)
2769 			return tx_ppdu_info;
2770 		kfree(tx_ppdu_info);
2771 	}
2772 
2773 	/* allocate new tx_ppdu_info */
2774 	tx_ppdu_info = kzalloc(sizeof(*tx_ppdu_info), GFP_ATOMIC);
2775 	if (!tx_ppdu_info)
2776 		return NULL;
2777 
2778 	tx_ppdu_info->is_used = 0;
2779 	tx_ppdu_info->ppdu_id = ppdu_id;
2780 
2781 	if (type == DP_MON_TX_PROT_PPDU_INFO)
2782 		pmon->tx_prot_ppdu_info = tx_ppdu_info;
2783 	else
2784 		pmon->tx_data_ppdu_info = tx_ppdu_info;
2785 
2786 	return tx_ppdu_info;
2787 }
2788 
2789 static struct dp_mon_tx_ppdu_info *
2790 ath12k_dp_mon_hal_tx_ppdu_info(struct ath12k_mon_data *pmon,
2791 			       u16 tlv_tag)
2792 {
2793 	switch (tlv_tag) {
2794 	case HAL_TX_FES_SETUP:
2795 	case HAL_TX_FLUSH:
2796 	case HAL_PCU_PPDU_SETUP_INIT:
2797 	case HAL_TX_PEER_ENTRY:
2798 	case HAL_TX_QUEUE_EXTENSION:
2799 	case HAL_TX_MPDU_START:
2800 	case HAL_TX_MSDU_START:
2801 	case HAL_TX_DATA:
2802 	case HAL_MON_BUF_ADDR:
2803 	case HAL_TX_MPDU_END:
2804 	case HAL_TX_LAST_MPDU_FETCHED:
2805 	case HAL_TX_LAST_MPDU_END:
2806 	case HAL_COEX_TX_REQ:
2807 	case HAL_TX_RAW_OR_NATIVE_FRAME_SETUP:
2808 	case HAL_SCH_CRITICAL_TLV_REFERENCE:
2809 	case HAL_TX_FES_SETUP_COMPLETE:
2810 	case HAL_TQM_MPDU_GLOBAL_START:
2811 	case HAL_SCHEDULER_END:
2812 	case HAL_TX_FES_STATUS_USER_PPDU:
2813 		break;
2814 	case HAL_TX_FES_STATUS_PROT: {
2815 		if (!pmon->tx_prot_ppdu_info->is_used)
2816 			pmon->tx_prot_ppdu_info->is_used = true;
2817 
2818 		return pmon->tx_prot_ppdu_info;
2819 	}
2820 	}
2821 
2822 	if (!pmon->tx_data_ppdu_info->is_used)
2823 		pmon->tx_data_ppdu_info->is_used = true;
2824 
2825 	return pmon->tx_data_ppdu_info;
2826 }
2827 
2828 #define MAX_MONITOR_HEADER 512
2829 #define MAX_DUMMY_FRM_BODY 128
2830 
2831 struct sk_buff *ath12k_dp_mon_tx_alloc_skb(void)
2832 {
2833 	struct sk_buff *skb;
2834 
2835 	skb = dev_alloc_skb(MAX_MONITOR_HEADER + MAX_DUMMY_FRM_BODY);
2836 	if (!skb)
2837 		return NULL;
2838 
2839 	skb_reserve(skb, MAX_MONITOR_HEADER);
2840 
2841 	if (!IS_ALIGNED((unsigned long)skb->data, 4))
2842 		skb_pull(skb, PTR_ALIGN(skb->data, 4) - skb->data);
2843 
2844 	return skb;
2845 }
2846 
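/* The helpers below synthesize minimal placeholder frames (CTS-to-self,
 * RTS, QoS Null, ACK) from TX monitor status contents and queue them on
 * the tx_ppdu_info MPDU list, presumably so protection/response
 * exchanges can be reported on the monitor interface even though no
 * payload was captured for them.
 */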
2847 static int
2848 ath12k_dp_mon_tx_gen_cts2self_frame(struct dp_mon_tx_ppdu_info *tx_ppdu_info)
2849 {
2850 	struct sk_buff *skb;
2851 	struct ieee80211_cts *cts;
2852 
2853 	skb = ath12k_dp_mon_tx_alloc_skb();
2854 	if (!skb)
2855 		return -ENOMEM;
2856 
2857 	cts = (struct ieee80211_cts *)skb->data;
2858 	memset(cts, 0, MAX_DUMMY_FRM_BODY);
2859 	cts->frame_control =
2860 		cpu_to_le16(IEEE80211_FTYPE_CTL | IEEE80211_STYPE_CTS);
2861 	cts->duration = cpu_to_le16(tx_ppdu_info->rx_status.rx_duration);
2862 	memcpy(cts->ra, tx_ppdu_info->rx_status.addr1, sizeof(cts->ra));
2863 
2864 	skb_put(skb, sizeof(*cts));
2865 	tx_ppdu_info->tx_mon_mpdu->head = skb;
2866 	tx_ppdu_info->tx_mon_mpdu->tail = NULL;
2867 	list_add_tail(&tx_ppdu_info->tx_mon_mpdu->list,
2868 		      &tx_ppdu_info->dp_tx_mon_mpdu_list);
2869 
2870 	return 0;
2871 }
2872 
2873 static int
2874 ath12k_dp_mon_tx_gen_rts_frame(struct dp_mon_tx_ppdu_info *tx_ppdu_info)
2875 {
2876 	struct sk_buff *skb;
2877 	struct ieee80211_rts *rts;
2878 
2879 	skb = ath12k_dp_mon_tx_alloc_skb();
2880 	if (!skb)
2881 		return -ENOMEM;
2882 
2883 	rts = (struct ieee80211_rts *)skb->data;
2884 	memset(rts, 0, MAX_DUMMY_FRM_BODY);
2885 	rts->frame_control =
2886 		cpu_to_le16(IEEE80211_FTYPE_CTL | IEEE80211_STYPE_RTS);
2887 	rts->duration = cpu_to_le16(tx_ppdu_info->rx_status.rx_duration);
2888 	memcpy(rts->ra, tx_ppdu_info->rx_status.addr1, sizeof(rts->ra));
2889 	memcpy(rts->ta, tx_ppdu_info->rx_status.addr2, sizeof(rts->ta));
2890 
2891 	skb_put(skb, sizeof(*rts));
2892 	tx_ppdu_info->tx_mon_mpdu->head = skb;
2893 	tx_ppdu_info->tx_mon_mpdu->tail = NULL;
2894 	list_add_tail(&tx_ppdu_info->tx_mon_mpdu->list,
2895 		      &tx_ppdu_info->dp_tx_mon_mpdu_list);
2896 
2897 	return 0;
2898 }
2899 
2900 static int
2901 ath12k_dp_mon_tx_gen_3addr_qos_null_frame(struct dp_mon_tx_ppdu_info *tx_ppdu_info)
2902 {
2903 	struct sk_buff *skb;
2904 	struct ieee80211_qos_hdr *qhdr;
2905 
2906 	skb = ath12k_dp_mon_tx_alloc_skb();
2907 	if (!skb)
2908 		return -ENOMEM;
2909 
2910 	qhdr = (struct ieee80211_qos_hdr *)skb->data;
2911 	memset(qhdr, 0, MAX_DUMMY_FRM_BODY);
2912 	qhdr->frame_control =
2913 		cpu_to_le16(IEEE80211_FTYPE_DATA | IEEE80211_STYPE_QOS_NULLFUNC);
2914 	qhdr->duration_id = cpu_to_le16(tx_ppdu_info->rx_status.rx_duration);
2915 	memcpy(qhdr->addr1, tx_ppdu_info->rx_status.addr1, ETH_ALEN);
2916 	memcpy(qhdr->addr2, tx_ppdu_info->rx_status.addr2, ETH_ALEN);
2917 	memcpy(qhdr->addr3, tx_ppdu_info->rx_status.addr3, ETH_ALEN);
2918 
2919 	skb_put(skb, sizeof(*qhdr));
2920 	tx_ppdu_info->tx_mon_mpdu->head = skb;
2921 	tx_ppdu_info->tx_mon_mpdu->tail = NULL;
2922 	list_add_tail(&tx_ppdu_info->tx_mon_mpdu->list,
2923 		      &tx_ppdu_info->dp_tx_mon_mpdu_list);
2924 
2925 	return 0;
2926 }
2927 
2928 static int
2929 ath12k_dp_mon_tx_gen_4addr_qos_null_frame(struct dp_mon_tx_ppdu_info *tx_ppdu_info)
2930 {
2931 	struct sk_buff *skb;
2932 	struct dp_mon_qosframe_addr4 *qhdr;
2933 
2934 	skb = ath12k_dp_mon_tx_alloc_skb();
2935 	if (!skb)
2936 		return -ENOMEM;
2937 
2938 	qhdr = (struct dp_mon_qosframe_addr4 *)skb->data;
2939 	memset(qhdr, 0, MAX_DUMMY_FRM_BODY);
2940 	qhdr->frame_control =
2941 		cpu_to_le16(IEEE80211_FTYPE_DATA | IEEE80211_STYPE_QOS_NULLFUNC);
2942 	qhdr->duration = cpu_to_le16(tx_ppdu_info->rx_status.rx_duration);
2943 	memcpy(qhdr->addr1, tx_ppdu_info->rx_status.addr1, ETH_ALEN);
2944 	memcpy(qhdr->addr2, tx_ppdu_info->rx_status.addr2, ETH_ALEN);
2945 	memcpy(qhdr->addr3, tx_ppdu_info->rx_status.addr3, ETH_ALEN);
2946 	memcpy(qhdr->addr4, tx_ppdu_info->rx_status.addr4, ETH_ALEN);
2947 
2948 	skb_put(skb, sizeof(*qhdr));
2949 	tx_ppdu_info->tx_mon_mpdu->head = skb;
2950 	tx_ppdu_info->tx_mon_mpdu->tail = NULL;
2951 	list_add_tail(&tx_ppdu_info->tx_mon_mpdu->list,
2952 		      &tx_ppdu_info->dp_tx_mon_mpdu_list);
2953 
2954 	return 0;
2955 }
2956 
2957 static int
2958 ath12k_dp_mon_tx_gen_ack_frame(struct dp_mon_tx_ppdu_info *tx_ppdu_info)
2959 {
2960 	struct sk_buff *skb;
2961 	struct dp_mon_frame_min_one *fbmhdr;
2962 
2963 	skb = ath12k_dp_mon_tx_alloc_skb();
2964 	if (!skb)
2965 		return -ENOMEM;
2966 
2967 	fbmhdr = (struct dp_mon_frame_min_one *)skb->data;
2968 	memset(fbmhdr, 0, MAX_DUMMY_FRM_BODY);
2969 	fbmhdr->frame_control =
2970 		cpu_to_le16(IEEE80211_FTYPE_DATA | IEEE80211_STYPE_QOS_CFACK);
2971 	memcpy(fbmhdr->addr1, tx_ppdu_info->rx_status.addr1, ETH_ALEN);
2972 
2973 	/* set duration zero for ack frame */
2974 	fbmhdr->duration = 0;
2975 
2976 	skb_put(skb, sizeof(*fbmhdr));
2977 	tx_ppdu_info->tx_mon_mpdu->head = skb;
2978 	tx_ppdu_info->tx_mon_mpdu->tail = NULL;
2979 	list_add_tail(&tx_ppdu_info->tx_mon_mpdu->list,
2980 		      &tx_ppdu_info->dp_tx_mon_mpdu_list);
2981 
2982 	return 0;
2983 }
2984 
2985 static int
2986 ath12k_dp_mon_tx_gen_prot_frame(struct dp_mon_tx_ppdu_info *tx_ppdu_info)
2987 {
2988 	int ret = 0;
2989 
2990 	switch (tx_ppdu_info->rx_status.medium_prot_type) {
2991 	case DP_MON_TX_MEDIUM_RTS_LEGACY:
2992 	case DP_MON_TX_MEDIUM_RTS_11AC_STATIC_BW:
2993 	case DP_MON_TX_MEDIUM_RTS_11AC_DYNAMIC_BW:
2994 		ret = ath12k_dp_mon_tx_gen_rts_frame(tx_ppdu_info);
2995 		break;
2996 	case DP_MON_TX_MEDIUM_CTS2SELF:
2997 		ret = ath12k_dp_mon_tx_gen_cts2self_frame(tx_ppdu_info);
2998 		break;
2999 	case DP_MON_TX_MEDIUM_QOS_NULL_NO_ACK_3ADDR:
3000 		ret = ath12k_dp_mon_tx_gen_3addr_qos_null_frame(tx_ppdu_info);
3001 		break;
3002 	case DP_MON_TX_MEDIUM_QOS_NULL_NO_ACK_4ADDR:
3003 		ret = ath12k_dp_mon_tx_gen_4addr_qos_null_frame(tx_ppdu_info);
3004 		break;
3005 	}
3006 
3007 	return ret;
3008 }
3009 
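/* Parse one TX monitor status TLV into the protection or data
 * tx_ppdu_info selected by ath12k_dp_mon_hal_tx_ppdu_info(): FES
 * setup/status, PPDU setup (protection frame addresses), queue
 * extension (frame control) and response-required info, returning a
 * dp_mon_tx_tlv_status code for the caller's state machine.
 */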
3010 static enum dp_mon_tx_tlv_status
3011 ath12k_dp_mon_tx_parse_status_tlv(struct ath12k_base *ab,
3012 				  struct ath12k_mon_data *pmon,
3013 				  u16 tlv_tag, const void *tlv_data, u32 userid)
3014 {
3015 	struct dp_mon_tx_ppdu_info *tx_ppdu_info;
3016 	enum dp_mon_tx_tlv_status status = DP_MON_TX_STATUS_PPDU_NOT_DONE;
3017 	u32 info[7];
3018 
3019 	tx_ppdu_info = ath12k_dp_mon_hal_tx_ppdu_info(pmon, tlv_tag);
3020 
3021 	switch (tlv_tag) {
3022 	case HAL_TX_FES_SETUP: {
3023 		const struct hal_tx_fes_setup *tx_fes_setup = tlv_data;
3024 
3025 		info[0] = __le32_to_cpu(tx_fes_setup->info0);
3026 		tx_ppdu_info->ppdu_id = __le32_to_cpu(tx_fes_setup->schedule_id);
3027 		tx_ppdu_info->num_users =
3028 			u32_get_bits(info[0], HAL_TX_FES_SETUP_INFO0_NUM_OF_USERS);
3029 		status = DP_MON_TX_FES_SETUP;
3030 		break;
3031 	}
3032 
3033 	case HAL_TX_FES_STATUS_END: {
3034 		const struct hal_tx_fes_status_end *tx_fes_status_end = tlv_data;
3035 		u32 tst_15_0, tst_31_16;
3036 
3037 		info[0] = __le32_to_cpu(tx_fes_status_end->info0);
3038 		tst_15_0 =
3039 			u32_get_bits(info[0],
3040 				     HAL_TX_FES_STATUS_END_INFO0_START_TIMESTAMP_15_0);
3041 		tst_31_16 =
3042 			u32_get_bits(info[0],
3043 				     HAL_TX_FES_STATUS_END_INFO0_START_TIMESTAMP_31_16);
3044 
3045 		tx_ppdu_info->rx_status.ppdu_ts = (tst_15_0 | (tst_31_16 << 16));
3046 		status = DP_MON_TX_FES_STATUS_END;
3047 		break;
3048 	}
3049 
3050 	case HAL_RX_RESPONSE_REQUIRED_INFO: {
3051 		const struct hal_rx_resp_req_info *rx_resp_req_info = tlv_data;
3052 		u32 addr_32;
3053 		u16 addr_16;
3054 
3055 		info[0] = __le32_to_cpu(rx_resp_req_info->info0);
3056 		info[1] = __le32_to_cpu(rx_resp_req_info->info1);
3057 		info[2] = __le32_to_cpu(rx_resp_req_info->info2);
3058 		info[3] = __le32_to_cpu(rx_resp_req_info->info3);
3059 		info[4] = __le32_to_cpu(rx_resp_req_info->info4);
3060 		info[5] = __le32_to_cpu(rx_resp_req_info->info5);
3061 
3062 		tx_ppdu_info->rx_status.ppdu_id =
3063 			u32_get_bits(info[0], HAL_RX_RESP_REQ_INFO0_PPDU_ID);
3064 		tx_ppdu_info->rx_status.reception_type =
3065 			u32_get_bits(info[0], HAL_RX_RESP_REQ_INFO0_RECEPTION_TYPE);
3066 		tx_ppdu_info->rx_status.rx_duration =
3067 			u32_get_bits(info[1], HAL_RX_RESP_REQ_INFO1_DURATION);
3068 		tx_ppdu_info->rx_status.mcs =
3069 			u32_get_bits(info[1], HAL_RX_RESP_REQ_INFO1_RATE_MCS);
3070 		tx_ppdu_info->rx_status.sgi =
3071 			u32_get_bits(info[1], HAL_RX_RESP_REQ_INFO1_SGI);
3072 		tx_ppdu_info->rx_status.is_stbc =
3073 			u32_get_bits(info[1], HAL_RX_RESP_REQ_INFO1_STBC);
3074 		tx_ppdu_info->rx_status.ldpc =
3075 			u32_get_bits(info[1], HAL_RX_RESP_REQ_INFO1_LDPC);
3076 		tx_ppdu_info->rx_status.is_ampdu =
3077 			u32_get_bits(info[1], HAL_RX_RESP_REQ_INFO1_IS_AMPDU);
3078 		tx_ppdu_info->rx_status.num_users =
3079 			u32_get_bits(info[2], HAL_RX_RESP_REQ_INFO2_NUM_USER);
3080 
3081 		addr_32 = u32_get_bits(info[3], HAL_RX_RESP_REQ_INFO3_ADDR1_31_0);
3082 		addr_16 = u32_get_bits(info[3], HAL_RX_RESP_REQ_INFO4_ADDR1_47_32);
3083 		ath12k_dp_get_mac_addr(addr_32, addr_16, tx_ppdu_info->rx_status.addr1);
3084 
3085 		addr_16 = u32_get_bits(info[4], HAL_RX_RESP_REQ_INFO4_ADDR1_15_0);
3086 		addr_32 = u32_get_bits(info[5], HAL_RX_RESP_REQ_INFO5_ADDR1_47_16);
3087 		ath12k_dp_get_mac_addr(addr_32, addr_16, tx_ppdu_info->rx_status.addr2);
3088 
3089 		if (tx_ppdu_info->rx_status.reception_type == 0)
3090 			ath12k_dp_mon_tx_gen_cts2self_frame(tx_ppdu_info);
3091 		status = DP_MON_RX_RESPONSE_REQUIRED_INFO;
3092 		break;
3093 	}
3094 
3095 	case HAL_PCU_PPDU_SETUP_INIT: {
3096 		const struct hal_tx_pcu_ppdu_setup_init *ppdu_setup = tlv_data;
3097 		u32 addr_32;
3098 		u16 addr_16;
3099 
3100 		info[0] = __le32_to_cpu(ppdu_setup->info0);
3101 		info[1] = __le32_to_cpu(ppdu_setup->info1);
3102 		info[2] = __le32_to_cpu(ppdu_setup->info2);
3103 		info[3] = __le32_to_cpu(ppdu_setup->info3);
3104 		info[4] = __le32_to_cpu(ppdu_setup->info4);
3105 		info[5] = __le32_to_cpu(ppdu_setup->info5);
3106 		info[6] = __le32_to_cpu(ppdu_setup->info6);
3107 
3108 		/* protection frame address 1 */
3109 		addr_32 = u32_get_bits(info[1],
3110 				       HAL_TX_PPDU_SETUP_INFO1_PROT_FRAME_ADDR1_31_0);
3111 		addr_16 = u32_get_bits(info[2],
3112 				       HAL_TX_PPDU_SETUP_INFO2_PROT_FRAME_ADDR1_47_32);
3113 		ath12k_dp_get_mac_addr(addr_32, addr_16, tx_ppdu_info->rx_status.addr1);
3114 
3115 		/* protection frame address 2 */
3116 		addr_16 = u32_get_bits(info[2],
3117 				       HAL_TX_PPDU_SETUP_INFO2_PROT_FRAME_ADDR2_15_0);
3118 		addr_32 = u32_get_bits(info[3],
3119 				       HAL_TX_PPDU_SETUP_INFO3_PROT_FRAME_ADDR2_47_16);
3120 		ath12k_dp_get_mac_addr(addr_32, addr_16, tx_ppdu_info->rx_status.addr2);
3121 
3122 		/* protection frame address 3 */
3123 		addr_32 = u32_get_bits(info[4],
3124 				       HAL_TX_PPDU_SETUP_INFO4_PROT_FRAME_ADDR3_31_0);
3125 		addr_16 = u32_get_bits(info[5],
3126 				       HAL_TX_PPDU_SETUP_INFO5_PROT_FRAME_ADDR3_47_32);
3127 		ath12k_dp_get_mac_addr(addr_32, addr_16, tx_ppdu_info->rx_status.addr3);
3128 
3129 		/* protection frame address 4 */
3130 		addr_16 = u32_get_bits(info[5],
3131 				       HAL_TX_PPDU_SETUP_INFO5_PROT_FRAME_ADDR4_15_0);
3132 		addr_32 = u32_get_bits(info[6],
3133 				       HAL_TX_PPDU_SETUP_INFO6_PROT_FRAME_ADDR4_47_16);
3134 		ath12k_dp_get_mac_addr(addr_32, addr_16, tx_ppdu_info->rx_status.addr4);
3135 
3136 		status = u32_get_bits(info[0],
3137 				      HAL_TX_PPDU_SETUP_INFO0_MEDIUM_PROT_TYPE);
3138 		break;
3139 	}
3140 
3141 	case HAL_TX_QUEUE_EXTENSION: {
3142 		const struct hal_tx_queue_exten *tx_q_exten = tlv_data;
3143 
3144 		info[0] = __le32_to_cpu(tx_q_exten->info0);
3145 
3146 		tx_ppdu_info->rx_status.frame_control =
3147 			u32_get_bits(info[0],
3148 				     HAL_TX_Q_EXT_INFO0_FRAME_CTRL);
3149 		tx_ppdu_info->rx_status.fc_valid = true;
3150 		break;
3151 	}
3152 
3153 	case HAL_TX_FES_STATUS_START: {
3154 		const struct hal_tx_fes_status_start *tx_fes_start = tlv_data;
3155 
3156 		info[0] = __le32_to_cpu(tx_fes_start->info0);
3157 
3158 		tx_ppdu_info->rx_status.medium_prot_type =
3159 			u32_get_bits(info[0],
3160 				     HAL_TX_FES_STATUS_START_INFO0_MEDIUM_PROT_TYPE);
3161 		break;
3162 	}
3163 
3164 	case HAL_TX_FES_STATUS_PROT: {
3165 		const struct hal_tx_fes_status_prot *tx_fes_status = tlv_data;
3166 		u32 start_timestamp;
3167 		u32 end_timestamp;
3168 
3169 		info[0] = __le32_to_cpu(tx_fes_status->info0);
3170 		info[1] = __le32_to_cpu(tx_fes_status->info1);
3171 
3172 		start_timestamp =
3173 			u32_get_bits(info[0],
3174 				     HAL_TX_FES_STAT_PROT_INFO0_STRT_FRM_TS_15_0);
3175 		start_timestamp |=
3176 			u32_get_bits(info[0],
3177 				     HAL_TX_FES_STAT_PROT_INFO0_STRT_FRM_TS_31_16) << 16;
3178 		end_timestamp =
3179 			u32_get_bits(info[1],
3180 				     HAL_TX_FES_STAT_PROT_INFO1_END_FRM_TS_15_0);
3181 		end_timestamp |=
3182 			u32_get_bits(info[1],
3183 				     HAL_TX_FES_STAT_PROT_INFO1_END_FRM_TS_31_16) << 16;
3184 		tx_ppdu_info->rx_status.rx_duration = end_timestamp - start_timestamp;
3185 
3186 		ath12k_dp_mon_tx_gen_prot_frame(tx_ppdu_info);
3187 		break;
3188 	}
3189 
3190 	case HAL_TX_FES_STATUS_START_PPDU:
3191 	case HAL_TX_FES_STATUS_START_PROT: {
3192 		const struct hal_tx_fes_status_start_prot *tx_fes_stat_start = tlv_data;
3193 		u64 ppdu_ts;
3194 
3195 		info[0] = __le32_to_cpu(tx_fes_stat_start->info0);
3196 
3197 		tx_ppdu_info->rx_status.ppdu_ts =
3198 			u32_get_bits(info[0],
3199 				     HAL_TX_FES_STAT_STRT_INFO0_PROT_TS_LOWER_32);
3200 		ppdu_ts = (u32_get_bits(info[1],
3201 					HAL_TX_FES_STAT_STRT_INFO1_PROT_TS_UPPER_32));
3202 		tx_ppdu_info->rx_status.ppdu_ts |= ppdu_ts << 32;
3203 		break;
3204 	}
3205 
3206 	case HAL_TX_FES_STATUS_USER_PPDU: {
3207 		const struct hal_tx_fes_status_user_ppdu *tx_fes_usr_ppdu = tlv_data;
3208 
3209 		info[0] = __le32_to_cpu(tx_fes_usr_ppdu->info0);
3210 
3211 		tx_ppdu_info->rx_status.rx_duration =
3212 			u32_get_bits(info[0],
3213 				     HAL_TX_FES_STAT_USR_PPDU_INFO0_DURATION);
3214 		break;
3215 	}
3216 
3217 	case HAL_MACTX_HE_SIG_A_SU:
3218 		ath12k_dp_mon_parse_he_sig_su(tlv_data, &tx_ppdu_info->rx_status);
3219 		break;
3220 
3221 	case HAL_MACTX_HE_SIG_A_MU_DL:
3222 		ath12k_dp_mon_parse_he_sig_mu(tlv_data, &tx_ppdu_info->rx_status);
3223 		break;
3224 
3225 	case HAL_MACTX_HE_SIG_B1_MU:
3226 		ath12k_dp_mon_parse_he_sig_b1_mu(tlv_data, &tx_ppdu_info->rx_status);
3227 		break;
3228 
3229 	case HAL_MACTX_HE_SIG_B2_MU:
3230 		ath12k_dp_mon_parse_he_sig_b2_mu(tlv_data, &tx_ppdu_info->rx_status);
3231 		break;
3232 
3233 	case HAL_MACTX_HE_SIG_B2_OFDMA:
3234 		ath12k_dp_mon_parse_he_sig_b2_ofdma(tlv_data, &tx_ppdu_info->rx_status);
3235 		break;
3236 
3237 	case HAL_MACTX_VHT_SIG_A:
3238 		ath12k_dp_mon_parse_vht_sig_a(tlv_data, &tx_ppdu_info->rx_status);
3239 		break;
3240 
3241 	case HAL_MACTX_L_SIG_A:
3242 		ath12k_dp_mon_parse_l_sig_a(tlv_data, &tx_ppdu_info->rx_status);
3243 		break;
3244 
3245 	case HAL_MACTX_L_SIG_B:
3246 		ath12k_dp_mon_parse_l_sig_b(tlv_data, &tx_ppdu_info->rx_status);
3247 		break;
3248 
3249 	case HAL_RX_FRAME_BITMAP_ACK: {
3250 		const struct hal_rx_frame_bitmap_ack *fbm_ack = tlv_data;
3251 		u32 addr_32;
3252 		u16 addr_16;
3253 
3254 		info[0] = __le32_to_cpu(fbm_ack->info0);
3255 		info[1] = __le32_to_cpu(fbm_ack->info1);
3256 
3257 		addr_32 = u32_get_bits(info[0],
3258 				       HAL_RX_FBM_ACK_INFO0_ADDR1_31_0);
3259 		addr_16 = u32_get_bits(info[1],
3260 				       HAL_RX_FBM_ACK_INFO1_ADDR1_47_32);
3261 		ath12k_dp_get_mac_addr(addr_32, addr_16, tx_ppdu_info->rx_status.addr1);
3262 
3263 		ath12k_dp_mon_tx_gen_ack_frame(tx_ppdu_info);
3264 		break;
3265 	}
3266 
3267 	case HAL_MACTX_PHY_DESC: {
3268 		const struct hal_tx_phy_desc *tx_phy_desc = tlv_data;
3269 
3270 		info[0] = __le32_to_cpu(tx_phy_desc->info0);
3271 		info[1] = __le32_to_cpu(tx_phy_desc->info1);
3272 		info[2] = __le32_to_cpu(tx_phy_desc->info2);
3273 		info[3] = __le32_to_cpu(tx_phy_desc->info3);
3274 
3275 		tx_ppdu_info->rx_status.beamformed =
3276 			u32_get_bits(info[0],
3277 				     HAL_TX_PHY_DESC_INFO0_BF_TYPE);
3278 		tx_ppdu_info->rx_status.preamble_type =
3279 			u32_get_bits(info[0],
3280 				     HAL_TX_PHY_DESC_INFO0_PREAMBLE_11B);
3281 		tx_ppdu_info->rx_status.mcs =
3282 			u32_get_bits(info[1],
3283 				     HAL_TX_PHY_DESC_INFO1_MCS);
3284 		tx_ppdu_info->rx_status.ltf_size =
3285 			u32_get_bits(info[3],
3286 				     HAL_TX_PHY_DESC_INFO3_LTF_SIZE);
3287 		tx_ppdu_info->rx_status.nss =
3288 			u32_get_bits(info[2],
3289 				     HAL_TX_PHY_DESC_INFO2_NSS);
3290 		tx_ppdu_info->rx_status.chan_num =
3291 			u32_get_bits(info[3],
3292 				     HAL_TX_PHY_DESC_INFO3_ACTIVE_CHANNEL);
3293 		tx_ppdu_info->rx_status.bw =
3294 			u32_get_bits(info[0],
3295 				     HAL_TX_PHY_DESC_INFO0_BANDWIDTH);
3296 		break;
3297 	}
3298 
3299 	case HAL_TX_MPDU_START: {
3300 		struct dp_mon_mpdu *mon_mpdu = tx_ppdu_info->tx_mon_mpdu;
3301 		struct dp_mon_mpdu *mon_mpdu;
3302 
3303 		mon_mpdu = kzalloc(sizeof(*mon_mpdu), GFP_ATOMIC);
3304 		if (!mon_mpdu)
3305 			return DP_MON_TX_STATUS_PPDU_NOT_DONE;
		/* Keep track of the new MPDU so HAL_TX_MPDU_END can queue it */
		tx_ppdu_info->tx_mon_mpdu = mon_mpdu;
3306 		break;
3307 	}
3308 
3309 	case HAL_TX_MPDU_END:
3310 		list_add_tail(&tx_ppdu_info->tx_mon_mpdu->list,
3311 			      &tx_ppdu_info->dp_tx_mon_mpdu_list);
3312 		break;
3313 	}
3314 
3315 	return status;
3316 }
3317 
3318 enum dp_mon_tx_tlv_status
3319 ath12k_dp_mon_tx_status_get_num_user(u16 tlv_tag,
3320 				     struct hal_tlv_hdr *tx_tlv,
3321 				     u8 *num_users)
3322 {
3323 	u32 tlv_status = DP_MON_TX_STATUS_PPDU_NOT_DONE;
3324 	u32 info0;
3325 
3326 	switch (tlv_tag) {
3327 	case HAL_TX_FES_SETUP: {
3328 		struct hal_tx_fes_setup *tx_fes_setup =
3329 				(struct hal_tx_fes_setup *)tx_tlv;
3330 
3331 		info0 = __le32_to_cpu(tx_fes_setup->info0);
3332 
3333 		*num_users = u32_get_bits(info0, HAL_TX_FES_SETUP_INFO0_NUM_OF_USERS);
3334 		tlv_status = DP_MON_TX_FES_SETUP;
3335 		break;
3336 	}
3337 
3338 	case HAL_RX_RESPONSE_REQUIRED_INFO: {
3339 		/* TODO: need to update *num_users */
3340 		tlv_status = DP_MON_RX_RESPONSE_REQUIRED_INFO;
3341 		break;
3342 	}
3343 	}
3344 
3345 	return tlv_status;
3346 }
3347 
3348 static void
3349 ath12k_dp_mon_tx_process_ppdu_info(struct ath12k *ar,
3350 				   struct napi_struct *napi,
3351 				   struct dp_mon_tx_ppdu_info *tx_ppdu_info)
3352 {
3353 	struct dp_mon_mpdu *tmp, *mon_mpdu;
3354 
3355 	list_for_each_entry_safe(mon_mpdu, tmp,
3356 				 &tx_ppdu_info->dp_tx_mon_mpdu_list, list) {
3357 		list_del(&mon_mpdu->list);
3358 
3359 		if (mon_mpdu->head)
3360 			ath12k_dp_mon_rx_deliver(ar, mon_mpdu,
3361 						 &tx_ppdu_info->rx_status, napi);
3362 
3363 		kfree(mon_mpdu);
3364 	}
3365 }
3366 
3367 enum hal_rx_mon_status
3368 ath12k_dp_mon_tx_parse_mon_status(struct ath12k *ar,
3369 				  struct ath12k_mon_data *pmon,
3370 				  struct sk_buff *skb,
3371 				  struct napi_struct *napi,
3372 				  u32 ppdu_id)
3373 {
3374 	struct ath12k_base *ab = ar->ab;
3375 	struct dp_mon_tx_ppdu_info *tx_prot_ppdu_info, *tx_data_ppdu_info;
3376 	struct hal_tlv_hdr *tlv;
3377 	u8 *ptr = skb->data;
3378 	u16 tlv_tag;
3379 	u16 tlv_len;
3380 	u32 tlv_userid = 0;
3381 	u8 num_user;
3382 	u32 tlv_status = DP_MON_TX_STATUS_PPDU_NOT_DONE;
3383 
3384 	tx_prot_ppdu_info = ath12k_dp_mon_tx_get_ppdu_info(pmon, ppdu_id,
3385 							   DP_MON_TX_PROT_PPDU_INFO);
3386 	if (!tx_prot_ppdu_info)
3387 		return -ENOMEM;
3388 
3389 	tlv = (struct hal_tlv_hdr *)ptr;
3390 	tlv_tag = le32_get_bits(tlv->tl, HAL_TLV_HDR_TAG);
3391 
3392 	tlv_status = ath12k_dp_mon_tx_status_get_num_user(tlv_tag, tlv, &num_user);
3393 	if (tlv_status == DP_MON_TX_STATUS_PPDU_NOT_DONE || !num_user)
3394 		return -EINVAL;
3395 
3396 	tx_data_ppdu_info = ath12k_dp_mon_tx_get_ppdu_info(pmon, ppdu_id,
3397 							   DP_MON_TX_DATA_PPDU_INFO);
3398 	if (!tx_data_ppdu_info)
3399 		return -ENOMEM;
3400 
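	/* Walk the status buffer TLV by TLV until the FES end TLV is parsed or
	 * the end of the TX monitor buffer is reached.
	 */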
3401 	do {
3402 		tlv = (struct hal_tlv_hdr *)ptr;
3403 		tlv_tag = le32_get_bits(tlv->tl, HAL_TLV_HDR_TAG);
3404 		tlv_len = le32_get_bits(tlv->tl, HAL_TLV_HDR_LEN);
3405 		tlv_userid = le32_get_bits(tlv->tl, HAL_TLV_USR_ID);
3406 
3407 		tlv_status = ath12k_dp_mon_tx_parse_status_tlv(ab, pmon,
3408 							       tlv_tag, ptr,
3409 							       tlv_userid);
3410 		ptr += tlv_len;
3411 		ptr = PTR_ALIGN(ptr, HAL_TLV_ALIGN);
3412 		if ((ptr - skb->data) >= DP_TX_MONITOR_BUF_SIZE)
3413 			break;
3414 	} while (tlv_status != DP_MON_TX_FES_STATUS_END);
3415 
3416 	ath12k_dp_mon_tx_process_ppdu_info(ar, napi, tx_data_ppdu_info);
3417 	ath12k_dp_mon_tx_process_ppdu_info(ar, napi, tx_prot_ppdu_info);
3418 
3419 	return tlv_status;
3420 }
3421 
3422 static void
3423 ath12k_dp_mon_rx_update_peer_rate_table_stats(struct ath12k_rx_peer_stats *rx_stats,
3424 					      struct hal_rx_mon_ppdu_info *ppdu_info,
3425 					      struct hal_rx_user_status *user_stats,
3426 					      u32 num_msdu)
3427 {
3428 	struct ath12k_rx_peer_rate_stats *stats;
3429 	u32 mcs_idx = (user_stats) ? user_stats->mcs : ppdu_info->mcs;
3430 	u32 nss_idx = (user_stats) ? user_stats->nss - 1 : ppdu_info->nss - 1;
3431 	u32 bw_idx = ppdu_info->bw;
3432 	u32 gi_idx = ppdu_info->gi;
3433 	u32 len;
3434 
3435 	if (mcs_idx > HAL_RX_MAX_MCS_HT || nss_idx >= HAL_RX_MAX_NSS ||
3436 	    bw_idx >= HAL_RX_BW_MAX || gi_idx >= HAL_RX_GI_MAX) {
3437 		return;
3438 	}
3439 
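	/* For HE and EHT preambles, convert the HAL GI value to the nl80211 HE
	 * GI enumeration before indexing the rate table.
	 */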
3440 	if (ppdu_info->preamble_type == HAL_RX_PREAMBLE_11AX ||
3441 	    ppdu_info->preamble_type == HAL_RX_PREAMBLE_11BE)
3442 		gi_idx = ath12k_he_gi_to_nl80211_he_gi(ppdu_info->gi);
3443 
3444 	rx_stats->pkt_stats.rx_rate[bw_idx][gi_idx][nss_idx][mcs_idx] += num_msdu;
3445 	stats = &rx_stats->byte_stats;
3446 
3447 	if (user_stats)
3448 		len = user_stats->mpdu_ok_byte_count;
3449 	else
3450 		len = ppdu_info->mpdu_len;
3451 
3452 	stats->rx_rate[bw_idx][gi_idx][nss_idx][mcs_idx] += len;
3453 }
3454 
3455 static void ath12k_dp_mon_rx_update_peer_su_stats(struct ath12k *ar,
3456 						  struct ath12k_link_sta *arsta,
3457 						  struct hal_rx_mon_ppdu_info *ppdu_info)
3458 {
3459 	struct ath12k_rx_peer_stats *rx_stats = arsta->rx_stats;
3460 	u32 num_msdu;
3461 
3462 	arsta->rssi_comb = ppdu_info->rssi_comb;
3463 	ewma_avg_rssi_add(&arsta->avg_rssi, ppdu_info->rssi_comb);
3464 	if (!rx_stats)
3465 		return;
3466 
3467 	num_msdu = ppdu_info->tcp_msdu_count + ppdu_info->tcp_ack_msdu_count +
3468 		   ppdu_info->udp_msdu_count + ppdu_info->other_msdu_count;
3469 
3470 	rx_stats->num_msdu += num_msdu;
3471 	rx_stats->tcp_msdu_count += ppdu_info->tcp_msdu_count +
3472 				    ppdu_info->tcp_ack_msdu_count;
3473 	rx_stats->udp_msdu_count += ppdu_info->udp_msdu_count;
3474 	rx_stats->other_msdu_count += ppdu_info->other_msdu_count;
3475 
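	/* Legacy CCK/OFDM PPDUs carry no NSS/MCS/TID information; force a
	 * single spatial stream and use the sentinel MCS and TID values.
	 */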
3476 	if (ppdu_info->preamble_type == HAL_RX_PREAMBLE_11A ||
3477 	    ppdu_info->preamble_type == HAL_RX_PREAMBLE_11B) {
3478 		ppdu_info->nss = 1;
3479 		ppdu_info->mcs = HAL_RX_MAX_MCS;
3480 		ppdu_info->tid = IEEE80211_NUM_TIDS;
3481 	}
3482 
3483 	if (ppdu_info->ldpc < HAL_RX_SU_MU_CODING_MAX)
3484 		rx_stats->coding_count[ppdu_info->ldpc] += num_msdu;
3485 
3486 	if (ppdu_info->tid <= IEEE80211_NUM_TIDS)
3487 		rx_stats->tid_count[ppdu_info->tid] += num_msdu;
3488 
3489 	if (ppdu_info->preamble_type < HAL_RX_PREAMBLE_MAX)
3490 		rx_stats->pream_cnt[ppdu_info->preamble_type] += num_msdu;
3491 
3492 	if (ppdu_info->reception_type < HAL_RX_RECEPTION_TYPE_MAX)
3493 		rx_stats->reception_type[ppdu_info->reception_type] += num_msdu;
3494 
3495 	if (ppdu_info->is_stbc)
3496 		rx_stats->stbc_count += num_msdu;
3497 
3498 	if (ppdu_info->beamformed)
3499 		rx_stats->beamformed_count += num_msdu;
3500 
3501 	if (ppdu_info->num_mpdu_fcs_ok > 1)
3502 		rx_stats->ampdu_msdu_count += num_msdu;
3503 	else
3504 		rx_stats->non_ampdu_msdu_count += num_msdu;
3505 
3506 	rx_stats->num_mpdu_fcs_ok += ppdu_info->num_mpdu_fcs_ok;
3507 	rx_stats->num_mpdu_fcs_err += ppdu_info->num_mpdu_fcs_err;
3508 	rx_stats->dcm_count += ppdu_info->dcm;
3509 
3510 	rx_stats->rx_duration += ppdu_info->rx_duration;
3511 	arsta->rx_duration = rx_stats->rx_duration;
3512 
3513 	if (ppdu_info->nss > 0 && ppdu_info->nss <= HAL_RX_MAX_NSS) {
3514 		rx_stats->pkt_stats.nss_count[ppdu_info->nss - 1] += num_msdu;
3515 		rx_stats->byte_stats.nss_count[ppdu_info->nss - 1] += ppdu_info->mpdu_len;
3516 	}
3517 
3518 	if (ppdu_info->preamble_type == HAL_RX_PREAMBLE_11N &&
3519 	    ppdu_info->mcs <= HAL_RX_MAX_MCS_HT) {
3520 		rx_stats->pkt_stats.ht_mcs_count[ppdu_info->mcs] += num_msdu;
3521 		rx_stats->byte_stats.ht_mcs_count[ppdu_info->mcs] += ppdu_info->mpdu_len;
3522 		/* To fit into rate table for HT packets */
3523 		ppdu_info->mcs = ppdu_info->mcs % 8;
3524 	}
3525 
3526 	if (ppdu_info->preamble_type == HAL_RX_PREAMBLE_11AC &&
3527 	    ppdu_info->mcs <= HAL_RX_MAX_MCS_VHT) {
3528 		rx_stats->pkt_stats.vht_mcs_count[ppdu_info->mcs] += num_msdu;
3529 		rx_stats->byte_stats.vht_mcs_count[ppdu_info->mcs] += ppdu_info->mpdu_len;
3530 	}
3531 
3532 	if (ppdu_info->preamble_type == HAL_RX_PREAMBLE_11AX &&
3533 	    ppdu_info->mcs <= HAL_RX_MAX_MCS_HE) {
3534 		rx_stats->pkt_stats.he_mcs_count[ppdu_info->mcs] += num_msdu;
3535 		rx_stats->byte_stats.he_mcs_count[ppdu_info->mcs] += ppdu_info->mpdu_len;
3536 	}
3537 
3538 	if (ppdu_info->preamble_type == HAL_RX_PREAMBLE_11BE &&
3539 	    ppdu_info->mcs <= HAL_RX_MAX_MCS_BE) {
3540 		rx_stats->pkt_stats.be_mcs_count[ppdu_info->mcs] += num_msdu;
3541 		rx_stats->byte_stats.be_mcs_count[ppdu_info->mcs] += ppdu_info->mpdu_len;
3542 	}
3543 
3544 	if ((ppdu_info->preamble_type == HAL_RX_PREAMBLE_11A ||
3545 	     ppdu_info->preamble_type == HAL_RX_PREAMBLE_11B) &&
3546 	     ppdu_info->rate < HAL_RX_LEGACY_RATE_INVALID) {
3547 		rx_stats->pkt_stats.legacy_count[ppdu_info->rate] += num_msdu;
3548 		rx_stats->byte_stats.legacy_count[ppdu_info->rate] += ppdu_info->mpdu_len;
3549 	}
3550 
3551 	if (ppdu_info->gi < HAL_RX_GI_MAX) {
3552 		rx_stats->pkt_stats.gi_count[ppdu_info->gi] += num_msdu;
3553 		rx_stats->byte_stats.gi_count[ppdu_info->gi] += ppdu_info->mpdu_len;
3554 	}
3555 
3556 	if (ppdu_info->bw < HAL_RX_BW_MAX) {
3557 		rx_stats->pkt_stats.bw_count[ppdu_info->bw] += num_msdu;
3558 		rx_stats->byte_stats.bw_count[ppdu_info->bw] += ppdu_info->mpdu_len;
3559 	}
3560 
3561 	ath12k_dp_mon_rx_update_peer_rate_table_stats(rx_stats, ppdu_info,
3562 						      NULL, num_msdu);
3563 }
3564 
3565 void ath12k_dp_mon_rx_process_ulofdma(struct hal_rx_mon_ppdu_info *ppdu_info)
3566 {
3567 	struct hal_rx_user_status *rx_user_status;
3568 	u32 num_users, i, mu_ul_user_v0_word0, mu_ul_user_v0_word1, ru_size;
3569 
3570 	if (!(ppdu_info->reception_type == HAL_RX_RECEPTION_TYPE_MU_MIMO ||
3571 	      ppdu_info->reception_type == HAL_RX_RECEPTION_TYPE_MU_OFDMA ||
3572 	      ppdu_info->reception_type == HAL_RX_RECEPTION_TYPE_MU_OFDMA_MIMO))
3573 		return;
3574 
3575 	num_users = ppdu_info->num_users;
3576 	if (num_users > HAL_MAX_UL_MU_USERS)
3577 		num_users = HAL_MAX_UL_MU_USERS;
3578 
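	/* Decode the UL OFDMA user info words of each user into MCS, NSS and
	 * RU allocation, provided the info is marked valid and is version 0.
	 */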
3579 	for (i = 0; i < num_users; i++) {
3580 		rx_user_status = &ppdu_info->userstats[i];
3581 		mu_ul_user_v0_word0 =
3582 			rx_user_status->ul_ofdma_user_v0_word0;
3583 		mu_ul_user_v0_word1 =
3584 			rx_user_status->ul_ofdma_user_v0_word1;
3585 
3586 		if (u32_get_bits(mu_ul_user_v0_word0,
3587 				 HAL_RX_UL_OFDMA_USER_INFO_V0_W0_VALID) &&
3588 		    !u32_get_bits(mu_ul_user_v0_word0,
3589 				  HAL_RX_UL_OFDMA_USER_INFO_V0_W0_VER)) {
3590 			rx_user_status->mcs =
3591 				u32_get_bits(mu_ul_user_v0_word1,
3592 					     HAL_RX_UL_OFDMA_USER_INFO_V0_W1_MCS);
3593 			rx_user_status->nss =
3594 				u32_get_bits(mu_ul_user_v0_word1,
3595 					     HAL_RX_UL_OFDMA_USER_INFO_V0_W1_NSS) + 1;
3596 
3597 			rx_user_status->ofdma_info_valid = 1;
3598 			rx_user_status->ul_ofdma_ru_start_index =
3599 				u32_get_bits(mu_ul_user_v0_word1,
3600 					     HAL_RX_UL_OFDMA_USER_INFO_V0_W1_RU_START);
3601 
3602 			ru_size = u32_get_bits(mu_ul_user_v0_word1,
3603 					       HAL_RX_UL_OFDMA_USER_INFO_V0_W1_RU_SIZE);
3604 			rx_user_status->ul_ofdma_ru_width = ru_size;
3605 			rx_user_status->ul_ofdma_ru_size = ru_size;
3606 		}
3607 		rx_user_status->ldpc = u32_get_bits(mu_ul_user_v0_word1,
3608 						    HAL_RX_UL_OFDMA_USER_INFO_V0_W1_LDPC);
3609 	}
3610 	ppdu_info->ldpc = 1;
3611 }
3612 
3613 static void
3614 ath12k_dp_mon_rx_update_user_stats(struct ath12k *ar,
3615 				   struct hal_rx_mon_ppdu_info *ppdu_info,
3616 				   u32 uid)
3617 {
3618 	struct ath12k_link_sta *arsta;
3619 	struct ath12k_rx_peer_stats *rx_stats = NULL;
3620 	struct hal_rx_user_status *user_stats = &ppdu_info->userstats[uid];
3621 	struct ath12k_peer *peer;
3622 	u32 num_msdu;
3623 
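	/* Per-user stats are looked up via the AST index; skip users without a
	 * valid AST entry.
	 */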
3624 	if (user_stats->ast_index == 0 || user_stats->ast_index == 0xFFFF)
3625 		return;
3626 
3627 	peer = ath12k_peer_find_by_ast(ar->ab, user_stats->ast_index);
3628 
3629 	if (!peer) {
3630 		ath12k_warn(ar->ab, "peer ast idx %d can't be found\n",
3631 			    user_stats->ast_index);
3632 		return;
3633 	}
3634 
3635 	arsta = ath12k_peer_get_link_sta(ar->ab, peer);
3636 	if (!arsta) {
3637 		ath12k_warn(ar->ab, "link sta not found on peer %pM id %d\n",
3638 			    peer->addr, peer->peer_id);
3639 		return;
3640 	}
3641 
3642 	arsta->rssi_comb = ppdu_info->rssi_comb;
3643 	ewma_avg_rssi_add(&arsta->avg_rssi, ppdu_info->rssi_comb);
3644 	rx_stats = arsta->rx_stats;
3645 	if (!rx_stats)
3646 		return;
3647 
3648 	num_msdu = user_stats->tcp_msdu_count + user_stats->tcp_ack_msdu_count +
3649 		   user_stats->udp_msdu_count + user_stats->other_msdu_count;
3650 
3651 	rx_stats->num_msdu += num_msdu;
3652 	rx_stats->tcp_msdu_count += user_stats->tcp_msdu_count +
3653 				    user_stats->tcp_ack_msdu_count;
3654 	rx_stats->udp_msdu_count += user_stats->udp_msdu_count;
3655 	rx_stats->other_msdu_count += user_stats->other_msdu_count;
3656 
3657 	if (ppdu_info->ldpc < HAL_RX_SU_MU_CODING_MAX)
3658 		rx_stats->coding_count[ppdu_info->ldpc] += num_msdu;
3659 
3660 	if (user_stats->tid <= IEEE80211_NUM_TIDS)
3661 		rx_stats->tid_count[user_stats->tid] += num_msdu;
3662 
3663 	if (user_stats->preamble_type < HAL_RX_PREAMBLE_MAX)
3664 		rx_stats->pream_cnt[user_stats->preamble_type] += num_msdu;
3665 
3666 	if (ppdu_info->reception_type < HAL_RX_RECEPTION_TYPE_MAX)
3667 		rx_stats->reception_type[ppdu_info->reception_type] += num_msdu;
3668 
3669 	if (ppdu_info->is_stbc)
3670 		rx_stats->stbc_count += num_msdu;
3671 
3672 	if (ppdu_info->beamformed)
3673 		rx_stats->beamformed_count += num_msdu;
3674 
3675 	if (user_stats->mpdu_cnt_fcs_ok > 1)
3676 		rx_stats->ampdu_msdu_count += num_msdu;
3677 	else
3678 		rx_stats->non_ampdu_msdu_count += num_msdu;
3679 
3680 	rx_stats->num_mpdu_fcs_ok += user_stats->mpdu_cnt_fcs_ok;
3681 	rx_stats->num_mpdu_fcs_err += user_stats->mpdu_cnt_fcs_err;
3682 	rx_stats->dcm_count += ppdu_info->dcm;
3683 	if (ppdu_info->reception_type == HAL_RX_RECEPTION_TYPE_MU_OFDMA ||
3684 	    ppdu_info->reception_type == HAL_RX_RECEPTION_TYPE_MU_OFDMA_MIMO)
3685 		rx_stats->ru_alloc_cnt[user_stats->ul_ofdma_ru_size] += num_msdu;
3686 
3687 	rx_stats->rx_duration += ppdu_info->rx_duration;
3688 	arsta->rx_duration = rx_stats->rx_duration;
3689 
3690 	if (user_stats->nss > 0 && user_stats->nss <= HAL_RX_MAX_NSS) {
3691 		rx_stats->pkt_stats.nss_count[user_stats->nss - 1] += num_msdu;
3692 		rx_stats->byte_stats.nss_count[user_stats->nss - 1] +=
3693 						user_stats->mpdu_ok_byte_count;
3694 	}
3695 
3696 	if (user_stats->preamble_type == HAL_RX_PREAMBLE_11AX &&
3697 	    user_stats->mcs <= HAL_RX_MAX_MCS_HE) {
3698 		rx_stats->pkt_stats.he_mcs_count[user_stats->mcs] += num_msdu;
3699 		rx_stats->byte_stats.he_mcs_count[user_stats->mcs] +=
3700 						user_stats->mpdu_ok_byte_count;
3701 	}
3702 
3703 	if (ppdu_info->gi < HAL_RX_GI_MAX) {
3704 		rx_stats->pkt_stats.gi_count[ppdu_info->gi] += num_msdu;
3705 		rx_stats->byte_stats.gi_count[ppdu_info->gi] +=
3706 						user_stats->mpdu_ok_byte_count;
3707 	}
3708 
3709 	if (ppdu_info->bw < HAL_RX_BW_MAX) {
3710 		rx_stats->pkt_stats.bw_count[ppdu_info->bw] += num_msdu;
3711 		rx_stats->byte_stats.bw_count[ppdu_info->bw] +=
3712 						user_stats->mpdu_ok_byte_count;
3713 	}
3714 
3715 	ath12k_dp_mon_rx_update_peer_rate_table_stats(rx_stats, ppdu_info,
3716 						      user_stats, num_msdu);
3717 }
3718 
3719 static void
3720 ath12k_dp_mon_rx_update_peer_mu_stats(struct ath12k *ar,
3721 				      struct hal_rx_mon_ppdu_info *ppdu_info)
3722 {
3723 	u32 num_users, i;
3724 
3725 	num_users = ppdu_info->num_users;
3726 	if (num_users > HAL_MAX_UL_MU_USERS)
3727 		num_users = HAL_MAX_UL_MU_USERS;
3728 
3729 	for (i = 0; i < num_users; i++)
3730 		ath12k_dp_mon_rx_update_user_stats(ar, ppdu_info, i);
3731 }
3732 
3733 static void
3734 ath12k_dp_mon_rx_memset_ppdu_info(struct hal_rx_mon_ppdu_info *ppdu_info)
3735 {
3736 	memset(ppdu_info, 0, sizeof(*ppdu_info));
3737 	ppdu_info->peer_id = HAL_INVALID_PEERID;
3738 }
3739 
3740 int ath12k_dp_mon_srng_process(struct ath12k *ar, int *budget,
3741 			       struct napi_struct *napi)
3742 {
3743 	struct ath12k_base *ab = ar->ab;
3744 	struct ath12k_pdev_dp *pdev_dp = &ar->dp;
3745 	struct ath12k_mon_data *pmon = (struct ath12k_mon_data *)&pdev_dp->mon_data;
3746 	struct hal_rx_mon_ppdu_info *ppdu_info = &pmon->mon_ppdu_info;
3747 	struct ath12k_dp *dp = &ab->dp;
3748 	struct hal_mon_dest_desc *mon_dst_desc;
3749 	struct sk_buff *skb;
3750 	struct ath12k_skb_rxcb *rxcb;
3751 	struct dp_srng *mon_dst_ring;
3752 	struct hal_srng *srng;
3753 	struct dp_rxdma_mon_ring *buf_ring;
3754 	struct ath12k_link_sta *arsta;
3755 	struct ath12k_peer *peer;
3756 	struct sk_buff_head skb_list;
3757 	u64 cookie;
3758 	int num_buffs_reaped = 0, srng_id, buf_id;
3759 	u32 hal_status, end_offset, info0, end_reason;
3760 	u8 pdev_idx = ath12k_hw_mac_id_to_pdev_id(ab->hw_params, ar->pdev_idx);
3761 
3762 	__skb_queue_head_init(&skb_list);
3763 	srng_id = ath12k_hw_mac_id_to_srng_id(ab->hw_params, pdev_idx);
3764 	mon_dst_ring = &pdev_dp->rxdma_mon_dst_ring[srng_id];
3765 	buf_ring = &dp->rxdma_mon_buf_ring;
3766 
3767 	srng = &ab->hal.srng_list[mon_dst_ring->ring_id];
3768 	spin_lock_bh(&srng->lock);
3769 	ath12k_hal_srng_access_begin(ab, srng);
3770 
3771 	while (likely(*budget)) {
3772 		mon_dst_desc = ath12k_hal_srng_dst_peek(ab, srng);
3773 		if (unlikely(!mon_dst_desc))
3774 			break;
3775 
3776 		/* In case of an empty descriptor, the cookie in the ring descriptor
3777 		 * is invalid. Therefore, this entry is skipped and ring processing
3778 		 * continues.
3779 		 */
3780 		info0 = le32_to_cpu(mon_dst_desc->info0);
3781 		if (u32_get_bits(info0, HAL_MON_DEST_INFO0_EMPTY_DESC))
3782 			goto move_next;
3783 
3784 		cookie = le32_to_cpu(mon_dst_desc->cookie);
3785 		buf_id = u32_get_bits(cookie, DP_RXDMA_BUF_COOKIE_BUF_ID);
3786 
3787 		spin_lock_bh(&buf_ring->idr_lock);
3788 		skb = idr_remove(&buf_ring->bufs_idr, buf_id);
3789 		spin_unlock_bh(&buf_ring->idr_lock);
3790 
3791 		if (unlikely(!skb)) {
3792 			ath12k_warn(ab, "monitor destination with invalid buf_id %d\n",
3793 				    buf_id);
3794 			goto move_next;
3795 		}
3796 
3797 		rxcb = ATH12K_SKB_RXCB(skb);
3798 		dma_unmap_single(ab->dev, rxcb->paddr,
3799 				 skb->len + skb_tailroom(skb),
3800 				 DMA_FROM_DEVICE);
3801 
3802 		end_reason = u32_get_bits(info0, HAL_MON_DEST_INFO0_END_REASON);
3803 
3804 		/* HAL_MON_FLUSH_DETECTED implies that an rx flush was received at the
3805 		 * end of the rx PPDU, and HAL_MON_PPDU_TRUNCATED implies that the PPDU
3806 		 * got truncated due to a system level error. In both cases the buffer
3807 		 * data can be discarded.
3808 		 */
3809 		if ((end_reason == HAL_MON_FLUSH_DETECTED) ||
3810 		    (end_reason == HAL_MON_PPDU_TRUNCATED)) {
3811 			ath12k_dbg(ab, ATH12K_DBG_DATA,
3812 				   "Monitor dest descriptor end reason %d", end_reason);
3813 			dev_kfree_skb_any(skb);
3814 			goto move_next;
3815 		}
3816 
3817 		/* Charge the NAPI budget only when the ring descriptor is marked
3818 		 * HAL_MON_END_OF_PPDU, to ensure that one PPDU worth of data is always
3819 		 * reaped. This helps to utilize the NAPI budget efficiently.
3820 		 */
3821 		if (end_reason == HAL_MON_END_OF_PPDU) {
3822 			*budget -= 1;
3823 			rxcb->is_end_of_ppdu = true;
3824 		}
3825 
3826 		end_offset = u32_get_bits(info0, HAL_MON_DEST_INFO0_END_OFFSET);
3827 		if (likely(end_offset <= DP_RX_BUFFER_SIZE)) {
3828 			skb_put(skb, end_offset);
3829 		} else {
3830 			ath12k_warn(ab,
3831 				    "invalid offset on mon stats destination %u\n",
3832 				    end_offset);
3833 			skb_put(skb, DP_RX_BUFFER_SIZE);
3834 		}
3835 
3836 		__skb_queue_tail(&skb_list, skb);
3837 
3838 move_next:
3839 		ath12k_dp_mon_buf_replenish(ab, buf_ring, 1);
3840 		ath12k_hal_srng_dst_get_next_entry(ab, srng);
3841 		num_buffs_reaped++;
3842 	}
3843 
3844 	ath12k_hal_srng_access_end(ab, srng);
3845 	spin_unlock_bh(&srng->lock);
3846 
3847 	if (!num_buffs_reaped)
3848 		return 0;
3849 
3850 	/* In some cases, one PPDU worth of data can be spread across multiple NAPI
3851 	 * schedules. To avoid losing the already parsed ppdu_info, skip the memset
3852 	 * of the ppdu_info structure and continue processing it.
3853 	 */
3854 	if (!ppdu_info->ppdu_continuation)
3855 		ath12k_dp_mon_rx_memset_ppdu_info(ppdu_info);
3856 
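	/* Parse each reaped status buffer and update per-peer stats only once
	 * a complete PPDU has been parsed.
	 */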
3857 	while ((skb = __skb_dequeue(&skb_list))) {
3858 		hal_status = ath12k_dp_mon_rx_parse_mon_status(ar, pmon, skb, napi);
3859 		if (hal_status != HAL_RX_MON_STATUS_PPDU_DONE) {
3860 			ppdu_info->ppdu_continuation = true;
3861 			dev_kfree_skb_any(skb);
3862 			continue;
3863 		}
3864 
3865 		if (ppdu_info->peer_id == HAL_INVALID_PEERID)
3866 			goto free_skb;
3867 
3868 		rcu_read_lock();
3869 		spin_lock_bh(&ab->base_lock);
3870 		peer = ath12k_peer_find_by_id(ab, ppdu_info->peer_id);
3871 		if (!peer || !peer->sta) {
3872 			ath12k_dbg(ab, ATH12K_DBG_DATA,
3873 				   "failed to find the peer with monitor peer_id %d\n",
3874 				   ppdu_info->peer_id);
3875 			goto next_skb;
3876 		}
3877 
3878 		if (ppdu_info->reception_type == HAL_RX_RECEPTION_TYPE_SU) {
3879 			arsta = ath12k_peer_get_link_sta(ar->ab, peer);
3880 			if (!arsta) {
3881 				ath12k_warn(ar->ab, "link sta not found on peer %pM id %d\n",
3882 					    peer->addr, peer->peer_id);
3883 				spin_unlock_bh(&ab->base_lock);
3884 				rcu_read_unlock();
3885 				dev_kfree_skb_any(skb);
3886 				continue;
3887 			}
3888 			ath12k_dp_mon_rx_update_peer_su_stats(ar, arsta,
3889 							      ppdu_info);
3890 		} else if ((ppdu_info->fc_valid) &&
3891 			   (ppdu_info->ast_index != HAL_AST_IDX_INVALID)) {
3892 			ath12k_dp_mon_rx_process_ulofdma(ppdu_info);
3893 			ath12k_dp_mon_rx_update_peer_mu_stats(ar, ppdu_info);
3894 		}
3895 
3896 next_skb:
3897 		spin_unlock_bh(&ab->base_lock);
3898 		rcu_read_unlock();
3899 free_skb:
3900 		dev_kfree_skb_any(skb);
3901 		ath12k_dp_mon_rx_memset_ppdu_info(ppdu_info);
3902 	}
3903 
3904 	return num_buffs_reaped;
3905 }
3906 
3907 static int ath12k_dp_rx_reap_mon_status_ring(struct ath12k_base *ab, int mac_id,
3908 					     int *budget, struct sk_buff_head *skb_list)
3909 {
3910 	const struct ath12k_hw_hal_params *hal_params;
3911 	int buf_id, srng_id, num_buffs_reaped = 0;
3912 	enum dp_mon_status_buf_state reap_status;
3913 	struct dp_rxdma_mon_ring *rx_ring;
3914 	struct ath12k_mon_data *pmon;
3915 	struct ath12k_skb_rxcb *rxcb;
3916 	struct hal_tlv_64_hdr *tlv;
3917 	void *rx_mon_status_desc;
3918 	struct hal_srng *srng;
3919 	struct ath12k_dp *dp;
3920 	struct sk_buff *skb;
3921 	struct ath12k *ar;
3922 	dma_addr_t paddr;
3923 	u32 cookie;
3924 	u8 rbm;
3925 
3926 	ar = ab->pdevs[ath12k_hw_mac_id_to_pdev_id(ab->hw_params, mac_id)].ar;
3927 	dp = &ab->dp;
3928 	pmon = &ar->dp.mon_data;
3929 	srng_id = ath12k_hw_mac_id_to_srng_id(ab->hw_params, mac_id);
3930 	rx_ring = &dp->rx_mon_status_refill_ring[srng_id];
3931 
3932 	srng = &ab->hal.srng_list[rx_ring->refill_buf_ring.ring_id];
3933 
3934 	spin_lock_bh(&srng->lock);
3935 
3936 	ath12k_hal_srng_access_begin(ab, srng);
3937 
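	/* Reap completed status buffers from the refill ring and replenish
	 * every reaped entry with a freshly allocated buffer.
	 */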
3938 	while (*budget) {
3939 		*budget -= 1;
3940 		rx_mon_status_desc = ath12k_hal_srng_src_peek(ab, srng);
3941 		if (!rx_mon_status_desc) {
3942 			pmon->buf_state = DP_MON_STATUS_REPLINISH;
3943 			break;
3944 		}
3945 		ath12k_hal_rx_buf_addr_info_get(rx_mon_status_desc, &paddr,
3946 						&cookie, &rbm);
3947 		if (paddr) {
3948 			buf_id = u32_get_bits(cookie, DP_RXDMA_BUF_COOKIE_BUF_ID);
3949 
3950 			spin_lock_bh(&rx_ring->idr_lock);
3951 			skb = idr_find(&rx_ring->bufs_idr, buf_id);
3952 			spin_unlock_bh(&rx_ring->idr_lock);
3953 
3954 			if (!skb) {
3955 				ath12k_warn(ab, "rx monitor status with invalid buf_id %d\n",
3956 					    buf_id);
3957 				pmon->buf_state = DP_MON_STATUS_REPLINISH;
3958 				goto move_next;
3959 			}
3960 
3961 			rxcb = ATH12K_SKB_RXCB(skb);
3962 
3963 			dma_sync_single_for_cpu(ab->dev, rxcb->paddr,
3964 						skb->len + skb_tailroom(skb),
3965 						DMA_FROM_DEVICE);
3966 
3967 			tlv = (struct hal_tlv_64_hdr *)skb->data;
3968 			if (le64_get_bits(tlv->tl, HAL_TLV_HDR_TAG) !=
3969 					HAL_RX_STATUS_BUFFER_DONE) {
3970 				pmon->buf_state = DP_MON_STATUS_NO_DMA;
3971 				ath12k_warn(ab,
3972 					    "mon status DONE not set %llx, buf_id %d\n",
3973 					    le64_get_bits(tlv->tl, HAL_TLV_HDR_TAG),
3974 					    buf_id);
3975 				/* The RxDMA status done bit might not be set
3976 				 * even though the tail pointer (tp) was moved by HW.
3977 				 */
3978 
3979 				/* If the done status is missing:
3980 				 * 1. As per the MAC team's suggestion, when the
3981 				 *    HP + 1 entry is peeked, its DMA is not done
3982 				 *    and the HP + 2 entry's DMA done is set, skip
3983 				 *    the HP + 1 entry and start processing it in
3984 				 *    the next interrupt.
3985 				 * 2. If the HP + 2 entry's DMA done is not set,
3986 				 *    poll on the HP + 1 entry's DMA done bit and
3987 				 *    check the status of the same buffer the next
3988 				 *    time dp_rx_mon_status_srng_process runs.
3989 				 */
3990 				reap_status = ath12k_dp_rx_mon_buf_done(ab, srng,
3991 									rx_ring);
3992 				if (reap_status == DP_MON_STATUS_NO_DMA)
3993 					continue;
3994 
3995 				spin_lock_bh(&rx_ring->idr_lock);
3996 				idr_remove(&rx_ring->bufs_idr, buf_id);
3997 				spin_unlock_bh(&rx_ring->idr_lock);
3998 
3999 				dma_unmap_single(ab->dev, rxcb->paddr,
4000 						 skb->len + skb_tailroom(skb),
4001 						 DMA_FROM_DEVICE);
4002 
4003 				dev_kfree_skb_any(skb);
4004 				pmon->buf_state = DP_MON_STATUS_REPLINISH;
4005 				goto move_next;
4006 			}
4007 
4008 			spin_lock_bh(&rx_ring->idr_lock);
4009 			idr_remove(&rx_ring->bufs_idr, buf_id);
4010 			spin_unlock_bh(&rx_ring->idr_lock);
4011 
4012 			dma_unmap_single(ab->dev, rxcb->paddr,
4013 					 skb->len + skb_tailroom(skb),
4014 					 DMA_FROM_DEVICE);
4015 
4016 			if (ath12k_dp_pkt_set_pktlen(skb, RX_MON_STATUS_BUF_SIZE)) {
4017 				dev_kfree_skb_any(skb);
4018 				goto move_next;
4019 			}
4020 			__skb_queue_tail(skb_list, skb);
4021 		} else {
4022 			pmon->buf_state = DP_MON_STATUS_REPLINISH;
4023 		}
4024 move_next:
4025 		skb = ath12k_dp_rx_alloc_mon_status_buf(ab, rx_ring,
4026 							&buf_id);
4027 
4028 		if (!skb) {
4029 			ath12k_warn(ab, "failed to alloc buffer for status ring\n");
4030 			hal_params = ab->hw_params->hal_params;
4031 			ath12k_hal_rx_buf_addr_info_set(rx_mon_status_desc, 0, 0,
4032 							hal_params->rx_buf_rbm);
4033 			num_buffs_reaped++;
4034 			break;
4035 		}
4036 		rxcb = ATH12K_SKB_RXCB(skb);
4037 
4038 		cookie = u32_encode_bits(mac_id, DP_RXDMA_BUF_COOKIE_PDEV_ID) |
4039 			 u32_encode_bits(buf_id, DP_RXDMA_BUF_COOKIE_BUF_ID);
4040 
4041 		ath12k_hal_rx_buf_addr_info_set(rx_mon_status_desc, rxcb->paddr,
4042 						cookie,
4043 						ab->hw_params->hal_params->rx_buf_rbm);
4044 		ath12k_hal_srng_src_get_next_entry(ab, srng);
4045 		num_buffs_reaped++;
4046 	}
4047 	ath12k_hal_srng_access_end(ab, srng);
4048 	spin_unlock_bh(&srng->lock);
4049 
4050 	return num_buffs_reaped;
4051 }
4052 
4053 static u32
4054 ath12k_dp_rx_mon_mpdu_pop(struct ath12k *ar, int mac_id,
4055 			  void *ring_entry, struct sk_buff **head_msdu,
4056 			  struct sk_buff **tail_msdu,
4057 			  struct list_head *used_list,
4058 			  u32 *npackets, u32 *ppdu_id)
4059 {
4060 	struct ath12k_mon_data *pmon = (struct ath12k_mon_data *)&ar->dp.mon_data;
4061 	struct ath12k_buffer_addr *p_buf_addr_info, *p_last_buf_addr_info;
4062 	u32 msdu_ppdu_id = 0, msdu_cnt = 0, total_len = 0, frag_len = 0;
4063 	u32 rx_buf_size, rx_pkt_offset, sw_cookie;
4064 	bool is_frag, is_first_msdu, drop_mpdu = false;
4065 	struct hal_reo_entrance_ring *ent_desc =
4066 		(struct hal_reo_entrance_ring *)ring_entry;
4067 	u32 rx_bufs_used = 0, i = 0, desc_bank = 0;
4068 	struct hal_rx_desc *rx_desc, *tail_rx_desc;
4069 	struct hal_rx_msdu_link *msdu_link_desc;
4070 	struct sk_buff *msdu = NULL, *last = NULL;
4071 	struct ath12k_rx_desc_info *desc_info;
4072 	struct ath12k_buffer_addr buf_info;
4073 	struct hal_rx_msdu_list msdu_list;
4074 	struct ath12k_skb_rxcb *rxcb;
4075 	u16 num_msdus = 0;
4076 	dma_addr_t paddr;
4077 	u8 rbm;
4078 
4079 	ath12k_hal_rx_reo_ent_buf_paddr_get(ring_entry, &paddr,
4080 					    &sw_cookie,
4081 					    &p_last_buf_addr_info, &rbm,
4082 					    &msdu_cnt);
4083 
4084 	spin_lock_bh(&pmon->mon_lock);
4085 
4086 	if (le32_get_bits(ent_desc->info1,
4087 			  HAL_REO_ENTR_RING_INFO1_RXDMA_PUSH_REASON) ==
4088 			  HAL_REO_DEST_RING_PUSH_REASON_ERR_DETECTED) {
4089 		u8 rxdma_err = le32_get_bits(ent_desc->info1,
4090 					     HAL_REO_ENTR_RING_INFO1_RXDMA_ERROR_CODE);
4091 		if (rxdma_err == HAL_REO_ENTR_RING_RXDMA_ECODE_FLUSH_REQUEST_ERR ||
4092 		    rxdma_err == HAL_REO_ENTR_RING_RXDMA_ECODE_MPDU_LEN_ERR ||
4093 		    rxdma_err == HAL_REO_ENTR_RING_RXDMA_ECODE_OVERFLOW_ERR) {
4094 			drop_mpdu = true;
4095 			pmon->rx_mon_stats.dest_mpdu_drop++;
4096 		}
4097 	}
4098 
4099 	is_frag = false;
4100 	is_first_msdu = true;
4101 	rx_pkt_offset = sizeof(struct hal_rx_desc);
4102 
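	/* Walk the MSDU link descriptor chain and chain the MSDUs of this MPDU
	 * into a single skb list headed by *head_msdu.
	 */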
4103 	do {
4104 		if (pmon->mon_last_linkdesc_paddr == paddr) {
4105 			pmon->rx_mon_stats.dup_mon_linkdesc_cnt++;
4106 			spin_unlock_bh(&pmon->mon_lock);
4107 			return rx_bufs_used;
4108 		}
4109 
4110 		desc_bank = u32_get_bits(sw_cookie, DP_LINK_DESC_BANK_MASK);
4111 		msdu_link_desc =
4112 			ar->ab->dp.link_desc_banks[desc_bank].vaddr +
4113 			(paddr - ar->ab->dp.link_desc_banks[desc_bank].paddr);
4114 
4115 		ath12k_hal_rx_msdu_list_get(ar, msdu_link_desc, &msdu_list,
4116 					    &num_msdus);
4117 		desc_info = ath12k_dp_get_rx_desc(ar->ab,
4118 						  msdu_list.sw_cookie[num_msdus - 1]);
4119 		tail_rx_desc = (struct hal_rx_desc *)(desc_info->skb)->data;
4120 
4121 		for (i = 0; i < num_msdus; i++) {
4122 			u32 l2_hdr_offset;
4123 
4124 			if (pmon->mon_last_buf_cookie == msdu_list.sw_cookie[i]) {
4125 				ath12k_dbg(ar->ab, ATH12K_DBG_DATA,
4126 					   "i %d last_cookie %d is same\n",
4127 					   i, pmon->mon_last_buf_cookie);
4128 				drop_mpdu = true;
4129 				pmon->rx_mon_stats.dup_mon_buf_cnt++;
4130 				continue;
4131 			}
4132 
4133 			desc_info =
4134 				ath12k_dp_get_rx_desc(ar->ab, msdu_list.sw_cookie[i]);
4135 			msdu = desc_info->skb;
4136 
4137 			if (!msdu) {
4138 				ath12k_dbg(ar->ab, ATH12K_DBG_DATA,
4139 					   "msdu_pop: invalid msdu (%d/%d)\n",
4140 					   i + 1, num_msdus);
4141 				goto next_msdu;
4142 			}
4143 			rxcb = ATH12K_SKB_RXCB(msdu);
4144 			if (rxcb->paddr != msdu_list.paddr[i]) {
4145 				ath12k_dbg(ar->ab, ATH12K_DBG_DATA,
4146 					   "i %d paddr %lx != %lx\n",
4147 					   i, (unsigned long)rxcb->paddr,
4148 					   (unsigned long)msdu_list.paddr[i]);
4149 				drop_mpdu = true;
4150 				continue;
4151 			}
4152 			if (!rxcb->unmapped) {
4153 				dma_unmap_single(ar->ab->dev, rxcb->paddr,
4154 						 msdu->len +
4155 						 skb_tailroom(msdu),
4156 						 DMA_FROM_DEVICE);
4157 				rxcb->unmapped = 1;
4158 			}
4159 			if (drop_mpdu) {
4160 				ath12k_dbg(ar->ab, ATH12K_DBG_DATA,
4161 					   "i %d drop msdu %p *ppdu_id %x\n",
4162 					   i, msdu, *ppdu_id);
4163 				dev_kfree_skb_any(msdu);
4164 				msdu = NULL;
4165 				goto next_msdu;
4166 			}
4167 
4168 			rx_desc = (struct hal_rx_desc *)msdu->data;
4169 			l2_hdr_offset = ath12k_dp_rx_h_l3pad(ar->ab, tail_rx_desc);
4170 			if (is_first_msdu) {
4171 				if (!ath12k_dp_rxdesc_mpdu_valid(ar->ab, rx_desc)) {
4172 					drop_mpdu = true;
4173 					dev_kfree_skb_any(msdu);
4174 					msdu = NULL;
4175 					pmon->mon_last_linkdesc_paddr = paddr;
4176 					goto next_msdu;
4177 				}
4178 				msdu_ppdu_id =
4179 					ath12k_dp_rxdesc_get_ppduid(ar->ab, rx_desc);
4180 
4181 				if (ath12k_dp_mon_comp_ppduid(msdu_ppdu_id,
4182 							      ppdu_id)) {
4183 					spin_unlock_bh(&pmon->mon_lock);
4184 					return rx_bufs_used;
4185 				}
4186 				pmon->mon_last_linkdesc_paddr = paddr;
4187 				is_first_msdu = false;
4188 			}
4189 			ath12k_dp_mon_get_buf_len(&msdu_list.msdu_info[i],
4190 						  &is_frag, &total_len,
4191 						  &frag_len, &msdu_cnt);
4192 			rx_buf_size = rx_pkt_offset + l2_hdr_offset + frag_len;
4193 
4194 			if (ath12k_dp_pkt_set_pktlen(msdu, rx_buf_size)) {
4195 				dev_kfree_skb_any(msdu);
4196 				goto next_msdu;
4197 			}
4198 
4199 			if (!(*head_msdu))
4200 				*head_msdu = msdu;
4201 			else if (last)
4202 				last->next = msdu;
4203 
4204 			last = msdu;
4205 next_msdu:
4206 			pmon->mon_last_buf_cookie = msdu_list.sw_cookie[i];
4207 			rx_bufs_used++;
4208 			desc_info->skb = NULL;
4209 			list_add_tail(&desc_info->list, used_list);
4210 		}
4211 
4212 		ath12k_hal_rx_buf_addr_info_set(&buf_info, paddr, sw_cookie, rbm);
4213 
4214 		ath12k_dp_mon_next_link_desc_get(msdu_link_desc, &paddr,
4215 						 &sw_cookie, &rbm,
4216 						 &p_buf_addr_info);
4217 
4218 		ath12k_dp_rx_link_desc_return(ar->ab, &buf_info,
4219 					      HAL_WBM_REL_BM_ACT_PUT_IN_IDLE);
4220 
4221 		p_last_buf_addr_info = p_buf_addr_info;
4222 
4223 	} while (paddr && msdu_cnt);
4224 
4225 	spin_unlock_bh(&pmon->mon_lock);
4226 
4227 	if (last)
4228 		last->next = NULL;
4229 
4230 	*tail_msdu = msdu;
4231 
4232 	if (msdu_cnt == 0)
4233 		*npackets = 1;
4234 
4235 	return rx_bufs_used;
4236 }
4237 
4238 /* The destination ring processing is considered stuck if the destination ring
4239  * does not move while the status ring moves by 16 PPDUs. As a workaround, the
4240  * destination ring processing skips this destination ring PPDU.
4241  */
4242 #define MON_DEST_RING_STUCK_MAX_CNT 16
4243 
4244 static void ath12k_dp_rx_mon_dest_process(struct ath12k *ar, int mac_id,
4245 					  u32 quota, struct napi_struct *napi)
4246 {
4247 	struct ath12k_mon_data *pmon = (struct ath12k_mon_data *)&ar->dp.mon_data;
4248 	struct ath12k_pdev_mon_stats *rx_mon_stats;
4249 	u32 ppdu_id, rx_bufs_used = 0, ring_id;
4250 	u32 mpdu_rx_bufs_used, npackets = 0;
4251 	struct ath12k_dp *dp = &ar->ab->dp;
4252 	struct ath12k_base *ab = ar->ab;
4253 	void *ring_entry, *mon_dst_srng;
4254 	struct dp_mon_mpdu *tmp_mpdu;
4255 	LIST_HEAD(rx_desc_used_list);
4256 	struct hal_srng *srng;
4257 
4258 	ring_id = dp->rxdma_err_dst_ring[mac_id].ring_id;
4259 	srng = &ab->hal.srng_list[ring_id];
4260 
4261 	mon_dst_srng = &ab->hal.srng_list[ring_id];
4262 
4263 	spin_lock_bh(&srng->lock);
4264 
4265 	ath12k_hal_srng_access_begin(ab, mon_dst_srng);
4266 
4267 	ppdu_id = pmon->mon_ppdu_info.ppdu_id;
4268 	rx_mon_stats = &pmon->rx_mon_stats;
4269 
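	/* Pop MPDUs from the destination ring until its PPDU ID no longer
	 * matches the PPDU ID seen on the status ring.
	 */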
4270 	while ((ring_entry = ath12k_hal_srng_dst_peek(ar->ab, mon_dst_srng))) {
4271 		struct sk_buff *head_msdu, *tail_msdu;
4272 
4273 		head_msdu = NULL;
4274 		tail_msdu = NULL;
4275 
4276 		mpdu_rx_bufs_used = ath12k_dp_rx_mon_mpdu_pop(ar, mac_id, ring_entry,
4277 							      &head_msdu, &tail_msdu,
4278 							      &rx_desc_used_list,
4279 							      &npackets, &ppdu_id);
4280 
4281 		rx_bufs_used += mpdu_rx_bufs_used;
4282 
4283 		if (mpdu_rx_bufs_used) {
4284 			dp->mon_dest_ring_stuck_cnt = 0;
4285 		} else {
4286 			dp->mon_dest_ring_stuck_cnt++;
4287 			rx_mon_stats->dest_mon_not_reaped++;
4288 		}
4289 
4290 		if (dp->mon_dest_ring_stuck_cnt > MON_DEST_RING_STUCK_MAX_CNT) {
4291 			rx_mon_stats->dest_mon_stuck++;
4292 			ath12k_dbg(ar->ab, ATH12K_DBG_DATA,
4293 				   "status ring ppdu_id=%d dest ring ppdu_id=%d mon_dest_ring_stuck_cnt=%d dest_mon_not_reaped=%u dest_mon_stuck=%u\n",
4294 				   pmon->mon_ppdu_info.ppdu_id, ppdu_id,
4295 				   dp->mon_dest_ring_stuck_cnt,
4296 				   rx_mon_stats->dest_mon_not_reaped,
4297 				   rx_mon_stats->dest_mon_stuck);
4298 			spin_lock_bh(&pmon->mon_lock);
4299 			pmon->mon_ppdu_info.ppdu_id = ppdu_id;
4300 			spin_unlock_bh(&pmon->mon_lock);
4301 			continue;
4302 		}
4303 
4304 		if (ppdu_id != pmon->mon_ppdu_info.ppdu_id) {
4305 			spin_lock_bh(&pmon->mon_lock);
4306 			pmon->mon_ppdu_status = DP_PPDU_STATUS_START;
4307 			spin_unlock_bh(&pmon->mon_lock);
4308 			ath12k_dbg(ar->ab, ATH12K_DBG_DATA,
4309 				   "dest_rx: new ppdu_id %x != status ppdu_id %x dest_mon_not_reaped = %u dest_mon_stuck = %u\n",
4310 				   ppdu_id, pmon->mon_ppdu_info.ppdu_id,
4311 				   rx_mon_stats->dest_mon_not_reaped,
4312 				   rx_mon_stats->dest_mon_stuck);
4313 			break;
4314 		}
4315 
4316 		if (head_msdu && tail_msdu) {
4317 			tmp_mpdu = kzalloc(sizeof(*tmp_mpdu), GFP_ATOMIC);
4318 			if (!tmp_mpdu)
4319 				break;
4320 
4321 			tmp_mpdu->head = head_msdu;
4322 			tmp_mpdu->tail = tail_msdu;
4323 			tmp_mpdu->err_bitmap = pmon->err_bitmap;
4324 			tmp_mpdu->decap_format = pmon->decap_format;
4325 			ath12k_dp_mon_rx_deliver(ar, tmp_mpdu,
4326 						 &pmon->mon_ppdu_info, napi);
4327 			rx_mon_stats->dest_mpdu_done++;
4328 			kfree(tmp_mpdu);
4329 		}
4330 
4331 		ring_entry = ath12k_hal_srng_dst_get_next_entry(ar->ab,
4332 								mon_dst_srng);
4333 	}
4334 	ath12k_hal_srng_access_end(ar->ab, mon_dst_srng);
4335 
4336 	spin_unlock_bh(&srng->lock);
4337 
4338 	if (rx_bufs_used) {
4339 		rx_mon_stats->dest_ppdu_done++;
4340 		ath12k_dp_rx_bufs_replenish(ar->ab,
4341 					    &dp->rx_refill_buf_ring,
4342 					    &rx_desc_used_list,
4343 					    rx_bufs_used);
4344 	}
4345 }
4346 
4347 static int
4348 __ath12k_dp_mon_process_ring(struct ath12k *ar, int mac_id,
4349 			     struct napi_struct *napi, int *budget)
4350 {
4351 	struct ath12k_mon_data *pmon = (struct ath12k_mon_data *)&ar->dp.mon_data;
4352 	struct ath12k_pdev_mon_stats *rx_mon_stats = &pmon->rx_mon_stats;
4353 	struct hal_rx_mon_ppdu_info *ppdu_info = &pmon->mon_ppdu_info;
4354 	enum hal_rx_mon_status hal_status;
4355 	struct sk_buff_head skb_list;
4356 	int num_buffs_reaped;
4357 	struct sk_buff *skb;
4358 
4359 	__skb_queue_head_init(&skb_list);
4360 
4361 	num_buffs_reaped = ath12k_dp_rx_reap_mon_status_ring(ar->ab, mac_id,
4362 							     budget, &skb_list);
4363 	if (!num_buffs_reaped)
4364 		goto exit;
4365 
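	/* Parse the PPDU status TLVs of each reaped buffer and, once a full
	 * PPDU is signalled, process the corresponding destination ring.
	 */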
4366 	while ((skb = __skb_dequeue(&skb_list))) {
4367 		memset(ppdu_info, 0, sizeof(*ppdu_info));
4368 		ppdu_info->peer_id = HAL_INVALID_PEERID;
4369 
4370 		hal_status = ath12k_dp_mon_parse_rx_dest(ar, pmon, skb);
4371 
4372 		if (ar->monitor_started &&
4373 		    pmon->mon_ppdu_status == DP_PPDU_STATUS_START &&
4374 		    hal_status == HAL_TLV_STATUS_PPDU_DONE) {
4375 			rx_mon_stats->status_ppdu_done++;
4376 			pmon->mon_ppdu_status = DP_PPDU_STATUS_DONE;
4377 			ath12k_dp_rx_mon_dest_process(ar, mac_id, *budget, napi);
4378 			pmon->mon_ppdu_status = DP_PPDU_STATUS_START;
4379 		}
4380 
4381 		dev_kfree_skb_any(skb);
4382 	}
4383 
4384 exit:
4385 	return num_buffs_reaped;
4386 }
4387 
4388 int ath12k_dp_mon_process_ring(struct ath12k_base *ab, int mac_id,
4389 			       struct napi_struct *napi, int budget,
4390 			       enum dp_monitor_mode monitor_mode)
4391 {
4392 	struct ath12k *ar = ath12k_ab_to_ar(ab, mac_id);
4393 	int num_buffs_reaped = 0;
4394 
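	/* With rxdma1 the monitor data is reaped from the monitor destination
	 * ring; otherwise fall back to the status ring based processing.
	 */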
4395 	if (ab->hw_params->rxdma1_enable) {
4396 		if (monitor_mode == ATH12K_DP_RX_MONITOR_MODE)
4397 			num_buffs_reaped = ath12k_dp_mon_srng_process(ar, &budget, napi);
4398 	} else {
4399 		if (ar->monitor_started)
4400 			num_buffs_reaped =
4401 				__ath12k_dp_mon_process_ring(ar, mac_id, napi, &budget);
4402 	}
4403 
4404 	return num_buffs_reaped;
4405 }
4406