xref: /linux/drivers/net/wireless/ath/ath12k/dp_mon.c (revision 07fdad3a93756b872da7b53647715c48d0f4a2d0)
1 // SPDX-License-Identifier: BSD-3-Clause-Clear
2 /*
3  * Copyright (c) 2019-2021 The Linux Foundation. All rights reserved.
4  * Copyright (c) 2021-2025 Qualcomm Innovation Center, Inc. All rights reserved.
5  */
6 
7 #include "dp_mon.h"
8 #include "debug.h"
9 #include "dp_rx.h"
10 #include "dp_tx.h"
11 #include "peer.h"
12 
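/*
 * Helper macros: extract a bitfield from a little-endian TLV word
 * (le32/le64) and re-encode the value into the corresponding host-order
 * radiotap bitfield, so several decoded fields can be OR-ed together in a
 * single expression.
 */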
13 #define ATH12K_LE32_DEC_ENC(value, dec_bits, enc_bits)	\
14 		u32_encode_bits(le32_get_bits(value, dec_bits), enc_bits)
15 
16 #define ATH12K_LE64_DEC_ENC(value, dec_bits, enc_bits) \
17 		u32_encode_bits(le64_get_bits(value, dec_bits), enc_bits)
18 
19 static void
20 ath12k_dp_mon_rx_handle_ofdma_info(const struct hal_rx_ppdu_end_user_stats *ppdu_end_user,
21 				   struct hal_rx_user_status *rx_user_status)
22 {
23 	rx_user_status->ul_ofdma_user_v0_word0 =
24 		__le32_to_cpu(ppdu_end_user->usr_resp_ref);
25 	rx_user_status->ul_ofdma_user_v0_word1 =
26 		__le32_to_cpu(ppdu_end_user->usr_resp_ref_ext);
27 }
28 
29 static void
30 ath12k_dp_mon_rx_populate_byte_count(const struct hal_rx_ppdu_end_user_stats *stats,
31 				     void *ppduinfo,
32 				     struct hal_rx_user_status *rx_user_status)
33 {
34 	rx_user_status->mpdu_ok_byte_count =
35 		le32_get_bits(stats->info7,
36 			      HAL_RX_PPDU_END_USER_STATS_INFO7_MPDU_OK_BYTE_COUNT);
37 	rx_user_status->mpdu_err_byte_count =
38 		le32_get_bits(stats->info8,
39 			      HAL_RX_PPDU_END_USER_STATS_INFO8_MPDU_ERR_BYTE_COUNT);
40 }
41 
42 static void
43 ath12k_dp_mon_rx_populate_mu_user_info(const struct hal_rx_ppdu_end_user_stats *rx_tlv,
44 				       struct hal_rx_mon_ppdu_info *ppdu_info,
45 				       struct hal_rx_user_status *rx_user_status)
46 {
47 	rx_user_status->ast_index = ppdu_info->ast_index;
48 	rx_user_status->tid = ppdu_info->tid;
49 	rx_user_status->tcp_ack_msdu_count =
50 		ppdu_info->tcp_ack_msdu_count;
51 	rx_user_status->tcp_msdu_count =
52 		ppdu_info->tcp_msdu_count;
53 	rx_user_status->udp_msdu_count =
54 		ppdu_info->udp_msdu_count;
55 	rx_user_status->other_msdu_count =
56 		ppdu_info->other_msdu_count;
57 	rx_user_status->frame_control = ppdu_info->frame_control;
58 	rx_user_status->frame_control_info_valid =
59 		ppdu_info->frame_control_info_valid;
60 	rx_user_status->data_sequence_control_info_valid =
61 		ppdu_info->data_sequence_control_info_valid;
62 	rx_user_status->first_data_seq_ctrl =
63 		ppdu_info->first_data_seq_ctrl;
64 	rx_user_status->preamble_type = ppdu_info->preamble_type;
65 	rx_user_status->ht_flags = ppdu_info->ht_flags;
66 	rx_user_status->vht_flags = ppdu_info->vht_flags;
67 	rx_user_status->he_flags = ppdu_info->he_flags;
68 	rx_user_status->rs_flags = ppdu_info->rs_flags;
69 
70 	rx_user_status->mpdu_cnt_fcs_ok =
71 		ppdu_info->num_mpdu_fcs_ok;
72 	rx_user_status->mpdu_cnt_fcs_err =
73 		ppdu_info->num_mpdu_fcs_err;
74 	memcpy(&rx_user_status->mpdu_fcs_ok_bitmap[0], &ppdu_info->mpdu_fcs_ok_bitmap[0],
75 	       HAL_RX_NUM_WORDS_PER_PPDU_BITMAP *
76 	       sizeof(ppdu_info->mpdu_fcs_ok_bitmap[0]));
77 
78 	ath12k_dp_mon_rx_populate_byte_count(rx_tlv, ppdu_info, rx_user_status);
79 }
80 
81 static void ath12k_dp_mon_parse_vht_sig_a(const struct hal_rx_vht_sig_a_info *vht_sig,
82 					  struct hal_rx_mon_ppdu_info *ppdu_info)
83 {
84 	u32 nsts, info0, info1;
85 	u8 gi_setting;
86 
87 	info0 = __le32_to_cpu(vht_sig->info0);
88 	info1 = __le32_to_cpu(vht_sig->info1);
89 
90 	ppdu_info->ldpc = u32_get_bits(info1, HAL_RX_VHT_SIG_A_INFO_INFO1_SU_MU_CODING);
91 	ppdu_info->mcs = u32_get_bits(info1, HAL_RX_VHT_SIG_A_INFO_INFO1_MCS);
92 	gi_setting = u32_get_bits(info1, HAL_RX_VHT_SIG_A_INFO_INFO1_GI_SETTING);
93 	switch (gi_setting) {
94 	case HAL_RX_VHT_SIG_A_NORMAL_GI:
95 		ppdu_info->gi = HAL_RX_GI_0_8_US;
96 		break;
97 	case HAL_RX_VHT_SIG_A_SHORT_GI:
98 	case HAL_RX_VHT_SIG_A_SHORT_GI_AMBIGUITY:
99 		ppdu_info->gi = HAL_RX_GI_0_4_US;
100 		break;
101 	}
102 
103 	ppdu_info->is_stbc = u32_get_bits(info0, HAL_RX_VHT_SIG_A_INFO_INFO0_STBC);
104 	nsts = u32_get_bits(info0, HAL_RX_VHT_SIG_A_INFO_INFO0_NSTS);
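	/*
	 * With STBC every spatial stream occupies two space-time streams, so
	 * halve the 0-based NSTS value to recover the 0-based NSS before
	 * applying VHT_SIG_SU_NSS_MASK below (e.g. NSTS field 3, i.e. four
	 * space-time streams, becomes NSS field 1, i.e. two spatial streams).
	 */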
105 	if (ppdu_info->is_stbc && nsts > 0)
106 		nsts = ((nsts + 1) >> 1) - 1;
107 
108 	ppdu_info->nss = u32_get_bits(nsts, VHT_SIG_SU_NSS_MASK);
109 	ppdu_info->bw = u32_get_bits(info0, HAL_RX_VHT_SIG_A_INFO_INFO0_BW);
110 	ppdu_info->beamformed = u32_get_bits(info1,
111 					     HAL_RX_VHT_SIG_A_INFO_INFO1_BEAMFORMED);
112 	ppdu_info->vht_flag_values5 = u32_get_bits(info0,
113 						   HAL_RX_VHT_SIG_A_INFO_INFO0_GROUP_ID);
114 	ppdu_info->vht_flag_values3[0] = (((ppdu_info->mcs) << 4) |
115 					    ppdu_info->nss);
116 	ppdu_info->vht_flag_values2 = ppdu_info->bw;
117 	ppdu_info->vht_flag_values4 =
118 		u32_get_bits(info1, HAL_RX_VHT_SIG_A_INFO_INFO1_SU_MU_CODING);
119 }
120 
121 static void ath12k_dp_mon_parse_ht_sig(const struct hal_rx_ht_sig_info *ht_sig,
122 				       struct hal_rx_mon_ppdu_info *ppdu_info)
123 {
124 	u32 info0 = __le32_to_cpu(ht_sig->info0);
125 	u32 info1 = __le32_to_cpu(ht_sig->info1);
126 
127 	ppdu_info->mcs = u32_get_bits(info0, HAL_RX_HT_SIG_INFO_INFO0_MCS);
128 	ppdu_info->bw = u32_get_bits(info0, HAL_RX_HT_SIG_INFO_INFO0_BW);
129 	ppdu_info->is_stbc = u32_get_bits(info1, HAL_RX_HT_SIG_INFO_INFO1_STBC);
130 	ppdu_info->ldpc = u32_get_bits(info1, HAL_RX_HT_SIG_INFO_INFO1_FEC_CODING);
131 	ppdu_info->gi = u32_get_bits(info1, HAL_RX_HT_SIG_INFO_INFO1_GI);
132 	ppdu_info->nss = (ppdu_info->mcs >> 3);
133 }
134 
135 static void ath12k_dp_mon_parse_l_sig_b(const struct hal_rx_lsig_b_info *lsigb,
136 					struct hal_rx_mon_ppdu_info *ppdu_info)
137 {
138 	u32 info0 = __le32_to_cpu(lsigb->info0);
139 	u8 rate;
140 
141 	rate = u32_get_bits(info0, HAL_RX_LSIG_B_INFO_INFO0_RATE);
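	/*
	 * Map the hardware L-SIG-B rate code onto a CCK legacy rate; two
	 * code points resolve to each of the 2, 5.5 and 11 Mbps rates
	 * (e.g. both 2 and 5 mean 2 Mbps).
	 */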
142 	switch (rate) {
143 	case 1:
144 		rate = HAL_RX_LEGACY_RATE_1_MBPS;
145 		break;
146 	case 2:
147 	case 5:
148 		rate = HAL_RX_LEGACY_RATE_2_MBPS;
149 		break;
150 	case 3:
151 	case 6:
152 		rate = HAL_RX_LEGACY_RATE_5_5_MBPS;
153 		break;
154 	case 4:
155 	case 7:
156 		rate = HAL_RX_LEGACY_RATE_11_MBPS;
157 		break;
158 	default:
159 		rate = HAL_RX_LEGACY_RATE_INVALID;
160 	}
161 
162 	ppdu_info->rate = rate;
163 	ppdu_info->cck_flag = 1;
164 }
165 
166 static void ath12k_dp_mon_parse_l_sig_a(const struct hal_rx_lsig_a_info *lsiga,
167 					struct hal_rx_mon_ppdu_info *ppdu_info)
168 {
169 	u32 info0 = __le32_to_cpu(lsiga->info0);
170 	u8 rate;
171 
172 	rate = u32_get_bits(info0, HAL_RX_LSIG_A_INFO_INFO0_RATE);
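	/*
	 * The 4-bit value follows the 802.11 OFDM L-SIG RATE encoding,
	 * e.g. 0xb (11) is 6 Mbps and 0xc (12) is 54 Mbps.
	 */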
173 	switch (rate) {
174 	case 8:
175 		rate = HAL_RX_LEGACY_RATE_48_MBPS;
176 		break;
177 	case 9:
178 		rate = HAL_RX_LEGACY_RATE_24_MBPS;
179 		break;
180 	case 10:
181 		rate = HAL_RX_LEGACY_RATE_12_MBPS;
182 		break;
183 	case 11:
184 		rate = HAL_RX_LEGACY_RATE_6_MBPS;
185 		break;
186 	case 12:
187 		rate = HAL_RX_LEGACY_RATE_54_MBPS;
188 		break;
189 	case 13:
190 		rate = HAL_RX_LEGACY_RATE_36_MBPS;
191 		break;
192 	case 14:
193 		rate = HAL_RX_LEGACY_RATE_18_MBPS;
194 		break;
195 	case 15:
196 		rate = HAL_RX_LEGACY_RATE_9_MBPS;
197 		break;
198 	default:
199 		rate = HAL_RX_LEGACY_RATE_INVALID;
200 	}
201 
202 	ppdu_info->rate = rate;
203 }
204 
205 static void
206 ath12k_dp_mon_parse_he_sig_b2_ofdma(const struct hal_rx_he_sig_b2_ofdma_info *ofdma,
207 				    struct hal_rx_mon_ppdu_info *ppdu_info)
208 {
209 	u32 info0, value;
210 
211 	info0 = __le32_to_cpu(ofdma->info0);
212 
213 	ppdu_info->he_data1 |= HE_MCS_KNOWN | HE_DCM_KNOWN | HE_CODING_KNOWN;
214 
215 	/* HE-data2 */
216 	ppdu_info->he_data2 |= HE_TXBF_KNOWN;
217 
218 	ppdu_info->mcs = u32_get_bits(info0, HAL_RX_HE_SIG_B2_OFDMA_INFO_INFO0_STA_MCS);
219 	value = ppdu_info->mcs << HE_TRANSMIT_MCS_SHIFT;
220 	ppdu_info->he_data3 |= value;
221 
222 	value = u32_get_bits(info0, HAL_RX_HE_SIG_B2_OFDMA_INFO_INFO0_STA_DCM);
223 	value = value << HE_DCM_SHIFT;
224 	ppdu_info->he_data3 |= value;
225 
226 	value = u32_get_bits(info0, HAL_RX_HE_SIG_B2_OFDMA_INFO_INFO0_STA_CODING);
227 	ppdu_info->ldpc = value;
228 	value = value << HE_CODING_SHIFT;
229 	ppdu_info->he_data3 |= value;
230 
231 	/* HE-data4 */
232 	value = u32_get_bits(info0, HAL_RX_HE_SIG_B2_OFDMA_INFO_INFO0_STA_ID);
233 	value = value << HE_STA_ID_SHIFT;
234 	ppdu_info->he_data4 |= value;
235 
236 	ppdu_info->nss = u32_get_bits(info0, HAL_RX_HE_SIG_B2_OFDMA_INFO_INFO0_STA_NSTS);
237 	ppdu_info->beamformed = u32_get_bits(info0,
238 					     HAL_RX_HE_SIG_B2_OFDMA_INFO_INFO0_STA_TXBF);
239 }
240 
241 static void
242 ath12k_dp_mon_parse_he_sig_b2_mu(const struct hal_rx_he_sig_b2_mu_info *he_sig_b2_mu,
243 				 struct hal_rx_mon_ppdu_info *ppdu_info)
244 {
245 	u32 info0, value;
246 
247 	info0 = __le32_to_cpu(he_sig_b2_mu->info0);
248 
249 	ppdu_info->he_data1 |= HE_MCS_KNOWN | HE_CODING_KNOWN;
250 
251 	ppdu_info->mcs = u32_get_bits(info0, HAL_RX_HE_SIG_B2_MU_INFO_INFO0_STA_MCS);
252 	value = ppdu_info->mcs << HE_TRANSMIT_MCS_SHIFT;
253 	ppdu_info->he_data3 |= value;
254 
255 	value = u32_get_bits(info0, HAL_RX_HE_SIG_B2_MU_INFO_INFO0_STA_CODING);
256 	ppdu_info->ldpc = value;
257 	value = value << HE_CODING_SHIFT;
258 	ppdu_info->he_data3 |= value;
259 
260 	value = u32_get_bits(info0, HAL_RX_HE_SIG_B2_MU_INFO_INFO0_STA_ID);
261 	value = value << HE_STA_ID_SHIFT;
262 	ppdu_info->he_data4 |= value;
263 
264 	ppdu_info->nss = u32_get_bits(info0, HAL_RX_HE_SIG_B2_MU_INFO_INFO0_STA_NSTS);
265 }
266 
267 static void
268 ath12k_dp_mon_parse_he_sig_b1_mu(const struct hal_rx_he_sig_b1_mu_info *he_sig_b1_mu,
269 				 struct hal_rx_mon_ppdu_info *ppdu_info)
270 {
271 	u32 info0 = __le32_to_cpu(he_sig_b1_mu->info0);
272 	u16 ru_tones;
273 
274 	ru_tones = u32_get_bits(info0,
275 				HAL_RX_HE_SIG_B1_MU_INFO_INFO0_RU_ALLOCATION);
276 	ppdu_info->ru_alloc = ath12k_he_ru_tones_to_nl80211_he_ru_alloc(ru_tones);
277 	ppdu_info->he_RU[0] = ru_tones;
278 }
279 
280 static void
281 ath12k_dp_mon_parse_he_sig_mu(const struct hal_rx_he_sig_a_mu_dl_info *he_sig_a_mu_dl,
282 			      struct hal_rx_mon_ppdu_info *ppdu_info)
283 {
284 	u32 info0, info1, value;
285 	u16 he_gi = 0, he_ltf = 0;
286 
287 	info0 = __le32_to_cpu(he_sig_a_mu_dl->info0);
288 	info1 = __le32_to_cpu(he_sig_a_mu_dl->info1);
289 
290 	ppdu_info->he_mu_flags = 1;
291 
292 	ppdu_info->he_data1 = HE_MU_FORMAT_TYPE;
293 	ppdu_info->he_data1 |=
294 			HE_BSS_COLOR_KNOWN |
295 			HE_DL_UL_KNOWN |
296 			HE_LDPC_EXTRA_SYMBOL_KNOWN |
297 			HE_STBC_KNOWN |
298 			HE_DATA_BW_RU_KNOWN |
299 			HE_DOPPLER_KNOWN;
300 
301 	ppdu_info->he_data2 =
302 			HE_GI_KNOWN |
303 			HE_LTF_SYMBOLS_KNOWN |
304 			HE_PRE_FEC_PADDING_KNOWN |
305 			HE_PE_DISAMBIGUITY_KNOWN |
306 			HE_TXOP_KNOWN |
307 			HE_MIDABLE_PERIODICITY_KNOWN;
308 
309 	/* data3 */
310 	ppdu_info->he_data3 = u32_get_bits(info0, HAL_RX_HE_SIG_A_MU_DL_INFO0_BSS_COLOR);
311 	value = u32_get_bits(info0, HAL_RX_HE_SIG_A_MU_DL_INFO0_UL_FLAG);
312 	value = value << HE_DL_UL_SHIFT;
313 	ppdu_info->he_data3 |= value;
314 
315 	value = u32_get_bits(info1, HAL_RX_HE_SIG_A_MU_DL_INFO1_LDPC_EXTRA);
316 	value = value << HE_LDPC_EXTRA_SYMBOL_SHIFT;
317 	ppdu_info->he_data3 |= value;
318 
319 	value = u32_get_bits(info1, HAL_RX_HE_SIG_A_MU_DL_INFO1_STBC);
320 	value = value << HE_STBC_SHIFT;
321 	ppdu_info->he_data3 |= value;
322 
323 	/* data4 */
324 	value = u32_get_bits(info0,
325 			     HAL_RX_HE_SIG_A_MU_DL_INFO0_SPATIAL_REUSE);
326 	ppdu_info->he_data4 = value;
327 
328 	/* data5 */
329 	value = u32_get_bits(info0, HAL_RX_HE_SIG_A_MU_DL_INFO0_TRANSMIT_BW);
330 	ppdu_info->he_data5 = value;
331 	ppdu_info->bw = value;
332 
333 	value = u32_get_bits(info0, HAL_RX_HE_SIG_A_MU_DL_INFO0_CP_LTF_SIZE);
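	/*
	 * The 2-bit CP+LTF field jointly encodes the guard interval and the
	 * HE-LTF size; expand it into the separate GI and LTF values used by
	 * the radiotap HE fields.
	 */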
334 	switch (value) {
335 	case 0:
336 		he_gi = HE_GI_0_8;
337 		he_ltf = HE_LTF_4_X;
338 		break;
339 	case 1:
340 		he_gi = HE_GI_0_8;
341 		he_ltf = HE_LTF_2_X;
342 		break;
343 	case 2:
344 		he_gi = HE_GI_1_6;
345 		he_ltf = HE_LTF_2_X;
346 		break;
347 	case 3:
348 		he_gi = HE_GI_3_2;
349 		he_ltf = HE_LTF_4_X;
350 		break;
351 	}
352 
353 	ppdu_info->gi = he_gi;
354 	value = he_gi << HE_GI_SHIFT;
355 	ppdu_info->he_data5 |= value;
356 
357 	value = he_ltf << HE_LTF_SIZE_SHIFT;
358 	ppdu_info->he_data5 |= value;
359 
360 	value = u32_get_bits(info1, HAL_RX_HE_SIG_A_MU_DL_INFO1_NUM_LTF_SYMB);
361 	value = (value << HE_LTF_SYM_SHIFT);
362 	ppdu_info->he_data5 |= value;
363 
364 	value = u32_get_bits(info1, HAL_RX_HE_SIG_A_MU_DL_INFO1_PKT_EXT_FACTOR);
365 	value = value << HE_PRE_FEC_PAD_SHIFT;
366 	ppdu_info->he_data5 |= value;
367 
368 	value = u32_get_bits(info1, HAL_RX_HE_SIG_A_MU_DL_INFO1_PKT_EXT_PE_DISAM);
369 	value = value << HE_PE_DISAMBIGUITY_SHIFT;
370 	ppdu_info->he_data5 |= value;
371 
372 	/*data6*/
373 	value = u32_get_bits(info0, HAL_RX_HE_SIG_A_MU_DL_INFO0_DOPPLER_INDICATION);
374 	value = value << HE_DOPPLER_SHIFT;
375 	ppdu_info->he_data6 |= value;
376 
377 	value = u32_get_bits(info1, HAL_RX_HE_SIG_A_MU_DL_INFO1_TXOP_DURATION);
378 	value = value << HE_TXOP_SHIFT;
379 	ppdu_info->he_data6 |= value;
380 
381 	/* HE-MU Flags */
382 	/* HE-MU-flags1 */
383 	ppdu_info->he_flags1 =
384 		HE_SIG_B_MCS_KNOWN |
385 		HE_SIG_B_DCM_KNOWN |
386 		HE_SIG_B_COMPRESSION_FLAG_1_KNOWN |
387 		HE_SIG_B_SYM_NUM_KNOWN |
388 		HE_RU_0_KNOWN;
389 
390 	value = u32_get_bits(info0, HAL_RX_HE_SIG_A_MU_DL_INFO0_MCS_OF_SIGB);
391 	ppdu_info->he_flags1 |= value;
392 	value = u32_get_bits(info0, HAL_RX_HE_SIG_A_MU_DL_INFO0_DCM_OF_SIGB);
393 	value = value << HE_DCM_FLAG_1_SHIFT;
394 	ppdu_info->he_flags1 |= value;
395 
396 	/* HE-MU-flags2 */
397 	ppdu_info->he_flags2 = HE_BW_KNOWN;
398 
399 	value = u32_get_bits(info0, HAL_RX_HE_SIG_A_MU_DL_INFO0_TRANSMIT_BW);
400 	ppdu_info->he_flags2 |= value;
401 	value = u32_get_bits(info0, HAL_RX_HE_SIG_A_MU_DL_INFO0_COMP_MODE_SIGB);
402 	value = value << HE_SIG_B_COMPRESSION_FLAG_2_SHIFT;
403 	ppdu_info->he_flags2 |= value;
404 	value = u32_get_bits(info0, HAL_RX_HE_SIG_A_MU_DL_INFO0_NUM_SIGB_SYMB);
405 	value = value - 1;
406 	value = value << HE_NUM_SIG_B_SYMBOLS_SHIFT;
407 	ppdu_info->he_flags2 |= value;
408 
409 	ppdu_info->is_stbc = info1 &
410 			     HAL_RX_HE_SIG_A_MU_DL_INFO1_STBC;
411 }
412 
413 static void ath12k_dp_mon_parse_he_sig_su(const struct hal_rx_he_sig_a_su_info *he_sig_a,
414 					  struct hal_rx_mon_ppdu_info *ppdu_info)
415 {
416 	u32 info0, info1, value;
417 	u32 dcm;
418 	u8 he_dcm = 0, he_stbc = 0;
419 	u16 he_gi = 0, he_ltf = 0;
420 
421 	ppdu_info->he_flags = 1;
422 
423 	info0 = __le32_to_cpu(he_sig_a->info0);
424 	info1 = __le32_to_cpu(he_sig_a->info1);
425 
426 	value = u32_get_bits(info0, HAL_RX_HE_SIG_A_SU_INFO_INFO0_FORMAT_IND);
427 	if (value == 0)
428 		ppdu_info->he_data1 = HE_TRIG_FORMAT_TYPE;
429 	else
430 		ppdu_info->he_data1 = HE_SU_FORMAT_TYPE;
431 
432 	ppdu_info->he_data1 |=
433 			HE_BSS_COLOR_KNOWN |
434 			HE_BEAM_CHANGE_KNOWN |
435 			HE_DL_UL_KNOWN |
436 			HE_MCS_KNOWN |
437 			HE_DCM_KNOWN |
438 			HE_CODING_KNOWN |
439 			HE_LDPC_EXTRA_SYMBOL_KNOWN |
440 			HE_STBC_KNOWN |
441 			HE_DATA_BW_RU_KNOWN |
442 			HE_DOPPLER_KNOWN;
443 
444 	ppdu_info->he_data2 |=
445 			HE_GI_KNOWN |
446 			HE_TXBF_KNOWN |
447 			HE_PE_DISAMBIGUITY_KNOWN |
448 			HE_TXOP_KNOWN |
449 			HE_LTF_SYMBOLS_KNOWN |
450 			HE_PRE_FEC_PADDING_KNOWN |
451 			HE_MIDABLE_PERIODICITY_KNOWN;
452 
453 	ppdu_info->he_data3 = u32_get_bits(info0,
454 					   HAL_RX_HE_SIG_A_SU_INFO_INFO0_BSS_COLOR);
455 	value = u32_get_bits(info0, HAL_RX_HE_SIG_A_SU_INFO_INFO0_BEAM_CHANGE);
456 	value = value << HE_BEAM_CHANGE_SHIFT;
457 	ppdu_info->he_data3 |= value;
458 	value = u32_get_bits(info0, HAL_RX_HE_SIG_A_SU_INFO_INFO0_DL_UL_FLAG);
459 	value = value << HE_DL_UL_SHIFT;
460 	ppdu_info->he_data3 |= value;
461 
462 	value = u32_get_bits(info0, HAL_RX_HE_SIG_A_SU_INFO_INFO0_TRANSMIT_MCS);
463 	ppdu_info->mcs = value;
464 	value = value << HE_TRANSMIT_MCS_SHIFT;
465 	ppdu_info->he_data3 |= value;
466 
467 	value = u32_get_bits(info0, HAL_RX_HE_SIG_A_SU_INFO_INFO0_DCM);
468 	he_dcm = value;
469 	value = value << HE_DCM_SHIFT;
470 	ppdu_info->he_data3 |= value;
471 	value = u32_get_bits(info1, HAL_RX_HE_SIG_A_SU_INFO_INFO1_CODING);
472 	value = value << HE_CODING_SHIFT;
473 	ppdu_info->he_data3 |= value;
474 	value = u32_get_bits(info1, HAL_RX_HE_SIG_A_SU_INFO_INFO1_LDPC_EXTRA);
475 	value = value << HE_LDPC_EXTRA_SYMBOL_SHIFT;
476 	ppdu_info->he_data3 |= value;
477 	value = u32_get_bits(info1, HAL_RX_HE_SIG_A_SU_INFO_INFO1_STBC);
478 	he_stbc = value;
479 	value = value << HE_STBC_SHIFT;
480 	ppdu_info->he_data3 |= value;
481 
482 	/* data4 */
483 	ppdu_info->he_data4 = u32_get_bits(info0,
484 					   HAL_RX_HE_SIG_A_SU_INFO_INFO0_SPATIAL_REUSE);
485 
486 	/* data5 */
487 	value = u32_get_bits(info0,
488 			     HAL_RX_HE_SIG_A_SU_INFO_INFO0_TRANSMIT_BW);
489 	ppdu_info->he_data5 = value;
490 	ppdu_info->bw = value;
491 	value = u32_get_bits(info0, HAL_RX_HE_SIG_A_SU_INFO_INFO0_CP_LTF_SIZE);
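	/*
	 * As in the MU case, the CP+LTF field jointly encodes GI and HE-LTF
	 * size; code 3 is ambiguous and resolves to 0.8 us GI with 4x LTF
	 * only when both DCM and STBC are set, otherwise to 3.2 us GI.
	 */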
492 	switch (value) {
493 	case 0:
494 		he_gi = HE_GI_0_8;
495 		he_ltf = HE_LTF_1_X;
496 		break;
497 	case 1:
498 		he_gi = HE_GI_0_8;
499 		he_ltf = HE_LTF_2_X;
500 		break;
501 	case 2:
502 		he_gi = HE_GI_1_6;
503 		he_ltf = HE_LTF_2_X;
504 		break;
505 	case 3:
506 		if (he_dcm && he_stbc) {
507 			he_gi = HE_GI_0_8;
508 			he_ltf = HE_LTF_4_X;
509 		} else {
510 			he_gi = HE_GI_3_2;
511 			he_ltf = HE_LTF_4_X;
512 		}
513 		break;
514 	}
515 	ppdu_info->gi = he_gi;
516 	value = he_gi << HE_GI_SHIFT;
517 	ppdu_info->he_data5 |= value;
518 	value = he_ltf << HE_LTF_SIZE_SHIFT;
519 	ppdu_info->ltf_size = he_ltf;
520 	ppdu_info->he_data5 |= value;
521 
522 	value = u32_get_bits(info0, HAL_RX_HE_SIG_A_SU_INFO_INFO0_NSTS);
523 	value = (value << HE_LTF_SYM_SHIFT);
524 	ppdu_info->he_data5 |= value;
525 
526 	value = u32_get_bits(info1, HAL_RX_HE_SIG_A_SU_INFO_INFO1_PKT_EXT_FACTOR);
527 	value = value << HE_PRE_FEC_PAD_SHIFT;
528 	ppdu_info->he_data5 |= value;
529 
530 	value = u32_get_bits(info1, HAL_RX_HE_SIG_A_SU_INFO_INFO1_TXBF);
531 	value = value << HE_TXBF_SHIFT;
532 	ppdu_info->he_data5 |= value;
533 	value = u32_get_bits(info1, HAL_RX_HE_SIG_A_SU_INFO_INFO1_PKT_EXT_PE_DISAM);
534 	value = value << HE_PE_DISAMBIGUITY_SHIFT;
535 	ppdu_info->he_data5 |= value;
536 
537 	/* data6 */
538 	value = u32_get_bits(info0, HAL_RX_HE_SIG_A_SU_INFO_INFO0_NSTS);
539 	value++;
540 	ppdu_info->he_data6 = value;
541 	value = u32_get_bits(info1, HAL_RX_HE_SIG_A_SU_INFO_INFO1_DOPPLER_IND);
542 	value = value << HE_DOPPLER_SHIFT;
543 	ppdu_info->he_data6 |= value;
544 	value = u32_get_bits(info1, HAL_RX_HE_SIG_A_SU_INFO_INFO1_TXOP_DURATION);
545 	value = value << HE_TXOP_SHIFT;
546 	ppdu_info->he_data6 |= value;
547 
548 	ppdu_info->mcs =
549 		u32_get_bits(info0, HAL_RX_HE_SIG_A_SU_INFO_INFO0_TRANSMIT_MCS);
550 	ppdu_info->bw =
551 		u32_get_bits(info0, HAL_RX_HE_SIG_A_SU_INFO_INFO0_TRANSMIT_BW);
552 	ppdu_info->ldpc = u32_get_bits(info1, HAL_RX_HE_SIG_A_SU_INFO_INFO1_CODING);
553 	ppdu_info->is_stbc = u32_get_bits(info1, HAL_RX_HE_SIG_A_SU_INFO_INFO1_STBC);
554 	ppdu_info->beamformed = u32_get_bits(info1, HAL_RX_HE_SIG_A_SU_INFO_INFO1_TXBF);
555 	dcm = u32_get_bits(info0, HAL_RX_HE_SIG_A_SU_INFO_INFO0_DCM);
556 	ppdu_info->nss = u32_get_bits(info0, HAL_RX_HE_SIG_A_SU_INFO_INFO0_NSTS);
557 	ppdu_info->dcm = dcm;
558 }
559 
560 static void
561 ath12k_dp_mon_hal_rx_parse_u_sig_cmn(const struct hal_mon_usig_cmn *cmn,
562 				     struct hal_rx_mon_ppdu_info *ppdu_info)
563 {
564 	u32 common;
565 
566 	ppdu_info->u_sig_info.bw = le32_get_bits(cmn->info0,
567 						 HAL_RX_USIG_CMN_INFO0_BW);
568 	ppdu_info->u_sig_info.ul_dl = le32_get_bits(cmn->info0,
569 						    HAL_RX_USIG_CMN_INFO0_UL_DL);
570 
571 	common = __le32_to_cpu(ppdu_info->u_sig_info.usig.common);
572 	common |= IEEE80211_RADIOTAP_EHT_USIG_COMMON_PHY_VER_KNOWN |
573 		  IEEE80211_RADIOTAP_EHT_USIG_COMMON_BW_KNOWN |
574 		  IEEE80211_RADIOTAP_EHT_USIG_COMMON_UL_DL_KNOWN |
575 		  IEEE80211_RADIOTAP_EHT_USIG_COMMON_BSS_COLOR_KNOWN |
576 		  IEEE80211_RADIOTAP_EHT_USIG_COMMON_TXOP_KNOWN |
577 		  ATH12K_LE32_DEC_ENC(cmn->info0,
578 				      HAL_RX_USIG_CMN_INFO0_PHY_VERSION,
579 				      IEEE80211_RADIOTAP_EHT_USIG_COMMON_PHY_VER) |
580 		  u32_encode_bits(ppdu_info->u_sig_info.bw,
581 				  IEEE80211_RADIOTAP_EHT_USIG_COMMON_BW) |
582 		  u32_encode_bits(ppdu_info->u_sig_info.ul_dl,
583 				  IEEE80211_RADIOTAP_EHT_USIG_COMMON_UL_DL) |
584 		  ATH12K_LE32_DEC_ENC(cmn->info0,
585 				      HAL_RX_USIG_CMN_INFO0_BSS_COLOR,
586 				      IEEE80211_RADIOTAP_EHT_USIG_COMMON_BSS_COLOR) |
587 		  ATH12K_LE32_DEC_ENC(cmn->info0,
588 				      HAL_RX_USIG_CMN_INFO0_TXOP,
589 				      IEEE80211_RADIOTAP_EHT_USIG_COMMON_TXOP);
590 	ppdu_info->u_sig_info.usig.common = cpu_to_le32(common);
591 
592 	switch (ppdu_info->u_sig_info.bw) {
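	/*
	 * Also translate the U-SIG bandwidth code into the driver's
	 * HAL_RX_BW_* enumeration; both 320 MHz channelization variants
	 * (320_1/320_2) collapse to HAL_RX_BW_320MHZ, and unknown codes
	 * default to 20 MHz.
	 */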
593 	default:
594 		fallthrough;
595 	case HAL_EHT_BW_20:
596 		ppdu_info->bw = HAL_RX_BW_20MHZ;
597 		break;
598 	case HAL_EHT_BW_40:
599 		ppdu_info->bw = HAL_RX_BW_40MHZ;
600 		break;
601 	case HAL_EHT_BW_80:
602 		ppdu_info->bw = HAL_RX_BW_80MHZ;
603 		break;
604 	case HAL_EHT_BW_160:
605 		ppdu_info->bw = HAL_RX_BW_160MHZ;
606 		break;
607 	case HAL_EHT_BW_320_1:
608 	case HAL_EHT_BW_320_2:
609 		ppdu_info->bw = HAL_RX_BW_320MHZ;
610 		break;
611 	}
612 }
613 
614 static void
615 ath12k_dp_mon_hal_rx_parse_u_sig_tb(const struct hal_mon_usig_tb *usig_tb,
616 				    struct hal_rx_mon_ppdu_info *ppdu_info)
617 {
618 	struct ieee80211_radiotap_eht_usig *usig = &ppdu_info->u_sig_info.usig;
619 	enum ieee80211_radiotap_eht_usig_tb spatial_reuse1, spatial_reuse2;
620 	u32 common, value, mask;
621 
622 	spatial_reuse1 = IEEE80211_RADIOTAP_EHT_USIG2_TB_B3_B6_SPATIAL_REUSE_1;
623 	spatial_reuse2 = IEEE80211_RADIOTAP_EHT_USIG2_TB_B7_B10_SPATIAL_REUSE_2;
624 
625 	common = __le32_to_cpu(usig->common);
626 	value = __le32_to_cpu(usig->value);
627 	mask = __le32_to_cpu(usig->mask);
628 
629 	ppdu_info->u_sig_info.ppdu_type_comp_mode =
630 				le32_get_bits(usig_tb->info0,
631 					      HAL_RX_USIG_TB_INFO0_PPDU_TYPE_COMP_MODE);
632 
633 	common |= ATH12K_LE32_DEC_ENC(usig_tb->info0,
634 				      HAL_RX_USIG_TB_INFO0_RX_INTEG_CHECK_PASS,
635 				      IEEE80211_RADIOTAP_EHT_USIG_COMMON_BAD_USIG_CRC);
636 
637 	value |= IEEE80211_RADIOTAP_EHT_USIG1_TB_B20_B25_DISREGARD |
638 		 u32_encode_bits(ppdu_info->u_sig_info.ppdu_type_comp_mode,
639 				 IEEE80211_RADIOTAP_EHT_USIG2_TB_B0_B1_PPDU_TYPE) |
640 		 IEEE80211_RADIOTAP_EHT_USIG2_TB_B2_VALIDATE |
641 		 ATH12K_LE32_DEC_ENC(usig_tb->info0,
642 				     HAL_RX_USIG_TB_INFO0_SPATIAL_REUSE_1,
643 				     spatial_reuse1) |
644 		 ATH12K_LE32_DEC_ENC(usig_tb->info0,
645 				     HAL_RX_USIG_TB_INFO0_SPATIAL_REUSE_2,
646 				     spatial_reuse2) |
647 		 IEEE80211_RADIOTAP_EHT_USIG2_TB_B11_B15_DISREGARD |
648 		 ATH12K_LE32_DEC_ENC(usig_tb->info0,
649 				     HAL_RX_USIG_TB_INFO0_CRC,
650 				     IEEE80211_RADIOTAP_EHT_USIG2_TB_B16_B19_CRC) |
651 		 ATH12K_LE32_DEC_ENC(usig_tb->info0,
652 				     HAL_RX_USIG_TB_INFO0_TAIL,
653 				     IEEE80211_RADIOTAP_EHT_USIG2_TB_B20_B25_TAIL);
654 
655 	mask |= IEEE80211_RADIOTAP_EHT_USIG1_TB_B20_B25_DISREGARD |
656 		IEEE80211_RADIOTAP_EHT_USIG2_TB_B0_B1_PPDU_TYPE |
657 		IEEE80211_RADIOTAP_EHT_USIG2_TB_B2_VALIDATE |
658 		spatial_reuse1 | spatial_reuse2 |
659 		IEEE80211_RADIOTAP_EHT_USIG2_TB_B11_B15_DISREGARD |
660 		IEEE80211_RADIOTAP_EHT_USIG2_TB_B16_B19_CRC |
661 		IEEE80211_RADIOTAP_EHT_USIG2_TB_B20_B25_TAIL;
662 
663 	usig->common = cpu_to_le32(common);
664 	usig->value = cpu_to_le32(value);
665 	usig->mask = cpu_to_le32(mask);
666 }
667 
668 static void
669 ath12k_dp_mon_hal_rx_parse_u_sig_mu(const struct hal_mon_usig_mu *usig_mu,
670 				    struct hal_rx_mon_ppdu_info *ppdu_info)
671 {
672 	struct ieee80211_radiotap_eht_usig *usig = &ppdu_info->u_sig_info.usig;
673 	enum ieee80211_radiotap_eht_usig_mu sig_symb, punc;
674 	u32 common, value, mask;
675 
676 	sig_symb = IEEE80211_RADIOTAP_EHT_USIG2_MU_B11_B15_EHT_SIG_SYMBOLS;
677 	punc = IEEE80211_RADIOTAP_EHT_USIG2_MU_B3_B7_PUNCTURED_INFO;
678 
679 	common = __le32_to_cpu(usig->common);
680 	value = __le32_to_cpu(usig->value);
681 	mask = __le32_to_cpu(usig->mask);
682 
683 	ppdu_info->u_sig_info.ppdu_type_comp_mode =
684 				le32_get_bits(usig_mu->info0,
685 					      HAL_RX_USIG_MU_INFO0_PPDU_TYPE_COMP_MODE);
686 	ppdu_info->u_sig_info.eht_sig_mcs =
687 				le32_get_bits(usig_mu->info0,
688 					      HAL_RX_USIG_MU_INFO0_EHT_SIG_MCS);
689 	ppdu_info->u_sig_info.num_eht_sig_sym =
690 				le32_get_bits(usig_mu->info0,
691 					      HAL_RX_USIG_MU_INFO0_NUM_EHT_SIG_SYM);
692 
693 	common |= ATH12K_LE32_DEC_ENC(usig_mu->info0,
694 				      HAL_RX_USIG_MU_INFO0_RX_INTEG_CHECK_PASS,
695 				      IEEE80211_RADIOTAP_EHT_USIG_COMMON_BAD_USIG_CRC);
696 
697 	value |= IEEE80211_RADIOTAP_EHT_USIG1_MU_B20_B24_DISREGARD |
698 		 IEEE80211_RADIOTAP_EHT_USIG1_MU_B25_VALIDATE |
699 		 u32_encode_bits(ppdu_info->u_sig_info.ppdu_type_comp_mode,
700 				 IEEE80211_RADIOTAP_EHT_USIG2_MU_B0_B1_PPDU_TYPE) |
701 		 IEEE80211_RADIOTAP_EHT_USIG2_MU_B2_VALIDATE |
702 		 ATH12K_LE32_DEC_ENC(usig_mu->info0,
703 				     HAL_RX_USIG_MU_INFO0_PUNC_CH_INFO,
704 				     punc) |
705 		 IEEE80211_RADIOTAP_EHT_USIG2_MU_B8_VALIDATE |
706 		 u32_encode_bits(ppdu_info->u_sig_info.eht_sig_mcs,
707 				 IEEE80211_RADIOTAP_EHT_USIG2_MU_B9_B10_SIG_MCS) |
708 		 u32_encode_bits(ppdu_info->u_sig_info.num_eht_sig_sym,
709 				 sig_symb) |
710 		 ATH12K_LE32_DEC_ENC(usig_mu->info0,
711 				     HAL_RX_USIG_MU_INFO0_CRC,
712 				     IEEE80211_RADIOTAP_EHT_USIG2_MU_B16_B19_CRC) |
713 		 ATH12K_LE32_DEC_ENC(usig_mu->info0,
714 				     HAL_RX_USIG_MU_INFO0_TAIL,
715 				     IEEE80211_RADIOTAP_EHT_USIG2_MU_B20_B25_TAIL);
716 
717 	mask |= IEEE80211_RADIOTAP_EHT_USIG1_MU_B20_B24_DISREGARD |
718 		IEEE80211_RADIOTAP_EHT_USIG1_MU_B25_VALIDATE |
719 		IEEE80211_RADIOTAP_EHT_USIG2_MU_B0_B1_PPDU_TYPE |
720 		IEEE80211_RADIOTAP_EHT_USIG2_MU_B2_VALIDATE |
721 		punc |
722 		IEEE80211_RADIOTAP_EHT_USIG2_MU_B8_VALIDATE |
723 		IEEE80211_RADIOTAP_EHT_USIG2_MU_B9_B10_SIG_MCS |
724 		sig_symb |
725 		IEEE80211_RADIOTAP_EHT_USIG2_MU_B16_B19_CRC |
726 		IEEE80211_RADIOTAP_EHT_USIG2_MU_B20_B25_TAIL;
727 
728 	usig->common = cpu_to_le32(common);
729 	usig->value = cpu_to_le32(value);
730 	usig->mask = cpu_to_le32(mask);
731 }
732 
733 static void
734 ath12k_dp_mon_hal_rx_parse_u_sig_hdr(const struct hal_mon_usig_hdr *usig,
735 				     struct hal_rx_mon_ppdu_info *ppdu_info)
736 {
737 	u8 comp_mode;
738 
739 	ppdu_info->eht_usig = true;
740 
741 	ath12k_dp_mon_hal_rx_parse_u_sig_cmn(&usig->cmn, ppdu_info);
742 
743 	comp_mode = le32_get_bits(usig->non_cmn.mu.info0,
744 				  HAL_RX_USIG_MU_INFO0_PPDU_TYPE_COMP_MODE);
745 
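	/*
	 * A compression mode of 0 combined with the UL flag indicates a
	 * trigger-based PPDU, whose U-SIG-2 has the TB layout; every other
	 * combination is parsed with the MU layout.
	 */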
746 	if (comp_mode == 0 && ppdu_info->u_sig_info.ul_dl)
747 		ath12k_dp_mon_hal_rx_parse_u_sig_tb(&usig->non_cmn.tb, ppdu_info);
748 	else
749 		ath12k_dp_mon_hal_rx_parse_u_sig_mu(&usig->non_cmn.mu, ppdu_info);
750 }
751 
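/*
 * Append the payload of a fragmented TLV to the per-PPDU aggregation
 * buffer; fragments that would overflow HAL_RX_MON_MAX_AGGR_SIZE are
 * silently dropped.
 */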
752 static void
753 ath12k_dp_mon_hal_aggr_tlv(struct hal_rx_mon_ppdu_info *ppdu_info,
754 			   u16 tlv_len, const void *tlv_data)
755 {
756 	if (tlv_len <= HAL_RX_MON_MAX_AGGR_SIZE - ppdu_info->tlv_aggr.cur_len) {
757 		memcpy(ppdu_info->tlv_aggr.buf + ppdu_info->tlv_aggr.cur_len,
758 		       tlv_data, tlv_len);
759 		ppdu_info->tlv_aggr.cur_len += tlv_len;
760 	}
761 }
762 
763 static inline bool
764 ath12k_dp_mon_hal_rx_is_frame_type_ndp(const struct hal_rx_u_sig_info *usig_info)
765 {
766 	if (usig_info->ppdu_type_comp_mode == 1 &&
767 	    usig_info->eht_sig_mcs == 0 &&
768 	    usig_info->num_eht_sig_sym == 0)
769 		return true;
770 
771 	return false;
772 }
773 
774 static inline bool
775 ath12k_dp_mon_hal_rx_is_non_ofdma(const struct hal_rx_u_sig_info *usig_info)
776 {
777 	u32 ppdu_type_comp_mode = usig_info->ppdu_type_comp_mode;
778 	u32 ul_dl = usig_info->ul_dl;
779 
780 	if ((ppdu_type_comp_mode == HAL_RX_RECEPTION_TYPE_MU_MIMO && ul_dl == 0) ||
781 	    (ppdu_type_comp_mode == HAL_RX_RECEPTION_TYPE_MU_OFDMA && ul_dl == 0) ||
782 	    (ppdu_type_comp_mode == HAL_RX_RECEPTION_TYPE_MU_MIMO  && ul_dl == 1))
783 		return true;
784 
785 	return false;
786 }
787 
788 static inline bool
789 ath12k_dp_mon_hal_rx_is_ofdma(const struct hal_rx_u_sig_info *usig_info)
790 {
791 	if (usig_info->ppdu_type_comp_mode == 0 && usig_info->ul_dl == 0)
792 		return true;
793 
794 	return false;
795 }
796 
797 static void
798 ath12k_dp_mon_hal_rx_parse_eht_sig_ndp(const struct hal_eht_sig_ndp_cmn_eb *eht_sig_ndp,
799 				       struct hal_rx_mon_ppdu_info *ppdu_info)
800 {
801 	struct hal_rx_radiotap_eht *eht = &ppdu_info->eht_info.eht;
802 	u32 known, data;
803 
804 	known = __le32_to_cpu(eht->known);
805 	known |= IEEE80211_RADIOTAP_EHT_KNOWN_SPATIAL_REUSE |
806 		 IEEE80211_RADIOTAP_EHT_KNOWN_EHT_LTF |
807 		 IEEE80211_RADIOTAP_EHT_KNOWN_NSS_S |
808 		 IEEE80211_RADIOTAP_EHT_KNOWN_BEAMFORMED_S |
809 		 IEEE80211_RADIOTAP_EHT_KNOWN_DISREGARD_S |
810 		 IEEE80211_RADIOTAP_EHT_KNOWN_CRC1 |
811 		 IEEE80211_RADIOTAP_EHT_KNOWN_TAIL1;
812 	eht->known = cpu_to_le32(known);
813 
814 	data = __le32_to_cpu(eht->data[0]);
815 	data |= ATH12K_LE32_DEC_ENC(eht_sig_ndp->info0,
816 				    HAL_RX_EHT_SIG_NDP_CMN_INFO0_SPATIAL_REUSE,
817 				    IEEE80211_RADIOTAP_EHT_DATA0_SPATIAL_REUSE);
818 	/* GI and LTF size are indicated separately in the radiotap header
819 	 * and hence are parsed from another TLV
820 	 */
821 	data |=	ATH12K_LE32_DEC_ENC(eht_sig_ndp->info0,
822 				    HAL_RX_EHT_SIG_NDP_CMN_INFO0_NUM_LTF_SYM,
823 				    IEEE80211_RADIOTAP_EHT_DATA0_EHT_LTF);
824 
825 	data |=	ATH12K_LE32_DEC_ENC(eht_sig_ndp->info0,
826 				    HAL_RX_EHT_SIG_NDP_CMN_INFO0_CRC,
827 				    IEEE80211_RADIOTAP_EHT_DATA0_CRC1_O);
828 
829 	data |= ATH12K_LE32_DEC_ENC(eht_sig_ndp->info0,
830 				    HAL_RX_EHT_SIG_NDP_CMN_INFO0_DISREGARD,
831 				    IEEE80211_RADIOTAP_EHT_DATA0_DISREGARD_S);
832 	eht->data[0] = cpu_to_le32(data);
833 
834 	data = __le32_to_cpu(eht->data[7]);
835 	data |=	ATH12K_LE32_DEC_ENC(eht_sig_ndp->info0,
836 				    HAL_RX_EHT_SIG_NDP_CMN_INFO0_NSS,
837 				    IEEE80211_RADIOTAP_EHT_DATA7_NSS_S);
838 
839 	data |=	ATH12K_LE32_DEC_ENC(eht_sig_ndp->info0,
840 				    HAL_RX_EHT_SIG_NDP_CMN_INFO0_BEAMFORMED,
841 				    IEEE80211_RADIOTAP_EHT_DATA7_BEAMFORMED_S);
842 	eht->data[7] = cpu_to_le32(data);
843 }
844 
845 static void
846 ath12k_dp_mon_hal_rx_parse_usig_overflow(const struct hal_eht_sig_usig_overflow *ovflow,
847 					 struct hal_rx_mon_ppdu_info *ppdu_info)
848 {
849 	struct hal_rx_radiotap_eht *eht = &ppdu_info->eht_info.eht;
850 	u32 known, data;
851 
852 	known = __le32_to_cpu(eht->known);
853 	known |= IEEE80211_RADIOTAP_EHT_KNOWN_SPATIAL_REUSE |
854 		 IEEE80211_RADIOTAP_EHT_KNOWN_EHT_LTF |
855 		 IEEE80211_RADIOTAP_EHT_KNOWN_LDPC_EXTRA_SYM_OM |
856 		 IEEE80211_RADIOTAP_EHT_KNOWN_PRE_PADD_FACOR_OM |
857 		 IEEE80211_RADIOTAP_EHT_KNOWN_PE_DISAMBIGUITY_OM |
858 		 IEEE80211_RADIOTAP_EHT_KNOWN_DISREGARD_O;
859 	eht->known = cpu_to_le32(known);
860 
861 	data = __le32_to_cpu(eht->data[0]);
862 	data |=	ATH12K_LE32_DEC_ENC(ovflow->info0,
863 				    HAL_RX_EHT_SIG_OVERFLOW_INFO0_SPATIAL_REUSE,
864 				    IEEE80211_RADIOTAP_EHT_DATA0_SPATIAL_REUSE);
865 
866 	/* GI and LTF size are indicated separately in the radiotap header
867 	 * and hence are parsed from another TLV
868 	 */
869 	data |=	ATH12K_LE32_DEC_ENC(ovflow->info0,
870 				    HAL_RX_EHT_SIG_OVERFLOW_INFO0_NUM_LTF_SYM,
871 				    IEEE80211_RADIOTAP_EHT_DATA0_EHT_LTF);
872 
873 	data |=	ATH12K_LE32_DEC_ENC(ovflow->info0,
874 				    HAL_RX_EHT_SIG_OVERFLOW_INFO0_LDPC_EXTA_SYM,
875 				    IEEE80211_RADIOTAP_EHT_DATA0_LDPC_EXTRA_SYM_OM);
876 
877 	data |=	ATH12K_LE32_DEC_ENC(ovflow->info0,
878 				    HAL_RX_EHT_SIG_OVERFLOW_INFO0_PRE_FEC_PAD_FACTOR,
879 				    IEEE80211_RADIOTAP_EHT_DATA0_PRE_PADD_FACOR_OM);
880 
881 	data |=	ATH12K_LE32_DEC_ENC(ovflow->info0,
882 				    HAL_RX_EHT_SIG_OVERFLOW_INFO0_DISAMBIGUITY,
883 				    IEEE80211_RADIOTAP_EHT_DATA0_PE_DISAMBIGUITY_OM);
884 
885 	data |=	ATH12K_LE32_DEC_ENC(ovflow->info0,
886 				    HAL_RX_EHT_SIG_OVERFLOW_INFO0_DISREGARD,
887 				    IEEE80211_RADIOTAP_EHT_DATA0_DISREGARD_O);
888 	eht->data[0] = cpu_to_le32(data);
889 }
890 
891 static void
892 ath12k_dp_mon_hal_rx_parse_non_ofdma_users(const struct hal_eht_sig_non_ofdma_cmn_eb *eb,
893 					   struct hal_rx_mon_ppdu_info *ppdu_info)
894 {
895 	struct hal_rx_radiotap_eht *eht = &ppdu_info->eht_info.eht;
896 	u32 known, data;
897 
898 	known = __le32_to_cpu(eht->known);
899 	known |= IEEE80211_RADIOTAP_EHT_KNOWN_NR_NON_OFDMA_USERS_M;
900 	eht->known = cpu_to_le32(known);
901 
902 	data = __le32_to_cpu(eht->data[7]);
903 	data |=	ATH12K_LE32_DEC_ENC(eb->info0,
904 				    HAL_RX_EHT_SIG_NON_OFDMA_INFO0_NUM_USERS,
905 				    IEEE80211_RADIOTAP_EHT_DATA7_NUM_OF_NON_OFDMA_USERS);
906 	eht->data[7] = cpu_to_le32(data);
907 }
908 
909 static void
910 ath12k_dp_mon_hal_rx_parse_eht_mumimo_user(const struct hal_eht_sig_mu_mimo *user,
911 					   struct hal_rx_mon_ppdu_info *ppdu_info)
912 {
913 	struct hal_rx_eht_info *eht_info = &ppdu_info->eht_info;
914 	u32 user_idx;
915 
916 	if (eht_info->num_user_info >= ARRAY_SIZE(eht_info->user_info))
917 		return;
918 
919 	user_idx = eht_info->num_user_info++;
920 
921 	eht_info->user_info[user_idx] |=
922 		IEEE80211_RADIOTAP_EHT_USER_INFO_STA_ID_KNOWN |
923 		IEEE80211_RADIOTAP_EHT_USER_INFO_MCS_KNOWN |
924 		IEEE80211_RADIOTAP_EHT_USER_INFO_CODING_KNOWN |
925 		IEEE80211_RADIOTAP_EHT_USER_INFO_SPATIAL_CONFIG_KNOWN_M |
926 		ATH12K_LE32_DEC_ENC(user->info0,
927 				    HAL_RX_EHT_SIG_MUMIMO_USER_INFO0_STA_ID,
928 				    IEEE80211_RADIOTAP_EHT_USER_INFO_STA_ID) |
929 		ATH12K_LE32_DEC_ENC(user->info0,
930 				    HAL_RX_EHT_SIG_MUMIMO_USER_INFO0_CODING,
931 				    IEEE80211_RADIOTAP_EHT_USER_INFO_CODING) |
932 		ATH12K_LE32_DEC_ENC(user->info0,
933 				    HAL_RX_EHT_SIG_MUMIMO_USER_INFO0_MCS,
934 				    IEEE80211_RADIOTAP_EHT_USER_INFO_MCS) |
935 		ATH12K_LE32_DEC_ENC(user->info0,
936 				    HAL_RX_EHT_SIG_MUMIMO_USER_INFO0_SPATIAL_CODING,
937 				    IEEE80211_RADIOTAP_EHT_USER_INFO_SPATIAL_CONFIG_M);
938 
939 	ppdu_info->mcs = le32_get_bits(user->info0,
940 				       HAL_RX_EHT_SIG_MUMIMO_USER_INFO0_MCS);
941 }
942 
943 static void
944 ath12k_dp_mon_hal_rx_parse_eht_non_mumimo_user(const struct hal_eht_sig_non_mu_mimo *user,
945 					       struct hal_rx_mon_ppdu_info *ppdu_info)
946 {
947 	struct hal_rx_eht_info *eht_info = &ppdu_info->eht_info;
948 	u32 user_idx;
949 
950 	if (eht_info->num_user_info >= ARRAY_SIZE(eht_info->user_info))
951 		return;
952 
953 	user_idx = eht_info->num_user_info++;
954 
955 	eht_info->user_info[user_idx] |=
956 		IEEE80211_RADIOTAP_EHT_USER_INFO_STA_ID_KNOWN |
957 		IEEE80211_RADIOTAP_EHT_USER_INFO_MCS_KNOWN |
958 		IEEE80211_RADIOTAP_EHT_USER_INFO_CODING_KNOWN |
959 		IEEE80211_RADIOTAP_EHT_USER_INFO_NSS_KNOWN_O |
960 		IEEE80211_RADIOTAP_EHT_USER_INFO_BEAMFORMING_KNOWN_O |
961 		ATH12K_LE32_DEC_ENC(user->info0,
962 				    HAL_RX_EHT_SIG_NON_MUMIMO_USER_INFO0_STA_ID,
963 				    IEEE80211_RADIOTAP_EHT_USER_INFO_STA_ID) |
964 		ATH12K_LE32_DEC_ENC(user->info0,
965 				    HAL_RX_EHT_SIG_NON_MUMIMO_USER_INFO0_CODING,
966 				    IEEE80211_RADIOTAP_EHT_USER_INFO_CODING) |
967 		ATH12K_LE32_DEC_ENC(user->info0,
968 				    HAL_RX_EHT_SIG_NON_MUMIMO_USER_INFO0_MCS,
969 				    IEEE80211_RADIOTAP_EHT_USER_INFO_MCS) |
970 		ATH12K_LE32_DEC_ENC(user->info0,
971 				    HAL_RX_EHT_SIG_NON_MUMIMO_USER_INFO0_NSS,
972 				    IEEE80211_RADIOTAP_EHT_USER_INFO_NSS_O) |
973 		ATH12K_LE32_DEC_ENC(user->info0,
974 				    HAL_RX_EHT_SIG_NON_MUMIMO_USER_INFO0_BEAMFORMED,
975 				    IEEE80211_RADIOTAP_EHT_USER_INFO_BEAMFORMING_O);
976 
977 	ppdu_info->mcs = le32_get_bits(user->info0,
978 				       HAL_RX_EHT_SIG_NON_MUMIMO_USER_INFO0_MCS);
979 
980 	ppdu_info->nss = le32_get_bits(user->info0,
981 				       HAL_RX_EHT_SIG_NON_MUMIMO_USER_INFO0_NSS) + 1;
982 }
983 
984 static inline bool
985 ath12k_dp_mon_hal_rx_is_mu_mimo_user(const struct hal_rx_u_sig_info *usig_info)
986 {
987 	if (usig_info->ppdu_type_comp_mode == HAL_RX_RECEPTION_TYPE_SU &&
988 	    usig_info->ul_dl == 1)
989 		return true;
990 
991 	return false;
992 }
993 
994 static void
995 ath12k_dp_mon_hal_rx_parse_eht_sig_non_ofdma(const void *tlv,
996 					     struct hal_rx_mon_ppdu_info *ppdu_info)
997 {
998 	const struct hal_eht_sig_non_ofdma_cmn_eb *eb = tlv;
999 
1000 	ath12k_dp_mon_hal_rx_parse_usig_overflow(tlv, ppdu_info);
1001 	ath12k_dp_mon_hal_rx_parse_non_ofdma_users(eb, ppdu_info);
1002 
1003 	if (ath12k_dp_mon_hal_rx_is_mu_mimo_user(&ppdu_info->u_sig_info))
1004 		ath12k_dp_mon_hal_rx_parse_eht_mumimo_user(&eb->user_field.mu_mimo,
1005 							   ppdu_info);
1006 	else
1007 		ath12k_dp_mon_hal_rx_parse_eht_non_mumimo_user(&eb->user_field.n_mu_mimo,
1008 							       ppdu_info);
1009 }
1010 
1011 static void
1012 ath12k_dp_mon_hal_rx_parse_ru_allocation(const struct hal_eht_sig_ofdma_cmn_eb *eb,
1013 					 struct hal_rx_mon_ppdu_info *ppdu_info)
1014 {
1015 	const struct hal_eht_sig_ofdma_cmn_eb1 *ofdma_cmn_eb1 = &eb->eb1;
1016 	const struct hal_eht_sig_ofdma_cmn_eb2 *ofdma_cmn_eb2 = &eb->eb2;
1017 	struct hal_rx_radiotap_eht *eht = &ppdu_info->eht_info.eht;
1018 	enum ieee80211_radiotap_eht_data ru_123, ru_124, ru_125, ru_126;
1019 	enum ieee80211_radiotap_eht_data ru_121, ru_122, ru_112, ru_111;
1020 	u32 data;
1021 
1022 	ru_123 = IEEE80211_RADIOTAP_EHT_DATA4_RU_ALLOC_CC_1_2_3;
1023 	ru_124 = IEEE80211_RADIOTAP_EHT_DATA5_RU_ALLOC_CC_1_2_4;
1024 	ru_125 = IEEE80211_RADIOTAP_EHT_DATA5_RU_ALLOC_CC_1_2_5;
1025 	ru_126 = IEEE80211_RADIOTAP_EHT_DATA6_RU_ALLOC_CC_1_2_6;
1026 	ru_121 = IEEE80211_RADIOTAP_EHT_DATA3_RU_ALLOC_CC_1_2_1;
1027 	ru_122 = IEEE80211_RADIOTAP_EHT_DATA3_RU_ALLOC_CC_1_2_2;
1028 	ru_112 = IEEE80211_RADIOTAP_EHT_DATA2_RU_ALLOC_CC_1_1_2;
1029 	ru_111 = IEEE80211_RADIOTAP_EHT_DATA1_RU_ALLOC_CC_1_1_1;
1030 
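	/*
	 * Wider bandwidths populate the extra per-80 MHz RU allocation
	 * subfields first and then fall through, so the subfields shared
	 * with the narrower bandwidths are filled in as well.
	 */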
1031 	switch (ppdu_info->u_sig_info.bw) {
1032 	case HAL_EHT_BW_320_2:
1033 	case HAL_EHT_BW_320_1:
1034 		data = __le32_to_cpu(eht->data[4]);
1035 		/* CC1 2::3 */
1036 		data |=	IEEE80211_RADIOTAP_EHT_DATA4_RU_ALLOC_CC_1_2_3_KNOWN |
1037 			ATH12K_LE64_DEC_ENC(ofdma_cmn_eb2->info0,
1038 					    HAL_RX_EHT_SIG_OFDMA_EB2_RU_ALLOC_2_3,
1039 					    ru_123);
1040 		eht->data[4] = cpu_to_le32(data);
1041 
1042 		data = __le32_to_cpu(eht->data[5]);
1043 		/* CC1 2::4 */
1044 		data |=	IEEE80211_RADIOTAP_EHT_DATA5_RU_ALLOC_CC_1_2_4_KNOWN |
1045 			ATH12K_LE64_DEC_ENC(ofdma_cmn_eb2->info0,
1046 					    HAL_RX_EHT_SIG_OFDMA_EB2_RU_ALLOC_2_4,
1047 					    ru_124);
1048 
1049 		/* CC1 2::5 */
1050 		data |=	IEEE80211_RADIOTAP_EHT_DATA5_RU_ALLOC_CC_1_2_5_KNOWN |
1051 			ATH12K_LE64_DEC_ENC(ofdma_cmn_eb2->info0,
1052 					    HAL_RX_EHT_SIG_OFDMA_EB2_RU_ALLOC_2_5,
1053 					    ru_125);
1054 		eht->data[5] = cpu_to_le32(data);
1055 
1056 		data = __le32_to_cpu(eht->data[6]);
1057 		/* CC1 2::6 */
1058 		data |=	IEEE80211_RADIOTAP_EHT_DATA6_RU_ALLOC_CC_1_2_6_KNOWN |
1059 			ATH12K_LE64_DEC_ENC(ofdma_cmn_eb2->info0,
1060 					    HAL_RX_EHT_SIG_OFDMA_EB2_RU_ALLOC_2_6,
1061 					    ru_126);
1062 		eht->data[6] = cpu_to_le32(data);
1063 
1064 		fallthrough;
1065 	case HAL_EHT_BW_160:
1066 		data = __le32_to_cpu(eht->data[3]);
1067 		/* CC1 2::1 */
1068 		data |=	IEEE80211_RADIOTAP_EHT_DATA3_RU_ALLOC_CC_1_2_1_KNOWN |
1069 			ATH12K_LE64_DEC_ENC(ofdma_cmn_eb2->info0,
1070 					    HAL_RX_EHT_SIG_OFDMA_EB2_RU_ALLOC_2_1,
1071 					    ru_121);
1072 		/* CC1 2::2 */
1073 		data |=	IEEE80211_RADIOTAP_EHT_DATA3_RU_ALLOC_CC_1_2_2_KNOWN |
1074 			ATH12K_LE64_DEC_ENC(ofdma_cmn_eb2->info0,
1075 					    HAL_RX_EHT_SIG_OFDMA_EB2_RU_ALLOC_2_2,
1076 					    ru_122);
1077 		eht->data[3] = cpu_to_le32(data);
1078 
1079 		fallthrough;
1080 	case HAL_EHT_BW_80:
1081 		data = __le32_to_cpu(eht->data[2]);
1082 		/* CC1 1::2 */
1083 		data |=	IEEE80211_RADIOTAP_EHT_DATA2_RU_ALLOC_CC_1_1_2_KNOWN |
1084 			ATH12K_LE64_DEC_ENC(ofdma_cmn_eb1->info0,
1085 					    HAL_RX_EHT_SIG_OFDMA_EB1_RU_ALLOC_1_2,
1086 					    ru_112);
1087 		eht->data[2] = cpu_to_le32(data);
1088 
1089 		fallthrough;
1090 	case HAL_EHT_BW_40:
1091 		fallthrough;
1092 	case HAL_EHT_BW_20:
1093 		data = __le32_to_cpu(eht->data[1]);
1094 		/* CC1 1::1 */
1095 		data |=	IEEE80211_RADIOTAP_EHT_DATA1_RU_ALLOC_CC_1_1_1_KNOWN |
1096 			ATH12K_LE64_DEC_ENC(ofdma_cmn_eb1->info0,
1097 					    HAL_RX_EHT_SIG_OFDMA_EB1_RU_ALLOC_1_1,
1098 					    ru_111);
1099 		eht->data[1] = cpu_to_le32(data);
1100 		break;
1101 	default:
1102 		break;
1103 	}
1104 }
1105 
1106 static void
1107 ath12k_dp_mon_hal_rx_parse_eht_sig_ofdma(const void *tlv,
1108 					 struct hal_rx_mon_ppdu_info *ppdu_info)
1109 {
1110 	const struct hal_eht_sig_ofdma_cmn_eb *ofdma = tlv;
1111 
1112 	ath12k_dp_mon_hal_rx_parse_usig_overflow(tlv, ppdu_info);
1113 	ath12k_dp_mon_hal_rx_parse_ru_allocation(ofdma, ppdu_info);
1114 
1115 	ath12k_dp_mon_hal_rx_parse_eht_non_mumimo_user(&ofdma->user_field.n_mu_mimo,
1116 						       ppdu_info);
1117 }
1118 
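/*
 * The EHT-SIG layout depends on what U-SIG advertised: sounding NDPs carry
 * only a common field, non-OFDMA (e.g. MU-MIMO) receptions carry per-user
 * fields without RU allocation, and OFDMA receptions additionally carry RU
 * allocation subfields. Dispatch on the cached U-SIG info accordingly.
 */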
1119 static void
1120 ath12k_dp_mon_parse_eht_sig_hdr(struct hal_rx_mon_ppdu_info *ppdu_info,
1121 				const void *tlv_data)
1122 {
1123 	ppdu_info->is_eht = true;
1124 
1125 	if (ath12k_dp_mon_hal_rx_is_frame_type_ndp(&ppdu_info->u_sig_info))
1126 		ath12k_dp_mon_hal_rx_parse_eht_sig_ndp(tlv_data, ppdu_info);
1127 	else if (ath12k_dp_mon_hal_rx_is_non_ofdma(&ppdu_info->u_sig_info))
1128 		ath12k_dp_mon_hal_rx_parse_eht_sig_non_ofdma(tlv_data, ppdu_info);
1129 	else if (ath12k_dp_mon_hal_rx_is_ofdma(&ppdu_info->u_sig_info))
1130 		ath12k_dp_mon_hal_rx_parse_eht_sig_ofdma(tlv_data, ppdu_info);
1131 }
1132 
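/*
 * Map the HAL RU size code onto the driver's RU size enumeration; the
 * 78- and 132-tone entries correspond to the small 52+26 and 106+26
 * multi-RU combinations.
 */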
1133 static inline enum ath12k_eht_ru_size
1134 hal_rx_mon_hal_ru_size_to_ath12k_ru_size(u32 hal_ru_size)
1135 {
1136 	switch (hal_ru_size) {
1137 	case HAL_EHT_RU_26:
1138 		return ATH12K_EHT_RU_26;
1139 	case HAL_EHT_RU_52:
1140 		return ATH12K_EHT_RU_52;
1141 	case HAL_EHT_RU_78:
1142 		return ATH12K_EHT_RU_52_26;
1143 	case HAL_EHT_RU_106:
1144 		return ATH12K_EHT_RU_106;
1145 	case HAL_EHT_RU_132:
1146 		return ATH12K_EHT_RU_106_26;
1147 	case HAL_EHT_RU_242:
1148 		return ATH12K_EHT_RU_242;
1149 	case HAL_EHT_RU_484:
1150 		return ATH12K_EHT_RU_484;
1151 	case HAL_EHT_RU_726:
1152 		return ATH12K_EHT_RU_484_242;
1153 	case HAL_EHT_RU_996:
1154 		return ATH12K_EHT_RU_996;
1155 	case HAL_EHT_RU_996x2:
1156 		return ATH12K_EHT_RU_996x2;
1157 	case HAL_EHT_RU_996x3:
1158 		return ATH12K_EHT_RU_996x3;
1159 	case HAL_EHT_RU_996x4:
1160 		return ATH12K_EHT_RU_996x4;
1161 	case HAL_EHT_RU_NONE:
1162 		return ATH12K_EHT_RU_INVALID;
1163 	case HAL_EHT_RU_996_484:
1164 		return ATH12K_EHT_RU_996_484;
1165 	case HAL_EHT_RU_996x2_484:
1166 		return ATH12K_EHT_RU_996x2_484;
1167 	case HAL_EHT_RU_996x3_484:
1168 		return ATH12K_EHT_RU_996x3_484;
1169 	case HAL_EHT_RU_996_484_242:
1170 		return ATH12K_EHT_RU_996_484_242;
1171 	default:
1172 		return ATH12K_EHT_RU_INVALID;
1173 	}
1174 }
1175 
1176 static inline u32
1177 hal_rx_ul_ofdma_ru_size_to_width(enum ath12k_eht_ru_size ru_size)
1178 {
1179 	switch (ru_size) {
1180 	case ATH12K_EHT_RU_26:
1181 		return RU_26;
1182 	case ATH12K_EHT_RU_52:
1183 		return RU_52;
1184 	case ATH12K_EHT_RU_52_26:
1185 		return RU_52_26;
1186 	case ATH12K_EHT_RU_106:
1187 		return RU_106;
1188 	case ATH12K_EHT_RU_106_26:
1189 		return RU_106_26;
1190 	case ATH12K_EHT_RU_242:
1191 		return RU_242;
1192 	case ATH12K_EHT_RU_484:
1193 		return RU_484;
1194 	case ATH12K_EHT_RU_484_242:
1195 		return RU_484_242;
1196 	case ATH12K_EHT_RU_996:
1197 		return RU_996;
1198 	case ATH12K_EHT_RU_996_484:
1199 		return RU_996_484;
1200 	case ATH12K_EHT_RU_996_484_242:
1201 		return RU_996_484_242;
1202 	case ATH12K_EHT_RU_996x2:
1203 		return RU_2X996;
1204 	case ATH12K_EHT_RU_996x2_484:
1205 		return RU_2X996_484;
1206 	case ATH12K_EHT_RU_996x3:
1207 		return RU_3X996;
1208 	case ATH12K_EHT_RU_996x3_484:
1209 		return RU_3X996_484;
1210 	case ATH12K_EHT_RU_996x4:
1211 		return RU_4X996;
1212 	default:
1213 		return RU_INVALID;
1214 	}
1215 }
1216 
1217 static void
1218 ath12k_dp_mon_hal_rx_parse_user_info(const struct hal_receive_user_info *rx_usr_info,
1219 				     u16 user_id,
1220 				     struct hal_rx_mon_ppdu_info *ppdu_info)
1221 {
1222 	struct hal_rx_user_status *mon_rx_user_status = NULL;
1223 	struct hal_rx_radiotap_eht *eht = &ppdu_info->eht_info.eht;
1224 	enum ath12k_eht_ru_size rtap_ru_size = ATH12K_EHT_RU_INVALID;
1225 	u32 ru_width, reception_type, ru_index = HAL_EHT_RU_INVALID;
1226 	u32 ru_type_80_0, ru_start_index_80_0;
1227 	u32 ru_type_80_1, ru_start_index_80_1;
1228 	u32 ru_type_80_2, ru_start_index_80_2;
1229 	u32 ru_type_80_3, ru_start_index_80_3;
1230 	u32 ru_size = 0, num_80mhz_with_ru = 0;
1231 	u64 ru_index_320mhz = 0;
1232 	u32 ru_index_per80mhz;
1233 
1234 	reception_type = le32_get_bits(rx_usr_info->info0,
1235 				       HAL_RX_USR_INFO0_RECEPTION_TYPE);
1236 
1237 	switch (reception_type) {
1238 	case HAL_RECEPTION_TYPE_SU:
1239 		ppdu_info->reception_type = HAL_RX_RECEPTION_TYPE_SU;
1240 		break;
1241 	case HAL_RECEPTION_TYPE_DL_MU_MIMO:
1242 	case HAL_RECEPTION_TYPE_UL_MU_MIMO:
1243 		ppdu_info->reception_type = HAL_RX_RECEPTION_TYPE_MU_MIMO;
1244 		break;
1245 	case HAL_RECEPTION_TYPE_DL_MU_OFMA:
1246 	case HAL_RECEPTION_TYPE_UL_MU_OFDMA:
1247 		ppdu_info->reception_type = HAL_RX_RECEPTION_TYPE_MU_OFDMA;
1248 		break;
1249 	case HAL_RECEPTION_TYPE_DL_MU_OFDMA_MIMO:
1250 	case HAL_RECEPTION_TYPE_UL_MU_OFDMA_MIMO:
1251 		ppdu_info->reception_type = HAL_RX_RECEPTION_TYPE_MU_OFDMA_MIMO;
1252 	}
1253 
1254 	ppdu_info->is_stbc = le32_get_bits(rx_usr_info->info0, HAL_RX_USR_INFO0_STBC);
1255 	ppdu_info->ldpc = le32_get_bits(rx_usr_info->info2, HAL_RX_USR_INFO2_LDPC);
1256 	ppdu_info->dcm = le32_get_bits(rx_usr_info->info2, HAL_RX_USR_INFO2_STA_DCM);
1257 	ppdu_info->bw = le32_get_bits(rx_usr_info->info1, HAL_RX_USR_INFO1_RX_BW);
1258 	ppdu_info->mcs = le32_get_bits(rx_usr_info->info1, HAL_RX_USR_INFO1_MCS);
1259 	ppdu_info->nss = le32_get_bits(rx_usr_info->info2, HAL_RX_USR_INFO2_NSS) + 1;
1260 
1261 	if (user_id < HAL_MAX_UL_MU_USERS) {
1262 		mon_rx_user_status = &ppdu_info->userstats[user_id];
1263 		mon_rx_user_status->mcs = ppdu_info->mcs;
1264 		mon_rx_user_status->nss = ppdu_info->nss;
1265 	}
1266 
1267 	if (!(ppdu_info->reception_type == HAL_RX_RECEPTION_TYPE_MU_MIMO ||
1268 	      ppdu_info->reception_type == HAL_RX_RECEPTION_TYPE_MU_OFDMA ||
1269 	      ppdu_info->reception_type == HAL_RX_RECEPTION_TYPE_MU_OFDMA_MIMO))
1270 		return;
1271 
1272 	/* RU allocation present only for OFDMA reception */
1273 	ru_type_80_0 = le32_get_bits(rx_usr_info->info2, HAL_RX_USR_INFO2_RU_TYPE_80_0);
1274 	ru_start_index_80_0 = le32_get_bits(rx_usr_info->info3,
1275 					    HAL_RX_USR_INFO3_RU_START_IDX_80_0);
1276 	if (ru_type_80_0 != HAL_EHT_RU_NONE) {
1277 		ru_size += ru_type_80_0;
1278 		ru_index_per80mhz = ru_start_index_80_0;
1279 		ru_index = ru_index_per80mhz;
1280 		ru_index_320mhz |= HAL_RU_PER80(ru_type_80_0, 0, ru_index_per80mhz);
1281 		num_80mhz_with_ru++;
1282 	}
1283 
1284 	ru_type_80_1 = le32_get_bits(rx_usr_info->info2, HAL_RX_USR_INFO2_RU_TYPE_80_1);
1285 	ru_start_index_80_1 = le32_get_bits(rx_usr_info->info3,
1286 					    HAL_RX_USR_INFO3_RU_START_IDX_80_1);
1287 	if (ru_type_80_1 != HAL_EHT_RU_NONE) {
1288 		ru_size += ru_type_80_1;
1289 		ru_index_per80mhz = ru_start_index_80_1;
1290 		ru_index = ru_index_per80mhz;
1291 		ru_index_320mhz |= HAL_RU_PER80(ru_type_80_1, 1, ru_index_per80mhz);
1292 		num_80mhz_with_ru++;
1293 	}
1294 
1295 	ru_type_80_2 = le32_get_bits(rx_usr_info->info2, HAL_RX_USR_INFO2_RU_TYPE_80_2);
1296 	ru_start_index_80_2 = le32_get_bits(rx_usr_info->info3,
1297 					    HAL_RX_USR_INFO3_RU_START_IDX_80_2);
1298 	if (ru_type_80_2 != HAL_EHT_RU_NONE) {
1299 		ru_size += ru_type_80_2;
1300 		ru_index_per80mhz = ru_start_index_80_2;
1301 		ru_index = ru_index_per80mhz;
1302 		ru_index_320mhz |= HAL_RU_PER80(ru_type_80_2, 2, ru_index_per80mhz);
1303 		num_80mhz_with_ru++;
1304 	}
1305 
1306 	ru_type_80_3 = le32_get_bits(rx_usr_info->info2, HAL_RX_USR_INFO2_RU_TYPE_80_3);
1307 	ru_start_index_80_3 = le32_get_bits(rx_usr_info->info3,
1308 					    HAL_RX_USR_INFO3_RU_START_IDX_80_3);
1309 	if (ru_type_80_3 != HAL_EHT_RU_NONE) {
1310 		ru_size += ru_type_80_3;
1311 		ru_index_per80mhz = ru_start_index_80_3;
1312 		ru_index = ru_index_per80mhz;
1313 		ru_index_320mhz |= HAL_RU_PER80(ru_type_80_3, 3, ru_index_per80mhz);
1314 		num_80mhz_with_ru++;
1315 	}
1316 
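	/*
	 * RUs spanning more than one 80 MHz segment form a multi-RU (MRU):
	 * collapse the accumulated per-80 MHz combination into a single MRU
	 * index; the ru_size += 4 below appears to shift the summed per-80 MHz
	 * RU types onto the MRU entries of the HAL RU size enumeration.
	 */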
1317 	if (num_80mhz_with_ru > 1) {
1318 		/* Calculate the MRU index */
1319 		switch (ru_index_320mhz) {
1320 		case HAL_EHT_RU_996_484_0:
1321 		case HAL_EHT_RU_996x2_484_0:
1322 		case HAL_EHT_RU_996x3_484_0:
1323 			ru_index = 0;
1324 			break;
1325 		case HAL_EHT_RU_996_484_1:
1326 		case HAL_EHT_RU_996x2_484_1:
1327 		case HAL_EHT_RU_996x3_484_1:
1328 			ru_index = 1;
1329 			break;
1330 		case HAL_EHT_RU_996_484_2:
1331 		case HAL_EHT_RU_996x2_484_2:
1332 		case HAL_EHT_RU_996x3_484_2:
1333 			ru_index = 2;
1334 			break;
1335 		case HAL_EHT_RU_996_484_3:
1336 		case HAL_EHT_RU_996x2_484_3:
1337 		case HAL_EHT_RU_996x3_484_3:
1338 			ru_index = 3;
1339 			break;
1340 		case HAL_EHT_RU_996_484_4:
1341 		case HAL_EHT_RU_996x2_484_4:
1342 		case HAL_EHT_RU_996x3_484_4:
1343 			ru_index = 4;
1344 			break;
1345 		case HAL_EHT_RU_996_484_5:
1346 		case HAL_EHT_RU_996x2_484_5:
1347 		case HAL_EHT_RU_996x3_484_5:
1348 			ru_index = 5;
1349 			break;
1350 		case HAL_EHT_RU_996_484_6:
1351 		case HAL_EHT_RU_996x2_484_6:
1352 		case HAL_EHT_RU_996x3_484_6:
1353 			ru_index = 6;
1354 			break;
1355 		case HAL_EHT_RU_996_484_7:
1356 		case HAL_EHT_RU_996x2_484_7:
1357 		case HAL_EHT_RU_996x3_484_7:
1358 			ru_index = 7;
1359 			break;
1360 		case HAL_EHT_RU_996x2_484_8:
1361 			ru_index = 8;
1362 			break;
1363 		case HAL_EHT_RU_996x2_484_9:
1364 			ru_index = 9;
1365 			break;
1366 		case HAL_EHT_RU_996x2_484_10:
1367 			ru_index = 10;
1368 			break;
1369 		case HAL_EHT_RU_996x2_484_11:
1370 			ru_index = 11;
1371 			break;
1372 		default:
1373 			ru_index = HAL_EHT_RU_INVALID;
1374 			break;
1375 		}
1376 
1377 		ru_size += 4;
1378 	}
1379 
1380 	rtap_ru_size = hal_rx_mon_hal_ru_size_to_ath12k_ru_size(ru_size);
1381 	if (rtap_ru_size != ATH12K_EHT_RU_INVALID) {
1382 		u32 known, data;
1383 
1384 		known = __le32_to_cpu(eht->known);
1385 		known |= IEEE80211_RADIOTAP_EHT_KNOWN_RU_MRU_SIZE_OM;
1386 		eht->known = cpu_to_le32(known);
1387 
1388 		data = __le32_to_cpu(eht->data[1]);
1389 		data |=	u32_encode_bits(rtap_ru_size,
1390 					IEEE80211_RADIOTAP_EHT_DATA1_RU_SIZE);
1391 		eht->data[1] = cpu_to_le32(data);
1392 	}
1393 
1394 	if (ru_index != HAL_EHT_RU_INVALID) {
1395 		u32 known, data;
1396 
1397 		known = __le32_to_cpu(eht->known);
1398 		known |= IEEE80211_RADIOTAP_EHT_KNOWN_RU_MRU_INDEX_OM;
1399 		eht->known = cpu_to_le32(known);
1400 
1401 		data = __le32_to_cpu(eht->data[1]);
1402 		data |=	u32_encode_bits(ru_index,
1403 					IEEE80211_RADIOTAP_EHT_DATA1_RU_INDEX);
1404 		eht->data[1] = cpu_to_le32(data);
1405 	}
1406 
1407 	if (mon_rx_user_status && ru_index != HAL_EHT_RU_INVALID &&
1408 	    rtap_ru_size != ATH12K_EHT_RU_INVALID) {
1409 		mon_rx_user_status->ul_ofdma_ru_start_index = ru_index;
1410 		mon_rx_user_status->ul_ofdma_ru_size = rtap_ru_size;
1411 
1412 		ru_width = hal_rx_ul_ofdma_ru_size_to_width(rtap_ru_size);
1413 
1414 		mon_rx_user_status->ul_ofdma_ru_width = ru_width;
1415 		mon_rx_user_status->ofdma_info_valid = 1;
1416 	}
1417 }
1418 
1419 static void ath12k_dp_mon_parse_rx_msdu_end_err(u32 info, u32 *errmap)
1420 {
1421 	if (info & RX_MSDU_END_INFO13_FCS_ERR)
1422 		*errmap |= HAL_RX_MPDU_ERR_FCS;
1423 
1424 	if (info & RX_MSDU_END_INFO13_DECRYPT_ERR)
1425 		*errmap |= HAL_RX_MPDU_ERR_DECRYPT;
1426 
1427 	if (info & RX_MSDU_END_INFO13_TKIP_MIC_ERR)
1428 		*errmap |= HAL_RX_MPDU_ERR_TKIP_MIC;
1429 
1430 	if (info & RX_MSDU_END_INFO13_A_MSDU_ERROR)
1431 		*errmap |= HAL_RX_MPDU_ERR_AMSDU_ERR;
1432 
1433 	if (info & RX_MSDU_END_INFO13_OVERFLOW_ERR)
1434 		*errmap |= HAL_RX_MPDU_ERR_OVERFLOW;
1435 
1436 	if (info & RX_MSDU_END_INFO13_MSDU_LEN_ERR)
1437 		*errmap |= HAL_RX_MPDU_ERR_MSDU_LEN;
1438 
1439 	if (info & RX_MSDU_END_INFO13_MPDU_LEN_ERR)
1440 		*errmap |= HAL_RX_MPDU_ERR_MPDU_LEN;
1441 }
1442 
1443 static void
1444 ath12k_parse_cmn_usr_info(const struct hal_phyrx_common_user_info *cmn_usr_info,
1445 			  struct hal_rx_mon_ppdu_info *ppdu_info)
1446 {
1447 	struct hal_rx_radiotap_eht *eht = &ppdu_info->eht_info.eht;
1448 	u32 known, data, cp_setting, ltf_size;
1449 
1450 	known = __le32_to_cpu(eht->known);
1451 	known |= IEEE80211_RADIOTAP_EHT_KNOWN_GI |
1452 		IEEE80211_RADIOTAP_EHT_KNOWN_EHT_LTF;
1453 	eht->known = cpu_to_le32(known);
1454 
1455 	cp_setting = le32_get_bits(cmn_usr_info->info0,
1456 				   HAL_RX_CMN_USR_INFO0_CP_SETTING);
1457 	ltf_size = le32_get_bits(cmn_usr_info->info0,
1458 				 HAL_RX_CMN_USR_INFO0_LTF_SIZE);
1459 
1460 	data = __le32_to_cpu(eht->data[0]);
1461 	data |= u32_encode_bits(cp_setting, IEEE80211_RADIOTAP_EHT_DATA0_GI);
1462 	data |= u32_encode_bits(ltf_size, IEEE80211_RADIOTAP_EHT_DATA0_LTF);
1463 	eht->data[0] = cpu_to_le32(data);
1464 
1465 	if (!ppdu_info->ltf_size)
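	/*
	 * Prefer the GI/LTF values already derived from the HE/EHT SIG TLVs;
	 * fall back to the PHY common user info only when nothing has been
	 * recorded yet.
	 */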
1466 		ppdu_info->ltf_size = ltf_size;
1467 	if (!ppdu_info->gi)
1468 		ppdu_info->gi = cp_setting;
1469 }
1470 
1471 static void
1472 ath12k_dp_mon_parse_status_msdu_end(struct ath12k_mon_data *pmon,
1473 				    const struct hal_rx_msdu_end *msdu_end)
1474 {
1475 	ath12k_dp_mon_parse_rx_msdu_end_err(__le32_to_cpu(msdu_end->info2),
1476 					    &pmon->err_bitmap);
1477 	pmon->decap_format = le32_get_bits(msdu_end->info1,
1478 					   RX_MSDU_END_INFO11_DECAP_FORMAT);
1479 }
1480 
1481 static enum hal_rx_mon_status
1482 ath12k_dp_mon_rx_parse_status_tlv(struct ath12k *ar,
1483 				  struct ath12k_mon_data *pmon,
1484 				  const struct hal_tlv_64_hdr *tlv)
1485 {
1486 	struct hal_rx_mon_ppdu_info *ppdu_info = &pmon->mon_ppdu_info;
1487 	const void *tlv_data = tlv->value;
1488 	u32 info[7], userid;
1489 	u16 tlv_tag, tlv_len;
1490 
1491 	tlv_tag = le64_get_bits(tlv->tl, HAL_TLV_64_HDR_TAG);
1492 	tlv_len = le64_get_bits(tlv->tl, HAL_TLV_64_HDR_LEN);
1493 	userid = le64_get_bits(tlv->tl, HAL_TLV_64_USR_ID);
1494 
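	/*
	 * A change of TLV tag while an aggregation is in progress means the
	 * fragmented EHT-SIG TLV is complete: parse the aggregated buffer
	 * and reset the aggregation state before handling the new TLV.
	 */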
1495 	if (ppdu_info->tlv_aggr.in_progress && ppdu_info->tlv_aggr.tlv_tag != tlv_tag) {
1496 		ath12k_dp_mon_parse_eht_sig_hdr(ppdu_info, ppdu_info->tlv_aggr.buf);
1497 
1498 		ppdu_info->tlv_aggr.in_progress = false;
1499 		ppdu_info->tlv_aggr.cur_len = 0;
1500 	}
1501 
1502 	switch (tlv_tag) {
1503 	case HAL_RX_PPDU_START: {
1504 		const struct hal_rx_ppdu_start *ppdu_start = tlv_data;
1505 
1506 		u64 ppdu_ts = ath12k_le32hilo_to_u64(ppdu_start->ppdu_start_ts_63_32,
1507 						     ppdu_start->ppdu_start_ts_31_0);
1508 
1509 		info[0] = __le32_to_cpu(ppdu_start->info0);
1510 
1511 		ppdu_info->ppdu_id = u32_get_bits(info[0],
1512 						  HAL_RX_PPDU_START_INFO0_PPDU_ID);
1513 
1514 		info[1] = __le32_to_cpu(ppdu_start->info1);
1515 		ppdu_info->chan_num = u32_get_bits(info[1],
1516 						   HAL_RX_PPDU_START_INFO1_CHAN_NUM);
1517 		ppdu_info->freq = u32_get_bits(info[1],
1518 					       HAL_RX_PPDU_START_INFO1_CHAN_FREQ);
1519 		ppdu_info->ppdu_ts = ppdu_ts;
1520 
1521 		if (ppdu_info->ppdu_id != ppdu_info->last_ppdu_id) {
1522 			ppdu_info->last_ppdu_id = ppdu_info->ppdu_id;
1523 			ppdu_info->num_users = 0;
1524 			memset(&ppdu_info->mpdu_fcs_ok_bitmap, 0,
1525 			       HAL_RX_NUM_WORDS_PER_PPDU_BITMAP *
1526 			       sizeof(ppdu_info->mpdu_fcs_ok_bitmap[0]));
1527 		}
1528 		break;
1529 	}
1530 	case HAL_RX_PPDU_END_USER_STATS: {
1531 		const struct hal_rx_ppdu_end_user_stats *eu_stats = tlv_data;
1532 		u32 tid_bitmap;
1533 
1534 		info[0] = __le32_to_cpu(eu_stats->info0);
1535 		info[1] = __le32_to_cpu(eu_stats->info1);
1536 		info[2] = __le32_to_cpu(eu_stats->info2);
1537 		info[4] = __le32_to_cpu(eu_stats->info4);
1538 		info[5] = __le32_to_cpu(eu_stats->info5);
1539 		info[6] = __le32_to_cpu(eu_stats->info6);
1540 
1541 		ppdu_info->ast_index =
1542 			u32_get_bits(info[2], HAL_RX_PPDU_END_USER_STATS_INFO2_AST_INDEX);
1543 		ppdu_info->fc_valid =
1544 			u32_get_bits(info[1], HAL_RX_PPDU_END_USER_STATS_INFO1_FC_VALID);
1545 		tid_bitmap = u32_get_bits(info[6],
1546 					  HAL_RX_PPDU_END_USER_STATS_INFO6_TID_BITMAP);
1547 		ppdu_info->tid = ffs(tid_bitmap) - 1;
1548 		ppdu_info->tcp_msdu_count =
1549 			u32_get_bits(info[4],
1550 				     HAL_RX_PPDU_END_USER_STATS_INFO4_TCP_MSDU_CNT);
1551 		ppdu_info->udp_msdu_count =
1552 			u32_get_bits(info[4],
1553 				     HAL_RX_PPDU_END_USER_STATS_INFO4_UDP_MSDU_CNT);
1554 		ppdu_info->other_msdu_count =
1555 			u32_get_bits(info[5],
1556 				     HAL_RX_PPDU_END_USER_STATS_INFO5_OTHER_MSDU_CNT);
1557 		ppdu_info->tcp_ack_msdu_count =
1558 			u32_get_bits(info[5],
1559 				     HAL_RX_PPDU_END_USER_STATS_INFO5_TCP_ACK_MSDU_CNT);
1560 		ppdu_info->preamble_type =
1561 			u32_get_bits(info[1],
1562 				     HAL_RX_PPDU_END_USER_STATS_INFO1_PKT_TYPE);
1563 		ppdu_info->num_mpdu_fcs_ok =
1564 			u32_get_bits(info[1],
1565 				     HAL_RX_PPDU_END_USER_STATS_INFO1_MPDU_CNT_FCS_OK);
1566 		ppdu_info->num_mpdu_fcs_err =
1567 			u32_get_bits(info[0],
1568 				     HAL_RX_PPDU_END_USER_STATS_INFO0_MPDU_CNT_FCS_ERR);
1569 		ppdu_info->peer_id =
1570 			u32_get_bits(info[0], HAL_RX_PPDU_END_USER_STATS_INFO0_PEER_ID);
1571 
1572 		switch (ppdu_info->preamble_type) {
1573 		case HAL_RX_PREAMBLE_11N:
1574 			ppdu_info->ht_flags = 1;
1575 			break;
1576 		case HAL_RX_PREAMBLE_11AC:
1577 			ppdu_info->vht_flags = 1;
1578 			break;
1579 		case HAL_RX_PREAMBLE_11AX:
1580 			ppdu_info->he_flags = 1;
1581 			break;
1582 		case HAL_RX_PREAMBLE_11BE:
1583 			ppdu_info->is_eht = true;
1584 			break;
1585 		default:
1586 			break;
1587 		}
1588 
1589 		if (userid < HAL_MAX_UL_MU_USERS) {
1590 			struct hal_rx_user_status *rxuser_stats =
1591 				&ppdu_info->userstats[userid];
1592 
1593 			if (ppdu_info->num_mpdu_fcs_ok > 1 ||
1594 			    ppdu_info->num_mpdu_fcs_err > 1)
1595 				ppdu_info->userstats[userid].ampdu_present = true;
1596 
1597 			ppdu_info->num_users += 1;
1598 
1599 			ath12k_dp_mon_rx_handle_ofdma_info(eu_stats, rxuser_stats);
1600 			ath12k_dp_mon_rx_populate_mu_user_info(eu_stats, ppdu_info,
1601 							       rxuser_stats);
1602 		}
1603 		ppdu_info->mpdu_fcs_ok_bitmap[0] = __le32_to_cpu(eu_stats->rsvd1[0]);
1604 		ppdu_info->mpdu_fcs_ok_bitmap[1] = __le32_to_cpu(eu_stats->rsvd1[1]);
1605 		break;
1606 	}
1607 	case HAL_RX_PPDU_END_USER_STATS_EXT: {
1608 		const struct hal_rx_ppdu_end_user_stats_ext *eu_stats = tlv_data;
1609 
1610 		ppdu_info->mpdu_fcs_ok_bitmap[2] = __le32_to_cpu(eu_stats->info1);
1611 		ppdu_info->mpdu_fcs_ok_bitmap[3] = __le32_to_cpu(eu_stats->info2);
1612 		ppdu_info->mpdu_fcs_ok_bitmap[4] = __le32_to_cpu(eu_stats->info3);
1613 		ppdu_info->mpdu_fcs_ok_bitmap[5] = __le32_to_cpu(eu_stats->info4);
1614 		ppdu_info->mpdu_fcs_ok_bitmap[6] = __le32_to_cpu(eu_stats->info5);
1615 		ppdu_info->mpdu_fcs_ok_bitmap[7] = __le32_to_cpu(eu_stats->info6);
1616 		break;
1617 	}
1618 	case HAL_PHYRX_HT_SIG:
1619 		ath12k_dp_mon_parse_ht_sig(tlv_data, ppdu_info);
1620 		break;
1621 
1622 	case HAL_PHYRX_L_SIG_B:
1623 		ath12k_dp_mon_parse_l_sig_b(tlv_data, ppdu_info);
1624 		break;
1625 
1626 	case HAL_PHYRX_L_SIG_A:
1627 		ath12k_dp_mon_parse_l_sig_a(tlv_data, ppdu_info);
1628 		break;
1629 
1630 	case HAL_PHYRX_VHT_SIG_A:
1631 		ath12k_dp_mon_parse_vht_sig_a(tlv_data, ppdu_info);
1632 		break;
1633 
1634 	case HAL_PHYRX_HE_SIG_A_SU:
1635 		ath12k_dp_mon_parse_he_sig_su(tlv_data, ppdu_info);
1636 		break;
1637 
1638 	case HAL_PHYRX_HE_SIG_A_MU_DL:
1639 		ath12k_dp_mon_parse_he_sig_mu(tlv_data, ppdu_info);
1640 		break;
1641 
1642 	case HAL_PHYRX_HE_SIG_B1_MU:
1643 		ath12k_dp_mon_parse_he_sig_b1_mu(tlv_data, ppdu_info);
1644 		break;
1645 
1646 	case HAL_PHYRX_HE_SIG_B2_MU:
1647 		ath12k_dp_mon_parse_he_sig_b2_mu(tlv_data, ppdu_info);
1648 		break;
1649 
1650 	case HAL_PHYRX_HE_SIG_B2_OFDMA:
1651 		ath12k_dp_mon_parse_he_sig_b2_ofdma(tlv_data, ppdu_info);
1652 		break;
1653 
1654 	case HAL_PHYRX_RSSI_LEGACY: {
1655 		const struct hal_rx_phyrx_rssi_legacy_info *rssi = tlv_data;
1656 
1657 		info[0] = __le32_to_cpu(rssi->info0);
1658 		info[2] = __le32_to_cpu(rssi->info2);
1659 
1660 		/* TODO: The combined RSSI is not accurate in the MU case;
1661 		 * the RSSI for MU needs to be retrieved from the
1662 		 * PHYRX_OTHER_RECEIVE_INFO TLV.
1663 		 */
1664 		ppdu_info->rssi_comb =
1665 			u32_get_bits(info[2],
1666 				     HAL_RX_RSSI_LEGACY_INFO_INFO2_RSSI_COMB_PPDU);
1667 
1668 		ppdu_info->bw = u32_get_bits(info[0],
1669 					     HAL_RX_RSSI_LEGACY_INFO_INFO0_RX_BW);
1670 		break;
1671 	}
1672 	case HAL_PHYRX_COMMON_USER_INFO: {
1673 		ath12k_parse_cmn_usr_info(tlv_data, ppdu_info);
1674 		break;
1675 	}
1676 	case HAL_RX_PPDU_START_USER_INFO:
1677 		ath12k_dp_mon_hal_rx_parse_user_info(tlv_data, userid, ppdu_info);
1678 		break;
1679 
1680 	case HAL_RXPCU_PPDU_END_INFO: {
1681 		const struct hal_rx_ppdu_end_duration *ppdu_rx_duration = tlv_data;
1682 
1683 		info[0] = __le32_to_cpu(ppdu_rx_duration->info0);
1684 		ppdu_info->rx_duration =
1685 			u32_get_bits(info[0], HAL_RX_PPDU_END_DURATION);
1686 		ppdu_info->tsft = __le32_to_cpu(ppdu_rx_duration->rsvd0[1]);
1687 		ppdu_info->tsft = (ppdu_info->tsft << 32) |
1688 				   __le32_to_cpu(ppdu_rx_duration->rsvd0[0]);
1689 		break;
1690 	}
1691 	case HAL_RX_MPDU_START: {
1692 		const struct hal_rx_mpdu_start *mpdu_start = tlv_data;
1693 		u16 peer_id;
1694 
1695 		info[1] = __le32_to_cpu(mpdu_start->info1);
1696 		peer_id = u32_get_bits(info[1], HAL_RX_MPDU_START_INFO1_PEERID);
1697 		if (peer_id)
1698 			ppdu_info->peer_id = peer_id;
1699 
1700 		ppdu_info->mpdu_len += u32_get_bits(info[1],
1701 						    HAL_RX_MPDU_START_INFO2_MPDU_LEN);
1702 		if (userid < HAL_MAX_UL_MU_USERS) {
1703 			info[0] = __le32_to_cpu(mpdu_start->info0);
1704 			ppdu_info->userid = userid;
1705 			ppdu_info->userstats[userid].ampdu_id =
1706 				u32_get_bits(info[0], HAL_RX_MPDU_START_INFO0_PPDU_ID);
1707 		}
1708 
1709 		return HAL_RX_MON_STATUS_MPDU_START;
1710 	}
1711 	case HAL_RX_MSDU_START:
1712 		/* TODO: add msdu start parsing logic */
1713 		break;
1714 	case HAL_MON_BUF_ADDR:
1715 		return HAL_RX_MON_STATUS_BUF_ADDR;
1716 	case HAL_RX_MSDU_END:
1717 		ath12k_dp_mon_parse_status_msdu_end(pmon, tlv_data);
1718 		return HAL_RX_MON_STATUS_MSDU_END;
1719 	case HAL_RX_MPDU_END:
1720 		return HAL_RX_MON_STATUS_MPDU_END;
1721 	case HAL_PHYRX_GENERIC_U_SIG:
1722 		ath12k_dp_mon_hal_rx_parse_u_sig_hdr(tlv_data, ppdu_info);
1723 		break;
1724 	case HAL_PHYRX_GENERIC_EHT_SIG:
1725 		/* Handle the case where aggregation is in progress
1726 		 * or the current TLV is one of the TLVs that should be
1727 		 * aggregated.
1728 		 */
1729 		if (!ppdu_info->tlv_aggr.in_progress) {
1730 			ppdu_info->tlv_aggr.in_progress = true;
1731 			ppdu_info->tlv_aggr.tlv_tag = tlv_tag;
1732 			ppdu_info->tlv_aggr.cur_len = 0;
1733 		}
1734 
1735 		ppdu_info->is_eht = true;
1736 
1737 		ath12k_dp_mon_hal_aggr_tlv(ppdu_info, tlv_len, tlv_data);
1738 		break;
1739 	case HAL_DUMMY:
1740 		return HAL_RX_MON_STATUS_BUF_DONE;
1741 	case HAL_RX_PPDU_END_STATUS_DONE:
1742 	case 0:
1743 		return HAL_RX_MON_STATUS_PPDU_DONE;
1744 	default:
1745 		break;
1746 	}
1747 
1748 	return HAL_RX_MON_STATUS_PPDU_NOT_DONE;
1749 }
1750 
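/* Fill the basic mac80211 rx_status fields (frequency, bandwidth, NSS) from
 * the parsed PPDU info and derive the NL80211 band from the center frequency;
 * NUM_NL80211_BANDS is used as an "unknown band" marker.
 */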
1751 static void
1752 ath12k_dp_mon_fill_rx_stats_info(struct ath12k *ar,
1753 				 struct hal_rx_mon_ppdu_info *ppdu_info,
1754 				 struct ieee80211_rx_status *rx_status)
1755 {
1756 	u32 center_freq = ppdu_info->freq;
1757 
1758 	rx_status->freq = center_freq;
1759 	rx_status->bw = ath12k_mac_bw_to_mac80211_bw(ppdu_info->bw);
1760 	rx_status->nss = ppdu_info->nss;
1761 	rx_status->rate_idx = 0;
1762 	rx_status->encoding = RX_ENC_LEGACY;
1763 	rx_status->flag |= RX_FLAG_NO_SIGNAL_VAL;
1764 
1765 	if (center_freq >= ATH12K_MIN_6GHZ_FREQ &&
1766 	    center_freq <= ATH12K_MAX_6GHZ_FREQ) {
1767 		rx_status->band = NL80211_BAND_6GHZ;
1768 	} else if (center_freq >= ATH12K_MIN_2GHZ_FREQ &&
1769 		   center_freq <= ATH12K_MAX_2GHZ_FREQ) {
1770 		rx_status->band = NL80211_BAND_2GHZ;
1771 	} else if (center_freq >= ATH12K_MIN_5GHZ_FREQ &&
1772 		   center_freq <= ATH12K_MAX_5GHZ_FREQ) {
1773 		rx_status->band = NL80211_BAND_5GHZ;
1774 	} else {
1775 		rx_status->band = NUM_NL80211_BANDS;
1776 	}
1777 }
1778 
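/* Allocate a monitor status buffer, align its data pointer, DMA-map it for
 * the device and track it in the ring IDR. Returns the skb on success and
 * reports the allocated IDR slot through *buf_id.
 */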
1779 static struct sk_buff
1780 *ath12k_dp_rx_alloc_mon_status_buf(struct ath12k_base *ab,
1781 				   struct dp_rxdma_mon_ring *rx_ring,
1782 				   int *buf_id)
1783 {
1784 	struct sk_buff *skb;
1785 	dma_addr_t paddr;
1786 
1787 	skb = dev_alloc_skb(RX_MON_STATUS_BUF_SIZE);
1788 
1789 	if (!skb)
1790 		goto fail_alloc_skb;
1791 
1792 	if (!IS_ALIGNED((unsigned long)skb->data,
1793 			RX_MON_STATUS_BUF_ALIGN)) {
1794 		skb_pull(skb, PTR_ALIGN(skb->data, RX_MON_STATUS_BUF_ALIGN) -
1795 			 skb->data);
1796 	}
1797 
1798 	paddr = dma_map_single(ab->dev, skb->data,
1799 			       skb->len + skb_tailroom(skb),
1800 			       DMA_FROM_DEVICE);
1801 	if (unlikely(dma_mapping_error(ab->dev, paddr)))
1802 		goto fail_free_skb;
1803 
1804 	spin_lock_bh(&rx_ring->idr_lock);
1805 	*buf_id = idr_alloc(&rx_ring->bufs_idr, skb, 0,
1806 			    rx_ring->bufs_max, GFP_ATOMIC);
1807 	spin_unlock_bh(&rx_ring->idr_lock);
1808 	if (*buf_id < 0)
1809 		goto fail_dma_unmap;
1810 
1811 	ATH12K_SKB_RXCB(skb)->paddr = paddr;
1812 	return skb;
1813 
1814 fail_dma_unmap:
1815 	dma_unmap_single(ab->dev, paddr, skb->len + skb_tailroom(skb),
1816 			 DMA_FROM_DEVICE);
1817 fail_free_skb:
1818 	dev_kfree_skb_any(skb);
1819 fail_alloc_skb:
1820 	return NULL;
1821 }
1822 
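/* Peek at the next source descriptor of the status ring and check whether the
 * corresponding buffer has been completed by the hardware, i.e. whether it
 * starts with a HAL_RX_STATUS_BUFFER_DONE TLV.
 */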
1823 static enum dp_mon_status_buf_state
1824 ath12k_dp_rx_mon_buf_done(struct ath12k_base *ab, struct hal_srng *srng,
1825 			  struct dp_rxdma_mon_ring *rx_ring)
1826 {
1827 	struct ath12k_skb_rxcb *rxcb;
1828 	struct hal_tlv_64_hdr *tlv;
1829 	struct sk_buff *skb;
1830 	void *status_desc;
1831 	dma_addr_t paddr;
1832 	u32 cookie;
1833 	int buf_id;
1834 	u8 rbm;
1835 
1836 	status_desc = ath12k_hal_srng_src_next_peek(ab, srng);
1837 	if (!status_desc)
1838 		return DP_MON_STATUS_NO_DMA;
1839 
1840 	ath12k_hal_rx_buf_addr_info_get(status_desc, &paddr, &cookie, &rbm);
1841 
1842 	buf_id = u32_get_bits(cookie, DP_RXDMA_BUF_COOKIE_BUF_ID);
1843 
1844 	spin_lock_bh(&rx_ring->idr_lock);
1845 	skb = idr_find(&rx_ring->bufs_idr, buf_id);
1846 	spin_unlock_bh(&rx_ring->idr_lock);
1847 
1848 	if (!skb)
1849 		return DP_MON_STATUS_NO_DMA;
1850 
1851 	rxcb = ATH12K_SKB_RXCB(skb);
1852 	dma_sync_single_for_cpu(ab->dev, rxcb->paddr,
1853 				skb->len + skb_tailroom(skb),
1854 				DMA_FROM_DEVICE);
1855 
1856 	tlv = (struct hal_tlv_64_hdr *)skb->data;
1857 	if (le64_get_bits(tlv->tl, HAL_TLV_64_HDR_TAG) != HAL_RX_STATUS_BUFFER_DONE)
1858 		return DP_MON_STATUS_NO_DMA;
1859 
1860 	return DP_MON_STATUS_REPLINISH;
1861 }
1862 
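/* Compare the PPDU ID seen on the destination ring against the tracked status
 * ring PPDU ID, taking counter wrap-around into account. Returns the new PPDU
 * ID when the tracked value is updated, 0 otherwise.
 */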
1863 static u32 ath12k_dp_mon_comp_ppduid(u32 msdu_ppdu_id, u32 *ppdu_id)
1864 {
1865 	u32 ret = 0;
1866 
1867 	if ((*ppdu_id < msdu_ppdu_id) &&
1868 	    ((msdu_ppdu_id - *ppdu_id) < DP_NOT_PPDU_ID_WRAP_AROUND)) {
1869 		/* Hold on to the mon dest ring and reap the mon status ring. */
1870 		*ppdu_id = msdu_ppdu_id;
1871 		ret = msdu_ppdu_id;
1872 	} else if ((*ppdu_id > msdu_ppdu_id) &&
1873 		((*ppdu_id - msdu_ppdu_id) > DP_NOT_PPDU_ID_WRAP_AROUND)) {
1874 		/* PPDU ID has exceeded the maximum value and will
1875 		 * restart from 0.
1876 		 */
1877 		*ppdu_id = msdu_ppdu_id;
1878 		ret = msdu_ppdu_id;
1879 	}
1880 	return ret;
1881 }
1882 
1883 static
1884 void ath12k_dp_mon_next_link_desc_get(struct hal_rx_msdu_link *msdu_link,
1885 				      dma_addr_t *paddr, u32 *sw_cookie, u8 *rbm,
1886 				      struct ath12k_buffer_addr **pp_buf_addr_info)
1887 {
1888 	struct ath12k_buffer_addr *buf_addr_info;
1889 
1890 	buf_addr_info = &msdu_link->buf_addr_info;
1891 
1892 	ath12k_hal_rx_buf_addr_info_get(buf_addr_info, paddr, sw_cookie, rbm);
1893 
1894 	*pp_buf_addr_info = buf_addr_info;
1895 }
1896 
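/* Translate the parsed preamble type, MCS, NSS and GI into mac80211 rate
 * fields. Legacy rates are mapped through the band's rate table, HT folds
 * NSS into the MCS-based index, and VHT/HE/EHT report the MCS directly.
 */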
1897 static void
1898 ath12k_dp_mon_fill_rx_rate(struct ath12k *ar,
1899 			   struct hal_rx_mon_ppdu_info *ppdu_info,
1900 			   struct ieee80211_rx_status *rx_status)
1901 {
1902 	struct ieee80211_supported_band *sband;
1903 	enum rx_msdu_start_pkt_type pkt_type;
1904 	u8 rate_mcs, nss, sgi;
1905 	bool is_cck;
1906 
1907 	pkt_type = ppdu_info->preamble_type;
1908 	rate_mcs = ppdu_info->rate;
1909 	nss = ppdu_info->nss;
1910 	sgi = ppdu_info->gi;
1911 
1912 	switch (pkt_type) {
1913 	case RX_MSDU_START_PKT_TYPE_11A:
1914 	case RX_MSDU_START_PKT_TYPE_11B:
1915 		is_cck = (pkt_type == RX_MSDU_START_PKT_TYPE_11B);
1916 		if (rx_status->band < NUM_NL80211_BANDS) {
1917 			sband = &ar->mac.sbands[rx_status->band];
1918 			rx_status->rate_idx = ath12k_mac_hw_rate_to_idx(sband, rate_mcs,
1919 									is_cck);
1920 		}
1921 		break;
1922 	case RX_MSDU_START_PKT_TYPE_11N:
1923 		rx_status->encoding = RX_ENC_HT;
1924 		if (rate_mcs > ATH12K_HT_MCS_MAX) {
1925 			ath12k_warn(ar->ab,
1926 				    "received invalid MCS %d in HT mode\n",
1927 				     rate_mcs);
1928 			break;
1929 		}
1930 		rx_status->rate_idx = rate_mcs + (8 * (nss - 1));
1931 		if (sgi)
1932 			rx_status->enc_flags |= RX_ENC_FLAG_SHORT_GI;
1933 		break;
1934 	case RX_MSDU_START_PKT_TYPE_11AC:
1935 		rx_status->encoding = RX_ENC_VHT;
1936 		rx_status->rate_idx = rate_mcs;
1937 		if (rate_mcs > ATH12K_VHT_MCS_MAX) {
1938 			ath12k_warn(ar->ab,
1939 				    "received invalid MCS %d in VHT mode\n",
1940 				     rate_mcs);
1941 			break;
1942 		}
1943 		if (sgi)
1944 			rx_status->enc_flags |= RX_ENC_FLAG_SHORT_GI;
1945 		break;
1946 	case RX_MSDU_START_PKT_TYPE_11AX:
1947 		rx_status->rate_idx = rate_mcs;
1948 		if (rate_mcs > ATH12K_HE_MCS_MAX) {
1949 			ath12k_warn(ar->ab,
1950 				    "received invalid MCS %d in HE mode\n",
1951 				    rate_mcs);
1952 			break;
1953 		}
1954 		rx_status->encoding = RX_ENC_HE;
1955 		rx_status->he_gi = ath12k_he_gi_to_nl80211_he_gi(sgi);
1956 		break;
1957 	case RX_MSDU_START_PKT_TYPE_11BE:
1958 		rx_status->rate_idx = rate_mcs;
1959 		if (rate_mcs > ATH12K_EHT_MCS_MAX) {
1960 			ath12k_warn(ar->ab,
1961 				    "received invalid MCS %d in EHT mode\n",
1962 				    rate_mcs);
1963 			break;
1964 		}
1965 		rx_status->encoding = RX_ENC_EHT;
1966 		rx_status->he_gi = ath12k_he_gi_to_nl80211_he_gi(sgi);
1967 		break;
1968 	default:
1969 		ath12k_dbg(ar->ab, ATH12K_DBG_DATA,
1970 			   "monitor receives invalid preamble type %d",
1971 			    pkt_type);
1972 		break;
1973 	}
1974 }
1975 
1976 static void ath12k_dp_mon_rx_msdus_set_payload(struct ath12k *ar,
1977 					       struct sk_buff *head_msdu,
1978 					       struct sk_buff *tail_msdu)
1979 {
1980 	u32 rx_pkt_offset, l2_hdr_offset, total_offset;
1981 
1982 	rx_pkt_offset = ar->ab->hal.hal_desc_sz;
1983 	l2_hdr_offset =
1984 		ath12k_dp_rx_h_l3pad(ar->ab, (struct hal_rx_desc *)tail_msdu->data);
1985 
1986 	if (ar->ab->hw_params->rxdma1_enable)
1987 		total_offset = ATH12K_MON_RX_PKT_OFFSET;
1988 	else
1989 		total_offset = rx_pkt_offset + l2_hdr_offset;
1990 
1991 	skb_pull(head_msdu, total_offset);
1992 }
1993 
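/* Fill the rx status (band, frequency, rate) for a monitor MPDU and merge its
 * MSDU chain into a single skb: raw decap MSDUs are linked through the frag
 * list, while native wifi decap frames are rebuilt around the 802.11 header
 * found in the rx descriptor. Returns the head skb, or NULL on failure.
 */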
1994 static struct sk_buff *
1995 ath12k_dp_mon_rx_merg_msdus(struct ath12k *ar,
1996 			    struct dp_mon_mpdu *mon_mpdu,
1997 			    struct hal_rx_mon_ppdu_info *ppdu_info,
1998 			    struct ieee80211_rx_status *rxs)
1999 {
2000 	struct ath12k_base *ab = ar->ab;
2001 	struct sk_buff *msdu, *mpdu_buf, *prev_buf, *head_frag_list;
2002 	struct sk_buff *head_msdu, *tail_msdu;
2003 	struct hal_rx_desc *rx_desc;
2004 	u8 *hdr_desc, *dest, decap_format = mon_mpdu->decap_format;
2005 	struct ieee80211_hdr_3addr *wh;
2006 	struct ieee80211_channel *channel;
2007 	u32 frag_list_sum_len = 0;
2008 	u8 channel_num = ppdu_info->chan_num;
2009 
2010 	mpdu_buf = NULL;
2011 	head_msdu = mon_mpdu->head;
2012 	tail_msdu = mon_mpdu->tail;
2013 
2014 	if (!head_msdu || !tail_msdu)
2015 		goto err_merge_fail;
2016 
2017 	ath12k_dp_mon_fill_rx_stats_info(ar, ppdu_info, rxs);
2018 
2019 	if (unlikely(rxs->band == NUM_NL80211_BANDS ||
2020 		     !ath12k_ar_to_hw(ar)->wiphy->bands[rxs->band])) {
2021 		ath12k_dbg(ar->ab, ATH12K_DBG_DATA,
2022 			   "sband is NULL for status band %d channel_num %d center_freq %d pdev_id %d\n",
2023 			   rxs->band, channel_num, ppdu_info->freq, ar->pdev_idx);
2024 
2025 		spin_lock_bh(&ar->data_lock);
2026 		channel = ar->rx_channel;
2027 		if (channel) {
2028 			rxs->band = channel->band;
2029 			channel_num =
2030 				ieee80211_frequency_to_channel(channel->center_freq);
2031 		}
2032 		spin_unlock_bh(&ar->data_lock);
2033 	}
2034 
2035 	if (rxs->band < NUM_NL80211_BANDS)
2036 		rxs->freq = ieee80211_channel_to_frequency(channel_num,
2037 							   rxs->band);
2038 
2039 	ath12k_dp_mon_fill_rx_rate(ar, ppdu_info, rxs);
2040 
2041 	if (decap_format == DP_RX_DECAP_TYPE_RAW) {
2042 		ath12k_dp_mon_rx_msdus_set_payload(ar, head_msdu, tail_msdu);
2043 
2044 		prev_buf = head_msdu;
2045 		msdu = head_msdu->next;
2046 		head_frag_list = NULL;
2047 
2048 		while (msdu) {
2049 			ath12k_dp_mon_rx_msdus_set_payload(ar, head_msdu, tail_msdu);
2050 
2051 			if (!head_frag_list)
2052 				head_frag_list = msdu;
2053 
2054 			frag_list_sum_len += msdu->len;
2055 			prev_buf = msdu;
2056 			msdu = msdu->next;
2057 		}
2058 
2059 		prev_buf->next = NULL;
2060 
2061 		skb_trim(prev_buf, prev_buf->len);
2062 		if (head_frag_list) {
2063 			skb_shinfo(head_msdu)->frag_list = head_frag_list;
2064 			head_msdu->data_len = frag_list_sum_len;
2065 			head_msdu->len += head_msdu->data_len;
2066 			head_msdu->next = NULL;
2067 		}
2068 	} else if (decap_format == DP_RX_DECAP_TYPE_NATIVE_WIFI) {
2069 		u8 qos_pkt = 0;
2070 
2071 		rx_desc = (struct hal_rx_desc *)head_msdu->data;
2072 		hdr_desc =
2073 			ab->hal_rx_ops->rx_desc_get_msdu_payload(rx_desc);
2074 
2075 		/* Base size */
2076 		wh = (struct ieee80211_hdr_3addr *)hdr_desc;
2077 
2078 		if (ieee80211_is_data_qos(wh->frame_control))
2079 			qos_pkt = 1;
2080 
2081 		msdu = head_msdu;
2082 
2083 		while (msdu) {
2084 			ath12k_dp_mon_rx_msdus_set_payload(ar, head_msdu, tail_msdu);
2085 			if (qos_pkt) {
2086 				dest = skb_push(msdu, sizeof(__le16));
2087 				if (!dest)
2088 					goto err_merge_fail;
2089 				memcpy(dest, hdr_desc, sizeof(struct ieee80211_qos_hdr));
2090 			}
2091 			prev_buf = msdu;
2092 			msdu = msdu->next;
2093 		}
2094 		dest = skb_put(prev_buf, HAL_RX_FCS_LEN);
2095 		if (!dest)
2096 			goto err_merge_fail;
2097 
2098 		ath12k_dbg(ab, ATH12K_DBG_DATA,
2099 			   "mpdu_buf %p mpdu_buf->len %u",
2100 			   prev_buf, prev_buf->len);
2101 	} else {
2102 		ath12k_dbg(ab, ATH12K_DBG_DATA,
2103 			   "decap format %d is not supported!\n",
2104 			   decap_format);
2105 		goto err_merge_fail;
2106 	}
2107 
2108 	return head_msdu;
2109 
2110 err_merge_fail:
2111 	if (mpdu_buf && decap_format != DP_RX_DECAP_TYPE_RAW) {
2112 		ath12k_dbg(ab, ATH12K_DBG_DATA,
2113 			   "err_merge_fail mpdu_buf %p", mpdu_buf);
2114 		/* Free the head buffer */
2115 		dev_kfree_skb_any(mpdu_buf);
2116 	}
2117 	return NULL;
2118 }
2119 
2120 static void
2121 ath12k_dp_mon_rx_update_radiotap_he(struct hal_rx_mon_ppdu_info *rx_status,
2122 				    u8 *rtap_buf)
2123 {
2124 	u32 rtap_len = 0;
2125 
2126 	put_unaligned_le16(rx_status->he_data1, &rtap_buf[rtap_len]);
2127 	rtap_len += 2;
2128 
2129 	put_unaligned_le16(rx_status->he_data2, &rtap_buf[rtap_len]);
2130 	rtap_len += 2;
2131 
2132 	put_unaligned_le16(rx_status->he_data3, &rtap_buf[rtap_len]);
2133 	rtap_len += 2;
2134 
2135 	put_unaligned_le16(rx_status->he_data4, &rtap_buf[rtap_len]);
2136 	rtap_len += 2;
2137 
2138 	put_unaligned_le16(rx_status->he_data5, &rtap_buf[rtap_len]);
2139 	rtap_len += 2;
2140 
2141 	put_unaligned_le16(rx_status->he_data6, &rtap_buf[rtap_len]);
2142 }
2143 
2144 static void
2145 ath12k_dp_mon_rx_update_radiotap_he_mu(struct hal_rx_mon_ppdu_info *rx_status,
2146 				       u8 *rtap_buf)
2147 {
2148 	u32 rtap_len = 0;
2149 
2150 	put_unaligned_le16(rx_status->he_flags1, &rtap_buf[rtap_len]);
2151 	rtap_len += 2;
2152 
2153 	put_unaligned_le16(rx_status->he_flags2, &rtap_buf[rtap_len]);
2154 	rtap_len += 2;
2155 
2156 	rtap_buf[rtap_len] = rx_status->he_RU[0];
2157 	rtap_len += 1;
2158 
2159 	rtap_buf[rtap_len] = rx_status->he_RU[1];
2160 	rtap_len += 1;
2161 
2162 	rtap_buf[rtap_len] = rx_status->he_RU[2];
2163 	rtap_len += 1;
2164 
2165 	rtap_buf[rtap_len] = rx_status->he_RU[3];
2166 }
2167 
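/* Populate signal, A-MPDU and rate information in the rx status and push the
 * radiotap metadata (EHT/U-SIG TLVs, HE or HE-MU headers) that monitor mode
 * expects in front of the frame.
 */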
2168 static void ath12k_dp_mon_update_radiotap(struct ath12k *ar,
2169 					  struct hal_rx_mon_ppdu_info *ppduinfo,
2170 					  struct sk_buff *mon_skb,
2171 					  struct ieee80211_rx_status *rxs)
2172 {
2173 	struct ieee80211_supported_band *sband;
2174 	s32 noise_floor;
2175 	u8 *ptr = NULL;
2176 
2177 	spin_lock_bh(&ar->data_lock);
2178 	noise_floor = ath12k_pdev_get_noise_floor(ar);
2179 	spin_unlock_bh(&ar->data_lock);
2180 
2181 	rxs->flag |= RX_FLAG_MACTIME_START;
2182 	rxs->nss = ppduinfo->nss + 1;
2183 	if (test_bit(WMI_TLV_SERVICE_HW_DB2DBM_CONVERSION_SUPPORT,
2184 		     ar->ab->wmi_ab.svc_map))
2185 		rxs->signal = ppduinfo->rssi_comb;
2186 	else
2187 		rxs->signal = ppduinfo->rssi_comb + noise_floor;
2188 
2189 	if (ppduinfo->userstats[ppduinfo->userid].ampdu_present) {
2190 		rxs->flag |= RX_FLAG_AMPDU_DETAILS;
2191 		rxs->ampdu_reference = ppduinfo->userstats[ppduinfo->userid].ampdu_id;
2192 	}
2193 
2194 	if (ppduinfo->is_eht || ppduinfo->eht_usig) {
2195 		struct ieee80211_radiotap_tlv *tlv;
2196 		struct ieee80211_radiotap_eht *eht;
2197 		struct ieee80211_radiotap_eht_usig *usig;
2198 		u16 len = 0, i, eht_len, usig_len;
2199 		u8 user;
2200 
2201 		if (ppduinfo->is_eht) {
2202 			eht_len = struct_size(eht,
2203 					      user_info,
2204 					      ppduinfo->eht_info.num_user_info);
2205 			len += sizeof(*tlv) + eht_len;
2206 		}
2207 
2208 		if (ppduinfo->eht_usig) {
2209 			usig_len = sizeof(*usig);
2210 			len += sizeof(*tlv) + usig_len;
2211 		}
2212 
2213 		rxs->flag |= RX_FLAG_RADIOTAP_TLV_AT_END;
2214 		rxs->encoding = RX_ENC_EHT;
2215 
2216 		skb_reset_mac_header(mon_skb);
2217 
2218 		tlv = skb_push(mon_skb, len);
2219 
2220 		if (ppduinfo->is_eht) {
2221 			tlv->type = cpu_to_le16(IEEE80211_RADIOTAP_EHT);
2222 			tlv->len = cpu_to_le16(eht_len);
2223 
2224 			eht = (struct ieee80211_radiotap_eht *)tlv->data;
2225 			eht->known = ppduinfo->eht_info.eht.known;
2226 
2227 			for (i = 0;
2228 			     i < ARRAY_SIZE(eht->data) &&
2229 			     i < ARRAY_SIZE(ppduinfo->eht_info.eht.data);
2230 			     i++)
2231 				eht->data[i] = ppduinfo->eht_info.eht.data[i];
2232 
2233 			for (user = 0; user < ppduinfo->eht_info.num_user_info; user++)
2234 				put_unaligned_le32(ppduinfo->eht_info.user_info[user],
2235 						   &eht->user_info[user]);
2236 
2237 			tlv = (struct ieee80211_radiotap_tlv *)&tlv->data[eht_len];
2238 		}
2239 
2240 		if (ppduinfo->eht_usig) {
2241 			tlv->type = cpu_to_le16(IEEE80211_RADIOTAP_EHT_USIG);
2242 			tlv->len = cpu_to_le16(usig_len);
2243 
2244 			usig = (struct ieee80211_radiotap_eht_usig *)tlv->data;
2245 			*usig = ppduinfo->u_sig_info.usig;
2246 		}
2247 	} else if (ppduinfo->he_mu_flags) {
2248 		rxs->flag |= RX_FLAG_RADIOTAP_HE_MU;
2249 		rxs->encoding = RX_ENC_HE;
2250 		ptr = skb_push(mon_skb, sizeof(struct ieee80211_radiotap_he_mu));
2251 		ath12k_dp_mon_rx_update_radiotap_he_mu(ppduinfo, ptr);
2252 	} else if (ppduinfo->he_flags) {
2253 		rxs->flag |= RX_FLAG_RADIOTAP_HE;
2254 		rxs->encoding = RX_ENC_HE;
2255 		ptr = skb_push(mon_skb, sizeof(struct ieee80211_radiotap_he));
2256 		ath12k_dp_mon_rx_update_radiotap_he(ppduinfo, ptr);
2257 		rxs->rate_idx = ppduinfo->rate;
2258 	} else if (ppduinfo->vht_flags) {
2259 		rxs->encoding = RX_ENC_VHT;
2260 		rxs->rate_idx = ppduinfo->rate;
2261 	} else if (ppduinfo->ht_flags) {
2262 		rxs->encoding = RX_ENC_HT;
2263 		rxs->rate_idx = ppduinfo->rate;
2264 	} else {
2265 		rxs->encoding = RX_ENC_LEGACY;
2266 		sband = &ar->mac.sbands[rxs->band];
2267 		rxs->rate_idx = ath12k_mac_hw_rate_to_idx(sband, ppduinfo->rate,
2268 							  ppduinfo->cck_flag);
2269 	}
2270 
2271 	rxs->mactime = ppduinfo->tsft;
2272 }
2273 
2274 static void ath12k_dp_mon_rx_deliver_msdu(struct ath12k *ar, struct napi_struct *napi,
2275 					  struct sk_buff *msdu,
2276 					  const struct hal_rx_mon_ppdu_info *ppduinfo,
2277 					  struct ieee80211_rx_status *status,
2278 					  u8 decap)
2279 {
2280 	static const struct ieee80211_radiotap_he known = {
2281 		.data1 = cpu_to_le16(IEEE80211_RADIOTAP_HE_DATA1_DATA_MCS_KNOWN |
2282 				     IEEE80211_RADIOTAP_HE_DATA1_BW_RU_ALLOC_KNOWN),
2283 		.data2 = cpu_to_le16(IEEE80211_RADIOTAP_HE_DATA2_GI_KNOWN),
2284 	};
2285 	struct ieee80211_rx_status *rx_status;
2286 	struct ieee80211_radiotap_he *he = NULL;
2287 	struct ieee80211_sta *pubsta = NULL;
2288 	struct ath12k_peer *peer;
2289 	struct ath12k_skb_rxcb *rxcb = ATH12K_SKB_RXCB(msdu);
2290 	bool is_mcbc = rxcb->is_mcbc;
2291 	bool is_eapol_tkip = rxcb->is_eapol;
2292 
2293 	status->link_valid = 0;
2294 
2295 	if ((status->encoding == RX_ENC_HE) && !(status->flag & RX_FLAG_RADIOTAP_HE) &&
2296 	    !(status->flag & RX_FLAG_SKIP_MONITOR)) {
2297 		he = skb_push(msdu, sizeof(known));
2298 		memcpy(he, &known, sizeof(known));
2299 		status->flag |= RX_FLAG_RADIOTAP_HE;
2300 	}
2301 
2302 	spin_lock_bh(&ar->ab->base_lock);
2303 	peer = ath12k_peer_find_by_id(ar->ab, ppduinfo->peer_id);
2304 	if (peer && peer->sta) {
2305 		pubsta = peer->sta;
2306 		if (pubsta->valid_links) {
2307 			status->link_valid = 1;
2308 			status->link_id = peer->link_id;
2309 		}
2310 	}
2311 
2312 	spin_unlock_bh(&ar->ab->base_lock);
2313 
2314 	ath12k_dbg(ar->ab, ATH12K_DBG_DATA,
2315 		   "rx skb %p len %u peer %pM %u %s %s%s%s%s%s%s%s%s %srate_idx %u vht_nss %u freq %u band %u flag 0x%x fcs-err %i mic-err %i amsdu-more %i\n",
2316 		   msdu,
2317 		   msdu->len,
2318 		   peer ? peer->addr : NULL,
2319 		   rxcb->tid,
2320 		   (is_mcbc) ? "mcast" : "ucast",
2321 		   (status->encoding == RX_ENC_LEGACY) ? "legacy" : "",
2322 		   (status->encoding == RX_ENC_HT) ? "ht" : "",
2323 		   (status->encoding == RX_ENC_VHT) ? "vht" : "",
2324 		   (status->encoding == RX_ENC_HE) ? "he" : "",
2325 		   (status->bw == RATE_INFO_BW_40) ? "40" : "",
2326 		   (status->bw == RATE_INFO_BW_80) ? "80" : "",
2327 		   (status->bw == RATE_INFO_BW_160) ? "160" : "",
2328 		   (status->bw == RATE_INFO_BW_320) ? "320" : "",
2329 		   status->enc_flags & RX_ENC_FLAG_SHORT_GI ? "sgi " : "",
2330 		   status->rate_idx,
2331 		   status->nss,
2332 		   status->freq,
2333 		   status->band, status->flag,
2334 		   !!(status->flag & RX_FLAG_FAILED_FCS_CRC),
2335 		   !!(status->flag & RX_FLAG_MMIC_ERROR),
2336 		   !!(status->flag & RX_FLAG_AMSDU_MORE));
2337 
2338 	ath12k_dbg_dump(ar->ab, ATH12K_DBG_DP_RX, NULL, "dp rx msdu: ",
2339 			msdu->data, msdu->len);
2340 	rx_status = IEEE80211_SKB_RXCB(msdu);
2341 	*rx_status = *status;
2342 
2343 	/* TODO: trace rx packet */
2344 
2345 	/* PN for multicast packets is not validated in HW,
2346 	 * so skip the 802.3 rx path.
2347 	 * Also, fast_rx expects the STA to be authorized, hence
2348 	 * EAPOL packets are sent via the slow path.
2349 	 */
2350 	if (decap == DP_RX_DECAP_TYPE_ETHERNET2_DIX && !is_eapol_tkip &&
2351 	    !(is_mcbc && rx_status->flag & RX_FLAG_DECRYPTED))
2352 		rx_status->flag |= RX_FLAG_8023;
2353 
2354 	ieee80211_rx_napi(ath12k_ar_to_hw(ar), pubsta, msdu, napi);
2355 }
2356 
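/* Deliver one monitor MPDU to mac80211: merge its MSDUs, stamp the rx status
 * and radiotap data, then hand each buffer in the chain to ieee80211_rx_napi().
 * On failure the whole MSDU chain is freed.
 */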
2357 static int ath12k_dp_mon_rx_deliver(struct ath12k *ar,
2358 				    struct dp_mon_mpdu *mon_mpdu,
2359 				    struct hal_rx_mon_ppdu_info *ppduinfo,
2360 				    struct napi_struct *napi)
2361 {
2362 	struct ath12k_pdev_dp *dp = &ar->dp;
2363 	struct sk_buff *mon_skb, *skb_next, *header;
2364 	struct ieee80211_rx_status *rxs = &dp->rx_status;
2365 	u8 decap = DP_RX_DECAP_TYPE_RAW;
2366 
2367 	mon_skb = ath12k_dp_mon_rx_merg_msdus(ar, mon_mpdu, ppduinfo, rxs);
2368 	if (!mon_skb)
2369 		goto mon_deliver_fail;
2370 
2371 	header = mon_skb;
2372 	rxs->flag = 0;
2373 
2374 	if (mon_mpdu->err_bitmap & HAL_RX_MPDU_ERR_FCS)
2375 		rxs->flag = RX_FLAG_FAILED_FCS_CRC;
2376 
2377 	do {
2378 		skb_next = mon_skb->next;
2379 		if (!skb_next)
2380 			rxs->flag &= ~RX_FLAG_AMSDU_MORE;
2381 		else
2382 			rxs->flag |= RX_FLAG_AMSDU_MORE;
2383 
2384 		if (mon_skb == header) {
2385 			header = NULL;
2386 			rxs->flag &= ~RX_FLAG_ALLOW_SAME_PN;
2387 		} else {
2388 			rxs->flag |= RX_FLAG_ALLOW_SAME_PN;
2389 		}
2390 		rxs->flag |= RX_FLAG_ONLY_MONITOR;
2391 
2392 		if (!(rxs->flag & RX_FLAG_ONLY_MONITOR))
2393 			decap = mon_mpdu->decap_format;
2394 
2395 		ath12k_dp_mon_update_radiotap(ar, ppduinfo, mon_skb, rxs);
2396 		ath12k_dp_mon_rx_deliver_msdu(ar, napi, mon_skb, ppduinfo, rxs, decap);
2397 		mon_skb = skb_next;
2398 	} while (mon_skb);
2399 	rxs->flag = 0;
2400 
2401 	return 0;
2402 
2403 mon_deliver_fail:
2404 	mon_skb = mon_mpdu->head;
2405 	while (mon_skb) {
2406 		skb_next = mon_skb->next;
2407 		dev_kfree_skb_any(mon_skb);
2408 		mon_skb = skb_next;
2409 	}
2410 	return -EINVAL;
2411 }
2412 
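/* Force the skb to exactly @len bytes: trim it when it is longer, otherwise
 * grow the tailroom (expanding the head if required) and extend it.
 */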
2413 static int ath12k_dp_pkt_set_pktlen(struct sk_buff *skb, u32 len)
2414 {
2415 	if (skb->len > len) {
2416 		skb_trim(skb, len);
2417 	} else {
2418 		if (skb_tailroom(skb) < len - skb->len) {
2419 			if ((pskb_expand_head(skb, 0,
2420 					      len - skb->len - skb_tailroom(skb),
2421 					      GFP_ATOMIC))) {
2422 				return -ENOMEM;
2423 			}
2424 		}
2425 		skb_put(skb, (len - skb->len));
2426 	}
2427 
2428 	return 0;
2429 }
2430 
2431 /* Hardware fills the buffer with 128-byte alignment, so it needs to be
2432  * reaped with the same 128-byte alignment.
2433  */
2434 #define RXDMA_DATA_DMA_BLOCK_SIZE 128
2435 
2436 static void
2437 ath12k_dp_mon_get_buf_len(struct hal_rx_msdu_desc_info *info,
2438 			  bool *is_frag, u32 *total_len,
2439 			  u32 *frag_len, u32 *msdu_cnt)
2440 {
2441 	if (info->msdu_flags & RX_MSDU_DESC_INFO0_MSDU_CONTINUATION) {
2442 		*is_frag = true;
2443 		*frag_len = (RX_MON_STATUS_BASE_BUF_SIZE -
2444 			     sizeof(struct hal_rx_desc)) &
2445 			     ~(RXDMA_DATA_DMA_BLOCK_SIZE - 1);
2446 		*total_len += *frag_len;
2447 	} else {
2448 		if (*is_frag)
2449 			*frag_len = info->msdu_len - *total_len;
2450 		else
2451 			*frag_len = info->msdu_len;
2452 
2453 		*msdu_cnt -= 1;
2454 	}
2455 }
2456 
2457 static int
2458 ath12k_dp_mon_parse_status_buf(struct ath12k *ar,
2459 			       struct ath12k_mon_data *pmon,
2460 			       const struct dp_mon_packet_info *packet_info)
2461 {
2462 	struct ath12k_base *ab = ar->ab;
2463 	struct dp_rxdma_mon_ring *buf_ring = &ab->dp.rxdma_mon_buf_ring;
2464 	struct sk_buff *msdu;
2465 	int buf_id;
2466 	u32 offset;
2467 
2468 	buf_id = u32_get_bits(packet_info->cookie, DP_RXDMA_BUF_COOKIE_BUF_ID);
2469 
2470 	spin_lock_bh(&buf_ring->idr_lock);
2471 	msdu = idr_remove(&buf_ring->bufs_idr, buf_id);
2472 	spin_unlock_bh(&buf_ring->idr_lock);
2473 
2474 	if (unlikely(!msdu)) {
2475 		ath12k_warn(ab, "mon dest desc with inval buf_id %d\n", buf_id);
2476 		return 0;
2477 	}
2478 
2479 	dma_unmap_single(ab->dev, ATH12K_SKB_RXCB(msdu)->paddr,
2480 			 msdu->len + skb_tailroom(msdu),
2481 			 DMA_FROM_DEVICE);
2482 
2483 	offset = packet_info->dma_length + ATH12K_MON_RX_DOT11_OFFSET;
2484 	if (ath12k_dp_pkt_set_pktlen(msdu, offset)) {
2485 		dev_kfree_skb_any(msdu);
2486 		goto dest_replenish;
2487 	}
2488 
2489 	if (!pmon->mon_mpdu->head)
2490 		pmon->mon_mpdu->head = msdu;
2491 	else
2492 		pmon->mon_mpdu->tail->next = msdu;
2493 
2494 	pmon->mon_mpdu->tail = msdu;
2495 
2496 dest_replenish:
2497 	ath12k_dp_mon_buf_replenish(ab, buf_ring, 1);
2498 
2499 	return 0;
2500 }
2501 
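/* React to the status returned by the TLV parser for the destination ring:
 * allocate a new monitor MPDU on MPDU_START, attach data buffers on BUF_ADDR,
 * record decap/error info on MSDU_END and queue the MPDU on MPDU_END.
 */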
2502 static int
2503 ath12k_dp_mon_parse_rx_dest_tlv(struct ath12k *ar,
2504 				struct ath12k_mon_data *pmon,
2505 				enum hal_rx_mon_status hal_status,
2506 				const void *tlv_data)
2507 {
2508 	switch (hal_status) {
2509 	case HAL_RX_MON_STATUS_MPDU_START:
2510 		if (WARN_ON_ONCE(pmon->mon_mpdu))
2511 			break;
2512 
2513 		pmon->mon_mpdu = kzalloc(sizeof(*pmon->mon_mpdu), GFP_ATOMIC);
2514 		if (!pmon->mon_mpdu)
2515 			return -ENOMEM;
2516 		break;
2517 	case HAL_RX_MON_STATUS_BUF_ADDR:
2518 		return ath12k_dp_mon_parse_status_buf(ar, pmon, tlv_data);
2519 	case HAL_RX_MON_STATUS_MPDU_END:
2520 		/* If no MSDU then free empty MPDU */
2521 		if (pmon->mon_mpdu->tail) {
2522 			pmon->mon_mpdu->tail->next = NULL;
2523 			list_add_tail(&pmon->mon_mpdu->list, &pmon->dp_rx_mon_mpdu_list);
2524 		} else {
2525 			kfree(pmon->mon_mpdu);
2526 		}
2527 		pmon->mon_mpdu = NULL;
2528 		break;
2529 	case HAL_RX_MON_STATUS_MSDU_END:
2530 		pmon->mon_mpdu->decap_format = pmon->decap_format;
2531 		pmon->mon_mpdu->err_bitmap = pmon->err_bitmap;
2532 		break;
2533 	default:
2534 		break;
2535 	}
2536 
2537 	return 0;
2538 }
2539 
2540 static enum hal_rx_mon_status
2541 ath12k_dp_mon_parse_rx_dest(struct ath12k *ar, struct ath12k_mon_data *pmon,
2542 			    struct sk_buff *skb)
2543 {
2544 	struct hal_tlv_64_hdr *tlv;
2545 	struct ath12k_skb_rxcb *rxcb;
2546 	enum hal_rx_mon_status hal_status;
2547 	u16 tlv_tag, tlv_len;
2548 	u8 *ptr = skb->data;
2549 
2550 	do {
2551 		tlv = (struct hal_tlv_64_hdr *)ptr;
2552 		tlv_tag = le64_get_bits(tlv->tl, HAL_TLV_64_HDR_TAG);
2553 
2554 		/* The actual length of PPDU_END is the combined length of the many
2555 		 * PHY TLVs that follow. Skip the TLV header and the
2556 		 * rx_rxpcu_classification_overview that follows the header to get
2557 		 * to the next TLV.
2558 		 */
2559 
2560 		if (tlv_tag == HAL_RX_PPDU_END)
2561 			tlv_len = sizeof(struct hal_rx_rxpcu_classification_overview);
2562 		else
2563 			tlv_len = le64_get_bits(tlv->tl, HAL_TLV_64_HDR_LEN);
2564 
2565 		hal_status = ath12k_dp_mon_rx_parse_status_tlv(ar, pmon, tlv);
2566 
2567 		if (ar->monitor_started && ar->ab->hw_params->rxdma1_enable &&
2568 		    ath12k_dp_mon_parse_rx_dest_tlv(ar, pmon, hal_status, tlv->value))
2569 			return HAL_RX_MON_STATUS_PPDU_DONE;
2570 
2571 		ptr += sizeof(*tlv) + tlv_len;
2572 		ptr = PTR_ALIGN(ptr, HAL_TLV_64_ALIGN);
2573 
2574 		if ((ptr - skb->data) > skb->len)
2575 			break;
2576 
2577 	} while ((hal_status == HAL_RX_MON_STATUS_PPDU_NOT_DONE) ||
2578 		 (hal_status == HAL_RX_MON_STATUS_BUF_ADDR) ||
2579 		 (hal_status == HAL_RX_MON_STATUS_MPDU_START) ||
2580 		 (hal_status == HAL_RX_MON_STATUS_MPDU_END) ||
2581 		 (hal_status == HAL_RX_MON_STATUS_MSDU_END));
2582 
2583 	rxcb = ATH12K_SKB_RXCB(skb);
2584 	if (rxcb->is_end_of_ppdu)
2585 		hal_status = HAL_RX_MON_STATUS_PPDU_DONE;
2586 
2587 	return hal_status;
2588 }
2589 
2590 enum hal_rx_mon_status
2591 ath12k_dp_mon_rx_parse_mon_status(struct ath12k *ar,
2592 				  struct ath12k_mon_data *pmon,
2593 				  struct sk_buff *skb,
2594 				  struct napi_struct *napi)
2595 {
2596 	struct hal_rx_mon_ppdu_info *ppdu_info = &pmon->mon_ppdu_info;
2597 	struct dp_mon_mpdu *tmp;
2598 	struct dp_mon_mpdu *mon_mpdu = pmon->mon_mpdu;
2599 	enum hal_rx_mon_status hal_status;
2600 
2601 	hal_status = ath12k_dp_mon_parse_rx_dest(ar, pmon, skb);
2602 	if (hal_status != HAL_RX_MON_STATUS_PPDU_DONE)
2603 		return hal_status;
2604 
2605 	list_for_each_entry_safe(mon_mpdu, tmp, &pmon->dp_rx_mon_mpdu_list, list) {
2606 		list_del(&mon_mpdu->list);
2607 
2608 		if (mon_mpdu->head && mon_mpdu->tail)
2609 			ath12k_dp_mon_rx_deliver(ar, mon_mpdu, ppdu_info, napi);
2610 
2611 		kfree(mon_mpdu);
2612 	}
2613 
2614 	return hal_status;
2615 }
2616 
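/* Refill the monitor buffer ring with @req_entries freshly allocated and
 * DMA-mapped skbs, publishing their addresses and IDR cookies to the
 * hardware through the SRNG source ring.
 */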
2617 int ath12k_dp_mon_buf_replenish(struct ath12k_base *ab,
2618 				struct dp_rxdma_mon_ring *buf_ring,
2619 				int req_entries)
2620 {
2621 	struct hal_mon_buf_ring *mon_buf;
2622 	struct sk_buff *skb;
2623 	struct hal_srng *srng;
2624 	dma_addr_t paddr;
2625 	u32 cookie;
2626 	int buf_id;
2627 
2628 	srng = &ab->hal.srng_list[buf_ring->refill_buf_ring.ring_id];
2629 	spin_lock_bh(&srng->lock);
2630 	ath12k_hal_srng_access_begin(ab, srng);
2631 
2632 	while (req_entries > 0) {
2633 		skb = dev_alloc_skb(DP_RX_BUFFER_SIZE + DP_RX_BUFFER_ALIGN_SIZE);
2634 		if (unlikely(!skb))
2635 			goto fail_alloc_skb;
2636 
2637 		if (!IS_ALIGNED((unsigned long)skb->data, DP_RX_BUFFER_ALIGN_SIZE)) {
2638 			skb_pull(skb,
2639 				 PTR_ALIGN(skb->data, DP_RX_BUFFER_ALIGN_SIZE) -
2640 				 skb->data);
2641 		}
2642 
2643 		paddr = dma_map_single(ab->dev, skb->data,
2644 				       skb->len + skb_tailroom(skb),
2645 				       DMA_FROM_DEVICE);
2646 
2647 		if (unlikely(dma_mapping_error(ab->dev, paddr)))
2648 			goto fail_free_skb;
2649 
2650 		spin_lock_bh(&buf_ring->idr_lock);
2651 		buf_id = idr_alloc(&buf_ring->bufs_idr, skb, 0,
2652 				   buf_ring->bufs_max * 3, GFP_ATOMIC);
2653 		spin_unlock_bh(&buf_ring->idr_lock);
2654 
2655 		if (unlikely(buf_id < 0))
2656 			goto fail_dma_unmap;
2657 
2658 		mon_buf = ath12k_hal_srng_src_get_next_entry(ab, srng);
2659 		if (unlikely(!mon_buf))
2660 			goto fail_idr_remove;
2661 
2662 		ATH12K_SKB_RXCB(skb)->paddr = paddr;
2663 
2664 		cookie = u32_encode_bits(buf_id, DP_RXDMA_BUF_COOKIE_BUF_ID);
2665 
2666 		mon_buf->paddr_lo = cpu_to_le32(lower_32_bits(paddr));
2667 		mon_buf->paddr_hi = cpu_to_le32(upper_32_bits(paddr));
2668 		mon_buf->cookie = cpu_to_le64(cookie);
2669 
2670 		req_entries--;
2671 	}
2672 
2673 	ath12k_hal_srng_access_end(ab, srng);
2674 	spin_unlock_bh(&srng->lock);
2675 	return 0;
2676 
2677 fail_idr_remove:
2678 	spin_lock_bh(&buf_ring->idr_lock);
2679 	idr_remove(&buf_ring->bufs_idr, buf_id);
2680 	spin_unlock_bh(&buf_ring->idr_lock);
2681 fail_dma_unmap:
2682 	dma_unmap_single(ab->dev, paddr, skb->len + skb_tailroom(skb),
2683 			 DMA_FROM_DEVICE);
2684 fail_free_skb:
2685 	dev_kfree_skb_any(skb);
2686 fail_alloc_skb:
2687 	ath12k_hal_srng_access_end(ab, srng);
2688 	spin_unlock_bh(&srng->lock);
2689 	return -ENOMEM;
2690 }
2691 
2692 int ath12k_dp_mon_status_bufs_replenish(struct ath12k_base *ab,
2693 					struct dp_rxdma_mon_ring *rx_ring,
2694 					int req_entries)
2695 {
2696 	enum hal_rx_buf_return_buf_manager mgr =
2697 		ab->hw_params->hal_params->rx_buf_rbm;
2698 	int num_free, num_remain, buf_id;
2699 	struct ath12k_buffer_addr *desc;
2700 	struct hal_srng *srng;
2701 	struct sk_buff *skb;
2702 	dma_addr_t paddr;
2703 	u32 cookie;
2704 
2705 	req_entries = min(req_entries, rx_ring->bufs_max);
2706 
2707 	srng = &ab->hal.srng_list[rx_ring->refill_buf_ring.ring_id];
2708 
2709 	spin_lock_bh(&srng->lock);
2710 
2711 	ath12k_hal_srng_access_begin(ab, srng);
2712 
2713 	num_free = ath12k_hal_srng_src_num_free(ab, srng, true);
2714 	if (!req_entries && (num_free > (rx_ring->bufs_max * 3) / 4))
2715 		req_entries = num_free;
2716 
2717 	req_entries = min(num_free, req_entries);
2718 	num_remain = req_entries;
2719 
2720 	while (num_remain > 0) {
2721 		skb = dev_alloc_skb(RX_MON_STATUS_BUF_SIZE);
2722 		if (!skb)
2723 			break;
2724 
2725 		if (!IS_ALIGNED((unsigned long)skb->data,
2726 				RX_MON_STATUS_BUF_ALIGN)) {
2727 			skb_pull(skb,
2728 				 PTR_ALIGN(skb->data, RX_MON_STATUS_BUF_ALIGN) -
2729 				 skb->data);
2730 		}
2731 
2732 		paddr = dma_map_single(ab->dev, skb->data,
2733 				       skb->len + skb_tailroom(skb),
2734 				       DMA_FROM_DEVICE);
2735 		if (dma_mapping_error(ab->dev, paddr))
2736 			goto fail_free_skb;
2737 
2738 		spin_lock_bh(&rx_ring->idr_lock);
2739 		buf_id = idr_alloc(&rx_ring->bufs_idr, skb, 0,
2740 				   rx_ring->bufs_max * 3, GFP_ATOMIC);
2741 		spin_unlock_bh(&rx_ring->idr_lock);
2742 		if (buf_id < 0)
2743 			goto fail_dma_unmap;
2744 		cookie = u32_encode_bits(buf_id, DP_RXDMA_BUF_COOKIE_BUF_ID);
2745 
2746 		desc = ath12k_hal_srng_src_get_next_entry(ab, srng);
2747 		if (!desc)
2748 			goto fail_buf_unassign;
2749 
2750 		ATH12K_SKB_RXCB(skb)->paddr = paddr;
2751 
2752 		num_remain--;
2753 
2754 		ath12k_hal_rx_buf_addr_info_set(desc, paddr, cookie, mgr);
2755 	}
2756 
2757 	ath12k_hal_srng_access_end(ab, srng);
2758 
2759 	spin_unlock_bh(&srng->lock);
2760 
2761 	return req_entries - num_remain;
2762 
2763 fail_buf_unassign:
2764 	spin_lock_bh(&rx_ring->idr_lock);
2765 	idr_remove(&rx_ring->bufs_idr, buf_id);
2766 	spin_unlock_bh(&rx_ring->idr_lock);
2767 fail_dma_unmap:
2768 	dma_unmap_single(ab->dev, paddr, skb->len + skb_tailroom(skb),
2769 			 DMA_FROM_DEVICE);
2770 fail_free_skb:
2771 	dev_kfree_skb_any(skb);
2772 
2773 	ath12k_hal_srng_access_end(ab, srng);
2774 
2775 	spin_unlock_bh(&srng->lock);
2776 
2777 	return req_entries - num_remain;
2778 }
2779 
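/* Return the TX PPDU info of the requested type, reusing the cached entry
 * when it has not been consumed yet; otherwise allocate a fresh one and
 * cache it in the monitor data.
 */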
2780 static struct dp_mon_tx_ppdu_info *
2781 ath12k_dp_mon_tx_get_ppdu_info(struct ath12k_mon_data *pmon,
2782 			       unsigned int ppdu_id,
2783 			       enum dp_mon_tx_ppdu_info_type type)
2784 {
2785 	struct dp_mon_tx_ppdu_info *tx_ppdu_info;
2786 
2787 	if (type == DP_MON_TX_PROT_PPDU_INFO) {
2788 		tx_ppdu_info = pmon->tx_prot_ppdu_info;
2789 
2790 		if (tx_ppdu_info && !tx_ppdu_info->is_used)
2791 			return tx_ppdu_info;
2792 		kfree(tx_ppdu_info);
2793 	} else {
2794 		tx_ppdu_info = pmon->tx_data_ppdu_info;
2795 
2796 		if (tx_ppdu_info && !tx_ppdu_info->is_used)
2797 			return tx_ppdu_info;
2798 		kfree(tx_ppdu_info);
2799 	}
2800 
2801 	/* allocate new tx_ppdu_info */
2802 	tx_ppdu_info = kzalloc(sizeof(*tx_ppdu_info), GFP_ATOMIC);
2803 	if (!tx_ppdu_info)
2804 		return NULL;
2805 
2806 	tx_ppdu_info->is_used = 0;
2807 	tx_ppdu_info->ppdu_id = ppdu_id;
2808 
2809 	if (type == DP_MON_TX_PROT_PPDU_INFO)
2810 		pmon->tx_prot_ppdu_info = tx_ppdu_info;
2811 	else
2812 		pmon->tx_data_ppdu_info = tx_ppdu_info;
2813 
2814 	return tx_ppdu_info;
2815 }
2816 
2817 static struct dp_mon_tx_ppdu_info *
2818 ath12k_dp_mon_hal_tx_ppdu_info(struct ath12k_mon_data *pmon,
2819 			       u16 tlv_tag)
2820 {
2821 	switch (tlv_tag) {
2822 	case HAL_TX_FES_SETUP:
2823 	case HAL_TX_FLUSH:
2824 	case HAL_PCU_PPDU_SETUP_INIT:
2825 	case HAL_TX_PEER_ENTRY:
2826 	case HAL_TX_QUEUE_EXTENSION:
2827 	case HAL_TX_MPDU_START:
2828 	case HAL_TX_MSDU_START:
2829 	case HAL_TX_DATA:
2830 	case HAL_MON_BUF_ADDR:
2831 	case HAL_TX_MPDU_END:
2832 	case HAL_TX_LAST_MPDU_FETCHED:
2833 	case HAL_TX_LAST_MPDU_END:
2834 	case HAL_COEX_TX_REQ:
2835 	case HAL_TX_RAW_OR_NATIVE_FRAME_SETUP:
2836 	case HAL_SCH_CRITICAL_TLV_REFERENCE:
2837 	case HAL_TX_FES_SETUP_COMPLETE:
2838 	case HAL_TQM_MPDU_GLOBAL_START:
2839 	case HAL_SCHEDULER_END:
2840 	case HAL_TX_FES_STATUS_USER_PPDU:
2841 		break;
2842 	case HAL_TX_FES_STATUS_PROT: {
2843 		if (!pmon->tx_prot_ppdu_info->is_used)
2844 			pmon->tx_prot_ppdu_info->is_used = true;
2845 
2846 		return pmon->tx_prot_ppdu_info;
2847 	}
2848 	}
2849 
2850 	if (!pmon->tx_data_ppdu_info->is_used)
2851 		pmon->tx_data_ppdu_info->is_used = true;
2852 
2853 	return pmon->tx_data_ppdu_info;
2854 }
2855 
2856 #define MAX_MONITOR_HEADER 512
2857 #define MAX_DUMMY_FRM_BODY 128
2858 
2859 struct sk_buff *ath12k_dp_mon_tx_alloc_skb(void)
2860 {
2861 	struct sk_buff *skb;
2862 
2863 	skb = dev_alloc_skb(MAX_MONITOR_HEADER + MAX_DUMMY_FRM_BODY);
2864 	if (!skb)
2865 		return NULL;
2866 
2867 	skb_reserve(skb, MAX_MONITOR_HEADER);
2868 
2869 	if (!IS_ALIGNED((unsigned long)skb->data, 4))
2870 		skb_pull(skb, PTR_ALIGN(skb->data, 4) - skb->data);
2871 
2872 	return skb;
2873 }
2874 
2875 static int
2876 ath12k_dp_mon_tx_gen_cts2self_frame(struct dp_mon_tx_ppdu_info *tx_ppdu_info)
2877 {
2878 	struct sk_buff *skb;
2879 	struct ieee80211_cts *cts;
2880 
2881 	skb = ath12k_dp_mon_tx_alloc_skb();
2882 	if (!skb)
2883 		return -ENOMEM;
2884 
2885 	cts = (struct ieee80211_cts *)skb->data;
2886 	memset(cts, 0, MAX_DUMMY_FRM_BODY);
2887 	cts->frame_control =
2888 		cpu_to_le16(IEEE80211_FTYPE_CTL | IEEE80211_STYPE_CTS);
2889 	cts->duration = cpu_to_le16(tx_ppdu_info->rx_status.rx_duration);
2890 	memcpy(cts->ra, tx_ppdu_info->rx_status.addr1, sizeof(cts->ra));
2891 
2892 	skb_put(skb, sizeof(*cts));
2893 	tx_ppdu_info->tx_mon_mpdu->head = skb;
2894 	tx_ppdu_info->tx_mon_mpdu->tail = NULL;
2895 	list_add_tail(&tx_ppdu_info->tx_mon_mpdu->list,
2896 		      &tx_ppdu_info->dp_tx_mon_mpdu_list);
2897 
2898 	return 0;
2899 }
2900 
2901 static int
2902 ath12k_dp_mon_tx_gen_rts_frame(struct dp_mon_tx_ppdu_info *tx_ppdu_info)
2903 {
2904 	struct sk_buff *skb;
2905 	struct ieee80211_rts *rts;
2906 
2907 	skb = ath12k_dp_mon_tx_alloc_skb();
2908 	if (!skb)
2909 		return -ENOMEM;
2910 
2911 	rts = (struct ieee80211_rts *)skb->data;
2912 	memset(rts, 0, MAX_DUMMY_FRM_BODY);
2913 	rts->frame_control =
2914 		cpu_to_le16(IEEE80211_FTYPE_CTL | IEEE80211_STYPE_RTS);
2915 	rts->duration = cpu_to_le16(tx_ppdu_info->rx_status.rx_duration);
2916 	memcpy(rts->ra, tx_ppdu_info->rx_status.addr1, sizeof(rts->ra));
2917 	memcpy(rts->ta, tx_ppdu_info->rx_status.addr2, sizeof(rts->ta));
2918 
2919 	skb_put(skb, sizeof(*rts));
2920 	tx_ppdu_info->tx_mon_mpdu->head = skb;
2921 	tx_ppdu_info->tx_mon_mpdu->tail = NULL;
2922 	list_add_tail(&tx_ppdu_info->tx_mon_mpdu->list,
2923 		      &tx_ppdu_info->dp_tx_mon_mpdu_list);
2924 
2925 	return 0;
2926 }
2927 
2928 static int
2929 ath12k_dp_mon_tx_gen_3addr_qos_null_frame(struct dp_mon_tx_ppdu_info *tx_ppdu_info)
2930 {
2931 	struct sk_buff *skb;
2932 	struct ieee80211_qos_hdr *qhdr;
2933 
2934 	skb = ath12k_dp_mon_tx_alloc_skb();
2935 	if (!skb)
2936 		return -ENOMEM;
2937 
2938 	qhdr = (struct ieee80211_qos_hdr *)skb->data;
2939 	memset(qhdr, 0, MAX_DUMMY_FRM_BODY);
2940 	qhdr->frame_control =
2941 		cpu_to_le16(IEEE80211_FTYPE_DATA | IEEE80211_STYPE_QOS_NULLFUNC);
2942 	qhdr->duration_id = cpu_to_le16(tx_ppdu_info->rx_status.rx_duration);
2943 	memcpy(qhdr->addr1, tx_ppdu_info->rx_status.addr1, ETH_ALEN);
2944 	memcpy(qhdr->addr2, tx_ppdu_info->rx_status.addr2, ETH_ALEN);
2945 	memcpy(qhdr->addr3, tx_ppdu_info->rx_status.addr3, ETH_ALEN);
2946 
2947 	skb_put(skb, sizeof(*qhdr));
2948 	tx_ppdu_info->tx_mon_mpdu->head = skb;
2949 	tx_ppdu_info->tx_mon_mpdu->tail = NULL;
2950 	list_add_tail(&tx_ppdu_info->tx_mon_mpdu->list,
2951 		      &tx_ppdu_info->dp_tx_mon_mpdu_list);
2952 
2953 	return 0;
2954 }
2955 
2956 static int
2957 ath12k_dp_mon_tx_gen_4addr_qos_null_frame(struct dp_mon_tx_ppdu_info *tx_ppdu_info)
2958 {
2959 	struct sk_buff *skb;
2960 	struct dp_mon_qosframe_addr4 *qhdr;
2961 
2962 	skb = ath12k_dp_mon_tx_alloc_skb();
2963 	if (!skb)
2964 		return -ENOMEM;
2965 
2966 	qhdr = (struct dp_mon_qosframe_addr4 *)skb->data;
2967 	memset(qhdr, 0, MAX_DUMMY_FRM_BODY);
2968 	qhdr->frame_control =
2969 		cpu_to_le16(IEEE80211_FTYPE_DATA | IEEE80211_STYPE_QOS_NULLFUNC);
2970 	qhdr->duration = cpu_to_le16(tx_ppdu_info->rx_status.rx_duration);
2971 	memcpy(qhdr->addr1, tx_ppdu_info->rx_status.addr1, ETH_ALEN);
2972 	memcpy(qhdr->addr2, tx_ppdu_info->rx_status.addr2, ETH_ALEN);
2973 	memcpy(qhdr->addr3, tx_ppdu_info->rx_status.addr3, ETH_ALEN);
2974 	memcpy(qhdr->addr4, tx_ppdu_info->rx_status.addr4, ETH_ALEN);
2975 
2976 	skb_put(skb, sizeof(*qhdr));
2977 	tx_ppdu_info->tx_mon_mpdu->head = skb;
2978 	tx_ppdu_info->tx_mon_mpdu->tail = NULL;
2979 	list_add_tail(&tx_ppdu_info->tx_mon_mpdu->list,
2980 		      &tx_ppdu_info->dp_tx_mon_mpdu_list);
2981 
2982 	return 0;
2983 }
2984 
2985 static int
2986 ath12k_dp_mon_tx_gen_ack_frame(struct dp_mon_tx_ppdu_info *tx_ppdu_info)
2987 {
2988 	struct sk_buff *skb;
2989 	struct dp_mon_frame_min_one *fbmhdr;
2990 
2991 	skb = ath12k_dp_mon_tx_alloc_skb();
2992 	if (!skb)
2993 		return -ENOMEM;
2994 
2995 	fbmhdr = (struct dp_mon_frame_min_one *)skb->data;
2996 	memset(fbmhdr, 0, MAX_DUMMY_FRM_BODY);
2997 	fbmhdr->frame_control =
2998 		cpu_to_le16(IEEE80211_FTYPE_DATA | IEEE80211_STYPE_QOS_CFACK);
2999 	memcpy(fbmhdr->addr1, tx_ppdu_info->rx_status.addr1, ETH_ALEN);
3000 
3001 	/* set duration zero for ack frame */
3002 	fbmhdr->duration = 0;
3003 
3004 	skb_put(skb, sizeof(*fbmhdr));
3005 	tx_ppdu_info->tx_mon_mpdu->head = skb;
3006 	tx_ppdu_info->tx_mon_mpdu->tail = NULL;
3007 	list_add_tail(&tx_ppdu_info->tx_mon_mpdu->list,
3008 		      &tx_ppdu_info->dp_tx_mon_mpdu_list);
3009 
3010 	return 0;
3011 }
3012 
3013 static int
3014 ath12k_dp_mon_tx_gen_prot_frame(struct dp_mon_tx_ppdu_info *tx_ppdu_info)
3015 {
3016 	int ret = 0;
3017 
3018 	switch (tx_ppdu_info->rx_status.medium_prot_type) {
3019 	case DP_MON_TX_MEDIUM_RTS_LEGACY:
3020 	case DP_MON_TX_MEDIUM_RTS_11AC_STATIC_BW:
3021 	case DP_MON_TX_MEDIUM_RTS_11AC_DYNAMIC_BW:
3022 		ret = ath12k_dp_mon_tx_gen_rts_frame(tx_ppdu_info);
3023 		break;
3024 	case DP_MON_TX_MEDIUM_CTS2SELF:
3025 		ret = ath12k_dp_mon_tx_gen_cts2self_frame(tx_ppdu_info);
3026 		break;
3027 	case DP_MON_TX_MEDIUM_QOS_NULL_NO_ACK_3ADDR:
3028 		ret = ath12k_dp_mon_tx_gen_3addr_qos_null_frame(tx_ppdu_info);
3029 		break;
3030 	case DP_MON_TX_MEDIUM_QOS_NULL_NO_ACK_4ADDR:
3031 		ret = ath12k_dp_mon_tx_gen_4addr_qos_null_frame(tx_ppdu_info);
3032 		break;
3033 	}
3034 
3035 	return ret;
3036 }
3037 
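/* Decode a single TX monitor status TLV into the corresponding TX PPDU info;
 * for protection and response TLVs a matching dummy frame (RTS, CTS-to-self,
 * QoS-Null or ACK) is generated for delivery to the monitor interface.
 */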
3038 static enum dp_mon_tx_tlv_status
3039 ath12k_dp_mon_tx_parse_status_tlv(struct ath12k_base *ab,
3040 				  struct ath12k_mon_data *pmon,
3041 				  u16 tlv_tag, const void *tlv_data, u32 userid)
3042 {
3043 	struct dp_mon_tx_ppdu_info *tx_ppdu_info;
3044 	enum dp_mon_tx_tlv_status status = DP_MON_TX_STATUS_PPDU_NOT_DONE;
3045 	u32 info[7];
3046 
3047 	tx_ppdu_info = ath12k_dp_mon_hal_tx_ppdu_info(pmon, tlv_tag);
3048 
3049 	switch (tlv_tag) {
3050 	case HAL_TX_FES_SETUP: {
3051 		const struct hal_tx_fes_setup *tx_fes_setup = tlv_data;
3052 
3053 		info[0] = __le32_to_cpu(tx_fes_setup->info0);
3054 		tx_ppdu_info->ppdu_id = __le32_to_cpu(tx_fes_setup->schedule_id);
3055 		tx_ppdu_info->num_users =
3056 			u32_get_bits(info[0], HAL_TX_FES_SETUP_INFO0_NUM_OF_USERS);
3057 		status = DP_MON_TX_FES_SETUP;
3058 		break;
3059 	}
3060 
3061 	case HAL_TX_FES_STATUS_END: {
3062 		const struct hal_tx_fes_status_end *tx_fes_status_end = tlv_data;
3063 		u32 tst_15_0, tst_31_16;
3064 
3065 		info[0] = __le32_to_cpu(tx_fes_status_end->info0);
3066 		tst_15_0 =
3067 			u32_get_bits(info[0],
3068 				     HAL_TX_FES_STATUS_END_INFO0_START_TIMESTAMP_15_0);
3069 		tst_31_16 =
3070 			u32_get_bits(info[0],
3071 				     HAL_TX_FES_STATUS_END_INFO0_START_TIMESTAMP_31_16);
3072 
3073 		tx_ppdu_info->rx_status.ppdu_ts = (tst_15_0 | (tst_31_16 << 16));
3074 		status = DP_MON_TX_FES_STATUS_END;
3075 		break;
3076 	}
3077 
3078 	case HAL_RX_RESPONSE_REQUIRED_INFO: {
3079 		const struct hal_rx_resp_req_info *rx_resp_req_info = tlv_data;
3080 		u32 addr_32;
3081 		u16 addr_16;
3082 
3083 		info[0] = __le32_to_cpu(rx_resp_req_info->info0);
3084 		info[1] = __le32_to_cpu(rx_resp_req_info->info1);
3085 		info[2] = __le32_to_cpu(rx_resp_req_info->info2);
3086 		info[3] = __le32_to_cpu(rx_resp_req_info->info3);
3087 		info[4] = __le32_to_cpu(rx_resp_req_info->info4);
3088 		info[5] = __le32_to_cpu(rx_resp_req_info->info5);
3089 
3090 		tx_ppdu_info->rx_status.ppdu_id =
3091 			u32_get_bits(info[0], HAL_RX_RESP_REQ_INFO0_PPDU_ID);
3092 		tx_ppdu_info->rx_status.reception_type =
3093 			u32_get_bits(info[0], HAL_RX_RESP_REQ_INFO0_RECEPTION_TYPE);
3094 		tx_ppdu_info->rx_status.rx_duration =
3095 			u32_get_bits(info[1], HAL_RX_RESP_REQ_INFO1_DURATION);
3096 		tx_ppdu_info->rx_status.mcs =
3097 			u32_get_bits(info[1], HAL_RX_RESP_REQ_INFO1_RATE_MCS);
3098 		tx_ppdu_info->rx_status.sgi =
3099 			u32_get_bits(info[1], HAL_RX_RESP_REQ_INFO1_SGI);
3100 		tx_ppdu_info->rx_status.is_stbc =
3101 			u32_get_bits(info[1], HAL_RX_RESP_REQ_INFO1_STBC);
3102 		tx_ppdu_info->rx_status.ldpc =
3103 			u32_get_bits(info[1], HAL_RX_RESP_REQ_INFO1_LDPC);
3104 		tx_ppdu_info->rx_status.is_ampdu =
3105 			u32_get_bits(info[1], HAL_RX_RESP_REQ_INFO1_IS_AMPDU);
3106 		tx_ppdu_info->rx_status.num_users =
3107 			u32_get_bits(info[2], HAL_RX_RESP_REQ_INFO2_NUM_USER);
3108 
3109 		addr_32 = u32_get_bits(info[3], HAL_RX_RESP_REQ_INFO3_ADDR1_31_0);
3110 		addr_16 = u32_get_bits(info[3], HAL_RX_RESP_REQ_INFO4_ADDR1_47_32);
3111 		ath12k_dp_get_mac_addr(addr_32, addr_16, tx_ppdu_info->rx_status.addr1);
3112 
3113 		addr_16 = u32_get_bits(info[4], HAL_RX_RESP_REQ_INFO4_ADDR1_15_0);
3114 		addr_32 = u32_get_bits(info[5], HAL_RX_RESP_REQ_INFO5_ADDR1_47_16);
3115 		ath12k_dp_get_mac_addr(addr_32, addr_16, tx_ppdu_info->rx_status.addr2);
3116 
3117 		if (tx_ppdu_info->rx_status.reception_type == 0)
3118 			ath12k_dp_mon_tx_gen_cts2self_frame(tx_ppdu_info);
3119 		status = DP_MON_RX_RESPONSE_REQUIRED_INFO;
3120 		break;
3121 	}
3122 
3123 	case HAL_PCU_PPDU_SETUP_INIT: {
3124 		const struct hal_tx_pcu_ppdu_setup_init *ppdu_setup = tlv_data;
3125 		u32 addr_32;
3126 		u16 addr_16;
3127 
3128 		info[0] = __le32_to_cpu(ppdu_setup->info0);
3129 		info[1] = __le32_to_cpu(ppdu_setup->info1);
3130 		info[2] = __le32_to_cpu(ppdu_setup->info2);
3131 		info[3] = __le32_to_cpu(ppdu_setup->info3);
3132 		info[4] = __le32_to_cpu(ppdu_setup->info4);
3133 		info[5] = __le32_to_cpu(ppdu_setup->info5);
3134 		info[6] = __le32_to_cpu(ppdu_setup->info6);
3135 
3136 		/* protection frame address 1 */
3137 		addr_32 = u32_get_bits(info[1],
3138 				       HAL_TX_PPDU_SETUP_INFO1_PROT_FRAME_ADDR1_31_0);
3139 		addr_16 = u32_get_bits(info[2],
3140 				       HAL_TX_PPDU_SETUP_INFO2_PROT_FRAME_ADDR1_47_32);
3141 		ath12k_dp_get_mac_addr(addr_32, addr_16, tx_ppdu_info->rx_status.addr1);
3142 
3143 		/* protection frame address 2 */
3144 		addr_16 = u32_get_bits(info[2],
3145 				       HAL_TX_PPDU_SETUP_INFO2_PROT_FRAME_ADDR2_15_0);
3146 		addr_32 = u32_get_bits(info[3],
3147 				       HAL_TX_PPDU_SETUP_INFO3_PROT_FRAME_ADDR2_47_16);
3148 		ath12k_dp_get_mac_addr(addr_32, addr_16, tx_ppdu_info->rx_status.addr2);
3149 
3150 		/* protection frame address 3 */
3151 		addr_32 = u32_get_bits(info[4],
3152 				       HAL_TX_PPDU_SETUP_INFO4_PROT_FRAME_ADDR3_31_0);
3153 		addr_16 = u32_get_bits(info[5],
3154 				       HAL_TX_PPDU_SETUP_INFO5_PROT_FRAME_ADDR3_47_32);
3155 		ath12k_dp_get_mac_addr(addr_32, addr_16, tx_ppdu_info->rx_status.addr3);
3156 
3157 		/* protection frame address 4 */
3158 		addr_16 = u32_get_bits(info[5],
3159 				       HAL_TX_PPDU_SETUP_INFO5_PROT_FRAME_ADDR4_15_0);
3160 		addr_32 = u32_get_bits(info[6],
3161 				       HAL_TX_PPDU_SETUP_INFO6_PROT_FRAME_ADDR4_47_16);
3162 		ath12k_dp_get_mac_addr(addr_32, addr_16, tx_ppdu_info->rx_status.addr4);
3163 
3164 		status = u32_get_bits(info[0],
3165 				      HAL_TX_PPDU_SETUP_INFO0_MEDIUM_PROT_TYPE);
3166 		break;
3167 	}
3168 
3169 	case HAL_TX_QUEUE_EXTENSION: {
3170 		const struct hal_tx_queue_exten *tx_q_exten = tlv_data;
3171 
3172 		info[0] = __le32_to_cpu(tx_q_exten->info0);
3173 
3174 		tx_ppdu_info->rx_status.frame_control =
3175 			u32_get_bits(info[0],
3176 				     HAL_TX_Q_EXT_INFO0_FRAME_CTRL);
3177 		tx_ppdu_info->rx_status.fc_valid = true;
3178 		break;
3179 	}
3180 
3181 	case HAL_TX_FES_STATUS_START: {
3182 		const struct hal_tx_fes_status_start *tx_fes_start = tlv_data;
3183 
3184 		info[0] = __le32_to_cpu(tx_fes_start->info0);
3185 
3186 		tx_ppdu_info->rx_status.medium_prot_type =
3187 			u32_get_bits(info[0],
3188 				     HAL_TX_FES_STATUS_START_INFO0_MEDIUM_PROT_TYPE);
3189 		break;
3190 	}
3191 
3192 	case HAL_TX_FES_STATUS_PROT: {
3193 		const struct hal_tx_fes_status_prot *tx_fes_status = tlv_data;
3194 		u32 start_timestamp;
3195 		u32 end_timestamp;
3196 
3197 		info[0] = __le32_to_cpu(tx_fes_status->info0);
3198 		info[1] = __le32_to_cpu(tx_fes_status->info1);
3199 
3200 		start_timestamp =
3201 			u32_get_bits(info[0],
3202 				     HAL_TX_FES_STAT_PROT_INFO0_STRT_FRM_TS_15_0);
3203 		start_timestamp |=
3204 			u32_get_bits(info[0],
3205 				     HAL_TX_FES_STAT_PROT_INFO0_STRT_FRM_TS_31_16) << 16;
3206 		end_timestamp =
3207 			u32_get_bits(info[1],
3208 				     HAL_TX_FES_STAT_PROT_INFO1_END_FRM_TS_15_0);
3209 		end_timestamp |=
3210 			u32_get_bits(info[1],
3211 				     HAL_TX_FES_STAT_PROT_INFO1_END_FRM_TS_31_16) << 16;
3212 		tx_ppdu_info->rx_status.rx_duration = end_timestamp - start_timestamp;
3213 
3214 		ath12k_dp_mon_tx_gen_prot_frame(tx_ppdu_info);
3215 		break;
3216 	}
3217 
3218 	case HAL_TX_FES_STATUS_START_PPDU:
3219 	case HAL_TX_FES_STATUS_START_PROT: {
3220 		const struct hal_tx_fes_status_start_prot *tx_fes_stat_start = tlv_data;
3221 		u64 ppdu_ts;
3222 
3223 		info[0] = __le32_to_cpu(tx_fes_stat_start->info0);
3224 		info[1] = __le32_to_cpu(tx_fes_stat_start->info1);
3225 		tx_ppdu_info->rx_status.ppdu_ts =
3226 			u32_get_bits(info[0],
3227 				     HAL_TX_FES_STAT_STRT_INFO0_PROT_TS_LOWER_32);
3228 		ppdu_ts = (u32_get_bits(info[1],
3229 					HAL_TX_FES_STAT_STRT_INFO1_PROT_TS_UPPER_32));
3230 		tx_ppdu_info->rx_status.ppdu_ts |= ppdu_ts << 32;
3231 		break;
3232 	}
3233 
3234 	case HAL_TX_FES_STATUS_USER_PPDU: {
3235 		const struct hal_tx_fes_status_user_ppdu *tx_fes_usr_ppdu = tlv_data;
3236 
3237 		info[0] = __le32_to_cpu(tx_fes_usr_ppdu->info0);
3238 
3239 		tx_ppdu_info->rx_status.rx_duration =
3240 			u32_get_bits(info[0],
3241 				     HAL_TX_FES_STAT_USR_PPDU_INFO0_DURATION);
3242 		break;
3243 	}
3244 
3245 	case HAL_MACTX_HE_SIG_A_SU:
3246 		ath12k_dp_mon_parse_he_sig_su(tlv_data, &tx_ppdu_info->rx_status);
3247 		break;
3248 
3249 	case HAL_MACTX_HE_SIG_A_MU_DL:
3250 		ath12k_dp_mon_parse_he_sig_mu(tlv_data, &tx_ppdu_info->rx_status);
3251 		break;
3252 
3253 	case HAL_MACTX_HE_SIG_B1_MU:
3254 		ath12k_dp_mon_parse_he_sig_b1_mu(tlv_data, &tx_ppdu_info->rx_status);
3255 		break;
3256 
3257 	case HAL_MACTX_HE_SIG_B2_MU:
3258 		ath12k_dp_mon_parse_he_sig_b2_mu(tlv_data, &tx_ppdu_info->rx_status);
3259 		break;
3260 
3261 	case HAL_MACTX_HE_SIG_B2_OFDMA:
3262 		ath12k_dp_mon_parse_he_sig_b2_ofdma(tlv_data, &tx_ppdu_info->rx_status);
3263 		break;
3264 
3265 	case HAL_MACTX_VHT_SIG_A:
3266 		ath12k_dp_mon_parse_vht_sig_a(tlv_data, &tx_ppdu_info->rx_status);
3267 		break;
3268 
3269 	case HAL_MACTX_L_SIG_A:
3270 		ath12k_dp_mon_parse_l_sig_a(tlv_data, &tx_ppdu_info->rx_status);
3271 		break;
3272 
3273 	case HAL_MACTX_L_SIG_B:
3274 		ath12k_dp_mon_parse_l_sig_b(tlv_data, &tx_ppdu_info->rx_status);
3275 		break;
3276 
3277 	case HAL_RX_FRAME_BITMAP_ACK: {
3278 		const struct hal_rx_frame_bitmap_ack *fbm_ack = tlv_data;
3279 		u32 addr_32;
3280 		u16 addr_16;
3281 
3282 		info[0] = __le32_to_cpu(fbm_ack->info0);
3283 		info[1] = __le32_to_cpu(fbm_ack->info1);
3284 
3285 		addr_32 = u32_get_bits(info[0],
3286 				       HAL_RX_FBM_ACK_INFO0_ADDR1_31_0);
3287 		addr_16 = u32_get_bits(info[1],
3288 				       HAL_RX_FBM_ACK_INFO1_ADDR1_47_32);
3289 		ath12k_dp_get_mac_addr(addr_32, addr_16, tx_ppdu_info->rx_status.addr1);
3290 
3291 		ath12k_dp_mon_tx_gen_ack_frame(tx_ppdu_info);
3292 		break;
3293 	}
3294 
3295 	case HAL_MACTX_PHY_DESC: {
3296 		const struct hal_tx_phy_desc *tx_phy_desc = tlv_data;
3297 
3298 		info[0] = __le32_to_cpu(tx_phy_desc->info0);
3299 		info[1] = __le32_to_cpu(tx_phy_desc->info1);
3300 		info[2] = __le32_to_cpu(tx_phy_desc->info2);
3301 		info[3] = __le32_to_cpu(tx_phy_desc->info3);
3302 
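		/* Copy the transmit PHY parameters (beamforming, preamble,
		 * MCS, LTF size, NSS, channel and bandwidth) into the monitor
		 * rx status.
		 */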
3303 		tx_ppdu_info->rx_status.beamformed =
3304 			u32_get_bits(info[0],
3305 				     HAL_TX_PHY_DESC_INFO0_BF_TYPE);
3306 		tx_ppdu_info->rx_status.preamble_type =
3307 			u32_get_bits(info[0],
3308 				     HAL_TX_PHY_DESC_INFO0_PREAMBLE_11B);
3309 		tx_ppdu_info->rx_status.mcs =
3310 			u32_get_bits(info[1],
3311 				     HAL_TX_PHY_DESC_INFO1_MCS);
3312 		tx_ppdu_info->rx_status.ltf_size =
3313 			u32_get_bits(info[3],
3314 				     HAL_TX_PHY_DESC_INFO3_LTF_SIZE);
3315 		tx_ppdu_info->rx_status.nss =
3316 			u32_get_bits(info[2],
3317 				     HAL_TX_PHY_DESC_INFO2_NSS);
3318 		tx_ppdu_info->rx_status.chan_num =
3319 			u32_get_bits(info[3],
3320 				     HAL_TX_PHY_DESC_INFO3_ACTIVE_CHANNEL);
3321 		tx_ppdu_info->rx_status.bw =
3322 			u32_get_bits(info[0],
3323 				     HAL_TX_PHY_DESC_INFO0_BANDWIDTH);
3324 		break;
3325 	}
3326 
3327 	case HAL_TX_MPDU_START: {
3328 		struct dp_mon_mpdu *mon_mpdu;
3329 
3330 		mon_mpdu = kzalloc(sizeof(*mon_mpdu), GFP_ATOMIC);
3331 		if (!mon_mpdu)
3332 			return DP_MON_TX_STATUS_PPDU_NOT_DONE;
		/* track the newly allocated MPDU so HAL_TX_MPDU_END can queue it */
		tx_ppdu_info->tx_mon_mpdu = mon_mpdu;
3333 		status = DP_MON_TX_MPDU_START;
3334 		break;
3335 	}
3336 
3337 	case HAL_TX_MPDU_END:
3338 		list_add_tail(&tx_ppdu_info->tx_mon_mpdu->list,
3339 			      &tx_ppdu_info->dp_tx_mon_mpdu_list);
3340 		break;
3341 	}
3342 
3343 	return status;
3344 }
3345 
3346 enum dp_mon_tx_tlv_status
3347 ath12k_dp_mon_tx_status_get_num_user(u16 tlv_tag,
3348 				     struct hal_tlv_hdr *tx_tlv,
3349 				     u8 *num_users)
3350 {
3351 	u32 tlv_status = DP_MON_TX_STATUS_PPDU_NOT_DONE;
3352 	u32 info0;
3353 
3354 	switch (tlv_tag) {
3355 	case HAL_TX_FES_SETUP: {
3356 		struct hal_tx_fes_setup *tx_fes_setup =
3357 				(struct hal_tx_fes_setup *)tx_tlv;
3358 
3359 		info0 = __le32_to_cpu(tx_fes_setup->info0);
3360 
3361 		*num_users = u32_get_bits(info0, HAL_TX_FES_SETUP_INFO0_NUM_OF_USERS);
3362 		tlv_status = DP_MON_TX_FES_SETUP;
3363 		break;
3364 	}
3365 
3366 	case HAL_RX_RESPONSE_REQUIRED_INFO: {
3367 		/* TODO: need to update *num_users */
3368 		tlv_status = DP_MON_RX_RESPONSE_REQUIRED_INFO;
3369 		break;
3370 	}
3371 	}
3372 
3373 	return tlv_status;
3374 }
3375 
3376 static void
3377 ath12k_dp_mon_tx_process_ppdu_info(struct ath12k *ar,
3378 				   struct napi_struct *napi,
3379 				   struct dp_mon_tx_ppdu_info *tx_ppdu_info)
3380 {
3381 	struct dp_mon_mpdu *tmp, *mon_mpdu;
3382 
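	/* Hand every completed MPDU of this PPDU to the monitor delivery path
	 * and free the list entries.
	 */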
3383 	list_for_each_entry_safe(mon_mpdu, tmp,
3384 				 &tx_ppdu_info->dp_tx_mon_mpdu_list, list) {
3385 		list_del(&mon_mpdu->list);
3386 
3387 		if (mon_mpdu->head)
3388 			ath12k_dp_mon_rx_deliver(ar, mon_mpdu,
3389 						 &tx_ppdu_info->rx_status, napi);
3390 
3391 		kfree(mon_mpdu);
3392 	}
3393 }
3394 
3395 enum hal_rx_mon_status
3396 ath12k_dp_mon_tx_parse_mon_status(struct ath12k *ar,
3397 				  struct ath12k_mon_data *pmon,
3398 				  struct sk_buff *skb,
3399 				  struct napi_struct *napi,
3400 				  u32 ppdu_id)
3401 {
3402 	struct ath12k_base *ab = ar->ab;
3403 	struct dp_mon_tx_ppdu_info *tx_prot_ppdu_info, *tx_data_ppdu_info;
3404 	struct hal_tlv_hdr *tlv;
3405 	u8 *ptr = skb->data;
3406 	u16 tlv_tag;
3407 	u16 tlv_len;
3408 	u32 tlv_userid = 0;
3409 	u8 num_user;
3410 	u32 tlv_status = DP_MON_TX_STATUS_PPDU_NOT_DONE;
3411 
3412 	tx_prot_ppdu_info = ath12k_dp_mon_tx_get_ppdu_info(pmon, ppdu_id,
3413 							   DP_MON_TX_PROT_PPDU_INFO);
3414 	if (!tx_prot_ppdu_info)
3415 		return -ENOMEM;
3416 
3417 	tlv = (struct hal_tlv_hdr *)ptr;
3418 	tlv_tag = le32_get_bits(tlv->tl, HAL_TLV_HDR_TAG);
3419 
3420 	tlv_status = ath12k_dp_mon_tx_status_get_num_user(tlv_tag, tlv, &num_user);
3421 	if (tlv_status == DP_MON_TX_STATUS_PPDU_NOT_DONE || !num_user)
3422 		return -EINVAL;
3423 
3424 	tx_data_ppdu_info = ath12k_dp_mon_tx_get_ppdu_info(pmon, ppdu_id,
3425 							   DP_MON_TX_DATA_PPDU_INFO);
3426 	if (!tx_data_ppdu_info)
3427 		return -ENOMEM;
3428 
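	/* Walk the status buffer TLV by TLV, aligning to HAL_TLV_ALIGN after
	 * each one, until the FES end TLV is parsed or the buffer is exhausted.
	 */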
3429 	do {
3430 		tlv = (struct hal_tlv_hdr *)ptr;
3431 		tlv_tag = le32_get_bits(tlv->tl, HAL_TLV_HDR_TAG);
3432 		tlv_len = le32_get_bits(tlv->tl, HAL_TLV_HDR_LEN);
3433 		tlv_userid = le32_get_bits(tlv->tl, HAL_TLV_USR_ID);
3434 
3435 		tlv_status = ath12k_dp_mon_tx_parse_status_tlv(ab, pmon,
3436 							       tlv_tag, ptr,
3437 							       tlv_userid);
3438 		ptr += tlv_len;
3439 		ptr = PTR_ALIGN(ptr, HAL_TLV_ALIGN);
3440 		if ((ptr - skb->data) >= DP_TX_MONITOR_BUF_SIZE)
3441 			break;
3442 	} while (tlv_status != DP_MON_TX_FES_STATUS_END);
3443 
3444 	ath12k_dp_mon_tx_process_ppdu_info(ar, napi, tx_data_ppdu_info);
3445 	ath12k_dp_mon_tx_process_ppdu_info(ar, napi, tx_prot_ppdu_info);
3446 
3447 	return tlv_status;
3448 }
3449 
3450 static void
3451 ath12k_dp_mon_rx_update_peer_rate_table_stats(struct ath12k_rx_peer_stats *rx_stats,
3452 					      struct hal_rx_mon_ppdu_info *ppdu_info,
3453 					      struct hal_rx_user_status *user_stats,
3454 					      u32 num_msdu)
3455 {
3456 	struct ath12k_rx_peer_rate_stats *stats;
3457 	u32 mcs_idx = (user_stats) ? user_stats->mcs : ppdu_info->mcs;
3458 	u32 nss_idx = (user_stats) ? user_stats->nss - 1 : ppdu_info->nss - 1;
3459 	u32 bw_idx = ppdu_info->bw;
3460 	u32 gi_idx = ppdu_info->gi;
3461 	u32 len;
3462 
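	/* The per-peer rate tables are indexed by bandwidth, guard interval,
	 * NSS and MCS; out-of-range indices are ignored. For HE/EHT preambles
	 * the guard interval is first remapped to the nl80211 HE GI values.
	 */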
3463 	if (mcs_idx > HAL_RX_MAX_MCS_HT || nss_idx >= HAL_RX_MAX_NSS ||
3464 	    bw_idx >= HAL_RX_BW_MAX || gi_idx >= HAL_RX_GI_MAX) {
3465 		return;
3466 	}
3467 
3468 	if (ppdu_info->preamble_type == HAL_RX_PREAMBLE_11AX ||
3469 	    ppdu_info->preamble_type == HAL_RX_PREAMBLE_11BE)
3470 		gi_idx = ath12k_he_gi_to_nl80211_he_gi(ppdu_info->gi);
3471 
3472 	rx_stats->pkt_stats.rx_rate[bw_idx][gi_idx][nss_idx][mcs_idx] += num_msdu;
3473 	stats = &rx_stats->byte_stats;
3474 
3475 	if (user_stats)
3476 		len = user_stats->mpdu_ok_byte_count;
3477 	else
3478 		len = ppdu_info->mpdu_len;
3479 
3480 	stats->rx_rate[bw_idx][gi_idx][nss_idx][mcs_idx] += len;
3481 }
3482 
3483 static void ath12k_dp_mon_rx_update_peer_su_stats(struct ath12k *ar,
3484 						  struct ath12k_link_sta *arsta,
3485 						  struct hal_rx_mon_ppdu_info *ppdu_info)
3486 {
3487 	struct ath12k_rx_peer_stats *rx_stats = arsta->rx_stats;
3488 	u32 num_msdu;
3489 
3490 	arsta->rssi_comb = ppdu_info->rssi_comb;
3491 	ewma_avg_rssi_add(&arsta->avg_rssi, ppdu_info->rssi_comb);
3492 	if (!rx_stats)
3493 		return;
3494 
3495 	num_msdu = ppdu_info->tcp_msdu_count + ppdu_info->tcp_ack_msdu_count +
3496 		   ppdu_info->udp_msdu_count + ppdu_info->other_msdu_count;
3497 
3498 	rx_stats->num_msdu += num_msdu;
3499 	rx_stats->tcp_msdu_count += ppdu_info->tcp_msdu_count +
3500 				    ppdu_info->tcp_ack_msdu_count;
3501 	rx_stats->udp_msdu_count += ppdu_info->udp_msdu_count;
3502 	rx_stats->other_msdu_count += ppdu_info->other_msdu_count;
3503 
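	/* Legacy 11a/11b PPDUs carry no NSS/MCS/TID information; account them
	 * as a single spatial stream with an out-of-range MCS and the
	 * catch-all TID slot.
	 */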
3504 	if (ppdu_info->preamble_type == HAL_RX_PREAMBLE_11A ||
3505 	    ppdu_info->preamble_type == HAL_RX_PREAMBLE_11B) {
3506 		ppdu_info->nss = 1;
3507 		ppdu_info->mcs = HAL_RX_MAX_MCS;
3508 		ppdu_info->tid = IEEE80211_NUM_TIDS;
3509 	}
3510 
3511 	if (ppdu_info->ldpc < HAL_RX_SU_MU_CODING_MAX)
3512 		rx_stats->coding_count[ppdu_info->ldpc] += num_msdu;
3513 
3514 	if (ppdu_info->tid <= IEEE80211_NUM_TIDS)
3515 		rx_stats->tid_count[ppdu_info->tid] += num_msdu;
3516 
3517 	if (ppdu_info->preamble_type < HAL_RX_PREAMBLE_MAX)
3518 		rx_stats->pream_cnt[ppdu_info->preamble_type] += num_msdu;
3519 
3520 	if (ppdu_info->reception_type < HAL_RX_RECEPTION_TYPE_MAX)
3521 		rx_stats->reception_type[ppdu_info->reception_type] += num_msdu;
3522 
3523 	if (ppdu_info->is_stbc)
3524 		rx_stats->stbc_count += num_msdu;
3525 
3526 	if (ppdu_info->beamformed)
3527 		rx_stats->beamformed_count += num_msdu;
3528 
3529 	if (ppdu_info->num_mpdu_fcs_ok > 1)
3530 		rx_stats->ampdu_msdu_count += num_msdu;
3531 	else
3532 		rx_stats->non_ampdu_msdu_count += num_msdu;
3533 
3534 	rx_stats->num_mpdu_fcs_ok += ppdu_info->num_mpdu_fcs_ok;
3535 	rx_stats->num_mpdu_fcs_err += ppdu_info->num_mpdu_fcs_err;
3536 	rx_stats->dcm_count += ppdu_info->dcm;
3537 
3538 	rx_stats->rx_duration += ppdu_info->rx_duration;
3539 	arsta->rx_duration = rx_stats->rx_duration;
3540 
3541 	if (ppdu_info->nss > 0 && ppdu_info->nss <= HAL_RX_MAX_NSS) {
3542 		rx_stats->pkt_stats.nss_count[ppdu_info->nss - 1] += num_msdu;
3543 		rx_stats->byte_stats.nss_count[ppdu_info->nss - 1] += ppdu_info->mpdu_len;
3544 	}
3545 
3546 	if (ppdu_info->preamble_type == HAL_RX_PREAMBLE_11N &&
3547 	    ppdu_info->mcs <= HAL_RX_MAX_MCS_HT) {
3548 		rx_stats->pkt_stats.ht_mcs_count[ppdu_info->mcs] += num_msdu;
3549 		rx_stats->byte_stats.ht_mcs_count[ppdu_info->mcs] += ppdu_info->mpdu_len;
3550 		/* To fit into rate table for HT packets */
3551 		ppdu_info->mcs = ppdu_info->mcs % 8;
3552 	}
3553 
3554 	if (ppdu_info->preamble_type == HAL_RX_PREAMBLE_11AC &&
3555 	    ppdu_info->mcs <= HAL_RX_MAX_MCS_VHT) {
3556 		rx_stats->pkt_stats.vht_mcs_count[ppdu_info->mcs] += num_msdu;
3557 		rx_stats->byte_stats.vht_mcs_count[ppdu_info->mcs] += ppdu_info->mpdu_len;
3558 	}
3559 
3560 	if (ppdu_info->preamble_type == HAL_RX_PREAMBLE_11AX &&
3561 	    ppdu_info->mcs <= HAL_RX_MAX_MCS_HE) {
3562 		rx_stats->pkt_stats.he_mcs_count[ppdu_info->mcs] += num_msdu;
3563 		rx_stats->byte_stats.he_mcs_count[ppdu_info->mcs] += ppdu_info->mpdu_len;
3564 	}
3565 
3566 	if (ppdu_info->preamble_type == HAL_RX_PREAMBLE_11BE &&
3567 	    ppdu_info->mcs <= HAL_RX_MAX_MCS_BE) {
3568 		rx_stats->pkt_stats.be_mcs_count[ppdu_info->mcs] += num_msdu;
3569 		rx_stats->byte_stats.be_mcs_count[ppdu_info->mcs] += ppdu_info->mpdu_len;
3570 	}
3571 
3572 	if ((ppdu_info->preamble_type == HAL_RX_PREAMBLE_11A ||
3573 	     ppdu_info->preamble_type == HAL_RX_PREAMBLE_11B) &&
3574 	     ppdu_info->rate < HAL_RX_LEGACY_RATE_INVALID) {
3575 		rx_stats->pkt_stats.legacy_count[ppdu_info->rate] += num_msdu;
3576 		rx_stats->byte_stats.legacy_count[ppdu_info->rate] += ppdu_info->mpdu_len;
3577 	}
3578 
3579 	if (ppdu_info->gi < HAL_RX_GI_MAX) {
3580 		rx_stats->pkt_stats.gi_count[ppdu_info->gi] += num_msdu;
3581 		rx_stats->byte_stats.gi_count[ppdu_info->gi] += ppdu_info->mpdu_len;
3582 	}
3583 
3584 	if (ppdu_info->bw < HAL_RX_BW_MAX) {
3585 		rx_stats->pkt_stats.bw_count[ppdu_info->bw] += num_msdu;
3586 		rx_stats->byte_stats.bw_count[ppdu_info->bw] += ppdu_info->mpdu_len;
3587 	}
3588 
3589 	ath12k_dp_mon_rx_update_peer_rate_table_stats(rx_stats, ppdu_info,
3590 						      NULL, num_msdu);
3591 }
3592 
3593 void ath12k_dp_mon_rx_process_ulofdma(struct hal_rx_mon_ppdu_info *ppdu_info)
3594 {
3595 	struct hal_rx_user_status *rx_user_status;
3596 	u32 num_users, i, mu_ul_user_v0_word0, mu_ul_user_v0_word1, ru_size;
3597 
3598 	if (!(ppdu_info->reception_type == HAL_RX_RECEPTION_TYPE_MU_MIMO ||
3599 	      ppdu_info->reception_type == HAL_RX_RECEPTION_TYPE_MU_OFDMA ||
3600 	      ppdu_info->reception_type == HAL_RX_RECEPTION_TYPE_MU_OFDMA_MIMO))
3601 		return;
3602 
3603 	num_users = ppdu_info->num_users;
3604 	if (num_users > HAL_MAX_UL_MU_USERS)
3605 		num_users = HAL_MAX_UL_MU_USERS;
3606 
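	/* Decode the version 0 UL OFDMA user info words for each user: MCS,
	 * NSS, RU start index and RU size when the entry is valid, plus the
	 * LDPC flag.
	 */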
3607 	for (i = 0; i < num_users; i++) {
3608 		rx_user_status = &ppdu_info->userstats[i];
3609 		mu_ul_user_v0_word0 =
3610 			rx_user_status->ul_ofdma_user_v0_word0;
3611 		mu_ul_user_v0_word1 =
3612 			rx_user_status->ul_ofdma_user_v0_word1;
3613 
3614 		if (u32_get_bits(mu_ul_user_v0_word0,
3615 				 HAL_RX_UL_OFDMA_USER_INFO_V0_W0_VALID) &&
3616 		    !u32_get_bits(mu_ul_user_v0_word0,
3617 				  HAL_RX_UL_OFDMA_USER_INFO_V0_W0_VER)) {
3618 			rx_user_status->mcs =
3619 				u32_get_bits(mu_ul_user_v0_word1,
3620 					     HAL_RX_UL_OFDMA_USER_INFO_V0_W1_MCS);
3621 			rx_user_status->nss =
3622 				u32_get_bits(mu_ul_user_v0_word1,
3623 					     HAL_RX_UL_OFDMA_USER_INFO_V0_W1_NSS) + 1;
3624 
3625 			rx_user_status->ofdma_info_valid = 1;
3626 			rx_user_status->ul_ofdma_ru_start_index =
3627 				u32_get_bits(mu_ul_user_v0_word1,
3628 					     HAL_RX_UL_OFDMA_USER_INFO_V0_W1_RU_START);
3629 
3630 			ru_size = u32_get_bits(mu_ul_user_v0_word1,
3631 					       HAL_RX_UL_OFDMA_USER_INFO_V0_W1_RU_SIZE);
3632 			rx_user_status->ul_ofdma_ru_width = ru_size;
3633 			rx_user_status->ul_ofdma_ru_size = ru_size;
3634 		}
3635 		rx_user_status->ldpc = u32_get_bits(mu_ul_user_v0_word1,
3636 						    HAL_RX_UL_OFDMA_USER_INFO_V0_W1_LDPC);
3637 	}
3638 	ppdu_info->ldpc = 1;
3639 }
3640 
3641 static void
3642 ath12k_dp_mon_rx_update_user_stats(struct ath12k *ar,
3643 				   struct hal_rx_mon_ppdu_info *ppdu_info,
3644 				   u32 uid)
3645 {
3646 	struct ath12k_link_sta *arsta;
3647 	struct ath12k_rx_peer_stats *rx_stats = NULL;
3648 	struct hal_rx_user_status *user_stats = &ppdu_info->userstats[uid];
3649 	struct ath12k_peer *peer;
3650 	u32 num_msdu;
3651 
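	/* An AST index of 0 or 0xFFFF is treated as invalid; skip such users. */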
3652 	if (user_stats->ast_index == 0 || user_stats->ast_index == 0xFFFF)
3653 		return;
3654 
3655 	peer = ath12k_peer_find_by_ast(ar->ab, user_stats->ast_index);
3656 
3657 	if (!peer) {
3658 		ath12k_warn(ar->ab, "peer ast idx %d can't be found\n",
3659 			    user_stats->ast_index);
3660 		return;
3661 	}
3662 
3663 	arsta = ath12k_peer_get_link_sta(ar->ab, peer);
3664 	if (!arsta) {
3665 		ath12k_warn(ar->ab, "link sta not found on peer %pM id %d\n",
3666 			    peer->addr, peer->peer_id);
3667 		return;
3668 	}
3669 
3670 	arsta->rssi_comb = ppdu_info->rssi_comb;
3671 	ewma_avg_rssi_add(&arsta->avg_rssi, ppdu_info->rssi_comb);
3672 	rx_stats = arsta->rx_stats;
3673 	if (!rx_stats)
3674 		return;
3675 
3676 	num_msdu = user_stats->tcp_msdu_count + user_stats->tcp_ack_msdu_count +
3677 		   user_stats->udp_msdu_count + user_stats->other_msdu_count;
3678 
3679 	rx_stats->num_msdu += num_msdu;
3680 	rx_stats->tcp_msdu_count += user_stats->tcp_msdu_count +
3681 				    user_stats->tcp_ack_msdu_count;
3682 	rx_stats->udp_msdu_count += user_stats->udp_msdu_count;
3683 	rx_stats->other_msdu_count += user_stats->other_msdu_count;
3684 
3685 	if (ppdu_info->ldpc < HAL_RX_SU_MU_CODING_MAX)
3686 		rx_stats->coding_count[ppdu_info->ldpc] += num_msdu;
3687 
3688 	if (user_stats->tid <= IEEE80211_NUM_TIDS)
3689 		rx_stats->tid_count[user_stats->tid] += num_msdu;
3690 
3691 	if (user_stats->preamble_type < HAL_RX_PREAMBLE_MAX)
3692 		rx_stats->pream_cnt[user_stats->preamble_type] += num_msdu;
3693 
3694 	if (ppdu_info->reception_type < HAL_RX_RECEPTION_TYPE_MAX)
3695 		rx_stats->reception_type[ppdu_info->reception_type] += num_msdu;
3696 
3697 	if (ppdu_info->is_stbc)
3698 		rx_stats->stbc_count += num_msdu;
3699 
3700 	if (ppdu_info->beamformed)
3701 		rx_stats->beamformed_count += num_msdu;
3702 
3703 	if (user_stats->mpdu_cnt_fcs_ok > 1)
3704 		rx_stats->ampdu_msdu_count += num_msdu;
3705 	else
3706 		rx_stats->non_ampdu_msdu_count += num_msdu;
3707 
3708 	rx_stats->num_mpdu_fcs_ok += user_stats->mpdu_cnt_fcs_ok;
3709 	rx_stats->num_mpdu_fcs_err += user_stats->mpdu_cnt_fcs_err;
3710 	rx_stats->dcm_count += ppdu_info->dcm;
3711 	if (ppdu_info->reception_type == HAL_RX_RECEPTION_TYPE_MU_OFDMA ||
3712 	    ppdu_info->reception_type == HAL_RX_RECEPTION_TYPE_MU_OFDMA_MIMO)
3713 		rx_stats->ru_alloc_cnt[user_stats->ul_ofdma_ru_size] += num_msdu;
3714 
3715 	rx_stats->rx_duration += ppdu_info->rx_duration;
3716 	arsta->rx_duration = rx_stats->rx_duration;
3717 
3718 	if (user_stats->nss > 0 && user_stats->nss <= HAL_RX_MAX_NSS) {
3719 		rx_stats->pkt_stats.nss_count[user_stats->nss - 1] += num_msdu;
3720 		rx_stats->byte_stats.nss_count[user_stats->nss - 1] +=
3721 						user_stats->mpdu_ok_byte_count;
3722 	}
3723 
3724 	if (user_stats->preamble_type == HAL_RX_PREAMBLE_11AX &&
3725 	    user_stats->mcs <= HAL_RX_MAX_MCS_HE) {
3726 		rx_stats->pkt_stats.he_mcs_count[user_stats->mcs] += num_msdu;
3727 		rx_stats->byte_stats.he_mcs_count[user_stats->mcs] +=
3728 						user_stats->mpdu_ok_byte_count;
3729 	}
3730 
3731 	if (ppdu_info->gi < HAL_RX_GI_MAX) {
3732 		rx_stats->pkt_stats.gi_count[ppdu_info->gi] += num_msdu;
3733 		rx_stats->byte_stats.gi_count[ppdu_info->gi] +=
3734 						user_stats->mpdu_ok_byte_count;
3735 	}
3736 
3737 	if (ppdu_info->bw < HAL_RX_BW_MAX) {
3738 		rx_stats->pkt_stats.bw_count[ppdu_info->bw] += num_msdu;
3739 		rx_stats->byte_stats.bw_count[ppdu_info->bw] +=
3740 						user_stats->mpdu_ok_byte_count;
3741 	}
3742 
3743 	ath12k_dp_mon_rx_update_peer_rate_table_stats(rx_stats, ppdu_info,
3744 						      user_stats, num_msdu);
3745 }
3746 
3747 static void
3748 ath12k_dp_mon_rx_update_peer_mu_stats(struct ath12k *ar,
3749 				      struct hal_rx_mon_ppdu_info *ppdu_info)
3750 {
3751 	u32 num_users, i;
3752 
3753 	num_users = ppdu_info->num_users;
3754 	if (num_users > HAL_MAX_UL_MU_USERS)
3755 		num_users = HAL_MAX_UL_MU_USERS;
3756 
3757 	for (i = 0; i < num_users; i++)
3758 		ath12k_dp_mon_rx_update_user_stats(ar, ppdu_info, i);
3759 }
3760 
3761 static void
3762 ath12k_dp_mon_rx_memset_ppdu_info(struct hal_rx_mon_ppdu_info *ppdu_info)
3763 {
3764 	memset(ppdu_info, 0, sizeof(*ppdu_info));
3765 	ppdu_info->peer_id = HAL_INVALID_PEERID;
3766 }
3767 
3768 int ath12k_dp_mon_srng_process(struct ath12k *ar, int *budget,
3769 			       struct napi_struct *napi)
3770 {
3771 	struct ath12k_base *ab = ar->ab;
3772 	struct ath12k_pdev_dp *pdev_dp = &ar->dp;
3773 	struct ath12k_mon_data *pmon = (struct ath12k_mon_data *)&pdev_dp->mon_data;
3774 	struct hal_rx_mon_ppdu_info *ppdu_info = &pmon->mon_ppdu_info;
3775 	struct ath12k_dp *dp = &ab->dp;
3776 	struct hal_mon_dest_desc *mon_dst_desc;
3777 	struct sk_buff *skb;
3778 	struct ath12k_skb_rxcb *rxcb;
3779 	struct dp_srng *mon_dst_ring;
3780 	struct hal_srng *srng;
3781 	struct dp_rxdma_mon_ring *buf_ring;
3782 	struct ath12k_link_sta *arsta;
3783 	struct ath12k_peer *peer;
3784 	struct sk_buff_head skb_list;
3785 	u64 cookie;
3786 	int num_buffs_reaped = 0, srng_id, buf_id;
3787 	u32 hal_status, end_offset, info0, end_reason;
3788 	u8 pdev_idx = ath12k_hw_mac_id_to_pdev_id(ab->hw_params, ar->pdev_idx);
3789 
3790 	__skb_queue_head_init(&skb_list);
3791 	srng_id = ath12k_hw_mac_id_to_srng_id(ab->hw_params, pdev_idx);
3792 	mon_dst_ring = &pdev_dp->rxdma_mon_dst_ring[srng_id];
3793 	buf_ring = &dp->rxdma_mon_buf_ring;
3794 
3795 	srng = &ab->hal.srng_list[mon_dst_ring->ring_id];
3796 	spin_lock_bh(&srng->lock);
3797 	ath12k_hal_srng_access_begin(ab, srng);
3798 
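	/* Reap monitor destination descriptors until the budget is exhausted
	 * or the ring is empty; valid buffers are queued on skb_list and
	 * parsed after the ring lock is dropped.
	 */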
3799 	while (likely(*budget)) {
3800 		mon_dst_desc = ath12k_hal_srng_dst_peek(ab, srng);
3801 		if (unlikely(!mon_dst_desc))
3802 			break;
3803 
3804 		/* In case of empty descriptor, the cookie in the ring descriptor
3805 		 * is invalid. Therefore, this entry is skipped, and ring processing
3806 		 * continues.
3807 		 */
3808 		info0 = le32_to_cpu(mon_dst_desc->info0);
3809 		if (u32_get_bits(info0, HAL_MON_DEST_INFO0_EMPTY_DESC))
3810 			goto move_next;
3811 
3812 		cookie = le32_to_cpu(mon_dst_desc->cookie);
3813 		buf_id = u32_get_bits(cookie, DP_RXDMA_BUF_COOKIE_BUF_ID);
3814 
3815 		spin_lock_bh(&buf_ring->idr_lock);
3816 		skb = idr_remove(&buf_ring->bufs_idr, buf_id);
3817 		spin_unlock_bh(&buf_ring->idr_lock);
3818 
3819 		if (unlikely(!skb)) {
3820 			ath12k_warn(ab, "monitor destination with invalid buf_id %d\n",
3821 				    buf_id);
3822 			goto move_next;
3823 		}
3824 
3825 		rxcb = ATH12K_SKB_RXCB(skb);
3826 		dma_unmap_single(ab->dev, rxcb->paddr,
3827 				 skb->len + skb_tailroom(skb),
3828 				 DMA_FROM_DEVICE);
3829 
3830 		end_reason = u32_get_bits(info0, HAL_MON_DEST_INFO0_END_REASON);
3831 
3832 		/* HAL_MON_FLUSH_DETECTED implies that an rx flush was received at the
3833 		 * end of the rx PPDU, and HAL_MON_PPDU_TRUNCATED implies that the PPDU
3834 		 * was truncated due to a system level error. In both cases the buffer
3835 		 * data can be discarded.
3836 		 */
3837 		if ((end_reason == HAL_MON_FLUSH_DETECTED) ||
3838 		    (end_reason == HAL_MON_PPDU_TRUNCATED)) {
3839 			ath12k_dbg(ab, ATH12K_DBG_DATA,
3840 				   "Monitor dest descriptor end reason %d", end_reason);
3841 			dev_kfree_skb_any(skb);
3842 			goto move_next;
3843 		}
3844 
3845 		/* Decrement the budget only when a ring descriptor carrying the
3846 		 * HAL_MON_END_OF_PPDU end reason is reaped, so that one PPDU worth of
3847 		 * data is always reaped. This helps to efficiently utilize the NAPI budget.
3848 		 */
3849 		if (end_reason == HAL_MON_END_OF_PPDU) {
3850 			*budget -= 1;
3851 			rxcb->is_end_of_ppdu = true;
3852 		}
3853 
3854 		end_offset = u32_get_bits(info0, HAL_MON_DEST_INFO0_END_OFFSET);
3855 		if (likely(end_offset <= DP_RX_BUFFER_SIZE)) {
3856 			skb_put(skb, end_offset);
3857 		} else {
3858 			ath12k_warn(ab,
3859 				    "invalid offset on mon stats destination %u\n",
3860 				    end_offset);
3861 			skb_put(skb, DP_RX_BUFFER_SIZE);
3862 		}
3863 
3864 		__skb_queue_tail(&skb_list, skb);
3865 
3866 move_next:
3867 		ath12k_dp_mon_buf_replenish(ab, buf_ring, 1);
3868 		ath12k_hal_srng_dst_get_next_entry(ab, srng);
3869 		num_buffs_reaped++;
3870 	}
3871 
3872 	ath12k_hal_srng_access_end(ab, srng);
3873 	spin_unlock_bh(&srng->lock);
3874 
3875 	if (!num_buffs_reaped)
3876 		return 0;
3877 
3878 	/* In some cases, one PPDU worth of data can be spread across multiple NAPI
3879 	 * schedules. To avoid losing the already parsed ppdu_info information, skip
3880 	 * the memset of the ppdu_info structure and continue processing it.
3881 	 */
3882 	if (!ppdu_info->ppdu_continuation)
3883 		ath12k_dp_mon_rx_memset_ppdu_info(ppdu_info);
3884 
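	/* Parse the TLVs of each reaped buffer; once a complete PPDU has been
	 * parsed, update SU or MU statistics for the peer that sent it.
	 */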
3885 	while ((skb = __skb_dequeue(&skb_list))) {
3886 		hal_status = ath12k_dp_mon_rx_parse_mon_status(ar, pmon, skb, napi);
3887 		if (hal_status != HAL_RX_MON_STATUS_PPDU_DONE) {
3888 			ppdu_info->ppdu_continuation = true;
3889 			dev_kfree_skb_any(skb);
3890 			continue;
3891 		}
3892 
3893 		if (ppdu_info->peer_id == HAL_INVALID_PEERID)
3894 			goto free_skb;
3895 
3896 		rcu_read_lock();
3897 		spin_lock_bh(&ab->base_lock);
3898 		peer = ath12k_peer_find_by_id(ab, ppdu_info->peer_id);
3899 		if (!peer || !peer->sta) {
3900 			ath12k_dbg(ab, ATH12K_DBG_DATA,
3901 				   "failed to find the peer with monitor peer_id %d\n",
3902 				   ppdu_info->peer_id);
3903 			goto next_skb;
3904 		}
3905 
3906 		if (ppdu_info->reception_type == HAL_RX_RECEPTION_TYPE_SU) {
3907 			arsta = ath12k_peer_get_link_sta(ar->ab, peer);
3908 			if (!arsta) {
3909 				ath12k_warn(ar->ab, "link sta not found on peer %pM id %d\n",
3910 					    peer->addr, peer->peer_id);
3911 				spin_unlock_bh(&ab->base_lock);
3912 				rcu_read_unlock();
3913 				dev_kfree_skb_any(skb);
3914 				continue;
3915 			}
3916 			ath12k_dp_mon_rx_update_peer_su_stats(ar, arsta,
3917 							      ppdu_info);
3918 		} else if ((ppdu_info->fc_valid) &&
3919 			   (ppdu_info->ast_index != HAL_AST_IDX_INVALID)) {
3920 			ath12k_dp_mon_rx_process_ulofdma(ppdu_info);
3921 			ath12k_dp_mon_rx_update_peer_mu_stats(ar, ppdu_info);
3922 		}
3923 
3924 next_skb:
3925 		spin_unlock_bh(&ab->base_lock);
3926 		rcu_read_unlock();
3927 free_skb:
3928 		dev_kfree_skb_any(skb);
3929 		ath12k_dp_mon_rx_memset_ppdu_info(ppdu_info);
3930 	}
3931 
3932 	return num_buffs_reaped;
3933 }
3934 
3935 static int ath12k_dp_rx_reap_mon_status_ring(struct ath12k_base *ab, int mac_id,
3936 					     int *budget, struct sk_buff_head *skb_list)
3937 {
3938 	const struct ath12k_hw_hal_params *hal_params;
3939 	int buf_id, srng_id, num_buffs_reaped = 0;
3940 	enum dp_mon_status_buf_state reap_status;
3941 	struct dp_rxdma_mon_ring *rx_ring;
3942 	struct ath12k_mon_data *pmon;
3943 	struct ath12k_skb_rxcb *rxcb;
3944 	struct hal_tlv_64_hdr *tlv;
3945 	void *rx_mon_status_desc;
3946 	struct hal_srng *srng;
3947 	struct ath12k_dp *dp;
3948 	struct sk_buff *skb;
3949 	struct ath12k *ar;
3950 	dma_addr_t paddr;
3951 	u32 cookie;
3952 	u8 rbm;
3953 
3954 	ar = ab->pdevs[ath12k_hw_mac_id_to_pdev_id(ab->hw_params, mac_id)].ar;
3955 	dp = &ab->dp;
3956 	pmon = &ar->dp.mon_data;
3957 	srng_id = ath12k_hw_mac_id_to_srng_id(ab->hw_params, mac_id);
3958 	rx_ring = &dp->rx_mon_status_refill_ring[srng_id];
3959 
3960 	srng = &ab->hal.srng_list[rx_ring->refill_buf_ring.ring_id];
3961 
3962 	spin_lock_bh(&srng->lock);
3963 
3964 	ath12k_hal_srng_access_begin(ab, srng);
3965 
3966 	while (*budget) {
3967 		*budget -= 1;
3968 		rx_mon_status_desc = ath12k_hal_srng_src_peek(ab, srng);
3969 		if (!rx_mon_status_desc) {
3970 			pmon->buf_state = DP_MON_STATUS_REPLINISH;
3971 			break;
3972 		}
3973 		ath12k_hal_rx_buf_addr_info_get(rx_mon_status_desc, &paddr,
3974 						&cookie, &rbm);
3975 		if (paddr) {
3976 			buf_id = u32_get_bits(cookie, DP_RXDMA_BUF_COOKIE_BUF_ID);
3977 
3978 			spin_lock_bh(&rx_ring->idr_lock);
3979 			skb = idr_find(&rx_ring->bufs_idr, buf_id);
3980 			spin_unlock_bh(&rx_ring->idr_lock);
3981 
3982 			if (!skb) {
3983 				ath12k_warn(ab, "rx monitor status with invalid buf_id %d\n",
3984 					    buf_id);
3985 				pmon->buf_state = DP_MON_STATUS_REPLINISH;
3986 				goto move_next;
3987 			}
3988 
3989 			rxcb = ATH12K_SKB_RXCB(skb);
3990 
3991 			dma_sync_single_for_cpu(ab->dev, rxcb->paddr,
3992 						skb->len + skb_tailroom(skb),
3993 						DMA_FROM_DEVICE);
3994 
3995 			tlv = (struct hal_tlv_64_hdr *)skb->data;
3996 			if (le64_get_bits(tlv->tl, HAL_TLV_HDR_TAG) !=
3997 					HAL_RX_STATUS_BUFFER_DONE) {
3998 				pmon->buf_state = DP_MON_STATUS_NO_DMA;
3999 				ath12k_warn(ab,
4000 					    "mon status DONE not set %llx, buf_id %d\n",
4001 					    le64_get_bits(tlv->tl, HAL_TLV_HDR_TAG),
4002 					    buf_id);
4003 				/* RxDMA status done bit might not be set even
4004 				 * though tp is moved by HW.
4005 				 */
4006 
4007 				/* If the done status is missing:
4008 				 * 1. As per the MAC team's suggestion, when the
4009 				 *    HP + 1 entry is peeked and its DMA is not
4010 				 *    done but the HP + 2 entry's DMA done is set,
4011 				 *    skip the HP + 1 entry and start processing
4012 				 *    it in the next interrupt.
4013 				 * 2. If the HP + 2 entry's DMA done is not set
4014 				 *    either, keep polling the HP + 1 entry, i.e.
4015 				 *    check the status of the same buffer on the
4016 				 *    next dp_rx_mon_status_srng_process pass.
4017 				 */
4018 				reap_status = ath12k_dp_rx_mon_buf_done(ab, srng,
4019 									rx_ring);
4020 				if (reap_status == DP_MON_STATUS_NO_DMA)
4021 					continue;
4022 
4023 				spin_lock_bh(&rx_ring->idr_lock);
4024 				idr_remove(&rx_ring->bufs_idr, buf_id);
4025 				spin_unlock_bh(&rx_ring->idr_lock);
4026 
4027 				dma_unmap_single(ab->dev, rxcb->paddr,
4028 						 skb->len + skb_tailroom(skb),
4029 						 DMA_FROM_DEVICE);
4030 
4031 				dev_kfree_skb_any(skb);
4032 				pmon->buf_state = DP_MON_STATUS_REPLINISH;
4033 				goto move_next;
4034 			}
4035 
4036 			spin_lock_bh(&rx_ring->idr_lock);
4037 			idr_remove(&rx_ring->bufs_idr, buf_id);
4038 			spin_unlock_bh(&rx_ring->idr_lock);
4039 
4040 			dma_unmap_single(ab->dev, rxcb->paddr,
4041 					 skb->len + skb_tailroom(skb),
4042 					 DMA_FROM_DEVICE);
4043 
4044 			if (ath12k_dp_pkt_set_pktlen(skb, RX_MON_STATUS_BUF_SIZE)) {
4045 				dev_kfree_skb_any(skb);
4046 				goto move_next;
4047 			}
4048 			__skb_queue_tail(skb_list, skb);
4049 		} else {
4050 			pmon->buf_state = DP_MON_STATUS_REPLINISH;
4051 		}
4052 move_next:
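		/* Refill the just-reaped status ring entry with a fresh buffer;
		 * if allocation fails, clear the buffer address in the
		 * descriptor and stop reaping.
		 */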
4053 		skb = ath12k_dp_rx_alloc_mon_status_buf(ab, rx_ring,
4054 							&buf_id);
4055 
4056 		if (!skb) {
4057 			ath12k_warn(ab, "failed to alloc buffer for status ring\n");
4058 			hal_params = ab->hw_params->hal_params;
4059 			ath12k_hal_rx_buf_addr_info_set(rx_mon_status_desc, 0, 0,
4060 							hal_params->rx_buf_rbm);
4061 			num_buffs_reaped++;
4062 			break;
4063 		}
4064 		rxcb = ATH12K_SKB_RXCB(skb);
4065 
4066 		cookie = u32_encode_bits(mac_id, DP_RXDMA_BUF_COOKIE_PDEV_ID) |
4067 			 u32_encode_bits(buf_id, DP_RXDMA_BUF_COOKIE_BUF_ID);
4068 
4069 		ath12k_hal_rx_buf_addr_info_set(rx_mon_status_desc, rxcb->paddr,
4070 						cookie,
4071 						ab->hw_params->hal_params->rx_buf_rbm);
4072 		ath12k_hal_srng_src_get_next_entry(ab, srng);
4073 		num_buffs_reaped++;
4074 	}
4075 	ath12k_hal_srng_access_end(ab, srng);
4076 	spin_unlock_bh(&srng->lock);
4077 
4078 	return num_buffs_reaped;
4079 }
4080 
4081 static u32
4082 ath12k_dp_rx_mon_mpdu_pop(struct ath12k *ar, int mac_id,
4083 			  void *ring_entry, struct sk_buff **head_msdu,
4084 			  struct sk_buff **tail_msdu,
4085 			  struct list_head *used_list,
4086 			  u32 *npackets, u32 *ppdu_id)
4087 {
4088 	struct ath12k_mon_data *pmon = (struct ath12k_mon_data *)&ar->dp.mon_data;
4089 	struct ath12k_buffer_addr *p_buf_addr_info, *p_last_buf_addr_info;
4090 	u32 msdu_ppdu_id = 0, msdu_cnt = 0, total_len = 0, frag_len = 0;
4091 	u32 rx_buf_size, rx_pkt_offset, sw_cookie;
4092 	bool is_frag, is_first_msdu, drop_mpdu = false;
4093 	struct hal_reo_entrance_ring *ent_desc =
4094 		(struct hal_reo_entrance_ring *)ring_entry;
4095 	u32 rx_bufs_used = 0, i = 0, desc_bank = 0;
4096 	struct hal_rx_desc *rx_desc, *tail_rx_desc;
4097 	struct hal_rx_msdu_link *msdu_link_desc;
4098 	struct sk_buff *msdu = NULL, *last = NULL;
4099 	struct ath12k_rx_desc_info *desc_info;
4100 	struct ath12k_buffer_addr buf_info;
4101 	struct hal_rx_msdu_list msdu_list;
4102 	struct ath12k_skb_rxcb *rxcb;
4103 	u16 num_msdus = 0;
4104 	dma_addr_t paddr;
4105 	u8 rbm;
4106 
4107 	ath12k_hal_rx_reo_ent_buf_paddr_get(ring_entry, &paddr,
4108 					    &sw_cookie,
4109 					    &p_last_buf_addr_info, &rbm,
4110 					    &msdu_cnt);
4111 
4112 	spin_lock_bh(&pmon->mon_lock);
4113 
4114 	if (le32_get_bits(ent_desc->info1,
4115 			  HAL_REO_ENTR_RING_INFO1_RXDMA_PUSH_REASON) ==
4116 			  HAL_REO_DEST_RING_PUSH_REASON_ERR_DETECTED) {
4117 		u8 rxdma_err = le32_get_bits(ent_desc->info1,
4118 					     HAL_REO_ENTR_RING_INFO1_RXDMA_ERROR_CODE);
4119 		if (rxdma_err == HAL_REO_ENTR_RING_RXDMA_ECODE_FLUSH_REQUEST_ERR ||
4120 		    rxdma_err == HAL_REO_ENTR_RING_RXDMA_ECODE_MPDU_LEN_ERR ||
4121 		    rxdma_err == HAL_REO_ENTR_RING_RXDMA_ECODE_OVERFLOW_ERR) {
4122 			drop_mpdu = true;
4123 			pmon->rx_mon_stats.dest_mpdu_drop++;
4124 		}
4125 	}
4126 
4127 	is_frag = false;
4128 	is_first_msdu = true;
4129 	rx_pkt_offset = sizeof(struct hal_rx_desc);
4130 
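	/* Walk the chain of MSDU link descriptors for this MPDU, popping each
	 * buffered MSDU into a head/tail chain and returning every consumed
	 * link descriptor to the WBM idle list.
	 */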
4131 	do {
4132 		if (pmon->mon_last_linkdesc_paddr == paddr) {
4133 			pmon->rx_mon_stats.dup_mon_linkdesc_cnt++;
4134 			spin_unlock_bh(&pmon->mon_lock);
4135 			return rx_bufs_used;
4136 		}
4137 
4138 		desc_bank = u32_get_bits(sw_cookie, DP_LINK_DESC_BANK_MASK);
4139 		msdu_link_desc =
4140 			ar->ab->dp.link_desc_banks[desc_bank].vaddr +
4141 			(paddr - ar->ab->dp.link_desc_banks[desc_bank].paddr);
4142 
4143 		ath12k_hal_rx_msdu_list_get(ar, msdu_link_desc, &msdu_list,
4144 					    &num_msdus);
4145 		desc_info = ath12k_dp_get_rx_desc(ar->ab,
4146 						  msdu_list.sw_cookie[num_msdus - 1]);
4147 		tail_rx_desc = (struct hal_rx_desc *)(desc_info->skb)->data;
4148 
4149 		for (i = 0; i < num_msdus; i++) {
4150 			u32 l2_hdr_offset;
4151 
4152 			if (pmon->mon_last_buf_cookie == msdu_list.sw_cookie[i]) {
4153 				ath12k_dbg(ar->ab, ATH12K_DBG_DATA,
4154 					   "i %d last_cookie %d is same\n",
4155 					   i, pmon->mon_last_buf_cookie);
4156 				drop_mpdu = true;
4157 				pmon->rx_mon_stats.dup_mon_buf_cnt++;
4158 				continue;
4159 			}
4160 
4161 			desc_info =
4162 				ath12k_dp_get_rx_desc(ar->ab, msdu_list.sw_cookie[i]);
4163 			msdu = desc_info->skb;
4164 
4165 			if (!msdu) {
4166 				ath12k_dbg(ar->ab, ATH12K_DBG_DATA,
4167 					   "msdu_pop: invalid msdu (%d/%d)\n",
4168 					   i + 1, num_msdus);
4169 				goto next_msdu;
4170 			}
4171 			rxcb = ATH12K_SKB_RXCB(msdu);
4172 			if (rxcb->paddr != msdu_list.paddr[i]) {
4173 				ath12k_dbg(ar->ab, ATH12K_DBG_DATA,
4174 					   "i %d paddr %lx != %lx\n",
4175 					   i, (unsigned long)rxcb->paddr,
4176 					   (unsigned long)msdu_list.paddr[i]);
4177 				drop_mpdu = true;
4178 				continue;
4179 			}
4180 			if (!rxcb->unmapped) {
4181 				dma_unmap_single(ar->ab->dev, rxcb->paddr,
4182 						 msdu->len +
4183 						 skb_tailroom(msdu),
4184 						 DMA_FROM_DEVICE);
4185 				rxcb->unmapped = 1;
4186 			}
4187 			if (drop_mpdu) {
4188 				ath12k_dbg(ar->ab, ATH12K_DBG_DATA,
4189 					   "i %d drop msdu %p *ppdu_id %x\n",
4190 					   i, msdu, *ppdu_id);
4191 				dev_kfree_skb_any(msdu);
4192 				msdu = NULL;
4193 				goto next_msdu;
4194 			}
4195 
4196 			rx_desc = (struct hal_rx_desc *)msdu->data;
4197 			l2_hdr_offset = ath12k_dp_rx_h_l3pad(ar->ab, tail_rx_desc);
4198 			if (is_first_msdu) {
4199 				if (!ath12k_dp_rxdesc_mpdu_valid(ar->ab, rx_desc)) {
4200 					drop_mpdu = true;
4201 					dev_kfree_skb_any(msdu);
4202 					msdu = NULL;
4203 					pmon->mon_last_linkdesc_paddr = paddr;
4204 					goto next_msdu;
4205 				}
4206 				msdu_ppdu_id =
4207 					ath12k_dp_rxdesc_get_ppduid(ar->ab, rx_desc);
4208 
4209 				if (ath12k_dp_mon_comp_ppduid(msdu_ppdu_id,
4210 							      ppdu_id)) {
4211 					spin_unlock_bh(&pmon->mon_lock);
4212 					return rx_bufs_used;
4213 				}
4214 				pmon->mon_last_linkdesc_paddr = paddr;
4215 				is_first_msdu = false;
4216 			}
4217 			ath12k_dp_mon_get_buf_len(&msdu_list.msdu_info[i],
4218 						  &is_frag, &total_len,
4219 						  &frag_len, &msdu_cnt);
4220 			rx_buf_size = rx_pkt_offset + l2_hdr_offset + frag_len;
4221 
4222 			if (ath12k_dp_pkt_set_pktlen(msdu, rx_buf_size)) {
4223 				dev_kfree_skb_any(msdu);
4224 				goto next_msdu;
4225 			}
4226 
4227 			if (!(*head_msdu))
4228 				*head_msdu = msdu;
4229 			else if (last)
4230 				last->next = msdu;
4231 
4232 			last = msdu;
4233 next_msdu:
4234 			pmon->mon_last_buf_cookie = msdu_list.sw_cookie[i];
4235 			rx_bufs_used++;
4236 			desc_info->skb = NULL;
4237 			list_add_tail(&desc_info->list, used_list);
4238 		}
4239 
4240 		ath12k_hal_rx_buf_addr_info_set(&buf_info, paddr, sw_cookie, rbm);
4241 
4242 		ath12k_dp_mon_next_link_desc_get(msdu_link_desc, &paddr,
4243 						 &sw_cookie, &rbm,
4244 						 &p_buf_addr_info);
4245 
4246 		ath12k_dp_rx_link_desc_return(ar->ab, &buf_info,
4247 					      HAL_WBM_REL_BM_ACT_PUT_IN_IDLE);
4248 
4249 		p_last_buf_addr_info = p_buf_addr_info;
4250 
4251 	} while (paddr && msdu_cnt);
4252 
4253 	spin_unlock_bh(&pmon->mon_lock);
4254 
4255 	if (last)
4256 		last->next = NULL;
4257 
4258 	*tail_msdu = msdu;
4259 
4260 	if (msdu_cnt == 0)
4261 		*npackets = 1;
4262 
4263 	return rx_bufs_used;
4264 }
4265 
4266 /* The destination ring processing is considered stuck if the destination ring
4267  * does not move while the status ring moves 16 PPDUs. As a workaround, the
4268  * destination ring processing skips this destination ring PPDU.
4269  */
4270 #define MON_DEST_RING_STUCK_MAX_CNT 16
4271 
4272 static void ath12k_dp_rx_mon_dest_process(struct ath12k *ar, int mac_id,
4273 					  u32 quota, struct napi_struct *napi)
4274 {
4275 	struct ath12k_mon_data *pmon = (struct ath12k_mon_data *)&ar->dp.mon_data;
4276 	struct ath12k_pdev_mon_stats *rx_mon_stats;
4277 	u32 ppdu_id, rx_bufs_used = 0, ring_id;
4278 	u32 mpdu_rx_bufs_used, npackets = 0;
4279 	struct ath12k_dp *dp = &ar->ab->dp;
4280 	struct ath12k_base *ab = ar->ab;
4281 	void *ring_entry, *mon_dst_srng;
4282 	struct dp_mon_mpdu *tmp_mpdu;
4283 	LIST_HEAD(rx_desc_used_list);
4284 	struct hal_srng *srng;
4285 
4286 	ring_id = dp->rxdma_err_dst_ring[mac_id].ring_id;
4287 	srng = &ab->hal.srng_list[ring_id];
4288 
4289 	mon_dst_srng = &ab->hal.srng_list[ring_id];
4290 
4291 	spin_lock_bh(&srng->lock);
4292 
4293 	ath12k_hal_srng_access_begin(ab, mon_dst_srng);
4294 
4295 	ppdu_id = pmon->mon_ppdu_info.ppdu_id;
4296 	rx_mon_stats = &pmon->rx_mon_stats;
4297 
4298 	while ((ring_entry = ath12k_hal_srng_dst_peek(ar->ab, mon_dst_srng))) {
4299 		struct sk_buff *head_msdu, *tail_msdu;
4300 
4301 		head_msdu = NULL;
4302 		tail_msdu = NULL;
4303 
4304 		mpdu_rx_bufs_used = ath12k_dp_rx_mon_mpdu_pop(ar, mac_id, ring_entry,
4305 							      &head_msdu, &tail_msdu,
4306 							      &rx_desc_used_list,
4307 							      &npackets, &ppdu_id);
4308 
4309 		rx_bufs_used += mpdu_rx_bufs_used;
4310 
4311 		if (mpdu_rx_bufs_used) {
4312 			dp->mon_dest_ring_stuck_cnt = 0;
4313 		} else {
4314 			dp->mon_dest_ring_stuck_cnt++;
4315 			rx_mon_stats->dest_mon_not_reaped++;
4316 		}
4317 
4318 		if (dp->mon_dest_ring_stuck_cnt > MON_DEST_RING_STUCK_MAX_CNT) {
4319 			rx_mon_stats->dest_mon_stuck++;
4320 			ath12k_dbg(ar->ab, ATH12K_DBG_DATA,
4321 				   "status ring ppdu_id=%d dest ring ppdu_id=%d mon_dest_ring_stuck_cnt=%d dest_mon_not_reaped=%u dest_mon_stuck=%u\n",
4322 				   pmon->mon_ppdu_info.ppdu_id, ppdu_id,
4323 				   dp->mon_dest_ring_stuck_cnt,
4324 				   rx_mon_stats->dest_mon_not_reaped,
4325 				   rx_mon_stats->dest_mon_stuck);
4326 			spin_lock_bh(&pmon->mon_lock);
4327 			pmon->mon_ppdu_info.ppdu_id = ppdu_id;
4328 			spin_unlock_bh(&pmon->mon_lock);
4329 			continue;
4330 		}
4331 
4332 		if (ppdu_id != pmon->mon_ppdu_info.ppdu_id) {
4333 			spin_lock_bh(&pmon->mon_lock);
4334 			pmon->mon_ppdu_status = DP_PPDU_STATUS_START;
4335 			spin_unlock_bh(&pmon->mon_lock);
4336 			ath12k_dbg(ar->ab, ATH12K_DBG_DATA,
4337 				   "dest_rx: new ppdu_id %x != status ppdu_id %x dest_mon_not_reaped = %u dest_mon_stuck = %u\n",
4338 				   ppdu_id, pmon->mon_ppdu_info.ppdu_id,
4339 				   rx_mon_stats->dest_mon_not_reaped,
4340 				   rx_mon_stats->dest_mon_stuck);
4341 			break;
4342 		}
4343 
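		/* Wrap the reaped MSDU chain in a dp_mon_mpdu and pass it down
		 * the monitor delivery path; the wrapper is freed right after
		 * delivery.
		 */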
4344 		if (head_msdu && tail_msdu) {
4345 			tmp_mpdu = kzalloc(sizeof(*tmp_mpdu), GFP_ATOMIC);
4346 			if (!tmp_mpdu)
4347 				break;
4348 
4349 			tmp_mpdu->head = head_msdu;
4350 			tmp_mpdu->tail = tail_msdu;
4351 			tmp_mpdu->err_bitmap = pmon->err_bitmap;
4352 			tmp_mpdu->decap_format = pmon->decap_format;
4353 			ath12k_dp_mon_rx_deliver(ar, tmp_mpdu,
4354 						 &pmon->mon_ppdu_info, napi);
4355 			rx_mon_stats->dest_mpdu_done++;
4356 			kfree(tmp_mpdu);
4357 		}
4358 
4359 		ring_entry = ath12k_hal_srng_dst_get_next_entry(ar->ab,
4360 								mon_dst_srng);
4361 	}
4362 	ath12k_hal_srng_access_end(ar->ab, mon_dst_srng);
4363 
4364 	spin_unlock_bh(&srng->lock);
4365 
4366 	if (rx_bufs_used) {
4367 		rx_mon_stats->dest_ppdu_done++;
4368 		ath12k_dp_rx_bufs_replenish(ar->ab,
4369 					    &dp->rx_refill_buf_ring,
4370 					    &rx_desc_used_list,
4371 					    rx_bufs_used);
4372 	}
4373 }
4374 
4375 static int
4376 __ath12k_dp_mon_process_ring(struct ath12k *ar, int mac_id,
4377 			     struct napi_struct *napi, int *budget)
4378 {
4379 	struct ath12k_mon_data *pmon = (struct ath12k_mon_data *)&ar->dp.mon_data;
4380 	struct ath12k_pdev_mon_stats *rx_mon_stats = &pmon->rx_mon_stats;
4381 	struct hal_rx_mon_ppdu_info *ppdu_info = &pmon->mon_ppdu_info;
4382 	enum hal_rx_mon_status hal_status;
4383 	struct sk_buff_head skb_list;
4384 	int num_buffs_reaped;
4385 	struct sk_buff *skb;
4386 
4387 	__skb_queue_head_init(&skb_list);
4388 
4389 	num_buffs_reaped = ath12k_dp_rx_reap_mon_status_ring(ar->ab, mac_id,
4390 							     budget, &skb_list);
4391 	if (!num_buffs_reaped)
4392 		goto exit;
4393 
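	/* Parse each status buffer; when the monitor is active and a complete
	 * PPDU is signalled, reap the matching destination ring entries.
	 */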
4394 	while ((skb = __skb_dequeue(&skb_list))) {
4395 		memset(ppdu_info, 0, sizeof(*ppdu_info));
4396 		ppdu_info->peer_id = HAL_INVALID_PEERID;
4397 
4398 		hal_status = ath12k_dp_mon_parse_rx_dest(ar, pmon, skb);
4399 
4400 		if (ar->monitor_started &&
4401 		    pmon->mon_ppdu_status == DP_PPDU_STATUS_START &&
4402 		    hal_status == HAL_TLV_STATUS_PPDU_DONE) {
4403 			rx_mon_stats->status_ppdu_done++;
4404 			pmon->mon_ppdu_status = DP_PPDU_STATUS_DONE;
4405 			ath12k_dp_rx_mon_dest_process(ar, mac_id, *budget, napi);
4406 			pmon->mon_ppdu_status = DP_PPDU_STATUS_START;
4407 		}
4408 
4409 		dev_kfree_skb_any(skb);
4410 	}
4411 
4412 exit:
4413 	return num_buffs_reaped;
4414 }
4415 
4416 int ath12k_dp_mon_process_ring(struct ath12k_base *ab, int mac_id,
4417 			       struct napi_struct *napi, int budget,
4418 			       enum dp_monitor_mode monitor_mode)
4419 {
4420 	struct ath12k *ar = ath12k_ab_to_ar(ab, mac_id);
4421 	int num_buffs_reaped = 0;
4422 
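	/* Chips with rxdma1 reap the dedicated monitor destination ring in RX
	 * monitor mode; otherwise the status ring driven path is used while
	 * the monitor is started.
	 */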
4423 	if (ab->hw_params->rxdma1_enable) {
4424 		if (monitor_mode == ATH12K_DP_RX_MONITOR_MODE)
4425 			num_buffs_reaped = ath12k_dp_mon_srng_process(ar, &budget, napi);
4426 	} else {
4427 		if (ar->monitor_started)
4428 			num_buffs_reaped =
4429 				__ath12k_dp_mon_process_ring(ar, mac_id, napi, &budget);
4430 	}
4431 
4432 	return num_buffs_reaped;
4433 }
4434