xref: /linux/drivers/net/wireless/ath/ath12k/dp_mon.c (revision fb7399cf2d0b33825b8039f95c45395c7deba25c)
1 // SPDX-License-Identifier: BSD-3-Clause-Clear
2 /*
3  * Copyright (c) 2019-2021 The Linux Foundation. All rights reserved.
4  * Copyright (c) 2021-2025 Qualcomm Innovation Center, Inc. All rights reserved.
5  */
6 
7 #include "dp_mon.h"
8 #include "debug.h"
9 #include "dp_rx.h"
10 #include "dp_tx.h"
11 #include "peer.h"
12 
/* Decode a bitfield from a little-endian 32-bit TLV word and re-encode it
 * into a host-order (typically radiotap) bitfield in a single expression.
 */
#define ATH12K_LE32_DEC_ENC(value, dec_bits, enc_bits)	\
		u32_encode_bits(le32_get_bits(value, dec_bits), enc_bits)

/* Same as ATH12K_LE32_DEC_ENC but decodes from a little-endian 64-bit word. */
#define ATH12K_LE64_DEC_ENC(value, dec_bits, enc_bits) \
		u32_encode_bits(le64_get_bits(value, dec_bits), enc_bits)
18 
19 static void
20 ath12k_dp_mon_rx_handle_ofdma_info(const struct hal_rx_ppdu_end_user_stats *ppdu_end_user,
21 				   struct hal_rx_user_status *rx_user_status)
22 {
23 	rx_user_status->ul_ofdma_user_v0_word0 =
24 		__le32_to_cpu(ppdu_end_user->usr_resp_ref);
25 	rx_user_status->ul_ofdma_user_v0_word1 =
26 		__le32_to_cpu(ppdu_end_user->usr_resp_ref_ext);
27 }
28 
29 static void
30 ath12k_dp_mon_rx_populate_byte_count(const struct hal_rx_ppdu_end_user_stats *stats,
31 				     void *ppduinfo,
32 				     struct hal_rx_user_status *rx_user_status)
33 {
34 	rx_user_status->mpdu_ok_byte_count =
35 		le32_get_bits(stats->info7,
36 			      HAL_RX_PPDU_END_USER_STATS_INFO7_MPDU_OK_BYTE_COUNT);
37 	rx_user_status->mpdu_err_byte_count =
38 		le32_get_bits(stats->info8,
39 			      HAL_RX_PPDU_END_USER_STATS_INFO8_MPDU_ERR_BYTE_COUNT);
40 }
41 
42 static void
43 ath12k_dp_mon_rx_populate_mu_user_info(const struct hal_rx_ppdu_end_user_stats *rx_tlv,
44 				       struct hal_rx_mon_ppdu_info *ppdu_info,
45 				       struct hal_rx_user_status *rx_user_status)
46 {
47 	rx_user_status->ast_index = ppdu_info->ast_index;
48 	rx_user_status->tid = ppdu_info->tid;
49 	rx_user_status->tcp_ack_msdu_count =
50 		ppdu_info->tcp_ack_msdu_count;
51 	rx_user_status->tcp_msdu_count =
52 		ppdu_info->tcp_msdu_count;
53 	rx_user_status->udp_msdu_count =
54 		ppdu_info->udp_msdu_count;
55 	rx_user_status->other_msdu_count =
56 		ppdu_info->other_msdu_count;
57 	rx_user_status->frame_control = ppdu_info->frame_control;
58 	rx_user_status->frame_control_info_valid =
59 		ppdu_info->frame_control_info_valid;
60 	rx_user_status->data_sequence_control_info_valid =
61 		ppdu_info->data_sequence_control_info_valid;
62 	rx_user_status->first_data_seq_ctrl =
63 		ppdu_info->first_data_seq_ctrl;
64 	rx_user_status->preamble_type = ppdu_info->preamble_type;
65 	rx_user_status->ht_flags = ppdu_info->ht_flags;
66 	rx_user_status->vht_flags = ppdu_info->vht_flags;
67 	rx_user_status->he_flags = ppdu_info->he_flags;
68 	rx_user_status->rs_flags = ppdu_info->rs_flags;
69 
70 	rx_user_status->mpdu_cnt_fcs_ok =
71 		ppdu_info->num_mpdu_fcs_ok;
72 	rx_user_status->mpdu_cnt_fcs_err =
73 		ppdu_info->num_mpdu_fcs_err;
74 	memcpy(&rx_user_status->mpdu_fcs_ok_bitmap[0], &ppdu_info->mpdu_fcs_ok_bitmap[0],
75 	       HAL_RX_NUM_WORDS_PER_PPDU_BITMAP *
76 	       sizeof(ppdu_info->mpdu_fcs_ok_bitmap[0]));
77 
78 	ath12k_dp_mon_rx_populate_byte_count(rx_tlv, ppdu_info, rx_user_status);
79 }
80 
81 static void ath12k_dp_mon_parse_vht_sig_a(const struct hal_rx_vht_sig_a_info *vht_sig,
82 					  struct hal_rx_mon_ppdu_info *ppdu_info)
83 {
84 	u32 nsts, info0, info1;
85 	u8 gi_setting;
86 
87 	info0 = __le32_to_cpu(vht_sig->info0);
88 	info1 = __le32_to_cpu(vht_sig->info1);
89 
90 	ppdu_info->ldpc = u32_get_bits(info1, HAL_RX_VHT_SIG_A_INFO_INFO1_SU_MU_CODING);
91 	ppdu_info->mcs = u32_get_bits(info1, HAL_RX_VHT_SIG_A_INFO_INFO1_MCS);
92 	gi_setting = u32_get_bits(info1, HAL_RX_VHT_SIG_A_INFO_INFO1_GI_SETTING);
93 	switch (gi_setting) {
94 	case HAL_RX_VHT_SIG_A_NORMAL_GI:
95 		ppdu_info->gi = HAL_RX_GI_0_8_US;
96 		break;
97 	case HAL_RX_VHT_SIG_A_SHORT_GI:
98 	case HAL_RX_VHT_SIG_A_SHORT_GI_AMBIGUITY:
99 		ppdu_info->gi = HAL_RX_GI_0_4_US;
100 		break;
101 	}
102 
103 	ppdu_info->is_stbc = u32_get_bits(info0, HAL_RX_VHT_SIG_A_INFO_INFO0_STBC);
104 	nsts = u32_get_bits(info0, HAL_RX_VHT_SIG_A_INFO_INFO0_NSTS);
105 	if (ppdu_info->is_stbc && nsts > 0)
106 		nsts = ((nsts + 1) >> 1) - 1;
107 
108 	ppdu_info->nss = u32_get_bits(nsts, VHT_SIG_SU_NSS_MASK);
109 	ppdu_info->bw = u32_get_bits(info0, HAL_RX_VHT_SIG_A_INFO_INFO0_BW);
110 	ppdu_info->beamformed = u32_get_bits(info1,
111 					     HAL_RX_VHT_SIG_A_INFO_INFO1_BEAMFORMED);
112 	ppdu_info->vht_flag_values5 = u32_get_bits(info0,
113 						   HAL_RX_VHT_SIG_A_INFO_INFO0_GROUP_ID);
114 	ppdu_info->vht_flag_values3[0] = (((ppdu_info->mcs) << 4) |
115 					    ppdu_info->nss);
116 	ppdu_info->vht_flag_values2 = ppdu_info->bw;
117 	ppdu_info->vht_flag_values4 =
118 		u32_get_bits(info1, HAL_RX_VHT_SIG_A_INFO_INFO1_SU_MU_CODING);
119 }
120 
121 static void ath12k_dp_mon_parse_ht_sig(const struct hal_rx_ht_sig_info *ht_sig,
122 				       struct hal_rx_mon_ppdu_info *ppdu_info)
123 {
124 	u32 info0 = __le32_to_cpu(ht_sig->info0);
125 	u32 info1 = __le32_to_cpu(ht_sig->info1);
126 
127 	ppdu_info->mcs = u32_get_bits(info0, HAL_RX_HT_SIG_INFO_INFO0_MCS);
128 	ppdu_info->bw = u32_get_bits(info0, HAL_RX_HT_SIG_INFO_INFO0_BW);
129 	ppdu_info->is_stbc = u32_get_bits(info1, HAL_RX_HT_SIG_INFO_INFO1_STBC);
130 	ppdu_info->ldpc = u32_get_bits(info1, HAL_RX_HT_SIG_INFO_INFO1_FEC_CODING);
131 	ppdu_info->gi = u32_get_bits(info1, HAL_RX_HT_SIG_INFO_INFO1_GI);
132 	ppdu_info->nss = (ppdu_info->mcs >> 3);
133 }
134 
135 static void ath12k_dp_mon_parse_l_sig_b(const struct hal_rx_lsig_b_info *lsigb,
136 					struct hal_rx_mon_ppdu_info *ppdu_info)
137 {
138 	u32 info0 = __le32_to_cpu(lsigb->info0);
139 	u8 rate;
140 
141 	rate = u32_get_bits(info0, HAL_RX_LSIG_B_INFO_INFO0_RATE);
142 	switch (rate) {
143 	case 1:
144 		rate = HAL_RX_LEGACY_RATE_1_MBPS;
145 		break;
146 	case 2:
147 	case 5:
148 		rate = HAL_RX_LEGACY_RATE_2_MBPS;
149 		break;
150 	case 3:
151 	case 6:
152 		rate = HAL_RX_LEGACY_RATE_5_5_MBPS;
153 		break;
154 	case 4:
155 	case 7:
156 		rate = HAL_RX_LEGACY_RATE_11_MBPS;
157 		break;
158 	default:
159 		rate = HAL_RX_LEGACY_RATE_INVALID;
160 	}
161 
162 	ppdu_info->rate = rate;
163 	ppdu_info->cck_flag = 1;
164 }
165 
166 static void ath12k_dp_mon_parse_l_sig_a(const struct hal_rx_lsig_a_info *lsiga,
167 					struct hal_rx_mon_ppdu_info *ppdu_info)
168 {
169 	u32 info0 = __le32_to_cpu(lsiga->info0);
170 	u8 rate;
171 
172 	rate = u32_get_bits(info0, HAL_RX_LSIG_A_INFO_INFO0_RATE);
173 	switch (rate) {
174 	case 8:
175 		rate = HAL_RX_LEGACY_RATE_48_MBPS;
176 		break;
177 	case 9:
178 		rate = HAL_RX_LEGACY_RATE_24_MBPS;
179 		break;
180 	case 10:
181 		rate = HAL_RX_LEGACY_RATE_12_MBPS;
182 		break;
183 	case 11:
184 		rate = HAL_RX_LEGACY_RATE_6_MBPS;
185 		break;
186 	case 12:
187 		rate = HAL_RX_LEGACY_RATE_54_MBPS;
188 		break;
189 	case 13:
190 		rate = HAL_RX_LEGACY_RATE_36_MBPS;
191 		break;
192 	case 14:
193 		rate = HAL_RX_LEGACY_RATE_18_MBPS;
194 		break;
195 	case 15:
196 		rate = HAL_RX_LEGACY_RATE_9_MBPS;
197 		break;
198 	default:
199 		rate = HAL_RX_LEGACY_RATE_INVALID;
200 	}
201 
202 	ppdu_info->rate = rate;
203 }
204 
205 static void
206 ath12k_dp_mon_parse_he_sig_b2_ofdma(const struct hal_rx_he_sig_b2_ofdma_info *ofdma,
207 				    struct hal_rx_mon_ppdu_info *ppdu_info)
208 {
209 	u32 info0, value;
210 
211 	info0 = __le32_to_cpu(ofdma->info0);
212 
213 	ppdu_info->he_data1 |= HE_MCS_KNOWN | HE_DCM_KNOWN | HE_CODING_KNOWN;
214 
215 	/* HE-data2 */
216 	ppdu_info->he_data2 |= HE_TXBF_KNOWN;
217 
218 	ppdu_info->mcs = u32_get_bits(info0, HAL_RX_HE_SIG_B2_OFDMA_INFO_INFO0_STA_MCS);
219 	value = ppdu_info->mcs << HE_TRANSMIT_MCS_SHIFT;
220 	ppdu_info->he_data3 |= value;
221 
222 	value = u32_get_bits(info0, HAL_RX_HE_SIG_B2_OFDMA_INFO_INFO0_STA_DCM);
223 	value = value << HE_DCM_SHIFT;
224 	ppdu_info->he_data3 |= value;
225 
226 	value = u32_get_bits(info0, HAL_RX_HE_SIG_B2_OFDMA_INFO_INFO0_STA_CODING);
227 	ppdu_info->ldpc = value;
228 	value = value << HE_CODING_SHIFT;
229 	ppdu_info->he_data3 |= value;
230 
231 	/* HE-data4 */
232 	value = u32_get_bits(info0, HAL_RX_HE_SIG_B2_OFDMA_INFO_INFO0_STA_ID);
233 	value = value << HE_STA_ID_SHIFT;
234 	ppdu_info->he_data4 |= value;
235 
236 	ppdu_info->nss = u32_get_bits(info0, HAL_RX_HE_SIG_B2_OFDMA_INFO_INFO0_STA_NSTS);
237 	ppdu_info->beamformed = u32_get_bits(info0,
238 					     HAL_RX_HE_SIG_B2_OFDMA_INFO_INFO0_STA_TXBF);
239 }
240 
241 static void
242 ath12k_dp_mon_parse_he_sig_b2_mu(const struct hal_rx_he_sig_b2_mu_info *he_sig_b2_mu,
243 				 struct hal_rx_mon_ppdu_info *ppdu_info)
244 {
245 	u32 info0, value;
246 
247 	info0 = __le32_to_cpu(he_sig_b2_mu->info0);
248 
249 	ppdu_info->he_data1 |= HE_MCS_KNOWN | HE_CODING_KNOWN;
250 
251 	ppdu_info->mcs = u32_get_bits(info0, HAL_RX_HE_SIG_B2_MU_INFO_INFO0_STA_MCS);
252 	value = ppdu_info->mcs << HE_TRANSMIT_MCS_SHIFT;
253 	ppdu_info->he_data3 |= value;
254 
255 	value = u32_get_bits(info0, HAL_RX_HE_SIG_B2_MU_INFO_INFO0_STA_CODING);
256 	ppdu_info->ldpc = value;
257 	value = value << HE_CODING_SHIFT;
258 	ppdu_info->he_data3 |= value;
259 
260 	value = u32_get_bits(info0, HAL_RX_HE_SIG_B2_MU_INFO_INFO0_STA_ID);
261 	value = value << HE_STA_ID_SHIFT;
262 	ppdu_info->he_data4 |= value;
263 
264 	ppdu_info->nss = u32_get_bits(info0, HAL_RX_HE_SIG_B2_MU_INFO_INFO0_STA_NSTS);
265 }
266 
267 static void
268 ath12k_dp_mon_parse_he_sig_b1_mu(const struct hal_rx_he_sig_b1_mu_info *he_sig_b1_mu,
269 				 struct hal_rx_mon_ppdu_info *ppdu_info)
270 {
271 	u32 info0 = __le32_to_cpu(he_sig_b1_mu->info0);
272 	u16 ru_tones;
273 
274 	ru_tones = u32_get_bits(info0,
275 				HAL_RX_HE_SIG_B1_MU_INFO_INFO0_RU_ALLOCATION);
276 	ppdu_info->ru_alloc = ath12k_he_ru_tones_to_nl80211_he_ru_alloc(ru_tones);
277 	ppdu_info->he_RU[0] = ru_tones;
278 }
279 
280 static void
281 ath12k_dp_mon_parse_he_sig_mu(const struct hal_rx_he_sig_a_mu_dl_info *he_sig_a_mu_dl,
282 			      struct hal_rx_mon_ppdu_info *ppdu_info)
283 {
284 	u32 info0, info1, value;
285 	u16 he_gi = 0, he_ltf = 0;
286 
287 	info0 = __le32_to_cpu(he_sig_a_mu_dl->info0);
288 	info1 = __le32_to_cpu(he_sig_a_mu_dl->info1);
289 
290 	ppdu_info->he_mu_flags = 1;
291 
292 	ppdu_info->he_data1 = HE_MU_FORMAT_TYPE;
293 	ppdu_info->he_data1 |=
294 			HE_BSS_COLOR_KNOWN |
295 			HE_DL_UL_KNOWN |
296 			HE_LDPC_EXTRA_SYMBOL_KNOWN |
297 			HE_STBC_KNOWN |
298 			HE_DATA_BW_RU_KNOWN |
299 			HE_DOPPLER_KNOWN;
300 
301 	ppdu_info->he_data2 =
302 			HE_GI_KNOWN |
303 			HE_LTF_SYMBOLS_KNOWN |
304 			HE_PRE_FEC_PADDING_KNOWN |
305 			HE_PE_DISAMBIGUITY_KNOWN |
306 			HE_TXOP_KNOWN |
307 			HE_MIDABLE_PERIODICITY_KNOWN;
308 
309 	/* data3 */
310 	ppdu_info->he_data3 = u32_get_bits(info0, HAL_RX_HE_SIG_A_MU_DL_INFO0_BSS_COLOR);
311 	value = u32_get_bits(info0, HAL_RX_HE_SIG_A_MU_DL_INFO0_UL_FLAG);
312 	value = value << HE_DL_UL_SHIFT;
313 	ppdu_info->he_data3 |= value;
314 
315 	value = u32_get_bits(info1, HAL_RX_HE_SIG_A_MU_DL_INFO1_LDPC_EXTRA);
316 	value = value << HE_LDPC_EXTRA_SYMBOL_SHIFT;
317 	ppdu_info->he_data3 |= value;
318 
319 	value = u32_get_bits(info1, HAL_RX_HE_SIG_A_MU_DL_INFO1_STBC);
320 	value = value << HE_STBC_SHIFT;
321 	ppdu_info->he_data3 |= value;
322 
323 	/* data4 */
324 	ppdu_info->he_data4 = u32_get_bits(info0,
325 					   HAL_RX_HE_SIG_A_MU_DL_INFO0_SPATIAL_REUSE);
326 	ppdu_info->he_data4 = value;
327 
328 	/* data5 */
329 	value = u32_get_bits(info0, HAL_RX_HE_SIG_A_MU_DL_INFO0_TRANSMIT_BW);
330 	ppdu_info->he_data5 = value;
331 	ppdu_info->bw = value;
332 
333 	value = u32_get_bits(info0, HAL_RX_HE_SIG_A_MU_DL_INFO0_CP_LTF_SIZE);
334 	switch (value) {
335 	case 0:
336 		he_gi = HE_GI_0_8;
337 		he_ltf = HE_LTF_4_X;
338 		break;
339 	case 1:
340 		he_gi = HE_GI_0_8;
341 		he_ltf = HE_LTF_2_X;
342 		break;
343 	case 2:
344 		he_gi = HE_GI_1_6;
345 		he_ltf = HE_LTF_2_X;
346 		break;
347 	case 3:
348 		he_gi = HE_GI_3_2;
349 		he_ltf = HE_LTF_4_X;
350 		break;
351 	}
352 
353 	ppdu_info->gi = he_gi;
354 	value = he_gi << HE_GI_SHIFT;
355 	ppdu_info->he_data5 |= value;
356 
357 	value = he_ltf << HE_LTF_SIZE_SHIFT;
358 	ppdu_info->he_data5 |= value;
359 
360 	value = u32_get_bits(info1, HAL_RX_HE_SIG_A_MU_DL_INFO1_NUM_LTF_SYMB);
361 	value = (value << HE_LTF_SYM_SHIFT);
362 	ppdu_info->he_data5 |= value;
363 
364 	value = u32_get_bits(info1, HAL_RX_HE_SIG_A_MU_DL_INFO1_PKT_EXT_FACTOR);
365 	value = value << HE_PRE_FEC_PAD_SHIFT;
366 	ppdu_info->he_data5 |= value;
367 
368 	value = u32_get_bits(info1, HAL_RX_HE_SIG_A_MU_DL_INFO1_PKT_EXT_PE_DISAM);
369 	value = value << HE_PE_DISAMBIGUITY_SHIFT;
370 	ppdu_info->he_data5 |= value;
371 
372 	/*data6*/
373 	value = u32_get_bits(info0, HAL_RX_HE_SIG_A_MU_DL_INFO0_DOPPLER_INDICATION);
374 	value = value << HE_DOPPLER_SHIFT;
375 	ppdu_info->he_data6 |= value;
376 
377 	value = u32_get_bits(info1, HAL_RX_HE_SIG_A_MU_DL_INFO1_TXOP_DURATION);
378 	value = value << HE_TXOP_SHIFT;
379 	ppdu_info->he_data6 |= value;
380 
381 	/* HE-MU Flags */
382 	/* HE-MU-flags1 */
383 	ppdu_info->he_flags1 =
384 		HE_SIG_B_MCS_KNOWN |
385 		HE_SIG_B_DCM_KNOWN |
386 		HE_SIG_B_COMPRESSION_FLAG_1_KNOWN |
387 		HE_SIG_B_SYM_NUM_KNOWN |
388 		HE_RU_0_KNOWN;
389 
390 	value = u32_get_bits(info0, HAL_RX_HE_SIG_A_MU_DL_INFO0_MCS_OF_SIGB);
391 	ppdu_info->he_flags1 |= value;
392 	value = u32_get_bits(info0, HAL_RX_HE_SIG_A_MU_DL_INFO0_DCM_OF_SIGB);
393 	value = value << HE_DCM_FLAG_1_SHIFT;
394 	ppdu_info->he_flags1 |= value;
395 
396 	/* HE-MU-flags2 */
397 	ppdu_info->he_flags2 = HE_BW_KNOWN;
398 
399 	value = u32_get_bits(info0, HAL_RX_HE_SIG_A_MU_DL_INFO0_TRANSMIT_BW);
400 	ppdu_info->he_flags2 |= value;
401 	value = u32_get_bits(info0, HAL_RX_HE_SIG_A_MU_DL_INFO0_COMP_MODE_SIGB);
402 	value = value << HE_SIG_B_COMPRESSION_FLAG_2_SHIFT;
403 	ppdu_info->he_flags2 |= value;
404 	value = u32_get_bits(info0, HAL_RX_HE_SIG_A_MU_DL_INFO0_NUM_SIGB_SYMB);
405 	value = value - 1;
406 	value = value << HE_NUM_SIG_B_SYMBOLS_SHIFT;
407 	ppdu_info->he_flags2 |= value;
408 
409 	ppdu_info->is_stbc = info1 &
410 			     HAL_RX_HE_SIG_A_MU_DL_INFO1_STBC;
411 }
412 
413 static void ath12k_dp_mon_parse_he_sig_su(const struct hal_rx_he_sig_a_su_info *he_sig_a,
414 					  struct hal_rx_mon_ppdu_info *ppdu_info)
415 {
416 	u32 info0, info1, value;
417 	u32 dcm;
418 	u8 he_dcm = 0, he_stbc = 0;
419 	u16 he_gi = 0, he_ltf = 0;
420 
421 	ppdu_info->he_flags = 1;
422 
423 	info0 = __le32_to_cpu(he_sig_a->info0);
424 	info1 = __le32_to_cpu(he_sig_a->info1);
425 
426 	value = u32_get_bits(info0, HAL_RX_HE_SIG_A_SU_INFO_INFO0_FORMAT_IND);
427 	if (value == 0)
428 		ppdu_info->he_data1 = HE_TRIG_FORMAT_TYPE;
429 	else
430 		ppdu_info->he_data1 = HE_SU_FORMAT_TYPE;
431 
432 	ppdu_info->he_data1 |=
433 			HE_BSS_COLOR_KNOWN |
434 			HE_BEAM_CHANGE_KNOWN |
435 			HE_DL_UL_KNOWN |
436 			HE_MCS_KNOWN |
437 			HE_DCM_KNOWN |
438 			HE_CODING_KNOWN |
439 			HE_LDPC_EXTRA_SYMBOL_KNOWN |
440 			HE_STBC_KNOWN |
441 			HE_DATA_BW_RU_KNOWN |
442 			HE_DOPPLER_KNOWN;
443 
444 	ppdu_info->he_data2 |=
445 			HE_GI_KNOWN |
446 			HE_TXBF_KNOWN |
447 			HE_PE_DISAMBIGUITY_KNOWN |
448 			HE_TXOP_KNOWN |
449 			HE_LTF_SYMBOLS_KNOWN |
450 			HE_PRE_FEC_PADDING_KNOWN |
451 			HE_MIDABLE_PERIODICITY_KNOWN;
452 
453 	ppdu_info->he_data3 = u32_get_bits(info0,
454 					   HAL_RX_HE_SIG_A_SU_INFO_INFO0_BSS_COLOR);
455 	value = u32_get_bits(info0, HAL_RX_HE_SIG_A_SU_INFO_INFO0_BEAM_CHANGE);
456 	value = value << HE_BEAM_CHANGE_SHIFT;
457 	ppdu_info->he_data3 |= value;
458 	value = u32_get_bits(info0, HAL_RX_HE_SIG_A_SU_INFO_INFO0_DL_UL_FLAG);
459 	value = value << HE_DL_UL_SHIFT;
460 	ppdu_info->he_data3 |= value;
461 
462 	value = u32_get_bits(info0, HAL_RX_HE_SIG_A_SU_INFO_INFO0_TRANSMIT_MCS);
463 	ppdu_info->mcs = value;
464 	value = value << HE_TRANSMIT_MCS_SHIFT;
465 	ppdu_info->he_data3 |= value;
466 
467 	value = u32_get_bits(info0, HAL_RX_HE_SIG_A_SU_INFO_INFO0_DCM);
468 	he_dcm = value;
469 	value = value << HE_DCM_SHIFT;
470 	ppdu_info->he_data3 |= value;
471 	value = u32_get_bits(info1, HAL_RX_HE_SIG_A_SU_INFO_INFO1_CODING);
472 	value = value << HE_CODING_SHIFT;
473 	ppdu_info->he_data3 |= value;
474 	value = u32_get_bits(info1, HAL_RX_HE_SIG_A_SU_INFO_INFO1_LDPC_EXTRA);
475 	value = value << HE_LDPC_EXTRA_SYMBOL_SHIFT;
476 	ppdu_info->he_data3 |= value;
477 	value = u32_get_bits(info1, HAL_RX_HE_SIG_A_SU_INFO_INFO1_STBC);
478 	he_stbc = value;
479 	value = value << HE_STBC_SHIFT;
480 	ppdu_info->he_data3 |= value;
481 
482 	/* data4 */
483 	ppdu_info->he_data4 = u32_get_bits(info0,
484 					   HAL_RX_HE_SIG_A_SU_INFO_INFO0_SPATIAL_REUSE);
485 
486 	/* data5 */
487 	value = u32_get_bits(info0,
488 			     HAL_RX_HE_SIG_A_SU_INFO_INFO0_TRANSMIT_BW);
489 	ppdu_info->he_data5 = value;
490 	ppdu_info->bw = value;
491 	value = u32_get_bits(info0, HAL_RX_HE_SIG_A_SU_INFO_INFO0_CP_LTF_SIZE);
492 	switch (value) {
493 	case 0:
494 		he_gi = HE_GI_0_8;
495 		he_ltf = HE_LTF_1_X;
496 		break;
497 	case 1:
498 		he_gi = HE_GI_0_8;
499 		he_ltf = HE_LTF_2_X;
500 		break;
501 	case 2:
502 		he_gi = HE_GI_1_6;
503 		he_ltf = HE_LTF_2_X;
504 		break;
505 	case 3:
506 		if (he_dcm && he_stbc) {
507 			he_gi = HE_GI_0_8;
508 			he_ltf = HE_LTF_4_X;
509 		} else {
510 			he_gi = HE_GI_3_2;
511 			he_ltf = HE_LTF_4_X;
512 		}
513 		break;
514 	}
515 	ppdu_info->gi = he_gi;
516 	value = he_gi << HE_GI_SHIFT;
517 	ppdu_info->he_data5 |= value;
518 	value = he_ltf << HE_LTF_SIZE_SHIFT;
519 	ppdu_info->ltf_size = he_ltf;
520 	ppdu_info->he_data5 |= value;
521 
522 	value = u32_get_bits(info0, HAL_RX_HE_SIG_A_SU_INFO_INFO0_NSTS);
523 	value = (value << HE_LTF_SYM_SHIFT);
524 	ppdu_info->he_data5 |= value;
525 
526 	value = u32_get_bits(info1, HAL_RX_HE_SIG_A_SU_INFO_INFO1_PKT_EXT_FACTOR);
527 	value = value << HE_PRE_FEC_PAD_SHIFT;
528 	ppdu_info->he_data5 |= value;
529 
530 	value = u32_get_bits(info1, HAL_RX_HE_SIG_A_SU_INFO_INFO1_TXBF);
531 	value = value << HE_TXBF_SHIFT;
532 	ppdu_info->he_data5 |= value;
533 	value = u32_get_bits(info1, HAL_RX_HE_SIG_A_SU_INFO_INFO1_PKT_EXT_PE_DISAM);
534 	value = value << HE_PE_DISAMBIGUITY_SHIFT;
535 	ppdu_info->he_data5 |= value;
536 
537 	/* data6 */
538 	value = u32_get_bits(info0, HAL_RX_HE_SIG_A_SU_INFO_INFO0_NSTS);
539 	value++;
540 	ppdu_info->he_data6 = value;
541 	value = u32_get_bits(info1, HAL_RX_HE_SIG_A_SU_INFO_INFO1_DOPPLER_IND);
542 	value = value << HE_DOPPLER_SHIFT;
543 	ppdu_info->he_data6 |= value;
544 	value = u32_get_bits(info1, HAL_RX_HE_SIG_A_SU_INFO_INFO1_TXOP_DURATION);
545 	value = value << HE_TXOP_SHIFT;
546 	ppdu_info->he_data6 |= value;
547 
548 	ppdu_info->mcs =
549 		u32_get_bits(info0, HAL_RX_HE_SIG_A_SU_INFO_INFO0_TRANSMIT_MCS);
550 	ppdu_info->bw =
551 		u32_get_bits(info0, HAL_RX_HE_SIG_A_SU_INFO_INFO0_TRANSMIT_BW);
552 	ppdu_info->ldpc = u32_get_bits(info1, HAL_RX_HE_SIG_A_SU_INFO_INFO1_CODING);
553 	ppdu_info->is_stbc = u32_get_bits(info1, HAL_RX_HE_SIG_A_SU_INFO_INFO1_STBC);
554 	ppdu_info->beamformed = u32_get_bits(info1, HAL_RX_HE_SIG_A_SU_INFO_INFO1_TXBF);
555 	dcm = u32_get_bits(info0, HAL_RX_HE_SIG_A_SU_INFO_INFO0_DCM);
556 	ppdu_info->nss = u32_get_bits(info0, HAL_RX_HE_SIG_A_SU_INFO_INFO0_NSTS);
557 	ppdu_info->dcm = dcm;
558 }
559 
560 static void
561 ath12k_dp_mon_hal_rx_parse_u_sig_cmn(const struct hal_mon_usig_cmn *cmn,
562 				     struct hal_rx_mon_ppdu_info *ppdu_info)
563 {
564 	u32 common;
565 
566 	ppdu_info->u_sig_info.bw = le32_get_bits(cmn->info0,
567 						 HAL_RX_USIG_CMN_INFO0_BW);
568 	ppdu_info->u_sig_info.ul_dl = le32_get_bits(cmn->info0,
569 						    HAL_RX_USIG_CMN_INFO0_UL_DL);
570 
571 	common = __le32_to_cpu(ppdu_info->u_sig_info.usig.common);
572 	common |= IEEE80211_RADIOTAP_EHT_USIG_COMMON_PHY_VER_KNOWN |
573 		  IEEE80211_RADIOTAP_EHT_USIG_COMMON_BW_KNOWN |
574 		  IEEE80211_RADIOTAP_EHT_USIG_COMMON_UL_DL_KNOWN |
575 		  IEEE80211_RADIOTAP_EHT_USIG_COMMON_BSS_COLOR_KNOWN |
576 		  IEEE80211_RADIOTAP_EHT_USIG_COMMON_TXOP_KNOWN |
577 		  ATH12K_LE32_DEC_ENC(cmn->info0,
578 				      HAL_RX_USIG_CMN_INFO0_PHY_VERSION,
579 				      IEEE80211_RADIOTAP_EHT_USIG_COMMON_PHY_VER) |
580 		  u32_encode_bits(ppdu_info->u_sig_info.bw,
581 				  IEEE80211_RADIOTAP_EHT_USIG_COMMON_BW) |
582 		  u32_encode_bits(ppdu_info->u_sig_info.ul_dl,
583 				  IEEE80211_RADIOTAP_EHT_USIG_COMMON_UL_DL) |
584 		  ATH12K_LE32_DEC_ENC(cmn->info0,
585 				      HAL_RX_USIG_CMN_INFO0_BSS_COLOR,
586 				      IEEE80211_RADIOTAP_EHT_USIG_COMMON_BSS_COLOR) |
587 		  ATH12K_LE32_DEC_ENC(cmn->info0,
588 				      HAL_RX_USIG_CMN_INFO0_TXOP,
589 				      IEEE80211_RADIOTAP_EHT_USIG_COMMON_TXOP);
590 	ppdu_info->u_sig_info.usig.common = cpu_to_le32(common);
591 
592 	switch (ppdu_info->u_sig_info.bw) {
593 	default:
594 		fallthrough;
595 	case HAL_EHT_BW_20:
596 		ppdu_info->bw = HAL_RX_BW_20MHZ;
597 		break;
598 	case HAL_EHT_BW_40:
599 		ppdu_info->bw = HAL_RX_BW_40MHZ;
600 		break;
601 	case HAL_EHT_BW_80:
602 		ppdu_info->bw = HAL_RX_BW_80MHZ;
603 		break;
604 	case HAL_EHT_BW_160:
605 		ppdu_info->bw = HAL_RX_BW_160MHZ;
606 		break;
607 	case HAL_EHT_BW_320_1:
608 	case HAL_EHT_BW_320_2:
609 		ppdu_info->bw = HAL_RX_BW_320MHZ;
610 		break;
611 	}
612 }
613 
614 static void
615 ath12k_dp_mon_hal_rx_parse_u_sig_tb(const struct hal_mon_usig_tb *usig_tb,
616 				    struct hal_rx_mon_ppdu_info *ppdu_info)
617 {
618 	struct ieee80211_radiotap_eht_usig *usig = &ppdu_info->u_sig_info.usig;
619 	enum ieee80211_radiotap_eht_usig_tb spatial_reuse1, spatial_reuse2;
620 	u32 common, value, mask;
621 
622 	spatial_reuse1 = IEEE80211_RADIOTAP_EHT_USIG2_TB_B3_B6_SPATIAL_REUSE_1;
623 	spatial_reuse2 = IEEE80211_RADIOTAP_EHT_USIG2_TB_B7_B10_SPATIAL_REUSE_2;
624 
625 	common = __le32_to_cpu(usig->common);
626 	value = __le32_to_cpu(usig->value);
627 	mask = __le32_to_cpu(usig->mask);
628 
629 	ppdu_info->u_sig_info.ppdu_type_comp_mode =
630 				le32_get_bits(usig_tb->info0,
631 					      HAL_RX_USIG_TB_INFO0_PPDU_TYPE_COMP_MODE);
632 
633 	common |= ATH12K_LE32_DEC_ENC(usig_tb->info0,
634 				      HAL_RX_USIG_TB_INFO0_RX_INTEG_CHECK_PASS,
635 				      IEEE80211_RADIOTAP_EHT_USIG_COMMON_BAD_USIG_CRC);
636 
637 	value |= IEEE80211_RADIOTAP_EHT_USIG1_TB_B20_B25_DISREGARD |
638 		 u32_encode_bits(ppdu_info->u_sig_info.ppdu_type_comp_mode,
639 				 IEEE80211_RADIOTAP_EHT_USIG2_TB_B0_B1_PPDU_TYPE) |
640 		 IEEE80211_RADIOTAP_EHT_USIG2_TB_B2_VALIDATE |
641 		 ATH12K_LE32_DEC_ENC(usig_tb->info0,
642 				     HAL_RX_USIG_TB_INFO0_SPATIAL_REUSE_1,
643 				     spatial_reuse1) |
644 		 ATH12K_LE32_DEC_ENC(usig_tb->info0,
645 				     HAL_RX_USIG_TB_INFO0_SPATIAL_REUSE_2,
646 				     spatial_reuse2) |
647 		 IEEE80211_RADIOTAP_EHT_USIG2_TB_B11_B15_DISREGARD |
648 		 ATH12K_LE32_DEC_ENC(usig_tb->info0,
649 				     HAL_RX_USIG_TB_INFO0_CRC,
650 				     IEEE80211_RADIOTAP_EHT_USIG2_TB_B16_B19_CRC) |
651 		 ATH12K_LE32_DEC_ENC(usig_tb->info0,
652 				     HAL_RX_USIG_TB_INFO0_TAIL,
653 				     IEEE80211_RADIOTAP_EHT_USIG2_TB_B20_B25_TAIL);
654 
655 	mask |= IEEE80211_RADIOTAP_EHT_USIG1_TB_B20_B25_DISREGARD |
656 		IEEE80211_RADIOTAP_EHT_USIG2_TB_B0_B1_PPDU_TYPE |
657 		IEEE80211_RADIOTAP_EHT_USIG2_TB_B2_VALIDATE |
658 		spatial_reuse1 | spatial_reuse2 |
659 		IEEE80211_RADIOTAP_EHT_USIG2_TB_B11_B15_DISREGARD |
660 		IEEE80211_RADIOTAP_EHT_USIG2_TB_B16_B19_CRC |
661 		IEEE80211_RADIOTAP_EHT_USIG2_TB_B20_B25_TAIL;
662 
663 	usig->common = cpu_to_le32(common);
664 	usig->value = cpu_to_le32(value);
665 	usig->mask = cpu_to_le32(mask);
666 }
667 
668 static void
669 ath12k_dp_mon_hal_rx_parse_u_sig_mu(const struct hal_mon_usig_mu *usig_mu,
670 				    struct hal_rx_mon_ppdu_info *ppdu_info)
671 {
672 	struct ieee80211_radiotap_eht_usig *usig = &ppdu_info->u_sig_info.usig;
673 	enum ieee80211_radiotap_eht_usig_mu sig_symb, punc;
674 	u32 common, value, mask;
675 
676 	sig_symb = IEEE80211_RADIOTAP_EHT_USIG2_MU_B11_B15_EHT_SIG_SYMBOLS;
677 	punc = IEEE80211_RADIOTAP_EHT_USIG2_MU_B3_B7_PUNCTURED_INFO;
678 
679 	common = __le32_to_cpu(usig->common);
680 	value = __le32_to_cpu(usig->value);
681 	mask = __le32_to_cpu(usig->mask);
682 
683 	ppdu_info->u_sig_info.ppdu_type_comp_mode =
684 				le32_get_bits(usig_mu->info0,
685 					      HAL_RX_USIG_MU_INFO0_PPDU_TYPE_COMP_MODE);
686 	ppdu_info->u_sig_info.eht_sig_mcs =
687 				le32_get_bits(usig_mu->info0,
688 					      HAL_RX_USIG_MU_INFO0_EHT_SIG_MCS);
689 	ppdu_info->u_sig_info.num_eht_sig_sym =
690 				le32_get_bits(usig_mu->info0,
691 					      HAL_RX_USIG_MU_INFO0_NUM_EHT_SIG_SYM);
692 
693 	common |= ATH12K_LE32_DEC_ENC(usig_mu->info0,
694 				      HAL_RX_USIG_MU_INFO0_RX_INTEG_CHECK_PASS,
695 				      IEEE80211_RADIOTAP_EHT_USIG_COMMON_BAD_USIG_CRC);
696 
697 	value |= IEEE80211_RADIOTAP_EHT_USIG1_MU_B20_B24_DISREGARD |
698 		 IEEE80211_RADIOTAP_EHT_USIG1_MU_B25_VALIDATE |
699 		 u32_encode_bits(ppdu_info->u_sig_info.ppdu_type_comp_mode,
700 				 IEEE80211_RADIOTAP_EHT_USIG2_MU_B0_B1_PPDU_TYPE) |
701 		 IEEE80211_RADIOTAP_EHT_USIG2_MU_B2_VALIDATE |
702 		 ATH12K_LE32_DEC_ENC(usig_mu->info0,
703 				     HAL_RX_USIG_MU_INFO0_PUNC_CH_INFO,
704 				     punc) |
705 		 IEEE80211_RADIOTAP_EHT_USIG2_MU_B8_VALIDATE |
706 		 u32_encode_bits(ppdu_info->u_sig_info.eht_sig_mcs,
707 				 IEEE80211_RADIOTAP_EHT_USIG2_MU_B9_B10_SIG_MCS) |
708 		 u32_encode_bits(ppdu_info->u_sig_info.num_eht_sig_sym,
709 				 sig_symb) |
710 		 ATH12K_LE32_DEC_ENC(usig_mu->info0,
711 				     HAL_RX_USIG_MU_INFO0_CRC,
712 				     IEEE80211_RADIOTAP_EHT_USIG2_MU_B16_B19_CRC) |
713 		 ATH12K_LE32_DEC_ENC(usig_mu->info0,
714 				     HAL_RX_USIG_MU_INFO0_TAIL,
715 				     IEEE80211_RADIOTAP_EHT_USIG2_MU_B20_B25_TAIL);
716 
717 	mask |= IEEE80211_RADIOTAP_EHT_USIG1_MU_B20_B24_DISREGARD |
718 		IEEE80211_RADIOTAP_EHT_USIG1_MU_B25_VALIDATE |
719 		IEEE80211_RADIOTAP_EHT_USIG2_MU_B0_B1_PPDU_TYPE |
720 		IEEE80211_RADIOTAP_EHT_USIG2_MU_B2_VALIDATE |
721 		punc |
722 		IEEE80211_RADIOTAP_EHT_USIG2_MU_B8_VALIDATE |
723 		IEEE80211_RADIOTAP_EHT_USIG2_MU_B9_B10_SIG_MCS |
724 		sig_symb |
725 		IEEE80211_RADIOTAP_EHT_USIG2_MU_B16_B19_CRC |
726 		IEEE80211_RADIOTAP_EHT_USIG2_MU_B20_B25_TAIL;
727 
728 	usig->common = cpu_to_le32(common);
729 	usig->value = cpu_to_le32(value);
730 	usig->mask = cpu_to_le32(mask);
731 }
732 
733 static void
734 ath12k_dp_mon_hal_rx_parse_u_sig_hdr(const struct hal_mon_usig_hdr *usig,
735 				     struct hal_rx_mon_ppdu_info *ppdu_info)
736 {
737 	u8 comp_mode;
738 
739 	ppdu_info->eht_usig = true;
740 
741 	ath12k_dp_mon_hal_rx_parse_u_sig_cmn(&usig->cmn, ppdu_info);
742 
743 	comp_mode = le32_get_bits(usig->non_cmn.mu.info0,
744 				  HAL_RX_USIG_MU_INFO0_PPDU_TYPE_COMP_MODE);
745 
746 	if (comp_mode == 0 && ppdu_info->u_sig_info.ul_dl)
747 		ath12k_dp_mon_hal_rx_parse_u_sig_tb(&usig->non_cmn.tb, ppdu_info);
748 	else
749 		ath12k_dp_mon_hal_rx_parse_u_sig_mu(&usig->non_cmn.mu, ppdu_info);
750 }
751 
752 static void
753 ath12k_dp_mon_hal_aggr_tlv(struct hal_rx_mon_ppdu_info *ppdu_info,
754 			   u16 tlv_len, const void *tlv_data)
755 {
756 	if (tlv_len <= HAL_RX_MON_MAX_AGGR_SIZE - ppdu_info->tlv_aggr.cur_len) {
757 		memcpy(ppdu_info->tlv_aggr.buf + ppdu_info->tlv_aggr.cur_len,
758 		       tlv_data, tlv_len);
759 		ppdu_info->tlv_aggr.cur_len += tlv_len;
760 	}
761 }
762 
763 static inline bool
764 ath12k_dp_mon_hal_rx_is_frame_type_ndp(const struct hal_rx_u_sig_info *usig_info)
765 {
766 	if (usig_info->ppdu_type_comp_mode == 1 &&
767 	    usig_info->eht_sig_mcs == 0 &&
768 	    usig_info->num_eht_sig_sym == 0)
769 		return true;
770 
771 	return false;
772 }
773 
774 static inline bool
775 ath12k_dp_mon_hal_rx_is_non_ofdma(const struct hal_rx_u_sig_info *usig_info)
776 {
777 	u32 ppdu_type_comp_mode = usig_info->ppdu_type_comp_mode;
778 	u32 ul_dl = usig_info->ul_dl;
779 
780 	if ((ppdu_type_comp_mode == HAL_RX_RECEPTION_TYPE_MU_MIMO && ul_dl == 0) ||
781 	    (ppdu_type_comp_mode == HAL_RX_RECEPTION_TYPE_MU_OFDMA && ul_dl == 0) ||
782 	    (ppdu_type_comp_mode == HAL_RX_RECEPTION_TYPE_MU_MIMO  && ul_dl == 1))
783 		return true;
784 
785 	return false;
786 }
787 
788 static inline bool
789 ath12k_dp_mon_hal_rx_is_ofdma(const struct hal_rx_u_sig_info *usig_info)
790 {
791 	if (usig_info->ppdu_type_comp_mode == 0 && usig_info->ul_dl == 0)
792 		return true;
793 
794 	return false;
795 }
796 
797 static void
798 ath12k_dp_mon_hal_rx_parse_eht_sig_ndp(const struct hal_eht_sig_ndp_cmn_eb *eht_sig_ndp,
799 				       struct hal_rx_mon_ppdu_info *ppdu_info)
800 {
801 	struct hal_rx_radiotap_eht *eht = &ppdu_info->eht_info.eht;
802 	u32 known, data;
803 
804 	known = __le32_to_cpu(eht->known);
805 	known |= IEEE80211_RADIOTAP_EHT_KNOWN_SPATIAL_REUSE |
806 		 IEEE80211_RADIOTAP_EHT_KNOWN_EHT_LTF |
807 		 IEEE80211_RADIOTAP_EHT_KNOWN_NSS_S |
808 		 IEEE80211_RADIOTAP_EHT_KNOWN_BEAMFORMED_S |
809 		 IEEE80211_RADIOTAP_EHT_KNOWN_DISREGARD_S |
810 		 IEEE80211_RADIOTAP_EHT_KNOWN_CRC1 |
811 		 IEEE80211_RADIOTAP_EHT_KNOWN_TAIL1;
812 	eht->known = cpu_to_le32(known);
813 
814 	data = __le32_to_cpu(eht->data[0]);
815 	data |= ATH12K_LE32_DEC_ENC(eht_sig_ndp->info0,
816 				    HAL_RX_EHT_SIG_NDP_CMN_INFO0_SPATIAL_REUSE,
817 				    IEEE80211_RADIOTAP_EHT_DATA0_SPATIAL_REUSE);
818 	/* GI and LTF size are separately indicated in radiotap header
819 	 * and hence will be parsed from other TLV
820 	 */
821 	data |=	ATH12K_LE32_DEC_ENC(eht_sig_ndp->info0,
822 				    HAL_RX_EHT_SIG_NDP_CMN_INFO0_NUM_LTF_SYM,
823 				    IEEE80211_RADIOTAP_EHT_DATA0_EHT_LTF);
824 
825 	data |=	ATH12K_LE32_DEC_ENC(eht_sig_ndp->info0,
826 				    HAL_RX_EHT_SIG_NDP_CMN_INFO0_CRC,
827 				    IEEE80211_RADIOTAP_EHT_DATA0_CRC1_O);
828 
829 	data |= ATH12K_LE32_DEC_ENC(eht_sig_ndp->info0,
830 				    HAL_RX_EHT_SIG_NDP_CMN_INFO0_DISREGARD,
831 				    IEEE80211_RADIOTAP_EHT_DATA0_DISREGARD_S);
832 	eht->data[0] = cpu_to_le32(data);
833 
834 	data = __le32_to_cpu(eht->data[7]);
835 	data |=	ATH12K_LE32_DEC_ENC(eht_sig_ndp->info0,
836 				    HAL_RX_EHT_SIG_NDP_CMN_INFO0_NSS,
837 				    IEEE80211_RADIOTAP_EHT_DATA7_NSS_S);
838 
839 	data |=	ATH12K_LE32_DEC_ENC(eht_sig_ndp->info0,
840 				    HAL_RX_EHT_SIG_NDP_CMN_INFO0_BEAMFORMED,
841 				    IEEE80211_RADIOTAP_EHT_DATA7_BEAMFORMED_S);
842 	eht->data[7] = cpu_to_le32(data);
843 }
844 
845 static void
846 ath12k_dp_mon_hal_rx_parse_usig_overflow(const struct hal_eht_sig_usig_overflow *ovflow,
847 					 struct hal_rx_mon_ppdu_info *ppdu_info)
848 {
849 	struct hal_rx_radiotap_eht *eht = &ppdu_info->eht_info.eht;
850 	u32 known, data;
851 
852 	known = __le32_to_cpu(eht->known);
853 	known |= IEEE80211_RADIOTAP_EHT_KNOWN_SPATIAL_REUSE |
854 		 IEEE80211_RADIOTAP_EHT_KNOWN_EHT_LTF |
855 		 IEEE80211_RADIOTAP_EHT_KNOWN_LDPC_EXTRA_SYM_OM |
856 		 IEEE80211_RADIOTAP_EHT_KNOWN_PRE_PADD_FACOR_OM |
857 		 IEEE80211_RADIOTAP_EHT_KNOWN_PE_DISAMBIGUITY_OM |
858 		 IEEE80211_RADIOTAP_EHT_KNOWN_DISREGARD_O;
859 	eht->known = cpu_to_le32(known);
860 
861 	data = __le32_to_cpu(eht->data[0]);
862 	data |=	ATH12K_LE32_DEC_ENC(ovflow->info0,
863 				    HAL_RX_EHT_SIG_OVERFLOW_INFO0_SPATIAL_REUSE,
864 				    IEEE80211_RADIOTAP_EHT_DATA0_SPATIAL_REUSE);
865 
866 	/* GI and LTF size are separately indicated in radiotap header
867 	 * and hence will be parsed from other TLV
868 	 */
869 	data |=	ATH12K_LE32_DEC_ENC(ovflow->info0,
870 				    HAL_RX_EHT_SIG_OVERFLOW_INFO0_NUM_LTF_SYM,
871 				    IEEE80211_RADIOTAP_EHT_DATA0_EHT_LTF);
872 
873 	data |=	ATH12K_LE32_DEC_ENC(ovflow->info0,
874 				    HAL_RX_EHT_SIG_OVERFLOW_INFO0_LDPC_EXTA_SYM,
875 				    IEEE80211_RADIOTAP_EHT_DATA0_LDPC_EXTRA_SYM_OM);
876 
877 	data |=	ATH12K_LE32_DEC_ENC(ovflow->info0,
878 				    HAL_RX_EHT_SIG_OVERFLOW_INFO0_PRE_FEC_PAD_FACTOR,
879 				    IEEE80211_RADIOTAP_EHT_DATA0_PRE_PADD_FACOR_OM);
880 
881 	data |=	ATH12K_LE32_DEC_ENC(ovflow->info0,
882 				    HAL_RX_EHT_SIG_OVERFLOW_INFO0_DISAMBIGUITY,
883 				    IEEE80211_RADIOTAP_EHT_DATA0_PE_DISAMBIGUITY_OM);
884 
885 	data |=	ATH12K_LE32_DEC_ENC(ovflow->info0,
886 				    HAL_RX_EHT_SIG_OVERFLOW_INFO0_DISREGARD,
887 				    IEEE80211_RADIOTAP_EHT_DATA0_DISREGARD_O);
888 	eht->data[0] = cpu_to_le32(data);
889 }
890 
891 static void
892 ath12k_dp_mon_hal_rx_parse_non_ofdma_users(const struct hal_eht_sig_non_ofdma_cmn_eb *eb,
893 					   struct hal_rx_mon_ppdu_info *ppdu_info)
894 {
895 	struct hal_rx_radiotap_eht *eht = &ppdu_info->eht_info.eht;
896 	u32 known, data;
897 
898 	known = __le32_to_cpu(eht->known);
899 	known |= IEEE80211_RADIOTAP_EHT_KNOWN_NR_NON_OFDMA_USERS_M;
900 	eht->known = cpu_to_le32(known);
901 
902 	data = __le32_to_cpu(eht->data[7]);
903 	data |=	ATH12K_LE32_DEC_ENC(eb->info0,
904 				    HAL_RX_EHT_SIG_NON_OFDMA_INFO0_NUM_USERS,
905 				    IEEE80211_RADIOTAP_EHT_DATA7_NUM_OF_NON_OFDMA_USERS);
906 	eht->data[7] = cpu_to_le32(data);
907 }
908 
909 static void
910 ath12k_dp_mon_hal_rx_parse_eht_mumimo_user(const struct hal_eht_sig_mu_mimo *user,
911 					   struct hal_rx_mon_ppdu_info *ppdu_info)
912 {
913 	struct hal_rx_eht_info *eht_info = &ppdu_info->eht_info;
914 	u32 user_idx;
915 
916 	if (eht_info->num_user_info >= ARRAY_SIZE(eht_info->user_info))
917 		return;
918 
919 	user_idx = eht_info->num_user_info++;
920 
921 	eht_info->user_info[user_idx] |=
922 		IEEE80211_RADIOTAP_EHT_USER_INFO_STA_ID_KNOWN |
923 		IEEE80211_RADIOTAP_EHT_USER_INFO_MCS_KNOWN |
924 		IEEE80211_RADIOTAP_EHT_USER_INFO_CODING_KNOWN |
925 		IEEE80211_RADIOTAP_EHT_USER_INFO_SPATIAL_CONFIG_KNOWN_M |
926 		ATH12K_LE32_DEC_ENC(user->info0,
927 				    HAL_RX_EHT_SIG_MUMIMO_USER_INFO0_STA_ID,
928 				    IEEE80211_RADIOTAP_EHT_USER_INFO_STA_ID) |
929 		ATH12K_LE32_DEC_ENC(user->info0,
930 				    HAL_RX_EHT_SIG_MUMIMO_USER_INFO0_CODING,
931 				    IEEE80211_RADIOTAP_EHT_USER_INFO_CODING) |
932 		ATH12K_LE32_DEC_ENC(user->info0,
933 				    HAL_RX_EHT_SIG_MUMIMO_USER_INFO0_MCS,
934 				    IEEE80211_RADIOTAP_EHT_USER_INFO_MCS) |
935 		ATH12K_LE32_DEC_ENC(user->info0,
936 				    HAL_RX_EHT_SIG_MUMIMO_USER_INFO0_SPATIAL_CODING,
937 				    IEEE80211_RADIOTAP_EHT_USER_INFO_SPATIAL_CONFIG_M);
938 
939 	ppdu_info->mcs = le32_get_bits(user->info0,
940 				       HAL_RX_EHT_SIG_MUMIMO_USER_INFO0_MCS);
941 }
942 
943 static void
944 ath12k_dp_mon_hal_rx_parse_eht_non_mumimo_user(const struct hal_eht_sig_non_mu_mimo *user,
945 					       struct hal_rx_mon_ppdu_info *ppdu_info)
946 {
947 	struct hal_rx_eht_info *eht_info = &ppdu_info->eht_info;
948 	u32 user_idx;
949 
950 	if (eht_info->num_user_info >= ARRAY_SIZE(eht_info->user_info))
951 		return;
952 
953 	user_idx = eht_info->num_user_info++;
954 
955 	eht_info->user_info[user_idx] |=
956 		IEEE80211_RADIOTAP_EHT_USER_INFO_STA_ID_KNOWN |
957 		IEEE80211_RADIOTAP_EHT_USER_INFO_MCS_KNOWN |
958 		IEEE80211_RADIOTAP_EHT_USER_INFO_CODING_KNOWN |
959 		IEEE80211_RADIOTAP_EHT_USER_INFO_NSS_KNOWN_O |
960 		IEEE80211_RADIOTAP_EHT_USER_INFO_BEAMFORMING_KNOWN_O |
961 		ATH12K_LE32_DEC_ENC(user->info0,
962 				    HAL_RX_EHT_SIG_NON_MUMIMO_USER_INFO0_STA_ID,
963 				    IEEE80211_RADIOTAP_EHT_USER_INFO_STA_ID) |
964 		ATH12K_LE32_DEC_ENC(user->info0,
965 				    HAL_RX_EHT_SIG_NON_MUMIMO_USER_INFO0_CODING,
966 				    IEEE80211_RADIOTAP_EHT_USER_INFO_CODING) |
967 		ATH12K_LE32_DEC_ENC(user->info0,
968 				    HAL_RX_EHT_SIG_NON_MUMIMO_USER_INFO0_MCS,
969 				    IEEE80211_RADIOTAP_EHT_USER_INFO_MCS) |
970 		ATH12K_LE32_DEC_ENC(user->info0,
971 				    HAL_RX_EHT_SIG_NON_MUMIMO_USER_INFO0_NSS,
972 				    IEEE80211_RADIOTAP_EHT_USER_INFO_NSS_O) |
973 		ATH12K_LE32_DEC_ENC(user->info0,
974 				    HAL_RX_EHT_SIG_NON_MUMIMO_USER_INFO0_BEAMFORMED,
975 				    IEEE80211_RADIOTAP_EHT_USER_INFO_BEAMFORMING_O);
976 
977 	ppdu_info->mcs = le32_get_bits(user->info0,
978 				       HAL_RX_EHT_SIG_NON_MUMIMO_USER_INFO0_MCS);
979 
980 	ppdu_info->nss = le32_get_bits(user->info0,
981 				       HAL_RX_EHT_SIG_NON_MUMIMO_USER_INFO0_NSS) + 1;
982 }
983 
984 static inline bool
985 ath12k_dp_mon_hal_rx_is_mu_mimo_user(const struct hal_rx_u_sig_info *usig_info)
986 {
987 	if (usig_info->ppdu_type_comp_mode == HAL_RX_RECEPTION_TYPE_SU &&
988 	    usig_info->ul_dl == 1)
989 		return true;
990 
991 	return false;
992 }
993 
994 static void
995 ath12k_dp_mon_hal_rx_parse_eht_sig_non_ofdma(const void *tlv,
996 					     struct hal_rx_mon_ppdu_info *ppdu_info)
997 {
998 	const struct hal_eht_sig_non_ofdma_cmn_eb *eb = tlv;
999 
1000 	ath12k_dp_mon_hal_rx_parse_usig_overflow(tlv, ppdu_info);
1001 	ath12k_dp_mon_hal_rx_parse_non_ofdma_users(eb, ppdu_info);
1002 
1003 	if (ath12k_dp_mon_hal_rx_is_mu_mimo_user(&ppdu_info->u_sig_info))
1004 		ath12k_dp_mon_hal_rx_parse_eht_mumimo_user(&eb->user_field.mu_mimo,
1005 							   ppdu_info);
1006 	else
1007 		ath12k_dp_mon_hal_rx_parse_eht_non_mumimo_user(&eb->user_field.n_mu_mimo,
1008 							       ppdu_info);
1009 }
1010 
/* Translate the EHT-SIG OFDMA common RU allocation fields into the
 * radiotap EHT per-content-channel RU allocation words (data[1..6]).
 *
 * The switch deliberately falls through from wider to narrower
 * bandwidths: a 320 MHz PPDU also carries the 160/80/40/20 MHz
 * allocation fields, a 160 MHz PPDU carries the 80/40/20 MHz ones,
 * and so on.
 */
static void
ath12k_dp_mon_hal_rx_parse_ru_allocation(const struct hal_eht_sig_ofdma_cmn_eb *eb,
					 struct hal_rx_mon_ppdu_info *ppdu_info)
{
	const struct hal_eht_sig_ofdma_cmn_eb1 *ofdma_cmn_eb1 = &eb->eb1;
	const struct hal_eht_sig_ofdma_cmn_eb2 *ofdma_cmn_eb2 = &eb->eb2;
	struct hal_rx_radiotap_eht *eht = &ppdu_info->eht_info.eht;
	/* Short aliases for the radiotap field masks; CC a::b below means
	 * content channel "a", RU allocation slot "b".
	 */
	enum ieee80211_radiotap_eht_data ru_123, ru_124, ru_125, ru_126;
	enum ieee80211_radiotap_eht_data ru_121, ru_122, ru_112, ru_111;
	u32 data;

	ru_123 = IEEE80211_RADIOTAP_EHT_DATA4_RU_ALLOC_CC_1_2_3;
	ru_124 = IEEE80211_RADIOTAP_EHT_DATA5_RU_ALLOC_CC_1_2_4;
	ru_125 = IEEE80211_RADIOTAP_EHT_DATA5_RU_ALLOC_CC_1_2_5;
	ru_126 = IEEE80211_RADIOTAP_EHT_DATA6_RU_ALLOC_CC_1_2_6;
	ru_121 = IEEE80211_RADIOTAP_EHT_DATA3_RU_ALLOC_CC_1_2_1;
	ru_122 = IEEE80211_RADIOTAP_EHT_DATA3_RU_ALLOC_CC_1_2_2;
	ru_112 = IEEE80211_RADIOTAP_EHT_DATA2_RU_ALLOC_CC_1_1_2;
	ru_111 = IEEE80211_RADIOTAP_EHT_DATA1_RU_ALLOC_CC_1_1_1;

	switch (ppdu_info->u_sig_info.bw) {
	case HAL_EHT_BW_320_2:
	case HAL_EHT_BW_320_1:
		data = __le32_to_cpu(eht->data[4]);
		/* CC1 2::3 */
		data |=	IEEE80211_RADIOTAP_EHT_DATA4_RU_ALLOC_CC_1_2_3_KNOWN |
			ATH12K_LE64_DEC_ENC(ofdma_cmn_eb2->info0,
					    HAL_RX_EHT_SIG_OFDMA_EB2_RU_ALLOC_2_3,
					    ru_123);
		eht->data[4] = cpu_to_le32(data);

		data = __le32_to_cpu(eht->data[5]);
		/* CC1 2::4 */
		data |=	IEEE80211_RADIOTAP_EHT_DATA5_RU_ALLOC_CC_1_2_4_KNOWN |
			ATH12K_LE64_DEC_ENC(ofdma_cmn_eb2->info0,
					    HAL_RX_EHT_SIG_OFDMA_EB2_RU_ALLOC_2_4,
					    ru_124);

		/* CC1 2::5 */
		data |=	IEEE80211_RADIOTAP_EHT_DATA5_RU_ALLOC_CC_1_2_5_KNOWN |
			ATH12K_LE64_DEC_ENC(ofdma_cmn_eb2->info0,
					    HAL_RX_EHT_SIG_OFDMA_EB2_RU_ALLOC_2_5,
					    ru_125);
		eht->data[5] = cpu_to_le32(data);

		data = __le32_to_cpu(eht->data[6]);
		/* CC1 2::6 */
		data |=	IEEE80211_RADIOTAP_EHT_DATA6_RU_ALLOC_CC_1_2_6_KNOWN |
			ATH12K_LE64_DEC_ENC(ofdma_cmn_eb2->info0,
					    HAL_RX_EHT_SIG_OFDMA_EB2_RU_ALLOC_2_6,
					    ru_126);
		eht->data[6] = cpu_to_le32(data);

		fallthrough;
	case HAL_EHT_BW_160:
		data = __le32_to_cpu(eht->data[3]);
		/* CC1 2::1 */
		data |=	IEEE80211_RADIOTAP_EHT_DATA3_RU_ALLOC_CC_1_2_1_KNOWN |
			ATH12K_LE64_DEC_ENC(ofdma_cmn_eb2->info0,
					    HAL_RX_EHT_SIG_OFDMA_EB2_RU_ALLOC_2_1,
					    ru_121);
		/* CC1 2::2 */
		data |=	IEEE80211_RADIOTAP_EHT_DATA3_RU_ALLOC_CC_1_2_2_KNOWN |
			ATH12K_LE64_DEC_ENC(ofdma_cmn_eb2->info0,
					    HAL_RX_EHT_SIG_OFDMA_EB2_RU_ALLOC_2_2,
					    ru_122);
		eht->data[3] = cpu_to_le32(data);

		fallthrough;
	case HAL_EHT_BW_80:
		data = __le32_to_cpu(eht->data[2]);
		/* CC1 1::2 */
		data |=	IEEE80211_RADIOTAP_EHT_DATA2_RU_ALLOC_CC_1_1_2_KNOWN |
			ATH12K_LE64_DEC_ENC(ofdma_cmn_eb1->info0,
					    HAL_RX_EHT_SIG_OFDMA_EB1_RU_ALLOC_1_2,
					    ru_112);
		eht->data[2] = cpu_to_le32(data);

		fallthrough;
	case HAL_EHT_BW_40:
		fallthrough;
	case HAL_EHT_BW_20:
		data = __le32_to_cpu(eht->data[1]);
		/* CC1 1::1 */
		data |=	IEEE80211_RADIOTAP_EHT_DATA1_RU_ALLOC_CC_1_1_1_KNOWN |
			ATH12K_LE64_DEC_ENC(ofdma_cmn_eb1->info0,
					    HAL_RX_EHT_SIG_OFDMA_EB1_RU_ALLOC_1_1,
					    ru_111);
		eht->data[1] = cpu_to_le32(data);
		break;
	default:
		break;
	}
}
1105 
1106 static void
1107 ath12k_dp_mon_hal_rx_parse_eht_sig_ofdma(const void *tlv,
1108 					 struct hal_rx_mon_ppdu_info *ppdu_info)
1109 {
1110 	const struct hal_eht_sig_ofdma_cmn_eb *ofdma = tlv;
1111 
1112 	ath12k_dp_mon_hal_rx_parse_usig_overflow(tlv, ppdu_info);
1113 	ath12k_dp_mon_hal_rx_parse_ru_allocation(ofdma, ppdu_info);
1114 
1115 	ath12k_dp_mon_hal_rx_parse_eht_non_mumimo_user(&ofdma->user_field.n_mu_mimo,
1116 						       ppdu_info);
1117 }
1118 
1119 static void
1120 ath12k_dp_mon_parse_eht_sig_hdr(struct hal_rx_mon_ppdu_info *ppdu_info,
1121 				const void *tlv_data)
1122 {
1123 	ppdu_info->is_eht = true;
1124 
1125 	if (ath12k_dp_mon_hal_rx_is_frame_type_ndp(&ppdu_info->u_sig_info))
1126 		ath12k_dp_mon_hal_rx_parse_eht_sig_ndp(tlv_data, ppdu_info);
1127 	else if (ath12k_dp_mon_hal_rx_is_non_ofdma(&ppdu_info->u_sig_info))
1128 		ath12k_dp_mon_hal_rx_parse_eht_sig_non_ofdma(tlv_data, ppdu_info);
1129 	else if (ath12k_dp_mon_hal_rx_is_ofdma(&ppdu_info->u_sig_info))
1130 		ath12k_dp_mon_hal_rx_parse_eht_sig_ofdma(tlv_data, ppdu_info);
1131 }
1132 
1133 static inline enum ath12k_eht_ru_size
1134 hal_rx_mon_hal_ru_size_to_ath12k_ru_size(u32 hal_ru_size)
1135 {
1136 	switch (hal_ru_size) {
1137 	case HAL_EHT_RU_26:
1138 		return ATH12K_EHT_RU_26;
1139 	case HAL_EHT_RU_52:
1140 		return ATH12K_EHT_RU_52;
1141 	case HAL_EHT_RU_78:
1142 		return ATH12K_EHT_RU_52_26;
1143 	case HAL_EHT_RU_106:
1144 		return ATH12K_EHT_RU_106;
1145 	case HAL_EHT_RU_132:
1146 		return ATH12K_EHT_RU_106_26;
1147 	case HAL_EHT_RU_242:
1148 		return ATH12K_EHT_RU_242;
1149 	case HAL_EHT_RU_484:
1150 		return ATH12K_EHT_RU_484;
1151 	case HAL_EHT_RU_726:
1152 		return ATH12K_EHT_RU_484_242;
1153 	case HAL_EHT_RU_996:
1154 		return ATH12K_EHT_RU_996;
1155 	case HAL_EHT_RU_996x2:
1156 		return ATH12K_EHT_RU_996x2;
1157 	case HAL_EHT_RU_996x3:
1158 		return ATH12K_EHT_RU_996x3;
1159 	case HAL_EHT_RU_996x4:
1160 		return ATH12K_EHT_RU_996x4;
1161 	case HAL_EHT_RU_NONE:
1162 		return ATH12K_EHT_RU_INVALID;
1163 	case HAL_EHT_RU_996_484:
1164 		return ATH12K_EHT_RU_996_484;
1165 	case HAL_EHT_RU_996x2_484:
1166 		return ATH12K_EHT_RU_996x2_484;
1167 	case HAL_EHT_RU_996x3_484:
1168 		return ATH12K_EHT_RU_996x3_484;
1169 	case HAL_EHT_RU_996_484_242:
1170 		return ATH12K_EHT_RU_996_484_242;
1171 	default:
1172 		return ATH12K_EHT_RU_INVALID;
1173 	}
1174 }
1175 
1176 static inline u32
1177 hal_rx_ul_ofdma_ru_size_to_width(enum ath12k_eht_ru_size ru_size)
1178 {
1179 	switch (ru_size) {
1180 	case ATH12K_EHT_RU_26:
1181 		return RU_26;
1182 	case ATH12K_EHT_RU_52:
1183 		return RU_52;
1184 	case ATH12K_EHT_RU_52_26:
1185 		return RU_52_26;
1186 	case ATH12K_EHT_RU_106:
1187 		return RU_106;
1188 	case ATH12K_EHT_RU_106_26:
1189 		return RU_106_26;
1190 	case ATH12K_EHT_RU_242:
1191 		return RU_242;
1192 	case ATH12K_EHT_RU_484:
1193 		return RU_484;
1194 	case ATH12K_EHT_RU_484_242:
1195 		return RU_484_242;
1196 	case ATH12K_EHT_RU_996:
1197 		return RU_996;
1198 	case ATH12K_EHT_RU_996_484:
1199 		return RU_996_484;
1200 	case ATH12K_EHT_RU_996_484_242:
1201 		return RU_996_484_242;
1202 	case ATH12K_EHT_RU_996x2:
1203 		return RU_2X996;
1204 	case ATH12K_EHT_RU_996x2_484:
1205 		return RU_2X996_484;
1206 	case ATH12K_EHT_RU_996x3:
1207 		return RU_3X996;
1208 	case ATH12K_EHT_RU_996x3_484:
1209 		return RU_3X996_484;
1210 	case ATH12K_EHT_RU_996x4:
1211 		return RU_4X996;
1212 	default:
1213 		return RU_INVALID;
1214 	}
1215 }
1216 
1217 static void
1218 ath12k_dp_mon_hal_rx_parse_user_info(const struct hal_receive_user_info *rx_usr_info,
1219 				     u16 user_id,
1220 				     struct hal_rx_mon_ppdu_info *ppdu_info)
1221 {
1222 	struct hal_rx_user_status *mon_rx_user_status = NULL;
1223 	struct hal_rx_radiotap_eht *eht = &ppdu_info->eht_info.eht;
1224 	enum ath12k_eht_ru_size rtap_ru_size = ATH12K_EHT_RU_INVALID;
1225 	u32 ru_width, reception_type, ru_index = HAL_EHT_RU_INVALID;
1226 	u32 ru_type_80_0, ru_start_index_80_0;
1227 	u32 ru_type_80_1, ru_start_index_80_1;
1228 	u32 ru_type_80_2, ru_start_index_80_2;
1229 	u32 ru_type_80_3, ru_start_index_80_3;
1230 	u32 ru_size = 0, num_80mhz_with_ru = 0;
1231 	u64 ru_index_320mhz = 0;
1232 	u32 ru_index_per80mhz;
1233 
1234 	reception_type = le32_get_bits(rx_usr_info->info0,
1235 				       HAL_RX_USR_INFO0_RECEPTION_TYPE);
1236 
1237 	switch (reception_type) {
1238 	case HAL_RECEPTION_TYPE_SU:
1239 		ppdu_info->reception_type = HAL_RX_RECEPTION_TYPE_SU;
1240 		break;
1241 	case HAL_RECEPTION_TYPE_DL_MU_MIMO:
1242 	case HAL_RECEPTION_TYPE_UL_MU_MIMO:
1243 		ppdu_info->reception_type = HAL_RX_RECEPTION_TYPE_MU_MIMO;
1244 		break;
1245 	case HAL_RECEPTION_TYPE_DL_MU_OFMA:
1246 	case HAL_RECEPTION_TYPE_UL_MU_OFDMA:
1247 		ppdu_info->reception_type = HAL_RX_RECEPTION_TYPE_MU_OFDMA;
1248 		break;
1249 	case HAL_RECEPTION_TYPE_DL_MU_OFDMA_MIMO:
1250 	case HAL_RECEPTION_TYPE_UL_MU_OFDMA_MIMO:
1251 		ppdu_info->reception_type = HAL_RX_RECEPTION_TYPE_MU_OFDMA_MIMO;
1252 	}
1253 
1254 	ppdu_info->is_stbc = le32_get_bits(rx_usr_info->info0, HAL_RX_USR_INFO0_STBC);
1255 	ppdu_info->ldpc = le32_get_bits(rx_usr_info->info2, HAL_RX_USR_INFO2_LDPC);
1256 	ppdu_info->dcm = le32_get_bits(rx_usr_info->info2, HAL_RX_USR_INFO2_STA_DCM);
1257 	ppdu_info->bw = le32_get_bits(rx_usr_info->info1, HAL_RX_USR_INFO1_RX_BW);
1258 	ppdu_info->mcs = le32_get_bits(rx_usr_info->info1, HAL_RX_USR_INFO1_MCS);
1259 	ppdu_info->nss = le32_get_bits(rx_usr_info->info2, HAL_RX_USR_INFO2_NSS) + 1;
1260 
1261 	if (user_id < HAL_MAX_UL_MU_USERS) {
1262 		mon_rx_user_status = &ppdu_info->userstats[user_id];
1263 		mon_rx_user_status->mcs = ppdu_info->mcs;
1264 		mon_rx_user_status->nss = ppdu_info->nss;
1265 	}
1266 
1267 	if (!(ppdu_info->reception_type == HAL_RX_RECEPTION_TYPE_MU_MIMO ||
1268 	      ppdu_info->reception_type == HAL_RX_RECEPTION_TYPE_MU_OFDMA ||
1269 	      ppdu_info->reception_type == HAL_RX_RECEPTION_TYPE_MU_OFDMA_MIMO))
1270 		return;
1271 
1272 	/* RU allocation present only for OFDMA reception */
1273 	ru_type_80_0 = le32_get_bits(rx_usr_info->info2, HAL_RX_USR_INFO2_RU_TYPE_80_0);
1274 	ru_start_index_80_0 = le32_get_bits(rx_usr_info->info3,
1275 					    HAL_RX_USR_INFO3_RU_START_IDX_80_0);
1276 	if (ru_type_80_0 != HAL_EHT_RU_NONE) {
1277 		ru_size += ru_type_80_0;
1278 		ru_index_per80mhz = ru_start_index_80_0;
1279 		ru_index = ru_index_per80mhz;
1280 		ru_index_320mhz |= HAL_RU_PER80(ru_type_80_0, 0, ru_index_per80mhz);
1281 		num_80mhz_with_ru++;
1282 	}
1283 
1284 	ru_type_80_1 = le32_get_bits(rx_usr_info->info2, HAL_RX_USR_INFO2_RU_TYPE_80_1);
1285 	ru_start_index_80_1 = le32_get_bits(rx_usr_info->info3,
1286 					    HAL_RX_USR_INFO3_RU_START_IDX_80_1);
1287 	if (ru_type_80_1 != HAL_EHT_RU_NONE) {
1288 		ru_size += ru_type_80_1;
1289 		ru_index_per80mhz = ru_start_index_80_1;
1290 		ru_index = ru_index_per80mhz;
1291 		ru_index_320mhz |= HAL_RU_PER80(ru_type_80_1, 1, ru_index_per80mhz);
1292 		num_80mhz_with_ru++;
1293 	}
1294 
1295 	ru_type_80_2 = le32_get_bits(rx_usr_info->info2, HAL_RX_USR_INFO2_RU_TYPE_80_2);
1296 	ru_start_index_80_2 = le32_get_bits(rx_usr_info->info3,
1297 					    HAL_RX_USR_INFO3_RU_START_IDX_80_2);
1298 	if (ru_type_80_2 != HAL_EHT_RU_NONE) {
1299 		ru_size += ru_type_80_2;
1300 		ru_index_per80mhz = ru_start_index_80_2;
1301 		ru_index = ru_index_per80mhz;
1302 		ru_index_320mhz |= HAL_RU_PER80(ru_type_80_2, 2, ru_index_per80mhz);
1303 		num_80mhz_with_ru++;
1304 	}
1305 
1306 	ru_type_80_3 = le32_get_bits(rx_usr_info->info2, HAL_RX_USR_INFO2_RU_TYPE_80_3);
1307 	ru_start_index_80_3 = le32_get_bits(rx_usr_info->info2,
1308 					    HAL_RX_USR_INFO3_RU_START_IDX_80_3);
1309 	if (ru_type_80_3 != HAL_EHT_RU_NONE) {
1310 		ru_size += ru_type_80_3;
1311 		ru_index_per80mhz = ru_start_index_80_3;
1312 		ru_index = ru_index_per80mhz;
1313 		ru_index_320mhz |= HAL_RU_PER80(ru_type_80_3, 3, ru_index_per80mhz);
1314 		num_80mhz_with_ru++;
1315 	}
1316 
1317 	if (num_80mhz_with_ru > 1) {
1318 		/* Calculate the MRU index */
1319 		switch (ru_index_320mhz) {
1320 		case HAL_EHT_RU_996_484_0:
1321 		case HAL_EHT_RU_996x2_484_0:
1322 		case HAL_EHT_RU_996x3_484_0:
1323 			ru_index = 0;
1324 			break;
1325 		case HAL_EHT_RU_996_484_1:
1326 		case HAL_EHT_RU_996x2_484_1:
1327 		case HAL_EHT_RU_996x3_484_1:
1328 			ru_index = 1;
1329 			break;
1330 		case HAL_EHT_RU_996_484_2:
1331 		case HAL_EHT_RU_996x2_484_2:
1332 		case HAL_EHT_RU_996x3_484_2:
1333 			ru_index = 2;
1334 			break;
1335 		case HAL_EHT_RU_996_484_3:
1336 		case HAL_EHT_RU_996x2_484_3:
1337 		case HAL_EHT_RU_996x3_484_3:
1338 			ru_index = 3;
1339 			break;
1340 		case HAL_EHT_RU_996_484_4:
1341 		case HAL_EHT_RU_996x2_484_4:
1342 		case HAL_EHT_RU_996x3_484_4:
1343 			ru_index = 4;
1344 			break;
1345 		case HAL_EHT_RU_996_484_5:
1346 		case HAL_EHT_RU_996x2_484_5:
1347 		case HAL_EHT_RU_996x3_484_5:
1348 			ru_index = 5;
1349 			break;
1350 		case HAL_EHT_RU_996_484_6:
1351 		case HAL_EHT_RU_996x2_484_6:
1352 		case HAL_EHT_RU_996x3_484_6:
1353 			ru_index = 6;
1354 			break;
1355 		case HAL_EHT_RU_996_484_7:
1356 		case HAL_EHT_RU_996x2_484_7:
1357 		case HAL_EHT_RU_996x3_484_7:
1358 			ru_index = 7;
1359 			break;
1360 		case HAL_EHT_RU_996x2_484_8:
1361 			ru_index = 8;
1362 			break;
1363 		case HAL_EHT_RU_996x2_484_9:
1364 			ru_index = 9;
1365 			break;
1366 		case HAL_EHT_RU_996x2_484_10:
1367 			ru_index = 10;
1368 			break;
1369 		case HAL_EHT_RU_996x2_484_11:
1370 			ru_index = 11;
1371 			break;
1372 		default:
1373 			ru_index = HAL_EHT_RU_INVALID;
1374 			break;
1375 		}
1376 
1377 		ru_size += 4;
1378 	}
1379 
1380 	rtap_ru_size = hal_rx_mon_hal_ru_size_to_ath12k_ru_size(ru_size);
1381 	if (rtap_ru_size != ATH12K_EHT_RU_INVALID) {
1382 		u32 known, data;
1383 
1384 		known = __le32_to_cpu(eht->known);
1385 		known |= IEEE80211_RADIOTAP_EHT_KNOWN_RU_MRU_SIZE_OM;
1386 		eht->known = cpu_to_le32(known);
1387 
1388 		data = __le32_to_cpu(eht->data[1]);
1389 		data |=	u32_encode_bits(rtap_ru_size,
1390 					IEEE80211_RADIOTAP_EHT_DATA1_RU_SIZE);
1391 		eht->data[1] = cpu_to_le32(data);
1392 	}
1393 
1394 	if (ru_index != HAL_EHT_RU_INVALID) {
1395 		u32 known, data;
1396 
1397 		known = __le32_to_cpu(eht->known);
1398 		known |= IEEE80211_RADIOTAP_EHT_KNOWN_RU_MRU_INDEX_OM;
1399 		eht->known = cpu_to_le32(known);
1400 
1401 		data = __le32_to_cpu(eht->data[1]);
1402 		data |=	u32_encode_bits(rtap_ru_size,
1403 					IEEE80211_RADIOTAP_EHT_DATA1_RU_INDEX);
1404 		eht->data[1] = cpu_to_le32(data);
1405 	}
1406 
1407 	if (mon_rx_user_status && ru_index != HAL_EHT_RU_INVALID &&
1408 	    rtap_ru_size != ATH12K_EHT_RU_INVALID) {
1409 		mon_rx_user_status->ul_ofdma_ru_start_index = ru_index;
1410 		mon_rx_user_status->ul_ofdma_ru_size = rtap_ru_size;
1411 
1412 		ru_width = hal_rx_ul_ofdma_ru_size_to_width(rtap_ru_size);
1413 
1414 		mon_rx_user_status->ul_ofdma_ru_width = ru_width;
1415 		mon_rx_user_status->ofdma_info_valid = 1;
1416 	}
1417 }
1418 
1419 static void ath12k_dp_mon_parse_rx_msdu_end_err(u32 info, u32 *errmap)
1420 {
1421 	if (info & RX_MSDU_END_INFO13_FCS_ERR)
1422 		*errmap |= HAL_RX_MPDU_ERR_FCS;
1423 
1424 	if (info & RX_MSDU_END_INFO13_DECRYPT_ERR)
1425 		*errmap |= HAL_RX_MPDU_ERR_DECRYPT;
1426 
1427 	if (info & RX_MSDU_END_INFO13_TKIP_MIC_ERR)
1428 		*errmap |= HAL_RX_MPDU_ERR_TKIP_MIC;
1429 
1430 	if (info & RX_MSDU_END_INFO13_A_MSDU_ERROR)
1431 		*errmap |= HAL_RX_MPDU_ERR_AMSDU_ERR;
1432 
1433 	if (info & RX_MSDU_END_INFO13_OVERFLOW_ERR)
1434 		*errmap |= HAL_RX_MPDU_ERR_OVERFLOW;
1435 
1436 	if (info & RX_MSDU_END_INFO13_MSDU_LEN_ERR)
1437 		*errmap |= HAL_RX_MPDU_ERR_MSDU_LEN;
1438 
1439 	if (info & RX_MSDU_END_INFO13_MPDU_LEN_ERR)
1440 		*errmap |= HAL_RX_MPDU_ERR_MPDU_LEN;
1441 }
1442 
1443 static void
1444 ath12k_dp_mon_parse_status_msdu_end(struct ath12k_mon_data *pmon,
1445 				    const struct hal_rx_msdu_end *msdu_end)
1446 {
1447 	ath12k_dp_mon_parse_rx_msdu_end_err(__le32_to_cpu(msdu_end->info2),
1448 					    &pmon->err_bitmap);
1449 	pmon->decap_format = le32_get_bits(msdu_end->info1,
1450 					   RX_MSDU_END_INFO11_DECAP_FORMAT);
1451 }
1452 
/* Parse a single 64-bit status TLV from a monitor status buffer and
 * accumulate its contents into pmon->mon_ppdu_info.
 *
 * Returns a hal_rx_mon_status code telling the caller how to proceed:
 * MPDU_START/MSDU_END/MPDU_END/BUF_ADDR for data-path TLVs, PPDU_DONE
 * or BUF_DONE for end-of-PPDU markers, and PPDU_NOT_DONE otherwise.
 */
static enum hal_rx_mon_status
ath12k_dp_mon_rx_parse_status_tlv(struct ath12k *ar,
				  struct ath12k_mon_data *pmon,
				  const struct hal_tlv_64_hdr *tlv)
{
	struct hal_rx_mon_ppdu_info *ppdu_info = &pmon->mon_ppdu_info;
	const void *tlv_data = tlv->value;
	u32 info[7], userid;
	u16 tlv_tag, tlv_len;

	tlv_tag = le64_get_bits(tlv->tl, HAL_TLV_64_HDR_TAG);
	tlv_len = le64_get_bits(tlv->tl, HAL_TLV_64_HDR_LEN);
	userid = le64_get_bits(tlv->tl, HAL_TLV_64_USR_ID);

	/* An EHT-SIG TLV may be split across several TLVs; a different
	 * tag arriving while aggregation is in progress means the
	 * aggregate is complete — parse and reset the aggregation state.
	 */
	if (ppdu_info->tlv_aggr.in_progress && ppdu_info->tlv_aggr.tlv_tag != tlv_tag) {
		ath12k_dp_mon_parse_eht_sig_hdr(ppdu_info, ppdu_info->tlv_aggr.buf);

		ppdu_info->tlv_aggr.in_progress = false;
		ppdu_info->tlv_aggr.cur_len = 0;
	}

	switch (tlv_tag) {
	case HAL_RX_PPDU_START: {
		const struct hal_rx_ppdu_start *ppdu_start = tlv_data;

		u64 ppdu_ts = ath12k_le32hilo_to_u64(ppdu_start->ppdu_start_ts_63_32,
						     ppdu_start->ppdu_start_ts_31_0);

		info[0] = __le32_to_cpu(ppdu_start->info0);

		ppdu_info->ppdu_id = u32_get_bits(info[0],
						  HAL_RX_PPDU_START_INFO0_PPDU_ID);

		info[1] = __le32_to_cpu(ppdu_start->info1);
		ppdu_info->chan_num = u32_get_bits(info[1],
						   HAL_RX_PPDU_START_INFO1_CHAN_NUM);
		ppdu_info->freq = u32_get_bits(info[1],
					       HAL_RX_PPDU_START_INFO1_CHAN_FREQ);
		ppdu_info->ppdu_ts = ppdu_ts;

		/* New PPDU: reset per-PPDU user count and FCS bitmap */
		if (ppdu_info->ppdu_id != ppdu_info->last_ppdu_id) {
			ppdu_info->last_ppdu_id = ppdu_info->ppdu_id;
			ppdu_info->num_users = 0;
			memset(&ppdu_info->mpdu_fcs_ok_bitmap, 0,
			       HAL_RX_NUM_WORDS_PER_PPDU_BITMAP *
			       sizeof(ppdu_info->mpdu_fcs_ok_bitmap[0]));
		}
		break;
	}
	case HAL_RX_PPDU_END_USER_STATS: {
		const struct hal_rx_ppdu_end_user_stats *eu_stats = tlv_data;
		u32 tid_bitmap;

		/* info[3] is unused by this TLV; only decode what we read */
		info[0] = __le32_to_cpu(eu_stats->info0);
		info[1] = __le32_to_cpu(eu_stats->info1);
		info[2] = __le32_to_cpu(eu_stats->info2);
		info[4] = __le32_to_cpu(eu_stats->info4);
		info[5] = __le32_to_cpu(eu_stats->info5);
		info[6] = __le32_to_cpu(eu_stats->info6);

		ppdu_info->ast_index =
			u32_get_bits(info[2], HAL_RX_PPDU_END_USER_STATS_INFO2_AST_INDEX);
		ppdu_info->fc_valid =
			u32_get_bits(info[1], HAL_RX_PPDU_END_USER_STATS_INFO1_FC_VALID);
		tid_bitmap = u32_get_bits(info[6],
					  HAL_RX_PPDU_END_USER_STATS_INFO6_TID_BITMAP);
		/* Lowest TID present in the bitmap; -1 when bitmap is empty */
		ppdu_info->tid = ffs(tid_bitmap) - 1;
		ppdu_info->tcp_msdu_count =
			u32_get_bits(info[4],
				     HAL_RX_PPDU_END_USER_STATS_INFO4_TCP_MSDU_CNT);
		ppdu_info->udp_msdu_count =
			u32_get_bits(info[4],
				     HAL_RX_PPDU_END_USER_STATS_INFO4_UDP_MSDU_CNT);
		ppdu_info->other_msdu_count =
			u32_get_bits(info[5],
				     HAL_RX_PPDU_END_USER_STATS_INFO5_OTHER_MSDU_CNT);
		ppdu_info->tcp_ack_msdu_count =
			u32_get_bits(info[5],
				     HAL_RX_PPDU_END_USER_STATS_INFO5_TCP_ACK_MSDU_CNT);
		ppdu_info->preamble_type =
			u32_get_bits(info[1],
				     HAL_RX_PPDU_END_USER_STATS_INFO1_PKT_TYPE);
		ppdu_info->num_mpdu_fcs_ok =
			u32_get_bits(info[1],
				     HAL_RX_PPDU_END_USER_STATS_INFO1_MPDU_CNT_FCS_OK);
		ppdu_info->num_mpdu_fcs_err =
			u32_get_bits(info[0],
				     HAL_RX_PPDU_END_USER_STATS_INFO0_MPDU_CNT_FCS_ERR);
		ppdu_info->peer_id =
			u32_get_bits(info[0], HAL_RX_PPDU_END_USER_STATS_INFO0_PEER_ID);

		/* Flag which radiotap PHY section to emit later */
		switch (ppdu_info->preamble_type) {
		case HAL_RX_PREAMBLE_11N:
			ppdu_info->ht_flags = 1;
			break;
		case HAL_RX_PREAMBLE_11AC:
			ppdu_info->vht_flags = 1;
			break;
		case HAL_RX_PREAMBLE_11AX:
			ppdu_info->he_flags = 1;
			break;
		case HAL_RX_PREAMBLE_11BE:
			ppdu_info->is_eht = true;
			break;
		default:
			break;
		}

		if (userid < HAL_MAX_UL_MU_USERS) {
			struct hal_rx_user_status *rxuser_stats =
				&ppdu_info->userstats[userid];

			/* More than one MPDU in either count implies A-MPDU */
			if (ppdu_info->num_mpdu_fcs_ok > 1 ||
			    ppdu_info->num_mpdu_fcs_err > 1)
				ppdu_info->userstats[userid].ampdu_present = true;

			ppdu_info->num_users += 1;

			ath12k_dp_mon_rx_handle_ofdma_info(eu_stats, rxuser_stats);
			ath12k_dp_mon_rx_populate_mu_user_info(eu_stats, ppdu_info,
							       rxuser_stats);
		}
		/* First 64 bits of the per-MPDU FCS-OK bitmap live in rsvd1 */
		ppdu_info->mpdu_fcs_ok_bitmap[0] = __le32_to_cpu(eu_stats->rsvd1[0]);
		ppdu_info->mpdu_fcs_ok_bitmap[1] = __le32_to_cpu(eu_stats->rsvd1[1]);
		break;
	}
	case HAL_RX_PPDU_END_USER_STATS_EXT: {
		const struct hal_rx_ppdu_end_user_stats_ext *eu_stats = tlv_data;

		/* Remaining words of the per-MPDU FCS-OK bitmap */
		ppdu_info->mpdu_fcs_ok_bitmap[2] = __le32_to_cpu(eu_stats->info1);
		ppdu_info->mpdu_fcs_ok_bitmap[3] = __le32_to_cpu(eu_stats->info2);
		ppdu_info->mpdu_fcs_ok_bitmap[4] = __le32_to_cpu(eu_stats->info3);
		ppdu_info->mpdu_fcs_ok_bitmap[5] = __le32_to_cpu(eu_stats->info4);
		ppdu_info->mpdu_fcs_ok_bitmap[6] = __le32_to_cpu(eu_stats->info5);
		ppdu_info->mpdu_fcs_ok_bitmap[7] = __le32_to_cpu(eu_stats->info6);
		break;
	}
	case HAL_PHYRX_HT_SIG:
		ath12k_dp_mon_parse_ht_sig(tlv_data, ppdu_info);
		break;

	case HAL_PHYRX_L_SIG_B:
		ath12k_dp_mon_parse_l_sig_b(tlv_data, ppdu_info);
		break;

	case HAL_PHYRX_L_SIG_A:
		ath12k_dp_mon_parse_l_sig_a(tlv_data, ppdu_info);
		break;

	case HAL_PHYRX_VHT_SIG_A:
		ath12k_dp_mon_parse_vht_sig_a(tlv_data, ppdu_info);
		break;

	case HAL_PHYRX_HE_SIG_A_SU:
		ath12k_dp_mon_parse_he_sig_su(tlv_data, ppdu_info);
		break;

	case HAL_PHYRX_HE_SIG_A_MU_DL:
		ath12k_dp_mon_parse_he_sig_mu(tlv_data, ppdu_info);
		break;

	case HAL_PHYRX_HE_SIG_B1_MU:
		ath12k_dp_mon_parse_he_sig_b1_mu(tlv_data, ppdu_info);
		break;

	case HAL_PHYRX_HE_SIG_B2_MU:
		ath12k_dp_mon_parse_he_sig_b2_mu(tlv_data, ppdu_info);
		break;

	case HAL_PHYRX_HE_SIG_B2_OFDMA:
		ath12k_dp_mon_parse_he_sig_b2_ofdma(tlv_data, ppdu_info);
		break;

	case HAL_PHYRX_RSSI_LEGACY: {
		const struct hal_rx_phyrx_rssi_legacy_info *rssi = tlv_data;

		info[0] = __le32_to_cpu(rssi->info0);
		info[1] = __le32_to_cpu(rssi->info1);

		/* TODO: Please note that the combined rssi will not be accurate
		 * in MU case. Rssi in MU needs to be retrieved from
		 * PHYRX_OTHER_RECEIVE_INFO TLV.
		 */
		ppdu_info->rssi_comb =
			u32_get_bits(info[1],
				     HAL_RX_PHYRX_RSSI_LEGACY_INFO_INFO1_RSSI_COMB);

		ppdu_info->bw = u32_get_bits(info[0],
					     HAL_RX_PHYRX_RSSI_LEGACY_INFO_INFO0_RX_BW);
		break;
	}
	case HAL_PHYRX_OTHER_RECEIVE_INFO: {
		const struct hal_phyrx_common_user_info *cmn_usr_info = tlv_data;

		ppdu_info->gi = le32_get_bits(cmn_usr_info->info0,
					      HAL_RX_PHY_CMN_USER_INFO0_GI);
		break;
	}
	case HAL_RX_PPDU_START_USER_INFO:
		ath12k_dp_mon_hal_rx_parse_user_info(tlv_data, userid, ppdu_info);
		break;

	case HAL_RXPCU_PPDU_END_INFO: {
		const struct hal_rx_ppdu_end_duration *ppdu_rx_duration = tlv_data;

		info[0] = __le32_to_cpu(ppdu_rx_duration->info0);
		ppdu_info->rx_duration =
			u32_get_bits(info[0], HAL_RX_PPDU_END_DURATION);
		/* 64-bit TSF timestamp is split across two reserved words */
		ppdu_info->tsft = __le32_to_cpu(ppdu_rx_duration->rsvd0[1]);
		ppdu_info->tsft = (ppdu_info->tsft << 32) |
				   __le32_to_cpu(ppdu_rx_duration->rsvd0[0]);
		break;
	}
	case HAL_RX_MPDU_START: {
		const struct hal_rx_mpdu_start *mpdu_start = tlv_data;
		u16 peer_id;

		info[1] = __le32_to_cpu(mpdu_start->info1);
		peer_id = u32_get_bits(info[1], HAL_RX_MPDU_START_INFO1_PEERID);
		if (peer_id)
			ppdu_info->peer_id = peer_id;

		/* NOTE(review): an INFO2_* mask is applied to the info1
		 * word here — confirm against the hal_rx_mpdu_start field
		 * layout that the MPDU length really lives in info1.
		 */
		ppdu_info->mpdu_len += u32_get_bits(info[1],
						    HAL_RX_MPDU_START_INFO2_MPDU_LEN);
		if (userid < HAL_MAX_UL_MU_USERS) {
			info[0] = __le32_to_cpu(mpdu_start->info0);
			ppdu_info->userid = userid;
			ppdu_info->userstats[userid].ampdu_id =
				u32_get_bits(info[0], HAL_RX_MPDU_START_INFO0_PPDU_ID);
		}

		return HAL_RX_MON_STATUS_MPDU_START;
	}
	case HAL_RX_MSDU_START:
		/* TODO: add msdu start parsing logic */
		break;
	case HAL_MON_BUF_ADDR:
		return HAL_RX_MON_STATUS_BUF_ADDR;
	case HAL_RX_MSDU_END:
		ath12k_dp_mon_parse_status_msdu_end(pmon, tlv_data);
		return HAL_RX_MON_STATUS_MSDU_END;
	case HAL_RX_MPDU_END:
		return HAL_RX_MON_STATUS_MPDU_END;
	case HAL_PHYRX_GENERIC_U_SIG:
		ath12k_dp_mon_hal_rx_parse_u_sig_hdr(tlv_data, ppdu_info);
		break;
	case HAL_PHYRX_GENERIC_EHT_SIG:
		/* Handle the case where aggregation is in progress
		 * or the current TLV is one of the TLVs which should be
		 * aggregated
		 */
		if (!ppdu_info->tlv_aggr.in_progress) {
			ppdu_info->tlv_aggr.in_progress = true;
			ppdu_info->tlv_aggr.tlv_tag = tlv_tag;
			ppdu_info->tlv_aggr.cur_len = 0;
		}

		ppdu_info->is_eht = true;

		ath12k_dp_mon_hal_aggr_tlv(ppdu_info, tlv_len, tlv_data);
		break;
	case HAL_DUMMY:
		return HAL_RX_MON_STATUS_BUF_DONE;
	case HAL_RX_PPDU_END_STATUS_DONE:
	case 0:
		/* Tag 0 also terminates the status buffer */
		return HAL_RX_MON_STATUS_PPDU_DONE;
	default:
		break;
	}

	return HAL_RX_MON_STATUS_PPDU_NOT_DONE;
}
1725 
1726 static void
1727 ath12k_dp_mon_fill_rx_stats_info(struct ath12k *ar,
1728 				 struct hal_rx_mon_ppdu_info *ppdu_info,
1729 				 struct ieee80211_rx_status *rx_status)
1730 {
1731 	u32 center_freq = ppdu_info->freq;
1732 
1733 	rx_status->freq = center_freq;
1734 	rx_status->bw = ath12k_mac_bw_to_mac80211_bw(ppdu_info->bw);
1735 	rx_status->nss = ppdu_info->nss;
1736 	rx_status->rate_idx = 0;
1737 	rx_status->encoding = RX_ENC_LEGACY;
1738 	rx_status->flag |= RX_FLAG_NO_SIGNAL_VAL;
1739 
1740 	if (center_freq >= ATH12K_MIN_6GHZ_FREQ &&
1741 	    center_freq <= ATH12K_MAX_6GHZ_FREQ) {
1742 		rx_status->band = NL80211_BAND_6GHZ;
1743 	} else if (center_freq >= ATH12K_MIN_2GHZ_FREQ &&
1744 		   center_freq <= ATH12K_MAX_2GHZ_FREQ) {
1745 		rx_status->band = NL80211_BAND_2GHZ;
1746 	} else if (center_freq >= ATH12K_MIN_5GHZ_FREQ &&
1747 		   center_freq <= ATH12K_MAX_5GHZ_FREQ) {
1748 		rx_status->band = NL80211_BAND_5GHZ;
1749 	} else {
1750 		rx_status->band = NUM_NL80211_BANDS;
1751 	}
1752 }
1753 
static struct sk_buff
*ath12k_dp_rx_alloc_mon_status_buf(struct ath12k_base *ab,
				   struct dp_rxdma_mon_ring *rx_ring,
				   int *buf_id)
{
	struct sk_buff *skb;
	dma_addr_t paddr;

	/* Allocate one monitor status buffer: align its data pointer,
	 * DMA-map it for device writes, and register it in the ring's IDR.
	 * On success the IDR slot is returned through *buf_id and the DMA
	 * address is stashed in the skb's rx control block; on any failure
	 * all partial state is unwound and NULL is returned.
	 */
	skb = dev_alloc_skb(RX_MON_STATUS_BUF_SIZE);

	if (!skb)
		goto fail_alloc_skb;

	/* Align skb->data to RX_MON_STATUS_BUF_ALIGN by consuming headroom */
	if (!IS_ALIGNED((unsigned long)skb->data,
			RX_MON_STATUS_BUF_ALIGN)) {
		skb_pull(skb, PTR_ALIGN(skb->data, RX_MON_STATUS_BUF_ALIGN) -
			 skb->data);
	}

	/* Map data plus all tailroom: the device fills the whole buffer */
	paddr = dma_map_single(ab->dev, skb->data,
			       skb->len + skb_tailroom(skb),
			       DMA_FROM_DEVICE);
	if (unlikely(dma_mapping_error(ab->dev, paddr)))
		goto fail_free_skb;

	/* GFP_ATOMIC: this may run in NAPI/softirq context */
	spin_lock_bh(&rx_ring->idr_lock);
	*buf_id = idr_alloc(&rx_ring->bufs_idr, skb, 0,
			    rx_ring->bufs_max, GFP_ATOMIC);
	spin_unlock_bh(&rx_ring->idr_lock);
	if (*buf_id < 0)
		goto fail_dma_unmap;

	ATH12K_SKB_RXCB(skb)->paddr = paddr;
	return skb;

fail_dma_unmap:
	dma_unmap_single(ab->dev, paddr, skb->len + skb_tailroom(skb),
			 DMA_FROM_DEVICE);
fail_free_skb:
	dev_kfree_skb_any(skb);
fail_alloc_skb:
	return NULL;
}
1797 
static enum dp_mon_status_buf_state
ath12k_dp_rx_mon_buf_done(struct ath12k_base *ab, struct hal_srng *srng,
			  struct dp_rxdma_mon_ring *rx_ring)
{
	struct ath12k_skb_rxcb *rxcb;
	struct hal_tlv_64_hdr *tlv;
	struct sk_buff *skb;
	void *status_desc;
	dma_addr_t paddr;
	u32 cookie;
	int buf_id;
	u8 rbm;

	/* Peek at the next status ring entry without advancing the ring
	 * pointer, and check whether the device has finished DMA-ing into
	 * the associated buffer, i.e. whether the buffer begins with a
	 * HAL_RX_STATUS_BUFFER_DONE TLV.
	 */
	status_desc = ath12k_hal_srng_src_next_peek(ab, srng);
	if (!status_desc)
		return DP_MON_STATUS_NO_DMA;

	ath12k_hal_rx_buf_addr_info_get(status_desc, &paddr, &cookie, &rbm);

	buf_id = u32_get_bits(cookie, DP_RXDMA_BUF_COOKIE_BUF_ID);

	spin_lock_bh(&rx_ring->idr_lock);
	skb = idr_find(&rx_ring->bufs_idr, buf_id);
	spin_unlock_bh(&rx_ring->idr_lock);

	if (!skb)
		return DP_MON_STATUS_NO_DMA;

	/* Buffer is still owned by the device (DMA_FROM_DEVICE); sync it
	 * for CPU access before inspecting the leading TLV header.
	 */
	rxcb = ATH12K_SKB_RXCB(skb);
	dma_sync_single_for_cpu(ab->dev, rxcb->paddr,
				skb->len + skb_tailroom(skb),
				DMA_FROM_DEVICE);

	tlv = (struct hal_tlv_64_hdr *)skb->data;
	if (le64_get_bits(tlv->tl, HAL_TLV_HDR_TAG) != HAL_RX_STATUS_BUFFER_DONE)
		return DP_MON_STATUS_NO_DMA;

	return DP_MON_STATUS_REPLINISH;
}
1837 
1838 static u32 ath12k_dp_mon_comp_ppduid(u32 msdu_ppdu_id, u32 *ppdu_id)
1839 {
1840 	u32 ret = 0;
1841 
1842 	if ((*ppdu_id < msdu_ppdu_id) &&
1843 	    ((msdu_ppdu_id - *ppdu_id) < DP_NOT_PPDU_ID_WRAP_AROUND)) {
1844 		/* Hold on mon dest ring, and reap mon status ring. */
1845 		*ppdu_id = msdu_ppdu_id;
1846 		ret = msdu_ppdu_id;
1847 	} else if ((*ppdu_id > msdu_ppdu_id) &&
1848 		((*ppdu_id - msdu_ppdu_id) > DP_NOT_PPDU_ID_WRAP_AROUND)) {
1849 		/* PPDU ID has exceeded the maximum value and will
1850 		 * restart from 0.
1851 		 */
1852 		*ppdu_id = msdu_ppdu_id;
1853 		ret = msdu_ppdu_id;
1854 	}
1855 	return ret;
1856 }
1857 
1858 static
1859 void ath12k_dp_mon_next_link_desc_get(struct hal_rx_msdu_link *msdu_link,
1860 				      dma_addr_t *paddr, u32 *sw_cookie, u8 *rbm,
1861 				      struct ath12k_buffer_addr **pp_buf_addr_info)
1862 {
1863 	struct ath12k_buffer_addr *buf_addr_info;
1864 
1865 	buf_addr_info = &msdu_link->buf_addr_info;
1866 
1867 	ath12k_hal_rx_buf_addr_info_get(buf_addr_info, paddr, sw_cookie, rbm);
1868 
1869 	*pp_buf_addr_info = buf_addr_info;
1870 }
1871 
static void
ath12k_dp_mon_fill_rx_rate(struct ath12k *ar,
			   struct hal_rx_mon_ppdu_info *ppdu_info,
			   struct ieee80211_rx_status *rx_status)
{
	struct ieee80211_supported_band *sband;
	enum rx_msdu_start_pkt_type pkt_type;
	u8 rate_mcs, nss, sgi;
	bool is_cck;

	/* Translate the PHY preamble/rate info parsed from the PPDU TLVs
	 * into mac80211 rx status encoding, rate index and GI flags.
	 */
	pkt_type = ppdu_info->preamble_type;
	rate_mcs = ppdu_info->rate;
	nss = ppdu_info->nss;
	sgi = ppdu_info->gi;

	switch (pkt_type) {
	case RX_MSDU_START_PKT_TYPE_11A:
	case RX_MSDU_START_PKT_TYPE_11B:
		/* Legacy OFDM/CCK: map HW rate code to the band's rate table.
		 * Skipped when the band is unknown (NUM_NL80211_BANDS).
		 */
		is_cck = (pkt_type == RX_MSDU_START_PKT_TYPE_11B);
		if (rx_status->band < NUM_NL80211_BANDS) {
			sband = &ar->mac.sbands[rx_status->band];
			rx_status->rate_idx = ath12k_mac_hw_rate_to_idx(sband, rate_mcs,
									is_cck);
		}
		break;
	case RX_MSDU_START_PKT_TYPE_11N:
		rx_status->encoding = RX_ENC_HT;
		if (rate_mcs > ATH12K_HT_MCS_MAX) {
			ath12k_warn(ar->ab,
				    "Received with invalid mcs in HT mode %d\n",
				     rate_mcs);
			break;
		}
		/* HT rate index encodes NSS in blocks of 8 MCS values */
		rx_status->rate_idx = rate_mcs + (8 * (nss - 1));
		if (sgi)
			rx_status->enc_flags |= RX_ENC_FLAG_SHORT_GI;
		break;
	case RX_MSDU_START_PKT_TYPE_11AC:
		rx_status->encoding = RX_ENC_VHT;
		/* NOTE(review): rate_idx is assigned before the MCS range
		 * check here (and in the HE/EHT cases below), unlike the HT
		 * case where it is assigned only after validation — confirm
		 * this asymmetry is intentional.
		 */
		rx_status->rate_idx = rate_mcs;
		if (rate_mcs > ATH12K_VHT_MCS_MAX) {
			ath12k_warn(ar->ab,
				    "Received with invalid mcs in VHT mode %d\n",
				     rate_mcs);
			break;
		}
		if (sgi)
			rx_status->enc_flags |= RX_ENC_FLAG_SHORT_GI;
		break;
	case RX_MSDU_START_PKT_TYPE_11AX:
		rx_status->rate_idx = rate_mcs;
		if (rate_mcs > ATH12K_HE_MCS_MAX) {
			ath12k_warn(ar->ab,
				    "Received with invalid mcs in HE mode %d\n",
				    rate_mcs);
			break;
		}
		rx_status->encoding = RX_ENC_HE;
		rx_status->he_gi = ath12k_he_gi_to_nl80211_he_gi(sgi);
		break;
	case RX_MSDU_START_PKT_TYPE_11BE:
		rx_status->rate_idx = rate_mcs;
		if (rate_mcs > ATH12K_EHT_MCS_MAX) {
			ath12k_warn(ar->ab,
				    "Received with invalid mcs in EHT mode %d\n",
				    rate_mcs);
			break;
		}
		rx_status->encoding = RX_ENC_EHT;
		rx_status->he_gi = ath12k_he_gi_to_nl80211_he_gi(sgi);
		break;
	default:
		ath12k_dbg(ar->ab, ATH12K_DBG_DATA,
			   "monitor receives invalid preamble type %d",
			    pkt_type);
		break;
	}
}
1950 
1951 static void ath12k_dp_mon_rx_msdus_set_payload(struct ath12k *ar,
1952 					       struct sk_buff *head_msdu,
1953 					       struct sk_buff *tail_msdu)
1954 {
1955 	u32 rx_pkt_offset, l2_hdr_offset, total_offset;
1956 
1957 	rx_pkt_offset = ar->ab->hal.hal_desc_sz;
1958 	l2_hdr_offset =
1959 		ath12k_dp_rx_h_l3pad(ar->ab, (struct hal_rx_desc *)tail_msdu->data);
1960 
1961 	if (ar->ab->hw_params->rxdma1_enable)
1962 		total_offset = ATH12K_MON_RX_PKT_OFFSET;
1963 	else
1964 		total_offset = rx_pkt_offset + l2_hdr_offset;
1965 
1966 	skb_pull(head_msdu, total_offset);
1967 }
1968 
1969 static struct sk_buff *
1970 ath12k_dp_mon_rx_merg_msdus(struct ath12k *ar,
1971 			    struct dp_mon_mpdu *mon_mpdu,
1972 			    struct hal_rx_mon_ppdu_info *ppdu_info,
1973 			    struct ieee80211_rx_status *rxs)
1974 {
1975 	struct ath12k_base *ab = ar->ab;
1976 	struct sk_buff *msdu, *mpdu_buf, *prev_buf, *head_frag_list;
1977 	struct sk_buff *head_msdu, *tail_msdu;
1978 	struct hal_rx_desc *rx_desc;
1979 	u8 *hdr_desc, *dest, decap_format = mon_mpdu->decap_format;
1980 	struct ieee80211_hdr_3addr *wh;
1981 	struct ieee80211_channel *channel;
1982 	u32 frag_list_sum_len = 0;
1983 	u8 channel_num = ppdu_info->chan_num;
1984 
1985 	mpdu_buf = NULL;
1986 	head_msdu = mon_mpdu->head;
1987 	tail_msdu = mon_mpdu->tail;
1988 
1989 	if (!head_msdu || !tail_msdu)
1990 		goto err_merge_fail;
1991 
1992 	ath12k_dp_mon_fill_rx_stats_info(ar, ppdu_info, rxs);
1993 
1994 	if (unlikely(rxs->band == NUM_NL80211_BANDS ||
1995 		     !ath12k_ar_to_hw(ar)->wiphy->bands[rxs->band])) {
1996 		ath12k_dbg(ar->ab, ATH12K_DBG_DATA,
1997 			   "sband is NULL for status band %d channel_num %d center_freq %d pdev_id %d\n",
1998 			   rxs->band, channel_num, ppdu_info->freq, ar->pdev_idx);
1999 
2000 		spin_lock_bh(&ar->data_lock);
2001 		channel = ar->rx_channel;
2002 		if (channel) {
2003 			rxs->band = channel->band;
2004 			channel_num =
2005 				ieee80211_frequency_to_channel(channel->center_freq);
2006 		}
2007 		spin_unlock_bh(&ar->data_lock);
2008 	}
2009 
2010 	if (rxs->band < NUM_NL80211_BANDS)
2011 		rxs->freq = ieee80211_channel_to_frequency(channel_num,
2012 							   rxs->band);
2013 
2014 	ath12k_dp_mon_fill_rx_rate(ar, ppdu_info, rxs);
2015 
2016 	if (decap_format == DP_RX_DECAP_TYPE_RAW) {
2017 		ath12k_dp_mon_rx_msdus_set_payload(ar, head_msdu, tail_msdu);
2018 
2019 		prev_buf = head_msdu;
2020 		msdu = head_msdu->next;
2021 		head_frag_list = NULL;
2022 
2023 		while (msdu) {
2024 			ath12k_dp_mon_rx_msdus_set_payload(ar, head_msdu, tail_msdu);
2025 
2026 			if (!head_frag_list)
2027 				head_frag_list = msdu;
2028 
2029 			frag_list_sum_len += msdu->len;
2030 			prev_buf = msdu;
2031 			msdu = msdu->next;
2032 		}
2033 
2034 		prev_buf->next = NULL;
2035 
2036 		skb_trim(prev_buf, prev_buf->len);
2037 		if (head_frag_list) {
2038 			skb_shinfo(head_msdu)->frag_list = head_frag_list;
2039 			head_msdu->data_len = frag_list_sum_len;
2040 			head_msdu->len += head_msdu->data_len;
2041 			head_msdu->next = NULL;
2042 		}
2043 	} else if (decap_format == DP_RX_DECAP_TYPE_NATIVE_WIFI) {
2044 		u8 qos_pkt = 0;
2045 
2046 		rx_desc = (struct hal_rx_desc *)head_msdu->data;
2047 		hdr_desc =
2048 			ab->hal_rx_ops->rx_desc_get_msdu_payload(rx_desc);
2049 
2050 		/* Base size */
2051 		wh = (struct ieee80211_hdr_3addr *)hdr_desc;
2052 
2053 		if (ieee80211_is_data_qos(wh->frame_control))
2054 			qos_pkt = 1;
2055 
2056 		msdu = head_msdu;
2057 
2058 		while (msdu) {
2059 			ath12k_dp_mon_rx_msdus_set_payload(ar, head_msdu, tail_msdu);
2060 			if (qos_pkt) {
2061 				dest = skb_push(msdu, sizeof(__le16));
2062 				if (!dest)
2063 					goto err_merge_fail;
2064 				memcpy(dest, hdr_desc, sizeof(struct ieee80211_qos_hdr));
2065 			}
2066 			prev_buf = msdu;
2067 			msdu = msdu->next;
2068 		}
2069 		dest = skb_put(prev_buf, HAL_RX_FCS_LEN);
2070 		if (!dest)
2071 			goto err_merge_fail;
2072 
2073 		ath12k_dbg(ab, ATH12K_DBG_DATA,
2074 			   "mpdu_buf %p mpdu_buf->len %u",
2075 			   prev_buf, prev_buf->len);
2076 	} else {
2077 		ath12k_dbg(ab, ATH12K_DBG_DATA,
2078 			   "decap format %d is not supported!\n",
2079 			   decap_format);
2080 		goto err_merge_fail;
2081 	}
2082 
2083 	return head_msdu;
2084 
2085 err_merge_fail:
2086 	if (mpdu_buf && decap_format != DP_RX_DECAP_TYPE_RAW) {
2087 		ath12k_dbg(ab, ATH12K_DBG_DATA,
2088 			   "err_merge_fail mpdu_buf %p", mpdu_buf);
2089 		/* Free the head buffer */
2090 		dev_kfree_skb_any(mpdu_buf);
2091 	}
2092 	return NULL;
2093 }
2094 
2095 static void
2096 ath12k_dp_mon_rx_update_radiotap_he(struct hal_rx_mon_ppdu_info *rx_status,
2097 				    u8 *rtap_buf)
2098 {
2099 	u32 rtap_len = 0;
2100 
2101 	put_unaligned_le16(rx_status->he_data1, &rtap_buf[rtap_len]);
2102 	rtap_len += 2;
2103 
2104 	put_unaligned_le16(rx_status->he_data2, &rtap_buf[rtap_len]);
2105 	rtap_len += 2;
2106 
2107 	put_unaligned_le16(rx_status->he_data3, &rtap_buf[rtap_len]);
2108 	rtap_len += 2;
2109 
2110 	put_unaligned_le16(rx_status->he_data4, &rtap_buf[rtap_len]);
2111 	rtap_len += 2;
2112 
2113 	put_unaligned_le16(rx_status->he_data5, &rtap_buf[rtap_len]);
2114 	rtap_len += 2;
2115 
2116 	put_unaligned_le16(rx_status->he_data6, &rtap_buf[rtap_len]);
2117 }
2118 
2119 static void
2120 ath12k_dp_mon_rx_update_radiotap_he_mu(struct hal_rx_mon_ppdu_info *rx_status,
2121 				       u8 *rtap_buf)
2122 {
2123 	u32 rtap_len = 0;
2124 
2125 	put_unaligned_le16(rx_status->he_flags1, &rtap_buf[rtap_len]);
2126 	rtap_len += 2;
2127 
2128 	put_unaligned_le16(rx_status->he_flags2, &rtap_buf[rtap_len]);
2129 	rtap_len += 2;
2130 
2131 	rtap_buf[rtap_len] = rx_status->he_RU[0];
2132 	rtap_len += 1;
2133 
2134 	rtap_buf[rtap_len] = rx_status->he_RU[1];
2135 	rtap_len += 1;
2136 
2137 	rtap_buf[rtap_len] = rx_status->he_RU[2];
2138 	rtap_len += 1;
2139 
2140 	rtap_buf[rtap_len] = rx_status->he_RU[3];
2141 }
2142 
static void ath12k_dp_mon_update_radiotap(struct ath12k *ar,
					  struct hal_rx_mon_ppdu_info *ppduinfo,
					  struct sk_buff *mon_skb,
					  struct ieee80211_rx_status *rxs)
{
	struct ieee80211_supported_band *sband;
	u8 *ptr = NULL;

	/* Finalize the rx status for a monitor frame and, for HE/EHT PPDUs,
	 * prepend the raw radiotap payload (EHT/USIG TLVs or HE/HE-MU
	 * fixed headers) that mac80211 will consume via the corresponding
	 * RX_FLAG_RADIOTAP_* flags.
	 */
	rxs->flag |= RX_FLAG_MACTIME_START;
	rxs->signal = ppduinfo->rssi_comb + ATH12K_DEFAULT_NOISE_FLOOR;
	rxs->nss = ppduinfo->nss + 1;

	if (ppduinfo->userstats[ppduinfo->userid].ampdu_present) {
		rxs->flag |= RX_FLAG_AMPDU_DETAILS;
		rxs->ampdu_reference = ppduinfo->userstats[ppduinfo->userid].ampdu_id;
	}

	if (ppduinfo->is_eht || ppduinfo->eht_usig) {
		struct ieee80211_radiotap_tlv *tlv;
		struct ieee80211_radiotap_eht *eht;
		struct ieee80211_radiotap_eht_usig *usig;
		u16 len = 0, i, eht_len, usig_len;
		u8 user;

		/* Pre-compute the total TLV area so a single skb_push()
		 * reserves space for both the EHT and USIG TLVs.
		 */
		if (ppduinfo->is_eht) {
			eht_len = struct_size(eht,
					      user_info,
					      ppduinfo->eht_info.num_user_info);
			len += sizeof(*tlv) + eht_len;
		}

		if (ppduinfo->eht_usig) {
			usig_len = sizeof(*usig);
			len += sizeof(*tlv) + usig_len;
		}

		rxs->flag |= RX_FLAG_RADIOTAP_TLV_AT_END;
		rxs->encoding = RX_ENC_EHT;

		skb_reset_mac_header(mon_skb);

		tlv = skb_push(mon_skb, len);

		if (ppduinfo->is_eht) {
			tlv->type = cpu_to_le16(IEEE80211_RADIOTAP_EHT);
			tlv->len = cpu_to_le16(eht_len);

			eht = (struct ieee80211_radiotap_eht *)tlv->data;
			eht->known = ppduinfo->eht_info.eht.known;

			/* Copy only as many data words as both the radiotap
			 * struct and the parsed info actually hold.
			 */
			for (i = 0;
			     i < ARRAY_SIZE(eht->data) &&
			     i < ARRAY_SIZE(ppduinfo->eht_info.eht.data);
			     i++)
				eht->data[i] = ppduinfo->eht_info.eht.data[i];

			for (user = 0; user < ppduinfo->eht_info.num_user_info; user++)
				put_unaligned_le32(ppduinfo->eht_info.user_info[user],
						   &eht->user_info[user]);

			/* Advance to where the USIG TLV (if any) starts */
			tlv = (struct ieee80211_radiotap_tlv *)&tlv->data[eht_len];
		}

		if (ppduinfo->eht_usig) {
			tlv->type = cpu_to_le16(IEEE80211_RADIOTAP_EHT_USIG);
			tlv->len = cpu_to_le16(usig_len);

			usig = (struct ieee80211_radiotap_eht_usig *)tlv->data;
			*usig = ppduinfo->u_sig_info.usig;
		}
	} else if (ppduinfo->he_mu_flags) {
		rxs->flag |= RX_FLAG_RADIOTAP_HE_MU;
		rxs->encoding = RX_ENC_HE;
		ptr = skb_push(mon_skb, sizeof(struct ieee80211_radiotap_he_mu));
		ath12k_dp_mon_rx_update_radiotap_he_mu(ppduinfo, ptr);
	} else if (ppduinfo->he_flags) {
		rxs->flag |= RX_FLAG_RADIOTAP_HE;
		rxs->encoding = RX_ENC_HE;
		ptr = skb_push(mon_skb, sizeof(struct ieee80211_radiotap_he));
		ath12k_dp_mon_rx_update_radiotap_he(ppduinfo, ptr);
		rxs->rate_idx = ppduinfo->rate;
	} else if (ppduinfo->vht_flags) {
		rxs->encoding = RX_ENC_VHT;
		rxs->rate_idx = ppduinfo->rate;
	} else if (ppduinfo->ht_flags) {
		rxs->encoding = RX_ENC_HT;
		rxs->rate_idx = ppduinfo->rate;
	} else {
		/* Legacy rates need a lookup in the band's rate table.
		 * NOTE(review): rxs->band is used unchecked here; presumably
		 * the caller guarantees a valid band by this point — confirm.
		 */
		rxs->encoding = RX_ENC_LEGACY;
		sband = &ar->mac.sbands[rxs->band];
		rxs->rate_idx = ath12k_mac_hw_rate_to_idx(sband, ppduinfo->rate,
							  ppduinfo->cck_flag);
	}

	rxs->mactime = ppduinfo->tsft;
}
2239 
static void ath12k_dp_mon_rx_deliver_msdu(struct ath12k *ar, struct napi_struct *napi,
					  struct sk_buff *msdu,
					  struct ieee80211_rx_status *status,
					  u8 decap)
{
	static const struct ieee80211_radiotap_he known = {
		.data1 = cpu_to_le16(IEEE80211_RADIOTAP_HE_DATA1_DATA_MCS_KNOWN |
				     IEEE80211_RADIOTAP_HE_DATA1_BW_RU_ALLOC_KNOWN),
		.data2 = cpu_to_le16(IEEE80211_RADIOTAP_HE_DATA2_GI_KNOWN),
	};
	struct ieee80211_rx_status *rx_status;
	struct ieee80211_radiotap_he *he = NULL;
	struct ieee80211_sta *pubsta = NULL;
	struct ath12k_peer *peer;
	struct ath12k_skb_rxcb *rxcb = ATH12K_SKB_RXCB(msdu);
	struct ath12k_dp_rx_info rx_info;
	bool is_mcbc = rxcb->is_mcbc;
	bool is_eapol_tkip = rxcb->is_eapol;

	/* Hand one monitor MSDU to mac80211: attach a minimal HE radiotap
	 * header when needed, resolve the transmitting station for MLO
	 * link reporting, and deliver via the NAPI rx path.
	 */
	status->link_valid = 0;

	/* HE frame without a radiotap HE header yet: push a stub header
	 * that only declares which fields are "known".
	 */
	if ((status->encoding == RX_ENC_HE) && !(status->flag & RX_FLAG_RADIOTAP_HE) &&
	    !(status->flag & RX_FLAG_SKIP_MONITOR)) {
		he = skb_push(msdu, sizeof(known));
		memcpy(he, &known, sizeof(known));
		status->flag |= RX_FLAG_RADIOTAP_HE;
	}

	/* Peer lookup requires base_lock; pubsta/link info are copied out
	 * before the lock is dropped.
	 */
	spin_lock_bh(&ar->ab->base_lock);
	rx_info.addr2_present = false;
	peer = ath12k_dp_rx_h_find_peer(ar->ab, msdu, &rx_info);
	if (peer && peer->sta) {
		pubsta = peer->sta;
		if (pubsta->valid_links) {
			status->link_valid = 1;
			status->link_id = peer->link_id;
		}
	}

	spin_unlock_bh(&ar->ab->base_lock);

	ath12k_dbg(ar->ab, ATH12K_DBG_DATA,
		   "rx skb %p len %u peer %pM %u %s %s%s%s%s%s%s%s%s %srate_idx %u vht_nss %u freq %u band %u flag 0x%x fcs-err %i mic-err %i amsdu-more %i\n",
		   msdu,
		   msdu->len,
		   peer ? peer->addr : NULL,
		   rxcb->tid,
		   (is_mcbc) ? "mcast" : "ucast",
		   (status->encoding == RX_ENC_LEGACY) ? "legacy" : "",
		   (status->encoding == RX_ENC_HT) ? "ht" : "",
		   (status->encoding == RX_ENC_VHT) ? "vht" : "",
		   (status->encoding == RX_ENC_HE) ? "he" : "",
		   (status->bw == RATE_INFO_BW_40) ? "40" : "",
		   (status->bw == RATE_INFO_BW_80) ? "80" : "",
		   (status->bw == RATE_INFO_BW_160) ? "160" : "",
		   (status->bw == RATE_INFO_BW_320) ? "320" : "",
		   status->enc_flags & RX_ENC_FLAG_SHORT_GI ? "sgi " : "",
		   status->rate_idx,
		   status->nss,
		   status->freq,
		   status->band, status->flag,
		   !!(status->flag & RX_FLAG_FAILED_FCS_CRC),
		   !!(status->flag & RX_FLAG_MMIC_ERROR),
		   !!(status->flag & RX_FLAG_AMSDU_MORE));

	ath12k_dbg_dump(ar->ab, ATH12K_DBG_DP_RX, NULL, "dp rx msdu: ",
			msdu->data, msdu->len);
	rx_status = IEEE80211_SKB_RXCB(msdu);
	*rx_status = *status;

	/* TODO: trace rx packet */

	/* PN for multicast packets are not validate in HW,
	 * so skip 802.3 rx path
	 * Also, fast_rx expects the STA to be authorized, hence
	 * eapol packets are sent in slow path.
	 */
	if (decap == DP_RX_DECAP_TYPE_ETHERNET2_DIX && !is_eapol_tkip &&
	    !(is_mcbc && rx_status->flag & RX_FLAG_DECRYPTED))
		rx_status->flag |= RX_FLAG_8023;

	ieee80211_rx_napi(ath12k_ar_to_hw(ar), pubsta, msdu, napi);
}
2323 
static int ath12k_dp_mon_rx_deliver(struct ath12k *ar,
				    struct dp_mon_mpdu *mon_mpdu,
				    struct hal_rx_mon_ppdu_info *ppduinfo,
				    struct napi_struct *napi)
{
	struct ath12k_pdev_dp *dp = &ar->dp;
	struct sk_buff *mon_skb, *skb_next, *header;
	struct ieee80211_rx_status *rxs = &dp->rx_status;
	u8 decap = DP_RX_DECAP_TYPE_RAW;

	/* Merge the MPDU's MSDU chain and deliver each resulting skb to
	 * mac80211 with per-skb AMSDU/PN flags. Returns 0 on success,
	 * -EINVAL after freeing the original chain on merge failure.
	 */
	mon_skb = ath12k_dp_mon_rx_merg_msdus(ar, mon_mpdu, ppduinfo, rxs);
	if (!mon_skb)
		goto mon_deliver_fail;

	header = mon_skb;
	rxs->flag = 0;

	if (mon_mpdu->err_bitmap & HAL_RX_MPDU_ERR_FCS)
		rxs->flag = RX_FLAG_FAILED_FCS_CRC;

	do {
		skb_next = mon_skb->next;
		/* AMSDU_MORE set on all but the last skb of the chain */
		if (!skb_next)
			rxs->flag &= ~RX_FLAG_AMSDU_MORE;
		else
			rxs->flag |= RX_FLAG_AMSDU_MORE;

		/* Only the first (header) skb disallows PN reuse */
		if (mon_skb == header) {
			header = NULL;
			rxs->flag &= ~RX_FLAG_ALLOW_SAME_PN;
		} else {
			rxs->flag |= RX_FLAG_ALLOW_SAME_PN;
		}
		rxs->flag |= RX_FLAG_ONLY_MONITOR;

		/* NOTE(review): this condition can never be true because
		 * RX_FLAG_ONLY_MONITOR was set just above, so decap always
		 * remains DP_RX_DECAP_TYPE_RAW here — confirm intent.
		 */
		if (!(rxs->flag & RX_FLAG_ONLY_MONITOR))
			decap = mon_mpdu->decap_format;

		ath12k_dp_mon_update_radiotap(ar, ppduinfo, mon_skb, rxs);
		ath12k_dp_mon_rx_deliver_msdu(ar, napi, mon_skb, rxs, decap);
		mon_skb = skb_next;
	} while (mon_skb);
	rxs->flag = 0;

	return 0;

mon_deliver_fail:
	/* Merge failed: free the whole undelivered MSDU chain */
	mon_skb = mon_mpdu->head;
	while (mon_skb) {
		skb_next = mon_skb->next;
		dev_kfree_skb_any(mon_skb);
		mon_skb = skb_next;
	}
	return -EINVAL;
}
2379 
2380 static int ath12k_dp_pkt_set_pktlen(struct sk_buff *skb, u32 len)
2381 {
2382 	if (skb->len > len) {
2383 		skb_trim(skb, len);
2384 	} else {
2385 		if (skb_tailroom(skb) < len - skb->len) {
2386 			if ((pskb_expand_head(skb, 0,
2387 					      len - skb->len - skb_tailroom(skb),
2388 					      GFP_ATOMIC))) {
2389 				return -ENOMEM;
2390 			}
2391 		}
2392 		skb_put(skb, (len - skb->len));
2393 	}
2394 
2395 	return 0;
2396 }
2397 
2398 /* Hardware fill buffer with 128 bytes aligned. So need to reap it
2399  * with 128 bytes aligned.
2400  */
2401 #define RXDMA_DATA_DMA_BLOCK_SIZE 128
2402 
2403 static void
2404 ath12k_dp_mon_get_buf_len(struct hal_rx_msdu_desc_info *info,
2405 			  bool *is_frag, u32 *total_len,
2406 			  u32 *frag_len, u32 *msdu_cnt)
2407 {
2408 	if (info->msdu_flags & RX_MSDU_DESC_INFO0_MSDU_CONTINUATION) {
2409 		*is_frag = true;
2410 		*frag_len = (RX_MON_STATUS_BASE_BUF_SIZE -
2411 			     sizeof(struct hal_rx_desc)) &
2412 			     ~(RXDMA_DATA_DMA_BLOCK_SIZE - 1);
2413 		*total_len += *frag_len;
2414 	} else {
2415 		if (*is_frag)
2416 			*frag_len = info->msdu_len - *total_len;
2417 		else
2418 			*frag_len = info->msdu_len;
2419 
2420 		*msdu_cnt -= 1;
2421 	}
2422 }
2423 
static int
ath12k_dp_mon_parse_status_buf(struct ath12k *ar,
			       struct ath12k_mon_data *pmon,
			       const struct dp_mon_packet_info *packet_info)
{
	struct ath12k_base *ab = ar->ab;
	struct dp_rxdma_mon_ring *buf_ring = &ab->dp.rxdma_mon_buf_ring;
	struct sk_buff *msdu;
	int buf_id;
	u32 offset;

	/* Claim the packet buffer referenced by a HAL_MON_BUF_ADDR TLV,
	 * size it to the DMA'd length and append it to the MPDU currently
	 * being assembled in pmon->mon_mpdu. The consumed ring entry is
	 * replenished in all cases.
	 */
	buf_id = u32_get_bits(packet_info->cookie, DP_RXDMA_BUF_COOKIE_BUF_ID);

	spin_lock_bh(&buf_ring->idr_lock);
	msdu = idr_remove(&buf_ring->bufs_idr, buf_id);
	spin_unlock_bh(&buf_ring->idr_lock);

	if (unlikely(!msdu)) {
		ath12k_warn(ab, "mon dest desc with inval buf_id %d\n", buf_id);
		return 0;
	}

	dma_unmap_single(ab->dev, ATH12K_SKB_RXCB(msdu)->paddr,
			 msdu->len + skb_tailroom(msdu),
			 DMA_FROM_DEVICE);

	/* Trim/grow the skb to the DMA length plus the 802.11 header offset;
	 * drop the buffer (but still replenish) if resizing fails.
	 */
	offset = packet_info->dma_length + ATH12K_MON_RX_DOT11_OFFSET;
	if (ath12k_dp_pkt_set_pktlen(msdu, offset)) {
		dev_kfree_skb_any(msdu);
		goto dest_replenish;
	}

	/* Link the buffer onto the current MPDU's MSDU chain */
	if (!pmon->mon_mpdu->head)
		pmon->mon_mpdu->head = msdu;
	else
		pmon->mon_mpdu->tail->next = msdu;

	pmon->mon_mpdu->tail = msdu;

dest_replenish:
	ath12k_dp_mon_buf_replenish(ab, buf_ring, 1);

	return 0;
}
2468 
static int
ath12k_dp_mon_parse_rx_dest_tlv(struct ath12k *ar,
				struct ath12k_mon_data *pmon,
				enum hal_rx_mon_status hal_status,
				const void *tlv_data)
{
	/* Drive the MPDU assembly state machine from parsed TLV statuses:
	 * MPDU_START allocates a tracking struct, BUF_ADDR chains packet
	 * buffers onto it, MPDU_END queues the finished MPDU for delivery.
	 * Returns -ENOMEM only when the tracking struct cannot be allocated.
	 */
	switch (hal_status) {
	case HAL_RX_MON_STATUS_MPDU_START:
		/* A previous MPDU must have been closed by MPDU_END */
		if (WARN_ON_ONCE(pmon->mon_mpdu))
			break;

		pmon->mon_mpdu = kzalloc(sizeof(*pmon->mon_mpdu), GFP_ATOMIC);
		if (!pmon->mon_mpdu)
			return -ENOMEM;
		break;
	case HAL_RX_MON_STATUS_BUF_ADDR:
		return ath12k_dp_mon_parse_status_buf(ar, pmon, tlv_data);
	case HAL_RX_MON_STATUS_MPDU_END:
		/* If no MSDU then free empty MPDU */
		/* NOTE(review): pmon->mon_mpdu is dereferenced here (and in
		 * MSDU_END below) without a NULL check — assumes the HW
		 * always emits MPDU_START first; confirm against TLV flow.
		 */
		if (pmon->mon_mpdu->tail) {
			pmon->mon_mpdu->tail->next = NULL;
			list_add_tail(&pmon->mon_mpdu->list, &pmon->dp_rx_mon_mpdu_list);
		} else {
			kfree(pmon->mon_mpdu);
		}
		pmon->mon_mpdu = NULL;
		break;
	case HAL_RX_MON_STATUS_MSDU_END:
		/* Latch per-MSDU decap format and error bitmap onto the MPDU */
		pmon->mon_mpdu->decap_format = pmon->decap_format;
		pmon->mon_mpdu->err_bitmap = pmon->err_bitmap;
		break;
	default:
		break;
	}

	return 0;
}
2506 
static enum hal_rx_mon_status
ath12k_dp_mon_parse_rx_dest(struct ath12k *ar, struct ath12k_mon_data *pmon,
			    struct sk_buff *skb)
{
	struct hal_tlv_64_hdr *tlv;
	struct ath12k_skb_rxcb *rxcb;
	enum hal_rx_mon_status hal_status;
	u16 tlv_tag, tlv_len;
	u8 *ptr = skb->data;

	/* Walk the chain of 64-bit TLVs in a monitor status buffer, feeding
	 * each to the status parser (and, with rxdma1, to the destination
	 * MPDU assembler) until the buffer ends or a terminal status is
	 * reached.
	 */
	do {
		tlv = (struct hal_tlv_64_hdr *)ptr;
		tlv_tag = le64_get_bits(tlv->tl, HAL_TLV_64_HDR_TAG);

		/* The actual length of PPDU_END is the combined length of many PHY
		 * TLVs that follow. Skip the TLV header and
		 * rx_rxpcu_classification_overview that follows the header to get to
		 * next TLV.
		 */

		if (tlv_tag == HAL_RX_PPDU_END)
			tlv_len = sizeof(struct hal_rx_rxpcu_classification_overview);
		else
			tlv_len = le64_get_bits(tlv->tl, HAL_TLV_64_HDR_LEN);

		hal_status = ath12k_dp_mon_rx_parse_status_tlv(ar, pmon, tlv);

		/* An assembler error (e.g. -ENOMEM) aborts this buffer */
		if (ar->monitor_started && ar->ab->hw_params->rxdma1_enable &&
		    ath12k_dp_mon_parse_rx_dest_tlv(ar, pmon, hal_status, tlv->value))
			return HAL_RX_MON_STATUS_PPDU_DONE;

		/* TLVs are 64-bit aligned within the buffer */
		ptr += sizeof(*tlv) + tlv_len;
		ptr = PTR_ALIGN(ptr, HAL_TLV_64_ALIGN);

		if ((ptr - skb->data) > skb->len)
			break;

	} while ((hal_status == HAL_RX_MON_STATUS_PPDU_NOT_DONE) ||
		 (hal_status == HAL_RX_MON_STATUS_BUF_ADDR) ||
		 (hal_status == HAL_RX_MON_STATUS_MPDU_START) ||
		 (hal_status == HAL_RX_MON_STATUS_MPDU_END) ||
		 (hal_status == HAL_RX_MON_STATUS_MSDU_END));

	/* Firmware marks the final status buffer of a PPDU via the rxcb */
	rxcb = ATH12K_SKB_RXCB(skb);
	if (rxcb->is_end_of_ppdu)
		hal_status = HAL_RX_MON_STATUS_PPDU_DONE;

	return hal_status;
}
2556 
2557 enum hal_rx_mon_status
2558 ath12k_dp_mon_rx_parse_mon_status(struct ath12k *ar,
2559 				  struct ath12k_mon_data *pmon,
2560 				  struct sk_buff *skb,
2561 				  struct napi_struct *napi)
2562 {
2563 	struct hal_rx_mon_ppdu_info *ppdu_info = &pmon->mon_ppdu_info;
2564 	struct dp_mon_mpdu *tmp;
2565 	struct dp_mon_mpdu *mon_mpdu = pmon->mon_mpdu;
2566 	enum hal_rx_mon_status hal_status;
2567 
2568 	hal_status = ath12k_dp_mon_parse_rx_dest(ar, pmon, skb);
2569 	if (hal_status != HAL_RX_MON_STATUS_PPDU_DONE)
2570 		return hal_status;
2571 
2572 	list_for_each_entry_safe(mon_mpdu, tmp, &pmon->dp_rx_mon_mpdu_list, list) {
2573 		list_del(&mon_mpdu->list);
2574 
2575 		if (mon_mpdu->head && mon_mpdu->tail)
2576 			ath12k_dp_mon_rx_deliver(ar, mon_mpdu, ppdu_info, napi);
2577 
2578 		kfree(mon_mpdu);
2579 	}
2580 
2581 	return hal_status;
2582 }
2583 
int ath12k_dp_mon_buf_replenish(struct ath12k_base *ab,
				struct dp_rxdma_mon_ring *buf_ring,
				int req_entries)
{
	struct hal_mon_buf_ring *mon_buf;
	struct sk_buff *skb;
	struct hal_srng *srng;
	dma_addr_t paddr;
	u32 cookie;
	int buf_id;

	/* Post req_entries fresh packet buffers to the monitor buffer ring:
	 * allocate, align, DMA-map, register in the IDR and publish the
	 * descriptor (paddr + cookie) to the HW via the SRNG. Returns 0 on
	 * success or -ENOMEM after unwinding the failed entry; previously
	 * posted entries remain on the ring.
	 */
	srng = &ab->hal.srng_list[buf_ring->refill_buf_ring.ring_id];
	spin_lock_bh(&srng->lock);
	ath12k_hal_srng_access_begin(ab, srng);

	while (req_entries > 0) {
		skb = dev_alloc_skb(DP_RX_BUFFER_SIZE + DP_RX_BUFFER_ALIGN_SIZE);
		if (unlikely(!skb))
			goto fail_alloc_skb;

		/* Align skb->data by consuming headroom */
		if (!IS_ALIGNED((unsigned long)skb->data, DP_RX_BUFFER_ALIGN_SIZE)) {
			skb_pull(skb,
				 PTR_ALIGN(skb->data, DP_RX_BUFFER_ALIGN_SIZE) -
				 skb->data);
		}

		paddr = dma_map_single(ab->dev, skb->data,
				       skb->len + skb_tailroom(skb),
				       DMA_FROM_DEVICE);

		if (unlikely(dma_mapping_error(ab->dev, paddr)))
			goto fail_free_skb;

		spin_lock_bh(&buf_ring->idr_lock);
		buf_id = idr_alloc(&buf_ring->bufs_idr, skb, 0,
				   buf_ring->bufs_max * 3, GFP_ATOMIC);
		spin_unlock_bh(&buf_ring->idr_lock);

		if (unlikely(buf_id < 0))
			goto fail_dma_unmap;

		mon_buf = ath12k_hal_srng_src_get_next_entry(ab, srng);
		if (unlikely(!mon_buf))
			goto fail_idr_remove;

		ATH12K_SKB_RXCB(skb)->paddr = paddr;

		/* Cookie lets the completion path recover the IDR slot */
		cookie = u32_encode_bits(buf_id, DP_RXDMA_BUF_COOKIE_BUF_ID);

		mon_buf->paddr_lo = cpu_to_le32(lower_32_bits(paddr));
		mon_buf->paddr_hi = cpu_to_le32(upper_32_bits(paddr));
		mon_buf->cookie = cpu_to_le64(cookie);

		req_entries--;
	}

	ath12k_hal_srng_access_end(ab, srng);
	spin_unlock_bh(&srng->lock);
	return 0;

	/* Unwind in reverse acquisition order for the entry that failed */
fail_idr_remove:
	spin_lock_bh(&buf_ring->idr_lock);
	idr_remove(&buf_ring->bufs_idr, buf_id);
	spin_unlock_bh(&buf_ring->idr_lock);
fail_dma_unmap:
	dma_unmap_single(ab->dev, paddr, skb->len + skb_tailroom(skb),
			 DMA_FROM_DEVICE);
fail_free_skb:
	dev_kfree_skb_any(skb);
fail_alloc_skb:
	ath12k_hal_srng_access_end(ab, srng);
	spin_unlock_bh(&srng->lock);
	return -ENOMEM;
}
2658 
int ath12k_dp_mon_status_bufs_replenish(struct ath12k_base *ab,
					struct dp_rxdma_mon_ring *rx_ring,
					int req_entries)
{
	enum hal_rx_buf_return_buf_manager mgr =
		ab->hw_params->hal_params->rx_buf_rbm;
	int num_free, num_remain, buf_id;
	struct ath12k_buffer_addr *desc;
	struct hal_srng *srng;
	struct sk_buff *skb;
	dma_addr_t paddr;
	u32 cookie;

	/* Refill the monitor status ring with up to req_entries buffers
	 * (capped by ring capacity and free descriptors). Best-effort:
	 * allocation failure simply stops early, other failures unwind the
	 * current entry. Returns the number of buffers actually posted.
	 */
	req_entries = min(req_entries, rx_ring->bufs_max);

	srng = &ab->hal.srng_list[rx_ring->refill_buf_ring.ring_id];

	spin_lock_bh(&srng->lock);

	ath12k_hal_srng_access_begin(ab, srng);

	num_free = ath12k_hal_srng_src_num_free(ab, srng, true);
	/* req_entries == 0 means "top up opportunistically": only refill
	 * when the ring has drained below a quarter of capacity.
	 */
	if (!req_entries && (num_free > (rx_ring->bufs_max * 3) / 4))
		req_entries = num_free;

	req_entries = min(num_free, req_entries);
	num_remain = req_entries;

	while (num_remain > 0) {
		skb = dev_alloc_skb(RX_MON_STATUS_BUF_SIZE);
		if (!skb)
			break;

		/* Align skb->data by consuming headroom */
		if (!IS_ALIGNED((unsigned long)skb->data,
				RX_MON_STATUS_BUF_ALIGN)) {
			skb_pull(skb,
				 PTR_ALIGN(skb->data, RX_MON_STATUS_BUF_ALIGN) -
				 skb->data);
		}

		paddr = dma_map_single(ab->dev, skb->data,
				       skb->len + skb_tailroom(skb),
				       DMA_FROM_DEVICE);
		if (dma_mapping_error(ab->dev, paddr))
			goto fail_free_skb;

		spin_lock_bh(&rx_ring->idr_lock);
		buf_id = idr_alloc(&rx_ring->bufs_idr, skb, 0,
				   rx_ring->bufs_max * 3, GFP_ATOMIC);
		spin_unlock_bh(&rx_ring->idr_lock);
		if (buf_id < 0)
			goto fail_dma_unmap;
		cookie = u32_encode_bits(buf_id, DP_RXDMA_BUF_COOKIE_BUF_ID);

		desc = ath12k_hal_srng_src_get_next_entry(ab, srng);
		if (!desc)
			goto fail_buf_unassign;

		ATH12K_SKB_RXCB(skb)->paddr = paddr;

		num_remain--;

		ath12k_hal_rx_buf_addr_info_set(desc, paddr, cookie, mgr);
	}

	ath12k_hal_srng_access_end(ab, srng);

	spin_unlock_bh(&srng->lock);

	return req_entries - num_remain;

	/* Unwind in reverse acquisition order for the entry that failed */
fail_buf_unassign:
	spin_lock_bh(&rx_ring->idr_lock);
	idr_remove(&rx_ring->bufs_idr, buf_id);
	spin_unlock_bh(&rx_ring->idr_lock);
fail_dma_unmap:
	dma_unmap_single(ab->dev, paddr, skb->len + skb_tailroom(skb),
			 DMA_FROM_DEVICE);
fail_free_skb:
	dev_kfree_skb_any(skb);

	ath12k_hal_srng_access_end(ab, srng);

	spin_unlock_bh(&srng->lock);

	return req_entries - num_remain;
}
2746 
2747 static struct dp_mon_tx_ppdu_info *
2748 ath12k_dp_mon_tx_get_ppdu_info(struct ath12k_mon_data *pmon,
2749 			       unsigned int ppdu_id,
2750 			       enum dp_mon_tx_ppdu_info_type type)
2751 {
2752 	struct dp_mon_tx_ppdu_info *tx_ppdu_info;
2753 
2754 	if (type == DP_MON_TX_PROT_PPDU_INFO) {
2755 		tx_ppdu_info = pmon->tx_prot_ppdu_info;
2756 
2757 		if (tx_ppdu_info && !tx_ppdu_info->is_used)
2758 			return tx_ppdu_info;
2759 		kfree(tx_ppdu_info);
2760 	} else {
2761 		tx_ppdu_info = pmon->tx_data_ppdu_info;
2762 
2763 		if (tx_ppdu_info && !tx_ppdu_info->is_used)
2764 			return tx_ppdu_info;
2765 		kfree(tx_ppdu_info);
2766 	}
2767 
2768 	/* allocate new tx_ppdu_info */
2769 	tx_ppdu_info = kzalloc(sizeof(*tx_ppdu_info), GFP_ATOMIC);
2770 	if (!tx_ppdu_info)
2771 		return NULL;
2772 
2773 	tx_ppdu_info->is_used = 0;
2774 	tx_ppdu_info->ppdu_id = ppdu_id;
2775 
2776 	if (type == DP_MON_TX_PROT_PPDU_INFO)
2777 		pmon->tx_prot_ppdu_info = tx_ppdu_info;
2778 	else
2779 		pmon->tx_data_ppdu_info = tx_ppdu_info;
2780 
2781 	return tx_ppdu_info;
2782 }
2783 
2784 static struct dp_mon_tx_ppdu_info *
2785 ath12k_dp_mon_hal_tx_ppdu_info(struct ath12k_mon_data *pmon,
2786 			       u16 tlv_tag)
2787 {
2788 	switch (tlv_tag) {
2789 	case HAL_TX_FES_SETUP:
2790 	case HAL_TX_FLUSH:
2791 	case HAL_PCU_PPDU_SETUP_INIT:
2792 	case HAL_TX_PEER_ENTRY:
2793 	case HAL_TX_QUEUE_EXTENSION:
2794 	case HAL_TX_MPDU_START:
2795 	case HAL_TX_MSDU_START:
2796 	case HAL_TX_DATA:
2797 	case HAL_MON_BUF_ADDR:
2798 	case HAL_TX_MPDU_END:
2799 	case HAL_TX_LAST_MPDU_FETCHED:
2800 	case HAL_TX_LAST_MPDU_END:
2801 	case HAL_COEX_TX_REQ:
2802 	case HAL_TX_RAW_OR_NATIVE_FRAME_SETUP:
2803 	case HAL_SCH_CRITICAL_TLV_REFERENCE:
2804 	case HAL_TX_FES_SETUP_COMPLETE:
2805 	case HAL_TQM_MPDU_GLOBAL_START:
2806 	case HAL_SCHEDULER_END:
2807 	case HAL_TX_FES_STATUS_USER_PPDU:
2808 		break;
2809 	case HAL_TX_FES_STATUS_PROT: {
2810 		if (!pmon->tx_prot_ppdu_info->is_used)
2811 			pmon->tx_prot_ppdu_info->is_used = true;
2812 
2813 		return pmon->tx_prot_ppdu_info;
2814 	}
2815 	}
2816 
2817 	if (!pmon->tx_data_ppdu_info->is_used)
2818 		pmon->tx_data_ppdu_info->is_used = true;
2819 
2820 	return pmon->tx_data_ppdu_info;
2821 }
2822 
2823 #define MAX_MONITOR_HEADER 512
2824 #define MAX_DUMMY_FRM_BODY 128
2825 
2826 struct sk_buff *ath12k_dp_mon_tx_alloc_skb(void)
2827 {
2828 	struct sk_buff *skb;
2829 
2830 	skb = dev_alloc_skb(MAX_MONITOR_HEADER + MAX_DUMMY_FRM_BODY);
2831 	if (!skb)
2832 		return NULL;
2833 
2834 	skb_reserve(skb, MAX_MONITOR_HEADER);
2835 
2836 	if (!IS_ALIGNED((unsigned long)skb->data, 4))
2837 		skb_pull(skb, PTR_ALIGN(skb->data, 4) - skb->data);
2838 
2839 	return skb;
2840 }
2841 
2842 static int
2843 ath12k_dp_mon_tx_gen_cts2self_frame(struct dp_mon_tx_ppdu_info *tx_ppdu_info)
2844 {
2845 	struct sk_buff *skb;
2846 	struct ieee80211_cts *cts;
2847 
2848 	skb = ath12k_dp_mon_tx_alloc_skb();
2849 	if (!skb)
2850 		return -ENOMEM;
2851 
2852 	cts = (struct ieee80211_cts *)skb->data;
2853 	memset(cts, 0, MAX_DUMMY_FRM_BODY);
2854 	cts->frame_control =
2855 		cpu_to_le16(IEEE80211_FTYPE_CTL | IEEE80211_STYPE_CTS);
2856 	cts->duration = cpu_to_le16(tx_ppdu_info->rx_status.rx_duration);
2857 	memcpy(cts->ra, tx_ppdu_info->rx_status.addr1, sizeof(cts->ra));
2858 
2859 	skb_put(skb, sizeof(*cts));
2860 	tx_ppdu_info->tx_mon_mpdu->head = skb;
2861 	tx_ppdu_info->tx_mon_mpdu->tail = NULL;
2862 	list_add_tail(&tx_ppdu_info->tx_mon_mpdu->list,
2863 		      &tx_ppdu_info->dp_tx_mon_mpdu_list);
2864 
2865 	return 0;
2866 }
2867 
2868 static int
2869 ath12k_dp_mon_tx_gen_rts_frame(struct dp_mon_tx_ppdu_info *tx_ppdu_info)
2870 {
2871 	struct sk_buff *skb;
2872 	struct ieee80211_rts *rts;
2873 
2874 	skb = ath12k_dp_mon_tx_alloc_skb();
2875 	if (!skb)
2876 		return -ENOMEM;
2877 
2878 	rts = (struct ieee80211_rts *)skb->data;
2879 	memset(rts, 0, MAX_DUMMY_FRM_BODY);
2880 	rts->frame_control =
2881 		cpu_to_le16(IEEE80211_FTYPE_CTL | IEEE80211_STYPE_RTS);
2882 	rts->duration = cpu_to_le16(tx_ppdu_info->rx_status.rx_duration);
2883 	memcpy(rts->ra, tx_ppdu_info->rx_status.addr1, sizeof(rts->ra));
2884 	memcpy(rts->ta, tx_ppdu_info->rx_status.addr2, sizeof(rts->ta));
2885 
2886 	skb_put(skb, sizeof(*rts));
2887 	tx_ppdu_info->tx_mon_mpdu->head = skb;
2888 	tx_ppdu_info->tx_mon_mpdu->tail = NULL;
2889 	list_add_tail(&tx_ppdu_info->tx_mon_mpdu->list,
2890 		      &tx_ppdu_info->dp_tx_mon_mpdu_list);
2891 
2892 	return 0;
2893 }
2894 
2895 static int
2896 ath12k_dp_mon_tx_gen_3addr_qos_null_frame(struct dp_mon_tx_ppdu_info *tx_ppdu_info)
2897 {
2898 	struct sk_buff *skb;
2899 	struct ieee80211_qos_hdr *qhdr;
2900 
2901 	skb = ath12k_dp_mon_tx_alloc_skb();
2902 	if (!skb)
2903 		return -ENOMEM;
2904 
2905 	qhdr = (struct ieee80211_qos_hdr *)skb->data;
2906 	memset(qhdr, 0, MAX_DUMMY_FRM_BODY);
2907 	qhdr->frame_control =
2908 		cpu_to_le16(IEEE80211_FTYPE_DATA | IEEE80211_STYPE_QOS_NULLFUNC);
2909 	qhdr->duration_id = cpu_to_le16(tx_ppdu_info->rx_status.rx_duration);
2910 	memcpy(qhdr->addr1, tx_ppdu_info->rx_status.addr1, ETH_ALEN);
2911 	memcpy(qhdr->addr2, tx_ppdu_info->rx_status.addr2, ETH_ALEN);
2912 	memcpy(qhdr->addr3, tx_ppdu_info->rx_status.addr3, ETH_ALEN);
2913 
2914 	skb_put(skb, sizeof(*qhdr));
2915 	tx_ppdu_info->tx_mon_mpdu->head = skb;
2916 	tx_ppdu_info->tx_mon_mpdu->tail = NULL;
2917 	list_add_tail(&tx_ppdu_info->tx_mon_mpdu->list,
2918 		      &tx_ppdu_info->dp_tx_mon_mpdu_list);
2919 
2920 	return 0;
2921 }
2922 
2923 static int
2924 ath12k_dp_mon_tx_gen_4addr_qos_null_frame(struct dp_mon_tx_ppdu_info *tx_ppdu_info)
2925 {
2926 	struct sk_buff *skb;
2927 	struct dp_mon_qosframe_addr4 *qhdr;
2928 
2929 	skb = ath12k_dp_mon_tx_alloc_skb();
2930 	if (!skb)
2931 		return -ENOMEM;
2932 
2933 	qhdr = (struct dp_mon_qosframe_addr4 *)skb->data;
2934 	memset(qhdr, 0, MAX_DUMMY_FRM_BODY);
2935 	qhdr->frame_control =
2936 		cpu_to_le16(IEEE80211_FTYPE_DATA | IEEE80211_STYPE_QOS_NULLFUNC);
2937 	qhdr->duration = cpu_to_le16(tx_ppdu_info->rx_status.rx_duration);
2938 	memcpy(qhdr->addr1, tx_ppdu_info->rx_status.addr1, ETH_ALEN);
2939 	memcpy(qhdr->addr2, tx_ppdu_info->rx_status.addr2, ETH_ALEN);
2940 	memcpy(qhdr->addr3, tx_ppdu_info->rx_status.addr3, ETH_ALEN);
2941 	memcpy(qhdr->addr4, tx_ppdu_info->rx_status.addr4, ETH_ALEN);
2942 
2943 	skb_put(skb, sizeof(*qhdr));
2944 	tx_ppdu_info->tx_mon_mpdu->head = skb;
2945 	tx_ppdu_info->tx_mon_mpdu->tail = NULL;
2946 	list_add_tail(&tx_ppdu_info->tx_mon_mpdu->list,
2947 		      &tx_ppdu_info->dp_tx_mon_mpdu_list);
2948 
2949 	return 0;
2950 }
2951 
2952 static int
2953 ath12k_dp_mon_tx_gen_ack_frame(struct dp_mon_tx_ppdu_info *tx_ppdu_info)
2954 {
2955 	struct sk_buff *skb;
2956 	struct dp_mon_frame_min_one *fbmhdr;
2957 
2958 	skb = ath12k_dp_mon_tx_alloc_skb();
2959 	if (!skb)
2960 		return -ENOMEM;
2961 
2962 	fbmhdr = (struct dp_mon_frame_min_one *)skb->data;
2963 	memset(fbmhdr, 0, MAX_DUMMY_FRM_BODY);
2964 	fbmhdr->frame_control =
2965 		cpu_to_le16(IEEE80211_FTYPE_DATA | IEEE80211_STYPE_QOS_CFACK);
2966 	memcpy(fbmhdr->addr1, tx_ppdu_info->rx_status.addr1, ETH_ALEN);
2967 
2968 	/* set duration zero for ack frame */
2969 	fbmhdr->duration = 0;
2970 
2971 	skb_put(skb, sizeof(*fbmhdr));
2972 	tx_ppdu_info->tx_mon_mpdu->head = skb;
2973 	tx_ppdu_info->tx_mon_mpdu->tail = NULL;
2974 	list_add_tail(&tx_ppdu_info->tx_mon_mpdu->list,
2975 		      &tx_ppdu_info->dp_tx_mon_mpdu_list);
2976 
2977 	return 0;
2978 }
2979 
2980 static int
2981 ath12k_dp_mon_tx_gen_prot_frame(struct dp_mon_tx_ppdu_info *tx_ppdu_info)
2982 {
2983 	int ret = 0;
2984 
2985 	switch (tx_ppdu_info->rx_status.medium_prot_type) {
2986 	case DP_MON_TX_MEDIUM_RTS_LEGACY:
2987 	case DP_MON_TX_MEDIUM_RTS_11AC_STATIC_BW:
2988 	case DP_MON_TX_MEDIUM_RTS_11AC_DYNAMIC_BW:
2989 		ret = ath12k_dp_mon_tx_gen_rts_frame(tx_ppdu_info);
2990 		break;
2991 	case DP_MON_TX_MEDIUM_CTS2SELF:
2992 		ret = ath12k_dp_mon_tx_gen_cts2self_frame(tx_ppdu_info);
2993 		break;
2994 	case DP_MON_TX_MEDIUM_QOS_NULL_NO_ACK_3ADDR:
2995 		ret = ath12k_dp_mon_tx_gen_3addr_qos_null_frame(tx_ppdu_info);
2996 		break;
2997 	case DP_MON_TX_MEDIUM_QOS_NULL_NO_ACK_4ADDR:
2998 		ret = ath12k_dp_mon_tx_gen_4addr_qos_null_frame(tx_ppdu_info);
2999 		break;
3000 	}
3001 
3002 	return ret;
3003 }
3004 
3005 static enum dp_mon_tx_tlv_status
3006 ath12k_dp_mon_tx_parse_status_tlv(struct ath12k_base *ab,
3007 				  struct ath12k_mon_data *pmon,
3008 				  u16 tlv_tag, const void *tlv_data, u32 userid)
3009 {
3010 	struct dp_mon_tx_ppdu_info *tx_ppdu_info;
3011 	enum dp_mon_tx_tlv_status status = DP_MON_TX_STATUS_PPDU_NOT_DONE;
3012 	u32 info[7];
3013 
3014 	tx_ppdu_info = ath12k_dp_mon_hal_tx_ppdu_info(pmon, tlv_tag);
3015 
3016 	switch (tlv_tag) {
3017 	case HAL_TX_FES_SETUP: {
3018 		const struct hal_tx_fes_setup *tx_fes_setup = tlv_data;
3019 
3020 		info[0] = __le32_to_cpu(tx_fes_setup->info0);
3021 		tx_ppdu_info->ppdu_id = __le32_to_cpu(tx_fes_setup->schedule_id);
3022 		tx_ppdu_info->num_users =
3023 			u32_get_bits(info[0], HAL_TX_FES_SETUP_INFO0_NUM_OF_USERS);
3024 		status = DP_MON_TX_FES_SETUP;
3025 		break;
3026 	}
3027 
3028 	case HAL_TX_FES_STATUS_END: {
3029 		const struct hal_tx_fes_status_end *tx_fes_status_end = tlv_data;
3030 		u32 tst_15_0, tst_31_16;
3031 
3032 		info[0] = __le32_to_cpu(tx_fes_status_end->info0);
3033 		tst_15_0 =
3034 			u32_get_bits(info[0],
3035 				     HAL_TX_FES_STATUS_END_INFO0_START_TIMESTAMP_15_0);
3036 		tst_31_16 =
3037 			u32_get_bits(info[0],
3038 				     HAL_TX_FES_STATUS_END_INFO0_START_TIMESTAMP_31_16);
3039 
3040 		tx_ppdu_info->rx_status.ppdu_ts = (tst_15_0 | (tst_31_16 << 16));
3041 		status = DP_MON_TX_FES_STATUS_END;
3042 		break;
3043 	}
3044 
3045 	case HAL_RX_RESPONSE_REQUIRED_INFO: {
3046 		const struct hal_rx_resp_req_info *rx_resp_req_info = tlv_data;
3047 		u32 addr_32;
3048 		u16 addr_16;
3049 
3050 		info[0] = __le32_to_cpu(rx_resp_req_info->info0);
3051 		info[1] = __le32_to_cpu(rx_resp_req_info->info1);
3052 		info[2] = __le32_to_cpu(rx_resp_req_info->info2);
3053 		info[3] = __le32_to_cpu(rx_resp_req_info->info3);
3054 		info[4] = __le32_to_cpu(rx_resp_req_info->info4);
3055 		info[5] = __le32_to_cpu(rx_resp_req_info->info5);
3056 
3057 		tx_ppdu_info->rx_status.ppdu_id =
3058 			u32_get_bits(info[0], HAL_RX_RESP_REQ_INFO0_PPDU_ID);
3059 		tx_ppdu_info->rx_status.reception_type =
3060 			u32_get_bits(info[0], HAL_RX_RESP_REQ_INFO0_RECEPTION_TYPE);
3061 		tx_ppdu_info->rx_status.rx_duration =
3062 			u32_get_bits(info[1], HAL_RX_RESP_REQ_INFO1_DURATION);
3063 		tx_ppdu_info->rx_status.mcs =
3064 			u32_get_bits(info[1], HAL_RX_RESP_REQ_INFO1_RATE_MCS);
3065 		tx_ppdu_info->rx_status.sgi =
3066 			u32_get_bits(info[1], HAL_RX_RESP_REQ_INFO1_SGI);
3067 		tx_ppdu_info->rx_status.is_stbc =
3068 			u32_get_bits(info[1], HAL_RX_RESP_REQ_INFO1_STBC);
3069 		tx_ppdu_info->rx_status.ldpc =
3070 			u32_get_bits(info[1], HAL_RX_RESP_REQ_INFO1_LDPC);
3071 		tx_ppdu_info->rx_status.is_ampdu =
3072 			u32_get_bits(info[1], HAL_RX_RESP_REQ_INFO1_IS_AMPDU);
3073 		tx_ppdu_info->rx_status.num_users =
3074 			u32_get_bits(info[2], HAL_RX_RESP_REQ_INFO2_NUM_USER);
3075 
3076 		addr_32 = u32_get_bits(info[3], HAL_RX_RESP_REQ_INFO3_ADDR1_31_0);
3077 		addr_16 = u32_get_bits(info[3], HAL_RX_RESP_REQ_INFO4_ADDR1_47_32);
3078 		ath12k_dp_get_mac_addr(addr_32, addr_16, tx_ppdu_info->rx_status.addr1);
3079 
3080 		addr_16 = u32_get_bits(info[4], HAL_RX_RESP_REQ_INFO4_ADDR1_15_0);
3081 		addr_32 = u32_get_bits(info[5], HAL_RX_RESP_REQ_INFO5_ADDR1_47_16);
3082 		ath12k_dp_get_mac_addr(addr_32, addr_16, tx_ppdu_info->rx_status.addr2);
3083 
3084 		if (tx_ppdu_info->rx_status.reception_type == 0)
3085 			ath12k_dp_mon_tx_gen_cts2self_frame(tx_ppdu_info);
3086 		status = DP_MON_RX_RESPONSE_REQUIRED_INFO;
3087 		break;
3088 	}
3089 
3090 	case HAL_PCU_PPDU_SETUP_INIT: {
3091 		const struct hal_tx_pcu_ppdu_setup_init *ppdu_setup = tlv_data;
3092 		u32 addr_32;
3093 		u16 addr_16;
3094 
3095 		info[0] = __le32_to_cpu(ppdu_setup->info0);
3096 		info[1] = __le32_to_cpu(ppdu_setup->info1);
3097 		info[2] = __le32_to_cpu(ppdu_setup->info2);
3098 		info[3] = __le32_to_cpu(ppdu_setup->info3);
3099 		info[4] = __le32_to_cpu(ppdu_setup->info4);
3100 		info[5] = __le32_to_cpu(ppdu_setup->info5);
3101 		info[6] = __le32_to_cpu(ppdu_setup->info6);
3102 
3103 		/* protection frame address 1 */
3104 		addr_32 = u32_get_bits(info[1],
3105 				       HAL_TX_PPDU_SETUP_INFO1_PROT_FRAME_ADDR1_31_0);
3106 		addr_16 = u32_get_bits(info[2],
3107 				       HAL_TX_PPDU_SETUP_INFO2_PROT_FRAME_ADDR1_47_32);
3108 		ath12k_dp_get_mac_addr(addr_32, addr_16, tx_ppdu_info->rx_status.addr1);
3109 
3110 		/* protection frame address 2 */
3111 		addr_16 = u32_get_bits(info[2],
3112 				       HAL_TX_PPDU_SETUP_INFO2_PROT_FRAME_ADDR2_15_0);
3113 		addr_32 = u32_get_bits(info[3],
3114 				       HAL_TX_PPDU_SETUP_INFO3_PROT_FRAME_ADDR2_47_16);
3115 		ath12k_dp_get_mac_addr(addr_32, addr_16, tx_ppdu_info->rx_status.addr2);
3116 
3117 		/* protection frame address 3 */
3118 		addr_32 = u32_get_bits(info[4],
3119 				       HAL_TX_PPDU_SETUP_INFO4_PROT_FRAME_ADDR3_31_0);
3120 		addr_16 = u32_get_bits(info[5],
3121 				       HAL_TX_PPDU_SETUP_INFO5_PROT_FRAME_ADDR3_47_32);
3122 		ath12k_dp_get_mac_addr(addr_32, addr_16, tx_ppdu_info->rx_status.addr3);
3123 
3124 		/* protection frame address 4 */
3125 		addr_16 = u32_get_bits(info[5],
3126 				       HAL_TX_PPDU_SETUP_INFO5_PROT_FRAME_ADDR4_15_0);
3127 		addr_32 = u32_get_bits(info[6],
3128 				       HAL_TX_PPDU_SETUP_INFO6_PROT_FRAME_ADDR4_47_16);
3129 		ath12k_dp_get_mac_addr(addr_32, addr_16, tx_ppdu_info->rx_status.addr4);
3130 
3131 		status = u32_get_bits(info[0],
3132 				      HAL_TX_PPDU_SETUP_INFO0_MEDIUM_PROT_TYPE);
3133 		break;
3134 	}
3135 
3136 	case HAL_TX_QUEUE_EXTENSION: {
3137 		const struct hal_tx_queue_exten *tx_q_exten = tlv_data;
3138 
3139 		info[0] = __le32_to_cpu(tx_q_exten->info0);
3140 
3141 		tx_ppdu_info->rx_status.frame_control =
3142 			u32_get_bits(info[0],
3143 				     HAL_TX_Q_EXT_INFO0_FRAME_CTRL);
3144 		tx_ppdu_info->rx_status.fc_valid = true;
3145 		break;
3146 	}
3147 
3148 	case HAL_TX_FES_STATUS_START: {
3149 		const struct hal_tx_fes_status_start *tx_fes_start = tlv_data;
3150 
3151 		info[0] = __le32_to_cpu(tx_fes_start->info0);
3152 
3153 		tx_ppdu_info->rx_status.medium_prot_type =
3154 			u32_get_bits(info[0],
3155 				     HAL_TX_FES_STATUS_START_INFO0_MEDIUM_PROT_TYPE);
3156 		break;
3157 	}
3158 
3159 	case HAL_TX_FES_STATUS_PROT: {
3160 		const struct hal_tx_fes_status_prot *tx_fes_status = tlv_data;
3161 		u32 start_timestamp;
3162 		u32 end_timestamp;
3163 
3164 		info[0] = __le32_to_cpu(tx_fes_status->info0);
3165 		info[1] = __le32_to_cpu(tx_fes_status->info1);
3166 
3167 		start_timestamp =
3168 			u32_get_bits(info[0],
3169 				     HAL_TX_FES_STAT_PROT_INFO0_STRT_FRM_TS_15_0);
3170 		start_timestamp |=
3171 			u32_get_bits(info[0],
3172 				     HAL_TX_FES_STAT_PROT_INFO0_STRT_FRM_TS_31_16) << 15;
3173 		end_timestamp =
3174 			u32_get_bits(info[1],
3175 				     HAL_TX_FES_STAT_PROT_INFO1_END_FRM_TS_15_0);
3176 		end_timestamp |=
3177 			u32_get_bits(info[1],
3178 				     HAL_TX_FES_STAT_PROT_INFO1_END_FRM_TS_31_16) << 15;
3179 		tx_ppdu_info->rx_status.rx_duration = end_timestamp - start_timestamp;
3180 
3181 		ath12k_dp_mon_tx_gen_prot_frame(tx_ppdu_info);
3182 		break;
3183 	}
3184 
3185 	case HAL_TX_FES_STATUS_START_PPDU:
3186 	case HAL_TX_FES_STATUS_START_PROT: {
3187 		const struct hal_tx_fes_status_start_prot *tx_fes_stat_start = tlv_data;
3188 		u64 ppdu_ts;
3189 
3190 		info[0] = __le32_to_cpu(tx_fes_stat_start->info0);
3191 
3192 		tx_ppdu_info->rx_status.ppdu_ts =
3193 			u32_get_bits(info[0],
3194 				     HAL_TX_FES_STAT_STRT_INFO0_PROT_TS_LOWER_32);
3195 		ppdu_ts = (u32_get_bits(info[1],
3196 					HAL_TX_FES_STAT_STRT_INFO1_PROT_TS_UPPER_32));
3197 		tx_ppdu_info->rx_status.ppdu_ts |= ppdu_ts << 32;
3198 		break;
3199 	}
3200 
3201 	case HAL_TX_FES_STATUS_USER_PPDU: {
3202 		const struct hal_tx_fes_status_user_ppdu *tx_fes_usr_ppdu = tlv_data;
3203 
3204 		info[0] = __le32_to_cpu(tx_fes_usr_ppdu->info0);
3205 
3206 		tx_ppdu_info->rx_status.rx_duration =
3207 			u32_get_bits(info[0],
3208 				     HAL_TX_FES_STAT_USR_PPDU_INFO0_DURATION);
3209 		break;
3210 	}
3211 
3212 	case HAL_MACTX_HE_SIG_A_SU:
3213 		ath12k_dp_mon_parse_he_sig_su(tlv_data, &tx_ppdu_info->rx_status);
3214 		break;
3215 
3216 	case HAL_MACTX_HE_SIG_A_MU_DL:
3217 		ath12k_dp_mon_parse_he_sig_mu(tlv_data, &tx_ppdu_info->rx_status);
3218 		break;
3219 
3220 	case HAL_MACTX_HE_SIG_B1_MU:
3221 		ath12k_dp_mon_parse_he_sig_b1_mu(tlv_data, &tx_ppdu_info->rx_status);
3222 		break;
3223 
3224 	case HAL_MACTX_HE_SIG_B2_MU:
3225 		ath12k_dp_mon_parse_he_sig_b2_mu(tlv_data, &tx_ppdu_info->rx_status);
3226 		break;
3227 
3228 	case HAL_MACTX_HE_SIG_B2_OFDMA:
3229 		ath12k_dp_mon_parse_he_sig_b2_ofdma(tlv_data, &tx_ppdu_info->rx_status);
3230 		break;
3231 
3232 	case HAL_MACTX_VHT_SIG_A:
3233 		ath12k_dp_mon_parse_vht_sig_a(tlv_data, &tx_ppdu_info->rx_status);
3234 		break;
3235 
3236 	case HAL_MACTX_L_SIG_A:
3237 		ath12k_dp_mon_parse_l_sig_a(tlv_data, &tx_ppdu_info->rx_status);
3238 		break;
3239 
3240 	case HAL_MACTX_L_SIG_B:
3241 		ath12k_dp_mon_parse_l_sig_b(tlv_data, &tx_ppdu_info->rx_status);
3242 		break;
3243 
3244 	case HAL_RX_FRAME_BITMAP_ACK: {
3245 		const struct hal_rx_frame_bitmap_ack *fbm_ack = tlv_data;
3246 		u32 addr_32;
3247 		u16 addr_16;
3248 
3249 		info[0] = __le32_to_cpu(fbm_ack->info0);
3250 		info[1] = __le32_to_cpu(fbm_ack->info1);
3251 
3252 		addr_32 = u32_get_bits(info[0],
3253 				       HAL_RX_FBM_ACK_INFO0_ADDR1_31_0);
3254 		addr_16 = u32_get_bits(info[1],
3255 				       HAL_RX_FBM_ACK_INFO1_ADDR1_47_32);
3256 		ath12k_dp_get_mac_addr(addr_32, addr_16, tx_ppdu_info->rx_status.addr1);
3257 
3258 		ath12k_dp_mon_tx_gen_ack_frame(tx_ppdu_info);
3259 		break;
3260 	}
3261 
3262 	case HAL_MACTX_PHY_DESC: {
3263 		const struct hal_tx_phy_desc *tx_phy_desc = tlv_data;
3264 
3265 		info[0] = __le32_to_cpu(tx_phy_desc->info0);
3266 		info[1] = __le32_to_cpu(tx_phy_desc->info1);
3267 		info[2] = __le32_to_cpu(tx_phy_desc->info2);
3268 		info[3] = __le32_to_cpu(tx_phy_desc->info3);
3269 
3270 		tx_ppdu_info->rx_status.beamformed =
3271 			u32_get_bits(info[0],
3272 				     HAL_TX_PHY_DESC_INFO0_BF_TYPE);
3273 		tx_ppdu_info->rx_status.preamble_type =
3274 			u32_get_bits(info[0],
3275 				     HAL_TX_PHY_DESC_INFO0_PREAMBLE_11B);
3276 		tx_ppdu_info->rx_status.mcs =
3277 			u32_get_bits(info[1],
3278 				     HAL_TX_PHY_DESC_INFO1_MCS);
3279 		tx_ppdu_info->rx_status.ltf_size =
3280 			u32_get_bits(info[3],
3281 				     HAL_TX_PHY_DESC_INFO3_LTF_SIZE);
3282 		tx_ppdu_info->rx_status.nss =
3283 			u32_get_bits(info[2],
3284 				     HAL_TX_PHY_DESC_INFO2_NSS);
3285 		tx_ppdu_info->rx_status.chan_num =
3286 			u32_get_bits(info[3],
3287 				     HAL_TX_PHY_DESC_INFO3_ACTIVE_CHANNEL);
3288 		tx_ppdu_info->rx_status.bw =
3289 			u32_get_bits(info[0],
3290 				     HAL_TX_PHY_DESC_INFO0_BANDWIDTH);
3291 		break;
3292 	}
3293 
3294 	case HAL_TX_MPDU_START: {
3295 		struct dp_mon_mpdu *mon_mpdu = tx_ppdu_info->tx_mon_mpdu;
3296 
3297 		mon_mpdu = kzalloc(sizeof(*mon_mpdu), GFP_ATOMIC);
3298 		if (!mon_mpdu)
3299 			return DP_MON_TX_STATUS_PPDU_NOT_DONE;
3300 		status = DP_MON_TX_MPDU_START;
3301 		break;
3302 	}
3303 
3304 	case HAL_TX_MPDU_END:
3305 		list_add_tail(&tx_ppdu_info->tx_mon_mpdu->list,
3306 			      &tx_ppdu_info->dp_tx_mon_mpdu_list);
3307 		break;
3308 	}
3309 
3310 	return status;
3311 }
3312 
3313 enum dp_mon_tx_tlv_status
3314 ath12k_dp_mon_tx_status_get_num_user(u16 tlv_tag,
3315 				     struct hal_tlv_hdr *tx_tlv,
3316 				     u8 *num_users)
3317 {
3318 	u32 tlv_status = DP_MON_TX_STATUS_PPDU_NOT_DONE;
3319 	u32 info0;
3320 
3321 	switch (tlv_tag) {
3322 	case HAL_TX_FES_SETUP: {
3323 		struct hal_tx_fes_setup *tx_fes_setup =
3324 				(struct hal_tx_fes_setup *)tx_tlv;
3325 
3326 		info0 = __le32_to_cpu(tx_fes_setup->info0);
3327 
3328 		*num_users = u32_get_bits(info0, HAL_TX_FES_SETUP_INFO0_NUM_OF_USERS);
3329 		tlv_status = DP_MON_TX_FES_SETUP;
3330 		break;
3331 	}
3332 
3333 	case HAL_RX_RESPONSE_REQUIRED_INFO: {
3334 		/* TODO: need to update *num_users */
3335 		tlv_status = DP_MON_RX_RESPONSE_REQUIRED_INFO;
3336 		break;
3337 	}
3338 	}
3339 
3340 	return tlv_status;
3341 }
3342 
3343 static void
3344 ath12k_dp_mon_tx_process_ppdu_info(struct ath12k *ar,
3345 				   struct napi_struct *napi,
3346 				   struct dp_mon_tx_ppdu_info *tx_ppdu_info)
3347 {
3348 	struct dp_mon_mpdu *tmp, *mon_mpdu;
3349 
3350 	list_for_each_entry_safe(mon_mpdu, tmp,
3351 				 &tx_ppdu_info->dp_tx_mon_mpdu_list, list) {
3352 		list_del(&mon_mpdu->list);
3353 
3354 		if (mon_mpdu->head)
3355 			ath12k_dp_mon_rx_deliver(ar, mon_mpdu,
3356 						 &tx_ppdu_info->rx_status, napi);
3357 
3358 		kfree(mon_mpdu);
3359 	}
3360 }
3361 
3362 enum hal_rx_mon_status
3363 ath12k_dp_mon_tx_parse_mon_status(struct ath12k *ar,
3364 				  struct ath12k_mon_data *pmon,
3365 				  struct sk_buff *skb,
3366 				  struct napi_struct *napi,
3367 				  u32 ppdu_id)
3368 {
3369 	struct ath12k_base *ab = ar->ab;
3370 	struct dp_mon_tx_ppdu_info *tx_prot_ppdu_info, *tx_data_ppdu_info;
3371 	struct hal_tlv_hdr *tlv;
3372 	u8 *ptr = skb->data;
3373 	u16 tlv_tag;
3374 	u16 tlv_len;
3375 	u32 tlv_userid = 0;
3376 	u8 num_user;
3377 	u32 tlv_status = DP_MON_TX_STATUS_PPDU_NOT_DONE;
3378 
3379 	tx_prot_ppdu_info = ath12k_dp_mon_tx_get_ppdu_info(pmon, ppdu_id,
3380 							   DP_MON_TX_PROT_PPDU_INFO);
3381 	if (!tx_prot_ppdu_info)
3382 		return -ENOMEM;
3383 
3384 	tlv = (struct hal_tlv_hdr *)ptr;
3385 	tlv_tag = le32_get_bits(tlv->tl, HAL_TLV_HDR_TAG);
3386 
3387 	tlv_status = ath12k_dp_mon_tx_status_get_num_user(tlv_tag, tlv, &num_user);
3388 	if (tlv_status == DP_MON_TX_STATUS_PPDU_NOT_DONE || !num_user)
3389 		return -EINVAL;
3390 
3391 	tx_data_ppdu_info = ath12k_dp_mon_tx_get_ppdu_info(pmon, ppdu_id,
3392 							   DP_MON_TX_DATA_PPDU_INFO);
3393 	if (!tx_data_ppdu_info)
3394 		return -ENOMEM;
3395 
3396 	do {
3397 		tlv = (struct hal_tlv_hdr *)ptr;
3398 		tlv_tag = le32_get_bits(tlv->tl, HAL_TLV_HDR_TAG);
3399 		tlv_len = le32_get_bits(tlv->tl, HAL_TLV_HDR_LEN);
3400 		tlv_userid = le32_get_bits(tlv->tl, HAL_TLV_USR_ID);
3401 
3402 		tlv_status = ath12k_dp_mon_tx_parse_status_tlv(ab, pmon,
3403 							       tlv_tag, ptr,
3404 							       tlv_userid);
3405 		ptr += tlv_len;
3406 		ptr = PTR_ALIGN(ptr, HAL_TLV_ALIGN);
3407 		if ((ptr - skb->data) >= DP_TX_MONITOR_BUF_SIZE)
3408 			break;
3409 	} while (tlv_status != DP_MON_TX_FES_STATUS_END);
3410 
3411 	ath12k_dp_mon_tx_process_ppdu_info(ar, napi, tx_data_ppdu_info);
3412 	ath12k_dp_mon_tx_process_ppdu_info(ar, napi, tx_prot_ppdu_info);
3413 
3414 	return tlv_status;
3415 }
3416 
3417 static void
3418 ath12k_dp_mon_rx_update_peer_rate_table_stats(struct ath12k_rx_peer_stats *rx_stats,
3419 					      struct hal_rx_mon_ppdu_info *ppdu_info,
3420 					      struct hal_rx_user_status *user_stats,
3421 					      u32 num_msdu)
3422 {
3423 	struct ath12k_rx_peer_rate_stats *stats;
3424 	u32 mcs_idx = (user_stats) ? user_stats->mcs : ppdu_info->mcs;
3425 	u32 nss_idx = (user_stats) ? user_stats->nss - 1 : ppdu_info->nss - 1;
3426 	u32 bw_idx = ppdu_info->bw;
3427 	u32 gi_idx = ppdu_info->gi;
3428 	u32 len;
3429 
3430 	if (mcs_idx > HAL_RX_MAX_MCS_HT || nss_idx >= HAL_RX_MAX_NSS ||
3431 	    bw_idx >= HAL_RX_BW_MAX || gi_idx >= HAL_RX_GI_MAX) {
3432 		return;
3433 	}
3434 
3435 	if (ppdu_info->preamble_type == HAL_RX_PREAMBLE_11AX ||
3436 	    ppdu_info->preamble_type == HAL_RX_PREAMBLE_11BE)
3437 		gi_idx = ath12k_he_gi_to_nl80211_he_gi(ppdu_info->gi);
3438 
3439 	rx_stats->pkt_stats.rx_rate[bw_idx][gi_idx][nss_idx][mcs_idx] += num_msdu;
3440 	stats = &rx_stats->byte_stats;
3441 
3442 	if (user_stats)
3443 		len = user_stats->mpdu_ok_byte_count;
3444 	else
3445 		len = ppdu_info->mpdu_len;
3446 
3447 	stats->rx_rate[bw_idx][gi_idx][nss_idx][mcs_idx] += len;
3448 }
3449 
/* Fold a single-user PPDU's parsed status into the peer's RX statistics.
 *
 * Updates arsta's RSSI unconditionally, then (if peer stats are enabled)
 * accumulates MSDU/MPDU counters, per-preamble/MCS/NSS/GI/BW packet and
 * byte counters, and finally the 4-D rate table.  Statement order matters:
 * ppdu_info fields (nss/mcs/tid) are normalized in place for legacy
 * preambles, and the HT MCS is remapped (% 8) before the rate-table update.
 */
static void ath12k_dp_mon_rx_update_peer_su_stats(struct ath12k *ar,
						  struct ath12k_link_sta *arsta,
						  struct hal_rx_mon_ppdu_info *ppdu_info)
{
	struct ath12k_rx_peer_stats *rx_stats = arsta->rx_stats;
	u32 num_msdu;

	/* RSSI is tracked even when detailed peer stats are disabled */
	arsta->rssi_comb = ppdu_info->rssi_comb;
	ewma_avg_rssi_add(&arsta->avg_rssi, ppdu_info->rssi_comb);
	if (!rx_stats)
		return;

	num_msdu = ppdu_info->tcp_msdu_count + ppdu_info->tcp_ack_msdu_count +
		   ppdu_info->udp_msdu_count + ppdu_info->other_msdu_count;

	rx_stats->num_msdu += num_msdu;
	rx_stats->tcp_msdu_count += ppdu_info->tcp_msdu_count +
				    ppdu_info->tcp_ack_msdu_count;
	rx_stats->udp_msdu_count += ppdu_info->udp_msdu_count;
	rx_stats->other_msdu_count += ppdu_info->other_msdu_count;

	/* Legacy (11a/11b) PPDUs carry no NSS/MCS/TID; normalize to the
	 * sentinel values used by the counters below.
	 */
	if (ppdu_info->preamble_type == HAL_RX_PREAMBLE_11A ||
	    ppdu_info->preamble_type == HAL_RX_PREAMBLE_11B) {
		ppdu_info->nss = 1;
		ppdu_info->mcs = HAL_RX_MAX_MCS;
		ppdu_info->tid = IEEE80211_NUM_TIDS;
	}

	if (ppdu_info->ldpc < HAL_RX_SU_MU_CODING_MAX)
		rx_stats->coding_count[ppdu_info->ldpc] += num_msdu;

	/* tid_count has IEEE80211_NUM_TIDS + 1 slots; the extra slot holds
	 * the non-QoS sentinel set above.
	 */
	if (ppdu_info->tid <= IEEE80211_NUM_TIDS)
		rx_stats->tid_count[ppdu_info->tid] += num_msdu;

	if (ppdu_info->preamble_type < HAL_RX_PREAMBLE_MAX)
		rx_stats->pream_cnt[ppdu_info->preamble_type] += num_msdu;

	if (ppdu_info->reception_type < HAL_RX_RECEPTION_TYPE_MAX)
		rx_stats->reception_type[ppdu_info->reception_type] += num_msdu;

	if (ppdu_info->is_stbc)
		rx_stats->stbc_count += num_msdu;

	if (ppdu_info->beamformed)
		rx_stats->beamformed_count += num_msdu;

	/* more than one FCS-ok MPDU in the PPDU implies an A-MPDU */
	if (ppdu_info->num_mpdu_fcs_ok > 1)
		rx_stats->ampdu_msdu_count += num_msdu;
	else
		rx_stats->non_ampdu_msdu_count += num_msdu;

	rx_stats->num_mpdu_fcs_ok += ppdu_info->num_mpdu_fcs_ok;
	rx_stats->num_mpdu_fcs_err += ppdu_info->num_mpdu_fcs_err;
	rx_stats->dcm_count += ppdu_info->dcm;

	rx_stats->rx_duration += ppdu_info->rx_duration;
	arsta->rx_duration = rx_stats->rx_duration;

	if (ppdu_info->nss > 0 && ppdu_info->nss <= HAL_RX_MAX_NSS) {
		rx_stats->pkt_stats.nss_count[ppdu_info->nss - 1] += num_msdu;
		rx_stats->byte_stats.nss_count[ppdu_info->nss - 1] += ppdu_info->mpdu_len;
	}

	if (ppdu_info->preamble_type == HAL_RX_PREAMBLE_11N &&
	    ppdu_info->mcs <= HAL_RX_MAX_MCS_HT) {
		rx_stats->pkt_stats.ht_mcs_count[ppdu_info->mcs] += num_msdu;
		rx_stats->byte_stats.ht_mcs_count[ppdu_info->mcs] += ppdu_info->mpdu_len;
		/* To fit into rate table for HT packets */
		ppdu_info->mcs = ppdu_info->mcs % 8;
	}

	if (ppdu_info->preamble_type == HAL_RX_PREAMBLE_11AC &&
	    ppdu_info->mcs <= HAL_RX_MAX_MCS_VHT) {
		rx_stats->pkt_stats.vht_mcs_count[ppdu_info->mcs] += num_msdu;
		rx_stats->byte_stats.vht_mcs_count[ppdu_info->mcs] += ppdu_info->mpdu_len;
	}

	if (ppdu_info->preamble_type == HAL_RX_PREAMBLE_11AX &&
	    ppdu_info->mcs <= HAL_RX_MAX_MCS_HE) {
		rx_stats->pkt_stats.he_mcs_count[ppdu_info->mcs] += num_msdu;
		rx_stats->byte_stats.he_mcs_count[ppdu_info->mcs] += ppdu_info->mpdu_len;
	}

	if (ppdu_info->preamble_type == HAL_RX_PREAMBLE_11BE &&
	    ppdu_info->mcs <= HAL_RX_MAX_MCS_BE) {
		rx_stats->pkt_stats.be_mcs_count[ppdu_info->mcs] += num_msdu;
		rx_stats->byte_stats.be_mcs_count[ppdu_info->mcs] += ppdu_info->mpdu_len;
	}

	if ((ppdu_info->preamble_type == HAL_RX_PREAMBLE_11A ||
	     ppdu_info->preamble_type == HAL_RX_PREAMBLE_11B) &&
	     ppdu_info->rate < HAL_RX_LEGACY_RATE_INVALID) {
		rx_stats->pkt_stats.legacy_count[ppdu_info->rate] += num_msdu;
		rx_stats->byte_stats.legacy_count[ppdu_info->rate] += ppdu_info->mpdu_len;
	}

	if (ppdu_info->gi < HAL_RX_GI_MAX) {
		rx_stats->pkt_stats.gi_count[ppdu_info->gi] += num_msdu;
		rx_stats->byte_stats.gi_count[ppdu_info->gi] += ppdu_info->mpdu_len;
	}

	if (ppdu_info->bw < HAL_RX_BW_MAX) {
		rx_stats->pkt_stats.bw_count[ppdu_info->bw] += num_msdu;
		rx_stats->byte_stats.bw_count[ppdu_info->bw] += ppdu_info->mpdu_len;
	}

	ath12k_dp_mon_rx_update_peer_rate_table_stats(rx_stats, ppdu_info,
						      NULL, num_msdu);
}
3559 
/* Unpack per-user UL OFDMA/MU info words into each user's status.
 *
 * Only runs for MU receptions (MIMO, OFDMA, or both).  For each user whose
 * v0 word0 is valid and at version 0, decodes MCS, NSS, RU start index and
 * RU size; LDPC is decoded for every user regardless of the valid bit.
 */
void ath12k_dp_mon_rx_process_ulofdma(struct hal_rx_mon_ppdu_info *ppdu_info)
{
	struct hal_rx_user_status *rx_user_status;
	u32 num_users, i, mu_ul_user_v0_word0, mu_ul_user_v0_word1, ru_size;

	if (!(ppdu_info->reception_type == HAL_RX_RECEPTION_TYPE_MU_MIMO ||
	      ppdu_info->reception_type == HAL_RX_RECEPTION_TYPE_MU_OFDMA ||
	      ppdu_info->reception_type == HAL_RX_RECEPTION_TYPE_MU_OFDMA_MIMO))
		return;

	/* clamp to the userstats array bound */
	num_users = ppdu_info->num_users;
	if (num_users > HAL_MAX_UL_MU_USERS)
		num_users = HAL_MAX_UL_MU_USERS;

	for (i = 0; i < num_users; i++) {
		rx_user_status = &ppdu_info->userstats[i];
		mu_ul_user_v0_word0 =
			rx_user_status->ul_ofdma_user_v0_word0;
		mu_ul_user_v0_word1 =
			rx_user_status->ul_ofdma_user_v0_word1;

		/* word1 fields are meaningful only for valid, version-0 words */
		if (u32_get_bits(mu_ul_user_v0_word0,
				 HAL_RX_UL_OFDMA_USER_INFO_V0_W0_VALID) &&
		    !u32_get_bits(mu_ul_user_v0_word0,
				  HAL_RX_UL_OFDMA_USER_INFO_V0_W0_VER)) {
			rx_user_status->mcs =
				u32_get_bits(mu_ul_user_v0_word1,
					     HAL_RX_UL_OFDMA_USER_INFO_V0_W1_MCS);
			/* hardware NSS field is zero-based */
			rx_user_status->nss =
				u32_get_bits(mu_ul_user_v0_word1,
					     HAL_RX_UL_OFDMA_USER_INFO_V0_W1_NSS) + 1;

			rx_user_status->ofdma_info_valid = 1;
			rx_user_status->ul_ofdma_ru_start_index =
				u32_get_bits(mu_ul_user_v0_word1,
					     HAL_RX_UL_OFDMA_USER_INFO_V0_W1_RU_START);

			ru_size = u32_get_bits(mu_ul_user_v0_word1,
					       HAL_RX_UL_OFDMA_USER_INFO_V0_W1_RU_SIZE);
			rx_user_status->ul_ofdma_ru_width = ru_size;
			rx_user_status->ul_ofdma_ru_size = ru_size;
		}
		rx_user_status->ldpc = u32_get_bits(mu_ul_user_v0_word1,
						    HAL_RX_UL_OFDMA_USER_INFO_V0_W1_LDPC);
	}
	/* NOTE(review): ldpc is forced to 1 for every MU PPDU regardless of
	 * the per-user LDPC bits decoded above — confirm this is intended.
	 */
	ppdu_info->ldpc = 1;
}
3607 
/* Fold one MU user's parsed PPDU status into that user's peer RX stats.
 *
 * Looks up the peer by AST index (0 and 0xFFFF are treated as invalid),
 * updates the link-sta RSSI, then accumulates MSDU/MPDU, RU-allocation and
 * per-MCS/NSS/GI/BW counters, finishing with the 4-D rate table.
 */
static void
ath12k_dp_mon_rx_update_user_stats(struct ath12k *ar,
				   struct hal_rx_mon_ppdu_info *ppdu_info,
				   u32 uid)
{
	struct ath12k_sta *ahsta;
	struct ath12k_link_sta *arsta;
	struct ath12k_rx_peer_stats *rx_stats = NULL;
	struct hal_rx_user_status *user_stats = &ppdu_info->userstats[uid];
	struct ath12k_peer *peer;
	u32 num_msdu;

	/* 0 / 0xFFFF mark an unset AST index for this user */
	if (user_stats->ast_index == 0 || user_stats->ast_index == 0xFFFF)
		return;

	/* NOTE(review): peer lookup and use appear to rely on the caller
	 * holding the appropriate peer lock — confirm against callers.
	 */
	peer = ath12k_peer_find_by_ast(ar->ab, user_stats->ast_index);

	if (!peer) {
		ath12k_warn(ar->ab, "peer ast idx %d can't be found\n",
			    user_stats->ast_index);
		return;
	}

	ahsta = ath12k_sta_to_ahsta(peer->sta);
	arsta = &ahsta->deflink;
	arsta->rssi_comb = ppdu_info->rssi_comb;
	ewma_avg_rssi_add(&arsta->avg_rssi, ppdu_info->rssi_comb);
	rx_stats = arsta->rx_stats;
	if (!rx_stats)
		return;

	num_msdu = user_stats->tcp_msdu_count + user_stats->tcp_ack_msdu_count +
		   user_stats->udp_msdu_count + user_stats->other_msdu_count;

	rx_stats->num_msdu += num_msdu;
	rx_stats->tcp_msdu_count += user_stats->tcp_msdu_count +
				    user_stats->tcp_ack_msdu_count;
	rx_stats->udp_msdu_count += user_stats->udp_msdu_count;
	rx_stats->other_msdu_count += user_stats->other_msdu_count;

	if (ppdu_info->ldpc < HAL_RX_SU_MU_CODING_MAX)
		rx_stats->coding_count[ppdu_info->ldpc] += num_msdu;

	if (user_stats->tid <= IEEE80211_NUM_TIDS)
		rx_stats->tid_count[user_stats->tid] += num_msdu;

	if (user_stats->preamble_type < HAL_RX_PREAMBLE_MAX)
		rx_stats->pream_cnt[user_stats->preamble_type] += num_msdu;

	if (ppdu_info->reception_type < HAL_RX_RECEPTION_TYPE_MAX)
		rx_stats->reception_type[ppdu_info->reception_type] += num_msdu;

	if (ppdu_info->is_stbc)
		rx_stats->stbc_count += num_msdu;

	if (ppdu_info->beamformed)
		rx_stats->beamformed_count += num_msdu;

	/* more than one FCS-ok MPDU for this user implies an A-MPDU */
	if (user_stats->mpdu_cnt_fcs_ok > 1)
		rx_stats->ampdu_msdu_count += num_msdu;
	else
		rx_stats->non_ampdu_msdu_count += num_msdu;

	rx_stats->num_mpdu_fcs_ok += user_stats->mpdu_cnt_fcs_ok;
	rx_stats->num_mpdu_fcs_err += user_stats->mpdu_cnt_fcs_err;
	rx_stats->dcm_count += ppdu_info->dcm;
	/* NOTE(review): ul_ofdma_ru_size indexes ru_alloc_cnt without a
	 * bounds check here — verify the decode guarantees it is in range.
	 */
	if (ppdu_info->reception_type == HAL_RX_RECEPTION_TYPE_MU_OFDMA ||
	    ppdu_info->reception_type == HAL_RX_RECEPTION_TYPE_MU_OFDMA_MIMO)
		rx_stats->ru_alloc_cnt[user_stats->ul_ofdma_ru_size] += num_msdu;

	rx_stats->rx_duration += ppdu_info->rx_duration;
	arsta->rx_duration = rx_stats->rx_duration;

	if (user_stats->nss > 0 && user_stats->nss <= HAL_RX_MAX_NSS) {
		rx_stats->pkt_stats.nss_count[user_stats->nss - 1] += num_msdu;
		rx_stats->byte_stats.nss_count[user_stats->nss - 1] +=
						user_stats->mpdu_ok_byte_count;
	}

	if (user_stats->preamble_type == HAL_RX_PREAMBLE_11AX &&
	    user_stats->mcs <= HAL_RX_MAX_MCS_HE) {
		rx_stats->pkt_stats.he_mcs_count[user_stats->mcs] += num_msdu;
		rx_stats->byte_stats.he_mcs_count[user_stats->mcs] +=
						user_stats->mpdu_ok_byte_count;
	}

	if (ppdu_info->gi < HAL_RX_GI_MAX) {
		rx_stats->pkt_stats.gi_count[ppdu_info->gi] += num_msdu;
		rx_stats->byte_stats.gi_count[ppdu_info->gi] +=
						user_stats->mpdu_ok_byte_count;
	}

	if (ppdu_info->bw < HAL_RX_BW_MAX) {
		rx_stats->pkt_stats.bw_count[ppdu_info->bw] += num_msdu;
		rx_stats->byte_stats.bw_count[ppdu_info->bw] +=
						user_stats->mpdu_ok_byte_count;
	}

	ath12k_dp_mon_rx_update_peer_rate_table_stats(rx_stats, ppdu_info,
						      user_stats, num_msdu);
}
3709 
3710 static void
3711 ath12k_dp_mon_rx_update_peer_mu_stats(struct ath12k *ar,
3712 				      struct hal_rx_mon_ppdu_info *ppdu_info)
3713 {
3714 	u32 num_users, i;
3715 
3716 	num_users = ppdu_info->num_users;
3717 	if (num_users > HAL_MAX_UL_MU_USERS)
3718 		num_users = HAL_MAX_UL_MU_USERS;
3719 
3720 	for (i = 0; i < num_users; i++)
3721 		ath12k_dp_mon_rx_update_user_stats(ar, ppdu_info, i);
3722 }
3723 
/* Reset the parsed PPDU state before a fresh PPDU is parsed. peer_id is
 * set to HAL_INVALID_PEERID so a stale ID from a previous PPDU can never
 * be matched against the peer table.
 */
static void
ath12k_dp_mon_rx_memset_ppdu_info(struct hal_rx_mon_ppdu_info *ppdu_info)
{
	memset(ppdu_info, 0, sizeof(*ppdu_info));
	ppdu_info->peer_id = HAL_INVALID_PEERID;
}
3730 
/* Reap buffers from the rxdma monitor destination ring (up to *budget
 * entries), then parse the collected status buffers and update per-peer
 * rx statistics: SU stats directly, MU stats per user.
 *
 * Returns the number of ring entries reaped.
 */
int ath12k_dp_mon_srng_process(struct ath12k *ar, int *budget,
			       struct napi_struct *napi)
{
	struct ath12k_base *ab = ar->ab;
	struct ath12k_pdev_dp *pdev_dp = &ar->dp;
	struct ath12k_mon_data *pmon = (struct ath12k_mon_data *)&pdev_dp->mon_data;
	struct hal_rx_mon_ppdu_info *ppdu_info = &pmon->mon_ppdu_info;
	struct ath12k_dp *dp = &ab->dp;
	struct hal_mon_dest_desc *mon_dst_desc;
	struct sk_buff *skb;
	struct ath12k_skb_rxcb *rxcb;
	struct dp_srng *mon_dst_ring;
	struct hal_srng *srng;
	struct dp_rxdma_mon_ring *buf_ring;
	struct ath12k_sta *ahsta = NULL;
	struct ath12k_link_sta *arsta;
	struct ath12k_peer *peer;
	struct sk_buff_head skb_list;
	u64 cookie;
	int num_buffs_reaped = 0, srng_id, buf_id;
	u32 hal_status, end_offset, info0, end_reason;
	u8 pdev_idx = ath12k_hw_mac_id_to_pdev_id(ab->hw_params, ar->pdev_idx);

	__skb_queue_head_init(&skb_list);
	srng_id = ath12k_hw_mac_id_to_srng_id(ab->hw_params, pdev_idx);
	mon_dst_ring = &pdev_dp->rxdma_mon_dst_ring[srng_id];
	buf_ring = &dp->rxdma_mon_buf_ring;

	srng = &ab->hal.srng_list[mon_dst_ring->ring_id];
	spin_lock_bh(&srng->lock);
	ath12k_hal_srng_access_begin(ab, srng);

	/* Phase 1: reap descriptors under the srng lock and queue the
	 * associated skbs on skb_list; parsing is deferred until the lock
	 * has been dropped.
	 */
	while (likely(*budget)) {
		*budget -= 1;
		mon_dst_desc = ath12k_hal_srng_dst_peek(ab, srng);
		if (unlikely(!mon_dst_desc))
			break;

		/* In case of empty descriptor, the cookie in the ring descriptor
		 * is invalid. Therefore, this entry is skipped, and ring processing
		 * continues.
		 */
		info0 = le32_to_cpu(mon_dst_desc->info0);
		if (u32_get_bits(info0, HAL_MON_DEST_INFO0_EMPTY_DESC))
			goto move_next;

		cookie = le32_to_cpu(mon_dst_desc->cookie);
		buf_id = u32_get_bits(cookie, DP_RXDMA_BUF_COOKIE_BUF_ID);

		spin_lock_bh(&buf_ring->idr_lock);
		skb = idr_remove(&buf_ring->bufs_idr, buf_id);
		spin_unlock_bh(&buf_ring->idr_lock);

		if (unlikely(!skb)) {
			ath12k_warn(ab, "monitor destination with invalid buf_id %d\n",
				    buf_id);
			goto move_next;
		}

		/* Hardware is done with this buffer; hand it back to the CPU. */
		rxcb = ATH12K_SKB_RXCB(skb);
		dma_unmap_single(ab->dev, rxcb->paddr,
				 skb->len + skb_tailroom(skb),
				 DMA_FROM_DEVICE);

		end_reason = u32_get_bits(info0, HAL_MON_DEST_INFO0_END_REASON);

		/* HAL_MON_FLUSH_DETECTED implies that an rx flush received at the end of
		 * rx PPDU and HAL_MON_PPDU_TRUNCATED implies that the PPDU got
		 * truncated due to a system level error. In both the cases, buffer data
		 * can be discarded
		 */
		if ((end_reason == HAL_MON_FLUSH_DETECTED) ||
		    (end_reason == HAL_MON_PPDU_TRUNCATED)) {
			ath12k_dbg(ab, ATH12K_DBG_DATA,
				   "Monitor dest descriptor end reason %d", end_reason);
			dev_kfree_skb_any(skb);
			goto move_next;
		}

		/* Calculate the budget when the ring descriptor with the
		 * HAL_MON_END_OF_PPDU to ensure that one PPDU worth of data is always
		 * reaped. This helps to efficiently utilize the NAPI budget.
		 */
		if (end_reason == HAL_MON_END_OF_PPDU) {
			*budget -= 1;
			rxcb->is_end_of_ppdu = true;
		}

		/* Clamp the payload length to the buffer size to guard against
		 * a bogus end offset reported by hardware.
		 */
		end_offset = u32_get_bits(info0, HAL_MON_DEST_INFO0_END_OFFSET);
		if (likely(end_offset <= DP_RX_BUFFER_SIZE)) {
			skb_put(skb, end_offset);
		} else {
			ath12k_warn(ab,
				    "invalid offset on mon stats destination %u\n",
				    end_offset);
			skb_put(skb, DP_RX_BUFFER_SIZE);
		}

		__skb_queue_tail(&skb_list, skb);

move_next:
		ath12k_dp_mon_buf_replenish(ab, buf_ring, 1);
		ath12k_hal_srng_dst_get_next_entry(ab, srng);
		num_buffs_reaped++;
	}

	ath12k_hal_srng_access_end(ab, srng);
	spin_unlock_bh(&srng->lock);

	if (!num_buffs_reaped)
		return 0;

	/* In some cases, one PPDU worth of data can be spread across multiple NAPI
	 * schedules, To avoid losing existing parsed ppdu_info information, skip
	 * the memset of the ppdu_info structure and continue processing it.
	 */
	if (!ppdu_info->ppdu_continuation)
		ath12k_dp_mon_rx_memset_ppdu_info(ppdu_info);

	/* Phase 2: parse the collected status skbs and, once a full PPDU has
	 * been parsed, update the matching peer's rx statistics.
	 */
	while ((skb = __skb_dequeue(&skb_list))) {
		hal_status = ath12k_dp_mon_rx_parse_mon_status(ar, pmon, skb, napi);
		if (hal_status != HAL_RX_MON_STATUS_PPDU_DONE) {
			/* PPDU not complete yet; keep the parsed state for
			 * the next NAPI schedule.
			 */
			ppdu_info->ppdu_continuation = true;
			dev_kfree_skb_any(skb);
			continue;
		}

		if (ppdu_info->peer_id == HAL_INVALID_PEERID)
			goto free_skb;

		/* base_lock protects the peer table for the lookup below. */
		rcu_read_lock();
		spin_lock_bh(&ab->base_lock);
		peer = ath12k_peer_find_by_id(ab, ppdu_info->peer_id);
		if (!peer || !peer->sta) {
			ath12k_dbg(ab, ATH12K_DBG_DATA,
				   "failed to find the peer with monitor peer_id %d\n",
				   ppdu_info->peer_id);
			goto next_skb;
		}

		if (ppdu_info->reception_type == HAL_RX_RECEPTION_TYPE_SU) {
			ahsta = ath12k_sta_to_ahsta(peer->sta);
			arsta = &ahsta->deflink;
			ath12k_dp_mon_rx_update_peer_su_stats(ar, arsta,
							      ppdu_info);
		} else if ((ppdu_info->fc_valid) &&
			   (ppdu_info->ast_index != HAL_AST_IDX_INVALID)) {
			ath12k_dp_mon_rx_process_ulofdma(ppdu_info);
			ath12k_dp_mon_rx_update_peer_mu_stats(ar, ppdu_info);
		}

next_skb:
		spin_unlock_bh(&ab->base_lock);
		rcu_read_unlock();
free_skb:
		dev_kfree_skb_any(skb);
		ath12k_dp_mon_rx_memset_ppdu_info(ppdu_info);
	}

	return num_buffs_reaped;
}
3892 
/* Reap completed buffers from the rx monitor status refill ring into
 * skb_list and replenish the ring with freshly allocated buffers.
 *
 * A buffer is accepted only when its leading TLV carries
 * HAL_RX_STATUS_BUFFER_DONE; otherwise the DMA-done workaround below
 * decides whether to skip or re-poll the entry.
 *
 * Returns the number of ring entries processed.
 */
static int ath12k_dp_rx_reap_mon_status_ring(struct ath12k_base *ab, int mac_id,
					     int *budget, struct sk_buff_head *skb_list)
{
	const struct ath12k_hw_hal_params *hal_params;
	int buf_id, srng_id, num_buffs_reaped = 0;
	enum dp_mon_status_buf_state reap_status;
	struct dp_rxdma_mon_ring *rx_ring;
	struct ath12k_mon_data *pmon;
	struct ath12k_skb_rxcb *rxcb;
	struct hal_tlv_64_hdr *tlv;
	void *rx_mon_status_desc;
	struct hal_srng *srng;
	struct ath12k_dp *dp;
	struct sk_buff *skb;
	struct ath12k *ar;
	dma_addr_t paddr;
	u32 cookie;
	u8 rbm;

	ar = ab->pdevs[ath12k_hw_mac_id_to_pdev_id(ab->hw_params, mac_id)].ar;
	dp = &ab->dp;
	pmon = &ar->dp.mon_data;
	srng_id = ath12k_hw_mac_id_to_srng_id(ab->hw_params, mac_id);
	rx_ring = &dp->rx_mon_status_refill_ring[srng_id];

	srng = &ab->hal.srng_list[rx_ring->refill_buf_ring.ring_id];

	spin_lock_bh(&srng->lock);

	ath12k_hal_srng_access_begin(ab, srng);

	while (*budget) {
		*budget -= 1;
		rx_mon_status_desc = ath12k_hal_srng_src_peek(ab, srng);
		if (!rx_mon_status_desc) {
			pmon->buf_state = DP_MON_STATUS_REPLINISH;
			break;
		}
		ath12k_hal_rx_buf_addr_info_get(rx_mon_status_desc, &paddr,
						&cookie, &rbm);
		/* A zero paddr means the entry never had a buffer attached;
		 * just refill it below.
		 */
		if (paddr) {
			buf_id = u32_get_bits(cookie, DP_RXDMA_BUF_COOKIE_BUF_ID);

			spin_lock_bh(&rx_ring->idr_lock);
			skb = idr_find(&rx_ring->bufs_idr, buf_id);
			spin_unlock_bh(&rx_ring->idr_lock);

			if (!skb) {
				ath12k_warn(ab, "rx monitor status with invalid buf_id %d\n",
					    buf_id);
				pmon->buf_state = DP_MON_STATUS_REPLINISH;
				goto move_next;
			}

			rxcb = ATH12K_SKB_RXCB(skb);

			/* Sync (not unmap) first: the buffer may have to stay
			 * mapped if DMA is not done yet (see below).
			 */
			dma_sync_single_for_cpu(ab->dev, rxcb->paddr,
						skb->len + skb_tailroom(skb),
						DMA_FROM_DEVICE);

			tlv = (struct hal_tlv_64_hdr *)skb->data;
			if (le64_get_bits(tlv->tl, HAL_TLV_HDR_TAG) !=
					HAL_RX_STATUS_BUFFER_DONE) {
				pmon->buf_state = DP_MON_STATUS_NO_DMA;
				ath12k_warn(ab,
					    "mon status DONE not set %llx, buf_id %d\n",
					    le64_get_bits(tlv->tl, HAL_TLV_HDR_TAG),
					    buf_id);
				/* RxDMA status done bit might not be set even
				 * though tp is moved by HW.
				 */

				/* If done status is missing:
				 * 1. As per MAC team's suggestion,
				 *    when HP + 1 entry is peeked and if DMA
				 *    is not done and if HP + 2 entry's DMA done
				 *    is set. skip HP + 1 entry and
				 *    start processing in next interrupt.
				 * 2. If HP + 2 entry's DMA done is not set,
				 *    poll onto HP + 1 entry DMA done to be set.
				 *    Check status for same buffer for next time
				 *    dp_rx_mon_status_srng_process
				 */
				reap_status = ath12k_dp_rx_mon_buf_done(ab, srng,
									rx_ring);
				if (reap_status == DP_MON_STATUS_NO_DMA)
					continue;

				/* Case 1: drop this buffer and move on. */
				spin_lock_bh(&rx_ring->idr_lock);
				idr_remove(&rx_ring->bufs_idr, buf_id);
				spin_unlock_bh(&rx_ring->idr_lock);

				dma_unmap_single(ab->dev, rxcb->paddr,
						 skb->len + skb_tailroom(skb),
						 DMA_FROM_DEVICE);

				dev_kfree_skb_any(skb);
				pmon->buf_state = DP_MON_STATUS_REPLINISH;
				goto move_next;
			}

			/* Buffer complete: detach it from the ring and queue
			 * it for parsing by the caller.
			 */
			spin_lock_bh(&rx_ring->idr_lock);
			idr_remove(&rx_ring->bufs_idr, buf_id);
			spin_unlock_bh(&rx_ring->idr_lock);

			dma_unmap_single(ab->dev, rxcb->paddr,
					 skb->len + skb_tailroom(skb),
					 DMA_FROM_DEVICE);

			if (ath12k_dp_pkt_set_pktlen(skb, RX_MON_STATUS_BUF_SIZE)) {
				dev_kfree_skb_any(skb);
				goto move_next;
			}
			__skb_queue_tail(skb_list, skb);
		} else {
			pmon->buf_state = DP_MON_STATUS_REPLINISH;
		}
move_next:
		/* Refill the reaped entry with a new status buffer. */
		skb = ath12k_dp_rx_alloc_mon_status_buf(ab, rx_ring,
							&buf_id);

		if (!skb) {
			/* Allocation failed: write a null buffer address so
			 * hardware skips this entry, and stop reaping.
			 */
			ath12k_warn(ab, "failed to alloc buffer for status ring\n");
			hal_params = ab->hw_params->hal_params;
			ath12k_hal_rx_buf_addr_info_set(rx_mon_status_desc, 0, 0,
							hal_params->rx_buf_rbm);
			num_buffs_reaped++;
			break;
		}
		rxcb = ATH12K_SKB_RXCB(skb);

		cookie = u32_encode_bits(mac_id, DP_RXDMA_BUF_COOKIE_PDEV_ID) |
			 u32_encode_bits(buf_id, DP_RXDMA_BUF_COOKIE_BUF_ID);

		ath12k_hal_rx_buf_addr_info_set(rx_mon_status_desc, rxcb->paddr,
						cookie,
						ab->hw_params->hal_params->rx_buf_rbm);
		ath12k_hal_srng_src_get_next_entry(ab, srng);
		num_buffs_reaped++;
	}
	ath12k_hal_srng_access_end(ab, srng);
	spin_unlock_bh(&srng->lock);

	return num_buffs_reaped;
}
4038 
/* Pop one MPDU worth of MSDUs from a REO entrance ring entry by walking
 * its chain of MSDU link descriptors, linking the popped skbs into a
 * singly linked list returned via *head_msdu / *tail_msdu.
 *
 * Consumed rx descriptors are moved onto used_list for later replenish,
 * and each link descriptor is returned to the idle list as it is drained.
 * *npackets is set to 1 when the whole MPDU was reaped (msdu_cnt hit 0);
 * *ppdu_id is updated from the first MSDU's rx descriptor.
 *
 * Returns the number of rx buffers consumed.
 */
static u32
ath12k_dp_rx_mon_mpdu_pop(struct ath12k *ar, int mac_id,
			  void *ring_entry, struct sk_buff **head_msdu,
			  struct sk_buff **tail_msdu,
			  struct list_head *used_list,
			  u32 *npackets, u32 *ppdu_id)
{
	struct ath12k_mon_data *pmon = (struct ath12k_mon_data *)&ar->dp.mon_data;
	struct ath12k_buffer_addr *p_buf_addr_info, *p_last_buf_addr_info;
	u32 msdu_ppdu_id = 0, msdu_cnt = 0, total_len = 0, frag_len = 0;
	u32 rx_buf_size, rx_pkt_offset, sw_cookie;
	bool is_frag, is_first_msdu, drop_mpdu = false;
	struct hal_reo_entrance_ring *ent_desc =
		(struct hal_reo_entrance_ring *)ring_entry;
	u32 rx_bufs_used = 0, i = 0, desc_bank = 0;
	struct hal_rx_desc *rx_desc, *tail_rx_desc;
	struct hal_rx_msdu_link *msdu_link_desc;
	struct sk_buff *msdu = NULL, *last = NULL;
	struct ath12k_rx_desc_info *desc_info;
	struct ath12k_buffer_addr buf_info;
	struct hal_rx_msdu_list msdu_list;
	struct ath12k_skb_rxcb *rxcb;
	u16 num_msdus = 0;
	dma_addr_t paddr;
	u8 rbm;

	ath12k_hal_rx_reo_ent_buf_paddr_get(ring_entry, &paddr,
					    &sw_cookie,
					    &p_last_buf_addr_info, &rbm,
					    &msdu_cnt);

	spin_lock_bh(&pmon->mon_lock);

	/* Drop the whole MPDU up front if rxdma flagged an unrecoverable
	 * error for this entrance ring entry.
	 */
	if (le32_get_bits(ent_desc->info1,
			  HAL_REO_ENTR_RING_INFO1_RXDMA_PUSH_REASON) ==
			  HAL_REO_DEST_RING_PUSH_REASON_ERR_DETECTED) {
		u8 rxdma_err = le32_get_bits(ent_desc->info1,
					     HAL_REO_ENTR_RING_INFO1_RXDMA_ERROR_CODE);
		if (rxdma_err == HAL_REO_ENTR_RING_RXDMA_ECODE_FLUSH_REQUEST_ERR ||
		    rxdma_err == HAL_REO_ENTR_RING_RXDMA_ECODE_MPDU_LEN_ERR ||
		    rxdma_err == HAL_REO_ENTR_RING_RXDMA_ECODE_OVERFLOW_ERR) {
			drop_mpdu = true;
			pmon->rx_mon_stats.dest_mpdu_drop++;
		}
	}

	is_frag = false;
	is_first_msdu = true;
	rx_pkt_offset = sizeof(struct hal_rx_desc);

	/* Walk the link descriptor chain until it ends or all MSDUs of the
	 * MPDU have been consumed.
	 */
	do {
		/* Duplicate link descriptor from hardware; bail out. */
		if (pmon->mon_last_linkdesc_paddr == paddr) {
			pmon->rx_mon_stats.dup_mon_linkdesc_cnt++;
			spin_unlock_bh(&pmon->mon_lock);
			return rx_bufs_used;
		}

		desc_bank = u32_get_bits(sw_cookie, DP_LINK_DESC_BANK_MASK);
		msdu_link_desc =
			ar->ab->dp.link_desc_banks[desc_bank].vaddr +
			(paddr - ar->ab->dp.link_desc_banks[desc_bank].paddr);

		ath12k_hal_rx_msdu_list_get(ar, msdu_link_desc, &msdu_list,
					    &num_msdus);
		/* The last MSDU's rx descriptor supplies the l3 pad for all
		 * MSDUs in this link descriptor.
		 */
		desc_info = ath12k_dp_get_rx_desc(ar->ab,
						  msdu_list.sw_cookie[num_msdus - 1]);
		tail_rx_desc = (struct hal_rx_desc *)(desc_info->skb)->data;

		for (i = 0; i < num_msdus; i++) {
			u32 l2_hdr_offset;

			/* Duplicate buffer cookie: drop the rest of the MPDU. */
			if (pmon->mon_last_buf_cookie == msdu_list.sw_cookie[i]) {
				ath12k_dbg(ar->ab, ATH12K_DBG_DATA,
					   "i %d last_cookie %d is same\n",
					   i, pmon->mon_last_buf_cookie);
				drop_mpdu = true;
				pmon->rx_mon_stats.dup_mon_buf_cnt++;
				continue;
			}

			desc_info =
				ath12k_dp_get_rx_desc(ar->ab, msdu_list.sw_cookie[i]);
			msdu = desc_info->skb;

			if (!msdu) {
				ath12k_dbg(ar->ab, ATH12K_DBG_DATA,
					   "msdu_pop: invalid msdu (%d/%d)\n",
					   i + 1, num_msdus);
				goto next_msdu;
			}
			rxcb = ATH12K_SKB_RXCB(msdu);
			if (rxcb->paddr != msdu_list.paddr[i]) {
				ath12k_dbg(ar->ab, ATH12K_DBG_DATA,
					   "i %d paddr %lx != %lx\n",
					   i, (unsigned long)rxcb->paddr,
					   (unsigned long)msdu_list.paddr[i]);
				drop_mpdu = true;
				continue;
			}
			if (!rxcb->unmapped) {
				dma_unmap_single(ar->ab->dev, rxcb->paddr,
						 msdu->len +
						 skb_tailroom(msdu),
						 DMA_FROM_DEVICE);
				rxcb->unmapped = 1;
			}
			if (drop_mpdu) {
				ath12k_dbg(ar->ab, ATH12K_DBG_DATA,
					   "i %d drop msdu %p *ppdu_id %x\n",
					   i, msdu, *ppdu_id);
				dev_kfree_skb_any(msdu);
				msdu = NULL;
				goto next_msdu;
			}

			rx_desc = (struct hal_rx_desc *)msdu->data;
			l2_hdr_offset = ath12k_dp_rx_h_l3pad(ar->ab, tail_rx_desc);
			if (is_first_msdu) {
				if (!ath12k_dp_rxdesc_mpdu_valid(ar->ab, rx_desc)) {
					drop_mpdu = true;
					dev_kfree_skb_any(msdu);
					msdu = NULL;
					pmon->mon_last_linkdesc_paddr = paddr;
					goto next_msdu;
				}
				msdu_ppdu_id =
					ath12k_dp_rxdesc_get_ppduid(ar->ab, rx_desc);

				/* Stop if this MPDU belongs to a later PPDU
				 * than the status ring is currently on.
				 */
				if (ath12k_dp_mon_comp_ppduid(msdu_ppdu_id,
							      ppdu_id)) {
					spin_unlock_bh(&pmon->mon_lock);
					return rx_bufs_used;
				}
				pmon->mon_last_linkdesc_paddr = paddr;
				is_first_msdu = false;
			}
			ath12k_dp_mon_get_buf_len(&msdu_list.msdu_info[i],
						  &is_frag, &total_len,
						  &frag_len, &msdu_cnt);
			rx_buf_size = rx_pkt_offset + l2_hdr_offset + frag_len;

			if (ath12k_dp_pkt_set_pktlen(msdu, rx_buf_size)) {
				dev_kfree_skb_any(msdu);
				goto next_msdu;
			}

			/* Append the MSDU to the singly linked output list. */
			if (!(*head_msdu))
				*head_msdu = msdu;
			else if (last)
				last->next = msdu;

			last = msdu;
next_msdu:
			pmon->mon_last_buf_cookie = msdu_list.sw_cookie[i];
			rx_bufs_used++;
			desc_info->skb = NULL;
			list_add_tail(&desc_info->list, used_list);
		}

		ath12k_hal_rx_buf_addr_info_set(&buf_info, paddr, sw_cookie, rbm);

		/* Fetch the next link descriptor before returning the current
		 * one to the idle list.
		 */
		ath12k_dp_mon_next_link_desc_get(msdu_link_desc, &paddr,
						 &sw_cookie, &rbm,
						 &p_buf_addr_info);

		ath12k_dp_rx_link_desc_return(ar->ab, &buf_info,
					      HAL_WBM_REL_BM_ACT_PUT_IN_IDLE);

		p_last_buf_addr_info = p_buf_addr_info;

	} while (paddr && msdu_cnt);

	spin_unlock_bh(&pmon->mon_lock);

	if (last)
		last->next = NULL;

	*tail_msdu = msdu;

	if (msdu_cnt == 0)
		*npackets = 1;

	return rx_bufs_used;
}
4223 
4224 /* The destination ring processing is stuck if the destination is not
4225  * moving while status ring moves 16 PPDU. The destination ring processing
4226  * skips this destination ring PPDU as a workaround.
4227  */
4228 #define MON_DEST_RING_STUCK_MAX_CNT 16
4229 
4230 static void ath12k_dp_rx_mon_dest_process(struct ath12k *ar, int mac_id,
4231 					  u32 quota, struct napi_struct *napi)
4232 {
4233 	struct ath12k_mon_data *pmon = (struct ath12k_mon_data *)&ar->dp.mon_data;
4234 	struct ath12k_pdev_mon_stats *rx_mon_stats;
4235 	u32 ppdu_id, rx_bufs_used = 0, ring_id;
4236 	u32 mpdu_rx_bufs_used, npackets = 0;
4237 	struct ath12k_dp *dp = &ar->ab->dp;
4238 	struct ath12k_base *ab = ar->ab;
4239 	void *ring_entry, *mon_dst_srng;
4240 	struct dp_mon_mpdu *tmp_mpdu;
4241 	LIST_HEAD(rx_desc_used_list);
4242 	struct hal_srng *srng;
4243 
4244 	ring_id = dp->rxdma_err_dst_ring[mac_id].ring_id;
4245 	srng = &ab->hal.srng_list[ring_id];
4246 
4247 	mon_dst_srng = &ab->hal.srng_list[ring_id];
4248 
4249 	spin_lock_bh(&srng->lock);
4250 
4251 	ath12k_hal_srng_access_begin(ab, mon_dst_srng);
4252 
4253 	ppdu_id = pmon->mon_ppdu_info.ppdu_id;
4254 	rx_mon_stats = &pmon->rx_mon_stats;
4255 
4256 	while ((ring_entry = ath12k_hal_srng_dst_peek(ar->ab, mon_dst_srng))) {
4257 		struct sk_buff *head_msdu, *tail_msdu;
4258 
4259 		head_msdu = NULL;
4260 		tail_msdu = NULL;
4261 
4262 		mpdu_rx_bufs_used = ath12k_dp_rx_mon_mpdu_pop(ar, mac_id, ring_entry,
4263 							      &head_msdu, &tail_msdu,
4264 							      &rx_desc_used_list,
4265 							      &npackets, &ppdu_id);
4266 
4267 		rx_bufs_used += mpdu_rx_bufs_used;
4268 
4269 		if (mpdu_rx_bufs_used) {
4270 			dp->mon_dest_ring_stuck_cnt = 0;
4271 		} else {
4272 			dp->mon_dest_ring_stuck_cnt++;
4273 			rx_mon_stats->dest_mon_not_reaped++;
4274 		}
4275 
4276 		if (dp->mon_dest_ring_stuck_cnt > MON_DEST_RING_STUCK_MAX_CNT) {
4277 			rx_mon_stats->dest_mon_stuck++;
4278 			ath12k_dbg(ar->ab, ATH12K_DBG_DATA,
4279 				   "status ring ppdu_id=%d dest ring ppdu_id=%d mon_dest_ring_stuck_cnt=%d dest_mon_not_reaped=%u dest_mon_stuck=%u\n",
4280 				   pmon->mon_ppdu_info.ppdu_id, ppdu_id,
4281 				   dp->mon_dest_ring_stuck_cnt,
4282 				   rx_mon_stats->dest_mon_not_reaped,
4283 				   rx_mon_stats->dest_mon_stuck);
4284 			spin_lock_bh(&pmon->mon_lock);
4285 			pmon->mon_ppdu_info.ppdu_id = ppdu_id;
4286 			spin_unlock_bh(&pmon->mon_lock);
4287 			continue;
4288 		}
4289 
4290 		if (ppdu_id != pmon->mon_ppdu_info.ppdu_id) {
4291 			spin_lock_bh(&pmon->mon_lock);
4292 			pmon->mon_ppdu_status = DP_PPDU_STATUS_START;
4293 			spin_unlock_bh(&pmon->mon_lock);
4294 			ath12k_dbg(ar->ab, ATH12K_DBG_DATA,
4295 				   "dest_rx: new ppdu_id %x != status ppdu_id %x dest_mon_not_reaped = %u dest_mon_stuck = %u\n",
4296 				   ppdu_id, pmon->mon_ppdu_info.ppdu_id,
4297 				   rx_mon_stats->dest_mon_not_reaped,
4298 				   rx_mon_stats->dest_mon_stuck);
4299 			break;
4300 		}
4301 
4302 		if (head_msdu && tail_msdu) {
4303 			tmp_mpdu = kzalloc(sizeof(*tmp_mpdu), GFP_ATOMIC);
4304 			if (!tmp_mpdu)
4305 				break;
4306 
4307 			tmp_mpdu->head = head_msdu;
4308 			tmp_mpdu->tail = tail_msdu;
4309 			tmp_mpdu->err_bitmap = pmon->err_bitmap;
4310 			tmp_mpdu->decap_format = pmon->decap_format;
4311 			ath12k_dp_mon_rx_deliver(ar, tmp_mpdu,
4312 						 &pmon->mon_ppdu_info, napi);
4313 			rx_mon_stats->dest_mpdu_done++;
4314 			kfree(tmp_mpdu);
4315 		}
4316 
4317 		ring_entry = ath12k_hal_srng_dst_get_next_entry(ar->ab,
4318 								mon_dst_srng);
4319 	}
4320 	ath12k_hal_srng_access_end(ar->ab, mon_dst_srng);
4321 
4322 	spin_unlock_bh(&srng->lock);
4323 
4324 	if (rx_bufs_used) {
4325 		rx_mon_stats->dest_ppdu_done++;
4326 		ath12k_dp_rx_bufs_replenish(ar->ab,
4327 					    &dp->rx_refill_buf_ring,
4328 					    &rx_desc_used_list,
4329 					    rx_bufs_used);
4330 	}
4331 }
4332 
4333 static int
4334 __ath12k_dp_mon_process_ring(struct ath12k *ar, int mac_id,
4335 			     struct napi_struct *napi, int *budget)
4336 {
4337 	struct ath12k_mon_data *pmon = (struct ath12k_mon_data *)&ar->dp.mon_data;
4338 	struct ath12k_pdev_mon_stats *rx_mon_stats = &pmon->rx_mon_stats;
4339 	struct hal_rx_mon_ppdu_info *ppdu_info = &pmon->mon_ppdu_info;
4340 	enum hal_rx_mon_status hal_status;
4341 	struct sk_buff_head skb_list;
4342 	int num_buffs_reaped;
4343 	struct sk_buff *skb;
4344 
4345 	__skb_queue_head_init(&skb_list);
4346 
4347 	num_buffs_reaped = ath12k_dp_rx_reap_mon_status_ring(ar->ab, mac_id,
4348 							     budget, &skb_list);
4349 	if (!num_buffs_reaped)
4350 		goto exit;
4351 
4352 	while ((skb = __skb_dequeue(&skb_list))) {
4353 		memset(ppdu_info, 0, sizeof(*ppdu_info));
4354 		ppdu_info->peer_id = HAL_INVALID_PEERID;
4355 
4356 		hal_status = ath12k_dp_mon_parse_rx_dest(ar, pmon, skb);
4357 
4358 		if (ar->monitor_started &&
4359 		    pmon->mon_ppdu_status == DP_PPDU_STATUS_START &&
4360 		    hal_status == HAL_TLV_STATUS_PPDU_DONE) {
4361 			rx_mon_stats->status_ppdu_done++;
4362 			pmon->mon_ppdu_status = DP_PPDU_STATUS_DONE;
4363 			ath12k_dp_rx_mon_dest_process(ar, mac_id, *budget, napi);
4364 			pmon->mon_ppdu_status = DP_PPDU_STATUS_START;
4365 		}
4366 
4367 		dev_kfree_skb_any(skb);
4368 	}
4369 
4370 exit:
4371 	return num_buffs_reaped;
4372 }
4373 
4374 int ath12k_dp_mon_process_ring(struct ath12k_base *ab, int mac_id,
4375 			       struct napi_struct *napi, int budget,
4376 			       enum dp_monitor_mode monitor_mode)
4377 {
4378 	struct ath12k *ar = ath12k_ab_to_ar(ab, mac_id);
4379 	int num_buffs_reaped = 0;
4380 
4381 	if (ab->hw_params->rxdma1_enable) {
4382 		if (monitor_mode == ATH12K_DP_RX_MONITOR_MODE)
4383 			num_buffs_reaped = ath12k_dp_mon_srng_process(ar, &budget, napi);
4384 	} else {
4385 		if (ar->monitor_started)
4386 			num_buffs_reaped =
4387 				__ath12k_dp_mon_process_ring(ar, mac_id, napi, &budget);
4388 	}
4389 
4390 	return num_buffs_reaped;
4391 }
4392