xref: /linux/drivers/net/wireless/ath/ath12k/dp_mon.c (revision d30c1683aaecb93d2ab95685dc4300a33d3cea7a)
1 // SPDX-License-Identifier: BSD-3-Clause-Clear
2 /*
3  * Copyright (c) 2019-2021 The Linux Foundation. All rights reserved.
4  * Copyright (c) Qualcomm Technologies, Inc. and/or its subsidiaries.
5  */
6 
7 #include "dp_mon.h"
8 #include "debug.h"
9 #include "dp_rx.h"
10 #include "dp_tx.h"
11 #include "peer.h"
12 
/* Decode a bitfield from a little-endian 32-bit TLV word and re-encode the
 * extracted value into a host-order bitfield (typically a radiotap field).
 */
#define ATH12K_LE32_DEC_ENC(value, dec_bits, enc_bits)	\
		u32_encode_bits(le32_get_bits(value, dec_bits), enc_bits)

/* Same as ATH12K_LE32_DEC_ENC, but decodes from a little-endian 64-bit word */
#define ATH12K_LE64_DEC_ENC(value, dec_bits, enc_bits) \
		u32_encode_bits(le64_get_bits(value, dec_bits), enc_bits)
18 
19 static void
20 ath12k_dp_mon_rx_handle_ofdma_info(const struct hal_rx_ppdu_end_user_stats *ppdu_end_user,
21 				   struct hal_rx_user_status *rx_user_status)
22 {
23 	rx_user_status->ul_ofdma_user_v0_word0 =
24 		__le32_to_cpu(ppdu_end_user->usr_resp_ref);
25 	rx_user_status->ul_ofdma_user_v0_word1 =
26 		__le32_to_cpu(ppdu_end_user->usr_resp_ref_ext);
27 }
28 
/* Extract the FCS-ok / FCS-error MPDU byte counters from the
 * PPDU_END_USER_STATS TLV into the per-user rx status.
 * @ppduinfo is unused here; it appears to be kept only to match the
 * caller's calling convention — NOTE(review): confirm against callers.
 */
static void
ath12k_dp_mon_rx_populate_byte_count(const struct hal_rx_ppdu_end_user_stats *stats,
				     void *ppduinfo,
				     struct hal_rx_user_status *rx_user_status)
{
	/* info7 carries the byte count of MPDUs that passed FCS */
	rx_user_status->mpdu_ok_byte_count =
		le32_get_bits(stats->info7,
			      HAL_RX_PPDU_END_USER_STATS_INFO7_MPDU_OK_BYTE_COUNT);
	/* info8 carries the byte count of MPDUs that failed FCS */
	rx_user_status->mpdu_err_byte_count =
		le32_get_bits(stats->info8,
			      HAL_RX_PPDU_END_USER_STATS_INFO8_MPDU_ERR_BYTE_COUNT);
}
41 
/* Copy the per-user state accumulated in @ppdu_info (MSDU/MPDU counters,
 * frame control, preamble/rate flags, FCS bitmap) into this MU user's
 * rx status, then pull the byte counters directly from the
 * PPDU_END_USER_STATS TLV via ath12k_dp_mon_rx_populate_byte_count().
 */
static void
ath12k_dp_mon_rx_populate_mu_user_info(const struct hal_rx_ppdu_end_user_stats *rx_tlv,
				       struct hal_rx_mon_ppdu_info *ppdu_info,
				       struct hal_rx_user_status *rx_user_status)
{
	rx_user_status->ast_index = ppdu_info->ast_index;
	rx_user_status->tid = ppdu_info->tid;
	rx_user_status->tcp_ack_msdu_count =
		ppdu_info->tcp_ack_msdu_count;
	rx_user_status->tcp_msdu_count =
		ppdu_info->tcp_msdu_count;
	rx_user_status->udp_msdu_count =
		ppdu_info->udp_msdu_count;
	rx_user_status->other_msdu_count =
		ppdu_info->other_msdu_count;
	rx_user_status->frame_control = ppdu_info->frame_control;
	rx_user_status->frame_control_info_valid =
		ppdu_info->frame_control_info_valid;
	rx_user_status->data_sequence_control_info_valid =
		ppdu_info->data_sequence_control_info_valid;
	rx_user_status->first_data_seq_ctrl =
		ppdu_info->first_data_seq_ctrl;
	rx_user_status->preamble_type = ppdu_info->preamble_type;
	rx_user_status->ht_flags = ppdu_info->ht_flags;
	rx_user_status->vht_flags = ppdu_info->vht_flags;
	rx_user_status->he_flags = ppdu_info->he_flags;
	rx_user_status->rs_flags = ppdu_info->rs_flags;

	/* MPDU FCS pass/fail counts plus the per-MPDU FCS-ok bitmap */
	rx_user_status->mpdu_cnt_fcs_ok =
		ppdu_info->num_mpdu_fcs_ok;
	rx_user_status->mpdu_cnt_fcs_err =
		ppdu_info->num_mpdu_fcs_err;
	memcpy(&rx_user_status->mpdu_fcs_ok_bitmap[0], &ppdu_info->mpdu_fcs_ok_bitmap[0],
	       HAL_RX_NUM_WORDS_PER_PPDU_BITMAP *
	       sizeof(ppdu_info->mpdu_fcs_ok_bitmap[0]));

	ath12k_dp_mon_rx_populate_byte_count(rx_tlv, ppdu_info, rx_user_status);
}
80 
81 static void ath12k_dp_mon_parse_vht_sig_a(const struct hal_rx_vht_sig_a_info *vht_sig,
82 					  struct hal_rx_mon_ppdu_info *ppdu_info)
83 {
84 	u32 nsts, info0, info1;
85 	u8 gi_setting;
86 
87 	info0 = __le32_to_cpu(vht_sig->info0);
88 	info1 = __le32_to_cpu(vht_sig->info1);
89 
90 	ppdu_info->ldpc = u32_get_bits(info1, HAL_RX_VHT_SIG_A_INFO_INFO1_SU_MU_CODING);
91 	ppdu_info->mcs = u32_get_bits(info1, HAL_RX_VHT_SIG_A_INFO_INFO1_MCS);
92 	gi_setting = u32_get_bits(info1, HAL_RX_VHT_SIG_A_INFO_INFO1_GI_SETTING);
93 	switch (gi_setting) {
94 	case HAL_RX_VHT_SIG_A_NORMAL_GI:
95 		ppdu_info->gi = HAL_RX_GI_0_8_US;
96 		break;
97 	case HAL_RX_VHT_SIG_A_SHORT_GI:
98 	case HAL_RX_VHT_SIG_A_SHORT_GI_AMBIGUITY:
99 		ppdu_info->gi = HAL_RX_GI_0_4_US;
100 		break;
101 	}
102 
103 	ppdu_info->is_stbc = u32_get_bits(info0, HAL_RX_VHT_SIG_A_INFO_INFO0_STBC);
104 	nsts = u32_get_bits(info0, HAL_RX_VHT_SIG_A_INFO_INFO0_NSTS);
105 	if (ppdu_info->is_stbc && nsts > 0)
106 		nsts = ((nsts + 1) >> 1) - 1;
107 
108 	ppdu_info->nss = u32_get_bits(nsts, VHT_SIG_SU_NSS_MASK) + 1;
109 	ppdu_info->bw = u32_get_bits(info0, HAL_RX_VHT_SIG_A_INFO_INFO0_BW);
110 	ppdu_info->beamformed = u32_get_bits(info1,
111 					     HAL_RX_VHT_SIG_A_INFO_INFO1_BEAMFORMED);
112 	ppdu_info->vht_flag_values5 = u32_get_bits(info0,
113 						   HAL_RX_VHT_SIG_A_INFO_INFO0_GROUP_ID);
114 	ppdu_info->vht_flag_values3[0] = (((ppdu_info->mcs) << 4) |
115 					    ppdu_info->nss);
116 	ppdu_info->vht_flag_values2 = ppdu_info->bw;
117 	ppdu_info->vht_flag_values4 =
118 		u32_get_bits(info1, HAL_RX_VHT_SIG_A_INFO_INFO1_SU_MU_CODING);
119 }
120 
121 static void ath12k_dp_mon_parse_ht_sig(const struct hal_rx_ht_sig_info *ht_sig,
122 				       struct hal_rx_mon_ppdu_info *ppdu_info)
123 {
124 	u32 info0 = __le32_to_cpu(ht_sig->info0);
125 	u32 info1 = __le32_to_cpu(ht_sig->info1);
126 
127 	ppdu_info->mcs = u32_get_bits(info0, HAL_RX_HT_SIG_INFO_INFO0_MCS);
128 	ppdu_info->bw = u32_get_bits(info0, HAL_RX_HT_SIG_INFO_INFO0_BW);
129 	ppdu_info->is_stbc = u32_get_bits(info1, HAL_RX_HT_SIG_INFO_INFO1_STBC);
130 	ppdu_info->ldpc = u32_get_bits(info1, HAL_RX_HT_SIG_INFO_INFO1_FEC_CODING);
131 	ppdu_info->gi = u32_get_bits(info1, HAL_RX_HT_SIG_INFO_INFO1_GI);
132 	ppdu_info->nss = (ppdu_info->mcs >> 3) + 1;
133 }
134 
135 static void ath12k_dp_mon_parse_l_sig_b(const struct hal_rx_lsig_b_info *lsigb,
136 					struct hal_rx_mon_ppdu_info *ppdu_info)
137 {
138 	u32 info0 = __le32_to_cpu(lsigb->info0);
139 	u8 rate;
140 
141 	rate = u32_get_bits(info0, HAL_RX_LSIG_B_INFO_INFO0_RATE);
142 	switch (rate) {
143 	case 1:
144 		rate = HAL_RX_LEGACY_RATE_1_MBPS;
145 		break;
146 	case 2:
147 	case 5:
148 		rate = HAL_RX_LEGACY_RATE_2_MBPS;
149 		break;
150 	case 3:
151 	case 6:
152 		rate = HAL_RX_LEGACY_RATE_5_5_MBPS;
153 		break;
154 	case 4:
155 	case 7:
156 		rate = HAL_RX_LEGACY_RATE_11_MBPS;
157 		break;
158 	default:
159 		rate = HAL_RX_LEGACY_RATE_INVALID;
160 	}
161 
162 	ppdu_info->rate = rate;
163 	ppdu_info->cck_flag = 1;
164 }
165 
166 static void ath12k_dp_mon_parse_l_sig_a(const struct hal_rx_lsig_a_info *lsiga,
167 					struct hal_rx_mon_ppdu_info *ppdu_info)
168 {
169 	u32 info0 = __le32_to_cpu(lsiga->info0);
170 	u8 rate;
171 
172 	rate = u32_get_bits(info0, HAL_RX_LSIG_A_INFO_INFO0_RATE);
173 	switch (rate) {
174 	case 8:
175 		rate = HAL_RX_LEGACY_RATE_48_MBPS;
176 		break;
177 	case 9:
178 		rate = HAL_RX_LEGACY_RATE_24_MBPS;
179 		break;
180 	case 10:
181 		rate = HAL_RX_LEGACY_RATE_12_MBPS;
182 		break;
183 	case 11:
184 		rate = HAL_RX_LEGACY_RATE_6_MBPS;
185 		break;
186 	case 12:
187 		rate = HAL_RX_LEGACY_RATE_54_MBPS;
188 		break;
189 	case 13:
190 		rate = HAL_RX_LEGACY_RATE_36_MBPS;
191 		break;
192 	case 14:
193 		rate = HAL_RX_LEGACY_RATE_18_MBPS;
194 		break;
195 	case 15:
196 		rate = HAL_RX_LEGACY_RATE_9_MBPS;
197 		break;
198 	default:
199 		rate = HAL_RX_LEGACY_RATE_INVALID;
200 	}
201 
202 	ppdu_info->rate = rate;
203 }
204 
/* Decode a per-user HE-SIG-B2 field for an OFDMA allocation: per-STA
 * MCS, DCM, coding, STA ID, NSTS and TxBF, folded into the radiotap
 * HE data3/data4 words and the software rate fields.
 */
static void
ath12k_dp_mon_parse_he_sig_b2_ofdma(const struct hal_rx_he_sig_b2_ofdma_info *ofdma,
				    struct hal_rx_mon_ppdu_info *ppdu_info)
{
	u32 info0, value;

	info0 = __le32_to_cpu(ofdma->info0);

	ppdu_info->he_data1 |= HE_MCS_KNOWN | HE_DCM_KNOWN | HE_CODING_KNOWN;

	/* HE-data2 */
	ppdu_info->he_data2 |= HE_TXBF_KNOWN;

	ppdu_info->mcs = u32_get_bits(info0, HAL_RX_HE_SIG_B2_OFDMA_INFO_INFO0_STA_MCS);
	value = ppdu_info->mcs << HE_TRANSMIT_MCS_SHIFT;
	ppdu_info->he_data3 |= value;

	value = u32_get_bits(info0, HAL_RX_HE_SIG_B2_OFDMA_INFO_INFO0_STA_DCM);
	value = value << HE_DCM_SHIFT;
	ppdu_info->he_data3 |= value;

	value = u32_get_bits(info0, HAL_RX_HE_SIG_B2_OFDMA_INFO_INFO0_STA_CODING);
	ppdu_info->ldpc = value;
	value = value << HE_CODING_SHIFT;
	ppdu_info->he_data3 |= value;

	/* HE-data4 */
	value = u32_get_bits(info0, HAL_RX_HE_SIG_B2_OFDMA_INFO_INFO0_STA_ID);
	value = value << HE_STA_ID_SHIFT;
	ppdu_info->he_data4 |= value;

	/* NSTS field is zero-based */
	ppdu_info->nss =
		u32_get_bits(info0,
			     HAL_RX_HE_SIG_B2_OFDMA_INFO_INFO0_STA_NSTS) + 1;
	ppdu_info->beamformed = u32_get_bits(info0,
					     HAL_RX_HE_SIG_B2_OFDMA_INFO_INFO0_STA_TXBF);
}
242 
243 static void
244 ath12k_dp_mon_parse_he_sig_b2_mu(const struct hal_rx_he_sig_b2_mu_info *he_sig_b2_mu,
245 				 struct hal_rx_mon_ppdu_info *ppdu_info)
246 {
247 	u32 info0, value;
248 
249 	info0 = __le32_to_cpu(he_sig_b2_mu->info0);
250 
251 	ppdu_info->he_data1 |= HE_MCS_KNOWN | HE_CODING_KNOWN;
252 
253 	ppdu_info->mcs = u32_get_bits(info0, HAL_RX_HE_SIG_B2_MU_INFO_INFO0_STA_MCS);
254 	value = ppdu_info->mcs << HE_TRANSMIT_MCS_SHIFT;
255 	ppdu_info->he_data3 |= value;
256 
257 	value = u32_get_bits(info0, HAL_RX_HE_SIG_B2_MU_INFO_INFO0_STA_CODING);
258 	ppdu_info->ldpc = value;
259 	value = value << HE_CODING_SHIFT;
260 	ppdu_info->he_data3 |= value;
261 
262 	value = u32_get_bits(info0, HAL_RX_HE_SIG_B2_MU_INFO_INFO0_STA_ID);
263 	value = value << HE_STA_ID_SHIFT;
264 	ppdu_info->he_data4 |= value;
265 
266 	ppdu_info->nss =
267 		u32_get_bits(info0,
268 			     HAL_RX_HE_SIG_B2_MU_INFO_INFO0_STA_NSTS) + 1;
269 }
270 
271 static void
272 ath12k_dp_mon_parse_he_sig_b1_mu(const struct hal_rx_he_sig_b1_mu_info *he_sig_b1_mu,
273 				 struct hal_rx_mon_ppdu_info *ppdu_info)
274 {
275 	u32 info0 = __le32_to_cpu(he_sig_b1_mu->info0);
276 	u16 ru_tones;
277 
278 	ru_tones = u32_get_bits(info0,
279 				HAL_RX_HE_SIG_B1_MU_INFO_INFO0_RU_ALLOCATION);
280 	ppdu_info->ru_alloc = ath12k_he_ru_tones_to_nl80211_he_ru_alloc(ru_tones);
281 	ppdu_info->he_RU[0] = ru_tones;
282 }
283 
284 static void
285 ath12k_dp_mon_parse_he_sig_mu(const struct hal_rx_he_sig_a_mu_dl_info *he_sig_a_mu_dl,
286 			      struct hal_rx_mon_ppdu_info *ppdu_info)
287 {
288 	u32 info0, info1, value;
289 	u16 he_gi = 0, he_ltf = 0;
290 
291 	info0 = __le32_to_cpu(he_sig_a_mu_dl->info0);
292 	info1 = __le32_to_cpu(he_sig_a_mu_dl->info1);
293 
294 	ppdu_info->he_mu_flags = 1;
295 
296 	ppdu_info->he_data1 = HE_MU_FORMAT_TYPE;
297 	ppdu_info->he_data1 |=
298 			HE_BSS_COLOR_KNOWN |
299 			HE_DL_UL_KNOWN |
300 			HE_LDPC_EXTRA_SYMBOL_KNOWN |
301 			HE_STBC_KNOWN |
302 			HE_DATA_BW_RU_KNOWN |
303 			HE_DOPPLER_KNOWN;
304 
305 	ppdu_info->he_data2 =
306 			HE_GI_KNOWN |
307 			HE_LTF_SYMBOLS_KNOWN |
308 			HE_PRE_FEC_PADDING_KNOWN |
309 			HE_PE_DISAMBIGUITY_KNOWN |
310 			HE_TXOP_KNOWN |
311 			HE_MIDABLE_PERIODICITY_KNOWN;
312 
313 	/* data3 */
314 	ppdu_info->he_data3 = u32_get_bits(info0, HAL_RX_HE_SIG_A_MU_DL_INFO0_BSS_COLOR);
315 	value = u32_get_bits(info0, HAL_RX_HE_SIG_A_MU_DL_INFO0_UL_FLAG);
316 	value = value << HE_DL_UL_SHIFT;
317 	ppdu_info->he_data3 |= value;
318 
319 	value = u32_get_bits(info1, HAL_RX_HE_SIG_A_MU_DL_INFO1_LDPC_EXTRA);
320 	value = value << HE_LDPC_EXTRA_SYMBOL_SHIFT;
321 	ppdu_info->he_data3 |= value;
322 
323 	value = u32_get_bits(info1, HAL_RX_HE_SIG_A_MU_DL_INFO1_STBC);
324 	value = value << HE_STBC_SHIFT;
325 	ppdu_info->he_data3 |= value;
326 
327 	/* data4 */
328 	ppdu_info->he_data4 = u32_get_bits(info0,
329 					   HAL_RX_HE_SIG_A_MU_DL_INFO0_SPATIAL_REUSE);
330 	ppdu_info->he_data4 = value;
331 
332 	/* data5 */
333 	value = u32_get_bits(info0, HAL_RX_HE_SIG_A_MU_DL_INFO0_TRANSMIT_BW);
334 	ppdu_info->he_data5 = value;
335 	ppdu_info->bw = value;
336 
337 	value = u32_get_bits(info0, HAL_RX_HE_SIG_A_MU_DL_INFO0_CP_LTF_SIZE);
338 	switch (value) {
339 	case 0:
340 		he_gi = HE_GI_0_8;
341 		he_ltf = HE_LTF_4_X;
342 		break;
343 	case 1:
344 		he_gi = HE_GI_0_8;
345 		he_ltf = HE_LTF_2_X;
346 		break;
347 	case 2:
348 		he_gi = HE_GI_1_6;
349 		he_ltf = HE_LTF_2_X;
350 		break;
351 	case 3:
352 		he_gi = HE_GI_3_2;
353 		he_ltf = HE_LTF_4_X;
354 		break;
355 	}
356 
357 	ppdu_info->gi = he_gi;
358 	value = he_gi << HE_GI_SHIFT;
359 	ppdu_info->he_data5 |= value;
360 
361 	value = he_ltf << HE_LTF_SIZE_SHIFT;
362 	ppdu_info->he_data5 |= value;
363 
364 	value = u32_get_bits(info1, HAL_RX_HE_SIG_A_MU_DL_INFO1_NUM_LTF_SYMB);
365 	value = (value << HE_LTF_SYM_SHIFT);
366 	ppdu_info->he_data5 |= value;
367 
368 	value = u32_get_bits(info1, HAL_RX_HE_SIG_A_MU_DL_INFO1_PKT_EXT_FACTOR);
369 	value = value << HE_PRE_FEC_PAD_SHIFT;
370 	ppdu_info->he_data5 |= value;
371 
372 	value = u32_get_bits(info1, HAL_RX_HE_SIG_A_MU_DL_INFO1_PKT_EXT_PE_DISAM);
373 	value = value << HE_PE_DISAMBIGUITY_SHIFT;
374 	ppdu_info->he_data5 |= value;
375 
376 	/*data6*/
377 	value = u32_get_bits(info0, HAL_RX_HE_SIG_A_MU_DL_INFO0_DOPPLER_INDICATION);
378 	value = value << HE_DOPPLER_SHIFT;
379 	ppdu_info->he_data6 |= value;
380 
381 	value = u32_get_bits(info1, HAL_RX_HE_SIG_A_MU_DL_INFO1_TXOP_DURATION);
382 	value = value << HE_TXOP_SHIFT;
383 	ppdu_info->he_data6 |= value;
384 
385 	/* HE-MU Flags */
386 	/* HE-MU-flags1 */
387 	ppdu_info->he_flags1 =
388 		HE_SIG_B_MCS_KNOWN |
389 		HE_SIG_B_DCM_KNOWN |
390 		HE_SIG_B_COMPRESSION_FLAG_1_KNOWN |
391 		HE_SIG_B_SYM_NUM_KNOWN |
392 		HE_RU_0_KNOWN;
393 
394 	value = u32_get_bits(info0, HAL_RX_HE_SIG_A_MU_DL_INFO0_MCS_OF_SIGB);
395 	ppdu_info->he_flags1 |= value;
396 	value = u32_get_bits(info0, HAL_RX_HE_SIG_A_MU_DL_INFO0_DCM_OF_SIGB);
397 	value = value << HE_DCM_FLAG_1_SHIFT;
398 	ppdu_info->he_flags1 |= value;
399 
400 	/* HE-MU-flags2 */
401 	ppdu_info->he_flags2 = HE_BW_KNOWN;
402 
403 	value = u32_get_bits(info0, HAL_RX_HE_SIG_A_MU_DL_INFO0_TRANSMIT_BW);
404 	ppdu_info->he_flags2 |= value;
405 	value = u32_get_bits(info0, HAL_RX_HE_SIG_A_MU_DL_INFO0_COMP_MODE_SIGB);
406 	value = value << HE_SIG_B_COMPRESSION_FLAG_2_SHIFT;
407 	ppdu_info->he_flags2 |= value;
408 	value = u32_get_bits(info0, HAL_RX_HE_SIG_A_MU_DL_INFO0_NUM_SIGB_SYMB);
409 	value = value - 1;
410 	value = value << HE_NUM_SIG_B_SYMBOLS_SHIFT;
411 	ppdu_info->he_flags2 |= value;
412 
413 	ppdu_info->is_stbc = info1 &
414 			     HAL_RX_HE_SIG_A_MU_DL_INFO1_STBC;
415 }
416 
/* Decode HE-SIG-A for an SU (or trigger-format) PPDU into ppdu_info and
 * the radiotap HE data1-data6 words: format type, BSS color, MCS, DCM,
 * coding, STBC, bandwidth, GI/LTF, NSTS, TxBF, Doppler and TXOP.
 */
static void ath12k_dp_mon_parse_he_sig_su(const struct hal_rx_he_sig_a_su_info *he_sig_a,
					  struct hal_rx_mon_ppdu_info *ppdu_info)
{
	u32 info0, info1, value;
	u32 dcm;
	u8 he_dcm = 0, he_stbc = 0;
	u16 he_gi = 0, he_ltf = 0;

	ppdu_info->he_flags = 1;

	info0 = __le32_to_cpu(he_sig_a->info0);
	info1 = __le32_to_cpu(he_sig_a->info1);

	/* format indication 0 means trigger-based, otherwise SU */
	value = u32_get_bits(info0, HAL_RX_HE_SIG_A_SU_INFO_INFO0_FORMAT_IND);
	if (value == 0)
		ppdu_info->he_data1 = HE_TRIG_FORMAT_TYPE;
	else
		ppdu_info->he_data1 = HE_SU_FORMAT_TYPE;

	ppdu_info->he_data1 |=
			HE_BSS_COLOR_KNOWN |
			HE_BEAM_CHANGE_KNOWN |
			HE_DL_UL_KNOWN |
			HE_MCS_KNOWN |
			HE_DCM_KNOWN |
			HE_CODING_KNOWN |
			HE_LDPC_EXTRA_SYMBOL_KNOWN |
			HE_STBC_KNOWN |
			HE_DATA_BW_RU_KNOWN |
			HE_DOPPLER_KNOWN;

	ppdu_info->he_data2 |=
			HE_GI_KNOWN |
			HE_TXBF_KNOWN |
			HE_PE_DISAMBIGUITY_KNOWN |
			HE_TXOP_KNOWN |
			HE_LTF_SYMBOLS_KNOWN |
			HE_PRE_FEC_PADDING_KNOWN |
			HE_MIDABLE_PERIODICITY_KNOWN;

	/* data3: BSS color, beam change, DL/UL, MCS, DCM, coding, STBC */
	ppdu_info->he_data3 = u32_get_bits(info0,
					   HAL_RX_HE_SIG_A_SU_INFO_INFO0_BSS_COLOR);
	value = u32_get_bits(info0, HAL_RX_HE_SIG_A_SU_INFO_INFO0_BEAM_CHANGE);
	value = value << HE_BEAM_CHANGE_SHIFT;
	ppdu_info->he_data3 |= value;
	value = u32_get_bits(info0, HAL_RX_HE_SIG_A_SU_INFO_INFO0_DL_UL_FLAG);
	value = value << HE_DL_UL_SHIFT;
	ppdu_info->he_data3 |= value;

	value = u32_get_bits(info0, HAL_RX_HE_SIG_A_SU_INFO_INFO0_TRANSMIT_MCS);
	ppdu_info->mcs = value;
	value = value << HE_TRANSMIT_MCS_SHIFT;
	ppdu_info->he_data3 |= value;

	value = u32_get_bits(info0, HAL_RX_HE_SIG_A_SU_INFO_INFO0_DCM);
	he_dcm = value;
	value = value << HE_DCM_SHIFT;
	ppdu_info->he_data3 |= value;
	value = u32_get_bits(info1, HAL_RX_HE_SIG_A_SU_INFO_INFO1_CODING);
	value = value << HE_CODING_SHIFT;
	ppdu_info->he_data3 |= value;
	value = u32_get_bits(info1, HAL_RX_HE_SIG_A_SU_INFO_INFO1_LDPC_EXTRA);
	value = value << HE_LDPC_EXTRA_SYMBOL_SHIFT;
	ppdu_info->he_data3 |= value;
	value = u32_get_bits(info1, HAL_RX_HE_SIG_A_SU_INFO_INFO1_STBC);
	he_stbc = value;
	value = value << HE_STBC_SHIFT;
	ppdu_info->he_data3 |= value;

	/* data4 */
	ppdu_info->he_data4 = u32_get_bits(info0,
					   HAL_RX_HE_SIG_A_SU_INFO_INFO0_SPATIAL_REUSE);

	/* data5 */
	value = u32_get_bits(info0,
			     HAL_RX_HE_SIG_A_SU_INFO_INFO0_TRANSMIT_BW);
	ppdu_info->he_data5 = value;
	ppdu_info->bw = value;
	/* CP/LTF-size encoding; case 3 depends on DCM+STBC combination */
	value = u32_get_bits(info0, HAL_RX_HE_SIG_A_SU_INFO_INFO0_CP_LTF_SIZE);
	switch (value) {
	case 0:
		he_gi = HE_GI_0_8;
		he_ltf = HE_LTF_1_X;
		break;
	case 1:
		he_gi = HE_GI_0_8;
		he_ltf = HE_LTF_2_X;
		break;
	case 2:
		he_gi = HE_GI_1_6;
		he_ltf = HE_LTF_2_X;
		break;
	case 3:
		if (he_dcm && he_stbc) {
			he_gi = HE_GI_0_8;
			he_ltf = HE_LTF_4_X;
		} else {
			he_gi = HE_GI_3_2;
			he_ltf = HE_LTF_4_X;
		}
		break;
	}
	ppdu_info->gi = he_gi;
	value = he_gi << HE_GI_SHIFT;
	ppdu_info->he_data5 |= value;
	value = he_ltf << HE_LTF_SIZE_SHIFT;
	ppdu_info->ltf_size = he_ltf;
	ppdu_info->he_data5 |= value;

	value = u32_get_bits(info0, HAL_RX_HE_SIG_A_SU_INFO_INFO0_NSTS);
	value = (value << HE_LTF_SYM_SHIFT);
	ppdu_info->he_data5 |= value;

	value = u32_get_bits(info1, HAL_RX_HE_SIG_A_SU_INFO_INFO1_PKT_EXT_FACTOR);
	value = value << HE_PRE_FEC_PAD_SHIFT;
	ppdu_info->he_data5 |= value;

	value = u32_get_bits(info1, HAL_RX_HE_SIG_A_SU_INFO_INFO1_TXBF);
	value = value << HE_TXBF_SHIFT;
	ppdu_info->he_data5 |= value;
	value = u32_get_bits(info1, HAL_RX_HE_SIG_A_SU_INFO_INFO1_PKT_EXT_PE_DISAM);
	value = value << HE_PE_DISAMBIGUITY_SHIFT;
	ppdu_info->he_data5 |= value;

	/* data6 */
	value = u32_get_bits(info0, HAL_RX_HE_SIG_A_SU_INFO_INFO0_NSTS);
	value++;
	ppdu_info->he_data6 = value;
	value = u32_get_bits(info1, HAL_RX_HE_SIG_A_SU_INFO_INFO1_DOPPLER_IND);
	value = value << HE_DOPPLER_SHIFT;
	ppdu_info->he_data6 |= value;
	value = u32_get_bits(info1, HAL_RX_HE_SIG_A_SU_INFO_INFO1_TXOP_DURATION);
	value = value << HE_TXOP_SHIFT;
	ppdu_info->he_data6 |= value;

	/* mirror the decoded rate parameters into the software fields */
	ppdu_info->mcs =
		u32_get_bits(info0, HAL_RX_HE_SIG_A_SU_INFO_INFO0_TRANSMIT_MCS);
	ppdu_info->bw =
		u32_get_bits(info0, HAL_RX_HE_SIG_A_SU_INFO_INFO0_TRANSMIT_BW);
	ppdu_info->ldpc = u32_get_bits(info1, HAL_RX_HE_SIG_A_SU_INFO_INFO1_CODING);
	ppdu_info->is_stbc = u32_get_bits(info1, HAL_RX_HE_SIG_A_SU_INFO_INFO1_STBC);
	ppdu_info->beamformed = u32_get_bits(info1, HAL_RX_HE_SIG_A_SU_INFO_INFO1_TXBF);
	dcm = u32_get_bits(info0, HAL_RX_HE_SIG_A_SU_INFO_INFO0_DCM);
	ppdu_info->nss = u32_get_bits(info0,
				      HAL_RX_HE_SIG_A_SU_INFO_INFO0_NSTS) + 1;
	ppdu_info->dcm = dcm;
}
564 
/* Parse the common part of U-SIG (EHT PPDUs): cache BW and UL/DL for the
 * later non-common TLV parsing and fill the radiotap U-SIG "common" word.
 */
static void
ath12k_dp_mon_hal_rx_parse_u_sig_cmn(const struct hal_mon_usig_cmn *cmn,
				     struct hal_rx_mon_ppdu_info *ppdu_info)
{
	u32 common;

	ppdu_info->u_sig_info.bw = le32_get_bits(cmn->info0,
						 HAL_RX_USIG_CMN_INFO0_BW);
	ppdu_info->u_sig_info.ul_dl = le32_get_bits(cmn->info0,
						    HAL_RX_USIG_CMN_INFO0_UL_DL);

	common = __le32_to_cpu(ppdu_info->u_sig_info.usig.common);
	common |= IEEE80211_RADIOTAP_EHT_USIG_COMMON_PHY_VER_KNOWN |
		  IEEE80211_RADIOTAP_EHT_USIG_COMMON_BW_KNOWN |
		  IEEE80211_RADIOTAP_EHT_USIG_COMMON_UL_DL_KNOWN |
		  IEEE80211_RADIOTAP_EHT_USIG_COMMON_BSS_COLOR_KNOWN |
		  IEEE80211_RADIOTAP_EHT_USIG_COMMON_TXOP_KNOWN |
		  ATH12K_LE32_DEC_ENC(cmn->info0,
				      HAL_RX_USIG_CMN_INFO0_PHY_VERSION,
				      IEEE80211_RADIOTAP_EHT_USIG_COMMON_PHY_VER) |
		  u32_encode_bits(ppdu_info->u_sig_info.bw,
				  IEEE80211_RADIOTAP_EHT_USIG_COMMON_BW) |
		  u32_encode_bits(ppdu_info->u_sig_info.ul_dl,
				  IEEE80211_RADIOTAP_EHT_USIG_COMMON_UL_DL) |
		  ATH12K_LE32_DEC_ENC(cmn->info0,
				      HAL_RX_USIG_CMN_INFO0_BSS_COLOR,
				      IEEE80211_RADIOTAP_EHT_USIG_COMMON_BSS_COLOR) |
		  ATH12K_LE32_DEC_ENC(cmn->info0,
				      HAL_RX_USIG_CMN_INFO0_TXOP,
				      IEEE80211_RADIOTAP_EHT_USIG_COMMON_TXOP);
	ppdu_info->u_sig_info.usig.common = cpu_to_le32(common);

	/* map the EHT BW code to the generic rx bandwidth; unknown codes
	 * fall through to 20 MHz, and both 320 MHz halves map to 320 MHz
	 */
	switch (ppdu_info->u_sig_info.bw) {
	default:
		fallthrough;
	case HAL_EHT_BW_20:
		ppdu_info->bw = HAL_RX_BW_20MHZ;
		break;
	case HAL_EHT_BW_40:
		ppdu_info->bw = HAL_RX_BW_40MHZ;
		break;
	case HAL_EHT_BW_80:
		ppdu_info->bw = HAL_RX_BW_80MHZ;
		break;
	case HAL_EHT_BW_160:
		ppdu_info->bw = HAL_RX_BW_160MHZ;
		break;
	case HAL_EHT_BW_320_1:
	case HAL_EHT_BW_320_2:
		ppdu_info->bw = HAL_RX_BW_320MHZ;
		break;
	}
}
618 
/* Parse the non-common part of U-SIG for a trigger-based (TB) PPDU and
 * fill the radiotap U-SIG value/mask words (spatial reuse 1/2, CRC, tail).
 */
static void
ath12k_dp_mon_hal_rx_parse_u_sig_tb(const struct hal_mon_usig_tb *usig_tb,
				    struct hal_rx_mon_ppdu_info *ppdu_info)
{
	struct ieee80211_radiotap_eht_usig *usig = &ppdu_info->u_sig_info.usig;
	enum ieee80211_radiotap_eht_usig_tb spatial_reuse1, spatial_reuse2;
	u32 common, value, mask;

	spatial_reuse1 = IEEE80211_RADIOTAP_EHT_USIG2_TB_B3_B6_SPATIAL_REUSE_1;
	spatial_reuse2 = IEEE80211_RADIOTAP_EHT_USIG2_TB_B7_B10_SPATIAL_REUSE_2;

	common = __le32_to_cpu(usig->common);
	value = __le32_to_cpu(usig->value);
	mask = __le32_to_cpu(usig->mask);

	ppdu_info->u_sig_info.ppdu_type_comp_mode =
				le32_get_bits(usig_tb->info0,
					      HAL_RX_USIG_TB_INFO0_PPDU_TYPE_COMP_MODE);

	common |= ATH12K_LE32_DEC_ENC(usig_tb->info0,
				      HAL_RX_USIG_TB_INFO0_RX_INTEG_CHECK_PASS,
				      IEEE80211_RADIOTAP_EHT_USIG_COMMON_BAD_USIG_CRC);

	value |= IEEE80211_RADIOTAP_EHT_USIG1_TB_B20_B25_DISREGARD |
		 u32_encode_bits(ppdu_info->u_sig_info.ppdu_type_comp_mode,
				 IEEE80211_RADIOTAP_EHT_USIG2_TB_B0_B1_PPDU_TYPE) |
		 IEEE80211_RADIOTAP_EHT_USIG2_TB_B2_VALIDATE |
		 ATH12K_LE32_DEC_ENC(usig_tb->info0,
				     HAL_RX_USIG_TB_INFO0_SPATIAL_REUSE_1,
				     spatial_reuse1) |
		 ATH12K_LE32_DEC_ENC(usig_tb->info0,
				     HAL_RX_USIG_TB_INFO0_SPATIAL_REUSE_2,
				     spatial_reuse2) |
		 IEEE80211_RADIOTAP_EHT_USIG2_TB_B11_B15_DISREGARD |
		 ATH12K_LE32_DEC_ENC(usig_tb->info0,
				     HAL_RX_USIG_TB_INFO0_CRC,
				     IEEE80211_RADIOTAP_EHT_USIG2_TB_B16_B19_CRC) |
		 ATH12K_LE32_DEC_ENC(usig_tb->info0,
				     HAL_RX_USIG_TB_INFO0_TAIL,
				     IEEE80211_RADIOTAP_EHT_USIG2_TB_B20_B25_TAIL);

	/* the mask marks which of the value bits above carry valid data */
	mask |= IEEE80211_RADIOTAP_EHT_USIG1_TB_B20_B25_DISREGARD |
		IEEE80211_RADIOTAP_EHT_USIG2_TB_B0_B1_PPDU_TYPE |
		IEEE80211_RADIOTAP_EHT_USIG2_TB_B2_VALIDATE |
		spatial_reuse1 | spatial_reuse2 |
		IEEE80211_RADIOTAP_EHT_USIG2_TB_B11_B15_DISREGARD |
		IEEE80211_RADIOTAP_EHT_USIG2_TB_B16_B19_CRC |
		IEEE80211_RADIOTAP_EHT_USIG2_TB_B20_B25_TAIL;

	usig->common = cpu_to_le32(common);
	usig->value = cpu_to_le32(value);
	usig->mask = cpu_to_le32(mask);
}
672 
/* Parse the non-common part of U-SIG for an MU PPDU: cache the PPDU type,
 * EHT-SIG MCS and symbol count for later EHT-SIG parsing, and fill the
 * radiotap U-SIG value/mask words.
 */
static void
ath12k_dp_mon_hal_rx_parse_u_sig_mu(const struct hal_mon_usig_mu *usig_mu,
				    struct hal_rx_mon_ppdu_info *ppdu_info)
{
	struct ieee80211_radiotap_eht_usig *usig = &ppdu_info->u_sig_info.usig;
	enum ieee80211_radiotap_eht_usig_mu sig_symb, punc;
	u32 common, value, mask;

	sig_symb = IEEE80211_RADIOTAP_EHT_USIG2_MU_B11_B15_EHT_SIG_SYMBOLS;
	punc = IEEE80211_RADIOTAP_EHT_USIG2_MU_B3_B7_PUNCTURED_INFO;

	common = __le32_to_cpu(usig->common);
	value = __le32_to_cpu(usig->value);
	mask = __le32_to_cpu(usig->mask);

	ppdu_info->u_sig_info.ppdu_type_comp_mode =
				le32_get_bits(usig_mu->info0,
					      HAL_RX_USIG_MU_INFO0_PPDU_TYPE_COMP_MODE);
	ppdu_info->u_sig_info.eht_sig_mcs =
				le32_get_bits(usig_mu->info0,
					      HAL_RX_USIG_MU_INFO0_EHT_SIG_MCS);
	ppdu_info->u_sig_info.num_eht_sig_sym =
				le32_get_bits(usig_mu->info0,
					      HAL_RX_USIG_MU_INFO0_NUM_EHT_SIG_SYM);

	common |= ATH12K_LE32_DEC_ENC(usig_mu->info0,
				      HAL_RX_USIG_MU_INFO0_RX_INTEG_CHECK_PASS,
				      IEEE80211_RADIOTAP_EHT_USIG_COMMON_BAD_USIG_CRC);

	value |= IEEE80211_RADIOTAP_EHT_USIG1_MU_B20_B24_DISREGARD |
		 IEEE80211_RADIOTAP_EHT_USIG1_MU_B25_VALIDATE |
		 u32_encode_bits(ppdu_info->u_sig_info.ppdu_type_comp_mode,
				 IEEE80211_RADIOTAP_EHT_USIG2_MU_B0_B1_PPDU_TYPE) |
		 IEEE80211_RADIOTAP_EHT_USIG2_MU_B2_VALIDATE |
		 ATH12K_LE32_DEC_ENC(usig_mu->info0,
				     HAL_RX_USIG_MU_INFO0_PUNC_CH_INFO,
				     punc) |
		 IEEE80211_RADIOTAP_EHT_USIG2_MU_B8_VALIDATE |
		 u32_encode_bits(ppdu_info->u_sig_info.eht_sig_mcs,
				 IEEE80211_RADIOTAP_EHT_USIG2_MU_B9_B10_SIG_MCS) |
		 u32_encode_bits(ppdu_info->u_sig_info.num_eht_sig_sym,
				 sig_symb) |
		 ATH12K_LE32_DEC_ENC(usig_mu->info0,
				     HAL_RX_USIG_MU_INFO0_CRC,
				     IEEE80211_RADIOTAP_EHT_USIG2_MU_B16_B19_CRC) |
		 ATH12K_LE32_DEC_ENC(usig_mu->info0,
				     HAL_RX_USIG_MU_INFO0_TAIL,
				     IEEE80211_RADIOTAP_EHT_USIG2_MU_B20_B25_TAIL);

	/* the mask marks which of the value bits above carry valid data */
	mask |= IEEE80211_RADIOTAP_EHT_USIG1_MU_B20_B24_DISREGARD |
		IEEE80211_RADIOTAP_EHT_USIG1_MU_B25_VALIDATE |
		IEEE80211_RADIOTAP_EHT_USIG2_MU_B0_B1_PPDU_TYPE |
		IEEE80211_RADIOTAP_EHT_USIG2_MU_B2_VALIDATE |
		punc |
		IEEE80211_RADIOTAP_EHT_USIG2_MU_B8_VALIDATE |
		IEEE80211_RADIOTAP_EHT_USIG2_MU_B9_B10_SIG_MCS |
		sig_symb |
		IEEE80211_RADIOTAP_EHT_USIG2_MU_B16_B19_CRC |
		IEEE80211_RADIOTAP_EHT_USIG2_MU_B20_B25_TAIL;

	usig->common = cpu_to_le32(common);
	usig->value = cpu_to_le32(value);
	usig->mask = cpu_to_le32(mask);
}
737 
738 static void
739 ath12k_dp_mon_hal_rx_parse_u_sig_hdr(const struct hal_mon_usig_hdr *usig,
740 				     struct hal_rx_mon_ppdu_info *ppdu_info)
741 {
742 	u8 comp_mode;
743 
744 	ppdu_info->eht_usig = true;
745 
746 	ath12k_dp_mon_hal_rx_parse_u_sig_cmn(&usig->cmn, ppdu_info);
747 
748 	comp_mode = le32_get_bits(usig->non_cmn.mu.info0,
749 				  HAL_RX_USIG_MU_INFO0_PPDU_TYPE_COMP_MODE);
750 
751 	if (comp_mode == 0 && ppdu_info->u_sig_info.ul_dl)
752 		ath12k_dp_mon_hal_rx_parse_u_sig_tb(&usig->non_cmn.tb, ppdu_info);
753 	else
754 		ath12k_dp_mon_hal_rx_parse_u_sig_mu(&usig->non_cmn.mu, ppdu_info);
755 }
756 
757 static void
758 ath12k_dp_mon_hal_aggr_tlv(struct hal_rx_mon_ppdu_info *ppdu_info,
759 			   u16 tlv_len, const void *tlv_data)
760 {
761 	if (tlv_len <= HAL_RX_MON_MAX_AGGR_SIZE - ppdu_info->tlv_aggr.cur_len) {
762 		memcpy(ppdu_info->tlv_aggr.buf + ppdu_info->tlv_aggr.cur_len,
763 		       tlv_data, tlv_len);
764 		ppdu_info->tlv_aggr.cur_len += tlv_len;
765 	}
766 }
767 
768 static inline bool
769 ath12k_dp_mon_hal_rx_is_frame_type_ndp(const struct hal_rx_u_sig_info *usig_info)
770 {
771 	if (usig_info->ppdu_type_comp_mode == 1 &&
772 	    usig_info->eht_sig_mcs == 0 &&
773 	    usig_info->num_eht_sig_sym == 0)
774 		return true;
775 
776 	return false;
777 }
778 
779 static inline bool
780 ath12k_dp_mon_hal_rx_is_non_ofdma(const struct hal_rx_u_sig_info *usig_info)
781 {
782 	u32 ppdu_type_comp_mode = usig_info->ppdu_type_comp_mode;
783 	u32 ul_dl = usig_info->ul_dl;
784 
785 	if ((ppdu_type_comp_mode == HAL_RX_RECEPTION_TYPE_MU_MIMO && ul_dl == 0) ||
786 	    (ppdu_type_comp_mode == HAL_RX_RECEPTION_TYPE_MU_OFDMA && ul_dl == 0) ||
787 	    (ppdu_type_comp_mode == HAL_RX_RECEPTION_TYPE_MU_MIMO  && ul_dl == 1))
788 		return true;
789 
790 	return false;
791 }
792 
793 static inline bool
794 ath12k_dp_mon_hal_rx_is_ofdma(const struct hal_rx_u_sig_info *usig_info)
795 {
796 	if (usig_info->ppdu_type_comp_mode == 0 && usig_info->ul_dl == 0)
797 		return true;
798 
799 	return false;
800 }
801 
/* Parse the EHT-SIG common block of an NDP frame into the radiotap EHT
 * known/data words (spatial reuse, LTF count, NSS, beamformed, CRC, tail).
 */
static void
ath12k_dp_mon_hal_rx_parse_eht_sig_ndp(const struct hal_eht_sig_ndp_cmn_eb *eht_sig_ndp,
				       struct hal_rx_mon_ppdu_info *ppdu_info)
{
	struct hal_rx_radiotap_eht *eht = &ppdu_info->eht_info.eht;
	u32 known, data;

	known = __le32_to_cpu(eht->known);
	known |= IEEE80211_RADIOTAP_EHT_KNOWN_SPATIAL_REUSE |
		 IEEE80211_RADIOTAP_EHT_KNOWN_EHT_LTF |
		 IEEE80211_RADIOTAP_EHT_KNOWN_NSS_S |
		 IEEE80211_RADIOTAP_EHT_KNOWN_BEAMFORMED_S |
		 IEEE80211_RADIOTAP_EHT_KNOWN_DISREGARD_S |
		 IEEE80211_RADIOTAP_EHT_KNOWN_CRC1 |
		 IEEE80211_RADIOTAP_EHT_KNOWN_TAIL1;
	eht->known = cpu_to_le32(known);

	data = __le32_to_cpu(eht->data[0]);
	data |= ATH12K_LE32_DEC_ENC(eht_sig_ndp->info0,
				    HAL_RX_EHT_SIG_NDP_CMN_INFO0_SPATIAL_REUSE,
				    IEEE80211_RADIOTAP_EHT_DATA0_SPATIAL_REUSE);
	/* GI and LTF size are separately indicated in radiotap header
	 * and hence will be parsed from other TLV
	 */
	data |=	ATH12K_LE32_DEC_ENC(eht_sig_ndp->info0,
				    HAL_RX_EHT_SIG_NDP_CMN_INFO0_NUM_LTF_SYM,
				    IEEE80211_RADIOTAP_EHT_DATA0_EHT_LTF);

	data |=	ATH12K_LE32_DEC_ENC(eht_sig_ndp->info0,
				    HAL_RX_EHT_SIG_NDP_CMN_INFO0_CRC,
				    IEEE80211_RADIOTAP_EHT_DATA0_CRC1_O);

	data |= ATH12K_LE32_DEC_ENC(eht_sig_ndp->info0,
				    HAL_RX_EHT_SIG_NDP_CMN_INFO0_DISREGARD,
				    IEEE80211_RADIOTAP_EHT_DATA0_DISREGARD_S);
	eht->data[0] = cpu_to_le32(data);

	/* NSS and beamformed status live in data word 7 */
	data = __le32_to_cpu(eht->data[7]);
	data |=	ATH12K_LE32_DEC_ENC(eht_sig_ndp->info0,
				    HAL_RX_EHT_SIG_NDP_CMN_INFO0_NSS,
				    IEEE80211_RADIOTAP_EHT_DATA7_NSS_S);

	data |=	ATH12K_LE32_DEC_ENC(eht_sig_ndp->info0,
				    HAL_RX_EHT_SIG_NDP_CMN_INFO0_BEAMFORMED,
				    IEEE80211_RADIOTAP_EHT_DATA7_BEAMFORMED_S);
	eht->data[7] = cpu_to_le32(data);
}
849 
/* Parse the EHT-SIG U-SIG overflow block (fields that did not fit in
 * U-SIG proper) into radiotap EHT data word 0.
 */
static void
ath12k_dp_mon_hal_rx_parse_usig_overflow(const struct hal_eht_sig_usig_overflow *ovflow,
					 struct hal_rx_mon_ppdu_info *ppdu_info)
{
	struct hal_rx_radiotap_eht *eht = &ppdu_info->eht_info.eht;
	u32 known, data;

	known = __le32_to_cpu(eht->known);
	known |= IEEE80211_RADIOTAP_EHT_KNOWN_SPATIAL_REUSE |
		 IEEE80211_RADIOTAP_EHT_KNOWN_EHT_LTF |
		 IEEE80211_RADIOTAP_EHT_KNOWN_LDPC_EXTRA_SYM_OM |
		 IEEE80211_RADIOTAP_EHT_KNOWN_PRE_PADD_FACOR_OM |
		 IEEE80211_RADIOTAP_EHT_KNOWN_PE_DISAMBIGUITY_OM |
		 IEEE80211_RADIOTAP_EHT_KNOWN_DISREGARD_O;
	eht->known = cpu_to_le32(known);

	data = __le32_to_cpu(eht->data[0]);
	data |=	ATH12K_LE32_DEC_ENC(ovflow->info0,
				    HAL_RX_EHT_SIG_OVERFLOW_INFO0_SPATIAL_REUSE,
				    IEEE80211_RADIOTAP_EHT_DATA0_SPATIAL_REUSE);

	/* GI and LTF size are separately indicated in radiotap header
	 * and hence will be parsed from other TLV
	 */
	data |=	ATH12K_LE32_DEC_ENC(ovflow->info0,
				    HAL_RX_EHT_SIG_OVERFLOW_INFO0_NUM_LTF_SYM,
				    IEEE80211_RADIOTAP_EHT_DATA0_EHT_LTF);

	data |=	ATH12K_LE32_DEC_ENC(ovflow->info0,
				    HAL_RX_EHT_SIG_OVERFLOW_INFO0_LDPC_EXTA_SYM,
				    IEEE80211_RADIOTAP_EHT_DATA0_LDPC_EXTRA_SYM_OM);

	data |=	ATH12K_LE32_DEC_ENC(ovflow->info0,
				    HAL_RX_EHT_SIG_OVERFLOW_INFO0_PRE_FEC_PAD_FACTOR,
				    IEEE80211_RADIOTAP_EHT_DATA0_PRE_PADD_FACOR_OM);

	data |=	ATH12K_LE32_DEC_ENC(ovflow->info0,
				    HAL_RX_EHT_SIG_OVERFLOW_INFO0_DISAMBIGUITY,
				    IEEE80211_RADIOTAP_EHT_DATA0_PE_DISAMBIGUITY_OM);

	data |=	ATH12K_LE32_DEC_ENC(ovflow->info0,
				    HAL_RX_EHT_SIG_OVERFLOW_INFO0_DISREGARD,
				    IEEE80211_RADIOTAP_EHT_DATA0_DISREGARD_O);
	eht->data[0] = cpu_to_le32(data);
}
895 
896 static void
897 ath12k_dp_mon_hal_rx_parse_non_ofdma_users(const struct hal_eht_sig_non_ofdma_cmn_eb *eb,
898 					   struct hal_rx_mon_ppdu_info *ppdu_info)
899 {
900 	struct hal_rx_radiotap_eht *eht = &ppdu_info->eht_info.eht;
901 	u32 known, data;
902 
903 	known = __le32_to_cpu(eht->known);
904 	known |= IEEE80211_RADIOTAP_EHT_KNOWN_NR_NON_OFDMA_USERS_M;
905 	eht->known = cpu_to_le32(known);
906 
907 	data = __le32_to_cpu(eht->data[7]);
908 	data |=	ATH12K_LE32_DEC_ENC(eb->info0,
909 				    HAL_RX_EHT_SIG_NON_OFDMA_INFO0_NUM_USERS,
910 				    IEEE80211_RADIOTAP_EHT_DATA7_NUM_OF_NON_OFDMA_USERS);
911 	eht->data[7] = cpu_to_le32(data);
912 }
913 
914 static void
915 ath12k_dp_mon_hal_rx_parse_eht_mumimo_user(const struct hal_eht_sig_mu_mimo *user,
916 					   struct hal_rx_mon_ppdu_info *ppdu_info)
917 {
918 	struct hal_rx_eht_info *eht_info = &ppdu_info->eht_info;
919 	u32 user_idx;
920 
921 	if (eht_info->num_user_info >= ARRAY_SIZE(eht_info->user_info))
922 		return;
923 
924 	user_idx = eht_info->num_user_info++;
925 
926 	eht_info->user_info[user_idx] |=
927 		IEEE80211_RADIOTAP_EHT_USER_INFO_STA_ID_KNOWN |
928 		IEEE80211_RADIOTAP_EHT_USER_INFO_MCS_KNOWN |
929 		IEEE80211_RADIOTAP_EHT_USER_INFO_CODING_KNOWN |
930 		IEEE80211_RADIOTAP_EHT_USER_INFO_SPATIAL_CONFIG_KNOWN_M |
931 		ATH12K_LE32_DEC_ENC(user->info0,
932 				    HAL_RX_EHT_SIG_MUMIMO_USER_INFO0_STA_ID,
933 				    IEEE80211_RADIOTAP_EHT_USER_INFO_STA_ID) |
934 		ATH12K_LE32_DEC_ENC(user->info0,
935 				    HAL_RX_EHT_SIG_MUMIMO_USER_INFO0_CODING,
936 				    IEEE80211_RADIOTAP_EHT_USER_INFO_CODING) |
937 		ATH12K_LE32_DEC_ENC(user->info0,
938 				    HAL_RX_EHT_SIG_MUMIMO_USER_INFO0_MCS,
939 				    IEEE80211_RADIOTAP_EHT_USER_INFO_MCS) |
940 		ATH12K_LE32_DEC_ENC(user->info0,
941 				    HAL_RX_EHT_SIG_MUMIMO_USER_INFO0_SPATIAL_CODING,
942 				    IEEE80211_RADIOTAP_EHT_USER_INFO_SPATIAL_CONFIG_M);
943 
944 	ppdu_info->mcs = le32_get_bits(user->info0,
945 				       HAL_RX_EHT_SIG_MUMIMO_USER_INFO0_MCS);
946 }
947 
948 static void
949 ath12k_dp_mon_hal_rx_parse_eht_non_mumimo_user(const struct hal_eht_sig_non_mu_mimo *user,
950 					       struct hal_rx_mon_ppdu_info *ppdu_info)
951 {
952 	struct hal_rx_eht_info *eht_info = &ppdu_info->eht_info;
953 	u32 user_idx;
954 
955 	if (eht_info->num_user_info >= ARRAY_SIZE(eht_info->user_info))
956 		return;
957 
958 	user_idx = eht_info->num_user_info++;
959 
960 	eht_info->user_info[user_idx] |=
961 		IEEE80211_RADIOTAP_EHT_USER_INFO_STA_ID_KNOWN |
962 		IEEE80211_RADIOTAP_EHT_USER_INFO_MCS_KNOWN |
963 		IEEE80211_RADIOTAP_EHT_USER_INFO_CODING_KNOWN |
964 		IEEE80211_RADIOTAP_EHT_USER_INFO_NSS_KNOWN_O |
965 		IEEE80211_RADIOTAP_EHT_USER_INFO_BEAMFORMING_KNOWN_O |
966 		ATH12K_LE32_DEC_ENC(user->info0,
967 				    HAL_RX_EHT_SIG_NON_MUMIMO_USER_INFO0_STA_ID,
968 				    IEEE80211_RADIOTAP_EHT_USER_INFO_STA_ID) |
969 		ATH12K_LE32_DEC_ENC(user->info0,
970 				    HAL_RX_EHT_SIG_NON_MUMIMO_USER_INFO0_CODING,
971 				    IEEE80211_RADIOTAP_EHT_USER_INFO_CODING) |
972 		ATH12K_LE32_DEC_ENC(user->info0,
973 				    HAL_RX_EHT_SIG_NON_MUMIMO_USER_INFO0_MCS,
974 				    IEEE80211_RADIOTAP_EHT_USER_INFO_MCS) |
975 		ATH12K_LE32_DEC_ENC(user->info0,
976 				    HAL_RX_EHT_SIG_NON_MUMIMO_USER_INFO0_NSS,
977 				    IEEE80211_RADIOTAP_EHT_USER_INFO_NSS_O) |
978 		ATH12K_LE32_DEC_ENC(user->info0,
979 				    HAL_RX_EHT_SIG_NON_MUMIMO_USER_INFO0_BEAMFORMED,
980 				    IEEE80211_RADIOTAP_EHT_USER_INFO_BEAMFORMING_O);
981 
982 	ppdu_info->mcs = le32_get_bits(user->info0,
983 				       HAL_RX_EHT_SIG_NON_MUMIMO_USER_INFO0_MCS);
984 
985 	ppdu_info->nss = le32_get_bits(user->info0,
986 				       HAL_RX_EHT_SIG_NON_MUMIMO_USER_INFO0_NSS) + 1;
987 }
988 
989 static inline bool
990 ath12k_dp_mon_hal_rx_is_mu_mimo_user(const struct hal_rx_u_sig_info *usig_info)
991 {
992 	if (usig_info->ppdu_type_comp_mode == HAL_RX_RECEPTION_TYPE_SU &&
993 	    usig_info->ul_dl == 1)
994 		return true;
995 
996 	return false;
997 }
998 
999 static void
1000 ath12k_dp_mon_hal_rx_parse_eht_sig_non_ofdma(const void *tlv,
1001 					     struct hal_rx_mon_ppdu_info *ppdu_info)
1002 {
1003 	const struct hal_eht_sig_non_ofdma_cmn_eb *eb = tlv;
1004 
1005 	ath12k_dp_mon_hal_rx_parse_usig_overflow(tlv, ppdu_info);
1006 	ath12k_dp_mon_hal_rx_parse_non_ofdma_users(eb, ppdu_info);
1007 
1008 	if (ath12k_dp_mon_hal_rx_is_mu_mimo_user(&ppdu_info->u_sig_info))
1009 		ath12k_dp_mon_hal_rx_parse_eht_mumimo_user(&eb->user_field.mu_mimo,
1010 							   ppdu_info);
1011 	else
1012 		ath12k_dp_mon_hal_rx_parse_eht_non_mumimo_user(&eb->user_field.n_mu_mimo,
1013 							       ppdu_info);
1014 }
1015 
/* Translate the per-80MHz RU allocation words of an EHT-SIG OFDMA common
 * eb into the radiotap EHT data[1..6] RU-allocation fields.
 *
 * The switch cascades through fallthrough: wider bandwidths fill their
 * extra content-channel fields first and then fall into the narrower
 * cases, so e.g. 320 MHz populates CC1 2::3..2::6, then 2::1/2::2 (160),
 * then 1::2 (80), then 1::1 (20/40).
 */
static void
ath12k_dp_mon_hal_rx_parse_ru_allocation(const struct hal_eht_sig_ofdma_cmn_eb *eb,
					 struct hal_rx_mon_ppdu_info *ppdu_info)
{
	const struct hal_eht_sig_ofdma_cmn_eb1 *ofdma_cmn_eb1 = &eb->eb1;
	const struct hal_eht_sig_ofdma_cmn_eb2 *ofdma_cmn_eb2 = &eb->eb2;
	struct hal_rx_radiotap_eht *eht = &ppdu_info->eht_info.eht;
	enum ieee80211_radiotap_eht_data ru_123, ru_124, ru_125, ru_126;
	enum ieee80211_radiotap_eht_data ru_121, ru_122, ru_112, ru_111;
	u32 data;

	/* Local aliases only to keep the encode lines below within the
	 * kernel line-length limit.
	 */
	ru_123 = IEEE80211_RADIOTAP_EHT_DATA4_RU_ALLOC_CC_1_2_3;
	ru_124 = IEEE80211_RADIOTAP_EHT_DATA5_RU_ALLOC_CC_1_2_4;
	ru_125 = IEEE80211_RADIOTAP_EHT_DATA5_RU_ALLOC_CC_1_2_5;
	ru_126 = IEEE80211_RADIOTAP_EHT_DATA6_RU_ALLOC_CC_1_2_6;
	ru_121 = IEEE80211_RADIOTAP_EHT_DATA3_RU_ALLOC_CC_1_2_1;
	ru_122 = IEEE80211_RADIOTAP_EHT_DATA3_RU_ALLOC_CC_1_2_2;
	ru_112 = IEEE80211_RADIOTAP_EHT_DATA2_RU_ALLOC_CC_1_1_2;
	ru_111 = IEEE80211_RADIOTAP_EHT_DATA1_RU_ALLOC_CC_1_1_1;

	switch (ppdu_info->u_sig_info.bw) {
	case HAL_EHT_BW_320_2:
	case HAL_EHT_BW_320_1:
		data = __le32_to_cpu(eht->data[4]);
		/* CC1 2::3 */
		data |=	IEEE80211_RADIOTAP_EHT_DATA4_RU_ALLOC_CC_1_2_3_KNOWN |
			ATH12K_LE64_DEC_ENC(ofdma_cmn_eb2->info0,
					    HAL_RX_EHT_SIG_OFDMA_EB2_RU_ALLOC_2_3,
					    ru_123);
		eht->data[4] = cpu_to_le32(data);

		data = __le32_to_cpu(eht->data[5]);
		/* CC1 2::4 */
		data |=	IEEE80211_RADIOTAP_EHT_DATA5_RU_ALLOC_CC_1_2_4_KNOWN |
			ATH12K_LE64_DEC_ENC(ofdma_cmn_eb2->info0,
					    HAL_RX_EHT_SIG_OFDMA_EB2_RU_ALLOC_2_4,
					    ru_124);

		/* CC1 2::5 */
		data |=	IEEE80211_RADIOTAP_EHT_DATA5_RU_ALLOC_CC_1_2_5_KNOWN |
			ATH12K_LE64_DEC_ENC(ofdma_cmn_eb2->info0,
					    HAL_RX_EHT_SIG_OFDMA_EB2_RU_ALLOC_2_5,
					    ru_125);
		eht->data[5] = cpu_to_le32(data);

		data = __le32_to_cpu(eht->data[6]);
		/* CC1 2::6 */
		data |=	IEEE80211_RADIOTAP_EHT_DATA6_RU_ALLOC_CC_1_2_6_KNOWN |
			ATH12K_LE64_DEC_ENC(ofdma_cmn_eb2->info0,
					    HAL_RX_EHT_SIG_OFDMA_EB2_RU_ALLOC_2_6,
					    ru_126);
		eht->data[6] = cpu_to_le32(data);

		fallthrough;
	case HAL_EHT_BW_160:
		data = __le32_to_cpu(eht->data[3]);
		/* CC1 2::1 */
		data |=	IEEE80211_RADIOTAP_EHT_DATA3_RU_ALLOC_CC_1_2_1_KNOWN |
			ATH12K_LE64_DEC_ENC(ofdma_cmn_eb2->info0,
					    HAL_RX_EHT_SIG_OFDMA_EB2_RU_ALLOC_2_1,
					    ru_121);
		/* CC1 2::2 */
		data |=	IEEE80211_RADIOTAP_EHT_DATA3_RU_ALLOC_CC_1_2_2_KNOWN |
			ATH12K_LE64_DEC_ENC(ofdma_cmn_eb2->info0,
					    HAL_RX_EHT_SIG_OFDMA_EB2_RU_ALLOC_2_2,
					    ru_122);
		eht->data[3] = cpu_to_le32(data);

		fallthrough;
	case HAL_EHT_BW_80:
		data = __le32_to_cpu(eht->data[2]);
		/* CC1 1::2 */
		data |=	IEEE80211_RADIOTAP_EHT_DATA2_RU_ALLOC_CC_1_1_2_KNOWN |
			ATH12K_LE64_DEC_ENC(ofdma_cmn_eb1->info0,
					    HAL_RX_EHT_SIG_OFDMA_EB1_RU_ALLOC_1_2,
					    ru_112);
		eht->data[2] = cpu_to_le32(data);

		fallthrough;
	case HAL_EHT_BW_40:
		fallthrough;
	case HAL_EHT_BW_20:
		data = __le32_to_cpu(eht->data[1]);
		/* CC1 1::1 */
		data |=	IEEE80211_RADIOTAP_EHT_DATA1_RU_ALLOC_CC_1_1_1_KNOWN |
			ATH12K_LE64_DEC_ENC(ofdma_cmn_eb1->info0,
					    HAL_RX_EHT_SIG_OFDMA_EB1_RU_ALLOC_1_1,
					    ru_111);
		eht->data[1] = cpu_to_le32(data);
		break;
	default:
		break;
	}
}
1110 
1111 static void
1112 ath12k_dp_mon_hal_rx_parse_eht_sig_ofdma(const void *tlv,
1113 					 struct hal_rx_mon_ppdu_info *ppdu_info)
1114 {
1115 	const struct hal_eht_sig_ofdma_cmn_eb *ofdma = tlv;
1116 
1117 	ath12k_dp_mon_hal_rx_parse_usig_overflow(tlv, ppdu_info);
1118 	ath12k_dp_mon_hal_rx_parse_ru_allocation(ofdma, ppdu_info);
1119 
1120 	ath12k_dp_mon_hal_rx_parse_eht_non_mumimo_user(&ofdma->user_field.n_mu_mimo,
1121 						       ppdu_info);
1122 }
1123 
1124 static void
1125 ath12k_dp_mon_parse_eht_sig_hdr(struct hal_rx_mon_ppdu_info *ppdu_info,
1126 				const void *tlv_data)
1127 {
1128 	ppdu_info->is_eht = true;
1129 
1130 	if (ath12k_dp_mon_hal_rx_is_frame_type_ndp(&ppdu_info->u_sig_info))
1131 		ath12k_dp_mon_hal_rx_parse_eht_sig_ndp(tlv_data, ppdu_info);
1132 	else if (ath12k_dp_mon_hal_rx_is_non_ofdma(&ppdu_info->u_sig_info))
1133 		ath12k_dp_mon_hal_rx_parse_eht_sig_non_ofdma(tlv_data, ppdu_info);
1134 	else if (ath12k_dp_mon_hal_rx_is_ofdma(&ppdu_info->u_sig_info))
1135 		ath12k_dp_mon_hal_rx_parse_eht_sig_ofdma(tlv_data, ppdu_info);
1136 }
1137 
1138 static inline enum ath12k_eht_ru_size
1139 hal_rx_mon_hal_ru_size_to_ath12k_ru_size(u32 hal_ru_size)
1140 {
1141 	switch (hal_ru_size) {
1142 	case HAL_EHT_RU_26:
1143 		return ATH12K_EHT_RU_26;
1144 	case HAL_EHT_RU_52:
1145 		return ATH12K_EHT_RU_52;
1146 	case HAL_EHT_RU_78:
1147 		return ATH12K_EHT_RU_52_26;
1148 	case HAL_EHT_RU_106:
1149 		return ATH12K_EHT_RU_106;
1150 	case HAL_EHT_RU_132:
1151 		return ATH12K_EHT_RU_106_26;
1152 	case HAL_EHT_RU_242:
1153 		return ATH12K_EHT_RU_242;
1154 	case HAL_EHT_RU_484:
1155 		return ATH12K_EHT_RU_484;
1156 	case HAL_EHT_RU_726:
1157 		return ATH12K_EHT_RU_484_242;
1158 	case HAL_EHT_RU_996:
1159 		return ATH12K_EHT_RU_996;
1160 	case HAL_EHT_RU_996x2:
1161 		return ATH12K_EHT_RU_996x2;
1162 	case HAL_EHT_RU_996x3:
1163 		return ATH12K_EHT_RU_996x3;
1164 	case HAL_EHT_RU_996x4:
1165 		return ATH12K_EHT_RU_996x4;
1166 	case HAL_EHT_RU_NONE:
1167 		return ATH12K_EHT_RU_INVALID;
1168 	case HAL_EHT_RU_996_484:
1169 		return ATH12K_EHT_RU_996_484;
1170 	case HAL_EHT_RU_996x2_484:
1171 		return ATH12K_EHT_RU_996x2_484;
1172 	case HAL_EHT_RU_996x3_484:
1173 		return ATH12K_EHT_RU_996x3_484;
1174 	case HAL_EHT_RU_996_484_242:
1175 		return ATH12K_EHT_RU_996_484_242;
1176 	default:
1177 		return ATH12K_EHT_RU_INVALID;
1178 	}
1179 }
1180 
1181 static inline u32
1182 hal_rx_ul_ofdma_ru_size_to_width(enum ath12k_eht_ru_size ru_size)
1183 {
1184 	switch (ru_size) {
1185 	case ATH12K_EHT_RU_26:
1186 		return RU_26;
1187 	case ATH12K_EHT_RU_52:
1188 		return RU_52;
1189 	case ATH12K_EHT_RU_52_26:
1190 		return RU_52_26;
1191 	case ATH12K_EHT_RU_106:
1192 		return RU_106;
1193 	case ATH12K_EHT_RU_106_26:
1194 		return RU_106_26;
1195 	case ATH12K_EHT_RU_242:
1196 		return RU_242;
1197 	case ATH12K_EHT_RU_484:
1198 		return RU_484;
1199 	case ATH12K_EHT_RU_484_242:
1200 		return RU_484_242;
1201 	case ATH12K_EHT_RU_996:
1202 		return RU_996;
1203 	case ATH12K_EHT_RU_996_484:
1204 		return RU_996_484;
1205 	case ATH12K_EHT_RU_996_484_242:
1206 		return RU_996_484_242;
1207 	case ATH12K_EHT_RU_996x2:
1208 		return RU_2X996;
1209 	case ATH12K_EHT_RU_996x2_484:
1210 		return RU_2X996_484;
1211 	case ATH12K_EHT_RU_996x3:
1212 		return RU_3X996;
1213 	case ATH12K_EHT_RU_996x3_484:
1214 		return RU_3X996_484;
1215 	case ATH12K_EHT_RU_996x4:
1216 		return RU_4X996;
1217 	default:
1218 		return RU_INVALID;
1219 	}
1220 }
1221 
1222 static void
1223 ath12k_dp_mon_hal_rx_parse_user_info(const struct hal_receive_user_info *rx_usr_info,
1224 				     u16 user_id,
1225 				     struct hal_rx_mon_ppdu_info *ppdu_info)
1226 {
1227 	struct hal_rx_user_status *mon_rx_user_status = NULL;
1228 	struct hal_rx_radiotap_eht *eht = &ppdu_info->eht_info.eht;
1229 	enum ath12k_eht_ru_size rtap_ru_size = ATH12K_EHT_RU_INVALID;
1230 	u32 ru_width, reception_type, ru_index = HAL_EHT_RU_INVALID;
1231 	u32 ru_type_80_0, ru_start_index_80_0;
1232 	u32 ru_type_80_1, ru_start_index_80_1;
1233 	u32 ru_type_80_2, ru_start_index_80_2;
1234 	u32 ru_type_80_3, ru_start_index_80_3;
1235 	u32 ru_size = 0, num_80mhz_with_ru = 0;
1236 	u64 ru_index_320mhz = 0;
1237 	u32 ru_index_per80mhz;
1238 
1239 	reception_type = le32_get_bits(rx_usr_info->info0,
1240 				       HAL_RX_USR_INFO0_RECEPTION_TYPE);
1241 
1242 	switch (reception_type) {
1243 	case HAL_RECEPTION_TYPE_SU:
1244 		ppdu_info->reception_type = HAL_RX_RECEPTION_TYPE_SU;
1245 		break;
1246 	case HAL_RECEPTION_TYPE_DL_MU_MIMO:
1247 	case HAL_RECEPTION_TYPE_UL_MU_MIMO:
1248 		ppdu_info->reception_type = HAL_RX_RECEPTION_TYPE_MU_MIMO;
1249 		break;
1250 	case HAL_RECEPTION_TYPE_DL_MU_OFMA:
1251 	case HAL_RECEPTION_TYPE_UL_MU_OFDMA:
1252 		ppdu_info->reception_type = HAL_RX_RECEPTION_TYPE_MU_OFDMA;
1253 		break;
1254 	case HAL_RECEPTION_TYPE_DL_MU_OFDMA_MIMO:
1255 	case HAL_RECEPTION_TYPE_UL_MU_OFDMA_MIMO:
1256 		ppdu_info->reception_type = HAL_RX_RECEPTION_TYPE_MU_OFDMA_MIMO;
1257 	}
1258 
1259 	ppdu_info->is_stbc = le32_get_bits(rx_usr_info->info0, HAL_RX_USR_INFO0_STBC);
1260 	ppdu_info->ldpc = le32_get_bits(rx_usr_info->info2, HAL_RX_USR_INFO2_LDPC);
1261 	ppdu_info->dcm = le32_get_bits(rx_usr_info->info2, HAL_RX_USR_INFO2_STA_DCM);
1262 	ppdu_info->bw = le32_get_bits(rx_usr_info->info1, HAL_RX_USR_INFO1_RX_BW);
1263 	ppdu_info->mcs = le32_get_bits(rx_usr_info->info1, HAL_RX_USR_INFO1_MCS);
1264 	ppdu_info->nss = le32_get_bits(rx_usr_info->info2, HAL_RX_USR_INFO2_NSS) + 1;
1265 
1266 	if (user_id < HAL_MAX_UL_MU_USERS) {
1267 		mon_rx_user_status = &ppdu_info->userstats[user_id];
1268 		mon_rx_user_status->mcs = ppdu_info->mcs;
1269 		mon_rx_user_status->nss = ppdu_info->nss;
1270 	}
1271 
1272 	if (!(ppdu_info->reception_type == HAL_RX_RECEPTION_TYPE_MU_MIMO ||
1273 	      ppdu_info->reception_type == HAL_RX_RECEPTION_TYPE_MU_OFDMA ||
1274 	      ppdu_info->reception_type == HAL_RX_RECEPTION_TYPE_MU_OFDMA_MIMO))
1275 		return;
1276 
1277 	/* RU allocation present only for OFDMA reception */
1278 	ru_type_80_0 = le32_get_bits(rx_usr_info->info2, HAL_RX_USR_INFO2_RU_TYPE_80_0);
1279 	ru_start_index_80_0 = le32_get_bits(rx_usr_info->info3,
1280 					    HAL_RX_USR_INFO3_RU_START_IDX_80_0);
1281 	if (ru_type_80_0 != HAL_EHT_RU_NONE) {
1282 		ru_size += ru_type_80_0;
1283 		ru_index_per80mhz = ru_start_index_80_0;
1284 		ru_index = ru_index_per80mhz;
1285 		ru_index_320mhz |= HAL_RU_PER80(ru_type_80_0, 0, ru_index_per80mhz);
1286 		num_80mhz_with_ru++;
1287 	}
1288 
1289 	ru_type_80_1 = le32_get_bits(rx_usr_info->info2, HAL_RX_USR_INFO2_RU_TYPE_80_1);
1290 	ru_start_index_80_1 = le32_get_bits(rx_usr_info->info3,
1291 					    HAL_RX_USR_INFO3_RU_START_IDX_80_1);
1292 	if (ru_type_80_1 != HAL_EHT_RU_NONE) {
1293 		ru_size += ru_type_80_1;
1294 		ru_index_per80mhz = ru_start_index_80_1;
1295 		ru_index = ru_index_per80mhz;
1296 		ru_index_320mhz |= HAL_RU_PER80(ru_type_80_1, 1, ru_index_per80mhz);
1297 		num_80mhz_with_ru++;
1298 	}
1299 
1300 	ru_type_80_2 = le32_get_bits(rx_usr_info->info2, HAL_RX_USR_INFO2_RU_TYPE_80_2);
1301 	ru_start_index_80_2 = le32_get_bits(rx_usr_info->info3,
1302 					    HAL_RX_USR_INFO3_RU_START_IDX_80_2);
1303 	if (ru_type_80_2 != HAL_EHT_RU_NONE) {
1304 		ru_size += ru_type_80_2;
1305 		ru_index_per80mhz = ru_start_index_80_2;
1306 		ru_index = ru_index_per80mhz;
1307 		ru_index_320mhz |= HAL_RU_PER80(ru_type_80_2, 2, ru_index_per80mhz);
1308 		num_80mhz_with_ru++;
1309 	}
1310 
1311 	ru_type_80_3 = le32_get_bits(rx_usr_info->info2, HAL_RX_USR_INFO2_RU_TYPE_80_3);
1312 	ru_start_index_80_3 = le32_get_bits(rx_usr_info->info2,
1313 					    HAL_RX_USR_INFO3_RU_START_IDX_80_3);
1314 	if (ru_type_80_3 != HAL_EHT_RU_NONE) {
1315 		ru_size += ru_type_80_3;
1316 		ru_index_per80mhz = ru_start_index_80_3;
1317 		ru_index = ru_index_per80mhz;
1318 		ru_index_320mhz |= HAL_RU_PER80(ru_type_80_3, 3, ru_index_per80mhz);
1319 		num_80mhz_with_ru++;
1320 	}
1321 
1322 	if (num_80mhz_with_ru > 1) {
1323 		/* Calculate the MRU index */
1324 		switch (ru_index_320mhz) {
1325 		case HAL_EHT_RU_996_484_0:
1326 		case HAL_EHT_RU_996x2_484_0:
1327 		case HAL_EHT_RU_996x3_484_0:
1328 			ru_index = 0;
1329 			break;
1330 		case HAL_EHT_RU_996_484_1:
1331 		case HAL_EHT_RU_996x2_484_1:
1332 		case HAL_EHT_RU_996x3_484_1:
1333 			ru_index = 1;
1334 			break;
1335 		case HAL_EHT_RU_996_484_2:
1336 		case HAL_EHT_RU_996x2_484_2:
1337 		case HAL_EHT_RU_996x3_484_2:
1338 			ru_index = 2;
1339 			break;
1340 		case HAL_EHT_RU_996_484_3:
1341 		case HAL_EHT_RU_996x2_484_3:
1342 		case HAL_EHT_RU_996x3_484_3:
1343 			ru_index = 3;
1344 			break;
1345 		case HAL_EHT_RU_996_484_4:
1346 		case HAL_EHT_RU_996x2_484_4:
1347 		case HAL_EHT_RU_996x3_484_4:
1348 			ru_index = 4;
1349 			break;
1350 		case HAL_EHT_RU_996_484_5:
1351 		case HAL_EHT_RU_996x2_484_5:
1352 		case HAL_EHT_RU_996x3_484_5:
1353 			ru_index = 5;
1354 			break;
1355 		case HAL_EHT_RU_996_484_6:
1356 		case HAL_EHT_RU_996x2_484_6:
1357 		case HAL_EHT_RU_996x3_484_6:
1358 			ru_index = 6;
1359 			break;
1360 		case HAL_EHT_RU_996_484_7:
1361 		case HAL_EHT_RU_996x2_484_7:
1362 		case HAL_EHT_RU_996x3_484_7:
1363 			ru_index = 7;
1364 			break;
1365 		case HAL_EHT_RU_996x2_484_8:
1366 			ru_index = 8;
1367 			break;
1368 		case HAL_EHT_RU_996x2_484_9:
1369 			ru_index = 9;
1370 			break;
1371 		case HAL_EHT_RU_996x2_484_10:
1372 			ru_index = 10;
1373 			break;
1374 		case HAL_EHT_RU_996x2_484_11:
1375 			ru_index = 11;
1376 			break;
1377 		default:
1378 			ru_index = HAL_EHT_RU_INVALID;
1379 			break;
1380 		}
1381 
1382 		ru_size += 4;
1383 	}
1384 
1385 	rtap_ru_size = hal_rx_mon_hal_ru_size_to_ath12k_ru_size(ru_size);
1386 	if (rtap_ru_size != ATH12K_EHT_RU_INVALID) {
1387 		u32 known, data;
1388 
1389 		known = __le32_to_cpu(eht->known);
1390 		known |= IEEE80211_RADIOTAP_EHT_KNOWN_RU_MRU_SIZE_OM;
1391 		eht->known = cpu_to_le32(known);
1392 
1393 		data = __le32_to_cpu(eht->data[1]);
1394 		data |=	u32_encode_bits(rtap_ru_size,
1395 					IEEE80211_RADIOTAP_EHT_DATA1_RU_SIZE);
1396 		eht->data[1] = cpu_to_le32(data);
1397 	}
1398 
1399 	if (ru_index != HAL_EHT_RU_INVALID) {
1400 		u32 known, data;
1401 
1402 		known = __le32_to_cpu(eht->known);
1403 		known |= IEEE80211_RADIOTAP_EHT_KNOWN_RU_MRU_INDEX_OM;
1404 		eht->known = cpu_to_le32(known);
1405 
1406 		data = __le32_to_cpu(eht->data[1]);
1407 		data |=	u32_encode_bits(rtap_ru_size,
1408 					IEEE80211_RADIOTAP_EHT_DATA1_RU_INDEX);
1409 		eht->data[1] = cpu_to_le32(data);
1410 	}
1411 
1412 	if (mon_rx_user_status && ru_index != HAL_EHT_RU_INVALID &&
1413 	    rtap_ru_size != ATH12K_EHT_RU_INVALID) {
1414 		mon_rx_user_status->ul_ofdma_ru_start_index = ru_index;
1415 		mon_rx_user_status->ul_ofdma_ru_size = rtap_ru_size;
1416 
1417 		ru_width = hal_rx_ul_ofdma_ru_size_to_width(rtap_ru_size);
1418 
1419 		mon_rx_user_status->ul_ofdma_ru_width = ru_width;
1420 		mon_rx_user_status->ofdma_info_valid = 1;
1421 	}
1422 }
1423 
1424 static void ath12k_dp_mon_parse_rx_msdu_end_err(u32 info, u32 *errmap)
1425 {
1426 	if (info & RX_MSDU_END_INFO13_FCS_ERR)
1427 		*errmap |= HAL_RX_MPDU_ERR_FCS;
1428 
1429 	if (info & RX_MSDU_END_INFO13_DECRYPT_ERR)
1430 		*errmap |= HAL_RX_MPDU_ERR_DECRYPT;
1431 
1432 	if (info & RX_MSDU_END_INFO13_TKIP_MIC_ERR)
1433 		*errmap |= HAL_RX_MPDU_ERR_TKIP_MIC;
1434 
1435 	if (info & RX_MSDU_END_INFO13_A_MSDU_ERROR)
1436 		*errmap |= HAL_RX_MPDU_ERR_AMSDU_ERR;
1437 
1438 	if (info & RX_MSDU_END_INFO13_OVERFLOW_ERR)
1439 		*errmap |= HAL_RX_MPDU_ERR_OVERFLOW;
1440 
1441 	if (info & RX_MSDU_END_INFO13_MSDU_LEN_ERR)
1442 		*errmap |= HAL_RX_MPDU_ERR_MSDU_LEN;
1443 
1444 	if (info & RX_MSDU_END_INFO13_MPDU_LEN_ERR)
1445 		*errmap |= HAL_RX_MPDU_ERR_MPDU_LEN;
1446 }
1447 
1448 static void
1449 ath12k_parse_cmn_usr_info(const struct hal_phyrx_common_user_info *cmn_usr_info,
1450 			  struct hal_rx_mon_ppdu_info *ppdu_info)
1451 {
1452 	struct hal_rx_radiotap_eht *eht = &ppdu_info->eht_info.eht;
1453 	u32 known, data, cp_setting, ltf_size;
1454 
1455 	known = __le32_to_cpu(eht->known);
1456 	known |= IEEE80211_RADIOTAP_EHT_KNOWN_GI |
1457 		IEEE80211_RADIOTAP_EHT_KNOWN_EHT_LTF;
1458 	eht->known = cpu_to_le32(known);
1459 
1460 	cp_setting = le32_get_bits(cmn_usr_info->info0,
1461 				   HAL_RX_CMN_USR_INFO0_CP_SETTING);
1462 	ltf_size = le32_get_bits(cmn_usr_info->info0,
1463 				 HAL_RX_CMN_USR_INFO0_LTF_SIZE);
1464 
1465 	data = __le32_to_cpu(eht->data[0]);
1466 	data |= u32_encode_bits(cp_setting, IEEE80211_RADIOTAP_EHT_DATA0_GI);
1467 	data |= u32_encode_bits(ltf_size, IEEE80211_RADIOTAP_EHT_DATA0_LTF);
1468 	eht->data[0] = cpu_to_le32(data);
1469 
1470 	if (!ppdu_info->ltf_size)
1471 		ppdu_info->ltf_size = ltf_size;
1472 	if (!ppdu_info->gi)
1473 		ppdu_info->gi = cp_setting;
1474 }
1475 
1476 static void
1477 ath12k_dp_mon_parse_status_msdu_end(struct ath12k_mon_data *pmon,
1478 				    const struct hal_rx_msdu_end *msdu_end)
1479 {
1480 	ath12k_dp_mon_parse_rx_msdu_end_err(__le32_to_cpu(msdu_end->info2),
1481 					    &pmon->err_bitmap);
1482 	pmon->decap_format = le32_get_bits(msdu_end->info1,
1483 					   RX_MSDU_END_INFO11_DECAP_FORMAT);
1484 }
1485 
/* Parse one 64-bit-header status-ring TLV and accumulate the decoded
 * fields into pmon->mon_ppdu_info.
 *
 * Returns a hal_rx_mon_status code telling the caller how to proceed:
 * PPDU_NOT_DONE to keep parsing, MPDU_START/MPDU_END/MSDU_END/BUF_ADDR
 * to act on frame boundaries, BUF_DONE/PPDU_DONE when the buffer or the
 * whole PPDU status is complete.
 */
static enum hal_rx_mon_status
ath12k_dp_mon_rx_parse_status_tlv(struct ath12k *ar,
				  struct ath12k_mon_data *pmon,
				  const struct hal_tlv_64_hdr *tlv)
{
	struct hal_rx_mon_ppdu_info *ppdu_info = &pmon->mon_ppdu_info;
	const void *tlv_data = tlv->value;
	u32 info[7], userid;
	u16 tlv_tag, tlv_len;

	tlv_tag = le64_get_bits(tlv->tl, HAL_TLV_64_HDR_TAG);
	tlv_len = le64_get_bits(tlv->tl, HAL_TLV_64_HDR_LEN);
	userid = le64_get_bits(tlv->tl, HAL_TLV_64_USR_ID);

	/* A differently-tagged TLV terminates an in-progress EHT-SIG
	 * aggregation: flush the aggregated buffer before handling it.
	 */
	if (ppdu_info->tlv_aggr.in_progress && ppdu_info->tlv_aggr.tlv_tag != tlv_tag) {
		ath12k_dp_mon_parse_eht_sig_hdr(ppdu_info, ppdu_info->tlv_aggr.buf);

		ppdu_info->tlv_aggr.in_progress = false;
		ppdu_info->tlv_aggr.cur_len = 0;
	}

	switch (tlv_tag) {
	case HAL_RX_PPDU_START: {
		const struct hal_rx_ppdu_start *ppdu_start = tlv_data;

		u64 ppdu_ts = ath12k_le32hilo_to_u64(ppdu_start->ppdu_start_ts_63_32,
						     ppdu_start->ppdu_start_ts_31_0);

		info[0] = __le32_to_cpu(ppdu_start->info0);

		ppdu_info->ppdu_id = u32_get_bits(info[0],
						  HAL_RX_PPDU_START_INFO0_PPDU_ID);

		info[1] = __le32_to_cpu(ppdu_start->info1);
		ppdu_info->chan_num = u32_get_bits(info[1],
						   HAL_RX_PPDU_START_INFO1_CHAN_NUM);
		ppdu_info->freq = u32_get_bits(info[1],
					       HAL_RX_PPDU_START_INFO1_CHAN_FREQ);
		ppdu_info->ppdu_ts = ppdu_ts;

		/* New PPDU: reset the per-PPDU user count and FCS bitmap */
		if (ppdu_info->ppdu_id != ppdu_info->last_ppdu_id) {
			ppdu_info->last_ppdu_id = ppdu_info->ppdu_id;
			ppdu_info->num_users = 0;
			memset(&ppdu_info->mpdu_fcs_ok_bitmap, 0,
			       HAL_RX_NUM_WORDS_PER_PPDU_BITMAP *
			       sizeof(ppdu_info->mpdu_fcs_ok_bitmap[0]));
		}
		break;
	}
	case HAL_RX_PPDU_END_USER_STATS: {
		const struct hal_rx_ppdu_end_user_stats *eu_stats = tlv_data;
		u32 tid_bitmap;

		/* info[3] is intentionally unused for this TLV */
		info[0] = __le32_to_cpu(eu_stats->info0);
		info[1] = __le32_to_cpu(eu_stats->info1);
		info[2] = __le32_to_cpu(eu_stats->info2);
		info[4] = __le32_to_cpu(eu_stats->info4);
		info[5] = __le32_to_cpu(eu_stats->info5);
		info[6] = __le32_to_cpu(eu_stats->info6);

		ppdu_info->ast_index =
			u32_get_bits(info[2], HAL_RX_PPDU_END_USER_STATS_INFO2_AST_INDEX);
		ppdu_info->fc_valid =
			u32_get_bits(info[1], HAL_RX_PPDU_END_USER_STATS_INFO1_FC_VALID);
		tid_bitmap = u32_get_bits(info[6],
					  HAL_RX_PPDU_END_USER_STATS_INFO6_TID_BITMAP);
		/* Lowest set bit of the bitmap is taken as the TID;
		 * -1 when no bit is set.
		 */
		ppdu_info->tid = ffs(tid_bitmap) - 1;
		ppdu_info->tcp_msdu_count =
			u32_get_bits(info[4],
				     HAL_RX_PPDU_END_USER_STATS_INFO4_TCP_MSDU_CNT);
		ppdu_info->udp_msdu_count =
			u32_get_bits(info[4],
				     HAL_RX_PPDU_END_USER_STATS_INFO4_UDP_MSDU_CNT);
		ppdu_info->other_msdu_count =
			u32_get_bits(info[5],
				     HAL_RX_PPDU_END_USER_STATS_INFO5_OTHER_MSDU_CNT);
		ppdu_info->tcp_ack_msdu_count =
			u32_get_bits(info[5],
				     HAL_RX_PPDU_END_USER_STATS_INFO5_TCP_ACK_MSDU_CNT);
		ppdu_info->preamble_type =
			u32_get_bits(info[1],
				     HAL_RX_PPDU_END_USER_STATS_INFO1_PKT_TYPE);
		ppdu_info->num_mpdu_fcs_ok =
			u32_get_bits(info[1],
				     HAL_RX_PPDU_END_USER_STATS_INFO1_MPDU_CNT_FCS_OK);
		ppdu_info->num_mpdu_fcs_err =
			u32_get_bits(info[0],
				     HAL_RX_PPDU_END_USER_STATS_INFO0_MPDU_CNT_FCS_ERR);
		ppdu_info->peer_id =
			u32_get_bits(info[0], HAL_RX_PPDU_END_USER_STATS_INFO0_PEER_ID);

		/* Raise the radiotap flag corresponding to the preamble */
		switch (ppdu_info->preamble_type) {
		case HAL_RX_PREAMBLE_11N:
			ppdu_info->ht_flags = 1;
			break;
		case HAL_RX_PREAMBLE_11AC:
			ppdu_info->vht_flags = 1;
			break;
		case HAL_RX_PREAMBLE_11AX:
			ppdu_info->he_flags = 1;
			break;
		case HAL_RX_PREAMBLE_11BE:
			ppdu_info->is_eht = true;
			break;
		default:
			break;
		}

		if (userid < HAL_MAX_UL_MU_USERS) {
			struct hal_rx_user_status *rxuser_stats =
				&ppdu_info->userstats[userid];

			/* More than one MPDU (ok or err) implies an A-MPDU */
			if (ppdu_info->num_mpdu_fcs_ok > 1 ||
			    ppdu_info->num_mpdu_fcs_err > 1)
				ppdu_info->userstats[userid].ampdu_present = true;

			ppdu_info->num_users += 1;

			ath12k_dp_mon_rx_handle_ofdma_info(eu_stats, rxuser_stats);
			ath12k_dp_mon_rx_populate_mu_user_info(eu_stats, ppdu_info,
							       rxuser_stats);
		}
		/* NOTE(review): first two FCS-ok bitmap words are carried in
		 * the rsvd1 area of this TLV — confirm against the hw layout.
		 */
		ppdu_info->mpdu_fcs_ok_bitmap[0] = __le32_to_cpu(eu_stats->rsvd1[0]);
		ppdu_info->mpdu_fcs_ok_bitmap[1] = __le32_to_cpu(eu_stats->rsvd1[1]);
		break;
	}
	case HAL_RX_PPDU_END_USER_STATS_EXT: {
		const struct hal_rx_ppdu_end_user_stats_ext *eu_stats = tlv_data;

		/* Extension TLV supplies bitmap words 2..7 */
		ppdu_info->mpdu_fcs_ok_bitmap[2] = __le32_to_cpu(eu_stats->info1);
		ppdu_info->mpdu_fcs_ok_bitmap[3] = __le32_to_cpu(eu_stats->info2);
		ppdu_info->mpdu_fcs_ok_bitmap[4] = __le32_to_cpu(eu_stats->info3);
		ppdu_info->mpdu_fcs_ok_bitmap[5] = __le32_to_cpu(eu_stats->info4);
		ppdu_info->mpdu_fcs_ok_bitmap[6] = __le32_to_cpu(eu_stats->info5);
		ppdu_info->mpdu_fcs_ok_bitmap[7] = __le32_to_cpu(eu_stats->info6);
		break;
	}
	case HAL_PHYRX_HT_SIG:
		ath12k_dp_mon_parse_ht_sig(tlv_data, ppdu_info);
		break;

	case HAL_PHYRX_L_SIG_B:
		ath12k_dp_mon_parse_l_sig_b(tlv_data, ppdu_info);
		break;

	case HAL_PHYRX_L_SIG_A:
		ath12k_dp_mon_parse_l_sig_a(tlv_data, ppdu_info);
		break;

	case HAL_PHYRX_VHT_SIG_A:
		ath12k_dp_mon_parse_vht_sig_a(tlv_data, ppdu_info);
		break;

	case HAL_PHYRX_HE_SIG_A_SU:
		ath12k_dp_mon_parse_he_sig_su(tlv_data, ppdu_info);
		break;

	case HAL_PHYRX_HE_SIG_A_MU_DL:
		ath12k_dp_mon_parse_he_sig_mu(tlv_data, ppdu_info);
		break;

	case HAL_PHYRX_HE_SIG_B1_MU:
		ath12k_dp_mon_parse_he_sig_b1_mu(tlv_data, ppdu_info);
		break;

	case HAL_PHYRX_HE_SIG_B2_MU:
		ath12k_dp_mon_parse_he_sig_b2_mu(tlv_data, ppdu_info);
		break;

	case HAL_PHYRX_HE_SIG_B2_OFDMA:
		ath12k_dp_mon_parse_he_sig_b2_ofdma(tlv_data, ppdu_info);
		break;

	case HAL_PHYRX_RSSI_LEGACY: {
		const struct hal_rx_phyrx_rssi_legacy_info *rssi = tlv_data;

		info[0] = __le32_to_cpu(rssi->info0);
		info[2] = __le32_to_cpu(rssi->info2);

		/* TODO: Please note that the combined rssi will not be accurate
		 * in MU case. Rssi in MU needs to be retrieved from
		 * PHYRX_OTHER_RECEIVE_INFO TLV.
		 */
		ppdu_info->rssi_comb =
			u32_get_bits(info[2],
				     HAL_RX_RSSI_LEGACY_INFO_INFO2_RSSI_COMB_PPDU);

		ppdu_info->bw = u32_get_bits(info[0],
					     HAL_RX_RSSI_LEGACY_INFO_INFO0_RX_BW);
		break;
	}
	case HAL_PHYRX_COMMON_USER_INFO: {
		ath12k_parse_cmn_usr_info(tlv_data, ppdu_info);
		break;
	}
	case HAL_RX_PPDU_START_USER_INFO:
		ath12k_dp_mon_hal_rx_parse_user_info(tlv_data, userid, ppdu_info);
		break;

	case HAL_RXPCU_PPDU_END_INFO: {
		const struct hal_rx_ppdu_end_duration *ppdu_rx_duration = tlv_data;

		info[0] = __le32_to_cpu(ppdu_rx_duration->info0);
		ppdu_info->rx_duration =
			u32_get_bits(info[0], HAL_RX_PPDU_END_DURATION);
		/* 64-bit TSF assembled from two 32-bit words carried in the
		 * rsvd0 area (rsvd0[1] = high word, rsvd0[0] = low word).
		 */
		ppdu_info->tsft = __le32_to_cpu(ppdu_rx_duration->rsvd0[1]);
		ppdu_info->tsft = (ppdu_info->tsft << 32) |
				   __le32_to_cpu(ppdu_rx_duration->rsvd0[0]);
		break;
	}
	case HAL_RX_MPDU_START: {
		const struct hal_rx_mpdu_start *mpdu_start = tlv_data;
		u16 peer_id;

		info[1] = __le32_to_cpu(mpdu_start->info1);
		peer_id = u32_get_bits(info[1], HAL_RX_MPDU_START_INFO1_PEERID);
		if (peer_id)
			ppdu_info->peer_id = peer_id;

		/* NOTE(review): an INFO2-named mask is applied to info1 here —
		 * confirm against the hal_rx_mpdu_start register layout.
		 */
		ppdu_info->mpdu_len += u32_get_bits(info[1],
						    HAL_RX_MPDU_START_INFO2_MPDU_LEN);
		if (userid < HAL_MAX_UL_MU_USERS) {
			info[0] = __le32_to_cpu(mpdu_start->info0);
			ppdu_info->userid = userid;
			ppdu_info->userstats[userid].ampdu_id =
				u32_get_bits(info[0], HAL_RX_MPDU_START_INFO0_PPDU_ID);
		}

		return HAL_RX_MON_STATUS_MPDU_START;
	}
	case HAL_RX_MSDU_START:
		/* TODO: add msdu start parsing logic */
		break;
	case HAL_MON_BUF_ADDR:
		return HAL_RX_MON_STATUS_BUF_ADDR;
	case HAL_RX_MSDU_END:
		ath12k_dp_mon_parse_status_msdu_end(pmon, tlv_data);
		return HAL_RX_MON_STATUS_MSDU_END;
	case HAL_RX_MPDU_END:
		return HAL_RX_MON_STATUS_MPDU_END;
	case HAL_PHYRX_GENERIC_U_SIG:
		ath12k_dp_mon_hal_rx_parse_u_sig_hdr(tlv_data, ppdu_info);
		break;
	case HAL_PHYRX_GENERIC_EHT_SIG:
		/* Handle the case where aggregation is in progress
		 * or the current TLV is one of the TLVs which should be
		 * aggregated
		 */
		if (!ppdu_info->tlv_aggr.in_progress) {
			ppdu_info->tlv_aggr.in_progress = true;
			ppdu_info->tlv_aggr.tlv_tag = tlv_tag;
			ppdu_info->tlv_aggr.cur_len = 0;
		}

		ppdu_info->is_eht = true;

		ath12k_dp_mon_hal_aggr_tlv(ppdu_info, tlv_len, tlv_data);
		break;
	case HAL_DUMMY:
		return HAL_RX_MON_STATUS_BUF_DONE;
	case HAL_RX_PPDU_END_STATUS_DONE:
	case 0:
		return HAL_RX_MON_STATUS_PPDU_DONE;
	default:
		break;
	}

	return HAL_RX_MON_STATUS_PPDU_NOT_DONE;
}
1755 
static void
ath12k_dp_mon_fill_rx_stats_info(struct ath12k *ar,
				 struct hal_rx_mon_ppdu_info *ppdu_info,
				 struct hal_rx_mon_ppdu_info *unused_alias,
				 struct ieee80211_rx_status *rx_status)
{
}
1783 
1784 static struct sk_buff
1785 *ath12k_dp_rx_alloc_mon_status_buf(struct ath12k_base *ab,
1786 				   struct dp_rxdma_mon_ring *rx_ring,
1787 				   int *buf_id)
1788 {
1789 	struct sk_buff *skb;
1790 	dma_addr_t paddr;
1791 
1792 	skb = dev_alloc_skb(RX_MON_STATUS_BUF_SIZE);
1793 
1794 	if (!skb)
1795 		goto fail_alloc_skb;
1796 
1797 	if (!IS_ALIGNED((unsigned long)skb->data,
1798 			RX_MON_STATUS_BUF_ALIGN)) {
1799 		skb_pull(skb, PTR_ALIGN(skb->data, RX_MON_STATUS_BUF_ALIGN) -
1800 			 skb->data);
1801 	}
1802 
1803 	paddr = dma_map_single(ab->dev, skb->data,
1804 			       skb->len + skb_tailroom(skb),
1805 			       DMA_FROM_DEVICE);
1806 	if (unlikely(dma_mapping_error(ab->dev, paddr)))
1807 		goto fail_free_skb;
1808 
1809 	spin_lock_bh(&rx_ring->idr_lock);
1810 	*buf_id = idr_alloc(&rx_ring->bufs_idr, skb, 0,
1811 			    rx_ring->bufs_max, GFP_ATOMIC);
1812 	spin_unlock_bh(&rx_ring->idr_lock);
1813 	if (*buf_id < 0)
1814 		goto fail_dma_unmap;
1815 
1816 	ATH12K_SKB_RXCB(skb)->paddr = paddr;
1817 	return skb;
1818 
1819 fail_dma_unmap:
1820 	dma_unmap_single(ab->dev, paddr, skb->len + skb_tailroom(skb),
1821 			 DMA_FROM_DEVICE);
1822 fail_free_skb:
1823 	dev_kfree_skb_any(skb);
1824 fail_alloc_skb:
1825 	return NULL;
1826 }
1827 
1828 static enum dp_mon_status_buf_state
1829 ath12k_dp_rx_mon_buf_done(struct ath12k_base *ab, struct hal_srng *srng,
1830 			  struct dp_rxdma_mon_ring *rx_ring)
1831 {
1832 	struct ath12k_skb_rxcb *rxcb;
1833 	struct hal_tlv_64_hdr *tlv;
1834 	struct sk_buff *skb;
1835 	void *status_desc;
1836 	dma_addr_t paddr;
1837 	u32 cookie;
1838 	int buf_id;
1839 	u8 rbm;
1840 
1841 	status_desc = ath12k_hal_srng_src_next_peek(ab, srng);
1842 	if (!status_desc)
1843 		return DP_MON_STATUS_NO_DMA;
1844 
1845 	ath12k_hal_rx_buf_addr_info_get(status_desc, &paddr, &cookie, &rbm);
1846 
1847 	buf_id = u32_get_bits(cookie, DP_RXDMA_BUF_COOKIE_BUF_ID);
1848 
1849 	spin_lock_bh(&rx_ring->idr_lock);
1850 	skb = idr_find(&rx_ring->bufs_idr, buf_id);
1851 	spin_unlock_bh(&rx_ring->idr_lock);
1852 
1853 	if (!skb)
1854 		return DP_MON_STATUS_NO_DMA;
1855 
1856 	rxcb = ATH12K_SKB_RXCB(skb);
1857 	dma_sync_single_for_cpu(ab->dev, rxcb->paddr,
1858 				skb->len + skb_tailroom(skb),
1859 				DMA_FROM_DEVICE);
1860 
1861 	tlv = (struct hal_tlv_64_hdr *)skb->data;
1862 	if (le64_get_bits(tlv->tl, HAL_TLV_HDR_TAG) != HAL_RX_STATUS_BUFFER_DONE)
1863 		return DP_MON_STATUS_NO_DMA;
1864 
1865 	return DP_MON_STATUS_REPLINISH;
1866 }
1867 
1868 static u32 ath12k_dp_mon_comp_ppduid(u32 msdu_ppdu_id, u32 *ppdu_id)
1869 {
1870 	u32 ret = 0;
1871 
1872 	if ((*ppdu_id < msdu_ppdu_id) &&
1873 	    ((msdu_ppdu_id - *ppdu_id) < DP_NOT_PPDU_ID_WRAP_AROUND)) {
1874 		/* Hold on mon dest ring, and reap mon status ring. */
1875 		*ppdu_id = msdu_ppdu_id;
1876 		ret = msdu_ppdu_id;
1877 	} else if ((*ppdu_id > msdu_ppdu_id) &&
1878 		((*ppdu_id - msdu_ppdu_id) > DP_NOT_PPDU_ID_WRAP_AROUND)) {
1879 		/* PPDU ID has exceeded the maximum value and will
1880 		 * restart from 0.
1881 		 */
1882 		*ppdu_id = msdu_ppdu_id;
1883 		ret = msdu_ppdu_id;
1884 	}
1885 	return ret;
1886 }
1887 
1888 static
1889 void ath12k_dp_mon_next_link_desc_get(struct hal_rx_msdu_link *msdu_link,
1890 				      dma_addr_t *paddr, u32 *sw_cookie, u8 *rbm,
1891 				      struct ath12k_buffer_addr **pp_buf_addr_info)
1892 {
1893 	struct ath12k_buffer_addr *buf_addr_info;
1894 
1895 	buf_addr_info = &msdu_link->buf_addr_info;
1896 
1897 	ath12k_hal_rx_buf_addr_info_get(buf_addr_info, paddr, sw_cookie, rbm);
1898 
1899 	*pp_buf_addr_info = buf_addr_info;
1900 }
1901 
1902 static void
1903 ath12k_dp_mon_fill_rx_rate(struct ath12k *ar,
1904 			   struct hal_rx_mon_ppdu_info *ppdu_info,
1905 			   struct ieee80211_rx_status *rx_status)
1906 {
1907 	struct ieee80211_supported_band *sband;
1908 	enum rx_msdu_start_pkt_type pkt_type;
1909 	u8 rate_mcs, nss, sgi;
1910 	bool is_cck;
1911 
1912 	pkt_type = ppdu_info->preamble_type;
1913 	rate_mcs = ppdu_info->rate;
1914 	nss = ppdu_info->nss;
1915 	sgi = ppdu_info->gi;
1916 
1917 	switch (pkt_type) {
1918 	case RX_MSDU_START_PKT_TYPE_11A:
1919 	case RX_MSDU_START_PKT_TYPE_11B:
1920 		is_cck = (pkt_type == RX_MSDU_START_PKT_TYPE_11B);
1921 		if (rx_status->band < NUM_NL80211_BANDS) {
1922 			sband = &ar->mac.sbands[rx_status->band];
1923 			rx_status->rate_idx = ath12k_mac_hw_rate_to_idx(sband, rate_mcs,
1924 									is_cck);
1925 		}
1926 		break;
1927 	case RX_MSDU_START_PKT_TYPE_11N:
1928 		rx_status->encoding = RX_ENC_HT;
1929 		if (rate_mcs > ATH12K_HT_MCS_MAX) {
1930 			ath12k_warn(ar->ab,
1931 				    "Received with invalid mcs in HT mode %d\n",
1932 				     rate_mcs);
1933 			break;
1934 		}
1935 		rx_status->rate_idx = rate_mcs + (8 * (nss - 1));
1936 		if (sgi)
1937 			rx_status->enc_flags |= RX_ENC_FLAG_SHORT_GI;
1938 		break;
1939 	case RX_MSDU_START_PKT_TYPE_11AC:
1940 		rx_status->encoding = RX_ENC_VHT;
1941 		rx_status->rate_idx = rate_mcs;
1942 		if (rate_mcs > ATH12K_VHT_MCS_MAX) {
1943 			ath12k_warn(ar->ab,
1944 				    "Received with invalid mcs in VHT mode %d\n",
1945 				     rate_mcs);
1946 			break;
1947 		}
1948 		if (sgi)
1949 			rx_status->enc_flags |= RX_ENC_FLAG_SHORT_GI;
1950 		break;
1951 	case RX_MSDU_START_PKT_TYPE_11AX:
1952 		rx_status->rate_idx = rate_mcs;
1953 		if (rate_mcs > ATH12K_HE_MCS_MAX) {
1954 			ath12k_warn(ar->ab,
1955 				    "Received with invalid mcs in HE mode %d\n",
1956 				    rate_mcs);
1957 			break;
1958 		}
1959 		rx_status->encoding = RX_ENC_HE;
1960 		rx_status->he_gi = ath12k_he_gi_to_nl80211_he_gi(sgi);
1961 		break;
1962 	case RX_MSDU_START_PKT_TYPE_11BE:
1963 		rx_status->rate_idx = rate_mcs;
1964 		if (rate_mcs > ATH12K_EHT_MCS_MAX) {
1965 			ath12k_warn(ar->ab,
1966 				    "Received with invalid mcs in EHT mode %d\n",
1967 				    rate_mcs);
1968 			break;
1969 		}
1970 		rx_status->encoding = RX_ENC_EHT;
1971 		rx_status->he_gi = ath12k_he_gi_to_nl80211_he_gi(sgi);
1972 		break;
1973 	default:
1974 		ath12k_dbg(ar->ab, ATH12K_DBG_DATA,
1975 			   "monitor receives invalid preamble type %d",
1976 			    pkt_type);
1977 		break;
1978 	}
1979 }
1980 
1981 static void ath12k_dp_mon_rx_msdus_set_payload(struct ath12k *ar,
1982 					       struct sk_buff *head_msdu,
1983 					       struct sk_buff *tail_msdu)
1984 {
1985 	u32 rx_pkt_offset, l2_hdr_offset, total_offset;
1986 
1987 	rx_pkt_offset = ar->ab->hal.hal_desc_sz;
1988 	l2_hdr_offset =
1989 		ath12k_dp_rx_h_l3pad(ar->ab, (struct hal_rx_desc *)tail_msdu->data);
1990 
1991 	if (ar->ab->hw_params->rxdma1_enable)
1992 		total_offset = ATH12K_MON_RX_PKT_OFFSET;
1993 	else
1994 		total_offset = rx_pkt_offset + l2_hdr_offset;
1995 
1996 	skb_pull(head_msdu, total_offset);
1997 }
1998 
1999 static struct sk_buff *
2000 ath12k_dp_mon_rx_merg_msdus(struct ath12k *ar,
2001 			    struct dp_mon_mpdu *mon_mpdu,
2002 			    struct hal_rx_mon_ppdu_info *ppdu_info,
2003 			    struct ieee80211_rx_status *rxs)
2004 {
2005 	struct ath12k_base *ab = ar->ab;
2006 	struct sk_buff *msdu, *mpdu_buf, *prev_buf, *head_frag_list;
2007 	struct sk_buff *head_msdu, *tail_msdu;
2008 	struct hal_rx_desc *rx_desc;
2009 	u8 *hdr_desc, *dest, decap_format = mon_mpdu->decap_format;
2010 	struct ieee80211_hdr_3addr *wh;
2011 	struct ieee80211_channel *channel;
2012 	u32 frag_list_sum_len = 0;
2013 	u8 channel_num = ppdu_info->chan_num;
2014 
2015 	mpdu_buf = NULL;
2016 	head_msdu = mon_mpdu->head;
2017 	tail_msdu = mon_mpdu->tail;
2018 
2019 	if (!head_msdu || !tail_msdu)
2020 		goto err_merge_fail;
2021 
2022 	ath12k_dp_mon_fill_rx_stats_info(ar, ppdu_info, rxs);
2023 
2024 	if (unlikely(rxs->band == NUM_NL80211_BANDS ||
2025 		     !ath12k_ar_to_hw(ar)->wiphy->bands[rxs->band])) {
2026 		ath12k_dbg(ar->ab, ATH12K_DBG_DATA,
2027 			   "sband is NULL for status band %d channel_num %d center_freq %d pdev_id %d\n",
2028 			   rxs->band, channel_num, ppdu_info->freq, ar->pdev_idx);
2029 
2030 		spin_lock_bh(&ar->data_lock);
2031 		channel = ar->rx_channel;
2032 		if (channel) {
2033 			rxs->band = channel->band;
2034 			channel_num =
2035 				ieee80211_frequency_to_channel(channel->center_freq);
2036 		}
2037 		spin_unlock_bh(&ar->data_lock);
2038 	}
2039 
2040 	if (rxs->band < NUM_NL80211_BANDS)
2041 		rxs->freq = ieee80211_channel_to_frequency(channel_num,
2042 							   rxs->band);
2043 
2044 	ath12k_dp_mon_fill_rx_rate(ar, ppdu_info, rxs);
2045 
2046 	if (decap_format == DP_RX_DECAP_TYPE_RAW) {
2047 		ath12k_dp_mon_rx_msdus_set_payload(ar, head_msdu, tail_msdu);
2048 
2049 		prev_buf = head_msdu;
2050 		msdu = head_msdu->next;
2051 		head_frag_list = NULL;
2052 
2053 		while (msdu) {
2054 			ath12k_dp_mon_rx_msdus_set_payload(ar, head_msdu, tail_msdu);
2055 
2056 			if (!head_frag_list)
2057 				head_frag_list = msdu;
2058 
2059 			frag_list_sum_len += msdu->len;
2060 			prev_buf = msdu;
2061 			msdu = msdu->next;
2062 		}
2063 
2064 		prev_buf->next = NULL;
2065 
2066 		skb_trim(prev_buf, prev_buf->len);
2067 		if (head_frag_list) {
2068 			skb_shinfo(head_msdu)->frag_list = head_frag_list;
2069 			head_msdu->data_len = frag_list_sum_len;
2070 			head_msdu->len += head_msdu->data_len;
2071 			head_msdu->next = NULL;
2072 		}
2073 	} else if (decap_format == DP_RX_DECAP_TYPE_NATIVE_WIFI) {
2074 		u8 qos_pkt = 0;
2075 
2076 		rx_desc = (struct hal_rx_desc *)head_msdu->data;
2077 		hdr_desc =
2078 			ab->hal_rx_ops->rx_desc_get_msdu_payload(rx_desc);
2079 
2080 		/* Base size */
2081 		wh = (struct ieee80211_hdr_3addr *)hdr_desc;
2082 
2083 		if (ieee80211_is_data_qos(wh->frame_control))
2084 			qos_pkt = 1;
2085 
2086 		msdu = head_msdu;
2087 
2088 		while (msdu) {
2089 			ath12k_dp_mon_rx_msdus_set_payload(ar, head_msdu, tail_msdu);
2090 			if (qos_pkt) {
2091 				dest = skb_push(msdu, sizeof(__le16));
2092 				if (!dest)
2093 					goto err_merge_fail;
2094 				memcpy(dest, hdr_desc, sizeof(struct ieee80211_qos_hdr));
2095 			}
2096 			prev_buf = msdu;
2097 			msdu = msdu->next;
2098 		}
2099 		dest = skb_put(prev_buf, HAL_RX_FCS_LEN);
2100 		if (!dest)
2101 			goto err_merge_fail;
2102 
2103 		ath12k_dbg(ab, ATH12K_DBG_DATA,
2104 			   "mpdu_buf %p mpdu_buf->len %u",
2105 			   prev_buf, prev_buf->len);
2106 	} else {
2107 		ath12k_dbg(ab, ATH12K_DBG_DATA,
2108 			   "decap format %d is not supported!\n",
2109 			   decap_format);
2110 		goto err_merge_fail;
2111 	}
2112 
2113 	return head_msdu;
2114 
2115 err_merge_fail:
2116 	if (mpdu_buf && decap_format != DP_RX_DECAP_TYPE_RAW) {
2117 		ath12k_dbg(ab, ATH12K_DBG_DATA,
2118 			   "err_merge_fail mpdu_buf %p", mpdu_buf);
2119 		/* Free the head buffer */
2120 		dev_kfree_skb_any(mpdu_buf);
2121 	}
2122 	return NULL;
2123 }
2124 
2125 static void
2126 ath12k_dp_mon_rx_update_radiotap_he(struct hal_rx_mon_ppdu_info *rx_status,
2127 				    u8 *rtap_buf)
2128 {
2129 	u32 rtap_len = 0;
2130 
2131 	put_unaligned_le16(rx_status->he_data1, &rtap_buf[rtap_len]);
2132 	rtap_len += 2;
2133 
2134 	put_unaligned_le16(rx_status->he_data2, &rtap_buf[rtap_len]);
2135 	rtap_len += 2;
2136 
2137 	put_unaligned_le16(rx_status->he_data3, &rtap_buf[rtap_len]);
2138 	rtap_len += 2;
2139 
2140 	put_unaligned_le16(rx_status->he_data4, &rtap_buf[rtap_len]);
2141 	rtap_len += 2;
2142 
2143 	put_unaligned_le16(rx_status->he_data5, &rtap_buf[rtap_len]);
2144 	rtap_len += 2;
2145 
2146 	put_unaligned_le16(rx_status->he_data6, &rtap_buf[rtap_len]);
2147 }
2148 
2149 static void
2150 ath12k_dp_mon_rx_update_radiotap_he_mu(struct hal_rx_mon_ppdu_info *rx_status,
2151 				       u8 *rtap_buf)
2152 {
2153 	u32 rtap_len = 0;
2154 
2155 	put_unaligned_le16(rx_status->he_flags1, &rtap_buf[rtap_len]);
2156 	rtap_len += 2;
2157 
2158 	put_unaligned_le16(rx_status->he_flags2, &rtap_buf[rtap_len]);
2159 	rtap_len += 2;
2160 
2161 	rtap_buf[rtap_len] = rx_status->he_RU[0];
2162 	rtap_len += 1;
2163 
2164 	rtap_buf[rtap_len] = rx_status->he_RU[1];
2165 	rtap_len += 1;
2166 
2167 	rtap_buf[rtap_len] = rx_status->he_RU[2];
2168 	rtap_len += 1;
2169 
2170 	rtap_buf[rtap_len] = rx_status->he_RU[3];
2171 }
2172 
2173 static void ath12k_dp_mon_update_radiotap(struct ath12k *ar,
2174 					  struct hal_rx_mon_ppdu_info *ppduinfo,
2175 					  struct sk_buff *mon_skb,
2176 					  struct ieee80211_rx_status *rxs)
2177 {
2178 	struct ieee80211_supported_band *sband;
2179 	s32 noise_floor;
2180 	u8 *ptr = NULL;
2181 
2182 	spin_lock_bh(&ar->data_lock);
2183 	noise_floor = ath12k_pdev_get_noise_floor(ar);
2184 	spin_unlock_bh(&ar->data_lock);
2185 
2186 	rxs->flag |= RX_FLAG_MACTIME_START;
2187 	rxs->nss = ppduinfo->nss;
2188 	if (test_bit(WMI_TLV_SERVICE_HW_DB2DBM_CONVERSION_SUPPORT,
2189 		     ar->ab->wmi_ab.svc_map))
2190 		rxs->signal = ppduinfo->rssi_comb;
2191 	else
2192 		rxs->signal = ppduinfo->rssi_comb + noise_floor;
2193 
2194 	if (ppduinfo->userstats[ppduinfo->userid].ampdu_present) {
2195 		rxs->flag |= RX_FLAG_AMPDU_DETAILS;
2196 		rxs->ampdu_reference = ppduinfo->userstats[ppduinfo->userid].ampdu_id;
2197 	}
2198 
2199 	if (ppduinfo->is_eht || ppduinfo->eht_usig) {
2200 		struct ieee80211_radiotap_tlv *tlv;
2201 		struct ieee80211_radiotap_eht *eht;
2202 		struct ieee80211_radiotap_eht_usig *usig;
2203 		u16 len = 0, i, eht_len, usig_len;
2204 		u8 user;
2205 
2206 		if (ppduinfo->is_eht) {
2207 			eht_len = struct_size(eht,
2208 					      user_info,
2209 					      ppduinfo->eht_info.num_user_info);
2210 			len += sizeof(*tlv) + eht_len;
2211 		}
2212 
2213 		if (ppduinfo->eht_usig) {
2214 			usig_len = sizeof(*usig);
2215 			len += sizeof(*tlv) + usig_len;
2216 		}
2217 
2218 		rxs->flag |= RX_FLAG_RADIOTAP_TLV_AT_END;
2219 		rxs->encoding = RX_ENC_EHT;
2220 
2221 		skb_reset_mac_header(mon_skb);
2222 
2223 		tlv = skb_push(mon_skb, len);
2224 
2225 		if (ppduinfo->is_eht) {
2226 			tlv->type = cpu_to_le16(IEEE80211_RADIOTAP_EHT);
2227 			tlv->len = cpu_to_le16(eht_len);
2228 
2229 			eht = (struct ieee80211_radiotap_eht *)tlv->data;
2230 			eht->known = ppduinfo->eht_info.eht.known;
2231 
2232 			for (i = 0;
2233 			     i < ARRAY_SIZE(eht->data) &&
2234 			     i < ARRAY_SIZE(ppduinfo->eht_info.eht.data);
2235 			     i++)
2236 				eht->data[i] = ppduinfo->eht_info.eht.data[i];
2237 
2238 			for (user = 0; user < ppduinfo->eht_info.num_user_info; user++)
2239 				put_unaligned_le32(ppduinfo->eht_info.user_info[user],
2240 						   &eht->user_info[user]);
2241 
2242 			tlv = (struct ieee80211_radiotap_tlv *)&tlv->data[eht_len];
2243 		}
2244 
2245 		if (ppduinfo->eht_usig) {
2246 			tlv->type = cpu_to_le16(IEEE80211_RADIOTAP_EHT_USIG);
2247 			tlv->len = cpu_to_le16(usig_len);
2248 
2249 			usig = (struct ieee80211_radiotap_eht_usig *)tlv->data;
2250 			*usig = ppduinfo->u_sig_info.usig;
2251 		}
2252 	} else if (ppduinfo->he_mu_flags) {
2253 		rxs->flag |= RX_FLAG_RADIOTAP_HE_MU;
2254 		rxs->encoding = RX_ENC_HE;
2255 		ptr = skb_push(mon_skb, sizeof(struct ieee80211_radiotap_he_mu));
2256 		ath12k_dp_mon_rx_update_radiotap_he_mu(ppduinfo, ptr);
2257 	} else if (ppduinfo->he_flags) {
2258 		rxs->flag |= RX_FLAG_RADIOTAP_HE;
2259 		rxs->encoding = RX_ENC_HE;
2260 		ptr = skb_push(mon_skb, sizeof(struct ieee80211_radiotap_he));
2261 		ath12k_dp_mon_rx_update_radiotap_he(ppduinfo, ptr);
2262 		rxs->rate_idx = ppduinfo->rate;
2263 	} else if (ppduinfo->vht_flags) {
2264 		rxs->encoding = RX_ENC_VHT;
2265 		rxs->rate_idx = ppduinfo->rate;
2266 	} else if (ppduinfo->ht_flags) {
2267 		rxs->encoding = RX_ENC_HT;
2268 		rxs->rate_idx = ppduinfo->rate;
2269 	} else {
2270 		rxs->encoding = RX_ENC_LEGACY;
2271 		sband = &ar->mac.sbands[rxs->band];
2272 		rxs->rate_idx = ath12k_mac_hw_rate_to_idx(sband, ppduinfo->rate,
2273 							  ppduinfo->cck_flag);
2274 	}
2275 
2276 	rxs->mactime = ppduinfo->tsft;
2277 }
2278 
2279 static void ath12k_dp_mon_rx_deliver_msdu(struct ath12k *ar, struct napi_struct *napi,
2280 					  struct sk_buff *msdu,
2281 					  const struct hal_rx_mon_ppdu_info *ppduinfo,
2282 					  struct ieee80211_rx_status *status,
2283 					  u8 decap)
2284 {
2285 	static const struct ieee80211_radiotap_he known = {
2286 		.data1 = cpu_to_le16(IEEE80211_RADIOTAP_HE_DATA1_DATA_MCS_KNOWN |
2287 				     IEEE80211_RADIOTAP_HE_DATA1_BW_RU_ALLOC_KNOWN),
2288 		.data2 = cpu_to_le16(IEEE80211_RADIOTAP_HE_DATA2_GI_KNOWN),
2289 	};
2290 	struct ieee80211_rx_status *rx_status;
2291 	struct ieee80211_radiotap_he *he = NULL;
2292 	struct ieee80211_sta *pubsta = NULL;
2293 	struct ath12k_peer *peer;
2294 	struct ath12k_skb_rxcb *rxcb = ATH12K_SKB_RXCB(msdu);
2295 	bool is_mcbc = rxcb->is_mcbc;
2296 	bool is_eapol_tkip = rxcb->is_eapol;
2297 
2298 	status->link_valid = 0;
2299 
2300 	if ((status->encoding == RX_ENC_HE) && !(status->flag & RX_FLAG_RADIOTAP_HE) &&
2301 	    !(status->flag & RX_FLAG_SKIP_MONITOR)) {
2302 		he = skb_push(msdu, sizeof(known));
2303 		memcpy(he, &known, sizeof(known));
2304 		status->flag |= RX_FLAG_RADIOTAP_HE;
2305 	}
2306 
2307 	spin_lock_bh(&ar->ab->base_lock);
2308 	peer = ath12k_peer_find_by_id(ar->ab, ppduinfo->peer_id);
2309 	if (peer && peer->sta) {
2310 		pubsta = peer->sta;
2311 		if (pubsta->valid_links) {
2312 			status->link_valid = 1;
2313 			status->link_id = peer->link_id;
2314 		}
2315 	}
2316 
2317 	spin_unlock_bh(&ar->ab->base_lock);
2318 
2319 	ath12k_dbg(ar->ab, ATH12K_DBG_DATA,
2320 		   "rx skb %p len %u peer %pM %u %s %s%s%s%s%s%s%s%s %srate_idx %u vht_nss %u freq %u band %u flag 0x%x fcs-err %i mic-err %i amsdu-more %i\n",
2321 		   msdu,
2322 		   msdu->len,
2323 		   peer ? peer->addr : NULL,
2324 		   rxcb->tid,
2325 		   (is_mcbc) ? "mcast" : "ucast",
2326 		   (status->encoding == RX_ENC_LEGACY) ? "legacy" : "",
2327 		   (status->encoding == RX_ENC_HT) ? "ht" : "",
2328 		   (status->encoding == RX_ENC_VHT) ? "vht" : "",
2329 		   (status->encoding == RX_ENC_HE) ? "he" : "",
2330 		   (status->bw == RATE_INFO_BW_40) ? "40" : "",
2331 		   (status->bw == RATE_INFO_BW_80) ? "80" : "",
2332 		   (status->bw == RATE_INFO_BW_160) ? "160" : "",
2333 		   (status->bw == RATE_INFO_BW_320) ? "320" : "",
2334 		   status->enc_flags & RX_ENC_FLAG_SHORT_GI ? "sgi " : "",
2335 		   status->rate_idx,
2336 		   status->nss,
2337 		   status->freq,
2338 		   status->band, status->flag,
2339 		   !!(status->flag & RX_FLAG_FAILED_FCS_CRC),
2340 		   !!(status->flag & RX_FLAG_MMIC_ERROR),
2341 		   !!(status->flag & RX_FLAG_AMSDU_MORE));
2342 
2343 	ath12k_dbg_dump(ar->ab, ATH12K_DBG_DP_RX, NULL, "dp rx msdu: ",
2344 			msdu->data, msdu->len);
2345 	rx_status = IEEE80211_SKB_RXCB(msdu);
2346 	*rx_status = *status;
2347 
2348 	/* TODO: trace rx packet */
2349 
2350 	/* PN for multicast packets are not validate in HW,
2351 	 * so skip 802.3 rx path
2352 	 * Also, fast_rx expects the STA to be authorized, hence
2353 	 * eapol packets are sent in slow path.
2354 	 */
2355 	if (decap == DP_RX_DECAP_TYPE_ETHERNET2_DIX && !is_eapol_tkip &&
2356 	    !(is_mcbc && rx_status->flag & RX_FLAG_DECRYPTED))
2357 		rx_status->flag |= RX_FLAG_8023;
2358 
2359 	ieee80211_rx_napi(ath12k_ar_to_hw(ar), pubsta, msdu, napi);
2360 }
2361 
2362 static int ath12k_dp_mon_rx_deliver(struct ath12k *ar,
2363 				    struct dp_mon_mpdu *mon_mpdu,
2364 				    struct hal_rx_mon_ppdu_info *ppduinfo,
2365 				    struct napi_struct *napi)
2366 {
2367 	struct ath12k_pdev_dp *dp = &ar->dp;
2368 	struct sk_buff *mon_skb, *skb_next, *header;
2369 	struct ieee80211_rx_status *rxs = &dp->rx_status;
2370 	u8 decap = DP_RX_DECAP_TYPE_RAW;
2371 
2372 	mon_skb = ath12k_dp_mon_rx_merg_msdus(ar, mon_mpdu, ppduinfo, rxs);
2373 	if (!mon_skb)
2374 		goto mon_deliver_fail;
2375 
2376 	header = mon_skb;
2377 	rxs->flag = 0;
2378 
2379 	if (mon_mpdu->err_bitmap & HAL_RX_MPDU_ERR_FCS)
2380 		rxs->flag = RX_FLAG_FAILED_FCS_CRC;
2381 
2382 	do {
2383 		skb_next = mon_skb->next;
2384 		if (!skb_next)
2385 			rxs->flag &= ~RX_FLAG_AMSDU_MORE;
2386 		else
2387 			rxs->flag |= RX_FLAG_AMSDU_MORE;
2388 
2389 		if (mon_skb == header) {
2390 			header = NULL;
2391 			rxs->flag &= ~RX_FLAG_ALLOW_SAME_PN;
2392 		} else {
2393 			rxs->flag |= RX_FLAG_ALLOW_SAME_PN;
2394 		}
2395 		rxs->flag |= RX_FLAG_ONLY_MONITOR;
2396 
2397 		if (!(rxs->flag & RX_FLAG_ONLY_MONITOR))
2398 			decap = mon_mpdu->decap_format;
2399 
2400 		ath12k_dp_mon_update_radiotap(ar, ppduinfo, mon_skb, rxs);
2401 		ath12k_dp_mon_rx_deliver_msdu(ar, napi, mon_skb, ppduinfo, rxs, decap);
2402 		mon_skb = skb_next;
2403 	} while (mon_skb);
2404 	rxs->flag = 0;
2405 
2406 	return 0;
2407 
2408 mon_deliver_fail:
2409 	mon_skb = mon_mpdu->head;
2410 	while (mon_skb) {
2411 		skb_next = mon_skb->next;
2412 		dev_kfree_skb_any(mon_skb);
2413 		mon_skb = skb_next;
2414 	}
2415 	return -EINVAL;
2416 }
2417 
2418 static int ath12k_dp_pkt_set_pktlen(struct sk_buff *skb, u32 len)
2419 {
2420 	if (skb->len > len) {
2421 		skb_trim(skb, len);
2422 	} else {
2423 		if (skb_tailroom(skb) < len - skb->len) {
2424 			if ((pskb_expand_head(skb, 0,
2425 					      len - skb->len - skb_tailroom(skb),
2426 					      GFP_ATOMIC))) {
2427 				return -ENOMEM;
2428 			}
2429 		}
2430 		skb_put(skb, (len - skb->len));
2431 	}
2432 
2433 	return 0;
2434 }
2435 
2436 /* Hardware fill buffer with 128 bytes aligned. So need to reap it
2437  * with 128 bytes aligned.
2438  */
2439 #define RXDMA_DATA_DMA_BLOCK_SIZE 128
2440 
2441 static void
2442 ath12k_dp_mon_get_buf_len(struct hal_rx_msdu_desc_info *info,
2443 			  bool *is_frag, u32 *total_len,
2444 			  u32 *frag_len, u32 *msdu_cnt)
2445 {
2446 	if (info->msdu_flags & RX_MSDU_DESC_INFO0_MSDU_CONTINUATION) {
2447 		*is_frag = true;
2448 		*frag_len = (RX_MON_STATUS_BASE_BUF_SIZE -
2449 			     sizeof(struct hal_rx_desc)) &
2450 			     ~(RXDMA_DATA_DMA_BLOCK_SIZE - 1);
2451 		*total_len += *frag_len;
2452 	} else {
2453 		if (*is_frag)
2454 			*frag_len = info->msdu_len - *total_len;
2455 		else
2456 			*frag_len = info->msdu_len;
2457 
2458 		*msdu_cnt -= 1;
2459 	}
2460 }
2461 
2462 static int
2463 ath12k_dp_mon_parse_status_buf(struct ath12k *ar,
2464 			       struct ath12k_mon_data *pmon,
2465 			       const struct dp_mon_packet_info *packet_info)
2466 {
2467 	struct ath12k_base *ab = ar->ab;
2468 	struct dp_rxdma_mon_ring *buf_ring = &ab->dp.rxdma_mon_buf_ring;
2469 	struct sk_buff *msdu;
2470 	int buf_id;
2471 	u32 offset;
2472 
2473 	buf_id = u32_get_bits(packet_info->cookie, DP_RXDMA_BUF_COOKIE_BUF_ID);
2474 
2475 	spin_lock_bh(&buf_ring->idr_lock);
2476 	msdu = idr_remove(&buf_ring->bufs_idr, buf_id);
2477 	spin_unlock_bh(&buf_ring->idr_lock);
2478 
2479 	if (unlikely(!msdu)) {
2480 		ath12k_warn(ab, "mon dest desc with inval buf_id %d\n", buf_id);
2481 		return 0;
2482 	}
2483 
2484 	dma_unmap_single(ab->dev, ATH12K_SKB_RXCB(msdu)->paddr,
2485 			 msdu->len + skb_tailroom(msdu),
2486 			 DMA_FROM_DEVICE);
2487 
2488 	offset = packet_info->dma_length + ATH12K_MON_RX_DOT11_OFFSET;
2489 	if (ath12k_dp_pkt_set_pktlen(msdu, offset)) {
2490 		dev_kfree_skb_any(msdu);
2491 		goto dest_replenish;
2492 	}
2493 
2494 	if (!pmon->mon_mpdu->head)
2495 		pmon->mon_mpdu->head = msdu;
2496 	else
2497 		pmon->mon_mpdu->tail->next = msdu;
2498 
2499 	pmon->mon_mpdu->tail = msdu;
2500 
2501 dest_replenish:
2502 	ath12k_dp_mon_buf_replenish(ab, buf_ring, 1);
2503 
2504 	return 0;
2505 }
2506 
2507 static int
2508 ath12k_dp_mon_parse_rx_dest_tlv(struct ath12k *ar,
2509 				struct ath12k_mon_data *pmon,
2510 				enum hal_rx_mon_status hal_status,
2511 				const void *tlv_data)
2512 {
2513 	switch (hal_status) {
2514 	case HAL_RX_MON_STATUS_MPDU_START:
2515 		if (WARN_ON_ONCE(pmon->mon_mpdu))
2516 			break;
2517 
2518 		pmon->mon_mpdu = kzalloc(sizeof(*pmon->mon_mpdu), GFP_ATOMIC);
2519 		if (!pmon->mon_mpdu)
2520 			return -ENOMEM;
2521 		break;
2522 	case HAL_RX_MON_STATUS_BUF_ADDR:
2523 		return ath12k_dp_mon_parse_status_buf(ar, pmon, tlv_data);
2524 	case HAL_RX_MON_STATUS_MPDU_END:
2525 		/* If no MSDU then free empty MPDU */
2526 		if (pmon->mon_mpdu->tail) {
2527 			pmon->mon_mpdu->tail->next = NULL;
2528 			list_add_tail(&pmon->mon_mpdu->list, &pmon->dp_rx_mon_mpdu_list);
2529 		} else {
2530 			kfree(pmon->mon_mpdu);
2531 		}
2532 		pmon->mon_mpdu = NULL;
2533 		break;
2534 	case HAL_RX_MON_STATUS_MSDU_END:
2535 		pmon->mon_mpdu->decap_format = pmon->decap_format;
2536 		pmon->mon_mpdu->err_bitmap = pmon->err_bitmap;
2537 		break;
2538 	default:
2539 		break;
2540 	}
2541 
2542 	return 0;
2543 }
2544 
2545 static enum hal_rx_mon_status
2546 ath12k_dp_mon_parse_rx_dest(struct ath12k *ar, struct ath12k_mon_data *pmon,
2547 			    struct sk_buff *skb)
2548 {
2549 	struct hal_tlv_64_hdr *tlv;
2550 	struct ath12k_skb_rxcb *rxcb;
2551 	enum hal_rx_mon_status hal_status;
2552 	u16 tlv_tag, tlv_len;
2553 	u8 *ptr = skb->data;
2554 
2555 	do {
2556 		tlv = (struct hal_tlv_64_hdr *)ptr;
2557 		tlv_tag = le64_get_bits(tlv->tl, HAL_TLV_64_HDR_TAG);
2558 
2559 		/* The actual length of PPDU_END is the combined length of many PHY
2560 		 * TLVs that follow. Skip the TLV header and
2561 		 * rx_rxpcu_classification_overview that follows the header to get to
2562 		 * next TLV.
2563 		 */
2564 
2565 		if (tlv_tag == HAL_RX_PPDU_END)
2566 			tlv_len = sizeof(struct hal_rx_rxpcu_classification_overview);
2567 		else
2568 			tlv_len = le64_get_bits(tlv->tl, HAL_TLV_64_HDR_LEN);
2569 
2570 		hal_status = ath12k_dp_mon_rx_parse_status_tlv(ar, pmon, tlv);
2571 
2572 		if (ar->monitor_started && ar->ab->hw_params->rxdma1_enable &&
2573 		    ath12k_dp_mon_parse_rx_dest_tlv(ar, pmon, hal_status, tlv->value))
2574 			return HAL_RX_MON_STATUS_PPDU_DONE;
2575 
2576 		ptr += sizeof(*tlv) + tlv_len;
2577 		ptr = PTR_ALIGN(ptr, HAL_TLV_64_ALIGN);
2578 
2579 		if ((ptr - skb->data) > skb->len)
2580 			break;
2581 
2582 	} while ((hal_status == HAL_RX_MON_STATUS_PPDU_NOT_DONE) ||
2583 		 (hal_status == HAL_RX_MON_STATUS_BUF_ADDR) ||
2584 		 (hal_status == HAL_RX_MON_STATUS_MPDU_START) ||
2585 		 (hal_status == HAL_RX_MON_STATUS_MPDU_END) ||
2586 		 (hal_status == HAL_RX_MON_STATUS_MSDU_END));
2587 
2588 	rxcb = ATH12K_SKB_RXCB(skb);
2589 	if (rxcb->is_end_of_ppdu)
2590 		hal_status = HAL_RX_MON_STATUS_PPDU_DONE;
2591 
2592 	return hal_status;
2593 }
2594 
2595 enum hal_rx_mon_status
2596 ath12k_dp_mon_rx_parse_mon_status(struct ath12k *ar,
2597 				  struct ath12k_mon_data *pmon,
2598 				  struct sk_buff *skb,
2599 				  struct napi_struct *napi)
2600 {
2601 	struct hal_rx_mon_ppdu_info *ppdu_info = &pmon->mon_ppdu_info;
2602 	struct dp_mon_mpdu *tmp;
2603 	struct dp_mon_mpdu *mon_mpdu = pmon->mon_mpdu;
2604 	enum hal_rx_mon_status hal_status;
2605 
2606 	hal_status = ath12k_dp_mon_parse_rx_dest(ar, pmon, skb);
2607 	if (hal_status != HAL_RX_MON_STATUS_PPDU_DONE)
2608 		return hal_status;
2609 
2610 	list_for_each_entry_safe(mon_mpdu, tmp, &pmon->dp_rx_mon_mpdu_list, list) {
2611 		list_del(&mon_mpdu->list);
2612 
2613 		if (mon_mpdu->head && mon_mpdu->tail)
2614 			ath12k_dp_mon_rx_deliver(ar, mon_mpdu, ppdu_info, napi);
2615 
2616 		kfree(mon_mpdu);
2617 	}
2618 
2619 	return hal_status;
2620 }
2621 
2622 int ath12k_dp_mon_buf_replenish(struct ath12k_base *ab,
2623 				struct dp_rxdma_mon_ring *buf_ring,
2624 				int req_entries)
2625 {
2626 	struct hal_mon_buf_ring *mon_buf;
2627 	struct sk_buff *skb;
2628 	struct hal_srng *srng;
2629 	dma_addr_t paddr;
2630 	u32 cookie;
2631 	int buf_id;
2632 
2633 	srng = &ab->hal.srng_list[buf_ring->refill_buf_ring.ring_id];
2634 	spin_lock_bh(&srng->lock);
2635 	ath12k_hal_srng_access_begin(ab, srng);
2636 
2637 	while (req_entries > 0) {
2638 		skb = dev_alloc_skb(DP_RX_BUFFER_SIZE + DP_RX_BUFFER_ALIGN_SIZE);
2639 		if (unlikely(!skb))
2640 			goto fail_alloc_skb;
2641 
2642 		if (!IS_ALIGNED((unsigned long)skb->data, DP_RX_BUFFER_ALIGN_SIZE)) {
2643 			skb_pull(skb,
2644 				 PTR_ALIGN(skb->data, DP_RX_BUFFER_ALIGN_SIZE) -
2645 				 skb->data);
2646 		}
2647 
2648 		paddr = dma_map_single(ab->dev, skb->data,
2649 				       skb->len + skb_tailroom(skb),
2650 				       DMA_FROM_DEVICE);
2651 
2652 		if (unlikely(dma_mapping_error(ab->dev, paddr)))
2653 			goto fail_free_skb;
2654 
2655 		spin_lock_bh(&buf_ring->idr_lock);
2656 		buf_id = idr_alloc(&buf_ring->bufs_idr, skb, 0,
2657 				   buf_ring->bufs_max * 3, GFP_ATOMIC);
2658 		spin_unlock_bh(&buf_ring->idr_lock);
2659 
2660 		if (unlikely(buf_id < 0))
2661 			goto fail_dma_unmap;
2662 
2663 		mon_buf = ath12k_hal_srng_src_get_next_entry(ab, srng);
2664 		if (unlikely(!mon_buf))
2665 			goto fail_idr_remove;
2666 
2667 		ATH12K_SKB_RXCB(skb)->paddr = paddr;
2668 
2669 		cookie = u32_encode_bits(buf_id, DP_RXDMA_BUF_COOKIE_BUF_ID);
2670 
2671 		mon_buf->paddr_lo = cpu_to_le32(lower_32_bits(paddr));
2672 		mon_buf->paddr_hi = cpu_to_le32(upper_32_bits(paddr));
2673 		mon_buf->cookie = cpu_to_le64(cookie);
2674 
2675 		req_entries--;
2676 	}
2677 
2678 	ath12k_hal_srng_access_end(ab, srng);
2679 	spin_unlock_bh(&srng->lock);
2680 	return 0;
2681 
2682 fail_idr_remove:
2683 	spin_lock_bh(&buf_ring->idr_lock);
2684 	idr_remove(&buf_ring->bufs_idr, buf_id);
2685 	spin_unlock_bh(&buf_ring->idr_lock);
2686 fail_dma_unmap:
2687 	dma_unmap_single(ab->dev, paddr, skb->len + skb_tailroom(skb),
2688 			 DMA_FROM_DEVICE);
2689 fail_free_skb:
2690 	dev_kfree_skb_any(skb);
2691 fail_alloc_skb:
2692 	ath12k_hal_srng_access_end(ab, srng);
2693 	spin_unlock_bh(&srng->lock);
2694 	return -ENOMEM;
2695 }
2696 
2697 int ath12k_dp_mon_status_bufs_replenish(struct ath12k_base *ab,
2698 					struct dp_rxdma_mon_ring *rx_ring,
2699 					int req_entries)
2700 {
2701 	enum hal_rx_buf_return_buf_manager mgr =
2702 		ab->hw_params->hal_params->rx_buf_rbm;
2703 	int num_free, num_remain, buf_id;
2704 	struct ath12k_buffer_addr *desc;
2705 	struct hal_srng *srng;
2706 	struct sk_buff *skb;
2707 	dma_addr_t paddr;
2708 	u32 cookie;
2709 
2710 	req_entries = min(req_entries, rx_ring->bufs_max);
2711 
2712 	srng = &ab->hal.srng_list[rx_ring->refill_buf_ring.ring_id];
2713 
2714 	spin_lock_bh(&srng->lock);
2715 
2716 	ath12k_hal_srng_access_begin(ab, srng);
2717 
2718 	num_free = ath12k_hal_srng_src_num_free(ab, srng, true);
2719 	if (!req_entries && (num_free > (rx_ring->bufs_max * 3) / 4))
2720 		req_entries = num_free;
2721 
2722 	req_entries = min(num_free, req_entries);
2723 	num_remain = req_entries;
2724 
2725 	while (num_remain > 0) {
2726 		skb = dev_alloc_skb(RX_MON_STATUS_BUF_SIZE);
2727 		if (!skb)
2728 			break;
2729 
2730 		if (!IS_ALIGNED((unsigned long)skb->data,
2731 				RX_MON_STATUS_BUF_ALIGN)) {
2732 			skb_pull(skb,
2733 				 PTR_ALIGN(skb->data, RX_MON_STATUS_BUF_ALIGN) -
2734 				 skb->data);
2735 		}
2736 
2737 		paddr = dma_map_single(ab->dev, skb->data,
2738 				       skb->len + skb_tailroom(skb),
2739 				       DMA_FROM_DEVICE);
2740 		if (dma_mapping_error(ab->dev, paddr))
2741 			goto fail_free_skb;
2742 
2743 		spin_lock_bh(&rx_ring->idr_lock);
2744 		buf_id = idr_alloc(&rx_ring->bufs_idr, skb, 0,
2745 				   rx_ring->bufs_max * 3, GFP_ATOMIC);
2746 		spin_unlock_bh(&rx_ring->idr_lock);
2747 		if (buf_id < 0)
2748 			goto fail_dma_unmap;
2749 		cookie = u32_encode_bits(buf_id, DP_RXDMA_BUF_COOKIE_BUF_ID);
2750 
2751 		desc = ath12k_hal_srng_src_get_next_entry(ab, srng);
2752 		if (!desc)
2753 			goto fail_buf_unassign;
2754 
2755 		ATH12K_SKB_RXCB(skb)->paddr = paddr;
2756 
2757 		num_remain--;
2758 
2759 		ath12k_hal_rx_buf_addr_info_set(desc, paddr, cookie, mgr);
2760 	}
2761 
2762 	ath12k_hal_srng_access_end(ab, srng);
2763 
2764 	spin_unlock_bh(&srng->lock);
2765 
2766 	return req_entries - num_remain;
2767 
2768 fail_buf_unassign:
2769 	spin_lock_bh(&rx_ring->idr_lock);
2770 	idr_remove(&rx_ring->bufs_idr, buf_id);
2771 	spin_unlock_bh(&rx_ring->idr_lock);
2772 fail_dma_unmap:
2773 	dma_unmap_single(ab->dev, paddr, skb->len + skb_tailroom(skb),
2774 			 DMA_FROM_DEVICE);
2775 fail_free_skb:
2776 	dev_kfree_skb_any(skb);
2777 
2778 	ath12k_hal_srng_access_end(ab, srng);
2779 
2780 	spin_unlock_bh(&srng->lock);
2781 
2782 	return req_entries - num_remain;
2783 }
2784 
2785 static struct dp_mon_tx_ppdu_info *
2786 ath12k_dp_mon_tx_get_ppdu_info(struct ath12k_mon_data *pmon,
2787 			       unsigned int ppdu_id,
2788 			       enum dp_mon_tx_ppdu_info_type type)
2789 {
2790 	struct dp_mon_tx_ppdu_info *tx_ppdu_info;
2791 
2792 	if (type == DP_MON_TX_PROT_PPDU_INFO) {
2793 		tx_ppdu_info = pmon->tx_prot_ppdu_info;
2794 
2795 		if (tx_ppdu_info && !tx_ppdu_info->is_used)
2796 			return tx_ppdu_info;
2797 		kfree(tx_ppdu_info);
2798 	} else {
2799 		tx_ppdu_info = pmon->tx_data_ppdu_info;
2800 
2801 		if (tx_ppdu_info && !tx_ppdu_info->is_used)
2802 			return tx_ppdu_info;
2803 		kfree(tx_ppdu_info);
2804 	}
2805 
2806 	/* allocate new tx_ppdu_info */
2807 	tx_ppdu_info = kzalloc(sizeof(*tx_ppdu_info), GFP_ATOMIC);
2808 	if (!tx_ppdu_info)
2809 		return NULL;
2810 
2811 	tx_ppdu_info->is_used = 0;
2812 	tx_ppdu_info->ppdu_id = ppdu_id;
2813 
2814 	if (type == DP_MON_TX_PROT_PPDU_INFO)
2815 		pmon->tx_prot_ppdu_info = tx_ppdu_info;
2816 	else
2817 		pmon->tx_data_ppdu_info = tx_ppdu_info;
2818 
2819 	return tx_ppdu_info;
2820 }
2821 
2822 static struct dp_mon_tx_ppdu_info *
2823 ath12k_dp_mon_hal_tx_ppdu_info(struct ath12k_mon_data *pmon,
2824 			       u16 tlv_tag)
2825 {
2826 	switch (tlv_tag) {
2827 	case HAL_TX_FES_SETUP:
2828 	case HAL_TX_FLUSH:
2829 	case HAL_PCU_PPDU_SETUP_INIT:
2830 	case HAL_TX_PEER_ENTRY:
2831 	case HAL_TX_QUEUE_EXTENSION:
2832 	case HAL_TX_MPDU_START:
2833 	case HAL_TX_MSDU_START:
2834 	case HAL_TX_DATA:
2835 	case HAL_MON_BUF_ADDR:
2836 	case HAL_TX_MPDU_END:
2837 	case HAL_TX_LAST_MPDU_FETCHED:
2838 	case HAL_TX_LAST_MPDU_END:
2839 	case HAL_COEX_TX_REQ:
2840 	case HAL_TX_RAW_OR_NATIVE_FRAME_SETUP:
2841 	case HAL_SCH_CRITICAL_TLV_REFERENCE:
2842 	case HAL_TX_FES_SETUP_COMPLETE:
2843 	case HAL_TQM_MPDU_GLOBAL_START:
2844 	case HAL_SCHEDULER_END:
2845 	case HAL_TX_FES_STATUS_USER_PPDU:
2846 		break;
2847 	case HAL_TX_FES_STATUS_PROT: {
2848 		if (!pmon->tx_prot_ppdu_info->is_used)
2849 			pmon->tx_prot_ppdu_info->is_used = true;
2850 
2851 		return pmon->tx_prot_ppdu_info;
2852 	}
2853 	}
2854 
2855 	if (!pmon->tx_data_ppdu_info->is_used)
2856 		pmon->tx_data_ppdu_info->is_used = true;
2857 
2858 	return pmon->tx_data_ppdu_info;
2859 }
2860 
2861 #define MAX_MONITOR_HEADER 512
2862 #define MAX_DUMMY_FRM_BODY 128
2863 
2864 struct sk_buff *ath12k_dp_mon_tx_alloc_skb(void)
2865 {
2866 	struct sk_buff *skb;
2867 
2868 	skb = dev_alloc_skb(MAX_MONITOR_HEADER + MAX_DUMMY_FRM_BODY);
2869 	if (!skb)
2870 		return NULL;
2871 
2872 	skb_reserve(skb, MAX_MONITOR_HEADER);
2873 
2874 	if (!IS_ALIGNED((unsigned long)skb->data, 4))
2875 		skb_pull(skb, PTR_ALIGN(skb->data, 4) - skb->data);
2876 
2877 	return skb;
2878 }
2879 
2880 static int
2881 ath12k_dp_mon_tx_gen_cts2self_frame(struct dp_mon_tx_ppdu_info *tx_ppdu_info)
2882 {
2883 	struct sk_buff *skb;
2884 	struct ieee80211_cts *cts;
2885 
2886 	skb = ath12k_dp_mon_tx_alloc_skb();
2887 	if (!skb)
2888 		return -ENOMEM;
2889 
2890 	cts = (struct ieee80211_cts *)skb->data;
2891 	memset(cts, 0, MAX_DUMMY_FRM_BODY);
2892 	cts->frame_control =
2893 		cpu_to_le16(IEEE80211_FTYPE_CTL | IEEE80211_STYPE_CTS);
2894 	cts->duration = cpu_to_le16(tx_ppdu_info->rx_status.rx_duration);
2895 	memcpy(cts->ra, tx_ppdu_info->rx_status.addr1, sizeof(cts->ra));
2896 
2897 	skb_put(skb, sizeof(*cts));
2898 	tx_ppdu_info->tx_mon_mpdu->head = skb;
2899 	tx_ppdu_info->tx_mon_mpdu->tail = NULL;
2900 	list_add_tail(&tx_ppdu_info->tx_mon_mpdu->list,
2901 		      &tx_ppdu_info->dp_tx_mon_mpdu_list);
2902 
2903 	return 0;
2904 }
2905 
2906 static int
2907 ath12k_dp_mon_tx_gen_rts_frame(struct dp_mon_tx_ppdu_info *tx_ppdu_info)
2908 {
2909 	struct sk_buff *skb;
2910 	struct ieee80211_rts *rts;
2911 
2912 	skb = ath12k_dp_mon_tx_alloc_skb();
2913 	if (!skb)
2914 		return -ENOMEM;
2915 
2916 	rts = (struct ieee80211_rts *)skb->data;
2917 	memset(rts, 0, MAX_DUMMY_FRM_BODY);
2918 	rts->frame_control =
2919 		cpu_to_le16(IEEE80211_FTYPE_CTL | IEEE80211_STYPE_RTS);
2920 	rts->duration = cpu_to_le16(tx_ppdu_info->rx_status.rx_duration);
2921 	memcpy(rts->ra, tx_ppdu_info->rx_status.addr1, sizeof(rts->ra));
2922 	memcpy(rts->ta, tx_ppdu_info->rx_status.addr2, sizeof(rts->ta));
2923 
2924 	skb_put(skb, sizeof(*rts));
2925 	tx_ppdu_info->tx_mon_mpdu->head = skb;
2926 	tx_ppdu_info->tx_mon_mpdu->tail = NULL;
2927 	list_add_tail(&tx_ppdu_info->tx_mon_mpdu->list,
2928 		      &tx_ppdu_info->dp_tx_mon_mpdu_list);
2929 
2930 	return 0;
2931 }
2932 
/* Synthesize a 3-address QoS-Null data frame from the recorded status
 * addresses (addr1..addr3) and queue it on the PPDU's MPDU list.
 *
 * Assumes tx_ppdu_info->tx_mon_mpdu is valid — TODO(review): confirm
 * the caller always sets it up before this is invoked.
 *
 * Returns 0 on success, -ENOMEM if the skb allocation fails.
 */
static int
ath12k_dp_mon_tx_gen_3addr_qos_null_frame(struct dp_mon_tx_ppdu_info *tx_ppdu_info)
{
	struct sk_buff *skb;
	struct ieee80211_qos_hdr *qhdr;

	skb = ath12k_dp_mon_tx_alloc_skb();
	if (!skb)
		return -ENOMEM;

	/* Zero the whole writable body so unset header fields read as 0 */
	qhdr = (struct ieee80211_qos_hdr *)skb->data;
	memset(qhdr, 0, MAX_DUMMY_FRM_BODY);
	qhdr->frame_control =
		cpu_to_le16(IEEE80211_FTYPE_DATA | IEEE80211_STYPE_QOS_NULLFUNC);
	qhdr->duration_id = cpu_to_le16(tx_ppdu_info->rx_status.rx_duration);
	memcpy(qhdr->addr1, tx_ppdu_info->rx_status.addr1, ETH_ALEN);
	memcpy(qhdr->addr2, tx_ppdu_info->rx_status.addr2, ETH_ALEN);
	memcpy(qhdr->addr3, tx_ppdu_info->rx_status.addr3, ETH_ALEN);

	skb_put(skb, sizeof(*qhdr));
	tx_ppdu_info->tx_mon_mpdu->head = skb;
	tx_ppdu_info->tx_mon_mpdu->tail = NULL;
	list_add_tail(&tx_ppdu_info->tx_mon_mpdu->list,
		      &tx_ppdu_info->dp_tx_mon_mpdu_list);

	return 0;
}
2960 
/* Synthesize a 4-address QoS-Null data frame (WDS style, addr1..addr4)
 * from the recorded status addresses and queue it on the PPDU's MPDU
 * list.
 *
 * Assumes tx_ppdu_info->tx_mon_mpdu is valid — TODO(review): confirm
 * the caller always sets it up before this is invoked.
 *
 * Returns 0 on success, -ENOMEM if the skb allocation fails.
 */
static int
ath12k_dp_mon_tx_gen_4addr_qos_null_frame(struct dp_mon_tx_ppdu_info *tx_ppdu_info)
{
	struct sk_buff *skb;
	struct dp_mon_qosframe_addr4 *qhdr;

	skb = ath12k_dp_mon_tx_alloc_skb();
	if (!skb)
		return -ENOMEM;

	/* Zero the whole writable body so unset header fields read as 0 */
	qhdr = (struct dp_mon_qosframe_addr4 *)skb->data;
	memset(qhdr, 0, MAX_DUMMY_FRM_BODY);
	qhdr->frame_control =
		cpu_to_le16(IEEE80211_FTYPE_DATA | IEEE80211_STYPE_QOS_NULLFUNC);
	qhdr->duration = cpu_to_le16(tx_ppdu_info->rx_status.rx_duration);
	memcpy(qhdr->addr1, tx_ppdu_info->rx_status.addr1, ETH_ALEN);
	memcpy(qhdr->addr2, tx_ppdu_info->rx_status.addr2, ETH_ALEN);
	memcpy(qhdr->addr3, tx_ppdu_info->rx_status.addr3, ETH_ALEN);
	memcpy(qhdr->addr4, tx_ppdu_info->rx_status.addr4, ETH_ALEN);

	skb_put(skb, sizeof(*qhdr));
	tx_ppdu_info->tx_mon_mpdu->head = skb;
	tx_ppdu_info->tx_mon_mpdu->tail = NULL;
	list_add_tail(&tx_ppdu_info->tx_mon_mpdu->list,
		      &tx_ppdu_info->dp_tx_mon_mpdu_list);

	return 0;
}
2989 
/* Synthesize a minimal acknowledgment frame (frame_control + duration +
 * addr1 only) for a received frame-bitmap-ACK status and queue it on the
 * PPDU's MPDU list.
 *
 * Assumes tx_ppdu_info->tx_mon_mpdu is valid — TODO(review): confirm
 * the caller always sets it up before this is invoked.
 *
 * Returns 0 on success, -ENOMEM if the skb allocation fails.
 */
static int
ath12k_dp_mon_tx_gen_ack_frame(struct dp_mon_tx_ppdu_info *tx_ppdu_info)
{
	struct sk_buff *skb;
	struct dp_mon_frame_min_one *fbmhdr;

	skb = ath12k_dp_mon_tx_alloc_skb();
	if (!skb)
		return -ENOMEM;

	/* Zero the whole writable body so unset header fields read as 0 */
	fbmhdr = (struct dp_mon_frame_min_one *)skb->data;
	memset(fbmhdr, 0, MAX_DUMMY_FRM_BODY);
	fbmhdr->frame_control =
		cpu_to_le16(IEEE80211_FTYPE_DATA | IEEE80211_STYPE_QOS_CFACK);
	memcpy(fbmhdr->addr1, tx_ppdu_info->rx_status.addr1, ETH_ALEN);

	/* set duration zero for ack frame */
	fbmhdr->duration = 0;

	skb_put(skb, sizeof(*fbmhdr));
	tx_ppdu_info->tx_mon_mpdu->head = skb;
	tx_ppdu_info->tx_mon_mpdu->tail = NULL;
	list_add_tail(&tx_ppdu_info->tx_mon_mpdu->list,
		      &tx_ppdu_info->dp_tx_mon_mpdu_list);

	return 0;
}
3017 
3018 static int
3019 ath12k_dp_mon_tx_gen_prot_frame(struct dp_mon_tx_ppdu_info *tx_ppdu_info)
3020 {
3021 	int ret = 0;
3022 
3023 	switch (tx_ppdu_info->rx_status.medium_prot_type) {
3024 	case DP_MON_TX_MEDIUM_RTS_LEGACY:
3025 	case DP_MON_TX_MEDIUM_RTS_11AC_STATIC_BW:
3026 	case DP_MON_TX_MEDIUM_RTS_11AC_DYNAMIC_BW:
3027 		ret = ath12k_dp_mon_tx_gen_rts_frame(tx_ppdu_info);
3028 		break;
3029 	case DP_MON_TX_MEDIUM_CTS2SELF:
3030 		ret = ath12k_dp_mon_tx_gen_cts2self_frame(tx_ppdu_info);
3031 		break;
3032 	case DP_MON_TX_MEDIUM_QOS_NULL_NO_ACK_3ADDR:
3033 		ret = ath12k_dp_mon_tx_gen_3addr_qos_null_frame(tx_ppdu_info);
3034 		break;
3035 	case DP_MON_TX_MEDIUM_QOS_NULL_NO_ACK_4ADDR:
3036 		ret = ath12k_dp_mon_tx_gen_4addr_qos_null_frame(tx_ppdu_info);
3037 		break;
3038 	}
3039 
3040 	return ret;
3041 }
3042 
3043 static enum dp_mon_tx_tlv_status
3044 ath12k_dp_mon_tx_parse_status_tlv(struct ath12k_base *ab,
3045 				  struct ath12k_mon_data *pmon,
3046 				  u16 tlv_tag, const void *tlv_data, u32 userid)
3047 {
3048 	struct dp_mon_tx_ppdu_info *tx_ppdu_info;
3049 	enum dp_mon_tx_tlv_status status = DP_MON_TX_STATUS_PPDU_NOT_DONE;
3050 	u32 info[7];
3051 
3052 	tx_ppdu_info = ath12k_dp_mon_hal_tx_ppdu_info(pmon, tlv_tag);
3053 
3054 	switch (tlv_tag) {
3055 	case HAL_TX_FES_SETUP: {
3056 		const struct hal_tx_fes_setup *tx_fes_setup = tlv_data;
3057 
3058 		info[0] = __le32_to_cpu(tx_fes_setup->info0);
3059 		tx_ppdu_info->ppdu_id = __le32_to_cpu(tx_fes_setup->schedule_id);
3060 		tx_ppdu_info->num_users =
3061 			u32_get_bits(info[0], HAL_TX_FES_SETUP_INFO0_NUM_OF_USERS);
3062 		status = DP_MON_TX_FES_SETUP;
3063 		break;
3064 	}
3065 
3066 	case HAL_TX_FES_STATUS_END: {
3067 		const struct hal_tx_fes_status_end *tx_fes_status_end = tlv_data;
3068 		u32 tst_15_0, tst_31_16;
3069 
3070 		info[0] = __le32_to_cpu(tx_fes_status_end->info0);
3071 		tst_15_0 =
3072 			u32_get_bits(info[0],
3073 				     HAL_TX_FES_STATUS_END_INFO0_START_TIMESTAMP_15_0);
3074 		tst_31_16 =
3075 			u32_get_bits(info[0],
3076 				     HAL_TX_FES_STATUS_END_INFO0_START_TIMESTAMP_31_16);
3077 
3078 		tx_ppdu_info->rx_status.ppdu_ts = (tst_15_0 | (tst_31_16 << 16));
3079 		status = DP_MON_TX_FES_STATUS_END;
3080 		break;
3081 	}
3082 
3083 	case HAL_RX_RESPONSE_REQUIRED_INFO: {
3084 		const struct hal_rx_resp_req_info *rx_resp_req_info = tlv_data;
3085 		u32 addr_32;
3086 		u16 addr_16;
3087 
3088 		info[0] = __le32_to_cpu(rx_resp_req_info->info0);
3089 		info[1] = __le32_to_cpu(rx_resp_req_info->info1);
3090 		info[2] = __le32_to_cpu(rx_resp_req_info->info2);
3091 		info[3] = __le32_to_cpu(rx_resp_req_info->info3);
3092 		info[4] = __le32_to_cpu(rx_resp_req_info->info4);
3093 		info[5] = __le32_to_cpu(rx_resp_req_info->info5);
3094 
3095 		tx_ppdu_info->rx_status.ppdu_id =
3096 			u32_get_bits(info[0], HAL_RX_RESP_REQ_INFO0_PPDU_ID);
3097 		tx_ppdu_info->rx_status.reception_type =
3098 			u32_get_bits(info[0], HAL_RX_RESP_REQ_INFO0_RECEPTION_TYPE);
3099 		tx_ppdu_info->rx_status.rx_duration =
3100 			u32_get_bits(info[1], HAL_RX_RESP_REQ_INFO1_DURATION);
3101 		tx_ppdu_info->rx_status.mcs =
3102 			u32_get_bits(info[1], HAL_RX_RESP_REQ_INFO1_RATE_MCS);
3103 		tx_ppdu_info->rx_status.sgi =
3104 			u32_get_bits(info[1], HAL_RX_RESP_REQ_INFO1_SGI);
3105 		tx_ppdu_info->rx_status.is_stbc =
3106 			u32_get_bits(info[1], HAL_RX_RESP_REQ_INFO1_STBC);
3107 		tx_ppdu_info->rx_status.ldpc =
3108 			u32_get_bits(info[1], HAL_RX_RESP_REQ_INFO1_LDPC);
3109 		tx_ppdu_info->rx_status.is_ampdu =
3110 			u32_get_bits(info[1], HAL_RX_RESP_REQ_INFO1_IS_AMPDU);
3111 		tx_ppdu_info->rx_status.num_users =
3112 			u32_get_bits(info[2], HAL_RX_RESP_REQ_INFO2_NUM_USER);
3113 
3114 		addr_32 = u32_get_bits(info[3], HAL_RX_RESP_REQ_INFO3_ADDR1_31_0);
3115 		addr_16 = u32_get_bits(info[3], HAL_RX_RESP_REQ_INFO4_ADDR1_47_32);
3116 		ath12k_dp_get_mac_addr(addr_32, addr_16, tx_ppdu_info->rx_status.addr1);
3117 
3118 		addr_16 = u32_get_bits(info[4], HAL_RX_RESP_REQ_INFO4_ADDR1_15_0);
3119 		addr_32 = u32_get_bits(info[5], HAL_RX_RESP_REQ_INFO5_ADDR1_47_16);
3120 		ath12k_dp_get_mac_addr(addr_32, addr_16, tx_ppdu_info->rx_status.addr2);
3121 
3122 		if (tx_ppdu_info->rx_status.reception_type == 0)
3123 			ath12k_dp_mon_tx_gen_cts2self_frame(tx_ppdu_info);
3124 		status = DP_MON_RX_RESPONSE_REQUIRED_INFO;
3125 		break;
3126 	}
3127 
3128 	case HAL_PCU_PPDU_SETUP_INIT: {
3129 		const struct hal_tx_pcu_ppdu_setup_init *ppdu_setup = tlv_data;
3130 		u32 addr_32;
3131 		u16 addr_16;
3132 
3133 		info[0] = __le32_to_cpu(ppdu_setup->info0);
3134 		info[1] = __le32_to_cpu(ppdu_setup->info1);
3135 		info[2] = __le32_to_cpu(ppdu_setup->info2);
3136 		info[3] = __le32_to_cpu(ppdu_setup->info3);
3137 		info[4] = __le32_to_cpu(ppdu_setup->info4);
3138 		info[5] = __le32_to_cpu(ppdu_setup->info5);
3139 		info[6] = __le32_to_cpu(ppdu_setup->info6);
3140 
3141 		/* protection frame address 1 */
3142 		addr_32 = u32_get_bits(info[1],
3143 				       HAL_TX_PPDU_SETUP_INFO1_PROT_FRAME_ADDR1_31_0);
3144 		addr_16 = u32_get_bits(info[2],
3145 				       HAL_TX_PPDU_SETUP_INFO2_PROT_FRAME_ADDR1_47_32);
3146 		ath12k_dp_get_mac_addr(addr_32, addr_16, tx_ppdu_info->rx_status.addr1);
3147 
3148 		/* protection frame address 2 */
3149 		addr_16 = u32_get_bits(info[2],
3150 				       HAL_TX_PPDU_SETUP_INFO2_PROT_FRAME_ADDR2_15_0);
3151 		addr_32 = u32_get_bits(info[3],
3152 				       HAL_TX_PPDU_SETUP_INFO3_PROT_FRAME_ADDR2_47_16);
3153 		ath12k_dp_get_mac_addr(addr_32, addr_16, tx_ppdu_info->rx_status.addr2);
3154 
3155 		/* protection frame address 3 */
3156 		addr_32 = u32_get_bits(info[4],
3157 				       HAL_TX_PPDU_SETUP_INFO4_PROT_FRAME_ADDR3_31_0);
3158 		addr_16 = u32_get_bits(info[5],
3159 				       HAL_TX_PPDU_SETUP_INFO5_PROT_FRAME_ADDR3_47_32);
3160 		ath12k_dp_get_mac_addr(addr_32, addr_16, tx_ppdu_info->rx_status.addr3);
3161 
3162 		/* protection frame address 4 */
3163 		addr_16 = u32_get_bits(info[5],
3164 				       HAL_TX_PPDU_SETUP_INFO5_PROT_FRAME_ADDR4_15_0);
3165 		addr_32 = u32_get_bits(info[6],
3166 				       HAL_TX_PPDU_SETUP_INFO6_PROT_FRAME_ADDR4_47_16);
3167 		ath12k_dp_get_mac_addr(addr_32, addr_16, tx_ppdu_info->rx_status.addr4);
3168 
3169 		status = u32_get_bits(info[0],
3170 				      HAL_TX_PPDU_SETUP_INFO0_MEDIUM_PROT_TYPE);
3171 		break;
3172 	}
3173 
3174 	case HAL_TX_QUEUE_EXTENSION: {
3175 		const struct hal_tx_queue_exten *tx_q_exten = tlv_data;
3176 
3177 		info[0] = __le32_to_cpu(tx_q_exten->info0);
3178 
3179 		tx_ppdu_info->rx_status.frame_control =
3180 			u32_get_bits(info[0],
3181 				     HAL_TX_Q_EXT_INFO0_FRAME_CTRL);
3182 		tx_ppdu_info->rx_status.fc_valid = true;
3183 		break;
3184 	}
3185 
3186 	case HAL_TX_FES_STATUS_START: {
3187 		const struct hal_tx_fes_status_start *tx_fes_start = tlv_data;
3188 
3189 		info[0] = __le32_to_cpu(tx_fes_start->info0);
3190 
3191 		tx_ppdu_info->rx_status.medium_prot_type =
3192 			u32_get_bits(info[0],
3193 				     HAL_TX_FES_STATUS_START_INFO0_MEDIUM_PROT_TYPE);
3194 		break;
3195 	}
3196 
3197 	case HAL_TX_FES_STATUS_PROT: {
3198 		const struct hal_tx_fes_status_prot *tx_fes_status = tlv_data;
3199 		u32 start_timestamp;
3200 		u32 end_timestamp;
3201 
3202 		info[0] = __le32_to_cpu(tx_fes_status->info0);
3203 		info[1] = __le32_to_cpu(tx_fes_status->info1);
3204 
3205 		start_timestamp =
3206 			u32_get_bits(info[0],
3207 				     HAL_TX_FES_STAT_PROT_INFO0_STRT_FRM_TS_15_0);
3208 		start_timestamp |=
3209 			u32_get_bits(info[0],
3210 				     HAL_TX_FES_STAT_PROT_INFO0_STRT_FRM_TS_31_16) << 15;
3211 		end_timestamp =
3212 			u32_get_bits(info[1],
3213 				     HAL_TX_FES_STAT_PROT_INFO1_END_FRM_TS_15_0);
3214 		end_timestamp |=
3215 			u32_get_bits(info[1],
3216 				     HAL_TX_FES_STAT_PROT_INFO1_END_FRM_TS_31_16) << 15;
3217 		tx_ppdu_info->rx_status.rx_duration = end_timestamp - start_timestamp;
3218 
3219 		ath12k_dp_mon_tx_gen_prot_frame(tx_ppdu_info);
3220 		break;
3221 	}
3222 
3223 	case HAL_TX_FES_STATUS_START_PPDU:
3224 	case HAL_TX_FES_STATUS_START_PROT: {
3225 		const struct hal_tx_fes_status_start_prot *tx_fes_stat_start = tlv_data;
3226 		u64 ppdu_ts;
3227 
3228 		info[0] = __le32_to_cpu(tx_fes_stat_start->info0);
3229 
3230 		tx_ppdu_info->rx_status.ppdu_ts =
3231 			u32_get_bits(info[0],
3232 				     HAL_TX_FES_STAT_STRT_INFO0_PROT_TS_LOWER_32);
3233 		ppdu_ts = (u32_get_bits(info[1],
3234 					HAL_TX_FES_STAT_STRT_INFO1_PROT_TS_UPPER_32));
3235 		tx_ppdu_info->rx_status.ppdu_ts |= ppdu_ts << 32;
3236 		break;
3237 	}
3238 
3239 	case HAL_TX_FES_STATUS_USER_PPDU: {
3240 		const struct hal_tx_fes_status_user_ppdu *tx_fes_usr_ppdu = tlv_data;
3241 
3242 		info[0] = __le32_to_cpu(tx_fes_usr_ppdu->info0);
3243 
3244 		tx_ppdu_info->rx_status.rx_duration =
3245 			u32_get_bits(info[0],
3246 				     HAL_TX_FES_STAT_USR_PPDU_INFO0_DURATION);
3247 		break;
3248 	}
3249 
3250 	case HAL_MACTX_HE_SIG_A_SU:
3251 		ath12k_dp_mon_parse_he_sig_su(tlv_data, &tx_ppdu_info->rx_status);
3252 		break;
3253 
3254 	case HAL_MACTX_HE_SIG_A_MU_DL:
3255 		ath12k_dp_mon_parse_he_sig_mu(tlv_data, &tx_ppdu_info->rx_status);
3256 		break;
3257 
3258 	case HAL_MACTX_HE_SIG_B1_MU:
3259 		ath12k_dp_mon_parse_he_sig_b1_mu(tlv_data, &tx_ppdu_info->rx_status);
3260 		break;
3261 
3262 	case HAL_MACTX_HE_SIG_B2_MU:
3263 		ath12k_dp_mon_parse_he_sig_b2_mu(tlv_data, &tx_ppdu_info->rx_status);
3264 		break;
3265 
3266 	case HAL_MACTX_HE_SIG_B2_OFDMA:
3267 		ath12k_dp_mon_parse_he_sig_b2_ofdma(tlv_data, &tx_ppdu_info->rx_status);
3268 		break;
3269 
3270 	case HAL_MACTX_VHT_SIG_A:
3271 		ath12k_dp_mon_parse_vht_sig_a(tlv_data, &tx_ppdu_info->rx_status);
3272 		break;
3273 
3274 	case HAL_MACTX_L_SIG_A:
3275 		ath12k_dp_mon_parse_l_sig_a(tlv_data, &tx_ppdu_info->rx_status);
3276 		break;
3277 
3278 	case HAL_MACTX_L_SIG_B:
3279 		ath12k_dp_mon_parse_l_sig_b(tlv_data, &tx_ppdu_info->rx_status);
3280 		break;
3281 
3282 	case HAL_RX_FRAME_BITMAP_ACK: {
3283 		const struct hal_rx_frame_bitmap_ack *fbm_ack = tlv_data;
3284 		u32 addr_32;
3285 		u16 addr_16;
3286 
3287 		info[0] = __le32_to_cpu(fbm_ack->info0);
3288 		info[1] = __le32_to_cpu(fbm_ack->info1);
3289 
3290 		addr_32 = u32_get_bits(info[0],
3291 				       HAL_RX_FBM_ACK_INFO0_ADDR1_31_0);
3292 		addr_16 = u32_get_bits(info[1],
3293 				       HAL_RX_FBM_ACK_INFO1_ADDR1_47_32);
3294 		ath12k_dp_get_mac_addr(addr_32, addr_16, tx_ppdu_info->rx_status.addr1);
3295 
3296 		ath12k_dp_mon_tx_gen_ack_frame(tx_ppdu_info);
3297 		break;
3298 	}
3299 
3300 	case HAL_MACTX_PHY_DESC: {
3301 		const struct hal_tx_phy_desc *tx_phy_desc = tlv_data;
3302 
3303 		info[0] = __le32_to_cpu(tx_phy_desc->info0);
3304 		info[1] = __le32_to_cpu(tx_phy_desc->info1);
3305 		info[2] = __le32_to_cpu(tx_phy_desc->info2);
3306 		info[3] = __le32_to_cpu(tx_phy_desc->info3);
3307 
3308 		tx_ppdu_info->rx_status.beamformed =
3309 			u32_get_bits(info[0],
3310 				     HAL_TX_PHY_DESC_INFO0_BF_TYPE);
3311 		tx_ppdu_info->rx_status.preamble_type =
3312 			u32_get_bits(info[0],
3313 				     HAL_TX_PHY_DESC_INFO0_PREAMBLE_11B);
3314 		tx_ppdu_info->rx_status.mcs =
3315 			u32_get_bits(info[1],
3316 				     HAL_TX_PHY_DESC_INFO1_MCS);
3317 		tx_ppdu_info->rx_status.ltf_size =
3318 			u32_get_bits(info[3],
3319 				     HAL_TX_PHY_DESC_INFO3_LTF_SIZE);
3320 		tx_ppdu_info->rx_status.nss =
3321 			u32_get_bits(info[2],
3322 				     HAL_TX_PHY_DESC_INFO2_NSS);
3323 		tx_ppdu_info->rx_status.chan_num =
3324 			u32_get_bits(info[3],
3325 				     HAL_TX_PHY_DESC_INFO3_ACTIVE_CHANNEL);
3326 		tx_ppdu_info->rx_status.bw =
3327 			u32_get_bits(info[0],
3328 				     HAL_TX_PHY_DESC_INFO0_BANDWIDTH);
3329 		break;
3330 	}
3331 
3332 	case HAL_TX_MPDU_START: {
3333 		struct dp_mon_mpdu *mon_mpdu = tx_ppdu_info->tx_mon_mpdu;
3334 
3335 		mon_mpdu = kzalloc(sizeof(*mon_mpdu), GFP_ATOMIC);
3336 		if (!mon_mpdu)
3337 			return DP_MON_TX_STATUS_PPDU_NOT_DONE;
3338 		status = DP_MON_TX_MPDU_START;
3339 		break;
3340 	}
3341 
3342 	case HAL_TX_MPDU_END:
3343 		list_add_tail(&tx_ppdu_info->tx_mon_mpdu->list,
3344 			      &tx_ppdu_info->dp_tx_mon_mpdu_list);
3345 		break;
3346 	}
3347 
3348 	return status;
3349 }
3350 
3351 enum dp_mon_tx_tlv_status
3352 ath12k_dp_mon_tx_status_get_num_user(u16 tlv_tag,
3353 				     struct hal_tlv_hdr *tx_tlv,
3354 				     u8 *num_users)
3355 {
3356 	u32 tlv_status = DP_MON_TX_STATUS_PPDU_NOT_DONE;
3357 	u32 info0;
3358 
3359 	switch (tlv_tag) {
3360 	case HAL_TX_FES_SETUP: {
3361 		struct hal_tx_fes_setup *tx_fes_setup =
3362 				(struct hal_tx_fes_setup *)tx_tlv;
3363 
3364 		info0 = __le32_to_cpu(tx_fes_setup->info0);
3365 
3366 		*num_users = u32_get_bits(info0, HAL_TX_FES_SETUP_INFO0_NUM_OF_USERS);
3367 		tlv_status = DP_MON_TX_FES_SETUP;
3368 		break;
3369 	}
3370 
3371 	case HAL_RX_RESPONSE_REQUIRED_INFO: {
3372 		/* TODO: need to update *num_users */
3373 		tlv_status = DP_MON_RX_RESPONSE_REQUIRED_INFO;
3374 		break;
3375 	}
3376 	}
3377 
3378 	return tlv_status;
3379 }
3380 
3381 static void
3382 ath12k_dp_mon_tx_process_ppdu_info(struct ath12k *ar,
3383 				   struct napi_struct *napi,
3384 				   struct dp_mon_tx_ppdu_info *tx_ppdu_info)
3385 {
3386 	struct dp_mon_mpdu *tmp, *mon_mpdu;
3387 
3388 	list_for_each_entry_safe(mon_mpdu, tmp,
3389 				 &tx_ppdu_info->dp_tx_mon_mpdu_list, list) {
3390 		list_del(&mon_mpdu->list);
3391 
3392 		if (mon_mpdu->head)
3393 			ath12k_dp_mon_rx_deliver(ar, mon_mpdu,
3394 						 &tx_ppdu_info->rx_status, napi);
3395 
3396 		kfree(mon_mpdu);
3397 	}
3398 }
3399 
/* Walk the TX monitor status buffer TLV by TLV, accumulating decoded
 * state into the protection and data PPDU infos, then deliver the
 * resulting MPDU lists to the monitor interface.
 *
 * NOTE(review): despite the enum hal_rx_mon_status return type, this
 * returns negative errnos and dp_mon_tx_tlv_status values — confirm
 * callers only compare against those.
 */
enum hal_rx_mon_status
ath12k_dp_mon_tx_parse_mon_status(struct ath12k *ar,
				  struct ath12k_mon_data *pmon,
				  struct sk_buff *skb,
				  struct napi_struct *napi,
				  u32 ppdu_id)
{
	struct ath12k_base *ab = ar->ab;
	struct dp_mon_tx_ppdu_info *tx_prot_ppdu_info, *tx_data_ppdu_info;
	struct hal_tlv_hdr *tlv;
	u8 *ptr = skb->data;
	u16 tlv_tag;
	u16 tlv_len;
	u32 tlv_userid = 0;
	u8 num_user;
	u32 tlv_status = DP_MON_TX_STATUS_PPDU_NOT_DONE;

	tx_prot_ppdu_info = ath12k_dp_mon_tx_get_ppdu_info(pmon, ppdu_id,
							   DP_MON_TX_PROT_PPDU_INFO);
	if (!tx_prot_ppdu_info)
		return -ENOMEM;

	/* Peek at the first TLV to learn the user count */
	tlv = (struct hal_tlv_hdr *)ptr;
	tlv_tag = le32_get_bits(tlv->tl, HAL_TLV_HDR_TAG);

	/* NOTE(review): num_user is only written for FES_SETUP here; for
	 * RX_RESPONSE_REQUIRED_INFO it may be read uninitialized below —
	 * verify ath12k_dp_mon_tx_status_get_num_user always sets it.
	 */
	tlv_status = ath12k_dp_mon_tx_status_get_num_user(tlv_tag, tlv, &num_user);
	if (tlv_status == DP_MON_TX_STATUS_PPDU_NOT_DONE || !num_user)
		return -EINVAL;

	tx_data_ppdu_info = ath12k_dp_mon_tx_get_ppdu_info(pmon, ppdu_id,
							   DP_MON_TX_DATA_PPDU_INFO);
	if (!tx_data_ppdu_info)
		return -ENOMEM;

	/* Parse TLVs until FES status end or the buffer is exhausted */
	do {
		tlv = (struct hal_tlv_hdr *)ptr;
		tlv_tag = le32_get_bits(tlv->tl, HAL_TLV_HDR_TAG);
		tlv_len = le32_get_bits(tlv->tl, HAL_TLV_HDR_LEN);
		tlv_userid = le32_get_bits(tlv->tl, HAL_TLV_USR_ID);

		tlv_status = ath12k_dp_mon_tx_parse_status_tlv(ab, pmon,
							       tlv_tag, ptr,
							       tlv_userid);
		/* Advance past the TLV payload, keeping TLV alignment */
		ptr += tlv_len;
		ptr = PTR_ALIGN(ptr, HAL_TLV_ALIGN);
		if ((ptr - skb->data) >= DP_TX_MONITOR_BUF_SIZE)
			break;
	} while (tlv_status != DP_MON_TX_FES_STATUS_END);

	/* Hand off both PPDUs' captured MPDUs to the monitor interface */
	ath12k_dp_mon_tx_process_ppdu_info(ar, napi, tx_data_ppdu_info);
	ath12k_dp_mon_tx_process_ppdu_info(ar, napi, tx_prot_ppdu_info);

	return tlv_status;
}
3454 
3455 static void
3456 ath12k_dp_mon_rx_update_peer_rate_table_stats(struct ath12k_rx_peer_stats *rx_stats,
3457 					      struct hal_rx_mon_ppdu_info *ppdu_info,
3458 					      struct hal_rx_user_status *user_stats,
3459 					      u32 num_msdu)
3460 {
3461 	struct ath12k_rx_peer_rate_stats *stats;
3462 	u32 mcs_idx = (user_stats) ? user_stats->mcs : ppdu_info->mcs;
3463 	u32 nss_idx = (user_stats) ? user_stats->nss - 1 : ppdu_info->nss - 1;
3464 	u32 bw_idx = ppdu_info->bw;
3465 	u32 gi_idx = ppdu_info->gi;
3466 	u32 len;
3467 
3468 	if (mcs_idx > HAL_RX_MAX_MCS_HT || nss_idx >= HAL_RX_MAX_NSS ||
3469 	    bw_idx >= HAL_RX_BW_MAX || gi_idx >= HAL_RX_GI_MAX) {
3470 		return;
3471 	}
3472 
3473 	if (ppdu_info->preamble_type == HAL_RX_PREAMBLE_11AX ||
3474 	    ppdu_info->preamble_type == HAL_RX_PREAMBLE_11BE)
3475 		gi_idx = ath12k_he_gi_to_nl80211_he_gi(ppdu_info->gi);
3476 
3477 	rx_stats->pkt_stats.rx_rate[bw_idx][gi_idx][nss_idx][mcs_idx] += num_msdu;
3478 	stats = &rx_stats->byte_stats;
3479 
3480 	if (user_stats)
3481 		len = user_stats->mpdu_ok_byte_count;
3482 	else
3483 		len = ppdu_info->mpdu_len;
3484 
3485 	stats->rx_rate[bw_idx][gi_idx][nss_idx][mcs_idx] += len;
3486 }
3487 
/* Fold a single-user PPDU's monitor status into the peer's RX stats:
 * RSSI, MSDU/MPDU counters, and per-preamble/rate/GI/BW histograms.
 * RSSI is updated even when detailed rx_stats collection is disabled.
 */
static void ath12k_dp_mon_rx_update_peer_su_stats(struct ath12k *ar,
						  struct ath12k_link_sta *arsta,
						  struct hal_rx_mon_ppdu_info *ppdu_info)
{
	struct ath12k_rx_peer_stats *rx_stats = arsta->rx_stats;
	u32 num_msdu;

	arsta->rssi_comb = ppdu_info->rssi_comb;
	ewma_avg_rssi_add(&arsta->avg_rssi, ppdu_info->rssi_comb);
	/* rx_stats is optional; only RSSI is tracked without it */
	if (!rx_stats)
		return;

	num_msdu = ppdu_info->tcp_msdu_count + ppdu_info->tcp_ack_msdu_count +
		   ppdu_info->udp_msdu_count + ppdu_info->other_msdu_count;

	rx_stats->num_msdu += num_msdu;
	rx_stats->tcp_msdu_count += ppdu_info->tcp_msdu_count +
				    ppdu_info->tcp_ack_msdu_count;
	rx_stats->udp_msdu_count += ppdu_info->udp_msdu_count;
	rx_stats->other_msdu_count += ppdu_info->other_msdu_count;

	/* Legacy (11a/11b) PPDUs carry no NSS/MCS/TID info; normalize so
	 * the histogram updates below fall into the catch-all buckets.
	 */
	if (ppdu_info->preamble_type == HAL_RX_PREAMBLE_11A ||
	    ppdu_info->preamble_type == HAL_RX_PREAMBLE_11B) {
		ppdu_info->nss = 1;
		ppdu_info->mcs = HAL_RX_MAX_MCS;
		ppdu_info->tid = IEEE80211_NUM_TIDS;
	}

	if (ppdu_info->ldpc < HAL_RX_SU_MU_CODING_MAX)
		rx_stats->coding_count[ppdu_info->ldpc] += num_msdu;

	if (ppdu_info->tid <= IEEE80211_NUM_TIDS)
		rx_stats->tid_count[ppdu_info->tid] += num_msdu;

	if (ppdu_info->preamble_type < HAL_RX_PREAMBLE_MAX)
		rx_stats->pream_cnt[ppdu_info->preamble_type] += num_msdu;

	if (ppdu_info->reception_type < HAL_RX_RECEPTION_TYPE_MAX)
		rx_stats->reception_type[ppdu_info->reception_type] += num_msdu;

	if (ppdu_info->is_stbc)
		rx_stats->stbc_count += num_msdu;

	if (ppdu_info->beamformed)
		rx_stats->beamformed_count += num_msdu;

	/* More than one FCS-ok MPDU implies A-MPDU aggregation */
	if (ppdu_info->num_mpdu_fcs_ok > 1)
		rx_stats->ampdu_msdu_count += num_msdu;
	else
		rx_stats->non_ampdu_msdu_count += num_msdu;

	rx_stats->num_mpdu_fcs_ok += ppdu_info->num_mpdu_fcs_ok;
	rx_stats->num_mpdu_fcs_err += ppdu_info->num_mpdu_fcs_err;
	rx_stats->dcm_count += ppdu_info->dcm;

	rx_stats->rx_duration += ppdu_info->rx_duration;
	arsta->rx_duration = rx_stats->rx_duration;

	if (ppdu_info->nss > 0 && ppdu_info->nss <= HAL_RX_MAX_NSS) {
		rx_stats->pkt_stats.nss_count[ppdu_info->nss - 1] += num_msdu;
		rx_stats->byte_stats.nss_count[ppdu_info->nss - 1] += ppdu_info->mpdu_len;
	}

	if (ppdu_info->preamble_type == HAL_RX_PREAMBLE_11N &&
	    ppdu_info->mcs <= HAL_RX_MAX_MCS_HT) {
		rx_stats->pkt_stats.ht_mcs_count[ppdu_info->mcs] += num_msdu;
		rx_stats->byte_stats.ht_mcs_count[ppdu_info->mcs] += ppdu_info->mpdu_len;
		/* To fit into rate table for HT packets */
		ppdu_info->mcs = ppdu_info->mcs % 8;
	}

	if (ppdu_info->preamble_type == HAL_RX_PREAMBLE_11AC &&
	    ppdu_info->mcs <= HAL_RX_MAX_MCS_VHT) {
		rx_stats->pkt_stats.vht_mcs_count[ppdu_info->mcs] += num_msdu;
		rx_stats->byte_stats.vht_mcs_count[ppdu_info->mcs] += ppdu_info->mpdu_len;
	}

	if (ppdu_info->preamble_type == HAL_RX_PREAMBLE_11AX &&
	    ppdu_info->mcs <= HAL_RX_MAX_MCS_HE) {
		rx_stats->pkt_stats.he_mcs_count[ppdu_info->mcs] += num_msdu;
		rx_stats->byte_stats.he_mcs_count[ppdu_info->mcs] += ppdu_info->mpdu_len;
	}

	if (ppdu_info->preamble_type == HAL_RX_PREAMBLE_11BE &&
	    ppdu_info->mcs <= HAL_RX_MAX_MCS_BE) {
		rx_stats->pkt_stats.be_mcs_count[ppdu_info->mcs] += num_msdu;
		rx_stats->byte_stats.be_mcs_count[ppdu_info->mcs] += ppdu_info->mpdu_len;
	}

	if ((ppdu_info->preamble_type == HAL_RX_PREAMBLE_11A ||
	     ppdu_info->preamble_type == HAL_RX_PREAMBLE_11B) &&
	     ppdu_info->rate < HAL_RX_LEGACY_RATE_INVALID) {
		rx_stats->pkt_stats.legacy_count[ppdu_info->rate] += num_msdu;
		rx_stats->byte_stats.legacy_count[ppdu_info->rate] += ppdu_info->mpdu_len;
	}

	if (ppdu_info->gi < HAL_RX_GI_MAX) {
		rx_stats->pkt_stats.gi_count[ppdu_info->gi] += num_msdu;
		rx_stats->byte_stats.gi_count[ppdu_info->gi] += ppdu_info->mpdu_len;
	}

	if (ppdu_info->bw < HAL_RX_BW_MAX) {
		rx_stats->pkt_stats.bw_count[ppdu_info->bw] += num_msdu;
		rx_stats->byte_stats.bw_count[ppdu_info->bw] += ppdu_info->mpdu_len;
	}

	ath12k_dp_mon_rx_update_peer_rate_table_stats(rx_stats, ppdu_info,
						      NULL, num_msdu);
}
3597 
/* Decode UL OFDMA/MU user info words (captured from the PPDU-end user
 * stats TLVs) into each user's rx status: MCS, NSS, RU allocation and
 * LDPC flag. Only runs for MU receptions; only version-0 user-info
 * words with the valid bit set are decoded.
 */
void ath12k_dp_mon_rx_process_ulofdma(struct hal_rx_mon_ppdu_info *ppdu_info)
{
	struct hal_rx_user_status *rx_user_status;
	u32 num_users, i, mu_ul_user_v0_word0, mu_ul_user_v0_word1, ru_size;

	if (!(ppdu_info->reception_type == HAL_RX_RECEPTION_TYPE_MU_MIMO ||
	      ppdu_info->reception_type == HAL_RX_RECEPTION_TYPE_MU_OFDMA ||
	      ppdu_info->reception_type == HAL_RX_RECEPTION_TYPE_MU_OFDMA_MIMO))
		return;

	/* Clamp to the userstats array bound */
	num_users = ppdu_info->num_users;
	if (num_users > HAL_MAX_UL_MU_USERS)
		num_users = HAL_MAX_UL_MU_USERS;

	for (i = 0; i < num_users; i++) {
		rx_user_status = &ppdu_info->userstats[i];
		mu_ul_user_v0_word0 =
			rx_user_status->ul_ofdma_user_v0_word0;
		mu_ul_user_v0_word1 =
			rx_user_status->ul_ofdma_user_v0_word1;

		/* Decode only valid, version-0 user info words */
		if (u32_get_bits(mu_ul_user_v0_word0,
				 HAL_RX_UL_OFDMA_USER_INFO_V0_W0_VALID) &&
		    !u32_get_bits(mu_ul_user_v0_word0,
				  HAL_RX_UL_OFDMA_USER_INFO_V0_W0_VER)) {
			rx_user_status->mcs =
				u32_get_bits(mu_ul_user_v0_word1,
					     HAL_RX_UL_OFDMA_USER_INFO_V0_W1_MCS);
			rx_user_status->nss =
				u32_get_bits(mu_ul_user_v0_word1,
					     HAL_RX_UL_OFDMA_USER_INFO_V0_W1_NSS) + 1;

			rx_user_status->ofdma_info_valid = 1;
			rx_user_status->ul_ofdma_ru_start_index =
				u32_get_bits(mu_ul_user_v0_word1,
					     HAL_RX_UL_OFDMA_USER_INFO_V0_W1_RU_START);

			ru_size = u32_get_bits(mu_ul_user_v0_word1,
					       HAL_RX_UL_OFDMA_USER_INFO_V0_W1_RU_SIZE);
			rx_user_status->ul_ofdma_ru_width = ru_size;
			rx_user_status->ul_ofdma_ru_size = ru_size;
		}
		/* NOTE(review): LDPC is read from word1 even when the valid
		 * bit is clear — confirm this is intentional.
		 */
		rx_user_status->ldpc = u32_get_bits(mu_ul_user_v0_word1,
						    HAL_RX_UL_OFDMA_USER_INFO_V0_W1_LDPC);
	}
	ppdu_info->ldpc = 1;
}
3645 
/* Accumulate one user's rx statistics from a parsed monitor-mode PPDU
 * into the corresponding peer's rx_stats. The peer is resolved via the
 * user's AST index.
 *
 * NOTE(review): ath12k_peer_find_by_ast() is called here with no lock
 * visibly taken — presumably the caller holds ab->base_lock; confirm
 * against the call sites.
 */
static void
ath12k_dp_mon_rx_update_user_stats(struct ath12k *ar,
				   struct hal_rx_mon_ppdu_info *ppdu_info,
				   u32 uid)
{
	struct ath12k_link_sta *arsta;
	struct ath12k_rx_peer_stats *rx_stats = NULL;
	struct hal_rx_user_status *user_stats = &ppdu_info->userstats[uid];
	struct ath12k_peer *peer;
	u32 num_msdu;

	/* 0 and 0xFFFF are not usable AST indices; nothing to attribute */
	if (user_stats->ast_index == 0 || user_stats->ast_index == 0xFFFF)
		return;

	peer = ath12k_peer_find_by_ast(ar->ab, user_stats->ast_index);

	if (!peer) {
		ath12k_warn(ar->ab, "peer ast idx %d can't be found\n",
			    user_stats->ast_index);
		return;
	}

	arsta = ath12k_peer_get_link_sta(ar->ab, peer);
	if (!arsta) {
		ath12k_warn(ar->ab, "link sta not found on peer %pM id %d\n",
			    peer->addr, peer->peer_id);
		return;
	}

	/* Per-link RSSI is updated even when detailed rx stats are absent */
	arsta->rssi_comb = ppdu_info->rssi_comb;
	ewma_avg_rssi_add(&arsta->avg_rssi, ppdu_info->rssi_comb);
	rx_stats = arsta->rx_stats;
	if (!rx_stats)
		return;

	/* Total MSDUs this user contributed to the PPDU, across all
	 * traffic classes tracked by the hardware.
	 */
	num_msdu = user_stats->tcp_msdu_count + user_stats->tcp_ack_msdu_count +
		   user_stats->udp_msdu_count + user_stats->other_msdu_count;

	rx_stats->num_msdu += num_msdu;
	rx_stats->tcp_msdu_count += user_stats->tcp_msdu_count +
				    user_stats->tcp_ack_msdu_count;
	rx_stats->udp_msdu_count += user_stats->udp_msdu_count;
	rx_stats->other_msdu_count += user_stats->other_msdu_count;

	/* Guard every HW-supplied index before using it as an array index */
	if (ppdu_info->ldpc < HAL_RX_SU_MU_CODING_MAX)
		rx_stats->coding_count[ppdu_info->ldpc] += num_msdu;

	if (user_stats->tid <= IEEE80211_NUM_TIDS)
		rx_stats->tid_count[user_stats->tid] += num_msdu;

	if (user_stats->preamble_type < HAL_RX_PREAMBLE_MAX)
		rx_stats->pream_cnt[user_stats->preamble_type] += num_msdu;

	if (ppdu_info->reception_type < HAL_RX_RECEPTION_TYPE_MAX)
		rx_stats->reception_type[ppdu_info->reception_type] += num_msdu;

	if (ppdu_info->is_stbc)
		rx_stats->stbc_count += num_msdu;

	if (ppdu_info->beamformed)
		rx_stats->beamformed_count += num_msdu;

	/* More than one FCS-ok MPDU in the PPDU implies A-MPDU aggregation */
	if (user_stats->mpdu_cnt_fcs_ok > 1)
		rx_stats->ampdu_msdu_count += num_msdu;
	else
		rx_stats->non_ampdu_msdu_count += num_msdu;

	rx_stats->num_mpdu_fcs_ok += user_stats->mpdu_cnt_fcs_ok;
	rx_stats->num_mpdu_fcs_err += user_stats->mpdu_cnt_fcs_err;
	rx_stats->dcm_count += ppdu_info->dcm;
	/* NOTE(review): ul_ofdma_ru_size indexes ru_alloc_cnt without a
	 * bound check, unlike the other HW-supplied indices above —
	 * confirm the RU-size field range cannot exceed the array size.
	 */
	if (ppdu_info->reception_type == HAL_RX_RECEPTION_TYPE_MU_OFDMA ||
	    ppdu_info->reception_type == HAL_RX_RECEPTION_TYPE_MU_OFDMA_MIMO)
		rx_stats->ru_alloc_cnt[user_stats->ul_ofdma_ru_size] += num_msdu;

	rx_stats->rx_duration += ppdu_info->rx_duration;
	arsta->rx_duration = rx_stats->rx_duration;

	/* Packet and byte counters keyed by NSS/MCS/GI/BW; byte counts
	 * only cover MPDUs that passed FCS (mpdu_ok_byte_count).
	 */
	if (user_stats->nss > 0 && user_stats->nss <= HAL_RX_MAX_NSS) {
		rx_stats->pkt_stats.nss_count[user_stats->nss - 1] += num_msdu;
		rx_stats->byte_stats.nss_count[user_stats->nss - 1] +=
						user_stats->mpdu_ok_byte_count;
	}

	if (user_stats->preamble_type == HAL_RX_PREAMBLE_11AX &&
	    user_stats->mcs <= HAL_RX_MAX_MCS_HE) {
		rx_stats->pkt_stats.he_mcs_count[user_stats->mcs] += num_msdu;
		rx_stats->byte_stats.he_mcs_count[user_stats->mcs] +=
						user_stats->mpdu_ok_byte_count;
	}

	if (ppdu_info->gi < HAL_RX_GI_MAX) {
		rx_stats->pkt_stats.gi_count[ppdu_info->gi] += num_msdu;
		rx_stats->byte_stats.gi_count[ppdu_info->gi] +=
						user_stats->mpdu_ok_byte_count;
	}

	if (ppdu_info->bw < HAL_RX_BW_MAX) {
		rx_stats->pkt_stats.bw_count[ppdu_info->bw] += num_msdu;
		rx_stats->byte_stats.bw_count[ppdu_info->bw] +=
						user_stats->mpdu_ok_byte_count;
	}

	ath12k_dp_mon_rx_update_peer_rate_table_stats(rx_stats, ppdu_info,
						      user_stats, num_msdu);
}
3751 
3752 static void
3753 ath12k_dp_mon_rx_update_peer_mu_stats(struct ath12k *ar,
3754 				      struct hal_rx_mon_ppdu_info *ppdu_info)
3755 {
3756 	u32 num_users, i;
3757 
3758 	num_users = ppdu_info->num_users;
3759 	if (num_users > HAL_MAX_UL_MU_USERS)
3760 		num_users = HAL_MAX_UL_MU_USERS;
3761 
3762 	for (i = 0; i < num_users; i++)
3763 		ath12k_dp_mon_rx_update_user_stats(ar, ppdu_info, i);
3764 }
3765 
/* Reset the PPDU parse state for a fresh PPDU. peer_id is poisoned to
 * HAL_INVALID_PEERID so a zeroed structure is never mistaken for a
 * valid peer match.
 */
static void
ath12k_dp_mon_rx_memset_ppdu_info(struct hal_rx_mon_ppdu_info *ppdu_info)
{
	memset(ppdu_info, 0, sizeof(*ppdu_info));
	ppdu_info->peer_id = HAL_INVALID_PEERID;
}
3772 
/* Reap the rxdma monitor destination ring, collect status buffers up to
 * one NAPI budget worth of PPDUs, then parse them and update per-peer
 * rx statistics. Returns the number of ring descriptors reaped.
 */
int ath12k_dp_mon_srng_process(struct ath12k *ar, int *budget,
			       struct napi_struct *napi)
{
	struct ath12k_base *ab = ar->ab;
	struct ath12k_pdev_dp *pdev_dp = &ar->dp;
	struct ath12k_mon_data *pmon = (struct ath12k_mon_data *)&pdev_dp->mon_data;
	struct hal_rx_mon_ppdu_info *ppdu_info = &pmon->mon_ppdu_info;
	struct ath12k_dp *dp = &ab->dp;
	struct hal_mon_dest_desc *mon_dst_desc;
	struct sk_buff *skb;
	struct ath12k_skb_rxcb *rxcb;
	struct dp_srng *mon_dst_ring;
	struct hal_srng *srng;
	struct dp_rxdma_mon_ring *buf_ring;
	struct ath12k_link_sta *arsta;
	struct ath12k_peer *peer;
	struct sk_buff_head skb_list;
	u64 cookie;
	int num_buffs_reaped = 0, srng_id, buf_id;
	u32 hal_status, end_offset, info0, end_reason;
	u8 pdev_idx = ath12k_hw_mac_id_to_pdev_id(ab->hw_params, ar->pdev_idx);

	__skb_queue_head_init(&skb_list);
	srng_id = ath12k_hw_mac_id_to_srng_id(ab->hw_params, pdev_idx);
	mon_dst_ring = &pdev_dp->rxdma_mon_dst_ring[srng_id];
	buf_ring = &dp->rxdma_mon_buf_ring;

	srng = &ab->hal.srng_list[mon_dst_ring->ring_id];
	spin_lock_bh(&srng->lock);
	ath12k_hal_srng_access_begin(ab, srng);

	/* Phase 1: reap descriptors under the srng lock, queueing valid
	 * status buffers onto skb_list for parsing after the lock drops.
	 */
	while (likely(*budget)) {
		mon_dst_desc = ath12k_hal_srng_dst_peek(ab, srng);
		if (unlikely(!mon_dst_desc))
			break;

		/* In case of empty descriptor, the cookie in the ring descriptor
		 * is invalid. Therefore, this entry is skipped, and ring processing
		 * continues.
		 */
		info0 = le32_to_cpu(mon_dst_desc->info0);
		if (u32_get_bits(info0, HAL_MON_DEST_INFO0_EMPTY_DESC))
			goto move_next;

		cookie = le32_to_cpu(mon_dst_desc->cookie);
		buf_id = u32_get_bits(cookie, DP_RXDMA_BUF_COOKIE_BUF_ID);

		spin_lock_bh(&buf_ring->idr_lock);
		skb = idr_remove(&buf_ring->bufs_idr, buf_id);
		spin_unlock_bh(&buf_ring->idr_lock);

		if (unlikely(!skb)) {
			ath12k_warn(ab, "monitor destination with invalid buf_id %d\n",
				    buf_id);
			goto move_next;
		}

		rxcb = ATH12K_SKB_RXCB(skb);
		dma_unmap_single(ab->dev, rxcb->paddr,
				 skb->len + skb_tailroom(skb),
				 DMA_FROM_DEVICE);

		end_reason = u32_get_bits(info0, HAL_MON_DEST_INFO0_END_REASON);

		/* HAL_MON_FLUSH_DETECTED implies that an rx flush received at the end of
		 * rx PPDU and HAL_MON_PPDU_TRUNCATED implies that the PPDU got
		 * truncated due to a system level error. In both the cases, buffer data
		 * can be discarded
		 */
		if ((end_reason == HAL_MON_FLUSH_DETECTED) ||
		    (end_reason == HAL_MON_PPDU_TRUNCATED)) {
			ath12k_dbg(ab, ATH12K_DBG_DATA,
				   "Monitor dest descriptor end reason %d", end_reason);
			dev_kfree_skb_any(skb);
			goto move_next;
		}

		/* Calculate the budget when the ring descriptor with the
		 * HAL_MON_END_OF_PPDU to ensure that one PPDU worth of data is always
		 * reaped. This helps to efficiently utilize the NAPI budget.
		 */
		if (end_reason == HAL_MON_END_OF_PPDU) {
			*budget -= 1;
			rxcb->is_end_of_ppdu = true;
		}

		/* Clamp the buffer length to the allocated size on a bogus
		 * end offset rather than over-running the skb.
		 */
		end_offset = u32_get_bits(info0, HAL_MON_DEST_INFO0_END_OFFSET);
		if (likely(end_offset <= DP_RX_BUFFER_SIZE)) {
			skb_put(skb, end_offset);
		} else {
			ath12k_warn(ab,
				    "invalid offset on mon stats destination %u\n",
				    end_offset);
			skb_put(skb, DP_RX_BUFFER_SIZE);
		}

		__skb_queue_tail(&skb_list, skb);

move_next:
		/* Always replenish and advance, even for skipped entries */
		ath12k_dp_mon_buf_replenish(ab, buf_ring, 1);
		ath12k_hal_srng_dst_get_next_entry(ab, srng);
		num_buffs_reaped++;
	}

	ath12k_hal_srng_access_end(ab, srng);
	spin_unlock_bh(&srng->lock);

	if (!num_buffs_reaped)
		return 0;

	/* In some cases, one PPDU worth of data can be spread across multiple NAPI
	 * schedules, To avoid losing existing parsed ppdu_info information, skip
	 * the memset of the ppdu_info structure and continue processing it.
	 */
	if (!ppdu_info->ppdu_continuation)
		ath12k_dp_mon_rx_memset_ppdu_info(ppdu_info);

	/* Phase 2: parse each reaped status buffer; once a full PPDU is
	 * assembled, update per-peer stats for it.
	 */
	while ((skb = __skb_dequeue(&skb_list))) {
		hal_status = ath12k_dp_mon_rx_parse_mon_status(ar, pmon, skb, napi);
		if (hal_status != HAL_RX_MON_STATUS_PPDU_DONE) {
			/* PPDU continues into the next NAPI schedule */
			ppdu_info->ppdu_continuation = true;
			dev_kfree_skb_any(skb);
			continue;
		}

		if (ppdu_info->peer_id == HAL_INVALID_PEERID)
			goto free_skb;

		rcu_read_lock();
		spin_lock_bh(&ab->base_lock);
		peer = ath12k_peer_find_by_id(ab, ppdu_info->peer_id);
		if (!peer || !peer->sta) {
			ath12k_dbg(ab, ATH12K_DBG_DATA,
				   "failed to find the peer with monitor peer_id %d\n",
				   ppdu_info->peer_id);
			goto next_skb;
		}

		if (ppdu_info->reception_type == HAL_RX_RECEPTION_TYPE_SU) {
			arsta = ath12k_peer_get_link_sta(ar->ab, peer);
			if (!arsta) {
				ath12k_warn(ar->ab, "link sta not found on peer %pM id %d\n",
					    peer->addr, peer->peer_id);
				spin_unlock_bh(&ab->base_lock);
				rcu_read_unlock();
				dev_kfree_skb_any(skb);
				continue;
			}
			ath12k_dp_mon_rx_update_peer_su_stats(ar, arsta,
							      ppdu_info);
		} else if ((ppdu_info->fc_valid) &&
			   (ppdu_info->ast_index != HAL_AST_IDX_INVALID)) {
			ath12k_dp_mon_rx_process_ulofdma(ppdu_info);
			ath12k_dp_mon_rx_update_peer_mu_stats(ar, ppdu_info);
		}

next_skb:
		spin_unlock_bh(&ab->base_lock);
		rcu_read_unlock();
free_skb:
		dev_kfree_skb_any(skb);
		/* PPDU fully consumed; reset parse state for the next one */
		ath12k_dp_mon_rx_memset_ppdu_info(ppdu_info);
	}

	return num_buffs_reaped;
}
3939 
/* Reap the rx monitor status refill ring: collect status buffers whose
 * DMA is complete onto skb_list and refill each reaped ring entry with
 * a freshly allocated buffer. Returns the number of entries processed.
 */
static int ath12k_dp_rx_reap_mon_status_ring(struct ath12k_base *ab, int mac_id,
					     int *budget, struct sk_buff_head *skb_list)
{
	const struct ath12k_hw_hal_params *hal_params;
	int buf_id, srng_id, num_buffs_reaped = 0;
	enum dp_mon_status_buf_state reap_status;
	struct dp_rxdma_mon_ring *rx_ring;
	struct ath12k_mon_data *pmon;
	struct ath12k_skb_rxcb *rxcb;
	struct hal_tlv_64_hdr *tlv;
	void *rx_mon_status_desc;
	struct hal_srng *srng;
	struct ath12k_dp *dp;
	struct sk_buff *skb;
	struct ath12k *ar;
	dma_addr_t paddr;
	u32 cookie;
	u8 rbm;

	ar = ab->pdevs[ath12k_hw_mac_id_to_pdev_id(ab->hw_params, mac_id)].ar;
	dp = &ab->dp;
	pmon = &ar->dp.mon_data;
	srng_id = ath12k_hw_mac_id_to_srng_id(ab->hw_params, mac_id);
	rx_ring = &dp->rx_mon_status_refill_ring[srng_id];

	srng = &ab->hal.srng_list[rx_ring->refill_buf_ring.ring_id];

	spin_lock_bh(&srng->lock);

	ath12k_hal_srng_access_begin(ab, srng);

	while (*budget) {
		*budget -= 1;
		rx_mon_status_desc = ath12k_hal_srng_src_peek(ab, srng);
		if (!rx_mon_status_desc) {
			pmon->buf_state = DP_MON_STATUS_REPLINISH;
			break;
		}
		ath12k_hal_rx_buf_addr_info_get(rx_mon_status_desc, &paddr,
						&cookie, &rbm);
		/* A zero paddr means the entry carries no buffer; just
		 * refill it below.
		 */
		if (paddr) {
			buf_id = u32_get_bits(cookie, DP_RXDMA_BUF_COOKIE_BUF_ID);

			spin_lock_bh(&rx_ring->idr_lock);
			skb = idr_find(&rx_ring->bufs_idr, buf_id);
			spin_unlock_bh(&rx_ring->idr_lock);

			if (!skb) {
				ath12k_warn(ab, "rx monitor status with invalid buf_id %d\n",
					    buf_id);
				pmon->buf_state = DP_MON_STATUS_REPLINISH;
				goto move_next;
			}

			rxcb = ATH12K_SKB_RXCB(skb);

			/* Sync before peeking at the TLV header the HW wrote */
			dma_sync_single_for_cpu(ab->dev, rxcb->paddr,
						skb->len + skb_tailroom(skb),
						DMA_FROM_DEVICE);

			tlv = (struct hal_tlv_64_hdr *)skb->data;
			if (le64_get_bits(tlv->tl, HAL_TLV_HDR_TAG) !=
					HAL_RX_STATUS_BUFFER_DONE) {
				pmon->buf_state = DP_MON_STATUS_NO_DMA;
				ath12k_warn(ab,
					    "mon status DONE not set %llx, buf_id %d\n",
					    le64_get_bits(tlv->tl, HAL_TLV_HDR_TAG),
					    buf_id);
				/* RxDMA status done bit might not be set even
				 * though tp is moved by HW.
				 */

				/* If done status is missing:
				 * 1. As per MAC team's suggestion,
				 *    when HP + 1 entry is peeked and if DMA
				 *    is not done and if HP + 2 entry's DMA done
				 *    is set. skip HP + 1 entry and
				 *    start processing in next interrupt.
				 * 2. If HP + 2 entry's DMA done is not set,
				 *    poll onto HP + 1 entry DMA done to be set.
				 *    Check status for same buffer for next time
				 *    dp_rx_mon_status_srng_process
				 */
				reap_status = ath12k_dp_rx_mon_buf_done(ab, srng,
									rx_ring);
				if (reap_status == DP_MON_STATUS_NO_DMA)
					continue;

				/* Case 1: drop this buffer and refill the
				 * entry; processing resumes next interrupt.
				 */
				spin_lock_bh(&rx_ring->idr_lock);
				idr_remove(&rx_ring->bufs_idr, buf_id);
				spin_unlock_bh(&rx_ring->idr_lock);

				dma_unmap_single(ab->dev, rxcb->paddr,
						 skb->len + skb_tailroom(skb),
						 DMA_FROM_DEVICE);

				dev_kfree_skb_any(skb);
				pmon->buf_state = DP_MON_STATUS_REPLINISH;
				goto move_next;
			}

			/* DMA done: take ownership of the buffer and hand it
			 * to the caller via skb_list.
			 */
			spin_lock_bh(&rx_ring->idr_lock);
			idr_remove(&rx_ring->bufs_idr, buf_id);
			spin_unlock_bh(&rx_ring->idr_lock);

			dma_unmap_single(ab->dev, rxcb->paddr,
					 skb->len + skb_tailroom(skb),
					 DMA_FROM_DEVICE);

			if (ath12k_dp_pkt_set_pktlen(skb, RX_MON_STATUS_BUF_SIZE)) {
				dev_kfree_skb_any(skb);
				goto move_next;
			}
			__skb_queue_tail(skb_list, skb);
		} else {
			pmon->buf_state = DP_MON_STATUS_REPLINISH;
		}
move_next:
		/* Refill the reaped entry; on allocation failure park a
		 * null address info in the descriptor and stop reaping.
		 */
		skb = ath12k_dp_rx_alloc_mon_status_buf(ab, rx_ring,
							&buf_id);

		if (!skb) {
			ath12k_warn(ab, "failed to alloc buffer for status ring\n");
			hal_params = ab->hw_params->hal_params;
			ath12k_hal_rx_buf_addr_info_set(rx_mon_status_desc, 0, 0,
							hal_params->rx_buf_rbm);
			num_buffs_reaped++;
			break;
		}
		rxcb = ATH12K_SKB_RXCB(skb);

		cookie = u32_encode_bits(mac_id, DP_RXDMA_BUF_COOKIE_PDEV_ID) |
			 u32_encode_bits(buf_id, DP_RXDMA_BUF_COOKIE_BUF_ID);

		ath12k_hal_rx_buf_addr_info_set(rx_mon_status_desc, rxcb->paddr,
						cookie,
						ab->hw_params->hal_params->rx_buf_rbm);
		ath12k_hal_srng_src_get_next_entry(ab, srng);
		num_buffs_reaped++;
	}
	ath12k_hal_srng_access_end(ab, srng);
	spin_unlock_bh(&srng->lock);

	return num_buffs_reaped;
}
4085 
/* Pop one monitor MPDU from a REO entrance ring entry: walk the MSDU
 * link descriptor chain, unmap and chain the MSDU skbs into a
 * head/tail list, and return the number of rx buffers consumed.
 * Consumed rx descriptors are moved onto @used_list for replenish.
 */
static u32
ath12k_dp_rx_mon_mpdu_pop(struct ath12k *ar, int mac_id,
			  void *ring_entry, struct sk_buff **head_msdu,
			  struct sk_buff **tail_msdu,
			  struct list_head *used_list,
			  u32 *npackets, u32 *ppdu_id)
{
	struct ath12k_mon_data *pmon = (struct ath12k_mon_data *)&ar->dp.mon_data;
	struct ath12k_buffer_addr *p_buf_addr_info, *p_last_buf_addr_info;
	u32 msdu_ppdu_id = 0, msdu_cnt = 0, total_len = 0, frag_len = 0;
	u32 rx_buf_size, rx_pkt_offset, sw_cookie;
	bool is_frag, is_first_msdu, drop_mpdu = false;
	struct hal_reo_entrance_ring *ent_desc =
		(struct hal_reo_entrance_ring *)ring_entry;
	u32 rx_bufs_used = 0, i = 0, desc_bank = 0;
	struct hal_rx_desc *rx_desc, *tail_rx_desc;
	struct hal_rx_msdu_link *msdu_link_desc;
	struct sk_buff *msdu = NULL, *last = NULL;
	struct ath12k_rx_desc_info *desc_info;
	struct ath12k_buffer_addr buf_info;
	struct hal_rx_msdu_list msdu_list;
	struct ath12k_skb_rxcb *rxcb;
	u16 num_msdus = 0;
	dma_addr_t paddr;
	u8 rbm;

	ath12k_hal_rx_reo_ent_buf_paddr_get(ring_entry, &paddr,
					    &sw_cookie,
					    &p_last_buf_addr_info, &rbm,
					    &msdu_cnt);

	spin_lock_bh(&pmon->mon_lock);

	/* On specific rxdma push errors the whole MPDU is dropped, but
	 * the link descriptor chain is still walked to reclaim buffers.
	 */
	if (le32_get_bits(ent_desc->info1,
			  HAL_REO_ENTR_RING_INFO1_RXDMA_PUSH_REASON) ==
			  HAL_REO_DEST_RING_PUSH_REASON_ERR_DETECTED) {
		u8 rxdma_err = le32_get_bits(ent_desc->info1,
					     HAL_REO_ENTR_RING_INFO1_RXDMA_ERROR_CODE);
		if (rxdma_err == HAL_REO_ENTR_RING_RXDMA_ECODE_FLUSH_REQUEST_ERR ||
		    rxdma_err == HAL_REO_ENTR_RING_RXDMA_ECODE_MPDU_LEN_ERR ||
		    rxdma_err == HAL_REO_ENTR_RING_RXDMA_ECODE_OVERFLOW_ERR) {
			drop_mpdu = true;
			pmon->rx_mon_stats.dest_mpdu_drop++;
		}
	}

	is_frag = false;
	is_first_msdu = true;
	rx_pkt_offset = sizeof(struct hal_rx_desc);

	do {
		/* Seeing the same link descriptor twice means a duplicate;
		 * bail out without consuming anything further.
		 */
		if (pmon->mon_last_linkdesc_paddr == paddr) {
			pmon->rx_mon_stats.dup_mon_linkdesc_cnt++;
			spin_unlock_bh(&pmon->mon_lock);
			return rx_bufs_used;
		}

		desc_bank = u32_get_bits(sw_cookie, DP_LINK_DESC_BANK_MASK);
		msdu_link_desc =
			ar->ab->dp.link_desc_banks[desc_bank].vaddr +
			(paddr - ar->ab->dp.link_desc_banks[desc_bank].paddr);

		ath12k_hal_rx_msdu_list_get(ar, msdu_link_desc, &msdu_list,
					    &num_msdus);
		/* l3pad is read from the last MSDU's rx descriptor.
		 * NOTE(review): desc_info->skb is dereferenced here without
		 * a NULL check, while per-MSDU skbs are checked below —
		 * confirm the last cookie always maps to a valid skb.
		 */
		desc_info = ath12k_dp_get_rx_desc(ar->ab,
						  msdu_list.sw_cookie[num_msdus - 1]);
		tail_rx_desc = (struct hal_rx_desc *)(desc_info->skb)->data;

		for (i = 0; i < num_msdus; i++) {
			u32 l2_hdr_offset;

			if (pmon->mon_last_buf_cookie == msdu_list.sw_cookie[i]) {
				ath12k_dbg(ar->ab, ATH12K_DBG_DATA,
					   "i %d last_cookie %d is same\n",
					   i, pmon->mon_last_buf_cookie);
				drop_mpdu = true;
				pmon->rx_mon_stats.dup_mon_buf_cnt++;
				continue;
			}

			desc_info =
				ath12k_dp_get_rx_desc(ar->ab, msdu_list.sw_cookie[i]);
			msdu = desc_info->skb;

			if (!msdu) {
				ath12k_dbg(ar->ab, ATH12K_DBG_DATA,
					   "msdu_pop: invalid msdu (%d/%d)\n",
					   i + 1, num_msdus);
				goto next_msdu;
			}
			rxcb = ATH12K_SKB_RXCB(msdu);
			/* Cookie/paddr mismatch: drop the rest of this MPDU */
			if (rxcb->paddr != msdu_list.paddr[i]) {
				ath12k_dbg(ar->ab, ATH12K_DBG_DATA,
					   "i %d paddr %lx != %lx\n",
					   i, (unsigned long)rxcb->paddr,
					   (unsigned long)msdu_list.paddr[i]);
				drop_mpdu = true;
				continue;
			}
			if (!rxcb->unmapped) {
				dma_unmap_single(ar->ab->dev, rxcb->paddr,
						 msdu->len +
						 skb_tailroom(msdu),
						 DMA_FROM_DEVICE);
				rxcb->unmapped = 1;
			}
			if (drop_mpdu) {
				ath12k_dbg(ar->ab, ATH12K_DBG_DATA,
					   "i %d drop msdu %p *ppdu_id %x\n",
					   i, msdu, *ppdu_id);
				dev_kfree_skb_any(msdu);
				msdu = NULL;
				goto next_msdu;
			}

			rx_desc = (struct hal_rx_desc *)msdu->data;
			l2_hdr_offset = ath12k_dp_rx_h_l3pad(ar->ab, tail_rx_desc);
			if (is_first_msdu) {
				if (!ath12k_dp_rxdesc_mpdu_valid(ar->ab, rx_desc)) {
					drop_mpdu = true;
					dev_kfree_skb_any(msdu);
					msdu = NULL;
					pmon->mon_last_linkdesc_paddr = paddr;
					goto next_msdu;
				}
				msdu_ppdu_id =
					ath12k_dp_rxdesc_get_ppduid(ar->ab, rx_desc);

				/* PPDU id comparison decides whether this MPDU
				 * belongs to the PPDU being processed.
				 */
				if (ath12k_dp_mon_comp_ppduid(msdu_ppdu_id,
							      ppdu_id)) {
					spin_unlock_bh(&pmon->mon_lock);
					return rx_bufs_used;
				}
				pmon->mon_last_linkdesc_paddr = paddr;
				is_first_msdu = false;
			}
			ath12k_dp_mon_get_buf_len(&msdu_list.msdu_info[i],
						  &is_frag, &total_len,
						  &frag_len, &msdu_cnt);
			rx_buf_size = rx_pkt_offset + l2_hdr_offset + frag_len;

			if (ath12k_dp_pkt_set_pktlen(msdu, rx_buf_size)) {
				dev_kfree_skb_any(msdu);
				goto next_msdu;
			}

			/* Chain the MSDU onto the head/last singly linked list */
			if (!(*head_msdu))
				*head_msdu = msdu;
			else if (last)
				last->next = msdu;

			last = msdu;
next_msdu:
			/* Buffer is consumed even on the drop paths above */
			pmon->mon_last_buf_cookie = msdu_list.sw_cookie[i];
			rx_bufs_used++;
			desc_info->skb = NULL;
			list_add_tail(&desc_info->list, used_list);
		}

		ath12k_hal_rx_buf_addr_info_set(&buf_info, paddr, sw_cookie, rbm);

		/* Fetch the next link descriptor before returning this one
		 * to the idle list.
		 */
		ath12k_dp_mon_next_link_desc_get(msdu_link_desc, &paddr,
						 &sw_cookie, &rbm,
						 &p_buf_addr_info);

		ath12k_dp_rx_link_desc_return(ar->ab, &buf_info,
					      HAL_WBM_REL_BM_ACT_PUT_IN_IDLE);

		p_last_buf_addr_info = p_buf_addr_info;

	} while (paddr && msdu_cnt);

	spin_unlock_bh(&pmon->mon_lock);

	if (last)
		last->next = NULL;

	/* NOTE(review): msdu may be NULL here (e.g. last MSDU dropped),
	 * so *tail_msdu can be NULL — callers must check head && tail.
	 */
	*tail_msdu = msdu;

	if (msdu_cnt == 0)
		*npackets = 1;

	return rx_bufs_used;
}
4270 
/* The destination ring processing is considered stuck if the destination
 * ring does not move while the status ring advances by 16 PPDUs. As a
 * workaround, the destination ring processing skips the current PPDU.
 */
4275 #define MON_DEST_RING_STUCK_MAX_CNT 16
4276 
4277 static void ath12k_dp_rx_mon_dest_process(struct ath12k *ar, int mac_id,
4278 					  u32 quota, struct napi_struct *napi)
4279 {
4280 	struct ath12k_mon_data *pmon = (struct ath12k_mon_data *)&ar->dp.mon_data;
4281 	struct ath12k_pdev_mon_stats *rx_mon_stats;
4282 	u32 ppdu_id, rx_bufs_used = 0, ring_id;
4283 	u32 mpdu_rx_bufs_used, npackets = 0;
4284 	struct ath12k_dp *dp = &ar->ab->dp;
4285 	struct ath12k_base *ab = ar->ab;
4286 	void *ring_entry, *mon_dst_srng;
4287 	struct dp_mon_mpdu *tmp_mpdu;
4288 	LIST_HEAD(rx_desc_used_list);
4289 	struct hal_srng *srng;
4290 
4291 	ring_id = dp->rxdma_err_dst_ring[mac_id].ring_id;
4292 	srng = &ab->hal.srng_list[ring_id];
4293 
4294 	mon_dst_srng = &ab->hal.srng_list[ring_id];
4295 
4296 	spin_lock_bh(&srng->lock);
4297 
4298 	ath12k_hal_srng_access_begin(ab, mon_dst_srng);
4299 
4300 	ppdu_id = pmon->mon_ppdu_info.ppdu_id;
4301 	rx_mon_stats = &pmon->rx_mon_stats;
4302 
4303 	while ((ring_entry = ath12k_hal_srng_dst_peek(ar->ab, mon_dst_srng))) {
4304 		struct sk_buff *head_msdu, *tail_msdu;
4305 
4306 		head_msdu = NULL;
4307 		tail_msdu = NULL;
4308 
4309 		mpdu_rx_bufs_used = ath12k_dp_rx_mon_mpdu_pop(ar, mac_id, ring_entry,
4310 							      &head_msdu, &tail_msdu,
4311 							      &rx_desc_used_list,
4312 							      &npackets, &ppdu_id);
4313 
4314 		rx_bufs_used += mpdu_rx_bufs_used;
4315 
4316 		if (mpdu_rx_bufs_used) {
4317 			dp->mon_dest_ring_stuck_cnt = 0;
4318 		} else {
4319 			dp->mon_dest_ring_stuck_cnt++;
4320 			rx_mon_stats->dest_mon_not_reaped++;
4321 		}
4322 
4323 		if (dp->mon_dest_ring_stuck_cnt > MON_DEST_RING_STUCK_MAX_CNT) {
4324 			rx_mon_stats->dest_mon_stuck++;
4325 			ath12k_dbg(ar->ab, ATH12K_DBG_DATA,
4326 				   "status ring ppdu_id=%d dest ring ppdu_id=%d mon_dest_ring_stuck_cnt=%d dest_mon_not_reaped=%u dest_mon_stuck=%u\n",
4327 				   pmon->mon_ppdu_info.ppdu_id, ppdu_id,
4328 				   dp->mon_dest_ring_stuck_cnt,
4329 				   rx_mon_stats->dest_mon_not_reaped,
4330 				   rx_mon_stats->dest_mon_stuck);
4331 			spin_lock_bh(&pmon->mon_lock);
4332 			pmon->mon_ppdu_info.ppdu_id = ppdu_id;
4333 			spin_unlock_bh(&pmon->mon_lock);
4334 			continue;
4335 		}
4336 
4337 		if (ppdu_id != pmon->mon_ppdu_info.ppdu_id) {
4338 			spin_lock_bh(&pmon->mon_lock);
4339 			pmon->mon_ppdu_status = DP_PPDU_STATUS_START;
4340 			spin_unlock_bh(&pmon->mon_lock);
4341 			ath12k_dbg(ar->ab, ATH12K_DBG_DATA,
4342 				   "dest_rx: new ppdu_id %x != status ppdu_id %x dest_mon_not_reaped = %u dest_mon_stuck = %u\n",
4343 				   ppdu_id, pmon->mon_ppdu_info.ppdu_id,
4344 				   rx_mon_stats->dest_mon_not_reaped,
4345 				   rx_mon_stats->dest_mon_stuck);
4346 			break;
4347 		}
4348 
4349 		if (head_msdu && tail_msdu) {
4350 			tmp_mpdu = kzalloc(sizeof(*tmp_mpdu), GFP_ATOMIC);
4351 			if (!tmp_mpdu)
4352 				break;
4353 
4354 			tmp_mpdu->head = head_msdu;
4355 			tmp_mpdu->tail = tail_msdu;
4356 			tmp_mpdu->err_bitmap = pmon->err_bitmap;
4357 			tmp_mpdu->decap_format = pmon->decap_format;
4358 			ath12k_dp_mon_rx_deliver(ar, tmp_mpdu,
4359 						 &pmon->mon_ppdu_info, napi);
4360 			rx_mon_stats->dest_mpdu_done++;
4361 			kfree(tmp_mpdu);
4362 		}
4363 
4364 		ring_entry = ath12k_hal_srng_dst_get_next_entry(ar->ab,
4365 								mon_dst_srng);
4366 	}
4367 	ath12k_hal_srng_access_end(ar->ab, mon_dst_srng);
4368 
4369 	spin_unlock_bh(&srng->lock);
4370 
4371 	if (rx_bufs_used) {
4372 		rx_mon_stats->dest_ppdu_done++;
4373 		ath12k_dp_rx_bufs_replenish(ar->ab,
4374 					    &dp->rx_refill_buf_ring,
4375 					    &rx_desc_used_list,
4376 					    rx_bufs_used);
4377 	}
4378 }
4379 
4380 static int
4381 __ath12k_dp_mon_process_ring(struct ath12k *ar, int mac_id,
4382 			     struct napi_struct *napi, int *budget)
4383 {
4384 	struct ath12k_mon_data *pmon = (struct ath12k_mon_data *)&ar->dp.mon_data;
4385 	struct ath12k_pdev_mon_stats *rx_mon_stats = &pmon->rx_mon_stats;
4386 	struct hal_rx_mon_ppdu_info *ppdu_info = &pmon->mon_ppdu_info;
4387 	enum hal_rx_mon_status hal_status;
4388 	struct sk_buff_head skb_list;
4389 	int num_buffs_reaped;
4390 	struct sk_buff *skb;
4391 
4392 	__skb_queue_head_init(&skb_list);
4393 
4394 	num_buffs_reaped = ath12k_dp_rx_reap_mon_status_ring(ar->ab, mac_id,
4395 							     budget, &skb_list);
4396 	if (!num_buffs_reaped)
4397 		goto exit;
4398 
4399 	while ((skb = __skb_dequeue(&skb_list))) {
4400 		memset(ppdu_info, 0, sizeof(*ppdu_info));
4401 		ppdu_info->peer_id = HAL_INVALID_PEERID;
4402 
4403 		hal_status = ath12k_dp_mon_parse_rx_dest(ar, pmon, skb);
4404 
4405 		if (ar->monitor_started &&
4406 		    pmon->mon_ppdu_status == DP_PPDU_STATUS_START &&
4407 		    hal_status == HAL_TLV_STATUS_PPDU_DONE) {
4408 			rx_mon_stats->status_ppdu_done++;
4409 			pmon->mon_ppdu_status = DP_PPDU_STATUS_DONE;
4410 			ath12k_dp_rx_mon_dest_process(ar, mac_id, *budget, napi);
4411 			pmon->mon_ppdu_status = DP_PPDU_STATUS_START;
4412 		}
4413 
4414 		dev_kfree_skb_any(skb);
4415 	}
4416 
4417 exit:
4418 	return num_buffs_reaped;
4419 }
4420 
4421 int ath12k_dp_mon_process_ring(struct ath12k_base *ab, int mac_id,
4422 			       struct napi_struct *napi, int budget,
4423 			       enum dp_monitor_mode monitor_mode)
4424 {
4425 	struct ath12k *ar = ath12k_ab_to_ar(ab, mac_id);
4426 	int num_buffs_reaped = 0;
4427 
4428 	if (ab->hw_params->rxdma1_enable) {
4429 		if (monitor_mode == ATH12K_DP_RX_MONITOR_MODE)
4430 			num_buffs_reaped = ath12k_dp_mon_srng_process(ar, &budget, napi);
4431 	} else {
4432 		if (ar->monitor_started)
4433 			num_buffs_reaped =
4434 				__ath12k_dp_mon_process_ring(ar, mac_id, napi, &budget);
4435 	}
4436 
4437 	return num_buffs_reaped;
4438 }
4439