xref: /linux/drivers/net/wireless/ath/ath12k/hal.c (revision 8be4d31cb8aaeea27bde4b7ddb26e28a89062ebf)
1 // SPDX-License-Identifier: BSD-3-Clause-Clear
2 /*
3  * Copyright (c) 2018-2021 The Linux Foundation. All rights reserved.
4  * Copyright (c) 2021-2025 Qualcomm Innovation Center, Inc. All rights reserved.
5  */
6 #include <linux/dma-mapping.h>
7 #include "hal_tx.h"
8 #include "hal_rx.h"
9 #include "debug.h"
10 #include "hal_desc.h"
11 #include "hif.h"
12 
13 static const struct hal_srng_config hw_srng_config_template[] = {
14 	/* TODO: max_rings can be populated by querying HW capabilities */
15 	[HAL_REO_DST] = {
16 		.start_ring_id = HAL_SRNG_RING_ID_REO2SW1,
17 		.max_rings = 8,
18 		.entry_size = sizeof(struct hal_reo_dest_ring) >> 2,
19 		.mac_type = ATH12K_HAL_SRNG_UMAC,
20 		.ring_dir = HAL_SRNG_DIR_DST,
21 		.max_size = HAL_REO_REO2SW1_RING_BASE_MSB_RING_SIZE,
22 	},
23 	[HAL_REO_EXCEPTION] = {
24 	/* Designating REO2SW0 ring as exception ring.
25 	 * Any of the REO2SW rings can be used as exception ring.
26 		 */
27 		.start_ring_id = HAL_SRNG_RING_ID_REO2SW0,
28 		.max_rings = 1,
29 		.entry_size = sizeof(struct hal_reo_dest_ring) >> 2,
30 		.mac_type = ATH12K_HAL_SRNG_UMAC,
31 		.ring_dir = HAL_SRNG_DIR_DST,
32 		.max_size = HAL_REO_REO2SW0_RING_BASE_MSB_RING_SIZE,
33 	},
34 	[HAL_REO_REINJECT] = {
35 		.start_ring_id = HAL_SRNG_RING_ID_SW2REO,
36 		.max_rings = 4,
37 		.entry_size = sizeof(struct hal_reo_entrance_ring) >> 2,
38 		.mac_type = ATH12K_HAL_SRNG_UMAC,
39 		.ring_dir = HAL_SRNG_DIR_SRC,
40 		.max_size = HAL_REO_SW2REO_RING_BASE_MSB_RING_SIZE,
41 	},
42 	[HAL_REO_CMD] = {
43 		.start_ring_id = HAL_SRNG_RING_ID_REO_CMD,
44 		.max_rings = 1,
45 		.entry_size = (sizeof(struct hal_tlv_64_hdr) +
46 			sizeof(struct hal_reo_get_queue_stats)) >> 2,
47 		.mac_type = ATH12K_HAL_SRNG_UMAC,
48 		.ring_dir = HAL_SRNG_DIR_SRC,
49 		.max_size = HAL_REO_CMD_RING_BASE_MSB_RING_SIZE,
50 	},
51 	[HAL_REO_STATUS] = {
52 		.start_ring_id = HAL_SRNG_RING_ID_REO_STATUS,
53 		.max_rings = 1,
54 		.entry_size = (sizeof(struct hal_tlv_64_hdr) +
55 			sizeof(struct hal_reo_get_queue_stats_status)) >> 2,
56 		.mac_type = ATH12K_HAL_SRNG_UMAC,
57 		.ring_dir = HAL_SRNG_DIR_DST,
58 		.max_size = HAL_REO_STATUS_RING_BASE_MSB_RING_SIZE,
59 	},
60 	[HAL_TCL_DATA] = {
61 		.start_ring_id = HAL_SRNG_RING_ID_SW2TCL1,
62 		.max_rings = 6,
63 		.entry_size = sizeof(struct hal_tcl_data_cmd) >> 2,
64 		.mac_type = ATH12K_HAL_SRNG_UMAC,
65 		.ring_dir = HAL_SRNG_DIR_SRC,
66 		.max_size = HAL_SW2TCL1_RING_BASE_MSB_RING_SIZE,
67 	},
68 	[HAL_TCL_CMD] = {
69 		.start_ring_id = HAL_SRNG_RING_ID_SW2TCL_CMD,
70 		.max_rings = 1,
71 		.entry_size = sizeof(struct hal_tcl_gse_cmd) >> 2,
72 		.mac_type = ATH12K_HAL_SRNG_UMAC,
73 		.ring_dir = HAL_SRNG_DIR_SRC,
74 		.max_size = HAL_SW2TCL1_CMD_RING_BASE_MSB_RING_SIZE,
75 	},
76 	[HAL_TCL_STATUS] = {
77 		.start_ring_id = HAL_SRNG_RING_ID_TCL_STATUS,
78 		.max_rings = 1,
79 		.entry_size = (sizeof(struct hal_tlv_hdr) +
80 			     sizeof(struct hal_tcl_status_ring)) >> 2,
81 		.mac_type = ATH12K_HAL_SRNG_UMAC,
82 		.ring_dir = HAL_SRNG_DIR_DST,
83 		.max_size = HAL_TCL_STATUS_RING_BASE_MSB_RING_SIZE,
84 	},
85 	[HAL_CE_SRC] = {
86 		.start_ring_id = HAL_SRNG_RING_ID_CE0_SRC,
87 		.max_rings = 16,
88 		.entry_size = sizeof(struct hal_ce_srng_src_desc) >> 2,
89 		.mac_type = ATH12K_HAL_SRNG_UMAC,
90 		.ring_dir = HAL_SRNG_DIR_SRC,
91 		.max_size = HAL_CE_SRC_RING_BASE_MSB_RING_SIZE,
92 	},
93 	[HAL_CE_DST] = {
94 		.start_ring_id = HAL_SRNG_RING_ID_CE0_DST,
95 		.max_rings = 16,
96 		.entry_size = sizeof(struct hal_ce_srng_dest_desc) >> 2,
97 		.mac_type = ATH12K_HAL_SRNG_UMAC,
98 		.ring_dir = HAL_SRNG_DIR_SRC,
99 		.max_size = HAL_CE_DST_RING_BASE_MSB_RING_SIZE,
100 	},
101 	[HAL_CE_DST_STATUS] = {
102 		.start_ring_id = HAL_SRNG_RING_ID_CE0_DST_STATUS,
103 		.max_rings = 16,
104 		.entry_size = sizeof(struct hal_ce_srng_dst_status_desc) >> 2,
105 		.mac_type = ATH12K_HAL_SRNG_UMAC,
106 		.ring_dir = HAL_SRNG_DIR_DST,
107 		.max_size = HAL_CE_DST_STATUS_RING_BASE_MSB_RING_SIZE,
108 	},
109 	[HAL_WBM_IDLE_LINK] = {
110 		.start_ring_id = HAL_SRNG_RING_ID_WBM_IDLE_LINK,
111 		.max_rings = 1,
112 		.entry_size = sizeof(struct hal_wbm_link_desc) >> 2,
113 		.mac_type = ATH12K_HAL_SRNG_UMAC,
114 		.ring_dir = HAL_SRNG_DIR_SRC,
115 		.max_size = HAL_WBM_IDLE_LINK_RING_BASE_MSB_RING_SIZE,
116 	},
117 	[HAL_SW2WBM_RELEASE] = {
118 		.start_ring_id = HAL_SRNG_RING_ID_WBM_SW0_RELEASE,
119 		.max_rings = 2,
120 		.entry_size = sizeof(struct hal_wbm_release_ring) >> 2,
121 		.mac_type = ATH12K_HAL_SRNG_UMAC,
122 		.ring_dir = HAL_SRNG_DIR_SRC,
123 		.max_size = HAL_SW2WBM_RELEASE_RING_BASE_MSB_RING_SIZE,
124 	},
125 	[HAL_WBM2SW_RELEASE] = {
126 		.start_ring_id = HAL_SRNG_RING_ID_WBM2SW0_RELEASE,
127 		.max_rings = 8,
128 		.entry_size = sizeof(struct hal_wbm_release_ring) >> 2,
129 		.mac_type = ATH12K_HAL_SRNG_UMAC,
130 		.ring_dir = HAL_SRNG_DIR_DST,
131 		.max_size = HAL_WBM2SW_RELEASE_RING_BASE_MSB_RING_SIZE,
132 	},
133 	[HAL_RXDMA_BUF] = {
134 		.start_ring_id = HAL_SRNG_SW2RXDMA_BUF0,
135 		.max_rings = 1,
136 		.entry_size = sizeof(struct hal_wbm_buffer_ring) >> 2,
137 		.mac_type = ATH12K_HAL_SRNG_DMAC,
138 		.ring_dir = HAL_SRNG_DIR_SRC,
139 		.max_size = HAL_RXDMA_RING_MAX_SIZE_BE,
140 	},
141 	[HAL_RXDMA_DST] = {
142 		.start_ring_id = HAL_SRNG_RING_ID_WMAC1_RXDMA2SW0,
143 		.max_rings = 0,
144 		.entry_size = 0,
145 		.mac_type = ATH12K_HAL_SRNG_PMAC,
146 		.ring_dir = HAL_SRNG_DIR_DST,
147 		.max_size = HAL_RXDMA_RING_MAX_SIZE_BE,
148 	},
149 	[HAL_RXDMA_MONITOR_BUF] = {
150 		.start_ring_id = HAL_SRNG_SW2RXMON_BUF0,
151 		.max_rings = 1,
152 		.entry_size = sizeof(struct hal_mon_buf_ring) >> 2,
153 		.mac_type = ATH12K_HAL_SRNG_PMAC,
154 		.ring_dir = HAL_SRNG_DIR_SRC,
155 		.max_size = HAL_RXDMA_RING_MAX_SIZE_BE,
156 	},
157 	[HAL_RXDMA_MONITOR_STATUS] = {
158 		.start_ring_id = HAL_SRNG_RING_ID_WMAC1_SW2RXDMA1_STATBUF,
159 		.max_rings = 1,
160 		.entry_size = sizeof(struct hal_wbm_buffer_ring) >> 2,
161 		.mac_type = ATH12K_HAL_SRNG_PMAC,
162 		.ring_dir = HAL_SRNG_DIR_SRC,
163 		.max_size = HAL_RXDMA_RING_MAX_SIZE_BE,
164 	},
165 	[HAL_RXDMA_MONITOR_DESC] = { 0, },
166 	[HAL_RXDMA_DIR_BUF] = {
167 		.start_ring_id = HAL_SRNG_RING_ID_RXDMA_DIR_BUF,
168 		.max_rings = 2,
169 		.entry_size = 8 >> 2, /* TODO: Define the struct */
170 		.mac_type = ATH12K_HAL_SRNG_PMAC,
171 		.ring_dir = HAL_SRNG_DIR_SRC,
172 		.max_size = HAL_RXDMA_RING_MAX_SIZE_BE,
173 	},
174 	[HAL_PPE2TCL] = {
175 		.start_ring_id = HAL_SRNG_RING_ID_PPE2TCL1,
176 		.max_rings = 1,
177 		.entry_size = sizeof(struct hal_tcl_entrance_from_ppe_ring) >> 2,
178 		.mac_type = ATH12K_HAL_SRNG_PMAC,
179 		.ring_dir = HAL_SRNG_DIR_SRC,
180 		.max_size = HAL_SW2TCL1_RING_BASE_MSB_RING_SIZE,
181 	},
182 	[HAL_PPE_RELEASE] = {
183 		.start_ring_id = HAL_SRNG_RING_ID_WBM_PPE_RELEASE,
184 		.max_rings = 1,
185 		.entry_size = sizeof(struct hal_wbm_release_ring) >> 2,
186 		.mac_type = ATH12K_HAL_SRNG_PMAC,
187 		.ring_dir = HAL_SRNG_DIR_SRC,
188 		.max_size = HAL_WBM2PPE_RELEASE_RING_BASE_MSB_RING_SIZE,
189 	},
190 	[HAL_TX_MONITOR_BUF] = {
191 		.start_ring_id = HAL_SRNG_RING_ID_WMAC1_SW2TXMON_BUF0,
192 		.max_rings = 1,
193 		.entry_size = sizeof(struct hal_mon_buf_ring) >> 2,
194 		.mac_type = ATH12K_HAL_SRNG_PMAC,
195 		.ring_dir = HAL_SRNG_DIR_SRC,
196 		.max_size = HAL_RXDMA_RING_MAX_SIZE_BE,
197 	},
198 	[HAL_RXDMA_MONITOR_DST] = {
199 		.start_ring_id = HAL_SRNG_RING_ID_WMAC1_SW2RXMON_BUF0,
200 		.max_rings = 1,
201 		.entry_size = sizeof(struct hal_mon_dest_desc) >> 2,
202 		.mac_type = ATH12K_HAL_SRNG_PMAC,
203 		.ring_dir = HAL_SRNG_DIR_DST,
204 		.max_size = HAL_RXDMA_RING_MAX_SIZE_BE,
205 	},
206 	[HAL_TX_MONITOR_DST] = {
207 		.start_ring_id = HAL_SRNG_RING_ID_WMAC1_TXMON2SW0_BUF0,
208 		.max_rings = 1,
209 		.entry_size = sizeof(struct hal_mon_dest_desc) >> 2,
210 		.mac_type = ATH12K_HAL_SRNG_PMAC,
211 		.ring_dir = HAL_SRNG_DIR_DST,
212 		.max_size = HAL_RXDMA_RING_MAX_SIZE_BE,
213 	}
214 };
215 
216 static const struct ath12k_hal_tcl_to_wbm_rbm_map
217 ath12k_hal_qcn9274_tcl_to_wbm_rbm_map[DP_TCL_NUM_RING_MAX] = {
218 	{
219 		.wbm_ring_num = 0,
220 		.rbm_id = HAL_RX_BUF_RBM_SW0_BM,
221 	},
222 	{
223 		.wbm_ring_num = 1,
224 		.rbm_id = HAL_RX_BUF_RBM_SW1_BM,
225 	},
226 	{
227 		.wbm_ring_num = 2,
228 		.rbm_id = HAL_RX_BUF_RBM_SW2_BM,
229 	},
230 	{
231 		.wbm_ring_num = 4,
232 		.rbm_id = HAL_RX_BUF_RBM_SW4_BM,
233 	}
234 };
235 
236 static const struct ath12k_hal_tcl_to_wbm_rbm_map
237 ath12k_hal_wcn7850_tcl_to_wbm_rbm_map[DP_TCL_NUM_RING_MAX] = {
238 	{
239 		.wbm_ring_num = 0,
240 		.rbm_id = HAL_RX_BUF_RBM_SW0_BM,
241 	},
242 	{
243 		.wbm_ring_num = 2,
244 		.rbm_id = HAL_RX_BUF_RBM_SW2_BM,
245 	},
246 	{
247 		.wbm_ring_num = 4,
248 		.rbm_id = HAL_RX_BUF_RBM_SW4_BM,
249 	},
250 };
251 
252 static unsigned int ath12k_hal_reo1_ring_id_offset(struct ath12k_base *ab)
253 {
254 	return HAL_REO1_RING_ID(ab) - HAL_REO1_RING_BASE_LSB(ab);
255 }
256 
257 static unsigned int ath12k_hal_reo1_ring_msi1_base_lsb_offset(struct ath12k_base *ab)
258 {
259 	return HAL_REO1_RING_MSI1_BASE_LSB(ab) - HAL_REO1_RING_BASE_LSB(ab);
260 }
261 
262 static unsigned int ath12k_hal_reo1_ring_msi1_base_msb_offset(struct ath12k_base *ab)
263 {
264 	return HAL_REO1_RING_MSI1_BASE_MSB(ab) - HAL_REO1_RING_BASE_LSB(ab);
265 }
266 
267 static unsigned int ath12k_hal_reo1_ring_msi1_data_offset(struct ath12k_base *ab)
268 {
269 	return HAL_REO1_RING_MSI1_DATA(ab) - HAL_REO1_RING_BASE_LSB(ab);
270 }
271 
272 static unsigned int ath12k_hal_reo1_ring_base_msb_offset(struct ath12k_base *ab)
273 {
274 	return HAL_REO1_RING_BASE_MSB(ab) - HAL_REO1_RING_BASE_LSB(ab);
275 }
276 
277 static unsigned int ath12k_hal_reo1_ring_producer_int_setup_offset(struct ath12k_base *ab)
278 {
279 	return HAL_REO1_RING_PRODUCER_INT_SETUP(ab) - HAL_REO1_RING_BASE_LSB(ab);
280 }
281 
282 static unsigned int ath12k_hal_reo1_ring_hp_addr_lsb_offset(struct ath12k_base *ab)
283 {
284 	return HAL_REO1_RING_HP_ADDR_LSB(ab) - HAL_REO1_RING_BASE_LSB(ab);
285 }
286 
287 static unsigned int ath12k_hal_reo1_ring_hp_addr_msb_offset(struct ath12k_base *ab)
288 {
289 	return HAL_REO1_RING_HP_ADDR_MSB(ab) - HAL_REO1_RING_BASE_LSB(ab);
290 }
291 
292 static unsigned int ath12k_hal_reo1_ring_misc_offset(struct ath12k_base *ab)
293 {
294 	return HAL_REO1_RING_MISC(ab) - HAL_REO1_RING_BASE_LSB(ab);
295 }
296 
297 static bool ath12k_hw_qcn9274_rx_desc_get_first_msdu(struct hal_rx_desc *desc)
298 {
299 	return !!le16_get_bits(desc->u.qcn9274.msdu_end.info5,
300 			       RX_MSDU_END_INFO5_FIRST_MSDU);
301 }
302 
303 static bool ath12k_hw_qcn9274_rx_desc_get_last_msdu(struct hal_rx_desc *desc)
304 {
305 	return !!le16_get_bits(desc->u.qcn9274.msdu_end.info5,
306 			       RX_MSDU_END_INFO5_LAST_MSDU);
307 }
308 
309 static u8 ath12k_hw_qcn9274_rx_desc_get_l3_pad_bytes(struct hal_rx_desc *desc)
310 {
311 	return le16_get_bits(desc->u.qcn9274.msdu_end.info5,
312 			     RX_MSDU_END_INFO5_L3_HDR_PADDING);
313 }
314 
315 static bool ath12k_hw_qcn9274_rx_desc_encrypt_valid(struct hal_rx_desc *desc)
316 {
317 	return !!le32_get_bits(desc->u.qcn9274.mpdu_start.info4,
318 			       RX_MPDU_START_INFO4_ENCRYPT_INFO_VALID);
319 }
320 
321 static u32 ath12k_hw_qcn9274_rx_desc_get_encrypt_type(struct hal_rx_desc *desc)
322 {
323 	return le32_get_bits(desc->u.qcn9274.mpdu_start.info2,
324 			     RX_MPDU_START_INFO2_ENC_TYPE);
325 }
326 
327 static u8 ath12k_hw_qcn9274_rx_desc_get_decap_type(struct hal_rx_desc *desc)
328 {
329 	return le32_get_bits(desc->u.qcn9274.msdu_end.info11,
330 			     RX_MSDU_END_INFO11_DECAP_FORMAT);
331 }
332 
333 static u8 ath12k_hw_qcn9274_rx_desc_get_mesh_ctl(struct hal_rx_desc *desc)
334 {
335 	return le32_get_bits(desc->u.qcn9274.msdu_end.info11,
336 			     RX_MSDU_END_INFO11_MESH_CTRL_PRESENT);
337 }
338 
339 static bool ath12k_hw_qcn9274_rx_desc_get_mpdu_seq_ctl_vld(struct hal_rx_desc *desc)
340 {
341 	return !!le32_get_bits(desc->u.qcn9274.mpdu_start.info4,
342 			       RX_MPDU_START_INFO4_MPDU_SEQ_CTRL_VALID);
343 }
344 
345 static bool ath12k_hw_qcn9274_rx_desc_get_mpdu_fc_valid(struct hal_rx_desc *desc)
346 {
347 	return !!le32_get_bits(desc->u.qcn9274.mpdu_start.info4,
348 			       RX_MPDU_START_INFO4_MPDU_FCTRL_VALID);
349 }
350 
351 static u16 ath12k_hw_qcn9274_rx_desc_get_mpdu_start_seq_no(struct hal_rx_desc *desc)
352 {
353 	return le32_get_bits(desc->u.qcn9274.mpdu_start.info4,
354 			     RX_MPDU_START_INFO4_MPDU_SEQ_NUM);
355 }
356 
357 static u16 ath12k_hw_qcn9274_rx_desc_get_msdu_len(struct hal_rx_desc *desc)
358 {
359 	return le32_get_bits(desc->u.qcn9274.msdu_end.info10,
360 			     RX_MSDU_END_INFO10_MSDU_LENGTH);
361 }
362 
363 static u8 ath12k_hw_qcn9274_rx_desc_get_msdu_sgi(struct hal_rx_desc *desc)
364 {
365 	return le32_get_bits(desc->u.qcn9274.msdu_end.info12,
366 			     RX_MSDU_END_INFO12_SGI);
367 }
368 
369 static u8 ath12k_hw_qcn9274_rx_desc_get_msdu_rate_mcs(struct hal_rx_desc *desc)
370 {
371 	return le32_get_bits(desc->u.qcn9274.msdu_end.info12,
372 			     RX_MSDU_END_INFO12_RATE_MCS);
373 }
374 
375 static u8 ath12k_hw_qcn9274_rx_desc_get_msdu_rx_bw(struct hal_rx_desc *desc)
376 {
377 	return le32_get_bits(desc->u.qcn9274.msdu_end.info12,
378 			     RX_MSDU_END_INFO12_RECV_BW);
379 }
380 
381 static u32 ath12k_hw_qcn9274_rx_desc_get_msdu_freq(struct hal_rx_desc *desc)
382 {
383 	return __le32_to_cpu(desc->u.qcn9274.msdu_end.phy_meta_data);
384 }
385 
386 static u8 ath12k_hw_qcn9274_rx_desc_get_msdu_pkt_type(struct hal_rx_desc *desc)
387 {
388 	return le32_get_bits(desc->u.qcn9274.msdu_end.info12,
389 			     RX_MSDU_END_INFO12_PKT_TYPE);
390 }
391 
392 static u8 ath12k_hw_qcn9274_rx_desc_get_msdu_nss(struct hal_rx_desc *desc)
393 {
394 	return le32_get_bits(desc->u.qcn9274.msdu_end.info12,
395 			     RX_MSDU_END_INFO12_MIMO_SS_BITMAP);
396 }
397 
398 static u8 ath12k_hw_qcn9274_rx_desc_get_mpdu_tid(struct hal_rx_desc *desc)
399 {
400 	return le16_get_bits(desc->u.qcn9274.msdu_end.info5,
401 			    RX_MSDU_END_INFO5_TID);
402 }
403 
404 static u16 ath12k_hw_qcn9274_rx_desc_get_mpdu_peer_id(struct hal_rx_desc *desc)
405 {
406 	return __le16_to_cpu(desc->u.qcn9274.mpdu_start.sw_peer_id);
407 }
408 
409 static void ath12k_hw_qcn9274_rx_desc_copy_end_tlv(struct hal_rx_desc *fdesc,
410 						   struct hal_rx_desc *ldesc)
411 {
412 	memcpy(&fdesc->u.qcn9274.msdu_end, &ldesc->u.qcn9274.msdu_end,
413 	       sizeof(struct rx_msdu_end_qcn9274));
414 }
415 
416 static u32 ath12k_hw_qcn9274_rx_desc_get_mpdu_ppdu_id(struct hal_rx_desc *desc)
417 {
418 	return __le16_to_cpu(desc->u.qcn9274.mpdu_start.phy_ppdu_id);
419 }
420 
421 static void ath12k_hw_qcn9274_rx_desc_set_msdu_len(struct hal_rx_desc *desc, u16 len)
422 {
423 	u32 info = __le32_to_cpu(desc->u.qcn9274.msdu_end.info10);
424 
425 	info &= ~RX_MSDU_END_INFO10_MSDU_LENGTH;
426 	info |= u32_encode_bits(len, RX_MSDU_END_INFO10_MSDU_LENGTH);
427 
428 	desc->u.qcn9274.msdu_end.info10 = __cpu_to_le32(info);
429 }
430 
431 static u8 *ath12k_hw_qcn9274_rx_desc_get_msdu_payload(struct hal_rx_desc *desc)
432 {
433 	return &desc->u.qcn9274.msdu_payload[0];
434 }
435 
436 static u32 ath12k_hw_qcn9274_rx_desc_get_mpdu_start_offset(void)
437 {
438 	return offsetof(struct hal_rx_desc_qcn9274, mpdu_start);
439 }
440 
441 static u32 ath12k_hw_qcn9274_rx_desc_get_msdu_end_offset(void)
442 {
443 	return offsetof(struct hal_rx_desc_qcn9274, msdu_end);
444 }
445 
446 static bool ath12k_hw_qcn9274_rx_desc_mac_addr2_valid(struct hal_rx_desc *desc)
447 {
448 	return __le32_to_cpu(desc->u.qcn9274.mpdu_start.info4) &
449 	       RX_MPDU_START_INFO4_MAC_ADDR2_VALID;
450 }
451 
452 static u8 *ath12k_hw_qcn9274_rx_desc_mpdu_start_addr2(struct hal_rx_desc *desc)
453 {
454 	return desc->u.qcn9274.mpdu_start.addr2;
455 }
456 
457 static bool ath12k_hw_qcn9274_rx_desc_is_da_mcbc(struct hal_rx_desc *desc)
458 {
459 	return __le16_to_cpu(desc->u.qcn9274.msdu_end.info5) &
460 	       RX_MSDU_END_INFO5_DA_IS_MCBC;
461 }
462 
463 static void ath12k_hw_qcn9274_rx_desc_get_dot11_hdr(struct hal_rx_desc *desc,
464 						    struct ieee80211_hdr *hdr)
465 {
466 	hdr->frame_control = desc->u.qcn9274.mpdu_start.frame_ctrl;
467 	hdr->duration_id = desc->u.qcn9274.mpdu_start.duration;
468 	ether_addr_copy(hdr->addr1, desc->u.qcn9274.mpdu_start.addr1);
469 	ether_addr_copy(hdr->addr2, desc->u.qcn9274.mpdu_start.addr2);
470 	ether_addr_copy(hdr->addr3, desc->u.qcn9274.mpdu_start.addr3);
471 	if (__le32_to_cpu(desc->u.qcn9274.mpdu_start.info4) &
472 			RX_MPDU_START_INFO4_MAC_ADDR4_VALID) {
473 		ether_addr_copy(hdr->addr4, desc->u.qcn9274.mpdu_start.addr4);
474 	}
475 	hdr->seq_ctrl = desc->u.qcn9274.mpdu_start.seq_ctrl;
476 }
477 
478 static void ath12k_hw_qcn9274_rx_desc_get_crypto_hdr(struct hal_rx_desc *desc,
479 						     u8 *crypto_hdr,
480 						     enum hal_encrypt_type enctype)
481 {
482 	unsigned int key_id;
483 
484 	switch (enctype) {
485 	case HAL_ENCRYPT_TYPE_OPEN:
486 		return;
487 	case HAL_ENCRYPT_TYPE_TKIP_NO_MIC:
488 	case HAL_ENCRYPT_TYPE_TKIP_MIC:
489 		crypto_hdr[0] =
490 			HAL_RX_MPDU_INFO_PN_GET_BYTE2(desc->u.qcn9274.mpdu_start.pn[0]);
491 		crypto_hdr[1] = 0;
492 		crypto_hdr[2] =
493 			HAL_RX_MPDU_INFO_PN_GET_BYTE1(desc->u.qcn9274.mpdu_start.pn[0]);
494 		break;
495 	case HAL_ENCRYPT_TYPE_CCMP_128:
496 	case HAL_ENCRYPT_TYPE_CCMP_256:
497 	case HAL_ENCRYPT_TYPE_GCMP_128:
498 	case HAL_ENCRYPT_TYPE_AES_GCMP_256:
499 		crypto_hdr[0] =
500 			HAL_RX_MPDU_INFO_PN_GET_BYTE1(desc->u.qcn9274.mpdu_start.pn[0]);
501 		crypto_hdr[1] =
502 			HAL_RX_MPDU_INFO_PN_GET_BYTE2(desc->u.qcn9274.mpdu_start.pn[0]);
503 		crypto_hdr[2] = 0;
504 		break;
505 	case HAL_ENCRYPT_TYPE_WEP_40:
506 	case HAL_ENCRYPT_TYPE_WEP_104:
507 	case HAL_ENCRYPT_TYPE_WEP_128:
508 	case HAL_ENCRYPT_TYPE_WAPI_GCM_SM4:
509 	case HAL_ENCRYPT_TYPE_WAPI:
510 		return;
511 	}
512 	key_id = le32_get_bits(desc->u.qcn9274.mpdu_start.info5,
513 			       RX_MPDU_START_INFO5_KEY_ID);
514 	crypto_hdr[3] = 0x20 | (key_id << 6);
515 	crypto_hdr[4] = HAL_RX_MPDU_INFO_PN_GET_BYTE3(desc->u.qcn9274.mpdu_start.pn[0]);
516 	crypto_hdr[5] = HAL_RX_MPDU_INFO_PN_GET_BYTE4(desc->u.qcn9274.mpdu_start.pn[0]);
517 	crypto_hdr[6] = HAL_RX_MPDU_INFO_PN_GET_BYTE1(desc->u.qcn9274.mpdu_start.pn[1]);
518 	crypto_hdr[7] = HAL_RX_MPDU_INFO_PN_GET_BYTE2(desc->u.qcn9274.mpdu_start.pn[1]);
519 }
520 
521 static int ath12k_hal_srng_create_config_qcn9274(struct ath12k_base *ab)
522 {
523 	struct ath12k_hal *hal = &ab->hal;
524 	struct hal_srng_config *s;
525 
526 	hal->srng_config = kmemdup(hw_srng_config_template,
527 				   sizeof(hw_srng_config_template),
528 				   GFP_KERNEL);
529 	if (!hal->srng_config)
530 		return -ENOMEM;
531 
532 	s = &hal->srng_config[HAL_REO_DST];
533 	s->reg_start[0] = HAL_SEQ_WCSS_UMAC_REO_REG + HAL_REO1_RING_BASE_LSB(ab);
534 	s->reg_start[1] = HAL_SEQ_WCSS_UMAC_REO_REG + HAL_REO1_RING_HP;
535 	s->reg_size[0] = HAL_REO2_RING_BASE_LSB(ab) - HAL_REO1_RING_BASE_LSB(ab);
536 	s->reg_size[1] = HAL_REO2_RING_HP - HAL_REO1_RING_HP;
537 
538 	s = &hal->srng_config[HAL_REO_EXCEPTION];
539 	s->reg_start[0] = HAL_SEQ_WCSS_UMAC_REO_REG + HAL_REO_SW0_RING_BASE_LSB(ab);
540 	s->reg_start[1] = HAL_SEQ_WCSS_UMAC_REO_REG + HAL_REO_SW0_RING_HP;
541 
542 	s = &hal->srng_config[HAL_REO_REINJECT];
543 	s->reg_start[0] = HAL_SEQ_WCSS_UMAC_REO_REG + HAL_SW2REO_RING_BASE_LSB(ab);
544 	s->reg_start[1] = HAL_SEQ_WCSS_UMAC_REO_REG + HAL_SW2REO_RING_HP;
545 	s->reg_size[0] = HAL_SW2REO1_RING_BASE_LSB(ab) - HAL_SW2REO_RING_BASE_LSB(ab);
546 	s->reg_size[1] = HAL_SW2REO1_RING_HP - HAL_SW2REO_RING_HP;
547 
548 	s = &hal->srng_config[HAL_REO_CMD];
549 	s->reg_start[0] = HAL_SEQ_WCSS_UMAC_REO_REG + HAL_REO_CMD_RING_BASE_LSB(ab);
550 	s->reg_start[1] = HAL_SEQ_WCSS_UMAC_REO_REG + HAL_REO_CMD_HP;
551 
552 	s = &hal->srng_config[HAL_REO_STATUS];
553 	s->reg_start[0] = HAL_SEQ_WCSS_UMAC_REO_REG + HAL_REO_STATUS_RING_BASE_LSB(ab);
554 	s->reg_start[1] = HAL_SEQ_WCSS_UMAC_REO_REG + HAL_REO_STATUS_HP;
555 
556 	s = &hal->srng_config[HAL_TCL_DATA];
557 	s->reg_start[0] = HAL_SEQ_WCSS_UMAC_TCL_REG + HAL_TCL1_RING_BASE_LSB(ab);
558 	s->reg_start[1] = HAL_SEQ_WCSS_UMAC_TCL_REG + HAL_TCL1_RING_HP;
559 	s->reg_size[0] = HAL_TCL2_RING_BASE_LSB(ab) - HAL_TCL1_RING_BASE_LSB(ab);
560 	s->reg_size[1] = HAL_TCL2_RING_HP - HAL_TCL1_RING_HP;
561 
562 	s = &hal->srng_config[HAL_TCL_CMD];
563 	s->reg_start[0] = HAL_SEQ_WCSS_UMAC_TCL_REG + HAL_TCL_RING_BASE_LSB(ab);
564 	s->reg_start[1] = HAL_SEQ_WCSS_UMAC_TCL_REG + HAL_TCL_RING_HP;
565 
566 	s = &hal->srng_config[HAL_TCL_STATUS];
567 	s->reg_start[0] = HAL_SEQ_WCSS_UMAC_TCL_REG + HAL_TCL_STATUS_RING_BASE_LSB(ab);
568 	s->reg_start[1] = HAL_SEQ_WCSS_UMAC_TCL_REG + HAL_TCL_STATUS_RING_HP;
569 
570 	s = &hal->srng_config[HAL_CE_SRC];
571 	s->reg_start[0] = HAL_SEQ_WCSS_UMAC_CE0_SRC_REG(ab) + HAL_CE_DST_RING_BASE_LSB;
572 	s->reg_start[1] = HAL_SEQ_WCSS_UMAC_CE0_SRC_REG(ab) + HAL_CE_DST_RING_HP;
573 	s->reg_size[0] = HAL_SEQ_WCSS_UMAC_CE1_SRC_REG(ab) -
574 		HAL_SEQ_WCSS_UMAC_CE0_SRC_REG(ab);
575 	s->reg_size[1] = HAL_SEQ_WCSS_UMAC_CE1_SRC_REG(ab) -
576 		HAL_SEQ_WCSS_UMAC_CE0_SRC_REG(ab);
577 
578 	s = &hal->srng_config[HAL_CE_DST];
579 	s->reg_start[0] = HAL_SEQ_WCSS_UMAC_CE0_DST_REG(ab) + HAL_CE_DST_RING_BASE_LSB;
580 	s->reg_start[1] = HAL_SEQ_WCSS_UMAC_CE0_DST_REG(ab) + HAL_CE_DST_RING_HP;
581 	s->reg_size[0] = HAL_SEQ_WCSS_UMAC_CE1_DST_REG(ab) -
582 		HAL_SEQ_WCSS_UMAC_CE0_DST_REG(ab);
583 	s->reg_size[1] = HAL_SEQ_WCSS_UMAC_CE1_DST_REG(ab) -
584 		HAL_SEQ_WCSS_UMAC_CE0_DST_REG(ab);
585 
586 	s = &hal->srng_config[HAL_CE_DST_STATUS];
587 	s->reg_start[0] = HAL_SEQ_WCSS_UMAC_CE0_DST_REG(ab) +
588 		HAL_CE_DST_STATUS_RING_BASE_LSB;
589 	s->reg_start[1] = HAL_SEQ_WCSS_UMAC_CE0_DST_REG(ab) + HAL_CE_DST_STATUS_RING_HP;
590 	s->reg_size[0] = HAL_SEQ_WCSS_UMAC_CE1_DST_REG(ab) -
591 		HAL_SEQ_WCSS_UMAC_CE0_DST_REG(ab);
592 	s->reg_size[1] = HAL_SEQ_WCSS_UMAC_CE1_DST_REG(ab) -
593 		HAL_SEQ_WCSS_UMAC_CE0_DST_REG(ab);
594 
595 	s = &hal->srng_config[HAL_WBM_IDLE_LINK];
596 	s->reg_start[0] = HAL_SEQ_WCSS_UMAC_WBM_REG + HAL_WBM_IDLE_LINK_RING_BASE_LSB(ab);
597 	s->reg_start[1] = HAL_SEQ_WCSS_UMAC_WBM_REG + HAL_WBM_IDLE_LINK_RING_HP;
598 
599 	s = &hal->srng_config[HAL_SW2WBM_RELEASE];
600 	s->reg_start[0] = HAL_SEQ_WCSS_UMAC_WBM_REG +
601 		HAL_WBM_SW_RELEASE_RING_BASE_LSB(ab);
602 	s->reg_start[1] = HAL_SEQ_WCSS_UMAC_WBM_REG + HAL_WBM_SW_RELEASE_RING_HP;
603 	s->reg_size[0] = HAL_WBM_SW1_RELEASE_RING_BASE_LSB(ab) -
604 			 HAL_WBM_SW_RELEASE_RING_BASE_LSB(ab);
605 	s->reg_size[1] = HAL_WBM_SW1_RELEASE_RING_HP - HAL_WBM_SW_RELEASE_RING_HP;
606 
607 	s = &hal->srng_config[HAL_WBM2SW_RELEASE];
608 	s->reg_start[0] = HAL_SEQ_WCSS_UMAC_WBM_REG + HAL_WBM0_RELEASE_RING_BASE_LSB(ab);
609 	s->reg_start[1] = HAL_SEQ_WCSS_UMAC_WBM_REG + HAL_WBM0_RELEASE_RING_HP;
610 	s->reg_size[0] = HAL_WBM1_RELEASE_RING_BASE_LSB(ab) -
611 		HAL_WBM0_RELEASE_RING_BASE_LSB(ab);
612 	s->reg_size[1] = HAL_WBM1_RELEASE_RING_HP - HAL_WBM0_RELEASE_RING_HP;
613 
614 	/* Some LMAC rings are not accessed from the host:
615 	 * RXDMA_BUF, RXDMA_DST, RXDMA_MONITOR_BUF, RXDMA_MONITOR_STATUS,
616 	 * RXDMA_MONITOR_DST, RXDMA_MONITOR_DESC, RXDMA_DIR_BUF_SRC,
617 	 * RXDMA_RX_MONITOR_BUF, TX_MONITOR_BUF, TX_MONITOR_DST, SW2RXDMA
618 	 */
619 	s = &hal->srng_config[HAL_PPE2TCL];
620 	s->reg_start[0] = HAL_SEQ_WCSS_UMAC_TCL_REG + HAL_TCL_PPE2TCL1_RING_BASE_LSB;
621 	s->reg_start[1] = HAL_SEQ_WCSS_UMAC_TCL_REG + HAL_TCL_PPE2TCL1_RING_HP;
622 
623 	s = &hal->srng_config[HAL_PPE_RELEASE];
624 	s->reg_start[0] = HAL_SEQ_WCSS_UMAC_WBM_REG +
625 				HAL_WBM_PPE_RELEASE_RING_BASE_LSB(ab);
626 	s->reg_start[1] = HAL_SEQ_WCSS_UMAC_WBM_REG + HAL_WBM_PPE_RELEASE_RING_HP;
627 
628 	return 0;
629 }
630 
631 static u16 ath12k_hal_qcn9274_rx_mpdu_start_wmask_get(void)
632 {
633 	return QCN9274_MPDU_START_WMASK;
634 }
635 
636 static u32 ath12k_hal_qcn9274_rx_msdu_end_wmask_get(void)
637 {
638 	return QCN9274_MSDU_END_WMASK;
639 }
640 
641 static const struct hal_rx_ops *ath12k_hal_qcn9274_get_hal_rx_compact_ops(void)
642 {
643 	return &hal_rx_qcn9274_compact_ops;
644 }
645 
646 static bool ath12k_hw_qcn9274_dp_rx_h_msdu_done(struct hal_rx_desc *desc)
647 {
648 	return !!le32_get_bits(desc->u.qcn9274.msdu_end.info14,
649 			       RX_MSDU_END_INFO14_MSDU_DONE);
650 }
651 
652 static bool ath12k_hw_qcn9274_dp_rx_h_l4_cksum_fail(struct hal_rx_desc *desc)
653 {
654 	return !!le32_get_bits(desc->u.qcn9274.msdu_end.info13,
655 			       RX_MSDU_END_INFO13_TCP_UDP_CKSUM_FAIL);
656 }
657 
658 static bool ath12k_hw_qcn9274_dp_rx_h_ip_cksum_fail(struct hal_rx_desc *desc)
659 {
660 	return !!le32_get_bits(desc->u.qcn9274.msdu_end.info13,
661 			       RX_MSDU_END_INFO13_IP_CKSUM_FAIL);
662 }
663 
664 static bool ath12k_hw_qcn9274_dp_rx_h_is_decrypted(struct hal_rx_desc *desc)
665 {
666 	return (le32_get_bits(desc->u.qcn9274.msdu_end.info14,
667 			      RX_MSDU_END_INFO14_DECRYPT_STATUS_CODE) ==
668 			      RX_DESC_DECRYPT_STATUS_CODE_OK);
669 }
670 
671 static u32 ath12k_hw_qcn9274_dp_rx_h_mpdu_err(struct hal_rx_desc *desc)
672 {
673 	u32 info = __le32_to_cpu(desc->u.qcn9274.msdu_end.info13);
674 	u32 errmap = 0;
675 
676 	if (info & RX_MSDU_END_INFO13_FCS_ERR)
677 		errmap |= HAL_RX_MPDU_ERR_FCS;
678 
679 	if (info & RX_MSDU_END_INFO13_DECRYPT_ERR)
680 		errmap |= HAL_RX_MPDU_ERR_DECRYPT;
681 
682 	if (info & RX_MSDU_END_INFO13_TKIP_MIC_ERR)
683 		errmap |= HAL_RX_MPDU_ERR_TKIP_MIC;
684 
685 	if (info & RX_MSDU_END_INFO13_A_MSDU_ERROR)
686 		errmap |= HAL_RX_MPDU_ERR_AMSDU_ERR;
687 
688 	if (info & RX_MSDU_END_INFO13_OVERFLOW_ERR)
689 		errmap |= HAL_RX_MPDU_ERR_OVERFLOW;
690 
691 	if (info & RX_MSDU_END_INFO13_MSDU_LEN_ERR)
692 		errmap |= HAL_RX_MPDU_ERR_MSDU_LEN;
693 
694 	if (info & RX_MSDU_END_INFO13_MPDU_LEN_ERR)
695 		errmap |= HAL_RX_MPDU_ERR_MPDU_LEN;
696 
697 	return errmap;
698 }
699 
700 static u32 ath12k_hw_qcn9274_get_rx_desc_size(void)
701 {
702 	return sizeof(struct hal_rx_desc_qcn9274);
703 }
704 
705 static u8 ath12k_hw_qcn9274_rx_desc_get_msdu_src_link(struct hal_rx_desc *desc)
706 {
707 	return 0;
708 }
709 
710 const struct hal_rx_ops hal_rx_qcn9274_ops = {
711 	.rx_desc_get_first_msdu = ath12k_hw_qcn9274_rx_desc_get_first_msdu,
712 	.rx_desc_get_last_msdu = ath12k_hw_qcn9274_rx_desc_get_last_msdu,
713 	.rx_desc_get_l3_pad_bytes = ath12k_hw_qcn9274_rx_desc_get_l3_pad_bytes,
714 	.rx_desc_encrypt_valid = ath12k_hw_qcn9274_rx_desc_encrypt_valid,
715 	.rx_desc_get_encrypt_type = ath12k_hw_qcn9274_rx_desc_get_encrypt_type,
716 	.rx_desc_get_decap_type = ath12k_hw_qcn9274_rx_desc_get_decap_type,
717 	.rx_desc_get_mesh_ctl = ath12k_hw_qcn9274_rx_desc_get_mesh_ctl,
718 	.rx_desc_get_mpdu_seq_ctl_vld = ath12k_hw_qcn9274_rx_desc_get_mpdu_seq_ctl_vld,
719 	.rx_desc_get_mpdu_fc_valid = ath12k_hw_qcn9274_rx_desc_get_mpdu_fc_valid,
720 	.rx_desc_get_mpdu_start_seq_no = ath12k_hw_qcn9274_rx_desc_get_mpdu_start_seq_no,
721 	.rx_desc_get_msdu_len = ath12k_hw_qcn9274_rx_desc_get_msdu_len,
722 	.rx_desc_get_msdu_sgi = ath12k_hw_qcn9274_rx_desc_get_msdu_sgi,
723 	.rx_desc_get_msdu_rate_mcs = ath12k_hw_qcn9274_rx_desc_get_msdu_rate_mcs,
724 	.rx_desc_get_msdu_rx_bw = ath12k_hw_qcn9274_rx_desc_get_msdu_rx_bw,
725 	.rx_desc_get_msdu_freq = ath12k_hw_qcn9274_rx_desc_get_msdu_freq,
726 	.rx_desc_get_msdu_pkt_type = ath12k_hw_qcn9274_rx_desc_get_msdu_pkt_type,
727 	.rx_desc_get_msdu_nss = ath12k_hw_qcn9274_rx_desc_get_msdu_nss,
728 	.rx_desc_get_mpdu_tid = ath12k_hw_qcn9274_rx_desc_get_mpdu_tid,
729 	.rx_desc_get_mpdu_peer_id = ath12k_hw_qcn9274_rx_desc_get_mpdu_peer_id,
730 	.rx_desc_copy_end_tlv = ath12k_hw_qcn9274_rx_desc_copy_end_tlv,
731 	.rx_desc_get_mpdu_ppdu_id = ath12k_hw_qcn9274_rx_desc_get_mpdu_ppdu_id,
732 	.rx_desc_set_msdu_len = ath12k_hw_qcn9274_rx_desc_set_msdu_len,
733 	.rx_desc_get_msdu_payload = ath12k_hw_qcn9274_rx_desc_get_msdu_payload,
734 	.rx_desc_get_mpdu_start_offset = ath12k_hw_qcn9274_rx_desc_get_mpdu_start_offset,
735 	.rx_desc_get_msdu_end_offset = ath12k_hw_qcn9274_rx_desc_get_msdu_end_offset,
736 	.rx_desc_mac_addr2_valid = ath12k_hw_qcn9274_rx_desc_mac_addr2_valid,
737 	.rx_desc_mpdu_start_addr2 = ath12k_hw_qcn9274_rx_desc_mpdu_start_addr2,
738 	.rx_desc_is_da_mcbc = ath12k_hw_qcn9274_rx_desc_is_da_mcbc,
739 	.rx_desc_get_dot11_hdr = ath12k_hw_qcn9274_rx_desc_get_dot11_hdr,
740 	.rx_desc_get_crypto_header = ath12k_hw_qcn9274_rx_desc_get_crypto_hdr,
741 	.dp_rx_h_msdu_done = ath12k_hw_qcn9274_dp_rx_h_msdu_done,
742 	.dp_rx_h_l4_cksum_fail = ath12k_hw_qcn9274_dp_rx_h_l4_cksum_fail,
743 	.dp_rx_h_ip_cksum_fail = ath12k_hw_qcn9274_dp_rx_h_ip_cksum_fail,
744 	.dp_rx_h_is_decrypted = ath12k_hw_qcn9274_dp_rx_h_is_decrypted,
745 	.dp_rx_h_mpdu_err = ath12k_hw_qcn9274_dp_rx_h_mpdu_err,
746 	.rx_desc_get_desc_size = ath12k_hw_qcn9274_get_rx_desc_size,
747 	.rx_desc_get_msdu_src_link_id = ath12k_hw_qcn9274_rx_desc_get_msdu_src_link,
748 };
749 
750 static bool ath12k_hw_qcn9274_compact_rx_desc_get_first_msdu(struct hal_rx_desc *desc)
751 {
752 	return !!le16_get_bits(desc->u.qcn9274_compact.msdu_end.info5,
753 			       RX_MSDU_END_INFO5_FIRST_MSDU);
754 }
755 
756 static bool ath12k_hw_qcn9274_compact_rx_desc_get_last_msdu(struct hal_rx_desc *desc)
757 {
758 	return !!le16_get_bits(desc->u.qcn9274_compact.msdu_end.info5,
759 			       RX_MSDU_END_INFO5_LAST_MSDU);
760 }
761 
762 static u8 ath12k_hw_qcn9274_compact_rx_desc_get_l3_pad_bytes(struct hal_rx_desc *desc)
763 {
764 	return le16_get_bits(desc->u.qcn9274_compact.msdu_end.info5,
765 			     RX_MSDU_END_INFO5_L3_HDR_PADDING);
766 }
767 
768 static bool ath12k_hw_qcn9274_compact_rx_desc_encrypt_valid(struct hal_rx_desc *desc)
769 {
770 	return !!le32_get_bits(desc->u.qcn9274_compact.mpdu_start.info4,
771 			       RX_MPDU_START_INFO4_ENCRYPT_INFO_VALID);
772 }
773 
774 static u32 ath12k_hw_qcn9274_compact_rx_desc_get_encrypt_type(struct hal_rx_desc *desc)
775 {
776 	return le32_get_bits(desc->u.qcn9274_compact.mpdu_start.info2,
777 			     RX_MPDU_START_INFO2_ENC_TYPE);
778 }
779 
780 static u8 ath12k_hw_qcn9274_compact_rx_desc_get_decap_type(struct hal_rx_desc *desc)
781 {
782 	return le32_get_bits(desc->u.qcn9274_compact.msdu_end.info11,
783 			     RX_MSDU_END_INFO11_DECAP_FORMAT);
784 }
785 
786 static u8 ath12k_hw_qcn9274_compact_rx_desc_get_mesh_ctl(struct hal_rx_desc *desc)
787 {
788 	return le32_get_bits(desc->u.qcn9274.msdu_end.info11,
789 			     RX_MSDU_END_INFO11_MESH_CTRL_PRESENT);
790 }
791 
792 static bool
793 ath12k_hw_qcn9274_compact_rx_desc_get_mpdu_seq_ctl_vld(struct hal_rx_desc *desc)
794 {
795 	return !!le32_get_bits(desc->u.qcn9274_compact.mpdu_start.info4,
796 			       RX_MPDU_START_INFO4_MPDU_SEQ_CTRL_VALID);
797 }
798 
799 static bool ath12k_hw_qcn9274_compact_rx_desc_get_mpdu_fc_valid(struct hal_rx_desc *desc)
800 {
801 	return !!le32_get_bits(desc->u.qcn9274_compact.mpdu_start.info4,
802 			       RX_MPDU_START_INFO4_MPDU_FCTRL_VALID);
803 }
804 
805 static u16
806 ath12k_hw_qcn9274_compact_rx_desc_get_mpdu_start_seq_no(struct hal_rx_desc *desc)
807 {
808 	return le32_get_bits(desc->u.qcn9274_compact.mpdu_start.info4,
809 			     RX_MPDU_START_INFO4_MPDU_SEQ_NUM);
810 }
811 
812 static u16 ath12k_hw_qcn9274_compact_rx_desc_get_msdu_len(struct hal_rx_desc *desc)
813 {
814 	return le32_get_bits(desc->u.qcn9274_compact.msdu_end.info10,
815 			     RX_MSDU_END_INFO10_MSDU_LENGTH);
816 }
817 
818 static u8 ath12k_hw_qcn9274_compact_rx_desc_get_msdu_sgi(struct hal_rx_desc *desc)
819 {
820 	return le32_get_bits(desc->u.qcn9274_compact.msdu_end.info12,
821 			     RX_MSDU_END_INFO12_SGI);
822 }
823 
824 static u8 ath12k_hw_qcn9274_compact_rx_desc_get_msdu_rate_mcs(struct hal_rx_desc *desc)
825 {
826 	return le32_get_bits(desc->u.qcn9274_compact.msdu_end.info12,
827 			     RX_MSDU_END_INFO12_RATE_MCS);
828 }
829 
830 static u8 ath12k_hw_qcn9274_compact_rx_desc_get_msdu_rx_bw(struct hal_rx_desc *desc)
831 {
832 	return le32_get_bits(desc->u.qcn9274_compact.msdu_end.info12,
833 			     RX_MSDU_END_INFO12_RECV_BW);
834 }
835 
836 static u32 ath12k_hw_qcn9274_compact_rx_desc_get_msdu_freq(struct hal_rx_desc *desc)
837 {
838 	return __le32_to_cpu(desc->u.qcn9274_compact.msdu_end.phy_meta_data);
839 }
840 
841 static u8 ath12k_hw_qcn9274_compact_rx_desc_get_msdu_pkt_type(struct hal_rx_desc *desc)
842 {
843 	return le32_get_bits(desc->u.qcn9274_compact.msdu_end.info12,
844 			     RX_MSDU_END_INFO12_PKT_TYPE);
845 }
846 
847 static u8 ath12k_hw_qcn9274_compact_rx_desc_get_msdu_nss(struct hal_rx_desc *desc)
848 {
849 	return le32_get_bits(desc->u.qcn9274_compact.msdu_end.info12,
850 			     RX_MSDU_END_INFO12_MIMO_SS_BITMAP);
851 }
852 
853 static u8 ath12k_hw_qcn9274_compact_rx_desc_get_mpdu_tid(struct hal_rx_desc *desc)
854 {
855 	return le16_get_bits(desc->u.qcn9274_compact.msdu_end.info5,
856 			     RX_MSDU_END_INFO5_TID);
857 }
858 
859 static u16 ath12k_hw_qcn9274_compact_rx_desc_get_mpdu_peer_id(struct hal_rx_desc *desc)
860 {
861 	return __le16_to_cpu(desc->u.qcn9274_compact.mpdu_start.sw_peer_id);
862 }
863 
864 static void ath12k_hw_qcn9274_compact_rx_desc_copy_end_tlv(struct hal_rx_desc *fdesc,
865 							   struct hal_rx_desc *ldesc)
866 {
867 	fdesc->u.qcn9274_compact.msdu_end = ldesc->u.qcn9274_compact.msdu_end;
868 }
869 
870 static u32 ath12k_hw_qcn9274_compact_rx_desc_get_mpdu_ppdu_id(struct hal_rx_desc *desc)
871 {
872 	return __le16_to_cpu(desc->u.qcn9274_compact.mpdu_start.phy_ppdu_id);
873 }
874 
875 static void
876 ath12k_hw_qcn9274_compact_rx_desc_set_msdu_len(struct hal_rx_desc *desc, u16 len)
877 {
878 	u32 info = __le32_to_cpu(desc->u.qcn9274_compact.msdu_end.info10);
879 
880 	info = u32_replace_bits(info, len, RX_MSDU_END_INFO10_MSDU_LENGTH);
881 	desc->u.qcn9274_compact.msdu_end.info10 = __cpu_to_le32(info);
882 }
883 
884 static u8 *ath12k_hw_qcn9274_compact_rx_desc_get_msdu_payload(struct hal_rx_desc *desc)
885 {
886 	return &desc->u.qcn9274_compact.msdu_payload[0];
887 }
888 
889 static u32 ath12k_hw_qcn9274_compact_rx_desc_get_mpdu_start_offset(void)
890 {
891 	return offsetof(struct hal_rx_desc_qcn9274_compact, mpdu_start);
892 }
893 
894 static u32 ath12k_hw_qcn9274_compact_rx_desc_get_msdu_end_offset(void)
895 {
896 	return offsetof(struct hal_rx_desc_qcn9274_compact, msdu_end);
897 }
898 
899 static bool ath12k_hw_qcn9274_compact_rx_desc_mac_addr2_valid(struct hal_rx_desc *desc)
900 {
901 	return __le32_to_cpu(desc->u.qcn9274_compact.mpdu_start.info4) &
902 			     RX_MPDU_START_INFO4_MAC_ADDR2_VALID;
903 }
904 
905 static u8 *ath12k_hw_qcn9274_compact_rx_desc_mpdu_start_addr2(struct hal_rx_desc *desc)
906 {
907 	return desc->u.qcn9274_compact.mpdu_start.addr2;
908 }
909 
910 static bool ath12k_hw_qcn9274_compact_rx_desc_is_da_mcbc(struct hal_rx_desc *desc)
911 {
912 	return __le16_to_cpu(desc->u.qcn9274_compact.msdu_end.info5) &
913 	       RX_MSDU_END_INFO5_DA_IS_MCBC;
914 }
915 
916 static void ath12k_hw_qcn9274_compact_rx_desc_get_dot11_hdr(struct hal_rx_desc *desc,
917 							    struct ieee80211_hdr *hdr)
918 {
919 	hdr->frame_control = desc->u.qcn9274_compact.mpdu_start.frame_ctrl;
920 	hdr->duration_id = desc->u.qcn9274_compact.mpdu_start.duration;
921 	ether_addr_copy(hdr->addr1, desc->u.qcn9274_compact.mpdu_start.addr1);
922 	ether_addr_copy(hdr->addr2, desc->u.qcn9274_compact.mpdu_start.addr2);
923 	ether_addr_copy(hdr->addr3, desc->u.qcn9274_compact.mpdu_start.addr3);
924 	if (__le32_to_cpu(desc->u.qcn9274_compact.mpdu_start.info4) &
925 			RX_MPDU_START_INFO4_MAC_ADDR4_VALID) {
926 		ether_addr_copy(hdr->addr4, desc->u.qcn9274_compact.mpdu_start.addr4);
927 	}
928 	hdr->seq_ctrl = desc->u.qcn9274_compact.mpdu_start.seq_ctrl;
929 }
930 
931 static void
932 ath12k_hw_qcn9274_compact_rx_desc_get_crypto_hdr(struct hal_rx_desc *desc,
933 						 u8 *crypto_hdr,
934 						 enum hal_encrypt_type enctype)
935 {
936 	unsigned int key_id;
937 
938 	switch (enctype) {
939 	case HAL_ENCRYPT_TYPE_OPEN:
940 		return;
941 	case HAL_ENCRYPT_TYPE_TKIP_NO_MIC:
942 	case HAL_ENCRYPT_TYPE_TKIP_MIC:
943 		crypto_hdr[0] =
944 		HAL_RX_MPDU_INFO_PN_GET_BYTE2(desc->u.qcn9274_compact.mpdu_start.pn[0]);
945 		crypto_hdr[1] = 0;
946 		crypto_hdr[2] =
947 		HAL_RX_MPDU_INFO_PN_GET_BYTE1(desc->u.qcn9274_compact.mpdu_start.pn[0]);
948 		break;
949 	case HAL_ENCRYPT_TYPE_CCMP_128:
950 	case HAL_ENCRYPT_TYPE_CCMP_256:
951 	case HAL_ENCRYPT_TYPE_GCMP_128:
952 	case HAL_ENCRYPT_TYPE_AES_GCMP_256:
953 		crypto_hdr[0] =
954 		HAL_RX_MPDU_INFO_PN_GET_BYTE1(desc->u.qcn9274_compact.mpdu_start.pn[0]);
955 		crypto_hdr[1] =
956 		HAL_RX_MPDU_INFO_PN_GET_BYTE2(desc->u.qcn9274_compact.mpdu_start.pn[0]);
957 		crypto_hdr[2] = 0;
958 		break;
959 	case HAL_ENCRYPT_TYPE_WEP_40:
960 	case HAL_ENCRYPT_TYPE_WEP_104:
961 	case HAL_ENCRYPT_TYPE_WEP_128:
962 	case HAL_ENCRYPT_TYPE_WAPI_GCM_SM4:
963 	case HAL_ENCRYPT_TYPE_WAPI:
964 		return;
965 	}
966 	key_id = le32_get_bits(desc->u.qcn9274_compact.mpdu_start.info5,
967 			       RX_MPDU_START_INFO5_KEY_ID);
968 	crypto_hdr[3] = 0x20 | (key_id << 6);
969 	crypto_hdr[4] =
970 		HAL_RX_MPDU_INFO_PN_GET_BYTE3(desc->u.qcn9274_compact.mpdu_start.pn[0]);
971 	crypto_hdr[5] =
972 		HAL_RX_MPDU_INFO_PN_GET_BYTE4(desc->u.qcn9274_compact.mpdu_start.pn[0]);
973 	crypto_hdr[6] =
974 		HAL_RX_MPDU_INFO_PN_GET_BYTE1(desc->u.qcn9274_compact.mpdu_start.pn[1]);
975 	crypto_hdr[7] =
976 		HAL_RX_MPDU_INFO_PN_GET_BYTE2(desc->u.qcn9274_compact.mpdu_start.pn[1]);
977 }
978 
979 static bool ath12k_hw_qcn9274_compact_dp_rx_h_msdu_done(struct hal_rx_desc *desc)
980 {
981 	return !!le32_get_bits(desc->u.qcn9274_compact.msdu_end.info14,
982 			       RX_MSDU_END_INFO14_MSDU_DONE);
983 }
984 
985 static bool ath12k_hw_qcn9274_compact_dp_rx_h_l4_cksum_fail(struct hal_rx_desc *desc)
986 {
987 	return !!le32_get_bits(desc->u.qcn9274_compact.msdu_end.info13,
988 			       RX_MSDU_END_INFO13_TCP_UDP_CKSUM_FAIL);
989 }
990 
991 static bool ath12k_hw_qcn9274_compact_dp_rx_h_ip_cksum_fail(struct hal_rx_desc *desc)
992 {
993 	return !!le32_get_bits(desc->u.qcn9274_compact.msdu_end.info13,
994 			       RX_MSDU_END_INFO13_IP_CKSUM_FAIL);
995 }
996 
997 static bool ath12k_hw_qcn9274_compact_dp_rx_h_is_decrypted(struct hal_rx_desc *desc)
998 {
999 	return (le32_get_bits(desc->u.qcn9274_compact.msdu_end.info14,
1000 			      RX_MSDU_END_INFO14_DECRYPT_STATUS_CODE) ==
1001 			RX_DESC_DECRYPT_STATUS_CODE_OK);
1002 }
1003 
1004 static u32 ath12k_hw_qcn9274_compact_dp_rx_h_mpdu_err(struct hal_rx_desc *desc)
1005 {
1006 	u32 info = __le32_to_cpu(desc->u.qcn9274_compact.msdu_end.info13);
1007 	u32 errmap = 0;
1008 
1009 	if (info & RX_MSDU_END_INFO13_FCS_ERR)
1010 		errmap |= HAL_RX_MPDU_ERR_FCS;
1011 
1012 	if (info & RX_MSDU_END_INFO13_DECRYPT_ERR)
1013 		errmap |= HAL_RX_MPDU_ERR_DECRYPT;
1014 
1015 	if (info & RX_MSDU_END_INFO13_TKIP_MIC_ERR)
1016 		errmap |= HAL_RX_MPDU_ERR_TKIP_MIC;
1017 
1018 	if (info & RX_MSDU_END_INFO13_A_MSDU_ERROR)
1019 		errmap |= HAL_RX_MPDU_ERR_AMSDU_ERR;
1020 
1021 	if (info & RX_MSDU_END_INFO13_OVERFLOW_ERR)
1022 		errmap |= HAL_RX_MPDU_ERR_OVERFLOW;
1023 
1024 	if (info & RX_MSDU_END_INFO13_MSDU_LEN_ERR)
1025 		errmap |= HAL_RX_MPDU_ERR_MSDU_LEN;
1026 
1027 	if (info & RX_MSDU_END_INFO13_MPDU_LEN_ERR)
1028 		errmap |= HAL_RX_MPDU_ERR_MPDU_LEN;
1029 
1030 	return errmap;
1031 }
1032 
1033 static u32 ath12k_hw_qcn9274_compact_get_rx_desc_size(void)
1034 {
1035 	return sizeof(struct hal_rx_desc_qcn9274_compact);
1036 }
1037 
1038 static u8 ath12k_hw_qcn9274_compact_rx_desc_get_msdu_src_link(struct hal_rx_desc *desc)
1039 {
1040 	return le64_get_bits(desc->u.qcn9274_compact.msdu_end.msdu_end_tag,
1041 			     RX_MSDU_END_64_TLV_SRC_LINK_ID);
1042 }
1043 
1044 const struct hal_rx_ops hal_rx_qcn9274_compact_ops = {
1045 	.rx_desc_get_first_msdu = ath12k_hw_qcn9274_compact_rx_desc_get_first_msdu,
1046 	.rx_desc_get_last_msdu = ath12k_hw_qcn9274_compact_rx_desc_get_last_msdu,
1047 	.rx_desc_get_l3_pad_bytes = ath12k_hw_qcn9274_compact_rx_desc_get_l3_pad_bytes,
1048 	.rx_desc_encrypt_valid = ath12k_hw_qcn9274_compact_rx_desc_encrypt_valid,
1049 	.rx_desc_get_encrypt_type = ath12k_hw_qcn9274_compact_rx_desc_get_encrypt_type,
1050 	.rx_desc_get_decap_type = ath12k_hw_qcn9274_compact_rx_desc_get_decap_type,
1051 	.rx_desc_get_mesh_ctl = ath12k_hw_qcn9274_compact_rx_desc_get_mesh_ctl,
1052 	.rx_desc_get_mpdu_seq_ctl_vld =
1053 		ath12k_hw_qcn9274_compact_rx_desc_get_mpdu_seq_ctl_vld,
1054 	.rx_desc_get_mpdu_fc_valid = ath12k_hw_qcn9274_compact_rx_desc_get_mpdu_fc_valid,
1055 	.rx_desc_get_mpdu_start_seq_no =
1056 		ath12k_hw_qcn9274_compact_rx_desc_get_mpdu_start_seq_no,
1057 	.rx_desc_get_msdu_len = ath12k_hw_qcn9274_compact_rx_desc_get_msdu_len,
1058 	.rx_desc_get_msdu_sgi = ath12k_hw_qcn9274_compact_rx_desc_get_msdu_sgi,
1059 	.rx_desc_get_msdu_rate_mcs = ath12k_hw_qcn9274_compact_rx_desc_get_msdu_rate_mcs,
1060 	.rx_desc_get_msdu_rx_bw = ath12k_hw_qcn9274_compact_rx_desc_get_msdu_rx_bw,
1061 	.rx_desc_get_msdu_freq = ath12k_hw_qcn9274_compact_rx_desc_get_msdu_freq,
1062 	.rx_desc_get_msdu_pkt_type = ath12k_hw_qcn9274_compact_rx_desc_get_msdu_pkt_type,
1063 	.rx_desc_get_msdu_nss = ath12k_hw_qcn9274_compact_rx_desc_get_msdu_nss,
1064 	.rx_desc_get_mpdu_tid = ath12k_hw_qcn9274_compact_rx_desc_get_mpdu_tid,
1065 	.rx_desc_get_mpdu_peer_id = ath12k_hw_qcn9274_compact_rx_desc_get_mpdu_peer_id,
1066 	.rx_desc_copy_end_tlv = ath12k_hw_qcn9274_compact_rx_desc_copy_end_tlv,
1067 	.rx_desc_get_mpdu_ppdu_id = ath12k_hw_qcn9274_compact_rx_desc_get_mpdu_ppdu_id,
1068 	.rx_desc_set_msdu_len = ath12k_hw_qcn9274_compact_rx_desc_set_msdu_len,
1069 	.rx_desc_get_msdu_payload = ath12k_hw_qcn9274_compact_rx_desc_get_msdu_payload,
1070 	.rx_desc_get_mpdu_start_offset =
1071 		ath12k_hw_qcn9274_compact_rx_desc_get_mpdu_start_offset,
1072 	.rx_desc_get_msdu_end_offset =
1073 		ath12k_hw_qcn9274_compact_rx_desc_get_msdu_end_offset,
1074 	.rx_desc_mac_addr2_valid = ath12k_hw_qcn9274_compact_rx_desc_mac_addr2_valid,
1075 	.rx_desc_mpdu_start_addr2 = ath12k_hw_qcn9274_compact_rx_desc_mpdu_start_addr2,
1076 	.rx_desc_is_da_mcbc = ath12k_hw_qcn9274_compact_rx_desc_is_da_mcbc,
1077 	.rx_desc_get_dot11_hdr = ath12k_hw_qcn9274_compact_rx_desc_get_dot11_hdr,
1078 	.rx_desc_get_crypto_header = ath12k_hw_qcn9274_compact_rx_desc_get_crypto_hdr,
1079 	.dp_rx_h_msdu_done = ath12k_hw_qcn9274_compact_dp_rx_h_msdu_done,
1080 	.dp_rx_h_l4_cksum_fail = ath12k_hw_qcn9274_compact_dp_rx_h_l4_cksum_fail,
1081 	.dp_rx_h_ip_cksum_fail = ath12k_hw_qcn9274_compact_dp_rx_h_ip_cksum_fail,
1082 	.dp_rx_h_is_decrypted = ath12k_hw_qcn9274_compact_dp_rx_h_is_decrypted,
1083 	.dp_rx_h_mpdu_err = ath12k_hw_qcn9274_compact_dp_rx_h_mpdu_err,
1084 	.rx_desc_get_desc_size = ath12k_hw_qcn9274_compact_get_rx_desc_size,
1085 	.rx_desc_get_msdu_src_link_id =
1086 		ath12k_hw_qcn9274_compact_rx_desc_get_msdu_src_link,
1087 };
1088 
1089 const struct hal_ops hal_qcn9274_ops = {
1090 	.create_srng_config = ath12k_hal_srng_create_config_qcn9274,
1091 	.tcl_to_wbm_rbm_map = ath12k_hal_qcn9274_tcl_to_wbm_rbm_map,
1092 	.rxdma_ring_wmask_rx_mpdu_start = ath12k_hal_qcn9274_rx_mpdu_start_wmask_get,
1093 	.rxdma_ring_wmask_rx_msdu_end = ath12k_hal_qcn9274_rx_msdu_end_wmask_get,
1094 	.get_hal_rx_compact_ops = ath12k_hal_qcn9274_get_hal_rx_compact_ops,
1095 };
1096 
1097 static bool ath12k_hw_wcn7850_rx_desc_get_first_msdu(struct hal_rx_desc *desc)
1098 {
1099 	return !!le16_get_bits(desc->u.wcn7850.msdu_end.info5,
1100 			       RX_MSDU_END_INFO5_FIRST_MSDU);
1101 }
1102 
1103 static bool ath12k_hw_wcn7850_rx_desc_get_last_msdu(struct hal_rx_desc *desc)
1104 {
1105 	return !!le16_get_bits(desc->u.wcn7850.msdu_end.info5,
1106 			       RX_MSDU_END_INFO5_LAST_MSDU);
1107 }
1108 
1109 static u8 ath12k_hw_wcn7850_rx_desc_get_l3_pad_bytes(struct hal_rx_desc *desc)
1110 {
1111 	return le16_get_bits(desc->u.wcn7850.msdu_end.info5,
1112 			    RX_MSDU_END_INFO5_L3_HDR_PADDING);
1113 }
1114 
1115 static bool ath12k_hw_wcn7850_rx_desc_encrypt_valid(struct hal_rx_desc *desc)
1116 {
1117 	return !!le32_get_bits(desc->u.wcn7850.mpdu_start.info4,
1118 			       RX_MPDU_START_INFO4_ENCRYPT_INFO_VALID);
1119 }
1120 
1121 static u32 ath12k_hw_wcn7850_rx_desc_get_encrypt_type(struct hal_rx_desc *desc)
1122 {
1123 	return le32_get_bits(desc->u.wcn7850.mpdu_start.info2,
1124 			     RX_MPDU_START_INFO2_ENC_TYPE);
1125 }
1126 
1127 static u8 ath12k_hw_wcn7850_rx_desc_get_decap_type(struct hal_rx_desc *desc)
1128 {
1129 	return le32_get_bits(desc->u.wcn7850.msdu_end.info11,
1130 			     RX_MSDU_END_INFO11_DECAP_FORMAT);
1131 }
1132 
1133 static u8 ath12k_hw_wcn7850_rx_desc_get_mesh_ctl(struct hal_rx_desc *desc)
1134 {
1135 	return le32_get_bits(desc->u.wcn7850.msdu_end.info11,
1136 			     RX_MSDU_END_INFO11_MESH_CTRL_PRESENT);
1137 }
1138 
1139 static bool ath12k_hw_wcn7850_rx_desc_get_mpdu_seq_ctl_vld(struct hal_rx_desc *desc)
1140 {
1141 	return !!le32_get_bits(desc->u.wcn7850.mpdu_start.info4,
1142 			       RX_MPDU_START_INFO4_MPDU_SEQ_CTRL_VALID);
1143 }
1144 
1145 static bool ath12k_hw_wcn7850_rx_desc_get_mpdu_fc_valid(struct hal_rx_desc *desc)
1146 {
1147 	return !!le32_get_bits(desc->u.wcn7850.mpdu_start.info4,
1148 			       RX_MPDU_START_INFO4_MPDU_FCTRL_VALID);
1149 }
1150 
1151 static u16 ath12k_hw_wcn7850_rx_desc_get_mpdu_start_seq_no(struct hal_rx_desc *desc)
1152 {
1153 	return le32_get_bits(desc->u.wcn7850.mpdu_start.info4,
1154 			     RX_MPDU_START_INFO4_MPDU_SEQ_NUM);
1155 }
1156 
1157 static u16 ath12k_hw_wcn7850_rx_desc_get_msdu_len(struct hal_rx_desc *desc)
1158 {
1159 	return le32_get_bits(desc->u.wcn7850.msdu_end.info10,
1160 			     RX_MSDU_END_INFO10_MSDU_LENGTH);
1161 }
1162 
1163 static u8 ath12k_hw_wcn7850_rx_desc_get_msdu_sgi(struct hal_rx_desc *desc)
1164 {
1165 	return le32_get_bits(desc->u.wcn7850.msdu_end.info12,
1166 			     RX_MSDU_END_INFO12_SGI);
1167 }
1168 
1169 static u8 ath12k_hw_wcn7850_rx_desc_get_msdu_rate_mcs(struct hal_rx_desc *desc)
1170 {
1171 	return le32_get_bits(desc->u.wcn7850.msdu_end.info12,
1172 			     RX_MSDU_END_INFO12_RATE_MCS);
1173 }
1174 
1175 static u8 ath12k_hw_wcn7850_rx_desc_get_msdu_rx_bw(struct hal_rx_desc *desc)
1176 {
1177 	return le32_get_bits(desc->u.wcn7850.msdu_end.info12,
1178 			     RX_MSDU_END_INFO12_RECV_BW);
1179 }
1180 
1181 static u32 ath12k_hw_wcn7850_rx_desc_get_msdu_freq(struct hal_rx_desc *desc)
1182 {
1183 	return __le32_to_cpu(desc->u.wcn7850.msdu_end.phy_meta_data);
1184 }
1185 
1186 static u8 ath12k_hw_wcn7850_rx_desc_get_msdu_pkt_type(struct hal_rx_desc *desc)
1187 {
1188 	return le32_get_bits(desc->u.wcn7850.msdu_end.info12,
1189 			     RX_MSDU_END_INFO12_PKT_TYPE);
1190 }
1191 
1192 static u8 ath12k_hw_wcn7850_rx_desc_get_msdu_nss(struct hal_rx_desc *desc)
1193 {
1194 	return le32_get_bits(desc->u.wcn7850.msdu_end.info12,
1195 			     RX_MSDU_END_INFO12_MIMO_SS_BITMAP);
1196 }
1197 
1198 static u8 ath12k_hw_wcn7850_rx_desc_get_mpdu_tid(struct hal_rx_desc *desc)
1199 {
1200 	return le32_get_bits(desc->u.wcn7850.mpdu_start.info2,
1201 			     RX_MPDU_START_INFO2_TID);
1202 }
1203 
1204 static u16 ath12k_hw_wcn7850_rx_desc_get_mpdu_peer_id(struct hal_rx_desc *desc)
1205 {
1206 	return __le16_to_cpu(desc->u.wcn7850.mpdu_start.sw_peer_id);
1207 }
1208 
1209 static void ath12k_hw_wcn7850_rx_desc_copy_end_tlv(struct hal_rx_desc *fdesc,
1210 						   struct hal_rx_desc *ldesc)
1211 {
1212 	memcpy(&fdesc->u.wcn7850.msdu_end, &ldesc->u.wcn7850.msdu_end,
1213 	       sizeof(struct rx_msdu_end_qcn9274));
1214 }
1215 
1216 static u32 ath12k_hw_wcn7850_rx_desc_get_mpdu_start_tag(struct hal_rx_desc *desc)
1217 {
1218 	return le64_get_bits(desc->u.wcn7850.mpdu_start_tag,
1219 			    HAL_TLV_HDR_TAG);
1220 }
1221 
1222 static u32 ath12k_hw_wcn7850_rx_desc_get_mpdu_ppdu_id(struct hal_rx_desc *desc)
1223 {
1224 	return __le16_to_cpu(desc->u.wcn7850.mpdu_start.phy_ppdu_id);
1225 }
1226 
1227 static void ath12k_hw_wcn7850_rx_desc_set_msdu_len(struct hal_rx_desc *desc, u16 len)
1228 {
1229 	u32 info = __le32_to_cpu(desc->u.wcn7850.msdu_end.info10);
1230 
1231 	info &= ~RX_MSDU_END_INFO10_MSDU_LENGTH;
1232 	info |= u32_encode_bits(len, RX_MSDU_END_INFO10_MSDU_LENGTH);
1233 
1234 	desc->u.wcn7850.msdu_end.info10 = __cpu_to_le32(info);
1235 }
1236 
1237 static u8 *ath12k_hw_wcn7850_rx_desc_get_msdu_payload(struct hal_rx_desc *desc)
1238 {
1239 	return &desc->u.wcn7850.msdu_payload[0];
1240 }
1241 
1242 static u32 ath12k_hw_wcn7850_rx_desc_get_mpdu_start_offset(void)
1243 {
1244 	return offsetof(struct hal_rx_desc_wcn7850, mpdu_start_tag);
1245 }
1246 
1247 static u32 ath12k_hw_wcn7850_rx_desc_get_msdu_end_offset(void)
1248 {
1249 	return offsetof(struct hal_rx_desc_wcn7850, msdu_end_tag);
1250 }
1251 
1252 static bool ath12k_hw_wcn7850_rx_desc_mac_addr2_valid(struct hal_rx_desc *desc)
1253 {
1254 	return __le32_to_cpu(desc->u.wcn7850.mpdu_start.info4) &
1255 	       RX_MPDU_START_INFO4_MAC_ADDR2_VALID;
1256 }
1257 
1258 static u8 *ath12k_hw_wcn7850_rx_desc_mpdu_start_addr2(struct hal_rx_desc *desc)
1259 {
1260 	return desc->u.wcn7850.mpdu_start.addr2;
1261 }
1262 
1263 static bool ath12k_hw_wcn7850_rx_desc_is_da_mcbc(struct hal_rx_desc *desc)
1264 {
1265 	return __le32_to_cpu(desc->u.wcn7850.msdu_end.info13) &
1266 	       RX_MSDU_END_INFO13_MCAST_BCAST;
1267 }
1268 
1269 static void ath12k_hw_wcn7850_rx_desc_get_dot11_hdr(struct hal_rx_desc *desc,
1270 						    struct ieee80211_hdr *hdr)
1271 {
1272 	hdr->frame_control = desc->u.wcn7850.mpdu_start.frame_ctrl;
1273 	hdr->duration_id = desc->u.wcn7850.mpdu_start.duration;
1274 	ether_addr_copy(hdr->addr1, desc->u.wcn7850.mpdu_start.addr1);
1275 	ether_addr_copy(hdr->addr2, desc->u.wcn7850.mpdu_start.addr2);
1276 	ether_addr_copy(hdr->addr3, desc->u.wcn7850.mpdu_start.addr3);
1277 	if (__le32_to_cpu(desc->u.wcn7850.mpdu_start.info4) &
1278 			RX_MPDU_START_INFO4_MAC_ADDR4_VALID) {
1279 		ether_addr_copy(hdr->addr4, desc->u.wcn7850.mpdu_start.addr4);
1280 	}
1281 	hdr->seq_ctrl = desc->u.wcn7850.mpdu_start.seq_ctrl;
1282 }
1283 
1284 static void ath12k_hw_wcn7850_rx_desc_get_crypto_hdr(struct hal_rx_desc *desc,
1285 						     u8 *crypto_hdr,
1286 						     enum hal_encrypt_type enctype)
1287 {
1288 	unsigned int key_id;
1289 
1290 	switch (enctype) {
1291 	case HAL_ENCRYPT_TYPE_OPEN:
1292 		return;
1293 	case HAL_ENCRYPT_TYPE_TKIP_NO_MIC:
1294 	case HAL_ENCRYPT_TYPE_TKIP_MIC:
1295 		crypto_hdr[0] =
1296 			HAL_RX_MPDU_INFO_PN_GET_BYTE2(desc->u.wcn7850.mpdu_start.pn[0]);
1297 		crypto_hdr[1] = 0;
1298 		crypto_hdr[2] =
1299 			HAL_RX_MPDU_INFO_PN_GET_BYTE1(desc->u.wcn7850.mpdu_start.pn[0]);
1300 		break;
1301 	case HAL_ENCRYPT_TYPE_CCMP_128:
1302 	case HAL_ENCRYPT_TYPE_CCMP_256:
1303 	case HAL_ENCRYPT_TYPE_GCMP_128:
1304 	case HAL_ENCRYPT_TYPE_AES_GCMP_256:
1305 		crypto_hdr[0] =
1306 			HAL_RX_MPDU_INFO_PN_GET_BYTE1(desc->u.wcn7850.mpdu_start.pn[0]);
1307 		crypto_hdr[1] =
1308 			HAL_RX_MPDU_INFO_PN_GET_BYTE2(desc->u.wcn7850.mpdu_start.pn[0]);
1309 		crypto_hdr[2] = 0;
1310 		break;
1311 	case HAL_ENCRYPT_TYPE_WEP_40:
1312 	case HAL_ENCRYPT_TYPE_WEP_104:
1313 	case HAL_ENCRYPT_TYPE_WEP_128:
1314 	case HAL_ENCRYPT_TYPE_WAPI_GCM_SM4:
1315 	case HAL_ENCRYPT_TYPE_WAPI:
1316 		return;
1317 	}
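	/* The remaining octets are common to the cipher suites that reach this
	 * point (TKIP and CCMP/GCMP): the key-ID octet (0x20 sets the Ext IV
	 * bit) followed by PN2..PN5, completing the 8-byte IV/PN block of the
	 * 802.11 security header.
	 */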
1318 	key_id = u32_get_bits(__le32_to_cpu(desc->u.wcn7850.mpdu_start.info5),
1319 			      RX_MPDU_START_INFO5_KEY_ID);
1320 	crypto_hdr[3] = 0x20 | (key_id << 6);
1321 	crypto_hdr[4] = HAL_RX_MPDU_INFO_PN_GET_BYTE3(desc->u.wcn7850.mpdu_start.pn[0]);
1322 	crypto_hdr[5] = HAL_RX_MPDU_INFO_PN_GET_BYTE4(desc->u.wcn7850.mpdu_start.pn[0]);
1323 	crypto_hdr[6] = HAL_RX_MPDU_INFO_PN_GET_BYTE1(desc->u.wcn7850.mpdu_start.pn[1]);
1324 	crypto_hdr[7] = HAL_RX_MPDU_INFO_PN_GET_BYTE2(desc->u.wcn7850.mpdu_start.pn[1]);
1325 }
1326 
1327 static int ath12k_hal_srng_create_config_wcn7850(struct ath12k_base *ab)
1328 {
1329 	struct ath12k_hal *hal = &ab->hal;
1330 	struct hal_srng_config *s;
1331 
1332 	hal->srng_config = kmemdup(hw_srng_config_template,
1333 				   sizeof(hw_srng_config_template),
1334 				   GFP_KERNEL);
1335 	if (!hal->srng_config)
1336 		return -ENOMEM;
1337 
1338 	s = &hal->srng_config[HAL_REO_DST];
1339 	s->reg_start[0] = HAL_SEQ_WCSS_UMAC_REO_REG + HAL_REO1_RING_BASE_LSB(ab);
1340 	s->reg_start[1] = HAL_SEQ_WCSS_UMAC_REO_REG + HAL_REO1_RING_HP;
1341 	s->reg_size[0] = HAL_REO2_RING_BASE_LSB(ab) - HAL_REO1_RING_BASE_LSB(ab);
1342 	s->reg_size[1] = HAL_REO2_RING_HP - HAL_REO1_RING_HP;
1343 
1344 	s = &hal->srng_config[HAL_REO_EXCEPTION];
1345 	s->reg_start[0] = HAL_SEQ_WCSS_UMAC_REO_REG + HAL_REO_SW0_RING_BASE_LSB(ab);
1346 	s->reg_start[1] = HAL_SEQ_WCSS_UMAC_REO_REG + HAL_REO_SW0_RING_HP;
1347 
1348 	s = &hal->srng_config[HAL_REO_REINJECT];
1349 	s->max_rings = 1;
1350 	s->reg_start[0] = HAL_SEQ_WCSS_UMAC_REO_REG + HAL_SW2REO_RING_BASE_LSB(ab);
1351 	s->reg_start[1] = HAL_SEQ_WCSS_UMAC_REO_REG + HAL_SW2REO_RING_HP;
1352 
1353 	s = &hal->srng_config[HAL_REO_CMD];
1354 	s->reg_start[0] = HAL_SEQ_WCSS_UMAC_REO_REG + HAL_REO_CMD_RING_BASE_LSB(ab);
1355 	s->reg_start[1] = HAL_SEQ_WCSS_UMAC_REO_REG + HAL_REO_CMD_HP;
1356 
1357 	s = &hal->srng_config[HAL_REO_STATUS];
1358 	s->reg_start[0] = HAL_SEQ_WCSS_UMAC_REO_REG + HAL_REO_STATUS_RING_BASE_LSB(ab);
1359 	s->reg_start[1] = HAL_SEQ_WCSS_UMAC_REO_REG + HAL_REO_STATUS_HP;
1360 
1361 	s = &hal->srng_config[HAL_TCL_DATA];
1362 	s->max_rings = 5;
1363 	s->reg_start[0] = HAL_SEQ_WCSS_UMAC_TCL_REG + HAL_TCL1_RING_BASE_LSB(ab);
1364 	s->reg_start[1] = HAL_SEQ_WCSS_UMAC_TCL_REG + HAL_TCL1_RING_HP;
1365 	s->reg_size[0] = HAL_TCL2_RING_BASE_LSB(ab) - HAL_TCL1_RING_BASE_LSB(ab);
1366 	s->reg_size[1] = HAL_TCL2_RING_HP - HAL_TCL1_RING_HP;
1367 
1368 	s = &hal->srng_config[HAL_TCL_CMD];
1369 	s->reg_start[0] = HAL_SEQ_WCSS_UMAC_TCL_REG + HAL_TCL_RING_BASE_LSB(ab);
1370 	s->reg_start[1] = HAL_SEQ_WCSS_UMAC_TCL_REG + HAL_TCL_RING_HP;
1371 
1372 	s = &hal->srng_config[HAL_TCL_STATUS];
1373 	s->reg_start[0] = HAL_SEQ_WCSS_UMAC_TCL_REG + HAL_TCL_STATUS_RING_BASE_LSB(ab);
1374 	s->reg_start[1] = HAL_SEQ_WCSS_UMAC_TCL_REG + HAL_TCL_STATUS_RING_HP;
1375 
1376 	s = &hal->srng_config[HAL_CE_SRC];
1377 	s->max_rings = 12;
1378 	s->reg_start[0] = HAL_SEQ_WCSS_UMAC_CE0_SRC_REG(ab) + HAL_CE_DST_RING_BASE_LSB;
1379 	s->reg_start[1] = HAL_SEQ_WCSS_UMAC_CE0_SRC_REG(ab) + HAL_CE_DST_RING_HP;
1380 	s->reg_size[0] = HAL_SEQ_WCSS_UMAC_CE1_SRC_REG(ab) -
1381 		HAL_SEQ_WCSS_UMAC_CE0_SRC_REG(ab);
1382 	s->reg_size[1] = HAL_SEQ_WCSS_UMAC_CE1_SRC_REG(ab) -
1383 		HAL_SEQ_WCSS_UMAC_CE0_SRC_REG(ab);
1384 
1385 	s = &hal->srng_config[HAL_CE_DST];
1386 	s->max_rings = 12;
1387 	s->reg_start[0] = HAL_SEQ_WCSS_UMAC_CE0_DST_REG(ab) + HAL_CE_DST_RING_BASE_LSB;
1388 	s->reg_start[1] = HAL_SEQ_WCSS_UMAC_CE0_DST_REG(ab) + HAL_CE_DST_RING_HP;
1389 	s->reg_size[0] = HAL_SEQ_WCSS_UMAC_CE1_DST_REG(ab) -
1390 		HAL_SEQ_WCSS_UMAC_CE0_DST_REG(ab);
1391 	s->reg_size[1] = HAL_SEQ_WCSS_UMAC_CE1_DST_REG(ab) -
1392 		HAL_SEQ_WCSS_UMAC_CE0_DST_REG(ab);
1393 
1394 	s = &hal->srng_config[HAL_CE_DST_STATUS];
1395 	s->max_rings = 12;
1396 	s->reg_start[0] = HAL_SEQ_WCSS_UMAC_CE0_DST_REG(ab) +
1397 		HAL_CE_DST_STATUS_RING_BASE_LSB;
1398 	s->reg_start[1] = HAL_SEQ_WCSS_UMAC_CE0_DST_REG(ab) + HAL_CE_DST_STATUS_RING_HP;
1399 	s->reg_size[0] = HAL_SEQ_WCSS_UMAC_CE1_DST_REG(ab) -
1400 		HAL_SEQ_WCSS_UMAC_CE0_DST_REG(ab);
1401 	s->reg_size[1] = HAL_SEQ_WCSS_UMAC_CE1_DST_REG(ab) -
1402 		HAL_SEQ_WCSS_UMAC_CE0_DST_REG(ab);
1403 
1404 	s = &hal->srng_config[HAL_WBM_IDLE_LINK];
1405 	s->reg_start[0] = HAL_SEQ_WCSS_UMAC_WBM_REG + HAL_WBM_IDLE_LINK_RING_BASE_LSB(ab);
1406 	s->reg_start[1] = HAL_SEQ_WCSS_UMAC_WBM_REG + HAL_WBM_IDLE_LINK_RING_HP;
1407 
1408 	s = &hal->srng_config[HAL_SW2WBM_RELEASE];
1409 	s->max_rings = 1;
1410 	s->reg_start[0] = HAL_SEQ_WCSS_UMAC_WBM_REG +
1411 		HAL_WBM_SW_RELEASE_RING_BASE_LSB(ab);
1412 	s->reg_start[1] = HAL_SEQ_WCSS_UMAC_WBM_REG + HAL_WBM_SW_RELEASE_RING_HP;
1413 
1414 	s = &hal->srng_config[HAL_WBM2SW_RELEASE];
1415 	s->reg_start[0] = HAL_SEQ_WCSS_UMAC_WBM_REG + HAL_WBM0_RELEASE_RING_BASE_LSB(ab);
1416 	s->reg_start[1] = HAL_SEQ_WCSS_UMAC_WBM_REG + HAL_WBM0_RELEASE_RING_HP;
1417 	s->reg_size[0] = HAL_WBM1_RELEASE_RING_BASE_LSB(ab) -
1418 		HAL_WBM0_RELEASE_RING_BASE_LSB(ab);
1419 	s->reg_size[1] = HAL_WBM1_RELEASE_RING_HP - HAL_WBM0_RELEASE_RING_HP;
1420 
1421 	s = &hal->srng_config[HAL_RXDMA_BUF];
1422 	s->max_rings = 2;
1423 	s->mac_type = ATH12K_HAL_SRNG_PMAC;
1424 
1425 	s = &hal->srng_config[HAL_RXDMA_DST];
1426 	s->max_rings = 1;
1427 	s->entry_size = sizeof(struct hal_reo_entrance_ring) >> 2;
1428 
1429 	/* below rings are not used */
1430 	s = &hal->srng_config[HAL_RXDMA_DIR_BUF];
1431 	s->max_rings = 0;
1432 
1433 	s = &hal->srng_config[HAL_PPE2TCL];
1434 	s->max_rings = 0;
1435 
1436 	s = &hal->srng_config[HAL_PPE_RELEASE];
1437 	s->max_rings = 0;
1438 
1439 	s = &hal->srng_config[HAL_TX_MONITOR_BUF];
1440 	s->max_rings = 0;
1441 
1442 	s = &hal->srng_config[HAL_TX_MONITOR_DST];
1443 	s->max_rings = 0;
1444 
1445 	s = &hal->srng_config[HAL_PPE2TCL];
1446 	s->max_rings = 0;
1447 
1448 	return 0;
1449 }
1450 
1451 static bool ath12k_hw_wcn7850_dp_rx_h_msdu_done(struct hal_rx_desc *desc)
1452 {
1453 	return !!le32_get_bits(desc->u.wcn7850.msdu_end.info14,
1454 			       RX_MSDU_END_INFO14_MSDU_DONE);
1455 }
1456 
1457 static bool ath12k_hw_wcn7850_dp_rx_h_l4_cksum_fail(struct hal_rx_desc *desc)
1458 {
1459 	return !!le32_get_bits(desc->u.wcn7850.msdu_end.info13,
1460 			       RX_MSDU_END_INFO13_TCP_UDP_CKSUM_FAIL);
1461 }
1462 
1463 static bool ath12k_hw_wcn7850_dp_rx_h_ip_cksum_fail(struct hal_rx_desc *desc)
1464 {
1465 	return !!le32_get_bits(desc->u.wcn7850.msdu_end.info13,
1466 			      RX_MSDU_END_INFO13_IP_CKSUM_FAIL);
1467 }
1468 
1469 static bool ath12k_hw_wcn7850_dp_rx_h_is_decrypted(struct hal_rx_desc *desc)
1470 {
1471 	return (le32_get_bits(desc->u.wcn7850.msdu_end.info14,
1472 			      RX_MSDU_END_INFO14_DECRYPT_STATUS_CODE) ==
1473 			      RX_DESC_DECRYPT_STATUS_CODE_OK);
1474 }
1475 
1476 static u32 ath12k_hw_wcn7850_dp_rx_h_mpdu_err(struct hal_rx_desc *desc)
1477 {
1478 	u32 info = __le32_to_cpu(desc->u.wcn7850.msdu_end.info13);
1479 	u32 errmap = 0;
1480 
1481 	if (info & RX_MSDU_END_INFO13_FCS_ERR)
1482 		errmap |= HAL_RX_MPDU_ERR_FCS;
1483 
1484 	if (info & RX_MSDU_END_INFO13_DECRYPT_ERR)
1485 		errmap |= HAL_RX_MPDU_ERR_DECRYPT;
1486 
1487 	if (info & RX_MSDU_END_INFO13_TKIP_MIC_ERR)
1488 		errmap |= HAL_RX_MPDU_ERR_TKIP_MIC;
1489 
1490 	if (info & RX_MSDU_END_INFO13_A_MSDU_ERROR)
1491 		errmap |= HAL_RX_MPDU_ERR_AMSDU_ERR;
1492 
1493 	if (info & RX_MSDU_END_INFO13_OVERFLOW_ERR)
1494 		errmap |= HAL_RX_MPDU_ERR_OVERFLOW;
1495 
1496 	if (info & RX_MSDU_END_INFO13_MSDU_LEN_ERR)
1497 		errmap |= HAL_RX_MPDU_ERR_MSDU_LEN;
1498 
1499 	if (info & RX_MSDU_END_INFO13_MPDU_LEN_ERR)
1500 		errmap |= HAL_RX_MPDU_ERR_MPDU_LEN;
1501 
1502 	return errmap;
1503 }
1504 
1505 static u32 ath12k_hw_wcn7850_get_rx_desc_size(void)
1506 {
1507 	return sizeof(struct hal_rx_desc_wcn7850);
1508 }
1509 
1510 static u8 ath12k_hw_wcn7850_rx_desc_get_msdu_src_link(struct hal_rx_desc *desc)
1511 {
1512 	return 0;
1513 }
1514 
1515 const struct hal_rx_ops hal_rx_wcn7850_ops = {
1516 	.rx_desc_get_first_msdu = ath12k_hw_wcn7850_rx_desc_get_first_msdu,
1517 	.rx_desc_get_last_msdu = ath12k_hw_wcn7850_rx_desc_get_last_msdu,
1518 	.rx_desc_get_l3_pad_bytes = ath12k_hw_wcn7850_rx_desc_get_l3_pad_bytes,
1519 	.rx_desc_encrypt_valid = ath12k_hw_wcn7850_rx_desc_encrypt_valid,
1520 	.rx_desc_get_encrypt_type = ath12k_hw_wcn7850_rx_desc_get_encrypt_type,
1521 	.rx_desc_get_decap_type = ath12k_hw_wcn7850_rx_desc_get_decap_type,
1522 	.rx_desc_get_mesh_ctl = ath12k_hw_wcn7850_rx_desc_get_mesh_ctl,
1523 	.rx_desc_get_mpdu_seq_ctl_vld = ath12k_hw_wcn7850_rx_desc_get_mpdu_seq_ctl_vld,
1524 	.rx_desc_get_mpdu_fc_valid = ath12k_hw_wcn7850_rx_desc_get_mpdu_fc_valid,
1525 	.rx_desc_get_mpdu_start_seq_no = ath12k_hw_wcn7850_rx_desc_get_mpdu_start_seq_no,
1526 	.rx_desc_get_msdu_len = ath12k_hw_wcn7850_rx_desc_get_msdu_len,
1527 	.rx_desc_get_msdu_sgi = ath12k_hw_wcn7850_rx_desc_get_msdu_sgi,
1528 	.rx_desc_get_msdu_rate_mcs = ath12k_hw_wcn7850_rx_desc_get_msdu_rate_mcs,
1529 	.rx_desc_get_msdu_rx_bw = ath12k_hw_wcn7850_rx_desc_get_msdu_rx_bw,
1530 	.rx_desc_get_msdu_freq = ath12k_hw_wcn7850_rx_desc_get_msdu_freq,
1531 	.rx_desc_get_msdu_pkt_type = ath12k_hw_wcn7850_rx_desc_get_msdu_pkt_type,
1532 	.rx_desc_get_msdu_nss = ath12k_hw_wcn7850_rx_desc_get_msdu_nss,
1533 	.rx_desc_get_mpdu_tid = ath12k_hw_wcn7850_rx_desc_get_mpdu_tid,
1534 	.rx_desc_get_mpdu_peer_id = ath12k_hw_wcn7850_rx_desc_get_mpdu_peer_id,
1535 	.rx_desc_copy_end_tlv = ath12k_hw_wcn7850_rx_desc_copy_end_tlv,
1536 	.rx_desc_get_mpdu_start_tag = ath12k_hw_wcn7850_rx_desc_get_mpdu_start_tag,
1537 	.rx_desc_get_mpdu_ppdu_id = ath12k_hw_wcn7850_rx_desc_get_mpdu_ppdu_id,
1538 	.rx_desc_set_msdu_len = ath12k_hw_wcn7850_rx_desc_set_msdu_len,
1539 	.rx_desc_get_msdu_payload = ath12k_hw_wcn7850_rx_desc_get_msdu_payload,
1540 	.rx_desc_get_mpdu_start_offset = ath12k_hw_wcn7850_rx_desc_get_mpdu_start_offset,
1541 	.rx_desc_get_msdu_end_offset = ath12k_hw_wcn7850_rx_desc_get_msdu_end_offset,
1542 	.rx_desc_mac_addr2_valid = ath12k_hw_wcn7850_rx_desc_mac_addr2_valid,
1543 	.rx_desc_mpdu_start_addr2 = ath12k_hw_wcn7850_rx_desc_mpdu_start_addr2,
1544 	.rx_desc_is_da_mcbc = ath12k_hw_wcn7850_rx_desc_is_da_mcbc,
1545 	.rx_desc_get_dot11_hdr = ath12k_hw_wcn7850_rx_desc_get_dot11_hdr,
1546 	.rx_desc_get_crypto_header = ath12k_hw_wcn7850_rx_desc_get_crypto_hdr,
1547 	.dp_rx_h_msdu_done = ath12k_hw_wcn7850_dp_rx_h_msdu_done,
1548 	.dp_rx_h_l4_cksum_fail = ath12k_hw_wcn7850_dp_rx_h_l4_cksum_fail,
1549 	.dp_rx_h_ip_cksum_fail = ath12k_hw_wcn7850_dp_rx_h_ip_cksum_fail,
1550 	.dp_rx_h_is_decrypted = ath12k_hw_wcn7850_dp_rx_h_is_decrypted,
1551 	.dp_rx_h_mpdu_err = ath12k_hw_wcn7850_dp_rx_h_mpdu_err,
1552 	.rx_desc_get_desc_size = ath12k_hw_wcn7850_get_rx_desc_size,
1553 	.rx_desc_get_msdu_src_link_id = ath12k_hw_wcn7850_rx_desc_get_msdu_src_link,
1554 };
1555 
1556 const struct hal_ops hal_wcn7850_ops = {
1557 	.create_srng_config = ath12k_hal_srng_create_config_wcn7850,
1558 	.tcl_to_wbm_rbm_map = ath12k_hal_wcn7850_tcl_to_wbm_rbm_map,
1559 	.rxdma_ring_wmask_rx_mpdu_start = NULL,
1560 	.rxdma_ring_wmask_rx_msdu_end = NULL,
1561 	.get_hal_rx_compact_ops = NULL,
1562 };
1563 
1564 static int ath12k_hal_alloc_cont_rdp(struct ath12k_base *ab)
1565 {
1566 	struct ath12k_hal *hal = &ab->hal;
1567 	size_t size;
1568 
1569 	size = sizeof(u32) * HAL_SRNG_RING_ID_MAX;
1570 	hal->rdp.vaddr = dma_alloc_coherent(ab->dev, size, &hal->rdp.paddr,
1571 					    GFP_KERNEL);
1572 	if (!hal->rdp.vaddr)
1573 		return -ENOMEM;
1574 
1575 	return 0;
1576 }
1577 
1578 static void ath12k_hal_free_cont_rdp(struct ath12k_base *ab)
1579 {
1580 	struct ath12k_hal *hal = &ab->hal;
1581 	size_t size;
1582 
1583 	if (!hal->rdp.vaddr)
1584 		return;
1585 
1586 	size = sizeof(u32) * HAL_SRNG_RING_ID_MAX;
1587 	dma_free_coherent(ab->dev, size,
1588 			  hal->rdp.vaddr, hal->rdp.paddr);
1589 	hal->rdp.vaddr = NULL;
1590 }
1591 
1592 static int ath12k_hal_alloc_cont_wrp(struct ath12k_base *ab)
1593 {
1594 	struct ath12k_hal *hal = &ab->hal;
1595 	size_t size;
1596 
1597 	size = sizeof(u32) * (HAL_SRNG_NUM_PMAC_RINGS + HAL_SRNG_NUM_DMAC_RINGS);
1598 	hal->wrp.vaddr = dma_alloc_coherent(ab->dev, size, &hal->wrp.paddr,
1599 					    GFP_KERNEL);
1600 	if (!hal->wrp.vaddr)
1601 		return -ENOMEM;
1602 
1603 	return 0;
1604 }
1605 
1606 static void ath12k_hal_free_cont_wrp(struct ath12k_base *ab)
1607 {
1608 	struct ath12k_hal *hal = &ab->hal;
1609 	size_t size;
1610 
1611 	if (!hal->wrp.vaddr)
1612 		return;
1613 
1614 	size = sizeof(u32) * (HAL_SRNG_NUM_PMAC_RINGS + HAL_SRNG_NUM_DMAC_RINGS);
1615 	dma_free_coherent(ab->dev, size,
1616 			  hal->wrp.vaddr, hal->wrp.paddr);
1617 	hal->wrp.vaddr = NULL;
1618 }
1619 
1620 static void ath12k_hal_ce_dst_setup(struct ath12k_base *ab,
1621 				    struct hal_srng *srng, int ring_num)
1622 {
1623 	struct hal_srng_config *srng_config = &ab->hal.srng_config[HAL_CE_DST];
1624 	u32 addr;
1625 	u32 val;
1626 
1627 	addr = HAL_CE_DST_RING_CTRL +
1628 	       srng_config->reg_start[HAL_SRNG_REG_GRP_R0] +
1629 	       ring_num * srng_config->reg_size[HAL_SRNG_REG_GRP_R0];
1630 
1631 	val = ath12k_hif_read32(ab, addr);
1632 	val &= ~HAL_CE_DST_R0_DEST_CTRL_MAX_LEN;
1633 	val |= u32_encode_bits(srng->u.dst_ring.max_buffer_length,
1634 			       HAL_CE_DST_R0_DEST_CTRL_MAX_LEN);
1635 	ath12k_hif_write32(ab, addr, val);
1636 }
1637 
1638 static void ath12k_hal_srng_dst_hw_init(struct ath12k_base *ab,
1639 					struct hal_srng *srng)
1640 {
1641 	struct ath12k_hal *hal = &ab->hal;
1642 	u32 val;
1643 	u64 hp_addr;
1644 	u32 reg_base;
1645 
1646 	reg_base = srng->hwreg_base[HAL_SRNG_REG_GRP_R0];
1647 
1648 	if (srng->flags & HAL_SRNG_FLAGS_MSI_INTR) {
1649 		ath12k_hif_write32(ab, reg_base +
1650 				   ath12k_hal_reo1_ring_msi1_base_lsb_offset(ab),
1651 				   srng->msi_addr);
1652 
1653 		val = u32_encode_bits(((u64)srng->msi_addr >> HAL_ADDR_MSB_REG_SHIFT),
1654 				      HAL_REO1_RING_MSI1_BASE_MSB_ADDR) |
1655 				      HAL_REO1_RING_MSI1_BASE_MSB_MSI1_ENABLE;
1656 		ath12k_hif_write32(ab, reg_base +
1657 				   ath12k_hal_reo1_ring_msi1_base_msb_offset(ab), val);
1658 
1659 		ath12k_hif_write32(ab,
1660 				   reg_base + ath12k_hal_reo1_ring_msi1_data_offset(ab),
1661 				   srng->msi_data);
1662 	}
1663 
1664 	ath12k_hif_write32(ab, reg_base, srng->ring_base_paddr);
1665 
1666 	val = u32_encode_bits(((u64)srng->ring_base_paddr >> HAL_ADDR_MSB_REG_SHIFT),
1667 			      HAL_REO1_RING_BASE_MSB_RING_BASE_ADDR_MSB) |
1668 	      u32_encode_bits((srng->entry_size * srng->num_entries),
1669 			      HAL_REO1_RING_BASE_MSB_RING_SIZE);
1670 	ath12k_hif_write32(ab, reg_base + ath12k_hal_reo1_ring_base_msb_offset(ab), val);
1671 
1672 	val = u32_encode_bits(srng->ring_id, HAL_REO1_RING_ID_RING_ID) |
1673 	      u32_encode_bits(srng->entry_size, HAL_REO1_RING_ID_ENTRY_SIZE);
1674 	ath12k_hif_write32(ab, reg_base + ath12k_hal_reo1_ring_id_offset(ab), val);
1675 
1676 	/* interrupt setup */
1677 	val = u32_encode_bits((srng->intr_timer_thres_us >> 3),
1678 			      HAL_REO1_RING_PRDR_INT_SETUP_INTR_TMR_THOLD);
1679 
1680 	val |= u32_encode_bits((srng->intr_batch_cntr_thres_entries * srng->entry_size),
1681 				HAL_REO1_RING_PRDR_INT_SETUP_BATCH_COUNTER_THOLD);
1682 
1683 	ath12k_hif_write32(ab,
1684 			   reg_base + ath12k_hal_reo1_ring_producer_int_setup_offset(ab),
1685 			   val);
1686 
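	/* Tell the HW where, inside the coherent RDP area, to publish the head
	 * pointer for this destination ring; the host reads it back in
	 * ath12k_hal_srng_access_begin().
	 */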
1687 	hp_addr = hal->rdp.paddr +
1688 		  ((unsigned long)srng->u.dst_ring.hp_addr -
1689 		   (unsigned long)hal->rdp.vaddr);
1690 	ath12k_hif_write32(ab, reg_base + ath12k_hal_reo1_ring_hp_addr_lsb_offset(ab),
1691 			   hp_addr & HAL_ADDR_LSB_REG_MASK);
1692 	ath12k_hif_write32(ab, reg_base + ath12k_hal_reo1_ring_hp_addr_msb_offset(ab),
1693 			   hp_addr >> HAL_ADDR_MSB_REG_SHIFT);
1694 
1695 	/* Initialize head and tail pointers to indicate ring is empty */
1696 	reg_base = srng->hwreg_base[HAL_SRNG_REG_GRP_R2];
1697 	ath12k_hif_write32(ab, reg_base, 0);
1698 	ath12k_hif_write32(ab, reg_base + HAL_REO1_RING_TP_OFFSET, 0);
1699 	*srng->u.dst_ring.hp_addr = 0;
1700 
1701 	reg_base = srng->hwreg_base[HAL_SRNG_REG_GRP_R0];
1702 	val = 0;
1703 	if (srng->flags & HAL_SRNG_FLAGS_DATA_TLV_SWAP)
1704 		val |= HAL_REO1_RING_MISC_DATA_TLV_SWAP;
1705 	if (srng->flags & HAL_SRNG_FLAGS_RING_PTR_SWAP)
1706 		val |= HAL_REO1_RING_MISC_HOST_FW_SWAP;
1707 	if (srng->flags & HAL_SRNG_FLAGS_MSI_SWAP)
1708 		val |= HAL_REO1_RING_MISC_MSI_SWAP;
1709 	val |= HAL_REO1_RING_MISC_SRNG_ENABLE;
1710 
1711 	ath12k_hif_write32(ab, reg_base + ath12k_hal_reo1_ring_misc_offset(ab), val);
1712 }
1713 
1714 static void ath12k_hal_srng_src_hw_init(struct ath12k_base *ab,
1715 					struct hal_srng *srng)
1716 {
1717 	struct ath12k_hal *hal = &ab->hal;
1718 	u32 val;
1719 	u64 tp_addr;
1720 	u32 reg_base;
1721 
1722 	reg_base = srng->hwreg_base[HAL_SRNG_REG_GRP_R0];
1723 
1724 	if (srng->flags & HAL_SRNG_FLAGS_MSI_INTR) {
1725 		ath12k_hif_write32(ab, reg_base +
1726 				   HAL_TCL1_RING_MSI1_BASE_LSB_OFFSET(ab),
1727 				   srng->msi_addr);
1728 
1729 		val = u32_encode_bits(((u64)srng->msi_addr >> HAL_ADDR_MSB_REG_SHIFT),
1730 				      HAL_TCL1_RING_MSI1_BASE_MSB_ADDR) |
1731 				      HAL_TCL1_RING_MSI1_BASE_MSB_MSI1_ENABLE;
1732 		ath12k_hif_write32(ab, reg_base +
1733 				       HAL_TCL1_RING_MSI1_BASE_MSB_OFFSET(ab),
1734 				   val);
1735 
1736 		ath12k_hif_write32(ab, reg_base +
1737 				       HAL_TCL1_RING_MSI1_DATA_OFFSET(ab),
1738 				   srng->msi_data);
1739 	}
1740 
1741 	ath12k_hif_write32(ab, reg_base, srng->ring_base_paddr);
1742 
1743 	val = u32_encode_bits(((u64)srng->ring_base_paddr >> HAL_ADDR_MSB_REG_SHIFT),
1744 			      HAL_TCL1_RING_BASE_MSB_RING_BASE_ADDR_MSB) |
1745 	      u32_encode_bits((srng->entry_size * srng->num_entries),
1746 			      HAL_TCL1_RING_BASE_MSB_RING_SIZE);
1747 	ath12k_hif_write32(ab, reg_base + HAL_TCL1_RING_BASE_MSB_OFFSET(ab), val);
1748 
1749 	val = u32_encode_bits(srng->entry_size, HAL_REO1_RING_ID_ENTRY_SIZE);
1750 	ath12k_hif_write32(ab, reg_base + HAL_TCL1_RING_ID_OFFSET(ab), val);
1751 
1752 	val = u32_encode_bits(srng->intr_timer_thres_us,
1753 			      HAL_TCL1_RING_CONSR_INT_SETUP_IX0_INTR_TMR_THOLD);
1754 
1755 	val |= u32_encode_bits((srng->intr_batch_cntr_thres_entries * srng->entry_size),
1756 			       HAL_TCL1_RING_CONSR_INT_SETUP_IX0_BATCH_COUNTER_THOLD);
1757 
1758 	ath12k_hif_write32(ab,
1759 			   reg_base + HAL_TCL1_RING_CONSR_INT_SETUP_IX0_OFFSET(ab),
1760 			   val);
1761 
1762 	val = 0;
1763 	if (srng->flags & HAL_SRNG_FLAGS_LOW_THRESH_INTR_EN) {
1764 		val |= u32_encode_bits(srng->u.src_ring.low_threshold,
1765 				       HAL_TCL1_RING_CONSR_INT_SETUP_IX1_LOW_THOLD);
1766 	}
1767 	ath12k_hif_write32(ab,
1768 			   reg_base + HAL_TCL1_RING_CONSR_INT_SETUP_IX1_OFFSET(ab),
1769 			   val);
1770 
1771 	if (srng->ring_id != HAL_SRNG_RING_ID_WBM_IDLE_LINK) {
1772 		tp_addr = hal->rdp.paddr +
1773 			  ((unsigned long)srng->u.src_ring.tp_addr -
1774 			   (unsigned long)hal->rdp.vaddr);
1775 		ath12k_hif_write32(ab,
1776 				   reg_base + HAL_TCL1_RING_TP_ADDR_LSB_OFFSET(ab),
1777 				   tp_addr & HAL_ADDR_LSB_REG_MASK);
1778 		ath12k_hif_write32(ab,
1779 				   reg_base + HAL_TCL1_RING_TP_ADDR_MSB_OFFSET(ab),
1780 				   tp_addr >> HAL_ADDR_MSB_REG_SHIFT);
1781 	}
1782 
1783 	/* Initialize head and tail pointers to indicate ring is empty */
1784 	reg_base = srng->hwreg_base[HAL_SRNG_REG_GRP_R2];
1785 	ath12k_hif_write32(ab, reg_base, 0);
1786 	ath12k_hif_write32(ab, reg_base + HAL_TCL1_RING_TP_OFFSET, 0);
1787 	*srng->u.src_ring.tp_addr = 0;
1788 
1789 	reg_base = srng->hwreg_base[HAL_SRNG_REG_GRP_R0];
1790 	val = 0;
1791 	if (srng->flags & HAL_SRNG_FLAGS_DATA_TLV_SWAP)
1792 		val |= HAL_TCL1_RING_MISC_DATA_TLV_SWAP;
1793 	if (srng->flags & HAL_SRNG_FLAGS_RING_PTR_SWAP)
1794 		val |= HAL_TCL1_RING_MISC_HOST_FW_SWAP;
1795 	if (srng->flags & HAL_SRNG_FLAGS_MSI_SWAP)
1796 		val |= HAL_TCL1_RING_MISC_MSI_SWAP;
1797 
1798 	/* Loop count is not used for SRC rings */
1799 	val |= HAL_TCL1_RING_MISC_MSI_LOOPCNT_DISABLE;
1800 
1801 	val |= HAL_TCL1_RING_MISC_SRNG_ENABLE;
1802 
1803 	if (srng->ring_id == HAL_SRNG_RING_ID_WBM_IDLE_LINK)
1804 		val |= HAL_TCL1_RING_MISC_MSI_RING_ID_DISABLE;
1805 
1806 	ath12k_hif_write32(ab, reg_base + HAL_TCL1_RING_MISC_OFFSET(ab), val);
1807 }
1808 
1809 static void ath12k_hal_srng_hw_init(struct ath12k_base *ab,
1810 				    struct hal_srng *srng)
1811 {
1812 	if (srng->ring_dir == HAL_SRNG_DIR_SRC)
1813 		ath12k_hal_srng_src_hw_init(ab, srng);
1814 	else
1815 		ath12k_hal_srng_dst_hw_init(ab, srng);
1816 }
1817 
1818 static int ath12k_hal_srng_get_ring_id(struct ath12k_base *ab,
1819 				       enum hal_ring_type type,
1820 				       int ring_num, int mac_id)
1821 {
1822 	struct hal_srng_config *srng_config = &ab->hal.srng_config[type];
1823 	int ring_id;
1824 
1825 	if (ring_num >= srng_config->max_rings) {
1826 		ath12k_warn(ab, "invalid ring number :%d\n", ring_num);
1827 		return -EINVAL;
1828 	}
1829 
1830 	ring_id = srng_config->start_ring_id + ring_num;
1831 	if (srng_config->mac_type == ATH12K_HAL_SRNG_PMAC)
1832 		ring_id += mac_id * HAL_SRNG_RINGS_PER_PMAC;
1833 
1834 	if (WARN_ON(ring_id >= HAL_SRNG_RING_ID_MAX))
1835 		return -EINVAL;
1836 
1837 	return ring_id;
1838 }
1839 
1840 int ath12k_hal_srng_get_entrysize(struct ath12k_base *ab, u32 ring_type)
1841 {
1842 	struct hal_srng_config *srng_config;
1843 
1844 	if (WARN_ON(ring_type >= HAL_MAX_RING_TYPES))
1845 		return -EINVAL;
1846 
1847 	srng_config = &ab->hal.srng_config[ring_type];
1848 
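	/* entry_size is stored in units of 32-bit words (see the
	 * hw_srng_config_template entries), so shift back up to bytes.
	 */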
1849 	return (srng_config->entry_size << 2);
1850 }
1851 
1852 int ath12k_hal_srng_get_max_entries(struct ath12k_base *ab, u32 ring_type)
1853 {
1854 	struct hal_srng_config *srng_config;
1855 
1856 	if (WARN_ON(ring_type >= HAL_MAX_RING_TYPES))
1857 		return -EINVAL;
1858 
1859 	srng_config = &ab->hal.srng_config[ring_type];
1860 
1861 	return (srng_config->max_size / srng_config->entry_size);
1862 }
1863 
1864 void ath12k_hal_srng_get_params(struct ath12k_base *ab, struct hal_srng *srng,
1865 				struct hal_srng_params *params)
1866 {
1867 	params->ring_base_paddr = srng->ring_base_paddr;
1868 	params->ring_base_vaddr = srng->ring_base_vaddr;
1869 	params->num_entries = srng->num_entries;
1870 	params->intr_timer_thres_us = srng->intr_timer_thres_us;
1871 	params->intr_batch_cntr_thres_entries =
1872 		srng->intr_batch_cntr_thres_entries;
1873 	params->low_threshold = srng->u.src_ring.low_threshold;
1874 	params->msi_addr = srng->msi_addr;
1875 	params->msi2_addr = srng->msi2_addr;
1876 	params->msi_data = srng->msi_data;
1877 	params->msi2_data = srng->msi2_data;
1878 	params->flags = srng->flags;
1879 }
1880 
1881 dma_addr_t ath12k_hal_srng_get_hp_addr(struct ath12k_base *ab,
1882 				       struct hal_srng *srng)
1883 {
1884 	if (!(srng->flags & HAL_SRNG_FLAGS_LMAC_RING))
1885 		return 0;
1886 
1887 	if (srng->ring_dir == HAL_SRNG_DIR_SRC)
1888 		return ab->hal.wrp.paddr +
1889 		       ((unsigned long)srng->u.src_ring.hp_addr -
1890 			(unsigned long)ab->hal.wrp.vaddr);
1891 	else
1892 		return ab->hal.rdp.paddr +
1893 		       ((unsigned long)srng->u.dst_ring.hp_addr -
1894 			 (unsigned long)ab->hal.rdp.vaddr);
1895 }
1896 
1897 dma_addr_t ath12k_hal_srng_get_tp_addr(struct ath12k_base *ab,
1898 				       struct hal_srng *srng)
1899 {
1900 	if (!(srng->flags & HAL_SRNG_FLAGS_LMAC_RING))
1901 		return 0;
1902 
1903 	if (srng->ring_dir == HAL_SRNG_DIR_SRC)
1904 		return ab->hal.rdp.paddr +
1905 		       ((unsigned long)srng->u.src_ring.tp_addr -
1906 			(unsigned long)ab->hal.rdp.vaddr);
1907 	else
1908 		return ab->hal.wrp.paddr +
1909 		       ((unsigned long)srng->u.dst_ring.tp_addr -
1910 			(unsigned long)ab->hal.wrp.vaddr);
1911 }
1912 
1913 u32 ath12k_hal_ce_get_desc_size(enum hal_ce_desc type)
1914 {
1915 	switch (type) {
1916 	case HAL_CE_DESC_SRC:
1917 		return sizeof(struct hal_ce_srng_src_desc);
1918 	case HAL_CE_DESC_DST:
1919 		return sizeof(struct hal_ce_srng_dest_desc);
1920 	case HAL_CE_DESC_DST_STATUS:
1921 		return sizeof(struct hal_ce_srng_dst_status_desc);
1922 	}
1923 
1924 	return 0;
1925 }
1926 
1927 void ath12k_hal_ce_src_set_desc(struct hal_ce_srng_src_desc *desc, dma_addr_t paddr,
1928 				u32 len, u32 id, u8 byte_swap_data)
1929 {
1930 	desc->buffer_addr_low = cpu_to_le32(paddr & HAL_ADDR_LSB_REG_MASK);
1931 	desc->buffer_addr_info =
1932 		le32_encode_bits(((u64)paddr >> HAL_ADDR_MSB_REG_SHIFT),
1933 				 HAL_CE_SRC_DESC_ADDR_INFO_ADDR_HI) |
1934 		le32_encode_bits(byte_swap_data,
1935 				 HAL_CE_SRC_DESC_ADDR_INFO_BYTE_SWAP) |
1936 		le32_encode_bits(0, HAL_CE_SRC_DESC_ADDR_INFO_GATHER) |
1937 		le32_encode_bits(len, HAL_CE_SRC_DESC_ADDR_INFO_LEN);
1938 	desc->meta_info = le32_encode_bits(id, HAL_CE_SRC_DESC_META_INFO_DATA);
1939 }
1940 
1941 void ath12k_hal_ce_dst_set_desc(struct hal_ce_srng_dest_desc *desc, dma_addr_t paddr)
1942 {
1943 	desc->buffer_addr_low = cpu_to_le32(paddr & HAL_ADDR_LSB_REG_MASK);
1944 	desc->buffer_addr_info =
1945 		le32_encode_bits(((u64)paddr >> HAL_ADDR_MSB_REG_SHIFT),
1946 				 HAL_CE_DEST_DESC_ADDR_INFO_ADDR_HI);
1947 }
1948 
1949 u32 ath12k_hal_ce_dst_status_get_length(struct hal_ce_srng_dst_status_desc *desc)
1950 {
1951 	u32 len;
1952 
1953 	len = le32_get_bits(desc->flags, HAL_CE_DST_STATUS_DESC_FLAGS_LEN);
1954 	desc->flags &= ~cpu_to_le32(HAL_CE_DST_STATUS_DESC_FLAGS_LEN);
1955 
1956 	return len;
1957 }
1958 
1959 void ath12k_hal_set_link_desc_addr(struct hal_wbm_link_desc *desc, u32 cookie,
1960 				   dma_addr_t paddr,
1961 				   enum hal_rx_buf_return_buf_manager rbm)
1962 {
1963 	desc->buf_addr_info.info0 = le32_encode_bits((paddr & HAL_ADDR_LSB_REG_MASK),
1964 						     BUFFER_ADDR_INFO0_ADDR);
1965 	desc->buf_addr_info.info1 =
1966 			le32_encode_bits(((u64)paddr >> HAL_ADDR_MSB_REG_SHIFT),
1967 					 BUFFER_ADDR_INFO1_ADDR) |
1968 			le32_encode_bits(rbm, BUFFER_ADDR_INFO1_RET_BUF_MGR) |
1969 			le32_encode_bits(cookie, BUFFER_ADDR_INFO1_SW_COOKIE);
1970 }
1971 
1972 void *ath12k_hal_srng_dst_peek(struct ath12k_base *ab, struct hal_srng *srng)
1973 {
1974 	lockdep_assert_held(&srng->lock);
1975 
1976 	if (srng->u.dst_ring.tp != srng->u.dst_ring.cached_hp)
1977 		return (srng->ring_base_vaddr + srng->u.dst_ring.tp);
1978 
1979 	return NULL;
1980 }
1981 
1982 void *ath12k_hal_srng_dst_get_next_entry(struct ath12k_base *ab,
1983 					 struct hal_srng *srng)
1984 {
1985 	void *desc;
1986 
1987 	lockdep_assert_held(&srng->lock);
1988 
1989 	if (srng->u.dst_ring.tp == srng->u.dst_ring.cached_hp)
1990 		return NULL;
1991 
1992 	desc = srng->ring_base_vaddr + srng->u.dst_ring.tp;
1993 
1994 	srng->u.dst_ring.tp = (srng->u.dst_ring.tp + srng->entry_size) %
1995 			      srng->ring_size;
1996 
1997 	return desc;
1998 }
1999 
2000 int ath12k_hal_srng_dst_num_free(struct ath12k_base *ab, struct hal_srng *srng,
2001 				 bool sync_hw_ptr)
2002 {
2003 	u32 tp, hp;
2004 
2005 	lockdep_assert_held(&srng->lock);
2006 
2007 	tp = srng->u.dst_ring.tp;
2008 
2009 	if (sync_hw_ptr) {
2010 		hp = *srng->u.dst_ring.hp_addr;
2011 		srng->u.dst_ring.cached_hp = hp;
2012 	} else {
2013 		hp = srng->u.dst_ring.cached_hp;
2014 	}
2015 
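	/* hp and tp are offsets in 32-bit words; the second branch accounts
	 * for the head pointer having wrapped past the end of the ring.
	 */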
2016 	if (hp >= tp)
2017 		return (hp - tp) / srng->entry_size;
2018 	else
2019 		return (srng->ring_size - tp + hp) / srng->entry_size;
2020 }
2021 
2022 /* Returns number of available entries in src ring */
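/* One entry is always kept unused so that a completely full ring can be
 * distinguished from an empty one (hp == tp): e.g. with entry_size 8 and
 * ring_size 32 (four entries), at most three entries are reported as free.
 */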
2023 int ath12k_hal_srng_src_num_free(struct ath12k_base *ab, struct hal_srng *srng,
2024 				 bool sync_hw_ptr)
2025 {
2026 	u32 tp, hp;
2027 
2028 	lockdep_assert_held(&srng->lock);
2029 
2030 	hp = srng->u.src_ring.hp;
2031 
2032 	if (sync_hw_ptr) {
2033 		tp = *srng->u.src_ring.tp_addr;
2034 		srng->u.src_ring.cached_tp = tp;
2035 	} else {
2036 		tp = srng->u.src_ring.cached_tp;
2037 	}
2038 
2039 	if (tp > hp)
2040 		return ((tp - hp) / srng->entry_size) - 1;
2041 	else
2042 		return ((srng->ring_size - hp + tp) / srng->entry_size) - 1;
2043 }
2044 
2045 void *ath12k_hal_srng_src_next_peek(struct ath12k_base *ab,
2046 				    struct hal_srng *srng)
2047 {
2048 	void *desc;
2049 	u32 next_hp;
2050 
2051 	lockdep_assert_held(&srng->lock);
2052 
2053 	next_hp = (srng->u.src_ring.hp + srng->entry_size) % srng->ring_size;
2054 
2055 	if (next_hp == srng->u.src_ring.cached_tp)
2056 		return NULL;
2057 
2058 	desc = srng->ring_base_vaddr + next_hp;
2059 
2060 	return desc;
2061 }
2062 
2063 void *ath12k_hal_srng_src_get_next_entry(struct ath12k_base *ab,
2064 					 struct hal_srng *srng)
2065 {
2066 	void *desc;
2067 	u32 next_hp;
2068 
2069 	lockdep_assert_held(&srng->lock);
2070 
2071 	/* TODO: Using % is expensive, but we have to do this since size of some
2072 	 * SRNG rings is not power of 2 (due to descriptor sizes). Need to see
2073 	 * if separate function is defined for rings having power of 2 ring size
2074 	 * (TCL2SW, REO2SW, SW2RXDMA and CE rings) so that we can avoid the
2075 	 * overhead of % by using mask (with &).
2076 	 */
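	/* For a power-of-2 sized ring the modulo below could be replaced by a
	 * mask, e.g.:
	 *
	 *	next_hp = (srng->u.src_ring.hp + srng->entry_size) &
	 *		  (srng->ring_size - 1);
	 */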
2077 	next_hp = (srng->u.src_ring.hp + srng->entry_size) % srng->ring_size;
2078 
2079 	if (next_hp == srng->u.src_ring.cached_tp)
2080 		return NULL;
2081 
2082 	desc = srng->ring_base_vaddr + srng->u.src_ring.hp;
2083 	srng->u.src_ring.hp = next_hp;
2084 
2085 	/* TODO: Reap functionality is not used by all rings. If particular
2086 	 * ring does not use reap functionality, we need not update reap_hp
2087 	 * with next_hp pointer. Need to make sure a separate function is used
2088 	 * before doing any optimization by removing below code updating
2089 	 * reap_hp.
2090 	 */
2091 	srng->u.src_ring.reap_hp = next_hp;
2092 
2093 	return desc;
2094 }
2095 
2096 void *ath12k_hal_srng_src_peek(struct ath12k_base *ab, struct hal_srng *srng)
2097 {
2098 	lockdep_assert_held(&srng->lock);
2099 
2100 	if (((srng->u.src_ring.hp + srng->entry_size) % srng->ring_size) ==
2101 	    srng->u.src_ring.cached_tp)
2102 		return NULL;
2103 
2104 	return srng->ring_base_vaddr + srng->u.src_ring.hp;
2105 }
2106 
2107 void *ath12k_hal_srng_src_reap_next(struct ath12k_base *ab,
2108 				    struct hal_srng *srng)
2109 {
2110 	void *desc;
2111 	u32 next_reap_hp;
2112 
2113 	lockdep_assert_held(&srng->lock);
2114 
2115 	next_reap_hp = (srng->u.src_ring.reap_hp + srng->entry_size) %
2116 		       srng->ring_size;
2117 
2118 	if (next_reap_hp == srng->u.src_ring.cached_tp)
2119 		return NULL;
2120 
2121 	desc = srng->ring_base_vaddr + next_reap_hp;
2122 	srng->u.src_ring.reap_hp = next_reap_hp;
2123 
2124 	return desc;
2125 }
2126 
2127 void *ath12k_hal_srng_src_get_next_reaped(struct ath12k_base *ab,
2128 					  struct hal_srng *srng)
2129 {
2130 	void *desc;
2131 
2132 	lockdep_assert_held(&srng->lock);
2133 
2134 	if (srng->u.src_ring.hp == srng->u.src_ring.reap_hp)
2135 		return NULL;
2136 
2137 	desc = srng->ring_base_vaddr + srng->u.src_ring.hp;
2138 	srng->u.src_ring.hp = (srng->u.src_ring.hp + srng->entry_size) %
2139 			      srng->ring_size;
2140 
2141 	return desc;
2142 }
2143 
2144 void ath12k_hal_srng_access_begin(struct ath12k_base *ab, struct hal_srng *srng)
2145 {
2146 	u32 hp;
2147 
2148 	lockdep_assert_held(&srng->lock);
2149 
2150 	if (srng->ring_dir == HAL_SRNG_DIR_SRC) {
2151 		srng->u.src_ring.cached_tp =
2152 			*(volatile u32 *)srng->u.src_ring.tp_addr;
2153 	} else {
2154 		hp = READ_ONCE(*srng->u.dst_ring.hp_addr);
2155 
2156 		if (hp != srng->u.dst_ring.cached_hp) {
2157 			srng->u.dst_ring.cached_hp = hp;
2158 			/* Make sure descriptor is read after the head
2159 			 * pointer.
2160 			 */
2161 			dma_rmb();
2162 		}
2163 	}
2164 }
2165 
2166 /* Update cached ring head/tail pointers to HW. ath12k_hal_srng_access_begin()
2167  * should have been called before this.
2168  */
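/* A typical caller sequence is roughly:
 *
 *	spin_lock_bh(&srng->lock);
 *	ath12k_hal_srng_access_begin(ab, srng);
 *	while ((desc = ath12k_hal_srng_dst_get_next_entry(ab, srng)))
 *		... process desc ...;
 *	ath12k_hal_srng_access_end(ab, srng);
 *	spin_unlock_bh(&srng->lock);
 */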
2169 void ath12k_hal_srng_access_end(struct ath12k_base *ab, struct hal_srng *srng)
2170 {
2171 	lockdep_assert_held(&srng->lock);
2172 
2173 	if (srng->flags & HAL_SRNG_FLAGS_LMAC_RING) {
2174 		/* For LMAC rings, ring pointer updates are done through FW and
2175 		 * hence written to a shared memory location that is read by FW
2176 		 */
2177 		if (srng->ring_dir == HAL_SRNG_DIR_SRC) {
2178 			srng->u.src_ring.last_tp =
2179 				*(volatile u32 *)srng->u.src_ring.tp_addr;
2180 			/* Make sure descriptor is written before updating the
2181 			 * head pointer.
2182 			 */
2183 			dma_wmb();
2184 			WRITE_ONCE(*srng->u.src_ring.hp_addr, srng->u.src_ring.hp);
2185 		} else {
2186 			srng->u.dst_ring.last_hp = *srng->u.dst_ring.hp_addr;
2187 			/* Make sure descriptor is read before updating the
2188 			 * tail pointer.
2189 			 */
2190 			dma_mb();
2191 			WRITE_ONCE(*srng->u.dst_ring.tp_addr, srng->u.dst_ring.tp);
2192 		}
2193 	} else {
2194 		if (srng->ring_dir == HAL_SRNG_DIR_SRC) {
2195 			srng->u.src_ring.last_tp =
2196 				*(volatile u32 *)srng->u.src_ring.tp_addr;
2197 			/* Assume the implementation uses an MMIO write accessor
2198 			 * which has the required wmb() so that the descriptor
2199 			 * is written before updating the head pointer.
2200 			 */
2201 			ath12k_hif_write32(ab,
2202 					   (unsigned long)srng->u.src_ring.hp_addr -
2203 					   (unsigned long)ab->mem,
2204 					   srng->u.src_ring.hp);
2205 		} else {
2206 			srng->u.dst_ring.last_hp = *srng->u.dst_ring.hp_addr;
2207 			/* Make sure descriptor is read before updating the
2208 			 * tail pointer.
2209 			 */
2210 			mb();
2211 			ath12k_hif_write32(ab,
2212 					   (unsigned long)srng->u.dst_ring.tp_addr -
2213 					   (unsigned long)ab->mem,
2214 					   srng->u.dst_ring.tp);
2215 		}
2216 	}
2217 
2218 	srng->timestamp = jiffies;
2219 }
2220 
2221 void ath12k_hal_setup_link_idle_list(struct ath12k_base *ab,
2222 				     struct hal_wbm_idle_scatter_list *sbuf,
2223 				     u32 nsbufs, u32 tot_link_desc,
2224 				     u32 end_offset)
2225 {
2226 	struct ath12k_buffer_addr *link_addr;
2227 	int i;
2228 	u32 reg_scatter_buf_sz = HAL_WBM_IDLE_SCATTER_BUF_SIZE / 64;
2229 	u32 val;
2230 
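	/* Chain the scatter buffers: the slot just past each buffer's
	 * HAL_WBM_IDLE_SCATTER_BUF_SIZE region holds the DMA address of the
	 * next buffer, so the HW can walk them as one idle link descriptor
	 * list.
	 */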
2231 	link_addr = (void *)sbuf[0].vaddr + HAL_WBM_IDLE_SCATTER_BUF_SIZE;
2232 
2233 	for (i = 1; i < nsbufs; i++) {
2234 		link_addr->info0 = cpu_to_le32(sbuf[i].paddr & HAL_ADDR_LSB_REG_MASK);
2235 
2236 		link_addr->info1 =
2237 			le32_encode_bits((u64)sbuf[i].paddr >> HAL_ADDR_MSB_REG_SHIFT,
2238 					 HAL_WBM_SCATTERED_DESC_MSB_BASE_ADDR_39_32) |
2239 			le32_encode_bits(BASE_ADDR_MATCH_TAG_VAL,
2240 					 HAL_WBM_SCATTERED_DESC_MSB_BASE_ADDR_MATCH_TAG);
2241 
2242 		link_addr = (void *)sbuf[i].vaddr +
2243 			     HAL_WBM_IDLE_SCATTER_BUF_SIZE;
2244 	}
2245 
2246 	val = u32_encode_bits(reg_scatter_buf_sz, HAL_WBM_SCATTER_BUFFER_SIZE) |
2247 	      u32_encode_bits(0x1, HAL_WBM_LINK_DESC_IDLE_LIST_MODE);
2248 
2249 	ath12k_hif_write32(ab,
2250 			   HAL_SEQ_WCSS_UMAC_WBM_REG +
2251 			   HAL_WBM_R0_IDLE_LIST_CONTROL_ADDR(ab),
2252 			   val);
2253 
2254 	val = u32_encode_bits(reg_scatter_buf_sz * nsbufs,
2255 			      HAL_WBM_SCATTER_RING_SIZE_OF_IDLE_LINK_DESC_LIST);
2256 	ath12k_hif_write32(ab,
2257 			   HAL_SEQ_WCSS_UMAC_WBM_REG + HAL_WBM_R0_IDLE_LIST_SIZE_ADDR(ab),
2258 			   val);
2259 
2260 	val = u32_encode_bits(sbuf[0].paddr & HAL_ADDR_LSB_REG_MASK,
2261 			      BUFFER_ADDR_INFO0_ADDR);
2262 	ath12k_hif_write32(ab,
2263 			   HAL_SEQ_WCSS_UMAC_WBM_REG +
2264 			   HAL_WBM_SCATTERED_RING_BASE_LSB(ab),
2265 			   val);
2266 
2267 	val = u32_encode_bits(BASE_ADDR_MATCH_TAG_VAL,
2268 			      HAL_WBM_SCATTERED_DESC_MSB_BASE_ADDR_MATCH_TAG) |
2269 	      u32_encode_bits((u64)sbuf[0].paddr >> HAL_ADDR_MSB_REG_SHIFT,
2270 			      HAL_WBM_SCATTERED_DESC_MSB_BASE_ADDR_39_32);
2271 	ath12k_hif_write32(ab,
2272 			   HAL_SEQ_WCSS_UMAC_WBM_REG +
2273 			   HAL_WBM_SCATTERED_RING_BASE_MSB(ab),
2274 			   val);
2275 
2276 	/* Setup head and tail pointers for the idle list */
2277 	val = u32_encode_bits(sbuf[nsbufs - 1].paddr, BUFFER_ADDR_INFO0_ADDR);
2278 	ath12k_hif_write32(ab,
2279 			   HAL_SEQ_WCSS_UMAC_WBM_REG +
2280 			   HAL_WBM_SCATTERED_DESC_PTR_HEAD_INFO_IX0(ab),
2281 			   val);
2282 
2283 	val = u32_encode_bits(((u64)sbuf[nsbufs - 1].paddr >> HAL_ADDR_MSB_REG_SHIFT),
2284 			      HAL_WBM_SCATTERED_DESC_MSB_BASE_ADDR_39_32) |
2285 	       u32_encode_bits((end_offset >> 2),
2286 			       HAL_WBM_SCATTERED_DESC_HEAD_P_OFFSET_IX1);
2287 	ath12k_hif_write32(ab,
2288 			   HAL_SEQ_WCSS_UMAC_WBM_REG +
2289 			   HAL_WBM_SCATTERED_DESC_PTR_HEAD_INFO_IX1(ab),
2290 			   val);
2291 
2292 	val = u32_encode_bits(sbuf[0].paddr, BUFFER_ADDR_INFO0_ADDR);
2293 	ath12k_hif_write32(ab,
2294 			   HAL_SEQ_WCSS_UMAC_WBM_REG +
2295 			   HAL_WBM_SCATTERED_DESC_PTR_HEAD_INFO_IX0(ab),
2296 			   val);
2297 
2298 	val = u32_encode_bits(sbuf[0].paddr, BUFFER_ADDR_INFO0_ADDR);
2299 	ath12k_hif_write32(ab,
2300 			   HAL_SEQ_WCSS_UMAC_WBM_REG +
2301 			   HAL_WBM_SCATTERED_DESC_PTR_TAIL_INFO_IX0(ab),
2302 			   val);
2303 
2304 	val = u32_encode_bits(((u64)sbuf[0].paddr >> HAL_ADDR_MSB_REG_SHIFT),
2305 			      HAL_WBM_SCATTERED_DESC_MSB_BASE_ADDR_39_32) |
2306 	      u32_encode_bits(0, HAL_WBM_SCATTERED_DESC_TAIL_P_OFFSET_IX1);
2307 	ath12k_hif_write32(ab,
2308 			   HAL_SEQ_WCSS_UMAC_WBM_REG +
2309 			   HAL_WBM_SCATTERED_DESC_PTR_TAIL_INFO_IX1(ab),
2310 			   val);
2311 
2312 	val = 2 * tot_link_desc;
2313 	ath12k_hif_write32(ab,
2314 			   HAL_SEQ_WCSS_UMAC_WBM_REG +
2315 			   HAL_WBM_SCATTERED_DESC_PTR_HP_ADDR(ab),
2316 			   val);
2317 
2318 	/* Enable the SRNG */
2319 	val = u32_encode_bits(1, HAL_WBM_IDLE_LINK_RING_MISC_SRNG_ENABLE) |
2320 	      u32_encode_bits(1, HAL_WBM_IDLE_LINK_RING_MISC_RIND_ID_DISABLE);
2321 	ath12k_hif_write32(ab,
2322 			   HAL_SEQ_WCSS_UMAC_WBM_REG +
2323 			   HAL_WBM_IDLE_LINK_RING_MISC_ADDR(ab),
2324 			   val);
2325 }
2326 
2327 int ath12k_hal_srng_setup(struct ath12k_base *ab, enum hal_ring_type type,
2328 			  int ring_num, int mac_id,
2329 			  struct hal_srng_params *params)
2330 {
2331 	struct ath12k_hal *hal = &ab->hal;
2332 	struct hal_srng_config *srng_config = &ab->hal.srng_config[type];
2333 	struct hal_srng *srng;
2334 	int ring_id;
2335 	u32 idx;
2336 	int i;
2337 	u32 reg_base;
2338 
2339 	ring_id = ath12k_hal_srng_get_ring_id(ab, type, ring_num, mac_id);
2340 	if (ring_id < 0)
2341 		return ring_id;
2342 
2343 	srng = &hal->srng_list[ring_id];
2344 
2345 	srng->ring_id = ring_id;
2346 	srng->ring_dir = srng_config->ring_dir;
2347 	srng->ring_base_paddr = params->ring_base_paddr;
2348 	srng->ring_base_vaddr = params->ring_base_vaddr;
2349 	srng->entry_size = srng_config->entry_size;
2350 	srng->num_entries = params->num_entries;
2351 	srng->ring_size = srng->entry_size * srng->num_entries;
2352 	srng->intr_batch_cntr_thres_entries =
2353 				params->intr_batch_cntr_thres_entries;
2354 	srng->intr_timer_thres_us = params->intr_timer_thres_us;
2355 	srng->flags = params->flags;
2356 	srng->msi_addr = params->msi_addr;
2357 	srng->msi2_addr = params->msi2_addr;
2358 	srng->msi_data = params->msi_data;
2359 	srng->msi2_data = params->msi2_data;
2360 	srng->initialized = 1;
2361 	spin_lock_init(&srng->lock);
2362 	lockdep_set_class(&srng->lock, &srng->lock_key);
2363 
2364 	for (i = 0; i < HAL_SRNG_NUM_REG_GRP; i++) {
2365 		srng->hwreg_base[i] = srng_config->reg_start[i] +
2366 				      (ring_num * srng_config->reg_size[i]);
2367 	}
2368 
2369 	memset(srng->ring_base_vaddr, 0,
2370 	       (srng->entry_size * srng->num_entries) << 2);
2371 
2372 	reg_base = srng->hwreg_base[HAL_SRNG_REG_GRP_R2];
2373 
2374 	if (srng->ring_dir == HAL_SRNG_DIR_SRC) {
2375 		srng->u.src_ring.hp = 0;
2376 		srng->u.src_ring.cached_tp = 0;
2377 		srng->u.src_ring.reap_hp = srng->ring_size - srng->entry_size;
2378 		srng->u.src_ring.tp_addr = (void *)(hal->rdp.vaddr + ring_id);
2379 		srng->u.src_ring.low_threshold = params->low_threshold *
2380 						 srng->entry_size;
2381 		if (srng_config->mac_type == ATH12K_HAL_SRNG_UMAC) {
2382 			if (!ab->hw_params->supports_shadow_regs)
2383 				srng->u.src_ring.hp_addr =
2384 					(u32 *)((unsigned long)ab->mem + reg_base);
2385 			else
2386 				ath12k_dbg(ab, ATH12K_DBG_HAL,
2387 					   "hal type %d ring_num %d reg_base 0x%x shadow 0x%lx\n",
2388 					   type, ring_num,
2389 					   reg_base,
2390 					   (unsigned long)srng->u.src_ring.hp_addr -
2391 					   (unsigned long)ab->mem);
2392 		} else {
2393 			idx = ring_id - HAL_SRNG_RING_ID_DMAC_CMN_ID_START;
2394 			srng->u.src_ring.hp_addr = (void *)(hal->wrp.vaddr +
2395 						   idx);
2396 			srng->flags |= HAL_SRNG_FLAGS_LMAC_RING;
2397 		}
2398 	} else {
2399 		/* During initialization loop count in all the descriptors
2400 		 * will be set to zero, and HW will set it to 1 on completing
2401 		 * descriptor update in first loop, and increments it by 1 on
2402 		 * subsequent loops (loop count wraps around after reaching
2403 		 * 0xffff). The 'loop_cnt' in SW ring state is the expected
2404 		 * loop count in descriptors updated by HW (to be processed
2405 		 * by SW).
2406 		 */
2407 		srng->u.dst_ring.loop_cnt = 1;
2408 		srng->u.dst_ring.tp = 0;
2409 		srng->u.dst_ring.cached_hp = 0;
2410 		srng->u.dst_ring.hp_addr = (void *)(hal->rdp.vaddr + ring_id);
2411 		if (srng_config->mac_type == ATH12K_HAL_SRNG_UMAC) {
2412 			if (!ab->hw_params->supports_shadow_regs)
2413 				srng->u.dst_ring.tp_addr =
2414 					(u32 *)((unsigned long)ab->mem + reg_base +
2415 					(HAL_REO1_RING_TP - HAL_REO1_RING_HP));
2416 			else
2417 				ath12k_dbg(ab, ATH12K_DBG_HAL,
2418 					   "type %d ring_num %d target_reg 0x%x shadow 0x%lx\n",
2419 					   type, ring_num,
2420 					   reg_base + HAL_REO1_RING_TP - HAL_REO1_RING_HP,
2421 					   (unsigned long)srng->u.dst_ring.tp_addr -
2422 					   (unsigned long)ab->mem);
2423 		} else {
2424 			/* For PMAC & DMAC rings, tail pointer updates will be done
2425 			 * through FW by writing to a shared memory location
2426 			 */
2427 			idx = ring_id - HAL_SRNG_RING_ID_DMAC_CMN_ID_START;
2428 			srng->u.dst_ring.tp_addr = (void *)(hal->wrp.vaddr +
2429 						   idx);
2430 			srng->flags |= HAL_SRNG_FLAGS_LMAC_RING;
2431 		}
2432 	}
2433 
2434 	if (srng_config->mac_type != ATH12K_HAL_SRNG_UMAC)
2435 		return ring_id;
2436 
2437 	ath12k_hal_srng_hw_init(ab, srng);
2438 
2439 	if (type == HAL_CE_DST) {
2440 		srng->u.dst_ring.max_buffer_length = params->max_buffer_len;
2441 		ath12k_hal_ce_dst_setup(ab, srng, ring_num);
2442 	}
2443 
2444 	return ring_id;
2445 }
2446 
2447 static void ath12k_hal_srng_update_hp_tp_addr(struct ath12k_base *ab,
2448 					      int shadow_cfg_idx,
2449 					      enum hal_ring_type ring_type,
2450 					      int ring_num)
2451 {
2452 	struct hal_srng *srng;
2453 	struct ath12k_hal *hal = &ab->hal;
2454 	int ring_id;
2455 	struct hal_srng_config *srng_config = &hal->srng_config[ring_type];
2456 
2457 	ring_id = ath12k_hal_srng_get_ring_id(ab, ring_type, ring_num, 0);
2458 	if (ring_id < 0)
2459 		return;
2460 
2461 	srng = &hal->srng_list[ring_id];
2462 
2463 	if (srng_config->ring_dir == HAL_SRNG_DIR_DST)
2464 		srng->u.dst_ring.tp_addr = (u32 *)(HAL_SHADOW_REG(shadow_cfg_idx) +
2465 						   (unsigned long)ab->mem);
2466 	else
2467 		srng->u.src_ring.hp_addr = (u32 *)(HAL_SHADOW_REG(shadow_cfg_idx) +
2468 						   (unsigned long)ab->mem);
2469 }
2470 
2471 int ath12k_hal_srng_update_shadow_config(struct ath12k_base *ab,
2472 					 enum hal_ring_type ring_type,
2473 					 int ring_num)
2474 {
2475 	struct ath12k_hal *hal = &ab->hal;
2476 	struct hal_srng_config *srng_config = &hal->srng_config[ring_type];
2477 	int shadow_cfg_idx = hal->num_shadow_reg_configured;
2478 	u32 target_reg;
2479 
2480 	if (shadow_cfg_idx >= HAL_SHADOW_NUM_REGS)
2481 		return -EINVAL;
2482 
2483 	hal->num_shadow_reg_configured++;
2484 
2485 	target_reg = srng_config->reg_start[HAL_HP_OFFSET_IN_REG_START];
2486 	target_reg += srng_config->reg_size[HAL_HP_OFFSET_IN_REG_START] *
2487 		ring_num;
2488 
2489 	/* For destination ring, shadow the TP */
2490 	if (srng_config->ring_dir == HAL_SRNG_DIR_DST)
2491 		target_reg += HAL_OFFSET_FROM_HP_TO_TP;
2492 
2493 	hal->shadow_reg_addr[shadow_cfg_idx] = target_reg;
2494 
2495 	/* update the hp/tp address in the hal structure */
2496 	ath12k_hal_srng_update_hp_tp_addr(ab, shadow_cfg_idx, ring_type,
2497 					  ring_num);
2498 
2499 	ath12k_dbg(ab, ATH12K_DBG_HAL,
2500 		   "target_reg %x, shadow reg 0x%x shadow_idx 0x%x, ring_type %d, ring num %d",
2501 		  target_reg,
2502 		  HAL_SHADOW_REG(shadow_cfg_idx),
2503 		  shadow_cfg_idx,
2504 		  ring_type, ring_num);
2505 
2506 	return 0;
2507 }
2508 
2509 void ath12k_hal_srng_shadow_config(struct ath12k_base *ab)
2510 {
2511 	struct ath12k_hal *hal = &ab->hal;
2512 	int ring_type, ring_num;
2513 
2514 	/* update all the non-CE srngs. */
2515 	for (ring_type = 0; ring_type < HAL_MAX_RING_TYPES; ring_type++) {
2516 		struct hal_srng_config *srng_config = &hal->srng_config[ring_type];
2517 
2518 		if (ring_type == HAL_CE_SRC ||
2519 		    ring_type == HAL_CE_DST ||
2520 			ring_type == HAL_CE_DST_STATUS)
2521 			continue;
2522 
2523 		if (srng_config->mac_type == ATH12K_HAL_SRNG_DMAC ||
2524 		    srng_config->mac_type == ATH12K_HAL_SRNG_PMAC)
2525 			continue;
2526 
2527 		for (ring_num = 0; ring_num < srng_config->max_rings; ring_num++)
2528 			ath12k_hal_srng_update_shadow_config(ab, ring_type, ring_num);
2529 	}
2530 }
2531 
2532 void ath12k_hal_srng_get_shadow_config(struct ath12k_base *ab,
2533 				       u32 **cfg, u32 *len)
2534 {
2535 	struct ath12k_hal *hal = &ab->hal;
2536 
2537 	*len = hal->num_shadow_reg_configured;
2538 	*cfg = hal->shadow_reg_addr;
2539 }
2540 
2541 void ath12k_hal_srng_shadow_update_hp_tp(struct ath12k_base *ab,
2542 					 struct hal_srng *srng)
2543 {
2544 	lockdep_assert_held(&srng->lock);
2545 
2546 	/* check whether the ring is empty. Update the shadow
2547 	 * HP only when the ring isn't empty.
2548 	 */
2549 	if (srng->ring_dir == HAL_SRNG_DIR_SRC &&
2550 	    *srng->u.src_ring.tp_addr != srng->u.src_ring.hp)
2551 		ath12k_hal_srng_access_end(ab, srng);
2552 }
2553 
2554 static void ath12k_hal_register_srng_lock_keys(struct ath12k_base *ab)
2555 {
2556 	struct ath12k_hal *hal = &ab->hal;
2557 	u32 ring_id;
2558 
2559 	for (ring_id = 0; ring_id < HAL_SRNG_RING_ID_MAX; ring_id++)
2560 		lockdep_register_key(&hal->srng_list[ring_id].lock_key);
2561 }
2562 
2563 static void ath12k_hal_unregister_srng_lock_keys(struct ath12k_base *ab)
2564 {
2565 	struct ath12k_hal *hal = &ab->hal;
2566 	u32 ring_id;
2567 
2568 	for (ring_id = 0; ring_id < HAL_SRNG_RING_ID_MAX; ring_id++)
2569 		lockdep_unregister_key(&hal->srng_list[ring_id].lock_key);
2570 }
2571 
2572 int ath12k_hal_srng_init(struct ath12k_base *ab)
2573 {
2574 	struct ath12k_hal *hal = &ab->hal;
2575 	int ret;
2576 
2577 	memset(hal, 0, sizeof(*hal));
2578 
2579 	ret = ab->hw_params->hal_ops->create_srng_config(ab);
2580 	if (ret)
2581 		goto err_hal;
2582 
2583 	ret = ath12k_hal_alloc_cont_rdp(ab);
2584 	if (ret)
2585 		goto err_hal;
2586 
2587 	ret = ath12k_hal_alloc_cont_wrp(ab);
2588 	if (ret)
2589 		goto err_free_cont_rdp;
2590 
2591 	ath12k_hal_register_srng_lock_keys(ab);
2592 
2593 	return 0;
2594 
2595 err_free_cont_rdp:
2596 	ath12k_hal_free_cont_rdp(ab);
2597 
2598 err_hal:
2599 	return ret;
2600 }
2601 
2602 void ath12k_hal_srng_deinit(struct ath12k_base *ab)
2603 {
2604 	struct ath12k_hal *hal = &ab->hal;
2605 
2606 	ath12k_hal_unregister_srng_lock_keys(ab);
2607 	ath12k_hal_free_cont_rdp(ab);
2608 	ath12k_hal_free_cont_wrp(ab);
2609 	kfree(hal->srng_config);
2610 	hal->srng_config = NULL;
2611 }
2612 
2613 void ath12k_hal_dump_srng_stats(struct ath12k_base *ab)
2614 {
2615 	struct hal_srng *srng;
2616 	struct ath12k_ext_irq_grp *irq_grp;
2617 	struct ath12k_ce_pipe *ce_pipe;
2618 	int i;
2619 
2620 	ath12k_err(ab, "Last interrupt received for each CE:\n");
2621 	for (i = 0; i < ab->hw_params->ce_count; i++) {
2622 		ce_pipe = &ab->ce.ce_pipe[i];
2623 
2624 		if (ath12k_ce_get_attr_flags(ab, i) & CE_ATTR_DIS_INTR)
2625 			continue;
2626 
2627 		ath12k_err(ab, "CE_id %d pipe_num %d %ums before\n",
2628 			   i, ce_pipe->pipe_num,
2629 			   jiffies_to_msecs(jiffies - ce_pipe->timestamp));
2630 	}
2631 
2632 	ath12k_err(ab, "\nLast interrupt received for each group:\n");
2633 	for (i = 0; i < ATH12K_EXT_IRQ_GRP_NUM_MAX; i++) {
2634 		irq_grp = &ab->ext_irq_grp[i];
2635 		ath12k_err(ab, "group_id %d %ums before\n",
2636 			   irq_grp->grp_id,
2637 			   jiffies_to_msecs(jiffies - irq_grp->timestamp));
2638 	}
2639 
2640 	for (i = 0; i < HAL_SRNG_RING_ID_MAX; i++) {
2641 		srng = &ab->hal.srng_list[i];
2642 
2643 		if (!srng->initialized)
2644 			continue;
2645 
2646 		if (srng->ring_dir == HAL_SRNG_DIR_SRC)
2647 			ath12k_err(ab,
2648 				   "src srng id %u hp %u, reap_hp %u, cur tp %u, cached tp %u last tp %u napi processed before %ums\n",
2649 				   srng->ring_id, srng->u.src_ring.hp,
2650 				   srng->u.src_ring.reap_hp,
2651 				   *srng->u.src_ring.tp_addr, srng->u.src_ring.cached_tp,
2652 				   srng->u.src_ring.last_tp,
2653 				   jiffies_to_msecs(jiffies - srng->timestamp));
2654 		else if (srng->ring_dir == HAL_SRNG_DIR_DST)
2655 			ath12k_err(ab,
2656 				   "dst srng id %u tp %u, cur hp %u, cached hp %u last hp %u napi processed before %ums\n",
2657 				   srng->ring_id, srng->u.dst_ring.tp,
2658 				   *srng->u.dst_ring.hp_addr,
2659 				   srng->u.dst_ring.cached_hp,
2660 				   srng->u.dst_ring.last_hp,
2661 				   jiffies_to_msecs(jiffies - srng->timestamp));
2662 	}
2663 }
2664