xref: /freebsd/sys/contrib/dev/athk/ath12k/testmode.c (revision a96550206e4bde15bf615ff2127b80404a7ec41f)
1*a9655020SBjoern A. Zeeb // SPDX-License-Identifier: BSD-3-Clause-Clear
2*a9655020SBjoern A. Zeeb /*
3*a9655020SBjoern A. Zeeb  * Copyright (c) 2018-2021 The Linux Foundation. All rights reserved.
4*a9655020SBjoern A. Zeeb  * Copyright (c) 2021-2025 Qualcomm Innovation Center, Inc. All rights reserved.
5*a9655020SBjoern A. Zeeb  */
6*a9655020SBjoern A. Zeeb 
7*a9655020SBjoern A. Zeeb #include "testmode.h"
8*a9655020SBjoern A. Zeeb #include <net/netlink.h>
9*a9655020SBjoern A. Zeeb #include "debug.h"
10*a9655020SBjoern A. Zeeb #include "wmi.h"
11*a9655020SBjoern A. Zeeb #include "hw.h"
12*a9655020SBjoern A. Zeeb #include "core.h"
13*a9655020SBjoern A. Zeeb #include "hif.h"
14*a9655020SBjoern A. Zeeb #include "../testmode_i.h"
15*a9655020SBjoern A. Zeeb 
16*a9655020SBjoern A. Zeeb #define ATH12K_FTM_SEGHDR_CURRENT_SEQ		GENMASK(3, 0)
17*a9655020SBjoern A. Zeeb #define ATH12K_FTM_SEGHDR_TOTAL_SEGMENTS	GENMASK(7, 4)
18*a9655020SBjoern A. Zeeb 
19*a9655020SBjoern A. Zeeb static const struct nla_policy ath12k_tm_policy[ATH_TM_ATTR_MAX + 1] = {
20*a9655020SBjoern A. Zeeb 	[ATH_TM_ATTR_CMD]		= { .type = NLA_U32 },
21*a9655020SBjoern A. Zeeb 	[ATH_TM_ATTR_DATA]		= { .type = NLA_BINARY,
22*a9655020SBjoern A. Zeeb 					    .len = ATH_TM_DATA_MAX_LEN },
23*a9655020SBjoern A. Zeeb 	[ATH_TM_ATTR_WMI_CMDID]		= { .type = NLA_U32 },
24*a9655020SBjoern A. Zeeb 	[ATH_TM_ATTR_VERSION_MAJOR]	= { .type = NLA_U32 },
25*a9655020SBjoern A. Zeeb 	[ATH_TM_ATTR_VERSION_MINOR]	= { .type = NLA_U32 },
26*a9655020SBjoern A. Zeeb };
27*a9655020SBjoern A. Zeeb 
ath12k_tm_get_ar(struct ath12k_base * ab)28*a9655020SBjoern A. Zeeb static struct ath12k *ath12k_tm_get_ar(struct ath12k_base *ab)
29*a9655020SBjoern A. Zeeb {
30*a9655020SBjoern A. Zeeb 	struct ath12k_pdev *pdev;
31*a9655020SBjoern A. Zeeb 	struct ath12k *ar;
32*a9655020SBjoern A. Zeeb 	int i;
33*a9655020SBjoern A. Zeeb 
34*a9655020SBjoern A. Zeeb 	for (i = 0; i < ab->num_radios; i++) {
35*a9655020SBjoern A. Zeeb 		pdev = &ab->pdevs[i];
36*a9655020SBjoern A. Zeeb 		ar = pdev->ar;
37*a9655020SBjoern A. Zeeb 
38*a9655020SBjoern A. Zeeb 		if (ar && ar->ah->state == ATH12K_HW_STATE_TM)
39*a9655020SBjoern A. Zeeb 			return ar;
40*a9655020SBjoern A. Zeeb 	}
41*a9655020SBjoern A. Zeeb 
42*a9655020SBjoern A. Zeeb 	return NULL;
43*a9655020SBjoern A. Zeeb }
44*a9655020SBjoern A. Zeeb 
/* Forward an unsegmented WMI event to user space as a cfg80211 testmode
 * event. The skb payload is copied verbatim into ATH_TM_ATTR_DATA; the
 * event is silently dropped (with a warning) when no radio is in FTM
 * mode or the netlink skb cannot be allocated.
 */
void ath12k_tm_wmi_event_unsegmented(struct ath12k_base *ab, u32 cmd_id,
				     struct sk_buff *skb)
{
	struct ath12k *ar;
	struct sk_buff *nl_skb;

	ath12k_dbg(ab, ATH12K_DBG_TESTMODE,
		   "testmode event wmi cmd_id %d skb length %d\n",
		   cmd_id, skb->len);
	ath12k_dbg_dump(ab, ATH12K_DBG_TESTMODE, NULL, "", skb->data, skb->len);

	ar = ath12k_tm_get_ar(ab);
	if (!ar) {
		ath12k_warn(ab, "testmode event not handled due to invalid pdev\n");
		return;
	}

	/* room for two u32 attributes (cmd, cmd_id) plus the raw payload */
	spin_lock_bh(&ar->data_lock);
	nl_skb = cfg80211_testmode_alloc_event_skb(ar->ah->hw->wiphy,
						   2 * nla_total_size(sizeof(u32)) +
						   nla_total_size(skb->len),
						   GFP_ATOMIC);
	spin_unlock_bh(&ar->data_lock);
	if (!nl_skb) {
		ath12k_warn(ab,
			    "failed to allocate skb for unsegmented testmode wmi event\n");
		return;
	}

	if (nla_put_u32(nl_skb, ATH_TM_ATTR_CMD, ATH_TM_CMD_WMI) ||
	    nla_put_u32(nl_skb, ATH_TM_ATTR_WMI_CMDID, cmd_id) ||
	    nla_put(nl_skb, ATH_TM_ATTR_DATA, skb->len, skb->data)) {
		ath12k_warn(ab, "failed to populate testmode unsegmented event\n");
		kfree_skb(nl_skb);
		return;
	}

	cfg80211_testmode_event(nl_skb, GFP_ATOMIC);
}
87*a9655020SBjoern A. Zeeb 
/* Reassemble a segmented FTM WMI event and deliver the complete payload
 * to user space via cfg80211 testmode.
 *
 * Firmware splits large FTM events into segments; seg_hdr.segmentinfo
 * carries the current sequence number and the total segment count (4 bits
 * each). Partial data is accumulated in ab->ftm_event_obj.eventdata until
 * the final segment arrives, then the whole buffer is sent in one
 * netlink event.
 */
void ath12k_tm_process_event(struct ath12k_base *ab, u32 cmd_id,
			     const struct ath12k_wmi_ftm_event *ftm_msg,
			     u16 length)
{
	struct sk_buff *nl_skb;
	struct ath12k *ar;
	u32 data_pos, pdev_id;
	u16 datalen;
	u8 total_segments, current_seq;
	u8 const *buf_pos;

	ath12k_dbg(ab, ATH12K_DBG_TESTMODE,
		   "testmode event wmi cmd_id %d ftm event msg %p datalen %d\n",
		   cmd_id, ftm_msg, length);
	ath12k_dbg_dump(ab, ATH12K_DBG_TESTMODE, NULL, "", ftm_msg, length);

	/* Reject events too short to carry a segment header: the unsigned
	 * subtraction computing datalen below would otherwise wrap around
	 * to a huge value.
	 */
	if (length < sizeof(struct ath12k_wmi_ftm_seg_hdr_params)) {
		ath12k_warn(ab, "testmode event too short: %u\n", length);
		return;
	}

	pdev_id = DP_HW2SW_MACID(le32_to_cpu(ftm_msg->seg_hdr.pdev_id));

	if (pdev_id >= ab->num_radios) {
		ath12k_warn(ab, "testmode event not handled due to invalid pdev id\n");
		return;
	}

	ar = ab->pdevs[pdev_id].ar;

	if (!ar) {
		ath12k_warn(ab, "testmode event not handled due to absence of pdev\n");
		return;
	}

	current_seq = le32_get_bits(ftm_msg->seg_hdr.segmentinfo,
				    ATH12K_FTM_SEGHDR_CURRENT_SEQ);
	total_segments = le32_get_bits(ftm_msg->seg_hdr.segmentinfo,
				       ATH12K_FTM_SEGHDR_TOTAL_SEGMENTS);
	datalen = length - (sizeof(struct ath12k_wmi_ftm_seg_hdr_params));
	buf_pos = ftm_msg->data;

	/* the first segment restarts the reassembly state */
	if (current_seq == 0) {
		ab->ftm_event_obj.expected_seq = 0;
		ab->ftm_event_obj.data_pos = 0;
	}

	data_pos = ab->ftm_event_obj.data_pos;

	if ((data_pos + datalen) > ATH_FTM_EVENT_MAX_BUF_LENGTH) {
		ath12k_warn(ab,
			    "Invalid event length data_pos[%d] datalen[%d]\n",
			    data_pos, datalen);
		return;
	}

	memcpy(&ab->ftm_event_obj.eventdata[data_pos], buf_pos, datalen);
	data_pos += datalen;

	/* more segments pending: stash progress and wait for the rest */
	if (++ab->ftm_event_obj.expected_seq != total_segments) {
		ab->ftm_event_obj.data_pos = data_pos;
		ath12k_dbg(ab, ATH12K_DBG_TESTMODE,
			   "partial data received current_seq[%d], total_seg[%d]\n",
			    current_seq, total_segments);
		return;
	}

	ath12k_dbg(ab, ATH12K_DBG_TESTMODE,
		   "total data length[%d] = [%d]\n",
		   data_pos, ftm_msg->seg_hdr.len);

	spin_lock_bh(&ar->data_lock);
	nl_skb = cfg80211_testmode_alloc_event_skb(ar->ah->hw->wiphy,
						   2 * nla_total_size(sizeof(u32)) +
						   nla_total_size(data_pos),
						   GFP_ATOMIC);
	spin_unlock_bh(&ar->data_lock);

	if (!nl_skb) {
		ath12k_warn(ab,
			    "failed to allocate skb for testmode wmi event\n");
		return;
	}

	if (nla_put_u32(nl_skb, ATH_TM_ATTR_CMD,
			ATH_TM_CMD_WMI_FTM) ||
	    nla_put_u32(nl_skb, ATH_TM_ATTR_WMI_CMDID, cmd_id) ||
	    nla_put(nl_skb, ATH_TM_ATTR_DATA, data_pos,
		    &ab->ftm_event_obj.eventdata[0])) {
		ath12k_warn(ab, "failed to populate testmode event");
		kfree_skb(nl_skb);
		return;
	}

	cfg80211_testmode_event(nl_skb, GFP_ATOMIC);
}
178*a9655020SBjoern A. Zeeb 
ath12k_tm_cmd_get_version(struct ath12k * ar,struct nlattr * tb[])179*a9655020SBjoern A. Zeeb static int ath12k_tm_cmd_get_version(struct ath12k *ar, struct nlattr *tb[])
180*a9655020SBjoern A. Zeeb {
181*a9655020SBjoern A. Zeeb 	struct sk_buff *skb;
182*a9655020SBjoern A. Zeeb 
183*a9655020SBjoern A. Zeeb 	ath12k_dbg(ar->ab, ATH12K_DBG_TESTMODE,
184*a9655020SBjoern A. Zeeb 		   "testmode cmd get version_major %d version_minor %d\n",
185*a9655020SBjoern A. Zeeb 		   ATH_TESTMODE_VERSION_MAJOR,
186*a9655020SBjoern A. Zeeb 		   ATH_TESTMODE_VERSION_MINOR);
187*a9655020SBjoern A. Zeeb 
188*a9655020SBjoern A. Zeeb 	spin_lock_bh(&ar->data_lock);
189*a9655020SBjoern A. Zeeb 	skb = cfg80211_testmode_alloc_reply_skb(ar->ah->hw->wiphy,
190*a9655020SBjoern A. Zeeb 						2 * nla_total_size(sizeof(u32)));
191*a9655020SBjoern A. Zeeb 	spin_unlock_bh(&ar->data_lock);
192*a9655020SBjoern A. Zeeb 
193*a9655020SBjoern A. Zeeb 	if (!skb)
194*a9655020SBjoern A. Zeeb 		return -ENOMEM;
195*a9655020SBjoern A. Zeeb 
196*a9655020SBjoern A. Zeeb 	if (nla_put_u32(skb, ATH_TM_ATTR_VERSION_MAJOR,
197*a9655020SBjoern A. Zeeb 			ATH_TESTMODE_VERSION_MAJOR) ||
198*a9655020SBjoern A. Zeeb 	    nla_put_u32(skb, ATH_TM_ATTR_VERSION_MINOR,
199*a9655020SBjoern A. Zeeb 			ATH_TESTMODE_VERSION_MINOR)) {
200*a9655020SBjoern A. Zeeb 		kfree_skb(skb);
201*a9655020SBjoern A. Zeeb 		return -ENOBUFS;
202*a9655020SBjoern A. Zeeb 	}
203*a9655020SBjoern A. Zeeb 
204*a9655020SBjoern A. Zeeb 	return cfg80211_testmode_reply(skb);
205*a9655020SBjoern A. Zeeb }
206*a9655020SBjoern A. Zeeb 
ath12k_tm_cmd_process_ftm(struct ath12k * ar,struct nlattr * tb[])207*a9655020SBjoern A. Zeeb static int ath12k_tm_cmd_process_ftm(struct ath12k *ar, struct nlattr *tb[])
208*a9655020SBjoern A. Zeeb {
209*a9655020SBjoern A. Zeeb 	struct ath12k_wmi_pdev *wmi = ar->wmi;
210*a9655020SBjoern A. Zeeb 	struct sk_buff *skb;
211*a9655020SBjoern A. Zeeb 	struct ath12k_wmi_ftm_cmd *ftm_cmd;
212*a9655020SBjoern A. Zeeb 	int ret = 0;
213*a9655020SBjoern A. Zeeb 	void *buf;
214*a9655020SBjoern A. Zeeb 	size_t aligned_len;
215*a9655020SBjoern A. Zeeb 	u32 cmd_id, buf_len;
216*a9655020SBjoern A. Zeeb 	u16 chunk_len, total_bytes, num_segments;
217*a9655020SBjoern A. Zeeb 	u8 segnumber = 0, *bufpos;
218*a9655020SBjoern A. Zeeb 
219*a9655020SBjoern A. Zeeb 	ath12k_dbg(ar->ab, ATH12K_DBG_TESTMODE, "ah->state  %d\n", ar->ah->state);
220*a9655020SBjoern A. Zeeb 	if (ar->ah->state != ATH12K_HW_STATE_TM)
221*a9655020SBjoern A. Zeeb 		return -ENETDOWN;
222*a9655020SBjoern A. Zeeb 
223*a9655020SBjoern A. Zeeb 	if (!tb[ATH_TM_ATTR_DATA])
224*a9655020SBjoern A. Zeeb 		return -EINVAL;
225*a9655020SBjoern A. Zeeb 
226*a9655020SBjoern A. Zeeb 	buf = nla_data(tb[ATH_TM_ATTR_DATA]);
227*a9655020SBjoern A. Zeeb 	buf_len = nla_len(tb[ATH_TM_ATTR_DATA]);
228*a9655020SBjoern A. Zeeb 	cmd_id = WMI_PDEV_UTF_CMDID;
229*a9655020SBjoern A. Zeeb 	ath12k_dbg(ar->ab, ATH12K_DBG_TESTMODE,
230*a9655020SBjoern A. Zeeb 		   "testmode cmd wmi cmd_id %d buf %p buf_len %d\n",
231*a9655020SBjoern A. Zeeb 		   cmd_id, buf, buf_len);
232*a9655020SBjoern A. Zeeb 	ath12k_dbg_dump(ar->ab, ATH12K_DBG_TESTMODE, NULL, "", buf, buf_len);
233*a9655020SBjoern A. Zeeb 	bufpos = buf;
234*a9655020SBjoern A. Zeeb 	total_bytes = buf_len;
235*a9655020SBjoern A. Zeeb 	num_segments = total_bytes / MAX_WMI_UTF_LEN;
236*a9655020SBjoern A. Zeeb 
237*a9655020SBjoern A. Zeeb 	if (buf_len - (num_segments * MAX_WMI_UTF_LEN))
238*a9655020SBjoern A. Zeeb 		num_segments++;
239*a9655020SBjoern A. Zeeb 
240*a9655020SBjoern A. Zeeb 	while (buf_len) {
241*a9655020SBjoern A. Zeeb 		if (buf_len > MAX_WMI_UTF_LEN)
242*a9655020SBjoern A. Zeeb 			chunk_len = MAX_WMI_UTF_LEN;    /* MAX message */
243*a9655020SBjoern A. Zeeb 		else
244*a9655020SBjoern A. Zeeb 			chunk_len = buf_len;
245*a9655020SBjoern A. Zeeb 
246*a9655020SBjoern A. Zeeb 		skb = ath12k_wmi_alloc_skb(wmi->wmi_ab, (chunk_len +
247*a9655020SBjoern A. Zeeb 					sizeof(struct ath12k_wmi_ftm_cmd)));
248*a9655020SBjoern A. Zeeb 
249*a9655020SBjoern A. Zeeb 		if (!skb)
250*a9655020SBjoern A. Zeeb 			return -ENOMEM;
251*a9655020SBjoern A. Zeeb 
252*a9655020SBjoern A. Zeeb 		ftm_cmd = (struct ath12k_wmi_ftm_cmd *)skb->data;
253*a9655020SBjoern A. Zeeb 		aligned_len  = chunk_len + sizeof(struct ath12k_wmi_ftm_seg_hdr_params);
254*a9655020SBjoern A. Zeeb 		ftm_cmd->tlv_header = ath12k_wmi_tlv_hdr(WMI_TAG_ARRAY_BYTE, aligned_len);
255*a9655020SBjoern A. Zeeb 		ftm_cmd->seg_hdr.len = cpu_to_le32(total_bytes);
256*a9655020SBjoern A. Zeeb 		ftm_cmd->seg_hdr.msgref = cpu_to_le32(ar->ftm_msgref);
257*a9655020SBjoern A. Zeeb 		ftm_cmd->seg_hdr.segmentinfo =
258*a9655020SBjoern A. Zeeb 			le32_encode_bits(num_segments,
259*a9655020SBjoern A. Zeeb 					 ATH12K_FTM_SEGHDR_TOTAL_SEGMENTS) |
260*a9655020SBjoern A. Zeeb 			le32_encode_bits(segnumber,
261*a9655020SBjoern A. Zeeb 					 ATH12K_FTM_SEGHDR_CURRENT_SEQ);
262*a9655020SBjoern A. Zeeb 		ftm_cmd->seg_hdr.pdev_id = cpu_to_le32(ar->pdev->pdev_id);
263*a9655020SBjoern A. Zeeb 		segnumber++;
264*a9655020SBjoern A. Zeeb 		memcpy(&ftm_cmd->data, bufpos, chunk_len);
265*a9655020SBjoern A. Zeeb 		ret = ath12k_wmi_cmd_send(wmi, skb, cmd_id);
266*a9655020SBjoern A. Zeeb 
267*a9655020SBjoern A. Zeeb 		if (ret) {
268*a9655020SBjoern A. Zeeb 			ath12k_warn(ar->ab, "ftm wmi command fail: %d\n", ret);
269*a9655020SBjoern A. Zeeb 			kfree_skb(skb);
270*a9655020SBjoern A. Zeeb 			return ret;
271*a9655020SBjoern A. Zeeb 		}
272*a9655020SBjoern A. Zeeb 
273*a9655020SBjoern A. Zeeb 		buf_len -= chunk_len;
274*a9655020SBjoern A. Zeeb 		bufpos += chunk_len;
275*a9655020SBjoern A. Zeeb 	}
276*a9655020SBjoern A. Zeeb 
277*a9655020SBjoern A. Zeeb 	++ar->ftm_msgref;
278*a9655020SBjoern A. Zeeb 	return ret;
279*a9655020SBjoern A. Zeeb }
280*a9655020SBjoern A. Zeeb 
ath12k_tm_cmd_testmode_start(struct ath12k * ar,struct nlattr * tb[])281*a9655020SBjoern A. Zeeb static int ath12k_tm_cmd_testmode_start(struct ath12k *ar, struct nlattr *tb[])
282*a9655020SBjoern A. Zeeb {
283*a9655020SBjoern A. Zeeb 	if (ar->ah->state == ATH12K_HW_STATE_TM)
284*a9655020SBjoern A. Zeeb 		return -EALREADY;
285*a9655020SBjoern A. Zeeb 
286*a9655020SBjoern A. Zeeb 	if (ar->ah->state != ATH12K_HW_STATE_OFF)
287*a9655020SBjoern A. Zeeb 		return -EBUSY;
288*a9655020SBjoern A. Zeeb 
289*a9655020SBjoern A. Zeeb 	ar->ab->ftm_event_obj.eventdata = kzalloc(ATH_FTM_EVENT_MAX_BUF_LENGTH,
290*a9655020SBjoern A. Zeeb 						  GFP_KERNEL);
291*a9655020SBjoern A. Zeeb 
292*a9655020SBjoern A. Zeeb 	if (!ar->ab->ftm_event_obj.eventdata)
293*a9655020SBjoern A. Zeeb 		return -ENOMEM;
294*a9655020SBjoern A. Zeeb 
295*a9655020SBjoern A. Zeeb 	ar->ah->state = ATH12K_HW_STATE_TM;
296*a9655020SBjoern A. Zeeb 	ar->ftm_msgref = 0;
297*a9655020SBjoern A. Zeeb 	return 0;
298*a9655020SBjoern A. Zeeb }
299*a9655020SBjoern A. Zeeb 
ath12k_tm_cmd_wmi(struct ath12k * ar,struct nlattr * tb[])300*a9655020SBjoern A. Zeeb static int ath12k_tm_cmd_wmi(struct ath12k *ar, struct nlattr *tb[])
301*a9655020SBjoern A. Zeeb {
302*a9655020SBjoern A. Zeeb 	struct ath12k_wmi_pdev *wmi = ar->wmi;
303*a9655020SBjoern A. Zeeb 	struct sk_buff *skb;
304*a9655020SBjoern A. Zeeb 	struct wmi_pdev_set_param_cmd *cmd;
305*a9655020SBjoern A. Zeeb 	int ret = 0, tag;
306*a9655020SBjoern A. Zeeb 	void *buf;
307*a9655020SBjoern A. Zeeb 	u32 cmd_id, buf_len;
308*a9655020SBjoern A. Zeeb 
309*a9655020SBjoern A. Zeeb 	if (!tb[ATH_TM_ATTR_DATA])
310*a9655020SBjoern A. Zeeb 		return -EINVAL;
311*a9655020SBjoern A. Zeeb 
312*a9655020SBjoern A. Zeeb 	if (!tb[ATH_TM_ATTR_WMI_CMDID])
313*a9655020SBjoern A. Zeeb 		return -EINVAL;
314*a9655020SBjoern A. Zeeb 
315*a9655020SBjoern A. Zeeb 	buf = nla_data(tb[ATH_TM_ATTR_DATA]);
316*a9655020SBjoern A. Zeeb 	buf_len = nla_len(tb[ATH_TM_ATTR_DATA]);
317*a9655020SBjoern A. Zeeb 
318*a9655020SBjoern A. Zeeb 	if (!buf_len) {
319*a9655020SBjoern A. Zeeb 		ath12k_warn(ar->ab, "No data present in testmode command\n");
320*a9655020SBjoern A. Zeeb 		return -EINVAL;
321*a9655020SBjoern A. Zeeb 	}
322*a9655020SBjoern A. Zeeb 
323*a9655020SBjoern A. Zeeb 	cmd_id = nla_get_u32(tb[ATH_TM_ATTR_WMI_CMDID]);
324*a9655020SBjoern A. Zeeb 
325*a9655020SBjoern A. Zeeb 	cmd = buf;
326*a9655020SBjoern A. Zeeb 	tag = le32_get_bits(cmd->tlv_header, WMI_TLV_TAG);
327*a9655020SBjoern A. Zeeb 
328*a9655020SBjoern A. Zeeb 	if (tag == WMI_TAG_PDEV_SET_PARAM_CMD)
329*a9655020SBjoern A. Zeeb 		cmd->pdev_id = cpu_to_le32(ar->pdev->pdev_id);
330*a9655020SBjoern A. Zeeb 
331*a9655020SBjoern A. Zeeb 	ath12k_dbg(ar->ab, ATH12K_DBG_TESTMODE,
332*a9655020SBjoern A. Zeeb 		   "testmode cmd wmi cmd_id %d  buf length %d\n",
333*a9655020SBjoern A. Zeeb 		   cmd_id, buf_len);
334*a9655020SBjoern A. Zeeb 
335*a9655020SBjoern A. Zeeb 	ath12k_dbg_dump(ar->ab, ATH12K_DBG_TESTMODE, NULL, "", buf, buf_len);
336*a9655020SBjoern A. Zeeb 
337*a9655020SBjoern A. Zeeb 	skb = ath12k_wmi_alloc_skb(wmi->wmi_ab, buf_len);
338*a9655020SBjoern A. Zeeb 
339*a9655020SBjoern A. Zeeb 	if (!skb)
340*a9655020SBjoern A. Zeeb 		return -ENOMEM;
341*a9655020SBjoern A. Zeeb 
342*a9655020SBjoern A. Zeeb 	memcpy(skb->data, buf, buf_len);
343*a9655020SBjoern A. Zeeb 
344*a9655020SBjoern A. Zeeb 	ret = ath12k_wmi_cmd_send(wmi, skb, cmd_id);
345*a9655020SBjoern A. Zeeb 	if (ret) {
346*a9655020SBjoern A. Zeeb 		dev_kfree_skb(skb);
347*a9655020SBjoern A. Zeeb 		ath12k_warn(ar->ab, "failed to transmit wmi command (testmode): %d\n",
348*a9655020SBjoern A. Zeeb 			    ret);
349*a9655020SBjoern A. Zeeb 	}
350*a9655020SBjoern A. Zeeb 
351*a9655020SBjoern A. Zeeb 	return ret;
352*a9655020SBjoern A. Zeeb }
353*a9655020SBjoern A. Zeeb 
ath12k_tm_cmd(struct ieee80211_hw * hw,struct ieee80211_vif * vif,void * data,int len)354*a9655020SBjoern A. Zeeb int ath12k_tm_cmd(struct ieee80211_hw *hw, struct ieee80211_vif *vif,
355*a9655020SBjoern A. Zeeb 		  void *data, int len)
356*a9655020SBjoern A. Zeeb {
357*a9655020SBjoern A. Zeeb 	struct ath12k_hw *ah = hw->priv;
358*a9655020SBjoern A. Zeeb 	struct ath12k *ar = NULL;
359*a9655020SBjoern A. Zeeb 	struct nlattr *tb[ATH_TM_ATTR_MAX + 1];
360*a9655020SBjoern A. Zeeb 	struct ath12k_base *ab;
361*a9655020SBjoern A. Zeeb 	struct wiphy *wiphy = hw->wiphy;
362*a9655020SBjoern A. Zeeb 	int ret;
363*a9655020SBjoern A. Zeeb 
364*a9655020SBjoern A. Zeeb 	lockdep_assert_held(&wiphy->mtx);
365*a9655020SBjoern A. Zeeb 
366*a9655020SBjoern A. Zeeb 	ret = nla_parse(tb, ATH_TM_ATTR_MAX, data, len, ath12k_tm_policy,
367*a9655020SBjoern A. Zeeb 			NULL);
368*a9655020SBjoern A. Zeeb 	if (ret)
369*a9655020SBjoern A. Zeeb 		return ret;
370*a9655020SBjoern A. Zeeb 
371*a9655020SBjoern A. Zeeb 	if (!tb[ATH_TM_ATTR_CMD])
372*a9655020SBjoern A. Zeeb 		return -EINVAL;
373*a9655020SBjoern A. Zeeb 
374*a9655020SBjoern A. Zeeb 	/* TODO: have to handle ar for MLO case */
375*a9655020SBjoern A. Zeeb 	if (ah->num_radio)
376*a9655020SBjoern A. Zeeb 		ar = ah->radio;
377*a9655020SBjoern A. Zeeb 
378*a9655020SBjoern A. Zeeb 	if (!ar)
379*a9655020SBjoern A. Zeeb 		return -EINVAL;
380*a9655020SBjoern A. Zeeb 
381*a9655020SBjoern A. Zeeb 	ab = ar->ab;
382*a9655020SBjoern A. Zeeb 	switch (nla_get_u32(tb[ATH_TM_ATTR_CMD])) {
383*a9655020SBjoern A. Zeeb 	case ATH_TM_CMD_WMI:
384*a9655020SBjoern A. Zeeb 		return ath12k_tm_cmd_wmi(ar, tb);
385*a9655020SBjoern A. Zeeb 	case ATH_TM_CMD_TESTMODE_START:
386*a9655020SBjoern A. Zeeb 		return ath12k_tm_cmd_testmode_start(ar, tb);
387*a9655020SBjoern A. Zeeb 	case ATH_TM_CMD_GET_VERSION:
388*a9655020SBjoern A. Zeeb 		return ath12k_tm_cmd_get_version(ar, tb);
389*a9655020SBjoern A. Zeeb 	case ATH_TM_CMD_WMI_FTM:
390*a9655020SBjoern A. Zeeb 		set_bit(ATH12K_FLAG_FTM_SEGMENTED, &ab->dev_flags);
391*a9655020SBjoern A. Zeeb 		return ath12k_tm_cmd_process_ftm(ar, tb);
392*a9655020SBjoern A. Zeeb 	default:
393*a9655020SBjoern A. Zeeb 		return -EOPNOTSUPP;
394*a9655020SBjoern A. Zeeb 	}
395*a9655020SBjoern A. Zeeb }
396