// SPDX-License-Identifier: GPL-2.0-or-later
//
// phy-packet-definitions.h - Definitions of PHY packets for IEEE 1394.
//
// Copyright (c) 2024 Takashi Sakamoto

#ifndef _FIREWIRE_PHY_PACKET_DEFINITIONS_H
#define _FIREWIRE_PHY_PACKET_DEFINITIONS_H

#define PACKET_IDENTIFIER_MASK				0xc0000000
#define PACKET_IDENTIFIER_SHIFT				30

static inline unsigned int phy_packet_get_packet_identifier(u32 quadlet)
{
	return (quadlet & PACKET_IDENTIFIER_MASK) >> PACKET_IDENTIFIER_SHIFT;
}

static inline void phy_packet_set_packet_identifier(u32 *quadlet, unsigned int packet_identifier)
{
	*quadlet &= ~PACKET_IDENTIFIER_MASK;
	*quadlet |= (packet_identifier << PACKET_IDENTIFIER_SHIFT) & PACKET_IDENTIFIER_MASK;
}

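/*
 * Usage sketch, for illustration only: dispatching on the packet identifier of a received
 * PHY packet. The quadlet is assumed to be the first quadlet of the packet, already in host
 * byte order and already verified against its inverted companion quadlet by the caller.
 *
 *	switch (phy_packet_get_packet_identifier(quadlet)) {
 *	case PHY_PACKET_PACKET_IDENTIFIER_PHY_CONFIG:	// Defined below.
 *		// Handle PHY configuration packet.
 *		break;
 *	case PHY_PACKET_PACKET_IDENTIFIER_SELF_ID:	// Defined below.
 *		// Handle self ID packet.
 *		break;
 *	default:
 *		break;
 *	}
 */
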
#define PHY_PACKET_PACKET_IDENTIFIER_PHY_CONFIG		0

#define PHY_CONFIG_ROOT_ID_MASK				0x3f000000
#define PHY_CONFIG_ROOT_ID_SHIFT			24
#define PHY_CONFIG_FORCE_ROOT_NODE_MASK			0x00800000
#define PHY_CONFIG_FORCE_ROOT_NODE_SHIFT		23
#define PHY_CONFIG_GAP_COUNT_OPTIMIZATION_MASK		0x00400000
#define PHY_CONFIG_GAP_COUNT_OPTIMIZATION_SHIFT		22
#define PHY_CONFIG_GAP_COUNT_MASK			0x003f0000
#define PHY_CONFIG_GAP_COUNT_SHIFT			16

static inline unsigned int phy_packet_phy_config_get_root_id(u32 quadlet)
{
	return (quadlet & PHY_CONFIG_ROOT_ID_MASK) >> PHY_CONFIG_ROOT_ID_SHIFT;
}

static inline void phy_packet_phy_config_set_root_id(u32 *quadlet, unsigned int root_id)
{
	*quadlet &= ~PHY_CONFIG_ROOT_ID_MASK;
	*quadlet |= (root_id << PHY_CONFIG_ROOT_ID_SHIFT) & PHY_CONFIG_ROOT_ID_MASK;
}

static inline bool phy_packet_phy_config_get_force_root_node(u32 quadlet)
{
	return (quadlet & PHY_CONFIG_FORCE_ROOT_NODE_MASK) >> PHY_CONFIG_FORCE_ROOT_NODE_SHIFT;
}

static inline void phy_packet_phy_config_set_force_root_node(u32 *quadlet, bool has_force_root_node)
{
	*quadlet &= ~PHY_CONFIG_FORCE_ROOT_NODE_MASK;
	*quadlet |= (has_force_root_node << PHY_CONFIG_FORCE_ROOT_NODE_SHIFT) & PHY_CONFIG_FORCE_ROOT_NODE_MASK;
}

static inline bool phy_packet_phy_config_get_gap_count_optimization(u32 quadlet)
{
	return (quadlet & PHY_CONFIG_GAP_COUNT_OPTIMIZATION_MASK) >> PHY_CONFIG_GAP_COUNT_OPTIMIZATION_SHIFT;
}

static inline void phy_packet_phy_config_set_gap_count_optimization(u32 *quadlet, bool has_gap_count_optimization)
{
	*quadlet &= ~PHY_CONFIG_GAP_COUNT_OPTIMIZATION_MASK;
	*quadlet |= (has_gap_count_optimization << PHY_CONFIG_GAP_COUNT_OPTIMIZATION_SHIFT) & PHY_CONFIG_GAP_COUNT_OPTIMIZATION_MASK;
}

static inline unsigned int phy_packet_phy_config_get_gap_count(u32 quadlet)
{
	return (quadlet & PHY_CONFIG_GAP_COUNT_MASK) >> PHY_CONFIG_GAP_COUNT_SHIFT;
}

static inline void phy_packet_phy_config_set_gap_count(u32 *quadlet, unsigned int gap_count)
{
	*quadlet &= ~PHY_CONFIG_GAP_COUNT_MASK;
	*quadlet |= (gap_count << PHY_CONFIG_GAP_COUNT_SHIFT) & PHY_CONFIG_GAP_COUNT_MASK;
}

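/*
 * Usage sketch, for illustration only: composing the first quadlet of a PHY configuration
 * packet which asks the node with PHY ID 3 to become root and sets the gap count of all
 * nodes to 63. The chosen values, and the assumption that the caller transmits the quadlet
 * followed by its bitwise inverse, are part of the example rather than of these helpers.
 *
 *	u32 quadlet = 0;
 *
 *	phy_packet_set_packet_identifier(&quadlet, PHY_PACKET_PACKET_IDENTIFIER_PHY_CONFIG);
 *	phy_packet_phy_config_set_root_id(&quadlet, 3);
 *	phy_packet_phy_config_set_force_root_node(&quadlet, true);
 *	phy_packet_phy_config_set_gap_count(&quadlet, 63);
 *	phy_packet_phy_config_set_gap_count_optimization(&quadlet, true);
 */
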
#define PHY_PACKET_PACKET_IDENTIFIER_SELF_ID		2

#define SELF_ID_PHY_ID_MASK				0x3f000000
#define SELF_ID_PHY_ID_SHIFT				24
#define SELF_ID_EXTENDED_MASK				0x00800000
#define SELF_ID_EXTENDED_SHIFT				23
#define SELF_ID_MORE_PACKETS_MASK			0x00000001
#define SELF_ID_MORE_PACKETS_SHIFT			0

#define SELF_ID_ZERO_LINK_ACTIVE_MASK			0x00400000
#define SELF_ID_ZERO_LINK_ACTIVE_SHIFT			22
#define SELF_ID_ZERO_GAP_COUNT_MASK			0x003f0000
#define SELF_ID_ZERO_GAP_COUNT_SHIFT			16
#define SELF_ID_ZERO_SCODE_MASK				0x0000c000
#define SELF_ID_ZERO_SCODE_SHIFT			14
#define SELF_ID_ZERO_CONTENDER_MASK			0x00000800
#define SELF_ID_ZERO_CONTENDER_SHIFT			11
#define SELF_ID_ZERO_POWER_CLASS_MASK			0x00000700
#define SELF_ID_ZERO_POWER_CLASS_SHIFT			8
#define SELF_ID_ZERO_INITIATED_RESET_MASK		0x00000002
#define SELF_ID_ZERO_INITIATED_RESET_SHIFT		1

#define SELF_ID_EXTENDED_SEQUENCE_MASK			0x00700000
#define SELF_ID_EXTENDED_SEQUENCE_SHIFT			20

#define SELF_ID_PORT_STATUS_MASK			0x3

#define SELF_ID_SEQUENCE_MAXIMUM_QUADLET_COUNT		4

static inline unsigned int phy_packet_self_id_get_phy_id(u32 quadlet)
{
	return (quadlet & SELF_ID_PHY_ID_MASK) >> SELF_ID_PHY_ID_SHIFT;
}

static inline void phy_packet_self_id_set_phy_id(u32 *quadlet, unsigned int phy_id)
{
	*quadlet &= ~SELF_ID_PHY_ID_MASK;
	*quadlet |= (phy_id << SELF_ID_PHY_ID_SHIFT) & SELF_ID_PHY_ID_MASK;
}

static inline bool phy_packet_self_id_get_extended(u32 quadlet)
{
	return (quadlet & SELF_ID_EXTENDED_MASK) >> SELF_ID_EXTENDED_SHIFT;
}

static inline void phy_packet_self_id_set_extended(u32 *quadlet, bool extended)
{
	*quadlet &= ~SELF_ID_EXTENDED_MASK;
	*quadlet |= (extended << SELF_ID_EXTENDED_SHIFT) & SELF_ID_EXTENDED_MASK;
}

static inline bool phy_packet_self_id_zero_get_link_active(u32 quadlet)
{
	return (quadlet & SELF_ID_ZERO_LINK_ACTIVE_MASK) >> SELF_ID_ZERO_LINK_ACTIVE_SHIFT;
}

static inline void phy_packet_self_id_zero_set_link_active(u32 *quadlet, bool is_active)
{
	*quadlet &= ~SELF_ID_ZERO_LINK_ACTIVE_MASK;
	*quadlet |= (is_active << SELF_ID_ZERO_LINK_ACTIVE_SHIFT) & SELF_ID_ZERO_LINK_ACTIVE_MASK;
}

static inline unsigned int phy_packet_self_id_zero_get_gap_count(u32 quadlet)
{
	return (quadlet & SELF_ID_ZERO_GAP_COUNT_MASK) >> SELF_ID_ZERO_GAP_COUNT_SHIFT;
}

static inline void phy_packet_self_id_zero_set_gap_count(u32 *quadlet, unsigned int gap_count)
{
	*quadlet &= ~SELF_ID_ZERO_GAP_COUNT_MASK;
	*quadlet |= (gap_count << SELF_ID_ZERO_GAP_COUNT_SHIFT) & SELF_ID_ZERO_GAP_COUNT_MASK;
}

static inline unsigned int phy_packet_self_id_zero_get_scode(u32 quadlet)
{
	return (quadlet & SELF_ID_ZERO_SCODE_MASK) >> SELF_ID_ZERO_SCODE_SHIFT;
}

static inline void phy_packet_self_id_zero_set_scode(u32 *quadlet, unsigned int speed)
{
	*quadlet &= ~SELF_ID_ZERO_SCODE_MASK;
	*quadlet |= (speed << SELF_ID_ZERO_SCODE_SHIFT) & SELF_ID_ZERO_SCODE_MASK;
}

static inline bool phy_packet_self_id_zero_get_contender(u32 quadlet)
{
	return (quadlet & SELF_ID_ZERO_CONTENDER_MASK) >> SELF_ID_ZERO_CONTENDER_SHIFT;
}

static inline void phy_packet_self_id_zero_set_contender(u32 *quadlet, bool is_contender)
{
	*quadlet &= ~SELF_ID_ZERO_CONTENDER_MASK;
	*quadlet |= (is_contender << SELF_ID_ZERO_CONTENDER_SHIFT) & SELF_ID_ZERO_CONTENDER_MASK;
}

static inline unsigned int phy_packet_self_id_zero_get_power_class(u32 quadlet)
{
	return (quadlet & SELF_ID_ZERO_POWER_CLASS_MASK) >> SELF_ID_ZERO_POWER_CLASS_SHIFT;
}

static inline void phy_packet_self_id_zero_set_power_class(u32 *quadlet, unsigned int power_class)
{
	*quadlet &= ~SELF_ID_ZERO_POWER_CLASS_MASK;
	*quadlet |= (power_class << SELF_ID_ZERO_POWER_CLASS_SHIFT) & SELF_ID_ZERO_POWER_CLASS_MASK;
}

static inline bool phy_packet_self_id_zero_get_initiated_reset(u32 quadlet)
{
	return (quadlet & SELF_ID_ZERO_INITIATED_RESET_MASK) >> SELF_ID_ZERO_INITIATED_RESET_SHIFT;
}

static inline void phy_packet_self_id_zero_set_initiated_reset(u32 *quadlet, bool is_initiated_reset)
{
	*quadlet &= ~SELF_ID_ZERO_INITIATED_RESET_MASK;
	*quadlet |= (is_initiated_reset << SELF_ID_ZERO_INITIATED_RESET_SHIFT) & SELF_ID_ZERO_INITIATED_RESET_MASK;
}

static inline bool phy_packet_self_id_get_more_packets(u32 quadlet)
{
	return (quadlet & SELF_ID_MORE_PACKETS_MASK) >> SELF_ID_MORE_PACKETS_SHIFT;
}

static inline void phy_packet_self_id_set_more_packets(u32 *quadlet, bool is_more_packets)
{
	*quadlet &= ~SELF_ID_MORE_PACKETS_MASK;
	*quadlet |= (is_more_packets << SELF_ID_MORE_PACKETS_SHIFT) & SELF_ID_MORE_PACKETS_MASK;
}

static inline unsigned int phy_packet_self_id_extended_get_sequence(u32 quadlet)
{
	return (quadlet & SELF_ID_EXTENDED_SEQUENCE_MASK) >> SELF_ID_EXTENDED_SEQUENCE_SHIFT;
}

static inline void phy_packet_self_id_extended_set_sequence(u32 *quadlet, unsigned int sequence)
{
	*quadlet &= ~SELF_ID_EXTENDED_SEQUENCE_MASK;
	*quadlet |= (sequence << SELF_ID_EXTENDED_SEQUENCE_SHIFT) & SELF_ID_EXTENDED_SEQUENCE_MASK;
}

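/*
 * Usage sketch, for illustration only: reading a few fields from the first (packet zero)
 * quadlet of a self ID sequence, assumed to be in host byte order. Any following quadlets
 * of the same sequence have the extended bit set and carry an incrementing sequence number.
 *
 *	unsigned int phy_id = phy_packet_self_id_get_phy_id(quadlet);
 *	bool link_active = phy_packet_self_id_zero_get_link_active(quadlet);
 *	unsigned int gap_count = phy_packet_self_id_zero_get_gap_count(quadlet);
 *	unsigned int scode = phy_packet_self_id_zero_get_scode(quadlet);
 *	bool contender = phy_packet_self_id_zero_get_contender(quadlet);
 */
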
struct self_id_sequence_enumerator {
	const u32 *cursor;
	unsigned int quadlet_count;
};

static inline const u32 *self_id_sequence_enumerator_next(
		struct self_id_sequence_enumerator *enumerator, unsigned int *quadlet_count)
{
	const u32 *self_id_sequence, *cursor;
	u32 quadlet;
	unsigned int count;
	unsigned int sequence;

	if (enumerator->cursor == NULL || enumerator->quadlet_count == 0)
		return ERR_PTR(-ENODATA);
	cursor = enumerator->cursor;
	count = 1;

	quadlet = *cursor;
	sequence = 0;
	while (phy_packet_self_id_get_more_packets(quadlet)) {
		if (count >= enumerator->quadlet_count ||
		    count >= SELF_ID_SEQUENCE_MAXIMUM_QUADLET_COUNT)
			return ERR_PTR(-EPROTO);
		++cursor;
		++count;
		quadlet = *cursor;

		if (!phy_packet_self_id_get_extended(quadlet) ||
		    sequence != phy_packet_self_id_extended_get_sequence(quadlet))
			return ERR_PTR(-EPROTO);
		++sequence;
	}

	*quadlet_count = count;
	self_id_sequence = enumerator->cursor;

	enumerator->cursor += count;
	enumerator->quadlet_count -= count;

	return self_id_sequence;
}

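/*
 * Usage sketch, for illustration only: walking the self ID sequences in a buffer of
 * quadlets gathered during bus reset, with byte order and inverted check quadlets assumed
 * to be handled by the caller. ERR_PTR()/IS_ERR() and the error numbers come from
 * <linux/err.h>, which the including file is assumed to pull in.
 *
 *	struct self_id_sequence_enumerator enumerator = {
 *		.cursor = self_ids,
 *		.quadlet_count = self_id_count,
 *	};
 *
 *	while (enumerator.quadlet_count > 0) {
 *		unsigned int quadlet_count;
 *		const u32 *self_id_sequence =
 *			self_id_sequence_enumerator_next(&enumerator, &quadlet_count);
 *
 *		if (IS_ERR(self_id_sequence))
 *			break;	// -ENODATA or -EPROTO.
 *
 *		// Each sequence describes one node; see the port helpers below.
 *	}
 */
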
enum phy_packet_self_id_port_status {
	PHY_PACKET_SELF_ID_PORT_STATUS_NONE = 0,
	PHY_PACKET_SELF_ID_PORT_STATUS_NCONN = 1,
	PHY_PACKET_SELF_ID_PORT_STATUS_PARENT = 2,
	PHY_PACKET_SELF_ID_PORT_STATUS_CHILD = 3,
};

static inline unsigned int self_id_sequence_get_port_capacity(unsigned int quadlet_count)
{
	return quadlet_count * 8 - 5;
}

static inline enum phy_packet_self_id_port_status self_id_sequence_get_port_status(
		const u32 *self_id_sequence, unsigned int quadlet_count, unsigned int port_index)
{
	unsigned int index, shift;

	index = (port_index + 5) / 8;
	shift = 16 - ((port_index + 5) % 8) * 2;

	if (index < quadlet_count && index < SELF_ID_SEQUENCE_MAXIMUM_QUADLET_COUNT)
		return (self_id_sequence[index] >> shift) & SELF_ID_PORT_STATUS_MASK;

	return PHY_PACKET_SELF_ID_PORT_STATUS_NONE;
}

static inline void self_id_sequence_set_port_status(u32 *self_id_sequence, unsigned int quadlet_count,
						    unsigned int port_index,
						    enum phy_packet_self_id_port_status status)
{
	unsigned int index, shift;

	index = (port_index + 5) / 8;
	shift = 16 - ((port_index + 5) % 8) * 2;

	if (index < quadlet_count) {
		self_id_sequence[index] &= ~(SELF_ID_PORT_STATUS_MASK << shift);
		self_id_sequence[index] |= status << shift;
	}
}

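/*
 * Usage sketch, for illustration only: checking the status of every port described by a
 * self ID sequence. Packet zero carries three port fields and each extended packet carries
 * eight more, hence the capacity formula of quadlet_count * 8 - 5.
 *
 *	unsigned int port_capacity = self_id_sequence_get_port_capacity(quadlet_count);
 *	unsigned int port_index;
 *
 *	for (port_index = 0; port_index < port_capacity; ++port_index) {
 *		enum phy_packet_self_id_port_status status =
 *			self_id_sequence_get_port_status(self_id_sequence, quadlet_count, port_index);
 *
 *		if (status == PHY_PACKET_SELF_ID_PORT_STATUS_CHILD) {
 *			// The peer on this port is a child of the node.
 *		}
 *	}
 */
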
#endif // _FIREWIRE_PHY_PACKET_DEFINITIONS_H