xref: /linux/include/trace/events/dma.h (revision 69050f8d6d075dc01af7a5f2f550a8067510366f)
1 /* SPDX-License-Identifier: GPL-2.0 */
2 #undef TRACE_SYSTEM
3 #define TRACE_SYSTEM dma
4 
5 #if !defined(_TRACE_DMA_H) || defined(TRACE_HEADER_MULTI_READ)
6 #define _TRACE_DMA_H
7 
8 #include <linux/tracepoint.h>
9 #include <linux/dma-direction.h>
10 #include <linux/dma-mapping.h>
11 #include <trace/events/mmflags.h>
12 
/*
 * Export the enum dma_data_direction values to the trace infrastructure so
 * user-space tools can resolve the symbolic names used in the format strings.
 */
13 TRACE_DEFINE_ENUM(DMA_BIDIRECTIONAL);
14 TRACE_DEFINE_ENUM(DMA_TO_DEVICE);
15 TRACE_DEFINE_ENUM(DMA_FROM_DEVICE);
16 TRACE_DEFINE_ENUM(DMA_NONE);
17 
/* Render an enum dma_data_direction value as a human-readable string. */
18 #define decode_dma_data_direction(dir) \
19 	__print_symbolic(dir, \
20 		{ DMA_BIDIRECTIONAL, "BIDIRECTIONAL" }, \
21 		{ DMA_TO_DEVICE, "TO_DEVICE" }, \
22 		{ DMA_FROM_DEVICE, "FROM_DEVICE" }, \
23 		{ DMA_NONE, "NONE" })
24 
/* Render a DMA_ATTR_* bitmask as a '|'-separated list of flag names. */
25 #define decode_dma_attrs(attrs) \
26 	__print_flags(attrs, "|", \
27 		{ DMA_ATTR_WEAK_ORDERING, "WEAK_ORDERING" }, \
28 		{ DMA_ATTR_WRITE_COMBINE, "WRITE_COMBINE" }, \
29 		{ DMA_ATTR_NO_KERNEL_MAPPING, "NO_KERNEL_MAPPING" }, \
30 		{ DMA_ATTR_SKIP_CPU_SYNC, "SKIP_CPU_SYNC" }, \
31 		{ DMA_ATTR_FORCE_CONTIGUOUS, "FORCE_CONTIGUOUS" }, \
32 		{ DMA_ATTR_ALLOC_SINGLE_PAGES, "ALLOC_SINGLE_PAGES" }, \
33 		{ DMA_ATTR_NO_WARN, "NO_WARN" }, \
34 		{ DMA_ATTR_PRIVILEGED, "PRIVILEGED" }, \
35 		{ DMA_ATTR_MMIO, "MMIO" })
36 
/*
 * Event class for single-region map operations: records the device name,
 * the CPU physical address, the resulting bus/DMA address, the mapped size,
 * the transfer direction and the DMA_ATTR_* flags.
 */
37 DECLARE_EVENT_CLASS(dma_map,
38 	TP_PROTO(struct device *dev, phys_addr_t phys_addr, dma_addr_t dma_addr,
39 		 size_t size, enum dma_data_direction dir, unsigned long attrs),
40 	TP_ARGS(dev, phys_addr, dma_addr, size, dir, attrs),
41 
42 	TP_STRUCT__entry(
43 		__string(device, dev_name(dev))
44 		__field(u64, phys_addr)
45 		__field(u64, dma_addr)
46 		__field(size_t, size)
47 		__field(enum dma_data_direction, dir)
48 		__field(unsigned long, attrs)
49 	),
50 
51 	TP_fast_assign(
52 		__assign_str(device);
53 		__entry->phys_addr = phys_addr;
54 		__entry->dma_addr = dma_addr;
55 		__entry->size = size;
56 		__entry->dir = dir;
57 		__entry->attrs = attrs;
58 	),
59 
60 	TP_printk("%s dir=%s dma_addr=%llx size=%zu phys_addr=%llx attrs=%s",
61 		__get_str(device),
62 		decode_dma_data_direction(__entry->dir),
63 		__entry->dma_addr,
64 		__entry->size,
65 		__entry->phys_addr,
66 		decode_dma_attrs(__entry->attrs))
67 );
68 
/* Instantiate a concrete event from the dma_map class above. */
69 #define DEFINE_MAP_EVENT(name) \
70 DEFINE_EVENT(dma_map, name, \
71 	TP_PROTO(struct device *dev, phys_addr_t phys_addr, dma_addr_t dma_addr, \
72 		 size_t size, enum dma_data_direction dir, unsigned long attrs), \
73 	TP_ARGS(dev, phys_addr, dma_addr, size, dir, attrs))
74 
75 DEFINE_MAP_EVENT(dma_map_phys);
76 
/*
 * Event class for single-region unmap operations: like dma_map, but only
 * the DMA address is known at unmap time (no CPU physical address field).
 */
77 DECLARE_EVENT_CLASS(dma_unmap,
78 	TP_PROTO(struct device *dev, dma_addr_t addr, size_t size,
79 		 enum dma_data_direction dir, unsigned long attrs),
80 	TP_ARGS(dev, addr, size, dir, attrs),
81 
82 	TP_STRUCT__entry(
83 		__string(device, dev_name(dev))
84 		__field(u64, addr)
85 		__field(size_t, size)
86 		__field(enum dma_data_direction, dir)
87 		__field(unsigned long, attrs)
88 	),
89 
90 	TP_fast_assign(
91 		__assign_str(device);
92 		__entry->addr = addr;
93 		__entry->size = size;
94 		__entry->dir = dir;
95 		__entry->attrs = attrs;
96 	),
97 
98 	TP_printk("%s dir=%s dma_addr=%llx size=%zu attrs=%s",
99 		__get_str(device),
100 		decode_dma_data_direction(__entry->dir),
101 		__entry->addr,
102 		__entry->size,
103 		decode_dma_attrs(__entry->attrs))
104 );
105 
/* Instantiate a concrete event from the dma_unmap class above. */
106 #define DEFINE_UNMAP_EVENT(name) \
107 DEFINE_EVENT(dma_unmap, name, \
108 	TP_PROTO(struct device *dev, dma_addr_t addr, size_t size, \
109 		 enum dma_data_direction dir, unsigned long attrs), \
110 	TP_ARGS(dev, addr, size, dir, attrs))
111 
112 DEFINE_UNMAP_EVENT(dma_unmap_phys);
113 
/*
 * Event class for coherent/page allocations: records the kernel virtual
 * address, DMA address, size, direction, gfp allocation flags and
 * DMA_ATTR_* attributes of the allocation.
 */
114 DECLARE_EVENT_CLASS(dma_alloc_class,
115 	TP_PROTO(struct device *dev, void *virt_addr, dma_addr_t dma_addr,
116 		 size_t size, enum dma_data_direction dir, gfp_t flags,
117 		 unsigned long attrs),
118 	TP_ARGS(dev, virt_addr, dma_addr, size, dir, flags, attrs),
119 
120 	TP_STRUCT__entry(
121 		__string(device, dev_name(dev))
122 		__field(void *, virt_addr)
123 		__field(u64, dma_addr)
124 		__field(size_t, size)
125 		__field(gfp_t, flags)
126 		__field(enum dma_data_direction, dir)
127 		__field(unsigned long, attrs)
128 	),
129 
130 	TP_fast_assign(
131 		__assign_str(device);
132 		__entry->virt_addr = virt_addr;
133 		__entry->dma_addr = dma_addr;
134 		__entry->size = size;
135 		__entry->flags = flags;
136 		__entry->dir = dir;
137 		__entry->attrs = attrs;
138 	),
139 
140 	TP_printk("%s dir=%s dma_addr=%llx size=%zu virt_addr=%p flags=%s attrs=%s",
141 		__get_str(device),
142 		decode_dma_data_direction(__entry->dir),
143 		__entry->dma_addr,
144 		__entry->size,
145 		__entry->virt_addr,
146 		show_gfp_flags(__entry->flags),
147 		decode_dma_attrs(__entry->attrs))
148 );
149 
/* Instantiate a concrete event from the dma_alloc_class above. */
150 #define DEFINE_ALLOC_EVENT(name) \
151 DEFINE_EVENT(dma_alloc_class, name, \
152 	TP_PROTO(struct device *dev, void *virt_addr, dma_addr_t dma_addr, \
153 		 size_t size, enum dma_data_direction dir, gfp_t flags, \
154 		 unsigned long attrs), \
155 	TP_ARGS(dev, virt_addr, dma_addr, size, dir, flags, attrs))
156 
157 DEFINE_ALLOC_EVENT(dma_alloc);
158 DEFINE_ALLOC_EVENT(dma_alloc_pages);
159 DEFINE_ALLOC_EVENT(dma_alloc_sgt_err);
160 
/*
 * Scatter-gather allocation: records the CPU physical address of every
 * original segment plus allocation parameters.  Note that dma_addr is only
 * the DMA address of the first segment (sgt->sgl).
 */
161 TRACE_EVENT(dma_alloc_sgt,
162 	TP_PROTO(struct device *dev, struct sg_table *sgt, size_t size,
163 		 enum dma_data_direction dir, gfp_t flags, unsigned long attrs),
164 	TP_ARGS(dev, sgt, size, dir, flags, attrs),
165 
166 	TP_STRUCT__entry(
167 		__string(device, dev_name(dev))
168 		__dynamic_array(u64, phys_addrs, sgt->orig_nents)
169 		__field(u64, dma_addr)
170 		__field(size_t, size)
171 		__field(enum dma_data_direction, dir)
172 		__field(gfp_t, flags)
173 		__field(unsigned long, attrs)
174 	),
175 
176 	TP_fast_assign(
177 		struct scatterlist *sg;
178 		int i;
179 
180 		__assign_str(device);
181 		for_each_sg(sgt->sgl, sg, sgt->orig_nents, i)
182 			((u64 *)__get_dynamic_array(phys_addrs))[i] = sg_phys(sg);
183 		__entry->dma_addr = sg_dma_address(sgt->sgl);
184 		__entry->size = size;
185 		__entry->dir = dir;
186 		__entry->flags = flags;
187 		__entry->attrs = attrs;
188 	),
189 
190 	TP_printk("%s dir=%s dma_addr=%llx size=%zu phys_addrs=%s flags=%s attrs=%s",
191 		__get_str(device),
192 		decode_dma_data_direction(__entry->dir),
193 		__entry->dma_addr,
194 		__entry->size,
195 		__print_array(__get_dynamic_array(phys_addrs),
196 			      __get_dynamic_array_len(phys_addrs) /
197 				sizeof(u64), sizeof(u64)),
198 		show_gfp_flags(__entry->flags),
199 		decode_dma_attrs(__entry->attrs))
200 );
201 
/*
 * Event class for freeing coherent/page allocations: mirrors
 * dma_alloc_class minus the gfp flags (not meaningful at free time).
 */
202 DECLARE_EVENT_CLASS(dma_free_class,
203 	TP_PROTO(struct device *dev, void *virt_addr, dma_addr_t dma_addr,
204 		 size_t size, enum dma_data_direction dir, unsigned long attrs),
205 	TP_ARGS(dev, virt_addr, dma_addr, size, dir, attrs),
206 
207 	TP_STRUCT__entry(
208 		__string(device, dev_name(dev))
209 		__field(void *, virt_addr)
210 		__field(u64, dma_addr)
211 		__field(size_t, size)
212 		__field(enum dma_data_direction, dir)
213 		__field(unsigned long, attrs)
214 	),
215 
216 	TP_fast_assign(
217 		__assign_str(device);
218 		__entry->virt_addr = virt_addr;
219 		__entry->dma_addr = dma_addr;
220 		__entry->size = size;
221 		__entry->dir = dir;
222 		__entry->attrs = attrs;
223 	),
224 
225 	TP_printk("%s dir=%s dma_addr=%llx size=%zu virt_addr=%p attrs=%s",
226 		__get_str(device),
227 		decode_dma_data_direction(__entry->dir),
228 		__entry->dma_addr,
229 		__entry->size,
230 		__entry->virt_addr,
231 		decode_dma_attrs(__entry->attrs))
232 );
233 
/* Instantiate a concrete event from the dma_free_class above. */
234 #define DEFINE_FREE_EVENT(name) \
235 DEFINE_EVENT(dma_free_class, name, \
236 	TP_PROTO(struct device *dev, void *virt_addr, dma_addr_t dma_addr, \
237 		 size_t size, enum dma_data_direction dir, unsigned long attrs), \
238 	TP_ARGS(dev, virt_addr, dma_addr, size, dir, attrs))
239 
240 DEFINE_FREE_EVENT(dma_free);
241 DEFINE_FREE_EVENT(dma_free_pages);
242 
/*
 * Scatter-gather free: counterpart of dma_alloc_sgt.  Records the CPU
 * physical address of every original segment; dma_addr is only the DMA
 * address of the first segment (sgt->sgl).
 */
243 TRACE_EVENT(dma_free_sgt,
244 	TP_PROTO(struct device *dev, struct sg_table *sgt, size_t size,
245 		 enum dma_data_direction dir),
246 	TP_ARGS(dev, sgt, size, dir),
247 
248 	TP_STRUCT__entry(
249 		__string(device, dev_name(dev))
250 		__dynamic_array(u64, phys_addrs, sgt->orig_nents)
251 		__field(u64, dma_addr)
252 		__field(size_t, size)
253 		__field(enum dma_data_direction, dir)
254 	),
255 
256 	TP_fast_assign(
257 		struct scatterlist *sg;
258 		int i;
259 
260 		__assign_str(device);
261 		for_each_sg(sgt->sgl, sg, sgt->orig_nents, i)
262 			((u64 *)__get_dynamic_array(phys_addrs))[i] = sg_phys(sg);
263 		__entry->dma_addr = sg_dma_address(sgt->sgl);
264 		__entry->size = size;
265 		__entry->dir = dir;
266 	),
267 
268 	TP_printk("%s dir=%s dma_addr=%llx size=%zu phys_addrs=%s",
269 		__get_str(device),
270 		decode_dma_data_direction(__entry->dir),
271 		__entry->dma_addr,
272 		__entry->size,
273 		__print_array(__get_dynamic_array(phys_addrs),
274 			      __get_dynamic_array_len(phys_addrs) /
275 				sizeof(u64), sizeof(u64)))
276 );
277 
/*
 * Cap on how many scatterlist entries a single dma_map_sg event records,
 * to bound the per-event ring-buffer footprint.
 */
278 #define DMA_TRACE_MAX_ENTRIES 128
279 
/*
 * Scatter-gather map: records per-segment CPU physical addresses (from the
 * nents input segments) and per-segment DMA addresses/lengths (from the
 * ents mapped segments).  Each array is truncated to
 * DMA_TRACE_MAX_ENTRIES; full_nents/full_ents keep the untruncated counts
 * and 'truncated' flags when any array was cut short.
 */
280 TRACE_EVENT(dma_map_sg,
281 	TP_PROTO(struct device *dev, struct scatterlist *sgl, int nents,
282 		 int ents, enum dma_data_direction dir, unsigned long attrs),
283 	TP_ARGS(dev, sgl, nents, ents, dir, attrs),
284 
285 	TP_STRUCT__entry(
286 		__string(device, dev_name(dev))
287 		__field(int, full_nents)
288 		__field(int, full_ents)
289 		__field(bool, truncated)
290 		__dynamic_array(u64, phys_addrs,  min(nents, DMA_TRACE_MAX_ENTRIES))
291 		__dynamic_array(u64, dma_addrs, min(ents, DMA_TRACE_MAX_ENTRIES))
292 		__dynamic_array(unsigned int, lengths, min(ents, DMA_TRACE_MAX_ENTRIES))
293 		__field(enum dma_data_direction, dir)
294 		__field(unsigned long, attrs)
295 	),
296 
297 	TP_fast_assign(
298 		struct scatterlist *sg;
299 		int i;
300 		int traced_nents = min_t(int, nents, DMA_TRACE_MAX_ENTRIES);
301 		int traced_ents = min_t(int, ents, DMA_TRACE_MAX_ENTRIES);
302 
303 		__assign_str(device);
304 		__entry->full_nents = nents;
305 		__entry->full_ents = ents;
306 		__entry->truncated = (nents > DMA_TRACE_MAX_ENTRIES) || (ents > DMA_TRACE_MAX_ENTRIES);
307 		for_each_sg(sgl, sg, traced_nents, i)
308 			((u64 *)__get_dynamic_array(phys_addrs))[i] = sg_phys(sg);
309 		for_each_sg(sgl, sg, traced_ents, i) {
310 			((u64 *)__get_dynamic_array(dma_addrs))[i] =
311 				sg_dma_address(sg);
312 			((unsigned int *)__get_dynamic_array(lengths))[i] =
313 				sg_dma_len(sg);
314 		}
315 		__entry->dir = dir;
316 		__entry->attrs = attrs;
317 	),
318 
319 	TP_printk("%s dir=%s nents=%d/%d ents=%d/%d%s dma_addrs=%s sizes=%s phys_addrs=%s attrs=%s",
320 		__get_str(device),
321 		decode_dma_data_direction(__entry->dir),
322 		min_t(int, __entry->full_nents, DMA_TRACE_MAX_ENTRIES), __entry->full_nents,
323 		min_t(int, __entry->full_ents, DMA_TRACE_MAX_ENTRIES), __entry->full_ents,
324 		__entry->truncated ? " [TRUNCATED]" : "",
325 		__print_array(__get_dynamic_array(dma_addrs),
326 			      __get_dynamic_array_len(dma_addrs) /
327 				sizeof(u64), sizeof(u64)),
328 		__print_array(__get_dynamic_array(lengths),
329 			      __get_dynamic_array_len(lengths) /
330 				sizeof(unsigned int), sizeof(unsigned int)),
331 		__print_array(__get_dynamic_array(phys_addrs),
332 			      __get_dynamic_array_len(phys_addrs) /
333 				sizeof(u64), sizeof(u64)),
334 		decode_dma_attrs(__entry->attrs))
335 );
336 
/*
 * Failed scatter-gather map: records the CPU physical address of each
 * input segment (sg_phys) plus the error code.  No DMA addresses exist on
 * this path, since the mapping failed.
 *
 * Fix: the format string labelled the printed array "dma_addrs" although
 * it holds CPU physical addresses; label it "phys_addrs" to match the
 * field name and the dma_map_sg/dma_unmap_sg events.
 *
 * NOTE(review): unlike dma_map_sg, the array here is not capped at
 * DMA_TRACE_MAX_ENTRIES — confirm whether a cap is wanted for large lists.
 */
337 TRACE_EVENT(dma_map_sg_err,
338 	TP_PROTO(struct device *dev, struct scatterlist *sgl, int nents,
339 		 int err, enum dma_data_direction dir, unsigned long attrs),
340 	TP_ARGS(dev, sgl, nents, err, dir, attrs),
341 
342 	TP_STRUCT__entry(
343 		__string(device, dev_name(dev))
344 		__dynamic_array(u64, phys_addrs, nents)
345 		__field(int, err)
346 		__field(enum dma_data_direction, dir)
347 		__field(unsigned long, attrs)
348 	),
349 
350 	TP_fast_assign(
351 		struct scatterlist *sg;
352 		int i;
353 
354 		__assign_str(device);
355 		for_each_sg(sgl, sg, nents, i)
356 			((u64 *)__get_dynamic_array(phys_addrs))[i] = sg_phys(sg);
357 		__entry->err = err;
358 		__entry->dir = dir;
359 		__entry->attrs = attrs;
360 	),
361 
362 	TP_printk("%s dir=%s phys_addrs=%s err=%d attrs=%s",
363 		__get_str(device),
364 		decode_dma_data_direction(__entry->dir),
365 		__print_array(__get_dynamic_array(phys_addrs),
366 			      __get_dynamic_array_len(phys_addrs) /
367 				sizeof(u64), sizeof(u64)),
368 		__entry->err,
369 		decode_dma_attrs(__entry->attrs))
370 );
371 
/*
 * Scatter-gather unmap: records the CPU physical address (sg_phys) of each
 * of the nents input segments; the internal array is named 'addrs' but is
 * printed under the 'phys_addrs' label.
 */
372 TRACE_EVENT(dma_unmap_sg,
373 	TP_PROTO(struct device *dev, struct scatterlist *sgl, int nents,
374 		 enum dma_data_direction dir, unsigned long attrs),
375 	TP_ARGS(dev, sgl, nents, dir, attrs),
376 
377 	TP_STRUCT__entry(
378 		__string(device, dev_name(dev))
379 		__dynamic_array(u64, addrs, nents)
380 		__field(enum dma_data_direction, dir)
381 		__field(unsigned long, attrs)
382 	),
383 
384 	TP_fast_assign(
385 		struct scatterlist *sg;
386 		int i;
387 
388 		__assign_str(device);
389 		for_each_sg(sgl, sg, nents, i)
390 			((u64 *)__get_dynamic_array(addrs))[i] = sg_phys(sg);
391 		__entry->dir = dir;
392 		__entry->attrs = attrs;
393 	),
394 
395 	TP_printk("%s dir=%s phys_addrs=%s attrs=%s",
396 		__get_str(device),
397 		decode_dma_data_direction(__entry->dir),
398 		__print_array(__get_dynamic_array(addrs),
399 			      __get_dynamic_array_len(addrs) /
400 				sizeof(u64), sizeof(u64)),
401 		decode_dma_attrs(__entry->attrs))
402 );
403 
/*
 * Event class for CPU/device cache-sync of a single mapped region:
 * records device name, DMA address, size and direction.
 */
404 DECLARE_EVENT_CLASS(dma_sync_single,
405 	TP_PROTO(struct device *dev, dma_addr_t dma_addr, size_t size,
406 		 enum dma_data_direction dir),
407 	TP_ARGS(dev, dma_addr, size, dir),
408 
409 	TP_STRUCT__entry(
410 		__string(device, dev_name(dev))
411 		__field(u64, dma_addr)
412 		__field(size_t, size)
413 		__field(enum dma_data_direction, dir)
414 	),
415 
416 	TP_fast_assign(
417 		__assign_str(device);
418 		__entry->dma_addr = dma_addr;
419 		__entry->size = size;
420 		__entry->dir = dir;
421 	),
422 
423 	TP_printk("%s dir=%s dma_addr=%llx size=%zu",
424 		__get_str(device),
425 		decode_dma_data_direction(__entry->dir),
426 		__entry->dma_addr,
427 		__entry->size)
428 );
429 
/* Instantiate the for_cpu/for_device pair from the dma_sync_single class. */
430 #define DEFINE_SYNC_SINGLE_EVENT(name) \
431 DEFINE_EVENT(dma_sync_single, name, \
432 	TP_PROTO(struct device *dev, dma_addr_t dma_addr, size_t size, \
433 		 enum dma_data_direction dir), \
434 	TP_ARGS(dev, dma_addr, size, dir))
435 
436 DEFINE_SYNC_SINGLE_EVENT(dma_sync_single_for_cpu);
437 DEFINE_SYNC_SINGLE_EVENT(dma_sync_single_for_device);
438 
/*
 * Event class for CPU/device cache-sync of a scatter-gather list:
 * records per-segment DMA addresses and DMA lengths for nents segments.
 */
439 DECLARE_EVENT_CLASS(dma_sync_sg,
440 	TP_PROTO(struct device *dev, struct scatterlist *sgl, int nents,
441 		 enum dma_data_direction dir),
442 	TP_ARGS(dev, sgl, nents, dir),
443 
444 	TP_STRUCT__entry(
445 		__string(device, dev_name(dev))
446 		__dynamic_array(u64, dma_addrs, nents)
447 		__dynamic_array(unsigned int, lengths, nents)
448 		__field(enum dma_data_direction, dir)
449 	),
450 
451 	TP_fast_assign(
452 		struct scatterlist *sg;
453 		int i;
454 
455 		__assign_str(device);
456 		for_each_sg(sgl, sg, nents, i) {
457 			((u64 *)__get_dynamic_array(dma_addrs))[i] =
458 				sg_dma_address(sg);
459 			((unsigned int *)__get_dynamic_array(lengths))[i] =
460 				sg_dma_len(sg);
461 		}
462 		__entry->dir = dir;
463 	),
464 
465 	TP_printk("%s dir=%s dma_addrs=%s sizes=%s",
466 		__get_str(device),
467 		decode_dma_data_direction(__entry->dir),
468 		__print_array(__get_dynamic_array(dma_addrs),
469 			      __get_dynamic_array_len(dma_addrs) /
470 				sizeof(u64), sizeof(u64)),
471 		__print_array(__get_dynamic_array(lengths),
472 			      __get_dynamic_array_len(lengths) /
473 				sizeof(unsigned int), sizeof(unsigned int)))
474 );
475 
/* Instantiate the for_cpu/for_device pair from the dma_sync_sg class. */
476 #define DEFINE_SYNC_SG_EVENT(name) \
477 DEFINE_EVENT(dma_sync_sg, name, \
478 	TP_PROTO(struct device *dev, struct scatterlist *sg, int nents, \
479 		 enum dma_data_direction dir), \
480 	TP_ARGS(dev, sg, nents, dir))
481 
482 DEFINE_SYNC_SG_EVENT(dma_sync_sg_for_cpu);
483 DEFINE_SYNC_SG_EVENT(dma_sync_sg_for_device);
484 
485 #endif /*  _TRACE_DMA_H */
486 
487 /* This part must be outside protection */
488 #include <trace/define_trace.h>
489