/*
 * SPDX-License-Identifier: MIT
 *
 * Copyright © 2016 Intel Corporation
 */

#include "i915_drv.h"
#include "i915_selftest.h"
#include "gem/i915_gem_context.h"
#include "gt/intel_gt.h"

#include "mock_context.h"
#include "mock_dmabuf.h"
#include "igt_gem_utils.h"
#include "selftests/mock_drm.h"
#include "selftests/mock_gem_device.h"

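/*
 * Smoke test for dma-buf export: create a shmem object, export it, and
 * drop our object reference before releasing the dma-buf, relying on the
 * export to keep the object alive.
 */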
static int igt_dmabuf_export(void *arg)
{
	struct drm_i915_private *i915 = arg;
	struct drm_i915_gem_object *obj;
	struct dma_buf *dmabuf;

	obj = i915_gem_object_create_shmem(i915, PAGE_SIZE);
	if (IS_ERR(obj))
		return PTR_ERR(obj);

	dmabuf = i915_gem_prime_export(&obj->base, 0);
	i915_gem_object_put(obj);
	if (IS_ERR(dmabuf)) {
		pr_err("i915_gem_prime_export failed with err=%d\n",
		       (int)PTR_ERR(dmabuf));
		return PTR_ERR(dmabuf);
	}

	dma_buf_put(dmabuf);
	return 0;
}

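/*
 * Importing our own dma-buf back into the same device must return the
 * original GEM object, not wrap it in a new one.
 */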
static int igt_dmabuf_import_self(void *arg)
{
	struct drm_i915_private *i915 = arg;
	struct drm_i915_gem_object *obj, *import_obj;
	struct drm_gem_object *import;
	struct dma_buf *dmabuf;
	int err;

	obj = i915_gem_object_create_shmem(i915, PAGE_SIZE);
	if (IS_ERR(obj))
		return PTR_ERR(obj);

	dmabuf = i915_gem_prime_export(&obj->base, 0);
	if (IS_ERR(dmabuf)) {
		pr_err("i915_gem_prime_export failed with err=%d\n",
		       (int)PTR_ERR(dmabuf));
		err = PTR_ERR(dmabuf);
		goto out;
	}

	import = i915_gem_prime_import(&i915->drm, dmabuf);
	if (IS_ERR(import)) {
		pr_err("i915_gem_prime_import failed with err=%d\n",
		       (int)PTR_ERR(import));
		err = PTR_ERR(import);
		goto out_dmabuf;
	}
	import_obj = to_intel_bo(import);

	if (import != &obj->base) {
		pr_err("i915_gem_prime_import created a new object!\n");
		err = -EINVAL;
		goto out_import;
	}

	i915_gem_object_lock(import_obj, NULL);
	err = __i915_gem_object_get_pages(import_obj);
	i915_gem_object_unlock(import_obj);
	if (err) {
		pr_err("Same object dma-buf get_pages failed!\n");
		goto out_import;
	}

	err = 0;
out_import:
	i915_gem_object_put(import_obj);
out_dmabuf:
	dma_buf_put(dmabuf);
out:
	i915_gem_object_put(obj);
	return err;
}

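/*
 * With force_different_devices set, the self-import shortcut is disabled
 * and the import must go through the full attach path. An LMEM-only
 * object cannot be migrated to system memory for that, so the import is
 * expected to fail with -EOPNOTSUPP.
 */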
static int igt_dmabuf_import_same_driver_lmem(void *arg)
{
	struct drm_i915_private *i915 = arg;
	struct intel_memory_region *lmem = i915->mm.regions[INTEL_REGION_LMEM_0];
	struct drm_i915_gem_object *obj;
	struct drm_gem_object *import;
	struct dma_buf *dmabuf;
	int err;

	if (!lmem)
		return 0;

	force_different_devices = true;

	obj = __i915_gem_object_create_user(i915, PAGE_SIZE, &lmem, 1);
	if (IS_ERR(obj)) {
		pr_err("__i915_gem_object_create_user failed with err=%ld\n",
		       PTR_ERR(obj));
		err = PTR_ERR(obj);
		goto out_ret;
	}

	dmabuf = i915_gem_prime_export(&obj->base, 0);
	if (IS_ERR(dmabuf)) {
		pr_err("i915_gem_prime_export failed with err=%ld\n",
		       PTR_ERR(dmabuf));
		err = PTR_ERR(dmabuf);
		goto out;
	}

	/*
	 * We expect an import of an LMEM-only object to fail with
	 * -EOPNOTSUPP because it can't be migrated to SMEM.
	 */
	import = i915_gem_prime_import(&i915->drm, dmabuf);
	if (!IS_ERR(import)) {
		drm_gem_object_put(import);
		pr_err("i915_gem_prime_import succeeded when it shouldn't have\n");
		err = -EINVAL;
	} else if (PTR_ERR(import) != -EOPNOTSUPP) {
		pr_err("i915_gem_prime_import failed with the wrong err=%ld\n",
		       PTR_ERR(import));
		err = PTR_ERR(import);
	} else {
		err = 0;
	}

	dma_buf_put(dmabuf);
out:
	i915_gem_object_put(obj);
out_ret:
	force_different_devices = false;
	return err;
}

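/*
 * Fill the imported object from the GPU, then read the values back
 * through a CPU mapping of the native (exported) object to check that
 * both handles really alias the same backing store.
 */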
static int verify_access(struct drm_i915_private *i915,
			 struct drm_i915_gem_object *native_obj,
			 struct drm_i915_gem_object *import_obj)
{
	struct i915_gem_engines_iter it;
	struct i915_gem_context *ctx;
	struct intel_context *ce;
	struct i915_vma *vma;
	struct file *file;
	u32 *vaddr;
	int err = 0, i;
	unsigned int mode;

	file = mock_file(i915);
	if (IS_ERR(file))
		return PTR_ERR(file);

	ctx = live_context(i915, file);
	if (IS_ERR(ctx)) {
		err = PTR_ERR(ctx);
		goto out_file;
	}

	for_each_gem_engine(ce, i915_gem_context_lock_engines(ctx), it) {
		if (intel_engine_can_store_dword(ce->engine))
			break;
	}
	i915_gem_context_unlock_engines(ctx);
	if (!ce)
		goto out_file;

	vma = i915_vma_instance(import_obj, ce->vm, NULL);
	if (IS_ERR(vma)) {
		err = PTR_ERR(vma);
		goto out_file;
	}

	err = i915_vma_pin(vma, 0, 0, PIN_USER);
	if (err)
		goto out_file;

	err = igt_gpu_fill_dw(ce, vma, 0,
			      vma->size >> PAGE_SHIFT, 0xdeadbeaf);
	i915_vma_unpin(vma);
	if (err)
		goto out_file;

	err = i915_gem_object_wait(import_obj, 0, MAX_SCHEDULE_TIMEOUT);
	if (err)
		goto out_file;

	mode = intel_gt_coherent_map_type(to_gt(i915), native_obj, false);
	vaddr = i915_gem_object_pin_map_unlocked(native_obj, mode);
	if (IS_ERR(vaddr)) {
		err = PTR_ERR(vaddr);
		goto out_file;
	}

	for (i = 0; i < native_obj->base.size / sizeof(u32); i += PAGE_SIZE / sizeof(u32)) {
		if (vaddr[i] != 0xdeadbeaf) {
			pr_err("Data mismatch [%d]=%u\n", i, vaddr[i]);
			err = -EINVAL;
			goto out_file;
		}
	}

out_file:
	fput(file);
	return err;
}

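/*
 * Common body for the same-driver import tests: pretending the importer
 * is a different device must produce a distinct GEM object wrapping the
 * dma-buf. Verify the two objects share their pages, that the export is
 * backed by system memory, and that a bare dma-buf attachment can map
 * the pages and see the GPU write fence signal.
 */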
static int igt_dmabuf_import_same_driver(struct drm_i915_private *i915,
					 struct intel_memory_region **regions,
					 unsigned int num_regions)
{
	struct drm_i915_gem_object *obj, *import_obj;
	struct drm_gem_object *import;
	struct dma_buf *dmabuf;
	struct dma_buf_attachment *import_attach;
	struct sg_table *st;
	long timeout;
	int err;

	force_different_devices = true;

	obj = __i915_gem_object_create_user(i915, SZ_8M,
					    regions, num_regions);
	if (IS_ERR(obj)) {
		pr_err("__i915_gem_object_create_user failed with err=%ld\n",
		       PTR_ERR(obj));
		err = PTR_ERR(obj);
		goto out_ret;
	}

	dmabuf = i915_gem_prime_export(&obj->base, 0);
	if (IS_ERR(dmabuf)) {
		pr_err("i915_gem_prime_export failed with err=%ld\n",
		       PTR_ERR(dmabuf));
		err = PTR_ERR(dmabuf);
		goto out;
	}

	import = i915_gem_prime_import(&i915->drm, dmabuf);
	if (IS_ERR(import)) {
		pr_err("i915_gem_prime_import failed with err=%ld\n",
		       PTR_ERR(import));
		err = PTR_ERR(import);
		goto out_dmabuf;
	}
	import_obj = to_intel_bo(import);

	if (import == &obj->base) {
		pr_err("i915_gem_prime_import reused gem object!\n");
		err = -EINVAL;
		goto out_import;
	}

	i915_gem_object_lock(import_obj, NULL);
	err = __i915_gem_object_get_pages(import_obj);
	if (err) {
		pr_err("Different objects dma-buf get_pages failed!\n");
		i915_gem_object_unlock(import_obj);
		goto out_import;
	}

	/*
	 * If the exported object is not in system memory, something
	 * weird is going on. TODO: When p2p is supported, this is no
	 * longer considered weird.
	 */
	if (obj->mm.region != i915->mm.regions[INTEL_REGION_SMEM]) {
		pr_err("Exported dma-buf is not in system memory\n");
		err = -EINVAL;
	}

	i915_gem_object_unlock(import_obj);
	if (err) /* don't let a later success overwrite the region error */
		goto out_import;

	err = verify_access(i915, obj, import_obj);
	if (err)
		goto out_import;

	/* Now try a fake importer */
	import_attach = dma_buf_attach(dmabuf, obj->base.dev->dev);
	if (IS_ERR(import_attach)) {
		err = PTR_ERR(import_attach);
		goto out_import;
	}

	st = dma_buf_map_attachment_unlocked(import_attach, DMA_BIDIRECTIONAL);
	if (IS_ERR(st)) {
		err = PTR_ERR(st);
		goto out_detach;
	}

	timeout = dma_resv_wait_timeout(dmabuf->resv, DMA_RESV_USAGE_WRITE,
					true, 5 * HZ);
	if (!timeout) {
		pr_err("dmabuf wait for exclusive fence timed out.\n");
		timeout = -ETIME;
	}
	err = timeout > 0 ? 0 : timeout;
	dma_buf_unmap_attachment_unlocked(import_attach, st, DMA_BIDIRECTIONAL);
out_detach:
	dma_buf_detach(dmabuf, import_attach);
out_import:
	i915_gem_object_put(import_obj);
out_dmabuf:
	dma_buf_put(dmabuf);
out:
	i915_gem_object_put(obj);
out_ret:
	force_different_devices = false;
	return err;
}

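/* Same-driver import of an object restricted to system memory. */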
static int igt_dmabuf_import_same_driver_smem(void *arg)
{
	struct drm_i915_private *i915 = arg;
	struct intel_memory_region *smem = i915->mm.regions[INTEL_REGION_SMEM];

	return igt_dmabuf_import_same_driver(i915, &smem, 1);
}

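/*
 * Same-driver import of an object that may be placed in either LMEM or
 * SMEM; skipped on devices without an LMEM region.
 */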
static int igt_dmabuf_import_same_driver_lmem_smem(void *arg)
{
	struct drm_i915_private *i915 = arg;
	struct intel_memory_region *regions[2];

	if (!i915->mm.regions[INTEL_REGION_LMEM_0])
		return 0;

	regions[0] = i915->mm.regions[INTEL_REGION_LMEM_0];
	regions[1] = i915->mm.regions[INTEL_REGION_SMEM];
	return igt_dmabuf_import_same_driver(i915, regions, 2);
}

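/*
 * Import a mock dma-buf from a foreign exporter and check the resulting
 * GEM object belongs to this device and has the expected size. The vmap
 * aliasing checks are compiled out until imported dma-bufs can be CPU
 * mapped through the GEM object.
 */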
static int igt_dmabuf_import(void *arg)
{
	struct drm_i915_private *i915 = arg;
	struct drm_i915_gem_object *obj;
	struct dma_buf *dmabuf;
	void *obj_map, *dma_map;
	struct iosys_map map;
	u32 pattern[] = { 0, 0xaa, 0xcc, 0x55, 0xff };
	int err, i;

	dmabuf = mock_dmabuf(1);
	if (IS_ERR(dmabuf))
		return PTR_ERR(dmabuf);

	obj = to_intel_bo(i915_gem_prime_import(&i915->drm, dmabuf));
	if (IS_ERR(obj)) {
		pr_err("i915_gem_prime_import failed with err=%d\n",
		       (int)PTR_ERR(obj));
		err = PTR_ERR(obj);
		goto out_dmabuf;
	}

	if (obj->base.dev != &i915->drm) {
		pr_err("i915_gem_prime_import created a non-i915 object!\n");
		err = -EINVAL;
		goto out_obj;
	}

	if (obj->base.size != PAGE_SIZE) {
		pr_err("i915_gem_prime_import returned wrong size: found %lld, expected %ld\n",
		       (long long)obj->base.size, PAGE_SIZE);
		err = -EINVAL;
		goto out_obj;
	}

	err = dma_buf_vmap_unlocked(dmabuf, &map);
	dma_map = err ? NULL : map.vaddr;
	if (!dma_map) {
		pr_err("dma_buf_vmap failed\n");
		err = -ENOMEM;
		goto out_obj;
	}

	if (0) { /* Can not yet map dmabuf */
		obj_map = i915_gem_object_pin_map(obj, I915_MAP_WB);
		if (IS_ERR(obj_map)) {
			err = PTR_ERR(obj_map);
			pr_err("i915_gem_object_pin_map failed with err=%d\n", err);
			goto out_dma_map;
		}

		for (i = 0; i < ARRAY_SIZE(pattern); i++) {
			memset(dma_map, pattern[i], PAGE_SIZE);
			if (memchr_inv(obj_map, pattern[i], PAGE_SIZE)) {
				err = -EINVAL;
				pr_err("imported vmap not all set to %x!\n", pattern[i]);
				i915_gem_object_unpin_map(obj);
				goto out_dma_map;
			}
		}

		for (i = 0; i < ARRAY_SIZE(pattern); i++) {
			memset(obj_map, pattern[i], PAGE_SIZE);
			if (memchr_inv(dma_map, pattern[i], PAGE_SIZE)) {
				err = -EINVAL;
				pr_err("exported vmap not all set to %x!\n", pattern[i]);
				i915_gem_object_unpin_map(obj);
				goto out_dma_map;
			}
		}

		i915_gem_object_unpin_map(obj);
	}

	err = 0;
out_dma_map:
	dma_buf_vunmap_unlocked(dmabuf, &map);
out_obj:
	i915_gem_object_put(obj);
out_dmabuf:
	dma_buf_put(dmabuf);
	return err;
}

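/*
 * The import must hold its own reference on the dma-buf: drop ours
 * straight after importing and check the backing store is still usable.
 */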
static int igt_dmabuf_import_ownership(void *arg)
{
	struct drm_i915_private *i915 = arg;
	struct drm_i915_gem_object *obj;
	struct dma_buf *dmabuf;
	struct iosys_map map;
	void *ptr;
	int err;

	dmabuf = mock_dmabuf(1);
	if (IS_ERR(dmabuf))
		return PTR_ERR(dmabuf);

	err = dma_buf_vmap_unlocked(dmabuf, &map);
	ptr = err ? NULL : map.vaddr;
	if (!ptr) {
		pr_err("dma_buf_vmap failed\n");
		err = -ENOMEM;
		goto err_dmabuf;
	}

	memset(ptr, 0xc5, PAGE_SIZE);
	dma_buf_vunmap_unlocked(dmabuf, &map);

	obj = to_intel_bo(i915_gem_prime_import(&i915->drm, dmabuf));
	if (IS_ERR(obj)) {
		pr_err("i915_gem_prime_import failed with err=%d\n",
		       (int)PTR_ERR(obj));
		err = PTR_ERR(obj);
		goto err_dmabuf;
	}

	dma_buf_put(dmabuf);

	err = i915_gem_object_pin_pages_unlocked(obj);
	if (err) {
		pr_err("i915_gem_object_pin_pages failed with err=%d\n", err);
		goto out_obj;
	}

	err = 0;
	i915_gem_object_unpin_pages(obj);
out_obj:
	i915_gem_object_put(obj);
	return err;

err_dmabuf:
	dma_buf_put(dmabuf);
	return err;
}

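/*
 * A vmap of an exported object must expose zero-initialised, writable
 * pages, even after the last direct reference on the object is dropped.
 */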
static int igt_dmabuf_export_vmap(void *arg)
{
	struct drm_i915_private *i915 = arg;
	struct drm_i915_gem_object *obj;
	struct dma_buf *dmabuf;
	struct iosys_map map;
	void *ptr;
	int err;

	obj = i915_gem_object_create_shmem(i915, PAGE_SIZE);
	if (IS_ERR(obj))
		return PTR_ERR(obj);

	dmabuf = i915_gem_prime_export(&obj->base, 0);
	if (IS_ERR(dmabuf)) {
		pr_err("i915_gem_prime_export failed with err=%d\n",
		       (int)PTR_ERR(dmabuf));
		err = PTR_ERR(dmabuf);
		goto err_obj;
	}
	i915_gem_object_put(obj);

	err = dma_buf_vmap_unlocked(dmabuf, &map);
	ptr = err ? NULL : map.vaddr;
	if (!ptr) {
		pr_err("dma_buf_vmap failed\n");
		err = -ENOMEM;
		goto out;
	}

	if (!mem_is_zero(ptr, dmabuf->size)) {
		pr_err("Exported object not initialised to zero!\n");
		err = -EINVAL;
		goto out_vunmap;
	}

	memset(ptr, 0xc5, dmabuf->size);
	err = 0;

out_vunmap:
	/* Unmap on the mismatch path as well, so the vmap is never leaked */
	dma_buf_vunmap_unlocked(dmabuf, &map);
out:
	dma_buf_put(dmabuf);
	return err;

err_obj:
	i915_gem_object_put(obj);
	return err;
}

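/* Subtests that can run on a mock device, without real hardware. */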
int i915_gem_dmabuf_mock_selftests(void)
{
	static const struct i915_subtest tests[] = {
		SUBTEST(igt_dmabuf_export),
		SUBTEST(igt_dmabuf_import_self),
		SUBTEST(igt_dmabuf_import),
		SUBTEST(igt_dmabuf_import_ownership),
		SUBTEST(igt_dmabuf_export_vmap),
	};
	struct drm_i915_private *i915;
	int err;

	i915 = mock_gem_device();
	if (!i915)
		return -ENOMEM;

	err = i915_subtests(tests, i915);

	mock_destroy_device(i915);
	return err;
}

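/* Subtests that require a live device. */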
int i915_gem_dmabuf_live_selftests(struct drm_i915_private *i915)
{
	static const struct i915_subtest tests[] = {
		SUBTEST(igt_dmabuf_export),
		SUBTEST(igt_dmabuf_import_same_driver_lmem),
		SUBTEST(igt_dmabuf_import_same_driver_smem),
		SUBTEST(igt_dmabuf_import_same_driver_lmem_smem),
	};

	return i915_live_subtests(tests, i915);
}