/*
 * SPDX-License-Identifier: MIT
 *
 * Copyright © 2016 Intel Corporation
 */

#include "i915_drv.h"
#include "i915_selftest.h"

#include "mock_dmabuf.h"
#include "selftests/mock_gem_device.h"

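/* Sanity check that a freshly created shmem object can be exported. */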
static int igt_dmabuf_export(void *arg)
{
	struct drm_i915_private *i915 = arg;
	struct drm_i915_gem_object *obj;
	struct dma_buf *dmabuf;

	obj = i915_gem_object_create_shmem(i915, PAGE_SIZE);
	if (IS_ERR(obj))
		return PTR_ERR(obj);

	dmabuf = i915_gem_prime_export(&obj->base, 0);
	i915_gem_object_put(obj);
	if (IS_ERR(dmabuf)) {
		pr_err("i915_gem_prime_export failed with err=%d\n",
		       (int)PTR_ERR(dmabuf));
		return PTR_ERR(dmabuf);
	}

	dma_buf_put(dmabuf);
	return 0;
}

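/*
 * Exporting an object and importing it back into the same device should
 * hand back the original GEM object rather than creating a duplicate.
 */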
static int igt_dmabuf_import_self(void *arg)
{
	struct drm_i915_private *i915 = arg;
	struct drm_i915_gem_object *obj, *import_obj;
	struct drm_gem_object *import;
	struct dma_buf *dmabuf;
	int err;

	obj = i915_gem_object_create_shmem(i915, PAGE_SIZE);
	if (IS_ERR(obj))
		return PTR_ERR(obj);

	dmabuf = i915_gem_prime_export(&obj->base, 0);
	if (IS_ERR(dmabuf)) {
		pr_err("i915_gem_prime_export failed with err=%d\n",
		       (int)PTR_ERR(dmabuf));
		err = PTR_ERR(dmabuf);
		goto out;
	}

	import = i915_gem_prime_import(&i915->drm, dmabuf);
	if (IS_ERR(import)) {
		pr_err("i915_gem_prime_import failed with err=%d\n",
		       (int)PTR_ERR(import));
		err = PTR_ERR(import);
		goto out_dmabuf;
	}
	import_obj = to_intel_bo(import);

	if (import != &obj->base) {
		pr_err("i915_gem_prime_import created a new object!\n");
		err = -EINVAL;
		goto out_import;
	}

	i915_gem_object_lock(import_obj, NULL);
	err = __i915_gem_object_get_pages(import_obj);
	i915_gem_object_unlock(import_obj);
	if (err) {
		pr_err("Same object dma-buf get_pages failed!\n");
		goto out_import;
	}

	err = 0;
out_import:
	i915_gem_object_put(import_obj);
out_dmabuf:
	dma_buf_put(dmabuf);
out:
	i915_gem_object_put(obj);
	return err;
}

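/*
 * With force_different_devices set, the import path treats the exporter as a
 * foreign device. An LMEM-only object cannot be migrated to system memory
 * for such an import, so the import is expected to fail with -EOPNOTSUPP.
 */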
static int igt_dmabuf_import_same_driver_lmem(void *arg)
{
	struct drm_i915_private *i915 = arg;
	struct intel_memory_region *lmem = i915->mm.regions[INTEL_REGION_LMEM_0];
	struct drm_i915_gem_object *obj;
	struct drm_gem_object *import;
	struct dma_buf *dmabuf;
	int err;

	if (!lmem)
		return 0;

	force_different_devices = true;

	obj = __i915_gem_object_create_user(i915, PAGE_SIZE, &lmem, 1);
	if (IS_ERR(obj)) {
		pr_err("__i915_gem_object_create_user failed with err=%ld\n",
		       PTR_ERR(obj));
		err = PTR_ERR(obj);
		goto out_ret;
	}

	dmabuf = i915_gem_prime_export(&obj->base, 0);
	if (IS_ERR(dmabuf)) {
		pr_err("i915_gem_prime_export failed with err=%ld\n",
		       PTR_ERR(dmabuf));
		err = PTR_ERR(dmabuf);
		goto out;
	}

	/*
	 * We expect an import of an LMEM-only object to fail with
	 * -EOPNOTSUPP because it can't be migrated to SMEM.
	 */
	import = i915_gem_prime_import(&i915->drm, dmabuf);
	if (!IS_ERR(import)) {
		drm_gem_object_put(import);
		pr_err("i915_gem_prime_import succeeded when it shouldn't have\n");
		err = -EINVAL;
	} else if (PTR_ERR(import) != -EOPNOTSUPP) {
		pr_err("i915_gem_prime_import failed with the wrong err=%ld\n",
		       PTR_ERR(import));
		err = PTR_ERR(import);
	} else {
		err = 0;
	}

	dma_buf_put(dmabuf);
out:
	i915_gem_object_put(obj);
out_ret:
	force_different_devices = false;
	return err;
}

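/*
 * Common helper: export an object backed by the given memory regions,
 * import it back while pretending the devices differ, and verify that a
 * distinct GEM object is created, that its pages end up in system memory,
 * and that a second (fake) importer can attach and map the dma-buf.
 */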
static int igt_dmabuf_import_same_driver(struct drm_i915_private *i915,
					 struct intel_memory_region **regions,
					 unsigned int num_regions)
{
	struct drm_i915_gem_object *obj, *import_obj;
	struct drm_gem_object *import;
	struct dma_buf *dmabuf;
	struct dma_buf_attachment *import_attach;
	struct sg_table *st;
	long timeout;
	int err;

	force_different_devices = true;

	obj = __i915_gem_object_create_user(i915, PAGE_SIZE,
					    regions, num_regions);
	if (IS_ERR(obj)) {
		pr_err("__i915_gem_object_create_user failed with err=%ld\n",
		       PTR_ERR(obj));
		err = PTR_ERR(obj);
		goto out_ret;
	}

	dmabuf = i915_gem_prime_export(&obj->base, 0);
	if (IS_ERR(dmabuf)) {
		pr_err("i915_gem_prime_export failed with err=%ld\n",
		       PTR_ERR(dmabuf));
		err = PTR_ERR(dmabuf);
		goto out;
	}

	import = i915_gem_prime_import(&i915->drm, dmabuf);
	if (IS_ERR(import)) {
		pr_err("i915_gem_prime_import failed with err=%ld\n",
		       PTR_ERR(import));
		err = PTR_ERR(import);
		goto out_dmabuf;
	}
	import_obj = to_intel_bo(import);

	if (import == &obj->base) {
		pr_err("i915_gem_prime_import reused gem object!\n");
		err = -EINVAL;
		goto out_import;
	}

	i915_gem_object_lock(import_obj, NULL);
	err = __i915_gem_object_get_pages(import_obj);
	if (err) {
		pr_err("Different objects dma-buf get_pages failed!\n");
		i915_gem_object_unlock(import_obj);
		goto out_import;
	}

	/*
	 * If the exported object is not in system memory, something
	 * weird is going on. TODO: When p2p is supported, this is no
	 * longer considered weird.
	 */
	if (obj->mm.region != i915->mm.regions[INTEL_REGION_SMEM]) {
		pr_err("Exported dma-buf is not in system memory\n");
		err = -EINVAL;
	}

	i915_gem_object_unlock(import_obj);

	/* Now try a fake importer */
	import_attach = dma_buf_attach(dmabuf, obj->base.dev->dev);
	if (IS_ERR(import_attach)) {
		err = PTR_ERR(import_attach);
		goto out_import;
	}

	st = dma_buf_map_attachment(import_attach, DMA_BIDIRECTIONAL);
	if (IS_ERR(st)) {
		err = PTR_ERR(st);
		goto out_detach;
	}

	timeout = dma_resv_wait_timeout(dmabuf->resv, DMA_RESV_USAGE_WRITE,
					true, 5 * HZ);
	if (!timeout) {
		pr_err("dmabuf wait for exclusive fence timed out.\n");
		timeout = -ETIME;
	}
	err = timeout > 0 ? 0 : timeout;
	dma_buf_unmap_attachment(import_attach, st, DMA_BIDIRECTIONAL);
out_detach:
	dma_buf_detach(dmabuf, import_attach);
out_import:
	i915_gem_object_put(import_obj);
out_dmabuf:
	dma_buf_put(dmabuf);
out:
	i915_gem_object_put(obj);
out_ret:
	force_different_devices = false;
	return err;
}

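/* Same-driver import test with the object placed in system memory only. */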
static int igt_dmabuf_import_same_driver_smem(void *arg)
{
	struct drm_i915_private *i915 = arg;
	struct intel_memory_region *smem = i915->mm.regions[INTEL_REGION_SMEM];

	return igt_dmabuf_import_same_driver(i915, &smem, 1);
}

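/*
 * Same-driver import test with an object that may be placed in either local
 * or system memory; skipped on devices without LMEM.
 */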
static int igt_dmabuf_import_same_driver_lmem_smem(void *arg)
{
	struct drm_i915_private *i915 = arg;
	struct intel_memory_region *regions[2];

	if (!i915->mm.regions[INTEL_REGION_LMEM_0])
		return 0;

	regions[0] = i915->mm.regions[INTEL_REGION_LMEM_0];
	regions[1] = i915->mm.regions[INTEL_REGION_SMEM];
	return igt_dmabuf_import_same_driver(i915, regions, 2);
}

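/*
 * Import a mock dma-buf and check that the resulting object belongs to this
 * device and has the expected size. The vmap coherency check is compiled
 * out (if (0)) until imported dma-bufs can be mapped.
 */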
static int igt_dmabuf_import(void *arg)
{
	struct drm_i915_private *i915 = arg;
	struct drm_i915_gem_object *obj;
	struct dma_buf *dmabuf;
	void *obj_map, *dma_map;
	struct iosys_map map;
	u32 pattern[] = { 0, 0xaa, 0xcc, 0x55, 0xff };
	int err, i;

	dmabuf = mock_dmabuf(1);
	if (IS_ERR(dmabuf))
		return PTR_ERR(dmabuf);

	obj = to_intel_bo(i915_gem_prime_import(&i915->drm, dmabuf));
	if (IS_ERR(obj)) {
		pr_err("i915_gem_prime_import failed with err=%d\n",
		       (int)PTR_ERR(obj));
		err = PTR_ERR(obj);
		goto out_dmabuf;
	}

	if (obj->base.dev != &i915->drm) {
		pr_err("i915_gem_prime_import created a non-i915 object!\n");
		err = -EINVAL;
		goto out_obj;
	}

	if (obj->base.size != PAGE_SIZE) {
		pr_err("i915_gem_prime_import is wrong size found %lld, expected %ld\n",
		       (long long)obj->base.size, PAGE_SIZE);
		err = -EINVAL;
		goto out_obj;
	}

	err = dma_buf_vmap(dmabuf, &map);
	dma_map = err ? NULL : map.vaddr;
	if (!dma_map) {
		pr_err("dma_buf_vmap failed\n");
		err = -ENOMEM;
		goto out_obj;
	}

	if (0) { /* Can not yet map dmabuf */
		obj_map = i915_gem_object_pin_map(obj, I915_MAP_WB);
		if (IS_ERR(obj_map)) {
			err = PTR_ERR(obj_map);
			pr_err("i915_gem_object_pin_map failed with err=%d\n", err);
			goto out_dma_map;
		}

		for (i = 0; i < ARRAY_SIZE(pattern); i++) {
			memset(dma_map, pattern[i], PAGE_SIZE);
			if (memchr_inv(obj_map, pattern[i], PAGE_SIZE)) {
				err = -EINVAL;
				pr_err("imported vmap not all set to %x!\n", pattern[i]);
				i915_gem_object_unpin_map(obj);
				goto out_dma_map;
			}
		}

		for (i = 0; i < ARRAY_SIZE(pattern); i++) {
			memset(obj_map, pattern[i], PAGE_SIZE);
			if (memchr_inv(dma_map, pattern[i], PAGE_SIZE)) {
				err = -EINVAL;
				pr_err("exported vmap not all set to %x!\n", pattern[i]);
				i915_gem_object_unpin_map(obj);
				goto out_dma_map;
			}
		}

		i915_gem_object_unpin_map(obj);
	}

	err = 0;
out_dma_map:
	dma_buf_vunmap(dmabuf, &map);
out_obj:
	i915_gem_object_put(obj);
out_dmabuf:
	dma_buf_put(dmabuf);
	return err;
}

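/*
 * The imported object must keep its backing dma-buf alive: fill the mock
 * dma-buf, drop our own dma-buf reference after import, and check that the
 * pages can still be pinned through the imported object.
 */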
static int igt_dmabuf_import_ownership(void *arg)
{
	struct drm_i915_private *i915 = arg;
	struct drm_i915_gem_object *obj;
	struct dma_buf *dmabuf;
	struct iosys_map map;
	void *ptr;
	int err;

	dmabuf = mock_dmabuf(1);
	if (IS_ERR(dmabuf))
		return PTR_ERR(dmabuf);

	err = dma_buf_vmap(dmabuf, &map);
	ptr = err ? NULL : map.vaddr;
	if (!ptr) {
		pr_err("dma_buf_vmap failed\n");
		err = -ENOMEM;
		goto err_dmabuf;
	}

	memset(ptr, 0xc5, PAGE_SIZE);
	dma_buf_vunmap(dmabuf, &map);

	obj = to_intel_bo(i915_gem_prime_import(&i915->drm, dmabuf));
	if (IS_ERR(obj)) {
		pr_err("i915_gem_prime_import failed with err=%d\n",
		       (int)PTR_ERR(obj));
		err = PTR_ERR(obj);
		goto err_dmabuf;
	}

	dma_buf_put(dmabuf);

	err = i915_gem_object_pin_pages_unlocked(obj);
	if (err) {
		pr_err("i915_gem_object_pin_pages failed with err=%d\n", err);
		goto out_obj;
	}

	err = 0;
	i915_gem_object_unpin_pages(obj);
out_obj:
	i915_gem_object_put(obj);
	return err;

err_dmabuf:
	dma_buf_put(dmabuf);
	return err;
}

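/*
 * Export an object, drop the GEM reference so only the dma-buf keeps it
 * alive, then vmap the dma-buf and check the backing store reads back as
 * zero before scribbling over it.
 */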
static int igt_dmabuf_export_vmap(void *arg)
{
	struct drm_i915_private *i915 = arg;
	struct drm_i915_gem_object *obj;
	struct dma_buf *dmabuf;
	struct iosys_map map;
	void *ptr;
	int err;

	obj = i915_gem_object_create_shmem(i915, PAGE_SIZE);
	if (IS_ERR(obj))
		return PTR_ERR(obj);

	dmabuf = i915_gem_prime_export(&obj->base, 0);
	if (IS_ERR(dmabuf)) {
		pr_err("i915_gem_prime_export failed with err=%d\n",
		       (int)PTR_ERR(dmabuf));
		err = PTR_ERR(dmabuf);
		goto err_obj;
	}
	i915_gem_object_put(obj);

	err = dma_buf_vmap(dmabuf, &map);
	ptr = err ? NULL : map.vaddr;
	if (!ptr) {
		pr_err("dma_buf_vmap failed\n");
		err = -ENOMEM;
		goto out;
	}

	if (memchr_inv(ptr, 0, dmabuf->size)) {
		pr_err("Exported object not initialised to zero!\n");
		err = -EINVAL;
		goto out_vunmap;
	}

	memset(ptr, 0xc5, dmabuf->size);

	err = 0;
out_vunmap:
	dma_buf_vunmap(dmabuf, &map);
out:
	dma_buf_put(dmabuf);
	return err;

err_obj:
	i915_gem_object_put(obj);
	return err;
}

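/* Mock-device subtests: exercised without real hardware. */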
int i915_gem_dmabuf_mock_selftests(void)
{
	static const struct i915_subtest tests[] = {
		SUBTEST(igt_dmabuf_export),
		SUBTEST(igt_dmabuf_import_self),
		SUBTEST(igt_dmabuf_import),
		SUBTEST(igt_dmabuf_import_ownership),
		SUBTEST(igt_dmabuf_export_vmap),
	};
	struct drm_i915_private *i915;
	int err;

	i915 = mock_gem_device();
	if (!i915)
		return -ENOMEM;

	err = i915_subtests(tests, i915);

	mock_destroy_device(i915);
	return err;
}

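/* Live subtests: run against the real device. */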
int i915_gem_dmabuf_live_selftests(struct drm_i915_private *i915)
{
	static const struct i915_subtest tests[] = {
		SUBTEST(igt_dmabuf_export),
		SUBTEST(igt_dmabuf_import_same_driver_lmem),
		SUBTEST(igt_dmabuf_import_same_driver_smem),
		SUBTEST(igt_dmabuf_import_same_driver_lmem_smem),
	};

	return i915_live_subtests(tests, i915);
}