/*
 * Copyright (C) 2012-2013 Avionic Design GmbH
 * Copyright (C) 2012 NVIDIA CORPORATION. All rights reserved.
 *
 * Based on the KMS/FB CMA helpers
 * Copyright (C) 2012 Analog Device Inc.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 */

#include <linux/console.h>

#include "drm.h"
#include "gem.h"
#include <drm/drm_gem_framebuffer_helper.h>

#ifdef CONFIG_DRM_FBDEV_EMULATION
static inline struct tegra_fbdev *to_tegra_fbdev(struct drm_fb_helper *helper)
{
	return container_of(helper, struct tegra_fbdev, base);
}
#endif

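/* return the tegra_bo backing plane @index of @framebuffer */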
struct tegra_bo *tegra_fb_get_plane(struct drm_framebuffer *framebuffer,
				    unsigned int index)
{
	return to_tegra_bo(drm_gem_fb_get_obj(framebuffer, index));
}

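/* check whether the buffer backing the framebuffer is stored bottom-up */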
bool tegra_fb_is_bottom_up(struct drm_framebuffer *framebuffer)
{
	struct tegra_bo *bo = tegra_fb_get_plane(framebuffer, 0);

	if (bo->flags & TEGRA_BO_BOTTOM_UP)
		return true;

	return false;
}

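/*
 * Translate the framebuffer's format modifier into the tiling mode and
 * parameter expected by the display hardware. Returns -EINVAL for
 * unsupported modifiers.
 */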
int tegra_fb_get_tiling(struct drm_framebuffer *framebuffer,
			struct tegra_bo_tiling *tiling)
{
	uint64_t modifier = framebuffer->modifier;

	switch (modifier) {
	case DRM_FORMAT_MOD_LINEAR:
		tiling->mode = TEGRA_BO_TILING_MODE_PITCH;
		tiling->value = 0;
		break;

	case DRM_FORMAT_MOD_NVIDIA_TEGRA_TILED:
		tiling->mode = TEGRA_BO_TILING_MODE_TILED;
		tiling->value = 0;
		break;

	case DRM_FORMAT_MOD_NVIDIA_16BX2_BLOCK(0):
		tiling->mode = TEGRA_BO_TILING_MODE_BLOCK;
		tiling->value = 0;
		break;

	case DRM_FORMAT_MOD_NVIDIA_16BX2_BLOCK(1):
		tiling->mode = TEGRA_BO_TILING_MODE_BLOCK;
		tiling->value = 1;
		break;

	case DRM_FORMAT_MOD_NVIDIA_16BX2_BLOCK(2):
		tiling->mode = TEGRA_BO_TILING_MODE_BLOCK;
		tiling->value = 2;
		break;

	case DRM_FORMAT_MOD_NVIDIA_16BX2_BLOCK(3):
		tiling->mode = TEGRA_BO_TILING_MODE_BLOCK;
		tiling->value = 3;
		break;

	case DRM_FORMAT_MOD_NVIDIA_16BX2_BLOCK(4):
		tiling->mode = TEGRA_BO_TILING_MODE_BLOCK;
		tiling->value = 4;
		break;

	case DRM_FORMAT_MOD_NVIDIA_16BX2_BLOCK(5):
		tiling->mode = TEGRA_BO_TILING_MODE_BLOCK;
		tiling->value = 5;
		break;

	default:
		return -EINVAL;
	}

	return 0;
}

static const struct drm_framebuffer_funcs tegra_fb_funcs = {
	.destroy = drm_gem_fb_destroy,
	.create_handle = drm_gem_fb_create_handle,
};

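/*
 * Wrap the given buffer objects in a newly allocated drm_framebuffer. On
 * success the framebuffer takes over the GEM references; on failure they
 * remain with the caller.
 */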
static struct drm_framebuffer *tegra_fb_alloc(struct drm_device *drm,
					      const struct drm_mode_fb_cmd2 *mode_cmd,
					      struct tegra_bo **planes,
					      unsigned int num_planes)
{
	struct drm_framebuffer *fb;
	unsigned int i;
	int err;

	fb = kzalloc(sizeof(*fb), GFP_KERNEL);
	if (!fb)
		return ERR_PTR(-ENOMEM);

	drm_helper_mode_fill_fb_struct(drm, fb, mode_cmd);

	for (i = 0; i < fb->format->num_planes; i++)
		fb->obj[i] = &planes[i]->gem;

	err = drm_framebuffer_init(drm, fb, &tegra_fb_funcs);
	if (err < 0) {
		dev_err(drm->dev, "failed to initialize framebuffer: %d\n",
			err);
		kfree(fb);
		return ERR_PTR(err);
	}

	return fb;
}

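/*
 * .fb_create() hook: look up the GEM objects for all planes, validate their
 * sizes against the requested framebuffer layout and wrap them in a new
 * framebuffer. Drops the acquired GEM references on error.
 */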
struct drm_framebuffer *tegra_fb_create(struct drm_device *drm,
					struct drm_file *file,
					const struct drm_mode_fb_cmd2 *cmd)
{
	unsigned int hsub, vsub, i;
	struct tegra_bo *planes[4];
	struct drm_gem_object *gem;
	struct drm_framebuffer *fb;
	int err;

	hsub = drm_format_horz_chroma_subsampling(cmd->pixel_format);
	vsub = drm_format_vert_chroma_subsampling(cmd->pixel_format);

	for (i = 0; i < drm_format_num_planes(cmd->pixel_format); i++) {
		unsigned int width = cmd->width / (i ? hsub : 1);
		unsigned int height = cmd->height / (i ? vsub : 1);
		unsigned int size, bpp;

		gem = drm_gem_object_lookup(file, cmd->handles[i]);
		if (!gem) {
			err = -ENXIO;
			goto unreference;
		}

		bpp = drm_format_plane_cpp(cmd->pixel_format, i);

		size = (height - 1) * cmd->pitches[i] +
		       width * bpp + cmd->offsets[i];

		if (gem->size < size) {
			/* drop the reference taken by the lookup above */
			drm_gem_object_put_unlocked(gem);
			err = -EINVAL;
			goto unreference;
		}

		planes[i] = to_tegra_bo(gem);
	}

	fb = tegra_fb_alloc(drm, cmd, planes, i);
	if (IS_ERR(fb)) {
		err = PTR_ERR(fb);
		goto unreference;
	}

	return fb;

unreference:
	while (i--)
		drm_gem_object_put_unlocked(&planes[i]->gem);

	return ERR_PTR(err);
}

#ifdef CONFIG_DRM_FBDEV_EMULATION
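/*
 * fbdev mmap() implementation: set up the VMA for the GEM object backing
 * the fbdev framebuffer and remap it into userspace.
 */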
static int tegra_fb_mmap(struct fb_info *info, struct vm_area_struct *vma)
{
	struct drm_fb_helper *helper = info->par;
	struct tegra_bo *bo;
	int err;

	bo = tegra_fb_get_plane(helper->fb, 0);

	err = drm_gem_mmap_obj(&bo->gem, bo->gem.size, vma);
	if (err < 0)
		return err;

	return __tegra_gem_mmap(&bo->gem, vma);
}

static struct fb_ops tegra_fb_ops = {
	.owner = THIS_MODULE,
	DRM_FB_HELPER_DEFAULT_OPS,
	.fb_fillrect = drm_fb_helper_sys_fillrect,
	.fb_copyarea = drm_fb_helper_sys_copyarea,
	.fb_imageblit = drm_fb_helper_sys_imageblit,
	.fb_mmap = tegra_fb_mmap,
};

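/*
 * .fb_probe() hook: allocate a buffer object large enough for the requested
 * surface, wrap it in a framebuffer and fill in the fbdev info structure.
 * For buffers backed by individual pages the kernel mapping is created here
 * and torn down again in tegra_fbdev_exit().
 */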
static int tegra_fbdev_probe(struct drm_fb_helper *helper,
			     struct drm_fb_helper_surface_size *sizes)
{
	struct tegra_fbdev *fbdev = to_tegra_fbdev(helper);
	struct tegra_drm *tegra = helper->dev->dev_private;
	struct drm_device *drm = helper->dev;
	struct drm_mode_fb_cmd2 cmd = { 0 };
	unsigned int bytes_per_pixel;
	struct drm_framebuffer *fb;
	unsigned long offset;
	struct fb_info *info;
	struct tegra_bo *bo;
	size_t size;
	int err;

	bytes_per_pixel = DIV_ROUND_UP(sizes->surface_bpp, 8);

	cmd.width = sizes->surface_width;
	cmd.height = sizes->surface_height;
	cmd.pitches[0] = round_up(sizes->surface_width * bytes_per_pixel,
				  tegra->pitch_align);

	cmd.pixel_format = drm_mode_legacy_fb_format(sizes->surface_bpp,
						     sizes->surface_depth);

	size = cmd.pitches[0] * cmd.height;

	bo = tegra_bo_create(drm, size, 0);
	if (IS_ERR(bo))
		return PTR_ERR(bo);

	info = drm_fb_helper_alloc_fbi(helper);
	if (IS_ERR(info)) {
		dev_err(drm->dev, "failed to allocate framebuffer info\n");
		drm_gem_object_put_unlocked(&bo->gem);
		return PTR_ERR(info);
	}

	fbdev->fb = tegra_fb_alloc(drm, &cmd, &bo, 1);
	if (IS_ERR(fbdev->fb)) {
		err = PTR_ERR(fbdev->fb);
		dev_err(drm->dev, "failed to allocate DRM framebuffer: %d\n",
			err);
		drm_gem_object_put_unlocked(&bo->gem);
		return PTR_ERR(fbdev->fb);
	}

	fb = fbdev->fb;
	helper->fb = fb;
	helper->fbdev = info;

	info->par = helper;
	info->flags = FBINFO_FLAG_DEFAULT;
	info->fbops = &tegra_fb_ops;

	drm_fb_helper_fill_fix(info, fb->pitches[0], fb->format->depth);
	drm_fb_helper_fill_var(info, helper, fb->width, fb->height);

	offset = info->var.xoffset * bytes_per_pixel +
		 info->var.yoffset * fb->pitches[0];

	if (bo->pages) {
		bo->vaddr = vmap(bo->pages, bo->num_pages, VM_MAP,
				 pgprot_writecombine(PAGE_KERNEL));
		if (!bo->vaddr) {
			dev_err(drm->dev, "failed to vmap() framebuffer\n");
			err = -ENOMEM;
			goto destroy;
		}
	}

	drm->mode_config.fb_base = (resource_size_t)bo->paddr;
	info->screen_base = (void __iomem *)bo->vaddr + offset;
	info->screen_size = size;
	info->fix.smem_start = (unsigned long)(bo->paddr + offset);
	info->fix.smem_len = size;

	return 0;

destroy:
	drm_framebuffer_remove(fb);
	return err;
}

static const struct drm_fb_helper_funcs tegra_fb_helper_funcs = {
	.fb_probe = tegra_fbdev_probe,
};

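/* allocate a tegra_fbdev instance and prepare the embedded fb helper */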
static struct tegra_fbdev *tegra_fbdev_create(struct drm_device *drm)
{
	struct tegra_fbdev *fbdev;

	fbdev = kzalloc(sizeof(*fbdev), GFP_KERNEL);
	if (!fbdev) {
		dev_err(drm->dev, "failed to allocate DRM fbdev\n");
		return ERR_PTR(-ENOMEM);
	}

	drm_fb_helper_prepare(drm, &fbdev->base, &tegra_fb_helper_funcs);

	return fbdev;
}

static void tegra_fbdev_free(struct tegra_fbdev *fbdev)
{
	kfree(fbdev);
}

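/*
 * Register the fb helper, attach all connectors and set up the initial
 * fbdev configuration. num_crtc is currently unused.
 */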
static int tegra_fbdev_init(struct tegra_fbdev *fbdev,
			    unsigned int preferred_bpp,
			    unsigned int num_crtc,
			    unsigned int max_connectors)
{
	struct drm_device *drm = fbdev->base.dev;
	int err;

	err = drm_fb_helper_init(drm, &fbdev->base, max_connectors);
	if (err < 0) {
		dev_err(drm->dev, "failed to initialize DRM FB helper: %d\n",
			err);
		return err;
	}

	err = drm_fb_helper_single_add_all_connectors(&fbdev->base);
	if (err < 0) {
		dev_err(drm->dev, "failed to add connectors: %d\n", err);
		goto fini;
	}

	err = drm_fb_helper_initial_config(&fbdev->base, preferred_bpp);
	if (err < 0) {
		dev_err(drm->dev, "failed to set initial configuration: %d\n",
			err);
		goto fini;
	}

	return 0;

fini:
	drm_fb_helper_fini(&fbdev->base);
	return err;
}

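/*
 * Tear down the fbdev emulation: unregister the fb_info, drop the kernel
 * mapping created in tegra_fbdev_probe() and remove the framebuffer.
 */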
static void tegra_fbdev_exit(struct tegra_fbdev *fbdev)
{
	drm_fb_helper_unregister_fbi(&fbdev->base);

	if (fbdev->fb) {
		struct tegra_bo *bo = tegra_fb_get_plane(fbdev->fb, 0);

		/* Undo the special mapping we made in fbdev probe. */
		if (bo && bo->pages) {
			vunmap(bo->vaddr);
			bo->vaddr = NULL;
		}

		drm_framebuffer_remove(fbdev->fb);
	}

	drm_fb_helper_fini(&fbdev->base);
	tegra_fbdev_free(fbdev);
}
#endif

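/*
 * The functions below are the driver-facing entry points. They compile to
 * no-ops when CONFIG_DRM_FBDEV_EMULATION is disabled.
 */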
int tegra_drm_fb_prepare(struct drm_device *drm)
{
#ifdef CONFIG_DRM_FBDEV_EMULATION
	struct tegra_drm *tegra = drm->dev_private;

	tegra->fbdev = tegra_fbdev_create(drm);
	if (IS_ERR(tegra->fbdev))
		return PTR_ERR(tegra->fbdev);
#endif

	return 0;
}

void tegra_drm_fb_free(struct drm_device *drm)
{
#ifdef CONFIG_DRM_FBDEV_EMULATION
	struct tegra_drm *tegra = drm->dev_private;

	tegra_fbdev_free(tegra->fbdev);
#endif
}

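/* set up the fbdev helper using a 32 bpp default and all CRTCs/connectors */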
int tegra_drm_fb_init(struct drm_device *drm)
{
#ifdef CONFIG_DRM_FBDEV_EMULATION
	struct tegra_drm *tegra = drm->dev_private;
	int err;

	err = tegra_fbdev_init(tegra->fbdev, 32, drm->mode_config.num_crtc,
			       drm->mode_config.num_connector);
	if (err < 0)
		return err;
#endif

	return 0;
}

void tegra_drm_fb_exit(struct drm_device *drm)
{
#ifdef CONFIG_DRM_FBDEV_EMULATION
	struct tegra_drm *tegra = drm->dev_private;

	tegra_fbdev_exit(tegra->fbdev);
#endif
}

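/* suspend/resume hooks for the fbdev console, serialized by the console lock */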
void tegra_drm_fb_suspend(struct drm_device *drm)
{
#ifdef CONFIG_DRM_FBDEV_EMULATION
	struct tegra_drm *tegra = drm->dev_private;

	console_lock();
	drm_fb_helper_set_suspend(&tegra->fbdev->base, 1);
	console_unlock();
#endif
}

void tegra_drm_fb_resume(struct drm_device *drm)
{
#ifdef CONFIG_DRM_FBDEV_EMULATION
	struct tegra_drm *tegra = drm->dev_private;

	console_lock();
	drm_fb_helper_set_suspend(&tegra->fbdev->base, 0);
	console_unlock();
#endif
}