Commit | Line | Data |
---|---|---|
2e3b3c42 LPC |
1 | /* |
2 | * drm kms/fb cma (contiguous memory allocator) helper functions | |
3 | * | |
4 | * Copyright (C) 2012 Analog Device Inc. | |
5 | * Author: Lars-Peter Clausen <lars@metafoo.de> | |
6 | * | |
7 | * Based on udl_fbdev.c | |
8 | * Copyright (C) 2012 Red Hat | |
9 | * | |
10 | * This program is free software; you can redistribute it and/or | |
11 | * modify it under the terms of the GNU General Public License | |
12 | * as published by the Free Software Foundation; either version 2 | |
13 | * of the License, or (at your option) any later version. | |
14 | * This program is distributed in the hope that it will be useful, | |
15 | * but WITHOUT ANY WARRANTY; without even the implied warranty of | |
16 | * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
17 | * GNU General Public License for more details. | |
18 | */ | |
19 | ||
20 | #include <drm/drmP.h> | |
21 | #include <drm/drm_crtc.h> | |
22 | #include <drm/drm_fb_helper.h> | |
23 | #include <drm/drm_crtc_helper.h> | |
24 | #include <drm/drm_gem_cma_helper.h> | |
25 | #include <drm/drm_fb_cma_helper.h> | |
26 | #include <linux/module.h> | |
27 | ||
/*
 * CMA-backed framebuffer: wraps a drm_framebuffer together with the
 * CMA GEM object(s) backing each plane.  Must embed @fb first-member
 * semantics are not required, but to_fb_cma() relies on container_of.
 */
struct drm_fb_cma {
	struct drm_framebuffer fb;
	struct drm_gem_cma_object *obj[4];	/* one backing object per plane (max 4 planes) */
};
32 | ||
/*
 * fbdev emulation state: the generic fb helper plus the CMA framebuffer
 * allocated for the fbdev console.  to_fbdev_cma() relies on container_of
 * over @fb_helper.
 */
struct drm_fbdev_cma {
	struct drm_fb_helper fb_helper;
	struct drm_fb_cma *fb;		/* fbdev framebuffer, NULL until created */
};
37 | ||
/* Upcast from the embedded fb helper to the drm_fbdev_cma that contains it. */
static inline struct drm_fbdev_cma *to_fbdev_cma(struct drm_fb_helper *helper)
{
	return container_of(helper, struct drm_fbdev_cma, fb_helper);
}
42 | ||
/* Upcast from the embedded drm_framebuffer to the drm_fb_cma that contains it. */
static inline struct drm_fb_cma *to_fb_cma(struct drm_framebuffer *fb)
{
	return container_of(fb, struct drm_fb_cma, fb);
}
47 | ||
48 | static void drm_fb_cma_destroy(struct drm_framebuffer *fb) | |
49 | { | |
50 | struct drm_fb_cma *fb_cma = to_fb_cma(fb); | |
51 | int i; | |
52 | ||
53 | for (i = 0; i < 4; i++) { | |
54 | if (fb_cma->obj[i]) | |
55 | drm_gem_object_unreference_unlocked(&fb_cma->obj[i]->base); | |
56 | } | |
57 | ||
58 | drm_framebuffer_cleanup(fb); | |
59 | kfree(fb_cma); | |
60 | } | |
61 | ||
62 | static int drm_fb_cma_create_handle(struct drm_framebuffer *fb, | |
63 | struct drm_file *file_priv, unsigned int *handle) | |
64 | { | |
65 | struct drm_fb_cma *fb_cma = to_fb_cma(fb); | |
66 | ||
67 | return drm_gem_handle_create(file_priv, | |
68 | &fb_cma->obj[0]->base, handle); | |
69 | } | |
70 | ||
/* Framebuffer vtable shared by all CMA framebuffers created by this helper. */
static struct drm_framebuffer_funcs drm_fb_cma_funcs = {
	.destroy	= drm_fb_cma_destroy,
	.create_handle	= drm_fb_cma_create_handle,
};
75 | ||
76 | static struct drm_fb_cma *drm_fb_cma_alloc(struct drm_device *dev, | |
77 | struct drm_mode_fb_cmd2 *mode_cmd, struct drm_gem_cma_object **obj, | |
78 | unsigned int num_planes) | |
79 | { | |
80 | struct drm_fb_cma *fb_cma; | |
81 | int ret; | |
82 | int i; | |
83 | ||
84 | fb_cma = kzalloc(sizeof(*fb_cma), GFP_KERNEL); | |
85 | if (!fb_cma) | |
86 | return ERR_PTR(-ENOMEM); | |
87 | ||
c7d73f6a DV |
88 | drm_helper_mode_fill_fb_struct(&fb_cma->fb, mode_cmd); |
89 | ||
90 | for (i = 0; i < num_planes; i++) | |
91 | fb_cma->obj[i] = obj[i]; | |
92 | ||
2e3b3c42 LPC |
93 | ret = drm_framebuffer_init(dev, &fb_cma->fb, &drm_fb_cma_funcs); |
94 | if (ret) { | |
95 | dev_err(dev->dev, "Failed to initalize framebuffer: %d\n", ret); | |
96 | kfree(fb_cma); | |
97 | return ERR_PTR(ret); | |
98 | } | |
99 | ||
2e3b3c42 LPC |
100 | return fb_cma; |
101 | } | |
102 | ||
103 | /** | |
104 | * drm_fb_cma_create() - (struct drm_mode_config_funcs *)->fb_create callback function | |
105 | * | |
106 | * If your hardware has special alignment or pitch requirements these should be | |
107 | * checked before calling this function. | |
108 | */ | |
109 | struct drm_framebuffer *drm_fb_cma_create(struct drm_device *dev, | |
110 | struct drm_file *file_priv, struct drm_mode_fb_cmd2 *mode_cmd) | |
111 | { | |
112 | struct drm_fb_cma *fb_cma; | |
113 | struct drm_gem_cma_object *objs[4]; | |
114 | struct drm_gem_object *obj; | |
115 | unsigned int hsub; | |
116 | unsigned int vsub; | |
117 | int ret; | |
118 | int i; | |
119 | ||
120 | hsub = drm_format_horz_chroma_subsampling(mode_cmd->pixel_format); | |
121 | vsub = drm_format_vert_chroma_subsampling(mode_cmd->pixel_format); | |
122 | ||
123 | for (i = 0; i < drm_format_num_planes(mode_cmd->pixel_format); i++) { | |
124 | unsigned int width = mode_cmd->width / (i ? hsub : 1); | |
125 | unsigned int height = mode_cmd->height / (i ? vsub : 1); | |
126 | unsigned int min_size; | |
127 | ||
128 | obj = drm_gem_object_lookup(dev, file_priv, mode_cmd->handles[i]); | |
129 | if (!obj) { | |
130 | dev_err(dev->dev, "Failed to lookup GEM object\n"); | |
131 | ret = -ENXIO; | |
132 | goto err_gem_object_unreference; | |
133 | } | |
134 | ||
135 | min_size = (height - 1) * mode_cmd->pitches[i] | |
136 | + width * drm_format_plane_cpp(mode_cmd->pixel_format, i) | |
137 | + mode_cmd->offsets[i]; | |
138 | ||
139 | if (obj->size < min_size) { | |
140 | drm_gem_object_unreference_unlocked(obj); | |
141 | ret = -EINVAL; | |
142 | goto err_gem_object_unreference; | |
143 | } | |
144 | objs[i] = to_drm_gem_cma_obj(obj); | |
145 | } | |
146 | ||
147 | fb_cma = drm_fb_cma_alloc(dev, mode_cmd, objs, i); | |
148 | if (IS_ERR(fb_cma)) { | |
149 | ret = PTR_ERR(fb_cma); | |
150 | goto err_gem_object_unreference; | |
151 | } | |
152 | ||
153 | return &fb_cma->fb; | |
154 | ||
155 | err_gem_object_unreference: | |
156 | for (i--; i >= 0; i--) | |
157 | drm_gem_object_unreference_unlocked(&objs[i]->base); | |
158 | return ERR_PTR(ret); | |
159 | } | |
160 | EXPORT_SYMBOL_GPL(drm_fb_cma_create); | |
161 | ||
162 | /** | |
163 | * drm_fb_cma_get_gem_obj() - Get CMA GEM object for framebuffer | |
164 | * @fb: The framebuffer | |
165 | * @plane: Which plane | |
166 | * | |
167 | * Return the CMA GEM object for given framebuffer. | |
168 | * | |
169 | * This function will usually be called from the CRTC callback functions. | |
170 | */ | |
171 | struct drm_gem_cma_object *drm_fb_cma_get_gem_obj(struct drm_framebuffer *fb, | |
172 | unsigned int plane) | |
173 | { | |
174 | struct drm_fb_cma *fb_cma = to_fb_cma(fb); | |
175 | ||
176 | if (plane >= 4) | |
177 | return NULL; | |
178 | ||
179 | return fb_cma->obj[plane]; | |
180 | } | |
181 | EXPORT_SYMBOL_GPL(drm_fb_cma_get_gem_obj); | |
182 | ||
/*
 * fbdev ops for the emulated console.  Drawing uses the sys_* helpers
 * (CPU access to the vmapped CMA buffer); everything else is forwarded
 * to the generic drm_fb_helper implementations.
 */
static struct fb_ops drm_fbdev_cma_ops = {
	.owner		= THIS_MODULE,
	.fb_fillrect	= sys_fillrect,
	.fb_copyarea	= sys_copyarea,
	.fb_imageblit	= sys_imageblit,
	.fb_check_var	= drm_fb_helper_check_var,
	.fb_set_par	= drm_fb_helper_set_par,
	.fb_blank	= drm_fb_helper_blank,
	.fb_pan_display	= drm_fb_helper_pan_display,
	.fb_setcmap	= drm_fb_helper_setcmap,
};
194 | ||
195 | static int drm_fbdev_cma_create(struct drm_fb_helper *helper, | |
196 | struct drm_fb_helper_surface_size *sizes) | |
197 | { | |
198 | struct drm_fbdev_cma *fbdev_cma = to_fbdev_cma(helper); | |
199 | struct drm_mode_fb_cmd2 mode_cmd = { 0 }; | |
200 | struct drm_device *dev = helper->dev; | |
201 | struct drm_gem_cma_object *obj; | |
202 | struct drm_framebuffer *fb; | |
203 | unsigned int bytes_per_pixel; | |
204 | unsigned long offset; | |
205 | struct fb_info *fbi; | |
206 | size_t size; | |
207 | int ret; | |
208 | ||
e0d78d08 | 209 | DRM_DEBUG_KMS("surface width(%d), height(%d) and bpp(%d)\n", |
2e3b3c42 LPC |
210 | sizes->surface_width, sizes->surface_height, |
211 | sizes->surface_bpp); | |
212 | ||
213 | bytes_per_pixel = DIV_ROUND_UP(sizes->surface_bpp, 8); | |
214 | ||
215 | mode_cmd.width = sizes->surface_width; | |
216 | mode_cmd.height = sizes->surface_height; | |
217 | mode_cmd.pitches[0] = sizes->surface_width * bytes_per_pixel; | |
218 | mode_cmd.pixel_format = drm_mode_legacy_fb_format(sizes->surface_bpp, | |
219 | sizes->surface_depth); | |
220 | ||
221 | size = mode_cmd.pitches[0] * mode_cmd.height; | |
222 | obj = drm_gem_cma_create(dev, size); | |
02813245 | 223 | if (IS_ERR(obj)) |
2e3b3c42 LPC |
224 | return -ENOMEM; |
225 | ||
226 | fbi = framebuffer_alloc(0, dev->dev); | |
227 | if (!fbi) { | |
228 | dev_err(dev->dev, "Failed to allocate framebuffer info.\n"); | |
229 | ret = -ENOMEM; | |
230 | goto err_drm_gem_cma_free_object; | |
231 | } | |
232 | ||
233 | fbdev_cma->fb = drm_fb_cma_alloc(dev, &mode_cmd, &obj, 1); | |
234 | if (IS_ERR(fbdev_cma->fb)) { | |
235 | dev_err(dev->dev, "Failed to allocate DRM framebuffer.\n"); | |
236 | ret = PTR_ERR(fbdev_cma->fb); | |
237 | goto err_framebuffer_release; | |
238 | } | |
239 | ||
240 | fb = &fbdev_cma->fb->fb; | |
241 | helper->fb = fb; | |
242 | helper->fbdev = fbi; | |
243 | ||
244 | fbi->par = helper; | |
245 | fbi->flags = FBINFO_FLAG_DEFAULT; | |
246 | fbi->fbops = &drm_fbdev_cma_ops; | |
247 | ||
248 | ret = fb_alloc_cmap(&fbi->cmap, 256, 0); | |
249 | if (ret) { | |
250 | dev_err(dev->dev, "Failed to allocate color map.\n"); | |
251 | goto err_drm_fb_cma_destroy; | |
252 | } | |
253 | ||
254 | drm_fb_helper_fill_fix(fbi, fb->pitches[0], fb->depth); | |
255 | drm_fb_helper_fill_var(fbi, helper, fb->width, fb->height); | |
256 | ||
257 | offset = fbi->var.xoffset * bytes_per_pixel; | |
258 | offset += fbi->var.yoffset * fb->pitches[0]; | |
259 | ||
260 | dev->mode_config.fb_base = (resource_size_t)obj->paddr; | |
261 | fbi->screen_base = obj->vaddr + offset; | |
262 | fbi->fix.smem_start = (unsigned long)(obj->paddr + offset); | |
263 | fbi->screen_size = size; | |
264 | fbi->fix.smem_len = size; | |
265 | ||
266 | return 0; | |
267 | ||
268 | err_drm_fb_cma_destroy: | |
269 | drm_fb_cma_destroy(fb); | |
270 | err_framebuffer_release: | |
271 | framebuffer_release(fbi); | |
272 | err_drm_gem_cma_free_object: | |
273 | drm_gem_cma_free_object(&obj->base); | |
274 | return ret; | |
275 | } | |
276 | ||
277 | static int drm_fbdev_cma_probe(struct drm_fb_helper *helper, | |
278 | struct drm_fb_helper_surface_size *sizes) | |
279 | { | |
280 | int ret = 0; | |
281 | ||
282 | if (!helper->fb) { | |
283 | ret = drm_fbdev_cma_create(helper, sizes); | |
284 | if (ret < 0) | |
285 | return ret; | |
286 | ret = 1; | |
287 | } | |
288 | ||
289 | return ret; | |
290 | } | |
291 | ||
/* fb helper vtable: only fb_probe is needed for the CMA fbdev emulation. */
static struct drm_fb_helper_funcs drm_fb_cma_helper_funcs = {
	.fb_probe = drm_fbdev_cma_probe,
};
295 | ||
296 | /** | |
297 | * drm_fbdev_cma_init() - Allocate and initializes a drm_fbdev_cma struct | |
298 | * @dev: DRM device | |
299 | * @preferred_bpp: Preferred bits per pixel for the device | |
300 | * @num_crtc: Number of CRTCs | |
301 | * @max_conn_count: Maximum number of connectors | |
302 | * | |
303 | * Returns a newly allocated drm_fbdev_cma struct or a ERR_PTR. | |
304 | */ | |
305 | struct drm_fbdev_cma *drm_fbdev_cma_init(struct drm_device *dev, | |
306 | unsigned int preferred_bpp, unsigned int num_crtc, | |
307 | unsigned int max_conn_count) | |
308 | { | |
309 | struct drm_fbdev_cma *fbdev_cma; | |
310 | struct drm_fb_helper *helper; | |
311 | int ret; | |
312 | ||
313 | fbdev_cma = kzalloc(sizeof(*fbdev_cma), GFP_KERNEL); | |
314 | if (!fbdev_cma) { | |
315 | dev_err(dev->dev, "Failed to allocate drm fbdev.\n"); | |
316 | return ERR_PTR(-ENOMEM); | |
317 | } | |
318 | ||
319 | fbdev_cma->fb_helper.funcs = &drm_fb_cma_helper_funcs; | |
320 | helper = &fbdev_cma->fb_helper; | |
321 | ||
322 | ret = drm_fb_helper_init(dev, helper, num_crtc, max_conn_count); | |
323 | if (ret < 0) { | |
324 | dev_err(dev->dev, "Failed to initialize drm fb helper.\n"); | |
325 | goto err_free; | |
326 | } | |
327 | ||
328 | ret = drm_fb_helper_single_add_all_connectors(helper); | |
329 | if (ret < 0) { | |
330 | dev_err(dev->dev, "Failed to add connectors.\n"); | |
331 | goto err_drm_fb_helper_fini; | |
332 | ||
333 | } | |
334 | ||
335 | ret = drm_fb_helper_initial_config(helper, preferred_bpp); | |
336 | if (ret < 0) { | |
337 | dev_err(dev->dev, "Failed to set inital hw configuration.\n"); | |
338 | goto err_drm_fb_helper_fini; | |
339 | } | |
340 | ||
341 | return fbdev_cma; | |
342 | ||
343 | err_drm_fb_helper_fini: | |
344 | drm_fb_helper_fini(helper); | |
345 | err_free: | |
346 | kfree(fbdev_cma); | |
347 | ||
348 | return ERR_PTR(ret); | |
349 | } | |
350 | EXPORT_SYMBOL_GPL(drm_fbdev_cma_init); | |
351 | ||
352 | /** | |
353 | * drm_fbdev_cma_fini() - Free drm_fbdev_cma struct | |
354 | * @fbdev_cma: The drm_fbdev_cma struct | |
355 | */ | |
356 | void drm_fbdev_cma_fini(struct drm_fbdev_cma *fbdev_cma) | |
357 | { | |
358 | if (fbdev_cma->fb_helper.fbdev) { | |
359 | struct fb_info *info; | |
360 | int ret; | |
361 | ||
362 | info = fbdev_cma->fb_helper.fbdev; | |
363 | ret = unregister_framebuffer(info); | |
364 | if (ret < 0) | |
365 | DRM_DEBUG_KMS("failed unregister_framebuffer()\n"); | |
366 | ||
367 | if (info->cmap.len) | |
368 | fb_dealloc_cmap(&info->cmap); | |
369 | ||
370 | framebuffer_release(info); | |
371 | } | |
372 | ||
373 | if (fbdev_cma->fb) | |
374 | drm_fb_cma_destroy(&fbdev_cma->fb->fb); | |
375 | ||
376 | drm_fb_helper_fini(&fbdev_cma->fb_helper); | |
377 | kfree(fbdev_cma); | |
378 | } | |
379 | EXPORT_SYMBOL_GPL(drm_fbdev_cma_fini); | |
380 | ||
381 | /** | |
382 | * drm_fbdev_cma_restore_mode() - Restores initial framebuffer mode | |
383 | * @fbdev_cma: The drm_fbdev_cma struct, may be NULL | |
384 | * | |
385 | * This function is usually called from the DRM drivers lastclose callback. | |
386 | */ | |
387 | void drm_fbdev_cma_restore_mode(struct drm_fbdev_cma *fbdev_cma) | |
388 | { | |
389 | if (fbdev_cma) | |
390 | drm_fb_helper_restore_fbdev_mode(&fbdev_cma->fb_helper); | |
391 | } | |
392 | EXPORT_SYMBOL_GPL(drm_fbdev_cma_restore_mode); | |
393 | ||
394 | /** | |
395 | * drm_fbdev_cma_hotplug_event() - Poll for hotpulug events | |
396 | * @fbdev_cma: The drm_fbdev_cma struct, may be NULL | |
397 | * | |
398 | * This function is usually called from the DRM drivers output_poll_changed | |
399 | * callback. | |
400 | */ | |
401 | void drm_fbdev_cma_hotplug_event(struct drm_fbdev_cma *fbdev_cma) | |
402 | { | |
403 | if (fbdev_cma) | |
404 | drm_fb_helper_hotplug_event(&fbdev_cma->fb_helper); | |
405 | } | |
406 | EXPORT_SYMBOL_GPL(drm_fbdev_cma_hotplug_event); |