/*
 * Copyright © 2007 David Airlie
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
 * DEALINGS IN THE SOFTWARE.
 *
 * Authors:
 *     David Airlie
 */
#include <linux/module.h>
#include <linux/slab.h>
#include <linux/pm_runtime.h>

#include <drm/drmP.h>
#include <drm/drm_crtc.h>
#include <drm/drm_crtc_helper.h>
#include <drm/radeon_drm.h>
#include "radeon.h"

#include <drm/drm_fb_helper.h>

#include <linux/vga_switcheroo.h>

/* Object hierarchy:
 * radeon_fbdev wraps a drm_fb_helper plus a radeon framebuffer;
 * the helper holds a pointer back to the radeon framebuffer base class.
 */
struct radeon_fbdev {
	struct drm_fb_helper helper;
	struct radeon_framebuffer rfb;
	struct radeon_device *rdev;
};

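/*
 * fbcon open/release hooks: take a runtime PM reference while the fbdev
 * console is open so the GPU is not runtime-suspended underneath it, and
 * drop it again on release.  -EACCES from pm_runtime_get_sync() indicates
 * runtime PM is disabled for the device and is treated as success.
 */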
static int
radeonfb_open(struct fb_info *info, int user)
{
	struct radeon_fbdev *rfbdev = info->par;
	struct radeon_device *rdev = rfbdev->rdev;
	int ret = pm_runtime_get_sync(rdev->ddev->dev);
	if (ret < 0 && ret != -EACCES) {
		pm_runtime_mark_last_busy(rdev->ddev->dev);
		pm_runtime_put_autosuspend(rdev->ddev->dev);
		return ret;
	}
	return 0;
}

static int
radeonfb_release(struct fb_info *info, int user)
{
	struct radeon_fbdev *rfbdev = info->par;
	struct radeon_device *rdev = rfbdev->rdev;

	pm_runtime_mark_last_busy(rdev->ddev->dev);
	pm_runtime_put_autosuspend(rdev->ddev->dev);
	return 0;
}

static struct fb_ops radeonfb_ops = {
	.owner = THIS_MODULE,
	.fb_open = radeonfb_open,
	.fb_release = radeonfb_release,
	.fb_check_var = drm_fb_helper_check_var,
	.fb_set_par = drm_fb_helper_set_par,
	.fb_fillrect = drm_fb_helper_cfb_fillrect,
	.fb_copyarea = drm_fb_helper_cfb_copyarea,
	.fb_imageblit = drm_fb_helper_cfb_imageblit,
	.fb_pan_display = drm_fb_helper_pan_display,
	.fb_blank = drm_fb_helper_blank,
	.fb_setcmap = drm_fb_helper_setcmap,
	.fb_debug_enter = drm_fb_helper_debug_enter,
	.fb_debug_leave = drm_fb_helper_debug_leave,
};

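/*
 * Align a scanline width (in pixels) to the CRTC pitch requirements.
 * Newer (AVIVO) parts and tiled surfaces need coarser alignment, so the
 * mask is chosen per bytes-per-pixel and per ASIC generation.
 */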
int radeon_align_pitch(struct radeon_device *rdev, int width, int bpp, bool tiled)
{
	int aligned = width;
	int align_large = (ASIC_IS_AVIVO(rdev)) || tiled;
	int pitch_mask = 0;

	switch (bpp / 8) {
	case 1:
		pitch_mask = align_large ? 255 : 127;
		break;
	case 2:
		pitch_mask = align_large ? 127 : 31;
		break;
	case 3:
	case 4:
		pitch_mask = align_large ? 63 : 15;
		break;
	}

	aligned += pitch_mask;
	aligned &= ~pitch_mask;
	return aligned;
}

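/* Unmap and unpin the fbcon buffer object, then drop the GEM reference. */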
static void radeonfb_destroy_pinned_object(struct drm_gem_object *gobj)
{
	struct radeon_bo *rbo = gem_to_radeon_bo(gobj);
	int ret;

	ret = radeon_bo_reserve(rbo, false);
	if (likely(ret == 0)) {
		radeon_bo_kunmap(rbo);
		radeon_bo_unpin(rbo);
		radeon_bo_unreserve(rbo);
	}
	drm_gem_object_unreference_unlocked(gobj);
}

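/*
 * Allocate a GEM buffer object in VRAM for the fbcon framebuffer, apply
 * tiling/endian-swap flags as needed, pin it for scanout (restricted to
 * the low 27 bits of VRAM on pre-AVIVO CRTCs) and map it for CPU access.
 */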
static int radeonfb_create_pinned_object(struct radeon_fbdev *rfbdev,
					 struct drm_mode_fb_cmd2 *mode_cmd,
					 struct drm_gem_object **gobj_p)
{
	struct radeon_device *rdev = rfbdev->rdev;
	struct drm_gem_object *gobj = NULL;
	struct radeon_bo *rbo = NULL;
	bool fb_tiled = false; /* useful for testing */
	u32 tiling_flags = 0;
	int ret;
	int aligned_size, size;
	int height = mode_cmd->height;
	u32 bpp, depth;

	drm_fb_get_bpp_depth(mode_cmd->pixel_format, &depth, &bpp);

	/* need to align pitch with crtc limits */
	mode_cmd->pitches[0] = radeon_align_pitch(rdev, mode_cmd->width, bpp,
						  fb_tiled) * ((bpp + 1) / 8);

	if (rdev->family >= CHIP_R600)
		height = ALIGN(mode_cmd->height, 8);
	size = mode_cmd->pitches[0] * height;
	aligned_size = ALIGN(size, PAGE_SIZE);
	ret = radeon_gem_object_create(rdev, aligned_size, 0,
				       RADEON_GEM_DOMAIN_VRAM,
				       0, true, &gobj);
	if (ret) {
		printk(KERN_ERR "failed to allocate framebuffer (%d bytes)\n",
		       aligned_size);
		return -ENOMEM;
	}
	rbo = gem_to_radeon_bo(gobj);

	if (fb_tiled)
		tiling_flags = RADEON_TILING_MACRO;

#ifdef __BIG_ENDIAN
	switch (bpp) {
	case 32:
		tiling_flags |= RADEON_TILING_SWAP_32BIT;
		break;
	case 16:
		tiling_flags |= RADEON_TILING_SWAP_16BIT;
	default:
		break;
	}
#endif

	if (tiling_flags) {
		ret = radeon_bo_set_tiling_flags(rbo,
						 tiling_flags | RADEON_TILING_SURFACE,
						 mode_cmd->pitches[0]);
		if (ret)
			dev_err(rdev->dev, "FB failed to set tiling flags\n");
	}

	ret = radeon_bo_reserve(rbo, false);
	if (unlikely(ret != 0))
		goto out_unref;
	/* Only 27 bit offset for legacy CRTC */
	ret = radeon_bo_pin_restricted(rbo, RADEON_GEM_DOMAIN_VRAM,
				       ASIC_IS_AVIVO(rdev) ? 0 : 1 << 27,
				       NULL);
	if (ret) {
		radeon_bo_unreserve(rbo);
		goto out_unref;
	}
	if (fb_tiled)
		radeon_bo_check_tiling(rbo, 0, 0);
	ret = radeon_bo_kmap(rbo, NULL);
	radeon_bo_unreserve(rbo);
	if (ret)
		goto out_unref;

	*gobj_p = gobj;
	return 0;
out_unref:
	radeonfb_destroy_pinned_object(gobj);
	*gobj_p = NULL;
	return ret;
}

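/*
 * drm_fb_helper .fb_probe callback: allocate and pin the backing buffer
 * object, wrap it in a radeon_framebuffer and register an fb_info for
 * fbcon, filling in the fixed and variable screen information.
 */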
static int radeonfb_create(struct drm_fb_helper *helper,
			   struct drm_fb_helper_surface_size *sizes)
{
	struct radeon_fbdev *rfbdev =
		container_of(helper, struct radeon_fbdev, helper);
	struct radeon_device *rdev = rfbdev->rdev;
	struct fb_info *info;
	struct drm_framebuffer *fb = NULL;
	struct drm_mode_fb_cmd2 mode_cmd;
	struct drm_gem_object *gobj = NULL;
	struct radeon_bo *rbo = NULL;
	int ret;
	unsigned long tmp;

	mode_cmd.width = sizes->surface_width;
	mode_cmd.height = sizes->surface_height;

	/* avivo can't scanout real 24bpp */
	if ((sizes->surface_bpp == 24) && ASIC_IS_AVIVO(rdev))
		sizes->surface_bpp = 32;

	mode_cmd.pixel_format = drm_mode_legacy_fb_format(sizes->surface_bpp,
							  sizes->surface_depth);

	ret = radeonfb_create_pinned_object(rfbdev, &mode_cmd, &gobj);
	if (ret) {
		DRM_ERROR("failed to create fbcon object %d\n", ret);
		return ret;
	}

	rbo = gem_to_radeon_bo(gobj);

	/* okay, we have an object; now allocate the framebuffer info */
	info = drm_fb_helper_alloc_fbi(helper);
	if (IS_ERR(info)) {
		ret = PTR_ERR(info);
		goto out_unref;
	}

	info->par = rfbdev;

	ret = radeon_framebuffer_init(rdev->ddev, &rfbdev->rfb, &mode_cmd, gobj);
	if (ret) {
		DRM_ERROR("failed to initialize framebuffer %d\n", ret);
		goto out_destroy_fbi;
	}

	fb = &rfbdev->rfb.base;

	/* setup helper */
	rfbdev->helper.fb = fb;

	memset_io(rbo->kptr, 0x0, radeon_bo_size(rbo));

	strcpy(info->fix.id, "radeondrmfb");

	drm_fb_helper_fill_fix(info, fb->pitches[0], fb->depth);

	info->flags = FBINFO_DEFAULT | FBINFO_CAN_FORCE_OUTPUT;
	info->fbops = &radeonfb_ops;

	tmp = radeon_bo_gpu_offset(rbo) - rdev->mc.vram_start;
	info->fix.smem_start = rdev->mc.aper_base + tmp;
	info->fix.smem_len = radeon_bo_size(rbo);
	info->screen_base = rbo->kptr;
	info->screen_size = radeon_bo_size(rbo);

	drm_fb_helper_fill_var(info, &rfbdev->helper, sizes->fb_width, sizes->fb_height);

	/* setup aperture base/size for vesafb takeover */
	info->apertures->ranges[0].base = rdev->ddev->mode_config.fb_base;
	info->apertures->ranges[0].size = rdev->mc.aper_size;

	/* Use default scratch pixmap (info->pixmap.flags = FB_PIXMAP_SYSTEM) */

	if (info->screen_base == NULL) {
		ret = -ENOSPC;
		goto out_destroy_fbi;
	}

	DRM_INFO("fb mappable at 0x%lX\n", info->fix.smem_start);
	DRM_INFO("vram aper at 0x%lX\n", (unsigned long)rdev->mc.aper_base);
	DRM_INFO("size %lu\n", (unsigned long)radeon_bo_size(rbo));
	DRM_INFO("fb depth is %d\n", fb->depth);
	DRM_INFO(" pitch is %d\n", fb->pitches[0]);

	vga_switcheroo_client_fb_set(rdev->ddev->pdev, info);
	return 0;

out_destroy_fbi:
	drm_fb_helper_release_fbi(helper);
out_unref:
	if (fb && ret) {
		drm_gem_object_unreference_unlocked(gobj);
		drm_framebuffer_unregister_private(fb);
		drm_framebuffer_cleanup(fb);
		/* fb is embedded in rfbdev and is freed with it, not here */
	}
	return ret;
}

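/* Forward output poll / hotplug events to the fb helper so fbcon can reconfigure. */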
void radeon_fb_output_poll_changed(struct radeon_device *rdev)
{
	if (rdev->mode_info.rfbdev)
		drm_fb_helper_hotplug_event(&rdev->mode_info.rfbdev->helper);
}

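/*
 * Tear down the fbdev emulation: unregister the fb_info, release the
 * pinned buffer object and clean up the helper and framebuffer.
 */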
static int radeon_fbdev_destroy(struct drm_device *dev, struct radeon_fbdev *rfbdev)
{
	struct radeon_framebuffer *rfb = &rfbdev->rfb;

	drm_fb_helper_unregister_fbi(&rfbdev->helper);
	drm_fb_helper_release_fbi(&rfbdev->helper);

	if (rfb->obj) {
		radeonfb_destroy_pinned_object(rfb->obj);
		rfb->obj = NULL;
	}
	drm_fb_helper_fini(&rfbdev->helper);
	drm_framebuffer_unregister_private(&rfb->base);
	drm_framebuffer_cleanup(&rfb->base);

	return 0;
}

static const struct drm_fb_helper_funcs radeon_fb_helper_funcs = {
	.gamma_set = radeon_crtc_fb_gamma_set,
	.gamma_get = radeon_crtc_fb_gamma_get,
	.fb_probe = radeonfb_create,
};

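/*
 * Set up fbdev emulation at driver load: pick a console depth, register
 * the fb helper with all connectors and create the initial configuration.
 */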
int radeon_fbdev_init(struct radeon_device *rdev)
{
	struct radeon_fbdev *rfbdev;
	int bpp_sel = 32;
	int ret;

	/* don't enable fbdev if no connectors */
	if (list_empty(&rdev->ddev->mode_config.connector_list))
		return 0;

	/* select 8 bpp console on RN50 or cards with 32MB of VRAM or less */
	if (ASIC_IS_RN50(rdev) || rdev->mc.real_vram_size <= (32*1024*1024))
		bpp_sel = 8;

	rfbdev = kzalloc(sizeof(struct radeon_fbdev), GFP_KERNEL);
	if (!rfbdev)
		return -ENOMEM;

	rfbdev->rdev = rdev;
	rdev->mode_info.rfbdev = rfbdev;

	drm_fb_helper_prepare(rdev->ddev, &rfbdev->helper,
			      &radeon_fb_helper_funcs);

	ret = drm_fb_helper_init(rdev->ddev, &rfbdev->helper,
				 rdev->num_crtc,
				 RADEONFB_CONN_LIMIT);
	if (ret)
		goto free;

	ret = drm_fb_helper_single_add_all_connectors(&rfbdev->helper);
	if (ret)
		goto fini;

	/* disable all the possible outputs/crtcs before entering KMS mode */
	drm_helper_disable_unused_functions(rdev->ddev);

	ret = drm_fb_helper_initial_config(&rfbdev->helper, bpp_sel);
	if (ret)
		goto fini;

	return 0;

fini:
	drm_fb_helper_fini(&rfbdev->helper);
free:
	kfree(rfbdev);
	return ret;
}

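/* Driver teardown and suspend/resume entry points for the fbdev emulation. */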
void radeon_fbdev_fini(struct radeon_device *rdev)
{
	if (!rdev->mode_info.rfbdev)
		return;

	radeon_fbdev_destroy(rdev->ddev, rdev->mode_info.rfbdev);
	kfree(rdev->mode_info.rfbdev);
	rdev->mode_info.rfbdev = NULL;
}

void radeon_fbdev_set_suspend(struct radeon_device *rdev, int state)
{
	if (rdev->mode_info.rfbdev)
		drm_fb_helper_set_suspend(&rdev->mode_info.rfbdev->helper, state);
}

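/* Report whether the given buffer object backs the fbdev framebuffer. */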
bool radeon_fbdev_robj_is_fb(struct radeon_device *rdev, struct radeon_bo *robj)
{
	if (!rdev->mode_info.rfbdev)
		return false;

	if (robj == gem_to_radeon_bo(rdev->mode_info.rfbdev->rfb.obj))
		return true;
	return false;
}

void radeon_fb_add_connector(struct radeon_device *rdev, struct drm_connector *connector)
{
	if (rdev->mode_info.rfbdev)
		drm_fb_helper_add_one_connector(&rdev->mode_info.rfbdev->helper, connector);
}

void radeon_fb_remove_connector(struct radeon_device *rdev, struct drm_connector *connector)
{
	if (rdev->mode_info.rfbdev)
		drm_fb_helper_remove_one_connector(&rdev->mode_info.rfbdev->helper, connector);
}

void radeon_fbdev_restore_mode(struct radeon_device *rdev)
{
	struct radeon_fbdev *rfbdev = rdev->mode_info.rfbdev;
	struct drm_fb_helper *fb_helper;
	int ret;

	if (!rfbdev)
		return;

	fb_helper = &rfbdev->helper;

	ret = drm_fb_helper_restore_fbdev_mode_unlocked(fb_helper);
	if (ret)
		DRM_DEBUG("failed to restore crtc mode\n");
}