/*
 * Copyright © 2007 David Airlie
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
 * DEALINGS IN THE SOFTWARE.
 *
 * Authors:
 *     David Airlie
 */
#include <linux/module.h>
#include <linux/slab.h>
#include <linux/fb.h>

#include "drmP.h"
#include "drm.h"
#include "drm_crtc.h"
#include "drm_crtc_helper.h"
#include "radeon_drm.h"
#include "radeon.h"

#include "drm_fb_helper.h"

#include <linux/vga_switcheroo.h>

/* Object hierarchy:
 * radeon_fbdev bundles the generic drm_fb_helper with a radeon framebuffer;
 * the helper keeps a pointer back to the radeon framebuffer base class.
 */
struct radeon_fbdev {
	struct drm_fb_helper helper;
	struct radeon_framebuffer rfb;
	struct list_head fbdev_list;
	struct radeon_device *rdev;
};

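/*
 * fbdev ops: variable checking, mode setting, panning, blanking and cmap
 * handling are delegated to the generic DRM fb helpers; drawing goes
 * through the unaccelerated cfb_* routines on the CPU-mapped buffer.
 */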
static struct fb_ops radeonfb_ops = {
	.owner = THIS_MODULE,
	.fb_check_var = drm_fb_helper_check_var,
	.fb_set_par = drm_fb_helper_set_par,
	.fb_fillrect = cfb_fillrect,
	.fb_copyarea = cfb_copyarea,
	.fb_imageblit = cfb_imageblit,
	.fb_pan_display = drm_fb_helper_pan_display,
	.fb_blank = drm_fb_helper_blank,
	.fb_setcmap = drm_fb_helper_setcmap,
	.fb_debug_enter = drm_fb_helper_debug_enter,
	.fb_debug_leave = drm_fb_helper_debug_leave,
};


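/*
 * Round a scanline width up to the pitch granularity the CRTCs can scan
 * out.  The mask depends on bytes per pixel and is larger on AVIVO parts
 * and for tiled surfaces.  Returns the aligned width in pixels.
 */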
int radeon_align_pitch(struct radeon_device *rdev, int width, int bpp, bool tiled)
{
	int aligned = width;
	int align_large = (ASIC_IS_AVIVO(rdev)) || tiled;
	int pitch_mask = 0;

	switch (bpp / 8) {
	case 1:
		pitch_mask = align_large ? 255 : 127;
		break;
	case 2:
		pitch_mask = align_large ? 127 : 31;
		break;
	case 3:
	case 4:
		pitch_mask = align_large ? 63 : 15;
		break;
	}

	aligned += pitch_mask;
	aligned &= ~pitch_mask;
	return aligned;
}

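/*
 * Unmap, unpin and drop the reference on the GEM object that backs the
 * fbdev framebuffer.  Counterpart of radeonfb_create_pinned_object().
 */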
static void radeonfb_destroy_pinned_object(struct drm_gem_object *gobj)
{
	struct radeon_bo *rbo = gem_to_radeon_bo(gobj);
	int ret;

	ret = radeon_bo_reserve(rbo, false);
	if (likely(ret == 0)) {
		radeon_bo_kunmap(rbo);
		radeon_bo_unpin(rbo);
		radeon_bo_unreserve(rbo);
	}
	drm_gem_object_unreference_unlocked(gobj);
}

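/*
 * Allocate the buffer object that backs the fbdev framebuffer: align the
 * pitch for the CRTCs, create a GEM object in VRAM, apply tiling and
 * byte-swap flags, pin it for scanout and CPU-map it.  On success *gobj_p
 * holds a reference to the new object.
 */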
static int radeonfb_create_pinned_object(struct radeon_fbdev *rfbdev,
					 struct drm_mode_fb_cmd2 *mode_cmd,
					 struct drm_gem_object **gobj_p)
{
	struct radeon_device *rdev = rfbdev->rdev;
	struct drm_gem_object *gobj = NULL;
	struct radeon_bo *rbo = NULL;
	bool fb_tiled = false; /* useful for testing */
	u32 tiling_flags = 0;
	int ret;
	int aligned_size, size;
	int height = mode_cmd->height;
	u32 bpp, depth;

	drm_fb_get_bpp_depth(mode_cmd->pixel_format, &depth, &bpp);

	/* need to align pitch with crtc limits */
	mode_cmd->pitches[0] = radeon_align_pitch(rdev, mode_cmd->width, bpp,
						  fb_tiled) * ((bpp + 1) / 8);

	if (rdev->family >= CHIP_R600)
		height = ALIGN(mode_cmd->height, 8);
	size = mode_cmd->pitches[0] * height;
	aligned_size = ALIGN(size, PAGE_SIZE);
	ret = radeon_gem_object_create(rdev, aligned_size, 0,
				       RADEON_GEM_DOMAIN_VRAM,
				       false, true,
				       &gobj);
	if (ret) {
		printk(KERN_ERR "failed to allocate framebuffer (%d)\n",
		       aligned_size);
		return -ENOMEM;
	}
	rbo = gem_to_radeon_bo(gobj);

	if (fb_tiled)
		tiling_flags = RADEON_TILING_MACRO;

#ifdef __BIG_ENDIAN
	switch (bpp) {
	case 32:
		tiling_flags |= RADEON_TILING_SWAP_32BIT;
		break;
	case 16:
		tiling_flags |= RADEON_TILING_SWAP_16BIT;
	default:
		break;
	}
#endif

	if (tiling_flags) {
		ret = radeon_bo_set_tiling_flags(rbo,
						 tiling_flags | RADEON_TILING_SURFACE,
						 mode_cmd->pitches[0]);
		if (ret)
			dev_err(rdev->dev, "FB failed to set tiling flags\n");
	}


	ret = radeon_bo_reserve(rbo, false);
	if (unlikely(ret != 0))
		goto out_unref;
	/* Only 27 bit offset for legacy CRTC */
	ret = radeon_bo_pin_restricted(rbo, RADEON_GEM_DOMAIN_VRAM,
				       ASIC_IS_AVIVO(rdev) ? 0 : 1 << 27,
				       NULL);
	if (ret) {
		radeon_bo_unreserve(rbo);
		goto out_unref;
	}
	if (fb_tiled)
		radeon_bo_check_tiling(rbo, 0, 0);
	ret = radeon_bo_kmap(rbo, NULL);
	radeon_bo_unreserve(rbo);
	if (ret) {
		goto out_unref;
	}

	*gobj_p = gobj;
	return 0;
out_unref:
	radeonfb_destroy_pinned_object(gobj);
	*gobj_p = NULL;
	return ret;
}

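/*
 * Build the fbdev framebuffer: allocate and pin the backing object, wrap
 * it in a drm_framebuffer/fb_info pair, clear it and fill in the fixed
 * and variable screen information for fbcon.
 */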
static int radeonfb_create(struct radeon_fbdev *rfbdev,
			   struct drm_fb_helper_surface_size *sizes)
{
	struct radeon_device *rdev = rfbdev->rdev;
	struct fb_info *info;
	struct drm_framebuffer *fb = NULL;
	struct drm_mode_fb_cmd2 mode_cmd;
	struct drm_gem_object *gobj = NULL;
	struct radeon_bo *rbo = NULL;
	struct device *device = &rdev->pdev->dev;
	int ret;
	unsigned long tmp;

	mode_cmd.width = sizes->surface_width;
	mode_cmd.height = sizes->surface_height;

	/* avivo can't scanout real 24bpp */
	if ((sizes->surface_bpp == 24) && ASIC_IS_AVIVO(rdev))
		sizes->surface_bpp = 32;

	mode_cmd.pixel_format = drm_mode_legacy_fb_format(sizes->surface_bpp,
							  sizes->surface_depth);

	ret = radeonfb_create_pinned_object(rfbdev, &mode_cmd, &gobj);
	if (ret) {
		DRM_ERROR("failed to create fbcon object %d\n", ret);
		return ret;
	}

	rbo = gem_to_radeon_bo(gobj);

	/* okay we have an object now allocate the framebuffer */
	info = framebuffer_alloc(0, device);
	if (info == NULL) {
		ret = -ENOMEM;
		goto out_unref;
	}

	info->par = rfbdev;

	ret = radeon_framebuffer_init(rdev->ddev, &rfbdev->rfb, &mode_cmd, gobj);
	if (ret) {
		DRM_ERROR("failed to initialise framebuffer %d\n", ret);
		goto out_unref;
	}

	fb = &rfbdev->rfb.base;

	/* setup helper */
	rfbdev->helper.fb = fb;
	rfbdev->helper.fbdev = info;

	memset_io(rbo->kptr, 0x0, radeon_bo_size(rbo));

	strcpy(info->fix.id, "radeondrmfb");

	drm_fb_helper_fill_fix(info, fb->pitches[0], fb->depth);

	info->flags = FBINFO_DEFAULT | FBINFO_CAN_FORCE_OUTPUT;
	info->fbops = &radeonfb_ops;

	tmp = radeon_bo_gpu_offset(rbo) - rdev->mc.vram_start;
	info->fix.smem_start = rdev->mc.aper_base + tmp;
	info->fix.smem_len = radeon_bo_size(rbo);
	info->screen_base = rbo->kptr;
	info->screen_size = radeon_bo_size(rbo);

	drm_fb_helper_fill_var(info, &rfbdev->helper, sizes->fb_width, sizes->fb_height);

	/* setup aperture base/size for vesafb takeover */
	info->apertures = alloc_apertures(1);
	if (!info->apertures) {
		ret = -ENOMEM;
		goto out_unref;
	}
	info->apertures->ranges[0].base = rdev->ddev->mode_config.fb_base;
	info->apertures->ranges[0].size = rdev->mc.aper_size;

	/* Use default scratch pixmap (info->pixmap.flags = FB_PIXMAP_SYSTEM) */

	if (info->screen_base == NULL) {
		ret = -ENOSPC;
		goto out_unref;
	}

	ret = fb_alloc_cmap(&info->cmap, 256, 0);
	if (ret) {
		ret = -ENOMEM;
		goto out_unref;
	}

	DRM_INFO("fb mappable at 0x%lX\n",  info->fix.smem_start);
	DRM_INFO("vram aperture at 0x%lX\n",  (unsigned long)rdev->mc.aper_base);
	DRM_INFO("size %lu\n", (unsigned long)radeon_bo_size(rbo));
	DRM_INFO("fb depth is %d\n", fb->depth);
	DRM_INFO("   pitch is %d\n", fb->pitches[0]);

	vga_switcheroo_client_fb_set(rdev->ddev->pdev, info);
	return 0;

out_unref:
	if (fb && ret) {
		drm_gem_object_unreference(gobj);
		drm_framebuffer_cleanup(fb);
		kfree(fb);
	}
	return ret;
}

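/*
 * drm_fb_helper ->fb_probe() hook: create the single shared fbdev
 * framebuffer the first time it is called, and report whether a new
 * framebuffer was created (1) or an existing one reused (0).
 */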
static int radeon_fb_find_or_create_single(struct drm_fb_helper *helper,
					   struct drm_fb_helper_surface_size *sizes)
{
	struct radeon_fbdev *rfbdev = (struct radeon_fbdev *)helper;
	int new_fb = 0;
	int ret;

	if (!helper->fb) {
		ret = radeonfb_create(rfbdev, sizes);
		if (ret)
			return ret;
		new_fb = 1;
	}
	return new_fb;
}

static char *mode_option;
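/*
 * Parse the comma-separated option string; the last non-empty token is
 * remembered in mode_option as the preferred mode string.
 */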
int radeon_parse_options(char *options)
{
	char *this_opt;

	if (!options || !*options)
		return 0;

	while ((this_opt = strsep(&options, ",")) != NULL) {
		if (!*this_opt)
			continue;
		mode_option = this_opt;
	}
	return 0;
}

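/* Called on output hotplug events; lets the fb helper rescan connectors. */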
void radeon_fb_output_poll_changed(struct radeon_device *rdev)
{
	drm_fb_helper_hotplug_event(&rdev->mode_info.rfbdev->helper);
}

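/*
 * Tear down the fbdev emulation: unregister the fb_info, free its cmap,
 * release the pinned backing object and clean up the helper and
 * framebuffer state.
 */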
static int radeon_fbdev_destroy(struct drm_device *dev, struct radeon_fbdev *rfbdev)
{
	struct fb_info *info;
	struct radeon_framebuffer *rfb = &rfbdev->rfb;

	if (rfbdev->helper.fbdev) {
		info = rfbdev->helper.fbdev;

		unregister_framebuffer(info);
		if (info->cmap.len)
			fb_dealloc_cmap(&info->cmap);
		framebuffer_release(info);
	}

	if (rfb->obj) {
		radeonfb_destroy_pinned_object(rfb->obj);
		rfb->obj = NULL;
	}
	drm_fb_helper_fini(&rfbdev->helper);
	drm_framebuffer_cleanup(&rfb->base);

	return 0;
}

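/* drm_fb_helper callbacks: gamma handling plus the fb_probe hook above. */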
static struct drm_fb_helper_funcs radeon_fb_helper_funcs = {
	.gamma_set = radeon_crtc_fb_gamma_set,
	.gamma_get = radeon_crtc_fb_gamma_get,
	.fb_probe = radeon_fb_find_or_create_single,
};

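/*
 * Set up fbdev emulation at driver load: pick a console depth, allocate
 * the radeon_fbdev wrapper, register it with the DRM fb helper layer and
 * create the initial configuration.
 */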
int radeon_fbdev_init(struct radeon_device *rdev)
{
	struct radeon_fbdev *rfbdev;
	int bpp_sel = 32;
	int ret;

	/* select 8 bpp console on RN50 or cards with 32MB or less of VRAM */
	if (ASIC_IS_RN50(rdev) || rdev->mc.real_vram_size <= (32*1024*1024))
		bpp_sel = 8;

	rfbdev = kzalloc(sizeof(struct radeon_fbdev), GFP_KERNEL);
	if (!rfbdev)
		return -ENOMEM;

	rfbdev->rdev = rdev;
	rdev->mode_info.rfbdev = rfbdev;
	rfbdev->helper.funcs = &radeon_fb_helper_funcs;

	ret = drm_fb_helper_init(rdev->ddev, &rfbdev->helper,
				 rdev->num_crtc,
				 RADEONFB_CONN_LIMIT);
	if (ret) {
		kfree(rfbdev);
		return ret;
	}

	drm_fb_helper_single_add_all_connectors(&rfbdev->helper);
	drm_fb_helper_initial_config(&rfbdev->helper, bpp_sel);
	return 0;
}

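/* Tear down fbdev emulation at driver unload, if it was set up. */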
void radeon_fbdev_fini(struct radeon_device *rdev)
{
	if (!rdev->mode_info.rfbdev)
		return;

	radeon_fbdev_destroy(rdev->ddev, rdev->mode_info.rfbdev);
	kfree(rdev->mode_info.rfbdev);
	rdev->mode_info.rfbdev = NULL;
}

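/* Propagate suspend/resume state to the fbdev layer. */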
void radeon_fbdev_set_suspend(struct radeon_device *rdev, int state)
{
	fb_set_suspend(rdev->mode_info.rfbdev->helper.fbdev, state);
}

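/* Report how much VRAM the fbdev framebuffer object occupies, in bytes. */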
int radeon_fbdev_total_size(struct radeon_device *rdev)
{
	struct radeon_bo *robj;
	int size = 0;

	robj = gem_to_radeon_bo(rdev->mode_info.rfbdev->rfb.obj);
	size += radeon_bo_size(robj);
	return size;
}

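/* Check whether a buffer object is the one backing the fbdev framebuffer. */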
bool radeon_fbdev_robj_is_fb(struct radeon_device *rdev, struct radeon_bo *robj)
{
	if (robj == gem_to_radeon_bo(rdev->mode_info.rfbdev->rfb.obj))
		return true;
	return false;
}
432