1 /*
2 * Copyright 2014 Advanced Micro Devices, Inc.
3 *
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 * and/or sell copies of the Software, and to permit persons to whom the
9 * Software is furnished to do so, subject to the following conditions:
10 *
11 * The above copyright notice and this permission notice shall be included in
12 * all copies or substantial portions of the Software.
13 *
14 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
17 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
18 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
19 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
20 * OTHER DEALINGS IN THE SOFTWARE.
21 *
22 */
23
24 #include <drm/drm_vblank.h>
25
26 #include "amdgpu.h"
27 #include "amdgpu_pm.h"
28 #include "amdgpu_i2c.h"
29 #include "atom.h"
30 #include "amdgpu_pll.h"
31 #include "amdgpu_connectors.h"
32 #ifdef CONFIG_DRM_AMDGPU_SI
33 #include "dce_v6_0.h"
34 #endif
35 #ifdef CONFIG_DRM_AMDGPU_CIK
36 #include "dce_v8_0.h"
37 #endif
38 #include "dce_v10_0.h"
39 #include "dce_v11_0.h"
40 #include "dce_virtual.h"
41 #include "ivsrcid/ivsrcid_vislands30.h"
42
43 #define DCE_VIRTUAL_VBLANK_PERIOD 16666666
44
45
46 static void dce_virtual_set_display_funcs(struct amdgpu_device *adev);
47 static void dce_virtual_set_irq_funcs(struct amdgpu_device *adev);
48 static int dce_virtual_connector_encoder_init(struct amdgpu_device *adev,
49 int index);
50 static int dce_virtual_pageflip(struct amdgpu_device *adev,
51 unsigned crtc_id);
52 static enum hrtimer_restart dce_virtual_vblank_timer_handle(struct hrtimer *vblank_timer);
53 static void dce_virtual_set_crtc_vblank_interrupt_state(struct amdgpu_device *adev,
54 int crtc,
55 enum amdgpu_interrupt_state state);
56
/* Stub: a virtual display has no hardware frame counter, so always report 0. */
static u32 dce_virtual_vblank_get_counter(struct amdgpu_device *adev, int crtc)
{
	return 0;
}
61
/* No-op: there is no scanout hardware to reprogram on a virtual display. */
static void dce_virtual_page_flip(struct amdgpu_device *adev,
				  int crtc_id, u64 crtc_base, bool async)
{
}
67
/*
 * Scanout position is meaningless without real display hardware; zero the
 * outputs and report failure so callers fall back to timestamp estimation.
 */
static int dce_virtual_crtc_get_scanoutpos(struct amdgpu_device *adev, int crtc,
					   u32 *vbl, u32 *position)
{
	*position = 0;
	*vbl = 0;

	return -EINVAL;
}
76
/* Virtual connectors are always considered plugged in. */
static bool dce_virtual_hpd_sense(struct amdgpu_device *adev,
				  enum amdgpu_hpd_id hpd)
{
	return true;
}
82
/* No-op: there are no hotplug detect lines to configure. */
static void dce_virtual_hpd_set_polarity(struct amdgpu_device *adev,
					 enum amdgpu_hpd_id hpd)
{
}
88
/* Stub: no HPD GPIO register exists on a virtual display. */
static u32 dce_virtual_hpd_get_gpio_reg(struct amdgpu_device *adev)
{
	return 0;
}
93
94 /**
95 * dce_virtual_bandwidth_update - program display watermarks
96 *
97 * @adev: amdgpu_device pointer
98 *
99 * Calculate and program the display watermarks and line
100 * buffer allocation (CIK).
101 */
dce_virtual_bandwidth_update(struct amdgpu_device * adev)102 static void dce_virtual_bandwidth_update(struct amdgpu_device *adev)
103 {
104 return;
105 }
106
/* Stub: gamma has no effect on a virtual CRTC; accept and ignore. */
static int dce_virtual_crtc_gamma_set(struct drm_crtc *crtc, u16 *red,
				      u16 *green, u16 *blue, uint32_t size,
				      struct drm_modeset_acquire_ctx *ctx)
{
	return 0;
}
113
/* Tear down the DRM CRTC, then free the containing amdgpu_crtc allocation. */
static void dce_virtual_crtc_destroy(struct drm_crtc *crtc)
{
	struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(crtc);

	/* must unregister from DRM before the memory is released */
	drm_crtc_cleanup(crtc);
	kfree(amdgpu_crtc);
}
121
/*
 * CRTC callbacks: cursor handling is absent (no hardware cursor), vblank
 * bookkeeping is delegated to the common amdgpu/DRM helpers.
 */
static const struct drm_crtc_funcs dce_virtual_crtc_funcs = {
	.cursor_set2 = NULL,
	.cursor_move = NULL,
	.gamma_set = dce_virtual_crtc_gamma_set,
	.set_config = amdgpu_display_crtc_set_config,
	.destroy = dce_virtual_crtc_destroy,
	.page_flip_target = amdgpu_display_crtc_page_flip_target,
	.get_vblank_counter = amdgpu_get_vblank_counter_kms,
	.enable_vblank = amdgpu_enable_vblank_kms,
	.disable_vblank = amdgpu_disable_vblank_kms,
	.get_vblank_timestamp = drm_crtc_vblank_helper_get_vblank_timestamp,
};
134
/*
 * DPMS for the virtual CRTC: only tracks the enabled flag and turns the
 * emulated vblank machinery on/off; there is no hardware power state.
 */
static void dce_virtual_crtc_dpms(struct drm_crtc *crtc, int mode)
{
	struct drm_device *dev = crtc->dev;
	struct amdgpu_device *adev = drm_to_adev(dev);
	struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(crtc);
	unsigned type;

	/* under SR-IOV, skip DPMS handling entirely */
	if (amdgpu_sriov_vf(adev))
		return;

	switch (mode) {
	case DRM_MODE_DPMS_ON:
		amdgpu_crtc->enabled = true;
		/* Make sure VBLANK interrupts are still enabled */
		type = amdgpu_display_crtc_idx_to_irq_type(adev,
						amdgpu_crtc->crtc_id);
		amdgpu_irq_update(adev, &adev->crtc_irq, type);
		drm_crtc_vblank_on(crtc);
		break;
	case DRM_MODE_DPMS_STANDBY:
	case DRM_MODE_DPMS_SUSPEND:
	case DRM_MODE_DPMS_OFF:
		/* all "off" flavors are equivalent for a virtual CRTC */
		drm_crtc_vblank_off(crtc);
		amdgpu_crtc->enabled = false;
		break;
	}
}
162
163
/* Modeset prepare: power the CRTC down before reconfiguration. */
static void dce_virtual_crtc_prepare(struct drm_crtc *crtc)
{
	dce_virtual_crtc_dpms(crtc, DRM_MODE_DPMS_OFF);
}
168
/* Modeset commit: power the CRTC back on after reconfiguration. */
static void dce_virtual_crtc_commit(struct drm_crtc *crtc)
{
	dce_virtual_crtc_dpms(crtc, DRM_MODE_DPMS_ON);
}
173
/* Disable the CRTC: stop vblanks and drop its encoder/connector links. */
static void dce_virtual_crtc_disable(struct drm_crtc *crtc)
{
	struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(crtc);
	struct drm_device *dev = crtc->dev;

	/* only touch vblank state if vblank support was initialized */
	if (dev->num_crtcs)
		drm_crtc_vblank_off(crtc);

	amdgpu_crtc->enabled = false;
	amdgpu_crtc->pll_id = ATOM_PPLL_INVALID;
	amdgpu_crtc->encoder = NULL;
	amdgpu_crtc->connector = NULL;
}
187
/* Record the adjusted mode; nothing to program on virtual hardware. */
static int dce_virtual_crtc_mode_set(struct drm_crtc *crtc,
				     struct drm_display_mode *mode,
				     struct drm_display_mode *adjusted_mode,
				     int x, int y, struct drm_framebuffer *old_fb)
{
	struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(crtc);

	/* update the hw mode for dpm */
	amdgpu_crtc->hw_mode = *adjusted_mode;

	return 0;
}
200
/* Every mode is acceptable as-is on a virtual CRTC. */
static bool dce_virtual_crtc_mode_fixup(struct drm_crtc *crtc,
					const struct drm_display_mode *mode,
					struct drm_display_mode *adjusted_mode)
{
	return true;
}
207
208
/* Stub: no scanout base address to program; always succeeds. */
static int dce_virtual_crtc_set_base(struct drm_crtc *crtc, int x, int y,
				     struct drm_framebuffer *old_fb)
{
	return 0;
}
214
/* Stub: atomic variant of set_base; nothing to do, always succeeds. */
static int dce_virtual_crtc_set_base_atomic(struct drm_crtc *crtc,
					    struct drm_framebuffer *fb,
					    int x, int y, enum mode_set_atomic state)
{
	return 0;
}
221
/* Legacy (non-atomic) CRTC helper callbacks for the virtual display. */
static const struct drm_crtc_helper_funcs dce_virtual_crtc_helper_funcs = {
	.dpms = dce_virtual_crtc_dpms,
	.mode_fixup = dce_virtual_crtc_mode_fixup,
	.mode_set = dce_virtual_crtc_mode_set,
	.mode_set_base = dce_virtual_crtc_set_base,
	.mode_set_base_atomic = dce_virtual_crtc_set_base_atomic,
	.prepare = dce_virtual_crtc_prepare,
	.commit = dce_virtual_crtc_commit,
	.disable = dce_virtual_crtc_disable,
	.get_scanout_position = amdgpu_crtc_get_scanout_position,
};
233
dce_virtual_crtc_init(struct amdgpu_device * adev,int index)234 static int dce_virtual_crtc_init(struct amdgpu_device *adev, int index)
235 {
236 struct amdgpu_crtc *amdgpu_crtc;
237
238 amdgpu_crtc = kzalloc(sizeof(struct amdgpu_crtc) +
239 (AMDGPUFB_CONN_LIMIT * sizeof(struct drm_connector *)), GFP_KERNEL);
240 if (amdgpu_crtc == NULL)
241 return -ENOMEM;
242
243 drm_crtc_init(adev_to_drm(adev), &amdgpu_crtc->base, &dce_virtual_crtc_funcs);
244
245 drm_mode_crtc_set_gamma_size(&amdgpu_crtc->base, 256);
246 amdgpu_crtc->crtc_id = index;
247 adev->mode_info.crtcs[index] = amdgpu_crtc;
248
249 amdgpu_crtc->pll_id = ATOM_PPLL_INVALID;
250 amdgpu_crtc->encoder = NULL;
251 amdgpu_crtc->connector = NULL;
252 amdgpu_crtc->vsync_timer_enabled = AMDGPU_IRQ_STATE_DISABLE;
253 drm_crtc_helper_add(&amdgpu_crtc->base, &dce_virtual_crtc_helper_funcs);
254
255 hrtimer_init(&amdgpu_crtc->vblank_timer, CLOCK_MONOTONIC, HRTIMER_MODE_REL);
256 hrtimer_set_expires(&amdgpu_crtc->vblank_timer, DCE_VIRTUAL_VBLANK_PERIOD);
257 amdgpu_crtc->vblank_timer.function = dce_virtual_vblank_timer_handle;
258 hrtimer_start(&amdgpu_crtc->vblank_timer,
259 DCE_VIRTUAL_VBLANK_PERIOD, HRTIMER_MODE_REL);
260 return 0;
261 }
262
dce_virtual_early_init(void * handle)263 static int dce_virtual_early_init(void *handle)
264 {
265 struct amdgpu_device *adev = (struct amdgpu_device *)handle;
266
267 dce_virtual_set_display_funcs(adev);
268 dce_virtual_set_irq_funcs(adev);
269
270 adev->mode_info.num_hpd = 1;
271 adev->mode_info.num_dig = 1;
272 return 0;
273 }
274
/*
 * Pick the best encoder for a connector: prefer a virtual encoder,
 * otherwise fall back to the first possible encoder (or NULL if none).
 */
static struct drm_encoder *
dce_virtual_encoder(struct drm_connector *connector)
{
	struct drm_encoder *encoder;

	drm_connector_for_each_possible_encoder(connector, encoder) {
		if (encoder->encoder_type == DRM_MODE_ENCODER_VIRTUAL)
			return encoder;
	}

	/* pick the first one */
	drm_connector_for_each_possible_encoder(connector, encoder)
		return encoder;

	return NULL;
}
291
dce_virtual_get_modes(struct drm_connector * connector)292 static int dce_virtual_get_modes(struct drm_connector *connector)
293 {
294 struct drm_device *dev = connector->dev;
295 struct drm_display_mode *mode = NULL;
296 unsigned i;
297 static const struct mode_size {
298 int w;
299 int h;
300 } common_modes[21] = {
301 { 640, 480},
302 { 720, 480},
303 { 800, 600},
304 { 848, 480},
305 {1024, 768},
306 {1152, 768},
307 {1280, 720},
308 {1280, 800},
309 {1280, 854},
310 {1280, 960},
311 {1280, 1024},
312 {1440, 900},
313 {1400, 1050},
314 {1680, 1050},
315 {1600, 1200},
316 {1920, 1080},
317 {1920, 1200},
318 {4096, 3112},
319 {3656, 2664},
320 {3840, 2160},
321 {4096, 2160},
322 };
323
324 for (i = 0; i < 21; i++) {
325 mode = drm_cvt_mode(dev, common_modes[i].w, common_modes[i].h, 60, false, false, false);
326 drm_mode_probed_add(connector, mode);
327 }
328
329 return 0;
330 }
331
/* Any mode is displayable on a virtual connector. */
static enum drm_mode_status dce_virtual_mode_valid(struct drm_connector *connector,
						   struct drm_display_mode *mode)
{
	return MODE_OK;
}
337
338 static int
dce_virtual_dpms(struct drm_connector * connector,int mode)339 dce_virtual_dpms(struct drm_connector *connector, int mode)
340 {
341 return 0;
342 }
343
344 static int
dce_virtual_set_property(struct drm_connector * connector,struct drm_property * property,uint64_t val)345 dce_virtual_set_property(struct drm_connector *connector,
346 struct drm_property *property,
347 uint64_t val)
348 {
349 return 0;
350 }
351
/* Unregister and free a virtual connector. */
static void dce_virtual_destroy(struct drm_connector *connector)
{
	drm_connector_unregister(connector);
	drm_connector_cleanup(connector);
	kfree(connector);
}
358
/* No-op: forced connector state needs no hardware action. */
static void dce_virtual_force(struct drm_connector *connector)
{
}
363
/* Connector probe helpers for the virtual display. */
static const struct drm_connector_helper_funcs dce_virtual_connector_helper_funcs = {
	.get_modes = dce_virtual_get_modes,
	.mode_valid = dce_virtual_mode_valid,
	.best_encoder = dce_virtual_encoder,
};
369
/* Core connector callbacks; mode filling uses the generic probe helper. */
static const struct drm_connector_funcs dce_virtual_connector_funcs = {
	.dpms = dce_virtual_dpms,
	.fill_modes = drm_helper_probe_single_connector_modes,
	.set_property = dce_virtual_set_property,
	.destroy = dce_virtual_destroy,
	.force = dce_virtual_force,
};
377
dce_virtual_sw_init(void * handle)378 static int dce_virtual_sw_init(void *handle)
379 {
380 int r, i;
381 struct amdgpu_device *adev = (struct amdgpu_device *)handle;
382
383 r = amdgpu_irq_add_id(adev, AMDGPU_IRQ_CLIENTID_LEGACY, VISLANDS30_IV_SRCID_SMU_DISP_TIMER2_TRIGGER, &adev->crtc_irq);
384 if (r)
385 return r;
386
387 adev_to_drm(adev)->max_vblank_count = 0;
388
389 adev_to_drm(adev)->mode_config.funcs = &amdgpu_mode_funcs;
390
391 adev_to_drm(adev)->mode_config.max_width = 16384;
392 adev_to_drm(adev)->mode_config.max_height = 16384;
393
394 adev_to_drm(adev)->mode_config.preferred_depth = 24;
395 adev_to_drm(adev)->mode_config.prefer_shadow = 1;
396
397 adev_to_drm(adev)->mode_config.fb_base = adev->gmc.aper_base;
398
399 r = amdgpu_display_modeset_create_props(adev);
400 if (r)
401 return r;
402
403 adev_to_drm(adev)->mode_config.max_width = 16384;
404 adev_to_drm(adev)->mode_config.max_height = 16384;
405
406 /* allocate crtcs, encoders, connectors */
407 for (i = 0; i < adev->mode_info.num_crtc; i++) {
408 r = dce_virtual_crtc_init(adev, i);
409 if (r)
410 return r;
411 r = dce_virtual_connector_encoder_init(adev, i);
412 if (r)
413 return r;
414 }
415
416 drm_kms_helper_poll_init(adev_to_drm(adev));
417
418 adev->mode_info.mode_config_initialized = true;
419 return 0;
420 }
421
/* Software fini: tear down polling and the whole DRM mode config. */
static int dce_virtual_sw_fini(void *handle)
{
	struct amdgpu_device *adev = (struct amdgpu_device *)handle;

	kfree(adev->mode_info.bios_hardcoded_edid);

	drm_kms_helper_poll_fini(adev_to_drm(adev));

	drm_mode_config_cleanup(adev_to_drm(adev));
	/* clear crtcs pointer to avoid dce irq finish routine access freed data */
	memset(adev->mode_info.crtcs, 0, sizeof(adev->mode_info.crtcs[0]) * AMDGPU_MAX_CRTCS);
	adev->mode_info.mode_config_initialized = false;
	return 0;
}
436
/*
 * dce_virtual_hw_init - disable the ASIC's physical display engine
 *
 * The virtual display replaces the real DCE, so turn off whichever DCE
 * generation the ASIC carries (if any).
 */
static int dce_virtual_hw_init(void *handle)
{
	struct amdgpu_device *adev = (struct amdgpu_device *)handle;

	switch (adev->asic_type) {
#ifdef CONFIG_DRM_AMDGPU_SI
	case CHIP_TAHITI:
	case CHIP_PITCAIRN:
	case CHIP_VERDE:
	case CHIP_OLAND:
		dce_v6_0_disable_dce(adev);
		break;
#endif
#ifdef CONFIG_DRM_AMDGPU_CIK
	case CHIP_BONAIRE:
	case CHIP_HAWAII:
	case CHIP_KAVERI:
	case CHIP_KABINI:
	case CHIP_MULLINS:
		dce_v8_0_disable_dce(adev);
		break;
#endif
	case CHIP_FIJI:
	case CHIP_TONGA:
		dce_v10_0_disable_dce(adev);
		break;
	case CHIP_CARRIZO:
	case CHIP_STONEY:
	case CHIP_POLARIS10:
	case CHIP_POLARIS11:
	case CHIP_VEGAM:
		dce_v11_0_disable_dce(adev);
		break;
	case CHIP_TOPAZ:
#ifdef CONFIG_DRM_AMDGPU_SI
	case CHIP_HAINAN:
#endif
		/* no DCE */
		break;
	default:
		break;
	}
	return 0;
}
481
dce_virtual_hw_fini(void * handle)482 static int dce_virtual_hw_fini(void *handle)
483 {
484 struct amdgpu_device *adev = (struct amdgpu_device *)handle;
485 int i = 0;
486
487 for (i = 0; i<adev->mode_info.num_crtc; i++)
488 if (adev->mode_info.crtcs[i])
489 hrtimer_cancel(&adev->mode_info.crtcs[i]->vblank_timer);
490
491 return 0;
492 }
493
/* Suspend is just hw_fini: cancel the emulation timers. */
static int dce_virtual_suspend(void *handle)
{
	return dce_virtual_hw_fini(handle);
}
498
/* Resume is just hw_init: re-disable any physical DCE after wakeup. */
static int dce_virtual_resume(void *handle)
{
	return dce_virtual_hw_init(handle);
}
503
/* A virtual display engine is always idle. */
static bool dce_virtual_is_idle(void *handle)
{
	return true;
}
508
/* Nothing to wait for; always idle. */
static int dce_virtual_wait_for_idle(void *handle)
{
	return 0;
}
513
/* No hardware, so nothing to soft-reset. */
static int dce_virtual_soft_reset(void *handle)
{
	return 0;
}
518
/* No clocks to gate on a virtual display. */
static int dce_virtual_set_clockgating_state(void *handle,
					  enum amd_clockgating_state state)
{
	return 0;
}
524
/* No power domains to gate on a virtual display. */
static int dce_virtual_set_powergating_state(void *handle,
					  enum amd_powergating_state state)
{
	return 0;
}
530
/* IP block callbacks for the virtual display engine. */
static const struct amd_ip_funcs dce_virtual_ip_funcs = {
	.name = "dce_virtual",
	.early_init = dce_virtual_early_init,
	.late_init = NULL,
	.sw_init = dce_virtual_sw_init,
	.sw_fini = dce_virtual_sw_fini,
	.hw_init = dce_virtual_hw_init,
	.hw_fini = dce_virtual_hw_fini,
	.suspend = dce_virtual_suspend,
	.resume = dce_virtual_resume,
	.is_idle = dce_virtual_is_idle,
	.wait_for_idle = dce_virtual_wait_for_idle,
	.soft_reset = dce_virtual_soft_reset,
	.set_clockgating_state = dce_virtual_set_clockgating_state,
	.set_powergating_state = dce_virtual_set_powergating_state,
};
547
/* these are handled by the primary encoders */
static void dce_virtual_encoder_prepare(struct drm_encoder *encoder)
{
}
553
/* No-op: nothing to commit on a virtual encoder. */
static void dce_virtual_encoder_commit(struct drm_encoder *encoder)
{
}
558
/* No-op: a virtual encoder has no hardware mode registers. */
static void
dce_virtual_encoder_mode_set(struct drm_encoder *encoder,
			     struct drm_display_mode *mode,
			     struct drm_display_mode *adjusted_mode)
{
}
566
/* No-op: nothing to disable on a virtual encoder. */
static void dce_virtual_encoder_disable(struct drm_encoder *encoder)
{
}
571
/* No-op: encoder DPMS has no hardware effect here. */
static void
dce_virtual_encoder_dpms(struct drm_encoder *encoder, int mode)
{
}
577
/* Every mode passes through the virtual encoder unchanged. */
static bool dce_virtual_encoder_mode_fixup(struct drm_encoder *encoder,
					   const struct drm_display_mode *mode,
					   struct drm_display_mode *adjusted_mode)
{
	return true;
}
584
/* Helper callbacks for the (all no-op) virtual encoder. */
static const struct drm_encoder_helper_funcs dce_virtual_encoder_helper_funcs = {
	.dpms = dce_virtual_encoder_dpms,
	.mode_fixup = dce_virtual_encoder_mode_fixup,
	.prepare = dce_virtual_encoder_prepare,
	.mode_set = dce_virtual_encoder_mode_set,
	.commit = dce_virtual_encoder_commit,
	.disable = dce_virtual_encoder_disable,
};
593
/* Clean up and free a virtual encoder (allocated with kzalloc at init). */
static void dce_virtual_encoder_destroy(struct drm_encoder *encoder)
{
	drm_encoder_cleanup(encoder);
	kfree(encoder);
}
599
/* Core encoder callbacks: only destruction is needed. */
static const struct drm_encoder_funcs dce_virtual_encoder_funcs = {
	.destroy = dce_virtual_encoder_destroy,
};
603
dce_virtual_connector_encoder_init(struct amdgpu_device * adev,int index)604 static int dce_virtual_connector_encoder_init(struct amdgpu_device *adev,
605 int index)
606 {
607 struct drm_encoder *encoder;
608 struct drm_connector *connector;
609
610 /* add a new encoder */
611 encoder = kzalloc(sizeof(struct drm_encoder), GFP_KERNEL);
612 if (!encoder)
613 return -ENOMEM;
614 encoder->possible_crtcs = 1 << index;
615 drm_encoder_init(adev_to_drm(adev), encoder, &dce_virtual_encoder_funcs,
616 DRM_MODE_ENCODER_VIRTUAL, NULL);
617 drm_encoder_helper_add(encoder, &dce_virtual_encoder_helper_funcs);
618
619 connector = kzalloc(sizeof(struct drm_connector), GFP_KERNEL);
620 if (!connector) {
621 kfree(encoder);
622 return -ENOMEM;
623 }
624
625 /* add a new connector */
626 drm_connector_init(adev_to_drm(adev), connector, &dce_virtual_connector_funcs,
627 DRM_MODE_CONNECTOR_VIRTUAL);
628 drm_connector_helper_add(connector, &dce_virtual_connector_helper_funcs);
629 connector->display_info.subpixel_order = SubPixelHorizontalRGB;
630 connector->interlace_allowed = false;
631 connector->doublescan_allowed = false;
632
633 /* link them */
634 drm_connector_attach_encoder(connector, encoder);
635
636 return 0;
637 }
638
/* amdgpu display callback table: mostly stubs, no backlight support. */
static const struct amdgpu_display_funcs dce_virtual_display_funcs = {
	.bandwidth_update = &dce_virtual_bandwidth_update,
	.vblank_get_counter = &dce_virtual_vblank_get_counter,
	.backlight_set_level = NULL,
	.backlight_get_level = NULL,
	.hpd_sense = &dce_virtual_hpd_sense,
	.hpd_set_polarity = &dce_virtual_hpd_set_polarity,
	.hpd_get_gpio_reg = &dce_virtual_hpd_get_gpio_reg,
	.page_flip = &dce_virtual_page_flip,
	.page_flip_get_scanoutpos = &dce_virtual_crtc_get_scanoutpos,
	.add_encoder = NULL,
	.add_connector = NULL,
};
652
/* Install the virtual display callback table. */
static void dce_virtual_set_display_funcs(struct amdgpu_device *adev)
{
	adev->mode_info.funcs = &dce_virtual_display_funcs;
}
657
dce_virtual_pageflip(struct amdgpu_device * adev,unsigned crtc_id)658 static int dce_virtual_pageflip(struct amdgpu_device *adev,
659 unsigned crtc_id)
660 {
661 unsigned long flags;
662 struct amdgpu_crtc *amdgpu_crtc;
663 struct amdgpu_flip_work *works;
664
665 amdgpu_crtc = adev->mode_info.crtcs[crtc_id];
666
667 if (crtc_id >= adev->mode_info.num_crtc) {
668 DRM_ERROR("invalid pageflip crtc %d\n", crtc_id);
669 return -EINVAL;
670 }
671
672 /* IRQ could occur when in initial stage */
673 if (amdgpu_crtc == NULL)
674 return 0;
675
676 spin_lock_irqsave(&adev_to_drm(adev)->event_lock, flags);
677 works = amdgpu_crtc->pflip_works;
678 if (amdgpu_crtc->pflip_status != AMDGPU_FLIP_SUBMITTED) {
679 DRM_DEBUG_DRIVER("amdgpu_crtc->pflip_status = %d != "
680 "AMDGPU_FLIP_SUBMITTED(%d)\n",
681 amdgpu_crtc->pflip_status,
682 AMDGPU_FLIP_SUBMITTED);
683 spin_unlock_irqrestore(&adev_to_drm(adev)->event_lock, flags);
684 return 0;
685 }
686
687 /* page flip completed. clean up */
688 amdgpu_crtc->pflip_status = AMDGPU_FLIP_NONE;
689 amdgpu_crtc->pflip_works = NULL;
690
691 /* wakeup usersapce */
692 if (works->event)
693 drm_crtc_send_vblank_event(&amdgpu_crtc->base, works->event);
694
695 spin_unlock_irqrestore(&adev_to_drm(adev)->event_lock, flags);
696
697 drm_crtc_vblank_put(&amdgpu_crtc->base);
698 amdgpu_bo_unref(&works->old_abo);
699 kfree(works->shared);
700 kfree(works);
701
702 return 0;
703 }
704
/*
 * Software vblank emulation: fires every DCE_VIRTUAL_VBLANK_PERIOD ns,
 * delivers a vblank event and completes any pending page flip when the
 * emulated crtc interrupt is enabled, then re-arms itself.
 */
static enum hrtimer_restart dce_virtual_vblank_timer_handle(struct hrtimer *vblank_timer)
{
	struct amdgpu_crtc *amdgpu_crtc = container_of(vblank_timer,
						       struct amdgpu_crtc, vblank_timer);
	struct drm_device *ddev = amdgpu_crtc->base.dev;
	struct amdgpu_device *adev = drm_to_adev(ddev);
	/* the SMU DISP_TIMER2 slot doubles as the emulated vblank source */
	struct amdgpu_irq_src *source = adev->irq.client[AMDGPU_IRQ_CLIENTID_LEGACY].sources
		[VISLANDS30_IV_SRCID_SMU_DISP_TIMER2_TRIGGER];
	int irq_type = amdgpu_display_crtc_idx_to_irq_type(adev,
						amdgpu_crtc->crtc_id);

	if (amdgpu_irq_enabled(adev, source, irq_type)) {
		drm_handle_vblank(ddev, amdgpu_crtc->crtc_id);
		dce_virtual_pageflip(adev, amdgpu_crtc->crtc_id);
	}
	/* re-armed by hand, hence HRTIMER_NORESTART below */
	hrtimer_start(vblank_timer, DCE_VIRTUAL_VBLANK_PERIOD,
		      HRTIMER_MODE_REL);

	return HRTIMER_NORESTART;
}
725
/*
 * Record the desired vblank interrupt state for one CRTC; the emulation
 * timer consults it (via amdgpu_irq_enabled) before delivering events.
 */
static void dce_virtual_set_crtc_vblank_interrupt_state(struct amdgpu_device *adev,
							int crtc,
							enum amdgpu_interrupt_state state)
{
	if (crtc >= adev->mode_info.num_crtc || !adev->mode_info.crtcs[crtc]) {
		DRM_DEBUG("invalid crtc %d\n", crtc);
		return;
	}

	adev->mode_info.crtcs[crtc]->vsync_timer_enabled = state;
	DRM_DEBUG("[FM]set crtc %d vblank interrupt state %d\n", crtc, state);
}
738
739
/*
 * amdgpu irq .set callback: the irq "type" is the CRTC index here,
 * so forward it to the per-CRTC vblank state setter after range-checking.
 */
static int dce_virtual_set_crtc_irq_state(struct amdgpu_device *adev,
					  struct amdgpu_irq_src *source,
					  unsigned type,
					  enum amdgpu_interrupt_state state)
{
	if (type > AMDGPU_CRTC_IRQ_VBLANK6)
		return -EINVAL;

	dce_virtual_set_crtc_vblank_interrupt_state(adev, type, state);

	return 0;
}
752
/* Emulated crtc irq source: no .process since events come from the hrtimer. */
static const struct amdgpu_irq_src_funcs dce_virtual_crtc_irq_funcs = {
	.set = dce_virtual_set_crtc_irq_state,
	.process = NULL,
};
757
/* Install the emulated crtc irq source (one type slot per vblank line). */
static void dce_virtual_set_irq_funcs(struct amdgpu_device *adev)
{
	adev->crtc_irq.num_types = AMDGPU_CRTC_IRQ_VBLANK6 + 1;
	adev->crtc_irq.funcs = &dce_virtual_crtc_irq_funcs;
}
763
/* Exported IP block descriptor used by the amdgpu IP discovery code. */
const struct amdgpu_ip_block_version dce_virtual_ip_block =
{
	.type = AMD_IP_BLOCK_TYPE_DCE,
	.major = 1,
	.minor = 0,
	.rev = 0,
	.funcs = &dce_virtual_ip_funcs,
};
772