/*
 * Copyright © 2015 Intel Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
 * DEALINGS IN THE SOFTWARE.
 */

/**
 * DOC: atomic modeset support
 *
 * The functions here implement the state management and hardware programming
 * dispatch required by the atomic modeset infrastructure.
 * See intel_atomic_plane.c for the plane-specific atomic functionality.
 */

#include <drm/drm_atomic.h>
#include <drm/drm_atomic_helper.h>
#include <drm/drm_fourcc.h>
#include <drm/drm_plane_helper.h>

#include "intel_atomic.h"
#include "intel_cdclk.h"
#include "intel_display_types.h"
#include "intel_global_state.h"
#include "intel_hdcp.h"
#include "intel_psr.h"
#include "intel_sprite.h"

/**
 * intel_digital_connector_atomic_get_property - hook for connector->atomic_get_property.
 * @connector: Connector to get the property for.
 * @state: Connector state to retrieve the property from.
 * @property: Property to retrieve.
 * @val: Return value for the property.
 *
 * Returns the atomic property value for a digital connector.
 */
int intel_digital_connector_atomic_get_property(struct drm_connector *connector,
						const struct drm_connector_state *state,
						struct drm_property *property,
						u64 *val)
{
	struct drm_device *dev = connector->dev;
	struct drm_i915_private *dev_priv = to_i915(dev);
	struct intel_digital_connector_state *intel_conn_state =
		to_intel_digital_connector_state(state);

	if (property == dev_priv->force_audio_property)
		*val = intel_conn_state->force_audio;
	else if (property == dev_priv->broadcast_rgb_property)
		*val = intel_conn_state->broadcast_rgb;
	else {
		drm_dbg_atomic(&dev_priv->drm,
			       "Unknown property [PROP:%d:%s]\n",
			       property->base.id, property->name);
		return -EINVAL;
	}

	return 0;
}

/**
 * intel_digital_connector_atomic_set_property - hook for connector->atomic_set_property.
 * @connector: Connector to set the property for.
 * @state: Connector state to set the property on.
 * @property: Property to set.
 * @val: New value for the property.
 *
 * Sets the atomic property value for a digital connector.
 */
int intel_digital_connector_atomic_set_property(struct drm_connector *connector,
						struct drm_connector_state *state,
						struct drm_property *property,
						u64 val)
{
	struct drm_device *dev = connector->dev;
	struct drm_i915_private *dev_priv = to_i915(dev);
	struct intel_digital_connector_state *intel_conn_state =
		to_intel_digital_connector_state(state);

	if (property == dev_priv->force_audio_property) {
		intel_conn_state->force_audio = val;
		return 0;
	}

	if (property == dev_priv->broadcast_rgb_property) {
		intel_conn_state->broadcast_rgb = val;
		return 0;
	}

	drm_dbg_atomic(&dev_priv->drm, "Unknown property [PROP:%d:%s]\n",
		       property->base.id, property->name);
	return -EINVAL;
}

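/*
 * blob_equal - check whether two property blobs carry the same data;
 * two blobs match when both are present with identical length and
 * contents, or when both are absent.
 */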
static bool blob_equal(const struct drm_property_blob *a,
		       const struct drm_property_blob *b)
{
	if (a && b)
		return a->length == b->length &&
			!memcmp(a->data, b->data, a->length);

	return !a == !b;
}

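/**
 * intel_digital_connector_atomic_check - hook for connector->atomic_check
 * @conn: Connector to check
 * @state: the atomic state that contains the new connector state
 *
 * Runs the HDCP and PSR connector checks, and flags the new CRTC state as
 * mode changed when one of the properties handled by fastset (forced audio,
 * broadcast RGB, colorspace, aspect ratio, content type, scaling mode or
 * HDR output metadata) differs from the old state.
 *
 * Returns: 0 (the hook currently never fails)
 */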
int intel_digital_connector_atomic_check(struct drm_connector *conn,
					 struct drm_atomic_state *state)
{
	struct drm_connector_state *new_state =
		drm_atomic_get_new_connector_state(state, conn);
	struct intel_digital_connector_state *new_conn_state =
		to_intel_digital_connector_state(new_state);
	struct drm_connector_state *old_state =
		drm_atomic_get_old_connector_state(state, conn);
	struct intel_digital_connector_state *old_conn_state =
		to_intel_digital_connector_state(old_state);
	struct drm_crtc_state *crtc_state;

	intel_hdcp_atomic_check(conn, old_state, new_state);
	intel_psr_atomic_check(conn, old_state, new_state);

	if (!new_state->crtc)
		return 0;

	crtc_state = drm_atomic_get_new_crtc_state(state, new_state->crtc);

	/*
	 * These properties are handled by fastset, and might not end
	 * up in a modeset.
	 */
	if (new_conn_state->force_audio != old_conn_state->force_audio ||
	    new_conn_state->broadcast_rgb != old_conn_state->broadcast_rgb ||
	    new_conn_state->base.colorspace != old_conn_state->base.colorspace ||
	    new_conn_state->base.picture_aspect_ratio != old_conn_state->base.picture_aspect_ratio ||
	    new_conn_state->base.content_type != old_conn_state->base.content_type ||
	    new_conn_state->base.scaling_mode != old_conn_state->base.scaling_mode ||
	    !blob_equal(new_conn_state->base.hdr_output_metadata,
			old_conn_state->base.hdr_output_metadata))
		crtc_state->mode_changed = true;

	return 0;
}

/**
 * intel_digital_connector_duplicate_state - duplicate connector state
 * @connector: digital connector
 *
 * Allocates and returns a copy of the connector state (both common and
 * digital connector specific) for the specified connector.
 *
 * Returns: The newly allocated connector state, or NULL on failure.
 */
struct drm_connector_state *
intel_digital_connector_duplicate_state(struct drm_connector *connector)
{
	struct intel_digital_connector_state *state;

	state = kmemdup(connector->state, sizeof(*state), GFP_KERNEL);
	if (!state)
		return NULL;

	__drm_atomic_helper_connector_duplicate_state(connector, &state->base);
	return &state->base;
}

/**
 * intel_connector_needs_modeset - check if connector needs a modeset
 * @state: the atomic state corresponding to this modeset
 * @connector: the connector
 */
bool
intel_connector_needs_modeset(struct intel_atomic_state *state,
			      struct drm_connector *connector)
{
	const struct drm_connector_state *old_conn_state, *new_conn_state;

	old_conn_state = drm_atomic_get_old_connector_state(&state->base, connector);
	new_conn_state = drm_atomic_get_new_connector_state(&state->base, connector);

	return old_conn_state->crtc != new_conn_state->crtc ||
	       (new_conn_state->crtc &&
		drm_atomic_crtc_needs_modeset(drm_atomic_get_new_crtc_state(&state->base,
									     new_conn_state->crtc)));
}

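/**
 * intel_atomic_get_digital_connector_state - get digital connector state
 * @state: intel atomic state
 * @connector: connector to get the state for
 *
 * Adds the connector to the atomic state, if not already there, and returns
 * its Intel digital connector state.
 *
 * Returns: The digital connector state, or an ERR_PTR() on failure.
 */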
struct intel_digital_connector_state *
intel_atomic_get_digital_connector_state(struct intel_atomic_state *state,
					 struct intel_connector *connector)
{
	struct drm_connector_state *conn_state;

	conn_state = drm_atomic_get_connector_state(&state->base,
						    &connector->base);
	if (IS_ERR(conn_state))
		return ERR_CAST(conn_state);

	return to_intel_digital_connector_state(conn_state);
}

/**
 * intel_crtc_duplicate_state - duplicate crtc state
 * @crtc: drm crtc
 *
 * Allocates and returns a copy of the crtc state (both common and
 * Intel-specific) for the specified crtc.
 *
 * Returns: The newly allocated crtc state, or NULL on failure.
 */
struct drm_crtc_state *
intel_crtc_duplicate_state(struct drm_crtc *crtc)
{
	const struct intel_crtc_state *old_crtc_state = to_intel_crtc_state(crtc->state);
	struct intel_crtc_state *crtc_state;

	crtc_state = kmemdup(old_crtc_state, sizeof(*crtc_state), GFP_KERNEL);
	if (!crtc_state)
		return NULL;

	__drm_atomic_helper_crtc_duplicate_state(crtc, &crtc_state->uapi);

	/* grab extra references on the color blobs copied by kmemdup() */
	if (crtc_state->hw.degamma_lut)
		drm_property_blob_get(crtc_state->hw.degamma_lut);
	if (crtc_state->hw.ctm)
		drm_property_blob_get(crtc_state->hw.ctm);
	if (crtc_state->hw.gamma_lut)
		drm_property_blob_get(crtc_state->hw.gamma_lut);

	crtc_state->update_pipe = false;
	crtc_state->disable_lp_wm = false;
	crtc_state->disable_cxsr = false;
	crtc_state->update_wm_pre = false;
	crtc_state->update_wm_post = false;
	crtc_state->fifo_changed = false;
	crtc_state->preload_luts = false;
	crtc_state->inherited = false;
	crtc_state->wm.need_postvbl_update = false;
	crtc_state->fb_bits = 0;
	crtc_state->update_planes = 0;
	crtc_state->dsb = NULL;

	return &crtc_state->uapi;
}

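/*
 * intel_crtc_put_color_blobs - drop the references on the color management
 * blobs (degamma LUT, gamma LUT, CTM) held in the hw crtc state.
 */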
static void intel_crtc_put_color_blobs(struct intel_crtc_state *crtc_state)
{
	drm_property_blob_put(crtc_state->hw.degamma_lut);
	drm_property_blob_put(crtc_state->hw.gamma_lut);
	drm_property_blob_put(crtc_state->hw.ctm);
}

void intel_crtc_free_hw_state(struct intel_crtc_state *crtc_state)
{
	intel_crtc_put_color_blobs(crtc_state);
}

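/*
 * intel_crtc_copy_color_blobs - sync the hw color blobs with the uapi state;
 * drm_property_replace_blob() takes a reference on the new blob and drops
 * the reference on the blob it replaces.
 */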
void intel_crtc_copy_color_blobs(struct intel_crtc_state *crtc_state)
{
	drm_property_replace_blob(&crtc_state->hw.degamma_lut,
				  crtc_state->uapi.degamma_lut);
	drm_property_replace_blob(&crtc_state->hw.gamma_lut,
				  crtc_state->uapi.gamma_lut);
	drm_property_replace_blob(&crtc_state->hw.ctm,
				  crtc_state->uapi.ctm);
}

/**
 * intel_crtc_destroy_state - destroy crtc state
 * @crtc: drm crtc
 * @state: the state to destroy
 *
 * Destroys the crtc state (both common and Intel-specific) for the
 * specified crtc.
 */
void
intel_crtc_destroy_state(struct drm_crtc *crtc,
			 struct drm_crtc_state *state)
{
	struct intel_crtc_state *crtc_state = to_intel_crtc_state(state);

	drm_WARN_ON(crtc->dev, crtc_state->dsb);

	__drm_atomic_helper_crtc_destroy_state(&crtc_state->uapi);
	intel_crtc_free_hw_state(crtc_state);
	kfree(crtc_state);
}

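/*
 * intel_atomic_setup_scaler - assign one scaler to a scaler user
 *
 * If *scaler_id is still unassigned (< 0), pick a free scaler for the
 * given user (the crtc panel fitter when @plane_state is NULL, a plane
 * otherwise), then select the scaler mode appropriate for the platform
 * and framebuffer format.
 */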
static void intel_atomic_setup_scaler(struct intel_crtc_scaler_state *scaler_state,
				      int num_scalers_need, struct intel_crtc *intel_crtc,
				      const char *name, int idx,
				      struct intel_plane_state *plane_state,
				      int *scaler_id)
{
	struct drm_i915_private *dev_priv = to_i915(intel_crtc->base.dev);
	int j;
	u32 mode;

	if (*scaler_id < 0) {
		/* find a free scaler */
		for (j = 0; j < intel_crtc->num_scalers; j++) {
			if (scaler_state->scalers[j].in_use)
				continue;

			*scaler_id = j;
			scaler_state->scalers[*scaler_id].in_use = 1;
			break;
		}
	}

	if (drm_WARN(&dev_priv->drm, *scaler_id < 0,
		     "Cannot find scaler for %s:%d\n", name, idx))
		return;

	/* set scaler mode */
	if (plane_state && plane_state->hw.fb &&
	    plane_state->hw.fb->format->is_yuv &&
	    plane_state->hw.fb->format->num_planes > 1) {
		struct intel_plane *plane = to_intel_plane(plane_state->uapi.plane);

		if (IS_GEN(dev_priv, 9) &&
		    !IS_GEMINILAKE(dev_priv)) {
			mode = SKL_PS_SCALER_MODE_NV12;
		} else if (icl_is_hdr_plane(dev_priv, plane->id)) {
			/*
			 * On gen11+'s HDR planes we only use the scaler for
			 * scaling. They have a dedicated chroma upsampler, so
			 * we don't need the scaler to upsample the UV plane.
			 */
			mode = PS_SCALER_MODE_NORMAL;
		} else {
			struct intel_plane *linked =
				plane_state->planar_linked_plane;

			mode = PS_SCALER_MODE_PLANAR;

			if (linked)
				mode |= PS_PLANE_Y_SEL(linked->id);
		}
	} else if (INTEL_GEN(dev_priv) > 9 || IS_GEMINILAKE(dev_priv)) {
		mode = PS_SCALER_MODE_NORMAL;
	} else if (num_scalers_need == 1 && intel_crtc->num_scalers > 1) {
		/*
		 * When only 1 scaler is in use on a pipe with 2 scalers,
		 * scaler 0 operates in high quality (HQ) mode.
		 * In this case use scaler 0 to take advantage of HQ mode.
		 */
		scaler_state->scalers[*scaler_id].in_use = 0;
		*scaler_id = 0;
		scaler_state->scalers[0].in_use = 1;
		mode = SKL_PS_SCALER_MODE_HQ;
	} else {
		mode = SKL_PS_SCALER_MODE_DYN;
	}

	drm_dbg_kms(&dev_priv->drm, "Attached scaler id %u.%u to %s:%d\n",
		    intel_crtc->pipe, *scaler_id, name, idx);
	scaler_state->scalers[*scaler_id].mode = mode;
}

/**
 * intel_atomic_setup_scalers() - setup scalers for crtc per staged requests
 * @dev_priv: i915 device
 * @intel_crtc: intel crtc
 * @crtc_state: incoming crtc_state to validate and setup scalers
 *
 * This function sets up scalers based on the staged scaling requests for
 * a crtc and its planes. It is called from the crtc level check path. If the
 * requests are supportable, it assigns scalers to the requested planes and crtc.
 *
 * This function takes into account the current scaler(s) in use by any planes
 * that are not part of this atomic state.
 *
 * Returns:
 *         0 - scalers were set up successfully
 *         error code - otherwise
 */
int intel_atomic_setup_scalers(struct drm_i915_private *dev_priv,
			       struct intel_crtc *intel_crtc,
			       struct intel_crtc_state *crtc_state)
{
	struct drm_plane *plane = NULL;
	struct intel_plane *intel_plane;
	struct intel_plane_state *plane_state = NULL;
	struct intel_crtc_scaler_state *scaler_state =
		&crtc_state->scaler_state;
	struct drm_atomic_state *drm_state = crtc_state->uapi.state;
	struct intel_atomic_state *intel_state = to_intel_atomic_state(drm_state);
	int num_scalers_need;
	int i;

	num_scalers_need = hweight32(scaler_state->scaler_users);

	/*
	 * High level flow:
	 * - staged scaler requests are already in scaler_state->scaler_users
	 * - check whether staged scaling requests can be supported
	 * - add planes using scalers that aren't in current transaction
	 * - assign scalers to requested users
	 * - as part of plane commit, scalers will be committed
	 *   (i.e., either attached or detached) to respective planes in hw
	 * - as part of crtc_commit, scaler will be either attached or detached
	 *   to crtc in hw
	 */

	/* fail if required scalers > available scalers */
	if (num_scalers_need > intel_crtc->num_scalers) {
		drm_dbg_kms(&dev_priv->drm,
			    "Too many scaling requests %d > %d\n",
			    num_scalers_need, intel_crtc->num_scalers);
		return -EINVAL;
	}

	/* walk through scaler_users bits and start assigning scalers */
	for (i = 0; i < sizeof(scaler_state->scaler_users) * 8; i++) {
		int *scaler_id;
		const char *name;
		int idx;

		/* skip if scaler not required */
		if (!(scaler_state->scaler_users & (1 << i)))
			continue;

		if (i == SKL_CRTC_INDEX) {
			name = "CRTC";
			idx = intel_crtc->base.base.id;

			/* panel fitter case: assign as a crtc scaler */
			scaler_id = &scaler_state->scaler_id;
		} else {
			name = "PLANE";

			/* plane scaler case: assign as a plane scaler */
			/* find the plane that set the bit as scaler_user */
			plane = drm_state->planes[i].ptr;

			/*
			 * to enable/disable hq mode, add planes that are using a
			 * scaler into this transaction
			 */
			if (!plane) {
				struct drm_plane_state *state;

				/*
				 * GLK+ scalers don't have a HQ mode so it
				 * isn't necessary to change between HQ and dyn mode
				 * on those platforms.
				 */
				if (INTEL_GEN(dev_priv) >= 10 || IS_GEMINILAKE(dev_priv))
					continue;

				plane = drm_plane_from_index(&dev_priv->drm, i);
				state = drm_atomic_get_plane_state(drm_state, plane);
				if (IS_ERR(state)) {
					drm_dbg_kms(&dev_priv->drm,
						    "Failed to add [PLANE:%d] to drm_state\n",
						    plane->base.id);
					return PTR_ERR(state);
				}
			}

			intel_plane = to_intel_plane(plane);
			idx = plane->base.id;

			/* plane on different crtc cannot be a scaler user of this crtc */
			if (drm_WARN_ON(&dev_priv->drm,
					intel_plane->pipe != intel_crtc->pipe))
				continue;

			plane_state = intel_atomic_get_new_plane_state(intel_state,
								       intel_plane);
			scaler_id = &plane_state->scaler_id;
		}

		intel_atomic_setup_scaler(scaler_state, num_scalers_need,
					  intel_crtc, name, idx,
					  plane_state, scaler_id);
	}

	return 0;
}

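/*
 * intel_atomic_state_alloc - allocate the driver-subclassed atomic state
 *
 * Returns the base drm_atomic_state embedded in a freshly allocated
 * struct intel_atomic_state, or NULL if allocation or initialization fails.
 */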
struct drm_atomic_state *
intel_atomic_state_alloc(struct drm_device *dev)
{
	struct intel_atomic_state *state = kzalloc(sizeof(*state), GFP_KERNEL);

	if (!state || drm_atomic_state_init(dev, &state->base) < 0) {
		kfree(state);
		return NULL;
	}

	return &state->base;
}

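/*
 * intel_atomic_state_free - free the driver-subclassed atomic state
 *
 * Releases the base atomic state, the global object state array and the
 * commit_ready fence before freeing the state itself.
 */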
void intel_atomic_state_free(struct drm_atomic_state *_state)
{
	struct intel_atomic_state *state = to_intel_atomic_state(_state);

	drm_atomic_state_default_release(&state->base);
	kfree(state->global_objs);

	i915_sw_fence_fini(&state->commit_ready);

	kfree(state);
}

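/*
 * intel_atomic_state_clear - clear the atomic state for reuse
 *
 * Clears both the base atomic state and the attached global object state,
 * and resets the dpll_set and modeset flags.
 */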
void intel_atomic_state_clear(struct drm_atomic_state *s)
{
	struct intel_atomic_state *state = to_intel_atomic_state(s);

	drm_atomic_state_default_clear(&state->base);
	intel_atomic_clear_global_state(state);

	state->dpll_set = state->modeset = false;
}

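/*
 * intel_atomic_get_crtc_state - add the crtc to the atomic state, if needed,
 * and return its Intel crtc state, or an ERR_PTR() on failure.
 */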
struct intel_crtc_state *
intel_atomic_get_crtc_state(struct drm_atomic_state *state,
			    struct intel_crtc *crtc)
{
	struct drm_crtc_state *crtc_state;

	crtc_state = drm_atomic_get_crtc_state(state, &crtc->base);
	if (IS_ERR(crtc_state))
		return ERR_CAST(crtc_state);

	return to_intel_crtc_state(crtc_state);
}