• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 // SPDX-License-Identifier: MIT
2 /*
3  * Copyright © 2021 Intel Corporation
4  */
5 
6 #include "i915_drv.h"
7 #include "intel_atomic.h"
8 #include "intel_de.h"
9 #include "intel_display_types.h"
10 #include "intel_drrs.h"
11 #include "intel_panel.h"
12 
13 /**
14  * DOC: Display Refresh Rate Switching (DRRS)
15  *
16  * Display Refresh Rate Switching (DRRS) is a power conservation feature
17  * which enables switching between low and high refresh rates,
18  * dynamically, based on the usage scenario. This feature is applicable
19  * for internal panels.
20  *
21  * Indication that the panel supports DRRS is given by the panel EDID, which
22  * would list multiple refresh rates for one resolution.
23  *
24  * DRRS is of 2 types - static and seamless.
25  * Static DRRS involves changing refresh rate (RR) by doing a full modeset
26  * (may appear as a blink on screen) and is used in dock-undock scenario.
27  * Seamless DRRS involves changing RR without any visual effect to the user
28  * and can be used during normal system usage. This is done by programming
29  * certain registers.
30  *
31  * Support for static/seamless DRRS may be indicated in the VBT based on
32  * inputs from the panel spec.
33  *
34  * DRRS saves power by switching to low RR based on usage scenarios.
35  *
36  * The implementation is based on frontbuffer tracking implementation.  When
37  * there is a disturbance on the screen triggered by user activity or a periodic
38  * system activity, DRRS is disabled (RR is changed to high RR).  When there is
39  * no movement on screen, after a timeout of 1 second, a switch to low RR is
40  * made.
41  *
42  * For integration with frontbuffer tracking code, intel_edp_drrs_invalidate()
43  * and intel_edp_drrs_flush() are called.
44  *
45  * DRRS can be further extended to support other internal panels and also
46  * the scenario of video playback wherein RR is set based on the rate
47  * requested by userspace.
48  */
49 
/**
 * intel_dp_drrs_compute_config - Compute seamless DRRS state for a crtc
 * @intel_dp: DP struct
 * @pipe_config: crtc state being computed
 * @output_bpp: effective bits per pixel on the link
 * @constant_n: whether to use a constant N in the M/N computation
 *
 * When seamless DRRS is possible (no VRR, no PSR, a panel downclock mode
 * exists and the VBT reports seamless support), mark @pipe_config as DRRS
 * capable and precompute the M2/N2 link values for the downclock mode.
 */
void
intel_dp_drrs_compute_config(struct intel_dp *intel_dp,
			     struct intel_crtc_state *pipe_config,
			     int output_bpp, bool constant_n)
{
	struct intel_connector *intel_connector = intel_dp->attached_connector;
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
	int pixel_clock;

	/* VRR already controls the refresh rate; DRRS is not used with it. */
	if (pipe_config->vrr.enable)
		return;

	/*
	 * DRRS and PSR can't be enable together, so giving preference to PSR
	 * as it allows more power-savings by complete shutting down display,
	 * so to guarantee this, intel_dp_drrs_compute_config() must be called
	 * after intel_psr_compute_config().
	 */
	if (pipe_config->has_psr)
		return;

	/* Seamless DRRS needs both a panel downclock mode and VBT support. */
	if (!intel_connector->panel.downclock_mode ||
	    dev_priv->drrs.type != SEAMLESS_DRRS_SUPPORT)
		return;

	pipe_config->has_drrs = true;

	/*
	 * With an MSO pixel splitter each link segment carries only a
	 * fraction of the pixel clock.
	 */
	pixel_clock = intel_connector->panel.downclock_mode->clock;
	if (pipe_config->splitter.enable)
		pixel_clock /= pipe_config->splitter.link_count;

	/* M2/N2 are the link values used when running at the low RR. */
	intel_link_compute_m_n(output_bpp, pipe_config->lane_count, pixel_clock,
			       pipe_config->port_clock, &pipe_config->dp_m2_n2,
			       constant_n, pipe_config->fec_enable);

	/* FIXME: abstract this better */
	if (pipe_config->splitter.enable)
		pipe_config->dp_m2_n2.gmch_m *= pipe_config->splitter.link_count;
}
89 
90 /**
91  * intel_dp_set_drrs_state - program registers for RR switch to take effect
92  * @dev_priv: i915 device
93  * @crtc_state: a pointer to the active intel_crtc_state
94  * @refresh_rate: RR to be programmed
95  *
96  * This function gets called when refresh rate (RR) has to be changed from
97  * one frequency to another. Switches can be between high and low RR
98  * supported by the panel or to any other RR based on media playback (in
99  * this case, RR value needs to be passed from user space).
100  *
101  * The caller of this function needs to take a lock on dev_priv->drrs.
102  */
intel_dp_set_drrs_state(struct drm_i915_private * dev_priv,const struct intel_crtc_state * crtc_state,int refresh_rate)103 static void intel_dp_set_drrs_state(struct drm_i915_private *dev_priv,
104 				    const struct intel_crtc_state *crtc_state,
105 				    int refresh_rate)
106 {
107 	struct intel_dp *intel_dp = dev_priv->drrs.dp;
108 	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
109 	enum drrs_refresh_rate_type index = DRRS_HIGH_RR;
110 
111 	if (refresh_rate <= 0) {
112 		drm_dbg_kms(&dev_priv->drm,
113 			    "Refresh rate should be positive non-zero.\n");
114 		return;
115 	}
116 
117 	if (intel_dp == NULL) {
118 		drm_dbg_kms(&dev_priv->drm, "DRRS not supported.\n");
119 		return;
120 	}
121 
122 	if (!crtc) {
123 		drm_dbg_kms(&dev_priv->drm,
124 			    "DRRS: intel_crtc not initialized\n");
125 		return;
126 	}
127 
128 	if (dev_priv->drrs.type < SEAMLESS_DRRS_SUPPORT) {
129 		drm_dbg_kms(&dev_priv->drm, "Only Seamless DRRS supported.\n");
130 		return;
131 	}
132 
133 	if (drm_mode_vrefresh(intel_dp->attached_connector->panel.downclock_mode) ==
134 			refresh_rate)
135 		index = DRRS_LOW_RR;
136 
137 	if (index == dev_priv->drrs.refresh_rate_type) {
138 		drm_dbg_kms(&dev_priv->drm,
139 			    "DRRS requested for previously set RR...ignoring\n");
140 		return;
141 	}
142 
143 	if (!crtc_state->hw.active) {
144 		drm_dbg_kms(&dev_priv->drm,
145 			    "eDP encoder disabled. CRTC not Active\n");
146 		return;
147 	}
148 
149 	if (DISPLAY_VER(dev_priv) >= 8 && !IS_CHERRYVIEW(dev_priv)) {
150 		switch (index) {
151 		case DRRS_HIGH_RR:
152 			intel_dp_set_m_n(crtc_state, M1_N1);
153 			break;
154 		case DRRS_LOW_RR:
155 			intel_dp_set_m_n(crtc_state, M2_N2);
156 			break;
157 		case DRRS_MAX_RR:
158 		default:
159 			drm_err(&dev_priv->drm,
160 				"Unsupported refreshrate type\n");
161 		}
162 	} else if (DISPLAY_VER(dev_priv) > 6) {
163 		i915_reg_t reg = PIPECONF(crtc_state->cpu_transcoder);
164 		u32 val;
165 
166 		val = intel_de_read(dev_priv, reg);
167 		if (index > DRRS_HIGH_RR) {
168 			if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv))
169 				val |= PIPECONF_EDP_RR_MODE_SWITCH_VLV;
170 			else
171 				val |= PIPECONF_EDP_RR_MODE_SWITCH;
172 		} else {
173 			if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv))
174 				val &= ~PIPECONF_EDP_RR_MODE_SWITCH_VLV;
175 			else
176 				val &= ~PIPECONF_EDP_RR_MODE_SWITCH;
177 		}
178 		intel_de_write(dev_priv, reg, val);
179 	}
180 
181 	dev_priv->drrs.refresh_rate_type = index;
182 
183 	drm_dbg_kms(&dev_priv->drm, "eDP Refresh Rate set to : %dHz\n",
184 		    refresh_rate);
185 }
186 
187 static void
intel_edp_drrs_enable_locked(struct intel_dp * intel_dp)188 intel_edp_drrs_enable_locked(struct intel_dp *intel_dp)
189 {
190 	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
191 
192 	dev_priv->drrs.busy_frontbuffer_bits = 0;
193 	dev_priv->drrs.dp = intel_dp;
194 }
195 
196 /**
197  * intel_edp_drrs_enable - init drrs struct if supported
198  * @intel_dp: DP struct
199  * @crtc_state: A pointer to the active crtc state.
200  *
201  * Initializes frontbuffer_bits and drrs.dp
202  */
intel_edp_drrs_enable(struct intel_dp * intel_dp,const struct intel_crtc_state * crtc_state)203 void intel_edp_drrs_enable(struct intel_dp *intel_dp,
204 			   const struct intel_crtc_state *crtc_state)
205 {
206 	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
207 
208 	if (!crtc_state->has_drrs)
209 		return;
210 
211 	drm_dbg_kms(&dev_priv->drm, "Enabling DRRS\n");
212 
213 	mutex_lock(&dev_priv->drrs.mutex);
214 
215 	if (dev_priv->drrs.dp) {
216 		drm_warn(&dev_priv->drm, "DRRS already enabled\n");
217 		goto unlock;
218 	}
219 
220 	intel_edp_drrs_enable_locked(intel_dp);
221 
222 unlock:
223 	mutex_unlock(&dev_priv->drrs.mutex);
224 }
225 
226 static void
intel_edp_drrs_disable_locked(struct intel_dp * intel_dp,const struct intel_crtc_state * crtc_state)227 intel_edp_drrs_disable_locked(struct intel_dp *intel_dp,
228 			      const struct intel_crtc_state *crtc_state)
229 {
230 	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
231 
232 	if (dev_priv->drrs.refresh_rate_type == DRRS_LOW_RR) {
233 		int refresh;
234 
235 		refresh = drm_mode_vrefresh(intel_dp->attached_connector->panel.fixed_mode);
236 		intel_dp_set_drrs_state(dev_priv, crtc_state, refresh);
237 	}
238 
239 	dev_priv->drrs.dp = NULL;
240 }
241 
242 /**
243  * intel_edp_drrs_disable - Disable DRRS
244  * @intel_dp: DP struct
245  * @old_crtc_state: Pointer to old crtc_state.
246  *
247  */
intel_edp_drrs_disable(struct intel_dp * intel_dp,const struct intel_crtc_state * old_crtc_state)248 void intel_edp_drrs_disable(struct intel_dp *intel_dp,
249 			    const struct intel_crtc_state *old_crtc_state)
250 {
251 	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
252 
253 	if (!old_crtc_state->has_drrs)
254 		return;
255 
256 	mutex_lock(&dev_priv->drrs.mutex);
257 	if (!dev_priv->drrs.dp) {
258 		mutex_unlock(&dev_priv->drrs.mutex);
259 		return;
260 	}
261 
262 	intel_edp_drrs_disable_locked(intel_dp, old_crtc_state);
263 	mutex_unlock(&dev_priv->drrs.mutex);
264 
265 	cancel_delayed_work_sync(&dev_priv->drrs.work);
266 }
267 
268 /**
269  * intel_edp_drrs_update - Update DRRS state
270  * @intel_dp: Intel DP
271  * @crtc_state: new CRTC state
272  *
273  * This function will update DRRS states, disabling or enabling DRRS when
274  * executing fastsets. For full modeset, intel_edp_drrs_disable() and
275  * intel_edp_drrs_enable() should be called instead.
276  */
277 void
intel_edp_drrs_update(struct intel_dp * intel_dp,const struct intel_crtc_state * crtc_state)278 intel_edp_drrs_update(struct intel_dp *intel_dp,
279 		      const struct intel_crtc_state *crtc_state)
280 {
281 	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
282 
283 	if (dev_priv->drrs.type != SEAMLESS_DRRS_SUPPORT)
284 		return;
285 
286 	mutex_lock(&dev_priv->drrs.mutex);
287 
288 	/* New state matches current one? */
289 	if (crtc_state->has_drrs == !!dev_priv->drrs.dp)
290 		goto unlock;
291 
292 	if (crtc_state->has_drrs)
293 		intel_edp_drrs_enable_locked(intel_dp);
294 	else
295 		intel_edp_drrs_disable_locked(intel_dp, crtc_state);
296 
297 unlock:
298 	mutex_unlock(&dev_priv->drrs.mutex);
299 }
300 
intel_edp_drrs_downclock_work(struct work_struct * work)301 static void intel_edp_drrs_downclock_work(struct work_struct *work)
302 {
303 	struct drm_i915_private *dev_priv =
304 		container_of(work, typeof(*dev_priv), drrs.work.work);
305 	struct intel_dp *intel_dp;
306 
307 	mutex_lock(&dev_priv->drrs.mutex);
308 
309 	intel_dp = dev_priv->drrs.dp;
310 
311 	if (!intel_dp)
312 		goto unlock;
313 
314 	/*
315 	 * The delayed work can race with an invalidate hence we need to
316 	 * recheck.
317 	 */
318 
319 	if (dev_priv->drrs.busy_frontbuffer_bits)
320 		goto unlock;
321 
322 	if (dev_priv->drrs.refresh_rate_type != DRRS_LOW_RR) {
323 		struct drm_crtc *crtc = dp_to_dig_port(intel_dp)->base.base.crtc;
324 
325 		intel_dp_set_drrs_state(dev_priv, to_intel_crtc(crtc)->config,
326 					drm_mode_vrefresh(intel_dp->attached_connector->panel.downclock_mode));
327 	}
328 
329 unlock:
330 	mutex_unlock(&dev_priv->drrs.mutex);
331 }
332 
333 /**
334  * intel_edp_drrs_invalidate - Disable Idleness DRRS
335  * @dev_priv: i915 device
336  * @frontbuffer_bits: frontbuffer plane tracking bits
337  *
338  * This function gets called everytime rendering on the given planes start.
339  * Hence DRRS needs to be Upclocked, i.e. (LOW_RR -> HIGH_RR).
340  *
341  * Dirty frontbuffers relevant to DRRS are tracked in busy_frontbuffer_bits.
342  */
intel_edp_drrs_invalidate(struct drm_i915_private * dev_priv,unsigned int frontbuffer_bits)343 void intel_edp_drrs_invalidate(struct drm_i915_private *dev_priv,
344 			       unsigned int frontbuffer_bits)
345 {
346 	struct intel_dp *intel_dp;
347 	struct drm_crtc *crtc;
348 	enum pipe pipe;
349 
350 	if (dev_priv->drrs.type == DRRS_NOT_SUPPORTED)
351 		return;
352 
353 	cancel_delayed_work(&dev_priv->drrs.work);
354 
355 	mutex_lock(&dev_priv->drrs.mutex);
356 
357 	intel_dp = dev_priv->drrs.dp;
358 	if (!intel_dp) {
359 		mutex_unlock(&dev_priv->drrs.mutex);
360 		return;
361 	}
362 
363 	crtc = dp_to_dig_port(intel_dp)->base.base.crtc;
364 	pipe = to_intel_crtc(crtc)->pipe;
365 
366 	frontbuffer_bits &= INTEL_FRONTBUFFER_ALL_MASK(pipe);
367 	dev_priv->drrs.busy_frontbuffer_bits |= frontbuffer_bits;
368 
369 	/* invalidate means busy screen hence upclock */
370 	if (frontbuffer_bits && dev_priv->drrs.refresh_rate_type == DRRS_LOW_RR)
371 		intel_dp_set_drrs_state(dev_priv, to_intel_crtc(crtc)->config,
372 					drm_mode_vrefresh(intel_dp->attached_connector->panel.fixed_mode));
373 
374 	mutex_unlock(&dev_priv->drrs.mutex);
375 }
376 
377 /**
378  * intel_edp_drrs_flush - Restart Idleness DRRS
379  * @dev_priv: i915 device
380  * @frontbuffer_bits: frontbuffer plane tracking bits
381  *
382  * This function gets called every time rendering on the given planes has
383  * completed or flip on a crtc is completed. So DRRS should be upclocked
384  * (LOW_RR -> HIGH_RR). And also Idleness detection should be started again,
385  * if no other planes are dirty.
386  *
387  * Dirty frontbuffers relevant to DRRS are tracked in busy_frontbuffer_bits.
388  */
intel_edp_drrs_flush(struct drm_i915_private * dev_priv,unsigned int frontbuffer_bits)389 void intel_edp_drrs_flush(struct drm_i915_private *dev_priv,
390 			  unsigned int frontbuffer_bits)
391 {
392 	struct intel_dp *intel_dp;
393 	struct drm_crtc *crtc;
394 	enum pipe pipe;
395 
396 	if (dev_priv->drrs.type == DRRS_NOT_SUPPORTED)
397 		return;
398 
399 	cancel_delayed_work(&dev_priv->drrs.work);
400 
401 	mutex_lock(&dev_priv->drrs.mutex);
402 
403 	intel_dp = dev_priv->drrs.dp;
404 	if (!intel_dp) {
405 		mutex_unlock(&dev_priv->drrs.mutex);
406 		return;
407 	}
408 
409 	crtc = dp_to_dig_port(intel_dp)->base.base.crtc;
410 	pipe = to_intel_crtc(crtc)->pipe;
411 
412 	frontbuffer_bits &= INTEL_FRONTBUFFER_ALL_MASK(pipe);
413 	dev_priv->drrs.busy_frontbuffer_bits &= ~frontbuffer_bits;
414 
415 	/* flush means busy screen hence upclock */
416 	if (frontbuffer_bits && dev_priv->drrs.refresh_rate_type == DRRS_LOW_RR)
417 		intel_dp_set_drrs_state(dev_priv, to_intel_crtc(crtc)->config,
418 					drm_mode_vrefresh(intel_dp->attached_connector->panel.fixed_mode));
419 
420 	/*
421 	 * flush also means no more activity hence schedule downclock, if all
422 	 * other fbs are quiescent too
423 	 */
424 	if (!dev_priv->drrs.busy_frontbuffer_bits)
425 		schedule_delayed_work(&dev_priv->drrs.work,
426 				      msecs_to_jiffies(1000));
427 	mutex_unlock(&dev_priv->drrs.mutex);
428 }
429 
430 /**
431  * intel_dp_drrs_init - Init basic DRRS work and mutex.
432  * @connector: eDP connector
433  * @fixed_mode: preferred mode of panel
434  *
435  * This function is  called only once at driver load to initialize basic
436  * DRRS stuff.
437  *
438  * Returns:
439  * Downclock mode if panel supports it, else return NULL.
440  * DRRS support is determined by the presence of downclock mode (apart
441  * from VBT setting).
442  */
443 struct drm_display_mode *
intel_dp_drrs_init(struct intel_connector * connector,struct drm_display_mode * fixed_mode)444 intel_dp_drrs_init(struct intel_connector *connector,
445 		   struct drm_display_mode *fixed_mode)
446 {
447 	struct drm_i915_private *dev_priv = to_i915(connector->base.dev);
448 	struct intel_encoder *encoder = connector->encoder;
449 	struct drm_display_mode *downclock_mode = NULL;
450 
451 	INIT_DELAYED_WORK(&dev_priv->drrs.work, intel_edp_drrs_downclock_work);
452 	mutex_init(&dev_priv->drrs.mutex);
453 
454 	if (DISPLAY_VER(dev_priv) <= 6) {
455 		drm_dbg_kms(&dev_priv->drm,
456 			    "DRRS supported for Gen7 and above\n");
457 		return NULL;
458 	}
459 
460 	if ((DISPLAY_VER(dev_priv) < 8 && !HAS_GMCH(dev_priv)) &&
461 	    encoder->port != PORT_A) {
462 		drm_dbg_kms(&dev_priv->drm,
463 			    "DRRS only supported on eDP port A\n");
464 		return NULL;
465 	}
466 
467 	if (dev_priv->vbt.drrs_type != SEAMLESS_DRRS_SUPPORT) {
468 		drm_dbg_kms(&dev_priv->drm, "VBT doesn't support DRRS\n");
469 		return NULL;
470 	}
471 
472 	downclock_mode = intel_panel_edid_downclock_mode(connector, fixed_mode);
473 	if (!downclock_mode) {
474 		drm_dbg_kms(&dev_priv->drm,
475 			    "Downclock mode is not found. DRRS not supported\n");
476 		return NULL;
477 	}
478 
479 	dev_priv->drrs.type = dev_priv->vbt.drrs_type;
480 
481 	dev_priv->drrs.refresh_rate_type = DRRS_HIGH_RR;
482 	drm_dbg_kms(&dev_priv->drm,
483 		    "seamless DRRS supported for eDP panel.\n");
484 	return downclock_mode;
485 }
486