// SPDX-License-Identifier: MIT
/*
 * Copyright © 2021 Intel Corporation
 */

#include "i915_drv.h"
#include "intel_atomic.h"
#include "intel_de.h"
#include "intel_display_types.h"
#include "intel_drrs.h"
#include "intel_panel.h"

/**
 * DOC: Display Refresh Rate Switching (DRRS)
 *
 * Display Refresh Rate Switching (DRRS) is a power conservation feature
 * which enables switching between low and high refresh rates,
 * dynamically, based on the usage scenario. This feature is applicable
 * for internal panels.
 *
 * Indication that the panel supports DRRS is given by the panel EDID, which
 * would list multiple refresh rates for one resolution.
 *
 * DRRS is of 2 types - static and seamless.
 * Static DRRS involves changing the refresh rate (RR) by doing a full modeset
 * (may appear as a blink on screen) and is used in dock-undock scenarios.
 * Seamless DRRS involves changing the RR without any visual effect to the
 * user and can be used during normal system usage. This is done by
 * programming certain registers.
 *
 * Support for static/seamless DRRS may be indicated in the VBT based on
 * inputs from the panel spec.
 *
 * DRRS saves power by switching to a low RR based on usage scenarios.
 *
 * The implementation is based on frontbuffer tracking. When there is a
 * disturbance on the screen triggered by user activity or a periodic system
 * activity, DRRS is disabled (RR is changed to high RR). When there is no
 * movement on screen, after a timeout of 1 second, a switch to low RR is
 * made.
 *
 * For integration with frontbuffer tracking code, intel_drrs_invalidate()
 * and intel_drrs_flush() are called.
 *
 * DRRS can be further extended to support other internal panels and also
 * the scenario of video playback wherein RR is set based on the rate
 * requested by userspace.
 */

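/*
 * Compute the DRRS state for @pipe_config: mark DRRS as in use and derive
 * the M2/N2 link parameters from the panel's downclock mode. Bails out when
 * VRR or PSR is enabled, or when the panel/VBT does not advertise seamless
 * DRRS.
 */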
void
intel_drrs_compute_config(struct intel_dp *intel_dp,
			  struct intel_crtc_state *pipe_config,
			  int output_bpp, bool constant_n)
{
	struct intel_connector *intel_connector = intel_dp->attached_connector;
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
	int pixel_clock;

	if (pipe_config->vrr.enable)
		return;

	/*
	 * DRRS and PSR can't be enabled together, so give preference to PSR
	 * as it allows more power savings by completely shutting down the
	 * display. To guarantee this, intel_drrs_compute_config() must be
	 * called after intel_psr_compute_config().
	 */
	if (pipe_config->has_psr)
		return;

	if (!intel_connector->panel.downclock_mode ||
	    dev_priv->drrs.type != SEAMLESS_DRRS_SUPPORT)
		return;

	pipe_config->has_drrs = true;

	pixel_clock = intel_connector->panel.downclock_mode->clock;
	if (pipe_config->splitter.enable)
		pixel_clock /= pipe_config->splitter.link_count;

	intel_link_compute_m_n(output_bpp, pipe_config->lane_count, pixel_clock,
			       pipe_config->port_clock, &pipe_config->dp_m2_n2,
			       constant_n, pipe_config->fec_enable);

	/* FIXME: abstract this better */
	if (pipe_config->splitter.enable)
		pipe_config->dp_m2_n2.gmch_m *= pipe_config->splitter.link_count;
}

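/*
 * Switch the panel between its high and low refresh rate. On display
 * version 8+ (except CHV) this selects the M1/N1 or M2/N2 link values;
 * on gen7 and VLV/CHV it toggles the PIPECONF eDP RR mode switch bit.
 * Called with dev_priv->drrs.mutex held.
 */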
static void intel_drrs_set_state(struct drm_i915_private *dev_priv,
				 const struct intel_crtc_state *crtc_state,
				 enum drrs_refresh_rate_type refresh_type)
{
	struct intel_dp *intel_dp = dev_priv->drrs.dp;
	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
	struct drm_display_mode *mode;

	if (!intel_dp) {
		drm_dbg_kms(&dev_priv->drm, "DRRS not supported.\n");
		return;
	}

	if (!crtc) {
		drm_dbg_kms(&dev_priv->drm,
			    "DRRS: intel_crtc not initialized\n");
		return;
	}

	if (dev_priv->drrs.type < SEAMLESS_DRRS_SUPPORT) {
		drm_dbg_kms(&dev_priv->drm, "Only Seamless DRRS supported.\n");
		return;
	}

	if (refresh_type == dev_priv->drrs.refresh_rate_type)
		return;

	if (!crtc_state->hw.active) {
		drm_dbg_kms(&dev_priv->drm,
			    "eDP encoder disabled. CRTC not Active\n");
		return;
	}

	if (DISPLAY_VER(dev_priv) >= 8 && !IS_CHERRYVIEW(dev_priv)) {
		switch (refresh_type) {
		case DRRS_HIGH_RR:
			intel_dp_set_m_n(crtc_state, M1_N1);
			break;
		case DRRS_LOW_RR:
			intel_dp_set_m_n(crtc_state, M2_N2);
			break;
		case DRRS_MAX_RR:
		default:
			drm_err(&dev_priv->drm,
				"Unsupported refreshrate type\n");
		}
	} else if (DISPLAY_VER(dev_priv) > 6) {
		i915_reg_t reg = PIPECONF(crtc_state->cpu_transcoder);
		u32 val;

		val = intel_de_read(dev_priv, reg);
		if (refresh_type == DRRS_LOW_RR) {
			if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv))
				val |= PIPECONF_EDP_RR_MODE_SWITCH_VLV;
			else
				val |= PIPECONF_EDP_RR_MODE_SWITCH;
		} else {
			if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv))
				val &= ~PIPECONF_EDP_RR_MODE_SWITCH_VLV;
			else
				val &= ~PIPECONF_EDP_RR_MODE_SWITCH;
		}
		intel_de_write(dev_priv, reg, val);
	}

	dev_priv->drrs.refresh_rate_type = refresh_type;

	if (refresh_type == DRRS_LOW_RR)
		mode = intel_dp->attached_connector->panel.downclock_mode;
	else
		mode = intel_dp->attached_connector->panel.fixed_mode;
	drm_dbg_kms(&dev_priv->drm, "eDP Refresh Rate set to : %dHz\n",
		    drm_mode_vrefresh(mode));
}

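/*
 * Mark DRRS as active on @intel_dp: reset the busy frontbuffer bits and
 * record the encoder in dev_priv->drrs.dp. Called with drrs.mutex held.
 */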
static void
intel_drrs_enable_locked(struct intel_dp *intel_dp)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);

	dev_priv->drrs.busy_frontbuffer_bits = 0;
	dev_priv->drrs.dp = intel_dp;
}

/**
 * intel_drrs_enable - init drrs struct if supported
 * @intel_dp: DP struct
 * @crtc_state: A pointer to the active crtc state.
 *
 * Initializes frontbuffer_bits and drrs.dp
 */
void intel_drrs_enable(struct intel_dp *intel_dp,
		       const struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);

	if (!crtc_state->has_drrs)
		return;

	drm_dbg_kms(&dev_priv->drm, "Enabling DRRS\n");

	mutex_lock(&dev_priv->drrs.mutex);

	if (dev_priv->drrs.dp) {
		drm_warn(&dev_priv->drm, "DRRS already enabled\n");
		goto unlock;
	}

	intel_drrs_enable_locked(intel_dp);

unlock:
	mutex_unlock(&dev_priv->drrs.mutex);
}

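/*
 * Tear down DRRS on @intel_dp: force the panel back to its high refresh
 * rate and clear dev_priv->drrs.dp. Called with drrs.mutex held.
 */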
static void
intel_drrs_disable_locked(struct intel_dp *intel_dp,
			  const struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);

	intel_drrs_set_state(dev_priv, crtc_state, DRRS_HIGH_RR);
	dev_priv->drrs.dp = NULL;
}

/**
 * intel_drrs_disable - Disable DRRS
 * @intel_dp: DP struct
 * @old_crtc_state: Pointer to old crtc_state.
 */
void intel_drrs_disable(struct intel_dp *intel_dp,
			const struct intel_crtc_state *old_crtc_state)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);

	if (!old_crtc_state->has_drrs)
		return;

	mutex_lock(&dev_priv->drrs.mutex);
	if (!dev_priv->drrs.dp) {
		mutex_unlock(&dev_priv->drrs.mutex);
		return;
	}

	intel_drrs_disable_locked(intel_dp, old_crtc_state);
	mutex_unlock(&dev_priv->drrs.mutex);

	cancel_delayed_work_sync(&dev_priv->drrs.work);
}

/**
 * intel_drrs_update - Update DRRS state
 * @intel_dp: Intel DP
 * @crtc_state: new CRTC state
 *
 * This function will update the DRRS state, disabling or enabling DRRS when
 * executing fastsets. For full modesets, intel_drrs_disable() and
 * intel_drrs_enable() should be called instead.
 */
void
intel_drrs_update(struct intel_dp *intel_dp,
		  const struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);

	if (dev_priv->drrs.type != SEAMLESS_DRRS_SUPPORT)
		return;

	mutex_lock(&dev_priv->drrs.mutex);

	/* New state matches current one? */
	if (crtc_state->has_drrs == !!dev_priv->drrs.dp)
		goto unlock;

	if (crtc_state->has_drrs)
		intel_drrs_enable_locked(intel_dp);
	else
		intel_drrs_disable_locked(intel_dp, crtc_state);

unlock:
	mutex_unlock(&dev_priv->drrs.mutex);
}

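/*
 * Delayed work scheduled once the screen has gone idle: recheck that no
 * frontbuffer is busy (an invalidate may have raced with us) and, if so,
 * switch the panel to its low refresh rate.
 */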
static void intel_drrs_downclock_work(struct work_struct *work)
{
	struct drm_i915_private *dev_priv =
		container_of(work, typeof(*dev_priv), drrs.work.work);
	struct intel_dp *intel_dp;
	struct drm_crtc *crtc;

	mutex_lock(&dev_priv->drrs.mutex);

	intel_dp = dev_priv->drrs.dp;

	if (!intel_dp)
		goto unlock;

	/*
	 * The delayed work can race with an invalidate hence we need to
	 * recheck.
	 */

	if (dev_priv->drrs.busy_frontbuffer_bits)
		goto unlock;

	crtc = dp_to_dig_port(intel_dp)->base.base.crtc;
	intel_drrs_set_state(dev_priv, to_intel_crtc(crtc)->config, DRRS_LOW_RR);

unlock:
	mutex_unlock(&dev_priv->drrs.mutex);
}

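/*
 * Common handler for frontbuffer invalidate, flush and page flip events on
 * the DRRS pipe. Any activity bumps the panel back to its high refresh
 * rate; once all frontbuffers are quiescent again, the downclock work is
 * scheduled to drop to the low refresh rate after a 1 second timeout.
 */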
static void intel_drrs_frontbuffer_update(struct drm_i915_private *dev_priv,
					  unsigned int frontbuffer_bits,
					  bool invalidate)
{
	struct intel_dp *intel_dp;
	struct drm_crtc *crtc;
	enum pipe pipe;

	if (dev_priv->drrs.type == DRRS_NOT_SUPPORTED)
		return;

	cancel_delayed_work(&dev_priv->drrs.work);

	mutex_lock(&dev_priv->drrs.mutex);

	intel_dp = dev_priv->drrs.dp;
	if (!intel_dp) {
		mutex_unlock(&dev_priv->drrs.mutex);
		return;
	}

	crtc = dp_to_dig_port(intel_dp)->base.base.crtc;
	pipe = to_intel_crtc(crtc)->pipe;

	frontbuffer_bits &= INTEL_FRONTBUFFER_ALL_MASK(pipe);
	if (invalidate)
		dev_priv->drrs.busy_frontbuffer_bits |= frontbuffer_bits;
	else
		dev_priv->drrs.busy_frontbuffer_bits &= ~frontbuffer_bits;

	/* flush/invalidate means busy screen hence upclock */
	if (frontbuffer_bits)
		intel_drrs_set_state(dev_priv, to_intel_crtc(crtc)->config,
				     DRRS_HIGH_RR);

	/*
	 * flush also means no more activity hence schedule downclock, if all
	 * other fbs are quiescent too
	 */
	if (!invalidate && !dev_priv->drrs.busy_frontbuffer_bits)
		schedule_delayed_work(&dev_priv->drrs.work,
				      msecs_to_jiffies(1000));
	mutex_unlock(&dev_priv->drrs.mutex);
}

/**
 * intel_drrs_invalidate - Disable Idleness DRRS
 * @dev_priv: i915 device
 * @frontbuffer_bits: frontbuffer plane tracking bits
 *
 * This function gets called every time rendering on the given planes starts.
 * Hence DRRS needs to be upclocked, i.e. (LOW_RR -> HIGH_RR).
 *
 * Dirty frontbuffers relevant to DRRS are tracked in busy_frontbuffer_bits.
 */
void intel_drrs_invalidate(struct drm_i915_private *dev_priv,
			   unsigned int frontbuffer_bits)
{
	intel_drrs_frontbuffer_update(dev_priv, frontbuffer_bits, true);
}

/**
 * intel_drrs_flush - Restart Idleness DRRS
 * @dev_priv: i915 device
 * @frontbuffer_bits: frontbuffer plane tracking bits
 *
 * This function gets called every time rendering on the given planes has
 * completed or a flip on a crtc is completed, so DRRS should be upclocked
 * (LOW_RR -> HIGH_RR). Idleness detection is also restarted if no other
 * planes are dirty.
 *
 * Dirty frontbuffers relevant to DRRS are tracked in busy_frontbuffer_bits.
 */
void intel_drrs_flush(struct drm_i915_private *dev_priv,
		      unsigned int frontbuffer_bits)
{
	intel_drrs_frontbuffer_update(dev_priv, frontbuffer_bits, false);
}

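/*
 * Treat a page flip on @crtc like a frontbuffer flush for the whole pipe:
 * upclock and re-arm the idleness downclock timer.
 */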
void intel_drrs_page_flip(struct intel_atomic_state *state,
			  struct intel_crtc *crtc)
{
	struct drm_i915_private *dev_priv = to_i915(state->base.dev);
	unsigned int frontbuffer_bits = INTEL_FRONTBUFFER_ALL_MASK(crtc->pipe);

	intel_drrs_frontbuffer_update(dev_priv, frontbuffer_bits, false);
}

/**
 * intel_drrs_init - Init basic DRRS work and mutex.
 * @connector: eDP connector
 * @fixed_mode: preferred mode of panel
 *
 * This function is called only once at driver load to initialize basic
 * DRRS stuff.
 *
 * Returns:
 * Downclock mode if panel supports it, else return NULL.
 * DRRS support is determined by the presence of downclock mode (apart
 * from VBT setting).
 */
struct drm_display_mode *
intel_drrs_init(struct intel_connector *connector,
		struct drm_display_mode *fixed_mode)
{
	struct drm_i915_private *dev_priv = to_i915(connector->base.dev);
	struct drm_display_mode *downclock_mode = NULL;

	INIT_DELAYED_WORK(&dev_priv->drrs.work, intel_drrs_downclock_work);
	mutex_init(&dev_priv->drrs.mutex);

	if (DISPLAY_VER(dev_priv) <= 6) {
		drm_dbg_kms(&dev_priv->drm,
			    "DRRS supported for Gen7 and above\n");
		return NULL;
	}

	if (dev_priv->vbt.drrs_type != SEAMLESS_DRRS_SUPPORT) {
		drm_dbg_kms(&dev_priv->drm, "VBT doesn't support DRRS\n");
		return NULL;
	}

	downclock_mode = intel_panel_edid_downclock_mode(connector, fixed_mode);
	if (!downclock_mode) {
		drm_dbg_kms(&dev_priv->drm,
			    "Downclock mode is not found. DRRS not supported\n");
		return NULL;
	}

	dev_priv->drrs.type = dev_priv->vbt.drrs_type;

	dev_priv->drrs.refresh_rate_type = DRRS_HIGH_RR;
	drm_dbg_kms(&dev_priv->drm,
		    "seamless DRRS supported for eDP panel.\n");
	return downclock_mode;
}