/*
 * Copyright 2012-16 Advanced Micro Devices, Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: AMD
 *
 */

#include <linux/slab.h>

#include "dal_asic_id.h"
#include "dc_types.h"
#include "dccg.h"
#include "clk_mgr_internal.h"
#include "link.h"

#include "dce100/dce_clk_mgr.h"
#include "dce110/dce110_clk_mgr.h"
#include "dce112/dce112_clk_mgr.h"
#include "dce120/dce120_clk_mgr.h"
#include "dce60/dce60_clk_mgr.h"
#include "dcn10/rv1_clk_mgr.h"
#include "dcn10/rv2_clk_mgr.h"
#include "dcn20/dcn20_clk_mgr.h"
#include "dcn21/rn_clk_mgr.h"
#include "dcn201/dcn201_clk_mgr.h"
#include "dcn30/dcn30_clk_mgr.h"
#include "dcn301/vg_clk_mgr.h"
#include "dcn31/dcn31_clk_mgr.h"
#include "dcn314/dcn314_clk_mgr.h"
#include "dcn315/dcn315_clk_mgr.h"
#include "dcn316/dcn316_clk_mgr.h"
#include "dcn32/dcn32_clk_mgr.h"

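/**
 * clk_mgr_helper_get_active_display_cnt() - Count displays that need clocks.
 * @dc: display core instance (unused here)
 * @context: state whose streams are counted
 *
 * SubVP phantom pipes are excluded from the count. A stream with dpms off is
 * still counted when its signal is SIGNAL_TYPE_VIRTUAL, to cover the headless
 * case: HPD does not fire while the system is in S0i2.
 *
 * Return: number of active (or virtual) displays in @context.
 */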
int clk_mgr_helper_get_active_display_cnt(
		struct dc *dc,
		struct dc_state *context)
{
	int i, display_count;

	display_count = 0;
	for (i = 0; i < context->stream_count; i++) {
		const struct dc_stream_state *stream = context->streams[i];

		/* Don't count SubVP phantom pipes as part of active
		 * display count
		 */
		if (stream->mall_stream_config.type == SUBVP_PHANTOM)
			continue;

		/*
		 * Only notify active stream or virtual stream.
		 * Need to notify virtual stream to work around
		 * headless case. HPD does not fire when system is in
		 * S0i2.
		 */
		if (!stream->dpms_off || stream->signal == SIGNAL_TYPE_VIRTUAL)
			display_count++;
	}

	return display_count;
}

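/**
 * clk_mgr_helper_get_active_plane_cnt() - Sum plane_count over all streams.
 * @dc: display core instance (unused here)
 * @context: state whose stream_status entries are summed
 *
 * Planes are counted for active and virtual streams alike.
 *
 * Return: total plane count across every stream in @context.
 */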
int clk_mgr_helper_get_active_plane_cnt(
		struct dc *dc,
		struct dc_state *context)
{
	int i, total_plane_count;

	total_plane_count = 0;
	for (i = 0; i < context->stream_count; i++) {
		const struct dc_stream_status stream_status = context->stream_status[i];

		/*
		 * Sum up plane_count for all streams (active and virtual).
		 */
		total_plane_count += stream_status.plane_count;
	}

	return total_plane_count;
}

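/**
 * clk_mgr_exit_optimized_pwr_state() - Exit the optimized power state.
 * @dc: display core instance
 * @clk_mgr: clock manager that caches the PSR allow-active setting
 *
 * Invokes the hwss exit_optimized_pwr_state hook when one is installed, then,
 * for every eDP link with PSR enabled, caches psr_allow_active in @clk_mgr
 * and forces both PSR and Replay inactive. The cached value is restored by
 * clk_mgr_optimize_pwr_state().
 */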
void clk_mgr_exit_optimized_pwr_state(const struct dc *dc, struct clk_mgr *clk_mgr)
{
	struct dc_link *edp_links[MAX_NUM_EDP];
	struct dc_link *edp_link = NULL;
	int edp_num;
	unsigned int panel_inst;

	dc_get_edp_links(dc, edp_links, &edp_num);
	if (dc->hwss.exit_optimized_pwr_state)
		dc->hwss.exit_optimized_pwr_state(dc, dc->current_state);

	if (edp_num) {
		for (panel_inst = 0; panel_inst < edp_num; panel_inst++) {
			bool allow_active = false;

			edp_link = edp_links[panel_inst];
			if (!edp_link->psr_settings.psr_feature_enabled)
				continue;
			clk_mgr->psr_allow_active_cache = edp_link->psr_settings.psr_allow_active;
			dc->link_srv->edp_set_psr_allow_active(edp_link, &allow_active, false, false, NULL);
			dc->link_srv->edp_set_replay_allow_active(edp_link, &allow_active, false, false, NULL);
		}
	}
}

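/**
 * clk_mgr_optimize_pwr_state() - Re-enter the optimized power state.
 * @dc: display core instance
 * @clk_mgr: clock manager holding the cached PSR allow-active setting
 *
 * Counterpart to clk_mgr_exit_optimized_pwr_state(): restores the cached
 * psr_allow_active value to PSR and Replay on every eDP link with PSR
 * enabled, then invokes the hwss optimize_pwr_state hook when one is
 * installed.
 */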
void clk_mgr_optimize_pwr_state(const struct dc *dc, struct clk_mgr *clk_mgr)
{
	struct dc_link *edp_links[MAX_NUM_EDP];
	struct dc_link *edp_link = NULL;
	int edp_num;
	unsigned int panel_inst;

	dc_get_edp_links(dc, edp_links, &edp_num);
	if (edp_num) {
		for (panel_inst = 0; panel_inst < edp_num; panel_inst++) {
			edp_link = edp_links[panel_inst];
			if (!edp_link->psr_settings.psr_feature_enabled)
				continue;
			dc->link_srv->edp_set_psr_allow_active(edp_link,
					&clk_mgr->psr_allow_active_cache, false, false, NULL);
			dc->link_srv->edp_set_replay_allow_active(edp_link,
					&clk_mgr->psr_allow_active_cache, false, false, NULL);
		}
	}

	if (dc->hwss.optimize_pwr_state)
		dc->hwss.optimize_pwr_state(dc, dc->current_state);
}

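/**
 * dc_clk_mgr_create() - Allocate and construct the clock manager for an ASIC.
 * @ctx: dc context, used to look up the ASIC id
 * @pp_smu: powerplay/SMU functions passed through to the DCN constructors
 * @dccg: display clock generator passed through to the DCN constructors
 *
 * Dispatches on chip_family, and within a family on hw_internal_rev or
 * chip_id, to the matching DCE or DCN constructor. Pair every successful
 * call with dc_destroy_clk_mgr().
 *
 * Return: the constructed struct clk_mgr, or NULL on allocation failure or
 * an unknown ASIC family.
 */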
struct clk_mgr *dc_clk_mgr_create(struct dc_context *ctx, struct pp_smu_funcs *pp_smu, struct dccg *dccg)
{
	struct hw_asic_id asic_id = ctx->asic_id;

	switch (asic_id.chip_family) {
#if defined(CONFIG_DRM_AMD_DC_SI)
	case FAMILY_SI: {
		struct clk_mgr_internal *clk_mgr = kzalloc(sizeof(*clk_mgr), GFP_KERNEL);

		if (clk_mgr == NULL) {
			BREAK_TO_DEBUGGER();
			return NULL;
		}
		dce60_clk_mgr_construct(ctx, clk_mgr);
		dce_clk_mgr_construct(ctx, clk_mgr);
		return &clk_mgr->base;
	}
#endif
	case FAMILY_CI:
	case FAMILY_KV: {
		struct clk_mgr_internal *clk_mgr = kzalloc(sizeof(*clk_mgr), GFP_KERNEL);

		if (clk_mgr == NULL) {
			BREAK_TO_DEBUGGER();
			return NULL;
		}
		dce_clk_mgr_construct(ctx, clk_mgr);
		return &clk_mgr->base;
	}
	case FAMILY_CZ: {
		struct clk_mgr_internal *clk_mgr = kzalloc(sizeof(*clk_mgr), GFP_KERNEL);

		if (clk_mgr == NULL) {
			BREAK_TO_DEBUGGER();
			return NULL;
		}
		dce110_clk_mgr_construct(ctx, clk_mgr);
		return &clk_mgr->base;
	}
	case FAMILY_VI: {
		struct clk_mgr_internal *clk_mgr = kzalloc(sizeof(*clk_mgr), GFP_KERNEL);

		if (clk_mgr == NULL) {
			BREAK_TO_DEBUGGER();
			return NULL;
		}
		if (ASIC_REV_IS_TONGA_P(asic_id.hw_internal_rev) ||
				ASIC_REV_IS_FIJI_P(asic_id.hw_internal_rev)) {
			dce_clk_mgr_construct(ctx, clk_mgr);
			return &clk_mgr->base;
		}
		if (ASIC_REV_IS_POLARIS10_P(asic_id.hw_internal_rev) ||
				ASIC_REV_IS_POLARIS11_M(asic_id.hw_internal_rev) ||
				ASIC_REV_IS_POLARIS12_V(asic_id.hw_internal_rev)) {
			dce112_clk_mgr_construct(ctx, clk_mgr);
			return &clk_mgr->base;
		}
		if (ASIC_REV_IS_VEGAM(asic_id.hw_internal_rev)) {
			dce112_clk_mgr_construct(ctx, clk_mgr);
			return &clk_mgr->base;
		}
		return &clk_mgr->base;
	}
	case FAMILY_AI: {
		struct clk_mgr_internal *clk_mgr = kzalloc(sizeof(*clk_mgr), GFP_KERNEL);

		if (clk_mgr == NULL) {
			BREAK_TO_DEBUGGER();
			return NULL;
		}
		if (ASICREV_IS_VEGA20_P(asic_id.hw_internal_rev))
			dce121_clk_mgr_construct(ctx, clk_mgr);
		else
			dce120_clk_mgr_construct(ctx, clk_mgr);
		return &clk_mgr->base;
	}
#if defined(CONFIG_DRM_AMD_DC_FP)
	case FAMILY_RV: {
		struct clk_mgr_internal *clk_mgr = kzalloc(sizeof(*clk_mgr), GFP_KERNEL);

		if (clk_mgr == NULL) {
			BREAK_TO_DEBUGGER();
			return NULL;
		}

		if (ASICREV_IS_RENOIR(asic_id.hw_internal_rev)) {
			rn_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
			return &clk_mgr->base;
		}

		if (ASICREV_IS_GREEN_SARDINE(asic_id.hw_internal_rev)) {
			rn_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
			return &clk_mgr->base;
		}
		if (ASICREV_IS_RAVEN2(asic_id.hw_internal_rev)) {
			rv2_clk_mgr_construct(ctx, clk_mgr, pp_smu);
			return &clk_mgr->base;
		}
		if (ASICREV_IS_RAVEN(asic_id.hw_internal_rev) ||
				ASICREV_IS_PICASSO(asic_id.hw_internal_rev)) {
			rv1_clk_mgr_construct(ctx, clk_mgr, pp_smu);
			return &clk_mgr->base;
		}
		return &clk_mgr->base;
	}
	case FAMILY_NV: {
		struct clk_mgr_internal *clk_mgr = kzalloc(sizeof(*clk_mgr), GFP_KERNEL);

		if (clk_mgr == NULL) {
			BREAK_TO_DEBUGGER();
			return NULL;
		}
		if (ASICREV_IS_SIENNA_CICHLID_P(asic_id.hw_internal_rev)) {
			dcn3_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
			return &clk_mgr->base;
		}
		if (ASICREV_IS_DIMGREY_CAVEFISH_P(asic_id.hw_internal_rev)) {
			dcn3_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
			return &clk_mgr->base;
		}
		if (ASICREV_IS_BEIGE_GOBY_P(asic_id.hw_internal_rev)) {
			dcn3_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
			return &clk_mgr->base;
		}
		if (asic_id.chip_id == DEVICE_ID_NV_13FE) {
			dcn201_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
			return &clk_mgr->base;
		}
		dcn20_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
		return &clk_mgr->base;
	}
	case FAMILY_VGH:
		if (ASICREV_IS_VANGOGH(asic_id.hw_internal_rev)) {
			struct clk_mgr_vgh *clk_mgr = kzalloc(sizeof(*clk_mgr), GFP_KERNEL);

			if (clk_mgr == NULL) {
				BREAK_TO_DEBUGGER();
				return NULL;
			}
			vg_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
			return &clk_mgr->base.base;
		}
		break;

	case FAMILY_YELLOW_CARP: {
		struct clk_mgr_dcn31 *clk_mgr = kzalloc(sizeof(*clk_mgr), GFP_KERNEL);

		if (clk_mgr == NULL) {
			BREAK_TO_DEBUGGER();
			return NULL;
		}

		dcn31_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
		return &clk_mgr->base.base;
	}
	case AMDGPU_FAMILY_GC_10_3_6: {
		struct clk_mgr_dcn315 *clk_mgr = kzalloc(sizeof(*clk_mgr), GFP_KERNEL);

		if (clk_mgr == NULL) {
			BREAK_TO_DEBUGGER();
			return NULL;
		}

		dcn315_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
		return &clk_mgr->base.base;
	}
	case AMDGPU_FAMILY_GC_10_3_7: {
		struct clk_mgr_dcn316 *clk_mgr = kzalloc(sizeof(*clk_mgr), GFP_KERNEL);

		if (clk_mgr == NULL) {
			BREAK_TO_DEBUGGER();
			return NULL;
		}

		dcn316_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
		return &clk_mgr->base.base;
	}
	case AMDGPU_FAMILY_GC_11_0_0: {
		struct clk_mgr_internal *clk_mgr = kzalloc(sizeof(*clk_mgr), GFP_KERNEL);

		if (clk_mgr == NULL) {
			BREAK_TO_DEBUGGER();
			return NULL;
		}

		dcn32_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
		return &clk_mgr->base;
	}

	case AMDGPU_FAMILY_GC_11_0_1: {
		struct clk_mgr_dcn314 *clk_mgr = kzalloc(sizeof(*clk_mgr), GFP_KERNEL);

		if (clk_mgr == NULL) {
			BREAK_TO_DEBUGGER();
			return NULL;
		}

		dcn314_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
		return &clk_mgr->base.base;
	}

#endif /* CONFIG_DRM_AMD_DC_FP - Family RV */
	default:
		ASSERT(0); /* Unknown Asic */
		break;
	}

	return NULL;
}

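/**
 * dc_destroy_clk_mgr() - Tear down a clock manager built by dc_clk_mgr_create().
 * @clk_mgr_base: the clock manager to destroy
 *
 * Gives the family-specific destructor a chance to release anything its
 * constructor allocated, then frees the clk_mgr allocation itself.
 */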
void dc_destroy_clk_mgr(struct clk_mgr *clk_mgr_base)
{
	struct clk_mgr_internal *clk_mgr = TO_CLK_MGR_INTERNAL(clk_mgr_base);

#ifdef CONFIG_DRM_AMD_DC_FP
	switch (clk_mgr_base->ctx->asic_id.chip_family) {
	case FAMILY_NV:
		if (ASICREV_IS_SIENNA_CICHLID_P(clk_mgr_base->ctx->asic_id.hw_internal_rev) ||
		    ASICREV_IS_DIMGREY_CAVEFISH_P(clk_mgr_base->ctx->asic_id.hw_internal_rev) ||
		    ASICREV_IS_BEIGE_GOBY_P(clk_mgr_base->ctx->asic_id.hw_internal_rev))
			dcn3_clk_mgr_destroy(clk_mgr);
		break;

	case FAMILY_VGH:
		if (ASICREV_IS_VANGOGH(clk_mgr_base->ctx->asic_id.hw_internal_rev))
			vg_clk_mgr_destroy(clk_mgr);
		break;

	case FAMILY_YELLOW_CARP:
		dcn31_clk_mgr_destroy(clk_mgr);
		break;

	case AMDGPU_FAMILY_GC_10_3_6:
		dcn315_clk_mgr_destroy(clk_mgr);
		break;

	case AMDGPU_FAMILY_GC_10_3_7:
		dcn316_clk_mgr_destroy(clk_mgr);
		break;

	case AMDGPU_FAMILY_GC_11_0_0:
		dcn32_clk_mgr_destroy(clk_mgr);
		break;

	case AMDGPU_FAMILY_GC_11_0_1:
		dcn314_clk_mgr_destroy(clk_mgr);
		break;

	default:
		break;
	}
#endif /* CONFIG_DRM_AMD_DC_FP */

	kfree(clk_mgr);
}