/*
 * Copyright 2012-16 Advanced Micro Devices, Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: AMD
 *
 */

#include <linux/slab.h>

#include "dal_asic_id.h"
#include "dc_types.h"
#include "dccg.h"
#include "clk_mgr_internal.h"

#include "dce100/dce_clk_mgr.h"
#include "dce110/dce110_clk_mgr.h"
#include "dce112/dce112_clk_mgr.h"
#include "dce120/dce120_clk_mgr.h"
#include "dce60/dce60_clk_mgr.h"
#include "dcn10/rv1_clk_mgr.h"
#include "dcn10/rv2_clk_mgr.h"
#include "dcn20/dcn20_clk_mgr.h"
#include "dcn21/rn_clk_mgr.h"
#include "dcn30/dcn30_clk_mgr.h"
#include "dcn301/vg_clk_mgr.h"
#include "dcn31/dcn31_clk_mgr.h"

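/*
 * Count how many streams in @context should be reported as active
 * displays: streams that are not DPMS-off, plus virtual streams
 * (see the headless/S0i2 note below).
 */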
int clk_mgr_helper_get_active_display_cnt(
		struct dc *dc,
		struct dc_state *context)
{
	int i, display_count;

	display_count = 0;
	for (i = 0; i < context->stream_count; i++) {
		const struct dc_stream_state *stream = context->streams[i];

		/*
		 * Only notify active stream or virtual stream.
		 * Need to notify virtual stream to work around
		 * headless case. HPD does not fire when system is in
		 * S0i2.
		 */
		if (!stream->dpms_off || stream->signal == SIGNAL_TYPE_VIRTUAL)
			display_count++;
	}

	return display_count;
}

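/* Sum the plane_count of every stream in @context, active and virtual. */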
int clk_mgr_helper_get_active_plane_cnt(
		struct dc *dc,
		struct dc_state *context)
{
	int i, total_plane_count;

	total_plane_count = 0;
	for (i = 0; i < context->stream_count; i++) {
		const struct dc_stream_status stream_status = context->stream_status[i];

		/*
		 * Sum up plane_count for all streams (active and virtual).
		 */
		total_plane_count += stream_status.plane_count;
	}

	return total_plane_count;
}

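/*
 * Leave the optimized (low) power state via the HWSS hook and disable PSR
 * on every eDP panel that has it enabled, caching the current
 * psr_allow_active setting so clk_mgr_optimize_pwr_state() can restore it.
 */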
void clk_mgr_exit_optimized_pwr_state(const struct dc *dc, struct clk_mgr *clk_mgr)
{
	struct dc_link *edp_links[MAX_NUM_EDP];
	struct dc_link *edp_link = NULL;
	int edp_num;
	unsigned int panel_inst;

	get_edp_links(dc, edp_links, &edp_num);
	if (dc->hwss.exit_optimized_pwr_state)
		dc->hwss.exit_optimized_pwr_state(dc, dc->current_state);

	if (edp_num) {
		for (panel_inst = 0; panel_inst < edp_num; panel_inst++) {
			edp_link = edp_links[panel_inst];
			if (!edp_link->psr_settings.psr_feature_enabled)
				continue;
			clk_mgr->psr_allow_active_cache = edp_link->psr_settings.psr_allow_active;
			dc_link_set_psr_allow_active(edp_link, false, false, false);
		}
	}

}

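/*
 * Restore the cached psr_allow_active state on every PSR-capable eDP panel,
 * then re-enter the optimized power state via the HWSS hook.
 */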
void clk_mgr_optimize_pwr_state(const struct dc *dc, struct clk_mgr *clk_mgr)
{
	struct dc_link *edp_links[MAX_NUM_EDP];
	struct dc_link *edp_link = NULL;
	int edp_num;
	unsigned int panel_inst;

	get_edp_links(dc, edp_links, &edp_num);
	if (edp_num) {
		for (panel_inst = 0; panel_inst < edp_num; panel_inst++) {
			edp_link = edp_links[panel_inst];
			if (!edp_link->psr_settings.psr_feature_enabled)
				continue;
			dc_link_set_psr_allow_active(edp_link,
					clk_mgr->psr_allow_active_cache, false, false);
		}
	}

	if (dc->hwss.optimize_pwr_state)
		dc->hwss.optimize_pwr_state(dc, dc->current_state);

}

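/*
 * Allocate and construct the clock manager that matches the ASIC family and
 * revision in @ctx->asic_id. Returns NULL on allocation failure or when the
 * ASIC family is unknown.
 */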
struct clk_mgr *dc_clk_mgr_create(struct dc_context *ctx, struct pp_smu_funcs *pp_smu, struct dccg *dccg)
{
	struct hw_asic_id asic_id = ctx->asic_id;

	switch (asic_id.chip_family) {
#if defined(CONFIG_DRM_AMD_DC_SI)
	case FAMILY_SI: {
		struct clk_mgr_internal *clk_mgr = kzalloc(sizeof(*clk_mgr), GFP_KERNEL);

		if (clk_mgr == NULL) {
			BREAK_TO_DEBUGGER();
			return NULL;
		}
		dce60_clk_mgr_construct(ctx, clk_mgr);
		dce_clk_mgr_construct(ctx, clk_mgr);
		return &clk_mgr->base;
	}
#endif
	case FAMILY_CI:
	case FAMILY_KV: {
		struct clk_mgr_internal *clk_mgr = kzalloc(sizeof(*clk_mgr), GFP_KERNEL);

		if (clk_mgr == NULL) {
			BREAK_TO_DEBUGGER();
			return NULL;
		}
		dce_clk_mgr_construct(ctx, clk_mgr);
		return &clk_mgr->base;
	}
	case FAMILY_CZ: {
		struct clk_mgr_internal *clk_mgr = kzalloc(sizeof(*clk_mgr), GFP_KERNEL);

		if (clk_mgr == NULL) {
			BREAK_TO_DEBUGGER();
			return NULL;
		}
		dce110_clk_mgr_construct(ctx, clk_mgr);
		return &clk_mgr->base;
	}
	case FAMILY_VI: {
		struct clk_mgr_internal *clk_mgr = kzalloc(sizeof(*clk_mgr), GFP_KERNEL);

		if (clk_mgr == NULL) {
			BREAK_TO_DEBUGGER();
			return NULL;
		}
		if (ASIC_REV_IS_TONGA_P(asic_id.hw_internal_rev) ||
				ASIC_REV_IS_FIJI_P(asic_id.hw_internal_rev)) {
			dce_clk_mgr_construct(ctx, clk_mgr);
			return &clk_mgr->base;
		}
		if (ASIC_REV_IS_POLARIS10_P(asic_id.hw_internal_rev) ||
				ASIC_REV_IS_POLARIS11_M(asic_id.hw_internal_rev) ||
				ASIC_REV_IS_POLARIS12_V(asic_id.hw_internal_rev)) {
			dce112_clk_mgr_construct(ctx, clk_mgr);
			return &clk_mgr->base;
		}
		if (ASIC_REV_IS_VEGAM(asic_id.hw_internal_rev)) {
			dce112_clk_mgr_construct(ctx, clk_mgr);
			return &clk_mgr->base;
		}
		return &clk_mgr->base;
	}
	case FAMILY_AI: {
		struct clk_mgr_internal *clk_mgr = kzalloc(sizeof(*clk_mgr), GFP_KERNEL);

		if (clk_mgr == NULL) {
			BREAK_TO_DEBUGGER();
			return NULL;
		}
		if (ASICREV_IS_VEGA20_P(asic_id.hw_internal_rev))
			dce121_clk_mgr_construct(ctx, clk_mgr);
		else
			dce120_clk_mgr_construct(ctx, clk_mgr);
		return &clk_mgr->base;
	}
#if defined(CONFIG_DRM_AMD_DC_DCN)
	case FAMILY_RV: {
		struct clk_mgr_internal *clk_mgr = kzalloc(sizeof(*clk_mgr), GFP_KERNEL);

		if (clk_mgr == NULL) {
			BREAK_TO_DEBUGGER();
			return NULL;
		}

		if (ASICREV_IS_RENOIR(asic_id.hw_internal_rev)) {
			rn_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
			return &clk_mgr->base;
		}

		if (ASICREV_IS_GREEN_SARDINE(asic_id.hw_internal_rev)) {
			rn_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
			return &clk_mgr->base;
		}
		if (ASICREV_IS_RAVEN2(asic_id.hw_internal_rev)) {
			rv2_clk_mgr_construct(ctx, clk_mgr, pp_smu);
			return &clk_mgr->base;
		}
		if (ASICREV_IS_RAVEN(asic_id.hw_internal_rev) ||
				ASICREV_IS_PICASSO(asic_id.hw_internal_rev)) {
			rv1_clk_mgr_construct(ctx, clk_mgr, pp_smu);
			return &clk_mgr->base;
		}
		return &clk_mgr->base;
	}
	case FAMILY_NV: {
		struct clk_mgr_internal *clk_mgr = kzalloc(sizeof(*clk_mgr), GFP_KERNEL);

		if (clk_mgr == NULL) {
			BREAK_TO_DEBUGGER();
			return NULL;
		}
		if (ASICREV_IS_SIENNA_CICHLID_P(asic_id.hw_internal_rev)) {
			dcn3_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
			return &clk_mgr->base;
		}
		if (ASICREV_IS_DIMGREY_CAVEFISH_P(asic_id.hw_internal_rev)) {
			dcn3_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
			return &clk_mgr->base;
		}
		if (ASICREV_IS_BEIGE_GOBY_P(asic_id.hw_internal_rev)) {
			dcn3_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
			return &clk_mgr->base;
		}
		dcn20_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
		return &clk_mgr->base;
	}
	case FAMILY_VGH:
		if (ASICREV_IS_VANGOGH(asic_id.hw_internal_rev)) {
			struct clk_mgr_vgh *clk_mgr = kzalloc(sizeof(*clk_mgr), GFP_KERNEL);

			if (clk_mgr == NULL) {
				BREAK_TO_DEBUGGER();
				return NULL;
			}
			vg_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
			return &clk_mgr->base.base;
		}
		break;
	case FAMILY_YELLOW_CARP: {
		struct clk_mgr_dcn31 *clk_mgr = kzalloc(sizeof(*clk_mgr), GFP_KERNEL);

		if (clk_mgr == NULL) {
			BREAK_TO_DEBUGGER();
			return NULL;
		}
		if (ASICREV_IS_YELLOW_CARP(asic_id.hw_internal_rev)) {
			/* TODO: to add DCN31 clk_mgr support, once CLK IP header files are available,
			 * for now use DCN3.0 clk mgr.
			 */
			dcn31_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
			return &clk_mgr->base.base;
		}
		return &clk_mgr->base.base;
	}
#endif

	default:
		ASSERT(0); /* Unknown Asic */
		break;
	}

	return NULL;
}

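/*
 * Call the family-specific destroy hook, if any, then free the clock manager
 * allocated by dc_clk_mgr_create().
 */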
void dc_destroy_clk_mgr(struct clk_mgr *clk_mgr_base)
{
	struct clk_mgr_internal *clk_mgr = TO_CLK_MGR_INTERNAL(clk_mgr_base);

#ifdef CONFIG_DRM_AMD_DC_DCN
	switch (clk_mgr_base->ctx->asic_id.chip_family) {
	case FAMILY_NV:
		if (ASICREV_IS_SIENNA_CICHLID_P(clk_mgr_base->ctx->asic_id.hw_internal_rev)) {
			dcn3_clk_mgr_destroy(clk_mgr);
		} else if (ASICREV_IS_DIMGREY_CAVEFISH_P(clk_mgr_base->ctx->asic_id.hw_internal_rev)) {
			dcn3_clk_mgr_destroy(clk_mgr);
		}
		if (ASICREV_IS_BEIGE_GOBY_P(clk_mgr_base->ctx->asic_id.hw_internal_rev)) {
			dcn3_clk_mgr_destroy(clk_mgr);
		}
		break;

	case FAMILY_VGH:
		if (ASICREV_IS_VANGOGH(clk_mgr_base->ctx->asic_id.hw_internal_rev))
			vg_clk_mgr_destroy(clk_mgr);
		break;

	case FAMILY_YELLOW_CARP:
		if (ASICREV_IS_YELLOW_CARP(clk_mgr_base->ctx->asic_id.hw_internal_rev))
			dcn31_clk_mgr_destroy(clk_mgr);
		break;

	default:
		break;
	}
#endif

	kfree(clk_mgr);
}