/* xref: /linux/drivers/gpu/drm/amd/display/dc/clk_mgr/clk_mgr.c (revision 994aeacbb3c039b4f3e02e76e6d39407920e76c6) */
/*
 * Copyright 2012-16 Advanced Micro Devices, Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: AMD
 *
 */

#include "dal_asic_id.h"
#include "dc_types.h"
#include "dccg.h"
#include "clk_mgr_internal.h"
#include "dc_state_priv.h"
#include "link.h"

#include "dce100/dce_clk_mgr.h"
#include "dce110/dce110_clk_mgr.h"
#include "dce112/dce112_clk_mgr.h"
#include "dce120/dce120_clk_mgr.h"
#include "dce60/dce60_clk_mgr.h"
#include "dcn10/rv1_clk_mgr.h"
#include "dcn10/rv2_clk_mgr.h"
#include "dcn20/dcn20_clk_mgr.h"
#include "dcn21/rn_clk_mgr.h"
#include "dcn201/dcn201_clk_mgr.h"
#include "dcn30/dcn30_clk_mgr.h"
#include "dcn301/vg_clk_mgr.h"
#include "dcn31/dcn31_clk_mgr.h"
#include "dcn314/dcn314_clk_mgr.h"
#include "dcn315/dcn315_clk_mgr.h"
#include "dcn316/dcn316_clk_mgr.h"
#include "dcn32/dcn32_clk_mgr.h"
#include "dcn35/dcn35_clk_mgr.h"
#include "dcn401/dcn401_clk_mgr.h"
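
/*
 * Count the streams in @context that contribute to the active display
 * count: streams that are not DPMS-off or that still have planes attached.
 * SubVP phantom streams are skipped since they do not drive a real display.
 */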
int clk_mgr_helper_get_active_display_cnt(
		struct dc *dc,
		struct dc_state *context)
{
	int i, display_count;

	display_count = 0;
	for (i = 0; i < context->stream_count; i++) {
		const struct dc_stream_state *stream = context->streams[i];
		const struct dc_stream_status *stream_status = &context->stream_status[i];

		/* Don't count SubVP phantom pipes as part of active
		 * display count
		 */
		if (dc_state_get_stream_subvp_type(context, stream) == SUBVP_PHANTOM)
			continue;

		if (!stream->dpms_off || (stream_status && stream_status->plane_count))
			display_count++;
	}

	return display_count;
}
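
/*
 * Sum the plane count across all streams in @context, including virtual
 * streams.
 */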
int clk_mgr_helper_get_active_plane_cnt(
		struct dc *dc,
		struct dc_state *context)
{
	int i, total_plane_count;

	total_plane_count = 0;
	for (i = 0; i < context->stream_count; i++) {
		const struct dc_stream_status stream_status = context->stream_status[i];

		/*
		 * Sum up plane_count for all streams ( active and virtual ).
		 */
		total_plane_count += stream_status.plane_count;
	}

	return total_plane_count;
}
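
/*
 * Run the HWSS exit_optimized_pwr_state hook (if any), then, for every
 * PSR-capable eDP link, cache the current PSR allow-active setting in the
 * clock manager and disallow both PSR and Panel Replay.
 */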
void clk_mgr_exit_optimized_pwr_state(const struct dc *dc, struct clk_mgr *clk_mgr)
{
	struct dc_link *edp_links[MAX_NUM_EDP];
	struct dc_link *edp_link = NULL;
	int edp_num;
	unsigned int panel_inst;

	dc_get_edp_links(dc, edp_links, &edp_num);
	if (dc->hwss.exit_optimized_pwr_state)
		dc->hwss.exit_optimized_pwr_state(dc, dc->current_state);

	if (edp_num) {
		for (panel_inst = 0; panel_inst < edp_num; panel_inst++) {
			bool allow_active = false;

			edp_link = edp_links[panel_inst];
			if (!edp_link->psr_settings.psr_feature_enabled)
				continue;
			clk_mgr->psr_allow_active_cache = edp_link->psr_settings.psr_allow_active;
			dc->link_srv->edp_set_psr_allow_active(edp_link, &allow_active, false, false, NULL);
			dc->link_srv->edp_set_replay_allow_active(edp_link, &allow_active, false, false, NULL);
		}
	}

}
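
/*
 * Counterpart of clk_mgr_exit_optimized_pwr_state(): restore the cached
 * PSR/Replay allow-active setting on each PSR-capable eDP link, then run
 * the HWSS optimize_pwr_state hook (if any).
 */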
void clk_mgr_optimize_pwr_state(const struct dc *dc, struct clk_mgr *clk_mgr)
{
	struct dc_link *edp_links[MAX_NUM_EDP];
	struct dc_link *edp_link = NULL;
	int edp_num;
	unsigned int panel_inst;

	dc_get_edp_links(dc, edp_links, &edp_num);
	if (edp_num) {
		for (panel_inst = 0; panel_inst < edp_num; panel_inst++) {
			edp_link = edp_links[panel_inst];
			if (!edp_link->psr_settings.psr_feature_enabled)
				continue;
			dc->link_srv->edp_set_psr_allow_active(edp_link,
					&clk_mgr->psr_allow_active_cache, false, false, NULL);
			dc->link_srv->edp_set_replay_allow_active(edp_link,
					&clk_mgr->psr_allow_active_cache, false, false, NULL);
		}
	}

	if (dc->hwss.optimize_pwr_state)
		dc->hwss.optimize_pwr_state(dc, dc->current_state);

}
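
/*
 * Allocate and construct the clock manager that matches the ASIC family
 * (and, where needed, the hardware internal revision or DCE/DCN version)
 * recorded in the DC context.  Returns NULL on allocation failure or for an
 * unknown ASIC family.
 */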
struct clk_mgr *dc_clk_mgr_create(struct dc_context *ctx, struct pp_smu_funcs *pp_smu, struct dccg *dccg)
{
	struct hw_asic_id asic_id = ctx->asic_id;

	switch (asic_id.chip_family) {
#if defined(CONFIG_DRM_AMD_DC_SI)
	case FAMILY_SI: {
		struct clk_mgr_internal *clk_mgr = kzalloc(sizeof(*clk_mgr), GFP_KERNEL);

		if (clk_mgr == NULL) {
			BREAK_TO_DEBUGGER();
			return NULL;
		}
		dce60_clk_mgr_construct(ctx, clk_mgr);
		dce_clk_mgr_construct(ctx, clk_mgr);
		return &clk_mgr->base;
	}
#endif
	case FAMILY_CI:
	case FAMILY_KV: {
		struct clk_mgr_internal *clk_mgr = kzalloc(sizeof(*clk_mgr), GFP_KERNEL);

		if (clk_mgr == NULL) {
			BREAK_TO_DEBUGGER();
			return NULL;
		}
		dce_clk_mgr_construct(ctx, clk_mgr);
		return &clk_mgr->base;
	}
	case FAMILY_CZ: {
		struct clk_mgr_internal *clk_mgr = kzalloc(sizeof(*clk_mgr), GFP_KERNEL);

		if (clk_mgr == NULL) {
			BREAK_TO_DEBUGGER();
			return NULL;
		}
		dce110_clk_mgr_construct(ctx, clk_mgr);
		return &clk_mgr->base;
	}
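	/*
	 * FAMILY_VI spans several DCE generations: Tonga and Fiji use the
	 * base DCE clock manager, while Polaris 10/11/12 and VegaM use the
	 * DCE 11.2 variant.
	 */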
	case FAMILY_VI: {
		struct clk_mgr_internal *clk_mgr = kzalloc(sizeof(*clk_mgr), GFP_KERNEL);

		if (clk_mgr == NULL) {
			BREAK_TO_DEBUGGER();
			return NULL;
		}
		if (ASIC_REV_IS_TONGA_P(asic_id.hw_internal_rev) ||
				ASIC_REV_IS_FIJI_P(asic_id.hw_internal_rev)) {
			dce_clk_mgr_construct(ctx, clk_mgr);
			return &clk_mgr->base;
		}
		if (ASIC_REV_IS_POLARIS10_P(asic_id.hw_internal_rev) ||
				ASIC_REV_IS_POLARIS11_M(asic_id.hw_internal_rev) ||
				ASIC_REV_IS_POLARIS12_V(asic_id.hw_internal_rev)) {
			dce112_clk_mgr_construct(ctx, clk_mgr);
			return &clk_mgr->base;
		}
		if (ASIC_REV_IS_VEGAM(asic_id.hw_internal_rev)) {
			dce112_clk_mgr_construct(ctx, clk_mgr);
			return &clk_mgr->base;
		}
		return &clk_mgr->base;
	}
	case FAMILY_AI: {
		struct clk_mgr_internal *clk_mgr = kzalloc(sizeof(*clk_mgr), GFP_KERNEL);

		if (clk_mgr == NULL) {
			BREAK_TO_DEBUGGER();
			return NULL;
		}
		if (ASICREV_IS_VEGA20_P(asic_id.hw_internal_rev))
			dce121_clk_mgr_construct(ctx, clk_mgr);
		else
			dce120_clk_mgr_construct(ctx, clk_mgr);
		return &clk_mgr->base;
	}
#if defined(CONFIG_DRM_AMD_DC_FP)
	case FAMILY_RV: {
		struct clk_mgr_internal *clk_mgr = kzalloc(sizeof(*clk_mgr), GFP_KERNEL);

		if (clk_mgr == NULL) {
			BREAK_TO_DEBUGGER();
			return NULL;
		}

		if (ASICREV_IS_RENOIR(asic_id.hw_internal_rev)) {
			rn_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
			return &clk_mgr->base;
		}

		if (ASICREV_IS_GREEN_SARDINE(asic_id.hw_internal_rev)) {
			rn_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
			return &clk_mgr->base;
		}
		if (ASICREV_IS_RAVEN2(asic_id.hw_internal_rev)) {
			rv2_clk_mgr_construct(ctx, clk_mgr, pp_smu);
			return &clk_mgr->base;
		}
		if (ASICREV_IS_RAVEN(asic_id.hw_internal_rev) ||
				ASICREV_IS_PICASSO(asic_id.hw_internal_rev)) {
			rv1_clk_mgr_construct(ctx, clk_mgr, pp_smu);
			return &clk_mgr->base;
		}
		return &clk_mgr->base;
	}
	case FAMILY_NV: {
		struct clk_mgr_internal *clk_mgr = kzalloc(sizeof(*clk_mgr), GFP_KERNEL);

		if (clk_mgr == NULL) {
			BREAK_TO_DEBUGGER();
			return NULL;
		}
		if (ASICREV_IS_SIENNA_CICHLID_P(asic_id.hw_internal_rev)) {
			dcn3_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
			return &clk_mgr->base;
		}
		if (ASICREV_IS_DIMGREY_CAVEFISH_P(asic_id.hw_internal_rev)) {
			dcn3_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
			return &clk_mgr->base;
		}
		if (ASICREV_IS_BEIGE_GOBY_P(asic_id.hw_internal_rev)) {
			dcn3_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
			return &clk_mgr->base;
		}
		if (ctx->dce_version == DCN_VERSION_2_01) {
			dcn201_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
			return &clk_mgr->base;
		}
		dcn20_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
		return &clk_mgr->base;
	}
	case FAMILY_VGH:
		if (ASICREV_IS_VANGOGH(asic_id.hw_internal_rev)) {
			struct clk_mgr_vgh *clk_mgr = kzalloc(sizeof(*clk_mgr), GFP_KERNEL);

			if (clk_mgr == NULL) {
				BREAK_TO_DEBUGGER();
				return NULL;
			}
			vg_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
			return &clk_mgr->base.base;
		}
		break;

	case FAMILY_YELLOW_CARP: {
		struct clk_mgr_dcn31 *clk_mgr = kzalloc(sizeof(*clk_mgr), GFP_KERNEL);

		if (clk_mgr == NULL) {
			BREAK_TO_DEBUGGER();
			return NULL;
		}

		dcn31_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
		return &clk_mgr->base.base;
	}
		break;
	case AMDGPU_FAMILY_GC_10_3_6: {
		struct clk_mgr_dcn315 *clk_mgr = kzalloc(sizeof(*clk_mgr), GFP_KERNEL);

		if (clk_mgr == NULL) {
			BREAK_TO_DEBUGGER();
			return NULL;
		}

		dcn315_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
		return &clk_mgr->base.base;
	}
		break;
	case AMDGPU_FAMILY_GC_10_3_7: {
		struct clk_mgr_dcn316 *clk_mgr = kzalloc(sizeof(*clk_mgr), GFP_KERNEL);

		if (clk_mgr == NULL) {
			BREAK_TO_DEBUGGER();
			return NULL;
		}

		dcn316_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
		return &clk_mgr->base.base;
	}
		break;
	case AMDGPU_FAMILY_GC_11_0_0: {
		struct clk_mgr_internal *clk_mgr = kzalloc(sizeof(*clk_mgr), GFP_KERNEL);

		if (clk_mgr == NULL) {
			BREAK_TO_DEBUGGER();
			return NULL;
		}
		dcn32_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
		return &clk_mgr->base;
	}

	case AMDGPU_FAMILY_GC_11_0_1: {
		struct clk_mgr_dcn314 *clk_mgr = kzalloc(sizeof(*clk_mgr), GFP_KERNEL);

		if (clk_mgr == NULL) {
			BREAK_TO_DEBUGGER();
			return NULL;
		}

		dcn314_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
		return &clk_mgr->base.base;
	}
	break;

	case AMDGPU_FAMILY_GC_11_5_0: {
		struct clk_mgr_dcn35 *clk_mgr = kzalloc(sizeof(*clk_mgr), GFP_KERNEL);

		if (clk_mgr == NULL) {
			BREAK_TO_DEBUGGER();
			return NULL;
		}

		dcn35_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
		return &clk_mgr->base.base;
	}
	break;

	case AMDGPU_FAMILY_GC_12_0_0: {
		struct clk_mgr_internal *clk_mgr = dcn401_clk_mgr_construct(ctx, dccg);

		if (clk_mgr == NULL) {
			BREAK_TO_DEBUGGER();
			return NULL;
		}

		return &clk_mgr->base;
	}
	break;
#endif	/* CONFIG_DRM_AMD_DC_FP */
	default:
		ASSERT(0); /* Unknown Asic */
		break;
	}

	return NULL;
}
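
/*
 * Tear down a clock manager created by dc_clk_mgr_create(): call the
 * family-specific destroy hook where one exists, then free the allocation.
 */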
void dc_destroy_clk_mgr(struct clk_mgr *clk_mgr_base)
{
	struct clk_mgr_internal *clk_mgr = TO_CLK_MGR_INTERNAL(clk_mgr_base);

#ifdef CONFIG_DRM_AMD_DC_FP
	switch (clk_mgr_base->ctx->asic_id.chip_family) {
	case FAMILY_NV:
		if (ASICREV_IS_SIENNA_CICHLID_P(clk_mgr_base->ctx->asic_id.hw_internal_rev)) {
			dcn3_clk_mgr_destroy(clk_mgr);
		} else if (ASICREV_IS_DIMGREY_CAVEFISH_P(clk_mgr_base->ctx->asic_id.hw_internal_rev)) {
			dcn3_clk_mgr_destroy(clk_mgr);
		}
		if (ASICREV_IS_BEIGE_GOBY_P(clk_mgr_base->ctx->asic_id.hw_internal_rev)) {
			dcn3_clk_mgr_destroy(clk_mgr);
		}
		break;

	case FAMILY_VGH:
		if (ASICREV_IS_VANGOGH(clk_mgr_base->ctx->asic_id.hw_internal_rev))
			vg_clk_mgr_destroy(clk_mgr);
		break;

	case FAMILY_YELLOW_CARP:
		dcn31_clk_mgr_destroy(clk_mgr);
		break;

	case AMDGPU_FAMILY_GC_10_3_6:
		dcn315_clk_mgr_destroy(clk_mgr);
		break;

	case AMDGPU_FAMILY_GC_10_3_7:
		dcn316_clk_mgr_destroy(clk_mgr);
		break;

	case AMDGPU_FAMILY_GC_11_0_0:
		dcn32_clk_mgr_destroy(clk_mgr);
		break;

	case AMDGPU_FAMILY_GC_11_0_1:
		dcn314_clk_mgr_destroy(clk_mgr);
		break;

	case AMDGPU_FAMILY_GC_11_5_0:
		dcn35_clk_mgr_destroy(clk_mgr);
		break;
	case AMDGPU_FAMILY_GC_12_0_0:
		dcn401_clk_mgr_destroy(clk_mgr);
		break;

	default:
		break;
	}
#endif /* CONFIG_DRM_AMD_DC_FP */

	kfree(clk_mgr);
}