/*
 * Copyright 2012-16 Advanced Micro Devices, Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: AMD
 *
 */

#include "dal_asic_id.h"
#include "dc_types.h"
#include "dccg.h"
#include "clk_mgr_internal.h"
#include "dc_state_priv.h"
#include "link.h"

#include "dce100/dce_clk_mgr.h"
#include "dce110/dce110_clk_mgr.h"
#include "dce112/dce112_clk_mgr.h"
#include "dce120/dce120_clk_mgr.h"
#include "dce60/dce60_clk_mgr.h"
#include "dcn10/rv1_clk_mgr.h"
#include "dcn10/rv2_clk_mgr.h"
#include "dcn20/dcn20_clk_mgr.h"
#include "dcn21/rn_clk_mgr.h"
#include "dcn201/dcn201_clk_mgr.h"
#include "dcn30/dcn30_clk_mgr.h"
#include "dcn301/vg_clk_mgr.h"
#include "dcn31/dcn31_clk_mgr.h"
#include "dcn314/dcn314_clk_mgr.h"
#include "dcn315/dcn315_clk_mgr.h"
#include "dcn316/dcn316_clk_mgr.h"
#include "dcn32/dcn32_clk_mgr.h"
#include "dcn35/dcn35_clk_mgr.h"
#include "dcn401/dcn401_clk_mgr.h"

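/*
 * Count the streams in @context that are actively driving a display.
 * SubVP phantom streams never count; any other stream counts when it is
 * not DPMS-off, when a display switch is in progress, or when it still
 * has planes attached.
 */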
int clk_mgr_helper_get_active_display_cnt(
		struct dc *dc,
		struct dc_state *context)
{
	int i, display_count;

	display_count = 0;
	for (i = 0; i < context->stream_count; i++) {
		const struct dc_stream_state *stream = context->streams[i];
		const struct dc_stream_status *stream_status = &context->stream_status[i];

		/* Don't count SubVP phantom pipes as part of active
		 * display count
		 */
		if (dc_state_get_stream_subvp_type(context, stream) == SUBVP_PHANTOM)
			continue;

		if (!stream->dpms_off || dc->is_switch_in_progress_dest || (stream_status && stream_status->plane_count))
			display_count++;
	}

	return display_count;
}

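/*
 * Return the total plane count summed over every stream in @context,
 * active and virtual alike.
 */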
int clk_mgr_helper_get_active_plane_cnt(
		struct dc *dc,
		struct dc_state *context)
{
	int i, total_plane_count;

	total_plane_count = 0;
	for (i = 0; i < context->stream_count; i++) {
		const struct dc_stream_status stream_status = context->stream_status[i];

		/*
		 * Sum up plane_count for all streams (active and virtual).
		 */
		total_plane_count += stream_status.plane_count;
	}

	return total_plane_count;
}

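/*
 * Leave the optimized power state: run the hardware sequencer's
 * exit_optimized_pwr_state hook, then force PSR and Panel Replay
 * inactive on every PSR-capable eDP link, caching the previous PSR
 * allow_active state so clk_mgr_optimize_pwr_state() can restore it.
 */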
void clk_mgr_exit_optimized_pwr_state(const struct dc *dc, struct clk_mgr *clk_mgr)
{
	struct dc_link *edp_links[MAX_NUM_EDP];
	struct dc_link *edp_link = NULL;
	int edp_num;
	unsigned int panel_inst;

	dc_get_edp_links(dc, edp_links, &edp_num);
	if (dc->hwss.exit_optimized_pwr_state)
		dc->hwss.exit_optimized_pwr_state(dc, dc->current_state);

	if (edp_num) {
		for (panel_inst = 0; panel_inst < edp_num; panel_inst++) {
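			/* Cache this panel's PSR allow_active state before
			 * forcing PSR and Panel Replay inactive.
			 */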
			bool allow_active = false;

			edp_link = edp_links[panel_inst];
			if (!edp_link->psr_settings.psr_feature_enabled)
				continue;
			clk_mgr->psr_allow_active_cache = edp_link->psr_settings.psr_allow_active;
			dc->link_srv->edp_set_psr_allow_active(edp_link, &allow_active, false, false, NULL);
			dc->link_srv->edp_set_replay_allow_active(edp_link, &allow_active, false, false, NULL);
		}
	}
}

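/*
 * Re-enter the optimized power state: restore the cached PSR/Replay
 * allow_active state on every PSR-capable eDP link, then run the
 * hardware sequencer's optimize_pwr_state hook.
 */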
void clk_mgr_optimize_pwr_state(const struct dc *dc, struct clk_mgr *clk_mgr)
{
	struct dc_link *edp_links[MAX_NUM_EDP];
	struct dc_link *edp_link = NULL;
	int edp_num;
	unsigned int panel_inst;

	dc_get_edp_links(dc, edp_links, &edp_num);
	if (edp_num) {
		for (panel_inst = 0; panel_inst < edp_num; panel_inst++) {
			edp_link = edp_links[panel_inst];
			if (!edp_link->psr_settings.psr_feature_enabled)
				continue;
			dc->link_srv->edp_set_psr_allow_active(edp_link,
					&clk_mgr->psr_allow_active_cache, false, false, NULL);
			dc->link_srv->edp_set_replay_allow_active(edp_link,
					&clk_mgr->psr_allow_active_cache, false, false, NULL);
		}
	}

	if (dc->hwss.optimize_pwr_state)
		dc->hwss.optimize_pwr_state(dc, dc->current_state);
}

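/*
 * Allocate and construct the clock manager matching the ASIC family and,
 * where one family spans several designs, the hardware revision or
 * DCE/DCN version. Returns NULL on allocation failure or for an unknown
 * ASIC family.
 *
 * Typical usage (a minimal sketch; the real call site in dc_construct()
 * passes the DC-owned PP SMU and DCCG objects):
 *
 *	dc->clk_mgr = dc_clk_mgr_create(dc->ctx, pp_smu, dccg);
 *	if (!dc->clk_mgr)
 *		goto fail;
 *	...
 *	dc_destroy_clk_mgr(dc->clk_mgr);
 */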
struct clk_mgr *dc_clk_mgr_create(struct dc_context *ctx, struct pp_smu_funcs *pp_smu, struct dccg *dccg)
{
	struct hw_asic_id asic_id = ctx->asic_id;

	switch (asic_id.chip_family) {
#if defined(CONFIG_DRM_AMD_DC_SI)
	case FAMILY_SI: {
		struct clk_mgr_internal *clk_mgr = kzalloc(sizeof(*clk_mgr), GFP_KERNEL);

		if (clk_mgr == NULL) {
			BREAK_TO_DEBUGGER();
			return NULL;
		}
		dce60_clk_mgr_construct(ctx, clk_mgr);
		return &clk_mgr->base;
	}
#endif
	case FAMILY_CI:
	case FAMILY_KV: {
		struct clk_mgr_internal *clk_mgr = kzalloc(sizeof(*clk_mgr), GFP_KERNEL);

		if (clk_mgr == NULL) {
			BREAK_TO_DEBUGGER();
			return NULL;
		}
		dce_clk_mgr_construct(ctx, clk_mgr);
		return &clk_mgr->base;
	}
	case FAMILY_CZ: {
		struct clk_mgr_internal *clk_mgr = kzalloc(sizeof(*clk_mgr), GFP_KERNEL);

		if (clk_mgr == NULL) {
			BREAK_TO_DEBUGGER();
			return NULL;
		}
		dce110_clk_mgr_construct(ctx, clk_mgr);
		return &clk_mgr->base;
	}
	case FAMILY_VI: {
		struct clk_mgr_internal *clk_mgr = kzalloc(sizeof(*clk_mgr), GFP_KERNEL);

		if (clk_mgr == NULL) {
			BREAK_TO_DEBUGGER();
			return NULL;
		}
		if (ASIC_REV_IS_TONGA_P(asic_id.hw_internal_rev) ||
				ASIC_REV_IS_FIJI_P(asic_id.hw_internal_rev)) {
			dce_clk_mgr_construct(ctx, clk_mgr);
			return &clk_mgr->base;
		}
		if (ASIC_REV_IS_POLARIS10_P(asic_id.hw_internal_rev) ||
				ASIC_REV_IS_POLARIS11_M(asic_id.hw_internal_rev) ||
				ASIC_REV_IS_POLARIS12_V(asic_id.hw_internal_rev) ||
				ASIC_REV_IS_VEGAM(asic_id.hw_internal_rev)) {
			dce112_clk_mgr_construct(ctx, clk_mgr);
			return &clk_mgr->base;
		}
		return &clk_mgr->base;
	}
	case FAMILY_AI: {
		struct clk_mgr_internal *clk_mgr = kzalloc(sizeof(*clk_mgr), GFP_KERNEL);

		if (clk_mgr == NULL) {
			BREAK_TO_DEBUGGER();
			return NULL;
		}
		if (ASICREV_IS_VEGA20_P(asic_id.hw_internal_rev))
			dce121_clk_mgr_construct(ctx, clk_mgr);
		else
			dce120_clk_mgr_construct(ctx, clk_mgr);
		return &clk_mgr->base;
	}
#if defined(CONFIG_DRM_AMD_DC_FP)
	case FAMILY_RV: {
		struct clk_mgr_internal *clk_mgr = kzalloc(sizeof(*clk_mgr), GFP_KERNEL);

		if (clk_mgr == NULL) {
			BREAK_TO_DEBUGGER();
			return NULL;
		}

		if (ASICREV_IS_RENOIR(asic_id.hw_internal_rev) ||
				ASICREV_IS_GREEN_SARDINE(asic_id.hw_internal_rev)) {
			rn_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
			return &clk_mgr->base;
		}
		if (ASICREV_IS_RAVEN2(asic_id.hw_internal_rev)) {
			rv2_clk_mgr_construct(ctx, clk_mgr, pp_smu);
			return &clk_mgr->base;
		}
		if (ASICREV_IS_RAVEN(asic_id.hw_internal_rev) ||
				ASICREV_IS_PICASSO(asic_id.hw_internal_rev)) {
			rv1_clk_mgr_construct(ctx, clk_mgr, pp_smu);
			return &clk_mgr->base;
		}
		return &clk_mgr->base;
	}
	case FAMILY_NV: {
		struct clk_mgr_internal *clk_mgr = kzalloc(sizeof(*clk_mgr), GFP_KERNEL);

		if (clk_mgr == NULL) {
			BREAK_TO_DEBUGGER();
			return NULL;
		}
		if (ASICREV_IS_SIENNA_CICHLID_P(asic_id.hw_internal_rev) ||
				ASICREV_IS_DIMGREY_CAVEFISH_P(asic_id.hw_internal_rev) ||
				ASICREV_IS_BEIGE_GOBY_P(asic_id.hw_internal_rev)) {
			dcn3_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
			return &clk_mgr->base;
		}
		if (ctx->dce_version == DCN_VERSION_2_01) {
			dcn201_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
			return &clk_mgr->base;
		}
		dcn20_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
		return &clk_mgr->base;
	}
	case FAMILY_VGH:
		if (ASICREV_IS_VANGOGH(asic_id.hw_internal_rev)) {
			struct clk_mgr_vgh *clk_mgr = kzalloc(sizeof(*clk_mgr), GFP_KERNEL);

			if (clk_mgr == NULL) {
				BREAK_TO_DEBUGGER();
				return NULL;
			}
			vg_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
			return &clk_mgr->base.base;
		}
		break;

	case FAMILY_YELLOW_CARP: {
		struct clk_mgr_dcn31 *clk_mgr = kzalloc(sizeof(*clk_mgr), GFP_KERNEL);

		if (clk_mgr == NULL) {
			BREAK_TO_DEBUGGER();
			return NULL;
		}

		dcn31_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
		return &clk_mgr->base.base;
	}
		break;
	case AMDGPU_FAMILY_GC_10_3_6: {
		struct clk_mgr_dcn315 *clk_mgr = kzalloc(sizeof(*clk_mgr), GFP_KERNEL);

		if (clk_mgr == NULL) {
			BREAK_TO_DEBUGGER();
			return NULL;
		}

		dcn315_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
		return &clk_mgr->base.base;
	}
		break;
	case AMDGPU_FAMILY_GC_10_3_7: {
		struct clk_mgr_dcn316 *clk_mgr = kzalloc(sizeof(*clk_mgr), GFP_KERNEL);

		if (clk_mgr == NULL) {
			BREAK_TO_DEBUGGER();
			return NULL;
		}

		dcn316_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
		return &clk_mgr->base.base;
	}
		break;
	case AMDGPU_FAMILY_GC_11_0_0: {
		struct clk_mgr_internal *clk_mgr = kzalloc(sizeof(*clk_mgr), GFP_KERNEL);

		if (clk_mgr == NULL) {
			BREAK_TO_DEBUGGER();
			return NULL;
		}
		dcn32_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
		return &clk_mgr->base;
	}

	case AMDGPU_FAMILY_GC_11_0_1: {
		struct clk_mgr_dcn314 *clk_mgr = kzalloc(sizeof(*clk_mgr), GFP_KERNEL);

		if (clk_mgr == NULL) {
			BREAK_TO_DEBUGGER();
			return NULL;
		}

		dcn314_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
		return &clk_mgr->base.base;
	}
		break;

	case AMDGPU_FAMILY_GC_11_5_0: {
		struct clk_mgr_dcn35 *clk_mgr = kzalloc(sizeof(*clk_mgr), GFP_KERNEL);

		if (clk_mgr == NULL) {
			BREAK_TO_DEBUGGER();
			return NULL;
		}
		if (ctx->dce_version == DCN_VERSION_3_51)
			dcn351_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
		else
			dcn35_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);

		return &clk_mgr->base.base;
	}
		break;

	case AMDGPU_FAMILY_GC_12_0_0: {
		struct clk_mgr_internal *clk_mgr = dcn401_clk_mgr_construct(ctx, dccg);

		if (clk_mgr == NULL) {
			BREAK_TO_DEBUGGER();
			return NULL;
		}

		return &clk_mgr->base;
	}
		break;
#endif /* CONFIG_DRM_AMD_DC_FP */
	default:
		ASSERT(0); /* Unknown Asic */
		break;
	}

	return NULL;
}

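/*
 * Tear down a clock manager created by dc_clk_mgr_create(): run the
 * family-specific destroy hook, if any, then free the allocation.
 */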
void dc_destroy_clk_mgr(struct clk_mgr *clk_mgr_base)
{
	struct clk_mgr_internal *clk_mgr = TO_CLK_MGR_INTERNAL(clk_mgr_base);

#ifdef CONFIG_DRM_AMD_DC_FP
	switch (clk_mgr_base->ctx->asic_id.chip_family) {
	case FAMILY_NV:
		if (ASICREV_IS_SIENNA_CICHLID_P(clk_mgr_base->ctx->asic_id.hw_internal_rev) ||
				ASICREV_IS_DIMGREY_CAVEFISH_P(clk_mgr_base->ctx->asic_id.hw_internal_rev) ||
				ASICREV_IS_BEIGE_GOBY_P(clk_mgr_base->ctx->asic_id.hw_internal_rev))
			dcn3_clk_mgr_destroy(clk_mgr);
		break;

	case FAMILY_VGH:
		if (ASICREV_IS_VANGOGH(clk_mgr_base->ctx->asic_id.hw_internal_rev))
			vg_clk_mgr_destroy(clk_mgr);
		break;

	case FAMILY_YELLOW_CARP:
		dcn31_clk_mgr_destroy(clk_mgr);
		break;

	case AMDGPU_FAMILY_GC_10_3_6:
		dcn315_clk_mgr_destroy(clk_mgr);
		break;

	case AMDGPU_FAMILY_GC_10_3_7:
		dcn316_clk_mgr_destroy(clk_mgr);
		break;

	case AMDGPU_FAMILY_GC_11_0_0:
		dcn32_clk_mgr_destroy(clk_mgr);
		break;

	case AMDGPU_FAMILY_GC_11_0_1:
		dcn314_clk_mgr_destroy(clk_mgr);
		break;

	case AMDGPU_FAMILY_GC_11_5_0:
		dcn35_clk_mgr_destroy(clk_mgr);
		break;

	case AMDGPU_FAMILY_GC_12_0_0:
		dcn401_clk_mgr_destroy(clk_mgr);
		break;

	default:
		break;
	}
#endif /* CONFIG_DRM_AMD_DC_FP */

	kfree(clk_mgr);
}