/*
 * Copyright 2011 Advanced Micro Devices, Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: Alex Deucher
 */

#include <linux/pci.h>

#include "atom.h"
#include "cypress_dpm.h"
#include "evergreen.h"
#include "evergreend.h"
#include "r600_dpm.h"
#include "rv770.h"
#include "radeon.h"
#include "radeon_asic.h"

#define SMC_RAM_END 0x8000

#define MC_CG_ARB_FREQ_F0           0x0a
#define MC_CG_ARB_FREQ_F1           0x0b
#define MC_CG_ARB_FREQ_F2           0x0c
#define MC_CG_ARB_FREQ_F3           0x0d

#define MC_CG_SEQ_DRAMCONF_S0       0x05
#define MC_CG_SEQ_DRAMCONF_S1       0x06
#define MC_CG_SEQ_YCLK_SUSPEND      0x04
#define MC_CG_SEQ_YCLK_RESUME       0x0a

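/*
 * Enable or disable dynamic PCIe gen2 link speed switching in the BIF.
 * Only takes effect if the other end of the link supports gen2 and the
 * board did not already boot in gen2 mode.
 */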
static void cypress_enable_bif_dynamic_pcie_gen2(struct radeon_device *rdev,
						 bool enable)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	u32 tmp, bif;

	tmp = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
	if (enable) {
		if ((tmp & LC_OTHER_SIDE_EVER_SENT_GEN2) &&
		    (tmp & LC_OTHER_SIDE_SUPPORTS_GEN2)) {
			if (!pi->boot_in_gen2) {
				bif = RREG32(CG_BIF_REQ_AND_RSP) & ~CG_CLIENT_REQ_MASK;
				bif |= CG_CLIENT_REQ(0xd);
				WREG32(CG_BIF_REQ_AND_RSP, bif);

				tmp &= ~LC_HW_VOLTAGE_IF_CONTROL_MASK;
				tmp |= LC_HW_VOLTAGE_IF_CONTROL(1);
				tmp |= LC_GEN2_EN_STRAP;

				tmp |= LC_CLR_FAILED_SPD_CHANGE_CNT;
				WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, tmp);
				udelay(10);
				tmp &= ~LC_CLR_FAILED_SPD_CHANGE_CNT;
				WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, tmp);
			}
		}
	} else {
		if (!pi->boot_in_gen2) {
			tmp &= ~LC_HW_VOLTAGE_IF_CONTROL_MASK;
			tmp &= ~LC_GEN2_EN_STRAP;
		}
		if ((tmp & LC_OTHER_SIDE_EVER_SENT_GEN2) ||
		    (tmp & LC_OTHER_SIDE_SUPPORTS_GEN2))
			WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, tmp);
	}
}

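/* Toggle dynamic PCIe gen2 in both the BIF and the power management block */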
static void cypress_enable_dynamic_pcie_gen2(struct radeon_device *rdev,
					     bool enable)
{
	cypress_enable_bif_dynamic_pcie_gen2(rdev, enable);

	if (enable)
		WREG32_P(GENERAL_PWRMGT, ENABLE_GEN2PCIE, ~ENABLE_GEN2PCIE);
	else
		WREG32_P(GENERAL_PWRMGT, 0, ~ENABLE_GEN2PCIE);
}

#if 0
static int cypress_enter_ulp_state(struct radeon_device *rdev)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);

	if (pi->gfx_clock_gating) {
		WREG32_P(SCLK_PWRMGT_CNTL, 0, ~DYN_GFX_CLK_OFF_EN);
		WREG32_P(SCLK_PWRMGT_CNTL, GFX_CLK_FORCE_ON, ~GFX_CLK_FORCE_ON);
		WREG32_P(SCLK_PWRMGT_CNTL, 0, ~GFX_CLK_FORCE_ON);

		RREG32(GB_ADDR_CONFIG);
	}

	WREG32_P(SMC_MSG, HOST_SMC_MSG(PPSMC_MSG_SwitchToMinimumPower),
		 ~HOST_SMC_MSG_MASK);

	udelay(7000);

	return 0;
}
#endif

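/*
 * Enable or disable dynamic gfx clock gating; when light sleep is supported,
 * the per-tile CGLS registers are programmed as well.
 */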
static void cypress_gfx_clock_gating_enable(struct radeon_device *rdev,
					    bool enable)
{
	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);

	if (enable) {
		if (eg_pi->light_sleep) {
			WREG32(GRBM_GFX_INDEX, 0xC0000000);

			WREG32_CG(CG_CGLS_TILE_0, 0xFFFFFFFF);
			WREG32_CG(CG_CGLS_TILE_1, 0xFFFFFFFF);
			WREG32_CG(CG_CGLS_TILE_2, 0xFFFFFFFF);
			WREG32_CG(CG_CGLS_TILE_3, 0xFFFFFFFF);
			WREG32_CG(CG_CGLS_TILE_4, 0xFFFFFFFF);
			WREG32_CG(CG_CGLS_TILE_5, 0xFFFFFFFF);
			WREG32_CG(CG_CGLS_TILE_6, 0xFFFFFFFF);
			WREG32_CG(CG_CGLS_TILE_7, 0xFFFFFFFF);
			WREG32_CG(CG_CGLS_TILE_8, 0xFFFFFFFF);
			WREG32_CG(CG_CGLS_TILE_9, 0xFFFFFFFF);
			WREG32_CG(CG_CGLS_TILE_10, 0xFFFFFFFF);
			WREG32_CG(CG_CGLS_TILE_11, 0xFFFFFFFF);

			WREG32_P(SCLK_PWRMGT_CNTL, DYN_LIGHT_SLEEP_EN, ~DYN_LIGHT_SLEEP_EN);
		}
		WREG32_P(SCLK_PWRMGT_CNTL, DYN_GFX_CLK_OFF_EN, ~DYN_GFX_CLK_OFF_EN);
	} else {
		WREG32_P(SCLK_PWRMGT_CNTL, 0, ~DYN_GFX_CLK_OFF_EN);
		WREG32_P(SCLK_PWRMGT_CNTL, GFX_CLK_FORCE_ON, ~GFX_CLK_FORCE_ON);
		WREG32_P(SCLK_PWRMGT_CNTL, 0, ~GFX_CLK_FORCE_ON);
		RREG32(GB_ADDR_CONFIG);

		if (eg_pi->light_sleep) {
			WREG32_P(SCLK_PWRMGT_CNTL, 0, ~DYN_LIGHT_SLEEP_EN);

			WREG32(GRBM_GFX_INDEX, 0xC0000000);

			WREG32_CG(CG_CGLS_TILE_0, 0);
			WREG32_CG(CG_CGLS_TILE_1, 0);
			WREG32_CG(CG_CGLS_TILE_2, 0);
			WREG32_CG(CG_CGLS_TILE_3, 0);
			WREG32_CG(CG_CGLS_TILE_4, 0);
			WREG32_CG(CG_CGLS_TILE_5, 0);
			WREG32_CG(CG_CGLS_TILE_6, 0);
			WREG32_CG(CG_CGLS_TILE_7, 0);
			WREG32_CG(CG_CGLS_TILE_8, 0);
			WREG32_CG(CG_CGLS_TILE_9, 0);
			WREG32_CG(CG_CGLS_TILE_10, 0);
			WREG32_CG(CG_CGLS_TILE_11, 0);
		}
	}
}

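/*
 * Enable or disable medium grain clock gating and, when supported,
 * memory controller light sleep.
 */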
static void cypress_mg_clock_gating_enable(struct radeon_device *rdev,
					   bool enable)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);

	if (enable) {
		u32 cgts_sm_ctrl_reg;

		if (rdev->family == CHIP_CEDAR)
			cgts_sm_ctrl_reg = CEDAR_MGCGCGTSSMCTRL_DFLT;
		else if (rdev->family == CHIP_REDWOOD)
			cgts_sm_ctrl_reg = REDWOOD_MGCGCGTSSMCTRL_DFLT;
		else
			cgts_sm_ctrl_reg = CYPRESS_MGCGCGTSSMCTRL_DFLT;

		WREG32(GRBM_GFX_INDEX, 0xC0000000);

		WREG32_CG(CG_CGTT_LOCAL_0, CYPRESS_MGCGTTLOCAL0_DFLT);
		WREG32_CG(CG_CGTT_LOCAL_1, CYPRESS_MGCGTTLOCAL1_DFLT & 0xFFFFCFFF);
		WREG32_CG(CG_CGTT_LOCAL_2, CYPRESS_MGCGTTLOCAL2_DFLT);
		WREG32_CG(CG_CGTT_LOCAL_3, CYPRESS_MGCGTTLOCAL3_DFLT);

		if (pi->mgcgtssm)
			WREG32(CGTS_SM_CTRL_REG, cgts_sm_ctrl_reg);

		if (eg_pi->mcls) {
			WREG32_P(MC_CITF_MISC_RD_CG, MEM_LS_ENABLE, ~MEM_LS_ENABLE);
			WREG32_P(MC_CITF_MISC_WR_CG, MEM_LS_ENABLE, ~MEM_LS_ENABLE);
			WREG32_P(MC_CITF_MISC_VM_CG, MEM_LS_ENABLE, ~MEM_LS_ENABLE);
			WREG32_P(MC_HUB_MISC_HUB_CG, MEM_LS_ENABLE, ~MEM_LS_ENABLE);
			WREG32_P(MC_HUB_MISC_VM_CG, MEM_LS_ENABLE, ~MEM_LS_ENABLE);
			WREG32_P(MC_HUB_MISC_SIP_CG, MEM_LS_ENABLE, ~MEM_LS_ENABLE);
			WREG32_P(MC_XPB_CLK_GAT, MEM_LS_ENABLE, ~MEM_LS_ENABLE);
			WREG32_P(VM_L2_CG, MEM_LS_ENABLE, ~MEM_LS_ENABLE);
		}
	} else {
		WREG32(GRBM_GFX_INDEX, 0xC0000000);

		WREG32_CG(CG_CGTT_LOCAL_0, 0xFFFFFFFF);
		WREG32_CG(CG_CGTT_LOCAL_1, 0xFFFFFFFF);
		WREG32_CG(CG_CGTT_LOCAL_2, 0xFFFFFFFF);
		WREG32_CG(CG_CGTT_LOCAL_3, 0xFFFFFFFF);

		if (pi->mgcgtssm)
			WREG32(CGTS_SM_CTRL_REG, 0x81f44bc0);
	}
}

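/* Enable/disable spread spectrum on the engine (SPLL) and memory (MPLL) PLLs */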
void cypress_enable_spread_spectrum(struct radeon_device *rdev,
				    bool enable)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);

	if (enable) {
		if (pi->sclk_ss)
			WREG32_P(GENERAL_PWRMGT, DYN_SPREAD_SPECTRUM_EN, ~DYN_SPREAD_SPECTRUM_EN);

		if (pi->mclk_ss)
			WREG32_P(MPLL_CNTL_MODE, SS_SSEN, ~SS_SSEN);
	} else {
		WREG32_P(CG_SPLL_SPREAD_SPECTRUM, 0, ~SSEN);
		WREG32_P(GENERAL_PWRMGT, 0, ~DYN_SPREAD_SPECTRUM_EN);
		WREG32_P(MPLL_CNTL_MODE, 0, ~SS_SSEN);
		WREG32_P(MPLL_CNTL_MODE, 0, ~SS_DSMODE_EN);
	}
}

void cypress_start_dpm(struct radeon_device *rdev)
{
	WREG32_P(GENERAL_PWRMGT, GLOBAL_PWRMGT_EN, ~GLOBAL_PWRMGT_EN);
}

void cypress_enable_sclk_control(struct radeon_device *rdev,
				 bool enable)
{
	if (enable)
		WREG32_P(SCLK_PWRMGT_CNTL, 0, ~SCLK_PWRMGT_OFF);
	else
		WREG32_P(SCLK_PWRMGT_CNTL, SCLK_PWRMGT_OFF, ~SCLK_PWRMGT_OFF);
}

void cypress_enable_mclk_control(struct radeon_device *rdev,
				 bool enable)
{
	if (enable)
		WREG32_P(MCLK_PWRMGT_CNTL, 0, ~MPLL_PWRMGT_OFF);
	else
		WREG32_P(MCLK_PWRMGT_CNTL, MPLL_PWRMGT_OFF, ~MPLL_PWRMGT_OFF);
}

int cypress_notify_smc_display_change(struct radeon_device *rdev,
				      bool has_display)
{
	PPSMC_Msg msg = has_display ?
		(PPSMC_Msg)PPSMC_MSG_HasDisplay : (PPSMC_Msg)PPSMC_MSG_NoDisplay;

	if (rv770_send_msg_to_smc(rdev, msg) != PPSMC_Result_OK)
		return -EINVAL;

	return 0;
}

void cypress_program_response_times(struct radeon_device *rdev)
{
	u32 reference_clock;
	u32 mclk_switch_limit;

	reference_clock = radeon_get_xclk(rdev);
	mclk_switch_limit = (460 * reference_clock) / 100;

	rv770_write_smc_soft_register(rdev,
				      RV770_SMC_SOFT_REGISTER_mclk_switch_lim,
				      mclk_switch_limit);

	rv770_write_smc_soft_register(rdev,
				      RV770_SMC_SOFT_REGISTER_mvdd_chg_time, 1);

	rv770_write_smc_soft_register(rdev,
				      RV770_SMC_SOFT_REGISTER_mc_block_delay, 0xAA);

	rv770_program_response_times(rdev);

	if (ASIC_IS_LOMBOK(rdev))
		rv770_write_smc_soft_register(rdev,
					      RV770_SMC_SOFT_REGISTER_is_asic_lombok, 1);
}

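/*
 * Forward a PCIe performance (link speed) request to the platform via the
 * radeon ACPI helpers when ACPI support is built in; returns early if gen1
 * is requested and the link is already running at the current data rate.
 */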
static int cypress_pcie_performance_request(struct radeon_device *rdev,
					    u8 perf_req, bool advertise)
{
#if defined(CONFIG_ACPI)
	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
#endif
	u32 tmp;

	udelay(10);
	tmp = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
	if ((perf_req == PCIE_PERF_REQ_PECI_GEN1) && (tmp & LC_CURRENT_DATA_RATE))
		return 0;

#if defined(CONFIG_ACPI)
	if ((perf_req == PCIE_PERF_REQ_PECI_GEN1) ||
	    (perf_req == PCIE_PERF_REQ_PECI_GEN2)) {
		eg_pi->pcie_performance_request_registered = true;
		return radeon_acpi_pcie_performance_request(rdev, perf_req, advertise);
	} else if ((perf_req == PCIE_PERF_REQ_REMOVE_REGISTRY) &&
		   eg_pi->pcie_performance_request_registered) {
		eg_pi->pcie_performance_request_registered = false;
		return radeon_acpi_pcie_performance_request(rdev, perf_req, advertise);
	}
#endif

	return 0;
}

void cypress_advertise_gen2_capability(struct radeon_device *rdev)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	u32 tmp;

#if defined(CONFIG_ACPI)
	radeon_acpi_pcie_notify_device_ready(rdev);
#endif

	tmp = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);

	if ((tmp & LC_OTHER_SIDE_EVER_SENT_GEN2) &&
	    (tmp & LC_OTHER_SIDE_SUPPORTS_GEN2))
		pi->pcie_gen2 = true;
	else
		pi->pcie_gen2 = false;

	if (!pi->pcie_gen2)
		cypress_pcie_performance_request(rdev, PCIE_PERF_REQ_PECI_GEN2, true);
}

static enum radeon_pcie_gen cypress_get_maximum_link_speed(struct radeon_ps *radeon_state)
{
	struct rv7xx_ps *state = rv770_get_ps(radeon_state);

	if (state->high.flags & ATOM_PPLIB_R600_FLAGS_PCIEGEN2)
		return 1;
	return 0;
}

void cypress_notify_link_speed_change_after_state_change(struct radeon_device *rdev,
							 struct radeon_ps *radeon_new_state,
							 struct radeon_ps *radeon_current_state)
{
	enum radeon_pcie_gen pcie_link_speed_target =
		cypress_get_maximum_link_speed(radeon_new_state);
	enum radeon_pcie_gen pcie_link_speed_current =
		cypress_get_maximum_link_speed(radeon_current_state);
	u8 request;

	if (pcie_link_speed_target < pcie_link_speed_current) {
		if (pcie_link_speed_target == RADEON_PCIE_GEN1)
			request = PCIE_PERF_REQ_PECI_GEN1;
		else if (pcie_link_speed_target == RADEON_PCIE_GEN2)
			request = PCIE_PERF_REQ_PECI_GEN2;
		else
			request = PCIE_PERF_REQ_PECI_GEN3;

		cypress_pcie_performance_request(rdev, request, false);
	}
}

void cypress_notify_link_speed_change_before_state_change(struct radeon_device *rdev,
							  struct radeon_ps *radeon_new_state,
							  struct radeon_ps *radeon_current_state)
{
	enum radeon_pcie_gen pcie_link_speed_target =
		cypress_get_maximum_link_speed(radeon_new_state);
	enum radeon_pcie_gen pcie_link_speed_current =
		cypress_get_maximum_link_speed(radeon_current_state);
	u8 request;

	if (pcie_link_speed_target > pcie_link_speed_current) {
		if (pcie_link_speed_target == RADEON_PCIE_GEN1)
			request = PCIE_PERF_REQ_PECI_GEN1;
		else if (pcie_link_speed_target == RADEON_PCIE_GEN2)
			request = PCIE_PERF_REQ_PECI_GEN2;
		else
			request = PCIE_PERF_REQ_PECI_GEN3;

		cypress_pcie_performance_request(rdev, request, false);
	}
}

static int cypress_populate_voltage_value(struct radeon_device *rdev,
					  struct atom_voltage_table *table,
					  u16 value, RV770_SMC_VOLTAGE_VALUE *voltage)
{
	unsigned int i;

	for (i = 0; i < table->count; i++) {
		if (value <= table->entries[i].value) {
			voltage->index = (u8)i;
			voltage->value = cpu_to_be16(table->entries[i].value);
			break;
		}
	}

	if (i == table->count)
		return -EINVAL;

	return 0;
}

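/*
 * Pick the memory strobe mode setting for a given mclk: on GDDR5, return the
 * frequency ratio and set the strobe enable bit below the strobe threshold.
 */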
u8 cypress_get_strobe_mode_settings(struct radeon_device *rdev, u32 mclk)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	u8 result = 0;
	bool strobe_mode = false;

	if (pi->mem_gddr5) {
		if (mclk <= pi->mclk_strobe_mode_threshold)
			strobe_mode = true;
		result = cypress_get_mclk_frequency_ratio(rdev, mclk, strobe_mode);

		if (strobe_mode)
			result |= SMC_STROBE_ENABLE;
	}

	return result;
}

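/* Map the MPLL feedback divider (clkf) to an IBIAS value based on the resulting VCO frequency */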
u32 cypress_map_clkf_to_ibias(struct radeon_device *rdev, u32 clkf)
{
	u32 ref_clk = rdev->clock.mpll.reference_freq;
	u32 vco = clkf * ref_clk;

	/* 100 Mhz ref clk */
	if (ref_clk == 10000) {
		if (vco > 500000)
			return 0xC6;
		if (vco > 400000)
			return 0x9D;
		if (vco > 330000)
			return 0x6C;
		if (vco > 250000)
			return 0x2B;
		if (vco > 160000)
			return 0x5B;
		if (vco > 120000)
			return 0x0A;
		return 0x4B;
	}

	/* 27 Mhz ref clk */
	if (vco > 250000)
		return 0x8B;
	if (vco > 200000)
		return 0xCC;
	if (vco > 150000)
		return 0x9B;
	return 0x6B;
}

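/*
 * Compute the MPLL register settings (dividers, IBIAS, spread spectrum,
 * DLL speed) for a target memory clock and pack them into the SMC mclk value.
 */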
static int cypress_populate_mclk_value(struct radeon_device *rdev,
				       u32 engine_clock, u32 memory_clock,
				       RV7XX_SMC_MCLK_VALUE *mclk,
				       bool strobe_mode, bool dll_state_on)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);

	u32 mpll_ad_func_cntl =
		pi->clk_regs.rv770.mpll_ad_func_cntl;
	u32 mpll_ad_func_cntl_2 =
		pi->clk_regs.rv770.mpll_ad_func_cntl_2;
	u32 mpll_dq_func_cntl =
		pi->clk_regs.rv770.mpll_dq_func_cntl;
	u32 mpll_dq_func_cntl_2 =
		pi->clk_regs.rv770.mpll_dq_func_cntl_2;
	u32 mclk_pwrmgt_cntl =
		pi->clk_regs.rv770.mclk_pwrmgt_cntl;
	u32 dll_cntl =
		pi->clk_regs.rv770.dll_cntl;
	u32 mpll_ss1 = pi->clk_regs.rv770.mpll_ss1;
	u32 mpll_ss2 = pi->clk_regs.rv770.mpll_ss2;
	struct atom_clock_dividers dividers;
	u32 ibias;
	u32 dll_speed;
	int ret;
	u32 mc_seq_misc7;

	ret = radeon_atom_get_clock_dividers(rdev, COMPUTE_MEMORY_PLL_PARAM,
					     memory_clock, strobe_mode, &dividers);
	if (ret)
		return ret;

	if (!strobe_mode) {
		mc_seq_misc7 = RREG32(MC_SEQ_MISC7);

		if (mc_seq_misc7 & 0x8000000)
			dividers.post_div = 1;
	}

	ibias = cypress_map_clkf_to_ibias(rdev, dividers.whole_fb_div);

	mpll_ad_func_cntl &= ~(CLKR_MASK |
			       YCLK_POST_DIV_MASK |
			       CLKF_MASK |
			       CLKFRAC_MASK |
			       IBIAS_MASK);
	mpll_ad_func_cntl |= CLKR(dividers.ref_div);
	mpll_ad_func_cntl |= YCLK_POST_DIV(dividers.post_div);
	mpll_ad_func_cntl |= CLKF(dividers.whole_fb_div);
	mpll_ad_func_cntl |= CLKFRAC(dividers.frac_fb_div);
	mpll_ad_func_cntl |= IBIAS(ibias);

	if (dividers.vco_mode)
		mpll_ad_func_cntl_2 |= VCO_MODE;
	else
		mpll_ad_func_cntl_2 &= ~VCO_MODE;

	if (pi->mem_gddr5) {
		mpll_dq_func_cntl &= ~(CLKR_MASK |
				       YCLK_POST_DIV_MASK |
				       CLKF_MASK |
				       CLKFRAC_MASK |
				       IBIAS_MASK);
		mpll_dq_func_cntl |= CLKR(dividers.ref_div);
		mpll_dq_func_cntl |= YCLK_POST_DIV(dividers.post_div);
		mpll_dq_func_cntl |= CLKF(dividers.whole_fb_div);
		mpll_dq_func_cntl |= CLKFRAC(dividers.frac_fb_div);
		mpll_dq_func_cntl |= IBIAS(ibias);

		if (strobe_mode)
			mpll_dq_func_cntl &= ~PDNB;
		else
			mpll_dq_func_cntl |= PDNB;

		if (dividers.vco_mode)
			mpll_dq_func_cntl_2 |= VCO_MODE;
		else
			mpll_dq_func_cntl_2 &= ~VCO_MODE;
	}

	if (pi->mclk_ss) {
		struct radeon_atom_ss ss;
		u32 vco_freq = memory_clock * dividers.post_div;

		if (radeon_atombios_get_asic_ss_info(rdev, &ss,
						     ASIC_INTERNAL_MEMORY_SS, vco_freq)) {
			u32 reference_clock = rdev->clock.mpll.reference_freq;
			u32 decoded_ref = rv740_get_decoded_reference_divider(dividers.ref_div);
			u32 clk_s, clk_v;

			if (!decoded_ref)
				return -EINVAL;
			clk_s = reference_clock * 5 / (decoded_ref * ss.rate);
			clk_v = ss.percentage *
				(0x4000 * dividers.whole_fb_div + 0x800 * dividers.frac_fb_div) / (clk_s * 625);

			mpll_ss1 &= ~CLKV_MASK;
			mpll_ss1 |= CLKV(clk_v);

			mpll_ss2 &= ~CLKS_MASK;
			mpll_ss2 |= CLKS(clk_s);
		}
	}

	dll_speed = rv740_get_dll_speed(pi->mem_gddr5,
					memory_clock);

	mclk_pwrmgt_cntl &= ~DLL_SPEED_MASK;
	mclk_pwrmgt_cntl |= DLL_SPEED(dll_speed);
	if (dll_state_on)
		mclk_pwrmgt_cntl |= (MRDCKA0_PDNB |
				     MRDCKA1_PDNB |
				     MRDCKB0_PDNB |
				     MRDCKB1_PDNB |
				     MRDCKC0_PDNB |
				     MRDCKC1_PDNB |
				     MRDCKD0_PDNB |
				     MRDCKD1_PDNB);
	else
		mclk_pwrmgt_cntl &= ~(MRDCKA0_PDNB |
				      MRDCKA1_PDNB |
				      MRDCKB0_PDNB |
				      MRDCKB1_PDNB |
				      MRDCKC0_PDNB |
				      MRDCKC1_PDNB |
				      MRDCKD0_PDNB |
				      MRDCKD1_PDNB);

	mclk->mclk770.mclk_value = cpu_to_be32(memory_clock);
	mclk->mclk770.vMPLL_AD_FUNC_CNTL = cpu_to_be32(mpll_ad_func_cntl);
	mclk->mclk770.vMPLL_AD_FUNC_CNTL_2 = cpu_to_be32(mpll_ad_func_cntl_2);
	mclk->mclk770.vMPLL_DQ_FUNC_CNTL = cpu_to_be32(mpll_dq_func_cntl);
	mclk->mclk770.vMPLL_DQ_FUNC_CNTL_2 = cpu_to_be32(mpll_dq_func_cntl_2);
	mclk->mclk770.vMCLK_PWRMGT_CNTL = cpu_to_be32(mclk_pwrmgt_cntl);
	mclk->mclk770.vDLL_CNTL = cpu_to_be32(dll_cntl);
	mclk->mclk770.vMPLL_SS = cpu_to_be32(mpll_ss1);
	mclk->mclk770.vMPLL_SS2 = cpu_to_be32(mpll_ss2);

	return 0;
}

u8 cypress_get_mclk_frequency_ratio(struct radeon_device *rdev,
				    u32 memory_clock, bool strobe_mode)
{
	u8 mc_para_index;

	if (rdev->family >= CHIP_BARTS) {
		if (strobe_mode) {
			if (memory_clock < 10000)
				mc_para_index = 0x00;
			else if (memory_clock > 47500)
				mc_para_index = 0x0f;
			else
				mc_para_index = (u8)((memory_clock - 10000) / 2500);
		} else {
			if (memory_clock < 65000)
				mc_para_index = 0x00;
			else if (memory_clock > 135000)
				mc_para_index = 0x0f;
			else
				mc_para_index = (u8)((memory_clock - 60000) / 5000);
		}
	} else {
		if (strobe_mode) {
			if (memory_clock < 10000)
				mc_para_index = 0x00;
			else if (memory_clock > 47500)
				mc_para_index = 0x0f;
			else
				mc_para_index = (u8)((memory_clock - 10000) / 2500);
		} else {
			if (memory_clock < 40000)
				mc_para_index = 0x00;
			else if (memory_clock > 115000)
				mc_para_index = 0x0f;
			else
				mc_para_index = (u8)((memory_clock - 40000) / 5000);
		}
	}
	return mc_para_index;
}

static int cypress_populate_mvdd_value(struct radeon_device *rdev,
				       u32 mclk,
				       RV770_SMC_VOLTAGE_VALUE *voltage)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);

	if (!pi->mvdd_control) {
		voltage->index = eg_pi->mvdd_high_index;
		voltage->value = cpu_to_be16(MVDD_HIGH_VALUE);
		return 0;
	}

	if (mclk <= pi->mvdd_split_frequency) {
		voltage->index = eg_pi->mvdd_low_index;
		voltage->value = cpu_to_be16(MVDD_LOW_VALUE);
	} else {
		voltage->index = eg_pi->mvdd_high_index;
		voltage->value = cpu_to_be16(MVDD_HIGH_VALUE);
	}

	return 0;
}

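/*
 * Translate one driver performance level (sclk, mclk, voltages, flags) into
 * the SMC hardware performance level, including stutter/EDC/strobe settings.
 */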
int cypress_convert_power_level_to_smc(struct radeon_device *rdev,
				       struct rv7xx_pl *pl,
				       RV770_SMC_HW_PERFORMANCE_LEVEL *level,
				       u8 watermark_level)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
	int ret;
	bool dll_state_on;

	level->gen2PCIE = pi->pcie_gen2 ?
		((pl->flags & ATOM_PPLIB_R600_FLAGS_PCIEGEN2) ? 1 : 0) : 0;
	level->gen2XSP = (pl->flags & ATOM_PPLIB_R600_FLAGS_PCIEGEN2) ? 1 : 0;
	level->backbias = (pl->flags & ATOM_PPLIB_R600_FLAGS_BACKBIASENABLE) ? 1 : 0;
	level->displayWatermark = watermark_level;

	ret = rv740_populate_sclk_value(rdev, pl->sclk, &level->sclk);
	if (ret)
		return ret;

	level->mcFlags = 0;
	if (pi->mclk_stutter_mode_threshold &&
	    (pl->mclk <= pi->mclk_stutter_mode_threshold) &&
	    !eg_pi->uvd_enabled) {
		level->mcFlags |= SMC_MC_STUTTER_EN;
		if (eg_pi->sclk_deep_sleep)
			level->stateFlags |= PPSMC_STATEFLAG_AUTO_PULSE_SKIP;
		else
			level->stateFlags &= ~PPSMC_STATEFLAG_AUTO_PULSE_SKIP;
	}

	if (pi->mem_gddr5) {
		if (pl->mclk > pi->mclk_edc_enable_threshold)
			level->mcFlags |= SMC_MC_EDC_RD_FLAG;

		if (pl->mclk > eg_pi->mclk_edc_wr_enable_threshold)
			level->mcFlags |= SMC_MC_EDC_WR_FLAG;

		level->strobeMode = cypress_get_strobe_mode_settings(rdev, pl->mclk);

		if (level->strobeMode & SMC_STROBE_ENABLE) {
			if (cypress_get_mclk_frequency_ratio(rdev, pl->mclk, true) >=
			    ((RREG32(MC_SEQ_MISC7) >> 16) & 0xf))
				dll_state_on = ((RREG32(MC_SEQ_MISC5) >> 1) & 0x1) ? true : false;
			else
				dll_state_on = ((RREG32(MC_SEQ_MISC6) >> 1) & 0x1) ? true : false;
		} else
			dll_state_on = eg_pi->dll_default_on;

		ret = cypress_populate_mclk_value(rdev,
						  pl->sclk,
						  pl->mclk,
						  &level->mclk,
						  (level->strobeMode & SMC_STROBE_ENABLE) != 0,
						  dll_state_on);
	} else {
		ret = cypress_populate_mclk_value(rdev,
						  pl->sclk,
						  pl->mclk,
						  &level->mclk,
						  true,
						  true);
	}
	if (ret)
		return ret;

	ret = cypress_populate_voltage_value(rdev,
					     &eg_pi->vddc_voltage_table,
					     pl->vddc,
					     &level->vddc);
	if (ret)
		return ret;

	if (eg_pi->vddci_control) {
		ret = cypress_populate_voltage_value(rdev,
						     &eg_pi->vddci_voltage_table,
						     pl->vddci,
						     &level->vddci);
		if (ret)
			return ret;
	}

	ret = cypress_populate_mvdd_value(rdev, pl->mclk, &level->mvdd);

	return ret;
}

static int cypress_convert_power_state_to_smc(struct radeon_device *rdev,
					      struct radeon_ps *radeon_state,
					      RV770_SMC_SWSTATE *smc_state)
{
	struct rv7xx_ps *state = rv770_get_ps(radeon_state);
	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
	int ret;

	if (!(radeon_state->caps & ATOM_PPLIB_DISALLOW_ON_DC))
		smc_state->flags |= PPSMC_SWSTATE_FLAG_DC;

	ret = cypress_convert_power_level_to_smc(rdev,
						 &state->low,
						 &smc_state->levels[0],
						 PPSMC_DISPLAY_WATERMARK_LOW);
	if (ret)
		return ret;

	ret = cypress_convert_power_level_to_smc(rdev,
						 &state->medium,
						 &smc_state->levels[1],
						 PPSMC_DISPLAY_WATERMARK_LOW);
	if (ret)
		return ret;

	ret = cypress_convert_power_level_to_smc(rdev,
						 &state->high,
						 &smc_state->levels[2],
						 PPSMC_DISPLAY_WATERMARK_HIGH);
	if (ret)
		return ret;

	smc_state->levels[0].arbValue = MC_CG_ARB_FREQ_F1;
	smc_state->levels[1].arbValue = MC_CG_ARB_FREQ_F2;
	smc_state->levels[2].arbValue = MC_CG_ARB_FREQ_F3;

	if (eg_pi->dynamic_ac_timing) {
		smc_state->levels[0].ACIndex = 2;
		smc_state->levels[1].ACIndex = 3;
		smc_state->levels[2].ACIndex = 4;
	} else {
		smc_state->levels[0].ACIndex = 0;
		smc_state->levels[1].ACIndex = 0;
		smc_state->levels[2].ACIndex = 0;
	}

	rv770_populate_smc_sp(rdev, radeon_state, smc_state);

	return rv770_populate_smc_t(rdev, radeon_state, smc_state);
}

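/* Copy the MC register values flagged as valid into a big-endian SMC register set */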
static void cypress_convert_mc_registers(struct evergreen_mc_reg_entry *entry,
					 SMC_Evergreen_MCRegisterSet *data,
					 u32 num_entries, u32 valid_flag)
{
	u32 i, j;

	for (i = 0, j = 0; j < num_entries; j++) {
		if (valid_flag & (1 << j)) {
			data->value[i] = cpu_to_be32(entry->mc_data[j]);
			i++;
		}
	}
}

static void cypress_convert_mc_reg_table_entry_to_smc(struct radeon_device *rdev,
						      struct rv7xx_pl *pl,
						      SMC_Evergreen_MCRegisterSet *mc_reg_table_data)
{
	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
	u32 i = 0;

	for (i = 0; i < eg_pi->mc_reg_table.num_entries; i++) {
		if (pl->mclk <=
		    eg_pi->mc_reg_table.mc_reg_table_entry[i].mclk_max)
			break;
	}

	if ((i == eg_pi->mc_reg_table.num_entries) && (i > 0))
		--i;

	cypress_convert_mc_registers(&eg_pi->mc_reg_table.mc_reg_table_entry[i],
				     mc_reg_table_data,
				     eg_pi->mc_reg_table.last,
				     eg_pi->mc_reg_table.valid_flag);
}

static void cypress_convert_mc_reg_table_to_smc(struct radeon_device *rdev,
						struct radeon_ps *radeon_state,
						SMC_Evergreen_MCRegisters *mc_reg_table)
{
	struct rv7xx_ps *state = rv770_get_ps(radeon_state);

	cypress_convert_mc_reg_table_entry_to_smc(rdev,
						  &state->low,
						  &mc_reg_table->data[2]);
	cypress_convert_mc_reg_table_entry_to_smc(rdev,
						  &state->medium,
						  &mc_reg_table->data[3]);
	cypress_convert_mc_reg_table_entry_to_smc(rdev,
						  &state->high,
						  &mc_reg_table->data[4]);
}

int cypress_upload_sw_state(struct radeon_device *rdev,
			    struct radeon_ps *radeon_new_state)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	u16 address = pi->state_table_start +
		offsetof(RV770_SMC_STATETABLE, driverState);
	RV770_SMC_SWSTATE state = { 0 };
	int ret;

	ret = cypress_convert_power_state_to_smc(rdev, radeon_new_state, &state);
	if (ret)
		return ret;

	return rv770_copy_bytes_to_smc(rdev, address, (u8 *)&state,
				    sizeof(RV770_SMC_SWSTATE),
				    pi->sram_end);
}

int cypress_upload_mc_reg_table(struct radeon_device *rdev,
				struct radeon_ps *radeon_new_state)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
	SMC_Evergreen_MCRegisters mc_reg_table = { 0 };
	u16 address;

	cypress_convert_mc_reg_table_to_smc(rdev, radeon_new_state, &mc_reg_table);

	address = eg_pi->mc_reg_table_start +
		(u16)offsetof(SMC_Evergreen_MCRegisters, data[2]);

	return rv770_copy_bytes_to_smc(rdev, address,
				       (u8 *)&mc_reg_table.data[2],
				       sizeof(SMC_Evergreen_MCRegisterSet) * 3,
				       pi->sram_end);
}

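/* Derive the MC arbiter burst time from the engine/memory clock ratio, clamped to 18 */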
u32 cypress_calculate_burst_time(struct radeon_device *rdev,
				 u32 engine_clock, u32 memory_clock)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	u32 multiplier = pi->mem_gddr5 ? 1 : 2;
	u32 result = (4 * multiplier * engine_clock) / (memory_clock / 2);
	u32 burst_time;

	if (result <= 4)
		burst_time = 0;
	else if (result < 8)
		burst_time = result - 4;
	else {
		burst_time = result / 2;
		if (burst_time > 18)
			burst_time = 18;
	}

	return burst_time;
}

void cypress_program_memory_timing_parameters(struct radeon_device *rdev,
					      struct radeon_ps *radeon_new_state)
{
	struct rv7xx_ps *new_state = rv770_get_ps(radeon_new_state);
	u32 mc_arb_burst_time = RREG32(MC_ARB_BURST_TIME);

	mc_arb_burst_time &= ~(STATE1_MASK | STATE2_MASK | STATE3_MASK);

	mc_arb_burst_time |= STATE1(cypress_calculate_burst_time(rdev,
								 new_state->low.sclk,
								 new_state->low.mclk));
	mc_arb_burst_time |= STATE2(cypress_calculate_burst_time(rdev,
								 new_state->medium.sclk,
								 new_state->medium.mclk));
	mc_arb_burst_time |= STATE3(cypress_calculate_burst_time(rdev,
								 new_state->high.sclk,
								 new_state->high.mclk));

	rv730_program_memory_timing_parameters(rdev, radeon_new_state);

	WREG32(MC_ARB_BURST_TIME, mc_arb_burst_time);
}

static void cypress_populate_mc_reg_addresses(struct radeon_device *rdev,
					      SMC_Evergreen_MCRegisters *mc_reg_table)
{
	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
	u32 i, j;

	for (i = 0, j = 0; j < eg_pi->mc_reg_table.last; j++) {
		if (eg_pi->mc_reg_table.valid_flag & (1 << j)) {
			mc_reg_table->address[i].s0 =
				cpu_to_be16(eg_pi->mc_reg_table.mc_reg_address[j].s0);
			mc_reg_table->address[i].s1 =
				cpu_to_be16(eg_pi->mc_reg_table.mc_reg_address[j].s1);
			i++;
		}
	}

	mc_reg_table->last = (u8)i;
}

static void cypress_set_mc_reg_address_table(struct radeon_device *rdev)
{
	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
	u32 i = 0;

	eg_pi->mc_reg_table.mc_reg_address[i].s0 = MC_SEQ_RAS_TIMING_LP >> 2;
	eg_pi->mc_reg_table.mc_reg_address[i].s1 = MC_SEQ_RAS_TIMING >> 2;
	i++;

	eg_pi->mc_reg_table.mc_reg_address[i].s0 = MC_SEQ_CAS_TIMING_LP >> 2;
	eg_pi->mc_reg_table.mc_reg_address[i].s1 = MC_SEQ_CAS_TIMING >> 2;
	i++;

	eg_pi->mc_reg_table.mc_reg_address[i].s0 = MC_SEQ_MISC_TIMING_LP >> 2;
	eg_pi->mc_reg_table.mc_reg_address[i].s1 = MC_SEQ_MISC_TIMING >> 2;
	i++;

	eg_pi->mc_reg_table.mc_reg_address[i].s0 = MC_SEQ_MISC_TIMING2_LP >> 2;
	eg_pi->mc_reg_table.mc_reg_address[i].s1 = MC_SEQ_MISC_TIMING2 >> 2;
	i++;

	eg_pi->mc_reg_table.mc_reg_address[i].s0 = MC_SEQ_RD_CTL_D0_LP >> 2;
	eg_pi->mc_reg_table.mc_reg_address[i].s1 = MC_SEQ_RD_CTL_D0 >> 2;
	i++;

	eg_pi->mc_reg_table.mc_reg_address[i].s0 = MC_SEQ_RD_CTL_D1_LP >> 2;
	eg_pi->mc_reg_table.mc_reg_address[i].s1 = MC_SEQ_RD_CTL_D1 >> 2;
	i++;

	eg_pi->mc_reg_table.mc_reg_address[i].s0 = MC_SEQ_WR_CTL_D0_LP >> 2;
	eg_pi->mc_reg_table.mc_reg_address[i].s1 = MC_SEQ_WR_CTL_D0 >> 2;
	i++;

	eg_pi->mc_reg_table.mc_reg_address[i].s0 = MC_SEQ_WR_CTL_D1_LP >> 2;
	eg_pi->mc_reg_table.mc_reg_address[i].s1 = MC_SEQ_WR_CTL_D1 >> 2;
	i++;

	eg_pi->mc_reg_table.mc_reg_address[i].s0 = MC_SEQ_PMG_CMD_EMRS_LP >> 2;
	eg_pi->mc_reg_table.mc_reg_address[i].s1 = MC_PMG_CMD_EMRS >> 2;
	i++;

	eg_pi->mc_reg_table.mc_reg_address[i].s0 = MC_SEQ_PMG_CMD_MRS_LP >> 2;
	eg_pi->mc_reg_table.mc_reg_address[i].s1 = MC_PMG_CMD_MRS >> 2;
	i++;

	eg_pi->mc_reg_table.mc_reg_address[i].s0 = MC_SEQ_PMG_CMD_MRS1_LP >> 2;
	eg_pi->mc_reg_table.mc_reg_address[i].s1 = MC_PMG_CMD_MRS1 >> 2;
	i++;

	eg_pi->mc_reg_table.mc_reg_address[i].s0 = MC_SEQ_MISC1 >> 2;
	eg_pi->mc_reg_table.mc_reg_address[i].s1 = MC_SEQ_MISC1 >> 2;
	i++;

	eg_pi->mc_reg_table.mc_reg_address[i].s0 = MC_SEQ_RESERVE_M >> 2;
	eg_pi->mc_reg_table.mc_reg_address[i].s1 = MC_SEQ_RESERVE_M >> 2;
	i++;

	eg_pi->mc_reg_table.mc_reg_address[i].s0 = MC_SEQ_MISC3 >> 2;
	eg_pi->mc_reg_table.mc_reg_address[i].s1 = MC_SEQ_MISC3 >> 2;
	i++;

	eg_pi->mc_reg_table.last = (u8)i;
}

static void cypress_retrieve_ac_timing_for_one_entry(struct radeon_device *rdev,
						     struct evergreen_mc_reg_entry *entry)
{
	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
	u32 i;

	for (i = 0; i < eg_pi->mc_reg_table.last; i++)
		entry->mc_data[i] =
			RREG32(eg_pi->mc_reg_table.mc_reg_address[i].s1 << 2);
}

static void cypress_retrieve_ac_timing_for_all_ranges(struct radeon_device *rdev,
						      struct atom_memory_clock_range_table *range_table)
{
	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
	u32 i, j;

	for (i = 0; i < range_table->num_entries; i++) {
		eg_pi->mc_reg_table.mc_reg_table_entry[i].mclk_max =
			range_table->mclk[i];
		radeon_atom_set_ac_timing(rdev, range_table->mclk[i]);
		cypress_retrieve_ac_timing_for_one_entry(rdev,
							 &eg_pi->mc_reg_table.mc_reg_table_entry[i]);
	}

	eg_pi->mc_reg_table.num_entries = range_table->num_entries;
	eg_pi->mc_reg_table.valid_flag = 0;

	for (i = 0; i < eg_pi->mc_reg_table.last; i++) {
		for (j = 1; j < range_table->num_entries; j++) {
			if (eg_pi->mc_reg_table.mc_reg_table_entry[j-1].mc_data[i] !=
			    eg_pi->mc_reg_table.mc_reg_table_entry[j].mc_data[i]) {
				eg_pi->mc_reg_table.valid_flag |= (1 << i);
				break;
			}
		}
	}
}

static int cypress_initialize_mc_reg_table(struct radeon_device *rdev)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	u8 module_index = rv770_get_memory_module_index(rdev);
	struct atom_memory_clock_range_table range_table = { 0 };
	int ret;

	ret = radeon_atom_get_mclk_range_table(rdev,
					       pi->mem_gddr5,
					       module_index, &range_table);
	if (ret)
		return ret;

	cypress_retrieve_ac_timing_for_all_ranges(rdev, &range_table);

	return 0;
}

static void cypress_wait_for_mc_sequencer(struct radeon_device *rdev, u8 value)
{
	u32 i, j;
	u32 channels = 2;

	if ((rdev->family == CHIP_CYPRESS) ||
	    (rdev->family == CHIP_HEMLOCK))
		channels = 4;
	else if (rdev->family == CHIP_CEDAR)
		channels = 1;

	for (i = 0; i < channels; i++) {
		if ((rdev->family == CHIP_CYPRESS) ||
		    (rdev->family == CHIP_HEMLOCK)) {
			WREG32_P(MC_CONFIG_MCD, MC_RD_ENABLE_MCD(i), ~MC_RD_ENABLE_MCD_MASK);
			WREG32_P(MC_CG_CONFIG_MCD, MC_RD_ENABLE_MCD(i), ~MC_RD_ENABLE_MCD_MASK);
		} else {
			WREG32_P(MC_CONFIG, MC_RD_ENABLE(i), ~MC_RD_ENABLE_MASK);
			WREG32_P(MC_CG_CONFIG, MC_RD_ENABLE(i), ~MC_RD_ENABLE_MASK);
		}
		for (j = 0; j < rdev->usec_timeout; j++) {
			if (((RREG32(MC_SEQ_CG) & CG_SEQ_RESP_MASK) >> CG_SEQ_RESP_SHIFT) == value)
				break;
			udelay(1);
		}
	}
}

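/*
 * Force the memory controller to use the S1 register set: suspend yclk,
 * request DRAMCONF_S1 from the MC sequencer, wait for the power state to
 * latch, then resume yclk.
 */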
cypress_force_mc_use_s1(struct radeon_device * rdev,struct radeon_ps * radeon_boot_state)1119 static void cypress_force_mc_use_s1(struct radeon_device *rdev,
1120 				    struct radeon_ps *radeon_boot_state)
1121 {
1122 	struct rv7xx_ps *boot_state = rv770_get_ps(radeon_boot_state);
1123 	u32 strobe_mode;
1124 	u32 mc_seq_cg;
1125 	int i;
1126 
1127 	if (RREG32(MC_SEQ_STATUS_M) & PMG_PWRSTATE)
1128 		return;
1129 
1130 	radeon_atom_set_ac_timing(rdev, boot_state->low.mclk);
1131 	radeon_mc_wait_for_idle(rdev);
1132 
1133 	if ((rdev->family == CHIP_CYPRESS) ||
1134 	    (rdev->family == CHIP_HEMLOCK)) {
1135 		WREG32(MC_CONFIG_MCD, 0xf);
1136 		WREG32(MC_CG_CONFIG_MCD, 0xf);
1137 	} else {
1138 		WREG32(MC_CONFIG, 0xf);
1139 		WREG32(MC_CG_CONFIG, 0xf);
1140 	}
1141 
1142 	for (i = 0; i < rdev->num_crtc; i++)
1143 		radeon_wait_for_vblank(rdev, i);
1144 
1145 	WREG32(MC_SEQ_CG, MC_CG_SEQ_YCLK_SUSPEND);
1146 	cypress_wait_for_mc_sequencer(rdev, MC_CG_SEQ_YCLK_SUSPEND);
1147 
1148 	strobe_mode = cypress_get_strobe_mode_settings(rdev,
1149 						       boot_state->low.mclk);
1150 
1151 	mc_seq_cg = CG_SEQ_REQ(MC_CG_SEQ_DRAMCONF_S1);
1152 	mc_seq_cg |= SEQ_CG_RESP(strobe_mode);
1153 	WREG32(MC_SEQ_CG, mc_seq_cg);
1154 
1155 	for (i = 0; i < rdev->usec_timeout; i++) {
1156 		if (RREG32(MC_SEQ_STATUS_M) & PMG_PWRSTATE)
1157 			break;
1158 		udelay(1);
1159 	}
1160 
1161 	mc_seq_cg &= ~CG_SEQ_REQ_MASK;
1162 	mc_seq_cg |= CG_SEQ_REQ(MC_CG_SEQ_YCLK_RESUME);
1163 	WREG32(MC_SEQ_CG, mc_seq_cg);
1164 
1165 	cypress_wait_for_mc_sequencer(rdev, MC_CG_SEQ_YCLK_RESUME);
1166 }
1167 
cypress_copy_ac_timing_from_s1_to_s0(struct radeon_device * rdev)1168 static void cypress_copy_ac_timing_from_s1_to_s0(struct radeon_device *rdev)
1169 {
1170 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
1171 	u32 value;
1172 	u32 i;
1173 
1174 	for (i = 0; i < eg_pi->mc_reg_table.last; i++) {
1175 		value = RREG32(eg_pi->mc_reg_table.mc_reg_address[i].s1 << 2);
1176 		WREG32(eg_pi->mc_reg_table.mc_reg_address[i].s0 << 2, value);
1177 	}
1178 }
1179 
cypress_force_mc_use_s0(struct radeon_device * rdev,struct radeon_ps * radeon_boot_state)1180 static void cypress_force_mc_use_s0(struct radeon_device *rdev,
1181 				    struct radeon_ps *radeon_boot_state)
1182 {
1183 	struct rv7xx_ps *boot_state = rv770_get_ps(radeon_boot_state);
1184 	u32 strobe_mode;
1185 	u32 mc_seq_cg;
1186 	int i;
1187 
1188 	cypress_copy_ac_timing_from_s1_to_s0(rdev);
1189 	radeon_mc_wait_for_idle(rdev);
1190 
1191 	if ((rdev->family == CHIP_CYPRESS) ||
1192 	    (rdev->family == CHIP_HEMLOCK)) {
1193 		WREG32(MC_CONFIG_MCD, 0xf);
1194 		WREG32(MC_CG_CONFIG_MCD, 0xf);
1195 	} else {
1196 		WREG32(MC_CONFIG, 0xf);
1197 		WREG32(MC_CG_CONFIG, 0xf);
1198 	}
1199 
1200 	for (i = 0; i < rdev->num_crtc; i++)
1201 		radeon_wait_for_vblank(rdev, i);
1202 
1203 	WREG32(MC_SEQ_CG, MC_CG_SEQ_YCLK_SUSPEND);
1204 	cypress_wait_for_mc_sequencer(rdev, MC_CG_SEQ_YCLK_SUSPEND);
1205 
1206 	strobe_mode = cypress_get_strobe_mode_settings(rdev,
1207 						       boot_state->low.mclk);
1208 
1209 	mc_seq_cg = CG_SEQ_REQ(MC_CG_SEQ_DRAMCONF_S0);
1210 	mc_seq_cg |= SEQ_CG_RESP(strobe_mode);
1211 	WREG32(MC_SEQ_CG, mc_seq_cg);
1212 
1213 	for (i = 0; i < rdev->usec_timeout; i++) {
1214 		if (!(RREG32(MC_SEQ_STATUS_M) & PMG_PWRSTATE))
1215 			break;
1216 		udelay(1);
1217 	}
1218 
1219 	mc_seq_cg &= ~CG_SEQ_REQ_MASK;
1220 	mc_seq_cg |= CG_SEQ_REQ(MC_CG_SEQ_YCLK_RESUME);
1221 	WREG32(MC_SEQ_CG, mc_seq_cg);
1222 
1223 	cypress_wait_for_mc_sequencer(rdev, MC_CG_SEQ_YCLK_RESUME);
1224 }
1225 
cypress_populate_initial_mvdd_value(struct radeon_device * rdev,RV770_SMC_VOLTAGE_VALUE * voltage)1226 static int cypress_populate_initial_mvdd_value(struct radeon_device *rdev,
1227 					       RV770_SMC_VOLTAGE_VALUE *voltage)
1228 {
1229 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
1230 
1231 	voltage->index = eg_pi->mvdd_high_index;
1232 	voltage->value = cpu_to_be16(MVDD_HIGH_VALUE);
1233 
1234 	return 0;
1235 }
1236 
cypress_populate_smc_initial_state(struct radeon_device * rdev,struct radeon_ps * radeon_initial_state,RV770_SMC_STATETABLE * table)1237 int cypress_populate_smc_initial_state(struct radeon_device *rdev,
1238 				       struct radeon_ps *radeon_initial_state,
1239 				       RV770_SMC_STATETABLE *table)
1240 {
1241 	struct rv7xx_ps *initial_state = rv770_get_ps(radeon_initial_state);
1242 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1243 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
1244 	u32 a_t;
1245 
1246 	table->initialState.levels[0].mclk.mclk770.vMPLL_AD_FUNC_CNTL =
1247 		cpu_to_be32(pi->clk_regs.rv770.mpll_ad_func_cntl);
1248 	table->initialState.levels[0].mclk.mclk770.vMPLL_AD_FUNC_CNTL_2 =
1249 		cpu_to_be32(pi->clk_regs.rv770.mpll_ad_func_cntl_2);
1250 	table->initialState.levels[0].mclk.mclk770.vMPLL_DQ_FUNC_CNTL =
1251 		cpu_to_be32(pi->clk_regs.rv770.mpll_dq_func_cntl);
1252 	table->initialState.levels[0].mclk.mclk770.vMPLL_DQ_FUNC_CNTL_2 =
1253 		cpu_to_be32(pi->clk_regs.rv770.mpll_dq_func_cntl_2);
1254 	table->initialState.levels[0].mclk.mclk770.vMCLK_PWRMGT_CNTL =
1255 		cpu_to_be32(pi->clk_regs.rv770.mclk_pwrmgt_cntl);
1256 	table->initialState.levels[0].mclk.mclk770.vDLL_CNTL =
1257 		cpu_to_be32(pi->clk_regs.rv770.dll_cntl);
1258 
1259 	table->initialState.levels[0].mclk.mclk770.vMPLL_SS =
1260 		cpu_to_be32(pi->clk_regs.rv770.mpll_ss1);
1261 	table->initialState.levels[0].mclk.mclk770.vMPLL_SS2 =
1262 		cpu_to_be32(pi->clk_regs.rv770.mpll_ss2);
1263 
1264 	table->initialState.levels[0].mclk.mclk770.mclk_value =
1265 		cpu_to_be32(initial_state->low.mclk);
1266 
1267 	table->initialState.levels[0].sclk.vCG_SPLL_FUNC_CNTL =
1268 		cpu_to_be32(pi->clk_regs.rv770.cg_spll_func_cntl);
1269 	table->initialState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_2 =
1270 		cpu_to_be32(pi->clk_regs.rv770.cg_spll_func_cntl_2);
1271 	table->initialState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_3 =
1272 		cpu_to_be32(pi->clk_regs.rv770.cg_spll_func_cntl_3);
1273 	table->initialState.levels[0].sclk.vCG_SPLL_SPREAD_SPECTRUM =
1274 		cpu_to_be32(pi->clk_regs.rv770.cg_spll_spread_spectrum);
1275 	table->initialState.levels[0].sclk.vCG_SPLL_SPREAD_SPECTRUM_2 =
1276 		cpu_to_be32(pi->clk_regs.rv770.cg_spll_spread_spectrum_2);
1277 
1278 	table->initialState.levels[0].sclk.sclk_value =
1279 		cpu_to_be32(initial_state->low.sclk);
1280 
1281 	table->initialState.levels[0].arbValue = MC_CG_ARB_FREQ_F0;
1282 
1283 	table->initialState.levels[0].ACIndex = 0;
1284 
1285 	cypress_populate_voltage_value(rdev,
1286 				       &eg_pi->vddc_voltage_table,
1287 				       initial_state->low.vddc,
1288 				       &table->initialState.levels[0].vddc);
1289 
1290 	if (eg_pi->vddci_control)
1291 		cypress_populate_voltage_value(rdev,
1292 					       &eg_pi->vddci_voltage_table,
1293 					       initial_state->low.vddci,
1294 					       &table->initialState.levels[0].vddci);
1295 
1296 	cypress_populate_initial_mvdd_value(rdev,
1297 					    &table->initialState.levels[0].mvdd);
1298 
1299 	a_t = CG_R(0xffff) | CG_L(0);
1300 	table->initialState.levels[0].aT = cpu_to_be32(a_t);
1301 
1302 	table->initialState.levels[0].bSP = cpu_to_be32(pi->dsp);
1303 
1304 
1305 	if (pi->boot_in_gen2)
1306 		table->initialState.levels[0].gen2PCIE = 1;
1307 	else
1308 		table->initialState.levels[0].gen2PCIE = 0;
1309 	if (initial_state->low.flags & ATOM_PPLIB_R600_FLAGS_PCIEGEN2)
1310 		table->initialState.levels[0].gen2XSP = 1;
1311 	else
1312 		table->initialState.levels[0].gen2XSP = 0;
1313 
1314 	if (pi->mem_gddr5) {
1315 		table->initialState.levels[0].strobeMode =
1316 			cypress_get_strobe_mode_settings(rdev,
1317 							 initial_state->low.mclk);
1318 
1319 		if (initial_state->low.mclk > pi->mclk_edc_enable_threshold)
1320 			table->initialState.levels[0].mcFlags = SMC_MC_EDC_RD_FLAG | SMC_MC_EDC_WR_FLAG;
1321 		else
1322 			table->initialState.levels[0].mcFlags =  0;
1323 	}
1324 
1325 	table->initialState.levels[1] = table->initialState.levels[0];
1326 	table->initialState.levels[2] = table->initialState.levels[0];
1327 
1328 	table->initialState.flags |= PPSMC_SWSTATE_FLAG_DC;
1329 
1330 	return 0;
1331 }
1332 
cypress_populate_smc_acpi_state(struct radeon_device * rdev,RV770_SMC_STATETABLE * table)1333 int cypress_populate_smc_acpi_state(struct radeon_device *rdev,
1334 				    RV770_SMC_STATETABLE *table)
1335 {
1336 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1337 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
1338 	u32 mpll_ad_func_cntl =
1339 		pi->clk_regs.rv770.mpll_ad_func_cntl;
1340 	u32 mpll_ad_func_cntl_2 =
1341 		pi->clk_regs.rv770.mpll_ad_func_cntl_2;
1342 	u32 mpll_dq_func_cntl =
1343 		pi->clk_regs.rv770.mpll_dq_func_cntl;
1344 	u32 mpll_dq_func_cntl_2 =
1345 		pi->clk_regs.rv770.mpll_dq_func_cntl_2;
1346 	u32 spll_func_cntl =
1347 		pi->clk_regs.rv770.cg_spll_func_cntl;
1348 	u32 spll_func_cntl_2 =
1349 		pi->clk_regs.rv770.cg_spll_func_cntl_2;
1350 	u32 spll_func_cntl_3 =
1351 		pi->clk_regs.rv770.cg_spll_func_cntl_3;
1352 	u32 mclk_pwrmgt_cntl =
1353 		pi->clk_regs.rv770.mclk_pwrmgt_cntl;
1354 	u32 dll_cntl =
1355 		pi->clk_regs.rv770.dll_cntl;
1356 
1357 	table->ACPIState = table->initialState;
1358 
1359 	table->ACPIState.flags &= ~PPSMC_SWSTATE_FLAG_DC;
1360 
1361 	if (pi->acpi_vddc) {
1362 		cypress_populate_voltage_value(rdev,
1363 					       &eg_pi->vddc_voltage_table,
1364 					       pi->acpi_vddc,
1365 					       &table->ACPIState.levels[0].vddc);
1366 		if (pi->pcie_gen2) {
1367 			if (pi->acpi_pcie_gen2)
1368 				table->ACPIState.levels[0].gen2PCIE = 1;
1369 			else
1370 				table->ACPIState.levels[0].gen2PCIE = 0;
1371 		} else
1372 			table->ACPIState.levels[0].gen2PCIE = 0;
1373 		if (pi->acpi_pcie_gen2)
1374 			table->ACPIState.levels[0].gen2XSP = 1;
1375 		else
1376 			table->ACPIState.levels[0].gen2XSP = 0;
1377 	} else {
1378 		cypress_populate_voltage_value(rdev,
1379 					       &eg_pi->vddc_voltage_table,
1380 					       pi->min_vddc_in_table,
1381 					       &table->ACPIState.levels[0].vddc);
1382 		table->ACPIState.levels[0].gen2PCIE = 0;
1383 	}
1384 
1385 	if (eg_pi->acpi_vddci) {
1386 		if (eg_pi->vddci_control) {
1387 			cypress_populate_voltage_value(rdev,
1388 						       &eg_pi->vddci_voltage_table,
1389 						       eg_pi->acpi_vddci,
1390 						       &table->ACPIState.levels[0].vddci);
1391 		}
1392 	}
1393 
1394 	mpll_ad_func_cntl &= ~PDNB;
1395 
1396 	mpll_ad_func_cntl_2 |= BIAS_GEN_PDNB | RESET_EN;
1397 
1398 	if (pi->mem_gddr5)
1399 		mpll_dq_func_cntl &= ~PDNB;
1400 	mpll_dq_func_cntl_2 |= BIAS_GEN_PDNB | RESET_EN | BYPASS;
1401 
1402 	mclk_pwrmgt_cntl |= (MRDCKA0_RESET |
1403 			     MRDCKA1_RESET |
1404 			     MRDCKB0_RESET |
1405 			     MRDCKB1_RESET |
1406 			     MRDCKC0_RESET |
1407 			     MRDCKC1_RESET |
1408 			     MRDCKD0_RESET |
1409 			     MRDCKD1_RESET);
1410 
1411 	mclk_pwrmgt_cntl &= ~(MRDCKA0_PDNB |
1412 			      MRDCKA1_PDNB |
1413 			      MRDCKB0_PDNB |
1414 			      MRDCKB1_PDNB |
1415 			      MRDCKC0_PDNB |
1416 			      MRDCKC1_PDNB |
1417 			      MRDCKD0_PDNB |
1418 			      MRDCKD1_PDNB);
1419 
1420 	dll_cntl |= (MRDCKA0_BYPASS |
1421 		     MRDCKA1_BYPASS |
1422 		     MRDCKB0_BYPASS |
1423 		     MRDCKB1_BYPASS |
1424 		     MRDCKC0_BYPASS |
1425 		     MRDCKC1_BYPASS |
1426 		     MRDCKD0_BYPASS |
1427 		     MRDCKD1_BYPASS);
1428 
1429 	/* evergreen only */
1430 	if (rdev->family <= CHIP_HEMLOCK)
1431 		spll_func_cntl |= SPLL_RESET | SPLL_SLEEP | SPLL_BYPASS_EN;
1432 
1433 	spll_func_cntl_2 &= ~SCLK_MUX_SEL_MASK;
1434 	spll_func_cntl_2 |= SCLK_MUX_SEL(4);
1435 
1436 	table->ACPIState.levels[0].mclk.mclk770.vMPLL_AD_FUNC_CNTL =
1437 		cpu_to_be32(mpll_ad_func_cntl);
1438 	table->ACPIState.levels[0].mclk.mclk770.vMPLL_AD_FUNC_CNTL_2 =
1439 		cpu_to_be32(mpll_ad_func_cntl_2);
1440 	table->ACPIState.levels[0].mclk.mclk770.vMPLL_DQ_FUNC_CNTL =
1441 		cpu_to_be32(mpll_dq_func_cntl);
1442 	table->ACPIState.levels[0].mclk.mclk770.vMPLL_DQ_FUNC_CNTL_2 =
1443 		cpu_to_be32(mpll_dq_func_cntl_2);
1444 	table->ACPIState.levels[0].mclk.mclk770.vMCLK_PWRMGT_CNTL =
1445 		cpu_to_be32(mclk_pwrmgt_cntl);
1446 	table->ACPIState.levels[0].mclk.mclk770.vDLL_CNTL = cpu_to_be32(dll_cntl);
1447 
1448 	table->ACPIState.levels[0].mclk.mclk770.mclk_value = 0;
1449 
1450 	table->ACPIState.levels[0].sclk.vCG_SPLL_FUNC_CNTL =
1451 		cpu_to_be32(spll_func_cntl);
1452 	table->ACPIState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_2 =
1453 		cpu_to_be32(spll_func_cntl_2);
1454 	table->ACPIState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_3 =
1455 		cpu_to_be32(spll_func_cntl_3);
1456 
1457 	table->ACPIState.levels[0].sclk.sclk_value = 0;
1458 
1459 	cypress_populate_mvdd_value(rdev, 0, &table->ACPIState.levels[0].mvdd);
1460 
1461 	if (eg_pi->dynamic_ac_timing)
1462 		table->ACPIState.levels[0].ACIndex = 1;
1463 
1464 	table->ACPIState.levels[1] = table->ACPIState.levels[0];
1465 	table->ACPIState.levels[2] = table->ACPIState.levels[0];
1466 
1467 	return 0;
1468 }
1469 
cypress_trim_voltage_table_to_fit_state_table(struct radeon_device * rdev,struct atom_voltage_table * voltage_table)1470 static void cypress_trim_voltage_table_to_fit_state_table(struct radeon_device *rdev,
1471 							  struct atom_voltage_table *voltage_table)
1472 {
1473 	unsigned int i, diff;
1474 
1475 	if (voltage_table->count <= MAX_NO_VREG_STEPS)
1476 		return;
1477 
1478 	diff = voltage_table->count - MAX_NO_VREG_STEPS;
1479 
1480 	for (i= 0; i < MAX_NO_VREG_STEPS; i++)
1481 		voltage_table->entries[i] = voltage_table->entries[i + diff];
1482 
1483 	voltage_table->count = MAX_NO_VREG_STEPS;
1484 }
1485 
cypress_construct_voltage_tables(struct radeon_device * rdev)1486 int cypress_construct_voltage_tables(struct radeon_device *rdev)
1487 {
1488 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
1489 	int ret;
1490 
1491 	ret = radeon_atom_get_voltage_table(rdev, SET_VOLTAGE_TYPE_ASIC_VDDC, 0,
1492 					    &eg_pi->vddc_voltage_table);
1493 	if (ret)
1494 		return ret;
1495 
1496 	if (eg_pi->vddc_voltage_table.count > MAX_NO_VREG_STEPS)
1497 		cypress_trim_voltage_table_to_fit_state_table(rdev,
1498 							      &eg_pi->vddc_voltage_table);
1499 
1500 	if (eg_pi->vddci_control) {
1501 		ret = radeon_atom_get_voltage_table(rdev, SET_VOLTAGE_TYPE_ASIC_VDDCI, 0,
1502 						    &eg_pi->vddci_voltage_table);
1503 		if (ret)
1504 			return ret;
1505 
1506 		if (eg_pi->vddci_voltage_table.count > MAX_NO_VREG_STEPS)
1507 			cypress_trim_voltage_table_to_fit_state_table(rdev,
1508 								      &eg_pi->vddci_voltage_table);
1509 	}
1510 
1511 	return 0;
1512 }
1513 
cypress_populate_smc_voltage_table(struct radeon_device * rdev,struct atom_voltage_table * voltage_table,RV770_SMC_STATETABLE * table)1514 static void cypress_populate_smc_voltage_table(struct radeon_device *rdev,
1515 					       struct atom_voltage_table *voltage_table,
1516 					       RV770_SMC_STATETABLE *table)
1517 {
1518 	unsigned int i;
1519 
1520 	for (i = 0; i < voltage_table->count; i++) {
1521 		table->highSMIO[i] = 0;
1522 		table->lowSMIO[i] |= cpu_to_be32(voltage_table->entries[i].smio_low);
1523 	}
1524 }
1525 
cypress_populate_smc_voltage_tables(struct radeon_device * rdev,RV770_SMC_STATETABLE * table)1526 int cypress_populate_smc_voltage_tables(struct radeon_device *rdev,
1527 					RV770_SMC_STATETABLE *table)
1528 {
1529 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1530 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
1531 	unsigned char i;
1532 
1533 	if (eg_pi->vddc_voltage_table.count) {
1534 		cypress_populate_smc_voltage_table(rdev,
1535 						   &eg_pi->vddc_voltage_table,
1536 						   table);
1537 
1538 		table->voltageMaskTable.highMask[RV770_SMC_VOLTAGEMASK_VDDC] = 0;
1539 		table->voltageMaskTable.lowMask[RV770_SMC_VOLTAGEMASK_VDDC] =
1540 			cpu_to_be32(eg_pi->vddc_voltage_table.mask_low);
1541 
1542 		for (i = 0; i < eg_pi->vddc_voltage_table.count; i++) {
1543 			if (pi->max_vddc_in_table <=
1544 			    eg_pi->vddc_voltage_table.entries[i].value) {
1545 				table->maxVDDCIndexInPPTable = i;
1546 				break;
1547 			}
1548 		}
1549 	}
1550 
1551 	if (eg_pi->vddci_voltage_table.count) {
1552 		cypress_populate_smc_voltage_table(rdev,
1553 						   &eg_pi->vddci_voltage_table,
1554 						   table);
1555 
1556 		table->voltageMaskTable.highMask[RV770_SMC_VOLTAGEMASK_VDDCI] = 0;
1557 		table->voltageMaskTable.lowMask[RV770_SMC_VOLTAGEMASK_VDDCI] =
1558 			cpu_to_be32(eg_pi->vddci_voltage_table.mask_low);
1559 	}
1560 
1561 	return 0;
1562 }
1563 
cypress_get_mclk_split_point(struct atom_memory_info * memory_info)1564 static u32 cypress_get_mclk_split_point(struct atom_memory_info *memory_info)
1565 {
1566 	if ((memory_info->mem_type == MEM_TYPE_GDDR3) ||
1567 	    (memory_info->mem_type == MEM_TYPE_DDR3))
1568 		return 30000;
1569 
1570 	return 0;
1571 }
1572 
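/*
 * Derive the high/low MVDD state indices from the BACKBIAS pad strap and
 * look up the memory-type dependent MCLK split point.  MVDD control is
 * disabled if the pad is not enabled, the memory info cannot be read, or
 * no split point exists for this memory type.
 */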
1573 int cypress_get_mvdd_configuration(struct radeon_device *rdev)
1574 {
1575 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1576 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
1577 	u8 module_index;
1578 	struct atom_memory_info memory_info;
1579 	u32 tmp = RREG32(GENERAL_PWRMGT);
1580 
1581 	if (!(tmp & BACKBIAS_PAD_EN)) {
1582 		eg_pi->mvdd_high_index = 0;
1583 		eg_pi->mvdd_low_index = 1;
1584 		pi->mvdd_control = false;
1585 		return 0;
1586 	}
1587 
1588 	if (tmp & BACKBIAS_VALUE)
1589 		eg_pi->mvdd_high_index = 1;
1590 	else
1591 		eg_pi->mvdd_high_index = 0;
1592 
1593 	eg_pi->mvdd_low_index =
1594 		(eg_pi->mvdd_high_index == 0) ? 1 : 0;
1595 
1596 	module_index = rv770_get_memory_module_index(rdev);
1597 
1598 	if (radeon_atom_get_memory_info(rdev, module_index, &memory_info)) {
1599 		pi->mvdd_control = false;
1600 		return 0;
1601 	}
1602 
1603 	pi->mvdd_split_frequency =
1604 		cypress_get_mclk_split_point(&memory_info);
1605 
1606 	if (pi->mvdd_split_frequency == 0) {
1607 		pi->mvdd_control = false;
1608 		return 0;
1609 	}
1610 
1611 	return 0;
1612 }
1613 
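/*
 * Build the initial SMC state table: voltage tables, thermal protection
 * type, platform capability flags, and the initial and ACPI states, then
 * upload the table to SMC SRAM at state_table_start.
 */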
1614 static int cypress_init_smc_table(struct radeon_device *rdev,
1615 				  struct radeon_ps *radeon_boot_state)
1616 {
1617 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1618 	RV770_SMC_STATETABLE *table = &pi->smc_statetable;
1619 	int ret;
1620 
1621 	memset(table, 0, sizeof(RV770_SMC_STATETABLE));
1622 
1623 	cypress_populate_smc_voltage_tables(rdev, table);
1624 
1625 	switch (rdev->pm.int_thermal_type) {
1626 	case THERMAL_TYPE_EVERGREEN:
1627 	case THERMAL_TYPE_EMC2103_WITH_INTERNAL:
1628 		table->thermalProtectType = PPSMC_THERMAL_PROTECT_TYPE_INTERNAL;
1629 		break;
1630 	case THERMAL_TYPE_NONE:
1631 		table->thermalProtectType = PPSMC_THERMAL_PROTECT_TYPE_NONE;
1632 		break;
1633 	default:
1634 		table->thermalProtectType = PPSMC_THERMAL_PROTECT_TYPE_EXTERNAL;
1635 		break;
1636 	}
1637 
1638 	if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_HARDWAREDC)
1639 		table->systemFlags |= PPSMC_SYSTEMFLAG_GPIO_DC;
1640 
1641 	if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_REGULATOR_HOT)
1642 		table->systemFlags |= PPSMC_SYSTEMFLAG_REGULATOR_HOT;
1643 
1644 	if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_STEPVDDC)
1645 		table->systemFlags |= PPSMC_SYSTEMFLAG_STEPVDDC;
1646 
1647 	if (pi->mem_gddr5)
1648 		table->systemFlags |= PPSMC_SYSTEMFLAG_GDDR5;
1649 
1650 	ret = cypress_populate_smc_initial_state(rdev, radeon_boot_state, table);
1651 	if (ret)
1652 		return ret;
1653 
1654 	ret = cypress_populate_smc_acpi_state(rdev, table);
1655 	if (ret)
1656 		return ret;
1657 
1658 	table->driverState = table->initialState;
1659 
1660 	return rv770_copy_bytes_to_smc(rdev,
1661 				       pi->state_table_start,
1662 				       (u8 *)table, sizeof(RV770_SMC_STATETABLE),
1663 				       pi->sram_end);
1664 }
1665 
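/*
 * Convert the boot state and the driver MC register set into SMC format
 * and upload the resulting MC register table to SMC SRAM.
 */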
1666 int cypress_populate_mc_reg_table(struct radeon_device *rdev,
1667 				  struct radeon_ps *radeon_boot_state)
1668 {
1669 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1670 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
1671 	struct rv7xx_ps *boot_state = rv770_get_ps(radeon_boot_state);
1672 	SMC_Evergreen_MCRegisters mc_reg_table = { 0 };
1673 
1674 	rv770_write_smc_soft_register(rdev,
1675 				      RV770_SMC_SOFT_REGISTER_seq_index, 1);
1676 
1677 	cypress_populate_mc_reg_addresses(rdev, &mc_reg_table);
1678 
1679 	cypress_convert_mc_reg_table_entry_to_smc(rdev,
1680 						  &boot_state->low,
1681 						  &mc_reg_table.data[0]);
1682 
1683 	cypress_convert_mc_registers(&eg_pi->mc_reg_table.mc_reg_table_entry[0],
1684 				     &mc_reg_table.data[1], eg_pi->mc_reg_table.last,
1685 				     eg_pi->mc_reg_table.valid_flag);
1686 
1687 	cypress_convert_mc_reg_table_to_smc(rdev, radeon_boot_state, &mc_reg_table);
1688 
1689 	return rv770_copy_bytes_to_smc(rdev, eg_pi->mc_reg_table_start,
1690 				       (u8 *)&mc_reg_table, sizeof(SMC_Evergreen_MCRegisters),
1691 				       pi->sram_end);
1692 }
1693 
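/*
 * Read the SMC firmware header to locate the state table, soft register
 * area, and MC register table in SMC SRAM.
 */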
1694 int cypress_get_table_locations(struct radeon_device *rdev)
1695 {
1696 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1697 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
1698 	u32 tmp;
1699 	int ret;
1700 
1701 	ret = rv770_read_smc_sram_dword(rdev,
1702 					EVERGREEN_SMC_FIRMWARE_HEADER_LOCATION +
1703 					EVERGREEN_SMC_FIRMWARE_HEADER_stateTable,
1704 					&tmp, pi->sram_end);
1705 	if (ret)
1706 		return ret;
1707 
1708 	pi->state_table_start = (u16)tmp;
1709 
1710 	ret = rv770_read_smc_sram_dword(rdev,
1711 					EVERGREEN_SMC_FIRMWARE_HEADER_LOCATION +
1712 					EVERGREEN_SMC_FIRMWARE_HEADER_softRegisters,
1713 					&tmp, pi->sram_end);
1714 	if (ret)
1715 		return ret;
1716 
1717 	pi->soft_regs_start = (u16)tmp;
1718 
1719 	ret = rv770_read_smc_sram_dword(rdev,
1720 					EVERGREEN_SMC_FIRMWARE_HEADER_LOCATION +
1721 					EVERGREEN_SMC_FIRMWARE_HEADER_mcRegisterTable,
1722 					&tmp, pi->sram_end);
1723 	if (ret)
1724 		return ret;
1725 
1726 	eg_pi->mc_reg_table_start = (u16)tmp;
1727 
1728 	return 0;
1729 }
1730 
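/*
 * Set the DISP1/DISP2 display gaps to ignore and make memory clock
 * changes wait for DISP1 vblank.
 */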
1731 void cypress_enable_display_gap(struct radeon_device *rdev)
1732 {
1733 	u32 tmp = RREG32(CG_DISPLAY_GAP_CNTL);
1734 
1735 	tmp &= ~(DISP1_GAP_MASK | DISP2_GAP_MASK);
1736 	tmp |= (DISP1_GAP(R600_PM_DISPLAY_GAP_IGNORE) |
1737 		DISP2_GAP(R600_PM_DISPLAY_GAP_IGNORE));
1738 
1739 	tmp &= ~(DISP1_GAP_MCHG_MASK | DISP2_GAP_MCHG_MASK);
1740 	tmp |= (DISP1_GAP_MCHG(R600_PM_DISPLAY_GAP_VBLANK) |
1741 		DISP2_GAP_MCHG(R600_PM_DISPLAY_GAP_IGNORE));
1742 	WREG32(CG_DISPLAY_GAP_CNTL, tmp);
1743 }
1744 
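/*
 * Reprogram the display gaps based on the number of active CRTCs, point
 * the DISP1 slow clock select at an active CRTC, and notify the SMC
 * whether any displays are active.
 */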
1745 static void cypress_program_display_gap(struct radeon_device *rdev)
1746 {
1747 	u32 tmp, pipe;
1748 	int i;
1749 
1750 	tmp = RREG32(CG_DISPLAY_GAP_CNTL) & ~(DISP1_GAP_MASK | DISP2_GAP_MASK);
1751 	if (rdev->pm.dpm.new_active_crtc_count > 0)
1752 		tmp |= DISP1_GAP(R600_PM_DISPLAY_GAP_VBLANK_OR_WM);
1753 	else
1754 		tmp |= DISP1_GAP(R600_PM_DISPLAY_GAP_IGNORE);
1755 
1756 	if (rdev->pm.dpm.new_active_crtc_count > 1)
1757 		tmp |= DISP2_GAP(R600_PM_DISPLAY_GAP_VBLANK_OR_WM);
1758 	else
1759 		tmp |= DISP2_GAP(R600_PM_DISPLAY_GAP_IGNORE);
1760 
1761 	WREG32(CG_DISPLAY_GAP_CNTL, tmp);
1762 
1763 	tmp = RREG32(DCCG_DISP_SLOW_SELECT_REG);
1764 	pipe = (tmp & DCCG_DISP1_SLOW_SELECT_MASK) >> DCCG_DISP1_SLOW_SELECT_SHIFT;
1765 
1766 	if ((rdev->pm.dpm.new_active_crtc_count > 0) &&
1767 	    (!(rdev->pm.dpm.new_active_crtcs & (1 << pipe)))) {
1768 		/* find the first active crtc */
1769 		for (i = 0; i < rdev->num_crtc; i++) {
1770 			if (rdev->pm.dpm.new_active_crtcs & (1 << i))
1771 				break;
1772 		}
1773 		if (i == rdev->num_crtc)
1774 			pipe = 0;
1775 		else
1776 			pipe = i;
1777 
1778 		tmp &= ~DCCG_DISP1_SLOW_SELECT_MASK;
1779 		tmp |= DCCG_DISP1_SLOW_SELECT(pipe);
1780 		WREG32(DCCG_DISP_SLOW_SELECT_REG, tmp);
1781 	}
1782 
1783 	cypress_notify_smc_display_change(rdev, rdev->pm.dpm.new_active_crtc_count > 0);
1784 }
1785 
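/*
 * ASIC setup performed before dpm is enabled: cache clock and voltage
 * SMIO registers, advertise PCIe gen2 capability when supported, and
 * enable ACPI power management.
 */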
1786 void cypress_dpm_setup_asic(struct radeon_device *rdev)
1787 {
1788 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
1789 
1790 	rv740_read_clock_registers(rdev);
1791 	rv770_read_voltage_smio_registers(rdev);
1792 	rv770_get_max_vddc(rdev);
1793 	rv770_get_memory_type(rdev);
1794 
1795 	if (eg_pi->pcie_performance_request)
1796 		eg_pi->pcie_performance_request_registered = false;
1797 
1798 	if (eg_pi->pcie_performance_request)
1799 		cypress_advertise_gen2_capability(rdev);
1800 
1801 	rv770_get_pcie_gen2_status(rdev);
1802 
1803 	rv770_enable_acpi_pm(rdev);
1804 }
1805 
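/*
 * Full dpm bring-up: construct the voltage and MC register tables,
 * program the clock/voltage sequencing parameters, upload the SMC
 * firmware and state tables, start the SMC, and enable sclk/mclk
 * control, clock gating, and thermal throttling.
 */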
1806 int cypress_dpm_enable(struct radeon_device *rdev)
1807 {
1808 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1809 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
1810 	struct radeon_ps *boot_ps = rdev->pm.dpm.boot_ps;
1811 	int ret;
1812 
1813 	if (pi->gfx_clock_gating)
1814 		rv770_restore_cgcg(rdev);
1815 
1816 	if (rv770_dpm_enabled(rdev))
1817 		return -EINVAL;
1818 
1819 	if (pi->voltage_control) {
1820 		rv770_enable_voltage_control(rdev, true);
1821 		ret = cypress_construct_voltage_tables(rdev);
1822 		if (ret) {
1823 			DRM_ERROR("cypress_construct_voltage_tables failed\n");
1824 			return ret;
1825 		}
1826 	}
1827 
1828 	if (pi->mvdd_control) {
1829 		ret = cypress_get_mvdd_configuration(rdev);
1830 		if (ret) {
1831 			DRM_ERROR("cypress_get_mvdd_configuration failed\n");
1832 			return ret;
1833 		}
1834 	}
1835 
1836 	if (eg_pi->dynamic_ac_timing) {
1837 		cypress_set_mc_reg_address_table(rdev);
1838 		cypress_force_mc_use_s0(rdev, boot_ps);
1839 		ret = cypress_initialize_mc_reg_table(rdev);
1840 		if (ret)
1841 			eg_pi->dynamic_ac_timing = false;
1842 		cypress_force_mc_use_s1(rdev, boot_ps);
1843 	}
1844 
1845 	if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_BACKBIAS)
1846 		rv770_enable_backbias(rdev, true);
1847 
1848 	if (pi->dynamic_ss)
1849 		cypress_enable_spread_spectrum(rdev, true);
1850 
1851 	if (pi->thermal_protection)
1852 		rv770_enable_thermal_protection(rdev, true);
1853 
1854 	rv770_setup_bsp(rdev);
1855 	rv770_program_git(rdev);
1856 	rv770_program_tp(rdev);
1857 	rv770_program_tpp(rdev);
1858 	rv770_program_sstp(rdev);
1859 	rv770_program_engine_speed_parameters(rdev);
1860 	cypress_enable_display_gap(rdev);
1861 	rv770_program_vc(rdev);
1862 
1863 	if (pi->dynamic_pcie_gen2)
1864 		cypress_enable_dynamic_pcie_gen2(rdev, true);
1865 
1866 	ret = rv770_upload_firmware(rdev);
1867 	if (ret) {
1868 		DRM_ERROR("rv770_upload_firmware failed\n");
1869 		return ret;
1870 	}
1871 
1872 	ret = cypress_get_table_locations(rdev);
1873 	if (ret) {
1874 		DRM_ERROR("cypress_get_table_locations failed\n");
1875 		return ret;
1876 	}
1877 	ret = cypress_init_smc_table(rdev, boot_ps);
1878 	if (ret) {
1879 		DRM_ERROR("cypress_init_smc_table failed\n");
1880 		return ret;
1881 	}
1882 	if (eg_pi->dynamic_ac_timing) {
1883 		ret = cypress_populate_mc_reg_table(rdev, boot_ps);
1884 		if (ret) {
1885 			DRM_ERROR("cypress_populate_mc_reg_table failed\n");
1886 			return ret;
1887 		}
1888 	}
1889 
1890 	cypress_program_response_times(rdev);
1891 
1892 	r7xx_start_smc(rdev);
1893 
1894 	ret = cypress_notify_smc_display_change(rdev, false);
1895 	if (ret) {
1896 		DRM_ERROR("cypress_notify_smc_display_change failed\n");
1897 		return ret;
1898 	}
1899 	cypress_enable_sclk_control(rdev, true);
1900 
1901 	if (eg_pi->memory_transition)
1902 		cypress_enable_mclk_control(rdev, true);
1903 
1904 	cypress_start_dpm(rdev);
1905 
1906 	if (pi->gfx_clock_gating)
1907 		cypress_gfx_clock_gating_enable(rdev, true);
1908 
1909 	if (pi->mg_clock_gating)
1910 		cypress_mg_clock_gating_enable(rdev, true);
1911 
1912 	rv770_enable_auto_throttle_source(rdev, RADEON_DPM_AUTO_THROTTLE_SRC_THERMAL, true);
1913 
1914 	return 0;
1915 }
1916 
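/*
 * Tear down dpm: disable thermal protection, dynamic PCIe gen2, clock
 * gating, and spread spectrum, stop the SMC, force the MC back onto the
 * boot state's S1 timing set, and reset the SMIO status.
 */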
1917 void cypress_dpm_disable(struct radeon_device *rdev)
1918 {
1919 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1920 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
1921 	struct radeon_ps *boot_ps = rdev->pm.dpm.boot_ps;
1922 
1923 	if (!rv770_dpm_enabled(rdev))
1924 		return;
1925 
1926 	rv770_clear_vc(rdev);
1927 
1928 	if (pi->thermal_protection)
1929 		rv770_enable_thermal_protection(rdev, false);
1930 
1931 	if (pi->dynamic_pcie_gen2)
1932 		cypress_enable_dynamic_pcie_gen2(rdev, false);
1933 
1934 	if (rdev->irq.installed &&
1935 	    r600_is_internal_thermal_sensor(rdev->pm.int_thermal_type)) {
1936 		rdev->irq.dpm_thermal = false;
1937 		radeon_irq_set(rdev);
1938 	}
1939 
1940 	if (pi->gfx_clock_gating)
1941 		cypress_gfx_clock_gating_enable(rdev, false);
1942 
1943 	if (pi->mg_clock_gating)
1944 		cypress_mg_clock_gating_enable(rdev, false);
1945 
1946 	rv770_stop_dpm(rdev);
1947 	r7xx_stop_smc(rdev);
1948 
1949 	cypress_enable_spread_spectrum(rdev, false);
1950 
1951 	if (eg_pi->dynamic_ac_timing)
1952 		cypress_force_mc_use_s1(rdev, boot_ps);
1953 
1954 	rv770_reset_smio_status(rdev);
1955 }
1956 
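/*
 * Switch to the requested performance state: restrict performance levels,
 * halt the SMC, upload the new software state and MC register table,
 * reprogram memory timings, then resume the SMC and activate the new
 * state.
 */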
1957 int cypress_dpm_set_power_state(struct radeon_device *rdev)
1958 {
1959 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
1960 	struct radeon_ps *new_ps = rdev->pm.dpm.requested_ps;
1961 	struct radeon_ps *old_ps = rdev->pm.dpm.current_ps;
1962 	int ret;
1963 
1964 	ret = rv770_restrict_performance_levels_before_switch(rdev);
1965 	if (ret) {
1966 		DRM_ERROR("rv770_restrict_performance_levels_before_switch failed\n");
1967 		return ret;
1968 	}
1969 	if (eg_pi->pcie_performance_request)
1970 		cypress_notify_link_speed_change_before_state_change(rdev, new_ps, old_ps);
1971 
1972 	rv770_set_uvd_clock_before_set_eng_clock(rdev, new_ps, old_ps);
1973 	ret = rv770_halt_smc(rdev);
1974 	if (ret) {
1975 		DRM_ERROR("rv770_halt_smc failed\n");
1976 		return ret;
1977 	}
1978 	ret = cypress_upload_sw_state(rdev, new_ps);
1979 	if (ret) {
1980 		DRM_ERROR("cypress_upload_sw_state failed\n");
1981 		return ret;
1982 	}
1983 	if (eg_pi->dynamic_ac_timing) {
1984 		ret = cypress_upload_mc_reg_table(rdev, new_ps);
1985 		if (ret) {
1986 			DRM_ERROR("cypress_upload_mc_reg_table failed\n");
1987 			return ret;
1988 		}
1989 	}
1990 
1991 	cypress_program_memory_timing_parameters(rdev, new_ps);
1992 
1993 	ret = rv770_resume_smc(rdev);
1994 	if (ret) {
1995 		DRM_ERROR("rv770_resume_smc failed\n");
1996 		return ret;
1997 	}
1998 	ret = rv770_set_sw_state(rdev);
1999 	if (ret) {
2000 		DRM_ERROR("rv770_set_sw_state failed\n");
2001 		return ret;
2002 	}
2003 	rv770_set_uvd_clock_after_set_eng_clock(rdev, new_ps, old_ps);
2004 
2005 	if (eg_pi->pcie_performance_request)
2006 		cypress_notify_link_speed_change_after_state_change(rdev, new_ps, old_ps);
2007 
2008 	return 0;
2009 }
2010 
2011 #if 0
2012 void cypress_dpm_reset_asic(struct radeon_device *rdev)
2013 {
2014 	rv770_restrict_performance_levels_before_switch(rdev);
2015 	rv770_set_boot_state(rdev);
2016 }
2017 #endif
2018 
2019 void cypress_dpm_display_configuration_changed(struct radeon_device *rdev)
2020 {
2021 	cypress_program_display_gap(rdev);
2022 }
2023 
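/*
 * Allocate the power info structures, parse the ATOM power table, and
 * initialize the chip-specific dpm defaults and feature flags.
 */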
2024 int cypress_dpm_init(struct radeon_device *rdev)
2025 {
2026 	struct rv7xx_power_info *pi;
2027 	struct evergreen_power_info *eg_pi;
2028 	struct atom_clock_dividers dividers;
2029 	int ret;
2030 
2031 	eg_pi = kzalloc(sizeof(struct evergreen_power_info), GFP_KERNEL);
2032 	if (eg_pi == NULL)
2033 		return -ENOMEM;
2034 	rdev->pm.dpm.priv = eg_pi;
2035 	pi = &eg_pi->rv7xx;
2036 
2037 	rv770_get_max_vddc(rdev);
2038 
2039 	eg_pi->ulv.supported = false;
2040 	pi->acpi_vddc = 0;
2041 	eg_pi->acpi_vddci = 0;
2042 	pi->min_vddc_in_table = 0;
2043 	pi->max_vddc_in_table = 0;
2044 
2045 	ret = r600_get_platform_caps(rdev);
2046 	if (ret)
2047 		return ret;
2048 
2049 	ret = rv7xx_parse_power_table(rdev);
2050 	if (ret)
2051 		return ret;
2052 
2053 	if (rdev->pm.dpm.voltage_response_time == 0)
2054 		rdev->pm.dpm.voltage_response_time = R600_VOLTAGERESPONSETIME_DFLT;
2055 	if (rdev->pm.dpm.backbias_response_time == 0)
2056 		rdev->pm.dpm.backbias_response_time = R600_BACKBIASRESPONSETIME_DFLT;
2057 
2058 	ret = radeon_atom_get_clock_dividers(rdev, COMPUTE_ENGINE_PLL_PARAM,
2059 					     0, false, &dividers);
2060 	if (ret == 0)
2061 		pi->ref_div = dividers.ref_div + 1;
2062 	else
2063 		pi->ref_div = R600_REFERENCEDIVIDER_DFLT;
2064 
2065 	pi->mclk_strobe_mode_threshold = 40000;
2066 	pi->mclk_edc_enable_threshold = 40000;
2067 	eg_pi->mclk_edc_wr_enable_threshold = 40000;
2068 
2069 	pi->rlp = RV770_RLP_DFLT;
2070 	pi->rmp = RV770_RMP_DFLT;
2071 	pi->lhp = RV770_LHP_DFLT;
2072 	pi->lmp = RV770_LMP_DFLT;
2073 
2074 	pi->voltage_control =
2075 		radeon_atom_is_voltage_gpio(rdev, SET_VOLTAGE_TYPE_ASIC_VDDC, 0);
2076 
2077 	pi->mvdd_control =
2078 		radeon_atom_is_voltage_gpio(rdev, SET_VOLTAGE_TYPE_ASIC_MVDDC, 0);
2079 
2080 	eg_pi->vddci_control =
2081 		radeon_atom_is_voltage_gpio(rdev, SET_VOLTAGE_TYPE_ASIC_VDDCI, 0);
2082 
2083 	rv770_get_engine_memory_ss(rdev);
2084 
2085 	pi->asi = RV770_ASI_DFLT;
2086 	pi->pasi = CYPRESS_HASI_DFLT;
2087 	pi->vrc = CYPRESS_VRC_DFLT;
2088 
2089 	pi->power_gating = false;
2090 
2091 	if ((rdev->family == CHIP_CYPRESS) ||
2092 	    (rdev->family == CHIP_HEMLOCK))
2093 		pi->gfx_clock_gating = false;
2094 	else
2095 		pi->gfx_clock_gating = true;
2096 
2097 	pi->mg_clock_gating = true;
2098 	pi->mgcgtssm = true;
2099 	eg_pi->ls_clock_gating = false;
2100 	eg_pi->sclk_deep_sleep = false;
2101 
2102 	pi->dynamic_pcie_gen2 = true;
2103 
2104 	if (rdev->pm.int_thermal_type != THERMAL_TYPE_NONE)
2105 		pi->thermal_protection = true;
2106 	else
2107 		pi->thermal_protection = false;
2108 
2109 	pi->display_gap = true;
2110 
2111 	if (rdev->flags & RADEON_IS_MOBILITY)
2112 		pi->dcodt = true;
2113 	else
2114 		pi->dcodt = false;
2115 
2116 	pi->ulps = true;
2117 
2118 	eg_pi->dynamic_ac_timing = true;
2119 	eg_pi->abm = true;
2120 	eg_pi->mcls = true;
2121 	eg_pi->light_sleep = true;
2122 	eg_pi->memory_transition = true;
2123 #if defined(CONFIG_ACPI)
2124 	eg_pi->pcie_performance_request =
2125 		radeon_acpi_is_pcie_performance_request_supported(rdev);
2126 #else
2127 	eg_pi->pcie_performance_request = false;
2128 #endif
2129 
2130 	if ((rdev->family == CHIP_CYPRESS) ||
2131 	    (rdev->family == CHIP_HEMLOCK) ||
2132 	    (rdev->family == CHIP_JUNIPER))
2133 		eg_pi->dll_default_on = true;
2134 	else
2135 		eg_pi->dll_default_on = false;
2136 
2137 	eg_pi->sclk_deep_sleep = false;
2138 	pi->mclk_stutter_mode_threshold = 0;
2139 
2140 	pi->sram_end = SMC_RAM_END;
2141 
2142 	return 0;
2143 }
2144 
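/* Free the parsed power states and the private power info. */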
2145 void cypress_dpm_fini(struct radeon_device *rdev)
2146 {
2147 	int i;
2148 
2149 	for (i = 0; i < rdev->pm.dpm.num_ps; i++) {
2150 		kfree(rdev->pm.dpm.ps[i].ps_priv);
2151 	}
2152 	kfree(rdev->pm.dpm.ps);
2153 	kfree(rdev->pm.dpm.priv);
2154 }
2155 
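/*
 * Report whether the current vblank period is too short to hide a memory
 * reclock; only GDDR5 boards get a non-zero switch limit.
 */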
2156 bool cypress_dpm_vblank_too_short(struct radeon_device *rdev)
2157 {
2158 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
2159 	u32 vblank_time = r600_dpm_get_vblank_time(rdev);
2160 	/* we never hit the non-gddr5 limit so disable it */
2161 	u32 switch_limit = pi->mem_gddr5 ? 450 : 0;
2162 
2163 	if (vblank_time < switch_limit)
2164 		return true;
2165 	else
2166 		return false;
2167 
2168 }
2169