/*
 * Copyright 2011 Advanced Micro Devices, Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: Alex Deucher
 */

#include <drm/drm_pci.h>

#include "atom.h"
#include "cypress_dpm.h"
#include "evergreend.h"
#include "r600_dpm.h"
#include "radeon.h"
#include "radeon_asic.h"

#define SMC_RAM_END 0x8000

#define MC_CG_ARB_FREQ_F0		0x0a
#define MC_CG_ARB_FREQ_F1		0x0b
#define MC_CG_ARB_FREQ_F2		0x0c
#define MC_CG_ARB_FREQ_F3		0x0d

#define MC_CG_SEQ_DRAMCONF_S0		0x05
#define MC_CG_SEQ_DRAMCONF_S1		0x06
#define MC_CG_SEQ_YCLK_SUSPEND		0x04
#define MC_CG_SEQ_YCLK_RESUME		0x0a

struct rv7xx_ps *rv770_get_ps(struct radeon_ps *rps);
struct rv7xx_power_info *rv770_get_pi(struct radeon_device *rdev);
struct evergreen_power_info *evergreen_get_pi(struct radeon_device *rdev);

static void cypress_enable_bif_dynamic_pcie_gen2(struct radeon_device *rdev,
						 bool enable)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	u32 tmp, bif;

	tmp = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
	if (enable) {
		if ((tmp & LC_OTHER_SIDE_EVER_SENT_GEN2) &&
		    (tmp & LC_OTHER_SIDE_SUPPORTS_GEN2)) {
			if (!pi->boot_in_gen2) {
				bif = RREG32(CG_BIF_REQ_AND_RSP) & ~CG_CLIENT_REQ_MASK;
				bif |= CG_CLIENT_REQ(0xd);
				WREG32(CG_BIF_REQ_AND_RSP, bif);

				tmp &= ~LC_HW_VOLTAGE_IF_CONTROL_MASK;
				tmp |= LC_HW_VOLTAGE_IF_CONTROL(1);
				tmp |= LC_GEN2_EN_STRAP;

				tmp |= LC_CLR_FAILED_SPD_CHANGE_CNT;
				WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, tmp);
				udelay(10);
				tmp &= ~LC_CLR_FAILED_SPD_CHANGE_CNT;
				WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, tmp);
			}
		}
	} else {
		if (!pi->boot_in_gen2) {
			tmp &= ~LC_HW_VOLTAGE_IF_CONTROL_MASK;
			tmp &= ~LC_GEN2_EN_STRAP;
		}
		if ((tmp & LC_OTHER_SIDE_EVER_SENT_GEN2) ||
		    (tmp & LC_OTHER_SIDE_SUPPORTS_GEN2))
			WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, tmp);
	}
}

static void cypress_enable_dynamic_pcie_gen2(struct radeon_device *rdev,
					     bool enable)
{
	cypress_enable_bif_dynamic_pcie_gen2(rdev, enable);

	if (enable)
		WREG32_P(GENERAL_PWRMGT, ENABLE_GEN2PCIE, ~ENABLE_GEN2PCIE);
	else
		WREG32_P(GENERAL_PWRMGT, 0, ~ENABLE_GEN2PCIE);
}

#if 0
static int cypress_enter_ulp_state(struct radeon_device *rdev)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);

	if (pi->gfx_clock_gating) {
		WREG32_P(SCLK_PWRMGT_CNTL, 0, ~DYN_GFX_CLK_OFF_EN);
		WREG32_P(SCLK_PWRMGT_CNTL, GFX_CLK_FORCE_ON, ~GFX_CLK_FORCE_ON);
		WREG32_P(SCLK_PWRMGT_CNTL, 0, ~GFX_CLK_FORCE_ON);

		RREG32(GB_ADDR_CONFIG);
	}

	WREG32_P(SMC_MSG, HOST_SMC_MSG(PPSMC_MSG_SwitchToMinimumPower),
		 ~HOST_SMC_MSG_MASK);

	udelay(7000);

	return 0;
}
#endif

static void cypress_gfx_clock_gating_enable(struct radeon_device *rdev,
					    bool enable)
{
	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);

	if (enable) {
		if (eg_pi->light_sleep) {
			WREG32(GRBM_GFX_INDEX, 0xC0000000);

			WREG32_CG(CG_CGLS_TILE_0, 0xFFFFFFFF);
			WREG32_CG(CG_CGLS_TILE_1, 0xFFFFFFFF);
			WREG32_CG(CG_CGLS_TILE_2, 0xFFFFFFFF);
			WREG32_CG(CG_CGLS_TILE_3, 0xFFFFFFFF);
			WREG32_CG(CG_CGLS_TILE_4, 0xFFFFFFFF);
			WREG32_CG(CG_CGLS_TILE_5, 0xFFFFFFFF);
			WREG32_CG(CG_CGLS_TILE_6, 0xFFFFFFFF);
			WREG32_CG(CG_CGLS_TILE_7, 0xFFFFFFFF);
			WREG32_CG(CG_CGLS_TILE_8, 0xFFFFFFFF);
			WREG32_CG(CG_CGLS_TILE_9, 0xFFFFFFFF);
			WREG32_CG(CG_CGLS_TILE_10, 0xFFFFFFFF);
			WREG32_CG(CG_CGLS_TILE_11, 0xFFFFFFFF);

			WREG32_P(SCLK_PWRMGT_CNTL, DYN_LIGHT_SLEEP_EN, ~DYN_LIGHT_SLEEP_EN);
		}
		WREG32_P(SCLK_PWRMGT_CNTL, DYN_GFX_CLK_OFF_EN, ~DYN_GFX_CLK_OFF_EN);
	} else {
		WREG32_P(SCLK_PWRMGT_CNTL, 0, ~DYN_GFX_CLK_OFF_EN);
		WREG32_P(SCLK_PWRMGT_CNTL, GFX_CLK_FORCE_ON, ~GFX_CLK_FORCE_ON);
		WREG32_P(SCLK_PWRMGT_CNTL, 0, ~GFX_CLK_FORCE_ON);
		RREG32(GB_ADDR_CONFIG);

		if (eg_pi->light_sleep) {
			WREG32_P(SCLK_PWRMGT_CNTL, 0, ~DYN_LIGHT_SLEEP_EN);

			WREG32(GRBM_GFX_INDEX, 0xC0000000);

			WREG32_CG(CG_CGLS_TILE_0, 0);
			WREG32_CG(CG_CGLS_TILE_1, 0);
			WREG32_CG(CG_CGLS_TILE_2, 0);
			WREG32_CG(CG_CGLS_TILE_3, 0);
			WREG32_CG(CG_CGLS_TILE_4, 0);
			WREG32_CG(CG_CGLS_TILE_5, 0);
			WREG32_CG(CG_CGLS_TILE_6, 0);
			WREG32_CG(CG_CGLS_TILE_7, 0);
			WREG32_CG(CG_CGLS_TILE_8, 0);
			WREG32_CG(CG_CGLS_TILE_9, 0);
			WREG32_CG(CG_CGLS_TILE_10, 0);
			WREG32_CG(CG_CGLS_TILE_11, 0);
		}
	}
}

static void cypress_mg_clock_gating_enable(struct radeon_device *rdev,
					   bool enable)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);

	if (enable) {
		u32 cgts_sm_ctrl_reg;

		if (rdev->family == CHIP_CEDAR)
			cgts_sm_ctrl_reg = CEDAR_MGCGCGTSSMCTRL_DFLT;
		else if (rdev->family == CHIP_REDWOOD)
			cgts_sm_ctrl_reg = REDWOOD_MGCGCGTSSMCTRL_DFLT;
		else
			cgts_sm_ctrl_reg = CYPRESS_MGCGCGTSSMCTRL_DFLT;

		WREG32(GRBM_GFX_INDEX, 0xC0000000);

		WREG32_CG(CG_CGTT_LOCAL_0, CYPRESS_MGCGTTLOCAL0_DFLT);
		WREG32_CG(CG_CGTT_LOCAL_1, CYPRESS_MGCGTTLOCAL1_DFLT & 0xFFFFCFFF);
		WREG32_CG(CG_CGTT_LOCAL_2, CYPRESS_MGCGTTLOCAL2_DFLT);
		WREG32_CG(CG_CGTT_LOCAL_3, CYPRESS_MGCGTTLOCAL3_DFLT);

		if (pi->mgcgtssm)
			WREG32(CGTS_SM_CTRL_REG, cgts_sm_ctrl_reg);

		if (eg_pi->mcls) {
			WREG32_P(MC_CITF_MISC_RD_CG, MEM_LS_ENABLE, ~MEM_LS_ENABLE);
			WREG32_P(MC_CITF_MISC_WR_CG, MEM_LS_ENABLE, ~MEM_LS_ENABLE);
			WREG32_P(MC_CITF_MISC_VM_CG, MEM_LS_ENABLE, ~MEM_LS_ENABLE);
			WREG32_P(MC_HUB_MISC_HUB_CG, MEM_LS_ENABLE, ~MEM_LS_ENABLE);
			WREG32_P(MC_HUB_MISC_VM_CG, MEM_LS_ENABLE, ~MEM_LS_ENABLE);
			WREG32_P(MC_HUB_MISC_SIP_CG, MEM_LS_ENABLE, ~MEM_LS_ENABLE);
			WREG32_P(MC_XPB_CLK_GAT, MEM_LS_ENABLE, ~MEM_LS_ENABLE);
			WREG32_P(VM_L2_CG, MEM_LS_ENABLE, ~MEM_LS_ENABLE);
		}
	} else {
		WREG32(GRBM_GFX_INDEX, 0xC0000000);

		WREG32_CG(CG_CGTT_LOCAL_0, 0xFFFFFFFF);
		WREG32_CG(CG_CGTT_LOCAL_1, 0xFFFFFFFF);
		WREG32_CG(CG_CGTT_LOCAL_2, 0xFFFFFFFF);
		WREG32_CG(CG_CGTT_LOCAL_3, 0xFFFFFFFF);

		if (pi->mgcgtssm)
			WREG32(CGTS_SM_CTRL_REG, 0x81f44bc0);
	}
}

void cypress_enable_spread_spectrum(struct radeon_device *rdev,
				    bool enable)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);

	if (enable) {
		if (pi->sclk_ss)
			WREG32_P(GENERAL_PWRMGT, DYN_SPREAD_SPECTRUM_EN, ~DYN_SPREAD_SPECTRUM_EN);

		if (pi->mclk_ss)
			WREG32_P(MPLL_CNTL_MODE, SS_SSEN, ~SS_SSEN);
	} else {
		WREG32_P(CG_SPLL_SPREAD_SPECTRUM, 0, ~SSEN);
		WREG32_P(GENERAL_PWRMGT, 0, ~DYN_SPREAD_SPECTRUM_EN);
		WREG32_P(MPLL_CNTL_MODE, 0, ~SS_SSEN);
		WREG32_P(MPLL_CNTL_MODE, 0, ~SS_DSMODE_EN);
	}
}

void cypress_start_dpm(struct radeon_device *rdev)
{
	WREG32_P(GENERAL_PWRMGT, GLOBAL_PWRMGT_EN, ~GLOBAL_PWRMGT_EN);
}

void cypress_enable_sclk_control(struct radeon_device *rdev,
				 bool enable)
{
	if (enable)
		WREG32_P(SCLK_PWRMGT_CNTL, 0, ~SCLK_PWRMGT_OFF);
	else
		WREG32_P(SCLK_PWRMGT_CNTL, SCLK_PWRMGT_OFF, ~SCLK_PWRMGT_OFF);
}

void cypress_enable_mclk_control(struct radeon_device *rdev,
				 bool enable)
{
	if (enable)
		WREG32_P(MCLK_PWRMGT_CNTL, 0, ~MPLL_PWRMGT_OFF);
	else
		WREG32_P(MCLK_PWRMGT_CNTL, MPLL_PWRMGT_OFF, ~MPLL_PWRMGT_OFF);
}

int cypress_notify_smc_display_change(struct radeon_device *rdev,
				      bool has_display)
{
	PPSMC_Msg msg = has_display ?
		(PPSMC_Msg)PPSMC_MSG_HasDisplay : (PPSMC_Msg)PPSMC_MSG_NoDisplay;

	if (rv770_send_msg_to_smc(rdev, msg) != PPSMC_Result_OK)
		return -EINVAL;

	return 0;
}

void cypress_program_response_times(struct radeon_device *rdev)
{
	u32 reference_clock;
	u32 mclk_switch_limit;

	reference_clock = radeon_get_xclk(rdev);
	mclk_switch_limit = (460 * reference_clock) / 100;

	rv770_write_smc_soft_register(rdev,
				      RV770_SMC_SOFT_REGISTER_mclk_switch_lim,
				      mclk_switch_limit);

	rv770_write_smc_soft_register(rdev,
				      RV770_SMC_SOFT_REGISTER_mvdd_chg_time, 1);

	rv770_write_smc_soft_register(rdev,
				      RV770_SMC_SOFT_REGISTER_mc_block_delay, 0xAA);

	rv770_program_response_times(rdev);

	if (ASIC_IS_LOMBOK(rdev))
		rv770_write_smc_soft_register(rdev,
					      RV770_SMC_SOFT_REGISTER_is_asic_lombok, 1);

}

static int cypress_pcie_performance_request(struct radeon_device *rdev,
					    u8 perf_req, bool advertise)
{
#if defined(CONFIG_ACPI)
	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
#endif
	u32 tmp;

	udelay(10);
	tmp = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
	if ((perf_req == PCIE_PERF_REQ_PECI_GEN1) && (tmp & LC_CURRENT_DATA_RATE))
		return 0;

#if defined(CONFIG_ACPI)
	if ((perf_req == PCIE_PERF_REQ_PECI_GEN1) ||
	    (perf_req == PCIE_PERF_REQ_PECI_GEN2)) {
		eg_pi->pcie_performance_request_registered = true;
		return radeon_acpi_pcie_performance_request(rdev, perf_req, advertise);
	} else if ((perf_req == PCIE_PERF_REQ_REMOVE_REGISTRY) &&
		   eg_pi->pcie_performance_request_registered) {
		eg_pi->pcie_performance_request_registered = false;
		return radeon_acpi_pcie_performance_request(rdev, perf_req, advertise);
	}
#endif

	return 0;
}

void cypress_advertise_gen2_capability(struct radeon_device *rdev)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	u32 tmp;

#if defined(CONFIG_ACPI)
	radeon_acpi_pcie_notify_device_ready(rdev);
#endif

	tmp = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);

	if ((tmp & LC_OTHER_SIDE_EVER_SENT_GEN2) &&
	    (tmp & LC_OTHER_SIDE_SUPPORTS_GEN2))
		pi->pcie_gen2 = true;
	else
		pi->pcie_gen2 = false;

	if (!pi->pcie_gen2)
		cypress_pcie_performance_request(rdev, PCIE_PERF_REQ_PECI_GEN2, true);

}

static enum radeon_pcie_gen cypress_get_maximum_link_speed(struct radeon_ps *radeon_state)
{
	struct rv7xx_ps *state = rv770_get_ps(radeon_state);

	if (state->high.flags & ATOM_PPLIB_R600_FLAGS_PCIEGEN2)
		return 1;
	return 0;
}

void cypress_notify_link_speed_change_after_state_change(struct radeon_device *rdev,
							 struct radeon_ps *radeon_new_state,
							 struct radeon_ps *radeon_current_state)
{
	enum radeon_pcie_gen pcie_link_speed_target =
		cypress_get_maximum_link_speed(radeon_new_state);
	enum radeon_pcie_gen pcie_link_speed_current =
		cypress_get_maximum_link_speed(radeon_current_state);
	u8 request;

	if (pcie_link_speed_target < pcie_link_speed_current) {
		if (pcie_link_speed_target == RADEON_PCIE_GEN1)
			request = PCIE_PERF_REQ_PECI_GEN1;
		else if (pcie_link_speed_target == RADEON_PCIE_GEN2)
			request = PCIE_PERF_REQ_PECI_GEN2;
		else
			request = PCIE_PERF_REQ_PECI_GEN3;

		cypress_pcie_performance_request(rdev, request, false);
	}
}

void cypress_notify_link_speed_change_before_state_change(struct radeon_device *rdev,
							  struct radeon_ps *radeon_new_state,
							  struct radeon_ps *radeon_current_state)
{
	enum radeon_pcie_gen pcie_link_speed_target =
		cypress_get_maximum_link_speed(radeon_new_state);
	enum radeon_pcie_gen pcie_link_speed_current =
		cypress_get_maximum_link_speed(radeon_current_state);
	u8 request;

	if (pcie_link_speed_target > pcie_link_speed_current) {
		if (pcie_link_speed_target == RADEON_PCIE_GEN1)
			request = PCIE_PERF_REQ_PECI_GEN1;
		else if (pcie_link_speed_target == RADEON_PCIE_GEN2)
			request = PCIE_PERF_REQ_PECI_GEN2;
		else
			request = PCIE_PERF_REQ_PECI_GEN3;

		cypress_pcie_performance_request(rdev, request, false);
	}
}

static int cypress_populate_voltage_value(struct radeon_device *rdev,
					  struct atom_voltage_table *table,
					  u16 value, RV770_SMC_VOLTAGE_VALUE *voltage)
{
	unsigned int i;

	for (i = 0; i < table->count; i++) {
		if (value <= table->entries[i].value) {
			voltage->index = (u8)i;
			voltage->value = cpu_to_be16(table->entries[i].value);
			break;
		}
	}

	if (i == table->count)
		return -EINVAL;

	return 0;
}

u8 cypress_get_strobe_mode_settings(struct radeon_device *rdev, u32 mclk)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	u8 result = 0;
	bool strobe_mode = false;

	if (pi->mem_gddr5) {
		if (mclk <= pi->mclk_strobe_mode_threshold)
			strobe_mode = true;
		result = cypress_get_mclk_frequency_ratio(rdev, mclk, strobe_mode);

		if (strobe_mode)
			result |= SMC_STROBE_ENABLE;
	}

	return result;
}

u32 cypress_map_clkf_to_ibias(struct radeon_device *rdev, u32 clkf)
{
	u32 ref_clk = rdev->clock.mpll.reference_freq;
	u32 vco = clkf * ref_clk;

	/* 100 Mhz ref clk */
	if (ref_clk == 10000) {
		if (vco > 500000)
			return 0xC6;
		if (vco > 400000)
			return 0x9D;
		if (vco > 330000)
			return 0x6C;
		if (vco > 250000)
			return 0x2B;
		if (vco > 160000)
			return 0x5B;
		if (vco > 120000)
			return 0x0A;
		return 0x4B;
	}

	/* 27 Mhz ref clk */
	if (vco > 250000)
		return 0x8B;
	if (vco > 200000)
		return 0xCC;
	if (vco > 150000)
		return 0x9B;
	return 0x6B;
}

static int cypress_populate_mclk_value(struct radeon_device *rdev,
				       u32 engine_clock, u32 memory_clock,
				       RV7XX_SMC_MCLK_VALUE *mclk,
				       bool strobe_mode, bool dll_state_on)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);

	u32 mpll_ad_func_cntl =
		pi->clk_regs.rv770.mpll_ad_func_cntl;
	u32 mpll_ad_func_cntl_2 =
		pi->clk_regs.rv770.mpll_ad_func_cntl_2;
	u32 mpll_dq_func_cntl =
		pi->clk_regs.rv770.mpll_dq_func_cntl;
	u32 mpll_dq_func_cntl_2 =
		pi->clk_regs.rv770.mpll_dq_func_cntl_2;
	u32 mclk_pwrmgt_cntl =
		pi->clk_regs.rv770.mclk_pwrmgt_cntl;
	u32 dll_cntl =
		pi->clk_regs.rv770.dll_cntl;
	u32 mpll_ss1 = pi->clk_regs.rv770.mpll_ss1;
	u32 mpll_ss2 = pi->clk_regs.rv770.mpll_ss2;
	struct atom_clock_dividers dividers;
	u32 ibias;
	u32 dll_speed;
	int ret;
	u32 mc_seq_misc7;

	ret = radeon_atom_get_clock_dividers(rdev, COMPUTE_MEMORY_PLL_PARAM,
					     memory_clock, strobe_mode, &dividers);
	if (ret)
		return ret;

	if (!strobe_mode) {
		mc_seq_misc7 = RREG32(MC_SEQ_MISC7);

		if (mc_seq_misc7 & 0x8000000)
			dividers.post_div = 1;
	}

	ibias = cypress_map_clkf_to_ibias(rdev, dividers.whole_fb_div);

	mpll_ad_func_cntl &= ~(CLKR_MASK |
			       YCLK_POST_DIV_MASK |
			       CLKF_MASK |
			       CLKFRAC_MASK |
			       IBIAS_MASK);
	mpll_ad_func_cntl |= CLKR(dividers.ref_div);
	mpll_ad_func_cntl |= YCLK_POST_DIV(dividers.post_div);
	mpll_ad_func_cntl |= CLKF(dividers.whole_fb_div);
	mpll_ad_func_cntl |= CLKFRAC(dividers.frac_fb_div);
	mpll_ad_func_cntl |= IBIAS(ibias);

	if (dividers.vco_mode)
		mpll_ad_func_cntl_2 |= VCO_MODE;
	else
		mpll_ad_func_cntl_2 &= ~VCO_MODE;

	if (pi->mem_gddr5) {
		mpll_dq_func_cntl &= ~(CLKR_MASK |
				       YCLK_POST_DIV_MASK |
				       CLKF_MASK |
				       CLKFRAC_MASK |
				       IBIAS_MASK);
		mpll_dq_func_cntl |= CLKR(dividers.ref_div);
		mpll_dq_func_cntl |= YCLK_POST_DIV(dividers.post_div);
		mpll_dq_func_cntl |= CLKF(dividers.whole_fb_div);
		mpll_dq_func_cntl |= CLKFRAC(dividers.frac_fb_div);
		mpll_dq_func_cntl |= IBIAS(ibias);

		if (strobe_mode)
			mpll_dq_func_cntl &= ~PDNB;
		else
			mpll_dq_func_cntl |= PDNB;

		if (dividers.vco_mode)
			mpll_dq_func_cntl_2 |= VCO_MODE;
		else
			mpll_dq_func_cntl_2 &= ~VCO_MODE;
	}

	if (pi->mclk_ss) {
		struct radeon_atom_ss ss;
		u32 vco_freq = memory_clock * dividers.post_div;

		if (radeon_atombios_get_asic_ss_info(rdev, &ss,
						     ASIC_INTERNAL_MEMORY_SS, vco_freq)) {
			u32 reference_clock = rdev->clock.mpll.reference_freq;
			u32 decoded_ref = rv740_get_decoded_reference_divider(dividers.ref_div);
			u32 clk_s = reference_clock * 5 / (decoded_ref * ss.rate);
			u32 clk_v = ss.percentage *
				(0x4000 * dividers.whole_fb_div + 0x800 * dividers.frac_fb_div) / (clk_s * 625);

			mpll_ss1 &= ~CLKV_MASK;
			mpll_ss1 |= CLKV(clk_v);

			mpll_ss2 &= ~CLKS_MASK;
			mpll_ss2 |= CLKS(clk_s);
		}
	}

	dll_speed = rv740_get_dll_speed(pi->mem_gddr5,
					memory_clock);

	mclk_pwrmgt_cntl &= ~DLL_SPEED_MASK;
	mclk_pwrmgt_cntl |= DLL_SPEED(dll_speed);
	if (dll_state_on)
		mclk_pwrmgt_cntl |= (MRDCKA0_PDNB |
				     MRDCKA1_PDNB |
				     MRDCKB0_PDNB |
				     MRDCKB1_PDNB |
				     MRDCKC0_PDNB |
				     MRDCKC1_PDNB |
				     MRDCKD0_PDNB |
				     MRDCKD1_PDNB);
	else
		mclk_pwrmgt_cntl &= ~(MRDCKA0_PDNB |
				      MRDCKA1_PDNB |
				      MRDCKB0_PDNB |
				      MRDCKB1_PDNB |
				      MRDCKC0_PDNB |
				      MRDCKC1_PDNB |
				      MRDCKD0_PDNB |
				      MRDCKD1_PDNB);

	mclk->mclk770.mclk_value = cpu_to_be32(memory_clock);
	mclk->mclk770.vMPLL_AD_FUNC_CNTL = cpu_to_be32(mpll_ad_func_cntl);
	mclk->mclk770.vMPLL_AD_FUNC_CNTL_2 = cpu_to_be32(mpll_ad_func_cntl_2);
	mclk->mclk770.vMPLL_DQ_FUNC_CNTL = cpu_to_be32(mpll_dq_func_cntl);
	mclk->mclk770.vMPLL_DQ_FUNC_CNTL_2 = cpu_to_be32(mpll_dq_func_cntl_2);
	mclk->mclk770.vMCLK_PWRMGT_CNTL = cpu_to_be32(mclk_pwrmgt_cntl);
	mclk->mclk770.vDLL_CNTL = cpu_to_be32(dll_cntl);
	mclk->mclk770.vMPLL_SS = cpu_to_be32(mpll_ss1);
	mclk->mclk770.vMPLL_SS2 = cpu_to_be32(mpll_ss2);

	return 0;
}

u8 cypress_get_mclk_frequency_ratio(struct radeon_device *rdev,
				    u32 memory_clock, bool strobe_mode)
{
	u8 mc_para_index;

	if (rdev->family >= CHIP_BARTS) {
		if (strobe_mode) {
			if (memory_clock < 10000)
				mc_para_index = 0x00;
			else if (memory_clock > 47500)
				mc_para_index = 0x0f;
			else
				mc_para_index = (u8)((memory_clock - 10000) / 2500);
		} else {
			if (memory_clock < 65000)
				mc_para_index = 0x00;
			else if (memory_clock > 135000)
				mc_para_index = 0x0f;
			else
				mc_para_index = (u8)((memory_clock - 60000) / 5000);
		}
	} else {
		if (strobe_mode) {
			if (memory_clock < 10000)
				mc_para_index = 0x00;
			else if (memory_clock > 47500)
				mc_para_index = 0x0f;
			else
				mc_para_index = (u8)((memory_clock - 10000) / 2500);
		} else {
			if (memory_clock < 40000)
				mc_para_index = 0x00;
			else if (memory_clock > 115000)
				mc_para_index = 0x0f;
			else
				mc_para_index = (u8)((memory_clock - 40000) / 5000);
		}
	}
	return mc_para_index;
}

static int cypress_populate_mvdd_value(struct radeon_device *rdev,
				       u32 mclk,
				       RV770_SMC_VOLTAGE_VALUE *voltage)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);

	if (!pi->mvdd_control) {
		voltage->index = eg_pi->mvdd_high_index;
		voltage->value = cpu_to_be16(MVDD_HIGH_VALUE);
		return 0;
	}

	if (mclk <= pi->mvdd_split_frequency) {
		voltage->index = eg_pi->mvdd_low_index;
		voltage->value = cpu_to_be16(MVDD_LOW_VALUE);
	} else {
		voltage->index = eg_pi->mvdd_high_index;
		voltage->value = cpu_to_be16(MVDD_HIGH_VALUE);
	}

	return 0;
}

int cypress_convert_power_level_to_smc(struct radeon_device *rdev,
				       struct rv7xx_pl *pl,
				       RV770_SMC_HW_PERFORMANCE_LEVEL *level,
				       u8 watermark_level)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
	int ret;
	bool dll_state_on;

	level->gen2PCIE = pi->pcie_gen2 ?
		((pl->flags & ATOM_PPLIB_R600_FLAGS_PCIEGEN2) ? 1 : 0) : 0;
	level->gen2XSP = (pl->flags & ATOM_PPLIB_R600_FLAGS_PCIEGEN2) ? 1 : 0;
	level->backbias = (pl->flags & ATOM_PPLIB_R600_FLAGS_BACKBIASENABLE) ? 1 : 0;
	level->displayWatermark = watermark_level;

	ret = rv740_populate_sclk_value(rdev, pl->sclk, &level->sclk);
	if (ret)
		return ret;

	level->mcFlags = 0;
	if (pi->mclk_stutter_mode_threshold &&
	    (pl->mclk <= pi->mclk_stutter_mode_threshold) &&
	    !eg_pi->uvd_enabled) {
		level->mcFlags |= SMC_MC_STUTTER_EN;
		if (eg_pi->sclk_deep_sleep)
			level->stateFlags |= PPSMC_STATEFLAG_AUTO_PULSE_SKIP;
		else
			level->stateFlags &= ~PPSMC_STATEFLAG_AUTO_PULSE_SKIP;
	}

	if (pi->mem_gddr5) {
		if (pl->mclk > pi->mclk_edc_enable_threshold)
			level->mcFlags |= SMC_MC_EDC_RD_FLAG;

		if (pl->mclk > eg_pi->mclk_edc_wr_enable_threshold)
			level->mcFlags |= SMC_MC_EDC_WR_FLAG;

		level->strobeMode = cypress_get_strobe_mode_settings(rdev, pl->mclk);

		if (level->strobeMode & SMC_STROBE_ENABLE) {
			if (cypress_get_mclk_frequency_ratio(rdev, pl->mclk, true) >=
			    ((RREG32(MC_SEQ_MISC7) >> 16) & 0xf))
				dll_state_on = ((RREG32(MC_SEQ_MISC5) >> 1) & 0x1) ? true : false;
			else
				dll_state_on = ((RREG32(MC_SEQ_MISC6) >> 1) & 0x1) ? true : false;
		} else
			dll_state_on = eg_pi->dll_default_on;

		ret = cypress_populate_mclk_value(rdev,
						  pl->sclk,
						  pl->mclk,
						  &level->mclk,
						  (level->strobeMode & SMC_STROBE_ENABLE) != 0,
						  dll_state_on);
	} else {
		ret = cypress_populate_mclk_value(rdev,
						  pl->sclk,
						  pl->mclk,
						  &level->mclk,
						  true,
						  true);
	}
	if (ret)
		return ret;

	ret = cypress_populate_voltage_value(rdev,
					     &eg_pi->vddc_voltage_table,
					     pl->vddc,
					     &level->vddc);
	if (ret)
		return ret;

	if (eg_pi->vddci_control) {
		ret = cypress_populate_voltage_value(rdev,
						     &eg_pi->vddci_voltage_table,
						     pl->vddci,
						     &level->vddci);
		if (ret)
			return ret;
	}

	ret = cypress_populate_mvdd_value(rdev, pl->mclk, &level->mvdd);

	return ret;
}

static int cypress_convert_power_state_to_smc(struct radeon_device *rdev,
					      struct radeon_ps *radeon_state,
					      RV770_SMC_SWSTATE *smc_state)
{
	struct rv7xx_ps *state = rv770_get_ps(radeon_state);
	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
	int ret;

	if (!(radeon_state->caps & ATOM_PPLIB_DISALLOW_ON_DC))
		smc_state->flags |= PPSMC_SWSTATE_FLAG_DC;

	ret = cypress_convert_power_level_to_smc(rdev,
						 &state->low,
						 &smc_state->levels[0],
						 PPSMC_DISPLAY_WATERMARK_LOW);
	if (ret)
		return ret;

	ret = cypress_convert_power_level_to_smc(rdev,
						 &state->medium,
						 &smc_state->levels[1],
						 PPSMC_DISPLAY_WATERMARK_LOW);
	if (ret)
		return ret;

	ret = cypress_convert_power_level_to_smc(rdev,
						 &state->high,
						 &smc_state->levels[2],
						 PPSMC_DISPLAY_WATERMARK_HIGH);
	if (ret)
		return ret;

	smc_state->levels[0].arbValue = MC_CG_ARB_FREQ_F1;
	smc_state->levels[1].arbValue = MC_CG_ARB_FREQ_F2;
	smc_state->levels[2].arbValue = MC_CG_ARB_FREQ_F3;

	if (eg_pi->dynamic_ac_timing) {
		smc_state->levels[0].ACIndex = 2;
		smc_state->levels[1].ACIndex = 3;
		smc_state->levels[2].ACIndex = 4;
	} else {
		smc_state->levels[0].ACIndex = 0;
		smc_state->levels[1].ACIndex = 0;
		smc_state->levels[2].ACIndex = 0;
	}

	rv770_populate_smc_sp(rdev, radeon_state, smc_state);

	return rv770_populate_smc_t(rdev, radeon_state, smc_state);
}

static void cypress_convert_mc_registers(struct evergreen_mc_reg_entry *entry,
					 SMC_Evergreen_MCRegisterSet *data,
					 u32 num_entries, u32 valid_flag)
{
	u32 i, j;

	for (i = 0, j = 0; j < num_entries; j++) {
		if (valid_flag & (1 << j)) {
			data->value[i] = cpu_to_be32(entry->mc_data[j]);
			i++;
		}
	}
}

static void cypress_convert_mc_reg_table_entry_to_smc(struct radeon_device *rdev,
						      struct rv7xx_pl *pl,
						      SMC_Evergreen_MCRegisterSet *mc_reg_table_data)
{
	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
	u32 i = 0;

	for (i = 0; i < eg_pi->mc_reg_table.num_entries; i++) {
		if (pl->mclk <=
		    eg_pi->mc_reg_table.mc_reg_table_entry[i].mclk_max)
			break;
	}

	if ((i == eg_pi->mc_reg_table.num_entries) && (i > 0))
		--i;

	cypress_convert_mc_registers(&eg_pi->mc_reg_table.mc_reg_table_entry[i],
				     mc_reg_table_data,
				     eg_pi->mc_reg_table.last,
				     eg_pi->mc_reg_table.valid_flag);
}

static void cypress_convert_mc_reg_table_to_smc(struct radeon_device *rdev,
						struct radeon_ps *radeon_state,
						SMC_Evergreen_MCRegisters *mc_reg_table)
{
	struct rv7xx_ps *state = rv770_get_ps(radeon_state);

	cypress_convert_mc_reg_table_entry_to_smc(rdev,
						  &state->low,
						  &mc_reg_table->data[2]);
	cypress_convert_mc_reg_table_entry_to_smc(rdev,
						  &state->medium,
						  &mc_reg_table->data[3]);
	cypress_convert_mc_reg_table_entry_to_smc(rdev,
						  &state->high,
						  &mc_reg_table->data[4]);
}

int cypress_upload_sw_state(struct radeon_device *rdev,
			    struct radeon_ps *radeon_new_state)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	u16 address = pi->state_table_start +
		offsetof(RV770_SMC_STATETABLE, driverState);
	RV770_SMC_SWSTATE state = { 0 };
	int ret;

	ret = cypress_convert_power_state_to_smc(rdev, radeon_new_state, &state);
	if (ret)
		return ret;

	return rv770_copy_bytes_to_smc(rdev, address, (u8 *)&state,
				       sizeof(RV770_SMC_SWSTATE),
				       pi->sram_end);
}

int cypress_upload_mc_reg_table(struct radeon_device *rdev,
				struct radeon_ps *radeon_new_state)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
	SMC_Evergreen_MCRegisters mc_reg_table = { 0 };
	u16 address;

	cypress_convert_mc_reg_table_to_smc(rdev, radeon_new_state, &mc_reg_table);

	address = eg_pi->mc_reg_table_start +
		(u16)offsetof(SMC_Evergreen_MCRegisters, data[2]);

	return rv770_copy_bytes_to_smc(rdev, address,
				       (u8 *)&mc_reg_table.data[2],
				       sizeof(SMC_Evergreen_MCRegisterSet) * 3,
				       pi->sram_end);
}

u32 cypress_calculate_burst_time(struct radeon_device *rdev,
				 u32 engine_clock, u32 memory_clock)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	u32 multiplier = pi->mem_gddr5 ? 1 : 2;
	u32 result = (4 * multiplier * engine_clock) / (memory_clock / 2);
	u32 burst_time;

	if (result <= 4)
		burst_time = 0;
	else if (result < 8)
		burst_time = result - 4;
	else {
		burst_time = result / 2;
		if (burst_time > 18)
			burst_time = 18;
	}

	return burst_time;
}

void cypress_program_memory_timing_parameters(struct radeon_device *rdev,
					      struct radeon_ps *radeon_new_state)
{
	struct rv7xx_ps *new_state = rv770_get_ps(radeon_new_state);
	u32 mc_arb_burst_time = RREG32(MC_ARB_BURST_TIME);

	mc_arb_burst_time &= ~(STATE1_MASK | STATE2_MASK | STATE3_MASK);

	mc_arb_burst_time |= STATE1(cypress_calculate_burst_time(rdev,
								 new_state->low.sclk,
								 new_state->low.mclk));
	mc_arb_burst_time |= STATE2(cypress_calculate_burst_time(rdev,
								 new_state->medium.sclk,
								 new_state->medium.mclk));
	mc_arb_burst_time |= STATE3(cypress_calculate_burst_time(rdev,
								 new_state->high.sclk,
								 new_state->high.mclk));

	rv730_program_memory_timing_parameters(rdev, radeon_new_state);

	WREG32(MC_ARB_BURST_TIME, mc_arb_burst_time);
}

static void cypress_populate_mc_reg_addresses(struct radeon_device *rdev,
					      SMC_Evergreen_MCRegisters *mc_reg_table)
{
	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
	u32 i, j;

	for (i = 0, j = 0; j < eg_pi->mc_reg_table.last; j++) {
		if (eg_pi->mc_reg_table.valid_flag & (1 << j)) {
			mc_reg_table->address[i].s0 =
				cpu_to_be16(eg_pi->mc_reg_table.mc_reg_address[j].s0);
			mc_reg_table->address[i].s1 =
				cpu_to_be16(eg_pi->mc_reg_table.mc_reg_address[j].s1);
			i++;
		}
	}

	mc_reg_table->last = (u8)i;
}

static void cypress_set_mc_reg_address_table(struct radeon_device *rdev)
{
	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
	u32 i = 0;

	eg_pi->mc_reg_table.mc_reg_address[i].s0 = MC_SEQ_RAS_TIMING_LP >> 2;
	eg_pi->mc_reg_table.mc_reg_address[i].s1 = MC_SEQ_RAS_TIMING >> 2;
	i++;

	eg_pi->mc_reg_table.mc_reg_address[i].s0 = MC_SEQ_CAS_TIMING_LP >> 2;
	eg_pi->mc_reg_table.mc_reg_address[i].s1 = MC_SEQ_CAS_TIMING >> 2;
	i++;

	eg_pi->mc_reg_table.mc_reg_address[i].s0 = MC_SEQ_MISC_TIMING_LP >> 2;
	eg_pi->mc_reg_table.mc_reg_address[i].s1 = MC_SEQ_MISC_TIMING >> 2;
	i++;

	eg_pi->mc_reg_table.mc_reg_address[i].s0 = MC_SEQ_MISC_TIMING2_LP >> 2;
	eg_pi->mc_reg_table.mc_reg_address[i].s1 = MC_SEQ_MISC_TIMING2 >> 2;
	i++;

	eg_pi->mc_reg_table.mc_reg_address[i].s0 = MC_SEQ_RD_CTL_D0_LP >> 2;
	eg_pi->mc_reg_table.mc_reg_address[i].s1 = MC_SEQ_RD_CTL_D0 >> 2;
	i++;

	eg_pi->mc_reg_table.mc_reg_address[i].s0 = MC_SEQ_RD_CTL_D1_LP >> 2;
	eg_pi->mc_reg_table.mc_reg_address[i].s1 = MC_SEQ_RD_CTL_D1 >> 2;
	i++;

	eg_pi->mc_reg_table.mc_reg_address[i].s0 = MC_SEQ_WR_CTL_D0_LP >> 2;
	eg_pi->mc_reg_table.mc_reg_address[i].s1 = MC_SEQ_WR_CTL_D0 >> 2;
	i++;

	eg_pi->mc_reg_table.mc_reg_address[i].s0 = MC_SEQ_WR_CTL_D1_LP >> 2;
	eg_pi->mc_reg_table.mc_reg_address[i].s1 = MC_SEQ_WR_CTL_D1 >> 2;
	i++;

	eg_pi->mc_reg_table.mc_reg_address[i].s0 = MC_SEQ_PMG_CMD_EMRS_LP >> 2;
	eg_pi->mc_reg_table.mc_reg_address[i].s1 = MC_PMG_CMD_EMRS >> 2;
	i++;

	eg_pi->mc_reg_table.mc_reg_address[i].s0 = MC_SEQ_PMG_CMD_MRS_LP >> 2;
	eg_pi->mc_reg_table.mc_reg_address[i].s1 = MC_PMG_CMD_MRS >> 2;
	i++;

	eg_pi->mc_reg_table.mc_reg_address[i].s0 = MC_SEQ_PMG_CMD_MRS1_LP >> 2;
	eg_pi->mc_reg_table.mc_reg_address[i].s1 = MC_PMG_CMD_MRS1 >> 2;
	i++;

	eg_pi->mc_reg_table.mc_reg_address[i].s0 = MC_SEQ_MISC1 >> 2;
	eg_pi->mc_reg_table.mc_reg_address[i].s1 = MC_SEQ_MISC1 >> 2;
	i++;

	eg_pi->mc_reg_table.mc_reg_address[i].s0 = MC_SEQ_RESERVE_M >> 2;
	eg_pi->mc_reg_table.mc_reg_address[i].s1 = MC_SEQ_RESERVE_M >> 2;
	i++;

	eg_pi->mc_reg_table.mc_reg_address[i].s0 = MC_SEQ_MISC3 >> 2;
	eg_pi->mc_reg_table.mc_reg_address[i].s1 = MC_SEQ_MISC3 >> 2;
	i++;

	eg_pi->mc_reg_table.last = (u8)i;
}

static void cypress_retrieve_ac_timing_for_one_entry(struct radeon_device *rdev,
						     struct evergreen_mc_reg_entry *entry)
{
	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
	u32 i;

	for (i = 0; i < eg_pi->mc_reg_table.last; i++)
		entry->mc_data[i] =
			RREG32(eg_pi->mc_reg_table.mc_reg_address[i].s1 << 2);

}

static void cypress_retrieve_ac_timing_for_all_ranges(struct radeon_device *rdev,
						      struct atom_memory_clock_range_table *range_table)
{
	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
	u32 i, j;

	for (i = 0; i < range_table->num_entries; i++) {
		eg_pi->mc_reg_table.mc_reg_table_entry[i].mclk_max =
			range_table->mclk[i];
		radeon_atom_set_ac_timing(rdev, range_table->mclk[i]);
		cypress_retrieve_ac_timing_for_one_entry(rdev,
							 &eg_pi->mc_reg_table.mc_reg_table_entry[i]);
	}

	eg_pi->mc_reg_table.num_entries = range_table->num_entries;
	eg_pi->mc_reg_table.valid_flag = 0;

	for (i = 0; i < eg_pi->mc_reg_table.last; i++) {
		for (j = 1; j < range_table->num_entries; j++) {
			if (eg_pi->mc_reg_table.mc_reg_table_entry[j-1].mc_data[i] !=
			    eg_pi->mc_reg_table.mc_reg_table_entry[j].mc_data[i]) {
				eg_pi->mc_reg_table.valid_flag |= (1 << i);
				break;
			}
		}
	}
}

static int cypress_initialize_mc_reg_table(struct radeon_device *rdev)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	u8 module_index = rv770_get_memory_module_index(rdev);
	struct atom_memory_clock_range_table range_table = { 0 };
	int ret;

	ret = radeon_atom_get_mclk_range_table(rdev,
					       pi->mem_gddr5,
					       module_index, &range_table);
	if (ret)
		return ret;

	cypress_retrieve_ac_timing_for_all_ranges(rdev, &range_table);

	return 0;
}

static void cypress_wait_for_mc_sequencer(struct radeon_device *rdev, u8 value)
{
	u32 i, j;
	u32 channels = 2;

	if ((rdev->family == CHIP_CYPRESS) ||
	    (rdev->family == CHIP_HEMLOCK))
		channels = 4;
	else if (rdev->family == CHIP_CEDAR)
		channels = 1;

	for (i = 0; i < channels; i++) {
		if ((rdev->family == CHIP_CYPRESS) ||
		    (rdev->family == CHIP_HEMLOCK)) {
			WREG32_P(MC_CONFIG_MCD, MC_RD_ENABLE_MCD(i), ~MC_RD_ENABLE_MCD_MASK);
			WREG32_P(MC_CG_CONFIG_MCD, MC_RD_ENABLE_MCD(i), ~MC_RD_ENABLE_MCD_MASK);
		} else {
			WREG32_P(MC_CONFIG, MC_RD_ENABLE(i), ~MC_RD_ENABLE_MASK);
			WREG32_P(MC_CG_CONFIG, MC_RD_ENABLE(i), ~MC_RD_ENABLE_MASK);
		}
		for (j = 0; j < rdev->usec_timeout; j++) {
			if (((RREG32(MC_SEQ_CG) & CG_SEQ_RESP_MASK) >> CG_SEQ_RESP_SHIFT) == value)
				break;
			udelay(1);
		}
	}
}

static void cypress_force_mc_use_s1(struct radeon_device *rdev,
				    struct radeon_ps *radeon_boot_state)
{
	struct rv7xx_ps *boot_state = rv770_get_ps(radeon_boot_state);
	u32 strobe_mode;
	u32 mc_seq_cg;
	int i;

	if (RREG32(MC_SEQ_STATUS_M) & PMG_PWRSTATE)
		return;

	radeon_atom_set_ac_timing(rdev, boot_state->low.mclk);
	radeon_mc_wait_for_idle(rdev);

	if ((rdev->family == CHIP_CYPRESS) ||
	    (rdev->family == CHIP_HEMLOCK)) {
		WREG32(MC_CONFIG_MCD, 0xf);
		WREG32(MC_CG_CONFIG_MCD, 0xf);
	} else {
		WREG32(MC_CONFIG, 0xf);
		WREG32(MC_CG_CONFIG, 0xf);
	}

	for (i = 0; i < rdev->num_crtc; i++)
		radeon_wait_for_vblank(rdev, i);

	WREG32(MC_SEQ_CG, MC_CG_SEQ_YCLK_SUSPEND);
	cypress_wait_for_mc_sequencer(rdev, MC_CG_SEQ_YCLK_SUSPEND);

	strobe_mode = cypress_get_strobe_mode_settings(rdev,
						       boot_state->low.mclk);

	mc_seq_cg = CG_SEQ_REQ(MC_CG_SEQ_DRAMCONF_S1);
	mc_seq_cg |= SEQ_CG_RESP(strobe_mode);
	WREG32(MC_SEQ_CG, mc_seq_cg);

	for (i = 0; i < rdev->usec_timeout; i++) {
		if (RREG32(MC_SEQ_STATUS_M) & PMG_PWRSTATE)
			break;
		udelay(1);
	}

	mc_seq_cg &= ~CG_SEQ_REQ_MASK;
	mc_seq_cg |= CG_SEQ_REQ(MC_CG_SEQ_YCLK_RESUME);
	WREG32(MC_SEQ_CG, mc_seq_cg);

	cypress_wait_for_mc_sequencer(rdev, MC_CG_SEQ_YCLK_RESUME);
}

static void cypress_copy_ac_timing_from_s1_to_s0(struct radeon_device *rdev)
{
	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
	u32 value;
	u32 i;

	for (i = 0; i < eg_pi->mc_reg_table.last; i++) {
		value = RREG32(eg_pi->mc_reg_table.mc_reg_address[i].s1 << 2);
		WREG32(eg_pi->mc_reg_table.mc_reg_address[i].s0 << 2, value);
	}
}

static void cypress_force_mc_use_s0(struct radeon_device *rdev,
				    struct radeon_ps *radeon_boot_state)
{
	struct rv7xx_ps *boot_state = rv770_get_ps(radeon_boot_state);
	u32 strobe_mode;
	u32 mc_seq_cg;
	int i;

	cypress_copy_ac_timing_from_s1_to_s0(rdev);
	radeon_mc_wait_for_idle(rdev);

	if ((rdev->family == CHIP_CYPRESS) ||
	    (rdev->family == CHIP_HEMLOCK)) {
		WREG32(MC_CONFIG_MCD, 0xf);
		WREG32(MC_CG_CONFIG_MCD, 0xf);
	} else {
		WREG32(MC_CONFIG, 0xf);
		WREG32(MC_CG_CONFIG, 0xf);
	}

	for (i = 0; i < rdev->num_crtc; i++)
		radeon_wait_for_vblank(rdev, i);

	WREG32(MC_SEQ_CG, MC_CG_SEQ_YCLK_SUSPEND);
	cypress_wait_for_mc_sequencer(rdev, MC_CG_SEQ_YCLK_SUSPEND);

	strobe_mode = cypress_get_strobe_mode_settings(rdev,
						       boot_state->low.mclk);

	mc_seq_cg = CG_SEQ_REQ(MC_CG_SEQ_DRAMCONF_S0);
	mc_seq_cg |= SEQ_CG_RESP(strobe_mode);
	WREG32(MC_SEQ_CG, mc_seq_cg);

	for (i = 0; i < rdev->usec_timeout; i++) {
		if (!(RREG32(MC_SEQ_STATUS_M) & PMG_PWRSTATE))
			break;
		udelay(1);
	}

	mc_seq_cg &= ~CG_SEQ_REQ_MASK;
	mc_seq_cg |= CG_SEQ_REQ(MC_CG_SEQ_YCLK_RESUME);
	WREG32(MC_SEQ_CG, mc_seq_cg);

	cypress_wait_for_mc_sequencer(rdev, MC_CG_SEQ_YCLK_RESUME);
}

static int cypress_populate_initial_mvdd_value(struct radeon_device *rdev,
					       RV770_SMC_VOLTAGE_VALUE *voltage)
{
	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);

	voltage->index = eg_pi->mvdd_high_index;
	voltage->value = cpu_to_be16(MVDD_HIGH_VALUE);

	return 0;
}

int cypress_populate_smc_initial_state(struct radeon_device *rdev,
				       struct radeon_ps *radeon_initial_state,
				       RV770_SMC_STATETABLE *table)
{
	struct rv7xx_ps *initial_state = rv770_get_ps(radeon_initial_state);
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
	u32 a_t;

	table->initialState.levels[0].mclk.mclk770.vMPLL_AD_FUNC_CNTL =
		cpu_to_be32(pi->clk_regs.rv770.mpll_ad_func_cntl);
	table->initialState.levels[0].mclk.mclk770.vMPLL_AD_FUNC_CNTL_2 =
		cpu_to_be32(pi->clk_regs.rv770.mpll_ad_func_cntl_2);
	table->initialState.levels[0].mclk.mclk770.vMPLL_DQ_FUNC_CNTL =
		cpu_to_be32(pi->clk_regs.rv770.mpll_dq_func_cntl);
	table->initialState.levels[0].mclk.mclk770.vMPLL_DQ_FUNC_CNTL_2 =
		cpu_to_be32(pi->clk_regs.rv770.mpll_dq_func_cntl_2);
	table->initialState.levels[0].mclk.mclk770.vMCLK_PWRMGT_CNTL =
		cpu_to_be32(pi->clk_regs.rv770.mclk_pwrmgt_cntl);
	table->initialState.levels[0].mclk.mclk770.vDLL_CNTL =
		cpu_to_be32(pi->clk_regs.rv770.dll_cntl);

	table->initialState.levels[0].mclk.mclk770.vMPLL_SS =
		cpu_to_be32(pi->clk_regs.rv770.mpll_ss1);
	table->initialState.levels[0].mclk.mclk770.vMPLL_SS2 =
		cpu_to_be32(pi->clk_regs.rv770.mpll_ss2);

	table->initialState.levels[0].mclk.mclk770.mclk_value =
		cpu_to_be32(initial_state->low.mclk);

	table->initialState.levels[0].sclk.vCG_SPLL_FUNC_CNTL =
		cpu_to_be32(pi->clk_regs.rv770.cg_spll_func_cntl);
	table->initialState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_2 =
		cpu_to_be32(pi->clk_regs.rv770.cg_spll_func_cntl_2);
	table->initialState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_3 =
		cpu_to_be32(pi->clk_regs.rv770.cg_spll_func_cntl_3);
	table->initialState.levels[0].sclk.vCG_SPLL_SPREAD_SPECTRUM =
		cpu_to_be32(pi->clk_regs.rv770.cg_spll_spread_spectrum);
	table->initialState.levels[0].sclk.vCG_SPLL_SPREAD_SPECTRUM_2 =
		cpu_to_be32(pi->clk_regs.rv770.cg_spll_spread_spectrum_2);

	table->initialState.levels[0].sclk.sclk_value =
		cpu_to_be32(initial_state->low.sclk);

	table->initialState.levels[0].arbValue = MC_CG_ARB_FREQ_F0;

	table->initialState.levels[0].ACIndex = 0;

	cypress_populate_voltage_value(rdev,
				       &eg_pi->vddc_voltage_table,
				       initial_state->low.vddc,
				       &table->initialState.levels[0].vddc);

	if (eg_pi->vddci_control)
		cypress_populate_voltage_value(rdev,
					       &eg_pi->vddci_voltage_table,
					       initial_state->low.vddci,
					       &table->initialState.levels[0].vddci);

	cypress_populate_initial_mvdd_value(rdev,
					    &table->initialState.levels[0].mvdd);

	a_t = CG_R(0xffff) | CG_L(0);
	table->initialState.levels[0].aT = cpu_to_be32(a_t);

	table->initialState.levels[0].bSP = cpu_to_be32(pi->dsp);

	if (pi->boot_in_gen2)
		table->initialState.levels[0].gen2PCIE = 1;
	else
		table->initialState.levels[0].gen2PCIE = 0;
	if (initial_state->low.flags & ATOM_PPLIB_R600_FLAGS_PCIEGEN2)
		table->initialState.levels[0].gen2XSP = 1;
	else
		table->initialState.levels[0].gen2XSP = 0;

	if (pi->mem_gddr5) {
		table->initialState.levels[0].strobeMode =
			cypress_get_strobe_mode_settings(rdev,
							 initial_state->low.mclk);

		if (initial_state->low.mclk > pi->mclk_edc_enable_threshold)
			table->initialState.levels[0].mcFlags = SMC_MC_EDC_RD_FLAG | SMC_MC_EDC_WR_FLAG;
		else
			table->initialState.levels[0].mcFlags = 0;
	}

	table->initialState.levels[1] = table->initialState.levels[0];
	table->initialState.levels[2] = table->initialState.levels[0];

	table->initialState.flags |= PPSMC_SWSTATE_FLAG_DC;

	return 0;
}

int cypress_populate_smc_acpi_state(struct radeon_device *rdev,
				    RV770_SMC_STATETABLE *table)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
	u32 mpll_ad_func_cntl =
		pi->clk_regs.rv770.mpll_ad_func_cntl;
	u32 mpll_ad_func_cntl_2 =
		pi->clk_regs.rv770.mpll_ad_func_cntl_2;
	u32 mpll_dq_func_cntl =
		pi->clk_regs.rv770.mpll_dq_func_cntl;
	u32 mpll_dq_func_cntl_2 =
		pi->clk_regs.rv770.mpll_dq_func_cntl_2;
	u32 spll_func_cntl =
		pi->clk_regs.rv770.cg_spll_func_cntl;
	u32 spll_func_cntl_2 =
		pi->clk_regs.rv770.cg_spll_func_cntl_2;
	u32 spll_func_cntl_3 =
		pi->clk_regs.rv770.cg_spll_func_cntl_3;
	u32 mclk_pwrmgt_cntl =
		pi->clk_regs.rv770.mclk_pwrmgt_cntl;
	u32 dll_cntl =
		pi->clk_regs.rv770.dll_cntl;

	table->ACPIState = table->initialState;

	table->ACPIState.flags &= ~PPSMC_SWSTATE_FLAG_DC;

	if (pi->acpi_vddc) {
		cypress_populate_voltage_value(rdev,
					       &eg_pi->vddc_voltage_table,
					       pi->acpi_vddc,
					       &table->ACPIState.levels[0].vddc);
		if (pi->pcie_gen2) {
			if (pi->acpi_pcie_gen2)
				table->ACPIState.levels[0].gen2PCIE = 1;
			else
				table->ACPIState.levels[0].gen2PCIE = 0;
		} else
			table->ACPIState.levels[0].gen2PCIE = 0;
		if (pi->acpi_pcie_gen2)
			table->ACPIState.levels[0].gen2XSP = 1;
		else
			table->ACPIState.levels[0].gen2XSP = 0;
	} else {
		cypress_populate_voltage_value(rdev,
					       &eg_pi->vddc_voltage_table,
					       pi->min_vddc_in_table,
					       &table->ACPIState.levels[0].vddc);
		table->ACPIState.levels[0].gen2PCIE = 0;
	}

	if (eg_pi->acpi_vddci) {
		if (eg_pi->vddci_control) {
			cypress_populate_voltage_value(rdev,
						       &eg_pi->vddci_voltage_table,
						       eg_pi->acpi_vddci,
						       &table->ACPIState.levels[0].vddci);
		}
	}

	mpll_ad_func_cntl &= ~PDNB;

	mpll_ad_func_cntl_2 |= BIAS_GEN_PDNB | RESET_EN;

	if (pi->mem_gddr5)
		mpll_dq_func_cntl &= ~PDNB;
	mpll_dq_func_cntl_2 |= BIAS_GEN_PDNB | RESET_EN | BYPASS;

	mclk_pwrmgt_cntl |= (MRDCKA0_RESET |
			     MRDCKA1_RESET |
			     MRDCKB0_RESET |
			     MRDCKB1_RESET |
			     MRDCKC0_RESET |
			     MRDCKC1_RESET |
			     MRDCKD0_RESET |
			     MRDCKD1_RESET);

	mclk_pwrmgt_cntl &= ~(MRDCKA0_PDNB |
			      MRDCKA1_PDNB |
			      MRDCKB0_PDNB |
			      MRDCKB1_PDNB |
			      MRDCKC0_PDNB |
			      MRDCKC1_PDNB |
			      MRDCKD0_PDNB |
			      MRDCKD1_PDNB);

	dll_cntl |= (MRDCKA0_BYPASS |
		     MRDCKA1_BYPASS |
		     MRDCKB0_BYPASS |
		     MRDCKB1_BYPASS |
		     MRDCKC0_BYPASS |
		     MRDCKC1_BYPASS |
		     MRDCKD0_BYPASS |
		     MRDCKD1_BYPASS);

	/* evergreen only */
	if (rdev->family <= CHIP_HEMLOCK)
		spll_func_cntl |= SPLL_RESET | SPLL_SLEEP | SPLL_BYPASS_EN;

	spll_func_cntl_2 &= ~SCLK_MUX_SEL_MASK;
	spll_func_cntl_2 |= SCLK_MUX_SEL(4);

	table->ACPIState.levels[0].mclk.mclk770.vMPLL_AD_FUNC_CNTL =
		cpu_to_be32(mpll_ad_func_cntl);
	table->ACPIState.levels[0].mclk.mclk770.vMPLL_AD_FUNC_CNTL_2 =
		cpu_to_be32(mpll_ad_func_cntl_2);
	table->ACPIState.levels[0].mclk.mclk770.vMPLL_DQ_FUNC_CNTL =
		cpu_to_be32(mpll_dq_func_cntl);
	table->ACPIState.levels[0].mclk.mclk770.vMPLL_DQ_FUNC_CNTL_2 =
		cpu_to_be32(mpll_dq_func_cntl_2);
	table->ACPIState.levels[0].mclk.mclk770.vMCLK_PWRMGT_CNTL =
		cpu_to_be32(mclk_pwrmgt_cntl);
	table->ACPIState.levels[0].mclk.mclk770.vDLL_CNTL = cpu_to_be32(dll_cntl);

	table->ACPIState.levels[0].mclk.mclk770.mclk_value = 0;

	table->ACPIState.levels[0].sclk.vCG_SPLL_FUNC_CNTL =
		cpu_to_be32(spll_func_cntl);
	table->ACPIState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_2 =
		cpu_to_be32(spll_func_cntl_2);
	table->ACPIState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_3 =
		cpu_to_be32(spll_func_cntl_3);

	table->ACPIState.levels[0].sclk.sclk_value = 0;

	cypress_populate_mvdd_value(rdev, 0, &table->ACPIState.levels[0].mvdd);

	if (eg_pi->dynamic_ac_timing)
		table->ACPIState.levels[0].ACIndex = 1;

	table->ACPIState.levels[1] = table->ACPIState.levels[0];
	table->ACPIState.levels[2] = table->ACPIState.levels[0];

	return 0;
}

static void cypress_trim_voltage_table_to_fit_state_table(struct radeon_device *rdev,
							  struct atom_voltage_table *voltage_table)
{
	unsigned int i, diff;

	if (voltage_table->count <= MAX_NO_VREG_STEPS)
		return;

	diff = voltage_table->count - MAX_NO_VREG_STEPS;

	for (i = 0; i < MAX_NO_VREG_STEPS; i++)
		voltage_table->entries[i] = voltage_table->entries[i + diff];

	voltage_table->count = MAX_NO_VREG_STEPS;
}

int cypress_construct_voltage_tables(struct radeon_device *rdev)
{
	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
	int ret;

	ret = radeon_atom_get_voltage_table(rdev, SET_VOLTAGE_TYPE_ASIC_VDDC, 0,
					    &eg_pi->vddc_voltage_table);
	if (ret)
		return ret;

	if (eg_pi->vddc_voltage_table.count > MAX_NO_VREG_STEPS)
		cypress_trim_voltage_table_to_fit_state_table(rdev,
							      &eg_pi->vddc_voltage_table);

	if (eg_pi->vddci_control) {
		ret = radeon_atom_get_voltage_table(rdev, SET_VOLTAGE_TYPE_ASIC_VDDCI, 0,
						    &eg_pi->vddci_voltage_table);
		if (ret)
			return ret;

		if (eg_pi->vddci_voltage_table.count > MAX_NO_VREG_STEPS)
			cypress_trim_voltage_table_to_fit_state_table(rdev,
								      &eg_pi->vddci_voltage_table);
	}

	return 0;
}

static void cypress_populate_smc_voltage_table(struct radeon_device *rdev,
					       struct atom_voltage_table *voltage_table,
					       RV770_SMC_STATETABLE *table)
{
	unsigned int i;

	for (i = 0; i < voltage_table->count; i++) {
		table->highSMIO[i] = 0;
		table->lowSMIO[i] |= cpu_to_be32(voltage_table->entries[i].smio_low);
	}
}

int cypress_populate_smc_voltage_tables(struct radeon_device *rdev,
					RV770_SMC_STATETABLE *table)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
	unsigned char i;

	if (eg_pi->vddc_voltage_table.count) {
		cypress_populate_smc_voltage_table(rdev,
						   &eg_pi->vddc_voltage_table,
						   table);

		table->voltageMaskTable.highMask[RV770_SMC_VOLTAGEMASK_VDDC] = 0;
		table->voltageMaskTable.lowMask[RV770_SMC_VOLTAGEMASK_VDDC] =
			cpu_to_be32(eg_pi->vddc_voltage_table.mask_low);

		for (i = 0; i < eg_pi->vddc_voltage_table.count; i++) {
			if (pi->max_vddc_in_table <=
			    eg_pi->vddc_voltage_table.entries[i].value) {
				table->maxVDDCIndexInPPTable = i;
				break;
			}
		}
	}

	if (eg_pi->vddci_voltage_table.count) {
		cypress_populate_smc_voltage_table(rdev,
						   &eg_pi->vddci_voltage_table,
						   table);

		table->voltageMaskTable.highMask[RV770_SMC_VOLTAGEMASK_VDDCI] = 0;
		table->voltageMaskTable.lowMask[RV770_SMC_VOLTAGEMASK_VDDCI] =
			cpu_to_be32(eg_pi->vddci_voltage_table.mask_low);
	}

	return 0;
}

static u32 cypress_get_mclk_split_point(struct atom_memory_info *memory_info)
{
	if ((memory_info->mem_type == MEM_TYPE_GDDR3) ||
	    (memory_info->mem_type == MEM_TYPE_DDR3))
		return 30000;

	return 0;
}

int cypress_get_mvdd_configuration(struct radeon_device *rdev)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
	u8 module_index;
	struct atom_memory_info memory_info;
	u32 tmp = RREG32(GENERAL_PWRMGT);

	if (!(tmp & BACKBIAS_PAD_EN)) {
		eg_pi->mvdd_high_index = 0;
		eg_pi->mvdd_low_index = 1;
		pi->mvdd_control = false;
		return 0;
	}

	if (tmp & BACKBIAS_VALUE)
		eg_pi->mvdd_high_index = 1;
	else
		eg_pi->mvdd_high_index = 0;

	eg_pi->mvdd_low_index =
		(eg_pi->mvdd_high_index == 0) ? 1 : 0;

	module_index = rv770_get_memory_module_index(rdev);

	if (radeon_atom_get_memory_info(rdev, module_index, &memory_info)) {
		pi->mvdd_control = false;
		return 0;
	}

	pi->mvdd_split_frequency =
		cypress_get_mclk_split_point(&memory_info);

	if (pi->mvdd_split_frequency == 0) {
		pi->mvdd_control = false;
		return 0;
	}

	return 0;
}

static int cypress_init_smc_table(struct radeon_device *rdev,
				  struct radeon_ps *radeon_boot_state)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	RV770_SMC_STATETABLE *table = &pi->smc_statetable;
	int ret;

	memset(table, 0, sizeof(RV770_SMC_STATETABLE));

	cypress_populate_smc_voltage_tables(rdev, table);

	switch (rdev->pm.int_thermal_type) {
	case THERMAL_TYPE_EVERGREEN:
	case THERMAL_TYPE_EMC2103_WITH_INTERNAL:
		table->thermalProtectType = PPSMC_THERMAL_PROTECT_TYPE_INTERNAL;
		break;
	case THERMAL_TYPE_NONE:
		table->thermalProtectType = PPSMC_THERMAL_PROTECT_TYPE_NONE;
		break;
	default:
		table->thermalProtectType = PPSMC_THERMAL_PROTECT_TYPE_EXTERNAL;
		break;
	}

	if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_HARDWAREDC)
		table->systemFlags |= PPSMC_SYSTEMFLAG_GPIO_DC;

	if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_REGULATOR_HOT)
		table->systemFlags |= PPSMC_SYSTEMFLAG_REGULATOR_HOT;

	if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_STEPVDDC)
		table->systemFlags |= PPSMC_SYSTEMFLAG_STEPVDDC;

	if (pi->mem_gddr5)
		table->systemFlags |= PPSMC_SYSTEMFLAG_GDDR5;

	ret = cypress_populate_smc_initial_state(rdev, radeon_boot_state, table);
	if (ret)
		return ret;

	ret = cypress_populate_smc_acpi_state(rdev, table);
	if (ret)
		return ret;

	table->driverState = table->initialState;

	return rv770_copy_bytes_to_smc(rdev,
				       pi->state_table_start,
				       (u8 *)table, sizeof(RV770_SMC_STATETABLE),
				       pi->sram_end);
}

int cypress_populate_mc_reg_table(struct radeon_device *rdev,
				  struct radeon_ps *radeon_boot_state)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
	struct rv7xx_ps *boot_state = rv770_get_ps(radeon_boot_state);
	SMC_Evergreen_MCRegisters mc_reg_table = { 0 };

	rv770_write_smc_soft_register(rdev,
				      RV770_SMC_SOFT_REGISTER_seq_index, 1);

	cypress_populate_mc_reg_addresses(rdev, &mc_reg_table);

	cypress_convert_mc_reg_table_entry_to_smc(rdev,
						  &boot_state->low,
						  &mc_reg_table.data[0]);

	cypress_convert_mc_registers(&eg_pi->mc_reg_table.mc_reg_table_entry[0],
				     &mc_reg_table.data[1], eg_pi->mc_reg_table.last,
				     eg_pi->mc_reg_table.valid_flag);

	cypress_convert_mc_reg_table_to_smc(rdev, radeon_boot_state, &mc_reg_table);

	return rv770_copy_bytes_to_smc(rdev, eg_pi->mc_reg_table_start,
				       (u8 *)&mc_reg_table, sizeof(SMC_Evergreen_MCRegisters),
				       pi->sram_end);
}

int cypress_get_table_locations(struct radeon_device *rdev)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
	u32 tmp;
	int ret;

	ret = rv770_read_smc_sram_dword(rdev,
					EVERGREEN_SMC_FIRMWARE_HEADER_LOCATION +
					EVERGREEN_SMC_FIRMWARE_HEADER_stateTable,
					&tmp, pi->sram_end);
	if (ret)
		return ret;

	pi->state_table_start = (u16)tmp;

	ret = rv770_read_smc_sram_dword(rdev,
					EVERGREEN_SMC_FIRMWARE_HEADER_LOCATION +
					EVERGREEN_SMC_FIRMWARE_HEADER_softRegisters,
					&tmp, pi->sram_end);
	if (ret)
		return ret;

	pi->soft_regs_start = (u16)tmp;

	ret = rv770_read_smc_sram_dword(rdev,
					EVERGREEN_SMC_FIRMWARE_HEADER_LOCATION +
					EVERGREEN_SMC_FIRMWARE_HEADER_mcRegisterTable,
					&tmp, pi->sram_end);
	if (ret)
		return ret;

	eg_pi->mc_reg_table_start = (u16)tmp;

	return 0;
}

void cypress_enable_display_gap(struct radeon_device *rdev)
{
	u32 tmp = RREG32(CG_DISPLAY_GAP_CNTL);

	tmp &= ~(DISP1_GAP_MASK | DISP2_GAP_MASK);
	tmp |= (DISP1_GAP(R600_PM_DISPLAY_GAP_IGNORE) |
		DISP2_GAP(R600_PM_DISPLAY_GAP_IGNORE));

	tmp &= ~(DISP1_GAP_MCHG_MASK | DISP2_GAP_MCHG_MASK);
	tmp |= (DISP1_GAP_MCHG(R600_PM_DISPLAY_GAP_VBLANK) |
		DISP2_GAP_MCHG(R600_PM_DISPLAY_GAP_IGNORE));
	WREG32(CG_DISPLAY_GAP_CNTL, tmp);
}

static void cypress_program_display_gap(struct radeon_device *rdev)
{
	u32 tmp, pipe;
	int i;

	tmp = RREG32(CG_DISPLAY_GAP_CNTL) & ~(DISP1_GAP_MASK | DISP2_GAP_MASK);
	if (rdev->pm.dpm.new_active_crtc_count > 0)
		tmp |= DISP1_GAP(R600_PM_DISPLAY_GAP_VBLANK_OR_WM);
	else
		tmp |= DISP1_GAP(R600_PM_DISPLAY_GAP_IGNORE);

	if (rdev->pm.dpm.new_active_crtc_count > 1)
		tmp |= DISP2_GAP(R600_PM_DISPLAY_GAP_VBLANK_OR_WM);
	else
		tmp |= DISP2_GAP(R600_PM_DISPLAY_GAP_IGNORE);

	WREG32(CG_DISPLAY_GAP_CNTL, tmp);

	tmp = RREG32(DCCG_DISP_SLOW_SELECT_REG);
	pipe = (tmp & DCCG_DISP1_SLOW_SELECT_MASK) >> DCCG_DISP1_SLOW_SELECT_SHIFT;

	if ((rdev->pm.dpm.new_active_crtc_count > 0) &&
	    (!(rdev->pm.dpm.new_active_crtcs & (1 << pipe)))) {
		/* find the first active crtc */
		for (i = 0; i < rdev->num_crtc; i++) {
			if (rdev->pm.dpm.new_active_crtcs & (1 << i))
				break;
		}
		if (i == rdev->num_crtc)
			pipe = 0;
		else
			pipe = i;

		tmp &= ~DCCG_DISP1_SLOW_SELECT_MASK;
		tmp |= DCCG_DISP1_SLOW_SELECT(pipe);
		WREG32(DCCG_DISP_SLOW_SELECT_REG, tmp);
	}

	cypress_notify_smc_display_change(rdev, rdev->pm.dpm.new_active_crtc_count > 0);
}

void cypress_dpm_setup_asic(struct radeon_device *rdev)
{
	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);

	rv740_read_clock_registers(rdev);
	rv770_read_voltage_smio_registers(rdev);
	rv770_get_max_vddc(rdev);
	rv770_get_memory_type(rdev);

	if (eg_pi->pcie_performance_request)
		eg_pi->pcie_performance_request_registered = false;

	if (eg_pi->pcie_performance_request)
		cypress_advertise_gen2_capability(rdev);

	rv770_get_pcie_gen2_status(rdev);

	rv770_enable_acpi_pm(rdev);
}

int cypress_dpm_enable(struct radeon_device *rdev)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
	struct radeon_ps *boot_ps = rdev->pm.dpm.boot_ps;
	int ret;

	if (pi->gfx_clock_gating)
		rv770_restore_cgcg(rdev);

	if (rv770_dpm_enabled(rdev))
		return -EINVAL;

	if (pi->voltage_control) {
		rv770_enable_voltage_control(rdev, true);
		ret = cypress_construct_voltage_tables(rdev);
		if (ret) {
			DRM_ERROR("cypress_construct_voltage_tables failed\n");
			return ret;
		}
	}

	if (pi->mvdd_control) {
		ret = cypress_get_mvdd_configuration(rdev);
		if (ret) {
			DRM_ERROR("cypress_get_mvdd_configuration failed\n");
			return ret;
		}
	}

	if (eg_pi->dynamic_ac_timing) {
		cypress_set_mc_reg_address_table(rdev);
		cypress_force_mc_use_s0(rdev, boot_ps);
		ret = cypress_initialize_mc_reg_table(rdev);
		if (ret)
			eg_pi->dynamic_ac_timing = false;
		cypress_force_mc_use_s1(rdev, boot_ps);
	}

	if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_BACKBIAS)
		rv770_enable_backbias(rdev, true);

	if (pi->dynamic_ss)
		cypress_enable_spread_spectrum(rdev, true);

	if (pi->thermal_protection)
		rv770_enable_thermal_protection(rdev, true);

	rv770_setup_bsp(rdev);
	rv770_program_git(rdev);
	rv770_program_tp(rdev);
	rv770_program_tpp(rdev);
	rv770_program_sstp(rdev);
	rv770_program_engine_speed_parameters(rdev);
	cypress_enable_display_gap(rdev);
	rv770_program_vc(rdev);

	if (pi->dynamic_pcie_gen2)
		cypress_enable_dynamic_pcie_gen2(rdev, true);

	ret = rv770_upload_firmware(rdev);
	if (ret) {
		DRM_ERROR("rv770_upload_firmware failed\n");
		return ret;
	}

	ret = cypress_get_table_locations(rdev);
	if (ret) {
		DRM_ERROR("cypress_get_table_locations failed\n");
		return ret;
	}
	ret = cypress_init_smc_table(rdev, boot_ps);
	if (ret) {
		DRM_ERROR("cypress_init_smc_table failed\n");
		return ret;
	}
	if (eg_pi->dynamic_ac_timing) {
		ret = cypress_populate_mc_reg_table(rdev, boot_ps);
		if (ret) {
			DRM_ERROR("cypress_populate_mc_reg_table failed\n");
			return ret;
		}
	}

	cypress_program_response_times(rdev);

	r7xx_start_smc(rdev);

	ret = cypress_notify_smc_display_change(rdev, false);
	if (ret) {
		DRM_ERROR("cypress_notify_smc_display_change failed\n");
		return ret;
	}
	cypress_enable_sclk_control(rdev, true);

	if (eg_pi->memory_transition)
		cypress_enable_mclk_control(rdev, true);

	cypress_start_dpm(rdev);

	if (pi->gfx_clock_gating)
		cypress_gfx_clock_gating_enable(rdev, true);

	if (pi->mg_clock_gating)
		cypress_mg_clock_gating_enable(rdev, true);

	rv770_enable_auto_throttle_source(rdev, RADEON_DPM_AUTO_THROTTLE_SRC_THERMAL, true);

	return 0;
}

void cypress_dpm_disable(struct radeon_device *rdev)
void cypress_dpm_disable(struct radeon_device *rdev)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
	struct radeon_ps *boot_ps = rdev->pm.dpm.boot_ps;

	if (!rv770_dpm_enabled(rdev))
		return;

	rv770_clear_vc(rdev);

	if (pi->thermal_protection)
		rv770_enable_thermal_protection(rdev, false);

	if (pi->dynamic_pcie_gen2)
		cypress_enable_dynamic_pcie_gen2(rdev, false);

	if (rdev->irq.installed &&
	    r600_is_internal_thermal_sensor(rdev->pm.int_thermal_type)) {
		rdev->irq.dpm_thermal = false;
		radeon_irq_set(rdev);
	}

	if (pi->gfx_clock_gating)
		cypress_gfx_clock_gating_enable(rdev, false);

	if (pi->mg_clock_gating)
		cypress_mg_clock_gating_enable(rdev, false);

	rv770_stop_dpm(rdev);
	r7xx_stop_smc(rdev);

	cypress_enable_spread_spectrum(rdev, false);

	if (eg_pi->dynamic_ac_timing)
		cypress_force_mc_use_s1(rdev, boot_ps);

	rv770_reset_smio_status(rdev);
}

int cypress_dpm_set_power_state(struct radeon_device *rdev)
{
	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
	struct radeon_ps *new_ps = rdev->pm.dpm.requested_ps;
	struct radeon_ps *old_ps = rdev->pm.dpm.current_ps;
	int ret;

	ret = rv770_restrict_performance_levels_before_switch(rdev);
	if (ret) {
		DRM_ERROR("rv770_restrict_performance_levels_before_switch failed\n");
		return ret;
	}
	if (eg_pi->pcie_performance_request)
		cypress_notify_link_speed_change_before_state_change(rdev, new_ps, old_ps);

	rv770_set_uvd_clock_before_set_eng_clock(rdev, new_ps, old_ps);
	ret = rv770_halt_smc(rdev);
	if (ret) {
		DRM_ERROR("rv770_halt_smc failed\n");
		return ret;
	}
	ret = cypress_upload_sw_state(rdev, new_ps);
	if (ret) {
		DRM_ERROR("cypress_upload_sw_state failed\n");
		return ret;
	}
	if (eg_pi->dynamic_ac_timing) {
		ret = cypress_upload_mc_reg_table(rdev, new_ps);
		if (ret) {
			DRM_ERROR("cypress_upload_mc_reg_table failed\n");
			return ret;
		}
	}

	cypress_program_memory_timing_parameters(rdev, new_ps);

	ret = rv770_resume_smc(rdev);
	if (ret) {
		DRM_ERROR("rv770_resume_smc failed\n");
		return ret;
	}
	ret = rv770_set_sw_state(rdev);
	if (ret) {
		DRM_ERROR("rv770_set_sw_state failed\n");
		return ret;
	}
	rv770_set_uvd_clock_after_set_eng_clock(rdev, new_ps, old_ps);

	if (eg_pi->pcie_performance_request)
		cypress_notify_link_speed_change_after_state_change(rdev, new_ps, old_ps);

	return 0;
}

#if 0
void cypress_dpm_reset_asic(struct radeon_device *rdev)
{
	rv770_restrict_performance_levels_before_switch(rdev);
	rv770_set_boot_state(rdev);
}
#endif

void cypress_dpm_display_configuration_changed(struct radeon_device *rdev)
{
	cypress_program_display_gap(rdev);
}
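
/*
 * Allocate and fill the driver's power-management state: parse the
 * platform caps and power table from the VBIOS, probe the engine PLL
 * dividers and the voltage-control capabilities, and set the
 * Cypress/Evergreen default thresholds and feature flags used by the
 * rest of this file.
 */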
int cypress_dpm_init(struct radeon_device *rdev)
{
	struct rv7xx_power_info *pi;
	struct evergreen_power_info *eg_pi;
	struct atom_clock_dividers dividers;
	int ret;

	eg_pi = kzalloc(sizeof(struct evergreen_power_info), GFP_KERNEL);
	if (eg_pi == NULL)
		return -ENOMEM;
	rdev->pm.dpm.priv = eg_pi;
	pi = &eg_pi->rv7xx;

	rv770_get_max_vddc(rdev);

	eg_pi->ulv.supported = false;
	pi->acpi_vddc = 0;
	eg_pi->acpi_vddci = 0;
	pi->min_vddc_in_table = 0;
	pi->max_vddc_in_table = 0;

	ret = r600_get_platform_caps(rdev);
	if (ret)
		return ret;

	ret = rv7xx_parse_power_table(rdev);
	if (ret)
		return ret;

	if (rdev->pm.dpm.voltage_response_time == 0)
		rdev->pm.dpm.voltage_response_time = R600_VOLTAGERESPONSETIME_DFLT;
	if (rdev->pm.dpm.backbias_response_time == 0)
		rdev->pm.dpm.backbias_response_time = R600_BACKBIASRESPONSETIME_DFLT;

	ret = radeon_atom_get_clock_dividers(rdev, COMPUTE_ENGINE_PLL_PARAM,
					     0, false, &dividers);
	if (ret)
		pi->ref_div = dividers.ref_div + 1;
	else
		pi->ref_div = R600_REFERENCEDIVIDER_DFLT;

	pi->mclk_strobe_mode_threshold = 40000;
	pi->mclk_edc_enable_threshold = 40000;
	eg_pi->mclk_edc_wr_enable_threshold = 40000;

	pi->rlp = RV770_RLP_DFLT;
	pi->rmp = RV770_RMP_DFLT;
	pi->lhp = RV770_LHP_DFLT;
	pi->lmp = RV770_LMP_DFLT;

	pi->voltage_control =
		radeon_atom_is_voltage_gpio(rdev, SET_VOLTAGE_TYPE_ASIC_VDDC, 0);

	pi->mvdd_control =
		radeon_atom_is_voltage_gpio(rdev, SET_VOLTAGE_TYPE_ASIC_MVDDC, 0);

	eg_pi->vddci_control =
		radeon_atom_is_voltage_gpio(rdev, SET_VOLTAGE_TYPE_ASIC_VDDCI, 0);

	rv770_get_engine_memory_ss(rdev);

	pi->asi = RV770_ASI_DFLT;
	pi->pasi = CYPRESS_HASI_DFLT;
	pi->vrc = CYPRESS_VRC_DFLT;

	pi->power_gating = false;

	if ((rdev->family == CHIP_CYPRESS) ||
	    (rdev->family == CHIP_HEMLOCK))
		pi->gfx_clock_gating = false;
	else
		pi->gfx_clock_gating = true;

	pi->mg_clock_gating = true;
	pi->mgcgtssm = true;
	eg_pi->ls_clock_gating = false;
	eg_pi->sclk_deep_sleep = false;

	pi->dynamic_pcie_gen2 = true;

	if (rdev->pm.int_thermal_type != THERMAL_TYPE_NONE)
		pi->thermal_protection = true;
	else
		pi->thermal_protection = false;

	pi->display_gap = true;

	if (rdev->flags & RADEON_IS_MOBILITY)
		pi->dcodt = true;
	else
		pi->dcodt = false;

	pi->ulps = true;

	eg_pi->dynamic_ac_timing = true;
	eg_pi->abm = true;
	eg_pi->mcls = true;
	eg_pi->light_sleep = true;
	eg_pi->memory_transition = true;
#if defined(CONFIG_ACPI)
	eg_pi->pcie_performance_request =
		radeon_acpi_is_pcie_performance_request_supported(rdev);
#else
	eg_pi->pcie_performance_request = false;
#endif

	if ((rdev->family == CHIP_CYPRESS) ||
	    (rdev->family == CHIP_HEMLOCK) ||
	    (rdev->family == CHIP_JUNIPER))
		eg_pi->dll_default_on = true;
	else
		eg_pi->dll_default_on = false;

	eg_pi->sclk_deep_sleep = false;
	pi->mclk_stutter_mode_threshold = 0;

	pi->sram_end = SMC_RAM_END;

	return 0;
}

void cypress_dpm_fini(struct radeon_device *rdev)
{
	int i;

	for (i = 0; i < rdev->pm.dpm.num_ps; i++) {
		kfree(rdev->pm.dpm.ps[i].ps_priv);
	}
	kfree(rdev->pm.dpm.ps);
	kfree(rdev->pm.dpm.priv);
}

bool cypress_dpm_vblank_too_short(struct radeon_device *rdev)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	u32 vblank_time = r600_dpm_get_vblank_time(rdev);
	/* we never hit the non-gddr5 limit so disable it */
	u32 switch_limit = pi->mem_gddr5 ? 450 : 0;

	if (vblank_time < switch_limit)
		return true;
	else
		return false;
}