/*
 * SPDX-License-Identifier: GPL-2.0
 * Copyright (c) 2018, The Linux Foundation
 */

#include <dt-bindings/clock/qcom,dsi-phy-28nm.h>
#include <linux/bitfield.h>
#include <linux/clk.h>
#include <linux/clk-provider.h>
#include <linux/iopoll.h>

#include "dsi_phy.h"
#include "dsi.xml.h"
#include "dsi_phy_7nm.xml.h"

/*
 * DSI PLL 7nm - clock diagram (eg: DSI0): TODO: updated CPHY diagram
 *
 *           dsi0_pll_out_div_clk  dsi0_pll_bit_clk
 *                              |                |
 *                              |                |
 *                 +---------+  |  +----------+  |  +----+
 *  dsi0vco_clk ---| out_div |--o--| divl_3_0 |--o--| /8 |-- dsi0_phy_pll_out_byteclk
 *                 +---------+  |  +----------+  |  +----+
 *                              |                |
 *                              |                |          dsi0_pll_by_2_bit_clk
 *                              |                |           |
 *                              |                |  +----+   |  |\   dsi0_pclk_mux
 *                              |                |--| /2 |---o--| \    |
 *                              |                |  +----+      |  \   |  +---------+
 *                              |                ---------------|   |--o--| div_7_4 |-- dsi0_phy_pll_out_dsiclk
 *                              |-------------------------------|   /     +---------+
 *                              |          +-----+              |  /
 *                              -----------| /4? |--o-----------|/
 *                                         +-----+  |           |
 *                                                   |          |dsiclk_sel
 *                                                   |
 *                                                   dsi0_pll_post_out_div_clk
 */

#define VCO_REF_CLK_RATE	19200000
#define FRAC_BITS		18

/* Hardware is pre V4.1 */
#define DSI_PHY_7NM_QUIRK_PRE_V4_1	BIT(0)
/* Hardware is V4.1 */
#define DSI_PHY_7NM_QUIRK_V4_1		BIT(1)
/* Hardware is V4.2 */
#define DSI_PHY_7NM_QUIRK_V4_2		BIT(2)
/* Hardware is V4.3 */
#define DSI_PHY_7NM_QUIRK_V4_3		BIT(3)
/* Hardware is V5.2 */
#define DSI_PHY_7NM_QUIRK_V5_2		BIT(4)
/* Hardware is V7.0 */
#define DSI_PHY_7NM_QUIRK_V7_0		BIT(5)

struct dsi_pll_config {
	bool enable_ssc;
	bool ssc_center;
	u32 ssc_freq;
	u32 ssc_offset;
	u32 ssc_adj_per;

	/* out */
	u32 decimal_div_start;
	u32 frac_div_start;
	u32 pll_clock_inverters;
	u32 ssc_stepsize;
	u32 ssc_div_per;
};

struct pll_7nm_cached_state {
	unsigned long vco_rate;
	u8 bit_clk_div;
	u8 pix_clk_div;
	u8 pll_out_div;
	u8 pll_mux;
};

struct dsi_pll_7nm {
	struct clk_hw clk_hw;

	struct msm_dsi_phy *phy;

	u64 vco_current_rate;

	/* protects REG_DSI_7nm_PHY_CMN_CLK_CFG0 register */
	spinlock_t postdiv_lock;

	/* protects REG_DSI_7nm_PHY_CMN_CLK_CFG1 register */
	spinlock_t pclk_mux_lock;

	struct pll_7nm_cached_state cached_state;

	struct dsi_pll_7nm *slave;
};

#define to_pll_7nm(x)	container_of(x, struct dsi_pll_7nm, clk_hw)

/*
 * Global list of private DSI PLL struct pointers. We need this for bonded DSI
 * mode, where the master PLL's clk_ops needs to access the slave's private data
 */
static struct dsi_pll_7nm *pll_7nm_list[DSI_MAX];

static void dsi_pll_setup_config(struct dsi_pll_config *config)
{
	config->ssc_freq = 31500;
	config->ssc_offset = 4800;
	config->ssc_adj_per = 2;

	/* TODO: ssc enable */
	config->enable_ssc = false;
	config->ssc_center = 0;
}

static void dsi_pll_calc_dec_frac(struct dsi_pll_7nm *pll, struct dsi_pll_config *config)
{
	u64 fref = VCO_REF_CLK_RATE;
	u64 pll_freq;
	u64 divider;
	u64 dec, dec_multiple;
	u32 frac;
	u64 multiplier;

	pll_freq = pll->vco_current_rate;

	divider = fref * 2;

	multiplier = 1 << FRAC_BITS;
	dec_multiple = div_u64(pll_freq * multiplier, divider);
	dec = div_u64_rem(dec_multiple, multiplier, &frac);

	if (pll->phy->cfg->quirks & DSI_PHY_7NM_QUIRK_PRE_V4_1) {
		config->pll_clock_inverters = 0x28;
	} else if ((pll->phy->cfg->quirks & DSI_PHY_7NM_QUIRK_V7_0)) {
		if (pll_freq < 163000000ULL)
			config->pll_clock_inverters = 0xa0;
		else if (pll_freq < 175000000ULL)
			config->pll_clock_inverters = 0x20;
		else if (pll_freq < 325000000ULL)
			config->pll_clock_inverters = 0xa0;
		else if (pll_freq < 350000000ULL)
			config->pll_clock_inverters = 0x20;
		else if (pll_freq < 650000000ULL)
			config->pll_clock_inverters = 0xa0;
		else if (pll_freq < 700000000ULL)
			config->pll_clock_inverters = 0x20;
		else if (pll_freq < 1300000000ULL)
			config->pll_clock_inverters = 0xa0;
		else if (pll_freq < 2500000000ULL)
			config->pll_clock_inverters = 0x20;
		else if (pll_freq < 4000000000ULL)
			config->pll_clock_inverters = 0x00;
		else
			config->pll_clock_inverters = 0x40;
	} else if ((pll->phy->cfg->quirks & DSI_PHY_7NM_QUIRK_V5_2)) {
		if (pll_freq <= 1300000000ULL)
			config->pll_clock_inverters = 0xa0;
		else if (pll_freq <= 2500000000ULL)
			config->pll_clock_inverters = 0x20;
		else if (pll_freq <= 4000000000ULL)
			config->pll_clock_inverters = 0x00;
		else
			config->pll_clock_inverters = 0x40;
	} else if (pll->phy->cfg->quirks & DSI_PHY_7NM_QUIRK_V4_1) {
		if (pll_freq <= 1000000000ULL)
			config->pll_clock_inverters = 0xa0;
		else if (pll_freq <= 2500000000ULL)
			config->pll_clock_inverters = 0x20;
		else if (pll_freq <= 3020000000ULL)
			config->pll_clock_inverters = 0x00;
		else
			config->pll_clock_inverters = 0x40;
	} else {
		/* 4.2, 4.3 */
		if (pll_freq <= 1000000000ULL)
			config->pll_clock_inverters = 0xa0;
		else if (pll_freq <= 2500000000ULL)
			config->pll_clock_inverters = 0x20;
		else if (pll_freq <= 3500000000ULL)
			config->pll_clock_inverters = 0x00;
		else
			config->pll_clock_inverters = 0x40;
	}

	config->decimal_div_start = dec;
	config->frac_div_start = frac;
}

#define SSC_CENTER	BIT(0)
#define SSC_EN		BIT(1)

static void dsi_pll_calc_ssc(struct dsi_pll_7nm *pll, struct dsi_pll_config *config)
{
	u32 ssc_per;
	u32 ssc_mod;
	u64 ssc_step_size;
	u64 frac;

	if (!config->enable_ssc) {
		DBG("SSC not enabled\n");
		return;
	}

	ssc_per = DIV_ROUND_CLOSEST(VCO_REF_CLK_RATE, config->ssc_freq) / 2 - 1;
	ssc_mod = (ssc_per + 1) % (config->ssc_adj_per + 1);
	ssc_per -= ssc_mod;

	frac = config->frac_div_start;
	ssc_step_size = config->decimal_div_start;
	ssc_step_size *= (1 << FRAC_BITS);
	ssc_step_size += frac;
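	/*
	 * Note: ssc_offset appears to be a ppm value (hence the final
	 * divide by 1000000 below); the intermediate result stays in the
	 * same 2^FRAC_BITS fixed-point scale as the decimal/fractional
	 * divider, so the step size can be written straight into the
	 * SSC_STEPSIZE registers.
	 */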
	ssc_step_size *= config->ssc_offset;
	ssc_step_size *= (config->ssc_adj_per + 1);
	ssc_step_size = div_u64(ssc_step_size, (ssc_per + 1));
	ssc_step_size = DIV_ROUND_CLOSEST_ULL(ssc_step_size, 1000000);

	config->ssc_div_per = ssc_per;
	config->ssc_stepsize = ssc_step_size;

	pr_debug("SSC: Dec:%d, frac:%llu, frac_bits:%d\n",
		 config->decimal_div_start, frac, FRAC_BITS);
	pr_debug("SSC: div_per:0x%X, stepsize:0x%X, adjper:0x%X\n",
		 ssc_per, (u32)ssc_step_size, config->ssc_adj_per);
}

static void dsi_pll_ssc_commit(struct dsi_pll_7nm *pll, struct dsi_pll_config *config)
{
	void __iomem *base = pll->phy->pll_base;

	if (config->enable_ssc) {
		pr_debug("SSC is enabled\n");

		writel(config->ssc_stepsize & 0xff,
		       base + REG_DSI_7nm_PHY_PLL_SSC_STEPSIZE_LOW_1);
		writel(config->ssc_stepsize >> 8,
		       base + REG_DSI_7nm_PHY_PLL_SSC_STEPSIZE_HIGH_1);
		writel(config->ssc_div_per & 0xff,
		       base + REG_DSI_7nm_PHY_PLL_SSC_DIV_PER_LOW_1);
		writel(config->ssc_div_per >> 8,
		       base + REG_DSI_7nm_PHY_PLL_SSC_DIV_PER_HIGH_1);
		writel(config->ssc_adj_per & 0xff,
		       base + REG_DSI_7nm_PHY_PLL_SSC_ADJPER_LOW_1);
		writel(config->ssc_adj_per >> 8,
		       base + REG_DSI_7nm_PHY_PLL_SSC_ADJPER_HIGH_1);
		writel(SSC_EN | (config->ssc_center ? SSC_CENTER : 0),
		       base + REG_DSI_7nm_PHY_PLL_SSC_CONTROL);
	}
}

static void dsi_pll_config_hzindep_reg(struct dsi_pll_7nm *pll)
{
	void __iomem *base = pll->phy->pll_base;
	u8 analog_controls_five_1 = 0x01, vco_config_1 = 0x00;

	if (!(pll->phy->cfg->quirks & DSI_PHY_7NM_QUIRK_PRE_V4_1))
		if (pll->vco_current_rate >= 3100000000ULL)
			analog_controls_five_1 = 0x03;

	if (pll->phy->cfg->quirks & DSI_PHY_7NM_QUIRK_V4_1) {
		if (pll->vco_current_rate < 1520000000ULL)
			vco_config_1 = 0x08;
		else if (pll->vco_current_rate < 2990000000ULL)
			vco_config_1 = 0x01;
	}

	if ((pll->phy->cfg->quirks & DSI_PHY_7NM_QUIRK_V4_2) ||
	    (pll->phy->cfg->quirks & DSI_PHY_7NM_QUIRK_V4_3)) {
		if (pll->vco_current_rate < 1520000000ULL)
			vco_config_1 = 0x08;
		else if (pll->vco_current_rate >= 2990000000ULL)
			vco_config_1 = 0x01;
	}

	if ((pll->phy->cfg->quirks & DSI_PHY_7NM_QUIRK_V5_2) ||
	    (pll->phy->cfg->quirks & DSI_PHY_7NM_QUIRK_V7_0)) {
		if (pll->vco_current_rate < 1557000000ULL)
			vco_config_1 = 0x08;
		else
			vco_config_1 = 0x01;
	}

	writel(analog_controls_five_1, base + REG_DSI_7nm_PHY_PLL_ANALOG_CONTROLS_FIVE_1);
	writel(vco_config_1, base + REG_DSI_7nm_PHY_PLL_VCO_CONFIG_1);
	writel(0x01, base + REG_DSI_7nm_PHY_PLL_ANALOG_CONTROLS_FIVE);
	writel(0x03, base + REG_DSI_7nm_PHY_PLL_ANALOG_CONTROLS_TWO);
	writel(0x00, base + REG_DSI_7nm_PHY_PLL_ANALOG_CONTROLS_THREE);
	writel(0x00, base + REG_DSI_7nm_PHY_PLL_DSM_DIVIDER);
	writel(0x4e, base + REG_DSI_7nm_PHY_PLL_FEEDBACK_DIVIDER);
	writel(0x40, base + REG_DSI_7nm_PHY_PLL_CALIBRATION_SETTINGS);
	writel(0xba, base + REG_DSI_7nm_PHY_PLL_BAND_SEL_CAL_SETTINGS_THREE);
	writel(0x0c, base + REG_DSI_7nm_PHY_PLL_FREQ_DETECT_SETTINGS_ONE);
	writel(0x00, base + REG_DSI_7nm_PHY_PLL_OUTDIV);
	writel(0x00, base + REG_DSI_7nm_PHY_PLL_CORE_OVERRIDE);
	writel(0x08, base + REG_DSI_7nm_PHY_PLL_PLL_DIGITAL_TIMERS_TWO);
	writel(0x0a, base + REG_DSI_7nm_PHY_PLL_PLL_PROP_GAIN_RATE_1);
	writel(0xc0, base + REG_DSI_7nm_PHY_PLL_PLL_BAND_SEL_RATE_1);
	writel(0x84, base + REG_DSI_7nm_PHY_PLL_PLL_INT_GAIN_IFILT_BAND_1);
	writel(0x82, base + REG_DSI_7nm_PHY_PLL_PLL_INT_GAIN_IFILT_BAND_1);
	writel(0x4c, base + REG_DSI_7nm_PHY_PLL_PLL_FL_INT_GAIN_PFILT_BAND_1);
	writel(0x80, base + REG_DSI_7nm_PHY_PLL_PLL_LOCK_OVERRIDE);
	writel(0x29, base + REG_DSI_7nm_PHY_PLL_PFILT);
	writel(0x2f, base + REG_DSI_7nm_PHY_PLL_PFILT);
	writel(0x2a, base + REG_DSI_7nm_PHY_PLL_IFILT);
	writel(!(pll->phy->cfg->quirks & DSI_PHY_7NM_QUIRK_PRE_V4_1) ? 0x3f : 0x22,
	       base + REG_DSI_7nm_PHY_PLL_IFILT);

	if (!(pll->phy->cfg->quirks & DSI_PHY_7NM_QUIRK_PRE_V4_1)) {
		writel(0x22, base + REG_DSI_7nm_PHY_PLL_PERF_OPTIMIZE);
		if (pll->slave)
			writel(0x22, pll->slave->phy->pll_base + REG_DSI_7nm_PHY_PLL_PERF_OPTIMIZE);
	}
}

static void dsi_pll_commit(struct dsi_pll_7nm *pll, struct dsi_pll_config *config)
{
	void __iomem *base = pll->phy->pll_base;

	writel(0x12, base + REG_DSI_7nm_PHY_PLL_CORE_INPUT_OVERRIDE);
	writel(config->decimal_div_start,
	       base + REG_DSI_7nm_PHY_PLL_DECIMAL_DIV_START_1);
	writel(config->frac_div_start & 0xff,
	       base + REG_DSI_7nm_PHY_PLL_FRAC_DIV_START_LOW_1);
	writel((config->frac_div_start & 0xff00) >> 8,
	       base + REG_DSI_7nm_PHY_PLL_FRAC_DIV_START_MID_1);
	writel((config->frac_div_start & 0x30000) >> 16,
	       base + REG_DSI_7nm_PHY_PLL_FRAC_DIV_START_HIGH_1);
	writel(0x40, base + REG_DSI_7nm_PHY_PLL_PLL_LOCKDET_RATE_1);
	writel(0x06, base + REG_DSI_7nm_PHY_PLL_PLL_LOCK_DELAY);
	writel(pll->phy->cphy_mode ? 0x00 : 0x10,
	       base + REG_DSI_7nm_PHY_PLL_CMODE_1);
	writel(config->pll_clock_inverters,
	       base + REG_DSI_7nm_PHY_PLL_CLOCK_INVERTERS_1);
}

static int dsi_pll_7nm_vco_set_rate(struct clk_hw *hw, unsigned long rate,
				    unsigned long parent_rate)
{
	struct dsi_pll_7nm *pll_7nm = to_pll_7nm(hw);
	struct dsi_pll_config config;

	DBG("DSI PLL%d rate=%lu, parent's=%lu", pll_7nm->phy->id, rate,
	    parent_rate);

	pll_7nm->vco_current_rate = rate;

	dsi_pll_setup_config(&config);

	dsi_pll_calc_dec_frac(pll_7nm, &config);

	dsi_pll_calc_ssc(pll_7nm, &config);

	dsi_pll_commit(pll_7nm, &config);

	dsi_pll_config_hzindep_reg(pll_7nm);

	dsi_pll_ssc_commit(pll_7nm, &config);

	/* flush, ensure all register writes are done */
	wmb();

	return 0;
}

static int dsi_pll_7nm_lock_status(struct dsi_pll_7nm *pll)
{
	int rc;
	u32 status = 0;
	u32 const delay_us = 100;
	u32 const timeout_us = 5000;

	rc = readl_poll_timeout_atomic(pll->phy->pll_base +
				       REG_DSI_7nm_PHY_PLL_COMMON_STATUS_ONE,
				       status,
				       ((status & BIT(0)) > 0),
				       delay_us,
				       timeout_us);
	if (rc)
		pr_err("DSI PLL(%d) lock failed, status=0x%08x\n",
		       pll->phy->id, status);

	return rc;
}

static void dsi_pll_disable_pll_bias(struct dsi_pll_7nm *pll)
{
	u32 data = readl(pll->phy->base + REG_DSI_7nm_PHY_CMN_CTRL_0);

	writel(0, pll->phy->pll_base + REG_DSI_7nm_PHY_PLL_SYSTEM_MUXES);
	writel(data & ~BIT(5), pll->phy->base + REG_DSI_7nm_PHY_CMN_CTRL_0);
	ndelay(250);
}

static void dsi_pll_enable_pll_bias(struct dsi_pll_7nm *pll)
{
	u32 data = readl(pll->phy->base + REG_DSI_7nm_PHY_CMN_CTRL_0);

	writel(data | BIT(5), pll->phy->base + REG_DSI_7nm_PHY_CMN_CTRL_0);
	writel(0xc0, pll->phy->pll_base + REG_DSI_7nm_PHY_PLL_SYSTEM_MUXES);
	ndelay(250);
}

static void dsi_pll_cmn_clk_cfg0_write(struct dsi_pll_7nm *pll, u32 val)
{
	unsigned long flags;

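	/*
	 * CLK_CFG0 holds both the bit-clk (DIV_CTRL_3_0) and pix-clk
	 * (DIV_CTRL_7_4) dividers; postdiv_lock is also handed to the
	 * divider clk_hws registered on this register, so writes here
	 * stay serialized against the clock framework's own updates.
	 */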
	spin_lock_irqsave(&pll->postdiv_lock, flags);
	writel(val, pll->phy->base + REG_DSI_7nm_PHY_CMN_CLK_CFG0);
	spin_unlock_irqrestore(&pll->postdiv_lock, flags);
}

static void dsi_pll_cmn_clk_cfg1_update(struct dsi_pll_7nm *pll, u32 mask,
					u32 val)
{
	unsigned long flags;
	u32 data;

	spin_lock_irqsave(&pll->pclk_mux_lock, flags);
	data = readl(pll->phy->base + REG_DSI_7nm_PHY_CMN_CLK_CFG1);
	data &= ~mask;
	data |= val & mask;

	writel(data, pll->phy->base + REG_DSI_7nm_PHY_CMN_CLK_CFG1);
	spin_unlock_irqrestore(&pll->pclk_mux_lock, flags);
}

static void dsi_pll_disable_global_clk(struct dsi_pll_7nm *pll)
{
	dsi_pll_cmn_clk_cfg1_update(pll, DSI_7nm_PHY_CMN_CLK_CFG1_CLK_EN, 0);
}

static void dsi_pll_enable_global_clk(struct dsi_pll_7nm *pll)
{
	u32 cfg_1 = DSI_7nm_PHY_CMN_CLK_CFG1_CLK_EN | DSI_7nm_PHY_CMN_CLK_CFG1_CLK_EN_SEL;

	writel(0x04, pll->phy->base + REG_DSI_7nm_PHY_CMN_CTRL_3);
	dsi_pll_cmn_clk_cfg1_update(pll, cfg_1, cfg_1);
}

static void dsi_pll_phy_dig_reset(struct dsi_pll_7nm *pll)
{
	/*
	 * Reset the PHY digital domain. This would be needed when
	 * coming out of a CX or analog rail power collapse while
	 * ensuring that the pads maintain LP00 or LP11 state
	 */
	writel(BIT(0), pll->phy->base + REG_DSI_7nm_PHY_CMN_GLBL_DIGTOP_SPARE4);
	wmb(); /* Ensure that the reset is asserted */
	writel(0, pll->phy->base + REG_DSI_7nm_PHY_CMN_GLBL_DIGTOP_SPARE4);
	wmb(); /* Ensure that the reset is deasserted */
}

static int dsi_pll_7nm_vco_prepare(struct clk_hw *hw)
{
	struct dsi_pll_7nm *pll_7nm = to_pll_7nm(hw);
	int rc;

	dsi_pll_enable_pll_bias(pll_7nm);
	if (pll_7nm->slave)
		dsi_pll_enable_pll_bias(pll_7nm->slave);

	/* Start PLL */
	writel(BIT(0), pll_7nm->phy->base + REG_DSI_7nm_PHY_CMN_PLL_CNTRL);

	/*
	 * ensure all PLL configurations are written prior to checking
	 * for PLL lock.
	 */
	wmb();

	/* Check for PLL lock */
	rc = dsi_pll_7nm_lock_status(pll_7nm);
	if (rc) {
		pr_err("PLL(%d) lock failed\n", pll_7nm->phy->id);
		goto error;
	}

	pll_7nm->phy->pll_on = true;

	/*
	 * assert power on reset for PHY digital in case the PLL is
	 * enabled after CX or analog domain power collapse. This needs
	 * to be done before enabling the global clk.
	 */
	dsi_pll_phy_dig_reset(pll_7nm);
	if (pll_7nm->slave)
		dsi_pll_phy_dig_reset(pll_7nm->slave);

	dsi_pll_enable_global_clk(pll_7nm);
	if (pll_7nm->slave)
		dsi_pll_enable_global_clk(pll_7nm->slave);

error:
	return rc;
}

static void dsi_pll_disable_sub(struct dsi_pll_7nm *pll)
{
	writel(0, pll->phy->base + REG_DSI_7nm_PHY_CMN_RBUF_CTRL);
	dsi_pll_disable_pll_bias(pll);
}

static void dsi_pll_7nm_vco_unprepare(struct clk_hw *hw)
{
	struct dsi_pll_7nm *pll_7nm = to_pll_7nm(hw);

	/*
	 * To avoid any stray glitches while abruptly powering down the PLL
	 * make sure to gate the clock using the clock enable bit before
	 * powering down the PLL
	 */
	dsi_pll_disable_global_clk(pll_7nm);
	writel(0, pll_7nm->phy->base + REG_DSI_7nm_PHY_CMN_PLL_CNTRL);
	dsi_pll_disable_sub(pll_7nm);
	if (pll_7nm->slave) {
		dsi_pll_disable_global_clk(pll_7nm->slave);
		dsi_pll_disable_sub(pll_7nm->slave);
	}
	/* flush, ensure all register writes are done */
	wmb();
	pll_7nm->phy->pll_on = false;
}

static unsigned long dsi_pll_7nm_vco_recalc_rate(struct clk_hw *hw,
						 unsigned long parent_rate)
{
	struct dsi_pll_7nm *pll_7nm = to_pll_7nm(hw);
	void __iomem *base = pll_7nm->phy->pll_base;
	u64 ref_clk = VCO_REF_CLK_RATE;
	u64 vco_rate = 0x0;
	u64 multiplier;
	u32 frac;
	u32 dec;
	u64 pll_freq, tmp64;

	dec = readl(base + REG_DSI_7nm_PHY_PLL_DECIMAL_DIV_START_1);
	dec &= 0xff;

	frac = readl(base + REG_DSI_7nm_PHY_PLL_FRAC_DIV_START_LOW_1);
	frac |= ((readl(base + REG_DSI_7nm_PHY_PLL_FRAC_DIV_START_MID_1) &
		  0xff) << 8);
	frac |= ((readl(base + REG_DSI_7nm_PHY_PLL_FRAC_DIV_START_HIGH_1) &
		  0x3) << 16);

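	/*
	 * Reverse of dsi_pll_calc_dec_frac():
	 * vco_rate = 2 * ref_clk * (dec + frac / 2^FRAC_BITS)
	 */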
	/*
	 * TODO:
	 *	1. Assumes prescaler is disabled
	 */
	multiplier = 1 << FRAC_BITS;
	pll_freq = dec * (ref_clk * 2);
	tmp64 = (ref_clk * 2 * frac);
	pll_freq += div_u64(tmp64, multiplier);

	vco_rate = pll_freq;
	pll_7nm->vco_current_rate = vco_rate;

	DBG("DSI PLL%d returning vco rate = %lu, dec = %x, frac = %x",
	    pll_7nm->phy->id, (unsigned long)vco_rate, dec, frac);

	return (unsigned long)vco_rate;
}

static long dsi_pll_7nm_clk_round_rate(struct clk_hw *hw,
				       unsigned long rate, unsigned long *parent_rate)
{
	struct dsi_pll_7nm *pll_7nm = to_pll_7nm(hw);

	if (rate < pll_7nm->phy->cfg->min_pll_rate)
		return pll_7nm->phy->cfg->min_pll_rate;
	else if (rate > pll_7nm->phy->cfg->max_pll_rate)
		return pll_7nm->phy->cfg->max_pll_rate;
	else
		return rate;
}

static const struct clk_ops clk_ops_dsi_pll_7nm_vco = {
	.round_rate = dsi_pll_7nm_clk_round_rate,
	.set_rate = dsi_pll_7nm_vco_set_rate,
	.recalc_rate = dsi_pll_7nm_vco_recalc_rate,
	.prepare = dsi_pll_7nm_vco_prepare,
	.unprepare = dsi_pll_7nm_vco_unprepare,
};

/*
 * PLL Callbacks
 */

static void dsi_7nm_pll_save_state(struct msm_dsi_phy *phy)
{
	struct dsi_pll_7nm *pll_7nm = to_pll_7nm(phy->vco_hw);
	struct pll_7nm_cached_state *cached = &pll_7nm->cached_state;
	void __iomem *phy_base = pll_7nm->phy->base;
	u32 cmn_clk_cfg0, cmn_clk_cfg1;

	cached->pll_out_div = readl(pll_7nm->phy->pll_base +
				    REG_DSI_7nm_PHY_PLL_PLL_OUTDIV_RATE);
	cached->pll_out_div &= 0x3;

	cmn_clk_cfg0 = readl(phy_base + REG_DSI_7nm_PHY_CMN_CLK_CFG0);
	cached->bit_clk_div = FIELD_GET(DSI_7nm_PHY_CMN_CLK_CFG0_DIV_CTRL_3_0__MASK, cmn_clk_cfg0);
	cached->pix_clk_div = FIELD_GET(DSI_7nm_PHY_CMN_CLK_CFG0_DIV_CTRL_7_4__MASK, cmn_clk_cfg0);

	cmn_clk_cfg1 = readl(phy_base + REG_DSI_7nm_PHY_CMN_CLK_CFG1);
	cached->pll_mux = FIELD_GET(DSI_7nm_PHY_CMN_CLK_CFG1_DSICLK_SEL__MASK, cmn_clk_cfg1);

	DBG("DSI PLL%d outdiv %x bit_clk_div %x pix_clk_div %x pll_mux %x",
	    pll_7nm->phy->id, cached->pll_out_div, cached->bit_clk_div,
	    cached->pix_clk_div, cached->pll_mux);
}

static int dsi_7nm_pll_restore_state(struct msm_dsi_phy *phy)
{
	struct dsi_pll_7nm *pll_7nm = to_pll_7nm(phy->vco_hw);
	struct pll_7nm_cached_state *cached = &pll_7nm->cached_state;
	u32 val;
	int ret;

	val = readl(pll_7nm->phy->pll_base + REG_DSI_7nm_PHY_PLL_PLL_OUTDIV_RATE);
	val &= ~0x3;
	val |= cached->pll_out_div;
	writel(val, pll_7nm->phy->pll_base + REG_DSI_7nm_PHY_PLL_PLL_OUTDIV_RATE);

	dsi_pll_cmn_clk_cfg0_write(pll_7nm,
				   DSI_7nm_PHY_CMN_CLK_CFG0_DIV_CTRL_3_0(cached->bit_clk_div) |
				   DSI_7nm_PHY_CMN_CLK_CFG0_DIV_CTRL_7_4(cached->pix_clk_div));
	dsi_pll_cmn_clk_cfg1_update(pll_7nm, DSI_7nm_PHY_CMN_CLK_CFG1_DSICLK_SEL__MASK,
				    cached->pll_mux);

	ret = dsi_pll_7nm_vco_set_rate(phy->vco_hw,
				       pll_7nm->vco_current_rate,
				       VCO_REF_CLK_RATE);
	if (ret) {
		DRM_DEV_ERROR(&pll_7nm->phy->pdev->dev,
			      "restore vco rate failed. ret=%d\n", ret);
		return ret;
	}

	DBG("DSI PLL%d", pll_7nm->phy->id);

	return 0;
}

static int dsi_7nm_set_usecase(struct msm_dsi_phy *phy)
{
	struct dsi_pll_7nm *pll_7nm = to_pll_7nm(phy->vco_hw);
	void __iomem *base = phy->base;
	u32 data = 0x0; /* internal PLL */

	DBG("DSI PLL%d", pll_7nm->phy->id);

	switch (phy->usecase) {
	case MSM_DSI_PHY_STANDALONE:
		break;
	case MSM_DSI_PHY_MASTER:
		pll_7nm->slave = pll_7nm_list[(pll_7nm->phy->id + 1) % DSI_MAX];
		/* v7.0: Enable ATB_EN0 and alternate clock output to external phy */
		if (phy->cfg->quirks & DSI_PHY_7NM_QUIRK_V7_0)
			writel(0x07, base + REG_DSI_7nm_PHY_CMN_CTRL_5);
		break;
	case MSM_DSI_PHY_SLAVE:
		data = 0x1; /* external PLL */
		break;
	default:
		return -EINVAL;
	}

	/* set PLL src */
	dsi_pll_cmn_clk_cfg1_update(pll_7nm, DSI_7nm_PHY_CMN_CLK_CFG1_BITCLK_SEL__MASK,
				    DSI_7nm_PHY_CMN_CLK_CFG1_BITCLK_SEL(data));

	return 0;
}

/*
 * The post dividers and mux clocks are created using the standard divider and
 * mux API. Unlike the 14nm PHY, the slave PLL doesn't need its dividers/mux
 * state to follow the master PLL's divider/mux state. Therefore, we don't
 * require special clock ops that also configure the slave PLL registers
 */
static int pll_7nm_register(struct dsi_pll_7nm *pll_7nm, struct clk_hw **provided_clocks)
{
	char clk_name[32];
	struct clk_init_data vco_init = {
		.parent_data = &(const struct clk_parent_data) {
			.fw_name = "ref",
		},
		.num_parents = 1,
		.name = clk_name,
		.flags = CLK_IGNORE_UNUSED,
		.ops = &clk_ops_dsi_pll_7nm_vco,
	};
	struct device *dev = &pll_7nm->phy->pdev->dev;
	struct clk_hw *hw, *pll_out_div, *pll_bit, *pll_by_2_bit;
	struct clk_hw *pll_post_out_div, *phy_pll_out_dsi_parent;
	int ret;

	DBG("DSI%d", pll_7nm->phy->id);

	snprintf(clk_name, sizeof(clk_name), "dsi%dvco_clk", pll_7nm->phy->id);
	pll_7nm->clk_hw.init = &vco_init;

	ret = devm_clk_hw_register(dev, &pll_7nm->clk_hw);
	if (ret)
		return ret;

	snprintf(clk_name, sizeof(clk_name), "dsi%d_pll_out_div_clk", pll_7nm->phy->id);

	pll_out_div = devm_clk_hw_register_divider_parent_hw(dev, clk_name,
			&pll_7nm->clk_hw, CLK_SET_RATE_PARENT,
			pll_7nm->phy->pll_base +
				REG_DSI_7nm_PHY_PLL_PLL_OUTDIV_RATE,
			0, 2, CLK_DIVIDER_POWER_OF_TWO, NULL);
	if (IS_ERR(pll_out_div)) {
		ret = PTR_ERR(pll_out_div);
		goto fail;
	}

	snprintf(clk_name, sizeof(clk_name), "dsi%d_pll_bit_clk", pll_7nm->phy->id);

	/* BIT CLK: DIV_CTRL_3_0 */
	pll_bit = devm_clk_hw_register_divider_parent_hw(dev, clk_name,
			pll_out_div, CLK_SET_RATE_PARENT,
			pll_7nm->phy->base + REG_DSI_7nm_PHY_CMN_CLK_CFG0,
			0, 4, CLK_DIVIDER_ONE_BASED, &pll_7nm->postdiv_lock);
	if (IS_ERR(pll_bit)) {
		ret = PTR_ERR(pll_bit);
		goto fail;
	}

	snprintf(clk_name, sizeof(clk_name), "dsi%d_phy_pll_out_byteclk", pll_7nm->phy->id);

	/* DSI Byte clock = VCO_CLK / OUT_DIV / BIT_DIV / 8 */
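	/*
	 * In C-PHY mode the fixed divider below is 7 rather than 8,
	 * matching C-PHY's 16-bit-to-7-symbol line coding (the "bit"
	 * clock effectively runs at the symbol rate there).
	 */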
	hw = devm_clk_hw_register_fixed_factor_parent_hw(dev, clk_name,
			pll_bit, CLK_SET_RATE_PARENT, 1,
			pll_7nm->phy->cphy_mode ? 7 : 8);
	if (IS_ERR(hw)) {
		ret = PTR_ERR(hw);
		goto fail;
	}

	provided_clocks[DSI_BYTE_PLL_CLK] = hw;

	snprintf(clk_name, sizeof(clk_name), "dsi%d_pll_by_2_bit_clk", pll_7nm->phy->id);

	pll_by_2_bit = devm_clk_hw_register_fixed_factor_parent_hw(dev,
			clk_name, pll_bit, 0, 1, 2);
	if (IS_ERR(pll_by_2_bit)) {
		ret = PTR_ERR(pll_by_2_bit);
		goto fail;
	}

	snprintf(clk_name, sizeof(clk_name), "dsi%d_pll_post_out_div_clk", pll_7nm->phy->id);

	if (pll_7nm->phy->cphy_mode)
		pll_post_out_div = devm_clk_hw_register_fixed_factor_parent_hw(
				dev, clk_name, pll_out_div, 0, 2, 7);
	else
		pll_post_out_div = devm_clk_hw_register_fixed_factor_parent_hw(
				dev, clk_name, pll_out_div, 0, 1, 4);
	if (IS_ERR(pll_post_out_div)) {
		ret = PTR_ERR(pll_post_out_div);
		goto fail;
	}

	/*
	 * in CPHY mode, pclk_mux will always have post_out_div as its parent,
	 * so don't register a pclk_mux clock and just use post_out_div instead
	 */
	if (pll_7nm->phy->cphy_mode) {
		dsi_pll_cmn_clk_cfg1_update(pll_7nm,
					    DSI_7nm_PHY_CMN_CLK_CFG1_DSICLK_SEL__MASK,
					    DSI_7nm_PHY_CMN_CLK_CFG1_DSICLK_SEL(3));
		phy_pll_out_dsi_parent = pll_post_out_div;
	} else {
		snprintf(clk_name, sizeof(clk_name), "dsi%d_pclk_mux", pll_7nm->phy->id);

		hw = devm_clk_hw_register_mux_parent_hws(dev, clk_name,
				((const struct clk_hw *[]){
					pll_bit,
					pll_by_2_bit,
				}), 2, 0, pll_7nm->phy->base +
					REG_DSI_7nm_PHY_CMN_CLK_CFG1,
				0, 1, 0, &pll_7nm->pclk_mux_lock);
		if (IS_ERR(hw)) {
			ret = PTR_ERR(hw);
			goto fail;
		}

		phy_pll_out_dsi_parent = hw;
	}

	snprintf(clk_name, sizeof(clk_name), "dsi%d_phy_pll_out_dsiclk", pll_7nm->phy->id);

	/* PIX CLK DIV : DIV_CTRL_7_4 */
	hw = devm_clk_hw_register_divider_parent_hw(dev, clk_name,
			phy_pll_out_dsi_parent, 0,
			pll_7nm->phy->base + REG_DSI_7nm_PHY_CMN_CLK_CFG0,
			4, 4, CLK_DIVIDER_ONE_BASED, &pll_7nm->postdiv_lock);
	if (IS_ERR(hw)) {
		ret = PTR_ERR(hw);
		goto fail;
	}

	provided_clocks[DSI_PIXEL_PLL_CLK] = hw;

	return 0;

fail:

	return ret;
}

static int dsi_pll_7nm_init(struct msm_dsi_phy *phy)
{
	struct platform_device *pdev = phy->pdev;
	struct dsi_pll_7nm *pll_7nm;
	int ret;

	pll_7nm = devm_kzalloc(&pdev->dev, sizeof(*pll_7nm), GFP_KERNEL);
	if (!pll_7nm)
		return -ENOMEM;

	DBG("DSI PLL%d", phy->id);

	pll_7nm_list[phy->id] = pll_7nm;

	spin_lock_init(&pll_7nm->postdiv_lock);
	spin_lock_init(&pll_7nm->pclk_mux_lock);

	pll_7nm->phy = phy;

	ret = pll_7nm_register(pll_7nm, phy->provided_clocks->hws);
	if (ret) {
		DRM_DEV_ERROR(&pdev->dev, "failed to register PLL: %d\n", ret);
		return ret;
	}

	phy->vco_hw = &pll_7nm->clk_hw;

	/* TODO: Remove this when we have proper display handover support */
	msm_dsi_phy_pll_save_state(phy);

	return 0;
}

static int dsi_phy_hw_v4_0_is_pll_on(struct msm_dsi_phy *phy)
{
	void __iomem *base = phy->base;
	u32 data = 0;

	data = readl(base + REG_DSI_7nm_PHY_CMN_PLL_CNTRL);
	mb(); /* make sure read happened */

	return (data & BIT(0));
}

static void dsi_phy_hw_v4_0_config_lpcdrx(struct msm_dsi_phy *phy, bool enable)
{
	void __iomem *lane_base = phy->lane_base;
	int phy_lane_0 = 0;	/* TODO: Support all lane swap configs */

	/*
	 * LPRX and CDRX need to be enabled only for the physical data lane
	 * corresponding to the logical data lane 0
	 */
	if (enable)
		writel(0x3, lane_base + REG_DSI_7nm_PHY_LN_LPRX_CTRL(phy_lane_0));
	else
		writel(0, lane_base + REG_DSI_7nm_PHY_LN_LPRX_CTRL(phy_lane_0));
}

static void dsi_phy_hw_v4_0_lane_settings(struct msm_dsi_phy *phy)
{
	int i;
	const u8 tx_dctrl_0[] = { 0x00, 0x00, 0x00, 0x04, 0x01 };
	const u8 tx_dctrl_1[] = { 0x40, 0x40, 0x40, 0x46, 0x41 };
	const u8 *tx_dctrl = tx_dctrl_0;
	void __iomem *lane_base = phy->lane_base;

	if (!(phy->cfg->quirks & DSI_PHY_7NM_QUIRK_PRE_V4_1))
		tx_dctrl = tx_dctrl_1;

	/* Strength ctrl settings */
	for (i = 0; i < 5; i++) {
		/*
		 * Disable LPRX and CDRX for all lanes. They will later be
		 * enabled only for the physical data lane corresponding
		 * to the logical data lane 0
		 */
		writel(0, lane_base + REG_DSI_7nm_PHY_LN_LPRX_CTRL(i));
		writel(0x0, lane_base + REG_DSI_7nm_PHY_LN_PIN_SWAP(i));
	}

	dsi_phy_hw_v4_0_config_lpcdrx(phy, true);

	/* other settings */
	for (i = 0; i < 5; i++) {
		writel(0x0, lane_base + REG_DSI_7nm_PHY_LN_CFG0(i));
		writel(0x0, lane_base + REG_DSI_7nm_PHY_LN_CFG1(i));
		writel(i == 4 ? 0x8a : 0xa, lane_base + REG_DSI_7nm_PHY_LN_CFG2(i));
		writel(tx_dctrl[i], lane_base + REG_DSI_7nm_PHY_LN_TX_DCTRL(i));
	}
}

static int dsi_7nm_phy_enable(struct msm_dsi_phy *phy,
			      struct msm_dsi_phy_clk_request *clk_req)
{
	int ret;
	u32 status;
	u32 const delay_us = 5;
	u32 const timeout_us = 1000;
	struct msm_dsi_dphy_timing *timing = &phy->timing;
	void __iomem *base = phy->base;
	bool less_than_1500_mhz;
	u32 vreg_ctrl_0, vreg_ctrl_1, lane_ctrl0;
	u32 glbl_pemph_ctrl_0;
	u32 glbl_str_swi_cal_sel_ctrl, glbl_hstx_str_ctrl_0;
	u32 glbl_rescode_top_ctrl, glbl_rescode_bot_ctrl;
	u32 data;

	DBG("");

	if (phy->cphy_mode)
		ret = msm_dsi_cphy_timing_calc_v4(timing, clk_req);
	else
		ret = msm_dsi_dphy_timing_calc_v4(timing, clk_req);
	if (ret) {
		DRM_DEV_ERROR(&phy->pdev->dev,
			      "%s: PHY timing calculation failed\n", __func__);
		return -EINVAL;
	}

	if (dsi_phy_hw_v4_0_is_pll_on(phy))
		pr_warn("PLL turned on before configuring PHY\n");

	/* Request for REFGEN READY */
	if ((phy->cfg->quirks & DSI_PHY_7NM_QUIRK_V4_3) ||
	    (phy->cfg->quirks & DSI_PHY_7NM_QUIRK_V5_2) ||
	    (phy->cfg->quirks & DSI_PHY_7NM_QUIRK_V7_0)) {
		writel(0x1, phy->base + REG_DSI_7nm_PHY_CMN_GLBL_DIGTOP_SPARE10);
		udelay(500);
	}

	/* wait for REFGEN READY */
	ret = readl_poll_timeout_atomic(base + REG_DSI_7nm_PHY_CMN_PHY_STATUS,
					status, (status & BIT(0)),
					delay_us, timeout_us);
	if (ret) {
		pr_err("Ref gen not ready. Aborting\n");
		return -EINVAL;
	}

	/* TODO: CPHY enable path (this is for DPHY only) */

	/* Alter PHY configurations if data rate less than 1.5 GHz */
	less_than_1500_mhz = (clk_req->bitclk_rate <= 1500000000);

	glbl_str_swi_cal_sel_ctrl = 0x00;
	if (phy->cphy_mode) {
		vreg_ctrl_0 = 0x51;
		vreg_ctrl_1 = 0x55;
		glbl_hstx_str_ctrl_0 = 0x00;
		glbl_pemph_ctrl_0 = 0x11;
		lane_ctrl0 = 0x17;
	} else {
		vreg_ctrl_0 = less_than_1500_mhz ? 0x53 : 0x52;
		vreg_ctrl_1 = 0x5c;
		glbl_hstx_str_ctrl_0 = 0x88;
		glbl_pemph_ctrl_0 = 0x00;
		lane_ctrl0 = 0x1f;
	}

	if ((phy->cfg->quirks & DSI_PHY_7NM_QUIRK_V7_0)) {
		if (phy->cphy_mode) {
			/* TODO: different for second phy */
			vreg_ctrl_0 = 0x57;
			vreg_ctrl_1 = 0x41;
			glbl_rescode_top_ctrl = 0x3d;
			glbl_rescode_bot_ctrl = 0x38;
		} else {
			vreg_ctrl_0 = 0x56;
			vreg_ctrl_1 = 0x19;
			glbl_rescode_top_ctrl = less_than_1500_mhz ? 0x3c : 0x03;
			glbl_rescode_bot_ctrl = less_than_1500_mhz ? 0x38 : 0x3c;
		}
	} else if ((phy->cfg->quirks & DSI_PHY_7NM_QUIRK_V5_2)) {
		if (phy->cphy_mode) {
			vreg_ctrl_0 = 0x45;
			vreg_ctrl_1 = 0x41;
			glbl_rescode_top_ctrl = 0x00;
			glbl_rescode_bot_ctrl = 0x00;
		} else {
			vreg_ctrl_0 = 0x44;
			vreg_ctrl_1 = 0x19;
			glbl_rescode_top_ctrl = less_than_1500_mhz ? 0x3c : 0x03;
			glbl_rescode_bot_ctrl = less_than_1500_mhz ? 0x38 : 0x3c;
		}
	} else if ((phy->cfg->quirks & DSI_PHY_7NM_QUIRK_V4_3)) {
		if (phy->cphy_mode) {
			glbl_rescode_top_ctrl = less_than_1500_mhz ? 0x3d : 0x01;
			glbl_rescode_bot_ctrl = less_than_1500_mhz ? 0x38 : 0x3b;
		} else {
			glbl_rescode_top_ctrl = less_than_1500_mhz ? 0x3d : 0x01;
			glbl_rescode_bot_ctrl = less_than_1500_mhz ? 0x38 : 0x39;
		}
	} else if (phy->cfg->quirks & DSI_PHY_7NM_QUIRK_V4_2) {
		if (phy->cphy_mode) {
			glbl_rescode_top_ctrl = less_than_1500_mhz ? 0x3d : 0x01;
			glbl_rescode_bot_ctrl = less_than_1500_mhz ? 0x38 : 0x3b;
		} else {
			glbl_rescode_top_ctrl = less_than_1500_mhz ? 0x3c : 0x00;
			glbl_rescode_bot_ctrl = less_than_1500_mhz ? 0x38 : 0x39;
		}
	} else if (phy->cfg->quirks & DSI_PHY_7NM_QUIRK_V4_1) {
		if (phy->cphy_mode) {
			glbl_hstx_str_ctrl_0 = 0x88;
			glbl_rescode_top_ctrl = 0x00;
			glbl_rescode_bot_ctrl = 0x3c;
		} else {
			glbl_rescode_top_ctrl = less_than_1500_mhz ? 0x3d : 0x00;
			glbl_rescode_bot_ctrl = less_than_1500_mhz ? 0x39 : 0x3c;
		}
	} else {
		if (phy->cphy_mode) {
			glbl_str_swi_cal_sel_ctrl = 0x03;
			glbl_hstx_str_ctrl_0 = 0x66;
		} else {
			vreg_ctrl_0 = less_than_1500_mhz ? 0x5B : 0x59;
			glbl_str_swi_cal_sel_ctrl = less_than_1500_mhz ? 0x03 : 0x00;
			glbl_hstx_str_ctrl_0 = less_than_1500_mhz ? 0x66 : 0x88;
		}
		glbl_rescode_top_ctrl = 0x03;
		glbl_rescode_bot_ctrl = 0x3c;
	}

	/* de-assert digital and pll power down */
	data = BIT(6) | BIT(5);
	writel(data, base + REG_DSI_7nm_PHY_CMN_CTRL_0);

	/* Assert PLL core reset */
	writel(0x00, base + REG_DSI_7nm_PHY_CMN_PLL_CNTRL);

	/* turn off resync FIFO */
	writel(0x00, base + REG_DSI_7nm_PHY_CMN_RBUF_CTRL);

	/* program CMN_CTRL_4 for minor_ver 2 chipsets */
	if ((phy->cfg->quirks & DSI_PHY_7NM_QUIRK_V5_2) ||
	    (phy->cfg->quirks & DSI_PHY_7NM_QUIRK_V7_0) ||
	    (readl(base + REG_DSI_7nm_PHY_CMN_REVISION_ID0) & (0xf0)) == 0x20)
		writel(0x04, base + REG_DSI_7nm_PHY_CMN_CTRL_4);

	/* Configure PHY lane swap (TODO: we need to calculate this) */
	writel(0x21, base + REG_DSI_7nm_PHY_CMN_LANE_CFG0);
	writel(0x84, base + REG_DSI_7nm_PHY_CMN_LANE_CFG1);

	if (phy->cphy_mode)
		writel(BIT(6), base + REG_DSI_7nm_PHY_CMN_GLBL_CTRL);

	/* Enable LDO */
	writel(vreg_ctrl_0, base + REG_DSI_7nm_PHY_CMN_VREG_CTRL_0);
	writel(vreg_ctrl_1, base + REG_DSI_7nm_PHY_CMN_VREG_CTRL_1);

	writel(0x00, base + REG_DSI_7nm_PHY_CMN_CTRL_3);
	writel(glbl_str_swi_cal_sel_ctrl,
	       base + REG_DSI_7nm_PHY_CMN_GLBL_STR_SWI_CAL_SEL_CTRL);
	writel(glbl_hstx_str_ctrl_0,
	       base + REG_DSI_7nm_PHY_CMN_GLBL_HSTX_STR_CTRL_0);
	writel(glbl_pemph_ctrl_0,
	       base + REG_DSI_7nm_PHY_CMN_GLBL_PEMPH_CTRL_0);
	if (phy->cphy_mode)
		writel(0x01, base + REG_DSI_7nm_PHY_CMN_GLBL_PEMPH_CTRL_1);
	writel(glbl_rescode_top_ctrl,
	       base + REG_DSI_7nm_PHY_CMN_GLBL_RESCODE_OFFSET_TOP_CTRL);
	writel(glbl_rescode_bot_ctrl,
	       base + REG_DSI_7nm_PHY_CMN_GLBL_RESCODE_OFFSET_BOT_CTRL);
	writel(0x55, base + REG_DSI_7nm_PHY_CMN_GLBL_LPTX_STR_CTRL);

	/* Remove power down from all blocks */
	writel(0x7f, base + REG_DSI_7nm_PHY_CMN_CTRL_0);

	writel(lane_ctrl0, base + REG_DSI_7nm_PHY_CMN_LANE_CTRL0);

	/* Select full-rate mode */
	if (!phy->cphy_mode)
		writel(0x40, base + REG_DSI_7nm_PHY_CMN_CTRL_2);

	ret = dsi_7nm_set_usecase(phy);
	if (ret) {
		DRM_DEV_ERROR(&phy->pdev->dev, "%s: set pll usecase failed, %d\n",
			      __func__, ret);
		return ret;
	}

	/* DSI PHY timings */
	if (phy->cphy_mode) {
		writel(0x00, base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_0);
		writel(timing->hs_exit, base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_4);
		writel(timing->shared_timings.clk_pre,
		       base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_5);
		writel(timing->clk_prepare, base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_6);
		writel(timing->shared_timings.clk_post,
		       base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_7);
		writel(timing->hs_rqst, base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_8);
		writel(0x02, base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_9);
		writel(0x04, base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_10);
		writel(0x00, base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_11);
	} else {
		writel(0x00, base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_0);
		writel(timing->clk_zero, base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_1);
		writel(timing->clk_prepare, base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_2);
		writel(timing->clk_trail, base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_3);
		writel(timing->hs_exit, base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_4);
		writel(timing->hs_zero, base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_5);
		writel(timing->hs_prepare, base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_6);
		writel(timing->hs_trail, base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_7);
		writel(timing->hs_rqst, base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_8);
		writel(0x02, base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_9);
		writel(0x04, base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_10);
		writel(0x00, base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_11);
		writel(timing->shared_timings.clk_pre,
		       base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_12);
		writel(timing->shared_timings.clk_post,
		       base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_13);
	}

	/* DSI lane settings */
	dsi_phy_hw_v4_0_lane_settings(phy);

	DBG("DSI%d PHY enabled", phy->id);

	return 0;
}

static bool dsi_7nm_set_continuous_clock(struct msm_dsi_phy *phy, bool enable)
{
	void __iomem *base = phy->base;
	u32 data;

	data = readl(base + REG_DSI_7nm_PHY_CMN_LANE_CTRL1);
	if (enable)
		data |= BIT(5) | BIT(6);
	else
		data &= ~(BIT(5) | BIT(6));
	writel(data, base + REG_DSI_7nm_PHY_CMN_LANE_CTRL1);

	return enable;
}

static void dsi_7nm_phy_disable(struct msm_dsi_phy *phy)
{
	void __iomem *base = phy->base;
	u32 data;

	DBG("");

	if (dsi_phy_hw_v4_0_is_pll_on(phy))
		pr_warn("Turning OFF PHY while PLL is on\n");

	dsi_phy_hw_v4_0_config_lpcdrx(phy, false);

	/* Turn off REFGEN Vote */
	if ((phy->cfg->quirks & DSI_PHY_7NM_QUIRK_V4_3) ||
	    (phy->cfg->quirks & DSI_PHY_7NM_QUIRK_V5_2) ||
	    (phy->cfg->quirks & DSI_PHY_7NM_QUIRK_V7_0)) {
		writel(0x0, base + REG_DSI_7nm_PHY_CMN_GLBL_DIGTOP_SPARE10);
		wmb();
		/* Delay to ensure HW removes vote before PHY shut down */
		udelay(2);
	}

	data = readl(base + REG_DSI_7nm_PHY_CMN_CTRL_0);

	/* disable all lanes */
	data &= ~0x1F;
	writel(data, base + REG_DSI_7nm_PHY_CMN_CTRL_0);
	writel(0, base + REG_DSI_7nm_PHY_CMN_LANE_CTRL0);

	/* Turn off all PHY blocks */
	writel(0x00, base + REG_DSI_7nm_PHY_CMN_CTRL_0);
	/* make sure phy is turned off */
	wmb();

	DBG("DSI%d PHY disabled", phy->id);
}

static const struct regulator_bulk_data dsi_phy_7nm_36mA_regulators[] = {
	{ .supply = "vdds", .init_load_uA = 36000 },
};

static const struct regulator_bulk_data dsi_phy_7nm_37750uA_regulators[] = {
	{ .supply = "vdds", .init_load_uA = 37550 },
};

static const struct regulator_bulk_data dsi_phy_7nm_48000uA_regulators[] = {
	{ .supply = "vdds", .init_load_uA = 48000 },
};

static const struct regulator_bulk_data dsi_phy_7nm_98000uA_regulators[] = {
	{ .supply = "vdds", .init_load_uA = 98000 },
};

static const struct regulator_bulk_data dsi_phy_7nm_97800uA_regulators[] = {
	{ .supply = "vdds", .init_load_uA = 97800 },
};

static const struct regulator_bulk_data dsi_phy_7nm_98400uA_regulators[] = {
	{ .supply = "vdds", .init_load_uA = 98400 },
};

const struct msm_dsi_phy_cfg dsi_phy_7nm_cfgs = {
	.has_phy_lane = true,
	.regulator_data = dsi_phy_7nm_36mA_regulators,
	.num_regulators = ARRAY_SIZE(dsi_phy_7nm_36mA_regulators),
	.ops = {
		.enable = dsi_7nm_phy_enable,
		.disable = dsi_7nm_phy_disable,
		.pll_init = dsi_pll_7nm_init,
		.save_pll_state = dsi_7nm_pll_save_state,
		.restore_pll_state = dsi_7nm_pll_restore_state,
		.set_continuous_clock = dsi_7nm_set_continuous_clock,
	},
	.min_pll_rate = 600000000UL,
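	/*
	 * The 5 GHz cap does not fit in a 32-bit unsigned long, so 32-bit
	 * builds fall back to ULONG_MAX as the upper bound.
	 */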
#ifdef CONFIG_64BIT
	.max_pll_rate = 5000000000UL,
#else
	.max_pll_rate = ULONG_MAX,
#endif
	.io_start = { 0xae94400, 0xae96400 },
	.num_dsi_phy = 2,
	.quirks = DSI_PHY_7NM_QUIRK_V4_1,
};

const struct msm_dsi_phy_cfg dsi_phy_7nm_6375_cfgs = {
	.has_phy_lane = true,
	.ops = {
		.enable = dsi_7nm_phy_enable,
		.disable = dsi_7nm_phy_disable,
		.pll_init = dsi_pll_7nm_init,
		.save_pll_state = dsi_7nm_pll_save_state,
		.restore_pll_state = dsi_7nm_pll_restore_state,
	},
	.min_pll_rate = 600000000UL,
#ifdef CONFIG_64BIT
	.max_pll_rate = 5000000000ULL,
#else
	.max_pll_rate = ULONG_MAX,
#endif
	.io_start = { 0x5e94400 },
	.num_dsi_phy = 1,
	.quirks = DSI_PHY_7NM_QUIRK_V4_1,
};

const struct msm_dsi_phy_cfg dsi_phy_7nm_8150_cfgs = {
	.has_phy_lane = true,
	.regulator_data = dsi_phy_7nm_36mA_regulators,
	.num_regulators = ARRAY_SIZE(dsi_phy_7nm_36mA_regulators),
	.ops = {
		.enable = dsi_7nm_phy_enable,
		.disable = dsi_7nm_phy_disable,
		.pll_init = dsi_pll_7nm_init,
		.save_pll_state = dsi_7nm_pll_save_state,
		.restore_pll_state = dsi_7nm_pll_restore_state,
		.set_continuous_clock = dsi_7nm_set_continuous_clock,
	},
	.min_pll_rate = 1000000000UL,
	.max_pll_rate = 3500000000UL,
	.io_start = { 0xae94400, 0xae96400 },
	.num_dsi_phy = 2,
	.quirks = DSI_PHY_7NM_QUIRK_PRE_V4_1,
};

const struct msm_dsi_phy_cfg dsi_phy_7nm_7280_cfgs = {
	.has_phy_lane = true,
	.regulator_data = dsi_phy_7nm_37750uA_regulators,
	.num_regulators = ARRAY_SIZE(dsi_phy_7nm_37750uA_regulators),
	.ops = {
		.enable = dsi_7nm_phy_enable,
		.disable = dsi_7nm_phy_disable,
		.pll_init = dsi_pll_7nm_init,
		.save_pll_state = dsi_7nm_pll_save_state,
		.restore_pll_state = dsi_7nm_pll_restore_state,
	},
	.min_pll_rate = 600000000UL,
#ifdef CONFIG_64BIT
	.max_pll_rate = 5000000000ULL,
#else
	.max_pll_rate = ULONG_MAX,
#endif
	.io_start = { 0xae94400 },
	.num_dsi_phy = 1,
	.quirks = DSI_PHY_7NM_QUIRK_V4_1,
};

const struct msm_dsi_phy_cfg dsi_phy_5nm_8350_cfgs = {
	.has_phy_lane = true,
	.regulator_data = dsi_phy_7nm_37750uA_regulators,
	.num_regulators = ARRAY_SIZE(dsi_phy_7nm_37750uA_regulators),
	.ops = {
		.enable = dsi_7nm_phy_enable,
		.disable = dsi_7nm_phy_disable,
		.pll_init = dsi_pll_7nm_init,
		.save_pll_state = dsi_7nm_pll_save_state,
		.restore_pll_state = dsi_7nm_pll_restore_state,
		.set_continuous_clock = dsi_7nm_set_continuous_clock,
	},
	.min_pll_rate = 600000000UL,
#ifdef CONFIG_64BIT
	.max_pll_rate = 5000000000UL,
#else
	.max_pll_rate = ULONG_MAX,
#endif
	.io_start = { 0xae94400, 0xae96400 },
	.num_dsi_phy = 2,
	.quirks = DSI_PHY_7NM_QUIRK_V4_2,
};

const struct msm_dsi_phy_cfg dsi_phy_5nm_8450_cfgs = {
	.has_phy_lane = true,
	.regulator_data = dsi_phy_7nm_97800uA_regulators,
	.num_regulators = ARRAY_SIZE(dsi_phy_7nm_97800uA_regulators),
	.ops = {
		.enable = dsi_7nm_phy_enable,
		.disable = dsi_7nm_phy_disable,
		.pll_init = dsi_pll_7nm_init,
		.save_pll_state = dsi_7nm_pll_save_state,
		.restore_pll_state = dsi_7nm_pll_restore_state,
		.set_continuous_clock = dsi_7nm_set_continuous_clock,
	},
	.min_pll_rate = 600000000UL,
#ifdef CONFIG_64BIT
	.max_pll_rate = 5000000000UL,
#else
	.max_pll_rate = ULONG_MAX,
#endif
	.io_start = { 0xae94400, 0xae96400 },
	.num_dsi_phy = 2,
	.quirks = DSI_PHY_7NM_QUIRK_V4_3,
};

const struct msm_dsi_phy_cfg dsi_phy_5nm_8775p_cfgs = {
	.has_phy_lane = true,
	.regulator_data = dsi_phy_7nm_48000uA_regulators,
	.num_regulators = ARRAY_SIZE(dsi_phy_7nm_48000uA_regulators),
	.ops = {
		.enable = dsi_7nm_phy_enable,
		.disable = dsi_7nm_phy_disable,
		.pll_init = dsi_pll_7nm_init,
		.save_pll_state = dsi_7nm_pll_save_state,
		.restore_pll_state = dsi_7nm_pll_restore_state,
		.set_continuous_clock = dsi_7nm_set_continuous_clock,
	},
	.min_pll_rate = 600000000UL,
#ifdef CONFIG_64BIT
	.max_pll_rate = 5000000000UL,
#else
	.max_pll_rate = ULONG_MAX,
#endif
	.io_start = { 0xae94400, 0xae96400 },
	.num_dsi_phy = 2,
	.quirks = DSI_PHY_7NM_QUIRK_V4_2,
};

const struct msm_dsi_phy_cfg dsi_phy_5nm_sar2130p_cfgs = {
	.has_phy_lane = true,
	.regulator_data = dsi_phy_7nm_97800uA_regulators,
	.num_regulators = ARRAY_SIZE(dsi_phy_7nm_97800uA_regulators),
	.ops = {
		.enable = dsi_7nm_phy_enable,
		.disable = dsi_7nm_phy_disable,
		.pll_init = dsi_pll_7nm_init,
		.save_pll_state = dsi_7nm_pll_save_state,
		.restore_pll_state = dsi_7nm_pll_restore_state,
		.set_continuous_clock = dsi_7nm_set_continuous_clock,
	},
	.min_pll_rate = 600000000UL,
#ifdef CONFIG_64BIT
	.max_pll_rate = 5000000000UL,
#else
	.max_pll_rate = ULONG_MAX,
#endif
	.io_start = { 0xae95000, 0xae97000 },
	.num_dsi_phy = 2,
	.quirks = DSI_PHY_7NM_QUIRK_V5_2,
};

const struct msm_dsi_phy_cfg dsi_phy_4nm_8550_cfgs = {
	.has_phy_lane = true,
	.regulator_data = dsi_phy_7nm_98400uA_regulators,
	.num_regulators = ARRAY_SIZE(dsi_phy_7nm_98400uA_regulators),
	.ops = {
		.enable = dsi_7nm_phy_enable,
		.disable = dsi_7nm_phy_disable,
		.pll_init = dsi_pll_7nm_init,
		.save_pll_state = dsi_7nm_pll_save_state,
		.restore_pll_state = dsi_7nm_pll_restore_state,
		.set_continuous_clock = dsi_7nm_set_continuous_clock,
	},
	.min_pll_rate = 600000000UL,
#ifdef CONFIG_64BIT
	.max_pll_rate = 5000000000UL,
#else
	.max_pll_rate = ULONG_MAX,
#endif
	.io_start = { 0xae95000, 0xae97000 },
	.num_dsi_phy = 2,
	.quirks = DSI_PHY_7NM_QUIRK_V5_2,
};

const struct msm_dsi_phy_cfg dsi_phy_4nm_8650_cfgs = {
	.has_phy_lane = true,
	.regulator_data = dsi_phy_7nm_98000uA_regulators,
	.num_regulators = ARRAY_SIZE(dsi_phy_7nm_98000uA_regulators),
	.ops = {
		.enable = dsi_7nm_phy_enable,
		.disable = dsi_7nm_phy_disable,
		.pll_init = dsi_pll_7nm_init,
		.save_pll_state = dsi_7nm_pll_save_state,
		.restore_pll_state = dsi_7nm_pll_restore_state,
		.set_continuous_clock = dsi_7nm_set_continuous_clock,
	},
	.min_pll_rate = 600000000UL,
#ifdef CONFIG_64BIT
	.max_pll_rate = 5000000000UL,
#else
	.max_pll_rate = ULONG_MAX,
#endif
	.io_start = { 0xae95000, 0xae97000 },
	.num_dsi_phy = 2,
	.quirks = DSI_PHY_7NM_QUIRK_V5_2,
};

const struct msm_dsi_phy_cfg dsi_phy_3nm_8750_cfgs = {
	.has_phy_lane = true,
	.regulator_data = dsi_phy_7nm_98000uA_regulators,
	.num_regulators = ARRAY_SIZE(dsi_phy_7nm_98000uA_regulators),
	.ops = {
		.enable = dsi_7nm_phy_enable,
		.disable = dsi_7nm_phy_disable,
		.pll_init = dsi_pll_7nm_init,
		.save_pll_state = dsi_7nm_pll_save_state,
		.restore_pll_state = dsi_7nm_pll_restore_state,
		.set_continuous_clock = dsi_7nm_set_continuous_clock,
	},
	.min_pll_rate = 600000000UL,
#ifdef CONFIG_64BIT
	.max_pll_rate = 5000000000UL,
#else
	.max_pll_rate = ULONG_MAX,
#endif
	.io_start = { 0xae95000, 0xae97000 },
	.num_dsi_phy = 2,
	.quirks = DSI_PHY_7NM_QUIRK_V7_0,
};