1 /* 2 * SPDX-License-Identifier: GPL-2.0 3 * Copyright (c) 2018, The Linux Foundation 4 */ 5 6 #include <linux/clk.h> 7 #include <linux/clk-provider.h> 8 #include <linux/iopoll.h> 9 10 #include "dsi_phy.h" 11 #include "dsi.xml.h" 12 #include "dsi_phy_7nm.xml.h" 13 14 /* 15 * DSI PLL 7nm - clock diagram (eg: DSI0): TODO: updated CPHY diagram 16 * 17 * dsi0_pll_out_div_clk dsi0_pll_bit_clk 18 * | | 19 * | | 20 * +---------+ | +----------+ | +----+ 21 * dsi0vco_clk ---| out_div |--o--| divl_3_0 |--o--| /8 |-- dsi0_phy_pll_out_byteclk 22 * +---------+ | +----------+ | +----+ 23 * | | 24 * | | dsi0_pll_by_2_bit_clk 25 * | | | 26 * | | +----+ | |\ dsi0_pclk_mux 27 * | |--| /2 |--o--| \ | 28 * | | +----+ | \ | +---------+ 29 * | --------------| |--o--| div_7_4 |-- dsi0_phy_pll_out_dsiclk 30 * |------------------------------| / +---------+ 31 * | +-----+ | / 32 * -----------| /4? |--o----------|/ 33 * +-----+ | | 34 * | |dsiclk_sel 35 * | 36 * dsi0_pll_post_out_div_clk 37 */ 38 39 #define VCO_REF_CLK_RATE 19200000 40 #define FRAC_BITS 18 41 42 /* Hardware is pre V4.1 */ 43 #define DSI_PHY_7NM_QUIRK_PRE_V4_1 BIT(0) 44 /* Hardware is V4.1 */ 45 #define DSI_PHY_7NM_QUIRK_V4_1 BIT(1) 46 /* Hardware is V4.2 */ 47 #define DSI_PHY_7NM_QUIRK_V4_2 BIT(2) 48 /* Hardware is V4.3 */ 49 #define DSI_PHY_7NM_QUIRK_V4_3 BIT(3) 50 /* Hardware is V5.2 */ 51 #define DSI_PHY_7NM_QUIRK_V5_2 BIT(4) 52 53 struct dsi_pll_config { 54 bool enable_ssc; 55 bool ssc_center; 56 u32 ssc_freq; 57 u32 ssc_offset; 58 u32 ssc_adj_per; 59 60 /* out */ 61 u32 decimal_div_start; 62 u32 frac_div_start; 63 u32 pll_clock_inverters; 64 u32 ssc_stepsize; 65 u32 ssc_div_per; 66 }; 67 68 struct pll_7nm_cached_state { 69 unsigned long vco_rate; 70 u8 bit_clk_div; 71 u8 pix_clk_div; 72 u8 pll_out_div; 73 u8 pll_mux; 74 }; 75 76 struct dsi_pll_7nm { 77 struct clk_hw clk_hw; 78 79 struct msm_dsi_phy *phy; 80 81 u64 vco_current_rate; 82 83 /* protects REG_DSI_7nm_PHY_CMN_CLK_CFG0 register */ 84 spinlock_t postdiv_lock; 
85 86 struct pll_7nm_cached_state cached_state; 87 88 struct dsi_pll_7nm *slave; 89 }; 90 91 #define to_pll_7nm(x) container_of(x, struct dsi_pll_7nm, clk_hw) 92 93 /* 94 * Global list of private DSI PLL struct pointers. We need this for bonded DSI 95 * mode, where the master PLL's clk_ops needs access the slave's private data 96 */ 97 static struct dsi_pll_7nm *pll_7nm_list[DSI_MAX]; 98 99 static void dsi_pll_setup_config(struct dsi_pll_config *config) 100 { 101 config->ssc_freq = 31500; 102 config->ssc_offset = 4800; 103 config->ssc_adj_per = 2; 104 105 /* TODO: ssc enable */ 106 config->enable_ssc = false; 107 config->ssc_center = 0; 108 } 109 110 static void dsi_pll_calc_dec_frac(struct dsi_pll_7nm *pll, struct dsi_pll_config *config) 111 { 112 u64 fref = VCO_REF_CLK_RATE; 113 u64 pll_freq; 114 u64 divider; 115 u64 dec, dec_multiple; 116 u32 frac; 117 u64 multiplier; 118 119 pll_freq = pll->vco_current_rate; 120 121 divider = fref * 2; 122 123 multiplier = 1 << FRAC_BITS; 124 dec_multiple = div_u64(pll_freq * multiplier, divider); 125 dec = div_u64_rem(dec_multiple, multiplier, &frac); 126 127 if (pll->phy->cfg->quirks & DSI_PHY_7NM_QUIRK_PRE_V4_1) 128 config->pll_clock_inverters = 0x28; 129 else if ((pll->phy->cfg->quirks & DSI_PHY_7NM_QUIRK_V5_2)) { 130 if (pll_freq <= 1300000000ULL) 131 config->pll_clock_inverters = 0xa0; 132 else if (pll_freq <= 2500000000ULL) 133 config->pll_clock_inverters = 0x20; 134 else if (pll_freq <= 4000000000ULL) 135 config->pll_clock_inverters = 0x00; 136 else 137 config->pll_clock_inverters = 0x40; 138 } else if (pll->phy->cfg->quirks & DSI_PHY_7NM_QUIRK_V4_1) { 139 if (pll_freq <= 1000000000ULL) 140 config->pll_clock_inverters = 0xa0; 141 else if (pll_freq <= 2500000000ULL) 142 config->pll_clock_inverters = 0x20; 143 else if (pll_freq <= 3020000000ULL) 144 config->pll_clock_inverters = 0x00; 145 else 146 config->pll_clock_inverters = 0x40; 147 } else { 148 /* 4.2, 4.3 */ 149 if (pll_freq <= 1000000000ULL) 150 
config->pll_clock_inverters = 0xa0; 151 else if (pll_freq <= 2500000000ULL) 152 config->pll_clock_inverters = 0x20; 153 else if (pll_freq <= 3500000000ULL) 154 config->pll_clock_inverters = 0x00; 155 else 156 config->pll_clock_inverters = 0x40; 157 } 158 159 config->decimal_div_start = dec; 160 config->frac_div_start = frac; 161 } 162 163 #define SSC_CENTER BIT(0) 164 #define SSC_EN BIT(1) 165 166 static void dsi_pll_calc_ssc(struct dsi_pll_7nm *pll, struct dsi_pll_config *config) 167 { 168 u32 ssc_per; 169 u32 ssc_mod; 170 u64 ssc_step_size; 171 u64 frac; 172 173 if (!config->enable_ssc) { 174 DBG("SSC not enabled\n"); 175 return; 176 } 177 178 ssc_per = DIV_ROUND_CLOSEST(VCO_REF_CLK_RATE, config->ssc_freq) / 2 - 1; 179 ssc_mod = (ssc_per + 1) % (config->ssc_adj_per + 1); 180 ssc_per -= ssc_mod; 181 182 frac = config->frac_div_start; 183 ssc_step_size = config->decimal_div_start; 184 ssc_step_size *= (1 << FRAC_BITS); 185 ssc_step_size += frac; 186 ssc_step_size *= config->ssc_offset; 187 ssc_step_size *= (config->ssc_adj_per + 1); 188 ssc_step_size = div_u64(ssc_step_size, (ssc_per + 1)); 189 ssc_step_size = DIV_ROUND_CLOSEST_ULL(ssc_step_size, 1000000); 190 191 config->ssc_div_per = ssc_per; 192 config->ssc_stepsize = ssc_step_size; 193 194 pr_debug("SCC: Dec:%d, frac:%llu, frac_bits:%d\n", 195 config->decimal_div_start, frac, FRAC_BITS); 196 pr_debug("SSC: div_per:0x%X, stepsize:0x%X, adjper:0x%X\n", 197 ssc_per, (u32)ssc_step_size, config->ssc_adj_per); 198 } 199 200 static void dsi_pll_ssc_commit(struct dsi_pll_7nm *pll, struct dsi_pll_config *config) 201 { 202 void __iomem *base = pll->phy->pll_base; 203 204 if (config->enable_ssc) { 205 pr_debug("SSC is enabled\n"); 206 207 writel(config->ssc_stepsize & 0xff, 208 base + REG_DSI_7nm_PHY_PLL_SSC_STEPSIZE_LOW_1); 209 writel(config->ssc_stepsize >> 8, 210 base + REG_DSI_7nm_PHY_PLL_SSC_STEPSIZE_HIGH_1); 211 writel(config->ssc_div_per & 0xff, 212 base + REG_DSI_7nm_PHY_PLL_SSC_DIV_PER_LOW_1); 213 
writel(config->ssc_div_per >> 8, 214 base + REG_DSI_7nm_PHY_PLL_SSC_DIV_PER_HIGH_1); 215 writel(config->ssc_adj_per & 0xff, 216 base + REG_DSI_7nm_PHY_PLL_SSC_ADJPER_LOW_1); 217 writel(config->ssc_adj_per >> 8, 218 base + REG_DSI_7nm_PHY_PLL_SSC_ADJPER_HIGH_1); 219 writel(SSC_EN | (config->ssc_center ? SSC_CENTER : 0), 220 base + REG_DSI_7nm_PHY_PLL_SSC_CONTROL); 221 } 222 } 223 224 static void dsi_pll_config_hzindep_reg(struct dsi_pll_7nm *pll) 225 { 226 void __iomem *base = pll->phy->pll_base; 227 u8 analog_controls_five_1 = 0x01, vco_config_1 = 0x00; 228 229 if (!(pll->phy->cfg->quirks & DSI_PHY_7NM_QUIRK_PRE_V4_1)) 230 if (pll->vco_current_rate >= 3100000000ULL) 231 analog_controls_five_1 = 0x03; 232 233 if (pll->phy->cfg->quirks & DSI_PHY_7NM_QUIRK_V4_1) { 234 if (pll->vco_current_rate < 1520000000ULL) 235 vco_config_1 = 0x08; 236 else if (pll->vco_current_rate < 2990000000ULL) 237 vco_config_1 = 0x01; 238 } 239 240 if ((pll->phy->cfg->quirks & DSI_PHY_7NM_QUIRK_V4_2) || 241 (pll->phy->cfg->quirks & DSI_PHY_7NM_QUIRK_V4_3)) { 242 if (pll->vco_current_rate < 1520000000ULL) 243 vco_config_1 = 0x08; 244 else if (pll->vco_current_rate >= 2990000000ULL) 245 vco_config_1 = 0x01; 246 } 247 248 if ((pll->phy->cfg->quirks & DSI_PHY_7NM_QUIRK_V5_2)) { 249 if (pll->vco_current_rate < 1557000000ULL) 250 vco_config_1 = 0x08; 251 else 252 vco_config_1 = 0x01; 253 } 254 255 writel(analog_controls_five_1, base + REG_DSI_7nm_PHY_PLL_ANALOG_CONTROLS_FIVE_1); 256 writel(vco_config_1, base + REG_DSI_7nm_PHY_PLL_VCO_CONFIG_1); 257 writel(0x01, base + REG_DSI_7nm_PHY_PLL_ANALOG_CONTROLS_FIVE); 258 writel(0x03, base + REG_DSI_7nm_PHY_PLL_ANALOG_CONTROLS_TWO); 259 writel(0x00, base + REG_DSI_7nm_PHY_PLL_ANALOG_CONTROLS_THREE); 260 writel(0x00, base + REG_DSI_7nm_PHY_PLL_DSM_DIVIDER); 261 writel(0x4e, base + REG_DSI_7nm_PHY_PLL_FEEDBACK_DIVIDER); 262 writel(0x40, base + REG_DSI_7nm_PHY_PLL_CALIBRATION_SETTINGS); 263 writel(0xba, base + REG_DSI_7nm_PHY_PLL_BAND_SEL_CAL_SETTINGS_THREE); 
264 writel(0x0c, base + REG_DSI_7nm_PHY_PLL_FREQ_DETECT_SETTINGS_ONE); 265 writel(0x00, base + REG_DSI_7nm_PHY_PLL_OUTDIV); 266 writel(0x00, base + REG_DSI_7nm_PHY_PLL_CORE_OVERRIDE); 267 writel(0x08, base + REG_DSI_7nm_PHY_PLL_PLL_DIGITAL_TIMERS_TWO); 268 writel(0x0a, base + REG_DSI_7nm_PHY_PLL_PLL_PROP_GAIN_RATE_1); 269 writel(0xc0, base + REG_DSI_7nm_PHY_PLL_PLL_BAND_SEL_RATE_1); 270 writel(0x84, base + REG_DSI_7nm_PHY_PLL_PLL_INT_GAIN_IFILT_BAND_1); 271 writel(0x82, base + REG_DSI_7nm_PHY_PLL_PLL_INT_GAIN_IFILT_BAND_1); 272 writel(0x4c, base + REG_DSI_7nm_PHY_PLL_PLL_FL_INT_GAIN_PFILT_BAND_1); 273 writel(0x80, base + REG_DSI_7nm_PHY_PLL_PLL_LOCK_OVERRIDE); 274 writel(0x29, base + REG_DSI_7nm_PHY_PLL_PFILT); 275 writel(0x2f, base + REG_DSI_7nm_PHY_PLL_PFILT); 276 writel(0x2a, base + REG_DSI_7nm_PHY_PLL_IFILT); 277 writel(!(pll->phy->cfg->quirks & DSI_PHY_7NM_QUIRK_PRE_V4_1) ? 0x3f : 0x22, 278 base + REG_DSI_7nm_PHY_PLL_IFILT); 279 280 if (!(pll->phy->cfg->quirks & DSI_PHY_7NM_QUIRK_PRE_V4_1)) { 281 writel(0x22, base + REG_DSI_7nm_PHY_PLL_PERF_OPTIMIZE); 282 if (pll->slave) 283 writel(0x22, pll->slave->phy->pll_base + REG_DSI_7nm_PHY_PLL_PERF_OPTIMIZE); 284 } 285 } 286 287 static void dsi_pll_commit(struct dsi_pll_7nm *pll, struct dsi_pll_config *config) 288 { 289 void __iomem *base = pll->phy->pll_base; 290 291 writel(0x12, base + REG_DSI_7nm_PHY_PLL_CORE_INPUT_OVERRIDE); 292 writel(config->decimal_div_start, 293 base + REG_DSI_7nm_PHY_PLL_DECIMAL_DIV_START_1); 294 writel(config->frac_div_start & 0xff, 295 base + REG_DSI_7nm_PHY_PLL_FRAC_DIV_START_LOW_1); 296 writel((config->frac_div_start & 0xff00) >> 8, 297 base + REG_DSI_7nm_PHY_PLL_FRAC_DIV_START_MID_1); 298 writel((config->frac_div_start & 0x30000) >> 16, 299 base + REG_DSI_7nm_PHY_PLL_FRAC_DIV_START_HIGH_1); 300 writel(0x40, base + REG_DSI_7nm_PHY_PLL_PLL_LOCKDET_RATE_1); 301 writel(0x06, base + REG_DSI_7nm_PHY_PLL_PLL_LOCK_DELAY); 302 writel(pll->phy->cphy_mode ? 
0x00 : 0x10, 303 base + REG_DSI_7nm_PHY_PLL_CMODE_1); 304 writel(config->pll_clock_inverters, 305 base + REG_DSI_7nm_PHY_PLL_CLOCK_INVERTERS); 306 } 307 308 static int dsi_pll_7nm_vco_set_rate(struct clk_hw *hw, unsigned long rate, 309 unsigned long parent_rate) 310 { 311 struct dsi_pll_7nm *pll_7nm = to_pll_7nm(hw); 312 struct dsi_pll_config config; 313 314 DBG("DSI PLL%d rate=%lu, parent's=%lu", pll_7nm->phy->id, rate, 315 parent_rate); 316 317 pll_7nm->vco_current_rate = rate; 318 319 dsi_pll_setup_config(&config); 320 321 dsi_pll_calc_dec_frac(pll_7nm, &config); 322 323 dsi_pll_calc_ssc(pll_7nm, &config); 324 325 dsi_pll_commit(pll_7nm, &config); 326 327 dsi_pll_config_hzindep_reg(pll_7nm); 328 329 dsi_pll_ssc_commit(pll_7nm, &config); 330 331 /* flush, ensure all register writes are done*/ 332 wmb(); 333 334 return 0; 335 } 336 337 static int dsi_pll_7nm_lock_status(struct dsi_pll_7nm *pll) 338 { 339 int rc; 340 u32 status = 0; 341 u32 const delay_us = 100; 342 u32 const timeout_us = 5000; 343 344 rc = readl_poll_timeout_atomic(pll->phy->pll_base + 345 REG_DSI_7nm_PHY_PLL_COMMON_STATUS_ONE, 346 status, 347 ((status & BIT(0)) > 0), 348 delay_us, 349 timeout_us); 350 if (rc) 351 pr_err("DSI PLL(%d) lock failed, status=0x%08x\n", 352 pll->phy->id, status); 353 354 return rc; 355 } 356 357 static void dsi_pll_disable_pll_bias(struct dsi_pll_7nm *pll) 358 { 359 u32 data = readl(pll->phy->base + REG_DSI_7nm_PHY_CMN_CTRL_0); 360 361 writel(0, pll->phy->pll_base + REG_DSI_7nm_PHY_PLL_SYSTEM_MUXES); 362 writel(data & ~BIT(5), pll->phy->base + REG_DSI_7nm_PHY_CMN_CTRL_0); 363 ndelay(250); 364 } 365 366 static void dsi_pll_enable_pll_bias(struct dsi_pll_7nm *pll) 367 { 368 u32 data = readl(pll->phy->base + REG_DSI_7nm_PHY_CMN_CTRL_0); 369 370 writel(data | BIT(5), pll->phy->base + REG_DSI_7nm_PHY_CMN_CTRL_0); 371 writel(0xc0, pll->phy->pll_base + REG_DSI_7nm_PHY_PLL_SYSTEM_MUXES); 372 ndelay(250); 373 } 374 375 static void dsi_pll_disable_global_clk(struct dsi_pll_7nm 
*pll) 376 { 377 u32 data; 378 379 data = readl(pll->phy->base + REG_DSI_7nm_PHY_CMN_CLK_CFG1); 380 writel(data & ~BIT(5), pll->phy->base + REG_DSI_7nm_PHY_CMN_CLK_CFG1); 381 } 382 383 static void dsi_pll_enable_global_clk(struct dsi_pll_7nm *pll) 384 { 385 u32 data; 386 387 writel(0x04, pll->phy->base + REG_DSI_7nm_PHY_CMN_CTRL_3); 388 389 data = readl(pll->phy->base + REG_DSI_7nm_PHY_CMN_CLK_CFG1); 390 writel(data | BIT(5) | BIT(4), pll->phy->base + REG_DSI_7nm_PHY_CMN_CLK_CFG1); 391 } 392 393 static void dsi_pll_phy_dig_reset(struct dsi_pll_7nm *pll) 394 { 395 /* 396 * Reset the PHY digital domain. This would be needed when 397 * coming out of a CX or analog rail power collapse while 398 * ensuring that the pads maintain LP00 or LP11 state 399 */ 400 writel(BIT(0), pll->phy->base + REG_DSI_7nm_PHY_CMN_GLBL_DIGTOP_SPARE4); 401 wmb(); /* Ensure that the reset is deasserted */ 402 writel(0, pll->phy->base + REG_DSI_7nm_PHY_CMN_GLBL_DIGTOP_SPARE4); 403 wmb(); /* Ensure that the reset is deasserted */ 404 } 405 406 static int dsi_pll_7nm_vco_prepare(struct clk_hw *hw) 407 { 408 struct dsi_pll_7nm *pll_7nm = to_pll_7nm(hw); 409 int rc; 410 411 dsi_pll_enable_pll_bias(pll_7nm); 412 if (pll_7nm->slave) 413 dsi_pll_enable_pll_bias(pll_7nm->slave); 414 415 /* Start PLL */ 416 writel(BIT(0), pll_7nm->phy->base + REG_DSI_7nm_PHY_CMN_PLL_CNTRL); 417 418 /* 419 * ensure all PLL configurations are written prior to checking 420 * for PLL lock. 421 */ 422 wmb(); 423 424 /* Check for PLL lock */ 425 rc = dsi_pll_7nm_lock_status(pll_7nm); 426 if (rc) { 427 pr_err("PLL(%d) lock failed\n", pll_7nm->phy->id); 428 goto error; 429 } 430 431 pll_7nm->phy->pll_on = true; 432 433 /* 434 * assert power on reset for PHY digital in case the PLL is 435 * enabled after CX of analog domain power collapse. This needs 436 * to be done before enabling the global clk. 
437 */ 438 dsi_pll_phy_dig_reset(pll_7nm); 439 if (pll_7nm->slave) 440 dsi_pll_phy_dig_reset(pll_7nm->slave); 441 442 dsi_pll_enable_global_clk(pll_7nm); 443 if (pll_7nm->slave) 444 dsi_pll_enable_global_clk(pll_7nm->slave); 445 446 error: 447 return rc; 448 } 449 450 static void dsi_pll_disable_sub(struct dsi_pll_7nm *pll) 451 { 452 writel(0, pll->phy->base + REG_DSI_7nm_PHY_CMN_RBUF_CTRL); 453 dsi_pll_disable_pll_bias(pll); 454 } 455 456 static void dsi_pll_7nm_vco_unprepare(struct clk_hw *hw) 457 { 458 struct dsi_pll_7nm *pll_7nm = to_pll_7nm(hw); 459 460 /* 461 * To avoid any stray glitches while abruptly powering down the PLL 462 * make sure to gate the clock using the clock enable bit before 463 * powering down the PLL 464 */ 465 dsi_pll_disable_global_clk(pll_7nm); 466 writel(0, pll_7nm->phy->base + REG_DSI_7nm_PHY_CMN_PLL_CNTRL); 467 dsi_pll_disable_sub(pll_7nm); 468 if (pll_7nm->slave) { 469 dsi_pll_disable_global_clk(pll_7nm->slave); 470 dsi_pll_disable_sub(pll_7nm->slave); 471 } 472 /* flush, ensure all register writes are done */ 473 wmb(); 474 pll_7nm->phy->pll_on = false; 475 } 476 477 static unsigned long dsi_pll_7nm_vco_recalc_rate(struct clk_hw *hw, 478 unsigned long parent_rate) 479 { 480 struct dsi_pll_7nm *pll_7nm = to_pll_7nm(hw); 481 void __iomem *base = pll_7nm->phy->pll_base; 482 u64 ref_clk = VCO_REF_CLK_RATE; 483 u64 vco_rate = 0x0; 484 u64 multiplier; 485 u32 frac; 486 u32 dec; 487 u64 pll_freq, tmp64; 488 489 dec = readl(base + REG_DSI_7nm_PHY_PLL_DECIMAL_DIV_START_1); 490 dec &= 0xff; 491 492 frac = readl(base + REG_DSI_7nm_PHY_PLL_FRAC_DIV_START_LOW_1); 493 frac |= ((readl(base + REG_DSI_7nm_PHY_PLL_FRAC_DIV_START_MID_1) & 494 0xff) << 8); 495 frac |= ((readl(base + REG_DSI_7nm_PHY_PLL_FRAC_DIV_START_HIGH_1) & 496 0x3) << 16); 497 498 /* 499 * TODO: 500 * 1. 
Assumes prescaler is disabled 501 */ 502 multiplier = 1 << FRAC_BITS; 503 pll_freq = dec * (ref_clk * 2); 504 tmp64 = (ref_clk * 2 * frac); 505 pll_freq += div_u64(tmp64, multiplier); 506 507 vco_rate = pll_freq; 508 pll_7nm->vco_current_rate = vco_rate; 509 510 DBG("DSI PLL%d returning vco rate = %lu, dec = %x, frac = %x", 511 pll_7nm->phy->id, (unsigned long)vco_rate, dec, frac); 512 513 return (unsigned long)vco_rate; 514 } 515 516 static long dsi_pll_7nm_clk_round_rate(struct clk_hw *hw, 517 unsigned long rate, unsigned long *parent_rate) 518 { 519 struct dsi_pll_7nm *pll_7nm = to_pll_7nm(hw); 520 521 if (rate < pll_7nm->phy->cfg->min_pll_rate) 522 return pll_7nm->phy->cfg->min_pll_rate; 523 else if (rate > pll_7nm->phy->cfg->max_pll_rate) 524 return pll_7nm->phy->cfg->max_pll_rate; 525 else 526 return rate; 527 } 528 529 static const struct clk_ops clk_ops_dsi_pll_7nm_vco = { 530 .round_rate = dsi_pll_7nm_clk_round_rate, 531 .set_rate = dsi_pll_7nm_vco_set_rate, 532 .recalc_rate = dsi_pll_7nm_vco_recalc_rate, 533 .prepare = dsi_pll_7nm_vco_prepare, 534 .unprepare = dsi_pll_7nm_vco_unprepare, 535 }; 536 537 /* 538 * PLL Callbacks 539 */ 540 541 static void dsi_7nm_pll_save_state(struct msm_dsi_phy *phy) 542 { 543 struct dsi_pll_7nm *pll_7nm = to_pll_7nm(phy->vco_hw); 544 struct pll_7nm_cached_state *cached = &pll_7nm->cached_state; 545 void __iomem *phy_base = pll_7nm->phy->base; 546 u32 cmn_clk_cfg0, cmn_clk_cfg1; 547 548 cached->pll_out_div = readl(pll_7nm->phy->pll_base + 549 REG_DSI_7nm_PHY_PLL_PLL_OUTDIV_RATE); 550 cached->pll_out_div &= 0x3; 551 552 cmn_clk_cfg0 = readl(phy_base + REG_DSI_7nm_PHY_CMN_CLK_CFG0); 553 cached->bit_clk_div = cmn_clk_cfg0 & 0xf; 554 cached->pix_clk_div = (cmn_clk_cfg0 & 0xf0) >> 4; 555 556 cmn_clk_cfg1 = readl(phy_base + REG_DSI_7nm_PHY_CMN_CLK_CFG1); 557 cached->pll_mux = cmn_clk_cfg1 & 0x3; 558 559 DBG("DSI PLL%d outdiv %x bit_clk_div %x pix_clk_div %x pll_mux %x", 560 pll_7nm->phy->id, cached->pll_out_div, 
cached->bit_clk_div, 561 cached->pix_clk_div, cached->pll_mux); 562 } 563 564 static int dsi_7nm_pll_restore_state(struct msm_dsi_phy *phy) 565 { 566 struct dsi_pll_7nm *pll_7nm = to_pll_7nm(phy->vco_hw); 567 struct pll_7nm_cached_state *cached = &pll_7nm->cached_state; 568 void __iomem *phy_base = pll_7nm->phy->base; 569 u32 val; 570 int ret; 571 572 val = readl(pll_7nm->phy->pll_base + REG_DSI_7nm_PHY_PLL_PLL_OUTDIV_RATE); 573 val &= ~0x3; 574 val |= cached->pll_out_div; 575 writel(val, pll_7nm->phy->pll_base + REG_DSI_7nm_PHY_PLL_PLL_OUTDIV_RATE); 576 577 writel(cached->bit_clk_div | (cached->pix_clk_div << 4), 578 phy_base + REG_DSI_7nm_PHY_CMN_CLK_CFG0); 579 580 val = readl(phy_base + REG_DSI_7nm_PHY_CMN_CLK_CFG1); 581 val &= ~0x3; 582 val |= cached->pll_mux; 583 writel(val, phy_base + REG_DSI_7nm_PHY_CMN_CLK_CFG1); 584 585 ret = dsi_pll_7nm_vco_set_rate(phy->vco_hw, 586 pll_7nm->vco_current_rate, 587 VCO_REF_CLK_RATE); 588 if (ret) { 589 DRM_DEV_ERROR(&pll_7nm->phy->pdev->dev, 590 "restore vco rate failed. ret=%d\n", ret); 591 return ret; 592 } 593 594 DBG("DSI PLL%d", pll_7nm->phy->id); 595 596 return 0; 597 } 598 599 static int dsi_7nm_set_usecase(struct msm_dsi_phy *phy) 600 { 601 struct dsi_pll_7nm *pll_7nm = to_pll_7nm(phy->vco_hw); 602 void __iomem *base = phy->base; 603 u32 data = 0x0; /* internal PLL */ 604 605 DBG("DSI PLL%d", pll_7nm->phy->id); 606 607 switch (phy->usecase) { 608 case MSM_DSI_PHY_STANDALONE: 609 break; 610 case MSM_DSI_PHY_MASTER: 611 pll_7nm->slave = pll_7nm_list[(pll_7nm->phy->id + 1) % DSI_MAX]; 612 break; 613 case MSM_DSI_PHY_SLAVE: 614 data = 0x1; /* external PLL */ 615 break; 616 default: 617 return -EINVAL; 618 } 619 620 /* set PLL src */ 621 writel(data << 2, base + REG_DSI_7nm_PHY_CMN_CLK_CFG1); 622 623 return 0; 624 } 625 626 /* 627 * The post dividers and mux clocks are created using the standard divider and 628 * mux API. 
 * Unlike the 14nm PHY, the slave PLL doesn't need its dividers/mux
 * state to follow the master PLL's divider/mux state. Therefore, we don't
 * require special clock ops that also configure the slave PLL registers
 */
/*
 * Register the VCO and the post-divider/mux clock tree for one DSI PHY.
 * Fills provided_clocks[] with the byte and pixel clock hws. All clocks
 * are devm-managed, so the error path needs no explicit cleanup.
 */
static int pll_7nm_register(struct dsi_pll_7nm *pll_7nm, struct clk_hw **provided_clocks)
{
	char clk_name[32];
	struct clk_init_data vco_init = {
		.parent_data = &(const struct clk_parent_data) {
			.fw_name = "ref",
		},
		.num_parents = 1,
		.name = clk_name,
		.flags = CLK_IGNORE_UNUSED,
		.ops = &clk_ops_dsi_pll_7nm_vco,
	};
	struct device *dev = &pll_7nm->phy->pdev->dev;
	struct clk_hw *hw, *pll_out_div, *pll_bit, *pll_by_2_bit;
	struct clk_hw *pll_post_out_div, *phy_pll_out_dsi_parent;
	int ret;

	DBG("DSI%d", pll_7nm->phy->id);

	/* clk_name is copied by the clk framework at registration time */
	snprintf(clk_name, sizeof(clk_name), "dsi%dvco_clk", pll_7nm->phy->id);
	pll_7nm->clk_hw.init = &vco_init;

	ret = devm_clk_hw_register(dev, &pll_7nm->clk_hw);
	if (ret)
		return ret;

	snprintf(clk_name, sizeof(clk_name), "dsi%d_pll_out_div_clk", pll_7nm->phy->id);

	pll_out_div = devm_clk_hw_register_divider_parent_hw(dev, clk_name,
			&pll_7nm->clk_hw, CLK_SET_RATE_PARENT,
			pll_7nm->phy->pll_base +
				REG_DSI_7nm_PHY_PLL_PLL_OUTDIV_RATE,
			0, 2, CLK_DIVIDER_POWER_OF_TWO, NULL);
	if (IS_ERR(pll_out_div)) {
		ret = PTR_ERR(pll_out_div);
		goto fail;
	}

	snprintf(clk_name, sizeof(clk_name), "dsi%d_pll_bit_clk", pll_7nm->phy->id);

	/* BIT CLK: DIV_CTRL_3_0 */
	pll_bit = devm_clk_hw_register_divider_parent_hw(dev, clk_name,
			pll_out_div, CLK_SET_RATE_PARENT,
			pll_7nm->phy->base + REG_DSI_7nm_PHY_CMN_CLK_CFG0,
			0, 4, CLK_DIVIDER_ONE_BASED, &pll_7nm->postdiv_lock);
	if (IS_ERR(pll_bit)) {
		ret = PTR_ERR(pll_bit);
		goto fail;
	}

	snprintf(clk_name, sizeof(clk_name), "dsi%d_phy_pll_out_byteclk", pll_7nm->phy->id);

	/* DSI Byte clock = VCO_CLK / OUT_DIV / BIT_DIV / 8 */
	/* CPHY uses a /7 byte divider instead of /8 */
	hw = devm_clk_hw_register_fixed_factor_parent_hw(dev, clk_name,
			pll_bit, CLK_SET_RATE_PARENT, 1,
			pll_7nm->phy->cphy_mode ? 7 : 8);
	if (IS_ERR(hw)) {
		ret = PTR_ERR(hw);
		goto fail;
	}

	provided_clocks[DSI_BYTE_PLL_CLK] = hw;

	snprintf(clk_name, sizeof(clk_name), "dsi%d_pll_by_2_bit_clk", pll_7nm->phy->id);

	pll_by_2_bit = devm_clk_hw_register_fixed_factor_parent_hw(dev,
			clk_name, pll_bit, 0, 1, 2);
	if (IS_ERR(pll_by_2_bit)) {
		ret = PTR_ERR(pll_by_2_bit);
		goto fail;
	}

	snprintf(clk_name, sizeof(clk_name), "dsi%d_pll_post_out_div_clk", pll_7nm->phy->id);

	/* CPHY: post divide by 3.5 (x2/7); DPHY: divide by 4 */
	if (pll_7nm->phy->cphy_mode)
		pll_post_out_div = devm_clk_hw_register_fixed_factor_parent_hw(
				dev, clk_name, pll_out_div, 0, 2, 7);
	else
		pll_post_out_div = devm_clk_hw_register_fixed_factor_parent_hw(
				dev, clk_name, pll_out_div, 0, 1, 4);
	if (IS_ERR(pll_post_out_div)) {
		ret = PTR_ERR(pll_post_out_div);
		goto fail;
	}

	/* in CPHY mode, pclk_mux will always have post_out_div as parent
	 * don't register a pclk_mux clock and just use post_out_div instead
	 */
	if (pll_7nm->phy->cphy_mode) {
		u32 data;

		/* force the pclk mux select bits to post_out_div (0x3) */
		data = readl(pll_7nm->phy->base + REG_DSI_7nm_PHY_CMN_CLK_CFG1);
		writel(data | 3, pll_7nm->phy->base + REG_DSI_7nm_PHY_CMN_CLK_CFG1);

		phy_pll_out_dsi_parent = pll_post_out_div;
	} else {
		snprintf(clk_name, sizeof(clk_name), "dsi%d_pclk_mux", pll_7nm->phy->id);

		hw = devm_clk_hw_register_mux_parent_hws(dev, clk_name,
				((const struct clk_hw *[]){
					pll_bit,
					pll_by_2_bit,
				}), 2, 0, pll_7nm->phy->base +
					REG_DSI_7nm_PHY_CMN_CLK_CFG1,
				0, 1, 0, NULL);
		if (IS_ERR(hw)) {
			ret = PTR_ERR(hw);
			goto fail;
		}

		phy_pll_out_dsi_parent = hw;
	}

	snprintf(clk_name, sizeof(clk_name), "dsi%d_phy_pll_out_dsiclk", pll_7nm->phy->id);

	/* PIX CLK DIV : DIV_CTRL_7_4*/
	hw = devm_clk_hw_register_divider_parent_hw(dev, clk_name,
			phy_pll_out_dsi_parent, 0,
			pll_7nm->phy->base + REG_DSI_7nm_PHY_CMN_CLK_CFG0,
			4, 4, CLK_DIVIDER_ONE_BASED, &pll_7nm->postdiv_lock);
	if (IS_ERR(hw)) {
		ret = PTR_ERR(hw);
		goto fail;
	}

	provided_clocks[DSI_PIXEL_PLL_CLK] = hw;

	return 0;

fail:

	return ret;
}

/* Allocate the per-PHY PLL state and register its clock tree. */
static int dsi_pll_7nm_init(struct msm_dsi_phy *phy)
{
	struct platform_device *pdev = phy->pdev;
	struct dsi_pll_7nm *pll_7nm;
	int ret;

	pll_7nm = devm_kzalloc(&pdev->dev, sizeof(*pll_7nm), GFP_KERNEL);
	if (!pll_7nm)
		return -ENOMEM;

	DBG("DSI PLL%d", phy->id);

	/* published for bonded-DSI master/slave lookup in set_usecase */
	pll_7nm_list[phy->id] = pll_7nm;

	spin_lock_init(&pll_7nm->postdiv_lock);

	pll_7nm->phy = phy;

	ret = pll_7nm_register(pll_7nm, phy->provided_clocks->hws);
	if (ret) {
		DRM_DEV_ERROR(&pdev->dev, "failed to register PLL: %d\n", ret);
		return ret;
	}

	phy->vco_hw = &pll_7nm->clk_hw;

	/* TODO: Remove this when we have proper display handover support */
	msm_dsi_phy_pll_save_state(phy);

	return 0;
}

/* Return non-zero if the PLL start bit in CMN_PLL_CNTRL is set. */
static int dsi_phy_hw_v4_0_is_pll_on(struct msm_dsi_phy *phy)
{
	void __iomem *base = phy->base;
	u32 data = 0;

	data = readl(base + REG_DSI_7nm_PHY_CMN_PLL_CNTRL);
	mb(); /* make sure read happened */

	return (data & BIT(0));
}

/* Enable/disable LPRX and CDRX on the physical lane mapped to logical lane 0. */
static void dsi_phy_hw_v4_0_config_lpcdrx(struct msm_dsi_phy *phy, bool enable)
{
	void __iomem *lane_base = phy->lane_base;
	int phy_lane_0 = 0; /* TODO: Support all lane swap configs */

	/*
	 * LPRX and CDRX need to enabled only for physical data lane
	 * corresponding to the logical data lane 0
	 */
	if (enable)
		writel(0x3, lane_base + REG_DSI_7nm_PHY_LN_LPRX_CTRL(phy_lane_0));
	else
		writel(0, lane_base + REG_DSI_7nm_PHY_LN_LPRX_CTRL(phy_lane_0));
}

/* Program per-lane strength/config registers for all 5 lanes (4 data + clk). */
static void dsi_phy_hw_v4_0_lane_settings(struct msm_dsi_phy *phy)
{
	int i;
	/* TX_DCTRL values: index 4 is the clock lane */
	const u8 tx_dctrl_0[] = { 0x00, 0x00, 0x00, 0x04, 0x01 };
	const u8 tx_dctrl_1[] = { 0x40, 0x40, 0x40, 0x46, 0x41 };
	const u8 *tx_dctrl = tx_dctrl_0;
	void __iomem *lane_base = phy->lane_base;

	if (!(phy->cfg->quirks & DSI_PHY_7NM_QUIRK_PRE_V4_1))
		tx_dctrl = tx_dctrl_1;

	/* Strength ctrl settings */
	for (i = 0; i < 5; i++) {
		/*
		 * Disable LPRX and CDRX for all lanes. And later on, it will
		 * be only enabled for the physical data lane corresponding
		 * to the logical data lane 0
		 */
		writel(0, lane_base + REG_DSI_7nm_PHY_LN_LPRX_CTRL(i));
		writel(0x0, lane_base + REG_DSI_7nm_PHY_LN_PIN_SWAP(i));
	}

	dsi_phy_hw_v4_0_config_lpcdrx(phy, true);

	/* other settings */
	for (i = 0; i < 5; i++) {
		writel(0x0, lane_base + REG_DSI_7nm_PHY_LN_CFG0(i));
		writel(0x0, lane_base + REG_DSI_7nm_PHY_LN_CFG1(i));
		writel(i == 4 ? 0x8a : 0xa, lane_base + REG_DSI_7nm_PHY_LN_CFG2(i));
		writel(tx_dctrl[i], lane_base + REG_DSI_7nm_PHY_LN_TX_DCTRL(i));
	}
}

/*
 * Bring up the PHY: compute timings, wait for REFGEN, program the
 * revision-specific analog settings, timing registers and lane settings.
 * The register sequence and its ordering follow the hardware programming
 * guide and must not be reordered.
 */
static int dsi_7nm_phy_enable(struct msm_dsi_phy *phy,
			      struct msm_dsi_phy_clk_request *clk_req)
{
	int ret;
	u32 status;
	u32 const delay_us = 5;
	u32 const timeout_us = 1000;
	struct msm_dsi_dphy_timing *timing = &phy->timing;
	void __iomem *base = phy->base;
	bool less_than_1500_mhz;
	u32 vreg_ctrl_0, vreg_ctrl_1, lane_ctrl0;
	u32 glbl_pemph_ctrl_0;
	u32 glbl_str_swi_cal_sel_ctrl, glbl_hstx_str_ctrl_0;
	u32 glbl_rescode_top_ctrl, glbl_rescode_bot_ctrl;
	u32 data;

	DBG("");

	if (phy->cphy_mode)
		ret = msm_dsi_cphy_timing_calc_v4(timing, clk_req);
	else
		ret = msm_dsi_dphy_timing_calc_v4(timing, clk_req);
	if (ret) {
		DRM_DEV_ERROR(&phy->pdev->dev,
			      "%s: PHY timing calculation failed\n", __func__);
		return -EINVAL;
	}

	if (dsi_phy_hw_v4_0_is_pll_on(phy))
		pr_warn("PLL turned on before configuring PHY\n");

	/* Request for REFGEN READY */
	if ((phy->cfg->quirks & DSI_PHY_7NM_QUIRK_V4_3) ||
	    (phy->cfg->quirks & DSI_PHY_7NM_QUIRK_V5_2)) {
		writel(0x1, phy->base + REG_DSI_7nm_PHY_CMN_GLBL_DIGTOP_SPARE10);
		udelay(500);
	}

	/* wait for REFGEN READY */
	ret = readl_poll_timeout_atomic(base + REG_DSI_7nm_PHY_CMN_PHY_STATUS,
					status, (status & BIT(0)),
					delay_us, timeout_us);
	if (ret) {
		pr_err("Ref gen not ready. Aborting\n");
		return -EINVAL;
	}

	/* TODO: CPHY enable path (this is for DPHY only) */

	/* Alter PHY configurations if data rate less than 1.5GHZ*/
	less_than_1500_mhz = (clk_req->bitclk_rate <= 1500000000);

	/* Per-revision analog tuning values; defaults below, overridden per quirk. */
	glbl_str_swi_cal_sel_ctrl = 0x00;
	if (phy->cphy_mode) {
		vreg_ctrl_0 = 0x51;
		vreg_ctrl_1 = 0x55;
		glbl_hstx_str_ctrl_0 = 0x00;
		glbl_pemph_ctrl_0 = 0x11;
		lane_ctrl0 = 0x17;
	} else {
		vreg_ctrl_0 = less_than_1500_mhz ? 0x53 : 0x52;
		vreg_ctrl_1 = 0x5c;
		glbl_hstx_str_ctrl_0 = 0x88;
		glbl_pemph_ctrl_0 = 0x00;
		lane_ctrl0 = 0x1f;
	}

	if ((phy->cfg->quirks & DSI_PHY_7NM_QUIRK_V5_2)) {
		if (phy->cphy_mode) {
			vreg_ctrl_0 = 0x45;
			vreg_ctrl_1 = 0x41;
			glbl_rescode_top_ctrl = 0x00;
			glbl_rescode_bot_ctrl = 0x00;
		} else {
			vreg_ctrl_0 = 0x44;
			vreg_ctrl_1 = 0x19;
			glbl_rescode_top_ctrl = less_than_1500_mhz ? 0x3c : 0x03;
			glbl_rescode_bot_ctrl = less_than_1500_mhz ? 0x38 : 0x3c;
		}
	} else if ((phy->cfg->quirks & DSI_PHY_7NM_QUIRK_V4_3)) {
		if (phy->cphy_mode) {
			glbl_rescode_top_ctrl = less_than_1500_mhz ? 0x3d : 0x01;
			glbl_rescode_bot_ctrl = less_than_1500_mhz ? 0x38 : 0x3b;
		} else {
			glbl_rescode_top_ctrl = less_than_1500_mhz ? 0x3d : 0x01;
			glbl_rescode_bot_ctrl = less_than_1500_mhz ? 0x38 : 0x39;
		}
	} else if (phy->cfg->quirks & DSI_PHY_7NM_QUIRK_V4_2) {
		if (phy->cphy_mode) {
			glbl_rescode_top_ctrl = less_than_1500_mhz ? 0x3d : 0x01;
			glbl_rescode_bot_ctrl = less_than_1500_mhz ? 0x38 : 0x3b;
		} else {
			glbl_rescode_top_ctrl = less_than_1500_mhz ? 0x3c : 0x00;
			glbl_rescode_bot_ctrl = less_than_1500_mhz ? 0x38 : 0x39;
		}
	} else if (phy->cfg->quirks & DSI_PHY_7NM_QUIRK_V4_1) {
		if (phy->cphy_mode) {
			glbl_hstx_str_ctrl_0 = 0x88;
			glbl_rescode_top_ctrl = 0x00;
			glbl_rescode_bot_ctrl = 0x3c;
		} else {
			glbl_rescode_top_ctrl = less_than_1500_mhz ? 0x3d : 0x00;
			glbl_rescode_bot_ctrl = less_than_1500_mhz ? 0x39 : 0x3c;
		}
	} else {
		if (phy->cphy_mode) {
			glbl_str_swi_cal_sel_ctrl = 0x03;
			glbl_hstx_str_ctrl_0 = 0x66;
		} else {
			vreg_ctrl_0 = less_than_1500_mhz ? 0x5B : 0x59;
			glbl_str_swi_cal_sel_ctrl = less_than_1500_mhz ? 0x03 : 0x00;
			glbl_hstx_str_ctrl_0 = less_than_1500_mhz ? 0x66 : 0x88;
		}
		glbl_rescode_top_ctrl = 0x03;
		glbl_rescode_bot_ctrl = 0x3c;
	}

	/* de-assert digital and pll power down */
	data = BIT(6) | BIT(5);
	writel(data, base + REG_DSI_7nm_PHY_CMN_CTRL_0);

	/* Assert PLL core reset */
	writel(0x00, base + REG_DSI_7nm_PHY_CMN_PLL_CNTRL);

	/* turn off resync FIFO */
	writel(0x00, base + REG_DSI_7nm_PHY_CMN_RBUF_CTRL);

	/* program CMN_CTRL_4 for minor_ver 2 chipsets*/
	if ((phy->cfg->quirks & DSI_PHY_7NM_QUIRK_V5_2) ||
	    (readl(base + REG_DSI_7nm_PHY_CMN_REVISION_ID0) & (0xf0)) == 0x20)
		writel(0x04, base + REG_DSI_7nm_PHY_CMN_CTRL_4);

	/* Configure PHY lane swap (TODO: we need to calculate this) */
	writel(0x21, base + REG_DSI_7nm_PHY_CMN_LANE_CFG0);
	writel(0x84, base + REG_DSI_7nm_PHY_CMN_LANE_CFG1);

	if (phy->cphy_mode)
		writel(BIT(6), base + REG_DSI_7nm_PHY_CMN_GLBL_CTRL);

	/* Enable LDO */
	writel(vreg_ctrl_0, base + REG_DSI_7nm_PHY_CMN_VREG_CTRL_0);
	writel(vreg_ctrl_1, base + REG_DSI_7nm_PHY_CMN_VREG_CTRL_1);

	writel(0x00, base + REG_DSI_7nm_PHY_CMN_CTRL_3);
	writel(glbl_str_swi_cal_sel_ctrl,
	       base + REG_DSI_7nm_PHY_CMN_GLBL_STR_SWI_CAL_SEL_CTRL);
	writel(glbl_hstx_str_ctrl_0,
	       base + REG_DSI_7nm_PHY_CMN_GLBL_HSTX_STR_CTRL_0);
	writel(glbl_pemph_ctrl_0,
	       base + REG_DSI_7nm_PHY_CMN_GLBL_PEMPH_CTRL_0);
	if (phy->cphy_mode)
		writel(0x01, base + REG_DSI_7nm_PHY_CMN_GLBL_PEMPH_CTRL_1);
	writel(glbl_rescode_top_ctrl,
	       base + REG_DSI_7nm_PHY_CMN_GLBL_RESCODE_OFFSET_TOP_CTRL);
	writel(glbl_rescode_bot_ctrl,
	       base + REG_DSI_7nm_PHY_CMN_GLBL_RESCODE_OFFSET_BOT_CTRL);
	writel(0x55, base + REG_DSI_7nm_PHY_CMN_GLBL_LPTX_STR_CTRL);

	/* Remove power down from all blocks */
	writel(0x7f, base + REG_DSI_7nm_PHY_CMN_CTRL_0);

	writel(lane_ctrl0, base + REG_DSI_7nm_PHY_CMN_LANE_CTRL0);

	/* Select full-rate mode */
	if (!phy->cphy_mode)
		writel(0x40, base + REG_DSI_7nm_PHY_CMN_CTRL_2);

	ret = dsi_7nm_set_usecase(phy);
	if (ret) {
		DRM_DEV_ERROR(&phy->pdev->dev, "%s: set pll usecase failed, %d\n",
			      __func__, ret);
		return ret;
	}

	/* DSI PHY timings */
	if (phy->cphy_mode) {
		writel(0x00, base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_0);
		writel(timing->hs_exit, base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_4);
		writel(timing->shared_timings.clk_pre,
		       base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_5);
		writel(timing->clk_prepare, base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_6);
		writel(timing->shared_timings.clk_post,
		       base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_7);
		writel(timing->hs_rqst, base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_8);
		writel(0x02, base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_9);
		writel(0x04, base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_10);
		writel(0x00, base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_11);
	} else {
		writel(0x00, base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_0);
		writel(timing->clk_zero, base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_1);
		writel(timing->clk_prepare, base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_2);
		writel(timing->clk_trail, base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_3);
		writel(timing->hs_exit, base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_4);
		writel(timing->hs_zero, base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_5);
		writel(timing->hs_prepare, base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_6);
		writel(timing->hs_trail, base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_7);
		writel(timing->hs_rqst, base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_8);
		writel(0x02, base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_9);
		writel(0x04, base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_10);
		writel(0x00, base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_11);
		writel(timing->shared_timings.clk_pre,
		       base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_12);
		writel(timing->shared_timings.clk_post,
		       base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_13);
	}

	/* DSI lane settings */
	dsi_phy_hw_v4_0_lane_settings(phy);

	DBG("DSI%d PHY enabled", phy->id);

	return 0;
}

/* Toggle continuous (non-gated) clock mode via LANE_CTRL1 bits 5 and 6. */
static bool dsi_7nm_set_continuous_clock(struct msm_dsi_phy *phy, bool enable)
{
	void __iomem *base = phy->base;
	u32 data;

	data = readl(base + REG_DSI_7nm_PHY_CMN_LANE_CTRL1);
	if (enable)
		data |= BIT(5) | BIT(6);
	else
		data &= ~(BIT(5) | BIT(6));
	writel(data, base + REG_DSI_7nm_PHY_CMN_LANE_CTRL1);

	return enable;
}

/* Power down the PHY: drop REFGEN vote, disable lanes and all blocks. */
static void dsi_7nm_phy_disable(struct msm_dsi_phy *phy)
{
	void __iomem *base = phy->base;
	u32 data;

	DBG("");

	if (dsi_phy_hw_v4_0_is_pll_on(phy))
		pr_warn("Turning OFF PHY while PLL is on\n");

	dsi_phy_hw_v4_0_config_lpcdrx(phy, false);

	/* Turn off REFGEN Vote */
	if ((phy->cfg->quirks & DSI_PHY_7NM_QUIRK_V4_3) ||
	    (phy->cfg->quirks & DSI_PHY_7NM_QUIRK_V5_2)) {
		writel(0x0, base + REG_DSI_7nm_PHY_CMN_GLBL_DIGTOP_SPARE10);
		wmb();
		/* Delay to ensure HW removes vote before PHY shut down */
		udelay(2);
	}

	data = readl(base + REG_DSI_7nm_PHY_CMN_CTRL_0);

	/* disable all lanes */
	data &= ~0x1F;
	writel(data, base + REG_DSI_7nm_PHY_CMN_CTRL_0);
	writel(0, base + REG_DSI_7nm_PHY_CMN_LANE_CTRL0);

	/* Turn off all PHY blocks */
	writel(0x00, base + REG_DSI_7nm_PHY_CMN_CTRL_0);
	/* make sure phy is turned off */
	wmb();

	DBG("DSI%d PHY disabled", phy->id);
}

/* Per-SoC vdds load requirements, selected by the cfg tables below. */
static const struct regulator_bulk_data dsi_phy_7nm_36mA_regulators[] = {
	{ .supply = "vdds", .init_load_uA = 36000 },
};

/* NOTE(review): name says 37750uA but load is 37550 — confirm intended value */
static const struct regulator_bulk_data dsi_phy_7nm_37750uA_regulators[] = {
	{ .supply = "vdds", .init_load_uA = 37550 },
};

static const struct regulator_bulk_data dsi_phy_7nm_98000uA_regulators[] = {
	{ .supply = "vdds", .init_load_uA = 98000 },
};

static const struct regulator_bulk_data dsi_phy_7nm_97800uA_regulators[] = {
	{ .supply = "vdds", .init_load_uA = 97800 },
};

static const struct regulator_bulk_data dsi_phy_7nm_98400uA_regulators[] = {
	{ .supply = "vdds", .init_load_uA = 98400 },
};

const struct msm_dsi_phy_cfg dsi_phy_7nm_cfgs = {
	.has_phy_lane = true,
	.regulator_data = dsi_phy_7nm_36mA_regulators,
	.num_regulators = ARRAY_SIZE(dsi_phy_7nm_36mA_regulators),
	.ops = {
		.enable = dsi_7nm_phy_enable,
		.disable = dsi_7nm_phy_disable,
		.pll_init = dsi_pll_7nm_init,
		.save_pll_state = dsi_7nm_pll_save_state,
		.restore_pll_state = dsi_7nm_pll_restore_state,
		.set_continuous_clock = dsi_7nm_set_continuous_clock,
	},
	.min_pll_rate = 600000000UL,
#ifdef CONFIG_64BIT
	.max_pll_rate = 5000000000UL,
#else
	.max_pll_rate = ULONG_MAX,
#endif
	.io_start = { 0xae94400, 0xae96400 },
	.num_dsi_phy = 2,
	.quirks = DSI_PHY_7NM_QUIRK_V4_1,
};

const struct msm_dsi_phy_cfg dsi_phy_7nm_6375_cfgs = {
	.has_phy_lane = true,
	.ops = {
		.enable =
dsi_7nm_phy_enable, 1169 .disable = dsi_7nm_phy_disable, 1170 .pll_init = dsi_pll_7nm_init, 1171 .save_pll_state = dsi_7nm_pll_save_state, 1172 .restore_pll_state = dsi_7nm_pll_restore_state, 1173 }, 1174 .min_pll_rate = 600000000UL, 1175 #ifdef CONFIG_64BIT 1176 .max_pll_rate = 5000000000ULL, 1177 #else 1178 .max_pll_rate = ULONG_MAX, 1179 #endif 1180 .io_start = { 0x5e94400 }, 1181 .num_dsi_phy = 1, 1182 .quirks = DSI_PHY_7NM_QUIRK_V4_1, 1183 }; 1184 1185 const struct msm_dsi_phy_cfg dsi_phy_7nm_8150_cfgs = { 1186 .has_phy_lane = true, 1187 .regulator_data = dsi_phy_7nm_36mA_regulators, 1188 .num_regulators = ARRAY_SIZE(dsi_phy_7nm_36mA_regulators), 1189 .ops = { 1190 .enable = dsi_7nm_phy_enable, 1191 .disable = dsi_7nm_phy_disable, 1192 .pll_init = dsi_pll_7nm_init, 1193 .save_pll_state = dsi_7nm_pll_save_state, 1194 .restore_pll_state = dsi_7nm_pll_restore_state, 1195 .set_continuous_clock = dsi_7nm_set_continuous_clock, 1196 }, 1197 .min_pll_rate = 1000000000UL, 1198 .max_pll_rate = 3500000000UL, 1199 .io_start = { 0xae94400, 0xae96400 }, 1200 .num_dsi_phy = 2, 1201 .quirks = DSI_PHY_7NM_QUIRK_PRE_V4_1, 1202 }; 1203 1204 const struct msm_dsi_phy_cfg dsi_phy_7nm_7280_cfgs = { 1205 .has_phy_lane = true, 1206 .regulator_data = dsi_phy_7nm_37750uA_regulators, 1207 .num_regulators = ARRAY_SIZE(dsi_phy_7nm_37750uA_regulators), 1208 .ops = { 1209 .enable = dsi_7nm_phy_enable, 1210 .disable = dsi_7nm_phy_disable, 1211 .pll_init = dsi_pll_7nm_init, 1212 .save_pll_state = dsi_7nm_pll_save_state, 1213 .restore_pll_state = dsi_7nm_pll_restore_state, 1214 }, 1215 .min_pll_rate = 600000000UL, 1216 #ifdef CONFIG_64BIT 1217 .max_pll_rate = 5000000000ULL, 1218 #else 1219 .max_pll_rate = ULONG_MAX, 1220 #endif 1221 .io_start = { 0xae94400 }, 1222 .num_dsi_phy = 1, 1223 .quirks = DSI_PHY_7NM_QUIRK_V4_1, 1224 }; 1225 1226 const struct msm_dsi_phy_cfg dsi_phy_5nm_8350_cfgs = { 1227 .has_phy_lane = true, 1228 .regulator_data = dsi_phy_7nm_37750uA_regulators, 1229 .num_regulators = 
ARRAY_SIZE(dsi_phy_7nm_37750uA_regulators), 1230 .ops = { 1231 .enable = dsi_7nm_phy_enable, 1232 .disable = dsi_7nm_phy_disable, 1233 .pll_init = dsi_pll_7nm_init, 1234 .save_pll_state = dsi_7nm_pll_save_state, 1235 .restore_pll_state = dsi_7nm_pll_restore_state, 1236 .set_continuous_clock = dsi_7nm_set_continuous_clock, 1237 }, 1238 .min_pll_rate = 600000000UL, 1239 #ifdef CONFIG_64BIT 1240 .max_pll_rate = 5000000000UL, 1241 #else 1242 .max_pll_rate = ULONG_MAX, 1243 #endif 1244 .io_start = { 0xae94400, 0xae96400 }, 1245 .num_dsi_phy = 2, 1246 .quirks = DSI_PHY_7NM_QUIRK_V4_2, 1247 }; 1248 1249 const struct msm_dsi_phy_cfg dsi_phy_5nm_8450_cfgs = { 1250 .has_phy_lane = true, 1251 .regulator_data = dsi_phy_7nm_97800uA_regulators, 1252 .num_regulators = ARRAY_SIZE(dsi_phy_7nm_97800uA_regulators), 1253 .ops = { 1254 .enable = dsi_7nm_phy_enable, 1255 .disable = dsi_7nm_phy_disable, 1256 .pll_init = dsi_pll_7nm_init, 1257 .save_pll_state = dsi_7nm_pll_save_state, 1258 .restore_pll_state = dsi_7nm_pll_restore_state, 1259 .set_continuous_clock = dsi_7nm_set_continuous_clock, 1260 }, 1261 .min_pll_rate = 600000000UL, 1262 #ifdef CONFIG_64BIT 1263 .max_pll_rate = 5000000000UL, 1264 #else 1265 .max_pll_rate = ULONG_MAX, 1266 #endif 1267 .io_start = { 0xae94400, 0xae96400 }, 1268 .num_dsi_phy = 2, 1269 .quirks = DSI_PHY_7NM_QUIRK_V4_3, 1270 }; 1271 1272 const struct msm_dsi_phy_cfg dsi_phy_4nm_8550_cfgs = { 1273 .has_phy_lane = true, 1274 .regulator_data = dsi_phy_7nm_98400uA_regulators, 1275 .num_regulators = ARRAY_SIZE(dsi_phy_7nm_98400uA_regulators), 1276 .ops = { 1277 .enable = dsi_7nm_phy_enable, 1278 .disable = dsi_7nm_phy_disable, 1279 .pll_init = dsi_pll_7nm_init, 1280 .save_pll_state = dsi_7nm_pll_save_state, 1281 .restore_pll_state = dsi_7nm_pll_restore_state, 1282 .set_continuous_clock = dsi_7nm_set_continuous_clock, 1283 }, 1284 .min_pll_rate = 600000000UL, 1285 #ifdef CONFIG_64BIT 1286 .max_pll_rate = 5000000000UL, 1287 #else 1288 .max_pll_rate = ULONG_MAX, 
1289 #endif 1290 .io_start = { 0xae95000, 0xae97000 }, 1291 .num_dsi_phy = 2, 1292 .quirks = DSI_PHY_7NM_QUIRK_V5_2, 1293 }; 1294 1295 const struct msm_dsi_phy_cfg dsi_phy_4nm_8650_cfgs = { 1296 .has_phy_lane = true, 1297 .regulator_data = dsi_phy_7nm_98000uA_regulators, 1298 .num_regulators = ARRAY_SIZE(dsi_phy_7nm_98000uA_regulators), 1299 .ops = { 1300 .enable = dsi_7nm_phy_enable, 1301 .disable = dsi_7nm_phy_disable, 1302 .pll_init = dsi_pll_7nm_init, 1303 .save_pll_state = dsi_7nm_pll_save_state, 1304 .restore_pll_state = dsi_7nm_pll_restore_state, 1305 .set_continuous_clock = dsi_7nm_set_continuous_clock, 1306 }, 1307 .min_pll_rate = 600000000UL, 1308 #ifdef CONFIG_64BIT 1309 .max_pll_rate = 5000000000UL, 1310 #else 1311 .max_pll_rate = ULONG_MAX, 1312 #endif 1313 .io_start = { 0xae95000, 0xae97000 }, 1314 .num_dsi_phy = 2, 1315 .quirks = DSI_PHY_7NM_QUIRK_V5_2, 1316 }; 1317