/*
 * SPDX-License-Identifier: GPL-2.0
 * Copyright (c) 2018, The Linux Foundation
 */

#include <linux/clk.h>
#include <linux/clk-provider.h>
#include <linux/iopoll.h>

#include "dsi_phy.h"
#include "dsi.xml.h"
#include "dsi_phy_7nm.xml.h"

/*
 * DSI PLL 7nm - clock diagram (eg: DSI0): TODO: updated CPHY diagram
 *
 *           dsi0_pll_out_div_clk  dsi0_pll_bit_clk
 *                              |                |
 *                              |                |
 *                 +---------+  |  +----------+  |  +----+
 *  dsi0vco_clk ---| out_div |--o--| divl_3_0 |--o--| /8 |-- dsi0_phy_pll_out_byteclk
 *                 +---------+  |  +----------+  |  +----+
 *                              |                |
 *                              |                |         dsi0_pll_by_2_bit_clk
 *                              |                |          |
 *                              |                |  +----+  |  |\  dsi0_pclk_mux
 *                              |                |--| /2 |--o--| \   |
 *                              |                |  +----+     |  \  |  +---------+
 *                              |                --------------|  |--o--| div_7_4 |-- dsi0_phy_pll_out_dsiclk
 *                              |------------------------------|  /     +---------+
 *                              |          +-----+             | /
 *                              -----------| /4? |--o----------|/
 *                                         +-----+  |          |
 *                                                  |          |dsiclk_sel
 *                                                  |
 *                                                  dsi0_pll_post_out_div_clk
 */

/* Fixed 19.2 MHz reference the VCO fractional divider is computed against */
#define VCO_REF_CLK_RATE 19200000
/* Width of the fractional part of the PLL feedback divider */
#define FRAC_BITS 18

/* Hardware is pre V4.1 */
#define DSI_PHY_7NM_QUIRK_PRE_V4_1	BIT(0)
/* Hardware is V4.1 */
#define DSI_PHY_7NM_QUIRK_V4_1		BIT(1)
/* Hardware is V4.2 */
#define DSI_PHY_7NM_QUIRK_V4_2		BIT(2)
/* Hardware is V4.3 */
#define DSI_PHY_7NM_QUIRK_V4_3		BIT(3)
/* Hardware is V5.2 */
#define DSI_PHY_7NM_QUIRK_V5_2		BIT(4)

/*
 * Per-set_rate PLL configuration: SSC (spread-spectrum clocking) knobs go in,
 * computed divider/SSC register values come out (see "out" members).
 */
struct dsi_pll_config {
	bool enable_ssc;
	bool ssc_center;
	u32 ssc_freq;
	u32 ssc_offset;
	u32 ssc_adj_per;

	/* out */
	u32 decimal_div_start;
	u32 frac_div_start;
	u32 pll_clock_inverters;
	u32 ssc_stepsize;
	u32 ssc_div_per;
};

/* Divider/mux state captured by save_state and re-applied by restore_state */
struct pll_7nm_cached_state {
	unsigned long vco_rate;
	u8 bit_clk_div;
	u8 pix_clk_div;
	u8 pll_out_div;
	u8 pll_mux;
};

/* Private driver state for one 7nm DSI PLL instance */
struct dsi_pll_7nm {
	struct clk_hw clk_hw;

	struct msm_dsi_phy *phy;

	u64 vco_current_rate;

	/* protects REG_DSI_7nm_PHY_CMN_CLK_CFG0 register */
	spinlock_t postdiv_lock;

	struct pll_7nm_cached_state cached_state;

	/* set only for the master PLL in bonded-DSI mode */
	struct dsi_pll_7nm *slave;
};

#define to_pll_7nm(x)	container_of(x, struct dsi_pll_7nm, clk_hw)

/*
 * Global list of private DSI PLL struct pointers. We need this for bonded DSI
 * mode, where the master PLL's clk_ops needs access the slave's private data
 */
static struct dsi_pll_7nm *pll_7nm_list[DSI_MAX];

/*
 * Fill in the default SSC parameters; SSC itself is currently left disabled
 * (see TODO below), so only the "out" members derived later take effect.
 */
static void dsi_pll_setup_config(struct dsi_pll_config *config)
{
	config->ssc_freq = 31500;
	config->ssc_offset = 4800;
	config->ssc_adj_per = 2;

	/* TODO: ssc enable */
	config->enable_ssc = false;
	config->ssc_center = 0;
}

/*
 * Derive the integer (decimal_div_start) and 18-bit fractional
 * (frac_div_start) feedback-divider values from the requested VCO rate,
 * plus the rate-dependent clock-inverter setting.
 *
 * divider = 2 * fref because the feedback path runs at twice the reference
 * (doubler); dec.frac = vco_rate / (2 * fref) in FRAC_BITS fixed point.
 */
static void dsi_pll_calc_dec_frac(struct dsi_pll_7nm *pll, struct dsi_pll_config *config)
{
	u64 fref = VCO_REF_CLK_RATE;
	u64 pll_freq;
	u64 divider;
	u64 dec, dec_multiple;
	u32 frac;
	u64 multiplier;

	pll_freq = pll->vco_current_rate;

	divider = fref * 2;

	multiplier = 1 << FRAC_BITS;
	dec_multiple = div_u64(pll_freq * multiplier, divider);
	dec = div_u64_rem(dec_multiple, multiplier, &frac);

	/* Clock-inverter value depends on hardware revision and VCO band */
	if (pll->phy->cfg->quirks & DSI_PHY_7NM_QUIRK_PRE_V4_1)
		config->pll_clock_inverters = 0x28;
	else if ((pll->phy->cfg->quirks & DSI_PHY_7NM_QUIRK_V5_2)) {
		if (pll_freq <= 1300000000ULL)
			config->pll_clock_inverters = 0xa0;
		else if (pll_freq <= 2500000000ULL)
			config->pll_clock_inverters = 0x20;
		else if (pll_freq <= 4000000000ULL)
			config->pll_clock_inverters = 0x00;
		else
			config->pll_clock_inverters = 0x40;
	} else {
		/* 4.1, 4.2, 4.3 share the same frequency bands */
		if (pll_freq <= 1000000000ULL)
			config->pll_clock_inverters = 0xa0;
		else if (pll_freq <= 2500000000ULL)
			config->pll_clock_inverters = 0x20;
		else if (pll_freq <= 3020000000ULL)
			config->pll_clock_inverters = 0x00;
		else
			config->pll_clock_inverters = 0x40;
	}

	config->decimal_div_start = dec;
	config->frac_div_start = frac;
}

/* Bits in REG_DSI_7nm_PHY_PLL_SSC_CONTROL */
#define SSC_CENTER	BIT(0)
#define SSC_EN		\
BIT(1)

/*
 * Compute the SSC modulation registers (div_per, stepsize) from the config's
 * SSC frequency/offset and the dec/frac divider previously calculated by
 * dsi_pll_calc_dec_frac(). No-op when SSC is disabled.
 */
static void dsi_pll_calc_ssc(struct dsi_pll_7nm *pll, struct dsi_pll_config *config)
{
	u32 ssc_per;
	u32 ssc_mod;
	u64 ssc_step_size;
	u64 frac;

	if (!config->enable_ssc) {
		DBG("SSC not enabled\n");
		return;
	}

	ssc_per = DIV_ROUND_CLOSEST(VCO_REF_CLK_RATE, config->ssc_freq) / 2 - 1;
	/* round ssc_per down so (ssc_per + 1) is a multiple of (adj_per + 1) */
	ssc_mod = (ssc_per + 1) % (config->ssc_adj_per + 1);
	ssc_per -= ssc_mod;

	frac = config->frac_div_start;
	/* step = (dec.frac * ssc_offset_ppm * (adj_per+1)) / (ssc_per+1) / 1e6 */
	ssc_step_size = config->decimal_div_start;
	ssc_step_size *= (1 << FRAC_BITS);
	ssc_step_size += frac;
	ssc_step_size *= config->ssc_offset;
	ssc_step_size *= (config->ssc_adj_per + 1);
	ssc_step_size = div_u64(ssc_step_size, (ssc_per + 1));
	ssc_step_size = DIV_ROUND_CLOSEST_ULL(ssc_step_size, 1000000);

	config->ssc_div_per = ssc_per;
	config->ssc_stepsize = ssc_step_size;

	/* NOTE(review): "SCC" in the format string looks like a typo for "SSC" */
	pr_debug("SCC: Dec:%d, frac:%llu, frac_bits:%d\n",
		 config->decimal_div_start, frac, FRAC_BITS);
	pr_debug("SSC: div_per:0x%X, stepsize:0x%X, adjper:0x%X\n",
		 ssc_per, (u32)ssc_step_size, config->ssc_adj_per);
}

/*
 * Write the SSC registers computed by dsi_pll_calc_ssc() to the PLL block
 * and enable SSC (centered or down-spread per config). No-op when disabled.
 */
static void dsi_pll_ssc_commit(struct dsi_pll_7nm *pll, struct dsi_pll_config *config)
{
	void __iomem *base = pll->phy->pll_base;

	if (config->enable_ssc) {
		pr_debug("SSC is enabled\n");

		writel(config->ssc_stepsize & 0xff,
		       base + REG_DSI_7nm_PHY_PLL_SSC_STEPSIZE_LOW_1);
		writel(config->ssc_stepsize >> 8,
		       base + REG_DSI_7nm_PHY_PLL_SSC_STEPSIZE_HIGH_1);
		writel(config->ssc_div_per & 0xff,
		       base + REG_DSI_7nm_PHY_PLL_SSC_DIV_PER_LOW_1);
		writel(config->ssc_div_per >> 8,
		       base + REG_DSI_7nm_PHY_PLL_SSC_DIV_PER_HIGH_1);
		writel(config->ssc_adj_per & 0xff,
		       base + REG_DSI_7nm_PHY_PLL_SSC_ADJPER_LOW_1);
		writel(config->ssc_adj_per >> 8,
		       base + REG_DSI_7nm_PHY_PLL_SSC_ADJPER_HIGH_1);
		writel(SSC_EN | (config->ssc_center ? SSC_CENTER : 0),
		       base + REG_DSI_7nm_PHY_PLL_SSC_CONTROL);
	}
}

/*
 * Program the frequency-independent analog/loop-filter PLL settings.
 * A few values (analog_controls_five_1, vco_config_1, IFILT) still depend on
 * the hardware revision quirks and on which VCO band the current rate falls
 * in. The write order follows the hardware programming sequence — do not
 * reorder.
 */
static void dsi_pll_config_hzindep_reg(struct dsi_pll_7nm *pll)
{
	void __iomem *base = pll->phy->pll_base;
	u8 analog_controls_five_1 = 0x01, vco_config_1 = 0x00;

	if (!(pll->phy->cfg->quirks & DSI_PHY_7NM_QUIRK_PRE_V4_1))
		if (pll->vco_current_rate >= 3100000000ULL)
			analog_controls_five_1 = 0x03;

	if (pll->phy->cfg->quirks & DSI_PHY_7NM_QUIRK_V4_1) {
		if (pll->vco_current_rate < 1520000000ULL)
			vco_config_1 = 0x08;
		else if (pll->vco_current_rate < 2990000000ULL)
			vco_config_1 = 0x01;
	}

	if ((pll->phy->cfg->quirks & DSI_PHY_7NM_QUIRK_V4_2) ||
	    (pll->phy->cfg->quirks & DSI_PHY_7NM_QUIRK_V4_3)) {
		if (pll->vco_current_rate < 1520000000ULL)
			vco_config_1 = 0x08;
		else if (pll->vco_current_rate >= 2990000000ULL)
			vco_config_1 = 0x01;
	}

	if ((pll->phy->cfg->quirks & DSI_PHY_7NM_QUIRK_V5_2)) {
		if (pll->vco_current_rate < 1557000000ULL)
			vco_config_1 = 0x08;
		else
			vco_config_1 = 0x01;
	}

	writel(analog_controls_five_1, base + REG_DSI_7nm_PHY_PLL_ANALOG_CONTROLS_FIVE_1);
	writel(vco_config_1, base + REG_DSI_7nm_PHY_PLL_VCO_CONFIG_1);
	writel(0x01, base + REG_DSI_7nm_PHY_PLL_ANALOG_CONTROLS_FIVE);
	writel(0x03, base + REG_DSI_7nm_PHY_PLL_ANALOG_CONTROLS_TWO);
	writel(0x00, base + REG_DSI_7nm_PHY_PLL_ANALOG_CONTROLS_THREE);
	writel(0x00, base + REG_DSI_7nm_PHY_PLL_DSM_DIVIDER);
	writel(0x4e, base + REG_DSI_7nm_PHY_PLL_FEEDBACK_DIVIDER);
	writel(0x40, base + REG_DSI_7nm_PHY_PLL_CALIBRATION_SETTINGS);
	writel(0xba, base + REG_DSI_7nm_PHY_PLL_BAND_SEL_CAL_SETTINGS_THREE);
	writel(0x0c, base + REG_DSI_7nm_PHY_PLL_FREQ_DETECT_SETTINGS_ONE);
	writel(0x00, base + REG_DSI_7nm_PHY_PLL_OUTDIV);
	writel(0x00, base + REG_DSI_7nm_PHY_PLL_CORE_OVERRIDE);
	writel(0x08, base + REG_DSI_7nm_PHY_PLL_PLL_DIGITAL_TIMERS_TWO);
	writel(0x0a, base + REG_DSI_7nm_PHY_PLL_PLL_PROP_GAIN_RATE_1);
	writel(0xc0, base + REG_DSI_7nm_PHY_PLL_PLL_BAND_SEL_RATE_1);
	/*
	 * NOTE(review): INT_GAIN_IFILT_BAND_1 is deliberately written twice
	 * (0x84 then 0x82) — presumably part of the hardware programming
	 * sequence; confirm against the PHY programming guide before touching.
	 */
	writel(0x84, base + REG_DSI_7nm_PHY_PLL_PLL_INT_GAIN_IFILT_BAND_1);
	writel(0x82, base + REG_DSI_7nm_PHY_PLL_PLL_INT_GAIN_IFILT_BAND_1);
	writel(0x4c, base + REG_DSI_7nm_PHY_PLL_PLL_FL_INT_GAIN_PFILT_BAND_1);
	writel(0x80, base + REG_DSI_7nm_PHY_PLL_PLL_LOCK_OVERRIDE);
	writel(0x29, base + REG_DSI_7nm_PHY_PLL_PFILT);
	writel(0x2f, base + REG_DSI_7nm_PHY_PLL_PFILT);
	writel(0x2a, base + REG_DSI_7nm_PHY_PLL_IFILT);
	writel(!(pll->phy->cfg->quirks & DSI_PHY_7NM_QUIRK_PRE_V4_1) ? 0x3f : 0x22,
	       base + REG_DSI_7nm_PHY_PLL_IFILT);

	if (!(pll->phy->cfg->quirks & DSI_PHY_7NM_QUIRK_PRE_V4_1)) {
		writel(0x22, base + REG_DSI_7nm_PHY_PLL_PERF_OPTIMIZE);
		if (pll->slave)
			writel(0x22, pll->slave->phy->pll_base + REG_DSI_7nm_PHY_PLL_PERF_OPTIMIZE);
	}
}

/*
 * Commit the rate-dependent divider settings (decimal + 18-bit fractional
 * feedback divider, lock detector, clock inverters) computed for the
 * current VCO rate.
 */
static void dsi_pll_commit(struct dsi_pll_7nm *pll, struct dsi_pll_config *config)
{
	void __iomem *base = pll->phy->pll_base;

	writel(0x12, base + REG_DSI_7nm_PHY_PLL_CORE_INPUT_OVERRIDE);
	writel(config->decimal_div_start,
	       base + REG_DSI_7nm_PHY_PLL_DECIMAL_DIV_START_1);
	writel(config->frac_div_start & 0xff,
	       base + REG_DSI_7nm_PHY_PLL_FRAC_DIV_START_LOW_1);
	writel((config->frac_div_start & 0xff00) >> 8,
	       base + REG_DSI_7nm_PHY_PLL_FRAC_DIV_START_MID_1);
	writel((config->frac_div_start & 0x30000) >> 16,
	       base + REG_DSI_7nm_PHY_PLL_FRAC_DIV_START_HIGH_1);
	writel(0x40, base + REG_DSI_7nm_PHY_PLL_PLL_LOCKDET_RATE_1);
	writel(0x06, base + REG_DSI_7nm_PHY_PLL_PLL_LOCK_DELAY);
	writel(pll->phy->cphy_mode ?
0x00 : 0x10,
	       base + REG_DSI_7nm_PHY_PLL_CMODE_1);
	writel(config->pll_clock_inverters,
	       base + REG_DSI_7nm_PHY_PLL_CLOCK_INVERTERS);
}

/*
 * clk_ops .set_rate: compute and program everything needed for the new VCO
 * rate (dec/frac dividers, SSC, frequency-independent settings).
 */
static int dsi_pll_7nm_vco_set_rate(struct clk_hw *hw, unsigned long rate,
				    unsigned long parent_rate)
{
	struct dsi_pll_7nm *pll_7nm = to_pll_7nm(hw);
	struct dsi_pll_config config;

	DBG("DSI PLL%d rate=%lu, parent's=%lu", pll_7nm->phy->id, rate,
	    parent_rate);

	pll_7nm->vco_current_rate = rate;

	dsi_pll_setup_config(&config);

	dsi_pll_calc_dec_frac(pll_7nm, &config);

	dsi_pll_calc_ssc(pll_7nm, &config);

	dsi_pll_commit(pll_7nm, &config);

	dsi_pll_config_hzindep_reg(pll_7nm);

	dsi_pll_ssc_commit(pll_7nm, &config);

	/* flush, ensure all register writes are done*/
	wmb();

	return 0;
}

/*
 * Poll COMMON_STATUS_ONE bit 0 for PLL lock; up to 5 ms in 100 us steps.
 * Returns 0 on lock, -ETIMEDOUT otherwise (from readl_poll_timeout_atomic).
 */
static int dsi_pll_7nm_lock_status(struct dsi_pll_7nm *pll)
{
	int rc;
	u32 status = 0;
	u32 const delay_us = 100;
	u32 const timeout_us = 5000;

	rc = readl_poll_timeout_atomic(pll->phy->pll_base +
				       REG_DSI_7nm_PHY_PLL_COMMON_STATUS_ONE,
				       status,
				       ((status & BIT(0)) > 0),
				       delay_us,
				       timeout_us);
	if (rc)
		pr_err("DSI PLL(%d) lock failed, status=0x%08x\n",
		       pll->phy->id, status);

	return rc;
}

/* Power down the PLL bias (CMN_CTRL_0 bit 5) and clear the system muxes */
static void dsi_pll_disable_pll_bias(struct dsi_pll_7nm *pll)
{
	u32 data = readl(pll->phy->base + REG_DSI_7nm_PHY_CMN_CTRL_0);

	writel(0, pll->phy->pll_base + REG_DSI_7nm_PHY_PLL_SYSTEM_MUXES);
	writel(data & ~BIT(5), pll->phy->base + REG_DSI_7nm_PHY_CMN_CTRL_0);
	ndelay(250);
}

/* Power up the PLL bias and select the PLL via SYSTEM_MUXES */
static void dsi_pll_enable_pll_bias(struct dsi_pll_7nm *pll)
{
	u32 data = readl(pll->phy->base + REG_DSI_7nm_PHY_CMN_CTRL_0);

	writel(data | BIT(5), pll->phy->base + REG_DSI_7nm_PHY_CMN_CTRL_0);
	writel(0xc0, pll->phy->pll_base + REG_DSI_7nm_PHY_PLL_SYSTEM_MUXES);
	ndelay(250);
}

/* Gate the global clock output (CLK_CFG1 bit 5) */
static void dsi_pll_disable_global_clk(struct dsi_pll_7nm *pll)
{
	u32 data;

	data = readl(pll->phy->base + REG_DSI_7nm_PHY_CMN_CLK_CFG1);
	writel(data & ~BIT(5), pll->phy->base + REG_DSI_7nm_PHY_CMN_CLK_CFG1);
}

/* Ungate the global clock output (CLK_CFG1 bits 4 and 5) */
static void dsi_pll_enable_global_clk(struct dsi_pll_7nm *pll)
{
	u32 data;

	writel(0x04, pll->phy->base + REG_DSI_7nm_PHY_CMN_CTRL_3);

	data = readl(pll->phy->base + REG_DSI_7nm_PHY_CMN_CLK_CFG1);
	writel(data | BIT(5) | BIT(4), pll->phy->base + REG_DSI_7nm_PHY_CMN_CLK_CFG1);
}

static void dsi_pll_phy_dig_reset(struct dsi_pll_7nm *pll)
{
	/*
	 * Reset the PHY digital domain. This would be needed when
	 * coming out of a CX or analog rail power collapse while
	 * ensuring that the pads maintain LP00 or LP11 state
	 */
	writel(BIT(0), pll->phy->base + REG_DSI_7nm_PHY_CMN_GLBL_DIGTOP_SPARE4);
	wmb(); /* Ensure that the reset is deasserted */
	writel(0, pll->phy->base + REG_DSI_7nm_PHY_CMN_GLBL_DIGTOP_SPARE4);
	wmb(); /* Ensure that the reset is deasserted */
}

/*
 * clk_ops .prepare: power up bias, start the PLL, wait for lock, reset the
 * PHY digital domain and ungate the global clock — mirroring every step on
 * the slave PLL in bonded-DSI mode.
 *
 * NOTE(review): on lock failure the bias enabled above is left on; confirm
 * whether that is intentional (callers may retry) before "fixing".
 */
static int dsi_pll_7nm_vco_prepare(struct clk_hw *hw)
{
	struct dsi_pll_7nm *pll_7nm = to_pll_7nm(hw);
	int rc;

	dsi_pll_enable_pll_bias(pll_7nm);
	if (pll_7nm->slave)
		dsi_pll_enable_pll_bias(pll_7nm->slave);

	/* Start PLL */
	writel(BIT(0), pll_7nm->phy->base + REG_DSI_7nm_PHY_CMN_PLL_CNTRL);

	/*
	 * ensure all PLL configurations are written prior to checking
	 * for PLL lock.
	 */
	wmb();

	/* Check for PLL lock */
	rc = dsi_pll_7nm_lock_status(pll_7nm);
	if (rc) {
		pr_err("PLL(%d) lock failed\n", pll_7nm->phy->id);
		goto error;
	}

	pll_7nm->phy->pll_on = true;

	/*
	 * assert power on reset for PHY digital in case the PLL is
	 * enabled after CX of analog domain power collapse. This needs
	 * to be done before enabling the global clk.
	 */
	dsi_pll_phy_dig_reset(pll_7nm);
	if (pll_7nm->slave)
		dsi_pll_phy_dig_reset(pll_7nm->slave);

	dsi_pll_enable_global_clk(pll_7nm);
	if (pll_7nm->slave)
		dsi_pll_enable_global_clk(pll_7nm->slave);

error:
	return rc;
}

/* Disable one PLL instance: gate the resync buffer, then drop the bias */
static void dsi_pll_disable_sub(struct dsi_pll_7nm *pll)
{
	writel(0, pll->phy->base + REG_DSI_7nm_PHY_CMN_RBUF_CTRL);
	dsi_pll_disable_pll_bias(pll);
}

/* clk_ops .unprepare: gate clocks, stop the PLL, power down (and slave) */
static void dsi_pll_7nm_vco_unprepare(struct clk_hw *hw)
{
	struct dsi_pll_7nm *pll_7nm = to_pll_7nm(hw);

	/*
	 * To avoid any stray glitches while abruptly powering down the PLL
	 * make sure to gate the clock using the clock enable bit before
	 * powering down the PLL
	 */
	dsi_pll_disable_global_clk(pll_7nm);
	writel(0, pll_7nm->phy->base + REG_DSI_7nm_PHY_CMN_PLL_CNTRL);
	dsi_pll_disable_sub(pll_7nm);
	if (pll_7nm->slave) {
		dsi_pll_disable_global_clk(pll_7nm->slave);
		dsi_pll_disable_sub(pll_7nm->slave);
	}
	/* flush, ensure all register writes are done */
	wmb();
	pll_7nm->phy->pll_on = false;
}

/*
 * clk_ops .recalc_rate: reconstruct the VCO rate from the dec/frac divider
 * registers: rate = 2 * fref * (dec + frac / 2^FRAC_BITS). Also caches the
 * value in vco_current_rate for later restore_state.
 */
static unsigned long dsi_pll_7nm_vco_recalc_rate(struct clk_hw *hw,
						 unsigned long parent_rate)
{
	struct dsi_pll_7nm *pll_7nm = to_pll_7nm(hw);
	void __iomem *base = pll_7nm->phy->pll_base;
	u64 ref_clk = VCO_REF_CLK_RATE;
	u64 vco_rate = 0x0;
	u64 multiplier;
	u32 frac;
	u32 dec;
	u64 pll_freq, tmp64;

	dec = readl(base + REG_DSI_7nm_PHY_PLL_DECIMAL_DIV_START_1);
	dec &= 0xff;

	/* 18-bit fraction split over LOW (8) / MID (8) / HIGH (2) registers */
	frac = readl(base + REG_DSI_7nm_PHY_PLL_FRAC_DIV_START_LOW_1);
	frac |= ((readl(base + REG_DSI_7nm_PHY_PLL_FRAC_DIV_START_MID_1) &
		  0xff) << 8);
	frac |= ((readl(base + REG_DSI_7nm_PHY_PLL_FRAC_DIV_START_HIGH_1) &
		  0x3) << 16);

	/*
	 * TODO:
	 *	1. Assumes prescaler is disabled
	 */
	multiplier = 1 << FRAC_BITS;
	pll_freq = dec * (ref_clk * 2);
	tmp64 = (ref_clk * 2 * frac);
	pll_freq += div_u64(tmp64, multiplier);

	vco_rate = pll_freq;
	pll_7nm->vco_current_rate = vco_rate;

	DBG("DSI PLL%d returning vco rate = %lu, dec = %x, frac = %x",
	    pll_7nm->phy->id, (unsigned long)vco_rate, dec, frac);

	return (unsigned long)vco_rate;
}

/* clk_ops .round_rate: clamp the request to the PHY's [min, max] PLL range */
static long dsi_pll_7nm_clk_round_rate(struct clk_hw *hw,
				       unsigned long rate, unsigned long *parent_rate)
{
	struct dsi_pll_7nm *pll_7nm = to_pll_7nm(hw);

	if (rate < pll_7nm->phy->cfg->min_pll_rate)
		return pll_7nm->phy->cfg->min_pll_rate;
	else if (rate > pll_7nm->phy->cfg->max_pll_rate)
		return pll_7nm->phy->cfg->max_pll_rate;
	else
		return rate;
}

static const struct clk_ops clk_ops_dsi_pll_7nm_vco = {
	.round_rate = dsi_pll_7nm_clk_round_rate,
	.set_rate = dsi_pll_7nm_vco_set_rate,
	.recalc_rate = dsi_pll_7nm_vco_recalc_rate,
	.prepare = dsi_pll_7nm_vco_prepare,
	.unprepare = dsi_pll_7nm_vco_unprepare,
};

/*
 * PLL Callbacks
 */

/*
 * Capture the out-div, bit/pix clock dividers and pclk mux selection so they
 * can be reprogrammed after a power collapse by dsi_7nm_pll_restore_state().
 */
static void dsi_7nm_pll_save_state(struct msm_dsi_phy *phy)
{
	struct dsi_pll_7nm *pll_7nm = to_pll_7nm(phy->vco_hw);
	struct pll_7nm_cached_state *cached = &pll_7nm->cached_state;
	void __iomem *phy_base = pll_7nm->phy->base;
	u32 cmn_clk_cfg0, cmn_clk_cfg1;

	cached->pll_out_div = readl(pll_7nm->phy->pll_base +
				    REG_DSI_7nm_PHY_PLL_PLL_OUTDIV_RATE);
	cached->pll_out_div &= 0x3;

	cmn_clk_cfg0 = readl(phy_base + REG_DSI_7nm_PHY_CMN_CLK_CFG0);
	cached->bit_clk_div = cmn_clk_cfg0 & 0xf;
	cached->pix_clk_div = (cmn_clk_cfg0 & 0xf0) >> 4;

	cmn_clk_cfg1 = readl(phy_base + REG_DSI_7nm_PHY_CMN_CLK_CFG1);
	cached->pll_mux = cmn_clk_cfg1 & 0x3;

	DBG("DSI PLL%d outdiv %x bit_clk_div %x pix_clk_div %x pll_mux %x",
	    pll_7nm->phy->id, cached->pll_out_div, cached->bit_clk_div,
	    cached->pix_clk_div, cached->pll_mux);
}

/*
 * Re-apply the divider/mux state captured by dsi_7nm_pll_save_state() and
 * re-run set_rate at the cached VCO rate. Returns 0 or a negative errno if
 * the rate could not be restored.
 */
static int dsi_7nm_pll_restore_state(struct msm_dsi_phy *phy)
{
	struct dsi_pll_7nm *pll_7nm = to_pll_7nm(phy->vco_hw);
	struct pll_7nm_cached_state *cached = &pll_7nm->cached_state;
	void __iomem *phy_base = pll_7nm->phy->base;
	u32 val;
	int ret;

	val = readl(pll_7nm->phy->pll_base + REG_DSI_7nm_PHY_PLL_PLL_OUTDIV_RATE);
	val &= ~0x3;
	val |= cached->pll_out_div;
	writel(val, pll_7nm->phy->pll_base + REG_DSI_7nm_PHY_PLL_PLL_OUTDIV_RATE);

	writel(cached->bit_clk_div | (cached->pix_clk_div << 4),
	       phy_base + REG_DSI_7nm_PHY_CMN_CLK_CFG0);

	val = readl(phy_base + REG_DSI_7nm_PHY_CMN_CLK_CFG1);
	val &= ~0x3;
	val |= cached->pll_mux;
	writel(val, phy_base + REG_DSI_7nm_PHY_CMN_CLK_CFG1);

	ret = dsi_pll_7nm_vco_set_rate(phy->vco_hw,
				       pll_7nm->vco_current_rate,
				       VCO_REF_CLK_RATE);
	if (ret) {
		DRM_DEV_ERROR(&pll_7nm->phy->pdev->dev,
			      "restore vco rate failed. ret=%d\n", ret);
		return ret;
	}

	DBG("DSI PLL%d", pll_7nm->phy->id);

	return 0;
}

/*
 * Select the byte/pixel clock source per usecase: internal PLL for
 * standalone/master, external (the master's) PLL for a bonded slave.
 * For MASTER, also link this PLL to its slave's private data.
 *
 * NOTE(review): the plain writel overwrites all of CLK_CFG1 (not just the
 * source-select bits); verify against other CLK_CFG1 users (pll_mux,
 * global-clk enable bits) whether a read-modify-write is required here.
 */
static int dsi_7nm_set_usecase(struct msm_dsi_phy *phy)
{
	struct dsi_pll_7nm *pll_7nm = to_pll_7nm(phy->vco_hw);
	void __iomem *base = phy->base;
	u32 data = 0x0; /* internal PLL */

	DBG("DSI PLL%d", pll_7nm->phy->id);

	switch (phy->usecase) {
	case MSM_DSI_PHY_STANDALONE:
		break;
	case MSM_DSI_PHY_MASTER:
		pll_7nm->slave = pll_7nm_list[(pll_7nm->phy->id + 1) % DSI_MAX];
		break;
	case MSM_DSI_PHY_SLAVE:
		data = 0x1; /* external PLL */
		break;
	default:
		return -EINVAL;
	}

	/* set PLL src */
	writel(data << 2, base + REG_DSI_7nm_PHY_CMN_CLK_CFG1);

	return 0;
}

/*
 * The post dividers and mux clocks are created using the standard divider and
 * mux API.
 * Unlike the 14nm PHY, the slave PLL doesn't need its dividers/mux
 * state to follow the master PLL's divider/mux state. Therefore, we don't
 * require special clock ops that also configure the slave PLL registers
 */

/*
 * Build the clock tree shown in the diagram at the top of the file:
 * VCO -> out_div -> bit-clk div -> /8 (byte clk), plus the pclk mux and
 * pix-clk divider. All clocks are devm-managed; on error the partially
 * registered hws are released with the device.
 */
static int pll_7nm_register(struct dsi_pll_7nm *pll_7nm, struct clk_hw **provided_clocks)
{
	char clk_name[32];
	struct clk_init_data vco_init = {
		.parent_data = &(const struct clk_parent_data) {
			.fw_name = "ref",
		},
		.num_parents = 1,
		.name = clk_name,
		.flags = CLK_IGNORE_UNUSED,
		.ops = &clk_ops_dsi_pll_7nm_vco,
	};
	struct device *dev = &pll_7nm->phy->pdev->dev;
	struct clk_hw *hw, *pll_out_div, *pll_bit, *pll_by_2_bit;
	struct clk_hw *pll_post_out_div, *phy_pll_out_dsi_parent;
	int ret;

	DBG("DSI%d", pll_7nm->phy->id);

	snprintf(clk_name, sizeof(clk_name), "dsi%dvco_clk", pll_7nm->phy->id);
	pll_7nm->clk_hw.init = &vco_init;

	ret = devm_clk_hw_register(dev, &pll_7nm->clk_hw);
	if (ret)
		return ret;

	snprintf(clk_name, sizeof(clk_name), "dsi%d_pll_out_div_clk", pll_7nm->phy->id);

	/* power-of-two out-div, 2 bits wide, in PLL_OUTDIV_RATE */
	pll_out_div = devm_clk_hw_register_divider_parent_hw(dev, clk_name,
			&pll_7nm->clk_hw, CLK_SET_RATE_PARENT,
			pll_7nm->phy->pll_base +
				REG_DSI_7nm_PHY_PLL_PLL_OUTDIV_RATE,
			0, 2, CLK_DIVIDER_POWER_OF_TWO, NULL);
	if (IS_ERR(pll_out_div)) {
		ret = PTR_ERR(pll_out_div);
		goto fail;
	}

	snprintf(clk_name, sizeof(clk_name), "dsi%d_pll_bit_clk", pll_7nm->phy->id);

	/* BIT CLK: DIV_CTRL_3_0 */
	pll_bit = devm_clk_hw_register_divider_parent_hw(dev, clk_name,
			pll_out_div, CLK_SET_RATE_PARENT,
			pll_7nm->phy->base + REG_DSI_7nm_PHY_CMN_CLK_CFG0,
			0, 4, CLK_DIVIDER_ONE_BASED, &pll_7nm->postdiv_lock);
	if (IS_ERR(pll_bit)) {
		ret = PTR_ERR(pll_bit);
		goto fail;
	}

	snprintf(clk_name, sizeof(clk_name), "dsi%d_phy_pll_out_byteclk", pll_7nm->phy->id);

	/* DSI Byte clock = VCO_CLK / OUT_DIV / BIT_DIV / 8 */
	hw = devm_clk_hw_register_fixed_factor_parent_hw(dev, clk_name,
			pll_bit, CLK_SET_RATE_PARENT, 1,
			pll_7nm->phy->cphy_mode ? 7 : 8);
	if (IS_ERR(hw)) {
		ret = PTR_ERR(hw);
		goto fail;
	}

	provided_clocks[DSI_BYTE_PLL_CLK] = hw;

	snprintf(clk_name, sizeof(clk_name), "dsi%d_pll_by_2_bit_clk", pll_7nm->phy->id);

	pll_by_2_bit = devm_clk_hw_register_fixed_factor_parent_hw(dev,
			clk_name, pll_bit, 0, 1, 2);
	if (IS_ERR(pll_by_2_bit)) {
		ret = PTR_ERR(pll_by_2_bit);
		goto fail;
	}

	snprintf(clk_name, sizeof(clk_name), "dsi%d_pll_post_out_div_clk", pll_7nm->phy->id);

	/* CPHY uses a 7/2 post divider instead of the DPHY /4 */
	if (pll_7nm->phy->cphy_mode)
		pll_post_out_div = devm_clk_hw_register_fixed_factor_parent_hw(
				dev, clk_name, pll_out_div, 0, 2, 7);
	else
		pll_post_out_div = devm_clk_hw_register_fixed_factor_parent_hw(
				dev, clk_name, pll_out_div, 0, 1, 4);
	if (IS_ERR(pll_post_out_div)) {
		ret = PTR_ERR(pll_post_out_div);
		goto fail;
	}

	/* in CPHY mode, pclk_mux will always have post_out_div as parent
	 * don't register a pclk_mux clock and just use post_out_div instead
	 */
	if (pll_7nm->phy->cphy_mode) {
		u32 data;

		data = readl(pll_7nm->phy->base + REG_DSI_7nm_PHY_CMN_CLK_CFG1);
		writel(data | 3, pll_7nm->phy->base + REG_DSI_7nm_PHY_CMN_CLK_CFG1);

		phy_pll_out_dsi_parent = pll_post_out_div;
	} else {
		snprintf(clk_name, sizeof(clk_name), "dsi%d_pclk_mux", pll_7nm->phy->id);

		hw = devm_clk_hw_register_mux_parent_hws(dev, clk_name,
				((const struct clk_hw *[]){
					pll_bit,
					pll_by_2_bit,
				}), 2, 0, pll_7nm->phy->base +
					REG_DSI_7nm_PHY_CMN_CLK_CFG1,
				0, 1, 0, NULL);
		if (IS_ERR(hw)) {
			ret = PTR_ERR(hw);
			goto fail;
		}

		phy_pll_out_dsi_parent = hw;
	}

	snprintf(clk_name, sizeof(clk_name), "dsi%d_phy_pll_out_dsiclk", pll_7nm->phy->id);

	/* PIX CLK DIV : DIV_CTRL_7_4*/
	hw = devm_clk_hw_register_divider_parent_hw(dev, clk_name,
			phy_pll_out_dsi_parent, 0,
			pll_7nm->phy->base + REG_DSI_7nm_PHY_CMN_CLK_CFG0,
			4, 4, CLK_DIVIDER_ONE_BASED, &pll_7nm->postdiv_lock);
	if (IS_ERR(hw)) {
		ret = PTR_ERR(hw);
		goto fail;
	}

	provided_clocks[DSI_PIXEL_PLL_CLK] = hw;

	return 0;

fail:

	return ret;
}

/*
 * Allocate the per-PLL state, register it in the bonded-DSI lookup list and
 * create the clock tree. Called once per PHY at probe time.
 */
static int dsi_pll_7nm_init(struct msm_dsi_phy *phy)
{
	struct platform_device *pdev = phy->pdev;
	struct dsi_pll_7nm *pll_7nm;
	int ret;

	pll_7nm = devm_kzalloc(&pdev->dev, sizeof(*pll_7nm), GFP_KERNEL);
	if (!pll_7nm)
		return -ENOMEM;

	DBG("DSI PLL%d", phy->id);

	pll_7nm_list[phy->id] = pll_7nm;

	spin_lock_init(&pll_7nm->postdiv_lock);

	pll_7nm->phy = phy;

	ret = pll_7nm_register(pll_7nm, phy->provided_clocks->hws);
	if (ret) {
		DRM_DEV_ERROR(&pdev->dev, "failed to register PLL: %d\n", ret);
		return ret;
	}

	phy->vco_hw = &pll_7nm->clk_hw;

	/* TODO: Remove this when we have proper display handover support */
	msm_dsi_phy_pll_save_state(phy);

	return 0;
}

/* Returns non-zero when the PLL start bit (PLL_CNTRL bit 0) is set */
static int dsi_phy_hw_v4_0_is_pll_on(struct msm_dsi_phy *phy)
{
	void __iomem *base = phy->base;
	u32 data = 0;

	data = readl(base + REG_DSI_7nm_PHY_CMN_PLL_CNTRL);
	mb(); /* make sure read happened */

	return (data & BIT(0));
}

/* Toggle LP-RX/CD-RX on the physical lane backing logical data lane 0 */
static void dsi_phy_hw_v4_0_config_lpcdrx(struct msm_dsi_phy *phy, bool enable)
{
	void __iomem *lane_base = phy->lane_base;
	int phy_lane_0 = 0; /* TODO: Support all lane swap configs */

	/*
	 * LPRX and CDRX need to enabled only for physical data lane
	 * corresponding to the logical data lane 0
	 */
	if (enable)
		writel(0x3, lane_base + REG_DSI_7nm_PHY_LN_LPRX_CTRL(phy_lane_0));
	else
		writel(0, lane_base + REG_DSI_7nm_PHY_LN_LPRX_CTRL(phy_lane_0));
}

/*
 * Program the per-lane registers (strength, config, TX drive control) for
 * the four data lanes plus the clock lane (index 4).
 */
static void dsi_phy_hw_v4_0_lane_settings(struct msm_dsi_phy *phy)
{
	int i;
	/* tx_dctrl values: pre-V4.1 table, V4.1+ table selected below */
	const u8 tx_dctrl_0[] = {
0x00, 0x00, 0x00, 0x04, 0x01 }; 818 const u8 tx_dctrl_1[] = { 0x40, 0x40, 0x40, 0x46, 0x41 }; 819 const u8 *tx_dctrl = tx_dctrl_0; 820 void __iomem *lane_base = phy->lane_base; 821 822 if (!(phy->cfg->quirks & DSI_PHY_7NM_QUIRK_PRE_V4_1)) 823 tx_dctrl = tx_dctrl_1; 824 825 /* Strength ctrl settings */ 826 for (i = 0; i < 5; i++) { 827 /* 828 * Disable LPRX and CDRX for all lanes. And later on, it will 829 * be only enabled for the physical data lane corresponding 830 * to the logical data lane 0 831 */ 832 writel(0, lane_base + REG_DSI_7nm_PHY_LN_LPRX_CTRL(i)); 833 writel(0x0, lane_base + REG_DSI_7nm_PHY_LN_PIN_SWAP(i)); 834 } 835 836 dsi_phy_hw_v4_0_config_lpcdrx(phy, true); 837 838 /* other settings */ 839 for (i = 0; i < 5; i++) { 840 writel(0x0, lane_base + REG_DSI_7nm_PHY_LN_CFG0(i)); 841 writel(0x0, lane_base + REG_DSI_7nm_PHY_LN_CFG1(i)); 842 writel(i == 4 ? 0x8a : 0xa, lane_base + REG_DSI_7nm_PHY_LN_CFG2(i)); 843 writel(tx_dctrl[i], lane_base + REG_DSI_7nm_PHY_LN_TX_DCTRL(i)); 844 } 845 } 846 847 static int dsi_7nm_phy_enable(struct msm_dsi_phy *phy, 848 struct msm_dsi_phy_clk_request *clk_req) 849 { 850 int ret; 851 u32 status; 852 u32 const delay_us = 5; 853 u32 const timeout_us = 1000; 854 struct msm_dsi_dphy_timing *timing = &phy->timing; 855 void __iomem *base = phy->base; 856 bool less_than_1500_mhz; 857 u32 vreg_ctrl_0, vreg_ctrl_1, lane_ctrl0; 858 u32 glbl_pemph_ctrl_0; 859 u32 glbl_str_swi_cal_sel_ctrl, glbl_hstx_str_ctrl_0; 860 u32 glbl_rescode_top_ctrl, glbl_rescode_bot_ctrl; 861 u32 data; 862 863 DBG(""); 864 865 if (phy->cphy_mode) 866 ret = msm_dsi_cphy_timing_calc_v4(timing, clk_req); 867 else 868 ret = msm_dsi_dphy_timing_calc_v4(timing, clk_req); 869 if (ret) { 870 DRM_DEV_ERROR(&phy->pdev->dev, 871 "%s: PHY timing calculation failed\n", __func__); 872 return -EINVAL; 873 } 874 875 if (dsi_phy_hw_v4_0_is_pll_on(phy)) 876 pr_warn("PLL turned on before configuring PHY\n"); 877 878 /* Request for REFGEN READY */ 879 if ((phy->cfg->quirks & 
DSI_PHY_7NM_QUIRK_V4_3) || 880 (phy->cfg->quirks & DSI_PHY_7NM_QUIRK_V5_2)) { 881 writel(0x1, phy->base + REG_DSI_7nm_PHY_CMN_GLBL_DIGTOP_SPARE10); 882 udelay(500); 883 } 884 885 /* wait for REFGEN READY */ 886 ret = readl_poll_timeout_atomic(base + REG_DSI_7nm_PHY_CMN_PHY_STATUS, 887 status, (status & BIT(0)), 888 delay_us, timeout_us); 889 if (ret) { 890 pr_err("Ref gen not ready. Aborting\n"); 891 return -EINVAL; 892 } 893 894 /* TODO: CPHY enable path (this is for DPHY only) */ 895 896 /* Alter PHY configurations if data rate less than 1.5GHZ*/ 897 less_than_1500_mhz = (clk_req->bitclk_rate <= 1500000000); 898 899 glbl_str_swi_cal_sel_ctrl = 0x00; 900 if (phy->cphy_mode) { 901 vreg_ctrl_0 = 0x51; 902 vreg_ctrl_1 = 0x55; 903 glbl_hstx_str_ctrl_0 = 0x00; 904 glbl_pemph_ctrl_0 = 0x11; 905 lane_ctrl0 = 0x17; 906 } else { 907 vreg_ctrl_0 = less_than_1500_mhz ? 0x53 : 0x52; 908 vreg_ctrl_1 = 0x5c; 909 glbl_hstx_str_ctrl_0 = 0x88; 910 glbl_pemph_ctrl_0 = 0x00; 911 lane_ctrl0 = 0x1f; 912 } 913 914 if ((phy->cfg->quirks & DSI_PHY_7NM_QUIRK_V5_2)) { 915 if (phy->cphy_mode) { 916 vreg_ctrl_0 = 0x45; 917 vreg_ctrl_1 = 0x41; 918 glbl_rescode_top_ctrl = 0x00; 919 glbl_rescode_bot_ctrl = 0x00; 920 } else { 921 vreg_ctrl_0 = 0x44; 922 vreg_ctrl_1 = 0x19; 923 glbl_rescode_top_ctrl = less_than_1500_mhz ? 0x3c : 0x03; 924 glbl_rescode_bot_ctrl = less_than_1500_mhz ? 0x38 : 0x3c; 925 } 926 } else if ((phy->cfg->quirks & DSI_PHY_7NM_QUIRK_V4_3)) { 927 if (phy->cphy_mode) { 928 glbl_rescode_top_ctrl = less_than_1500_mhz ? 0x3d : 0x01; 929 glbl_rescode_bot_ctrl = less_than_1500_mhz ? 0x38 : 0x3b; 930 } else { 931 glbl_rescode_top_ctrl = less_than_1500_mhz ? 0x3d : 0x01; 932 glbl_rescode_bot_ctrl = less_than_1500_mhz ? 0x38 : 0x39; 933 } 934 } else if (phy->cfg->quirks & DSI_PHY_7NM_QUIRK_V4_2) { 935 if (phy->cphy_mode) { 936 glbl_rescode_top_ctrl = less_than_1500_mhz ? 0x3d : 0x01; 937 glbl_rescode_bot_ctrl = less_than_1500_mhz ? 
0x38 : 0x3b; 938 } else { 939 glbl_rescode_top_ctrl = less_than_1500_mhz ? 0x3c : 0x00; 940 glbl_rescode_bot_ctrl = less_than_1500_mhz ? 0x38 : 0x39; 941 } 942 } else if (phy->cfg->quirks & DSI_PHY_7NM_QUIRK_V4_1) { 943 if (phy->cphy_mode) { 944 glbl_hstx_str_ctrl_0 = 0x88; 945 glbl_rescode_top_ctrl = 0x00; 946 glbl_rescode_bot_ctrl = 0x3c; 947 } else { 948 glbl_rescode_top_ctrl = less_than_1500_mhz ? 0x3d : 0x00; 949 glbl_rescode_bot_ctrl = less_than_1500_mhz ? 0x39 : 0x3c; 950 } 951 } else { 952 if (phy->cphy_mode) { 953 glbl_str_swi_cal_sel_ctrl = 0x03; 954 glbl_hstx_str_ctrl_0 = 0x66; 955 } else { 956 vreg_ctrl_0 = less_than_1500_mhz ? 0x5B : 0x59; 957 glbl_str_swi_cal_sel_ctrl = less_than_1500_mhz ? 0x03 : 0x00; 958 glbl_hstx_str_ctrl_0 = less_than_1500_mhz ? 0x66 : 0x88; 959 } 960 glbl_rescode_top_ctrl = 0x03; 961 glbl_rescode_bot_ctrl = 0x3c; 962 } 963 964 /* de-assert digital and pll power down */ 965 data = BIT(6) | BIT(5); 966 writel(data, base + REG_DSI_7nm_PHY_CMN_CTRL_0); 967 968 /* Assert PLL core reset */ 969 writel(0x00, base + REG_DSI_7nm_PHY_CMN_PLL_CNTRL); 970 971 /* turn off resync FIFO */ 972 writel(0x00, base + REG_DSI_7nm_PHY_CMN_RBUF_CTRL); 973 974 /* program CMN_CTRL_4 for minor_ver 2 chipsets*/ 975 if ((phy->cfg->quirks & DSI_PHY_7NM_QUIRK_V5_2) || 976 (readl(base + REG_DSI_7nm_PHY_CMN_REVISION_ID0) & (0xf0)) == 0x20) 977 writel(0x04, base + REG_DSI_7nm_PHY_CMN_CTRL_4); 978 979 /* Configure PHY lane swap (TODO: we need to calculate this) */ 980 writel(0x21, base + REG_DSI_7nm_PHY_CMN_LANE_CFG0); 981 writel(0x84, base + REG_DSI_7nm_PHY_CMN_LANE_CFG1); 982 983 if (phy->cphy_mode) 984 writel(BIT(6), base + REG_DSI_7nm_PHY_CMN_GLBL_CTRL); 985 986 /* Enable LDO */ 987 writel(vreg_ctrl_0, base + REG_DSI_7nm_PHY_CMN_VREG_CTRL_0); 988 writel(vreg_ctrl_1, base + REG_DSI_7nm_PHY_CMN_VREG_CTRL_1); 989 990 writel(0x00, base + REG_DSI_7nm_PHY_CMN_CTRL_3); 991 writel(glbl_str_swi_cal_sel_ctrl, 992 base + REG_DSI_7nm_PHY_CMN_GLBL_STR_SWI_CAL_SEL_CTRL); 
993 writel(glbl_hstx_str_ctrl_0, 994 base + REG_DSI_7nm_PHY_CMN_GLBL_HSTX_STR_CTRL_0); 995 writel(glbl_pemph_ctrl_0, 996 base + REG_DSI_7nm_PHY_CMN_GLBL_PEMPH_CTRL_0); 997 if (phy->cphy_mode) 998 writel(0x01, base + REG_DSI_7nm_PHY_CMN_GLBL_PEMPH_CTRL_1); 999 writel(glbl_rescode_top_ctrl, 1000 base + REG_DSI_7nm_PHY_CMN_GLBL_RESCODE_OFFSET_TOP_CTRL); 1001 writel(glbl_rescode_bot_ctrl, 1002 base + REG_DSI_7nm_PHY_CMN_GLBL_RESCODE_OFFSET_BOT_CTRL); 1003 writel(0x55, base + REG_DSI_7nm_PHY_CMN_GLBL_LPTX_STR_CTRL); 1004 1005 /* Remove power down from all blocks */ 1006 writel(0x7f, base + REG_DSI_7nm_PHY_CMN_CTRL_0); 1007 1008 writel(lane_ctrl0, base + REG_DSI_7nm_PHY_CMN_LANE_CTRL0); 1009 1010 /* Select full-rate mode */ 1011 if (!phy->cphy_mode) 1012 writel(0x40, base + REG_DSI_7nm_PHY_CMN_CTRL_2); 1013 1014 ret = dsi_7nm_set_usecase(phy); 1015 if (ret) { 1016 DRM_DEV_ERROR(&phy->pdev->dev, "%s: set pll usecase failed, %d\n", 1017 __func__, ret); 1018 return ret; 1019 } 1020 1021 /* DSI PHY timings */ 1022 if (phy->cphy_mode) { 1023 writel(0x00, base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_0); 1024 writel(timing->hs_exit, base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_4); 1025 writel(timing->shared_timings.clk_pre, 1026 base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_5); 1027 writel(timing->clk_prepare, base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_6); 1028 writel(timing->shared_timings.clk_post, 1029 base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_7); 1030 writel(timing->hs_rqst, base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_8); 1031 writel(0x02, base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_9); 1032 writel(0x04, base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_10); 1033 writel(0x00, base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_11); 1034 } else { 1035 writel(0x00, base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_0); 1036 writel(timing->clk_zero, base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_1); 1037 writel(timing->clk_prepare, base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_2); 1038 writel(timing->clk_trail, base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_3); 1039 
		writel(timing->hs_exit, base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_4);
		writel(timing->hs_zero, base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_5);
		writel(timing->hs_prepare, base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_6);
		writel(timing->hs_trail, base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_7);
		writel(timing->hs_rqst, base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_8);
		writel(0x02, base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_9);
		writel(0x04, base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_10);
		writel(0x00, base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_11);
		writel(timing->shared_timings.clk_pre,
		       base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_12);
		writel(timing->shared_timings.clk_post,
		       base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_13);
	}

	/* DSI lane settings */
	dsi_phy_hw_v4_0_lane_settings(phy);

	DBG("DSI%d PHY enabled", phy->id);

	return 0;
}

/*
 * Request (or drop) the continuous clock by toggling BIT(5) | BIT(6) in
 * CMN_LANE_CTRL1 with a read-modify-write.  Returns the state that was
 * actually programmed, i.e. always the requested one on this PHY.
 */
static bool dsi_7nm_set_continuous_clock(struct msm_dsi_phy *phy, bool enable)
{
	void __iomem *base = phy->base;
	u32 data;

	data = readl(base + REG_DSI_7nm_PHY_CMN_LANE_CTRL1);
	if (enable)
		data |= BIT(5) | BIT(6);
	else
		data &= ~(BIT(5) | BIT(6));
	writel(data, base + REG_DSI_7nm_PHY_CMN_LANE_CTRL1);

	return enable;
}

/*
 * Power the PHY down: drop the LP-CD RX configuration, remove the REFGEN
 * vote on V4.3/V5.2 hardware, then disable all lanes and finally all PHY
 * blocks.  Warns (but proceeds) if the PLL is still running.
 */
static void dsi_7nm_phy_disable(struct msm_dsi_phy *phy)
{
	void __iomem *base = phy->base;
	u32 data;

	DBG("");

	if (dsi_phy_hw_v4_0_is_pll_on(phy))
		pr_warn("Turning OFF PHY while PLL is on\n");

	dsi_phy_hw_v4_0_config_lpcdrx(phy, false);

	/* Turn off REFGEN Vote */
	if ((phy->cfg->quirks & DSI_PHY_7NM_QUIRK_V4_3) ||
	    (phy->cfg->quirks & DSI_PHY_7NM_QUIRK_V5_2)) {
		writel(0x0, base + REG_DSI_7nm_PHY_CMN_GLBL_DIGTOP_SPARE10);
		wmb();
		/* Delay to ensure HW removes vote before PHY shut down */
		udelay(2);
	}

	data = readl(base + REG_DSI_7nm_PHY_CMN_CTRL_0);

	/* disable all lanes */
	data &= ~0x1F;
	writel(data, base + REG_DSI_7nm_PHY_CMN_CTRL_0);
	writel(0, base + REG_DSI_7nm_PHY_CMN_LANE_CTRL0);

	/* Turn off all PHY blocks */
	writel(0x00, base + REG_DSI_7nm_PHY_CMN_CTRL_0);
	/* make sure phy is turned off */
	wmb();

	DBG("DSI%d PHY disabled", phy->id);
}

/* Per-variant "vdds" supply loads, referenced by the cfg tables below */
static const struct regulator_bulk_data dsi_phy_7nm_36mA_regulators[] = {
	{ .supply = "vdds", .init_load_uA = 36000 },
};

static const struct regulator_bulk_data dsi_phy_7nm_37750uA_regulators[] = {
	/* NOTE(review): array name says 37750uA but the load is 37550 — confirm which is intended */
	{ .supply = "vdds", .init_load_uA = 37550 },
};

static const struct regulator_bulk_data dsi_phy_7nm_98000uA_regulators[] = {
	{ .supply = "vdds", .init_load_uA = 98000 },
};

static const struct regulator_bulk_data dsi_phy_7nm_97800uA_regulators[] = {
	{ .supply = "vdds", .init_load_uA = 97800 },
};

static const struct regulator_bulk_data dsi_phy_7nm_98400uA_regulators[] = {
	{ .supply = "vdds", .init_load_uA = 98400 },
};

/* 7nm PHY, V4.1: two PHYs at 0xae94400/0xae96400 */
const struct msm_dsi_phy_cfg dsi_phy_7nm_cfgs = {
	.has_phy_lane = true,
	.regulator_data = dsi_phy_7nm_36mA_regulators,
	.num_regulators = ARRAY_SIZE(dsi_phy_7nm_36mA_regulators),
	.ops = {
		.enable = dsi_7nm_phy_enable,
		.disable = dsi_7nm_phy_disable,
		.pll_init = dsi_pll_7nm_init,
		.save_pll_state = dsi_7nm_pll_save_state,
		.restore_pll_state = dsi_7nm_pll_restore_state,
		.set_continuous_clock = dsi_7nm_set_continuous_clock,
	},
	.min_pll_rate = 600000000UL,
#ifdef CONFIG_64BIT
	/* 5 GHz does not fit in a 32-bit unsigned long, hence the split */
	.max_pll_rate = 5000000000UL,
#else
	.max_pll_rate = ULONG_MAX,
#endif
	.io_start = { 0xae94400, 0xae96400 },
	.num_dsi_phy = 2,
	.quirks = DSI_PHY_7NM_QUIRK_V4_1,
};

/* V4.1 variant: single PHY, no regulator data */
const struct msm_dsi_phy_cfg dsi_phy_7nm_6375_cfgs = {
	.has_phy_lane = true,
	.ops = {
		.enable = dsi_7nm_phy_enable,
		.disable = dsi_7nm_phy_disable,
		.pll_init = dsi_pll_7nm_init,
	.save_pll_state = dsi_7nm_pll_save_state,
		.restore_pll_state = dsi_7nm_pll_restore_state,
	},
	.min_pll_rate = 600000000UL,
#ifdef CONFIG_64BIT
	/* NOTE(review): ULL suffix differs from the UL used by sibling tables — same value on 64-bit, confirm intent */
	.max_pll_rate = 5000000000ULL,
#else
	.max_pll_rate = ULONG_MAX,
#endif
	.io_start = { 0x5e94400 },
	.num_dsi_phy = 1,
	.quirks = DSI_PHY_7NM_QUIRK_V4_1,
};

/* Pre-V4.1 hardware: narrower PLL range (fits 32-bit unsigned long, so no CONFIG_64BIT split) */
const struct msm_dsi_phy_cfg dsi_phy_7nm_8150_cfgs = {
	.has_phy_lane = true,
	.regulator_data = dsi_phy_7nm_36mA_regulators,
	.num_regulators = ARRAY_SIZE(dsi_phy_7nm_36mA_regulators),
	.ops = {
		.enable = dsi_7nm_phy_enable,
		.disable = dsi_7nm_phy_disable,
		.pll_init = dsi_pll_7nm_init,
		.save_pll_state = dsi_7nm_pll_save_state,
		.restore_pll_state = dsi_7nm_pll_restore_state,
		.set_continuous_clock = dsi_7nm_set_continuous_clock,
	},
	.min_pll_rate = 1000000000UL,
	.max_pll_rate = 3500000000UL,
	.io_start = { 0xae94400, 0xae96400 },
	.num_dsi_phy = 2,
	.quirks = DSI_PHY_7NM_QUIRK_PRE_V4_1,
};

/* V4.1 variant: single PHY; no set_continuous_clock op */
const struct msm_dsi_phy_cfg dsi_phy_7nm_7280_cfgs = {
	.has_phy_lane = true,
	.regulator_data = dsi_phy_7nm_37750uA_regulators,
	.num_regulators = ARRAY_SIZE(dsi_phy_7nm_37750uA_regulators),
	.ops = {
		.enable = dsi_7nm_phy_enable,
		.disable = dsi_7nm_phy_disable,
		.pll_init = dsi_pll_7nm_init,
		.save_pll_state = dsi_7nm_pll_save_state,
		.restore_pll_state = dsi_7nm_pll_restore_state,
	},
	.min_pll_rate = 600000000UL,
#ifdef CONFIG_64BIT
	.max_pll_rate = 5000000000ULL,
#else
	.max_pll_rate = ULONG_MAX,
#endif
	.io_start = { 0xae94400 },
	.num_dsi_phy = 1,
	.quirks = DSI_PHY_7NM_QUIRK_V4_1,
};

/* 5nm PHY, V4.2 */
const struct msm_dsi_phy_cfg dsi_phy_5nm_8350_cfgs = {
	.has_phy_lane = true,
	.regulator_data = dsi_phy_7nm_37750uA_regulators,
	.num_regulators = ARRAY_SIZE(dsi_phy_7nm_37750uA_regulators),
	.ops = {
		.enable = dsi_7nm_phy_enable,
	.disable = dsi_7nm_phy_disable,
		.pll_init = dsi_pll_7nm_init,
		.save_pll_state = dsi_7nm_pll_save_state,
		.restore_pll_state = dsi_7nm_pll_restore_state,
		.set_continuous_clock = dsi_7nm_set_continuous_clock,
	},
	.min_pll_rate = 600000000UL,
#ifdef CONFIG_64BIT
	.max_pll_rate = 5000000000UL,
#else
	.max_pll_rate = ULONG_MAX,
#endif
	.io_start = { 0xae94400, 0xae96400 },
	.num_dsi_phy = 2,
	.quirks = DSI_PHY_7NM_QUIRK_V4_2,
};

/* 5nm PHY, V4.3 */
const struct msm_dsi_phy_cfg dsi_phy_5nm_8450_cfgs = {
	.has_phy_lane = true,
	.regulator_data = dsi_phy_7nm_97800uA_regulators,
	.num_regulators = ARRAY_SIZE(dsi_phy_7nm_97800uA_regulators),
	.ops = {
		.enable = dsi_7nm_phy_enable,
		.disable = dsi_7nm_phy_disable,
		.pll_init = dsi_pll_7nm_init,
		.save_pll_state = dsi_7nm_pll_save_state,
		.restore_pll_state = dsi_7nm_pll_restore_state,
		.set_continuous_clock = dsi_7nm_set_continuous_clock,
	},
	.min_pll_rate = 600000000UL,
#ifdef CONFIG_64BIT
	.max_pll_rate = 5000000000UL,
#else
	.max_pll_rate = ULONG_MAX,
#endif
	.io_start = { 0xae94400, 0xae96400 },
	.num_dsi_phy = 2,
	.quirks = DSI_PHY_7NM_QUIRK_V4_3,
};

/* 4nm PHY, V5.2: note the different base addresses (0xae95000/0xae97000) */
const struct msm_dsi_phy_cfg dsi_phy_4nm_8550_cfgs = {
	.has_phy_lane = true,
	.regulator_data = dsi_phy_7nm_98400uA_regulators,
	.num_regulators = ARRAY_SIZE(dsi_phy_7nm_98400uA_regulators),
	.ops = {
		.enable = dsi_7nm_phy_enable,
		.disable = dsi_7nm_phy_disable,
		.pll_init = dsi_pll_7nm_init,
		.save_pll_state = dsi_7nm_pll_save_state,
		.restore_pll_state = dsi_7nm_pll_restore_state,
		.set_continuous_clock = dsi_7nm_set_continuous_clock,
	},
	.min_pll_rate = 600000000UL,
#ifdef CONFIG_64BIT
	.max_pll_rate = 5000000000UL,
#else
	.max_pll_rate = ULONG_MAX,
#endif
	.io_start = { 0xae95000, 0xae97000 },
	.num_dsi_phy = 2,
	.quirks =
DSI_PHY_7NM_QUIRK_V5_2,
};

/* 4nm PHY, V5.2: same layout as the 8550 table, different "vdds" load (98000uA) */
const struct msm_dsi_phy_cfg dsi_phy_4nm_8650_cfgs = {
	.has_phy_lane = true,
	.regulator_data = dsi_phy_7nm_98000uA_regulators,
	.num_regulators = ARRAY_SIZE(dsi_phy_7nm_98000uA_regulators),
	.ops = {
		.enable = dsi_7nm_phy_enable,
		.disable = dsi_7nm_phy_disable,
		.pll_init = dsi_pll_7nm_init,
		.save_pll_state = dsi_7nm_pll_save_state,
		.restore_pll_state = dsi_7nm_pll_restore_state,
		.set_continuous_clock = dsi_7nm_set_continuous_clock,
	},
	.min_pll_rate = 600000000UL,
#ifdef CONFIG_64BIT
	.max_pll_rate = 5000000000UL,
#else
	.max_pll_rate = ULONG_MAX,
#endif
	.io_start = { 0xae95000, 0xae97000 },
	.num_dsi_phy = 2,
	.quirks = DSI_PHY_7NM_QUIRK_V5_2,
};