/*
 * SPDX-License-Identifier: GPL-2.0
 * Copyright (c) 2018, The Linux Foundation
 */

#include <dt-bindings/clock/qcom,dsi-phy-28nm.h>
#include <linux/bitfield.h>
#include <linux/clk.h>
#include <linux/clk-provider.h>
#include <linux/iopoll.h>

#include "dsi_phy.h"
#include "dsi.xml.h"
#include "dsi_phy_7nm.xml.h"

/*
 * DSI PLL 7nm - clock diagram (eg: DSI0): TODO: update CPHY diagram
 *
 *           dsi0_pll_out_div_clk  dsi0_pll_bit_clk
 *                     |                |
 *                     |                |
 *                 +---------+ | +----------+ | +----+
 *  dsi0vco_clk ---| out_div |--o--| divl_3_0 |--o--| /8 |-- dsi0_phy_pll_out_byteclk
 *                 +---------+ | +----------+ | +----+
 *                              |                |
 *                              |                |         dsi0_pll_by_2_bit_clk
 *                              |                |          |
 *                              |                |  +----+  | |\  dsi0_pclk_mux
 *                              |                |--| /2 |--o--| \   |
 *                              |                |  +----+     |  \  |  +---------+
 *                              |                --------------|  |--o--| div_7_4 |-- dsi0_phy_pll_out_dsiclk
 *                              |------------------------------|  /     +---------+
 *                              |          +-----+             | /
 *                              -----------| /4? |--o----------|/
 *                                         +-----+  |           |
 *                                                  |           |dsiclk_sel
 *                                                  |
 *                                                  dsi0_pll_post_out_div_clk
 */

#define VCO_REF_CLK_RATE		19200000
#define FRAC_BITS			18

/* Hardware is pre V4.1 */
#define DSI_PHY_7NM_QUIRK_PRE_V4_1	BIT(0)
/* Hardware is V4.1 */
#define DSI_PHY_7NM_QUIRK_V4_1		BIT(1)
/* Hardware is V4.2 */
#define DSI_PHY_7NM_QUIRK_V4_2		BIT(2)
/* Hardware is V4.3 */
#define DSI_PHY_7NM_QUIRK_V4_3		BIT(3)
/* Hardware is V5.2 */
#define DSI_PHY_7NM_QUIRK_V5_2		BIT(4)
/* Hardware is V7.0 */
#define DSI_PHY_7NM_QUIRK_V7_0		BIT(5)

struct dsi_pll_config {
	bool enable_ssc;
	bool ssc_center;
	u32 ssc_freq;
	u32 ssc_offset;
	u32 ssc_adj_per;

	/* out */
	u32 decimal_div_start;
	u32 frac_div_start;
	u32 pll_clock_inverters;
	u32 ssc_stepsize;
	u32 ssc_div_per;
};

struct pll_7nm_cached_state {
	unsigned long vco_rate;
	u8 bit_clk_div;
	u8 pix_clk_div;
	u8 pll_out_div;
	u8 pll_mux;
};

struct dsi_pll_7nm {
	struct clk_hw clk_hw;

	struct msm_dsi_phy *phy;

	u64 vco_current_rate;

	/* protects REG_DSI_7nm_PHY_CMN_CLK_CFG0 register */
	spinlock_t postdiv_lock;

	/* protects REG_DSI_7nm_PHY_CMN_CLK_CFG1 register */
	spinlock_t pclk_mux_lock;

	/*
	 * protects REG_DSI_7nm_PHY_CMN_CTRL_0 register and pll_enable_cnt
	 * member
	 */
	spinlock_t pll_enable_lock;
	int pll_enable_cnt;

	struct pll_7nm_cached_state cached_state;

	struct dsi_pll_7nm *slave;
};

#define to_pll_7nm(x)	container_of(x, struct dsi_pll_7nm, clk_hw)

/*
 * Global list of private DSI PLL struct pointers. We need this for bonded DSI
 * mode, where the master PLL's clk_ops needs to access the slave's private data
 */
static struct dsi_pll_7nm *pll_7nm_list[DSI_MAX];

static void dsi_pll_enable_pll_bias(struct dsi_pll_7nm *pll);
static void dsi_pll_disable_pll_bias(struct dsi_pll_7nm *pll);

static void dsi_pll_setup_config(struct dsi_pll_config *config)
{
	config->ssc_freq = 31500;
	config->ssc_offset = 4800;
	config->ssc_adj_per = 2;

	/* TODO: ssc enable */
	config->enable_ssc = false;
	config->ssc_center = 0;
}

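/*
 * The VCO runs at 2 * ref * (decimal_div_start + frac_div_start / 2^18),
 * i.e. the feedback divider has an integer part and an 18-bit fractional
 * part. For example, a 1.5 GHz VCO with the 19.2 MHz reference gives
 * decimal_div_start = 39 and frac_div_start = 0x4000.
 */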
static void dsi_pll_calc_dec_frac(struct dsi_pll_7nm *pll, struct dsi_pll_config *config)
{
	u64 fref = VCO_REF_CLK_RATE;
	u64 pll_freq;
	u64 divider;
	u64 dec, dec_multiple;
	u32 frac;
	u64 multiplier;

	pll_freq = pll->vco_current_rate;

	divider = fref * 2;

	multiplier = 1 << FRAC_BITS;
	dec_multiple = div_u64(pll_freq * multiplier, divider);
	dec = div_u64_rem(dec_multiple, multiplier, &frac);

	if (pll->phy->cfg->quirks & DSI_PHY_7NM_QUIRK_PRE_V4_1) {
		config->pll_clock_inverters = 0x28;
	} else if ((pll->phy->cfg->quirks & DSI_PHY_7NM_QUIRK_V7_0)) {
		if (pll_freq < 163000000ULL)
			config->pll_clock_inverters = 0xa0;
		else if (pll_freq < 175000000ULL)
			config->pll_clock_inverters = 0x20;
		else if (pll_freq < 325000000ULL)
			config->pll_clock_inverters = 0xa0;
		else if (pll_freq < 350000000ULL)
			config->pll_clock_inverters = 0x20;
		else if (pll_freq < 650000000ULL)
			config->pll_clock_inverters = 0xa0;
		else if (pll_freq < 700000000ULL)
			config->pll_clock_inverters = 0x20;
		else if (pll_freq < 1300000000ULL)
			config->pll_clock_inverters = 0xa0;
		else if (pll_freq < 2500000000ULL)
			config->pll_clock_inverters = 0x20;
		else if (pll_freq < 4000000000ULL)
			config->pll_clock_inverters = 0x00;
		else
			config->pll_clock_inverters = 0x40;
	} else if ((pll->phy->cfg->quirks & DSI_PHY_7NM_QUIRK_V5_2)) {
		if (pll_freq <= 1300000000ULL)
			config->pll_clock_inverters = 0xa0;
		else if (pll_freq <= 2500000000ULL)
			config->pll_clock_inverters = 0x20;
		else if (pll_freq <= 4000000000ULL)
			config->pll_clock_inverters = 0x00;
		else
			config->pll_clock_inverters = 0x40;
	} else if (pll->phy->cfg->quirks & DSI_PHY_7NM_QUIRK_V4_1) {
		if (pll_freq <= 1000000000ULL)
			config->pll_clock_inverters = 0xa0;
		else if (pll_freq <= 2500000000ULL)
			config->pll_clock_inverters = 0x20;
		else if (pll_freq <= 3020000000ULL)
			config->pll_clock_inverters = 0x00;
		else
			config->pll_clock_inverters = 0x40;
	} else {
		/* 4.2, 4.3 */
		if (pll_freq <= 1000000000ULL)
			config->pll_clock_inverters = 0xa0;
		else if (pll_freq <= 2500000000ULL)
			config->pll_clock_inverters = 0x20;
		else if (pll_freq <= 3500000000ULL)
			config->pll_clock_inverters = 0x00;
		else
			config->pll_clock_inverters = 0x40;
	}

	config->decimal_div_start = dec;
	config->frac_div_start = frac;
}

#define SSC_CENTER		BIT(0)
#define SSC_EN			BIT(1)

static void dsi_pll_calc_ssc(struct dsi_pll_7nm *pll, struct dsi_pll_config *config)
{
	u32 ssc_per;
	u32 ssc_mod;
	u64 ssc_step_size;
	u64 frac;

	if (!config->enable_ssc) {
		DBG("SSC not enabled\n");
		return;
	}

	ssc_per = DIV_ROUND_CLOSEST(VCO_REF_CLK_RATE, config->ssc_freq) / 2 - 1;
	ssc_mod = (ssc_per + 1) % (config->ssc_adj_per + 1);
	ssc_per -= ssc_mod;

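	/*
	 * ssc_offset is in ppm of the VCO rate. The step size is the change
	 * applied to the 18-bit fractional divide word on every adjustment
	 * period: (dec * 2^18 + frac) * ssc_offset * (adj_per + 1) /
	 * ((ssc_per + 1) * 10^6).
	 */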
	frac = config->frac_div_start;
	ssc_step_size = config->decimal_div_start;
	ssc_step_size *= (1 << FRAC_BITS);
	ssc_step_size += frac;
	ssc_step_size *= config->ssc_offset;
	ssc_step_size *= (config->ssc_adj_per + 1);
	ssc_step_size = div_u64(ssc_step_size, (ssc_per + 1));
	ssc_step_size = DIV_ROUND_CLOSEST_ULL(ssc_step_size, 1000000);

	config->ssc_div_per = ssc_per;
	config->ssc_stepsize = ssc_step_size;

	pr_debug("SSC: Dec:%d, frac:%llu, frac_bits:%d\n",
		 config->decimal_div_start, frac, FRAC_BITS);
	pr_debug("SSC: div_per:0x%X, stepsize:0x%X, adjper:0x%X\n",
		 ssc_per, (u32)ssc_step_size, config->ssc_adj_per);
}

static void dsi_pll_ssc_commit(struct dsi_pll_7nm *pll, struct dsi_pll_config *config)
{
	void __iomem *base = pll->phy->pll_base;

	if (config->enable_ssc) {
		pr_debug("SSC is enabled\n");

		writel(config->ssc_stepsize & 0xff,
		       base + REG_DSI_7nm_PHY_PLL_SSC_STEPSIZE_LOW_1);
		writel(config->ssc_stepsize >> 8,
		       base + REG_DSI_7nm_PHY_PLL_SSC_STEPSIZE_HIGH_1);
		writel(config->ssc_div_per & 0xff,
		       base + REG_DSI_7nm_PHY_PLL_SSC_DIV_PER_LOW_1);
		writel(config->ssc_div_per >> 8,
		       base + REG_DSI_7nm_PHY_PLL_SSC_DIV_PER_HIGH_1);
		writel(config->ssc_adj_per & 0xff,
		       base + REG_DSI_7nm_PHY_PLL_SSC_ADJPER_LOW_1);
		writel(config->ssc_adj_per >> 8,
		       base + REG_DSI_7nm_PHY_PLL_SSC_ADJPER_HIGH_1);
		writel(SSC_EN | (config->ssc_center ? SSC_CENTER : 0),
		       base + REG_DSI_7nm_PHY_PLL_SSC_CONTROL);
	}
}

static void dsi_pll_config_hzindep_reg(struct dsi_pll_7nm *pll)
{
	void __iomem *base = pll->phy->pll_base;
	u8 analog_controls_five_1 = 0x01, vco_config_1 = 0x00;

	if (!(pll->phy->cfg->quirks & DSI_PHY_7NM_QUIRK_PRE_V4_1))
		if (pll->vco_current_rate >= 3100000000ULL)
			analog_controls_five_1 = 0x03;

	if (pll->phy->cfg->quirks & DSI_PHY_7NM_QUIRK_V4_1) {
		if (pll->vco_current_rate < 1520000000ULL)
			vco_config_1 = 0x08;
		else if (pll->vco_current_rate < 2990000000ULL)
			vco_config_1 = 0x01;
	}

	if ((pll->phy->cfg->quirks & DSI_PHY_7NM_QUIRK_V4_2) ||
	    (pll->phy->cfg->quirks & DSI_PHY_7NM_QUIRK_V4_3)) {
		if (pll->vco_current_rate < 1520000000ULL)
			vco_config_1 = 0x08;
		else if (pll->vco_current_rate >= 2990000000ULL)
			vco_config_1 = 0x01;
	}

	if ((pll->phy->cfg->quirks & DSI_PHY_7NM_QUIRK_V5_2) ||
	    (pll->phy->cfg->quirks & DSI_PHY_7NM_QUIRK_V7_0)) {
		if (pll->vco_current_rate < 1557000000ULL)
			vco_config_1 = 0x08;
		else
			vco_config_1 = 0x01;
	}

	writel(analog_controls_five_1, base + REG_DSI_7nm_PHY_PLL_ANALOG_CONTROLS_FIVE_1);
	writel(vco_config_1, base + REG_DSI_7nm_PHY_PLL_VCO_CONFIG_1);
	writel(0x01, base + REG_DSI_7nm_PHY_PLL_ANALOG_CONTROLS_FIVE);
	writel(0x03, base + REG_DSI_7nm_PHY_PLL_ANALOG_CONTROLS_TWO);
	writel(0x00, base + REG_DSI_7nm_PHY_PLL_ANALOG_CONTROLS_THREE);
	writel(0x00, base + REG_DSI_7nm_PHY_PLL_DSM_DIVIDER);
	writel(0x4e, base + REG_DSI_7nm_PHY_PLL_FEEDBACK_DIVIDER);
	writel(0x40, base + REG_DSI_7nm_PHY_PLL_CALIBRATION_SETTINGS);
	writel(0xba, base + REG_DSI_7nm_PHY_PLL_BAND_SEL_CAL_SETTINGS_THREE);
	writel(0x0c, base + REG_DSI_7nm_PHY_PLL_FREQ_DETECT_SETTINGS_ONE);
	writel(0x00, base + REG_DSI_7nm_PHY_PLL_OUTDIV);
	writel(0x00, base + REG_DSI_7nm_PHY_PLL_CORE_OVERRIDE);
	writel(0x08, base + REG_DSI_7nm_PHY_PLL_PLL_DIGITAL_TIMERS_TWO);
	writel(0x0a, base + REG_DSI_7nm_PHY_PLL_PLL_PROP_GAIN_RATE_1);
	writel(0xc0, base + REG_DSI_7nm_PHY_PLL_PLL_BAND_SEL_RATE_1);
	writel(0x84, base + REG_DSI_7nm_PHY_PLL_PLL_INT_GAIN_IFILT_BAND_1);
	writel(0x82, base + REG_DSI_7nm_PHY_PLL_PLL_INT_GAIN_IFILT_BAND_1);
	writel(0x4c, base + REG_DSI_7nm_PHY_PLL_PLL_FL_INT_GAIN_PFILT_BAND_1);
	writel(0x80, base + REG_DSI_7nm_PHY_PLL_PLL_LOCK_OVERRIDE);
	writel(0x29, base + REG_DSI_7nm_PHY_PLL_PFILT);
	writel(0x2f, base + REG_DSI_7nm_PHY_PLL_PFILT);
	writel(0x2a, base + REG_DSI_7nm_PHY_PLL_IFILT);
	writel(!(pll->phy->cfg->quirks & DSI_PHY_7NM_QUIRK_PRE_V4_1) ? 0x3f : 0x22,
	       base + REG_DSI_7nm_PHY_PLL_IFILT);

	if (!(pll->phy->cfg->quirks & DSI_PHY_7NM_QUIRK_PRE_V4_1)) {
		writel(0x22, base + REG_DSI_7nm_PHY_PLL_PERF_OPTIMIZE);
		if (pll->slave)
			writel(0x22, pll->slave->phy->pll_base + REG_DSI_7nm_PHY_PLL_PERF_OPTIMIZE);
	}
}

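/*
 * Program the rate-dependent divide words: the integer part goes into
 * DECIMAL_DIV_START_1 and the 18-bit fractional part is split across the
 * LOW (bits 7:0), MID (bits 15:8) and HIGH (bits 17:16) FRAC_DIV_START
 * registers.
 */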
static void dsi_pll_commit(struct dsi_pll_7nm *pll, struct dsi_pll_config *config)
{
	void __iomem *base = pll->phy->pll_base;

	writel(0x12, base + REG_DSI_7nm_PHY_PLL_CORE_INPUT_OVERRIDE);
	writel(config->decimal_div_start,
	       base + REG_DSI_7nm_PHY_PLL_DECIMAL_DIV_START_1);
	writel(config->frac_div_start & 0xff,
	       base + REG_DSI_7nm_PHY_PLL_FRAC_DIV_START_LOW_1);
	writel((config->frac_div_start & 0xff00) >> 8,
	       base + REG_DSI_7nm_PHY_PLL_FRAC_DIV_START_MID_1);
	writel((config->frac_div_start & 0x30000) >> 16,
	       base + REG_DSI_7nm_PHY_PLL_FRAC_DIV_START_HIGH_1);
	writel(0x40, base + REG_DSI_7nm_PHY_PLL_PLL_LOCKDET_RATE_1);
	writel(0x06, base + REG_DSI_7nm_PHY_PLL_PLL_LOCK_DELAY);
	writel(pll->phy->cphy_mode ? 0x00 : 0x10,
	       base + REG_DSI_7nm_PHY_PLL_CMODE_1);
	writel(config->pll_clock_inverters,
	       base + REG_DSI_7nm_PHY_PLL_CLOCK_INVERTERS_1);
}

static int dsi_pll_7nm_vco_set_rate(struct clk_hw *hw, unsigned long rate,
				    unsigned long parent_rate)
{
	struct dsi_pll_7nm *pll_7nm = to_pll_7nm(hw);
	struct dsi_pll_config config;

	dsi_pll_enable_pll_bias(pll_7nm);
	DBG("DSI PLL%d rate=%lu, parent's=%lu", pll_7nm->phy->id, rate,
	    parent_rate);

	pll_7nm->vco_current_rate = rate;

	dsi_pll_setup_config(&config);

	dsi_pll_calc_dec_frac(pll_7nm, &config);

	dsi_pll_calc_ssc(pll_7nm, &config);

	dsi_pll_commit(pll_7nm, &config);

	dsi_pll_config_hzindep_reg(pll_7nm);

	dsi_pll_ssc_commit(pll_7nm, &config);

	dsi_pll_disable_pll_bias(pll_7nm);
	/* flush, ensure all register writes are done */
	wmb();

	return 0;
}

static int dsi_pll_7nm_lock_status(struct dsi_pll_7nm *pll)
{
	int rc;
	u32 status = 0;
	u32 const delay_us = 100;
	u32 const timeout_us = 5000;

	rc = readl_poll_timeout_atomic(pll->phy->pll_base +
				       REG_DSI_7nm_PHY_PLL_COMMON_STATUS_ONE,
				       status,
				       ((status & BIT(0)) > 0),
				       delay_us,
				       timeout_us);
	if (rc)
		pr_err("DSI PLL(%d) lock failed, status=0x%08x\n",
		       pll->phy->id, status);

	return rc;
}

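/*
 * PLL bias enable/disable is reference counted: set_rate, recalc_rate and
 * save_state each need the bias up while they touch PLL registers, and
 * prepare/unprepare keeps it up while the PLL is running. Only the first
 * enable and the last disable actually write CMN_CTRL_0 / SYSTEM_MUXES.
 */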
static void dsi_pll_disable_pll_bias(struct dsi_pll_7nm *pll)
{
	unsigned long flags;
	u32 data;

	spin_lock_irqsave(&pll->pll_enable_lock, flags);
	--pll->pll_enable_cnt;
	if (pll->pll_enable_cnt < 0) {
		spin_unlock_irqrestore(&pll->pll_enable_lock, flags);
		DRM_DEV_ERROR_RATELIMITED(&pll->phy->pdev->dev,
					  "bug: imbalance in disabling PLL bias\n");
		return;
	} else if (pll->pll_enable_cnt > 0) {
		spin_unlock_irqrestore(&pll->pll_enable_lock, flags);
		return;
	} /* else: == 0 */

	data = readl(pll->phy->base + REG_DSI_7nm_PHY_CMN_CTRL_0);
	data &= ~DSI_7nm_PHY_CMN_CTRL_0_PLL_SHUTDOWNB;
	writel(0, pll->phy->pll_base + REG_DSI_7nm_PHY_PLL_SYSTEM_MUXES);
	writel(data, pll->phy->base + REG_DSI_7nm_PHY_CMN_CTRL_0);
	spin_unlock_irqrestore(&pll->pll_enable_lock, flags);
	ndelay(250);
}

static void dsi_pll_enable_pll_bias(struct dsi_pll_7nm *pll)
{
	unsigned long flags;
	u32 data;

	spin_lock_irqsave(&pll->pll_enable_lock, flags);
	if (pll->pll_enable_cnt++) {
		spin_unlock_irqrestore(&pll->pll_enable_lock, flags);
		WARN_ON(pll->pll_enable_cnt == INT_MAX);
		return;
	}

	data = readl(pll->phy->base + REG_DSI_7nm_PHY_CMN_CTRL_0);
	data |= DSI_7nm_PHY_CMN_CTRL_0_PLL_SHUTDOWNB;
	writel(data, pll->phy->base + REG_DSI_7nm_PHY_CMN_CTRL_0);

	writel(0xc0, pll->phy->pll_base + REG_DSI_7nm_PHY_PLL_SYSTEM_MUXES);
	spin_unlock_irqrestore(&pll->pll_enable_lock, flags);
	ndelay(250);
}

static void dsi_pll_cmn_clk_cfg0_write(struct dsi_pll_7nm *pll, u32 val)
{
	unsigned long flags;

	spin_lock_irqsave(&pll->postdiv_lock, flags);
	writel(val, pll->phy->base + REG_DSI_7nm_PHY_CMN_CLK_CFG0);
	spin_unlock_irqrestore(&pll->postdiv_lock, flags);
}

static void dsi_pll_cmn_clk_cfg1_update(struct dsi_pll_7nm *pll, u32 mask,
					u32 val)
{
	unsigned long flags;
	u32 data;

	spin_lock_irqsave(&pll->pclk_mux_lock, flags);
	data = readl(pll->phy->base + REG_DSI_7nm_PHY_CMN_CLK_CFG1);
	data &= ~mask;
	data |= val & mask;

	writel(data, pll->phy->base + REG_DSI_7nm_PHY_CMN_CLK_CFG1);
	spin_unlock_irqrestore(&pll->pclk_mux_lock, flags);
}

static void dsi_pll_disable_global_clk(struct dsi_pll_7nm *pll)
{
	dsi_pll_cmn_clk_cfg1_update(pll, DSI_7nm_PHY_CMN_CLK_CFG1_CLK_EN, 0);
}

static void dsi_pll_enable_global_clk(struct dsi_pll_7nm *pll)
{
	u32 cfg_1 = DSI_7nm_PHY_CMN_CLK_CFG1_CLK_EN | DSI_7nm_PHY_CMN_CLK_CFG1_CLK_EN_SEL;

	writel(0x04, pll->phy->base + REG_DSI_7nm_PHY_CMN_CTRL_3);
	dsi_pll_cmn_clk_cfg1_update(pll, cfg_1, cfg_1);
}

static void dsi_pll_phy_dig_reset(struct dsi_pll_7nm *pll)
{
	/*
	 * Reset the PHY digital domain. This would be needed when
	 * coming out of a CX or analog rail power collapse while
	 * ensuring that the pads maintain LP00 or LP11 state
	 */
	writel(BIT(0), pll->phy->base + REG_DSI_7nm_PHY_CMN_GLBL_DIGTOP_SPARE4);
	wmb(); /* Ensure that the reset is asserted */
	writel(0, pll->phy->base + REG_DSI_7nm_PHY_CMN_GLBL_DIGTOP_SPARE4);
	wmb(); /* Ensure that the reset is deasserted */
}

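/*
 * Power-on sequence for the VCO: raise the PLL bias, start the PLL and poll
 * for lock, then reset the PHY digital block, enable the global clock and
 * finally release the resync FIFO. In bonded DSI mode the bias, digital
 * reset, global clock and resync FIFO steps are mirrored on the slave PHY,
 * which runs off the master's PLL.
 */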
static int dsi_pll_7nm_vco_prepare(struct clk_hw *hw)
{
	struct dsi_pll_7nm *pll_7nm = to_pll_7nm(hw);
	int rc;

	dsi_pll_enable_pll_bias(pll_7nm);
	if (pll_7nm->slave)
		dsi_pll_enable_pll_bias(pll_7nm->slave);

	/* Start PLL */
	writel(BIT(0), pll_7nm->phy->base + REG_DSI_7nm_PHY_CMN_PLL_CNTRL);

	/*
	 * ensure all PLL configurations are written prior to checking
	 * for PLL lock.
	 */
	wmb();

	/* Check for PLL lock */
	rc = dsi_pll_7nm_lock_status(pll_7nm);
	if (rc) {
		pr_err("PLL(%d) lock failed\n", pll_7nm->phy->id);
		goto error;
	}

	pll_7nm->phy->pll_on = true;

	/*
	 * assert power on reset for PHY digital in case the PLL is
	 * enabled after CX or analog domain power collapse. This needs
	 * to be done before enabling the global clk.
	 */
	dsi_pll_phy_dig_reset(pll_7nm);
	if (pll_7nm->slave)
		dsi_pll_phy_dig_reset(pll_7nm->slave);

	dsi_pll_enable_global_clk(pll_7nm);
	if (pll_7nm->slave)
		dsi_pll_enable_global_clk(pll_7nm->slave);

	writel(0x1, pll_7nm->phy->base + REG_DSI_7nm_PHY_CMN_RBUF_CTRL);
	if (pll_7nm->slave)
		writel(0x1, pll_7nm->slave->phy->base + REG_DSI_7nm_PHY_CMN_RBUF_CTRL);

error:
	return rc;
}

static void dsi_pll_disable_sub(struct dsi_pll_7nm *pll)
{
	writel(0, pll->phy->base + REG_DSI_7nm_PHY_CMN_RBUF_CTRL);
	dsi_pll_disable_pll_bias(pll);
}

static void dsi_pll_7nm_vco_unprepare(struct clk_hw *hw)
{
	struct dsi_pll_7nm *pll_7nm = to_pll_7nm(hw);

	/*
	 * To avoid any stray glitches while abruptly powering down the PLL
	 * make sure to gate the clock using the clock enable bit before
	 * powering down the PLL
	 */
	dsi_pll_disable_global_clk(pll_7nm);
	writel(0, pll_7nm->phy->base + REG_DSI_7nm_PHY_CMN_PLL_CNTRL);
	dsi_pll_disable_sub(pll_7nm);
	if (pll_7nm->slave) {
		dsi_pll_disable_global_clk(pll_7nm->slave);
		dsi_pll_disable_sub(pll_7nm->slave);
	}
	/* flush, ensure all register writes are done */
	wmb();
	pll_7nm->phy->pll_on = false;
}

static unsigned long dsi_pll_7nm_vco_recalc_rate(struct clk_hw *hw,
						 unsigned long parent_rate)
{
	struct dsi_pll_7nm *pll_7nm = to_pll_7nm(hw);
	void __iomem *base = pll_7nm->phy->pll_base;
	u64 ref_clk = VCO_REF_CLK_RATE;
	u64 vco_rate = 0x0;
	u64 multiplier;
	u32 frac;
	u32 dec;
	u64 pll_freq, tmp64;

	dsi_pll_enable_pll_bias(pll_7nm);
	dec = readl(base + REG_DSI_7nm_PHY_PLL_DECIMAL_DIV_START_1);
	dec &= 0xff;

	frac = readl(base + REG_DSI_7nm_PHY_PLL_FRAC_DIV_START_LOW_1);
	frac |= ((readl(base + REG_DSI_7nm_PHY_PLL_FRAC_DIV_START_MID_1) &
		  0xff) << 8);
	frac |= ((readl(base + REG_DSI_7nm_PHY_PLL_FRAC_DIV_START_HIGH_1) &
		  0x3) << 16);

	/*
	 * TODO:
	 *	1. Assumes prescaler is disabled
	 */
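	/* Invert dsi_pll_calc_dec_frac(): VCO = 2 * ref * (dec + frac / 2^18) */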
	multiplier = 1 << FRAC_BITS;
	pll_freq = dec * (ref_clk * 2);
	tmp64 = (ref_clk * 2 * frac);
	pll_freq += div_u64(tmp64, multiplier);

	vco_rate = pll_freq;
	pll_7nm->vco_current_rate = vco_rate;

	DBG("DSI PLL%d returning vco rate = %lu, dec = %x, frac = %x",
	    pll_7nm->phy->id, (unsigned long)vco_rate, dec, frac);

	dsi_pll_disable_pll_bias(pll_7nm);

	return (unsigned long)vco_rate;
}

static int dsi_pll_7nm_clk_determine_rate(struct clk_hw *hw,
					  struct clk_rate_request *req)
{
	struct dsi_pll_7nm *pll_7nm = to_pll_7nm(hw);

	req->rate = clamp_t(unsigned long, req->rate,
			    pll_7nm->phy->cfg->min_pll_rate, pll_7nm->phy->cfg->max_pll_rate);

	return 0;
}

static const struct clk_ops clk_ops_dsi_pll_7nm_vco = {
	.determine_rate = dsi_pll_7nm_clk_determine_rate,
	.set_rate = dsi_pll_7nm_vco_set_rate,
	.recalc_rate = dsi_pll_7nm_vco_recalc_rate,
	.prepare = dsi_pll_7nm_vco_prepare,
	.unprepare = dsi_pll_7nm_vco_unprepare,
};

/*
 * PLL Callbacks
 */

static void dsi_7nm_pll_save_state(struct msm_dsi_phy *phy)
{
	struct dsi_pll_7nm *pll_7nm = to_pll_7nm(phy->vco_hw);
	struct pll_7nm_cached_state *cached = &pll_7nm->cached_state;
	void __iomem *phy_base = pll_7nm->phy->base;
	u32 cmn_clk_cfg0, cmn_clk_cfg1;

	dsi_pll_enable_pll_bias(pll_7nm);
	cached->pll_out_div = readl(pll_7nm->phy->pll_base +
				    REG_DSI_7nm_PHY_PLL_PLL_OUTDIV_RATE);
	cached->pll_out_div &= 0x3;

	cmn_clk_cfg0 = readl(phy_base + REG_DSI_7nm_PHY_CMN_CLK_CFG0);
	cached->bit_clk_div = FIELD_GET(DSI_7nm_PHY_CMN_CLK_CFG0_DIV_CTRL_3_0__MASK, cmn_clk_cfg0);
	cached->pix_clk_div = FIELD_GET(DSI_7nm_PHY_CMN_CLK_CFG0_DIV_CTRL_7_4__MASK, cmn_clk_cfg0);

	cmn_clk_cfg1 = readl(phy_base + REG_DSI_7nm_PHY_CMN_CLK_CFG1);
	cached->pll_mux = FIELD_GET(DSI_7nm_PHY_CMN_CLK_CFG1_DSICLK_SEL__MASK, cmn_clk_cfg1);

	dsi_pll_disable_pll_bias(pll_7nm);
	DBG("DSI PLL%d outdiv %x bit_clk_div %x pix_clk_div %x pll_mux %x",
	    pll_7nm->phy->id, cached->pll_out_div, cached->bit_clk_div,
	    cached->pix_clk_div, cached->pll_mux);
}

static int dsi_7nm_pll_restore_state(struct msm_dsi_phy *phy)
{
	struct dsi_pll_7nm *pll_7nm = to_pll_7nm(phy->vco_hw);
	struct pll_7nm_cached_state *cached = &pll_7nm->cached_state;
	u32 val;
	int ret;

	val = readl(pll_7nm->phy->pll_base + REG_DSI_7nm_PHY_PLL_PLL_OUTDIV_RATE);
	val &= ~0x3;
	val |= cached->pll_out_div;
	writel(val, pll_7nm->phy->pll_base + REG_DSI_7nm_PHY_PLL_PLL_OUTDIV_RATE);

	dsi_pll_cmn_clk_cfg0_write(pll_7nm,
				   DSI_7nm_PHY_CMN_CLK_CFG0_DIV_CTRL_3_0(cached->bit_clk_div) |
				   DSI_7nm_PHY_CMN_CLK_CFG0_DIV_CTRL_7_4(cached->pix_clk_div));
	dsi_pll_cmn_clk_cfg1_update(pll_7nm, DSI_7nm_PHY_CMN_CLK_CFG1_DSICLK_SEL__MASK,
				    cached->pll_mux);

	ret = dsi_pll_7nm_vco_set_rate(phy->vco_hw,
				       pll_7nm->vco_current_rate,
				       VCO_REF_CLK_RATE);
	if (ret) {
		DRM_DEV_ERROR(&pll_7nm->phy->pdev->dev,
			      "restore vco rate failed. ret=%d\n", ret);
		return ret;
	}

	DBG("DSI PLL%d", pll_7nm->phy->id);

	return 0;
}

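/*
 * In bonded DSI mode the master PHY picks up a pointer to the slave so the
 * VCO clk_ops can mirror register writes, while the slave sources its bit
 * clock from the master's (external) PLL via BITCLK_SEL.
 */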
static int dsi_7nm_set_usecase(struct msm_dsi_phy *phy)
{
	struct dsi_pll_7nm *pll_7nm = to_pll_7nm(phy->vco_hw);
	void __iomem *base = phy->base;
	u32 data = 0x0;	/* internal PLL */

	DBG("DSI PLL%d", pll_7nm->phy->id);

	switch (phy->usecase) {
	case MSM_DSI_PHY_STANDALONE:
		break;
	case MSM_DSI_PHY_MASTER:
		pll_7nm->slave = pll_7nm_list[(pll_7nm->phy->id + 1) % DSI_MAX];
		/* v7.0: Enable ATB_EN0 and alternate clock output to external phy */
		if (phy->cfg->quirks & DSI_PHY_7NM_QUIRK_V7_0)
			writel(0x07, base + REG_DSI_7nm_PHY_CMN_CTRL_5);
		break;
	case MSM_DSI_PHY_SLAVE:
		data = 0x1; /* external PLL */
		break;
	default:
		return -EINVAL;
	}

	/* set PLL src */
	dsi_pll_cmn_clk_cfg1_update(pll_7nm, DSI_7nm_PHY_CMN_CLK_CFG1_BITCLK_SEL__MASK,
				    DSI_7nm_PHY_CMN_CLK_CFG1_BITCLK_SEL(data));

	return 0;
}

/*
 * The post dividers and mux clocks are created using the standard divider and
 * mux API. Unlike the 14nm PHY, the slave PLL doesn't need its dividers/mux
 * state to follow the master PLL's divider/mux state. Therefore, we don't
 * require special clock ops that also configure the slave PLL registers
 */
static int pll_7nm_register(struct dsi_pll_7nm *pll_7nm, struct clk_hw **provided_clocks)
{
	char clk_name[32];
	struct clk_init_data vco_init = {
		.parent_data = &(const struct clk_parent_data) {
			.fw_name = "ref",
		},
		.num_parents = 1,
		.name = clk_name,
		.flags = CLK_IGNORE_UNUSED,
		.ops = &clk_ops_dsi_pll_7nm_vco,
	};
	struct device *dev = &pll_7nm->phy->pdev->dev;
	struct clk_hw *hw, *pll_out_div, *pll_bit, *pll_by_2_bit;
	struct clk_hw *pll_post_out_div, *phy_pll_out_dsi_parent;
	int ret;

	DBG("DSI%d", pll_7nm->phy->id);

	snprintf(clk_name, sizeof(clk_name), "dsi%dvco_clk", pll_7nm->phy->id);
	pll_7nm->clk_hw.init = &vco_init;

	ret = devm_clk_hw_register(dev, &pll_7nm->clk_hw);
	if (ret)
		return ret;

	snprintf(clk_name, sizeof(clk_name), "dsi%d_pll_out_div_clk", pll_7nm->phy->id);

	pll_out_div = devm_clk_hw_register_divider_parent_hw(dev, clk_name,
			&pll_7nm->clk_hw, CLK_SET_RATE_PARENT,
			pll_7nm->phy->pll_base +
				REG_DSI_7nm_PHY_PLL_PLL_OUTDIV_RATE,
			0, 2, CLK_DIVIDER_POWER_OF_TWO, NULL);
	if (IS_ERR(pll_out_div)) {
		ret = PTR_ERR(pll_out_div);
		goto fail;
	}

	snprintf(clk_name, sizeof(clk_name), "dsi%d_pll_bit_clk", pll_7nm->phy->id);

	/* BIT CLK: DIV_CTRL_3_0 */
	pll_bit = devm_clk_hw_register_divider_parent_hw(dev, clk_name,
			pll_out_div, CLK_SET_RATE_PARENT,
			pll_7nm->phy->base + REG_DSI_7nm_PHY_CMN_CLK_CFG0,
			0, 4, CLK_DIVIDER_ONE_BASED, &pll_7nm->postdiv_lock);
	if (IS_ERR(pll_bit)) {
		ret = PTR_ERR(pll_bit);
		goto fail;
	}

	snprintf(clk_name, sizeof(clk_name), "dsi%d_phy_pll_out_byteclk", pll_7nm->phy->id);

	/* DSI Byte clock = VCO_CLK / OUT_DIV / BIT_DIV / 8 */
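	/* In C-PHY mode the fixed divider is 7 instead of 8 (16 bits are carried per 7 symbols) */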
	hw = devm_clk_hw_register_fixed_factor_parent_hw(dev, clk_name,
			pll_bit, CLK_SET_RATE_PARENT, 1,
			pll_7nm->phy->cphy_mode ? 7 : 8);
	if (IS_ERR(hw)) {
		ret = PTR_ERR(hw);
		goto fail;
	}

	provided_clocks[DSI_BYTE_PLL_CLK] = hw;

	snprintf(clk_name, sizeof(clk_name), "dsi%d_pll_by_2_bit_clk", pll_7nm->phy->id);

	pll_by_2_bit = devm_clk_hw_register_fixed_factor_parent_hw(dev,
			clk_name, pll_bit, 0, 1, 2);
	if (IS_ERR(pll_by_2_bit)) {
		ret = PTR_ERR(pll_by_2_bit);
		goto fail;
	}

	snprintf(clk_name, sizeof(clk_name), "dsi%d_pll_post_out_div_clk", pll_7nm->phy->id);

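	/* post_out_div = pll_out_div / 4 for D-PHY, pll_out_div * 2 / 7 for C-PHY */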
	if (pll_7nm->phy->cphy_mode)
		pll_post_out_div = devm_clk_hw_register_fixed_factor_parent_hw(
				dev, clk_name, pll_out_div, 0, 2, 7);
	else
		pll_post_out_div = devm_clk_hw_register_fixed_factor_parent_hw(
				dev, clk_name, pll_out_div, 0, 1, 4);
	if (IS_ERR(pll_post_out_div)) {
		ret = PTR_ERR(pll_post_out_div);
		goto fail;
	}

	/*
	 * in CPHY mode, pclk_mux will always have post_out_div as parent
	 * don't register a pclk_mux clock and just use post_out_div instead
	 */
	if (pll_7nm->phy->cphy_mode) {
		dsi_pll_cmn_clk_cfg1_update(pll_7nm,
					    DSI_7nm_PHY_CMN_CLK_CFG1_DSICLK_SEL__MASK,
					    DSI_7nm_PHY_CMN_CLK_CFG1_DSICLK_SEL(3));
		phy_pll_out_dsi_parent = pll_post_out_div;
	} else {
		snprintf(clk_name, sizeof(clk_name), "dsi%d_pclk_mux", pll_7nm->phy->id);

		hw = devm_clk_hw_register_mux_parent_hws(dev, clk_name,
				((const struct clk_hw *[]){
					pll_bit,
					pll_by_2_bit,
				}), 2, 0, pll_7nm->phy->base +
					REG_DSI_7nm_PHY_CMN_CLK_CFG1,
				0, 1, 0, &pll_7nm->pclk_mux_lock);
		if (IS_ERR(hw)) {
			ret = PTR_ERR(hw);
			goto fail;
		}

		phy_pll_out_dsi_parent = hw;
	}

	snprintf(clk_name, sizeof(clk_name), "dsi%d_phy_pll_out_dsiclk", pll_7nm->phy->id);

	/* PIX CLK DIV: DIV_CTRL_7_4 */
	hw = devm_clk_hw_register_divider_parent_hw(dev, clk_name,
			phy_pll_out_dsi_parent, 0,
			pll_7nm->phy->base + REG_DSI_7nm_PHY_CMN_CLK_CFG0,
			4, 4, CLK_DIVIDER_ONE_BASED, &pll_7nm->postdiv_lock);
	if (IS_ERR(hw)) {
		ret = PTR_ERR(hw);
		goto fail;
	}

	provided_clocks[DSI_PIXEL_PLL_CLK] = hw;

	return 0;

fail:

	return ret;
}

static int dsi_pll_7nm_init(struct msm_dsi_phy *phy)
{
	struct platform_device *pdev = phy->pdev;
	struct dsi_pll_7nm *pll_7nm;
	int ret;

	pll_7nm = devm_kzalloc(&pdev->dev, sizeof(*pll_7nm), GFP_KERNEL);
	if (!pll_7nm)
		return -ENOMEM;

	DBG("DSI PLL%d", phy->id);

	pll_7nm_list[phy->id] = pll_7nm;

	spin_lock_init(&pll_7nm->postdiv_lock);
	spin_lock_init(&pll_7nm->pclk_mux_lock);
	spin_lock_init(&pll_7nm->pll_enable_lock);

	pll_7nm->phy = phy;
	phy->pll_data = pll_7nm;

	ret = pll_7nm_register(pll_7nm, phy->provided_clocks->hws);
	if (ret) {
		DRM_DEV_ERROR(&pdev->dev, "failed to register PLL: %d\n", ret);
		return ret;
	}

	phy->vco_hw = &pll_7nm->clk_hw;

	/* TODO: Remove this when we have proper display handover support */
	msm_dsi_phy_pll_save_state(phy);
	/*
	 * Store also proper vco_current_rate, because its value will be used in
	 * dsi_7nm_pll_restore_state().
	 */
	if (!dsi_pll_7nm_vco_recalc_rate(&pll_7nm->clk_hw, VCO_REF_CLK_RATE))
		pll_7nm->vco_current_rate = pll_7nm->phy->cfg->min_pll_rate;

	return 0;
}

static int dsi_phy_hw_v4_0_is_pll_on(struct msm_dsi_phy *phy)
{
	void __iomem *base = phy->base;
	u32 data = 0;

	data = readl(base + REG_DSI_7nm_PHY_CMN_PLL_CNTRL);
	mb(); /* make sure read happened */

	return (data & BIT(0));
}

static void dsi_phy_hw_v4_0_config_lpcdrx(struct msm_dsi_phy *phy, bool enable)
{
	void __iomem *lane_base = phy->lane_base;
	int phy_lane_0 = 0;	/* TODO: Support all lane swap configs */

	/*
	 * LPRX and CDRX need to be enabled only for the physical data lane
	 * corresponding to the logical data lane 0
	 */
	if (enable)
		writel(0x3, lane_base + REG_DSI_7nm_PHY_LN_LPRX_CTRL(phy_lane_0));
	else
		writel(0, lane_base + REG_DSI_7nm_PHY_LN_LPRX_CTRL(phy_lane_0));
}

static void dsi_phy_hw_v4_0_lane_settings(struct msm_dsi_phy *phy)
{
	int i;
	const u8 tx_dctrl_0[] = { 0x00, 0x00, 0x00, 0x04, 0x01 };
	const u8 tx_dctrl_1[] = { 0x40, 0x40, 0x40, 0x46, 0x41 };
	const u8 *tx_dctrl = tx_dctrl_0;
	void __iomem *lane_base = phy->lane_base;

	if (!(phy->cfg->quirks & DSI_PHY_7NM_QUIRK_PRE_V4_1))
		tx_dctrl = tx_dctrl_1;

	/* Strength ctrl settings */
	for (i = 0; i < 5; i++) {
		/*
		 * Disable LPRX and CDRX for all lanes. And later on, it will
		 * be only enabled for the physical data lane corresponding
		 * to the logical data lane 0
		 */
		writel(0, lane_base + REG_DSI_7nm_PHY_LN_LPRX_CTRL(i));
		writel(0x0, lane_base + REG_DSI_7nm_PHY_LN_PIN_SWAP(i));
	}

	dsi_phy_hw_v4_0_config_lpcdrx(phy, true);

	/* other settings */
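	/* lane index 4 is the clock lane, which takes a different CFG2 value below */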
	for (i = 0; i < 5; i++) {
		writel(0x0, lane_base + REG_DSI_7nm_PHY_LN_CFG0(i));
		writel(0x0, lane_base + REG_DSI_7nm_PHY_LN_CFG1(i));
		writel(i == 4 ? 0x8a : 0xa, lane_base + REG_DSI_7nm_PHY_LN_CFG2(i));
		writel(tx_dctrl[i], lane_base + REG_DSI_7nm_PHY_LN_TX_DCTRL(i));
	}
}

static int dsi_7nm_phy_enable(struct msm_dsi_phy *phy,
			      struct msm_dsi_phy_clk_request *clk_req)
{
	int ret;
	u32 status;
	u32 const delay_us = 5;
	u32 const timeout_us = 1000;
	struct msm_dsi_dphy_timing *timing = &phy->timing;
	struct dsi_pll_7nm *pll = phy->pll_data;
	void __iomem *base = phy->base;
	bool less_than_1500_mhz;
	unsigned long flags;
	u32 vreg_ctrl_0, vreg_ctrl_1, lane_ctrl0;
	u32 glbl_pemph_ctrl_0;
	u32 glbl_str_swi_cal_sel_ctrl, glbl_hstx_str_ctrl_0;
	u32 glbl_rescode_top_ctrl, glbl_rescode_bot_ctrl;
	u32 data;

	DBG("");

	if (phy->cphy_mode)
		ret = msm_dsi_cphy_timing_calc_v4(timing, clk_req);
	else
		ret = msm_dsi_dphy_timing_calc_v4(timing, clk_req);
	if (ret) {
		DRM_DEV_ERROR(&phy->pdev->dev,
			      "%s: PHY timing calculation failed\n", __func__);
		return -EINVAL;
	}

	if (dsi_phy_hw_v4_0_is_pll_on(phy))
		pr_warn("PLL turned on before configuring PHY\n");

	/* Request for REFGEN READY */
	if ((phy->cfg->quirks & DSI_PHY_7NM_QUIRK_V4_3) ||
	    (phy->cfg->quirks & DSI_PHY_7NM_QUIRK_V5_2) ||
	    (phy->cfg->quirks & DSI_PHY_7NM_QUIRK_V7_0)) {
		writel(0x1, phy->base + REG_DSI_7nm_PHY_CMN_GLBL_DIGTOP_SPARE10);
		udelay(500);
	}

	/* wait for REFGEN READY */
	ret = readl_poll_timeout_atomic(base + REG_DSI_7nm_PHY_CMN_PHY_STATUS,
					status, (status & BIT(0)),
					delay_us, timeout_us);
	if (ret) {
		pr_err("Ref gen not ready. Aborting\n");
		return -EINVAL;
	}

	/* TODO: CPHY enable path (this is for DPHY only) */

	/* Alter PHY configurations if the data rate is less than 1.5 GHz */
	less_than_1500_mhz = (clk_req->bitclk_rate <= 1500000000);

	glbl_str_swi_cal_sel_ctrl = 0x00;
	if (phy->cphy_mode) {
		vreg_ctrl_0 = 0x51;
		vreg_ctrl_1 = 0x55;
		glbl_hstx_str_ctrl_0 = 0x00;
		glbl_pemph_ctrl_0 = 0x11;
		lane_ctrl0 = 0x17;
	} else {
		vreg_ctrl_0 = less_than_1500_mhz ? 0x53 : 0x52;
		vreg_ctrl_1 = 0x5c;
		glbl_hstx_str_ctrl_0 = 0x88;
		glbl_pemph_ctrl_0 = 0x00;
		lane_ctrl0 = 0x1f;
	}

	if ((phy->cfg->quirks & DSI_PHY_7NM_QUIRK_V7_0)) {
		if (phy->cphy_mode) {
			/* TODO: different for second phy */
			vreg_ctrl_0 = 0x57;
			vreg_ctrl_1 = 0x41;
			glbl_rescode_top_ctrl = 0x3d;
			glbl_rescode_bot_ctrl = 0x38;
		} else {
			vreg_ctrl_0 = 0x56;
			vreg_ctrl_1 = 0x19;
			glbl_rescode_top_ctrl = less_than_1500_mhz ? 0x3c : 0x03;
			glbl_rescode_bot_ctrl = less_than_1500_mhz ? 0x38 : 0x3c;
		}
	} else if ((phy->cfg->quirks & DSI_PHY_7NM_QUIRK_V5_2)) {
		if (phy->cphy_mode) {
			vreg_ctrl_0 = 0x45;
			vreg_ctrl_1 = 0x41;
			glbl_rescode_top_ctrl = 0x00;
			glbl_rescode_bot_ctrl = 0x00;
		} else {
			vreg_ctrl_0 = 0x44;
			vreg_ctrl_1 = 0x19;
			glbl_rescode_top_ctrl = less_than_1500_mhz ? 0x3c : 0x03;
			glbl_rescode_bot_ctrl = less_than_1500_mhz ? 0x38 : 0x3c;
		}
	} else if ((phy->cfg->quirks & DSI_PHY_7NM_QUIRK_V4_3)) {
		if (phy->cphy_mode) {
			glbl_rescode_top_ctrl = less_than_1500_mhz ? 0x3d : 0x01;
			glbl_rescode_bot_ctrl = less_than_1500_mhz ? 0x38 : 0x3b;
		} else {
			glbl_rescode_top_ctrl = less_than_1500_mhz ? 0x3d : 0x01;
			glbl_rescode_bot_ctrl = less_than_1500_mhz ? 0x38 : 0x39;
		}
	} else if (phy->cfg->quirks & DSI_PHY_7NM_QUIRK_V4_2) {
		if (phy->cphy_mode) {
			glbl_rescode_top_ctrl = less_than_1500_mhz ? 0x3d : 0x01;
			glbl_rescode_bot_ctrl = less_than_1500_mhz ? 0x38 : 0x3b;
		} else {
			glbl_rescode_top_ctrl = less_than_1500_mhz ? 0x3c : 0x00;
			glbl_rescode_bot_ctrl = less_than_1500_mhz ? 0x38 : 0x39;
		}
	} else if (phy->cfg->quirks & DSI_PHY_7NM_QUIRK_V4_1) {
		if (phy->cphy_mode) {
			glbl_hstx_str_ctrl_0 = 0x88;
			glbl_rescode_top_ctrl = 0x00;
			glbl_rescode_bot_ctrl = 0x3c;
		} else {
			glbl_rescode_top_ctrl = less_than_1500_mhz ? 0x3d : 0x00;
			glbl_rescode_bot_ctrl = less_than_1500_mhz ? 0x39 : 0x3c;
		}
	} else {
		if (phy->cphy_mode) {
			glbl_str_swi_cal_sel_ctrl = 0x03;
			glbl_hstx_str_ctrl_0 = 0x66;
		} else {
			vreg_ctrl_0 = less_than_1500_mhz ? 0x5B : 0x59;
			glbl_str_swi_cal_sel_ctrl = less_than_1500_mhz ? 0x03 : 0x00;
			glbl_hstx_str_ctrl_0 = less_than_1500_mhz ? 0x66 : 0x88;
		}
		glbl_rescode_top_ctrl = 0x03;
		glbl_rescode_bot_ctrl = 0x3c;
	}

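	/*
	 * The PHY enable path powers up the PLL bias directly via CMN_CTRL_0,
	 * so seed pll_enable_cnt at 1 to match; dsi_7nm_phy_disable() resets
	 * it to 0 when the PHY is torn down.
	 */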
	spin_lock_irqsave(&pll->pll_enable_lock, flags);
	pll->pll_enable_cnt = 1;
	/* de-assert digital and pll power down */
	data = DSI_7nm_PHY_CMN_CTRL_0_DIGTOP_PWRDN_B |
	       DSI_7nm_PHY_CMN_CTRL_0_PLL_SHUTDOWNB;
	writel(data, base + REG_DSI_7nm_PHY_CMN_CTRL_0);
	spin_unlock_irqrestore(&pll->pll_enable_lock, flags);

	/* Assert PLL core reset */
	writel(0x00, base + REG_DSI_7nm_PHY_CMN_PLL_CNTRL);

	/* turn off resync FIFO */
	writel(0x00, base + REG_DSI_7nm_PHY_CMN_RBUF_CTRL);

	/* program CMN_CTRL_4 for minor_ver 2 chipsets */
	if ((phy->cfg->quirks & DSI_PHY_7NM_QUIRK_V5_2) ||
	    (phy->cfg->quirks & DSI_PHY_7NM_QUIRK_V7_0) ||
	    (readl(base + REG_DSI_7nm_PHY_CMN_REVISION_ID0) & (0xf0)) == 0x20)
		writel(0x04, base + REG_DSI_7nm_PHY_CMN_CTRL_4);

	/* Configure PHY lane swap (TODO: we need to calculate this) */
	writel(0x21, base + REG_DSI_7nm_PHY_CMN_LANE_CFG0);
	writel(0x84, base + REG_DSI_7nm_PHY_CMN_LANE_CFG1);

	if (phy->cphy_mode)
		writel(BIT(6), base + REG_DSI_7nm_PHY_CMN_GLBL_CTRL);

	/* Enable LDO */
	writel(vreg_ctrl_0, base + REG_DSI_7nm_PHY_CMN_VREG_CTRL_0);
	writel(vreg_ctrl_1, base + REG_DSI_7nm_PHY_CMN_VREG_CTRL_1);

	writel(0x00, base + REG_DSI_7nm_PHY_CMN_CTRL_3);
	writel(glbl_str_swi_cal_sel_ctrl,
	       base + REG_DSI_7nm_PHY_CMN_GLBL_STR_SWI_CAL_SEL_CTRL);
	writel(glbl_hstx_str_ctrl_0,
	       base + REG_DSI_7nm_PHY_CMN_GLBL_HSTX_STR_CTRL_0);
	writel(glbl_pemph_ctrl_0,
	       base + REG_DSI_7nm_PHY_CMN_GLBL_PEMPH_CTRL_0);
	if (phy->cphy_mode)
		writel(0x01, base + REG_DSI_7nm_PHY_CMN_GLBL_PEMPH_CTRL_1);
	writel(glbl_rescode_top_ctrl,
	       base + REG_DSI_7nm_PHY_CMN_GLBL_RESCODE_OFFSET_TOP_CTRL);
	writel(glbl_rescode_bot_ctrl,
	       base + REG_DSI_7nm_PHY_CMN_GLBL_RESCODE_OFFSET_BOT_CTRL);
	writel(0x55, base + REG_DSI_7nm_PHY_CMN_GLBL_LPTX_STR_CTRL);

	/* Remove power down from all blocks */
	writel(0x7f, base + REG_DSI_7nm_PHY_CMN_CTRL_0);

	writel(lane_ctrl0, base + REG_DSI_7nm_PHY_CMN_LANE_CTRL0);

	/* Select full-rate mode */
	if (!phy->cphy_mode)
		writel(0x40, base + REG_DSI_7nm_PHY_CMN_CTRL_2);

	ret = dsi_7nm_set_usecase(phy);
	if (ret) {
		DRM_DEV_ERROR(&phy->pdev->dev, "%s: set pll usecase failed, %d\n",
			      __func__, ret);
		return ret;
	}

	/* DSI PHY timings */
	if (phy->cphy_mode) {
		writel(0x00, base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_0);
		writel(timing->hs_exit, base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_4);
		writel(timing->shared_timings.clk_pre,
		       base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_5);
		writel(timing->clk_prepare, base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_6);
		writel(timing->shared_timings.clk_post,
		       base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_7);
		writel(timing->hs_rqst, base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_8);
		writel(0x02, base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_9);
		writel(0x04, base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_10);
		writel(0x00, base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_11);
	} else {
		writel(0x00, base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_0);
		writel(timing->clk_zero, base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_1);
		writel(timing->clk_prepare, base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_2);
		writel(timing->clk_trail, base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_3);
		writel(timing->hs_exit, base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_4);
		writel(timing->hs_zero, base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_5);
		writel(timing->hs_prepare, base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_6);
		writel(timing->hs_trail, base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_7);
		writel(timing->hs_rqst, base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_8);
		writel(0x02, base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_9);
		writel(0x04, base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_10);
		writel(0x00, base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_11);
		writel(timing->shared_timings.clk_pre,
		       base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_12);
		writel(timing->shared_timings.clk_post,
		       base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_13);
	}

	/* DSI lane settings */
	dsi_phy_hw_v4_0_lane_settings(phy);

	DBG("DSI%d PHY enabled", phy->id);

	return 0;
}

static bool dsi_7nm_set_continuous_clock(struct msm_dsi_phy *phy, bool enable)
{
	void __iomem *base = phy->base;
	u32 data;

	data = readl(base + REG_DSI_7nm_PHY_CMN_LANE_CTRL1);
	if (enable)
		data |= BIT(5) | BIT(6);
	else
		data &= ~(BIT(5) | BIT(6));
	writel(data, base + REG_DSI_7nm_PHY_CMN_LANE_CTRL1);

	return enable;
}

static void dsi_7nm_phy_disable(struct msm_dsi_phy *phy)
{
	struct dsi_pll_7nm *pll = phy->pll_data;
	void __iomem *base = phy->base;
	unsigned long flags;
	u32 data;

	DBG("");

	if (dsi_phy_hw_v4_0_is_pll_on(phy))
		pr_warn("Turning OFF PHY while PLL is on\n");

	dsi_phy_hw_v4_0_config_lpcdrx(phy, false);

	/* Turn off REFGEN Vote */
	if ((phy->cfg->quirks & DSI_PHY_7NM_QUIRK_V4_3) ||
	    (phy->cfg->quirks & DSI_PHY_7NM_QUIRK_V5_2) ||
	    (phy->cfg->quirks & DSI_PHY_7NM_QUIRK_V7_0)) {
		writel(0x0, base + REG_DSI_7nm_PHY_CMN_GLBL_DIGTOP_SPARE10);
		wmb();
		/* Delay to ensure HW removes vote before PHY shut down */
		udelay(2);
	}

	data = readl(base + REG_DSI_7nm_PHY_CMN_CTRL_0);

	/* disable all lanes */
	data &= ~0x1F;
	writel(data, base + REG_DSI_7nm_PHY_CMN_CTRL_0);
	writel(0, base + REG_DSI_7nm_PHY_CMN_LANE_CTRL0);

	spin_lock_irqsave(&pll->pll_enable_lock, flags);
	pll->pll_enable_cnt = 0;
	/* Turn off all PHY blocks */
	writel(0x00, base + REG_DSI_7nm_PHY_CMN_CTRL_0);
	spin_unlock_irqrestore(&pll->pll_enable_lock, flags);

	/* make sure phy is turned off */
	wmb();

	DBG("DSI%d PHY disabled", phy->id);
}

static const struct regulator_bulk_data dsi_phy_7nm_36mA_regulators[] = {
	{ .supply = "vdds", .init_load_uA = 36000 },
};

static const struct regulator_bulk_data dsi_phy_7nm_37750uA_regulators[] = {
	{ .supply = "vdds", .init_load_uA = 37550 },
};

static const struct regulator_bulk_data dsi_phy_7nm_48000uA_regulators[] = {
	{ .supply = "vdds", .init_load_uA = 48000 },
};

static const struct regulator_bulk_data dsi_phy_7nm_98000uA_regulators[] = {
	{ .supply = "vdds", .init_load_uA = 98000 },
};

static const struct regulator_bulk_data dsi_phy_7nm_97800uA_regulators[] = {
	{ .supply = "vdds", .init_load_uA = 97800 },
};

static const struct regulator_bulk_data dsi_phy_7nm_98400uA_regulators[] = {
	{ .supply = "vdds", .init_load_uA = 98400 },
};

const struct msm_dsi_phy_cfg dsi_phy_7nm_cfgs = {
	.has_phy_lane = true,
	.regulator_data = dsi_phy_7nm_36mA_regulators,
	.num_regulators = ARRAY_SIZE(dsi_phy_7nm_36mA_regulators),
	.ops = {
		.enable = dsi_7nm_phy_enable,
		.disable = dsi_7nm_phy_disable,
		.pll_init = dsi_pll_7nm_init,
		.save_pll_state = dsi_7nm_pll_save_state,
		.restore_pll_state = dsi_7nm_pll_restore_state,
		.set_continuous_clock = dsi_7nm_set_continuous_clock,
	},
	.min_pll_rate = 600000000UL,
#ifdef CONFIG_64BIT
	.max_pll_rate = 5000000000UL,
#else
	.max_pll_rate = ULONG_MAX,
#endif
	.io_start = { 0xae94400, 0xae96400 },
	.num_dsi_phy = 2,
	.quirks = DSI_PHY_7NM_QUIRK_V4_1,
};

const struct msm_dsi_phy_cfg dsi_phy_7nm_6375_cfgs = {
	.has_phy_lane = true,
	.ops = {
		.enable = dsi_7nm_phy_enable,
		.disable = dsi_7nm_phy_disable,
		.pll_init = dsi_pll_7nm_init,
		.save_pll_state = dsi_7nm_pll_save_state,
		.restore_pll_state = dsi_7nm_pll_restore_state,
	},
	.min_pll_rate = 600000000UL,
#ifdef CONFIG_64BIT
	.max_pll_rate = 5000000000ULL,
#else
	.max_pll_rate = ULONG_MAX,
#endif
	.io_start = { 0x5e94400 },
	.num_dsi_phy = 1,
	.quirks = DSI_PHY_7NM_QUIRK_V4_1,
};

const struct msm_dsi_phy_cfg dsi_phy_7nm_8150_cfgs = {
	.has_phy_lane = true,
	.regulator_data = dsi_phy_7nm_36mA_regulators,
	.num_regulators = ARRAY_SIZE(dsi_phy_7nm_36mA_regulators),
	.ops = {
		.enable = dsi_7nm_phy_enable,
		.disable = dsi_7nm_phy_disable,
		.pll_init = dsi_pll_7nm_init,
		.save_pll_state = dsi_7nm_pll_save_state,
		.restore_pll_state = dsi_7nm_pll_restore_state,
		.set_continuous_clock = dsi_7nm_set_continuous_clock,
	},
	.min_pll_rate = 1000000000UL,
	.max_pll_rate = 3500000000UL,
	.io_start = { 0xae94400, 0xae96400 },
	.num_dsi_phy = 2,
	.quirks = DSI_PHY_7NM_QUIRK_PRE_V4_1,
};

const struct msm_dsi_phy_cfg dsi_phy_7nm_7280_cfgs = {
	.has_phy_lane = true,
	.regulator_data = dsi_phy_7nm_37750uA_regulators,
	.num_regulators = ARRAY_SIZE(dsi_phy_7nm_37750uA_regulators),
	.ops = {
		.enable = dsi_7nm_phy_enable,
		.disable = dsi_7nm_phy_disable,
		.pll_init = dsi_pll_7nm_init,
		.save_pll_state = dsi_7nm_pll_save_state,
		.restore_pll_state = dsi_7nm_pll_restore_state,
	},
	.min_pll_rate = 600000000UL,
#ifdef CONFIG_64BIT
	.max_pll_rate = 5000000000ULL,
#else
	.max_pll_rate = ULONG_MAX,
#endif
	.io_start = { 0xae94400 },
	.num_dsi_phy = 1,
	.quirks = DSI_PHY_7NM_QUIRK_V4_1,
};

const struct msm_dsi_phy_cfg dsi_phy_5nm_8350_cfgs = {
	.has_phy_lane = true,
	.regulator_data = dsi_phy_7nm_37750uA_regulators,
	.num_regulators = ARRAY_SIZE(dsi_phy_7nm_37750uA_regulators),
	.ops = {
		.enable = dsi_7nm_phy_enable,
		.disable = dsi_7nm_phy_disable,
		.pll_init = dsi_pll_7nm_init,
		.save_pll_state = dsi_7nm_pll_save_state,
		.restore_pll_state = dsi_7nm_pll_restore_state,
		.set_continuous_clock = dsi_7nm_set_continuous_clock,
	},
	.min_pll_rate = 600000000UL,
#ifdef CONFIG_64BIT
	.max_pll_rate = 5000000000UL,
#else
	.max_pll_rate = ULONG_MAX,
#endif
	.io_start = { 0xae94400, 0xae96400 },
	.num_dsi_phy = 2,
	.quirks = DSI_PHY_7NM_QUIRK_V4_2,
};

const struct msm_dsi_phy_cfg dsi_phy_5nm_8450_cfgs = {
	.has_phy_lane = true,
	.regulator_data = dsi_phy_7nm_97800uA_regulators,
	.num_regulators = ARRAY_SIZE(dsi_phy_7nm_97800uA_regulators),
	.ops = {
		.enable = dsi_7nm_phy_enable,
		.disable = dsi_7nm_phy_disable,
		.pll_init = dsi_pll_7nm_init,
		.save_pll_state = dsi_7nm_pll_save_state,
		.restore_pll_state = dsi_7nm_pll_restore_state,
		.set_continuous_clock = dsi_7nm_set_continuous_clock,
	},
	.min_pll_rate = 600000000UL,
#ifdef CONFIG_64BIT
	.max_pll_rate = 5000000000UL,
#else
	.max_pll_rate = ULONG_MAX,
#endif
	.io_start = { 0xae94400, 0xae96400 },
	.num_dsi_phy = 2,
	.quirks = DSI_PHY_7NM_QUIRK_V4_3,
};

const struct msm_dsi_phy_cfg dsi_phy_5nm_8775p_cfgs = {
	.has_phy_lane = true,
	.regulator_data = dsi_phy_7nm_48000uA_regulators,
	.num_regulators = ARRAY_SIZE(dsi_phy_7nm_48000uA_regulators),
	.ops = {
		.enable = dsi_7nm_phy_enable,
		.disable = dsi_7nm_phy_disable,
		.pll_init = dsi_pll_7nm_init,
		.save_pll_state = dsi_7nm_pll_save_state,
		.restore_pll_state = dsi_7nm_pll_restore_state,
		.set_continuous_clock = dsi_7nm_set_continuous_clock,
	},
	.min_pll_rate = 600000000UL,
#ifdef CONFIG_64BIT
	.max_pll_rate = 5000000000UL,
#else
	.max_pll_rate = ULONG_MAX,
#endif
	.io_start = { 0xae94400, 0xae96400 },
	.num_dsi_phy = 2,
	.quirks = DSI_PHY_7NM_QUIRK_V4_2,
};

const struct msm_dsi_phy_cfg dsi_phy_5nm_sar2130p_cfgs = {
	.has_phy_lane = true,
	.regulator_data = dsi_phy_7nm_97800uA_regulators,
	.num_regulators = ARRAY_SIZE(dsi_phy_7nm_97800uA_regulators),
	.ops = {
		.enable = dsi_7nm_phy_enable,
		.disable = dsi_7nm_phy_disable,
		.pll_init = dsi_pll_7nm_init,
		.save_pll_state = dsi_7nm_pll_save_state,
		.restore_pll_state = dsi_7nm_pll_restore_state,
		.set_continuous_clock = dsi_7nm_set_continuous_clock,
	},
	.min_pll_rate = 600000000UL,
#ifdef CONFIG_64BIT
	.max_pll_rate = 5000000000UL,
#else
	.max_pll_rate = ULONG_MAX,
#endif
	.io_start = { 0xae95000, 0xae97000 },
	.num_dsi_phy = 2,
	.quirks = DSI_PHY_7NM_QUIRK_V5_2,
};

const struct msm_dsi_phy_cfg dsi_phy_4nm_8550_cfgs = {
	.has_phy_lane = true,
	.regulator_data = dsi_phy_7nm_98400uA_regulators,
	.num_regulators = ARRAY_SIZE(dsi_phy_7nm_98400uA_regulators),
	.ops = {
		.enable = dsi_7nm_phy_enable,
		.disable = dsi_7nm_phy_disable,
		.pll_init = dsi_pll_7nm_init,
		.save_pll_state = dsi_7nm_pll_save_state,
		.restore_pll_state = dsi_7nm_pll_restore_state,
		.set_continuous_clock = dsi_7nm_set_continuous_clock,
	},
	.min_pll_rate = 600000000UL,
#ifdef CONFIG_64BIT
	.max_pll_rate = 5000000000UL,
#else
	.max_pll_rate = ULONG_MAX,
#endif
	.io_start = { 0xae95000, 0xae97000 },
	.num_dsi_phy = 2,
	.quirks = DSI_PHY_7NM_QUIRK_V5_2,
};

const struct msm_dsi_phy_cfg dsi_phy_4nm_8650_cfgs = {
	.has_phy_lane = true,
	.regulator_data = dsi_phy_7nm_98000uA_regulators,
	.num_regulators = ARRAY_SIZE(dsi_phy_7nm_98000uA_regulators),
	.ops = {
		.enable = dsi_7nm_phy_enable,
		.disable = dsi_7nm_phy_disable,
		.pll_init = dsi_pll_7nm_init,
		.save_pll_state = dsi_7nm_pll_save_state,
		.restore_pll_state = dsi_7nm_pll_restore_state,
		.set_continuous_clock = dsi_7nm_set_continuous_clock,
	},
	.min_pll_rate = 600000000UL,
#ifdef CONFIG_64BIT
	.max_pll_rate = 5000000000UL,
#else
	.max_pll_rate = ULONG_MAX,
#endif
	.io_start = { 0xae95000, 0xae97000 },
	.num_dsi_phy = 2,
	.quirks = DSI_PHY_7NM_QUIRK_V5_2,
};

const struct msm_dsi_phy_cfg dsi_phy_3nm_8750_cfgs = {
	.has_phy_lane = true,
	.regulator_data = dsi_phy_7nm_98000uA_regulators,
	.num_regulators = ARRAY_SIZE(dsi_phy_7nm_98000uA_regulators),
	.ops = {
		.enable = dsi_7nm_phy_enable,
		.disable = dsi_7nm_phy_disable,
		.pll_init = dsi_pll_7nm_init,
		.save_pll_state = dsi_7nm_pll_save_state,
		.restore_pll_state = dsi_7nm_pll_restore_state,
		.set_continuous_clock = dsi_7nm_set_continuous_clock,
	},
	.min_pll_rate = 600000000UL,
#ifdef CONFIG_64BIT
	.max_pll_rate = 5000000000UL,
#else
	.max_pll_rate = ULONG_MAX,
#endif
	.io_start = { 0xae95000, 0xae97000 },
	.num_dsi_phy = 2,
	.quirks = DSI_PHY_7NM_QUIRK_V7_0,
};