xref: /linux/drivers/gpu/drm/msm/dsi/phy/dsi_phy_7nm.c (revision 09b1704f5b02c18dd02b21343530463fcfc92c54)
1 /*
2  * SPDX-License-Identifier: GPL-2.0
3  * Copyright (c) 2018, The Linux Foundation
4  */
5 
6 #include <dt-bindings/clock/qcom,dsi-phy-28nm.h>
7 #include <linux/bitfield.h>
8 #include <linux/clk.h>
9 #include <linux/clk-provider.h>
10 #include <linux/iopoll.h>
11 
12 #include "dsi_phy.h"
13 #include "dsi.xml.h"
14 #include "dsi_phy_7nm.xml.h"
15 
16 /*
17  * DSI PLL 7nm - clock diagram (eg: DSI0): TODO: updated CPHY diagram
18  *
19  *           dsi0_pll_out_div_clk  dsi0_pll_bit_clk
20  *                              |                |
21  *                              |                |
22  *                 +---------+  |  +----------+  |  +----+
23  *  dsi0vco_clk ---| out_div |--o--| divl_3_0 |--o--| /8 |-- dsi0_phy_pll_out_byteclk
24  *                 +---------+  |  +----------+  |  +----+
25  *                              |                |
26  *                              |                |         dsi0_pll_by_2_bit_clk
27  *                              |                |          |
28  *                              |                |  +----+  |  |\  dsi0_pclk_mux
29  *                              |                |--| /2 |--o--| \   |
30  *                              |                |  +----+     |  \  |  +---------+
31  *                              |                --------------|  |--o--| div_7_4 |-- dsi0_phy_pll_out_dsiclk
32  *                              |------------------------------|  /     +---------+
33  *                              |          +-----+             | /
34  *                              -----------| /4? |--o----------|/
35  *                                         +-----+  |           |
36  *                                                  |           |dsiclk_sel
37  *                                                  |
38  *                                                  dsi0_pll_post_out_div_clk
39  */
40 
#define VCO_REF_CLK_RATE		19200000	/* 19.2 MHz reference (XO) */
#define FRAC_BITS 18	/* width of the fractional feedback-divider field */

/* Hardware is pre V4.1 */
#define DSI_PHY_7NM_QUIRK_PRE_V4_1	BIT(0)
/* Hardware is V4.1 */
#define DSI_PHY_7NM_QUIRK_V4_1		BIT(1)
/* Hardware is V4.2 */
#define DSI_PHY_7NM_QUIRK_V4_2		BIT(2)
/* Hardware is V4.3 */
#define DSI_PHY_7NM_QUIRK_V4_3		BIT(3)
/* Hardware is V5.2 */
#define DSI_PHY_7NM_QUIRK_V5_2		BIT(4)
/* Hardware is V7.0 */
#define DSI_PHY_7NM_QUIRK_V7_0		BIT(5)
56 
/*
 * PLL configuration computed per set_rate call: SSC inputs are filled by
 * dsi_pll_setup_config(), the "out" members by dsi_pll_calc_dec_frac() and
 * dsi_pll_calc_ssc(), then everything is programmed by dsi_pll_commit() /
 * dsi_pll_ssc_commit().
 */
struct dsi_pll_config {
	bool enable_ssc;	/* spread-spectrum clocking requested */
	bool ssc_center;	/* center-spread (vs. down-spread) modulation */
	u32 ssc_freq;		/* SSC modulation frequency */
	u32 ssc_offset;		/* SSC spread; scaled by 1/1000000 in dsi_pll_calc_ssc() */
	u32 ssc_adj_per;	/* SSC adjustment periods */

	/* out */
	u32 decimal_div_start;	/* integer part of the feedback divider */
	u32 frac_div_start;	/* fractional part, FRAC_BITS wide */
	u32 pll_clock_inverters;	/* CLOCK_INVERTERS_1 value for the target band */
	u32 ssc_stepsize;	/* programmed into SSC_STEPSIZE_{LOW,HIGH}_1 */
	u32 ssc_div_per;	/* programmed into SSC_DIV_PER_{LOW,HIGH}_1 */
};
71 
/*
 * Divider/mux state saved by dsi_7nm_pll_save_state() and re-applied by
 * dsi_7nm_pll_restore_state() across a PHY power collapse.
 */
struct pll_7nm_cached_state {
	/*
	 * NOTE(review): vco_rate is not written by dsi_7nm_pll_save_state()
	 * in this file; restore uses pll->vco_current_rate instead — confirm
	 * whether this member is still needed.
	 */
	unsigned long vco_rate;
	u8 bit_clk_div;		/* CMN_CLK_CFG0 DIV_CTRL_3_0 */
	u8 pix_clk_div;		/* CMN_CLK_CFG0 DIV_CTRL_7_4 */
	u8 pll_out_div;		/* PLL_OUTDIV_RATE, low 2 bits */
	u8 pll_mux;		/* CMN_CLK_CFG1 DSICLK_SEL */
};
79 
/* Driver-private state for one 7nm DSI PLL instance. */
struct dsi_pll_7nm {
	struct clk_hw clk_hw;	/* the VCO clock, registered in pll_7nm_register() */

	struct msm_dsi_phy *phy;	/* owning PHY: register bases, cfg/quirks */

	u64 vco_current_rate;	/* last programmed (or read-back) VCO rate, Hz */

	/* protects REG_DSI_7nm_PHY_CMN_CLK_CFG0 register */
	spinlock_t postdiv_lock;

	/* protects REG_DSI_7nm_PHY_CMN_CLK_CFG1 register */
	spinlock_t pclk_mux_lock;

	/*
	 * protects REG_DSI_7nm_PHY_CMN_CTRL_0 register and pll_enable_cnt
	 * member
	 */
	spinlock_t pll_enable_lock;
	int pll_enable_cnt;	/* PLL bias enable refcount */

	struct pll_7nm_cached_state cached_state;

	struct dsi_pll_7nm *slave;	/* other PLL in bonded DSI mode, else NULL */
};
104 
/* Recover the driver-private PLL struct from its embedded clk_hw. */
#define to_pll_7nm(x)	container_of(x, struct dsi_pll_7nm, clk_hw)

/*
 * Global list of private DSI PLL struct pointers. We need this for bonded DSI
 * mode, where the master PLL's clk_ops needs access the slave's private data
 */
static struct dsi_pll_7nm *pll_7nm_list[DSI_MAX];

/* Forward declarations: bias helpers are used before they are defined. */
static void dsi_pll_enable_pll_bias(struct dsi_pll_7nm *pll);
static void dsi_pll_disable_pll_bias(struct dsi_pll_7nm *pll);
115 
116 static void dsi_pll_setup_config(struct dsi_pll_config *config)
117 {
118 	config->ssc_freq = 31500;
119 	config->ssc_offset = 4800;
120 	config->ssc_adj_per = 2;
121 
122 	/* TODO: ssc enable */
123 	config->enable_ssc = false;
124 	config->ssc_center = 0;
125 }
126 
/*
 * Split the requested VCO rate into the integer (decimal_div_start) and
 * fractional (frac_div_start, FRAC_BITS wide) parts of the feedback divider,
 * and pick the revision-specific CLOCK_INVERTERS_1 value for the target
 * frequency band.
 */
static void dsi_pll_calc_dec_frac(struct dsi_pll_7nm *pll, struct dsi_pll_config *config)
{
	u64 fref = VCO_REF_CLK_RATE;
	u64 pll_freq;
	u64 divider;
	u64 dec, dec_multiple;
	u32 frac;
	u64 multiplier;

	pll_freq = pll->vco_current_rate;

	/* feedback divider is relative to twice the reference clock */
	divider = fref * 2;

	/* rate / (2 * fref) in FRAC_BITS fixed point, then split int/frac */
	multiplier = 1 << FRAC_BITS;
	dec_multiple = div_u64(pll_freq * multiplier, divider);
	dec = div_u64_rem(dec_multiple, multiplier, &frac);

	/*
	 * Per-band clock-inverter settings; thresholds and values differ per
	 * hardware revision (vendor-provided tables — do not reorder).
	 */
	if (pll->phy->cfg->quirks & DSI_PHY_7NM_QUIRK_PRE_V4_1) {
		config->pll_clock_inverters = 0x28;
	} else if ((pll->phy->cfg->quirks & DSI_PHY_7NM_QUIRK_V7_0)) {
		if (pll_freq < 163000000ULL)
			config->pll_clock_inverters = 0xa0;
		else if (pll_freq < 175000000ULL)
			config->pll_clock_inverters = 0x20;
		else if (pll_freq < 325000000ULL)
			config->pll_clock_inverters = 0xa0;
		else if (pll_freq < 350000000ULL)
			config->pll_clock_inverters = 0x20;
		else if (pll_freq < 650000000ULL)
			config->pll_clock_inverters = 0xa0;
		else if (pll_freq < 700000000ULL)
			config->pll_clock_inverters = 0x20;
		else if (pll_freq < 1300000000ULL)
			config->pll_clock_inverters = 0xa0;
		else if (pll_freq < 2500000000ULL)
			config->pll_clock_inverters = 0x20;
		else if (pll_freq < 4000000000ULL)
			config->pll_clock_inverters = 0x00;
		else
			config->pll_clock_inverters = 0x40;
	} else if ((pll->phy->cfg->quirks & DSI_PHY_7NM_QUIRK_V5_2)) {
		if (pll_freq <= 1300000000ULL)
			config->pll_clock_inverters = 0xa0;
		else if (pll_freq <= 2500000000ULL)
			config->pll_clock_inverters = 0x20;
		else if (pll_freq <= 4000000000ULL)
			config->pll_clock_inverters = 0x00;
		else
			config->pll_clock_inverters = 0x40;
	} else if (pll->phy->cfg->quirks & DSI_PHY_7NM_QUIRK_V4_1) {
		if (pll_freq <= 1000000000ULL)
			config->pll_clock_inverters = 0xa0;
		else if (pll_freq <= 2500000000ULL)
			config->pll_clock_inverters = 0x20;
		else if (pll_freq <= 3020000000ULL)
			config->pll_clock_inverters = 0x00;
		else
			config->pll_clock_inverters = 0x40;
	} else {
		/* 4.2, 4.3 */
		if (pll_freq <= 1000000000ULL)
			config->pll_clock_inverters = 0xa0;
		else if (pll_freq <= 2500000000ULL)
			config->pll_clock_inverters = 0x20;
		else if (pll_freq <= 3500000000ULL)
			config->pll_clock_inverters = 0x00;
		else
			config->pll_clock_inverters = 0x40;
	}

	config->decimal_div_start = dec;
	config->frac_div_start = frac;
}
200 
/* Bits in REG_DSI_7nm_PHY_PLL_SSC_CONTROL */
#define SSC_CENTER		BIT(0)
#define SSC_EN			BIT(1)
203 
204 static void dsi_pll_calc_ssc(struct dsi_pll_7nm *pll, struct dsi_pll_config *config)
205 {
206 	u32 ssc_per;
207 	u32 ssc_mod;
208 	u64 ssc_step_size;
209 	u64 frac;
210 
211 	if (!config->enable_ssc) {
212 		DBG("SSC not enabled\n");
213 		return;
214 	}
215 
216 	ssc_per = DIV_ROUND_CLOSEST(VCO_REF_CLK_RATE, config->ssc_freq) / 2 - 1;
217 	ssc_mod = (ssc_per + 1) % (config->ssc_adj_per + 1);
218 	ssc_per -= ssc_mod;
219 
220 	frac = config->frac_div_start;
221 	ssc_step_size = config->decimal_div_start;
222 	ssc_step_size *= (1 << FRAC_BITS);
223 	ssc_step_size += frac;
224 	ssc_step_size *= config->ssc_offset;
225 	ssc_step_size *= (config->ssc_adj_per + 1);
226 	ssc_step_size = div_u64(ssc_step_size, (ssc_per + 1));
227 	ssc_step_size = DIV_ROUND_CLOSEST_ULL(ssc_step_size, 1000000);
228 
229 	config->ssc_div_per = ssc_per;
230 	config->ssc_stepsize = ssc_step_size;
231 
232 	pr_debug("SCC: Dec:%d, frac:%llu, frac_bits:%d\n",
233 		 config->decimal_div_start, frac, FRAC_BITS);
234 	pr_debug("SSC: div_per:0x%X, stepsize:0x%X, adjper:0x%X\n",
235 		 ssc_per, (u32)ssc_step_size, config->ssc_adj_per);
236 }
237 
/*
 * Program the SSC parameters computed by dsi_pll_calc_ssc() into the PLL
 * SSC registers and enable SSC; does nothing when SSC is disabled.
 */
static void dsi_pll_ssc_commit(struct dsi_pll_7nm *pll, struct dsi_pll_config *config)
{
	void __iomem *base = pll->phy->pll_base;

	if (config->enable_ssc) {
		pr_debug("SSC is enabled\n");

		/* 16-bit values split across LOW/HIGH register pairs */
		writel(config->ssc_stepsize & 0xff,
		       base + REG_DSI_7nm_PHY_PLL_SSC_STEPSIZE_LOW_1);
		writel(config->ssc_stepsize >> 8,
		       base + REG_DSI_7nm_PHY_PLL_SSC_STEPSIZE_HIGH_1);
		writel(config->ssc_div_per & 0xff,
		       base + REG_DSI_7nm_PHY_PLL_SSC_DIV_PER_LOW_1);
		writel(config->ssc_div_per >> 8,
		       base + REG_DSI_7nm_PHY_PLL_SSC_DIV_PER_HIGH_1);
		writel(config->ssc_adj_per & 0xff,
		       base + REG_DSI_7nm_PHY_PLL_SSC_ADJPER_LOW_1);
		writel(config->ssc_adj_per >> 8,
		       base + REG_DSI_7nm_PHY_PLL_SSC_ADJPER_HIGH_1);
		/* enable last, after all parameters are in place */
		writel(SSC_EN | (config->ssc_center ? SSC_CENTER : 0),
		       base + REG_DSI_7nm_PHY_PLL_SSC_CONTROL);
	}
}
261 
/*
 * Program the (mostly) frequency-independent PLL registers: analog
 * controls, loop filter and calibration settings. A few values vary with
 * hardware revision and the current VCO rate.
 */
static void dsi_pll_config_hzindep_reg(struct dsi_pll_7nm *pll)
{
	void __iomem *base = pll->phy->pll_base;
	u8 analog_controls_five_1 = 0x01, vco_config_1 = 0x00;

	if (!(pll->phy->cfg->quirks & DSI_PHY_7NM_QUIRK_PRE_V4_1))
		if (pll->vco_current_rate >= 3100000000ULL)
			analog_controls_five_1 = 0x03;

	/* VCO_CONFIG_1 band selection differs per revision */
	if (pll->phy->cfg->quirks & DSI_PHY_7NM_QUIRK_V4_1) {
		if (pll->vco_current_rate < 1520000000ULL)
			vco_config_1 = 0x08;
		else if (pll->vco_current_rate < 2990000000ULL)
			vco_config_1 = 0x01;
	}

	if ((pll->phy->cfg->quirks & DSI_PHY_7NM_QUIRK_V4_2) ||
	    (pll->phy->cfg->quirks & DSI_PHY_7NM_QUIRK_V4_3)) {
		if (pll->vco_current_rate < 1520000000ULL)
			vco_config_1 = 0x08;
		else if (pll->vco_current_rate >= 2990000000ULL)
			vco_config_1 = 0x01;
	}

	if ((pll->phy->cfg->quirks & DSI_PHY_7NM_QUIRK_V5_2) ||
	    (pll->phy->cfg->quirks & DSI_PHY_7NM_QUIRK_V7_0)) {
		if (pll->vco_current_rate < 1557000000ULL)
			vco_config_1 = 0x08;
		else
			vco_config_1 = 0x01;
	}

	writel(analog_controls_five_1, base + REG_DSI_7nm_PHY_PLL_ANALOG_CONTROLS_FIVE_1);
	writel(vco_config_1, base + REG_DSI_7nm_PHY_PLL_VCO_CONFIG_1);
	writel(0x01, base + REG_DSI_7nm_PHY_PLL_ANALOG_CONTROLS_FIVE);
	writel(0x03, base + REG_DSI_7nm_PHY_PLL_ANALOG_CONTROLS_TWO);
	writel(0x00, base + REG_DSI_7nm_PHY_PLL_ANALOG_CONTROLS_THREE);
	writel(0x00, base + REG_DSI_7nm_PHY_PLL_DSM_DIVIDER);
	writel(0x4e, base + REG_DSI_7nm_PHY_PLL_FEEDBACK_DIVIDER);
	writel(0x40, base + REG_DSI_7nm_PHY_PLL_CALIBRATION_SETTINGS);
	writel(0xba, base + REG_DSI_7nm_PHY_PLL_BAND_SEL_CAL_SETTINGS_THREE);
	writel(0x0c, base + REG_DSI_7nm_PHY_PLL_FREQ_DETECT_SETTINGS_ONE);
	writel(0x00, base + REG_DSI_7nm_PHY_PLL_OUTDIV);
	writel(0x00, base + REG_DSI_7nm_PHY_PLL_CORE_OVERRIDE);
	writel(0x08, base + REG_DSI_7nm_PHY_PLL_PLL_DIGITAL_TIMERS_TWO);
	writel(0x0a, base + REG_DSI_7nm_PHY_PLL_PLL_PROP_GAIN_RATE_1);
	writel(0xc0, base + REG_DSI_7nm_PHY_PLL_PLL_BAND_SEL_RATE_1);
	/*
	 * NOTE(review): INT_GAIN_IFILT_BAND_1, PFILT and IFILT are each
	 * written twice back-to-back below; the sequence is kept as-is from
	 * the original init tables — confirm against vendor documentation.
	 */
	writel(0x84, base + REG_DSI_7nm_PHY_PLL_PLL_INT_GAIN_IFILT_BAND_1);
	writel(0x82, base + REG_DSI_7nm_PHY_PLL_PLL_INT_GAIN_IFILT_BAND_1);
	writel(0x4c, base + REG_DSI_7nm_PHY_PLL_PLL_FL_INT_GAIN_PFILT_BAND_1);
	writel(0x80, base + REG_DSI_7nm_PHY_PLL_PLL_LOCK_OVERRIDE);
	writel(0x29, base + REG_DSI_7nm_PHY_PLL_PFILT);
	writel(0x2f, base + REG_DSI_7nm_PHY_PLL_PFILT);
	writel(0x2a, base + REG_DSI_7nm_PHY_PLL_IFILT);
	writel(!(pll->phy->cfg->quirks & DSI_PHY_7NM_QUIRK_PRE_V4_1) ? 0x3f : 0x22,
	       base + REG_DSI_7nm_PHY_PLL_IFILT);

	if (!(pll->phy->cfg->quirks & DSI_PHY_7NM_QUIRK_PRE_V4_1)) {
		writel(0x22, base + REG_DSI_7nm_PHY_PLL_PERF_OPTIMIZE);
		if (pll->slave)
			writel(0x22, pll->slave->phy->pll_base + REG_DSI_7nm_PHY_PLL_PERF_OPTIMIZE);
	}
}
325 
/*
 * Program the rate-dependent PLL registers: the integer and fractional
 * feedback-divider parts, lock detection settings, C-PHY mode select and
 * the band-specific clock inverters.
 */
static void dsi_pll_commit(struct dsi_pll_7nm *pll, struct dsi_pll_config *config)
{
	void __iomem *base = pll->phy->pll_base;

	writel(0x12, base + REG_DSI_7nm_PHY_PLL_CORE_INPUT_OVERRIDE);
	writel(config->decimal_div_start,
	       base + REG_DSI_7nm_PHY_PLL_DECIMAL_DIV_START_1);
	/* 18-bit fractional value split over LOW/MID/HIGH registers */
	writel(config->frac_div_start & 0xff,
	       base + REG_DSI_7nm_PHY_PLL_FRAC_DIV_START_LOW_1);
	writel((config->frac_div_start & 0xff00) >> 8,
	       base + REG_DSI_7nm_PHY_PLL_FRAC_DIV_START_MID_1);
	writel((config->frac_div_start & 0x30000) >> 16,
	       base + REG_DSI_7nm_PHY_PLL_FRAC_DIV_START_HIGH_1);
	writel(0x40, base + REG_DSI_7nm_PHY_PLL_PLL_LOCKDET_RATE_1);
	writel(0x06, base + REG_DSI_7nm_PHY_PLL_PLL_LOCK_DELAY);
	writel(pll->phy->cphy_mode ? 0x00 : 0x10,
	       base + REG_DSI_7nm_PHY_PLL_CMODE_1);
	writel(config->pll_clock_inverters,
	       base + REG_DSI_7nm_PHY_PLL_CLOCK_INVERTERS_1);
}
346 
/*
 * clk_ops.set_rate for the VCO: compute dividers/SSC for @rate and program
 * the PLL registers. PLL bias is held enabled around the register writes.
 * Always returns 0.
 */
static int dsi_pll_7nm_vco_set_rate(struct clk_hw *hw, unsigned long rate,
				     unsigned long parent_rate)
{
	struct dsi_pll_7nm *pll_7nm = to_pll_7nm(hw);
	struct dsi_pll_config config;

	dsi_pll_enable_pll_bias(pll_7nm);
	DBG("DSI PLL%d rate=%lu, parent's=%lu", pll_7nm->phy->id, rate,
	    parent_rate);

	/* cached for dsi_pll_calc_dec_frac() and later restore_state() */
	pll_7nm->vco_current_rate = rate;

	dsi_pll_setup_config(&config);

	dsi_pll_calc_dec_frac(pll_7nm, &config);

	dsi_pll_calc_ssc(pll_7nm, &config);

	dsi_pll_commit(pll_7nm, &config);

	dsi_pll_config_hzindep_reg(pll_7nm);

	dsi_pll_ssc_commit(pll_7nm, &config);

	dsi_pll_disable_pll_bias(pll_7nm);
	/* flush, ensure all register writes are done*/
	wmb();

	return 0;
}
377 
/*
 * Poll COMMON_STATUS_ONE bit 0 until the PLL reports lock, checking every
 * 100 us for up to 5 ms. Returns 0 on lock, -ETIMEDOUT otherwise (per
 * readl_poll_timeout_atomic semantics).
 */
static int dsi_pll_7nm_lock_status(struct dsi_pll_7nm *pll)
{
	int rc;
	u32 status = 0;
	u32 const delay_us = 100;
	u32 const timeout_us = 5000;

	rc = readl_poll_timeout_atomic(pll->phy->pll_base +
				       REG_DSI_7nm_PHY_PLL_COMMON_STATUS_ONE,
				       status,
				       ((status & BIT(0)) > 0),
				       delay_us,
				       timeout_us);
	if (rc)
		pr_err("DSI PLL(%d) lock failed, status=0x%08x\n",
		       pll->phy->id, status);

	return rc;
}
397 
/*
 * Drop one PLL-bias reference; on the last reference, power the PLL down by
 * clearing SYSTEM_MUXES and the PLL_SHUTDOWNB bit in CMN_CTRL_0. An
 * unbalanced disable is logged and ignored.
 */
static void dsi_pll_disable_pll_bias(struct dsi_pll_7nm *pll)
{
	unsigned long flags;
	u32 data;

	spin_lock_irqsave(&pll->pll_enable_lock, flags);
	--pll->pll_enable_cnt;
	if (pll->pll_enable_cnt < 0) {
		/* refcount underflow: caller bug, restore nothing */
		spin_unlock_irqrestore(&pll->pll_enable_lock, flags);
		DRM_DEV_ERROR_RATELIMITED(&pll->phy->pdev->dev,
					  "bug: imbalance in disabling PLL bias\n");
		return;
	} else if (pll->pll_enable_cnt > 0) {
		/* other users still hold the bias enabled */
		spin_unlock_irqrestore(&pll->pll_enable_lock, flags);
		return;
	} /* else: == 0 */

	data = readl(pll->phy->base + REG_DSI_7nm_PHY_CMN_CTRL_0);
	data &= ~DSI_7nm_PHY_CMN_CTRL_0_PLL_SHUTDOWNB;
	writel(0, pll->phy->pll_base + REG_DSI_7nm_PHY_PLL_SYSTEM_MUXES);
	writel(data, pll->phy->base + REG_DSI_7nm_PHY_CMN_CTRL_0);
	spin_unlock_irqrestore(&pll->pll_enable_lock, flags);
	ndelay(250);
}
422 
/*
 * Take one PLL-bias reference and power the PLL bias up: set PLL_SHUTDOWNB
 * in CMN_CTRL_0 and program SYSTEM_MUXES. The register writes are repeated
 * on every call (not only the 0 -> 1 transition); they re-assert the same
 * state. Paired with dsi_pll_disable_pll_bias().
 */
static void dsi_pll_enable_pll_bias(struct dsi_pll_7nm *pll)
{
	unsigned long flags;
	u32 data;

	spin_lock_irqsave(&pll->pll_enable_lock, flags);
	pll->pll_enable_cnt++;
	WARN_ON(pll->pll_enable_cnt == INT_MAX);

	data = readl(pll->phy->base + REG_DSI_7nm_PHY_CMN_CTRL_0);
	data |= DSI_7nm_PHY_CMN_CTRL_0_PLL_SHUTDOWNB;
	writel(data, pll->phy->base + REG_DSI_7nm_PHY_CMN_CTRL_0);

	writel(0xc0, pll->phy->pll_base + REG_DSI_7nm_PHY_PLL_SYSTEM_MUXES);
	spin_unlock_irqrestore(&pll->pll_enable_lock, flags);
	ndelay(250);
}
440 
/* Write CMN_CLK_CFG0 (bit/pixel clock dividers) under postdiv_lock. */
static void dsi_pll_cmn_clk_cfg0_write(struct dsi_pll_7nm *pll, u32 val)
{
	unsigned long flags;

	spin_lock_irqsave(&pll->postdiv_lock, flags);
	writel(val, pll->phy->base + REG_DSI_7nm_PHY_CMN_CLK_CFG0);
	spin_unlock_irqrestore(&pll->postdiv_lock, flags);
}
449 
/*
 * Read-modify-write CMN_CLK_CFG1 under pclk_mux_lock: bits in @mask are
 * replaced by the corresponding bits of @val, other bits are preserved.
 */
static void dsi_pll_cmn_clk_cfg1_update(struct dsi_pll_7nm *pll, u32 mask,
					u32 val)
{
	unsigned long flags;
	u32 data;

	spin_lock_irqsave(&pll->pclk_mux_lock, flags);
	data = readl(pll->phy->base + REG_DSI_7nm_PHY_CMN_CLK_CFG1);
	data &= ~mask;
	data |= val & mask;

	writel(data, pll->phy->base + REG_DSI_7nm_PHY_CMN_CLK_CFG1);
	spin_unlock_irqrestore(&pll->pclk_mux_lock, flags);
}
464 
/* Gate the PHY output clocks by clearing CLK_EN in CMN_CLK_CFG1. */
static void dsi_pll_disable_global_clk(struct dsi_pll_7nm *pll)
{
	dsi_pll_cmn_clk_cfg1_update(pll, DSI_7nm_PHY_CMN_CLK_CFG1_CLK_EN, 0);
}
469 
/* Ungate the PHY output clocks: set CLK_EN and CLK_EN_SEL in CMN_CLK_CFG1. */
static void dsi_pll_enable_global_clk(struct dsi_pll_7nm *pll)
{
	u32 cfg_1 = DSI_7nm_PHY_CMN_CLK_CFG1_CLK_EN | DSI_7nm_PHY_CMN_CLK_CFG1_CLK_EN_SEL;

	writel(0x04, pll->phy->base + REG_DSI_7nm_PHY_CMN_CTRL_3);
	dsi_pll_cmn_clk_cfg1_update(pll, cfg_1, cfg_1);
}
477 
static void dsi_pll_phy_dig_reset(struct dsi_pll_7nm *pll)
{
	/*
	 * Reset the PHY digital domain. This would be needed when
	 * coming out of a CX or analog rail power collapse while
	 * ensuring that the pads maintain LP00 or LP11 state
	 */
	writel(BIT(0), pll->phy->base + REG_DSI_7nm_PHY_CMN_GLBL_DIGTOP_SPARE4);
	wmb(); /* Ensure that the reset is asserted before deasserting it */
	writel(0, pll->phy->base + REG_DSI_7nm_PHY_CMN_GLBL_DIGTOP_SPARE4);
	wmb(); /* Ensure that the reset is deasserted */
}
490 
/*
 * clk_ops.prepare for the VCO: power up bias (master and, in bonded mode,
 * slave), start the PLL, wait for lock, then reset the PHY digital domain
 * and enable the output clocks. Returns 0 on success or the lock-status
 * error code.
 */
static int dsi_pll_7nm_vco_prepare(struct clk_hw *hw)
{
	struct dsi_pll_7nm *pll_7nm = to_pll_7nm(hw);
	int rc;

	dsi_pll_enable_pll_bias(pll_7nm);
	if (pll_7nm->slave)
		dsi_pll_enable_pll_bias(pll_7nm->slave);

	/* Start PLL */
	writel(BIT(0), pll_7nm->phy->base + REG_DSI_7nm_PHY_CMN_PLL_CNTRL);

	/*
	 * ensure all PLL configurations are written prior to checking
	 * for PLL lock.
	 */
	wmb();

	/* Check for PLL lock */
	rc = dsi_pll_7nm_lock_status(pll_7nm);
	if (rc) {
		/*
		 * NOTE(review): the bias references taken above are not
		 * dropped on this failure path — confirm whether that is
		 * intentional.
		 */
		pr_err("PLL(%d) lock failed\n", pll_7nm->phy->id);
		goto error;
	}

	pll_7nm->phy->pll_on = true;

	/*
	 * assert power on reset for PHY digital in case the PLL is
	 * enabled after CX of analog domain power collapse. This needs
	 * to be done before enabling the global clk.
	 */
	dsi_pll_phy_dig_reset(pll_7nm);
	if (pll_7nm->slave)
		dsi_pll_phy_dig_reset(pll_7nm->slave);

	dsi_pll_enable_global_clk(pll_7nm);
	if (pll_7nm->slave)
		dsi_pll_enable_global_clk(pll_7nm->slave);

	writel(0x1, pll_7nm->phy->base + REG_DSI_7nm_PHY_CMN_RBUF_CTRL);
	if (pll_7nm->slave)
		writel(0x1, pll_7nm->slave->phy->base + REG_DSI_7nm_PHY_CMN_RBUF_CTRL);

error:
	return rc;
}
538 
/* Per-PLL teardown helper: clear RBUF_CTRL, then drop a bias reference. */
static void dsi_pll_disable_sub(struct dsi_pll_7nm *pll)
{
	writel(0, pll->phy->base + REG_DSI_7nm_PHY_CMN_RBUF_CTRL);
	dsi_pll_disable_pll_bias(pll);
}
544 
/*
 * clk_ops.unprepare for the VCO: gate the output clocks, stop the PLL and
 * power down bias for master and (in bonded mode) slave.
 */
static void dsi_pll_7nm_vco_unprepare(struct clk_hw *hw)
{
	struct dsi_pll_7nm *pll_7nm = to_pll_7nm(hw);

	/*
	 * To avoid any stray glitches while abruptly powering down the PLL
	 * make sure to gate the clock using the clock enable bit before
	 * powering down the PLL
	 */
	dsi_pll_disable_global_clk(pll_7nm);
	writel(0, pll_7nm->phy->base + REG_DSI_7nm_PHY_CMN_PLL_CNTRL);
	dsi_pll_disable_sub(pll_7nm);
	if (pll_7nm->slave) {
		dsi_pll_disable_global_clk(pll_7nm->slave);
		dsi_pll_disable_sub(pll_7nm->slave);
	}
	/* flush, ensure all register writes are done */
	wmb();
	pll_7nm->phy->pll_on = false;
}
565 
/*
 * clk_ops.recalc_rate for the VCO: read back the integer and fractional
 * feedback-divider registers and reconstruct the VCO rate as
 * (dec + frac / 2^FRAC_BITS) * 2 * ref_clk. Also refreshes
 * vco_current_rate with the computed value.
 */
static unsigned long dsi_pll_7nm_vco_recalc_rate(struct clk_hw *hw,
						  unsigned long parent_rate)
{
	struct dsi_pll_7nm *pll_7nm = to_pll_7nm(hw);
	void __iomem *base = pll_7nm->phy->pll_base;
	u64 ref_clk = VCO_REF_CLK_RATE;
	u64 vco_rate = 0x0;
	u64 multiplier;
	u32 frac;
	u32 dec;
	u64 pll_freq, tmp64;

	/* bias must be up for the register reads to be valid */
	dsi_pll_enable_pll_bias(pll_7nm);
	dec = readl(base + REG_DSI_7nm_PHY_PLL_DECIMAL_DIV_START_1);
	dec &= 0xff;

	/* 18-bit fractional value from the LOW/MID/HIGH register triple */
	frac = readl(base + REG_DSI_7nm_PHY_PLL_FRAC_DIV_START_LOW_1);
	frac |= ((readl(base + REG_DSI_7nm_PHY_PLL_FRAC_DIV_START_MID_1) &
		  0xff) << 8);
	frac |= ((readl(base + REG_DSI_7nm_PHY_PLL_FRAC_DIV_START_HIGH_1) &
		  0x3) << 16);

	/*
	 * TODO:
	 *	1. Assumes prescaler is disabled
	 */
	multiplier = 1 << FRAC_BITS;
	pll_freq = dec * (ref_clk * 2);
	tmp64 = (ref_clk * 2 * frac);
	pll_freq += div_u64(tmp64, multiplier);

	vco_rate = pll_freq;
	pll_7nm->vco_current_rate = vco_rate;

	DBG("DSI PLL%d returning vco rate = %lu, dec = %x, frac = %x",
	    pll_7nm->phy->id, (unsigned long)vco_rate, dec, frac);

	dsi_pll_disable_pll_bias(pll_7nm);

	return (unsigned long)vco_rate;
}
607 
608 static int dsi_pll_7nm_clk_determine_rate(struct clk_hw *hw,
609 					  struct clk_rate_request *req)
610 {
611 	struct dsi_pll_7nm *pll_7nm = to_pll_7nm(hw);
612 
613 	req->rate = clamp_t(unsigned long, req->rate,
614 			    pll_7nm->phy->cfg->min_pll_rate, pll_7nm->phy->cfg->max_pll_rate);
615 
616 	return 0;
617 }
618 
/* clk_hw callbacks for the dsi%dvco_clk registered in pll_7nm_register(). */
static const struct clk_ops clk_ops_dsi_pll_7nm_vco = {
	.determine_rate = dsi_pll_7nm_clk_determine_rate,
	.set_rate = dsi_pll_7nm_vco_set_rate,
	.recalc_rate = dsi_pll_7nm_vco_recalc_rate,
	.prepare = dsi_pll_7nm_vco_prepare,
	.unprepare = dsi_pll_7nm_vco_unprepare,
};
626 
627 /*
628  * PLL Callbacks
629  */
630 
/*
 * Cache the out-divider, bit/pixel clock dividers and pclk mux selection so
 * dsi_7nm_pll_restore_state() can re-program them after a power collapse.
 */
static void dsi_7nm_pll_save_state(struct msm_dsi_phy *phy)
{
	struct dsi_pll_7nm *pll_7nm = to_pll_7nm(phy->vco_hw);
	struct pll_7nm_cached_state *cached = &pll_7nm->cached_state;
	void __iomem *phy_base = pll_7nm->phy->base;
	u32 cmn_clk_cfg0, cmn_clk_cfg1;

	/* bias must be up for the register reads to be valid */
	dsi_pll_enable_pll_bias(pll_7nm);
	cached->pll_out_div = readl(pll_7nm->phy->pll_base +
			REG_DSI_7nm_PHY_PLL_PLL_OUTDIV_RATE);
	cached->pll_out_div &= 0x3;

	cmn_clk_cfg0 = readl(phy_base + REG_DSI_7nm_PHY_CMN_CLK_CFG0);
	cached->bit_clk_div = FIELD_GET(DSI_7nm_PHY_CMN_CLK_CFG0_DIV_CTRL_3_0__MASK, cmn_clk_cfg0);
	cached->pix_clk_div = FIELD_GET(DSI_7nm_PHY_CMN_CLK_CFG0_DIV_CTRL_7_4__MASK, cmn_clk_cfg0);

	cmn_clk_cfg1 = readl(phy_base + REG_DSI_7nm_PHY_CMN_CLK_CFG1);
	cached->pll_mux = FIELD_GET(DSI_7nm_PHY_CMN_CLK_CFG1_DSICLK_SEL__MASK, cmn_clk_cfg1);

	dsi_pll_disable_pll_bias(pll_7nm);
	DBG("DSI PLL%d outdiv %x bit_clk_div %x pix_clk_div %x pll_mux %x",
	    pll_7nm->phy->id, cached->pll_out_div, cached->bit_clk_div,
	    cached->pix_clk_div, cached->pll_mux);
}
655 
/*
 * Re-program the divider/mux state cached by dsi_7nm_pll_save_state() and
 * re-apply the last VCO rate (pll->vco_current_rate). Returns 0 on success
 * or the set_rate error code.
 */
static int dsi_7nm_pll_restore_state(struct msm_dsi_phy *phy)
{
	struct dsi_pll_7nm *pll_7nm = to_pll_7nm(phy->vco_hw);
	struct pll_7nm_cached_state *cached = &pll_7nm->cached_state;
	u32 val;
	int ret;

	/* restore out-divider (low 2 bits), preserving the rest */
	val = readl(pll_7nm->phy->pll_base + REG_DSI_7nm_PHY_PLL_PLL_OUTDIV_RATE);
	val &= ~0x3;
	val |= cached->pll_out_div;
	writel(val, pll_7nm->phy->pll_base + REG_DSI_7nm_PHY_PLL_PLL_OUTDIV_RATE);

	dsi_pll_cmn_clk_cfg0_write(pll_7nm,
				   DSI_7nm_PHY_CMN_CLK_CFG0_DIV_CTRL_3_0(cached->bit_clk_div) |
				   DSI_7nm_PHY_CMN_CLK_CFG0_DIV_CTRL_7_4(cached->pix_clk_div));
	dsi_pll_cmn_clk_cfg1_update(pll_7nm, DSI_7nm_PHY_CMN_CLK_CFG1_DSICLK_SEL__MASK,
				    cached->pll_mux);

	ret = dsi_pll_7nm_vco_set_rate(phy->vco_hw,
			pll_7nm->vco_current_rate,
			VCO_REF_CLK_RATE);
	if (ret) {
		DRM_DEV_ERROR(&pll_7nm->phy->pdev->dev,
			"restore vco rate failed. ret=%d\n", ret);
		return ret;
	}

	DBG("DSI PLL%d", pll_7nm->phy->id);

	return 0;
}
687 
/*
 * Configure this PHY's role for bonded DSI: standalone or master use the
 * internal PLL as bit-clock source; a slave selects the external (master's)
 * PLL. A master also links its slave's private data for the clk callbacks.
 * Returns 0 on success, -EINVAL for an unknown usecase.
 */
static int dsi_7nm_set_usecase(struct msm_dsi_phy *phy)
{
	struct dsi_pll_7nm *pll_7nm = to_pll_7nm(phy->vco_hw);
	void __iomem *base = phy->base;
	u32 data = 0x0;	/* internal PLL */

	DBG("DSI PLL%d", pll_7nm->phy->id);

	switch (phy->usecase) {
	case MSM_DSI_PHY_STANDALONE:
		break;
	case MSM_DSI_PHY_MASTER:
		pll_7nm->slave = pll_7nm_list[(pll_7nm->phy->id + 1) % DSI_MAX];
		/* v7.0: Enable ATB_EN0 and alternate clock output to external phy */
		if (phy->cfg->quirks & DSI_PHY_7NM_QUIRK_V7_0)
			writel(0x07, base + REG_DSI_7nm_PHY_CMN_CTRL_5);
		break;
	case MSM_DSI_PHY_SLAVE:
		data = 0x1; /* external PLL */
		break;
	default:
		return -EINVAL;
	}

	/* set PLL src */
	dsi_pll_cmn_clk_cfg1_update(pll_7nm, DSI_7nm_PHY_CMN_CLK_CFG1_BITCLK_SEL__MASK,
				    DSI_7nm_PHY_CMN_CLK_CFG1_BITCLK_SEL(data));

	return 0;
}
718 
719 /*
720  * The post dividers and mux clocks are created using the standard divider and
721  * mux API. Unlike the 14nm PHY, the slave PLL doesn't need its dividers/mux
722  * state to follow the master PLL's divider/mux state. Therefore, we don't
723  * require special clock ops that also configure the slave PLL registers
724  */
725 static int pll_7nm_register(struct dsi_pll_7nm *pll_7nm, struct clk_hw **provided_clocks)
726 {
727 	char clk_name[32];
728 	struct clk_init_data vco_init = {
729 		.parent_data = &(const struct clk_parent_data) {
730 			.fw_name = "ref",
731 		},
732 		.num_parents = 1,
733 		.name = clk_name,
734 		.flags = CLK_IGNORE_UNUSED,
735 		.ops = &clk_ops_dsi_pll_7nm_vco,
736 	};
737 	struct device *dev = &pll_7nm->phy->pdev->dev;
738 	struct clk_hw *hw, *pll_out_div, *pll_bit, *pll_by_2_bit;
739 	struct clk_hw *pll_post_out_div, *phy_pll_out_dsi_parent;
740 	int ret;
741 
742 	DBG("DSI%d", pll_7nm->phy->id);
743 
744 	snprintf(clk_name, sizeof(clk_name), "dsi%dvco_clk", pll_7nm->phy->id);
745 	pll_7nm->clk_hw.init = &vco_init;
746 
747 	ret = devm_clk_hw_register(dev, &pll_7nm->clk_hw);
748 	if (ret)
749 		return ret;
750 
751 	snprintf(clk_name, sizeof(clk_name), "dsi%d_pll_out_div_clk", pll_7nm->phy->id);
752 
753 	pll_out_div = devm_clk_hw_register_divider_parent_hw(dev, clk_name,
754 			&pll_7nm->clk_hw, CLK_SET_RATE_PARENT,
755 			pll_7nm->phy->pll_base +
756 				REG_DSI_7nm_PHY_PLL_PLL_OUTDIV_RATE,
757 			0, 2, CLK_DIVIDER_POWER_OF_TWO, NULL);
758 	if (IS_ERR(pll_out_div)) {
759 		ret = PTR_ERR(pll_out_div);
760 		goto fail;
761 	}
762 
763 	snprintf(clk_name, sizeof(clk_name), "dsi%d_pll_bit_clk", pll_7nm->phy->id);
764 
765 	/* BIT CLK: DIV_CTRL_3_0 */
766 	pll_bit = devm_clk_hw_register_divider_parent_hw(dev, clk_name,
767 			pll_out_div, CLK_SET_RATE_PARENT,
768 			pll_7nm->phy->base + REG_DSI_7nm_PHY_CMN_CLK_CFG0,
769 			0, 4, CLK_DIVIDER_ONE_BASED, &pll_7nm->postdiv_lock);
770 	if (IS_ERR(pll_bit)) {
771 		ret = PTR_ERR(pll_bit);
772 		goto fail;
773 	}
774 
775 	snprintf(clk_name, sizeof(clk_name), "dsi%d_phy_pll_out_byteclk", pll_7nm->phy->id);
776 
777 	/* DSI Byte clock = VCO_CLK / OUT_DIV / BIT_DIV / 8 */
778 	hw = devm_clk_hw_register_fixed_factor_parent_hw(dev, clk_name,
779 			pll_bit, CLK_SET_RATE_PARENT, 1,
780 			pll_7nm->phy->cphy_mode ? 7 : 8);
781 	if (IS_ERR(hw)) {
782 		ret = PTR_ERR(hw);
783 		goto fail;
784 	}
785 
786 	provided_clocks[DSI_BYTE_PLL_CLK] = hw;
787 
788 	snprintf(clk_name, sizeof(clk_name), "dsi%d_pll_by_2_bit_clk", pll_7nm->phy->id);
789 
790 	pll_by_2_bit = devm_clk_hw_register_fixed_factor_parent_hw(dev,
791 			clk_name, pll_bit, 0, 1, 2);
792 	if (IS_ERR(pll_by_2_bit)) {
793 		ret = PTR_ERR(pll_by_2_bit);
794 		goto fail;
795 	}
796 
797 	snprintf(clk_name, sizeof(clk_name), "dsi%d_pll_post_out_div_clk", pll_7nm->phy->id);
798 
799 	if (pll_7nm->phy->cphy_mode)
800 		pll_post_out_div = devm_clk_hw_register_fixed_factor_parent_hw(
801 				dev, clk_name, pll_out_div, 0, 2, 7);
802 	else
803 		pll_post_out_div = devm_clk_hw_register_fixed_factor_parent_hw(
804 				dev, clk_name, pll_out_div, 0, 1, 4);
805 	if (IS_ERR(pll_post_out_div)) {
806 		ret = PTR_ERR(pll_post_out_div);
807 		goto fail;
808 	}
809 
810 	/* in CPHY mode, pclk_mux will always have post_out_div as parent
811 	 * don't register a pclk_mux clock and just use post_out_div instead
812 	 */
813 	if (pll_7nm->phy->cphy_mode) {
814 		dsi_pll_cmn_clk_cfg1_update(pll_7nm,
815 					    DSI_7nm_PHY_CMN_CLK_CFG1_DSICLK_SEL__MASK,
816 					    DSI_7nm_PHY_CMN_CLK_CFG1_DSICLK_SEL(3));
817 		phy_pll_out_dsi_parent = pll_post_out_div;
818 	} else {
819 		snprintf(clk_name, sizeof(clk_name), "dsi%d_pclk_mux", pll_7nm->phy->id);
820 
821 		hw = devm_clk_hw_register_mux_parent_hws(dev, clk_name,
822 				((const struct clk_hw *[]){
823 					pll_bit,
824 					pll_by_2_bit,
825 				}), 2, 0, pll_7nm->phy->base +
826 					REG_DSI_7nm_PHY_CMN_CLK_CFG1,
827 				0, 1, 0, &pll_7nm->pclk_mux_lock);
828 		if (IS_ERR(hw)) {
829 			ret = PTR_ERR(hw);
830 			goto fail;
831 		}
832 
833 		phy_pll_out_dsi_parent = hw;
834 	}
835 
836 	snprintf(clk_name, sizeof(clk_name), "dsi%d_phy_pll_out_dsiclk", pll_7nm->phy->id);
837 
838 	/* PIX CLK DIV : DIV_CTRL_7_4*/
839 	hw = devm_clk_hw_register_divider_parent_hw(dev, clk_name,
840 			phy_pll_out_dsi_parent, 0,
841 			pll_7nm->phy->base + REG_DSI_7nm_PHY_CMN_CLK_CFG0,
842 			4, 4, CLK_DIVIDER_ONE_BASED, &pll_7nm->postdiv_lock);
843 	if (IS_ERR(hw)) {
844 		ret = PTR_ERR(hw);
845 		goto fail;
846 	}
847 
848 	provided_clocks[DSI_PIXEL_PLL_CLK] = hw;
849 
850 	return 0;
851 
852 fail:
853 
854 	return ret;
855 }
856 
/*
 * Per-PHY init: allocate the private PLL struct, register it in the global
 * bonded-DSI list, set up the locks, register the clock tree and snapshot
 * the bootloader-programmed state. Returns 0 on success or a negative
 * errno.
 */
static int dsi_pll_7nm_init(struct msm_dsi_phy *phy)
{
	struct platform_device *pdev = phy->pdev;
	struct dsi_pll_7nm *pll_7nm;
	int ret;

	pll_7nm = devm_kzalloc(&pdev->dev, sizeof(*pll_7nm), GFP_KERNEL);
	if (!pll_7nm)
		return -ENOMEM;

	DBG("DSI PLL%d", phy->id);

	pll_7nm_list[phy->id] = pll_7nm;

	spin_lock_init(&pll_7nm->postdiv_lock);
	spin_lock_init(&pll_7nm->pclk_mux_lock);
	spin_lock_init(&pll_7nm->pll_enable_lock);

	pll_7nm->phy = phy;

	ret = pll_7nm_register(pll_7nm, phy->provided_clocks->hws);
	if (ret) {
		DRM_DEV_ERROR(&pdev->dev, "failed to register PLL: %d\n", ret);
		return ret;
	}

	phy->vco_hw = &pll_7nm->clk_hw;

	/* TODO: Remove this when we have proper display handover support */
	msm_dsi_phy_pll_save_state(phy);
	/*
	 * Store also proper vco_current_rate, because its value will be used in
	 * dsi_7nm_pll_restore_state().
	 */
	if (!dsi_pll_7nm_vco_recalc_rate(&pll_7nm->clk_hw, VCO_REF_CLK_RATE))
		pll_7nm->vco_current_rate = pll_7nm->phy->cfg->min_pll_rate;

	return 0;
}
896 
897 static int dsi_phy_hw_v4_0_is_pll_on(struct msm_dsi_phy *phy)
898 {
899 	void __iomem *base = phy->base;
900 	u32 data = 0;
901 
902 	data = readl(base + REG_DSI_7nm_PHY_CMN_PLL_CNTRL);
903 	mb(); /* make sure read happened */
904 
905 	return (data & BIT(0));
906 }
907 
908 static void dsi_phy_hw_v4_0_config_lpcdrx(struct msm_dsi_phy *phy, bool enable)
909 {
910 	void __iomem *lane_base = phy->lane_base;
911 	int phy_lane_0 = 0;	/* TODO: Support all lane swap configs */
912 
913 	/*
914 	 * LPRX and CDRX need to enabled only for physical data lane
915 	 * corresponding to the logical data lane 0
916 	 */
917 	if (enable)
918 		writel(0x3, lane_base + REG_DSI_7nm_PHY_LN_LPRX_CTRL(phy_lane_0));
919 	else
920 		writel(0, lane_base + REG_DSI_7nm_PHY_LN_LPRX_CTRL(phy_lane_0));
921 }
922 
/*
 * Program the per-lane registers for all 4 data lanes plus the clock lane
 * (index 4): disable LPRX/CDRX everywhere except logical lane 0, clear pin
 * swap, and apply revision-specific TX_DCTRL values.
 */
static void dsi_phy_hw_v4_0_lane_settings(struct msm_dsi_phy *phy)
{
	int i;
	const u8 tx_dctrl_0[] = { 0x00, 0x00, 0x00, 0x04, 0x01 };
	const u8 tx_dctrl_1[] = { 0x40, 0x40, 0x40, 0x46, 0x41 };
	const u8 *tx_dctrl = tx_dctrl_0;
	void __iomem *lane_base = phy->lane_base;

	/* V4.1 and later use the alternate TX_DCTRL table */
	if (!(phy->cfg->quirks & DSI_PHY_7NM_QUIRK_PRE_V4_1))
		tx_dctrl = tx_dctrl_1;

	/* Strength ctrl settings */
	for (i = 0; i < 5; i++) {
		/*
		 * Disable LPRX and CDRX for all lanes. And later on, it will
		 * be only enabled for the physical data lane corresponding
		 * to the logical data lane 0
		 */
		writel(0, lane_base + REG_DSI_7nm_PHY_LN_LPRX_CTRL(i));
		writel(0x0, lane_base + REG_DSI_7nm_PHY_LN_PIN_SWAP(i));
	}

	dsi_phy_hw_v4_0_config_lpcdrx(phy, true);

	/* other settings */
	for (i = 0; i < 5; i++) {
		writel(0x0, lane_base + REG_DSI_7nm_PHY_LN_CFG0(i));
		writel(0x0, lane_base + REG_DSI_7nm_PHY_LN_CFG1(i));
		/* lane 4 is the clock lane and gets a different CFG2 */
		writel(i == 4 ? 0x8a : 0xa, lane_base + REG_DSI_7nm_PHY_LN_CFG2(i));
		writel(tx_dctrl[i], lane_base + REG_DSI_7nm_PHY_LN_TX_DCTRL(i));
	}
}
955 
/*
 * dsi_7nm_phy_enable() - power up and configure the 7nm DSI PHY
 * @phy: the PHY to enable
 * @clk_req: requested bit clock rate and timing inputs
 *
 * Calculates D-PHY or C-PHY timings, requests and waits for REFGEN ready,
 * programs analog/global controls (LDO, strength, rescode offsets) whose
 * values depend on the PHY revision quirks and on whether the bit clock is
 * at or below 1.5 GHz, then writes the PHY timing registers and per-lane
 * settings. The register write order below follows the hardware power-up
 * sequence and must not be rearranged.
 *
 * Return: 0 on success, negative errno if timing calculation fails or
 * REFGEN does not become ready.
 */
static int dsi_7nm_phy_enable(struct msm_dsi_phy *phy,
			      struct msm_dsi_phy_clk_request *clk_req)
{
	int ret;
	u32 status;
	u32 const delay_us = 5;
	u32 const timeout_us = 1000;
	struct msm_dsi_dphy_timing *timing = &phy->timing;
	void __iomem *base = phy->base;
	bool less_than_1500_mhz;
	u32 vreg_ctrl_0, vreg_ctrl_1, lane_ctrl0;
	u32 glbl_pemph_ctrl_0;
	u32 glbl_str_swi_cal_sel_ctrl, glbl_hstx_str_ctrl_0;
	u32 glbl_rescode_top_ctrl, glbl_rescode_bot_ctrl;
	u32 data;

	DBG("");

	/* Timing math differs between C-PHY and D-PHY modes */
	if (phy->cphy_mode)
		ret = msm_dsi_cphy_timing_calc_v4(timing, clk_req);
	else
		ret = msm_dsi_dphy_timing_calc_v4(timing, clk_req);
	if (ret) {
		DRM_DEV_ERROR(&phy->pdev->dev,
			      "%s: PHY timing calculation failed\n", __func__);
		return -EINVAL;
	}

	if (dsi_phy_hw_v4_0_is_pll_on(phy))
		pr_warn("PLL turned on before configuring PHY\n");

	/* Request for REFGEN READY (newer revisions vote via a spare reg) */
	if ((phy->cfg->quirks & DSI_PHY_7NM_QUIRK_V4_3) ||
	    (phy->cfg->quirks & DSI_PHY_7NM_QUIRK_V5_2) ||
	    (phy->cfg->quirks & DSI_PHY_7NM_QUIRK_V7_0)) {
		writel(0x1, phy->base + REG_DSI_7nm_PHY_CMN_GLBL_DIGTOP_SPARE10);
		udelay(500);
	}

	/* wait for REFGEN READY (BIT(0) of the PHY status register) */
	ret = readl_poll_timeout_atomic(base + REG_DSI_7nm_PHY_CMN_PHY_STATUS,
					status, (status & BIT(0)),
					delay_us, timeout_us);
	if (ret) {
		pr_err("Ref gen not ready. Aborting\n");
		return -EINVAL;
	}

	/* TODO: CPHY enable path (this is for DPHY only) */

	/* Alter PHY configurations if data rate less than 1.5GHz */
	less_than_1500_mhz = (clk_req->bitclk_rate <= 1500000000);

	/* Mode-dependent defaults; quirk branches below override some */
	glbl_str_swi_cal_sel_ctrl = 0x00;
	if (phy->cphy_mode) {
		vreg_ctrl_0 = 0x51;
		vreg_ctrl_1 = 0x55;
		glbl_hstx_str_ctrl_0 = 0x00;
		glbl_pemph_ctrl_0 = 0x11;
		lane_ctrl0 = 0x17;
	} else {
		vreg_ctrl_0 = less_than_1500_mhz ? 0x53 : 0x52;
		vreg_ctrl_1 = 0x5c;
		glbl_hstx_str_ctrl_0 = 0x88;
		glbl_pemph_ctrl_0 = 0x00;
		lane_ctrl0 = 0x1f;
	}

	/* Revision-specific analog tuning, tested newest quirk first */
	if ((phy->cfg->quirks & DSI_PHY_7NM_QUIRK_V7_0)) {
		if (phy->cphy_mode) {
			/* TODO: different for second phy */
			vreg_ctrl_0 = 0x57;
			vreg_ctrl_1 = 0x41;
			glbl_rescode_top_ctrl = 0x3d;
			glbl_rescode_bot_ctrl = 0x38;
		} else {
			vreg_ctrl_0 = 0x56;
			vreg_ctrl_1 = 0x19;
			glbl_rescode_top_ctrl = less_than_1500_mhz ? 0x3c :  0x03;
			glbl_rescode_bot_ctrl = less_than_1500_mhz ? 0x38 :  0x3c;
		}
	} else if ((phy->cfg->quirks & DSI_PHY_7NM_QUIRK_V5_2)) {
		if (phy->cphy_mode) {
			vreg_ctrl_0 = 0x45;
			vreg_ctrl_1 = 0x41;
			glbl_rescode_top_ctrl = 0x00;
			glbl_rescode_bot_ctrl = 0x00;
		} else {
			vreg_ctrl_0 = 0x44;
			vreg_ctrl_1 = 0x19;
			glbl_rescode_top_ctrl = less_than_1500_mhz ? 0x3c :  0x03;
			glbl_rescode_bot_ctrl = less_than_1500_mhz ? 0x38 :  0x3c;
		}
	} else if ((phy->cfg->quirks & DSI_PHY_7NM_QUIRK_V4_3)) {
		if (phy->cphy_mode) {
			glbl_rescode_top_ctrl = less_than_1500_mhz ? 0x3d :  0x01;
			glbl_rescode_bot_ctrl = less_than_1500_mhz ? 0x38 :  0x3b;
		} else {
			glbl_rescode_top_ctrl = less_than_1500_mhz ? 0x3d :  0x01;
			glbl_rescode_bot_ctrl = less_than_1500_mhz ? 0x38 :  0x39;
		}
	} else if (phy->cfg->quirks & DSI_PHY_7NM_QUIRK_V4_2) {
		if (phy->cphy_mode) {
			glbl_rescode_top_ctrl = less_than_1500_mhz ? 0x3d :  0x01;
			glbl_rescode_bot_ctrl = less_than_1500_mhz ? 0x38 :  0x3b;
		} else {
			glbl_rescode_top_ctrl = less_than_1500_mhz ? 0x3c :  0x00;
			glbl_rescode_bot_ctrl = less_than_1500_mhz ? 0x38 :  0x39;
		}
	} else if (phy->cfg->quirks & DSI_PHY_7NM_QUIRK_V4_1) {
		if (phy->cphy_mode) {
			glbl_hstx_str_ctrl_0 = 0x88;
			glbl_rescode_top_ctrl = 0x00;
			glbl_rescode_bot_ctrl = 0x3c;
		} else {
			glbl_rescode_top_ctrl = less_than_1500_mhz ? 0x3d :  0x00;
			glbl_rescode_bot_ctrl = less_than_1500_mhz ? 0x39 :  0x3c;
		}
	} else {
		/* pre-V4.1 */
		if (phy->cphy_mode) {
			glbl_str_swi_cal_sel_ctrl = 0x03;
			glbl_hstx_str_ctrl_0 = 0x66;
		} else {
			vreg_ctrl_0 = less_than_1500_mhz ? 0x5B : 0x59;
			glbl_str_swi_cal_sel_ctrl = less_than_1500_mhz ? 0x03 : 0x00;
			glbl_hstx_str_ctrl_0 = less_than_1500_mhz ? 0x66 : 0x88;
		}
		glbl_rescode_top_ctrl = 0x03;
		glbl_rescode_bot_ctrl = 0x3c;
	}

	/* de-assert digital and pll power down */
	data = DSI_7nm_PHY_CMN_CTRL_0_DIGTOP_PWRDN_B |
	       DSI_7nm_PHY_CMN_CTRL_0_PLL_SHUTDOWNB;
	writel(data, base + REG_DSI_7nm_PHY_CMN_CTRL_0);

	/* Assert PLL core reset */
	writel(0x00, base + REG_DSI_7nm_PHY_CMN_PLL_CNTRL);

	/* turn off resync FIFO */
	writel(0x00, base + REG_DSI_7nm_PHY_CMN_RBUF_CTRL);

	/* program CMN_CTRL_4 for minor_ver 2 chipsets */
	if ((phy->cfg->quirks & DSI_PHY_7NM_QUIRK_V5_2) ||
	    (phy->cfg->quirks & DSI_PHY_7NM_QUIRK_V7_0) ||
	    (readl(base + REG_DSI_7nm_PHY_CMN_REVISION_ID0) & (0xf0)) == 0x20)
		writel(0x04, base + REG_DSI_7nm_PHY_CMN_CTRL_4);

	/* Configure PHY lane swap (TODO: we need to calculate this) */
	writel(0x21, base + REG_DSI_7nm_PHY_CMN_LANE_CFG0);
	writel(0x84, base + REG_DSI_7nm_PHY_CMN_LANE_CFG1);

	if (phy->cphy_mode)
		writel(BIT(6), base + REG_DSI_7nm_PHY_CMN_GLBL_CTRL);

	/* Enable LDO */
	writel(vreg_ctrl_0, base + REG_DSI_7nm_PHY_CMN_VREG_CTRL_0);
	writel(vreg_ctrl_1, base + REG_DSI_7nm_PHY_CMN_VREG_CTRL_1);

	writel(0x00, base + REG_DSI_7nm_PHY_CMN_CTRL_3);
	writel(glbl_str_swi_cal_sel_ctrl,
	       base + REG_DSI_7nm_PHY_CMN_GLBL_STR_SWI_CAL_SEL_CTRL);
	writel(glbl_hstx_str_ctrl_0,
	       base + REG_DSI_7nm_PHY_CMN_GLBL_HSTX_STR_CTRL_0);
	writel(glbl_pemph_ctrl_0,
	       base + REG_DSI_7nm_PHY_CMN_GLBL_PEMPH_CTRL_0);
	if (phy->cphy_mode)
		writel(0x01, base + REG_DSI_7nm_PHY_CMN_GLBL_PEMPH_CTRL_1);
	writel(glbl_rescode_top_ctrl,
	       base + REG_DSI_7nm_PHY_CMN_GLBL_RESCODE_OFFSET_TOP_CTRL);
	writel(glbl_rescode_bot_ctrl,
	       base + REG_DSI_7nm_PHY_CMN_GLBL_RESCODE_OFFSET_BOT_CTRL);
	writel(0x55, base + REG_DSI_7nm_PHY_CMN_GLBL_LPTX_STR_CTRL);

	/* Remove power down from all blocks */
	writel(0x7f, base + REG_DSI_7nm_PHY_CMN_CTRL_0);

	writel(lane_ctrl0, base + REG_DSI_7nm_PHY_CMN_LANE_CTRL0);

	/* Select full-rate mode */
	if (!phy->cphy_mode)
		writel(0x40, base + REG_DSI_7nm_PHY_CMN_CTRL_2);

	ret = dsi_7nm_set_usecase(phy);
	if (ret) {
		DRM_DEV_ERROR(&phy->pdev->dev, "%s: set pll usecase failed, %d\n",
			__func__, ret);
		return ret;
	}

	/* DSI PHY timings (C-PHY and D-PHY use different register layouts) */
	if (phy->cphy_mode) {
		writel(0x00, base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_0);
		writel(timing->hs_exit, base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_4);
		writel(timing->shared_timings.clk_pre,
		       base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_5);
		writel(timing->clk_prepare, base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_6);
		writel(timing->shared_timings.clk_post,
		       base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_7);
		writel(timing->hs_rqst, base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_8);
		writel(0x02, base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_9);
		writel(0x04, base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_10);
		writel(0x00, base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_11);
	} else {
		writel(0x00, base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_0);
		writel(timing->clk_zero, base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_1);
		writel(timing->clk_prepare, base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_2);
		writel(timing->clk_trail, base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_3);
		writel(timing->hs_exit, base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_4);
		writel(timing->hs_zero, base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_5);
		writel(timing->hs_prepare, base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_6);
		writel(timing->hs_trail, base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_7);
		writel(timing->hs_rqst, base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_8);
		writel(0x02, base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_9);
		writel(0x04, base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_10);
		writel(0x00, base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_11);
		writel(timing->shared_timings.clk_pre,
		       base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_12);
		writel(timing->shared_timings.clk_post,
		       base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_13);
	}

	/* DSI lane settings */
	dsi_phy_hw_v4_0_lane_settings(phy);

	DBG("DSI%d PHY enabled", phy->id);

	return 0;
}
1185 
1186 static bool dsi_7nm_set_continuous_clock(struct msm_dsi_phy *phy, bool enable)
1187 {
1188 	void __iomem *base = phy->base;
1189 	u32 data;
1190 
1191 	data = readl(base + REG_DSI_7nm_PHY_CMN_LANE_CTRL1);
1192 	if (enable)
1193 		data |= BIT(5) | BIT(6);
1194 	else
1195 		data &= ~(BIT(5) | BIT(6));
1196 	writel(data, base + REG_DSI_7nm_PHY_CMN_LANE_CTRL1);
1197 
1198 	return enable;
1199 }
1200 
/*
 * dsi_7nm_phy_disable() - power down the 7nm DSI PHY
 * @phy: the PHY to disable
 *
 * Disables LPRX/CDRX, removes the REFGEN vote on revisions that take one,
 * turns off all data lanes and finally powers down every PHY block. The
 * write order mirrors the hardware shutdown sequence and must be kept.
 */
static void dsi_7nm_phy_disable(struct msm_dsi_phy *phy)
{
	void __iomem *base = phy->base;
	u32 data;

	DBG("");

	if (dsi_phy_hw_v4_0_is_pll_on(phy))
		pr_warn("Turning OFF PHY while PLL is on\n");

	dsi_phy_hw_v4_0_config_lpcdrx(phy, false);

	/* Turn off REFGEN Vote */
	if ((phy->cfg->quirks & DSI_PHY_7NM_QUIRK_V4_3) ||
	    (phy->cfg->quirks & DSI_PHY_7NM_QUIRK_V5_2) ||
	    (phy->cfg->quirks & DSI_PHY_7NM_QUIRK_V7_0)) {
		writel(0x0, base + REG_DSI_7nm_PHY_CMN_GLBL_DIGTOP_SPARE10);
		wmb(); /* make sure the vote write lands before the delay */
		/* Delay to ensure HW removes vote before PHY shut down */
		udelay(2);
	}

	data = readl(base + REG_DSI_7nm_PHY_CMN_CTRL_0);

	/* disable all lanes (low 5 bits of CTRL_0) */
	data &= ~0x1F;
	writel(data, base + REG_DSI_7nm_PHY_CMN_CTRL_0);
	writel(0, base + REG_DSI_7nm_PHY_CMN_LANE_CTRL0);

	/* Turn off all PHY blocks */
	writel(0x00, base + REG_DSI_7nm_PHY_CMN_CTRL_0);

	/* make sure phy is turned off */
	wmb();

	DBG("DSI%d PHY disabled", phy->id);
}
1238 
/* Per-variant vdds supply tables; init_load_uA is the active-state load */
static const struct regulator_bulk_data dsi_phy_7nm_36mA_regulators[] = {
	{ .supply = "vdds", .init_load_uA = 36000 },
};

/*
 * NOTE(review): array name says 37750 uA but the load is 37550 uA —
 * one of the two looks like a typo; confirm against the datasheet /
 * downstream value before renaming or changing it.
 */
static const struct regulator_bulk_data dsi_phy_7nm_37750uA_regulators[] = {
	{ .supply = "vdds", .init_load_uA = 37550 },
};

static const struct regulator_bulk_data dsi_phy_7nm_48000uA_regulators[] = {
	{ .supply = "vdds", .init_load_uA = 48000 },
};

static const struct regulator_bulk_data dsi_phy_7nm_98000uA_regulators[] = {
	{ .supply = "vdds", .init_load_uA = 98000 },
};

static const struct regulator_bulk_data dsi_phy_7nm_97800uA_regulators[] = {
	{ .supply = "vdds", .init_load_uA = 97800 },
};

static const struct regulator_bulk_data dsi_phy_7nm_98400uA_regulators[] = {
	{ .supply = "vdds", .init_load_uA = 98400 },
};
1262 
/* Generic 7nm PHY config: V4.1 quirks, dual PHY, continuous clock capable */
const struct msm_dsi_phy_cfg dsi_phy_7nm_cfgs = {
	.has_phy_lane = true,
	.regulator_data = dsi_phy_7nm_36mA_regulators,
	.num_regulators = ARRAY_SIZE(dsi_phy_7nm_36mA_regulators),
	.ops = {
		.enable = dsi_7nm_phy_enable,
		.disable = dsi_7nm_phy_disable,
		.pll_init = dsi_pll_7nm_init,
		.save_pll_state = dsi_7nm_pll_save_state,
		.restore_pll_state = dsi_7nm_pll_restore_state,
		.set_continuous_clock = dsi_7nm_set_continuous_clock,
	},
	.min_pll_rate = 600000000UL,
#ifdef CONFIG_64BIT
	.max_pll_rate = 5000000000UL,
#else
	.max_pll_rate = ULONG_MAX,
#endif
	.io_start = { 0xae94400, 0xae96400 },
	.num_dsi_phy = 2,
	.quirks = DSI_PHY_7NM_QUIRK_V4_1,
};
1285 
1286 const struct msm_dsi_phy_cfg dsi_phy_7nm_6375_cfgs = {
1287 	.has_phy_lane = true,
1288 	.ops = {
1289 		.enable = dsi_7nm_phy_enable,
1290 		.disable = dsi_7nm_phy_disable,
1291 		.pll_init = dsi_pll_7nm_init,
1292 		.save_pll_state = dsi_7nm_pll_save_state,
1293 		.restore_pll_state = dsi_7nm_pll_restore_state,
1294 	},
1295 	.min_pll_rate = 600000000UL,
1296 #ifdef CONFIG_64BIT
1297 	.max_pll_rate = 5000000000ULL,
1298 #else
1299 	.max_pll_rate = ULONG_MAX,
1300 #endif
1301 	.io_start = { 0x5e94400 },
1302 	.num_dsi_phy = 1,
1303 	.quirks = DSI_PHY_7NM_QUIRK_V4_1,
1304 };
1305 
/* 8150 variant: PRE_V4_1 quirks, tighter PLL range (1–3.5 GHz), dual PHY */
const struct msm_dsi_phy_cfg dsi_phy_7nm_8150_cfgs = {
	.has_phy_lane = true,
	.regulator_data = dsi_phy_7nm_36mA_regulators,
	.num_regulators = ARRAY_SIZE(dsi_phy_7nm_36mA_regulators),
	.ops = {
		.enable = dsi_7nm_phy_enable,
		.disable = dsi_7nm_phy_disable,
		.pll_init = dsi_pll_7nm_init,
		.save_pll_state = dsi_7nm_pll_save_state,
		.restore_pll_state = dsi_7nm_pll_restore_state,
		.set_continuous_clock = dsi_7nm_set_continuous_clock,
	},
	.min_pll_rate = 1000000000UL,
	.max_pll_rate = 3500000000UL,
	.io_start = { 0xae94400, 0xae96400 },
	.num_dsi_phy = 2,
	.quirks = DSI_PHY_7NM_QUIRK_PRE_V4_1,
};
1324 
1325 const struct msm_dsi_phy_cfg dsi_phy_7nm_7280_cfgs = {
1326 	.has_phy_lane = true,
1327 	.regulator_data = dsi_phy_7nm_37750uA_regulators,
1328 	.num_regulators = ARRAY_SIZE(dsi_phy_7nm_37750uA_regulators),
1329 	.ops = {
1330 		.enable = dsi_7nm_phy_enable,
1331 		.disable = dsi_7nm_phy_disable,
1332 		.pll_init = dsi_pll_7nm_init,
1333 		.save_pll_state = dsi_7nm_pll_save_state,
1334 		.restore_pll_state = dsi_7nm_pll_restore_state,
1335 	},
1336 	.min_pll_rate = 600000000UL,
1337 #ifdef CONFIG_64BIT
1338 	.max_pll_rate = 5000000000ULL,
1339 #else
1340 	.max_pll_rate = ULONG_MAX,
1341 #endif
1342 	.io_start = { 0xae94400 },
1343 	.num_dsi_phy = 1,
1344 	.quirks = DSI_PHY_7NM_QUIRK_V4_1,
1345 };
1346 
/* 5nm PHY, 8350 variant: V4.2 quirks, dual PHY */
const struct msm_dsi_phy_cfg dsi_phy_5nm_8350_cfgs = {
	.has_phy_lane = true,
	.regulator_data = dsi_phy_7nm_37750uA_regulators,
	.num_regulators = ARRAY_SIZE(dsi_phy_7nm_37750uA_regulators),
	.ops = {
		.enable = dsi_7nm_phy_enable,
		.disable = dsi_7nm_phy_disable,
		.pll_init = dsi_pll_7nm_init,
		.save_pll_state = dsi_7nm_pll_save_state,
		.restore_pll_state = dsi_7nm_pll_restore_state,
		.set_continuous_clock = dsi_7nm_set_continuous_clock,
	},
	.min_pll_rate = 600000000UL,
#ifdef CONFIG_64BIT
	.max_pll_rate = 5000000000UL,
#else
	.max_pll_rate = ULONG_MAX,
#endif
	.io_start = { 0xae94400, 0xae96400 },
	.num_dsi_phy = 2,
	.quirks = DSI_PHY_7NM_QUIRK_V4_2,
};
1369 
/* 5nm PHY, 8450 variant: V4.3 quirks, dual PHY */
const struct msm_dsi_phy_cfg dsi_phy_5nm_8450_cfgs = {
	.has_phy_lane = true,
	.regulator_data = dsi_phy_7nm_97800uA_regulators,
	.num_regulators = ARRAY_SIZE(dsi_phy_7nm_97800uA_regulators),
	.ops = {
		.enable = dsi_7nm_phy_enable,
		.disable = dsi_7nm_phy_disable,
		.pll_init = dsi_pll_7nm_init,
		.save_pll_state = dsi_7nm_pll_save_state,
		.restore_pll_state = dsi_7nm_pll_restore_state,
		.set_continuous_clock = dsi_7nm_set_continuous_clock,
	},
	.min_pll_rate = 600000000UL,
#ifdef CONFIG_64BIT
	.max_pll_rate = 5000000000UL,
#else
	.max_pll_rate = ULONG_MAX,
#endif
	.io_start = { 0xae94400, 0xae96400 },
	.num_dsi_phy = 2,
	.quirks = DSI_PHY_7NM_QUIRK_V4_3,
};
1392 
1393 const struct msm_dsi_phy_cfg dsi_phy_5nm_8775p_cfgs = {
1394 	.has_phy_lane = true,
1395 	.regulator_data = dsi_phy_7nm_48000uA_regulators,
1396 	.num_regulators = ARRAY_SIZE(dsi_phy_7nm_48000uA_regulators),
1397 	.ops = {
1398 		.enable = dsi_7nm_phy_enable,
1399 		.disable = dsi_7nm_phy_disable,
1400 		.pll_init = dsi_pll_7nm_init,
1401 		.save_pll_state = dsi_7nm_pll_save_state,
1402 		.restore_pll_state = dsi_7nm_pll_restore_state,
1403 		.set_continuous_clock = dsi_7nm_set_continuous_clock,
1404 		},
1405 	.min_pll_rate = 600000000UL,
1406 #ifdef CONFIG_64BIT
1407 	.max_pll_rate = 5000000000UL,
1408 #else
1409 	.max_pll_rate = ULONG_MAX,
1410 #endif
1411 	.io_start = { 0xae94400, 0xae96400 },
1412 	.num_dsi_phy = 2,
1413 	.quirks = DSI_PHY_7NM_QUIRK_V4_2,
1414 };
1415 
/* 5nm PHY, sar2130p variant: V5.2 quirks, dual PHY at the 0xae95000 bases */
const struct msm_dsi_phy_cfg dsi_phy_5nm_sar2130p_cfgs = {
	.has_phy_lane = true,
	.regulator_data = dsi_phy_7nm_97800uA_regulators,
	.num_regulators = ARRAY_SIZE(dsi_phy_7nm_97800uA_regulators),
	.ops = {
		.enable = dsi_7nm_phy_enable,
		.disable = dsi_7nm_phy_disable,
		.pll_init = dsi_pll_7nm_init,
		.save_pll_state = dsi_7nm_pll_save_state,
		.restore_pll_state = dsi_7nm_pll_restore_state,
		.set_continuous_clock = dsi_7nm_set_continuous_clock,
	},
	.min_pll_rate = 600000000UL,
#ifdef CONFIG_64BIT
	.max_pll_rate = 5000000000UL,
#else
	.max_pll_rate = ULONG_MAX,
#endif
	.io_start = { 0xae95000, 0xae97000 },
	.num_dsi_phy = 2,
	.quirks = DSI_PHY_7NM_QUIRK_V5_2,
};
1438 
/* 4nm PHY, 8550 variant: V5.2 quirks, dual PHY */
const struct msm_dsi_phy_cfg dsi_phy_4nm_8550_cfgs = {
	.has_phy_lane = true,
	.regulator_data = dsi_phy_7nm_98400uA_regulators,
	.num_regulators = ARRAY_SIZE(dsi_phy_7nm_98400uA_regulators),
	.ops = {
		.enable = dsi_7nm_phy_enable,
		.disable = dsi_7nm_phy_disable,
		.pll_init = dsi_pll_7nm_init,
		.save_pll_state = dsi_7nm_pll_save_state,
		.restore_pll_state = dsi_7nm_pll_restore_state,
		.set_continuous_clock = dsi_7nm_set_continuous_clock,
	},
	.min_pll_rate = 600000000UL,
#ifdef CONFIG_64BIT
	.max_pll_rate = 5000000000UL,
#else
	.max_pll_rate = ULONG_MAX,
#endif
	.io_start = { 0xae95000, 0xae97000 },
	.num_dsi_phy = 2,
	.quirks = DSI_PHY_7NM_QUIRK_V5_2,
};
1461 
/* 4nm PHY, 8650 variant: V5.2 quirks, dual PHY */
const struct msm_dsi_phy_cfg dsi_phy_4nm_8650_cfgs = {
	.has_phy_lane = true,
	.regulator_data = dsi_phy_7nm_98000uA_regulators,
	.num_regulators = ARRAY_SIZE(dsi_phy_7nm_98000uA_regulators),
	.ops = {
		.enable = dsi_7nm_phy_enable,
		.disable = dsi_7nm_phy_disable,
		.pll_init = dsi_pll_7nm_init,
		.save_pll_state = dsi_7nm_pll_save_state,
		.restore_pll_state = dsi_7nm_pll_restore_state,
		.set_continuous_clock = dsi_7nm_set_continuous_clock,
	},
	.min_pll_rate = 600000000UL,
#ifdef CONFIG_64BIT
	.max_pll_rate = 5000000000UL,
#else
	.max_pll_rate = ULONG_MAX,
#endif
	.io_start = { 0xae95000, 0xae97000 },
	.num_dsi_phy = 2,
	.quirks = DSI_PHY_7NM_QUIRK_V5_2,
};
1484 
/* 3nm PHY, 8750 variant: V7.0 quirks, dual PHY */
const struct msm_dsi_phy_cfg dsi_phy_3nm_8750_cfgs = {
	.has_phy_lane = true,
	.regulator_data = dsi_phy_7nm_98000uA_regulators,
	.num_regulators = ARRAY_SIZE(dsi_phy_7nm_98000uA_regulators),
	.ops = {
		.enable = dsi_7nm_phy_enable,
		.disable = dsi_7nm_phy_disable,
		.pll_init = dsi_pll_7nm_init,
		.save_pll_state = dsi_7nm_pll_save_state,
		.restore_pll_state = dsi_7nm_pll_restore_state,
		.set_continuous_clock = dsi_7nm_set_continuous_clock,
	},
	.min_pll_rate = 600000000UL,
#ifdef CONFIG_64BIT
	.max_pll_rate = 5000000000UL,
#else
	.max_pll_rate = ULONG_MAX,
#endif
	.io_start = { 0xae95000, 0xae97000 },
	.num_dsi_phy = 2,
	.quirks = DSI_PHY_7NM_QUIRK_V7_0,
};
1507