1 /*
2 * SPDX-License-Identifier: GPL-2.0
3 * Copyright (c) 2018, The Linux Foundation
4 */
5
6 #include <dt-bindings/clock/qcom,dsi-phy-28nm.h>
7 #include <linux/bitfield.h>
8 #include <linux/clk.h>
9 #include <linux/clk-provider.h>
10 #include <linux/iopoll.h>
11
12 #include "dsi_phy.h"
13 #include "dsi.xml.h"
14 #include "dsi_phy_7nm.xml.h"
15
16 /*
 * DSI PLL 7nm - clock diagram (eg: DSI0): TODO: update CPHY diagram
18 *
19 * dsi0_pll_out_div_clk dsi0_pll_bit_clk
20 * | |
21 * | |
22 * +---------+ | +----------+ | +----+
23 * dsi0vco_clk ---| out_div |--o--| divl_3_0 |--o--| /8 |-- dsi0_phy_pll_out_byteclk
24 * +---------+ | +----------+ | +----+
25 * | |
26 * | | dsi0_pll_by_2_bit_clk
27 * | | |
28 * | | +----+ | |\ dsi0_pclk_mux
29 * | |--| /2 |--o--| \ |
30 * | | +----+ | \ | +---------+
31 * | --------------| |--o--| div_7_4 |-- dsi0_phy_pll_out_dsiclk
32 * |------------------------------| / +---------+
33 * | +-----+ | /
34 * -----------| /4? |--o----------|/
35 * +-----+ | |
36 * | |dsiclk_sel
37 * |
38 * dsi0_pll_post_out_div_clk
39 */
40
41 #define VCO_REF_CLK_RATE 19200000
42 #define FRAC_BITS 18
43
44 /* Hardware is pre V4.1 */
45 #define DSI_PHY_7NM_QUIRK_PRE_V4_1 BIT(0)
46 /* Hardware is V4.1 */
47 #define DSI_PHY_7NM_QUIRK_V4_1 BIT(1)
48 /* Hardware is V4.2 */
49 #define DSI_PHY_7NM_QUIRK_V4_2 BIT(2)
50 /* Hardware is V4.3 */
51 #define DSI_PHY_7NM_QUIRK_V4_3 BIT(3)
52 /* Hardware is V5.2 */
53 #define DSI_PHY_7NM_QUIRK_V5_2 BIT(4)
54
/*
 * Per-set_rate PLL programming values: fixed SSC parameters filled in by
 * dsi_pll_setup_config(), and computed outputs filled in by
 * dsi_pll_calc_dec_frac()/dsi_pll_calc_ssc() before being written to
 * hardware by dsi_pll_commit()/dsi_pll_ssc_commit().
 */
struct dsi_pll_config {
	bool enable_ssc;	/* spread-spectrum clocking on/off */
	bool ssc_center;	/* selects SSC_CENTER bit in SSC_CONTROL */
	u32 ssc_freq;		/* SSC modulation frequency */
	u32 ssc_offset;		/* SSC spread amount (scaled by 1e6 in calc) */
	u32 ssc_adj_per;

	/* out */
	u32 decimal_div_start;	/* integer part of feedback divider */
	u32 frac_div_start;	/* fractional part, FRAC_BITS wide */
	u32 pll_clock_inverters;	/* revision/band dependent inverter setting */
	u32 ssc_stepsize;
	u32 ssc_div_per;
};
69
/*
 * Divider/mux register state captured by dsi_7nm_pll_save_state() and
 * re-applied by dsi_7nm_pll_restore_state() across a PHY power collapse.
 */
struct pll_7nm_cached_state {
	unsigned long vco_rate;	/* NOTE(review): never written in this file — confirm still needed */
	u8 bit_clk_div;		/* CLK_CFG0 DIV_CTRL_3_0 */
	u8 pix_clk_div;		/* CLK_CFG0 DIV_CTRL_7_4 */
	u8 pll_out_div;		/* PLL_OUTDIV_RATE[1:0] */
	u8 pll_mux;		/* CLK_CFG1 DSICLK_SEL */
};
77
/* Private state of one 7nm DSI PLL instance */
struct dsi_pll_7nm {
	struct clk_hw clk_hw;

	struct msm_dsi_phy *phy;

	/* last VCO rate set via set_rate, or read back by recalc_rate */
	u64 vco_current_rate;

	/* protects REG_DSI_7nm_PHY_CMN_CLK_CFG0 register */
	spinlock_t postdiv_lock;

	/* protects REG_DSI_7nm_PHY_CMN_CLK_CFG1 register */
	spinlock_t pclk_mux_lock;

	/* divider/mux state saved across a power collapse */
	struct pll_7nm_cached_state cached_state;

	/* other PLL of a bonded-DSI pair; set only for MSM_DSI_PHY_MASTER */
	struct dsi_pll_7nm *slave;
};
95
96 #define to_pll_7nm(x) container_of(x, struct dsi_pll_7nm, clk_hw)
97
/*
 * Global list of private DSI PLL struct pointers. We need this for bonded DSI
 * mode, where the master PLL's clk_ops needs to access the slave's private
 * data
 */
102 static struct dsi_pll_7nm *pll_7nm_list[DSI_MAX];
103
dsi_pll_setup_config(struct dsi_pll_config * config)104 static void dsi_pll_setup_config(struct dsi_pll_config *config)
105 {
106 config->ssc_freq = 31500;
107 config->ssc_offset = 4800;
108 config->ssc_adj_per = 2;
109
110 /* TODO: ssc enable */
111 config->enable_ssc = false;
112 config->ssc_center = 0;
113 }
114
/*
 * Compute the feedback-divider programming (integer + fractional part) and
 * the clock-inverter setting for the currently requested VCO rate.
 *
 * divider = 2 * ref clk; dec.frac = vco / divider, with the fractional part
 * carried in FRAC_BITS bits. The inverter value comes from a per-HW-revision
 * frequency table.
 */
static void dsi_pll_calc_dec_frac(struct dsi_pll_7nm *pll, struct dsi_pll_config *config)
{
	u64 fref = VCO_REF_CLK_RATE;
	u64 pll_freq;
	u64 divider;
	u64 dec, dec_multiple;
	u32 frac;
	u64 multiplier;

	pll_freq = pll->vco_current_rate;

	divider = fref * 2;

	/* scale by 2^FRAC_BITS so the remainder is the fractional divider */
	multiplier = 1 << FRAC_BITS;
	dec_multiple = div_u64(pll_freq * multiplier, divider);
	dec = div_u64_rem(dec_multiple, multiplier, &frac);

	/* inverter selection: revision first, then VCO frequency band */
	if (pll->phy->cfg->quirks & DSI_PHY_7NM_QUIRK_PRE_V4_1)
		config->pll_clock_inverters = 0x28;
	else if ((pll->phy->cfg->quirks & DSI_PHY_7NM_QUIRK_V5_2)) {
		if (pll_freq <= 1300000000ULL)
			config->pll_clock_inverters = 0xa0;
		else if (pll_freq <= 2500000000ULL)
			config->pll_clock_inverters = 0x20;
		else if (pll_freq <= 4000000000ULL)
			config->pll_clock_inverters = 0x00;
		else
			config->pll_clock_inverters = 0x40;
	} else if (pll->phy->cfg->quirks & DSI_PHY_7NM_QUIRK_V4_1) {
		if (pll_freq <= 1000000000ULL)
			config->pll_clock_inverters = 0xa0;
		else if (pll_freq <= 2500000000ULL)
			config->pll_clock_inverters = 0x20;
		else if (pll_freq <= 3020000000ULL)
			config->pll_clock_inverters = 0x00;
		else
			config->pll_clock_inverters = 0x40;
	} else {
		/* 4.2, 4.3 */
		if (pll_freq <= 1000000000ULL)
			config->pll_clock_inverters = 0xa0;
		else if (pll_freq <= 2500000000ULL)
			config->pll_clock_inverters = 0x20;
		else if (pll_freq <= 3500000000ULL)
			config->pll_clock_inverters = 0x00;
		else
			config->pll_clock_inverters = 0x40;
	}

	config->decimal_div_start = dec;
	config->frac_div_start = frac;
}
167
168 #define SSC_CENTER BIT(0)
169 #define SSC_EN BIT(1)
170
dsi_pll_calc_ssc(struct dsi_pll_7nm * pll,struct dsi_pll_config * config)171 static void dsi_pll_calc_ssc(struct dsi_pll_7nm *pll, struct dsi_pll_config *config)
172 {
173 u32 ssc_per;
174 u32 ssc_mod;
175 u64 ssc_step_size;
176 u64 frac;
177
178 if (!config->enable_ssc) {
179 DBG("SSC not enabled\n");
180 return;
181 }
182
183 ssc_per = DIV_ROUND_CLOSEST(VCO_REF_CLK_RATE, config->ssc_freq) / 2 - 1;
184 ssc_mod = (ssc_per + 1) % (config->ssc_adj_per + 1);
185 ssc_per -= ssc_mod;
186
187 frac = config->frac_div_start;
188 ssc_step_size = config->decimal_div_start;
189 ssc_step_size *= (1 << FRAC_BITS);
190 ssc_step_size += frac;
191 ssc_step_size *= config->ssc_offset;
192 ssc_step_size *= (config->ssc_adj_per + 1);
193 ssc_step_size = div_u64(ssc_step_size, (ssc_per + 1));
194 ssc_step_size = DIV_ROUND_CLOSEST_ULL(ssc_step_size, 1000000);
195
196 config->ssc_div_per = ssc_per;
197 config->ssc_stepsize = ssc_step_size;
198
199 pr_debug("SCC: Dec:%d, frac:%llu, frac_bits:%d\n",
200 config->decimal_div_start, frac, FRAC_BITS);
201 pr_debug("SSC: div_per:0x%X, stepsize:0x%X, adjper:0x%X\n",
202 ssc_per, (u32)ssc_step_size, config->ssc_adj_per);
203 }
204
/*
 * Write the SSC values computed by dsi_pll_calc_ssc() to the PLL SSC
 * registers and enable SSC. Does nothing when SSC is disabled.
 */
static void dsi_pll_ssc_commit(struct dsi_pll_7nm *pll, struct dsi_pll_config *config)
{
	void __iomem *base = pll->phy->pll_base;

	if (config->enable_ssc) {
		pr_debug("SSC is enabled\n");

		/* 16-bit values are split across LOW/HIGH byte registers */
		writel(config->ssc_stepsize & 0xff,
		       base + REG_DSI_7nm_PHY_PLL_SSC_STEPSIZE_LOW_1);
		writel(config->ssc_stepsize >> 8,
		       base + REG_DSI_7nm_PHY_PLL_SSC_STEPSIZE_HIGH_1);
		writel(config->ssc_div_per & 0xff,
		       base + REG_DSI_7nm_PHY_PLL_SSC_DIV_PER_LOW_1);
		writel(config->ssc_div_per >> 8,
		       base + REG_DSI_7nm_PHY_PLL_SSC_DIV_PER_HIGH_1);
		writel(config->ssc_adj_per & 0xff,
		       base + REG_DSI_7nm_PHY_PLL_SSC_ADJPER_LOW_1);
		writel(config->ssc_adj_per >> 8,
		       base + REG_DSI_7nm_PHY_PLL_SSC_ADJPER_HIGH_1);
		/* enable last, after all parameters are programmed */
		writel(SSC_EN | (config->ssc_center ? SSC_CENTER : 0),
		       base + REG_DSI_7nm_PHY_PLL_SSC_CONTROL);
	}
}
228
/*
 * Program the rate-independent ("Hz independent") PLL registers. Only
 * ANALOG_CONTROLS_FIVE_1 and VCO_CONFIG_1 vary, by HW revision and VCO
 * band; everything else is a fixed init sequence. Keep the write order
 * as-is — this mirrors the vendor bring-up sequence.
 */
static void dsi_pll_config_hzindep_reg(struct dsi_pll_7nm *pll)
{
	void __iomem *base = pll->phy->pll_base;
	u8 analog_controls_five_1 = 0x01, vco_config_1 = 0x00;

	if (!(pll->phy->cfg->quirks & DSI_PHY_7NM_QUIRK_PRE_V4_1))
		if (pll->vco_current_rate >= 3100000000ULL)
			analog_controls_five_1 = 0x03;

	if (pll->phy->cfg->quirks & DSI_PHY_7NM_QUIRK_V4_1) {
		if (pll->vco_current_rate < 1520000000ULL)
			vco_config_1 = 0x08;
		else if (pll->vco_current_rate < 2990000000ULL)
			vco_config_1 = 0x01;
	}

	if ((pll->phy->cfg->quirks & DSI_PHY_7NM_QUIRK_V4_2) ||
	    (pll->phy->cfg->quirks & DSI_PHY_7NM_QUIRK_V4_3)) {
		if (pll->vco_current_rate < 1520000000ULL)
			vco_config_1 = 0x08;
		else if (pll->vco_current_rate >= 2990000000ULL)
			vco_config_1 = 0x01;
	}

	if ((pll->phy->cfg->quirks & DSI_PHY_7NM_QUIRK_V5_2)) {
		if (pll->vco_current_rate < 1557000000ULL)
			vco_config_1 = 0x08;
		else
			vco_config_1 = 0x01;
	}

	writel(analog_controls_five_1, base + REG_DSI_7nm_PHY_PLL_ANALOG_CONTROLS_FIVE_1);
	writel(vco_config_1, base + REG_DSI_7nm_PHY_PLL_VCO_CONFIG_1);
	writel(0x01, base + REG_DSI_7nm_PHY_PLL_ANALOG_CONTROLS_FIVE);
	writel(0x03, base + REG_DSI_7nm_PHY_PLL_ANALOG_CONTROLS_TWO);
	writel(0x00, base + REG_DSI_7nm_PHY_PLL_ANALOG_CONTROLS_THREE);
	writel(0x00, base + REG_DSI_7nm_PHY_PLL_DSM_DIVIDER);
	writel(0x4e, base + REG_DSI_7nm_PHY_PLL_FEEDBACK_DIVIDER);
	writel(0x40, base + REG_DSI_7nm_PHY_PLL_CALIBRATION_SETTINGS);
	writel(0xba, base + REG_DSI_7nm_PHY_PLL_BAND_SEL_CAL_SETTINGS_THREE);
	writel(0x0c, base + REG_DSI_7nm_PHY_PLL_FREQ_DETECT_SETTINGS_ONE);
	writel(0x00, base + REG_DSI_7nm_PHY_PLL_OUTDIV);
	writel(0x00, base + REG_DSI_7nm_PHY_PLL_CORE_OVERRIDE);
	writel(0x08, base + REG_DSI_7nm_PHY_PLL_PLL_DIGITAL_TIMERS_TWO);
	writel(0x0a, base + REG_DSI_7nm_PHY_PLL_PLL_PROP_GAIN_RATE_1);
	writel(0xc0, base + REG_DSI_7nm_PHY_PLL_PLL_BAND_SEL_RATE_1);
	/*
	 * NOTE(review): INT_GAIN_IFILT_BAND_1 (0x84 then 0x82) and PFILT
	 * (0x29 then 0x2f) are each written twice back-to-back; this matches
	 * the downstream init sequence — confirm intentional.
	 */
	writel(0x84, base + REG_DSI_7nm_PHY_PLL_PLL_INT_GAIN_IFILT_BAND_1);
	writel(0x82, base + REG_DSI_7nm_PHY_PLL_PLL_INT_GAIN_IFILT_BAND_1);
	writel(0x4c, base + REG_DSI_7nm_PHY_PLL_PLL_FL_INT_GAIN_PFILT_BAND_1);
	writel(0x80, base + REG_DSI_7nm_PHY_PLL_PLL_LOCK_OVERRIDE);
	writel(0x29, base + REG_DSI_7nm_PHY_PLL_PFILT);
	writel(0x2f, base + REG_DSI_7nm_PHY_PLL_PFILT);
	writel(0x2a, base + REG_DSI_7nm_PHY_PLL_IFILT);
	writel(!(pll->phy->cfg->quirks & DSI_PHY_7NM_QUIRK_PRE_V4_1) ? 0x3f : 0x22,
	       base + REG_DSI_7nm_PHY_PLL_IFILT);

	if (!(pll->phy->cfg->quirks & DSI_PHY_7NM_QUIRK_PRE_V4_1)) {
		writel(0x22, base + REG_DSI_7nm_PHY_PLL_PERF_OPTIMIZE);
		if (pll->slave)
			writel(0x22, pll->slave->phy->pll_base + REG_DSI_7nm_PHY_PLL_PERF_OPTIMIZE);
	}
}
291
/*
 * Write the rate-dependent divider values from @config to the PLL.
 * frac_div_start is FRAC_BITS (18) bits wide, split 8/8/2 across the
 * LOW/MID/HIGH registers.
 */
static void dsi_pll_commit(struct dsi_pll_7nm *pll, struct dsi_pll_config *config)
{
	void __iomem *base = pll->phy->pll_base;

	writel(0x12, base + REG_DSI_7nm_PHY_PLL_CORE_INPUT_OVERRIDE);
	writel(config->decimal_div_start,
	       base + REG_DSI_7nm_PHY_PLL_DECIMAL_DIV_START_1);
	writel(config->frac_div_start & 0xff,
	       base + REG_DSI_7nm_PHY_PLL_FRAC_DIV_START_LOW_1);
	writel((config->frac_div_start & 0xff00) >> 8,
	       base + REG_DSI_7nm_PHY_PLL_FRAC_DIV_START_MID_1);
	writel((config->frac_div_start & 0x30000) >> 16,
	       base + REG_DSI_7nm_PHY_PLL_FRAC_DIV_START_HIGH_1);
	writel(0x40, base + REG_DSI_7nm_PHY_PLL_PLL_LOCKDET_RATE_1);
	writel(0x06, base + REG_DSI_7nm_PHY_PLL_PLL_LOCK_DELAY);
	/* CMODE_1 selects C-PHY vs D-PHY operation */
	writel(pll->phy->cphy_mode ? 0x00 : 0x10,
	       base + REG_DSI_7nm_PHY_PLL_CMODE_1);
	writel(config->pll_clock_inverters,
	       base + REG_DSI_7nm_PHY_PLL_CLOCK_INVERTERS_1);
}
312
/*
 * clk_ops.set_rate for the VCO: compute divider and SSC programming for
 * @rate and commit it to the PLL registers. The requested rate is cached
 * in vco_current_rate (used by the calc helpers and by restore_state).
 * Always returns 0.
 */
static int dsi_pll_7nm_vco_set_rate(struct clk_hw *hw, unsigned long rate,
				    unsigned long parent_rate)
{
	struct dsi_pll_7nm *pll_7nm = to_pll_7nm(hw);
	struct dsi_pll_config config;

	DBG("DSI PLL%d rate=%lu, parent's=%lu", pll_7nm->phy->id, rate,
	    parent_rate);

	pll_7nm->vco_current_rate = rate;

	dsi_pll_setup_config(&config);

	dsi_pll_calc_dec_frac(pll_7nm, &config);

	dsi_pll_calc_ssc(pll_7nm, &config);

	dsi_pll_commit(pll_7nm, &config);

	dsi_pll_config_hzindep_reg(pll_7nm);

	dsi_pll_ssc_commit(pll_7nm, &config);

	/* flush, ensure all register writes are done */
	wmb();

	return 0;
}
341
/*
 * Poll COMMON_STATUS_ONE bit 0 for PLL lock: 100 us between reads, 5 ms
 * total. Returns 0 on lock, or the poll helper's error (-ETIMEDOUT) with
 * the last status value logged.
 */
static int dsi_pll_7nm_lock_status(struct dsi_pll_7nm *pll)
{
	int rc;
	u32 status = 0;
	u32 const delay_us = 100;
	u32 const timeout_us = 5000;

	rc = readl_poll_timeout_atomic(pll->phy->pll_base +
				       REG_DSI_7nm_PHY_PLL_COMMON_STATUS_ONE,
				       status,
				       ((status & BIT(0)) > 0),
				       delay_us,
				       timeout_us);
	if (rc)
		pr_err("DSI PLL(%d) lock failed, status=0x%08x\n",
		       pll->phy->id, status);

	return rc;
}
361
/*
 * Power down the PLL: clear SYSTEM_MUXES, then drop the bias enable bit
 * (CTRL_0 bit 5). The 250 ns delay lets the bias settle — presumably a
 * hardware requirement; mirrors dsi_pll_enable_pll_bias().
 */
static void dsi_pll_disable_pll_bias(struct dsi_pll_7nm *pll)
{
	u32 data = readl(pll->phy->base + REG_DSI_7nm_PHY_CMN_CTRL_0);

	writel(0, pll->phy->pll_base + REG_DSI_7nm_PHY_PLL_SYSTEM_MUXES);
	writel(data & ~BIT(5), pll->phy->base + REG_DSI_7nm_PHY_CMN_CTRL_0);
	ndelay(250);
}
370
/*
 * Power up the PLL: set the bias enable bit (CTRL_0 bit 5), then program
 * SYSTEM_MUXES (0xc0). 250 ns settle time, matching the disable path.
 */
static void dsi_pll_enable_pll_bias(struct dsi_pll_7nm *pll)
{
	u32 data = readl(pll->phy->base + REG_DSI_7nm_PHY_CMN_CTRL_0);

	writel(data | BIT(5), pll->phy->base + REG_DSI_7nm_PHY_CMN_CTRL_0);
	writel(0xc0, pll->phy->pll_base + REG_DSI_7nm_PHY_PLL_SYSTEM_MUXES);
	ndelay(250);
}
379
/*
 * Write CLK_CFG0 under postdiv_lock, since the register is shared with the
 * divider clocks registered with the same lock in pll_7nm_register().
 */
static void dsi_pll_cmn_clk_cfg0_write(struct dsi_pll_7nm *pll, u32 val)
{
	unsigned long flags;

	spin_lock_irqsave(&pll->postdiv_lock, flags);
	writel(val, pll->phy->base + REG_DSI_7nm_PHY_CMN_CLK_CFG0);
	spin_unlock_irqrestore(&pll->postdiv_lock, flags);
}
388
/*
 * Read-modify-write CLK_CFG1 under pclk_mux_lock: bits selected by @mask
 * are replaced with the corresponding bits of @val, all others preserved.
 */
static void dsi_pll_cmn_clk_cfg1_update(struct dsi_pll_7nm *pll, u32 mask,
					u32 val)
{
	unsigned long flags;
	u32 data;

	spin_lock_irqsave(&pll->pclk_mux_lock, flags);
	data = readl(pll->phy->base + REG_DSI_7nm_PHY_CMN_CLK_CFG1);
	data &= ~mask;
	data |= val & mask;

	writel(data, pll->phy->base + REG_DSI_7nm_PHY_CMN_CLK_CFG1);
	spin_unlock_irqrestore(&pll->pclk_mux_lock, flags);
}
403
/* Gate the PHY output clocks by clearing CLK_EN in CLK_CFG1. */
static void dsi_pll_disable_global_clk(struct dsi_pll_7nm *pll)
{
	dsi_pll_cmn_clk_cfg1_update(pll, DSI_7nm_PHY_CMN_CLK_CFG1_CLK_EN, 0);
}
408
/*
 * Ungate the PHY output clocks: write CTRL_3, then set CLK_EN and
 * CLK_EN_SEL in CLK_CFG1.
 */
static void dsi_pll_enable_global_clk(struct dsi_pll_7nm *pll)
{
	u32 cfg_1 = DSI_7nm_PHY_CMN_CLK_CFG1_CLK_EN | DSI_7nm_PHY_CMN_CLK_CFG1_CLK_EN_SEL;

	writel(0x04, pll->phy->base + REG_DSI_7nm_PHY_CMN_CTRL_3);
	dsi_pll_cmn_clk_cfg1_update(pll, cfg_1, cfg_1);
}
416
static void dsi_pll_phy_dig_reset(struct dsi_pll_7nm *pll)
{
	/*
	 * Reset the PHY digital domain. This would be needed when
	 * coming out of a CX or analog rail power collapse while
	 * ensuring that the pads maintain LP00 or LP11 state
	 */
	writel(BIT(0), pll->phy->base + REG_DSI_7nm_PHY_CMN_GLBL_DIGTOP_SPARE4);
	wmb(); /* Ensure the reset assert lands before deasserting */
	writel(0, pll->phy->base + REG_DSI_7nm_PHY_CMN_GLBL_DIGTOP_SPARE4);
	wmb(); /* Ensure that the reset is deasserted */
}
429
/*
 * clk_ops.prepare: power up the PLL bias (for both PLLs in bonded mode),
 * start the PLL, wait for lock, then reset the PHY digital domain and
 * ungate the output clocks. Returns 0 on success or the lock-poll error.
 */
static int dsi_pll_7nm_vco_prepare(struct clk_hw *hw)
{
	struct dsi_pll_7nm *pll_7nm = to_pll_7nm(hw);
	int rc;

	dsi_pll_enable_pll_bias(pll_7nm);
	if (pll_7nm->slave)
		dsi_pll_enable_pll_bias(pll_7nm->slave);

	/* Start PLL */
	writel(BIT(0), pll_7nm->phy->base + REG_DSI_7nm_PHY_CMN_PLL_CNTRL);

	/*
	 * ensure all PLL configurations are written prior to checking
	 * for PLL lock.
	 */
	wmb();

	/* Check for PLL lock */
	rc = dsi_pll_7nm_lock_status(pll_7nm);
	if (rc) {
		/* NOTE(review): PLL bias stays enabled on this path — confirm intended */
		pr_err("PLL(%d) lock failed\n", pll_7nm->phy->id);
		goto error;
	}

	pll_7nm->phy->pll_on = true;

	/*
	 * assert power on reset for PHY digital in case the PLL is
	 * enabled after CX of analog domain power collapse. This needs
	 * to be done before enabling the global clk.
	 */
	dsi_pll_phy_dig_reset(pll_7nm);
	if (pll_7nm->slave)
		dsi_pll_phy_dig_reset(pll_7nm->slave);

	dsi_pll_enable_global_clk(pll_7nm);
	if (pll_7nm->slave)
		dsi_pll_enable_global_clk(pll_7nm->slave);

error:
	return rc;
}
473
/* Disable one PLL: clear the resync buffer control, then drop the bias. */
static void dsi_pll_disable_sub(struct dsi_pll_7nm *pll)
{
	writel(0, pll->phy->base + REG_DSI_7nm_PHY_CMN_RBUF_CTRL);
	dsi_pll_disable_pll_bias(pll);
}
479
/*
 * clk_ops.unprepare: gate the output clocks, stop the PLL, and power it
 * down — the slave too in bonded mode. Mirrors dsi_pll_7nm_vco_prepare().
 */
static void dsi_pll_7nm_vco_unprepare(struct clk_hw *hw)
{
	struct dsi_pll_7nm *pll_7nm = to_pll_7nm(hw);

	/*
	 * To avoid any stray glitches while abruptly powering down the PLL
	 * make sure to gate the clock using the clock enable bit before
	 * powering down the PLL
	 */
	dsi_pll_disable_global_clk(pll_7nm);
	writel(0, pll_7nm->phy->base + REG_DSI_7nm_PHY_CMN_PLL_CNTRL);
	dsi_pll_disable_sub(pll_7nm);
	if (pll_7nm->slave) {
		dsi_pll_disable_global_clk(pll_7nm->slave);
		dsi_pll_disable_sub(pll_7nm->slave);
	}
	/* flush, ensure all register writes are done */
	wmb();
	pll_7nm->phy->pll_on = false;
}
500
/*
 * clk_ops.recalc_rate: read back the programmed divider registers and
 * reconstruct vco = (dec + frac / 2^FRAC_BITS) * 2 * ref_clk. dec is 8
 * bits; frac is 18 bits split 8/8/2 across LOW/MID/HIGH. Also refreshes
 * the cached vco_current_rate.
 */
static unsigned long dsi_pll_7nm_vco_recalc_rate(struct clk_hw *hw,
						 unsigned long parent_rate)
{
	struct dsi_pll_7nm *pll_7nm = to_pll_7nm(hw);
	void __iomem *base = pll_7nm->phy->pll_base;
	u64 ref_clk = VCO_REF_CLK_RATE;
	u64 vco_rate = 0x0;
	u64 multiplier;
	u32 frac;
	u32 dec;
	u64 pll_freq, tmp64;

	dec = readl(base + REG_DSI_7nm_PHY_PLL_DECIMAL_DIV_START_1);
	dec &= 0xff;

	frac = readl(base + REG_DSI_7nm_PHY_PLL_FRAC_DIV_START_LOW_1);
	frac |= ((readl(base + REG_DSI_7nm_PHY_PLL_FRAC_DIV_START_MID_1) &
		  0xff) << 8);
	frac |= ((readl(base + REG_DSI_7nm_PHY_PLL_FRAC_DIV_START_HIGH_1) &
		  0x3) << 16);

	/*
	 * TODO:
	 * 1. Assumes prescaler is disabled
	 */
	multiplier = 1 << FRAC_BITS;
	pll_freq = dec * (ref_clk * 2);
	tmp64 = (ref_clk * 2 * frac);
	pll_freq += div_u64(tmp64, multiplier);

	vco_rate = pll_freq;
	pll_7nm->vco_current_rate = vco_rate;

	DBG("DSI PLL%d returning vco rate = %lu, dec = %x, frac = %x",
	    pll_7nm->phy->id, (unsigned long)vco_rate, dec, frac);

	return (unsigned long)vco_rate;
}
539
dsi_pll_7nm_clk_round_rate(struct clk_hw * hw,unsigned long rate,unsigned long * parent_rate)540 static long dsi_pll_7nm_clk_round_rate(struct clk_hw *hw,
541 unsigned long rate, unsigned long *parent_rate)
542 {
543 struct dsi_pll_7nm *pll_7nm = to_pll_7nm(hw);
544
545 if (rate < pll_7nm->phy->cfg->min_pll_rate)
546 return pll_7nm->phy->cfg->min_pll_rate;
547 else if (rate > pll_7nm->phy->cfg->max_pll_rate)
548 return pll_7nm->phy->cfg->max_pll_rate;
549 else
550 return rate;
551 }
552
/* clk_ops for the VCO clock registered in pll_7nm_register() */
static const struct clk_ops clk_ops_dsi_pll_7nm_vco = {
	.round_rate = dsi_pll_7nm_clk_round_rate,
	.set_rate = dsi_pll_7nm_vco_set_rate,
	.recalc_rate = dsi_pll_7nm_vco_recalc_rate,
	.prepare = dsi_pll_7nm_vco_prepare,
	.unprepare = dsi_pll_7nm_vco_unprepare,
};
560
561 /*
562 * PLL Callbacks
563 */
564
/*
 * Capture the divider/mux register state (PLL_OUTDIV_RATE, CLK_CFG0 bit/pix
 * dividers, CLK_CFG1 dsiclk mux) into cached_state so it can be re-applied
 * after a power collapse by dsi_7nm_pll_restore_state().
 */
static void dsi_7nm_pll_save_state(struct msm_dsi_phy *phy)
{
	struct dsi_pll_7nm *pll_7nm = to_pll_7nm(phy->vco_hw);
	struct pll_7nm_cached_state *cached = &pll_7nm->cached_state;
	void __iomem *phy_base = pll_7nm->phy->base;
	u32 cmn_clk_cfg0, cmn_clk_cfg1;

	cached->pll_out_div = readl(pll_7nm->phy->pll_base +
				    REG_DSI_7nm_PHY_PLL_PLL_OUTDIV_RATE);
	cached->pll_out_div &= 0x3;

	cmn_clk_cfg0 = readl(phy_base + REG_DSI_7nm_PHY_CMN_CLK_CFG0);
	cached->bit_clk_div = FIELD_GET(DSI_7nm_PHY_CMN_CLK_CFG0_DIV_CTRL_3_0__MASK, cmn_clk_cfg0);
	cached->pix_clk_div = FIELD_GET(DSI_7nm_PHY_CMN_CLK_CFG0_DIV_CTRL_7_4__MASK, cmn_clk_cfg0);

	cmn_clk_cfg1 = readl(phy_base + REG_DSI_7nm_PHY_CMN_CLK_CFG1);
	cached->pll_mux = FIELD_GET(DSI_7nm_PHY_CMN_CLK_CFG1_DSICLK_SEL__MASK, cmn_clk_cfg1);

	DBG("DSI PLL%d outdiv %x bit_clk_div %x pix_clk_div %x pll_mux %x",
	    pll_7nm->phy->id, cached->pll_out_div, cached->bit_clk_div,
	    cached->pix_clk_div, cached->pll_mux);
}
587
/*
 * Re-apply the register state captured by dsi_7nm_pll_save_state(), then
 * reprogram the PLL for the cached vco_current_rate. Returns 0 on success
 * or the set_rate error.
 */
static int dsi_7nm_pll_restore_state(struct msm_dsi_phy *phy)
{
	struct dsi_pll_7nm *pll_7nm = to_pll_7nm(phy->vco_hw);
	struct pll_7nm_cached_state *cached = &pll_7nm->cached_state;
	u32 val;
	int ret;

	/* restore only OUTDIV_RATE[1:0], preserving the other bits */
	val = readl(pll_7nm->phy->pll_base + REG_DSI_7nm_PHY_PLL_PLL_OUTDIV_RATE);
	val &= ~0x3;
	val |= cached->pll_out_div;
	writel(val, pll_7nm->phy->pll_base + REG_DSI_7nm_PHY_PLL_PLL_OUTDIV_RATE);

	dsi_pll_cmn_clk_cfg0_write(pll_7nm,
				   DSI_7nm_PHY_CMN_CLK_CFG0_DIV_CTRL_3_0(cached->bit_clk_div) |
				   DSI_7nm_PHY_CMN_CLK_CFG0_DIV_CTRL_7_4(cached->pix_clk_div));
	dsi_pll_cmn_clk_cfg1_update(pll_7nm, DSI_7nm_PHY_CMN_CLK_CFG1_DSICLK_SEL__MASK,
				    cached->pll_mux);

	ret = dsi_pll_7nm_vco_set_rate(phy->vco_hw,
				       pll_7nm->vco_current_rate,
				       VCO_REF_CLK_RATE);
	if (ret) {
		DRM_DEV_ERROR(&pll_7nm->phy->pdev->dev,
			      "restore vco rate failed. ret=%d\n", ret);
		return ret;
	}

	DBG("DSI PLL%d", pll_7nm->phy->id);

	return 0;
}
619
/*
 * Configure the PLL bit-clock source for the PHY's usecase: the internal
 * PLL (0) for standalone/master, the external (master's) PLL (1) for a
 * bonded-mode slave. Master also links to the slave's private data.
 * Returns -EINVAL for an unknown usecase.
 */
static int dsi_7nm_set_usecase(struct msm_dsi_phy *phy)
{
	struct dsi_pll_7nm *pll_7nm = to_pll_7nm(phy->vco_hw);
	u32 data = 0x0; /* internal PLL */

	DBG("DSI PLL%d", pll_7nm->phy->id);

	switch (phy->usecase) {
	case MSM_DSI_PHY_STANDALONE:
		break;
	case MSM_DSI_PHY_MASTER:
		/* DSI_MAX is 2, so this picks the other PLL of the pair */
		pll_7nm->slave = pll_7nm_list[(pll_7nm->phy->id + 1) % DSI_MAX];
		break;
	case MSM_DSI_PHY_SLAVE:
		data = 0x1; /* external PLL */
		break;
	default:
		return -EINVAL;
	}

	/* set PLL src */
	dsi_pll_cmn_clk_cfg1_update(pll_7nm, DSI_7nm_PHY_CMN_CLK_CFG1_BITCLK_SEL__MASK,
				    DSI_7nm_PHY_CMN_CLK_CFG1_BITCLK_SEL(data));

	return 0;
}
646
647 /*
648 * The post dividers and mux clocks are created using the standard divider and
649 * mux API. Unlike the 14nm PHY, the slave PLL doesn't need its dividers/mux
650 * state to follow the master PLL's divider/mux state. Therefore, we don't
651 * require special clock ops that also configure the slave PLL registers
652 */
pll_7nm_register(struct dsi_pll_7nm * pll_7nm,struct clk_hw ** provided_clocks)653 static int pll_7nm_register(struct dsi_pll_7nm *pll_7nm, struct clk_hw **provided_clocks)
654 {
655 char clk_name[32];
656 struct clk_init_data vco_init = {
657 .parent_data = &(const struct clk_parent_data) {
658 .fw_name = "ref",
659 },
660 .num_parents = 1,
661 .name = clk_name,
662 .flags = CLK_IGNORE_UNUSED,
663 .ops = &clk_ops_dsi_pll_7nm_vco,
664 };
665 struct device *dev = &pll_7nm->phy->pdev->dev;
666 struct clk_hw *hw, *pll_out_div, *pll_bit, *pll_by_2_bit;
667 struct clk_hw *pll_post_out_div, *phy_pll_out_dsi_parent;
668 int ret;
669
670 DBG("DSI%d", pll_7nm->phy->id);
671
672 snprintf(clk_name, sizeof(clk_name), "dsi%dvco_clk", pll_7nm->phy->id);
673 pll_7nm->clk_hw.init = &vco_init;
674
675 ret = devm_clk_hw_register(dev, &pll_7nm->clk_hw);
676 if (ret)
677 return ret;
678
679 snprintf(clk_name, sizeof(clk_name), "dsi%d_pll_out_div_clk", pll_7nm->phy->id);
680
681 pll_out_div = devm_clk_hw_register_divider_parent_hw(dev, clk_name,
682 &pll_7nm->clk_hw, CLK_SET_RATE_PARENT,
683 pll_7nm->phy->pll_base +
684 REG_DSI_7nm_PHY_PLL_PLL_OUTDIV_RATE,
685 0, 2, CLK_DIVIDER_POWER_OF_TWO, NULL);
686 if (IS_ERR(pll_out_div)) {
687 ret = PTR_ERR(pll_out_div);
688 goto fail;
689 }
690
691 snprintf(clk_name, sizeof(clk_name), "dsi%d_pll_bit_clk", pll_7nm->phy->id);
692
693 /* BIT CLK: DIV_CTRL_3_0 */
694 pll_bit = devm_clk_hw_register_divider_parent_hw(dev, clk_name,
695 pll_out_div, CLK_SET_RATE_PARENT,
696 pll_7nm->phy->base + REG_DSI_7nm_PHY_CMN_CLK_CFG0,
697 0, 4, CLK_DIVIDER_ONE_BASED, &pll_7nm->postdiv_lock);
698 if (IS_ERR(pll_bit)) {
699 ret = PTR_ERR(pll_bit);
700 goto fail;
701 }
702
703 snprintf(clk_name, sizeof(clk_name), "dsi%d_phy_pll_out_byteclk", pll_7nm->phy->id);
704
705 /* DSI Byte clock = VCO_CLK / OUT_DIV / BIT_DIV / 8 */
706 hw = devm_clk_hw_register_fixed_factor_parent_hw(dev, clk_name,
707 pll_bit, CLK_SET_RATE_PARENT, 1,
708 pll_7nm->phy->cphy_mode ? 7 : 8);
709 if (IS_ERR(hw)) {
710 ret = PTR_ERR(hw);
711 goto fail;
712 }
713
714 provided_clocks[DSI_BYTE_PLL_CLK] = hw;
715
716 snprintf(clk_name, sizeof(clk_name), "dsi%d_pll_by_2_bit_clk", pll_7nm->phy->id);
717
718 pll_by_2_bit = devm_clk_hw_register_fixed_factor_parent_hw(dev,
719 clk_name, pll_bit, 0, 1, 2);
720 if (IS_ERR(pll_by_2_bit)) {
721 ret = PTR_ERR(pll_by_2_bit);
722 goto fail;
723 }
724
725 snprintf(clk_name, sizeof(clk_name), "dsi%d_pll_post_out_div_clk", pll_7nm->phy->id);
726
727 if (pll_7nm->phy->cphy_mode)
728 pll_post_out_div = devm_clk_hw_register_fixed_factor_parent_hw(
729 dev, clk_name, pll_out_div, 0, 2, 7);
730 else
731 pll_post_out_div = devm_clk_hw_register_fixed_factor_parent_hw(
732 dev, clk_name, pll_out_div, 0, 1, 4);
733 if (IS_ERR(pll_post_out_div)) {
734 ret = PTR_ERR(pll_post_out_div);
735 goto fail;
736 }
737
738 /* in CPHY mode, pclk_mux will always have post_out_div as parent
739 * don't register a pclk_mux clock and just use post_out_div instead
740 */
741 if (pll_7nm->phy->cphy_mode) {
742 dsi_pll_cmn_clk_cfg1_update(pll_7nm,
743 DSI_7nm_PHY_CMN_CLK_CFG1_DSICLK_SEL__MASK,
744 DSI_7nm_PHY_CMN_CLK_CFG1_DSICLK_SEL(3));
745 phy_pll_out_dsi_parent = pll_post_out_div;
746 } else {
747 snprintf(clk_name, sizeof(clk_name), "dsi%d_pclk_mux", pll_7nm->phy->id);
748
749 hw = devm_clk_hw_register_mux_parent_hws(dev, clk_name,
750 ((const struct clk_hw *[]){
751 pll_bit,
752 pll_by_2_bit,
753 }), 2, 0, pll_7nm->phy->base +
754 REG_DSI_7nm_PHY_CMN_CLK_CFG1,
755 0, 1, 0, &pll_7nm->pclk_mux_lock);
756 if (IS_ERR(hw)) {
757 ret = PTR_ERR(hw);
758 goto fail;
759 }
760
761 phy_pll_out_dsi_parent = hw;
762 }
763
764 snprintf(clk_name, sizeof(clk_name), "dsi%d_phy_pll_out_dsiclk", pll_7nm->phy->id);
765
766 /* PIX CLK DIV : DIV_CTRL_7_4*/
767 hw = devm_clk_hw_register_divider_parent_hw(dev, clk_name,
768 phy_pll_out_dsi_parent, 0,
769 pll_7nm->phy->base + REG_DSI_7nm_PHY_CMN_CLK_CFG0,
770 4, 4, CLK_DIVIDER_ONE_BASED, &pll_7nm->postdiv_lock);
771 if (IS_ERR(hw)) {
772 ret = PTR_ERR(hw);
773 goto fail;
774 }
775
776 provided_clocks[DSI_PIXEL_PLL_CLK] = hw;
777
778 return 0;
779
780 fail:
781
782 return ret;
783 }
784
/*
 * Allocate and register the PLL for @phy: init the CLK_CFG0/CLK_CFG1
 * locks, add the instance to pll_7nm_list (used for bonded-DSI master/
 * slave lookup in dsi_7nm_set_usecase()), register the clock tree, and
 * snapshot the current divider state. Returns 0 or a negative errno.
 */
static int dsi_pll_7nm_init(struct msm_dsi_phy *phy)
{
	struct platform_device *pdev = phy->pdev;
	struct dsi_pll_7nm *pll_7nm;
	int ret;

	pll_7nm = devm_kzalloc(&pdev->dev, sizeof(*pll_7nm), GFP_KERNEL);
	if (!pll_7nm)
		return -ENOMEM;

	DBG("DSI PLL%d", phy->id);

	pll_7nm_list[phy->id] = pll_7nm;

	spin_lock_init(&pll_7nm->postdiv_lock);
	spin_lock_init(&pll_7nm->pclk_mux_lock);

	pll_7nm->phy = phy;

	ret = pll_7nm_register(pll_7nm, phy->provided_clocks->hws);
	if (ret) {
		DRM_DEV_ERROR(&pdev->dev, "failed to register PLL: %d\n", ret);
		return ret;
	}

	phy->vco_hw = &pll_7nm->clk_hw;

	/* TODO: Remove this when we have proper display handover support */
	msm_dsi_phy_pll_save_state(phy);

	return 0;
}
817
dsi_phy_hw_v4_0_is_pll_on(struct msm_dsi_phy * phy)818 static int dsi_phy_hw_v4_0_is_pll_on(struct msm_dsi_phy *phy)
819 {
820 void __iomem *base = phy->base;
821 u32 data = 0;
822
823 data = readl(base + REG_DSI_7nm_PHY_CMN_PLL_CNTRL);
824 mb(); /* make sure read happened */
825
826 return (data & BIT(0));
827 }
828
/*
 * Enable or disable LPRX and CDRX on the physical lane mapped to logical
 * data lane 0 — the only lane that needs them enabled.
 */
static void dsi_phy_hw_v4_0_config_lpcdrx(struct msm_dsi_phy *phy, bool enable)
{
	int phy_lane_0 = 0; /* TODO: Support all lane swap configs */
	u32 val = enable ? 0x3 : 0;

	writel(val, phy->lane_base + REG_DSI_7nm_PHY_LN_LPRX_CTRL(phy_lane_0));
}
843
/*
 * Program the per-lane registers for all 5 lanes (4 data + clock):
 * disable LPRX/CDRX everywhere, re-enable them on logical lane 0 only,
 * then apply the CFG and TX_DCTRL values (V4.1+ uses the tx_dctrl_1 set).
 */
static void dsi_phy_hw_v4_0_lane_settings(struct msm_dsi_phy *phy)
{
	int i;
	const u8 tx_dctrl_0[] = { 0x00, 0x00, 0x00, 0x04, 0x01 };
	const u8 tx_dctrl_1[] = { 0x40, 0x40, 0x40, 0x46, 0x41 };
	const u8 *tx_dctrl = tx_dctrl_0;
	void __iomem *lane_base = phy->lane_base;

	if (!(phy->cfg->quirks & DSI_PHY_7NM_QUIRK_PRE_V4_1))
		tx_dctrl = tx_dctrl_1;

	/* Strength ctrl settings */
	for (i = 0; i < 5; i++) {
		/*
		 * Disable LPRX and CDRX for all lanes. And later on, it will
		 * be only enabled for the physical data lane corresponding
		 * to the logical data lane 0
		 */
		writel(0, lane_base + REG_DSI_7nm_PHY_LN_LPRX_CTRL(i));
		writel(0x0, lane_base + REG_DSI_7nm_PHY_LN_PIN_SWAP(i));
	}

	dsi_phy_hw_v4_0_config_lpcdrx(phy, true);

	/* other settings */
	for (i = 0; i < 5; i++) {
		writel(0x0, lane_base + REG_DSI_7nm_PHY_LN_CFG0(i));
		writel(0x0, lane_base + REG_DSI_7nm_PHY_LN_CFG1(i));
		/* lane 4 (clock lane) gets a different CFG2 value */
		writel(i == 4 ? 0x8a : 0xa, lane_base + REG_DSI_7nm_PHY_LN_CFG2(i));
		writel(tx_dctrl[i], lane_base + REG_DSI_7nm_PHY_LN_TX_DCTRL(i));
	}
}
876
dsi_7nm_phy_enable(struct msm_dsi_phy * phy,struct msm_dsi_phy_clk_request * clk_req)877 static int dsi_7nm_phy_enable(struct msm_dsi_phy *phy,
878 struct msm_dsi_phy_clk_request *clk_req)
879 {
880 int ret;
881 u32 status;
882 u32 const delay_us = 5;
883 u32 const timeout_us = 1000;
884 struct msm_dsi_dphy_timing *timing = &phy->timing;
885 void __iomem *base = phy->base;
886 bool less_than_1500_mhz;
887 u32 vreg_ctrl_0, vreg_ctrl_1, lane_ctrl0;
888 u32 glbl_pemph_ctrl_0;
889 u32 glbl_str_swi_cal_sel_ctrl, glbl_hstx_str_ctrl_0;
890 u32 glbl_rescode_top_ctrl, glbl_rescode_bot_ctrl;
891 u32 data;
892
893 DBG("");
894
895 if (phy->cphy_mode)
896 ret = msm_dsi_cphy_timing_calc_v4(timing, clk_req);
897 else
898 ret = msm_dsi_dphy_timing_calc_v4(timing, clk_req);
899 if (ret) {
900 DRM_DEV_ERROR(&phy->pdev->dev,
901 "%s: PHY timing calculation failed\n", __func__);
902 return -EINVAL;
903 }
904
905 if (dsi_phy_hw_v4_0_is_pll_on(phy))
906 pr_warn("PLL turned on before configuring PHY\n");
907
908 /* Request for REFGEN READY */
909 if ((phy->cfg->quirks & DSI_PHY_7NM_QUIRK_V4_3) ||
910 (phy->cfg->quirks & DSI_PHY_7NM_QUIRK_V5_2)) {
911 writel(0x1, phy->base + REG_DSI_7nm_PHY_CMN_GLBL_DIGTOP_SPARE10);
912 udelay(500);
913 }
914
915 /* wait for REFGEN READY */
916 ret = readl_poll_timeout_atomic(base + REG_DSI_7nm_PHY_CMN_PHY_STATUS,
917 status, (status & BIT(0)),
918 delay_us, timeout_us);
919 if (ret) {
920 pr_err("Ref gen not ready. Aborting\n");
921 return -EINVAL;
922 }
923
924 /* TODO: CPHY enable path (this is for DPHY only) */
925
926 /* Alter PHY configurations if data rate less than 1.5GHZ*/
927 less_than_1500_mhz = (clk_req->bitclk_rate <= 1500000000);
928
929 glbl_str_swi_cal_sel_ctrl = 0x00;
930 if (phy->cphy_mode) {
931 vreg_ctrl_0 = 0x51;
932 vreg_ctrl_1 = 0x55;
933 glbl_hstx_str_ctrl_0 = 0x00;
934 glbl_pemph_ctrl_0 = 0x11;
935 lane_ctrl0 = 0x17;
936 } else {
937 vreg_ctrl_0 = less_than_1500_mhz ? 0x53 : 0x52;
938 vreg_ctrl_1 = 0x5c;
939 glbl_hstx_str_ctrl_0 = 0x88;
940 glbl_pemph_ctrl_0 = 0x00;
941 lane_ctrl0 = 0x1f;
942 }
943
944 if ((phy->cfg->quirks & DSI_PHY_7NM_QUIRK_V5_2)) {
945 if (phy->cphy_mode) {
946 vreg_ctrl_0 = 0x45;
947 vreg_ctrl_1 = 0x41;
948 glbl_rescode_top_ctrl = 0x00;
949 glbl_rescode_bot_ctrl = 0x00;
950 } else {
951 vreg_ctrl_0 = 0x44;
952 vreg_ctrl_1 = 0x19;
953 glbl_rescode_top_ctrl = less_than_1500_mhz ? 0x3c : 0x03;
954 glbl_rescode_bot_ctrl = less_than_1500_mhz ? 0x38 : 0x3c;
955 }
956 } else if ((phy->cfg->quirks & DSI_PHY_7NM_QUIRK_V4_3)) {
957 if (phy->cphy_mode) {
958 glbl_rescode_top_ctrl = less_than_1500_mhz ? 0x3d : 0x01;
959 glbl_rescode_bot_ctrl = less_than_1500_mhz ? 0x38 : 0x3b;
960 } else {
961 glbl_rescode_top_ctrl = less_than_1500_mhz ? 0x3d : 0x01;
962 glbl_rescode_bot_ctrl = less_than_1500_mhz ? 0x38 : 0x39;
963 }
964 } else if (phy->cfg->quirks & DSI_PHY_7NM_QUIRK_V4_2) {
965 if (phy->cphy_mode) {
966 glbl_rescode_top_ctrl = less_than_1500_mhz ? 0x3d : 0x01;
967 glbl_rescode_bot_ctrl = less_than_1500_mhz ? 0x38 : 0x3b;
968 } else {
969 glbl_rescode_top_ctrl = less_than_1500_mhz ? 0x3c : 0x00;
970 glbl_rescode_bot_ctrl = less_than_1500_mhz ? 0x38 : 0x39;
971 }
972 } else if (phy->cfg->quirks & DSI_PHY_7NM_QUIRK_V4_1) {
973 if (phy->cphy_mode) {
974 glbl_hstx_str_ctrl_0 = 0x88;
975 glbl_rescode_top_ctrl = 0x00;
976 glbl_rescode_bot_ctrl = 0x3c;
977 } else {
978 glbl_rescode_top_ctrl = less_than_1500_mhz ? 0x3d : 0x00;
979 glbl_rescode_bot_ctrl = less_than_1500_mhz ? 0x39 : 0x3c;
980 }
981 } else {
982 if (phy->cphy_mode) {
983 glbl_str_swi_cal_sel_ctrl = 0x03;
984 glbl_hstx_str_ctrl_0 = 0x66;
985 } else {
986 vreg_ctrl_0 = less_than_1500_mhz ? 0x5B : 0x59;
987 glbl_str_swi_cal_sel_ctrl = less_than_1500_mhz ? 0x03 : 0x00;
988 glbl_hstx_str_ctrl_0 = less_than_1500_mhz ? 0x66 : 0x88;
989 }
990 glbl_rescode_top_ctrl = 0x03;
991 glbl_rescode_bot_ctrl = 0x3c;
992 }
993
994 /* de-assert digital and pll power down */
995 data = BIT(6) | BIT(5);
996 writel(data, base + REG_DSI_7nm_PHY_CMN_CTRL_0);
997
998 /* Assert PLL core reset */
999 writel(0x00, base + REG_DSI_7nm_PHY_CMN_PLL_CNTRL);
1000
1001 /* turn off resync FIFO */
1002 writel(0x00, base + REG_DSI_7nm_PHY_CMN_RBUF_CTRL);
1003
1004 /* program CMN_CTRL_4 for minor_ver 2 chipsets*/
1005 if ((phy->cfg->quirks & DSI_PHY_7NM_QUIRK_V5_2) ||
1006 (readl(base + REG_DSI_7nm_PHY_CMN_REVISION_ID0) & (0xf0)) == 0x20)
1007 writel(0x04, base + REG_DSI_7nm_PHY_CMN_CTRL_4);
1008
1009 /* Configure PHY lane swap (TODO: we need to calculate this) */
1010 writel(0x21, base + REG_DSI_7nm_PHY_CMN_LANE_CFG0);
1011 writel(0x84, base + REG_DSI_7nm_PHY_CMN_LANE_CFG1);
1012
1013 if (phy->cphy_mode)
1014 writel(BIT(6), base + REG_DSI_7nm_PHY_CMN_GLBL_CTRL);
1015
1016 /* Enable LDO */
1017 writel(vreg_ctrl_0, base + REG_DSI_7nm_PHY_CMN_VREG_CTRL_0);
1018 writel(vreg_ctrl_1, base + REG_DSI_7nm_PHY_CMN_VREG_CTRL_1);
1019
1020 writel(0x00, base + REG_DSI_7nm_PHY_CMN_CTRL_3);
1021 writel(glbl_str_swi_cal_sel_ctrl,
1022 base + REG_DSI_7nm_PHY_CMN_GLBL_STR_SWI_CAL_SEL_CTRL);
1023 writel(glbl_hstx_str_ctrl_0,
1024 base + REG_DSI_7nm_PHY_CMN_GLBL_HSTX_STR_CTRL_0);
1025 writel(glbl_pemph_ctrl_0,
1026 base + REG_DSI_7nm_PHY_CMN_GLBL_PEMPH_CTRL_0);
1027 if (phy->cphy_mode)
1028 writel(0x01, base + REG_DSI_7nm_PHY_CMN_GLBL_PEMPH_CTRL_1);
1029 writel(glbl_rescode_top_ctrl,
1030 base + REG_DSI_7nm_PHY_CMN_GLBL_RESCODE_OFFSET_TOP_CTRL);
1031 writel(glbl_rescode_bot_ctrl,
1032 base + REG_DSI_7nm_PHY_CMN_GLBL_RESCODE_OFFSET_BOT_CTRL);
1033 writel(0x55, base + REG_DSI_7nm_PHY_CMN_GLBL_LPTX_STR_CTRL);
1034
1035 /* Remove power down from all blocks */
1036 writel(0x7f, base + REG_DSI_7nm_PHY_CMN_CTRL_0);
1037
1038 writel(lane_ctrl0, base + REG_DSI_7nm_PHY_CMN_LANE_CTRL0);
1039
1040 /* Select full-rate mode */
1041 if (!phy->cphy_mode)
1042 writel(0x40, base + REG_DSI_7nm_PHY_CMN_CTRL_2);
1043
1044 ret = dsi_7nm_set_usecase(phy);
1045 if (ret) {
1046 DRM_DEV_ERROR(&phy->pdev->dev, "%s: set pll usecase failed, %d\n",
1047 __func__, ret);
1048 return ret;
1049 }
1050
1051 /* DSI PHY timings */
1052 if (phy->cphy_mode) {
1053 writel(0x00, base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_0);
1054 writel(timing->hs_exit, base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_4);
1055 writel(timing->shared_timings.clk_pre,
1056 base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_5);
1057 writel(timing->clk_prepare, base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_6);
1058 writel(timing->shared_timings.clk_post,
1059 base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_7);
1060 writel(timing->hs_rqst, base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_8);
1061 writel(0x02, base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_9);
1062 writel(0x04, base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_10);
1063 writel(0x00, base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_11);
1064 } else {
1065 writel(0x00, base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_0);
1066 writel(timing->clk_zero, base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_1);
1067 writel(timing->clk_prepare, base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_2);
1068 writel(timing->clk_trail, base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_3);
1069 writel(timing->hs_exit, base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_4);
1070 writel(timing->hs_zero, base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_5);
1071 writel(timing->hs_prepare, base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_6);
1072 writel(timing->hs_trail, base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_7);
1073 writel(timing->hs_rqst, base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_8);
1074 writel(0x02, base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_9);
1075 writel(0x04, base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_10);
1076 writel(0x00, base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_11);
1077 writel(timing->shared_timings.clk_pre,
1078 base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_12);
1079 writel(timing->shared_timings.clk_post,
1080 base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_13);
1081 }
1082
1083 /* DSI lane settings */
1084 dsi_phy_hw_v4_0_lane_settings(phy);
1085
1086 DBG("DSI%d PHY enabled", phy->id);
1087
1088 return 0;
1089 }
1090
/*
 * Request (or release) a continuous, non-gated HS clock from the clock
 * lane by toggling bits 5 and 6 of CMN_LANE_CTRL1.  Returns the state
 * that was just programmed.
 */
static bool dsi_7nm_set_continuous_clock(struct msm_dsi_phy *phy, bool enable)
{
	const u32 mask = BIT(5) | BIT(6);
	void __iomem *base = phy->base;
	u32 val;

	val = readl(base + REG_DSI_7nm_PHY_CMN_LANE_CTRL1);
	val = enable ? (val | mask) : (val & ~mask);
	writel(val, base + REG_DSI_7nm_PHY_CMN_LANE_CTRL1);

	return enable;
}
1105
/*
 * Power down the DSI PHY.
 *
 * Sequence matters: LP-CDRX is reconfigured and the REFGEN vote dropped
 * while the PHY is still powered, then the lane enables are cleared, and
 * only then is power removed from all blocks.  The final wmb() flushes
 * the last CTRL_0 write so the PHY is really off before we return.
 */
static void dsi_7nm_phy_disable(struct msm_dsi_phy *phy)
{
	void __iomem *base = phy->base;
	u32 data;

	DBG("");

	/* Disabling the PHY under an active PLL is unexpected; warn but
	 * proceed with the shutdown sequence anyway.
	 */
	if (dsi_phy_hw_v4_0_is_pll_on(phy))
		pr_warn("Turning OFF PHY while PLL is on\n");

	dsi_phy_hw_v4_0_config_lpcdrx(phy, false);

	/* Turn off REFGEN Vote (only present on V4.3 / V5.2 hardware) */
	if ((phy->cfg->quirks & DSI_PHY_7NM_QUIRK_V4_3) ||
	    (phy->cfg->quirks & DSI_PHY_7NM_QUIRK_V5_2)) {
		writel(0x0, base + REG_DSI_7nm_PHY_CMN_GLBL_DIGTOP_SPARE10);
		wmb();
		/* Delay to ensure HW removes vote before PHY shut down */
		udelay(2);
	}

	data = readl(base + REG_DSI_7nm_PHY_CMN_CTRL_0);

	/* disable all lanes (lane-enable bits live in CTRL_0[4:0]) */
	data &= ~0x1F;
	writel(data, base + REG_DSI_7nm_PHY_CMN_CTRL_0);
	writel(0, base + REG_DSI_7nm_PHY_CMN_LANE_CTRL0);

	/* Turn off all PHY blocks */
	writel(0x00, base + REG_DSI_7nm_PHY_CMN_CTRL_0);
	/* make sure phy is turned off */
	wmb();

	DBG("DSI%d PHY disabled", phy->id);
}
1141
/*
 * vdds supply load requests for the different PHY variants.  Each table
 * name encodes the requested load current used by its consumers below.
 */
static const struct regulator_bulk_data dsi_phy_7nm_36mA_regulators[] = {
	{ .supply = "vdds", .init_load_uA = 36000 },
};

/*
 * NOTE(review): the symbol name says 37750uA but the programmed load is
 * 37550 uA — confirm against the PHY power spec which of the two is the
 * intended value before renaming or changing either.
 */
static const struct regulator_bulk_data dsi_phy_7nm_37750uA_regulators[] = {
	{ .supply = "vdds", .init_load_uA = 37550 },
};

static const struct regulator_bulk_data dsi_phy_7nm_98000uA_regulators[] = {
	{ .supply = "vdds", .init_load_uA = 98000 },
};

static const struct regulator_bulk_data dsi_phy_7nm_97800uA_regulators[] = {
	{ .supply = "vdds", .init_load_uA = 97800 },
};

static const struct regulator_bulk_data dsi_phy_7nm_98400uA_regulators[] = {
	{ .supply = "vdds", .init_load_uA = 98400 },
};
1161
/*
 * Generic 7nm PHY config (V4.1 quirk set): two PHY instances, 36 mA
 * vdds load.  On 32-bit builds the 5 GHz PLL ceiling does not fit in
 * unsigned long, so max_pll_rate degrades to ULONG_MAX there (the upper
 * range check is effectively disabled).
 */
const struct msm_dsi_phy_cfg dsi_phy_7nm_cfgs = {
	.has_phy_lane = true,
	.regulator_data = dsi_phy_7nm_36mA_regulators,
	.num_regulators = ARRAY_SIZE(dsi_phy_7nm_36mA_regulators),
	.ops = {
		.enable = dsi_7nm_phy_enable,
		.disable = dsi_7nm_phy_disable,
		.pll_init = dsi_pll_7nm_init,
		.save_pll_state = dsi_7nm_pll_save_state,
		.restore_pll_state = dsi_7nm_pll_restore_state,
		.set_continuous_clock = dsi_7nm_set_continuous_clock,
	},
	.min_pll_rate = 600000000UL,
#ifdef CONFIG_64BIT
	.max_pll_rate = 5000000000UL,
#else
	.max_pll_rate = ULONG_MAX,
#endif
	.io_start = { 0xae94400, 0xae96400 },
	.num_dsi_phy = 2,
	.quirks = DSI_PHY_7NM_QUIRK_V4_1,
};
1184
1185 const struct msm_dsi_phy_cfg dsi_phy_7nm_6375_cfgs = {
1186 .has_phy_lane = true,
1187 .ops = {
1188 .enable = dsi_7nm_phy_enable,
1189 .disable = dsi_7nm_phy_disable,
1190 .pll_init = dsi_pll_7nm_init,
1191 .save_pll_state = dsi_7nm_pll_save_state,
1192 .restore_pll_state = dsi_7nm_pll_restore_state,
1193 },
1194 .min_pll_rate = 600000000UL,
1195 #ifdef CONFIG_64BIT
1196 .max_pll_rate = 5000000000ULL,
1197 #else
1198 .max_pll_rate = ULONG_MAX,
1199 #endif
1200 .io_start = { 0x5e94400 },
1201 .num_dsi_phy = 1,
1202 .quirks = DSI_PHY_7NM_QUIRK_V4_1,
1203 };
1204
/*
 * 7nm PHY config for the "8150" platform (per the symbol name): the
 * only table using the pre-V4.1 quirk, with a tighter 1–3.5 GHz PLL
 * range that fits unsigned long on both 32- and 64-bit builds (hence
 * no CONFIG_64BIT split here).
 */
const struct msm_dsi_phy_cfg dsi_phy_7nm_8150_cfgs = {
	.has_phy_lane = true,
	.regulator_data = dsi_phy_7nm_36mA_regulators,
	.num_regulators = ARRAY_SIZE(dsi_phy_7nm_36mA_regulators),
	.ops = {
		.enable = dsi_7nm_phy_enable,
		.disable = dsi_7nm_phy_disable,
		.pll_init = dsi_pll_7nm_init,
		.save_pll_state = dsi_7nm_pll_save_state,
		.restore_pll_state = dsi_7nm_pll_restore_state,
		.set_continuous_clock = dsi_7nm_set_continuous_clock,
	},
	.min_pll_rate = 1000000000UL,
	.max_pll_rate = 3500000000UL,
	.io_start = { 0xae94400, 0xae96400 },
	.num_dsi_phy = 2,
	.quirks = DSI_PHY_7NM_QUIRK_PRE_V4_1,
};
1223
1224 const struct msm_dsi_phy_cfg dsi_phy_7nm_7280_cfgs = {
1225 .has_phy_lane = true,
1226 .regulator_data = dsi_phy_7nm_37750uA_regulators,
1227 .num_regulators = ARRAY_SIZE(dsi_phy_7nm_37750uA_regulators),
1228 .ops = {
1229 .enable = dsi_7nm_phy_enable,
1230 .disable = dsi_7nm_phy_disable,
1231 .pll_init = dsi_pll_7nm_init,
1232 .save_pll_state = dsi_7nm_pll_save_state,
1233 .restore_pll_state = dsi_7nm_pll_restore_state,
1234 },
1235 .min_pll_rate = 600000000UL,
1236 #ifdef CONFIG_64BIT
1237 .max_pll_rate = 5000000000ULL,
1238 #else
1239 .max_pll_rate = ULONG_MAX,
1240 #endif
1241 .io_start = { 0xae94400 },
1242 .num_dsi_phy = 1,
1243 .quirks = DSI_PHY_7NM_QUIRK_V4_1,
1244 };
1245
/*
 * 5nm PHY config for the "8350" platform (per the symbol name): dual
 * PHY, V4.2 quirk set, 37.55 mA vdds load.
 */
const struct msm_dsi_phy_cfg dsi_phy_5nm_8350_cfgs = {
	.has_phy_lane = true,
	.regulator_data = dsi_phy_7nm_37750uA_regulators,
	.num_regulators = ARRAY_SIZE(dsi_phy_7nm_37750uA_regulators),
	.ops = {
		.enable = dsi_7nm_phy_enable,
		.disable = dsi_7nm_phy_disable,
		.pll_init = dsi_pll_7nm_init,
		.save_pll_state = dsi_7nm_pll_save_state,
		.restore_pll_state = dsi_7nm_pll_restore_state,
		.set_continuous_clock = dsi_7nm_set_continuous_clock,
	},
	.min_pll_rate = 600000000UL,
#ifdef CONFIG_64BIT
	.max_pll_rate = 5000000000UL,
#else
	.max_pll_rate = ULONG_MAX,
#endif
	.io_start = { 0xae94400, 0xae96400 },
	.num_dsi_phy = 2,
	.quirks = DSI_PHY_7NM_QUIRK_V4_2,
};
1268
/*
 * 5nm PHY config for the "8450" platform (per the symbol name): dual
 * PHY, V4.3 quirk set (uses the GLBL_DIGTOP_SPARE10 REFGEN vote in
 * enable/disable), 97.8 mA vdds load.
 */
const struct msm_dsi_phy_cfg dsi_phy_5nm_8450_cfgs = {
	.has_phy_lane = true,
	.regulator_data = dsi_phy_7nm_97800uA_regulators,
	.num_regulators = ARRAY_SIZE(dsi_phy_7nm_97800uA_regulators),
	.ops = {
		.enable = dsi_7nm_phy_enable,
		.disable = dsi_7nm_phy_disable,
		.pll_init = dsi_pll_7nm_init,
		.save_pll_state = dsi_7nm_pll_save_state,
		.restore_pll_state = dsi_7nm_pll_restore_state,
		.set_continuous_clock = dsi_7nm_set_continuous_clock,
	},
	.min_pll_rate = 600000000UL,
#ifdef CONFIG_64BIT
	.max_pll_rate = 5000000000UL,
#else
	.max_pll_rate = ULONG_MAX,
#endif
	.io_start = { 0xae94400, 0xae96400 },
	.num_dsi_phy = 2,
	.quirks = DSI_PHY_7NM_QUIRK_V4_3,
};
1291
/*
 * 4nm PHY config for the "8550" platform (per the symbol name): dual
 * PHY at the newer 0xae95000/0xae97000 bases, V5.2 quirk set,
 * 98.4 mA vdds load.
 */
const struct msm_dsi_phy_cfg dsi_phy_4nm_8550_cfgs = {
	.has_phy_lane = true,
	.regulator_data = dsi_phy_7nm_98400uA_regulators,
	.num_regulators = ARRAY_SIZE(dsi_phy_7nm_98400uA_regulators),
	.ops = {
		.enable = dsi_7nm_phy_enable,
		.disable = dsi_7nm_phy_disable,
		.pll_init = dsi_pll_7nm_init,
		.save_pll_state = dsi_7nm_pll_save_state,
		.restore_pll_state = dsi_7nm_pll_restore_state,
		.set_continuous_clock = dsi_7nm_set_continuous_clock,
	},
	.min_pll_rate = 600000000UL,
#ifdef CONFIG_64BIT
	.max_pll_rate = 5000000000UL,
#else
	.max_pll_rate = ULONG_MAX,
#endif
	.io_start = { 0xae95000, 0xae97000 },
	.num_dsi_phy = 2,
	.quirks = DSI_PHY_7NM_QUIRK_V5_2,
};
1314
/*
 * 4nm PHY config for the "8650" platform (per the symbol name): same
 * layout and V5.2 quirk set as the 8550 table, but with a 98.0 mA
 * vdds load.
 */
const struct msm_dsi_phy_cfg dsi_phy_4nm_8650_cfgs = {
	.has_phy_lane = true,
	.regulator_data = dsi_phy_7nm_98000uA_regulators,
	.num_regulators = ARRAY_SIZE(dsi_phy_7nm_98000uA_regulators),
	.ops = {
		.enable = dsi_7nm_phy_enable,
		.disable = dsi_7nm_phy_disable,
		.pll_init = dsi_pll_7nm_init,
		.save_pll_state = dsi_7nm_pll_save_state,
		.restore_pll_state = dsi_7nm_pll_restore_state,
		.set_continuous_clock = dsi_7nm_set_continuous_clock,
	},
	.min_pll_rate = 600000000UL,
#ifdef CONFIG_64BIT
	.max_pll_rate = 5000000000UL,
#else
	.max_pll_rate = ULONG_MAX,
#endif
	.io_start = { 0xae95000, 0xae97000 },
	.num_dsi_phy = 2,
	.quirks = DSI_PHY_7NM_QUIRK_V5_2,
};
1337