Lines Matching "clk-phase"

1 // SPDX-License-Identifier: GPL-2.0
3 * Copyright (C) STMicroelectronics 2018 - All Rights Reserved
8 #include <linux/dma-mapping.h>
64 for_each_sg(data->sg, sg, data->sg_len - 1, i) { in sdmmc_idma_validate_data()
65 if (!IS_ALIGNED(data->sg->offset, sizeof(u32)) || in sdmmc_idma_validate_data()
66 !IS_ALIGNED(data->sg->length, SDMMC_IDMA_BURST)) { in sdmmc_idma_validate_data()
67 dev_err(mmc_dev(host->mmc), in sdmmc_idma_validate_data()
69 data->sg->offset, data->sg->length); in sdmmc_idma_validate_data()
70 return -EINVAL; in sdmmc_idma_validate_data()
74 if (!IS_ALIGNED(data->sg->offset, sizeof(u32))) { in sdmmc_idma_validate_data()
75 dev_err(mmc_dev(host->mmc), in sdmmc_idma_validate_data()
77 data->sg->offset, data->sg->length); in sdmmc_idma_validate_data()
78 return -EINVAL; in sdmmc_idma_validate_data()
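The checks above enforce the IDMA scatter-gather alignment rule: every segment except the last must start on a 32-bit boundary and have a length that is a multiple of the IDMA burst size, while the final segment only needs the 32-bit offset alignment. Below is a minimal userspace sketch of that rule; the 32-byte burst size and the seg/idma_segs_valid names are assumptions for illustration, not taken from the listing.

#include <stdbool.h>
#include <stddef.h>
#include <stdint.h>
#include <stdio.h>

#define SDMMC_IDMA_BURST 32u   /* assumed burst size, for illustration only */

struct seg { size_t offset; size_t length; };

/* Model of the rule the driver appears to enforce: every segment but the
 * last needs both offset and length alignment; the last one only needs
 * its offset to be 32-bit aligned. */
static bool idma_segs_valid(const struct seg *segs, size_t n)
{
	for (size_t i = 0; i + 1 < n; i++) {
		if (segs[i].offset % sizeof(uint32_t) ||
		    segs[i].length % SDMMC_IDMA_BURST)
			return false;
	}
	return n == 0 || (segs[n - 1].offset % sizeof(uint32_t)) == 0;
}

int main(void)
{
	struct seg ok[]  = { { 0, 64 }, { 0, 17 } };  /* last segment may have any length */
	struct seg bad[] = { { 2, 64 }, { 0, 32 } };  /* first offset is not 32-bit aligned */

	printf("ok:  %d\n", idma_segs_valid(ok, 2));
	printf("bad: %d\n", idma_segs_valid(bad, 2));
	return 0;
}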
89 n_elem = dma_map_sg(mmc_dev(host->mmc), in _sdmmc_idma_prep_data()
90 data->sg, in _sdmmc_idma_prep_data()
91 data->sg_len, in _sdmmc_idma_prep_data()
95 dev_err(mmc_dev(host->mmc), "dma_map_sg failed\n"); in _sdmmc_idma_prep_data()
96 return -EINVAL; in _sdmmc_idma_prep_data()
106 if (!next && data->host_cookie == host->next_cookie) in sdmmc_idma_prep_data()
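sdmmc_idma_prep_data() only maps the scatter-list when it has not already been mapped from the pre-request path: a host_cookie that matches the host's next_cookie means the mapping was done ahead of time and can be reused. A simplified, hypothetical model of that cookie handshake (in the real driver the cookie is stamped by the MMCI core, not by the prep helper):

#include <stdbool.h>
#include <stdio.h>

/* Hypothetical, stripped-down model of the pre_req/host_cookie handshake. */
struct req  { int host_cookie; bool mapped; };
struct host { int next_cookie; };

static void map_for_dma(struct req *r) { r->mapped = true; }

static void prep(struct host *h, struct req *r, bool next)
{
	/* Already mapped by the pre-request hook: reuse that mapping. */
	if (!next && r->host_cookie == h->next_cookie)
		return;

	map_for_dma(r);
	if (next)
		r->host_cookie = ++h->next_cookie;  /* done by the core in the driver */
}

int main(void)
{
	struct host h = { 0 };
	struct req r = { 0, false };

	prep(&h, &r, true);   /* pre-request: map early and stamp the cookie */
	prep(&h, &r, false);  /* request start: cookie matches, no second mapping */
	printf("cookie=%d mapped=%d\n", r.host_cookie, r.mapped);
	return 0;
}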
115 dma_unmap_sg(mmc_dev(host->mmc), data->sg, data->sg_len, in sdmmc_idma_unprep_data()
122 struct device *dev = mmc_dev(host->mmc); in sdmmc_idma_setup()
126 return -ENOMEM; in sdmmc_idma_setup()
128 host->dma_priv = idma; in sdmmc_idma_setup()
130 if (host->variant->dma_lli) { in sdmmc_idma_setup()
131 idma->sg_cpu = dmam_alloc_coherent(dev, SDMMC_LLI_BUF_LEN, in sdmmc_idma_setup()
132 &idma->sg_dma, GFP_KERNEL); in sdmmc_idma_setup()
133 if (!idma->sg_cpu) { in sdmmc_idma_setup()
135 return -ENOMEM; in sdmmc_idma_setup()
137 host->mmc->max_segs = SDMMC_LLI_BUF_LEN / in sdmmc_idma_setup()
139 host->mmc->max_seg_size = host->variant->stm32_idmabsize_mask; in sdmmc_idma_setup()
141 host->mmc->max_segs = 1; in sdmmc_idma_setup()
142 host->mmc->max_seg_size = host->mmc->max_req_size; in sdmmc_idma_setup()
145 return dma_set_max_seg_size(dev, host->mmc->max_seg_size); in sdmmc_idma_setup()
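In the LLI-capable variant, sdmmc_idma_setup() sizes the request limits from the coherent descriptor buffer: max_segs is the buffer length divided by the size of one link descriptor, and each segment is capped by the IDMABSIZE mask; without linked-list support a single segment of up to max_req_size is used. A back-of-the-envelope sketch of that computation, where the 4 KiB buffer length and the three-word descriptor layout are assumptions for illustration:

#include <stdint.h>
#include <stdio.h>

#define SDMMC_LLI_BUF_LEN 4096u   /* assumed: one page of descriptors */

/* Assumed layout, matching the field names visible in the listing. */
struct sdmmc_lli_desc {
	uint32_t idmalar;    /* link address + control bits */
	uint32_t idmabase;   /* buffer address */
	uint32_t idmasize;   /* buffer length */
};

int main(void)
{
	unsigned int max_segs = SDMMC_LLI_BUF_LEN / sizeof(struct sdmmc_lli_desc);

	/* A 4 KiB buffer of 12-byte descriptors allows up to 341 segments. */
	printf("max_segs = %u\n", max_segs);
	return 0;
}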
151 struct sdmmc_idma *idma = host->dma_priv; in sdmmc_idma_start()
152 struct sdmmc_lli_desc *desc = (struct sdmmc_lli_desc *)idma->sg_cpu; in sdmmc_idma_start()
153 struct mmc_data *data = host->data; in sdmmc_idma_start()
157 if (!host->variant->dma_lli || data->sg_len == 1) { in sdmmc_idma_start()
158 writel_relaxed(sg_dma_address(data->sg), in sdmmc_idma_start()
159 host->base + MMCI_STM32_IDMABASE0R); in sdmmc_idma_start()
161 host->base + MMCI_STM32_IDMACTRLR); in sdmmc_idma_start()
165 for_each_sg(data->sg, sg, data->sg_len, i) { in sdmmc_idma_start()
174 desc[data->sg_len - 1].idmalar &= ~MMCI_STM32_ULA; in sdmmc_idma_start()
177 writel_relaxed(idma->sg_dma, host->base + MMCI_STM32_IDMABAR); in sdmmc_idma_start()
178 writel_relaxed(desc[0].idmalar, host->base + MMCI_STM32_IDMALAR); in sdmmc_idma_start()
179 writel_relaxed(desc[0].idmabase, host->base + MMCI_STM32_IDMABASE0R); in sdmmc_idma_start()
180 writel_relaxed(desc[0].idmasize, host->base + MMCI_STM32_IDMABSIZER); in sdmmc_idma_start()
182 host->base + MMCI_STM32_IDMACTRLR); in sdmmc_idma_start()
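For multi-segment transfers, sdmmc_idma_start() builds a chain of link descriptors: each entry carries the DMA address and length of one segment plus a link to the next entry with the ULA (update link address) bit set, and the last entry has ULA cleared so the hardware stops there; single-segment transfers just program IDMABASE0 directly. A hedged userspace sketch of that chain construction, using a descriptor layout like the one above; the bit positions and the offset-based link encoding are assumptions for illustration:

#include <inttypes.h>
#include <stdint.h>
#include <stdio.h>

/* Made-up bit positions, for illustration only. */
#define ULA (1u << 0)   /* update link address: another descriptor follows */
#define ABR (1u << 1)
#define ULS (1u << 2)

struct lli_desc { uint32_t idmalar, idmabase, idmasize; };
struct seg      { uint32_t dma_addr, len; };

static void build_chain(struct lli_desc *desc, const struct seg *sg, unsigned int n)
{
	for (unsigned int i = 0; i < n; i++) {
		/* Link to the next descriptor, encoded as a byte offset into the buffer. */
		desc[i].idmalar  = (i + 1) * (uint32_t)sizeof(struct lli_desc);
		desc[i].idmalar |= ULA | ABR | ULS;
		desc[i].idmabase = sg[i].dma_addr;
		desc[i].idmasize = sg[i].len;
	}
	/* Notice the end of the list: the last entry must not chain further. */
	desc[n - 1].idmalar &= ~ULA;
}

int main(void)
{
	struct seg sg[3] = { { 0x1000, 512 }, { 0x2000, 512 }, { 0x3000, 128 } };
	struct lli_desc desc[3];

	build_chain(desc, sg, 3);
	for (unsigned int i = 0; i < 3; i++)
		printf("desc[%u]: lar=0x%" PRIx32 " base=0x%" PRIx32 " size=%" PRIu32 "\n",
		       i, desc[i].idmalar, desc[i].idmabase, desc[i].idmasize);
	return 0;
}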
189 writel_relaxed(0, host->base + MMCI_STM32_IDMACTRLR); in sdmmc_idma_finalize()
191 if (!data->host_cookie) in sdmmc_idma_finalize()
197 unsigned int clk = 0, ddr = 0; in mmci_sdmmc_set_clkreg() local
199 if (host->mmc->ios.timing == MMC_TIMING_MMC_DDR52 || in mmci_sdmmc_set_clkreg()
200 host->mmc->ios.timing == MMC_TIMING_UHS_DDR50) in mmci_sdmmc_set_clkreg()
209 if (desired >= host->mclk && !ddr) { in mmci_sdmmc_set_clkreg()
210 host->cclk = host->mclk; in mmci_sdmmc_set_clkreg()
212 clk = DIV_ROUND_UP(host->mclk, 2 * desired); in mmci_sdmmc_set_clkreg()
213 if (clk > MCI_STM32_CLK_CLKDIV_MSK) in mmci_sdmmc_set_clkreg()
214 clk = MCI_STM32_CLK_CLKDIV_MSK; in mmci_sdmmc_set_clkreg()
215 host->cclk = host->mclk / (2 * clk); in mmci_sdmmc_set_clkreg()
219  * while in the power-on phase the clock can't be set to 0, in mmci_sdmmc_set_clkreg()
220  * only power-off and power-cycle deactivate the clock. in mmci_sdmmc_set_clkreg()
223 clk = MCI_STM32_CLK_CLKDIV_MSK; in mmci_sdmmc_set_clkreg()
224 host->cclk = host->mclk / (2 * clk); in mmci_sdmmc_set_clkreg()
228 if (host->mmc->ios.power_mode == MMC_POWER_ON) in mmci_sdmmc_set_clkreg()
229 host->mmc->actual_clock = host->cclk; in mmci_sdmmc_set_clkreg()
231 host->mmc->actual_clock = 0; in mmci_sdmmc_set_clkreg()
233 if (host->mmc->ios.bus_width == MMC_BUS_WIDTH_4) in mmci_sdmmc_set_clkreg()
234 clk |= MCI_STM32_CLK_WIDEBUS_4; in mmci_sdmmc_set_clkreg()
235 if (host->mmc->ios.bus_width == MMC_BUS_WIDTH_8) in mmci_sdmmc_set_clkreg()
236 clk |= MCI_STM32_CLK_WIDEBUS_8; in mmci_sdmmc_set_clkreg()
238 clk |= MCI_STM32_CLK_HWFCEN; in mmci_sdmmc_set_clkreg()
239 clk |= host->clk_reg_add; in mmci_sdmmc_set_clkreg()
240 clk |= ddr; in mmci_sdmmc_set_clkreg()
246 if (host->mmc->ios.timing >= MMC_TIMING_UHS_SDR50) { in mmci_sdmmc_set_clkreg()
247 clk |= MCI_STM32_CLK_BUSSPEED; in mmci_sdmmc_set_clkreg()
248 if (host->mmc->ios.timing == MMC_TIMING_UHS_SDR104) { in mmci_sdmmc_set_clkreg()
249 clk &= ~MCI_STM32_CLK_SEL_MSK; in mmci_sdmmc_set_clkreg()
250 clk |= MCI_STM32_CLK_SELFBCK; in mmci_sdmmc_set_clkreg()
254 mmci_write_clkreg(host, clk); in mmci_sdmmc_set_clkreg()
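The divider logic above computes the CLKDIV field so that cclk = mclk / (2 * clkdiv), rounding the divider up so the card clock never exceeds the requested rate; a request at or above mclk (in non-DDR mode) bypasses the divider, and a zero request parks the clock at the slowest rate because it cannot be gated here. A standalone sketch of that arithmetic; the 10-bit field width and the pick_clkdiv name are simplified assumptions:

#include <stdio.h>

#define CLKDIV_MSK 0x3ffu   /* assumed width of the CLKDIV field */

#define DIV_ROUND_UP(n, d) (((n) + (d) - 1) / (d))

/* Returns the divider to program and reports the resulting card clock. */
static unsigned int pick_clkdiv(unsigned long mclk, unsigned long desired,
				int ddr, unsigned long *cclk)
{
	unsigned int clkdiv = 0;

	if (desired >= mclk && !ddr) {
		*cclk = mclk;                    /* divider bypass: cclk == mclk */
	} else if (desired) {
		clkdiv = DIV_ROUND_UP(mclk, 2 * desired);
		if (clkdiv > CLKDIV_MSK)
			clkdiv = CLKDIV_MSK;
		*cclk = mclk / (2 * clkdiv);
	} else {
		/* Clock can't be defined as 0 here: park it at the lowest rate. */
		clkdiv = CLKDIV_MSK;
		*cclk = mclk / (2 * clkdiv);
	}
	return clkdiv;
}

int main(void)
{
	unsigned long cclk;
	unsigned int clkdiv = pick_clkdiv(100000000UL, 400000UL, 0, &cclk);

	/* 100 MHz source, 400 kHz requested -> clkdiv = 125, cclk = 400 kHz. */
	printf("clkdiv=%u cclk=%lu\n", clkdiv, cclk);
	return 0;
}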
259 if (!dlyb || !dlyb->base) in sdmmc_dlyb_input_ck()
263 writel_relaxed(0, dlyb->base + DLYB_CR); in sdmmc_dlyb_input_ck()
268 struct mmc_ios ios = host->mmc->ios; in mmci_sdmmc_set_pwrreg()
269 struct sdmmc_dlyb *dlyb = host->variant_priv; in mmci_sdmmc_set_pwrreg()
272 pwr = host->pwr_reg_add; in mmci_sdmmc_set_pwrreg()
277 /* Only a reset can power off the sdmmc */ in mmci_sdmmc_set_pwrreg()
278 reset_control_assert(host->rst); in mmci_sdmmc_set_pwrreg()
280 reset_control_deassert(host->rst); in mmci_sdmmc_set_pwrreg()
283 * Set the SDMMC in Power-cycle state. in mmci_sdmmc_set_pwrreg()
291 * After power-off (reset): the irq mask defined in probe in mmci_sdmmc_set_pwrreg()
295 writel(MCI_IRQENABLE | host->variant->start_err, in mmci_sdmmc_set_pwrreg()
296 host->base + MMCIMASK0); in mmci_sdmmc_set_pwrreg()
299 pwr |= host->pwr_reg & (MCI_STM32_VSWITCHEN | in mmci_sdmmc_set_pwrreg()
303 * After a power-cycle state, we must set the SDMMC in in mmci_sdmmc_set_pwrreg()
304 * Power-off. The SDMMC_D[7:0], SDMMC_CMD and SDMMC_CK are in mmci_sdmmc_set_pwrreg()
305 * driven high. Then we can set the SDMMC to Power-on state in mmci_sdmmc_set_pwrreg()
319 if (host->mmc->card && mmc_card_sdio(host->mmc->card) && in sdmmc_get_dctrl_cfg()
320 host->data->blocks == 1) in sdmmc_get_dctrl_cfg()
322 else if (host->data->stop && !host->mrq->sbc) in sdmmc_get_dctrl_cfg()
332 void __iomem *base = host->base; in sdmmc_busy_complete()
346  * if busy_d0 is in progress, we must activate the busyd0end interrupt in sdmmc_busy_complete()
350 if (!host->busy_status) { in sdmmc_busy_complete()
351 writel_relaxed(mask | host->variant->busy_detect_mask, in sdmmc_busy_complete()
353 host->busy_status = status & in sdmmc_busy_complete()
360 if (host->busy_status) { in sdmmc_busy_complete()
361 writel_relaxed(mask & ~host->variant->busy_detect_mask, in sdmmc_busy_complete()
363 host->busy_status = 0; in sdmmc_busy_complete()
366 writel_relaxed(host->variant->busy_detect_mask, base + MMCICLEAR); in sdmmc_busy_complete()
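sdmmc_busy_complete() handles the card-busy signal on D0 in two passes: while the card still drives D0 low it unmasks the busyd0end interrupt and records that a busy phase is in progress, and when the end-of-busy interrupt fires it restores the mask, clears the recorded state and acknowledges the flag. A compact sketch of that two-phase bookkeeping with invented bit names and a simplified status latch:

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

/* Invented bits, for illustration only. */
#define BUSYD0    (1u << 0)   /* card is driving D0 low (busy) */
#define BUSYD0END (1u << 1)   /* end-of-busy event */

struct host { uint32_t mask; uint32_t busy_status; };

/* Returns true once the busy phase has fully completed. */
static bool busy_complete(struct host *h, uint32_t status)
{
	if ((status & BUSYD0) && !h->busy_status) {
		h->mask |= BUSYD0END;     /* arm the end-of-busy interrupt */
		h->busy_status = status;  /* remember that we are waiting */
		return false;
	}

	if (h->busy_status) {
		h->mask &= ~BUSYD0END;    /* busy ended: restore the irq mask */
		h->busy_status = 0;
	}
	return true;
}

int main(void)
{
	struct host h = { 0, 0 };

	printf("first pass done=%d\n", busy_complete(&h, BUSYD0));
	printf("second pass done=%d\n", busy_complete(&h, BUSYD0END));
	return 0;
}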
372 int unit, int phase, bool sampler) in sdmmc_dlyb_set_cfgr() argument
376 writel_relaxed(DLYB_CR_SEN | DLYB_CR_DEN, dlyb->base + DLYB_CR); in sdmmc_dlyb_set_cfgr()
379 FIELD_PREP(DLYB_CFGR_SEL_MASK, phase); in sdmmc_dlyb_set_cfgr()
380 writel_relaxed(cfgr, dlyb->base + DLYB_CFGR); in sdmmc_dlyb_set_cfgr()
383 writel_relaxed(DLYB_CR_DEN, dlyb->base + DLYB_CR); in sdmmc_dlyb_set_cfgr()
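sdmmc_dlyb_set_cfgr() first enables the delay line (and its sampler) through DLYB_CR, then packs the selected unit delay and output phase into DLYB_CFGR with FIELD_PREP, and appears to drop the sampler enable again when only the delayed clock output is needed. A minimal userspace model of that field packing; the field widths and the field_prep helper are assumptions, not taken from the listing:

#include <stdio.h>

/* Assumed field layout, for illustration only. */
#define CFGR_SEL_MASK  0x000fu   /* phase / output select */
#define CFGR_UNIT_MASK 0x7f00u   /* unit delay */

/* Tiny stand-in for the kernel's FIELD_PREP(): shift the value into the
 * position given by the mask (uses a GCC/Clang builtin). */
static unsigned int field_prep(unsigned int mask, unsigned int val)
{
	return (val << __builtin_ctz(mask)) & mask;
}

int main(void)
{
	int unit = 23, phase = 5;
	unsigned int cfgr = field_prep(CFGR_UNIT_MASK, (unsigned int)unit) |
			    field_prep(CFGR_SEL_MASK, (unsigned int)phase);

	printf("DLYB_CFGR = %#x\n", cfgr);   /* unit in the high field, phase in the low one */
	return 0;
}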
388 struct sdmmc_dlyb *dlyb = host->variant_priv; in sdmmc_dlyb_lng_tuning()
395 ret = readl_relaxed_poll_timeout(dlyb->base + DLYB_CFGR, cfgr, in sdmmc_dlyb_lng_tuning()
399 dev_warn(mmc_dev(host->mmc), in sdmmc_dlyb_lng_tuning()
411 return -EINVAL; in sdmmc_dlyb_lng_tuning()
413 dlyb->unit = i; in sdmmc_dlyb_lng_tuning()
414 dlyb->max = __fls(lng); in sdmmc_dlyb_lng_tuning()
421 struct sdmmc_dlyb *dlyb = host->variant_priv; in sdmmc_dlyb_phase_tuning()
423 int phase; in sdmmc_dlyb_phase_tuning() local
425 for (phase = 0; phase <= dlyb->max; phase++) { in sdmmc_dlyb_phase_tuning()
426 sdmmc_dlyb_set_cfgr(dlyb, dlyb->unit, phase, false); in sdmmc_dlyb_phase_tuning()
428 if (mmc_send_tuning(host->mmc, opcode, NULL)) { in sdmmc_dlyb_phase_tuning()
434 end_of_len = phase; in sdmmc_dlyb_phase_tuning()
440 dev_err(mmc_dev(host->mmc), "no tuning point found\n"); in sdmmc_dlyb_phase_tuning()
441 return -EINVAL; in sdmmc_dlyb_phase_tuning()
444 phase = end_of_len - max_len / 2; in sdmmc_dlyb_phase_tuning()
445 sdmmc_dlyb_set_cfgr(dlyb, dlyb->unit, phase, false); in sdmmc_dlyb_phase_tuning()
447 dev_dbg(mmc_dev(host->mmc), "unit:%d max_dly:%d phase:%d\n", in sdmmc_dlyb_phase_tuning()
448 dlyb->unit, dlyb->max, phase); in sdmmc_dlyb_phase_tuning()
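The phase tuning loop walks every output phase of the delay line, sends a tuning command at each setting, tracks the longest run of consecutive passing phases, and finally programs the midpoint of that run (phase = end_of_len - max_len / 2). A self-contained sketch of that window-midpoint selection over a precomputed pass/fail vector; issuing the tuning command itself is out of scope here:

#include <stdbool.h>
#include <stdio.h>

/* Pick the middle of the longest run of passing phases.
 * Returns -1 when no phase passed at all. */
static int pick_phase(const bool *pass, int nr_phases)
{
	int cur_len = 0, max_len = 0, end_of_len = 0;

	for (int phase = 0; phase < nr_phases; phase++) {
		if (!pass[phase]) {
			cur_len = 0;          /* tuning failed: the window is broken */
			continue;
		}
		cur_len++;
		if (cur_len > max_len) {
			max_len = cur_len;
			end_of_len = phase;   /* remember where the best window ends */
		}
	}

	if (!max_len)
		return -1;

	return end_of_len - max_len / 2;
}

int main(void)
{
	/* Phases 3..9 pass; the midpoint of that window is phase 6. */
	bool pass[12] = { 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 0, 0 };

	printf("selected phase = %d\n", pick_phase(pass, 12));
	return 0;
}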
456 struct sdmmc_dlyb *dlyb = host->variant_priv; in sdmmc_execute_tuning()
458 if (!dlyb || !dlyb->base) in sdmmc_execute_tuning()
459 return -EINVAL; in sdmmc_execute_tuning()
462 return -EINVAL; in sdmmc_execute_tuning()
470 writel_relaxed(MCI_STM32_VSWENDC, host->base + MMCICLEAR); in sdmmc_pre_sig_volt_vswitch()
472 mmci_write_pwrreg(host, host->pwr_reg | MCI_STM32_VSWITCHEN); in sdmmc_pre_sig_volt_vswitch()
482 if (ios->signal_voltage == MMC_SIGNAL_VOLTAGE_180) { in sdmmc_post_sig_volt_switch()
483 spin_lock_irqsave(&host->lock, flags); in sdmmc_post_sig_volt_switch()
484 mmci_write_pwrreg(host, host->pwr_reg | MCI_STM32_VSWITCH); in sdmmc_post_sig_volt_switch()
485 spin_unlock_irqrestore(&host->lock, flags); in sdmmc_post_sig_volt_switch()
488 ret = readl_relaxed_poll_timeout(host->base + MMCISTATUS, in sdmmc_post_sig_volt_switch()
494 host->base + MMCICLEAR); in sdmmc_post_sig_volt_switch()
495 mmci_write_pwrreg(host, host->pwr_reg & in sdmmc_post_sig_volt_switch()
519 struct device_node *np = host->mmc->parent->of_node; in sdmmc_variant_init()
523 host->ops = &sdmmc_variant_ops; in sdmmc_variant_init()
524 host->pwr_reg = readl_relaxed(host->base + MMCIPOWER); in sdmmc_variant_init()
526 base_dlyb = devm_of_iomap(mmc_dev(host->mmc), np, 1, NULL); in sdmmc_variant_init()
530 dlyb = devm_kzalloc(mmc_dev(host->mmc), sizeof(*dlyb), GFP_KERNEL); in sdmmc_variant_init()
534 dlyb->base = base_dlyb; in sdmmc_variant_init()
535 host->variant_priv = dlyb; in sdmmc_variant_init()
536 host->mmc_ops->execute_tuning = sdmmc_execute_tuning; in sdmmc_variant_init()