Lines matching refs: idev (struct axxia_i2c_dev, the per-adapter state of the Axxia I2C bus driver)
154 static void i2c_int_disable(struct axxia_i2c_dev *idev, u32 mask)
158 int_en = readl(idev->base + MST_INT_ENABLE);
159 writel(int_en & ~mask, idev->base + MST_INT_ENABLE);
162 static void i2c_int_enable(struct axxia_i2c_dev *idev, u32 mask)
166 int_en = readl(idev->base + MST_INT_ENABLE);
167 writel(int_en | mask, idev->base + MST_INT_ENABLE);
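Both helpers above do the same read-modify-write on MST_INT_ENABLE. A minimal sketch of that pattern folded into one routine, assuming only what the listed lines show (the combined helper name is illustrative, not part of the driver):

static void i2c_int_update(struct axxia_i2c_dev *idev, u32 set, u32 clear)
{
        u32 int_en = readl(idev->base + MST_INT_ENABLE);

        int_en &= ~clear;       /* what i2c_int_disable() does with its mask */
        int_en |= set;          /* what i2c_int_enable() does with its mask  */
        writel(int_en, idev->base + MST_INT_ENABLE);
}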
178 static int axxia_i2c_init(struct axxia_i2c_dev *idev)
180 u32 divisor = clk_get_rate(idev->i2c_clk) / idev->bus_clk_rate;
181 u32 clk_mhz = clk_get_rate(idev->i2c_clk) / 1000000;
188 dev_dbg(idev->dev, "rate=%uHz per_clk=%uMHz -> ratio=1:%u\n",
189 idev->bus_clk_rate, clk_mhz, divisor);
192 writel(0x01, idev->base + SOFT_RESET);
194 while (readl(idev->base + SOFT_RESET) & 1) {
196 dev_warn(idev->dev, "Soft reset failed\n");
202 writel(0x1, idev->base + GLOBAL_CONTROL);
204 if (idev->bus_clk_rate <= I2C_MAX_STANDARD_MODE_FREQ) {
217 writel(t_high, idev->base + SCL_HIGH_PERIOD);
219 writel(t_low, idev->base + SCL_LOW_PERIOD);
221 writel(t_setup, idev->base + SDA_SETUP_TIME);
223 writel(ns_to_clk(300, clk_mhz), idev->base + SDA_HOLD_TIME);
225 writel(ns_to_clk(50, clk_mhz), idev->base + SPIKE_FLTR_LEN);
240 writel(prescale, idev->base + TIMER_CLOCK_DIV);
242 writel(WT_EN | WT_VALUE(tmo_clk), idev->base + WAIT_TIMER_CONTROL);
245 i2c_int_disable(idev, ~0);
248 writel(0x01, idev->base + INTERRUPT_ENABLE);
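The SDA_HOLD_TIME and SPIKE_FLTR_LEN writes above rely on an ns_to_clk() conversion that the refs filter hides; a sketch under the assumption that it simply scales nanoseconds by the peripheral clock in MHz. With a hypothetical 200 MHz i2c_clk, 300 ns of SDA hold becomes 60 ticks, the 50 ns spike filter becomes 10 ticks, and a 100 kHz bus gives divisor = 200000000 / 100000 = 2000.

static u32 ns_to_clk(u64 ns, u32 clk_mhz)
{
        /* nanoseconds -> peripheral-clock ticks: ns * (rate in MHz) / 1000 */
        return div_u64(ns * clk_mhz, 1000);
}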
267 static int axxia_i2c_empty_rx_fifo(struct axxia_i2c_dev *idev)
269 struct i2c_msg *msg = idev->msg_r;
270 size_t rx_fifo_avail = readl(idev->base + MST_RX_FIFO);
271 int bytes_to_transfer = min(rx_fifo_avail, msg->len - idev->msg_xfrd_r);
274 int c = readl(idev->base + MST_DATA);
276 if (idev->msg_xfrd_r == 0 && i2c_m_recv_len(msg)) {
281 idev->msg_err = -EPROTO;
282 i2c_int_disable(idev, ~MST_STATUS_TSS);
283 complete(&idev->msg_complete);
287 writel(msg->len, idev->base + MST_RX_XFER);
289 msg->buf[idev->msg_xfrd_r++] = c;
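A sketch of how the axxia_i2c_empty_rx_fifo() lines above plausibly fit together as a drain loop; the I2C_SMBUS_BLOCK_MAX bound on the first received byte is an assumption about the length check that the refs filter hides:

        while (bytes_to_transfer-- > 0) {
                int c = readl(idev->base + MST_DATA);

                if (idev->msg_xfrd_r == 0 && i2c_m_recv_len(msg)) {
                        /* First byte of an I2C_M_RECV_LEN (SMBus block) read
                         * is the byte count announced by the slave. */
                        if (c <= 0 || c > I2C_SMBUS_BLOCK_MAX) {
                                idev->msg_err = -EPROTO;
                                i2c_int_disable(idev, ~MST_STATUS_TSS);
                                complete(&idev->msg_complete);
                                break;
                        }
                        msg->len = 1 + c;
                        writel(msg->len, idev->base + MST_RX_XFER);
                }
                msg->buf[idev->msg_xfrd_r++] = c;
        }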
299 static int axxia_i2c_fill_tx_fifo(struct axxia_i2c_dev *idev)
301 struct i2c_msg *msg = idev->msg;
302 size_t tx_fifo_avail = FIFO_SIZE - readl(idev->base + MST_TX_FIFO);
303 int bytes_to_transfer = min(tx_fifo_avail, msg->len - idev->msg_xfrd);
304 int ret = msg->len - idev->msg_xfrd - bytes_to_transfer;
307 writel(msg->buf[idev->msg_xfrd++], idev->base + MST_DATA);
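The loop that feeds the TX FIFO is mostly hidden by the filter; a plausible reconstruction of how axxia_i2c_fill_tx_fifo() uses the values computed above. Its return value is the count of bytes still waiting for FIFO space, which is why the master ISR drops the MST_STATUS_TFL interrupt once the function returns 0:

        while (bytes_to_transfer-- > 0)
                writel(msg->buf[idev->msg_xfrd++], idev->base + MST_DATA);

        return ret;     /* bytes of the message not yet queued */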
312 static void axxia_i2c_slv_fifo_event(struct axxia_i2c_dev *idev)
314 u32 fifo_status = readl(idev->base + SLV_RX_FIFO);
317 dev_dbg(idev->dev, "slave irq fifo_status=0x%x\n", fifo_status);
321 i2c_slave_event(idev->slave,
324 val = readl(idev->base + SLV_DATA);
325 i2c_slave_event(idev->slave, I2C_SLAVE_WRITE_RECEIVED, &val);
328 readl(idev->base + SLV_DATA); /* dummy read */
329 i2c_slave_event(idev->slave, I2C_SLAVE_STOP, &val);
332 readl(idev->base + SLV_DATA); /* dummy read */
335 static irqreturn_t axxia_i2c_slv_isr(struct axxia_i2c_dev *idev)
337 u32 status = readl(idev->base + SLV_INT_STATUS);
340 dev_dbg(idev->dev, "slave irq status=0x%x\n", status);
343 axxia_i2c_slv_fifo_event(idev);
345 i2c_slave_event(idev->slave, I2C_SLAVE_READ_REQUESTED, &val);
346 writel(val, idev->base + SLV_DATA);
349 i2c_slave_event(idev->slave, I2C_SLAVE_READ_PROCESSED, &val);
350 writel(val, idev->base + SLV_DATA);
353 i2c_slave_event(idev->slave, I2C_SLAVE_STOP, &val);
355 writel(INT_SLV, idev->base + INTERRUPT_STATUS);
361 struct axxia_i2c_dev *idev = _dev;
365 status = readl(idev->base + INTERRUPT_STATUS);
368 ret = axxia_i2c_slv_isr(idev);
373 status = readl(idev->base + MST_INT_STATUS);
375 if (!idev->msg) {
376 dev_warn(idev->dev, "unexpected interrupt\n");
381 if (i2c_m_rd(idev->msg_r) && (status & MST_STATUS_RFL))
382 axxia_i2c_empty_rx_fifo(idev);
385 if (!i2c_m_rd(idev->msg) && (status & MST_STATUS_TFL)) {
386 if (axxia_i2c_fill_tx_fifo(idev) == 0)
387 i2c_int_disable(idev, MST_STATUS_TFL);
392 i2c_int_disable(idev, ~0);
394 idev->msg_err = -EAGAIN;
396 idev->msg_err = -ENXIO;
398 idev->msg_err = -EIO;
399 dev_dbg(idev->dev, "error %#x, addr=%#x rx=%u/%u tx=%u/%u\n",
401 idev->msg->addr,
402 readl(idev->base + MST_RX_BYTES_XFRD),
403 readl(idev->base + MST_RX_XFER),
404 readl(idev->base + MST_TX_BYTES_XFRD),
405 readl(idev->base + MST_TX_XFER));
406 complete(&idev->msg_complete);
409 i2c_int_disable(idev, ~MST_STATUS_TSS);
410 complete(&idev->msg_complete);
413 int mask = idev->last ? ~0 : ~MST_STATUS_TSS;
415 i2c_int_disable(idev, mask);
416 if (i2c_m_rd(idev->msg_r) && idev->msg_xfrd_r < idev->msg_r->len)
417 axxia_i2c_empty_rx_fifo(idev);
418 complete(&idev->msg_complete);
421 idev->msg_err = -ETIMEDOUT;
422 i2c_int_disable(idev, ~MST_STATUS_TSS);
423 complete(&idev->msg_complete);
428 writel(INT_MST, idev->base + INTERRUPT_STATUS);
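The -EAGAIN/-ENXIO/-EIO assignments in the master ISR follow the usual decode of the error status; a sketch of that branch, where the MST_STATUS_AL and MST_STATUS_NAK bit names are assumptions (the refs filter only keeps the idev-> lines):

        if (status & MST_STATUS_AL)             /* arbitration lost       */
                idev->msg_err = -EAGAIN;
        else if (status & MST_STATUS_NAK)       /* address or data NAKed  */
                idev->msg_err = -ENXIO;
        else                                    /* any other bus error    */
                idev->msg_err = -EIO;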
433 static void axxia_i2c_set_addr(struct axxia_i2c_dev *idev, struct i2c_msg *msg)
445 writel(addr_1, idev->base + MST_ADDR_1);
446 writel(addr_2, idev->base + MST_ADDR_2);
453 static int axxia_i2c_handle_seq_nak(struct axxia_i2c_dev *idev)
458 if ((readl(idev->base + MST_COMMAND) & CMD_BUSY) == 0)
466 static int axxia_i2c_xfer_seq(struct axxia_i2c_dev *idev, struct i2c_msg msgs[])
472 axxia_i2c_set_addr(idev, &msgs[0]);
474 writel(msgs[0].len, idev->base + MST_TX_XFER);
475 writel(rlen, idev->base + MST_RX_XFER);
477 idev->msg = &msgs[0];
478 idev->msg_r = &msgs[1];
479 idev->msg_xfrd = 0;
480 idev->msg_xfrd_r = 0;
481 idev->last = true;
482 axxia_i2c_fill_tx_fifo(idev);
484 writel(CMD_SEQUENCE, idev->base + MST_COMMAND);
486 reinit_completion(&idev->msg_complete);
487 i2c_int_enable(idev, int_mask);
489 time_left = wait_for_completion_timeout(&idev->msg_complete,
492 if (idev->msg_err == -ENXIO) {
493 if (axxia_i2c_handle_seq_nak(idev))
494 axxia_i2c_init(idev);
495 } else if (readl(idev->base + MST_COMMAND) & CMD_BUSY) {
496 dev_warn(idev->dev, "busy after xfer\n");
500 idev->msg_err = -ETIMEDOUT;
501 i2c_recover_bus(&idev->adapter);
502 axxia_i2c_init(idev);
505 if (unlikely(idev->msg_err) && idev->msg_err != -ENXIO)
506 axxia_i2c_init(idev);
508 return idev->msg_err;
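axxia_i2c_xfer_seq() exists for the common write-then-read pair, which the hardware can run as one CMD_SEQUENCE. A usage sketch from a hypothetical client driver's side (the register layout and helper name are made up); when the pair qualifies, axxia_i2c_xfer() takes this sequence path instead of two manual transfers:

static int example_read_reg(struct i2c_client *client, u8 reg, u8 *val)
{
        struct i2c_msg msgs[2] = {
                { .addr = client->addr, .flags = 0,        .len = 1, .buf = &reg },
                { .addr = client->addr, .flags = I2C_M_RD, .len = 1, .buf = val  },
        };
        int ret = i2c_transfer(client->adapter, msgs, 2);

        return ret == 2 ? 0 : (ret < 0 ? ret : -EIO);
}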
511 static int axxia_i2c_xfer_msg(struct axxia_i2c_dev *idev, struct i2c_msg *msg,
519 idev->msg = msg;
520 idev->msg_r = msg;
521 idev->msg_xfrd = 0;
522 idev->msg_xfrd_r = 0;
523 idev->last = last;
524 reinit_completion(&idev->msg_complete);
526 axxia_i2c_set_addr(idev, msg);
538 writel(rx_xfer, idev->base + MST_RX_XFER);
539 writel(tx_xfer, idev->base + MST_TX_XFER);
543 else if (axxia_i2c_fill_tx_fifo(idev) != 0)
546 wt_value = WT_VALUE(readl(idev->base + WAIT_TIMER_CONTROL));
548 writel(wt_value, idev->base + WAIT_TIMER_CONTROL);
550 if (idev->msg_err)
554 writel(CMD_MANUAL, idev->base + MST_COMMAND);
557 writel(CMD_AUTO, idev->base + MST_COMMAND);
561 writel(WT_EN | wt_value, idev->base + WAIT_TIMER_CONTROL);
563 i2c_int_enable(idev, int_mask);
565 time_left = wait_for_completion_timeout(&idev->msg_complete,
568 i2c_int_disable(idev, int_mask);
570 if (readl(idev->base + MST_COMMAND) & CMD_BUSY)
571 dev_warn(idev->dev, "busy after xfer\n");
574 idev->msg_err = -ETIMEDOUT;
575 i2c_recover_bus(&idev->adapter);
576 axxia_i2c_init(idev);
580 if (unlikely(idev->msg_err) && idev->msg_err != -ENXIO &&
581 idev->msg_err != -ETIMEDOUT)
582 axxia_i2c_init(idev);
584 return idev->msg_err;
602 struct axxia_i2c_dev *idev = i2c_get_adapdata(adap);
606 idev->msg_err = 0;
609 ret = axxia_i2c_xfer_seq(idev, msgs);
613 i2c_int_enable(idev, MST_STATUS_TSS);
616 ret = axxia_i2c_xfer_msg(idev, &msgs[i], i == (num - 1));
623 struct axxia_i2c_dev *idev = i2c_get_adapdata(adap);
625 return !!(readl(idev->base + I2C_BUS_MONITOR) & BM_SCLS);
630 struct axxia_i2c_dev *idev = i2c_get_adapdata(adap);
634 tmp = readl(idev->base + I2C_BUS_MONITOR) & BM_SDAC;
637 writel(tmp, idev->base + I2C_BUS_MONITOR);
642 struct axxia_i2c_dev *idev = i2c_get_adapdata(adap);
644 return !!(readl(idev->base + I2C_BUS_MONITOR) & BM_SDAS);
663 struct axxia_i2c_dev *idev = i2c_get_adapdata(slave->adapter);
667 if (idev->slave)
670 idev->slave = slave;
673 writel(GLOBAL_MST_EN | GLOBAL_SLV_EN, idev->base + GLOBAL_CONTROL);
674 writel(INT_MST | INT_SLV, idev->base + INTERRUPT_ENABLE);
681 writel(SLV_RX_ACSA1, idev->base + SLV_RX_CTL);
682 writel(dec_ctl, idev->base + SLV_ADDR_DEC_CTL);
683 writel(slave->addr, idev->base + SLV_ADDR_1);
688 writel(slv_int_mask, idev->base + SLV_INT_ENABLE);
695 struct axxia_i2c_dev *idev = i2c_get_adapdata(slave->adapter);
698 writel(GLOBAL_MST_EN, idev->base + GLOBAL_CONTROL);
699 writel(INT_MST, idev->base + INTERRUPT_ENABLE);
701 synchronize_irq(idev->irq);
703 idev->slave = NULL;
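The i2c_slave_event() calls in axxia_i2c_slv_isr()/axxia_i2c_slv_fifo_event() land in whatever backend was registered against this adapter. A minimal hypothetical backend callback, to show the events the driver generates; it would be wired up with i2c_slave_register(client, example_slave_cb):

static int example_slave_cb(struct i2c_client *client,
                            enum i2c_slave_event event, u8 *val)
{
        switch (event) {
        case I2C_SLAVE_WRITE_REQUESTED:
                break;                  /* remote master is about to write    */
        case I2C_SLAVE_WRITE_RECEIVED:
                /* *val is the byte the driver read from SLV_DATA */
                break;
        case I2C_SLAVE_READ_REQUESTED:
        case I2C_SLAVE_READ_PROCESSED:
                *val = 0xff;            /* byte the driver writes to SLV_DATA */
                break;
        case I2C_SLAVE_STOP:
                break;
        }
        return 0;
}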
723 struct axxia_i2c_dev *idev = NULL;
727 idev = devm_kzalloc(&pdev->dev, sizeof(*idev), GFP_KERNEL);
728 if (!idev)
735 idev->irq = platform_get_irq(pdev, 0);
736 if (idev->irq < 0)
737 return idev->irq;
739 idev->i2c_clk = devm_clk_get(&pdev->dev, "i2c");
740 if (IS_ERR(idev->i2c_clk)) {
742 return PTR_ERR(idev->i2c_clk);
745 idev->base = base;
746 idev->dev = &pdev->dev;
747 init_completion(&idev->msg_complete);
749 of_property_read_u32(np, "clock-frequency", &idev->bus_clk_rate);
750 if (idev->bus_clk_rate == 0)
751 idev->bus_clk_rate = I2C_MAX_STANDARD_MODE_FREQ; /* default clock rate */
753 ret = clk_prepare_enable(idev->i2c_clk);
759 ret = axxia_i2c_init(idev);
765 ret = devm_request_irq(&pdev->dev, idev->irq, axxia_i2c_isr, 0,
766 pdev->name, idev);
768 dev_err(&pdev->dev, "failed to claim IRQ%d\n", idev->irq);
772 i2c_set_adapdata(&idev->adapter, idev);
773 strscpy(idev->adapter.name, pdev->name, sizeof(idev->adapter.name));
774 idev->adapter.owner = THIS_MODULE;
775 idev->adapter.algo = &axxia_i2c_algo;
776 idev->adapter.bus_recovery_info = &axxia_i2c_recovery_info;
777 idev->adapter.quirks = &axxia_i2c_quirks;
778 idev->adapter.dev.parent = &pdev->dev;
779 idev->adapter.dev.of_node = pdev->dev.of_node;
781 platform_set_drvdata(pdev, idev);
783 ret = i2c_add_adapter(&idev->adapter);
790 clk_disable_unprepare(idev->i2c_clk);
796 struct axxia_i2c_dev *idev = platform_get_drvdata(pdev);
798 clk_disable_unprepare(idev->i2c_clk);
799 i2c_del_adapter(&idev->adapter);