Lines Matching refs:hw_ep
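The fragments below appear to come from the host side of the Linux MUSB dual-role controller driver; in mainline, the functions named here (musb_h_tx_start(), musb_advance_schedule(), musb_host_rx(), ...) live in drivers/usb/musb/musb_host.c, and the leading number on each fragment is the source line in that file. Most hits revolve around one relationship: a software queue head, struct musb_qh, is bound to a hardware endpoint, struct musb_hw_ep, one queue head per direction. A minimal compilable sketch of that association follows; the field and helper names match the fragments, everything else is a stand-in.

struct musb_qh;

struct musb_hw_ep {                 /* stand-in: only the linkage fields */
	int epnum;
	struct musb_qh *in_qh;      /* queue head currently using the RX side */
	struct musb_qh *out_qh;     /* queue head currently using the TX side */
};

struct musb_qh {
	struct musb_hw_ep *hw_ep;   /* back-pointer, set when scheduled */
};

/* Shaped like musb_ep_get_qh()/musb_ep_set_qh() as used at source
 * lines 235, 300 and 329 below: one qh per hw_ep per direction.
 */
static struct musb_qh *musb_ep_get_qh(struct musb_hw_ep *ep, int is_in)
{
	return is_in ? ep->in_qh : ep->out_qh;
}

static void musb_ep_set_qh(struct musb_hw_ep *ep, int is_in,
			   struct musb_qh *qh)
{
	if (qh)
		qh->hw_ep = ep;     /* keep the back-pointer consistent */
	if (is_in)
		ep->in_qh = qh;
	else
		ep->out_qh = qh;
}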
204 struct musb_hw_ep *hw_ep = qh->hw_ep;
205 int epnum = hw_ep->epnum;
235 musb_ep_set_qh(hw_ep, is_in, qh);
268 hw_ep->tx_channel ? "dma" : "pio");
270 if (!hw_ep->tx_channel)
271 musb_h_tx_start(hw_ep);
273 musb_h_tx_dma_start(hw_ep);
298 struct musb_hw_ep *hw_ep, int is_in)
300 struct musb_qh *qh = musb_ep_get_qh(hw_ep, is_in);
301 struct musb_hw_ep *ep = qh->hw_ep;
329 qh = musb_ep_get_qh(hw_ep, is_in);
386 hw_ep->epnum, is_in ? 'R' : 'T', next_urb(qh));
391 static u16 musb_h_flush_rxfifo(struct musb_hw_ep *hw_ep, u16 csr)
403 musb_writew(hw_ep->regs, MUSB_RXCSR, csr);
404 musb_writew(hw_ep->regs, MUSB_RXCSR, csr);
407 return musb_readw(hw_ep->regs, MUSB_RXCSR);
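Source lines 391-407 above are the RX FIFO flush helper: the same CSR value is written to MUSB_RXCSR twice and then read back. The mainline comments explain both quirks: the double write covers both packets of a double-buffered FIFO, and the trailing read flushes the posted write. A runnable stand-alone sketch of the pattern, with a fake register file standing in for hw_ep->regs; the register offset and bit values are as I recall them from musb_regs.h, everything else is simplified.

#include <stdint.h>
#include <stdio.h>

#define MUSB_RXCSR		0x06	/* per-endpoint CSR offset */
#define MUSB_RXCSR_FLUSHFIFO	0x0010
#define MUSB_RXCSR_RXPKTRDY	0x0001

static uint16_t fake_regs[8];		/* stand-in for hw_ep->regs MMIO */

static void musb_writew(unsigned off, uint16_t val) { fake_regs[off / 2] = val; }
static uint16_t musb_readw(unsigned off) { return fake_regs[off / 2]; }

static uint16_t flush_rxfifo(uint16_t csr)
{
	csr |= MUSB_RXCSR_FLUSHFIFO | MUSB_RXCSR_RXPKTRDY;

	/* written twice: one flush per half of a double-buffered FIFO
	 * (this is the pair at source lines 403/404)
	 */
	musb_writew(MUSB_RXCSR, csr);
	musb_writew(MUSB_RXCSR, csr);

	/* read back to flush the posted write (source line 407) */
	return musb_readw(MUSB_RXCSR);
}

int main(void)
{
	printf("RXCSR after flush: 0x%04x\n", flush_rxfifo(0));
	return 0;
}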
422 struct musb_hw_ep *hw_ep = musb->endpoints + epnum;
423 void __iomem *epio = hw_ep->regs;
424 struct musb_qh *qh = hw_ep->in_qh;
489 musb_read_fifo(hw_ep, length, buf);
494 musb_h_flush_rxfifo(hw_ep, csr);
573 static void musb_tx_dma_set_mode_mentor(struct musb_hw_ep *hw_ep,
577 struct dma_channel *channel = hw_ep->tx_channel;
578 void __iomem *epio = hw_ep->regs;
600 can_bulk_split(hw_ep->musb, qh->type)))
611 static void musb_tx_dma_set_mode_cppi_tusb(struct musb_hw_ep *hw_ep,
615 struct dma_channel *channel = hw_ep->tx_channel;
627 struct musb_hw_ep *hw_ep, struct musb_qh *qh,
630 struct dma_channel *channel = hw_ep->tx_channel;
634 if (musb_dma_inventra(hw_ep->musb) || musb_dma_ux500(hw_ep->musb))
635 musb_tx_dma_set_mode_mentor(hw_ep, qh,
637 else if (is_cppi_enabled(hw_ep->musb) || tusb_dma_omap(hw_ep->musb))
638 musb_tx_dma_set_mode_cppi_tusb(hw_ep, urb, &mode);
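Source lines 573-638 show that TX DMA setup is split by engine type: Mentor/Inventra and ux500 DMA take one path, CPPI and TUSB OMAP DMA take another, and musb_tx_dma_program() merely dispatches between them. A sketch of that dispatch shape, with an enum standing in for the musb_dma_inventra()/is_cppi_enabled()-style platform predicates:

#include <stdbool.h>
#include <stdint.h>

enum dma_engine { ENG_INVENTRA, ENG_UX500, ENG_CPPI, ENG_TUSB_OMAP, ENG_NONE };

/* Stand-ins for the two per-engine setup helpers named at source
 * lines 573 and 611; what they program differs, the split does not.
 */
static void set_mode_mentor(uint8_t *mode)    { *mode = 1; }
static void set_mode_cppi_tusb(uint8_t *mode) { *mode = 0; }

static bool tx_dma_program(enum dma_engine eng, uint8_t *mode)
{
	switch (eng) {
	case ENG_INVENTRA:
	case ENG_UX500:
		set_mode_mentor(mode);       /* source lines 634-635 */
		return true;
	case ENG_CPPI:
	case ENG_TUSB_OMAP:
		set_mode_cppi_tusb(mode);    /* source lines 637-638 */
		return true;
	default:
		return false;                /* no DMA: caller falls back to PIO */
	}
}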
652 void __iomem *epio = hw_ep->regs;
656 hw_ep->tx_channel = NULL;
678 struct musb_hw_ep *hw_ep = musb->endpoints + epnum;
679 void __iomem *epio = hw_ep->regs;
680 struct musb_qh *qh = musb_ep_get_qh(hw_ep, !is_out);
700 hw_ep->tx_channel = NULL;
706 dma_channel = is_out ? hw_ep->tx_channel : hw_ep->rx_channel;
709 dma_controller, hw_ep, is_out);
711 hw_ep->tx_channel = dma_channel;
713 hw_ep->rx_channel = dma_channel;
740 if (!hw_ep->tx_double_buffered)
741 musb_h_tx_flush_fifo(hw_ep);
758 if (!hw_ep->tx_double_buffered)
768 musb_h_ep0_flush_fifo(hw_ep);
784 qh->hb_mult = hw_ep->max_packet_sz_tx
802 load_count = min_t(u32, hw_ep->max_packet_sz_tx, len);
807 hw_ep, qh, urb, offset, len))
828 musb_write_fifo(hw_ep, load_count, buf);
832 musb_write_fifo(hw_ep, load_count, buf);
842 if (hw_ep->rx_reinit) {
850 csr = musb_readw(hw_ep->regs, MUSB_RXCSR);
856 hw_ep->epnum, csr);
870 musb_writew(hw_ep->regs, MUSB_RXCSR, csr);
871 csr = musb_readw(hw_ep->regs, MUSB_RXCSR);
884 hw_ep->rx_channel = dma_channel = NULL;
891 musb_writew(hw_ep->regs, MUSB_RXCSR, csr);
892 csr = musb_readw(hw_ep->regs, MUSB_RXCSR);
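A recurring shape in the programming paths above, visible at source lines 656, 700 and 884: when a DMA channel cannot be obtained or programmed, the driver nulls hw_ep->tx_channel / hw_ep->rx_channel and falls back to PIO rather than failing the transfer. A minimal sketch of that fallback, with stubs standing in for the dma_controller channel hooks:

#include <stdbool.h>
#include <stddef.h>

struct dma_channel { int id; };

/* Stubs standing in for the controller's channel_alloc()/channel_program() */
static struct dma_channel *channel_alloc(void)   { return NULL; /* exhausted */ }
static bool channel_program(struct dma_channel *ch,
			    const void *buf, size_t len)
{ (void)ch; (void)buf; (void)len; return false; }

static void pio_transfer(const void *buf, size_t len) { (void)buf; (void)len; }

static void start_transfer(struct dma_channel **chan_slot,
			   const void *buf, size_t len)
{
	struct dma_channel *ch = *chan_slot;

	if (!ch)
		ch = *chan_slot = channel_alloc();

	if (ch && !channel_program(ch, buf, len)) {
		*chan_slot = NULL;          /* forget the channel, as at line 884 */
		ch = NULL;
	}

	if (!ch)
		pio_transfer(buf, len);     /* DMA unavailable: PIO still works */
}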
983 struct musb_hw_ep *hw_ep = musb->control_ep;
984 struct musb_qh *qh = hw_ep->in_qh;
995 musb_read_fifo(hw_ep, fifo_count, fifo_dest);
1034 musb_write_fifo(hw_ep, fifo_count, fifo_dest);
1060 struct musb_hw_ep *hw_ep = musb->control_ep;
1061 void __iomem *epio = hw_ep->regs;
1062 struct musb_qh *qh = hw_ep->in_qh;
1122 musb_h_ep0_flush_fifo(hw_ep);
1136 musb_h_ep0_flush_fifo(hw_ep);
1172 musb_advance_schedule(musb, urb, hw_ep, 1);
1202 struct musb_hw_ep *hw_ep = musb->endpoints + epnum;
1203 void __iomem *epio = hw_ep->regs;
1204 struct musb_qh *qh = hw_ep->out_qh;
1221 dma = is_dma_capable() ? hw_ep->tx_channel : NULL;
1244 musb_bulk_nak_timeout(musb, hw_ep, 0);
1273 musb_h_tx_flush_fifo(hw_ep);
1409 musb_advance_schedule(musb, urb, hw_ep, USB_DIR_OUT);
1412 if (musb_tx_dma_program(musb->dma_controller, hw_ep, qh, urb,
1415 musb_h_tx_dma_start(hw_ep);
1426 * REVISIT: some docs say that when hw_ep->tx_double_buffered,
1448 musb_write_fifo(hw_ep, length, qh->sg_miter.addr);
1452 musb_write_fifo(hw_ep, length, urb->transfer_buffer + offset);
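Source lines 1448 and 1452 are the two PIO paths that feed the TX FIFO: one walks the URB's scatter-gather list via an sg_miter, the other copies straight from urb->transfer_buffer at the current offset. A user-space sketch of the same split, with a simple chunk array standing in for the sg_miter walk:

#include <stddef.h>
#include <stdint.h>

struct chunk { const uint8_t *addr; size_t len; };	/* stand-in sg entry */

static void write_fifo(const uint8_t *buf, size_t len)	/* musb_write_fifo() */
{ (void)buf; (void)len; }

static void load_tx_fifo(const struct chunk *sg, size_t nsg,
			 const uint8_t *linear, size_t offset, size_t length)
{
	if (sg) {
		/* scatter-gather: as at source line 1448 */
		for (size_t i = 0; i < nsg && length > 0; i++) {
			size_t n = sg[i].len < length ? sg[i].len : length;
			write_fifo(sg[i].addr, n);
			length -= n;
		}
	} else {
		/* linear buffer: as at source line 1452 */
		write_fifo(linear + offset, length);
	}
}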
1465 struct musb_hw_ep *hw_ep,
1470 struct dma_channel *channel = hw_ep->rx_channel;
1471 void __iomem *epio = hw_ep->regs;
1483 musb_writew(hw_ep->regs, MUSB_RXCSR, val);
1490 struct musb_hw_ep *hw_ep,
1536 struct musb_hw_ep *hw_ep,
1541 struct dma_channel *channel = hw_ep->rx_channel;
1542 void __iomem *epio = hw_ep->regs;
1565 if (musb_dma_cppi41(hw_ep->musb))
1566 done = musb_rx_dma_iso_cppi41(dma, hw_ep, qh,
1606 struct musb_hw_ep *hw_ep,
1612 struct musb *musb = hw_ep->musb;
1613 void __iomem *epio = hw_ep->regs;
1614 struct dma_channel *channel = hw_ep->rx_channel;
1659 if (rx_count < hw_ep->max_packet_sz_rx) {
1693 hw_ep->rx_channel = NULL;
1706 struct musb_hw_ep *hw_ep,
1715 struct musb_hw_ep *hw_ep,
1732 struct musb_hw_ep *hw_ep = musb->endpoints + epnum;
1734 void __iomem *epio = hw_ep->regs;
1735 struct musb_qh *qh = hw_ep->in_qh;
1748 dma = is_dma_capable() ? hw_ep->rx_channel : NULL;
1762 musb_h_flush_rxfifo(hw_ep, MUSB_RXCSR_CLRDATATOG);
1807 musb_bulk_nak_timeout(musb, hw_ep, 1);
1835 musb_h_flush_rxfifo(hw_ep, MUSB_RXCSR_CLRDATATOG);
1883 musb_writew(hw_ep->regs, MUSB_RXCSR, val);
1887 done = musb_rx_dma_inventra_cppi41(c, hw_ep, qh, urb, xfer_len);
1888 musb_dbg(hw_ep->musb,
1916 musb_dbg(hw_ep->musb,
1924 if (musb_rx_dma_in_inventra_cppi41(c, hw_ep, qh, urb,
1984 musb_advance_schedule(musb, urb, hw_ep, USB_DIR_IN);
2001 struct musb_hw_ep *hw_ep = NULL;
2010 hw_ep = musb->control_ep;
2026 for (epnum = 1, hw_ep = musb->endpoints + 1;
2028 epnum++, hw_ep++) {
2031 if (musb_ep_get_qh(hw_ep, is_in) != NULL)
2034 if (hw_ep == musb->bulk_ep)
2038 diff = hw_ep->max_packet_sz_rx;
2040 diff = hw_ep->max_packet_sz_tx;
2057 hw_ep = musb->endpoints + epnum;
2059 txtype = (musb_readb(hw_ep->regs, MUSB_TXTYPE)
2071 hw_ep = musb->bulk_ep;
2098 hw_ep = musb->endpoints + best_end;
2106 qh->hw_ep = hw_ep;
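Source lines 2001-2106 are the scheduler: for a new qh it scans hardware endpoints 1..N, skips any that already have a qh for this direction (line 2031), reserves the shared bulk endpoint (lines 2034, 2071), and otherwise best-fits the qh's max packet size against each endpoint's FIFO size (lines 2038-2040), claiming the winner at lines 2098-2106. A compilable sketch of that best-fit scan:

#include <stddef.h>

struct ep_slot {
	int claimed;		/* already has a qh for this direction */
	int is_bulk_ep;		/* reserved for the shared bulk ring */
	unsigned fifo_size;	/* max_packet_sz_rx or _tx, per direction */
};

static int pick_endpoint(const struct ep_slot *eps, size_t count,
			 unsigned maxpacket)
{
	unsigned best_diff = ~0u;
	int best_end = -1;

	for (size_t i = 1; i < count; i++) {	/* ep0 handles control only */
		unsigned diff;

		if (eps[i].claimed || eps[i].is_bulk_ep)
			continue;
		if (eps[i].fifo_size < maxpacket)
			continue;		/* packet would not fit */

		diff = eps[i].fifo_size - maxpacket;	/* smaller is better */
		if (diff < best_diff) {
			best_diff = diff;
			best_end = (int)i;
		}
	}
	return best_end;	/* -1: nothing free; bulk ep may be shared */
}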
2152 * hw_ep gets reprogrammed, or with irqs blocked. Then schedule it.
2310 struct musb_hw_ep *ep = qh->hw_ep;
2396 || musb_ep_get_qh(qh->hw_ep, is_in) != qh) {
2407 musb_ep_set_qh(qh->hw_ep, is_in, NULL);
2439 if (musb_ep_get_qh(qh->hw_ep, is_in) == qh) {
2450 * queue on hw_ep (e.g. bulk ring) when we're done.
2455 musb_advance_schedule(musb, urb, qh->hw_ep, is_in);
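The dequeue/cleanup fragments at source lines 2310-2455 follow one rule: a qh that is not the one currently programmed on its hw_ep can simply be unlinked (lines 2396-2407), while the active qh must have its transfer aborted and the schedule advanced so the next URB queued on that endpoint, e.g. on the shared bulk ring, gets started (lines 2439-2455). A sketch of that decision, with stubs for the helpers involved:

struct hw_ep;
struct qh { struct hw_ep *hw_ep; };

/* Stubs shaped like musb_ep_get_qh(), the unlink path, and
 * musb_advance_schedule() as used at source lines 2396-2455.
 */
static struct qh *active_qh(struct hw_ep *ep, int is_in)
{ (void)ep; (void)is_in; return 0; }
static void unlink_qh(struct qh *qh)                       { (void)qh; }
static void abort_and_advance(struct hw_ep *ep, int is_in)
{ (void)ep; (void)is_in; }

static void dequeue(struct qh *qh, int is_in)
{
	if (active_qh(qh->hw_ep, is_in) != qh)
		unlink_qh(qh);		/* idle qh: safe to drop directly */
	else
		abort_and_advance(qh->hw_ep, is_in);
}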