Lines Matching +full:disable +full:- +full:hibernation +full:- +full:mode

1 // SPDX-License-Identifier: GPL-2.0
11 * S3C USB2.0 High-speed / OTG driver
19 #include <linux/dma-mapping.h>
65 return hsotg->eps_in[ep_index]; in index_to_ep()
67 return hsotg->eps_out[ep_index]; in index_to_ep()
74 * using_dma - return the DMA status of the driver.
94 return hsotg->params.g_dma; in using_dma()
98 * using_desc_dma - return the descriptor DMA status of the driver.
105 return hsotg->params.g_dma_desc; in using_desc_dma()
109 * dwc2_gadget_incr_frame_num - Increments the targeted frame number.
117 struct dwc2_hsotg *hsotg = hs_ep->parent; in dwc2_gadget_incr_frame_num()
120 if (hsotg->gadget.speed != USB_SPEED_HIGH) in dwc2_gadget_incr_frame_num()
123 hs_ep->target_frame += hs_ep->interval; in dwc2_gadget_incr_frame_num()
124 if (hs_ep->target_frame > limit) { in dwc2_gadget_incr_frame_num()
125 hs_ep->frame_overrun = true; in dwc2_gadget_incr_frame_num()
126 hs_ep->target_frame &= limit; in dwc2_gadget_incr_frame_num()
128 hs_ep->frame_overrun = false; in dwc2_gadget_incr_frame_num()
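/*
 * Illustrative sketch, not part of gadget.c: how a target (micro)frame
 * counter is advanced by the endpoint interval and wrapped with a mask,
 * mirroring the increment/overrun logic listed above.  The 14-bit limit
 * value 0x3FFF used in main() is an assumption for the example only.
 */
#include <stdbool.h>
#include <stdio.h>

struct frame_state {
	unsigned int target_frame;
	unsigned int interval;
	bool frame_overrun;
};

static void incr_frame_num(struct frame_state *st, unsigned int limit)
{
	st->target_frame += st->interval;
	if (st->target_frame > limit) {
		st->frame_overrun = true;	/* counter wrapped this period */
		st->target_frame &= limit;	/* keep only the low bits */
	} else {
		st->frame_overrun = false;
	}
}

int main(void)
{
	struct frame_state st = { .target_frame = 0x3FFE, .interval = 4 };

	incr_frame_num(&st, 0x3FFF);
	printf("target=0x%x overrun=%d\n", st.target_frame, st.frame_overrun);
	return 0;
}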
133 * dwc2_gadget_dec_frame_num_by_one - Decrements the targeted frame number
138 * descriptor frame number field value. For service interval mode frame
144 struct dwc2_hsotg *hsotg = hs_ep->parent; in dwc2_gadget_dec_frame_num_by_one()
147 if (hsotg->gadget.speed != USB_SPEED_HIGH) in dwc2_gadget_dec_frame_num_by_one()
150 if (hs_ep->target_frame) in dwc2_gadget_dec_frame_num_by_one()
151 hs_ep->target_frame -= 1; in dwc2_gadget_dec_frame_num_by_one()
153 hs_ep->target_frame = limit; in dwc2_gadget_dec_frame_num_by_one()
157 * dwc2_hsotg_en_gsint - enable one or more of the general interrupt
169 dev_dbg(hsotg->dev, "gsintmsk now 0x%08x\n", new_gsintmsk); in dwc2_hsotg_en_gsint()
175 * dwc2_hsotg_disable_gsint - disable one or more of the general interrupt
191 * dwc2_hsotg_ctrl_epint - enable/disable an endpoint irq
222 * dwc2_hsotg_tx_fifo_count - return count of TX FIFOs in device mode
228 if (hsotg->hw_params.en_multiple_tx_fifo) in dwc2_hsotg_tx_fifo_count()
229 /* In dedicated FIFO mode we need count of IN EPs */ in dwc2_hsotg_tx_fifo_count()
230 return hsotg->hw_params.num_dev_in_eps; in dwc2_hsotg_tx_fifo_count()
232 /* In shared FIFO mode we need count of Periodic IN EPs */ in dwc2_hsotg_tx_fifo_count()
233 return hsotg->hw_params.num_dev_perio_in_ep; in dwc2_hsotg_tx_fifo_count()
237 * dwc2_hsotg_tx_fifo_total_depth - return total FIFO depth available for
238 * device mode TX FIFOs
248 np_tx_fifo_size = min_t(u32, hsotg->hw_params.dev_nperio_tx_fifo_size, in dwc2_hsotg_tx_fifo_total_depth()
249 hsotg->params.g_np_tx_fifo_size); in dwc2_hsotg_tx_fifo_total_depth()
252 tx_addr_max = hsotg->hw_params.total_fifo_size; in dwc2_hsotg_tx_fifo_total_depth()
254 addr = hsotg->params.g_rx_fifo_size + np_tx_fifo_size; in dwc2_hsotg_tx_fifo_total_depth()
258 return tx_addr_max - addr; in dwc2_hsotg_tx_fifo_total_depth()
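/*
 * Illustrative sketch, not part of gadget.c: the depth left over for the
 * periodic/dedicated TX FIFOs is the total FIFO RAM minus what is already
 * reserved for the RX FIFO and the non-periodic TX FIFO, as computed above.
 * The sizes in main() (in 32-bit words) are made-up example values.
 */
#include <stdio.h>

static unsigned int tx_fifo_total_depth(unsigned int total_fifo_size,
					unsigned int rx_fifo_size,
					unsigned int np_tx_fifo_size)
{
	unsigned int reserved = rx_fifo_size + np_tx_fifo_size;

	return total_fifo_size - reserved;
}

int main(void)
{
	/* e.g. 1920 words of FIFO RAM, 1024 for RX, 256 for non-periodic TX */
	printf("available: %u words\n", tx_fifo_total_depth(1920, 1024, 256));
	return 0;
}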
262 * dwc2_gadget_wkup_alert_handler - Handler for WKUP_ALERT interrupt
277 dev_dbg(hsotg->dev, "%s: Wkup_Alert_Int\n", __func__); in dwc2_gadget_wkup_alert_handler()
284 * dwc2_hsotg_tx_fifo_average_depth - returns average depth of device mode
305 * dwc2_hsotg_init_fifo - initialise non-periodic FIFOs
315 u32 *txfsz = hsotg->params.g_tx_fifo_size; in dwc2_hsotg_init_fifo()
318 WARN_ON(hsotg->fifo_map); in dwc2_hsotg_init_fifo()
319 hsotg->fifo_map = 0; in dwc2_hsotg_init_fifo()
322 dwc2_writel(hsotg, hsotg->params.g_rx_fifo_size, GRXFSIZ); in dwc2_hsotg_init_fifo()
323 dwc2_writel(hsotg, (hsotg->params.g_rx_fifo_size << in dwc2_hsotg_init_fifo()
325 (hsotg->params.g_np_tx_fifo_size << FIFOSIZE_DEPTH_SHIFT), in dwc2_hsotg_init_fifo()
336 addr = hsotg->params.g_rx_fifo_size + hsotg->params.g_np_tx_fifo_size; in dwc2_hsotg_init_fifo()
348 WARN_ONCE(addr + txfsz[ep] > hsotg->fifo_mem, in dwc2_hsotg_init_fifo()
356 dwc2_writel(hsotg, hsotg->hw_params.total_fifo_size | in dwc2_hsotg_init_fifo()
375 if (--timeout == 0) { in dwc2_hsotg_init_fifo()
376 dev_err(hsotg->dev, in dwc2_hsotg_init_fifo()
385 dev_dbg(hsotg->dev, "FIFOs reset, timeout at %d\n", timeout); in dwc2_hsotg_init_fifo()
389 * dwc2_hsotg_ep_alloc_request - allocate USB request structure
404 INIT_LIST_HEAD(&req->queue); in dwc2_hsotg_ep_alloc_request()
406 return &req->req; in dwc2_hsotg_ep_alloc_request()
410 * is_ep_periodic - return true if the endpoint is in periodic mode.
413 * Returns true if the endpoint is in periodic mode, meaning it is being
418 return hs_ep->periodic; in is_ep_periodic()
422 * dwc2_hsotg_unmap_dma - unmap the DMA memory being used for the request
434 struct usb_request *req = &hs_req->req; in dwc2_hsotg_unmap_dma()
436 usb_gadget_unmap_request(&hsotg->gadget, req, hs_ep->map_dir); in dwc2_hsotg_unmap_dma()
440 * dwc2_gadget_alloc_ctrl_desc_chains - allocate DMA descriptor chains
449 hsotg->setup_desc[0] = in dwc2_gadget_alloc_ctrl_desc_chains()
450 dmam_alloc_coherent(hsotg->dev, in dwc2_gadget_alloc_ctrl_desc_chains()
452 &hsotg->setup_desc_dma[0], in dwc2_gadget_alloc_ctrl_desc_chains()
454 if (!hsotg->setup_desc[0]) in dwc2_gadget_alloc_ctrl_desc_chains()
457 hsotg->setup_desc[1] = in dwc2_gadget_alloc_ctrl_desc_chains()
458 dmam_alloc_coherent(hsotg->dev, in dwc2_gadget_alloc_ctrl_desc_chains()
460 &hsotg->setup_desc_dma[1], in dwc2_gadget_alloc_ctrl_desc_chains()
462 if (!hsotg->setup_desc[1]) in dwc2_gadget_alloc_ctrl_desc_chains()
465 hsotg->ctrl_in_desc = in dwc2_gadget_alloc_ctrl_desc_chains()
466 dmam_alloc_coherent(hsotg->dev, in dwc2_gadget_alloc_ctrl_desc_chains()
468 &hsotg->ctrl_in_desc_dma, in dwc2_gadget_alloc_ctrl_desc_chains()
470 if (!hsotg->ctrl_in_desc) in dwc2_gadget_alloc_ctrl_desc_chains()
473 hsotg->ctrl_out_desc = in dwc2_gadget_alloc_ctrl_desc_chains()
474 dmam_alloc_coherent(hsotg->dev, in dwc2_gadget_alloc_ctrl_desc_chains()
476 &hsotg->ctrl_out_desc_dma, in dwc2_gadget_alloc_ctrl_desc_chains()
478 if (!hsotg->ctrl_out_desc) in dwc2_gadget_alloc_ctrl_desc_chains()
484 return -ENOMEM; in dwc2_gadget_alloc_ctrl_desc_chains()
488 * dwc2_hsotg_write_fifo - write packet Data to the TxFIFO
499 * otherwise -ENOSPC is returned if the FIFO space was used up.
509 int buf_pos = hs_req->req.actual; in dwc2_hsotg_write_fifo()
510 int to_write = hs_ep->size_loaded; in dwc2_hsotg_write_fifo()
516 to_write -= (buf_pos - hs_ep->last_load); in dwc2_hsotg_write_fifo()
522 if (periodic && !hsotg->dedicated_fifos) { in dwc2_hsotg_write_fifo()
523 u32 epsize = dwc2_readl(hsotg, DIEPTSIZ(hs_ep->index)); in dwc2_hsotg_write_fifo()
538 if (hs_ep->fifo_load != 0) { in dwc2_hsotg_write_fifo()
540 return -ENOSPC; in dwc2_hsotg_write_fifo()
543 dev_dbg(hsotg->dev, "%s: left=%d, load=%d, fifo=%d, size %d\n", in dwc2_hsotg_write_fifo()
545 hs_ep->size_loaded, hs_ep->fifo_load, hs_ep->fifo_size); in dwc2_hsotg_write_fifo()
548 size_done = hs_ep->size_loaded - size_left; in dwc2_hsotg_write_fifo()
551 can_write = hs_ep->fifo_load - size_done; in dwc2_hsotg_write_fifo()
552 dev_dbg(hsotg->dev, "%s: => can_write1=%d\n", in dwc2_hsotg_write_fifo()
555 can_write = hs_ep->fifo_size - can_write; in dwc2_hsotg_write_fifo()
556 dev_dbg(hsotg->dev, "%s: => can_write2=%d\n", in dwc2_hsotg_write_fifo()
561 return -ENOSPC; in dwc2_hsotg_write_fifo()
563 } else if (hsotg->dedicated_fifos && hs_ep->index != 0) { in dwc2_hsotg_write_fifo()
565 DTXFSTS(hs_ep->fifo_index)); in dwc2_hsotg_write_fifo()
571 dev_dbg(hsotg->dev, in dwc2_hsotg_write_fifo()
576 return -ENOSPC; in dwc2_hsotg_write_fifo()
583 max_transfer = hs_ep->ep.maxpacket * hs_ep->mc; in dwc2_hsotg_write_fifo()
585 dev_dbg(hsotg->dev, "%s: GNPTXSTS=%08x, can=%d, to=%d, max_transfer %d\n", in dwc2_hsotg_write_fifo()
589 * limit to 512 bytes of data, it seems at least on the non-periodic in dwc2_hsotg_write_fifo()
597 * limit the write to one max-packet size worth of data, but allow in dwc2_hsotg_write_fifo()
605 if (!hsotg->dedicated_fifos) in dwc2_hsotg_write_fifo()
626 to_write -= pkt_round; in dwc2_hsotg_write_fifo()
634 if (!hsotg->dedicated_fifos) in dwc2_hsotg_write_fifo()
640 dev_dbg(hsotg->dev, "write %d/%d, can_write %d, done %d\n", in dwc2_hsotg_write_fifo()
641 to_write, hs_req->req.length, can_write, buf_pos); in dwc2_hsotg_write_fifo()
644 return -ENOSPC; in dwc2_hsotg_write_fifo()
646 hs_req->req.actual = buf_pos + to_write; in dwc2_hsotg_write_fifo()
647 hs_ep->total_data += to_write; in dwc2_hsotg_write_fifo()
650 hs_ep->fifo_load += to_write; in dwc2_hsotg_write_fifo()
653 data = hs_req->req.buf + buf_pos; in dwc2_hsotg_write_fifo()
655 dwc2_writel_rep(hsotg, EPFIFO(hs_ep->index), data, to_write); in dwc2_hsotg_write_fifo()
657 return (to_write >= can_write) ? -ENOSPC : 0; in dwc2_hsotg_write_fifo()
661 * get_ep_limit - get the maximum data length for this endpoint
669 int index = hs_ep->index; in get_ep_limit()
678 if (hs_ep->dir_in) in get_ep_limit()
685 maxpkt--; in get_ep_limit()
686 maxsize--; in get_ep_limit()
693 if ((maxpkt * hs_ep->ep.maxpacket) < maxsize) in get_ep_limit()
694 maxsize = maxpkt * hs_ep->ep.maxpacket; in get_ep_limit()
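/*
 * Illustrative sketch, not part of gadget.c: the per-transfer limit is the
 * smaller of what the byte-count field can hold and what the packet-count
 * field times wMaxPacketSize can cover, as in get_ep_limit() above.  The
 * 19-bit/10-bit field widths are assumptions for the example.
 */
#include <stdio.h>

static unsigned int ep_limit(unsigned int maxpacket)
{
	unsigned int maxsize = (1u << 19) - 1;	/* assumed transfer-size field */
	unsigned int maxpkt = (1u << 10) - 1;	/* assumed packet-count field */

	if (maxpkt * maxpacket < maxsize)
		maxsize = maxpkt * maxpacket;

	return maxsize;
}

int main(void)
{
	/* a 512-byte bulk endpoint: 1023 packets * 512 = 523776 bytes max */
	printf("limit: %u bytes\n", ep_limit(512));
	return 0;
}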
700 * dwc2_hsotg_read_frameno - read current frame number
717 * dwc2_gadget_get_chain_limit - get the maximum data payload value of the
727 const struct usb_endpoint_descriptor *ep_desc = hs_ep->ep.desc; in dwc2_gadget_get_chain_limit()
728 int is_isoc = hs_ep->isochronous; in dwc2_gadget_get_chain_limit()
730 u32 mps = hs_ep->ep.maxpacket; in dwc2_gadget_get_chain_limit()
731 int dir_in = hs_ep->dir_in; in dwc2_gadget_get_chain_limit()
734 maxsize = (hs_ep->dir_in ? DEV_DMA_ISOC_TX_NBYTES_LIMIT : in dwc2_gadget_get_chain_limit()
741 if (hs_ep->index) in dwc2_gadget_get_chain_limit()
749 * dwc2_gadget_get_desc_params - get DMA descriptor parameters.
756 * Control out - MPS,
757 * Isochronous - descriptor rx/tx bytes bitfield limit,
758 * Control In/Bulk/Interrupt - multiple of mps. This will allow to not
760 * Interrupt OUT - if mps not multiple of 4 then a single packet corresponds
767 const struct usb_endpoint_descriptor *ep_desc = hs_ep->ep.desc; in dwc2_gadget_get_desc_params()
768 u32 mps = hs_ep->ep.maxpacket; in dwc2_gadget_get_desc_params()
769 int dir_in = hs_ep->dir_in; in dwc2_gadget_get_desc_params()
772 if (!hs_ep->index && !dir_in) { in dwc2_gadget_get_desc_params()
775 } else if (hs_ep->isochronous) { in dwc2_gadget_get_desc_params()
788 desc_size -= desc_size % mps; in dwc2_gadget_get_desc_params()
792 if (hs_ep->index) in dwc2_gadget_get_desc_params()
807 int dir_in = hs_ep->dir_in; in dwc2_gadget_fill_nonisoc_xfer_ddma_one()
808 u32 mps = hs_ep->ep.maxpacket; in dwc2_gadget_fill_nonisoc_xfer_ddma_one()
816 hs_ep->desc_count = (len / maxsize) + in dwc2_gadget_fill_nonisoc_xfer_ddma_one()
819 hs_ep->desc_count = 1; in dwc2_gadget_fill_nonisoc_xfer_ddma_one()
821 for (i = 0; i < hs_ep->desc_count; ++i) { in dwc2_gadget_fill_nonisoc_xfer_ddma_one()
822 (*desc)->status = 0; in dwc2_gadget_fill_nonisoc_xfer_ddma_one()
823 (*desc)->status |= (DEV_DMA_BUFF_STS_HBUSY in dwc2_gadget_fill_nonisoc_xfer_ddma_one()
827 if (!hs_ep->index && !dir_in) in dwc2_gadget_fill_nonisoc_xfer_ddma_one()
828 (*desc)->status |= (DEV_DMA_L | DEV_DMA_IOC); in dwc2_gadget_fill_nonisoc_xfer_ddma_one()
830 (*desc)->status |= in dwc2_gadget_fill_nonisoc_xfer_ddma_one()
832 (*desc)->buf = dma_buff + offset; in dwc2_gadget_fill_nonisoc_xfer_ddma_one()
834 len -= maxsize; in dwc2_gadget_fill_nonisoc_xfer_ddma_one()
838 (*desc)->status |= (DEV_DMA_L | DEV_DMA_IOC); in dwc2_gadget_fill_nonisoc_xfer_ddma_one()
841 (*desc)->status |= (len % mps) ? DEV_DMA_SHORT : in dwc2_gadget_fill_nonisoc_xfer_ddma_one()
842 ((hs_ep->send_zlp && true_last) ? in dwc2_gadget_fill_nonisoc_xfer_ddma_one()
845 (*desc)->status |= in dwc2_gadget_fill_nonisoc_xfer_ddma_one()
847 (*desc)->buf = dma_buff + offset; in dwc2_gadget_fill_nonisoc_xfer_ddma_one()
850 (*desc)->status &= ~DEV_DMA_BUFF_STS_MASK; in dwc2_gadget_fill_nonisoc_xfer_ddma_one()
851 (*desc)->status |= (DEV_DMA_BUFF_STS_HREADY in dwc2_gadget_fill_nonisoc_xfer_ddma_one()
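/*
 * Illustrative sketch, not part of gadget.c: splitting a transfer into
 * fixed-size chunks makes the descriptor count a round-up division, with a
 * single descriptor still needed for short or zero-length transfers - the
 * same idea as the desc_count arithmetic above.
 */
#include <stdio.h>

static unsigned int ddma_desc_count(unsigned int len, unsigned int maxsize)
{
	if (len == 0)
		return 1;			/* a ZLP still occupies one descriptor */

	return (len + maxsize - 1) / maxsize;	/* DIV_ROUND_UP(len, maxsize) */
}

int main(void)
{
	printf("%u\n", ddma_desc_count(0, 65536));	/* 1 */
	printf("%u\n", ddma_desc_count(65536, 65536));	/* 1 */
	printf("%u\n", ddma_desc_count(65537, 65536));	/* 2 */
	return 0;
}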
858 * dwc2_gadget_config_nonisoc_xfer_ddma - prepare non-ISOC DMA desc chain.
872 struct dwc2_dma_desc *desc = hs_ep->desc_list; in dwc2_gadget_config_nonisoc_xfer_ddma()
877 if (hs_ep->req) in dwc2_gadget_config_nonisoc_xfer_ddma()
878 ureq = &hs_ep->req->req; in dwc2_gadget_config_nonisoc_xfer_ddma()
880 /* non-DMA sg buffer */ in dwc2_gadget_config_nonisoc_xfer_ddma()
881 if (!ureq || !ureq->num_sgs) { in dwc2_gadget_config_nonisoc_xfer_ddma()
888 for_each_sg(ureq->sg, sg, ureq->num_sgs, i) { in dwc2_gadget_config_nonisoc_xfer_ddma()
890 sg_dma_address(sg) + sg->offset, sg_dma_len(sg), in dwc2_gadget_config_nonisoc_xfer_ddma()
892 desc_count += hs_ep->desc_count; in dwc2_gadget_config_nonisoc_xfer_ddma()
895 hs_ep->desc_count = desc_count; in dwc2_gadget_config_nonisoc_xfer_ddma()
899 * dwc2_gadget_fill_isoc_desc - fills next isochronous descriptor in chain.
913 struct dwc2_hsotg *hsotg = hs_ep->parent; in dwc2_gadget_fill_isoc_desc()
920 index = hs_ep->next_desc; in dwc2_gadget_fill_isoc_desc()
921 desc = &hs_ep->desc_list[index]; in dwc2_gadget_fill_isoc_desc()
924 if ((desc->status >> DEV_DMA_BUFF_STS_SHIFT) == in dwc2_gadget_fill_isoc_desc()
926 dev_dbg(hsotg->dev, "%s: desc chain full\n", __func__); in dwc2_gadget_fill_isoc_desc()
931 if (hs_ep->next_desc) in dwc2_gadget_fill_isoc_desc()
932 hs_ep->desc_list[index - 1].status &= ~DEV_DMA_L; in dwc2_gadget_fill_isoc_desc()
934 dev_dbg(hsotg->dev, "%s: Filling ep %d, dir %s isoc desc # %d\n", in dwc2_gadget_fill_isoc_desc()
935 __func__, hs_ep->index, hs_ep->dir_in ? "in" : "out", index); in dwc2_gadget_fill_isoc_desc()
937 desc->status = 0; in dwc2_gadget_fill_isoc_desc()
938 desc->status |= (DEV_DMA_BUFF_STS_HBUSY << DEV_DMA_BUFF_STS_SHIFT); in dwc2_gadget_fill_isoc_desc()
940 desc->buf = dma_buff; in dwc2_gadget_fill_isoc_desc()
941 desc->status |= (DEV_DMA_L | DEV_DMA_IOC | in dwc2_gadget_fill_isoc_desc()
944 if (hs_ep->dir_in) { in dwc2_gadget_fill_isoc_desc()
946 pid = DIV_ROUND_UP(len, hs_ep->ep.maxpacket); in dwc2_gadget_fill_isoc_desc()
949 desc->status |= ((pid << DEV_DMA_ISOC_PID_SHIFT) & in dwc2_gadget_fill_isoc_desc()
951 ((len % hs_ep->ep.maxpacket) ? in dwc2_gadget_fill_isoc_desc()
953 ((hs_ep->target_frame << in dwc2_gadget_fill_isoc_desc()
958 desc->status &= ~DEV_DMA_BUFF_STS_MASK; in dwc2_gadget_fill_isoc_desc()
959 desc->status |= (DEV_DMA_BUFF_STS_HREADY << DEV_DMA_BUFF_STS_SHIFT); in dwc2_gadget_fill_isoc_desc()
962 if (hs_ep->dir_in) in dwc2_gadget_fill_isoc_desc()
966 hs_ep->next_desc++; in dwc2_gadget_fill_isoc_desc()
967 if (hs_ep->next_desc >= MAX_DMA_DESC_NUM_HS_ISOC) in dwc2_gadget_fill_isoc_desc()
968 hs_ep->next_desc = 0; in dwc2_gadget_fill_isoc_desc()
974 * dwc2_gadget_start_isoc_ddma - start isochronous transfer in DDMA
982 struct dwc2_hsotg *hsotg = hs_ep->parent; in dwc2_gadget_start_isoc_ddma()
984 int index = hs_ep->index; in dwc2_gadget_start_isoc_ddma()
992 if (list_empty(&hs_ep->queue)) { in dwc2_gadget_start_isoc_ddma()
993 hs_ep->target_frame = TARGET_FRAME_INITIAL; in dwc2_gadget_start_isoc_ddma()
994 dev_dbg(hsotg->dev, "%s: No requests in queue\n", __func__); in dwc2_gadget_start_isoc_ddma()
1000 desc = &hs_ep->desc_list[i]; in dwc2_gadget_start_isoc_ddma()
1001 desc->status = 0; in dwc2_gadget_start_isoc_ddma()
1002 desc->status |= (DEV_DMA_BUFF_STS_HBUSY in dwc2_gadget_start_isoc_ddma()
1006 hs_ep->next_desc = 0; in dwc2_gadget_start_isoc_ddma()
1007 list_for_each_entry_safe(hs_req, treq, &hs_ep->queue, queue) { in dwc2_gadget_start_isoc_ddma()
1008 dma_addr_t dma_addr = hs_req->req.dma; in dwc2_gadget_start_isoc_ddma()
1010 if (hs_req->req.num_sgs) { in dwc2_gadget_start_isoc_ddma()
1011 WARN_ON(hs_req->req.num_sgs > 1); in dwc2_gadget_start_isoc_ddma()
1012 dma_addr = sg_dma_address(hs_req->req.sg); in dwc2_gadget_start_isoc_ddma()
1015 hs_req->req.length); in dwc2_gadget_start_isoc_ddma()
1020 hs_ep->compl_desc = 0; in dwc2_gadget_start_isoc_ddma()
1021 depctl = hs_ep->dir_in ? DIEPCTL(index) : DOEPCTL(index); in dwc2_gadget_start_isoc_ddma()
1022 dma_reg = hs_ep->dir_in ? DIEPDMA(index) : DOEPDMA(index); in dwc2_gadget_start_isoc_ddma()
1025 dwc2_writel(hsotg, hs_ep->desc_list_dma, dma_reg); in dwc2_gadget_start_isoc_ddma()
1039 * dwc2_hsotg_start_req - start a USB request from an endpoint's queue
1053 struct usb_request *ureq = &hs_req->req; in dwc2_hsotg_start_req()
1054 int index = hs_ep->index; in dwc2_hsotg_start_req()
1055 int dir_in = hs_ep->dir_in; in dwc2_hsotg_start_req()
1066 if (hs_ep->req && !continuing) { in dwc2_hsotg_start_req()
1067 dev_err(hsotg->dev, "%s: active request\n", __func__); in dwc2_hsotg_start_req()
1070 } else if (hs_ep->req != hs_req && continuing) { in dwc2_hsotg_start_req()
1071 dev_err(hsotg->dev, in dwc2_hsotg_start_req()
1082 dev_dbg(hsotg->dev, "%s: DxEPCTL=0x%08x, ep %d, dir %s\n", in dwc2_hsotg_start_req()
1084 hs_ep->dir_in ? "in" : "out"); in dwc2_hsotg_start_req()
1090 dev_warn(hsotg->dev, "%s: ep%d is stalled\n", __func__, index); in dwc2_hsotg_start_req()
1094 length = ureq->length - ureq->actual; in dwc2_hsotg_start_req()
1095 dev_dbg(hsotg->dev, "ureq->length:%d ureq->actual:%d\n", in dwc2_hsotg_start_req()
1096 ureq->length, ureq->actual); in dwc2_hsotg_start_req()
1104 int round = maxreq % hs_ep->ep.maxpacket; in dwc2_hsotg_start_req()
1106 dev_dbg(hsotg->dev, "%s: length %d, max-req %d, r %d\n", in dwc2_hsotg_start_req()
1111 maxreq -= round; in dwc2_hsotg_start_req()
1117 packets = DIV_ROUND_UP(length, hs_ep->ep.maxpacket); in dwc2_hsotg_start_req()
1122 if (hs_ep->isochronous) in dwc2_hsotg_start_req()
1133 if (dir_in && ureq->zero && !continuing) { in dwc2_hsotg_start_req()
1135 if ((ureq->length >= hs_ep->ep.maxpacket) && in dwc2_hsotg_start_req()
1136 !(ureq->length % hs_ep->ep.maxpacket)) in dwc2_hsotg_start_req()
1137 hs_ep->send_zlp = 1; in dwc2_hsotg_start_req()
1143 dev_dbg(hsotg->dev, "%s: %d@%d/%d, 0x%08x => 0x%08x\n", in dwc2_hsotg_start_req()
1144 __func__, packets, length, ureq->length, epsize, epsize_reg); in dwc2_hsotg_start_req()
1147 hs_ep->req = hs_req; in dwc2_hsotg_start_req()
1151 u32 mps = hs_ep->ep.maxpacket; in dwc2_hsotg_start_req()
1153 /* Adjust length: EP0 - MPS, other OUT EPs - multiple of MPS */ in dwc2_hsotg_start_req()
1158 length += (mps - (length % mps)); in dwc2_hsotg_start_req()
1162 offset = ureq->actual; in dwc2_hsotg_start_req()
1165 dwc2_gadget_config_nonisoc_xfer_ddma(hs_ep, ureq->dma + offset, in dwc2_hsotg_start_req()
1169 dwc2_writel(hsotg, hs_ep->desc_list_dma, dma_reg); in dwc2_hsotg_start_req()
1171 dev_dbg(hsotg->dev, "%s: %08x pad => 0x%08x\n", in dwc2_hsotg_start_req()
1172 __func__, (u32)hs_ep->desc_list_dma, dma_reg); in dwc2_hsotg_start_req()
1183 dwc2_writel(hsotg, ureq->dma, dma_reg); in dwc2_hsotg_start_req()
1185 dev_dbg(hsotg->dev, "%s: %pad => 0x%08x\n", in dwc2_hsotg_start_req()
1186 __func__, &ureq->dma, dma_reg); in dwc2_hsotg_start_req()
1190 if (hs_ep->isochronous) { in dwc2_hsotg_start_req()
1192 if (hs_ep->interval == 1) { in dwc2_hsotg_start_req()
1193 if (hs_ep->target_frame & 0x1) in dwc2_hsotg_start_req()
1200 hs_req->req.frame_number = hs_ep->target_frame; in dwc2_hsotg_start_req()
1201 hs_req->req.actual = 0; in dwc2_hsotg_start_req()
1202 dwc2_hsotg_complete_request(hsotg, hs_ep, hs_req, -ENODATA); in dwc2_hsotg_start_req()
1209 dev_dbg(hsotg->dev, "ep0 state:%d\n", hsotg->ep0_state); in dwc2_hsotg_start_req()
1212 if (!(index == 0 && hsotg->ep0_state == DWC2_EP0_SETUP)) in dwc2_hsotg_start_req()
1215 dev_dbg(hsotg->dev, "%s: DxEPCTL=0x%08x\n", __func__, ctrl); in dwc2_hsotg_start_req()
1223 hs_ep->size_loaded = length; in dwc2_hsotg_start_req()
1224 hs_ep->last_load = ureq->actual; in dwc2_hsotg_start_req()
1227 /* set these anyway, we may need them for non-periodic in */ in dwc2_hsotg_start_req()
1228 hs_ep->fifo_load = 0; in dwc2_hsotg_start_req()
1240 dev_dbg(hsotg->dev, in dwc2_hsotg_start_req()
1244 dev_dbg(hsotg->dev, "%s: DXEPCTL=0x%08x\n", in dwc2_hsotg_start_req()
1248 dwc2_hsotg_ctrl_epint(hsotg, hs_ep->index, hs_ep->dir_in, 1); in dwc2_hsotg_start_req()
1252 * dwc2_hsotg_map_dma - map the DMA memory being used for the request
1269 hs_ep->map_dir = hs_ep->dir_in; in dwc2_hsotg_map_dma()
1270 ret = usb_gadget_map_request(&hsotg->gadget, req, hs_ep->dir_in); in dwc2_hsotg_map_dma()
1277 dev_err(hsotg->dev, "%s: failed to map buffer %p, %d bytes\n", in dwc2_hsotg_map_dma()
1278 __func__, req->buf, req->length); in dwc2_hsotg_map_dma()
1280 return -EIO; in dwc2_hsotg_map_dma()
1287 void *req_buf = hs_req->req.buf; in dwc2_hsotg_handle_unaligned_buf_start()
1293 WARN_ON(hs_req->saved_req_buf); in dwc2_hsotg_handle_unaligned_buf_start()
1295 dev_dbg(hsotg->dev, "%s: %s: buf=%p length=%d\n", __func__, in dwc2_hsotg_handle_unaligned_buf_start()
1296 hs_ep->ep.name, req_buf, hs_req->req.length); in dwc2_hsotg_handle_unaligned_buf_start()
1298 hs_req->req.buf = kmalloc(hs_req->req.length, GFP_ATOMIC); in dwc2_hsotg_handle_unaligned_buf_start()
1299 if (!hs_req->req.buf) { in dwc2_hsotg_handle_unaligned_buf_start()
1300 hs_req->req.buf = req_buf; in dwc2_hsotg_handle_unaligned_buf_start()
1301 dev_err(hsotg->dev, in dwc2_hsotg_handle_unaligned_buf_start()
1304 return -ENOMEM; in dwc2_hsotg_handle_unaligned_buf_start()
1308 hs_req->saved_req_buf = req_buf; in dwc2_hsotg_handle_unaligned_buf_start()
1310 if (hs_ep->dir_in) in dwc2_hsotg_handle_unaligned_buf_start()
1311 memcpy(hs_req->req.buf, req_buf, hs_req->req.length); in dwc2_hsotg_handle_unaligned_buf_start()
1321 if (!using_dma(hsotg) || !hs_req->saved_req_buf) in dwc2_hsotg_handle_unaligned_buf_complete()
1324 dev_dbg(hsotg->dev, "%s: %s: status=%d actual-length=%d\n", __func__, in dwc2_hsotg_handle_unaligned_buf_complete()
1325 hs_ep->ep.name, hs_req->req.status, hs_req->req.actual); in dwc2_hsotg_handle_unaligned_buf_complete()
1328 if (!hs_ep->dir_in && !hs_req->req.status) in dwc2_hsotg_handle_unaligned_buf_complete()
1329 memcpy(hs_req->saved_req_buf, hs_req->req.buf, in dwc2_hsotg_handle_unaligned_buf_complete()
1330 hs_req->req.actual); in dwc2_hsotg_handle_unaligned_buf_complete()
1333 kfree(hs_req->req.buf); in dwc2_hsotg_handle_unaligned_buf_complete()
1335 hs_req->req.buf = hs_req->saved_req_buf; in dwc2_hsotg_handle_unaligned_buf_complete()
1336 hs_req->saved_req_buf = NULL; in dwc2_hsotg_handle_unaligned_buf_complete()
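/*
 * Illustrative sketch, not part of gadget.c: the bounce-buffer pattern used
 * above for DMA-unaligned request buffers.  Copy the caller's data into a
 * freshly allocated buffer before an IN (transmit) transfer, copy received
 * data back after an OUT transfer, then restore the original pointer.
 * malloc() stands in for kmalloc() in this user-space sketch.
 */
#include <stdbool.h>
#include <stdlib.h>
#include <string.h>

struct xfer {
	void *buf;		/* buffer currently handed to the "hardware" */
	void *saved_buf;	/* caller's original, possibly unaligned, buffer */
	size_t length;
};

static int bounce_start(struct xfer *x, bool dir_in)
{
	x->saved_buf = x->buf;
	x->buf = malloc(x->length);
	if (!x->buf) {
		x->buf = x->saved_buf;
		x->saved_buf = NULL;
		return -1;
	}
	if (dir_in)				/* IN: preload data to transmit */
		memcpy(x->buf, x->saved_buf, x->length);
	return 0;
}

static void bounce_complete(struct xfer *x, bool dir_in, size_t actual)
{
	if (!x->saved_buf)
		return;
	if (!dir_in)				/* OUT: copy received data back */
		memcpy(x->saved_buf, x->buf, actual);
	free(x->buf);
	x->buf = x->saved_buf;
	x->saved_buf = NULL;
}

int main(void)
{
	char data[8] = "payload";
	struct xfer x = { .buf = data, .length = sizeof(data) };

	if (bounce_start(&x, true) == 0)
		bounce_complete(&x, true, sizeof(data));
	return 0;
}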
1340 * dwc2_gadget_target_frame_elapsed - Checks target frame
1348 struct dwc2_hsotg *hsotg = hs_ep->parent; in dwc2_gadget_target_frame_elapsed()
1349 u32 target_frame = hs_ep->target_frame; in dwc2_gadget_target_frame_elapsed()
1350 u32 current_frame = hsotg->frame_number; in dwc2_gadget_target_frame_elapsed()
1351 bool frame_overrun = hs_ep->frame_overrun; in dwc2_gadget_target_frame_elapsed()
1354 if (hsotg->gadget.speed != USB_SPEED_HIGH) in dwc2_gadget_target_frame_elapsed()
1361 ((current_frame - target_frame) < limit / 2)) in dwc2_gadget_target_frame_elapsed()
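/*
 * Illustrative sketch, not part of gadget.c: a wrap-aware "has the target
 * (micro)frame already passed?" test.  With a free-running counter that
 * wraps at limit + 1, the masked difference (current - target) is smaller
 * than half the range when the target lies in the past.  This is a
 * simplification; the driver above additionally tracks a frame_overrun
 * flag.  The 14-bit width is an assumption for the example.
 */
#include <stdbool.h>
#include <stdio.h>

static bool frame_elapsed(unsigned int current, unsigned int target,
			  unsigned int limit)
{
	return ((current - target) & limit) < (limit + 1) / 2;
}

int main(void)
{
	unsigned int limit = 0x3FFF;

	printf("%d\n", frame_elapsed(0x0005, 0x3FFE, limit));	/* 1: wrapped past */
	printf("%d\n", frame_elapsed(0x0005, 0x0010, limit));	/* 0: still ahead */
	return 0;
}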
1368 * dwc2_gadget_set_ep0_desc_chain - Set EP's desc chain pointers
1378 switch (hsotg->ep0_state) { in dwc2_gadget_set_ep0_desc_chain()
1381 hs_ep->desc_list = hsotg->setup_desc[0]; in dwc2_gadget_set_ep0_desc_chain()
1382 hs_ep->desc_list_dma = hsotg->setup_desc_dma[0]; in dwc2_gadget_set_ep0_desc_chain()
1386 hs_ep->desc_list = hsotg->ctrl_in_desc; in dwc2_gadget_set_ep0_desc_chain()
1387 hs_ep->desc_list_dma = hsotg->ctrl_in_desc_dma; in dwc2_gadget_set_ep0_desc_chain()
1390 hs_ep->desc_list = hsotg->ctrl_out_desc; in dwc2_gadget_set_ep0_desc_chain()
1391 hs_ep->desc_list_dma = hsotg->ctrl_out_desc_dma; in dwc2_gadget_set_ep0_desc_chain()
1394 dev_err(hsotg->dev, "invalid EP 0 state in queue %d\n", in dwc2_gadget_set_ep0_desc_chain()
1395 hsotg->ep0_state); in dwc2_gadget_set_ep0_desc_chain()
1396 return -EINVAL; in dwc2_gadget_set_ep0_desc_chain()
1407 struct dwc2_hsotg *hs = hs_ep->parent; in dwc2_hsotg_ep_queue()
1414 dev_dbg(hs->dev, "%s: req %p: %d@%p, noi=%d, zero=%d, snok=%d\n", in dwc2_hsotg_ep_queue()
1415 ep->name, req, req->length, req->buf, req->no_interrupt, in dwc2_hsotg_ep_queue()
1416 req->zero, req->short_not_ok); in dwc2_hsotg_ep_queue()
1419 if (hs->lx_state != DWC2_L0) { in dwc2_hsotg_ep_queue()
1420 dev_dbg(hs->dev, "%s: submit request only in active state\n", in dwc2_hsotg_ep_queue()
1422 return -EAGAIN; in dwc2_hsotg_ep_queue()
1426 INIT_LIST_HEAD(&hs_req->queue); in dwc2_hsotg_ep_queue()
1427 req->actual = 0; in dwc2_hsotg_ep_queue()
1428 req->status = -EINPROGRESS; in dwc2_hsotg_ep_queue()
1431 if (hs_ep->isochronous && in dwc2_hsotg_ep_queue()
1432 req->length > (hs_ep->mc * hs_ep->ep.maxpacket)) { in dwc2_hsotg_ep_queue()
1433 dev_err(hs->dev, "req length > maxpacket*mc\n"); in dwc2_hsotg_ep_queue()
1434 return -EINVAL; in dwc2_hsotg_ep_queue()
1437 /* In DDMA mode for ISOC's don't queue request if length greater in dwc2_hsotg_ep_queue()
1440 if (using_desc_dma(hs) && hs_ep->isochronous) { in dwc2_hsotg_ep_queue()
1442 if (hs_ep->dir_in && req->length > maxsize) { in dwc2_hsotg_ep_queue()
1443 dev_err(hs->dev, "wrong length %d (maxsize=%d)\n", in dwc2_hsotg_ep_queue()
1444 req->length, maxsize); in dwc2_hsotg_ep_queue()
1445 return -EINVAL; in dwc2_hsotg_ep_queue()
1448 if (!hs_ep->dir_in && req->length > hs_ep->ep.maxpacket) { in dwc2_hsotg_ep_queue()
1449 dev_err(hs->dev, "ISOC OUT: wrong length %d (mps=%d)\n", in dwc2_hsotg_ep_queue()
1450 req->length, hs_ep->ep.maxpacket); in dwc2_hsotg_ep_queue()
1451 return -EINVAL; in dwc2_hsotg_ep_queue()
1466 if (using_desc_dma(hs) && !hs_ep->index) { in dwc2_hsotg_ep_queue()
1472 first = list_empty(&hs_ep->queue); in dwc2_hsotg_ep_queue()
1473 list_add_tail(&hs_req->queue, &hs_ep->queue); in dwc2_hsotg_ep_queue()
1476 * Handle DDMA isochronous transfers separately - just add new entry in dwc2_hsotg_ep_queue()
1481 if (using_desc_dma(hs) && hs_ep->isochronous) { in dwc2_hsotg_ep_queue()
1482 if (hs_ep->target_frame != TARGET_FRAME_INITIAL) { in dwc2_hsotg_ep_queue()
1483 dma_addr_t dma_addr = hs_req->req.dma; in dwc2_hsotg_ep_queue()
1485 if (hs_req->req.num_sgs) { in dwc2_hsotg_ep_queue()
1486 WARN_ON(hs_req->req.num_sgs > 1); in dwc2_hsotg_ep_queue()
1487 dma_addr = sg_dma_address(hs_req->req.sg); in dwc2_hsotg_ep_queue()
1490 hs_req->req.length); in dwc2_hsotg_ep_queue()
1496 if (!hs_ep->index && !req->length && !hs_ep->dir_in && in dwc2_hsotg_ep_queue()
1497 hs->ep0_state == DWC2_EP0_DATA_OUT) in dwc2_hsotg_ep_queue()
1498 hs_ep->dir_in = 1; in dwc2_hsotg_ep_queue()
1501 if (!hs_ep->isochronous) { in dwc2_hsotg_ep_queue()
1507 hs->frame_number = dwc2_hsotg_read_frameno(hs); in dwc2_hsotg_ep_queue()
1513 hs->frame_number = dwc2_hsotg_read_frameno(hs); in dwc2_hsotg_ep_queue()
1516 if (hs_ep->target_frame != TARGET_FRAME_INITIAL) in dwc2_hsotg_ep_queue()
1526 struct dwc2_hsotg *hs = hs_ep->parent; in dwc2_hsotg_ep_queue_lock()
1530 spin_lock_irqsave(&hs->lock, flags); in dwc2_hsotg_ep_queue_lock()
1532 spin_unlock_irqrestore(&hs->lock, flags); in dwc2_hsotg_ep_queue_lock()
1546 * dwc2_hsotg_complete_oursetup - setup completion callback
1557 struct dwc2_hsotg *hsotg = hs_ep->parent; in dwc2_hsotg_complete_oursetup()
1559 dev_dbg(hsotg->dev, "%s: ep %p, req %p\n", __func__, ep, req); in dwc2_hsotg_complete_oursetup()
1565 * ep_from_windex - convert control wIndex value to endpoint
1581 if (idx > hsotg->num_of_eps) in ep_from_windex()
1588 * dwc2_hsotg_set_test_mode - Enable usb Test Modes
1590 * @testmode: requested usb test mode
1591 * Enable usb Test Mode requested by the Host.
1607 return -EINVAL; in dwc2_hsotg_set_test_mode()
1614 * dwc2_hsotg_send_reply - send reply to control request
1631 dev_dbg(hsotg->dev, "%s: buff %p, len %d\n", __func__, buff, length); in dwc2_hsotg_send_reply()
1633 req = dwc2_hsotg_ep_alloc_request(&ep->ep, GFP_ATOMIC); in dwc2_hsotg_send_reply()
1634 hsotg->ep0_reply = req; in dwc2_hsotg_send_reply()
1636 dev_warn(hsotg->dev, "%s: cannot alloc req\n", __func__); in dwc2_hsotg_send_reply()
1637 return -ENOMEM; in dwc2_hsotg_send_reply()
1640 req->buf = hsotg->ep0_buff; in dwc2_hsotg_send_reply()
1641 req->length = length; in dwc2_hsotg_send_reply()
1646 req->zero = 0; in dwc2_hsotg_send_reply()
1647 req->complete = dwc2_hsotg_complete_oursetup; in dwc2_hsotg_send_reply()
1650 memcpy(req->buf, buff, length); in dwc2_hsotg_send_reply()
1652 ret = dwc2_hsotg_ep_queue(&ep->ep, req, GFP_ATOMIC); in dwc2_hsotg_send_reply()
1654 dev_warn(hsotg->dev, "%s: cannot queue req\n", __func__); in dwc2_hsotg_send_reply()
1662 * dwc2_hsotg_process_req_status - process request GET_STATUS
1669 struct dwc2_hsotg_ep *ep0 = hsotg->eps_out[0]; in dwc2_hsotg_process_req_status()
1675 dev_dbg(hsotg->dev, "%s: USB_REQ_GET_STATUS\n", __func__); in dwc2_hsotg_process_req_status()
1677 if (!ep0->dir_in) { in dwc2_hsotg_process_req_status()
1678 dev_warn(hsotg->dev, "%s: direction out?\n", __func__); in dwc2_hsotg_process_req_status()
1679 return -EINVAL; in dwc2_hsotg_process_req_status()
1682 switch (ctrl->bRequestType & USB_RECIP_MASK) { in dwc2_hsotg_process_req_status()
1684 status = hsotg->gadget.is_selfpowered << in dwc2_hsotg_process_req_status()
1686 status |= hsotg->remote_wakeup_allowed << in dwc2_hsotg_process_req_status()
1697 ep = ep_from_windex(hsotg, le16_to_cpu(ctrl->wIndex)); in dwc2_hsotg_process_req_status()
1699 return -ENOENT; in dwc2_hsotg_process_req_status()
1701 reply = cpu_to_le16(ep->halted ? 1 : 0); in dwc2_hsotg_process_req_status()
1708 if (le16_to_cpu(ctrl->wLength) != 2) in dwc2_hsotg_process_req_status()
1709 return -EINVAL; in dwc2_hsotg_process_req_status()
1713 dev_err(hsotg->dev, "%s: failed to send reply\n", __func__); in dwc2_hsotg_process_req_status()
1723 * get_ep_head - return the first request on the endpoint
1730 return list_first_entry_or_null(&hs_ep->queue, struct dwc2_hsotg_req, in get_ep_head()
1735 * dwc2_gadget_start_next_request - Starts next request from ep queue
1738 * If queue is empty and EP is ISOC-OUT - unmasks OUTTKNEPDIS which is masked
1744 struct dwc2_hsotg *hsotg = hs_ep->parent; in dwc2_gadget_start_next_request()
1745 int dir_in = hs_ep->dir_in; in dwc2_gadget_start_next_request()
1748 if (!list_empty(&hs_ep->queue)) { in dwc2_gadget_start_next_request()
1753 if (!hs_ep->isochronous) in dwc2_gadget_start_next_request()
1757 dev_dbg(hsotg->dev, "%s: No more ISOC-IN requests\n", in dwc2_gadget_start_next_request()
1760 dev_dbg(hsotg->dev, "%s: No more ISOC-OUT requests\n", in dwc2_gadget_start_next_request()
1766 * dwc2_hsotg_process_req_feature - process request {SET,CLEAR}_FEATURE
1773 struct dwc2_hsotg_ep *ep0 = hsotg->eps_out[0]; in dwc2_hsotg_process_req_feature()
1775 bool set = (ctrl->bRequest == USB_REQ_SET_FEATURE); in dwc2_hsotg_process_req_feature()
1783 dev_dbg(hsotg->dev, "%s: %s_FEATURE\n", in dwc2_hsotg_process_req_feature()
1786 wValue = le16_to_cpu(ctrl->wValue); in dwc2_hsotg_process_req_feature()
1787 wIndex = le16_to_cpu(ctrl->wIndex); in dwc2_hsotg_process_req_feature()
1788 recip = ctrl->bRequestType & USB_RECIP_MASK; in dwc2_hsotg_process_req_feature()
1795 hsotg->remote_wakeup_allowed = 1; in dwc2_hsotg_process_req_feature()
1797 hsotg->remote_wakeup_allowed = 0; in dwc2_hsotg_process_req_feature()
1802 return -EINVAL; in dwc2_hsotg_process_req_feature()
1804 return -EINVAL; in dwc2_hsotg_process_req_feature()
1806 hsotg->test_mode = wIndex >> 8; in dwc2_hsotg_process_req_feature()
1809 return -ENOENT; in dwc2_hsotg_process_req_feature()
1814 dev_err(hsotg->dev, in dwc2_hsotg_process_req_feature()
1823 dev_dbg(hsotg->dev, "%s: no endpoint for 0x%04x\n", in dwc2_hsotg_process_req_feature()
1825 return -ENOENT; in dwc2_hsotg_process_req_feature()
1830 halted = ep->halted; in dwc2_hsotg_process_req_feature()
1832 if (!ep->wedged) in dwc2_hsotg_process_req_feature()
1833 dwc2_hsotg_ep_sethalt(&ep->ep, set, true); in dwc2_hsotg_process_req_feature()
1837 dev_err(hsotg->dev, in dwc2_hsotg_process_req_feature()
1852 if (ep->req) { in dwc2_hsotg_process_req_feature()
1853 hs_req = ep->req; in dwc2_hsotg_process_req_feature()
1854 ep->req = NULL; in dwc2_hsotg_process_req_feature()
1855 list_del_init(&hs_req->queue); in dwc2_hsotg_process_req_feature()
1856 if (hs_req->req.complete) { in dwc2_hsotg_process_req_feature()
1857 spin_unlock(&hsotg->lock); in dwc2_hsotg_process_req_feature()
1859 &ep->ep, &hs_req->req); in dwc2_hsotg_process_req_feature()
1860 spin_lock(&hsotg->lock); in dwc2_hsotg_process_req_feature()
1865 if (!ep->req) in dwc2_hsotg_process_req_feature()
1872 return -ENOENT; in dwc2_hsotg_process_req_feature()
1876 return -ENOENT; in dwc2_hsotg_process_req_feature()
1884 * dwc2_hsotg_stall_ep0 - stall ep0
1891 struct dwc2_hsotg_ep *ep0 = hsotg->eps_out[0]; in dwc2_hsotg_stall_ep0()
1895 dev_dbg(hsotg->dev, "ep0 stall (dir=%d)\n", ep0->dir_in); in dwc2_hsotg_stall_ep0()
1896 reg = (ep0->dir_in) ? DIEPCTL0 : DOEPCTL0; in dwc2_hsotg_stall_ep0()
1908 dev_dbg(hsotg->dev, in dwc2_hsotg_stall_ep0()
1920 * dwc2_hsotg_process_control - process a control request
1931 struct dwc2_hsotg_ep *ep0 = hsotg->eps_out[0]; in dwc2_hsotg_process_control()
1935 dev_dbg(hsotg->dev, in dwc2_hsotg_process_control()
1937 ctrl->bRequestType, ctrl->bRequest, ctrl->wValue, in dwc2_hsotg_process_control()
1938 ctrl->wIndex, ctrl->wLength); in dwc2_hsotg_process_control()
1940 if (ctrl->wLength == 0) { in dwc2_hsotg_process_control()
1941 ep0->dir_in = 1; in dwc2_hsotg_process_control()
1942 hsotg->ep0_state = DWC2_EP0_STATUS_IN; in dwc2_hsotg_process_control()
1943 } else if (ctrl->bRequestType & USB_DIR_IN) { in dwc2_hsotg_process_control()
1944 ep0->dir_in = 1; in dwc2_hsotg_process_control()
1945 hsotg->ep0_state = DWC2_EP0_DATA_IN; in dwc2_hsotg_process_control()
1947 ep0->dir_in = 0; in dwc2_hsotg_process_control()
1948 hsotg->ep0_state = DWC2_EP0_DATA_OUT; in dwc2_hsotg_process_control()
1951 if ((ctrl->bRequestType & USB_TYPE_MASK) == USB_TYPE_STANDARD) { in dwc2_hsotg_process_control()
1952 switch (ctrl->bRequest) { in dwc2_hsotg_process_control()
1954 hsotg->connected = 1; in dwc2_hsotg_process_control()
1957 dcfg |= (le16_to_cpu(ctrl->wValue) << in dwc2_hsotg_process_control()
1961 dev_info(hsotg->dev, "new address %d\n", ctrl->wValue); in dwc2_hsotg_process_control()
1979 if (ret == 0 && hsotg->driver) { in dwc2_hsotg_process_control()
1980 spin_unlock(&hsotg->lock); in dwc2_hsotg_process_control()
1981 ret = hsotg->driver->setup(&hsotg->gadget, ctrl); in dwc2_hsotg_process_control()
1982 spin_lock(&hsotg->lock); in dwc2_hsotg_process_control()
1984 dev_dbg(hsotg->dev, "driver->setup() ret %d\n", ret); in dwc2_hsotg_process_control()
1987 hsotg->delayed_status = false; in dwc2_hsotg_process_control()
1989 hsotg->delayed_status = true; in dwc2_hsotg_process_control()
2001 * dwc2_hsotg_complete_setup - completion of a setup transfer
2012 struct dwc2_hsotg *hsotg = hs_ep->parent; in dwc2_hsotg_complete_setup()
2014 if (req->status < 0) { in dwc2_hsotg_complete_setup()
2015 dev_dbg(hsotg->dev, "%s: failed %d\n", __func__, req->status); in dwc2_hsotg_complete_setup()
2019 spin_lock(&hsotg->lock); in dwc2_hsotg_complete_setup()
2020 if (req->actual == 0) in dwc2_hsotg_complete_setup()
2023 dwc2_hsotg_process_control(hsotg, req->buf); in dwc2_hsotg_complete_setup()
2024 spin_unlock(&hsotg->lock); in dwc2_hsotg_complete_setup()
2028 * dwc2_hsotg_enqueue_setup - start a request for EP0 packets
2036 struct usb_request *req = hsotg->ctrl_req; in dwc2_hsotg_enqueue_setup()
2040 dev_dbg(hsotg->dev, "%s: queueing setup request\n", __func__); in dwc2_hsotg_enqueue_setup()
2042 req->zero = 0; in dwc2_hsotg_enqueue_setup()
2043 req->length = 8; in dwc2_hsotg_enqueue_setup()
2044 req->buf = hsotg->ctrl_buff; in dwc2_hsotg_enqueue_setup()
2045 req->complete = dwc2_hsotg_complete_setup; in dwc2_hsotg_enqueue_setup()
2047 if (!list_empty(&hs_req->queue)) { in dwc2_hsotg_enqueue_setup()
2048 dev_dbg(hsotg->dev, "%s already queued???\n", __func__); in dwc2_hsotg_enqueue_setup()
2052 hsotg->eps_out[0]->dir_in = 0; in dwc2_hsotg_enqueue_setup()
2053 hsotg->eps_out[0]->send_zlp = 0; in dwc2_hsotg_enqueue_setup()
2054 hsotg->ep0_state = DWC2_EP0_SETUP; in dwc2_hsotg_enqueue_setup()
2056 ret = dwc2_hsotg_ep_queue(&hsotg->eps_out[0]->ep, req, GFP_ATOMIC); in dwc2_hsotg_enqueue_setup()
2058 dev_err(hsotg->dev, "%s: failed queue (%d)\n", __func__, ret); in dwc2_hsotg_enqueue_setup()
2070 u8 index = hs_ep->index; in dwc2_hsotg_program_zlp()
2071 u32 epctl_reg = hs_ep->dir_in ? DIEPCTL(index) : DOEPCTL(index); in dwc2_hsotg_program_zlp()
2072 u32 epsiz_reg = hs_ep->dir_in ? DIEPTSIZ(index) : DOEPTSIZ(index); in dwc2_hsotg_program_zlp()
2074 if (hs_ep->dir_in) in dwc2_hsotg_program_zlp()
2075 dev_dbg(hsotg->dev, "Sending zero-length packet on ep%d\n", in dwc2_hsotg_program_zlp()
2078 dev_dbg(hsotg->dev, "Receiving zero-length packet on ep%d\n", in dwc2_hsotg_program_zlp()
2082 dma_addr_t dma = hs_ep->desc_list_dma; in dwc2_hsotg_program_zlp()
2102 * dwc2_hsotg_complete_request - complete a request given to us
2120 dev_dbg(hsotg->dev, "%s: nothing to complete?\n", __func__); in dwc2_hsotg_complete_request()
2124 dev_dbg(hsotg->dev, "complete: ep %p %s, req %p, %d => %p\n", in dwc2_hsotg_complete_request()
2125 hs_ep, hs_ep->ep.name, hs_req, result, hs_req->req.complete); in dwc2_hsotg_complete_request()
2132 if (hs_req->req.status == -EINPROGRESS) in dwc2_hsotg_complete_request()
2133 hs_req->req.status = result; in dwc2_hsotg_complete_request()
2140 hs_ep->req = NULL; in dwc2_hsotg_complete_request()
2141 list_del_init(&hs_req->queue); in dwc2_hsotg_complete_request()
2148 if (hs_req->req.complete) { in dwc2_hsotg_complete_request()
2149 spin_unlock(&hsotg->lock); in dwc2_hsotg_complete_request()
2150 usb_gadget_giveback_request(&hs_ep->ep, &hs_req->req); in dwc2_hsotg_complete_request()
2151 spin_lock(&hsotg->lock); in dwc2_hsotg_complete_request()
2155 if (using_desc_dma(hsotg) && hs_ep->isochronous) in dwc2_hsotg_complete_request()
2164 if (!hs_ep->req && result >= 0) in dwc2_hsotg_complete_request()
2169 * dwc2_gadget_complete_isoc_request_ddma - complete an isoc request in DDMA
2179 struct dwc2_hsotg *hsotg = hs_ep->parent; in dwc2_gadget_complete_isoc_request_ddma()
2185 desc_sts = hs_ep->desc_list[hs_ep->compl_desc].status; in dwc2_gadget_complete_isoc_request_ddma()
2193 dev_warn(hsotg->dev, "%s: ISOC EP queue empty\n", __func__); in dwc2_gadget_complete_isoc_request_ddma()
2196 ureq = &hs_req->req; in dwc2_gadget_complete_isoc_request_ddma()
2201 mask = hs_ep->dir_in ? DEV_DMA_ISOC_TX_NBYTES_MASK : in dwc2_gadget_complete_isoc_request_ddma()
2203 ureq->actual = ureq->length - ((desc_sts & mask) >> in dwc2_gadget_complete_isoc_request_ddma()
2209 if (!hs_ep->dir_in && ureq->length & 0x3) in dwc2_gadget_complete_isoc_request_ddma()
2210 ureq->actual += 4 - (ureq->length & 0x3); in dwc2_gadget_complete_isoc_request_ddma()
2213 ureq->frame_number = in dwc2_gadget_complete_isoc_request_ddma()
2220 hs_ep->compl_desc++; in dwc2_gadget_complete_isoc_request_ddma()
2221 if (hs_ep->compl_desc > (MAX_DMA_DESC_NUM_HS_ISOC - 1)) in dwc2_gadget_complete_isoc_request_ddma()
2222 hs_ep->compl_desc = 0; in dwc2_gadget_complete_isoc_request_ddma()
2223 desc_sts = hs_ep->desc_list[hs_ep->compl_desc].status; in dwc2_gadget_complete_isoc_request_ddma()
2228 * dwc2_gadget_handle_isoc_bna - handle BNA interrupt for ISOC.
2238 struct dwc2_hsotg *hsotg = hs_ep->parent; in dwc2_gadget_handle_isoc_bna()
2240 if (!hs_ep->dir_in) in dwc2_gadget_handle_isoc_bna()
2244 hs_ep->target_frame = TARGET_FRAME_INITIAL; in dwc2_gadget_handle_isoc_bna()
2245 hs_ep->next_desc = 0; in dwc2_gadget_handle_isoc_bna()
2246 hs_ep->compl_desc = 0; in dwc2_gadget_handle_isoc_bna()
2250 * dwc2_hsotg_rx_data - receive data from the FIFO for an endpoint
2261 struct dwc2_hsotg_ep *hs_ep = hsotg->eps_out[ep_idx]; in dwc2_hsotg_rx_data()
2262 struct dwc2_hsotg_req *hs_req = hs_ep->req; in dwc2_hsotg_rx_data()
2271 dev_dbg(hsotg->dev, in dwc2_hsotg_rx_data()
2283 read_ptr = hs_req->req.actual; in dwc2_hsotg_rx_data()
2284 max_req = hs_req->req.length - read_ptr; in dwc2_hsotg_rx_data()
2286 dev_dbg(hsotg->dev, "%s: read %d/%d, done %d/%d\n", in dwc2_hsotg_rx_data()
2287 __func__, to_read, max_req, read_ptr, hs_req->req.length); in dwc2_hsotg_rx_data()
2299 hs_ep->total_data += to_read; in dwc2_hsotg_rx_data()
2300 hs_req->req.actual += to_read; in dwc2_hsotg_rx_data()
2304 * note, we might over-write the buffer end by 3 bytes depending on in dwc2_hsotg_rx_data()
2308 hs_req->req.buf + read_ptr, to_read); in dwc2_hsotg_rx_data()
2312 * dwc2_hsotg_ep0_zlp - send/receive zero-length packet on control endpoint
2316 * Generate a zero-length IN packet request for terminating a SETUP
2326 hsotg->eps_out[0]->dir_in = dir_in; in dwc2_hsotg_ep0_zlp()
2327 hsotg->ep0_state = dir_in ? DWC2_EP0_STATUS_IN : DWC2_EP0_STATUS_OUT; in dwc2_hsotg_ep0_zlp()
2329 dwc2_hsotg_program_zlp(hsotg, hsotg->eps_out[0]); in dwc2_hsotg_ep0_zlp()
2333 * dwc2_gadget_get_xfersize_ddma - get the number of bytes transferred from desc
2334 * @hs_ep: The endpoint on which the transfer took place
2341 const struct usb_endpoint_descriptor *ep_desc = hs_ep->ep.desc; in dwc2_gadget_get_xfersize_ddma()
2342 struct dwc2_hsotg *hsotg = hs_ep->parent; in dwc2_gadget_get_xfersize_ddma()
2345 struct dwc2_dma_desc *desc = hs_ep->desc_list; in dwc2_gadget_get_xfersize_ddma()
2348 u32 mps = hs_ep->ep.maxpacket; in dwc2_gadget_get_xfersize_ddma()
2349 int dir_in = hs_ep->dir_in; in dwc2_gadget_get_xfersize_ddma()
2352 return -EINVAL; in dwc2_gadget_get_xfersize_ddma()
2355 if (hs_ep->index) in dwc2_gadget_get_xfersize_ddma()
2357 bytes_rem_correction = 4 - (mps % 4); in dwc2_gadget_get_xfersize_ddma()
2359 for (i = 0; i < hs_ep->desc_count; ++i) { in dwc2_gadget_get_xfersize_ddma()
2360 status = desc->status; in dwc2_gadget_get_xfersize_ddma()
2362 bytes_rem -= bytes_rem_correction; in dwc2_gadget_get_xfersize_ddma()
2365 dev_err(hsotg->dev, "descriptor %d closed with %x\n", in dwc2_gadget_get_xfersize_ddma()
2378 * dwc2_hsotg_handle_outdone - handle receiving OutDone/SetupDone from RXFIFO
2389 struct dwc2_hsotg_ep *hs_ep = hsotg->eps_out[epnum]; in dwc2_hsotg_handle_outdone()
2390 struct dwc2_hsotg_req *hs_req = hs_ep->req; in dwc2_hsotg_handle_outdone()
2391 struct usb_request *req = &hs_req->req; in dwc2_hsotg_handle_outdone()
2396 dev_dbg(hsotg->dev, "%s: no request active\n", __func__); in dwc2_hsotg_handle_outdone()
2400 if (epnum == 0 && hsotg->ep0_state == DWC2_EP0_STATUS_OUT) { in dwc2_hsotg_handle_outdone()
2401 dev_dbg(hsotg->dev, "zlp packet received\n"); in dwc2_hsotg_handle_outdone()
2422 size_done = hs_ep->size_loaded - size_left; in dwc2_hsotg_handle_outdone()
2423 size_done += hs_ep->last_load; in dwc2_hsotg_handle_outdone()
2425 req->actual = size_done; in dwc2_hsotg_handle_outdone()
2429 if (req->actual < req->length && size_left == 0) { in dwc2_hsotg_handle_outdone()
2434 if (req->actual < req->length && req->short_not_ok) { in dwc2_hsotg_handle_outdone()
2435 dev_dbg(hsotg->dev, "%s: got %d/%d (short not ok) => error\n", in dwc2_hsotg_handle_outdone()
2436 __func__, req->actual, req->length); in dwc2_hsotg_handle_outdone()
2439 * todo - what should we return here? there's no one else in dwc2_hsotg_handle_outdone()
2446 hsotg->ep0_state == DWC2_EP0_DATA_OUT) { in dwc2_hsotg_handle_outdone()
2448 if (!hsotg->delayed_status) in dwc2_hsotg_handle_outdone()
2453 if (!using_desc_dma(hsotg) && hs_ep->isochronous) { in dwc2_hsotg_handle_outdone()
2454 req->frame_number = hs_ep->target_frame; in dwc2_hsotg_handle_outdone()
2462 * dwc2_hsotg_handle_rx - RX FIFO has data
2490 dev_dbg(hsotg->dev, "%s: GRXSTSP=0x%08x (%d@%d)\n", in dwc2_hsotg_handle_rx()
2495 dev_dbg(hsotg->dev, "GLOBALOUTNAK\n"); in dwc2_hsotg_handle_rx()
2499 dev_dbg(hsotg->dev, "OutDone (Frame=0x%08x)\n", in dwc2_hsotg_handle_rx()
2507 dev_dbg(hsotg->dev, in dwc2_hsotg_handle_rx()
2516 if (hsotg->ep0_state == DWC2_EP0_SETUP) in dwc2_hsotg_handle_rx()
2525 dev_dbg(hsotg->dev, in dwc2_hsotg_handle_rx()
2530 WARN_ON(hsotg->ep0_state != DWC2_EP0_SETUP); in dwc2_hsotg_handle_rx()
2536 dev_warn(hsotg->dev, "%s: unknown status %08x\n", in dwc2_hsotg_handle_rx()
2545 * dwc2_hsotg_ep0_mps - turn max packet size into register setting
2563 return (u32)-1; in dwc2_hsotg_ep0_mps()
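/*
 * Illustrative sketch, not part of gadget.c: EP0's MPS field is an
 * enumeration rather than a byte count, so only the four legal control
 * packet sizes map to a value.  The 64->0, 32->1, 16->2, 8->3 encoding
 * below follows the usual DWC2 convention but is stated here as an
 * assumption for the example.
 */
#include <stdio.h>

static unsigned int ep0_mps_field(unsigned int mps)
{
	switch (mps) {
	case 64: return 0;
	case 32: return 1;
	case 16: return 2;
	case 8:  return 3;
	}
	return (unsigned int)-1;	/* invalid max packet size */
}

int main(void)
{
	printf("MPS 64 -> field %u\n", ep0_mps_field(64));
	return 0;
}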
2567 * dwc2_hsotg_set_ep_maxpacket - set endpoint's max-packet field
2595 hs_ep->ep.maxpacket = mps_bytes; in dwc2_hsotg_set_ep_maxpacket()
2596 hs_ep->mc = 1; in dwc2_hsotg_set_ep_maxpacket()
2600 hs_ep->mc = mc; in dwc2_hsotg_set_ep_maxpacket()
2603 hs_ep->ep.maxpacket = mps; in dwc2_hsotg_set_ep_maxpacket()
2621 dev_err(hsotg->dev, "ep%d: bad mps of %d\n", ep, mps); in dwc2_hsotg_set_ep_maxpacket()
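/*
 * Illustrative sketch, not part of gadget.c: how a wMaxPacketSize value
 * from an endpoint descriptor splits into the packet size and the
 * high-bandwidth transaction multiplier (USB 2.0 keeps the latter in bits
 * 12:11), which is the mps/mc pair handled above.
 */
#include <stdio.h>

static void decode_wmaxpacketsize(unsigned int w, unsigned int *mps,
				  unsigned int *mc)
{
	*mps = w & 0x7ff;		/* bits 10:0 - max packet size */
	*mc = ((w >> 11) & 0x3) + 1;	/* bits 12:11 - transactions per microframe */
}

int main(void)
{
	unsigned int mps, mc;

	decode_wmaxpacketsize(0x1400, &mps, &mc);	/* high-bandwidth isoc */
	printf("mps=%u mc=%u\n", mps, mc);		/* -> mps=1024 mc=3 */
	return 0;
}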
2625 * dwc2_hsotg_txfifo_flush - flush Tx FIFO
2636 dev_warn(hsotg->dev, "%s: timeout flushing fifo GRSTCTL_TXFFLSH\n", in dwc2_hsotg_txfifo_flush()
2641 * dwc2_hsotg_trytx - check to see if anything needs transmitting
2651 struct dwc2_hsotg_req *hs_req = hs_ep->req; in dwc2_hsotg_trytx()
2653 if (!hs_ep->dir_in || !hs_req) { in dwc2_hsotg_trytx()
2655 * if request is not enqueued, we disable interrupts in dwc2_hsotg_trytx()
2658 if (hs_ep->index != 0) in dwc2_hsotg_trytx()
2659 dwc2_hsotg_ctrl_epint(hsotg, hs_ep->index, in dwc2_hsotg_trytx()
2660 hs_ep->dir_in, 0); in dwc2_hsotg_trytx()
2664 if (hs_req->req.actual < hs_req->req.length) { in dwc2_hsotg_trytx()
2665 dev_dbg(hsotg->dev, "trying to write more for ep%d\n", in dwc2_hsotg_trytx()
2666 hs_ep->index); in dwc2_hsotg_trytx()
2674 * dwc2_hsotg_complete_in - complete IN transfer
2684 struct dwc2_hsotg_req *hs_req = hs_ep->req; in dwc2_hsotg_complete_in()
2685 u32 epsize = dwc2_readl(hsotg, DIEPTSIZ(hs_ep->index)); in dwc2_hsotg_complete_in()
2689 dev_dbg(hsotg->dev, "XferCompl but no req\n"); in dwc2_hsotg_complete_in()
2694 if (hs_ep->index == 0 && hsotg->ep0_state == DWC2_EP0_STATUS_IN) { in dwc2_hsotg_complete_in()
2695 dev_dbg(hsotg->dev, "zlp packet sent\n"); in dwc2_hsotg_complete_in()
2701 hs_ep->dir_in = 0; in dwc2_hsotg_complete_in()
2704 if (hsotg->test_mode) { in dwc2_hsotg_complete_in()
2707 ret = dwc2_hsotg_set_test_mode(hsotg, hsotg->test_mode); in dwc2_hsotg_complete_in()
2709 dev_dbg(hsotg->dev, "Invalid Test #%d\n", in dwc2_hsotg_complete_in()
2710 hsotg->test_mode); in dwc2_hsotg_complete_in()
2731 dev_err(hsotg->dev, "error parsing DDMA results %d\n", in dwc2_hsotg_complete_in()
2737 size_done = hs_ep->size_loaded - size_left; in dwc2_hsotg_complete_in()
2738 size_done += hs_ep->last_load; in dwc2_hsotg_complete_in()
2740 if (hs_req->req.actual != size_done) in dwc2_hsotg_complete_in()
2741 dev_dbg(hsotg->dev, "%s: adjusting size done %d => %d\n", in dwc2_hsotg_complete_in()
2742 __func__, hs_req->req.actual, size_done); in dwc2_hsotg_complete_in()
2744 hs_req->req.actual = size_done; in dwc2_hsotg_complete_in()
2745 dev_dbg(hsotg->dev, "req->length:%d req->actual:%d req->zero:%d\n", in dwc2_hsotg_complete_in()
2746 hs_req->req.length, hs_req->req.actual, hs_req->req.zero); in dwc2_hsotg_complete_in()
2748 if (!size_left && hs_req->req.actual < hs_req->req.length) { in dwc2_hsotg_complete_in()
2749 dev_dbg(hsotg->dev, "%s trying more for req...\n", __func__); in dwc2_hsotg_complete_in()
2755 if (hs_ep->send_zlp) { in dwc2_hsotg_complete_in()
2756 hs_ep->send_zlp = 0; in dwc2_hsotg_complete_in()
2764 if (hs_ep->index == 0 && hsotg->ep0_state == DWC2_EP0_DATA_IN) { in dwc2_hsotg_complete_in()
2771 if (!using_desc_dma(hsotg) && hs_ep->isochronous) { in dwc2_hsotg_complete_in()
2772 hs_req->req.frame_number = hs_ep->target_frame; in dwc2_hsotg_complete_in()
2780 * dwc2_gadget_read_ep_interrupts - reads interrupts for given ep
2783 * @dir_in: Endpoint direction (1 = IN, 0 = OUT).
2808 * dwc2_gadget_handle_ep_disabled - handle DXEPINT_EPDISBLD
2817 * For ISOC-OUT endpoints, completes expired requests. If there are remaining
2822 struct dwc2_hsotg *hsotg = hs_ep->parent; in dwc2_gadget_handle_ep_disabled()
2824 unsigned char idx = hs_ep->index; in dwc2_gadget_handle_ep_disabled()
2825 int dir_in = hs_ep->dir_in; in dwc2_gadget_handle_ep_disabled()
2829 dev_dbg(hsotg->dev, "%s: EPDisbld\n", __func__); in dwc2_gadget_handle_ep_disabled()
2834 dwc2_hsotg_txfifo_flush(hsotg, hs_ep->fifo_index); in dwc2_gadget_handle_ep_disabled()
2850 if (!hs_ep->isochronous) in dwc2_gadget_handle_ep_disabled()
2853 if (list_empty(&hs_ep->queue)) { in dwc2_gadget_handle_ep_disabled()
2854 dev_dbg(hsotg->dev, "%s: complete_ep 0x%p, ep->queue empty!\n", in dwc2_gadget_handle_ep_disabled()
2862 hs_req->req.frame_number = hs_ep->target_frame; in dwc2_gadget_handle_ep_disabled()
2863 hs_req->req.actual = 0; in dwc2_gadget_handle_ep_disabled()
2865 -ENODATA); in dwc2_gadget_handle_ep_disabled()
2869 hsotg->frame_number = dwc2_hsotg_read_frameno(hsotg); in dwc2_gadget_handle_ep_disabled()
2874 * dwc2_gadget_handle_out_token_ep_disabled - handle DXEPINT_OUTTKNEPDIS
2877 * This is the starting point for ISOC-OUT transfer, synchronization done with
2881 * HW generates OUTTKNEPDIS - out token is received while EP is disabled. Upon
2886 struct dwc2_hsotg *hsotg = ep->parent; in dwc2_gadget_handle_out_token_ep_disabled()
2888 int dir_in = ep->dir_in; in dwc2_gadget_handle_out_token_ep_disabled()
2890 if (dir_in || !ep->isochronous) in dwc2_gadget_handle_out_token_ep_disabled()
2894 if (ep->target_frame == TARGET_FRAME_INITIAL) { in dwc2_gadget_handle_out_token_ep_disabled()
2896 ep->target_frame = hsotg->frame_number; in dwc2_gadget_handle_out_token_ep_disabled()
2902 if (ep->target_frame == TARGET_FRAME_INITIAL) { in dwc2_gadget_handle_out_token_ep_disabled()
2905 ep->target_frame = hsotg->frame_number; in dwc2_gadget_handle_out_token_ep_disabled()
2906 if (ep->interval > 1) { in dwc2_gadget_handle_out_token_ep_disabled()
2907 ctrl = dwc2_readl(hsotg, DOEPCTL(ep->index)); in dwc2_gadget_handle_out_token_ep_disabled()
2908 if (ep->target_frame & 0x1) in dwc2_gadget_handle_out_token_ep_disabled()
2913 dwc2_writel(hsotg, ctrl, DOEPCTL(ep->index)); in dwc2_gadget_handle_out_token_ep_disabled()
2920 hs_req->req.frame_number = ep->target_frame; in dwc2_gadget_handle_out_token_ep_disabled()
2921 hs_req->req.actual = 0; in dwc2_gadget_handle_out_token_ep_disabled()
2922 dwc2_hsotg_complete_request(hsotg, ep, hs_req, -ENODATA); in dwc2_gadget_handle_out_token_ep_disabled()
2927 hsotg->frame_number = dwc2_hsotg_read_frameno(hsotg); in dwc2_gadget_handle_out_token_ep_disabled()
2930 if (!ep->req) in dwc2_gadget_handle_out_token_ep_disabled()
2939 * dwc2_gadget_handle_nak - handle NAK interrupt
2942 * This is the starting point for ISOC-IN transfer, synchronization done with
2947 * and 'NAK'. NAK interrupt for ISOC-IN means that token has arrived and ZLP was
2954 struct dwc2_hsotg *hsotg = hs_ep->parent; in dwc2_gadget_handle_nak()
2956 int dir_in = hs_ep->dir_in; in dwc2_gadget_handle_nak()
2959 if (!dir_in || !hs_ep->isochronous) in dwc2_gadget_handle_nak()
2962 if (hs_ep->target_frame == TARGET_FRAME_INITIAL) { in dwc2_gadget_handle_nak()
2965 hs_ep->target_frame = hsotg->frame_number; in dwc2_gadget_handle_nak()
2968 /* In service interval mode target_frame must in dwc2_gadget_handle_nak()
2971 if (hsotg->params.service_interval) { in dwc2_gadget_handle_nak()
2975 hs_ep->target_frame &= ~hs_ep->interval + 1; in dwc2_gadget_handle_nak()
2988 hs_ep->target_frame = hsotg->frame_number; in dwc2_gadget_handle_nak()
2989 if (hs_ep->interval > 1) { in dwc2_gadget_handle_nak()
2991 DIEPCTL(hs_ep->index)); in dwc2_gadget_handle_nak()
2992 if (hs_ep->target_frame & 0x1) in dwc2_gadget_handle_nak()
2997 dwc2_writel(hsotg, ctrl, DIEPCTL(hs_ep->index)); in dwc2_gadget_handle_nak()
3004 ctrl = dwc2_readl(hsotg, DIEPCTL(hs_ep->index)); in dwc2_gadget_handle_nak()
3008 dwc2_hsotg_txfifo_flush(hsotg, hs_ep->fifo_index); in dwc2_gadget_handle_nak()
3013 hs_req->req.frame_number = hs_ep->target_frame; in dwc2_gadget_handle_nak()
3014 hs_req->req.actual = 0; in dwc2_gadget_handle_nak()
3015 dwc2_hsotg_complete_request(hsotg, hs_ep, hs_req, -ENODATA); in dwc2_gadget_handle_nak()
3020 hsotg->frame_number = dwc2_hsotg_read_frameno(hsotg); in dwc2_gadget_handle_nak()
3023 if (!hs_ep->req) in dwc2_gadget_handle_nak()
3028 * dwc2_hsotg_epint - handle an in/out endpoint interrupt
3050 dev_err(hsotg->dev, "%s:Interrupt for unconfigured ep%d(%s)\n", in dwc2_hsotg_epint()
3055 dev_dbg(hsotg->dev, "%s: ep%d(%s) DxEPINT=0x%08x\n", in dwc2_hsotg_epint()
3068 if (using_desc_dma(hsotg) && idx == 0 && !hs_ep->dir_in && in dwc2_hsotg_epint()
3069 hsotg->ep0_state == DWC2_EP0_SETUP && !(ints & DXEPINT_SETUP)) in dwc2_hsotg_epint()
3073 dev_dbg(hsotg->dev, in dwc2_hsotg_epint()
3079 if (using_desc_dma(hsotg) && hs_ep->isochronous) { in dwc2_hsotg_epint()
3085 * if operating slave mode in dwc2_hsotg_epint()
3087 if (!hs_ep->isochronous || !(ints & DXEPINT_NAKINTRPT)) in dwc2_hsotg_epint()
3090 if (idx == 0 && !hs_ep->req) in dwc2_hsotg_epint()
3097 if (!hs_ep->isochronous || !(ints & DXEPINT_OUTTKNEPDIS)) in dwc2_hsotg_epint()
3112 dev_dbg(hsotg->dev, "%s: AHBErr\n", __func__); in dwc2_hsotg_epint()
3115 dev_dbg(hsotg->dev, "%s: Setup/Timeout\n", __func__); in dwc2_hsotg_epint()
3120 * setup packet. In non-DMA mode we'd get this in dwc2_hsotg_epint()
3133 dev_dbg(hsotg->dev, "%s: StsPhseRcvd\n", __func__); in dwc2_hsotg_epint()
3136 if (hsotg->ep0_state == DWC2_EP0_DATA_OUT) { in dwc2_hsotg_epint()
3139 if (!hsotg->delayed_status) in dwc2_hsotg_epint()
3157 dev_dbg(hsotg->dev, "%s: B2BSetup/INEPNakEff\n", __func__); in dwc2_hsotg_epint()
3160 dev_dbg(hsotg->dev, "%s: BNA interrupt\n", __func__); in dwc2_hsotg_epint()
3161 if (hs_ep->isochronous) in dwc2_hsotg_epint()
3165 if (dir_in && !hs_ep->isochronous) { in dwc2_hsotg_epint()
3168 dev_dbg(hsotg->dev, "%s: ep%d: INTknTXFEmpMsk\n", in dwc2_hsotg_epint()
3174 dev_warn(hsotg->dev, "%s: ep%d: INTknEP\n", in dwc2_hsotg_epint()
3179 if (hsotg->dedicated_fifos && in dwc2_hsotg_epint()
3181 dev_dbg(hsotg->dev, "%s: ep%d: TxFIFOEmpty\n", in dwc2_hsotg_epint()
3190 * dwc2_hsotg_irq_enumdone - Handle EnumDone interrupt (enumeration done)
3207 dev_dbg(hsotg->dev, "EnumDone (DSTS=0x%08x)\n", dsts); in dwc2_hsotg_irq_enumdone()
3219 hsotg->gadget.speed = USB_SPEED_FULL; in dwc2_hsotg_irq_enumdone()
3225 hsotg->gadget.speed = USB_SPEED_HIGH; in dwc2_hsotg_irq_enumdone()
3231 hsotg->gadget.speed = USB_SPEED_LOW; in dwc2_hsotg_irq_enumdone()
3241 dev_info(hsotg->dev, "new device is %s\n", in dwc2_hsotg_irq_enumdone()
3242 usb_speed_string(hsotg->gadget.speed)); in dwc2_hsotg_irq_enumdone()
3254 for (i = 1; i < hsotg->num_of_eps; i++) { in dwc2_hsotg_irq_enumdone()
3255 if (hsotg->eps_in[i]) in dwc2_hsotg_irq_enumdone()
3258 if (hsotg->eps_out[i]) in dwc2_hsotg_irq_enumdone()
3268 dev_dbg(hsotg->dev, "EP0: DIEPCTL0=0x%08x, DOEPCTL0=0x%08x\n", in dwc2_hsotg_irq_enumdone()
3274 * kill_all_requests - remove all requests from the endpoint's queue
3288 ep->req = NULL; in kill_all_requests()
3290 while (!list_empty(&ep->queue)) { in kill_all_requests()
3296 if (!hsotg->dedicated_fifos) in kill_all_requests()
3298 size = (dwc2_readl(hsotg, DTXFSTS(ep->fifo_index)) & 0xffff) * 4; in kill_all_requests()
3299 if (size < ep->fifo_size) in kill_all_requests()
3300 dwc2_hsotg_txfifo_flush(hsotg, ep->fifo_index); in kill_all_requests()
3304 * dwc2_hsotg_disconnect - disconnect service
3315 if (!hsotg->connected) in dwc2_hsotg_disconnect()
3318 hsotg->connected = 0; in dwc2_hsotg_disconnect()
3319 hsotg->test_mode = 0; in dwc2_hsotg_disconnect()
3322 for (ep = 0; ep < hsotg->num_of_eps; ep++) { in dwc2_hsotg_disconnect()
3323 if (hsotg->eps_in[ep]) in dwc2_hsotg_disconnect()
3324 kill_all_requests(hsotg, hsotg->eps_in[ep], in dwc2_hsotg_disconnect()
3325 -ESHUTDOWN); in dwc2_hsotg_disconnect()
3326 if (hsotg->eps_out[ep]) in dwc2_hsotg_disconnect()
3327 kill_all_requests(hsotg, hsotg->eps_out[ep], in dwc2_hsotg_disconnect()
3328 -ESHUTDOWN); in dwc2_hsotg_disconnect()
3332 hsotg->lx_state = DWC2_L3; in dwc2_hsotg_disconnect()
3334 usb_gadget_set_state(&hsotg->gadget, USB_STATE_NOTATTACHED); in dwc2_hsotg_disconnect()
3338 * dwc2_hsotg_irq_fifoempty - TX FIFO empty interrupt handler
3348 for (epno = 0; epno < hsotg->num_of_eps; epno++) { in dwc2_hsotg_irq_fifoempty()
3354 if (!ep->dir_in) in dwc2_hsotg_irq_fifoempty()
3357 if ((periodic && !ep->periodic) || in dwc2_hsotg_irq_fifoempty()
3358 (!periodic && ep->periodic)) in dwc2_hsotg_irq_fifoempty()
3374 * dwc2_hsotg_core_init_disconnected - issue softreset to the core
3390 kill_all_requests(hsotg, hsotg->eps_out[0], -ECONNRESET); in dwc2_hsotg_core_init_disconnected()
3397 for (ep = 1; ep < hsotg->num_of_eps; ep++) { in dwc2_hsotg_core_init_disconnected()
3398 if (hsotg->eps_in[ep]) in dwc2_hsotg_core_init_disconnected()
3399 dwc2_hsotg_ep_disable(&hsotg->eps_in[ep]->ep); in dwc2_hsotg_core_init_disconnected()
3400 if (hsotg->eps_out[ep]) in dwc2_hsotg_core_init_disconnected()
3401 dwc2_hsotg_ep_disable(&hsotg->eps_out[ep]->ep); in dwc2_hsotg_core_init_disconnected()
3428 switch (hsotg->params.speed) { in dwc2_hsotg_core_init_disconnected()
3433 if (hsotg->params.phy_type == DWC2_PHY_TYPE_PARAM_FS) in dwc2_hsotg_core_init_disconnected()
3442 if (hsotg->params.ipg_isoc_en) in dwc2_hsotg_core_init_disconnected()
3462 if (!hsotg->params.external_id_pin_ctl) in dwc2_hsotg_core_init_disconnected()
3469 hsotg->params.ahbcfg, in dwc2_hsotg_core_init_disconnected()
3472 /* Set DDMA mode support in the core if needed */ in dwc2_hsotg_core_init_disconnected()
3477 dwc2_writel(hsotg, ((hsotg->dedicated_fifos) ? in dwc2_hsotg_core_init_disconnected()
3484 * If INTknTXFEmpMsk is enabled, it's important to disable ep interrupts in dwc2_hsotg_core_init_disconnected()
3489 dwc2_writel(hsotg, ((hsotg->dedicated_fifos && !using_dma(hsotg)) ? in dwc2_hsotg_core_init_disconnected()
3496 * don't need XferCompl, we get that from RXFIFO in slave mode. In in dwc2_hsotg_core_init_disconnected()
3497 * DMA mode we may need this and StsPhseRcvd. in dwc2_hsotg_core_init_disconnected()
3511 /* Enable Service Interval mode if supported */ in dwc2_hsotg_core_init_disconnected()
3512 if (using_desc_dma(hsotg) && hsotg->params.service_interval) in dwc2_hsotg_core_init_disconnected()
3517 dev_dbg(hsotg->dev, "EP0: DIEPCTL0=0x%08x, DOEPCTL0=0x%08x\n", in dwc2_hsotg_core_init_disconnected()
3525 * Enable the RXFIFO when in slave mode, as this is how we collect in dwc2_hsotg_core_init_disconnected()
3526 * the data. In DMA mode, we get events from the FIFO but also in dwc2_hsotg_core_init_disconnected()
3542 dev_dbg(hsotg->dev, "DCTL=0x%08x\n", dwc2_readl(hsotg, DCTL)); in dwc2_hsotg_core_init_disconnected()
3553 dwc2_writel(hsotg, dwc2_hsotg_ep0_mps(hsotg->eps_out[0]->ep.maxpacket) | in dwc2_hsotg_core_init_disconnected()
3559 dwc2_writel(hsotg, dwc2_hsotg_ep0_mps(hsotg->eps_out[0]->ep.maxpacket) | in dwc2_hsotg_core_init_disconnected()
3572 if (using_desc_dma(hsotg) && hsotg->params.service_interval) in dwc2_hsotg_core_init_disconnected()
3575 /* must be at least 3 ms to allow the bus to see disconnect */ in dwc2_hsotg_core_init_disconnected()

3578 hsotg->lx_state = DWC2_L0; in dwc2_hsotg_core_init_disconnected()
3582 dev_dbg(hsotg->dev, "EP0: DIEPCTL0=0x%08x, DOEPCTL0=0x%08x\n", in dwc2_hsotg_core_init_disconnected()
3589 /* set the soft-disconnect bit */ in dwc2_hsotg_core_disconnect()
3595 /* remove the soft-disconnect and let's go */ in dwc2_hsotg_core_connect()
3596 if (!hsotg->role_sw || (dwc2_readl(hsotg, GOTGCTL) & GOTGCTL_BSESVLD)) in dwc2_hsotg_core_connect()
3601 * dwc2_gadget_handle_incomplete_isoc_in - handle incomplete ISO IN Interrupt.
3606 * - Corrupted IN Token for ISOC EP.
3607 * - Packet not complete in FIFO.
3610 * - Determine the EP
3611 * - Disable EP; when 'Endpoint Disabled' interrupt is received Flush FIFO
3620 dev_dbg(hsotg->dev, "Incomplete isoc in interrupt received:\n"); in dwc2_gadget_handle_incomplete_isoc_in()
3624 for (idx = 1; idx < hsotg->num_of_eps; idx++) { in dwc2_gadget_handle_incomplete_isoc_in()
3625 hs_ep = hsotg->eps_in[idx]; in dwc2_gadget_handle_incomplete_isoc_in()
3627 if ((BIT(idx) & ~daintmsk) || !hs_ep->isochronous) in dwc2_gadget_handle_incomplete_isoc_in()
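The incomplete-ISOC-IN handler walks endpoint indexes 1..num_of_eps and skips any endpoint whose DAINTMSK bit is not set or that is not isochronous; only the survivors are disabled and have their FIFO flushed. A standalone sketch of that skip test (the helper name and the way the result is reported are illustrative):

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

#define BIT(n) (1u << (n))

/* Illustrative: mirror the skip test used when scanning IN endpoints after
 * an incomplete-isochronous-IN interrupt. An endpoint is only handled when
 * its DAINTMSK bit is enabled and it is configured as isochronous. */
static bool handle_incomplete_isoc(unsigned int idx, uint32_t daintmsk,
                                   bool isochronous)
{
    if ((BIT(idx) & ~daintmsk) || !isochronous)
        return false;    /* masked or not isoc: skip */
    return true;         /* would go on to disable the EP and flush */
}

int main(void)
{
    uint32_t daintmsk = BIT(1) | BIT(3);    /* EP1 and EP3 IN unmasked */

    printf("%d %d %d\n",
           handle_incomplete_isoc(1, daintmsk, true),    /* 1 */
           handle_incomplete_isoc(2, daintmsk, true),    /* 0: masked */
           handle_incomplete_isoc(3, daintmsk, false));  /* 0: not isoc */
    return 0;
}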
3644 * dwc2_gadget_handle_incomplete_isoc_out - handle incomplete ISO OUT Interrupt
3649 * - Corrupted OUT Token for ISOC EP.
3650 * - Packet not complete in FIFO.
3653 * - Determine the EP
3654 * - Set DCTL_SGOUTNAK and unmask GOUTNAKEFF if target frame elapsed.
3665 dev_dbg(hsotg->dev, "%s: GINTSTS_INCOMPL_SOOUT\n", __func__); in dwc2_gadget_handle_incomplete_isoc_out()
3670 for (idx = 1; idx < hsotg->num_of_eps; idx++) { in dwc2_gadget_handle_incomplete_isoc_out()
3671 hs_ep = hsotg->eps_out[idx]; in dwc2_gadget_handle_incomplete_isoc_out()
3673 if ((BIT(idx) & ~daintmsk) || !hs_ep->isochronous) in dwc2_gadget_handle_incomplete_isoc_out()
3697 * dwc2_hsotg_irq - handle device interrupt
3711 spin_lock(&hsotg->lock); in dwc2_hsotg_irq()
3716 dev_dbg(hsotg->dev, "%s: %08x %08x (%08x) retry %d\n", in dwc2_hsotg_irq()
3722 dev_dbg(hsotg->dev, "%s: USBRstDet\n", __func__); in dwc2_hsotg_irq()
3727 if (hsotg->in_ppd && hsotg->lx_state == DWC2_L2) in dwc2_hsotg_irq()
3730 hsotg->lx_state = DWC2_L0; in dwc2_hsotg_irq()
3735 u32 connected = hsotg->connected; in dwc2_hsotg_irq()
3737 dev_dbg(hsotg->dev, "%s: USBRst\n", __func__); in dwc2_hsotg_irq()
3738 dev_dbg(hsotg->dev, "GNPTXSTS=%08x\n", in dwc2_hsotg_irq()
3769 dev_dbg(hsotg->dev, "%s: daint=%08x\n", __func__, daint); in dwc2_hsotg_irq()
3771 for (ep = 0; ep < hsotg->num_of_eps && daint_out; in dwc2_hsotg_irq()
3777 for (ep = 0; ep < hsotg->num_of_eps && daint_in; in dwc2_hsotg_irq()
3787 dev_dbg(hsotg->dev, "NPTxFEmp\n"); in dwc2_hsotg_irq()
3790 * Disable the interrupt to stop it happening again in dwc2_hsotg_irq()
3792 * it needs re-enabling in dwc2_hsotg_irq()
3800 dev_dbg(hsotg->dev, "PTxFEmp\n"); in dwc2_hsotg_irq()
3810 * note, since GINTSTS_RxFLvl doubles as FIFO-not-empty, in dwc2_hsotg_irq()
3819 dev_dbg(hsotg->dev, "GINTSTS_ErlySusp\n"); in dwc2_hsotg_irq()
3824 * these next two seem to crop up occasionally, causing the core in dwc2_hsotg_irq()
3843 dev_dbg(hsotg->dev, "GOUTNakEff triggered\n"); in dwc2_hsotg_irq()
3844 for (idx = 1; idx < hsotg->num_of_eps; idx++) { in dwc2_hsotg_irq()
3845 hs_ep = hsotg->eps_out[idx]; in dwc2_hsotg_irq()
3853 if ((epctrl & DXEPCTL_EPENA) && hs_ep->isochronous) { in dwc2_hsotg_irq()
3860 /* Non-ISOC EPs */ in dwc2_hsotg_irq()
3861 if (hs_ep->halted) { in dwc2_hsotg_irq()
3874 dev_info(hsotg->dev, "GINNakEff triggered\n"); in dwc2_hsotg_irq()
3892 if (gintsts & IRQ_RETRY_MASK && --retry_count > 0) in dwc2_hsotg_irq()
3896 if (hsotg->params.service_interval) in dwc2_hsotg_irq()
3899 spin_unlock(&hsotg->lock); in dwc2_hsotg_irq()
3910 epctrl_reg = hs_ep->dir_in ? DIEPCTL(hs_ep->index) : in dwc2_hsotg_ep_stop_xfr()
3911 DOEPCTL(hs_ep->index); in dwc2_hsotg_ep_stop_xfr()
3912 epint_reg = hs_ep->dir_in ? DIEPINT(hs_ep->index) : in dwc2_hsotg_ep_stop_xfr()
3913 DOEPINT(hs_ep->index); in dwc2_hsotg_ep_stop_xfr()
3915 dev_dbg(hsotg->dev, "%s: stopping transfer on %s\n", __func__, in dwc2_hsotg_ep_stop_xfr()
3916 hs_ep->name); in dwc2_hsotg_ep_stop_xfr()
3918 if (hs_ep->dir_in) { in dwc2_hsotg_ep_stop_xfr()
3919 if (hsotg->dedicated_fifos || hs_ep->periodic) { in dwc2_hsotg_ep_stop_xfr()
3924 dev_warn(hsotg->dev, in dwc2_hsotg_ep_stop_xfr()
3932 dev_warn(hsotg->dev, in dwc2_hsotg_ep_stop_xfr()
3947 dev_warn(hsotg->dev, "%s: timeout GINTSTS.RXFLVL\n", in dwc2_hsotg_ep_stop_xfr()
3961 dev_warn(hsotg->dev, "%s: timeout GINTSTS.GOUTNAKEFF\n", in dwc2_hsotg_ep_stop_xfr()
3965 /* Disable ep */ in dwc2_hsotg_ep_stop_xfr()
3970 dev_warn(hsotg->dev, in dwc2_hsotg_ep_stop_xfr()
3976 if (hs_ep->dir_in) { in dwc2_hsotg_ep_stop_xfr()
3979 if (hsotg->dedicated_fifos || hs_ep->periodic) in dwc2_hsotg_ep_stop_xfr()
3980 fifo_index = hs_ep->fifo_index; in dwc2_hsotg_ep_stop_xfr()
3988 if (!hsotg->dedicated_fifos && !hs_ep->periodic) in dwc2_hsotg_ep_stop_xfr()
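dwc2_hsotg_ep_stop_xfr() repeatedly uses the same idiom: request NAK or endpoint disable, then poll a status register until the corresponding effect bit (INEPNAKEFF, GOUTNAKEFF, EPDISBLD) appears, warning on timeout before moving on to the FIFO flush. A standalone sketch of that poll-with-timeout loop, with the register read supplied as a callback (the callback style and names are my own, not the driver's helpers):

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

/* Illustrative poll loop: spin until (*read_reg)() has 'bit' set, giving up
 * after 'timeout' iterations, similar in spirit to the wait-for-bit helpers
 * used in the stop-transfer path. */
static bool wait_bit_set(uint32_t (*read_reg)(void), uint32_t bit,
                         unsigned int timeout)
{
    while (timeout--) {
        if (read_reg() & bit)
            return true;
    }
    return false;
}

static uint32_t fake_status;
static uint32_t read_fake_status(void) { return fake_status; }

int main(void)
{
    fake_status = 0x40;    /* pretend a GOUTNAKEFF-style bit is already set */
    if (!wait_bit_set(read_fake_status, 0x40, 10000))
        printf("timeout waiting for status bit\n");
    else
        printf("status bit observed\n");
    return 0;
}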
3998 * dwc2_hsotg_ep_enable - enable the given endpoint
4008 struct dwc2_hsotg *hsotg = hs_ep->parent; in dwc2_hsotg_ep_enable()
4010 unsigned int index = hs_ep->index; in dwc2_hsotg_ep_enable()
4022 dev_dbg(hsotg->dev, in dwc2_hsotg_ep_enable()
4024 __func__, ep->name, desc->bEndpointAddress, desc->bmAttributes, in dwc2_hsotg_ep_enable()
4025 desc->wMaxPacketSize, desc->bInterval); in dwc2_hsotg_ep_enable()
4029 dev_err(hsotg->dev, "%s: called for EP 0\n", __func__); in dwc2_hsotg_ep_enable()
4030 return -EINVAL; in dwc2_hsotg_ep_enable()
4033 dir_in = (desc->bEndpointAddress & USB_ENDPOINT_DIR_MASK) ? 1 : 0; in dwc2_hsotg_ep_enable()
4034 if (dir_in != hs_ep->dir_in) { in dwc2_hsotg_ep_enable()
4035 dev_err(hsotg->dev, "%s: direction mismatch!\n", __func__); in dwc2_hsotg_ep_enable()
4036 return -EINVAL; in dwc2_hsotg_ep_enable()
4039 ep_type = desc->bmAttributes & USB_ENDPOINT_XFERTYPE_MASK; in dwc2_hsotg_ep_enable()
4045 dir_in && desc->bInterval > 10) { in dwc2_hsotg_ep_enable()
4046 dev_err(hsotg->dev, in dwc2_hsotg_ep_enable()
4048 return -EINVAL; in dwc2_hsotg_ep_enable()
4054 dev_err(hsotg->dev, in dwc2_hsotg_ep_enable()
4056 return -EINVAL; in dwc2_hsotg_ep_enable()
4064 dev_dbg(hsotg->dev, "%s: read DxEPCTL=0x%08x from 0x%08x\n", in dwc2_hsotg_ep_enable()
4072 /* Allocate DMA descriptor chain for non-ctrl endpoints */ in dwc2_hsotg_ep_enable()
4073 if (using_desc_dma(hsotg) && !hs_ep->desc_list) { in dwc2_hsotg_ep_enable()
4074 hs_ep->desc_list = dmam_alloc_coherent(hsotg->dev, in dwc2_hsotg_ep_enable()
4076 &hs_ep->desc_list_dma, GFP_ATOMIC); in dwc2_hsotg_ep_enable()
4077 if (!hs_ep->desc_list) { in dwc2_hsotg_ep_enable()
4078 ret = -ENOMEM; in dwc2_hsotg_ep_enable()
4083 spin_lock_irqsave(&hsotg->lock, flags); in dwc2_hsotg_ep_enable()
4095 dwc2_hsotg_set_ep_maxpacket(hsotg, hs_ep->index, mps, mc, dir_in); in dwc2_hsotg_ep_enable()
4097 /* default, set to non-periodic */ in dwc2_hsotg_ep_enable()
4098 hs_ep->isochronous = 0; in dwc2_hsotg_ep_enable()
4099 hs_ep->periodic = 0; in dwc2_hsotg_ep_enable()
4100 hs_ep->halted = 0; in dwc2_hsotg_ep_enable()
4101 hs_ep->wedged = 0; in dwc2_hsotg_ep_enable()
4102 hs_ep->interval = desc->bInterval; in dwc2_hsotg_ep_enable()
4108 hs_ep->isochronous = 1; in dwc2_hsotg_ep_enable()
4109 hs_ep->interval = 1 << (desc->bInterval - 1); in dwc2_hsotg_ep_enable()
4110 hs_ep->target_frame = TARGET_FRAME_INITIAL; in dwc2_hsotg_ep_enable()
4111 hs_ep->next_desc = 0; in dwc2_hsotg_ep_enable()
4112 hs_ep->compl_desc = 0; in dwc2_hsotg_ep_enable()
4114 hs_ep->periodic = 1; in dwc2_hsotg_ep_enable()
4132 hs_ep->periodic = 1; in dwc2_hsotg_ep_enable()
4134 if (hsotg->gadget.speed == USB_SPEED_HIGH) in dwc2_hsotg_ep_enable()
4135 hs_ep->interval = 1 << (desc->bInterval - 1); in dwc2_hsotg_ep_enable()
4147 * a unique tx-fifo even if it is non-periodic. in dwc2_hsotg_ep_enable()
4149 if (dir_in && hsotg->dedicated_fifos) { in dwc2_hsotg_ep_enable()
4154 size = hs_ep->ep.maxpacket * hs_ep->mc; in dwc2_hsotg_ep_enable()
4156 if (hsotg->fifo_map & (1 << i)) in dwc2_hsotg_ep_enable()
4169 dev_err(hsotg->dev, in dwc2_hsotg_ep_enable()
4171 ret = -ENOMEM; in dwc2_hsotg_ep_enable()
4175 hsotg->fifo_map |= 1 << fifo_index; in dwc2_hsotg_ep_enable()
4177 hs_ep->fifo_index = fifo_index; in dwc2_hsotg_ep_enable()
4178 hs_ep->fifo_size = fifo_size; in dwc2_hsotg_ep_enable()
4182 if (index && !hs_ep->isochronous) in dwc2_hsotg_ep_enable()
4185 /* WA for Full speed ISOC IN in DDMA mode. in dwc2_hsotg_ep_enable()
4191 if (hsotg->gadget.speed == USB_SPEED_FULL && in dwc2_hsotg_ep_enable()
4192 hs_ep->isochronous && dir_in) { in dwc2_hsotg_ep_enable()
4206 dev_dbg(hsotg->dev, "%s: write DxEPCTL=0x%08x\n", in dwc2_hsotg_ep_enable()
4210 dev_dbg(hsotg->dev, "%s: read DxEPCTL=0x%08x\n", in dwc2_hsotg_ep_enable()
4217 spin_unlock_irqrestore(&hsotg->lock, flags); in dwc2_hsotg_ep_enable()
4220 if (ret && using_desc_dma(hsotg) && hs_ep->desc_list) { in dwc2_hsotg_ep_enable()
4221 dmam_free_coherent(hsotg->dev, desc_num * in dwc2_hsotg_ep_enable()
4223 hs_ep->desc_list, hs_ep->desc_list_dma); in dwc2_hsotg_ep_enable()
4224 hs_ep->desc_list = NULL; in dwc2_hsotg_ep_enable()
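Two calculations in dwc2_hsotg_ep_enable() are easy to miss in the excerpt above: the high-speed isochronous/interrupt polling interval is 2^(bInterval-1) (micro)frames, and a dedicated TX FIFO is chosen by scanning the unclaimed FIFOs for one large enough to hold a maxpacket * mc burst, preferring the smallest such FIFO. A standalone sketch of the FIFO selection, with the per-FIFO depths passed in as a plain array (names and the array form are illustrative):

#include <stdint.h>
#include <stdio.h>

/* Illustrative: pick the smallest free dedicated TX FIFO that can hold one
 * maxpacket * mc burst, mirroring the selection loop in the endpoint-enable
 * path. fifo_bytes[] stands in for the per-FIFO depth the driver reads from
 * hardware; index 0 is unused (FIFO numbering is 1..n). */
static unsigned int pick_tx_fifo(const uint32_t *fifo_bytes,
                                 unsigned int fifo_count,
                                 uint32_t fifo_map, uint32_t need)
{
    unsigned int best_index = 0;
    uint32_t best_size = UINT32_MAX;

    for (unsigned int i = 1; i <= fifo_count; i++) {
        if (fifo_map & (1u << i))          /* already claimed */
            continue;
        if (fifo_bytes[i] < need)          /* too small for this EP */
            continue;
        if (fifo_bytes[i] < best_size) {   /* smallest fit wins */
            best_size = fifo_bytes[i];
            best_index = i;
        }
    }
    return best_index;    /* 0 means no suitable FIFO was found */
}

int main(void)
{
    const uint32_t fifo_bytes[] = { 0, 256, 1024, 512, 3072 };

    /* FIFO 1 is already mapped; a 512-byte endpoint should get FIFO 3
     * (512 bytes), the smallest free FIFO that still fits. */
    printf("chose FIFO %u\n", pick_tx_fifo(fifo_bytes, 4, 1u << 1, 512));
    return 0;
}

Choosing the smallest acceptable FIFO keeps the larger ones free for endpoints that genuinely need them.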
4231 * dwc2_hsotg_ep_disable - disable given endpoint
4232 * @ep: The endpoint to disable.
4237 struct dwc2_hsotg *hsotg = hs_ep->parent; in dwc2_hsotg_ep_disable()
4238 int dir_in = hs_ep->dir_in; in dwc2_hsotg_ep_disable()
4239 int index = hs_ep->index; in dwc2_hsotg_ep_disable()
4243 dev_dbg(hsotg->dev, "%s(ep %p)\n", __func__, ep); in dwc2_hsotg_ep_disable()
4245 if (ep == &hsotg->eps_out[0]->ep) { in dwc2_hsotg_ep_disable()
4246 dev_err(hsotg->dev, "%s: called for ep0\n", __func__); in dwc2_hsotg_ep_disable()
4247 return -EINVAL; in dwc2_hsotg_ep_disable()
4250 if (hsotg->op_state != OTG_STATE_B_PERIPHERAL) { in dwc2_hsotg_ep_disable()
4251 dev_err(hsotg->dev, "%s: called in host mode?\n", __func__); in dwc2_hsotg_ep_disable()
4252 return -EINVAL; in dwc2_hsotg_ep_disable()
4266 dev_dbg(hsotg->dev, "%s: DxEPCTL=0x%08x\n", __func__, ctrl); in dwc2_hsotg_ep_disable()
4269 /* disable endpoint interrupts */ in dwc2_hsotg_ep_disable()
4270 dwc2_hsotg_ctrl_epint(hsotg, hs_ep->index, hs_ep->dir_in, 0); in dwc2_hsotg_ep_disable()
4273 kill_all_requests(hsotg, hs_ep, -ESHUTDOWN); in dwc2_hsotg_ep_disable()
4275 hsotg->fifo_map &= ~(1 << hs_ep->fifo_index); in dwc2_hsotg_ep_disable()
4276 hs_ep->fifo_index = 0; in dwc2_hsotg_ep_disable()
4277 hs_ep->fifo_size = 0; in dwc2_hsotg_ep_disable()
4285 struct dwc2_hsotg *hsotg = hs_ep->parent; in dwc2_hsotg_ep_disable_lock()
4289 spin_lock_irqsave(&hsotg->lock, flags); in dwc2_hsotg_ep_disable_lock()
4291 spin_unlock_irqrestore(&hsotg->lock, flags); in dwc2_hsotg_ep_disable_lock()
4296 * on_list - check request is on the given endpoint
4304 list_for_each_entry_safe(req, treq, &ep->queue, queue) { in on_list()
4313 * dwc2_hsotg_ep_dequeue - dequeue given endpoint
4321 struct dwc2_hsotg *hs = hs_ep->parent; in dwc2_hsotg_ep_dequeue()
4324 dev_dbg(hs->dev, "ep_dequeue(%p,%p)\n", ep, req); in dwc2_hsotg_ep_dequeue()
4326 spin_lock_irqsave(&hs->lock, flags); in dwc2_hsotg_ep_dequeue()
4329 spin_unlock_irqrestore(&hs->lock, flags); in dwc2_hsotg_ep_dequeue()
4330 return -EINVAL; in dwc2_hsotg_ep_dequeue()
4334 if (req == &hs_ep->req->req) in dwc2_hsotg_ep_dequeue()
4337 dwc2_hsotg_complete_request(hs, hs_ep, hs_req, -ECONNRESET); in dwc2_hsotg_ep_dequeue()
4338 spin_unlock_irqrestore(&hs->lock, flags); in dwc2_hsotg_ep_dequeue()
4344 * dwc2_gadget_ep_set_wedge - set wedge on a given endpoint
4351 struct dwc2_hsotg *hs = hs_ep->parent; in dwc2_gadget_ep_set_wedge()
4356 spin_lock_irqsave(&hs->lock, flags); in dwc2_gadget_ep_set_wedge()
4357 hs_ep->wedged = 1; in dwc2_gadget_ep_set_wedge()
4359 spin_unlock_irqrestore(&hs->lock, flags); in dwc2_gadget_ep_set_wedge()
4365 * dwc2_hsotg_ep_sethalt - set halt on a given endpoint
4368 * @now: If true, stall the endpoint now. Otherwise return -EAGAIN if
4377 struct dwc2_hsotg *hs = hs_ep->parent; in dwc2_hsotg_ep_sethalt()
4378 int index = hs_ep->index; in dwc2_hsotg_ep_sethalt()
4383 dev_info(hs->dev, "%s(ep %p %s, %d)\n", __func__, ep, ep->name, value); in dwc2_hsotg_ep_sethalt()
4389 dev_warn(hs->dev, in dwc2_hsotg_ep_sethalt()
4394 if (hs_ep->isochronous) { in dwc2_hsotg_ep_sethalt()
4395 dev_err(hs->dev, "%s is Isochronous Endpoint\n", ep->name); in dwc2_hsotg_ep_sethalt()
4396 return -EINVAL; in dwc2_hsotg_ep_sethalt()
4399 if (!now && value && !list_empty(&hs_ep->queue)) { in dwc2_hsotg_ep_sethalt()
4400 dev_dbg(hs->dev, "%s request is pending, cannot halt\n", in dwc2_hsotg_ep_sethalt()
4401 ep->name); in dwc2_hsotg_ep_sethalt()
4402 return -EAGAIN; in dwc2_hsotg_ep_sethalt()
4405 if (hs_ep->dir_in) { in dwc2_hsotg_ep_sethalt()
4415 hs_ep->wedged = 0; in dwc2_hsotg_ep_sethalt()
4435 hs_ep->wedged = 0; in dwc2_hsotg_ep_sethalt()
4444 hs_ep->halted = value; in dwc2_hsotg_ep_sethalt()
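For a non-EP0 IN endpoint, setting the halt raises the STALL bit (and EPDIS if the endpoint is currently enabled), while clearing it drops STALL, clears the wedge flag, and, as I read the clear path, re-arms DATA0 via SETD0PID for bulk and interrupt endpoints. A standalone sketch of that bit manipulation using placeholder bit positions (the DEMO_* values are illustrative, not the real DXEPCTL layout):

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

/* Placeholder bit positions, purely for illustration. */
#define DEMO_STALL     (1u << 21)
#define DEMO_SETD0PID  (1u << 28)
#define DEMO_EPDIS     (1u << 30)
#define DEMO_EPENA     (1u << 31)

/* Illustrative: transform an IN endpoint control value for set/clear halt. */
static uint32_t apply_halt(uint32_t epctl, bool set, bool is_bulk_or_int)
{
    if (set) {
        epctl |= DEMO_STALL;
        if (epctl & DEMO_EPENA)        /* enabled EP must also be disabled */
            epctl |= DEMO_EPDIS;
    } else {
        epctl &= ~DEMO_STALL;
        if (is_bulk_or_int)            /* restart the data toggle at DATA0 */
            epctl |= DEMO_SETD0PID;
    }
    return epctl;
}

int main(void)
{
    uint32_t ctl = DEMO_EPENA;

    ctl = apply_halt(ctl, true, true);
    printf("halted:  0x%08x\n", ctl);
    ctl = apply_halt(ctl, false, true);
    printf("cleared: 0x%08x\n", ctl);
    return 0;
}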
4449 * dwc2_hsotg_ep_sethalt_lock - set halt on a given endpoint with lock held
4456 struct dwc2_hsotg *hs = hs_ep->parent; in dwc2_hsotg_ep_sethalt_lock()
4460 spin_lock_irqsave(&hs->lock, flags); in dwc2_hsotg_ep_sethalt_lock()
4462 spin_unlock_irqrestore(&hs->lock, flags); in dwc2_hsotg_ep_sethalt_lock()
4469 .disable = dwc2_hsotg_ep_disable_lock,
4480 * dwc2_hsotg_init - initialize the usb core
4502 dev_dbg(hsotg->dev, "GRXFSIZ=0x%08x, GNPTXFSIZ=0x%08x\n", in dwc2_hsotg_init()
4513 * dwc2_hsotg_udc_start - prepare the udc for work
4529 return -ENODEV; in dwc2_hsotg_udc_start()
4533 dev_err(hsotg->dev, "%s: no driver\n", __func__); in dwc2_hsotg_udc_start()
4534 return -EINVAL; in dwc2_hsotg_udc_start()
4537 if (driver->max_speed < USB_SPEED_FULL) in dwc2_hsotg_udc_start()
4538 dev_err(hsotg->dev, "%s: bad speed\n", __func__); in dwc2_hsotg_udc_start()
4540 if (!driver->setup) { in dwc2_hsotg_udc_start()
4541 dev_err(hsotg->dev, "%s: missing entry points\n", __func__); in dwc2_hsotg_udc_start()
4542 return -EINVAL; in dwc2_hsotg_udc_start()
4545 WARN_ON(hsotg->driver); in dwc2_hsotg_udc_start()
4547 hsotg->driver = driver; in dwc2_hsotg_udc_start()
4548 hsotg->gadget.dev.of_node = hsotg->dev->of_node; in dwc2_hsotg_udc_start()
4549 hsotg->gadget.speed = USB_SPEED_UNKNOWN; in dwc2_hsotg_udc_start()
4551 if (hsotg->dr_mode == USB_DR_MODE_PERIPHERAL) { in dwc2_hsotg_udc_start()
4557 if (!IS_ERR_OR_NULL(hsotg->uphy)) in dwc2_hsotg_udc_start()
4558 otg_set_peripheral(hsotg->uphy->otg, &hsotg->gadget); in dwc2_hsotg_udc_start()
4560 spin_lock_irqsave(&hsotg->lock, flags); in dwc2_hsotg_udc_start()
4566 hsotg->enabled = 0; in dwc2_hsotg_udc_start()
4567 spin_unlock_irqrestore(&hsotg->lock, flags); in dwc2_hsotg_udc_start()
4569 gadget->sg_supported = using_desc_dma(hsotg); in dwc2_hsotg_udc_start()
4570 dev_info(hsotg->dev, "bound driver %s\n", driver->driver.name); in dwc2_hsotg_udc_start()
4575 hsotg->driver = NULL; in dwc2_hsotg_udc_start()
4580 * dwc2_hsotg_udc_stop - stop the udc
4592 return -ENODEV; in dwc2_hsotg_udc_stop()
4595 for (ep = 1; ep < hsotg->num_of_eps; ep++) { in dwc2_hsotg_udc_stop()
4596 if (hsotg->eps_in[ep]) in dwc2_hsotg_udc_stop()
4597 dwc2_hsotg_ep_disable_lock(&hsotg->eps_in[ep]->ep); in dwc2_hsotg_udc_stop()
4598 if (hsotg->eps_out[ep]) in dwc2_hsotg_udc_stop()
4599 dwc2_hsotg_ep_disable_lock(&hsotg->eps_out[ep]->ep); in dwc2_hsotg_udc_stop()
4602 spin_lock_irqsave(&hsotg->lock, flags); in dwc2_hsotg_udc_stop()
4604 hsotg->driver = NULL; in dwc2_hsotg_udc_stop()
4605 hsotg->gadget.speed = USB_SPEED_UNKNOWN; in dwc2_hsotg_udc_stop()
4606 hsotg->enabled = 0; in dwc2_hsotg_udc_stop()
4608 spin_unlock_irqrestore(&hsotg->lock, flags); in dwc2_hsotg_udc_stop()
4610 if (!IS_ERR_OR_NULL(hsotg->uphy)) in dwc2_hsotg_udc_stop()
4611 otg_set_peripheral(hsotg->uphy->otg, NULL); in dwc2_hsotg_udc_stop()
4613 if (hsotg->dr_mode == USB_DR_MODE_PERIPHERAL) in dwc2_hsotg_udc_stop()
4620 * dwc2_hsotg_gadget_getframe - read the frame number
4631 * dwc2_hsotg_set_selfpowered - set if device is self/bus powered
4633 * @is_selfpowered: Whether the device is self-powered
4643 spin_lock_irqsave(&hsotg->lock, flags); in dwc2_hsotg_set_selfpowered()
4644 gadget->is_selfpowered = !!is_selfpowered; in dwc2_hsotg_set_selfpowered()
4645 spin_unlock_irqrestore(&hsotg->lock, flags); in dwc2_hsotg_set_selfpowered()
4651 * dwc2_hsotg_pullup - connect/disconnect the USB PHY
4662 dev_dbg(hsotg->dev, "%s: is_on: %d op_state: %d\n", __func__, is_on, in dwc2_hsotg_pullup()
4663 hsotg->op_state); in dwc2_hsotg_pullup()
4665 /* Don't modify pullup state while in host mode */ in dwc2_hsotg_pullup()
4666 if (hsotg->op_state != OTG_STATE_B_PERIPHERAL) { in dwc2_hsotg_pullup()
4667 hsotg->enabled = is_on; in dwc2_hsotg_pullup()
4671 spin_lock_irqsave(&hsotg->lock, flags); in dwc2_hsotg_pullup()
4673 hsotg->enabled = 1; in dwc2_hsotg_pullup()
4675 /* Enable ACG feature in device mode, if supported */ in dwc2_hsotg_pullup()
4681 hsotg->enabled = 0; in dwc2_hsotg_pullup()
4684 hsotg->gadget.speed = USB_SPEED_UNKNOWN; in dwc2_hsotg_pullup()
4685 spin_unlock_irqrestore(&hsotg->lock, flags); in dwc2_hsotg_pullup()
4695 dev_dbg(hsotg->dev, "%s: is_active: %d\n", __func__, is_active); in dwc2_hsotg_vbus_session()
4696 spin_lock_irqsave(&hsotg->lock, flags); in dwc2_hsotg_vbus_session()
4700 * that state before being initialized / de-initialized in dwc2_hsotg_vbus_session()
4702 if (hsotg->lx_state == DWC2_L2 && hsotg->in_ppd) in dwc2_hsotg_vbus_session()
4710 hsotg->op_state = OTG_STATE_B_PERIPHERAL; in dwc2_hsotg_vbus_session()
4713 if (hsotg->enabled) { in dwc2_hsotg_vbus_session()
4714 /* Enable ACG feature in device mode, if supported */ in dwc2_hsotg_vbus_session()
4723 spin_unlock_irqrestore(&hsotg->lock, flags); in dwc2_hsotg_vbus_session()
4728 * dwc2_hsotg_vbus_draw - report bMaxPower field
4738 if (IS_ERR_OR_NULL(hsotg->uphy)) in dwc2_hsotg_vbus_draw()
4739 return -ENOTSUPP; in dwc2_hsotg_vbus_draw()
4740 return usb_phy_set_power(hsotg->uphy, mA); in dwc2_hsotg_vbus_draw()
4748 spin_lock_irqsave(&hsotg->lock, flags); in dwc2_gadget_set_speed()
4751 hsotg->params.speed = DWC2_SPEED_PARAM_HIGH; in dwc2_gadget_set_speed()
4754 hsotg->params.speed = DWC2_SPEED_PARAM_FULL; in dwc2_gadget_set_speed()
4757 hsotg->params.speed = DWC2_SPEED_PARAM_LOW; in dwc2_gadget_set_speed()
4760 dev_err(hsotg->dev, "invalid speed (%d)\n", speed); in dwc2_gadget_set_speed()
4762 spin_unlock_irqrestore(&hsotg->lock, flags); in dwc2_gadget_set_speed()
4777 * dwc2_hsotg_initep - initialise a single endpoint
4801 hs_ep->dir_in = dir_in; in dwc2_hsotg_initep()
4802 hs_ep->index = epnum; in dwc2_hsotg_initep()
4804 snprintf(hs_ep->name, sizeof(hs_ep->name), "ep%d%s", epnum, dir); in dwc2_hsotg_initep()
4806 INIT_LIST_HEAD(&hs_ep->queue); in dwc2_hsotg_initep()
4807 INIT_LIST_HEAD(&hs_ep->ep.ep_list); in dwc2_hsotg_initep()
4811 list_add_tail(&hs_ep->ep.ep_list, &hsotg->gadget.ep_list); in dwc2_hsotg_initep()
4813 hs_ep->parent = hsotg; in dwc2_hsotg_initep()
4814 hs_ep->ep.name = hs_ep->name; in dwc2_hsotg_initep()
4816 if (hsotg->params.speed == DWC2_SPEED_PARAM_LOW) in dwc2_hsotg_initep()
4817 usb_ep_set_maxpacket_limit(&hs_ep->ep, 8); in dwc2_hsotg_initep()
4819 usb_ep_set_maxpacket_limit(&hs_ep->ep, in dwc2_hsotg_initep()
4821 hs_ep->ep.ops = &dwc2_hsotg_ep_ops; in dwc2_hsotg_initep()
4824 hs_ep->ep.caps.type_control = true; in dwc2_hsotg_initep()
4826 if (hsotg->params.speed != DWC2_SPEED_PARAM_LOW) { in dwc2_hsotg_initep()
4827 hs_ep->ep.caps.type_iso = true; in dwc2_hsotg_initep()
4828 hs_ep->ep.caps.type_bulk = true; in dwc2_hsotg_initep()
4830 hs_ep->ep.caps.type_int = true; in dwc2_hsotg_initep()
4834 hs_ep->ep.caps.dir_in = true; in dwc2_hsotg_initep()
4836 hs_ep->ep.caps.dir_out = true; in dwc2_hsotg_initep()
4839 * if we're using dma, we need to set the next-endpoint pointer in dwc2_hsotg_initep()
4854 * dwc2_hsotg_hw_cfg - read HW configuration registers
4867 hsotg->num_of_eps = hsotg->hw_params.num_dev_ep; in dwc2_hsotg_hw_cfg()
4870 hsotg->num_of_eps++; in dwc2_hsotg_hw_cfg()
4872 hsotg->eps_in[0] = devm_kzalloc(hsotg->dev, in dwc2_hsotg_hw_cfg()
4875 if (!hsotg->eps_in[0]) in dwc2_hsotg_hw_cfg()
4876 return -ENOMEM; in dwc2_hsotg_hw_cfg()
4878 hsotg->eps_out[0] = hsotg->eps_in[0]; in dwc2_hsotg_hw_cfg()
4880 cfg = hsotg->hw_params.dev_ep_dirs; in dwc2_hsotg_hw_cfg()
4881 for (i = 1, cfg >>= 2; i < hsotg->num_of_eps; i++, cfg >>= 2) { in dwc2_hsotg_hw_cfg()
4885 hsotg->eps_in[i] = devm_kzalloc(hsotg->dev, in dwc2_hsotg_hw_cfg()
4887 if (!hsotg->eps_in[i]) in dwc2_hsotg_hw_cfg()
4888 return -ENOMEM; in dwc2_hsotg_hw_cfg()
4892 hsotg->eps_out[i] = devm_kzalloc(hsotg->dev, in dwc2_hsotg_hw_cfg()
4894 if (!hsotg->eps_out[i]) in dwc2_hsotg_hw_cfg()
4895 return -ENOMEM; in dwc2_hsotg_hw_cfg()
4899 hsotg->fifo_mem = hsotg->hw_params.total_fifo_size; in dwc2_hsotg_hw_cfg()
4900 hsotg->dedicated_fifos = hsotg->hw_params.en_multiple_tx_fifo; in dwc2_hsotg_hw_cfg()
4902 dev_info(hsotg->dev, "EPs: %d, %s fifos, %d entries in SPRAM\n", in dwc2_hsotg_hw_cfg()
4903 hsotg->num_of_eps, in dwc2_hsotg_hw_cfg()
4904 hsotg->dedicated_fifos ? "dedicated" : "shared", in dwc2_hsotg_hw_cfg()
4905 hsotg->fifo_mem); in dwc2_hsotg_hw_cfg()
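dwc2_hsotg_hw_cfg() decodes a packed direction word with two bits per endpoint, shifting past EP0 and then allocating an IN and/or OUT endpoint structure for each index depending on those bits. A standalone sketch of that decode; the 0 = both, 1 = IN-only, 2 = OUT-only reading of the two-bit field is my interpretation of the hardware configuration word, so treat it as an assumption:

#include <stdint.h>
#include <stdio.h>

int main(void)
{
    /* Example packed word: EP1 = 0 (both), EP2 = 1 (IN), EP3 = 2 (OUT).
     * EP0 occupies the lowest two bits and is always bidirectional. */
    uint32_t cfg = 0x00000090;
    unsigned int num_of_eps = 4;

    for (unsigned int i = 1, dirs = cfg >> 2; i < num_of_eps;
         i++, dirs >>= 2) {
        unsigned int ep_dir = dirs & 3;

        /* Bit 1 clear -> IN side exists; bit 0 clear -> OUT side exists. */
        printf("ep%u:%s%s\n", i,
               !(ep_dir & 2) ? " IN" : "",
               !(ep_dir & 1) ? " OUT" : "");
    }
    return 0;
}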
4910 * dwc2_hsotg_dump - dump state of the udc
4917 struct device *dev = hsotg->dev; in dwc2_hsotg_dump()
4933 for (idx = 1; idx < hsotg->num_of_eps; idx++) { in dwc2_hsotg_dump()
4940 for (idx = 0; idx < hsotg->num_of_eps; idx++) { in dwc2_hsotg_dump()
4942 "ep%d-in: EPCTL=0x%08x, SIZ=0x%08x, DMA=0x%08x\n", idx, in dwc2_hsotg_dump()
4949 "ep%d-out: EPCTL=0x%08x, SIZ=0x%08x, DMA=0x%08x\n", in dwc2_hsotg_dump()
4961 * dwc2_gadget_init - init function for gadget
4967 struct device *dev = hsotg->dev; in dwc2_gadget_init()
4973 hsotg->params.g_np_tx_fifo_size); in dwc2_gadget_init()
4974 dev_dbg(dev, "RXFIFO size: %d\n", hsotg->params.g_rx_fifo_size); in dwc2_gadget_init()
4976 switch (hsotg->params.speed) { in dwc2_gadget_init()
4978 hsotg->gadget.max_speed = USB_SPEED_LOW; in dwc2_gadget_init()
4981 hsotg->gadget.max_speed = USB_SPEED_FULL; in dwc2_gadget_init()
4984 hsotg->gadget.max_speed = USB_SPEED_HIGH; in dwc2_gadget_init()
4988 hsotg->gadget.ops = &dwc2_hsotg_gadget_ops; in dwc2_gadget_init()
4989 hsotg->gadget.name = dev_name(dev); in dwc2_gadget_init()
4990 hsotg->gadget.otg_caps = &hsotg->params.otg_caps; in dwc2_gadget_init()
4991 hsotg->remote_wakeup_allowed = 0; in dwc2_gadget_init()
4993 if (hsotg->params.lpm) in dwc2_gadget_init()
4994 hsotg->gadget.lpm_capable = true; in dwc2_gadget_init()
4996 if (hsotg->dr_mode == USB_DR_MODE_OTG) in dwc2_gadget_init()
4997 hsotg->gadget.is_otg = 1; in dwc2_gadget_init()
4998 else if (hsotg->dr_mode == USB_DR_MODE_PERIPHERAL) in dwc2_gadget_init()
4999 hsotg->op_state = OTG_STATE_B_PERIPHERAL; in dwc2_gadget_init()
5003 dev_err(hsotg->dev, "Hardware configuration failed: %d\n", ret); in dwc2_gadget_init()
5007 hsotg->ctrl_buff = devm_kzalloc(hsotg->dev, in dwc2_gadget_init()
5009 if (!hsotg->ctrl_buff) in dwc2_gadget_init()
5010 return -ENOMEM; in dwc2_gadget_init()
5012 hsotg->ep0_buff = devm_kzalloc(hsotg->dev, in dwc2_gadget_init()
5014 if (!hsotg->ep0_buff) in dwc2_gadget_init()
5015 return -ENOMEM; in dwc2_gadget_init()
5023 ret = devm_request_irq(hsotg->dev, hsotg->irq, dwc2_hsotg_irq, in dwc2_gadget_init()
5024 IRQF_SHARED, dev_name(hsotg->dev), hsotg); in dwc2_gadget_init()
5030 /* hsotg->num_of_eps holds number of EPs other than ep0 */ in dwc2_gadget_init()
5032 if (hsotg->num_of_eps == 0) { in dwc2_gadget_init()
5034 return -EINVAL; in dwc2_gadget_init()
5039 INIT_LIST_HEAD(&hsotg->gadget.ep_list); in dwc2_gadget_init()
5040 hsotg->gadget.ep0 = &hsotg->eps_out[0]->ep; in dwc2_gadget_init()
5044 hsotg->ctrl_req = dwc2_hsotg_ep_alloc_request(&hsotg->eps_out[0]->ep, in dwc2_gadget_init()
5046 if (!hsotg->ctrl_req) { in dwc2_gadget_init()
5048 return -ENOMEM; in dwc2_gadget_init()
5052 for (epnum = 0; epnum < hsotg->num_of_eps; epnum++) { in dwc2_gadget_init()
5053 if (hsotg->eps_in[epnum]) in dwc2_gadget_init()
5054 dwc2_hsotg_initep(hsotg, hsotg->eps_in[epnum], in dwc2_gadget_init()
5056 if (hsotg->eps_out[epnum]) in dwc2_gadget_init()
5057 dwc2_hsotg_initep(hsotg, hsotg->eps_out[epnum], in dwc2_gadget_init()
5067 * dwc2_hsotg_remove - remove function for hsotg driver
5073 usb_del_gadget_udc(&hsotg->gadget); in dwc2_hsotg_remove()
5074 dwc2_hsotg_ep_free_request(&hsotg->eps_out[0]->ep, hsotg->ctrl_req); in dwc2_hsotg_remove()
5083 if (hsotg->lx_state != DWC2_L0) in dwc2_hsotg_suspend()
5086 if (hsotg->driver) { in dwc2_hsotg_suspend()
5089 dev_info(hsotg->dev, "suspending usb gadget %s\n", in dwc2_hsotg_suspend()
5090 hsotg->driver->driver.name); in dwc2_hsotg_suspend()
5092 spin_lock_irqsave(&hsotg->lock, flags); in dwc2_hsotg_suspend()
5093 if (hsotg->enabled) in dwc2_hsotg_suspend()
5096 hsotg->gadget.speed = USB_SPEED_UNKNOWN; in dwc2_hsotg_suspend()
5097 spin_unlock_irqrestore(&hsotg->lock, flags); in dwc2_hsotg_suspend()
5099 for (ep = 1; ep < hsotg->num_of_eps; ep++) { in dwc2_hsotg_suspend()
5100 if (hsotg->eps_in[ep]) in dwc2_hsotg_suspend()
5101 dwc2_hsotg_ep_disable_lock(&hsotg->eps_in[ep]->ep); in dwc2_hsotg_suspend()
5102 if (hsotg->eps_out[ep]) in dwc2_hsotg_suspend()
5103 dwc2_hsotg_ep_disable_lock(&hsotg->eps_out[ep]->ep); in dwc2_hsotg_suspend()
5114 if (hsotg->lx_state == DWC2_L2) in dwc2_hsotg_resume()
5117 if (hsotg->driver) { in dwc2_hsotg_resume()
5118 dev_info(hsotg->dev, "resuming usb gadget %s\n", in dwc2_hsotg_resume()
5119 hsotg->driver->driver.name); in dwc2_hsotg_resume()
5121 spin_lock_irqsave(&hsotg->lock, flags); in dwc2_hsotg_resume()
5123 if (hsotg->enabled) { in dwc2_hsotg_resume()
5124 /* Enable ACG feature in device mode, if supported */ in dwc2_hsotg_resume()
5128 spin_unlock_irqrestore(&hsotg->lock, flags); in dwc2_hsotg_resume()
5135 * dwc2_backup_device_registers() - Backup controller device registers.
5146 dev_dbg(hsotg->dev, "%s\n", __func__); in dwc2_backup_device_registers()
5149 dr = &hsotg->dr_backup; in dwc2_backup_device_registers()
5151 dr->dcfg = dwc2_readl(hsotg, DCFG); in dwc2_backup_device_registers()
5152 dr->dctl = dwc2_readl(hsotg, DCTL); in dwc2_backup_device_registers()
5153 dr->daintmsk = dwc2_readl(hsotg, DAINTMSK); in dwc2_backup_device_registers()
5154 dr->diepmsk = dwc2_readl(hsotg, DIEPMSK); in dwc2_backup_device_registers()
5155 dr->doepmsk = dwc2_readl(hsotg, DOEPMSK); in dwc2_backup_device_registers()
5157 for (i = 0; i < hsotg->num_of_eps; i++) { in dwc2_backup_device_registers()
5159 dr->diepctl[i] = dwc2_readl(hsotg, DIEPCTL(i)); in dwc2_backup_device_registers()
5162 if (dr->diepctl[i] & DXEPCTL_DPID) in dwc2_backup_device_registers()
5163 dr->diepctl[i] |= DXEPCTL_SETD1PID; in dwc2_backup_device_registers()
5165 dr->diepctl[i] |= DXEPCTL_SETD0PID; in dwc2_backup_device_registers()
5167 dr->dieptsiz[i] = dwc2_readl(hsotg, DIEPTSIZ(i)); in dwc2_backup_device_registers()
5168 dr->diepdma[i] = dwc2_readl(hsotg, DIEPDMA(i)); in dwc2_backup_device_registers()
5171 dr->doepctl[i] = dwc2_readl(hsotg, DOEPCTL(i)); in dwc2_backup_device_registers()
5174 if (dr->doepctl[i] & DXEPCTL_DPID) in dwc2_backup_device_registers()
5175 dr->doepctl[i] |= DXEPCTL_SETD1PID; in dwc2_backup_device_registers()
5177 dr->doepctl[i] |= DXEPCTL_SETD0PID; in dwc2_backup_device_registers()
5179 dr->doeptsiz[i] = dwc2_readl(hsotg, DOEPTSIZ(i)); in dwc2_backup_device_registers()
5180 dr->doepdma[i] = dwc2_readl(hsotg, DOEPDMA(i)); in dwc2_backup_device_registers()
5181 dr->dtxfsiz[i] = dwc2_readl(hsotg, DPTXFSIZN(i)); in dwc2_backup_device_registers()
5183 dr->valid = true; in dwc2_backup_device_registers()
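When backing up the endpoint control registers, the saved value is adjusted so that the observed data PID turns into a SETD1PID or SETD0PID request; writing it back on restore then re-arms the data toggle correctly. A standalone sketch of that transformation with placeholder bit positions (the DEMO_* values are illustrative only):

#include <stdint.h>
#include <stdio.h>

/* Placeholder bit positions, for illustration only. */
#define DEMO_DPID      (1u << 16)    /* current data PID (DATA1 when set) */
#define DEMO_SETD0PID  (1u << 28)
#define DEMO_SETD1PID  (1u << 29)

/* Illustrative: fold the observed data PID into the value that will be
 * written back on restore, so the endpoint resumes with the right toggle. */
static uint32_t save_epctl_with_toggle(uint32_t epctl)
{
    if (epctl & DEMO_DPID)
        epctl |= DEMO_SETD1PID;
    else
        epctl |= DEMO_SETD0PID;
    return epctl;
}

int main(void)
{
    printf("0x%08x\n", save_epctl_with_toggle(DEMO_DPID));
    printf("0x%08x\n", save_epctl_with_toggle(0));
    return 0;
}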
5188 * dwc2_restore_device_registers() - Restore controller device registers.
5202 dev_dbg(hsotg->dev, "%s\n", __func__); in dwc2_restore_device_registers()
5205 dr = &hsotg->dr_backup; in dwc2_restore_device_registers()
5206 if (!dr->valid) { in dwc2_restore_device_registers()
5207 dev_err(hsotg->dev, "%s: no device registers to restore\n", in dwc2_restore_device_registers()
5209 return -EINVAL; in dwc2_restore_device_registers()
5211 dr->valid = false; in dwc2_restore_device_registers()
5214 dwc2_writel(hsotg, dr->dctl, DCTL); in dwc2_restore_device_registers()
5216 dwc2_writel(hsotg, dr->daintmsk, DAINTMSK); in dwc2_restore_device_registers()
5217 dwc2_writel(hsotg, dr->diepmsk, DIEPMSK); in dwc2_restore_device_registers()
5218 dwc2_writel(hsotg, dr->doepmsk, DOEPMSK); in dwc2_restore_device_registers()
5220 for (i = 0; i < hsotg->num_of_eps; i++) { in dwc2_restore_device_registers()
5222 dwc2_writel(hsotg, dr->dieptsiz[i], DIEPTSIZ(i)); in dwc2_restore_device_registers()
5223 dwc2_writel(hsotg, dr->diepdma[i], DIEPDMA(i)); in dwc2_restore_device_registers()
5224 dwc2_writel(hsotg, dr->doeptsiz[i], DOEPTSIZ(i)); in dwc2_restore_device_registers()
5225 /* WA for enabled EPx IN endpoints in DDMA mode. On entering in dwc2_restore_device_registers()
5226 * hibernation a wrong value is read and saved from DIEPDMAx; in dwc2_restore_device_registers()
5227 * as a result a BNA interrupt is asserted on hibernation exit in dwc2_restore_device_registers()
5231 (dr->diepctl[i] & DXEPCTL_EPENA)) in dwc2_restore_device_registers()
5232 dr->diepdma[i] = hsotg->eps_in[i]->desc_list_dma; in dwc2_restore_device_registers()
5233 dwc2_writel(hsotg, dr->dtxfsiz[i], DPTXFSIZN(i)); in dwc2_restore_device_registers()
5234 dwc2_writel(hsotg, dr->diepctl[i], DIEPCTL(i)); in dwc2_restore_device_registers()
5236 dwc2_writel(hsotg, dr->doeptsiz[i], DOEPTSIZ(i)); in dwc2_restore_device_registers()
5237 /* WA for enabled EPx OUT endpoints in DDMA mode. On entering in dwc2_restore_device_registers()
5238 * hibernation a wrong value is read and saved from DOEPDMAx; in dwc2_restore_device_registers()
5239 * as a result a BNA interrupt is asserted on hibernation exit in dwc2_restore_device_registers()
5243 (dr->doepctl[i] & DXEPCTL_EPENA)) in dwc2_restore_device_registers()
5244 dr->doepdma[i] = hsotg->eps_out[i]->desc_list_dma; in dwc2_restore_device_registers()
5245 dwc2_writel(hsotg, dr->doepdma[i], DOEPDMA(i)); in dwc2_restore_device_registers()
5246 dwc2_writel(hsotg, dr->doepctl[i], DOEPCTL(i)); in dwc2_restore_device_registers()
5253 * dwc2_gadget_init_lpm - Configure the core to support LPM in device mode
5262 if (!hsotg->params.lpm) in dwc2_gadget_init_lpm()
5266 val |= hsotg->params.hird_threshold_en ? GLPMCFG_HIRD_THRES_EN : 0; in dwc2_gadget_init_lpm()
5267 val |= hsotg->params.lpm_clock_gating ? GLPMCFG_ENBLSLPM : 0; in dwc2_gadget_init_lpm()
5268 val |= hsotg->params.hird_threshold << GLPMCFG_HIRD_THRES_SHIFT; in dwc2_gadget_init_lpm()
5269 val |= hsotg->params.besl ? GLPMCFG_ENBESL : 0; in dwc2_gadget_init_lpm()
5273 dev_dbg(hsotg->dev, "GLPMCFG=0x%08x\n", dwc2_readl(hsotg, GLPMCFG)); in dwc2_gadget_init_lpm()
5276 if (hsotg->params.service_interval) in dwc2_gadget_init_lpm()
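dwc2_gadget_init_lpm() builds the GLPMCFG value by OR-ing in one bit per enabled LPM feature and shifting the HIRD threshold into its field. A standalone sketch of assembling such a configuration word from a small parameter struct (the struct and the DEMO_* field positions are illustrative):

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

/* Placeholder field layout, for illustration only. */
#define DEMO_LPMCAP            (1u << 0)
#define DEMO_ENBLSLPM          (1u << 1)
#define DEMO_ENBESL            (1u << 2)
#define DEMO_HIRD_THRES_SHIFT  4
#define DEMO_HIRD_THRES_EN     (1u << 8)

struct demo_lpm_params {
    bool lpm;
    bool hird_threshold_en;
    bool lpm_clock_gating;
    bool besl;
    uint8_t hird_threshold;
};

/* Illustrative: each optional feature contributes a bit and the HIRD
 * threshold is shifted into its field, in the spirit of the GLPMCFG setup. */
static uint32_t build_lpm_cfg(const struct demo_lpm_params *p)
{
    uint32_t val = 0;

    if (!p->lpm)
        return 0;    /* LPM disabled: leave the register cleared */

    val |= DEMO_LPMCAP;
    val |= p->hird_threshold_en ? DEMO_HIRD_THRES_EN : 0;
    val |= p->lpm_clock_gating ? DEMO_ENBLSLPM : 0;
    val |= p->besl ? DEMO_ENBESL : 0;
    val |= (uint32_t)p->hird_threshold << DEMO_HIRD_THRES_SHIFT;
    return val;
}

int main(void)
{
    struct demo_lpm_params p = {
        .lpm = true, .hird_threshold_en = true,
        .lpm_clock_gating = false, .besl = true, .hird_threshold = 4,
    };

    printf("GLPMCFG-style value: 0x%08x\n", build_lpm_cfg(&p));
    return 0;
}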
5281 * dwc2_gadget_program_ref_clk - Program GREFCLK register in device mode
5291 val |= hsotg->params.ref_clk_per << GREFCLK_REFCLKPER_SHIFT; in dwc2_gadget_program_ref_clk()
5292 val |= hsotg->params.sof_cnt_wkup_alert << in dwc2_gadget_program_ref_clk()
5296 dev_dbg(hsotg->dev, "GREFCLK=0x%08x\n", dwc2_readl(hsotg, GREFCLK)); in dwc2_gadget_program_ref_clk()
5300 * dwc2_gadget_enter_hibernation() - Put controller in Hibernation.
5304 * Return non-zero if failed to enter hibernation.
5312 hsotg->lx_state = DWC2_L2; in dwc2_gadget_enter_hibernation()
5313 dev_dbg(hsotg->dev, "Start of hibernation completed\n"); in dwc2_gadget_enter_hibernation()
5316 dev_err(hsotg->dev, "%s: failed to backup global registers\n", in dwc2_gadget_enter_hibernation()
5322 dev_err(hsotg->dev, "%s: failed to backup device registers\n", in dwc2_gadget_enter_hibernation()
5332 /* Set flag to indicate that we are in hibernation */ in dwc2_gadget_enter_hibernation()
5333 hsotg->hibernated = 1; in dwc2_gadget_enter_hibernation()
5341 /* Unmask device mode interrupts in GPWRDN */ in dwc2_gadget_enter_hibernation()
5362 hsotg->gr_backup.gpwrdn = dwc2_readl(hsotg, GPWRDN); in dwc2_gadget_enter_hibernation()
5363 dev_dbg(hsotg->dev, "Hibernation completed\n"); in dwc2_gadget_enter_hibernation()
5370 * This function is for exiting from device mode hibernation by host-initiated
5371 * resume/reset and device-initiated remote wakeup.
5377 * Return non-zero if failed to exit from hibernation.
5389 gr = &hsotg->gr_backup; in dwc2_gadget_exit_hibernation()
5390 dr = &hsotg->dr_backup; in dwc2_gadget_exit_hibernation()
5392 if (!hsotg->hibernated) { in dwc2_gadget_exit_hibernation()
5393 dev_dbg(hsotg->dev, "Already exited from Hibernation\n"); in dwc2_gadget_exit_hibernation()
5396 dev_dbg(hsotg->dev, in dwc2_gadget_exit_hibernation()
5407 /* De-assert Restore */ in dwc2_gadget_exit_hibernation()
5420 dwc2_writel(hsotg, gr->gusbcfg, GUSBCFG); in dwc2_gadget_exit_hibernation()
5421 dwc2_writel(hsotg, dr->dcfg, DCFG); in dwc2_gadget_exit_hibernation()
5422 dwc2_writel(hsotg, dr->dctl, DCTL); in dwc2_gadget_exit_hibernation()
5428 /* De-assert Wakeup Logic */ in dwc2_gadget_exit_hibernation()
5436 dwc2_writel(hsotg, dr->dctl | DCTL_RMTWKUPSIG, DCTL); in dwc2_gadget_exit_hibernation()
5452 dev_err(hsotg->dev, "%s: failed to restore registers\n", in dwc2_gadget_exit_hibernation()
5460 dev_err(hsotg->dev, "%s: failed to restore device registers\n", in dwc2_gadget_exit_hibernation()
5472 hsotg->hibernated = 0; in dwc2_gadget_exit_hibernation()
5473 hsotg->lx_state = DWC2_L0; in dwc2_gadget_exit_hibernation()
5474 dev_dbg(hsotg->dev, "Hibernation recovery completes here\n"); in dwc2_gadget_exit_hibernation()
5480 * dwc2_gadget_enter_partial_power_down() - Put controller in partial
5485 * Return: non-zero if failed to enter device partial power down.
5487 * This function is for entering device mode partial power down.
5494 dev_dbg(hsotg->dev, "Entering device partial power down started.\n"); in dwc2_gadget_enter_partial_power_down()
5499 dev_err(hsotg->dev, "%s: failed to backup global registers\n", in dwc2_gadget_enter_partial_power_down()
5506 dev_err(hsotg->dev, "%s: failed to backup device registers\n", in dwc2_gadget_enter_partial_power_down()
5532 hsotg->in_ppd = 1; in dwc2_gadget_enter_partial_power_down()
5533 hsotg->lx_state = DWC2_L2; in dwc2_gadget_enter_partial_power_down()
5535 dev_dbg(hsotg->dev, "Entering device partial power down completed.\n"); in dwc2_gadget_enter_partial_power_down()
5541 * dwc2_gadget_exit_partial_power_down() - Exit controller from device partial
5547 * Return: non-zero if failed to exit device partial power down.
5549 * This function is for exiting from device mode partial power down.
5559 dr = &hsotg->dr_backup; in dwc2_gadget_exit_partial_power_down()
5561 dev_dbg(hsotg->dev, "Exiting device partial Power Down started.\n"); in dwc2_gadget_exit_partial_power_down()
5579 dev_err(hsotg->dev, "%s: failed to restore registers\n", in dwc2_gadget_exit_partial_power_down()
5584 dwc2_writel(hsotg, dr->dcfg, DCFG); in dwc2_gadget_exit_partial_power_down()
5588 dev_err(hsotg->dev, "%s: failed to restore device registers\n", in dwc2_gadget_exit_partial_power_down()
5594 /* Set the Power-On Programming done bit */ in dwc2_gadget_exit_partial_power_down()
5600 hsotg->in_ppd = 0; in dwc2_gadget_exit_partial_power_down()
5601 hsotg->lx_state = DWC2_L0; in dwc2_gadget_exit_partial_power_down()
5603 dev_dbg(hsotg->dev, "Exiting device partial Power Down completed.\n"); in dwc2_gadget_exit_partial_power_down()
5608 * dwc2_gadget_enter_clock_gating() - Put controller in clock gating.
5612 * Return: non-zero if failed to enter device clock gating.
5614 * This function is for entering device mode clock gating.
5620 dev_dbg(hsotg->dev, "Entering device clock gating.\n"); in dwc2_gadget_enter_clock_gating()
5634 hsotg->lx_state = DWC2_L2; in dwc2_gadget_enter_clock_gating()
5635 hsotg->bus_suspended = true; in dwc2_gadget_enter_clock_gating()
5639 * dwc2_gadget_exit_clock_gating() - Exit controller from device clock gating.
5644 * This function is for exiting from device mode clock gating.
5651 dev_dbg(hsotg->dev, "Exiting device clock gating.\n"); in dwc2_gadget_exit_clock_gating()
5674 hsotg->lx_state = DWC2_L0; in dwc2_gadget_exit_clock_gating()
5675 hsotg->bus_suspended = false; in dwc2_gadget_exit_clock_gating()