Lines Matching +full:hs +full:- +full:usb +full:- +full:if
1 // SPDX-License-Identifier: GPL-2.0
11 * S3C USB2.0 High-speed / OTG driver
19 #include <linux/dma-mapping.h>
27 #include <linux/usb/ch9.h>
28 #include <linux/usb/gadget.h>
29 #include <linux/usb/phy.h>
30 #include <linux/usb/composite.h>
65 if (dir_in) in index_to_ep()
66 return hsotg->eps_in[ep_index]; in index_to_ep()
68 return hsotg->eps_out[ep_index]; in index_to_ep()
75 * using_dma - return the DMA status of the driver.
78 * Return true if we're using DMA.
95 return hsotg->params.g_dma; in using_dma()
99 * using_desc_dma - return the descriptor DMA status of the driver.
102 * Return true if we're using descriptor DMA.
106 return hsotg->params.g_dma_desc; in using_desc_dma()
110 * dwc2_gadget_incr_frame_num - Increments the targeted frame number.
113 * This function will also check if the frame number overruns DSTS_SOFFN_LIMIT.
114 * If an overrun occurs it will wrap the value and set the frame_overrun flag.
118 struct dwc2_hsotg *hsotg = hs_ep->parent; in dwc2_gadget_incr_frame_num()
121 if (hsotg->gadget.speed != USB_SPEED_HIGH) in dwc2_gadget_incr_frame_num()
124 hs_ep->target_frame += hs_ep->interval; in dwc2_gadget_incr_frame_num()
125 if (hs_ep->target_frame > limit) { in dwc2_gadget_incr_frame_num()
126 hs_ep->frame_overrun = true; in dwc2_gadget_incr_frame_num()
127 hs_ep->target_frame &= limit; in dwc2_gadget_incr_frame_num()
129 hs_ep->frame_overrun = false; in dwc2_gadget_incr_frame_num()
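To make the wrap explicit: the target frame is advanced by one interval and masked back into the counter's range once it passes the limit, with frame_overrun recording that a wrap happened. A rough standalone sketch of that arithmetic (the 0x3fff limit and the struct are assumptions for illustration, not the driver's types):

#include <stdbool.h>
#include <stdint.h>

#define SOFFN_LIMIT 0x3fff              /* assumed 14-bit (micro)frame counter */

struct frame_state {
        uint32_t target_frame;
        uint32_t interval;
        bool frame_overrun;
};

/* Advance the target frame by one interval, wrapping at the counter limit. */
static void incr_frame_num(struct frame_state *fs)
{
        fs->target_frame += fs->interval;
        if (fs->target_frame > SOFFN_LIMIT) {
                fs->frame_overrun = true;
                fs->target_frame &= SOFFN_LIMIT;        /* wrap back into range */
        } else {
                fs->frame_overrun = false;
        }
}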
134 * dwc2_gadget_dec_frame_num_by_one - Decrements the targeted frame number
145 struct dwc2_hsotg *hsotg = hs_ep->parent; in dwc2_gadget_dec_frame_num_by_one()
148 if (hsotg->gadget.speed != USB_SPEED_HIGH) in dwc2_gadget_dec_frame_num_by_one()
151 if (hs_ep->target_frame) in dwc2_gadget_dec_frame_num_by_one()
152 hs_ep->target_frame -= 1; in dwc2_gadget_dec_frame_num_by_one()
154 hs_ep->target_frame = limit; in dwc2_gadget_dec_frame_num_by_one()
158 * dwc2_hsotg_en_gsint - enable one or more of the general interrupt
169 if (new_gsintmsk != gsintmsk) { in dwc2_hsotg_en_gsint()
170 dev_dbg(hsotg->dev, "gsintmsk now 0x%08x\n", new_gsintmsk); in dwc2_hsotg_en_gsint()
176 * dwc2_hsotg_disable_gsint - disable one or more of the general interrupt
187 if (new_gsintmsk != gsintmsk) in dwc2_hsotg_disable_gsint()
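Both helpers follow the usual read-modify-write pattern on a shared interrupt-mask register, writing back only when the mask actually changes. A minimal sketch of the pattern over a plain variable (register access, locking and the dwc2 accessors are omitted; the names here are made up):

#include <stdint.h>

static uint32_t int_mask;               /* stand-in for the GINTMSK register */

static void gsint_enable(uint32_t ints)
{
        uint32_t new_mask = int_mask | ints;    /* set the requested bits */

        if (new_mask != int_mask)               /* write back only on change */
                int_mask = new_mask;
}

static void gsint_disable(uint32_t ints)
{
        uint32_t new_mask = int_mask & ~ints;   /* clear the requested bits */

        if (new_mask != int_mask)
                int_mask = new_mask;
}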
192 * dwc2_hsotg_ctrl_epint - enable/disable an endpoint irq
195 * @dir_in: True if direction is in.
209 if (!dir_in) in dwc2_hsotg_ctrl_epint()
214 if (en) in dwc2_hsotg_ctrl_epint()
223 * dwc2_hsotg_tx_fifo_count - return count of TX FIFOs in device mode
229 if (hsotg->hw_params.en_multiple_tx_fifo) in dwc2_hsotg_tx_fifo_count()
231 return hsotg->hw_params.num_dev_in_eps; in dwc2_hsotg_tx_fifo_count()
234 return hsotg->hw_params.num_dev_perio_in_ep; in dwc2_hsotg_tx_fifo_count()
238 * dwc2_hsotg_tx_fifo_total_depth - return total FIFO depth available for
249 np_tx_fifo_size = min_t(u32, hsotg->hw_params.dev_nperio_tx_fifo_size, in dwc2_hsotg_tx_fifo_total_depth()
250 hsotg->params.g_np_tx_fifo_size); in dwc2_hsotg_tx_fifo_total_depth()
253 tx_addr_max = hsotg->hw_params.total_fifo_size; in dwc2_hsotg_tx_fifo_total_depth()
255 addr = hsotg->params.g_rx_fifo_size + np_tx_fifo_size; in dwc2_hsotg_tx_fifo_total_depth()
256 if (tx_addr_max <= addr) in dwc2_hsotg_tx_fifo_total_depth()
259 return tx_addr_max - addr; in dwc2_hsotg_tx_fifo_total_depth()
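The depth left for dedicated TX FIFOs is simply the total FIFO RAM minus what the RX FIFO and the non-periodic TX FIFO already occupy; for example, with an assumed 0x3000 words total, a 0x400-word RX FIFO and a 0x400-word non-periodic TX FIFO, 0x2800 words remain. A standalone sketch of that calculation (sizes in 32-bit words, values hypothetical):

#include <stdint.h>

/*
 * Words left for dedicated TX FIFOs once the RX FIFO and the
 * non-periodic TX FIFO have been placed. Returns 0 when the fixed
 * FIFOs already use up the whole FIFO RAM.
 */
static uint32_t tx_fifo_total_depth(uint32_t total_fifo_size,
                                    uint32_t rx_fifo_size,
                                    uint32_t np_tx_fifo_size)
{
        uint32_t addr = rx_fifo_size + np_tx_fifo_size;

        if (total_fifo_size <= addr)
                return 0;

        return total_fifo_size - addr;
}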
263 * dwc2_gadget_wkup_alert_handler - Handler for WKUP_ALERT interrupt
277 if (gintsts2 & GINTSTS2_WKUP_ALERT_INT) { in dwc2_gadget_wkup_alert_handler()
278 dev_dbg(hsotg->dev, "%s: Wkup_Alert_Int\n", __func__); in dwc2_gadget_wkup_alert_handler()
285 * dwc2_hsotg_tx_fifo_average_depth - returns average depth of device mode
299 if (!tx_fifo_count) in dwc2_hsotg_tx_fifo_average_depth()
306 * dwc2_hsotg_init_fifo - initialise non-periodic FIFOs
316 u32 *txfsz = hsotg->params.g_tx_fifo_size; in dwc2_hsotg_init_fifo()
318 /* Reset fifo map if not correctly cleared during previous session */ in dwc2_hsotg_init_fifo()
319 WARN_ON(hsotg->fifo_map); in dwc2_hsotg_init_fifo()
320 hsotg->fifo_map = 0; in dwc2_hsotg_init_fifo()
323 dwc2_writel(hsotg, hsotg->params.g_rx_fifo_size, GRXFSIZ); in dwc2_hsotg_init_fifo()
324 dwc2_writel(hsotg, (hsotg->params.g_rx_fifo_size << in dwc2_hsotg_init_fifo()
326 (hsotg->params.g_np_tx_fifo_size << FIFOSIZE_DEPTH_SHIFT), in dwc2_hsotg_init_fifo()
332 * that if the settings have been changed, then they are set to in dwc2_hsotg_init_fifo()
337 addr = hsotg->params.g_rx_fifo_size + hsotg->params.g_np_tx_fifo_size; in dwc2_hsotg_init_fifo()
345 if (!txfsz[ep]) in dwc2_hsotg_init_fifo()
349 WARN_ONCE(addr + txfsz[ep] > hsotg->fifo_mem, in dwc2_hsotg_init_fifo()
357 dwc2_writel(hsotg, hsotg->hw_params.total_fifo_size | in dwc2_hsotg_init_fifo()
373 if ((val & (GRSTCTL_TXFFLSH | GRSTCTL_RXFFLSH)) == 0) in dwc2_hsotg_init_fifo()
376 if (--timeout == 0) { in dwc2_hsotg_init_fifo()
377 dev_err(hsotg->dev, in dwc2_hsotg_init_fifo()
386 dev_dbg(hsotg->dev, "FIFOs reset, timeout at %d\n", timeout); in dwc2_hsotg_init_fifo()
390 * dwc2_hsotg_ep_alloc_request - allocate USB request structure
391 * @ep: USB endpoint to allocate request for.
394 * Allocate a new USB request structure appropriate for the specified endpoint
402 if (!req) in dwc2_hsotg_ep_alloc_request()
405 INIT_LIST_HEAD(&req->queue); in dwc2_hsotg_ep_alloc_request()
407 return &req->req; in dwc2_hsotg_ep_alloc_request()
411 * is_ep_periodic - return true if the endpoint is in periodic mode.
414 * Returns true if the endpoint is in periodic mode, meaning it is being
419 return hs_ep->periodic; in is_ep_periodic()
423 * dwc2_hsotg_unmap_dma - unmap the DMA memory being used for the request
435 struct usb_request *req = &hs_req->req; in dwc2_hsotg_unmap_dma()
437 usb_gadget_unmap_request(&hsotg->gadget, req, hs_ep->map_dir); in dwc2_hsotg_unmap_dma()
441 * dwc2_gadget_alloc_ctrl_desc_chains - allocate DMA descriptor chains
450 hsotg->setup_desc[0] = in dwc2_gadget_alloc_ctrl_desc_chains()
451 dmam_alloc_coherent(hsotg->dev, in dwc2_gadget_alloc_ctrl_desc_chains()
453 &hsotg->setup_desc_dma[0], in dwc2_gadget_alloc_ctrl_desc_chains()
455 if (!hsotg->setup_desc[0]) in dwc2_gadget_alloc_ctrl_desc_chains()
458 hsotg->setup_desc[1] = in dwc2_gadget_alloc_ctrl_desc_chains()
459 dmam_alloc_coherent(hsotg->dev, in dwc2_gadget_alloc_ctrl_desc_chains()
461 &hsotg->setup_desc_dma[1], in dwc2_gadget_alloc_ctrl_desc_chains()
463 if (!hsotg->setup_desc[1]) in dwc2_gadget_alloc_ctrl_desc_chains()
466 hsotg->ctrl_in_desc = in dwc2_gadget_alloc_ctrl_desc_chains()
467 dmam_alloc_coherent(hsotg->dev, in dwc2_gadget_alloc_ctrl_desc_chains()
469 &hsotg->ctrl_in_desc_dma, in dwc2_gadget_alloc_ctrl_desc_chains()
471 if (!hsotg->ctrl_in_desc) in dwc2_gadget_alloc_ctrl_desc_chains()
474 hsotg->ctrl_out_desc = in dwc2_gadget_alloc_ctrl_desc_chains()
475 dmam_alloc_coherent(hsotg->dev, in dwc2_gadget_alloc_ctrl_desc_chains()
477 &hsotg->ctrl_out_desc_dma, in dwc2_gadget_alloc_ctrl_desc_chains()
479 if (!hsotg->ctrl_out_desc) in dwc2_gadget_alloc_ctrl_desc_chains()
485 return -ENOMEM; in dwc2_gadget_alloc_ctrl_desc_chains()
489 * dwc2_hsotg_write_fifo - write packet Data to the TxFIFO
499 * The return value is zero if there is more space (or nothing was done)
500 * otherwise -ENOSPC is returned if the FIFO space was used up.
510 int buf_pos = hs_req->req.actual; in dwc2_hsotg_write_fifo()
511 int to_write = hs_ep->size_loaded; in dwc2_hsotg_write_fifo()
517 to_write -= (buf_pos - hs_ep->last_load); in dwc2_hsotg_write_fifo()
519 /* if there's nothing to write, get out early */ in dwc2_hsotg_write_fifo()
520 if (to_write == 0) in dwc2_hsotg_write_fifo()
523 if (periodic && !hsotg->dedicated_fifos) { in dwc2_hsotg_write_fifo()
524 u32 epsize = dwc2_readl(hsotg, DIEPTSIZ(hs_ep->index)); in dwc2_hsotg_write_fifo()
536 * if shared fifo, we cannot write anything until the in dwc2_hsotg_write_fifo()
539 if (hs_ep->fifo_load != 0) { in dwc2_hsotg_write_fifo()
541 return -ENOSPC; in dwc2_hsotg_write_fifo()
544 dev_dbg(hsotg->dev, "%s: left=%d, load=%d, fifo=%d, size %d\n", in dwc2_hsotg_write_fifo()
546 hs_ep->size_loaded, hs_ep->fifo_load, hs_ep->fifo_size); in dwc2_hsotg_write_fifo()
549 size_done = hs_ep->size_loaded - size_left; in dwc2_hsotg_write_fifo()
552 can_write = hs_ep->fifo_load - size_done; in dwc2_hsotg_write_fifo()
553 dev_dbg(hsotg->dev, "%s: => can_write1=%d\n", in dwc2_hsotg_write_fifo()
556 can_write = hs_ep->fifo_size - can_write; in dwc2_hsotg_write_fifo()
557 dev_dbg(hsotg->dev, "%s: => can_write2=%d\n", in dwc2_hsotg_write_fifo()
560 if (can_write <= 0) { in dwc2_hsotg_write_fifo()
562 return -ENOSPC; in dwc2_hsotg_write_fifo()
564 } else if (hsotg->dedicated_fifos && hs_ep->index != 0) { in dwc2_hsotg_write_fifo()
566 DTXFSTS(hs_ep->fifo_index)); in dwc2_hsotg_write_fifo()
571 if (GNPTXSTS_NP_TXQ_SPC_AVAIL_GET(gnptxsts) == 0) { in dwc2_hsotg_write_fifo()
572 dev_dbg(hsotg->dev, in dwc2_hsotg_write_fifo()
577 return -ENOSPC; in dwc2_hsotg_write_fifo()
584 max_transfer = hs_ep->ep.maxpacket * hs_ep->mc; in dwc2_hsotg_write_fifo()
586 dev_dbg(hsotg->dev, "%s: GNPTXSTS=%08x, can=%d, to=%d, max_transfer %d\n", in dwc2_hsotg_write_fifo()
590 * limit to 512 bytes of data, it seems at least on the non-periodic in dwc2_hsotg_write_fifo()
594 if (can_write > 512 && !periodic) in dwc2_hsotg_write_fifo()
598 * limit the write to one max-packet size worth of data, but allow in dwc2_hsotg_write_fifo()
602 if (to_write > max_transfer) { in dwc2_hsotg_write_fifo()
606 if (!hsotg->dedicated_fifos) in dwc2_hsotg_write_fifo()
612 /* see if we can write data */ in dwc2_hsotg_write_fifo()
614 if (to_write > can_write) { in dwc2_hsotg_write_fifo()
622 * Note, we do not currently check to see if we can ever in dwc2_hsotg_write_fifo()
626 if (pkt_round) in dwc2_hsotg_write_fifo()
627 to_write -= pkt_round; in dwc2_hsotg_write_fifo()
635 if (!hsotg->dedicated_fifos) in dwc2_hsotg_write_fifo()
641 dev_dbg(hsotg->dev, "write %d/%d, can_write %d, done %d\n", in dwc2_hsotg_write_fifo()
642 to_write, hs_req->req.length, can_write, buf_pos); in dwc2_hsotg_write_fifo()
644 if (to_write <= 0) in dwc2_hsotg_write_fifo()
645 return -ENOSPC; in dwc2_hsotg_write_fifo()
647 hs_req->req.actual = buf_pos + to_write; in dwc2_hsotg_write_fifo()
648 hs_ep->total_data += to_write; in dwc2_hsotg_write_fifo()
650 if (periodic) in dwc2_hsotg_write_fifo()
651 hs_ep->fifo_load += to_write; in dwc2_hsotg_write_fifo()
654 data = hs_req->req.buf + buf_pos; in dwc2_hsotg_write_fifo()
656 dwc2_writel_rep(hsotg, EPFIFO(hs_ep->index), data, to_write); in dwc2_hsotg_write_fifo()
658 return (to_write >= can_write) ? -ENOSPC : 0; in dwc2_hsotg_write_fifo()
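The space accounting above reduces to: bytes already pushed for this load (size_loaded - size_left) free up FIFO space that fifo_load still counts, and whatever does not fit is rounded down to whole packets so that no packet is split across two fills. A simplified sketch of that arithmetic alone (no registers; parameter names chosen for illustration):

#include <stdint.h>

/*
 * How many bytes to push into a TX FIFO this pass.
 * to_write:    bytes of the request still to send
 * size_loaded: bytes loaded for the current transfer
 * size_left:   bytes of that load not yet sent (from the size register)
 * fifo_load:   bytes already written into the FIFO for this load
 * fifo_size:   FIFO depth in bytes
 * max_packet:  wMaxPacketSize of the endpoint
 */
static int fifo_bytes_to_write(int to_write, int size_loaded, int size_left,
                               int fifo_load, int fifo_size, int max_packet)
{
        int size_done = size_loaded - size_left;
        int can_write = fifo_size - (fifo_load - size_done);

        if (can_write <= 0)
                return 0;                       /* no room at all right now */

        if (to_write > can_write) {
                /* round down to whole packets so no packet is split */
                to_write = can_write;
                to_write -= to_write % max_packet;
        }

        return to_write;
}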
662 * get_ep_limit - get the maximum data length for this endpoint
670 int index = hs_ep->index; in get_ep_limit()
674 if (index != 0) { in get_ep_limit()
679 if (hs_ep->dir_in) in get_ep_limit()
686 maxpkt--; in get_ep_limit()
687 maxsize--; in get_ep_limit()
690 * constrain by packet count if maxpkts*pktsize is greater in get_ep_limit()
694 if ((maxpkt * hs_ep->ep.maxpacket) < maxsize) in get_ep_limit()
695 maxsize = maxpkt * hs_ep->ep.maxpacket; in get_ep_limit()
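Put differently, the per-transfer limit is the smaller of the byte-count field's maximum and the packet-count field's maximum times wMaxPacketSize. A sketch with example field widths (19 bits of transfer size, 10 bits of packet count; the real widths come from the DxEPTSIZ layout and are an assumption here):

#include <stdint.h>

#define XFER_SIZE_BITS  19              /* example width of the byte counter */
#define PKT_CNT_BITS    10              /* example width of the packet counter */

static uint32_t ep_transfer_limit(uint32_t max_packet)
{
        uint32_t maxsize = (1u << XFER_SIZE_BITS) - 1;
        uint32_t maxpkt  = (1u << PKT_CNT_BITS) - 1;

        /* the packet counter may run out before the byte counter does */
        if (maxpkt * max_packet < maxsize)
                maxsize = maxpkt * max_packet;

        return maxsize;
}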
701 * dwc2_hsotg_read_frameno - read current frame number
718 * dwc2_gadget_get_chain_limit - get the maximum data payload value of the
728 const struct usb_endpoint_descriptor *ep_desc = hs_ep->ep.desc; in dwc2_gadget_get_chain_limit()
729 int is_isoc = hs_ep->isochronous; in dwc2_gadget_get_chain_limit()
731 u32 mps = hs_ep->ep.maxpacket; in dwc2_gadget_get_chain_limit()
732 int dir_in = hs_ep->dir_in; in dwc2_gadget_get_chain_limit()
734 if (is_isoc) in dwc2_gadget_get_chain_limit()
735 maxsize = (hs_ep->dir_in ? DEV_DMA_ISOC_TX_NBYTES_LIMIT : in dwc2_gadget_get_chain_limit()
742 if (hs_ep->index) in dwc2_gadget_get_chain_limit()
743 if (usb_endpoint_xfer_int(ep_desc) && !dir_in && (mps % 4)) in dwc2_gadget_get_chain_limit()
750 * dwc2_gadget_get_desc_params - get DMA descriptor parameters.
757 * Control out - MPS,
758 * Isochronous - descriptor rx/tx bytes bitfield limit,
759 * Control In/Bulk/Interrupt - multiple of mps. This will allow us to not
761 * Interrupt OUT - if mps is not a multiple of 4 then a single packet corresponds
768 const struct usb_endpoint_descriptor *ep_desc = hs_ep->ep.desc; in dwc2_gadget_get_desc_params()
769 u32 mps = hs_ep->ep.maxpacket; in dwc2_gadget_get_desc_params()
770 int dir_in = hs_ep->dir_in; in dwc2_gadget_get_desc_params()
773 if (!hs_ep->index && !dir_in) { in dwc2_gadget_get_desc_params()
776 } else if (hs_ep->isochronous) { in dwc2_gadget_get_desc_params()
777 if (dir_in) { in dwc2_gadget_get_desc_params()
789 desc_size -= desc_size % mps; in dwc2_gadget_get_desc_params()
793 if (hs_ep->index) in dwc2_gadget_get_desc_params()
794 if (usb_endpoint_xfer_int(ep_desc) && !dir_in && (mps % 4)) { in dwc2_gadget_get_desc_params()
808 int dir_in = hs_ep->dir_in; in dwc2_gadget_fill_nonisoc_xfer_ddma_one()
809 u32 mps = hs_ep->ep.maxpacket; in dwc2_gadget_fill_nonisoc_xfer_ddma_one()
817 hs_ep->desc_count = (len / maxsize) + in dwc2_gadget_fill_nonisoc_xfer_ddma_one()
819 if (len == 0) in dwc2_gadget_fill_nonisoc_xfer_ddma_one()
820 hs_ep->desc_count = 1; in dwc2_gadget_fill_nonisoc_xfer_ddma_one()
822 for (i = 0; i < hs_ep->desc_count; ++i) { in dwc2_gadget_fill_nonisoc_xfer_ddma_one()
823 (*desc)->status = 0; in dwc2_gadget_fill_nonisoc_xfer_ddma_one()
824 (*desc)->status |= (DEV_DMA_BUFF_STS_HBUSY in dwc2_gadget_fill_nonisoc_xfer_ddma_one()
827 if (len > maxsize) { in dwc2_gadget_fill_nonisoc_xfer_ddma_one()
828 if (!hs_ep->index && !dir_in) in dwc2_gadget_fill_nonisoc_xfer_ddma_one()
829 (*desc)->status |= (DEV_DMA_L | DEV_DMA_IOC); in dwc2_gadget_fill_nonisoc_xfer_ddma_one()
831 (*desc)->status |= in dwc2_gadget_fill_nonisoc_xfer_ddma_one()
833 (*desc)->buf = dma_buff + offset; in dwc2_gadget_fill_nonisoc_xfer_ddma_one()
835 len -= maxsize; in dwc2_gadget_fill_nonisoc_xfer_ddma_one()
838 if (true_last) in dwc2_gadget_fill_nonisoc_xfer_ddma_one()
839 (*desc)->status |= (DEV_DMA_L | DEV_DMA_IOC); in dwc2_gadget_fill_nonisoc_xfer_ddma_one()
841 if (dir_in) in dwc2_gadget_fill_nonisoc_xfer_ddma_one()
842 (*desc)->status |= (len % mps) ? DEV_DMA_SHORT : in dwc2_gadget_fill_nonisoc_xfer_ddma_one()
843 ((hs_ep->send_zlp && true_last) ? in dwc2_gadget_fill_nonisoc_xfer_ddma_one()
846 (*desc)->status |= in dwc2_gadget_fill_nonisoc_xfer_ddma_one()
848 (*desc)->buf = dma_buff + offset; in dwc2_gadget_fill_nonisoc_xfer_ddma_one()
851 (*desc)->status &= ~DEV_DMA_BUFF_STS_MASK; in dwc2_gadget_fill_nonisoc_xfer_ddma_one()
852 (*desc)->status |= (DEV_DMA_BUFF_STS_HREADY in dwc2_gadget_fill_nonisoc_xfer_ddma_one()
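The chain length computed above is just the transfer split into maxsize-sized descriptors, plus one for any remainder, with a zero-length transfer still taking one descriptor; each entry is marked host-busy while being filled and host-ready once complete, and only the true last one carries the L and IOC bits. A sketch of the count alone:

#include <stdint.h>

/* Number of DMA descriptors needed to cover "len" bytes, "maxsize" each. */
static uint32_t ddma_desc_count(uint32_t len, uint32_t maxsize)
{
        uint32_t count = len / maxsize;

        if (len % maxsize)
                count++;                /* partial last descriptor */
        if (len == 0)
                count = 1;              /* a ZLP still needs one descriptor */

        return count;
}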
859 * dwc2_gadget_config_nonisoc_xfer_ddma - prepare non-ISOC DMA desc chain.
873 struct dwc2_dma_desc *desc = hs_ep->desc_list; in dwc2_gadget_config_nonisoc_xfer_ddma()
878 if (hs_ep->req) in dwc2_gadget_config_nonisoc_xfer_ddma()
879 ureq = &hs_ep->req->req; in dwc2_gadget_config_nonisoc_xfer_ddma()
881 /* non-DMA sg buffer */ in dwc2_gadget_config_nonisoc_xfer_ddma()
882 if (!ureq || !ureq->num_sgs) { in dwc2_gadget_config_nonisoc_xfer_ddma()
889 for_each_sg(ureq->sg, sg, ureq->num_sgs, i) { in dwc2_gadget_config_nonisoc_xfer_ddma()
891 sg_dma_address(sg) + sg->offset, sg_dma_len(sg), in dwc2_gadget_config_nonisoc_xfer_ddma()
893 desc_count += hs_ep->desc_count; in dwc2_gadget_config_nonisoc_xfer_ddma()
896 hs_ep->desc_count = desc_count; in dwc2_gadget_config_nonisoc_xfer_ddma()
900 * dwc2_gadget_fill_isoc_desc - fills next isochronous descriptor in chain.
902 * @dma_buff: usb requests dma buffer.
903 * @len: usb request transfer length.
905 * Fills next free descriptor with the data of the arrived usb request,
906 * frame info, sets Last and IOC bits, and increments next_desc. If filled
914 struct dwc2_hsotg *hsotg = hs_ep->parent; in dwc2_gadget_fill_isoc_desc()
921 index = hs_ep->next_desc; in dwc2_gadget_fill_isoc_desc()
922 desc = &hs_ep->desc_list[index]; in dwc2_gadget_fill_isoc_desc()
924 /* Check if descriptor chain full */ in dwc2_gadget_fill_isoc_desc()
925 if ((desc->status >> DEV_DMA_BUFF_STS_SHIFT) == in dwc2_gadget_fill_isoc_desc()
927 dev_dbg(hsotg->dev, "%s: desc chain full\n", __func__); in dwc2_gadget_fill_isoc_desc()
931 /* Clear L bit of previous desc if more than one entry in the chain */ in dwc2_gadget_fill_isoc_desc()
932 if (hs_ep->next_desc) in dwc2_gadget_fill_isoc_desc()
933 hs_ep->desc_list[index - 1].status &= ~DEV_DMA_L; in dwc2_gadget_fill_isoc_desc()
935 dev_dbg(hsotg->dev, "%s: Filling ep %d, dir %s isoc desc # %d\n", in dwc2_gadget_fill_isoc_desc()
936 __func__, hs_ep->index, hs_ep->dir_in ? "in" : "out", index); in dwc2_gadget_fill_isoc_desc()
938 desc->status = 0; in dwc2_gadget_fill_isoc_desc()
939 desc->status |= (DEV_DMA_BUFF_STS_HBUSY << DEV_DMA_BUFF_STS_SHIFT); in dwc2_gadget_fill_isoc_desc()
941 desc->buf = dma_buff; in dwc2_gadget_fill_isoc_desc()
942 desc->status |= (DEV_DMA_L | DEV_DMA_IOC | in dwc2_gadget_fill_isoc_desc()
945 if (hs_ep->dir_in) { in dwc2_gadget_fill_isoc_desc()
946 if (len) in dwc2_gadget_fill_isoc_desc()
947 pid = DIV_ROUND_UP(len, hs_ep->ep.maxpacket); in dwc2_gadget_fill_isoc_desc()
950 desc->status |= ((pid << DEV_DMA_ISOC_PID_SHIFT) & in dwc2_gadget_fill_isoc_desc()
952 ((len % hs_ep->ep.maxpacket) ? in dwc2_gadget_fill_isoc_desc()
954 ((hs_ep->target_frame << in dwc2_gadget_fill_isoc_desc()
959 desc->status &= ~DEV_DMA_BUFF_STS_MASK; in dwc2_gadget_fill_isoc_desc()
960 desc->status |= (DEV_DMA_BUFF_STS_HREADY << DEV_DMA_BUFF_STS_SHIFT); in dwc2_gadget_fill_isoc_desc()
963 if (hs_ep->dir_in) in dwc2_gadget_fill_isoc_desc()
967 hs_ep->next_desc++; in dwc2_gadget_fill_isoc_desc()
968 if (hs_ep->next_desc >= MAX_DMA_DESC_NUM_HS_ISOC) in dwc2_gadget_fill_isoc_desc()
969 hs_ep->next_desc = 0; in dwc2_gadget_fill_isoc_desc()
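For the IN direction the descriptor's PID field carries the number of packets sent in the service interval: the length divided by wMaxPacketSize rounded up, with a zero-length request still counted as one packet. A standalone sketch of just that calculation:

#include <stdint.h>

/* Packets per service interval for an isochronous IN descriptor. */
static uint32_t isoc_in_pid(uint32_t len, uint32_t max_packet)
{
        if (!len)
                return 1;               /* a ZLP is still one packet */

        return (len + max_packet - 1) / max_packet;     /* DIV_ROUND_UP */
}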
975 * dwc2_gadget_start_isoc_ddma - start isochronous transfer in DDMA
983 struct dwc2_hsotg *hsotg = hs_ep->parent; in dwc2_gadget_start_isoc_ddma()
985 int index = hs_ep->index; in dwc2_gadget_start_isoc_ddma()
993 if (list_empty(&hs_ep->queue)) { in dwc2_gadget_start_isoc_ddma()
994 hs_ep->target_frame = TARGET_FRAME_INITIAL; in dwc2_gadget_start_isoc_ddma()
995 dev_dbg(hsotg->dev, "%s: No requests in queue\n", __func__); in dwc2_gadget_start_isoc_ddma()
1001 desc = &hs_ep->desc_list[i]; in dwc2_gadget_start_isoc_ddma()
1002 desc->status = 0; in dwc2_gadget_start_isoc_ddma()
1003 desc->status |= (DEV_DMA_BUFF_STS_HBUSY in dwc2_gadget_start_isoc_ddma()
1007 hs_ep->next_desc = 0; in dwc2_gadget_start_isoc_ddma()
1008 list_for_each_entry_safe(hs_req, treq, &hs_ep->queue, queue) { in dwc2_gadget_start_isoc_ddma()
1009 dma_addr_t dma_addr = hs_req->req.dma; in dwc2_gadget_start_isoc_ddma()
1011 if (hs_req->req.num_sgs) { in dwc2_gadget_start_isoc_ddma()
1012 WARN_ON(hs_req->req.num_sgs > 1); in dwc2_gadget_start_isoc_ddma()
1013 dma_addr = sg_dma_address(hs_req->req.sg); in dwc2_gadget_start_isoc_ddma()
1016 hs_req->req.length); in dwc2_gadget_start_isoc_ddma()
1017 if (ret) in dwc2_gadget_start_isoc_ddma()
1021 hs_ep->compl_desc = 0; in dwc2_gadget_start_isoc_ddma()
1022 depctl = hs_ep->dir_in ? DIEPCTL(index) : DOEPCTL(index); in dwc2_gadget_start_isoc_ddma()
1023 dma_reg = hs_ep->dir_in ? DIEPDMA(index) : DOEPDMA(index); in dwc2_gadget_start_isoc_ddma()
1026 dwc2_writel(hsotg, hs_ep->desc_list_dma, dma_reg); in dwc2_gadget_start_isoc_ddma()
1040 * dwc2_hsotg_start_req - start a USB request from an endpoint's queue
1044 * @continuing: True if we are doing more for the current request.
1054 struct usb_request *ureq = &hs_req->req; in dwc2_hsotg_start_req()
1055 int index = hs_ep->index; in dwc2_hsotg_start_req()
1056 int dir_in = hs_ep->dir_in; in dwc2_hsotg_start_req()
1066 if (index != 0) { in dwc2_hsotg_start_req()
1067 if (hs_ep->req && !continuing) { in dwc2_hsotg_start_req()
1068 dev_err(hsotg->dev, "%s: active request\n", __func__); in dwc2_hsotg_start_req()
1071 } else if (hs_ep->req != hs_req && continuing) { in dwc2_hsotg_start_req()
1072 dev_err(hsotg->dev, in dwc2_hsotg_start_req()
1083 dev_dbg(hsotg->dev, "%s: DxEPCTL=0x%08x, ep %d, dir %s\n", in dwc2_hsotg_start_req()
1085 hs_ep->dir_in ? "in" : "out"); in dwc2_hsotg_start_req()
1087 /* If endpoint is stalled, we will restart request later */ in dwc2_hsotg_start_req()
1090 if (index && ctrl & DXEPCTL_STALL) { in dwc2_hsotg_start_req()
1091 dev_warn(hsotg->dev, "%s: ep%d is stalled\n", __func__, index); in dwc2_hsotg_start_req()
1095 length = ureq->length - ureq->actual; in dwc2_hsotg_start_req()
1096 dev_dbg(hsotg->dev, "ureq->length:%d ureq->actual:%d\n", in dwc2_hsotg_start_req()
1097 ureq->length, ureq->actual); in dwc2_hsotg_start_req()
1099 if (!using_desc_dma(hsotg)) in dwc2_hsotg_start_req()
1104 if (length > maxreq) { in dwc2_hsotg_start_req()
1105 int round = maxreq % hs_ep->ep.maxpacket; in dwc2_hsotg_start_req()
1107 dev_dbg(hsotg->dev, "%s: length %d, max-req %d, r %d\n", in dwc2_hsotg_start_req()
1111 if (round) in dwc2_hsotg_start_req()
1112 maxreq -= round; in dwc2_hsotg_start_req()
1117 if (length) in dwc2_hsotg_start_req()
1118 packets = DIV_ROUND_UP(length, hs_ep->ep.maxpacket); in dwc2_hsotg_start_req()
1120 packets = 1; /* send one packet if length is zero. */ in dwc2_hsotg_start_req()
1122 if (dir_in && index != 0) in dwc2_hsotg_start_req()
1123 if (hs_ep->isochronous) in dwc2_hsotg_start_req()
1134 if (dir_in && ureq->zero && !continuing) { in dwc2_hsotg_start_req()
1135 /* Test if zlp is actually required. */ in dwc2_hsotg_start_req()
1136 if ((ureq->length >= hs_ep->ep.maxpacket) && in dwc2_hsotg_start_req()
1137 !(ureq->length % hs_ep->ep.maxpacket)) in dwc2_hsotg_start_req()
1138 hs_ep->send_zlp = 1; in dwc2_hsotg_start_req()
1144 dev_dbg(hsotg->dev, "%s: %d@%d/%d, 0x%08x => 0x%08x\n", in dwc2_hsotg_start_req()
1145 __func__, packets, length, ureq->length, epsize, epsize_reg); in dwc2_hsotg_start_req()
1148 hs_ep->req = hs_req; in dwc2_hsotg_start_req()
1150 if (using_desc_dma(hsotg)) { in dwc2_hsotg_start_req()
1152 u32 mps = hs_ep->ep.maxpacket; in dwc2_hsotg_start_req()
1154 /* Adjust length: EP0 - MPS, other OUT EPs - multiple of MPS */ in dwc2_hsotg_start_req()
1155 if (!dir_in) { in dwc2_hsotg_start_req()
1156 if (!index) in dwc2_hsotg_start_req()
1158 else if (length % mps) in dwc2_hsotg_start_req()
1159 length += (mps - (length % mps)); in dwc2_hsotg_start_req()
1162 if (continuing) in dwc2_hsotg_start_req()
1163 offset = ureq->actual; in dwc2_hsotg_start_req()
1166 dwc2_gadget_config_nonisoc_xfer_ddma(hs_ep, ureq->dma + offset, in dwc2_hsotg_start_req()
1170 dwc2_writel(hsotg, hs_ep->desc_list_dma, dma_reg); in dwc2_hsotg_start_req()
1172 dev_dbg(hsotg->dev, "%s: %08x pad => 0x%08x\n", in dwc2_hsotg_start_req()
1173 __func__, (u32)hs_ep->desc_list_dma, dma_reg); in dwc2_hsotg_start_req()
1178 if (using_dma(hsotg) && !continuing && (length != 0)) { in dwc2_hsotg_start_req()
1184 dwc2_writel(hsotg, ureq->dma, dma_reg); in dwc2_hsotg_start_req()
1186 dev_dbg(hsotg->dev, "%s: %pad => 0x%08x\n", in dwc2_hsotg_start_req()
1187 __func__, &ureq->dma, dma_reg); in dwc2_hsotg_start_req()
1191 if (hs_ep->isochronous) { in dwc2_hsotg_start_req()
1192 if (!dwc2_gadget_target_frame_elapsed(hs_ep)) { in dwc2_hsotg_start_req()
1193 if (hs_ep->interval == 1) { in dwc2_hsotg_start_req()
1194 if (hs_ep->target_frame & 0x1) in dwc2_hsotg_start_req()
1201 dwc2_hsotg_complete_request(hsotg, hs_ep, hs_req, -ENODATA); in dwc2_hsotg_start_req()
1208 dev_dbg(hsotg->dev, "ep0 state:%d\n", hsotg->ep0_state); in dwc2_hsotg_start_req()
1211 if (!(index == 0 && hsotg->ep0_state == DWC2_EP0_SETUP)) in dwc2_hsotg_start_req()
1214 dev_dbg(hsotg->dev, "%s: DxEPCTL=0x%08x\n", __func__, ctrl); in dwc2_hsotg_start_req()
1222 hs_ep->size_loaded = length; in dwc2_hsotg_start_req()
1223 hs_ep->last_load = ureq->actual; in dwc2_hsotg_start_req()
1225 if (dir_in && !using_dma(hsotg)) { in dwc2_hsotg_start_req()
1226 /* set these anyway, we may need them for non-periodic in */ in dwc2_hsotg_start_req()
1227 hs_ep->fifo_load = 0; in dwc2_hsotg_start_req()
1238 if (!(dwc2_readl(hsotg, epctrl_reg) & DXEPCTL_EPENA)) in dwc2_hsotg_start_req()
1239 dev_dbg(hsotg->dev, in dwc2_hsotg_start_req()
1243 dev_dbg(hsotg->dev, "%s: DXEPCTL=0x%08x\n", in dwc2_hsotg_start_req()
1247 dwc2_hsotg_ctrl_epint(hsotg, hs_ep->index, hs_ep->dir_in, 1); in dwc2_hsotg_start_req()
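Two bits of arithmetic in the routine above are easy to miss: the requested length is clipped to the endpoint limit rounded down to a whole number of packets, and an explicit ZLP is only scheduled when the gadget set req->zero and the data is a non-zero multiple of wMaxPacketSize. A hedged standalone sketch of both decisions:

#include <stdbool.h>
#include <stdint.h>

/* Clip a transfer to "maxreq" bytes while keeping whole packets. */
static uint32_t clip_to_packet_boundary(uint32_t length, uint32_t maxreq,
                                        uint32_t max_packet)
{
        if (length > maxreq) {
                uint32_t round = maxreq % max_packet;

                if (round)
                        maxreq -= round;        /* drop the partial packet */
                length = maxreq;
        }
        return length;
}

/* The host only needs a ZLP if the data ends exactly on a packet boundary. */
static bool needs_zlp(uint32_t length, uint32_t max_packet)
{
        return length >= max_packet && !(length % max_packet);
}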
1251 * dwc2_hsotg_map_dma - map the DMA memory being used for the request
1257 * is correctly setup for DMA. If we've been passed an extant DMA address
1258 * then ensure the buffer has been synced to memory. If our buffer has no
1268 hs_ep->map_dir = hs_ep->dir_in; in dwc2_hsotg_map_dma()
1269 ret = usb_gadget_map_request(&hsotg->gadget, req, hs_ep->dir_in); in dwc2_hsotg_map_dma()
1270 if (ret) in dwc2_hsotg_map_dma()
1276 dev_err(hsotg->dev, "%s: failed to map buffer %p, %d bytes\n", in dwc2_hsotg_map_dma()
1277 __func__, req->buf, req->length); in dwc2_hsotg_map_dma()
1279 return -EIO; in dwc2_hsotg_map_dma()
1286 void *req_buf = hs_req->req.buf; in dwc2_hsotg_handle_unaligned_buf_start()
1288 /* If dma is not being used or buffer is aligned */ in dwc2_hsotg_handle_unaligned_buf_start()
1289 if (!using_dma(hsotg) || !((long)req_buf & 3)) in dwc2_hsotg_handle_unaligned_buf_start()
1292 WARN_ON(hs_req->saved_req_buf); in dwc2_hsotg_handle_unaligned_buf_start()
1294 dev_dbg(hsotg->dev, "%s: %s: buf=%p length=%d\n", __func__, in dwc2_hsotg_handle_unaligned_buf_start()
1295 hs_ep->ep.name, req_buf, hs_req->req.length); in dwc2_hsotg_handle_unaligned_buf_start()
1297 hs_req->req.buf = kmalloc(hs_req->req.length, GFP_ATOMIC); in dwc2_hsotg_handle_unaligned_buf_start()
1298 if (!hs_req->req.buf) { in dwc2_hsotg_handle_unaligned_buf_start()
1299 hs_req->req.buf = req_buf; in dwc2_hsotg_handle_unaligned_buf_start()
1300 dev_err(hsotg->dev, in dwc2_hsotg_handle_unaligned_buf_start()
1303 return -ENOMEM; in dwc2_hsotg_handle_unaligned_buf_start()
1307 hs_req->saved_req_buf = req_buf; in dwc2_hsotg_handle_unaligned_buf_start()
1309 if (hs_ep->dir_in) in dwc2_hsotg_handle_unaligned_buf_start()
1310 memcpy(hs_req->req.buf, req_buf, hs_req->req.length); in dwc2_hsotg_handle_unaligned_buf_start()
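When DMA is in use and the gadget's buffer is not 4-byte aligned, the helper above swaps in a freshly allocated bounce buffer, pre-filling it for IN endpoints; the completion counterpart below copies received data back and restores the original pointer. A simplified user-space sketch of the same idea (hypothetical struct, malloc standing in for kmalloc):

#include <stdint.h>
#include <stdlib.h>
#include <string.h>

struct xfer {
        void   *buf;            /* buffer handed in by the caller */
        void   *saved_buf;      /* original buffer kept while bouncing */
        size_t  length;
        int     dir_in;         /* nonzero for device-to-host (IN) data */
};

/* Swap in a word-aligned bounce buffer if "buf" is not 4-byte aligned. */
static int bounce_start(struct xfer *x)
{
        void *bounce;

        if (!((uintptr_t)x->buf & 3))
                return 0;               /* already aligned, nothing to do */

        bounce = malloc(x->length);
        if (!bounce)
                return -1;

        if (x->dir_in)                  /* outgoing data: copy it in now */
                memcpy(bounce, x->buf, x->length);

        x->saved_buf = x->buf;
        x->buf = bounce;
        return 0;
}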
1319 /* If dma is not being used or buffer was aligned */ in dwc2_hsotg_handle_unaligned_buf_complete()
1320 if (!using_dma(hsotg) || !hs_req->saved_req_buf) in dwc2_hsotg_handle_unaligned_buf_complete()
1323 dev_dbg(hsotg->dev, "%s: %s: status=%d actual-length=%d\n", __func__, in dwc2_hsotg_handle_unaligned_buf_complete()
1324 hs_ep->ep.name, hs_req->req.status, hs_req->req.actual); in dwc2_hsotg_handle_unaligned_buf_complete()
1327 if (!hs_ep->dir_in && !hs_req->req.status) in dwc2_hsotg_handle_unaligned_buf_complete()
1328 memcpy(hs_req->saved_req_buf, hs_req->req.buf, in dwc2_hsotg_handle_unaligned_buf_complete()
1329 hs_req->req.actual); in dwc2_hsotg_handle_unaligned_buf_complete()
1332 kfree(hs_req->req.buf); in dwc2_hsotg_handle_unaligned_buf_complete()
1334 hs_req->req.buf = hs_req->saved_req_buf; in dwc2_hsotg_handle_unaligned_buf_complete()
1335 hs_req->saved_req_buf = NULL; in dwc2_hsotg_handle_unaligned_buf_complete()
1339 * dwc2_gadget_target_frame_elapsed - Checks target frame
1342 * Returns 1 if targeted frame elapsed. If returned 1 then we need to drop
1347 struct dwc2_hsotg *hsotg = hs_ep->parent; in dwc2_gadget_target_frame_elapsed()
1348 u32 target_frame = hs_ep->target_frame; in dwc2_gadget_target_frame_elapsed()
1349 u32 current_frame = hsotg->frame_number; in dwc2_gadget_target_frame_elapsed()
1350 bool frame_overrun = hs_ep->frame_overrun; in dwc2_gadget_target_frame_elapsed()
1353 if (hsotg->gadget.speed != USB_SPEED_HIGH) in dwc2_gadget_target_frame_elapsed()
1356 if (!frame_overrun && current_frame >= target_frame) in dwc2_gadget_target_frame_elapsed()
1359 if (frame_overrun && current_frame >= target_frame && in dwc2_gadget_target_frame_elapsed()
1360 ((current_frame - target_frame) < limit / 2)) in dwc2_gadget_target_frame_elapsed()
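The check above treats the target as elapsed either when no wrap occurred and the current frame has reached it, or when the target wrapped and the (also wrapped) current frame is within half the counter range past it. A standalone sketch, again assuming a 14-bit counter limit:

#include <stdbool.h>
#include <stdint.h>

#define SOFFN_LIMIT 0x3fff              /* assumed 14-bit (micro)frame counter */

static bool target_frame_elapsed(uint32_t current_frame, uint32_t target_frame,
                                 bool frame_overrun)
{
        if (!frame_overrun && current_frame >= target_frame)
                return true;

        /* after a wrap, only "just past the target" counts as elapsed */
        if (frame_overrun && current_frame >= target_frame &&
            (current_frame - target_frame) < SOFFN_LIMIT / 2)
                return true;

        return false;
}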
1367 * dwc2_gadget_set_ep0_desc_chain - Set EP's desc chain pointers
1377 switch (hsotg->ep0_state) { in dwc2_gadget_set_ep0_desc_chain()
1380 hs_ep->desc_list = hsotg->setup_desc[0]; in dwc2_gadget_set_ep0_desc_chain()
1381 hs_ep->desc_list_dma = hsotg->setup_desc_dma[0]; in dwc2_gadget_set_ep0_desc_chain()
1385 hs_ep->desc_list = hsotg->ctrl_in_desc; in dwc2_gadget_set_ep0_desc_chain()
1386 hs_ep->desc_list_dma = hsotg->ctrl_in_desc_dma; in dwc2_gadget_set_ep0_desc_chain()
1389 hs_ep->desc_list = hsotg->ctrl_out_desc; in dwc2_gadget_set_ep0_desc_chain()
1390 hs_ep->desc_list_dma = hsotg->ctrl_out_desc_dma; in dwc2_gadget_set_ep0_desc_chain()
1393 dev_err(hsotg->dev, "invalid EP 0 state in queue %d\n", in dwc2_gadget_set_ep0_desc_chain()
1394 hsotg->ep0_state); in dwc2_gadget_set_ep0_desc_chain()
1395 return -EINVAL; in dwc2_gadget_set_ep0_desc_chain()
1406 struct dwc2_hsotg *hs = hs_ep->parent; in dwc2_hsotg_ep_queue() local
1413 dev_dbg(hs->dev, "%s: req %p: %d@%p, noi=%d, zero=%d, snok=%d\n", in dwc2_hsotg_ep_queue()
1414 ep->name, req, req->length, req->buf, req->no_interrupt, in dwc2_hsotg_ep_queue()
1415 req->zero, req->short_not_ok); in dwc2_hsotg_ep_queue()
1418 if (hs->lx_state != DWC2_L0) { in dwc2_hsotg_ep_queue()
1419 dev_dbg(hs->dev, "%s: submit request only in active state\n", in dwc2_hsotg_ep_queue()
1421 return -EAGAIN; in dwc2_hsotg_ep_queue()
1425 INIT_LIST_HEAD(&hs_req->queue); in dwc2_hsotg_ep_queue()
1426 req->actual = 0; in dwc2_hsotg_ep_queue()
1427 req->status = -EINPROGRESS; in dwc2_hsotg_ep_queue()
1429 /* Don't queue ISOC request if length greater than mps*mc */ in dwc2_hsotg_ep_queue()
1430 if (hs_ep->isochronous && in dwc2_hsotg_ep_queue()
1431 req->length > (hs_ep->mc * hs_ep->ep.maxpacket)) { in dwc2_hsotg_ep_queue()
1432 dev_err(hs->dev, "req length > maxpacket*mc\n"); in dwc2_hsotg_ep_queue()
1433 return -EINVAL; in dwc2_hsotg_ep_queue()
1436 /* In DDMA mode for ISOCs don't queue request if length greater in dwc2_hsotg_ep_queue()
1439 if (using_desc_dma(hs) && hs_ep->isochronous) { in dwc2_hsotg_ep_queue()
1441 if (hs_ep->dir_in && req->length > maxsize) { in dwc2_hsotg_ep_queue()
1442 dev_err(hs->dev, "wrong length %d (maxsize=%d)\n", in dwc2_hsotg_ep_queue()
1443 req->length, maxsize); in dwc2_hsotg_ep_queue()
1444 return -EINVAL; in dwc2_hsotg_ep_queue()
1447 if (!hs_ep->dir_in && req->length > hs_ep->ep.maxpacket) { in dwc2_hsotg_ep_queue()
1448 dev_err(hs->dev, "ISOC OUT: wrong length %d (mps=%d)\n", in dwc2_hsotg_ep_queue()
1449 req->length, hs_ep->ep.maxpacket); in dwc2_hsotg_ep_queue()
1450 return -EINVAL; in dwc2_hsotg_ep_queue()
1454 ret = dwc2_hsotg_handle_unaligned_buf_start(hs, hs_ep, hs_req); in dwc2_hsotg_ep_queue()
1455 if (ret) in dwc2_hsotg_ep_queue()
1458 /* if we're using DMA, sync the buffers as necessary */ in dwc2_hsotg_ep_queue()
1459 if (using_dma(hs)) { in dwc2_hsotg_ep_queue()
1460 ret = dwc2_hsotg_map_dma(hs, hs_ep, req); in dwc2_hsotg_ep_queue()
1461 if (ret) in dwc2_hsotg_ep_queue()
1464 /* If using descriptor DMA configure EP0 descriptor chain pointers */ in dwc2_hsotg_ep_queue()
1465 if (using_desc_dma(hs) && !hs_ep->index) { in dwc2_hsotg_ep_queue()
1466 ret = dwc2_gadget_set_ep0_desc_chain(hs, hs_ep); in dwc2_hsotg_ep_queue()
1467 if (ret) in dwc2_hsotg_ep_queue()
1471 first = list_empty(&hs_ep->queue); in dwc2_hsotg_ep_queue()
1472 list_add_tail(&hs_req->queue, &hs_ep->queue); in dwc2_hsotg_ep_queue()
1475 * Handle DDMA isochronous transfers separately - just add new entry in dwc2_hsotg_ep_queue()
1480 if (using_desc_dma(hs) && hs_ep->isochronous) { in dwc2_hsotg_ep_queue()
1481 if (hs_ep->target_frame != TARGET_FRAME_INITIAL) { in dwc2_hsotg_ep_queue()
1482 dma_addr_t dma_addr = hs_req->req.dma; in dwc2_hsotg_ep_queue()
1484 if (hs_req->req.num_sgs) { in dwc2_hsotg_ep_queue()
1485 WARN_ON(hs_req->req.num_sgs > 1); in dwc2_hsotg_ep_queue()
1486 dma_addr = sg_dma_address(hs_req->req.sg); in dwc2_hsotg_ep_queue()
1489 hs_req->req.length); in dwc2_hsotg_ep_queue()
1494 /* Change EP direction if status phase request is after data out */ in dwc2_hsotg_ep_queue()
1495 if (!hs_ep->index && !req->length && !hs_ep->dir_in && in dwc2_hsotg_ep_queue()
1496 hs->ep0_state == DWC2_EP0_DATA_OUT) in dwc2_hsotg_ep_queue()
1497 hs_ep->dir_in = 1; in dwc2_hsotg_ep_queue()
1499 if (first) { in dwc2_hsotg_ep_queue()
1500 if (!hs_ep->isochronous) { in dwc2_hsotg_ep_queue()
1501 dwc2_hsotg_start_req(hs, hs_ep, hs_req, false); in dwc2_hsotg_ep_queue()
1506 hs->frame_number = dwc2_hsotg_read_frameno(hs); in dwc2_hsotg_ep_queue()
1512 hs->frame_number = dwc2_hsotg_read_frameno(hs); in dwc2_hsotg_ep_queue()
1515 if (hs_ep->target_frame != TARGET_FRAME_INITIAL) in dwc2_hsotg_ep_queue()
1516 dwc2_hsotg_start_req(hs, hs_ep, hs_req, false); in dwc2_hsotg_ep_queue()
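The queue-time validation earlier in this function rejects isochronous requests that could never fit: anything longer than mc x wMaxPacketSize, and in descriptor-DMA mode anything beyond the per-descriptor byte limit for IN or a single max packet for OUT. A hedged sketch of those bounds checks (the IN descriptor limit below is a placeholder, not the hardware value):

#include <stdbool.h>
#include <stdint.h>

#define ISOC_IN_DESC_LIMIT 4095 /* placeholder for the descriptor NBYTES limit */

static bool isoc_request_fits(uint32_t length, uint32_t max_packet,
                              uint32_t mult, bool dir_in, bool desc_dma)
{
        if (length > mult * max_packet)
                return false;                   /* more than mc packets */

        if (desc_dma) {
                if (dir_in && length > ISOC_IN_DESC_LIMIT)
                        return false;           /* exceeds one desc chain slot */
                if (!dir_in && length > max_packet)
                        return false;           /* one packet per OUT descriptor */
        }

        return true;
}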
1525 struct dwc2_hsotg *hs = hs_ep->parent; in dwc2_hsotg_ep_queue_lock() local
1529 spin_lock_irqsave(&hs->lock, flags); in dwc2_hsotg_ep_queue_lock()
1531 spin_unlock_irqrestore(&hs->lock, flags); in dwc2_hsotg_ep_queue_lock()
1545 * dwc2_hsotg_complete_oursetup - setup completion callback
1556 struct dwc2_hsotg *hsotg = hs_ep->parent; in dwc2_hsotg_complete_oursetup()
1558 dev_dbg(hsotg->dev, "%s: ep %p, req %p\n", __func__, ep, req); in dwc2_hsotg_complete_oursetup()
1564 * ep_from_windex - convert control wIndex value to endpoint
1569 * structure, or return NULL if it is not a valid endpoint.
1577 if (windex >= 0x100) in ep_from_windex()
1580 if (idx > hsotg->num_of_eps) in ep_from_windex()
1587 * dwc2_hsotg_set_test_mode - Enable USB test modes
1589 * @testmode: requested USB test mode
1590 * Enable the USB test mode requested by the host.
1606 return -EINVAL; in dwc2_hsotg_set_test_mode()
1613 * dwc2_hsotg_send_reply - send reply to control request
1630 dev_dbg(hsotg->dev, "%s: buff %p, len %d\n", __func__, buff, length); in dwc2_hsotg_send_reply()
1632 req = dwc2_hsotg_ep_alloc_request(&ep->ep, GFP_ATOMIC); in dwc2_hsotg_send_reply()
1633 hsotg->ep0_reply = req; in dwc2_hsotg_send_reply()
1634 if (!req) { in dwc2_hsotg_send_reply()
1635 dev_warn(hsotg->dev, "%s: cannot alloc req\n", __func__); in dwc2_hsotg_send_reply()
1636 return -ENOMEM; in dwc2_hsotg_send_reply()
1639 req->buf = hsotg->ep0_buff; in dwc2_hsotg_send_reply()
1640 req->length = length; in dwc2_hsotg_send_reply()
1645 req->zero = 0; in dwc2_hsotg_send_reply()
1646 req->complete = dwc2_hsotg_complete_oursetup; in dwc2_hsotg_send_reply()
1648 if (length) in dwc2_hsotg_send_reply()
1649 memcpy(req->buf, buff, length); in dwc2_hsotg_send_reply()
1651 ret = dwc2_hsotg_ep_queue(&ep->ep, req, GFP_ATOMIC); in dwc2_hsotg_send_reply()
1652 if (ret) { in dwc2_hsotg_send_reply()
1653 dev_warn(hsotg->dev, "%s: cannot queue req\n", __func__); in dwc2_hsotg_send_reply()
1661 * dwc2_hsotg_process_req_status - process request GET_STATUS
1663 * @ctrl: USB control request
1668 struct dwc2_hsotg_ep *ep0 = hsotg->eps_out[0]; in dwc2_hsotg_process_req_status()
1674 dev_dbg(hsotg->dev, "%s: USB_REQ_GET_STATUS\n", __func__); in dwc2_hsotg_process_req_status()
1676 if (!ep0->dir_in) { in dwc2_hsotg_process_req_status()
1677 dev_warn(hsotg->dev, "%s: direction out?\n", __func__); in dwc2_hsotg_process_req_status()
1678 return -EINVAL; in dwc2_hsotg_process_req_status()
1681 switch (ctrl->bRequestType & USB_RECIP_MASK) { in dwc2_hsotg_process_req_status()
1683 status = hsotg->gadget.is_selfpowered << in dwc2_hsotg_process_req_status()
1685 status |= hsotg->remote_wakeup_allowed << in dwc2_hsotg_process_req_status()
1696 ep = ep_from_windex(hsotg, le16_to_cpu(ctrl->wIndex)); in dwc2_hsotg_process_req_status()
1697 if (!ep) in dwc2_hsotg_process_req_status()
1698 return -ENOENT; in dwc2_hsotg_process_req_status()
1700 reply = cpu_to_le16(ep->halted ? 1 : 0); in dwc2_hsotg_process_req_status()
1707 if (le16_to_cpu(ctrl->wLength) != 2) in dwc2_hsotg_process_req_status()
1708 return -EINVAL; in dwc2_hsotg_process_req_status()
1711 if (ret) { in dwc2_hsotg_process_req_status()
1712 dev_err(hsotg->dev, "%s: failed to send reply\n", __func__); in dwc2_hsotg_process_req_status()
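For the device recipient the two status bytes put the self-powered flag in bit 0 and the remote-wakeup flag in bit 1 (their standard ch9 positions), while the endpoint recipient answers with its halt state. A minimal sketch of the device case (standalone, little-endian conversion omitted):

#include <stdint.h>

#define STATUS_SELF_POWERED_BIT   0     /* ch9: USB_DEVICE_SELF_POWERED */
#define STATUS_REMOTE_WAKEUP_BIT  1     /* ch9: USB_DEVICE_REMOTE_WAKEUP */

/* 16-bit status word returned in the GET_STATUS(device) data stage. */
static uint16_t device_status(int is_selfpowered, int remote_wakeup_allowed)
{
        uint16_t status = 0;

        status |= (uint16_t)(!!is_selfpowered) << STATUS_SELF_POWERED_BIT;
        status |= (uint16_t)(!!remote_wakeup_allowed) << STATUS_REMOTE_WAKEUP_BIT;

        return status;
}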
1722 * get_ep_head - return the first request on the endpoint
1729 return list_first_entry_or_null(&hs_ep->queue, struct dwc2_hsotg_req, in get_ep_head()
1734 * dwc2_gadget_start_next_request - Starts next request from ep queue
1737 * If queue is empty and EP is ISOC-OUT - unmasks OUTTKNEPDIS which is masked
1743 struct dwc2_hsotg *hsotg = hs_ep->parent; in dwc2_gadget_start_next_request()
1744 int dir_in = hs_ep->dir_in; in dwc2_gadget_start_next_request()
1747 if (!list_empty(&hs_ep->queue)) { in dwc2_gadget_start_next_request()
1752 if (!hs_ep->isochronous) in dwc2_gadget_start_next_request()
1755 if (dir_in) { in dwc2_gadget_start_next_request()
1756 dev_dbg(hsotg->dev, "%s: No more ISOC-IN requests\n", in dwc2_gadget_start_next_request()
1759 dev_dbg(hsotg->dev, "%s: No more ISOC-OUT requests\n", in dwc2_gadget_start_next_request()
1765 * dwc2_hsotg_process_req_feature - process request {SET,CLEAR}_FEATURE
1767 * @ctrl: USB control request
1772 struct dwc2_hsotg_ep *ep0 = hsotg->eps_out[0]; in dwc2_hsotg_process_req_feature()
1774 bool set = (ctrl->bRequest == USB_REQ_SET_FEATURE); in dwc2_hsotg_process_req_feature()
1782 dev_dbg(hsotg->dev, "%s: %s_FEATURE\n", in dwc2_hsotg_process_req_feature()
1785 wValue = le16_to_cpu(ctrl->wValue); in dwc2_hsotg_process_req_feature()
1786 wIndex = le16_to_cpu(ctrl->wIndex); in dwc2_hsotg_process_req_feature()
1787 recip = ctrl->bRequestType & USB_RECIP_MASK; in dwc2_hsotg_process_req_feature()
1793 if (set) in dwc2_hsotg_process_req_feature()
1794 hsotg->remote_wakeup_allowed = 1; in dwc2_hsotg_process_req_feature()
1796 hsotg->remote_wakeup_allowed = 0; in dwc2_hsotg_process_req_feature()
1800 if ((wIndex & 0xff) != 0) in dwc2_hsotg_process_req_feature()
1801 return -EINVAL; in dwc2_hsotg_process_req_feature()
1802 if (!set) in dwc2_hsotg_process_req_feature()
1803 return -EINVAL; in dwc2_hsotg_process_req_feature()
1805 hsotg->test_mode = wIndex >> 8; in dwc2_hsotg_process_req_feature()
1808 return -ENOENT; in dwc2_hsotg_process_req_feature()
1812 if (ret) { in dwc2_hsotg_process_req_feature()
1813 dev_err(hsotg->dev, in dwc2_hsotg_process_req_feature()
1821 if (!ep) { in dwc2_hsotg_process_req_feature()
1822 dev_dbg(hsotg->dev, "%s: no endpoint for 0x%04x\n", in dwc2_hsotg_process_req_feature()
1824 return -ENOENT; in dwc2_hsotg_process_req_feature()
1829 halted = ep->halted; in dwc2_hsotg_process_req_feature()
1831 if (!ep->wedged) in dwc2_hsotg_process_req_feature()
1832 dwc2_hsotg_ep_sethalt(&ep->ep, set, true); in dwc2_hsotg_process_req_feature()
1835 if (ret) { in dwc2_hsotg_process_req_feature()
1836 dev_err(hsotg->dev, in dwc2_hsotg_process_req_feature()
1842 * we have to complete all requests for ep if it was in dwc2_hsotg_process_req_feature()
1846 if (!set && halted) { in dwc2_hsotg_process_req_feature()
1848 * If we have request in progress, in dwc2_hsotg_process_req_feature()
1851 if (ep->req) { in dwc2_hsotg_process_req_feature()
1852 hs_req = ep->req; in dwc2_hsotg_process_req_feature()
1853 ep->req = NULL; in dwc2_hsotg_process_req_feature()
1854 list_del_init(&hs_req->queue); in dwc2_hsotg_process_req_feature()
1855 if (hs_req->req.complete) { in dwc2_hsotg_process_req_feature()
1856 spin_unlock(&hsotg->lock); in dwc2_hsotg_process_req_feature()
1858 &ep->ep, &hs_req->req); in dwc2_hsotg_process_req_feature()
1859 spin_lock(&hsotg->lock); in dwc2_hsotg_process_req_feature()
1863 /* If we have pending request, then start it */ in dwc2_hsotg_process_req_feature()
1864 if (!ep->req) in dwc2_hsotg_process_req_feature()
1871 return -ENOENT; in dwc2_hsotg_process_req_feature()
1875 return -ENOENT; in dwc2_hsotg_process_req_feature()
1883 * dwc2_hsotg_stall_ep0 - stall ep0
1890 struct dwc2_hsotg_ep *ep0 = hsotg->eps_out[0]; in dwc2_hsotg_stall_ep0()
1894 dev_dbg(hsotg->dev, "ep0 stall (dir=%d)\n", ep0->dir_in); in dwc2_hsotg_stall_ep0()
1895 reg = (ep0->dir_in) ? DIEPCTL0 : DOEPCTL0; in dwc2_hsotg_stall_ep0()
1907 dev_dbg(hsotg->dev, in dwc2_hsotg_stall_ep0()
1919 * dwc2_hsotg_process_control - process a control request
1930 struct dwc2_hsotg_ep *ep0 = hsotg->eps_out[0]; in dwc2_hsotg_process_control()
1934 dev_dbg(hsotg->dev, in dwc2_hsotg_process_control()
1936 ctrl->bRequestType, ctrl->bRequest, ctrl->wValue, in dwc2_hsotg_process_control()
1937 ctrl->wIndex, ctrl->wLength); in dwc2_hsotg_process_control()
1939 if (ctrl->wLength == 0) { in dwc2_hsotg_process_control()
1940 ep0->dir_in = 1; in dwc2_hsotg_process_control()
1941 hsotg->ep0_state = DWC2_EP0_STATUS_IN; in dwc2_hsotg_process_control()
1942 } else if (ctrl->bRequestType & USB_DIR_IN) { in dwc2_hsotg_process_control()
1943 ep0->dir_in = 1; in dwc2_hsotg_process_control()
1944 hsotg->ep0_state = DWC2_EP0_DATA_IN; in dwc2_hsotg_process_control()
1946 ep0->dir_in = 0; in dwc2_hsotg_process_control()
1947 hsotg->ep0_state = DWC2_EP0_DATA_OUT; in dwc2_hsotg_process_control()
1950 if ((ctrl->bRequestType & USB_TYPE_MASK) == USB_TYPE_STANDARD) { in dwc2_hsotg_process_control()
1951 switch (ctrl->bRequest) { in dwc2_hsotg_process_control()
1953 hsotg->connected = 1; in dwc2_hsotg_process_control()
1956 dcfg |= (le16_to_cpu(ctrl->wValue) << in dwc2_hsotg_process_control()
1960 dev_info(hsotg->dev, "new address %d\n", ctrl->wValue); in dwc2_hsotg_process_control()
1978 if (ret == 0 && hsotg->driver) { in dwc2_hsotg_process_control()
1979 spin_unlock(&hsotg->lock); in dwc2_hsotg_process_control()
1980 ret = hsotg->driver->setup(&hsotg->gadget, ctrl); in dwc2_hsotg_process_control()
1981 spin_lock(&hsotg->lock); in dwc2_hsotg_process_control()
1982 if (ret < 0) in dwc2_hsotg_process_control()
1983 dev_dbg(hsotg->dev, "driver->setup() ret %d\n", ret); in dwc2_hsotg_process_control()
1986 hsotg->delayed_status = false; in dwc2_hsotg_process_control()
1987 if (ret == USB_GADGET_DELAYED_STATUS) in dwc2_hsotg_process_control()
1988 hsotg->delayed_status = true; in dwc2_hsotg_process_control()
1995 if (ret < 0) in dwc2_hsotg_process_control()
2000 * dwc2_hsotg_complete_setup - completion of a setup transfer
2011 struct dwc2_hsotg *hsotg = hs_ep->parent; in dwc2_hsotg_complete_setup()
2013 if (req->status < 0) { in dwc2_hsotg_complete_setup()
2014 dev_dbg(hsotg->dev, "%s: failed %d\n", __func__, req->status); in dwc2_hsotg_complete_setup()
2018 spin_lock(&hsotg->lock); in dwc2_hsotg_complete_setup()
2019 if (req->actual == 0) in dwc2_hsotg_complete_setup()
2022 dwc2_hsotg_process_control(hsotg, req->buf); in dwc2_hsotg_complete_setup()
2023 spin_unlock(&hsotg->lock); in dwc2_hsotg_complete_setup()
2027 * dwc2_hsotg_enqueue_setup - start a request for EP0 packets
2030 * Enqueue a request on EP0 if necessary to receive any SETUP packets
2035 struct usb_request *req = hsotg->ctrl_req; in dwc2_hsotg_enqueue_setup()
2039 dev_dbg(hsotg->dev, "%s: queueing setup request\n", __func__); in dwc2_hsotg_enqueue_setup()
2041 req->zero = 0; in dwc2_hsotg_enqueue_setup()
2042 req->length = 8; in dwc2_hsotg_enqueue_setup()
2043 req->buf = hsotg->ctrl_buff; in dwc2_hsotg_enqueue_setup()
2044 req->complete = dwc2_hsotg_complete_setup; in dwc2_hsotg_enqueue_setup()
2046 if (!list_empty(&hs_req->queue)) { in dwc2_hsotg_enqueue_setup()
2047 dev_dbg(hsotg->dev, "%s already queued???\n", __func__); in dwc2_hsotg_enqueue_setup()
2051 hsotg->eps_out[0]->dir_in = 0; in dwc2_hsotg_enqueue_setup()
2052 hsotg->eps_out[0]->send_zlp = 0; in dwc2_hsotg_enqueue_setup()
2053 hsotg->ep0_state = DWC2_EP0_SETUP; in dwc2_hsotg_enqueue_setup()
2055 ret = dwc2_hsotg_ep_queue(&hsotg->eps_out[0]->ep, req, GFP_ATOMIC); in dwc2_hsotg_enqueue_setup()
2056 if (ret < 0) { in dwc2_hsotg_enqueue_setup()
2057 dev_err(hsotg->dev, "%s: failed queue (%d)\n", __func__, ret); in dwc2_hsotg_enqueue_setup()
2069 u8 index = hs_ep->index; in dwc2_hsotg_program_zlp()
2070 u32 epctl_reg = hs_ep->dir_in ? DIEPCTL(index) : DOEPCTL(index); in dwc2_hsotg_program_zlp()
2071 u32 epsiz_reg = hs_ep->dir_in ? DIEPTSIZ(index) : DOEPTSIZ(index); in dwc2_hsotg_program_zlp()
2073 if (hs_ep->dir_in) in dwc2_hsotg_program_zlp()
2074 dev_dbg(hsotg->dev, "Sending zero-length packet on ep%d\n", in dwc2_hsotg_program_zlp()
2077 dev_dbg(hsotg->dev, "Receiving zero-length packet on ep%d\n", in dwc2_hsotg_program_zlp()
2079 if (using_desc_dma(hsotg)) { in dwc2_hsotg_program_zlp()
2081 dma_addr_t dma = hs_ep->desc_list_dma; in dwc2_hsotg_program_zlp()
2083 if (!index) in dwc2_hsotg_program_zlp()
2101 * dwc2_hsotg_complete_request - complete a request given to us
2108 * if it has one and then look to see if we can start a new request
2118 if (!hs_req) { in dwc2_hsotg_complete_request()
2119 dev_dbg(hsotg->dev, "%s: nothing to complete?\n", __func__); in dwc2_hsotg_complete_request()
2123 dev_dbg(hsotg->dev, "complete: ep %p %s, req %p, %d => %p\n", in dwc2_hsotg_complete_request()
2124 hs_ep, hs_ep->ep.name, hs_req, result, hs_req->req.complete); in dwc2_hsotg_complete_request()
2127 * only replace the status if we've not already set an error in dwc2_hsotg_complete_request()
2131 if (hs_req->req.status == -EINPROGRESS) in dwc2_hsotg_complete_request()
2132 hs_req->req.status = result; in dwc2_hsotg_complete_request()
2134 if (using_dma(hsotg)) in dwc2_hsotg_complete_request()
2139 hs_ep->req = NULL; in dwc2_hsotg_complete_request()
2140 list_del_init(&hs_req->queue); in dwc2_hsotg_complete_request()
2147 if (hs_req->req.complete) { in dwc2_hsotg_complete_request()
2148 spin_unlock(&hsotg->lock); in dwc2_hsotg_complete_request()
2149 usb_gadget_giveback_request(&hs_ep->ep, &hs_req->req); in dwc2_hsotg_complete_request()
2150 spin_lock(&hsotg->lock); in dwc2_hsotg_complete_request()
2154 if (using_desc_dma(hsotg) && hs_ep->isochronous) in dwc2_hsotg_complete_request()
2158 * Look to see if there is anything else to do. Note, the completion in dwc2_hsotg_complete_request()
2163 if (!hs_ep->req && result >= 0) in dwc2_hsotg_complete_request()
2168 * dwc2_gadget_complete_isoc_request_ddma - complete an isoc request in DDMA
2178 struct dwc2_hsotg *hsotg = hs_ep->parent; in dwc2_gadget_complete_isoc_request_ddma()
2184 desc_sts = hs_ep->desc_list[hs_ep->compl_desc].status; in dwc2_gadget_complete_isoc_request_ddma()
2191 if (!hs_req) { in dwc2_gadget_complete_isoc_request_ddma()
2192 dev_warn(hsotg->dev, "%s: ISOC EP queue empty\n", __func__); in dwc2_gadget_complete_isoc_request_ddma()
2195 ureq = &hs_req->req; in dwc2_gadget_complete_isoc_request_ddma()
2198 if ((desc_sts & DEV_DMA_STS_MASK) >> DEV_DMA_STS_SHIFT == in dwc2_gadget_complete_isoc_request_ddma()
2200 mask = hs_ep->dir_in ? DEV_DMA_ISOC_TX_NBYTES_MASK : in dwc2_gadget_complete_isoc_request_ddma()
2202 ureq->actual = ureq->length - ((desc_sts & mask) >> in dwc2_gadget_complete_isoc_request_ddma()
2205 /* Adjust actual len for ISOC Out if len is in dwc2_gadget_complete_isoc_request_ddma()
2208 if (!hs_ep->dir_in && ureq->length & 0x3) in dwc2_gadget_complete_isoc_request_ddma()
2209 ureq->actual += 4 - (ureq->length & 0x3); in dwc2_gadget_complete_isoc_request_ddma()
2212 ureq->frame_number = in dwc2_gadget_complete_isoc_request_ddma()
2219 hs_ep->compl_desc++; in dwc2_gadget_complete_isoc_request_ddma()
2220 if (hs_ep->compl_desc > (MAX_DMA_DESC_NUM_HS_ISOC - 1)) in dwc2_gadget_complete_isoc_request_ddma()
2221 hs_ep->compl_desc = 0; in dwc2_gadget_complete_isoc_request_ddma()
2222 desc_sts = hs_ep->desc_list[hs_ep->compl_desc].status; in dwc2_gadget_complete_isoc_request_ddma()
2227 * dwc2_gadget_handle_isoc_bna - handle BNA interrupt for ISOC.
2230 * If EP ISOC OUT then need to flush RX FIFO to remove source of BNA
2237 struct dwc2_hsotg *hsotg = hs_ep->parent; in dwc2_gadget_handle_isoc_bna()
2239 if (!hs_ep->dir_in) in dwc2_gadget_handle_isoc_bna()
2243 hs_ep->target_frame = TARGET_FRAME_INITIAL; in dwc2_gadget_handle_isoc_bna()
2244 hs_ep->next_desc = 0; in dwc2_gadget_handle_isoc_bna()
2245 hs_ep->compl_desc = 0; in dwc2_gadget_handle_isoc_bna()
2249 * dwc2_hsotg_rx_data - receive data from the FIFO for an endpoint
2260 struct dwc2_hsotg_ep *hs_ep = hsotg->eps_out[ep_idx]; in dwc2_hsotg_rx_data()
2261 struct dwc2_hsotg_req *hs_req = hs_ep->req; in dwc2_hsotg_rx_data()
2266 if (!hs_req) { in dwc2_hsotg_rx_data()
2270 dev_dbg(hsotg->dev, in dwc2_hsotg_rx_data()
2282 read_ptr = hs_req->req.actual; in dwc2_hsotg_rx_data()
2283 max_req = hs_req->req.length - read_ptr; in dwc2_hsotg_rx_data()
2285 dev_dbg(hsotg->dev, "%s: read %d/%d, done %d/%d\n", in dwc2_hsotg_rx_data()
2286 __func__, to_read, max_req, read_ptr, hs_req->req.length); in dwc2_hsotg_rx_data()
2288 if (to_read > max_req) { in dwc2_hsotg_rx_data()
2298 hs_ep->total_data += to_read; in dwc2_hsotg_rx_data()
2299 hs_req->req.actual += to_read; in dwc2_hsotg_rx_data()
2303 * note, we might over-write the buffer end by 3 bytes depending on in dwc2_hsotg_rx_data()
2307 hs_req->req.buf + read_ptr, to_read); in dwc2_hsotg_rx_data()
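The FIFO is drained in 32-bit words, so the byte count is effectively rounded up to a multiple of four and the copy can run up to three bytes past the logical end of the data, which is what the in-function comment about over-writing the buffer end refers to. A one-line sketch of the rounding:

/* Number of 32-bit FIFO reads needed to drain "to_read" bytes. */
static unsigned int fifo_words(unsigned int to_read)
{
        return (to_read + 3) / 4;       /* rounds up: may touch up to 3 extra bytes */
}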
2311 * dwc2_hsotg_ep0_zlp - send/receive zero-length packet on control endpoint
2313 * @dir_in: If IN zlp
2315 * Generate a zero-length IN packet request for terminating a SETUP
2325 hsotg->eps_out[0]->dir_in = dir_in; in dwc2_hsotg_ep0_zlp()
2326 hsotg->ep0_state = dir_in ? DWC2_EP0_STATUS_IN : DWC2_EP0_STATUS_OUT; in dwc2_hsotg_ep0_zlp()
2328 dwc2_hsotg_program_zlp(hsotg, hsotg->eps_out[0]); in dwc2_hsotg_ep0_zlp()
2332 * dwc2_gadget_get_xfersize_ddma - get the number of transferred bytes from desc
2333 * @hs_ep: The endpoint on which the transfer went
2340 const struct usb_endpoint_descriptor *ep_desc = hs_ep->ep.desc; in dwc2_gadget_get_xfersize_ddma()
2341 struct dwc2_hsotg *hsotg = hs_ep->parent; in dwc2_gadget_get_xfersize_ddma()
2344 struct dwc2_dma_desc *desc = hs_ep->desc_list; in dwc2_gadget_get_xfersize_ddma()
2347 u32 mps = hs_ep->ep.maxpacket; in dwc2_gadget_get_xfersize_ddma()
2348 int dir_in = hs_ep->dir_in; in dwc2_gadget_get_xfersize_ddma()
2350 if (!desc) in dwc2_gadget_get_xfersize_ddma()
2351 return -EINVAL; in dwc2_gadget_get_xfersize_ddma()
2354 if (hs_ep->index) in dwc2_gadget_get_xfersize_ddma()
2355 if (usb_endpoint_xfer_int(ep_desc) && !dir_in && (mps % 4)) in dwc2_gadget_get_xfersize_ddma()
2356 bytes_rem_correction = 4 - (mps % 4); in dwc2_gadget_get_xfersize_ddma()
2358 for (i = 0; i < hs_ep->desc_count; ++i) { in dwc2_gadget_get_xfersize_ddma()
2359 status = desc->status; in dwc2_gadget_get_xfersize_ddma()
2361 bytes_rem -= bytes_rem_correction; in dwc2_gadget_get_xfersize_ddma()
2363 if (status & DEV_DMA_STS_MASK) in dwc2_gadget_get_xfersize_ddma()
2364 dev_err(hsotg->dev, "descriptor %d closed with %x\n", in dwc2_gadget_get_xfersize_ddma()
2367 if (status & DEV_DMA_L) in dwc2_gadget_get_xfersize_ddma()
2377 * dwc2_hsotg_handle_outdone - handle receiving OutDone/SetupDone from RXFIFO
2388 struct dwc2_hsotg_ep *hs_ep = hsotg->eps_out[epnum]; in dwc2_hsotg_handle_outdone()
2389 struct dwc2_hsotg_req *hs_req = hs_ep->req; in dwc2_hsotg_handle_outdone()
2390 struct usb_request *req = &hs_req->req; in dwc2_hsotg_handle_outdone()
2394 if (!hs_req) { in dwc2_hsotg_handle_outdone()
2395 dev_dbg(hsotg->dev, "%s: no request active\n", __func__); in dwc2_hsotg_handle_outdone()
2399 if (epnum == 0 && hsotg->ep0_state == DWC2_EP0_STATUS_OUT) { in dwc2_hsotg_handle_outdone()
2400 dev_dbg(hsotg->dev, "zlp packet received\n"); in dwc2_hsotg_handle_outdone()
2406 if (using_desc_dma(hsotg)) in dwc2_hsotg_handle_outdone()
2409 if (using_dma(hsotg)) { in dwc2_hsotg_handle_outdone()
2421 size_done = hs_ep->size_loaded - size_left; in dwc2_hsotg_handle_outdone()
2422 size_done += hs_ep->last_load; in dwc2_hsotg_handle_outdone()
2424 req->actual = size_done; in dwc2_hsotg_handle_outdone()
2427 /* if there is more request to do, schedule new transfer */ in dwc2_hsotg_handle_outdone()
2428 if (req->actual < req->length && size_left == 0) { in dwc2_hsotg_handle_outdone()
2433 if (req->actual < req->length && req->short_not_ok) { in dwc2_hsotg_handle_outdone()
2434 dev_dbg(hsotg->dev, "%s: got %d/%d (short not ok) => error\n", in dwc2_hsotg_handle_outdone()
2435 __func__, req->actual, req->length); in dwc2_hsotg_handle_outdone()
2438 * todo - what should we return here? there's no one else in dwc2_hsotg_handle_outdone()
2444 if (!using_desc_dma(hsotg) && epnum == 0 && in dwc2_hsotg_handle_outdone()
2445 hsotg->ep0_state == DWC2_EP0_DATA_OUT) { in dwc2_hsotg_handle_outdone()
2447 if (!hsotg->delayed_status) in dwc2_hsotg_handle_outdone()
2452 if (!using_desc_dma(hsotg) && hs_ep->isochronous) { in dwc2_hsotg_handle_outdone()
2453 req->frame_number = hs_ep->target_frame; in dwc2_hsotg_handle_outdone()
2461 * dwc2_hsotg_handle_rx - RX FIFO has data
2469 * chunks, so if you have x packets received on an endpoint you'll get x
2489 dev_dbg(hsotg->dev, "%s: GRXSTSP=0x%08x (%d@%d)\n", in dwc2_hsotg_handle_rx()
2494 dev_dbg(hsotg->dev, "GLOBALOUTNAK\n"); in dwc2_hsotg_handle_rx()
2498 dev_dbg(hsotg->dev, "OutDone (Frame=0x%08x)\n", in dwc2_hsotg_handle_rx()
2501 if (!using_dma(hsotg)) in dwc2_hsotg_handle_rx()
2506 dev_dbg(hsotg->dev, in dwc2_hsotg_handle_rx()
2511 * Call dwc2_hsotg_handle_outdone here if it was not called from in dwc2_hsotg_handle_rx()
2512 * GRXSTS_PKTSTS_OUTDONE. That is, if the core didn't in dwc2_hsotg_handle_rx()
2515 if (hsotg->ep0_state == DWC2_EP0_SETUP) in dwc2_hsotg_handle_rx()
2524 dev_dbg(hsotg->dev, in dwc2_hsotg_handle_rx()
2529 WARN_ON(hsotg->ep0_state != DWC2_EP0_SETUP); in dwc2_hsotg_handle_rx()
2535 dev_warn(hsotg->dev, "%s: unknown status %08x\n", in dwc2_hsotg_handle_rx()
2544 * dwc2_hsotg_ep0_mps - turn max packet size into register setting
2562 return (u32)-1; in dwc2_hsotg_ep0_mps()
2566 * dwc2_hsotg_set_ep_maxpacket - set endpoint's max-packet field
2571 * @dir_in: True if direction is in.
2584 if (!hs_ep) in dwc2_hsotg_set_ep_maxpacket()
2587 if (ep == 0) { in dwc2_hsotg_set_ep_maxpacket()
2592 if (mps > 3) in dwc2_hsotg_set_ep_maxpacket()
2594 hs_ep->ep.maxpacket = mps_bytes; in dwc2_hsotg_set_ep_maxpacket()
2595 hs_ep->mc = 1; in dwc2_hsotg_set_ep_maxpacket()
2597 if (mps > 1024) in dwc2_hsotg_set_ep_maxpacket()
2599 hs_ep->mc = mc; in dwc2_hsotg_set_ep_maxpacket()
2600 if (mc > 3) in dwc2_hsotg_set_ep_maxpacket()
2602 hs_ep->ep.maxpacket = mps; in dwc2_hsotg_set_ep_maxpacket()
2605 if (dir_in) { in dwc2_hsotg_set_ep_maxpacket()
2620 dev_err(hsotg->dev, "ep%d: bad mps of %d\n", ep, mps); in dwc2_hsotg_set_ep_maxpacket()
2624 * dwc2_hsotg_txfifo_flush - flush Tx FIFO
2634 if (dwc2_hsotg_wait_bit_clear(hsotg, GRSTCTL, GRSTCTL_TXFFLSH, 100)) in dwc2_hsotg_txfifo_flush()
2635 dev_warn(hsotg->dev, "%s: timeout flushing fifo GRSTCTL_TXFFLSH\n", in dwc2_hsotg_txfifo_flush()
2640 * dwc2_hsotg_trytx - check to see if anything needs transmitting
2644 * Check to see if there is a request that has data to send, and if so
2650 struct dwc2_hsotg_req *hs_req = hs_ep->req; in dwc2_hsotg_trytx()
2652 if (!hs_ep->dir_in || !hs_req) { in dwc2_hsotg_trytx()
2654 * if request is not enqueued, we disable interrupts in dwc2_hsotg_trytx()
2657 if (hs_ep->index != 0) in dwc2_hsotg_trytx()
2658 dwc2_hsotg_ctrl_epint(hsotg, hs_ep->index, in dwc2_hsotg_trytx()
2659 hs_ep->dir_in, 0); in dwc2_hsotg_trytx()
2663 if (hs_req->req.actual < hs_req->req.length) { in dwc2_hsotg_trytx()
2664 dev_dbg(hsotg->dev, "trying to write more for ep%d\n", in dwc2_hsotg_trytx()
2665 hs_ep->index); in dwc2_hsotg_trytx()
2673 * dwc2_hsotg_complete_in - complete IN transfer
2683 struct dwc2_hsotg_req *hs_req = hs_ep->req; in dwc2_hsotg_complete_in()
2684 u32 epsize = dwc2_readl(hsotg, DIEPTSIZ(hs_ep->index)); in dwc2_hsotg_complete_in()
2687 if (!hs_req) { in dwc2_hsotg_complete_in()
2688 dev_dbg(hsotg->dev, "XferCompl but no req\n"); in dwc2_hsotg_complete_in()
2693 if (hs_ep->index == 0 && hsotg->ep0_state == DWC2_EP0_STATUS_IN) { in dwc2_hsotg_complete_in()
2694 dev_dbg(hsotg->dev, "zlp packet sent\n"); in dwc2_hsotg_complete_in()
2700 hs_ep->dir_in = 0; in dwc2_hsotg_complete_in()
2703 if (hsotg->test_mode) { in dwc2_hsotg_complete_in()
2706 ret = dwc2_hsotg_set_test_mode(hsotg, hsotg->test_mode); in dwc2_hsotg_complete_in()
2707 if (ret < 0) { in dwc2_hsotg_complete_in()
2708 dev_dbg(hsotg->dev, "Invalid Test #%d\n", in dwc2_hsotg_complete_in()
2709 hsotg->test_mode); in dwc2_hsotg_complete_in()
2727 if (using_desc_dma(hsotg)) { in dwc2_hsotg_complete_in()
2729 if (size_left < 0) in dwc2_hsotg_complete_in()
2730 dev_err(hsotg->dev, "error parsing DDMA results %d\n", in dwc2_hsotg_complete_in()
2736 size_done = hs_ep->size_loaded - size_left; in dwc2_hsotg_complete_in()
2737 size_done += hs_ep->last_load; in dwc2_hsotg_complete_in()
2739 if (hs_req->req.actual != size_done) in dwc2_hsotg_complete_in()
2740 dev_dbg(hsotg->dev, "%s: adjusting size done %d => %d\n", in dwc2_hsotg_complete_in()
2741 __func__, hs_req->req.actual, size_done); in dwc2_hsotg_complete_in()
2743 hs_req->req.actual = size_done; in dwc2_hsotg_complete_in()
2744 dev_dbg(hsotg->dev, "req->length:%d req->actual:%d req->zero:%d\n", in dwc2_hsotg_complete_in()
2745 hs_req->req.length, hs_req->req.actual, hs_req->req.zero); in dwc2_hsotg_complete_in()
2747 if (!size_left && hs_req->req.actual < hs_req->req.length) { in dwc2_hsotg_complete_in()
2748 dev_dbg(hsotg->dev, "%s trying more for req...\n", __func__); in dwc2_hsotg_complete_in()
2754 if (hs_ep->send_zlp) { in dwc2_hsotg_complete_in()
2755 hs_ep->send_zlp = 0; in dwc2_hsotg_complete_in()
2756 if (!using_desc_dma(hsotg)) { in dwc2_hsotg_complete_in()
2763 if (hs_ep->index == 0 && hsotg->ep0_state == DWC2_EP0_DATA_IN) { in dwc2_hsotg_complete_in()
2770 if (!using_desc_dma(hsotg) && hs_ep->isochronous) { in dwc2_hsotg_complete_in()
2771 hs_req->req.frame_number = hs_ep->target_frame; in dwc2_hsotg_complete_in()
2779 * dwc2_gadget_read_ep_interrupts - reads interrupts for given ep
2782 * @dir_in: Endpoint direction 1-in 0-out.
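/*
 * Editor's sketch of the masked read described above, assuming the
 * DIEPEMPMSK register and the DIEPMSK_TXFIFOEMPTY bit from the register
 * header: the per-endpoint FIFO-empty mask bit is folded into the mask so
 * TxFIFOEmpty is only reported for endpoints that have it enabled, and
 * SETUP_RCVD is always allowed through for EP0 handling.
 */
static u32 read_ep_interrupts_sketch(struct dwc2_hsotg *hsotg,
				     unsigned int idx, int dir_in)
{
	u32 epmsk_reg = dir_in ? DIEPMSK : DOEPMSK;
	u32 epint_reg = dir_in ? DIEPINT(idx) : DOEPINT(idx);
	u32 diepempmsk = dwc2_readl(hsotg, DIEPEMPMSK);
	u32 mask = dwc2_readl(hsotg, epmsk_reg);

	mask |= ((diepempmsk >> idx) & 1) ? DIEPMSK_TXFIFOEMPTY : 0;
	mask |= DXEPINT_SETUP_RCVD;

	return dwc2_readl(hsotg, epint_reg) & mask;
}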
2807 * dwc2_gadget_handle_ep_disabled - handle DXEPINT_EPDISBLD
2816 * For ISOC-OUT endpoints, completes expired requests. If there is remaining
2821 struct dwc2_hsotg *hsotg = hs_ep->parent; in dwc2_gadget_handle_ep_disabled()
2823 unsigned char idx = hs_ep->index; in dwc2_gadget_handle_ep_disabled()
2824 int dir_in = hs_ep->dir_in; in dwc2_gadget_handle_ep_disabled()
2828 dev_dbg(hsotg->dev, "%s: EPDisbld\n", __func__); in dwc2_gadget_handle_ep_disabled()
2830 if (dir_in) { in dwc2_gadget_handle_ep_disabled()
2833 dwc2_hsotg_txfifo_flush(hsotg, hs_ep->fifo_index); in dwc2_gadget_handle_ep_disabled()
2835 if ((epctl & DXEPCTL_STALL) && (epctl & DXEPCTL_EPTYPE_BULK)) { in dwc2_gadget_handle_ep_disabled()
2843 if (dctl & DCTL_GOUTNAKSTS) { in dwc2_gadget_handle_ep_disabled()
2849 if (!hs_ep->isochronous) in dwc2_gadget_handle_ep_disabled()
2852 if (list_empty(&hs_ep->queue)) { in dwc2_gadget_handle_ep_disabled()
2853 dev_dbg(hsotg->dev, "%s: complete_ep 0x%p, ep->queue empty!\n", in dwc2_gadget_handle_ep_disabled()
2860 if (hs_req) in dwc2_gadget_handle_ep_disabled()
2862 -ENODATA); in dwc2_gadget_handle_ep_disabled()
2865 hsotg->frame_number = dwc2_hsotg_read_frameno(hsotg); in dwc2_gadget_handle_ep_disabled()
2870 * dwc2_gadget_handle_out_token_ep_disabled - handle DXEPINT_OUTTKNEPDIS
2873 * This is the starting point for an ISOC-OUT transfer; synchronization is done with
2877 * HW generates OUTTKNEPDIS - out token is received while EP is disabled. Upon
2882 struct dwc2_hsotg *hsotg = ep->parent; in dwc2_gadget_handle_out_token_ep_disabled()
2884 int dir_in = ep->dir_in; in dwc2_gadget_handle_out_token_ep_disabled()
2886 if (dir_in || !ep->isochronous) in dwc2_gadget_handle_out_token_ep_disabled()
2889 if (using_desc_dma(hsotg)) { in dwc2_gadget_handle_out_token_ep_disabled()
2890 if (ep->target_frame == TARGET_FRAME_INITIAL) { in dwc2_gadget_handle_out_token_ep_disabled()
2892 ep->target_frame = hsotg->frame_number; in dwc2_gadget_handle_out_token_ep_disabled()
2898 if (ep->target_frame == TARGET_FRAME_INITIAL) { in dwc2_gadget_handle_out_token_ep_disabled()
2901 ep->target_frame = hsotg->frame_number; in dwc2_gadget_handle_out_token_ep_disabled()
2902 if (ep->interval > 1) { in dwc2_gadget_handle_out_token_ep_disabled()
2903 ctrl = dwc2_readl(hsotg, DOEPCTL(ep->index)); in dwc2_gadget_handle_out_token_ep_disabled()
2904 if (ep->target_frame & 0x1) in dwc2_gadget_handle_out_token_ep_disabled()
2909 dwc2_writel(hsotg, ctrl, DOEPCTL(ep->index)); in dwc2_gadget_handle_out_token_ep_disabled()
2915 if (hs_req) in dwc2_gadget_handle_out_token_ep_disabled()
2916 dwc2_hsotg_complete_request(hsotg, ep, hs_req, -ENODATA); in dwc2_gadget_handle_out_token_ep_disabled()
2920 hsotg->frame_number = dwc2_hsotg_read_frameno(hsotg); in dwc2_gadget_handle_out_token_ep_disabled()
2923 if (!ep->req) in dwc2_gadget_handle_out_token_ep_disabled()
2932 * dwc2_gadget_handle_nak - handle NAK interrupt
2935 * This is the starting point for an ISOC-IN transfer; synchronization is done with
2940 * and 'NAK'. NAK interrupt for ISOC-IN means that token has arrived and ZLP was
2947 struct dwc2_hsotg *hsotg = hs_ep->parent; in dwc2_gadget_handle_nak()
2949 int dir_in = hs_ep->dir_in; in dwc2_gadget_handle_nak()
2952 if (!dir_in || !hs_ep->isochronous) in dwc2_gadget_handle_nak()
2955 if (hs_ep->target_frame == TARGET_FRAME_INITIAL) { in dwc2_gadget_handle_nak()
2957 if (using_desc_dma(hsotg)) { in dwc2_gadget_handle_nak()
2958 hs_ep->target_frame = hsotg->frame_number; in dwc2_gadget_handle_nak()
2964 if (hsotg->params.service_interval) { in dwc2_gadget_handle_nak()
2968 hs_ep->target_frame &= ~hs_ep->interval + 1; in dwc2_gadget_handle_nak()
2981 hs_ep->target_frame = hsotg->frame_number; in dwc2_gadget_handle_nak()
2982 if (hs_ep->interval > 1) { in dwc2_gadget_handle_nak()
2984 DIEPCTL(hs_ep->index)); in dwc2_gadget_handle_nak()
2985 if (hs_ep->target_frame & 0x1) in dwc2_gadget_handle_nak()
2990 dwc2_writel(hsotg, ctrl, DIEPCTL(hs_ep->index)); in dwc2_gadget_handle_nak()
2994 if (using_desc_dma(hsotg)) in dwc2_gadget_handle_nak()
2997 ctrl = dwc2_readl(hsotg, DIEPCTL(hs_ep->index)); in dwc2_gadget_handle_nak()
2998 if (ctrl & DXEPCTL_EPENA) in dwc2_gadget_handle_nak()
3001 dwc2_hsotg_txfifo_flush(hsotg, hs_ep->fifo_index); in dwc2_gadget_handle_nak()
3005 if (hs_req) in dwc2_gadget_handle_nak()
3006 dwc2_hsotg_complete_request(hsotg, hs_ep, hs_req, -ENODATA); in dwc2_gadget_handle_nak()
3010 hsotg->frame_number = dwc2_hsotg_read_frameno(hsotg); in dwc2_gadget_handle_nak()
3013 if (!hs_ep->req) in dwc2_gadget_handle_nak()
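/*
 * Editor's sketch of the even/odd (micro)frame selection used by the two
 * ISOC start paths above, assuming the DXEPCTL_SETEVENFR/DXEPCTL_SETODDFR
 * bits from the register header: for intervals greater than one, the
 * parity of the target frame tells the core in which (micro)frame to
 * start the transfer.
 */
static u32 isoc_frame_parity_sketch(u32 ctrl, u32 target_frame)
{
	if (target_frame & 0x1)
		ctrl |= DXEPCTL_SETODDFR;
	else
		ctrl |= DXEPCTL_SETEVENFR;

	return ctrl;
}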
3018 * dwc2_hsotg_epint - handle an in/out endpoint interrupt
3021 * @dir_in: Set if this is an IN endpoint
3039 if (!hs_ep) { in dwc2_hsotg_epint()
3040 dev_err(hsotg->dev, "%s:Interrupt for unconfigured ep%d(%s)\n", in dwc2_hsotg_epint()
3045 dev_dbg(hsotg->dev, "%s: ep%d(%s) DxEPINT=0x%08x\n", in dwc2_hsotg_epint()
3048 /* Don't process XferCompl interrupt if it is a setup packet */ in dwc2_hsotg_epint()
3049 if (idx == 0 && (ints & (DXEPINT_SETUP | DXEPINT_SETUP_RCVD))) in dwc2_hsotg_epint()
3053 * Don't process XferCompl interrupt in DDMA if EP0 is still in SETUP in dwc2_hsotg_epint()
3058 if (using_desc_dma(hsotg) && idx == 0 && !hs_ep->dir_in && in dwc2_hsotg_epint()
3059 hsotg->ep0_state == DWC2_EP0_SETUP && !(ints & DXEPINT_SETUP)) in dwc2_hsotg_epint()
3062 if (ints & DXEPINT_XFERCOMPL) { in dwc2_hsotg_epint()
3063 dev_dbg(hsotg->dev, in dwc2_hsotg_epint()
3069 if (using_desc_dma(hsotg) && hs_ep->isochronous) { in dwc2_hsotg_epint()
3071 } else if (dir_in) { in dwc2_hsotg_epint()
3075 * if operating slave mode in dwc2_hsotg_epint()
3077 if (!hs_ep->isochronous || !(ints & DXEPINT_NAKINTRPT)) in dwc2_hsotg_epint()
3080 if (idx == 0 && !hs_ep->req) in dwc2_hsotg_epint()
3082 } else if (using_dma(hsotg)) { in dwc2_hsotg_epint()
3087 if (!hs_ep->isochronous || !(ints & DXEPINT_OUTTKNEPDIS)) in dwc2_hsotg_epint()
3092 if (ints & DXEPINT_EPDISBLD) in dwc2_hsotg_epint()
3095 if (ints & DXEPINT_OUTTKNEPDIS) in dwc2_hsotg_epint()
3098 if (ints & DXEPINT_NAKINTRPT) in dwc2_hsotg_epint()
3101 if (ints & DXEPINT_AHBERR) in dwc2_hsotg_epint()
3102 dev_dbg(hsotg->dev, "%s: AHBErr\n", __func__); in dwc2_hsotg_epint()
3104 if (ints & DXEPINT_SETUP) { /* Setup or Timeout */ in dwc2_hsotg_epint()
3105 dev_dbg(hsotg->dev, "%s: Setup/Timeout\n", __func__); in dwc2_hsotg_epint()
3107 if (using_dma(hsotg) && idx == 0) { in dwc2_hsotg_epint()
3110 * setup packet. In non-DMA mode we'd get this in dwc2_hsotg_epint()
3115 if (dir_in) in dwc2_hsotg_epint()
3122 if (ints & DXEPINT_STSPHSERCVD) { in dwc2_hsotg_epint()
3123 dev_dbg(hsotg->dev, "%s: StsPhseRcvd\n", __func__); in dwc2_hsotg_epint()
3126 if (hsotg->ep0_state == DWC2_EP0_DATA_OUT) { in dwc2_hsotg_epint()
3128 if (using_desc_dma(hsotg)) { in dwc2_hsotg_epint()
3129 if (!hsotg->delayed_status) in dwc2_hsotg_epint()
3146 if (ints & DXEPINT_BACK2BACKSETUP) in dwc2_hsotg_epint()
3147 dev_dbg(hsotg->dev, "%s: B2BSetup/INEPNakEff\n", __func__); in dwc2_hsotg_epint()
3149 if (ints & DXEPINT_BNAINTR) { in dwc2_hsotg_epint()
3150 dev_dbg(hsotg->dev, "%s: BNA interrupt\n", __func__); in dwc2_hsotg_epint()
3151 if (hs_ep->isochronous) in dwc2_hsotg_epint()
3155 if (dir_in && !hs_ep->isochronous) { in dwc2_hsotg_epint()
3156 /* not sure if this is important, but we'll clear it anyway */ in dwc2_hsotg_epint()
3157 if (ints & DXEPINT_INTKNTXFEMP) { in dwc2_hsotg_epint()
3158 dev_dbg(hsotg->dev, "%s: ep%d: INTknTXFEmpMsk\n", in dwc2_hsotg_epint()
3163 if (ints & DXEPINT_INTKNEPMIS) { in dwc2_hsotg_epint()
3164 dev_warn(hsotg->dev, "%s: ep%d: INTknEP\n", in dwc2_hsotg_epint()
3169 if (hsotg->dedicated_fifos && in dwc2_hsotg_epint()
3171 dev_dbg(hsotg->dev, "%s: ep%d: TxFIFOEmpty\n", in dwc2_hsotg_epint()
3173 if (!using_dma(hsotg)) in dwc2_hsotg_epint()
3180 * dwc2_hsotg_irq_enumdone - Handle EnumDone interrupt (enumeration done)
3193 * of the USB handshaking, so we should now know what rate in dwc2_hsotg_irq_enumdone()
3197 dev_dbg(hsotg->dev, "EnumDone (DSTS=0x%08x)\n", dsts); in dwc2_hsotg_irq_enumdone()
3209 hsotg->gadget.speed = USB_SPEED_FULL; in dwc2_hsotg_irq_enumdone()
3215 hsotg->gadget.speed = USB_SPEED_HIGH; in dwc2_hsotg_irq_enumdone()
3221 hsotg->gadget.speed = USB_SPEED_LOW; in dwc2_hsotg_irq_enumdone()
3231 dev_info(hsotg->dev, "new device is %s\n", in dwc2_hsotg_irq_enumdone()
3232 usb_speed_string(hsotg->gadget.speed)); in dwc2_hsotg_irq_enumdone()
3239 if (ep0_mps) { in dwc2_hsotg_irq_enumdone()
3244 for (i = 1; i < hsotg->num_of_eps; i++) { in dwc2_hsotg_irq_enumdone()
3245 if (hsotg->eps_in[i]) in dwc2_hsotg_irq_enumdone()
3248 if (hsotg->eps_out[i]) in dwc2_hsotg_irq_enumdone()
3258 dev_dbg(hsotg->dev, "EP0: DIEPCTL0=0x%08x, DOEPCTL0=0x%08x\n", in dwc2_hsotg_irq_enumdone()
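/*
 * Editor's sketch of the speed decode performed above, assuming the
 * DSTS_ENUMSPD_* encodings from the register header; the decoded speed
 * drives the EP0 and bulk/interrupt max-packet limits programmed next.
 */
static enum usb_device_speed enumdone_speed_sketch(u32 dsts)
{
	switch ((dsts & DSTS_ENUMSPD_MASK) >> DSTS_ENUMSPD_SHIFT) {
	case DSTS_ENUMSPD_FS:
	case DSTS_ENUMSPD_FS48:
		return USB_SPEED_FULL;	/* EP0 limited to 64 bytes */
	case DSTS_ENUMSPD_HS:
		return USB_SPEED_HIGH;	/* other EPs may use up to 1024 */
	case DSTS_ENUMSPD_LS:
		return USB_SPEED_LOW;	/* EP0 limited to 8 bytes */
	}

	return USB_SPEED_UNKNOWN;
}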
3264 * kill_all_requests - remove all requests from the endpoint's queue
3278 ep->req = NULL; in kill_all_requests()
3280 while (!list_empty(&ep->queue)) { in kill_all_requests()
3286 if (!hsotg->dedicated_fifos) in kill_all_requests()
3288 size = (dwc2_readl(hsotg, DTXFSTS(ep->fifo_index)) & 0xffff) * 4; in kill_all_requests()
3289 if (size < ep->fifo_size) in kill_all_requests()
3290 dwc2_hsotg_txfifo_flush(hsotg, ep->fifo_index); in kill_all_requests()
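/*
 * Editor's sketch of the drain loop above: every request still queued on
 * the endpoint is completed with the caller-supplied result code (for
 * example -ESHUTDOWN on disconnect), handing it back to the gadget
 * driver's ->complete() callback.
 */
static void kill_all_requests_sketch(struct dwc2_hsotg *hsotg,
				     struct dwc2_hsotg_ep *ep, int result)
{
	struct dwc2_hsotg_req *req;

	ep->req = NULL;

	while (!list_empty(&ep->queue)) {
		req = list_first_entry(&ep->queue, struct dwc2_hsotg_req,
				       queue);
		dwc2_hsotg_complete_request(hsotg, ep, req, result);
	}
}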
3294 * dwc2_hsotg_disconnect - disconnect service
3305 if (!hsotg->connected) in dwc2_hsotg_disconnect()
3308 hsotg->connected = 0; in dwc2_hsotg_disconnect()
3309 hsotg->test_mode = 0; in dwc2_hsotg_disconnect()
3312 for (ep = 0; ep < hsotg->num_of_eps; ep++) { in dwc2_hsotg_disconnect()
3313 if (hsotg->eps_in[ep]) in dwc2_hsotg_disconnect()
3314 kill_all_requests(hsotg, hsotg->eps_in[ep], in dwc2_hsotg_disconnect()
3315 -ESHUTDOWN); in dwc2_hsotg_disconnect()
3316 if (hsotg->eps_out[ep]) in dwc2_hsotg_disconnect()
3317 kill_all_requests(hsotg, hsotg->eps_out[ep], in dwc2_hsotg_disconnect()
3318 -ESHUTDOWN); in dwc2_hsotg_disconnect()
3322 hsotg->lx_state = DWC2_L3; in dwc2_hsotg_disconnect()
3324 usb_gadget_set_state(&hsotg->gadget, USB_STATE_NOTATTACHED); in dwc2_hsotg_disconnect()
3328 * dwc2_hsotg_irq_fifoempty - TX FIFO empty interrupt handler
3330 * @periodic: True if this is a periodic FIFO interrupt
3338 for (epno = 0; epno < hsotg->num_of_eps; epno++) { in dwc2_hsotg_irq_fifoempty()
3341 if (!ep) in dwc2_hsotg_irq_fifoempty()
3344 if (!ep->dir_in) in dwc2_hsotg_irq_fifoempty()
3347 if ((periodic && !ep->periodic) || in dwc2_hsotg_irq_fifoempty()
3348 (!periodic && ep->periodic)) in dwc2_hsotg_irq_fifoempty()
3352 if (ret < 0) in dwc2_hsotg_irq_fifoempty()
3364 * dwc2_hsotg_core_init_disconnected - issue softreset to the core
3366 * @is_usb_reset: USB reset flag
3380 kill_all_requests(hsotg, hsotg->eps_out[0], -ECONNRESET); in dwc2_hsotg_core_init_disconnected()
3382 if (!is_usb_reset) { in dwc2_hsotg_core_init_disconnected()
3383 if (dwc2_core_reset(hsotg, true)) in dwc2_hsotg_core_init_disconnected()
3387 for (ep = 1; ep < hsotg->num_of_eps; ep++) { in dwc2_hsotg_core_init_disconnected()
3388 if (hsotg->eps_in[ep]) in dwc2_hsotg_core_init_disconnected()
3389 dwc2_hsotg_ep_disable(&hsotg->eps_in[ep]->ep); in dwc2_hsotg_core_init_disconnected()
3390 if (hsotg->eps_out[ep]) in dwc2_hsotg_core_init_disconnected()
3391 dwc2_hsotg_ep_disable(&hsotg->eps_out[ep]->ep); in dwc2_hsotg_core_init_disconnected()
3413 if (!is_usb_reset) in dwc2_hsotg_core_init_disconnected()
3418 switch (hsotg->params.speed) { in dwc2_hsotg_core_init_disconnected()
3423 if (hsotg->params.phy_type == DWC2_PHY_TYPE_PARAM_FS) in dwc2_hsotg_core_init_disconnected()
3432 if (hsotg->params.ipg_isoc_en) in dwc2_hsotg_core_init_disconnected()
3449 if (!using_desc_dma(hsotg)) in dwc2_hsotg_core_init_disconnected()
3452 if (!hsotg->params.external_id_pin_ctl) in dwc2_hsotg_core_init_disconnected()
3457 if (using_dma(hsotg)) { in dwc2_hsotg_core_init_disconnected()
3459 hsotg->params.ahbcfg, in dwc2_hsotg_core_init_disconnected()
3462 /* Set DDMA mode support in the core if needed */ in dwc2_hsotg_core_init_disconnected()
3463 if (using_desc_dma(hsotg)) in dwc2_hsotg_core_init_disconnected()
3467 dwc2_writel(hsotg, ((hsotg->dedicated_fifos) ? in dwc2_hsotg_core_init_disconnected()
3474 * If INTknTXFEmpMsk is enabled, it's important to disable ep interrupts in dwc2_hsotg_core_init_disconnected()
3479 dwc2_writel(hsotg, ((hsotg->dedicated_fifos && !using_dma(hsotg)) ? in dwc2_hsotg_core_init_disconnected()
3496 if (using_desc_dma(hsotg)) { in dwc2_hsotg_core_init_disconnected()
3501 /* Enable Service Interval mode if supported */ in dwc2_hsotg_core_init_disconnected()
3502 if (using_desc_dma(hsotg) && hsotg->params.service_interval) in dwc2_hsotg_core_init_disconnected()
3507 dev_dbg(hsotg->dev, "EP0: DIEPCTL0=0x%08x, DOEPCTL0=0x%08x\n", in dwc2_hsotg_core_init_disconnected()
3519 if (!using_dma(hsotg)) in dwc2_hsotg_core_init_disconnected()
3526 if (!is_usb_reset) { in dwc2_hsotg_core_init_disconnected()
3532 dev_dbg(hsotg->dev, "DCTL=0x%08x\n", dwc2_readl(hsotg, DCTL)); in dwc2_hsotg_core_init_disconnected()
3543 dwc2_writel(hsotg, dwc2_hsotg_ep0_mps(hsotg->eps_out[0]->ep.maxpacket) | in dwc2_hsotg_core_init_disconnected()
3549 dwc2_writel(hsotg, dwc2_hsotg_ep0_mps(hsotg->eps_out[0]->ep.maxpacket) | in dwc2_hsotg_core_init_disconnected()
3554 if (!is_usb_reset) in dwc2_hsotg_core_init_disconnected()
3561 /* program GREFCLK register if needed */ in dwc2_hsotg_core_init_disconnected()
3562 if (using_desc_dma(hsotg) && hsotg->params.service_interval) in dwc2_hsotg_core_init_disconnected()
3565 /* must be at least 3 ms to allow the bus to see the disconnect */ in dwc2_hsotg_core_init_disconnected()
3568 hsotg->lx_state = DWC2_L0; in dwc2_hsotg_core_init_disconnected()
3572 dev_dbg(hsotg->dev, "EP0: DIEPCTL0=0x%08x, DOEPCTL0=0x%08x\n", in dwc2_hsotg_core_init_disconnected()
3579 /* set the soft-disconnect bit */ in dwc2_hsotg_core_disconnect()
3585 /* remove the soft-disconnect and let's go */ in dwc2_hsotg_core_connect()
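/*
 * Editor's sketch of the two one-liners above, assuming the
 * dwc2_set_bit()/dwc2_clear_bit() accessors used elsewhere in the driver:
 * connect/disconnect on the bus is just the DCTL soft-disconnect bit.
 */
static void core_disconnect_sketch(struct dwc2_hsotg *hsotg)
{
	/* set the soft-disconnect bit: detach from the bus */
	dwc2_set_bit(hsotg, DCTL, DCTL_SFTDISCON);
}

static void core_connect_sketch(struct dwc2_hsotg *hsotg)
{
	/* clear the soft-disconnect bit: present the device to the host */
	dwc2_clear_bit(hsotg, DCTL, DCTL_SFTDISCON);
}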
3590 * dwc2_gadget_handle_incomplete_isoc_in - handle incomplete ISO IN Interrupt.
3595 * - Corrupted IN Token for ISOC EP.
3596 * - Packet not complete in FIFO.
3599 * - Determine the EP
3600 * - Disable EP; when 'Endpoint Disabled' interrupt is received Flush FIFO
3609 dev_dbg(hsotg->dev, "Incomplete isoc in interrupt received:\n"); in dwc2_gadget_handle_incomplete_isoc_in()
3613 for (idx = 1; idx < hsotg->num_of_eps; idx++) { in dwc2_gadget_handle_incomplete_isoc_in()
3614 hs_ep = hsotg->eps_in[idx]; in dwc2_gadget_handle_incomplete_isoc_in()
3616 if ((BIT(idx) & ~daintmsk) || !hs_ep->isochronous) in dwc2_gadget_handle_incomplete_isoc_in()
3620 if ((epctrl & DXEPCTL_EPENA) && in dwc2_gadget_handle_incomplete_isoc_in()
3633 * dwc2_gadget_handle_incomplete_isoc_out - handle incomplete ISO OUT Interrupt
3638 * - Corrupted OUT Token for ISOC EP.
3639 * - Packet not complete in FIFO.
3642 * - Determine the EP
3643 * - Set DCTL_SGOUTNAK and unmask GOUTNAKEFF if target frame elapsed.
3654 dev_dbg(hsotg->dev, "%s: GINTSTS_INCOMPL_SOOUT\n", __func__); in dwc2_gadget_handle_incomplete_isoc_out()
3659 for (idx = 1; idx < hsotg->num_of_eps; idx++) { in dwc2_gadget_handle_incomplete_isoc_out()
3660 hs_ep = hsotg->eps_out[idx]; in dwc2_gadget_handle_incomplete_isoc_out()
3662 if ((BIT(idx) & ~daintmsk) || !hs_ep->isochronous) in dwc2_gadget_handle_incomplete_isoc_out()
3666 if ((epctrl & DXEPCTL_EPENA) && in dwc2_gadget_handle_incomplete_isoc_out()
3674 if (!(gintsts & GINTSTS_GOUTNAKEFF)) { in dwc2_gadget_handle_incomplete_isoc_out()
3686 * dwc2_hsotg_irq - handle device interrupt
3697 if (!dwc2_is_device_mode(hsotg)) in dwc2_hsotg_irq()
3700 spin_lock(&hsotg->lock); in dwc2_hsotg_irq()
3705 dev_dbg(hsotg->dev, "%s: %08x %08x (%08x) retry %d\n", in dwc2_hsotg_irq()
3710 if (gintsts & GINTSTS_RESETDET) { in dwc2_hsotg_irq()
3711 dev_dbg(hsotg->dev, "%s: USBRstDet\n", __func__); in dwc2_hsotg_irq()
3715 /* This event must be used only if controller is suspended */ in dwc2_hsotg_irq()
3716 if (hsotg->in_ppd && hsotg->lx_state == DWC2_L2) in dwc2_hsotg_irq()
3719 hsotg->lx_state = DWC2_L0; in dwc2_hsotg_irq()
3722 if (gintsts & (GINTSTS_USBRST | GINTSTS_RESETDET)) { in dwc2_hsotg_irq()
3724 u32 connected = hsotg->connected; in dwc2_hsotg_irq()
3726 dev_dbg(hsotg->dev, "%s: USBRst\n", __func__); in dwc2_hsotg_irq()
3727 dev_dbg(hsotg->dev, "GNPTXSTS=%08x\n", in dwc2_hsotg_irq()
3732 /* Report disconnection if it is not already done. */ in dwc2_hsotg_irq()
3738 if (usb_status & GOTGCTL_BSESVLD && connected) in dwc2_hsotg_irq()
3742 if (gintsts & GINTSTS_ENUMDONE) { in dwc2_hsotg_irq()
3748 if (gintsts & (GINTSTS_OEPINT | GINTSTS_IEPINT)) { in dwc2_hsotg_irq()
3758 dev_dbg(hsotg->dev, "%s: daint=%08x\n", __func__, daint); in dwc2_hsotg_irq()
3760 for (ep = 0; ep < hsotg->num_of_eps && daint_out; in dwc2_hsotg_irq()
3762 if (daint_out & 1) in dwc2_hsotg_irq()
3766 for (ep = 0; ep < hsotg->num_of_eps && daint_in; in dwc2_hsotg_irq()
3768 if (daint_in & 1) in dwc2_hsotg_irq()
3775 if (gintsts & GINTSTS_NPTXFEMP) { in dwc2_hsotg_irq()
3776 dev_dbg(hsotg->dev, "NPTxFEmp\n"); in dwc2_hsotg_irq()
3781 * it needs re-enabling in dwc2_hsotg_irq()
3788 if (gintsts & GINTSTS_PTXFEMP) { in dwc2_hsotg_irq()
3789 dev_dbg(hsotg->dev, "PTxFEmp\n"); in dwc2_hsotg_irq()
3797 if (gintsts & GINTSTS_RXFLVL) { in dwc2_hsotg_irq()
3799 * note, since GINTSTS_RxFLvl doubles as FIFO-not-empty, in dwc2_hsotg_irq()
3800 * we need to retry dwc2_hsotg_handle_rx if this is still in dwc2_hsotg_irq()
3807 if (gintsts & GINTSTS_ERLYSUSP) { in dwc2_hsotg_irq()
3808 dev_dbg(hsotg->dev, "GINTSTS_ErlySusp\n"); in dwc2_hsotg_irq()
3813 * these next two seem to crop up occasionally, causing the core in dwc2_hsotg_irq()
3814 * to shut down the USB transfer, so try clearing them and logging in dwc2_hsotg_irq()
3818 if (gintsts & GINTSTS_GOUTNAKEFF) { in dwc2_hsotg_irq()
3832 dev_dbg(hsotg->dev, "GOUTNakEff triggered\n"); in dwc2_hsotg_irq()
3833 for (idx = 1; idx < hsotg->num_of_eps; idx++) { in dwc2_hsotg_irq()
3834 hs_ep = hsotg->eps_out[idx]; in dwc2_hsotg_irq()
3836 if (BIT(idx) & ~daintmsk) in dwc2_hsotg_irq()
3842 if ((epctrl & DXEPCTL_EPENA) && hs_ep->isochronous) { in dwc2_hsotg_irq()
3849 /* Non-ISOC EPs */ in dwc2_hsotg_irq()
3850 if (hs_ep->halted) { in dwc2_hsotg_irq()
3851 if (!(epctrl & DXEPCTL_EPENA)) in dwc2_hsotg_irq()
3862 if (gintsts & GINTSTS_GINNAKEFF) { in dwc2_hsotg_irq()
3863 dev_info(hsotg->dev, "GINNakEff triggered\n"); in dwc2_hsotg_irq()
3870 if (gintsts & GINTSTS_INCOMPL_SOIN) in dwc2_hsotg_irq()
3873 if (gintsts & GINTSTS_INCOMPL_SOOUT) in dwc2_hsotg_irq()
3877 * if we've had fifo events, we should try and go around the in dwc2_hsotg_irq()
3878 * loop again to see if there's any point in returning yet. in dwc2_hsotg_irq()
3881 if (gintsts & IRQ_RETRY_MASK && --retry_count > 0) in dwc2_hsotg_irq()
3885 if (hsotg->params.service_interval) in dwc2_hsotg_irq()
3888 spin_unlock(&hsotg->lock); in dwc2_hsotg_irq()
3899 epctrl_reg = hs_ep->dir_in ? DIEPCTL(hs_ep->index) : in dwc2_hsotg_ep_stop_xfr()
3900 DOEPCTL(hs_ep->index); in dwc2_hsotg_ep_stop_xfr()
3901 epint_reg = hs_ep->dir_in ? DIEPINT(hs_ep->index) : in dwc2_hsotg_ep_stop_xfr()
3902 DOEPINT(hs_ep->index); in dwc2_hsotg_ep_stop_xfr()
3904 dev_dbg(hsotg->dev, "%s: stopping transfer on %s\n", __func__, in dwc2_hsotg_ep_stop_xfr()
3905 hs_ep->name); in dwc2_hsotg_ep_stop_xfr()
3907 if (hs_ep->dir_in) { in dwc2_hsotg_ep_stop_xfr()
3908 if (hsotg->dedicated_fifos || hs_ep->periodic) { in dwc2_hsotg_ep_stop_xfr()
3911 if (dwc2_hsotg_wait_bit_set(hsotg, epint_reg, in dwc2_hsotg_ep_stop_xfr()
3913 dev_warn(hsotg->dev, in dwc2_hsotg_ep_stop_xfr()
3919 if (dwc2_hsotg_wait_bit_set(hsotg, GINTSTS, in dwc2_hsotg_ep_stop_xfr()
3921 dev_warn(hsotg->dev, in dwc2_hsotg_ep_stop_xfr()
3929 if (!(dwc2_readl(hsotg, GINTSTS) & GINTSTS_GOUTNAKEFF)) in dwc2_hsotg_ep_stop_xfr()
3932 if (!using_dma(hsotg)) { in dwc2_hsotg_ep_stop_xfr()
3934 if (dwc2_hsotg_wait_bit_set(hsotg, GINTSTS, in dwc2_hsotg_ep_stop_xfr()
3936 dev_warn(hsotg->dev, "%s: timeout GINTSTS.RXFLVL\n", in dwc2_hsotg_ep_stop_xfr()
3948 if (dwc2_hsotg_wait_bit_set(hsotg, GINTSTS, in dwc2_hsotg_ep_stop_xfr()
3950 dev_warn(hsotg->dev, "%s: timeout GINTSTS.GOUTNAKEFF\n", in dwc2_hsotg_ep_stop_xfr()
3958 if (dwc2_hsotg_wait_bit_set(hsotg, epint_reg, DXEPINT_EPDISBLD, 100)) in dwc2_hsotg_ep_stop_xfr()
3959 dev_warn(hsotg->dev, in dwc2_hsotg_ep_stop_xfr()
3965 if (hs_ep->dir_in) { in dwc2_hsotg_ep_stop_xfr()
3968 if (hsotg->dedicated_fifos || hs_ep->periodic) in dwc2_hsotg_ep_stop_xfr()
3969 fifo_index = hs_ep->fifo_index; in dwc2_hsotg_ep_stop_xfr()
3977 if (!hsotg->dedicated_fifos && !hs_ep->periodic) in dwc2_hsotg_ep_stop_xfr()
3987 * dwc2_hsotg_ep_enable - enable the given endpoint
3988 * @ep: The USB endpoint to configure
3989 * @desc: The USB endpoint descriptor to configure with.
3991 * This is called from the USB gadget code's usb_ep_enable().
3997 struct dwc2_hsotg *hsotg = hs_ep->parent; in dwc2_hsotg_ep_enable()
3999 unsigned int index = hs_ep->index; in dwc2_hsotg_ep_enable()
4011 dev_dbg(hsotg->dev, in dwc2_hsotg_ep_enable()
4013 __func__, ep->name, desc->bEndpointAddress, desc->bmAttributes, in dwc2_hsotg_ep_enable()
4014 desc->wMaxPacketSize, desc->bInterval); in dwc2_hsotg_ep_enable()
4017 if (index == 0) { in dwc2_hsotg_ep_enable()
4018 dev_err(hsotg->dev, "%s: called for EP 0\n", __func__); in dwc2_hsotg_ep_enable()
4019 return -EINVAL; in dwc2_hsotg_ep_enable()
4022 dir_in = (desc->bEndpointAddress & USB_ENDPOINT_DIR_MASK) ? 1 : 0; in dwc2_hsotg_ep_enable()
4023 if (dir_in != hs_ep->dir_in) { in dwc2_hsotg_ep_enable()
4024 dev_err(hsotg->dev, "%s: direction mismatch!\n", __func__); in dwc2_hsotg_ep_enable()
4025 return -EINVAL; in dwc2_hsotg_ep_enable()
4028 ep_type = desc->bmAttributes & USB_ENDPOINT_XFERTYPE_MASK; in dwc2_hsotg_ep_enable()
4033 if (using_desc_dma(hsotg) && ep_type == USB_ENDPOINT_XFER_ISOC && in dwc2_hsotg_ep_enable()
4034 dir_in && desc->bInterval > 10) { in dwc2_hsotg_ep_enable()
4035 dev_err(hsotg->dev, in dwc2_hsotg_ep_enable()
4037 return -EINVAL; in dwc2_hsotg_ep_enable()
4041 if (using_desc_dma(hsotg) && ep_type == USB_ENDPOINT_XFER_ISOC && in dwc2_hsotg_ep_enable()
4043 dev_err(hsotg->dev, in dwc2_hsotg_ep_enable()
4045 return -EINVAL; in dwc2_hsotg_ep_enable()
4053 dev_dbg(hsotg->dev, "%s: read DxEPCTL=0x%08x from 0x%08x\n", in dwc2_hsotg_ep_enable()
4056 if (using_desc_dma(hsotg) && ep_type == USB_ENDPOINT_XFER_ISOC) in dwc2_hsotg_ep_enable()
4061 /* Allocate DMA descriptor chain for non-ctrl endpoints */ in dwc2_hsotg_ep_enable()
4062 if (using_desc_dma(hsotg) && !hs_ep->desc_list) { in dwc2_hsotg_ep_enable()
4063 hs_ep->desc_list = dmam_alloc_coherent(hsotg->dev, in dwc2_hsotg_ep_enable()
4065 &hs_ep->desc_list_dma, GFP_ATOMIC); in dwc2_hsotg_ep_enable()
4066 if (!hs_ep->desc_list) { in dwc2_hsotg_ep_enable()
4067 ret = -ENOMEM; in dwc2_hsotg_ep_enable()
4072 spin_lock_irqsave(&hsotg->lock, flags); in dwc2_hsotg_ep_enable()
4084 dwc2_hsotg_set_ep_maxpacket(hsotg, hs_ep->index, mps, mc, dir_in); in dwc2_hsotg_ep_enable()
4086 /* default, set to non-periodic */ in dwc2_hsotg_ep_enable()
4087 hs_ep->isochronous = 0; in dwc2_hsotg_ep_enable()
4088 hs_ep->periodic = 0; in dwc2_hsotg_ep_enable()
4089 hs_ep->halted = 0; in dwc2_hsotg_ep_enable()
4090 hs_ep->wedged = 0; in dwc2_hsotg_ep_enable()
4091 hs_ep->interval = desc->bInterval; in dwc2_hsotg_ep_enable()
4097 hs_ep->isochronous = 1; in dwc2_hsotg_ep_enable()
4098 hs_ep->interval = 1 << (desc->bInterval - 1); in dwc2_hsotg_ep_enable()
4099 hs_ep->target_frame = TARGET_FRAME_INITIAL; in dwc2_hsotg_ep_enable()
4100 hs_ep->next_desc = 0; in dwc2_hsotg_ep_enable()
4101 hs_ep->compl_desc = 0; in dwc2_hsotg_ep_enable()
4102 if (dir_in) { in dwc2_hsotg_ep_enable()
4103 hs_ep->periodic = 1; in dwc2_hsotg_ep_enable()
4120 if (dir_in) in dwc2_hsotg_ep_enable()
4121 hs_ep->periodic = 1; in dwc2_hsotg_ep_enable()
4123 if (hsotg->gadget.speed == USB_SPEED_HIGH) in dwc2_hsotg_ep_enable()
4124 hs_ep->interval = 1 << (desc->bInterval - 1); in dwc2_hsotg_ep_enable()
4135 * if the hardware has dedicated fifos, we must give each IN EP in dwc2_hsotg_ep_enable()
4136 * a unique tx-fifo even if it is non-periodic. in dwc2_hsotg_ep_enable()
4138 if (dir_in && hsotg->dedicated_fifos) { in dwc2_hsotg_ep_enable()
4143 size = hs_ep->ep.maxpacket * hs_ep->mc; in dwc2_hsotg_ep_enable()
4145 if (hsotg->fifo_map & (1 << i)) in dwc2_hsotg_ep_enable()
4149 if (val < size) in dwc2_hsotg_ep_enable()
4152 if (val < fifo_size) { in dwc2_hsotg_ep_enable()
4157 if (!fifo_index) { in dwc2_hsotg_ep_enable()
4158 dev_err(hsotg->dev, in dwc2_hsotg_ep_enable()
4160 ret = -ENOMEM; in dwc2_hsotg_ep_enable()
4164 hsotg->fifo_map |= 1 << fifo_index; in dwc2_hsotg_ep_enable()
4166 hs_ep->fifo_index = fifo_index; in dwc2_hsotg_ep_enable()
4167 hs_ep->fifo_size = fifo_size; in dwc2_hsotg_ep_enable()
4171 if (index && !hs_ep->isochronous) in dwc2_hsotg_ep_enable()
4180 if (hsotg->gadget.speed == USB_SPEED_FULL && in dwc2_hsotg_ep_enable()
4181 hs_ep->isochronous && dir_in) { in dwc2_hsotg_ep_enable()
4188 if ((gsnpsid >= DWC2_CORE_REV_2_72a && in dwc2_hsotg_ep_enable()
4195 dev_dbg(hsotg->dev, "%s: write DxEPCTL=0x%08x\n", in dwc2_hsotg_ep_enable()
4199 dev_dbg(hsotg->dev, "%s: read DxEPCTL=0x%08x\n", in dwc2_hsotg_ep_enable()
4206 spin_unlock_irqrestore(&hsotg->lock, flags); in dwc2_hsotg_ep_enable()
4209 if (ret && using_desc_dma(hsotg) && hs_ep->desc_list) { in dwc2_hsotg_ep_enable()
4210 dmam_free_coherent(hsotg->dev, desc_num * in dwc2_hsotg_ep_enable()
4212 hs_ep->desc_list, hs_ep->desc_list_dma); in dwc2_hsotg_ep_enable()
4213 hs_ep->desc_list = NULL; in dwc2_hsotg_ep_enable()
4220 * dwc2_hsotg_ep_disable - disable given endpoint
4226 struct dwc2_hsotg *hsotg = hs_ep->parent; in dwc2_hsotg_ep_disable()
4227 int dir_in = hs_ep->dir_in; in dwc2_hsotg_ep_disable()
4228 int index = hs_ep->index; in dwc2_hsotg_ep_disable()
4232 dev_dbg(hsotg->dev, "%s(ep %p)\n", __func__, ep); in dwc2_hsotg_ep_disable()
4234 if (ep == &hsotg->eps_out[0]->ep) { in dwc2_hsotg_ep_disable()
4235 dev_err(hsotg->dev, "%s: called for ep0\n", __func__); in dwc2_hsotg_ep_disable()
4236 return -EINVAL; in dwc2_hsotg_ep_disable()
4239 if (hsotg->op_state != OTG_STATE_B_PERIPHERAL) { in dwc2_hsotg_ep_disable()
4240 dev_err(hsotg->dev, "%s: called in host mode?\n", __func__); in dwc2_hsotg_ep_disable()
4241 return -EINVAL; in dwc2_hsotg_ep_disable()
4248 if (ctrl & DXEPCTL_EPENA) in dwc2_hsotg_ep_disable()
4255 dev_dbg(hsotg->dev, "%s: DxEPCTL=0x%08x\n", __func__, ctrl); in dwc2_hsotg_ep_disable()
4259 dwc2_hsotg_ctrl_epint(hsotg, hs_ep->index, hs_ep->dir_in, 0); in dwc2_hsotg_ep_disable()
4262 kill_all_requests(hsotg, hs_ep, -ESHUTDOWN); in dwc2_hsotg_ep_disable()
4264 hsotg->fifo_map &= ~(1 << hs_ep->fifo_index); in dwc2_hsotg_ep_disable()
4265 hs_ep->fifo_index = 0; in dwc2_hsotg_ep_disable()
4266 hs_ep->fifo_size = 0; in dwc2_hsotg_ep_disable()
4274 struct dwc2_hsotg *hsotg = hs_ep->parent; in dwc2_hsotg_ep_disable_lock()
4278 spin_lock_irqsave(&hsotg->lock, flags); in dwc2_hsotg_ep_disable_lock()
4280 spin_unlock_irqrestore(&hsotg->lock, flags); in dwc2_hsotg_ep_disable_lock()
4285 * on_list - check request is on the given endpoint
4287 * @test: The request to test if it is on the endpoint.
4293 list_for_each_entry_safe(req, treq, &ep->queue, queue) { in on_list()
4294 if (req == test) in on_list()
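/*
 * Editor's sketch of the helper above, completed from the fragments:
 * walk the endpoint queue and report whether the given request is still
 * linked on it, so dequeue never touches a request it does not own.
 */
static bool on_list_sketch(struct dwc2_hsotg_ep *ep,
			   struct dwc2_hsotg_req *test)
{
	struct dwc2_hsotg_req *req, *treq;

	list_for_each_entry_safe(req, treq, &ep->queue, queue) {
		if (req == test)
			return true;
	}

	return false;
}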
4302 * dwc2_hsotg_ep_dequeue - dequeue given endpoint
4310 struct dwc2_hsotg *hs = hs_ep->parent; in dwc2_hsotg_ep_dequeue() local
4313 dev_dbg(hs->dev, "ep_dequeue(%p,%p)\n", ep, req); in dwc2_hsotg_ep_dequeue()
4315 spin_lock_irqsave(&hs->lock, flags); in dwc2_hsotg_ep_dequeue()
4317 if (!on_list(hs_ep, hs_req)) { in dwc2_hsotg_ep_dequeue()
4318 spin_unlock_irqrestore(&hs->lock, flags); in dwc2_hsotg_ep_dequeue()
4319 return -EINVAL; in dwc2_hsotg_ep_dequeue()
4323 if (req == &hs_ep->req->req) in dwc2_hsotg_ep_dequeue()
4324 dwc2_hsotg_ep_stop_xfr(hs, hs_ep); in dwc2_hsotg_ep_dequeue()
4326 dwc2_hsotg_complete_request(hs, hs_ep, hs_req, -ECONNRESET); in dwc2_hsotg_ep_dequeue()
4327 spin_unlock_irqrestore(&hs->lock, flags); in dwc2_hsotg_ep_dequeue()
4333 * dwc2_gadget_ep_set_wedge - set wedge on a given endpoint
4340 struct dwc2_hsotg *hs = hs_ep->parent; in dwc2_gadget_ep_set_wedge() local
4345 spin_lock_irqsave(&hs->lock, flags); in dwc2_gadget_ep_set_wedge()
4346 hs_ep->wedged = 1; in dwc2_gadget_ep_set_wedge()
4348 spin_unlock_irqrestore(&hs->lock, flags); in dwc2_gadget_ep_set_wedge()
4354 * dwc2_hsotg_ep_sethalt - set halt on a given endpoint
4357 * @now: If true, stall the endpoint now. Otherwise return -EAGAIN if
4360 * We need to stall the endpoint immediately if request comes from set_feature
4366 struct dwc2_hsotg *hs = hs_ep->parent; in dwc2_hsotg_ep_sethalt() local
4367 int index = hs_ep->index; in dwc2_hsotg_ep_sethalt()
4372 dev_info(hs->dev, "%s(ep %p %s, %d)\n", __func__, ep, ep->name, value); in dwc2_hsotg_ep_sethalt()
4374 if (index == 0) { in dwc2_hsotg_ep_sethalt()
4375 if (value) in dwc2_hsotg_ep_sethalt()
4376 dwc2_hsotg_stall_ep0(hs); in dwc2_hsotg_ep_sethalt()
4378 dev_warn(hs->dev, in dwc2_hsotg_ep_sethalt()
4383 if (hs_ep->isochronous) { in dwc2_hsotg_ep_sethalt()
4384 dev_err(hs->dev, "%s is Isochronous Endpoint\n", ep->name); in dwc2_hsotg_ep_sethalt()
4385 return -EINVAL; in dwc2_hsotg_ep_sethalt()
4388 if (!now && value && !list_empty(&hs_ep->queue)) { in dwc2_hsotg_ep_sethalt()
4389 dev_dbg(hs->dev, "%s request is pending, cannot halt\n", in dwc2_hsotg_ep_sethalt()
4390 ep->name); in dwc2_hsotg_ep_sethalt()
4391 return -EAGAIN; in dwc2_hsotg_ep_sethalt()
4394 if (hs_ep->dir_in) { in dwc2_hsotg_ep_sethalt()
4396 epctl = dwc2_readl(hs, epreg); in dwc2_hsotg_ep_sethalt()
4398 if (value) { in dwc2_hsotg_ep_sethalt()
4400 if (epctl & DXEPCTL_EPENA) in dwc2_hsotg_ep_sethalt()
4404 hs_ep->wedged = 0; in dwc2_hsotg_ep_sethalt()
4406 if (xfertype == DXEPCTL_EPTYPE_BULK || in dwc2_hsotg_ep_sethalt()
4410 dwc2_writel(hs, epctl, epreg); in dwc2_hsotg_ep_sethalt()
4413 epctl = dwc2_readl(hs, epreg); in dwc2_hsotg_ep_sethalt()
4415 if (value) { in dwc2_hsotg_ep_sethalt()
4417 dwc2_hsotg_en_gsint(hs, GINTSTS_GOUTNAKEFF); in dwc2_hsotg_ep_sethalt()
4419 if (!(dwc2_readl(hs, GINTSTS) & GINTSTS_GOUTNAKEFF)) in dwc2_hsotg_ep_sethalt()
4420 dwc2_set_bit(hs, DCTL, DCTL_SGOUTNAK); in dwc2_hsotg_ep_sethalt()
4424 hs_ep->wedged = 0; in dwc2_hsotg_ep_sethalt()
4426 if (xfertype == DXEPCTL_EPTYPE_BULK || in dwc2_hsotg_ep_sethalt()
4429 dwc2_writel(hs, epctl, epreg); in dwc2_hsotg_ep_sethalt()
4433 hs_ep->halted = value; in dwc2_hsotg_ep_sethalt()
4438 * dwc2_hsotg_ep_sethalt_lock - set halt on a given endpoint with lock held
4445 struct dwc2_hsotg *hs = hs_ep->parent; in dwc2_hsotg_ep_sethalt_lock() local
4449 spin_lock_irqsave(&hs->lock, flags); in dwc2_hsotg_ep_sethalt_lock()
4451 spin_unlock_irqrestore(&hs->lock, flags); in dwc2_hsotg_ep_sethalt_lock()
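/*
 * Editor's sketch of the locked wrapper above, assuming the three-argument
 * dwc2_hsotg_ep_sethalt(ep, value, now) form documented earlier: take the
 * controller lock, delegate to the unlocked helper with 'now' false, and
 * return its result.
 */
static int ep_sethalt_lock_sketch(struct usb_ep *ep, int value)
{
	struct dwc2_hsotg_ep *hs_ep = container_of(ep, struct dwc2_hsotg_ep,
						   ep);
	struct dwc2_hsotg *hs = hs_ep->parent;
	unsigned long flags;
	int ret;

	spin_lock_irqsave(&hs->lock, flags);
	ret = dwc2_hsotg_ep_sethalt(ep, value, false);
	spin_unlock_irqrestore(&hs->lock, flags);

	return ret;
}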
4469 * dwc2_hsotg_init - initialize the usb core
4491 dev_dbg(hsotg->dev, "GRXFSIZ=0x%08x, GNPTXFSIZ=0x%08x\n", in dwc2_hsotg_init()
4497 if (using_dma(hsotg)) in dwc2_hsotg_init()
4502 * dwc2_hsotg_udc_start - prepare the udc for work
4503 * @gadget: The usb gadget state
4504 * @driver: The usb gadget driver
4516 if (!hsotg) { in dwc2_hsotg_udc_start()
4518 return -ENODEV; in dwc2_hsotg_udc_start()
4521 if (!driver) { in dwc2_hsotg_udc_start()
4522 dev_err(hsotg->dev, "%s: no driver\n", __func__); in dwc2_hsotg_udc_start()
4523 return -EINVAL; in dwc2_hsotg_udc_start()
4526 if (driver->max_speed < USB_SPEED_FULL) in dwc2_hsotg_udc_start()
4527 dev_err(hsotg->dev, "%s: bad speed\n", __func__); in dwc2_hsotg_udc_start()
4529 if (!driver->setup) { in dwc2_hsotg_udc_start()
4530 dev_err(hsotg->dev, "%s: missing entry points\n", __func__); in dwc2_hsotg_udc_start()
4531 return -EINVAL; in dwc2_hsotg_udc_start()
4534 WARN_ON(hsotg->driver); in dwc2_hsotg_udc_start()
4536 driver->driver.bus = NULL; in dwc2_hsotg_udc_start()
4537 hsotg->driver = driver; in dwc2_hsotg_udc_start()
4538 hsotg->gadget.dev.of_node = hsotg->dev->of_node; in dwc2_hsotg_udc_start()
4539 hsotg->gadget.speed = USB_SPEED_UNKNOWN; in dwc2_hsotg_udc_start()
4541 if (hsotg->dr_mode == USB_DR_MODE_PERIPHERAL) { in dwc2_hsotg_udc_start()
4543 if (ret) in dwc2_hsotg_udc_start()
4547 if (!IS_ERR_OR_NULL(hsotg->uphy)) in dwc2_hsotg_udc_start()
4548 otg_set_peripheral(hsotg->uphy->otg, &hsotg->gadget); in dwc2_hsotg_udc_start()
4550 spin_lock_irqsave(&hsotg->lock, flags); in dwc2_hsotg_udc_start()
4551 if (dwc2_hw_is_device(hsotg)) { in dwc2_hsotg_udc_start()
4556 hsotg->enabled = 0; in dwc2_hsotg_udc_start()
4557 spin_unlock_irqrestore(&hsotg->lock, flags); in dwc2_hsotg_udc_start()
4559 gadget->sg_supported = using_desc_dma(hsotg); in dwc2_hsotg_udc_start()
4560 dev_info(hsotg->dev, "bound driver %s\n", driver->driver.name); in dwc2_hsotg_udc_start()
4565 hsotg->driver = NULL; in dwc2_hsotg_udc_start()
4570 * dwc2_hsotg_udc_stop - stop the udc
4571 * @gadget: The usb gadget state
4581 if (!hsotg) in dwc2_hsotg_udc_stop()
4582 return -ENODEV; in dwc2_hsotg_udc_stop()
4585 for (ep = 1; ep < hsotg->num_of_eps; ep++) { in dwc2_hsotg_udc_stop()
4586 if (hsotg->eps_in[ep]) in dwc2_hsotg_udc_stop()
4587 dwc2_hsotg_ep_disable_lock(&hsotg->eps_in[ep]->ep); in dwc2_hsotg_udc_stop()
4588 if (hsotg->eps_out[ep]) in dwc2_hsotg_udc_stop()
4589 dwc2_hsotg_ep_disable_lock(&hsotg->eps_out[ep]->ep); in dwc2_hsotg_udc_stop()
4592 spin_lock_irqsave(&hsotg->lock, flags); in dwc2_hsotg_udc_stop()
4594 hsotg->driver = NULL; in dwc2_hsotg_udc_stop()
4595 hsotg->gadget.speed = USB_SPEED_UNKNOWN; in dwc2_hsotg_udc_stop()
4596 hsotg->enabled = 0; in dwc2_hsotg_udc_stop()
4598 spin_unlock_irqrestore(&hsotg->lock, flags); in dwc2_hsotg_udc_stop()
4600 if (!IS_ERR_OR_NULL(hsotg->uphy)) in dwc2_hsotg_udc_stop()
4601 otg_set_peripheral(hsotg->uphy->otg, NULL); in dwc2_hsotg_udc_stop()
4603 if (hsotg->dr_mode == USB_DR_MODE_PERIPHERAL) in dwc2_hsotg_udc_stop()
4610 * dwc2_hsotg_gadget_getframe - read the frame number
4611 * @gadget: The usb gadget state
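/*
 * Editor's sketch of the frame-number getter: it simply reports the SOF
 * frame number latched by the core via the dwc2_hsotg_read_frameno()
 * helper used elsewhere in this file (container_of() recovers the hsotg
 * from the gadget embedded in it).
 */
static int gadget_getframe_sketch(struct usb_gadget *gadget)
{
	struct dwc2_hsotg *hsotg = container_of(gadget, struct dwc2_hsotg,
						gadget);

	return dwc2_hsotg_read_frameno(hsotg);
}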
4621 * dwc2_hsotg_set_selfpowered - set if device is self/bus powered
4622 * @gadget: The usb gadget state
4623 * @is_selfpowered: Whether the device is self-powered
4625 * Set if the device is self or bus powered.
4633 spin_lock_irqsave(&hsotg->lock, flags); in dwc2_hsotg_set_selfpowered()
4634 gadget->is_selfpowered = !!is_selfpowered; in dwc2_hsotg_set_selfpowered()
4635 spin_unlock_irqrestore(&hsotg->lock, flags); in dwc2_hsotg_set_selfpowered()
4641 * dwc2_hsotg_pullup - connect/disconnect the USB PHY
4642 * @gadget: The usb gadget state
4643 * @is_on: Current state of the USB PHY
4645 * Connect/Disconnect the USB PHY pullup
4652 dev_dbg(hsotg->dev, "%s: is_on: %d op_state: %d\n", __func__, is_on, in dwc2_hsotg_pullup()
4653 hsotg->op_state); in dwc2_hsotg_pullup()
4656 if (hsotg->op_state != OTG_STATE_B_PERIPHERAL) { in dwc2_hsotg_pullup()
4657 hsotg->enabled = is_on; in dwc2_hsotg_pullup()
4661 spin_lock_irqsave(&hsotg->lock, flags); in dwc2_hsotg_pullup()
4662 if (is_on) { in dwc2_hsotg_pullup()
4663 hsotg->enabled = 1; in dwc2_hsotg_pullup()
4665 /* Enable ACG feature in device mode, if supported */ in dwc2_hsotg_pullup()
4671 hsotg->enabled = 0; in dwc2_hsotg_pullup()
4674 hsotg->gadget.speed = USB_SPEED_UNKNOWN; in dwc2_hsotg_pullup()
4675 spin_unlock_irqrestore(&hsotg->lock, flags); in dwc2_hsotg_pullup()
4685 dev_dbg(hsotg->dev, "%s: is_active: %d\n", __func__, is_active); in dwc2_hsotg_vbus_session()
4686 spin_lock_irqsave(&hsotg->lock, flags); in dwc2_hsotg_vbus_session()
4689 * If controller is in partial power down state, it must exit from in dwc2_hsotg_vbus_session()
4690 * that state before being initialized / de-initialized in dwc2_hsotg_vbus_session()
4692 if (hsotg->lx_state == DWC2_L2 && hsotg->in_ppd) in dwc2_hsotg_vbus_session()
4699 if (is_active) { in dwc2_hsotg_vbus_session()
4700 hsotg->op_state = OTG_STATE_B_PERIPHERAL; in dwc2_hsotg_vbus_session()
4703 if (hsotg->enabled) { in dwc2_hsotg_vbus_session()
4704 /* Enable ACG feature in device mode, if supported */ in dwc2_hsotg_vbus_session()
4713 spin_unlock_irqrestore(&hsotg->lock, flags); in dwc2_hsotg_vbus_session()
4718 * dwc2_hsotg_vbus_draw - report bMaxPower field
4719 * @gadget: The usb gadget state
4728 if (IS_ERR_OR_NULL(hsotg->uphy)) in dwc2_hsotg_vbus_draw()
4729 return -ENOTSUPP; in dwc2_hsotg_vbus_draw()
4730 return usb_phy_set_power(hsotg->uphy, mA); in dwc2_hsotg_vbus_draw()
4738 spin_lock_irqsave(&hsotg->lock, flags); in dwc2_gadget_set_speed()
4741 hsotg->params.speed = DWC2_SPEED_PARAM_HIGH; in dwc2_gadget_set_speed()
4744 hsotg->params.speed = DWC2_SPEED_PARAM_FULL; in dwc2_gadget_set_speed()
4747 hsotg->params.speed = DWC2_SPEED_PARAM_LOW; in dwc2_gadget_set_speed()
4750 dev_err(hsotg->dev, "invalid speed (%d)\n", speed); in dwc2_gadget_set_speed()
4752 spin_unlock_irqrestore(&hsotg->lock, flags); in dwc2_gadget_set_speed()
4767 * dwc2_hsotg_initep - initialise a single endpoint
4771 * @dir_in: True if direction is in.
4784 if (epnum == 0) in dwc2_hsotg_initep()
4786 else if (dir_in) in dwc2_hsotg_initep()
4791 hs_ep->dir_in = dir_in; in dwc2_hsotg_initep()
4792 hs_ep->index = epnum; in dwc2_hsotg_initep()
4794 snprintf(hs_ep->name, sizeof(hs_ep->name), "ep%d%s", epnum, dir); in dwc2_hsotg_initep()
4796 INIT_LIST_HEAD(&hs_ep->queue); in dwc2_hsotg_initep()
4797 INIT_LIST_HEAD(&hs_ep->ep.ep_list); in dwc2_hsotg_initep()
4800 if (epnum) in dwc2_hsotg_initep()
4801 list_add_tail(&hs_ep->ep.ep_list, &hsotg->gadget.ep_list); in dwc2_hsotg_initep()
4803 hs_ep->parent = hsotg; in dwc2_hsotg_initep()
4804 hs_ep->ep.name = hs_ep->name; in dwc2_hsotg_initep()
4806 if (hsotg->params.speed == DWC2_SPEED_PARAM_LOW) in dwc2_hsotg_initep()
4807 usb_ep_set_maxpacket_limit(&hs_ep->ep, 8); in dwc2_hsotg_initep()
4809 usb_ep_set_maxpacket_limit(&hs_ep->ep, in dwc2_hsotg_initep()
4811 hs_ep->ep.ops = &dwc2_hsotg_ep_ops; in dwc2_hsotg_initep()
4813 if (epnum == 0) { in dwc2_hsotg_initep()
4814 hs_ep->ep.caps.type_control = true; in dwc2_hsotg_initep()
4816 if (hsotg->params.speed != DWC2_SPEED_PARAM_LOW) { in dwc2_hsotg_initep()
4817 hs_ep->ep.caps.type_iso = true; in dwc2_hsotg_initep()
4818 hs_ep->ep.caps.type_bulk = true; in dwc2_hsotg_initep()
4820 hs_ep->ep.caps.type_int = true; in dwc2_hsotg_initep()
4823 if (dir_in) in dwc2_hsotg_initep()
4824 hs_ep->ep.caps.dir_in = true; in dwc2_hsotg_initep()
4826 hs_ep->ep.caps.dir_out = true; in dwc2_hsotg_initep()
4829 * if we're using dma, we need to set the next-endpoint pointer in dwc2_hsotg_initep()
4833 if (using_dma(hsotg)) { in dwc2_hsotg_initep()
4836 if (dir_in) in dwc2_hsotg_initep()
4844 * dwc2_hsotg_hw_cfg - read HW configuration registers
4847 * Read the USB core HW configuration registers
4857 hsotg->num_of_eps = hsotg->hw_params.num_dev_ep; in dwc2_hsotg_hw_cfg()
4860 hsotg->num_of_eps++; in dwc2_hsotg_hw_cfg()
4862 hsotg->eps_in[0] = devm_kzalloc(hsotg->dev, in dwc2_hsotg_hw_cfg()
4865 if (!hsotg->eps_in[0]) in dwc2_hsotg_hw_cfg()
4866 return -ENOMEM; in dwc2_hsotg_hw_cfg()
4868 hsotg->eps_out[0] = hsotg->eps_in[0]; in dwc2_hsotg_hw_cfg()
4870 cfg = hsotg->hw_params.dev_ep_dirs; in dwc2_hsotg_hw_cfg()
4871 for (i = 1, cfg >>= 2; i < hsotg->num_of_eps; i++, cfg >>= 2) { in dwc2_hsotg_hw_cfg()
4874 if (!(ep_type & 2)) { in dwc2_hsotg_hw_cfg()
4875 hsotg->eps_in[i] = devm_kzalloc(hsotg->dev, in dwc2_hsotg_hw_cfg()
4877 if (!hsotg->eps_in[i]) in dwc2_hsotg_hw_cfg()
4878 return -ENOMEM; in dwc2_hsotg_hw_cfg()
4881 if (!(ep_type & 1)) { in dwc2_hsotg_hw_cfg()
4882 hsotg->eps_out[i] = devm_kzalloc(hsotg->dev, in dwc2_hsotg_hw_cfg()
4884 if (!hsotg->eps_out[i]) in dwc2_hsotg_hw_cfg()
4885 return -ENOMEM; in dwc2_hsotg_hw_cfg()
4889 hsotg->fifo_mem = hsotg->hw_params.total_fifo_size; in dwc2_hsotg_hw_cfg()
4890 hsotg->dedicated_fifos = hsotg->hw_params.en_multiple_tx_fifo; in dwc2_hsotg_hw_cfg()
4892 dev_info(hsotg->dev, "EPs: %d, %s fifos, %d entries in SPRAM\n", in dwc2_hsotg_hw_cfg()
4893 hsotg->num_of_eps, in dwc2_hsotg_hw_cfg()
4894 hsotg->dedicated_fifos ? "dedicated" : "shared", in dwc2_hsotg_hw_cfg()
4895 hsotg->fifo_mem); in dwc2_hsotg_hw_cfg()
4900 * dwc2_hsotg_dump - dump state of the udc
4907 struct device *dev = hsotg->dev; in dwc2_hsotg_dump()
4923 for (idx = 1; idx < hsotg->num_of_eps; idx++) { in dwc2_hsotg_dump()
4930 for (idx = 0; idx < hsotg->num_of_eps; idx++) { in dwc2_hsotg_dump()
4932 "ep%d-in: EPCTL=0x%08x, SIZ=0x%08x, DMA=0x%08x\n", idx, in dwc2_hsotg_dump()
4939 "ep%d-out: EPCTL=0x%08x, SIZ=0x%08x, DMA=0x%08x\n", in dwc2_hsotg_dump()
4951 * dwc2_gadget_init - init function for gadget
4957 struct device *dev = hsotg->dev; in dwc2_gadget_init()
4963 hsotg->params.g_np_tx_fifo_size); in dwc2_gadget_init()
4964 dev_dbg(dev, "RXFIFO size: %d\n", hsotg->params.g_rx_fifo_size); in dwc2_gadget_init()
4966 hsotg->gadget.max_speed = USB_SPEED_HIGH; in dwc2_gadget_init()
4967 hsotg->gadget.ops = &dwc2_hsotg_gadget_ops; in dwc2_gadget_init()
4968 hsotg->gadget.name = dev_name(dev); in dwc2_gadget_init()
4969 hsotg->remote_wakeup_allowed = 0; in dwc2_gadget_init()
4971 if (hsotg->params.lpm) in dwc2_gadget_init()
4972 hsotg->gadget.lpm_capable = true; in dwc2_gadget_init()
4974 if (hsotg->dr_mode == USB_DR_MODE_OTG) in dwc2_gadget_init()
4975 hsotg->gadget.is_otg = 1; in dwc2_gadget_init()
4976 else if (hsotg->dr_mode == USB_DR_MODE_PERIPHERAL) in dwc2_gadget_init()
4977 hsotg->op_state = OTG_STATE_B_PERIPHERAL; in dwc2_gadget_init()
4980 if (ret) { in dwc2_gadget_init()
4981 dev_err(hsotg->dev, "Hardware configuration failed: %d\n", ret); in dwc2_gadget_init()
4985 hsotg->ctrl_buff = devm_kzalloc(hsotg->dev, in dwc2_gadget_init()
4987 if (!hsotg->ctrl_buff) in dwc2_gadget_init()
4988 return -ENOMEM; in dwc2_gadget_init()
4990 hsotg->ep0_buff = devm_kzalloc(hsotg->dev, in dwc2_gadget_init()
4992 if (!hsotg->ep0_buff) in dwc2_gadget_init()
4993 return -ENOMEM; in dwc2_gadget_init()
4995 if (using_desc_dma(hsotg)) { in dwc2_gadget_init()
4997 if (ret < 0) in dwc2_gadget_init()
5001 ret = devm_request_irq(hsotg->dev, hsotg->irq, dwc2_hsotg_irq, in dwc2_gadget_init()
5002 IRQF_SHARED, dev_name(hsotg->dev), hsotg); in dwc2_gadget_init()
5003 if (ret < 0) { in dwc2_gadget_init()
5008 /* hsotg->num_of_eps holds number of EPs other than ep0 */ in dwc2_gadget_init()
5010 if (hsotg->num_of_eps == 0) { in dwc2_gadget_init()
5012 return -EINVAL; in dwc2_gadget_init()
5017 INIT_LIST_HEAD(&hsotg->gadget.ep_list); in dwc2_gadget_init()
5018 hsotg->gadget.ep0 = &hsotg->eps_out[0]->ep; in dwc2_gadget_init()
5022 hsotg->ctrl_req = dwc2_hsotg_ep_alloc_request(&hsotg->eps_out[0]->ep, in dwc2_gadget_init()
5024 if (!hsotg->ctrl_req) { in dwc2_gadget_init()
5026 return -ENOMEM; in dwc2_gadget_init()
5030 for (epnum = 0; epnum < hsotg->num_of_eps; epnum++) { in dwc2_gadget_init()
5031 if (hsotg->eps_in[epnum]) in dwc2_gadget_init()
5032 dwc2_hsotg_initep(hsotg, hsotg->eps_in[epnum], in dwc2_gadget_init()
5034 if (hsotg->eps_out[epnum]) in dwc2_gadget_init()
5035 dwc2_hsotg_initep(hsotg, hsotg->eps_out[epnum], in dwc2_gadget_init()
5045 * dwc2_hsotg_remove - remove function for hsotg driver
5051 usb_del_gadget_udc(&hsotg->gadget); in dwc2_hsotg_remove()
5052 dwc2_hsotg_ep_free_request(&hsotg->eps_out[0]->ep, hsotg->ctrl_req); in dwc2_hsotg_remove()
5061 if (hsotg->lx_state != DWC2_L0) in dwc2_hsotg_suspend()
5064 if (hsotg->driver) { in dwc2_hsotg_suspend()
5067 dev_info(hsotg->dev, "suspending usb gadget %s\n", in dwc2_hsotg_suspend()
5068 hsotg->driver->driver.name); in dwc2_hsotg_suspend()
5070 spin_lock_irqsave(&hsotg->lock, flags); in dwc2_hsotg_suspend()
5071 if (hsotg->enabled) in dwc2_hsotg_suspend()
5074 hsotg->gadget.speed = USB_SPEED_UNKNOWN; in dwc2_hsotg_suspend()
5075 spin_unlock_irqrestore(&hsotg->lock, flags); in dwc2_hsotg_suspend()
5077 for (ep = 0; ep < hsotg->num_of_eps; ep++) { in dwc2_hsotg_suspend()
5078 if (hsotg->eps_in[ep]) in dwc2_hsotg_suspend()
5079 dwc2_hsotg_ep_disable_lock(&hsotg->eps_in[ep]->ep); in dwc2_hsotg_suspend()
5080 if (hsotg->eps_out[ep]) in dwc2_hsotg_suspend()
5081 dwc2_hsotg_ep_disable_lock(&hsotg->eps_out[ep]->ep); in dwc2_hsotg_suspend()
5092 if (hsotg->lx_state == DWC2_L2) in dwc2_hsotg_resume()
5095 if (hsotg->driver) { in dwc2_hsotg_resume()
5096 dev_info(hsotg->dev, "resuming usb gadget %s\n", in dwc2_hsotg_resume()
5097 hsotg->driver->driver.name); in dwc2_hsotg_resume()
5099 spin_lock_irqsave(&hsotg->lock, flags); in dwc2_hsotg_resume()
5101 if (hsotg->enabled) { in dwc2_hsotg_resume()
5102 /* Enable ACG feature in device mode, if supported */ in dwc2_hsotg_resume()
5106 spin_unlock_irqrestore(&hsotg->lock, flags); in dwc2_hsotg_resume()
5113 * dwc2_backup_device_registers() - Backup controller device registers.
5114 * When suspending the USB bus, registers need to be backed up
5115 * if controller power is disabled once suspended.
5124 dev_dbg(hsotg->dev, "%s\n", __func__); in dwc2_backup_device_registers()
5127 dr = &hsotg->dr_backup; in dwc2_backup_device_registers()
5129 dr->dcfg = dwc2_readl(hsotg, DCFG); in dwc2_backup_device_registers()
5130 dr->dctl = dwc2_readl(hsotg, DCTL); in dwc2_backup_device_registers()
5131 dr->daintmsk = dwc2_readl(hsotg, DAINTMSK); in dwc2_backup_device_registers()
5132 dr->diepmsk = dwc2_readl(hsotg, DIEPMSK); in dwc2_backup_device_registers()
5133 dr->doepmsk = dwc2_readl(hsotg, DOEPMSK); in dwc2_backup_device_registers()
5135 for (i = 0; i < hsotg->num_of_eps; i++) { in dwc2_backup_device_registers()
5137 dr->diepctl[i] = dwc2_readl(hsotg, DIEPCTL(i)); in dwc2_backup_device_registers()
5140 if (dr->diepctl[i] & DXEPCTL_DPID) in dwc2_backup_device_registers()
5141 dr->diepctl[i] |= DXEPCTL_SETD1PID; in dwc2_backup_device_registers()
5143 dr->diepctl[i] |= DXEPCTL_SETD0PID; in dwc2_backup_device_registers()
5145 dr->dieptsiz[i] = dwc2_readl(hsotg, DIEPTSIZ(i)); in dwc2_backup_device_registers()
5146 dr->diepdma[i] = dwc2_readl(hsotg, DIEPDMA(i)); in dwc2_backup_device_registers()
5149 dr->doepctl[i] = dwc2_readl(hsotg, DOEPCTL(i)); in dwc2_backup_device_registers()
5152 if (dr->doepctl[i] & DXEPCTL_DPID) in dwc2_backup_device_registers()
5153 dr->doepctl[i] |= DXEPCTL_SETD1PID; in dwc2_backup_device_registers()
5155 dr->doepctl[i] |= DXEPCTL_SETD0PID; in dwc2_backup_device_registers()
5157 dr->doeptsiz[i] = dwc2_readl(hsotg, DOEPTSIZ(i)); in dwc2_backup_device_registers()
5158 dr->doepdma[i] = dwc2_readl(hsotg, DOEPDMA(i)); in dwc2_backup_device_registers()
5159 dr->dtxfsiz[i] = dwc2_readl(hsotg, DPTXFSIZN(i)); in dwc2_backup_device_registers()
5161 dr->valid = true; in dwc2_backup_device_registers()
5166 * dwc2_restore_device_registers() - Restore controller device registers.
5167 * When resuming the USB bus, device registers need to be restored
5168 * if controller power was disabled.
5173 * Return: 0 if successful, negative error code otherwise
5180 dev_dbg(hsotg->dev, "%s\n", __func__); in dwc2_restore_device_registers()
5183 dr = &hsotg->dr_backup; in dwc2_restore_device_registers()
5184 if (!dr->valid) { in dwc2_restore_device_registers()
5185 dev_err(hsotg->dev, "%s: no device registers to restore\n", in dwc2_restore_device_registers()
5187 return -EINVAL; in dwc2_restore_device_registers()
5189 dr->valid = false; in dwc2_restore_device_registers()
5191 if (!remote_wakeup) in dwc2_restore_device_registers()
5192 dwc2_writel(hsotg, dr->dctl, DCTL); in dwc2_restore_device_registers()
5194 dwc2_writel(hsotg, dr->daintmsk, DAINTMSK); in dwc2_restore_device_registers()
5195 dwc2_writel(hsotg, dr->diepmsk, DIEPMSK); in dwc2_restore_device_registers()
5196 dwc2_writel(hsotg, dr->doepmsk, DOEPMSK); in dwc2_restore_device_registers()
5198 for (i = 0; i < hsotg->num_of_eps; i++) { in dwc2_restore_device_registers()
5200 dwc2_writel(hsotg, dr->dieptsiz[i], DIEPTSIZ(i)); in dwc2_restore_device_registers()
5201 dwc2_writel(hsotg, dr->diepdma[i], DIEPDMA(i)); in dwc2_restore_device_registers()
5202 dwc2_writel(hsotg, dr->doeptsiz[i], DOEPTSIZ(i)); in dwc2_restore_device_registers()
5208 if (hsotg->params.g_dma_desc && in dwc2_restore_device_registers()
5209 (dr->diepctl[i] & DXEPCTL_EPENA)) in dwc2_restore_device_registers()
5210 dr->diepdma[i] = hsotg->eps_in[i]->desc_list_dma; in dwc2_restore_device_registers()
5211 dwc2_writel(hsotg, dr->dtxfsiz[i], DPTXFSIZN(i)); in dwc2_restore_device_registers()
5212 dwc2_writel(hsotg, dr->diepctl[i], DIEPCTL(i)); in dwc2_restore_device_registers()
5214 dwc2_writel(hsotg, dr->doeptsiz[i], DOEPTSIZ(i)); in dwc2_restore_device_registers()
5220 if (hsotg->params.g_dma_desc && in dwc2_restore_device_registers()
5221 (dr->doepctl[i] & DXEPCTL_EPENA)) in dwc2_restore_device_registers()
5222 dr->doepdma[i] = hsotg->eps_out[i]->desc_list_dma; in dwc2_restore_device_registers()
5223 dwc2_writel(hsotg, dr->doepdma[i], DOEPDMA(i)); in dwc2_restore_device_registers()
5224 dwc2_writel(hsotg, dr->doepctl[i], DOEPCTL(i)); in dwc2_restore_device_registers()
5231 * dwc2_gadget_init_lpm - Configure the core to support LPM in device mode
5240 if (!hsotg->params.lpm) in dwc2_gadget_init_lpm()
5244 val |= hsotg->params.hird_threshold_en ? GLPMCFG_HIRD_THRES_EN : 0; in dwc2_gadget_init_lpm()
5245 val |= hsotg->params.lpm_clock_gating ? GLPMCFG_ENBLSLPM : 0; in dwc2_gadget_init_lpm()
5246 val |= hsotg->params.hird_threshold << GLPMCFG_HIRD_THRES_SHIFT; in dwc2_gadget_init_lpm()
5247 val |= hsotg->params.besl ? GLPMCFG_ENBESL : 0; in dwc2_gadget_init_lpm()
5251 dev_dbg(hsotg->dev, "GLPMCFG=0x%08x\n", dwc2_readl(hsotg, GLPMCFG)); in dwc2_gadget_init_lpm()
5254 if (hsotg->params.service_interval) in dwc2_gadget_init_lpm()
5259 * dwc2_gadget_program_ref_clk - Program GREFCLK register in device mode
5269 val |= hsotg->params.ref_clk_per << GREFCLK_REFCLKPER_SHIFT; in dwc2_gadget_program_ref_clk()
5270 val |= hsotg->params.sof_cnt_wkup_alert << in dwc2_gadget_program_ref_clk()
5274 dev_dbg(hsotg->dev, "GREFCLK=0x%08x\n", dwc2_readl(hsotg, GREFCLK)); in dwc2_gadget_program_ref_clk()
5278 * dwc2_gadget_enter_hibernation() - Put controller in Hibernation.
5282 * Return non-zero if failed to enter hibernation.
5290 hsotg->lx_state = DWC2_L2; in dwc2_gadget_enter_hibernation()
5291 dev_dbg(hsotg->dev, "Start of hibernation completed\n"); in dwc2_gadget_enter_hibernation()
5293 if (ret) { in dwc2_gadget_enter_hibernation()
5294 dev_err(hsotg->dev, "%s: failed to backup global registers\n", in dwc2_gadget_enter_hibernation()
5299 if (ret) { in dwc2_gadget_enter_hibernation()
5300 dev_err(hsotg->dev, "%s: failed to backup device registers\n", in dwc2_gadget_enter_hibernation()
5311 hsotg->hibernated = 1; in dwc2_gadget_enter_hibernation()
5339 /* Save the gpwrdn register for later use if a stschng interrupt occurs */ in dwc2_gadget_enter_hibernation()
5340 hsotg->gr_backup.gpwrdn = dwc2_readl(hsotg, GPWRDN); in dwc2_gadget_enter_hibernation()
5341 dev_dbg(hsotg->dev, "Hibernation completed\n"); in dwc2_gadget_enter_hibernation()
5349 * resume/reset and device-initiated remote wakeup.
5355 * Return non-zero if failed to exit from hibernation.
5367 gr = &hsotg->gr_backup; in dwc2_gadget_exit_hibernation()
5368 dr = &hsotg->dr_backup; in dwc2_gadget_exit_hibernation()
5370 if (!hsotg->hibernated) { in dwc2_gadget_exit_hibernation()
5371 dev_dbg(hsotg->dev, "Already exited from Hibernation\n"); in dwc2_gadget_exit_hibernation()
5374 dev_dbg(hsotg->dev, in dwc2_gadget_exit_hibernation()
5380 if (!reset) { in dwc2_gadget_exit_hibernation()
5385 /* De-assert Restore */ in dwc2_gadget_exit_hibernation()
5391 if (!rem_wakeup) { in dwc2_gadget_exit_hibernation()
5398 dwc2_writel(hsotg, gr->gusbcfg, GUSBCFG); in dwc2_gadget_exit_hibernation()
5399 dwc2_writel(hsotg, dr->dcfg, DCFG); in dwc2_gadget_exit_hibernation()
5400 dwc2_writel(hsotg, dr->dctl, DCTL); in dwc2_gadget_exit_hibernation()
5402 /* On USB Reset, reset device address to zero */ in dwc2_gadget_exit_hibernation()
5403 if (reset) in dwc2_gadget_exit_hibernation()
5406 /* De-assert Wakeup Logic */ in dwc2_gadget_exit_hibernation()
5411 if (rem_wakeup) { in dwc2_gadget_exit_hibernation()
5414 dwc2_writel(hsotg, dr->dctl | DCTL_RMTWKUPSIG, DCTL); in dwc2_gadget_exit_hibernation()
5429 if (ret) { in dwc2_gadget_exit_hibernation()
5430 dev_err(hsotg->dev, "%s: failed to restore registers\n", in dwc2_gadget_exit_hibernation()
5437 if (ret) { in dwc2_gadget_exit_hibernation()
5438 dev_err(hsotg->dev, "%s: failed to restore device registers\n", in dwc2_gadget_exit_hibernation()
5443 if (rem_wakeup) { in dwc2_gadget_exit_hibernation()
5450 hsotg->hibernated = 0; in dwc2_gadget_exit_hibernation()
5451 hsotg->lx_state = DWC2_L0; in dwc2_gadget_exit_hibernation()
5452 dev_dbg(hsotg->dev, "Hibernation recovery completes here\n"); in dwc2_gadget_exit_hibernation()
5458 * dwc2_gadget_enter_partial_power_down() - Put controller in partial
5463 * Return: non-zero if failed to enter device partial power down.
5472 dev_dbg(hsotg->dev, "Entering device partial power down started.\n"); in dwc2_gadget_enter_partial_power_down()
5476 if (ret) { in dwc2_gadget_enter_partial_power_down()
5477 dev_err(hsotg->dev, "%s: failed to backup global registers\n", in dwc2_gadget_enter_partial_power_down()
5483 if (ret) { in dwc2_gadget_enter_partial_power_down()
5484 dev_err(hsotg->dev, "%s: failed to backup device registers\n", in dwc2_gadget_enter_partial_power_down()
5510 hsotg->in_ppd = 1; in dwc2_gadget_enter_partial_power_down()
5511 hsotg->lx_state = DWC2_L2; in dwc2_gadget_enter_partial_power_down()
5513 dev_dbg(hsotg->dev, "Entering device partial power down completed.\n"); in dwc2_gadget_enter_partial_power_down()
5519 * dwc2_gadget_exit_partial_power_down() - Exit controller from device partial
5525 * Return: non-zero if failed to exit device partial power down.
5537 dr = &hsotg->dr_backup; in dwc2_gadget_exit_partial_power_down()
5539 dev_dbg(hsotg->dev, "Exiting device partial Power Down started.\n"); in dwc2_gadget_exit_partial_power_down()
5554 if (restore) { in dwc2_gadget_exit_partial_power_down()
5556 if (ret) { in dwc2_gadget_exit_partial_power_down()
5557 dev_err(hsotg->dev, "%s: failed to restore registers\n", in dwc2_gadget_exit_partial_power_down()
5562 dwc2_writel(hsotg, dr->dcfg, DCFG); in dwc2_gadget_exit_partial_power_down()
5565 if (ret) { in dwc2_gadget_exit_partial_power_down()
5566 dev_err(hsotg->dev, "%s: failed to restore device registers\n", in dwc2_gadget_exit_partial_power_down()
5572 /* Set the Power-On Programming done bit */ in dwc2_gadget_exit_partial_power_down()
5578 hsotg->in_ppd = 0; in dwc2_gadget_exit_partial_power_down()
5579 hsotg->lx_state = DWC2_L0; in dwc2_gadget_exit_partial_power_down()
5581 dev_dbg(hsotg->dev, "Exiting device partial Power Down completed.\n"); in dwc2_gadget_exit_partial_power_down()
5586 * dwc2_gadget_enter_clock_gating() - Put controller in clock gating.
5590 * Return: non-zero if failed to enter device clock gating.
5598 dev_dbg(hsotg->dev, "Entering device clock gating.\n"); in dwc2_gadget_enter_clock_gating()
5612 hsotg->lx_state = DWC2_L2; in dwc2_gadget_enter_clock_gating()
5613 hsotg->bus_suspended = true; in dwc2_gadget_enter_clock_gating()
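/*
 * Editor's sketch of the clock-gating entry above, assuming the
 * PCGCTL_STOPPCLK/PCGCTL_GATEHCLK bits from the register header: stop
 * the PHY clock, gate the AHB clock, then record the suspended state.
 */
static void enter_clock_gating_sketch(struct dwc2_hsotg *hsotg)
{
	u32 pcgctl = dwc2_readl(hsotg, PCGCTL);

	pcgctl |= PCGCTL_STOPPCLK;		/* stop the PHY clock */
	dwc2_writel(hsotg, pcgctl, PCGCTL);

	pcgctl |= PCGCTL_GATEHCLK;		/* gate the AHB (hclk) */
	dwc2_writel(hsotg, pcgctl, PCGCTL);

	hsotg->lx_state = DWC2_L2;
	hsotg->bus_suspended = true;
}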
5617 * dwc2_gadget_exit_clock_gating() - Exit controller from device clock gating.
5629 dev_dbg(hsotg->dev, "Exiting device clock gating.\n"); in dwc2_gadget_exit_clock_gating()
5643 if (rem_wakeup) { in dwc2_gadget_exit_clock_gating()
5652 hsotg->lx_state = DWC2_L0; in dwc2_gadget_exit_clock_gating()
5653 hsotg->bus_suspended = false; in dwc2_gadget_exit_clock_gating()