Lines Matching +full:hs +full:- +full:usb +full:- +full:if

1 // SPDX-License-Identifier: GPL-2.0
11 * S3C USB2.0 High-speed / OTG driver
19 #include <linux/dma-mapping.h>
27 #include <linux/usb/ch9.h>
28 #include <linux/usb/gadget.h>
29 #include <linux/usb/phy.h>
30 #include <linux/usb/composite.h>
65 if (dir_in) in index_to_ep()
66 return hsotg->eps_in[ep_index]; in index_to_ep()
68 return hsotg->eps_out[ep_index]; in index_to_ep()
75 * using_dma - return the DMA status of the driver.
78 * Return true if we're using DMA.
95 return hsotg->params.g_dma; in using_dma()
99 * using_desc_dma - return the descriptor DMA status of the driver.
102 * Return true if we're using descriptor DMA.
106 return hsotg->params.g_dma_desc; in using_desc_dma()
110 * dwc2_gadget_incr_frame_num - Increments the targeted frame number.
113 * This function will also check if the frame number overruns DSTS_SOFFN_LIMIT.
114 * If an overrun occurs it will wrap the value and set the frame_overrun flag.
118 struct dwc2_hsotg *hsotg = hs_ep->parent; in dwc2_gadget_incr_frame_num()
121 if (hsotg->gadget.speed != USB_SPEED_HIGH) in dwc2_gadget_incr_frame_num()
124 hs_ep->target_frame += hs_ep->interval; in dwc2_gadget_incr_frame_num()
125 if (hs_ep->target_frame > limit) { in dwc2_gadget_incr_frame_num()
126 hs_ep->frame_overrun = true; in dwc2_gadget_incr_frame_num()
127 hs_ep->target_frame &= limit; in dwc2_gadget_incr_frame_num()
129 hs_ep->frame_overrun = false; in dwc2_gadget_incr_frame_num()
134 * dwc2_gadget_dec_frame_num_by_one - Decrements the targeted frame number
145 struct dwc2_hsotg *hsotg = hs_ep->parent; in dwc2_gadget_dec_frame_num_by_one()
148 if (hsotg->gadget.speed != USB_SPEED_HIGH) in dwc2_gadget_dec_frame_num_by_one()
151 if (hs_ep->target_frame) in dwc2_gadget_dec_frame_num_by_one()
152 hs_ep->target_frame -= 1; in dwc2_gadget_dec_frame_num_by_one()
154 hs_ep->target_frame = limit; in dwc2_gadget_dec_frame_num_by_one()
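
As a side note on the two helpers above: the target-frame counter is kept inside a masked range, so incrementing past the limit wraps around (and flags frame_overrun), while decrementing below zero wraps back to the limit. A minimal sketch with hypothetical, simplified types (the real driver masks against DSTS_SOFFN_LIMIT, shifted right by 3 when not running at high speed):

/* Sketch only: hypothetical simplified types, not driver code. */
#include <stdbool.h>
#include <stdint.h>

struct ep_frame {
	uint16_t target_frame;	/* next (micro)frame to service */
	uint16_t interval;	/* endpoint interval in (micro)frames */
	bool frame_overrun;	/* set when the counter wrapped */
};

/* increment with wrap: mirrors dwc2_gadget_incr_frame_num() */
static void frame_num_incr(struct ep_frame *ep, uint16_t limit)
{
	ep->target_frame += ep->interval;
	if (ep->target_frame > limit) {
		ep->frame_overrun = true;
		ep->target_frame &= limit;	/* wrap into the masked range */
	} else {
		ep->frame_overrun = false;
	}
}

/* decrement with wrap: mirrors dwc2_gadget_dec_frame_num_by_one() */
static void frame_num_dec(struct ep_frame *ep, uint16_t limit)
{
	if (ep->target_frame)
		ep->target_frame -= 1;
	else
		ep->target_frame = limit;
}
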
158 * dwc2_hsotg_en_gsint - enable one or more of the general interrupt
169 if (new_gsintmsk != gsintmsk) { in dwc2_hsotg_en_gsint()
170 dev_dbg(hsotg->dev, "gsintmsk now 0x%08x\n", new_gsintmsk); in dwc2_hsotg_en_gsint()
176 * dwc2_hsotg_disable_gsint - disable one or more of the general interrupt
187 if (new_gsintmsk != gsintmsk) in dwc2_hsotg_disable_gsint()
192 * dwc2_hsotg_ctrl_epint - enable/disable an endpoint irq
195 * @dir_in: True if direction is in.
209 if (!dir_in) in dwc2_hsotg_ctrl_epint()
214 if (en) in dwc2_hsotg_ctrl_epint()
223 * dwc2_hsotg_tx_fifo_count - return count of TX FIFOs in device mode
229 if (hsotg->hw_params.en_multiple_tx_fifo) in dwc2_hsotg_tx_fifo_count()
231 return hsotg->hw_params.num_dev_in_eps; in dwc2_hsotg_tx_fifo_count()
234 return hsotg->hw_params.num_dev_perio_in_ep; in dwc2_hsotg_tx_fifo_count()
238 * dwc2_hsotg_tx_fifo_total_depth - return total FIFO depth available for
249 np_tx_fifo_size = min_t(u32, hsotg->hw_params.dev_nperio_tx_fifo_size, in dwc2_hsotg_tx_fifo_total_depth()
250 hsotg->params.g_np_tx_fifo_size); in dwc2_hsotg_tx_fifo_total_depth()
253 tx_addr_max = hsotg->hw_params.total_fifo_size; in dwc2_hsotg_tx_fifo_total_depth()
255 addr = hsotg->params.g_rx_fifo_size + np_tx_fifo_size; in dwc2_hsotg_tx_fifo_total_depth()
256 if (tx_addr_max <= addr) in dwc2_hsotg_tx_fifo_total_depth()
259 return tx_addr_max - addr; in dwc2_hsotg_tx_fifo_total_depth()
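
The computation above amounts to: whatever is left of the total FIFO RAM after the RX FIFO and the non-periodic TX FIFO have been reserved is available for the dedicated TxFIFOs. A standalone sketch (hypothetical helper, values in 32-bit words):

/* Sketch only, not the driver function; values are in 32-bit words. */
static unsigned int tx_fifo_total_depth(unsigned int total_fifo_size,
					unsigned int rx_fifo_size,
					unsigned int np_tx_fifo_size)
{
	unsigned int reserved = rx_fifo_size + np_tx_fifo_size;

	if (total_fifo_size <= reserved)
		return 0;	/* nothing left for dedicated TxFIFOs */

	return total_fifo_size - reserved;
}
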
263 * dwc2_gadget_wkup_alert_handler - Handler for WKUP_ALERT interrupt
277 if (gintsts2 & GINTSTS2_WKUP_ALERT_INT) { in dwc2_gadget_wkup_alert_handler()
278 dev_dbg(hsotg->dev, "%s: Wkup_Alert_Int\n", __func__); in dwc2_gadget_wkup_alert_handler()
285 * dwc2_hsotg_tx_fifo_average_depth - returns average depth of device mode
299 if (!tx_fifo_count) in dwc2_hsotg_tx_fifo_average_depth()
306 * dwc2_hsotg_init_fifo - initialise non-periodic FIFOs
316 u32 *txfsz = hsotg->params.g_tx_fifo_size; in dwc2_hsotg_init_fifo()
318 /* Reset fifo map if not correctly cleared during previous session */ in dwc2_hsotg_init_fifo()
319 WARN_ON(hsotg->fifo_map); in dwc2_hsotg_init_fifo()
320 hsotg->fifo_map = 0; in dwc2_hsotg_init_fifo()
323 dwc2_writel(hsotg, hsotg->params.g_rx_fifo_size, GRXFSIZ); in dwc2_hsotg_init_fifo()
324 dwc2_writel(hsotg, (hsotg->params.g_rx_fifo_size << in dwc2_hsotg_init_fifo()
326 (hsotg->params.g_np_tx_fifo_size << FIFOSIZE_DEPTH_SHIFT), in dwc2_hsotg_init_fifo()
332 * that if the settings have been changed, then they are set to in dwc2_hsotg_init_fifo()
337 addr = hsotg->params.g_rx_fifo_size + hsotg->params.g_np_tx_fifo_size; in dwc2_hsotg_init_fifo()
345 if (!txfsz[ep]) in dwc2_hsotg_init_fifo()
349 WARN_ONCE(addr + txfsz[ep] > hsotg->fifo_mem, in dwc2_hsotg_init_fifo()
357 dwc2_writel(hsotg, hsotg->hw_params.total_fifo_size | in dwc2_hsotg_init_fifo()
373 if ((val & (GRSTCTL_TXFFLSH | GRSTCTL_RXFFLSH)) == 0) in dwc2_hsotg_init_fifo()
376 if (--timeout == 0) { in dwc2_hsotg_init_fifo()
377 dev_err(hsotg->dev, in dwc2_hsotg_init_fifo()
386 dev_dbg(hsotg->dev, "FIFOs reset, timeout at %d\n", timeout); in dwc2_hsotg_init_fifo()
390 * dwc2_hsotg_ep_alloc_request - allocate USB request structure
391 * @ep: USB endpoint to allocate request for.
394 * Allocate a new USB request structure appropriate for the specified endpoint
402 if (!req) in dwc2_hsotg_ep_alloc_request()
405 INIT_LIST_HEAD(&req->queue); in dwc2_hsotg_ep_alloc_request()
407 return &req->req; in dwc2_hsotg_ep_alloc_request()
411 * is_ep_periodic - return true if the endpoint is in periodic mode.
414 * Returns true if the endpoint is in periodic mode, meaning it is being
419 return hs_ep->periodic; in is_ep_periodic()
423 * dwc2_hsotg_unmap_dma - unmap the DMA memory being used for the request
435 struct usb_request *req = &hs_req->req; in dwc2_hsotg_unmap_dma()
437 usb_gadget_unmap_request(&hsotg->gadget, req, hs_ep->map_dir); in dwc2_hsotg_unmap_dma()
441 * dwc2_gadget_alloc_ctrl_desc_chains - allocate DMA descriptor chains
450 hsotg->setup_desc[0] = in dwc2_gadget_alloc_ctrl_desc_chains()
451 dmam_alloc_coherent(hsotg->dev, in dwc2_gadget_alloc_ctrl_desc_chains()
453 &hsotg->setup_desc_dma[0], in dwc2_gadget_alloc_ctrl_desc_chains()
455 if (!hsotg->setup_desc[0]) in dwc2_gadget_alloc_ctrl_desc_chains()
458 hsotg->setup_desc[1] = in dwc2_gadget_alloc_ctrl_desc_chains()
459 dmam_alloc_coherent(hsotg->dev, in dwc2_gadget_alloc_ctrl_desc_chains()
461 &hsotg->setup_desc_dma[1], in dwc2_gadget_alloc_ctrl_desc_chains()
463 if (!hsotg->setup_desc[1]) in dwc2_gadget_alloc_ctrl_desc_chains()
466 hsotg->ctrl_in_desc = in dwc2_gadget_alloc_ctrl_desc_chains()
467 dmam_alloc_coherent(hsotg->dev, in dwc2_gadget_alloc_ctrl_desc_chains()
469 &hsotg->ctrl_in_desc_dma, in dwc2_gadget_alloc_ctrl_desc_chains()
471 if (!hsotg->ctrl_in_desc) in dwc2_gadget_alloc_ctrl_desc_chains()
474 hsotg->ctrl_out_desc = in dwc2_gadget_alloc_ctrl_desc_chains()
475 dmam_alloc_coherent(hsotg->dev, in dwc2_gadget_alloc_ctrl_desc_chains()
477 &hsotg->ctrl_out_desc_dma, in dwc2_gadget_alloc_ctrl_desc_chains()
479 if (!hsotg->ctrl_out_desc) in dwc2_gadget_alloc_ctrl_desc_chains()
485 return -ENOMEM; in dwc2_gadget_alloc_ctrl_desc_chains()
489 * dwc2_hsotg_write_fifo - write packet Data to the TxFIFO
499 * The return value is zero if there is more space (or nothing was done)
500 * otherwise -ENOSPC is returned if the FIFO space was used up.
510 int buf_pos = hs_req->req.actual; in dwc2_hsotg_write_fifo()
511 int to_write = hs_ep->size_loaded; in dwc2_hsotg_write_fifo()
517 to_write -= (buf_pos - hs_ep->last_load); in dwc2_hsotg_write_fifo()
519 /* if there's nothing to write, get out early */ in dwc2_hsotg_write_fifo()
520 if (to_write == 0) in dwc2_hsotg_write_fifo()
523 if (periodic && !hsotg->dedicated_fifos) { in dwc2_hsotg_write_fifo()
524 u32 epsize = dwc2_readl(hsotg, DIEPTSIZ(hs_ep->index)); in dwc2_hsotg_write_fifo()
536 * if shared fifo, we cannot write anything until the in dwc2_hsotg_write_fifo()
539 if (hs_ep->fifo_load != 0) { in dwc2_hsotg_write_fifo()
541 return -ENOSPC; in dwc2_hsotg_write_fifo()
544 dev_dbg(hsotg->dev, "%s: left=%d, load=%d, fifo=%d, size %d\n", in dwc2_hsotg_write_fifo()
546 hs_ep->size_loaded, hs_ep->fifo_load, hs_ep->fifo_size); in dwc2_hsotg_write_fifo()
549 size_done = hs_ep->size_loaded - size_left; in dwc2_hsotg_write_fifo()
552 can_write = hs_ep->fifo_load - size_done; in dwc2_hsotg_write_fifo()
553 dev_dbg(hsotg->dev, "%s: => can_write1=%d\n", in dwc2_hsotg_write_fifo()
556 can_write = hs_ep->fifo_size - can_write; in dwc2_hsotg_write_fifo()
557 dev_dbg(hsotg->dev, "%s: => can_write2=%d\n", in dwc2_hsotg_write_fifo()
560 if (can_write <= 0) { in dwc2_hsotg_write_fifo()
562 return -ENOSPC; in dwc2_hsotg_write_fifo()
564 } else if (hsotg->dedicated_fifos && hs_ep->index != 0) { in dwc2_hsotg_write_fifo()
566 DTXFSTS(hs_ep->fifo_index)); in dwc2_hsotg_write_fifo()
571 if (GNPTXSTS_NP_TXQ_SPC_AVAIL_GET(gnptxsts) == 0) { in dwc2_hsotg_write_fifo()
572 dev_dbg(hsotg->dev, in dwc2_hsotg_write_fifo()
577 return -ENOSPC; in dwc2_hsotg_write_fifo()
584 max_transfer = hs_ep->ep.maxpacket * hs_ep->mc; in dwc2_hsotg_write_fifo()
586 dev_dbg(hsotg->dev, "%s: GNPTXSTS=%08x, can=%d, to=%d, max_transfer %d\n", in dwc2_hsotg_write_fifo()
590 * limit to 512 bytes of data, it seems at least on the non-periodic in dwc2_hsotg_write_fifo()
594 if (can_write > 512 && !periodic) in dwc2_hsotg_write_fifo()
598 * limit the write to one max-packet size worth of data, but allow in dwc2_hsotg_write_fifo()
602 if (to_write > max_transfer) { in dwc2_hsotg_write_fifo()
606 if (!hsotg->dedicated_fifos) in dwc2_hsotg_write_fifo()
612 /* see if we can write data */ in dwc2_hsotg_write_fifo()
614 if (to_write > can_write) { in dwc2_hsotg_write_fifo()
622 * Note, we do not currently check to see if we can ever in dwc2_hsotg_write_fifo()
626 if (pkt_round) in dwc2_hsotg_write_fifo()
627 to_write -= pkt_round; in dwc2_hsotg_write_fifo()
635 if (!hsotg->dedicated_fifos) in dwc2_hsotg_write_fifo()
641 dev_dbg(hsotg->dev, "write %d/%d, can_write %d, done %d\n", in dwc2_hsotg_write_fifo()
642 to_write, hs_req->req.length, can_write, buf_pos); in dwc2_hsotg_write_fifo()
644 if (to_write <= 0) in dwc2_hsotg_write_fifo()
645 return -ENOSPC; in dwc2_hsotg_write_fifo()
647 hs_req->req.actual = buf_pos + to_write; in dwc2_hsotg_write_fifo()
648 hs_ep->total_data += to_write; in dwc2_hsotg_write_fifo()
650 if (periodic) in dwc2_hsotg_write_fifo()
651 hs_ep->fifo_load += to_write; in dwc2_hsotg_write_fifo()
654 data = hs_req->req.buf + buf_pos; in dwc2_hsotg_write_fifo()
656 dwc2_writel_rep(hsotg, EPFIFO(hs_ep->index), data, to_write); in dwc2_hsotg_write_fifo()
658 return (to_write >= can_write) ? -ENOSPC : 0; in dwc2_hsotg_write_fifo()
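
The core of the FIFO-load sizing above is: cap the write at max_transfer (maxpacket * mc), and if the FIFO cannot take that much, round the write down to a whole number of packets so a partially loaded packet is never left in the FIFO. A sketch of that clamping with hypothetical parameter names:

/* Sketch only: hypothetical helper, max_transfer = maxpacket * mc. */
static int fifo_clamp_write(int to_write, int can_write, int max_transfer)
{
	if (to_write > max_transfer)
		to_write = max_transfer;

	if (to_write > can_write) {
		int pkt_round;

		to_write = can_write;
		pkt_round = to_write % max_transfer;
		if (pkt_round)
			to_write -= pkt_round;	/* keep whole packets only */
	}

	return to_write;	/* bytes to load into the FIFO this time */
}
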
662 * get_ep_limit - get the maximum data length for this endpoint
670 int index = hs_ep->index; in get_ep_limit()
674 if (index != 0) { in get_ep_limit()
679 if (hs_ep->dir_in) in get_ep_limit()
686 maxpkt--; in get_ep_limit()
687 maxsize--; in get_ep_limit()
690 * constrain by packet count if maxpkts*pktsize is greater in get_ep_limit()
694 if ((maxpkt * hs_ep->ep.maxpacket) < maxsize) in get_ep_limit()
695 maxsize = maxpkt * hs_ep->ep.maxpacket; in get_ep_limit()
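
In other words, the per-transfer limit is the smaller of what the DxEPTSIZ byte-count field can express and what the packet-count field can express times wMaxPacketSize. A sketch with the field widths passed in as hypothetical parameters:

/* Sketch only: field widths are hypothetical parameters here. */
static unsigned int ep_transfer_limit(unsigned int xfersize_bits,
				      unsigned int pktcnt_bits,
				      unsigned int maxpacket)
{
	unsigned int maxsize = (1u << xfersize_bits) - 1;
	unsigned int maxpkt = (1u << pktcnt_bits) - 1;

	/* constrain by packet count if maxpkt * maxpacket is smaller */
	if (maxpkt * maxpacket < maxsize)
		maxsize = maxpkt * maxpacket;

	return maxsize;
}
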
701 * dwc2_hsotg_read_frameno - read current frame number
718 * dwc2_gadget_get_chain_limit - get the maximum data payload value of the
728 const struct usb_endpoint_descriptor *ep_desc = hs_ep->ep.desc; in dwc2_gadget_get_chain_limit()
729 int is_isoc = hs_ep->isochronous; in dwc2_gadget_get_chain_limit()
731 u32 mps = hs_ep->ep.maxpacket; in dwc2_gadget_get_chain_limit()
732 int dir_in = hs_ep->dir_in; in dwc2_gadget_get_chain_limit()
734 if (is_isoc) in dwc2_gadget_get_chain_limit()
735 maxsize = (hs_ep->dir_in ? DEV_DMA_ISOC_TX_NBYTES_LIMIT : in dwc2_gadget_get_chain_limit()
742 if (hs_ep->index) in dwc2_gadget_get_chain_limit()
743 if (usb_endpoint_xfer_int(ep_desc) && !dir_in && (mps % 4)) in dwc2_gadget_get_chain_limit()
750 * dwc2_gadget_get_desc_params - get DMA descriptor parameters.
757 * Control out - MPS,
758 * Isochronous - descriptor rx/tx bytes bitfield limit,
759 * Control In/Bulk/Interrupt - multiple of mps. This will allow to not
761 * Interrupt OUT - if mps not multiple of 4 then a single packet corresponds
768 const struct usb_endpoint_descriptor *ep_desc = hs_ep->ep.desc; in dwc2_gadget_get_desc_params()
769 u32 mps = hs_ep->ep.maxpacket; in dwc2_gadget_get_desc_params()
770 int dir_in = hs_ep->dir_in; in dwc2_gadget_get_desc_params()
773 if (!hs_ep->index && !dir_in) { in dwc2_gadget_get_desc_params()
776 } else if (hs_ep->isochronous) { in dwc2_gadget_get_desc_params()
777 if (dir_in) { in dwc2_gadget_get_desc_params()
789 desc_size -= desc_size % mps; in dwc2_gadget_get_desc_params()
793 if (hs_ep->index) in dwc2_gadget_get_desc_params()
794 if (usb_endpoint_xfer_int(ep_desc) && !dir_in && (mps % 4)) { in dwc2_gadget_get_desc_params()
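
A compact restatement of the descriptor-size rules listed above (a sketch with a hypothetical helper, not the driver function): non-isochronous descriptors are trimmed down to a whole number of packets, except interrupt OUT endpoints whose MPS is not a multiple of 4, which get exactly one packet per descriptor.

/* Sketch only: hypothetical helper restating the rules above. */
static unsigned int ddma_desc_size(unsigned int limit, unsigned int mps,
				   int int_out_unaligned_mps)
{
	if (int_out_unaligned_mps)
		return mps;			/* one packet per descriptor */

	return limit - (limit % mps);		/* whole packets only */
}
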
808 int dir_in = hs_ep->dir_in; in dwc2_gadget_fill_nonisoc_xfer_ddma_one()
809 u32 mps = hs_ep->ep.maxpacket; in dwc2_gadget_fill_nonisoc_xfer_ddma_one()
817 hs_ep->desc_count = (len / maxsize) + in dwc2_gadget_fill_nonisoc_xfer_ddma_one()
819 if (len == 0) in dwc2_gadget_fill_nonisoc_xfer_ddma_one()
820 hs_ep->desc_count = 1; in dwc2_gadget_fill_nonisoc_xfer_ddma_one()
822 for (i = 0; i < hs_ep->desc_count; ++i) { in dwc2_gadget_fill_nonisoc_xfer_ddma_one()
823 (*desc)->status = 0; in dwc2_gadget_fill_nonisoc_xfer_ddma_one()
824 (*desc)->status |= (DEV_DMA_BUFF_STS_HBUSY in dwc2_gadget_fill_nonisoc_xfer_ddma_one()
827 if (len > maxsize) { in dwc2_gadget_fill_nonisoc_xfer_ddma_one()
828 if (!hs_ep->index && !dir_in) in dwc2_gadget_fill_nonisoc_xfer_ddma_one()
829 (*desc)->status |= (DEV_DMA_L | DEV_DMA_IOC); in dwc2_gadget_fill_nonisoc_xfer_ddma_one()
831 (*desc)->status |= in dwc2_gadget_fill_nonisoc_xfer_ddma_one()
833 (*desc)->buf = dma_buff + offset; in dwc2_gadget_fill_nonisoc_xfer_ddma_one()
835 len -= maxsize; in dwc2_gadget_fill_nonisoc_xfer_ddma_one()
838 if (true_last) in dwc2_gadget_fill_nonisoc_xfer_ddma_one()
839 (*desc)->status |= (DEV_DMA_L | DEV_DMA_IOC); in dwc2_gadget_fill_nonisoc_xfer_ddma_one()
841 if (dir_in) in dwc2_gadget_fill_nonisoc_xfer_ddma_one()
842 (*desc)->status |= (len % mps) ? DEV_DMA_SHORT : in dwc2_gadget_fill_nonisoc_xfer_ddma_one()
843 ((hs_ep->send_zlp && true_last) ? in dwc2_gadget_fill_nonisoc_xfer_ddma_one()
846 (*desc)->status |= in dwc2_gadget_fill_nonisoc_xfer_ddma_one()
848 (*desc)->buf = dma_buff + offset; in dwc2_gadget_fill_nonisoc_xfer_ddma_one()
851 (*desc)->status &= ~DEV_DMA_BUFF_STS_MASK; in dwc2_gadget_fill_nonisoc_xfer_ddma_one()
852 (*desc)->status |= (DEV_DMA_BUFF_STS_HREADY in dwc2_gadget_fill_nonisoc_xfer_ddma_one()
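
The descriptor count computed near the top of this function is a round-up division with a zero-length special case (a ZLP still needs one descriptor). A sketch:

/* Sketch only: descriptor count for one buffer segment. */
static unsigned int ddma_desc_count(unsigned int len, unsigned int maxsize)
{
	if (len == 0)
		return 1;	/* a ZLP still consumes one descriptor */

	return (len / maxsize) + (len % maxsize ? 1 : 0);
}
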
859 * dwc2_gadget_config_nonisoc_xfer_ddma - prepare non ISOC DMA desc chain.
873 struct dwc2_dma_desc *desc = hs_ep->desc_list; in dwc2_gadget_config_nonisoc_xfer_ddma()
878 if (hs_ep->req) in dwc2_gadget_config_nonisoc_xfer_ddma()
879 ureq = &hs_ep->req->req; in dwc2_gadget_config_nonisoc_xfer_ddma()
881 /* non-DMA sg buffer */ in dwc2_gadget_config_nonisoc_xfer_ddma()
882 if (!ureq || !ureq->num_sgs) { in dwc2_gadget_config_nonisoc_xfer_ddma()
889 for_each_sg(ureq->sg, sg, ureq->num_sgs, i) { in dwc2_gadget_config_nonisoc_xfer_ddma()
891 sg_dma_address(sg) + sg->offset, sg_dma_len(sg), in dwc2_gadget_config_nonisoc_xfer_ddma()
893 desc_count += hs_ep->desc_count; in dwc2_gadget_config_nonisoc_xfer_ddma()
896 hs_ep->desc_count = desc_count; in dwc2_gadget_config_nonisoc_xfer_ddma()
900 * dwc2_gadget_fill_isoc_desc - fills next isochronous descriptor in chain.
902 * @dma_buff: USB request's DMA buffer.
903 * @len: USB request transfer length.
905 * Fills the next free descriptor with the data of the arrived USB request and
906 * frame info, sets the Last and IOC bits, and increments next_desc. If filled
914 struct dwc2_hsotg *hsotg = hs_ep->parent; in dwc2_gadget_fill_isoc_desc()
921 index = hs_ep->next_desc; in dwc2_gadget_fill_isoc_desc()
922 desc = &hs_ep->desc_list[index]; in dwc2_gadget_fill_isoc_desc()
924 /* Check if descriptor chain full */ in dwc2_gadget_fill_isoc_desc()
925 if ((desc->status >> DEV_DMA_BUFF_STS_SHIFT) == in dwc2_gadget_fill_isoc_desc()
927 dev_dbg(hsotg->dev, "%s: desc chain full\n", __func__); in dwc2_gadget_fill_isoc_desc()
931 /* Clear L bit of previous desc if more than one entries in the chain */ in dwc2_gadget_fill_isoc_desc()
932 if (hs_ep->next_desc) in dwc2_gadget_fill_isoc_desc()
933 hs_ep->desc_list[index - 1].status &= ~DEV_DMA_L; in dwc2_gadget_fill_isoc_desc()
935 dev_dbg(hsotg->dev, "%s: Filling ep %d, dir %s isoc desc # %d\n", in dwc2_gadget_fill_isoc_desc()
936 __func__, hs_ep->index, hs_ep->dir_in ? "in" : "out", index); in dwc2_gadget_fill_isoc_desc()
938 desc->status = 0; in dwc2_gadget_fill_isoc_desc()
939 desc->status |= (DEV_DMA_BUFF_STS_HBUSY << DEV_DMA_BUFF_STS_SHIFT); in dwc2_gadget_fill_isoc_desc()
941 desc->buf = dma_buff; in dwc2_gadget_fill_isoc_desc()
942 desc->status |= (DEV_DMA_L | DEV_DMA_IOC | in dwc2_gadget_fill_isoc_desc()
945 if (hs_ep->dir_in) { in dwc2_gadget_fill_isoc_desc()
946 if (len) in dwc2_gadget_fill_isoc_desc()
947 pid = DIV_ROUND_UP(len, hs_ep->ep.maxpacket); in dwc2_gadget_fill_isoc_desc()
950 desc->status |= ((pid << DEV_DMA_ISOC_PID_SHIFT) & in dwc2_gadget_fill_isoc_desc()
952 ((len % hs_ep->ep.maxpacket) ? in dwc2_gadget_fill_isoc_desc()
954 ((hs_ep->target_frame << in dwc2_gadget_fill_isoc_desc()
959 desc->status &= ~DEV_DMA_BUFF_STS_MASK; in dwc2_gadget_fill_isoc_desc()
960 desc->status |= (DEV_DMA_BUFF_STS_HREADY << DEV_DMA_BUFF_STS_SHIFT); in dwc2_gadget_fill_isoc_desc()
963 if (hs_ep->dir_in) in dwc2_gadget_fill_isoc_desc()
967 hs_ep->next_desc++; in dwc2_gadget_fill_isoc_desc()
968 if (hs_ep->next_desc >= MAX_DMA_DESC_NUM_HS_ISOC) in dwc2_gadget_fill_isoc_desc()
969 hs_ep->next_desc = 0; in dwc2_gadget_fill_isoc_desc()
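
The next_desc bookkeeping above treats the isochronous descriptor chain as a ring: the index advances after each fill and wraps at the chain length (MAX_DMA_DESC_NUM_HS_ISOC in the driver). A sketch with the chain length as a hypothetical parameter:

/* Sketch only: ring_len stands in for MAX_DMA_DESC_NUM_HS_ISOC. */
static unsigned int isoc_advance_desc(unsigned int next_desc,
				      unsigned int ring_len)
{
	next_desc++;
	if (next_desc >= ring_len)
		next_desc = 0;	/* wrap around the descriptor ring */

	return next_desc;
}
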
975 * dwc2_gadget_start_isoc_ddma - start isochronous transfer in DDMA
983 struct dwc2_hsotg *hsotg = hs_ep->parent; in dwc2_gadget_start_isoc_ddma()
985 int index = hs_ep->index; in dwc2_gadget_start_isoc_ddma()
993 if (list_empty(&hs_ep->queue)) { in dwc2_gadget_start_isoc_ddma()
994 hs_ep->target_frame = TARGET_FRAME_INITIAL; in dwc2_gadget_start_isoc_ddma()
995 dev_dbg(hsotg->dev, "%s: No requests in queue\n", __func__); in dwc2_gadget_start_isoc_ddma()
1001 desc = &hs_ep->desc_list[i]; in dwc2_gadget_start_isoc_ddma()
1002 desc->status = 0; in dwc2_gadget_start_isoc_ddma()
1003 desc->status |= (DEV_DMA_BUFF_STS_HBUSY in dwc2_gadget_start_isoc_ddma()
1007 hs_ep->next_desc = 0; in dwc2_gadget_start_isoc_ddma()
1008 list_for_each_entry_safe(hs_req, treq, &hs_ep->queue, queue) { in dwc2_gadget_start_isoc_ddma()
1009 dma_addr_t dma_addr = hs_req->req.dma; in dwc2_gadget_start_isoc_ddma()
1011 if (hs_req->req.num_sgs) { in dwc2_gadget_start_isoc_ddma()
1012 WARN_ON(hs_req->req.num_sgs > 1); in dwc2_gadget_start_isoc_ddma()
1013 dma_addr = sg_dma_address(hs_req->req.sg); in dwc2_gadget_start_isoc_ddma()
1016 hs_req->req.length); in dwc2_gadget_start_isoc_ddma()
1017 if (ret) in dwc2_gadget_start_isoc_ddma()
1021 hs_ep->compl_desc = 0; in dwc2_gadget_start_isoc_ddma()
1022 depctl = hs_ep->dir_in ? DIEPCTL(index) : DOEPCTL(index); in dwc2_gadget_start_isoc_ddma()
1023 dma_reg = hs_ep->dir_in ? DIEPDMA(index) : DOEPDMA(index); in dwc2_gadget_start_isoc_ddma()
1026 dwc2_writel(hsotg, hs_ep->desc_list_dma, dma_reg); in dwc2_gadget_start_isoc_ddma()
1040 * dwc2_hsotg_start_req - start a USB request from an endpoint's queue
1044 * @continuing: True if we are doing more for the current request.
1054 struct usb_request *ureq = &hs_req->req; in dwc2_hsotg_start_req()
1055 int index = hs_ep->index; in dwc2_hsotg_start_req()
1056 int dir_in = hs_ep->dir_in; in dwc2_hsotg_start_req()
1066 if (index != 0) { in dwc2_hsotg_start_req()
1067 if (hs_ep->req && !continuing) { in dwc2_hsotg_start_req()
1068 dev_err(hsotg->dev, "%s: active request\n", __func__); in dwc2_hsotg_start_req()
1071 } else if (hs_ep->req != hs_req && continuing) { in dwc2_hsotg_start_req()
1072 dev_err(hsotg->dev, in dwc2_hsotg_start_req()
1083 dev_dbg(hsotg->dev, "%s: DxEPCTL=0x%08x, ep %d, dir %s\n", in dwc2_hsotg_start_req()
1085 hs_ep->dir_in ? "in" : "out"); in dwc2_hsotg_start_req()
1087 /* If endpoint is stalled, we will restart request later */ in dwc2_hsotg_start_req()
1090 if (index && ctrl & DXEPCTL_STALL) { in dwc2_hsotg_start_req()
1091 dev_warn(hsotg->dev, "%s: ep%d is stalled\n", __func__, index); in dwc2_hsotg_start_req()
1095 length = ureq->length - ureq->actual; in dwc2_hsotg_start_req()
1096 dev_dbg(hsotg->dev, "ureq->length:%d ureq->actual:%d\n", in dwc2_hsotg_start_req()
1097 ureq->length, ureq->actual); in dwc2_hsotg_start_req()
1099 if (!using_desc_dma(hsotg)) in dwc2_hsotg_start_req()
1104 if (length > maxreq) { in dwc2_hsotg_start_req()
1105 int round = maxreq % hs_ep->ep.maxpacket; in dwc2_hsotg_start_req()
1107 dev_dbg(hsotg->dev, "%s: length %d, max-req %d, r %d\n", in dwc2_hsotg_start_req()
1111 if (round) in dwc2_hsotg_start_req()
1112 maxreq -= round; in dwc2_hsotg_start_req()
1117 if (length) in dwc2_hsotg_start_req()
1118 packets = DIV_ROUND_UP(length, hs_ep->ep.maxpacket); in dwc2_hsotg_start_req()
1120 packets = 1; /* send one packet if length is zero. */ in dwc2_hsotg_start_req()
1122 if (dir_in && index != 0) in dwc2_hsotg_start_req()
1123 if (hs_ep->isochronous) in dwc2_hsotg_start_req()
1134 if (dir_in && ureq->zero && !continuing) { in dwc2_hsotg_start_req()
1135 /* Test if zlp is actually required. */ in dwc2_hsotg_start_req()
1136 if ((ureq->length >= hs_ep->ep.maxpacket) && in dwc2_hsotg_start_req()
1137 !(ureq->length % hs_ep->ep.maxpacket)) in dwc2_hsotg_start_req()
1138 hs_ep->send_zlp = 1; in dwc2_hsotg_start_req()
1144 dev_dbg(hsotg->dev, "%s: %d@%d/%d, 0x%08x => 0x%08x\n", in dwc2_hsotg_start_req()
1145 __func__, packets, length, ureq->length, epsize, epsize_reg); in dwc2_hsotg_start_req()
1148 hs_ep->req = hs_req; in dwc2_hsotg_start_req()
1150 if (using_desc_dma(hsotg)) { in dwc2_hsotg_start_req()
1152 u32 mps = hs_ep->ep.maxpacket; in dwc2_hsotg_start_req()
1154 /* Adjust length: EP0 - MPS, other OUT EPs - multiple of MPS */ in dwc2_hsotg_start_req()
1155 if (!dir_in) { in dwc2_hsotg_start_req()
1156 if (!index) in dwc2_hsotg_start_req()
1158 else if (length % mps) in dwc2_hsotg_start_req()
1159 length += (mps - (length % mps)); in dwc2_hsotg_start_req()
1162 if (continuing) in dwc2_hsotg_start_req()
1163 offset = ureq->actual; in dwc2_hsotg_start_req()
1166 dwc2_gadget_config_nonisoc_xfer_ddma(hs_ep, ureq->dma + offset, in dwc2_hsotg_start_req()
1170 dwc2_writel(hsotg, hs_ep->desc_list_dma, dma_reg); in dwc2_hsotg_start_req()
1172 dev_dbg(hsotg->dev, "%s: %08x pad => 0x%08x\n", in dwc2_hsotg_start_req()
1173 __func__, (u32)hs_ep->desc_list_dma, dma_reg); in dwc2_hsotg_start_req()
1178 if (using_dma(hsotg) && !continuing && (length != 0)) { in dwc2_hsotg_start_req()
1184 dwc2_writel(hsotg, ureq->dma, dma_reg); in dwc2_hsotg_start_req()
1186 dev_dbg(hsotg->dev, "%s: %pad => 0x%08x\n", in dwc2_hsotg_start_req()
1187 __func__, &ureq->dma, dma_reg); in dwc2_hsotg_start_req()
1191 if (hs_ep->isochronous) { in dwc2_hsotg_start_req()
1192 if (!dwc2_gadget_target_frame_elapsed(hs_ep)) { in dwc2_hsotg_start_req()
1193 if (hs_ep->interval == 1) { in dwc2_hsotg_start_req()
1194 if (hs_ep->target_frame & 0x1) in dwc2_hsotg_start_req()
1201 hs_req->req.frame_number = hs_ep->target_frame; in dwc2_hsotg_start_req()
1202 hs_req->req.actual = 0; in dwc2_hsotg_start_req()
1203 dwc2_hsotg_complete_request(hsotg, hs_ep, hs_req, -ENODATA); in dwc2_hsotg_start_req()
1210 dev_dbg(hsotg->dev, "ep0 state:%d\n", hsotg->ep0_state); in dwc2_hsotg_start_req()
1213 if (!(index == 0 && hsotg->ep0_state == DWC2_EP0_SETUP)) in dwc2_hsotg_start_req()
1216 dev_dbg(hsotg->dev, "%s: DxEPCTL=0x%08x\n", __func__, ctrl); in dwc2_hsotg_start_req()
1224 hs_ep->size_loaded = length; in dwc2_hsotg_start_req()
1225 hs_ep->last_load = ureq->actual; in dwc2_hsotg_start_req()
1227 if (dir_in && !using_dma(hsotg)) { in dwc2_hsotg_start_req()
1228 /* set these anyway, we may need them for non-periodic in */ in dwc2_hsotg_start_req()
1229 hs_ep->fifo_load = 0; in dwc2_hsotg_start_req()
1240 if (!(dwc2_readl(hsotg, epctrl_reg) & DXEPCTL_EPENA)) in dwc2_hsotg_start_req()
1241 dev_dbg(hsotg->dev, in dwc2_hsotg_start_req()
1245 dev_dbg(hsotg->dev, "%s: DXEPCTL=0x%08x\n", in dwc2_hsotg_start_req()
1249 dwc2_hsotg_ctrl_epint(hsotg, hs_ep->index, hs_ep->dir_in, 1); in dwc2_hsotg_start_req()
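
One detail worth calling out from the descriptor-DMA path of dwc2_hsotg_start_req() above: OUT transfer lengths are adjusted before the chain is programmed, with EP0 OUT pinned to exactly one MPS and other OUT endpoints rounded up to a whole number of packets. A sketch of that adjustment (hypothetical helper):

/* Sketch only: hypothetical helper, not the driver code path itself. */
static unsigned int ddma_out_length(unsigned int length, unsigned int mps,
				    int is_ep0)
{
	if (is_ep0)
		return mps;	/* EP0 OUT always programs exactly one MPS */

	if (length % mps)
		length += mps - (length % mps);	/* round up to whole packets */

	return length;
}
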
1253 * dwc2_hsotg_map_dma - map the DMA memory being used for the request
1259 * is correctly setup for DMA. If we've been passed an extant DMA address
1260 * then ensure the buffer has been synced to memory. If our buffer has no
1270 hs_ep->map_dir = hs_ep->dir_in; in dwc2_hsotg_map_dma()
1271 ret = usb_gadget_map_request(&hsotg->gadget, req, hs_ep->dir_in); in dwc2_hsotg_map_dma()
1272 if (ret) in dwc2_hsotg_map_dma()
1278 dev_err(hsotg->dev, "%s: failed to map buffer %p, %d bytes\n", in dwc2_hsotg_map_dma()
1279 __func__, req->buf, req->length); in dwc2_hsotg_map_dma()
1281 return -EIO; in dwc2_hsotg_map_dma()
1288 void *req_buf = hs_req->req.buf; in dwc2_hsotg_handle_unaligned_buf_start()
1290 /* If dma is not being used or buffer is aligned */ in dwc2_hsotg_handle_unaligned_buf_start()
1291 if (!using_dma(hsotg) || !((long)req_buf & 3)) in dwc2_hsotg_handle_unaligned_buf_start()
1294 WARN_ON(hs_req->saved_req_buf); in dwc2_hsotg_handle_unaligned_buf_start()
1296 dev_dbg(hsotg->dev, "%s: %s: buf=%p length=%d\n", __func__, in dwc2_hsotg_handle_unaligned_buf_start()
1297 hs_ep->ep.name, req_buf, hs_req->req.length); in dwc2_hsotg_handle_unaligned_buf_start()
1299 hs_req->req.buf = kmalloc(hs_req->req.length, GFP_ATOMIC); in dwc2_hsotg_handle_unaligned_buf_start()
1300 if (!hs_req->req.buf) { in dwc2_hsotg_handle_unaligned_buf_start()
1301 hs_req->req.buf = req_buf; in dwc2_hsotg_handle_unaligned_buf_start()
1302 dev_err(hsotg->dev, in dwc2_hsotg_handle_unaligned_buf_start()
1305 return -ENOMEM; in dwc2_hsotg_handle_unaligned_buf_start()
1309 hs_req->saved_req_buf = req_buf; in dwc2_hsotg_handle_unaligned_buf_start()
1311 if (hs_ep->dir_in) in dwc2_hsotg_handle_unaligned_buf_start()
1312 memcpy(hs_req->req.buf, req_buf, hs_req->req.length); in dwc2_hsotg_handle_unaligned_buf_start()
1321 /* If dma is not being used or buffer was aligned */ in dwc2_hsotg_handle_unaligned_buf_complete()
1322 if (!using_dma(hsotg) || !hs_req->saved_req_buf) in dwc2_hsotg_handle_unaligned_buf_complete()
1325 dev_dbg(hsotg->dev, "%s: %s: status=%d actual-length=%d\n", __func__, in dwc2_hsotg_handle_unaligned_buf_complete()
1326 hs_ep->ep.name, hs_req->req.status, hs_req->req.actual); in dwc2_hsotg_handle_unaligned_buf_complete()
1329 if (!hs_ep->dir_in && !hs_req->req.status) in dwc2_hsotg_handle_unaligned_buf_complete()
1330 memcpy(hs_req->saved_req_buf, hs_req->req.buf, in dwc2_hsotg_handle_unaligned_buf_complete()
1331 hs_req->req.actual); in dwc2_hsotg_handle_unaligned_buf_complete()
1334 kfree(hs_req->req.buf); in dwc2_hsotg_handle_unaligned_buf_complete()
1336 hs_req->req.buf = hs_req->saved_req_buf; in dwc2_hsotg_handle_unaligned_buf_complete()
1337 hs_req->saved_req_buf = NULL; in dwc2_hsotg_handle_unaligned_buf_complete()
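
The two unaligned-buffer helpers above implement a classic bounce buffer: when DMA is in use and the request buffer is not 32-bit aligned, a temporary buffer is allocated, IN data is copied into it before the transfer, and OUT data is copied back to the original buffer once the transfer completes without error. A self-contained sketch with hypothetical, simplified types:

/* Sketch only: hypothetical simplified types, not driver code. */
#include <stdint.h>
#include <stdlib.h>
#include <string.h>

struct bounce_req {
	void *buf;		/* buffer actually handed to the controller */
	void *saved_buf;	/* original, unaligned caller buffer */
	size_t length;
};

static int bounce_start(struct bounce_req *req, int dir_in)
{
	if (!((uintptr_t)req->buf & 3))
		return 0;			/* already aligned, nothing to do */

	req->saved_buf = req->buf;
	req->buf = malloc(req->length);
	if (!req->buf) {
		req->buf = req->saved_buf;
		req->saved_buf = NULL;
		return -1;
	}

	if (dir_in)				/* device-to-host: preload the copy */
		memcpy(req->buf, req->saved_buf, req->length);
	return 0;
}

static void bounce_complete(struct bounce_req *req, int dir_in, int status,
			    size_t actual)
{
	if (!req->saved_buf)
		return;				/* no bounce buffer was used */

	if (!dir_in && !status)			/* host-to-device: copy data back */
		memcpy(req->saved_buf, req->buf, actual);

	free(req->buf);
	req->buf = req->saved_buf;
	req->saved_buf = NULL;
}
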
1341 * dwc2_gadget_target_frame_elapsed - Checks target frame
1344 * Returns 1 if targeted frame elapsed. If returned 1 then we need to drop
1349 struct dwc2_hsotg *hsotg = hs_ep->parent; in dwc2_gadget_target_frame_elapsed()
1350 u32 target_frame = hs_ep->target_frame; in dwc2_gadget_target_frame_elapsed()
1351 u32 current_frame = hsotg->frame_number; in dwc2_gadget_target_frame_elapsed()
1352 bool frame_overrun = hs_ep->frame_overrun; in dwc2_gadget_target_frame_elapsed()
1355 if (hsotg->gadget.speed != USB_SPEED_HIGH) in dwc2_gadget_target_frame_elapsed()
1358 if (!frame_overrun && current_frame >= target_frame) in dwc2_gadget_target_frame_elapsed()
1361 if (frame_overrun && current_frame >= target_frame && in dwc2_gadget_target_frame_elapsed()
1362 ((current_frame - target_frame) < limit / 2)) in dwc2_gadget_target_frame_elapsed()
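
The check above says a target frame has elapsed either when the current frame has simply passed it, or, after the frame counter wrapped, when the apparent distance is less than half the counter range. A sketch of the same test, with the masked counter range passed as a hypothetical limit parameter:

/* Sketch only: 'limit' stands in for the masked frame-counter range. */
static int target_frame_elapsed(unsigned int current_frame,
				unsigned int target_frame,
				int frame_overrun, unsigned int limit)
{
	if (!frame_overrun && current_frame >= target_frame)
		return 1;

	/* after a wrap, only treat it as elapsed if the gap is small */
	if (frame_overrun && current_frame >= target_frame &&
	    (current_frame - target_frame) < limit / 2)
		return 1;

	return 0;
}
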
1369 * dwc2_gadget_set_ep0_desc_chain - Set EP's desc chain pointers
1379 switch (hsotg->ep0_state) { in dwc2_gadget_set_ep0_desc_chain()
1382 hs_ep->desc_list = hsotg->setup_desc[0]; in dwc2_gadget_set_ep0_desc_chain()
1383 hs_ep->desc_list_dma = hsotg->setup_desc_dma[0]; in dwc2_gadget_set_ep0_desc_chain()
1387 hs_ep->desc_list = hsotg->ctrl_in_desc; in dwc2_gadget_set_ep0_desc_chain()
1388 hs_ep->desc_list_dma = hsotg->ctrl_in_desc_dma; in dwc2_gadget_set_ep0_desc_chain()
1391 hs_ep->desc_list = hsotg->ctrl_out_desc; in dwc2_gadget_set_ep0_desc_chain()
1392 hs_ep->desc_list_dma = hsotg->ctrl_out_desc_dma; in dwc2_gadget_set_ep0_desc_chain()
1395 dev_err(hsotg->dev, "invalid EP 0 state in queue %d\n", in dwc2_gadget_set_ep0_desc_chain()
1396 hsotg->ep0_state); in dwc2_gadget_set_ep0_desc_chain()
1397 return -EINVAL; in dwc2_gadget_set_ep0_desc_chain()
1408 struct dwc2_hsotg *hs = hs_ep->parent; in dwc2_hsotg_ep_queue() local
1415 dev_dbg(hs->dev, "%s: req %p: %d@%p, noi=%d, zero=%d, snok=%d\n", in dwc2_hsotg_ep_queue()
1416 ep->name, req, req->length, req->buf, req->no_interrupt, in dwc2_hsotg_ep_queue()
1417 req->zero, req->short_not_ok); in dwc2_hsotg_ep_queue()
1420 if (hs->lx_state != DWC2_L0) { in dwc2_hsotg_ep_queue()
1421 dev_dbg(hs->dev, "%s: submit request only in active state\n", in dwc2_hsotg_ep_queue()
1423 return -EAGAIN; in dwc2_hsotg_ep_queue()
1427 INIT_LIST_HEAD(&hs_req->queue); in dwc2_hsotg_ep_queue()
1428 req->actual = 0; in dwc2_hsotg_ep_queue()
1429 req->status = -EINPROGRESS; in dwc2_hsotg_ep_queue()
1431 /* Don't queue ISOC request if length greater than mps*mc */ in dwc2_hsotg_ep_queue()
1432 if (hs_ep->isochronous && in dwc2_hsotg_ep_queue()
1433 req->length > (hs_ep->mc * hs_ep->ep.maxpacket)) { in dwc2_hsotg_ep_queue()
1434 dev_err(hs->dev, "req length > maxpacket*mc\n"); in dwc2_hsotg_ep_queue()
1435 return -EINVAL; in dwc2_hsotg_ep_queue()
1438 /* In DDMA mode for ISOC's don't queue request if length greater in dwc2_hsotg_ep_queue()
1441 if (using_desc_dma(hs) && hs_ep->isochronous) { in dwc2_hsotg_ep_queue()
1443 if (hs_ep->dir_in && req->length > maxsize) { in dwc2_hsotg_ep_queue()
1444 dev_err(hs->dev, "wrong length %d (maxsize=%d)\n", in dwc2_hsotg_ep_queue()
1445 req->length, maxsize); in dwc2_hsotg_ep_queue()
1446 return -EINVAL; in dwc2_hsotg_ep_queue()
1449 if (!hs_ep->dir_in && req->length > hs_ep->ep.maxpacket) { in dwc2_hsotg_ep_queue()
1450 dev_err(hs->dev, "ISOC OUT: wrong length %d (mps=%d)\n", in dwc2_hsotg_ep_queue()
1451 req->length, hs_ep->ep.maxpacket); in dwc2_hsotg_ep_queue()
1452 return -EINVAL; in dwc2_hsotg_ep_queue()
1456 ret = dwc2_hsotg_handle_unaligned_buf_start(hs, hs_ep, hs_req); in dwc2_hsotg_ep_queue()
1457 if (ret) in dwc2_hsotg_ep_queue()
1460 /* if we're using DMA, sync the buffers as necessary */ in dwc2_hsotg_ep_queue()
1461 if (using_dma(hs)) { in dwc2_hsotg_ep_queue()
1462 ret = dwc2_hsotg_map_dma(hs, hs_ep, req); in dwc2_hsotg_ep_queue()
1463 if (ret) in dwc2_hsotg_ep_queue()
1466 /* If using descriptor DMA configure EP0 descriptor chain pointers */ in dwc2_hsotg_ep_queue()
1467 if (using_desc_dma(hs) && !hs_ep->index) { in dwc2_hsotg_ep_queue()
1468 ret = dwc2_gadget_set_ep0_desc_chain(hs, hs_ep); in dwc2_hsotg_ep_queue()
1469 if (ret) in dwc2_hsotg_ep_queue()
1473 first = list_empty(&hs_ep->queue); in dwc2_hsotg_ep_queue()
1474 list_add_tail(&hs_req->queue, &hs_ep->queue); in dwc2_hsotg_ep_queue()
1477 * Handle DDMA isochronous transfers separately - just add new entry in dwc2_hsotg_ep_queue()
1482 if (using_desc_dma(hs) && hs_ep->isochronous) { in dwc2_hsotg_ep_queue()
1483 if (hs_ep->target_frame != TARGET_FRAME_INITIAL) { in dwc2_hsotg_ep_queue()
1484 dma_addr_t dma_addr = hs_req->req.dma; in dwc2_hsotg_ep_queue()
1486 if (hs_req->req.num_sgs) { in dwc2_hsotg_ep_queue()
1487 WARN_ON(hs_req->req.num_sgs > 1); in dwc2_hsotg_ep_queue()
1488 dma_addr = sg_dma_address(hs_req->req.sg); in dwc2_hsotg_ep_queue()
1491 hs_req->req.length); in dwc2_hsotg_ep_queue()
1496 /* Change EP direction if status phase request is after data out */ in dwc2_hsotg_ep_queue()
1497 if (!hs_ep->index && !req->length && !hs_ep->dir_in && in dwc2_hsotg_ep_queue()
1498 hs->ep0_state == DWC2_EP0_DATA_OUT) in dwc2_hsotg_ep_queue()
1499 hs_ep->dir_in = 1; in dwc2_hsotg_ep_queue()
1501 if (first) { in dwc2_hsotg_ep_queue()
1502 if (!hs_ep->isochronous) { in dwc2_hsotg_ep_queue()
1503 dwc2_hsotg_start_req(hs, hs_ep, hs_req, false); in dwc2_hsotg_ep_queue()
1508 hs->frame_number = dwc2_hsotg_read_frameno(hs); in dwc2_hsotg_ep_queue()
1514 hs->frame_number = dwc2_hsotg_read_frameno(hs); in dwc2_hsotg_ep_queue()
1517 if (hs_ep->target_frame != TARGET_FRAME_INITIAL) in dwc2_hsotg_ep_queue()
1518 dwc2_hsotg_start_req(hs, hs_ep, hs_req, false); in dwc2_hsotg_ep_queue()
1527 struct dwc2_hsotg *hs = hs_ep->parent; in dwc2_hsotg_ep_queue_lock() local
1531 spin_lock_irqsave(&hs->lock, flags); in dwc2_hsotg_ep_queue_lock()
1533 spin_unlock_irqrestore(&hs->lock, flags); in dwc2_hsotg_ep_queue_lock()
1547 * dwc2_hsotg_complete_oursetup - setup completion callback
1558 struct dwc2_hsotg *hsotg = hs_ep->parent; in dwc2_hsotg_complete_oursetup()
1560 dev_dbg(hsotg->dev, "%s: ep %p, req %p\n", __func__, ep, req); in dwc2_hsotg_complete_oursetup()
1566 * ep_from_windex - convert control wIndex value to endpoint
1571 * structure, or return NULL if it is not a valid endpoint.
1579 if (windex >= 0x100) in ep_from_windex()
1582 if (idx > hsotg->num_of_eps) in ep_from_windex()
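
ep_from_windex() above decodes a control-request wIndex into an endpoint: the direction comes from bit 7 (USB_DIR_IN), the endpoint number from the low bits, and anything at or above 0x100 is rejected. A sketch of that decoding (hypothetical helper returning an index instead of an endpoint pointer):

/* Sketch only: returns an index/direction instead of an endpoint pointer. */
static int windex_to_ep_index(unsigned int windex, int *dir_in)
{
	if (windex >= 0x100)
		return -1;			/* not a valid endpoint wIndex */

	*dir_in = (windex & 0x80) ? 1 : 0;	/* bit 7 is USB_DIR_IN */
	return windex & 0x7f;			/* endpoint number */
}
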
1589 * dwc2_hsotg_set_test_mode - Enable USB test modes
1591 * @testmode: requested USB test mode
1592 * Enable the USB test mode requested by the host.
1608 return -EINVAL; in dwc2_hsotg_set_test_mode()
1615 * dwc2_hsotg_send_reply - send reply to control request
1632 dev_dbg(hsotg->dev, "%s: buff %p, len %d\n", __func__, buff, length); in dwc2_hsotg_send_reply()
1634 req = dwc2_hsotg_ep_alloc_request(&ep->ep, GFP_ATOMIC); in dwc2_hsotg_send_reply()
1635 hsotg->ep0_reply = req; in dwc2_hsotg_send_reply()
1636 if (!req) { in dwc2_hsotg_send_reply()
1637 dev_warn(hsotg->dev, "%s: cannot alloc req\n", __func__); in dwc2_hsotg_send_reply()
1638 return -ENOMEM; in dwc2_hsotg_send_reply()
1641 req->buf = hsotg->ep0_buff; in dwc2_hsotg_send_reply()
1642 req->length = length; in dwc2_hsotg_send_reply()
1647 req->zero = 0; in dwc2_hsotg_send_reply()
1648 req->complete = dwc2_hsotg_complete_oursetup; in dwc2_hsotg_send_reply()
1650 if (length) in dwc2_hsotg_send_reply()
1651 memcpy(req->buf, buff, length); in dwc2_hsotg_send_reply()
1653 ret = dwc2_hsotg_ep_queue(&ep->ep, req, GFP_ATOMIC); in dwc2_hsotg_send_reply()
1654 if (ret) { in dwc2_hsotg_send_reply()
1655 dev_warn(hsotg->dev, "%s: cannot queue req\n", __func__); in dwc2_hsotg_send_reply()
1663 * dwc2_hsotg_process_req_status - process request GET_STATUS
1665 * @ctrl: USB control request
1670 struct dwc2_hsotg_ep *ep0 = hsotg->eps_out[0]; in dwc2_hsotg_process_req_status()
1676 dev_dbg(hsotg->dev, "%s: USB_REQ_GET_STATUS\n", __func__); in dwc2_hsotg_process_req_status()
1678 if (!ep0->dir_in) { in dwc2_hsotg_process_req_status()
1679 dev_warn(hsotg->dev, "%s: direction out?\n", __func__); in dwc2_hsotg_process_req_status()
1680 return -EINVAL; in dwc2_hsotg_process_req_status()
1683 switch (ctrl->bRequestType & USB_RECIP_MASK) { in dwc2_hsotg_process_req_status()
1685 status = hsotg->gadget.is_selfpowered << in dwc2_hsotg_process_req_status()
1687 status |= hsotg->remote_wakeup_allowed << in dwc2_hsotg_process_req_status()
1698 ep = ep_from_windex(hsotg, le16_to_cpu(ctrl->wIndex)); in dwc2_hsotg_process_req_status()
1699 if (!ep) in dwc2_hsotg_process_req_status()
1700 return -ENOENT; in dwc2_hsotg_process_req_status()
1702 reply = cpu_to_le16(ep->halted ? 1 : 0); in dwc2_hsotg_process_req_status()
1709 if (le16_to_cpu(ctrl->wLength) != 2) in dwc2_hsotg_process_req_status()
1710 return -EINVAL; in dwc2_hsotg_process_req_status()
1713 if (ret) { in dwc2_hsotg_process_req_status()
1714 dev_err(hsotg->dev, "%s: failed to send reply\n", __func__); in dwc2_hsotg_process_req_status()
1724 * get_ep_head - return the first request on the endpoint
1731 return list_first_entry_or_null(&hs_ep->queue, struct dwc2_hsotg_req, in get_ep_head()
1736 * dwc2_gadget_start_next_request - Starts next request from ep queue
1739 * If queue is empty and EP is ISOC-OUT - unmasks OUTTKNEPDIS which is masked
1745 struct dwc2_hsotg *hsotg = hs_ep->parent; in dwc2_gadget_start_next_request()
1746 int dir_in = hs_ep->dir_in; in dwc2_gadget_start_next_request()
1749 if (!list_empty(&hs_ep->queue)) { in dwc2_gadget_start_next_request()
1754 if (!hs_ep->isochronous) in dwc2_gadget_start_next_request()
1757 if (dir_in) { in dwc2_gadget_start_next_request()
1758 dev_dbg(hsotg->dev, "%s: No more ISOC-IN requests\n", in dwc2_gadget_start_next_request()
1761 dev_dbg(hsotg->dev, "%s: No more ISOC-OUT requests\n", in dwc2_gadget_start_next_request()
1767 * dwc2_hsotg_process_req_feature - process request {SET,CLEAR}_FEATURE
1769 * @ctrl: USB control request
1774 struct dwc2_hsotg_ep *ep0 = hsotg->eps_out[0]; in dwc2_hsotg_process_req_feature()
1776 bool set = (ctrl->bRequest == USB_REQ_SET_FEATURE); in dwc2_hsotg_process_req_feature()
1784 dev_dbg(hsotg->dev, "%s: %s_FEATURE\n", in dwc2_hsotg_process_req_feature()
1787 wValue = le16_to_cpu(ctrl->wValue); in dwc2_hsotg_process_req_feature()
1788 wIndex = le16_to_cpu(ctrl->wIndex); in dwc2_hsotg_process_req_feature()
1789 recip = ctrl->bRequestType & USB_RECIP_MASK; in dwc2_hsotg_process_req_feature()
1795 if (set) in dwc2_hsotg_process_req_feature()
1796 hsotg->remote_wakeup_allowed = 1; in dwc2_hsotg_process_req_feature()
1798 hsotg->remote_wakeup_allowed = 0; in dwc2_hsotg_process_req_feature()
1802 if ((wIndex & 0xff) != 0) in dwc2_hsotg_process_req_feature()
1803 return -EINVAL; in dwc2_hsotg_process_req_feature()
1804 if (!set) in dwc2_hsotg_process_req_feature()
1805 return -EINVAL; in dwc2_hsotg_process_req_feature()
1807 hsotg->test_mode = wIndex >> 8; in dwc2_hsotg_process_req_feature()
1810 return -ENOENT; in dwc2_hsotg_process_req_feature()
1814 if (ret) { in dwc2_hsotg_process_req_feature()
1815 dev_err(hsotg->dev, in dwc2_hsotg_process_req_feature()
1823 if (!ep) { in dwc2_hsotg_process_req_feature()
1824 dev_dbg(hsotg->dev, "%s: no endpoint for 0x%04x\n", in dwc2_hsotg_process_req_feature()
1826 return -ENOENT; in dwc2_hsotg_process_req_feature()
1831 halted = ep->halted; in dwc2_hsotg_process_req_feature()
1833 if (!ep->wedged) in dwc2_hsotg_process_req_feature()
1834 dwc2_hsotg_ep_sethalt(&ep->ep, set, true); in dwc2_hsotg_process_req_feature()
1837 if (ret) { in dwc2_hsotg_process_req_feature()
1838 dev_err(hsotg->dev, in dwc2_hsotg_process_req_feature()
1844 * we have to complete all requests for ep if it was in dwc2_hsotg_process_req_feature()
1848 if (!set && halted) { in dwc2_hsotg_process_req_feature()
1850 * If we have request in progress, in dwc2_hsotg_process_req_feature()
1853 if (ep->req) { in dwc2_hsotg_process_req_feature()
1854 hs_req = ep->req; in dwc2_hsotg_process_req_feature()
1855 ep->req = NULL; in dwc2_hsotg_process_req_feature()
1856 list_del_init(&hs_req->queue); in dwc2_hsotg_process_req_feature()
1857 if (hs_req->req.complete) { in dwc2_hsotg_process_req_feature()
1858 spin_unlock(&hsotg->lock); in dwc2_hsotg_process_req_feature()
1860 &ep->ep, &hs_req->req); in dwc2_hsotg_process_req_feature()
1861 spin_lock(&hsotg->lock); in dwc2_hsotg_process_req_feature()
1865 /* If we have pending request, then start it */ in dwc2_hsotg_process_req_feature()
1866 if (!ep->req) in dwc2_hsotg_process_req_feature()
1873 return -ENOENT; in dwc2_hsotg_process_req_feature()
1877 return -ENOENT; in dwc2_hsotg_process_req_feature()
1885 * dwc2_hsotg_stall_ep0 - stall ep0
1892 struct dwc2_hsotg_ep *ep0 = hsotg->eps_out[0]; in dwc2_hsotg_stall_ep0()
1896 dev_dbg(hsotg->dev, "ep0 stall (dir=%d)\n", ep0->dir_in); in dwc2_hsotg_stall_ep0()
1897 reg = (ep0->dir_in) ? DIEPCTL0 : DOEPCTL0; in dwc2_hsotg_stall_ep0()
1909 dev_dbg(hsotg->dev, in dwc2_hsotg_stall_ep0()
1921 * dwc2_hsotg_process_control - process a control request
1932 struct dwc2_hsotg_ep *ep0 = hsotg->eps_out[0]; in dwc2_hsotg_process_control()
1936 dev_dbg(hsotg->dev, in dwc2_hsotg_process_control()
1938 ctrl->bRequestType, ctrl->bRequest, ctrl->wValue, in dwc2_hsotg_process_control()
1939 ctrl->wIndex, ctrl->wLength); in dwc2_hsotg_process_control()
1941 if (ctrl->wLength == 0) { in dwc2_hsotg_process_control()
1942 ep0->dir_in = 1; in dwc2_hsotg_process_control()
1943 hsotg->ep0_state = DWC2_EP0_STATUS_IN; in dwc2_hsotg_process_control()
1944 } else if (ctrl->bRequestType & USB_DIR_IN) { in dwc2_hsotg_process_control()
1945 ep0->dir_in = 1; in dwc2_hsotg_process_control()
1946 hsotg->ep0_state = DWC2_EP0_DATA_IN; in dwc2_hsotg_process_control()
1948 ep0->dir_in = 0; in dwc2_hsotg_process_control()
1949 hsotg->ep0_state = DWC2_EP0_DATA_OUT; in dwc2_hsotg_process_control()
1952 if ((ctrl->bRequestType & USB_TYPE_MASK) == USB_TYPE_STANDARD) { in dwc2_hsotg_process_control()
1953 switch (ctrl->bRequest) { in dwc2_hsotg_process_control()
1955 hsotg->connected = 1; in dwc2_hsotg_process_control()
1958 dcfg |= (le16_to_cpu(ctrl->wValue) << in dwc2_hsotg_process_control()
1962 dev_info(hsotg->dev, "new address %d\n", ctrl->wValue); in dwc2_hsotg_process_control()
1980 if (ret == 0 && hsotg->driver) { in dwc2_hsotg_process_control()
1981 spin_unlock(&hsotg->lock); in dwc2_hsotg_process_control()
1982 ret = hsotg->driver->setup(&hsotg->gadget, ctrl); in dwc2_hsotg_process_control()
1983 spin_lock(&hsotg->lock); in dwc2_hsotg_process_control()
1984 if (ret < 0) in dwc2_hsotg_process_control()
1985 dev_dbg(hsotg->dev, "driver->setup() ret %d\n", ret); in dwc2_hsotg_process_control()
1988 hsotg->delayed_status = false; in dwc2_hsotg_process_control()
1989 if (ret == USB_GADGET_DELAYED_STATUS) in dwc2_hsotg_process_control()
1990 hsotg->delayed_status = true; in dwc2_hsotg_process_control()
1997 if (ret < 0) in dwc2_hsotg_process_control()
2002 * dwc2_hsotg_complete_setup - completion of a setup transfer
2013 struct dwc2_hsotg *hsotg = hs_ep->parent; in dwc2_hsotg_complete_setup()
2015 if (req->status < 0) { in dwc2_hsotg_complete_setup()
2016 dev_dbg(hsotg->dev, "%s: failed %d\n", __func__, req->status); in dwc2_hsotg_complete_setup()
2020 spin_lock(&hsotg->lock); in dwc2_hsotg_complete_setup()
2021 if (req->actual == 0) in dwc2_hsotg_complete_setup()
2024 dwc2_hsotg_process_control(hsotg, req->buf); in dwc2_hsotg_complete_setup()
2025 spin_unlock(&hsotg->lock); in dwc2_hsotg_complete_setup()
2029 * dwc2_hsotg_enqueue_setup - start a request for EP0 packets
2032 * Enqueue a request on EP0 if necessary to receive any SETUP packets
2037 struct usb_request *req = hsotg->ctrl_req; in dwc2_hsotg_enqueue_setup()
2041 dev_dbg(hsotg->dev, "%s: queueing setup request\n", __func__); in dwc2_hsotg_enqueue_setup()
2043 req->zero = 0; in dwc2_hsotg_enqueue_setup()
2044 req->length = 8; in dwc2_hsotg_enqueue_setup()
2045 req->buf = hsotg->ctrl_buff; in dwc2_hsotg_enqueue_setup()
2046 req->complete = dwc2_hsotg_complete_setup; in dwc2_hsotg_enqueue_setup()
2048 if (!list_empty(&hs_req->queue)) { in dwc2_hsotg_enqueue_setup()
2049 dev_dbg(hsotg->dev, "%s already queued???\n", __func__); in dwc2_hsotg_enqueue_setup()
2053 hsotg->eps_out[0]->dir_in = 0; in dwc2_hsotg_enqueue_setup()
2054 hsotg->eps_out[0]->send_zlp = 0; in dwc2_hsotg_enqueue_setup()
2055 hsotg->ep0_state = DWC2_EP0_SETUP; in dwc2_hsotg_enqueue_setup()
2057 ret = dwc2_hsotg_ep_queue(&hsotg->eps_out[0]->ep, req, GFP_ATOMIC); in dwc2_hsotg_enqueue_setup()
2058 if (ret < 0) { in dwc2_hsotg_enqueue_setup()
2059 dev_err(hsotg->dev, "%s: failed queue (%d)\n", __func__, ret); in dwc2_hsotg_enqueue_setup()
2071 u8 index = hs_ep->index; in dwc2_hsotg_program_zlp()
2072 u32 epctl_reg = hs_ep->dir_in ? DIEPCTL(index) : DOEPCTL(index); in dwc2_hsotg_program_zlp()
2073 u32 epsiz_reg = hs_ep->dir_in ? DIEPTSIZ(index) : DOEPTSIZ(index); in dwc2_hsotg_program_zlp()
2075 if (hs_ep->dir_in) in dwc2_hsotg_program_zlp()
2076 dev_dbg(hsotg->dev, "Sending zero-length packet on ep%d\n", in dwc2_hsotg_program_zlp()
2079 dev_dbg(hsotg->dev, "Receiving zero-length packet on ep%d\n", in dwc2_hsotg_program_zlp()
2081 if (using_desc_dma(hsotg)) { in dwc2_hsotg_program_zlp()
2083 dma_addr_t dma = hs_ep->desc_list_dma; in dwc2_hsotg_program_zlp()
2085 if (!index) in dwc2_hsotg_program_zlp()
2103 * dwc2_hsotg_complete_request - complete a request given to us
2110 * if it has one and then look to see if we can start a new request
2120 if (!hs_req) { in dwc2_hsotg_complete_request()
2121 dev_dbg(hsotg->dev, "%s: nothing to complete?\n", __func__); in dwc2_hsotg_complete_request()
2125 dev_dbg(hsotg->dev, "complete: ep %p %s, req %p, %d => %p\n", in dwc2_hsotg_complete_request()
2126 hs_ep, hs_ep->ep.name, hs_req, result, hs_req->req.complete); in dwc2_hsotg_complete_request()
2129 * only replace the status if we've not already set an error in dwc2_hsotg_complete_request()
2133 if (hs_req->req.status == -EINPROGRESS) in dwc2_hsotg_complete_request()
2134 hs_req->req.status = result; in dwc2_hsotg_complete_request()
2136 if (using_dma(hsotg)) in dwc2_hsotg_complete_request()
2141 hs_ep->req = NULL; in dwc2_hsotg_complete_request()
2142 list_del_init(&hs_req->queue); in dwc2_hsotg_complete_request()
2149 if (hs_req->req.complete) { in dwc2_hsotg_complete_request()
2150 spin_unlock(&hsotg->lock); in dwc2_hsotg_complete_request()
2151 usb_gadget_giveback_request(&hs_ep->ep, &hs_req->req); in dwc2_hsotg_complete_request()
2152 spin_lock(&hsotg->lock); in dwc2_hsotg_complete_request()
2156 if (using_desc_dma(hsotg) && hs_ep->isochronous) in dwc2_hsotg_complete_request()
2160 * Look to see if there is anything else to do. Note, the completion in dwc2_hsotg_complete_request()
2165 if (!hs_ep->req && result >= 0) in dwc2_hsotg_complete_request()
2170 * dwc2_gadget_complete_isoc_request_ddma - complete an isoc request in DDMA
2180 struct dwc2_hsotg *hsotg = hs_ep->parent; in dwc2_gadget_complete_isoc_request_ddma()
2186 desc_sts = hs_ep->desc_list[hs_ep->compl_desc].status; in dwc2_gadget_complete_isoc_request_ddma()
2193 if (!hs_req) { in dwc2_gadget_complete_isoc_request_ddma()
2194 dev_warn(hsotg->dev, "%s: ISOC EP queue empty\n", __func__); in dwc2_gadget_complete_isoc_request_ddma()
2197 ureq = &hs_req->req; in dwc2_gadget_complete_isoc_request_ddma()
2200 if ((desc_sts & DEV_DMA_STS_MASK) >> DEV_DMA_STS_SHIFT == in dwc2_gadget_complete_isoc_request_ddma()
2202 mask = hs_ep->dir_in ? DEV_DMA_ISOC_TX_NBYTES_MASK : in dwc2_gadget_complete_isoc_request_ddma()
2204 ureq->actual = ureq->length - ((desc_sts & mask) >> in dwc2_gadget_complete_isoc_request_ddma()
2207 /* Adjust actual len for ISOC Out if len is in dwc2_gadget_complete_isoc_request_ddma()
2210 if (!hs_ep->dir_in && ureq->length & 0x3) in dwc2_gadget_complete_isoc_request_ddma()
2211 ureq->actual += 4 - (ureq->length & 0x3); in dwc2_gadget_complete_isoc_request_ddma()
2214 ureq->frame_number = in dwc2_gadget_complete_isoc_request_ddma()
2221 hs_ep->compl_desc++; in dwc2_gadget_complete_isoc_request_ddma()
2222 if (hs_ep->compl_desc > (MAX_DMA_DESC_NUM_HS_ISOC - 1)) in dwc2_gadget_complete_isoc_request_ddma()
2223 hs_ep->compl_desc = 0; in dwc2_gadget_complete_isoc_request_ddma()
2224 desc_sts = hs_ep->desc_list[hs_ep->compl_desc].status; in dwc2_gadget_complete_isoc_request_ddma()
2229 * dwc2_gadget_handle_isoc_bna - handle BNA interrupt for ISOC.
2232 * If EP ISOC OUT then need to flush RX FIFO to remove source of BNA
2239 struct dwc2_hsotg *hsotg = hs_ep->parent; in dwc2_gadget_handle_isoc_bna()
2241 if (!hs_ep->dir_in) in dwc2_gadget_handle_isoc_bna()
2245 hs_ep->target_frame = TARGET_FRAME_INITIAL; in dwc2_gadget_handle_isoc_bna()
2246 hs_ep->next_desc = 0; in dwc2_gadget_handle_isoc_bna()
2247 hs_ep->compl_desc = 0; in dwc2_gadget_handle_isoc_bna()
2251 * dwc2_hsotg_rx_data - receive data from the FIFO for an endpoint
2262 struct dwc2_hsotg_ep *hs_ep = hsotg->eps_out[ep_idx]; in dwc2_hsotg_rx_data()
2263 struct dwc2_hsotg_req *hs_req = hs_ep->req; in dwc2_hsotg_rx_data()
2268 if (!hs_req) { in dwc2_hsotg_rx_data()
2272 dev_dbg(hsotg->dev, in dwc2_hsotg_rx_data()
2284 read_ptr = hs_req->req.actual; in dwc2_hsotg_rx_data()
2285 max_req = hs_req->req.length - read_ptr; in dwc2_hsotg_rx_data()
2287 dev_dbg(hsotg->dev, "%s: read %d/%d, done %d/%d\n", in dwc2_hsotg_rx_data()
2288 __func__, to_read, max_req, read_ptr, hs_req->req.length); in dwc2_hsotg_rx_data()
2290 if (to_read > max_req) { in dwc2_hsotg_rx_data()
2300 hs_ep->total_data += to_read; in dwc2_hsotg_rx_data()
2301 hs_req->req.actual += to_read; in dwc2_hsotg_rx_data()
2305 * note, we might over-write the buffer end by 3 bytes depending on in dwc2_hsotg_rx_data()
2309 hs_req->req.buf + read_ptr, to_read); in dwc2_hsotg_rx_data()
2313 * dwc2_hsotg_ep0_zlp - send/receive zero-length packet on control endpoint
2315 * @dir_in: If IN zlp
2317 * Generate a zero-length IN packet request for terminating a SETUP
2327 hsotg->eps_out[0]->dir_in = dir_in; in dwc2_hsotg_ep0_zlp()
2328 hsotg->ep0_state = dir_in ? DWC2_EP0_STATUS_IN : DWC2_EP0_STATUS_OUT; in dwc2_hsotg_ep0_zlp()
2330 dwc2_hsotg_program_zlp(hsotg, hsotg->eps_out[0]); in dwc2_hsotg_ep0_zlp()
2334 * dwc2_gadget_get_xfersize_ddma - get the transferred byte count from descriptors
2335 * @hs_ep: The endpoint on which the transfer went
2342 const struct usb_endpoint_descriptor *ep_desc = hs_ep->ep.desc; in dwc2_gadget_get_xfersize_ddma()
2343 struct dwc2_hsotg *hsotg = hs_ep->parent; in dwc2_gadget_get_xfersize_ddma()
2346 struct dwc2_dma_desc *desc = hs_ep->desc_list; in dwc2_gadget_get_xfersize_ddma()
2349 u32 mps = hs_ep->ep.maxpacket; in dwc2_gadget_get_xfersize_ddma()
2350 int dir_in = hs_ep->dir_in; in dwc2_gadget_get_xfersize_ddma()
2352 if (!desc) in dwc2_gadget_get_xfersize_ddma()
2353 return -EINVAL; in dwc2_gadget_get_xfersize_ddma()
2356 if (hs_ep->index) in dwc2_gadget_get_xfersize_ddma()
2357 if (usb_endpoint_xfer_int(ep_desc) && !dir_in && (mps % 4)) in dwc2_gadget_get_xfersize_ddma()
2358 bytes_rem_correction = 4 - (mps % 4); in dwc2_gadget_get_xfersize_ddma()
2360 for (i = 0; i < hs_ep->desc_count; ++i) { in dwc2_gadget_get_xfersize_ddma()
2361 status = desc->status; in dwc2_gadget_get_xfersize_ddma()
2363 bytes_rem -= bytes_rem_correction; in dwc2_gadget_get_xfersize_ddma()
2365 if (status & DEV_DMA_STS_MASK) in dwc2_gadget_get_xfersize_ddma()
2366 dev_err(hsotg->dev, "descriptor %d closed with %x\n", in dwc2_gadget_get_xfersize_ddma()
2369 if (status & DEV_DMA_L) in dwc2_gadget_get_xfersize_ddma()
2379 * dwc2_hsotg_handle_outdone - handle receiving OutDone/SetupDone from RXFIFO
2390 struct dwc2_hsotg_ep *hs_ep = hsotg->eps_out[epnum]; in dwc2_hsotg_handle_outdone()
2391 struct dwc2_hsotg_req *hs_req = hs_ep->req; in dwc2_hsotg_handle_outdone()
2392 struct usb_request *req = &hs_req->req; in dwc2_hsotg_handle_outdone()
2396 if (!hs_req) { in dwc2_hsotg_handle_outdone()
2397 dev_dbg(hsotg->dev, "%s: no request active\n", __func__); in dwc2_hsotg_handle_outdone()
2401 if (epnum == 0 && hsotg->ep0_state == DWC2_EP0_STATUS_OUT) { in dwc2_hsotg_handle_outdone()
2402 dev_dbg(hsotg->dev, "zlp packet received\n"); in dwc2_hsotg_handle_outdone()
2408 if (using_desc_dma(hsotg)) in dwc2_hsotg_handle_outdone()
2411 if (using_dma(hsotg)) { in dwc2_hsotg_handle_outdone()
2423 size_done = hs_ep->size_loaded - size_left; in dwc2_hsotg_handle_outdone()
2424 size_done += hs_ep->last_load; in dwc2_hsotg_handle_outdone()
2426 req->actual = size_done; in dwc2_hsotg_handle_outdone()
2429 /* if there is more request to do, schedule new transfer */ in dwc2_hsotg_handle_outdone()
2430 if (req->actual < req->length && size_left == 0) { in dwc2_hsotg_handle_outdone()
2435 if (req->actual < req->length && req->short_not_ok) { in dwc2_hsotg_handle_outdone()
2436 dev_dbg(hsotg->dev, "%s: got %d/%d (short not ok) => error\n", in dwc2_hsotg_handle_outdone()
2437 __func__, req->actual, req->length); in dwc2_hsotg_handle_outdone()
2440 * todo - what should we return here? there's no one else in dwc2_hsotg_handle_outdone()
2446 if (!using_desc_dma(hsotg) && epnum == 0 && in dwc2_hsotg_handle_outdone()
2447 hsotg->ep0_state == DWC2_EP0_DATA_OUT) { in dwc2_hsotg_handle_outdone()
2449 if (!hsotg->delayed_status) in dwc2_hsotg_handle_outdone()
2454 if (!using_desc_dma(hsotg) && hs_ep->isochronous) { in dwc2_hsotg_handle_outdone()
2455 req->frame_number = hs_ep->target_frame; in dwc2_hsotg_handle_outdone()
2463 * dwc2_hsotg_handle_rx - RX FIFO has data
2471 * chunks, so if you have x packets received on an endpoint you'll get x
2491 dev_dbg(hsotg->dev, "%s: GRXSTSP=0x%08x (%d@%d)\n", in dwc2_hsotg_handle_rx()
2496 dev_dbg(hsotg->dev, "GLOBALOUTNAK\n"); in dwc2_hsotg_handle_rx()
2500 dev_dbg(hsotg->dev, "OutDone (Frame=0x%08x)\n", in dwc2_hsotg_handle_rx()
2503 if (!using_dma(hsotg)) in dwc2_hsotg_handle_rx()
2508 dev_dbg(hsotg->dev, in dwc2_hsotg_handle_rx()
2513 * Call dwc2_hsotg_handle_outdone here if it was not called from in dwc2_hsotg_handle_rx()
2514 * GRXSTS_PKTSTS_OUTDONE. That is, if the core didn't in dwc2_hsotg_handle_rx()
2517 if (hsotg->ep0_state == DWC2_EP0_SETUP) in dwc2_hsotg_handle_rx()
2526 dev_dbg(hsotg->dev, in dwc2_hsotg_handle_rx()
2531 WARN_ON(hsotg->ep0_state != DWC2_EP0_SETUP); in dwc2_hsotg_handle_rx()
2537 dev_warn(hsotg->dev, "%s: unknown status %08x\n", in dwc2_hsotg_handle_rx()
2546 * dwc2_hsotg_ep0_mps - turn max packet size into register setting
2564 return (u32)-1; in dwc2_hsotg_ep0_mps()
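
dwc2_hsotg_ep0_mps() above maps an EP0 packet size onto the two-bit MPS field of DIEPCTL0/DOEPCTL0; anything other than 64/32/16/8 bytes is invalid and signalled with (u32)-1. A sketch assuming the usual DWC2 encoding of 0/1/2/3 for 64/32/16/8 (the D0EPCTL_MPS_* values):

/* Sketch only: assumes the usual DWC2 D0EPCTL_MPS_* encoding. */
static unsigned int ep0_mps_to_field(unsigned int mps)
{
	switch (mps) {
	case 64: return 0;
	case 32: return 1;
	case 16: return 2;
	case 8:  return 3;
	default: return (unsigned int)-1;	/* invalid EP0 packet size */
	}
}
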
2568 * dwc2_hsotg_set_ep_maxpacket - set endpoint's max-packet field
2573 * @dir_in: True if direction is in.
2586 if (!hs_ep) in dwc2_hsotg_set_ep_maxpacket()
2589 if (ep == 0) { in dwc2_hsotg_set_ep_maxpacket()
2594 if (mps > 3) in dwc2_hsotg_set_ep_maxpacket()
2596 hs_ep->ep.maxpacket = mps_bytes; in dwc2_hsotg_set_ep_maxpacket()
2597 hs_ep->mc = 1; in dwc2_hsotg_set_ep_maxpacket()
2599 if (mps > 1024) in dwc2_hsotg_set_ep_maxpacket()
2601 hs_ep->mc = mc; in dwc2_hsotg_set_ep_maxpacket()
2602 if (mc > 3) in dwc2_hsotg_set_ep_maxpacket()
2604 hs_ep->ep.maxpacket = mps; in dwc2_hsotg_set_ep_maxpacket()
2607 if (dir_in) { in dwc2_hsotg_set_ep_maxpacket()
2622 dev_err(hsotg->dev, "ep%d: bad mps of %d\n", ep, mps); in dwc2_hsotg_set_ep_maxpacket()
2626 * dwc2_hsotg_txfifo_flush - flush Tx FIFO
2636 if (dwc2_hsotg_wait_bit_clear(hsotg, GRSTCTL, GRSTCTL_TXFFLSH, 100)) in dwc2_hsotg_txfifo_flush()
2637 dev_warn(hsotg->dev, "%s: timeout flushing fifo GRSTCTL_TXFFLSH\n", in dwc2_hsotg_txfifo_flush()
2642 * dwc2_hsotg_trytx - check to see if anything needs transmitting
2646 * Check to see if there is a request that has data to send, and if so
2652 struct dwc2_hsotg_req *hs_req = hs_ep->req; in dwc2_hsotg_trytx()
2654 if (!hs_ep->dir_in || !hs_req) { in dwc2_hsotg_trytx()
2656 * if request is not enqueued, we disable interrupts in dwc2_hsotg_trytx()
2659 if (hs_ep->index != 0) in dwc2_hsotg_trytx()
2660 dwc2_hsotg_ctrl_epint(hsotg, hs_ep->index, in dwc2_hsotg_trytx()
2661 hs_ep->dir_in, 0); in dwc2_hsotg_trytx()
2665 if (hs_req->req.actual < hs_req->req.length) { in dwc2_hsotg_trytx()
2666 dev_dbg(hsotg->dev, "trying to write more for ep%d\n", in dwc2_hsotg_trytx()
2667 hs_ep->index); in dwc2_hsotg_trytx()
2675 * dwc2_hsotg_complete_in - complete IN transfer
2685 struct dwc2_hsotg_req *hs_req = hs_ep->req; in dwc2_hsotg_complete_in()
2686 u32 epsize = dwc2_readl(hsotg, DIEPTSIZ(hs_ep->index)); in dwc2_hsotg_complete_in()
2689 if (!hs_req) { in dwc2_hsotg_complete_in()
2690 dev_dbg(hsotg->dev, "XferCompl but no req\n"); in dwc2_hsotg_complete_in()
2695 if (hs_ep->index == 0 && hsotg->ep0_state == DWC2_EP0_STATUS_IN) { in dwc2_hsotg_complete_in()
2696 dev_dbg(hsotg->dev, "zlp packet sent\n"); in dwc2_hsotg_complete_in()
2702 hs_ep->dir_in = 0; in dwc2_hsotg_complete_in()
2705 if (hsotg->test_mode) { in dwc2_hsotg_complete_in()
2708 ret = dwc2_hsotg_set_test_mode(hsotg, hsotg->test_mode); in dwc2_hsotg_complete_in()
2709 if (ret < 0) { in dwc2_hsotg_complete_in()
2710 dev_dbg(hsotg->dev, "Invalid Test #%d\n", in dwc2_hsotg_complete_in()
2711 hsotg->test_mode); in dwc2_hsotg_complete_in()
2729 if (using_desc_dma(hsotg)) { in dwc2_hsotg_complete_in()
2731 if (size_left < 0) in dwc2_hsotg_complete_in()
2732 dev_err(hsotg->dev, "error parsing DDMA results %d\n", in dwc2_hsotg_complete_in()
2738 size_done = hs_ep->size_loaded - size_left; in dwc2_hsotg_complete_in()
2739 size_done += hs_ep->last_load; in dwc2_hsotg_complete_in()
2741 if (hs_req->req.actual != size_done) in dwc2_hsotg_complete_in()
2742 dev_dbg(hsotg->dev, "%s: adjusting size done %d => %d\n", in dwc2_hsotg_complete_in()
2743 __func__, hs_req->req.actual, size_done); in dwc2_hsotg_complete_in()
2745 hs_req->req.actual = size_done; in dwc2_hsotg_complete_in()
2746 dev_dbg(hsotg->dev, "req->length:%d req->actual:%d req->zero:%d\n", in dwc2_hsotg_complete_in()
2747 hs_req->req.length, hs_req->req.actual, hs_req->req.zero); in dwc2_hsotg_complete_in()
2749 if (!size_left && hs_req->req.actual < hs_req->req.length) { in dwc2_hsotg_complete_in()
2750 dev_dbg(hsotg->dev, "%s trying more for req...\n", __func__); in dwc2_hsotg_complete_in()
2756 if (hs_ep->send_zlp) { in dwc2_hsotg_complete_in()
2757 hs_ep->send_zlp = 0; in dwc2_hsotg_complete_in()
2758 if (!using_desc_dma(hsotg)) { in dwc2_hsotg_complete_in()
2765 if (hs_ep->index == 0 && hsotg->ep0_state == DWC2_EP0_DATA_IN) { in dwc2_hsotg_complete_in()
2772 if (!using_desc_dma(hsotg) && hs_ep->isochronous) { in dwc2_hsotg_complete_in()
2773 hs_req->req.frame_number = hs_ep->target_frame; in dwc2_hsotg_complete_in()
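/*
 * Editorial note: in the non-descriptor-DMA branch the remaining byte
 * count used above is normally extracted from the XferSize field of the
 * DIEPTSIZn value that was read earlier, e.g. (assuming the usual hw.h
 * accessor):
 *
 *	size_left = DXEPTSIZ_XFERSIZE_GET(epsize);
 */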
2781 * dwc2_gadget_read_ep_interrupts - reads interrupts for given ep
2784 * @dir_in: Endpoint direction: 1 - IN, 0 - OUT.
2809 * dwc2_gadget_handle_ep_disabled - handle DXEPINT_EPDISBLD
2818 * For ISOC-OUT endpoints, completes expired requests. If there is remaining
2823 struct dwc2_hsotg *hsotg = hs_ep->parent; in dwc2_gadget_handle_ep_disabled()
2825 unsigned char idx = hs_ep->index; in dwc2_gadget_handle_ep_disabled()
2826 int dir_in = hs_ep->dir_in; in dwc2_gadget_handle_ep_disabled()
2830 dev_dbg(hsotg->dev, "%s: EPDisbld\n", __func__); in dwc2_gadget_handle_ep_disabled()
2832 if (dir_in) { in dwc2_gadget_handle_ep_disabled()
2835 dwc2_hsotg_txfifo_flush(hsotg, hs_ep->fifo_index); in dwc2_gadget_handle_ep_disabled()
2837 if ((epctl & DXEPCTL_STALL) && (epctl & DXEPCTL_EPTYPE_BULK)) { in dwc2_gadget_handle_ep_disabled()
2845 if (dctl & DCTL_GOUTNAKSTS) { in dwc2_gadget_handle_ep_disabled()
2851 if (!hs_ep->isochronous) in dwc2_gadget_handle_ep_disabled()
2854 if (list_empty(&hs_ep->queue)) { in dwc2_gadget_handle_ep_disabled()
2855 dev_dbg(hsotg->dev, "%s: complete_ep 0x%p, ep->queue empty!\n", in dwc2_gadget_handle_ep_disabled()
2862 if (hs_req) { in dwc2_gadget_handle_ep_disabled()
2863 hs_req->req.frame_number = hs_ep->target_frame; in dwc2_gadget_handle_ep_disabled()
2864 hs_req->req.actual = 0; in dwc2_gadget_handle_ep_disabled()
2866 -ENODATA); in dwc2_gadget_handle_ep_disabled()
2870 hsotg->frame_number = dwc2_hsotg_read_frameno(hsotg); in dwc2_gadget_handle_ep_disabled()
2875 * dwc2_gadget_handle_out_token_ep_disabled - handle DXEPINT_OUTTKNEPDIS
2878 * This is the starting point for an ISOC-OUT transfer; synchronization is done with
2882 * HW generates OUTTKNEPDIS - an OUT token is received while the EP is disabled. Upon
2887 struct dwc2_hsotg *hsotg = ep->parent; in dwc2_gadget_handle_out_token_ep_disabled()
2889 int dir_in = ep->dir_in; in dwc2_gadget_handle_out_token_ep_disabled()
2891 if (dir_in || !ep->isochronous) in dwc2_gadget_handle_out_token_ep_disabled()
2894 if (using_desc_dma(hsotg)) { in dwc2_gadget_handle_out_token_ep_disabled()
2895 if (ep->target_frame == TARGET_FRAME_INITIAL) { in dwc2_gadget_handle_out_token_ep_disabled()
2897 ep->target_frame = hsotg->frame_number; in dwc2_gadget_handle_out_token_ep_disabled()
2903 if (ep->target_frame == TARGET_FRAME_INITIAL) { in dwc2_gadget_handle_out_token_ep_disabled()
2906 ep->target_frame = hsotg->frame_number; in dwc2_gadget_handle_out_token_ep_disabled()
2907 if (ep->interval > 1) { in dwc2_gadget_handle_out_token_ep_disabled()
2908 ctrl = dwc2_readl(hsotg, DOEPCTL(ep->index)); in dwc2_gadget_handle_out_token_ep_disabled()
2909 if (ep->target_frame & 0x1) in dwc2_gadget_handle_out_token_ep_disabled()
2914 dwc2_writel(hsotg, ctrl, DOEPCTL(ep->index)); in dwc2_gadget_handle_out_token_ep_disabled()
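/*
 * Editorial note: the branch above typically ORs DXEPCTL_SETODDFR into
 * ctrl when target_frame is odd and DXEPCTL_SETEVENFR otherwise, so this
 * write-back programs the (micro)frame parity on which the next OUT
 * transfer starts (assuming the standard DXEPCTL frame-parity bits).
 */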
2920 if (hs_req) { in dwc2_gadget_handle_out_token_ep_disabled()
2921 hs_req->req.frame_number = ep->target_frame; in dwc2_gadget_handle_out_token_ep_disabled()
2922 hs_req->req.actual = 0; in dwc2_gadget_handle_out_token_ep_disabled()
2923 dwc2_hsotg_complete_request(hsotg, ep, hs_req, -ENODATA); in dwc2_gadget_handle_out_token_ep_disabled()
2928 hsotg->frame_number = dwc2_hsotg_read_frameno(hsotg); in dwc2_gadget_handle_out_token_ep_disabled()
2931 if (!ep->req) in dwc2_gadget_handle_out_token_ep_disabled()
2940 * dwc2_gadget_handle_nak - handle NAK interrupt
2943 * This is the starting point for an ISOC-IN transfer; synchronization is done with
2948 * and 'NAK'. A NAK interrupt for ISOC-IN means that the token has arrived and a ZLP was
2955 struct dwc2_hsotg *hsotg = hs_ep->parent; in dwc2_gadget_handle_nak()
2957 int dir_in = hs_ep->dir_in; in dwc2_gadget_handle_nak()
2960 if (!dir_in || !hs_ep->isochronous) in dwc2_gadget_handle_nak()
2963 if (hs_ep->target_frame == TARGET_FRAME_INITIAL) { in dwc2_gadget_handle_nak()
2965 if (using_desc_dma(hsotg)) { in dwc2_gadget_handle_nak()
2966 hs_ep->target_frame = hsotg->frame_number; in dwc2_gadget_handle_nak()
2972 if (hsotg->params.service_interval) { in dwc2_gadget_handle_nak()
2976 hs_ep->target_frame &= ~hs_ep->interval + 1; in dwc2_gadget_handle_nak()
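	/*
	 * Note on the masking above: unary ~ binds tighter than +, so
	 * "~hs_ep->interval + 1" is two's-complement negation (-interval);
	 * with a power-of-two interval the AND therefore rounds
	 * target_frame down to a multiple of the interval.
	 */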
2989 hs_ep->target_frame = hsotg->frame_number; in dwc2_gadget_handle_nak()
2990 if (hs_ep->interval > 1) { in dwc2_gadget_handle_nak()
2992 DIEPCTL(hs_ep->index)); in dwc2_gadget_handle_nak()
2993 if (hs_ep->target_frame & 0x1) in dwc2_gadget_handle_nak()
2998 dwc2_writel(hsotg, ctrl, DIEPCTL(hs_ep->index)); in dwc2_gadget_handle_nak()
3002 if (using_desc_dma(hsotg)) in dwc2_gadget_handle_nak()
3005 ctrl = dwc2_readl(hsotg, DIEPCTL(hs_ep->index)); in dwc2_gadget_handle_nak()
3006 if (ctrl & DXEPCTL_EPENA) in dwc2_gadget_handle_nak()
3009 dwc2_hsotg_txfifo_flush(hsotg, hs_ep->fifo_index); in dwc2_gadget_handle_nak()
3013 if (hs_req) { in dwc2_gadget_handle_nak()
3014 hs_req->req.frame_number = hs_ep->target_frame; in dwc2_gadget_handle_nak()
3015 hs_req->req.actual = 0; in dwc2_gadget_handle_nak()
3016 dwc2_hsotg_complete_request(hsotg, hs_ep, hs_req, -ENODATA); in dwc2_gadget_handle_nak()
3021 hsotg->frame_number = dwc2_hsotg_read_frameno(hsotg); in dwc2_gadget_handle_nak()
3024 if (!hs_ep->req) in dwc2_gadget_handle_nak()
3029 * dwc2_hsotg_epint - handle an in/out endpoint interrupt
3032 * @dir_in: Set if this is an IN endpoint
3050 if (!hs_ep) { in dwc2_hsotg_epint()
3051 dev_err(hsotg->dev, "%s:Interrupt for unconfigured ep%d(%s)\n", in dwc2_hsotg_epint()
3056 dev_dbg(hsotg->dev, "%s: ep%d(%s) DxEPINT=0x%08x\n", in dwc2_hsotg_epint()
3059 /* Don't process XferCompl interrupt if it is a setup packet */ in dwc2_hsotg_epint()
3060 if (idx == 0 && (ints & (DXEPINT_SETUP | DXEPINT_SETUP_RCVD))) in dwc2_hsotg_epint()
3064 * Don't process XferCompl interrupt in DDMA if EP0 is still in SETUP in dwc2_hsotg_epint()
3069 if (using_desc_dma(hsotg) && idx == 0 && !hs_ep->dir_in && in dwc2_hsotg_epint()
3070 hsotg->ep0_state == DWC2_EP0_SETUP && !(ints & DXEPINT_SETUP)) in dwc2_hsotg_epint()
3073 if (ints & DXEPINT_XFERCOMPL) { in dwc2_hsotg_epint()
3074 dev_dbg(hsotg->dev, in dwc2_hsotg_epint()
3080 if (using_desc_dma(hsotg) && hs_ep->isochronous) { in dwc2_hsotg_epint()
3082 } else if (dir_in) { in dwc2_hsotg_epint()
3086 * if operating slave mode in dwc2_hsotg_epint()
3088 if (!hs_ep->isochronous || !(ints & DXEPINT_NAKINTRPT)) in dwc2_hsotg_epint()
3091 if (idx == 0 && !hs_ep->req) in dwc2_hsotg_epint()
3093 } else if (using_dma(hsotg)) { in dwc2_hsotg_epint()
3098 if (!hs_ep->isochronous || !(ints & DXEPINT_OUTTKNEPDIS)) in dwc2_hsotg_epint()
3103 if (ints & DXEPINT_EPDISBLD) in dwc2_hsotg_epint()
3106 if (ints & DXEPINT_OUTTKNEPDIS) in dwc2_hsotg_epint()
3109 if (ints & DXEPINT_NAKINTRPT) in dwc2_hsotg_epint()
3112 if (ints & DXEPINT_AHBERR) in dwc2_hsotg_epint()
3113 dev_dbg(hsotg->dev, "%s: AHBErr\n", __func__); in dwc2_hsotg_epint()
3115 if (ints & DXEPINT_SETUP) { /* Setup or Timeout */ in dwc2_hsotg_epint()
3116 dev_dbg(hsotg->dev, "%s: Setup/Timeout\n", __func__); in dwc2_hsotg_epint()
3118 if (using_dma(hsotg) && idx == 0) { in dwc2_hsotg_epint()
3121 * setup packet. In non-DMA mode we'd get this in dwc2_hsotg_epint()
3126 if (dir_in) in dwc2_hsotg_epint()
3133 if (ints & DXEPINT_STSPHSERCVD) { in dwc2_hsotg_epint()
3134 dev_dbg(hsotg->dev, "%s: StsPhseRcvd\n", __func__); in dwc2_hsotg_epint()
3137 if (hsotg->ep0_state == DWC2_EP0_DATA_OUT) { in dwc2_hsotg_epint()
3139 if (using_desc_dma(hsotg)) { in dwc2_hsotg_epint()
3140 if (!hsotg->delayed_status) in dwc2_hsotg_epint()
3157 if (ints & DXEPINT_BACK2BACKSETUP) in dwc2_hsotg_epint()
3158 dev_dbg(hsotg->dev, "%s: B2BSetup/INEPNakEff\n", __func__); in dwc2_hsotg_epint()
3160 if (ints & DXEPINT_BNAINTR) { in dwc2_hsotg_epint()
3161 dev_dbg(hsotg->dev, "%s: BNA interrupt\n", __func__); in dwc2_hsotg_epint()
3162 if (hs_ep->isochronous) in dwc2_hsotg_epint()
3166 if (dir_in && !hs_ep->isochronous) { in dwc2_hsotg_epint()
3167 /* not sure if this is important, but we'll clear it anyway */ in dwc2_hsotg_epint()
3168 if (ints & DXEPINT_INTKNTXFEMP) { in dwc2_hsotg_epint()
3169 dev_dbg(hsotg->dev, "%s: ep%d: INTknTXFEmpMsk\n", in dwc2_hsotg_epint()
3174 if (ints & DXEPINT_INTKNEPMIS) { in dwc2_hsotg_epint()
3175 dev_warn(hsotg->dev, "%s: ep%d: INTknEP\n", in dwc2_hsotg_epint()
3180 if (hsotg->dedicated_fifos && in dwc2_hsotg_epint()
3182 dev_dbg(hsotg->dev, "%s: ep%d: TxFIFOEmpty\n", in dwc2_hsotg_epint()
3184 if (!using_dma(hsotg)) in dwc2_hsotg_epint()
3191 * dwc2_hsotg_irq_enumdone - Handle EnumDone interrupt (enumeration done)
3204 * of the USB handshaking, so we should now know what rate in dwc2_hsotg_irq_enumdone()
3208 dev_dbg(hsotg->dev, "EnumDone (DSTS=0x%08x)\n", dsts); in dwc2_hsotg_irq_enumdone()
3220 hsotg->gadget.speed = USB_SPEED_FULL; in dwc2_hsotg_irq_enumdone()
3226 hsotg->gadget.speed = USB_SPEED_HIGH; in dwc2_hsotg_irq_enumdone()
3232 hsotg->gadget.speed = USB_SPEED_LOW; in dwc2_hsotg_irq_enumdone()
3242 dev_info(hsotg->dev, "new device is %s\n", in dwc2_hsotg_irq_enumdone()
3243 usb_speed_string(hsotg->gadget.speed)); in dwc2_hsotg_irq_enumdone()
3250 if (ep0_mps) { in dwc2_hsotg_irq_enumdone()
3255 for (i = 1; i < hsotg->num_of_eps; i++) { in dwc2_hsotg_irq_enumdone()
3256 if (hsotg->eps_in[i]) in dwc2_hsotg_irq_enumdone()
3259 if (hsotg->eps_out[i]) in dwc2_hsotg_irq_enumdone()
3269 dev_dbg(hsotg->dev, "EP0: DIEPCTL0=0x%08x, DOEPCTL0=0x%08x\n", in dwc2_hsotg_irq_enumdone()
3275 * kill_all_requests - remove all requests from the endpoint's queue
3289 ep->req = NULL; in kill_all_requests()
3291 while (!list_empty(&ep->queue)) { in kill_all_requests()
3297 if (!hsotg->dedicated_fifos) in kill_all_requests()
3299 size = (dwc2_readl(hsotg, DTXFSTS(ep->fifo_index)) & 0xffff) * 4; in kill_all_requests()
3300 if (size < ep->fifo_size) in kill_all_requests()
3301 dwc2_hsotg_txfifo_flush(hsotg, ep->fifo_index); in kill_all_requests()
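/*
 * Editorial sketch of what the while loop above amounts to: pop the head
 * request from ep->queue and complete it with the caller-supplied error
 * code (-ESHUTDOWN / -ECONNRESET in the callers shown later), using the
 * request type and completion helper seen elsewhere in this listing:
 *
 *	req = list_first_entry(&ep->queue, struct dwc2_hsotg_req, queue);
 *	dwc2_hsotg_complete_request(hsotg, ep, req, result);	(result: the passed-in code)
 */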
3305 * dwc2_hsotg_disconnect - disconnect service
3316 if (!hsotg->connected) in dwc2_hsotg_disconnect()
3319 hsotg->connected = 0; in dwc2_hsotg_disconnect()
3320 hsotg->test_mode = 0; in dwc2_hsotg_disconnect()
3323 for (ep = 0; ep < hsotg->num_of_eps; ep++) { in dwc2_hsotg_disconnect()
3324 if (hsotg->eps_in[ep]) in dwc2_hsotg_disconnect()
3325 kill_all_requests(hsotg, hsotg->eps_in[ep], in dwc2_hsotg_disconnect()
3326 -ESHUTDOWN); in dwc2_hsotg_disconnect()
3327 if (hsotg->eps_out[ep]) in dwc2_hsotg_disconnect()
3328 kill_all_requests(hsotg, hsotg->eps_out[ep], in dwc2_hsotg_disconnect()
3329 -ESHUTDOWN); in dwc2_hsotg_disconnect()
3333 hsotg->lx_state = DWC2_L3; in dwc2_hsotg_disconnect()
3335 usb_gadget_set_state(&hsotg->gadget, USB_STATE_NOTATTACHED); in dwc2_hsotg_disconnect()
3339 * dwc2_hsotg_irq_fifoempty - TX FIFO empty interrupt handler
3341 * @periodic: True if this is a periodic FIFO interrupt
3349 for (epno = 0; epno < hsotg->num_of_eps; epno++) { in dwc2_hsotg_irq_fifoempty()
3352 if (!ep) in dwc2_hsotg_irq_fifoempty()
3355 if (!ep->dir_in) in dwc2_hsotg_irq_fifoempty()
3358 if ((periodic && !ep->periodic) || in dwc2_hsotg_irq_fifoempty()
3359 (!periodic && ep->periodic)) in dwc2_hsotg_irq_fifoempty()
3363 if (ret < 0) in dwc2_hsotg_irq_fifoempty()
3375 * dwc2_hsotg_core_init_disconnected - issue softreset to the core
3377 * @is_usb_reset: USB reset flag
3391 kill_all_requests(hsotg, hsotg->eps_out[0], -ECONNRESET); in dwc2_hsotg_core_init_disconnected()
3393 if (!is_usb_reset) { in dwc2_hsotg_core_init_disconnected()
3394 if (dwc2_core_reset(hsotg, true)) in dwc2_hsotg_core_init_disconnected()
3398 for (ep = 1; ep < hsotg->num_of_eps; ep++) { in dwc2_hsotg_core_init_disconnected()
3399 if (hsotg->eps_in[ep]) in dwc2_hsotg_core_init_disconnected()
3400 dwc2_hsotg_ep_disable(&hsotg->eps_in[ep]->ep); in dwc2_hsotg_core_init_disconnected()
3401 if (hsotg->eps_out[ep]) in dwc2_hsotg_core_init_disconnected()
3402 dwc2_hsotg_ep_disable(&hsotg->eps_out[ep]->ep); in dwc2_hsotg_core_init_disconnected()
3424 if (!is_usb_reset) in dwc2_hsotg_core_init_disconnected()
3429 switch (hsotg->params.speed) { in dwc2_hsotg_core_init_disconnected()
3434 if (hsotg->params.phy_type == DWC2_PHY_TYPE_PARAM_FS) in dwc2_hsotg_core_init_disconnected()
3443 if (hsotg->params.ipg_isoc_en) in dwc2_hsotg_core_init_disconnected()
3460 if (!using_desc_dma(hsotg)) in dwc2_hsotg_core_init_disconnected()
3463 if (!hsotg->params.external_id_pin_ctl) in dwc2_hsotg_core_init_disconnected()
3468 if (using_dma(hsotg)) { in dwc2_hsotg_core_init_disconnected()
3470 hsotg->params.ahbcfg, in dwc2_hsotg_core_init_disconnected()
3473 /* Set DDMA mode support in the core if needed */ in dwc2_hsotg_core_init_disconnected()
3474 if (using_desc_dma(hsotg)) in dwc2_hsotg_core_init_disconnected()
3478 dwc2_writel(hsotg, ((hsotg->dedicated_fifos) ? in dwc2_hsotg_core_init_disconnected()
3485 * If INTknTXFEmpMsk is enabled, it's important to disable ep interrupts in dwc2_hsotg_core_init_disconnected()
3490 dwc2_writel(hsotg, ((hsotg->dedicated_fifos && !using_dma(hsotg)) ? in dwc2_hsotg_core_init_disconnected()
3507 if (using_desc_dma(hsotg)) { in dwc2_hsotg_core_init_disconnected()
3512 /* Enable Service Interval mode if supported */ in dwc2_hsotg_core_init_disconnected()
3513 if (using_desc_dma(hsotg) && hsotg->params.service_interval) in dwc2_hsotg_core_init_disconnected()
3518 dev_dbg(hsotg->dev, "EP0: DIEPCTL0=0x%08x, DOEPCTL0=0x%08x\n", in dwc2_hsotg_core_init_disconnected()
3530 if (!using_dma(hsotg)) in dwc2_hsotg_core_init_disconnected()
3537 if (!is_usb_reset) { in dwc2_hsotg_core_init_disconnected()
3543 dev_dbg(hsotg->dev, "DCTL=0x%08x\n", dwc2_readl(hsotg, DCTL)); in dwc2_hsotg_core_init_disconnected()
3554 dwc2_writel(hsotg, dwc2_hsotg_ep0_mps(hsotg->eps_out[0]->ep.maxpacket) | in dwc2_hsotg_core_init_disconnected()
3560 dwc2_writel(hsotg, dwc2_hsotg_ep0_mps(hsotg->eps_out[0]->ep.maxpacket) | in dwc2_hsotg_core_init_disconnected()
3565 if (!is_usb_reset) in dwc2_hsotg_core_init_disconnected()
3572 /* program GREFCLK register if needed */ in dwc2_hsotg_core_init_disconnected()
3573 if (using_desc_dma(hsotg) && hsotg->params.service_interval) in dwc2_hsotg_core_init_disconnected()
3576 /* must be at least 3ms to allow the bus to see the disconnect */ in dwc2_hsotg_core_init_disconnected()
3579 hsotg->lx_state = DWC2_L0; in dwc2_hsotg_core_init_disconnected()
3583 dev_dbg(hsotg->dev, "EP0: DIEPCTL0=0x%08x, DOEPCTL0=0x%08x\n", in dwc2_hsotg_core_init_disconnected()
3590 /* set the soft-disconnect bit */ in dwc2_hsotg_core_disconnect()
3596 /* remove the soft-disconnect and let's go */ in dwc2_hsotg_core_connect()
3597 if (!hsotg->role_sw || (dwc2_readl(hsotg, GOTGCTL) & GOTGCTL_BSESVLD)) in dwc2_hsotg_core_connect()
3602 * dwc2_gadget_handle_incomplete_isoc_in - handle incomplete ISO IN Interrupt.
3607 * - Corrupted IN Token for ISOC EP.
3608 * - Packet not complete in FIFO.
3611 * - Determine the EP
3612 * - Disable EP; when 'Endpoint Disabled' interrupt is received Flush FIFO
3621 dev_dbg(hsotg->dev, "Incomplete isoc in interrupt received:\n"); in dwc2_gadget_handle_incomplete_isoc_in()
3625 for (idx = 1; idx < hsotg->num_of_eps; idx++) { in dwc2_gadget_handle_incomplete_isoc_in()
3626 hs_ep = hsotg->eps_in[idx]; in dwc2_gadget_handle_incomplete_isoc_in()
3628 if ((BIT(idx) & ~daintmsk) || !hs_ep->isochronous) in dwc2_gadget_handle_incomplete_isoc_in()
3632 if ((epctrl & DXEPCTL_EPENA) && in dwc2_gadget_handle_incomplete_isoc_in()
3645 * dwc2_gadget_handle_incomplete_isoc_out - handle incomplete ISO OUT Interrupt
3650 * - Corrupted OUT Token for ISOC EP.
3651 * - Packet not complete in FIFO.
3654 * - Determine the EP
3655 * - Set DCTL_SGOUTNAK and unmask GOUTNAKEFF if target frame elapsed.
3666 dev_dbg(hsotg->dev, "%s: GINTSTS_INCOMPL_SOOUT\n", __func__); in dwc2_gadget_handle_incomplete_isoc_out()
3671 for (idx = 1; idx < hsotg->num_of_eps; idx++) { in dwc2_gadget_handle_incomplete_isoc_out()
3672 hs_ep = hsotg->eps_out[idx]; in dwc2_gadget_handle_incomplete_isoc_out()
3674 if ((BIT(idx) & ~daintmsk) || !hs_ep->isochronous) in dwc2_gadget_handle_incomplete_isoc_out()
3678 if ((epctrl & DXEPCTL_EPENA) && in dwc2_gadget_handle_incomplete_isoc_out()
3686 if (!(gintsts & GINTSTS_GOUTNAKEFF)) { in dwc2_gadget_handle_incomplete_isoc_out()
3698 * dwc2_hsotg_irq - handle device interrupt
3709 if (!dwc2_is_device_mode(hsotg)) in dwc2_hsotg_irq()
3712 spin_lock(&hsotg->lock); in dwc2_hsotg_irq()
3717 dev_dbg(hsotg->dev, "%s: %08x %08x (%08x) retry %d\n", in dwc2_hsotg_irq()
3722 if (gintsts & GINTSTS_RESETDET) { in dwc2_hsotg_irq()
3723 dev_dbg(hsotg->dev, "%s: USBRstDet\n", __func__); in dwc2_hsotg_irq()
3727 /* This event must be used only if controller is suspended */ in dwc2_hsotg_irq()
3728 if (hsotg->in_ppd && hsotg->lx_state == DWC2_L2) in dwc2_hsotg_irq()
3731 hsotg->lx_state = DWC2_L0; in dwc2_hsotg_irq()
3734 if (gintsts & (GINTSTS_USBRST | GINTSTS_RESETDET)) { in dwc2_hsotg_irq()
3736 u32 connected = hsotg->connected; in dwc2_hsotg_irq()
3738 dev_dbg(hsotg->dev, "%s: USBRst\n", __func__); in dwc2_hsotg_irq()
3739 dev_dbg(hsotg->dev, "GNPTXSTS=%08x\n", in dwc2_hsotg_irq()
3744 /* Report disconnection if it is not already done. */ in dwc2_hsotg_irq()
3750 if (usb_status & GOTGCTL_BSESVLD && connected) in dwc2_hsotg_irq()
3754 if (gintsts & GINTSTS_ENUMDONE) { in dwc2_hsotg_irq()
3760 if (gintsts & (GINTSTS_OEPINT | GINTSTS_IEPINT)) { in dwc2_hsotg_irq()
3770 dev_dbg(hsotg->dev, "%s: daint=%08x\n", __func__, daint); in dwc2_hsotg_irq()
3772 for (ep = 0; ep < hsotg->num_of_eps && daint_out; in dwc2_hsotg_irq()
3774 if (daint_out & 1) in dwc2_hsotg_irq()
3778 for (ep = 0; ep < hsotg->num_of_eps && daint_in; in dwc2_hsotg_irq()
3780 if (daint_in & 1) in dwc2_hsotg_irq()
3787 if (gintsts & GINTSTS_NPTXFEMP) { in dwc2_hsotg_irq()
3788 dev_dbg(hsotg->dev, "NPTxFEmp\n"); in dwc2_hsotg_irq()
3793 * it needs re-enabling in dwc2_hsotg_irq()
3800 if (gintsts & GINTSTS_PTXFEMP) { in dwc2_hsotg_irq()
3801 dev_dbg(hsotg->dev, "PTxFEmp\n"); in dwc2_hsotg_irq()
3809 if (gintsts & GINTSTS_RXFLVL) { in dwc2_hsotg_irq()
3811 * note, since GINTSTS_RxFLvl doubles as FIFO-not-empty, in dwc2_hsotg_irq()
3812 * we need to retry dwc2_hsotg_handle_rx if this is still in dwc2_hsotg_irq()
3819 if (gintsts & GINTSTS_ERLYSUSP) { in dwc2_hsotg_irq()
3820 dev_dbg(hsotg->dev, "GINTSTS_ErlySusp\n"); in dwc2_hsotg_irq()
3825 * these next two seem to crop up occasionally, causing the core in dwc2_hsotg_irq()
3826 * to shut down the USB transfer, so try clearing them and logging in dwc2_hsotg_irq()
3830 if (gintsts & GINTSTS_GOUTNAKEFF) { in dwc2_hsotg_irq()
3844 dev_dbg(hsotg->dev, "GOUTNakEff triggered\n"); in dwc2_hsotg_irq()
3845 for (idx = 1; idx < hsotg->num_of_eps; idx++) { in dwc2_hsotg_irq()
3846 hs_ep = hsotg->eps_out[idx]; in dwc2_hsotg_irq()
3848 if (BIT(idx) & ~daintmsk) in dwc2_hsotg_irq()
3854 if ((epctrl & DXEPCTL_EPENA) && hs_ep->isochronous) { in dwc2_hsotg_irq()
3861 /* Non-ISOC EPs */ in dwc2_hsotg_irq()
3862 if (hs_ep->halted) { in dwc2_hsotg_irq()
3863 if (!(epctrl & DXEPCTL_EPENA)) in dwc2_hsotg_irq()
3874 if (gintsts & GINTSTS_GINNAKEFF) { in dwc2_hsotg_irq()
3875 dev_info(hsotg->dev, "GINNakEff triggered\n"); in dwc2_hsotg_irq()
3882 if (gintsts & GINTSTS_INCOMPL_SOIN) in dwc2_hsotg_irq()
3885 if (gintsts & GINTSTS_INCOMPL_SOOUT) in dwc2_hsotg_irq()
3889 * if we've had fifo events, we should try and go around the in dwc2_hsotg_irq()
3890 * loop again to see if there's any point in returning yet. in dwc2_hsotg_irq()
3893 if (gintsts & IRQ_RETRY_MASK && --retry_count > 0) in dwc2_hsotg_irq()
3897 if (hsotg->params.service_interval) in dwc2_hsotg_irq()
3900 spin_unlock(&hsotg->lock); in dwc2_hsotg_irq()
3911 epctrl_reg = hs_ep->dir_in ? DIEPCTL(hs_ep->index) : in dwc2_hsotg_ep_stop_xfr()
3912 DOEPCTL(hs_ep->index); in dwc2_hsotg_ep_stop_xfr()
3913 epint_reg = hs_ep->dir_in ? DIEPINT(hs_ep->index) : in dwc2_hsotg_ep_stop_xfr()
3914 DOEPINT(hs_ep->index); in dwc2_hsotg_ep_stop_xfr()
3916 dev_dbg(hsotg->dev, "%s: stopping transfer on %s\n", __func__, in dwc2_hsotg_ep_stop_xfr()
3917 hs_ep->name); in dwc2_hsotg_ep_stop_xfr()
3919 if (hs_ep->dir_in) { in dwc2_hsotg_ep_stop_xfr()
3920 if (hsotg->dedicated_fifos || hs_ep->periodic) { in dwc2_hsotg_ep_stop_xfr()
3923 if (dwc2_hsotg_wait_bit_set(hsotg, epint_reg, in dwc2_hsotg_ep_stop_xfr()
3925 dev_warn(hsotg->dev, in dwc2_hsotg_ep_stop_xfr()
3931 if (dwc2_hsotg_wait_bit_set(hsotg, GINTSTS, in dwc2_hsotg_ep_stop_xfr()
3933 dev_warn(hsotg->dev, in dwc2_hsotg_ep_stop_xfr()
3941 if (!(dwc2_readl(hsotg, GINTSTS) & GINTSTS_GOUTNAKEFF)) in dwc2_hsotg_ep_stop_xfr()
3944 if (!using_dma(hsotg)) { in dwc2_hsotg_ep_stop_xfr()
3946 if (dwc2_hsotg_wait_bit_set(hsotg, GINTSTS, in dwc2_hsotg_ep_stop_xfr()
3948 dev_warn(hsotg->dev, "%s: timeout GINTSTS.RXFLVL\n", in dwc2_hsotg_ep_stop_xfr()
3960 if (dwc2_hsotg_wait_bit_set(hsotg, GINTSTS, in dwc2_hsotg_ep_stop_xfr()
3962 dev_warn(hsotg->dev, "%s: timeout GINTSTS.GOUTNAKEFF\n", in dwc2_hsotg_ep_stop_xfr()
3970 if (dwc2_hsotg_wait_bit_set(hsotg, epint_reg, DXEPINT_EPDISBLD, 100)) in dwc2_hsotg_ep_stop_xfr()
3971 dev_warn(hsotg->dev, in dwc2_hsotg_ep_stop_xfr()
3977 if (hs_ep->dir_in) { in dwc2_hsotg_ep_stop_xfr()
3980 if (hsotg->dedicated_fifos || hs_ep->periodic) in dwc2_hsotg_ep_stop_xfr()
3981 fifo_index = hs_ep->fifo_index; in dwc2_hsotg_ep_stop_xfr()
3989 if (!hsotg->dedicated_fifos && !hs_ep->periodic) in dwc2_hsotg_ep_stop_xfr()
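/*
 * Summary of the shutdown sequence above: first NAK the endpoint (per-EP
 * SNAK for dedicated-FIFO/periodic IN endpoints, otherwise a global
 * IN/OUT NAK through DCTL) and wait for the NAK-effective status, then
 * set EPDIS together with SNAK and wait for DXEPINT_EPDISBLD, and finally
 * flush the IN endpoint's TxFIFO and clear any global NAK that was set.
 */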
3999 * dwc2_hsotg_ep_enable - enable the given endpoint
4000 * @ep: The USB endpoint to configure
4001 * @desc: The USB endpoint descriptor to configure with.
4003 * This is called from the USB gadget code's usb_ep_enable().
4009 struct dwc2_hsotg *hsotg = hs_ep->parent; in dwc2_hsotg_ep_enable()
4011 unsigned int index = hs_ep->index; in dwc2_hsotg_ep_enable()
4023 dev_dbg(hsotg->dev, in dwc2_hsotg_ep_enable()
4025 __func__, ep->name, desc->bEndpointAddress, desc->bmAttributes, in dwc2_hsotg_ep_enable()
4026 desc->wMaxPacketSize, desc->bInterval); in dwc2_hsotg_ep_enable()
4029 if (index == 0) { in dwc2_hsotg_ep_enable()
4030 dev_err(hsotg->dev, "%s: called for EP 0\n", __func__); in dwc2_hsotg_ep_enable()
4031 return -EINVAL; in dwc2_hsotg_ep_enable()
4034 dir_in = (desc->bEndpointAddress & USB_ENDPOINT_DIR_MASK) ? 1 : 0; in dwc2_hsotg_ep_enable()
4035 if (dir_in != hs_ep->dir_in) { in dwc2_hsotg_ep_enable()
4036 dev_err(hsotg->dev, "%s: direction mismatch!\n", __func__); in dwc2_hsotg_ep_enable()
4037 return -EINVAL; in dwc2_hsotg_ep_enable()
4040 ep_type = desc->bmAttributes & USB_ENDPOINT_XFERTYPE_MASK; in dwc2_hsotg_ep_enable()
4045 if (using_desc_dma(hsotg) && ep_type == USB_ENDPOINT_XFER_ISOC && in dwc2_hsotg_ep_enable()
4046 dir_in && desc->bInterval > 10) { in dwc2_hsotg_ep_enable()
4047 dev_err(hsotg->dev, in dwc2_hsotg_ep_enable()
4049 return -EINVAL; in dwc2_hsotg_ep_enable()
4053 if (using_desc_dma(hsotg) && ep_type == USB_ENDPOINT_XFER_ISOC && in dwc2_hsotg_ep_enable()
4055 dev_err(hsotg->dev, in dwc2_hsotg_ep_enable()
4057 return -EINVAL; in dwc2_hsotg_ep_enable()
4065 dev_dbg(hsotg->dev, "%s: read DxEPCTL=0x%08x from 0x%08x\n", in dwc2_hsotg_ep_enable()
4068 if (using_desc_dma(hsotg) && ep_type == USB_ENDPOINT_XFER_ISOC) in dwc2_hsotg_ep_enable()
4073 /* Allocate DMA descriptor chain for non-ctrl endpoints */ in dwc2_hsotg_ep_enable()
4074 if (using_desc_dma(hsotg) && !hs_ep->desc_list) { in dwc2_hsotg_ep_enable()
4075 hs_ep->desc_list = dmam_alloc_coherent(hsotg->dev, in dwc2_hsotg_ep_enable()
4077 &hs_ep->desc_list_dma, GFP_ATOMIC); in dwc2_hsotg_ep_enable()
4078 if (!hs_ep->desc_list) { in dwc2_hsotg_ep_enable()
4079 ret = -ENOMEM; in dwc2_hsotg_ep_enable()
4084 spin_lock_irqsave(&hsotg->lock, flags); in dwc2_hsotg_ep_enable()
4096 dwc2_hsotg_set_ep_maxpacket(hsotg, hs_ep->index, mps, mc, dir_in); in dwc2_hsotg_ep_enable()
4098 /* default, set to non-periodic */ in dwc2_hsotg_ep_enable()
4099 hs_ep->isochronous = 0; in dwc2_hsotg_ep_enable()
4100 hs_ep->periodic = 0; in dwc2_hsotg_ep_enable()
4101 hs_ep->halted = 0; in dwc2_hsotg_ep_enable()
4102 hs_ep->wedged = 0; in dwc2_hsotg_ep_enable()
4103 hs_ep->interval = desc->bInterval; in dwc2_hsotg_ep_enable()
4109 hs_ep->isochronous = 1; in dwc2_hsotg_ep_enable()
4110 hs_ep->interval = 1 << (desc->bInterval - 1); in dwc2_hsotg_ep_enable()
4111 hs_ep->target_frame = TARGET_FRAME_INITIAL; in dwc2_hsotg_ep_enable()
4112 hs_ep->next_desc = 0; in dwc2_hsotg_ep_enable()
4113 hs_ep->compl_desc = 0; in dwc2_hsotg_ep_enable()
4114 if (dir_in) { in dwc2_hsotg_ep_enable()
4115 hs_ep->periodic = 1; in dwc2_hsotg_ep_enable()
4132 if (dir_in) in dwc2_hsotg_ep_enable()
4133 hs_ep->periodic = 1; in dwc2_hsotg_ep_enable()
4135 if (hsotg->gadget.speed == USB_SPEED_HIGH) in dwc2_hsotg_ep_enable()
4136 hs_ep->interval = 1 << (desc->bInterval - 1); in dwc2_hsotg_ep_enable()
4147 * if the hardware has dedicated fifos, we must give each IN EP in dwc2_hsotg_ep_enable()
4148 * a unique tx-fifo even if it is non-periodic. in dwc2_hsotg_ep_enable()
4150 if (dir_in && hsotg->dedicated_fifos) { in dwc2_hsotg_ep_enable()
4155 size = hs_ep->ep.maxpacket * hs_ep->mc; in dwc2_hsotg_ep_enable()
4157 if (hsotg->fifo_map & (1 << i)) in dwc2_hsotg_ep_enable()
4161 if (val < size) in dwc2_hsotg_ep_enable()
4164 if (val < fifo_size) { in dwc2_hsotg_ep_enable()
4169 if (!fifo_index) { in dwc2_hsotg_ep_enable()
4170 dev_err(hsotg->dev, in dwc2_hsotg_ep_enable()
4172 ret = -ENOMEM; in dwc2_hsotg_ep_enable()
4176 hsotg->fifo_map |= 1 << fifo_index; in dwc2_hsotg_ep_enable()
4178 hs_ep->fifo_index = fifo_index; in dwc2_hsotg_ep_enable()
4179 hs_ep->fifo_size = fifo_size; in dwc2_hsotg_ep_enable()
4183 if (index && !hs_ep->isochronous) in dwc2_hsotg_ep_enable()
4192 if (hsotg->gadget.speed == USB_SPEED_FULL && in dwc2_hsotg_ep_enable()
4193 hs_ep->isochronous && dir_in) { in dwc2_hsotg_ep_enable()
4200 if ((gsnpsid >= DWC2_CORE_REV_2_72a && in dwc2_hsotg_ep_enable()
4207 dev_dbg(hsotg->dev, "%s: write DxEPCTL=0x%08x\n", in dwc2_hsotg_ep_enable()
4211 dev_dbg(hsotg->dev, "%s: read DxEPCTL=0x%08x\n", in dwc2_hsotg_ep_enable()
4218 spin_unlock_irqrestore(&hsotg->lock, flags); in dwc2_hsotg_ep_enable()
4221 if (ret && using_desc_dma(hsotg) && hs_ep->desc_list) { in dwc2_hsotg_ep_enable()
4222 dmam_free_coherent(hsotg->dev, desc_num * in dwc2_hsotg_ep_enable()
4224 hs_ep->desc_list, hs_ep->desc_list_dma); in dwc2_hsotg_ep_enable()
4225 hs_ep->desc_list = NULL; in dwc2_hsotg_ep_enable()
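/*
 * Usage sketch (hypothetical gadget function driver, not from this file):
 * dwc2_hsotg_ep_enable() is normally reached through the gadget core,
 * e.g.:
 *
 *	ret = config_ep_by_speed(gadget, f, fn_ep);	(selects ep->desc)
 *	if (ret)
 *		return ret;
 *	ret = usb_ep_enable(fn_ep);			(lands here via ep ops)
 */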
4232 * dwc2_hsotg_ep_disable - disable given endpoint
4238 struct dwc2_hsotg *hsotg = hs_ep->parent; in dwc2_hsotg_ep_disable()
4239 int dir_in = hs_ep->dir_in; in dwc2_hsotg_ep_disable()
4240 int index = hs_ep->index; in dwc2_hsotg_ep_disable()
4244 dev_dbg(hsotg->dev, "%s(ep %p)\n", __func__, ep); in dwc2_hsotg_ep_disable()
4246 if (ep == &hsotg->eps_out[0]->ep) { in dwc2_hsotg_ep_disable()
4247 dev_err(hsotg->dev, "%s: called for ep0\n", __func__); in dwc2_hsotg_ep_disable()
4248 return -EINVAL; in dwc2_hsotg_ep_disable()
4251 if (hsotg->op_state != OTG_STATE_B_PERIPHERAL) { in dwc2_hsotg_ep_disable()
4252 dev_err(hsotg->dev, "%s: called in host mode?\n", __func__); in dwc2_hsotg_ep_disable()
4253 return -EINVAL; in dwc2_hsotg_ep_disable()
4260 if (ctrl & DXEPCTL_EPENA) in dwc2_hsotg_ep_disable()
4267 dev_dbg(hsotg->dev, "%s: DxEPCTL=0x%08x\n", __func__, ctrl); in dwc2_hsotg_ep_disable()
4271 dwc2_hsotg_ctrl_epint(hsotg, hs_ep->index, hs_ep->dir_in, 0); in dwc2_hsotg_ep_disable()
4274 kill_all_requests(hsotg, hs_ep, -ESHUTDOWN); in dwc2_hsotg_ep_disable()
4276 hsotg->fifo_map &= ~(1 << hs_ep->fifo_index); in dwc2_hsotg_ep_disable()
4277 hs_ep->fifo_index = 0; in dwc2_hsotg_ep_disable()
4278 hs_ep->fifo_size = 0; in dwc2_hsotg_ep_disable()
4286 struct dwc2_hsotg *hsotg = hs_ep->parent; in dwc2_hsotg_ep_disable_lock()
4290 spin_lock_irqsave(&hsotg->lock, flags); in dwc2_hsotg_ep_disable_lock()
4292 spin_unlock_irqrestore(&hsotg->lock, flags); in dwc2_hsotg_ep_disable_lock()
4297 * on_list - check request is on the given endpoint
4299 * @test: The request to test if it is on the endpoint.
4305 list_for_each_entry_safe(req, treq, &ep->queue, queue) { in on_list()
4306 if (req == test) in on_list()
4314 * dwc2_hsotg_ep_dequeue - dequeue given endpoint
4322 struct dwc2_hsotg *hs = hs_ep->parent; in dwc2_hsotg_ep_dequeue() local
4325 dev_dbg(hs->dev, "ep_dequeue(%p,%p)\n", ep, req); in dwc2_hsotg_ep_dequeue()
4327 spin_lock_irqsave(&hs->lock, flags); in dwc2_hsotg_ep_dequeue()
4329 if (!on_list(hs_ep, hs_req)) { in dwc2_hsotg_ep_dequeue()
4330 spin_unlock_irqrestore(&hs->lock, flags); in dwc2_hsotg_ep_dequeue()
4331 return -EINVAL; in dwc2_hsotg_ep_dequeue()
4335 if (req == &hs_ep->req->req) in dwc2_hsotg_ep_dequeue()
4336 dwc2_hsotg_ep_stop_xfr(hs, hs_ep); in dwc2_hsotg_ep_dequeue()
4338 dwc2_hsotg_complete_request(hs, hs_ep, hs_req, -ECONNRESET); in dwc2_hsotg_ep_dequeue()
4339 spin_unlock_irqrestore(&hs->lock, flags); in dwc2_hsotg_ep_dequeue()
4345 * dwc2_gadget_ep_set_wedge - set wedge on a given endpoint
4352 struct dwc2_hsotg *hs = hs_ep->parent; in dwc2_gadget_ep_set_wedge() local
4357 spin_lock_irqsave(&hs->lock, flags); in dwc2_gadget_ep_set_wedge()
4358 hs_ep->wedged = 1; in dwc2_gadget_ep_set_wedge()
4360 spin_unlock_irqrestore(&hs->lock, flags); in dwc2_gadget_ep_set_wedge()
4366 * dwc2_hsotg_ep_sethalt - set halt on a given endpoint
4369 * @now: If true, stall the endpoint now. Otherwise return -EAGAIN if
4372 * We need to stall the endpoint immediately if request comes from set_feature
4378 struct dwc2_hsotg *hs = hs_ep->parent; in dwc2_hsotg_ep_sethalt() local
4379 int index = hs_ep->index; in dwc2_hsotg_ep_sethalt()
4384 dev_info(hs->dev, "%s(ep %p %s, %d)\n", __func__, ep, ep->name, value); in dwc2_hsotg_ep_sethalt()
4386 if (index == 0) { in dwc2_hsotg_ep_sethalt()
4387 if (value) in dwc2_hsotg_ep_sethalt()
4388 dwc2_hsotg_stall_ep0(hs); in dwc2_hsotg_ep_sethalt()
4390 dev_warn(hs->dev, in dwc2_hsotg_ep_sethalt()
4395 if (hs_ep->isochronous) { in dwc2_hsotg_ep_sethalt()
4396 dev_err(hs->dev, "%s is Isochronous Endpoint\n", ep->name); in dwc2_hsotg_ep_sethalt()
4397 return -EINVAL; in dwc2_hsotg_ep_sethalt()
4400 if (!now && value && !list_empty(&hs_ep->queue)) { in dwc2_hsotg_ep_sethalt()
4401 dev_dbg(hs->dev, "%s request is pending, cannot halt\n", in dwc2_hsotg_ep_sethalt()
4402 ep->name); in dwc2_hsotg_ep_sethalt()
4403 return -EAGAIN; in dwc2_hsotg_ep_sethalt()
4406 if (hs_ep->dir_in) { in dwc2_hsotg_ep_sethalt()
4408 epctl = dwc2_readl(hs, epreg); in dwc2_hsotg_ep_sethalt()
4410 if (value) { in dwc2_hsotg_ep_sethalt()
4412 if (epctl & DXEPCTL_EPENA) in dwc2_hsotg_ep_sethalt()
4416 hs_ep->wedged = 0; in dwc2_hsotg_ep_sethalt()
4418 if (xfertype == DXEPCTL_EPTYPE_BULK || in dwc2_hsotg_ep_sethalt()
4422 dwc2_writel(hs, epctl, epreg); in dwc2_hsotg_ep_sethalt()
4425 epctl = dwc2_readl(hs, epreg); in dwc2_hsotg_ep_sethalt()
4427 if (value) { in dwc2_hsotg_ep_sethalt()
4429 dwc2_hsotg_en_gsint(hs, GINTSTS_GOUTNAKEFF); in dwc2_hsotg_ep_sethalt()
4431 if (!(dwc2_readl(hs, GINTSTS) & GINTSTS_GOUTNAKEFF)) in dwc2_hsotg_ep_sethalt()
4432 dwc2_set_bit(hs, DCTL, DCTL_SGOUTNAK); in dwc2_hsotg_ep_sethalt()
4436 hs_ep->wedged = 0; in dwc2_hsotg_ep_sethalt()
4438 if (xfertype == DXEPCTL_EPTYPE_BULK || in dwc2_hsotg_ep_sethalt()
4441 dwc2_writel(hs, epctl, epreg); in dwc2_hsotg_ep_sethalt()
4445 hs_ep->halted = value; in dwc2_hsotg_ep_sethalt()
4450 * dwc2_hsotg_ep_sethalt_lock - set halt on a given endpoint with lock held
4457 struct dwc2_hsotg *hs = hs_ep->parent; in dwc2_hsotg_ep_sethalt_lock() local
4461 spin_lock_irqsave(&hs->lock, flags); in dwc2_hsotg_ep_sethalt_lock()
4463 spin_unlock_irqrestore(&hs->lock, flags); in dwc2_hsotg_ep_sethalt_lock()
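/*
 * Usage sketch (hypothetical function driver): a protocol stall is
 * normally requested through the gadget core helpers, which reach the
 * locked variant above through the endpoint ops:
 *
 *	usb_ep_set_halt(fn_ep);		(value = 1, stall)
 *	usb_ep_clear_halt(fn_ep);	(value = 0, clear the stall)
 */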
4481 * dwc2_hsotg_init - initialize the usb core
4503 dev_dbg(hsotg->dev, "GRXFSIZ=0x%08x, GNPTXFSIZ=0x%08x\n", in dwc2_hsotg_init()
4509 if (using_dma(hsotg)) in dwc2_hsotg_init()
4514 * dwc2_hsotg_udc_start - prepare the udc for work
4515 * @gadget: The usb gadget state
4516 * @driver: The usb gadget driver
4528 if (!hsotg) { in dwc2_hsotg_udc_start()
4530 return -ENODEV; in dwc2_hsotg_udc_start()
4533 if (!driver) { in dwc2_hsotg_udc_start()
4534 dev_err(hsotg->dev, "%s: no driver\n", __func__); in dwc2_hsotg_udc_start()
4535 return -EINVAL; in dwc2_hsotg_udc_start()
4538 if (driver->max_speed < USB_SPEED_FULL) in dwc2_hsotg_udc_start()
4539 dev_err(hsotg->dev, "%s: bad speed\n", __func__); in dwc2_hsotg_udc_start()
4541 if (!driver->setup) { in dwc2_hsotg_udc_start()
4542 dev_err(hsotg->dev, "%s: missing entry points\n", __func__); in dwc2_hsotg_udc_start()
4543 return -EINVAL; in dwc2_hsotg_udc_start()
4546 WARN_ON(hsotg->driver); in dwc2_hsotg_udc_start()
4548 hsotg->driver = driver; in dwc2_hsotg_udc_start()
4549 hsotg->gadget.dev.of_node = hsotg->dev->of_node; in dwc2_hsotg_udc_start()
4550 hsotg->gadget.speed = USB_SPEED_UNKNOWN; in dwc2_hsotg_udc_start()
4552 if (hsotg->dr_mode == USB_DR_MODE_PERIPHERAL) { in dwc2_hsotg_udc_start()
4554 if (ret) in dwc2_hsotg_udc_start()
4558 if (!IS_ERR_OR_NULL(hsotg->uphy)) in dwc2_hsotg_udc_start()
4559 otg_set_peripheral(hsotg->uphy->otg, &hsotg->gadget); in dwc2_hsotg_udc_start()
4561 spin_lock_irqsave(&hsotg->lock, flags); in dwc2_hsotg_udc_start()
4562 if (dwc2_hw_is_device(hsotg)) { in dwc2_hsotg_udc_start()
4567 hsotg->enabled = 0; in dwc2_hsotg_udc_start()
4568 spin_unlock_irqrestore(&hsotg->lock, flags); in dwc2_hsotg_udc_start()
4570 gadget->sg_supported = using_desc_dma(hsotg); in dwc2_hsotg_udc_start()
4571 dev_info(hsotg->dev, "bound driver %s\n", driver->driver.name); in dwc2_hsotg_udc_start()
4576 hsotg->driver = NULL; in dwc2_hsotg_udc_start()
4581 * dwc2_hsotg_udc_stop - stop the udc
4582 * @gadget: The usb gadget state
4592 if (!hsotg) in dwc2_hsotg_udc_stop()
4593 return -ENODEV; in dwc2_hsotg_udc_stop()
4596 for (ep = 1; ep < hsotg->num_of_eps; ep++) { in dwc2_hsotg_udc_stop()
4597 if (hsotg->eps_in[ep]) in dwc2_hsotg_udc_stop()
4598 dwc2_hsotg_ep_disable_lock(&hsotg->eps_in[ep]->ep); in dwc2_hsotg_udc_stop()
4599 if (hsotg->eps_out[ep]) in dwc2_hsotg_udc_stop()
4600 dwc2_hsotg_ep_disable_lock(&hsotg->eps_out[ep]->ep); in dwc2_hsotg_udc_stop()
4603 spin_lock_irqsave(&hsotg->lock, flags); in dwc2_hsotg_udc_stop()
4605 hsotg->driver = NULL; in dwc2_hsotg_udc_stop()
4606 hsotg->gadget.speed = USB_SPEED_UNKNOWN; in dwc2_hsotg_udc_stop()
4607 hsotg->enabled = 0; in dwc2_hsotg_udc_stop()
4609 spin_unlock_irqrestore(&hsotg->lock, flags); in dwc2_hsotg_udc_stop()
4611 if (!IS_ERR_OR_NULL(hsotg->uphy)) in dwc2_hsotg_udc_stop()
4612 otg_set_peripheral(hsotg->uphy->otg, NULL); in dwc2_hsotg_udc_stop()
4614 if (hsotg->dr_mode == USB_DR_MODE_PERIPHERAL) in dwc2_hsotg_udc_stop()
4621 * dwc2_hsotg_gadget_getframe - read the frame number
4622 * @gadget: The usb gadget state
4632 * dwc2_hsotg_set_selfpowered - set if device is self/bus powered
4633 * @gadget: The usb gadget state
4634 * @is_selfpowered: Whether the device is self-powered
4636 * Set if the device is self or bus powered.
4644 spin_lock_irqsave(&hsotg->lock, flags); in dwc2_hsotg_set_selfpowered()
4645 gadget->is_selfpowered = !!is_selfpowered; in dwc2_hsotg_set_selfpowered()
4646 spin_unlock_irqrestore(&hsotg->lock, flags); in dwc2_hsotg_set_selfpowered()
4652 * dwc2_hsotg_pullup - connect/disconnect the USB PHY
4653 * @gadget: The usb gadget state
4654 * @is_on: Current state of the USB PHY
4656 * Connect/Disconnect the USB PHY pullup
4663 dev_dbg(hsotg->dev, "%s: is_on: %d op_state: %d\n", __func__, is_on, in dwc2_hsotg_pullup()
4664 hsotg->op_state); in dwc2_hsotg_pullup()
4667 if (hsotg->op_state != OTG_STATE_B_PERIPHERAL) { in dwc2_hsotg_pullup()
4668 hsotg->enabled = is_on; in dwc2_hsotg_pullup()
4672 spin_lock_irqsave(&hsotg->lock, flags); in dwc2_hsotg_pullup()
4673 if (is_on) { in dwc2_hsotg_pullup()
4674 hsotg->enabled = 1; in dwc2_hsotg_pullup()
4676 /* Enable ACG feature in device mode, if supported */ in dwc2_hsotg_pullup()
4682 hsotg->enabled = 0; in dwc2_hsotg_pullup()
4685 hsotg->gadget.speed = USB_SPEED_UNKNOWN; in dwc2_hsotg_pullup()
4686 spin_unlock_irqrestore(&hsotg->lock, flags); in dwc2_hsotg_pullup()
4696 dev_dbg(hsotg->dev, "%s: is_active: %d\n", __func__, is_active); in dwc2_hsotg_vbus_session()
4697 spin_lock_irqsave(&hsotg->lock, flags); in dwc2_hsotg_vbus_session()
4700 * If controller is in partial power down state, it must exit from in dwc2_hsotg_vbus_session()
4701 * that state before being initialized / de-initialized in dwc2_hsotg_vbus_session()
4703 if (hsotg->lx_state == DWC2_L2 && hsotg->in_ppd) in dwc2_hsotg_vbus_session()
4710 if (is_active) { in dwc2_hsotg_vbus_session()
4711 hsotg->op_state = OTG_STATE_B_PERIPHERAL; in dwc2_hsotg_vbus_session()
4714 if (hsotg->enabled) { in dwc2_hsotg_vbus_session()
4715 /* Enable ACG feature in device mode, if supported */ in dwc2_hsotg_vbus_session()
4724 spin_unlock_irqrestore(&hsotg->lock, flags); in dwc2_hsotg_vbus_session()
4729 * dwc2_hsotg_vbus_draw - report bMaxPower field
4730 * @gadget: The usb gadget state
4739 if (IS_ERR_OR_NULL(hsotg->uphy)) in dwc2_hsotg_vbus_draw()
4740 return -ENOTSUPP; in dwc2_hsotg_vbus_draw()
4741 return usb_phy_set_power(hsotg->uphy, mA); in dwc2_hsotg_vbus_draw()
4749 spin_lock_irqsave(&hsotg->lock, flags); in dwc2_gadget_set_speed()
4752 hsotg->params.speed = DWC2_SPEED_PARAM_HIGH; in dwc2_gadget_set_speed()
4755 hsotg->params.speed = DWC2_SPEED_PARAM_FULL; in dwc2_gadget_set_speed()
4758 hsotg->params.speed = DWC2_SPEED_PARAM_LOW; in dwc2_gadget_set_speed()
4761 dev_err(hsotg->dev, "invalid speed (%d)\n", speed); in dwc2_gadget_set_speed()
4763 spin_unlock_irqrestore(&hsotg->lock, flags); in dwc2_gadget_set_speed()
4778 * dwc2_hsotg_initep - initialise a single endpoint
4782 * @dir_in: True if direction is in.
4795 if (epnum == 0) in dwc2_hsotg_initep()
4797 else if (dir_in) in dwc2_hsotg_initep()
4802 hs_ep->dir_in = dir_in; in dwc2_hsotg_initep()
4803 hs_ep->index = epnum; in dwc2_hsotg_initep()
4805 snprintf(hs_ep->name, sizeof(hs_ep->name), "ep%d%s", epnum, dir); in dwc2_hsotg_initep()
4807 INIT_LIST_HEAD(&hs_ep->queue); in dwc2_hsotg_initep()
4808 INIT_LIST_HEAD(&hs_ep->ep.ep_list); in dwc2_hsotg_initep()
4811 if (epnum) in dwc2_hsotg_initep()
4812 list_add_tail(&hs_ep->ep.ep_list, &hsotg->gadget.ep_list); in dwc2_hsotg_initep()
4814 hs_ep->parent = hsotg; in dwc2_hsotg_initep()
4815 hs_ep->ep.name = hs_ep->name; in dwc2_hsotg_initep()
4817 if (hsotg->params.speed == DWC2_SPEED_PARAM_LOW) in dwc2_hsotg_initep()
4818 usb_ep_set_maxpacket_limit(&hs_ep->ep, 8); in dwc2_hsotg_initep()
4820 usb_ep_set_maxpacket_limit(&hs_ep->ep, in dwc2_hsotg_initep()
4822 hs_ep->ep.ops = &dwc2_hsotg_ep_ops; in dwc2_hsotg_initep()
4824 if (epnum == 0) { in dwc2_hsotg_initep()
4825 hs_ep->ep.caps.type_control = true; in dwc2_hsotg_initep()
4827 if (hsotg->params.speed != DWC2_SPEED_PARAM_LOW) { in dwc2_hsotg_initep()
4828 hs_ep->ep.caps.type_iso = true; in dwc2_hsotg_initep()
4829 hs_ep->ep.caps.type_bulk = true; in dwc2_hsotg_initep()
4831 hs_ep->ep.caps.type_int = true; in dwc2_hsotg_initep()
4834 if (dir_in) in dwc2_hsotg_initep()
4835 hs_ep->ep.caps.dir_in = true; in dwc2_hsotg_initep()
4837 hs_ep->ep.caps.dir_out = true; in dwc2_hsotg_initep()
4840 * if we're using dma, we need to set the next-endpoint pointer in dwc2_hsotg_initep()
4844 if (using_dma(hsotg)) { in dwc2_hsotg_initep()
4847 if (dir_in) in dwc2_hsotg_initep()
4855 * dwc2_hsotg_hw_cfg - read HW configuration registers
4858 * Read the USB core HW configuration registers
4868 hsotg->num_of_eps = hsotg->hw_params.num_dev_ep; in dwc2_hsotg_hw_cfg()
4871 hsotg->num_of_eps++; in dwc2_hsotg_hw_cfg()
4873 hsotg->eps_in[0] = devm_kzalloc(hsotg->dev, in dwc2_hsotg_hw_cfg()
4876 if (!hsotg->eps_in[0]) in dwc2_hsotg_hw_cfg()
4877 return -ENOMEM; in dwc2_hsotg_hw_cfg()
4879 hsotg->eps_out[0] = hsotg->eps_in[0]; in dwc2_hsotg_hw_cfg()
4881 cfg = hsotg->hw_params.dev_ep_dirs; in dwc2_hsotg_hw_cfg()
4882 for (i = 1, cfg >>= 2; i < hsotg->num_of_eps; i++, cfg >>= 2) { in dwc2_hsotg_hw_cfg()
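		/*
		 * dev_ep_dirs packs two bits per endpoint; judging from the
		 * masks below, a clear bit 1 means the EP can be used as IN
		 * and a clear bit 0 means it can be used as OUT, so 0
		 * denotes a bidirectional endpoint.
		 */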
4885 if (!(ep_type & 2)) { in dwc2_hsotg_hw_cfg()
4886 hsotg->eps_in[i] = devm_kzalloc(hsotg->dev, in dwc2_hsotg_hw_cfg()
4888 if (!hsotg->eps_in[i]) in dwc2_hsotg_hw_cfg()
4889 return -ENOMEM; in dwc2_hsotg_hw_cfg()
4892 if (!(ep_type & 1)) { in dwc2_hsotg_hw_cfg()
4893 hsotg->eps_out[i] = devm_kzalloc(hsotg->dev, in dwc2_hsotg_hw_cfg()
4895 if (!hsotg->eps_out[i]) in dwc2_hsotg_hw_cfg()
4896 return -ENOMEM; in dwc2_hsotg_hw_cfg()
4900 hsotg->fifo_mem = hsotg->hw_params.total_fifo_size; in dwc2_hsotg_hw_cfg()
4901 hsotg->dedicated_fifos = hsotg->hw_params.en_multiple_tx_fifo; in dwc2_hsotg_hw_cfg()
4903 dev_info(hsotg->dev, "EPs: %d, %s fifos, %d entries in SPRAM\n", in dwc2_hsotg_hw_cfg()
4904 hsotg->num_of_eps, in dwc2_hsotg_hw_cfg()
4905 hsotg->dedicated_fifos ? "dedicated" : "shared", in dwc2_hsotg_hw_cfg()
4906 hsotg->fifo_mem); in dwc2_hsotg_hw_cfg()
4911 * dwc2_hsotg_dump - dump state of the udc
4918 struct device *dev = hsotg->dev; in dwc2_hsotg_dump()
4934 for (idx = 1; idx < hsotg->num_of_eps; idx++) { in dwc2_hsotg_dump()
4941 for (idx = 0; idx < hsotg->num_of_eps; idx++) { in dwc2_hsotg_dump()
4943 "ep%d-in: EPCTL=0x%08x, SIZ=0x%08x, DMA=0x%08x\n", idx, in dwc2_hsotg_dump()
4950 "ep%d-out: EPCTL=0x%08x, SIZ=0x%08x, DMA=0x%08x\n", in dwc2_hsotg_dump()
4962 * dwc2_gadget_init - init function for gadget
4968 struct device *dev = hsotg->dev; in dwc2_gadget_init()
4974 hsotg->params.g_np_tx_fifo_size); in dwc2_gadget_init()
4975 dev_dbg(dev, "RXFIFO size: %d\n", hsotg->params.g_rx_fifo_size); in dwc2_gadget_init()
4977 switch (hsotg->params.speed) { in dwc2_gadget_init()
4979 hsotg->gadget.max_speed = USB_SPEED_LOW; in dwc2_gadget_init()
4982 hsotg->gadget.max_speed = USB_SPEED_FULL; in dwc2_gadget_init()
4985 hsotg->gadget.max_speed = USB_SPEED_HIGH; in dwc2_gadget_init()
4989 hsotg->gadget.ops = &dwc2_hsotg_gadget_ops; in dwc2_gadget_init()
4990 hsotg->gadget.name = dev_name(dev); in dwc2_gadget_init()
4991 hsotg->gadget.otg_caps = &hsotg->params.otg_caps; in dwc2_gadget_init()
4992 hsotg->remote_wakeup_allowed = 0; in dwc2_gadget_init()
4994 if (hsotg->params.lpm) in dwc2_gadget_init()
4995 hsotg->gadget.lpm_capable = true; in dwc2_gadget_init()
4997 if (hsotg->dr_mode == USB_DR_MODE_OTG) in dwc2_gadget_init()
4998 hsotg->gadget.is_otg = 1; in dwc2_gadget_init()
4999 else if (hsotg->dr_mode == USB_DR_MODE_PERIPHERAL) in dwc2_gadget_init()
5000 hsotg->op_state = OTG_STATE_B_PERIPHERAL; in dwc2_gadget_init()
5003 if (ret) { in dwc2_gadget_init()
5004 dev_err(hsotg->dev, "Hardware configuration failed: %d\n", ret); in dwc2_gadget_init()
5008 hsotg->ctrl_buff = devm_kzalloc(hsotg->dev, in dwc2_gadget_init()
5010 if (!hsotg->ctrl_buff) in dwc2_gadget_init()
5011 return -ENOMEM; in dwc2_gadget_init()
5013 hsotg->ep0_buff = devm_kzalloc(hsotg->dev, in dwc2_gadget_init()
5015 if (!hsotg->ep0_buff) in dwc2_gadget_init()
5016 return -ENOMEM; in dwc2_gadget_init()
5018 if (using_desc_dma(hsotg)) { in dwc2_gadget_init()
5020 if (ret < 0) in dwc2_gadget_init()
5024 ret = devm_request_irq(hsotg->dev, hsotg->irq, dwc2_hsotg_irq, in dwc2_gadget_init()
5025 IRQF_SHARED, dev_name(hsotg->dev), hsotg); in dwc2_gadget_init()
5026 if (ret < 0) { in dwc2_gadget_init()
5031 /* hsotg->num_of_eps holds number of EPs other than ep0 */ in dwc2_gadget_init()
5033 if (hsotg->num_of_eps == 0) { in dwc2_gadget_init()
5035 return -EINVAL; in dwc2_gadget_init()
5040 INIT_LIST_HEAD(&hsotg->gadget.ep_list); in dwc2_gadget_init()
5041 hsotg->gadget.ep0 = &hsotg->eps_out[0]->ep; in dwc2_gadget_init()
5045 hsotg->ctrl_req = dwc2_hsotg_ep_alloc_request(&hsotg->eps_out[0]->ep, in dwc2_gadget_init()
5047 if (!hsotg->ctrl_req) { in dwc2_gadget_init()
5049 return -ENOMEM; in dwc2_gadget_init()
5053 for (epnum = 0; epnum < hsotg->num_of_eps; epnum++) { in dwc2_gadget_init()
5054 if (hsotg->eps_in[epnum]) in dwc2_gadget_init()
5055 dwc2_hsotg_initep(hsotg, hsotg->eps_in[epnum], in dwc2_gadget_init()
5057 if (hsotg->eps_out[epnum]) in dwc2_gadget_init()
5058 dwc2_hsotg_initep(hsotg, hsotg->eps_out[epnum], in dwc2_gadget_init()
5068 * dwc2_hsotg_remove - remove function for hsotg driver
5074 usb_del_gadget_udc(&hsotg->gadget); in dwc2_hsotg_remove()
5075 dwc2_hsotg_ep_free_request(&hsotg->eps_out[0]->ep, hsotg->ctrl_req); in dwc2_hsotg_remove()
5084 if (hsotg->lx_state != DWC2_L0) in dwc2_hsotg_suspend()
5087 if (hsotg->driver) { in dwc2_hsotg_suspend()
5090 dev_info(hsotg->dev, "suspending usb gadget %s\n", in dwc2_hsotg_suspend()
5091 hsotg->driver->driver.name); in dwc2_hsotg_suspend()
5093 spin_lock_irqsave(&hsotg->lock, flags); in dwc2_hsotg_suspend()
5094 if (hsotg->enabled) in dwc2_hsotg_suspend()
5097 hsotg->gadget.speed = USB_SPEED_UNKNOWN; in dwc2_hsotg_suspend()
5098 spin_unlock_irqrestore(&hsotg->lock, flags); in dwc2_hsotg_suspend()
5100 for (ep = 1; ep < hsotg->num_of_eps; ep++) { in dwc2_hsotg_suspend()
5101 if (hsotg->eps_in[ep]) in dwc2_hsotg_suspend()
5102 dwc2_hsotg_ep_disable_lock(&hsotg->eps_in[ep]->ep); in dwc2_hsotg_suspend()
5103 if (hsotg->eps_out[ep]) in dwc2_hsotg_suspend()
5104 dwc2_hsotg_ep_disable_lock(&hsotg->eps_out[ep]->ep); in dwc2_hsotg_suspend()
5115 if (hsotg->lx_state == DWC2_L2) in dwc2_hsotg_resume()
5118 if (hsotg->driver) { in dwc2_hsotg_resume()
5119 dev_info(hsotg->dev, "resuming usb gadget %s\n", in dwc2_hsotg_resume()
5120 hsotg->driver->driver.name); in dwc2_hsotg_resume()
5122 spin_lock_irqsave(&hsotg->lock, flags); in dwc2_hsotg_resume()
5124 if (hsotg->enabled) { in dwc2_hsotg_resume()
5125 /* Enable ACG feature in device mode, if supported */ in dwc2_hsotg_resume()
5129 spin_unlock_irqrestore(&hsotg->lock, flags); in dwc2_hsotg_resume()
5136 * dwc2_backup_device_registers() - Backup controller device registers.
5137 * When suspending the USB bus, registers need to be backed up
5138 * if controller power is disabled once suspended.
5147 dev_dbg(hsotg->dev, "%s\n", __func__); in dwc2_backup_device_registers()
5150 dr = &hsotg->dr_backup; in dwc2_backup_device_registers()
5152 dr->dcfg = dwc2_readl(hsotg, DCFG); in dwc2_backup_device_registers()
5153 dr->dctl = dwc2_readl(hsotg, DCTL); in dwc2_backup_device_registers()
5154 dr->daintmsk = dwc2_readl(hsotg, DAINTMSK); in dwc2_backup_device_registers()
5155 dr->diepmsk = dwc2_readl(hsotg, DIEPMSK); in dwc2_backup_device_registers()
5156 dr->doepmsk = dwc2_readl(hsotg, DOEPMSK); in dwc2_backup_device_registers()
5158 for (i = 0; i < hsotg->num_of_eps; i++) { in dwc2_backup_device_registers()
5160 dr->diepctl[i] = dwc2_readl(hsotg, DIEPCTL(i)); in dwc2_backup_device_registers()
5163 if (dr->diepctl[i] & DXEPCTL_DPID) in dwc2_backup_device_registers()
5164 dr->diepctl[i] |= DXEPCTL_SETD1PID; in dwc2_backup_device_registers()
5166 dr->diepctl[i] |= DXEPCTL_SETD0PID; in dwc2_backup_device_registers()
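		/*
		 * Folding the saved DPID into a SETD1PID/SETD0PID request
		 * means that writing DIEPCTL back on restore re-arms the
		 * same data toggle the endpoint had before suspend.
		 */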
5168 dr->dieptsiz[i] = dwc2_readl(hsotg, DIEPTSIZ(i)); in dwc2_backup_device_registers()
5169 dr->diepdma[i] = dwc2_readl(hsotg, DIEPDMA(i)); in dwc2_backup_device_registers()
5172 dr->doepctl[i] = dwc2_readl(hsotg, DOEPCTL(i)); in dwc2_backup_device_registers()
5175 if (dr->doepctl[i] & DXEPCTL_DPID) in dwc2_backup_device_registers()
5176 dr->doepctl[i] |= DXEPCTL_SETD1PID; in dwc2_backup_device_registers()
5178 dr->doepctl[i] |= DXEPCTL_SETD0PID; in dwc2_backup_device_registers()
5180 dr->doeptsiz[i] = dwc2_readl(hsotg, DOEPTSIZ(i)); in dwc2_backup_device_registers()
5181 dr->doepdma[i] = dwc2_readl(hsotg, DOEPDMA(i)); in dwc2_backup_device_registers()
5182 dr->dtxfsiz[i] = dwc2_readl(hsotg, DPTXFSIZN(i)); in dwc2_backup_device_registers()
5184 dr->valid = true; in dwc2_backup_device_registers()
5189 * dwc2_restore_device_registers() - Restore controller device registers.
5190 * When resuming the USB bus, device registers need to be restored
5191 * if controller power was disabled.
5196 * Return: 0 if successful, negative error code otherwise
5203 dev_dbg(hsotg->dev, "%s\n", __func__); in dwc2_restore_device_registers()
5206 dr = &hsotg->dr_backup; in dwc2_restore_device_registers()
5207 if (!dr->valid) { in dwc2_restore_device_registers()
5208 dev_err(hsotg->dev, "%s: no device registers to restore\n", in dwc2_restore_device_registers()
5210 return -EINVAL; in dwc2_restore_device_registers()
5212 dr->valid = false; in dwc2_restore_device_registers()
5214 if (!remote_wakeup) in dwc2_restore_device_registers()
5215 dwc2_writel(hsotg, dr->dctl, DCTL); in dwc2_restore_device_registers()
5217 dwc2_writel(hsotg, dr->daintmsk, DAINTMSK); in dwc2_restore_device_registers()
5218 dwc2_writel(hsotg, dr->diepmsk, DIEPMSK); in dwc2_restore_device_registers()
5219 dwc2_writel(hsotg, dr->doepmsk, DOEPMSK); in dwc2_restore_device_registers()
5221 for (i = 0; i < hsotg->num_of_eps; i++) { in dwc2_restore_device_registers()
5223 dwc2_writel(hsotg, dr->dieptsiz[i], DIEPTSIZ(i)); in dwc2_restore_device_registers()
5224 dwc2_writel(hsotg, dr->diepdma[i], DIEPDMA(i)); in dwc2_restore_device_registers()
5225 dwc2_writel(hsotg, dr->doeptsiz[i], DOEPTSIZ(i)); in dwc2_restore_device_registers()
5231 if (using_desc_dma(hsotg) && in dwc2_restore_device_registers()
5232 (dr->diepctl[i] & DXEPCTL_EPENA)) in dwc2_restore_device_registers()
5233 dr->diepdma[i] = hsotg->eps_in[i]->desc_list_dma; in dwc2_restore_device_registers()
5234 dwc2_writel(hsotg, dr->dtxfsiz[i], DPTXFSIZN(i)); in dwc2_restore_device_registers()
5235 dwc2_writel(hsotg, dr->diepctl[i], DIEPCTL(i)); in dwc2_restore_device_registers()
5237 dwc2_writel(hsotg, dr->doeptsiz[i], DOEPTSIZ(i)); in dwc2_restore_device_registers()
5243 if (using_desc_dma(hsotg) && in dwc2_restore_device_registers()
5244 (dr->doepctl[i] & DXEPCTL_EPENA)) in dwc2_restore_device_registers()
5245 dr->doepdma[i] = hsotg->eps_out[i]->desc_list_dma; in dwc2_restore_device_registers()
5246 dwc2_writel(hsotg, dr->doepdma[i], DOEPDMA(i)); in dwc2_restore_device_registers()
5247 dwc2_writel(hsotg, dr->doepctl[i], DOEPCTL(i)); in dwc2_restore_device_registers()
5254 * dwc2_gadget_init_lpm - Configure the core to support LPM in device mode
5263 if (!hsotg->params.lpm) in dwc2_gadget_init_lpm()
5267 val |= hsotg->params.hird_threshold_en ? GLPMCFG_HIRD_THRES_EN : 0; in dwc2_gadget_init_lpm()
5268 val |= hsotg->params.lpm_clock_gating ? GLPMCFG_ENBLSLPM : 0; in dwc2_gadget_init_lpm()
5269 val |= hsotg->params.hird_threshold << GLPMCFG_HIRD_THRES_SHIFT; in dwc2_gadget_init_lpm()
5270 val |= hsotg->params.besl ? GLPMCFG_ENBESL : 0; in dwc2_gadget_init_lpm()
5274 dev_dbg(hsotg->dev, "GLPMCFG=0x%08x\n", dwc2_readl(hsotg, GLPMCFG)); in dwc2_gadget_init_lpm()
5277 if (hsotg->params.service_interval) in dwc2_gadget_init_lpm()
5282 * dwc2_gadget_program_ref_clk - Program GREFCLK register in device mode
5292 val |= hsotg->params.ref_clk_per << GREFCLK_REFCLKPER_SHIFT; in dwc2_gadget_program_ref_clk()
5293 val |= hsotg->params.sof_cnt_wkup_alert << in dwc2_gadget_program_ref_clk()
5297 dev_dbg(hsotg->dev, "GREFCLK=0x%08x\n", dwc2_readl(hsotg, GREFCLK)); in dwc2_gadget_program_ref_clk()
5301 * dwc2_gadget_enter_hibernation() - Put controller in Hibernation.
5305 * Return non-zero if the controller failed to enter hibernation.
5313 hsotg->lx_state = DWC2_L2; in dwc2_gadget_enter_hibernation()
5314 dev_dbg(hsotg->dev, "Start of hibernation completed\n"); in dwc2_gadget_enter_hibernation()
5316 if (ret) { in dwc2_gadget_enter_hibernation()
5317 dev_err(hsotg->dev, "%s: failed to backup global registers\n", in dwc2_gadget_enter_hibernation()
5322 if (ret) { in dwc2_gadget_enter_hibernation()
5323 dev_err(hsotg->dev, "%s: failed to backup device registers\n", in dwc2_gadget_enter_hibernation()
5334 hsotg->hibernated = 1; in dwc2_gadget_enter_hibernation()
5362 /* Save gpwrdn register for later use in case of a stschng interrupt */ in dwc2_gadget_enter_hibernation()
5363 hsotg->gr_backup.gpwrdn = dwc2_readl(hsotg, GPWRDN); in dwc2_gadget_enter_hibernation()
5364 dev_dbg(hsotg->dev, "Hibernation completed\n"); in dwc2_gadget_enter_hibernation()
5372 * resume/reset and device initiated remote-wakeup.
5378 * Return non-zero if the controller failed to exit hibernation.
5390 gr = &hsotg->gr_backup; in dwc2_gadget_exit_hibernation()
5391 dr = &hsotg->dr_backup; in dwc2_gadget_exit_hibernation()
5393 if (!hsotg->hibernated) { in dwc2_gadget_exit_hibernation()
5394 dev_dbg(hsotg->dev, "Already exited from Hibernation\n"); in dwc2_gadget_exit_hibernation()
5397 dev_dbg(hsotg->dev, in dwc2_gadget_exit_hibernation()
5403 if (!reset) { in dwc2_gadget_exit_hibernation()
5408 /* De-assert Restore */ in dwc2_gadget_exit_hibernation()
5414 if (!rem_wakeup) { in dwc2_gadget_exit_hibernation()
5421 dwc2_writel(hsotg, gr->gusbcfg, GUSBCFG); in dwc2_gadget_exit_hibernation()
5422 dwc2_writel(hsotg, dr->dcfg, DCFG); in dwc2_gadget_exit_hibernation()
5423 dwc2_writel(hsotg, dr->dctl, DCTL); in dwc2_gadget_exit_hibernation()
5425 /* On USB Reset, reset device address to zero */ in dwc2_gadget_exit_hibernation()
5426 if (reset) in dwc2_gadget_exit_hibernation()
5429 /* De-assert Wakeup Logic */ in dwc2_gadget_exit_hibernation()
5434 if (rem_wakeup) { in dwc2_gadget_exit_hibernation()
5437 dwc2_writel(hsotg, dr->dctl | DCTL_RMTWKUPSIG, DCTL); in dwc2_gadget_exit_hibernation()
5452 if (ret) { in dwc2_gadget_exit_hibernation()
5453 dev_err(hsotg->dev, "%s: failed to restore registers\n", in dwc2_gadget_exit_hibernation()
5460 if (ret) { in dwc2_gadget_exit_hibernation()
5461 dev_err(hsotg->dev, "%s: failed to restore device registers\n", in dwc2_gadget_exit_hibernation()
5466 if (rem_wakeup) { in dwc2_gadget_exit_hibernation()
5473 hsotg->hibernated = 0; in dwc2_gadget_exit_hibernation()
5474 hsotg->lx_state = DWC2_L0; in dwc2_gadget_exit_hibernation()
5475 dev_dbg(hsotg->dev, "Hibernation recovery completes here\n"); in dwc2_gadget_exit_hibernation()
5481 * dwc2_gadget_enter_partial_power_down() - Put controller in partial
5486 * Return: non-zero if failed to enter device partial power down.
5495 dev_dbg(hsotg->dev, "Entering device partial power down started.\n"); in dwc2_gadget_enter_partial_power_down()
5499 if (ret) { in dwc2_gadget_enter_partial_power_down()
5500 dev_err(hsotg->dev, "%s: failed to backup global registers\n", in dwc2_gadget_enter_partial_power_down()
5506 if (ret) { in dwc2_gadget_enter_partial_power_down()
5507 dev_err(hsotg->dev, "%s: failed to backup device registers\n", in dwc2_gadget_enter_partial_power_down()
5533 hsotg->in_ppd = 1; in dwc2_gadget_enter_partial_power_down()
5534 hsotg->lx_state = DWC2_L2; in dwc2_gadget_enter_partial_power_down()
5536 dev_dbg(hsotg->dev, "Entering device partial power down completed.\n"); in dwc2_gadget_enter_partial_power_down()
5542 * dwc2_gadget_exit_partial_power_down() - Exit controller from device partial
5548 * Return: non-zero if failed to exit device partial power down.
5560 dr = &hsotg->dr_backup; in dwc2_gadget_exit_partial_power_down()
5562 dev_dbg(hsotg->dev, "Exiting device partial Power Down started.\n"); in dwc2_gadget_exit_partial_power_down()
5577 if (restore) { in dwc2_gadget_exit_partial_power_down()
5579 if (ret) { in dwc2_gadget_exit_partial_power_down()
5580 dev_err(hsotg->dev, "%s: failed to restore registers\n", in dwc2_gadget_exit_partial_power_down()
5585 dwc2_writel(hsotg, dr->dcfg, DCFG); in dwc2_gadget_exit_partial_power_down()
5588 if (ret) { in dwc2_gadget_exit_partial_power_down()
5589 dev_err(hsotg->dev, "%s: failed to restore device registers\n", in dwc2_gadget_exit_partial_power_down()
5595 /* Set the Power-On Programming done bit */ in dwc2_gadget_exit_partial_power_down()
5601 hsotg->in_ppd = 0; in dwc2_gadget_exit_partial_power_down()
5602 hsotg->lx_state = DWC2_L0; in dwc2_gadget_exit_partial_power_down()
5604 dev_dbg(hsotg->dev, "Exiting device partial Power Down completed.\n"); in dwc2_gadget_exit_partial_power_down()
5609 * dwc2_gadget_enter_clock_gating() - Put controller in clock gating.
5613 * Return: non-zero if the controller failed to enter device clock gating.
5621 dev_dbg(hsotg->dev, "Entering device clock gating.\n"); in dwc2_gadget_enter_clock_gating()
5635 hsotg->lx_state = DWC2_L2; in dwc2_gadget_enter_clock_gating()
5636 hsotg->bus_suspended = true; in dwc2_gadget_enter_clock_gating()
5640 * dwc2_gadget_exit_clock_gating() - Exit controller from device clock gating.
5652 dev_dbg(hsotg->dev, "Exiting device clock gating.\n"); in dwc2_gadget_exit_clock_gating()
5666 if (rem_wakeup) { in dwc2_gadget_exit_clock_gating()
5675 hsotg->lx_state = DWC2_L0; in dwc2_gadget_exit_clock_gating()
5676 hsotg->bus_suspended = false; in dwc2_gadget_exit_clock_gating()