Lines matching refs: preq
340 struct cdnsp_request *preq) in cdnsp_request_to_transfer_ring() argument
342 return cdnsp_get_transfer_ring(pdev, preq->pep, in cdnsp_request_to_transfer_ring()
343 preq->request.stream_id); in cdnsp_request_to_transfer_ring()
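
cdnsp_request_to_transfer_ring() above simply forwards the request's stream ID to the per-endpoint ring lookup. A minimal sketch of that lookup, assuming the usual xHCI-style layout (stream ID 0 means the endpoint's default ring, non-zero IDs index the stream ring array); flag and field names not visible in the listing, and the error reporting, are assumptions:

	/* Sketch only: the real body may differ in its checks and messages. */
	static struct cdnsp_ring *
	cdnsp_get_transfer_ring(struct cdnsp_device *pdev, struct cdnsp_ep *pep,
				unsigned int stream_id)
	{
		/* No streams configured: use the endpoint's ordinary transfer ring. */
		if (!(pep->ep_state & EP_HAS_STREAMS))
			return pep->ring;

		/* With streams, ID 0 is invalid and the ID must be in range. */
		if (!stream_id || stream_id >= pep->stream_info.num_streams) {
			dev_err(pdev->dev, "ERR: invalid stream ID %u for %s\n",
				stream_id, pep->name);
			return NULL;
		}

		return pep->stream_info.stream_rings[stream_id];
	}
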
602 struct cdnsp_request *preq; in cdnsp_unmap_td_bounce_buffer() local
608 preq = td->preq; in cdnsp_unmap_td_bounce_buffer()
610 trace_cdnsp_bounce_unmap(td->preq, seg->bounce_len, seg->bounce_offs, in cdnsp_unmap_td_bounce_buffer()
613 if (!preq->direction) { in cdnsp_unmap_td_bounce_buffer()
623 len = sg_pcopy_from_buffer(preq->request.sg, preq->request.num_sgs, in cdnsp_unmap_td_bounce_buffer()
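
For an OUT request (!preq->direction) the received data sits in the segment's bounce buffer, so it must be copied back into the request's scatterlist before giveback; sg_pcopy_from_buffer() is the helper used for that. A hedged sketch of the copy-back step (the bounce_buf field, the wrapper name and the warning text are assumptions; the rest follows the fragments above):

	#include <linux/scatterlist.h>

	/* Sketch: copy bounce-buffer contents back into the request's SG list
	 * after an OUT transfer completed into the bounce buffer.
	 */
	static void cdnsp_copy_bounce_back(struct cdnsp_request *preq,
					   struct cdnsp_segment *seg)
	{
		size_t len;

		/* seg->bounce_offs is where the bounced chunk sits inside the
		 * request, seg->bounce_len is its length.
		 */
		len = sg_pcopy_from_buffer(preq->request.sg, preq->request.num_sgs,
					   seg->bounce_buf, seg->bounce_len,
					   seg->bounce_offs);
		if (len != seg->bounce_len)
			dev_warn(preq->pep->pdev->dev,
				 "WARN: only %zu of %u bounce bytes copied back\n",
				 len, seg->bounce_len);
	}
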
691 struct cdnsp_request *preq, in cdnsp_remove_request() argument
705 trace_cdnsp_remove_request_td(preq); in cdnsp_remove_request()
707 cur_td = &preq->td; in cdnsp_remove_request()
708 ep_ring = cdnsp_request_to_transfer_ring(pdev, preq); in cdnsp_remove_request()
715 hw_deq = cdnsp_get_hw_deq(pdev, pep->idx, preq->request.stream_id); in cdnsp_remove_request()
722 cdnsp_find_new_dequeue_state(pdev, pep, preq->request.stream_id, in cdnsp_remove_request()
745 cdnsp_gadget_giveback(pep, cur_td->preq, status); in cdnsp_remove_request()
894 struct cdnsp_request *preq = td->preq; in cdnsp_td_cleanup() local
903 if (preq->request.actual > preq->request.length) { in cdnsp_td_cleanup()
904 preq->request.actual = 0; in cdnsp_td_cleanup()
910 preq->pep->stream_info.td_count--; in cdnsp_td_cleanup()
912 cdnsp_gadget_giveback(preq->pep, preq, *status); in cdnsp_td_cleanup()
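
The cleanup step shown here clamps a bogus completion length, drops the TD from the ring's bookkeeping and hands the request back to the gadget layer. A condensed sketch of that flow; the listed lines only show the clamp, the td_count decrement and the giveback, so the list handling and the guard around the decrement are assumptions:

	/* Sketch of the TD cleanup step: sanity-check the completed length,
	 * drop the TD and give the request back to the gadget layer.
	 */
	static void cdnsp_td_cleanup_sketch(struct cdnsp_device *pdev,
					    struct cdnsp_td *td, int *status)
	{
		struct cdnsp_request *preq = td->preq;

		/* A transfer can never complete with more data than was asked for. */
		if (preq->request.actual > preq->request.length) {
			preq->request.actual = 0;
			*status = 0;
		}

		/* Unlink the TD from the ring's TD list (assumed). */
		list_del_init(&td->td_list);

		/* Stream endpoints keep a count of TDs still queued (guard assumed). */
		if (preq->request.stream_id)
			preq->pep->stream_info.td_count--;

		cdnsp_gadget_giveback(preq->pep, preq, *status);
	}
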
1014 td->preq->request.actual = td->preq->request.length - remaining; in cdnsp_process_ctrl_td()
1019 td->preq->request.actual = td->preq->request.length; in cdnsp_process_ctrl_td()
1047 struct cdnsp_request *preq = td->preq; in cdnsp_process_isoc_td() local
1059 requested = preq->request.length; in cdnsp_process_isoc_td()
1064 preq->request.status = 0; in cdnsp_process_isoc_td()
1067 preq->request.status = 0; in cdnsp_process_isoc_td()
1072 preq->request.status = -EOVERFLOW; in cdnsp_process_isoc_td()
1079 preq->request.status = 0; in cdnsp_process_isoc_td()
1088 preq->request.status = -1; in cdnsp_process_isoc_td()
1099 td->preq->request.actual += td_length; in cdnsp_process_isoc_td()
1113 td->preq->request.status = -EXDEV; in cdnsp_skip_isoc_td()
1114 td->preq->request.actual = 0; in cdnsp_skip_isoc_td()
1143 requested = td->preq->request.length; in cdnsp_process_bulk_intr_td()
1151 td->preq->request.actual = remaining; in cdnsp_process_bulk_intr_td()
1165 td->preq->request.actual = ep_trb_len; in cdnsp_process_bulk_intr_td()
1350 desc = td->preq->pep->endpoint.desc; in cdnsp_handle_tx_event()
1662 struct cdnsp_request *preq, in cdnsp_prepare_transfer() argument
1668 ep_ring = cdnsp_get_transfer_ring(pdev, preq->pep, in cdnsp_prepare_transfer()
1669 preq->request.stream_id); in cdnsp_prepare_transfer()
1674 GET_EP_CTX_STATE(preq->pep->out_ctx), in cdnsp_prepare_transfer()
1679 INIT_LIST_HEAD(&preq->td.td_list); in cdnsp_prepare_transfer()
1680 preq->td.preq = preq; in cdnsp_prepare_transfer()
1683 list_add_tail(&preq->td.td_list, &ep_ring->td_list); in cdnsp_prepare_transfer()
1685 preq->pep->stream_info.td_count++; in cdnsp_prepare_transfer()
1687 preq->td.start_seg = ep_ring->enq_seg; in cdnsp_prepare_transfer()
1688 preq->td.first_trb = ep_ring->enqueue; in cdnsp_prepare_transfer()
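
cdnsp_prepare_transfer() is where a request picks up its TD bookkeeping: the TD is linked onto the ring's td_list and remembers the segment and TRB it starts at, so the completion and cancel paths can find it later. A reduced sketch assembled from the fragments above; the cdnsp_prepare_ring() call is inferred from the GET_EP_CTX_STATE() fragment, and the GFP flag and the stream guard are assumptions:

	/* Sketch: attach the request's TD to the transfer ring before any TRBs
	 * are written for it.
	 */
	static int cdnsp_prepare_transfer_sketch(struct cdnsp_device *pdev,
						 struct cdnsp_request *preq,
						 unsigned int num_trbs)
	{
		struct cdnsp_ring *ep_ring;
		int ret;

		ep_ring = cdnsp_get_transfer_ring(pdev, preq->pep,
						  preq->request.stream_id);
		if (!ep_ring)
			return -EINVAL;

		/* Check endpoint state and make room for num_trbs TRBs (inferred). */
		ret = cdnsp_prepare_ring(pdev, ep_ring,
					 GET_EP_CTX_STATE(preq->pep->out_ctx),
					 num_trbs, GFP_ATOMIC);
		if (ret)
			return ret;

		INIT_LIST_HEAD(&preq->td.td_list);
		preq->td.preq = preq;

		/* Queue this TD at the tail of the endpoint ring's TD list. */
		list_add_tail(&preq->td.td_list, &ep_ring->td_list);

		if (preq->request.stream_id)
			preq->pep->stream_info.td_count++;

		/* Remember where the TD starts so completion handling can find it. */
		preq->td.start_seg = ep_ring->enq_seg;
		preq->td.first_trb = ep_ring->enqueue;

		return 0;
	}
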
1705 static unsigned int count_trbs_needed(struct cdnsp_request *preq) in count_trbs_needed() argument
1707 return cdnsp_count_trbs(preq->request.dma, preq->request.length); in count_trbs_needed()
1710 static unsigned int count_sg_trbs_needed(struct cdnsp_request *preq) in count_sg_trbs_needed() argument
1715 full_len = preq->request.length; in count_sg_trbs_needed()
1717 for_each_sg(preq->request.sg, sg, preq->request.num_sgs, i) { in count_sg_trbs_needed()
1729 static unsigned int count_isoc_trbs_needed(struct cdnsp_request *preq) in count_isoc_trbs_needed() argument
1731 return cdnsp_count_trbs(preq->request.dma, preq->request.length); in count_isoc_trbs_needed()
1734 static void cdnsp_check_trb_math(struct cdnsp_request *preq, int running_total) in cdnsp_check_trb_math() argument
1736 if (running_total != preq->request.length) in cdnsp_check_trb_math()
1737 dev_err(preq->pep->pdev->dev, in cdnsp_check_trb_math()
1740 preq->pep->name, running_total, in cdnsp_check_trb_math()
1741 preq->request.length, preq->request.actual); in cdnsp_check_trb_math()
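
All three count_*_trbs_needed() helpers boil down to counting how many boundary-limited TRBs a (dma, length) pair needs, and cdnsp_check_trb_math() later complains if the queuing loop's running total never reached the request length. A self-contained sketch of that arithmetic, assuming the xHCI-style 64 KiB TRB_MAX_BUFF_SIZE:

	#include <stdint.h>
	#include <stdio.h>

	#define TRB_MAX_BUFF_SHIFT	16		/* assumed: 64 KiB per TRB, as in xHCI */
	#define TRB_MAX_BUFF_SIZE	(1u << TRB_MAX_BUFF_SHIFT)

	/*
	 * How many TRBs does one buffer need?  A TRB may not cross a 64 KiB
	 * boundary, so the answer depends on the starting offset, not just on
	 * the length; zero-length requests still consume one TRB.
	 */
	static unsigned int count_trbs(uint64_t addr, uint64_t len)
	{
		uint64_t offs = addr & (TRB_MAX_BUFF_SIZE - 1);
		unsigned int num = (len + offs + TRB_MAX_BUFF_SIZE - 1) / TRB_MAX_BUFF_SIZE;

		return num ? num : 1;
	}

	int main(void)
	{
		/* 4096 bytes starting 256 bytes below a 64 KiB boundary -> 2 TRBs. */
		printf("%u\n", count_trbs(0xffff00, 0x1000));

		/*
		 * cdnsp_check_trb_math() is the matching sanity check: after queuing,
		 * the running total of TRB buffer lengths must equal request.length.
		 */
		return 0;
	}
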
1765 struct cdnsp_request *preq, in cdnsp_td_remainder() argument
1780 maxp = usb_endpoint_maxp(preq->pep->endpoint.desc); in cdnsp_td_remainder()
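
cdnsp_td_remainder() produces the TD-size field written into each TRB: how many max-packet-sized packets remain in the TD after the current TRB completes. A sketch of the formula, modelled on the equivalent xHCI helper; the early-out conditions and the 31-packet saturation are assumptions carried over from xHCI, where the cap is applied when the field is encoded:

	/* Sketch of the TD-size ("remainder") value written into a TRB. */
	static u32 td_remainder_sketch(unsigned int transferred,
				       unsigned int trb_buff_len,
				       unsigned int td_total_len,
				       unsigned int maxp, bool more_trbs_coming)
	{
		u32 total_packets, packets_done;

		/* The last TRB of a TD always reports zero packets remaining. */
		if (!more_trbs_coming || trb_buff_len == td_total_len)
			return 0;

		total_packets = DIV_ROUND_UP(td_total_len, maxp);
		packets_done = (transferred + trb_buff_len) / maxp;

		/* The TRB field is 5 bits wide, so the value saturates at 31. */
		return min_t(u32, 31, total_packets - packets_done);
	}
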
1788 struct cdnsp_request *preq, u32 enqd_len, in cdnsp_align_td() argument
1796 max_pkt = usb_endpoint_maxp(preq->pep->endpoint.desc); in cdnsp_align_td()
1806 trace_cdnsp_bounce_align_td_split(preq, *trb_buff_len, in cdnsp_align_td()
1818 if (new_buff_len > (preq->request.length - enqd_len)) in cdnsp_align_td()
1819 new_buff_len = (preq->request.length - enqd_len); in cdnsp_align_td()
1822 if (preq->direction) { in cdnsp_align_td()
1823 sg_pcopy_to_buffer(preq->request.sg, in cdnsp_align_td()
1824 preq->request.num_mapped_sgs, in cdnsp_align_td()
1844 trace_cdnsp_bounce_map(preq, new_buff_len, enqd_len, seg->bounce_dma, in cdnsp_align_td()
1854 int cdnsp_queue_bulk_tx(struct cdnsp_device *pdev, struct cdnsp_request *preq) in cdnsp_queue_bulk_tx() argument
1871 ring = cdnsp_request_to_transfer_ring(pdev, preq); in cdnsp_queue_bulk_tx()
1875 full_len = preq->request.length; in cdnsp_queue_bulk_tx()
1877 if (preq->request.num_sgs) { in cdnsp_queue_bulk_tx()
1878 num_sgs = preq->request.num_sgs; in cdnsp_queue_bulk_tx()
1879 sg = preq->request.sg; in cdnsp_queue_bulk_tx()
1882 num_trbs = count_sg_trbs_needed(preq); in cdnsp_queue_bulk_tx()
1884 num_trbs = count_trbs_needed(preq); in cdnsp_queue_bulk_tx()
1885 addr = (u64)preq->request.dma; in cdnsp_queue_bulk_tx()
1889 pep = preq->pep; in cdnsp_queue_bulk_tx()
1892 if (preq->request.zero && preq->request.length && in cdnsp_queue_bulk_tx()
1898 ret = cdnsp_prepare_transfer(pdev, preq, num_trbs); in cdnsp_queue_bulk_tx()
1938 if (cdnsp_align_td(pdev, preq, enqd_len, in cdnsp_queue_bulk_tx()
1943 preq->td.bounce_seg = ring->enq_seg; in cdnsp_queue_bulk_tx()
1957 preq->td.last_trb = ring->enqueue; in cdnsp_queue_bulk_tx()
1962 if (!preq->direction) in cdnsp_queue_bulk_tx()
1967 full_len, preq, in cdnsp_queue_bulk_tx()
1997 cdnsp_check_trb_math(preq, enqd_len); in cdnsp_queue_bulk_tx()
1998 ret = cdnsp_giveback_first_trb(pdev, pep, preq->request.stream_id, in cdnsp_queue_bulk_tx()
2002 preq->td.drbl = 1; in cdnsp_queue_bulk_tx()
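
Both the bulk path above and the control path below reserve one extra TRB when request.zero is set and the length is an exact multiple of the endpoint's max packet size, so the transfer can be terminated with a zero-length packet. The test reduces to something like this (sketch; the helper name is illustrative):

	/* Does this request need a trailing zero-length packet (one extra TRB)? */
	static bool cdnsp_need_zero_pkt(struct cdnsp_request *preq, unsigned int maxp)
	{
		return preq->request.zero && preq->request.length &&
		       (preq->request.length % maxp) == 0;
	}

When the condition holds, the queuing path bumps its TRB count so a zero-length TRB can follow the data TRBs.
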
2007 int cdnsp_queue_ctrl_tx(struct cdnsp_device *pdev, struct cdnsp_request *preq) in cdnsp_queue_ctrl_tx() argument
2010 struct cdnsp_ep *pep = preq->pep; in cdnsp_queue_ctrl_tx()
2016 ep_ring = cdnsp_request_to_transfer_ring(pdev, preq); in cdnsp_queue_ctrl_tx()
2025 if (preq->request.zero && preq->request.length && in cdnsp_queue_ctrl_tx()
2026 (preq->request.length % maxp == 0)) { in cdnsp_queue_ctrl_tx()
2031 ret = cdnsp_prepare_transfer(pdev, preq, num_trbs); in cdnsp_queue_ctrl_tx()
2036 if (preq->request.length > 0) { in cdnsp_queue_ctrl_tx()
2047 length_field = TRB_LEN(preq->request.length) | in cdnsp_queue_ctrl_tx()
2051 lower_32_bits(preq->request.dma), in cdnsp_queue_ctrl_tx()
2052 upper_32_bits(preq->request.dma), length_field, in cdnsp_queue_ctrl_tx()
2064 lower_32_bits(preq->request.dma), in cdnsp_queue_ctrl_tx()
2065 upper_32_bits(preq->request.dma), 0, in cdnsp_queue_ctrl_tx()
2075 preq->td.last_trb = ep_ring->enqueue; in cdnsp_queue_ctrl_tx()
2078 if (preq->request.length == 0) in cdnsp_queue_ctrl_tx()
2083 if (preq->request.length > 0 && pdev->ep0_expect_in) in cdnsp_queue_ctrl_tx()
2097 cdnsp_ring_ep_doorbell(pdev, pep, preq->request.stream_id); in cdnsp_queue_ctrl_tx()
2146 struct cdnsp_request *preq, in cdnsp_get_burst_count() argument
2154 max_burst = preq->pep->endpoint.comp_desc->bMaxBurst; in cdnsp_get_burst_count()
2168 struct cdnsp_request *preq, in cdnsp_get_last_burst_packet_count() argument
2176 max_burst = preq->pep->endpoint.comp_desc->bMaxBurst; in cdnsp_get_last_burst_packet_count()
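
For SuperSpeed isochronous endpoints the first TRB of a TD carries a burst count and a last-burst packet count derived from the companion descriptor's bMaxBurst. Following the equivalent xHCI helpers, the arithmetic is roughly as below (sketch; the real driver may treat zero-length packets and non-SuperSpeed devices differently):

	/* Sketch: number of bursts, minus one, needed for total_packets packets. */
	static u32 burst_count_sketch(u32 total_packets, u8 max_burst)
	{
		if (!total_packets)
			return 0;
		return DIV_ROUND_UP(total_packets, max_burst + 1) - 1;
	}

	/* Sketch: packets, minus one, carried by the final (possibly short) burst. */
	static u32 last_burst_pkt_count_sketch(u32 total_packets, u8 max_burst)
	{
		u32 residue = total_packets % (max_burst + 1);

		if (!residue)		/* the last burst is full */
			return max_burst;
		return residue - 1;
	}
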
2196 struct cdnsp_request *preq) in cdnsp_queue_isoc_tx() argument
2211 ep_ring = preq->pep->ring; in cdnsp_queue_isoc_tx()
2214 td_len = preq->request.length; in cdnsp_queue_isoc_tx()
2215 addr = (u64)preq->request.dma; in cdnsp_queue_isoc_tx()
2218 max_pkt = usb_endpoint_maxp(preq->pep->endpoint.desc); in cdnsp_queue_isoc_tx()
2225 burst_count = cdnsp_get_burst_count(pdev, preq, total_pkt_count); in cdnsp_queue_isoc_tx()
2226 last_burst_pkt = cdnsp_get_last_burst_packet_count(pdev, preq, in cdnsp_queue_isoc_tx()
2228 trbs_per_td = count_isoc_trbs_needed(preq); in cdnsp_queue_isoc_tx()
2230 ret = cdnsp_prepare_transfer(pdev, preq, trbs_per_td); in cdnsp_queue_isoc_tx()
2256 trb_buff_len, td_len, preq, in cdnsp_queue_isoc_tx()
2270 if (usb_endpoint_dir_out(preq->pep->endpoint.desc)) in cdnsp_queue_isoc_tx()
2279 preq->td.last_trb = ep_ring->enqueue; in cdnsp_queue_isoc_tx()
2299 cdnsp_giveback_first_trb(pdev, preq->pep, preq->request.stream_id, in cdnsp_queue_isoc_tx()
2306 list_del_init(&preq->td.td_list); in cdnsp_queue_isoc_tx()
2316 preq->td.last_trb = ep_ring->enqueue; in cdnsp_queue_isoc_tx()
2318 cdnsp_td_to_noop(pdev, ep_ring, &preq->td, true); in cdnsp_queue_isoc_tx()
2321 ep_ring->enqueue = preq->td.first_trb; in cdnsp_queue_isoc_tx()
2322 ep_ring->enq_seg = preq->td.start_seg; in cdnsp_queue_isoc_tx()
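
When giving back the first TRB fails, the isoc path unwinds everything it queued: the TD is unlinked, the already-written TRBs are turned into no-ops, and the enqueue pointer is rewound to the start of the TD. A sketch of that unwind, following the order of the fragments above (the wrapper name is illustrative):

	/* Sketch of the isoc error unwind: forget the TD, neutralize its TRBs,
	 * and rewind the enqueue pointer so the ring space can be reused.
	 */
	static void cdnsp_unwind_isoc_td(struct cdnsp_device *pdev,
					 struct cdnsp_ring *ep_ring,
					 struct cdnsp_request *preq)
	{
		list_del_init(&preq->td.td_list);

		/* Everything from first_trb to the current enqueue belongs to the TD. */
		preq->td.last_trb = ep_ring->enqueue;

		/* Overwrite the queued TRBs with no-ops so the hardware skips them. */
		cdnsp_td_to_noop(pdev, ep_ring, &preq->td, true);

		/* Rewind the enqueue pointer to where the TD started. */
		ep_ring->enqueue = preq->td.first_trb;
		ep_ring->enq_seg = preq->td.start_seg;
	}
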
2328 struct cdnsp_request *preq) in cdnsp_queue_isoc_tx_prepare() argument
2335 ep_ring = preq->pep->ring; in cdnsp_queue_isoc_tx_prepare()
2336 ep_state = GET_EP_CTX_STATE(preq->pep->out_ctx); in cdnsp_queue_isoc_tx_prepare()
2337 num_trbs = count_isoc_trbs_needed(preq); in cdnsp_queue_isoc_tx_prepare()
2348 return cdnsp_queue_isoc_tx(pdev, preq); in cdnsp_queue_isoc_tx_prepare()