Lines Matching refs:hw_ep  (sketches of the structure and helpers these references imply follow the listing)

213 struct musb_hw_ep *hw_ep = qh->hw_ep; in musb_start_urb() local
216 int epnum = hw_ep->epnum; in musb_start_urb()
255 musb_ep_set_qh(hw_ep, is_in, qh); in musb_start_urb()
290 hw_ep->tx_channel ? "dma" : "pio"); in musb_start_urb()
292 if (!hw_ep->tx_channel) in musb_start_urb()
293 musb_h_tx_start(hw_ep); in musb_start_urb()
295 musb_h_tx_dma_start(hw_ep); in musb_start_urb()
337 void __iomem *epio = qh->hw_ep->regs; in musb_save_toggle()
361 struct musb_hw_ep *hw_ep, int is_in) in musb_advance_schedule() argument
363 struct musb_qh *qh = musb_ep_get_qh(hw_ep, is_in); in musb_advance_schedule()
364 struct musb_hw_ep *ep = qh->hw_ep; in musb_advance_schedule()
430 hw_ep->epnum, is_in ? 'R' : 'T', next_urb(qh)); in musb_advance_schedule()
435 static u16 musb_h_flush_rxfifo(struct musb_hw_ep *hw_ep, u16 csr) in musb_h_flush_rxfifo() argument
447 musb_writew(hw_ep->regs, MUSB_RXCSR, csr); in musb_h_flush_rxfifo()
448 musb_writew(hw_ep->regs, MUSB_RXCSR, csr); in musb_h_flush_rxfifo()
451 return musb_readw(hw_ep->regs, MUSB_RXCSR); in musb_h_flush_rxfifo()
466 struct musb_hw_ep *hw_ep = musb->endpoints + epnum; in musb_host_packet_rx() local
467 void __iomem *epio = hw_ep->regs; in musb_host_packet_rx()
468 struct musb_qh *qh = hw_ep->in_qh; in musb_host_packet_rx()
533 musb_read_fifo(hw_ep, length, buf); in musb_host_packet_rx()
538 musb_h_flush_rxfifo(hw_ep, csr); in musb_host_packet_rx()
622 struct musb_hw_ep *hw_ep, struct musb_qh *qh, in musb_tx_dma_program() argument
625 struct dma_channel *channel = hw_ep->tx_channel; in musb_tx_dma_program()
626 void __iomem *epio = hw_ep->regs; in musb_tx_dma_program()
673 hw_ep->tx_channel = NULL; in musb_tx_dma_program()
695 struct musb_hw_ep *hw_ep = musb->endpoints + epnum; in musb_ep_program() local
696 void __iomem *epio = hw_ep->regs; in musb_ep_program()
697 struct musb_qh *qh = musb_ep_get_qh(hw_ep, !is_out); in musb_ep_program()
713 dma_channel = is_out ? hw_ep->tx_channel : hw_ep->rx_channel; in musb_ep_program()
716 dma_controller, hw_ep, is_out); in musb_ep_program()
718 hw_ep->tx_channel = dma_channel; in musb_ep_program()
720 hw_ep->rx_channel = dma_channel; in musb_ep_program()
742 musb_h_tx_flush_fifo(hw_ep); in musb_ep_program()
772 musb_h_ep0_flush_fifo(hw_ep); in musb_ep_program()
789 hw_ep->max_packet_sz_tx); in musb_ep_program()
803 load_count = min((u32) hw_ep->max_packet_sz_tx, in musb_ep_program()
809 hw_ep, qh, urb, offset, len)) in musb_ep_program()
815 musb_write_fifo(hw_ep, load_count, buf); in musb_ep_program()
825 if (hw_ep->rx_reinit) { in musb_ep_program()
826 musb_rx_reinit(musb, qh, hw_ep); in musb_ep_program()
838 csr = musb_readw(hw_ep->regs, MUSB_RXCSR); in musb_ep_program()
844 hw_ep->epnum, csr); in musb_ep_program()
859 musb_writew(hw_ep->regs, MUSB_RXCSR, csr); in musb_ep_program()
860 csr = musb_readw(hw_ep->regs, in musb_ep_program()
875 hw_ep->rx_channel = NULL; in musb_ep_program()
884 musb_writew(hw_ep->regs, MUSB_RXCSR, csr); in musb_ep_program()
885 csr = musb_readw(hw_ep->regs, MUSB_RXCSR); in musb_ep_program()
899 struct musb_hw_ep *hw_ep = musb->control_ep; in musb_h_ep0_continue() local
900 struct musb_qh *qh = hw_ep->in_qh; in musb_h_ep0_continue()
911 musb_read_fifo(hw_ep, fifo_count, fifo_dest); in musb_h_ep0_continue()
950 musb_write_fifo(hw_ep, fifo_count, fifo_dest); in musb_h_ep0_continue()
976 struct musb_hw_ep *hw_ep = musb->control_ep; in musb_h_ep0_irq() local
977 void __iomem *epio = hw_ep->regs; in musb_h_ep0_irq()
978 struct musb_qh *qh = hw_ep->in_qh; in musb_h_ep0_irq()
1038 musb_h_ep0_flush_fifo(hw_ep); in musb_h_ep0_irq()
1052 musb_h_ep0_flush_fifo(hw_ep); in musb_h_ep0_irq()
1085 musb_advance_schedule(musb, urb, hw_ep, 1); in musb_h_ep0_irq()
1115 struct musb_hw_ep *hw_ep = musb->endpoints + epnum; in musb_host_tx() local
1116 void __iomem *epio = hw_ep->regs; in musb_host_tx()
1117 struct musb_qh *qh = hw_ep->out_qh; in musb_host_tx()
1134 dma = is_dma_capable() ? hw_ep->tx_channel : NULL; in musb_host_tx()
1179 musb_h_tx_flush_fifo(hw_ep); in musb_host_tx()
1314 musb_advance_schedule(musb, urb, hw_ep, USB_DIR_OUT); in musb_host_tx()
1317 if (musb_tx_dma_program(musb->dma_controller, hw_ep, qh, urb, in musb_host_tx()
1320 musb_h_tx_dma_start(hw_ep); in musb_host_tx()
1339 musb_write_fifo(hw_ep, length, urb->transfer_buffer + offset); in musb_host_tx()
1438 struct musb_hw_ep *hw_ep = musb->endpoints + epnum; in musb_host_rx() local
1439 void __iomem *epio = hw_ep->regs; in musb_host_rx()
1440 struct musb_qh *qh = hw_ep->in_qh; in musb_host_rx()
1453 dma = is_dma_capable() ? hw_ep->rx_channel : NULL; in musb_host_rx()
1467 musb_h_flush_rxfifo(hw_ep, MUSB_RXCSR_CLRDATATOG); in musb_host_rx()
1507 musb_bulk_rx_nak_timeout(musb, hw_ep); in musb_host_rx()
1535 musb_h_flush_rxfifo(hw_ep, MUSB_RXCSR_CLRDATATOG); in musb_host_rx()
1584 musb_writew(hw_ep->regs, MUSB_RXCSR, val); in musb_host_rx()
1700 if (rx_count < hw_ep->max_packet_sz_rx) { in musb_host_rx()
1751 hw_ep->rx_channel = NULL; in musb_host_rx()
1773 musb_advance_schedule(musb, urb, hw_ep, USB_DIR_IN); in musb_host_rx()
1790 struct musb_hw_ep *hw_ep = NULL; in musb_schedule() local
1799 hw_ep = musb->control_ep; in musb_schedule()
1815 for (epnum = 1, hw_ep = musb->endpoints + 1; in musb_schedule()
1817 epnum++, hw_ep++) { in musb_schedule()
1820 if (musb_ep_get_qh(hw_ep, is_in) != NULL) in musb_schedule()
1823 if (hw_ep == musb->bulk_ep) in musb_schedule()
1827 diff = hw_ep->max_packet_sz_rx; in musb_schedule()
1829 diff = hw_ep->max_packet_sz_tx; in musb_schedule()
1846 hw_ep = musb->endpoints + epnum; in musb_schedule()
1848 txtype = (musb_readb(hw_ep->regs, MUSB_TXTYPE) in musb_schedule()
1860 hw_ep = musb->bulk_ep; in musb_schedule()
1883 hw_ep = musb->endpoints + best_end; in musb_schedule()
1891 qh->hw_ep = hw_ep; in musb_schedule()
2089 struct musb_hw_ep *ep = qh->hw_ep; in musb_cleanup_urb()
2180 || musb_ep_get_qh(qh->hw_ep, is_in) != qh) { in musb_urb_dequeue()
2222 if (musb_ep_get_qh(qh->hw_ep, is_in) == qh) { in musb_h_disable()
2238 musb_advance_schedule(musb, urb, qh->hw_ep, is_in); in musb_h_disable()
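
Taken together, the members touched above (epnum, regs, in_qh, out_qh, tx_channel, rx_channel, max_packet_sz_tx/rx, rx_reinit) outline the per-endpoint state the host code keys off. The partial sketch below is inferred from these hits alone; the in-tree definition of struct musb_hw_ep lives in the MUSB core headers and carries more members, possibly with different types and ordering.

#include <linux/types.h>		/* u8, u16 */

struct dma_channel;			/* DMA engine channel, defined elsewhere in the driver */
struct musb_qh;				/* host-side queue head, defined elsewhere in the driver */

/* Partial sketch of struct musb_hw_ep, reconstructed from the listing only. */
struct musb_hw_ep {
	void __iomem		*regs;		/* endpoint register window: MUSB_RXCSR, MUSB_TXTYPE, ... */
	u8			epnum;		/* hardware endpoint number */

	struct dma_channel	*tx_channel;	/* claimed in musb_ep_program(), dropped on error paths */
	struct dma_channel	*rx_channel;

	struct musb_qh		*in_qh;		/* qh currently scheduled for IN transfers */
	struct musb_qh		*out_qh;	/* qh currently scheduled for OUT transfers */

	u16			max_packet_sz_tx;	/* FIFO sizing consulted by musb_ep_program() and musb_schedule() */
	u16			max_packet_sz_rx;

	u8			rx_reinit;	/* RX side needs reprogramming before reuse (line 825) */
};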
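
Several hits (lines 255, 363, 697, 1820, 2180, 2222) reach the queue heads through musb_ep_set_qh()/musb_ep_get_qh() instead of touching in_qh/out_qh directly. Judging only from those call sites, the accessors reduce to a direction-indexed lookup; the sketch below captures that behaviour and is not a verbatim copy of the driver (the in-tree helpers may also special-case endpoints that share one FIFO for both directions).

/* Direction-indexed access to the qh bound to a hardware endpoint.
 * Reconstructed from the call sites in the listing; assumes the partial
 * struct musb_hw_ep sketched above.
 */
static inline struct musb_qh *musb_ep_get_qh(struct musb_hw_ep *hw_ep, int is_in)
{
	return is_in ? hw_ep->in_qh : hw_ep->out_qh;
}

static inline void musb_ep_set_qh(struct musb_hw_ep *hw_ep, int is_in,
				  struct musb_qh *qh)
{
	if (is_in)
		hw_ep->in_qh = qh;
	else
		hw_ep->out_qh = qh;
}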
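
The hits at lines 435-451 cover most of musb_h_flush_rxfifo(): the caller-supplied csr value is written to MUSB_RXCSR twice and then read back. The sketch below fills the gap between the signature and the writes; the MUSB_RXCSR_FLUSHFIFO bit and any other flag handling before the writes are assumptions here, not something the listing shows.

/* Sketch of musb_h_flush_rxfifo() as implied by lines 435-451.  Only the
 * double write and the read-back are visible in the listing; the flush-bit
 * handling is assumed.  Writing the value twice presumably covers both
 * halves of a double-buffered FIFO.
 */
static u16 musb_h_flush_rxfifo(struct musb_hw_ep *hw_ep, u16 csr)
{
	csr |= MUSB_RXCSR_FLUSHFIFO;			/* assumed flush request */

	musb_writew(hw_ep->regs, MUSB_RXCSR, csr);	/* line 447 */
	musb_writew(hw_ep->regs, MUSB_RXCSR, csr);	/* line 448 */

	/* hand the settled CSR value back to the caller (line 451) */
	return musb_readw(hw_ep->regs, MUSB_RXCSR);
}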