Searched refs:compl_desc (Results 1 – 6 of 6) sorted by relevance
/linux-6.1.9/drivers/net/ethernet/google/gve/
gve_rx_dqo.c
    455   const struct gve_rx_compl_desc_dqo *compl_desc,   in gve_rx_skb_hash() argument
    465   skb_set_hash(skb, le32_to_cpu(compl_desc->hash), hash_type);   in gve_rx_skb_hash()
    519   const struct gve_rx_compl_desc_dqo *compl_desc,   in gve_rx_dqo() argument
    522   const u16 buffer_id = le16_to_cpu(compl_desc->buf_id);   in gve_rx_dqo()
    523   const bool eop = compl_desc->end_of_packet != 0;   in gve_rx_dqo()
    540   if (unlikely(compl_desc->rx_error)) {   in gve_rx_dqo()
    546   buf_len = compl_desc->packet_len;   in gve_rx_dqo()
    676   struct gve_rx_compl_desc_dqo *compl_desc =   in gve_rx_poll_dqo() local
    681   if (compl_desc->generation == complq->cur_gen_bit)   in gve_rx_poll_dqo()
    691   err = gve_rx_dqo(napi, rx, compl_desc, rx->q_num);   in gve_rx_poll_dqo()
    [all …]
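The gve_rx_poll_dqo() hits (lines 676 and 681) show a completion ring consumed by comparing each descriptor's generation bit against a tracked cur_gen_bit: a slot whose bit still equals the tracked value has not been written by the device yet, and the usual convention for such rings (apparently the one here too) is to flip the tracked bit every time the consumer wraps around. The stand-alone C sketch below illustrates only that convention; the struct and helper names are invented stand-ins, not the driver's gve_rx_compl_desc_dqo or completion-queue types.

#include <stddef.h>
#include <stdint.h>

/* Toy stand-ins, not the driver's real completion-queue layout. */
struct toy_compl_desc {
        uint8_t generation;     /* written by the producer; flips on every ring wrap */
        /* payload fields (buf_id, packet_len, rx_error, ...) elided */
};

struct toy_complq {
        struct toy_compl_desc *ring;
        unsigned int mask;      /* ring size - 1, size a power of two */
        unsigned int head;      /* next slot the consumer will inspect */
        uint8_t cur_gen_bit;    /* generation value that currently means "stale" */
};

/* Return the next fresh completion, or NULL if nothing new has arrived. */
static struct toy_compl_desc *toy_complq_peek(struct toy_complq *q)
{
        struct toy_compl_desc *desc = &q->ring[q->head];

        if (desc->generation == q->cur_gen_bit)
                return NULL;    /* producer has not written this slot yet */
        return desc;
}

/* Consume the slot returned by toy_complq_peek(). */
static void toy_complq_pop(struct toy_complq *q)
{
        q->head = (q->head + 1) & q->mask;
        if (q->head == 0)
                q->cur_gen_bit ^= 1;    /* wrapping flips what "stale" looks like */
}

The appeal of a generation bit is that the consumer never has to read a device-owned head pointer; it only inspects memory the device has already written.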
gve_tx_dqo.c
    934    struct gve_tx_compl_desc *compl_desc =   in gve_clean_tx_done_dqo() local
    938    if (compl_desc->generation == tx->dqo_compl.cur_gen_bit)   in gve_clean_tx_done_dqo()
    947    type = compl_desc->type;   in gve_clean_tx_done_dqo()
    951    u16 tx_head = le16_to_cpu(compl_desc->tx_head);   in gve_clean_tx_done_dqo()
    955    u16 compl_tag = le16_to_cpu(compl_desc->completion_tag);   in gve_clean_tx_done_dqo()
    963    u16 compl_tag = le16_to_cpu(compl_desc->completion_tag);   in gve_clean_tx_done_dqo()
    969    u16 compl_tag = le16_to_cpu(compl_desc->completion_tag);   in gve_clean_tx_done_dqo()
   1001    struct gve_tx_compl_desc;   compl_desc declared   in gve_tx_poll_dqo() local
   1020    compl_desc = &tx->dqo.compl_ring[tx->dqo_compl.head];   in gve_tx_poll_dqo()
   1021    return compl_desc->generation != tx->dqo_compl.cur_gen_bit;   in gve_tx_poll_dqo()
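On the TX side the generation check at line 938 mirrors the RX one, and the remaining hits show gve_clean_tx_done_dqo() branching on compl_desc->type, with some branches reading a ring head and others a 16-bit completion tag. The sketch below only illustrates that kind of type dispatch; the enum values, field meanings and printouts are assumptions for illustration, not the driver's actual completion semantics.

#include <stdint.h>
#include <stdio.h>

/* Invented completion types; the real driver's values and names differ. */
enum toy_tx_compl_type {
        TOY_COMPL_TYPE_DESC = 1,        /* carries a ring head */
        TOY_COMPL_TYPE_PKT  = 2,        /* carries a per-packet completion tag */
};

struct toy_tx_compl_desc {
        uint8_t  generation;
        uint8_t  type;
        uint16_t tx_head;               /* meaningful for TOY_COMPL_TYPE_DESC */
        uint16_t completion_tag;        /* meaningful for TOY_COMPL_TYPE_PKT */
};

static void toy_handle_tx_completion(const struct toy_tx_compl_desc *desc)
{
        /*
         * A real driver first converts multi-byte fields from the
         * descriptor's fixed endianness (hence the le16_to_cpu() calls in
         * the hits above); this toy struct is host-endian already.
         */
        switch (desc->type) {
        case TOY_COMPL_TYPE_DESC:
                /* Everything up to tx_head has been fetched; those slots can be reused. */
                printf("ring slots up to %u are free\n", (unsigned)desc->tx_head);
                break;
        case TOY_COMPL_TYPE_PKT:
                /* The tag maps the completion back to one pending packet. */
                printf("packet with tag %u is done\n", (unsigned)desc->completion_tag);
                break;
        default:
                /* Unknown completion types would just be counted and skipped. */
                break;
        }
}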
/linux-6.1.9/drivers/dma/ioat/ |
prep.c
    157    struct ioat_ring_ent *compl_desc;   in __ioat_prep_xor_lock() local
    230    compl_desc = ioat_get_ring_ent(ioat_chan, idx + i);   in __ioat_prep_xor_lock()
    231    compl_desc->txd.flags = flags & DMA_PREP_INTERRUPT;   in __ioat_prep_xor_lock()
    232    hw = compl_desc->hw;   in __ioat_prep_xor_lock()
    238    dump_desc_dbg(ioat_chan, compl_desc);   in __ioat_prep_xor_lock()
    241    return &compl_desc->txd;   in __ioat_prep_xor_lock()
    346    struct ioat_ring_ent *compl_desc;   in __ioat_prep_pq_lock() local
    443    compl_desc = desc;   in __ioat_prep_pq_lock()
    446    compl_desc = ioat_get_ring_ent(ioat_chan, idx + i);   in __ioat_prep_pq_lock()
    447    compl_desc->txd.flags = flags & DMA_PREP_INTERRUPT;   in __ioat_prep_pq_lock()
    [all …]
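Both __ioat_prep_xor_lock() and __ioat_prep_pq_lock() end by picking one ring entry to serve as the completion descriptor: it alone inherits DMA_PREP_INTERRUPT from the caller's flags, and its transaction descriptor is what the prep routine hands back. The sketch below shows only that shape; ioat_ring_ent, its txd/hw fields and ioat_get_ring_ent() are replaced by invented toy stand-ins.

#include <stdbool.h>

/* Toy ring types; not ioat's ioat_ring_ent / ioat_get_ring_ent(). */
struct toy_desc {
        bool interrupt_on_completion;
        /* hardware descriptor fields elided */
};

struct toy_ring {
        struct toy_desc *ents;
        unsigned int mask;      /* ring size - 1 */
};

static struct toy_desc *toy_ring_ent(struct toy_ring *ring, unsigned int idx)
{
        return &ring->ents[idx & ring->mask];
}

/*
 * Lay out an operation spanning nr_descs ring entries starting at idx
 * (nr_descs >= 1) and return the final, "completion" descriptor: the only
 * one flagged to raise an interrupt once the whole chain has executed.
 */
static struct toy_desc *toy_prep_op(struct toy_ring *ring, unsigned int idx,
                                    unsigned int nr_descs, bool want_irq)
{
        struct toy_desc *compl_desc = toy_ring_ent(ring, idx + nr_descs - 1);

        compl_desc->interrupt_on_completion = want_irq;
        return compl_desc;
}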
/linux-6.1.9/drivers/crypto/ccree/ |
cc_request_mgr.c
     29    struct cc_hw_desc compl_desc;   member
    170    hw_desc_init(&req_mgr_h->compl_desc);   in cc_req_mgr_init()
    171    set_din_const(&req_mgr_h->compl_desc, 0, sizeof(u32));   in cc_req_mgr_init()
    172    set_dout_dlli(&req_mgr_h->compl_desc, req_mgr_h->dummy_comp_buff_dma,   in cc_req_mgr_init()
    174    set_flow_mode(&req_mgr_h->compl_desc, BYPASS);   in cc_req_mgr_init()
    175    set_queue_last_ind(drvdata, &req_mgr_h->compl_desc);   in cc_req_mgr_init()
    313    enqueue_seq(drvdata, &req_mgr_h->compl_desc, 1);   in cc_do_send_request()
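Here compl_desc is a single pre-built descriptor: cc_req_mgr_init() configures it once as a BYPASS write of a constant word into a dummy DMA buffer and marks it as the queue's last descriptor, and cc_do_send_request() enqueues that same descriptor after every request sequence so the engine produces a predictable completion write. The sketch below mimics only the shape of that pattern; the structures and enqueue stub are invented, not the cc_hw_desc API.

#include <stdint.h>
#include <string.h>

/* Toy descriptor and request manager; not ccree's cc_hw_desc API. */
struct toy_hw_desc {
        uint32_t dst_addr;      /* where the engine writes its dummy word */
        uint32_t len;
        int      last_in_queue; /* marks the descriptor that closes the request */
};

struct toy_req_mgr {
        struct toy_hw_desc compl_desc;  /* built once, reused for every request */
};

static void toy_req_mgr_init(struct toy_req_mgr *mgr, uint32_t dummy_buf_dma)
{
        memset(&mgr->compl_desc, 0, sizeof(mgr->compl_desc));
        mgr->compl_desc.dst_addr = dummy_buf_dma;       /* scratch completion buffer */
        mgr->compl_desc.len = sizeof(uint32_t);
        mgr->compl_desc.last_in_queue = 1;
}

/* Toy enqueue; a real driver pushes descriptors into a hardware queue here. */
static void toy_enqueue(const struct toy_hw_desc *seq, unsigned int seq_len)
{
        (void)seq;
        (void)seq_len;
}

static void toy_send_request(struct toy_req_mgr *mgr,
                             const struct toy_hw_desc *req_seq,
                             unsigned int req_len)
{
        toy_enqueue(req_seq, req_len);
        toy_enqueue(&mgr->compl_desc, 1);       /* shared completion descriptor goes last */
}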
/linux-6.1.9/drivers/usb/dwc2/ |
gadget.c
   1021    hs_ep->compl_desc = 0;   in dwc2_gadget_start_isoc_ddma()
   2186    desc_sts = hs_ep->desc_list[hs_ep->compl_desc].status;   in dwc2_gadget_complete_isoc_request_ddma()
   2221    hs_ep->compl_desc++;   in dwc2_gadget_complete_isoc_request_ddma()
   2222    if (hs_ep->compl_desc > (MAX_DMA_DESC_NUM_HS_ISOC - 1))   in dwc2_gadget_complete_isoc_request_ddma()
   2223    hs_ep->compl_desc = 0;   in dwc2_gadget_complete_isoc_request_ddma()
   2224    desc_sts = hs_ep->desc_list[hs_ep->compl_desc].status;   in dwc2_gadget_complete_isoc_request_ddma()
   2247    hs_ep->compl_desc = 0;   in dwc2_gadget_handle_isoc_bna()
   4113    hs_ep->compl_desc = 0;   in dwc2_hsotg_ep_enable()
core.h
    156    unsigned int compl_desc;   member
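Across the dwc2 hits, compl_desc (the core.h member above) is simply the index of the next isochronous DMA descriptor expected to complete: it is reset to 0 when a transfer is (re)started or on a BNA interrupt, and advanced with wraparound at MAX_DMA_DESC_NUM_HS_ISOC as descriptors are retired. A minimal sketch of that index handling, using a toy struct and a local constant standing in for the real dwc2_hsotg_ep and MAX_DMA_DESC_NUM_HS_ISOC:

/* Local stand-ins for MAX_DMA_DESC_NUM_HS_ISOC and struct dwc2_hsotg_ep. */
#define TOY_NUM_ISOC_DESCS 256u

struct toy_ep {
        unsigned int compl_desc;        /* index of the next descriptor expected to complete */
};

/* Advance to the next descriptor slot, wrapping at the end of the list. */
static void toy_ep_advance(struct toy_ep *ep)
{
        ep->compl_desc++;
        if (ep->compl_desc > (TOY_NUM_ISOC_DESCS - 1))
                ep->compl_desc = 0;
}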