Home
last modified time | relevance | path

Searched refs:wrlen (Results 1 – 8 of 8) sorted by relevance

/linux-6.1.9/drivers/net/ethernet/chelsio/inline_crypto/chtls/
chtls_hw.c:47 unsigned int wrlen; in __set_tcb_field() local
49 wrlen = roundup(sizeof(*req) + sizeof(*sc), 16); in __set_tcb_field()
52 req = (struct cpl_set_tcb_field *)__skb_put(skb, wrlen); in __set_tcb_field()
68 unsigned int wrlen; in chtls_set_tcb_field() local
71 wrlen = roundup(sizeof(*req) + sizeof(*sc), 16); in chtls_set_tcb_field()
73 skb = alloc_skb(wrlen, GFP_ATOMIC); in chtls_set_tcb_field()
77 credits_needed = DIV_ROUND_UP(wrlen, 16); in chtls_set_tcb_field()
96 unsigned int wrlen; in chtls_set_tcb_field_rpl_skb() local
98 wrlen = sizeof(struct cpl_set_tcb_field) + sizeof(struct ulptx_idata); in chtls_set_tcb_field_rpl_skb()
99 wrlen = roundup(wrlen, 16); in chtls_set_tcb_field_rpl_skb()
[all …]
/linux-6.1.9/drivers/scsi/cxgbi/
libcxgbi.h:366 static inline struct sk_buff *alloc_wr(int wrlen, int dlen, gfp_t gfp) in alloc_wr() argument
368 struct sk_buff *skb = alloc_skb(wrlen + dlen, gfp); in alloc_wr()
371 __skb_put(skb, wrlen); in alloc_wr()
372 memset(skb->head, 0, wrlen + dlen); in alloc_wr()
374 pr_info("alloc cpl wr skb %u+%u, OOM.\n", wrlen, dlen); in alloc_wr()
/linux-6.1.9/drivers/infiniband/hw/cxgb4/
cm.c:649 u32 wrlen = roundup(sizeof(struct cpl_close_con_req), 16); in send_halfclose() local
655 cxgb_mk_close_con_req(skb, wrlen, ep->hwtid, ep->txq_idx, in send_halfclose()
665 int wrlen = roundup(sizeof(*req), 16); in read_tcb() local
672 req = (struct cpl_get_tcb *) skb_put(skb, wrlen); in read_tcb()
673 memset(req, 0, wrlen); in read_tcb()
689 u32 wrlen = roundup(sizeof(struct cpl_abort_req), 16); in send_abort_req() local
696 cxgb_mk_abort_req(req_skb, wrlen, ep->hwtid, ep->txq_idx, in send_abort_req()
726 int win, sizev4, sizev6, wrlen; in send_connect() local
762 wrlen = (ep->com.remote_addr.ss_family == AF_INET) ? in send_connect()
768 skb = get_skb(NULL, wrlen, GFP_KERNEL); in send_connect()
[all …]
/linux-6.1.9/drivers/net/ethernet/chelsio/cxgb4/
cxgb4_uld.h:75 #define INIT_ULPTX_WR(w, wrlen, atomic, tid) do { \ argument
78 (w)->wr.wr_mid = htonl(FW_WR_LEN16_V(DIV_ROUND_UP(wrlen, 16)) | \
sge.c:2176 u32 wrlen; in ethofld_calc_tx_flits() local
2178 wrlen = sizeof(struct fw_eth_tx_eo_wr) + sizeof(struct cpl_tx_pkt_core); in ethofld_calc_tx_flits()
2181 wrlen += sizeof(struct cpl_tx_pkt_lso_core); in ethofld_calc_tx_flits()
2183 wrlen += roundup(hdr_len, 16); in ethofld_calc_tx_flits()
2186 flits = DIV_ROUND_UP(wrlen, 8); in ethofld_calc_tx_flits()
2202 u32 hdr_len, u32 wrlen) in write_eo_wr() argument
2213 wrlen16 = DIV_ROUND_UP(wrlen, 16); in write_eo_wr()
2267 u32 wrlen, wrlen16, hdr_len, data_len; in ethofld_hard_xmit() local
2304 wrlen = flits * 8; in ethofld_hard_xmit()
2305 wrlen16 = DIV_ROUND_UP(wrlen, 16); in ethofld_hard_xmit()
[all …]
cxgb4_filter.c:1746 unsigned int wrlen; in cxgb4_del_hash_filter() local
1771 wrlen = roundup(sizeof(*wr) + (sizeof(*req) + sizeof(*aligner)) in cxgb4_del_hash_filter()
1773 skb = alloc_skb(wrlen, GFP_KERNEL); in cxgb4_del_hash_filter()
1779 req = (struct cpl_set_tcb_field *)__skb_put(skb, wrlen); in cxgb4_del_hash_filter()
1780 INIT_ULPTX_WR(req, wrlen, 0, 0); in cxgb4_del_hash_filter()
/linux-6.1.9/drivers/scsi/cxgbi/cxgb3i/
cxgb3i.c:333 static unsigned int wrlen __read_mostly;
349 wrlen = wr_len * 8; in init_wr_tab()
412 if (wrs_needed > 1 && len + sizeof(struct tx_data_wr) <= wrlen) in push_tx_frames()
/linux-6.1.9/drivers/crypto/chelsio/
chcr_algo.c:2403 int aadmax, int wrlen, in chcr_aead_need_fallback() argument
2411 (wrlen > SGE_MAX_WR_LEN)) in chcr_aead_need_fallback()