Lines Matching refs:u_ctx

764 struct uld_ctx *u_ctx = ULD_CTX(ctx); in create_wreq() local
770 qid = u_ctx->lldi.rxq_ids[rxqidx]; in create_wreq()
771 fid = u_ctx->lldi.rxq_ids[0]; in create_wreq()
774 rx_channel_id = cxgb4_port_e2cchan(u_ctx->lldi.ports[portno]); in create_wreq()
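The create_wreq() hits above capture the queue-selection pattern repeated throughout the file: the request's rx queue index is turned into an absolute ingress queue id through lldi.rxq_ids[], rxq_ids[0] doubles as the fid, and the rx channel for the WR header comes from cxgb4_port_e2cchan() on the owning port. Below is a small stand-alone sketch of that index arithmetic; struct fake_lldi, the demo numbers, and the portno = rxqidx / rxq_perchan derivation are stand-ins and assumptions, not the real cxgb4_lld_info layout (the per-channel counts themselves come from the chcr_device_init() hits further down).

/* Simplified stand-in for the queue bookkeeping visible in create_wreq();
 * the real fields live in struct cxgb4_lld_info / struct chcr_context. */
#include <stdio.h>

struct fake_lldi {
	unsigned int nchan;        /* number of ports/channels */
	unsigned int nrxq;         /* total rx (response) queues */
	unsigned int rxq_ids[8];   /* absolute ingress queue ids */
};

int main(void)
{
	/* hypothetical adapter: 2 channels, 8 rx queues, 4 per channel */
	struct fake_lldi lldi = {
		.nchan = 2, .nrxq = 8,
		.rxq_ids = { 100, 101, 102, 103, 104, 105, 106, 107 },
	};
	unsigned int rxq_perchan = lldi.nrxq / lldi.nchan; /* cf. chcr_device_init() */
	unsigned int rxqidx = 5;                           /* per-request queue index */

	unsigned int qid = lldi.rxq_ids[rxqidx];    /* response queue for this request */
	unsigned int fid = lldi.rxq_ids[0];         /* first queue, used as the fid */
	unsigned int portno = rxqidx / rxq_perchan; /* assumed mapping back to a port */

	printf("qid=%u fid=%u portno=%u\n", qid, fid, portno);
	/* In the driver, portno then feeds cxgb4_port_e2cchan(lldi.ports[portno])
	 * to obtain rx_channel_id for the work request header. */
	return 0;
}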
802 struct uld_ctx *u_ctx = ULD_CTX(ctx); in create_cipher_wr() local
819 rx_channel_id = cxgb4_port_e2cchan(u_ctx->lldi.ports[rx_channel_id]); in create_cipher_wr()
1163 struct uld_ctx *u_ctx = ULD_CTX(c_ctx(tfm)); in chcr_handle_cipher_resp() local
1209 wrparam.qid = u_ctx->lldi.rxq_ids[reqctx->rxqidx]; in chcr_handle_cipher_resp()
1218 skb->dev = u_ctx->lldi.ports[0]; in chcr_handle_cipher_resp()
1369 struct uld_ctx *u_ctx = ULD_CTX(c_ctx(tfm)); in chcr_aes_encrypt() local
1381 if (unlikely(cxgb4_is_crypto_q_full(u_ctx->lldi.ports[0], in chcr_aes_encrypt()
1388 err = process_cipher(req, u_ctx->lldi.rxq_ids[reqctx->rxqidx], in chcr_aes_encrypt()
1392 skb->dev = u_ctx->lldi.ports[0]; in chcr_aes_encrypt()
1411 struct uld_ctx *u_ctx = ULD_CTX(c_ctx(tfm)); in chcr_aes_decrypt() local
1427 if (unlikely(cxgb4_is_crypto_q_full(u_ctx->lldi.ports[0], in chcr_aes_decrypt()
1431 err = process_cipher(req, u_ctx->lldi.rxq_ids[reqctx->rxqidx], in chcr_aes_decrypt()
1435 skb->dev = u_ctx->lldi.ports[0]; in chcr_aes_decrypt()
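chcr_aes_encrypt() and chcr_aes_decrypt() share the same submit shape: bail out when cxgb4_is_crypto_q_full() reports back-pressure and the request may not backlog, build the work request against this request's rx queue id, point the skb at lldi.ports[0] and send. The following stand-alone sketch models that shape; fake_req, q_full and build_and_send_wr are invented stand-ins, and the MAY_BACKLOG check and return values follow the usual async-crypto convention rather than being copied from the driver.

/* Simplified model of the submit path shared by chcr_aes_encrypt()/decrypt();
 * stand-in types and helpers, not the real cxgb4/chcr API. */
#include <errno.h>
#include <stdbool.h>
#include <stdio.h>

struct fake_req { bool may_backlog; unsigned int rxqidx; };

/* Hypothetical stand-ins for cxgb4_is_crypto_q_full() and the WR builder. */
static bool q_full(unsigned int txqidx) { (void)txqidx; return false; }
static int build_and_send_wr(struct fake_req *r, unsigned int qid)
{
	(void)r;
	printf("sending cipher WR on rx queue id %u\n", qid);
	return -EINPROGRESS;               /* async completion, as in the driver */
}

static int submit_cipher(struct fake_req *r, const unsigned int *rxq_ids)
{
	/* Back-pressure check before building the work request. */
	if (q_full(0) && !r->may_backlog)
		return -ENOSPC;

	/* Build the WR against this request's rx (response) queue id,
	 * then hand it to the port's netdev for transmission. */
	return build_and_send_wr(r, rxq_ids[r->rxqidx]);
}

int main(void)
{
	unsigned int rxq_ids[4] = { 100, 101, 102, 103 };
	struct fake_req r = { .may_backlog = true, .rxqidx = 2 };

	printf("ret=%d\n", submit_cipher(&r, rxq_ids));
	return 0;
}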
1442 struct uld_ctx *u_ctx = NULL; in chcr_device_init() local
1447 u_ctx = assign_chcr_device(); in chcr_device_init()
1448 if (!u_ctx) { in chcr_device_init()
1453 ctx->dev = &u_ctx->dev; in chcr_device_init()
1454 ntxq = u_ctx->lldi.ntxq; in chcr_device_init()
1455 rxq_perchan = u_ctx->lldi.nrxq / u_ctx->lldi.nchan; in chcr_device_init()
1456 txq_perchan = ntxq / u_ctx->lldi.nchan; in chcr_device_init()
1458 ctx->nrxq = u_ctx->lldi.nrxq; in chcr_device_init()
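For a hypothetical adapter reporting nrxq = 8, ntxq = 16 and nchan = 2, the divisions above give rxq_perchan = 4 and txq_perchan = 8; these per-channel counts are what the queue-index arithmetic sketched after the create_wreq() hits relies on.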
1568 struct uld_ctx *u_ctx = ULD_CTX(ctx); in create_hash_wr() local
1579 rx_channel_id = cxgb4_port_e2cchan(u_ctx->lldi.ports[rx_channel_id]); in create_hash_wr()
1630 dma_map_single(&u_ctx->lldi.pdev->dev, req_ctx->reqbfr, in create_hash_wr()
1632 if (dma_mapping_error(&u_ctx->lldi.pdev->dev, in create_hash_wr()
1660 struct uld_ctx *u_ctx = ULD_CTX(h_ctx(rtfm)); in chcr_ahash_update() local
1692 if (unlikely(cxgb4_is_crypto_q_full(u_ctx->lldi.ports[0], in chcr_ahash_update()
1700 error = chcr_hash_dma_map(&u_ctx->lldi.pdev->dev, req); in chcr_ahash_update()
1737 skb->dev = u_ctx->lldi.ports[0]; in chcr_ahash_update()
1742 chcr_hash_dma_unmap(&u_ctx->lldi.pdev->dev, req); in chcr_ahash_update()
1765 struct uld_ctx *u_ctx = ULD_CTX(h_ctx(rtfm)); in chcr_ahash_final() local
1819 skb->dev = u_ctx->lldi.ports[0]; in chcr_ahash_final()
1833 struct uld_ctx *u_ctx = ULD_CTX(h_ctx(rtfm)); in chcr_ahash_finup() local
1851 if (unlikely(cxgb4_is_crypto_q_full(u_ctx->lldi.ports[0], in chcr_ahash_finup()
1858 error = chcr_hash_dma_map(&u_ctx->lldi.pdev->dev, req); in chcr_ahash_finup()
1912 skb->dev = u_ctx->lldi.ports[0]; in chcr_ahash_finup()
1917 chcr_hash_dma_unmap(&u_ctx->lldi.pdev->dev, req); in chcr_ahash_finup()
1928 struct uld_ctx *u_ctx = ULD_CTX(h_ctx(rtfm)); in chcr_ahash_digest() local
1947 if (unlikely(cxgb4_is_crypto_q_full(u_ctx->lldi.ports[0], in chcr_ahash_digest()
1955 error = chcr_hash_dma_map(&u_ctx->lldi.pdev->dev, req); in chcr_ahash_digest()
2006 skb->dev = u_ctx->lldi.ports[0]; in chcr_ahash_digest()
2011 chcr_hash_dma_unmap(&u_ctx->lldi.pdev->dev, req); in chcr_ahash_digest()
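The ahash update/finup/digest hits show a consistent DMA discipline: map the request against the PCI device behind lldi.pdev (chcr_hash_dma_map) before building the work request, unmap on every error path, and let the response handler (chcr_handle_ahash_resp, below) unmap on the success path once the hardware has answered. A stand-alone sketch of that map / build / unmap-on-error shape follows; fake_dma_map and friends are invented stand-ins, not kernel DMA API calls.

/* Stand-alone model of the DMA map / build WR / unmap-on-error pattern. */
#include <errno.h>
#include <stdio.h>

static int fake_dma_map(const char *what)    { printf("map %s\n", what); return 0; }
static void fake_dma_unmap(const char *what) { printf("unmap %s\n", what); }
static int fake_build_wr(void)               { return -ENOMEM; /* force the error path */ }

static int hash_update_like(void)
{
	int err;

	err = fake_dma_map("req sg");   /* cf. chcr_hash_dma_map(&lldi.pdev->dev, req) */
	if (err)
		return err;

	err = fake_build_wr();          /* build the skb / work request */
	if (err)
		goto unmap;             /* error: undo the mapping before returning */

	return 0;                       /* success: unmapped later, on completion */
unmap:
	fake_dma_unmap("req sg");       /* cf. chcr_hash_dma_unmap() */
	return err;
}

int main(void)
{
	printf("ret=%d\n", hash_update_like());
	return 0;
}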
2023 struct uld_ctx *u_ctx = ULD_CTX(ctx); in chcr_ahash_continue() local
2074 skb->dev = u_ctx->lldi.ports[0]; in chcr_ahash_continue()
2090 struct uld_ctx *u_ctx = ULD_CTX(h_ctx(tfm)); in chcr_handle_ahash_resp() local
2103 dma_unmap_single(&u_ctx->lldi.pdev->dev, hctx_wr->dma_addr, in chcr_handle_ahash_resp()
2130 chcr_hash_dma_unmap(&u_ctx->lldi.pdev->dev, req); in chcr_handle_ahash_resp()
2367 struct uld_ctx *u_ctx = ULD_CTX(a_ctx(tfm)); in chcr_aead_common_exit() local
2369 chcr_aead_dma_unmap(&u_ctx->lldi.pdev->dev, req, reqctx->op); in chcr_aead_common_exit()
2438 struct uld_ctx *u_ctx = ULD_CTX(ctx); in create_authenc_wr() local
2458 rx_channel_id = cxgb4_port_e2cchan(u_ctx->lldi.ports[rx_channel_id]); in create_authenc_wr()
2712 struct uld_ctx *u_ctx = ULD_CTX(ctx); in chcr_add_aead_dst_ent() local
2716 rx_channel_id = cxgb4_port_e2cchan(u_ctx->lldi.ports[rx_channel_id]); in chcr_add_aead_dst_ent()
2756 struct uld_ctx *u_ctx = ULD_CTX(ctx); in chcr_add_cipher_dst_ent() local
2760 rx_channel_id = cxgb4_port_e2cchan(u_ctx->lldi.ports[rx_channel_id]); in chcr_add_cipher_dst_ent()
2964 struct uld_ctx *u_ctx = ULD_CTX(ctx); in fill_sec_cpl_for_aead() local
2974 rx_channel_id = cxgb4_port_e2cchan(u_ctx->lldi.ports[rx_channel_id]); in fill_sec_cpl_for_aead()
3136 struct uld_ctx *u_ctx = ULD_CTX(ctx); in create_gcm_wr() local
3153 rx_channel_id = cxgb4_port_e2cchan(u_ctx->lldi.ports[rx_channel_id]); in create_gcm_wr()
3740 struct uld_ctx *u_ctx = ULD_CTX(ctx); in chcr_aead_op() local
3757 if (cxgb4_is_crypto_q_full(u_ctx->lldi.ports[0], in chcr_aead_op()
3772 skb = create_wr_fn(req, u_ctx->lldi.rxq_ids[reqctx->rxqidx], size); in chcr_aead_op()
3779 skb->dev = u_ctx->lldi.ports[0]; in chcr_aead_op()
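chcr_aead_op() is the common AEAD entry point visible at the end of the listing: a back-pressure check, a create_wr_fn callback (create_authenc_wr, create_gcm_wr, ...) invoked with this request's rx queue id, then skb->dev = lldi.ports[0] and transmission. The sketch below models only that callback dispatch; fake_skb, aead_op_like and fake_create_gcm_wr are invented stand-ins, and the return value follows the usual async-crypto convention.

/* Stand-alone model of the create_wr_fn dispatch used by chcr_aead_op(). */
#include <errno.h>
#include <stdio.h>

struct fake_skb { unsigned int qid; };
typedef struct fake_skb *(*create_wr_fn)(unsigned int qid, int size);

static struct fake_skb gcm_skb;
static struct fake_skb *fake_create_gcm_wr(unsigned int qid, int size)
{
	(void)size;
	gcm_skb.qid = qid;          /* record which response queue the WR targets */
	return &gcm_skb;
}

static int aead_op_like(unsigned int rxqidx, const unsigned int *rxq_ids,
			int size, create_wr_fn build)
{
	struct fake_skb *skb = build(rxq_ids[rxqidx], size);

	if (!skb)
		return -ENOMEM;
	printf("send WR for rx queue id %u via port 0\n", skb->qid);
	return -EINPROGRESS;        /* async completion, as in the driver */
}

int main(void)
{
	unsigned int rxq_ids[4] = { 100, 101, 102, 103 };

	printf("ret=%d\n", aead_op_like(1, rxq_ids, 0, fake_create_gcm_wr));
	return 0;
}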