Searched refs: dma_mapping_error (Results 1 – 25 of 522) sorted by relevance
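
Almost every hit on this page follows the same idiom: a dma_map_single()/dma_map_page() call whose returned handle is checked with dma_mapping_error() before the descriptor is handed to hardware, often wrapped in unlikely(). A minimal sketch of that idiom follows; example_map(), dev, buf and len are illustrative names, not taken from any of the drivers listed here.

#include <linux/dma-mapping.h>

/* Minimal sketch of the map-and-check idiom seen in the hits below.
 * example_map(), dev, buf and len are illustrative placeholders. */
static int example_map(struct device *dev, void *buf, size_t len,
                       dma_addr_t *handle)
{
        dma_addr_t addr = dma_map_single(dev, buf, len, DMA_TO_DEVICE);

        /* dma_map_single() does not return an error code or error
         * pointer; the only valid failure test is dma_mapping_error(). */
        if (unlikely(dma_mapping_error(dev, addr)))
                return -ENOMEM;

        *handle = addr;
        return 0;
}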

/linux-5.19.10/drivers/crypto/qat/qat_common/
qat_asym_algs.c 307 if (unlikely(dma_mapping_error(dev, qat_req->in.dh.in.b))) in qat_dh_compute_value()
329 if (unlikely(dma_mapping_error(dev, qat_req->out.dh.r))) in qat_dh_compute_value()
338 if (unlikely(dma_mapping_error(dev, qat_req->phy_in))) in qat_dh_compute_value()
344 if (unlikely(dma_mapping_error(dev, qat_req->phy_out))) in qat_dh_compute_value()
360 if (!dma_mapping_error(dev, qat_req->phy_out)) in qat_dh_compute_value()
365 if (!dma_mapping_error(dev, qat_req->phy_in)) in qat_dh_compute_value()
370 if (!dma_mapping_error(dev, qat_req->out.dh.r)) in qat_dh_compute_value()
376 if (!dma_mapping_error(dev, qat_req->in.dh.in.b)) in qat_dh_compute_value()
712 if (unlikely(dma_mapping_error(dev, qat_req->in.rsa.enc.m))) in qat_rsa_enc()
727 if (unlikely(dma_mapping_error(dev, qat_req->out.rsa.enc.c))) in qat_rsa_enc()
[all …]
/linux-5.19.10/drivers/crypto/caam/
caampkc.c 345 if (dma_mapping_error(dev, edesc->sec4_sg_dma)) { in rsa_edesc_alloc()
409 if (dma_mapping_error(dev, pdb->n_dma)) { in set_rsa_pub_pdb()
415 if (dma_mapping_error(dev, pdb->e_dma)) { in set_rsa_pub_pdb()
454 if (dma_mapping_error(dev, pdb->n_dma)) { in set_rsa_priv_f1_pdb()
460 if (dma_mapping_error(dev, pdb->d_dma)) { in set_rsa_priv_f1_pdb()
503 if (dma_mapping_error(dev, pdb->d_dma)) { in set_rsa_priv_f2_pdb()
509 if (dma_mapping_error(dev, pdb->p_dma)) { in set_rsa_priv_f2_pdb()
515 if (dma_mapping_error(dev, pdb->q_dma)) { in set_rsa_priv_f2_pdb()
521 if (dma_mapping_error(dev, pdb->tmp1_dma)) { in set_rsa_priv_f2_pdb()
527 if (dma_mapping_error(dev, pdb->tmp2_dma)) { in set_rsa_priv_f2_pdb()
[all …]
blob_gen.c 89 if (dma_mapping_error(jrdev, dma_in)) { in caam_process_blob()
97 if (dma_mapping_error(jrdev, dma_out)) { in caam_process_blob()
caamhash.c 148 if (dma_mapping_error(jrdev, state->ctx_dma)) { in map_seq_out_ptr_ctx()
171 if (dma_mapping_error(jrdev, state->buf_dma)) { in buf_map_to_sec4_sg()
189 if (dma_mapping_error(jrdev, state->ctx_dma)) { in ctx_map_to_sec4_sg()
377 if (dma_mapping_error(jrdev, key_dma)) { in hash_digest_key()
736 if (dma_mapping_error(ctx->jrdev, src_dma)) { in ahash_edesc_add_src()
906 if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) { in ahash_update_ctx()
979 if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) { in ahash_final_ctx()
1166 if (dma_mapping_error(jrdev, state->buf_dma)) { in ahash_final_no_ctx()
1271 if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) { in ahash_update_no_ctx()
1819 if (dma_mapping_error(ctx->jrdev, ctx->adata.key_dma)) { in caam_hash_cra_init()
[all …]
caamalg_qi2.c 489 if (dma_mapping_error(dev, iv_dma)) { in aead_edesc_alloc()
513 if (dma_mapping_error(dev, edesc->assoclen_dma)) { in aead_edesc_alloc()
534 if (dma_mapping_error(dev, qm_sg_dma)) { in aead_edesc_alloc()
1206 if (dma_mapping_error(dev, iv_dma)) { in skcipher_edesc_alloc()
1230 if (dma_mapping_error(dev, edesc->qm_sg_dma)) { in skcipher_edesc_alloc()
1589 if (dma_mapping_error(ctx->dev, dma_addr)) { in caam_cra_init()
3104 if (dma_mapping_error(dev, state->buf_dma)) { in buf_map_to_qm_sg()
3122 if (dma_mapping_error(dev, state->ctx_dma)) { in ctx_map_to_qm_sg()
3233 if (dma_mapping_error(ctx->dev, key_dma)) { in hash_digest_key()
3253 if (dma_mapping_error(ctx->dev, flc_dma)) { in hash_digest_key()
[all …]
key_gen.c 76 if (dma_mapping_error(jrdev, dma_addr)) { in gen_split_key()
/linux-5.19.10/drivers/net/ethernet/stmicro/stmmac/
chain_mode.c 39 if (dma_mapping_error(priv->device, des2)) in jumbo_frm()
57 if (dma_mapping_error(priv->device, des2)) in jumbo_frm()
70 if (dma_mapping_error(priv->device, des2)) in jumbo_frm()
ring_mode.c 43 if (dma_mapping_error(priv->device, des2)) in jumbo_frm()
64 if (dma_mapping_error(priv->device, des2)) in jumbo_frm()
78 if (dma_mapping_error(priv->device, des2)) in jumbo_frm()
/linux-5.19.10/drivers/infiniband/hw/qib/
qib_user_pages.c 64 if (dma_mapping_error(&hwdev->dev, phys)) in qib_map_page()
71 if (dma_mapping_error(&hwdev->dev, phys)) in qib_map_page()
/linux-5.19.10/drivers/spi/
spi-fsl-cpm.c 129 if (dma_mapping_error(dev, mspi->tx_dma)) { in fsl_spi_cpm_bufs()
140 if (dma_mapping_error(dev, mspi->rx_dma)) { in fsl_spi_cpm_bufs()
338 if (dma_mapping_error(dev, mspi->dma_dummy_tx)) { in fsl_spi_cpm_init()
345 if (dma_mapping_error(dev, mspi->dma_dummy_rx)) { in fsl_spi_cpm_init()
/linux-5.19.10/drivers/crypto/allwinner/sun8i-ss/
sun8i-ss-prng.c 102 if (dma_mapping_error(ss->dev, dma_iv)) { in sun8i_ss_prng_generate()
109 if (dma_mapping_error(ss->dev, dma_dst)) { in sun8i_ss_prng_generate()
sun8i-ss-hash.c 537 if (dma_mapping_error(ss->dev, addr_res)) { in sun8i_ss_hash_run()
589 err = dma_mapping_error(ss->dev, addr_xpad); in sun8i_ss_hash_run()
610 if (dma_mapping_error(ss->dev, addr_res)) { in sun8i_ss_hash_run()
616 err = dma_mapping_error(ss->dev, addr_xpad); in sun8i_ss_hash_run()
649 if (dma_mapping_error(ss->dev, addr_pad)) { in sun8i_ss_hash_run()
/linux-5.19.10/drivers/crypto/allwinner/sun8i-ce/
sun8i-ce-prng.c 98 if (dma_mapping_error(ce->dev, dma_iv)) { in sun8i_ce_prng_generate()
105 if (dma_mapping_error(ce->dev, dma_dst)) { in sun8i_ce_prng_generate()
sun8i-ce-trng.c 51 if (dma_mapping_error(ce->dev, dma_dst)) { in sun8i_ce_trng_read()
/linux-5.19.10/tools/virtio/linux/
dma-mapping.h 27 #define dma_mapping_error(...) (0) macro
/linux-5.19.10/drivers/net/ethernet/amd/xgbe/
xgbe-desc.c 319 if (dma_mapping_error(pdata->dev, pages_dma)) { in xgbe_alloc_pages()
560 if (dma_mapping_error(pdata->dev, skb_dma)) { in xgbe_map_tx_skb()
584 if (dma_mapping_error(pdata->dev, skb_dma)) { in xgbe_map_tx_skb()
616 if (dma_mapping_error(pdata->dev, skb_dma)) { in xgbe_map_tx_skb()
/linux-5.19.10/drivers/crypto/ccree/
cc_buffer_mgr.c 277 if (dma_mapping_error(dev, ret)) { in cc_map_sg()
393 if (dma_mapping_error(dev, req_ctx->gen_ctx.iv_dma_addr)) { in cc_map_cipher_request()
568 if (dma_mapping_error(dev, areq_ctx->gen_ctx.iv_dma_addr)) { in cc_aead_chain_iv()
979 if (dma_mapping_error(dev, dma_addr)) { in cc_map_aead_request()
993 if (dma_mapping_error(dev, dma_addr)) { in cc_map_aead_request()
1011 if (dma_mapping_error(dev, dma_addr)) { in cc_map_aead_request()
1021 if (dma_mapping_error(dev, dma_addr)) { in cc_map_aead_request()
1032 if (dma_mapping_error(dev, dma_addr)) { in cc_map_aead_request()
1044 if (dma_mapping_error(dev, dma_addr)) { in cc_map_aead_request()
/linux-5.19.10/drivers/misc/bcm-vk/
bcm_vk_sg.c 112 if (unlikely(dma_mapping_error(dev, sg_addr))) { in bcm_vk_dma_alloc()
125 if (unlikely(dma_mapping_error(dev, addr))) { in bcm_vk_dma_alloc()
/linux-5.19.10/drivers/net/ethernet/synopsys/
dwc-xlgmac-desc.c 352 if (dma_mapping_error(pdata->dev, pages_dma)) { in xlgmac_alloc_pages()
536 if (dma_mapping_error(pdata->dev, skb_dma)) { in xlgmac_map_tx_skb()
560 if (dma_mapping_error(pdata->dev, skb_dma)) { in xlgmac_map_tx_skb()
592 if (dma_mapping_error(pdata->dev, skb_dma)) { in xlgmac_map_tx_skb()
/linux-5.19.10/drivers/crypto/
mxs-dcp.c 182 dma_err = dma_mapping_error(sdcp->dev, desc_phys); in mxs_dcp_start_dma()
231 ret = dma_mapping_error(sdcp->dev, key_phys); in mxs_dcp_run_aes()
237 ret = dma_mapping_error(sdcp->dev, src_phys); in mxs_dcp_run_aes()
243 ret = dma_mapping_error(sdcp->dev, dst_phys); in mxs_dcp_run_aes()
559 ret = dma_mapping_error(sdcp->dev, buf_phys); in mxs_dcp_run_sha()
595 ret = dma_mapping_error(sdcp->dev, digest_phys); in mxs_dcp_run_sha()
/linux-5.19.10/drivers/net/wireless/intel/iwlwifi/pcie/
tx-gen2.c 205 if (dma_mapping_error(trans->dev, phys_addr)) { in iwl_pcie_gen2_enqueue_hcmd()
227 if (dma_mapping_error(trans->dev, phys_addr)) { in iwl_pcie_gen2_enqueue_hcmd()
tx.c 1104 if (dma_mapping_error(trans->dev, phys_addr)) { in iwl_pcie_enqueue_hcmd()
1128 if (dma_mapping_error(trans->dev, phys_addr)) { in iwl_pcie_enqueue_hcmd()
1256 if (unlikely(dma_mapping_error(trans->dev, tb_phys))) in iwl_fill_data_tbs()
1275 if (unlikely(dma_mapping_error(trans->dev, tb_phys))) in iwl_fill_data_tbs()
1383 if (unlikely(dma_mapping_error(trans->dev, hdr_tb_phys))) in iwl_fill_data_tbs_amsdu()
1403 if (unlikely(dma_mapping_error(trans->dev, tb_phys))) in iwl_fill_data_tbs_amsdu()
1554 if (unlikely(dma_mapping_error(trans->dev, tb1_phys))) in iwl_trans_pcie_tx()
/linux-5.19.10/drivers/soc/qcom/
qcom-geni-se.c 694 if (dma_mapping_error(wrapper->dev, *iova)) in geni_se_tx_dma_prep()
730 if (dma_mapping_error(wrapper->dev, *iova)) in geni_se_rx_dma_prep()
758 if (!dma_mapping_error(wrapper->dev, iova)) in geni_se_tx_dma_unprep()
775 if (!dma_mapping_error(wrapper->dev, iova)) in geni_se_rx_dma_unprep()
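
The negated checks in geni_se_tx_dma_unprep()/geni_se_rx_dma_unprep() above (and in the qat_dh_compute_value() unwind path near the top of the page) are the companion idiom: on the cleanup side, only handles that actually mapped get unmapped. A minimal sketch, again with illustrative names (example_unmap(), dev, handle, len):

#include <linux/dma-mapping.h>

/* Companion unwind sketch: skip the unmap for a handle whose mapping
 * failed, matching the !dma_mapping_error() checks listed above. */
static void example_unmap(struct device *dev, dma_addr_t handle, size_t len)
{
        if (!dma_mapping_error(dev, handle))
                dma_unmap_single(dev, handle, len, DMA_TO_DEVICE);
}
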
/linux-5.19.10/drivers/crypto/cavium/cpt/
cptvf_reqmanager.c 63 if (unlikely(dma_mapping_error(&pdev->dev, in setup_sgio_components()
191 if (dma_mapping_error(&pdev->dev, info->dptr_baddr)) { in setup_sgio_list()
210 if (dma_mapping_error(&pdev->dev, info->rptr_baddr)) { in setup_sgio_list()
455 if (dma_mapping_error(&pdev->dev, info->comp_baddr)) { in process_request()
/linux-5.19.10/drivers/net/ethernet/sfc/
tx_tso.c 205 return unlikely(dma_mapping_error(dma_dev, dma_addr)) ? -ENOMEM : 0; in tso_start()
213 if (likely(!dma_mapping_error(&efx->pci_dev->dev, st->unmap_addr))) { in tso_get_fragment()
