dma_mapping_error() call sites in linux-5.19.10, grouped by directory and file. Each entry gives the source line, the call as it appears in the code, and the enclosing function; "[all …]" marks listings truncated by the cross-reference tool.

/linux-5.19.10/drivers/crypto/qat/qat_common/

  qat_asym_algs.c
      307   if (unlikely(dma_mapping_error(dev, qat_req->in.dh.in.b)))  in qat_dh_compute_value()
      329   if (unlikely(dma_mapping_error(dev, qat_req->out.dh.r)))  in qat_dh_compute_value()
      338   if (unlikely(dma_mapping_error(dev, qat_req->phy_in)))  in qat_dh_compute_value()
      344   if (unlikely(dma_mapping_error(dev, qat_req->phy_out)))  in qat_dh_compute_value()
      360   if (!dma_mapping_error(dev, qat_req->phy_out))  in qat_dh_compute_value()
      365   if (!dma_mapping_error(dev, qat_req->phy_in))  in qat_dh_compute_value()
      370   if (!dma_mapping_error(dev, qat_req->out.dh.r))  in qat_dh_compute_value()
      376   if (!dma_mapping_error(dev, qat_req->in.dh.in.b))  in qat_dh_compute_value()
      712   if (unlikely(dma_mapping_error(dev, qat_req->in.rsa.enc.m)))  in qat_rsa_enc()
      727   if (unlikely(dma_mapping_error(dev, qat_req->out.rsa.enc.c)))  in qat_rsa_enc()
      [all …]
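The qat_dh_compute_value() hits show both halves of the usual idiom: the setup path tests each freshly obtained handle with unlikely(dma_mapping_error(...)) and bails out, while the unwind path re-tests with !dma_mapping_error(...) so that only buffers which were actually mapped get unmapped. A minimal sketch of that shape, using hypothetical names (my_req, in/out buffers) rather than the qat structures:

    #include <linux/device.h>
    #include <linux/dma-mapping.h>

    /* Hypothetical request carrying two DMA handles (not the qat layout). */
    struct my_req {
            dma_addr_t in_dma;
            dma_addr_t out_dma;
    };

    static int my_map_buffers(struct device *dev, struct my_req *req,
                              void *in, void *out, size_t len)
    {
            /* Start from the error cookie so the unwind below is safe even
             * for handles that never got mapped. */
            req->in_dma = DMA_MAPPING_ERROR;
            req->out_dma = DMA_MAPPING_ERROR;

            req->in_dma = dma_map_single(dev, in, len, DMA_TO_DEVICE);
            if (unlikely(dma_mapping_error(dev, req->in_dma)))
                    goto unmap;

            req->out_dma = dma_map_single(dev, out, len, DMA_FROM_DEVICE);
            if (unlikely(dma_mapping_error(dev, req->out_dma)))
                    goto unmap;

            return 0;

    unmap:
            /* Unmap only what was successfully mapped, as the qat unwind does. */
            if (!dma_mapping_error(dev, req->out_dma))
                    dma_unmap_single(dev, req->out_dma, len, DMA_FROM_DEVICE);
            if (!dma_mapping_error(dev, req->in_dma))
                    dma_unmap_single(dev, req->in_dma, len, DMA_TO_DEVICE);
            return -ENOMEM;
    }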
/linux-5.19.10/drivers/crypto/caam/

  caampkc.c
      345   if (dma_mapping_error(dev, edesc->sec4_sg_dma)) {  in rsa_edesc_alloc()
      409   if (dma_mapping_error(dev, pdb->n_dma)) {  in set_rsa_pub_pdb()
      415   if (dma_mapping_error(dev, pdb->e_dma)) {  in set_rsa_pub_pdb()
      454   if (dma_mapping_error(dev, pdb->n_dma)) {  in set_rsa_priv_f1_pdb()
      460   if (dma_mapping_error(dev, pdb->d_dma)) {  in set_rsa_priv_f1_pdb()
      503   if (dma_mapping_error(dev, pdb->d_dma)) {  in set_rsa_priv_f2_pdb()
      509   if (dma_mapping_error(dev, pdb->p_dma)) {  in set_rsa_priv_f2_pdb()
      515   if (dma_mapping_error(dev, pdb->q_dma)) {  in set_rsa_priv_f2_pdb()
      521   if (dma_mapping_error(dev, pdb->tmp1_dma)) {  in set_rsa_priv_f2_pdb()
      527   if (dma_mapping_error(dev, pdb->tmp2_dma)) {  in set_rsa_priv_f2_pdb()
      [all …]

  blob_gen.c
      89    if (dma_mapping_error(jrdev, dma_in)) {  in caam_process_blob()
      97    if (dma_mapping_error(jrdev, dma_out)) {  in caam_process_blob()

  caamhash.c
      148   if (dma_mapping_error(jrdev, state->ctx_dma)) {  in map_seq_out_ptr_ctx()
      171   if (dma_mapping_error(jrdev, state->buf_dma)) {  in buf_map_to_sec4_sg()
      189   if (dma_mapping_error(jrdev, state->ctx_dma)) {  in ctx_map_to_sec4_sg()
      377   if (dma_mapping_error(jrdev, key_dma)) {  in hash_digest_key()
      736   if (dma_mapping_error(ctx->jrdev, src_dma)) {  in ahash_edesc_add_src()
      906   if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) {  in ahash_update_ctx()
      979   if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) {  in ahash_final_ctx()
      1166  if (dma_mapping_error(jrdev, state->buf_dma)) {  in ahash_final_no_ctx()
      1271  if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) {  in ahash_update_no_ctx()
      1819  if (dma_mapping_error(ctx->jrdev, ctx->adata.key_dma)) {  in caam_hash_cra_init()
      [all …]

  caamalg_qi2.c
      489   if (dma_mapping_error(dev, iv_dma)) {  in aead_edesc_alloc()
      513   if (dma_mapping_error(dev, edesc->assoclen_dma)) {  in aead_edesc_alloc()
      534   if (dma_mapping_error(dev, qm_sg_dma)) {  in aead_edesc_alloc()
      1206  if (dma_mapping_error(dev, iv_dma)) {  in skcipher_edesc_alloc()
      1230  if (dma_mapping_error(dev, edesc->qm_sg_dma)) {  in skcipher_edesc_alloc()
      1589  if (dma_mapping_error(ctx->dev, dma_addr)) {  in caam_cra_init()
      3104  if (dma_mapping_error(dev, state->buf_dma)) {  in buf_map_to_qm_sg()
      3122  if (dma_mapping_error(dev, state->ctx_dma)) {  in ctx_map_to_qm_sg()
      3233  if (dma_mapping_error(ctx->dev, key_dma)) {  in hash_digest_key()
      3253  if (dma_mapping_error(ctx->dev, flc_dma)) {  in hash_digest_key()
      [all …]

  key_gen.c
      76    if (dma_mapping_error(jrdev, dma_addr)) {  in gen_split_key()
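The caampkc.c PDB setters map several RSA key components one after another, checking each handle as it is produced; a failure partway through has to undo the earlier mappings. A common way to write that staged unwind is a ladder of goto labels. The sketch below assumes hypothetical field names and sizes, not the real caam PDB layout:

    #include <linux/device.h>
    #include <linux/dma-mapping.h>

    /* Hypothetical descriptor with two DMA-mapped key components. */
    struct my_pdb {
            dma_addr_t n_dma;
            dma_addr_t e_dma;
    };

    static int my_set_pdb(struct device *dev, struct my_pdb *pdb,
                          u8 *n, size_t n_sz, u8 *e, size_t e_sz)
    {
            pdb->n_dma = dma_map_single(dev, n, n_sz, DMA_TO_DEVICE);
            if (dma_mapping_error(dev, pdb->n_dma)) {
                    dev_err(dev, "unable to map modulus memory\n");
                    return -ENOMEM;
            }

            pdb->e_dma = dma_map_single(dev, e, e_sz, DMA_TO_DEVICE);
            if (dma_mapping_error(dev, pdb->e_dma)) {
                    dev_err(dev, "unable to map exponent memory\n");
                    goto unmap_n;
            }

            return 0;

    unmap_n:
            /* Undo earlier mappings in reverse order before failing. */
            dma_unmap_single(dev, pdb->n_dma, n_sz, DMA_TO_DEVICE);
            return -ENOMEM;
    }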
/linux-5.19.10/drivers/net/ethernet/stmicro/stmmac/

  chain_mode.c
      39    if (dma_mapping_error(priv->device, des2))  in jumbo_frm()
      57    if (dma_mapping_error(priv->device, des2))  in jumbo_frm()
      70    if (dma_mapping_error(priv->device, des2))  in jumbo_frm()

  ring_mode.c
      43    if (dma_mapping_error(priv->device, des2))  in jumbo_frm()
      64    if (dma_mapping_error(priv->device, des2))  in jumbo_frm()
      78    if (dma_mapping_error(priv->device, des2))  in jumbo_frm()
/linux-5.19.10/drivers/infiniband/hw/qib/

  qib_user_pages.c
      64    if (dma_mapping_error(&hwdev->dev, phys))  in qib_map_page()
      71    if (dma_mapping_error(&hwdev->dev, phys))  in qib_map_page()
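qib_map_page() maps a single struct page for a PCI device, so the handle here comes from dma_map_page() rather than dma_map_single(), but it is validated the same way. A minimal sketch of that pairing with hypothetical parameters (the retry behaviour implied by the two hits in the same function is omitted):

    #include <linux/dma-mapping.h>
    #include <linux/pci.h>

    /* Map one page for device-to-memory DMA; return -ENOMEM on failure. */
    static int my_map_page(struct pci_dev *hwdev, struct page *page,
                           unsigned long offset, size_t size,
                           dma_addr_t *daddr)
    {
            dma_addr_t phys;

            phys = dma_map_page(&hwdev->dev, page, offset, size,
                                DMA_FROM_DEVICE);
            if (dma_mapping_error(&hwdev->dev, phys))
                    return -ENOMEM;

            *daddr = phys;
            return 0;
    }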
/linux-5.19.10/drivers/spi/

  spi-fsl-cpm.c
      129   if (dma_mapping_error(dev, mspi->tx_dma)) {  in fsl_spi_cpm_bufs()
      140   if (dma_mapping_error(dev, mspi->rx_dma)) {  in fsl_spi_cpm_bufs()
      338   if (dma_mapping_error(dev, mspi->dma_dummy_tx)) {  in fsl_spi_cpm_init()
      345   if (dma_mapping_error(dev, mspi->dma_dummy_rx)) {  in fsl_spi_cpm_init()
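Here the same check covers two mapping lifetimes: fsl_spi_cpm_bufs() maps per-transfer buffers, while fsl_spi_cpm_init() maps dummy TX/RX buffers once, presumably keeping those handles until teardown. A rough sketch of the init-time, long-lived variant, with hypothetical structure and buffer names:

    #include <linux/device.h>
    #include <linux/dma-mapping.h>
    #include <linux/slab.h>

    #define MY_DUMMY_LEN 128

    /* Hypothetical controller state holding a long-lived dummy mapping. */
    struct my_ctrl {
            void *dummy_tx;
            dma_addr_t dma_dummy_tx;
    };

    static int my_ctrl_init_dma(struct device *dev, struct my_ctrl *c)
    {
            c->dummy_tx = kzalloc(MY_DUMMY_LEN, GFP_KERNEL);
            if (!c->dummy_tx)
                    return -ENOMEM;

            /* Mapped once at init; unmapped only when the controller goes away. */
            c->dma_dummy_tx = dma_map_single(dev, c->dummy_tx, MY_DUMMY_LEN,
                                             DMA_TO_DEVICE);
            if (dma_mapping_error(dev, c->dma_dummy_tx)) {
                    dev_err(dev, "unable to map dummy tx buffer\n");
                    kfree(c->dummy_tx);
                    return -ENOMEM;
            }
            return 0;
    }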
/linux-5.19.10/drivers/crypto/allwinner/sun8i-ss/

  sun8i-ss-prng.c
      102   if (dma_mapping_error(ss->dev, dma_iv)) {  in sun8i_ss_prng_generate()
      109   if (dma_mapping_error(ss->dev, dma_dst)) {  in sun8i_ss_prng_generate()

  sun8i-ss-hash.c
      537   if (dma_mapping_error(ss->dev, addr_res)) {  in sun8i_ss_hash_run()
      589   err = dma_mapping_error(ss->dev, addr_xpad);  in sun8i_ss_hash_run()
      610   if (dma_mapping_error(ss->dev, addr_res)) {  in sun8i_ss_hash_run()
      616   err = dma_mapping_error(ss->dev, addr_xpad);  in sun8i_ss_hash_run()
      649   if (dma_mapping_error(ss->dev, addr_pad)) {  in sun8i_ss_hash_run()
/linux-5.19.10/drivers/crypto/allwinner/sun8i-ce/

  sun8i-ce-prng.c
      98    if (dma_mapping_error(ce->dev, dma_iv)) {  in sun8i_ce_prng_generate()
      105   if (dma_mapping_error(ce->dev, dma_dst)) {  in sun8i_ce_prng_generate()

  sun8i-ce-trng.c
      51    if (dma_mapping_error(ce->dev, dma_dst)) {  in sun8i_ce_trng_read()
/linux-5.19.10/tools/virtio/linux/

  dma-mapping.h
      27    #define dma_mapping_error(...) (0)  (macro definition)
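The userspace virtio test harness replaces the DMA API with stubs; here dma_mapping_error() is a variadic macro that always expands to 0, so under the harness no mapping is ever reported as failed and the drivers' error branches become dead code. Illustration (only the listed macro is reproduced; the rest of the header is not shown):

    /* tools/virtio/linux/dma-mapping.h, line 27 (as listed above) */
    #define dma_mapping_error(...) (0)

    /*
     * With this stub in place, a driver check such as
     *
     *     if (dma_mapping_error(dev, addr))
     *             return -ENOMEM;
     *
     * compiles to if (0) and is optimised away, which is what lets the
     * virtio ring code be exercised in userspace without a real DMA
     * mapping layer behind it.
     */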
/linux-5.19.10/drivers/net/ethernet/amd/xgbe/

  xgbe-desc.c
      319   if (dma_mapping_error(pdata->dev, pages_dma)) {  in xgbe_alloc_pages()
      560   if (dma_mapping_error(pdata->dev, skb_dma)) {  in xgbe_map_tx_skb()
      584   if (dma_mapping_error(pdata->dev, skb_dma)) {  in xgbe_map_tx_skb()
      616   if (dma_mapping_error(pdata->dev, skb_dma)) {  in xgbe_map_tx_skb()
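The repeated skb_dma checks in xgbe_map_tx_skb() correspond to mapping an skb in pieces on the transmit path; the listing shows only the error checks, so the sketch below assumes the common pattern of dma_map_single() for the linear head and skb_frag_dma_map() for a paged fragment, with hypothetical names and only the first fragment handled:

    #include <linux/dma-mapping.h>
    #include <linux/skbuff.h>

    static int my_map_tx_skb(struct device *dev, struct sk_buff *skb)
    {
            dma_addr_t head_dma, frag_dma;
            skb_frag_t *frag;

            head_dma = dma_map_single(dev, skb->data, skb_headlen(skb),
                                      DMA_TO_DEVICE);
            if (dma_mapping_error(dev, head_dma))
                    return -ENOMEM;

            if (skb_shinfo(skb)->nr_frags) {
                    frag = &skb_shinfo(skb)->frags[0];
                    frag_dma = skb_frag_dma_map(dev, frag, 0,
                                                skb_frag_size(frag),
                                                DMA_TO_DEVICE);
                    if (dma_mapping_error(dev, frag_dma)) {
                            /* Roll back the head mapping before failing. */
                            dma_unmap_single(dev, head_dma,
                                             skb_headlen(skb),
                                             DMA_TO_DEVICE);
                            return -ENOMEM;
                    }
            }

            /* A real driver would record head_dma/frag_dma in its TX ring here. */
            return 0;
    }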
/linux-5.19.10/drivers/crypto/ccree/

  cc_buffer_mgr.c
      277   if (dma_mapping_error(dev, ret)) {  in cc_map_sg()
      393   if (dma_mapping_error(dev, req_ctx->gen_ctx.iv_dma_addr)) {  in cc_map_cipher_request()
      568   if (dma_mapping_error(dev, areq_ctx->gen_ctx.iv_dma_addr)) {  in cc_aead_chain_iv()
      979   if (dma_mapping_error(dev, dma_addr)) {  in cc_map_aead_request()
      993   if (dma_mapping_error(dev, dma_addr)) {  in cc_map_aead_request()
      1011  if (dma_mapping_error(dev, dma_addr)) {  in cc_map_aead_request()
      1021  if (dma_mapping_error(dev, dma_addr)) {  in cc_map_aead_request()
      1032  if (dma_mapping_error(dev, dma_addr)) {  in cc_map_aead_request()
      1044  if (dma_mapping_error(dev, dma_addr)) {  in cc_map_aead_request()
/linux-5.19.10/drivers/misc/bcm-vk/

  bcm_vk_sg.c
      112   if (unlikely(dma_mapping_error(dev, sg_addr))) {  in bcm_vk_dma_alloc()
      125   if (unlikely(dma_mapping_error(dev, addr))) {  in bcm_vk_dma_alloc()
/linux-5.19.10/drivers/net/ethernet/synopsys/

  dwc-xlgmac-desc.c
      352   if (dma_mapping_error(pdata->dev, pages_dma)) {  in xlgmac_alloc_pages()
      536   if (dma_mapping_error(pdata->dev, skb_dma)) {  in xlgmac_map_tx_skb()
      560   if (dma_mapping_error(pdata->dev, skb_dma)) {  in xlgmac_map_tx_skb()
      592   if (dma_mapping_error(pdata->dev, skb_dma)) {  in xlgmac_map_tx_skb()
/linux-5.19.10/drivers/crypto/

  mxs-dcp.c
      182   dma_err = dma_mapping_error(sdcp->dev, desc_phys);  in mxs_dcp_start_dma()
      231   ret = dma_mapping_error(sdcp->dev, key_phys);  in mxs_dcp_run_aes()
      237   ret = dma_mapping_error(sdcp->dev, src_phys);  in mxs_dcp_run_aes()
      243   ret = dma_mapping_error(sdcp->dev, dst_phys);  in mxs_dcp_run_aes()
      559   ret = dma_mapping_error(sdcp->dev, buf_phys);  in mxs_dcp_run_sha()
      595   ret = dma_mapping_error(sdcp->dev, digest_phys);  in mxs_dcp_run_sha()
/linux-5.19.10/drivers/net/wireless/intel/iwlwifi/pcie/

  tx-gen2.c
      205   if (dma_mapping_error(trans->dev, phys_addr)) {  in iwl_pcie_gen2_enqueue_hcmd()
      227   if (dma_mapping_error(trans->dev, phys_addr)) {  in iwl_pcie_gen2_enqueue_hcmd()

  tx.c
      1104  if (dma_mapping_error(trans->dev, phys_addr)) {  in iwl_pcie_enqueue_hcmd()
      1128  if (dma_mapping_error(trans->dev, phys_addr)) {  in iwl_pcie_enqueue_hcmd()
      1256  if (unlikely(dma_mapping_error(trans->dev, tb_phys)))  in iwl_fill_data_tbs()
      1275  if (unlikely(dma_mapping_error(trans->dev, tb_phys)))  in iwl_fill_data_tbs()
      1383  if (unlikely(dma_mapping_error(trans->dev, hdr_tb_phys)))  in iwl_fill_data_tbs_amsdu()
      1403  if (unlikely(dma_mapping_error(trans->dev, tb_phys)))  in iwl_fill_data_tbs_amsdu()
      1554  if (unlikely(dma_mapping_error(trans->dev, tb1_phys)))  in iwl_trans_pcie_tx()
/linux-5.19.10/drivers/soc/qcom/

  qcom-geni-se.c
      694   if (dma_mapping_error(wrapper->dev, *iova))  in geni_se_tx_dma_prep()
      730   if (dma_mapping_error(wrapper->dev, *iova))  in geni_se_rx_dma_prep()
      758   if (!dma_mapping_error(wrapper->dev, iova))  in geni_se_tx_dma_unprep()
      775   if (!dma_mapping_error(wrapper->dev, iova))  in geni_se_rx_dma_unprep()
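The geni_se prep helpers return the mapping through an out-parameter and the matching unprep helpers only unmap when !dma_mapping_error() confirms the handle is valid, which suggests the unprep side tolerates being handed the result of a failed prep. A small sketch of that prep/unprep pairing with hypothetical names:

    #include <linux/device.h>
    #include <linux/dma-mapping.h>

    static int my_dma_prep(struct device *dev, void *buf, size_t len,
                           dma_addr_t *iova)
    {
            *iova = dma_map_single(dev, buf, len, DMA_TO_DEVICE);
            if (dma_mapping_error(dev, *iova))
                    return -ENOMEM;
            return 0;
    }

    static void my_dma_unprep(struct device *dev, dma_addr_t iova, size_t len)
    {
            /* Tolerate a failed mapping; unmap only valid handles. */
            if (!dma_mapping_error(dev, iova))
                    dma_unmap_single(dev, iova, len, DMA_TO_DEVICE);
    }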
/linux-5.19.10/drivers/crypto/cavium/cpt/

  cptvf_reqmanager.c
      63    if (unlikely(dma_mapping_error(&pdev->dev,  in setup_sgio_components()
      191   if (dma_mapping_error(&pdev->dev, info->dptr_baddr)) {  in setup_sgio_list()
      210   if (dma_mapping_error(&pdev->dev, info->rptr_baddr)) {  in setup_sgio_list()
      455   if (dma_mapping_error(&pdev->dev, info->comp_baddr)) {  in process_request()
/linux-5.19.10/drivers/net/ethernet/sfc/

  tx_tso.c
      205   return unlikely(dma_mapping_error(dma_dev, dma_addr)) ? -ENOMEM : 0;  in tso_start()
      213   if (likely(!dma_mapping_error(&efx->pci_dev->dev, st->unmap_addr))) {  in tso_get_fragment()
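The tso_start() hit leans on dma_mapping_error() being usable both as a predicate and as an errno: it returns 0 for a good handle and -ENOMEM when the handle equals DMA_MAPPING_ERROR, so the one-line ternary here and the assignment style seen above in mxs-dcp.c ("ret = dma_mapping_error(...)") are two ways of propagating the same failure. A tiny sketch of both, with hypothetical names:

    #include <linux/device.h>
    #include <linux/dma-mapping.h>

    /* Ternary style (as in tso_start()): map and report -ENOMEM inline. */
    static int my_map_header(struct device *dma_dev, void *hdr, size_t len,
                             dma_addr_t *dma_addr)
    {
            *dma_addr = dma_map_single(dma_dev, hdr, len, DMA_TO_DEVICE);
            return unlikely(dma_mapping_error(dma_dev, *dma_addr)) ? -ENOMEM : 0;
    }

    /* Assignment style (as in mxs-dcp.c): forward the helper's own errno. */
    static int my_map_buf(struct device *dev, void *buf, size_t len,
                          dma_addr_t *phys)
    {
            *phys = dma_map_single(dev, buf, len, DMA_BIDIRECTIONAL);
            return dma_mapping_error(dev, *phys);
    }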