Lines matching refs: reqctx

129 static inline void chcr_init_hctx_per_wr(struct chcr_ahash_req_ctx *reqctx)  in chcr_init_hctx_per_wr()  argument
131 memset(&reqctx->hctx_wr, 0, sizeof(struct chcr_hctx_per_wr)); in chcr_init_hctx_per_wr()
213 struct chcr_aead_reqctx *reqctx = aead_request_ctx(req); in chcr_handle_aead_resp() local
218 if (reqctx->verify == VERIFY_SW) { in chcr_handle_aead_resp()
220 reqctx->verify = VERIFY_HW; in chcr_handle_aead_resp()
694 struct chcr_skcipher_req_ctx *reqctx = skcipher_request_ctx(req); in chcr_cipher_fallback() local
697 skcipher_request_set_tfm(&reqctx->fallback_req, cipher); in chcr_cipher_fallback()
698 skcipher_request_set_callback(&reqctx->fallback_req, req->base.flags, in chcr_cipher_fallback()
700 skcipher_request_set_crypt(&reqctx->fallback_req, req->src, req->dst, in chcr_cipher_fallback()
703 err = op_type ? crypto_skcipher_decrypt(&reqctx->fallback_req) : in chcr_cipher_fallback()
704 crypto_skcipher_encrypt(&reqctx->fallback_req); in chcr_cipher_fallback()
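
A minimal sketch of the software-fallback pattern these chcr_cipher_fallback() fragments follow, assuming a per-request context that embeds a struct skcipher_request for the fallback tfm. The my_* names are illustrative; only the crypto_skcipher request API mirrors the listing:

#include <crypto/skcipher.h>

/* Hypothetical per-request context; the embedded sub-request should come
 * last because the fallback tfm's own request context is appended to it. */
struct my_req_ctx {
	struct skcipher_request fallback_req;
};

static int my_cipher_fallback(struct crypto_skcipher *fallback,
			      struct skcipher_request *req,
			      struct my_req_ctx *reqctx,
			      bool decrypt)
{
	/* Point the sub-request at the software implementation. */
	skcipher_request_set_tfm(&reqctx->fallback_req, fallback);
	/* Reuse the caller's flags and completion callback. */
	skcipher_request_set_callback(&reqctx->fallback_req, req->base.flags,
				      req->base.complete, req->base.data);
	/* Same src/dst/length/IV as the original request. */
	skcipher_request_set_crypt(&reqctx->fallback_req, req->src, req->dst,
				   req->cryptlen, req->iv);

	return decrypt ? crypto_skcipher_decrypt(&reqctx->fallback_req) :
			 crypto_skcipher_encrypt(&reqctx->fallback_req);
}
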
721 struct chcr_aead_reqctx *reqctx = aead_request_ctx(aead_req); in get_qidxs() local
722 *txqidx = reqctx->txqidx; in get_qidxs()
723 *rxqidx = reqctx->rxqidx; in get_qidxs()
730 struct chcr_skcipher_req_ctx *reqctx = in get_qidxs() local
732 *txqidx = reqctx->txqidx; in get_qidxs()
733 *rxqidx = reqctx->rxqidx; in get_qidxs()
740 struct chcr_ahash_req_ctx *reqctx = in get_qidxs() local
742 *txqidx = reqctx->txqidx; in get_qidxs()
743 *rxqidx = reqctx->rxqidx; in get_qidxs()
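
The get_qidxs() fragments suggest a dispatch on the crypto request type, with each request-context flavour carrying its own txqidx/rxqidx pair. A sketch of that dispatch, using a single illustrative context struct (my_qidx_ctx) in place of the driver's three distinct request-context types:

#include <linux/kernel.h>
#include <linux/crypto.h>
#include <crypto/aead.h>
#include <crypto/skcipher.h>
#include <crypto/hash.h>

/* Illustrative stand-in for the tx/rx queue fields shared by the
 * AEAD, skcipher and ahash request contexts. */
struct my_qidx_ctx {
	unsigned int txqidx;
	unsigned int rxqidx;
};

static void my_get_qidxs(struct crypto_async_request *req,
			 unsigned int *txqidx, unsigned int *rxqidx)
{
	struct my_qidx_ctx *reqctx;

	switch (crypto_tfm_alg_type(req->tfm)) {
	case CRYPTO_ALG_TYPE_AEAD:
		reqctx = aead_request_ctx(
			container_of(req, struct aead_request, base));
		break;
	case CRYPTO_ALG_TYPE_SKCIPHER:
		reqctx = skcipher_request_ctx(
			container_of(req, struct skcipher_request, base));
		break;
	case CRYPTO_ALG_TYPE_AHASH:
	default:
		reqctx = ahash_request_ctx(
			container_of(req, struct ahash_request, base));
		break;
	}
	*txqidx = reqctx->txqidx;
	*rxqidx = reqctx->rxqidx;
}
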
808 struct chcr_skcipher_req_ctx *reqctx = in create_cipher_wr() local
817 unsigned int rx_channel_id = reqctx->rxqidx / ctx->rxq_perchan; in create_cipher_wr()
820 nents = sg_nents_xlen(reqctx->dstsg, wrparam->bytes, CHCR_DST_SG_SIZE, in create_cipher_wr()
821 reqctx->dst_ofst); in create_cipher_wr()
825 nents = sg_nents_xlen(reqctx->srcsg, wrparam->bytes, in create_cipher_wr()
826 CHCR_SRC_SG_SIZE, reqctx->src_ofst); in create_cipher_wr()
827 temp = reqctx->imm ? roundup(wrparam->bytes, 16) : in create_cipher_wr()
846 chcr_req->sec_cpl.seqno_numivs = FILL_SEC_CPL_SCMD0_SEQNO(reqctx->op, 0, in create_cipher_wr()
853 if ((reqctx->op == CHCR_DECRYPT_OP) && in create_cipher_wr()
881 + (reqctx->imm ? (wrparam->bytes) : 0); in create_cipher_wr()
882 create_wreq(c_ctx(tfm), chcr_req, &(wrparam->req->base), reqctx->imm, 0, in create_cipher_wr()
885 reqctx->skb = skb; in create_cipher_wr()
887 if (reqctx->op && (ablkctx->ciph_mode == in create_cipher_wr()
891 reqctx->processed + wrparam->bytes - AES_BLOCK_SIZE); in create_cipher_wr()
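
Several of the work-request builders (create_cipher_wr() here, and the AEAD builders further down) gate on reqctx->imm: payloads small enough to fit in the work request are copied inline and padded to 16 bytes, larger ones are referenced through a scatter/gather list. A rough sketch of that decision; the names are illustrative and the SGE limit is passed in rather than taken from the real SGE_MAX_WR_LEN:

#include <linux/kernel.h>
#include <linux/types.h>

/* Decide whether the payload can travel as immediate data in the WR. */
static bool demo_use_immediate(unsigned int transhdr_len,
			       unsigned int payload_len,
			       unsigned int sge_max_wr_len)
{
	return transhdr_len + payload_len <= sge_max_wr_len;
}

/* Immediate data is padded to a 16-byte boundary inside the WR;
 * SGL-mode payloads contribute nothing to the inline length. */
static unsigned int demo_imm_len(unsigned int payload_len, bool imm)
{
	return imm ? roundup(payload_len, 16) : 0;
}
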
1059 struct chcr_skcipher_req_ctx *reqctx = skcipher_request_ctx(req); in chcr_update_tweak() local
1064 int round = reqctx->last_req_len / AES_BLOCK_SIZE; in chcr_update_tweak()
1067 memcpy(iv, reqctx->iv, AES_BLOCK_SIZE); in chcr_update_tweak()
1099 struct chcr_skcipher_req_ctx *reqctx = skcipher_request_ctx(req); in chcr_update_cipher_iv() local
1104 ctr_add_iv(iv, req->iv, (reqctx->processed / in chcr_update_cipher_iv()
1107 *(__be32 *)(reqctx->iv + CTR_RFC3686_NONCE_SIZE + in chcr_update_cipher_iv()
1108 CTR_RFC3686_IV_SIZE) = cpu_to_be32((reqctx->processed / in chcr_update_cipher_iv()
1113 if (reqctx->op) in chcr_update_cipher_iv()
1133 struct chcr_skcipher_req_ctx *reqctx = skcipher_request_ctx(req); in chcr_final_cipher_iv() local
1138 ctr_add_iv(iv, req->iv, DIV_ROUND_UP(reqctx->processed, in chcr_final_cipher_iv()
1141 if (!reqctx->partial_req) in chcr_final_cipher_iv()
1142 memcpy(iv, reqctx->iv, AES_BLOCK_SIZE); in chcr_final_cipher_iv()
1148 if (!reqctx->op) in chcr_final_cipher_iv()
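
The chcr_update_cipher_iv()/chcr_final_cipher_iv() fragments advance the CTR IV by the number of AES blocks already processed (ctr_add_iv(), plus a separate 32-bit counter for RFC 3686 mode). A self-contained sketch of the big-endian counter addition; demo_ctr_add_iv() is an illustrative stand-in, not the driver's helper:

#include <linux/string.h>
#include <linux/types.h>
#include <crypto/aes.h>

/* dstiv = srciv + add, treating the IV as a 128-bit big-endian counter. */
static void demo_ctr_add_iv(u8 *dstiv, const u8 *srciv, u32 add)
{
	u64 carry = add;
	int i;

	memcpy(dstiv, srciv, AES_BLOCK_SIZE);
	/* Propagate the addition from the least significant byte upward. */
	for (i = AES_BLOCK_SIZE - 1; i >= 0 && carry; i--) {
		carry += dstiv[i];
		dstiv[i] = carry & 0xff;
		carry >>= 8;
	}
}

Calling this with reqctx->processed / AES_BLOCK_SIZE as the increment reproduces the counter value reached after the bytes processed so far, which matches how the fragments above derive the IV for the next chunk.
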
1159 struct chcr_skcipher_req_ctx *reqctx = skcipher_request_ctx(req); in chcr_handle_cipher_resp() local
1173 if (req->cryptlen == reqctx->processed) { in chcr_handle_cipher_resp()
1180 if (!reqctx->imm) { in chcr_handle_cipher_resp()
1181 bytes = chcr_sg_ent_in_wr(reqctx->srcsg, reqctx->dstsg, 0, in chcr_handle_cipher_resp()
1183 reqctx->src_ofst, reqctx->dst_ofst); in chcr_handle_cipher_resp()
1184 if ((bytes + reqctx->processed) >= req->cryptlen) in chcr_handle_cipher_resp()
1185 bytes = req->cryptlen - reqctx->processed; in chcr_handle_cipher_resp()
1190 bytes = req->cryptlen - reqctx->processed; in chcr_handle_cipher_resp()
1192 err = chcr_update_cipher_iv(req, fw6_pld, reqctx->iv); in chcr_handle_cipher_resp()
1199 memcpy(req->iv, reqctx->init_iv, IV); in chcr_handle_cipher_resp()
1202 reqctx->op); in chcr_handle_cipher_resp()
1208 bytes = adjust_ctr_overflow(reqctx->iv, bytes); in chcr_handle_cipher_resp()
1209 wrparam.qid = u_ctx->lldi.rxq_ids[reqctx->rxqidx]; in chcr_handle_cipher_resp()
1219 set_wr_txq(skb, CPL_PRIORITY_DATA, reqctx->txqidx); in chcr_handle_cipher_resp()
1221 reqctx->last_req_len = bytes; in chcr_handle_cipher_resp()
1222 reqctx->processed += bytes; in chcr_handle_cipher_resp()
1247 struct chcr_skcipher_req_ctx *reqctx = skcipher_request_ctx(req); in process_cipher() local
1256 reqctx->processed = 0; in process_cipher()
1257 reqctx->partial_req = 0; in process_cipher()
1290 reqctx->imm = (transhdr_len + IV + req->cryptlen) <= in process_cipher()
1295 reqctx->imm = 0; in process_cipher()
1298 if (!reqctx->imm) { in process_cipher()
1302 if ((bytes + reqctx->processed) >= req->cryptlen) in process_cipher()
1303 bytes = req->cryptlen - reqctx->processed; in process_cipher()
1313 memcpy(reqctx->iv, ablkctx->nonce, CTR_RFC3686_NONCE_SIZE); in process_cipher()
1314 memcpy(reqctx->iv + CTR_RFC3686_NONCE_SIZE, req->iv, in process_cipher()
1318 *(__be32 *)(reqctx->iv + CTR_RFC3686_NONCE_SIZE + in process_cipher()
1320 memcpy(reqctx->init_iv, reqctx->iv, IV); in process_cipher()
1324 memcpy(reqctx->iv, req->iv, IV); in process_cipher()
1325 memcpy(reqctx->init_iv, req->iv, IV); in process_cipher()
1334 reqctx->iv : req->iv, in process_cipher()
1338 reqctx->op = op_type; in process_cipher()
1339 reqctx->srcsg = req->src; in process_cipher()
1340 reqctx->dstsg = req->dst; in process_cipher()
1341 reqctx->src_ofst = 0; in process_cipher()
1342 reqctx->dst_ofst = 0; in process_cipher()
1351 reqctx->processed = bytes; in process_cipher()
1352 reqctx->last_req_len = bytes; in process_cipher()
1353 reqctx->partial_req = !!(req->cryptlen - reqctx->processed); in process_cipher()
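
process_cipher() and chcr_handle_cipher_resp() together implement chunked submission: the request context remembers how much of req->cryptlen has gone to hardware, and each completion either finishes the request or accounts for the next clamped chunk. A sketch of that bookkeeping with illustrative names:

#include <linux/types.h>

struct demo_cipher_reqctx {
	unsigned int processed;		/* bytes already handed to hardware */
	unsigned int last_req_len;	/* size of the chunk currently in flight */
	bool partial_req;		/* true while chunks remain to submit */
};

/* Submit path: record the first chunk of a @cryptlen-byte request. */
static void demo_submit_first_chunk(struct demo_cipher_reqctx *reqctx,
				    unsigned int cryptlen, unsigned int bytes)
{
	if (bytes > cryptlen)
		bytes = cryptlen;
	reqctx->processed = bytes;
	reqctx->last_req_len = bytes;
	reqctx->partial_req = !!(cryptlen - reqctx->processed);
}

/* Completion path: returns true when the whole request is done,
 * otherwise accounts for the next chunk to be posted. */
static bool demo_account_next_chunk(struct demo_cipher_reqctx *reqctx,
				    unsigned int cryptlen, unsigned int bytes)
{
	if (cryptlen == reqctx->processed)
		return true;

	if (bytes + reqctx->processed >= cryptlen)
		bytes = cryptlen - reqctx->processed;
	reqctx->last_req_len = bytes;
	reqctx->processed += bytes;
	/* ...build and post the next work request for @bytes here... */
	return false;
}
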
1365 struct chcr_skcipher_req_ctx *reqctx = skcipher_request_ctx(req); in chcr_aes_encrypt() local
1374 reqctx->txqidx = cpu % ctx->ntxq; in chcr_aes_encrypt()
1375 reqctx->rxqidx = cpu % ctx->nrxq; in chcr_aes_encrypt()
1382 reqctx->txqidx) && in chcr_aes_encrypt()
1388 err = process_cipher(req, u_ctx->lldi.rxq_ids[reqctx->rxqidx], in chcr_aes_encrypt()
1393 set_wr_txq(skb, CPL_PRIORITY_DATA, reqctx->txqidx); in chcr_aes_encrypt()
1398 reqctx->partial_req = 1; in chcr_aes_encrypt()
1410 struct chcr_skcipher_req_ctx *reqctx = skcipher_request_ctx(req); in chcr_aes_decrypt() local
1419 reqctx->txqidx = cpu % ctx->ntxq; in chcr_aes_decrypt()
1420 reqctx->rxqidx = cpu % ctx->nrxq; in chcr_aes_decrypt()
1428 reqctx->txqidx) && in chcr_aes_decrypt()
1431 err = process_cipher(req, u_ctx->lldi.rxq_ids[reqctx->rxqidx], in chcr_aes_decrypt()
1436 set_wr_txq(skb, CPL_PRIORITY_DATA, reqctx->txqidx); in chcr_aes_decrypt()
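
chcr_aes_encrypt() and chcr_aes_decrypt() (and the hash/AEAD entry points below) pick per-request queue indices by folding the submitting CPU over the number of configured queues. A sketch of that selection; the get_cpu()/put_cpu() bracketing is an assumption about how the CPU id is obtained, while the modulo mapping mirrors the fragments:

#include <linux/smp.h>

struct demo_chan_ctx {
	unsigned int ntxq;
	unsigned int nrxq;
};

static void demo_pick_queues(const struct demo_chan_ctx *ctx,
			     unsigned int *txqidx, unsigned int *rxqidx)
{
	unsigned int cpu = get_cpu();	/* pin the CPU id while we read it */

	*txqidx = cpu % ctx->ntxq;
	*rxqidx = cpu % ctx->nrxq;
	put_cpu();
}
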
2019 struct chcr_ahash_req_ctx *reqctx = ahash_request_ctx(req); in chcr_ahash_continue() local
2020 struct chcr_hctx_per_wr *hctx_wr = &reqctx->hctx_wr; in chcr_ahash_continue()
2031 reqctx->txqidx = cpu % ctx->ntxq; in chcr_ahash_continue()
2032 reqctx->rxqidx = cpu % ctx->nrxq; in chcr_ahash_continue()
2064 params.scmd1 = reqctx->data_len + params.sg_len; in chcr_ahash_continue()
2067 reqctx->data_len += params.sg_len; in chcr_ahash_continue()
2075 set_wr_txq(skb, CPL_PRIORITY_DATA, reqctx->txqidx); in chcr_ahash_continue()
2086 struct chcr_ahash_req_ctx *reqctx = ahash_request_ctx(req); in chcr_handle_ahash_resp() local
2087 struct chcr_hctx_per_wr *hctx_wr = &reqctx->hctx_wr; in chcr_handle_ahash_resp()
2107 if (hctx_wr->isfinal || ((hctx_wr->processed + reqctx->reqlen) == in chcr_handle_ahash_resp()
2114 memcpy(reqctx->partial_hash, in chcr_handle_ahash_resp()
2121 memcpy(reqctx->partial_hash, input + sizeof(struct cpl_fw6_pld), in chcr_handle_ahash_resp()
2365 struct chcr_aead_reqctx *reqctx = aead_request_ctx(req); in chcr_aead_common_exit() local
2369 chcr_aead_dma_unmap(&u_ctx->lldi.pdev->dev, req, reqctx->op); in chcr_aead_common_exit()
2376 struct chcr_aead_reqctx *reqctx = aead_request_ctx(req); in chcr_aead_common_init() local
2383 if (reqctx->op && req->cryptlen < authsize) in chcr_aead_common_init()
2385 if (reqctx->b0_len) in chcr_aead_common_init()
2386 reqctx->scratch_pad = reqctx->iv + IV; in chcr_aead_common_init()
2388 reqctx->scratch_pad = NULL; in chcr_aead_common_init()
2391 reqctx->op); in chcr_aead_common_init()
2441 struct chcr_aead_reqctx *reqctx = aead_request_ctx(req); in create_authenc_wr() local
2456 unsigned int rx_channel_id = reqctx->rxqidx / ctx->rxq_perchan; in create_authenc_wr()
2462 reqctx->b0_len = 0; in create_authenc_wr()
2472 (reqctx->op ? -authsize : authsize), CHCR_DST_SG_SIZE, 0); in create_authenc_wr()
2480 reqctx->imm = (transhdr_len + req->assoclen + req->cryptlen) < in create_authenc_wr()
2482 temp = reqctx->imm ? roundup(req->assoclen + req->cryptlen, 16) in create_authenc_wr()
2488 transhdr_len, reqctx->op)) { in create_authenc_wr()
2491 return ERR_PTR(chcr_aead_fallback(req, reqctx->op)); in create_authenc_wr()
2501 temp = (reqctx->op == CHCR_ENCRYPT_OP) ? 0 : authsize; in create_authenc_wr()
2525 chcr_req->sec_cpl.seqno_numivs = FILL_SEC_CPL_SCMD0_SEQNO(reqctx->op, in create_authenc_wr()
2526 (reqctx->op == CHCR_ENCRYPT_OP) ? 1 : 0, in create_authenc_wr()
2534 if (reqctx->op == CHCR_ENCRYPT_OP || in create_authenc_wr()
2562 kctx_len + (reqctx->imm ? (req->assoclen + req->cryptlen) : 0); in create_authenc_wr()
2563 create_wreq(a_ctx(tfm), chcr_req, &req->base, reqctx->imm, size, in create_authenc_wr()
2565 reqctx->skb = skb; in create_authenc_wr()
2579 struct chcr_aead_reqctx *reqctx = aead_request_ctx(req); in chcr_aead_dma_map() local
2599 reqctx->iv_dma = dma_map_single(dev, reqctx->iv, (IV + reqctx->b0_len), in chcr_aead_dma_map()
2601 if (dma_mapping_error(dev, reqctx->iv_dma)) in chcr_aead_dma_map()
2603 if (reqctx->b0_len) in chcr_aead_dma_map()
2604 reqctx->b0_dma = reqctx->iv_dma + IV; in chcr_aead_dma_map()
2606 reqctx->b0_dma = 0; in chcr_aead_dma_map()
2632 dma_unmap_single(dev, reqctx->iv_dma, IV, DMA_BIDIRECTIONAL); in chcr_aead_dma_map()
2640 struct chcr_aead_reqctx *reqctx = aead_request_ctx(req); in chcr_aead_dma_unmap() local
2661 dma_unmap_single(dev, reqctx->iv_dma, (IV + reqctx->b0_len), in chcr_aead_dma_unmap()
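
A sketch of the IV/B0 DMA mapping pattern in the chcr_aead_dma_map()/chcr_aead_dma_unmap() fragments: the IV and the optional CCM B0 block live in one contiguous buffer, so a single dma_map_single() covers both and the B0 DMA address is derived by offsetting the IV address. DEMO_IV_LEN and the struct layout are illustrative:

#include <linux/dma-mapping.h>
#include <linux/errno.h>
#include <linux/types.h>

#define DEMO_IV_LEN	16

struct demo_aead_dma_ctx {
	u8 iv[DEMO_IV_LEN + 32];	/* IV followed by scratch/B0 space */
	unsigned int b0_len;		/* 0 when no B0 block is needed (non-CCM) */
	dma_addr_t iv_dma;
	dma_addr_t b0_dma;
};

static int demo_map_iv_b0(struct device *dev, struct demo_aead_dma_ctx *reqctx)
{
	reqctx->iv_dma = dma_map_single(dev, reqctx->iv,
					DEMO_IV_LEN + reqctx->b0_len,
					DMA_BIDIRECTIONAL);
	if (dma_mapping_error(dev, reqctx->iv_dma))
		return -ENOMEM;

	/* B0 (when present) sits immediately after the IV in the same buffer. */
	reqctx->b0_dma = reqctx->b0_len ? reqctx->iv_dma + DEMO_IV_LEN : 0;
	return 0;
}

static void demo_unmap_iv_b0(struct device *dev, struct demo_aead_dma_ctx *reqctx)
{
	dma_unmap_single(dev, reqctx->iv_dma, DEMO_IV_LEN + reqctx->b0_len,
			 DMA_BIDIRECTIONAL);
}
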
2681 struct chcr_aead_reqctx *reqctx = aead_request_ctx(req); in chcr_add_aead_src_ent() local
2683 if (reqctx->imm) { in chcr_add_aead_src_ent()
2686 if (reqctx->b0_len) { in chcr_add_aead_src_ent()
2687 memcpy(buf, reqctx->scratch_pad, reqctx->b0_len); in chcr_add_aead_src_ent()
2688 buf += reqctx->b0_len; in chcr_add_aead_src_ent()
2694 if (reqctx->b0_len) in chcr_add_aead_src_ent()
2695 ulptx_walk_add_page(&ulp_walk, reqctx->b0_len, in chcr_add_aead_src_ent()
2696 reqctx->b0_dma); in chcr_add_aead_src_ent()
2707 struct chcr_aead_reqctx *reqctx = aead_request_ctx(req); in chcr_add_aead_dst_ent() local
2714 unsigned int rx_channel_id = reqctx->rxqidx / ctx->rxq_perchan; in chcr_add_aead_dst_ent()
2718 dsgl_walk_add_page(&dsgl_walk, IV + reqctx->b0_len, reqctx->iv_dma); in chcr_add_aead_dst_ent()
2720 (reqctx->op ? -authsize : authsize); in chcr_add_aead_dst_ent()
2730 struct chcr_skcipher_req_ctx *reqctx = skcipher_request_ctx(req); in chcr_add_cipher_src_ent() local
2733 memcpy(buf, reqctx->iv, IV); in chcr_add_cipher_src_ent()
2735 if (reqctx->imm) { in chcr_add_cipher_src_ent()
2737 buf, wrparam->bytes, reqctx->processed); in chcr_add_cipher_src_ent()
2740 ulptx_walk_add_sg(&ulp_walk, reqctx->srcsg, wrparam->bytes, in chcr_add_cipher_src_ent()
2741 reqctx->src_ofst); in chcr_add_cipher_src_ent()
2742 reqctx->srcsg = ulp_walk.last_sg; in chcr_add_cipher_src_ent()
2743 reqctx->src_ofst = ulp_walk.last_sg_len; in chcr_add_cipher_src_ent()
2753 struct chcr_skcipher_req_ctx *reqctx = skcipher_request_ctx(req); in chcr_add_cipher_dst_ent() local
2758 unsigned int rx_channel_id = reqctx->rxqidx / ctx->rxq_perchan; in chcr_add_cipher_dst_ent()
2762 dsgl_walk_add_sg(&dsgl_walk, reqctx->dstsg, wrparam->bytes, in chcr_add_cipher_dst_ent()
2763 reqctx->dst_ofst); in chcr_add_cipher_dst_ent()
2764 reqctx->dstsg = dsgl_walk.last_sg; in chcr_add_cipher_dst_ent()
2765 reqctx->dst_ofst = dsgl_walk.last_sg_len; in chcr_add_cipher_dst_ent()
2774 struct chcr_ahash_req_ctx *reqctx = ahash_request_ctx(req); in chcr_add_hash_src_ent() local
2776 if (reqctx->hctx_wr.imm) { in chcr_add_hash_src_ent()
2780 memcpy(buf, reqctx->reqbfr, param->bfr_len); in chcr_add_hash_src_ent()
2784 sg_pcopy_to_buffer(reqctx->hctx_wr.srcsg, in chcr_add_hash_src_ent()
2785 sg_nents(reqctx->hctx_wr.srcsg), buf, in chcr_add_hash_src_ent()
2791 reqctx->hctx_wr.dma_addr); in chcr_add_hash_src_ent()
2792 ulptx_walk_add_sg(&ulp_walk, reqctx->hctx_wr.srcsg, in chcr_add_hash_src_ent()
2793 param->sg_len, reqctx->hctx_wr.src_ofst); in chcr_add_hash_src_ent()
2794 reqctx->hctx_wr.srcsg = ulp_walk.last_sg; in chcr_add_hash_src_ent()
2795 reqctx->hctx_wr.src_ofst = ulp_walk.last_sg_len; in chcr_add_hash_src_ent()
2897 struct chcr_aead_reqctx *reqctx = aead_request_ctx(req); in generate_b0() local
2898 u8 *b0 = reqctx->scratch_pad; in generate_b0()
2935 struct chcr_aead_reqctx *reqctx = aead_request_ctx(req); in ccm_format_packet() local
2949 put_unaligned_be16(assoclen, &reqctx->scratch_pad[16]); in ccm_format_packet()
2966 struct chcr_aead_reqctx *reqctx = aead_request_ctx(req); in fill_sec_cpl_for_aead() local
2969 unsigned int rx_channel_id = reqctx->rxqidx / ctx->rxq_perchan; in fill_sec_cpl_for_aead()
3039 struct chcr_aead_reqctx *reqctx = aead_request_ctx(req); in create_aead_ccm_wr() local
3057 reqctx->b0_len = CCM_B0_SIZE + (assoclen ? CCM_AAD_FIELD_SIZE : 0); in create_aead_ccm_wr()
3062 error = aead_ccm_validate_input(reqctx->op, req, aeadctx, sub_type); in create_aead_ccm_wr()
3066 + (reqctx->op ? -authsize : authsize), in create_aead_ccm_wr()
3075 reqctx->imm = (transhdr_len + req->assoclen + req->cryptlen + in create_aead_ccm_wr()
3076 reqctx->b0_len) <= SGE_MAX_WR_LEN; in create_aead_ccm_wr()
3077 temp = reqctx->imm ? roundup(req->assoclen + req->cryptlen + in create_aead_ccm_wr()
3078 reqctx->b0_len, 16) : in create_aead_ccm_wr()
3084 reqctx->b0_len, transhdr_len, reqctx->op)) { in create_aead_ccm_wr()
3087 return ERR_PTR(chcr_aead_fallback(req, reqctx->op)); in create_aead_ccm_wr()
3098 fill_sec_cpl_for_aead(&chcr_req->sec_cpl, dst_size, req, reqctx->op); in create_aead_ccm_wr()
3108 error = ccm_format_packet(req, ivptr, sub_type, reqctx->op, assoclen); in create_aead_ccm_wr()
3116 kctx_len + (reqctx->imm ? (req->assoclen + req->cryptlen + in create_aead_ccm_wr()
3117 reqctx->b0_len) : 0); in create_aead_ccm_wr()
3118 create_wreq(a_ctx(tfm), chcr_req, &req->base, reqctx->imm, 0, in create_aead_ccm_wr()
3120 reqctx->skb = skb; in create_aead_ccm_wr()
3138 struct chcr_aead_reqctx *reqctx = aead_request_ctx(req); in create_gcm_wr() local
3151 unsigned int rx_channel_id = reqctx->rxqidx / ctx->rxq_perchan; in create_gcm_wr()
3157 reqctx->b0_len = 0; in create_gcm_wr()
3162 (reqctx->op ? -authsize : authsize), in create_gcm_wr()
3170 reqctx->imm = (transhdr_len + req->assoclen + req->cryptlen) <= in create_gcm_wr()
3172 temp = reqctx->imm ? roundup(req->assoclen + req->cryptlen, 16) : in create_gcm_wr()
3177 transhdr_len, reqctx->op)) { in create_gcm_wr()
3181 return ERR_PTR(chcr_aead_fallback(req, reqctx->op)); in create_gcm_wr()
3192 temp = (reqctx->op == CHCR_ENCRYPT_OP) ? 0 : authsize; in create_gcm_wr()
3205 FILL_SEC_CPL_SCMD0_SEQNO(reqctx->op, (reqctx->op == in create_gcm_wr()
3235 kctx_len + (reqctx->imm ? (req->assoclen + req->cryptlen) : 0); in create_gcm_wr()
3236 create_wreq(a_ctx(tfm), chcr_req, &req->base, reqctx->imm, size, in create_gcm_wr()
3237 transhdr_len, temp, reqctx->verify); in create_gcm_wr()
3238 reqctx->skb = skb; in create_gcm_wr()
3738 struct chcr_aead_reqctx *reqctx = aead_request_ctx(req); in chcr_aead_op() local
3754 return chcr_aead_fallback(req, reqctx->op); in chcr_aead_op()
3758 reqctx->txqidx) && in chcr_aead_op()
3772 skb = create_wr_fn(req, u_ctx->lldi.rxq_ids[reqctx->rxqidx], size); in chcr_aead_op()
3780 set_wr_txq(skb, CPL_PRIORITY_DATA, reqctx->txqidx); in chcr_aead_op()
3788 struct chcr_aead_reqctx *reqctx = aead_request_ctx(req); in chcr_aead_encrypt() local
3793 reqctx->txqidx = cpu % ctx->ntxq; in chcr_aead_encrypt()
3794 reqctx->rxqidx = cpu % ctx->nrxq; in chcr_aead_encrypt()
3797 reqctx->verify = VERIFY_HW; in chcr_aead_encrypt()
3798 reqctx->op = CHCR_ENCRYPT_OP; in chcr_aead_encrypt()
3819 struct chcr_aead_reqctx *reqctx = aead_request_ctx(req); in chcr_aead_decrypt() local
3824 reqctx->txqidx = cpu % ctx->ntxq; in chcr_aead_decrypt()
3825 reqctx->rxqidx = cpu % ctx->nrxq; in chcr_aead_decrypt()
3830 reqctx->verify = VERIFY_SW; in chcr_aead_decrypt()
3833 reqctx->verify = VERIFY_HW; in chcr_aead_decrypt()
3835 reqctx->op = CHCR_DECRYPT_OP; in chcr_aead_decrypt()
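
Finally, the decrypt entry point and chcr_handle_aead_resp() split tag verification between hardware and software: requests whose tag the hardware cannot check directly are marked VERIFY_SW and re-verified in the completion handler, which then resets the flag to VERIFY_HW. A sketch of that state handling with illustrative enum and helper names:

enum demo_verify { DEMO_VERIFY_HW, DEMO_VERIFY_SW };
enum demo_op { DEMO_ENCRYPT_OP, DEMO_DECRYPT_OP };

struct demo_aead_verify_ctx {
	enum demo_verify verify;
	enum demo_op op;
};

/* Submit path: decide who checks the authentication tag. */
static void demo_mark_decrypt(struct demo_aead_verify_ctx *reqctx,
			      bool hw_can_verify)
{
	reqctx->verify = hw_can_verify ? DEMO_VERIFY_HW : DEMO_VERIFY_SW;
	reqctx->op = DEMO_DECRYPT_OP;
}

/* Completion path: run the software check once, then drop back to HW mode. */
static int demo_handle_aead_resp(struct demo_aead_verify_ctx *reqctx,
				 int (*sw_verify_tag)(void))
{
	int err = 0;

	if (reqctx->verify == DEMO_VERIFY_SW) {
		err = sw_verify_tag();
		reqctx->verify = DEMO_VERIFY_HW;
	}
	return err;
}
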