Lines Matching refs:crypt

349 static void finish_scattered_hmac(struct crypt_ctl *crypt)  in finish_scattered_hmac()  argument
351 struct aead_request *req = crypt->data.aead_req; in finish_scattered_hmac()
361 dma_pool_free(buffer_pool, req_ctx->hmac_virt, crypt->icv_rev_aes); in finish_scattered_hmac()
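
The hmac_virt scratch buffer released at line 361 is the one borrowed at submit time in aead_perform() (line 1175 passes &crypt->icv_rev_aes as the DMA handle), so the completion path can return it to buffer_pool with the same (virtual address, handle) pair. A fragment-style sketch of that pairing, assuming the standard dma_pool API; the gfp flags variable and the unwind label are hypothetical:

    #include <linux/dmapool.h>

    /* Submit side (cf. aead_perform(), line 1175): borrow a scratch buffer
     * for a scattered ICV and park its bus address in the descriptor. */
    req_ctx->hmac_virt = dma_pool_alloc(buffer_pool, flags,
                                        &crypt->icv_rev_aes);
    if (!req_ctx->hmac_virt)
        goto free_buf_dst;                  /* hypothetical unwind label */

    /* Completion side (cf. finish_scattered_hmac(), line 361): the same
     * pair goes straight back to the pool once the ICV has been handled. */
    dma_pool_free(buffer_pool, req_ctx->hmac_virt, crypt->icv_rev_aes);
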
367 struct crypt_ctl *crypt; in one_packet() local
373 crypt = crypt_phys2virt(phys); in one_packet()
375 switch (crypt->ctl_flags & CTL_FLAG_MASK) { in one_packet()
377 struct aead_request *req = crypt->data.aead_req; in one_packet()
380 free_buf_chain(dev, req_ctx->src, crypt->src_buf); in one_packet()
381 free_buf_chain(dev, req_ctx->dst, crypt->dst_buf); in one_packet()
383 finish_scattered_hmac(crypt); in one_packet()
389 struct skcipher_request *req = crypt->data.ablk_req; in one_packet()
407 free_buf_chain(dev, req_ctx->dst, crypt->dst_buf); in one_packet()
409 free_buf_chain(dev, req_ctx->src, crypt->src_buf); in one_packet()
414 ctx = crypto_tfm_ctx(crypt->data.tfm); in one_packet()
415 dma_pool_free(ctx_pool, crypt->regist_ptr, in one_packet()
416 crypt->regist_buf->phys_addr); in one_packet()
417 dma_pool_free(buffer_pool, crypt->regist_buf, crypt->src_buf); in one_packet()
422 ctx = crypto_tfm_ctx(crypt->data.tfm); in one_packet()
430 crypt->ctl_flags = CTL_FLAG_UNUSED; in one_packet()
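
Taken together, the one_packet() hits show the receive-side pattern: the physical queue entry is mapped back to its descriptor with crypt_phys2virt() (line 373), the CTL_FLAG_* value stored at submit time selects the teardown path (AEAD frees both buffer chains and finishes the scattered HMAC, skcipher frees the destination and source chains, the ICV/rev-AES generation cases only release their temporary context material), and the descriptor is recycled by resetting ctl_flags to CTL_FLAG_UNUSED (line 430). A self-contained sketch of that dispatch-and-recycle shape; every type and constant below is invented for illustration:

    #include <stdint.h>
    #include <stdio.h>

    enum {
        FLAG_UNUSED  = 0,
        FLAG_AEAD    = 1,
        FLAG_ABLK    = 2,
        FLAG_GEN_ICV = 3,
        FLAG_MASK    = 0x0f
    };

    struct desc {                          /* stand-in for struct crypt_ctl */
        uint32_t ctl_flags;
    };

    #define RING_SIZE 16
    static struct desc ring[RING_SIZE];

    /* stand-in for crypt_phys2virt(): queue entries carry a ring index here */
    static struct desc *entry_to_desc(uint32_t entry)
    {
        return &ring[entry % RING_SIZE];
    }

    void one_packet_sketch(uint32_t entry)
    {
        struct desc *d = entry_to_desc(entry);

        switch (d->ctl_flags & FLAG_MASK) {
        case FLAG_AEAD:
            /* free src/dst chains, release the scattered-HMAC buffer,
             * complete the AEAD request */
            break;
        case FLAG_ABLK:
            /* free dst (if out of place) and src chains, complete request */
            break;
        case FLAG_GEN_ICV:
            /* internal job: release the registration pad and buffer only */
            break;
        default:
            break;
        }
        d->ctl_flags = FLAG_UNUSED;        /* descriptor may be reused now */
    }

    int main(void)
    {
        ring[3].ctl_flags = FLAG_ABLK;
        one_packet_sketch(3);
        printf("descriptor 3 flags after completion: %u\n",
               (unsigned)ring[3].ctl_flags);
        return 0;
    }
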
683 struct crypt_ctl *crypt; in register_chain_var() local
698 crypt = get_crypt_desc_emerg(); in register_chain_var()
699 if (!crypt) { in register_chain_var()
710 crypt->data.tfm = tfm; in register_chain_var()
711 crypt->regist_ptr = pad; in register_chain_var()
712 crypt->regist_buf = buf; in register_chain_var()
714 crypt->auth_offs = 0; in register_chain_var()
715 crypt->auth_len = HMAC_PAD_BLOCKLEN; in register_chain_var()
716 crypt->crypto_ctx = ctx_addr; in register_chain_var()
717 crypt->src_buf = buf_phys; in register_chain_var()
718 crypt->icv_rev_aes = target; in register_chain_var()
719 crypt->mode = NPE_OP_HASH_GEN_ICV; in register_chain_var()
720 crypt->init_len = init_len; in register_chain_var()
721 crypt->ctl_flags |= CTL_FLAG_GEN_ICV; in register_chain_var()
729 qmgr_put_entry(send_qid, crypt_virt2phys(crypt)); in register_chain_var()
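
register_chain_var() queues a one-off NPE_OP_HASH_GEN_ICV job over a HMAC_PAD_BLOCKLEN-sized pad block (lines 714-715), with icv_rev_aes pointing at where the resulting intermediate digest should land (line 718). This is the usual HMAC optimization of hashing the inner and outer pad blocks once at setkey time so per-request hashes can chain from the stored state. A self-contained sketch of how those pad blocks are formed in standard HMAC; the block length and helper name are illustrative, and keys longer than one block would first be hashed down:

    #include <stdint.h>
    #include <string.h>

    #define PAD_BLOCKLEN 64                /* MD5/SHA-1 block size, illustrative */
    #define IPAD_VALUE   0x36
    #define OPAD_VALUE   0x5c

    /* Build the HMAC inner and outer pad blocks for a key of at most one
     * block. Hashing each block once yields the chaining state that a
     * GEN_ICV-style job precomputes. */
    void hmac_build_pads(const uint8_t *key, size_t key_len,
                         uint8_t ipad[PAD_BLOCKLEN],
                         uint8_t opad[PAD_BLOCKLEN])
    {
        size_t i;

        memset(ipad, 0, PAD_BLOCKLEN);     /* zero-pad the key to a full block */
        memcpy(ipad, key, key_len);
        memcpy(opad, ipad, PAD_BLOCKLEN);

        for (i = 0; i < PAD_BLOCKLEN; i++) {
            ipad[i] ^= IPAD_VALUE;         /* inner pad: key ^ 0x36... */
            opad[i] ^= OPAD_VALUE;         /* outer pad: key ^ 0x5c... */
        }
    }
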
783 struct crypt_ctl *crypt; in gen_rev_aes_key() local
787 crypt = get_crypt_desc_emerg(); in gen_rev_aes_key()
788 if (!crypt) in gen_rev_aes_key()
793 crypt->data.tfm = tfm; in gen_rev_aes_key()
794 crypt->crypt_offs = 0; in gen_rev_aes_key()
795 crypt->crypt_len = AES_BLOCK128; in gen_rev_aes_key()
796 crypt->src_buf = 0; in gen_rev_aes_key()
797 crypt->crypto_ctx = dir->npe_ctx_phys; in gen_rev_aes_key()
798 crypt->icv_rev_aes = dir->npe_ctx_phys + sizeof(u32); in gen_rev_aes_key()
799 crypt->mode = NPE_OP_ENC_GEN_KEY; in gen_rev_aes_key()
800 crypt->init_len = dir->npe_ctx_idx; in gen_rev_aes_key()
801 crypt->ctl_flags |= CTL_FLAG_GEN_REVAES; in gen_rev_aes_key()
804 qmgr_put_entry(send_qid, crypt_virt2phys(crypt)); in gen_rev_aes_key()
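
gen_rev_aes_key() is the cleanest example of the submit-side idiom that register_chain_var() and the perform paths share: take a descriptor (here from the emergency pool), fill in a fixed job (NPE_OP_ENC_GEN_KEY over a single AES block, with icv_rev_aes pointing just past the first word of an NPE context at line 798, judging by the names the spot where the derived reverse/decryption key is to be deposited), translate the descriptor to its physical address with crypt_virt2phys(), and push that onto send_qid. A self-contained sketch of that shape; the ring, addresses, opcode and flag values below are all made up for illustration:

    #include <stdint.h>
    #include <stddef.h>

    struct desc {                          /* tiny stand-in for struct crypt_ctl */
        uint32_t ctl_flags;
        uint32_t mode;
        uint32_t crypt_offs, crypt_len;
        uint32_t crypto_ctx;               /* bus address of the NPE context */
    };

    #define RING_SIZE 16
    static struct desc ring[RING_SIZE];
    static const uint32_t ring_phys_base = 0x40000000u;  /* pretend DMA base */

    struct desc *get_desc(void)            /* cf. get_crypt_desc_emerg() */
    {
        static unsigned int next;
        struct desc *d = &ring[next++ % RING_SIZE];

        return d->ctl_flags == 0 ? d : NULL;   /* only hand out unused slots */
    }

    uint32_t desc_virt2phys(struct desc *d)    /* cf. crypt_virt2phys() */
    {
        return ring_phys_base + (uint32_t)(d - ring) * (uint32_t)sizeof(*d);
    }

    void queue_put(uint32_t phys)              /* cf. qmgr_put_entry() */
    {
        (void)phys;                            /* hardware FIFO write goes here */
    }

    int submit_gen_key_job(uint32_t ctx_phys)
    {
        struct desc *d = get_desc();

        if (!d)
            return -1;                         /* ring exhausted (-EAGAIN) */

        d->crypt_offs = 0;
        d->crypt_len  = 16;                    /* one AES block */
        d->crypto_ctx = ctx_phys;
        d->mode       = 0x10;                  /* made-up "generate key" opcode */
        d->ctl_flags |= 0x08;                  /* made-up GEN_REVAES-style flag */

        queue_put(desc_virt2phys(d));          /* hand the job to the engine */
        return 0;
    }
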
977 struct crypt_ctl *crypt; in ablk_perform() local
998 crypt = get_crypt_desc(); in ablk_perform()
999 if (!crypt) in ablk_perform()
1002 crypt->data.ablk_req = req; in ablk_perform()
1003 crypt->crypto_ctx = dir->npe_ctx_phys; in ablk_perform()
1004 crypt->mode = dir->npe_mode; in ablk_perform()
1005 crypt->init_len = dir->npe_ctx_idx; in ablk_perform()
1007 crypt->crypt_offs = 0; in ablk_perform()
1008 crypt->crypt_len = nbytes; in ablk_perform()
1011 memcpy(crypt->iv, req->iv, ivsize); in ablk_perform()
1019 crypt->mode |= NPE_OP_NOT_IN_PLACE; in ablk_perform()
1028 crypt->dst_buf = dst_hook.phys_next; in ablk_perform()
1038 crypt->src_buf = src_hook.phys_next; in ablk_perform()
1039 crypt->ctl_flags |= CTL_FLAG_PERFORM_ABLK; in ablk_perform()
1040 qmgr_put_entry(send_qid, crypt_virt2phys(crypt)); in ablk_perform()
1045 free_buf_chain(dev, req_ctx->src, crypt->src_buf); in ablk_perform()
1048 free_buf_chain(dev, req_ctx->dst, crypt->dst_buf); in ablk_perform()
1050 crypt->ctl_flags = CTL_FLAG_UNUSED; in ablk_perform()
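
The tail of ablk_perform() (lines 1045-1050) is the submit-side error unwind: if chaining up the source buffers fails after a destination chain was already built, execution falls through labels that free the source chain, then the destination chain, and finally hands the descriptor back by clearing ctl_flags to CTL_FLAG_UNUSED. A self-contained sketch of that fall-through goto ladder; build_chain()/free_chain() are hypothetical stand-ins for chainup_buffers()/free_buf_chain():

    #include <stdlib.h>

    struct chain { void *bufs; };          /* stand-in for a DMA buffer chain */

    static int build_chain(struct chain *c)
    {
        c->bufs = malloc(64);              /* pretend buffer descriptor */
        return c->bufs != NULL;
    }

    static void free_chain(struct chain *c)
    {
        free(c->bufs);                     /* free(NULL) is a safe no-op */
        c->bufs = NULL;
    }

    int perform_sketch(unsigned int *ctl_flags, int out_of_place)
    {
        struct chain dst = { 0 }, src = { 0 };

        if (out_of_place && !build_chain(&dst))
            goto free_buf_dest;            /* nothing else to undo yet */

        if (!build_chain(&src))
            goto free_buf_src;             /* the dst chain already exists */

        *ctl_flags |= 0x4;                 /* made-up PERFORM flag; enqueue here */
        return 0;                          /* the driver returns -EINPROGRESS */

    free_buf_src:
        free_chain(&src);                  /* releases whatever was built */
    free_buf_dest:
        if (out_of_place)
            free_chain(&dst);
        *ctl_flags = 0;                    /* descriptor back to UNUSED */
        return -1;                         /* the driver returns -ENOMEM */
    }
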
1094 struct crypt_ctl *crypt; in aead_perform() local
1118 crypt = get_crypt_desc(); in aead_perform()
1119 if (!crypt) in aead_perform()
1122 crypt->data.aead_req = req; in aead_perform()
1123 crypt->crypto_ctx = dir->npe_ctx_phys; in aead_perform()
1124 crypt->mode = dir->npe_mode; in aead_perform()
1125 crypt->init_len = dir->npe_ctx_idx; in aead_perform()
1127 crypt->crypt_offs = cryptoffset; in aead_perform()
1128 crypt->crypt_len = eff_cryptlen; in aead_perform()
1130 crypt->auth_offs = 0; in aead_perform()
1131 crypt->auth_len = req->assoclen + cryptlen; in aead_perform()
1133 memcpy(crypt->iv, req->iv, ivsize); in aead_perform()
1135 buf = chainup_buffers(dev, req->src, crypt->auth_len, in aead_perform()
1138 crypt->src_buf = src_hook.phys_next; in aead_perform()
1144 crypt->icv_rev_aes = buf->phys_addr + in aead_perform()
1152 crypt->mode |= NPE_OP_NOT_IN_PLACE; in aead_perform()
1155 buf = chainup_buffers(dev, req->dst, crypt->auth_len, in aead_perform()
1158 crypt->dst_buf = dst_hook.phys_next; in aead_perform()
1166 crypt->icv_rev_aes = buf->phys_addr + in aead_perform()
1175 &crypt->icv_rev_aes); in aead_perform()
1187 crypt->ctl_flags |= CTL_FLAG_PERFORM_AEAD; in aead_perform()
1188 qmgr_put_entry(send_qid, crypt_virt2phys(crypt)); in aead_perform()
1193 free_buf_chain(dev, req_ctx->dst, crypt->dst_buf); in aead_perform()
1195 free_buf_chain(dev, req_ctx->src, crypt->src_buf); in aead_perform()
1196 crypt->ctl_flags = CTL_FLAG_UNUSED; in aead_perform()
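
aead_perform() derives its two hardware passes from the kernel AEAD layout (associated data, then the cipher region, then on the source of a decrypt the ICV): the hash covers assoclen plus the cipher length (line 1131) starting at offset 0 (line 1130), while the cipher pass starts after the associated data and, on decryption, excludes the trailing ICV from its length. (The unwind at lines 1193-1196 mirrors the ablk_perform() ladder sketched above.) A small self-contained sketch of that length arithmetic, assuming the standard layout just described; the struct and function names are illustrative:

    #include <stddef.h>

    struct aead_lengths {
        size_t auth_offs, auth_len;        /* region the hash pass covers */
        size_t crypt_offs, crypt_len;      /* region the cipher pass touches */
    };

    /* req_cryptlen follows the kernel convention: when decrypting it still
     * includes the authsize-byte ICV appended to the ciphertext. */
    struct aead_lengths aead_layout(size_t assoclen, size_t req_cryptlen,
                                    size_t authsize, int encrypt)
    {
        struct aead_lengths l;
        size_t cryptlen = encrypt ? req_cryptlen : req_cryptlen - authsize;

        l.auth_offs  = 0;                   /* hash from the start of the AD */
        l.auth_len   = assoclen + cryptlen; /* AD plus the cipher region */
        l.crypt_offs = assoclen;            /* cipher starts after the AD */
        l.crypt_len  = cryptlen;
        return l;
    }
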