Lines matching refs: rctx
78 struct aspeed_sham_reqctx *rctx) in aspeed_ahash_fill_padding() argument
83 AHASH_DBG(hace_dev, "rctx flags:0x%x\n", (u32)rctx->flags); in aspeed_ahash_fill_padding()
85 switch (rctx->flags & SHA_FLAGS_MASK) { in aspeed_ahash_fill_padding()
89 bits[0] = cpu_to_be64(rctx->digcnt[0] << 3); in aspeed_ahash_fill_padding()
90 index = rctx->bufcnt & 0x3f; in aspeed_ahash_fill_padding()
92 *(rctx->buffer + rctx->bufcnt) = 0x80; in aspeed_ahash_fill_padding()
93 memset(rctx->buffer + rctx->bufcnt + 1, 0, padlen - 1); in aspeed_ahash_fill_padding()
94 memcpy(rctx->buffer + rctx->bufcnt + padlen, bits, 8); in aspeed_ahash_fill_padding()
95 rctx->bufcnt += padlen + 8; in aspeed_ahash_fill_padding()
98 bits[1] = cpu_to_be64(rctx->digcnt[0] << 3); in aspeed_ahash_fill_padding()
99 bits[0] = cpu_to_be64(rctx->digcnt[1] << 3 | in aspeed_ahash_fill_padding()
100 rctx->digcnt[0] >> 61); in aspeed_ahash_fill_padding()
101 index = rctx->bufcnt & 0x7f; in aspeed_ahash_fill_padding()
103 *(rctx->buffer + rctx->bufcnt) = 0x80; in aspeed_ahash_fill_padding()
104 memset(rctx->buffer + rctx->bufcnt + 1, 0, padlen - 1); in aspeed_ahash_fill_padding()
105 memcpy(rctx->buffer + rctx->bufcnt + padlen, bits, 16); in aspeed_ahash_fill_padding()
106 rctx->bufcnt += padlen + 16; in aspeed_ahash_fill_padding()
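
The two cases above are standard Merkle-Damgard padding: append 0x80, zero-fill so that the length field completes a block, then append the message length in bits, big-endian. A minimal userspace sketch of the 64-byte-block case (SHA-1/224/256), assuming only that digcnt counts message bytes; sha_pad64 is an illustrative name, not the driver's:

#include <stdint.h>
#include <string.h>

static size_t sha_pad64(uint8_t *buf, size_t bufcnt, uint64_t digcnt)
{
	uint64_t bits = digcnt << 3;			/* byte count -> bit count */
	size_t index = bufcnt & 0x3f;			/* offset within a block */
	size_t padlen = (index < 56) ? (56 - index) : (120 - index);
	uint8_t be_bits[8];
	int i;

	for (i = 0; i < 8; i++)				/* big-endian encode */
		be_bits[i] = (uint8_t)(bits >> (56 - 8 * i));

	buf[bufcnt] = 0x80;				/* mandatory first pad byte */
	memset(buf + bufcnt + 1, 0, padlen - 1);
	memcpy(buf + bufcnt + padlen, be_bits, 8);

	return bufcnt + padlen + 8;			/* new, block-aligned bufcnt */
}

The SHA-384/512 branch is the same scheme with a 128-byte block, a 16-byte length field built from digcnt[1]:digcnt[0], and the 56/120 thresholds replaced by 112/240.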
119 struct aspeed_sham_reqctx *rctx = ahash_request_ctx(req); in aspeed_ahash_dma_prepare() local
122 length = rctx->total + rctx->bufcnt; in aspeed_ahash_dma_prepare()
123 remain = length % rctx->block_size; in aspeed_ahash_dma_prepare()
127 if (rctx->bufcnt) in aspeed_ahash_dma_prepare()
128 memcpy(hash_engine->ahash_src_addr, rctx->buffer, rctx->bufcnt); in aspeed_ahash_dma_prepare()
130 if (rctx->total + rctx->bufcnt < ASPEED_CRYPTO_SRC_DMA_BUF_LEN) { in aspeed_ahash_dma_prepare()
132 rctx->bufcnt, rctx->src_sg, in aspeed_ahash_dma_prepare()
133 rctx->offset, rctx->total - remain, 0); in aspeed_ahash_dma_prepare()
134 rctx->offset += rctx->total - remain; in aspeed_ahash_dma_prepare()
141 scatterwalk_map_and_copy(rctx->buffer, rctx->src_sg, in aspeed_ahash_dma_prepare()
142 rctx->offset, remain, 0); in aspeed_ahash_dma_prepare()
144 rctx->bufcnt = remain; in aspeed_ahash_dma_prepare()
145 rctx->digest_dma_addr = dma_map_single(hace_dev->dev, rctx->digest, in aspeed_ahash_dma_prepare()
148 if (dma_mapping_error(hace_dev->dev, rctx->digest_dma_addr)) { in aspeed_ahash_dma_prepare()
155 hash_engine->digest_dma = rctx->digest_dma_addr; in aspeed_ahash_dma_prepare()
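
aspeed_ahash_dma_prepare() linearizes the request into a single DMA buffer: carried bytes from rctx->buffer go first, then whole blocks from the scatterlist, and the sub-block tail is stashed back into rctx->buffer for the next update. A sketch of that carry math with illustrative names (carry_buf and stage_blocks are not the driver's); it assumes the caller guarantees total + bufcnt >= block_size, as aspeed_sham_update() does before taking this path:

#include <stddef.h>
#include <string.h>

struct carry_buf {
	unsigned char buf[128];		/* >= block size */
	size_t bufcnt;			/* bytes carried from the last call */
};

static size_t stage_blocks(struct carry_buf *c, unsigned char *staging,
			   const unsigned char *src, size_t total,
			   size_t block_size)
{
	size_t length = total + c->bufcnt;
	size_t remain = length % block_size;
	size_t hashable = total - remain;	/* new bytes hashed this round */

	memcpy(staging, c->buf, c->bufcnt);		/* carried bytes first */
	memcpy(staging + c->bufcnt, src, hashable);	/* then whole blocks */
	memcpy(c->buf, src + hashable, remain);		/* stash the tail */
	c->bufcnt = remain;

	return length - remain;		/* block-aligned length for the engine */
}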
168 struct aspeed_sham_reqctx *rctx = ahash_request_ctx(req); in aspeed_ahash_dma_prepare_sg() local
174 remain = (rctx->total + rctx->bufcnt) % rctx->block_size; in aspeed_ahash_dma_prepare_sg()
175 length = rctx->total + rctx->bufcnt - remain; in aspeed_ahash_dma_prepare_sg()
178 "rctx total", rctx->total, "bufcnt", rctx->bufcnt, in aspeed_ahash_dma_prepare_sg()
181 sg_len = dma_map_sg(hace_dev->dev, rctx->src_sg, rctx->src_nents, in aspeed_ahash_dma_prepare_sg()
190 rctx->digest_dma_addr = dma_map_single(hace_dev->dev, rctx->digest, in aspeed_ahash_dma_prepare_sg()
193 if (dma_mapping_error(hace_dev->dev, rctx->digest_dma_addr)) { in aspeed_ahash_dma_prepare_sg()
199 if (rctx->bufcnt != 0) { in aspeed_ahash_dma_prepare_sg()
203 rctx->buffer_dma_addr = dma_map_single(hace_dev->dev, in aspeed_ahash_dma_prepare_sg()
204 rctx->buffer, in aspeed_ahash_dma_prepare_sg()
205 rctx->block_size * 2, in aspeed_ahash_dma_prepare_sg()
207 if (dma_mapping_error(hace_dev->dev, rctx->buffer_dma_addr)) { in aspeed_ahash_dma_prepare_sg()
213 phy_addr = rctx->buffer_dma_addr; in aspeed_ahash_dma_prepare_sg()
214 len = rctx->bufcnt; in aspeed_ahash_dma_prepare_sg()
227 for_each_sg(rctx->src_sg, s, sg_len, i) { in aspeed_ahash_dma_prepare_sg()
250 rctx->offset = rctx->total - remain; in aspeed_ahash_dma_prepare_sg()
251 hash_engine->src_length = rctx->total + rctx->bufcnt - remain; in aspeed_ahash_dma_prepare_sg()
253 hash_engine->digest_dma = rctx->digest_dma_addr; in aspeed_ahash_dma_prepare_sg()
258 if (rctx->bufcnt != 0) in aspeed_ahash_dma_prepare_sg()
259 dma_unmap_single(hace_dev->dev, rctx->buffer_dma_addr, in aspeed_ahash_dma_prepare_sg()
260 rctx->block_size * 2, DMA_TO_DEVICE); in aspeed_ahash_dma_prepare_sg()
262 dma_unmap_single(hace_dev->dev, rctx->digest_dma_addr, in aspeed_ahash_dma_prepare_sg()
265 dma_unmap_sg(hace_dev->dev, rctx->src_sg, rctx->src_nents, in aspeed_ahash_dma_prepare_sg()
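
aspeed_ahash_dma_prepare_sg() instead hands the engine a hardware scatter-gather list: when bufcnt != 0 the first descriptor points at the carried buffer, then one descriptor is emitted per mapped segment, with the entry that consumes the final byte flagged as last. The descriptor layout below (32-bit length word with a LAST bit, plus a 32-bit address) is an assumption for illustration; endianness conversion is omitted. Consult the driver's own descriptor definition for the real format:

#include <stdint.h>

#define SG_LAST_FLAG	(1u << 31)	/* assumed end-of-list marker */

struct sg_desc {
	uint32_t len;
	uint32_t addr;
};

struct seg {				/* stand-in for a mapped SG segment */
	uint32_t dma_addr;
	uint32_t length;
};

/* Emit one descriptor per segment, clamping to 'length' total bytes
 * and flagging the entry that consumes the final byte. */
static int build_sg_list(struct sg_desc *list, const struct seg *segs,
			 int nsegs, uint32_t length)
{
	int i, n = 0;

	for (i = 0; i < nsegs && length; i++) {
		uint32_t len = segs[i].length;

		if (len >= length) {		/* last descriptor: clamp + flag */
			len = length;
			list[n].len = len | SG_LAST_FLAG;
		} else {
			list[n].len = len;
		}
		list[n].addr = segs[i].dma_addr;
		length -= len;
		n++;
	}
	return n;	/* number of descriptors written */
}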
293 struct aspeed_sham_reqctx *rctx = ahash_request_ctx(req); in aspeed_ahash_transfer() local
297 dma_unmap_single(hace_dev->dev, rctx->digest_dma_addr, in aspeed_ahash_transfer()
300 dma_unmap_single(hace_dev->dev, rctx->buffer_dma_addr, in aspeed_ahash_transfer()
301 rctx->block_size * 2, DMA_TO_DEVICE); in aspeed_ahash_transfer()
303 memcpy(req->result, rctx->digest, rctx->digsize); in aspeed_ahash_transfer()
316 struct aspeed_sham_reqctx *rctx = ahash_request_ctx(req); in aspeed_hace_ahash_trigger() local
322 rctx->cmd |= HASH_CMD_INT_ENABLE; in aspeed_hace_ahash_trigger()
336 ast_hace_write(hace_dev, rctx->cmd, ASPEED_HACE_HASH_CMD); in aspeed_hace_ahash_trigger()
350 struct aspeed_sham_reqctx *rctx = ahash_request_ctx(req); in aspeed_ahash_hmac_resume() local
358 dma_unmap_single(hace_dev->dev, rctx->digest_dma_addr, in aspeed_ahash_hmac_resume()
361 dma_unmap_single(hace_dev->dev, rctx->buffer_dma_addr, in aspeed_ahash_hmac_resume()
362 rctx->block_size * 2, DMA_TO_DEVICE); in aspeed_ahash_hmac_resume()
365 memcpy(rctx->buffer, bctx->opad, rctx->block_size); in aspeed_ahash_hmac_resume()
366 memcpy(rctx->buffer + rctx->block_size, rctx->digest, rctx->digsize); in aspeed_ahash_hmac_resume()
368 rctx->bufcnt = rctx->block_size + rctx->digsize; in aspeed_ahash_hmac_resume()
369 rctx->digcnt[0] = rctx->block_size + rctx->digsize; in aspeed_ahash_hmac_resume()
371 aspeed_ahash_fill_padding(hace_dev, rctx); in aspeed_ahash_hmac_resume()
372 memcpy(rctx->digest, rctx->sha_iv, rctx->ivsize); in aspeed_ahash_hmac_resume()
374 rctx->digest_dma_addr = dma_map_single(hace_dev->dev, rctx->digest, in aspeed_ahash_hmac_resume()
377 if (dma_mapping_error(hace_dev->dev, rctx->digest_dma_addr)) { in aspeed_ahash_hmac_resume()
383 rctx->buffer_dma_addr = dma_map_single(hace_dev->dev, rctx->buffer, in aspeed_ahash_hmac_resume()
384 rctx->block_size * 2, in aspeed_ahash_hmac_resume()
386 if (dma_mapping_error(hace_dev->dev, rctx->buffer_dma_addr)) { in aspeed_ahash_hmac_resume()
392 hash_engine->src_dma = rctx->buffer_dma_addr; in aspeed_ahash_hmac_resume()
393 hash_engine->src_length = rctx->bufcnt; in aspeed_ahash_hmac_resume()
394 hash_engine->digest_dma = rctx->digest_dma_addr; in aspeed_ahash_hmac_resume()
399 dma_unmap_single(hace_dev->dev, rctx->digest_dma_addr, in aspeed_ahash_hmac_resume()
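
aspeed_ahash_hmac_resume() runs the outer HMAC pass: once the inner hash has completed, it rebuilds rctx->buffer as opad || inner-digest, pads it, reloads the fresh IV, and submits one more hash. The equivalent computation in userspace, using OpenSSL's one-shot SHA256() purely for illustration (the driver runs this second pass on the HACE engine instead):

#include <string.h>
#include <openssl/sha.h>

#define BLOCK_SIZE	SHA256_CBLOCK		/* 64 */
#define DIGEST_SIZE	SHA256_DIGEST_LENGTH	/* 32 */

static void hmac_outer(const unsigned char opad[BLOCK_SIZE],
		       const unsigned char inner[DIGEST_SIZE],
		       unsigned char out[DIGEST_SIZE])
{
	unsigned char buf[BLOCK_SIZE + DIGEST_SIZE];

	memcpy(buf, opad, BLOCK_SIZE);			/* opad block first */
	memcpy(buf + BLOCK_SIZE, inner, DIGEST_SIZE);	/* then inner hash */
	SHA256(buf, sizeof(buf), out);			/* outer pass */
}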
409 struct aspeed_sham_reqctx *rctx = ahash_request_ctx(req); in aspeed_ahash_req_final() local
414 aspeed_ahash_fill_padding(hace_dev, rctx); in aspeed_ahash_req_final()
416 rctx->digest_dma_addr = dma_map_single(hace_dev->dev, in aspeed_ahash_req_final()
417 rctx->digest, in aspeed_ahash_req_final()
420 if (dma_mapping_error(hace_dev->dev, rctx->digest_dma_addr)) { in aspeed_ahash_req_final()
426 rctx->buffer_dma_addr = dma_map_single(hace_dev->dev, in aspeed_ahash_req_final()
427 rctx->buffer, in aspeed_ahash_req_final()
428 rctx->block_size * 2, in aspeed_ahash_req_final()
430 if (dma_mapping_error(hace_dev->dev, rctx->buffer_dma_addr)) { in aspeed_ahash_req_final()
436 hash_engine->src_dma = rctx->buffer_dma_addr; in aspeed_ahash_req_final()
437 hash_engine->src_length = rctx->bufcnt; in aspeed_ahash_req_final()
438 hash_engine->digest_dma = rctx->digest_dma_addr; in aspeed_ahash_req_final()
440 if (rctx->flags & SHA_FLAGS_HMAC) in aspeed_ahash_req_final()
447 dma_unmap_single(hace_dev->dev, rctx->digest_dma_addr, in aspeed_ahash_req_final()
457 struct aspeed_sham_reqctx *rctx = ahash_request_ctx(req); in aspeed_ahash_update_resume_sg() local
461 dma_unmap_sg(hace_dev->dev, rctx->src_sg, rctx->src_nents, in aspeed_ahash_update_resume_sg()
464 if (rctx->bufcnt != 0) in aspeed_ahash_update_resume_sg()
465 dma_unmap_single(hace_dev->dev, rctx->buffer_dma_addr, in aspeed_ahash_update_resume_sg()
466 rctx->block_size * 2, in aspeed_ahash_update_resume_sg()
469 dma_unmap_single(hace_dev->dev, rctx->digest_dma_addr, in aspeed_ahash_update_resume_sg()
472 scatterwalk_map_and_copy(rctx->buffer, rctx->src_sg, rctx->offset, in aspeed_ahash_update_resume_sg()
473 rctx->total - rctx->offset, 0); in aspeed_ahash_update_resume_sg()
475 rctx->bufcnt = rctx->total - rctx->offset; in aspeed_ahash_update_resume_sg()
476 rctx->cmd &= ~HASH_CMD_HASH_SRC_SG_CTRL; in aspeed_ahash_update_resume_sg()
478 if (rctx->flags & SHA_FLAGS_FINUP) in aspeed_ahash_update_resume_sg()
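
On resume from an SG pass, whatever the engine did not consume (total - offset bytes) is copied back into the linear buffer, becomes the next round's carry, and SG mode is switched off; with FINUP set, the request proceeds straight to finalization. The bookkeeping, reduced to its core (save_tail is an illustrative name):

#include <stddef.h>
#include <string.h>

/* Returns the new bufcnt after stashing the unconsumed tail. */
static size_t save_tail(unsigned char *buffer, const unsigned char *src,
			size_t total, size_t offset)
{
	size_t tail = total - offset;

	memcpy(buffer, src + offset, tail);
	return tail;
}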
488 struct aspeed_sham_reqctx *rctx = ahash_request_ctx(req); in aspeed_ahash_update_resume() local
492 dma_unmap_single(hace_dev->dev, rctx->digest_dma_addr, in aspeed_ahash_update_resume()
495 if (rctx->flags & SHA_FLAGS_FINUP) in aspeed_ahash_update_resume()
505 struct aspeed_sham_reqctx *rctx = ahash_request_ctx(req); in aspeed_ahash_req_update() local
512 rctx->cmd |= HASH_CMD_HASH_SRC_SG_CTRL; in aspeed_ahash_req_update()
536 struct aspeed_sham_reqctx *rctx = ahash_request_ctx(req); in aspeed_ahash_do_request() local
546 if (rctx->op == SHA_OP_UPDATE) in aspeed_ahash_do_request()
548 else if (rctx->op == SHA_OP_FINAL) in aspeed_ahash_do_request()
583 struct aspeed_sham_reqctx *rctx = ahash_request_ctx(req); in aspeed_sham_update() local
590 rctx->total = req->nbytes; in aspeed_sham_update()
591 rctx->src_sg = req->src; in aspeed_sham_update()
592 rctx->offset = 0; in aspeed_sham_update()
593 rctx->src_nents = sg_nents(req->src); in aspeed_sham_update()
594 rctx->op = SHA_OP_UPDATE; in aspeed_sham_update()
596 rctx->digcnt[0] += rctx->total; in aspeed_sham_update()
597 if (rctx->digcnt[0] < rctx->total) in aspeed_sham_update()
598 rctx->digcnt[1]++; in aspeed_sham_update()
600 if (rctx->bufcnt + rctx->total < rctx->block_size) { in aspeed_sham_update()
601 scatterwalk_map_and_copy(rctx->buffer + rctx->bufcnt, in aspeed_sham_update()
602 rctx->src_sg, rctx->offset, in aspeed_sham_update()
603 rctx->total, 0); in aspeed_sham_update()
604 rctx->bufcnt += rctx->total; in aspeed_sham_update()
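
The digcnt pair forms a 128-bit byte counter: digcnt[0] holds the low 64 bits, digcnt[1] the high 64 bits, and the carry out of the low word is detected by unsigned wraparound, exactly as in the lines above. Isolated:

#include <stdint.h>

static void digcnt_add(uint64_t digcnt[2], uint64_t nbytes)
{
	digcnt[0] += nbytes;
	if (digcnt[0] < nbytes)		/* 64-bit overflow: propagate carry */
		digcnt[1]++;
}

Inputs too small to fill a block take the shortcut above: they are appended to rctx->buffer and the request completes without touching the engine.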
624 struct aspeed_sham_reqctx *rctx = ahash_request_ctx(req); in aspeed_sham_final() local
630 req->nbytes, rctx->total); in aspeed_sham_final()
631 rctx->op = SHA_OP_FINAL; in aspeed_sham_final()
638 struct aspeed_sham_reqctx *rctx = ahash_request_ctx(req); in aspeed_sham_finup() local
646 rctx->flags |= SHA_FLAGS_FINUP; in aspeed_sham_finup()
663 struct aspeed_sham_reqctx *rctx = ahash_request_ctx(req); in aspeed_sham_init() local
673 rctx->cmd = HASH_CMD_ACC_MODE; in aspeed_sham_init()
674 rctx->flags = 0; in aspeed_sham_init()
678 rctx->cmd |= HASH_CMD_SHA1 | HASH_CMD_SHA_SWAP; in aspeed_sham_init()
679 rctx->flags |= SHA_FLAGS_SHA1; in aspeed_sham_init()
680 rctx->digsize = SHA1_DIGEST_SIZE; in aspeed_sham_init()
681 rctx->block_size = SHA1_BLOCK_SIZE; in aspeed_sham_init()
682 rctx->sha_iv = sha1_iv; in aspeed_sham_init()
683 rctx->ivsize = 32; in aspeed_sham_init()
684 memcpy(rctx->digest, sha1_iv, rctx->ivsize); in aspeed_sham_init()
687 rctx->cmd |= HASH_CMD_SHA224 | HASH_CMD_SHA_SWAP; in aspeed_sham_init()
688 rctx->flags |= SHA_FLAGS_SHA224; in aspeed_sham_init()
689 rctx->digsize = SHA224_DIGEST_SIZE; in aspeed_sham_init()
690 rctx->block_size = SHA224_BLOCK_SIZE; in aspeed_sham_init()
691 rctx->sha_iv = sha224_iv; in aspeed_sham_init()
692 rctx->ivsize = 32; in aspeed_sham_init()
693 memcpy(rctx->digest, sha224_iv, rctx->ivsize); in aspeed_sham_init()
696 rctx->cmd |= HASH_CMD_SHA256 | HASH_CMD_SHA_SWAP; in aspeed_sham_init()
697 rctx->flags |= SHA_FLAGS_SHA256; in aspeed_sham_init()
698 rctx->digsize = SHA256_DIGEST_SIZE; in aspeed_sham_init()
699 rctx->block_size = SHA256_BLOCK_SIZE; in aspeed_sham_init()
700 rctx->sha_iv = sha256_iv; in aspeed_sham_init()
701 rctx->ivsize = 32; in aspeed_sham_init()
702 memcpy(rctx->digest, sha256_iv, rctx->ivsize); in aspeed_sham_init()
705 rctx->cmd |= HASH_CMD_SHA512_SER | HASH_CMD_SHA384 | in aspeed_sham_init()
707 rctx->flags |= SHA_FLAGS_SHA384; in aspeed_sham_init()
708 rctx->digsize = SHA384_DIGEST_SIZE; in aspeed_sham_init()
709 rctx->block_size = SHA384_BLOCK_SIZE; in aspeed_sham_init()
710 rctx->sha_iv = (const __be32 *)sha384_iv; in aspeed_sham_init()
711 rctx->ivsize = 64; in aspeed_sham_init()
712 memcpy(rctx->digest, sha384_iv, rctx->ivsize); in aspeed_sham_init()
715 rctx->cmd |= HASH_CMD_SHA512_SER | HASH_CMD_SHA512 | in aspeed_sham_init()
717 rctx->flags |= SHA_FLAGS_SHA512; in aspeed_sham_init()
718 rctx->digsize = SHA512_DIGEST_SIZE; in aspeed_sham_init()
719 rctx->block_size = SHA512_BLOCK_SIZE; in aspeed_sham_init()
720 rctx->sha_iv = (const __be32 *)sha512_iv; in aspeed_sham_init()
721 rctx->ivsize = 64; in aspeed_sham_init()
722 memcpy(rctx->digest, sha512_iv, rctx->ivsize); in aspeed_sham_init()
730 rctx->bufcnt = 0; in aspeed_sham_init()
731 rctx->total = 0; in aspeed_sham_init()
732 rctx->digcnt[0] = 0; in aspeed_sham_init()
733 rctx->digcnt[1] = 0; in aspeed_sham_init()
737 rctx->digcnt[0] = rctx->block_size; in aspeed_sham_init()
738 rctx->bufcnt = rctx->block_size; in aspeed_sham_init()
739 memcpy(rctx->buffer, bctx->ipad, rctx->block_size); in aspeed_sham_init()
740 rctx->flags |= SHA_FLAGS_HMAC; in aspeed_sham_init()
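
For HMAC transforms, init seeds the buffer with the precomputed ipad block and counts it as one block of input, so the first engine pass computes H(ipad || message...). The derivation behind bctx->ipad and bctx->opad (done at setkey time, which this listing does not show) is the standard RFC 2104 XOR; this sketch assumes the key has already been hashed or zero-padded to block_size:

#include <stddef.h>

static void hmac_pads(const unsigned char *key, size_t block_size,
		      unsigned char *ipad, unsigned char *opad)
{
	size_t i;

	for (i = 0; i < block_size; i++) {
		ipad[i] = key[i] ^ 0x36;	/* inner pad constant */
		opad[i] = key[i] ^ 0x5c;	/* outer pad constant */
	}
}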
835 struct aspeed_sham_reqctx *rctx = ahash_request_ctx(req); in aspeed_sham_export() local
837 memcpy(out, rctx, sizeof(*rctx)); in aspeed_sham_export()
844 struct aspeed_sham_reqctx *rctx = ahash_request_ctx(req); in aspeed_sham_import() local
846 memcpy(rctx, in, sizeof(*rctx)); in aspeed_sham_import()
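
Export and import serialize the partial-hash state as a flat copy of the whole request context, so a hash can be suspended and resumed later. A toy illustration of the contract (the real aspeed_sham_reqctx has more fields, including pointers that are only meaningful within a single request):

#include <stdint.h>
#include <string.h>

struct toy_reqctx {
	uint64_t digcnt[2];		/* 128-bit message byte counter */
	size_t bufcnt;			/* carried bytes */
	unsigned char buffer[256];	/* carry + padding staging */
	unsigned char digest[64];	/* running intermediate digest */
};

static void toy_export(const struct toy_reqctx *rctx, void *out)
{
	memcpy(out, rctx, sizeof(*rctx));
}

static void toy_import(struct toy_reqctx *rctx, const void *in)
{
	memcpy(rctx, in, sizeof(*rctx));
}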