Lines Matching refs:dd (references to the per-device state pointer `dd`, struct atmel_aes_dev, in the Linux kernel's Atmel AES driver, drivers/crypto/atmel-aes.c)
104 struct atmel_aes_dev *dd; member
347 static inline u32 atmel_aes_read(struct atmel_aes_dev *dd, u32 offset) in atmel_aes_read() argument
349 u32 value = readl_relaxed(dd->io_base + offset); in atmel_aes_read()
352 if (dd->flags & AES_FLAGS_DUMP_REG) { in atmel_aes_read()
355 dev_vdbg(dd->dev, "read 0x%08x from %s\n", value, in atmel_aes_read()
363 static inline void atmel_aes_write(struct atmel_aes_dev *dd, in atmel_aes_write() argument
367 if (dd->flags & AES_FLAGS_DUMP_REG) { in atmel_aes_write()
370 dev_vdbg(dd->dev, "write 0x%08x into %s\n", value, in atmel_aes_write()
375 writel_relaxed(value, dd->io_base + offset); in atmel_aes_write()
378 static void atmel_aes_read_n(struct atmel_aes_dev *dd, u32 offset, in atmel_aes_read_n() argument
382 *value = atmel_aes_read(dd, offset); in atmel_aes_read_n()
385 static void atmel_aes_write_n(struct atmel_aes_dev *dd, u32 offset, in atmel_aes_write_n() argument
389 atmel_aes_write(dd, offset, *value); in atmel_aes_write_n()
392 static inline void atmel_aes_read_block(struct atmel_aes_dev *dd, u32 offset, in atmel_aes_read_block() argument
395 atmel_aes_read_n(dd, offset, value, SIZE_IN_WORDS(AES_BLOCK_SIZE)); in atmel_aes_read_block()
398 static inline void atmel_aes_write_block(struct atmel_aes_dev *dd, u32 offset, in atmel_aes_write_block() argument
401 atmel_aes_write_n(dd, offset, value, SIZE_IN_WORDS(AES_BLOCK_SIZE)); in atmel_aes_write_block()
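
These first matches are the MMIO accessor layer: atmel_aes_read()/atmel_aes_write() wrap readl_relaxed()/writel_relaxed() on dd->io_base (with optional register tracing under AES_FLAGS_DUMP_REG), the _n variants loop over an array of 32-bit words, and the _block wrappers move exactly one 16-byte AES block (SIZE_IN_WORDS(AES_BLOCK_SIZE) words). A minimal sketch of the looping pattern, with hypothetical my_ names:

    #include <linux/io.h>

    /* Illustrative only: read `count` consecutive 32-bit registers.
     * Relaxed accessors skip the memory barrier, which is fine for
     * streaming a data FIFO like IDATAR/ODATAR. */
    static void my_read_n(void __iomem *base, u32 offset, u32 *value,
                          int count)
    {
            for (; count > 0; count--, value++, offset += 4)
                    *value = readl_relaxed(base + offset);
    }
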
404 static inline int atmel_aes_wait_for_data_ready(struct atmel_aes_dev *dd, in atmel_aes_wait_for_data_ready() argument
407 u32 isr = atmel_aes_read(dd, AES_ISR); in atmel_aes_wait_for_data_ready()
410 return resume(dd); in atmel_aes_wait_for_data_ready()
412 dd->resume = resume; in atmel_aes_wait_for_data_ready()
413 atmel_aes_write(dd, AES_IER, AES_INT_DATARDY); in atmel_aes_wait_for_data_ready()
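
atmel_aes_wait_for_data_ready() is the driver's resume-continuation idiom: if AES_ISR already reports DATARDY the continuation runs synchronously, otherwise it is parked in dd->resume and the DATARDY interrupt is enabled so the IRQ path can invoke it later. A hedged sketch (struct my_dev and my_read()/my_write() are hypothetical stand-ins for the accessors above; the trailing -EINPROGRESS is assumed here, as the usual crypto-API signal that completion will arrive asynchronously):

    static int my_wait_for_ready(struct my_dev *dev,
                                 int (*resume)(struct my_dev *))
    {
            if (my_read(dev, MY_ISR) & MY_INT_READY)
                    return resume(dev);      /* fast path: run inline */

            dev->resume = resume;            /* defer to IRQ handler */
            my_write(dev, MY_IER, MY_INT_READY);
            return -EINPROGRESS;
    }
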
435 static int atmel_aes_hw_init(struct atmel_aes_dev *dd) in atmel_aes_hw_init() argument
439 err = clk_enable(dd->iclk); in atmel_aes_hw_init()
443 atmel_aes_write(dd, AES_CR, AES_CR_SWRST); in atmel_aes_hw_init()
444 atmel_aes_write(dd, AES_MR, 0xE << AES_MR_CKEY_OFFSET); in atmel_aes_hw_init()
449 static inline unsigned int atmel_aes_get_version(struct atmel_aes_dev *dd) in atmel_aes_get_version() argument
451 return atmel_aes_read(dd, AES_HW_VERSION) & 0x00000fff; in atmel_aes_get_version()
454 static int atmel_aes_hw_version_init(struct atmel_aes_dev *dd) in atmel_aes_hw_version_init() argument
458 err = atmel_aes_hw_init(dd); in atmel_aes_hw_version_init()
462 dd->hw_version = atmel_aes_get_version(dd); in atmel_aes_hw_version_init()
464 dev_info(dd->dev, "version: 0x%x\n", dd->hw_version); in atmel_aes_hw_version_init()
466 clk_disable(dd->iclk); in atmel_aes_hw_version_init()
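
atmel_aes_hw_init() gates everything on clk_enable(dd->iclk), soft-resets the engine (AES_CR_SWRST) and writes 0xE into the CKEY field of AES_MR (a required constant, judging by the driver); atmel_aes_hw_version_init() reuses it once at probe time to latch the low 12 bits of AES_HW_VERSION, which atmel_aes_get_cap() later switches on. A condensed sketch of the probe-time version latch under those assumptions:

    static int my_hw_version_init(struct my_dev *dev)
    {
            int err = clk_enable(dev->iclk);    /* peripheral clock on */

            if (err)
                    return err;

            my_write(dev, MY_CR, MY_CR_SWRST);  /* soft reset */
            dev->hw_version = my_read(dev, MY_HW_VERSION) & 0xfff;
            dev_info(dev->dev, "version: 0x%x\n", dev->hw_version);

            clk_disable(dev->iclk);     /* stay off between requests */
            return 0;
    }
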
470 static inline void atmel_aes_set_mode(struct atmel_aes_dev *dd, in atmel_aes_set_mode() argument
474 dd->flags = (dd->flags & AES_FLAGS_PERSISTENT) | rctx->mode; in atmel_aes_set_mode()
477 static inline bool atmel_aes_is_encrypt(const struct atmel_aes_dev *dd) in atmel_aes_is_encrypt() argument
479 return (dd->flags & AES_FLAGS_ENCRYPT); in atmel_aes_is_encrypt()
483 static void atmel_aes_authenc_complete(struct atmel_aes_dev *dd, int err);
486 static void atmel_aes_set_iv_as_last_ciphertext_block(struct atmel_aes_dev *dd) in atmel_aes_set_iv_as_last_ciphertext_block() argument
488 struct skcipher_request *req = skcipher_request_cast(dd->areq); in atmel_aes_set_iv_as_last_ciphertext_block()
515 static void atmel_aes_ctr_update_req_iv(struct atmel_aes_dev *dd) in atmel_aes_ctr_update_req_iv() argument
517 struct atmel_aes_ctr_ctx *ctx = atmel_aes_ctr_ctx_cast(dd->ctx); in atmel_aes_ctr_update_req_iv()
518 struct skcipher_request *req = skcipher_request_cast(dd->areq); in atmel_aes_ctr_update_req_iv()
535 static inline int atmel_aes_complete(struct atmel_aes_dev *dd, int err) in atmel_aes_complete() argument
537 struct skcipher_request *req = skcipher_request_cast(dd->areq); in atmel_aes_complete()
541 if (dd->ctx->is_aead) in atmel_aes_complete()
542 atmel_aes_authenc_complete(dd, err); in atmel_aes_complete()
545 clk_disable(dd->iclk); in atmel_aes_complete()
546 dd->flags &= ~AES_FLAGS_BUSY; in atmel_aes_complete()
548 if (!err && !dd->ctx->is_aead && in atmel_aes_complete()
551 atmel_aes_set_iv_as_last_ciphertext_block(dd); in atmel_aes_complete()
553 atmel_aes_ctr_update_req_iv(dd); in atmel_aes_complete()
556 if (dd->is_async) in atmel_aes_complete()
557 dd->areq->complete(dd->areq, err); in atmel_aes_complete()
559 tasklet_schedule(&dd->queue_task); in atmel_aes_complete()
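
atmel_aes_complete() is the single exit point for a request: AEAD cleanup first when dd->ctx->is_aead, then clock off and AES_FLAGS_BUSY cleared, then the output-IV bookkeeping (last ciphertext block for CBC-style modes, advanced counter for CTR, judging by the two helpers above), and finally either the caller's completion callback (if the request went asynchronous) or a tasklet reschedule to pull the next queued request. A compressed sketch of that ordering; want_output_iv() and update_req_iv() are hypothetical stand-ins for the two IV helpers:

    static int my_complete(struct my_dev *dev, int err)
    {
            clk_disable(dev->iclk);
            dev->flags &= ~MY_FLAGS_BUSY;

            if (!err && want_output_iv(dev))    /* CBC/CTR chaining */
                    update_req_iv(dev);

            if (dev->is_async)
                    dev->areq->complete(dev->areq, err);

            tasklet_schedule(&dev->queue_task); /* start next request */
            return err;
    }
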
564 static void atmel_aes_write_ctrl_key(struct atmel_aes_dev *dd, bool use_dma, in atmel_aes_write_ctrl_key() argument
577 valmr |= dd->flags & AES_FLAGS_MODE_MASK; in atmel_aes_write_ctrl_key()
581 if (dd->caps.has_dualbuff) in atmel_aes_write_ctrl_key()
587 atmel_aes_write(dd, AES_MR, valmr); in atmel_aes_write_ctrl_key()
589 atmel_aes_write_n(dd, AES_KEYWR(0), key, SIZE_IN_WORDS(keylen)); in atmel_aes_write_ctrl_key()
592 atmel_aes_write_block(dd, AES_IVR(0), iv); in atmel_aes_write_ctrl_key()
595 static inline void atmel_aes_write_ctrl(struct atmel_aes_dev *dd, bool use_dma, in atmel_aes_write_ctrl() argument
599 atmel_aes_write_ctrl_key(dd, use_dma, iv, in atmel_aes_write_ctrl()
600 dd->ctx->key, dd->ctx->keylen); in atmel_aes_write_ctrl()
605 static int atmel_aes_cpu_transfer(struct atmel_aes_dev *dd) in atmel_aes_cpu_transfer() argument
611 atmel_aes_read_block(dd, AES_ODATAR(0), dd->data); in atmel_aes_cpu_transfer()
612 dd->data += 4; in atmel_aes_cpu_transfer()
613 dd->datalen -= AES_BLOCK_SIZE; in atmel_aes_cpu_transfer()
615 if (dd->datalen < AES_BLOCK_SIZE) in atmel_aes_cpu_transfer()
618 atmel_aes_write_block(dd, AES_IDATAR(0), dd->data); in atmel_aes_cpu_transfer()
620 isr = atmel_aes_read(dd, AES_ISR); in atmel_aes_cpu_transfer()
622 dd->resume = atmel_aes_cpu_transfer; in atmel_aes_cpu_transfer()
623 atmel_aes_write(dd, AES_IER, AES_INT_DATARDY); in atmel_aes_cpu_transfer()
628 if (!sg_copy_from_buffer(dd->real_dst, sg_nents(dd->real_dst), in atmel_aes_cpu_transfer()
629 dd->buf, dd->total)) in atmel_aes_cpu_transfer()
633 return atmel_aes_complete(dd, err); in atmel_aes_cpu_transfer()
635 return dd->cpu_transfer_complete(dd); in atmel_aes_cpu_transfer()
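
atmel_aes_cpu_transfer() is the PIO inner loop: on every DATARDY it reads the finished output block, advances dd->data by four u32s, and writes the next input block unless dd->datalen has dropped below one block; once drained, the padded result in dd->buf is copied back to the real destination scatterlist. One round of that cursor arithmetic as a sketch (my_ names hypothetical):

    /* One DATARDY round: pop a finished block, push the next one,
     * report whether more data remains in the bounce buffer. */
    static bool my_pio_round(struct my_dev *dev)
    {
            my_read_block(dev, MY_ODATAR, dev->data);   /* 16 bytes out */
            dev->data += 4;                             /* 4 x u32 */
            dev->datalen -= AES_BLOCK_SIZE;

            if (dev->datalen < AES_BLOCK_SIZE)
                    return false;                       /* drained */

            my_write_block(dev, MY_IDATAR, dev->data);  /* 16 bytes in */
            return true;
    }
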
638 static int atmel_aes_cpu_start(struct atmel_aes_dev *dd, in atmel_aes_cpu_start() argument
649 sg_copy_to_buffer(src, sg_nents(src), dd->buf, len); in atmel_aes_cpu_start()
651 dd->total = len; in atmel_aes_cpu_start()
652 dd->real_dst = dst; in atmel_aes_cpu_start()
653 dd->cpu_transfer_complete = resume; in atmel_aes_cpu_start()
654 dd->datalen = len + padlen; in atmel_aes_cpu_start()
655 dd->data = (u32 *)dd->buf; in atmel_aes_cpu_start()
656 atmel_aes_write_block(dd, AES_IDATAR(0), dd->data); in atmel_aes_cpu_start()
657 return atmel_aes_wait_for_data_ready(dd, atmel_aes_cpu_transfer); in atmel_aes_cpu_start()
665 static bool atmel_aes_check_aligned(struct atmel_aes_dev *dd, in atmel_aes_check_aligned() argument
672 if (!IS_ALIGNED(len, dd->ctx->block_size)) in atmel_aes_check_aligned()
680 if (!IS_ALIGNED(len, dd->ctx->block_size)) in atmel_aes_check_aligned()
689 if (!IS_ALIGNED(sg->length, dd->ctx->block_size)) in atmel_aes_check_aligned()
715 static int atmel_aes_map(struct atmel_aes_dev *dd, in atmel_aes_map() argument
723 dd->total = len; in atmel_aes_map()
724 dd->src.sg = src; in atmel_aes_map()
725 dd->dst.sg = dst; in atmel_aes_map()
726 dd->real_dst = dst; in atmel_aes_map()
728 src_aligned = atmel_aes_check_aligned(dd, src, len, &dd->src); in atmel_aes_map()
732 dst_aligned = atmel_aes_check_aligned(dd, dst, len, &dd->dst); in atmel_aes_map()
734 padlen = atmel_aes_padlen(len, dd->ctx->block_size); in atmel_aes_map()
736 if (dd->buflen < len + padlen) in atmel_aes_map()
740 sg_copy_to_buffer(src, sg_nents(src), dd->buf, len); in atmel_aes_map()
741 dd->src.sg = &dd->aligned_sg; in atmel_aes_map()
742 dd->src.nents = 1; in atmel_aes_map()
743 dd->src.remainder = 0; in atmel_aes_map()
747 dd->dst.sg = &dd->aligned_sg; in atmel_aes_map()
748 dd->dst.nents = 1; in atmel_aes_map()
749 dd->dst.remainder = 0; in atmel_aes_map()
752 sg_init_table(&dd->aligned_sg, 1); in atmel_aes_map()
753 sg_set_buf(&dd->aligned_sg, dd->buf, len + padlen); in atmel_aes_map()
756 if (dd->src.sg == dd->dst.sg) { in atmel_aes_map()
757 dd->src.sg_len = dma_map_sg(dd->dev, dd->src.sg, dd->src.nents, in atmel_aes_map()
759 dd->dst.sg_len = dd->src.sg_len; in atmel_aes_map()
760 if (!dd->src.sg_len) in atmel_aes_map()
763 dd->src.sg_len = dma_map_sg(dd->dev, dd->src.sg, dd->src.nents, in atmel_aes_map()
765 if (!dd->src.sg_len) in atmel_aes_map()
768 dd->dst.sg_len = dma_map_sg(dd->dev, dd->dst.sg, dd->dst.nents, in atmel_aes_map()
770 if (!dd->dst.sg_len) { in atmel_aes_map()
771 dma_unmap_sg(dd->dev, dd->src.sg, dd->src.nents, in atmel_aes_map()
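
atmel_aes_map() handles two concerns: misaligned or padded requests are first linearized into dd->buf and replaced by the single-entry dd->aligned_sg, and mapping is then done once with DMA_BIDIRECTIONAL for in-place requests (src.sg == dst.sg) or once per direction with rollback on failure. A sketch of the mapping split, with a hypothetical struct my_dma carrying sg/nents/sg_len like the driver's dd->src and dd->dst:

    #include <linux/dma-mapping.h>

    static int my_map(struct device *dev, struct my_dma *src,
                      struct my_dma *dst)
    {
            if (src->sg == dst->sg) {       /* in-place: map once */
                    src->sg_len = dma_map_sg(dev, src->sg, src->nents,
                                             DMA_BIDIRECTIONAL);
                    dst->sg_len = src->sg_len;
                    return src->sg_len ? 0 : -EFAULT;
            }

            src->sg_len = dma_map_sg(dev, src->sg, src->nents,
                                     DMA_TO_DEVICE);
            if (!src->sg_len)
                    return -EFAULT;

            dst->sg_len = dma_map_sg(dev, dst->sg, dst->nents,
                                     DMA_FROM_DEVICE);
            if (!dst->sg_len) {             /* roll back the src map */
                    dma_unmap_sg(dev, src->sg, src->nents,
                                 DMA_TO_DEVICE);
                    return -EFAULT;
            }
            return 0;
    }
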
780 static void atmel_aes_unmap(struct atmel_aes_dev *dd) in atmel_aes_unmap() argument
782 if (dd->src.sg == dd->dst.sg) { in atmel_aes_unmap()
783 dma_unmap_sg(dd->dev, dd->src.sg, dd->src.nents, in atmel_aes_unmap()
786 if (dd->src.sg != &dd->aligned_sg) in atmel_aes_unmap()
787 atmel_aes_restore_sg(&dd->src); in atmel_aes_unmap()
789 dma_unmap_sg(dd->dev, dd->dst.sg, dd->dst.nents, in atmel_aes_unmap()
792 if (dd->dst.sg != &dd->aligned_sg) in atmel_aes_unmap()
793 atmel_aes_restore_sg(&dd->dst); in atmel_aes_unmap()
795 dma_unmap_sg(dd->dev, dd->src.sg, dd->src.nents, in atmel_aes_unmap()
798 if (dd->src.sg != &dd->aligned_sg) in atmel_aes_unmap()
799 atmel_aes_restore_sg(&dd->src); in atmel_aes_unmap()
802 if (dd->dst.sg == &dd->aligned_sg) in atmel_aes_unmap()
803 sg_copy_from_buffer(dd->real_dst, sg_nents(dd->real_dst), in atmel_aes_unmap()
804 dd->buf, dd->total); in atmel_aes_unmap()
807 static int atmel_aes_dma_transfer_start(struct atmel_aes_dev *dd, in atmel_aes_dma_transfer_start() argument
826 dma = &dd->src; in atmel_aes_dma_transfer_start()
828 config.dst_addr = dd->phys_base + AES_IDATAR(0); in atmel_aes_dma_transfer_start()
832 dma = &dd->dst; in atmel_aes_dma_transfer_start()
834 config.src_addr = dd->phys_base + AES_ODATAR(0); in atmel_aes_dma_transfer_start()
851 desc->callback_param = dd; in atmel_aes_dma_transfer_start()
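
atmel_aes_dma_transfer_start() programs one dmaengine channel per direction: DMA_MEM_TO_DEV targets the input FIFO at dd->phys_base + AES_IDATAR(0), DMA_DEV_TO_MEM drains dd->phys_base + AES_ODATAR(0), and the completion callback receives dd as its parameter. A trimmed sketch of the standard slave-DMA sequence it follows; the descriptor flags are assumed to be the usual DMA_PREP_INTERRUPT | DMA_CTRL_ACK pair:

    #include <linux/dmaengine.h>

    static int my_dma_start_tx(struct dma_chan *chan, dma_addr_t fifo,
                               struct scatterlist *sg, int sg_len,
                               dma_async_tx_callback done, void *param)
    {
            struct dma_slave_config cfg = {
                    .direction      = DMA_MEM_TO_DEV,
                    .dst_addr       = fifo,     /* e.g. the IDATAR FIFO */
                    .dst_addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES,
                    .dst_maxburst   = 1,
            };
            struct dma_async_tx_descriptor *desc;
            int err;

            err = dmaengine_slave_config(chan, &cfg);
            if (err)
                    return err;

            desc = dmaengine_prep_slave_sg(chan, sg, sg_len,
                                           DMA_MEM_TO_DEV,
                                           DMA_PREP_INTERRUPT |
                                           DMA_CTRL_ACK);
            if (!desc)
                    return -ENOMEM;

            desc->callback = done;
            desc->callback_param = param;   /* the driver passes dd */
            dmaengine_submit(desc);
            dma_async_issue_pending(chan);
            return 0;
    }
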
858 static int atmel_aes_dma_start(struct atmel_aes_dev *dd, in atmel_aes_dma_start() argument
868 switch (dd->ctx->block_size) { in atmel_aes_dma_start()
887 maxburst = dd->caps.max_burst_size; in atmel_aes_dma_start()
895 err = atmel_aes_map(dd, src, dst, len); in atmel_aes_dma_start()
899 dd->resume = resume; in atmel_aes_dma_start()
902 err = atmel_aes_dma_transfer_start(dd, addr_width, DMA_DEV_TO_MEM, in atmel_aes_dma_start()
908 err = atmel_aes_dma_transfer_start(dd, addr_width, DMA_MEM_TO_DEV, in atmel_aes_dma_start()
916 dmaengine_terminate_sync(dd->dst.chan); in atmel_aes_dma_start()
918 atmel_aes_unmap(dd); in atmel_aes_dma_start()
920 return atmel_aes_complete(dd, err); in atmel_aes_dma_start()
925 struct atmel_aes_dev *dd = data; in atmel_aes_dma_callback() local
927 atmel_aes_unmap(dd); in atmel_aes_dma_callback()
928 dd->is_async = true; in atmel_aes_dma_callback()
929 (void)dd->resume(dd); in atmel_aes_dma_callback()
932 static int atmel_aes_handle_queue(struct atmel_aes_dev *dd, in atmel_aes_handle_queue() argument
941 spin_lock_irqsave(&dd->lock, flags); in atmel_aes_handle_queue()
943 ret = crypto_enqueue_request(&dd->queue, new_areq); in atmel_aes_handle_queue()
944 if (dd->flags & AES_FLAGS_BUSY) { in atmel_aes_handle_queue()
945 spin_unlock_irqrestore(&dd->lock, flags); in atmel_aes_handle_queue()
948 backlog = crypto_get_backlog(&dd->queue); in atmel_aes_handle_queue()
949 areq = crypto_dequeue_request(&dd->queue); in atmel_aes_handle_queue()
951 dd->flags |= AES_FLAGS_BUSY; in atmel_aes_handle_queue()
952 spin_unlock_irqrestore(&dd->lock, flags); in atmel_aes_handle_queue()
962 dd->areq = areq; in atmel_aes_handle_queue()
964 dd->is_async = start_async; in atmel_aes_handle_queue()
967 err = ctx->start(dd); in atmel_aes_handle_queue()
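
atmel_aes_handle_queue() is the classic single-engine crypto_queue dispatcher: enqueue under dd->lock, leave immediately if AES_FLAGS_BUSY is set, otherwise pop one request, mark the engine busy and start it outside the lock. A self-contained sketch of that shape (MY_ names hypothetical; dd->start stands in for the per-mode ctx->start seen above):

    static int my_handle_queue(struct my_dev *dd,
                               struct crypto_async_request *new_areq)
    {
            struct crypto_async_request *areq, *backlog;
            unsigned long flags;
            int ret = 0;

            spin_lock_irqsave(&dd->lock, flags);
            if (new_areq)
                    ret = crypto_enqueue_request(&dd->queue, new_areq);
            if (dd->flags & MY_FLAGS_BUSY) {
                    spin_unlock_irqrestore(&dd->lock, flags);
                    return ret;     /* busy: the request stays queued */
            }
            backlog = crypto_get_backlog(&dd->queue);
            areq = crypto_dequeue_request(&dd->queue);
            if (areq)
                    dd->flags |= MY_FLAGS_BUSY;
            spin_unlock_irqrestore(&dd->lock, flags);

            if (!areq)
                    return ret;
            if (backlog)    /* tell a backlogged caller it is in flight */
                    backlog->complete(backlog, -EINPROGRESS);

            dd->areq = areq;
            return dd->start(dd);
    }
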
974 static int atmel_aes_transfer_complete(struct atmel_aes_dev *dd) in atmel_aes_transfer_complete() argument
976 return atmel_aes_complete(dd, 0); in atmel_aes_transfer_complete()
979 static int atmel_aes_start(struct atmel_aes_dev *dd) in atmel_aes_start() argument
981 struct skcipher_request *req = skcipher_request_cast(dd->areq); in atmel_aes_start()
984 dd->ctx->block_size != AES_BLOCK_SIZE); in atmel_aes_start()
987 atmel_aes_set_mode(dd, rctx); in atmel_aes_start()
989 err = atmel_aes_hw_init(dd); in atmel_aes_start()
991 return atmel_aes_complete(dd, err); in atmel_aes_start()
993 atmel_aes_write_ctrl(dd, use_dma, (void *)req->iv); in atmel_aes_start()
995 return atmel_aes_dma_start(dd, req->src, req->dst, in atmel_aes_start()
999 return atmel_aes_cpu_start(dd, req->src, req->dst, req->cryptlen, in atmel_aes_start()
1003 static int atmel_aes_ctr_transfer(struct atmel_aes_dev *dd) in atmel_aes_ctr_transfer() argument
1005 struct atmel_aes_ctr_ctx *ctx = atmel_aes_ctr_ctx_cast(dd->ctx); in atmel_aes_ctr_transfer()
1006 struct skcipher_request *req = skcipher_request_cast(dd->areq); in atmel_aes_ctr_transfer()
1014 ctx->offset += dd->total; in atmel_aes_ctr_transfer()
1016 return atmel_aes_transfer_complete(dd); in atmel_aes_ctr_transfer()
1041 atmel_aes_write_ctrl(dd, use_dma, ctx->iv); in atmel_aes_ctr_transfer()
1052 return atmel_aes_dma_start(dd, src, dst, datalen, in atmel_aes_ctr_transfer()
1055 return atmel_aes_cpu_start(dd, src, dst, datalen, in atmel_aes_ctr_transfer()
1059 static int atmel_aes_ctr_start(struct atmel_aes_dev *dd) in atmel_aes_ctr_start() argument
1061 struct atmel_aes_ctr_ctx *ctx = atmel_aes_ctr_ctx_cast(dd->ctx); in atmel_aes_ctr_start()
1062 struct skcipher_request *req = skcipher_request_cast(dd->areq); in atmel_aes_ctr_start()
1066 atmel_aes_set_mode(dd, rctx); in atmel_aes_ctr_start()
1068 err = atmel_aes_hw_init(dd); in atmel_aes_ctr_start()
1070 return atmel_aes_complete(dd, err); in atmel_aes_ctr_start()
1074 dd->total = 0; in atmel_aes_ctr_start()
1075 return atmel_aes_ctr_transfer(dd); in atmel_aes_ctr_start()
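
The CTR pair above processes a request in chunks: atmel_aes_ctr_transfer() advances ctx->offset by dd->total after each pass and reprograms ctx->iv before the next one, and atmel_aes_ctr_update_req_iv() finally writes the advanced counter back into req->iv so a chained request continues where this one stopped. A sketch of the counter arithmetic, assuming the crypto API's big-endian increment helper rather than whatever the driver does internally:

    #include <crypto/algapi.h>

    /* Advance a big-endian AES-CTR counter by `bytes` worth of
     * processed data; crypto_inc() propagates the carry. */
    static void my_ctr_advance(u8 iv[AES_BLOCK_SIZE], unsigned int bytes)
    {
            unsigned int blocks = DIV_ROUND_UP(bytes, AES_BLOCK_SIZE);

            while (blocks--)
                    crypto_inc(iv, AES_BLOCK_SIZE);
    }
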
1157 return atmel_aes_handle_queue(ctx->dd, &req->base); in atmel_aes_crypt()
1269 struct atmel_aes_dev *dd; in atmel_aes_init_tfm() local
1271 dd = atmel_aes_dev_alloc(&ctx->base); in atmel_aes_init_tfm()
1272 if (!dd) in atmel_aes_init_tfm()
1276 ctx->base.dd = dd; in atmel_aes_init_tfm()
1277 ctx->base.dd->ctx = &ctx->base; in atmel_aes_init_tfm()
1286 struct atmel_aes_dev *dd; in atmel_aes_ctr_init_tfm() local
1288 dd = atmel_aes_dev_alloc(&ctx->base); in atmel_aes_ctr_init_tfm()
1289 if (!dd) in atmel_aes_ctr_init_tfm()
1293 ctx->base.dd = dd; in atmel_aes_ctr_init_tfm()
1294 ctx->base.dd->ctx = &ctx->base; in atmel_aes_ctr_init_tfm()
1432 static int atmel_aes_gcm_ghash(struct atmel_aes_dev *dd,
1436 static int atmel_aes_gcm_ghash_init(struct atmel_aes_dev *dd);
1437 static int atmel_aes_gcm_ghash_finalize(struct atmel_aes_dev *dd);
1439 static int atmel_aes_gcm_start(struct atmel_aes_dev *dd);
1440 static int atmel_aes_gcm_process(struct atmel_aes_dev *dd);
1441 static int atmel_aes_gcm_length(struct atmel_aes_dev *dd);
1442 static int atmel_aes_gcm_data(struct atmel_aes_dev *dd);
1443 static int atmel_aes_gcm_tag_init(struct atmel_aes_dev *dd);
1444 static int atmel_aes_gcm_tag(struct atmel_aes_dev *dd);
1445 static int atmel_aes_gcm_finalize(struct atmel_aes_dev *dd);
1453 static int atmel_aes_gcm_ghash(struct atmel_aes_dev *dd, in atmel_aes_gcm_ghash() argument
1458 struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx); in atmel_aes_gcm_ghash()
1460 dd->data = (u32 *)data; in atmel_aes_gcm_ghash()
1461 dd->datalen = datalen; in atmel_aes_gcm_ghash()
1466 atmel_aes_write_ctrl(dd, false, NULL); in atmel_aes_gcm_ghash()
1467 return atmel_aes_wait_for_data_ready(dd, atmel_aes_gcm_ghash_init); in atmel_aes_gcm_ghash()
1470 static int atmel_aes_gcm_ghash_init(struct atmel_aes_dev *dd) in atmel_aes_gcm_ghash_init() argument
1472 struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx); in atmel_aes_gcm_ghash_init()
1475 atmel_aes_write(dd, AES_AADLENR, dd->total); in atmel_aes_gcm_ghash_init()
1476 atmel_aes_write(dd, AES_CLENR, 0); in atmel_aes_gcm_ghash_init()
1480 atmel_aes_write_block(dd, AES_GHASHR(0), ctx->ghash_in); in atmel_aes_gcm_ghash_init()
1482 return atmel_aes_gcm_ghash_finalize(dd); in atmel_aes_gcm_ghash_init()
1485 static int atmel_aes_gcm_ghash_finalize(struct atmel_aes_dev *dd) in atmel_aes_gcm_ghash_finalize() argument
1487 struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx); in atmel_aes_gcm_ghash_finalize()
1491 while (dd->datalen > 0) { in atmel_aes_gcm_ghash_finalize()
1492 atmel_aes_write_block(dd, AES_IDATAR(0), dd->data); in atmel_aes_gcm_ghash_finalize()
1493 dd->data += 4; in atmel_aes_gcm_ghash_finalize()
1494 dd->datalen -= AES_BLOCK_SIZE; in atmel_aes_gcm_ghash_finalize()
1496 isr = atmel_aes_read(dd, AES_ISR); in atmel_aes_gcm_ghash_finalize()
1498 dd->resume = atmel_aes_gcm_ghash_finalize; in atmel_aes_gcm_ghash_finalize()
1499 atmel_aes_write(dd, AES_IER, AES_INT_DATARDY); in atmel_aes_gcm_ghash_finalize()
1505 atmel_aes_read_block(dd, AES_GHASHR(0), ctx->ghash_out); in atmel_aes_gcm_ghash_finalize()
1507 return ctx->ghash_resume(dd); in atmel_aes_gcm_ghash_finalize()
1511 static int atmel_aes_gcm_start(struct atmel_aes_dev *dd) in atmel_aes_gcm_start() argument
1513 struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx); in atmel_aes_gcm_start()
1514 struct aead_request *req = aead_request_cast(dd->areq); in atmel_aes_gcm_start()
1520 u8 *data = dd->buf; in atmel_aes_gcm_start()
1523 atmel_aes_set_mode(dd, rctx); in atmel_aes_gcm_start()
1525 err = atmel_aes_hw_init(dd); in atmel_aes_gcm_start()
1527 return atmel_aes_complete(dd, err); in atmel_aes_gcm_start()
1532 return atmel_aes_gcm_process(dd); in atmel_aes_gcm_start()
1537 if (datalen > dd->buflen) in atmel_aes_gcm_start()
1538 return atmel_aes_complete(dd, -EINVAL); in atmel_aes_gcm_start()
1544 return atmel_aes_gcm_ghash(dd, (const u32 *)data, datalen, in atmel_aes_gcm_start()
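
atmel_aes_gcm_start() builds GCM's initial counter block J0: with the common 96-bit IV no GHASH pass is needed, while longer IVs are zero-padded into dd->buf and fed through atmel_aes_gcm_ghash() first. The 96-bit fast path follows directly from the GCM specification (J0 = IV || 0^31 || 1):

    /* J0 for a 96-bit IV: the IV followed by a big-endian one. */
    static void my_gcm_j0_96(__be32 j0[4], const u8 iv[12])
    {
            memcpy(j0, iv, 12);
            j0[3] = cpu_to_be32(1);
    }
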
1548 static int atmel_aes_gcm_process(struct atmel_aes_dev *dd) in atmel_aes_gcm_process() argument
1550 struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx); in atmel_aes_gcm_process()
1551 struct aead_request *req = aead_request_cast(dd->areq); in atmel_aes_gcm_process()
1553 bool enc = atmel_aes_is_encrypt(dd); in atmel_aes_gcm_process()
1565 dd->flags |= AES_FLAGS_GTAGEN; in atmel_aes_gcm_process()
1567 atmel_aes_write_ctrl(dd, false, NULL); in atmel_aes_gcm_process()
1568 return atmel_aes_wait_for_data_ready(dd, atmel_aes_gcm_length); in atmel_aes_gcm_process()
1571 static int atmel_aes_gcm_length(struct atmel_aes_dev *dd) in atmel_aes_gcm_length() argument
1573 struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx); in atmel_aes_gcm_length()
1574 struct aead_request *req = aead_request_cast(dd->areq); in atmel_aes_gcm_length()
1581 atmel_aes_write_block(dd, AES_IVR(0), j0); in atmel_aes_gcm_length()
1585 atmel_aes_write(dd, AES_AADLENR, req->assoclen); in atmel_aes_gcm_length()
1586 atmel_aes_write(dd, AES_CLENR, ctx->textlen); in atmel_aes_gcm_length()
1590 dd->datalen = 0; in atmel_aes_gcm_length()
1591 return atmel_aes_gcm_data(dd); in atmel_aes_gcm_length()
1596 if (unlikely(req->assoclen + padlen > dd->buflen)) in atmel_aes_gcm_length()
1597 return atmel_aes_complete(dd, -EINVAL); in atmel_aes_gcm_length()
1598 sg_copy_to_buffer(req->src, sg_nents(req->src), dd->buf, req->assoclen); in atmel_aes_gcm_length()
1601 dd->data = (u32 *)dd->buf; in atmel_aes_gcm_length()
1602 dd->datalen = req->assoclen + padlen; in atmel_aes_gcm_length()
1603 return atmel_aes_gcm_data(dd); in atmel_aes_gcm_length()
1606 static int atmel_aes_gcm_data(struct atmel_aes_dev *dd) in atmel_aes_gcm_data() argument
1608 struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx); in atmel_aes_gcm_data()
1609 struct aead_request *req = aead_request_cast(dd->areq); in atmel_aes_gcm_data()
1615 while (dd->datalen > 0) { in atmel_aes_gcm_data()
1616 atmel_aes_write_block(dd, AES_IDATAR(0), dd->data); in atmel_aes_gcm_data()
1617 dd->data += 4; in atmel_aes_gcm_data()
1618 dd->datalen -= AES_BLOCK_SIZE; in atmel_aes_gcm_data()
1620 isr = atmel_aes_read(dd, AES_ISR); in atmel_aes_gcm_data()
1622 dd->resume = atmel_aes_gcm_data; in atmel_aes_gcm_data()
1623 atmel_aes_write(dd, AES_IER, AES_INT_DATARDY); in atmel_aes_gcm_data()
1630 return atmel_aes_gcm_tag_init(dd); in atmel_aes_gcm_data()
1639 mr = atmel_aes_read(dd, AES_MR); in atmel_aes_gcm_data()
1642 if (dd->caps.has_dualbuff) in atmel_aes_gcm_data()
1644 atmel_aes_write(dd, AES_MR, mr); in atmel_aes_gcm_data()
1646 return atmel_aes_dma_start(dd, src, dst, ctx->textlen, in atmel_aes_gcm_data()
1650 return atmel_aes_cpu_start(dd, src, dst, ctx->textlen, in atmel_aes_gcm_data()
1654 static int atmel_aes_gcm_tag_init(struct atmel_aes_dev *dd) in atmel_aes_gcm_tag_init() argument
1656 struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx); in atmel_aes_gcm_tag_init()
1657 struct aead_request *req = aead_request_cast(dd->areq); in atmel_aes_gcm_tag_init()
1658 __be64 *data = dd->buf; in atmel_aes_gcm_tag_init()
1660 if (likely(dd->flags & AES_FLAGS_GTAGEN)) { in atmel_aes_gcm_tag_init()
1661 if (!(atmel_aes_read(dd, AES_ISR) & AES_INT_TAGRDY)) { in atmel_aes_gcm_tag_init()
1662 dd->resume = atmel_aes_gcm_tag_init; in atmel_aes_gcm_tag_init()
1663 atmel_aes_write(dd, AES_IER, AES_INT_TAGRDY); in atmel_aes_gcm_tag_init()
1667 return atmel_aes_gcm_finalize(dd); in atmel_aes_gcm_tag_init()
1671 atmel_aes_read_block(dd, AES_GHASHR(0), ctx->ghash); in atmel_aes_gcm_tag_init()
1676 return atmel_aes_gcm_ghash(dd, (const u32 *)data, AES_BLOCK_SIZE, in atmel_aes_gcm_tag_init()
1680 static int atmel_aes_gcm_tag(struct atmel_aes_dev *dd) in atmel_aes_gcm_tag() argument
1682 struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx); in atmel_aes_gcm_tag()
1689 flags = dd->flags; in atmel_aes_gcm_tag()
1690 dd->flags &= ~(AES_FLAGS_OPMODE_MASK | AES_FLAGS_GTAGEN); in atmel_aes_gcm_tag()
1691 dd->flags |= AES_FLAGS_CTR; in atmel_aes_gcm_tag()
1692 atmel_aes_write_ctrl(dd, false, ctx->j0); in atmel_aes_gcm_tag()
1693 dd->flags = flags; in atmel_aes_gcm_tag()
1695 atmel_aes_write_block(dd, AES_IDATAR(0), ctx->ghash); in atmel_aes_gcm_tag()
1696 return atmel_aes_wait_for_data_ready(dd, atmel_aes_gcm_finalize); in atmel_aes_gcm_tag()
1699 static int atmel_aes_gcm_finalize(struct atmel_aes_dev *dd) in atmel_aes_gcm_finalize() argument
1701 struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx); in atmel_aes_gcm_finalize()
1702 struct aead_request *req = aead_request_cast(dd->areq); in atmel_aes_gcm_finalize()
1704 bool enc = atmel_aes_is_encrypt(dd); in atmel_aes_gcm_finalize()
1709 if (likely(dd->flags & AES_FLAGS_GTAGEN)) in atmel_aes_gcm_finalize()
1710 atmel_aes_read_block(dd, AES_TAGR(0), ctx->tag); in atmel_aes_gcm_finalize()
1712 atmel_aes_read_block(dd, AES_ODATAR(0), ctx->tag); in atmel_aes_gcm_finalize()
1724 return atmel_aes_complete(dd, err); in atmel_aes_gcm_finalize()
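
atmel_aes_gcm_finalize() reads the computed tag from AES_TAGR(0) when hardware tag generation (AES_FLAGS_GTAGEN) was used, or from AES_ODATAR(0) otherwise; on decrypt the result must be checked against the tag carried in the request. A sketch of that check, using the constant-time comparator the crypto API provides for exactly this purpose:

    #include <crypto/algapi.h>

    /* crypto_memneq() avoids the early-exit timing leak that a plain
     * memcmp() would introduce into ICV verification. */
    static int my_gcm_check_tag(const u8 *computed, const u8 *expected,
                                unsigned int authsize)
    {
            return crypto_memneq(computed, expected, authsize) ?
                   -EBADMSG : 0;
    }
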
1740 return atmel_aes_handle_queue(ctx->dd, &req->base); in atmel_aes_gcm_crypt()
1778 struct atmel_aes_dev *dd; in atmel_aes_gcm_init() local
1780 dd = atmel_aes_dev_alloc(&ctx->base); in atmel_aes_gcm_init()
1781 if (!dd) in atmel_aes_gcm_init()
1785 ctx->base.dd = dd; in atmel_aes_gcm_init()
1786 ctx->base.dd->ctx = &ctx->base; in atmel_aes_gcm_init()
1818 static int atmel_aes_xts_process_data(struct atmel_aes_dev *dd);
1820 static int atmel_aes_xts_start(struct atmel_aes_dev *dd) in atmel_aes_xts_start() argument
1822 struct atmel_aes_xts_ctx *ctx = atmel_aes_xts_ctx_cast(dd->ctx); in atmel_aes_xts_start()
1823 struct skcipher_request *req = skcipher_request_cast(dd->areq); in atmel_aes_xts_start()
1828 atmel_aes_set_mode(dd, rctx); in atmel_aes_xts_start()
1830 err = atmel_aes_hw_init(dd); in atmel_aes_xts_start()
1832 return atmel_aes_complete(dd, err); in atmel_aes_xts_start()
1835 flags = dd->flags; in atmel_aes_xts_start()
1836 dd->flags &= ~AES_FLAGS_MODE_MASK; in atmel_aes_xts_start()
1837 dd->flags |= (AES_FLAGS_ECB | AES_FLAGS_ENCRYPT); in atmel_aes_xts_start()
1838 atmel_aes_write_ctrl_key(dd, false, NULL, in atmel_aes_xts_start()
1840 dd->flags = flags; in atmel_aes_xts_start()
1842 atmel_aes_write_block(dd, AES_IDATAR(0), req->iv); in atmel_aes_xts_start()
1843 return atmel_aes_wait_for_data_ready(dd, atmel_aes_xts_process_data); in atmel_aes_xts_start()
1846 static int atmel_aes_xts_process_data(struct atmel_aes_dev *dd) in atmel_aes_xts_process_data() argument
1848 struct skcipher_request *req = skcipher_request_cast(dd->areq); in atmel_aes_xts_process_data()
1856 atmel_aes_read_block(dd, AES_ODATAR(0), tweak); in atmel_aes_xts_process_data()
1866 atmel_aes_write_ctrl(dd, use_dma, NULL); in atmel_aes_xts_process_data()
1867 atmel_aes_write_block(dd, AES_TWR(0), tweak); in atmel_aes_xts_process_data()
1868 atmel_aes_write_block(dd, AES_ALPHAR(0), one); in atmel_aes_xts_process_data()
1870 return atmel_aes_dma_start(dd, req->src, req->dst, in atmel_aes_xts_process_data()
1874 return atmel_aes_cpu_start(dd, req->src, req->dst, req->cryptlen, in atmel_aes_xts_process_data()
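
The XTS path runs two hardware passes: atmel_aes_xts_start() temporarily forces ECB|ENCRYPT and encrypts req->iv with the second (tweak) key, and atmel_aes_xts_process_data() reads the result back from AES_ODATAR(0) as the tweak before programming AES_TWR/AES_ALPHAR for the data pass. In software terms the first pass is the standard XTS tweak derivation; a sketch assuming the crypto_cipher single-block API (header location varies across kernel versions):

    /* T = AES-ECB(tweak_key, sector IV): what the first pass computes. */
    static void my_xts_tweak(struct crypto_cipher *tweak_tfm,
                             u8 tweak[AES_BLOCK_SIZE],
                             const u8 iv[AES_BLOCK_SIZE])
    {
            crypto_cipher_encrypt_one(tweak_tfm, tweak, iv);
    }
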
1915 struct atmel_aes_dev *dd; in atmel_aes_xts_init_tfm() local
1918 dd = atmel_aes_dev_alloc(&ctx->base); in atmel_aes_xts_init_tfm()
1919 if (!dd) in atmel_aes_xts_init_tfm()
1929 ctx->base.dd = dd; in atmel_aes_xts_init_tfm()
1930 ctx->base.dd->ctx = &ctx->base; in atmel_aes_xts_init_tfm()
1963 static int atmel_aes_authenc_start(struct atmel_aes_dev *dd);
1964 static int atmel_aes_authenc_init(struct atmel_aes_dev *dd, int err,
1966 static int atmel_aes_authenc_transfer(struct atmel_aes_dev *dd, int err,
1968 static int atmel_aes_authenc_digest(struct atmel_aes_dev *dd);
1969 static int atmel_aes_authenc_final(struct atmel_aes_dev *dd, int err,
1972 static void atmel_aes_authenc_complete(struct atmel_aes_dev *dd, int err) in atmel_aes_authenc_complete() argument
1974 struct aead_request *req = aead_request_cast(dd->areq); in atmel_aes_authenc_complete()
1977 if (err && (dd->flags & AES_FLAGS_OWN_SHA)) in atmel_aes_authenc_complete()
1979 dd->flags &= ~AES_FLAGS_OWN_SHA; in atmel_aes_authenc_complete()
1982 static int atmel_aes_authenc_start(struct atmel_aes_dev *dd) in atmel_aes_authenc_start() argument
1984 struct aead_request *req = aead_request_cast(dd->areq); in atmel_aes_authenc_start()
1990 atmel_aes_set_mode(dd, &rctx->base); in atmel_aes_authenc_start()
1992 err = atmel_aes_hw_init(dd); in atmel_aes_authenc_start()
1994 return atmel_aes_complete(dd, err); in atmel_aes_authenc_start()
1997 atmel_aes_authenc_init, dd); in atmel_aes_authenc_start()
2000 static int atmel_aes_authenc_init(struct atmel_aes_dev *dd, int err, in atmel_aes_authenc_init() argument
2003 struct aead_request *req = aead_request_cast(dd->areq); in atmel_aes_authenc_init()
2007 dd->is_async = true; in atmel_aes_authenc_init()
2009 return atmel_aes_complete(dd, err); in atmel_aes_authenc_init()
2012 dd->flags |= AES_FLAGS_OWN_SHA; in atmel_aes_authenc_init()
2018 atmel_aes_authenc_transfer, dd); in atmel_aes_authenc_init()
2021 static int atmel_aes_authenc_transfer(struct atmel_aes_dev *dd, int err, in atmel_aes_authenc_transfer() argument
2024 struct aead_request *req = aead_request_cast(dd->areq); in atmel_aes_authenc_transfer()
2026 bool enc = atmel_aes_is_encrypt(dd); in atmel_aes_authenc_transfer()
2032 dd->is_async = true; in atmel_aes_authenc_transfer()
2034 return atmel_aes_complete(dd, err); in atmel_aes_authenc_transfer()
2053 atmel_aes_write_ctrl(dd, true, iv); in atmel_aes_authenc_transfer()
2057 atmel_aes_write(dd, AES_EMR, emr); in atmel_aes_authenc_transfer()
2060 return atmel_aes_dma_start(dd, src, dst, rctx->textlen, in atmel_aes_authenc_transfer()
2064 static int atmel_aes_authenc_digest(struct atmel_aes_dev *dd) in atmel_aes_authenc_digest() argument
2066 struct aead_request *req = aead_request_cast(dd->areq); in atmel_aes_authenc_digest()
2070 dd->flags &= ~AES_FLAGS_OWN_SHA; in atmel_aes_authenc_digest()
2073 atmel_aes_authenc_final, dd); in atmel_aes_authenc_digest()
2076 static int atmel_aes_authenc_final(struct atmel_aes_dev *dd, int err, in atmel_aes_authenc_final() argument
2079 struct aead_request *req = aead_request_cast(dd->areq); in atmel_aes_authenc_final()
2082 bool enc = atmel_aes_is_encrypt(dd); in atmel_aes_authenc_final()
2087 dd->is_async = true; in atmel_aes_authenc_final()
2102 return atmel_aes_complete(dd, err); in atmel_aes_authenc_final()
2144 struct atmel_aes_dev *dd; in atmel_aes_authenc_init_tfm() local
2146 dd = atmel_aes_dev_alloc(&ctx->base); in atmel_aes_authenc_init_tfm()
2147 if (!dd) in atmel_aes_authenc_init_tfm()
2156 ctx->base.dd = dd; in atmel_aes_authenc_init_tfm()
2157 ctx->base.dd->ctx = &ctx->base; in atmel_aes_authenc_init_tfm()
2221 return atmel_aes_handle_queue(ctx->dd, &req->base); in atmel_aes_authenc_crypt()
2320 static int atmel_aes_buff_init(struct atmel_aes_dev *dd) in atmel_aes_buff_init() argument
2322 dd->buf = (void *)__get_free_pages(GFP_KERNEL, ATMEL_AES_BUFFER_ORDER); in atmel_aes_buff_init()
2323 dd->buflen = ATMEL_AES_BUFFER_SIZE; in atmel_aes_buff_init()
2324 dd->buflen &= ~(AES_BLOCK_SIZE - 1); in atmel_aes_buff_init()
2326 if (!dd->buf) { in atmel_aes_buff_init()
2327 dev_err(dd->dev, "unable to alloc pages.\n"); in atmel_aes_buff_init()
2334 static void atmel_aes_buff_cleanup(struct atmel_aes_dev *dd) in atmel_aes_buff_cleanup() argument
2336 free_pages((unsigned long)dd->buf, ATMEL_AES_BUFFER_ORDER); in atmel_aes_buff_cleanup()
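
atmel_aes_buff_init() allocates the multi-page bounce buffer once at probe and rounds its usable length down to a whole number of AES blocks, so neither the PIO path nor the misaligned-DMA fallback ever allocates in the I/O path; the cleanup must free with the same page order as the allocation. A sketch assuming an order-2 allocation, as the ATMEL_AES_BUFFER_ORDER/ATMEL_AES_BUFFER_SIZE names suggest:

    #define MY_BUFFER_ORDER 2                       /* assumption */
    #define MY_BUFFER_SIZE  (PAGE_SIZE << MY_BUFFER_ORDER)

    static int my_buff_init(struct my_dev *dd)
    {
            dd->buf = (void *)__get_free_pages(GFP_KERNEL,
                                               MY_BUFFER_ORDER);
            if (!dd->buf)
                    return -ENOMEM;

            /* whole blocks only, so block-wise PIO never overruns */
            dd->buflen = MY_BUFFER_SIZE & ~(AES_BLOCK_SIZE - 1);
            return 0;
    }
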
2339 static int atmel_aes_dma_init(struct atmel_aes_dev *dd) in atmel_aes_dma_init() argument
2344 dd->src.chan = dma_request_chan(dd->dev, "tx"); in atmel_aes_dma_init()
2345 if (IS_ERR(dd->src.chan)) { in atmel_aes_dma_init()
2346 ret = PTR_ERR(dd->src.chan); in atmel_aes_dma_init()
2350 dd->dst.chan = dma_request_chan(dd->dev, "rx"); in atmel_aes_dma_init()
2351 if (IS_ERR(dd->dst.chan)) { in atmel_aes_dma_init()
2352 ret = PTR_ERR(dd->dst.chan); in atmel_aes_dma_init()
2359 dma_release_channel(dd->src.chan); in atmel_aes_dma_init()
2361 dev_err(dd->dev, "no DMA channel available\n"); in atmel_aes_dma_init()
2365 static void atmel_aes_dma_cleanup(struct atmel_aes_dev *dd) in atmel_aes_dma_cleanup() argument
2367 dma_release_channel(dd->dst.chan); in atmel_aes_dma_cleanup()
2368 dma_release_channel(dd->src.chan); in atmel_aes_dma_cleanup()
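
atmel_aes_dma_init() claims the two channels named in the devicetree binding: "tx" feeds AES_IDATAR(0) and "rx" drains AES_ODATAR(0), with "tx" released again if "rx" cannot be obtained. The acquisition-with-rollback shape:

    static int my_dma_init(struct my_dev *dd)
    {
            dd->src.chan = dma_request_chan(dd->dev, "tx");
            if (IS_ERR(dd->src.chan))
                    return PTR_ERR(dd->src.chan);

            dd->dst.chan = dma_request_chan(dd->dev, "rx");
            if (IS_ERR(dd->dst.chan)) {
                    dma_release_channel(dd->src.chan);  /* rollback */
                    return PTR_ERR(dd->dst.chan);
            }
            return 0;
    }
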
2373 struct atmel_aes_dev *dd = (struct atmel_aes_dev *)data; in atmel_aes_queue_task() local
2375 atmel_aes_handle_queue(dd, NULL); in atmel_aes_queue_task()
2380 struct atmel_aes_dev *dd = (struct atmel_aes_dev *)data; in atmel_aes_done_task() local
2382 dd->is_async = true; in atmel_aes_done_task()
2383 (void)dd->resume(dd); in atmel_aes_done_task()
2404 static void atmel_aes_unregister_algs(struct atmel_aes_dev *dd) in atmel_aes_unregister_algs() argument
2409 if (dd->caps.has_authenc) in atmel_aes_unregister_algs()
2414 if (dd->caps.has_xts) in atmel_aes_unregister_algs()
2417 if (dd->caps.has_gcm) in atmel_aes_unregister_algs()
2420 if (dd->caps.has_cfb64) in atmel_aes_unregister_algs()
2435 static int atmel_aes_register_algs(struct atmel_aes_dev *dd) in atmel_aes_register_algs() argument
2447 if (dd->caps.has_cfb64) { in atmel_aes_register_algs()
2455 if (dd->caps.has_gcm) { in atmel_aes_register_algs()
2463 if (dd->caps.has_xts) { in atmel_aes_register_algs()
2472 if (dd->caps.has_authenc) { in atmel_aes_register_algs()
2505 static void atmel_aes_get_cap(struct atmel_aes_dev *dd) in atmel_aes_get_cap() argument
2507 dd->caps.has_dualbuff = 0; in atmel_aes_get_cap()
2508 dd->caps.has_cfb64 = 0; in atmel_aes_get_cap()
2509 dd->caps.has_gcm = 0; in atmel_aes_get_cap()
2510 dd->caps.has_xts = 0; in atmel_aes_get_cap()
2511 dd->caps.has_authenc = 0; in atmel_aes_get_cap()
2512 dd->caps.max_burst_size = 1; in atmel_aes_get_cap()
2515 switch (dd->hw_version & 0xff0) { in atmel_aes_get_cap()
2517 dd->caps.has_dualbuff = 1; in atmel_aes_get_cap()
2518 dd->caps.has_cfb64 = 1; in atmel_aes_get_cap()
2519 dd->caps.has_gcm = 1; in atmel_aes_get_cap()
2520 dd->caps.has_xts = 1; in atmel_aes_get_cap()
2521 dd->caps.has_authenc = 1; in atmel_aes_get_cap()
2522 dd->caps.max_burst_size = 4; in atmel_aes_get_cap()
2525 dd->caps.has_dualbuff = 1; in atmel_aes_get_cap()
2526 dd->caps.has_cfb64 = 1; in atmel_aes_get_cap()
2527 dd->caps.has_gcm = 1; in atmel_aes_get_cap()
2528 dd->caps.max_burst_size = 4; in atmel_aes_get_cap()
2531 dd->caps.has_dualbuff = 1; in atmel_aes_get_cap()
2532 dd->caps.has_cfb64 = 1; in atmel_aes_get_cap()
2533 dd->caps.max_burst_size = 4; in atmel_aes_get_cap()
2538 dev_warn(dd->dev, in atmel_aes_get_cap()