Lines Matching refs:aes_dev

Cross-reference listing from the OCS (Offload and Crypto Subsystem) AES driver, ocs-aes.c. Each entry shows the kernel source line number, the matching line of code, and the enclosing function; "argument" or "local" notes how aes_dev is referenced on that line. Short reconstructions of the elided context follow each group of matches.

204 static inline void aes_a_set_endianness(const struct ocs_aes_dev *aes_dev)  in aes_a_set_endianness()  argument
206 iowrite32(0x7FF, aes_dev->base_reg + AES_BYTE_ORDER_CFG_OFFSET); in aes_a_set_endianness()
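The match at line 206 is the helper's entire body: one MMIO write that sets every byte-order configuration bit. A minimal reconstruction (the 0x7FF value is taken from the fragment; AES_BYTE_ORDER_CFG_OFFSET is assumed to come from the driver's register-map header):

    /* Set the byte-order configuration of the engine. */
    static inline void aes_a_set_endianness(const struct ocs_aes_dev *aes_dev)
    {
            /* 0x7FF sets all byte-order configuration bits at once. */
            iowrite32(0x7FF, aes_dev->base_reg + AES_BYTE_ORDER_CFG_OFFSET);
    }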
210 static inline void aes_a_op_trigger(const struct ocs_aes_dev *aes_dev) in aes_a_op_trigger() argument
212 iowrite32(AES_ACTIVE_TRIGGER, aes_dev->base_reg + AES_ACTIVE_OFFSET); in aes_a_op_trigger()
216 static inline void aes_a_op_termination(const struct ocs_aes_dev *aes_dev) in aes_a_op_termination() argument
219 aes_dev->base_reg + AES_ACTIVE_OFFSET); in aes_a_op_termination()
231 static inline void aes_a_set_last_gcx(const struct ocs_aes_dev *aes_dev) in aes_a_set_last_gcx() argument
234 aes_dev->base_reg + AES_ACTIVE_OFFSET); in aes_a_set_last_gcx()
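Lines 210-234 all write the same AES_ACTIVE register: triggering an operation, terminating it, and flagging the last GCM/CCM ("GCX") block are just different bit patterns at one offset. A sketch of the three helpers; AES_ACTIVE_TERMINATION and AES_ACTIVE_LAST_CCM_GCM are assumed constant names (only AES_ACTIVE_TRIGGER appears in the matches):

    /* Trigger the start of an AES operation. */
    static inline void aes_a_op_trigger(const struct ocs_aes_dev *aes_dev)
    {
            iowrite32(AES_ACTIVE_TRIGGER, aes_dev->base_reg + AES_ACTIVE_OFFSET);
    }

    /* Indicate that the last bulk of data has been written. */
    static inline void aes_a_op_termination(const struct ocs_aes_dev *aes_dev)
    {
            /* AES_ACTIVE_TERMINATION is an assumed name; line 218 is elided. */
            iowrite32(AES_ACTIVE_TERMINATION,
                      aes_dev->base_reg + AES_ACTIVE_OFFSET);
    }

    /* Flag the last GCM/CCM block (the "last GCX" bit). */
    static inline void aes_a_set_last_gcx(const struct ocs_aes_dev *aes_dev)
    {
            iowrite32(AES_ACTIVE_LAST_CCM_GCM,      /* assumed constant name */
                      aes_dev->base_reg + AES_ACTIVE_OFFSET);
    }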
238 static inline void aes_a_wait_last_gcx(const struct ocs_aes_dev *aes_dev) in aes_a_wait_last_gcx() argument
243 aes_active_reg = ioread32(aes_dev->base_reg + in aes_a_wait_last_gcx()
249 static void aes_a_dma_wait_input_buffer_occupancy(const struct ocs_aes_dev *aes_dev) in aes_a_dma_wait_input_buffer_occupancy() argument
254 reg = ioread32(aes_dev->base_reg + AES_A_DMA_STATUS_OFFSET); in aes_a_dma_wait_input_buffer_occupancy()
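The two wait helpers at lines 238 and 249 poll a status register until the hardware clears a bit; the loop bodies fall on elided lines. A sketch of their likely shape, with AES_ACTIVE_LAST_CCM_GCM and AES_DMA_STATUS_INPUT_BUFFER_OCCUPANCY_MASK as assumed mask names:

    /* Spin until the hardware clears the last-GCX flag. */
    static inline void aes_a_wait_last_gcx(const struct ocs_aes_dev *aes_dev)
    {
            u32 aes_active_reg;

            do {
                    aes_active_reg = ioread32(aes_dev->base_reg +
                                              AES_ACTIVE_OFFSET);
            } while (aes_active_reg & AES_ACTIVE_LAST_CCM_GCM);
    }

    /* Spin until the DMA input buffer has drained. */
    static void aes_a_dma_wait_input_buffer_occupancy(const struct ocs_aes_dev *aes_dev)
    {
            u32 reg;

            do {
                    reg = ioread32(aes_dev->base_reg + AES_A_DMA_STATUS_OFFSET);
            } while (reg & AES_DMA_STATUS_INPUT_BUFFER_OCCUPANCY_MASK);
    }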
265 static inline void aes_a_set_last_gcx_and_adata(const struct ocs_aes_dev *aes_dev) in aes_a_set_last_gcx_and_adata() argument
268 aes_dev->base_reg + AES_ACTIVE_OFFSET); in aes_a_set_last_gcx_and_adata()
272 static inline void aes_a_dma_set_xfer_size_zero(const struct ocs_aes_dev *aes_dev) in aes_a_dma_set_xfer_size_zero() argument
274 iowrite32(0, aes_dev->base_reg + AES_A_DMA_SRC_SIZE_OFFSET); in aes_a_dma_set_xfer_size_zero()
275 iowrite32(0, aes_dev->base_reg + AES_A_DMA_DST_SIZE_OFFSET); in aes_a_dma_set_xfer_size_zero()
279 static inline void aes_a_dma_active(const struct ocs_aes_dev *aes_dev) in aes_a_dma_active() argument
282 aes_dev->base_reg + AES_A_DMA_DMA_MODE_OFFSET); in aes_a_dma_active()
286 static inline void aes_a_dma_active_src_ll_en(const struct ocs_aes_dev *aes_dev) in aes_a_dma_active_src_ll_en() argument
290 aes_dev->base_reg + AES_A_DMA_DMA_MODE_OFFSET); in aes_a_dma_active_src_ll_en()
294 static inline void aes_a_dma_active_dst_ll_en(const struct ocs_aes_dev *aes_dev) in aes_a_dma_active_dst_ll_en() argument
298 aes_dev->base_reg + AES_A_DMA_DMA_MODE_OFFSET); in aes_a_dma_active_dst_ll_en()
302 static inline void aes_a_dma_active_src_dst_ll_en(const struct ocs_aes_dev *aes_dev) in aes_a_dma_active_src_dst_ll_en() argument
307 aes_dev->base_reg + AES_A_DMA_DMA_MODE_OFFSET); in aes_a_dma_active_src_dst_ll_en()
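Lines 272-307 configure the DMA engine through two mechanisms: the SRC/DST size registers (zeroed when descriptors will drive the transfer) and a single DMA_MODE register whose value selects which linked lists are live. The src-only (line 286) and dst-only (line 294) variants differ from the combined helper only in which enable bit is OR'd in. Sketch; the AES_A_DMA_DMA_MODE_ACTIVE and *_LINK_LIST_EN bit names are assumptions:

    /* Zero both transfer sizes: nothing flows until a mode is programmed. */
    static inline void aes_a_dma_set_xfer_size_zero(const struct ocs_aes_dev *aes_dev)
    {
            iowrite32(0, aes_dev->base_reg + AES_A_DMA_SRC_SIZE_OFFSET);
            iowrite32(0, aes_dev->base_reg + AES_A_DMA_DST_SIZE_OFFSET);
    }

    /* Activate DMA with neither linked list (sizes programmed directly). */
    static inline void aes_a_dma_active(const struct ocs_aes_dev *aes_dev)
    {
            iowrite32(AES_A_DMA_DMA_MODE_ACTIVE,
                      aes_dev->base_reg + AES_A_DMA_DMA_MODE_OFFSET);
    }

    /* Activate DMA with both source and destination linked lists enabled. */
    static inline void aes_a_dma_active_src_dst_ll_en(const struct ocs_aes_dev *aes_dev)
    {
            iowrite32(AES_A_DMA_DMA_MODE_ACTIVE |
                      AES_A_DMA_DMA_MODE_SRC_LINK_LIST_EN |
                      AES_A_DMA_DMA_MODE_DST_LINK_LIST_EN,
                      aes_dev->base_reg + AES_A_DMA_DMA_MODE_OFFSET);
    }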
311 static inline void aes_a_dma_reset_and_activate_perf_cntr(const struct ocs_aes_dev *aes_dev) in aes_a_dma_reset_and_activate_perf_cntr() argument
313 iowrite32(0x00000000, aes_dev->base_reg + AES_A_DMA_PERF_CNTR_OFFSET); in aes_a_dma_reset_and_activate_perf_cntr()
315 aes_dev->base_reg + AES_A_DMA_WHILE_ACTIVE_MODE_OFFSET); in aes_a_dma_reset_and_activate_perf_cntr()
319 static inline void aes_a_dma_wait_and_deactivate_perf_cntr(const struct ocs_aes_dev *aes_dev, in aes_a_dma_wait_and_deactivate_perf_cntr() argument
322 while (ioread32(aes_dev->base_reg + AES_A_DMA_PERF_CNTR_OFFSET) < delay) in aes_a_dma_wait_and_deactivate_perf_cntr()
325 aes_dev->base_reg + AES_A_DMA_WHILE_ACTIVE_MODE_OFFSET); in aes_a_dma_wait_and_deactivate_perf_cntr()
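The performance counter doubles as a busy-wait timer: it is zeroed and started, polled until it exceeds a requested tick count, then stopped via the WHILE_ACTIVE mode register. The fragments give both writes and the polling condition; AES_ACTIVATE_PERF_CNTR and AES_DEACTIVATE_PERF_CNTR are assumed constant names:

    /* Zero and start the DMA performance counter. */
    static inline void aes_a_dma_reset_and_activate_perf_cntr(const struct ocs_aes_dev *aes_dev)
    {
            iowrite32(0x00000000, aes_dev->base_reg + AES_A_DMA_PERF_CNTR_OFFSET);
            iowrite32(AES_ACTIVATE_PERF_CNTR,
                      aes_dev->base_reg + AES_A_DMA_WHILE_ACTIVE_MODE_OFFSET);
    }

    /* Busy-wait until the counter passes 'delay' ticks, then stop it. */
    static inline void aes_a_dma_wait_and_deactivate_perf_cntr(const struct ocs_aes_dev *aes_dev,
                                                               int delay)
    {
            while (ioread32(aes_dev->base_reg + AES_A_DMA_PERF_CNTR_OFFSET) < delay)
                    ;
            iowrite32(AES_DEACTIVATE_PERF_CNTR,
                      aes_dev->base_reg + AES_A_DMA_WHILE_ACTIVE_MODE_OFFSET);
    }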
329 static void aes_irq_disable(struct ocs_aes_dev *aes_dev) in aes_irq_disable() argument
335 aes_dev->base_reg + AES_A_DMA_MSI_IER_OFFSET); in aes_irq_disable()
336 iowrite32(AES_DISABLE_INT, aes_dev->base_reg + AES_IER_OFFSET); in aes_irq_disable()
339 isr_val = ioread32(aes_dev->base_reg + AES_A_DMA_MSI_ISR_OFFSET); in aes_irq_disable()
342 aes_dev->base_reg + AES_A_DMA_MSI_ISR_OFFSET); in aes_irq_disable()
344 isr_val = ioread32(aes_dev->base_reg + AES_A_DMA_MSI_MASK_OFFSET); in aes_irq_disable()
347 aes_dev->base_reg + AES_A_DMA_MSI_MASK_OFFSET); in aes_irq_disable()
349 isr_val = ioread32(aes_dev->base_reg + AES_ISR_OFFSET); in aes_irq_disable()
351 iowrite32(isr_val, aes_dev->base_reg + AES_ISR_OFFSET); in aes_irq_disable()
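aes_irq_disable() (lines 329-351) does two jobs: it masks both interrupt sources (the DMA MSI block and the AES core), then clears anything already pending by reading each status register and writing the value back, i.e. write-1-to-clear semantics. The fragments give every register access; only the pending-check conditionals around lines 340 and 345 are filled in here:

    static void aes_irq_disable(struct ocs_aes_dev *aes_dev)
    {
            u32 isr_val;

            /* Mask the DMA and AES interrupt sources. */
            iowrite32(AES_DISABLE_INT,
                      aes_dev->base_reg + AES_A_DMA_MSI_IER_OFFSET);
            iowrite32(AES_DISABLE_INT, aes_dev->base_reg + AES_IER_OFFSET);

            /* Clear any already-pending DMA interrupt (write-1-to-clear). */
            isr_val = ioread32(aes_dev->base_reg + AES_A_DMA_MSI_ISR_OFFSET);
            if (isr_val)
                    iowrite32(isr_val,
                              aes_dev->base_reg + AES_A_DMA_MSI_ISR_OFFSET);

            /* Clear any latched MSI mask bits the same way. */
            isr_val = ioread32(aes_dev->base_reg + AES_A_DMA_MSI_MASK_OFFSET);
            if (isr_val)
                    iowrite32(isr_val,
                              aes_dev->base_reg + AES_A_DMA_MSI_MASK_OFFSET);

            /* Clear any pending AES-core interrupt. */
            isr_val = ioread32(aes_dev->base_reg + AES_ISR_OFFSET);
            if (isr_val)
                    iowrite32(isr_val, aes_dev->base_reg + AES_ISR_OFFSET);
    }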
355 static void aes_irq_enable(struct ocs_aes_dev *aes_dev, u8 irq) in aes_irq_enable() argument
366 aes_dev->base_reg + AES_A_DMA_MSI_IER_OFFSET); in aes_irq_enable()
375 iowrite32(AES_COMPLETE_INT, aes_dev->base_reg + AES_IER_OFFSET); in aes_irq_enable()
380 iowrite32(AES_DISABLE_INT, aes_dev->base_reg + AES_IER_OFFSET); in aes_irq_enable()
403 aes_dev->base_reg + AES_A_DMA_MSI_IER_OFFSET); in aes_irq_enable()
408 static int ocs_aes_irq_enable_and_wait(struct ocs_aes_dev *aes_dev, u8 irq) in ocs_aes_irq_enable_and_wait() argument
412 reinit_completion(&aes_dev->irq_completion); in ocs_aes_irq_enable_and_wait()
413 aes_irq_enable(aes_dev, irq); in ocs_aes_irq_enable_and_wait()
414 rc = wait_for_completion_interruptible(&aes_dev->irq_completion); in ocs_aes_irq_enable_and_wait()
418 return aes_dev->dma_err_mask ? -EIO : 0; in ocs_aes_irq_enable_and_wait()
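aes_irq_enable() (lines 355-403) routes the request to the right enable register: DMA interrupts are armed through AES_A_DMA_MSI_IER_OFFSET, while AES_COMPLETE_INT goes through AES_IER_OFFSET with the other source explicitly disabled. ocs_aes_irq_enable_and_wait() then builds the driver's synchronous wait on top of it. The fragments give almost the whole body; only the early return on interruption is filled in:

    static int ocs_aes_irq_enable_and_wait(struct ocs_aes_dev *aes_dev, u8 irq)
    {
            int rc;

            reinit_completion(&aes_dev->irq_completion);
            aes_irq_enable(aes_dev, irq);
            rc = wait_for_completion_interruptible(&aes_dev->irq_completion);
            if (rc)
                    return rc;      /* interrupted by a signal */

            /* The ISR latches DMA error bits before completing. */
            return aes_dev->dma_err_mask ? -EIO : 0;
    }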
422 static inline void dma_to_ocs_aes_ll(struct ocs_aes_dev *aes_dev, in dma_to_ocs_aes_ll() argument
425 iowrite32(0, aes_dev->base_reg + AES_A_DMA_SRC_SIZE_OFFSET); in dma_to_ocs_aes_ll()
427 aes_dev->base_reg + AES_A_DMA_NEXT_SRC_DESCR_OFFSET); in dma_to_ocs_aes_ll()
431 static inline void dma_from_ocs_aes_ll(struct ocs_aes_dev *aes_dev, in dma_from_ocs_aes_ll() argument
434 iowrite32(0, aes_dev->base_reg + AES_A_DMA_DST_SIZE_OFFSET); in dma_from_ocs_aes_ll()
436 aes_dev->base_reg + AES_A_DMA_NEXT_DST_DESCR_OFFSET); in dma_from_ocs_aes_ll()
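Programming a linked-list transfer is symmetric on both sides: zero the direct-size register so the engine walks descriptors instead, then point the NEXT_*_DESCR register at the DMA address of the first descriptor. The fragments give all four writes; the dma_list parameter type follows from the iowrite32() usage:

    /* Point the DMA source channel at an OCS linked list. */
    static inline void dma_to_ocs_aes_ll(struct ocs_aes_dev *aes_dev,
                                         dma_addr_t dma_list)
    {
            iowrite32(0, aes_dev->base_reg + AES_A_DMA_SRC_SIZE_OFFSET);
            iowrite32(dma_list,
                      aes_dev->base_reg + AES_A_DMA_NEXT_SRC_DESCR_OFFSET);
    }

    /* Point the DMA destination channel at an OCS linked list. */
    static inline void dma_from_ocs_aes_ll(struct ocs_aes_dev *aes_dev,
                                           dma_addr_t dma_list)
    {
            iowrite32(0, aes_dev->base_reg + AES_A_DMA_DST_SIZE_OFFSET);
            iowrite32(dma_list,
                      aes_dev->base_reg + AES_A_DMA_NEXT_DST_DESCR_OFFSET);
    }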
441 struct ocs_aes_dev *aes_dev = dev_id; in ocs_aes_irq_handler() local
445 aes_dma_isr = ioread32(aes_dev->base_reg + AES_A_DMA_MSI_ISR_OFFSET); in ocs_aes_irq_handler()
448 aes_irq_disable(aes_dev); in ocs_aes_irq_handler()
451 aes_dev->dma_err_mask = aes_dma_isr & in ocs_aes_irq_handler()
461 complete(&aes_dev->irq_completion); in ocs_aes_irq_handler()
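The interrupt handler (lines 441-461) reads the DMA status first, masks everything via aes_irq_disable(), latches any error bits into dma_err_mask, and wakes the thread sleeping in ocs_aes_irq_enable_and_wait(). Sketch; AES_DMA_ERR_MASK stands in for the union of error bits, whose individual names fall on elided lines:

    static irqreturn_t ocs_aes_irq_handler(int irq, void *dev_id)
    {
            struct ocs_aes_dev *aes_dev = dev_id;
            u32 aes_dma_isr;

            /* Read DMA interrupt status before masking everything. */
            aes_dma_isr = ioread32(aes_dev->base_reg + AES_A_DMA_MSI_ISR_OFFSET);

            /* Mask further interrupts and latch any DMA error bits. */
            aes_irq_disable(aes_dev);
            aes_dev->dma_err_mask = aes_dma_isr & AES_DMA_ERR_MASK;

            /* Wake whoever sleeps in ocs_aes_irq_enable_and_wait(). */
            complete(&aes_dev->irq_completion);

            return IRQ_HANDLED;
    }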
477 int ocs_aes_set_key(struct ocs_aes_dev *aes_dev, u32 key_size, const u8 *key, in ocs_aes_set_key() argument
486 dev_err(aes_dev->dev, in ocs_aes_set_key()
493 dev_err(aes_dev->dev, in ocs_aes_set_key()
507 aes_dev->base_reg + AES_KEY_0_OFFSET + in ocs_aes_set_key()
518 iowrite32(val, aes_dev->base_reg + AES_KEY_SIZE_OFFSET); in ocs_aes_set_key()
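ocs_aes_set_key() (lines 477-518) validates the key size against the selected cipher (the dev_err() calls at 486 and 493), writes the key 32 bits at a time starting at AES_KEY_0_OFFSET, and programs AES_KEY_SIZE. A condensed sketch; the fourth parameter, the validation checks, and the key-size encoding all fall on elided lines and are labeled assumptions:

    int ocs_aes_set_key(struct ocs_aes_dev *aes_dev, u32 key_size, const u8 *key,
                        enum ocs_cipher cipher)   /* 4th parameter assumed */
    {
            const u32 *key_u32 = (const u32 *)key;
            u32 val;
            int i;

            /* Lines 486/493 reject unsupported key sizes with dev_err();
             * the exact per-cipher checks are elided in the listing. */
            if (!key)
                    return -EINVAL;

            /* Write the key one 32-bit word at a time. */
            for (i = 0; i < key_size / sizeof(u32); i++)
                    iowrite32(key_u32[i],
                              aes_dev->base_reg + AES_KEY_0_OFFSET +
                              (i * sizeof(u32)));

            /* Encode and program the key size; the encoding of 'val' is
             * elided, so these constant names are placeholders. */
            val = (key_size == 32) ? AES_KEY_SIZE_256 : AES_KEY_SIZE_128;
            iowrite32(val, aes_dev->base_reg + AES_KEY_SIZE_OFFSET);

            return 0;
    }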
524 static inline void set_ocs_aes_command(struct ocs_aes_dev *aes_dev, in set_ocs_aes_command() argument
556 iowrite32(val, aes_dev->base_reg + AES_COMMAND_OFFSET); in set_ocs_aes_command()
559 static void ocs_aes_init(struct ocs_aes_dev *aes_dev, in ocs_aes_init() argument
565 aes_irq_disable(aes_dev); in ocs_aes_init()
568 aes_a_set_endianness(aes_dev); in ocs_aes_init()
571 set_ocs_aes_command(aes_dev, cipher, mode, instruction); in ocs_aes_init()
578 static inline void ocs_aes_write_last_data_blk_len(struct ocs_aes_dev *aes_dev, in ocs_aes_write_last_data_blk_len() argument
593 iowrite32(val, aes_dev->base_reg + AES_PLEN_OFFSET); in ocs_aes_write_last_data_blk_len()
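set_ocs_aes_command() (lines 524-556) folds cipher, mode, and instruction into one command word and writes it to AES_COMMAND; ocs_aes_write_last_data_blk_len() (lines 578-593) computes the residual block length and writes it to AES_PLEN. ocs_aes_init() simply sequences the common setup, and the fragments name all three calls. Sketch; the enum type names are assumptions:

    static void ocs_aes_init(struct ocs_aes_dev *aes_dev,
                             enum ocs_mode mode,
                             enum ocs_cipher cipher,
                             enum ocs_instruction instruction)
    {
            /* Ensure interrupts are disabled and pending ones cleared. */
            aes_irq_disable(aes_dev);

            /* Set the byte-order configuration of the engine. */
            aes_a_set_endianness(aes_dev);

            /* Encode cipher/mode/instruction into AES_COMMAND. */
            set_ocs_aes_command(aes_dev, cipher, mode, instruction);
    }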
796 int ocs_aes_op(struct ocs_aes_dev *aes_dev, in ocs_aes_op() argument
824 ocs_aes_init(aes_dev, mode, cipher, instruction); in ocs_aes_op()
828 ocs_aes_write_last_data_blk_len(aes_dev, src_size); in ocs_aes_op()
833 iowrite32(iv32[0], aes_dev->base_reg + AES_IV_0_OFFSET); in ocs_aes_op()
834 iowrite32(iv32[1], aes_dev->base_reg + AES_IV_1_OFFSET); in ocs_aes_op()
835 iowrite32(iv32[2], aes_dev->base_reg + AES_IV_2_OFFSET); in ocs_aes_op()
836 iowrite32(iv32[3], aes_dev->base_reg + AES_IV_3_OFFSET); in ocs_aes_op()
840 aes_a_op_trigger(aes_dev); in ocs_aes_op()
843 dma_to_ocs_aes_ll(aes_dev, src_dma_list); in ocs_aes_op()
844 dma_from_ocs_aes_ll(aes_dev, dst_dma_list); in ocs_aes_op()
845 aes_a_dma_active_src_dst_ll_en(aes_dev); in ocs_aes_op()
852 aes_a_set_last_gcx(aes_dev); in ocs_aes_op()
855 aes_a_op_termination(aes_dev); in ocs_aes_op()
859 rc = ocs_aes_irq_enable_and_wait(aes_dev, AES_COMPLETE_INT); in ocs_aes_op()
865 iv32[0] = ioread32(aes_dev->base_reg + AES_IV_0_OFFSET); in ocs_aes_op()
866 iv32[1] = ioread32(aes_dev->base_reg + AES_IV_1_OFFSET); in ocs_aes_op()
867 iv32[2] = ioread32(aes_dev->base_reg + AES_IV_2_OFFSET); in ocs_aes_op()
868 iv32[3] = ioread32(aes_dev->base_reg + AES_IV_3_OFFSET); in ocs_aes_op()
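The ocs_aes_op() fragments (lines 796-868) outline the common non-AEAD path: initialize the engine, program the last-block length, load the IV, trigger, attach both DMA linked lists, flag the last block and terminate, then sleep until the completion interrupt; afterwards the updated IV is read back so chaining modes can continue across requests. A condensed sketch; the parameter list and the two mode guards fall on elided lines and are assumptions:

    int ocs_aes_op(struct ocs_aes_dev *aes_dev, enum ocs_mode mode,
                   enum ocs_cipher cipher, enum ocs_instruction instruction,
                   dma_addr_t dst_dma_list, dma_addr_t src_dma_list,
                   u32 src_size, u8 *iv, u32 iv_size)
    {
            u32 *iv32 = (u32 *)iv;
            int rc;

            ocs_aes_init(aes_dev, mode, cipher, instruction);
            ocs_aes_write_last_data_blk_len(aes_dev, src_size);

            /* Load the IV for modes that use one (guard assumed). */
            if (mode != OCS_MODE_ECB) {
                    iowrite32(iv32[0], aes_dev->base_reg + AES_IV_0_OFFSET);
                    iowrite32(iv32[1], aes_dev->base_reg + AES_IV_1_OFFSET);
                    iowrite32(iv32[2], aes_dev->base_reg + AES_IV_2_OFFSET);
                    iowrite32(iv32[3], aes_dev->base_reg + AES_IV_3_OFFSET);
            }

            aes_a_op_trigger(aes_dev);

            /* Attach both linked lists and start the DMA. */
            dma_to_ocs_aes_ll(aes_dev, src_dma_list);
            dma_from_ocs_aes_ll(aes_dev, dst_dma_list);
            aes_a_dma_active_src_dst_ll_en(aes_dev);

            /* Lines 846-855 are partly elided; the listing shows both the
             * last-GCX flag and the termination write, possibly behind
             * mode-dependent conditionals. */
            aes_a_set_last_gcx(aes_dev);
            aes_a_op_termination(aes_dev);

            rc = ocs_aes_irq_enable_and_wait(aes_dev, AES_COMPLETE_INT);
            if (rc)
                    return rc;

            /* Hand the updated IV back for chaining (guard assumed). */
            if (mode == OCS_MODE_CBC || mode == OCS_MODE_CTR) {
                    iv32[0] = ioread32(aes_dev->base_reg + AES_IV_0_OFFSET);
                    iv32[1] = ioread32(aes_dev->base_reg + AES_IV_1_OFFSET);
                    iv32[2] = ioread32(aes_dev->base_reg + AES_IV_2_OFFSET);
                    iv32[3] = ioread32(aes_dev->base_reg + AES_IV_3_OFFSET);
            }

            return 0;
    }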
875 static void ocs_aes_gcm_write_j0(const struct ocs_aes_dev *aes_dev, in ocs_aes_gcm_write_j0() argument
884 iowrite32(0x00000001, aes_dev->base_reg + AES_IV_0_OFFSET); in ocs_aes_gcm_write_j0()
885 iowrite32(__swab32(j0[2]), aes_dev->base_reg + AES_IV_1_OFFSET); in ocs_aes_gcm_write_j0()
886 iowrite32(__swab32(j0[1]), aes_dev->base_reg + AES_IV_2_OFFSET); in ocs_aes_gcm_write_j0()
887 iowrite32(__swab32(j0[0]), aes_dev->base_reg + AES_IV_3_OFFSET); in ocs_aes_gcm_write_j0()
891 static inline void ocs_aes_gcm_read_tag(struct ocs_aes_dev *aes_dev, in ocs_aes_gcm_read_tag() argument
901 tag_u32[0] = __swab32(ioread32(aes_dev->base_reg + AES_T_MAC_3_OFFSET)); in ocs_aes_gcm_read_tag()
902 tag_u32[1] = __swab32(ioread32(aes_dev->base_reg + AES_T_MAC_2_OFFSET)); in ocs_aes_gcm_read_tag()
903 tag_u32[2] = __swab32(ioread32(aes_dev->base_reg + AES_T_MAC_1_OFFSET)); in ocs_aes_gcm_read_tag()
904 tag_u32[3] = __swab32(ioread32(aes_dev->base_reg + AES_T_MAC_0_OFFSET)); in ocs_aes_gcm_read_tag()
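Both GCM helpers at lines 875 and 891 are about byte order: J0 (the pre-counter block) is written with its 32-bit words reversed and byte-swapped and the counter word forced to 1, and the tag is read from the T_MAC registers highest word first with each word swapped, so the caller sees a big-endian tag. The fragments give every access; only the local-buffer declarations are filled in:

    /* Write J0 = IV || 0^31 || 1 into the IV registers (96-bit GCM IV). */
    static void ocs_aes_gcm_write_j0(const struct ocs_aes_dev *aes_dev,
                                     const u8 *iv)
    {
            const u32 *j0 = (u32 *)iv;

            /* Hardware expects the counter word first, IV words swapped. */
            iowrite32(0x00000001, aes_dev->base_reg + AES_IV_0_OFFSET);
            iowrite32(__swab32(j0[2]), aes_dev->base_reg + AES_IV_1_OFFSET);
            iowrite32(__swab32(j0[1]), aes_dev->base_reg + AES_IV_2_OFFSET);
            iowrite32(__swab32(j0[0]), aes_dev->base_reg + AES_IV_3_OFFSET);
    }

    /* Read the authentication tag back in big-endian byte order. */
    static inline void ocs_aes_gcm_read_tag(struct ocs_aes_dev *aes_dev,
                                            u8 *tag, u32 tag_size)
    {
            u32 tag_u32[4];

            tag_u32[0] = __swab32(ioread32(aes_dev->base_reg + AES_T_MAC_3_OFFSET));
            tag_u32[1] = __swab32(ioread32(aes_dev->base_reg + AES_T_MAC_2_OFFSET));
            tag_u32[2] = __swab32(ioread32(aes_dev->base_reg + AES_T_MAC_1_OFFSET));
            tag_u32[3] = __swab32(ioread32(aes_dev->base_reg + AES_T_MAC_0_OFFSET));

            memcpy(tag, tag_u32, tag_size);
    }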
925 int ocs_aes_gcm_op(struct ocs_aes_dev *aes_dev, in ocs_aes_gcm_op() argument
949 ocs_aes_init(aes_dev, OCS_MODE_GCM, cipher, instruction); in ocs_aes_gcm_op()
952 ocs_aes_gcm_write_j0(aes_dev, iv); in ocs_aes_gcm_op()
955 iowrite32(tag_size, aes_dev->base_reg + AES_TLEN_OFFSET); in ocs_aes_gcm_op()
958 ocs_aes_write_last_data_blk_len(aes_dev, src_size); in ocs_aes_gcm_op()
963 iowrite32(val, aes_dev->base_reg + AES_MULTIPURPOSE2_0_OFFSET); in ocs_aes_gcm_op()
965 iowrite32(val, aes_dev->base_reg + AES_MULTIPURPOSE2_1_OFFSET); in ocs_aes_gcm_op()
970 iowrite32(val, aes_dev->base_reg + AES_MULTIPURPOSE2_2_OFFSET); in ocs_aes_gcm_op()
972 iowrite32(val, aes_dev->base_reg + AES_MULTIPURPOSE2_3_OFFSET); in ocs_aes_gcm_op()
975 aes_a_op_trigger(aes_dev); in ocs_aes_gcm_op()
980 dma_to_ocs_aes_ll(aes_dev, aad_dma_list); in ocs_aes_gcm_op()
981 aes_a_dma_active_src_ll_en(aes_dev); in ocs_aes_gcm_op()
984 aes_a_set_last_gcx_and_adata(aes_dev); in ocs_aes_gcm_op()
987 rc = ocs_aes_irq_enable_and_wait(aes_dev, AES_DMA_SRC_DONE_INT); in ocs_aes_gcm_op()
991 aes_a_set_last_gcx_and_adata(aes_dev); in ocs_aes_gcm_op()
995 aes_a_wait_last_gcx(aes_dev); in ocs_aes_gcm_op()
996 aes_a_dma_wait_input_buffer_occupancy(aes_dev); in ocs_aes_gcm_op()
1001 dma_to_ocs_aes_ll(aes_dev, src_dma_list); in ocs_aes_gcm_op()
1002 dma_from_ocs_aes_ll(aes_dev, dst_dma_list); in ocs_aes_gcm_op()
1003 aes_a_dma_active_src_dst_ll_en(aes_dev); in ocs_aes_gcm_op()
1005 aes_a_dma_set_xfer_size_zero(aes_dev); in ocs_aes_gcm_op()
1006 aes_a_dma_active(aes_dev); in ocs_aes_gcm_op()
1010 aes_a_set_last_gcx(aes_dev); in ocs_aes_gcm_op()
1013 rc = ocs_aes_irq_enable_and_wait(aes_dev, AES_COMPLETE_INT); in ocs_aes_gcm_op()
1017 ocs_aes_gcm_read_tag(aes_dev, out_tag, tag_size); in ocs_aes_gcm_op()
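Stitched together, ocs_aes_gcm_op() (lines 925-1017) runs: init for GCM, write J0 and the tag length, program the AAD and payload bit lengths into the MULTIPURPOSE2 registers (lines 963-972), trigger, then two DMA phases, and finally read the tag. A condensed sketch of the two-phase section between trigger and tag read; the guards for empty AAD and empty payload fall on elided lines and are assumptions:

    /* Phase 1: feed the additional authenticated data (AAD). */
    if (aad_size) {
            dma_to_ocs_aes_ll(aes_dev, aad_dma_list);
            aes_a_dma_active_src_ll_en(aes_dev);

            /* Mark the AAD as the last adata and wait for the DMA. */
            aes_a_set_last_gcx_and_adata(aes_dev);
            rc = ocs_aes_irq_enable_and_wait(aes_dev, AES_DMA_SRC_DONE_INT);
            if (rc)
                    return rc;
    } else {
            aes_a_set_last_gcx_and_adata(aes_dev);
    }

    /* Let the engine finish the adata before switching to payload. */
    aes_a_wait_last_gcx(aes_dev);
    aes_a_dma_wait_input_buffer_occupancy(aes_dev);

    /* Phase 2: payload through both linked lists, or a zero-size
     * transfer when there is no payload. */
    if (src_size) {
            dma_to_ocs_aes_ll(aes_dev, src_dma_list);
            dma_from_ocs_aes_ll(aes_dev, dst_dma_list);
            aes_a_dma_active_src_dst_ll_en(aes_dev);
    } else {
            aes_a_dma_set_xfer_size_zero(aes_dev);
            aes_a_dma_active(aes_dev);
    }

    aes_a_set_last_gcx(aes_dev);

    rc = ocs_aes_irq_enable_and_wait(aes_dev, AES_COMPLETE_INT);
    if (rc)
            return rc;

    ocs_aes_gcm_read_tag(aes_dev, out_tag, tag_size);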
1023 static void ocs_aes_ccm_write_encrypted_tag(struct ocs_aes_dev *aes_dev, in ocs_aes_ccm_write_encrypted_tag() argument
1029 aes_a_dma_wait_input_buffer_occupancy(aes_dev); in ocs_aes_ccm_write_encrypted_tag()
1036 aes_a_dma_reset_and_activate_perf_cntr(aes_dev); in ocs_aes_ccm_write_encrypted_tag()
1037 aes_a_dma_wait_and_deactivate_perf_cntr(aes_dev, in ocs_aes_ccm_write_encrypted_tag()
1042 iowrite8(in_tag[i], aes_dev->base_reg + in ocs_aes_ccm_write_encrypted_tag()
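Three CCM helpers push bytes into the engine by hand with iowrite8() rather than via DMA: the expected tag (line 1042), the B0 block (line 1114), and the adata length encoding (line 1155). The target register is elided in every case; the sketch below assumes an input-FIFO offset name, and the delay constant is likewise a placeholder:

    static void ocs_aes_ccm_write_encrypted_tag(struct ocs_aes_dev *aes_dev,
                                                const u8 *in_tag, u32 tag_size)
    {
            int i;

            /* Ensure the DMA input buffer is empty before pushing the tag. */
            aes_a_dma_wait_input_buffer_occupancy(aes_dev);

            /* Use the perf counter as a fixed settle delay after the last
             * payload block (delay constant name assumed). */
            aes_a_dma_reset_and_activate_perf_cntr(aes_dev);
            aes_a_dma_wait_and_deactivate_perf_cntr(aes_dev,
                                                    CCM_DECRYPT_DELAY_TAG_CLK_COUNT);

            /* Byte-wise write of the expected tag into the input FIFO
             * (offset name assumed). */
            for (i = 0; i < tag_size; i++)
                    iowrite8(in_tag[i], aes_dev->base_reg +
                             AES_A_DMA_INBUFFER_WRITE_FIFO_OFFSET);
    }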
1054 static int ocs_aes_ccm_write_b0(const struct ocs_aes_dev *aes_dev, in ocs_aes_ccm_write_b0() argument
1114 iowrite8(b0[i], aes_dev->base_reg + in ocs_aes_ccm_write_b0()
1126 static void ocs_aes_ccm_write_adata_len(const struct ocs_aes_dev *aes_dev, in ocs_aes_ccm_write_adata_len() argument
1155 aes_dev->base_reg + in ocs_aes_ccm_write_adata_len()
1159 static int ocs_aes_ccm_do_adata(struct ocs_aes_dev *aes_dev, in ocs_aes_ccm_do_adata() argument
1166 aes_a_set_last_gcx_and_adata(aes_dev); in ocs_aes_ccm_do_adata()
1176 ocs_aes_ccm_write_adata_len(aes_dev, adata_size); in ocs_aes_ccm_do_adata()
1179 dma_to_ocs_aes_ll(aes_dev, adata_dma_list); in ocs_aes_ccm_do_adata()
1182 aes_a_dma_active_src_ll_en(aes_dev); in ocs_aes_ccm_do_adata()
1185 aes_a_set_last_gcx_and_adata(aes_dev); in ocs_aes_ccm_do_adata()
1188 rc = ocs_aes_irq_enable_and_wait(aes_dev, AES_DMA_SRC_DONE_INT); in ocs_aes_ccm_do_adata()
1194 aes_a_wait_last_gcx(aes_dev); in ocs_aes_ccm_do_adata()
1195 aes_a_dma_wait_input_buffer_occupancy(aes_dev); in ocs_aes_ccm_do_adata()
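ocs_aes_ccm_do_adata() (lines 1159-1195) handles the associated data: with no adata it only raises the last-GCX/last-adata flags; otherwise it writes the length encoding into the input buffer, fetches the adata through the source linked list, raises the flags, and waits for the source DMA to finish. Either way it then waits for the engine to drain before the payload phase. Sketch following the fragment order; the empty-adata guard is an assumption:

    static int ocs_aes_ccm_do_adata(struct ocs_aes_dev *aes_dev,
                                    dma_addr_t adata_dma_list, u32 adata_size)
    {
            int rc = 0;

            if (!adata_size) {
                    /* No adata: just set the last-GCX/last-adata flags. */
                    aes_a_set_last_gcx_and_adata(aes_dev);
                    goto exit;
            }

            /* Write the encoded adata length into the input buffer. */
            ocs_aes_ccm_write_adata_len(aes_dev, adata_size);

            /* Fetch the adata through the source linked list. */
            dma_to_ocs_aes_ll(aes_dev, adata_dma_list);
            aes_a_dma_active_src_ll_en(aes_dev);

            aes_a_set_last_gcx_and_adata(aes_dev);

            rc = ocs_aes_irq_enable_and_wait(aes_dev, AES_DMA_SRC_DONE_INT);
            if (rc)
                    return rc;

    exit:
            /* Wait until the adata (if any) has been fully processed. */
            aes_a_wait_last_gcx(aes_dev);
            aes_a_dma_wait_input_buffer_occupancy(aes_dev);

            return rc;
    }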
1200 static int ocs_aes_ccm_encrypt_do_payload(struct ocs_aes_dev *aes_dev, in ocs_aes_ccm_encrypt_do_payload() argument
1210 dma_to_ocs_aes_ll(aes_dev, src_dma_list); in ocs_aes_ccm_encrypt_do_payload()
1211 dma_from_ocs_aes_ll(aes_dev, dst_dma_list); in ocs_aes_ccm_encrypt_do_payload()
1212 aes_a_dma_active_src_dst_ll_en(aes_dev); in ocs_aes_ccm_encrypt_do_payload()
1215 dma_from_ocs_aes_ll(aes_dev, dst_dma_list); in ocs_aes_ccm_encrypt_do_payload()
1216 aes_a_dma_active_dst_ll_en(aes_dev); in ocs_aes_ccm_encrypt_do_payload()
1223 aes_a_set_last_gcx(aes_dev); in ocs_aes_ccm_encrypt_do_payload()
1226 return ocs_aes_irq_enable_and_wait(aes_dev, AES_COMPLETE_INT); in ocs_aes_ccm_encrypt_do_payload()
1229 static int ocs_aes_ccm_decrypt_do_payload(struct ocs_aes_dev *aes_dev, in ocs_aes_ccm_decrypt_do_payload() argument
1236 aes_a_dma_set_xfer_size_zero(aes_dev); in ocs_aes_ccm_decrypt_do_payload()
1237 aes_a_dma_active(aes_dev); in ocs_aes_ccm_decrypt_do_payload()
1238 aes_a_set_last_gcx(aes_dev); in ocs_aes_ccm_decrypt_do_payload()
1247 dma_to_ocs_aes_ll(aes_dev, src_dma_list); in ocs_aes_ccm_decrypt_do_payload()
1248 dma_from_ocs_aes_ll(aes_dev, dst_dma_list); in ocs_aes_ccm_decrypt_do_payload()
1249 aes_a_dma_active_src_dst_ll_en(aes_dev); in ocs_aes_ccm_decrypt_do_payload()
1255 aes_a_set_last_gcx(aes_dev); in ocs_aes_ccm_decrypt_do_payload()
1260 return ocs_aes_irq_enable_and_wait(aes_dev, AES_DMA_SRC_DONE_INT); in ocs_aes_ccm_decrypt_do_payload()
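The two payload helpers differ in a subtle way. On encryption the destination list is always needed (the engine DMAs the computed tag out even with no plaintext), and the function waits on AES_COMPLETE_INT. On decryption an empty payload is driven as a zero-size transfer, and the wait is on AES_DMA_SRC_DONE_INT rather than completion, because the encrypted tag still has to be fed in afterwards. Sketch; the src_size guards fall on elided lines and are assumptions:

    static int ocs_aes_ccm_encrypt_do_payload(struct ocs_aes_dev *aes_dev,
                                              dma_addr_t dst_dma_list,
                                              dma_addr_t src_dma_list,
                                              u32 src_size)
    {
            if (src_size) {
                    dma_to_ocs_aes_ll(aes_dev, src_dma_list);
                    dma_from_ocs_aes_ll(aes_dev, dst_dma_list);
                    aes_a_dma_active_src_dst_ll_en(aes_dev);
            } else {
                    /* Tag is still DMA'd out, so keep the destination. */
                    dma_from_ocs_aes_ll(aes_dev, dst_dma_list);
                    aes_a_dma_active_dst_ll_en(aes_dev);
            }

            aes_a_set_last_gcx(aes_dev);

            return ocs_aes_irq_enable_and_wait(aes_dev, AES_COMPLETE_INT);
    }

    static int ocs_aes_ccm_decrypt_do_payload(struct ocs_aes_dev *aes_dev,
                                              dma_addr_t dst_dma_list,
                                              dma_addr_t src_dma_list,
                                              u32 src_size)
    {
            if (!src_size) {
                    /* Let the engine process zero-length input. */
                    aes_a_dma_set_xfer_size_zero(aes_dev);
                    aes_a_dma_active(aes_dev);
                    aes_a_set_last_gcx(aes_dev);

                    return 0;
            }

            dma_to_ocs_aes_ll(aes_dev, src_dma_list);
            dma_from_ocs_aes_ll(aes_dev, dst_dma_list);
            aes_a_dma_active_src_dst_ll_en(aes_dev);

            aes_a_set_last_gcx(aes_dev);

            /* The encrypted tag is written separately afterwards, so only
             * wait for the source DMA here, not full completion. */
            return ocs_aes_irq_enable_and_wait(aes_dev, AES_DMA_SRC_DONE_INT);
    }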
1269 static inline int ccm_compare_tag_to_yr(struct ocs_aes_dev *aes_dev, in ccm_compare_tag_to_yr() argument
1278 tag[i] = ioread32(aes_dev->base_reg + in ccm_compare_tag_to_yr()
1280 yr[i] = ioread32(aes_dev->base_reg + in ccm_compare_tag_to_yr()
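For CCM decryption the hardware leaves the computed MAC in the T_MAC registers and the expected value in the Y_R registers; ccm_compare_tag_to_yr() reads both banks word by word and compares them. Sketch; the Y_R register base, the loop bound, and the -EBADMSG return are assumptions, since those lines are elided:

    static inline int ccm_compare_tag_to_yr(struct ocs_aes_dev *aes_dev,
                                            u8 tag_size_bytes)
    {
            u32 tag[4], yr[4];
            int i;

            /* Read the computed tag (T_MAC) and the expected one (Y_R). */
            for (i = 0; i < 4; i++) {
                    tag[i] = ioread32(aes_dev->base_reg +
                                      AES_T_MAC_0_OFFSET + (i * sizeof(u32)));
                    yr[i] = ioread32(aes_dev->base_reg +
                                     AES_MULTIPURPOSE2_0_OFFSET +
                                     (i * sizeof(u32)));
            }

            return memcmp(tag, yr, tag_size_bytes) ? -EBADMSG : 0;
    }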
1307 int ocs_aes_ccm_op(struct ocs_aes_dev *aes_dev, in ocs_aes_ccm_op() argument
1330 ocs_aes_init(aes_dev, OCS_MODE_CCM, cipher, instruction); in ocs_aes_ccm_op()
1345 aes_dev->base_reg + AES_MULTIPURPOSE1_3_OFFSET); in ocs_aes_ccm_op()
1347 aes_dev->base_reg + AES_MULTIPURPOSE1_2_OFFSET); in ocs_aes_ccm_op()
1349 aes_dev->base_reg + AES_MULTIPURPOSE1_1_OFFSET); in ocs_aes_ccm_op()
1351 aes_dev->base_reg + AES_MULTIPURPOSE1_0_OFFSET); in ocs_aes_ccm_op()
1354 iowrite32(tag_size, aes_dev->base_reg + AES_TLEN_OFFSET); in ocs_aes_ccm_op()
1360 ocs_aes_write_last_data_blk_len(aes_dev, src_size); in ocs_aes_ccm_op()
1363 aes_a_op_trigger(aes_dev); in ocs_aes_ccm_op()
1365 aes_a_dma_reset_and_activate_perf_cntr(aes_dev); in ocs_aes_ccm_op()
1368 rc = ocs_aes_ccm_write_b0(aes_dev, iv, adata_size, tag_size, src_size); in ocs_aes_ccm_op()
1375 aes_a_dma_wait_and_deactivate_perf_cntr(aes_dev, in ocs_aes_ccm_op()
1379 ocs_aes_ccm_do_adata(aes_dev, adata_dma_list, adata_size); in ocs_aes_ccm_op()
1383 return ocs_aes_ccm_encrypt_do_payload(aes_dev, dst_dma_list, in ocs_aes_ccm_op()
1387 rc = ocs_aes_ccm_decrypt_do_payload(aes_dev, dst_dma_list, in ocs_aes_ccm_op()
1393 ocs_aes_ccm_write_encrypted_tag(aes_dev, in_tag, tag_size); in ocs_aes_ccm_op()
1394 rc = ocs_aes_irq_enable_and_wait(aes_dev, AES_COMPLETE_INT); in ocs_aes_ccm_op()
1398 return ccm_compare_tag_to_yr(aes_dev, tag_size); in ocs_aes_ccm_op()
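The ocs_aes_ccm_op() fragments (lines 1307-1398) give the whole CCM sequence: init for CCM, load the nonce into the MULTIPURPOSE1 registers (lines 1345-1351), program tag and last-block lengths, trigger, write B0 by hand with a perf-counter settle delay, process the adata, then branch on direction. A condensed sketch of the body after initialization; the delay constant and the instruction enum value are assumed names:

    /* Program tag length and residual payload-block length. */
    iowrite32(tag_size, aes_dev->base_reg + AES_TLEN_OFFSET);
    ocs_aes_write_last_data_blk_len(aes_dev, src_size);

    /* Trigger, then feed B0 by hand, timed by the perf counter. */
    aes_a_op_trigger(aes_dev);
    aes_a_dma_reset_and_activate_perf_cntr(aes_dev);
    rc = ocs_aes_ccm_write_b0(aes_dev, iv, adata_size, tag_size, src_size);
    if (rc)
            return rc;
    aes_a_dma_wait_and_deactivate_perf_cntr(aes_dev,
                                            CCM_DECRYPT_DELAY_LAST_GCX_CLK_COUNT);

    /* Associated data, then the payload. */
    ocs_aes_ccm_do_adata(aes_dev, adata_dma_list, adata_size);

    if (instruction == OCS_ENCRYPT)
            return ocs_aes_ccm_encrypt_do_payload(aes_dev, dst_dma_list,
                                                  src_dma_list, src_size);

    /* Decrypt: process payload, feed in the encrypted tag, then
     * compare the computed MAC against the expected one. */
    rc = ocs_aes_ccm_decrypt_do_payload(aes_dev, dst_dma_list,
                                        src_dma_list, src_size);
    if (rc)
            return rc;

    ocs_aes_ccm_write_encrypted_tag(aes_dev, in_tag, tag_size);
    rc = ocs_aes_irq_enable_and_wait(aes_dev, AES_COMPLETE_INT);
    if (rc)
            return rc;

    return ccm_compare_tag_to_yr(aes_dev, tag_size);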
1417 int ocs_create_linked_list_from_sg(const struct ocs_aes_dev *aes_dev, in ocs_create_linked_list_from_sg() argument
1429 if (!dll_desc || !sg || !aes_dev) in ocs_create_linked_list_from_sg()
1467 dll_desc->vaddr = dma_alloc_coherent(aes_dev->dev, dll_desc->size, in ocs_create_linked_list_from_sg()
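ocs_create_linked_list_from_sg() (lines 1417-1467) turns a DMA-mapped scatterlist into the hardware's descriptor format: it validates its arguments (line 1429), sizes the descriptor table, and allocates it coherently so the DMA engine can walk it. A sketch of the allocation step; the descriptor struct name and the sizing expression are assumptions, while the dll_desc fields (vaddr, dma_addr, size) follow the fragment at line 1467:

    /* One descriptor per scatterlist segment that carries payload. */
    dll_desc->size = sizeof(struct ocs_dma_linked_list) * dma_nents;

    /* Coherent allocation: the engine reads descriptors directly. */
    dll_desc->vaddr = dma_alloc_coherent(aes_dev->dev, dll_desc->size,
                                         &dll_desc->dma_addr, GFP_KERNEL);
    if (!dll_desc->vaddr)
            return -ENOMEM;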