// SPDX-License-Identifier: GPL-2.0-only
/*
 * Intel Keem Bay OCS AES Crypto Driver.
 */

#include <linux/dma-mapping.h>
#include <linux/interrupt.h>
#include <linux/platform_device.h>
#include <linux/slab.h>
#include <linux/swab.h>

#include <asm/byteorder.h>
#include <asm/errno.h>

#include <crypto/aes.h>
#include <crypto/gcm.h>

#include "ocs-aes.h"

/* OCS AES/SM4 register offsets. */
#define AES_COMMAND_OFFSET			0x0000
#define AES_KEY_0_OFFSET			0x0004
#define AES_KEY_1_OFFSET			0x0008
#define AES_KEY_2_OFFSET			0x000C
#define AES_KEY_3_OFFSET			0x0010
#define AES_KEY_4_OFFSET			0x0014
#define AES_KEY_5_OFFSET			0x0018
#define AES_KEY_6_OFFSET			0x001C
#define AES_KEY_7_OFFSET			0x0020
#define AES_IV_0_OFFSET				0x0024
#define AES_IV_1_OFFSET				0x0028
#define AES_IV_2_OFFSET				0x002C
#define AES_IV_3_OFFSET				0x0030
#define AES_ACTIVE_OFFSET			0x0034
#define AES_STATUS_OFFSET			0x0038
#define AES_KEY_SIZE_OFFSET			0x0044
#define AES_IER_OFFSET				0x0048
#define AES_ISR_OFFSET				0x005C
#define AES_MULTIPURPOSE1_0_OFFSET		0x0200
#define AES_MULTIPURPOSE1_1_OFFSET		0x0204
#define AES_MULTIPURPOSE1_2_OFFSET		0x0208
#define AES_MULTIPURPOSE1_3_OFFSET		0x020C
#define AES_MULTIPURPOSE2_0_OFFSET		0x0220
#define AES_MULTIPURPOSE2_1_OFFSET		0x0224
#define AES_MULTIPURPOSE2_2_OFFSET		0x0228
#define AES_MULTIPURPOSE2_3_OFFSET		0x022C
#define AES_BYTE_ORDER_CFG_OFFSET		0x02C0
#define AES_TLEN_OFFSET				0x0300
#define AES_T_MAC_0_OFFSET			0x0304
#define AES_T_MAC_1_OFFSET			0x0308
#define AES_T_MAC_2_OFFSET			0x030C
#define AES_T_MAC_3_OFFSET			0x0310
#define AES_PLEN_OFFSET				0x0314
#define AES_A_DMA_SRC_ADDR_OFFSET		0x0400
#define AES_A_DMA_DST_ADDR_OFFSET		0x0404
#define AES_A_DMA_SRC_SIZE_OFFSET		0x0408
#define AES_A_DMA_DST_SIZE_OFFSET		0x040C
#define AES_A_DMA_DMA_MODE_OFFSET		0x0410
#define AES_A_DMA_NEXT_SRC_DESCR_OFFSET		0x0418
#define AES_A_DMA_NEXT_DST_DESCR_OFFSET		0x041C
#define AES_A_DMA_WHILE_ACTIVE_MODE_OFFSET	0x0420
#define AES_A_DMA_LOG_OFFSET			0x0424
#define AES_A_DMA_STATUS_OFFSET			0x0428
#define AES_A_DMA_PERF_CNTR_OFFSET		0x042C
#define AES_A_DMA_MSI_ISR_OFFSET		0x0480
#define AES_A_DMA_MSI_IER_OFFSET		0x0484
#define AES_A_DMA_MSI_MASK_OFFSET		0x0488
#define AES_A_DMA_INBUFFER_WRITE_FIFO_OFFSET	0x0600
#define AES_A_DMA_OUTBUFFER_READ_FIFO_OFFSET	0x0700
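
/* Bit masks for the AES_A_DMA_DMA_MODE register. */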
#define AES_A_DMA_DMA_MODE_ACTIVE		BIT(31)
#define AES_A_DMA_DMA_MODE_SRC_LINK_LIST_EN	BIT(25)
#define AES_A_DMA_DMA_MODE_DST_LINK_LIST_EN	BIT(24)
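
/* Bit masks for the AES_ACTIVE register. */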
#define AES_ACTIVE_LAST_ADATA			BIT(9)
#define AES_ACTIVE_LAST_CCM_GCM			BIT(8)
#define AES_ACTIVE_TERMINATION			BIT(1)
#define AES_ACTIVE_TRIGGER			BIT(0)

#define AES_DISABLE_INT				0x00000000
#define AES_DMA_CPD_ERR_INT			BIT(8)
#define AES_DMA_OUTBUF_RD_ERR_INT		BIT(7)
#define AES_DMA_OUTBUF_WR_ERR_INT		BIT(6)
#define AES_DMA_INBUF_RD_ERR_INT		BIT(5)
#define AES_DMA_INBUF_WR_ERR_INT		BIT(4)
#define AES_DMA_BAD_COMP_INT			BIT(3)
#define AES_DMA_SAI_INT				BIT(2)
#define AES_DMA_SRC_DONE_INT			BIT(0)
#define AES_COMPLETE_INT			BIT(1)

#define AES_DMA_MSI_MASK_CLEAR			BIT(0)

#define AES_128_BIT_KEY				0x00000000
#define AES_256_BIT_KEY				BIT(0)

#define AES_DEACTIVATE_PERF_CNTR		0x00000000
#define AES_ACTIVATE_PERF_CNTR			BIT(0)

#define AES_MAX_TAG_SIZE_U32			4

#define OCS_LL_DMA_FLAG_TERMINATE		BIT(31)
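
/* Input-buffer occupancy field (bits [9:0]) of AES_A_DMA_STATUS. */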
#define AES_DMA_STATUS_INPUT_BUFFER_OCCUPANCY_MASK	0x3FF
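
/*
 * Number of OCS clock cycles that must elapse, during CCM decrypt, between
 * the last ciphertext write and the write of the (encrypted) tag.
 */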
#define CCM_DECRYPT_DELAY_TAG_CLK_COUNT		36UL
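
/*
 * Number of OCS clock cycles that must elapse, during CCM decrypt, between
 * setting the TRIGGER bit and setting the LAST_CCM_GCM bit in AES_ACTIVE.
 */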
#define CCM_DECRYPT_DELAY_LAST_GCX_CLK_COUNT	42UL

#define L_PRIME_MIN (1)
#define L_PRIME_MAX (7)
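
/*
 * CCM IV layout (see RFC 3610 section 2.3): octet 0 carries the flags field
 * (whose 3 least significant bits encode L' = L - 1), the following octets
 * carry the nonce, and the last L octets carry the big-endian counter.
 */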
#define L_PRIME_IDX		0
#define COUNTER_START(lprime)	(16 - ((lprime) + 1))
#define COUNTER_LEN(lprime)	((lprime) + 1)

enum aes_counter_mode {
	AES_CTR_M_NO_INC = 0,
	AES_CTR_M_32_INC = 1,
	AES_CTR_M_64_INC = 2,
	AES_CTR_M_128_INC = 3,
};
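
/**
 * struct ocs_dma_linked_list - OCS DMA linked list entry.
 * @src_addr: Source address of the data.
 * @src_len:  Length of data to be fetched.
 * @next:     Next DMA linked list entry to fetch.
 * @ll_flags: Flags (freeze & terminate) for the DMA engine.
 */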
struct ocs_dma_linked_list {
	u32 src_addr;
	u32 src_len;
	u32 next;
	u32 ll_flags;
} __packed;
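
/* Set the byte order of engine inputs and outputs (AES_BYTE_ORDER_CFG). */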
static inline void aes_a_set_endianness(const struct ocs_aes_dev *aes_dev)
{
	iowrite32(0x7FF, aes_dev->base_reg + AES_BYTE_ORDER_CFG_OFFSET);
}

/* Trigger AES/SM4 processing (set TRIGGER bit in AES_ACTIVE). */
static inline void aes_a_op_trigger(const struct ocs_aes_dev *aes_dev)
{
	iowrite32(AES_ACTIVE_TRIGGER, aes_dev->base_reg + AES_ACTIVE_OFFSET);
}

/* Terminate the AES/SM4 operation (set TERMINATION bit in AES_ACTIVE). */
static inline void aes_a_op_termination(const struct ocs_aes_dev *aes_dev)
{
	iowrite32(AES_ACTIVE_TERMINATION,
		  aes_dev->base_reg + AES_ACTIVE_OFFSET);
}

/*
 * Set LAST_CCM_GCM in the AES_ACTIVE register; called when the engine is
 * about to process the last batch of payload / ciphertext data of a CCM or
 * GCM operation.
 */
static inline void aes_a_set_last_gcx(const struct ocs_aes_dev *aes_dev)
{
	iowrite32(AES_ACTIVE_LAST_CCM_GCM,
		  aes_dev->base_reg + AES_ACTIVE_OFFSET);
}

/* Wait for the engine to clear the LAST_CCM_GCM bit. */
static inline void aes_a_wait_last_gcx(const struct ocs_aes_dev *aes_dev)
{
	u32 aes_active_reg;

	do {
		aes_active_reg = ioread32(aes_dev->base_reg +
					  AES_ACTIVE_OFFSET);
	} while (aes_active_reg & AES_ACTIVE_LAST_CCM_GCM);
}

/* Wait for the DMA input buffer to drain completely. */
static void aes_a_dma_wait_input_buffer_occupancy(const struct ocs_aes_dev *aes_dev)
{
	u32 reg;

	do {
		reg = ioread32(aes_dev->base_reg + AES_A_DMA_STATUS_OFFSET);
	} while (reg & AES_DMA_STATUS_INPUT_BUFFER_OCCUPANCY_MASK);
}

/*
 * Set LAST_CCM_GCM and LAST_ADATA in the AES_ACTIVE register; called when
 * the engine has been given the last batch of associated data of a CCM or
 * GCM operation.
 */
static inline void aes_a_set_last_gcx_and_adata(const struct ocs_aes_dev *aes_dev)
{
	iowrite32(AES_ACTIVE_LAST_ADATA | AES_ACTIVE_LAST_CCM_GCM,
		  aes_dev->base_reg + AES_ACTIVE_OFFSET);
}

/* Set the DMA source and destination transfer sizes to 0. */
static inline void aes_a_dma_set_xfer_size_zero(const struct ocs_aes_dev *aes_dev)
{
	iowrite32(0, aes_dev->base_reg + AES_A_DMA_SRC_SIZE_OFFSET);
	iowrite32(0, aes_dev->base_reg + AES_A_DMA_DST_SIZE_OFFSET);
}

/* Activate the DMA with neither source nor destination linked lists. */
static inline void aes_a_dma_active(const struct ocs_aes_dev *aes_dev)
{
	iowrite32(AES_A_DMA_DMA_MODE_ACTIVE,
		  aes_dev->base_reg + AES_A_DMA_DMA_MODE_OFFSET);
}

/* Activate the DMA and enable the source linked list. */
static inline void aes_a_dma_active_src_ll_en(const struct ocs_aes_dev *aes_dev)
{
	iowrite32(AES_A_DMA_DMA_MODE_ACTIVE |
		  AES_A_DMA_DMA_MODE_SRC_LINK_LIST_EN,
		  aes_dev->base_reg + AES_A_DMA_DMA_MODE_OFFSET);
}

/* Activate the DMA and enable the destination linked list. */
static inline void aes_a_dma_active_dst_ll_en(const struct ocs_aes_dev *aes_dev)
{
	iowrite32(AES_A_DMA_DMA_MODE_ACTIVE |
		  AES_A_DMA_DMA_MODE_DST_LINK_LIST_EN,
		  aes_dev->base_reg + AES_A_DMA_DMA_MODE_OFFSET);
}

/* Activate the DMA and enable both source and destination linked lists. */
static inline void aes_a_dma_active_src_dst_ll_en(const struct ocs_aes_dev *aes_dev)
{
	iowrite32(AES_A_DMA_DMA_MODE_ACTIVE |
		  AES_A_DMA_DMA_MODE_SRC_LINK_LIST_EN |
		  AES_A_DMA_DMA_MODE_DST_LINK_LIST_EN,
		  aes_dev->base_reg + AES_A_DMA_DMA_MODE_OFFSET);
}

/* Reset the performance counter and activate it. */
static inline void aes_a_dma_reset_and_activate_perf_cntr(const struct ocs_aes_dev *aes_dev)
{
	iowrite32(0x00000000, aes_dev->base_reg + AES_A_DMA_PERF_CNTR_OFFSET);
	iowrite32(AES_ACTIVATE_PERF_CNTR,
		  aes_dev->base_reg + AES_A_DMA_WHILE_ACTIVE_MODE_OFFSET);
}

/* Wait until the performance counter reaches @delay, then deactivate it. */
static inline void aes_a_dma_wait_and_deactivate_perf_cntr(const struct ocs_aes_dev *aes_dev,
							   int delay)
{
	while (ioread32(aes_dev->base_reg + AES_A_DMA_PERF_CNTR_OFFSET) < delay)
		;
	iowrite32(AES_DEACTIVATE_PERF_CNTR,
		  aes_dev->base_reg + AES_A_DMA_WHILE_ACTIVE_MODE_OFFSET);
}
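
/* Disable AES and DMA interrupts and clear any pending interrupt status. */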
static void aes_irq_disable(struct ocs_aes_dev *aes_dev)
{
	u32 isr_val = 0;

	/* Disable interrupts. */
	iowrite32(AES_DISABLE_INT,
		  aes_dev->base_reg + AES_A_DMA_MSI_IER_OFFSET);
	iowrite32(AES_DISABLE_INT, aes_dev->base_reg + AES_IER_OFFSET);

	/* Clear any pending interrupt. */
	isr_val = ioread32(aes_dev->base_reg + AES_A_DMA_MSI_ISR_OFFSET);
	if (isr_val)
		iowrite32(isr_val,
			  aes_dev->base_reg + AES_A_DMA_MSI_ISR_OFFSET);

	isr_val = ioread32(aes_dev->base_reg + AES_A_DMA_MSI_MASK_OFFSET);
	if (isr_val)
		iowrite32(isr_val,
			  aes_dev->base_reg + AES_A_DMA_MSI_MASK_OFFSET);

	isr_val = ioread32(aes_dev->base_reg + AES_ISR_OFFSET);
	if (isr_val)
		iowrite32(isr_val, aes_dev->base_reg + AES_ISR_OFFSET);
}
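
/* Enable the given interrupt (AES engine completion or DMA source done). */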
static void aes_irq_enable(struct ocs_aes_dev *aes_dev, u8 irq)
{
	if (irq == AES_COMPLETE_INT) {
		/* Ensure DMA error interrupts are enabled. */
		iowrite32(AES_DMA_CPD_ERR_INT |
			  AES_DMA_OUTBUF_RD_ERR_INT |
			  AES_DMA_OUTBUF_WR_ERR_INT |
			  AES_DMA_INBUF_RD_ERR_INT |
			  AES_DMA_INBUF_WR_ERR_INT |
			  AES_DMA_BAD_COMP_INT |
			  AES_DMA_SAI_INT,
			  aes_dev->base_reg + AES_A_DMA_MSI_IER_OFFSET);

		/* Enable the AES engine completion interrupt (AES_IER). */
		iowrite32(AES_COMPLETE_INT, aes_dev->base_reg + AES_IER_OFFSET);
		return;
	}
	if (irq == AES_DMA_SRC_DONE_INT) {
		/* Ensure AES interrupts are disabled. */
		iowrite32(AES_DISABLE_INT, aes_dev->base_reg + AES_IER_OFFSET);

		/*
		 * Enable the DMA source-done interrupt, along with the DMA
		 * error interrupts.
		 */
		iowrite32(AES_DMA_CPD_ERR_INT |
			  AES_DMA_OUTBUF_RD_ERR_INT |
			  AES_DMA_OUTBUF_WR_ERR_INT |
			  AES_DMA_INBUF_RD_ERR_INT |
			  AES_DMA_INBUF_WR_ERR_INT |
			  AES_DMA_BAD_COMP_INT |
			  AES_DMA_SAI_INT |
			  AES_DMA_SRC_DONE_INT,
			  aes_dev->base_reg + AES_A_DMA_MSI_IER_OFFSET);
	}
}
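
/* Enable the given interrupt and wait for it to fire. */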
static int ocs_aes_irq_enable_and_wait(struct ocs_aes_dev *aes_dev, u8 irq)
{
	int rc;

	reinit_completion(&aes_dev->irq_completion);
	aes_irq_enable(aes_dev, irq);
	rc = wait_for_completion_interruptible(&aes_dev->irq_completion);
	if (rc)
		return rc;

	return aes_dev->dma_err_mask ? -EIO : 0;
}

/* Configure DMA to OCS, linked-list mode. */
static inline void dma_to_ocs_aes_ll(struct ocs_aes_dev *aes_dev,
				     dma_addr_t dma_list)
{
	iowrite32(0, aes_dev->base_reg + AES_A_DMA_SRC_SIZE_OFFSET);
	iowrite32(dma_list,
		  aes_dev->base_reg + AES_A_DMA_NEXT_SRC_DESCR_OFFSET);
}

/* Configure DMA from OCS, linked-list mode. */
static inline void dma_from_ocs_aes_ll(struct ocs_aes_dev *aes_dev,
				       dma_addr_t dma_list)
{
	iowrite32(0, aes_dev->base_reg + AES_A_DMA_DST_SIZE_OFFSET);
	iowrite32(dma_list,
		  aes_dev->base_reg + AES_A_DMA_NEXT_DST_DESCR_OFFSET);
}
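
/* Interrupt handler shared by the OCS AES engine and the OCS DMA. */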
irqreturn_t ocs_aes_irq_handler(int irq, void *dev_id)
{
	struct ocs_aes_dev *aes_dev = dev_id;
	u32 aes_dma_isr;

	/* Read DMA ISR status. */
	aes_dma_isr = ioread32(aes_dev->base_reg + AES_A_DMA_MSI_ISR_OFFSET);

	/* Disable and clear interrupts. */
	aes_irq_disable(aes_dev);

	/* Save the status of the DMA error interrupts. */
	aes_dev->dma_err_mask = aes_dma_isr &
				(AES_DMA_CPD_ERR_INT |
				 AES_DMA_OUTBUF_RD_ERR_INT |
				 AES_DMA_OUTBUF_WR_ERR_INT |
				 AES_DMA_INBUF_RD_ERR_INT |
				 AES_DMA_INBUF_WR_ERR_INT |
				 AES_DMA_BAD_COMP_INT |
				 AES_DMA_SAI_INT);

	/* Signal IRQ completion. */
	complete(&aes_dev->irq_completion);

	return IRQ_HANDLED;
}
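
/**
 * ocs_aes_set_key() - Write key into OCS AES hardware.
 * @aes_dev:	The OCS AES device to write the key to.
 * @key_size:	The size of the key (in bytes).
 * @key:	The key to write.
 * @cipher:	The cipher the key is for.
 *
 * For AES @key_size must be either 16 or 32. For SM4 @key_size must be 16.
 *
 * Return:	0 on success, negative error code otherwise.
 */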
int ocs_aes_set_key(struct ocs_aes_dev *aes_dev, u32 key_size, const u8 *key,
		    enum ocs_cipher cipher)
{
	const u32 *key_u32;
	u32 val;
	int i;

	/* OCS AES supports 128-bit and 256-bit keys only. */
	if (cipher == OCS_AES && !(key_size == 32 || key_size == 16)) {
		dev_err(aes_dev->dev,
			"%d-bit keys not supported by AES cipher\n",
			key_size * 8);
		return -EINVAL;
	}
	/* OCS SM4 supports 128-bit keys only. */
	if (cipher == OCS_SM4 && key_size != 16) {
		dev_err(aes_dev->dev,
			"%d-bit keys not supported for SM4 cipher\n",
			key_size * 8);
		return -EINVAL;
	}

	if (!key)
		return -EINVAL;

	key_u32 = (const u32 *)key;

	/* Write key to AES_KEY[0-7] registers. */
	for (i = 0; i < (key_size / sizeof(u32)); i++) {
		iowrite32(key_u32[i],
			  aes_dev->base_reg + AES_KEY_0_OFFSET +
			  (i * sizeof(u32)));
	}
	/*
	 * Write key size: bit [0] of AES_KEY_SIZE selects between 128-bit (0)
	 * and 256-bit (1) keys; all other bits are reserved.
	 */
	val = (key_size == 16) ? AES_128_BIT_KEY : AES_256_BIT_KEY;
	iowrite32(val, aes_dev->base_reg + AES_KEY_SIZE_OFFSET);

	return 0;
}
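
/* Write AES_COMMAND for the given cipher, mode and instruction. */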
static inline void set_ocs_aes_command(struct ocs_aes_dev *aes_dev,
				       enum ocs_cipher cipher,
				       enum ocs_mode mode,
				       enum ocs_instruction instruction)
{
	u32 val;

	/*
	 * Compose the command register value; the shifts place:
	 * - the cipher select at bit 14,
	 * - the mode at bit 8,
	 * - the instruction at bit 6,
	 * - the counter increment mode at bit 2 (always 128-bit increment).
	 */
	val = (cipher << 14) | (mode << 8) | (instruction << 6) |
	      (AES_CTR_M_128_INC << 2);
	iowrite32(val, aes_dev->base_reg + AES_COMMAND_OFFSET);
}

static void ocs_aes_init(struct ocs_aes_dev *aes_dev,
			 enum ocs_mode mode,
			 enum ocs_cipher cipher,
			 enum ocs_instruction instruction)
{
	/* Ensure interrupts are disabled. */
	aes_irq_disable(aes_dev);

	/* Set the endianness of inputs and outputs. */
	aes_a_set_endianness(aes_dev);

	/* Set the operation. */
	set_ocs_aes_command(aes_dev, cipher, mode, instruction);
}
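
/*
 * Write the byte length of the last data block to be processed (i.e., the
 * length before any padding) into the AES_PLEN register.
 */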
static inline void ocs_aes_write_last_data_blk_len(struct ocs_aes_dev *aes_dev,
						   u32 size)
{
	u32 val;

	if (size == 0) {
		val = 0;
		goto exit;
	}

	val = size % AES_BLOCK_SIZE;
	if (val == 0)
		val = AES_BLOCK_SIZE;

exit:
	iowrite32(val, aes_dev->base_reg + AES_PLEN_OFFSET);
}
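
/*
 * Validate the inputs of an AES/SM4 operation according to the requested
 * mode and instruction; return 0 if they are valid, -EINVAL otherwise.
 */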
static int ocs_aes_validate_inputs(dma_addr_t src_dma_list, u32 src_size,
				   const u8 *iv, u32 iv_size,
				   dma_addr_t aad_dma_list, u32 aad_size,
				   const u8 *tag, u32 tag_size,
				   enum ocs_cipher cipher, enum ocs_mode mode,
				   enum ocs_instruction instruction,
				   dma_addr_t dst_dma_list)
{
	/* Ensure cipher, mode and instruction are valid. */
	if (!(cipher == OCS_AES || cipher == OCS_SM4))
		return -EINVAL;

	if (mode != OCS_MODE_ECB && mode != OCS_MODE_CBC &&
	    mode != OCS_MODE_CTR && mode != OCS_MODE_CCM &&
	    mode != OCS_MODE_GCM && mode != OCS_MODE_CTS)
		return -EINVAL;

	if (instruction != OCS_ENCRYPT && instruction != OCS_DECRYPT &&
	    instruction != OCS_EXPAND && instruction != OCS_BYPASS)
		return -EINVAL;

	/*
	 * When the instruction is OCS_BYPASS, the engine simply copies data
	 * from source to destination using DMA; mode and the remaining
	 * parameters are irrelevant, but source and destination DMA
	 * linked lists must be defined.
	 */
	if (instruction == OCS_BYPASS) {
		if (src_dma_list == DMA_MAPPING_ERROR ||
		    dst_dma_list == DMA_MAPPING_ERROR)
			return -EINVAL;

		return 0;
	}

	/* Check the remaining parameters according to the mode. */
	switch (mode) {
	case OCS_MODE_ECB:
		/* Ensure input length is a multiple of the block size. */
		if (src_size % AES_BLOCK_SIZE != 0)
			return -EINVAL;

		/* Ensure source and destination linked lists are created. */
		if (src_dma_list == DMA_MAPPING_ERROR ||
		    dst_dma_list == DMA_MAPPING_ERROR)
			return -EINVAL;

		return 0;

	case OCS_MODE_CBC:
		/* Ensure input length is a multiple of the block size. */
		if (src_size % AES_BLOCK_SIZE != 0)
			return -EINVAL;

		/* Ensure source and destination linked lists are created. */
		if (src_dma_list == DMA_MAPPING_ERROR ||
		    dst_dma_list == DMA_MAPPING_ERROR)
			return -EINVAL;

		/* Ensure IV is present and of block-size length. */
		if (!iv || iv_size != AES_BLOCK_SIZE)
			return -EINVAL;

		return 0;

	case OCS_MODE_CTR:
		/* Ensure input length is not 0. */
		if (src_size == 0)
			return -EINVAL;

		/* Ensure source and destination linked lists are created. */
		if (src_dma_list == DMA_MAPPING_ERROR ||
		    dst_dma_list == DMA_MAPPING_ERROR)
			return -EINVAL;

		/* Ensure IV is present and of block-size length. */
		if (!iv || iv_size != AES_BLOCK_SIZE)
			return -EINVAL;

		return 0;

	case OCS_MODE_CTS:
		/* Ensure input length is at least one block. */
		if (src_size < AES_BLOCK_SIZE)
			return -EINVAL;

		/* Ensure source and destination linked lists are created. */
		if (src_dma_list == DMA_MAPPING_ERROR ||
		    dst_dma_list == DMA_MAPPING_ERROR)
			return -EINVAL;

		/* Ensure IV is present and of block-size length. */
		if (!iv || iv_size != AES_BLOCK_SIZE)
			return -EINVAL;

		return 0;

	case OCS_MODE_GCM:
		/* Ensure IV is present and GCM-sized. */
		if (!iv || iv_size != GCM_AES_IV_SIZE)
			return -EINVAL;

		/*
		 * If input data is present, ensure source and destination
		 * linked lists are created.
		 */
		if (src_size && (src_dma_list == DMA_MAPPING_ERROR ||
				 dst_dma_list == DMA_MAPPING_ERROR))
			return -EINVAL;

		/* If AAD is present, ensure its linked list is created. */
		if (aad_size && aad_dma_list == DMA_MAPPING_ERROR)
			return -EINVAL;

		/* Ensure tag destination is set. */
		if (!tag)
			return -EINVAL;

		/* Just ensure that tag_size doesn't cause overflows. */
		if (tag_size > (AES_MAX_TAG_SIZE_U32 * sizeof(u32)))
			return -EINVAL;

		return 0;

	case OCS_MODE_CCM:
		/* Ensure IV is present and of block-size length. */
		if (!iv || iv_size != AES_BLOCK_SIZE)
			return -EINVAL;

		/* 2 <= L <= 8, so 1 <= L' <= 7. */
		if (iv[L_PRIME_IDX] < L_PRIME_MIN ||
		    iv[L_PRIME_IDX] > L_PRIME_MAX)
			return -EINVAL;

		/* If AAD is present, ensure its linked list is created. */
		if (aad_size && aad_dma_list == DMA_MAPPING_ERROR)
			return -EINVAL;

		/* Just ensure that tag_size doesn't cause overflows. */
		if (tag_size > (AES_MAX_TAG_SIZE_U32 * sizeof(u32)))
			return -EINVAL;

		if (instruction == OCS_DECRYPT) {
			/*
			 * If input data is present, ensure source and
			 * destination linked lists are created.
			 */
			if (src_size && (src_dma_list == DMA_MAPPING_ERROR ||
					 dst_dma_list == DMA_MAPPING_ERROR))
				return -EINVAL;

			/* Ensure input tag is present. */
			if (!tag)
				return -EINVAL;

			return 0;
		}

		/* Instruction == OCS_ENCRYPT. */

		/*
		 * The destination linked list is always required, since the
		 * authentication tag is written out even when src_size is 0.
		 */
		if (dst_dma_list == DMA_MAPPING_ERROR)
			return -EINVAL;

		/* If input data is present, ensure src list is created. */
		if (src_size && src_dma_list == DMA_MAPPING_ERROR)
			return -EINVAL;

		return 0;

	default:
		return -EINVAL;
	}
}
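
/**
 * ocs_aes_op() - Perform AES/SM4 operation.
 * @aes_dev:		The OCS AES device to use.
 * @mode:		The mode to use (ECB, CBC, CTR, or CTS).
 * @cipher:		The cipher to use.
 * @instruction:	The instruction to perform (encrypt or decrypt).
 * @dst_dma_list:	The OCS DMA list mapping output memory.
 * @src_dma_list:	The OCS DMA list mapping input payload data.
 * @src_size:		The amount of data mapped by @src_dma_list.
 * @iv:			The IV vector.
 * @iv_size:		The size (in bytes) of @iv.
 *
 * Return: 0 on success, negative error code otherwise.
 */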
int ocs_aes_op(struct ocs_aes_dev *aes_dev,
	       enum ocs_mode mode,
	       enum ocs_cipher cipher,
	       enum ocs_instruction instruction,
	       dma_addr_t dst_dma_list,
	       dma_addr_t src_dma_list,
	       u32 src_size,
	       u8 *iv,
	       u32 iv_size)
{
	u32 *iv32;
	int rc;

	rc = ocs_aes_validate_inputs(src_dma_list, src_size, iv, iv_size, 0, 0,
				     NULL, 0, cipher, mode, instruction,
				     dst_dma_list);
	if (rc)
		return rc;
	/*
	 * ocs_aes_validate_inputs() is a generic check; now ensure that this
	 * function is not being used for CCM or GCM, which have dedicated
	 * entry points.
	 */
	if (mode == OCS_MODE_GCM || mode == OCS_MODE_CCM)
		return -EINVAL;

	/* Cast IV to u32 array. */
	iv32 = (u32 *)iv;

	ocs_aes_init(aes_dev, mode, cipher, instruction);

	if (mode == OCS_MODE_CTS) {
		/* Write the byte length of the last data block to engine. */
		ocs_aes_write_last_data_blk_len(aes_dev, src_size);
	}

	/* ECB is the only mode that doesn't use an IV. */
	if (mode != OCS_MODE_ECB) {
		iowrite32(iv32[0], aes_dev->base_reg + AES_IV_0_OFFSET);
		iowrite32(iv32[1], aes_dev->base_reg + AES_IV_1_OFFSET);
		iowrite32(iv32[2], aes_dev->base_reg + AES_IV_2_OFFSET);
		iowrite32(iv32[3], aes_dev->base_reg + AES_IV_3_OFFSET);
	}

	/* Set AES_ACTIVE.TRIGGER to start the operation. */
	aes_a_op_trigger(aes_dev);

	/* Configure and activate input / output DMA. */
	dma_to_ocs_aes_ll(aes_dev, src_dma_list);
	dma_from_ocs_aes_ll(aes_dev, dst_dma_list);
	aes_a_dma_active_src_dst_ll_en(aes_dev);

	if (mode == OCS_MODE_CTS) {
		/*
		 * For CTS mode, instruct the engine to activate ciphertext
		 * stealing if the last block of data is incomplete.
		 */
		aes_a_set_last_gcx(aes_dev);
	} else {
		/* For all other modes, just write the termination bit. */
		aes_a_op_termination(aes_dev);
	}

	/* Wait for the engine to complete processing. */
	rc = ocs_aes_irq_enable_and_wait(aes_dev, AES_COMPLETE_INT);
	if (rc)
		return rc;

	if (mode == OCS_MODE_CTR) {
		/* Read back the updated IV for streaming mode. */
		iv32[0] = ioread32(aes_dev->base_reg + AES_IV_0_OFFSET);
		iv32[1] = ioread32(aes_dev->base_reg + AES_IV_1_OFFSET);
		iv32[2] = ioread32(aes_dev->base_reg + AES_IV_2_OFFSET);
		iv32[3] = ioread32(aes_dev->base_reg + AES_IV_3_OFFSET);
	}

	return 0;
}
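
/* Compute and write J0 to the engine registers. */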
static void ocs_aes_gcm_write_j0(const struct ocs_aes_dev *aes_dev,
				 const u8 *iv)
{
	const u32 *j0 = (const u32 *)iv;

	/*
	 * The IV is 96 bits (the only GCM IV size the Linux crypto API
	 * allows), so J0 = IV || 0^31 || 1.
	 */
	iowrite32(0x00000001, aes_dev->base_reg + AES_IV_0_OFFSET);
	iowrite32(__swab32(j0[2]), aes_dev->base_reg + AES_IV_1_OFFSET);
	iowrite32(__swab32(j0[1]), aes_dev->base_reg + AES_IV_2_OFFSET);
	iowrite32(__swab32(j0[0]), aes_dev->base_reg + AES_IV_3_OFFSET);
}

/* Read the GCM authentication tag from the engine registers. */
static inline void ocs_aes_gcm_read_tag(struct ocs_aes_dev *aes_dev,
					u8 *tag, u32 tag_size)
{
	u32 tag_u32[AES_MAX_TAG_SIZE_U32];

	/*
	 * The authentication tag T is stored in little-endian order in the
	 * registers, with the most significant bytes stored from AES_T_MAC_3
	 * downward.
	 */
	tag_u32[0] = __swab32(ioread32(aes_dev->base_reg + AES_T_MAC_3_OFFSET));
	tag_u32[1] = __swab32(ioread32(aes_dev->base_reg + AES_T_MAC_2_OFFSET));
	tag_u32[2] = __swab32(ioread32(aes_dev->base_reg + AES_T_MAC_1_OFFSET));
	tag_u32[3] = __swab32(ioread32(aes_dev->base_reg + AES_T_MAC_0_OFFSET));

	memcpy(tag, tag_u32, tag_size);
}
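
/**
 * ocs_aes_gcm_op() - Perform GCM operation.
 * @aes_dev:		The OCS AES device to use.
 * @cipher:		The cipher to use.
 * @instruction:	The instruction to perform (encrypt or decrypt).
 * @dst_dma_list:	The OCS DMA list mapping output memory.
 * @src_dma_list:	The OCS DMA list mapping input payload data.
 * @src_size:		The amount of data mapped by @src_dma_list.
 * @iv:			The input IV vector.
 * @aad_dma_list:	The OCS DMA list mapping input AAD data.
 * @aad_size:		The amount of data mapped by @aad_dma_list.
 * @out_tag:		Where to store the authentication tag.
 * @tag_size:		The size (in bytes) of @out_tag.
 *
 * Return: 0 on success, negative error code otherwise.
 */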
int ocs_aes_gcm_op(struct ocs_aes_dev *aes_dev,
		   enum ocs_cipher cipher,
		   enum ocs_instruction instruction,
		   dma_addr_t dst_dma_list,
		   dma_addr_t src_dma_list,
		   u32 src_size,
		   const u8 *iv,
		   dma_addr_t aad_dma_list,
		   u32 aad_size,
		   u8 *out_tag,
		   u32 tag_size)
{
	u64 bit_len;
	u32 val;
	int rc;

	rc = ocs_aes_validate_inputs(src_dma_list, src_size, iv,
				     GCM_AES_IV_SIZE, aad_dma_list,
				     aad_size, out_tag, tag_size, cipher,
				     OCS_MODE_GCM, instruction,
				     dst_dma_list);
	if (rc)
		return rc;

	ocs_aes_init(aes_dev, OCS_MODE_GCM, cipher, instruction);

	/* Compute and write J0 to the engine registers. */
	ocs_aes_gcm_write_j0(aes_dev, iv);

	/* Write out_tag byte length. */
	iowrite32(tag_size, aes_dev->base_reg + AES_TLEN_OFFSET);

	/* Write the byte length of the last payload block. */
	ocs_aes_write_last_data_blk_len(aes_dev, src_size);

	/* Write payload (plaintext / ciphertext) bit length. */
	bit_len = (u64)src_size * 8;
	val = bit_len & 0xFFFFFFFF;
	iowrite32(val, aes_dev->base_reg + AES_MULTIPURPOSE2_0_OFFSET);
	val = bit_len >> 32;
	iowrite32(val, aes_dev->base_reg + AES_MULTIPURPOSE2_1_OFFSET);

	/* Write AAD bit length. */
	bit_len = (u64)aad_size * 8;
	val = bit_len & 0xFFFFFFFF;
	iowrite32(val, aes_dev->base_reg + AES_MULTIPURPOSE2_2_OFFSET);
	val = bit_len >> 32;
	iowrite32(val, aes_dev->base_reg + AES_MULTIPURPOSE2_3_OFFSET);

	/* Set AES_ACTIVE.TRIGGER to start the operation. */
	aes_a_op_trigger(aes_dev);

	/* Process AAD. */
	if (aad_size) {
		/* If AAD is present, configure DMA to feed it to the engine. */
		dma_to_ocs_aes_ll(aes_dev, aad_dma_list);
		aes_a_dma_active_src_ll_en(aes_dev);

		/* Instruct the engine that all the AAD has been provided. */
		aes_a_set_last_gcx_and_adata(aes_dev);

		/* Wait for the DMA transfer to complete. */
		rc = ocs_aes_irq_enable_and_wait(aes_dev, AES_DMA_SRC_DONE_INT);
		if (rc)
			return rc;
	} else {
		aes_a_set_last_gcx_and_adata(aes_dev);
	}

	/* Wait until the engine has processed all the AAD (if any). */
	aes_a_wait_last_gcx(aes_dev);
	aes_a_dma_wait_input_buffer_occupancy(aes_dev);

	/* Now process the payload. */
	if (src_size) {
		/* Configure and activate DMA for both input and output. */
		dma_to_ocs_aes_ll(aes_dev, src_dma_list);
		dma_from_ocs_aes_ll(aes_dev, dst_dma_list);
		aes_a_dma_active_src_dst_ll_en(aes_dev);
	} else {
		aes_a_dma_set_xfer_size_zero(aes_dev);
		aes_a_dma_active(aes_dev);
	}

	/* Instruct the engine that payload processing is over. */
	aes_a_set_last_gcx(aes_dev);

	/* Wait for the engine to complete processing. */
	rc = ocs_aes_irq_enable_and_wait(aes_dev, AES_COMPLETE_INT);
	if (rc)
		return rc;

	ocs_aes_gcm_read_tag(aes_dev, out_tag, tag_size);

	return 0;
}
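
/* Write the (encrypted) tag to the engine during CCM decrypt. */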
static void ocs_aes_ccm_write_encrypted_tag(struct ocs_aes_dev *aes_dev,
					    const u8 *in_tag, u32 tag_size)
{
	int i;

	/* Ensure the DMA input buffer is empty. */
	aes_a_dma_wait_input_buffer_occupancy(aes_dev);

	/*
	 * During CCM decrypt, the engine must finish processing the
	 * ciphertext before the tag is written, so a delay is needed after
	 * the DMA has completed writing the ciphertext.
	 */
	aes_a_dma_reset_and_activate_perf_cntr(aes_dev);
	aes_a_dma_wait_and_deactivate_perf_cntr(aes_dev,
						CCM_DECRYPT_DELAY_TAG_CLK_COUNT);

	/* Write the encrypted tag to the engine's input buffer. */
	for (i = 0; i < tag_size; i++) {
		iowrite8(in_tag[i], aes_dev->base_reg +
				    AES_A_DMA_INBUFFER_WRITE_FIFO_OFFSET);
	}
}
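
/*
 * Write the B0 CCM block to the engine's input buffer.
 *
 * Note: the B0 block format is documented in NIST Special Publication
 * 800-38C, Appendix A.2.1.
 */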
static int ocs_aes_ccm_write_b0(const struct ocs_aes_dev *aes_dev,
				const u8 *iv, u32 adata_size, u32 tag_size,
				u32 cryptlen)
{
	u8 b0[16];
	int i, q;

	memset(b0, 0, sizeof(b0));

	/*
	 * B0[0] is the 'Flags Octet' and has the following structure:
	 *   bit 7:    Reserved
	 *   bit 6:    Adata flag
	 *   bits 5-3: t value encoded as (t - 2) / 2
	 *   bits 2-0: q value encoded as q - 1
	 */
	/* If there is AAD data, set the Adata flag. */
	if (adata_size)
		b0[0] |= BIT(6);
	/*
	 * t denotes the octet length of T; it can only be an element of
	 * {4, 6, 8, 10, 12, 14, 16} and is encoded as (t - 2) / 2.
	 */
	b0[0] |= (((tag_size - 2) / 2) & 0x7) << 3;
	/*
	 * q is the octet length of Q; it can only be an element of
	 * {2, 3, 4, 5, 6, 7, 8} and is encoded as q - 1, which is exactly
	 * what iv[0] & 0x7 contains.
	 */
	b0[0] |= iv[0] & 0x7;
	/*
	 * Copy the nonce N from the IV to B0; N is located in
	 * iv[1]..iv[15 - q] and must be copied to b0[1]..b0[15 - q].
	 */
	q = (iv[0] & 0x7) + 1;
	for (i = 1; i <= 15 - q; i++)
		b0[i] = iv[i];
	/*
	 * The rest of B0 must contain Q, i.e., the message length encoded in
	 * q octets, in big-endian order; so write it starting from the end
	 * of B0, moving backward.
	 */
	i = sizeof(b0) - 1;
	while (q) {
		b0[i] = cryptlen & 0xff;
		cryptlen >>= 8;
		i--;
		q--;
	}
	/*
	 * If cryptlen is not zero at this point, its original value was too
	 * big to be encoded in q octets.
	 */
	if (cryptlen)
		return -EOVERFLOW;
	/* Now write B0 to the engine's input buffer. */
	for (i = 0; i < sizeof(b0); i++)
		iowrite8(b0[i], aes_dev->base_reg +
				AES_A_DMA_INBUFFER_WRITE_FIFO_OFFSET);
	return 0;
}
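
/*
 * Write the encoded Adata length to the engine's input buffer.
 *
 * Note: the Adata length encoding is documented in NIST Special Publication
 * 800-38C, Appendix A.2.2.
 */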
static void ocs_aes_ccm_write_adata_len(const struct ocs_aes_dev *aes_dev,
					u64 adata_len)
{
	u8 enc_a[10]; /* Maximum encoded size: 10 octets. */
	int i, len;

	/*
	 * adata_len ('a') is encoded as follows:
	 * If 0 < a < 2^16 - 2^8     ==> 2 octets: the 16-bit big-endian
	 *                               encoding of a.
	 * If 2^16 - 2^8 <= a < 2^32 ==> 6 octets: 0xff, 0xfe, followed by
	 *                               the 32-bit big-endian encoding of a.
	 * Otherwise                 ==> 10 octets: 0xff, 0xff, followed by
	 *                               the 64-bit big-endian encoding of a.
	 */
	if (adata_len < 65280) {
		len = 2;
		*(__be16 *)enc_a = cpu_to_be16(adata_len);
	} else if (adata_len <= 0xFFFFFFFF) {
		len = 6;
		*(__be16 *)enc_a = cpu_to_be16(0xfffe);
		*(__be32 *)&enc_a[2] = cpu_to_be32(adata_len);
	} else {
		len = 10;
		*(__be16 *)enc_a = cpu_to_be16(0xffff);
		*(__be64 *)&enc_a[2] = cpu_to_be64(adata_len);
	}
	for (i = 0; i < len; i++)
		iowrite8(enc_a[i],
			 aes_dev->base_reg +
			 AES_A_DMA_INBUFFER_WRITE_FIFO_OFFSET);
}
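
/* Write the encoded Adata length, then DMA the Adata to the engine. */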
static int ocs_aes_ccm_do_adata(struct ocs_aes_dev *aes_dev,
				dma_addr_t adata_dma_list, u32 adata_size)
{
	int rc;

	if (!adata_size) {
		/* No Adata; just set LAST_GCX and LAST_ADATA. */
		aes_a_set_last_gcx_and_adata(aes_dev);
		goto exit;
	}

	/* Adata case. */

	/*
	 * Form the encoding of the associated data length and write it to
	 * the engine's input buffer.
	 */
	ocs_aes_ccm_write_adata_len(aes_dev, adata_size);

	/* Configure the DMA to fetch the associated data. */
	dma_to_ocs_aes_ll(aes_dev, adata_dma_list);

	/* Activate the DMA to fetch the Adata. */
	aes_a_dma_active_src_ll_en(aes_dev);

	/* Set LAST_GCX and LAST_ADATA in the AES_ACTIVE register. */
	aes_a_set_last_gcx_and_adata(aes_dev);

	/* Wait for the DMA transfer to complete. */
	rc = ocs_aes_irq_enable_and_wait(aes_dev, AES_DMA_SRC_DONE_INT);
	if (rc)
		return rc;

exit:
	/* Wait until the engine has processed the Adata (if any). */
	aes_a_wait_last_gcx(aes_dev);
	aes_a_dma_wait_input_buffer_occupancy(aes_dev);

	return 0;
}
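
/* Process CCM payload data for the encrypt case. */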
static int ocs_aes_ccm_encrypt_do_payload(struct ocs_aes_dev *aes_dev,
					  dma_addr_t dst_dma_list,
					  dma_addr_t src_dma_list,
					  u32 src_size)
{
	if (src_size) {
		/*
		 * Configure and activate DMA for both input and output
		 * data.
		 */
		dma_to_ocs_aes_ll(aes_dev, src_dma_list);
		dma_from_ocs_aes_ll(aes_dev, dst_dma_list);
		aes_a_dma_active_src_dst_ll_en(aes_dev);
	} else {
		/* Configure and activate DMA for output data only. */
		dma_from_ocs_aes_ll(aes_dev, dst_dma_list);
		aes_a_dma_active_dst_ll_en(aes_dev);
	}

	/*
	 * Set the LAST_CCM_GCM bit in the AES_ACTIVE register to instruct
	 * the engine that all the payload has been provided.
	 */
	aes_a_set_last_gcx(aes_dev);

	/* We are done; wait for the engine to complete and return. */
	return ocs_aes_irq_enable_and_wait(aes_dev, AES_COMPLETE_INT);
}
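
/* Process CCM payload data for the decrypt case. */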
static int ocs_aes_ccm_decrypt_do_payload(struct ocs_aes_dev *aes_dev,
					  dma_addr_t dst_dma_list,
					  dma_addr_t src_dma_list,
					  u32 src_size)
{
	if (!src_size) {
		/* Let the engine process a zero-length payload. */
		aes_a_dma_set_xfer_size_zero(aes_dev);
		aes_a_dma_active(aes_dev);
		aes_a_set_last_gcx(aes_dev);

		return 0;
	}

	/*
	 * Configure and activate DMA for both input and output
	 * data.
	 */
	dma_to_ocs_aes_ll(aes_dev, src_dma_list);
	dma_from_ocs_aes_ll(aes_dev, dst_dma_list);
	aes_a_dma_active_src_dst_ll_en(aes_dev);
	/*
	 * Set the LAST_CCM_GCM bit in the AES_ACTIVE register to instruct
	 * the engine that all the payload has been provided.
	 */
	aes_a_set_last_gcx(aes_dev);

	/*
	 * Note: don't wait for the engine to complete processing here; the
	 * encrypted tag still has to be written. Just wait for the DMA
	 * transfer of the ciphertext to be done.
	 */
	return ocs_aes_irq_enable_and_wait(aes_dev, AES_DMA_SRC_DONE_INT);
}
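
/*
 * Compare Tag to Yr.
 *
 * Only used at the end of CCM decrypt. If the tag matches Yr, message
 * authentication succeeded.
 */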
static inline int ccm_compare_tag_to_yr(struct ocs_aes_dev *aes_dev,
					u8 tag_size_bytes)
{
	u32 tag[AES_MAX_TAG_SIZE_U32];
	u32 yr[AES_MAX_TAG_SIZE_U32];
	u8 i;

	/* Read Tag and Yr from the engine registers. */
	for (i = 0; i < AES_MAX_TAG_SIZE_U32; i++) {
		tag[i] = ioread32(aes_dev->base_reg +
				  AES_T_MAC_0_OFFSET + (i * sizeof(u32)));
		yr[i] = ioread32(aes_dev->base_reg +
				 AES_MULTIPURPOSE2_0_OFFSET +
				 (i * sizeof(u32)));
	}

	return memcmp(tag, yr, tag_size_bytes) ? -EBADMSG : 0;
}
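
/**
 * ocs_aes_ccm_op() - Perform CCM operation.
 * @aes_dev:		The OCS AES device to use.
 * @cipher:		The cipher to use.
 * @instruction:	The instruction to perform (encrypt or decrypt).
 * @dst_dma_list:	The OCS DMA list mapping output memory.
 * @src_dma_list:	The OCS DMA list mapping input payload data.
 * @src_size:		The amount of data mapped by @src_dma_list.
 * @iv:			The input IV vector.
 * @adata_dma_list:	The OCS DMA list mapping input Adata.
 * @adata_size:		The amount of data mapped by @adata_dma_list.
 * @in_tag:		Input tag (only used during decrypt).
 * @tag_size:		The size (in bytes) of @in_tag.
 *
 * Note: for encrypt, the engine appends the authentication tag to the
 * ciphertext (in the memory mapped by @dst_dma_list).
 *
 * Return: 0 on success, negative error code otherwise.
 */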
int ocs_aes_ccm_op(struct ocs_aes_dev *aes_dev,
		   enum ocs_cipher cipher,
		   enum ocs_instruction instruction,
		   dma_addr_t dst_dma_list,
		   dma_addr_t src_dma_list,
		   u32 src_size,
		   u8 *iv,
		   dma_addr_t adata_dma_list,
		   u32 adata_size,
		   u8 *in_tag,
		   u32 tag_size)
{
	u32 *iv_32;
	u8 lprime;
	int rc;

	rc = ocs_aes_validate_inputs(src_dma_list, src_size, iv,
				     AES_BLOCK_SIZE, adata_dma_list, adata_size,
				     in_tag, tag_size, cipher, OCS_MODE_CCM,
				     instruction, dst_dma_list);
	if (rc)
		return rc;

	ocs_aes_init(aes_dev, OCS_MODE_CCM, cipher, instruction);

	/*
	 * Note: RFC 3610 and NIST 800-38C require a counter value of zero to
	 * encrypt the auth tag, so zero the counter part of the IV.
	 */
	lprime = iv[L_PRIME_IDX];
	memset(&iv[COUNTER_START(lprime)], 0, COUNTER_LEN(lprime));

	/*
	 * Write the nonce / counter block to the MULTIPURPOSE1 registers,
	 * byte-swapped into the order the engine expects.
	 */
	iv_32 = (u32 *)iv;
	iowrite32(__swab32(iv_32[0]),
		  aes_dev->base_reg + AES_MULTIPURPOSE1_3_OFFSET);
	iowrite32(__swab32(iv_32[1]),
		  aes_dev->base_reg + AES_MULTIPURPOSE1_2_OFFSET);
	iowrite32(__swab32(iv_32[2]),
		  aes_dev->base_reg + AES_MULTIPURPOSE1_1_OFFSET);
	iowrite32(__swab32(iv_32[3]),
		  aes_dev->base_reg + AES_MULTIPURPOSE1_0_OFFSET);

	/* Write the tag length in register AES_TLEN. */
	iowrite32(tag_size, aes_dev->base_reg + AES_TLEN_OFFSET);
	/*
	 * Write the byte length of the last payload data block (without zero
	 * padding and without the length of the MAC) in register AES_PLEN.
	 */
	ocs_aes_write_last_data_blk_len(aes_dev, src_size);

	/* Set AES_ACTIVE.TRIGGER to start the operation. */
	aes_a_op_trigger(aes_dev);

	aes_a_dma_reset_and_activate_perf_cntr(aes_dev);

	/* Form block B0 and write it to the engine's input buffer. */
	rc = ocs_aes_ccm_write_b0(aes_dev, iv, adata_size, tag_size, src_size);
	if (rc)
		return rc;
	/*
	 * Ensure that at least CCM_DECRYPT_DELAY_LAST_GCX_CLK_COUNT clock
	 * cycles have elapsed since the TRIGGER bit was set.
	 */
	aes_a_dma_wait_and_deactivate_perf_cntr(aes_dev,
						CCM_DECRYPT_DELAY_LAST_GCX_CLK_COUNT);

	/* Process Adata. */
	rc = ocs_aes_ccm_do_adata(aes_dev, adata_dma_list, adata_size);
	if (rc)
		return rc;

	/* For the encrypt case, just process the payload and return. */
	if (instruction == OCS_ENCRYPT) {
		return ocs_aes_ccm_encrypt_do_payload(aes_dev, dst_dma_list,
						      src_dma_list, src_size);
	}

	/* For the decrypt case, the tag must also be processed. */
	rc = ocs_aes_ccm_decrypt_do_payload(aes_dev, dst_dma_list,
					    src_dma_list, src_size);
	if (rc)
		return rc;

	/* Write the encrypted tag, then wait for the engine to complete. */
	ocs_aes_ccm_write_encrypted_tag(aes_dev, in_tag, tag_size);
	rc = ocs_aes_irq_enable_and_wait(aes_dev, AES_COMPLETE_INT);
	if (rc)
		return rc;

	return ccm_compare_tag_to_yr(aes_dev, tag_size);
}
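
/**
 * ocs_create_linked_list_from_sg() - Create OCS DMA linked list from SG list.
 * @aes_dev:	  The OCS AES device the list will be created for.
 * @sg:		  The SG list the OCS DMA linked list will be created from.
 *		  When passed to this function, @sg must already have been
 *		  mapped with dma_map_sg().
 * @sg_dma_count: The number of DMA-mapped entries in @sg, i.e., the value
 *		  returned by dma_map_sg() when @sg was mapped.
 * @dll_desc:	  The OCS DMA linked list descriptor used to store information
 *		  about the created linked list.
 * @data_size:	  The size of the data (from the SG list) to be mapped into
 *		  the OCS DMA linked list.
 * @data_offset:  The offset (within the SG list) of the data to be mapped.
 *
 * Return:	0 on success, negative error code otherwise.
 */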
int ocs_create_linked_list_from_sg(const struct ocs_aes_dev *aes_dev,
				   struct scatterlist *sg,
				   int sg_dma_count,
				   struct ocs_dll_desc *dll_desc,
				   size_t data_size, size_t data_offset)
{
	struct ocs_dma_linked_list *ll = NULL;
	struct scatterlist *sg_tmp;
	unsigned int tmp;
	int dma_nents;
	int i;

	if (!dll_desc || !sg || !aes_dev)
		return -EINVAL;

	/* Default values for when no dll_desc is created. */
	dll_desc->vaddr = NULL;
	dll_desc->dma_addr = DMA_MAPPING_ERROR;
	dll_desc->size = 0;

	if (data_size == 0)
		return 0;

	/* Loop over sg_list until the entry at data_offset is reached. */
	while (data_offset >= sg_dma_len(sg)) {
		data_offset -= sg_dma_len(sg);
		sg_dma_count--;
		sg = sg_next(sg);
		/* If the end of the list is reached, offset was invalid. */
		if (!sg || sg_dma_count == 0)
			return -EINVAL;
	}

	/* Compute number of DMA-mapped SG entries to add to the OCS list. */
	dma_nents = 0;
	tmp = 0;
	sg_tmp = sg;
	while (tmp < data_offset + data_size) {
		/* If the list ends here, offset + data_size is too big. */
		if (!sg_tmp)
			return -EINVAL;
		tmp += sg_dma_len(sg_tmp);
		dma_nents++;
		sg_tmp = sg_next(sg_tmp);
	}
	if (dma_nents > sg_dma_count)
		return -EINVAL;

	/* Allocate the DMA list, one entry for each SG entry. */
	dll_desc->size = sizeof(struct ocs_dma_linked_list) * dma_nents;
	dll_desc->vaddr = dma_alloc_coherent(aes_dev->dev, dll_desc->size,
					     &dll_desc->dma_addr, GFP_KERNEL);
	if (!dll_desc->vaddr)
		return -ENOMEM;

	/* Populate the DMA linked list entries. */
	ll = dll_desc->vaddr;
	for (i = 0; i < dma_nents; i++, sg = sg_next(sg)) {
		ll[i].src_addr = sg_dma_address(sg) + data_offset;
		ll[i].src_len = (sg_dma_len(sg) - data_offset) < data_size ?
				(sg_dma_len(sg) - data_offset) : data_size;
		data_offset = 0;
		data_size -= ll[i].src_len;
		/* Current entry points to the DMA address of the next one. */
		ll[i].next = dll_desc->dma_addr + (sizeof(*ll) * (i + 1));
		ll[i].ll_flags = 0;
	}
	/* Terminate the last entry. */
	ll[i - 1].next = 0;
	ll[i - 1].ll_flags = OCS_LL_DMA_FLAG_TERMINATE;

	return 0;
}