// SPDX-License-Identifier: GPL-2.0
/*
 * STMicroelectronics STM32 CRYP hardware crypto accelerator driver.
 *
 * Author: Fabien Dessenne <fabien.dessenne@st.com>
 */
#include <linux/clk.h>
#include <linux/delay.h>
#include <linux/interrupt.h>
#include <linux/iopoll.h>
#include <linux/module.h>
#include <linux/of_device.h>
#include <linux/platform_device.h>
#include <linux/pm_runtime.h>
#include <linux/reset.h>

#include <crypto/aes.h>
#include <crypto/internal/des.h>
#include <crypto/engine.h>
#include <crypto/scatterwalk.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/skcipher.h>

#define DRIVER_NAME "stm32-cryp"

/* Bit [0]: encrypt / decrypt */
#define FLG_ENCRYPT BIT(0)
/* Bits [8..1]: algorithm and chaining mode */
#define FLG_AES BIT(1)
#define FLG_DES BIT(2)
#define FLG_TDES BIT(3)
#define FLG_ECB BIT(4)
#define FLG_CBC BIT(5)
#define FLG_CTR BIT(6)
#define FLG_GCM BIT(7)
#define FLG_CCM BIT(8)
/* Mode mask = bits [15..0] */
#define FLG_MODE_MASK GENMASK(15, 0)

/* Registers */
#define CRYP_CR 0x00000000
#define CRYP_SR 0x00000004
#define CRYP_DIN 0x00000008
#define CRYP_DOUT 0x0000000C
#define CRYP_DMACR 0x00000010
#define CRYP_IMSCR 0x00000014
#define CRYP_RISR 0x00000018
#define CRYP_MISR 0x0000001C
#define CRYP_K0LR 0x00000020
#define CRYP_K0RR 0x00000024
#define CRYP_K1LR 0x00000028
#define CRYP_K1RR 0x0000002C
#define CRYP_K2LR 0x00000030
#define CRYP_K2RR 0x00000034
#define CRYP_K3LR 0x00000038
#define CRYP_K3RR 0x0000003C
#define CRYP_IV0LR 0x00000040
#define CRYP_IV0RR 0x00000044
#define CRYP_IV1LR 0x00000048
#define CRYP_IV1RR 0x0000004C
#define CRYP_CSGCMCCM0R 0x00000050
#define CRYP_CSGCM0R 0x00000070

/* Registers values */
#define CR_DEC_NOT_ENC 0x00000004
#define CR_TDES_ECB 0x00000000
#define CR_TDES_CBC 0x00000008
#define CR_DES_ECB 0x00000010
#define CR_DES_CBC 0x00000018
#define CR_AES_ECB 0x00000020
#define CR_AES_CBC 0x00000028
#define CR_AES_CTR 0x00000030
#define CR_AES_KP 0x00000038
#define CR_AES_GCM 0x00080000
#define CR_AES_CCM 0x00080008
#define CR_AES_UNKNOWN 0xFFFFFFFF
#define CR_ALGO_MASK 0x00080038
#define CR_DATA32 0x00000000
#define CR_DATA16 0x00000040
#define CR_DATA8 0x00000080
#define CR_DATA1 0x000000C0
#define CR_KEY128 0x00000000
#define CR_KEY192 0x00000100
#define CR_KEY256 0x00000200
#define CR_FFLUSH 0x00004000
#define CR_CRYPEN 0x00008000
#define CR_PH_INIT 0x00000000
#define CR_PH_HEADER 0x00010000
#define CR_PH_PAYLOAD 0x00020000
#define CR_PH_FINAL 0x00030000
#define CR_PH_MASK 0x00030000
#define CR_NBPBL_SHIFT 20

#define SR_BUSY 0x00000010
#define SR_OFNE 0x00000004

#define IMSCR_IN BIT(0)
#define IMSCR_OUT BIT(1)

#define MISR_IN BIT(0)
#define MISR_OUT BIT(1)

/* Misc */
#define AES_BLOCK_32 (AES_BLOCK_SIZE / sizeof(u32))
#define GCM_CTR_INIT 2
#define CRYP_AUTOSUSPEND_DELAY 50

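/*
 * Per-revision capabilities, as used below:
 * @swap_final: byte-swap the size words fed in during the GCM final phase
 *              (see stm32_cryp_read_auth_tag())
 * @padding_wa: use the CTR-based workaround for GCM/CCM payloads that are
 *              not block aligned (see stm32_cryp_irq_write_*_padded_data())
 */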
struct stm32_cryp_caps {
	bool swap_final;
	bool padding_wa;
};

struct stm32_cryp_ctx {
	struct crypto_engine_ctx enginectx;
	struct stm32_cryp *cryp;
	int keylen;
	__be32 key[AES_KEYSIZE_256 / sizeof(u32)];
	unsigned long flags;
};

struct stm32_cryp_reqctx {
	unsigned long mode;
};

struct stm32_cryp {
	struct list_head list;
	struct device *dev;
	void __iomem *regs;
	struct clk *clk;
	unsigned long flags;
	u32 irq_status;
	const struct stm32_cryp_caps *caps;
	struct stm32_cryp_ctx *ctx;

	struct crypto_engine *engine;

	struct skcipher_request *req;
	struct aead_request *areq;

	size_t authsize;
	size_t hw_blocksize;

	size_t payload_in;
	size_t header_in;
	size_t payload_out;

	struct scatterlist *out_sg;

	struct scatter_walk in_walk;
	struct scatter_walk out_walk;

	__be32 last_ctr[4];
	u32 gcm_ctr;
};

struct stm32_cryp_list {
	struct list_head dev_list;
	spinlock_t lock;
};

static struct stm32_cryp_list cryp_list = {
	.dev_list = LIST_HEAD_INIT(cryp_list.dev_list),
	.lock = __SPIN_LOCK_UNLOCKED(cryp_list.lock),
};

static inline bool is_aes(struct stm32_cryp *cryp)
{
	return cryp->flags & FLG_AES;
}

static inline bool is_des(struct stm32_cryp *cryp)
{
	return cryp->flags & FLG_DES;
}

static inline bool is_tdes(struct stm32_cryp *cryp)
{
	return cryp->flags & FLG_TDES;
}

static inline bool is_ecb(struct stm32_cryp *cryp)
{
	return cryp->flags & FLG_ECB;
}

static inline bool is_cbc(struct stm32_cryp *cryp)
{
	return cryp->flags & FLG_CBC;
}

static inline bool is_ctr(struct stm32_cryp *cryp)
{
	return cryp->flags & FLG_CTR;
}

static inline bool is_gcm(struct stm32_cryp *cryp)
{
	return cryp->flags & FLG_GCM;
}

static inline bool is_ccm(struct stm32_cryp *cryp)
{
	return cryp->flags & FLG_CCM;
}

static inline bool is_encrypt(struct stm32_cryp *cryp)
{
	return cryp->flags & FLG_ENCRYPT;
}

static inline bool is_decrypt(struct stm32_cryp *cryp)
{
	return !is_encrypt(cryp);
}

static inline u32 stm32_cryp_read(struct stm32_cryp *cryp, u32 ofst)
{
	return readl_relaxed(cryp->regs + ofst);
}

static inline void stm32_cryp_write(struct stm32_cryp *cryp, u32 ofst, u32 val)
{
	writel_relaxed(val, cryp->regs + ofst);
}

static inline int stm32_cryp_wait_busy(struct stm32_cryp *cryp)
{
	u32 status;

	return readl_relaxed_poll_timeout(cryp->regs + CRYP_SR, status,
			!(status & SR_BUSY), 10, 100000);
}

static inline void stm32_cryp_enable(struct stm32_cryp *cryp)
{
	writel_relaxed(readl_relaxed(cryp->regs + CRYP_CR) | CR_CRYPEN,
		       cryp->regs + CRYP_CR);
}

static inline int stm32_cryp_wait_enable(struct stm32_cryp *cryp)
{
	u32 status;

	return readl_relaxed_poll_timeout(cryp->regs + CRYP_CR, status,
			!(status & CR_CRYPEN), 10, 100000);
}

static inline int stm32_cryp_wait_output(struct stm32_cryp *cryp)
{
	u32 status;

	return readl_relaxed_poll_timeout(cryp->regs + CRYP_SR, status,
			status & SR_OFNE, 10, 100000);
}

static int stm32_cryp_read_auth_tag(struct stm32_cryp *cryp);
static void stm32_cryp_finish_req(struct stm32_cryp *cryp, int err);

static struct stm32_cryp *stm32_cryp_find_dev(struct stm32_cryp_ctx *ctx)
{
	struct stm32_cryp *tmp, *cryp = NULL;

	spin_lock_bh(&cryp_list.lock);
	if (!ctx->cryp) {
		list_for_each_entry(tmp, &cryp_list.dev_list, list) {
			cryp = tmp;
			break;
		}
		ctx->cryp = cryp;
	} else {
		cryp = ctx->cryp;
	}

	spin_unlock_bh(&cryp_list.lock);

	return cryp;
}

static void stm32_cryp_hw_write_iv(struct stm32_cryp *cryp, __be32 *iv)
{
	if (!iv)
		return;

	stm32_cryp_write(cryp, CRYP_IV0LR, be32_to_cpu(*iv++));
	stm32_cryp_write(cryp, CRYP_IV0RR, be32_to_cpu(*iv++));

	if (is_aes(cryp)) {
		stm32_cryp_write(cryp, CRYP_IV1LR, be32_to_cpu(*iv++));
		stm32_cryp_write(cryp, CRYP_IV1RR, be32_to_cpu(*iv++));
	}
}

static void stm32_cryp_get_iv(struct stm32_cryp *cryp)
{
	struct skcipher_request *req = cryp->req;
	__be32 *tmp = (void *)req->iv;

	if (!tmp)
		return;

	*tmp++ = cpu_to_be32(stm32_cryp_read(cryp, CRYP_IV0LR));
	*tmp++ = cpu_to_be32(stm32_cryp_read(cryp, CRYP_IV0RR));

	if (is_aes(cryp)) {
		*tmp++ = cpu_to_be32(stm32_cryp_read(cryp, CRYP_IV1LR));
		*tmp++ = cpu_to_be32(stm32_cryp_read(cryp, CRYP_IV1RR));
	}
}

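/*
 * Load the key, most-significant word first. DES uses K1 only; for
 * 3DES/AES the words are written backwards from K3RR so that the key
 * always ends at K3RR regardless of its length.
 */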
static void stm32_cryp_hw_write_key(struct stm32_cryp *c)
{
	unsigned int i;
	int r_id;

	if (is_des(c)) {
		stm32_cryp_write(c, CRYP_K1LR, be32_to_cpu(c->ctx->key[0]));
		stm32_cryp_write(c, CRYP_K1RR, be32_to_cpu(c->ctx->key[1]));
	} else {
		r_id = CRYP_K3RR;
		for (i = c->ctx->keylen / sizeof(u32); i > 0; i--, r_id -= 4)
			stm32_cryp_write(c, r_id,
					 be32_to_cpu(c->ctx->key[i - 1]));
	}
}

static u32 stm32_cryp_get_hw_mode(struct stm32_cryp *cryp)
{
	if (is_aes(cryp) && is_ecb(cryp))
		return CR_AES_ECB;

	if (is_aes(cryp) && is_cbc(cryp))
		return CR_AES_CBC;

	if (is_aes(cryp) && is_ctr(cryp))
		return CR_AES_CTR;

	if (is_aes(cryp) && is_gcm(cryp))
		return CR_AES_GCM;

	if (is_aes(cryp) && is_ccm(cryp))
		return CR_AES_CCM;

	if (is_des(cryp) && is_ecb(cryp))
		return CR_DES_ECB;

	if (is_des(cryp) && is_cbc(cryp))
		return CR_DES_CBC;

	if (is_tdes(cryp) && is_ecb(cryp))
		return CR_TDES_ECB;

	if (is_tdes(cryp) && is_cbc(cryp))
		return CR_TDES_CBC;

	dev_err(cryp->dev, "Unknown mode\n");
	return CR_AES_UNKNOWN;
}

static unsigned int stm32_cryp_get_input_text_len(struct stm32_cryp *cryp)
{
	return is_encrypt(cryp) ? cryp->areq->cryptlen :
				  cryp->areq->cryptlen - cryp->authsize;
}

static int stm32_cryp_gcm_init(struct stm32_cryp *cryp, u32 cfg)
{
	int ret;
	__be32 iv[4];

	/* Phase 1 : init */
	memcpy(iv, cryp->areq->iv, 12);
	iv[3] = cpu_to_be32(GCM_CTR_INIT);
	cryp->gcm_ctr = GCM_CTR_INIT;
	stm32_cryp_hw_write_iv(cryp, iv);

	stm32_cryp_write(cryp, CRYP_CR, cfg | CR_PH_INIT | CR_CRYPEN);

	/* Wait for end of processing */
	ret = stm32_cryp_wait_enable(cryp);
	if (ret) {
		dev_err(cryp->dev, "Timeout (gcm init)\n");
		return ret;
	}

	/* Prepare next phase */
	if (cryp->areq->assoclen) {
		cfg |= CR_PH_HEADER;
		stm32_cryp_write(cryp, CRYP_CR, cfg);
	} else if (stm32_cryp_get_input_text_len(cryp)) {
		cfg |= CR_PH_PAYLOAD;
		stm32_cryp_write(cryp, CRYP_CR, cfg);
	}

	return 0;
}

static void stm32_crypt_gcmccm_end_header(struct stm32_cryp *cryp)
{
	u32 cfg;
	int err;

	/* Check if the whole header has been written */
	if (!cryp->header_in) {
		/* Wait for completion */
		err = stm32_cryp_wait_busy(cryp);
		if (err) {
			dev_err(cryp->dev, "Timeout (gcm/ccm header)\n");
			stm32_cryp_write(cryp, CRYP_IMSCR, 0);
			stm32_cryp_finish_req(cryp, err);
			return;
		}

		if (stm32_cryp_get_input_text_len(cryp)) {
			/* Phase 3 : payload */
			cfg = stm32_cryp_read(cryp, CRYP_CR);
			cfg &= ~CR_CRYPEN;
			stm32_cryp_write(cryp, CRYP_CR, cfg);

			cfg &= ~CR_PH_MASK;
			cfg |= CR_PH_PAYLOAD | CR_CRYPEN;
			stm32_cryp_write(cryp, CRYP_CR, cfg);
		} else {
			/*
			 * Phase 4 : tag.
			 * Nothing to read, nothing to write: the caller
			 * has to end the request.
			 */
		}
	}
}

static void stm32_cryp_write_ccm_first_header(struct stm32_cryp *cryp)
{
	unsigned int i;
	size_t written;
	size_t len;
	u32 alen = cryp->areq->assoclen;
	u32 block[AES_BLOCK_32] = {0};
	u8 *b8 = (u8 *)block;

	if (alen <= 65280) {
		/* Short associated data: 2-byte length encoding in B1 */
		b8[0] = (alen >> 8) & 0xFF;
		b8[1] = alen & 0xFF;
		len = 2;
	} else {
		/* Long associated data: 0xFFFE marker + 4-byte length */
		b8[0] = 0xFF;
		b8[1] = 0xFE;
		b8[2] = (alen & 0xFF000000) >> 24;
		b8[3] = (alen & 0x00FF0000) >> 16;
		b8[4] = (alen & 0x0000FF00) >> 8;
		b8[5] = alen & 0x000000FF;
		len = 6;
	}

	written = min_t(size_t, AES_BLOCK_SIZE - len, alen);

	scatterwalk_copychunks((char *)block + len, &cryp->in_walk, written, 0);
	for (i = 0; i < AES_BLOCK_32; i++)
		stm32_cryp_write(cryp, CRYP_DIN, block[i]);

	cryp->header_in -= written;

	stm32_crypt_gcmccm_end_header(cryp);
}

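/*
 * CCM init (RFC 3610): build the B0 block from the IV. The flags byte
 * encodes the tag length and the presence of associated data, and the
 * trailing bytes carry the message length; the counter starts at 1,
 * not 0.
 */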
static int stm32_cryp_ccm_init(struct stm32_cryp *cryp, u32 cfg)
{
	int ret;
	u32 iv_32[AES_BLOCK_32], b0_32[AES_BLOCK_32];
	u8 *iv = (u8 *)iv_32, *b0 = (u8 *)b0_32;
	__be32 *bd;
	u32 *d;
	unsigned int i, textlen;

	/* Phase 1 : init. Firstly set the CTR value to 1 (not 0) */
	memcpy(iv, cryp->areq->iv, AES_BLOCK_SIZE);
	memset(iv + AES_BLOCK_SIZE - 1 - iv[0], 0, iv[0] + 1);
	iv[AES_BLOCK_SIZE - 1] = 1;
	stm32_cryp_hw_write_iv(cryp, (__be32 *)iv);

	/* Build B0 */
	memcpy(b0, iv, AES_BLOCK_SIZE);

	b0[0] |= (8 * ((cryp->authsize - 2) / 2));

	if (cryp->areq->assoclen)
		b0[0] |= 0x40;

	textlen = stm32_cryp_get_input_text_len(cryp);

	b0[AES_BLOCK_SIZE - 2] = textlen >> 8;
	b0[AES_BLOCK_SIZE - 1] = textlen & 0xFF;

	/* Enable HW */
	stm32_cryp_write(cryp, CRYP_CR, cfg | CR_PH_INIT | CR_CRYPEN);

	/* Write B0 */
	d = (u32 *)b0;
	bd = (__be32 *)b0;

	for (i = 0; i < AES_BLOCK_32; i++) {
		u32 xd = d[i];

		if (!cryp->caps->padding_wa)
			xd = be32_to_cpu(bd[i]);
		stm32_cryp_write(cryp, CRYP_DIN, xd);
	}

	/* Wait for end of processing */
	ret = stm32_cryp_wait_enable(cryp);
	if (ret) {
		dev_err(cryp->dev, "Timeout (ccm init)\n");
		return ret;
	}

	/* Prepare next phase */
	if (cryp->areq->assoclen) {
		cfg |= CR_PH_HEADER | CR_CRYPEN;
		stm32_cryp_write(cryp, CRYP_CR, cfg);

		/* Write first (special) block (may move to next phase [payload]) */
		stm32_cryp_write_ccm_first_header(cryp);
	} else if (stm32_cryp_get_input_text_len(cryp)) {
		cfg |= CR_PH_PAYLOAD;
		stm32_cryp_write(cryp, CRYP_CR, cfg);
	}

	return 0;
}

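/*
 * One-time hardware setup per request: mask interrupts, program data
 * type / key size / algorithm, load the key (running the AES key
 * preparation step first for ECB/CBC decryption), write the IV or run
 * the GCM/CCM init phase when needed, then enable the block.
 */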
static int stm32_cryp_hw_init(struct stm32_cryp *cryp)
{
	int ret;
	u32 cfg, hw_mode;

	pm_runtime_get_sync(cryp->dev);

	/* Disable interrupt */
	stm32_cryp_write(cryp, CRYP_IMSCR, 0);

	/* Set configuration */
	cfg = CR_DATA8 | CR_FFLUSH;

	switch (cryp->ctx->keylen) {
	case AES_KEYSIZE_128:
		cfg |= CR_KEY128;
		break;

	case AES_KEYSIZE_192:
		cfg |= CR_KEY192;
		break;

	default:
	case AES_KEYSIZE_256:
		cfg |= CR_KEY256;
		break;
	}

	hw_mode = stm32_cryp_get_hw_mode(cryp);
	if (hw_mode == CR_AES_UNKNOWN)
		return -EINVAL;

	/* AES ECB/CBC decrypt: run key preparation first */
	if (is_decrypt(cryp) &&
	    ((hw_mode == CR_AES_ECB) || (hw_mode == CR_AES_CBC))) {
		/* Configure in key preparation mode */
		stm32_cryp_write(cryp, CRYP_CR, cfg | CR_AES_KP);

		/* Set key only after full configuration done */
		stm32_cryp_hw_write_key(cryp);

		/* Start prepare key */
		stm32_cryp_enable(cryp);

		/* Wait for end of processing */
		ret = stm32_cryp_wait_busy(cryp);
		if (ret) {
			dev_err(cryp->dev, "Timeout (key preparation)\n");
			return ret;
		}

		cfg |= hw_mode | CR_DEC_NOT_ENC;

		/* Apply updated config (decrypt + algo) and flush */
		stm32_cryp_write(cryp, CRYP_CR, cfg);
	} else {
		cfg |= hw_mode;
		if (is_decrypt(cryp))
			cfg |= CR_DEC_NOT_ENC;

		/* Apply config and flush */
		stm32_cryp_write(cryp, CRYP_CR, cfg);

		/* Set key only after configuration done */
		stm32_cryp_hw_write_key(cryp);
	}

	switch (hw_mode) {
	case CR_AES_GCM:
	case CR_AES_CCM:
		/* Phase 1 : init */
		if (hw_mode == CR_AES_CCM)
			ret = stm32_cryp_ccm_init(cryp, cfg);
		else
			ret = stm32_cryp_gcm_init(cryp, cfg);

		if (ret)
			return ret;

		break;

	case CR_DES_CBC:
	case CR_TDES_CBC:
	case CR_AES_CBC:
	case CR_AES_CTR:
		stm32_cryp_hw_write_iv(cryp, (__be32 *)cryp->req->iv);
		break;

	default:
		break;
	}

	/* Enable now the processing */
	stm32_cryp_enable(cryp);

	return 0;
}

static void stm32_cryp_finish_req(struct stm32_cryp *cryp, int err)
{
	if (!err && (is_gcm(cryp) || is_ccm(cryp)))
		/* Phase 4 : output tag */
		err = stm32_cryp_read_auth_tag(cryp);

	if (!err && (!(is_gcm(cryp) || is_ccm(cryp) || is_ecb(cryp))))
		stm32_cryp_get_iv(cryp);

	pm_runtime_mark_last_busy(cryp->dev);
	pm_runtime_put_autosuspend(cryp->dev);

	if (is_gcm(cryp) || is_ccm(cryp))
		crypto_finalize_aead_request(cryp->engine, cryp->areq, err);
	else
		crypto_finalize_skcipher_request(cryp->engine, cryp->req,
						 err);
}

static int stm32_cryp_cpu_start(struct stm32_cryp *cryp)
{
	/* Enable interrupt and let the IRQ handler do everything */
	stm32_cryp_write(cryp, CRYP_IMSCR, IMSCR_IN | IMSCR_OUT);

	return 0;
}

static int stm32_cryp_cipher_one_req(struct crypto_engine *engine, void *areq);
static int stm32_cryp_prepare_cipher_req(struct crypto_engine *engine,
					 void *areq);

static int stm32_cryp_init_tfm(struct crypto_skcipher *tfm)
{
	struct stm32_cryp_ctx *ctx = crypto_skcipher_ctx(tfm);

	crypto_skcipher_set_reqsize(tfm, sizeof(struct stm32_cryp_reqctx));

	ctx->enginectx.op.do_one_request = stm32_cryp_cipher_one_req;
	ctx->enginectx.op.prepare_request = stm32_cryp_prepare_cipher_req;
	ctx->enginectx.op.unprepare_request = NULL;

	return 0;
}

static int stm32_cryp_aead_one_req(struct crypto_engine *engine, void *areq);
static int stm32_cryp_prepare_aead_req(struct crypto_engine *engine,
				       void *areq);

static int stm32_cryp_aes_aead_init(struct crypto_aead *tfm)
{
	struct stm32_cryp_ctx *ctx = crypto_aead_ctx(tfm);

	tfm->reqsize = sizeof(struct stm32_cryp_reqctx);

	ctx->enginectx.op.do_one_request = stm32_cryp_aead_one_req;
	ctx->enginectx.op.prepare_request = stm32_cryp_prepare_aead_req;
	ctx->enginectx.op.unprepare_request = NULL;

	return 0;
}

static int stm32_cryp_crypt(struct skcipher_request *req, unsigned long mode)
{
	struct stm32_cryp_ctx *ctx = crypto_skcipher_ctx(
			crypto_skcipher_reqtfm(req));
	struct stm32_cryp_reqctx *rctx = skcipher_request_ctx(req);
	struct stm32_cryp *cryp = stm32_cryp_find_dev(ctx);

	if (!cryp)
		return -ENODEV;

	rctx->mode = mode;

	return crypto_transfer_skcipher_request_to_engine(cryp->engine, req);
}

static int stm32_cryp_aead_crypt(struct aead_request *req, unsigned long mode)
{
	struct stm32_cryp_ctx *ctx = crypto_aead_ctx(crypto_aead_reqtfm(req));
	struct stm32_cryp_reqctx *rctx = aead_request_ctx(req);
	struct stm32_cryp *cryp = stm32_cryp_find_dev(ctx);

	if (!cryp)
		return -ENODEV;

	rctx->mode = mode;

	return crypto_transfer_aead_request_to_engine(cryp->engine, req);
}

static int stm32_cryp_setkey(struct crypto_skcipher *tfm, const u8 *key,
			     unsigned int keylen)
{
	struct stm32_cryp_ctx *ctx = crypto_skcipher_ctx(tfm);

	memcpy(ctx->key, key, keylen);
	ctx->keylen = keylen;

	return 0;
}

static int stm32_cryp_aes_setkey(struct crypto_skcipher *tfm, const u8 *key,
				 unsigned int keylen)
{
	if (keylen != AES_KEYSIZE_128 && keylen != AES_KEYSIZE_192 &&
	    keylen != AES_KEYSIZE_256)
		return -EINVAL;

	return stm32_cryp_setkey(tfm, key, keylen);
}

static int stm32_cryp_des_setkey(struct crypto_skcipher *tfm, const u8 *key,
				 unsigned int keylen)
{
	return verify_skcipher_des_key(tfm, key) ?:
	       stm32_cryp_setkey(tfm, key, keylen);
}

static int stm32_cryp_tdes_setkey(struct crypto_skcipher *tfm, const u8 *key,
				  unsigned int keylen)
{
	return verify_skcipher_des3_key(tfm, key) ?:
	       stm32_cryp_setkey(tfm, key, keylen);
}

static int stm32_cryp_aes_aead_setkey(struct crypto_aead *tfm, const u8 *key,
				      unsigned int keylen)
{
	struct stm32_cryp_ctx *ctx = crypto_aead_ctx(tfm);

	if (keylen != AES_KEYSIZE_128 && keylen != AES_KEYSIZE_192 &&
	    keylen != AES_KEYSIZE_256)
		return -EINVAL;

	memcpy(ctx->key, key, keylen);
	ctx->keylen = keylen;

	return 0;
}

static int stm32_cryp_aes_gcm_setauthsize(struct crypto_aead *tfm,
					  unsigned int authsize)
{
	switch (authsize) {
	case 4:
	case 8:
	case 12:
	case 13:
	case 14:
	case 15:
	case 16:
		break;
	default:
		return -EINVAL;
	}

	return 0;
}

static int stm32_cryp_aes_ccm_setauthsize(struct crypto_aead *tfm,
					  unsigned int authsize)
{
	switch (authsize) {
	case 4:
	case 6:
	case 8:
	case 10:
	case 12:
	case 14:
	case 16:
		break;
	default:
		return -EINVAL;
	}

	return 0;
}

static int stm32_cryp_aes_ecb_encrypt(struct skcipher_request *req)
{
	if (req->cryptlen % AES_BLOCK_SIZE)
		return -EINVAL;

	if (req->cryptlen == 0)
		return 0;

	return stm32_cryp_crypt(req, FLG_AES | FLG_ECB | FLG_ENCRYPT);
}

static int stm32_cryp_aes_ecb_decrypt(struct skcipher_request *req)
{
	if (req->cryptlen % AES_BLOCK_SIZE)
		return -EINVAL;

	if (req->cryptlen == 0)
		return 0;

	return stm32_cryp_crypt(req, FLG_AES | FLG_ECB);
}

static int stm32_cryp_aes_cbc_encrypt(struct skcipher_request *req)
{
	if (req->cryptlen % AES_BLOCK_SIZE)
		return -EINVAL;

	if (req->cryptlen == 0)
		return 0;

	return stm32_cryp_crypt(req, FLG_AES | FLG_CBC | FLG_ENCRYPT);
}

static int stm32_cryp_aes_cbc_decrypt(struct skcipher_request *req)
{
	if (req->cryptlen % AES_BLOCK_SIZE)
		return -EINVAL;

	if (req->cryptlen == 0)
		return 0;

	return stm32_cryp_crypt(req, FLG_AES | FLG_CBC);
}

static int stm32_cryp_aes_ctr_encrypt(struct skcipher_request *req)
{
	if (req->cryptlen == 0)
		return 0;

	return stm32_cryp_crypt(req, FLG_AES | FLG_CTR | FLG_ENCRYPT);
}

static int stm32_cryp_aes_ctr_decrypt(struct skcipher_request *req)
{
	if (req->cryptlen == 0)
		return 0;

	return stm32_cryp_crypt(req, FLG_AES | FLG_CTR);
}

static int stm32_cryp_aes_gcm_encrypt(struct aead_request *req)
{
	return stm32_cryp_aead_crypt(req, FLG_AES | FLG_GCM | FLG_ENCRYPT);
}

static int stm32_cryp_aes_gcm_decrypt(struct aead_request *req)
{
	return stm32_cryp_aead_crypt(req, FLG_AES | FLG_GCM);
}

static inline int crypto_ccm_check_iv(const u8 *iv)
{
	/* 2 <= L <= 8, so 1 <= L' <= 7 */
	if (iv[0] < 1 || iv[0] > 7)
		return -EINVAL;

	return 0;
}

static int stm32_cryp_aes_ccm_encrypt(struct aead_request *req)
{
	int err;

	err = crypto_ccm_check_iv(req->iv);
	if (err)
		return err;

	return stm32_cryp_aead_crypt(req, FLG_AES | FLG_CCM | FLG_ENCRYPT);
}

static int stm32_cryp_aes_ccm_decrypt(struct aead_request *req)
{
	int err;

	err = crypto_ccm_check_iv(req->iv);
	if (err)
		return err;

	return stm32_cryp_aead_crypt(req, FLG_AES | FLG_CCM);
}

static int stm32_cryp_des_ecb_encrypt(struct skcipher_request *req)
{
	if (req->cryptlen % DES_BLOCK_SIZE)
		return -EINVAL;

	if (req->cryptlen == 0)
		return 0;

	return stm32_cryp_crypt(req, FLG_DES | FLG_ECB | FLG_ENCRYPT);
}

static int stm32_cryp_des_ecb_decrypt(struct skcipher_request *req)
{
	if (req->cryptlen % DES_BLOCK_SIZE)
		return -EINVAL;

	if (req->cryptlen == 0)
		return 0;

	return stm32_cryp_crypt(req, FLG_DES | FLG_ECB);
}

static int stm32_cryp_des_cbc_encrypt(struct skcipher_request *req)
{
	if (req->cryptlen % DES_BLOCK_SIZE)
		return -EINVAL;

	if (req->cryptlen == 0)
		return 0;

	return stm32_cryp_crypt(req, FLG_DES | FLG_CBC | FLG_ENCRYPT);
}

static int stm32_cryp_des_cbc_decrypt(struct skcipher_request *req)
{
	if (req->cryptlen % DES_BLOCK_SIZE)
		return -EINVAL;

	if (req->cryptlen == 0)
		return 0;

	return stm32_cryp_crypt(req, FLG_DES | FLG_CBC);
}

static int stm32_cryp_tdes_ecb_encrypt(struct skcipher_request *req)
{
	if (req->cryptlen % DES_BLOCK_SIZE)
		return -EINVAL;

	if (req->cryptlen == 0)
		return 0;

	return stm32_cryp_crypt(req, FLG_TDES | FLG_ECB | FLG_ENCRYPT);
}

static int stm32_cryp_tdes_ecb_decrypt(struct skcipher_request *req)
{
	if (req->cryptlen % DES_BLOCK_SIZE)
		return -EINVAL;

	if (req->cryptlen == 0)
		return 0;

	return stm32_cryp_crypt(req, FLG_TDES | FLG_ECB);
}

static int stm32_cryp_tdes_cbc_encrypt(struct skcipher_request *req)
{
	if (req->cryptlen % DES_BLOCK_SIZE)
		return -EINVAL;

	if (req->cryptlen == 0)
		return 0;

	return stm32_cryp_crypt(req, FLG_TDES | FLG_CBC | FLG_ENCRYPT);
}

static int stm32_cryp_tdes_cbc_decrypt(struct skcipher_request *req)
{
	if (req->cryptlen % DES_BLOCK_SIZE)
		return -EINVAL;

	if (req->cryptlen == 0)
		return 0;

	return stm32_cryp_crypt(req, FLG_TDES | FLG_CBC);
}

static int stm32_cryp_prepare_req(struct skcipher_request *req,
				  struct aead_request *areq)
{
	struct stm32_cryp_ctx *ctx;
	struct stm32_cryp *cryp;
	struct stm32_cryp_reqctx *rctx;
	struct scatterlist *in_sg;
	int ret;

	if (!req && !areq)
		return -EINVAL;

	ctx = req ? crypto_skcipher_ctx(crypto_skcipher_reqtfm(req)) :
		    crypto_aead_ctx(crypto_aead_reqtfm(areq));

	cryp = ctx->cryp;

	if (!cryp)
		return -ENODEV;

	rctx = req ? skcipher_request_ctx(req) : aead_request_ctx(areq);
	rctx->mode &= FLG_MODE_MASK;

	ctx->cryp = cryp;

	cryp->flags = (cryp->flags & ~FLG_MODE_MASK) | rctx->mode;
	cryp->hw_blocksize = is_aes(cryp) ? AES_BLOCK_SIZE : DES_BLOCK_SIZE;
	cryp->ctx = ctx;

	if (req) {
		cryp->req = req;
		cryp->areq = NULL;
		cryp->header_in = 0;
		cryp->payload_in = req->cryptlen;
		cryp->payload_out = req->cryptlen;
		cryp->authsize = 0;
	} else {
		/*
		 * Length of input and output data:
		 * Encryption case:
		 *  INPUT  = AssocData   ||   PlainText
		 *          <- assoclen ->  <- cryptlen ->
		 *
		 *  OUTPUT = AssocData   ||  CipherText  ||   AuthTag
		 *          <- assoclen ->  <- cryptlen ->  <- authsize ->
		 *
		 * Decryption case:
		 *  INPUT  = AssocData   ||  CipherText  ||  AuthTag
		 *          <- assoclen ->  <------- cryptlen ------->
		 *
		 *  OUTPUT = AssocData   ||        PlainText
		 *          <- assoclen ->  <- cryptlen - authsize ->
		 */
		cryp->areq = areq;
		cryp->req = NULL;
		cryp->authsize = crypto_aead_authsize(crypto_aead_reqtfm(areq));
		if (is_encrypt(cryp)) {
			cryp->payload_in = areq->cryptlen;
			cryp->header_in = areq->assoclen;
			cryp->payload_out = areq->cryptlen;
		} else {
			cryp->payload_in = areq->cryptlen - cryp->authsize;
			cryp->header_in = areq->assoclen;
			cryp->payload_out = cryp->payload_in;
		}
	}

	in_sg = req ? req->src : areq->src;
	scatterwalk_start(&cryp->in_walk, in_sg);

	cryp->out_sg = req ? req->dst : areq->dst;
	scatterwalk_start(&cryp->out_walk, cryp->out_sg);

	if (is_gcm(cryp) || is_ccm(cryp)) {
		/* In output, jump after the associated data */
		scatterwalk_copychunks(NULL, &cryp->out_walk, cryp->areq->assoclen, 2);
	}

	if (is_ctr(cryp))
		memset(cryp->last_ctr, 0, sizeof(cryp->last_ctr));

	ret = stm32_cryp_hw_init(cryp);
	return ret;
}

static int stm32_cryp_prepare_cipher_req(struct crypto_engine *engine,
					 void *areq)
{
	struct skcipher_request *req = container_of(areq,
						    struct skcipher_request,
						    base);

	return stm32_cryp_prepare_req(req, NULL);
}

static int stm32_cryp_cipher_one_req(struct crypto_engine *engine, void *areq)
{
	struct skcipher_request *req = container_of(areq,
						    struct skcipher_request,
						    base);
	struct stm32_cryp_ctx *ctx = crypto_skcipher_ctx(
			crypto_skcipher_reqtfm(req));
	struct stm32_cryp *cryp = ctx->cryp;

	if (!cryp)
		return -ENODEV;

	return stm32_cryp_cpu_start(cryp);
}

static int stm32_cryp_prepare_aead_req(struct crypto_engine *engine, void *areq)
{
	struct aead_request *req = container_of(areq, struct aead_request,
						base);

	return stm32_cryp_prepare_req(NULL, req);
}

static int stm32_cryp_aead_one_req(struct crypto_engine *engine, void *areq)
{
	struct aead_request *req = container_of(areq, struct aead_request,
						base);
	struct stm32_cryp_ctx *ctx = crypto_aead_ctx(crypto_aead_reqtfm(req));
	struct stm32_cryp *cryp = ctx->cryp;

	if (!cryp)
		return -ENODEV;

	if (unlikely(!cryp->payload_in && !cryp->header_in)) {
		/* No input data to process: get tag and finish */
		stm32_cryp_finish_req(cryp, 0);
		return 0;
	}

	return stm32_cryp_cpu_start(cryp);
}

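/*
 * Final (tag) phase. For GCM the lengths of the associated data and of
 * the payload are fed in as two 64-bit big-endian bit counts; for CCM
 * the CTR0 block (counter field zeroed) is fed in instead.
 */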
static int stm32_cryp_read_auth_tag(struct stm32_cryp *cryp)
{
	u32 cfg, size_bit;
	unsigned int i;
	int ret = 0;

	/* Update Config */
	cfg = stm32_cryp_read(cryp, CRYP_CR);

	cfg &= ~CR_PH_MASK;
	cfg |= CR_PH_FINAL;
	cfg &= ~CR_DEC_NOT_ENC;
	cfg |= CR_CRYPEN;

	stm32_cryp_write(cryp, CRYP_CR, cfg);

	if (is_gcm(cryp)) {
		/* GCM: write aad and payload size (in bits) */
		size_bit = cryp->areq->assoclen * 8;
		if (cryp->caps->swap_final)
			size_bit = (__force u32)cpu_to_be32(size_bit);

		stm32_cryp_write(cryp, CRYP_DIN, 0);
		stm32_cryp_write(cryp, CRYP_DIN, size_bit);

		size_bit = is_encrypt(cryp) ? cryp->areq->cryptlen :
				cryp->areq->cryptlen - cryp->authsize;
		size_bit *= 8;
		if (cryp->caps->swap_final)
			size_bit = (__force u32)cpu_to_be32(size_bit);

		stm32_cryp_write(cryp, CRYP_DIN, 0);
		stm32_cryp_write(cryp, CRYP_DIN, size_bit);
	} else {
		/* CCM: write CTR0 */
		u32 iv32[AES_BLOCK_32];
		u8 *iv = (u8 *)iv32;
		__be32 *biv = (__be32 *)iv32;

		memcpy(iv, cryp->areq->iv, AES_BLOCK_SIZE);
		memset(iv + AES_BLOCK_SIZE - 1 - iv[0], 0, iv[0] + 1);

		for (i = 0; i < AES_BLOCK_32; i++) {
			u32 xiv = iv32[i];

			if (!cryp->caps->padding_wa)
				xiv = be32_to_cpu(biv[i]);
			stm32_cryp_write(cryp, CRYP_DIN, xiv);
		}
	}

	/* Wait for output data */
	ret = stm32_cryp_wait_output(cryp);
	if (ret) {
		dev_err(cryp->dev, "Timeout (read tag)\n");
		return ret;
	}

	if (is_encrypt(cryp)) {
		u32 out_tag[AES_BLOCK_32];

		/* Get and write tag */
		for (i = 0; i < AES_BLOCK_32; i++)
			out_tag[i] = stm32_cryp_read(cryp, CRYP_DOUT);

		scatterwalk_copychunks(out_tag, &cryp->out_walk, cryp->authsize, 1);
	} else {
		/* Get and check tag */
		u32 in_tag[AES_BLOCK_32], out_tag[AES_BLOCK_32];

		scatterwalk_copychunks(in_tag, &cryp->in_walk, cryp->authsize, 0);

		for (i = 0; i < AES_BLOCK_32; i++)
			out_tag[i] = stm32_cryp_read(cryp, CRYP_DOUT);

		if (crypto_memneq(in_tag, out_tag, cryp->authsize))
			ret = -EBADMSG;
	}

	/* Disable cryp */
	cfg &= ~CR_CRYPEN;
	stm32_cryp_write(cryp, CRYP_CR, cfg);

	return ret;
}

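/*
 * The hardware only increments the low 32-bit counter word of the CTR
 * IV. Detect an imminent wrap and reload the full 128-bit counter by
 * hand, then snapshot the IV registers for the next block.
 */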
static void stm32_cryp_check_ctr_counter(struct stm32_cryp *cryp)
{
	u32 cr;

	if (unlikely(cryp->last_ctr[3] == cpu_to_be32(0xFFFFFFFF))) {
		/*
		 * In this case we need to increment the counter manually,
		 * as the hardware does not handle the u32 carry.
		 */
		crypto_inc((u8 *)cryp->last_ctr, sizeof(cryp->last_ctr));

		cr = stm32_cryp_read(cryp, CRYP_CR);
		stm32_cryp_write(cryp, CRYP_CR, cr & ~CR_CRYPEN);

		stm32_cryp_hw_write_iv(cryp, cryp->last_ctr);

		stm32_cryp_write(cryp, CRYP_CR, cr);
	}

	/* The IV registers are big-endian */
	cryp->last_ctr[0] = cpu_to_be32(stm32_cryp_read(cryp, CRYP_IV0LR));
	cryp->last_ctr[1] = cpu_to_be32(stm32_cryp_read(cryp, CRYP_IV0RR));
	cryp->last_ctr[2] = cpu_to_be32(stm32_cryp_read(cryp, CRYP_IV1LR));
	cryp->last_ctr[3] = cpu_to_be32(stm32_cryp_read(cryp, CRYP_IV1RR));
}

static void stm32_cryp_irq_read_data(struct stm32_cryp *cryp)
{
	unsigned int i;
	u32 block[AES_BLOCK_32];

	for (i = 0; i < cryp->hw_blocksize / sizeof(u32); i++)
		block[i] = stm32_cryp_read(cryp, CRYP_DOUT);

	scatterwalk_copychunks(block, &cryp->out_walk, min_t(size_t, cryp->hw_blocksize,
							     cryp->payload_out), 1);
	cryp->payload_out -= min_t(size_t, cryp->hw_blocksize,
				   cryp->payload_out);
}

static void stm32_cryp_irq_write_block(struct stm32_cryp *cryp)
{
	unsigned int i;
	u32 block[AES_BLOCK_32] = {0};

	scatterwalk_copychunks(block, &cryp->in_walk, min_t(size_t, cryp->hw_blocksize,
							    cryp->payload_in), 0);
	for (i = 0; i < cryp->hw_blocksize / sizeof(u32); i++)
		stm32_cryp_write(cryp, CRYP_DIN, block[i]);

	cryp->payload_in -= min_t(size_t, cryp->hw_blocksize, cryp->payload_in);
}

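/*
 * GCM workaround for payloads that are not a multiple of the block size
 * (padding_wa revisions): encrypt the padded last block in plain AES-CTR
 * mode, store the result, then switch back to GCM in the final phase and
 * feed the block in again so the tag computation accounts for it.
 */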
static void stm32_cryp_irq_write_gcm_padded_data(struct stm32_cryp *cryp)
{
	int err;
	u32 cfg, block[AES_BLOCK_32] = {0};
	unsigned int i;

	/* Disable interrupts and the peripheral */
	stm32_cryp_write(cryp, CRYP_IMSCR, 0);
	cfg = stm32_cryp_read(cryp, CRYP_CR);
	cfg &= ~CR_CRYPEN;
	stm32_cryp_write(cryp, CRYP_CR, cfg);

	/* Update IV1R */
	stm32_cryp_write(cryp, CRYP_IV1RR, cryp->gcm_ctr - 2);

	/* Switch to plain AES-CTR mode */
	cfg &= ~CR_ALGO_MASK;
	cfg |= CR_AES_CTR;
	stm32_cryp_write(cryp, CRYP_CR, cfg);

	/* Re-enable the peripheral */
	cfg |= CR_CRYPEN;
	stm32_cryp_write(cryp, CRYP_CR, cfg);

	/* Pad and write the last block */
	stm32_cryp_irq_write_block(cryp);

	/* Wait for end of processing */
	err = stm32_cryp_wait_output(cryp);
	if (err) {
		dev_err(cryp->dev, "Timeout (write gcm last data)\n");
		return stm32_cryp_finish_req(cryp, err);
	}

	/*
	 * Get and store the encrypted data. Same as
	 * stm32_cryp_irq_read_data(), except the block value is kept for
	 * the replay below.
	 */
	for (i = 0; i < cryp->hw_blocksize / sizeof(u32); i++)
		block[i] = stm32_cryp_read(cryp, CRYP_DOUT);

	scatterwalk_copychunks(block, &cryp->out_walk, min_t(size_t, cryp->hw_blocksize,
							     cryp->payload_out), 1);
	cryp->payload_out -= min_t(size_t, cryp->hw_blocksize,
				   cryp->payload_out);

	/* Change mode back to AES GCM */
	cfg &= ~CR_ALGO_MASK;
	cfg |= CR_AES_GCM;
	stm32_cryp_write(cryp, CRYP_CR, cfg);

	/* Change phase to Final */
	cfg &= ~CR_PH_MASK;
	cfg |= CR_PH_FINAL;
	stm32_cryp_write(cryp, CRYP_CR, cfg);

	/* Write the padded data back */
	for (i = 0; i < AES_BLOCK_32; i++)
		stm32_cryp_write(cryp, CRYP_DIN, block[i]);

	/* Wait for completion, then empty the output FIFO */
	err = stm32_cryp_wait_output(cryp);
	if (err) {
		dev_err(cryp->dev, "Timeout (write gcm padded data)\n");
		return stm32_cryp_finish_req(cryp, err);
	}

	for (i = 0; i < AES_BLOCK_32; i++)
		stm32_cryp_read(cryp, CRYP_DOUT);

	/* Finish the request (tag phase) */
	stm32_cryp_finish_req(cryp, 0);
}

static void stm32_cryp_irq_set_npblb(struct stm32_cryp *cryp)
{
	u32 cfg;

	/* Set NPBLB in case the last block is not full */
	cfg = stm32_cryp_read(cryp, CRYP_CR);
	cfg &= ~CR_CRYPEN;
	stm32_cryp_write(cryp, CRYP_CR, cfg);

	cfg |= (cryp->hw_blocksize - cryp->payload_in) << CR_NBPBL_SHIFT;
	cfg |= CR_CRYPEN;
	stm32_cryp_write(cryp, CRYP_CR, cfg);
}

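/*
 * CCM decrypt workaround for payloads that are not a multiple of the
 * block size (padding_wa revisions): decrypt the padded last block in
 * plain AES-CTR mode, then XOR it with the saved context register
 * snapshots (CSGCMCCMxR) and replay it through the header phase so the
 * tag is fixed up.
 */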
static void stm32_cryp_irq_write_ccm_padded_data(struct stm32_cryp *cryp)
{
	int err = 0;
	u32 cfg, iv1tmp;
	u32 cstmp1[AES_BLOCK_32], cstmp2[AES_BLOCK_32];
	u32 block[AES_BLOCK_32] = {0};
	unsigned int i;

	/* Disable interrupts and the peripheral */
	stm32_cryp_write(cryp, CRYP_IMSCR, 0);

	cfg = stm32_cryp_read(cryp, CRYP_CR);
	cfg &= ~CR_CRYPEN;
	stm32_cryp_write(cryp, CRYP_CR, cfg);

	/* Get IV1 from CRYP_CSGCMCCM7 */
	iv1tmp = stm32_cryp_read(cryp, CRYP_CSGCMCCM0R + 7 * 4);

	/* Save the context registers */
	for (i = 0; i < ARRAY_SIZE(cstmp1); i++)
		cstmp1[i] = stm32_cryp_read(cryp, CRYP_CSGCMCCM0R + i * 4);

	/* Update IV1R */
	stm32_cryp_write(cryp, CRYP_IV1RR, iv1tmp);

	/* Switch to plain AES-CTR mode */
	cfg &= ~CR_ALGO_MASK;
	cfg |= CR_AES_CTR;
	stm32_cryp_write(cryp, CRYP_CR, cfg);

	/* Re-enable the peripheral */
	cfg |= CR_CRYPEN;
	stm32_cryp_write(cryp, CRYP_CR, cfg);

	/* Pad and write the last block */
	stm32_cryp_irq_write_block(cryp);

	/* Wait for end of processing */
	err = stm32_cryp_wait_output(cryp);
	if (err) {
		dev_err(cryp->dev, "Timeout (write ccm padded data)\n");
		return stm32_cryp_finish_req(cryp, err);
	}

	/* Get and store the decrypted data */
	for (i = 0; i < cryp->hw_blocksize / sizeof(u32); i++)
		block[i] = stm32_cryp_read(cryp, CRYP_DOUT);

	scatterwalk_copychunks(block, &cryp->out_walk, min_t(size_t, cryp->hw_blocksize,
							     cryp->payload_out), 1);
	cryp->payload_out -= min_t(size_t, cryp->hw_blocksize, cryp->payload_out);

	/* Save the context registers again */
	for (i = 0; i < ARRAY_SIZE(cstmp2); i++)
		cstmp2[i] = stm32_cryp_read(cryp, CRYP_CSGCMCCM0R + i * 4);

	/* Change mode back to AES CCM */
	cfg &= ~CR_ALGO_MASK;
	cfg |= CR_AES_CCM;
	stm32_cryp_write(cryp, CRYP_CR, cfg);

	/* Change phase to header */
	cfg &= ~CR_PH_MASK;
	cfg |= CR_PH_HEADER;
	stm32_cryp_write(cryp, CRYP_CR, cfg);

	/* XOR with the saved contexts and write the padded data */
	for (i = 0; i < ARRAY_SIZE(block); i++) {
		block[i] ^= cstmp1[i];
		block[i] ^= cstmp2[i];
		stm32_cryp_write(cryp, CRYP_DIN, block[i]);
	}

	/* Wait for completion */
	err = stm32_cryp_wait_busy(cryp);
	if (err)
		dev_err(cryp->dev, "Timeout (write ccm padded data)\n");

	/* Finish the request (tag phase) */
	stm32_cryp_finish_req(cryp, err);
}

static void stm32_cryp_irq_write_data(struct stm32_cryp *cryp)
{
	if (unlikely(!cryp->payload_in)) {
		dev_warn(cryp->dev, "No more data to process\n");
		return;
	}

	if (unlikely(cryp->payload_in < AES_BLOCK_SIZE &&
		     (stm32_cryp_get_hw_mode(cryp) == CR_AES_GCM) &&
		     is_encrypt(cryp))) {
		/* Padding for AES GCM encryption */
		if (cryp->caps->padding_wa) {
			/* Special case 1 */
			stm32_cryp_irq_write_gcm_padded_data(cryp);
			return;
		}

		/* Setting padding bytes (NBBLB) */
		stm32_cryp_irq_set_npblb(cryp);
	}

	if (unlikely((cryp->payload_in < AES_BLOCK_SIZE) &&
		     (stm32_cryp_get_hw_mode(cryp) == CR_AES_CCM) &&
		     is_decrypt(cryp))) {
		/* Padding for AES CCM decryption */
		if (cryp->caps->padding_wa) {
			/* Special case 2 */
			stm32_cryp_irq_write_ccm_padded_data(cryp);
			return;
		}

		/* Setting padding bytes (NBBLB) */
		stm32_cryp_irq_set_npblb(cryp);
	}

	if (is_aes(cryp) && is_ctr(cryp))
		stm32_cryp_check_ctr_counter(cryp);

	stm32_cryp_irq_write_block(cryp);
}

static void stm32_cryp_irq_write_gcmccm_header(struct stm32_cryp *cryp)
{
	unsigned int i;
	u32 block[AES_BLOCK_32] = {0};
	size_t written;

	written = min_t(size_t, AES_BLOCK_SIZE, cryp->header_in);

	scatterwalk_copychunks(block, &cryp->in_walk, written, 0);
	for (i = 0; i < AES_BLOCK_32; i++)
		stm32_cryp_write(cryp, CRYP_DIN, block[i]);

	cryp->header_in -= written;

	stm32_crypt_gcmccm_end_header(cryp);
}

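/*
 * Threaded IRQ handler: drain the output FIFO, feed the input FIFO
 * (header or payload for GCM/CCM), then mask the interrupts that are no
 * longer needed and finish the request once all data has moved.
 */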
static irqreturn_t stm32_cryp_irq_thread(int irq, void *arg)
{
	struct stm32_cryp *cryp = arg;
	u32 ph;
	u32 it_mask = stm32_cryp_read(cryp, CRYP_IMSCR);

	if (cryp->irq_status & MISR_OUT)
		/* Output FIFO IRQ: read data */
		stm32_cryp_irq_read_data(cryp);

	if (cryp->irq_status & MISR_IN) {
		if (is_gcm(cryp) || is_ccm(cryp)) {
			ph = stm32_cryp_read(cryp, CRYP_CR) & CR_PH_MASK;
			if (unlikely(ph == CR_PH_HEADER))
				/* Write input FIFO: header */
				stm32_cryp_irq_write_gcmccm_header(cryp);
			else
				/* Write input FIFO: payload */
				stm32_cryp_irq_write_data(cryp);
			if (is_gcm(cryp))
				cryp->gcm_ctr++;
		} else {
			/* Input FIFO IRQ: write data */
			stm32_cryp_irq_write_data(cryp);
		}
	}

	/* Mask useless interrupts */
	if (!cryp->payload_in && !cryp->header_in)
		it_mask &= ~IMSCR_IN;
	if (!cryp->payload_out)
		it_mask &= ~IMSCR_OUT;
	stm32_cryp_write(cryp, CRYP_IMSCR, it_mask);

	if (!cryp->payload_in && !cryp->header_in && !cryp->payload_out)
		stm32_cryp_finish_req(cryp, 0);

	return IRQ_HANDLED;
}

static irqreturn_t stm32_cryp_irq(int irq, void *arg)
{
	struct stm32_cryp *cryp = arg;

	cryp->irq_status = stm32_cryp_read(cryp, CRYP_MISR);

	return IRQ_WAKE_THREAD;
}

static struct skcipher_alg crypto_algs[] = {
{
	.base.cra_name = "ecb(aes)",
	.base.cra_driver_name = "stm32-ecb-aes",
	.base.cra_priority = 200,
	.base.cra_flags = CRYPTO_ALG_ASYNC,
	.base.cra_blocksize = AES_BLOCK_SIZE,
	.base.cra_ctxsize = sizeof(struct stm32_cryp_ctx),
	.base.cra_alignmask = 0,
	.base.cra_module = THIS_MODULE,

	.init = stm32_cryp_init_tfm,
	.min_keysize = AES_MIN_KEY_SIZE,
	.max_keysize = AES_MAX_KEY_SIZE,
	.setkey = stm32_cryp_aes_setkey,
	.encrypt = stm32_cryp_aes_ecb_encrypt,
	.decrypt = stm32_cryp_aes_ecb_decrypt,
},
{
	.base.cra_name = "cbc(aes)",
	.base.cra_driver_name = "stm32-cbc-aes",
	.base.cra_priority = 200,
	.base.cra_flags = CRYPTO_ALG_ASYNC,
	.base.cra_blocksize = AES_BLOCK_SIZE,
	.base.cra_ctxsize = sizeof(struct stm32_cryp_ctx),
	.base.cra_alignmask = 0,
	.base.cra_module = THIS_MODULE,

	.init = stm32_cryp_init_tfm,
	.min_keysize = AES_MIN_KEY_SIZE,
	.max_keysize = AES_MAX_KEY_SIZE,
	.ivsize = AES_BLOCK_SIZE,
	.setkey = stm32_cryp_aes_setkey,
	.encrypt = stm32_cryp_aes_cbc_encrypt,
	.decrypt = stm32_cryp_aes_cbc_decrypt,
},
{
	.base.cra_name = "ctr(aes)",
	.base.cra_driver_name = "stm32-ctr-aes",
	.base.cra_priority = 200,
	.base.cra_flags = CRYPTO_ALG_ASYNC,
	.base.cra_blocksize = 1,
	.base.cra_ctxsize = sizeof(struct stm32_cryp_ctx),
	.base.cra_alignmask = 0,
	.base.cra_module = THIS_MODULE,

	.init = stm32_cryp_init_tfm,
	.min_keysize = AES_MIN_KEY_SIZE,
	.max_keysize = AES_MAX_KEY_SIZE,
	.ivsize = AES_BLOCK_SIZE,
	.setkey = stm32_cryp_aes_setkey,
	.encrypt = stm32_cryp_aes_ctr_encrypt,
	.decrypt = stm32_cryp_aes_ctr_decrypt,
},
{
	.base.cra_name = "ecb(des)",
	.base.cra_driver_name = "stm32-ecb-des",
	.base.cra_priority = 200,
	.base.cra_flags = CRYPTO_ALG_ASYNC,
	.base.cra_blocksize = DES_BLOCK_SIZE,
	.base.cra_ctxsize = sizeof(struct stm32_cryp_ctx),
	.base.cra_alignmask = 0,
	.base.cra_module = THIS_MODULE,

	.init = stm32_cryp_init_tfm,
	.min_keysize = DES_BLOCK_SIZE,
	.max_keysize = DES_BLOCK_SIZE,
	.setkey = stm32_cryp_des_setkey,
	.encrypt = stm32_cryp_des_ecb_encrypt,
	.decrypt = stm32_cryp_des_ecb_decrypt,
},
{
	.base.cra_name = "cbc(des)",
	.base.cra_driver_name = "stm32-cbc-des",
	.base.cra_priority = 200,
	.base.cra_flags = CRYPTO_ALG_ASYNC,
	.base.cra_blocksize = DES_BLOCK_SIZE,
	.base.cra_ctxsize = sizeof(struct stm32_cryp_ctx),
	.base.cra_alignmask = 0,
	.base.cra_module = THIS_MODULE,

	.init = stm32_cryp_init_tfm,
	.min_keysize = DES_BLOCK_SIZE,
	.max_keysize = DES_BLOCK_SIZE,
	.ivsize = DES_BLOCK_SIZE,
	.setkey = stm32_cryp_des_setkey,
	.encrypt = stm32_cryp_des_cbc_encrypt,
	.decrypt = stm32_cryp_des_cbc_decrypt,
},
{
	.base.cra_name = "ecb(des3_ede)",
	.base.cra_driver_name = "stm32-ecb-des3",
	.base.cra_priority = 200,
	.base.cra_flags = CRYPTO_ALG_ASYNC,
	.base.cra_blocksize = DES_BLOCK_SIZE,
	.base.cra_ctxsize = sizeof(struct stm32_cryp_ctx),
	.base.cra_alignmask = 0,
	.base.cra_module = THIS_MODULE,

	.init = stm32_cryp_init_tfm,
	.min_keysize = 3 * DES_BLOCK_SIZE,
	.max_keysize = 3 * DES_BLOCK_SIZE,
	.setkey = stm32_cryp_tdes_setkey,
	.encrypt = stm32_cryp_tdes_ecb_encrypt,
	.decrypt = stm32_cryp_tdes_ecb_decrypt,
},
{
	.base.cra_name = "cbc(des3_ede)",
	.base.cra_driver_name = "stm32-cbc-des3",
	.base.cra_priority = 200,
	.base.cra_flags = CRYPTO_ALG_ASYNC,
	.base.cra_blocksize = DES_BLOCK_SIZE,
	.base.cra_ctxsize = sizeof(struct stm32_cryp_ctx),
	.base.cra_alignmask = 0,
	.base.cra_module = THIS_MODULE,

	.init = stm32_cryp_init_tfm,
	.min_keysize = 3 * DES_BLOCK_SIZE,
	.max_keysize = 3 * DES_BLOCK_SIZE,
	.ivsize = DES_BLOCK_SIZE,
	.setkey = stm32_cryp_tdes_setkey,
	.encrypt = stm32_cryp_tdes_cbc_encrypt,
	.decrypt = stm32_cryp_tdes_cbc_decrypt,
},
};

static struct aead_alg aead_algs[] = {
{
	.setkey = stm32_cryp_aes_aead_setkey,
	.setauthsize = stm32_cryp_aes_gcm_setauthsize,
	.encrypt = stm32_cryp_aes_gcm_encrypt,
	.decrypt = stm32_cryp_aes_gcm_decrypt,
	.init = stm32_cryp_aes_aead_init,
	.ivsize = 12,
	.maxauthsize = AES_BLOCK_SIZE,

	.base = {
		.cra_name = "gcm(aes)",
		.cra_driver_name = "stm32-gcm-aes",
		.cra_priority = 200,
		.cra_flags = CRYPTO_ALG_ASYNC,
		.cra_blocksize = 1,
		.cra_ctxsize = sizeof(struct stm32_cryp_ctx),
		.cra_alignmask = 0,
		.cra_module = THIS_MODULE,
	},
},
{
	.setkey = stm32_cryp_aes_aead_setkey,
	.setauthsize = stm32_cryp_aes_ccm_setauthsize,
	.encrypt = stm32_cryp_aes_ccm_encrypt,
	.decrypt = stm32_cryp_aes_ccm_decrypt,
	.init = stm32_cryp_aes_aead_init,
	.ivsize = AES_BLOCK_SIZE,
	.maxauthsize = AES_BLOCK_SIZE,

	.base = {
		.cra_name = "ccm(aes)",
		.cra_driver_name = "stm32-ccm-aes",
		.cra_priority = 200,
		.cra_flags = CRYPTO_ALG_ASYNC,
		.cra_blocksize = 1,
		.cra_ctxsize = sizeof(struct stm32_cryp_ctx),
		.cra_alignmask = 0,
		.cra_module = THIS_MODULE,
	},
},
};

static const struct stm32_cryp_caps f7_data = {
	.swap_final = true,
	.padding_wa = true,
};

static const struct stm32_cryp_caps mp1_data = {
	.swap_final = false,
	.padding_wa = false,
};

static const struct of_device_id stm32_dt_ids[] = {
	{ .compatible = "st,stm32f756-cryp", .data = &f7_data},
	{ .compatible = "st,stm32mp1-cryp", .data = &mp1_data},
	{},
};
MODULE_DEVICE_TABLE(of, stm32_dt_ids);

static int stm32_cryp_probe(struct platform_device *pdev)
{
	struct device *dev = &pdev->dev;
	struct stm32_cryp *cryp;
	struct reset_control *rst;
	int irq, ret;

	cryp = devm_kzalloc(dev, sizeof(*cryp), GFP_KERNEL);
	if (!cryp)
		return -ENOMEM;

	cryp->caps = of_device_get_match_data(dev);
	if (!cryp->caps)
		return -ENODEV;

	cryp->dev = dev;

	cryp->regs = devm_platform_ioremap_resource(pdev, 0);
	if (IS_ERR(cryp->regs))
		return PTR_ERR(cryp->regs);

	irq = platform_get_irq(pdev, 0);
	if (irq < 0)
		return irq;

	ret = devm_request_threaded_irq(dev, irq, stm32_cryp_irq,
					stm32_cryp_irq_thread, IRQF_ONESHOT,
					dev_name(dev), cryp);
	if (ret) {
		dev_err(dev, "Cannot grab IRQ\n");
		return ret;
	}

	cryp->clk = devm_clk_get(dev, NULL);
	if (IS_ERR(cryp->clk)) {
		dev_err_probe(dev, PTR_ERR(cryp->clk), "Could not get clock\n");
		return PTR_ERR(cryp->clk);
	}

	ret = clk_prepare_enable(cryp->clk);
	if (ret) {
		dev_err(cryp->dev, "Failed to enable clock\n");
		return ret;
	}

	pm_runtime_set_autosuspend_delay(dev, CRYP_AUTOSUSPEND_DELAY);
	pm_runtime_use_autosuspend(dev);

	pm_runtime_get_noresume(dev);
	pm_runtime_set_active(dev);
	pm_runtime_enable(dev);

	rst = devm_reset_control_get(dev, NULL);
	if (IS_ERR(rst)) {
		ret = PTR_ERR(rst);
		if (ret == -EPROBE_DEFER)
			goto err_rst;
	} else {
		reset_control_assert(rst);
		udelay(2);
		reset_control_deassert(rst);
	}

	platform_set_drvdata(pdev, cryp);

	spin_lock(&cryp_list.lock);
	list_add(&cryp->list, &cryp_list.dev_list);
	spin_unlock(&cryp_list.lock);

	/* Initialize crypto engine */
	cryp->engine = crypto_engine_alloc_init(dev, 1);
	if (!cryp->engine) {
		dev_err(dev, "Could not init crypto engine\n");
		ret = -ENOMEM;
		goto err_engine1;
	}

	ret = crypto_engine_start(cryp->engine);
	if (ret) {
		dev_err(dev, "Could not start crypto engine\n");
		goto err_engine2;
	}

	ret = crypto_register_skciphers(crypto_algs, ARRAY_SIZE(crypto_algs));
	if (ret) {
		dev_err(dev, "Could not register algs\n");
		goto err_algs;
	}

	ret = crypto_register_aeads(aead_algs, ARRAY_SIZE(aead_algs));
	if (ret)
		goto err_aead_algs;

	dev_info(dev, "Initialized\n");

	pm_runtime_put_sync(dev);

	return 0;

err_aead_algs:
	crypto_unregister_skciphers(crypto_algs, ARRAY_SIZE(crypto_algs));
err_algs:
err_engine2:
	crypto_engine_exit(cryp->engine);
err_engine1:
	spin_lock(&cryp_list.lock);
	list_del(&cryp->list);
	spin_unlock(&cryp_list.lock);
err_rst:
	pm_runtime_disable(dev);
	pm_runtime_put_noidle(dev);

	clk_disable_unprepare(cryp->clk);

	return ret;
}

static int stm32_cryp_remove(struct platform_device *pdev)
{
	struct stm32_cryp *cryp = platform_get_drvdata(pdev);
	int ret;

	if (!cryp)
		return -ENODEV;

	ret = pm_runtime_resume_and_get(cryp->dev);
	if (ret < 0)
		return ret;

	crypto_unregister_aeads(aead_algs, ARRAY_SIZE(aead_algs));
	crypto_unregister_skciphers(crypto_algs, ARRAY_SIZE(crypto_algs));

	crypto_engine_exit(cryp->engine);

	spin_lock(&cryp_list.lock);
	list_del(&cryp->list);
	spin_unlock(&cryp_list.lock);

	pm_runtime_disable(cryp->dev);
	pm_runtime_put_noidle(cryp->dev);

	clk_disable_unprepare(cryp->clk);

	return 0;
}

#ifdef CONFIG_PM
static int stm32_cryp_runtime_suspend(struct device *dev)
{
	struct stm32_cryp *cryp = dev_get_drvdata(dev);

	clk_disable_unprepare(cryp->clk);

	return 0;
}

static int stm32_cryp_runtime_resume(struct device *dev)
{
	struct stm32_cryp *cryp = dev_get_drvdata(dev);
	int ret;

	ret = clk_prepare_enable(cryp->clk);
	if (ret) {
		dev_err(cryp->dev, "Failed to prepare_enable clock\n");
		return ret;
	}

	return 0;
}
#endif

static const struct dev_pm_ops stm32_cryp_pm_ops = {
	SET_SYSTEM_SLEEP_PM_OPS(pm_runtime_force_suspend,
				pm_runtime_force_resume)
	SET_RUNTIME_PM_OPS(stm32_cryp_runtime_suspend,
			   stm32_cryp_runtime_resume, NULL)
};

static struct platform_driver stm32_cryp_driver = {
	.probe = stm32_cryp_probe,
	.remove = stm32_cryp_remove,
	.driver = {
		.name = DRIVER_NAME,
		.pm = &stm32_cryp_pm_ops,
		.of_match_table = stm32_dt_ids,
	},
};

module_platform_driver(stm32_cryp_driver);

MODULE_AUTHOR("Fabien Dessenne <fabien.dessenne@st.com>");
MODULE_DESCRIPTION("STMicroelectronics STM32 CRYP hardware driver");
MODULE_LICENSE("GPL");