#include <linux/errno.h>
#include <linux/scatterlist.h>
#include <linux/dma-mapping.h>
#include <linux/dmaengine.h>
#include <linux/omap-dma.h>
#include <linux/interrupt.h>
#include <linux/pm_runtime.h>
#include <crypto/aes.h>
#include <crypto/gcm.h>
#include <crypto/scatterwalk.h>
#include <crypto/skcipher.h>
#include <crypto/internal/aead.h>

#include "omap-crypto.h"
#include "omap-aes.h"

static int omap_aes_gcm_handle_queue(struct omap_aes_dev *dd,
				     struct aead_request *req);

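/*
 * Complete the current AEAD request: clear the cached scatterlists, hand
 * the result back to the crypto engine and drop the device's runtime PM
 * reference.
 */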
static void omap_aes_gcm_finish_req(struct omap_aes_dev *dd, int ret)
{
	struct aead_request *req = dd->aead_req;

	dd->in_sg = NULL;
	dd->out_sg = NULL;

	crypto_finalize_aead_request(dd->engine, req, ret);

	pm_runtime_mark_last_busy(dd->dev);
	pm_runtime_put_autosuspend(dd->dev);
}

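/*
 * DMA completion path: sync and unmap the DMA scatterlists, undo any
 * alignment copies, then finish the request.  On encryption the computed
 * tag is copied to the end of the destination buffer; on decryption the
 * auth_tag buffer (already XORed against the hardware TAG registers and
 * the tag taken from the source buffer) must be all zeroes, otherwise
 * the request fails with -EBADMSG.
 */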
static void omap_aes_gcm_done_task(struct omap_aes_dev *dd)
{
	u8 *tag;
	int alen, clen, i, ret = 0, nsg;
	struct omap_aes_reqctx *rctx;

	alen = ALIGN(dd->assoc_len, AES_BLOCK_SIZE);
	clen = ALIGN(dd->total, AES_BLOCK_SIZE);
	rctx = aead_request_ctx(dd->aead_req);

	nsg = !!(dd->assoc_len && dd->total);

	dma_sync_sg_for_device(dd->dev, dd->out_sg, dd->out_sg_len,
			       DMA_FROM_DEVICE);
	dma_unmap_sg(dd->dev, dd->in_sg, dd->in_sg_len, DMA_TO_DEVICE);
	dma_unmap_sg(dd->dev, dd->out_sg, dd->out_sg_len, DMA_FROM_DEVICE);
	omap_aes_crypt_dma_stop(dd);

	omap_crypto_cleanup(dd->out_sg, dd->orig_out,
			    dd->aead_req->assoclen, dd->total,
			    FLAGS_OUT_DATA_ST_SHIFT, dd->flags);

	if (dd->flags & FLAGS_ENCRYPT)
		scatterwalk_map_and_copy(rctx->auth_tag,
					 dd->aead_req->dst,
					 dd->total + dd->aead_req->assoclen,
					 dd->authsize, 1);

	omap_crypto_cleanup(&dd->in_sgl[0], NULL, 0, alen,
			    FLAGS_ASSOC_DATA_ST_SHIFT, dd->flags);

	omap_crypto_cleanup(&dd->in_sgl[nsg], NULL, 0, clen,
			    FLAGS_IN_DATA_ST_SHIFT, dd->flags);

	if (!(dd->flags & FLAGS_ENCRYPT)) {
		tag = (u8 *)rctx->auth_tag;
		for (i = 0; i < dd->authsize; i++) {
			if (tag[i])
				ret = -EBADMSG;
		}
	}

	omap_aes_gcm_finish_req(dd, ret);
}

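/*
 * Build the DMA scatterlists for a request.  Associated data and payload
 * are each placed into single, zero-padded (AES_BLOCK_SIZE aligned)
 * entries of dd->in_sgl via omap_crypto_align_sg(); the output
 * scatterlist is forced into a bounce buffer when operating in place or
 * when the destination had to be fast-forwarded past the associated
 * data.  For RFC4106 only the first assoclen - 8 bytes of the associated
 * data are passed to the hardware.
 */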
static int omap_aes_gcm_copy_buffers(struct omap_aes_dev *dd,
				     struct aead_request *req)
{
	int alen, clen, cryptlen, assoclen, ret;
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	unsigned int authlen = crypto_aead_authsize(aead);
	struct scatterlist *tmp, sg_arr[2];
	int nsg;
	u16 flags;

	assoclen = req->assoclen;
	cryptlen = req->cryptlen;

	if (dd->flags & FLAGS_RFC4106_GCM)
		assoclen -= 8;

	if (!(dd->flags & FLAGS_ENCRYPT))
		cryptlen -= authlen;

	alen = ALIGN(assoclen, AES_BLOCK_SIZE);
	clen = ALIGN(cryptlen, AES_BLOCK_SIZE);

	nsg = !!(assoclen && cryptlen);

	omap_aes_clear_copy_flags(dd);

	sg_init_table(dd->in_sgl, nsg + 1);
	if (assoclen) {
		tmp = req->src;
		ret = omap_crypto_align_sg(&tmp, assoclen,
					   AES_BLOCK_SIZE, dd->in_sgl,
					   OMAP_CRYPTO_COPY_DATA |
					   OMAP_CRYPTO_ZERO_BUF |
					   OMAP_CRYPTO_FORCE_SINGLE_ENTRY,
					   FLAGS_ASSOC_DATA_ST_SHIFT,
					   &dd->flags);
		if (ret)
			return ret;
	}

	if (cryptlen) {
		tmp = scatterwalk_ffwd(sg_arr, req->src, req->assoclen);

		if (nsg)
			sg_unmark_end(dd->in_sgl);

		ret = omap_crypto_align_sg(&tmp, cryptlen,
					   AES_BLOCK_SIZE, &dd->in_sgl[nsg],
					   OMAP_CRYPTO_COPY_DATA |
					   OMAP_CRYPTO_ZERO_BUF |
					   OMAP_CRYPTO_FORCE_SINGLE_ENTRY,
					   FLAGS_IN_DATA_ST_SHIFT,
					   &dd->flags);
		if (ret)
			return ret;
	}

	dd->in_sg = dd->in_sgl;
	dd->total = cryptlen;
	dd->assoc_len = assoclen;
	dd->authsize = authlen;

	dd->out_sg = req->dst;
	dd->orig_out = req->dst;

	dd->out_sg = scatterwalk_ffwd(sg_arr, req->dst, req->assoclen);

	flags = 0;
	if (req->src == req->dst || dd->out_sg == sg_arr)
		flags |= OMAP_CRYPTO_FORCE_COPY;

	if (cryptlen) {
		ret = omap_crypto_align_sg(&dd->out_sg, cryptlen,
					   AES_BLOCK_SIZE, &dd->out_sgl,
					   flags,
					   FLAGS_OUT_DATA_ST_SHIFT, &dd->flags);
		if (ret)
			return ret;
	}

	dd->in_sg_len = sg_nents_for_len(dd->in_sg, alen + clen);
	dd->out_sg_len = sg_nents_for_len(dd->out_sg, clen);

	return 0;
}

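/*
 * Encrypt the initial counter block in software using the AES library
 * key schedule kept in the tfm context; the result is later XORed with
 * the value read back from the hardware TAG registers to form the final
 * GCM tag.
 */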
static int do_encrypt_iv(struct aead_request *req, u32 *tag, u32 *iv)
{
	struct omap_aes_gcm_ctx *ctx = crypto_aead_ctx(crypto_aead_reqtfm(req));

	aes_encrypt(&ctx->actx, (u8 *)tag, (u8 *)iv);
	return 0;
}

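/*
 * DMA-out completion callback.  Fold the hardware TAG registers into the
 * pre-computed encrypted counter block held in the request context; on
 * decryption also XOR in the tag carried at the end of the source buffer
 * so that a matching tag leaves auth_tag all zero for the check in
 * omap_aes_gcm_done_task().
 */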
void omap_aes_gcm_dma_out_callback(void *data)
{
	struct omap_aes_dev *dd = data;
	struct omap_aes_reqctx *rctx;
	int i, val;
	u32 *auth_tag, tag[4];

	if (!(dd->flags & FLAGS_ENCRYPT))
		scatterwalk_map_and_copy(tag, dd->aead_req->src,
					 dd->total + dd->aead_req->assoclen,
					 dd->authsize, 0);

	rctx = aead_request_ctx(dd->aead_req);
	auth_tag = (u32 *)rctx->auth_tag;
	for (i = 0; i < 4; i++) {
		val = omap_aes_read(dd, AES_REG_TAG_N(dd, i));
		auth_tag[i] = val ^ auth_tag[i];
		if (!(dd->flags & FLAGS_ENCRYPT))
			auth_tag[i] = auth_tag[i] ^ tag[i];
	}

	omap_aes_gcm_done_task(dd);
}

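/* Queue a request on the crypto engine; a NULL request is a no-op. */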
static int omap_aes_gcm_handle_queue(struct omap_aes_dev *dd,
				     struct aead_request *req)
{
	if (req)
		return crypto_transfer_aead_request_to_engine(dd->engine, req);

	return 0;
}

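/*
 * Engine prepare step: stash the request, merge the requested
 * direction/mode into the device flags, build the DMA scatterlists and
 * program the hardware control registers.
 */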
static int omap_aes_gcm_prepare_req(struct crypto_engine *engine, void *areq)
{
	struct aead_request *req = container_of(areq, struct aead_request,
						base);
	struct omap_aes_reqctx *rctx = aead_request_ctx(req);
	struct omap_aes_dev *dd = rctx->dd;
	struct omap_aes_gcm_ctx *ctx = crypto_aead_ctx(crypto_aead_reqtfm(req));
	int err;

	dd->aead_req = req;

	rctx->mode &= FLAGS_MODE_MASK;
	dd->flags = (dd->flags & ~FLAGS_MODE_MASK) | rctx->mode;

	err = omap_aes_gcm_copy_buffers(dd, req);
	if (err)
		return err;

	dd->ctx = &ctx->octx;

	return omap_aes_write_ctrl(dd);
}

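/*
 * Common entry point for all GCM variants.  Pre-computes the encrypted
 * IV/counter block into the request context, handles the degenerate
 * zero-length case directly (the GCM tag then reduces to that encrypted
 * counter block), and otherwise queues the request on the crypto engine.
 */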
static int omap_aes_gcm_crypt(struct aead_request *req, unsigned long mode)
{
	struct omap_aes_reqctx *rctx = aead_request_ctx(req);
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	unsigned int authlen = crypto_aead_authsize(aead);
	struct omap_aes_dev *dd;
	__be32 counter = cpu_to_be32(1);
	int err, assoclen;

	memset(rctx->auth_tag, 0, sizeof(rctx->auth_tag));
	memcpy(rctx->iv + GCM_AES_IV_SIZE, &counter, 4);

	err = do_encrypt_iv(req, (u32 *)rctx->auth_tag, (u32 *)rctx->iv);
	if (err)
		return err;

	if (mode & FLAGS_RFC4106_GCM)
		assoclen = req->assoclen - 8;
	else
		assoclen = req->assoclen;
	if (assoclen + req->cryptlen == 0) {
		scatterwalk_map_and_copy(rctx->auth_tag, req->dst, 0, authlen,
					 1);
		return 0;
	}

	dd = omap_aes_find_dev(rctx);
	if (!dd)
		return -ENODEV;
	rctx->mode = mode;

	return omap_aes_gcm_handle_queue(dd, req);
}

int omap_aes_gcm_encrypt(struct aead_request *req)
{
	struct omap_aes_reqctx *rctx = aead_request_ctx(req);

	memcpy(rctx->iv, req->iv, GCM_AES_IV_SIZE);
	return omap_aes_gcm_crypt(req, FLAGS_ENCRYPT | FLAGS_GCM);
}

int omap_aes_gcm_decrypt(struct aead_request *req)
{
	struct omap_aes_reqctx *rctx = aead_request_ctx(req);

	memcpy(rctx->iv, req->iv, GCM_AES_IV_SIZE);
	return omap_aes_gcm_crypt(req, FLAGS_GCM);
}

int omap_aes_4106gcm_encrypt(struct aead_request *req)
{
	struct omap_aes_gcm_ctx *ctx = crypto_aead_ctx(crypto_aead_reqtfm(req));
	struct omap_aes_reqctx *rctx = aead_request_ctx(req);

	memcpy(rctx->iv, ctx->octx.nonce, 4);
	memcpy(rctx->iv + 4, req->iv, 8);
	return crypto_ipsec_check_assoclen(req->assoclen) ?:
	       omap_aes_gcm_crypt(req, FLAGS_ENCRYPT | FLAGS_GCM |
				  FLAGS_RFC4106_GCM);
}

int omap_aes_4106gcm_decrypt(struct aead_request *req)
{
	struct omap_aes_gcm_ctx *ctx = crypto_aead_ctx(crypto_aead_reqtfm(req));
	struct omap_aes_reqctx *rctx = aead_request_ctx(req);

	memcpy(rctx->iv, ctx->octx.nonce, 4);
	memcpy(rctx->iv + 4, req->iv, 8);
	return crypto_ipsec_check_assoclen(req->assoclen) ?:
	       omap_aes_gcm_crypt(req, FLAGS_GCM | FLAGS_RFC4106_GCM);
}

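/*
 * GCM setkey: expand the key for the software AES fallback used by
 * do_encrypt_iv() and keep a raw copy for programming the hardware.
 */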
int omap_aes_gcm_setkey(struct crypto_aead *tfm, const u8 *key,
			unsigned int keylen)
{
	struct omap_aes_gcm_ctx *ctx = crypto_aead_ctx(tfm);
	int ret;

	ret = aes_expandkey(&ctx->actx, key, keylen);
	if (ret)
		return ret;

	memcpy(ctx->octx.key, key, keylen);
	ctx->octx.keylen = keylen;

	return 0;
}

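/*
 * RFC4106 setkey: the last four bytes of the key material are the nonce
 * (salt) that forms the first part of the per-request IV.
 */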
int omap_aes_4106gcm_setkey(struct crypto_aead *tfm, const u8 *key,
			    unsigned int keylen)
{
	struct omap_aes_gcm_ctx *ctx = crypto_aead_ctx(tfm);
	int ret;

	if (keylen < 4)
		return -EINVAL;
	keylen -= 4;

	ret = aes_expandkey(&ctx->actx, key, keylen);
	if (ret)
		return ret;

	memcpy(ctx->octx.key, key, keylen);
	memcpy(ctx->octx.nonce, key + keylen, 4);
	ctx->octx.keylen = keylen;

	return 0;
}

int omap_aes_gcm_setauthsize(struct crypto_aead *tfm, unsigned int authsize)
{
	return crypto_gcm_check_authsize(authsize);
}

int omap_aes_4106gcm_setauthsize(struct crypto_aead *parent,
				 unsigned int authsize)
{
	return crypto_rfc4106_check_authsize(authsize);
}

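/*
 * Engine do_one_request step: kick off the DMA transfer, or fall
 * straight through to the completion callback when there is nothing to
 * feed to the hardware.
 */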
static int omap_aes_gcm_crypt_req(struct crypto_engine *engine, void *areq)
{
	struct aead_request *req = container_of(areq, struct aead_request,
						base);
	struct omap_aes_reqctx *rctx = aead_request_ctx(req);
	struct omap_aes_dev *dd = rctx->dd;
	int ret = 0;

	if (!dd)
		return -ENODEV;

	if (dd->in_sg_len)
		ret = omap_aes_crypt_dma_start(dd);
	else
		omap_aes_gcm_dma_out_callback(dd);

	return ret;
}

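/*
 * AEAD init: hook the crypto engine callbacks and reserve room for the
 * per-request context.
 */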
int omap_aes_gcm_cra_init(struct crypto_aead *tfm)
{
	struct omap_aes_ctx *ctx = crypto_aead_ctx(tfm);

	ctx->enginectx.op.prepare_request = omap_aes_gcm_prepare_req;
	ctx->enginectx.op.unprepare_request = NULL;
	ctx->enginectx.op.do_one_request = omap_aes_gcm_crypt_req;

	crypto_aead_set_reqsize(tfm, sizeof(struct omap_aes_reqctx));

	return 0;
}