// SPDX-License-Identifier: GPL-2.0-only
/*
 * AES GCM routines supporting the Power 7+ Nest Accelerators driver
 *
 * Copyright (C) 2012 International Business Machines Inc.
 *
 * Author: Kent Yoder <yoder1@us.ibm.com>
 */
#include <crypto/internal/aead.h>
#include <crypto/aes.h>
#include <crypto/algapi.h>
#include <crypto/gcm.h>
#include <crypto/scatterwalk.h>
#include <linux/module.h>
#include <linux/types.h>
#include <asm/vio.h>

#include "nx_csbcpb.h"
#include "nx.h"

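/*
 * The NX coprocessor uses two control blocks for GCM: one for the bulk
 * GCM pass and one for the GCA (GHASH over associated data) pass, so the
 * same AES key is programmed into both, along with the key-size-specific
 * properties entry for this transform.
 */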
static int gcm_aes_nx_set_key(struct crypto_aead *tfm,
			      const u8 *in_key,
			      unsigned int key_len)
{
	struct nx_crypto_ctx *nx_ctx = crypto_aead_ctx(tfm);
	struct nx_csbcpb *csbcpb = nx_ctx->csbcpb;
	struct nx_csbcpb *csbcpb_aead = nx_ctx->csbcpb_aead;

	nx_ctx_init(nx_ctx, HCOP_FC_AES);

	switch (key_len) {
	case AES_KEYSIZE_128:
		NX_CPB_SET_KEY_SIZE(csbcpb, NX_KS_AES_128);
		NX_CPB_SET_KEY_SIZE(csbcpb_aead, NX_KS_AES_128);
		nx_ctx->ap = &nx_ctx->props[NX_PROPS_AES_128];
		break;
	case AES_KEYSIZE_192:
		NX_CPB_SET_KEY_SIZE(csbcpb, NX_KS_AES_192);
		NX_CPB_SET_KEY_SIZE(csbcpb_aead, NX_KS_AES_192);
		nx_ctx->ap = &nx_ctx->props[NX_PROPS_AES_192];
		break;
	case AES_KEYSIZE_256:
		NX_CPB_SET_KEY_SIZE(csbcpb, NX_KS_AES_256);
		NX_CPB_SET_KEY_SIZE(csbcpb_aead, NX_KS_AES_256);
		nx_ctx->ap = &nx_ctx->props[NX_PROPS_AES_256];
		break;
	default:
		return -EINVAL;
	}

	csbcpb->cpb.hdr.mode = NX_MODE_AES_GCM;
	memcpy(csbcpb->cpb.aes_gcm.key, in_key, key_len);

	csbcpb_aead->cpb.hdr.mode = NX_MODE_AES_GCA;
	memcpy(csbcpb_aead->cpb.aes_gca.key, in_key, key_len);

	return 0;
}

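/*
 * For rfc4106(gcm(aes)), the key material is the AES key followed by a
 * 4-byte nonce (salt). Peel off the nonce, program the AES key as usual,
 * and stash the nonce for building the per-request IV.
 */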
static int gcm4106_aes_nx_set_key(struct crypto_aead *tfm,
				  const u8 *in_key,
				  unsigned int key_len)
{
	struct nx_crypto_ctx *nx_ctx = crypto_aead_ctx(tfm);
	char *nonce = nx_ctx->priv.gcm.nonce;
	int rc;

	if (key_len < 4)
		return -EINVAL;

	key_len -= 4;

	rc = gcm_aes_nx_set_key(tfm, in_key, key_len);
	if (rc)
		goto out;

	memcpy(nonce, in_key + key_len, 4);
out:
	return rc;
}

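/* RFC 4106 permits only 8-, 12- or 16-byte authentication tags. */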
static int gcm4106_aes_nx_setauthsize(struct crypto_aead *tfm,
				      unsigned int authsize)
{
	switch (authsize) {
	case 8:
	case 12:
	case 16:
		break;
	default:
		return -EINVAL;
	}

	return 0;
}

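/*
 * Hash the associated data with the GCA operation, accumulating the
 * partial GHASH across as many hardware calls as the sg-list and
 * data-length limits require. AAD of one AES block or less is copied
 * straight into *out (the caller passes the GCM control block's
 * in_pat_or_aad buffer) and consumed by the GCM pass itself.
 */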
static int nx_gca(struct nx_crypto_ctx *nx_ctx,
		  struct aead_request *req,
		  u8 *out,
		  unsigned int assoclen)
{
	int rc;
	struct nx_csbcpb *csbcpb_aead = nx_ctx->csbcpb_aead;
	struct scatter_walk walk;
	struct nx_sg *nx_sg = nx_ctx->in_sg;
	unsigned int nbytes = assoclen;
	unsigned int processed = 0, to_process;
	unsigned int max_sg_len;

	if (nbytes <= AES_BLOCK_SIZE) {
		scatterwalk_start(&walk, req->src);
		scatterwalk_copychunks(out, &walk, nbytes, SCATTERWALK_FROM_SG);
		scatterwalk_done(&walk, SCATTERWALK_FROM_SG, 0);
		return 0;
	}

	NX_CPB_FDM(csbcpb_aead) &= ~NX_FDM_CONTINUATION;

	/* page_limit: number of sg entries that fit on one page */
	max_sg_len = min_t(u64, nx_driver.of.max_sg_len/sizeof(struct nx_sg),
			   nx_ctx->ap->sglen);
	max_sg_len = min_t(u64, max_sg_len,
			   nx_ctx->ap->databytelen/NX_PAGE_SIZE);

	do {
		/*
		 * to_process: the data chunk to process in this update.
		 * This value is bound by sg list limits.
		 */
		to_process = min_t(u64, nbytes - processed,
				   nx_ctx->ap->databytelen);
		to_process = min_t(u64, to_process,
				   NX_PAGE_SIZE * (max_sg_len - 1));

		nx_sg = nx_walk_and_build(nx_ctx->in_sg, max_sg_len,
					  req->src, processed, &to_process);

		if ((to_process + processed) < nbytes)
			NX_CPB_FDM(csbcpb_aead) |= NX_FDM_INTERMEDIATE;
		else
			NX_CPB_FDM(csbcpb_aead) &= ~NX_FDM_INTERMEDIATE;

		nx_ctx->op_aead.inlen = (nx_ctx->in_sg - nx_sg)
					* sizeof(struct nx_sg);

		rc = nx_hcall_sync(nx_ctx, &nx_ctx->op_aead,
				   req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP);
		if (rc)
			return rc;

		/* Feed the partial hash back in for the next chunk */
		memcpy(csbcpb_aead->cpb.aes_gca.in_pat,
		       csbcpb_aead->cpb.aes_gca.out_pat,
		       AES_BLOCK_SIZE);
		NX_CPB_FDM(csbcpb_aead) |= NX_FDM_CONTINUATION;

		atomic_inc(&(nx_ctx->stats->aes_ops));
		atomic64_add(assoclen, &(nx_ctx->stats->aes_bytes));

		processed += to_process;
	} while (processed < nbytes);

	memcpy(out, csbcpb_aead->cpb.aes_gca.out_pat, AES_BLOCK_SIZE);

	return rc;
}

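/*
 * GMAC path: authenticate associated data when there is no payload to
 * encrypt or decrypt. The control block is temporarily switched to GMAC
 * mode and restored to GCM mode on exit.
 */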
static int gmac(struct aead_request *req, const u8 *iv, unsigned int assoclen)
{
	int rc;
	struct nx_crypto_ctx *nx_ctx =
		crypto_aead_ctx(crypto_aead_reqtfm(req));
	struct nx_csbcpb *csbcpb = nx_ctx->csbcpb;
	struct nx_sg *nx_sg;
	unsigned int nbytes = assoclen;
	unsigned int processed = 0, to_process;
	unsigned int max_sg_len;

	/* Change to GMAC mode */
	csbcpb->cpb.hdr.mode = NX_MODE_AES_GMAC;

	NX_CPB_FDM(csbcpb) &= ~NX_FDM_CONTINUATION;

	/* page_limit: number of sg entries that fit on one page */
	max_sg_len = min_t(u64, nx_driver.of.max_sg_len/sizeof(struct nx_sg),
			   nx_ctx->ap->sglen);
	max_sg_len = min_t(u64, max_sg_len,
			   nx_ctx->ap->databytelen/NX_PAGE_SIZE);

	/* Copy IV */
	memcpy(csbcpb->cpb.aes_gcm.iv_or_cnt, iv, AES_BLOCK_SIZE);

	do {
		/*
		 * to_process: the data chunk to process in this update.
		 * This value is bound by sg list limits.
		 */
		to_process = min_t(u64, nbytes - processed,
				   nx_ctx->ap->databytelen);
		to_process = min_t(u64, to_process,
				   NX_PAGE_SIZE * (max_sg_len - 1));

		nx_sg = nx_walk_and_build(nx_ctx->in_sg, max_sg_len,
					  req->src, processed, &to_process);

		if ((to_process + processed) < nbytes)
			NX_CPB_FDM(csbcpb) |= NX_FDM_INTERMEDIATE;
		else
			NX_CPB_FDM(csbcpb) &= ~NX_FDM_INTERMEDIATE;

		nx_ctx->op.inlen = (nx_ctx->in_sg - nx_sg)
				   * sizeof(struct nx_sg);

		csbcpb->cpb.aes_gcm.bit_length_data = 0;
		csbcpb->cpb.aes_gcm.bit_length_aad = 8 * nbytes;

		rc = nx_hcall_sync(nx_ctx, &nx_ctx->op,
				   req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP);
		if (rc)
			goto out;

		/* Chain the partial hash and S0 into the next call */
		memcpy(csbcpb->cpb.aes_gcm.in_pat_or_aad,
		       csbcpb->cpb.aes_gcm.out_pat_or_mac, AES_BLOCK_SIZE);
		memcpy(csbcpb->cpb.aes_gcm.in_s0,
		       csbcpb->cpb.aes_gcm.out_s0, AES_BLOCK_SIZE);

		NX_CPB_FDM(csbcpb) |= NX_FDM_CONTINUATION;

		atomic_inc(&(nx_ctx->stats->aes_ops));
		atomic64_add(assoclen, &(nx_ctx->stats->aes_bytes));

		processed += to_process;
	} while (processed < nbytes);

out:
	/* Restore GCM mode */
	csbcpb->cpb.hdr.mode = NX_MODE_AES_GCM;
	return rc;
}

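/*
 * Zero-length plaintext with zero-length AAD: per the GCM definition the
 * tag reduces to E(K, Y0), so a single ECB operation over the initial
 * counter block produces the MAC directly.
 */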
static int gcm_empty(struct aead_request *req, const u8 *iv, int enc)
{
	int rc;
	struct nx_crypto_ctx *nx_ctx =
		crypto_aead_ctx(crypto_aead_reqtfm(req));
	struct nx_csbcpb *csbcpb = nx_ctx->csbcpb;
	char out[AES_BLOCK_SIZE];
	struct nx_sg *in_sg, *out_sg;
	int len;

	/*
	 * For scenarios where the input message is zero length, the input
	 * to the cipher is the IV/counter block itself, processed as one
	 * 16-byte block.
	 */

	/* Change to ECB mode */
	csbcpb->cpb.hdr.mode = NX_MODE_AES_ECB;
	memcpy(csbcpb->cpb.aes_ecb.key, csbcpb->cpb.aes_gcm.key,
	       sizeof(csbcpb->cpb.aes_ecb.key));
	if (enc)
		NX_CPB_FDM(csbcpb) |= NX_FDM_ENDE_ENCRYPT;
	else
		NX_CPB_FDM(csbcpb) &= ~NX_FDM_ENDE_ENCRYPT;

	len = AES_BLOCK_SIZE;

	/* Encrypt the counter/IV */
	in_sg = nx_build_sg_list(nx_ctx->in_sg, (u8 *) iv,
				 &len, nx_ctx->ap->sglen);

	if (len != AES_BLOCK_SIZE)
		return -EINVAL;

	len = sizeof(out);
	out_sg = nx_build_sg_list(nx_ctx->out_sg, (u8 *) out, &len,
				  nx_ctx->ap->sglen);

	if (len != sizeof(out))
		return -EINVAL;

	nx_ctx->op.inlen = (nx_ctx->in_sg - in_sg) * sizeof(struct nx_sg);
	nx_ctx->op.outlen = (nx_ctx->out_sg - out_sg) * sizeof(struct nx_sg);

	rc = nx_hcall_sync(nx_ctx, &nx_ctx->op,
			   req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP);
	if (rc)
		goto out;
	atomic_inc(&(nx_ctx->stats->aes_ops));

	/* Copy out the auth tag */
	memcpy(csbcpb->cpb.aes_gcm.out_pat_or_mac, out,
	       crypto_aead_authsize(crypto_aead_reqtfm(req)));
out:
	/* Restore GCM mode */
	csbcpb->cpb.hdr.mode = NX_MODE_AES_GCM;

	/*
	 * The ECB key occupies the same CPB region as the GCM AAD and
	 * counter fields, so clear it before returning to GCM processing.
	 */
	memset(csbcpb->cpb.aes_ecb.key, 0, sizeof(csbcpb->cpb.aes_ecb.key));

	return rc;
}

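/*
 * Main GCM worker: hash the AAD, then walk the payload in hardware-sized
 * chunks, chaining the counter, partial hash and S0 between calls.
 * Finally write out (encrypt) or verify (decrypt) the authentication tag.
 */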
static int gcm_aes_nx_crypt(struct aead_request *req, int enc,
			    unsigned int assoclen)
{
	struct nx_crypto_ctx *nx_ctx =
		crypto_aead_ctx(crypto_aead_reqtfm(req));
	struct nx_gcm_rctx *rctx = aead_request_ctx(req);
	struct nx_csbcpb *csbcpb = nx_ctx->csbcpb;
	unsigned int nbytes = req->cryptlen;
	unsigned int processed = 0, to_process;
	unsigned long irq_flags;
	int rc = -EINVAL;

	spin_lock_irqsave(&nx_ctx->lock, irq_flags);

	/* initialize the counter */
	*(u32 *)&rctx->iv[NX_GCM_CTR_OFFSET] = 1;

	if (nbytes == 0) {
		if (assoclen == 0)
			rc = gcm_empty(req, rctx->iv, enc);
		else
			rc = gmac(req, rctx->iv, assoclen);
		if (rc)
			goto out;
		else
			goto mac;
	}

	/* Process associated data */
	csbcpb->cpb.aes_gcm.bit_length_aad = assoclen * 8;
	if (assoclen) {
		rc = nx_gca(nx_ctx, req, csbcpb->cpb.aes_gcm.in_pat_or_aad,
			    assoclen);
		if (rc)
			goto out;
	}

	/* Set flags for encryption */
	NX_CPB_FDM(csbcpb) &= ~NX_FDM_CONTINUATION;
	if (enc) {
		NX_CPB_FDM(csbcpb) |= NX_FDM_ENDE_ENCRYPT;
	} else {
		NX_CPB_FDM(csbcpb) &= ~NX_FDM_ENDE_ENCRYPT;
		nbytes -= crypto_aead_authsize(crypto_aead_reqtfm(req));
	}

	do {
		to_process = nbytes - processed;

		csbcpb->cpb.aes_gcm.bit_length_data = nbytes * 8;
		rc = nx_build_sg_lists(nx_ctx, rctx->iv, req->dst,
				       req->src, &to_process,
				       processed + req->assoclen,
				       csbcpb->cpb.aes_gcm.iv_or_cnt);

		if (rc)
			goto out;

		if ((to_process + processed) < nbytes)
			NX_CPB_FDM(csbcpb) |= NX_FDM_INTERMEDIATE;
		else
			NX_CPB_FDM(csbcpb) &= ~NX_FDM_INTERMEDIATE;

		rc = nx_hcall_sync(nx_ctx, &nx_ctx->op,
				   req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP);
		if (rc)
			goto out;

		/* Chain the counter, partial hash and S0 into the next call */
		memcpy(rctx->iv, csbcpb->cpb.aes_gcm.out_cnt, AES_BLOCK_SIZE);
		memcpy(csbcpb->cpb.aes_gcm.in_pat_or_aad,
		       csbcpb->cpb.aes_gcm.out_pat_or_mac, AES_BLOCK_SIZE);
		memcpy(csbcpb->cpb.aes_gcm.in_s0,
		       csbcpb->cpb.aes_gcm.out_s0, AES_BLOCK_SIZE);

		NX_CPB_FDM(csbcpb) |= NX_FDM_CONTINUATION;

		atomic_inc(&(nx_ctx->stats->aes_ops));
		atomic64_add(be32_to_cpu(csbcpb->csb.processed_byte_count),
			     &(nx_ctx->stats->aes_bytes));

		processed += to_process;
	} while (processed < nbytes);

mac:
	if (enc) {
		/* copy out the auth tag */
		scatterwalk_map_and_copy(
			csbcpb->cpb.aes_gcm.out_pat_or_mac,
			req->dst, req->assoclen + nbytes,
			crypto_aead_authsize(crypto_aead_reqtfm(req)),
			SCATTERWALK_TO_SG);
	} else {
		u8 *itag = nx_ctx->priv.gcm.iauth_tag;
		u8 *otag = csbcpb->cpb.aes_gcm.out_pat_or_mac;

		/* compare the computed tag with the one in the request */
		scatterwalk_map_and_copy(
			itag, req->src, req->assoclen + nbytes,
			crypto_aead_authsize(crypto_aead_reqtfm(req)),
			SCATTERWALK_FROM_SG);
		rc = crypto_memneq(itag, otag,
				   crypto_aead_authsize(crypto_aead_reqtfm(req))) ?
		     -EBADMSG : 0;
	}
out:
	spin_unlock_irqrestore(&nx_ctx->lock, irq_flags);
	return rc;
}

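/*
 * gcm(aes) entry points: copy the caller's 12-byte IV into the request
 * context; the 4-byte counter portion of the 16-byte IV block is
 * initialized in gcm_aes_nx_crypt().
 */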
static int gcm_aes_nx_encrypt(struct aead_request *req)
{
	struct nx_gcm_rctx *rctx = aead_request_ctx(req);
	char *iv = rctx->iv;

	memcpy(iv, req->iv, GCM_AES_IV_SIZE);

	return gcm_aes_nx_crypt(req, 1, req->assoclen);
}

static int gcm_aes_nx_decrypt(struct aead_request *req)
{
	struct nx_gcm_rctx *rctx = aead_request_ctx(req);
	char *iv = rctx->iv;

	memcpy(iv, req->iv, GCM_AES_IV_SIZE);

	return gcm_aes_nx_crypt(req, 0, req->assoclen);
}

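/*
 * rfc4106(gcm(aes)) entry points: the IV is the 4-byte nonce saved at
 * setkey time followed by the 8-byte per-request IV. Eight bytes of the
 * associated-data region carry that IV rather than AAD proper, hence the
 * req->assoclen - 8 passed down (and the minimum-length check).
 */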
static int gcm4106_aes_nx_encrypt(struct aead_request *req)
{
	struct nx_crypto_ctx *nx_ctx =
		crypto_aead_ctx(crypto_aead_reqtfm(req));
	struct nx_gcm_rctx *rctx = aead_request_ctx(req);
	char *iv = rctx->iv;
	char *nonce = nx_ctx->priv.gcm.nonce;

	memcpy(iv, nonce, NX_GCM4106_NONCE_LEN);
	memcpy(iv + NX_GCM4106_NONCE_LEN, req->iv, 8);

	if (req->assoclen < 8)
		return -EINVAL;

	return gcm_aes_nx_crypt(req, 1, req->assoclen - 8);
}

static int gcm4106_aes_nx_decrypt(struct aead_request *req)
{
	struct nx_crypto_ctx *nx_ctx =
		crypto_aead_ctx(crypto_aead_reqtfm(req));
	struct nx_gcm_rctx *rctx = aead_request_ctx(req);
	char *iv = rctx->iv;
	char *nonce = nx_ctx->priv.gcm.nonce;

	memcpy(iv, nonce, NX_GCM4106_NONCE_LEN);
	memcpy(iv + NX_GCM4106_NONCE_LEN, req->iv, 8);

	if (req->assoclen < 8)
		return -EINVAL;

	return gcm_aes_nx_crypt(req, 0, req->assoclen - 8);
}

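/*
 * Algorithm descriptors for the two supported transforms. Both share the
 * same context init/exit hooks; the structs are non-static because actual
 * registration happens elsewhere in the driver (presumably the probe path
 * in nx.c).
 */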
struct aead_alg nx_gcm_aes_alg = {
	.base = {
		.cra_name        = "gcm(aes)",
		.cra_driver_name = "gcm-aes-nx",
		.cra_priority    = 300,
		.cra_blocksize   = 1,
		.cra_ctxsize     = sizeof(struct nx_crypto_ctx),
		.cra_module      = THIS_MODULE,
	},
	.init        = nx_crypto_ctx_aes_gcm_init,
	.exit        = nx_crypto_ctx_aead_exit,
	.ivsize      = GCM_AES_IV_SIZE,
	.maxauthsize = AES_BLOCK_SIZE,
	.setkey      = gcm_aes_nx_set_key,
	.encrypt     = gcm_aes_nx_encrypt,
	.decrypt     = gcm_aes_nx_decrypt,
};

struct aead_alg nx_gcm4106_aes_alg = {
	.base = {
		.cra_name        = "rfc4106(gcm(aes))",
		.cra_driver_name = "rfc4106-gcm-aes-nx",
		.cra_priority    = 300,
		.cra_blocksize   = 1,
		.cra_ctxsize     = sizeof(struct nx_crypto_ctx),
		.cra_module      = THIS_MODULE,
	},
	.init        = nx_crypto_ctx_aes_gcm_init,
	.exit        = nx_crypto_ctx_aead_exit,
	.ivsize      = GCM_RFC4106_IV_SIZE,
	.maxauthsize = AES_BLOCK_SIZE,
	.setkey      = gcm4106_aes_nx_set_key,
	.setauthsize = gcm4106_aes_nx_setauthsize,
	.encrypt     = gcm4106_aes_nx_encrypt,
	.decrypt     = gcm4106_aes_nx_decrypt,
};