// SPDX-License-Identifier: GPL-2.0-only
/**
 * AES XCBC routines supporting the Power 7+ Nest Accelerators driver
 *
 * Copyright (C) 2011-2012 International Business Machines Inc.
 *
 * Author: Kent Yoder <yoder1@us.ibm.com>
 */
#include <crypto/internal/hash.h>
#include <crypto/aes.h>
#include <crypto/algapi.h>
#include <linux/module.h>
#include <linux/types.h>
#include <linux/crypto.h>
#include <asm/vio.h>

#include "nx_csbcpb.h"
#include "nx.h"

/* XCBC state carried in the shash descriptor between operations */
struct xcbc_state {
	u8 state[AES_BLOCK_SIZE];	/* running CV / MAC value */
	unsigned int count;		/* bytes held in buffer */
	u8 buffer[AES_BLOCK_SIZE];	/* partial block awaiting more data */
};

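/* RFC 3566 XCBC is only defined for 128-bit AES keys, so anything else is
 * rejected; the key is programmed directly into the coprocessor control
 * block. */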
static int nx_xcbc_set_key(struct crypto_shash *desc,
			   const u8            *in_key,
			   unsigned int         key_len)
{
	struct nx_crypto_ctx *nx_ctx = crypto_shash_ctx(desc);
	struct nx_csbcpb *csbcpb = nx_ctx->csbcpb;

	switch (key_len) {
	case AES_KEYSIZE_128:
		nx_ctx->ap = &nx_ctx->props[NX_PROPS_AES_128];
		break;
	default:
		return -EINVAL;
	}

	memcpy(csbcpb->cpb.aes_xcbc.key, in_key, key_len);

	return 0;
}

/*
 * Based on RFC 3566, for a zero-length message:
 *
 * n = 1
 * K1 = E(K, 0x01010101010101010101010101010101)
 * K3 = E(K, 0x03030303030303030303030303030303)
 * E[0] = 0x00000000000000000000000000000000
 * M[1] = 0x80000000000000000000000000000000 (0-length message with padding)
 * E[1] = E(K1, M[1] ^ E[0] ^ K3)
 * Tag = E[1]
 */
static int nx_xcbc_empty(struct shash_desc *desc, u8 *out)
{
	struct nx_crypto_ctx *nx_ctx = crypto_tfm_ctx(&desc->tfm->base);
	struct nx_csbcpb *csbcpb = nx_ctx->csbcpb;
	struct nx_sg *in_sg, *out_sg;
	u8 keys[2][AES_BLOCK_SIZE];
	u8 key[32];
	int rc = 0;
	int len;

	/* Switch the hardware into ECB mode, saving the XCBC key so it
	 * can be restored before returning. */
	csbcpb->cpb.hdr.mode = NX_MODE_AES_ECB;
	memcpy(key, csbcpb->cpb.aes_xcbc.key, AES_BLOCK_SIZE);
	memcpy(csbcpb->cpb.aes_ecb.key, key, AES_BLOCK_SIZE);
	NX_CPB_FDM(csbcpb) |= NX_FDM_ENDE_ENCRYPT;

	/* K1 and K3 base patterns from RFC 3566 */
	memset(keys[0], 0x01, sizeof(keys[0]));
	memset(keys[1], 0x03, sizeof(keys[1]));

	len = sizeof(keys);
	/* Generate K1 and K3 by encrypting the two patterns in place.
	 * On error, goto out (not a bare return) so the XCBC mode and key
	 * are restored before we hand the csbcpb back. */
	in_sg = nx_build_sg_list(nx_ctx->in_sg, (u8 *) keys, &len,
				 nx_ctx->ap->sglen);
	if (len != sizeof(keys)) {
		rc = -EINVAL;
		goto out;
	}

	out_sg = nx_build_sg_list(nx_ctx->out_sg, (u8 *) keys, &len,
				  nx_ctx->ap->sglen);
	if (len != sizeof(keys)) {
		rc = -EINVAL;
		goto out;
	}

	nx_ctx->op.inlen = (nx_ctx->in_sg - in_sg) * sizeof(struct nx_sg);
	nx_ctx->op.outlen = (nx_ctx->out_sg - out_sg) * sizeof(struct nx_sg);

	rc = nx_hcall_sync(nx_ctx, &nx_ctx->op, 0);
	if (rc)
		goto out;
	atomic_inc(&(nx_ctx->stats->aes_ops));

	/* XOR K3 with the padding for a zero-length message */
	keys[1][0] ^= 0x80;

	len = sizeof(keys[1]);

	/* Encrypt the final result under K1 to produce the tag */
	memcpy(csbcpb->cpb.aes_ecb.key, keys[0], AES_BLOCK_SIZE);
	in_sg = nx_build_sg_list(nx_ctx->in_sg, (u8 *) keys[1], &len,
				 nx_ctx->ap->sglen);
	if (len != sizeof(keys[1])) {
		rc = -EINVAL;
		goto out;
	}

	len = AES_BLOCK_SIZE;
	out_sg = nx_build_sg_list(nx_ctx->out_sg, out, &len,
				  nx_ctx->ap->sglen);
	if (len != AES_BLOCK_SIZE) {
		rc = -EINVAL;
		goto out;
	}

	nx_ctx->op.inlen = (nx_ctx->in_sg - in_sg) * sizeof(struct nx_sg);
	nx_ctx->op.outlen = (nx_ctx->out_sg - out_sg) * sizeof(struct nx_sg);

	rc = nx_hcall_sync(nx_ctx, &nx_ctx->op, 0);
	if (rc)
		goto out;
	atomic_inc(&(nx_ctx->stats->aes_ops));

out:
	/* Restore XCBC mode and the original key */
	csbcpb->cpb.hdr.mode = NX_MODE_AES_XCBC_MAC;
	memcpy(csbcpb->cpb.aes_xcbc.key, key, AES_BLOCK_SIZE);
	NX_CPB_FDM(csbcpb) &= ~NX_FDM_ENDE_ENCRYPT;

	return rc;
}

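/* cra_init handler: performs the common NX context setup, then programs the
 * control block for 128-bit AES in XCBC-MAC mode. */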
static int nx_crypto_ctx_aes_xcbc_init2(struct crypto_tfm *tfm)
{
	struct nx_crypto_ctx *nx_ctx = crypto_tfm_ctx(tfm);
	struct nx_csbcpb *csbcpb = nx_ctx->csbcpb;
	int err;

	err = nx_crypto_ctx_aes_xcbc_init(tfm);
	if (err)
		return err;

	nx_ctx_init(nx_ctx, HCOP_FC_AES);

	NX_CPB_SET_KEY_SIZE(csbcpb, NX_KS_AES_128);
	csbcpb->cpb.hdr.mode = NX_MODE_AES_XCBC_MAC;

	return 0;
}

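/* Start a new digest: zero the buffered byte count, the partial-block buffer
 * and the running state. */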
static int nx_xcbc_init(struct shash_desc *desc)
{
	struct xcbc_state *sctx = shash_desc_ctx(desc);

	memset(sctx, 0, sizeof *sctx);

	return 0;
}

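/* Buffer data until more than one block is available, then hand whole blocks
 * to the coprocessor. At least one byte is always held back so that final()
 * has data to finish the MAC with. */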
static int nx_xcbc_update(struct shash_desc *desc,
			  const u8          *data,
			  unsigned int       len)
{
	struct xcbc_state *sctx = shash_desc_ctx(desc);
	struct nx_crypto_ctx *nx_ctx = crypto_tfm_ctx(&desc->tfm->base);
	struct nx_csbcpb *csbcpb = nx_ctx->csbcpb;
	struct nx_sg *in_sg;
	struct nx_sg *out_sg;
	u32 to_process = 0, leftover, total;
	unsigned int max_sg_len;
	unsigned long irq_flags;
	int rc = 0;
	int data_len;

	spin_lock_irqsave(&nx_ctx->lock, irq_flags);

	total = sctx->count + len;

	/* 2 cases for total data len:
	 *  1: <= AES_BLOCK_SIZE: copy into buffer, return 0
	 *  2: > AES_BLOCK_SIZE: process X blocks, copy in leftover
	 */
	if (total <= AES_BLOCK_SIZE) {
		memcpy(sctx->buffer + sctx->count, data, len);
		sctx->count += len;
		goto out;
	}

	in_sg = nx_ctx->in_sg;
	max_sg_len = min_t(u64, nx_driver.of.max_sg_len/sizeof(struct nx_sg),
			   nx_ctx->ap->sglen);
	max_sg_len = min_t(u64, max_sg_len,
			   nx_ctx->ap->databytelen/NX_PAGE_SIZE);

	/* the MAC/CV output is always a single AES block; the length actually
	 * mapped comes back in data_len, which the check below must test */
	data_len = AES_BLOCK_SIZE;
	out_sg = nx_build_sg_list(nx_ctx->out_sg, (u8 *)sctx->state,
				  &data_len, nx_ctx->ap->sglen);
	if (data_len != AES_BLOCK_SIZE) {
		rc = -EINVAL;
		goto out;
	}

	nx_ctx->op.outlen = (nx_ctx->out_sg - out_sg) * sizeof(struct nx_sg);

	do {
		to_process = total - to_process;
		to_process = to_process & ~(AES_BLOCK_SIZE - 1);

		leftover = total - to_process;

		/* the hardware will not accept a 0 byte operation for this
		 * algorithm and the operation MUST be finalized to be correct.
		 * So if we happen to get an update that falls on a block sized
		 * boundary, we must save off the last block to finalize with
		 * later. */
		if (!leftover) {
			to_process -= AES_BLOCK_SIZE;
			leftover = AES_BLOCK_SIZE;
		}

		/* any bytes buffered by a previous update go in first */
		if (sctx->count) {
			data_len = sctx->count;
			in_sg = nx_build_sg_list(nx_ctx->in_sg,
						 (u8 *) sctx->buffer,
						 &data_len,
						 max_sg_len);
			if (data_len != sctx->count) {
				rc = -EINVAL;
				goto out;
			}
		}

		data_len = to_process - sctx->count;
		in_sg = nx_build_sg_list(in_sg,
					 (u8 *) data,
					 &data_len,
					 max_sg_len);
		if (data_len != to_process - sctx->count) {
			rc = -EINVAL;
			goto out;
		}

		nx_ctx->op.inlen = (nx_ctx->in_sg - in_sg) *
					sizeof(struct nx_sg);

		/* we've hit the nx chip previously and we're updating again,
		 * so copy over the partial digest */
		if (NX_CPB_FDM(csbcpb) & NX_FDM_CONTINUATION) {
			memcpy(csbcpb->cpb.aes_xcbc.cv,
			       csbcpb->cpb.aes_xcbc.out_cv_mac,
			       AES_BLOCK_SIZE);
		}

		NX_CPB_FDM(csbcpb) |= NX_FDM_INTERMEDIATE;
		if (!nx_ctx->op.inlen || !nx_ctx->op.outlen) {
			rc = -EINVAL;
			goto out;
		}

		rc = nx_hcall_sync(nx_ctx, &nx_ctx->op, 0);
		if (rc)
			goto out;

		atomic_inc(&(nx_ctx->stats->aes_ops));

		/* everything after the first update is continuation */
		NX_CPB_FDM(csbcpb) |= NX_FDM_CONTINUATION;

		total -= to_process;
		data += to_process - sctx->count;
		sctx->count = 0;
		in_sg = nx_ctx->in_sg;
	} while (leftover > AES_BLOCK_SIZE);

	/* copy the leftover back into the state struct */
	memcpy(sctx->buffer, data, leftover);
	sctx->count = leftover;

out:
	spin_unlock_irqrestore(&nx_ctx->lock, irq_flags);
	return rc;
}

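/* Finalize the MAC: flush the buffered tail through the coprocessor (or fall
 * back to nx_xcbc_empty() for a zero-length message) and copy the resulting
 * CV/MAC into "out". */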
static int nx_xcbc_final(struct shash_desc *desc, u8 *out)
{
	struct xcbc_state *sctx = shash_desc_ctx(desc);
	struct nx_crypto_ctx *nx_ctx = crypto_tfm_ctx(&desc->tfm->base);
	struct nx_csbcpb *csbcpb = nx_ctx->csbcpb;
	struct nx_sg *in_sg, *out_sg;
	unsigned long irq_flags;
	int rc = 0;
	int len;

	spin_lock_irqsave(&nx_ctx->lock, irq_flags);

	if (NX_CPB_FDM(csbcpb) & NX_FDM_CONTINUATION) {
		/* we've hit the nx chip previously, now we're finalizing,
		 * so copy over the partial digest */
		memcpy(csbcpb->cpb.aes_xcbc.cv,
		       csbcpb->cpb.aes_xcbc.out_cv_mac, AES_BLOCK_SIZE);
	} else if (sctx->count == 0) {
		/*
		 * we've never seen an update, so this is a 0 byte op. The
		 * hardware cannot handle a 0 byte op, so just ECB to
		 * generate the hash.
		 */
		rc = nx_xcbc_empty(desc, out);
		goto out;
	}

	/* final is represented by continuing the operation and indicating
	 * that this is not an intermediate operation */
	NX_CPB_FDM(csbcpb) &= ~NX_FDM_INTERMEDIATE;

	len = sctx->count;
	in_sg = nx_build_sg_list(nx_ctx->in_sg, (u8 *)sctx->buffer,
				 &len, nx_ctx->ap->sglen);
	if (len != sctx->count) {
		rc = -EINVAL;
		goto out;
	}

	len = AES_BLOCK_SIZE;
	out_sg = nx_build_sg_list(nx_ctx->out_sg, out, &len,
				  nx_ctx->ap->sglen);
	if (len != AES_BLOCK_SIZE) {
		rc = -EINVAL;
		goto out;
	}

	nx_ctx->op.inlen = (nx_ctx->in_sg - in_sg) * sizeof(struct nx_sg);
	nx_ctx->op.outlen = (nx_ctx->out_sg - out_sg) * sizeof(struct nx_sg);

	if (!nx_ctx->op.outlen) {
		rc = -EINVAL;
		goto out;
	}

	rc = nx_hcall_sync(nx_ctx, &nx_ctx->op, 0);
	if (rc)
		goto out;

	atomic_inc(&(nx_ctx->stats->aes_ops));

	memcpy(out, csbcpb->cpb.aes_xcbc.out_cv_mac, AES_BLOCK_SIZE);
out:
	spin_unlock_irqrestore(&nx_ctx->lock, irq_flags);
	return rc;
}

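/* crypto API registration data: exposes this driver as a priority-300
 * implementation of "xcbc(aes)". */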
struct shash_alg nx_shash_aes_xcbc_alg = {
	.digestsize = AES_BLOCK_SIZE,
	.init       = nx_xcbc_init,
	.update     = nx_xcbc_update,
	.final      = nx_xcbc_final,
	.setkey     = nx_xcbc_set_key,
	.descsize   = sizeof(struct xcbc_state),
	.statesize  = sizeof(struct xcbc_state),
	.base       = {
		.cra_name        = "xcbc(aes)",
		.cra_driver_name = "xcbc-aes-nx",
		.cra_priority    = 300,
		.cra_blocksize   = AES_BLOCK_SIZE,
		.cra_module      = THIS_MODULE,
		.cra_ctxsize     = sizeof(struct nx_crypto_ctx),
		.cra_init        = nx_crypto_ctx_aes_xcbc_init2,
		.cra_exit        = nx_crypto_ctx_exit,
	}
};