// SPDX-License-Identifier: GPL-2.0-only
/*
 * SHA-256 routines supporting the Power 7+ Nest Accelerators driver
 *
 * Copyright (C) 2011-2012 International Business Machines Inc.
 *
 * Author: Kent Yoder <yoder1@us.ibm.com>
 */

#include <crypto/internal/hash.h>
#include <crypto/sha2.h>
#include <linux/module.h>
#include <asm/vio.h>
#include <asm/byteorder.h>

#include "nx_csbcpb.h"
#include "nx.h"

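/*
 * SHA-256 state kept in the big-endian layout the NX coprocessor reads and
 * writes, so no byte swapping is needed around the hcall.
 */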
struct sha256_state_be {
	__be32 state[SHA256_DIGEST_SIZE / 4];
	u64 count;
	u8 buf[SHA256_BLOCK_SIZE];
};

static int nx_crypto_ctx_sha256_init(struct crypto_tfm *tfm)
{
	struct nx_crypto_ctx *nx_ctx = crypto_tfm_ctx(tfm);
	int err;

	err = nx_crypto_ctx_sha_init(tfm);
	if (err)
		return err;

	nx_ctx_init(nx_ctx, HCOP_FC_SHA);

	nx_ctx->ap = &nx_ctx->props[NX_PROPS_SHA256];

	NX_CPB_SET_DIGEST_SIZE(nx_ctx->csbcpb, NX_DS_SHA256);

	return 0;
}

static int nx_sha256_init(struct shash_desc *desc)
{
	struct sha256_state_be *sctx = shash_desc_ctx(desc);

	memset(sctx, 0, sizeof(*sctx));

	sctx->state[0] = __cpu_to_be32(SHA256_H0);
	sctx->state[1] = __cpu_to_be32(SHA256_H1);
	sctx->state[2] = __cpu_to_be32(SHA256_H2);
	sctx->state[3] = __cpu_to_be32(SHA256_H3);
	sctx->state[4] = __cpu_to_be32(SHA256_H4);
	sctx->state[5] = __cpu_to_be32(SHA256_H5);
	sctx->state[6] = __cpu_to_be32(SHA256_H6);
	sctx->state[7] = __cpu_to_be32(SHA256_H7);
	sctx->count = 0;

	return 0;
}

static int nx_sha256_update(struct shash_desc *desc, const u8 *data,
			    unsigned int len)
{
	struct sha256_state_be *sctx = shash_desc_ctx(desc);
	struct nx_crypto_ctx *nx_ctx = crypto_tfm_ctx(&desc->tfm->base);
	struct nx_csbcpb *csbcpb = (struct nx_csbcpb *)nx_ctx->csbcpb;
	struct nx_sg *out_sg;
	u64 to_process = 0, leftover, total;
	unsigned long irq_flags;
	int rc = 0;
	int data_len;
	u32 max_sg_len;
	u64 buf_len = (sctx->count % SHA256_BLOCK_SIZE);

	spin_lock_irqsave(&nx_ctx->lock, irq_flags);

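	/* Two cases for the total data length:
	 *  1: total < SHA256_BLOCK_SIZE: just buffer the data and return.
	 *  2: total >= SHA256_BLOCK_SIZE: hand whole blocks to the NX unit
	 *     and buffer the leftover.
	 */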
	total = (sctx->count % SHA256_BLOCK_SIZE) + len;
	if (total < SHA256_BLOCK_SIZE) {
		memcpy(sctx->buf + buf_len, data, len);
		sctx->count += len;
		goto out;
	}

	memcpy(csbcpb->cpb.sha256.message_digest, sctx->state, SHA256_DIGEST_SIZE);
	NX_CPB_FDM(csbcpb) |= NX_FDM_INTERMEDIATE;
	NX_CPB_FDM(csbcpb) |= NX_FDM_CONTINUATION;

	max_sg_len = min_t(u64, nx_ctx->ap->sglen,
			   nx_driver.of.max_sg_len / sizeof(struct nx_sg));
	max_sg_len = min_t(u64, max_sg_len,
			   nx_ctx->ap->databytelen / NX_PAGE_SIZE);

	data_len = SHA256_DIGEST_SIZE;
	out_sg = nx_build_sg_list(nx_ctx->out_sg, (u8 *)sctx->state,
				  &data_len, max_sg_len);
	nx_ctx->op.outlen = (nx_ctx->out_sg - out_sg) * sizeof(struct nx_sg);

	if (data_len != SHA256_DIGEST_SIZE) {
		rc = -EINVAL;
		goto out;
	}

	do {
		int used_sgs = 0;
		struct nx_sg *in_sg = nx_ctx->in_sg;

		if (buf_len) {
			data_len = buf_len;
			in_sg = nx_build_sg_list(in_sg,
						 (u8 *) sctx->buf,
						 &data_len,
						 max_sg_len);

			if (data_len != buf_len) {
				rc = -EINVAL;
				goto out;
			}
			used_sgs = in_sg - nx_ctx->in_sg;
		}

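		/* to_process: the SHA256_BLOCK_SIZE-aligned chunk handled in
		 * this iteration. It is bounded by the sg list limits, minus
		 * one entry of headroom for unaligned data and the entries
		 * already consumed by the buffered leftover above.
		 */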
		to_process = min_t(u64, total,
				   (max_sg_len - 1 - used_sgs) * NX_PAGE_SIZE);
		to_process = to_process & ~(SHA256_BLOCK_SIZE - 1);

		data_len = to_process - buf_len;
		in_sg = nx_build_sg_list(in_sg, (u8 *) data,
					 &data_len, max_sg_len);

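		/* A negative op.inlen is deliberate: for the PFO hcall it
		 * marks the input as a scatter/gather list whose size in
		 * bytes is the magnitude (see struct vio_pfo_op in
		 * arch/powerpc/include/asm/vio.h).
		 */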
		nx_ctx->op.inlen = (nx_ctx->in_sg - in_sg) * sizeof(struct nx_sg);

		to_process = data_len + buf_len;
		leftover = total - to_process;

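		/* We have hit the NX chip before and are continuing the
		 * operation, so carry the partial digest forward.
		 */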
		memcpy(csbcpb->cpb.sha256.input_partial_digest,
		       csbcpb->cpb.sha256.message_digest,
		       SHA256_DIGEST_SIZE);

		if (!nx_ctx->op.inlen || !nx_ctx->op.outlen) {
			rc = -EINVAL;
			goto out;
		}

		rc = nx_hcall_sync(nx_ctx, &nx_ctx->op, 0);
		if (rc)
			goto out;

		atomic_inc(&(nx_ctx->stats->sha256_ops));

		total -= to_process;
		data += to_process - buf_len;
		buf_len = 0;

	} while (leftover >= SHA256_BLOCK_SIZE);

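	/* copy the leftover back into the state struct */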
	if (leftover)
		memcpy(sctx->buf, data, leftover);

	sctx->count += len;
	memcpy(sctx->state, csbcpb->cpb.sha256.message_digest, SHA256_DIGEST_SIZE);
out:
	spin_unlock_irqrestore(&nx_ctx->lock, irq_flags);
	return rc;
}

static int nx_sha256_final(struct shash_desc *desc, u8 *out)
{
	struct sha256_state_be *sctx = shash_desc_ctx(desc);
	struct nx_crypto_ctx *nx_ctx = crypto_tfm_ctx(&desc->tfm->base);
	struct nx_csbcpb *csbcpb = (struct nx_csbcpb *)nx_ctx->csbcpb;
	struct nx_sg *in_sg, *out_sg;
	unsigned long irq_flags;
	u32 max_sg_len;
	int rc = 0;
	int len;

	spin_lock_irqsave(&nx_ctx->lock, irq_flags);

	max_sg_len = min_t(u64, nx_ctx->ap->sglen,
			   nx_driver.of.max_sg_len / sizeof(struct nx_sg));
	max_sg_len = min_t(u64, max_sg_len,
			   nx_ctx->ap->databytelen / NX_PAGE_SIZE);

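	/* If whole blocks have already been hashed, finish as a continuation
	 * of those operations; otherwise this is a single, self-contained
	 * operation.
	 */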
	if (sctx->count >= SHA256_BLOCK_SIZE) {
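		/* We have hit the NX chip previously; now we are finalizing,
		 * so copy over the partial digest.
		 */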
		memcpy(csbcpb->cpb.sha256.input_partial_digest, sctx->state,
		       SHA256_DIGEST_SIZE);
		NX_CPB_FDM(csbcpb) &= ~NX_FDM_INTERMEDIATE;
		NX_CPB_FDM(csbcpb) |= NX_FDM_CONTINUATION;
	} else {
		NX_CPB_FDM(csbcpb) &= ~NX_FDM_INTERMEDIATE;
		NX_CPB_FDM(csbcpb) &= ~NX_FDM_CONTINUATION;
	}

	csbcpb->cpb.sha256.message_bit_length = (u64) (sctx->count * 8);

	len = sctx->count & (SHA256_BLOCK_SIZE - 1);
	in_sg = nx_build_sg_list(nx_ctx->in_sg, (u8 *) sctx->buf,
				 &len, max_sg_len);

	if (len != (sctx->count & (SHA256_BLOCK_SIZE - 1))) {
		rc = -EINVAL;
		goto out;
	}

	len = SHA256_DIGEST_SIZE;
	out_sg = nx_build_sg_list(nx_ctx->out_sg, out, &len, max_sg_len);

	if (len != SHA256_DIGEST_SIZE) {
		rc = -EINVAL;
		goto out;
	}

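	/* As in the update path, negative in/out lengths flag the buffers as
	 * scatter/gather lists to the hypervisor.
	 */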
	nx_ctx->op.inlen = (nx_ctx->in_sg - in_sg) * sizeof(struct nx_sg);
	nx_ctx->op.outlen = (nx_ctx->out_sg - out_sg) * sizeof(struct nx_sg);
	if (!nx_ctx->op.outlen) {
		rc = -EINVAL;
		goto out;
	}

	rc = nx_hcall_sync(nx_ctx, &nx_ctx->op, 0);
	if (rc)
		goto out;

	atomic_inc(&(nx_ctx->stats->sha256_ops));

	atomic64_add(sctx->count, &(nx_ctx->stats->sha256_bytes));
	memcpy(out, csbcpb->cpb.sha256.message_digest, SHA256_DIGEST_SIZE);
out:
	spin_unlock_irqrestore(&nx_ctx->lock, irq_flags);
	return rc;
}

static int nx_sha256_export(struct shash_desc *desc, void *out)
{
	struct sha256_state_be *sctx = shash_desc_ctx(desc);

	memcpy(out, sctx, sizeof(*sctx));

	return 0;
}

static int nx_sha256_import(struct shash_desc *desc, const void *in)
{
	struct sha256_state_be *sctx = shash_desc_ctx(desc);

	memcpy(sctx, in, sizeof(*sctx));

	return 0;
}

struct shash_alg nx_shash_sha256_alg = {
	.digestsize = SHA256_DIGEST_SIZE,
	.init       = nx_sha256_init,
	.update     = nx_sha256_update,
	.final      = nx_sha256_final,
	.export     = nx_sha256_export,
	.import     = nx_sha256_import,
	.descsize   = sizeof(struct sha256_state_be),
	.statesize  = sizeof(struct sha256_state_be),
	.base       = {
		.cra_name        = "sha256",
		.cra_driver_name = "sha256-nx",
		.cra_priority    = 300,
		.cra_blocksize   = SHA256_BLOCK_SIZE,
		.cra_module      = THIS_MODULE,
		.cra_ctxsize     = sizeof(struct nx_crypto_ctx),
		.cra_init        = nx_crypto_ctx_sha256_init,
		.cra_exit        = nx_crypto_ctx_exit,
	}
};