// SPDX-License-Identifier: GPL-2.0
/*
 * Xilinx ZynqMP SHA Driver.
 * Copyright (c) 2022 Xilinx Inc.
 */
#include <linux/cacheflush.h>
#include <crypto/hash.h>
#include <crypto/internal/hash.h>
#include <crypto/sha3.h>
#include <linux/crypto.h>
#include <linux/device.h>
#include <linux/dma-mapping.h>
#include <linux/firmware/xlnx-zynqmp.h>
#include <linux/init.h>
#include <linux/io.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/of_device.h>
#include <linux/platform_device.h>

#define ZYNQMP_DMA_BIT_MASK		32U
#define ZYNQMP_DMA_ALLOC_FIXED_SIZE	0x1000U

enum zynqmp_sha_op {
	ZYNQMP_SHA3_INIT = 1,
	ZYNQMP_SHA3_UPDATE = 2,
	ZYNQMP_SHA3_FINAL = 4,
};

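/**
 * struct zynqmp_sha_drv_ctx - Driver structure
 * @sha3_384: Crypto algorithm instance registered with the crypto API
 * @dev: Device structure
 */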
struct zynqmp_sha_drv_ctx {
	struct shash_alg sha3_384;
	struct device *dev;
};

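/**
 * struct zynqmp_sha_tfm_ctx - Crypto transform specific structure
 * @dev: Device structure
 * @fbk_tfm: Software fallback hash transform
 */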
struct zynqmp_sha_tfm_ctx {
	struct device *dev;
	struct crypto_shash *fbk_tfm;
};

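/**
 * struct zynqmp_sha_desc_ctx - Request context
 * @fbk_req: Sub-descriptor for the software fallback transform
 */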
struct zynqmp_sha_desc_ctx {
	struct shash_desc fbk_req;
};

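/*
 * Driver-global DMA buffers: ubuf stages input chunks for SHA3_UPDATE and
 * fbuf receives the digest on SHA3_FINAL. They are shared by all transforms
 * and are not protected by a lock, so concurrent zynqmp_sha_digest() calls
 * would race on them.
 */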
static dma_addr_t update_dma_addr, final_dma_addr;
static char *ubuf, *fbuf;

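/*
 * Allocate a software fallback for the incremental hash interface and
 * extend the descriptor size so each request can also carry the
 * fallback's state.
 */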
static int zynqmp_sha_init_tfm(struct crypto_shash *hash)
{
	const char *fallback_driver_name = crypto_shash_alg_name(hash);
	struct zynqmp_sha_tfm_ctx *tfm_ctx = crypto_shash_ctx(hash);
	struct shash_alg *alg = crypto_shash_alg(hash);
	struct crypto_shash *fallback_tfm;
	struct zynqmp_sha_drv_ctx *drv_ctx;

	drv_ctx = container_of(alg, struct zynqmp_sha_drv_ctx, sha3_384);
	tfm_ctx->dev = drv_ctx->dev;

	fallback_tfm = crypto_alloc_shash(fallback_driver_name, 0,
					  CRYPTO_ALG_NEED_FALLBACK);
	if (IS_ERR(fallback_tfm))
		return PTR_ERR(fallback_tfm);

	tfm_ctx->fbk_tfm = fallback_tfm;
	hash->descsize += crypto_shash_descsize(tfm_ctx->fbk_tfm);

	return 0;
}

static void zynqmp_sha_exit_tfm(struct crypto_shash *hash)
{
	struct zynqmp_sha_tfm_ctx *tfm_ctx = crypto_shash_ctx(hash);

	if (tfm_ctx->fbk_tfm) {
		crypto_free_shash(tfm_ctx->fbk_tfm);
		tfm_ctx->fbk_tfm = NULL;
	}

	memzero_explicit(tfm_ctx, sizeof(struct zynqmp_sha_tfm_ctx));
}

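/*
 * The incremental shash hooks below (init/update/final/finup and
 * export/import) all delegate to the software fallback; only
 * zynqmp_sha_digest() drives the hardware.
 */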
static int zynqmp_sha_init(struct shash_desc *desc)
{
	struct zynqmp_sha_desc_ctx *dctx = shash_desc_ctx(desc);
	struct zynqmp_sha_tfm_ctx *tctx = crypto_shash_ctx(desc->tfm);

	dctx->fbk_req.tfm = tctx->fbk_tfm;
	return crypto_shash_init(&dctx->fbk_req);
}

static int zynqmp_sha_update(struct shash_desc *desc, const u8 *data, unsigned int length)
{
	struct zynqmp_sha_desc_ctx *dctx = shash_desc_ctx(desc);

	return crypto_shash_update(&dctx->fbk_req, data, length);
}

static int zynqmp_sha_final(struct shash_desc *desc, u8 *out)
{
	struct zynqmp_sha_desc_ctx *dctx = shash_desc_ctx(desc);

	return crypto_shash_final(&dctx->fbk_req, out);
}

static int zynqmp_sha_finup(struct shash_desc *desc, const u8 *data, unsigned int length, u8 *out)
{
	struct zynqmp_sha_desc_ctx *dctx = shash_desc_ctx(desc);

	return crypto_shash_finup(&dctx->fbk_req, data, length, out);
}

static int zynqmp_sha_import(struct shash_desc *desc, const void *in)
{
	struct zynqmp_sha_desc_ctx *dctx = shash_desc_ctx(desc);
	struct zynqmp_sha_tfm_ctx *tctx = crypto_shash_ctx(desc->tfm);

	dctx->fbk_req.tfm = tctx->fbk_tfm;
	return crypto_shash_import(&dctx->fbk_req, in);
}

static int zynqmp_sha_export(struct shash_desc *desc, void *out)
{
	struct zynqmp_sha_desc_ctx *dctx = shash_desc_ctx(desc);

	return crypto_shash_export(&dctx->fbk_req, out);
}

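/*
 * One-shot hardware digest: issue SHA3_INIT, stream the input through the
 * DMA bounce buffer in ZYNQMP_DMA_ALLOC_FIXED_SIZE chunks via SHA3_UPDATE,
 * then fetch the SHA3_384_DIGEST_SIZE result with SHA3_FINAL.
 */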
static int zynqmp_sha_digest(struct shash_desc *desc, const u8 *data, unsigned int len, u8 *out)
{
	unsigned int remaining_len = len;
	int update_size;
	int ret;

	ret = zynqmp_pm_sha_hash(0, 0, ZYNQMP_SHA3_INIT);
	if (ret)
		return ret;

	while (remaining_len != 0) {
		memzero_explicit(ubuf, ZYNQMP_DMA_ALLOC_FIXED_SIZE);
		if (remaining_len >= ZYNQMP_DMA_ALLOC_FIXED_SIZE) {
			update_size = ZYNQMP_DMA_ALLOC_FIXED_SIZE;
			remaining_len -= ZYNQMP_DMA_ALLOC_FIXED_SIZE;
		} else {
			update_size = remaining_len;
			remaining_len = 0;
		}
		memcpy(ubuf, data, update_size);
		flush_icache_range((unsigned long)ubuf, (unsigned long)ubuf + update_size);
		ret = zynqmp_pm_sha_hash(update_dma_addr, update_size, ZYNQMP_SHA3_UPDATE);
		if (ret)
			return ret;

		data += update_size;
	}

	ret = zynqmp_pm_sha_hash(final_dma_addr, SHA3_384_DIGEST_SIZE, ZYNQMP_SHA3_FINAL);
	memcpy(out, fbuf, SHA3_384_DIGEST_SIZE);
	memzero_explicit(fbuf, SHA3_384_DIGEST_SIZE);

	return ret;
}

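/*
 * sha3-384 algorithm template. cra_priority 300 ranks this driver above
 * lower-priority software implementations, and CRYPTO_ALG_NEED_FALLBACK
 * marks this implementation as one that relies on a software fallback
 * for requests the hardware path does not handle.
 */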
static struct zynqmp_sha_drv_ctx sha3_drv_ctx = {
	.sha3_384 = {
		.init = zynqmp_sha_init,
		.update = zynqmp_sha_update,
		.final = zynqmp_sha_final,
		.finup = zynqmp_sha_finup,
		.digest = zynqmp_sha_digest,
		.export = zynqmp_sha_export,
		.import = zynqmp_sha_import,
		.init_tfm = zynqmp_sha_init_tfm,
		.exit_tfm = zynqmp_sha_exit_tfm,
		.descsize = sizeof(struct zynqmp_sha_desc_ctx),
		.statesize = sizeof(struct sha3_state),
		.digestsize = SHA3_384_DIGEST_SIZE,
		.base = {
			.cra_name = "sha3-384",
			.cra_driver_name = "zynqmp-sha3-384",
			.cra_priority = 300,
			.cra_flags = CRYPTO_ALG_KERN_DRIVER_ONLY |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_NEED_FALLBACK,
			.cra_blocksize = SHA3_384_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct zynqmp_sha_tfm_ctx),
			.cra_alignmask = 3,
			.cra_module = THIS_MODULE,
		}
	}
};

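/*
 * Probe: check that the PM firmware interface responds, configure 32-bit
 * DMA, register the shash algorithm and allocate the two DMA-coherent
 * bounce buffers used by zynqmp_sha_digest().
 */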
static int zynqmp_sha_probe(struct platform_device *pdev)
{
	struct device *dev = &pdev->dev;
	int err;
	u32 v;

	/* Verify the hardware is present */
	err = zynqmp_pm_get_api_version(&v);
	if (err)
		return err;

	err = dma_set_mask_and_coherent(dev, DMA_BIT_MASK(ZYNQMP_DMA_BIT_MASK));
	if (err < 0) {
		dev_err(dev, "No usable DMA configuration\n");
		return err;
	}

	err = crypto_register_shash(&sha3_drv_ctx.sha3_384);
	if (err < 0) {
		dev_err(dev, "Failed to register shash alg.\n");
		return err;
	}

	sha3_drv_ctx.dev = dev;
	platform_set_drvdata(pdev, &sha3_drv_ctx);

	ubuf = dma_alloc_coherent(dev, ZYNQMP_DMA_ALLOC_FIXED_SIZE, &update_dma_addr, GFP_KERNEL);
	if (!ubuf) {
		err = -ENOMEM;
		goto err_shash;
	}

	fbuf = dma_alloc_coherent(dev, SHA3_384_DIGEST_SIZE, &final_dma_addr, GFP_KERNEL);
	if (!fbuf) {
		err = -ENOMEM;
		goto err_mem;
	}

	return 0;

err_mem:
	dma_free_coherent(sha3_drv_ctx.dev, ZYNQMP_DMA_ALLOC_FIXED_SIZE, ubuf, update_dma_addr);

err_shash:
	crypto_unregister_shash(&sha3_drv_ctx.sha3_384);

	return err;
}

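/* Undo probe: free the DMA buffers and unregister the algorithm. */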
static int zynqmp_sha_remove(struct platform_device *pdev)
{
	struct zynqmp_sha_drv_ctx *drv_ctx = platform_get_drvdata(pdev);

	dma_free_coherent(drv_ctx->dev, ZYNQMP_DMA_ALLOC_FIXED_SIZE, ubuf, update_dma_addr);
	dma_free_coherent(drv_ctx->dev, SHA3_384_DIGEST_SIZE, fbuf, final_dma_addr);
	crypto_unregister_shash(&drv_ctx->sha3_384);

	return 0;
}
static struct platform_driver zynqmp_sha_driver = {
	.probe = zynqmp_sha_probe,
	.remove = zynqmp_sha_remove,
	.driver = {
		.name = "zynqmp-sha3-384",
	},
};

module_platform_driver(zynqmp_sha_driver);
MODULE_DESCRIPTION("ZynqMP SHA3 hardware acceleration support.");
MODULE_LICENSE("GPL v2");
MODULE_AUTHOR("Harsha <harsha.harsha@xilinx.com>");