// SPDX-License-Identifier: GPL-2.0
/*
 * Crypto user configuration API: report per-algorithm statistics
 * (CRYPTO_MSG_GETSTAT).
 */

#include <linux/crypto.h>
#include <linux/cryptouser.h>
#include <linux/sched.h>
#include <net/netlink.h>
#include <net/sock.h>
#include <crypto/internal/skcipher.h>
#include <crypto/internal/rng.h>
#include <crypto/akcipher.h>
#include <crypto/kpp.h>
#include <crypto/internal/cryptouser.h>

#include "internal.h"

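/* True only if the fixed-size array @x contains a NUL terminator. */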
#define null_terminated(x) (strnlen(x, sizeof(x)) < sizeof(x))

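/*
 * State threaded through a single statistics dump: the request skb (for the
 * sender's portid), the reply skb being built, and the netlink sequence
 * number and flags to echo back.
 */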
struct crypto_dump_info {
	struct sk_buff *in_skb;
	struct sk_buff *out_skb;
	u32 nlmsg_seq;
	u16 nlmsg_flags;
};

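/*
 * Each crypto_report_*() helper below snapshots one algorithm type's counters
 * into a zeroed, fixed-size crypto_stat_* structure and emits it as a single
 * CRYPTOCFGA_STAT_* netlink attribute.
 */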
static int crypto_report_aead(struct sk_buff *skb, struct crypto_alg *alg)
{
	struct crypto_stat_aead raead;

	memset(&raead, 0, sizeof(raead));

	strscpy(raead.type, "aead", sizeof(raead.type));

	raead.stat_encrypt_cnt = atomic64_read(&alg->stats.aead.encrypt_cnt);
	raead.stat_encrypt_tlen = atomic64_read(&alg->stats.aead.encrypt_tlen);
	raead.stat_decrypt_cnt = atomic64_read(&alg->stats.aead.decrypt_cnt);
	raead.stat_decrypt_tlen = atomic64_read(&alg->stats.aead.decrypt_tlen);
	raead.stat_err_cnt = atomic64_read(&alg->stats.aead.err_cnt);

	return nla_put(skb, CRYPTOCFGA_STAT_AEAD, sizeof(raead), &raead);
}

static int crypto_report_cipher(struct sk_buff *skb, struct crypto_alg *alg)
{
	struct crypto_stat_cipher rcipher;

	memset(&rcipher, 0, sizeof(rcipher));

	strscpy(rcipher.type, "cipher", sizeof(rcipher.type));

	rcipher.stat_encrypt_cnt = atomic64_read(&alg->stats.cipher.encrypt_cnt);
	rcipher.stat_encrypt_tlen = atomic64_read(&alg->stats.cipher.encrypt_tlen);
	rcipher.stat_decrypt_cnt = atomic64_read(&alg->stats.cipher.decrypt_cnt);
	rcipher.stat_decrypt_tlen = atomic64_read(&alg->stats.cipher.decrypt_tlen);
	rcipher.stat_err_cnt = atomic64_read(&alg->stats.cipher.err_cnt);

	return nla_put(skb, CRYPTOCFGA_STAT_CIPHER, sizeof(rcipher), &rcipher);
}

static int crypto_report_comp(struct sk_buff *skb, struct crypto_alg *alg)
{
	struct crypto_stat_compress rcomp;

	memset(&rcomp, 0, sizeof(rcomp));

	strscpy(rcomp.type, "compression", sizeof(rcomp.type));
	rcomp.stat_compress_cnt = atomic64_read(&alg->stats.compress.compress_cnt);
	rcomp.stat_compress_tlen = atomic64_read(&alg->stats.compress.compress_tlen);
	rcomp.stat_decompress_cnt = atomic64_read(&alg->stats.compress.decompress_cnt);
	rcomp.stat_decompress_tlen = atomic64_read(&alg->stats.compress.decompress_tlen);
	rcomp.stat_err_cnt = atomic64_read(&alg->stats.compress.err_cnt);

	return nla_put(skb, CRYPTOCFGA_STAT_COMPRESS, sizeof(rcomp), &rcomp);
}

static int crypto_report_acomp(struct sk_buff *skb, struct crypto_alg *alg)
{
	struct crypto_stat_compress racomp;

	memset(&racomp, 0, sizeof(racomp));

	strscpy(racomp.type, "acomp", sizeof(racomp.type));
	racomp.stat_compress_cnt = atomic64_read(&alg->stats.compress.compress_cnt);
	racomp.stat_compress_tlen = atomic64_read(&alg->stats.compress.compress_tlen);
	racomp.stat_decompress_cnt = atomic64_read(&alg->stats.compress.decompress_cnt);
	racomp.stat_decompress_tlen = atomic64_read(&alg->stats.compress.decompress_tlen);
	racomp.stat_err_cnt = atomic64_read(&alg->stats.compress.err_cnt);

	return nla_put(skb, CRYPTOCFGA_STAT_ACOMP, sizeof(racomp), &racomp);
}

static int crypto_report_akcipher(struct sk_buff *skb, struct crypto_alg *alg)
{
	struct crypto_stat_akcipher rakcipher;

	memset(&rakcipher, 0, sizeof(rakcipher));

	strscpy(rakcipher.type, "akcipher", sizeof(rakcipher.type));
	rakcipher.stat_encrypt_cnt = atomic64_read(&alg->stats.akcipher.encrypt_cnt);
	rakcipher.stat_encrypt_tlen = atomic64_read(&alg->stats.akcipher.encrypt_tlen);
	rakcipher.stat_decrypt_cnt = atomic64_read(&alg->stats.akcipher.decrypt_cnt);
	rakcipher.stat_decrypt_tlen = atomic64_read(&alg->stats.akcipher.decrypt_tlen);
	rakcipher.stat_sign_cnt = atomic64_read(&alg->stats.akcipher.sign_cnt);
	rakcipher.stat_verify_cnt = atomic64_read(&alg->stats.akcipher.verify_cnt);
	rakcipher.stat_err_cnt = atomic64_read(&alg->stats.akcipher.err_cnt);

	return nla_put(skb, CRYPTOCFGA_STAT_AKCIPHER,
		       sizeof(rakcipher), &rakcipher);
}

static int crypto_report_kpp(struct sk_buff *skb, struct crypto_alg *alg)
{
	struct crypto_stat_kpp rkpp;

	memset(&rkpp, 0, sizeof(rkpp));

	strscpy(rkpp.type, "kpp", sizeof(rkpp.type));

	rkpp.stat_setsecret_cnt = atomic64_read(&alg->stats.kpp.setsecret_cnt);
	rkpp.stat_generate_public_key_cnt = atomic64_read(&alg->stats.kpp.generate_public_key_cnt);
	rkpp.stat_compute_shared_secret_cnt = atomic64_read(&alg->stats.kpp.compute_shared_secret_cnt);
	rkpp.stat_err_cnt = atomic64_read(&alg->stats.kpp.err_cnt);

	return nla_put(skb, CRYPTOCFGA_STAT_KPP, sizeof(rkpp), &rkpp);
}

static int crypto_report_ahash(struct sk_buff *skb, struct crypto_alg *alg)
{
	struct crypto_stat_hash rhash;

	memset(&rhash, 0, sizeof(rhash));

	strscpy(rhash.type, "ahash", sizeof(rhash.type));

	rhash.stat_hash_cnt = atomic64_read(&alg->stats.hash.hash_cnt);
	rhash.stat_hash_tlen = atomic64_read(&alg->stats.hash.hash_tlen);
	rhash.stat_err_cnt = atomic64_read(&alg->stats.hash.err_cnt);

	return nla_put(skb, CRYPTOCFGA_STAT_HASH, sizeof(rhash), &rhash);
}

static int crypto_report_shash(struct sk_buff *skb, struct crypto_alg *alg)
{
	struct crypto_stat_hash rhash;

	memset(&rhash, 0, sizeof(rhash));

	strscpy(rhash.type, "shash", sizeof(rhash.type));

	rhash.stat_hash_cnt = atomic64_read(&alg->stats.hash.hash_cnt);
	rhash.stat_hash_tlen = atomic64_read(&alg->stats.hash.hash_tlen);
	rhash.stat_err_cnt = atomic64_read(&alg->stats.hash.err_cnt);

	return nla_put(skb, CRYPTOCFGA_STAT_HASH, sizeof(rhash), &rhash);
}

static int crypto_report_rng(struct sk_buff *skb, struct crypto_alg *alg)
{
	struct crypto_stat_rng rrng;

	memset(&rrng, 0, sizeof(rrng));

	strscpy(rrng.type, "rng", sizeof(rrng.type));

	rrng.stat_generate_cnt = atomic64_read(&alg->stats.rng.generate_cnt);
	rrng.stat_generate_tlen = atomic64_read(&alg->stats.rng.generate_tlen);
	rrng.stat_seed_cnt = atomic64_read(&alg->stats.rng.seed_cnt);
	rrng.stat_err_cnt = atomic64_read(&alg->stats.rng.err_cnt);

	return nla_put(skb, CRYPTOCFGA_STAT_RNG, sizeof(rrng), &rrng);
}

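/*
 * Fill in the generic crypto_user_alg header for @alg, then append the
 * statistics attribute that matches its type.  Returns -EMSGSIZE if an
 * attribute does not fit in @skb.
 */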
static int crypto_reportstat_one(struct crypto_alg *alg,
				 struct crypto_user_alg *ualg,
				 struct sk_buff *skb)
{
	memset(ualg, 0, sizeof(*ualg));

	strscpy(ualg->cru_name, alg->cra_name, sizeof(ualg->cru_name));
	strscpy(ualg->cru_driver_name, alg->cra_driver_name,
		sizeof(ualg->cru_driver_name));
	strscpy(ualg->cru_module_name, module_name(alg->cra_module),
		sizeof(ualg->cru_module_name));

	ualg->cru_type = 0;
	ualg->cru_mask = 0;
	ualg->cru_flags = alg->cra_flags;
	ualg->cru_refcnt = refcount_read(&alg->cra_refcnt);

	if (nla_put_u32(skb, CRYPTOCFGA_PRIORITY_VAL, alg->cra_priority))
		goto nla_put_failure;
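	/*
	 * Larval (still registering) algorithms carry no statistics yet;
	 * report a placeholder entry and stop here.
	 */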
	if (alg->cra_flags & CRYPTO_ALG_LARVAL) {
		struct crypto_stat_larval rl;

		memset(&rl, 0, sizeof(rl));
		strscpy(rl.type, "larval", sizeof(rl.type));
		if (nla_put(skb, CRYPTOCFGA_STAT_LARVAL, sizeof(rl), &rl))
			goto nla_put_failure;
		goto out;
	}

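	/* Emit the statistics attribute matching the algorithm's type flags. */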
	switch (alg->cra_flags & (CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_LARVAL)) {
	case CRYPTO_ALG_TYPE_AEAD:
		if (crypto_report_aead(skb, alg))
			goto nla_put_failure;
		break;
	case CRYPTO_ALG_TYPE_SKCIPHER:
		if (crypto_report_cipher(skb, alg))
			goto nla_put_failure;
		break;
	case CRYPTO_ALG_TYPE_CIPHER:
		if (crypto_report_cipher(skb, alg))
			goto nla_put_failure;
		break;
	case CRYPTO_ALG_TYPE_COMPRESS:
		if (crypto_report_comp(skb, alg))
			goto nla_put_failure;
		break;
	case CRYPTO_ALG_TYPE_ACOMPRESS:
		if (crypto_report_acomp(skb, alg))
			goto nla_put_failure;
		break;
	case CRYPTO_ALG_TYPE_SCOMPRESS:
		if (crypto_report_acomp(skb, alg))
			goto nla_put_failure;
		break;
	case CRYPTO_ALG_TYPE_AKCIPHER:
		if (crypto_report_akcipher(skb, alg))
			goto nla_put_failure;
		break;
	case CRYPTO_ALG_TYPE_KPP:
		if (crypto_report_kpp(skb, alg))
			goto nla_put_failure;
		break;
	case CRYPTO_ALG_TYPE_AHASH:
		if (crypto_report_ahash(skb, alg))
			goto nla_put_failure;
		break;
	case CRYPTO_ALG_TYPE_HASH:
		if (crypto_report_shash(skb, alg))
			goto nla_put_failure;
		break;
	case CRYPTO_ALG_TYPE_RNG:
		if (crypto_report_rng(skb, alg))
			goto nla_put_failure;
		break;
	default:
		pr_err("ERROR: Unhandled alg %d in %s\n",
		       alg->cra_flags & (CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_LARVAL),
		       __func__);
	}

out:
	return 0;

nla_put_failure:
	return -EMSGSIZE;
}

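/*
 * Wrap the statistics for one algorithm in a CRYPTO_MSG_GETSTAT netlink
 * message, cancelling the partially built message on failure.
 */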
static int crypto_reportstat_alg(struct crypto_alg *alg,
				 struct crypto_dump_info *info)
{
	struct sk_buff *in_skb = info->in_skb;
	struct sk_buff *skb = info->out_skb;
	struct nlmsghdr *nlh;
	struct crypto_user_alg *ualg;
	int err = 0;

	nlh = nlmsg_put(skb, NETLINK_CB(in_skb).portid, info->nlmsg_seq,
			CRYPTO_MSG_GETSTAT, sizeof(*ualg), info->nlmsg_flags);
	if (!nlh) {
		err = -EMSGSIZE;
		goto out;
	}

	ualg = nlmsg_data(nlh);

	err = crypto_reportstat_one(alg, ualg, skb);
	if (err) {
		nlmsg_cancel(skb, nlh);
		goto out;
	}

	nlmsg_end(skb, nlh);

out:
	return err;
}

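/*
 * Handler for CRYPTO_MSG_GETSTAT requests: validate the user-supplied names,
 * look up the matching algorithm, build a reply skb and unicast it back to
 * the requesting socket.
 */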
int crypto_reportstat(struct sk_buff *in_skb, struct nlmsghdr *in_nlh,
		      struct nlattr **attrs)
{
	struct net *net = sock_net(in_skb->sk);
	struct crypto_user_alg *p = nlmsg_data(in_nlh);
	struct crypto_alg *alg;
	struct sk_buff *skb;
	struct crypto_dump_info info;
	int err;

	if (!null_terminated(p->cru_name) || !null_terminated(p->cru_driver_name))
		return -EINVAL;

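	/*
	 * crypto_alg_match() takes a reference on the algorithm; drop it via
	 * crypto_mod_put() once the reply has been built.
	 */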
	alg = crypto_alg_match(p, 0);
	if (!alg)
		return -ENOENT;

	err = -ENOMEM;
	skb = nlmsg_new(NLMSG_DEFAULT_SIZE, GFP_ATOMIC);
	if (!skb)
		goto drop_alg;

	info.in_skb = in_skb;
	info.out_skb = skb;
	info.nlmsg_seq = in_nlh->nlmsg_seq;
	info.nlmsg_flags = 0;

	err = crypto_reportstat_alg(alg, &info);

drop_alg:
	crypto_mod_put(alg);

	if (err) {
		kfree_skb(skb);
		return err;
	}

	return nlmsg_unicast(net->crypto_nlsk, skb, NETLINK_CB(in_skb).portid);
}

MODULE_LICENSE("GPL");