/* SPDX-License-Identifier: GPL-2.0 */
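/*
 * arm64 back end for the arch_get_random_*() interfaces: uses the
 * RNDR/RNDRRS registers (FEAT_RNG) and, for seed material, the Arm
 * SMCCC TRNG firmware service when it is available.
 */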
#ifndef _ASM_ARCHRANDOM_H
#define _ASM_ARCHRANDOM_H

#include <linux/arm-smccc.h>
#include <linux/bug.h>
#include <linux/kernel.h>
#include <asm/cpufeature.h>

#define ARM_SMCCC_TRNG_MIN_VERSION  0x10000UL

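/* True if the firmware implements the SMCCC TRNG service (see smccc_probe_trng()). */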
extern bool smccc_trng_available;

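/*
 * Probe for the Arm SMCCC TRNG firmware interface: query its version and
 * report whether the implementation is recent enough (>= 1.0) to be used
 * as an entropy source.
 */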
static inline bool __init smccc_probe_trng(void)
{
    struct arm_smccc_res res;

    arm_smccc_1_1_invoke(ARM_SMCCC_TRNG_VERSION, &res);
    if ((s32)res.a0 < 0)
        return false;

    return res.a0 >= ARM_SMCCC_TRNG_MIN_VERSION;
}

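/*
 * Read a 64-bit random number from the RNDR register (FEAT_RNG).
 * Returns true and stores the value in *v on success, false if the
 * RNG could not produce a value.
 */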
static inline bool __arm64_rndr(unsigned long *v)
{
    bool ok;

    /*
     * Reads of RNDR set PSTATE.NZCV to 0b0000 on success,
     * and set PSTATE.NZCV to 0b0100 otherwise.
     */
    asm volatile(
        __mrs_s("%0", SYS_RNDR_EL0) "\n"
    "   cset %w1, ne\n"
    : "=r" (*v), "=r" (ok)
    :
    : "cc");

    return ok;
}

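/*
 * Like __arm64_rndr(), but reads RNDRRS, which reseeds the underlying
 * DRBG for every value it returns.
 */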
static inline bool __arm64_rndrrs(unsigned long *v)
{
    bool ok;

    /*
     * Reads of RNDRRS set PSTATE.NZCV to 0b0000 on success,
     * and set PSTATE.NZCV to 0b0100 otherwise.
     */
    asm volatile(
        __mrs_s("%0", SYS_RNDRRS_EL0) "\n"
    "   cset %w1, ne\n"
    : "=r" (*v), "=r" (ok)
    :
    : "cc");

    return ok;
}

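/*
 * Fill up to max_longs words with random data for the generic
 * arch_get_random_*() interface and return how many were written
 * (RNDR yields one long per read, so this is 0 or 1).
 */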
static inline size_t __must_check arch_get_random_longs(unsigned long *v, size_t max_longs)
{
    /*
     * Only support the generic interface after we have detected
     * the system wide capability, avoiding complexity with the
     * cpufeature code and with potential scheduling between CPUs
     * with and without the feature.
     */
    if (max_longs && cpus_have_const_cap(ARM64_HAS_RNG) && __arm64_rndr(v))
        return 1;
    return 0;
}

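/*
 * Provide seed-grade entropy: try the firmware TRNG service first and
 * fall back to RNDRRS when the SMCCC interface is not available.
 * Returns the number of longs written.
 */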
static inline size_t __must_check arch_get_random_seed_longs(unsigned long *v, size_t max_longs)
{
    if (!max_longs)
        return 0;

    /*
     * We prefer the SMCCC call, since its semantics (returning actual
     * hardware-backed entropy) are closer to the idea behind this
     * function than what even the RNDRRS register provides
     * (the output of a pseudo RNG freshly seeded by a TRNG).
     */
    if (smccc_trng_available) {
        struct arm_smccc_res res;

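        /*
         * The TRNG_RND64 call returns at most 192 bits of entropy,
         * delivered in res.a1..a3, so never request more than three
         * 64-bit words per invocation.
         */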
        max_longs = min_t(size_t, 3, max_longs);
        arm_smccc_1_1_invoke(ARM_SMCCC_TRNG_RND64, max_longs * 64, &res);
        if ((int)res.a0 >= 0) {
            switch (max_longs) {
            case 3:
                *v++ = res.a1;
                fallthrough;
            case 2:
                *v++ = res.a2;
                fallthrough;
            case 1:
                *v++ = res.a3;
                break;
            }
            return max_longs;
        }
    }

    /*
     * RNDRRS is not backed by an entropy source but by a DRBG that is
     * reseeded after each invocation. This is not a 100% fit but good
     * enough to implement this API if no other entropy source exists.
     */
    if (cpus_have_const_cap(ARM64_HAS_RNG) && __arm64_rndrrs(v))
        return 1;

    return 0;
}

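/*
 * Check ID_AA64ISAR0_EL1.RNDR directly to find out whether this CPU
 * implements FEAT_RNG, for use before the cpufeature framework is up.
 */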
static inline bool __init __early_cpu_has_rndr(void)
{
    /* Open code as we run prior to the first call to cpufeature. */
    unsigned long ftr = read_sysreg_s(SYS_ID_AA64ISAR0_EL1);
    return (ftr >> ID_AA64ISAR0_EL1_RNDR_SHIFT) & 0xf;
}

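/*
 * Early-boot variant of arch_get_random_seed_longs(): same SMCCC-first,
 * RNDR-fallback logic, but it open codes the CPU feature check because
 * it runs before cpufeature detection has completed (hence the WARN_ON
 * if called after boot).
 */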
static inline size_t __init __must_check
arch_get_random_seed_longs_early(unsigned long *v, size_t max_longs)
{
    WARN_ON(system_state != SYSTEM_BOOTING);

    if (!max_longs)
        return 0;

    if (smccc_trng_available) {
        struct arm_smccc_res res;

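        /* As above: TRNG_RND64 returns at most three 64-bit words (a1..a3). */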
        max_longs = min_t(size_t, 3, max_longs);
        arm_smccc_1_1_invoke(ARM_SMCCC_TRNG_RND64, max_longs * 64, &res);
        if ((int)res.a0 >= 0) {
            switch (max_longs) {
            case 3:
                *v++ = res.a1;
                fallthrough;
            case 2:
                *v++ = res.a2;
                fallthrough;
            case 1:
                *v++ = res.a3;
                break;
            }
            return max_longs;
        }
    }

    if (__early_cpu_has_rndr() && __arm64_rndr(v))
        return 1;

    return 0;
}
#define arch_get_random_seed_longs_early arch_get_random_seed_longs_early

#endif /* _ASM_ARCHRANDOM_H */