Back to home page

LXR

 
 

    


0001 #include <linux/kernel.h>
0002 #include <linux/types.h>
0003 #include <linux/init.h>
0004 #include <linux/memblock.h>
0005 
/*
 * Patterns written to and verified in every free memory word: all-zeroes,
 * all-ones, and a series of repeating-nibble values that exercise each bit
 * position both set and clear.
 */
static u64 patterns[] __initdata = {
    /* The first entry has to be 0 to leave memtest with zeroed memory */
    0,
    0xffffffffffffffffULL,
    0x5555555555555555ULL,
    0xaaaaaaaaaaaaaaaaULL,
    0x1111111111111111ULL,
    0x2222222222222222ULL,
    0x4444444444444444ULL,
    0x8888888888888888ULL,
    0x3333333333333333ULL,
    0x6666666666666666ULL,
    0x9999999999999999ULL,
    0xccccccccccccccccULL,
    0x7777777777777777ULL,
    0xbbbbbbbbbbbbbbbbULL,
    0xddddddddddddddddULL,
    0xeeeeeeeeeeeeeeeeULL,
    0x7a6c7258554e494cULL, /* yeah ;-) -- the bytes spell "LINUXrlz" little-endian */
};
0026 
0027 static void __init reserve_bad_mem(u64 pattern, phys_addr_t start_bad, phys_addr_t end_bad)
0028 {
0029     pr_info("  %016llx bad mem addr %pa - %pa reserved\n",
0030         cpu_to_be64(pattern), &start_bad, &end_bad);
0031     memblock_reserve(start_bad, end_bad - start_bad);
0032 }
0033 
/*
 * Fill the physical range [start_phys, start_phys + size) with @pattern,
 * read it back, and hand every mismatching range to reserve_bad_mem().
 */
static void __init memtest(u64 pattern, phys_addr_t start_phys, phys_addr_t size)
{
    u64 *p, *start, *end;
    phys_addr_t start_bad, last_bad;
    phys_addr_t start_phys_aligned;
    const size_t incr = sizeof(pattern);

    /* Round up so every access below is a naturally aligned u64. */
    start_phys_aligned = ALIGN(start_phys, incr);
    start = __va(start_phys_aligned);
    end = start + (size - (start_phys_aligned - start_phys)) / incr;
    start_bad = 0;
    last_bad = 0;

    /* Pass 1: write the pattern across the whole region. */
    for (p = start; p < end; p++)
        *p = pattern;

    /*
     * Pass 2: verify.  start_phys_aligned advances with p so it always
     * holds the physical address of *p.  Consecutive failing words are
     * coalesced into one [start_bad, last_bad + incr) range.
     */
    for (p = start; p < end; p++, start_phys_aligned += incr) {
        if (*p == pattern)
            continue;
        /* Word is adjacent to the current bad range: extend it. */
        if (start_phys_aligned == last_bad + incr) {
            last_bad += incr;
            continue;
        }
        /* A new bad range begins: flush the previous one, if any. */
        if (start_bad)
            reserve_bad_mem(pattern, start_bad, last_bad + incr);
        start_bad = last_bad = start_phys_aligned;
    }
    /* Flush the trailing bad range, if the region ended while inside one. */
    if (start_bad)
        reserve_bad_mem(pattern, start_bad, last_bad + incr);
}
0064 
0065 static void __init do_one_pass(u64 pattern, phys_addr_t start, phys_addr_t end)
0066 {
0067     u64 i;
0068     phys_addr_t this_start, this_end;
0069 
0070     for_each_free_mem_range(i, NUMA_NO_NODE, MEMBLOCK_NONE, &this_start,
0071                 &this_end, NULL) {
0072         this_start = clamp(this_start, start, end);
0073         this_end = clamp(this_end, start, end);
0074         if (this_start < this_end) {
0075             pr_info("  %pa - %pa pattern %016llx\n",
0076                 &this_start, &this_end, cpu_to_be64(pattern));
0077             memtest(pattern, this_start, this_end - this_start);
0078         }
0079     }
0080 }
0081 
/*
 * Number of test passes requested via the "memtest" kernel parameter.
 * Zero (the default) leaves early_memtest() disabled.
 */
static unsigned int memtest_pattern __initdata;
0084 
0085 static int __init parse_memtest(char *arg)
0086 {
0087     int ret = 0;
0088 
0089     if (arg)
0090         ret = kstrtouint(arg, 0, &memtest_pattern);
0091     else
0092         memtest_pattern = ARRAY_SIZE(patterns);
0093 
0094     return ret;
0095 }
0096 
0097 early_param("memtest", parse_memtest);
0098 
0099 void __init early_memtest(phys_addr_t start, phys_addr_t end)
0100 {
0101     unsigned int i;
0102     unsigned int idx = 0;
0103 
0104     if (!memtest_pattern)
0105         return;
0106 
0107     pr_info("early_memtest: # of tests: %u\n", memtest_pattern);
0108     for (i = memtest_pattern-1; i < UINT_MAX; --i) {
0109         idx = i % ARRAY_SIZE(patterns);
0110         do_one_pass(patterns[idx], start, end);
0111     }
0112 }