// SPDX-License-Identifier: GPL-2.0
#include <linux/kernel.h>
#include <linux/types.h>
#include <linux/init.h>
#include <linux/memblock.h>

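/*
 * Results of the early memory test: whether any pass has run, and how many
 * bytes of bad memory were found and reserved.
 */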
bool early_memtest_done;
phys_addr_t early_memtest_bad_size;

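/* Test patterns, written to every word of a range and then read back. */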
static u64 patterns[] __initdata = {
	/* The first entry has to be 0 to leave memtest with zeroed memory */
	0,
	0xffffffffffffffffULL,
	0x5555555555555555ULL,
	0xaaaaaaaaaaaaaaaaULL,
	0x1111111111111111ULL,
	0x2222222222222222ULL,
	0x4444444444444444ULL,
	0x8888888888888888ULL,
	0x3333333333333333ULL,
	0x6666666666666666ULL,
	0x9999999999999999ULL,
	0xccccccccccccccccULL,
	0x7777777777777777ULL,
	0xbbbbbbbbbbbbbbbbULL,
	0xddddddddddddddddULL,
	0xeeeeeeeeeeeeeeeeULL,
	0x7a6c7258554e494cULL, /* yeah ;-) */
};

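/*
 * Record a faulty range: log it and reserve it in memblock so the pages
 * are never handed out later.
 */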
static void __init reserve_bad_mem(u64 pattern, phys_addr_t start_bad, phys_addr_t end_bad)
{
	pr_info(" %016llx bad mem addr %pa - %pa reserved\n",
		cpu_to_be64(pattern), &start_bad, &end_bad);
	memblock_reserve(start_bad, end_bad - start_bad);
	early_memtest_bad_size += (end_bad - start_bad);
}

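/*
 * Fill [start_phys, start_phys + size) with @pattern and read it back,
 * handing any mismatching ranges to reserve_bad_mem().
 */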
static void __init memtest(u64 pattern, phys_addr_t start_phys, phys_addr_t size)
{
	u64 *p, *start, *end;
	phys_addr_t start_bad, last_bad;
	phys_addr_t start_phys_aligned;
	const size_t incr = sizeof(pattern);

	start_phys_aligned = ALIGN(start_phys, incr);
	start = __va(start_phys_aligned);
	end = start + (size - (start_phys_aligned - start_phys)) / incr;
	start_bad = 0;
	last_bad = 0;

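	/* First pass: write the pattern to every word in the range. */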
	for (p = start; p < end; p++)
		*p = pattern;

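	/* Second pass: read back, merging adjacent bad words into one range. */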
	for (p = start; p < end; p++, start_phys_aligned += incr) {
		if (*p == pattern)
			continue;
		if (start_phys_aligned == last_bad + incr) {
			last_bad += incr;
			continue;
		}
		if (start_bad)
			reserve_bad_mem(pattern, start_bad, last_bad + incr);
		start_bad = last_bad = start_phys_aligned;
	}
	if (start_bad)
		reserve_bad_mem(pattern, start_bad, last_bad + incr);

	early_memtest_done = true;
}

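/*
 * Run a single pattern over every free memblock region that overlaps
 * [start, end).
 */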
static void __init do_one_pass(u64 pattern, phys_addr_t start, phys_addr_t end)
{
	u64 i;
	phys_addr_t this_start, this_end;

	for_each_free_mem_range(i, NUMA_NO_NODE, MEMBLOCK_NONE, &this_start,
				&this_end, NULL) {
		this_start = clamp(this_start, start, end);
		this_end = clamp(this_end, start, end);
		if (this_start < this_end) {
			pr_info(" %pa - %pa pattern %016llx\n",
				&this_start, &this_end, cpu_to_be64(pattern));
			memtest(pattern, this_start, this_end - this_start);
		}
	}
}

/* default is disabled */
static unsigned int memtest_pattern __initdata;

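/*
 * "memtest=<passes>" on the kernel command line sets the number of test
 * passes; a bare "memtest" defaults to one pass per pattern.
 */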
static int __init parse_memtest(char *arg)
{
	int ret = 0;

	if (arg)
		ret = kstrtouint(arg, 0, &memtest_pattern);
	else
		memtest_pattern = ARRAY_SIZE(patterns);

	return ret;
}

early_param("memtest", parse_memtest);

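/*
 * Run the configured number of test passes over [start, end), cycling
 * through the pattern table.
 */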
void __init early_memtest(phys_addr_t start, phys_addr_t end)
{
	unsigned int i;
	unsigned int idx = 0;

	if (!memtest_pattern)
		return;

	pr_info("early_memtest: # of tests: %u\n", memtest_pattern);
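	/*
	 * Count down so the final pass uses patterns[0] == 0 and leaves the
	 * tested memory zeroed; the unsigned wrap past zero ends the loop.
	 */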
	for (i = memtest_pattern-1; i < UINT_MAX; --i) {
		idx = i % ARRAY_SIZE(patterns);
		do_one_pass(patterns[idx], start, end);
	}
}