From 13776f9d40a028a245bb766269e360f5b7a62721 Mon Sep 17 00:00:00 2001
From: Junhua Huang
Date: Sat, 6 Jul 2019 14:41:15 +0800
Subject: [PATCH] arm64: mm: free the initrd reserved memblock in an aligned manner

We should free the initrd reserved memblock in an aligned manner,
because the initrd reserves the memblock in an aligned manner in
arm64_memblock_init(). Otherwise some fragments are left in the
memblock_reserved regions after free_initrd_mem(), e.g.:

/sys/kernel/debug/memblock # cat reserved
   0: 0x0000000080080000..0x00000000817fafff
   1: 0x0000000083400000..0x0000000083ffffff
   2: 0x0000000090000000..0x000000009000407f
   3: 0x00000000b0000000..0x00000000b000003f
   4: 0x00000000b26184ea..0x00000000b2618fff

Fragments such as the ranges b0000000..b000003f and
b26184ea..b2618fff should have been freed.

We also call free_reserved_area() only after memblock_free():
free_reserved_area() calls __free_pages(), and once that has been done
the pages could be allocated somewhere else, while memblock and iomem
would still report them as reserved memory.

Fixes: 05c58752f9dc ("arm64: To remove initrd reserved area entry from memblock")
Signed-off-by: Junhua Huang
Signed-off-by: Will Deacon
---
 arch/arm64/mm/init.c | 6 +++++-
 1 file changed, 5 insertions(+), 1 deletion(-)

diff --git a/arch/arm64/mm/init.c b/arch/arm64/mm/init.c
index f3c7952..b1ee6cb 100644
--- a/arch/arm64/mm/init.c
+++ b/arch/arm64/mm/init.c
@@ -570,8 +570,12 @@ void free_initmem(void)
 
 #ifdef CONFIG_BLK_DEV_INITRD
 void __init free_initrd_mem(unsigned long start, unsigned long end)
 {
+	unsigned long aligned_start, aligned_end;
+
+	aligned_start = __virt_to_phys(start) & PAGE_MASK;
+	aligned_end = PAGE_ALIGN(__virt_to_phys(end));
+	memblock_free(aligned_start, aligned_end - aligned_start);
 	free_reserved_area((void *)start, (void *)end, 0, "initrd");
-	memblock_free(__virt_to_phys(start), end - start);
 }
 #endif
--
2.7.4
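
For reference, the page-alignment arithmetic performed on aligned_start
and aligned_end can be sketched in plain C as below, assuming 4 KiB
pages. This is not part of the patch: PAGE_SIZE, PAGE_MASK and
PAGE_ALIGN here are local stand-ins for the kernel macros of the same
name, and the addresses are illustrative values based on the unaligned
fragment start shown in the commit message.

#include <stdint.h>
#include <stdio.h>

#define PAGE_SIZE		((uint64_t)4096)
#define PAGE_MASK		(~(PAGE_SIZE - 1))
#define PAGE_ALIGN(addr)	(((addr) + PAGE_SIZE - 1) & PAGE_MASK)

int main(void)
{
	/* Illustrative physical range; 0xb26184ea is the unaligned start
	 * of the leftover fragment listed in the commit message. */
	uint64_t start = 0xb26184ea;
	uint64_t end   = 0xb2619000;

	/* Round start down and end up to page boundaries, as the patch
	 * does before calling memblock_free(). */
	uint64_t aligned_start = start & PAGE_MASK;	/* 0xb2618000 */
	uint64_t aligned_end   = PAGE_ALIGN(end);	/* 0xb2619000 */

	printf("free [0x%llx, 0x%llx), %llu bytes\n",
	       (unsigned long long)aligned_start,
	       (unsigned long long)aligned_end,
	       (unsigned long long)(aligned_end - aligned_start));
	return 0;
}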