path: root/mm/mm_init.c
author		Miaohe Lin <linmiaohe@huawei.com>		2023-08-07 10:35:28 +0800
committer	Andrew Morton <akpm@linux-foundation.org>	2023-08-21 13:37:47 -0700
commit		daee07bfba3340b07edcf9ae92044398e8a964db (patch)
tree		cba163608914ff34823dde6f5a91d1010cfbc80d /mm/mm_init.c
parent		6379693e3c2683a7c86f395e878534731ac7ed06 (diff)
mm/mm_init: use helper macro BITS_PER_LONG and BITS_PER_BYTE
It's more readable to use the helper macros BITS_PER_LONG and BITS_PER_BYTE.
No functional change intended.

Link: https://lkml.kernel.org/r/20230807023528.325191-1-linmiaohe@huawei.com
Signed-off-by: Miaohe Lin <linmiaohe@huawei.com>
Reviewed-by: David Hildenbrand <david@redhat.com>
Reviewed-by: Mike Rapoport (IBM) <rppt@kernel.org>
Signed-off-by: Andrew Morton <akpm@linux-foundation.org>
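For reference, here is a minimal userspace sketch (not kernel code) of why the substitution is purely cosmetic: BITS_PER_BYTE and BITS_PER_LONG, defined in include/linux/bits.h and include/asm-generic/bitsperlong.h respectively, expand to the same values as the open-coded expressions the patch removes. The definitions below only mirror that effect under the assumption of a conventional 8-bit-byte target; they are illustrative stand-ins, not the kernel's own macros.

/* Illustrative stand-ins for the kernel macros (assumption: 8-bit bytes). */
#include <limits.h>	/* CHAR_BIT */

#define BITS_PER_BYTE	8
#define BITS_PER_LONG	(BITS_PER_BYTE * (int)sizeof(unsigned long))

/* The open-coded expressions replaced by the patch are equivalent: */
_Static_assert(BITS_PER_BYTE == CHAR_BIT, "8 bits per byte assumed");
_Static_assert(BITS_PER_LONG == 8 * (int)sizeof(unsigned long),
	       "BITS_PER_LONG matches 8 * sizeof(unsigned long)");

int main(void)
{
	return 0;
}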
Diffstat (limited to 'mm/mm_init.c')
-rw-r--r--	mm/mm_init.c	6
1 file changed, 3 insertions(+), 3 deletions(-)
diff --git a/mm/mm_init.c b/mm/mm_init.c
index 2a19f3151661..50f2f34745af 100644
--- a/mm/mm_init.c
+++ b/mm/mm_init.c
@@ -79,7 +79,7 @@ void __init mminit_verify_pageflags_layout(void)
 	int shift, width;
 	unsigned long or_mask, add_mask;
 
-	shift = 8 * sizeof(unsigned long);
+	shift = BITS_PER_LONG;
 	width = shift - SECTIONS_WIDTH - NODES_WIDTH - ZONES_WIDTH
 		- LAST_CPUPID_SHIFT - KASAN_TAG_WIDTH - LRU_GEN_WIDTH - LRU_REFS_WIDTH;
 	mminit_dprintk(MMINIT_TRACE, "pageflags_layout_widths",
@@ -1426,9 +1426,9 @@ static unsigned long __init usemap_size(unsigned long zone_start_pfn, unsigned l
 	usemapsize = roundup(zonesize, pageblock_nr_pages);
 	usemapsize = usemapsize >> pageblock_order;
 	usemapsize *= NR_PAGEBLOCK_BITS;
-	usemapsize = roundup(usemapsize, 8 * sizeof(unsigned long));
+	usemapsize = roundup(usemapsize, BITS_PER_LONG);
 
-	return usemapsize / 8;
+	return usemapsize / BITS_PER_BYTE;
 }
 
 static void __ref setup_usemap(struct zone *zone)
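
To see what the second hunk's arithmetic does end to end, here is a standalone sketch of the usemap_size() rounding logic: the zone size is rounded up to whole pageblocks, each pageblock contributes NR_PAGEBLOCK_BITS bits, the bit count is rounded up to a multiple of BITS_PER_LONG, and the total is converted to bytes. The constants and example values below (NR_PAGEBLOCK_BITS = 4, a 512-page pageblock, a 2^18-page zone) are illustrative stand-ins, not the kernel's authoritative definitions, and dividing by pageblock_nr_pages stands in for the kernel's shift by pageblock_order, which is equivalent when pageblock_nr_pages == 1 << pageblock_order.

#include <stdio.h>

#define BITS_PER_BYTE		8UL
#define BITS_PER_LONG		(BITS_PER_BYTE * sizeof(unsigned long))
#define NR_PAGEBLOCK_BITS	4UL	/* illustrative: flag bits tracked per pageblock */

/* round x up to the next multiple of y */
static unsigned long roundup_ul(unsigned long x, unsigned long y)
{
	return ((x + y - 1) / y) * y;
}

/* mirrors the shape of usemap_size(): bytes needed for a zone's pageblock bitmap */
static unsigned long usemap_size_sketch(unsigned long zonesize,
					unsigned long pageblock_nr_pages)
{
	unsigned long usemapsize;

	usemapsize = roundup_ul(zonesize, pageblock_nr_pages);
	usemapsize /= pageblock_nr_pages;		/* number of pageblocks */
	usemapsize *= NR_PAGEBLOCK_BITS;		/* bits needed */
	usemapsize = roundup_ul(usemapsize, BITS_PER_LONG);

	return usemapsize / BITS_PER_BYTE;		/* bytes, long-aligned */
}

int main(void)
{
	/* hypothetical zone: 1 << 18 pages with 512-page pageblocks -> 256 bytes */
	printf("%lu bytes\n", usemap_size_sketch(1UL << 18, 512));
	return 0;
}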