/* SPDX-License-Identifier: GPL-2.0 */
#include <linux/node.h>
#include <linux/mmzone.h>
#include <linux/compaction.h>
/*
 * The order of these masks is important. Matching masks will be seen
 * first and the left over flags will end up showing by themselves.
 *
 * For example, if we have GFP_KERNEL before GFP_USER we will get:
 *
 *  GFP_KERNEL|GFP_HARDWALL
 *
 * Thus most bits set go first.
 */

#define __def_gfpflag_names						\
	{(unsigned long)GFP_TRANSHUGE,		"GFP_TRANSHUGE"},	\
	{(unsigned long)GFP_TRANSHUGE_LIGHT,	"GFP_TRANSHUGE_LIGHT"},	\
	{(unsigned long)GFP_HIGHUSER_MOVABLE,	"GFP_HIGHUSER_MOVABLE"},\
	{(unsigned long)GFP_HIGHUSER,		"GFP_HIGHUSER"},	\
	{(unsigned long)GFP_USER,		"GFP_USER"},		\
	{(unsigned long)GFP_KERNEL_ACCOUNT,	"GFP_KERNEL_ACCOUNT"},	\
	{(unsigned long)GFP_KERNEL,		"GFP_KERNEL"},		\
	{(unsigned long)GFP_NOFS,		"GFP_NOFS"},		\
	{(unsigned long)GFP_ATOMIC,		"GFP_ATOMIC"},		\
	{(unsigned long)GFP_NOIO,		"GFP_NOIO"},		\
	{(unsigned long)GFP_NOWAIT,		"GFP_NOWAIT"},		\
	{(unsigned long)GFP_DMA,		"GFP_DMA"},		\
	{(unsigned long)__GFP_HIGHMEM,		"__GFP_HIGHMEM"},	\
	{(unsigned long)GFP_DMA32,		"GFP_DMA32"},		\
	{(unsigned long)__GFP_HIGH,		"__GFP_HIGH"},		\
	{(unsigned long)__GFP_ATOMIC,		"__GFP_ATOMIC"},	\
	{(unsigned long)__GFP_IO,		"__GFP_IO"},		\
	{(unsigned long)__GFP_FS,		"__GFP_FS"},		\
	{(unsigned long)__GFP_NOWARN,		"__GFP_NOWARN"},	\
	{(unsigned long)__GFP_RETRY_MAYFAIL,	"__GFP_RETRY_MAYFAIL"},	\
	{(unsigned long)__GFP_NOFAIL,		"__GFP_NOFAIL"},	\
	{(unsigned long)__GFP_NORETRY,		"__GFP_NORETRY"},	\
	{(unsigned long)__GFP_COMP,		"__GFP_COMP"},		\
	{(unsigned long)__GFP_ZERO,		"__GFP_ZERO"},		\
	{(unsigned long)__GFP_NOMEMALLOC,	"__GFP_NOMEMALLOC"},	\
	{(unsigned long)__GFP_MEMALLOC,		"__GFP_MEMALLOC"},	\
	{(unsigned long)__GFP_HARDWALL,		"__GFP_HARDWALL"},	\
	{(unsigned long)__GFP_THISNODE,		"__GFP_THISNODE"},	\
	{(unsigned long)__GFP_RECLAIMABLE,	"__GFP_RECLAIMABLE"},	\
	{(unsigned long)__GFP_MOVABLE,		"__GFP_MOVABLE"},	\
	{(unsigned long)__GFP_ACCOUNT,		"__GFP_ACCOUNT"},	\
	{(unsigned long)__GFP_WRITE,		"__GFP_WRITE"},		\
	{(unsigned long)__GFP_RECLAIM,		"__GFP_RECLAIM"},	\
	{(unsigned long)__GFP_DIRECT_RECLAIM,	"__GFP_DIRECT_RECLAIM"},\
	{(unsigned long)__GFP_KSWAPD_RECLAIM,	"__GFP_KSWAPD_RECLAIM"},\
	{(unsigned long)__GFP_ZEROTAGS,		"__GFP_ZEROTAGS"},	\
	{(unsigned long)__GFP_SKIP_KASAN_POISON,"__GFP_SKIP_KASAN_POISON"}\

#define show_gfp_flags(flags)						\
	(flags) ? __print_flags(flags, "|",				\
	__def_gfpflag_names						\
	) : "none"
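
/*
 * Usage sketch (illustrative only; the event and field names are
 * hypothetical): a trace event that records a gfp mask can print it
 * symbolically by passing the saved value to show_gfp_flags() in its
 * TP_printk().
 *
 *	TRACE_EVENT(example_alloc,
 *		TP_PROTO(gfp_t gfp_flags),
 *		TP_ARGS(gfp_flags),
 *		TP_STRUCT__entry(
 *			__field(unsigned long, gfp_flags)
 *		),
 *		TP_fast_assign(
 *			__entry->gfp_flags = (__force unsigned long)gfp_flags;
 *		),
 *		TP_printk("gfp_flags=%s", show_gfp_flags(__entry->gfp_flags))
 *	);
 *
 * For a GFP_KERNEL | __GFP_NOWARN allocation this prints
 * "GFP_KERNEL|__GFP_NOWARN": the composite GFP_KERNEL mask is matched
 * first and the left-over __GFP_NOWARN bit is shown by itself, exactly
 * as the ordering comment above __def_gfpflag_names describes.
 */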

#ifdef CONFIG_MMU
#define IF_HAVE_PG_MLOCK(flag,string) ,{1UL << flag, string}
#else
#define IF_HAVE_PG_MLOCK(flag,string)
#endif

#ifdef CONFIG_ARCH_USES_PG_UNCACHED
#define IF_HAVE_PG_UNCACHED(flag,string) ,{1UL << flag, string}
#else
#define IF_HAVE_PG_UNCACHED(flag,string)
#endif

#ifdef CONFIG_MEMORY_FAILURE
#define IF_HAVE_PG_HWPOISON(flag,string) ,{1UL << flag, string}
#else
#define IF_HAVE_PG_HWPOISON(flag,string)
#endif

#if defined(CONFIG_PAGE_IDLE_FLAG) && defined(CONFIG_64BIT)
#define IF_HAVE_PG_IDLE(flag,string) ,{1UL << flag, string}
#else
#define IF_HAVE_PG_IDLE(flag,string)
#endif

#ifdef CONFIG_64BIT
#define IF_HAVE_PG_ARCH_2(flag,string) ,{1UL << flag, string}
#else
#define IF_HAVE_PG_ARCH_2(flag,string)
#endif

#ifdef CONFIG_KASAN_HW_TAGS
#define IF_HAVE_PG_SKIP_KASAN_POISON(flag,string) ,{1UL << flag, string}
#else
#define IF_HAVE_PG_SKIP_KASAN_POISON(flag,string)
#endif

#define __def_pageflag_names						\
	{1UL << PG_locked,		"locked"	},		\
	{1UL << PG_waiters,		"waiters"	},		\
	{1UL << PG_error,		"error"		},		\
	{1UL << PG_referenced,		"referenced"	},		\
	{1UL << PG_uptodate,		"uptodate"	},		\
	{1UL << PG_dirty,		"dirty"		},		\
	{1UL << PG_lru,			"lru"		},		\
	{1UL << PG_active,		"active"	},		\
	{1UL << PG_workingset,		"workingset"	},		\
	{1UL << PG_slab,		"slab"		},		\
	{1UL << PG_owner_priv_1,	"owner_priv_1"	},		\
	{1UL << PG_arch_1,		"arch_1"	},		\
	{1UL << PG_reserved,		"reserved"	},		\
	{1UL << PG_private,		"private"	},		\
	{1UL << PG_private_2,		"private_2"	},		\
	{1UL << PG_writeback,		"writeback"	},		\
	{1UL << PG_head,		"head"		},		\
	{1UL << PG_mappedtodisk,	"mappedtodisk"	},		\
	{1UL << PG_reclaim,		"reclaim"	},		\
	{1UL << PG_swapbacked,		"swapbacked"	},		\
	{1UL << PG_unevictable,		"unevictable"	}		\
IF_HAVE_PG_MLOCK(PG_mlocked,		"mlocked"	)		\
IF_HAVE_PG_UNCACHED(PG_uncached,	"uncached"	)		\
IF_HAVE_PG_HWPOISON(PG_hwpoison,	"hwpoison"	)		\
IF_HAVE_PG_IDLE(PG_young,		"young"		)		\
IF_HAVE_PG_IDLE(PG_idle,		"idle"		)		\
IF_HAVE_PG_ARCH_2(PG_arch_2,		"arch_2"	)		\
IF_HAVE_PG_SKIP_KASAN_POISON(PG_skip_kasan_poison, "skip_kasan_poison")

#define show_page_flags(flags)						\
	(flags) ? __print_flags(flags, "|",				\
	__def_pageflag_names						\
	) : "none"
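
/*
 * These name tables are not limited to __print_flags().  As an
 * illustrative sketch (the "example_" identifiers are hypothetical),
 * a consumer could expand __def_pageflag_names into an array of
 * struct trace_print_flags (mask/name pairs from
 * <linux/tracepoint-defs.h>) and walk it to decode a raw flags word:
 *
 *	static const struct trace_print_flags example_pageflag_names[] = {
 *		__def_pageflag_names,
 *		{0, NULL}
 *	};
 *
 *	static void example_dump_page_flags(unsigned long flags)
 *	{
 *		const struct trace_print_flags *p;
 *
 *		for (p = example_pageflag_names; p->name; p++)
 *			if (flags & p->mask)
 *				pr_cont("%s|", p->name);
 *	}
 */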

#if defined(CONFIG_X86)
#define __VM_ARCH_SPECIFIC_1 {VM_PAT,		"pat"		}
#elif defined(CONFIG_PPC)
#define __VM_ARCH_SPECIFIC_1 {VM_SAO,		"sao"		}
#elif defined(CONFIG_PARISC) || defined(CONFIG_IA64)
#define __VM_ARCH_SPECIFIC_1 {VM_GROWSUP,	"growsup"	}
#elif !defined(CONFIG_MMU)
#define __VM_ARCH_SPECIFIC_1 {VM_MAPPED_COPY,	"mappedcopy"	}
#else
#define __VM_ARCH_SPECIFIC_1 {VM_ARCH_1,	"arch_1"	}
#endif

#ifdef CONFIG_MEM_SOFT_DIRTY
#define IF_HAVE_VM_SOFTDIRTY(flag,name) {flag, name },
#else
#define IF_HAVE_VM_SOFTDIRTY(flag,name)
#endif

#ifdef CONFIG_HAVE_ARCH_USERFAULTFD_MINOR
# define IF_HAVE_UFFD_MINOR(flag, name) {flag, name},
#else
# define IF_HAVE_UFFD_MINOR(flag, name)
#endif

#define __def_vmaflag_names						\
	{VM_READ,			"read"		},		\
	{VM_WRITE,			"write"		},		\
	{VM_EXEC,			"exec"		},		\
	{VM_SHARED,			"shared"	},		\
	{VM_MAYREAD,			"mayread"	},		\
	{VM_MAYWRITE,			"maywrite"	},		\
	{VM_MAYEXEC,			"mayexec"	},		\
	{VM_MAYSHARE,			"mayshare"	},		\
	{VM_GROWSDOWN,			"growsdown"	},		\
	{VM_UFFD_MISSING,		"uffd_missing"	},		\
IF_HAVE_UFFD_MINOR(VM_UFFD_MINOR,	"uffd_minor"	)		\
	{VM_PFNMAP,			"pfnmap"	},		\
	{VM_DENYWRITE,			"denywrite"	},		\
	{VM_UFFD_WP,			"uffd_wp"	},		\
	{VM_LOCKED,			"locked"	},		\
	{VM_IO,				"io"		},		\
	{VM_SEQ_READ,			"seqread"	},		\
	{VM_RAND_READ,			"randread"	},		\
	{VM_DONTCOPY,			"dontcopy"	},		\
	{VM_DONTEXPAND,			"dontexpand"	},		\
	{VM_LOCKONFAULT,		"lockonfault"	},		\
	{VM_ACCOUNT,			"account"	},		\
	{VM_NORESERVE,			"noreserve"	},		\
	{VM_HUGETLB,			"hugetlb"	},		\
	{VM_SYNC,			"sync"		},		\
	__VM_ARCH_SPECIFIC_1				,		\
	{VM_WIPEONFORK,			"wipeonfork"	},		\
	{VM_DONTDUMP,			"dontdump"	},		\
IF_HAVE_VM_SOFTDIRTY(VM_SOFTDIRTY,	"softdirty"	)		\
	{VM_MIXEDMAP,			"mixedmap"	},		\
	{VM_HUGEPAGE,			"hugepage"	},		\
	{VM_NOHUGEPAGE,			"nohugepage"	},		\
	{VM_MERGEABLE,			"mergeable"	}		\

#define show_vma_flags(flags)						\
	(flags) ? __print_flags(flags, "|",				\
	__def_vmaflag_names						\
	) : "none"
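
/*
 * Illustrative sketch: a trace event that saved vma->vm_flags into an
 * unsigned long field (the field name here is hypothetical) can decode
 * it with show_vma_flags() in its TP_printk():
 *
 *	TP_printk("flags=%s", show_vma_flags(__entry->vm_flags))
 *
 * A typical private anonymous read-write mapping would then show up as
 * something like "read|write|mayread|maywrite|mayexec|account".
 */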

#ifdef CONFIG_COMPACTION
#define COMPACTION_STATUS					\
	EM( COMPACT_SKIPPED,		"skipped")		\
	EM( COMPACT_DEFERRED,		"deferred")		\
	EM( COMPACT_CONTINUE,		"continue")		\
	EM( COMPACT_SUCCESS,		"success")		\
	EM( COMPACT_PARTIAL_SKIPPED,	"partial_skipped")	\
	EM( COMPACT_COMPLETE,		"complete")		\
	EM( COMPACT_NO_SUITABLE_PAGE,	"no_suitable_page")	\
	EM( COMPACT_NOT_SUITABLE_ZONE,	"not_suitable_zone")	\
	EMe(COMPACT_CONTENDED,		"contended")

/* High-level compaction status feedback */
#define COMPACTION_FAILED	1
#define COMPACTION_WITHDRAWN	2
#define COMPACTION_PROGRESS	3

#define compact_result_to_feedback(result)	\
({						\
	enum compact_result __result = result;	\
	(compaction_failed(__result)) ? COMPACTION_FAILED : \
		(compaction_withdrawn(__result)) ? COMPACTION_WITHDRAWN : COMPACTION_PROGRESS; \
})

#define COMPACTION_FEEDBACK				\
	EM(COMPACTION_FAILED,		"failed")	\
	EM(COMPACTION_WITHDRAWN,	"withdrawn")	\
	EMe(COMPACTION_PROGRESS,	"progress")

#define COMPACTION_PRIORITY						\
	EM(COMPACT_PRIO_SYNC_FULL,	"COMPACT_PRIO_SYNC_FULL")	\
	EM(COMPACT_PRIO_SYNC_LIGHT,	"COMPACT_PRIO_SYNC_LIGHT")	\
	EMe(COMPACT_PRIO_ASYNC,		"COMPACT_PRIO_ASYNC")
#else
#define COMPACTION_STATUS
#define COMPACTION_PRIORITY
#define COMPACTION_FEEDBACK
#endif
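
/*
 * Illustrative sketch: compact_result_to_feedback() collapses the
 * fine-grained enum compact_result values into the three coarse
 * feedback values above, which a trace event can print symbolically
 * (the __entry field name here is hypothetical):
 *
 *	TP_printk("ret=%s",
 *		__print_symbolic(compact_result_to_feedback(__entry->ret),
 *				 COMPACTION_FEEDBACK))
 *
 * A COMPACT_DEFERRED result, for instance, is a withdrawn compaction
 * attempt and is reported as "withdrawn".
 */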

#ifdef CONFIG_ZONE_DMA
#define IFDEF_ZONE_DMA(X) X
#else
#define IFDEF_ZONE_DMA(X)
#endif

#ifdef CONFIG_ZONE_DMA32
#define IFDEF_ZONE_DMA32(X) X
#else
#define IFDEF_ZONE_DMA32(X)
#endif

#ifdef CONFIG_HIGHMEM
#define IFDEF_ZONE_HIGHMEM(X) X
#else
#define IFDEF_ZONE_HIGHMEM(X)
#endif

#define ZONE_TYPE						\
	IFDEF_ZONE_DMA(		EM (ZONE_DMA,	 "DMA"))	\
	IFDEF_ZONE_DMA32(	EM (ZONE_DMA32,	 "DMA32"))	\
				EM (ZONE_NORMAL, "Normal")	\
	IFDEF_ZONE_HIGHMEM(	EM (ZONE_HIGHMEM,"HighMem"))	\
				EMe(ZONE_MOVABLE,"Movable")

#define LRU_NAMES						\
		EM (LRU_INACTIVE_ANON,	"inactive_anon")	\
		EM (LRU_ACTIVE_ANON,	"active_anon")		\
		EM (LRU_INACTIVE_FILE,	"inactive_file")	\
		EM (LRU_ACTIVE_FILE,	"active_file")		\
		EMe(LRU_UNEVICTABLE,	"unevictable")

/*
 * First define the enums in the above macros to be exported to userspace
 * via TRACE_DEFINE_ENUM().
 */
#undef EM
#undef EMe
#define EM(a, b)	TRACE_DEFINE_ENUM(a);
#define EMe(a, b)	TRACE_DEFINE_ENUM(a);

COMPACTION_STATUS
COMPACTION_PRIORITY
/* The COMPACTION_FEEDBACK values are defines, not enums. Not needed here. */
ZONE_TYPE
LRU_NAMES

/*
 * Now redefine the EM() and EMe() macros to map the enums to the strings
 * that will be printed in the output.
 */
#undef EM
#undef EMe
#define EM(a, b)	{a, b},
#define EMe(a, b)	{a, b}
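
/*
 * With this second set of definitions in place, a trace header that
 * includes this file can expand one of the lists above directly as the
 * symbol table of __print_symbolic().  Illustrative sketch (the
 * __entry field names are hypothetical):
 *
 *	TP_printk("zone=%s lru=%s",
 *		__print_symbolic(__entry->zone_idx, ZONE_TYPE),
 *		__print_symbolic(__entry->lru, LRU_NAMES))
 *
 * The earlier TRACE_DEFINE_ENUM() pass exports the enum values so that
 * user-space tools parsing the event's print format can resolve names
 * such as ZONE_NORMAL or LRU_ACTIVE_FILE that are otherwise unknown
 * outside the kernel.
 */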