Lines matching refs:gfpflags in mm/slub.c (SLUB allocator).
Each entry gives the source line number, the matching code, and the enclosing
function; the "argument" tag marks lines where gfpflags is a formal parameter.
2002 static inline bool pfmemalloc_match(struct page *page, gfp_t gfpflags);
2586 slab_out_of_memory(struct kmem_cache *s, gfp_t gfpflags, int nid) in slab_out_of_memory() argument
2594 if ((gfpflags & __GFP_NOWARN) || !__ratelimit(&slub_oom_rs)) in slab_out_of_memory()
2598 nid, gfpflags, &gfpflags); in slab_out_of_memory()
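
The two hits at 2594 and 2598 show how gfpflags gates the allocation-failure
warning: __GFP_NOWARN suppresses it outright, and a ratelimit throttles the
rest. A minimal sketch of that guard, assuming CONFIG_SLUB_DEBUG and omitting
the per-node statistics dump that follows the pr_warn() in the real function:

static void slab_out_of_memory(struct kmem_cache *s, gfp_t gfpflags, int nid)
{
	static DEFINE_RATELIMIT_STATE(slub_oom_rs, DEFAULT_RATELIMIT_INTERVAL,
				      DEFAULT_RATELIMIT_BURST);

	/* Callers passing __GFP_NOWARN opted out; everyone else is
	 * throttled by the shared ratelimit state (line 2594). */
	if ((gfpflags & __GFP_NOWARN) || !__ratelimit(&slub_oom_rs))
		return;

	/* %pGg pretty-prints the gfp flag names, hence &gfpflags (line 2598). */
	pr_warn("SLUB: Unable to allocate memory on node %d, gfp=%#x(%pGg)\n",
		nid, gfpflags, &gfpflags);
}
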
2657 static inline bool pfmemalloc_match(struct page *page, gfp_t gfpflags) in pfmemalloc_match() argument
2660 return gfp_pfmemalloc_allowed(gfpflags); in pfmemalloc_match()
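
pfmemalloc_match() decides whether a slab page drawn from the pfmemalloc
emergency reserves may satisfy this request. A sketch, assuming the
PageSlabPfmemalloc() page flag check that SLUB of this era performs before
the gfp_pfmemalloc_allowed() call seen at line 2660:

static inline bool pfmemalloc_match(struct page *page, gfp_t gfpflags)
{
	/* A page carved out of the emergency reserves may only serve
	 * callers whose gfp flags entitle them to those reserves. */
	if (unlikely(PageSlabPfmemalloc(page)))
		return gfp_pfmemalloc_allowed(gfpflags);	/* line 2660 */

	return true;
}
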
2718 static void *___slab_alloc(struct kmem_cache *s, gfp_t gfpflags, int node, in ___slab_alloc() argument
2759 if (unlikely(!pfmemalloc_match(page, gfpflags))) { in ___slab_alloc()
2800 freelist = new_slab_objects(s, gfpflags, node, &c); in ___slab_alloc()
2803 slab_out_of_memory(s, gfpflags, node); in ___slab_alloc()
2808 if (likely(!kmem_cache_debug(s) && pfmemalloc_match(page, gfpflags))) in ___slab_alloc()
2824 static void *__slab_alloc(struct kmem_cache *s, gfp_t gfpflags, int node, in __slab_alloc() argument
2840 p = ___slab_alloc(s, gfpflags, node, addr, c); in __slab_alloc()
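
The slow path ___slab_alloc() consults gfpflags several times, as the hits at
2759, 2800, 2803 and 2808 show. An abridged control-flow sketch follows;
___slab_alloc_sketch is a made-up name, and the real function's locking,
retry loops and debug handling are left out, so read it as flow documentation
rather than a drop-in:

static void *___slab_alloc_sketch(struct kmem_cache *s, gfp_t gfpflags,
				  int node, unsigned long addr,
				  struct kmem_cache_cpu *c)
{
	struct page *page = c->page;
	void *freelist;

	/* Line 2759: a cpu slab built from pfmemalloc reserves must not
	 * serve a request that is not entitled to them; drop it. */
	if (page && unlikely(!pfmemalloc_match(page, gfpflags)))
		deactivate_slab(s, page, c->freelist, c);

	/* Line 2800: refill from a partial list or allocate a new slab. */
	freelist = new_slab_objects(s, gfpflags, node, &c);
	if (unlikely(!freelist)) {
		slab_out_of_memory(s, gfpflags, node);	/* line 2803 */
		return NULL;
	}

	/* Line 2808: with debugging off and a matching page, the new
	 * freelist stays on the cpu slab; otherwise the slab is
	 * deactivated and the first object handed out directly. */
	if (unlikely(kmem_cache_debug(s) ||
		     !pfmemalloc_match(c->page, gfpflags)))
		deactivate_slab(s, c->page, get_freepointer(s, freelist), c);

	return freelist;
}
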
2868 gfp_t gfpflags, int node, unsigned long addr, size_t orig_size) in slab_alloc_node() argument
2877 s = slab_pre_alloc_hook(s, &objcg, 1, gfpflags); in slab_alloc_node()
2881 object = kfence_alloc(s, orig_size, gfpflags); in slab_alloc_node()
2922 object = __slab_alloc(s, gfpflags, node, addr, c); in slab_alloc_node()
2953 init = slab_want_init_on_alloc(gfpflags, s); in slab_alloc_node()
2956 slab_post_alloc_hook(s, objcg, gfpflags, 1, &object, init); in slab_alloc_node()
2962 gfp_t gfpflags, unsigned long addr, size_t orig_size) in slab_alloc() argument
2964 return slab_alloc_node(s, gfpflags, NUMA_NO_NODE, addr, orig_size); in slab_alloc()
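
slab_alloc_node() is the common entry that the wrappers below funnel into;
the hits at 2877 through 2956 trace gfpflags through the pre-alloc hook,
KFENCE, the allocation proper and the post-alloc hook. A skeleton that
collapses the lockless per-cpu fast path (the tid/cmpxchg dance around line
2922) into a single __slab_alloc() call:

static __always_inline void *slab_alloc_node(struct kmem_cache *s,
		gfp_t gfpflags, int node, unsigned long addr, size_t orig_size)
{
	struct obj_cgroup *objcg = NULL;
	void *object;
	bool init;

	/* Line 2877: memcg charging and may-sleep checks; can fail. */
	s = slab_pre_alloc_hook(s, &objcg, 1, gfpflags);
	if (!s)
		return NULL;

	/* Line 2881: KFENCE may intercept the allocation entirely. */
	object = kfence_alloc(s, orig_size, gfpflags);
	if (unlikely(object))
		goto out;

	/* Line 2922: fast path elided here; go straight to the slow path. */
	object = __slab_alloc(s, gfpflags, node, addr,
			      raw_cpu_ptr(s->cpu_slab));

out:
	/* Lines 2953-2956: zero-on-alloc policy, then the post hooks. */
	init = slab_want_init_on_alloc(gfpflags, s);
	slab_post_alloc_hook(s, objcg, gfpflags, 1, &object, init);
	return object;
}
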
2967 void *kmem_cache_alloc(struct kmem_cache *s, gfp_t gfpflags) in kmem_cache_alloc() argument
2969 void *ret = slab_alloc(s, gfpflags, _RET_IP_, s->object_size); in kmem_cache_alloc()
2972 s->size, gfpflags); in kmem_cache_alloc()
2979 void *kmem_cache_alloc_trace(struct kmem_cache *s, gfp_t gfpflags, size_t size) in kmem_cache_alloc_trace() argument
2981 void *ret = slab_alloc(s, gfpflags, _RET_IP_, size); in kmem_cache_alloc_trace()
2982 trace_kmalloc(_RET_IP_, ret, size, s->size, gfpflags); in kmem_cache_alloc_trace()
2983 ret = kasan_kmalloc(s, ret, size, gfpflags); in kmem_cache_alloc_trace()
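
The exported entry points at 2967 and 2979 are thin wrappers that differ only
in their tracepoint and in KASAN handling; kmem_cache_alloc_trace() backs
kmalloc() for compile-time-known sizes, so it records the requested size and
lets KASAN poison the slack. Reconstructed from the listed lines:

void *kmem_cache_alloc(struct kmem_cache *s, gfp_t gfpflags)
{
	void *ret = slab_alloc(s, gfpflags, _RET_IP_, s->object_size);

	trace_kmem_cache_alloc(_RET_IP_, ret, s->object_size,
			       s->size, gfpflags);	/* line 2972 */
	return ret;
}
EXPORT_SYMBOL(kmem_cache_alloc);

void *kmem_cache_alloc_trace(struct kmem_cache *s, gfp_t gfpflags, size_t size)
{
	void *ret = slab_alloc(s, gfpflags, _RET_IP_, size);

	trace_kmalloc(_RET_IP_, ret, size, s->size, gfpflags);
	/* Repoison so only the requested size is accessible (line 2983). */
	ret = kasan_kmalloc(s, ret, size, gfpflags);
	return ret;
}
EXPORT_SYMBOL(kmem_cache_alloc_trace);
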
2990 void *kmem_cache_alloc_node(struct kmem_cache *s, gfp_t gfpflags, int node) in kmem_cache_alloc_node() argument
2992 void *ret = slab_alloc_node(s, gfpflags, node, _RET_IP_, s->object_size); in kmem_cache_alloc_node()
2995 s->object_size, s->size, gfpflags, node); in kmem_cache_alloc_node()
3003 gfp_t gfpflags, in kmem_cache_alloc_node_trace() argument
3006 void *ret = slab_alloc_node(s, gfpflags, node, _RET_IP_, size); in kmem_cache_alloc_node_trace()
3009 size, s->size, gfpflags, node); in kmem_cache_alloc_node_trace()
3011 ret = kasan_kmalloc(s, ret, size, gfpflags); in kmem_cache_alloc_node_trace()
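
The _node variants at 2990 through 3011 are the same wrappers with an
explicit NUMA node: gfpflags still controls reclaim and warning behavior
while node pins placement. A hypothetical caller (struct foo, foo_cache and
foo_alloc_on_node are made up for illustration) tying this back to the
__GFP_NOWARN check at line 2594:

/* Hypothetical helper: allocate on a specific node and let the caller
 * handle failure quietly instead of triggering the SLUB OOM warning
 * that line 2594 suppresses for __GFP_NOWARN requests. */
static struct foo *foo_alloc_on_node(struct kmem_cache *foo_cache, int node)
{
	return kmem_cache_alloc_node(foo_cache,
				     GFP_KERNEL | __GFP_NOWARN, node);
}
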
4532 void *__kmalloc_track_caller(size_t size, gfp_t gfpflags, unsigned long caller) in __kmalloc_track_caller() argument
4538 return kmalloc_large(size, gfpflags); in __kmalloc_track_caller()
4540 s = kmalloc_slab(size, gfpflags); in __kmalloc_track_caller()
4545 ret = slab_alloc(s, gfpflags, caller, size); in __kmalloc_track_caller()
4548 trace_kmalloc(caller, ret, size, s->size, gfpflags); in __kmalloc_track_caller()
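
__kmalloc_track_caller() is kmalloc() with an explicit caller address so
tracing attributes the allocation to the real call site. A sketch built from
the listed lines, assuming the KMALLOC_MAX_CACHE_SIZE cutoff and
ZERO_OR_NULL_PTR guard that contemporary SLUB places around them:

void *__kmalloc_track_caller(size_t size, gfp_t gfpflags, unsigned long caller)
{
	struct kmem_cache *s;
	void *ret;

	/* Too big for any kmalloc cache: go to the page allocator. */
	if (unlikely(size > KMALLOC_MAX_CACHE_SIZE))
		return kmalloc_large(size, gfpflags);	/* line 4538 */

	s = kmalloc_slab(size, gfpflags);		/* line 4540 */
	if (unlikely(ZERO_OR_NULL_PTR(s)))
		return s;

	ret = slab_alloc(s, gfpflags, caller, size);	/* line 4545 */

	/* The passed-in caller, not _RET_IP_, lands in the trace. */
	trace_kmalloc(caller, ret, size, s->size, gfpflags);
	return ret;
}
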
4555 void *__kmalloc_node_track_caller(size_t size, gfp_t gfpflags, in __kmalloc_node_track_caller() argument
4562 ret = kmalloc_large_node(size, gfpflags, node); in __kmalloc_node_track_caller()
4566 gfpflags, node); in __kmalloc_node_track_caller()
4571 s = kmalloc_slab(size, gfpflags); in __kmalloc_node_track_caller()
4576 ret = slab_alloc_node(s, gfpflags, node, caller, size); in __kmalloc_node_track_caller()
4579 trace_kmalloc_node(caller, ret, size, s->size, gfpflags, node); in __kmalloc_node_track_caller()
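
The node-aware twin adds one wrinkle, visible at 4562 through 4566: the
large-allocation path fires its own tracepoint before returning, since it
never reaches a kmalloc cache. A sketch on the same assumptions as above:

void *__kmalloc_node_track_caller(size_t size, gfp_t gfpflags,
				  int node, unsigned long caller)
{
	struct kmem_cache *s;
	void *ret;

	if (unlikely(size > KMALLOC_MAX_CACHE_SIZE)) {
		ret = kmalloc_large_node(size, gfpflags, node);	/* line 4562 */

		/* Lines 4564-4566: trace here; the cache path below is
		 * never reached for large requests. */
		trace_kmalloc_node(caller, ret, size,
				   PAGE_SIZE << get_order(size),
				   gfpflags, node);
		return ret;
	}

	s = kmalloc_slab(size, gfpflags);			/* line 4571 */
	if (unlikely(ZERO_OR_NULL_PTR(s)))
		return s;

	ret = slab_alloc_node(s, gfpflags, node, caller, size);	/* line 4576 */

	trace_kmalloc_node(caller, ret, size, s->size,
			   gfpflags, node);			/* line 4579 */
	return ret;
}
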