
Searched defs:kmem_cache (Results 1 – 7 of 7) sorted by relevance

/linux-6.1.9/include/linux/
slab_def.h:12  struct kmem_cache {
    13      struct array_cache __percpu *cpu_cache;
    16      unsigned int batchcount;
    17      unsigned int limit;
    18      unsigned int shared;
    20      unsigned int size;
    21      struct reciprocal_value reciprocal_buffer_size;
    24      slab_flags_t flags;              /* constant flags */
    25      unsigned int num;                /* # of objs per slab */
    29      unsigned int gfporder;
    [all …]
slub_def.h:90  struct kmem_cache {
    91      struct kmem_cache_cpu __percpu *cpu_slab;
    105     struct kmem_cache_order_objects oo;
    108     struct kmem_cache_order_objects min;
    109     gfp_t allocflags;                /* gfp flags to use on each alloc */
    110     int refcount;                    /* Refcount for slab cache destroy */
    111     void (*ctor)(void *);
    112     unsigned int inuse;              /* Offset to metadata */
    113     unsigned int align;              /* Alignment */
    114     unsigned int red_left_pad;       /* Left redzone padding size */
    [all …]
kvm_types.h:96  struct kmem_cache *kmem_cache;  (member)
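
Note: the two competing definitions above are selected at build time. In 6.1, include/linux/slab.h pulls in one of them depending on the configured allocator, roughly as in this paraphrased sketch (from memory, not an exact quote of the header; the CONFIG_SLOB variant has no separate def header and lives in mm/slab.h, shown further down):

    #ifdef CONFIG_SLAB
    #include <linux/slab_def.h>
    #endif

    #ifdef CONFIG_SLUB
    #include <linux/slub_def.h>
    #endif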
/linux-6.1.9/tools/testing/radix-tree/
linux.c:19  struct kmem_cache {
    31      void kmem_cache_set_non_kernel(struct kmem_cache *cachep, unsigned int val)  (argument in kmem_cache_set_non_kernel())
/linux-6.1.9/mm/
slab.h:205  struct kmem_cache {
    206     unsigned int object_size;        /* The original size of the object */
    207     unsigned int size;               /* The aligned/padded/added on size */
    208     unsigned int align;              /* Alignment as calculated */
    209     slab_flags_t flags;              /* Active flags on the slab */
    210     unsigned int useroffset;         /* Usercopy region offset */
    211     unsigned int usersize;           /* Usercopy region size */
    212     const char *name;                /* Slab name for sysfs */
    213     int refcount;                    /* Use counter */
    214     void (*ctor)(void *);            /* Called on object slot creation */
    [all …]
slab_common.c:38  struct kmem_cache *kmem_cache;  (variable)
/linux-6.1.9/arch/x86/events/intel/
lbr.c:621  struct kmem_cache *kmem_cache;  (local in release_lbr_buffers())
    640     struct kmem_cache *kmem_cache;  (local in reserve_lbr_buffers())
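
For context, a minimal sketch of how the kmem_cache definitions listed above are used through the public slab API (kmem_cache_create/alloc/free/destroy from <linux/slab.h>); the struct foo type and the "foo_cache" name are hypothetical, invented for illustration:

    #include <linux/init.h>
    #include <linux/slab.h>

    struct foo {                            /* hypothetical object type */
            int id;
            char name[16];
    };

    static struct kmem_cache *foo_cachep;

    static int __init foo_cache_init(void)
    {
            /* One cache of fixed-size objects backed by a struct kmem_cache. */
            foo_cachep = kmem_cache_create("foo_cache", sizeof(struct foo),
                                           0, SLAB_HWCACHE_ALIGN, NULL);
            return foo_cachep ? 0 : -ENOMEM;
    }

    static struct foo *foo_alloc(void)
    {
            /* Allocate one object from the cache's slabs. */
            return kmem_cache_alloc(foo_cachep, GFP_KERNEL);
    }

    static void foo_free(struct foo *f)
    {
            kmem_cache_free(foo_cachep, f);
    }

    static void foo_cache_exit(void)
    {
            kmem_cache_destroy(foo_cachep);
    }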