
Searched refs:kmem_cache (Results 1 – 25 of 564) sorted by relevance


/linux-6.6.21/mm/
slab.h
47 struct kmem_cache *slab_cache;
60 struct kmem_cache *slab_cache;
272 extern struct kmem_cache *kmem_cache;
285 struct kmem_cache *kmalloc_slab(size_t size, gfp_t flags, unsigned long caller);
287 void *__kmem_cache_alloc_node(struct kmem_cache *s, gfp_t gfpflags,
290 void __kmem_cache_free(struct kmem_cache *s, void *x, unsigned long caller);
295 int __kmem_cache_create(struct kmem_cache *, slab_flags_t flags);
299 extern void create_boot_cache(struct kmem_cache *, const char *name,
303 int slab_unmergeable(struct kmem_cache *s);
304 struct kmem_cache *find_mergeable(unsigned size, unsigned align,
[all …]
slub.c
212 static inline bool kmem_cache_debug(struct kmem_cache *s) in kmem_cache_debug()
217 static inline bool slub_debug_orig_size(struct kmem_cache *s) in slub_debug_orig_size()
223 void *fixup_red_left(struct kmem_cache *s, void *p) in fixup_red_left()
231 static inline bool kmem_cache_has_cpu_partial(struct kmem_cache *s) in kmem_cache_has_cpu_partial()
319 static int sysfs_slab_add(struct kmem_cache *);
320 static int sysfs_slab_alias(struct kmem_cache *, const char *);
322 static inline int sysfs_slab_add(struct kmem_cache *s) { return 0; } in sysfs_slab_add()
323 static inline int sysfs_slab_alias(struct kmem_cache *s, const char *p) in sysfs_slab_alias()
328 static void debugfs_slab_add(struct kmem_cache *);
330 static inline void debugfs_slab_add(struct kmem_cache *s) { } in debugfs_slab_add()
[all …]
slab.c
210 static int drain_freelist(struct kmem_cache *cache,
212 static void free_block(struct kmem_cache *cachep, void **objpp, int len,
214 static void slabs_destroy(struct kmem_cache *cachep, struct list_head *list);
215 static int enable_cpucache(struct kmem_cache *cachep, gfp_t gfp);
218 static inline void fixup_objfreelist_debug(struct kmem_cache *cachep,
220 static inline void fixup_slab_list(struct kmem_cache *cachep,
327 static int obj_offset(struct kmem_cache *cachep) in obj_offset()
332 static unsigned long long *dbg_redzone1(struct kmem_cache *cachep, void *objp) in dbg_redzone1()
339 static unsigned long long *dbg_redzone2(struct kmem_cache *cachep, void *objp) in dbg_redzone2()
350 static void **dbg_userword(struct kmem_cache *cachep, void *objp) in dbg_userword()
[all …]
slab_common.c
40 struct kmem_cache *kmem_cache; variable
85 unsigned int kmem_cache_size(struct kmem_cache *s) in kmem_cache_size()
140 int slab_unmergeable(struct kmem_cache *s) in slab_unmergeable()
162 struct kmem_cache *find_mergeable(unsigned int size, unsigned int align, in find_mergeable()
165 struct kmem_cache *s; in find_mergeable()
209 static struct kmem_cache *create_cache(const char *name, in create_cache()
213 struct kmem_cache *root_cache) in create_cache()
215 struct kmem_cache *s; in create_cache()
222 s = kmem_cache_zalloc(kmem_cache, GFP_KERNEL); in create_cache()
244 kmem_cache_free(kmem_cache, s); in create_cache()
[all …]
/linux-6.6.21/include/linux/
kasan.h
11 struct kmem_cache;
132 void __kasan_unpoison_object_data(struct kmem_cache *cache, void *object);
133 static __always_inline void kasan_unpoison_object_data(struct kmem_cache *cache, in kasan_unpoison_object_data()
140 void __kasan_poison_object_data(struct kmem_cache *cache, void *object);
141 static __always_inline void kasan_poison_object_data(struct kmem_cache *cache, in kasan_poison_object_data()
148 void * __must_check __kasan_init_slab_obj(struct kmem_cache *cache,
151 struct kmem_cache *cache, const void *object) in kasan_init_slab_obj()
158 bool __kasan_slab_free(struct kmem_cache *s, void *object,
160 static __always_inline bool kasan_slab_free(struct kmem_cache *s, in kasan_slab_free()
182 void * __must_check __kasan_slab_alloc(struct kmem_cache *s,
[all …]
slub_def.h
98 struct kmem_cache { struct
159 void sysfs_slab_unlink(struct kmem_cache *); argument
160 void sysfs_slab_release(struct kmem_cache *);
162 static inline void sysfs_slab_unlink(struct kmem_cache *s) in sysfs_slab_unlink()
165 static inline void sysfs_slab_release(struct kmem_cache *s) in sysfs_slab_release()
170 void *fixup_red_left(struct kmem_cache *s, void *p);
172 static inline void *nearest_obj(struct kmem_cache *cache, const struct slab *slab, in nearest_obj()
184 static inline unsigned int __obj_to_index(const struct kmem_cache *cache, in __obj_to_index()
191 static inline unsigned int obj_to_index(const struct kmem_cache *cache, in obj_to_index()
199 static inline int objs_per_slab(const struct kmem_cache *cache, in objs_per_slab()
slab.h
189 struct kmem_cache *kmem_cache_create(const char *name, unsigned int size,
192 struct kmem_cache *kmem_cache_create_usercopy(const char *name,
197 void kmem_cache_destroy(struct kmem_cache *s);
198 int kmem_cache_shrink(struct kmem_cache *s);
387 extern struct kmem_cache *
499 void *kmem_cache_alloc(struct kmem_cache *cachep, gfp_t flags) __assume_slab_alignment __malloc;
500 void *kmem_cache_alloc_lru(struct kmem_cache *s, struct list_lru *lru,
502 void kmem_cache_free(struct kmem_cache *s, void *objp);
511 void kmem_cache_free_bulk(struct kmem_cache *s, size_t size, void **p);
512 int kmem_cache_alloc_bulk(struct kmem_cache *s, gfp_t flags, size_t size, void **p);
[all …]
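The slab.h entries above are the public slab-cache API. Below is a minimal usage sketch of that API: create a dedicated cache, allocate and free one object, then destroy the cache. The struct, cache name, and function are illustrative only and do not appear in the kernel tree.

#include <linux/errno.h>
#include <linux/slab.h>

struct demo_item {
	int id;
	char name[32];
};

static struct kmem_cache *demo_cache;

static int demo_use_cache(void)
{
	struct demo_item *item;

	/* One cache per object type; objects are hardware-cacheline aligned. */
	demo_cache = kmem_cache_create("demo_item", sizeof(struct demo_item),
				       0, SLAB_HWCACHE_ALIGN, NULL);
	if (!demo_cache)
		return -ENOMEM;

	item = kmem_cache_alloc(demo_cache, GFP_KERNEL);
	if (!item) {
		kmem_cache_destroy(demo_cache);
		return -ENOMEM;
	}

	item->id = 1;
	kmem_cache_free(demo_cache, item);
	kmem_cache_destroy(demo_cache);
	return 0;
}

The kmem_cache_alloc_bulk()/kmem_cache_free_bulk() entries listed above follow the same pattern but operate on an array of object pointers in a single call.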
kfence.h
94 void kfence_shutdown_cache(struct kmem_cache *s);
100 void *__kfence_alloc(struct kmem_cache *s, size_t size, gfp_t flags);
118 static __always_inline void *kfence_alloc(struct kmem_cache *s, size_t size, gfp_t flags) in kfence_alloc()
229 static inline void kfence_shutdown_cache(struct kmem_cache *s) { } in kfence_shutdown_cache()
230 static inline void *kfence_alloc(struct kmem_cache *s, size_t size, gfp_t flags) { return NULL; } in kfence_alloc()
kmsan.h
18 struct kmem_cache;
100 void kmsan_slab_alloc(struct kmem_cache *s, void *object, gfp_t flags);
109 void kmsan_slab_free(struct kmem_cache *s, void *object);
270 static inline void kmsan_slab_alloc(struct kmem_cache *s, void *object, in kmsan_slab_alloc()
275 static inline void kmsan_slab_free(struct kmem_cache *s, void *object) in kmsan_slab_free()
slab_def.h
12 struct kmem_cache { struct
91 static inline void *nearest_obj(struct kmem_cache *cache, const struct slab *slab, in nearest_obj() argument
109 static inline unsigned int obj_to_index(const struct kmem_cache *cache, in obj_to_index()
116 static inline int objs_per_slab(const struct kmem_cache *cache, in objs_per_slab()
fault-inject.h
92 struct kmem_cache;
105 int should_failslab(struct kmem_cache *s, gfp_t gfpflags);
107 extern bool __should_failslab(struct kmem_cache *s, gfp_t gfpflags);
109 static inline bool __should_failslab(struct kmem_cache *s, gfp_t gfpflags) in __should_failslab()
/linux-6.6.21/tools/testing/radix-tree/
linux.c
19 struct kmem_cache { struct
31 void kmem_cache_set_non_kernel(struct kmem_cache *cachep, unsigned int val) in kmem_cache_set_non_kernel() argument
36 unsigned long kmem_cache_get_alloc(struct kmem_cache *cachep) in kmem_cache_get_alloc()
41 unsigned long kmem_cache_nr_allocated(struct kmem_cache *cachep) in kmem_cache_nr_allocated()
46 unsigned long kmem_cache_nr_tallocated(struct kmem_cache *cachep) in kmem_cache_nr_tallocated()
51 void kmem_cache_zero_nr_tallocated(struct kmem_cache *cachep) in kmem_cache_zero_nr_tallocated()
56 void *kmem_cache_alloc_lru(struct kmem_cache *cachep, struct list_lru *lru, in kmem_cache_alloc_lru()
96 void kmem_cache_free_locked(struct kmem_cache *cachep, void *objp) in kmem_cache_free_locked()
114 void kmem_cache_free(struct kmem_cache *cachep, void *objp) in kmem_cache_free()
121 void kmem_cache_free_bulk(struct kmem_cache *cachep, size_t size, void **list) in kmem_cache_free_bulk()
[all …]
/linux-6.6.21/tools/include/linux/
slab.h
33 void *kmem_cache_alloc_lru(struct kmem_cache *cachep, struct list_lru *, int flags);
34 static inline void *kmem_cache_alloc(struct kmem_cache *cachep, int flags) in kmem_cache_alloc()
38 void kmem_cache_free(struct kmem_cache *cachep, void *objp);
40 struct kmem_cache *kmem_cache_create(const char *name, unsigned int size,
44 void kmem_cache_free_bulk(struct kmem_cache *cachep, size_t size, void **list);
45 int kmem_cache_alloc_bulk(struct kmem_cache *cachep, gfp_t gfp, size_t size,
/linux-6.6.21/lib/
slub_kunit.c
19 static struct kmem_cache *test_kmem_cache_create(const char *name, in test_kmem_cache_create()
22 struct kmem_cache *s = kmem_cache_create(name, size, 0, in test_kmem_cache_create()
30 struct kmem_cache *s = test_kmem_cache_create("TestSlub_RZ_alloc", 64, in test_clobber_zone()
48 struct kmem_cache *s = test_kmem_cache_create("TestSlub_next_ptr_free", in test_next_pointer()
93 struct kmem_cache *s = test_kmem_cache_create("TestSlub_1th_word_free", in test_first_word()
108 struct kmem_cache *s = test_kmem_cache_create("TestSlub_50th_word_free", in test_clobber_50th_byte()
124 struct kmem_cache *s = test_kmem_cache_create("TestSlub_RZ_free", 64, in test_clobber_redzone_free()
141 struct kmem_cache *s = test_kmem_cache_create("TestSlub_RZ_kmalloc", 32, in test_kmalloc_redzone_access()
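The slub_kunit.c tests above create throwaway caches with debug flags and then deliberately corrupt objects to check that SLUB's checks report the damage. A hedged sketch of that setup follows; the names are made up, and the real tests additionally call internal validation helpers from mm/slab.h that are not shown here.

#include <linux/slab.h>
#include <linux/types.h>

static void redzone_demo(void)
{
	struct kmem_cache *rz_cache;
	u8 *p;

	/* Illustrative only: a 64-byte cache with redzones enabled. */
	rz_cache = kmem_cache_create("demo_rz", 64, 0, SLAB_RED_ZONE, NULL);
	if (!rz_cache)
		return;

	p = kmem_cache_alloc(rz_cache, GFP_KERNEL);
	if (p) {
		p[64] = 0x12;		/* out-of-bounds write into the redzone */
		kmem_cache_free(rz_cache, p);	/* debug checks can flag the overwrite */
	}
	kmem_cache_destroy(rz_cache);
}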
/linux-6.6.21/mm/kasan/
kasan.h
208 struct kmem_cache *cache;
331 size_t kasan_get_alloc_size(void *object, struct kmem_cache *cache);
348 void kasan_print_aux_stacks(struct kmem_cache *cache, const void *object);
350 static inline void kasan_print_aux_stacks(struct kmem_cache *cache, const void *object) { } in kasan_print_aux_stacks()
360 void kasan_init_cache_meta(struct kmem_cache *cache, unsigned int *size);
361 void kasan_init_object_meta(struct kmem_cache *cache, const void *object);
362 struct kasan_alloc_meta *kasan_get_alloc_meta(struct kmem_cache *cache,
364 struct kasan_free_meta *kasan_get_free_meta(struct kmem_cache *cache,
367 static inline void kasan_init_cache_meta(struct kmem_cache *cache, unsigned int *size) { } in kasan_init_cache_meta()
368 static inline void kasan_init_object_meta(struct kmem_cache *cache, const void *object) { } in kasan_init_object_meta()
[all …]
quarantine.c
127 static struct kmem_cache *qlink_to_cache(struct qlist_node *qlink) in qlink_to_cache()
132 static void *qlink_to_object(struct qlist_node *qlink, struct kmem_cache *cache) in qlink_to_object()
141 static void qlink_free(struct qlist_node *qlink, struct kmem_cache *cache) in qlink_free()
172 static void qlist_free_all(struct qlist_head *q, struct kmem_cache *cache) in qlist_free_all()
181 struct kmem_cache *obj_cache = in qlist_free_all()
191 bool kasan_quarantine_put(struct kmem_cache *cache, void *object) in kasan_quarantine_put()
298 struct kmem_cache *cache) in qlist_move_cache()
309 struct kmem_cache *obj_cache = qlink_to_cache(curr); in qlist_move_cache()
322 struct kmem_cache *cache = arg; in __per_cpu_remove_cache()
348 void kasan_quarantine_remove_cache(struct kmem_cache *cache) in kasan_quarantine_remove_cache()
common.c
136 void __kasan_unpoison_object_data(struct kmem_cache *cache, void *object) in __kasan_unpoison_object_data()
141 void __kasan_poison_object_data(struct kmem_cache *cache, void *object) in __kasan_poison_object_data()
161 static inline u8 assign_tag(struct kmem_cache *cache, in assign_tag()
187 void * __must_check __kasan_init_slab_obj(struct kmem_cache *cache, in __kasan_init_slab_obj()
200 static inline bool ____kasan_slab_free(struct kmem_cache *cache, void *object, in ____kasan_slab_free()
241 bool __kasan_slab_free(struct kmem_cache *cache, void *object, in __kasan_slab_free()
298 void * __must_check __kasan_slab_alloc(struct kmem_cache *cache, in __kasan_slab_alloc()
333 static inline void *____kasan_kmalloc(struct kmem_cache *cache, in ____kasan_kmalloc()
380 void * __must_check __kasan_kmalloc(struct kmem_cache *cache, const void *object, in __kasan_kmalloc()
/linux-6.6.21/fs/jffs2/
malloc.c
22 static struct kmem_cache *full_dnode_slab;
23 static struct kmem_cache *raw_dirent_slab;
24 static struct kmem_cache *raw_inode_slab;
25 static struct kmem_cache *tmp_dnode_info_slab;
26 static struct kmem_cache *raw_node_ref_slab;
27 static struct kmem_cache *node_frag_slab;
28 static struct kmem_cache *inode_cache_slab;
30 static struct kmem_cache *xattr_datum_cache;
31 static struct kmem_cache *xattr_ref_cache;
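The jffs2 entry above, like the dlm, target, gfs2, iscsi, ntfs and xfs entries that follow, declares one dedicated kmem_cache pointer per object type. A hedged sketch of the usual init/exit pattern behind such declarations; the names are invented and the real subsystems' helpers differ.

#include <linux/errno.h>
#include <linux/init.h>
#include <linux/slab.h>

/* Illustrative per-object-type caches, mirroring the static pointers above. */
static struct kmem_cache *demo_node_cache;
static struct kmem_cache *demo_dirent_cache;

static int __init demo_caches_init(void)
{
	demo_node_cache = kmem_cache_create("demo_node", 128, 0, 0, NULL);
	if (!demo_node_cache)
		return -ENOMEM;

	demo_dirent_cache = kmem_cache_create("demo_dirent", 64, 0, 0, NULL);
	if (!demo_dirent_cache) {
		/* Unwind on partial failure so no cache is leaked. */
		kmem_cache_destroy(demo_node_cache);
		return -ENOMEM;
	}
	return 0;
}

static void __exit demo_caches_exit(void)
{
	kmem_cache_destroy(demo_dirent_cache);
	kmem_cache_destroy(demo_node_cache);
}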
/linux-6.6.21/fs/dlm/
memory.c
19 static struct kmem_cache *writequeue_cache;
20 static struct kmem_cache *mhandle_cache;
21 static struct kmem_cache *msg_cache;
22 static struct kmem_cache *lkb_cache;
23 static struct kmem_cache *rsb_cache;
24 static struct kmem_cache *cb_cache;
/linux-6.6.21/drivers/target/
target_core_alua.h
79 extern struct kmem_cache *t10_alua_lu_gp_cache;
80 extern struct kmem_cache *t10_alua_lu_gp_mem_cache;
81 extern struct kmem_cache *t10_alua_tg_pt_gp_cache;
82 extern struct kmem_cache *t10_alua_lba_map_cache;
83 extern struct kmem_cache *t10_alua_lba_map_mem_cache;
/linux-6.6.21/fs/gfs2/
util.h
170 extern struct kmem_cache *gfs2_glock_cachep;
171 extern struct kmem_cache *gfs2_glock_aspace_cachep;
172 extern struct kmem_cache *gfs2_inode_cachep;
173 extern struct kmem_cache *gfs2_bufdata_cachep;
174 extern struct kmem_cache *gfs2_rgrpd_cachep;
175 extern struct kmem_cache *gfs2_quotad_cachep;
176 extern struct kmem_cache *gfs2_qadata_cachep;
177 extern struct kmem_cache *gfs2_trans_cachep;
/linux-6.6.21/drivers/target/iscsi/
iscsi_target.h
52 extern struct kmem_cache *lio_dr_cache;
53 extern struct kmem_cache *lio_ooo_cache;
54 extern struct kmem_cache *lio_qr_cache;
55 extern struct kmem_cache *lio_r2t_cache;
/linux-6.6.21/fs/ntfs/
ntfs.h
38 extern struct kmem_cache *ntfs_name_cache;
39 extern struct kmem_cache *ntfs_inode_cache;
40 extern struct kmem_cache *ntfs_big_inode_cache;
41 extern struct kmem_cache *ntfs_attr_ctx_cache;
42 extern struct kmem_cache *ntfs_index_ctx_cache;
/linux-6.6.21/fs/xfs/
xfs_rmap_item.h
31 struct kmem_cache;
71 extern struct kmem_cache *xfs_rui_cache;
72 extern struct kmem_cache *xfs_rud_cache;
xfs_bmap_item.h
28 struct kmem_cache;
68 extern struct kmem_cache *xfs_bui_cache;
69 extern struct kmem_cache *xfs_bud_cache;
