Lines Matching refs: mlx5_cache_ent (an illustrative sketch of the entry structure follows the listing)
124 static void queue_adjust_cache_locked(struct mlx5_cache_ent *ent);
146 static int push_mkey_locked(struct mlx5_cache_ent *ent, bool limit_pendings, in push_mkey_locked()
202 static int push_mkey(struct mlx5_cache_ent *ent, bool limit_pendings, in push_mkey()
213 static void undo_push_reserve_mkey(struct mlx5_cache_ent *ent) in undo_push_reserve_mkey()
222 static void push_to_reserved(struct mlx5_cache_ent *ent, u32 mkey) in push_to_reserved()
231 static u32 pop_stored_mkey(struct mlx5_cache_ent *ent) in pop_stored_mkey()
256 struct mlx5_cache_ent *ent = mkey_out->ent; in create_mkey_callback()
302 static void set_cache_mkc(struct mlx5_cache_ent *ent, void *mkc) in set_cache_mkc()
319 static int add_keys(struct mlx5_cache_ent *ent, unsigned int num) in add_keys()
359 static int create_cache_mkey(struct mlx5_cache_ent *ent, u32 *mkey) in create_cache_mkey()
382 static void remove_cache_mr_locked(struct mlx5_cache_ent *ent) in remove_cache_mr_locked()
395 static int resize_available_mrs(struct mlx5_cache_ent *ent, unsigned int target, in resize_available_mrs()
430 struct mlx5_cache_ent *ent = filp->private_data; in size_write()
468 struct mlx5_cache_ent *ent = filp->private_data; in size_read()
489 struct mlx5_cache_ent *ent = filp->private_data; in limit_write()
513 struct mlx5_cache_ent *ent = filp->private_data; in limit_read()
533 struct mlx5_cache_ent *ent; in someone_adding()
539 ent = rb_entry(node, struct mlx5_cache_ent, node); in someone_adding()
557 static void queue_adjust_cache_locked(struct mlx5_cache_ent *ent) in queue_adjust_cache_locked()
586 static void __cache_work_func(struct mlx5_cache_ent *ent) in __cache_work_func()
653 struct mlx5_cache_ent *ent; in delayed_cache_work_func()
655 ent = container_of(work, struct mlx5_cache_ent, dwork.work); in delayed_cache_work_func()
685 struct mlx5_cache_ent *ent) in mlx5_cache_ent_insert()
688 struct mlx5_cache_ent *cur; in mlx5_cache_ent_insert()
693 cur = rb_entry(*new, struct mlx5_cache_ent, node); in mlx5_cache_ent_insert()
713 static struct mlx5_cache_ent *
718 struct mlx5_cache_ent *cur, *smallest = NULL; in mkey_cache_ent_from_rb_key()
725 cur = rb_entry(node, struct mlx5_cache_ent, node); in mkey_cache_ent_from_rb_key()
746 struct mlx5_cache_ent *ent, in _mlx5_mr_cache_alloc()
815 struct mlx5_cache_ent *ent = mkey_cache_ent_from_rb_key(dev, rb_key); in mlx5_mr_cache_alloc()
823 static void clean_keys(struct mlx5_ib_dev *dev, struct mlx5_cache_ent *ent) in clean_keys()
848 struct mlx5_cache_ent *ent) in mlx5_mkey_cache_debugfs_add_ent()
885 struct mlx5_cache_ent *
890 struct mlx5_cache_ent *ent; in mlx5r_cache_create_ent_locked()
937 struct mlx5_cache_ent *ent; in remove_ent_work_func()
945 ent = rb_entry(cur, struct mlx5_cache_ent, node); in remove_ent_work_func()
970 struct mlx5_cache_ent *ent; in mlx5_mkey_cache_init()
1004 ent = rb_entry(node, struct mlx5_cache_ent, node); in mlx5_mkey_cache_init()
1022 struct mlx5_cache_ent *ent; in mlx5_mkey_cache_cleanup()
1031 ent = rb_entry(node, struct mlx5_cache_ent, node); in mlx5_mkey_cache_cleanup()
1052 ent = rb_entry(node, struct mlx5_cache_ent, node); in mlx5_mkey_cache_cleanup()
1159 struct mlx5_cache_ent *ent; in alloc_cacheable_mr()
1824 struct mlx5_cache_ent *ent; in cache_ent_find_and_store()
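
Two patterns account for most of the references above. Entries live in a per-device rb-tree and are recovered from a node pointer with rb_entry(node, struct mlx5_cache_ent, node) (see someone_adding(), mkey_cache_ent_from_rb_key(), mlx5_mkey_cache_init()/cleanup()), and the deferred fill/shrink work recovers its entry with container_of(work, struct mlx5_cache_ent, dwork.work) in delayed_cache_work_func(). The sketch below is a minimal, compilable illustration of just those two relationships. Only the node and dwork members are taken from the listing; the remaining fields (lock, stored, limit) and the demo_cache_ent name are assumptions added for illustration, not the driver's actual definition.

/*
 * Illustrative sketch only. The real struct mlx5_cache_ent is defined in
 * the mlx5 driver headers and has more fields; the two members used below
 * are the ones the listing demonstrably relies on.
 */
#include <linux/types.h>
#include <linux/rbtree.h>
#include <linux/workqueue.h>
#include <linux/spinlock.h>

struct demo_cache_ent {
	struct rb_node node;		/* links the entry into the per-device rb-tree */
	struct delayed_work dwork;	/* deferred background fill/shrink work */
	spinlock_t lock;		/* assumed: protects the counters below */
	u32 stored;			/* assumed: ready-to-use cached mkeys */
	u32 limit;			/* assumed: low-water mark for stored mkeys */
};

/* Mirrors the rb_entry() pattern used when walking the cache rb-tree. */
static struct demo_cache_ent *demo_ent_from_node(struct rb_node *node)
{
	return rb_entry(node, struct demo_cache_ent, node);
}

/* Mirrors the container_of() pattern in delayed_cache_work_func(). */
static void demo_cache_work(struct work_struct *work)
{
	struct demo_cache_ent *ent =
		container_of(work, struct demo_cache_ent, dwork.work);

	/* In the real driver, __cache_work_func(ent) refills or shrinks here. */
	(void)ent;
}

Once the entry has been recovered by either route, the driver's __cache_work_func() and queue_adjust_cache_locked() operate on it directly; the debugfs handlers (size_write(), limit_write(), etc.) skip the lookup entirely by stashing the entry pointer in filp->private_data.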