
Searched refs:refs (Results 1 – 25 of 166) sorted by relevance


/linux-6.6.21/tools/bpf/bpftool/
pids.c
34 struct obj_refs *refs; in add_ref() local
40 refs = entry->pvalue; in add_ref()
42 for (i = 0; i < refs->ref_cnt; i++) { in add_ref()
43 if (refs->refs[i].pid == e->pid) in add_ref()
47 tmp = realloc(refs->refs, (refs->ref_cnt + 1) * sizeof(*ref)); in add_ref()
53 refs->refs = tmp; in add_ref()
54 ref = &refs->refs[refs->ref_cnt]; in add_ref()
57 refs->ref_cnt++; in add_ref()
63 refs = calloc(1, sizeof(*refs)); in add_ref()
64 if (!refs) { in add_ref()
[all …]
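The add_ref() hits above show bpftool's grow-on-demand pattern: scan the existing array for the PID, and only realloc() one slot larger when it is new. A minimal standalone sketch of the same pattern (the obj_ref/obj_refs shapes are modeled on the snippet, not copied from bpftool):

    #include <stdio.h>
    #include <stdlib.h>

    struct obj_ref  { int pid; };
    struct obj_refs { struct obj_ref *refs; unsigned int ref_cnt; };

    /* Append pid unless already present; mirrors the realloc pattern above. */
    static int add_ref(struct obj_refs *r, int pid)
    {
        struct obj_ref *tmp;
        unsigned int i;

        for (i = 0; i < r->ref_cnt; i++)
            if (r->refs[i].pid == pid)
                return 0; /* already tracked */

        tmp = realloc(r->refs, (r->ref_cnt + 1) * sizeof(*tmp));
        if (!tmp)
            return -1; /* old array is still valid on failure */
        r->refs = tmp;
        r->refs[r->ref_cnt++].pid = pid;
        return 0;
    }

    int main(void)
    {
        struct obj_refs r = { 0 };

        add_ref(&r, 42);
        add_ref(&r, 42);            /* deduplicated */
        printf("%u\n", r.ref_cnt);  /* 1 */
        free(r.refs);
        return 0;
    }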
/linux-6.6.21/include/linux/
freelist.h
19 atomic_t refs; member
50 atomic_set_release(&node->refs, 1); in __freelist_add()
57 if (atomic_fetch_add_release(REFS_ON_FREELIST - 1, &node->refs) == 1) in __freelist_add()
70 if (!atomic_fetch_add_release(REFS_ON_FREELIST, &node->refs)) { in freelist_add()
82 unsigned int refs; in freelist_try_get() local
86 refs = atomic_read(&head->refs); in freelist_try_get()
87 if ((refs & REFS_MASK) == 0 || in freelist_try_get()
88 !atomic_try_cmpxchg_acquire(&head->refs, &refs, refs+1)) { in freelist_try_get()
106 WARN_ON_ONCE(atomic_read(&head->refs) & REFS_ON_FREELIST); in freelist_try_get()
112 atomic_fetch_add(-2, &head->refs); in freelist_try_get()
[all …]
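freelist.h packs a REFS_ON_FREELIST flag bit and a reference count into a single atomic_t; freelist_try_get() (hits 86-88 above) only takes a reference while the count portion is nonzero, via an acquire cmpxchg loop. A compilable C11 analogue of just that step (the mask values mirror the header's intent; the surrounding lock-free list is omitted):

    #include <stdatomic.h>
    #include <stdbool.h>
    #include <stdio.h>

    #define REFS_ON_FREELIST 0x80000000u
    #define REFS_MASK        0x7fffffffu

    /* Take a reference only while the count part of refs is nonzero. */
    static bool try_get_ref(_Atomic unsigned int *refs)
    {
        unsigned int cur = atomic_load_explicit(refs, memory_order_relaxed);

        do {
            if ((cur & REFS_MASK) == 0)
                return false; /* node already dying */
        } while (!atomic_compare_exchange_weak_explicit(refs, &cur, cur + 1,
                                                        memory_order_acquire,
                                                        memory_order_relaxed));
        return true;
    }

    int main(void)
    {
        _Atomic unsigned int refs = REFS_ON_FREELIST | 1;

        printf("%d\n", try_get_ref(&refs)); /* 1 */
        atomic_store(&refs, REFS_ON_FREELIST); /* count drained */
        printf("%d\n", try_get_ref(&refs)); /* 0 */
        return 0;
    }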
refcount.h
112 atomic_t refs; member
115 #define REFCOUNT_INIT(n) { .refs = ATOMIC_INIT(n), }
136 atomic_set(&r->refs, n); in refcount_set()
147 return atomic_read(&r->refs); in refcount_read()
157 } while (!atomic_try_cmpxchg_relaxed(&r->refs, &old, old + i)); in __refcount_add_not_zero()
193 int old = atomic_fetch_add_relaxed(i, &r->refs); in __refcount_add()
272 int old = atomic_fetch_sub_release(i, &r->refs); in __refcount_sub_and_test()
338 int old = atomic_fetch_sub_release(1, &r->refs); in __refcount_dec()
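refcount.h defines the kernel's saturating refcount_t; the hits above are its core primitives. The canonical object-lifetime pattern built on them looks roughly like this (struct foo and its helpers are hypothetical; the refcount_* calls are the real API):

    #include <linux/refcount.h>
    #include <linux/slab.h>

    struct foo {
        refcount_t refs;
        /* ... payload ... */
    };

    static struct foo *foo_alloc(void)
    {
        struct foo *f = kzalloc(sizeof(*f), GFP_KERNEL);

        if (f)
            refcount_set(&f->refs, 1); /* caller owns the first reference */
        return f;
    }

    static void foo_get(struct foo *f)
    {
        refcount_inc(&f->refs); /* WARNs and saturates instead of overflowing */
    }

    static void foo_put(struct foo *f)
    {
        if (refcount_dec_and_test(&f->refs))
            kfree(f); /* last reference dropped */
    }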
/linux-6.6.21/drivers/media/v4l2-core/
v4l2-h264.c
60 b->refs[i].longterm = true; in v4l2_h264_init_reflist_builder()
68 if (!b->refs[i].longterm && dpb[i].frame_num > cur_frame_num) in v4l2_h264_init_reflist_builder()
69 b->refs[i].frame_num = (int)dpb[i].frame_num - in v4l2_h264_init_reflist_builder()
72 b->refs[i].frame_num = dpb[i].frame_num; in v4l2_h264_init_reflist_builder()
74 b->refs[i].top_field_order_cnt = dpb[i].top_field_order_cnt; in v4l2_h264_init_reflist_builder()
75 b->refs[i].bottom_field_order_cnt = dpb[i].bottom_field_order_cnt; in v4l2_h264_init_reflist_builder()
113 return min(b->refs[ref->index].top_field_order_cnt, in v4l2_h264_get_poc()
114 b->refs[ref->index].bottom_field_order_cnt); in v4l2_h264_get_poc()
116 return b->refs[ref->index].top_field_order_cnt; in v4l2_h264_get_poc()
118 return b->refs[ref->index].bottom_field_order_cnt; in v4l2_h264_get_poc()
[all …]
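In the v4l2-h264 hits, a short-term reference whose frame_num exceeds the current picture's must predate a frame_num wrap, so MaxFrameNum is subtracted (FrameNumWrap, H.264 spec 8.2.4.1). A tiny standalone illustration with made-up values:

    #include <stdio.h>

    /* FrameNumWrap per H.264 8.2.4.1: references "ahead of" the current
     * frame_num actually precede it modulo max_frame_num. */
    static int frame_num_wrap(int ref_frame_num, int cur_frame_num,
                              int max_frame_num)
    {
        if (ref_frame_num > cur_frame_num)
            return ref_frame_num - max_frame_num;
        return ref_frame_num;
    }

    int main(void)
    {
        /* max_frame_num = 16, current picture has frame_num 2 */
        printf("%d\n", frame_num_wrap(15, 2, 16)); /* -1: just before wrap */
        printf("%d\n", frame_num_wrap(1, 2, 16));  /*  1: genuinely earlier */
        return 0;
    }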
/linux-6.6.21/tools/include/linux/
refcount.h
53 atomic_t refs; member
56 #define REFCOUNT_INIT(n) { .refs = ATOMIC_INIT(n), }
60 atomic_set(&r->refs, n); in refcount_set()
65 return atomic_read(&r->refs); in refcount_read()
78 unsigned int old, new, val = atomic_read(&r->refs); in refcount_inc_not_zero()
89 old = atomic_cmpxchg_relaxed(&r->refs, val, new); in refcount_inc_not_zero()
123 unsigned int old, new, val = atomic_read(&r->refs); in refcount_sub_and_test()
135 old = atomic_cmpxchg_release(&r->refs, val, new); in refcount_sub_and_test()
/linux-6.6.21/drivers/xen/
gntdev-dmabuf.c
44 grant_ref_t *refs; member
437 int count, u32 domid, u32 *refs, u32 *fd) in dmabuf_exp_from_refs() argument
449 map->grants[i].ref = refs[i]; in dmabuf_exp_from_refs()
488 dmabuf_imp_grant_foreign_access(unsigned long *gfns, u32 *refs, in dmabuf_imp_grant_foreign_access() argument
512 refs[i] = cur_ref; in dmabuf_imp_grant_foreign_access()
522 static void dmabuf_imp_end_foreign_access(u32 *refs, int count) in dmabuf_imp_end_foreign_access() argument
527 if (refs[i] != INVALID_GRANT_REF) in dmabuf_imp_end_foreign_access()
528 gnttab_end_foreign_access(refs[i], NULL); in dmabuf_imp_end_foreign_access()
533 kfree(gntdev_dmabuf->u.imp.refs); in dmabuf_imp_free_storage()
546 gntdev_dmabuf->u.imp.refs = kcalloc(count, in dmabuf_imp_alloc_storage()
[all …]
/linux-6.6.21/io_uring/
refs.h
12 ((unsigned int) atomic_read(&(req->refs)) + 127u <= 127u)
17 return atomic_inc_not_zero(&req->refs); in req_ref_inc_not_zero()
26 return atomic_dec_and_test(&req->refs); in req_ref_put_and_test()
33 atomic_inc(&req->refs); in req_ref_get()
40 atomic_set(&req->refs, nr); in __io_req_set_refcount()
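The io_uring macro at refs.h:12 flags a request refcount that is zero or has recently underflowed: viewed as unsigned, every value in [-127, 0] lands in [0, 127] after adding 127u. A compilable demonstration of that predicate (the test harness is mine; the expression matches the hit above):

    #include <stdio.h>

    /* True when refs, viewed as a signed count, is in [-127, 0]:
     * i.e. already released or underflowed. */
    #define zero_or_underflow(refs) ((unsigned int)(refs) + 127u <= 127u)

    int main(void)
    {
        printf("%d\n", zero_or_underflow(0u));           /* 1 */
        printf("%d\n", zero_or_underflow((unsigned)-3)); /* 1: underflow */
        printf("%d\n", zero_or_underflow(1u));           /* 0: live ref */
        return 0;
    }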
/linux-6.6.21/drivers/gpu/drm/nouveau/nvkm/core/
event.c
34 if (--event->refs[index * event->types_nr + type] == 0) { in nvkm_event_put()
51 if (++event->refs[index * event->types_nr + type] == 1) { in nvkm_event_get()
175 if (!event->refs || WARN_ON(id >= event->index_nr)) in nvkm_event_ntfy()
194 if (event->refs) { in nvkm_event_fini()
195 kfree(event->refs); in nvkm_event_fini()
196 event->refs = NULL; in nvkm_event_fini()
204 event->refs = kzalloc(array3_size(index_nr, types_nr, sizeof(*event->refs)), GFP_KERNEL); in __nvkm_event_init()
205 if (!event->refs) in __nvkm_event_init()
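event.c keeps one reference counter per (index, type) pair in a flat array sized with array3_size() and indexed as index * types_nr + type; the first get arms the notifier, the last put disarms it. A userspace sketch of that flattened-2D layout (the arm/disarm printouts stand in for the real hardware hooks):

    #include <stdio.h>
    #include <stdlib.h>

    struct event {
        int index_nr, types_nr;
        int *refs; /* index_nr * types_nr counters, one row per index */
    };

    static void event_get(struct event *ev, int index, int type)
    {
        if (++ev->refs[index * ev->types_nr + type] == 1)
            printf("arm index=%d type=%d\n", index, type);
    }

    static void event_put(struct event *ev, int index, int type)
    {
        if (--ev->refs[index * ev->types_nr + type] == 0)
            printf("disarm index=%d type=%d\n", index, type);
    }

    int main(void)
    {
        struct event ev = { .index_nr = 4, .types_nr = 3 };

        ev.refs = calloc((size_t)ev.index_nr * ev.types_nr, sizeof(*ev.refs));
        if (!ev.refs)
            return 1;
        event_get(&ev, 2, 1); /* arms */
        event_get(&ev, 2, 1); /* no-op, already armed */
        event_put(&ev, 2, 1);
        event_put(&ev, 2, 1); /* disarms */
        free(ev.refs);
        return 0;
    }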
/linux-6.6.21/fs/smb/client/
dfs_cache.c
250 static inline void dump_refs(const struct dfs_info3_param *refs, int numrefs) in dump_refs() argument
256 const struct dfs_info3_param *ref = &refs[i]; in dump_refs()
382 static int copy_ref_data(const struct dfs_info3_param *refs, int numrefs, in copy_ref_data() argument
388 ce->ttl = max_t(int, refs[0].ttl, CACHE_MIN_TTL); in copy_ref_data()
390 ce->srvtype = refs[0].server_type; in copy_ref_data()
391 ce->hdr_flags = refs[0].flags; in copy_ref_data()
392 ce->ref_flags = refs[0].ref_flag; in copy_ref_data()
393 ce->path_consumed = refs[0].path_consumed; in copy_ref_data()
398 t = alloc_target(refs[i].node_name, refs[i].path_consumed); in copy_ref_data()
420 static struct cache_entry *alloc_cache_entry(struct dfs_info3_param *refs, int numrefs) in alloc_cache_entry() argument
[all …]
dfs.h
26 struct dfs_ref refs[MAX_NESTED_LINKS]; member
29 #define ref_walk_start(w) ((w)->refs)
30 #define ref_walk_end(w) (&(w)->refs[ARRAY_SIZE((w)->refs) - 1])
/linux-6.6.21/mm/
gup.c
71 static inline struct folio *try_get_folio(struct page *page, int refs) in try_get_folio() argument
79 if (unlikely(!folio_ref_try_add_rcu(folio, refs))) in try_get_folio()
92 if (!put_devmap_managed_page_refs(&folio->page, refs)) in try_get_folio()
93 folio_put_refs(folio, refs); in try_get_folio()
126 struct folio *try_grab_folio(struct page *page, int refs, unsigned int flags) in try_grab_folio() argument
137 return try_get_folio(page, refs); in try_grab_folio()
148 folio = try_get_folio(page, refs); in try_grab_folio()
159 if (!put_devmap_managed_page_refs(&folio->page, refs)) in try_grab_folio()
160 folio_put_refs(folio, refs); in try_grab_folio()
173 atomic_add(refs, &folio->_pincount); in try_grab_folio()
[all …]
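gup.c speculatively adds `refs` references to a folio and, if a later policy check fails, drops exactly the same number with folio_put_refs(). A loose userspace analogue of that take-then-roll-back pattern (the struct and both helpers here are simplified stand-ins, not the mm API):

    #include <stdatomic.h>
    #include <stdbool.h>
    #include <stdio.h>

    struct folio { _Atomic int refcnt; };

    /* Speculatively take 'refs' references; fail if the count was zero. */
    static bool folio_try_add_refs(struct folio *f, int refs)
    {
        int cur = atomic_load_explicit(&f->refcnt, memory_order_relaxed);

        do {
            if (cur == 0)
                return false; /* folio is being freed */
        } while (!atomic_compare_exchange_weak(&f->refcnt, &cur, cur + refs));
        return true;
    }

    static void folio_put_refs(struct folio *f, int refs)
    {
        atomic_fetch_sub(&f->refcnt, refs);
    }

    int main(void)
    {
        struct folio f = { 1 };

        if (folio_try_add_refs(&f, 4)) {
            /* ...a later check fails (e.g. wrong pin flags)... */
            folio_put_refs(&f, 4); /* roll back the exact same count */
        }
        printf("%d\n", atomic_load(&f.refcnt)); /* 1 */
        return 0;
    }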
workingset.c
240 int refs = folio_lru_refs(folio); in lru_gen_eviction() local
241 int tier = lru_tier_from_refs(refs); in lru_gen_eviction()
250 token = (min_seq << LRU_REFS_WIDTH) | max(refs - 1, 0); in lru_gen_eviction()
255 return pack_shadow(mem_cgroup_id(memcg), pgdat, token, refs); in lru_gen_eviction()
282 int hist, tier, refs; in lru_gen_refault() local
305 refs = (token & (BIT(LRU_REFS_WIDTH) - 1)) + workingset; in lru_gen_refault()
306 tier = lru_tier_from_refs(refs); in lru_gen_refault()
318 if (lru_gen_in_fault() || refs >= BIT(LRU_REFS_WIDTH) - 1) { in lru_gen_refault()
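workingset.c packs the eviction sequence number and the folio's reference count into one shadow-entry token (hit 250 above) and unpacks it on refault by masking with BIT(LRU_REFS_WIDTH) - 1 (hit 305). A small pack/unpack demo (the width value here is illustrative; the real one is configuration-dependent):

    #include <stdio.h>

    #define LRU_REFS_WIDTH 2u /* illustrative width */
    #define BIT(n) (1ul << (n))

    int main(void)
    {
        unsigned long min_seq = 37, refs = 3;

        /* pack: upper bits carry the sequence, low bits carry refs - 1 */
        unsigned long token = (min_seq << LRU_REFS_WIDTH) |
                              (refs ? refs - 1 : 0);

        /* unpack on refault; the +1 stands in for the 'workingset' bit
         * that lru_gen_refault() adds */
        unsigned long got_refs = (token & (BIT(LRU_REFS_WIDTH) - 1)) + 1;
        unsigned long got_seq  = token >> LRU_REFS_WIDTH;

        printf("seq=%lu refs=%lu\n", got_seq, got_refs); /* seq=37 refs=3 */
        return 0;
    }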
/linux-6.6.21/drivers/iommu/arm/arm-smmu-v3/
arm-smmu-v3-sva.c
20 refcount_t refs; member
32 refcount_t refs; member
61 refcount_inc(&cd->refs); in arm_smmu_share_asid()
115 refcount_set(&cd->refs, 1); in arm_smmu_alloc_shared_cd()
281 refcount_inc(&smmu_mn->refs); in arm_smmu_mmu_notifier_get()
296 refcount_set(&smmu_mn->refs, 1); in arm_smmu_mmu_notifier_get()
328 if (!refcount_dec_and_test(&smmu_mn->refs)) in arm_smmu_mmu_notifier_put()
363 refcount_inc(&bond->refs); in __arm_smmu_sva_bind()
374 refcount_set(&bond->refs, 1); in __arm_smmu_sva_bind()
553 if (!WARN_ON(!bond) && refcount_dec_and_test(&bond->refs)) { in arm_smmu_sva_remove_dev_pasid()
/linux-6.6.21/drivers/thermal/
k3_j72xx_bandgap.c
78 int refs[4]; member
90 idx1 = err_vals->refs[seg]; in create_table_segments()
92 idx2 = err_vals->refs[seg + 1]; in create_table_segments()
95 ref1 = err_vals->refs[seg]; in create_table_segments()
96 ref2 = err_vals->refs[seg + 1]; in create_table_segments()
461 err_vals.refs[0] = MINUS40CREF; in k3_j72xx_bandgap_probe()
462 err_vals.refs[1] = PLUS30CREF; in k3_j72xx_bandgap_probe()
463 err_vals.refs[2] = PLUS125CREF; in k3_j72xx_bandgap_probe()
464 err_vals.refs[3] = PLUS150CREF; in k3_j72xx_bandgap_probe()
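The bandgap driver stores four reference error values (at -40, +30, +125 and +150 degrees C) and create_table_segments() interpolates between consecutive refs[] entries. A sketch of the underlying two-point linear interpolation (the driver's fixed-point scaling is omitted and the sample values are made up):

    #include <stdio.h>

    /* Linearly interpolate between two calibration points (x1,y1), (x2,y2). */
    static int interp(int x, int x1, int y1, int x2, int y2)
    {
        return y1 + (x - x1) * (y2 - y1) / (x2 - x1);
    }

    int main(void)
    {
        /* refs at +30C and +125C with measured errors 5 and 12 (made up) */
        printf("%d\n", interp(77, 30, 5, 125, 12)); /* error near midpoint: 8 */
        return 0;
    }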
/linux-6.6.21/fs/btrfs/
delayed-inode.c
52 refcount_set(&delayed_node->refs, 0); in btrfs_init_delayed_node()
69 refcount_inc(&node->refs); in btrfs_get_delayed_node()
78 refcount_inc(&node->refs); /* can be accessed */ in btrfs_get_delayed_node()
100 if (refcount_inc_not_zero(&node->refs)) { in btrfs_get_delayed_node()
101 refcount_inc(&node->refs); in btrfs_get_delayed_node()
135 refcount_set(&node->refs, 2); in btrfs_get_or_create_delayed_node()
176 refcount_inc(&node->refs); /* inserted into list */ in btrfs_queue_delayed_node()
190 refcount_dec(&node->refs); /* not in the list */ in btrfs_dequeue_delayed_node()
211 refcount_inc(&node->refs); in btrfs_first_delayed_node()
238 refcount_inc(&next->refs); in btrfs_next_delayed_node()
[all …]
delayed-ref.h
36 refcount_t refs; member
82 refcount_t refs; member
341 WARN_ON(refcount_read(&ref->refs) == 0); in btrfs_put_delayed_ref()
342 if (refcount_dec_and_test(&ref->refs)) { in btrfs_put_delayed_ref()
371 if (refcount_dec_and_test(&head->refs)) in btrfs_put_delayed_ref_head()
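The btrfs delayed-node hits show the lookup-under-lock idiom: an object found in a shared structure is only usable if refcount_inc_not_zero() succeeds, because a concurrent release may already have dropped the last reference. A kernel-style sketch (struct node and the lock are hypothetical; the refcount and spinlock calls are the real API):

    #include <linux/refcount.h>
    #include <linux/spinlock.h>

    struct node {
        refcount_t refs;
        /* ... payload ... */
    };

    /*
     * Take a reference on a node found in a shared structure. Returns NULL
     * if the node is already on its way out (refs hit zero concurrently).
     */
    static struct node *node_grab(spinlock_t *lock, struct node *found)
    {
        struct node *ret = NULL;

        spin_lock(lock);
        if (found && refcount_inc_not_zero(&found->refs))
            ret = found; /* we now hold our own reference */
        spin_unlock(lock);
        return ret;
    }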
/linux-6.6.21/lib/
refcount.c
59 return atomic_try_cmpxchg_release(&r->refs, &val, 0); in refcount_dec_if_one()
76 unsigned int new, val = atomic_read(&r->refs); in refcount_dec_not_one()
91 } while (!atomic_try_cmpxchg_release(&r->refs, &val, new)); in refcount_dec_not_one()
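refcount_dec_if_one() at refcount.c:59 is a single try_cmpxchg from 1 to 0, so it succeeds only for the sole remaining holder. A C11 analogue:

    #include <stdatomic.h>
    #include <stdbool.h>
    #include <stdio.h>

    /* Drop the reference only if we are the sole holder (1 -> 0). */
    static bool dec_if_one(_Atomic unsigned int *refs)
    {
        unsigned int expected = 1;

        return atomic_compare_exchange_strong_explicit(refs, &expected, 0,
                                                       memory_order_release,
                                                       memory_order_relaxed);
    }

    int main(void)
    {
        _Atomic unsigned int refs = 2;

        printf("%d\n", dec_if_one(&refs)); /* 0: someone else holds a ref */
        atomic_store(&refs, 1);
        printf("%d\n", dec_if_one(&refs)); /* 1: we were the last holder */
        return 0;
    }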
/linux-6.6.21/drivers/gpu/drm/nouveau/nvkm/engine/fifo/
cgrp.c
39 if (refcount_dec_and_test(&ectx->refs)) { in nvkm_cgrp_ectx_put()
65 refcount_inc(&ectx->refs); in nvkm_cgrp_ectx_get()
76 refcount_set(&ectx->refs, 1); in nvkm_cgrp_ectx_get()
100 if (refcount_dec_and_test(&vctx->refs)) { in nvkm_cgrp_vctx_put()
130 refcount_inc(&vctx->refs); in nvkm_cgrp_vctx_get()
151 refcount_set(&vctx->refs, 1); in nvkm_cgrp_vctx_get()
/linux-6.6.21/include/uapi/xen/
gntdev.h
70 struct ioctl_gntdev_grant_ref refs[1]; member
256 __u32 refs[1]; member
298 __u32 refs[1]; member
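These UAPI structs end in a one-element refs[1] array, the pre-C99 flexible-array idiom: the caller allocates the struct plus count - 1 extra elements and indexes past the declared bound. A sketch (the struct here is trimmed to the relevant fields):

    #include <stdio.h>
    #include <stdlib.h>
    #include <stdint.h>

    struct map_grant_ref {
        uint32_t count;
        uint32_t refs[1]; /* pre-C99 idiom: really 'count' entries */
    };

    int main(void)
    {
        uint32_t i, count = 8;
        /* count - 1 extra elements beyond the one declared */
        struct map_grant_ref *op =
            malloc(sizeof(*op) + (count - 1) * sizeof(op->refs[0]));

        if (!op)
            return 1;
        op->count = count;
        for (i = 0; i < count; i++)
            op->refs[i] = 1000 + i;
        printf("%u\n", op->refs[7]); /* 1007 */
        free(op);
        return 0;
    }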
/linux-6.6.21/tools/testing/selftests/bpf/progs/
map_kptr.c
308 if (p_st->cnt.refs.counter != ref) { in test_map_kptr_ref_pre()
318 if (p_st->cnt.refs.counter != ref) in test_map_kptr_ref_pre()
326 if (p_st->cnt.refs.counter != ref) in test_map_kptr_ref_pre()
338 if (p_st->cnt.refs.counter != ref) in test_map_kptr_ref_pre()
355 if (!p_st || p_st->cnt.refs.counter != ref) in test_map_kptr_ref_post()
361 if (p_st->cnt.refs.counter != ref) { in test_map_kptr_ref_post()
371 if (p_st->cnt.refs.counter != ref) in test_map_kptr_ref_post()
471 if (p->cnt.refs.counter != ref) { in test_map_kptr_ref3()
/linux-6.6.21/drivers/net/ethernet/mellanox/mlx4/
port.c
69 table->refs[i] = 0; in mlx4_init_mac_table()
83 table->refs[i] = 0; in mlx4_init_vlan_table()
118 if (table->refs[i] && in find_index()
157 if (!table->refs[i]) in mlx4_find_cached_mac()
232 dup_table->refs[index_at_port]) { in __mlx4_register_mac()
242 if (!table->refs[index_at_dup_port] || in __mlx4_register_mac()
251 if (!table->refs[i]) { in __mlx4_register_mac()
255 if (!dup_table->refs[i]) in __mlx4_register_mac()
265 ++table->refs[i]; in __mlx4_register_mac()
308 table->refs[free] = 1; in __mlx4_register_mac()
[all …]
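The mlx4 hits show per-slot reference counting in the MAC/VLAN tables: registering an address either bumps the refs of a matching live slot or claims the first slot whose refs is zero. A simplified standalone sketch (the table shape and return conventions are assumptions):

    #include <stdio.h>

    #define TABLE_SIZE 8

    struct table {
        unsigned long long addr[TABLE_SIZE];
        int refs[TABLE_SIZE];
    };

    /* Return the slot index, or -1 when the table is full. */
    static int table_register(struct table *t, unsigned long long addr)
    {
        int i, free_slot = -1;

        for (i = 0; i < TABLE_SIZE; i++) {
            if (t->refs[i] && t->addr[i] == addr) {
                ++t->refs[i]; /* duplicate: share the existing slot */
                return i;
            }
            if (!t->refs[i] && free_slot < 0)
                free_slot = i; /* remember the first free slot */
        }
        if (free_slot < 0)
            return -1;
        t->addr[free_slot] = addr;
        t->refs[free_slot] = 1;
        return free_slot;
    }

    int main(void)
    {
        struct table t = { 0 };

        printf("%d\n", table_register(&t, 0xaabb)); /* 0: new slot */
        printf("%d\n", table_register(&t, 0xaabb)); /* 0: refs now 2 */
        printf("%d\n", table_register(&t, 0xccdd)); /* 1: next free slot */
        return 0;
    }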
/linux-6.6.21/drivers/misc/sgi-xp/
xpc.h
684 s32 refs = atomic_dec_return(&ch->references); in xpc_msgqueue_deref() local
686 DBUG_ON(refs < 0); in xpc_msgqueue_deref()
687 if (refs == 0) in xpc_msgqueue_deref()
701 s32 refs = atomic_dec_return(&part->references); in xpc_part_deref() local
703 DBUG_ON(refs < 0); in xpc_part_deref()
704 if (refs == 0 && part->setup_state == XPC_P_SS_WTEARDOWN) in xpc_part_deref()
/linux-6.6.21/kernel/trace/
trace_events_user.c
170 struct user_event_refs *refs; member
2074 struct user_event_refs *refs; in user_events_write_core() local
2088 refs = rcu_dereference_sched(info->refs); in user_events_write_core()
2095 if (likely(refs && idx < refs->count)) in user_events_write_core()
2096 user = refs->events[idx]; in user_events_write_core()
2194 struct user_event_refs *refs, *new_refs; in user_events_ref_add() local
2197 refs = rcu_dereference_protected(info->refs, in user_events_ref_add()
2200 if (refs) { in user_events_ref_add()
2201 count = refs->count; in user_events_ref_add()
2204 if (refs->events[i] == user) in user_events_ref_add()
[all …]
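trace_events_user.c publishes a per-file refs array over RCU: writers build a larger copy and swap the pointer in, while the write fast path resolves an index with rcu_dereference_sched() and bounds-checks it against refs->count (hits 2088-2096 above). A kernel-style sketch of that reader side (the struct layout follows the snippet; the surrounding file plumbing is assumed):

    #include <linux/rcupdate.h>

    struct user_event;

    struct user_event_refs {
        int count;
        struct user_event *events[];
    };

    struct file_info {
        struct user_event_refs __rcu *refs;
    };

    /* Fast path: resolve idx -> event under rcu_read_lock_sched(). */
    static struct user_event *event_from_idx(struct file_info *info, int idx)
    {
        struct user_event_refs *refs;
        struct user_event *user = NULL;

        rcu_read_lock_sched();
        refs = rcu_dereference_sched(info->refs);
        if (likely(refs && idx < refs->count))
            user = refs->events[idx];
        rcu_read_unlock_sched();
        return user;
    }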
/linux-6.6.21/drivers/gpu/drm/nouveau/
nouveau_vmm.c
65 if (vma && --vma->refs <= 0) { in nouveau_vma_del()
86 vma->refs++; in nouveau_vma_new()
93 vma->refs = 1; in nouveau_vma_new()
/linux-6.6.21/drivers/most/
core.c
34 int refs; member
852 if (c->pipe0.refs && c->pipe0.comp->tx_completion) in arm_mbo()
855 if (c->pipe1.refs && c->pipe1.comp->tx_completion) in arm_mbo()
963 if (c->pipe0.refs && c->pipe1.refs && in channel_has_mbo()
996 if (c->pipe0.refs && c->pipe1.refs && in most_get_mbo()
1068 if (c->pipe0.refs && c->pipe0.comp->rx_completion && in most_read_completion()
1072 if (c->pipe1.refs && c->pipe1.comp->rx_completion && in most_read_completion()
1101 if (c->pipe0.refs + c->pipe1.refs > 0) in most_start_channel()
1141 c->pipe0.refs++; in most_start_channel()
1143 c->pipe1.refs++; in most_start_channel()
[all …]
