Searched refs:head (Results 1 – 25 of 2412) sorted by relevance


/linux-6.1.9/drivers/scsi/aic7xxx/
queue.h
112 #define SLIST_HEAD_INITIALIZER(head) \ argument
123 #define SLIST_EMPTY(head) ((head)->slh_first == NULL) argument
125 #define SLIST_FIRST(head) ((head)->slh_first) argument
127 #define SLIST_FOREACH(var, head, field) \ argument
128 for ((var) = SLIST_FIRST((head)); \
132 #define SLIST_INIT(head) do { \ argument
133 SLIST_FIRST((head)) = NULL; \
141 #define SLIST_INSERT_HEAD(head, elm, field) do { \ argument
142 SLIST_NEXT((elm), field) = SLIST_FIRST((head)); \
143 SLIST_FIRST((head)) = (elm); \
[all …]
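
The queue.h hits above come from the aic7xxx driver's private copy of the BSD singly-linked list macros. As a rough illustration only, here is a minimal userspace sketch of how this macro family is typically used, written against the standard <sys/queue.h> that the driver header mirrors (an assumption on my part, not the driver header itself):

    /* Sketch against <sys/queue.h>; the aic7xxx copy defines the same
     * SLIST_* macros with the same semantics. */
    #include <stdio.h>
    #include <sys/queue.h>

    struct item {
        int value;
        SLIST_ENTRY(item) link;             /* embedded next pointer */
    };

    SLIST_HEAD(item_list, item);            /* struct item_list { struct item *slh_first; }; */

    int main(void)
    {
        struct item_list head = SLIST_HEAD_INITIALIZER(head);
        struct item a = { .value = 1 }, b = { .value = 2 };
        struct item *it;

        SLIST_INIT(&head);                  /* slh_first = NULL */
        SLIST_INSERT_HEAD(&head, &a, link); /* list: a */
        SLIST_INSERT_HEAD(&head, &b, link); /* list: b -> a */

        SLIST_FOREACH(it, &head, link)      /* walk slh_first / sle_next */
            printf("%d\n", it->value);

        return SLIST_EMPTY(&head);          /* 0 here: list is not empty */
    }
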
/linux-6.1.9/security/tomoyo/
common.c
204 static bool tomoyo_flush(struct tomoyo_io_buffer *head) in tomoyo_flush() argument
206 while (head->r.w_pos) { in tomoyo_flush()
207 const char *w = head->r.w[0]; in tomoyo_flush()
211 if (len > head->read_user_buf_avail) in tomoyo_flush()
212 len = head->read_user_buf_avail; in tomoyo_flush()
215 if (copy_to_user(head->read_user_buf, w, len)) in tomoyo_flush()
217 head->read_user_buf_avail -= len; in tomoyo_flush()
218 head->read_user_buf += len; in tomoyo_flush()
221 head->r.w[0] = w; in tomoyo_flush()
225 if (head->poll) { in tomoyo_flush()
[all …]
gc.c
42 struct tomoyo_io_buffer *head; in tomoyo_struct_used_by_io_buffer() local
46 list_for_each_entry(head, &tomoyo_io_buffer_list, list) { in tomoyo_struct_used_by_io_buffer()
47 head->users++; in tomoyo_struct_used_by_io_buffer()
49 mutex_lock(&head->io_sem); in tomoyo_struct_used_by_io_buffer()
50 if (head->r.domain == element || head->r.group == element || in tomoyo_struct_used_by_io_buffer()
51 head->r.acl == element || &head->w.domain->list == element) in tomoyo_struct_used_by_io_buffer()
53 mutex_unlock(&head->io_sem); in tomoyo_struct_used_by_io_buffer()
55 head->users--; in tomoyo_struct_used_by_io_buffer()
73 struct tomoyo_io_buffer *head; in tomoyo_name_used_by_io_buffer() local
78 list_for_each_entry(head, &tomoyo_io_buffer_list, list) { in tomoyo_name_used_by_io_buffer()
[all …]
/linux-6.1.9/drivers/gpu/drm/nouveau/dispnv04/
hw.c
39 NVWriteVgaSeq(struct drm_device *dev, int head, uint8_t index, uint8_t value) in NVWriteVgaSeq() argument
41 NVWritePRMVIO(dev, head, NV_PRMVIO_SRX, index); in NVWriteVgaSeq()
42 NVWritePRMVIO(dev, head, NV_PRMVIO_SR, value); in NVWriteVgaSeq()
46 NVReadVgaSeq(struct drm_device *dev, int head, uint8_t index) in NVReadVgaSeq() argument
48 NVWritePRMVIO(dev, head, NV_PRMVIO_SRX, index); in NVReadVgaSeq()
49 return NVReadPRMVIO(dev, head, NV_PRMVIO_SR); in NVReadVgaSeq()
53 NVWriteVgaGr(struct drm_device *dev, int head, uint8_t index, uint8_t value) in NVWriteVgaGr() argument
55 NVWritePRMVIO(dev, head, NV_PRMVIO_GRX, index); in NVWriteVgaGr()
56 NVWritePRMVIO(dev, head, NV_PRMVIO_GX, value); in NVWriteVgaGr()
60 NVReadVgaGr(struct drm_device *dev, int head, uint8_t index) in NVReadVgaGr() argument
[all …]
hw.h
37 void NVWriteVgaSeq(struct drm_device *, int head, uint8_t index, uint8_t value);
38 uint8_t NVReadVgaSeq(struct drm_device *, int head, uint8_t index);
39 void NVWriteVgaGr(struct drm_device *, int head, uint8_t index, uint8_t value);
40 uint8_t NVReadVgaGr(struct drm_device *, int head, uint8_t index);
42 void NVBlankScreen(struct drm_device *, int head, bool blank);
48 void nouveau_hw_save_state(struct drm_device *, int head,
50 void nouveau_hw_load_state(struct drm_device *, int head,
52 void nouveau_hw_load_state_palette(struct drm_device *, int head,
60 int head, uint32_t reg) in NVReadCRTC() argument
64 if (head) in NVReadCRTC()
[all …]
/linux-6.1.9/include/linux/
plist.h
96 #define PLIST_HEAD_INIT(head) \ argument
98 .node_list = LIST_HEAD_INIT((head).node_list) \
105 #define PLIST_HEAD(head) \ argument
106 struct plist_head head = PLIST_HEAD_INIT(head)
125 plist_head_init(struct plist_head *head) in plist_head_init() argument
127 INIT_LIST_HEAD(&head->node_list); in plist_head_init()
142 extern void plist_add(struct plist_node *node, struct plist_head *head);
143 extern void plist_del(struct plist_node *node, struct plist_head *head);
145 extern void plist_requeue(struct plist_node *node, struct plist_head *head);
152 #define plist_for_each(pos, head) \ argument
[all …]
list.h
86 static inline void list_add(struct list_head *new, struct list_head *head) in list_add() argument
88 __list_add(new, head, head->next); in list_add()
100 static inline void list_add_tail(struct list_head *new, struct list_head *head) in list_add_tail() argument
102 __list_add(new, head->prev, head); in list_add_tail()
215 static inline void list_move(struct list_head *list, struct list_head *head) in list_move() argument
218 list_add(list, head); in list_move()
227 struct list_head *head) in list_move_tail() argument
230 list_add_tail(list, head); in list_move_tail()
242 static inline void list_bulk_move_tail(struct list_head *head, in list_bulk_move_tail() argument
249 head->prev->next = first; in list_bulk_move_tail()
[all …]
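
The include/linux/list.h hits show the core circular doubly-linked list API: a sentinel struct list_head, with list_add() inserting just after the head and list_add_tail() just before it. Below is a small userspace re-implementation sketch of that pattern; the names mirror the kernel API, but this is not the kernel header itself.

    #include <stddef.h>
    #include <stdio.h>

    struct list_head { struct list_head *next, *prev; };

    static void INIT_LIST_HEAD(struct list_head *head)
    {
        head->next = head->prev = head;     /* empty list points at itself */
    }

    static void __list_add(struct list_head *new,
                           struct list_head *prev, struct list_head *next)
    {
        next->prev = new;
        new->next = next;
        new->prev = prev;
        prev->next = new;
    }

    static void list_add(struct list_head *new, struct list_head *head)
    {
        __list_add(new, head, head->next);  /* insert right after head */
    }

    static void list_add_tail(struct list_head *new, struct list_head *head)
    {
        __list_add(new, head->prev, head);  /* insert right before head */
    }

    #define list_entry(ptr, type, member) \
        ((type *)((char *)(ptr) - offsetof(type, member)))

    struct task { int id; struct list_head node; };

    int main(void)
    {
        struct list_head head;
        struct task a = { .id = 1 }, b = { .id = 2 };
        struct list_head *pos;

        INIT_LIST_HEAD(&head);
        list_add(&a.node, &head);           /* head -> a */
        list_add_tail(&b.node, &head);      /* head -> a -> b */

        for (pos = head.next; pos != &head; pos = pos->next)
            printf("%d\n", list_entry(pos, struct task, node)->id);
        return 0;
    }
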
btree-type.h
14 static inline void BTREE_FN(init_mempool)(BTREE_TYPE_HEAD *head, in BTREE_FN()
17 btree_init_mempool(&head->h, mempool); in BTREE_FN()
20 static inline int BTREE_FN(init)(BTREE_TYPE_HEAD *head) in BTREE_FN()
22 return btree_init(&head->h); in BTREE_FN()
25 static inline void BTREE_FN(destroy)(BTREE_TYPE_HEAD *head) in BTREE_FN()
27 btree_destroy(&head->h); in BTREE_FN()
38 static inline void *BTREE_FN(lookup)(BTREE_TYPE_HEAD *head, BTREE_KEYTYPE key) in BTREE_FN()
41 return btree_lookup(&head->h, BTREE_TYPE_GEO, &_key); in BTREE_FN()
44 static inline int BTREE_FN(insert)(BTREE_TYPE_HEAD *head, BTREE_KEYTYPE key, in BTREE_FN()
48 return btree_insert(&head->h, BTREE_TYPE_GEO, &_key, val, gfp); in BTREE_FN()
[all …]
freelist.h
24 struct freelist_node *head; member
46 struct freelist_node *head = READ_ONCE(list->head); in __freelist_add() local
49 WRITE_ONCE(node->next, head); in __freelist_add()
52 if (!try_cmpxchg_release(&list->head, &head, node)) { in __freelist_add()
81 struct freelist_node *prev, *next, *head = smp_load_acquire(&list->head); in freelist_try_get() local
84 while (head) { in freelist_try_get()
85 prev = head; in freelist_try_get()
86 refs = atomic_read(&head->refs); in freelist_try_get()
88 !atomic_try_cmpxchg_acquire(&head->refs, &refs, refs+1)) { in freelist_try_get()
89 head = smp_load_acquire(&list->head); in freelist_try_get()
[all …]
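
freelist.h implements a lock-free stack: __freelist_add() publishes a node with a release cmpxchg on list->head and retries if another CPU got there first, while freelist_try_get() additionally uses a per-node refcount to cope with concurrent reuse. A minimal C11-atomics sketch of the push side only (the refcount/ABA handling of the pop side is deliberately left out):

    #include <stdatomic.h>
    #include <stddef.h>

    struct fl_node {
        struct fl_node *next;
    };

    struct freelist {
        _Atomic(struct fl_node *) head;
    };

    static void freelist_add(struct fl_node *node, struct freelist *list)
    {
        struct fl_node *head = atomic_load_explicit(&list->head,
                                                    memory_order_relaxed);

        do {
            node->next = head;              /* current head becomes our next */
            /* Release cmpxchg; on failure 'head' is refreshed and we retry. */
        } while (!atomic_compare_exchange_weak_explicit(&list->head, &head,
                                                        node,
                                                        memory_order_release,
                                                        memory_order_relaxed));
    }

    int main(void)
    {
        struct freelist list = { .head = NULL };
        struct fl_node a, b;

        freelist_add(&a, &list);            /* head: a */
        freelist_add(&b, &list);            /* head: b -> a */
        return 0;
    }
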
btree-128.h
6 static inline void btree_init_mempool128(struct btree_head128 *head, in btree_init_mempool128() argument
9 btree_init_mempool(&head->h, mempool); in btree_init_mempool128()
12 static inline int btree_init128(struct btree_head128 *head) in btree_init128() argument
14 return btree_init(&head->h); in btree_init128()
17 static inline void btree_destroy128(struct btree_head128 *head) in btree_destroy128() argument
19 btree_destroy(&head->h); in btree_destroy128()
22 static inline void *btree_lookup128(struct btree_head128 *head, u64 k1, u64 k2) in btree_lookup128() argument
25 return btree_lookup(&head->h, &btree_geo128, (unsigned long *)&key); in btree_lookup128()
28 static inline void *btree_get_prev128(struct btree_head128 *head, in btree_get_prev128() argument
34 val = btree_get_prev(&head->h, &btree_geo128, in btree_get_prev128()
[all …]
/linux-6.1.9/net/sched/
cls_matchall.c
30 struct cls_mall_head *head = rcu_dereference_bh(tp->root); in mall_classify() local
32 if (unlikely(!head)) in mall_classify()
35 if (tc_skip_sw(head->flags)) in mall_classify()
38 *res = head->res; in mall_classify()
39 __this_cpu_inc(head->pf->rhit); in mall_classify()
40 return tcf_exts_exec(skb, &head->exts, res); in mall_classify()
48 static void __mall_destroy(struct cls_mall_head *head) in __mall_destroy() argument
50 tcf_exts_destroy(&head->exts); in __mall_destroy()
51 tcf_exts_put_net(&head->exts); in __mall_destroy()
52 free_percpu(head->pf); in __mall_destroy()
[all …]
cls_cgroup.c
28 struct cls_cgroup_head *head = rcu_dereference_bh(tp->root); in cls_cgroup_classify() local
31 if (unlikely(!head)) in cls_cgroup_classify()
35 if (!tcf_em_tree_match(skb, &head->ematches, NULL)) in cls_cgroup_classify()
41 return tcf_exts_exec(skb, &head->exts, res); in cls_cgroup_classify()
58 static void __cls_cgroup_destroy(struct cls_cgroup_head *head) in __cls_cgroup_destroy() argument
60 tcf_exts_destroy(&head->exts); in __cls_cgroup_destroy()
61 tcf_em_tree_destroy(&head->ematches); in __cls_cgroup_destroy()
62 tcf_exts_put_net(&head->exts); in __cls_cgroup_destroy()
63 kfree(head); in __cls_cgroup_destroy()
68 struct cls_cgroup_head *head = container_of(to_rcu_work(work), in cls_cgroup_destroy_work() local
[all …]
/linux-6.1.9/tools/include/linux/
list.h
61 static inline void list_add(struct list_head *new, struct list_head *head) in list_add() argument
63 __list_add(new, head, head->next); in list_add()
75 static inline void list_add_tail(struct list_head *new, struct list_head *head) in list_add_tail() argument
77 __list_add(new, head->prev, head); in list_add_tail()
154 static inline void list_move(struct list_head *list, struct list_head *head) in list_move() argument
157 list_add(list, head); in list_move()
166 struct list_head *head) in list_move_tail() argument
169 list_add_tail(list, head); in list_move_tail()
178 const struct list_head *head) in list_is_last() argument
180 return list->next == head; in list_is_last()
[all …]
/linux-6.1.9/drivers/gpu/drm/nouveau/nvkm/engine/disp/
head.c
34 struct nvkm_head *head; in nvkm_head_find() local
35 list_for_each_entry(head, &disp->heads, head) { in nvkm_head_find()
36 if (head->id == id) in nvkm_head_find()
37 return head; in nvkm_head_find()
44 struct nvkm_head *head, void *data, u32 size) in nvkm_head_mthd_scanoutpos() argument
56 head->func->state(head, &head->arm); in nvkm_head_mthd_scanoutpos()
57 args->v0.vtotal = head->arm.vtotal; in nvkm_head_mthd_scanoutpos()
58 args->v0.vblanks = head->arm.vblanks; in nvkm_head_mthd_scanoutpos()
59 args->v0.vblanke = head->arm.vblanke; in nvkm_head_mthd_scanoutpos()
60 args->v0.htotal = head->arm.htotal; in nvkm_head_mthd_scanoutpos()
[all …]
vga.c
27 nvkm_rdport(struct nvkm_device *device, int head, u16 port) in nvkm_rdport() argument
35 return nvkm_rd08(device, 0x601000 + (head * 0x2000) + port); in nvkm_rdport()
41 head = 0; /* CR44 selects head */ in nvkm_rdport()
42 return nvkm_rd08(device, 0x0c0000 + (head * 0x2000) + port); in nvkm_rdport()
49 nvkm_wrport(struct nvkm_device *device, int head, u16 port, u8 data) in nvkm_wrport() argument
57 nvkm_wr08(device, 0x601000 + (head * 0x2000) + port, data); in nvkm_wrport()
63 head = 0; /* CR44 selects head */ in nvkm_wrport()
64 nvkm_wr08(device, 0x0c0000 + (head * 0x2000) + port, data); in nvkm_wrport()
69 nvkm_rdvgas(struct nvkm_device *device, int head, u8 index) in nvkm_rdvgas() argument
71 nvkm_wrport(device, head, 0x03c4, index); in nvkm_rdvgas()
[all …]
/linux-6.1.9/tools/testing/selftests/arm64/signal/testcases/
testcases.c
5 struct _aarch64_ctx *get_header(struct _aarch64_ctx *head, uint32_t magic, in get_header() argument
11 if (!head || resv_sz < HDR_SZ) in get_header()
15 head->magic != magic && head->magic) { in get_header()
16 offs += head->size; in get_header()
17 head = GET_RESV_NEXT_HEAD(head); in get_header()
19 if (head->magic == magic) { in get_header()
20 found = head; in get_header()
37 term = GET_RESV_NEXT_HEAD(&extra->head); in validate_extra_context()
67 if ((sve->head.size != sizeof(struct sve_context)) && in validate_sve_context()
68 (sve->head.size != regs_size)) { in validate_sve_context()
[all …]
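
get_header() in testcases.c walks the signal frame's reserved area, which holds a chain of { magic, size } records terminated by a record whose magic is zero. A simplified, generic sketch of that walk (the types, helper name and bounds checks here are my own, not the selftest's):

    #include <stdint.h>
    #include <stddef.h>

    struct ctx_hdr {
        uint32_t magic;
        uint32_t size;                      /* size of this record, header included */
    };

    static struct ctx_hdr *find_record(struct ctx_hdr *head, uint32_t magic,
                                       size_t resv_sz)
    {
        size_t offs = 0;

        if (!head || resv_sz < sizeof(*head))
            return NULL;

        /* Stop at the terminator (magic == 0) or at the wanted record. */
        while (head->magic && head->magic != magic) {
            if (head->size < sizeof(*head) ||
                offs + head->size + sizeof(*head) > resv_sz)
                return NULL;                /* malformed or truncated chain */
            offs += head->size;
            head = (struct ctx_hdr *)((char *)head + head->size);
        }

        return head->magic == magic ? head : NULL;
    }

    int main(void)
    {
        /* One 16-byte record with magic 0x1234, then a zero terminator. */
        uint32_t buf[6] = { 0x1234, 16, 0, 0, 0, 0 };

        return find_record((struct ctx_hdr *)buf, 0x1234, sizeof(buf)) ? 0 : 1;
    }
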
/linux-6.1.9/net/netlabel/
netlabel_addrlist.h
82 #define netlbl_af4list_foreach(iter, head) \ argument
83 for (iter = __af4list_valid((head)->next, head); \
84 &iter->list != (head); \
85 iter = __af4list_valid(iter->list.next, head))
87 #define netlbl_af4list_foreach_rcu(iter, head) \ argument
88 for (iter = __af4list_valid_rcu((head)->next, head); \
89 &iter->list != (head); \
90 iter = __af4list_valid_rcu(iter->list.next, head))
92 #define netlbl_af4list_foreach_safe(iter, tmp, head) \ argument
93 for (iter = __af4list_valid((head)->next, head), \
[all …]
/linux-6.1.9/drivers/nvme/host/
multipath.c
102 spin_lock_irqsave(&ns->head->requeue_lock, flags); in nvme_failover_req()
104 bio_set_dev(bio, ns->head->disk->part0); in nvme_failover_req()
110 blk_steal_bios(&ns->head->requeue_list, req); in nvme_failover_req()
111 spin_unlock_irqrestore(&ns->head->requeue_lock, flags); in nvme_failover_req()
114 kblockd_schedule_work(&ns->head->requeue_work); in nvme_failover_req()
123 if (!ns->head->disk) in nvme_kick_requeue_lists()
125 kblockd_schedule_work(&ns->head->requeue_work); in nvme_kick_requeue_lists()
127 disk_uevent(ns->head->disk, KOBJ_CHANGE); in nvme_kick_requeue_lists()
143 struct nvme_ns_head *head = ns->head; in nvme_mpath_clear_current_path() local
147 if (!head) in nvme_mpath_clear_current_path()
[all …]
/linux-6.1.9/scripts/gdb/linux/
lists.py
23 def list_for_each(head): argument
24 if head.type == list_head.get_type().pointer():
25 head = head.dereference()
26 elif head.type != list_head.get_type():
28 .format(head.type))
30 if head['next'] == 0:
32 .format(head.address))
35 node = head['next'].dereference()
36 while node.address != head.address:
41 def list_for_each_entry(head, gdbtype, member): argument
[all …]
/linux-6.1.9/drivers/scsi/sym53c8xx_2/
sym_misc.h
42 static inline struct sym_quehead *sym_que_first(struct sym_quehead *head) in sym_que_first() argument
44 return (head->flink == head) ? 0 : head->flink; in sym_que_first()
47 static inline struct sym_quehead *sym_que_last(struct sym_quehead *head) in sym_que_last() argument
49 return (head->blink == head) ? 0 : head->blink; in sym_que_last()
69 static inline int sym_que_empty(struct sym_quehead *head) in sym_que_empty() argument
71 return head->flink == head; in sym_que_empty()
75 struct sym_quehead *head) in sym_que_splice() argument
81 struct sym_quehead *at = head->flink; in sym_que_splice()
83 first->blink = head; in sym_que_splice()
84 head->flink = first; in sym_que_splice()
[all …]
/linux-6.1.9/kernel/bpf/
percpu_freelist.c
15 struct pcpu_freelist_head *head = per_cpu_ptr(s->freelist, cpu); in pcpu_freelist_init() local
17 raw_spin_lock_init(&head->lock); in pcpu_freelist_init()
18 head->first = NULL; in pcpu_freelist_init()
30 static inline void pcpu_freelist_push_node(struct pcpu_freelist_head *head, in pcpu_freelist_push_node() argument
33 node->next = head->first; in pcpu_freelist_push_node()
34 WRITE_ONCE(head->first, node); in pcpu_freelist_push_node()
37 static inline void ___pcpu_freelist_push(struct pcpu_freelist_head *head, in ___pcpu_freelist_push() argument
40 raw_spin_lock(&head->lock); in ___pcpu_freelist_push()
41 pcpu_freelist_push_node(head, node); in ___pcpu_freelist_push()
42 raw_spin_unlock(&head->lock); in ___pcpu_freelist_push()
[all …]
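
percpu_freelist.c keeps one spinlock-protected singly-linked list per CPU and pushes onto the local CPU's list so CPUs rarely contend on the same lock. A hedged userspace approximation, with a fixed bucket array and pthread mutexes standing in for per-CPU data and raw spinlocks (the kernel's pop path, which falls back to scanning the other CPUs' lists, is omitted):

    #include <pthread.h>
    #include <stddef.h>

    #define NR_BUCKETS 4                    /* stand-in for the number of CPUs */

    struct pcpu_node {
        struct pcpu_node *next;
    };

    struct pcpu_bucket {
        pthread_mutex_t lock;
        struct pcpu_node *first;
    };

    static struct pcpu_bucket buckets[NR_BUCKETS];

    static void freelist_init(void)
    {
        for (int i = 0; i < NR_BUCKETS; i++) {
            pthread_mutex_init(&buckets[i].lock, NULL);
            buckets[i].first = NULL;
        }
    }

    static void freelist_push(struct pcpu_node *node, unsigned int bucket)
    {
        struct pcpu_bucket *b = &buckets[bucket % NR_BUCKETS];

        pthread_mutex_lock(&b->lock);
        node->next = b->first;              /* push onto this bucket's stack */
        b->first = node;
        pthread_mutex_unlock(&b->lock);
    }

    int main(void)
    {
        struct pcpu_node n1, n2;

        freelist_init();
        freelist_push(&n1, 0);
        freelist_push(&n2, 1);
        return 0;
    }
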
/linux-6.1.9/lib/
btree.c
93 static unsigned long *btree_node_alloc(struct btree_head *head, gfp_t gfp) in btree_node_alloc() argument
97 node = mempool_alloc(head->mempool, gfp); in btree_node_alloc()
176 static inline void __btree_init(struct btree_head *head) in __btree_init() argument
178 head->node = NULL; in __btree_init()
179 head->height = 0; in __btree_init()
182 void btree_init_mempool(struct btree_head *head, mempool_t *mempool) in btree_init_mempool() argument
184 __btree_init(head); in btree_init_mempool()
185 head->mempool = mempool; in btree_init_mempool()
189 int btree_init(struct btree_head *head) in btree_init() argument
191 __btree_init(head); in btree_init()
[all …]
/linux-6.1.9/tools/virtio/ringtest/
virtio_ring_0_9.c
101 unsigned head; in add_inbuf() local
111 head = (ring_size - 1) & (guest.avail_idx++); in add_inbuf()
113 head = guest.free_head; in add_inbuf()
118 desc[head].flags = VRING_DESC_F_NEXT; in add_inbuf()
119 desc[head].addr = (unsigned long)(void *)buf; in add_inbuf()
120 desc[head].len = len; in add_inbuf()
125 desc[head].flags &= ~VRING_DESC_F_NEXT; in add_inbuf()
127 guest.free_head = desc[head].next; in add_inbuf()
130 data[head].data = datap; in add_inbuf()
137 (head | (avail & ~(ring_size - 1))) ^ 0x8000; in add_inbuf()
[all …]
/linux-6.1.9/drivers/rpmsg/
qcom_glink_smem.c
40 __le32 *head; member
54 u32 head; in glink_smem_rx_avail() local
70 head = le32_to_cpu(*pipe->head); in glink_smem_rx_avail()
73 if (head < tail) in glink_smem_rx_avail()
74 return pipe->native.length - tail + head; in glink_smem_rx_avail()
76 return head - tail; in glink_smem_rx_avail()
117 u32 head; in glink_smem_tx_avail() local
121 head = le32_to_cpu(*pipe->head); in glink_smem_tx_avail()
124 if (tail <= head) in glink_smem_tx_avail()
125 avail = pipe->native.length - head + tail; in glink_smem_tx_avail()
[all …]
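
glink_smem_rx_avail() and glink_smem_tx_avail() do standard circular-FIFO arithmetic on two byte offsets: head, advanced by the producer, and tail, advanced by the consumer, both wrapping at the pipe length. A minimal sketch of the read-side computation (the struct layout and names here are illustrative, not the driver's):

    #include <stdint.h>

    struct fifo {
        uint32_t head;      /* write offset, owned by the producer */
        uint32_t tail;      /* read offset, owned by the consumer */
        uint32_t length;    /* size of the backing buffer in bytes */
    };

    static uint32_t fifo_rx_avail(const struct fifo *f)
    {
        if (f->head < f->tail)                      /* data wraps past the end */
            return f->length - f->tail + f->head;
        return f->head - f->tail;
    }

    int main(void)
    {
        struct fifo f = { .head = 100, .tail = 900, .length = 1024 };

        return fifo_rx_avail(&f) == 224 ? 0 : 1;    /* wrapped: 1024 - 900 + 100 */
    }

The driver's tx-side helper additionally keeps some reserve space so the writer never completely fills the FIFO; that detail is not modeled above.
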
/linux-6.1.9/sound/pci/ctxfi/
ctimap.c
21 struct list_head *pos, *pre, *head; in input_mapper_add() local
24 head = mappers; in input_mapper_add()
26 if (list_empty(head)) { in input_mapper_add()
29 list_add(&entry->list, head); in input_mapper_add()
33 list_for_each(pos, head) { in input_mapper_add()
41 if (pos != head) { in input_mapper_add()
43 if (pre == head) in input_mapper_add()
44 pre = head->prev; in input_mapper_add()
48 pre = head->prev; in input_mapper_add()
49 pos = head->next; in input_mapper_add()
[all …]
