/linux-6.6.21/sound/core/seq/oss/

  seq_oss_readq.c
      48  q->qlen = 0;                              in snd_seq_oss_readq_new()
      76  if (q->qlen) {                            in snd_seq_oss_readq_clear()
      77  q->qlen = 0;                              in snd_seq_oss_readq_clear()
     146  if (q->qlen >= q->maxlen - 1) {           in snd_seq_oss_readq_put_event()
     153  q->qlen++;                                in snd_seq_oss_readq_put_event()
     171  if (q->qlen == 0)                         in snd_seq_oss_readq_pick()
     184  (q->qlen > 0 || q->head == q->tail),      in snd_seq_oss_readq_wait()
     195  if (q->qlen > 0) {                        in snd_seq_oss_readq_free()
     197  q->qlen--;                                in snd_seq_oss_readq_free()
     209  return q->qlen;                           in snd_seq_oss_readq_poll()
     [all …]
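The seq_oss_readq.c hits trace a bounded ring buffer: head and tail indices plus a qlen count, with the queue treated as full once qlen reaches maxlen - 1. A minimal user-space sketch of that bookkeeping (the struct and helper names below are illustrative, not the kernel's):

#include <stdio.h>

#define MAXLEN 8    /* illustrative capacity; the kernel sizes this per device */

struct readq {
    int buf[MAXLEN];
    int head;   /* next slot to read  */
    int tail;   /* next slot to write */
    int qlen;   /* number of queued entries */
};

/* Mirrors the "qlen >= maxlen - 1 means full" check seen in snd_seq_oss_readq_put_event(). */
static int readq_put(struct readq *q, int ev)
{
    if (q->qlen >= MAXLEN - 1)
        return -1;                  /* queue full, drop the event */
    q->buf[q->tail] = ev;
    q->tail = (q->tail + 1) % MAXLEN;
    q->qlen++;
    return 0;
}

/* Mirrors snd_seq_oss_readq_pick()/..._free(): take the head entry, then consume it. */
static int readq_get(struct readq *q, int *ev)
{
    if (q->qlen == 0)
        return -1;                  /* nothing queued */
    *ev = q->buf[q->head];
    q->head = (q->head + 1) % MAXLEN;
    q->qlen--;
    return 0;
}

int main(void)
{
    struct readq q = { .head = 0, .tail = 0, .qlen = 0 };
    int ev;

    for (int i = 0; i < 10; i++)
        if (readq_put(&q, i) < 0)
            printf("dropped %d (qlen=%d)\n", i, q.qlen);
    while (readq_get(&q, &ev) == 0)
        printf("read %d\n", ev);
    return 0;
}
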
/linux-6.6.21/net/sched/

  sch_sfq.c
     103  sfq_index qlen; /* number of skbs in skblist */              member
     207  int qlen = slot->qlen;                                       in sfq_link()  local
     209  p = qlen + SFQ_MAX_FLOWS;                                    in sfq_link()
     210  n = q->dep[qlen].next;                                       in sfq_link()
     215  q->dep[qlen].next = x; /* sfq_dep_head(q, p)->next = x */    in sfq_link()
     235  d = q->slots[x].qlen--;                                      in sfq_dec()
     248  d = ++q->slots[x].qlen;                                      in sfq_inc()
     310  sch->q.qlen--;                                               in sfq_drop()
     350  sfq_index x, qlen;                                           in sfq_enqueue()  local
     424  if (slot->qlen >= q->maxdepth) {                             in sfq_enqueue()
     [all …]
  sch_sfb.c
      40  u16 qlen; /* length of virtual queue */       member
     132  if (b[hash].qlen < 0xFFFF)                    in increment_one_qlen()
     133  b[hash].qlen++;                               in increment_one_qlen()
     161  if (b[hash].qlen > 0)                         in decrement_one_qlen()
     162  b[hash].qlen--;                               in decrement_one_qlen()
     201  u32 qlen = 0, prob = 0, totalpm = 0;          in sfb_compute_qlen()  local
     205  if (qlen < b->qlen)                           in sfb_compute_qlen()
     206  qlen = b->qlen;                               in sfb_compute_qlen()
     214  return qlen;                                  in sfb_compute_qlen()
     295  if (unlikely(sch->q.qlen >= q->limit)) {      in sfb_enqueue()
     [all …]
  sch_codel.c
     101  if (q->stats.drop_count && sch->q.qlen) {                       in codel_qdisc_dequeue()
     138  unsigned int qlen, dropped = 0;                                 in codel_change()  local
     172  qlen = sch->q.qlen;                                             in codel_change()
     173  while (sch->q.qlen > sch->limit) {                              in codel_change()
     180  qdisc_tree_reduce_backlog(sch, qlen - sch->q.qlen, dropped);    in codel_change()
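The codel_change() hits (and the matching ones in sch_choke.c and sch_hhf.c below) follow a common qdisc reconfiguration pattern: snapshot sch->q.qlen, drop packets until the queue fits the new limit, then report the packet and byte difference to the parent qdiscs in a single qdisc_tree_reduce_backlog() call. A simplified user-space model of that accounting (the queue structure and helper names are illustrative, not the kernel's qdisc API):

#include <stdio.h>

/* Toy stand-ins for sch->q.qlen and the per-packet byte counts. */
struct toy_qdisc {
    unsigned int qlen;             /* packets currently queued     */
    unsigned int pkt_bytes[64];    /* sizes of the queued packets  */
    unsigned int head;
};

/* Drop one packet from the head and return its size in bytes. */
static unsigned int toy_dequeue_drop(struct toy_qdisc *q)
{
    unsigned int len = q->pkt_bytes[q->head++];
    q->qlen--;
    return len;
}

/* Stand-in for qdisc_tree_reduce_backlog(): tell the parent how much went away. */
static void toy_reduce_backlog(unsigned int pkts, unsigned int bytes)
{
    printf("backlog reduced by %u packets / %u bytes\n", pkts, bytes);
}

/* Mirrors the shape of codel_change(): trim the queue down to the new limit. */
static void toy_change_limit(struct toy_qdisc *q, unsigned int new_limit)
{
    unsigned int qlen = q->qlen;    /* snapshot, like "qlen = sch->q.qlen" */
    unsigned int dropped = 0;

    while (q->qlen > new_limit)
        dropped += toy_dequeue_drop(q);

    toy_reduce_backlog(qlen - q->qlen, dropped);
}

int main(void)
{
    struct toy_qdisc q = { .qlen = 5, .pkt_bytes = { 100, 200, 300, 400, 500 } };

    toy_change_limit(&q, 2);    /* expect: 3 packets / 600 bytes reported */
    return 0;
}
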
  sch_skbprio.c
      82  if (sch->q.qlen < sch->limit) {           in skbprio_enqueue()
      94  sch->q.qlen++;                            in skbprio_enqueue()
     125  BUG_ON(sch->q.qlen != 1);                 in skbprio_enqueue()
     148  sch->q.qlen--;                            in skbprio_dequeue()
     157  BUG_ON(sch->q.qlen);                      in skbprio_dequeue()
     255  q->qstats[cl - 1].qlen) < 0)              in skbprio_dump_class_stats()
  sch_choke.c
     129  --sch->q.qlen;                                                  in choke_drop_by_idx()
     220  q->vars.qavg = red_calc_qavg(p, &q->vars, sch->q.qlen);         in choke_enqueue()
     267  if (sch->q.qlen < q->limit) {                                   in choke_enqueue()
     270  ++sch->q.qlen;                                                  in choke_enqueue()
     297  --sch->q.qlen;                                                  in choke_dequeue()
     380  unsigned int oqlen = sch->q.qlen, tail = 0;                     in choke_change()
     395  --sch->q.qlen;                                                  in choke_change()
     398  qdisc_tree_reduce_backlog(sch, oqlen - sch->q.qlen, dropped);   in choke_change()
  sch_drr.c
     261  __u32 qlen = qdisc_qlen_sum(cl->qdisc);                                in drr_dump_class_stats()  local
     266  if (qlen)                                                              in drr_dump_class_stats()
     271  gnet_stats_copy_queue(d, cl_q->cpu_qstats, &cl_q->qstats, qlen) < 0)   in drr_dump_class_stats()
     349  first = !cl->qdisc->q.qlen;                                            in drr_enqueue()
     365  sch->q.qlen++;                                                         in drr_enqueue()
     392  if (cl->qdisc->q.qlen == 0)                                            in drr_dequeue()
     398  sch->q.qlen--;                                                         in drr_dequeue()
     433  if (cl->qdisc->q.qlen)                                                 in drr_reset_qdisc()
  sch_hhf.c
     362  sch->q.qlen--;                                       in hhf_drop()
     402  if (++sch->q.qlen <= sch->limit)                     in hhf_enqueue()
     445  sch->q.qlen--;                                       in hhf_dequeue()
     513  unsigned int qlen, prev_backlog;                     in hhf_change()  local
     560  qlen = sch->q.qlen;                                  in hhf_change()
     562  while (sch->q.qlen > sch->limit) {                   in hhf_change()
     567  qdisc_tree_reduce_backlog(sch, qlen - sch->q.qlen,   in hhf_change()
     [all …]
/linux-6.6.21/sound/core/seq/

  seq_midi_event.c
      50  int qlen;                                             member
     141  dev->qlen = 0;                                        in reset_encode()
     200  dev->qlen = status_event[dev->type].qlen;             in snd_midi_event_encode_byte()
     202  if (dev->qlen > 0) {                                  in snd_midi_event_encode_byte()
     206  dev->qlen--;                                          in snd_midi_event_encode_byte()
     210  dev->qlen = status_event[dev->type].qlen - 1;         in snd_midi_event_encode_byte()
     214  if (dev->qlen == 0) {                                 in snd_midi_event_encode_byte()
     320  int qlen;                                             in snd_midi_event_decode()  local
     331  qlen = status_event[type].qlen + 1;                   in snd_midi_event_decode()
     336  qlen = status_event[type].qlen;                       in snd_midi_event_decode()
     [all …]
/linux-6.6.21/include/net/

  sch_generic.h
      68  __u32 qlen;                                           member
     185  return !READ_ONCE(qdisc->q.qlen);                     in qdisc_is_empty()
     517  return q->q.qlen;                                     in qdisc_qlen()
     522  __u32 qlen = q->qstats.qlen;                          in qdisc_qlen_sum()  local
     527  qlen += per_cpu_ptr(q->cpu_qstats, i)->qlen;          in qdisc_qlen_sum()
     529  qlen += q->q.qlen;                                    in qdisc_qlen_sum()
     532  return qlen;                                          in qdisc_qlen_sum()
     906  this_cpu_inc(sch->cpu_qstats->qlen);                  in qdisc_qstats_cpu_qlen_inc()
     911  this_cpu_dec(sch->cpu_qstats->qlen);                  in qdisc_qstats_cpu_qlen_dec()
     951  __u32 qlen = qdisc_qlen_sum(sch);                     in qdisc_qstats_copy()  local
     [all …]
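The qdisc_qlen_sum() hits show how the total queue length is reconstructed for a lockless qdisc: start from qstats.qlen, then either fold in the per-CPU counters (which qdisc_qstats_cpu_qlen_inc()/dec() maintain) or fall back to the plain q.qlen. A rough user-space model of that summation, assuming a fixed CPU count instead of the kernel's for_each_possible_cpu() iteration (structure and names are illustrative):

#include <stdbool.h>
#include <stdio.h>

#define NR_CPUS 4    /* assumption for the model; the kernel walks all possible CPUs */

struct toy_qdisc {
    unsigned int qstats_qlen;            /* aggregated stats counter            */
    unsigned int q_qlen;                 /* length of the embedded packet queue */
    bool percpu;                         /* does this qdisc keep per-CPU stats? */
    unsigned int cpu_qlen[NR_CPUS];      /* stand-in for the percpu counters    */
};

/* Mirrors the shape of qdisc_qlen_sum(): base counter plus either the
 * per-CPU contributions or the plain queue length. */
static unsigned int toy_qlen_sum(const struct toy_qdisc *q)
{
    unsigned int qlen = q->qstats_qlen;

    if (q->percpu) {
        for (int cpu = 0; cpu < NR_CPUS; cpu++)
            qlen += q->cpu_qlen[cpu];
    } else {
        qlen += q->q_qlen;
    }
    return qlen;
}

int main(void)
{
    struct toy_qdisc lockless = { .percpu = true, .cpu_qlen = { 2, 0, 5, 1 } };
    struct toy_qdisc classic  = { .q_qlen = 7 };

    printf("lockless qlen = %u\n", toy_qlen_sum(&lockless));    /* 8 */
    printf("classic  qlen = %u\n", toy_qlen_sum(&classic));     /* 7 */
    return 0;
}
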
  request_sock.h
     162  int qlen; /* # of pending (TCP_SYN_RECV) reqs */    member
     180  atomic_t qlen;                                      member
     222  atomic_dec(&queue->qlen);                           in reqsk_queue_removed()
     228  atomic_inc(&queue->qlen);                           in reqsk_queue_added()
     233  return atomic_read(&queue->qlen);                   in reqsk_queue_len()
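In request_sock.h the listener's backlog counter is an atomic_t, so it can be adjusted without a lock: reqsk_queue_added()/reqsk_queue_removed() increment and decrement it, and reqsk_queue_len() reads it. A standalone sketch of the same counting discipline using C11 atomics (type and function names are illustrative, not the kernel's):

#include <stdatomic.h>
#include <stdio.h>

/* Stand-in for struct request_sock_queue: only the length counter matters here. */
struct toy_reqsk_queue {
    atomic_int qlen;    /* number of pending (SYN_RECV-like) requests */
};

static void toy_reqsk_queue_added(struct toy_reqsk_queue *q)
{
    atomic_fetch_add_explicit(&q->qlen, 1, memory_order_relaxed);
}

static void toy_reqsk_queue_removed(struct toy_reqsk_queue *q)
{
    atomic_fetch_sub_explicit(&q->qlen, 1, memory_order_relaxed);
}

static int toy_reqsk_queue_len(struct toy_reqsk_queue *q)
{
    return atomic_load_explicit(&q->qlen, memory_order_relaxed);
}

int main(void)
{
    struct toy_reqsk_queue q = { 0 };

    toy_reqsk_queue_added(&q);
    toy_reqsk_queue_added(&q);
    toy_reqsk_queue_removed(&q);
    printf("pending requests: %d\n", toy_reqsk_queue_len(&q));    /* 1 */
    return 0;
}
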
/linux-6.6.21/include/trace/events/

  rcu.h
     514  TP_PROTO(const char *rcuname, struct rcu_head *rhp, long qlen),
     516  TP_ARGS(rcuname, rhp, qlen),
     522  __field(long, qlen)
     529  __entry->qlen = qlen;
     534  __entry->qlen)
     574  long qlen),
     576  TP_ARGS(rcuname, rhp, offset, qlen),
     582  __field(long, qlen)
     589  __entry->qlen = qlen;
     594  __entry->qlen)
     [all …]
/linux-6.6.21/drivers/md/

  dm-ps-queue-length.c
      40  atomic_t qlen; /* the number of in-flight I/Os */            member
     101  DMEMIT("%d ", atomic_read(&pi->qlen));                       in ql_status()
     153  atomic_set(&pi->qlen, 0);                                    in ql_add_path()
     204  (atomic_read(&pi->qlen) < atomic_read(&best->qlen)))         in ql_select_path()
     207  if (!atomic_read(&best->qlen))                               in ql_select_path()
     228  atomic_inc(&pi->qlen);                                       in ql_start_io()
     238  atomic_dec(&pi->qlen);                                       in ql_end_io()
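The queue-length path selector keeps an atomic in-flight I/O count per path: ql_start_io()/ql_end_io() adjust it, and ql_select_path() prefers the path with the smallest count, stopping early when it finds an idle one. A simplified model of that selection loop (the path representation and names are illustrative, and the real selector also weights paths by their configured repeat count):

#include <stdio.h>

struct toy_path {
    const char *name;
    int qlen;    /* in-flight I/Os on this path (an atomic_t in the kernel) */
};

/* Mirrors the shape of ql_select_path(): pick the least-loaded path,
 * and stop scanning as soon as a completely idle one is found. */
static struct toy_path *toy_select_path(struct toy_path *paths, int n)
{
    struct toy_path *best = NULL;

    for (int i = 0; i < n; i++) {
        if (!best || paths[i].qlen < best->qlen)
            best = &paths[i];
        if (best->qlen == 0)
            break;    /* an idle path cannot be beaten */
    }
    return best;
}

int main(void)
{
    struct toy_path paths[] = {
        { "sda", 4 },
        { "sdb", 1 },
        { "sdc", 3 },
    };
    struct toy_path *p = toy_select_path(paths, 3);

    printf("dispatching on %s (qlen=%d)\n", p->name, p->qlen);
    p->qlen++;    /* ql_start_io(); ql_end_io() would decrement it on completion */
    return 0;
}
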
/linux-6.6.21/drivers/usb/gadget/legacy/

  gmidi.c
      52  static unsigned int qlen = 32;                                        variable
      53  module_param(qlen, uint, S_IRUGO);
      54  MODULE_PARM_DESC(qlen, "USB read and write request queue length");
     156  midi_opts->qlen = qlen;                                               in midi_bind()
  printer.c
      51  static unsigned qlen = 10;                                            variable
      52  module_param(qlen, uint, S_IRUGO|S_IWUSR);
      53  MODULE_PARM_DESC(qlen, "The number of 8k buffers to use per endpoint");
      55  #define QLEN qlen
  zero.c
      66  .qlen = GZERO_QLEN,
     254  module_param_named(qlen, gzero_options.qlen, uint, S_IRUGO|S_IWUSR);
     255  MODULE_PARM_DESC(qlen, "depth of loopback queue");
     313  lb_opts->qlen = gzero_options.qlen;                                   in zero_bind()
/linux-6.6.21/net/core/

  gen_stats.c
     348  qstats->qlen += qcpu->qlen;                             in gnet_stats_add_queue_cpu()
     363  qstats->qlen += q->qlen;                                in gnet_stats_add_queue()
     389  struct gnet_stats_queue *q, __u32 qlen)                 in gnet_stats_copy_queue()  argument
     394  qstats.qlen = qlen;                                     in gnet_stats_copy_queue()
     398  d->tc_stats.qlen = qstats.qlen;                         in gnet_stats_copy_queue()
  request_sock.c
      38  queue->fastopenq.qlen = 0;                              in reqsk_queue_alloc()
      98  fastopenq->qlen--;                                      in reqsk_fastopen_remove()
     126  fastopenq->qlen++;                                      in reqsk_fastopen_remove()
/linux-6.6.21/drivers/usb/gadget/function/

  f_loopback.c
      34  unsigned qlen;                                          member
     321  for (i = 0; i < loop->qlen && result == 0; i++) {       in alloc_requests()
     439  loop->qlen = lb_opts->qlen;                             in loopback_alloc()
     440  if (!loop->qlen)                                        in loopback_alloc()
     441  loop->qlen = 32;                                        in loopback_alloc()
     477  result = sprintf(page, "%d\n", opts->qlen);             in f_lb_opts_qlen_show()
     500  opts->qlen = num;                                       in f_lb_opts_qlen_store()
     507  CONFIGFS_ATTR(f_lb_opts_, qlen);
     577  lb_opts->qlen = GZERO_QLEN;                             in loopback_alloc_instance()
  g_zero.h
      24  unsigned qlen;                                          member
      53  unsigned qlen;                                          member
/linux-6.6.21/drivers/crypto/cavium/cpt/

  cptvf_main.c
      99  pqinfo->qlen = 0;                                                            in free_pending_queues()
     103  static int alloc_pending_queues(struct pending_qinfo *pqinfo, u32 qlen,      in alloc_pending_queues()  argument
     111  pqinfo->qlen = qlen;                                                         in alloc_pending_queues()
     114  queue->head = kcalloc(qlen, sizeof(*queue->head), GFP_KERNEL);               in alloc_pending_queues()
     136  static int init_pending_queues(struct cpt_vf *cptvf, u32 qlen, u32 nr_queues)    in init_pending_queues()  argument
     144  ret = alloc_pending_queues(&cptvf->pqinfo, qlen, nr_queues);                 in init_pending_queues()
     202  u32 qlen)                                                                    in alloc_command_queues()  argument
     212  cptvf->qsize = min(qlen, cqinfo->qchunksize) *                               in alloc_command_queues()
     215  q_size = qlen * cqinfo->cmd_size;                                            in alloc_command_queues()
     276  static int init_command_queues(struct cpt_vf *cptvf, u32 qlen)               in init_command_queues()  argument
     [all …]
/linux-6.6.21/drivers/nfc/st-nci/

  ndlc.c
      94  if (ndlc->send_q.qlen)                                  in llt_ndlc_send_queue()
      96  ndlc->send_q.qlen, ndlc->ack_pending_q.qlen);           in llt_ndlc_send_queue()
      98  while (ndlc->send_q.qlen) {                             in llt_ndlc_send_queue()
     153  if (ndlc->rcv_q.qlen)                                   in llt_ndlc_rcv_queue()
     154  pr_debug("rcvQlen=%d\n", ndlc->rcv_q.qlen);             in llt_ndlc_rcv_queue()
/linux-6.6.21/drivers/crypto/marvell/octeontx/

  otx_cptvf_main.c
      88  queue->qlen = 0;                                                               in free_pending_queues()
      93  static int alloc_pending_queues(struct otx_cpt_pending_qinfo *pqinfo, u32 qlen,    in alloc_pending_queues()  argument
     103  queue->head = kcalloc(qlen, sizeof(*queue->head), GFP_KERNEL);                 in alloc_pending_queues()
     112  queue->qlen = qlen;                                                            in alloc_pending_queues()
     125  static int init_pending_queues(struct otx_cptvf *cptvf, u32 qlen,              in init_pending_queues()  argument
     134  ret = alloc_pending_queues(&cptvf->pqinfo, qlen, num_queues);                  in init_pending_queues()
     187  u32 qlen)                                                                      in alloc_command_queues()  argument
     198  cptvf->qsize = min(qlen, cqinfo->qchunksize) *                                 in alloc_command_queues()
     201  q_size = qlen * OTX_CPT_INST_SIZE;                                             in alloc_command_queues()
     262  static int init_command_queues(struct otx_cptvf *cptvf, u32 qlen)              in init_command_queues()  argument
     [all …]
/linux-6.6.21/net/kcm/

  kcmproc.c
     119  kcm->sk.sk_receive_queue.qlen,                          in kcm_format_sock()
     121  kcm->sk.sk_write_queue.qlen,                            in kcm_format_sock()
     149  psock->sk->sk_receive_queue.qlen,                       in kcm_format_psock()
     151  psock->sk->sk_write_queue.qlen,                         in kcm_format_psock()
     167  if (psock->sk->sk_receive_queue.qlen) {                 in kcm_format_psock()
/linux-6.6.21/net/nfc/hci/

  llc_shdlc.c
     322  if (shdlc->send_q.qlen == 0) {                                           in llc_shdlc_rcv_s_frame()
     462  if (shdlc->rcv_q.qlen)                                                   in llc_shdlc_handle_rcv_queue()
     463  pr_debug("rcvQlen=%d\n", shdlc->rcv_q.qlen);                             in llc_shdlc_handle_rcv_queue()
     518  if (shdlc->send_q.qlen)                                                  in llc_shdlc_handle_send_queue()
     520  shdlc->send_q.qlen, shdlc->ns, shdlc->dnr,                               in llc_shdlc_handle_send_queue()
     523  shdlc->ack_pending_q.qlen);                                              in llc_shdlc_handle_send_queue()
     525  while (shdlc->send_q.qlen && shdlc->ack_pending_q.qlen < shdlc->w &&     in llc_shdlc_handle_send_queue()
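The llc_shdlc_handle_send_queue() hit on line 525 is a sliding-window gate: frames move from send_q to the wire only while fewer than w of them are still sitting on ack_pending_q. A small model of that gate using plain counters in place of the kernel's sk_buff_head queues (names are illustrative):

#include <stdio.h>

struct toy_shdlc {
    int send_qlen;           /* frames waiting to be transmitted     */
    int ack_pending_qlen;    /* frames sent but not yet acknowledged */
    int w;                   /* window size negotiated with the peer */
};

/* Mirrors the loop condition in llc_shdlc_handle_send_queue(): keep sending
 * while there is something queued and the unacked window is not full. */
static void toy_handle_send_queue(struct toy_shdlc *s)
{
    while (s->send_qlen && s->ack_pending_qlen < s->w) {
        s->send_qlen--;
        s->ack_pending_qlen++;
        printf("sent one frame (send_q=%d, ack_pending=%d)\n",
               s->send_qlen, s->ack_pending_qlen);
    }
}

/* An acknowledgement frees a window slot, so sending can resume. */
static void toy_handle_ack(struct toy_shdlc *s)
{
    if (s->ack_pending_qlen > 0)
        s->ack_pending_qlen--;
    toy_handle_send_queue(s);
}

int main(void)
{
    struct toy_shdlc s = { .send_qlen = 5, .ack_pending_qlen = 0, .w = 3 };

    toy_handle_send_queue(&s);    /* sends 3 frames, then blocks on the window */
    toy_handle_ack(&s);           /* one ack -> one more frame goes out        */
    return 0;
}
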