/linux-6.1.9/samples/bpf/ |
D | xdp_redirect_cpu.bpf.c |
    222  u32 cpu_idx;  in xdp_prognum2_round_robin() local
    231  cpu_idx = *cpu_iterator;  in xdp_prognum2_round_robin()
    237  cpu_selected = bpf_map_lookup_elem(&cpus_available, &cpu_idx);  in xdp_prognum2_round_robin()
    267  u32 cpu_idx = 0;  in xdp_prognum3_proto_separate() local
    286  cpu_idx = 0; /* ARP packet handled on separate CPU */  in xdp_prognum3_proto_separate()
    289  cpu_idx = 0;  in xdp_prognum3_proto_separate()
    296  cpu_idx = 2;  in xdp_prognum3_proto_separate()
    299  cpu_idx = 0;  in xdp_prognum3_proto_separate()
    302  cpu_idx = 1;  in xdp_prognum3_proto_separate()
    305  cpu_idx = 0;  in xdp_prognum3_proto_separate()
    [all …]
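The xdp_prognum2_round_robin() hits show the redirect CPU being picked by walking a shared iterator over the map of available CPUs and wrapping it. Below is a minimal userspace C sketch of that round-robin pattern; the CPU set and the plain array/variable standing in for the sample's BPF maps are illustrative assumptions, not the sample's actual code.

#include <stdio.h>

/* Stands in for the cpus_available map: CPUs eligible for redirect (made-up set). */
static unsigned int cpus_available[] = { 0, 2, 4, 6 };
/* Stands in for the iterator the BPF program reads via a map lookup. */
static unsigned int cpu_iterator;

static unsigned int pick_next_cpu(void)
{
	unsigned int n = sizeof(cpus_available) / sizeof(cpus_available[0]);
	unsigned int cpu_idx = cpu_iterator;

	cpu_iterator = (cpu_iterator + 1) % n;   /* wrap for the next packet */
	return cpus_available[cpu_idx];
}

int main(void)
{
	for (int i = 0; i < 6; i++)
		printf("packet %d -> CPU %u\n", i, pick_next_cpu());
	return 0;
}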
|
/linux-6.1.9/drivers/irqchip/ |
D | irq-bcm6345-l1.c |
    153  unsigned int cpu_idx = cpu_for_irq(intc, d);  in __bcm6345_l1_unmask() local
    155  intc->cpus[cpu_idx]->enable_cache[word] |= mask;  in __bcm6345_l1_unmask()
    156  __raw_writel(intc->cpus[cpu_idx]->enable_cache[word],  in __bcm6345_l1_unmask()
    157  intc->cpus[cpu_idx]->map_base + reg_enable(intc, word));  in __bcm6345_l1_unmask()
    165  unsigned int cpu_idx = cpu_for_irq(intc, d);  in __bcm6345_l1_mask() local
    167  intc->cpus[cpu_idx]->enable_cache[word] &= ~mask;  in __bcm6345_l1_mask()
    168  __raw_writel(intc->cpus[cpu_idx]->enable_cache[word],  in __bcm6345_l1_mask()
    169  intc->cpus[cpu_idx]->map_base + reg_enable(intc, word));  in __bcm6345_l1_mask()
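These hits (and the bcm7038-l1 ones below) show the common irqchip pattern of keeping a per-CPU software copy of the enable word, flipping the bit in the copy, and writing the whole word back to that CPU's register block. A self-contained C sketch of the pattern follows, assuming made-up register geometry and a printf standing in for __raw_writel().

#include <inttypes.h>
#include <stdint.h>
#include <stdio.h>

#define NR_CPUS        2   /* assumed number of per-CPU register blocks */
#define WORDS_PER_CPU  4   /* assumed enable words per block */

/* Software copy of each CPU's enable registers (the driver's enable_cache). */
static uint32_t enable_cache[NR_CPUS][WORDS_PER_CPU];

/* Stand-in for __raw_writel() to the per-CPU mapped register block. */
static void write_enable_reg(unsigned int cpu, unsigned int word, uint32_t val)
{
	printf("cpu%u enable word %u <- %08" PRIx32 "\n", cpu, word, val);
}

static void irq_unmask(unsigned int cpu_idx, unsigned int hwirq)
{
	unsigned int word = hwirq / 32;
	uint32_t mask = 1u << (hwirq % 32);

	enable_cache[cpu_idx][word] |= mask;   /* set the bit in the copy */
	write_enable_reg(cpu_idx, word, enable_cache[cpu_idx][word]);
}

static void irq_mask(unsigned int cpu_idx, unsigned int hwirq)
{
	unsigned int word = hwirq / 32;
	uint32_t mask = 1u << (hwirq % 32);

	enable_cache[cpu_idx][word] &= ~mask;  /* clear the bit in the copy */
	write_enable_reg(cpu_idx, word, enable_cache[cpu_idx][word]);
}

int main(void)
{
	irq_unmask(0, 37);   /* hwirq 37 lands in word 1, bit 5 */
	irq_mask(0, 37);
	return 0;
}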
|
D | irq-bcm7038-l1.c |
    152  static void __bcm7038_l1_unmask(struct irq_data *d, unsigned int cpu_idx)  in __bcm7038_l1_unmask() argument
    158  intc->cpus[cpu_idx]->mask_cache[word] &= ~mask;  in __bcm7038_l1_unmask()
    159  l1_writel(mask, intc->cpus[cpu_idx]->map_base +  in __bcm7038_l1_unmask()
    163  static void __bcm7038_l1_mask(struct irq_data *d, unsigned int cpu_idx)  in __bcm7038_l1_mask() argument
    169  intc->cpus[cpu_idx]->mask_cache[word] |= mask;  in __bcm7038_l1_mask()
    170  l1_writel(mask, intc->cpus[cpu_idx]->map_base +  in __bcm7038_l1_mask()
|
/linux-6.1.9/kernel/bpf/ |
D | percpu_freelist.c |
    103  unsigned int cpu, cpu_idx, i, j, n, m;  in pcpu_freelist_populate() local
    108  cpu_idx = 0;  in pcpu_freelist_populate()
    111  j = n + (cpu_idx < m ? 1 : 0);  in pcpu_freelist_populate()
    117  cpu_idx++;  in pcpu_freelist_populate()
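The pcpu_freelist_populate() hits show the arithmetic used to spread nr_elems elements across the CPUs as evenly as possible: every CPU gets nr_elems / nr_cpus elements and the first nr_elems % nr_cpus CPUs get one extra. A standalone C sketch of that split (the element and CPU counts are made up) is:

#include <stdio.h>

int main(void)
{
	unsigned int nr_elems = 10, nr_cpus = 4;    /* made-up counts */
	unsigned int n = nr_elems / nr_cpus;        /* base share per CPU */
	unsigned int m = nr_elems % nr_cpus;        /* CPUs that take one extra element */
	unsigned int cpu_idx, j;

	for (cpu_idx = 0; cpu_idx < nr_cpus; cpu_idx++) {
		j = n + (cpu_idx < m ? 1 : 0);
		printf("cpu %u gets %u elements\n", cpu_idx, j);
	}
	return 0;
}

With 10 elements and 4 CPUs this prints 3, 3, 2, 2, which sums back to 10.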
|
/linux-6.1.9/kernel/events/ |
D | hw_breakpoint_test.c |
    247  int tsk_on_cpu_idx, cpu_idx;  in test_task_on_all_and_one_cpu() local
    263  cpu_idx = idx;  in test_task_on_all_and_one_cpu()
    272  unregister_test_bp(&test_bps[cpu_idx]);  in test_task_on_all_and_one_cpu()
|
/linux-6.1.9/drivers/thermal/ |
D | cpufreq_cooling.c |
    149  int cpu_idx)  in get_load() argument
    157  int cpu_idx)  in get_load() argument
    161  struct time_in_idle *idle_time = &cpufreq_cdev->idle_time[cpu_idx];  in get_load()
|
/linux-6.1.9/drivers/soc/fsl/qbman/ |
D | qman_ccsr.c |
    667  void qman_set_sdest(u16 channel, unsigned int cpu_idx)  in qman_set_sdest() argument
    675  cpu_idx /= 2;  in qman_set_sdest()
    676  after = (before & (~IO_CFG_SDEST_MASK)) | (cpu_idx << 16);  in qman_set_sdest()
    680  after = (before & (~IO_CFG_SDEST_MASK)) | (cpu_idx << 16);  in qman_set_sdest()
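The qman_set_sdest() hits show a read-modify-write of a per-channel config word: the SDEST field is cleared with a mask and the CPU index is inserted at bit 16, with cpu_idx halved first on parts where two cores share a destination. A hedged C sketch of that update follows; the mask value, the starting register contents, and the plain variable standing in for the MMIO read/write are assumptions for illustration only.

#include <inttypes.h>
#include <stdint.h>
#include <stdio.h>

/* Assumed field layout: SDEST occupies bits 23..16 of the config word. */
#define IO_CFG_SDEST_MASK 0x00ff0000u

static uint32_t io_cfg_reg = 0x00a50001u;   /* made-up current register contents */

int main(void)
{
	unsigned int cpu_idx = 5;
	uint32_t before, after;

	cpu_idx /= 2;                                            /* two cores per destination */
	before = io_cfg_reg;                                     /* stands in for the MMIO read */
	after = (before & ~IO_CFG_SDEST_MASK) | (cpu_idx << 16); /* insert the CPU index */
	io_cfg_reg = after;                                      /* stands in for the MMIO write */

	printf("before=%08" PRIx32 " after=%08" PRIx32 "\n", before, after);
	return 0;
}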
|
D | qman_priv.h | 204 void qman_set_sdest(u16 channel, unsigned int cpu_idx);
|
/linux-6.1.9/drivers/net/ethernet/chelsio/cxgb3/ |
D | t3_cpl.h |
    228  __u8 cpu_idx:6;  member
    232  __u8 cpu_idx:6;  member
    659  __u8 cpu_idx;  member
    672  __u8 cpu_idx;  member
    727  __u8 cpu_idx;  member
|
D | t3_hw.c |
    2473  int i, j, cpu_idx = 0, q_idx = 0;  in t3_config_rss() local
    2480  val |= (cpus[cpu_idx++] & 0x3f) << (8 * j);  in t3_config_rss()
    2481  if (cpus[cpu_idx] == 0xff)  in t3_config_rss()
    2482  cpu_idx = 0;  in t3_config_rss()
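The t3_config_rss() hits show the RSS lookup table being filled by walking a 0xff-terminated list of CPU (queue) indices round-robin and packing four 6-bit entries into each 32-bit word, one per byte lane. A self-contained C sketch of that packing, with a made-up CPU list and table size, is:

#include <inttypes.h>
#include <stdint.h>
#include <stdio.h>

int main(void)
{
	/* 0xff terminates the list and makes the walk wrap to the start. */
	uint8_t cpus[] = { 0, 1, 2, 3, 0xff };
	uint32_t rss_table[4];
	unsigned int i, j, cpu_idx = 0;

	for (i = 0; i < 4; i++) {
		uint32_t val = 0;

		for (j = 0; j < 4; j++) {
			/* pack a 6-bit CPU index into byte lane j */
			val |= (cpus[cpu_idx++] & 0x3f) << (8 * j);
			if (cpus[cpu_idx] == 0xff)
				cpu_idx = 0;
		}
		rss_table[i] = val;
		printf("entry %u = %08" PRIx32 "\n", i, rss_table[i]);
	}
	return 0;
}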
|
D | cxgb3_offload.c | 1103 req->cpu_idx = 0; in set_l2t_ix()
|
/linux-6.1.9/tools/lib/perf/ |
D | evlist.c |
    435  int idx, struct perf_mmap_param *mp, int cpu_idx,  in mmap_per_evsel() argument
    438  struct perf_cpu evlist_cpu = perf_cpu_map__cpu(evlist->all_cpus, cpu_idx);  in mmap_per_evsel()
|
/linux-6.1.9/drivers/edac/ |
D | xgene_edac.c |
    520  int cpu_idx)  in xgene_edac_pmd_l1_check() argument
    526  pg_f = ctx->pmd_csr + cpu_idx * CPU_CSR_STRIDE + CPU_MEMERR_CPU_PAGE;  in xgene_edac_pmd_l1_check()
    533  ctx->pmd * MAX_CPU_PER_PMD + cpu_idx, val,  in xgene_edac_pmd_l1_check()
    573  ctx->pmd * MAX_CPU_PER_PMD + cpu_idx, val,  in xgene_edac_pmd_l1_check()
    617  ctx->pmd * MAX_CPU_PER_PMD + cpu_idx, val,  in xgene_edac_pmd_l1_check()
|
/linux-6.1.9/drivers/net/wireless/mediatek/mt76/ |
D | dma.c |
    136  Q_WRITE(dev, q, cpu_idx, 0);  in mt76_dma_queue_reset()
    230  Q_WRITE(dev, q, cpu_idx, q->head);  in mt76_dma_kick_queue()
|
D | mt76.h | 174 u32 cpu_idx; member
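Together, the dma.c and mt76.h hits show cpu_idx acting as the queue's producer-index register: the driver fills descriptors, then writes its software head into cpu_idx so the device knows how far it may fetch (and writes 0 on reset). A small C sketch of that kick, with a struct field standing in for the Q_WRITE() register access and an assumed ring size, is:

#include <stdint.h>
#include <stdio.h>

#define RING_SIZE 128   /* assumed descriptor ring size */

struct tx_queue {
	unsigned int head;       /* next slot the CPU will fill */
	uint32_t cpu_idx_reg;    /* stand-in for the hardware cpu_idx register */
};

/* Real driver: Q_WRITE(dev, q, cpu_idx, q->head); here we just record it. */
static void kick_queue(struct tx_queue *q)
{
	q->cpu_idx_reg = q->head;
	printf("cpu_idx register <- %u\n", q->head);
}

int main(void)
{
	struct tx_queue q = { 0 };

	/* pretend three descriptors were queued, then tell the device */
	for (int i = 0; i < 3; i++)
		q.head = (q.head + 1) % RING_SIZE;
	kick_queue(&q);
	return 0;
}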
|
/linux-6.1.9/drivers/scsi/cxgbi/cxgb3i/ |
D | cxgb3i.c |
    1171  req->cpu_idx = 0;  in ddp_setup_conn_pgidx()
    1207  req->cpu_idx = 0;  in ddp_setup_conn_digest()
|
/linux-6.1.9/drivers/net/ethernet/mediatek/ |
D | mtk_eth_soc.h | 745 int cpu_idx; member
|
D | mtk_eth_soc.c |
    2054  cpu = ring->cpu_idx;  in mtk_poll_tx_pdma()
    2082  ring->cpu_idx = cpu;  in mtk_poll_tx_pdma()
|
/linux-6.1.9/tools/perf/ |
D | builtin-record.c |
    1863  int cpu_idx, int thread_idx)  in __record__read_lost_samples() argument
    1870  if (perf_evsel__read(&evsel->core, cpu_idx, thread_idx, &count) < 0) {  in __record__read_lost_samples()
    1880  sid = xyarray__entry(evsel->core.sample_id, cpu_idx, thread_idx);  in __record__read_lost_samples()
|
/linux-6.1.9/kernel/sched/ |
D | fair.c |
    2575  int mem_idx, membuf_idx, cpu_idx, cpubuf_idx;  in task_numa_placement() local
    2584  cpu_idx = task_faults_idx(NUMA_CPU, nid, priv);  in task_numa_placement()
    2602  f_diff = f_weight - p->numa_faults[cpu_idx] / 2;  in task_numa_placement()
    2606  p->numa_faults[cpu_idx] += f_diff;  in task_numa_placement()
    2618  ng->faults[cpu_idx] += f_diff;  in task_numa_placement()
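The task_numa_placement() hits show how the per-node CPU fault statistics are aged: on each update the stored value loses half and the freshly weighted faults are added (new = old - old/2 + f_weight). A tiny C sketch of that decay-and-accumulate update, with made-up per-scan weights, is:

#include <stdio.h>

int main(void)
{
	long numa_faults = 0;                       /* stored per-node CPU fault statistic */
	long f_weight[] = { 40, 40, 40, 8, 8, 8 };  /* made-up per-scan weighted fault counts */

	for (unsigned int i = 0; i < 6; i++) {
		long f_diff = f_weight[i] - numa_faults / 2;  /* decay old value by half, add new */
		numa_faults += f_diff;

		printf("scan %u: faults = %ld\n", i, numa_faults);
	}
	return 0;
}

Running it shows the statistic climbing toward the recent weights and falling back when they drop, which is the smoothing the two-line kernel update provides.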
|
/linux-6.1.9/drivers/net/wireless/mediatek/mt76/mt7603/ |
D | mac.c | 1548 dma_idx != readl(&q->regs->cpu_idx)) in mt7603_tx_hang()
|