Occurrences of the identifier "tcpu" in linux-6.6.21:

/linux-6.6.21/mm/

percpu-vm.c
      86  unsigned int cpu, tcpu;   in pcpu_alloc_pages() (local)
     106  for_each_possible_cpu(tcpu) {   in pcpu_alloc_pages()
     107  if (tcpu == cpu)   in pcpu_alloc_pages()
     110  __free_page(pages[pcpu_page_idx(tcpu, i)]);   in pcpu_alloc_pages()
     217  unsigned int cpu, tcpu;   in pcpu_map_pages() (local)
     233  for_each_possible_cpu(tcpu) {   in pcpu_map_pages()
     234  if (tcpu == cpu)   in pcpu_map_pages()
     236  __pcpu_unmap_pages(pcpu_chunk_addr(chunk, tcpu, page_start),   in pcpu_map_pages()
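In the pcpu_alloc_pages()/pcpu_map_pages() hits above, tcpu is the cursor of an error-unwind loop: when allocation or mapping fails for one CPU, every CPU already processed gets its pages released again. Below is a minimal sketch of that unwind pattern, not the kernel's code; demo_unit_pages and demo_page_idx() are assumptions standing in for pcpu_unit_pages / pcpu_page_idx().

/*
 * Sketch: allocate one page per (CPU, page-index) slot; on failure, free
 * the partial row of the failing CPU plus the full rows of every CPU
 * handled before it.
 */
#include <linux/cpumask.h>
#include <linux/errno.h>
#include <linux/gfp.h>
#include <linux/topology.h>

static unsigned int demo_unit_pages;	/* pages[] slots per possible CPU (assumed layout) */

static inline unsigned int demo_page_idx(unsigned int cpu, int page_idx)
{
	return cpu * demo_unit_pages + page_idx;
}

static int demo_alloc_pages(struct page **pages, int page_start, int page_end, gfp_t gfp)
{
	unsigned int cpu, tcpu;
	int i;

	for_each_possible_cpu(cpu) {
		for (i = page_start; i < page_end; i++) {
			struct page *p = alloc_pages_node(cpu_to_node(cpu), gfp, 0);

			if (!p)
				goto err;
			pages[demo_page_idx(cpu, i)] = p;
		}
	}
	return 0;

err:
	while (--i >= page_start)		/* partial row of the CPU that failed */
		__free_page(pages[demo_page_idx(cpu, i)]);
	for_each_possible_cpu(tcpu) {		/* complete rows of the CPUs before it */
		if (tcpu == cpu)
			break;
		for (i = page_start; i < page_end; i++)
			__free_page(pages[demo_page_idx(tcpu, i)]);
	}
	return -ENOMEM;
}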
percpu.c
    2850  unsigned int cpu, tcpu;   in pcpu_build_alloc_info() (local)
    2889  for_each_cpu(tcpu, &mask) {   in pcpu_build_alloc_info()
    2891  (cpu_distance_fn(cpu, tcpu) == LOCAL_DISTANCE &&   in pcpu_build_alloc_info()
    2892  cpu_distance_fn(tcpu, cpu) == LOCAL_DISTANCE)) {   in pcpu_build_alloc_info()
    2893  group_map[tcpu] = group;   in pcpu_build_alloc_info()
    2895  cpumask_clear_cpu(tcpu, &mask);   in pcpu_build_alloc_info()
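In pcpu_build_alloc_info(), tcpu walks the CPUs still to be placed and pulls everyone that is LOCAL_DISTANCE from the current group leader, in both directions, into the same group, clearing them from the work mask. A sketch of that grouping loop under simplified types follows; the distance callback mirrors pcpu_fc_cpu_distance_fn_t and group_map[] is a plain per-CPU array, so this is illustrative rather than the kernel's implementation.

#include <linux/cpumask.h>
#include <linux/topology.h>	/* LOCAL_DISTANCE */

static struct cpumask demo_mask;	/* work copy of cpu_possible_mask */

static int demo_group_cpus(int (*cpu_distance_fn)(unsigned int, unsigned int),
			   int *group_map)
{
	unsigned int cpu, tcpu;
	int group;

	cpumask_copy(&demo_mask, cpu_possible_mask);

	for (group = 0; !cpumask_empty(&demo_mask); group++) {
		cpu = cpumask_first(&demo_mask);	/* leader of the new group */
		for_each_cpu(tcpu, &demo_mask) {
			/* same group iff the distance is local in both directions */
			if (!cpu_distance_fn ||
			    (cpu_distance_fn(cpu, tcpu) == LOCAL_DISTANCE &&
			     cpu_distance_fn(tcpu, cpu) == LOCAL_DISTANCE)) {
				group_map[tcpu] = group;
				cpumask_clear_cpu(tcpu, &demo_mask);
			}
		}
	}
	return group;				/* number of groups formed */
}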
/linux-6.6.21/drivers/media/pci/b2c2/

flexcop-dma.c
      12  u8 *tcpu;   in flexcop_dma_allocate() (local)
      20  tcpu = dma_alloc_coherent(&pdev->dev, size, &tdma, GFP_KERNEL);   in flexcop_dma_allocate()
      21  if (tcpu != NULL) {   in flexcop_dma_allocate()
      23  dma->cpu_addr0 = tcpu;   in flexcop_dma_allocate()
      25  dma->cpu_addr1 = tcpu + size/2;   in flexcop_dma_allocate()
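flexcop_dma_allocate() is the one non-CPU use in this list: tcpu is simply the CPU-side pointer returned by dma_alloc_coherent(), and the driver carves the single coherent buffer into two halves. A sketch of that split, assuming an even size and using a made-up demo_dma container rather than the driver's struct flexcop_dma:

#include <linux/dma-mapping.h>
#include <linux/errno.h>
#include <linux/pci.h>

struct demo_dma {			/* illustrative container, not the driver's struct */
	u8 *cpu_addr0, *cpu_addr1;
	dma_addr_t dma_addr0, dma_addr1;
	size_t size;
};

static int demo_dma_allocate(struct pci_dev *pdev, struct demo_dma *dma, size_t size)
{
	dma_addr_t tdma;
	u8 *tcpu;

	tcpu = dma_alloc_coherent(&pdev->dev, size, &tdma, GFP_KERNEL);
	if (!tcpu)
		return -ENOMEM;

	dma->cpu_addr0 = tcpu;			/* first half */
	dma->dma_addr0 = tdma;
	dma->cpu_addr1 = tcpu + size / 2;	/* second half of the same buffer */
	dma->dma_addr1 = tdma + size / 2;
	dma->size = size;
	return 0;
}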
/linux-6.6.21/arch/s390/kvm/

diag.c
     168  struct kvm_vcpu *tcpu;   in __diag_time_slice_end_directed() (local)
     180  tcpu = kvm_get_vcpu_by_id(vcpu->kvm, tid);   in __diag_time_slice_end_directed()
     181  if (!tcpu)   in __diag_time_slice_end_directed()
     185  tcpu_cpu = READ_ONCE(tcpu->cpu);   in __diag_time_slice_end_directed()
     201  if (kvm_vcpu_yield_to(tcpu) <= 0)   in __diag_time_slice_end_directed()
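The s390 DIAG handler resolves the guest-supplied vCPU id with kvm_get_vcpu_by_id() and, if the target looks usable, donates the time slice with kvm_vcpu_yield_to(). A reduced sketch of that directed yield follows; the real handler additionally checks whether the target is already loaded on a physical CPU (the READ_ONCE(tcpu->cpu) hit above) before yielding, which is omitted here, and demo_yield_to_vcpu() is a hypothetical name.

#include <linux/kvm_host.h>

static bool demo_yield_to_vcpu(struct kvm_vcpu *vcpu, int tid)
{
	struct kvm_vcpu *tcpu;

	if (tid == vcpu->vcpu_id)			/* yielding to self is a no-op */
		return false;

	tcpu = kvm_get_vcpu_by_id(vcpu->kvm, tid);	/* NULL for an invalid id */
	if (!tcpu)
		return false;

	/* > 0 means the scheduler accepted the yield to the target's task */
	return kvm_vcpu_yield_to(tcpu) > 0;
}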
/linux-6.6.21/arch/x86/platform/uv/

uv_nmi.c
     796  int tcpu;   in uv_nmi_dump_state() (local)
     806  for_each_online_cpu(tcpu) {   in uv_nmi_dump_state()
     807  if (cpumask_test_cpu(tcpu, uv_nmi_cpu_mask))   in uv_nmi_dump_state()
     809  else if (tcpu == cpu)   in uv_nmi_dump_state()
     810  uv_nmi_dump_state_cpu(tcpu, regs);   in uv_nmi_dump_state()
     812  uv_nmi_trigger_dump(tcpu);   in uv_nmi_dump_state()
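uv_nmi_dump_state() fans the dump out over all online CPUs: CPUs flagged in uv_nmi_cpu_mask are skipped, the local CPU dumps itself with the trapping registers, and every other CPU is asked to dump. A generic sketch of that fan-out; the exclusion mask and both dump callbacks here are placeholders, not the UV driver's symbols.

#include <linux/cpumask.h>

static struct cpumask demo_excluded_mask;	/* CPUs not to be dumped from here */

static void demo_dump_all_cpus(int cpu,
			       void (*dump_self)(int tcpu),
			       void (*dump_remote)(int tcpu))
{
	int tcpu;

	for_each_online_cpu(tcpu) {
		if (cpumask_test_cpu(tcpu, &demo_excluded_mask))
			continue;		/* accounted for elsewhere */
		else if (tcpu == cpu)
			dump_self(tcpu);	/* local CPU: dump directly */
		else
			dump_remote(tcpu);	/* remote CPU: request a dump */
	}
}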
/linux-6.6.21/drivers/xen/events/

events_base.c
    1772  static int xen_rebind_evtchn_to_cpu(struct irq_info *info, unsigned int tcpu)   in xen_rebind_evtchn_to_cpu() (argument)
    1785  bind_vcpu.vcpu = xen_vcpu_nr(tcpu);   in xen_rebind_evtchn_to_cpu()
    1799  bind_evtchn_to_cpu(info, tcpu, false);   in xen_rebind_evtchn_to_cpu()
    1837  unsigned int tcpu = select_target_cpu(dest);   in set_affinity_irq() (local)
    1840  ret = xen_rebind_evtchn_to_cpu(info_for_irq(data->irq), tcpu);   in set_affinity_irq()
    1842  irq_data_update_effective_affinity(data, cpumask_of(tcpu));   in set_affinity_irq()
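Here tcpu is the target CPU of an interrupt-affinity change: the Xen backend asks the hypervisor to steer the event channel to that CPU's vCPU and only updates its own per-channel bookkeeping when the hypercall succeeds. A hedged sketch of that hypercall sequence; the header paths are approximate, and demo_note_binding() is a hypothetical stand-in for the driver's bind_evtchn_to_cpu() bookkeeping.

#include <linux/errno.h>
#include <xen/xen-ops.h>			/* xen_vcpu_nr() */
#include <xen/interface/event_channel.h>	/* struct evtchn_bind_vcpu, EVTCHNOP_bind_vcpu */
#include <asm/xen/hypercall.h>			/* HYPERVISOR_event_channel_op() */

static int demo_rebind_evtchn(evtchn_port_t evtchn, unsigned int tcpu,
			      void (*demo_note_binding)(evtchn_port_t, unsigned int))
{
	struct evtchn_bind_vcpu bind_vcpu;

	bind_vcpu.port = evtchn;
	bind_vcpu.vcpu = xen_vcpu_nr(tcpu);	/* Linux CPU number -> Xen vCPU number */

	/* Failure typically just means a VIRQ/IPI channel that cannot be rebound. */
	if (HYPERVISOR_event_channel_op(EVTCHNOP_bind_vcpu, &bind_vcpu) < 0)
		return -EINVAL;

	demo_note_binding(evtchn, tcpu);	/* record the new target CPU locally */
	return 0;
}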
/linux-6.6.21/net/core/

dev.c
    4568  u32 tcpu;   in get_rps_cpu() (local)
    4615  tcpu = rflow->cpu;   in get_rps_cpu()
    4628  if (unlikely(tcpu != next_cpu) &&   in get_rps_cpu()
    4629  (tcpu >= nr_cpu_ids || !cpu_online(tcpu) ||   in get_rps_cpu()
    4630  ((int)(per_cpu(softnet_data, tcpu).input_queue_head -   in get_rps_cpu()
    4632  tcpu = next_cpu;   in get_rps_cpu()
    4636  if (tcpu < nr_cpu_ids && cpu_online(tcpu)) {   in get_rps_cpu()
    4638  cpu = tcpu;   in get_rps_cpu()
    4646  tcpu = map->cpus[reciprocal_scale(hash, map->len)];   in get_rps_cpu()
    4647  if (cpu_online(tcpu)) {   in get_rps_cpu()
    [all …]
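In get_rps_cpu(), tcpu is the candidate CPU for receive packet steering: a per-flow entry (rflow->cpu) is preferred when it is still valid, otherwise the flow hash is scaled into the device's RPS map. Below is a sketch of the map fallback only; demo_rps_map mirrors the layout implied by map->len / map->cpus[] above and is not the kernel's struct rps_map.

#include <linux/cpumask.h>
#include <linux/kernel.h>	/* reciprocal_scale() */

struct demo_rps_map {
	unsigned int len;
	u16 cpus[];
};

static int demo_rps_pick_cpu(const struct demo_rps_map *map, u32 hash)
{
	u32 tcpu;

	if (!map || !map->len)
		return -1;

	/* reciprocal_scale() maps the hash into [0, len) without a division */
	tcpu = map->cpus[reciprocal_scale(hash, map->len)];
	if (tcpu < nr_cpu_ids && cpu_online(tcpu))
		return tcpu;

	return -1;		/* caller falls back to the local CPU */
}

reciprocal_scale() is a 64-bit multiply and shift, so the selection stays divide-free in the packet fast path.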
/linux-6.6.21/kernel/

workqueue.c
    5568  int tcpu;   in workqueue_online_cpu() (local)
    5570  for_each_cpu(tcpu, pt->pod_cpus[pt->cpu_pod[cpu]])   in workqueue_online_cpu()
    5571  wq_update_pod(wq, tcpu, cpu, true);   in workqueue_online_cpu()
    5596  int tcpu;   in workqueue_offline_cpu() (local)
    5598  for_each_cpu(tcpu, pt->pod_cpus[pt->cpu_pod[cpu]])   in workqueue_offline_cpu()
    5599  wq_update_pod(wq, tcpu, cpu, false);   in workqueue_offline_cpu()
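The workqueue hotplug paths walk every CPU that shares a pod with the CPU going up or down and refresh each workqueue for those siblings. A sketch of that walk with simplified types; demo_pod_type stands in for the workqueue code's pod table and the update callback for wq_update_pod().

#include <linux/cpumask.h>

struct demo_pod_type {
	int *cpu_pod;			/* CPU number -> pod index */
	cpumask_var_t *pod_cpus;	/* pod index -> CPUs in that pod */
};

static void demo_update_pod_siblings(struct demo_pod_type *pt, int cpu, bool online,
				     void (*update)(int tcpu, int hotplug_cpu, bool online))
{
	int tcpu;

	/* every CPU sharing a pod with the hotplugged CPU gets re-evaluated */
	for_each_cpu(tcpu, pt->pod_cpus[pt->cpu_pod[cpu]])
		update(tcpu, cpu, online);
}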
/linux-6.6.21/Documentation/arch/ia64/

err_inject.rst
     964  …printf("\t\tcpu,loop,interval,err_type_info,err_struct_info[,err_data_buffer[0],err_data_buffer[1]…