
Searched refs:cpumask (Results 1 – 25 of 272) sorted by relevance


/linux-2.6.39/include/linux/
cpumask.h
13 typedef struct cpumask { DECLARE_BITMAP(bits, NR_CPUS); } cpumask_t; struct
78 extern const struct cpumask *const cpu_possible_mask;
79 extern const struct cpumask *const cpu_online_mask;
80 extern const struct cpumask *const cpu_present_mask;
81 extern const struct cpumask *const cpu_active_mask;
114 static inline unsigned int cpumask_first(const struct cpumask *srcp) in cpumask_first()
120 static inline unsigned int cpumask_next(int n, const struct cpumask *srcp) in cpumask_next()
125 static inline unsigned int cpumask_next_zero(int n, const struct cpumask *srcp) in cpumask_next_zero()
131 const struct cpumask *srcp, in cpumask_next_and()
132 const struct cpumask *andp) in cpumask_next_and()
[all …]
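
The cpumask_first()/cpumask_next() iterators and the global masks declared in cpumask.h above are typically combined as below. This is a minimal illustrative sketch, not code from the tree; report_online_cpus() is a hypothetical helper.

#include <linux/cpumask.h>
#include <linux/kernel.h>

/* Illustrative only: walk online CPUs with the raw iterators, then
 * use the for_each_cpu() wrapper over the same primitives. */
static void report_online_cpus(void)
{
	unsigned int cpu;

	/* cpumask_first()/cpumask_next() return >= nr_cpu_ids once
	 * no further bit is set in the mask. */
	cpu = cpumask_first(cpu_online_mask);
	while (cpu < nr_cpu_ids) {
		pr_info("online cpu %u\n", cpu);
		cpu = cpumask_next(cpu, cpu_online_mask);
	}

	/* for_each_cpu() wraps the same first/next pattern. */
	for_each_cpu(cpu, cpu_possible_mask)
		pr_debug("possible cpu %u\n", cpu);
}
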
stop_machine.h
33 int stop_cpus(const struct cpumask *cpumask, cpu_stop_fn_t fn, void *arg);
34 int try_stop_cpus(const struct cpumask *cpumask, cpu_stop_fn_t fn, void *arg);
77 static inline int stop_cpus(const struct cpumask *cpumask, in stop_cpus() argument
80 if (cpumask_test_cpu(raw_smp_processor_id(), cpumask)) in stop_cpus()
85 static inline int try_stop_cpus(const struct cpumask *cpumask, in try_stop_cpus() argument
88 return stop_cpus(cpumask, fn, arg); in try_stop_cpus()
114 int stop_machine(int (*fn)(void *), void *data, const struct cpumask *cpus);
125 int __stop_machine(int (*fn)(void *), void *data, const struct cpumask *cpus);
130 const struct cpumask *cpus) in __stop_machine()
140 const struct cpumask *cpus) in stop_machine()
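
stop_machine(), as declared above, runs a callback while all other online CPUs are held in their stopper threads. A minimal sketch of a caller, assuming a hypothetical freeze_and_patch() callback; passing NULL for the cpus argument places no restriction on which CPU runs it.

#include <linux/stop_machine.h>

/* Hypothetical callback: executes while every other online CPU is
 * spinning with interrupts disabled, so the update appears atomic. */
static int freeze_and_patch(void *data)
{
	/* ... perform the update described by 'data' ... */
	return 0;
}

static int do_patch(void *data)
{
	/* NULL cpus: the callback may run on any one CPU. */
	return stop_machine(freeze_and_patch, data, NULL);
}
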
padata.h
136 struct padata_cpumask cpumask; member
160 struct padata_cpumask cpumask; member
173 const struct cpumask *pcpumask,
174 const struct cpumask *cbcpumask);
180 cpumask_var_t cpumask);
/linux-2.6.39/arch/x86/kernel/apic/
apic_flat_64.c
32 static const struct cpumask *flat_target_cpus(void) in flat_target_cpus()
37 static void flat_vector_allocation_domain(int cpu, struct cpumask *retmask) in flat_vector_allocation_domain()
80 static void flat_send_IPI_mask(const struct cpumask *cpumask, int vector) in flat_send_IPI_mask() argument
82 unsigned long mask = cpumask_bits(cpumask)[0]; in flat_send_IPI_mask()
88 flat_send_IPI_mask_allbutself(const struct cpumask *cpumask, int vector) in flat_send_IPI_mask_allbutself() argument
90 unsigned long mask = cpumask_bits(cpumask)[0]; in flat_send_IPI_mask_allbutself()
251 static const struct cpumask *physflat_target_cpus(void) in physflat_target_cpus()
256 static void physflat_vector_allocation_domain(int cpu, struct cpumask *retmask) in physflat_vector_allocation_domain()
262 static void physflat_send_IPI_mask(const struct cpumask *cpumask, int vector) in physflat_send_IPI_mask() argument
264 default_send_IPI_mask_sequence_phys(cpumask, vector); in physflat_send_IPI_mask()
[all …]
x2apic_phys.c
34 static const struct cpumask *x2apic_target_cpus(void) in x2apic_target_cpus()
39 static void x2apic_vector_allocation_domain(int cpu, struct cpumask *retmask) in x2apic_vector_allocation_domain()
58 static void x2apic_send_IPI_mask(const struct cpumask *mask, int vector) in x2apic_send_IPI_mask()
74 x2apic_send_IPI_mask_allbutself(const struct cpumask *mask, int vector) in x2apic_send_IPI_mask_allbutself()
120 static unsigned int x2apic_cpu_mask_to_apicid(const struct cpumask *cpumask) in x2apic_cpu_mask_to_apicid() argument
126 int cpu = cpumask_first(cpumask); in x2apic_cpu_mask_to_apicid()
135 x2apic_cpu_mask_to_apicid_and(const struct cpumask *cpumask, in x2apic_cpu_mask_to_apicid_and() argument
136 const struct cpumask *andmask) in x2apic_cpu_mask_to_apicid_and()
144 for_each_cpu_and(cpu, cpumask, andmask) { in x2apic_cpu_mask_to_apicid_and()
x2apic_cluster.c
24 static const struct cpumask *x2apic_target_cpus(void) in x2apic_target_cpus()
32 static void x2apic_vector_allocation_domain(int cpu, struct cpumask *retmask) in x2apic_vector_allocation_domain()
57 static void x2apic_send_IPI_mask(const struct cpumask *mask, int vector) in x2apic_send_IPI_mask()
74 x2apic_send_IPI_mask_allbutself(const struct cpumask *mask, int vector) in x2apic_send_IPI_mask_allbutself()
122 static unsigned int x2apic_cpu_mask_to_apicid(const struct cpumask *cpumask) in x2apic_cpu_mask_to_apicid() argument
128 int cpu = cpumask_first(cpumask); in x2apic_cpu_mask_to_apicid()
137 x2apic_cpu_mask_to_apicid_and(const struct cpumask *cpumask, in x2apic_cpu_mask_to_apicid_and() argument
138 const struct cpumask *andmask) in x2apic_cpu_mask_to_apicid_and()
146 for_each_cpu_and(cpu, cpumask, andmask) { in x2apic_cpu_mask_to_apicid_and()
bigsmp_32.c
29 static const struct cpumask *bigsmp_target_cpus(void) in bigsmp_target_cpus()
109 static unsigned int bigsmp_cpu_mask_to_apicid(const struct cpumask *cpumask) in bigsmp_cpu_mask_to_apicid() argument
111 int cpu = cpumask_first(cpumask); in bigsmp_cpu_mask_to_apicid()
118 static unsigned int bigsmp_cpu_mask_to_apicid_and(const struct cpumask *cpumask, in bigsmp_cpu_mask_to_apicid_and() argument
119 const struct cpumask *andmask) in bigsmp_cpu_mask_to_apicid_and()
127 for_each_cpu_and(cpu, cpumask, andmask) { in bigsmp_cpu_mask_to_apicid_and()
139 static inline void bigsmp_send_IPI_mask(const struct cpumask *mask, int vector) in bigsmp_send_IPI_mask()
180 static void bigsmp_vector_allocation_domain(int cpu, struct cpumask *retmask) in bigsmp_vector_allocation_domain()
ipi.c
22 void default_send_IPI_mask_sequence_phys(const struct cpumask *mask, int vector) in default_send_IPI_mask_sequence_phys()
40 void default_send_IPI_mask_allbutself_phys(const struct cpumask *mask, in default_send_IPI_mask_allbutself_phys()
61 void default_send_IPI_mask_sequence_logical(const struct cpumask *mask, in default_send_IPI_mask_sequence_logical()
81 void default_send_IPI_mask_allbutself_logical(const struct cpumask *mask, in default_send_IPI_mask_allbutself_logical()
104 void default_send_IPI_mask_logical(const struct cpumask *cpumask, int vector) in default_send_IPI_mask_logical() argument
106 unsigned long mask = cpumask_bits(cpumask)[0]; in default_send_IPI_mask_logical()
summit_32.c
55 static inline void summit_send_IPI_mask(const struct cpumask *mask, int vector) in summit_send_IPI_mask()
177 static const struct cpumask *summit_target_cpus(void) in summit_target_cpus()
266 static unsigned int summit_cpu_mask_to_apicid(const struct cpumask *cpumask) in summit_cpu_mask_to_apicid() argument
274 for_each_cpu(cpu, cpumask) { in summit_cpu_mask_to_apicid()
287 static unsigned int summit_cpu_mask_to_apicid_and(const struct cpumask *inmask, in summit_cpu_mask_to_apicid_and()
288 const struct cpumask *andmask) in summit_cpu_mask_to_apicid_and()
291 cpumask_var_t cpumask; in summit_cpu_mask_to_apicid_and() local
293 if (!alloc_cpumask_var(&cpumask, GFP_ATOMIC)) in summit_cpu_mask_to_apicid_and()
296 cpumask_and(cpumask, inmask, andmask); in summit_cpu_mask_to_apicid_and()
297 cpumask_and(cpumask, cpumask, cpu_online_mask); in summit_cpu_mask_to_apicid_and()
[all …]
es7000_32.c
397 static void es7000_vector_allocation_domain(int cpu, struct cpumask *retmask) in es7000_vector_allocation_domain()
423 static void es7000_send_IPI_mask(const struct cpumask *mask, int vector) in es7000_send_IPI_mask()
443 static const struct cpumask *target_cpus_cluster(void) in target_cpus_cluster()
448 static const struct cpumask *es7000_target_cpus(void) in es7000_target_cpus()
548 static unsigned int es7000_cpu_mask_to_apicid(const struct cpumask *cpumask) in es7000_cpu_mask_to_apicid() argument
556 for_each_cpu(cpu, cpumask) { in es7000_cpu_mask_to_apicid()
571 es7000_cpu_mask_to_apicid_and(const struct cpumask *inmask, in es7000_cpu_mask_to_apicid_and()
572 const struct cpumask *andmask) in es7000_cpu_mask_to_apicid_and()
575 cpumask_var_t cpumask; in es7000_cpu_mask_to_apicid_and() local
577 if (!alloc_cpumask_var(&cpumask, GFP_ATOMIC)) in es7000_cpu_mask_to_apicid_and()
[all …]
apic_noop.c
34 static void noop_send_IPI_mask(const struct cpumask *cpumask, int vector) { } in noop_send_IPI_mask() argument
35 static void noop_send_IPI_mask_allbutself(const struct cpumask *cpumask, int vector) { } in noop_send_IPI_mask_allbutself() argument
87 static const struct cpumask *noop_target_cpus(void) in noop_target_cpus()
103 static void noop_vector_allocation_domain(int cpu, struct cpumask *retmask) in noop_vector_allocation_domain()
/linux-2.6.39/kernel/
padata.c
39 target_cpu = cpumask_first(pd->cpumask.pcpu); in padata_index_to_cpu()
41 target_cpu = cpumask_next(target_cpu, pd->cpumask.pcpu); in padata_index_to_cpu()
57 cpu_index = padata->seq_nr % cpumask_weight(pd->cpumask.pcpu); in padata_cpu_hash()
120 if (!cpumask_test_cpu(cb_cpu, pd->cpumask.cbcpu)) in padata_do_parallel()
181 num_cpus = cpumask_weight(pd->cpumask.pcpu); in padata_get_next()
364 const struct cpumask *pcpumask, in padata_setup_cpumasks()
365 const struct cpumask *cbcpumask) in padata_setup_cpumasks()
367 if (!alloc_cpumask_var(&pd->cpumask.pcpu, GFP_KERNEL)) in padata_setup_cpumasks()
370 cpumask_and(pd->cpumask.pcpu, pcpumask, cpu_active_mask); in padata_setup_cpumasks()
371 if (!alloc_cpumask_var(&pd->cpumask.cbcpu, GFP_KERNEL)) { in padata_setup_cpumasks()
[all …]
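
The alloc_cpumask_var()/cpumask_and() pattern visible in padata_setup_cpumasks() above is the usual way to build a derived mask. A hedged sketch under the assumption of a hypothetical clamp_to_active() helper:

#include <linux/cpumask.h>
#include <linux/errno.h>
#include <linux/gfp.h>

/* Keep only the requested CPUs that are currently active, mirroring
 * the cpumask_and(..., cpu_active_mask) call indexed above. */
static int clamp_to_active(const struct cpumask *requested,
			   cpumask_var_t *out)
{
	if (!alloc_cpumask_var(out, GFP_KERNEL))
		return -ENOMEM;

	cpumask_and(*out, requested, cpu_active_mask);

	if (cpumask_empty(*out)) {
		free_cpumask_var(*out);
		return -EINVAL;
	}
	return 0;
}
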
stop_machine.c
139 int __stop_cpus(const struct cpumask *cpumask, cpu_stop_fn_t fn, void *arg) in __stop_cpus() argument
146 for_each_cpu(cpu, cpumask) { in __stop_cpus()
152 cpu_stop_init_done(&done, cpumask_weight(cpumask)); in __stop_cpus()
160 for_each_cpu(cpu, cpumask) in __stop_cpus()
197 int stop_cpus(const struct cpumask *cpumask, cpu_stop_fn_t fn, void *arg) in stop_cpus() argument
203 ret = __stop_cpus(cpumask, fn, arg); in stop_cpus()
226 int try_stop_cpus(const struct cpumask *cpumask, cpu_stop_fn_t fn, void *arg) in try_stop_cpus() argument
233 ret = __stop_cpus(cpumask, fn, arg); in try_stop_cpus()
406 const struct cpumask *active_cpus;
467 int __stop_machine(int (*fn)(void *), void *data, const struct cpumask *cpus) in __stop_machine()
[all …]
smp.c
33 cpumask_var_t cpumask; member
54 if (!zalloc_cpumask_var_node(&cfd->cpumask, GFP_KERNEL, in hotplug_cfd()
65 free_cpumask_var(cfd->cpumask); in hotplug_cfd()
209 if (!cpumask_test_cpu(cpu, data->cpumask)) in generic_smp_call_function_interrupt()
226 if (!cpumask_test_and_clear_cpu(cpu, data->cpumask)) { in generic_smp_call_function_interrupt()
237 WARN_ON(!cpumask_empty(data->cpumask)); in generic_smp_call_function_interrupt()
368 int smp_call_function_any(const struct cpumask *mask, in smp_call_function_any()
372 const struct cpumask *nodemask; in smp_call_function_any()
448 void smp_call_function_many(const struct cpumask *mask, in smp_call_function_many()
488 BUG_ON(atomic_read(&data->refs) || !cpumask_empty(data->cpumask)); in smp_call_function_many()
[all …]
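
smp_call_function_many(), whose definition is indexed above, sends an IPI to every CPU in the mask except the caller. An illustrative sketch with hypothetical drain_local_state()/drain_remote_cpus() helpers; the preempt_disable() pair keeps the current CPU stable while the mask is processed.

#include <linux/smp.h>
#include <linux/cpumask.h>
#include <linux/preempt.h>

/* Runs on each targeted CPU in IPI (interrupt) context. */
static void drain_local_state(void *info)
{
	/* ... per-CPU work ... */
}

static void drain_remote_cpus(const struct cpumask *mask)
{
	preempt_disable();
	/* wait=true: return only after every targeted CPU has run
	 * drain_local_state(). */
	smp_call_function_many(mask, drain_local_state, NULL, true);
	preempt_enable();
}
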
/linux-2.6.39/arch/m32r/kernel/
smp.c
89 static void send_IPI_mask(const struct cpumask *, int, int);
163 cpumask_t cpumask; in smp_flush_cache_all() local
167 cpumask = cpu_online_map; in smp_flush_cache_all()
168 cpu_clear(smp_processor_id(), cpumask); in smp_flush_cache_all()
170 mask=cpus_addr(cpumask); in smp_flush_cache_all()
172 send_IPI_mask(&cpumask, INVALIDATE_CACHE_IPI, 0); in smp_flush_cache_all()
382 static void flush_tlb_others(cpumask_t cpumask, struct mm_struct *mm, in flush_tlb_others() argument
400 BUG_ON(cpus_empty(cpumask)); in flush_tlb_others()
402 BUG_ON(cpu_isset(smp_processor_id(), cpumask)); in flush_tlb_others()
406 cpus_and(cpumask, cpumask, cpu_online_map); in flush_tlb_others()
[all …]
/linux-2.6.39/arch/x86/include/asm/uv/
uv.h
6 struct cpumask;
16 extern const struct cpumask *uv_flush_tlb_others(const struct cpumask *cpumask,
27 static inline const struct cpumask *
28 uv_flush_tlb_others(const struct cpumask *cpumask, struct mm_struct *mm, in uv_flush_tlb_others() argument
30 { return cpumask; } in uv_flush_tlb_others()
/linux-2.6.39/arch/x86/include/asm/
apic.h
290 const struct cpumask *(*target_cpus)(void);
298 void (*vector_allocation_domain)(int cpu, struct cpumask *retmask);
323 unsigned int (*cpu_mask_to_apicid)(const struct cpumask *cpumask);
324 unsigned int (*cpu_mask_to_apicid_and)(const struct cpumask *cpumask,
325 const struct cpumask *andmask);
328 void (*send_IPI_mask)(const struct cpumask *mask, int vector);
329 void (*send_IPI_mask_allbutself)(const struct cpumask *mask,
487 static inline const struct cpumask *default_target_cpus(void) in default_target_cpus()
545 default_cpu_mask_to_apicid(const struct cpumask *cpumask) in default_cpu_mask_to_apicid() argument
547 return cpumask_bits(cpumask)[0] & APIC_ALL_CPUS; in default_cpu_mask_to_apicid()
[all …]
ipi.h
122 extern void default_send_IPI_mask_sequence_phys(const struct cpumask *mask,
124 extern void default_send_IPI_mask_allbutself_phys(const struct cpumask *mask,
149 extern void default_send_IPI_mask_sequence_logical(const struct cpumask *mask,
151 extern void default_send_IPI_mask_allbutself_logical(const struct cpumask *mask,
153 extern void default_send_IPI_mask_logical(const struct cpumask *mask,
smp.h
41 static inline struct cpumask *cpu_sibling_mask(int cpu) in cpu_sibling_mask()
46 static inline struct cpumask *cpu_core_mask(int cpu) in cpu_core_mask()
51 static inline struct cpumask *cpu_llc_shared_mask(int cpu) in cpu_llc_shared_mask()
78 void (*send_call_func_ipi)(const struct cpumask *mask);
146 static inline void arch_send_call_function_ipi_mask(const struct cpumask *mask) in arch_send_call_function_ipi_mask()
163 void native_send_call_func_ipi(const struct cpumask *mask);
/linux-2.6.39/arch/tile/include/asm/
smp.h
32 void send_IPI_many(const struct cpumask *mask, int tag);
47 extern void on_each_cpu_mask(const struct cpumask *mask,
78 static inline void arch_send_call_function_ipi_mask(struct cpumask *mask) in arch_send_call_function_ipi_mask()
108 extern struct cpumask cpu_lotar_map;
113 extern struct cpumask hash_for_home_map;
117 extern struct cpumask cpu_cacheable_map;
135 static inline int __cpulist_parse_crop(const char *buf, struct cpumask *dstp, in __cpulist_parse_crop()
/linux-2.6.39/arch/x86/mm/
tlb.c
172 static void flush_tlb_others_ipi(const struct cpumask *cpumask, in flush_tlb_others_ipi() argument
187 if (cpumask_andnot(to_cpumask(f->flush_cpumask), cpumask, cpumask_of(smp_processor_id()))) { in flush_tlb_others_ipi()
205 void native_flush_tlb_others(const struct cpumask *cpumask, in native_flush_tlb_others() argument
212 cpumask = uv_flush_tlb_others(cpumask, mm, va, cpu); in native_flush_tlb_others()
213 if (cpumask) in native_flush_tlb_others()
214 flush_tlb_others_ipi(cpumask, mm, va); in native_flush_tlb_others()
217 flush_tlb_others_ipi(cpumask, mm, va); in native_flush_tlb_others()
/linux-2.6.39/arch/mn10300/mm/
tlb-smp.c
53 static void flush_tlb_others(cpumask_t cpumask, struct mm_struct *mm,
95 static void flush_tlb_others(cpumask_t cpumask, struct mm_struct *mm, in flush_tlb_others() argument
106 BUG_ON(cpus_empty(cpumask)); in flush_tlb_others()
107 BUG_ON(cpu_isset(smp_processor_id(), cpumask)); in flush_tlb_others()
109 cpus_and(tmp, cpumask, cpu_online_map); in flush_tlb_others()
110 BUG_ON(!cpus_equal(cpumask, tmp)); in flush_tlb_others()
123 atomic_set_mask(cpumask.bits[0], &flush_cpumask.bits[0]); in flush_tlb_others()
/linux-2.6.39/kernel/time/
tick-common.c
152 const struct cpumask *cpumask) in tick_setup_device() argument
187 if (!cpumask_equal(newdev->cpumask, cpumask)) in tick_setup_device()
188 irq_set_affinity(newdev->irq, cpumask); in tick_setup_device()
218 if (!cpumask_test_cpu(cpu, newdev->cpumask)) in tick_check_new_device()
225 if (!cpumask_equal(newdev->cpumask, cpumask_of(cpu))) { in tick_check_new_device()
238 if (curdev && cpumask_equal(curdev->cpumask, cpumask_of(cpu))) in tick_check_new_device()
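
tick_setup_device() above compares a device's cpumask against cpumask_of(cpu) before calling irq_set_affinity(). The same idiom in isolation, as a sketch with a hypothetical bind_irq_to_cpu() helper:

#include <linux/cpumask.h>
#include <linux/interrupt.h>

/* Steer an interrupt to a single CPU unless it is already bound
 * exactly to that CPU's one-bit mask. */
static void bind_irq_to_cpu(unsigned int irq,
			    const struct cpumask *dev_mask, int cpu)
{
	if (!cpumask_equal(dev_mask, cpumask_of(cpu)))
		irq_set_affinity(irq, cpumask_of(cpu));
}
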
/linux-2.6.39/arch/tile/mm/
homecache.c
76 static void hv_flush_update(const struct cpumask *cache_cpumask, in hv_flush_update()
77 struct cpumask *tlb_cpumask, in hv_flush_update()
81 struct cpumask mask; in hv_flush_update()
121 const struct cpumask *cache_cpumask_orig, in flush_remote()
124 const struct cpumask *tlb_cpumask_orig, in flush_remote()
129 struct cpumask cache_cpumask_copy, tlb_cpumask_copy; in flush_remote()
130 struct cpumask *cache_cpumask, *tlb_cpumask; in flush_remote()
200 void homecache_evict(const struct cpumask *mask) in homecache_evict()
211 struct cpumask *home_mask) in homecache_mask()
254 struct cpumask home_mask; in homecache_flush_cache()
migrate.h
30 const unsigned long *cpumask);
45 const struct cpumask *cache_cpumask,
46 const struct cpumask *tlb_cpumask,
