
Searched refs: __cacheline_aligned (results 1-25 of 43, sorted by relevance)


/linux-2.4.37.9/include/linux/
  cache.h:27  #ifndef __cacheline_aligned
  cache.h:29  #define __cacheline_aligned ____cacheline_aligned
  cache.h:31  #define __cacheline_aligned \
  cache.h:39  #define __cacheline_aligned_in_smp __cacheline_aligned
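
Read together, these hits outline the generic fallback: if no architecture header has already defined __cacheline_aligned, include/linux/cache.h supplies one built on SMP_CACHE_BYTES, plus an _in_smp variant that collapses to nothing on UP builds. A rough reconstruction is sketched below; the #ifdef conditions (MODULE, CONFIG_SMP) and the .data.cacheline_aligned section name are assumptions based on kernels of this era, not shown by the hits themselves.

    /* Sketch of the generic fallback in include/linux/cache.h (reconstructed,
     * not copied): asm/cache.h is expected to provide L1_CACHE_BYTES and may
     * pre-define __cacheline_aligned, in which case the #ifndef keeps it. */
    #ifndef SMP_CACHE_BYTES
    #define SMP_CACHE_BYTES L1_CACHE_BYTES
    #endif

    #ifndef ____cacheline_aligned
    #define ____cacheline_aligned __attribute__((__aligned__(SMP_CACHE_BYTES)))
    #endif

    #ifndef __cacheline_aligned
    #ifdef MODULE                                   /* assumed condition */
    #define __cacheline_aligned ____cacheline_aligned
    #else
    #define __cacheline_aligned \
            __attribute__((__aligned__(SMP_CACHE_BYTES), \
                           __section__(".data.cacheline_aligned")))
    #endif
    #endif

    #ifndef __cacheline_aligned_in_smp
    #ifdef CONFIG_SMP
    #define __cacheline_aligned_in_smp __cacheline_aligned
    #else
    #define __cacheline_aligned_in_smp
    #endif
    #endif
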
/linux-2.4.37.9/include/asm-arm/
  cache.h:12  #define __cacheline_aligned __attribute__((__aligned__(L1_CACHE_BYTES)))
  cache.h:14  #define __cacheline_aligned \
/linux-2.4.37.9/include/asm-sparc64/
  cache.h:16  #define __cacheline_aligned __attribute__((__aligned__(SMP_CACHE_BYTES)))
  cache.h:18  #define __cacheline_aligned \
/linux-2.4.37.9/include/asm-ia64/
  numa.h:33  extern volatile char cpu_to_node_map[NR_CPUS] __cacheline_aligned;
  numa.h:34  extern volatile unsigned long node_to_cpu_mask[NR_NODES] __cacheline_aligned;
/linux-2.4.37.9/include/asm-sh64/
  cache.h:25  #define __cacheline_aligned __attribute__((__aligned__(L1_CACHE_BYTES)))
  cache.h:27  #define __cacheline_aligned \
/linux-2.4.37.9/include/asm-ppc/
  cache.h:33  #define __cacheline_aligned __attribute__((__aligned__(L1_CACHE_BYTES)))
  cache.h:35  #define __cacheline_aligned \
/linux-2.4.37.9/arch/ia64/mm/
  numa.c:65  volatile char cpu_to_node_map[NR_CPUS] __cacheline_aligned;
  numa.c:68  volatile unsigned long node_to_cpu_mask[NR_NODES] __cacheline_aligned;
/linux-2.4.37.9/include/asm-sparc/
  cache.h:19  #define __cacheline_aligned __attribute__((__aligned__(SMP_CACHE_BYTES)))
  cache.h:21  #define __cacheline_aligned \
/linux-2.4.37.9/kernel/
  softirq.c:45  static struct softirq_action softirq_vec[32] __cacheline_aligned;
  softirq.c:149  struct tasklet_head tasklet_vec[NR_CPUS] __cacheline_aligned;
  softirq.c:150  struct tasklet_head tasklet_hi_vec[NR_CPUS] __cacheline_aligned;
  sched.c:92  spinlock_t runqueue_lock __cacheline_aligned = SPIN_LOCK_UNLOCKED; /* inner */
  sched.c:93  rwlock_t tasklist_lock __cacheline_aligned = RW_LOCK_UNLOCKED; /* outer */
  sched.c:107  } aligned_data [NR_CPUS] __cacheline_aligned = { {{&init_task,0}}};
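
These kernel/ hits are the typical consumers: hot globals and per-CPU arrays (softirq_vec, tasklet_vec, runqueue_lock) get their own cache line so that one CPU's writes do not keep invalidating a line that also carries a neighbour's data (false sharing). Below is a minimal userspace sketch of the same idea; the 64-byte line size, the counter struct and NCPUS are placeholders for illustration only.

    #include <stdio.h>
    #include <stddef.h>

    #define CACHE_LINE 64   /* assumed L1 line size, illustration only */
    #define NCPUS      4

    /* Same pattern as tasklet_vec[NR_CPUS] __cacheline_aligned: padding each
     * per-CPU slot out to a full cache line keeps CPUs that update their own
     * slot from bouncing a line that also holds another CPU's slot. */
    struct per_cpu_counter {
        unsigned long count;
    } __attribute__((__aligned__(CACHE_LINE)));

    static struct per_cpu_counter counters[NCPUS];

    int main(void)
    {
        /* The aligned attribute pads sizeof up to the line size, so the
         * array stride is one full cache line per CPU slot. */
        printf("slot size = %zu, stride = %zu\n",
               sizeof(struct per_cpu_counter),
               (size_t)((char *)&counters[1] - (char *)&counters[0]));
        return 0;
    }
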
/linux-2.4.37.9/include/asm-s390x/
  init.h:26  #define __cacheline_aligned __attribute__ ((__aligned__(256)))
/linux-2.4.37.9/include/asm-s390/
  init.h:26  #define __cacheline_aligned __attribute__ ((__aligned__(256)))
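
On s390 and s390x the attribute is a fixed 256-byte alignment rather than L1_CACHE_BYTES, matching the larger cache line on those machines. A quick standalone check of what such a definition actually imposes (the macro below simply mirrors the 256-byte value quoted above):

    #include <stdio.h>
    #include <stdalign.h>

    /* Mirrors the s390 definition shown above, for illustration only. */
    #define __cacheline_aligned __attribute__ ((__aligned__(256)))

    struct sample {
        int x;
    } __cacheline_aligned;

    int main(void)
    {
        /* alignof reports the alignment the attribute imposed; sizeof is
         * padded up to the same 256-byte boundary. */
        printf("alignof = %zu, sizeof = %zu\n",
               alignof(struct sample), sizeof(struct sample));
        return 0;
    }
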
/linux-2.4.37.9/arch/x86_64/kernel/
  syscall.c:22  sys_call_ptr_t sys_call_table[__NR_syscall_max+1] __cacheline_aligned = {
  init_task.c:33  struct tss_struct init_tss[NR_CPUS] __cacheline_aligned;
  setup64.c:27  struct x8664_pda cpu_pda[NR_CPUS] __cacheline_aligned;
  setup64.c:45  char boot_cpu_stack[IRQSTACKSIZE] __cacheline_aligned;
  smpboot.c:66  int cpu_sibling_map[NR_CPUS] __cacheline_aligned;
  smpboot.c:80  struct cpuinfo_x86 cpu_data[NR_CPUS] __cacheline_aligned;
  smp.c:107  struct tlb_state cpu_tlbstate[NR_CPUS] __cacheline_aligned = {[0 ... NR_CPUS-1] = { &init_mm, 0, }};
/linux-2.4.37.9/arch/i386/kernel/
  init_task.c:32  struct tss_struct init_tss[NR_CPUS] __cacheline_aligned = { [0 ... NR_CPUS-1] = INIT_TSS };
  smpboot.c:70  struct cpuinfo_x86 cpu_data[NR_CPUS] __cacheline_aligned;
  smpboot.c:966  int cpu_sibling_map[NR_CPUS] __cacheline_aligned;
  smp.c:108  struct tlb_state cpu_tlbstate[NR_CPUS] __cacheline_aligned = {[0 ... NR_CPUS-1] = { &init_mm, 0, }};
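
The i386 and x86_64 hits pair the alignment with GCC's range designated initializer ([0 ... NR_CPUS-1] = ...), which fills every element of a per-CPU array with the same initializer at compile time. A self-contained sketch of that pattern follows; NR_CPUS, the demo struct and the 64-byte alignment are placeholders, not values taken from the tree.

    #include <stdio.h>

    #define NR_CPUS 8
    #define __cacheline_aligned __attribute__((__aligned__(64)))  /* assumed line size */

    struct tlb_state_demo {
        void *active_mm;
        int   state;
    };

    /* GCC extension: "[0 ... NR_CPUS-1] =" repeats one initializer for every
     * slot, the same way cpu_tlbstate[] is seeded with { &init_mm, 0 }.
     * The attribute on the array aligns its start address to a line. */
    static struct tlb_state_demo demo_state[NR_CPUS] __cacheline_aligned =
        { [0 ... NR_CPUS - 1] = { NULL, 0 } };

    int main(void)
    {
        printf("slot 5: active_mm=%p state=%d\n",
               demo_state[5].active_mm, demo_state[5].state);
        return 0;
    }
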
/linux-2.4.37.9/include/asm-parisc/
  cache.h:29  #define __cacheline_aligned __attribute__((__aligned__(L1_CACHE_BYTES)))
/linux-2.4.37.9/arch/ia64/sn/kernel/sn2/
  timer.c:37  static volatile long rtc_offset __cacheline_aligned;
  sn2_smp.c:63  static spinlock_t sn2_global_ptc_lock __cacheline_aligned = SPIN_LOCK_UNLOCKED;
/linux-2.4.37.9/arch/ia64/kernel/
  sal.c:21  spinlock_t sal_lock __cacheline_aligned = SPIN_LOCK_UNLOCKED;
/linux-2.4.37.9/arch/ia64/sn/kernel/
  sn2_smp.c:63  static spinlock_t sn2_global_ptc_lock __cacheline_aligned = SPIN_LOCK_UNLOCKED;
