/linux-6.6.21/arch/s390/include/asm/ |
D | preempt.h |
    15  static inline int preempt_count(void)  in preempt_count() function
    17  return READ_ONCE(S390_lowcore.preempt_count) & ~PREEMPT_NEED_RESCHED;  in preempt_count()
    25  old = READ_ONCE(S390_lowcore.preempt_count);  in preempt_count_set()
    28  } while (__atomic_cmpxchg(&S390_lowcore.preempt_count,  in preempt_count_set()
    34  __atomic_and(~PREEMPT_NEED_RESCHED, &S390_lowcore.preempt_count);  in set_preempt_need_resched()
    39  __atomic_or(PREEMPT_NEED_RESCHED, &S390_lowcore.preempt_count);  in clear_preempt_need_resched()
    44  return !(READ_ONCE(S390_lowcore.preempt_count) & PREEMPT_NEED_RESCHED);  in test_preempt_need_resched()
    55  __atomic_add_const(val, &S390_lowcore.preempt_count);  in __preempt_count_add()
    59  __atomic_add(val, &S390_lowcore.preempt_count);  in __preempt_count_add()
    69  return __atomic_add(-1, &S390_lowcore.preempt_count) == 1;  in __preempt_count_dec_and_test()
    [all …]
|
/linux-6.6.21/arch/x86/include/asm/ |
D | preempt.h |
    25  static __always_inline int preempt_count(void)  in preempt_count() function
    27  return raw_cpu_read_4(pcpu_hot.preempt_count) & ~PREEMPT_NEED_RESCHED;  in preempt_count()
    35  old = raw_cpu_read_4(pcpu_hot.preempt_count);  in preempt_count_set()
    38  } while (raw_cpu_cmpxchg_4(pcpu_hot.preempt_count, old, new) != old);  in preempt_count_set()
    47  per_cpu(pcpu_hot.preempt_count, (cpu)) = PREEMPT_DISABLED; \
    61  raw_cpu_and_4(pcpu_hot.preempt_count, ~PREEMPT_NEED_RESCHED);  in set_preempt_need_resched()
    66  raw_cpu_or_4(pcpu_hot.preempt_count, PREEMPT_NEED_RESCHED);  in clear_preempt_need_resched()
    71  return !(raw_cpu_read_4(pcpu_hot.preempt_count) & PREEMPT_NEED_RESCHED);  in test_preempt_need_resched()
    80  raw_cpu_add_4(pcpu_hot.preempt_count, val);  in __preempt_count_add()
    85  raw_cpu_add_4(pcpu_hot.preempt_count, -val);  in __preempt_count_sub()
    [all …]
|
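Both the s390 and x86 entries above show the inverted PREEMPT_NEED_RESCHED convention: set_preempt_need_resched() clears the bit and clear_preempt_need_resched() sets it, so "no preempt-disable nesting left and a reschedule is pending" collapses to preempt_count == 0 and can be tested with one comparison on the preempt_enable() fast path. Below is a minimal userspace sketch of that folding; the 0x80000000 bit value and the helper names mirror the kernel's, but the code is a standalone illustration, not the kernel implementation.

```c
#include <assert.h>
#include <stdbool.h>

#define PREEMPT_NEED_RESCHED 0x80000000u   /* top bit, stored inverted */

/* Stand-in for the per-CPU counter; initialised with the inverted bit set,
 * i.e. "no reschedule needed". */
static unsigned int pcpu_preempt_count = PREEMPT_NEED_RESCHED;

/* preempt_count() hides the inverted bit, as in the hits above. */
static inline unsigned int preempt_count(void)
{
	return pcpu_preempt_count & ~PREEMPT_NEED_RESCHED;
}

static inline void set_preempt_need_resched(void)   { pcpu_preempt_count &= ~PREEMPT_NEED_RESCHED; }
static inline void clear_preempt_need_resched(void) { pcpu_preempt_count |=  PREEMPT_NEED_RESCHED; }

static inline void preempt_disable(void) { pcpu_preempt_count++; }

/* The fast path: a single test against zero covers both
 * "no nesting left" and "reschedule bit folded in". */
static inline bool preempt_enable_should_resched(void)
{
	return --pcpu_preempt_count == 0;
}

int main(void)
{
	preempt_disable();
	set_preempt_need_resched();               /* a wakeup arrived meanwhile */
	assert(preempt_count() == 1);             /* nesting depth only */
	assert(preempt_enable_should_resched());  /* count == 0 and resched needed */
	return 0;
}
```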
/linux-6.6.21/include/linux/ |
D | preempt.h |
    92  unsigned long pc = preempt_count();  in interrupt_context_level()
    108  #define nmi_count() (preempt_count() & NMI_MASK)
    109  #define hardirq_count() (preempt_count() & HARDIRQ_MASK)
    112  # define irq_count() ((preempt_count() & (NMI_MASK | HARDIRQ_MASK)) | softirq_count())
    114  # define softirq_count() (preempt_count() & SOFTIRQ_MASK)
    115  # define irq_count() (preempt_count() & (NMI_MASK | HARDIRQ_MASK | SOFTIRQ_MASK))
    130  # define in_task() (!((preempt_count() & (NMI_MASK | HARDIRQ_MASK)) | in_serving_softirq()))
    132  # define in_task() (!(preempt_count() & (NMI_MASK | HARDIRQ_MASK | SOFTIRQ_OFFSET)))
    186  #define in_atomic() (preempt_count() != 0)
    192  #define in_atomic_preempt_off() (preempt_count() != PREEMPT_DISABLE_OFFSET)
    [all …]
|
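The masks used by the include/linux/preempt.h macros above carve preempt_count into per-context bitfields. The sketch below illustrates that decomposition, assuming the 8/8/4/4-bit split (PREEMPT, SOFTIRQ, HARDIRQ, NMI) used in v6.6; the constants are reproduced here only for illustration and the global counter stands in for the real per-task/per-CPU value.

```c
#include <assert.h>

/* Field layout of preempt_count, low bits to high
 * (mirrors the *_MASK/*_OFFSET definitions in include/linux/preempt.h). */
#define PREEMPT_MASK 0x000000ffu   /* preempt_disable() nesting */
#define SOFTIRQ_MASK 0x0000ff00u   /* softirq nesting + BH disable */
#define HARDIRQ_MASK 0x000f0000u   /* hardirq nesting */
#define NMI_MASK     0x00f00000u   /* NMI nesting */

#define SOFTIRQ_OFFSET 0x00000100u /* one softirq level */
#define HARDIRQ_OFFSET 0x00010000u /* one hardirq level */

static unsigned int preempt_count_val;            /* stand-in counter */
static unsigned int preempt_count(void) { return preempt_count_val; }

#define nmi_count()     (preempt_count() & NMI_MASK)
#define hardirq_count() (preempt_count() & HARDIRQ_MASK)
#define softirq_count() (preempt_count() & SOFTIRQ_MASK)
#define in_task()       (!(preempt_count() & (NMI_MASK | HARDIRQ_MASK | SOFTIRQ_OFFSET)))

int main(void)
{
	assert(in_task());                       /* plain process context */

	preempt_count_val += HARDIRQ_OFFSET;     /* enter an interrupt handler */
	assert(hardirq_count() && !in_task());

	preempt_count_val -= HARDIRQ_OFFSET;     /* leave it again */
	preempt_count_val += SOFTIRQ_OFFSET;     /* now servicing a softirq */
	assert(softirq_count() && !in_task());
	return 0;
}
```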
/linux-6.6.21/include/asm-generic/ |
D | preempt.h |
    9   static __always_inline int preempt_count(void)  in preempt_count() function
    11  return READ_ONCE(current_thread_info()->preempt_count);  in preempt_count()
    16  return &current_thread_info()->preempt_count;  in preempt_count_ptr()
    28  task_thread_info(p)->preempt_count = FORK_PREEMPT_COUNT; \
    32  task_thread_info(p)->preempt_count = PREEMPT_DISABLED; \
    77  return unlikely(preempt_count() == preempt_offset &&  in should_resched()
|
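Unlike the s390 and x86 variants, the asm-generic version above keeps the counter in the owning task's thread_info rather than in per-CPU storage, so it needs no architecture-specific read primitive. A rough userspace analogue of that accessor pattern follows; thread-local state stands in for current_thread_info(), the names echo the hits above, and the whole thing is only a sketch, not the kernel code.

```c
#include <assert.h>

/* Userspace stand-in for struct thread_info: one instance per thread. */
struct thread_info {
	int preempt_count;   /* 0 => preemptible, >0 => nested disables */
};

static _Thread_local struct thread_info ti;

static inline struct thread_info *current_thread_info(void) { return &ti; }

static inline int preempt_count(void)
{
	return current_thread_info()->preempt_count;
}

/* Mirrors preempt_count_ptr(): callers adjust the count through the pointer. */
static inline int *preempt_count_ptr(void)
{
	return &current_thread_info()->preempt_count;
}

static inline void __preempt_count_add(int val) { *preempt_count_ptr() += val; }
static inline void __preempt_count_sub(int val) { *preempt_count_ptr() -= val; }

int main(void)
{
	__preempt_count_add(1);       /* preempt_disable() */
	assert(preempt_count() == 1);
	__preempt_count_sub(1);       /* preempt_enable() fast path */
	assert(preempt_count() == 0);
	return 0;
}
```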
/linux-6.6.21/arch/arm64/include/asm/ |
D | preempt.h |
    11  static inline int preempt_count(void)  in preempt_count() function
    23  task_thread_info(p)->preempt_count = FORK_PREEMPT_COUNT; \
    27  task_thread_info(p)->preempt_count = PREEMPT_DISABLED; \
    62  u64 pc = READ_ONCE(ti->preempt_count);  in __preempt_count_dec_and_test()
    74  return !pc || !READ_ONCE(ti->preempt_count);  in __preempt_count_dec_and_test()
    79  u64 pc = READ_ONCE(current_thread_info()->preempt_count);  in should_resched()
|
/linux-6.6.21/tools/testing/radix-tree/ |
D | main.c |
    244  nr_allocated, preempt_count);  in single_thread_tests()
    248  nr_allocated, preempt_count);  in single_thread_tests()
    252  nr_allocated, preempt_count);  in single_thread_tests()
    256  nr_allocated, preempt_count);  in single_thread_tests()
    260  nr_allocated, preempt_count);  in single_thread_tests()
    264  nr_allocated, preempt_count);  in single_thread_tests()
    269  nr_allocated, preempt_count);  in single_thread_tests()
    273  nr_allocated, preempt_count);  in single_thread_tests()
    281  nr_allocated, preempt_count);  in single_thread_tests()
    324  nr_allocated, preempt_count);  in main()
|
/linux-6.6.21/tools/testing/radix-tree/linux/ |
D | preempt.h |
    5  extern int preempt_count;
    7  #define preempt_disable() uatomic_inc(&preempt_count)
    8  #define preempt_enable() uatomic_dec(&preempt_count)
|
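The main.c hits listed further above print nr_allocated and this stub counter after each test, so an unbalanced preempt_disable()/preempt_enable() pair in the code under test shows up as a non-zero preempt_count. A trimmed illustration of that check follows; it uses plain increments instead of liburcu's uatomic helpers, and the test body is hypothetical.

```c
#include <stdio.h>

/* Userspace stub in the spirit of tools/testing/radix-tree/linux/preempt.h,
 * but with plain increments instead of uatomic_inc()/uatomic_dec(). */
static int preempt_count;
#define preempt_disable() (preempt_count++)
#define preempt_enable()  (preempt_count--)

/* Hypothetical test body: every disable must be paired with an enable. */
static void example_test(void)
{
	preempt_disable();
	/* ... exercise the radix tree / XArray code under test ... */
	preempt_enable();
}

int main(void)
{
	example_test();
	/* The harness prints this after every test; non-zero means a leak. */
	printf("after test: preempt_count %d\n", preempt_count);
	return preempt_count != 0;
}
```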
/linux-6.6.21/arch/sh/kernel/ |
D | irq.c |
    96  irqctx->tinfo.preempt_count =  in handle_one_irq()
    97  (irqctx->tinfo.preempt_count & ~SOFTIRQ_MASK) |  in handle_one_irq()
    98  (curctx->tinfo.preempt_count & SOFTIRQ_MASK);  in handle_one_irq()
    130  irqctx->tinfo.preempt_count = HARDIRQ_OFFSET;  in irq_ctx_init()
    138  irqctx->tinfo.preempt_count = 0;  in irq_ctx_init()
|
/linux-6.6.21/lib/ |
D | smp_processor_id.c |
    16  if (likely(preempt_count()))  in check_preemption_disabled()
    46  what1, what2, preempt_count() - 1, current->comm, current->pid);  in check_preemption_disabled()
|
/linux-6.6.21/arch/nios2/include/asm/ |
D | thread_info.h |
    40  int preempt_count; /* 0 => preemptable,<0 => BUG */  member
    54  .preempt_count = INIT_PREEMPT_COUNT, \
|
/linux-6.6.21/arch/m68k/include/asm/ |
D | thread_info.h |
    29  int preempt_count; /* 0 => preemptable, <0 => BUG */  member
    38  .preempt_count = INIT_PREEMPT_COUNT, \
|
/linux-6.6.21/arch/riscv/include/asm/ |
D | thread_info.h |
    50  int preempt_count; /* 0=>preemptible, <0=>BUG */  member
    70  .preempt_count = INIT_PREEMPT_COUNT, \
|
/linux-6.6.21/arch/arc/include/asm/ |
D | thread_info.h |
    41  int preempt_count; /* 0 => preemptable, <0 => BUG */  member
    56  .preempt_count = INIT_PREEMPT_COUNT, \
|
/linux-6.6.21/arch/hexagon/include/asm/ |
D | thread_info.h |
    35  int preempt_count; /* 0=>preemptible,<0=>BUG */  member
    63  .preempt_count = 1, \
|
/linux-6.6.21/arch/openrisc/include/asm/ |
D | thread_info.h |
    47  __s32 preempt_count; /* 0 => preemptable, <0 => BUG */  member
    67  .preempt_count = INIT_PREEMPT_COUNT, \
|
/linux-6.6.21/arch/um/include/asm/ |
D | thread_info.h |
    23  int preempt_count; /* 0 => preemptable,  member
    35  .preempt_count = INIT_PREEMPT_COUNT, \
|
/linux-6.6.21/arch/parisc/include/asm/ |
D | thread_info.h |
    11  int preempt_count; /* 0=premptable, <0=BUG; will also serve as bh-counter */  member
    20  .preempt_count = INIT_PREEMPT_COUNT, \
|
/linux-6.6.21/kernel/trace/ |
D | trace_irqsoff.c |
    122  if (!irqs_disabled_flags(*flags) && !preempt_count())  in func_prolog_dec()
    440  if (preempt_trace(preempt_count()) || irq_trace())  in start_critical_timings()
    448  if (preempt_trace(preempt_count()) || irq_trace())  in stop_critical_timings()
    611  if (!preempt_trace(preempt_count()) && irq_trace())  in tracer_hardirqs_on()
    618  if (!preempt_trace(preempt_count()) && irq_trace())  in tracer_hardirqs_off()
    659  if (preempt_trace(preempt_count()) && !irq_trace())  in tracer_preempt_on()
    665  if (preempt_trace(preempt_count()) && !irq_trace())  in tracer_preempt_off()
|
/linux-6.6.21/arch/sparc/include/asm/ |
D | thread_info_32.h |
    33  int preempt_count; /* 0 => preemptable,  member
    63  .preempt_count = INIT_PREEMPT_COUNT, \
|
/linux-6.6.21/arch/csky/include/asm/ |
D | thread_info.h |
    17  int preempt_count;  member
    27  .preempt_count = INIT_PREEMPT_COUNT, \
|
/linux-6.6.21/arch/loongarch/include/asm/ |
D | thread_info.h |
    29  int preempt_count; /* 0 => preemptible, <0 => BUG */  member
    43  .preempt_count = INIT_PREEMPT_COUNT, \
|
/linux-6.6.21/arch/xtensa/include/asm/ |
D | thread_info.h |
    53  __s32 preempt_count; /* 0 => preemptable,< 0 => BUG*/  member
    90  .preempt_count = INIT_PREEMPT_COUNT, \
|
/linux-6.6.21/arch/microblaze/include/asm/ |
D | thread_info.h |
    64  __s32 preempt_count; /* 0 => preemptable,< 0 => BUG*/  member
    77  .preempt_count = INIT_PREEMPT_COUNT, \
|
/linux-6.6.21/arch/alpha/include/asm/ |
D | thread_info.h |
    23  int preempt_count; /* 0 => preemptable, <0 => BUG */  member
    38  .preempt_count = INIT_PREEMPT_COUNT, \
|
/linux-6.6.21/arch/sh/include/asm/ |
D | thread_info.h |
    32  int preempt_count; /* 0 => preemptable, <0 => BUG */  member
    59  .preempt_count = INIT_PREEMPT_COUNT, \
|