/linux-6.6.21/include/linux/atomic/ |
D | atomic-instrumented.h |
   29 static __always_inline int
   46 static __always_inline int
   64 static __always_inline void
   82 static __always_inline void
   101 static __always_inline void
   119 static __always_inline int
   138 static __always_inline int
   156 static __always_inline int
   175 static __always_inline int
   193 static __always_inline int
   [all …]
|
D | atomic-long.h |
   34 static __always_inline long
   54 static __always_inline long
   75 static __always_inline void
   96 static __always_inline void
   117 static __always_inline void
   138 static __always_inline long
   159 static __always_inline long
   180 static __always_inline long
   201 static __always_inline long
   222 static __always_inline long
   [all …]
|
D | atomic-arch-fallback.h |
   441 static __always_inline int
   457 static __always_inline int
   487 static __always_inline void
   504 static __always_inline void
   530 static __always_inline void
   547 static __always_inline int
   574 static __always_inline int
   601 static __always_inline int
   627 static __always_inline int
   650 static __always_inline int
   [all …]
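
The three headers above are the layered front end of the kernel's atomic API: atomic-instrumented.h adds sanitizer instrumentation around the arch ops, atomic-long.h maps atomic_long_t onto the 32- or 64-bit type, and atomic-arch-fallback.h synthesises any operation an architecture does not provide natively. A minimal userspace sketch of that fallback idea, building fetch-and-add out of compare-and-swap with C11 atomics; the sketch_ name is invented for illustration, not kernel API.

#include <stdatomic.h>
#include <stdio.h>

/* Fetch-and-add built from nothing but compare-and-swap: the shape a
 * generated fallback takes when an architecture only supplies a cmpxchg
 * primitive. */
static int sketch_fetch_add(atomic_int *v, int i)
{
    int old = atomic_load_explicit(v, memory_order_relaxed);

    /* Retry until our compare-and-swap wins the race; 'old' is refreshed
     * with the observed value on every failed attempt. */
    while (!atomic_compare_exchange_weak_explicit(v, &old, old + i,
                                                  memory_order_seq_cst,
                                                  memory_order_relaxed))
        ;
    return old;
}

int main(void)
{
    atomic_int v = 40;
    int old = sketch_fetch_add(&v, 2);

    printf("old=%d new=%d\n", old, atomic_load(&v));
    return 0;
}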
|
/linux-6.6.21/arch/powerpc/include/asm/ |
D | kup.h |
   12 static __always_inline bool kuap_is_disabled(void);
   51 static __always_inline bool kuap_is_disabled(void) in kuap_is_disabled()
   58 static __always_inline bool kuap_is_disabled(void) { return true; } in kuap_is_disabled()
   60 static __always_inline bool
   66 static __always_inline void kuap_user_restore(struct pt_regs *regs) { } in kuap_user_restore()
   67 static __always_inline void __kuap_kernel_restore(struct pt_regs *regs, unsigned long amr) { } in __kuap_kernel_restore()
   75 static __always_inline void allow_user_access(void __user *to, const void __user *from, in allow_user_access()
   77 static __always_inline void prevent_user_access(unsigned long dir) { } in prevent_user_access()
   78 static __always_inline unsigned long prevent_user_access_return(void) { return 0UL; } in prevent_user_access_return()
   79 static __always_inline void restore_user_access(unsigned long flags) { } in restore_user_access()
   [all …]
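
The pattern visible above is worth noting: when the KUAP feature is compiled out, the same always-inline helpers still exist but collapse to empty bodies, so callers never need #ifdefs. A minimal sketch of that shape; CONFIG_SKETCH_FEATURE and the feature_ names are invented for illustration.

#include <stdbool.h>

#ifdef CONFIG_SKETCH_FEATURE
static inline bool feature_is_disabled(void) { return false; }
static inline void feature_prevent_user_access(void)
{
    /* real hardware poking would go here */
}
#else
/* Compiled-out variant: identical signatures, empty bodies, zero cost. */
static inline bool feature_is_disabled(void) { return true; }
static inline void feature_prevent_user_access(void) { }
#endif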
|
D | cmpxchg.h |
   86 static __always_inline unsigned long
   102 static __always_inline unsigned long
   118 static __always_inline unsigned long
   134 static __always_inline unsigned long
   151 static __always_inline unsigned long
   167 static __always_inline unsigned long
   184 static __always_inline unsigned long
   200 static __always_inline unsigned long
   217 static __always_inline unsigned long
   236 static __always_inline unsigned long
   [all …]
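
Every variant indexed above implements one contract: atomically replace a value only if it still holds the expected one, and return whatever was observed. A userspace restatement of that contract with C11 atomics; sketch_cmpxchg() is an invented name, and the powerpc originals are per-size and built on load-reserve/store-conditional loops rather than a library call.

#include <stdatomic.h>

/* Atomically: if (*p == old) then *p = new; in all cases return the value
 * that was observed. */
static unsigned long
sketch_cmpxchg(_Atomic unsigned long *p, unsigned long old, unsigned long new)
{
    unsigned long expected = old;

    atomic_compare_exchange_strong(p, &expected, new);
    return expected;   /* == old on success, the conflicting value otherwise */
}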
|
/linux-6.6.21/arch/x86/include/asm/ |
D | atomic.h |
   17 static __always_inline int arch_atomic_read(const atomic_t *v) in arch_atomic_read()
   26 static __always_inline void arch_atomic_set(atomic_t *v, int i) in arch_atomic_set()
   31 static __always_inline void arch_atomic_add(int i, atomic_t *v) in arch_atomic_add()
   38 static __always_inline void arch_atomic_sub(int i, atomic_t *v) in arch_atomic_sub()
   45 static __always_inline bool arch_atomic_sub_and_test(int i, atomic_t *v) in arch_atomic_sub_and_test()
   51 static __always_inline void arch_atomic_inc(atomic_t *v) in arch_atomic_inc()
   58 static __always_inline void arch_atomic_dec(atomic_t *v) in arch_atomic_dec()
   65 static __always_inline bool arch_atomic_dec_and_test(atomic_t *v) in arch_atomic_dec_and_test()
   71 static __always_inline bool arch_atomic_inc_and_test(atomic_t *v) in arch_atomic_inc_and_test()
   77 static __always_inline bool arch_atomic_add_negative(int i, atomic_t *v) in arch_atomic_add_negative()
   [all …]
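
The x86 helpers indexed above are thin wrappers around lock-prefixed instructions operating on a plain int field. A compilable sketch of that shape for GCC or Clang on x86; the sketch_ names are invented stand-ins for atomic_t and the LOCK_PREFIX machinery.

typedef struct { volatile int counter; } sketch_atomic_t;

/* arch_atomic_add()'s shape: one lock-prefixed read-modify-write. */
static inline void sketch_atomic_add(int i, sketch_atomic_t *v)
{
    asm volatile("lock addl %1, %0"
                 : "+m" (v->counter)
                 : "ir" (i)
                 : "memory");
}

/* arch_atomic_read()'s shape: an ordinary load; aligned x86 loads are
 * not torn, so no lock prefix is needed for a plain read. */
static inline int sketch_atomic_read(const sketch_atomic_t *v)
{
    return v->counter;
}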
|
D | irqflags.h |
   17 extern __always_inline unsigned long native_save_fl(void) in native_save_fl()
   35 static __always_inline void native_irq_disable(void) in native_irq_disable()
   40 static __always_inline void native_irq_enable(void) in native_irq_enable()
   45 static __always_inline void native_safe_halt(void) in native_safe_halt()
   51 static __always_inline void native_halt(void) in native_halt()
   65 static __always_inline unsigned long arch_local_save_flags(void) in arch_local_save_flags()
   70 static __always_inline void arch_local_irq_disable(void) in arch_local_irq_disable()
   75 static __always_inline void arch_local_irq_enable(void) in arch_local_irq_enable()
   84 static __always_inline void arch_safe_halt(void) in arch_safe_halt()
   93 static __always_inline void halt(void) in halt()
   [all …]
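
Of the helpers above, native_save_fl() is the one that can be demonstrated outside the kernel, since reading RFLAGS is unprivileged; the disable/enable paths boil down to cli/sti, which require ring 0. A sketch for GCC or Clang on x86, with an invented name.

/* Read RFLAGS the way native_save_fl() does: push it, pop it back out. */
static inline unsigned long sketch_save_fl(void)
{
    unsigned long flags;

    asm volatile("pushf ; pop %0"
                 : "=rm" (flags)
                 : /* no inputs */
                 : "memory");
    return flags;
}

/* native_irq_disable()/native_irq_enable() are just "cli"/"sti", which
 * fault outside ring 0, so they are not reproduced here. */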
|
D | atomic64_64.h |
   13 static __always_inline s64 arch_atomic64_read(const atomic64_t *v) in arch_atomic64_read()
   18 static __always_inline void arch_atomic64_set(atomic64_t *v, s64 i) in arch_atomic64_set()
   23 static __always_inline void arch_atomic64_add(s64 i, atomic64_t *v) in arch_atomic64_add()
   30 static __always_inline void arch_atomic64_sub(s64 i, atomic64_t *v) in arch_atomic64_sub()
   37 static __always_inline bool arch_atomic64_sub_and_test(s64 i, atomic64_t *v) in arch_atomic64_sub_and_test()
   43 static __always_inline void arch_atomic64_inc(atomic64_t *v) in arch_atomic64_inc()
   51 static __always_inline void arch_atomic64_dec(atomic64_t *v) in arch_atomic64_dec()
   59 static __always_inline bool arch_atomic64_dec_and_test(atomic64_t *v) in arch_atomic64_dec_and_test()
   65 static __always_inline bool arch_atomic64_inc_and_test(atomic64_t *v) in arch_atomic64_inc_and_test()
   71 static __always_inline bool arch_atomic64_add_negative(s64 i, atomic64_t *v) in arch_atomic64_add_negative()
   [all …]
|
D | bitops.h |
   51 static __always_inline void
   65 static __always_inline void
   71 static __always_inline void
   84 static __always_inline void
   91 static __always_inline void
   97 static __always_inline bool
   110 static __always_inline void
   116 static __always_inline void
   122 static __always_inline void
   135 static __always_inline bool
   [all …]
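
The x86 bitops behind these matches map each operation onto a single bts/btr/btc instruction (lock-prefixed for the atomic variants), with the old bit value coming back in the carry flag. A hedged sketch of the test-and-set shape, compilable on x86-64 with GCC 6+ or a recent Clang (it relies on flag-output constraints); the sketch_ name is invented.

#include <stdbool.h>

static inline bool sketch_test_and_set_bit(long nr, volatile unsigned long *addr)
{
    bool oldbit;

    /* "lock bts" sets bit nr and leaves its previous value in CF;
     * "=@ccc" hands that carry flag straight back to C. */
    asm volatile("lock btsq %2, %1"
                 : "=@ccc" (oldbit), "+m" (*addr)
                 : "r" (nr)
                 : "memory");
    return oldbit;
}

Because the bit offset is a register operand, the CPU addresses past the first word when nr exceeds 63, which matches the bitmap semantics these helpers provide.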
|
D | atomic64_32.h |
   64 static __always_inline s64 arch_atomic64_cmpxchg(atomic64_t *v, s64 o, s64 n) in arch_atomic64_cmpxchg()
   70 static __always_inline s64 arch_atomic64_xchg(atomic64_t *v, s64 n) in arch_atomic64_xchg()
   82 static __always_inline void arch_atomic64_set(atomic64_t *v, s64 i) in arch_atomic64_set()
   91 static __always_inline s64 arch_atomic64_read(const atomic64_t *v) in arch_atomic64_read()
   98 static __always_inline s64 arch_atomic64_add_return(s64 i, atomic64_t *v) in arch_atomic64_add_return()
   107 static __always_inline s64 arch_atomic64_sub_return(s64 i, atomic64_t *v) in arch_atomic64_sub_return()
   116 static __always_inline s64 arch_atomic64_inc_return(atomic64_t *v) in arch_atomic64_inc_return()
   125 static __always_inline s64 arch_atomic64_dec_return(atomic64_t *v) in arch_atomic64_dec_return()
   134 static __always_inline s64 arch_atomic64_add(s64 i, atomic64_t *v) in arch_atomic64_add()
   142 static __always_inline s64 arch_atomic64_sub(s64 i, atomic64_t *v) in arch_atomic64_sub()
   [all …]
|
/linux-6.6.21/arch/arm64/include/asm/ |
D | irqflags.h |
   24 static __always_inline bool __irqflags_uses_pmr(void) in __irqflags_uses_pmr()
   30 static __always_inline void __daif_local_irq_enable(void) in __daif_local_irq_enable()
   37 static __always_inline void __pmr_local_irq_enable(void) in __pmr_local_irq_enable()
   59 static __always_inline void __daif_local_irq_disable(void) in __daif_local_irq_disable()
   66 static __always_inline void __pmr_local_irq_disable(void) in __pmr_local_irq_disable()
   87 static __always_inline unsigned long __daif_local_save_flags(void) in __daif_local_save_flags()
   92 static __always_inline unsigned long __pmr_local_save_flags(void) in __pmr_local_save_flags()
   109 static __always_inline bool __daif_irqs_disabled_flags(unsigned long flags) in __daif_irqs_disabled_flags()
   114 static __always_inline bool __pmr_irqs_disabled_flags(unsigned long flags) in __pmr_irqs_disabled_flags()
   128 static __always_inline bool __daif_irqs_disabled(void) in __daif_irqs_disabled()
   [all …]
|
D | kvm_emulate.h |
   58 static __always_inline bool vcpu_el1_is_32bit(struct kvm_vcpu *vcpu) in vcpu_el1_is_32bit()
   63 static __always_inline bool vcpu_el1_is_32bit(struct kvm_vcpu *vcpu) in vcpu_el1_is_32bit()
   147 static __always_inline unsigned long *vcpu_pc(const struct kvm_vcpu *vcpu) in vcpu_pc()
   152 static __always_inline unsigned long *vcpu_cpsr(const struct kvm_vcpu *vcpu) in vcpu_cpsr()
   157 static __always_inline bool vcpu_mode_is_32bit(const struct kvm_vcpu *vcpu) in vcpu_mode_is_32bit()
   162 static __always_inline bool kvm_condition_valid(const struct kvm_vcpu *vcpu) in kvm_condition_valid()
   180 static __always_inline unsigned long vcpu_get_reg(const struct kvm_vcpu *vcpu, in vcpu_get_reg()
   186 static __always_inline void vcpu_set_reg(struct kvm_vcpu *vcpu, u8 reg_num, in vcpu_set_reg()
   295 static __always_inline u64 kvm_vcpu_get_esr(const struct kvm_vcpu *vcpu) in kvm_vcpu_get_esr()
   300 static __always_inline int kvm_vcpu_get_condition(const struct kvm_vcpu *vcpu) in kvm_vcpu_get_condition()
   [all …]
|
/linux-6.6.21/include/linux/ |
D | context_tracking_state.h |
   52 static __always_inline int __ct_state(void) in __ct_state()
   59 static __always_inline int ct_dynticks(void) in ct_dynticks()
   64 static __always_inline int ct_dynticks_cpu(int cpu) in ct_dynticks_cpu()
   71 static __always_inline int ct_dynticks_cpu_acquire(int cpu) in ct_dynticks_cpu_acquire()
   78 static __always_inline long ct_dynticks_nesting(void) in ct_dynticks_nesting()
   83 static __always_inline long ct_dynticks_nesting_cpu(int cpu) in ct_dynticks_nesting_cpu()
   90 static __always_inline long ct_dynticks_nmi_nesting(void) in ct_dynticks_nmi_nesting()
   95 static __always_inline long ct_dynticks_nmi_nesting_cpu(int cpu) in ct_dynticks_nmi_nesting_cpu()
   106 static __always_inline bool context_tracking_enabled(void) in context_tracking_enabled()
   111 static __always_inline bool context_tracking_enabled_cpu(int cpu) in context_tracking_enabled_cpu()
   [all …]
|
D | rwlock_rt.h |
   35 static __always_inline void read_lock(rwlock_t *rwlock) in read_lock()
   40 static __always_inline void read_lock_bh(rwlock_t *rwlock) in read_lock_bh()
   46 static __always_inline void read_lock_irq(rwlock_t *rwlock) in read_lock_irq()
   60 static __always_inline void read_unlock(rwlock_t *rwlock) in read_unlock()
   65 static __always_inline void read_unlock_bh(rwlock_t *rwlock) in read_unlock_bh()
   71 static __always_inline void read_unlock_irq(rwlock_t *rwlock) in read_unlock_irq()
   76 static __always_inline void read_unlock_irqrestore(rwlock_t *rwlock, in read_unlock_irqrestore()
   82 static __always_inline void write_lock(rwlock_t *rwlock) in write_lock()
   88 static __always_inline void write_lock_nested(rwlock_t *rwlock, int subclass) in write_lock_nested()
   96 static __always_inline void write_lock_bh(rwlock_t *rwlock) in write_lock_bh()
   [all …]
|
D | context_tracking.h |
   39 static __always_inline void user_enter_irqoff(void) in user_enter_irqoff()
   45 static __always_inline void user_exit_irqoff(void) in user_exit_irqoff()
   75 static __always_inline bool context_tracking_guest_enter(void) in context_tracking_guest_enter()
   83 static __always_inline void context_tracking_guest_exit(void) in context_tracking_guest_exit()
   100 static __always_inline bool context_tracking_guest_enter(void) { return false; } in context_tracking_guest_enter()
   101 static __always_inline void context_tracking_guest_exit(void) { } in context_tracking_guest_exit()
   120 static __always_inline bool rcu_dynticks_curr_cpu_in_eqs(void) in rcu_dynticks_curr_cpu_in_eqs()
   129 static __always_inline unsigned long ct_state_inc(int incby) in ct_state_inc()
   134 static __always_inline bool warn_rcu_enter(void) in warn_rcu_enter()
   151 static __always_inline void warn_rcu_exit(bool rcu) in warn_rcu_exit()
   [all …]
|
D | kdev_t.h |
   24 static __always_inline bool old_valid_dev(dev_t dev) in old_valid_dev()
   29 static __always_inline u16 old_encode_dev(dev_t dev) in old_encode_dev()
   34 static __always_inline dev_t old_decode_dev(u16 val) in old_decode_dev()
   39 static __always_inline u32 new_encode_dev(dev_t dev) in new_encode_dev()
   46 static __always_inline dev_t new_decode_dev(u32 dev) in new_decode_dev()
   53 static __always_inline u64 huge_encode_dev(dev_t dev) in huge_encode_dev()
   58 static __always_inline dev_t huge_decode_dev(u64 dev) in huge_decode_dev()
   63 static __always_inline int sysv_valid_dev(dev_t dev) in sysv_valid_dev()
   68 static __always_inline u32 sysv_encode_dev(dev_t dev) in sysv_encode_dev()
   73 static __always_inline unsigned sysv_major(u32 dev) in sysv_major()
   [all …]
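
These helpers convert between the kernel-internal dev_t layout (a 12-bit major above a 20-bit minor) and user-visible encodings that keep the low 8 minor bits next to the major for compatibility with the old 16-bit format. A self-contained sketch of the new_encode_dev()/new_decode_dev() round trip; the SKETCH_ macros are local stand-ins for MAJOR/MINOR/MKDEV.

#include <stdint.h>
#include <stdio.h>

#define SKETCH_MINORBITS      20
#define SKETCH_MAJOR(dev)     ((uint32_t)((dev) >> SKETCH_MINORBITS))
#define SKETCH_MINOR(dev)     ((uint32_t)((dev) & ((1u << SKETCH_MINORBITS) - 1)))
#define SKETCH_MKDEV(ma, mi)  (((ma) << SKETCH_MINORBITS) | (mi))

/* Keep minor bits 0-7 in place, put the major in bits 8-19, and push the
 * remaining minor bits above the major. */
static uint32_t sketch_new_encode_dev(uint32_t dev)
{
    uint32_t major = SKETCH_MAJOR(dev), minor = SKETCH_MINOR(dev);

    return (minor & 0xff) | (major << 8) | ((minor & ~0xffu) << 12);
}

static uint32_t sketch_new_decode_dev(uint32_t dev)
{
    uint32_t major = (dev & 0xfff00) >> 8;
    uint32_t minor = (dev & 0xff) | ((dev >> 12) & 0xfff00);

    return SKETCH_MKDEV(major, minor);
}

int main(void)
{
    uint32_t dev = SKETCH_MKDEV(259u, 131330u);   /* arbitrary 12:20 example */
    uint32_t enc = sketch_new_encode_dev(dev);

    printf("dev=%#x encoded=%#x decoded=%#x\n",
           (unsigned)dev, (unsigned)enc, (unsigned)sketch_new_decode_dev(enc));
    return 0;
}

The encode/decode pair is lossless for any 12-bit major / 20-bit minor value, which is the property the listed helpers exist to preserve.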
|
/linux-6.6.21/arch/powerpc/include/asm/nohash/ |
D | kup-booke.h |
   21 static __always_inline void __kuap_lock(void) in __kuap_lock()
   28 static __always_inline void __kuap_save_and_lock(struct pt_regs *regs) in __kuap_save_and_lock()
   36 static __always_inline void kuap_user_restore(struct pt_regs *regs) in kuap_user_restore()
   46 static __always_inline void __kuap_kernel_restore(struct pt_regs *regs, unsigned long kuap) in __kuap_kernel_restore()
   55 static __always_inline unsigned long __kuap_get_and_assert_locked(void) in __kuap_get_and_assert_locked()
   64 static __always_inline void uaccess_begin_booke(unsigned long val) in uaccess_begin_booke()
   70 static __always_inline void uaccess_end_booke(void) in uaccess_end_booke()
   76 static __always_inline void allow_user_access(void __user *to, const void __user *from, in allow_user_access()
   82 static __always_inline void prevent_user_access(unsigned long dir) in prevent_user_access()
   87 static __always_inline unsigned long prevent_user_access_return(void) in prevent_user_access_return()
   [all …]
|
/linux-6.6.21/include/net/ |
D | checksum.h |
   27 static __always_inline
   38 static __always_inline __wsum csum_and_copy_to_user in csum_and_copy_to_user()
   50 static __always_inline __wsum
   59 static __always_inline __wsum csum_add(__wsum csum, __wsum addend) in csum_add()
   67 static __always_inline __wsum csum_sub(__wsum csum, __wsum addend) in csum_sub()
   72 static __always_inline __sum16 csum16_add(__sum16 csum, __be16 addend) in csum16_add()
   80 static __always_inline __sum16 csum16_sub(__sum16 csum, __be16 addend) in csum16_sub()
   86 static __always_inline __wsum csum_shift(__wsum sum, int offset) in csum_shift()
   95 static __always_inline __wsum
   101 static __always_inline __wsum
   [all …]
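
csum_add(), csum_sub() and their 16-bit variants above all perform one's-complement arithmetic: a carry out of the top is folded back into the low bits, and subtraction is addition of the bitwise complement. A small userspace sketch of that folding; the sketch_ names are invented and the __wsum/__sum16 typing is dropped for brevity.

#include <stdint.h>
#include <stdio.h>

static uint32_t sketch_csum_add(uint32_t csum, uint32_t addend)
{
    uint32_t res = csum + addend;

    /* Fold the carry-out back in: one's-complement addition. */
    return res + (res < addend);
}

static uint32_t sketch_csum_sub(uint32_t csum, uint32_t addend)
{
    /* Subtracting is adding the one's complement. */
    return sketch_csum_add(csum, ~addend);
}

int main(void)
{
    uint32_t sum = sketch_csum_add(0xffff0001u, 0x00020003u);

    /* 0xffff0001 + 0x00020003 overflows to 0x00010004; folding the carry
     * gives 0x00010005. */
    printf("%#x\n", (unsigned)sum);
    return 0;
}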
|
/linux-6.6.21/arch/powerpc/include/asm/nohash/32/ |
D | kup-8xx.h |
   14 static __always_inline void __kuap_save_and_lock(struct pt_regs *regs) in __kuap_save_and_lock()
   21 static __always_inline void kuap_user_restore(struct pt_regs *regs) in kuap_user_restore()
   25 static __always_inline void __kuap_kernel_restore(struct pt_regs *regs, unsigned long kuap) in __kuap_kernel_restore()
   31 static __always_inline unsigned long __kuap_get_and_assert_locked(void) in __kuap_get_and_assert_locked()
   40 static __always_inline void uaccess_begin_8xx(unsigned long val) in uaccess_begin_8xx()
   46 static __always_inline void uaccess_end_8xx(void) in uaccess_end_8xx()
   52 static __always_inline void allow_user_access(void __user *to, const void __user *from, in allow_user_access()
   58 static __always_inline void prevent_user_access(unsigned long dir) in prevent_user_access()
   63 static __always_inline unsigned long prevent_user_access_return(void) in prevent_user_access_return()
   74 static __always_inline void restore_user_access(unsigned long flags) in restore_user_access()
   [all …]
|
/linux-6.6.21/include/asm-generic/ |
D | preempt.h |
   9 static __always_inline int preempt_count(void) in preempt_count()
   14 static __always_inline volatile int *preempt_count_ptr(void) in preempt_count_ptr()
   19 static __always_inline void preempt_count_set(int pc) in preempt_count_set()
   35 static __always_inline void set_preempt_need_resched(void) in set_preempt_need_resched()
   39 static __always_inline void clear_preempt_need_resched(void) in clear_preempt_need_resched()
   43 static __always_inline bool test_preempt_need_resched(void) in test_preempt_need_resched()
   52 static __always_inline void __preempt_count_add(int val) in __preempt_count_add()
   57 static __always_inline void __preempt_count_sub(int val) in __preempt_count_sub()
   62 static __always_inline bool __preempt_count_dec_and_test(void) in __preempt_count_dec_and_test()
   75 static __always_inline bool should_resched(int preempt_offset) in should_resched()
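
The generic preempt helpers indexed above keep the count in thread_info and use plain loads, stores and increments, since only the owning context ever touches its own counter. A much-simplified sketch of the semantics only, with a thread-local variable standing in for thread_info::preempt_count and invented names.

#include <stdbool.h>

static _Thread_local volatile int sketch_preempt_count;

/* __preempt_count_add()'s shape: no atomics needed, only this context
 * ever modifies its own counter. */
static inline void sketch_preempt_disable(void)
{
    sketch_preempt_count += 1;
}

/* __preempt_count_dec_and_test()'s shape: report when the count returns
 * to zero (the real helper additionally checks the need-resched state). */
static inline bool sketch_preempt_enable(void)
{
    return --sketch_preempt_count == 0;
}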
|
D | pgtable_uffd.h |
   5 static __always_inline int pte_uffd_wp(pte_t pte) in pte_uffd_wp()
   10 static __always_inline int pmd_uffd_wp(pmd_t pmd) in pmd_uffd_wp()
   15 static __always_inline pte_t pte_mkuffd_wp(pte_t pte) in pte_mkuffd_wp()
   20 static __always_inline pmd_t pmd_mkuffd_wp(pmd_t pmd) in pmd_mkuffd_wp()
   25 static __always_inline pte_t pte_clear_uffd_wp(pte_t pte) in pte_clear_uffd_wp()
   30 static __always_inline pmd_t pmd_clear_uffd_wp(pmd_t pmd) in pmd_clear_uffd_wp()
   35 static __always_inline pte_t pte_swp_mkuffd_wp(pte_t pte) in pte_swp_mkuffd_wp()
   40 static __always_inline int pte_swp_uffd_wp(pte_t pte) in pte_swp_uffd_wp()
   45 static __always_inline pte_t pte_swp_clear_uffd_wp(pte_t pte) in pte_swp_clear_uffd_wp()
|
/linux-6.6.21/arch/x86/kvm/vmx/ |
D | hyperv.h |
   31 static __always_inline int evmcs_field_offset(unsigned long field, in evmcs_field_offset()
   72 static __always_inline bool kvm_is_using_evmcs(void) in kvm_is_using_evmcs()
   77 static __always_inline int get_evmcs_offset(unsigned long field, in get_evmcs_offset()
   86 static __always_inline void evmcs_write64(unsigned long field, u64 value) in evmcs_write64()
   99 static __always_inline void evmcs_write32(unsigned long field, u32 value) in evmcs_write32()
   111 static __always_inline void evmcs_write16(unsigned long field, u16 value) in evmcs_write16()
   123 static __always_inline u64 evmcs_read64(unsigned long field) in evmcs_read64()
   133 static __always_inline u32 evmcs_read32(unsigned long field) in evmcs_read32()
   143 static __always_inline u16 evmcs_read16(unsigned long field) in evmcs_read16()
   166 static __always_inline bool kvm_is_using_evmcs(void) { return false; } in kvm_is_using_evmcs()
   [all …]
|
/linux-6.6.21/arch/powerpc/include/asm/book3s/32/ |
D | kup.h |
   18 static __always_inline void kuap_lock_one(unsigned long addr) in kuap_lock_one()
   24 static __always_inline void kuap_unlock_one(unsigned long addr) in kuap_unlock_one()
   30 static __always_inline void uaccess_begin_32s(unsigned long addr) in uaccess_begin_32s()
   44 static __always_inline void uaccess_end_32s(unsigned long addr) in uaccess_end_32s()
   58 static __always_inline void __kuap_save_and_lock(struct pt_regs *regs) in __kuap_save_and_lock()
   71 static __always_inline void kuap_user_restore(struct pt_regs *regs) in kuap_user_restore()
   75 static __always_inline void __kuap_kernel_restore(struct pt_regs *regs, unsigned long kuap) in __kuap_kernel_restore()
   90 static __always_inline unsigned long __kuap_get_and_assert_locked(void) in __kuap_get_and_assert_locked()
   100 static __always_inline void allow_user_access(void __user *to, const void __user *from, in allow_user_access()
   112 static __always_inline void prevent_user_access(unsigned long dir) in prevent_user_access()
   [all …]
|
/linux-6.6.21/arch/s390/include/asm/ |
D | irqflags.h |
   35 static __always_inline void __arch_local_irq_ssm(unsigned long flags) in __arch_local_irq_ssm()
   40 static __always_inline unsigned long arch_local_save_flags(void) in arch_local_save_flags()
   45 static __always_inline unsigned long arch_local_irq_save(void) in arch_local_irq_save()
   50 static __always_inline void arch_local_irq_disable(void) in arch_local_irq_disable()
   55 static __always_inline void arch_local_irq_enable(void) in arch_local_irq_enable()
   61 static __always_inline void arch_local_irq_restore(unsigned long flags) in arch_local_irq_restore()
   68 static __always_inline bool arch_irqs_disabled_flags(unsigned long flags) in arch_irqs_disabled_flags()
   73 static __always_inline bool arch_irqs_disabled(void) in arch_irqs_disabled()
|
/linux-6.6.21/arch/powerpc/include/asm/vdso/ |
D | gettimeofday.h |
   17 static __always_inline int do_syscall_2(const unsigned long _r0, const unsigned long _r3, in do_syscall_2()
   37 static __always_inline
   45 static __always_inline
   51 static __always_inline
   61 static __always_inline
   67 static __always_inline
   73 static __always_inline
   79 static __always_inline
   86 static __always_inline u64 __arch_get_hw_counter(s32 clock_mode, in __arch_get_hw_counter()
   95 static __always_inline
   [all …]
|