/linux/arch/powerpc/include/asm/

kup.h
    12  static __always_inline bool kuap_is_disabled(void);
    51  static __always_inline bool kuap_is_disabled(void) in kuap_is_disabled()
    58  static __always_inline bool kuap_is_disabled(void) { return true; } in kuap_is_disabled()
    60  static __always_inline bool
    66  static __always_inline void kuap_user_restore(struct pt_regs *regs) { } in kuap_user_restore()
    67  static __always_inline void __kuap_kernel_restore(struct pt_regs *regs, unsigned long amr) { } in __kuap_kernel_restore()
    75  static __always_inline void allow_user_access(void __user *to, const void __user *from, in allow_user_access()
    77  static __always_inline void prevent_user_access(unsigned long dir) { } in prevent_user_access()
    78  static __always_inline unsigned long prevent_user_access_return(void) { return 0UL; } in prevent_user_access_return()
    79  static __always_inline void restore_user_access(unsigned long flags) { } in restore_user_access()
    [all …]

cmpxchg.h
    86  static __always_inline unsigned long
    102  static __always_inline unsigned long
    118  static __always_inline unsigned long
    134  static __always_inline unsigned long
    151  static __always_inline unsigned long
    167  static __always_inline unsigned long
    184  static __always_inline unsigned long
    200  static __always_inline unsigned long
    217  static __always_inline unsigned long
    236  static __always_inline unsigned long
    [all …]

/linux/include/linux/

cpumask.h
    33  static __always_inline void set_nr_cpu_ids(unsigned int nr) in set_nr_cpu_ids()
    132  static __always_inline void cpu_max_bits_warn(unsigned int cpu, unsigned int bits) in cpu_max_bits_warn()
    140  static __always_inline unsigned int cpumask_check(unsigned int cpu) in cpumask_check()
    152  static __always_inline unsigned int cpumask_first(const struct cpumask *srcp) in cpumask_first()
    163  static __always_inline unsigned int cpumask_first_zero(const struct cpumask *srcp) in cpumask_first_zero()
    175  static __always_inline
    189  static __always_inline
    204  static __always_inline unsigned int cpumask_last(const struct cpumask *srcp) in cpumask_last()
    216  static __always_inline
    232  static __always_inline
    [all …]

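Note: cpumask_first() and cpumask_first_zero() above are thin __always_inline wrappers that scan the mask's underlying bitmap. A minimal stand-alone sketch of that scan over a plain unsigned long array — an illustration of the idea, not the kernel's find_first_bit() implementation:

    #include <stdio.h>

    #define BITS_PER_LONG (8 * sizeof(unsigned long))

    /* Return the index of the first set bit, or nbits if none is set. */
    static inline unsigned int first_set_bit(const unsigned long *map,
                                             unsigned int nbits)
    {
        for (unsigned int i = 0; i < nbits; i++) {
            if (map[i / BITS_PER_LONG] & (1UL << (i % BITS_PER_LONG)))
                return i;
        }
        return nbits;
    }

    int main(void)
    {
        unsigned long cpus[2] = { 0 };

        cpus[0] |= 1UL << 3;    /* pretend CPU 3 is in the mask */
        printf("first cpu: %u\n", first_set_bit(cpus, 2 * BITS_PER_LONG));
        return 0;
    }
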
nodemask.h
    110  static __always_inline unsigned int __nodemask_pr_numnodes(const nodemask_t *m) in __nodemask_pr_numnodes()
    114  static __always_inline const unsigned long *__nodemask_pr_bits(const nodemask_t *m) in __nodemask_pr_bits()
    129  static __always_inline void __node_set(int node, volatile nodemask_t *dstp) in __node_set()
    135  static __always_inline void __node_clear(int node, volatile nodemask_t *dstp) in __node_clear()
    141  static __always_inline void __nodes_setall(nodemask_t *dstp, unsigned int nbits) in __nodes_setall()
    147  static __always_inline void __nodes_clear(nodemask_t *dstp, unsigned int nbits) in __nodes_clear()
    157  static __always_inline bool __node_test_and_set(int node, nodemask_t *addr) in __node_test_and_set()
    164  static __always_inline void __nodes_and(nodemask_t *dstp, const nodemask_t *src1p, in __nodes_and()
    172  static __always_inline void __nodes_or(nodemask_t *dstp, const nodemask_t *src1p, in __nodes_or()
    180  static __always_inline void __nodes_xor(nodemask_t *dstp, const nodemask_t *src1p, in __nodes_xor()
    [all …]

context_tracking_state.h
    52  static __always_inline int __ct_state(void) in __ct_state()
    59  static __always_inline int ct_rcu_watching(void) in ct_rcu_watching()
    64  static __always_inline int ct_rcu_watching_cpu(int cpu) in ct_rcu_watching_cpu()
    71  static __always_inline int ct_rcu_watching_cpu_acquire(int cpu) in ct_rcu_watching_cpu_acquire()
    78  static __always_inline long ct_nesting(void) in ct_nesting()
    83  static __always_inline long ct_nesting_cpu(int cpu) in ct_nesting_cpu()
    90  static __always_inline long ct_nmi_nesting(void) in ct_nmi_nesting()
    95  static __always_inline long ct_nmi_nesting_cpu(int cpu) in ct_nmi_nesting_cpu()
    106  static __always_inline bool context_tracking_enabled(void) in context_tracking_enabled()
    111  static __always_inline bool context_tracking_enabled_cpu(int cpu) in context_tracking_enabled_cpu()
    [all …]

context_tracking.h
    39  static __always_inline void user_enter_irqoff(void) in user_enter_irqoff()
    45  static __always_inline void user_exit_irqoff(void) in user_exit_irqoff()
    75  static __always_inline bool context_tracking_guest_enter(void) in context_tracking_guest_enter()
    83  static __always_inline bool context_tracking_guest_exit(void) in context_tracking_guest_exit()
    102  static __always_inline bool context_tracking_guest_enter(void) { return false; } in context_tracking_guest_enter()
    103  static __always_inline bool context_tracking_guest_exit(void) { return false; } in context_tracking_guest_exit()
    126  static __always_inline bool rcu_is_watching_curr_cpu(void) in rcu_is_watching_curr_cpu()
    135  static __always_inline unsigned long ct_state_inc(int incby) in ct_state_inc()
    140  static __always_inline bool warn_rcu_enter(void) in warn_rcu_enter()
    157  static __always_inline void warn_rcu_exit(bool rcu) in warn_rcu_exit()
    [all …]

kdev_t.h
    24  static __always_inline bool old_valid_dev(dev_t dev) in old_valid_dev()
    29  static __always_inline u16 old_encode_dev(dev_t dev) in old_encode_dev()
    34  static __always_inline dev_t old_decode_dev(u16 val) in old_decode_dev()
    39  static __always_inline u32 new_encode_dev(dev_t dev) in new_encode_dev()
    46  static __always_inline dev_t new_decode_dev(u32 dev) in new_decode_dev()
    53  static __always_inline u64 huge_encode_dev(dev_t dev) in huge_encode_dev()
    58  static __always_inline dev_t huge_decode_dev(u64 dev) in huge_decode_dev()
    63  static __always_inline int sysv_valid_dev(dev_t dev) in sysv_valid_dev()
    68  static __always_inline u32 sysv_encode_dev(dev_t dev) in sysv_encode_dev()
    73  static __always_inline unsigned sysv_major(u32 dev) in sysv_major()
    [all …]

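Note: the kdev_t.h helpers above pack and unpack device numbers. A user-space sketch of the new_encode_dev()/new_decode_dev() pairing, assuming the usual 12-bit major / 20-bit minor split of the kernel dev_t; the MAJOR/MINOR/MKDEV macros and the bit layout are restated from general knowledge, not taken from this listing:

    #include <assert.h>
    #include <stdint.h>
    #include <stdio.h>

    #define MINORBITS 20
    #define MINORMASK ((1U << MINORBITS) - 1)

    #define MAJOR(dev)    ((unsigned int)((dev) >> MINORBITS))
    #define MINOR(dev)    ((unsigned int)((dev) & MINORMASK))
    #define MKDEV(ma, mi) (((ma) << MINORBITS) | (mi))

    /* Pack dev_t so the low 16 bits stay compatible with the old encoding. */
    static inline uint32_t new_encode_dev(uint32_t dev)
    {
        unsigned int major = MAJOR(dev);
        unsigned int minor = MINOR(dev);

        return (minor & 0xff) | (major << 8) | ((minor & ~0xffU) << 12);
    }

    static inline uint32_t new_decode_dev(uint32_t dev)
    {
        unsigned int major = (dev & 0xfff00) >> 8;
        unsigned int minor = (dev & 0xff) | ((dev >> 12) & 0xfff00);

        return MKDEV(major, minor);
    }

    int main(void)
    {
        uint32_t dev = MKDEV(259, 70000);   /* arbitrary major/minor */

        assert(new_decode_dev(new_encode_dev(dev)) == dev);
        printf("major=%u minor=%u\n", MAJOR(dev), MINOR(dev));
        return 0;
    }
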
bitmap.h
    206  static __always_inline
    231  static __always_inline void bitmap_zero(unsigned long *dst, unsigned int nbits) in bitmap_zero()
    241  static __always_inline void bitmap_fill(unsigned long *dst, unsigned int nbits) in bitmap_fill()
    251  static __always_inline
    265  static __always_inline
    321  static __always_inline
    330  static __always_inline
    340  static __always_inline
    350  static __always_inline
    359  static __always_inline
    [all …]

page-flags.h
    203  static __always_inline const struct page *page_fixed_fake_head(const struct page *page) in page_fixed_fake_head()
    235  static __always_inline int page_is_fake_head(const struct page *page) in page_is_fake_head()
    240  static __always_inline unsigned long _compound_head(const struct page *page) in _compound_head()
    279  static __always_inline int PageTail(const struct page *page) in PageTail()
    284  static __always_inline int PageCompound(const struct page *page) in PageCompound()
    375  static __always_inline bool folio_test_##name(const struct folio *folio) \
    379  static __always_inline void folio_set_##name(struct folio *folio) \
    383  static __always_inline void folio_clear_##name(struct folio *folio) \
    387  static __always_inline void __folio_set_##name(struct folio *folio) \
    391  static __always_inline void __folio_clear_##name(struct folio *folio) \
    [all …]

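Note: the folio_test_##name / folio_set_##name lines above come from macro-generated accessors: one macro stamps out a test/set/clear helper trio per flag. A generic sketch of that token-pasting pattern over a plain flags word; the FOLIO_FLAG macro name, the bit number, and the struct layout below are illustrative assumptions:

    #include <stdbool.h>
    #include <stdio.h>

    struct folio { unsigned long flags; };

    /* Stamp out folio_test_/folio_set_/folio_clear_ helpers for one flag bit. */
    #define FOLIO_FLAG(name, bitnr)                                    \
    static inline bool folio_test_##name(const struct folio *folio)   \
    { return folio->flags & (1UL << (bitnr)); }                       \
    static inline void folio_set_##name(struct folio *folio)          \
    { folio->flags |= 1UL << (bitnr); }                               \
    static inline void folio_clear_##name(struct folio *folio)        \
    { folio->flags &= ~(1UL << (bitnr)); }

    FOLIO_FLAG(dirty, 4)    /* illustrative flag name and bit number */

    int main(void)
    {
        struct folio f = { 0 };

        folio_set_dirty(&f);
        printf("dirty=%d\n", folio_test_dirty(&f));
        folio_clear_dirty(&f);
        printf("dirty=%d\n", folio_test_dirty(&f));
        return 0;
    }
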
/linux/arch/x86/include/asm/

irqflags.h
    17  extern __always_inline unsigned long native_save_fl(void) in native_save_fl()
    35  static __always_inline void native_irq_disable(void) in native_irq_disable()
    40  static __always_inline void native_irq_enable(void) in native_irq_enable()
    45  static __always_inline void native_safe_halt(void) in native_safe_halt()
    51  static __always_inline void native_halt(void) in native_halt()
    57  static __always_inline int native_irqs_disabled_flags(unsigned long flags) in native_irqs_disabled_flags()
    62  static __always_inline unsigned long native_local_irq_save(void) in native_local_irq_save()
    71  static __always_inline void native_local_irq_restore(unsigned long flags) in native_local_irq_restore()
    85  static __always_inline unsigned long arch_local_save_flags(void) in arch_local_save_flags()
    90  static __always_inline void arch_local_irq_disable(void) in arch_local_irq_disable()
    [all …]

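Note: native_save_fl() above reads RFLAGS so the arch_local_* helpers can tell whether interrupts are masked. A user-space, x86-only sketch of the same pushf/pop idiom (IF is bit 9 of RFLAGS); this shows the technique, not the kernel's exact source:

    #include <stdio.h>

    #define X86_EFLAGS_IF (1UL << 9)    /* Interrupt Flag */

    /* Read the current RFLAGS value via pushf/pop, as the kernel helper does. */
    static inline unsigned long native_save_fl(void)
    {
        unsigned long flags;

        asm volatile("pushf ; pop %0"
                     : "=rm" (flags)
                     :
                     : "memory");
        return flags;
    }

    static inline int irqs_disabled_flags(unsigned long flags)
    {
        return !(flags & X86_EFLAGS_IF);
    }

    int main(void)
    {
        unsigned long flags = native_save_fl();

        /* In user space IF is always set; in the kernel this check
         * distinguishes irqs-on from irqs-off sections. */
        printf("RFLAGS=%#lx, interrupts %s\n", flags,
               irqs_disabled_flags(flags) ? "disabled" : "enabled");
        return 0;
    }
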
atomic.h
    17  static __always_inline int arch_atomic_read(const atomic_t *v) in arch_atomic_read()
    26  static __always_inline void arch_atomic_set(atomic_t *v, int i) in arch_atomic_set()
    31  static __always_inline void arch_atomic_add(int i, atomic_t *v) in arch_atomic_add()
    38  static __always_inline void arch_atomic_sub(int i, atomic_t *v) in arch_atomic_sub()
    45  static __always_inline bool arch_atomic_sub_and_test(int i, atomic_t *v) in arch_atomic_sub_and_test()
    51  static __always_inline void arch_atomic_inc(atomic_t *v) in arch_atomic_inc()
    58  static __always_inline void arch_atomic_dec(atomic_t *v) in arch_atomic_dec()
    65  static __always_inline bool arch_atomic_dec_and_test(atomic_t *v) in arch_atomic_dec_and_test()
    71  static __always_inline bool arch_atomic_inc_and_test(atomic_t *v) in arch_atomic_inc_and_test()
    77  static __always_inline bool arch_atomic_add_negative(int i, atomic_t *v) in arch_atomic_add_negative()
    [all …]

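Note: the arch_atomic_* helpers above wrap lock-prefixed instructions around a plain int counter. A stand-alone, x86-only sketch of the pattern behind arch_atomic_read() and arch_atomic_add(); the volatile counter here is a simplification standing in for the kernel's READ_ONCE()-based access:

    #include <stdio.h>

    typedef struct { volatile int counter; } atomic_t;

    /* Read the counter; volatile stands in for the kernel's __READ_ONCE(). */
    static inline int atomic_read(const atomic_t *v)
    {
        return v->counter;
    }

    /* Add i to the counter atomically with a lock-prefixed addl. */
    static inline void atomic_add(int i, atomic_t *v)
    {
        asm volatile("lock; addl %1,%0"
                     : "+m" (v->counter)
                     : "ir" (i)
                     : "memory");
    }

    int main(void)
    {
        atomic_t v = { .counter = 40 };

        atomic_add(2, &v);
        printf("counter = %d\n", atomic_read(&v));
        return 0;
    }
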
atomic64_64.h
    13  static __always_inline s64 arch_atomic64_read(const atomic64_t *v) in arch_atomic64_read()
    18  static __always_inline void arch_atomic64_set(atomic64_t *v, s64 i) in arch_atomic64_set()
    23  static __always_inline void arch_atomic64_add(s64 i, atomic64_t *v) in arch_atomic64_add()
    30  static __always_inline void arch_atomic64_sub(s64 i, atomic64_t *v) in arch_atomic64_sub()
    37  static __always_inline bool arch_atomic64_sub_and_test(s64 i, atomic64_t *v) in arch_atomic64_sub_and_test()
    43  static __always_inline void arch_atomic64_inc(atomic64_t *v) in arch_atomic64_inc()
    51  static __always_inline void arch_atomic64_dec(atomic64_t *v) in arch_atomic64_dec()
    59  static __always_inline bool arch_atomic64_dec_and_test(atomic64_t *v) in arch_atomic64_dec_and_test()
    65  static __always_inline bool arch_atomic64_inc_and_test(atomic64_t *v) in arch_atomic64_inc_and_test()
    71  static __always_inline bool arch_atomic64_add_negative(s64 i, atomic64_t *v) in arch_atomic64_add_negative()
    [all …]

atomic64_32.h
    37  static __always_inline s64 arch_atomic64_read_nonatomic(const atomic64_t *v) in arch_atomic64_read_nonatomic()
    91  static __always_inline s64 arch_atomic64_cmpxchg(atomic64_t *v, s64 old, s64 new) in arch_atomic64_cmpxchg()
    97  static __always_inline bool arch_atomic64_try_cmpxchg(atomic64_t *v, s64 *old, s64 new) in arch_atomic64_try_cmpxchg()
    103  static __always_inline s64 arch_atomic64_xchg(atomic64_t *v, s64 n) in arch_atomic64_xchg()
    115  static __always_inline void arch_atomic64_set(atomic64_t *v, s64 i) in arch_atomic64_set()
    124  static __always_inline s64 arch_atomic64_read(const atomic64_t *v) in arch_atomic64_read()
    131  static __always_inline s64 arch_atomic64_add_return(s64 i, atomic64_t *v) in arch_atomic64_add_return()
    140  static __always_inline s64 arch_atomic64_sub_return(s64 i, atomic64_t *v) in arch_atomic64_sub_return()
    149  static __always_inline s64 arch_atomic64_inc_return(atomic64_t *v) in arch_atomic64_inc_return()
    158  static __always_inline s64 arch_atomic64_dec_return(atomic64_t *v) in arch_atomic64_dec_return()
    [all …]

/linux/arch/s390/include/asm/

fpu-insn.h
    39  static __always_inline void fpu_cefbr(u8 f1, s32 val) in fpu_cefbr()
    47  static __always_inline unsigned long fpu_cgebr(u8 f2, u8 mode) in fpu_cgebr()
    58  static __always_inline void fpu_debr(u8 f1, u8 f2) in fpu_debr()
    66  static __always_inline void fpu_ld(unsigned short fpr, freg_t *reg) in fpu_ld()
    75  static __always_inline void fpu_ldgr(u8 f1, u32 val) in fpu_ldgr()
    83  static __always_inline void fpu_lfpc(unsigned int *fpc) in fpu_lfpc()
    120  static __always_inline void fpu_std(unsigned short fpr, freg_t *reg) in fpu_std()
    129  static __always_inline void fpu_sfpc(unsigned int fpc) in fpu_sfpc()
    137  static __always_inline void fpu_stfpc(unsigned int *fpc) in fpu_stfpc()
    146  static __always_inline void fpu_vab(u8 v1, u8 v2, u8 v3) in fpu_vab()
    [all …]

processor.h
    58  static __always_inline struct pcpu *this_pcpu(void) in this_pcpu()
    63  static __always_inline void set_cpu_flag(int flag) in set_cpu_flag()
    68  static __always_inline void clear_cpu_flag(int flag) in clear_cpu_flag()
    73  static __always_inline bool test_cpu_flag(int flag) in test_cpu_flag()
    78  static __always_inline bool test_and_set_cpu_flag(int flag) in test_and_set_cpu_flag()
    86  static __always_inline bool test_and_clear_cpu_flag(int flag) in test_and_clear_cpu_flag()
    98  static __always_inline bool test_cpu_flag_of(int flag, int cpu) in test_cpu_flag_of()
    110  static __always_inline unsigned long get_cpu_timer(void) in get_cpu_timer()
    145  static __always_inline void __stackleak_poison(unsigned long erase_low, in __stackleak_poison()
    274  static __always_inline unsigned long __current_stack_pointer(void) in __current_stack_pointer()
    [all …]

/linux/arch/arm64/include/asm/

irqflags.h
    23  static __always_inline void __daif_local_irq_enable(void) in __daif_local_irq_enable()
    30  static __always_inline void __pmr_local_irq_enable(void) in __pmr_local_irq_enable()
    52  static __always_inline void __daif_local_irq_disable(void) in __daif_local_irq_disable()
    59  static __always_inline void __pmr_local_irq_disable(void) in __pmr_local_irq_disable()
    80  static __always_inline unsigned long __daif_local_save_flags(void) in __daif_local_save_flags()
    85  static __always_inline unsigned long __pmr_local_save_flags(void) in __pmr_local_save_flags()
    102  static __always_inline bool __daif_irqs_disabled_flags(unsigned long flags) in __daif_irqs_disabled_flags()
    107  static __always_inline bool __pmr_irqs_disabled_flags(unsigned long flags) in __pmr_irqs_disabled_flags()
    121  static __always_inline bool __daif_irqs_disabled(void) in __daif_irqs_disabled()
    126  static __always_inline bool __pmr_irqs_disabled(void) in __pmr_irqs_disabled()
    [all …]

atomic_lse.h
    14  static __always_inline void \
    29  static __always_inline void __lse_atomic_sub(int i, atomic_t *v) in ATOMIC_OP()
    37  static __always_inline int \
    68  static __always_inline int \ in ATOMIC_FETCH_OPS()
    82  static __always_inline int \
    88  static __always_inline int \
    101  static __always_inline void __lse_atomic_and(int i, atomic_t *v)
    107  static __always_inline int \
    121  static __always_inline void \
    136  static __always_inline void __lse_atomic64_sub(s64 i, atomic64_t *v) in ATOMIC64_OP()
    [all …]

/linux/arch/x86/kvm/vmx/

vmx_onhyperv.h
    21  static __always_inline bool kvm_is_using_evmcs(void) in kvm_is_using_evmcs()
    26  static __always_inline int get_evmcs_offset(unsigned long field, in get_evmcs_offset()
    35  static __always_inline void evmcs_write64(unsigned long field, u64 value) in evmcs_write64()
    48  static __always_inline void evmcs_write32(unsigned long field, u32 value) in evmcs_write32()
    60  static __always_inline void evmcs_write16(unsigned long field, u16 value) in evmcs_write16()
    72  static __always_inline u64 evmcs_read64(unsigned long field) in evmcs_read64()
    82  static __always_inline u32 evmcs_read32(unsigned long field) in evmcs_read32()
    92  static __always_inline u16 evmcs_read16(unsigned long field) in evmcs_read16()
    123  static __always_inline bool kvm_is_using_evmcs(void) { return false; } in kvm_is_using_evmcs()
    124  static __always_inline void evmcs_write64(unsigned long field, u64 value) {} in evmcs_write64()
    [all …]

/linux/arch/powerpc/include/asm/nohash/

kup-booke.h
    21  static __always_inline void __kuap_lock(void) in __kuap_lock()
    28  static __always_inline void __kuap_save_and_lock(struct pt_regs *regs) in __kuap_save_and_lock()
    36  static __always_inline void kuap_user_restore(struct pt_regs *regs) in kuap_user_restore()
    46  static __always_inline void __kuap_kernel_restore(struct pt_regs *regs, unsigned long kuap) in __kuap_kernel_restore()
    55  static __always_inline unsigned long __kuap_get_and_assert_locked(void) in __kuap_get_and_assert_locked()
    64  static __always_inline void uaccess_begin_booke(unsigned long val) in uaccess_begin_booke()
    70  static __always_inline void uaccess_end_booke(void) in uaccess_end_booke()
    76  static __always_inline void allow_user_access(void __user *to, const void __user *from, in allow_user_access()
    82  static __always_inline void prevent_user_access(unsigned long dir) in prevent_user_access()
    87  static __always_inline unsigned long prevent_user_access_return(void) in prevent_user_access_return()
    [all …]

/linux/arch/powerpc/include/asm/nohash/32/

kup-8xx.h
    14  static __always_inline void __kuap_save_and_lock(struct pt_regs *regs) in __kuap_save_and_lock()
    21  static __always_inline void kuap_user_restore(struct pt_regs *regs) in kuap_user_restore()
    25  static __always_inline void __kuap_kernel_restore(struct pt_regs *regs, unsigned long kuap) in __kuap_kernel_restore()
    31  static __always_inline unsigned long __kuap_get_and_assert_locked(void) in __kuap_get_and_assert_locked()
    40  static __always_inline void uaccess_begin_8xx(unsigned long val) in uaccess_begin_8xx()
    46  static __always_inline void uaccess_end_8xx(void) in uaccess_end_8xx()
    52  static __always_inline void allow_user_access(void __user *to, const void __user *from, in allow_user_access()
    58  static __always_inline void prevent_user_access(unsigned long dir) in prevent_user_access()
    63  static __always_inline unsigned long prevent_user_access_return(void) in prevent_user_access_return()
    74  static __always_inline void restore_user_access(unsigned long flags) in restore_user_access()
    [all …]

/linux/include/asm-generic/

preempt.h
    9  static __always_inline int preempt_count(void) in preempt_count()
    14  static __always_inline volatile int *preempt_count_ptr(void) in preempt_count_ptr()
    19  static __always_inline void preempt_count_set(int pc) in preempt_count_set()
    35  static __always_inline void set_preempt_need_resched(void) in set_preempt_need_resched()
    39  static __always_inline void clear_preempt_need_resched(void) in clear_preempt_need_resched()
    43  static __always_inline bool test_preempt_need_resched(void) in test_preempt_need_resched()
    52  static __always_inline void __preempt_count_add(int val) in __preempt_count_add()
    57  static __always_inline void __preempt_count_sub(int val) in __preempt_count_sub()
    62  static __always_inline bool __preempt_count_dec_and_test(void) in __preempt_count_dec_and_test()
    75  static __always_inline bool should_resched(int preempt_offset) in should_resched()

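Note: the asm-generic preempt.h helpers above keep the preemption count in thread_info and update it with plain loads and stores. A hedged user-space sketch of that counting logic, with a thread-local variable standing in for current_thread_info()->preempt_count and a boolean standing in for tif_need_resched():

    #include <stdbool.h>
    #include <stdio.h>

    /* Stand-in for current_thread_info()->preempt_count. */
    static __thread int preempt_count_var;
    /* Stand-in for tif_need_resched(). */
    static __thread bool need_resched_flag;

    static inline int preempt_count(void)          { return preempt_count_var; }
    static inline void __preempt_count_add(int v)  { preempt_count_var += v; }
    static inline void __preempt_count_sub(int v)  { preempt_count_var -= v; }

    /* True when the count drops to zero and a reschedule was requested. */
    static inline bool __preempt_count_dec_and_test(void)
    {
        return !--preempt_count_var && need_resched_flag;
    }

    int main(void)
    {
        need_resched_flag = true;

        __preempt_count_add(1);                 /* e.g. preempt_disable() */
        printf("count=%d\n", preempt_count());
        printf("resched now? %s\n",
               __preempt_count_dec_and_test() ? "yes" : "no");
        return 0;
    }
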
pgtable_uffd.h
    5  static __always_inline int pte_uffd_wp(pte_t pte) in pte_uffd_wp()
    10  static __always_inline int pmd_uffd_wp(pmd_t pmd) in pmd_uffd_wp()
    15  static __always_inline pte_t pte_mkuffd_wp(pte_t pte) in pte_mkuffd_wp()
    20  static __always_inline pmd_t pmd_mkuffd_wp(pmd_t pmd) in pmd_mkuffd_wp()
    25  static __always_inline pte_t pte_clear_uffd_wp(pte_t pte) in pte_clear_uffd_wp()
    30  static __always_inline pmd_t pmd_clear_uffd_wp(pmd_t pmd) in pmd_clear_uffd_wp()
    35  static __always_inline pte_t pte_swp_mkuffd_wp(pte_t pte) in pte_swp_mkuffd_wp()
    40  static __always_inline int pte_swp_uffd_wp(pte_t pte) in pte_swp_uffd_wp()
    45  static __always_inline pte_t pte_swp_clear_uffd_wp(pte_t pte) in pte_swp_clear_uffd_wp()

/linux/arch/powerpc/include/asm/book3s/32/

kup.h
    18  static __always_inline void kuap_lock_one(unsigned long addr) in kuap_lock_one()
    24  static __always_inline void kuap_unlock_one(unsigned long addr) in kuap_unlock_one()
    30  static __always_inline void uaccess_begin_32s(unsigned long addr) in uaccess_begin_32s()
    44  static __always_inline void uaccess_end_32s(unsigned long addr) in uaccess_end_32s()
    58  static __always_inline void __kuap_save_and_lock(struct pt_regs *regs) in __kuap_save_and_lock()
    71  static __always_inline void kuap_user_restore(struct pt_regs *regs) in kuap_user_restore()
    75  static __always_inline void __kuap_kernel_restore(struct pt_regs *regs, unsigned long kuap) in __kuap_kernel_restore()
    90  static __always_inline unsigned long __kuap_get_and_assert_locked(void) in __kuap_get_and_assert_locked()
    100  static __always_inline void allow_user_access(void __user *to, const void __user *from, in allow_user_access()
    112  static __always_inline void prevent_user_access(unsigned long dir) in prevent_user_access()
    [all …]

/linux/arch/powerpc/include/asm/vdso/

gettimeofday.h
    27  static __always_inline int do_syscall_2(const unsigned long _r0, const unsigned long _r3, in do_syscall_2()
    47  static __always_inline
    55  static __always_inline
    61  static __always_inline
    71  static __always_inline
    77  static __always_inline
    83  static __always_inline
    89  static __always_inline
    96  static __always_inline u64 __arch_get_hw_counter(s32 clock_mode, in __arch_get_hw_counter()
    105  static __always_inline
    [all …]

/linux/include/asm-generic/bitops/

instrumented-non-atomic.h
    25  static __always_inline void
    41  static __always_inline void
    57  static __always_inline void
    64  static __always_inline void __instrument_read_write_bitop(long nr, volatile unsigned long *addr) in __instrument_read_write_bitop()
    96  static __always_inline bool
    111  static __always_inline bool
    126  static __always_inline bool
    138  static __always_inline bool
    150  static __always_inline bool

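Note: the instrumented-non-atomic.h wrappers above report the touched word to KASAN/KCSAN before delegating to the arch bitop. A toy sketch of that wrap-then-delegate pattern; the printing instrument_write() stub and the arch___set_bit() body are assumptions for illustration:

    #include <stdio.h>

    #define BITS_PER_LONG (8 * sizeof(unsigned long))
    #define BIT_WORD(nr)  ((nr) / BITS_PER_LONG)
    #define BIT_MASK(nr)  (1UL << ((nr) % BITS_PER_LONG))

    /* Stand-in for the KASAN/KCSAN hook; the real one records the access. */
    static inline void instrument_write(const volatile void *addr, size_t size)
    {
        printf("instrumented write of %zu bytes at %p\n", size, (void *)addr);
    }

    /* Uninstrumented, non-atomic bit set, as an arch helper would provide. */
    static inline void arch___set_bit(unsigned long nr, volatile unsigned long *addr)
    {
        addr[BIT_WORD(nr)] |= BIT_MASK(nr);
    }

    /* Instrumented wrapper: report the exact word touched, then do the work. */
    static inline void ___set_bit(unsigned long nr, volatile unsigned long *addr)
    {
        instrument_write(addr + BIT_WORD(nr), sizeof(long));
        arch___set_bit(nr, addr);
    }

    int main(void)
    {
        unsigned long map[4] = { 0 };

        ___set_bit(70, map);
        printf("map[%zu] = %#lx\n", (size_t)BIT_WORD(70), map[BIT_WORD(70)]);
        return 0;
    }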