| /Linux-v5.15/include/linux/atomic/ |
| D | atomic-long.h |
|   26 static __always_inline long
|   32 static __always_inline long
|   38 static __always_inline void
|   44 static __always_inline void
|   50 static __always_inline void
|   56 static __always_inline long
|   62 static __always_inline long
|   68 static __always_inline long
|   74 static __always_inline long
|   80 static __always_inline long
|   [all …]
|
| D | atomic-instrumented.h |
|   24 static __always_inline int
|   31 static __always_inline int
|   38 static __always_inline void
|   45 static __always_inline void
|   52 static __always_inline void
|   59 static __always_inline int
|   66 static __always_inline int
|   73 static __always_inline int
|   80 static __always_inline int
|   87 static __always_inline int
|   [all …]
|
| D | atomic-arch-fallback.h |
|   151 static __always_inline int
|   160 static __always_inline void
|   175 static __always_inline int
|   186 static __always_inline int
|   196 static __always_inline int
|   217 static __always_inline int
|   228 static __always_inline int
|   238 static __always_inline int
|   259 static __always_inline int
|   270 static __always_inline int
|   [all …]
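The fallback header derives atomic operations an architecture does not implement natively from the ones it does provide, typically by looping on a compare-and-exchange. A minimal user-space sketch of that derivation pattern using C11 <stdatomic.h> instead of the kernel's arch_atomic_* primitives; the demo_* names are hypothetical stand-ins, not kernel symbols.

    #include <stdatomic.h>
    #include <stdbool.h>
    #include <stdio.h>

    /* Add @a to *@v unless *@v == @u; return the old value (cmpxchg-loop pattern). */
    static int demo_fetch_add_unless(atomic_int *v, int a, int u)
    {
        int c = atomic_load(v);

        do {
            if (c == u)
                break;
            /* On failure, compare_exchange reloads the current value into c. */
        } while (!atomic_compare_exchange_weak(v, &c, c + a));

        return c;
    }

    int main(void)
    {
        atomic_int v = 5;

        printf("old=%d new=%d\n", demo_fetch_add_unless(&v, 1, 5), atomic_load(&v)); /* 5 5 */
        printf("old=%d new=%d\n", demo_fetch_add_unless(&v, 1, 0), atomic_load(&v)); /* 5 6 */
        return 0;
    }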
|
| /Linux-v5.15/arch/x86/include/asm/ |
| D | atomic.h |
|   23 static __always_inline int arch_atomic_read(const atomic_t *v) in arch_atomic_read()
|   39 static __always_inline void arch_atomic_set(atomic_t *v, int i) in arch_atomic_set()
|   51 static __always_inline void arch_atomic_add(int i, atomic_t *v) in arch_atomic_add()
|   65 static __always_inline void arch_atomic_sub(int i, atomic_t *v) in arch_atomic_sub()
|   81 static __always_inline bool arch_atomic_sub_and_test(int i, atomic_t *v) in arch_atomic_sub_and_test()
|   93 static __always_inline void arch_atomic_inc(atomic_t *v) in arch_atomic_inc()
|   106 static __always_inline void arch_atomic_dec(atomic_t *v) in arch_atomic_dec()
|   121 static __always_inline bool arch_atomic_dec_and_test(atomic_t *v) in arch_atomic_dec_and_test()
|   135 static __always_inline bool arch_atomic_inc_and_test(atomic_t *v) in arch_atomic_inc_and_test()
|   150 static __always_inline bool arch_atomic_add_negative(int i, atomic_t *v) in arch_atomic_add_negative()
|   [all …]
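The *_and_test and add_negative helpers report a property of the value produced by the atomic update. A hedged user-space sketch of those semantics using C11 atomics rather than the kernel's LOCK-prefixed assembly; the demo_* names are hypothetical.

    #include <stdatomic.h>
    #include <stdbool.h>
    #include <assert.h>

    /* True iff the value reaches zero after subtracting i, as one atomic step. */
    static bool demo_sub_and_test(int i, atomic_int *v)
    {
        return atomic_fetch_sub(v, i) - i == 0;
    }

    /* True iff the value becomes negative after adding i. */
    static bool demo_add_negative(int i, atomic_int *v)
    {
        return atomic_fetch_add(v, i) + i < 0;
    }

    int main(void)
    {
        atomic_int v = 2;

        assert(!demo_sub_and_test(1, &v));  /* 2 -> 1 */
        assert(demo_sub_and_test(1, &v));   /* 1 -> 0 */
        assert(demo_add_negative(-3, &v));  /* 0 -> -3 */
        return 0;
    }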
|
| D | bitops.h |
|   51 static __always_inline void
|   65 static __always_inline void
|   71 static __always_inline void
|   84 static __always_inline void
|   91 static __always_inline void
|   97 static __always_inline bool
|   110 static __always_inline void
|   116 static __always_inline void
|   122 static __always_inline void
|   135 static __always_inline bool
|   [all …]
|
| D | irqflags.h |
|   20 extern __always_inline unsigned long native_save_fl(void) in native_save_fl()
|   38 static __always_inline void native_irq_disable(void) in native_irq_disable()
|   43 static __always_inline void native_irq_enable(void) in native_irq_enable()
|   68 static __always_inline unsigned long arch_local_save_flags(void) in arch_local_save_flags()
|   73 static __always_inline void arch_local_irq_disable(void) in arch_local_irq_disable()
|   78 static __always_inline void arch_local_irq_enable(void) in arch_local_irq_enable()
|   104 static __always_inline unsigned long arch_local_irq_save(void) in arch_local_irq_save()
|   125 static __always_inline int arch_irqs_disabled_flags(unsigned long flags) in arch_irqs_disabled_flags()
|   130 static __always_inline int arch_irqs_disabled(void) in arch_irqs_disabled()
|   137 static __always_inline void arch_local_irq_restore(unsigned long flags) in arch_local_irq_restore()
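On x86, arch_irqs_disabled_flags() reduces to testing the interrupt-enable bit in a saved EFLAGS word. A sketch of that flags handling; the PUSHF read is unprivileged and compiles in user space on x86-64, the 0x200 mask is assumed to be X86_EFLAGS_IF, and the demo_* names are hypothetical.

    #include <stdbool.h>
    #include <stdio.h>

    #define DEMO_X86_EFLAGS_IF 0x200UL  /* interrupt-enable flag bit in EFLAGS */

    /* Read the current flags register (PUSHF/POP is legal at any privilege level). */
    static inline unsigned long demo_save_flags(void)
    {
        unsigned long flags;

        __asm__ __volatile__("pushf ; pop %0" : "=rm"(flags) : : "memory");
        return flags;
    }

    /* Interrupts count as "disabled" when the IF bit is clear in a saved flags word. */
    static inline bool demo_irqs_disabled_flags(unsigned long flags)
    {
        return !(flags & DEMO_X86_EFLAGS_IF);
    }

    int main(void)
    {
        unsigned long flags = demo_save_flags();

        /* In user space IF is normally set, so this prints "disabled: 0". */
        printf("disabled: %d\n", demo_irqs_disabled_flags(flags));
        return 0;
    }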
|
| D | preempt.h |
|   25 static __always_inline int preempt_count(void) in preempt_count()
|   30 static __always_inline void preempt_count_set(int pc) in preempt_count_set()
|   59 static __always_inline void set_preempt_need_resched(void) in set_preempt_need_resched()
|   64 static __always_inline void clear_preempt_need_resched(void) in clear_preempt_need_resched()
|   69 static __always_inline bool test_preempt_need_resched(void) in test_preempt_need_resched()
|   78 static __always_inline void __preempt_count_add(int val) in __preempt_count_add()
|   83 static __always_inline void __preempt_count_sub(int val) in __preempt_count_sub()
|   93 static __always_inline bool __preempt_count_dec_and_test(void) in __preempt_count_dec_and_test()
|   101 static __always_inline bool should_resched(int preempt_offset) in should_resched()
|
| /Linux-v5.15/include/asm-generic/ |
| D | preempt.h |
|   9 static __always_inline int preempt_count(void) in preempt_count()
|   14 static __always_inline volatile int *preempt_count_ptr(void) in preempt_count_ptr()
|   19 static __always_inline void preempt_count_set(int pc) in preempt_count_set()
|   35 static __always_inline void set_preempt_need_resched(void) in set_preempt_need_resched()
|   39 static __always_inline void clear_preempt_need_resched(void) in clear_preempt_need_resched()
|   43 static __always_inline bool test_preempt_need_resched(void) in test_preempt_need_resched()
|   52 static __always_inline void __preempt_count_add(int val) in __preempt_count_add()
|   57 static __always_inline void __preempt_count_sub(int val) in __preempt_count_sub()
|   62 static __always_inline bool __preempt_count_dec_and_test(void) in __preempt_count_dec_and_test()
|   75 static __always_inline bool should_resched(int preempt_offset) in should_resched()
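The generic preempt.h keeps a per-task nesting counter: every disable increments it, every enable decrements it, and the task only becomes preemptible again when the count returns to zero. A user-space model of that counting discipline, with a thread-local int standing in for the thread_info field and ignoring the need-resched check the real __preempt_count_dec_and_test() also folds in; demo_* names are hypothetical.

    #include <stdbool.h>
    #include <assert.h>

    /* Stand-in for the per-task preempt counter in thread_info. */
    static _Thread_local int demo_preempt_count;

    static inline void demo_preempt_disable(void)
    {
        demo_preempt_count += 1;        /* models __preempt_count_add(1) */
    }

    /* Returns true when the count just hit zero, i.e. we became preemptible again. */
    static inline bool demo_preempt_enable(void)
    {
        demo_preempt_count -= 1;        /* models __preempt_count_sub(1) */
        return demo_preempt_count == 0;
    }

    int main(void)
    {
        demo_preempt_disable();
        demo_preempt_disable();          /* nesting is allowed */
        assert(!demo_preempt_enable());  /* still inside the outer section */
        assert(demo_preempt_enable());   /* count hit zero: reschedule point */
        return 0;
    }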
|
| D | pgtable_uffd.h |
|   5 static __always_inline int pte_uffd_wp(pte_t pte) in pte_uffd_wp()
|   10 static __always_inline int pmd_uffd_wp(pmd_t pmd) in pmd_uffd_wp()
|   15 static __always_inline pte_t pte_mkuffd_wp(pte_t pte) in pte_mkuffd_wp()
|   20 static __always_inline pmd_t pmd_mkuffd_wp(pmd_t pmd) in pmd_mkuffd_wp()
|   25 static __always_inline pte_t pte_clear_uffd_wp(pte_t pte) in pte_clear_uffd_wp()
|   30 static __always_inline pmd_t pmd_clear_uffd_wp(pmd_t pmd) in pmd_clear_uffd_wp()
|   35 static __always_inline pte_t pte_swp_mkuffd_wp(pte_t pte) in pte_swp_mkuffd_wp()
|   40 static __always_inline int pte_swp_uffd_wp(pte_t pte) in pte_swp_uffd_wp()
|   45 static __always_inline pte_t pte_swp_clear_uffd_wp(pte_t pte) in pte_swp_clear_uffd_wp()
|
| D | qspinlock.h |
|   22 static __always_inline int queued_spin_is_locked(struct qspinlock *lock) in queued_spin_is_locked()
|   42 static __always_inline int queued_spin_value_unlocked(struct qspinlock lock) in queued_spin_value_unlocked()
|   52 static __always_inline int queued_spin_is_contended(struct qspinlock *lock) in queued_spin_is_contended()
|   61 static __always_inline int queued_spin_trylock(struct qspinlock *lock) in queued_spin_trylock()
|   78 static __always_inline void queued_spin_lock(struct qspinlock *lock) in queued_spin_lock()
|   94 static __always_inline void queued_spin_unlock(struct qspinlock *lock) in queued_spin_unlock()
|   104 static __always_inline bool virt_spin_lock(struct qspinlock *lock) in virt_spin_lock()
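The queued spinlock fast path is a single compare-and-swap on the lock word: trylock succeeds only if the word was zero, and unlock is a release store of zero. A user-space sketch of that fast path with C11 atomics, leaving out the MCS queueing of the contended slow path; demo_* names are hypothetical.

    #include <stdatomic.h>
    #include <stdbool.h>
    #include <assert.h>

    struct demo_spinlock {
        atomic_uint val;    /* 0 = unlocked, nonzero = locked/contended */
    };

    static bool demo_spin_trylock(struct demo_spinlock *lock)
    {
        unsigned int old = 0;

        /* Take the lock only if nobody holds it; acquire ordering on success. */
        return atomic_compare_exchange_strong_explicit(&lock->val, &old, 1,
                                                       memory_order_acquire,
                                                       memory_order_relaxed);
    }

    static bool demo_spin_is_locked(struct demo_spinlock *lock)
    {
        return atomic_load_explicit(&lock->val, memory_order_relaxed) != 0;
    }

    static void demo_spin_unlock(struct demo_spinlock *lock)
    {
        /* Release store pairs with the acquire in trylock. */
        atomic_store_explicit(&lock->val, 0, memory_order_release);
    }

    int main(void)
    {
        struct demo_spinlock lock = { .val = 0 };

        assert(demo_spin_trylock(&lock));
        assert(demo_spin_is_locked(&lock));
        assert(!demo_spin_trylock(&lock));  /* already held */
        demo_spin_unlock(&lock);
        assert(demo_spin_trylock(&lock));
        return 0;
    }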
|
| /Linux-v5.15/include/linux/ |
| D | rwlock_rt.h |
|   34 static __always_inline void read_lock(rwlock_t *rwlock) in read_lock()
|   39 static __always_inline void read_lock_bh(rwlock_t *rwlock) in read_lock_bh()
|   45 static __always_inline void read_lock_irq(rwlock_t *rwlock) in read_lock_irq()
|   59 static __always_inline void read_unlock(rwlock_t *rwlock) in read_unlock()
|   64 static __always_inline void read_unlock_bh(rwlock_t *rwlock) in read_unlock_bh()
|   70 static __always_inline void read_unlock_irq(rwlock_t *rwlock) in read_unlock_irq()
|   75 static __always_inline void read_unlock_irqrestore(rwlock_t *rwlock, in read_unlock_irqrestore()
|   81 static __always_inline void write_lock(rwlock_t *rwlock) in write_lock()
|   86 static __always_inline void write_lock_bh(rwlock_t *rwlock) in write_lock_bh()
|   92 static __always_inline void write_lock_irq(rwlock_t *rwlock) in write_lock_irq()
|   [all …]
|
| D | kdev_t.h |
|   24 static __always_inline bool old_valid_dev(dev_t dev) in old_valid_dev()
|   29 static __always_inline u16 old_encode_dev(dev_t dev) in old_encode_dev()
|   34 static __always_inline dev_t old_decode_dev(u16 val) in old_decode_dev()
|   39 static __always_inline u32 new_encode_dev(dev_t dev) in new_encode_dev()
|   46 static __always_inline dev_t new_decode_dev(u32 dev) in new_decode_dev()
|   53 static __always_inline u64 huge_encode_dev(dev_t dev) in huge_encode_dev()
|   58 static __always_inline dev_t huge_decode_dev(u64 dev) in huge_decode_dev()
|   63 static __always_inline int sysv_valid_dev(dev_t dev) in sysv_valid_dev()
|   68 static __always_inline u32 sysv_encode_dev(dev_t dev) in sysv_encode_dev()
|   73 static __always_inline unsigned sysv_major(u32 dev) in sysv_major()
|   [all …]
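new_encode_dev()/new_decode_dev() pack a 12-bit major and 20-bit minor into a u32 while keeping the low byte of the minor in the low byte of the result, for compatibility with the old 8:8 format. A user-space round-trip sketch of that packing with local stand-ins for the kernel's MAJOR()/MINOR() macros; confirm the exact shifts against the header itself.

    #include <assert.h>
    #include <stdint.h>

    /* Kernel-internal dev_t layout: 12-bit major, 20-bit minor. */
    #define DEMO_MAJOR(dev)     ((uint32_t)((dev) >> 20))
    #define DEMO_MINOR(dev)     ((uint32_t)((dev) & 0xfffff))
    #define DEMO_MKDEV(ma, mi)  ((((uint32_t)(ma)) << 20) | (mi))

    static uint32_t demo_new_encode_dev(uint32_t dev)
    {
        uint32_t major = DEMO_MAJOR(dev), minor = DEMO_MINOR(dev);

        /* Low byte of minor stays put; major sits at bits 8..19; rest of minor above. */
        return (minor & 0xff) | (major << 8) | ((minor & ~0xffu) << 12);
    }

    static uint32_t demo_new_decode_dev(uint32_t dev)
    {
        uint32_t major = (dev & 0xfff00) >> 8;
        uint32_t minor = (dev & 0xff) | ((dev >> 12) & 0xfff00);

        return DEMO_MKDEV(major, minor);
    }

    int main(void)
    {
        uint32_t dev = DEMO_MKDEV(259, 123456);  /* arbitrary sample device number */

        assert(demo_new_decode_dev(demo_new_encode_dev(dev)) == dev);
        return 0;
    }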
|
| D | kasan.h |
|   87 static __always_inline bool kasan_enabled(void) in kasan_enabled()
|   112 static __always_inline void kasan_alloc_pages(struct page *page, in kasan_alloc_pages()
|   119 static __always_inline void kasan_free_pages(struct page *page, in kasan_free_pages()
|   137 static __always_inline slab_flags_t kasan_never_merge(void) in kasan_never_merge()
|   145 static __always_inline void kasan_unpoison_range(const void *addr, size_t size) in kasan_unpoison_range()
|   152 static __always_inline void kasan_poison_pages(struct page *page, in kasan_poison_pages()
|   160 static __always_inline void kasan_unpoison_pages(struct page *page, in kasan_unpoison_pages()
|   169 static __always_inline void kasan_cache_create(struct kmem_cache *cache, in kasan_cache_create()
|   177 static __always_inline void kasan_cache_create_kmalloc(struct kmem_cache *cache) in kasan_cache_create_kmalloc()
|   184 static __always_inline size_t kasan_metadata_size(struct kmem_cache *cache) in kasan_metadata_size()
|   [all …]
|
| D | instrumented.h |
|   24 static __always_inline void instrument_read(const volatile void *v, size_t size) in instrument_read()
|   39 static __always_inline void instrument_write(const volatile void *v, size_t size) in instrument_write()
|   54 static __always_inline void instrument_read_write(const volatile void *v, size_t size) in instrument_read_write()
|   69 static __always_inline void instrument_atomic_read(const volatile void *v, size_t size) in instrument_atomic_read()
|   84 static __always_inline void instrument_atomic_write(const volatile void *v, size_t size) in instrument_atomic_write()
|   99 static __always_inline void instrument_atomic_read_write(const volatile void *v, size_t size) in instrument_atomic_read_write()
|   115 static __always_inline void
|   132 static __always_inline void
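These helpers are thin shims that tell the sanitizers about a memory access before the caller performs it. A sketch of that wrapper pattern with tracing stand-in hooks, since the real KASAN/KCSAN check calls only exist in-kernel; every demo_* name here is hypothetical.

    #include <stddef.h>
    #include <stdio.h>

    /* Stand-ins for the in-kernel sanitizer hooks; here they just trace. */
    static void demo_asan_check_read(const volatile void *v, size_t size)
    {
        fprintf(stderr, "read  %zu bytes at %p\n", size, (void *)v);
    }

    static void demo_csan_check_read(const volatile void *v, size_t size)
    {
        fprintf(stderr, "race-check read of %zu bytes at %p\n", size, (void *)v);
    }

    /* The wrapper: report the access to both checkers, then let the caller do it. */
    static inline void demo_instrument_read(const volatile void *v, size_t size)
    {
        demo_asan_check_read(v, size);
        demo_csan_check_read(v, size);
    }

    int main(void)
    {
        int x = 42;

        demo_instrument_read(&x, sizeof(x));  /* would precede the actual load */
        printf("%d\n", x);
        return 0;
    }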
|
| /Linux-v5.15/arch/arm64/include/asm/ |
| D | kvm_emulate.h |
|   44 static __always_inline bool vcpu_el1_is_32bit(struct kvm_vcpu *vcpu) in vcpu_el1_is_32bit()
|   133 static __always_inline unsigned long *vcpu_pc(const struct kvm_vcpu *vcpu) in vcpu_pc()
|   138 static __always_inline unsigned long *vcpu_cpsr(const struct kvm_vcpu *vcpu) in vcpu_cpsr()
|   143 static __always_inline bool vcpu_mode_is_32bit(const struct kvm_vcpu *vcpu) in vcpu_mode_is_32bit()
|   148 static __always_inline bool kvm_condition_valid(const struct kvm_vcpu *vcpu) in kvm_condition_valid()
|   166 static __always_inline unsigned long vcpu_get_reg(const struct kvm_vcpu *vcpu, in vcpu_get_reg()
|   172 static __always_inline void vcpu_set_reg(struct kvm_vcpu *vcpu, u8 reg_num, in vcpu_set_reg()
|   225 static __always_inline u32 kvm_vcpu_get_esr(const struct kvm_vcpu *vcpu) in kvm_vcpu_get_esr()
|   230 static __always_inline int kvm_vcpu_get_condition(const struct kvm_vcpu *vcpu) in kvm_vcpu_get_condition()
|   240 static __always_inline unsigned long kvm_vcpu_get_hfar(const struct kvm_vcpu *vcpu) in kvm_vcpu_get_hfar()
|   [all …]
|
| /Linux-v5.15/arch/s390/include/asm/ |
| D | irqflags.h |
|   35 static __always_inline void __arch_local_irq_ssm(unsigned long flags) in __arch_local_irq_ssm()
|   40 static __always_inline unsigned long arch_local_save_flags(void) in arch_local_save_flags()
|   45 static __always_inline unsigned long arch_local_irq_save(void) in arch_local_irq_save()
|   50 static __always_inline void arch_local_irq_disable(void) in arch_local_irq_disable()
|   55 static __always_inline void arch_local_irq_enable(void) in arch_local_irq_enable()
|   61 static __always_inline void arch_local_irq_restore(unsigned long flags) in arch_local_irq_restore()
|   68 static __always_inline bool arch_irqs_disabled_flags(unsigned long flags) in arch_irqs_disabled_flags()
|   73 static __always_inline bool arch_irqs_disabled(void) in arch_irqs_disabled()
|
| /Linux-v5.15/arch/powerpc/include/asm/ |
| D | kup.h |
|   81 static __always_inline void setup_kup(void)
|   87 static __always_inline void allow_read_from_user(const void __user *from, unsigned long size)
|   93 static __always_inline void allow_write_to_user(void __user *to, unsigned long size)
|   98 static __always_inline void allow_read_write_user(void __user *to, const void __user *from,
|   105 static __always_inline void prevent_read_from_user(const void __user *from, unsigned long size)
|   110 static __always_inline void prevent_write_to_user(void __user *to, unsigned long size)
|   115 static __always_inline void prevent_read_write_user(void __user *to, const void __user *from,
|   121 static __always_inline void prevent_current_access_user(void)
|   126 static __always_inline void prevent_current_read_from_user(void)
|   131 static __always_inline void prevent_current_write_to_user(void)
|
| D | cmpxchg.h |
|   85 static __always_inline unsigned long
|   101 static __always_inline unsigned long
|   118 static __always_inline unsigned long
|   134 static __always_inline unsigned long
|   151 static __always_inline unsigned long
|   170 static __always_inline unsigned long
|   215 static __always_inline unsigned long
|   237 static __always_inline unsigned long
|   258 static __always_inline unsigned long
|   285 static __always_inline unsigned long
|   [all …]
|
| /Linux-v5.15/arch/x86/kvm/ |
| D | reverse_cpuid.h |
|   61 static __always_inline void reverse_cpuid_check(unsigned int x86_leaf) in reverse_cpuid_check()
|   75 static __always_inline u32 __feature_translate(int x86_feature) in __feature_translate()
|   85 static __always_inline u32 __feature_leaf(int x86_feature) in __feature_leaf()
|   96 static __always_inline u32 __feature_bit(int x86_feature) in __feature_bit()
|   106 static __always_inline struct cpuid_reg x86_feature_cpuid(unsigned int x86_feature) in x86_feature_cpuid()
|   114 static __always_inline u32 *__cpuid_entry_get_reg(struct kvm_cpuid_entry2 *entry, in __cpuid_entry_get_reg()
|   132 static __always_inline u32 *cpuid_entry_get_reg(struct kvm_cpuid_entry2 *entry, in cpuid_entry_get_reg()
|   140 static __always_inline u32 cpuid_entry_get(struct kvm_cpuid_entry2 *entry, in cpuid_entry_get()
|   148 static __always_inline bool cpuid_entry_has(struct kvm_cpuid_entry2 *entry, in cpuid_entry_has()
|   154 static __always_inline void cpuid_entry_clear(struct kvm_cpuid_entry2 *entry, in cpuid_entry_clear()
|   [all …]
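KVM's reverse-CPUID table maps a kernel X86_FEATURE_* number back to a CPUID leaf, register, and bit. The purely arithmetic part is splitting the feature number into a 32-bit word index and a bit position; a small sketch of that split, where the demo_* names and the sample value are hypothetical.

    #include <assert.h>
    #include <stdint.h>

    /* X86_FEATURE_* values encode "word * 32 + bit" in a single integer. */
    static inline uint32_t demo_feature_word(unsigned int x86_feature)
    {
        return x86_feature / 32;                    /* which cpufeature word */
    }

    static inline uint32_t demo_feature_bit(unsigned int x86_feature)
    {
        return UINT32_C(1) << (x86_feature & 31);   /* mask within that word/register */
    }

    int main(void)
    {
        /* Hypothetical feature number: word 16, bit 5. */
        unsigned int feat = 16 * 32 + 5;

        assert(demo_feature_word(feat) == 16);
        assert(demo_feature_bit(feat) == (1u << 5));
        return 0;
    }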
|
| /Linux-v5.15/arch/x86/include/asm/vdso/ |
| D | gettimeofday.h |
|   61 static __always_inline
|   70 static __always_inline
|   82 static __always_inline
|   94 static __always_inline
|   108 static __always_inline
|   125 static __always_inline
|   142 static __always_inline
|   160 static __always_inline long
|   177 static __always_inline
|   271 static __always_inline const struct vdso_data *__arch_get_vdso_data(void) in __arch_get_vdso_data()
|   [all …]
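The vDSO read path converts a raw counter value into a timestamp without entering the kernel: it scales the delta since the last update by a mult/shift pair published in the vdso_data page. A sketch of that conversion arithmetic with made-up constants; the real code also handles sequence counting, clock modes, and counter masking, and the demo_* names are hypothetical.

    #include <stdint.h>
    #include <stdio.h>

    #define DEMO_NSEC_PER_SEC 1000000000ULL

    /* Scale a counter delta to nanoseconds: ns = (delta * mult) >> shift. */
    static uint64_t demo_cycles_to_ns(uint64_t cycles, uint64_t last,
                                      uint32_t mult, uint32_t shift)
    {
        return ((cycles - last) * mult) >> shift;
    }

    int main(void)
    {
        /* Made-up snapshot: 3 GHz counter, mult/shift chosen so 3 cycles ~= 1 ns. */
        uint64_t last = 1000000, base_ns = 500, base_sec = 1700000000;
        uint32_t mult = 5592405, shift = 24;        /* 2^24 / 3 ~= 5592405 */
        uint64_t now = last + 3000000000ULL;        /* about one second of cycles later */

        uint64_t ns = base_ns + demo_cycles_to_ns(now, last, mult, shift);
        uint64_t sec = base_sec + ns / DEMO_NSEC_PER_SEC;

        ns %= DEMO_NSEC_PER_SEC;
        printf("%llu.%09llu\n", (unsigned long long)sec, (unsigned long long)ns);
        return 0;
    }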
|
| /Linux-v5.15/arch/x86/kvm/vmx/ |
| D | vmx_ops.h |
|   22 static __always_inline void vmcs_check16(unsigned long field) in vmcs_check16()
|   34 static __always_inline void vmcs_check32(unsigned long field) in vmcs_check32()
|   46 static __always_inline void vmcs_check64(unsigned long field) in vmcs_check64()
|   58 static __always_inline void vmcs_checkl(unsigned long field) in vmcs_checkl()
|   70 static __always_inline unsigned long __vmcs_readl(unsigned long field) in __vmcs_readl()
|   106 static __always_inline u16 vmcs_read16(unsigned long field) in vmcs_read16()
|   114 static __always_inline u32 vmcs_read32(unsigned long field) in vmcs_read32()
|   122 static __always_inline u64 vmcs_read64(unsigned long field) in vmcs_read64()
|   134 static __always_inline unsigned long vmcs_readl(unsigned long field) in vmcs_readl()
|   176 static __always_inline void __vmcs_writel(unsigned long field, unsigned long value) in __vmcs_writel()
|   [all …]
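The vmcs_check*() helpers assert at compile time that the accessor width matches the width encoded in the VMCS field number (the Intel SDM places the width in bits 14:13 of the encoding: 0 = 16-bit, 1 = 64-bit, 2 = 32-bit, 3 = natural width). A run-time sketch of that same check; the kernel does it with BUILD_BUG_ON_MSG instead, and the demo_* names are hypothetical.

    #include <assert.h>
    #include <stdbool.h>

    enum demo_vmcs_width {
        DEMO_VMCS_W16  = 0,
        DEMO_VMCS_W64  = 1,
        DEMO_VMCS_W32  = 2,
        DEMO_VMCS_WNAT = 3,     /* natural width */
    };

    /* Bits 14:13 of a VMCS field encoding give the field width. */
    static inline unsigned int demo_vmcs_field_width(unsigned long field)
    {
        return (field >> 13) & 0x3;
    }

    static inline bool demo_vmcs_check16(unsigned long field)
    {
        return demo_vmcs_field_width(field) == DEMO_VMCS_W16;
    }

    int main(void)
    {
        /* 0x0800 (GUEST_ES_SELECTOR) is a 16-bit field; 0x2000 starts the 64-bit group. */
        assert(demo_vmcs_check16(0x0800));
        assert(!demo_vmcs_check16(0x2000));
        return 0;
    }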
|
| /Linux-v5.15/arch/riscv/include/asm/ |
| D | atomic.h |
|   28 static __always_inline int arch_atomic_read(const atomic_t *v) in arch_atomic_read()
|   32 static __always_inline void arch_atomic_set(atomic_t *v, int i) in arch_atomic_set()
|   39 static __always_inline s64 arch_atomic64_read(const atomic64_t *v) in arch_atomic64_read()
|   43 static __always_inline void arch_atomic64_set(atomic64_t *v, s64 i) in arch_atomic64_set()
|   55 static __always_inline \
|   89 static __always_inline \ in ATOMIC_OPS()
|   101 static __always_inline \
|   114 static __always_inline \
|   120 static __always_inline \
|   200 static __always_inline int arch_atomic_fetch_add_unless(atomic_t *v, int a, int u)
|   [all …]
|
| /Linux-v5.15/include/uapi/linux/byteorder/ |
| D | little_endian.h |
|   44 static __always_inline __le64 __cpu_to_le64p(const __u64 *p) in __cpu_to_le64p()
|   48 static __always_inline __u64 __le64_to_cpup(const __le64 *p) in __le64_to_cpup()
|   52 static __always_inline __le32 __cpu_to_le32p(const __u32 *p) in __cpu_to_le32p()
|   56 static __always_inline __u32 __le32_to_cpup(const __le32 *p) in __le32_to_cpup()
|   60 static __always_inline __le16 __cpu_to_le16p(const __u16 *p) in __cpu_to_le16p()
|   64 static __always_inline __u16 __le16_to_cpup(const __le16 *p) in __le16_to_cpup()
|   68 static __always_inline __be64 __cpu_to_be64p(const __u64 *p) in __cpu_to_be64p()
|   72 static __always_inline __u64 __be64_to_cpup(const __be64 *p) in __be64_to_cpup()
|   76 static __always_inline __be32 __cpu_to_be32p(const __u32 *p) in __cpu_to_be32p()
|   80 static __always_inline __u32 __be32_to_cpup(const __be32 *p) in __be32_to_cpup()
|   [all …]
|
| D | big_endian.h |
|   44 static __always_inline __le64 __cpu_to_le64p(const __u64 *p) in __cpu_to_le64p()
|   48 static __always_inline __u64 __le64_to_cpup(const __le64 *p) in __le64_to_cpup()
|   52 static __always_inline __le32 __cpu_to_le32p(const __u32 *p) in __cpu_to_le32p()
|   56 static __always_inline __u32 __le32_to_cpup(const __le32 *p) in __le32_to_cpup()
|   60 static __always_inline __le16 __cpu_to_le16p(const __u16 *p) in __cpu_to_le16p()
|   64 static __always_inline __u16 __le16_to_cpup(const __le16 *p) in __le16_to_cpup()
|   68 static __always_inline __be64 __cpu_to_be64p(const __u64 *p) in __cpu_to_be64p()
|   72 static __always_inline __u64 __be64_to_cpup(const __be64 *p) in __be64_to_cpup()
|   76 static __always_inline __be32 __cpu_to_be32p(const __u32 *p) in __cpu_to_be32p()
|   80 static __always_inline __u32 __be32_to_cpup(const __be32 *p) in __be32_to_cpup()
|   [all …]
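Both byteorder headers expose the same __cpu_to_* / *_to_cpup API; which direction is an identity cast and which is a byte swap depends on the host byte order. A user-space sketch of the underlying swap using the GCC/Clang __builtin_bswap64 and __BYTE_ORDER__ predefine; demo_* names are hypothetical.

    #include <assert.h>
    #include <stdint.h>

    /* Convert a CPU-order 64-bit value to little-endian wire order. */
    static inline uint64_t demo_cpu_to_le64(uint64_t x)
    {
    #if __BYTE_ORDER__ == __ORDER_LITTLE_ENDIAN__
        return x;                       /* already little-endian: identity */
    #else
        return __builtin_bswap64(x);    /* big-endian host: swap bytes */
    #endif
    }

    /* Convert a CPU-order 64-bit value to big-endian wire order. */
    static inline uint64_t demo_cpu_to_be64(uint64_t x)
    {
    #if __BYTE_ORDER__ == __ORDER_LITTLE_ENDIAN__
        return __builtin_bswap64(x);
    #else
        return x;
    #endif
    }

    int main(void)
    {
        uint64_t v = 0x0102030405060708ULL;
        uint64_t be = demo_cpu_to_be64(v);
        unsigned char *b = (unsigned char *)&be;

        /* In memory, the big-endian form starts with the most significant byte. */
        assert(b[0] == 0x01 && b[7] == 0x08);
        /* Applying the conversion twice returns the original value. */
        assert(demo_cpu_to_be64(be) == v);
        return 0;
    }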
|
| /Linux-v5.15/arch/powerpc/include/asm/vdso/ |
| D | gettimeofday.h |
|   82 static __always_inline int do_syscall_2(const unsigned long _r0, const unsigned long _r3,
|   102 static __always_inline
|   110 static __always_inline
|   116 static __always_inline
|   126 static __always_inline
|   132 static __always_inline
|   138 static __always_inline
|   144 static __always_inline
|   151 static __always_inline u64 __arch_get_hw_counter(s32 clock_mode,
|   160 static __always_inline
|   [all …]
|