Searched refs: __always_inline (Results 1 – 25 of 131), sorted by relevance
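
For context, a minimal sketch of what these references resolve to. Recent kernels define __always_inline in include/linux/compiler_types.h as a forced-inline variant of "inline" (older trees keep the definition in compiler-gcc.h); the helper and main() below are purely illustrative and are not code from the tree.

#include <stdio.h>

/* Matches the kernel's definition, modulo minor version-to-version variation.
 * The #undef is only needed in userspace, where glibc's <sys/cdefs.h> also
 * defines __always_inline. */
#undef __always_inline
#define __always_inline inline __attribute__((__always_inline__))

/* Illustrative helper: inlined even at -O0, where plain "inline" is only a hint. */
static __always_inline int add_one(int x)
{
	return x + 1;
}

int main(void)
{
	printf("%d\n", add_one(41));	/* prints 42 */
	return 0;
}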

/include/linux/atomic/
atomic-long.h
34 static __always_inline long
54 static __always_inline long
75 static __always_inline void
96 static __always_inline void
117 static __always_inline void
138 static __always_inline long
159 static __always_inline long
180 static __always_inline long
201 static __always_inline long
222 static __always_inline long
[all …]
atomic-instrumented.h
29 static __always_inline int
46 static __always_inline int
64 static __always_inline void
82 static __always_inline void
101 static __always_inline void
119 static __always_inline int
138 static __always_inline int
156 static __always_inline int
175 static __always_inline int
193 static __always_inline int
[all …]
atomic-arch-fallback.h
454 static __always_inline int
470 static __always_inline int
500 static __always_inline void
517 static __always_inline void
543 static __always_inline void
560 static __always_inline int
587 static __always_inline int
614 static __always_inline int
640 static __always_inline int
663 static __always_inline int
[all …]
/include/linux/
context_tracking_state.h
52 static __always_inline int __ct_state(void) in __ct_state()
59 static __always_inline int ct_rcu_watching(void) in ct_rcu_watching()
64 static __always_inline int ct_rcu_watching_cpu(int cpu) in ct_rcu_watching_cpu()
71 static __always_inline int ct_rcu_watching_cpu_acquire(int cpu) in ct_rcu_watching_cpu_acquire()
78 static __always_inline long ct_nesting(void) in ct_nesting()
83 static __always_inline long ct_nesting_cpu(int cpu) in ct_nesting_cpu()
90 static __always_inline long ct_nmi_nesting(void) in ct_nmi_nesting()
95 static __always_inline long ct_nmi_nesting_cpu(int cpu) in ct_nmi_nesting_cpu()
106 static __always_inline bool context_tracking_enabled(void) in context_tracking_enabled()
111 static __always_inline bool context_tracking_enabled_cpu(int cpu) in context_tracking_enabled_cpu()
[all …]
cpumask.h
33 static __always_inline void set_nr_cpu_ids(unsigned int nr) in set_nr_cpu_ids()
132 static __always_inline void cpu_max_bits_warn(unsigned int cpu, unsigned int bits) in cpu_max_bits_warn()
140 static __always_inline unsigned int cpumask_check(unsigned int cpu) in cpumask_check()
152 static __always_inline unsigned int cpumask_first(const struct cpumask *srcp) in cpumask_first()
163 static __always_inline unsigned int cpumask_first_zero(const struct cpumask *srcp) in cpumask_first_zero()
175 static __always_inline
189 static __always_inline
204 static __always_inline unsigned int cpumask_last(const struct cpumask *srcp) in cpumask_last()
216 static __always_inline
232 static __always_inline
[all …]
context_tracking.h
39 static __always_inline void user_enter_irqoff(void) in user_enter_irqoff()
45 static __always_inline void user_exit_irqoff(void) in user_exit_irqoff()
75 static __always_inline bool context_tracking_guest_enter(void) in context_tracking_guest_enter()
83 static __always_inline bool context_tracking_guest_exit(void) in context_tracking_guest_exit()
102 static __always_inline bool context_tracking_guest_enter(void) { return false; } in context_tracking_guest_enter()
103 static __always_inline bool context_tracking_guest_exit(void) { return false; } in context_tracking_guest_exit()
126 static __always_inline bool rcu_is_watching_curr_cpu(void) in rcu_is_watching_curr_cpu()
135 static __always_inline unsigned long ct_state_inc(int incby) in ct_state_inc()
140 static __always_inline bool warn_rcu_enter(void) in warn_rcu_enter()
157 static __always_inline void warn_rcu_exit(bool rcu) in warn_rcu_exit()
[all …]
kdev_t.h
24 static __always_inline bool old_valid_dev(dev_t dev) in old_valid_dev()
29 static __always_inline u16 old_encode_dev(dev_t dev) in old_encode_dev()
34 static __always_inline dev_t old_decode_dev(u16 val) in old_decode_dev()
39 static __always_inline u32 new_encode_dev(dev_t dev) in new_encode_dev()
46 static __always_inline dev_t new_decode_dev(u32 dev) in new_decode_dev()
53 static __always_inline u64 huge_encode_dev(dev_t dev) in huge_encode_dev()
58 static __always_inline dev_t huge_decode_dev(u64 dev) in huge_decode_dev()
63 static __always_inline int sysv_valid_dev(dev_t dev) in sysv_valid_dev()
68 static __always_inline u32 sysv_encode_dev(dev_t dev) in sysv_encode_dev()
73 static __always_inline unsigned sysv_major(u32 dev) in sysv_major()
[all …]
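
The kdev_t.h helpers above pack, unpack, and validate major/minor device numbers. A hedged userspace sketch of the legacy 16-bit encoding they deal with (hypothetical toy_* names; the real helpers operate on dev_t via the kernel's MAJOR()/MINOR() macros):

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

#undef __always_inline	/* glibc's <sys/cdefs.h> also defines this */
#define __always_inline inline __attribute__((__always_inline__))

/* Legacy 16-bit layout: 8-bit major in the high byte, 8-bit minor in the low byte. */
static __always_inline bool toy_old_valid_dev(unsigned int major, unsigned int minor)
{
	return major < 256 && minor < 256;
}

static __always_inline uint16_t toy_old_encode_dev(unsigned int major, unsigned int minor)
{
	return (uint16_t)((major << 8) | minor);
}

static __always_inline void toy_old_decode_dev(uint16_t val, unsigned int *major,
					       unsigned int *minor)
{
	*major = (val >> 8) & 0xff;
	*minor = val & 0xff;
}

int main(void)
{
	unsigned int major, minor;
	uint16_t packed = toy_old_encode_dev(8, 1);	/* 8:1, e.g. /dev/sda1 */

	toy_old_decode_dev(packed, &major, &minor);
	printf("valid=%d packed=0x%04x -> %u:%u\n",
	       toy_old_valid_dev(8, 1), (unsigned int)packed, major, minor);
	return 0;
}
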
rwlock_rt.h
35 static __always_inline void read_lock(rwlock_t *rwlock) in read_lock()
40 static __always_inline void read_lock_bh(rwlock_t *rwlock) in read_lock_bh()
46 static __always_inline void read_lock_irq(rwlock_t *rwlock) in read_lock_irq()
60 static __always_inline void read_unlock(rwlock_t *rwlock) in read_unlock()
65 static __always_inline void read_unlock_bh(rwlock_t *rwlock) in read_unlock_bh()
71 static __always_inline void read_unlock_irq(rwlock_t *rwlock) in read_unlock_irq()
76 static __always_inline void read_unlock_irqrestore(rwlock_t *rwlock, in read_unlock_irqrestore()
82 static __always_inline void write_lock(rwlock_t *rwlock) in write_lock()
88 static __always_inline void write_lock_nested(rwlock_t *rwlock, int subclass) in write_lock_nested()
96 static __always_inline void write_lock_bh(rwlock_t *rwlock) in write_lock_bh()
[all …]
instrumented.h
24 static __always_inline void instrument_read(const volatile void *v, size_t size) in instrument_read()
38 static __always_inline void instrument_write(const volatile void *v, size_t size) in instrument_write()
52 static __always_inline void instrument_read_write(const volatile void *v, size_t size) in instrument_read_write()
66 static __always_inline void instrument_atomic_read(const volatile void *v, size_t size) in instrument_atomic_read()
80 static __always_inline void instrument_atomic_write(const volatile void *v, size_t size) in instrument_atomic_write()
94 static __always_inline void instrument_atomic_read_write(const volatile void *v, size_t size) in instrument_atomic_read_write()
109 static __always_inline void
126 static __always_inline void
143 static __always_inline void
159 static __always_inline void instrument_memcpy_before(void *to, const void *from, in instrument_memcpy_before()
[all …]
nodemask.h
110 static __always_inline unsigned int __nodemask_pr_numnodes(const nodemask_t *m) in __nodemask_pr_numnodes()
114 static __always_inline const unsigned long *__nodemask_pr_bits(const nodemask_t *m) in __nodemask_pr_bits()
129 static __always_inline void __node_set(int node, volatile nodemask_t *dstp) in __node_set()
135 static __always_inline void __node_clear(int node, volatile nodemask_t *dstp) in __node_clear()
141 static __always_inline void __nodes_setall(nodemask_t *dstp, unsigned int nbits) in __nodes_setall()
147 static __always_inline void __nodes_clear(nodemask_t *dstp, unsigned int nbits) in __nodes_clear()
157 static __always_inline bool __node_test_and_set(int node, nodemask_t *addr) in __node_test_and_set()
164 static __always_inline void __nodes_and(nodemask_t *dstp, const nodemask_t *src1p, in __nodes_and()
172 static __always_inline void __nodes_or(nodemask_t *dstp, const nodemask_t *src1p, in __nodes_or()
180 static __always_inline void __nodes_xor(nodemask_t *dstp, const nodemask_t *src1p, in __nodes_xor()
[all …]
context_tracking_irq.h
13 static __always_inline void ct_irq_enter(void) { } in ct_irq_enter()
14 static __always_inline void ct_irq_exit(void) { } in ct_irq_exit()
17 static __always_inline void ct_nmi_enter(void) { } in ct_nmi_enter()
18 static __always_inline void ct_nmi_exit(void) { } in ct_nmi_exit()
bitmap.h
206 static __always_inline
231 static __always_inline void bitmap_zero(unsigned long *dst, unsigned int nbits) in bitmap_zero()
241 static __always_inline void bitmap_fill(unsigned long *dst, unsigned int nbits) in bitmap_fill()
251 static __always_inline
265 static __always_inline
321 static __always_inline
330 static __always_inline
340 static __always_inline
350 static __always_inline
359 static __always_inline
[all …]
/include/asm-generic/
preempt.h
9 static __always_inline int preempt_count(void) in preempt_count()
14 static __always_inline volatile int *preempt_count_ptr(void) in preempt_count_ptr()
19 static __always_inline void preempt_count_set(int pc) in preempt_count_set()
35 static __always_inline void set_preempt_need_resched(void) in set_preempt_need_resched()
39 static __always_inline void clear_preempt_need_resched(void) in clear_preempt_need_resched()
43 static __always_inline bool test_preempt_need_resched(void) in test_preempt_need_resched()
52 static __always_inline void __preempt_count_add(int val) in __preempt_count_add()
57 static __always_inline void __preempt_count_sub(int val) in __preempt_count_sub()
62 static __always_inline bool __preempt_count_dec_and_test(void) in __preempt_count_dec_and_test()
75 static __always_inline bool should_resched(int preempt_offset) in should_resched()
pgtable_uffd.h
5 static __always_inline int pte_uffd_wp(pte_t pte) in pte_uffd_wp()
10 static __always_inline int pmd_uffd_wp(pmd_t pmd) in pmd_uffd_wp()
15 static __always_inline pte_t pte_mkuffd_wp(pte_t pte) in pte_mkuffd_wp()
20 static __always_inline pmd_t pmd_mkuffd_wp(pmd_t pmd) in pmd_mkuffd_wp()
25 static __always_inline pte_t pte_clear_uffd_wp(pte_t pte) in pte_clear_uffd_wp()
30 static __always_inline pmd_t pmd_clear_uffd_wp(pmd_t pmd) in pmd_clear_uffd_wp()
35 static __always_inline pte_t pte_swp_mkuffd_wp(pte_t pte) in pte_swp_mkuffd_wp()
40 static __always_inline int pte_swp_uffd_wp(pte_t pte) in pte_swp_uffd_wp()
45 static __always_inline pte_t pte_swp_clear_uffd_wp(pte_t pte) in pte_swp_clear_uffd_wp()
qspinlock.h
51 static __always_inline int queued_spin_is_locked(struct qspinlock *lock) in queued_spin_is_locked()
71 static __always_inline int queued_spin_value_unlocked(struct qspinlock lock) in queued_spin_value_unlocked()
81 static __always_inline int queued_spin_is_contended(struct qspinlock *lock) in queued_spin_is_contended()
90 static __always_inline int queued_spin_trylock(struct qspinlock *lock) in queued_spin_trylock()
107 static __always_inline void queued_spin_lock(struct qspinlock *lock) in queued_spin_lock()
123 static __always_inline void queued_spin_unlock(struct qspinlock *lock) in queued_spin_unlock()
133 static __always_inline bool virt_spin_lock(struct qspinlock *lock) in virt_spin_lock()
spinlock.h
33 static __always_inline void arch_spin_lock(arch_spinlock_t *lock) in arch_spin_lock()
53 static __always_inline bool arch_spin_trylock(arch_spinlock_t *lock) in arch_spin_trylock()
63 static __always_inline void arch_spin_unlock(arch_spinlock_t *lock) in arch_spin_unlock()
71 static __always_inline int arch_spin_value_unlocked(arch_spinlock_t lock) in arch_spin_value_unlocked()
78 static __always_inline int arch_spin_is_locked(arch_spinlock_t *lock) in arch_spin_is_locked()
85 static __always_inline int arch_spin_is_contended(arch_spinlock_t *lock) in arch_spin_is_contended()
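
The asm-generic lock helpers above (queued_spin_is_locked(), arch_spin_is_locked(), and friends) are each only a load or a compare-and-swap wide, which is why they are force-inlined. A rough userspace sketch of that pattern, with hypothetical toy_* names rather than the kernel's qspinlock implementation:

#include <stdatomic.h>
#include <stdbool.h>
#include <stdio.h>

#undef __always_inline	/* glibc's <sys/cdefs.h> also defines this */
#define __always_inline inline __attribute__((__always_inline__))

struct toy_spinlock {
	atomic_int val;			/* 0 = unlocked, non-zero = held */
};

static __always_inline bool toy_spin_is_locked(struct toy_spinlock *lock)
{
	/* A single relaxed load; forced inlining keeps hot paths call-free. */
	return atomic_load_explicit(&lock->val, memory_order_relaxed) != 0;
}

static __always_inline bool toy_spin_trylock(struct toy_spinlock *lock)
{
	int expected = 0;

	/* Acquire on success so the critical section cannot move above the lock. */
	return atomic_compare_exchange_strong_explicit(&lock->val, &expected, 1,
						       memory_order_acquire,
						       memory_order_relaxed);
}

int main(void)
{
	struct toy_spinlock lock = { .val = 0 };
	bool got = toy_spin_trylock(&lock);

	printf("trylock=%d is_locked=%d\n", got, toy_spin_is_locked(&lock));
	return 0;
}
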
/include/linux/sched/
idle.h
28 static __always_inline void __current_set_polling(void) in __current_set_polling()
34 static __always_inline void __current_clr_polling(void) in __current_clr_polling()
42 static __always_inline void __current_set_polling(void) in __current_set_polling()
48 static __always_inline void __current_clr_polling(void) in __current_clr_polling()
56 static __always_inline bool __must_check current_set_polling_and_test(void) in current_set_polling_and_test()
69 static __always_inline bool __must_check current_clr_polling_and_test(void) in current_clr_polling_and_test()
96 static __always_inline void current_clr_polling(void) in current_clr_polling()
/include/asm-generic/bitops/
instrumented-non-atomic.h
25 static __always_inline void
41 static __always_inline void
57 static __always_inline void
64 static __always_inline void __instrument_read_write_bitop(long nr, volatile unsigned long *addr) in __instrument_read_write_bitop()
96 static __always_inline bool
111 static __always_inline bool
126 static __always_inline bool
138 static __always_inline bool
150 static __always_inline bool
instrumented-atomic.h
26 static __always_inline void set_bit(long nr, volatile unsigned long *addr) in set_bit()
39 static __always_inline void clear_bit(long nr, volatile unsigned long *addr) in clear_bit()
55 static __always_inline void change_bit(long nr, volatile unsigned long *addr) in change_bit()
68 static __always_inline bool test_and_set_bit(long nr, volatile unsigned long *addr) in test_and_set_bit()
82 static __always_inline bool test_and_clear_bit(long nr, volatile unsigned long *addr) in test_and_clear_bit()
96 static __always_inline bool test_and_change_bit(long nr, volatile unsigned long *addr) in test_and_change_bit()
generic-non-atomic.h
27 static __always_inline void
36 static __always_inline void
54 static __always_inline void
72 static __always_inline bool
92 static __always_inline bool
104 static __always_inline bool
120 static __always_inline bool
136 static __always_inline bool
165 static __always_inline bool
atomic.h
14 static __always_inline void
21 static __always_inline void
28 static __always_inline void
35 static __always_inline int
46 static __always_inline int
57 static __always_inline int
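
The bitops headers above all wrap the same word-and-mask operations; the instrumented-*.h variants additionally emit sanitizer (KASAN/KCSAN) instrumentation before delegating to the arch implementation. A hedged sketch of the non-atomic core in the style of generic-non-atomic.h (hypothetical toy_* names, not the kernel's code):

#include <limits.h>
#include <stdbool.h>
#include <stdio.h>

#undef __always_inline	/* glibc's <sys/cdefs.h> also defines this */
#define __always_inline inline __attribute__((__always_inline__))

#define BITS_PER_LONG	(CHAR_BIT * sizeof(unsigned long))

/* Non-atomic set/test over an array of longs used as a bitmap. */
static __always_inline void toy_set_bit(unsigned long nr, unsigned long *addr)
{
	addr[nr / BITS_PER_LONG] |= 1UL << (nr % BITS_PER_LONG);
}

static __always_inline bool toy_test_bit(unsigned long nr, const unsigned long *addr)
{
	return (addr[nr / BITS_PER_LONG] >> (nr % BITS_PER_LONG)) & 1UL;
}

int main(void)
{
	unsigned long map[4] = { 0 };

	toy_set_bit(0, map);
	toy_set_bit(65, map);
	printf("%d %d %d\n", (int)toy_test_bit(0, map), (int)toy_test_bit(1, map),
	       (int)toy_test_bit(65, map));	/* prints: 1 0 1 */
	return 0;
}
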
/include/net/
checksum.h
27 static __always_inline
38 static __always_inline __wsum csum_and_copy_to_user in csum_and_copy_to_user()
50 static __always_inline __wsum
59 static __always_inline __wsum csum_add(__wsum csum, __wsum addend) in csum_add()
67 static __always_inline __wsum csum_sub(__wsum csum, __wsum addend) in csum_sub()
72 static __always_inline __sum16 csum16_add(__sum16 csum, __be16 addend) in csum16_add()
80 static __always_inline __sum16 csum16_sub(__sum16 csum, __be16 addend) in csum16_sub()
86 static __always_inline __wsum csum_shift(__wsum sum, int offset) in csum_shift()
95 static __always_inline __wsum
101 static __always_inline __wsum
[all …]
/include/asm-generic/vdso/
vsyscall.h
8 static __always_inline struct vdso_data *__arch_get_k_vdso_data(void) in __arch_get_k_vdso_data()
15 static __always_inline void __arch_update_vsyscall(struct vdso_data *vdata, in __arch_update_vsyscall()
22 static __always_inline void __arch_sync_vdso_data(struct vdso_data *vdata) in __arch_sync_vdso_data()
/include/uapi/linux/byteorder/
big_endian.h
45 static __always_inline __le64 __cpu_to_le64p(const __u64 *p) in __cpu_to_le64p()
49 static __always_inline __u64 __le64_to_cpup(const __le64 *p) in __le64_to_cpup()
53 static __always_inline __le32 __cpu_to_le32p(const __u32 *p) in __cpu_to_le32p()
57 static __always_inline __u32 __le32_to_cpup(const __le32 *p) in __le32_to_cpup()
61 static __always_inline __le16 __cpu_to_le16p(const __u16 *p) in __cpu_to_le16p()
65 static __always_inline __u16 __le16_to_cpup(const __le16 *p) in __le16_to_cpup()
69 static __always_inline __be64 __cpu_to_be64p(const __u64 *p) in __cpu_to_be64p()
73 static __always_inline __u64 __be64_to_cpup(const __be64 *p) in __be64_to_cpup()
77 static __always_inline __be32 __cpu_to_be32p(const __u32 *p) in __cpu_to_be32p()
81 static __always_inline __u32 __be32_to_cpup(const __be32 *p) in __be32_to_cpup()
[all …]
little_endian.h
45 static __always_inline __le64 __cpu_to_le64p(const __u64 *p) in __cpu_to_le64p()
49 static __always_inline __u64 __le64_to_cpup(const __le64 *p) in __le64_to_cpup()
53 static __always_inline __le32 __cpu_to_le32p(const __u32 *p) in __cpu_to_le32p()
57 static __always_inline __u32 __le32_to_cpup(const __le32 *p) in __le32_to_cpup()
61 static __always_inline __le16 __cpu_to_le16p(const __u16 *p) in __cpu_to_le16p()
65 static __always_inline __u16 __le16_to_cpup(const __le16 *p) in __le16_to_cpup()
69 static __always_inline __be64 __cpu_to_be64p(const __u64 *p) in __cpu_to_be64p()
73 static __always_inline __u64 __be64_to_cpup(const __be64 *p) in __be64_to_cpup()
77 static __always_inline __be32 __cpu_to_be32p(const __u32 *p) in __cpu_to_be32p()
81 static __always_inline __u32 __be32_to_cpup(const __be32 *p) in __be32_to_cpup()
[all …]
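
The byteorder helpers above (__cpu_to_le32p(), __le32_to_cpup(), and so on) reduce to either a type-only cast or a byte swap, depending on whether the host endianness matches the target format. A hedged sketch of that idea, assuming GCC/Clang's predefined __BYTE_ORDER__ macros and using hypothetical toy_* names:

#include <stdint.h>
#include <stdio.h>

#undef __always_inline	/* glibc's <sys/cdefs.h> also defines this */
#define __always_inline inline __attribute__((__always_inline__))

static __always_inline uint32_t toy_swab32(uint32_t x)
{
	return (x >> 24) | ((x >> 8) & 0x0000ff00u) |
	       ((x << 8) & 0x00ff0000u) | (x << 24);
}

/* Convert a CPU-order value to little-endian: a no-op on little-endian
 * hosts, a byte swap on big-endian ones. */
static __always_inline uint32_t toy_cpu_to_le32(uint32_t x)
{
#if __BYTE_ORDER__ == __ORDER_BIG_ENDIAN__
	return toy_swab32(x);
#else
	return x;
#endif
}

int main(void)
{
	printf("0x%08x\n", toy_cpu_to_le32(0x12345678u));	/* 0x12345678 on x86 */
	return 0;
}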
