/linux/arch/x86/include/asm/

qspinlock.h
     14  static __always_inline u32 queued_fetch_set_pending_acquire(struct qspinlock *lock)
     31  extern void native_queued_spin_lock_slowpath(struct qspinlock *lock, u32 val);
     33  extern void __pv_queued_spin_lock_slowpath(struct qspinlock *lock, u32 val);
     34  extern void __raw_callee_save___pv_queued_spin_unlock(struct qspinlock *lock);
     44  static inline void native_queued_spin_unlock(struct qspinlock *lock)
     49  static inline void queued_spin_lock_slowpath(struct qspinlock *lock, u32 val)
     54  static inline void queued_spin_unlock(struct qspinlock *lock)
     88  static inline bool virt_spin_lock(struct qspinlock *lock)
    114  #include <asm-generic/qspinlock.h>

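This x86 header only overrides a few hooks; the API it plugs into (line 114's asm-generic include) has a simple shape: a one-CAS fast path with a slowpath fallback on contention, and an unlock that releases the locked byte. A minimal userspace sketch of that shape with C11 atomics; myqspinlock and the my_* names are illustrative, not kernel APIs, and the toy slowpath just retries the CAS where the kernel sets a pending bit or queues on MCS nodes:

    #include <stdatomic.h>
    #include <stdint.h>

    #define _Q_LOCKED_VAL 1u   /* value of the locked byte when held */

    struct myqspinlock { _Atomic uint32_t val; };

    /* Toy stand-in for the contended path: the kernel instead sets a pending
     * bit or queues on per-CPU MCS nodes (kernel/locking/qspinlock.c). */
    static void my_slowpath(struct myqspinlock *lock)
    {
        uint32_t old;
        do {
            old = 0;
        } while (!atomic_compare_exchange_weak_explicit(&lock->val, &old,
                     _Q_LOCKED_VAL, memory_order_acquire, memory_order_relaxed));
    }

    static inline void my_spin_lock(struct myqspinlock *lock)
    {
        uint32_t old = 0;
        /* Fast path: one CAS of 0 -> locked, acquire ordering. */
        if (atomic_compare_exchange_strong_explicit(&lock->val, &old,
                _Q_LOCKED_VAL, memory_order_acquire, memory_order_relaxed))
            return;
        my_slowpath(lock);
    }

    static inline void my_spin_unlock(struct myqspinlock *lock)
    {
        /* The kernel does a release store of 0 to the locked byte; subtracting
         * _Q_LOCKED_VAL with release ordering has the same effect here. */
        atomic_fetch_sub_explicit(&lock->val, _Q_LOCKED_VAL, memory_order_release);
    }
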
rqspinlock.h
     17  typedef struct qspinlock rqspinlock_t;

paravirt.h
     42  __visible void __native_queued_spin_unlock(struct qspinlock *lock);
    554  static __always_inline void pv_queued_spin_lock_slowpath(struct qspinlock *lock, u32 val)
    560  static __always_inline void pv_queued_spin_unlock(struct qspinlock *lock)
    584  void __raw_callee_save___native_queued_spin_unlock(struct qspinlock *lock);

paravirt_types.h
    213  struct qspinlock;
    216  void (*queued_spin_lock_slowpath)(struct qspinlock *lock, u32 val);

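Line 216 is the hook that makes the slowpath switchable: a function pointer in the paravirt op table, pointed at either the native or the PV implementation at boot. A self-contained sketch of that dispatch pattern (my_lock_ops and friends are illustrative names; the real kernel additionally patches the call sites so bare metal pays no indirect-call cost):

    #include <stdint.h>
    #include <stdio.h>

    struct myqspinlock { uint32_t val; };

    /* Op table: one slot per overridable primitive. paravirt_types.h
     * declares exactly such a pointer for the slowpath. */
    struct my_lock_ops {
        void (*queued_spin_lock_slowpath)(struct myqspinlock *lock, uint32_t val);
    };

    static void native_slowpath(struct myqspinlock *lock, uint32_t val)
    {
        (void)lock; (void)val;
        puts("native slowpath");    /* bare-metal MCS queueing would run here */
    }

    static void pv_slowpath(struct myqspinlock *lock, uint32_t val)
    {
        (void)lock; (void)val;
        puts("paravirt slowpath");  /* halt the vCPU instead of spinning */
    }

    static struct my_lock_ops lock_ops = {
        .queued_spin_lock_slowpath = native_slowpath,   /* default */
    };

    /* A guest re-points the slot once, early in boot. */
    static void setup_pv_locks(int in_guest)
    {
        if (in_guest)
            lock_ops.queued_spin_lock_slowpath = pv_slowpath;
    }

    int main(void)
    {
        struct myqspinlock lock = { 0 };
        setup_pv_locks(1);
        lock_ops.queued_spin_lock_slowpath(&lock, lock.val);
        return 0;
    }
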
/linux/arch/powerpc/include/asm/

qspinlock.h
     71  static __always_inline int queued_spin_is_locked(struct qspinlock *lock)
     76  static __always_inline int queued_spin_value_unlocked(struct qspinlock lock)
     81  static __always_inline int queued_spin_is_contended(struct qspinlock *lock)
     92  static __always_inline int __queued_spin_trylock_nosteal(struct qspinlock *lock)
    114  static __always_inline int __queued_spin_trylock_steal(struct qspinlock *lock)
    138  static __always_inline int queued_spin_trylock(struct qspinlock *lock)
    146  void queued_spin_lock_slowpath(struct qspinlock *lock);
    148  static __always_inline void queued_spin_lock(struct qspinlock *lock)
    154  static inline void queued_spin_unlock(struct qspinlock *lock)

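The powerpc port keeps two trylock flavors: __queued_spin_trylock_nosteal() succeeds only on a fully idle word, while __queued_spin_trylock_steal() may take the lock ahead of queued waiters so long as no one actually holds it, and queued_spin_trylock() picks between them. A sketch of the contrast under an assumed toy layout (low bit = locked, upper 16 bits = queue tail; the real powerpc word differs, but the steal/nosteal distinction is the point):

    #include <stdatomic.h>
    #include <stdbool.h>
    #include <stdint.h>

    #define LOCKED    1u
    #define TAIL_MASK 0xffff0000u   /* assumed: queued waiters encoded here */

    struct myqspinlock { _Atomic uint32_t val; };

    /* Fair variant: only take a completely idle lock (no owner, no queue). */
    static bool trylock_nosteal(struct myqspinlock *lock)
    {
        uint32_t old = 0;
        return atomic_compare_exchange_strong_explicit(&lock->val, &old, LOCKED,
                    memory_order_acquire, memory_order_relaxed);
    }

    /* Stealing variant: ignore the tail, require only the locked bit clear. */
    static bool trylock_steal(struct myqspinlock *lock)
    {
        uint32_t old = atomic_load_explicit(&lock->val, memory_order_relaxed);
        do {
            if (old & LOCKED)
                return false;       /* someone holds it; give up */
        } while (!atomic_compare_exchange_weak_explicit(&lock->val, &old,
                    old | LOCKED, memory_order_acquire, memory_order_relaxed));
        return true;                /* acquired ahead of any queued waiters */
    }
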
qspinlock_types.h
      8  typedef struct qspinlock {
     50  #error "qspinlock does not support such large CONFIG_NR_CPUS"
     69  #error "qspinlock does not support such large CONFIG_NR_CPUS"

/linux/kernel/locking/

qspinlock_paravirt.h
     81  static inline bool pv_hybrid_queued_unfair_trylock(struct qspinlock *lock)
    110  static __always_inline void set_pending(struct qspinlock *lock)
    120  static __always_inline bool trylock_clear_pending(struct qspinlock *lock)
    128  static __always_inline void set_pending(struct qspinlock *lock)
    133  static __always_inline bool trylock_clear_pending(struct qspinlock *lock)
    168  struct qspinlock *lock;
    179  * Allocate memory for the PV qspinlock hash buckets
    195  pv_lock_hash = alloc_large_system_hash("PV qspinlock",
    208  static struct qspinlock **pv_hash(struct qspinlock *lock,
    …   (remaining matches truncated in the index)

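pv_hash() and the hash-bucket allocation exist so a halted waiter can be found again: before a vCPU sleeps on a lock, it publishes a (lock, node) pair in a global hash table, and the unlocker looks the node up to kick the right vCPU. A toy open-addressing version of the idea (pv_table, pv_hash_fn and friends are illustrative; the kernel allocates the real table with alloc_large_system_hash and handles memory ordering against the lock word, which this sketch glosses over):

    #include <stdatomic.h>
    #include <stddef.h>
    #include <stdint.h>

    struct myqspinlock;                     /* opaque: only its address matters */
    struct pv_node { int cpu; };            /* which vCPU to kick on unlock */

    #define HASH_SLOTS 256                  /* kernel sizes this from the CPU count */

    struct pv_entry {
        struct myqspinlock *_Atomic lock;   /* key; NULL means the slot is free */
        struct pv_node *node;
    };

    static struct pv_entry pv_table[HASH_SLOTS];

    static size_t pv_hash_fn(struct myqspinlock *lock)
    {
        return ((uintptr_t)lock >> 4) % HASH_SLOTS;   /* toy hash function */
    }

    /* Waiter side: publish (lock -> node) before halting the vCPU.
     * Assumes the table never fills; the kernel sizes it so a free
     * slot always exists. */
    static void pv_hash_insert(struct myqspinlock *lock, struct pv_node *node)
    {
        for (size_t i = pv_hash_fn(lock); ; i = (i + 1) % HASH_SLOTS) {
            struct myqspinlock *expected = NULL;
            if (atomic_compare_exchange_strong(&pv_table[i].lock, &expected, lock)) {
                pv_table[i].node = node;    /* real code orders this vs the unlocker */
                return;
            }
        }
    }

    /* Unlocker side: find who to kick and free the slot. Only called after
     * the waiter has hashed itself, so the entry is guaranteed to exist. */
    static struct pv_node *pv_hash_remove(struct myqspinlock *lock)
    {
        for (size_t i = pv_hash_fn(lock); ; i = (i + 1) % HASH_SLOTS) {
            if (atomic_load(&pv_table[i].lock) == lock) {
                struct pv_node *node = pv_table[i].node;
                atomic_store(&pv_table[i].lock, NULL);
                return node;
            }
        }
    }
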
qspinlock.h
      6  * qspinlock slow path implementations.
     13  #include <asm-generic/qspinlock.h>
     86  static __always_inline void clear_pending(struct qspinlock *lock)
     99  static __always_inline void clear_pending_set_locked(struct qspinlock *lock)
    114  static __always_inline u32 xchg_tail(struct qspinlock *lock, u32 tail)
    132  static __always_inline void clear_pending(struct qspinlock *lock)
    143  static __always_inline void clear_pending_set_locked(struct qspinlock *lock)
    158  static __always_inline u32 xchg_tail(struct qspinlock *lock, u32 tail)
    184  static __always_inline u32 queued_fetch_set_pending_acquire(struct qspinlock *lock)
    196  static __always_inline void set_locked(struct qspinlock *lock)
    …   (remaining matches truncated in the index)

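xchg_tail() swaps a new queue tail into the lock word while leaving the locked and pending bits untouched; the duplicate definitions at lines 114 and 158 are the two encodings (a direct 16-bit exchange when the tail occupies its own halfword, a CAS loop otherwise). A sketch of the CAS-loop form, with my_* names and the generic upper-bits tail placement assumed:

    #include <stdatomic.h>
    #include <stdint.h>

    #define TAIL_OFFSET 16
    #define TAIL_MASK   (~0u << TAIL_OFFSET)  /* tail lives in the top 16 bits */

    struct myqspinlock { _Atomic uint32_t val; };

    /* Swap in our tail encoding, preserving locked+pending bits; return the
     * previous value so the caller can link behind the old tail's node. */
    static uint32_t my_xchg_tail(struct myqspinlock *lock, uint32_t tail)
    {
        uint32_t old = atomic_load_explicit(&lock->val, memory_order_relaxed);
        uint32_t new;
        do {
            new = (old & ~TAIL_MASK) | tail;
        } while (!atomic_compare_exchange_weak_explicit(&lock->val, &old, new,
                    memory_order_relaxed, memory_order_relaxed));
        return old;                 /* old tail tells us who we queue behind */
    }
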
qspinlock.c
     24  #include <asm/qspinlock.h>
     30  #include "qspinlock.h"
     90  static __always_inline void __pv_kick_node(struct qspinlock *lock,
     92  static __always_inline u32 __pv_wait_head_or_lock(struct qspinlock *lock,
    130  void __lockfunc queued_spin_lock_slowpath(struct qspinlock *lock, u32 val)
    401  #include "qspinlock.c"

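queued_spin_lock_slowpath() is built on MCS-style queueing: each contending CPU spins on its own per-CPU node rather than on the shared lock word, and unlock hands off through the queue. The kernel compresses the tail pointer into the lock word and adds a pending-bit shortcut; the sketch below shows only the underlying MCS handoff, with illustrative names and no kernel APIs:

    #include <stdatomic.h>
    #include <stdbool.h>
    #include <stddef.h>

    struct mcs_node {
        struct mcs_node *_Atomic next;
        _Atomic bool locked;        /* set true when our predecessor hands off */
    };

    struct mcs_lock { struct mcs_node *_Atomic tail; };

    static void mcs_lock_acquire(struct mcs_lock *lock, struct mcs_node *node)
    {
        atomic_store_explicit(&node->next, NULL, memory_order_relaxed);
        atomic_store_explicit(&node->locked, false, memory_order_relaxed);

        /* Publish ourselves as the new tail; the previous tail is our queue
         * predecessor (the kernel encodes this tail as cpu+idx in the word). */
        struct mcs_node *prev = atomic_exchange_explicit(&lock->tail, node,
                                                         memory_order_acq_rel);
        if (prev) {
            atomic_store_explicit(&prev->next, node, memory_order_release);
            while (!atomic_load_explicit(&node->locked, memory_order_acquire))
                ;                   /* spin on our OWN node's cacheline */
        }
    }

    static void mcs_lock_release(struct mcs_lock *lock, struct mcs_node *node)
    {
        struct mcs_node *next = atomic_load_explicit(&node->next, memory_order_acquire);
        if (!next) {
            struct mcs_node *expected = node;
            /* No visible successor: if we are still the tail, queue is empty. */
            if (atomic_compare_exchange_strong_explicit(&lock->tail, &expected, NULL,
                    memory_order_release, memory_order_relaxed))
                return;
            /* A successor is linking in; wait for its next pointer. */
            while (!(next = atomic_load_explicit(&node->next, memory_order_acquire)))
                ;
        }
        atomic_store_explicit(&next->locked, true, memory_order_release);
    }
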
Makefile
     27  obj-$(CONFIG_QUEUED_SPINLOCKS) += qspinlock.o

/linux/arch/powerpc/lib/

qspinlock.c
     10  #include <asm/qspinlock.h>
     17  struct qspinlock *lock;
    122  static __always_inline u32 trylock_clean_tail(struct qspinlock *lock, u32 tail)
    161  static __always_inline u32 publish_tail_cpu(struct qspinlock *lock, u32 tail)
    181  static __always_inline u32 set_mustq(struct qspinlock *lock)
    197  static __always_inline u32 clear_mustq(struct qspinlock *lock)
    213  static __always_inline bool try_set_sleepy(struct qspinlock *lock, u32 old)
    235  static __always_inline void seen_sleepy_owner(struct qspinlock *lock, u32 val)
    260  static struct qnode *get_tail_qnode(struct qspinlock *lock, int prev_cpu)
    284  static __always_inline bool __yield_to_locked_owner(struct qspinlock *lock,
    …   (remaining matches truncated in the index)

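Much of this file (seen_sleepy_owner, __yield_to_locked_owner, …) deals with running under a hypervisor: rather than spinning against a lock owner whose vCPU has been preempted, the waiter directs its timeslice at the owner. A conceptual sketch with hypothetical vcpu_is_preempted()/yield_to_vcpu() hooks, stubbed out here, and an assumed owner-in-lock-word encoding (the real powerpc encoding and hypervisor call, H_CONFER, differ in detail):

    #include <stdatomic.h>
    #include <stdbool.h>
    #include <stdint.h>

    #define LOCKED      1u
    #define OWNER_SHIFT 1    /* assumed encoding: owner CPU above the lock bit */

    struct myqspinlock { _Atomic uint32_t val; };

    /* Hypothetical platform hooks, stubbed so the sketch stands alone. */
    static bool vcpu_is_preempted(int cpu) { (void)cpu; return false; }
    static void yield_to_vcpu(int cpu)     { (void)cpu; }

    static void spin_yield_to_owner(struct myqspinlock *lock)
    {
        uint32_t val;
        while ((val = atomic_load_explicit(&lock->val, memory_order_relaxed)) & LOCKED) {
            int owner = (int)(val >> OWNER_SHIFT);
            if (vcpu_is_preempted(owner))
                yield_to_vcpu(owner);   /* donate our timeslice to the holder */
            /* else keep spinning: a running owner will release soon */
        }
    }
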
/linux/include/asm-generic/

rqspinlock.h
     16  #include <asm/qspinlock.h>
     33  struct qspinlock;
     35  typedef struct qspinlock rqspinlock_t;

qspinlock_types.h
     14  typedef struct qspinlock {

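The struct is a single 32-bit word; everything else is bit packing. The encoding documented in this generic header, and the reason the powerpc file above #errors on very large CONFIG_NR_CPUS values, can be seen in a small self-contained example (pending is shown as one bit; the kernel widens it to a full byte when it can, so byte stores work):

    #include <stdint.h>
    #include <stdio.h>

    /* Generic lock-word layout when NR_CPUS < 16K, per the comment block in
     * include/asm-generic/qspinlock_types.h:
     *   bits  0- 7  locked byte
     *   bit      8  pending
     *   bits  9-15  unused
     *   bits 16-17  tail index (which of the 4 per-CPU MCS nodes)
     *   bits 18-31  tail CPU + 1 (0 means "no queue")
     * The 14 bits of tail CPU are what cap this encoding at ~16K CPUs. */
    #define _Q_LOCKED_MASK     0x000000ffu
    #define _Q_PENDING_MASK    0x00000100u
    #define _Q_TAIL_IDX_OFFSET 16
    #define _Q_TAIL_IDX_MASK   0x00030000u
    #define _Q_TAIL_CPU_OFFSET 18
    #define _Q_TAIL_CPU_MASK   0xfffc0000u

    static uint32_t encode_tail(int cpu, int idx)
    {
        return ((uint32_t)(cpu + 1) << _Q_TAIL_CPU_OFFSET) |
               ((uint32_t)idx << _Q_TAIL_IDX_OFFSET);
    }

    int main(void)
    {
        uint32_t val = encode_tail(5, 2) | _Q_PENDING_MASK | 1u;
        printf("locked=%u pending=%d idx=%u cpu=%d\n",
               (unsigned)(val & _Q_LOCKED_MASK),
               !!(val & _Q_PENDING_MASK),
               (unsigned)((val & _Q_TAIL_IDX_MASK) >> _Q_TAIL_IDX_OFFSET),
               (int)((val & _Q_TAIL_CPU_MASK) >> _Q_TAIL_CPU_OFFSET) - 1);
        return 0;   /* prints: locked=1 pending=1 idx=2 cpu=5 */
    }
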
/linux/arch/mips/include/asm/

spinlock.h
     21  static inline void queued_spin_unlock(struct qspinlock *lock)
     28  #include <asm/qspinlock.h>

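Here mips defines its own queued_spin_unlock before pulling in asm/qspinlock.h (which the Kbuild line below maps to the asm-generic version). The generic header only supplies defaults for names that are not already macro-defined, so the override convention is: define the function, then a macro of the same name, then include. A kernel-context sketch of the pattern (not standalone code; the arch-specific body is elided):

    static inline void queued_spin_unlock(struct qspinlock *lock)
    {
        /* arch-specific ordering work goes here before the release */
        smp_store_release(&lock->locked, 0);
    }
    #define queued_spin_unlock queued_spin_unlock

    #include <asm-generic/qspinlock.h>  /* now skips its default unlock */
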
Kbuild
     14  generic-y += qspinlock.h

/linux/arch/loongarch/include/asm/

qspinlock.h
     13  static inline bool virt_spin_lock(struct qspinlock *lock)
     39  #include <asm-generic/qspinlock.h>

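virt_spin_lock() exists because strict FIFO queueing behaves badly when vCPUs are preempted: the vCPU next in line may be off-CPU while every later waiter spins behind it. So under a hypervisor the code bails out of queueing into an unfair test-and-set spin. A userspace-flavored sketch; running_on_hypervisor and the my_* names stand in for the kernel's CPU-feature/static-key check and are not its API:

    #include <stdatomic.h>
    #include <stdbool.h>
    #include <stdint.h>

    #define _Q_LOCKED_VAL 1u

    struct myqspinlock { _Atomic uint32_t val; };

    static bool running_on_hypervisor;   /* stand-in for the kernel's check */

    /* Returns true if it handled the acquisition, telling the caller to
     * skip the fair queueing path entirely. */
    static bool my_virt_spin_lock(struct myqspinlock *lock)
    {
        if (!running_on_hypervisor)
            return false;            /* bare metal: use the real qspinlock path */

        /* Unfair test-and-test-and-set loop: wasteful, but immune to the
         * lock-waiter-preemption problem of strict FIFO queueing. */
        do {
            while (atomic_load_explicit(&lock->val, memory_order_relaxed))
                ;                    /* wait until it looks free */
            uint32_t old = 0;
            if (atomic_compare_exchange_strong_explicit(&lock->val, &old,
                    _Q_LOCKED_VAL, memory_order_acquire, memory_order_relaxed))
                return true;
        } while (1);
    }
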
/linux/kernel/bpf/

rqspinlock.h
     12  #include "../locking/qspinlock.h"
     28  static __always_inline bool try_cmpxchg_tail(struct qspinlock *lock, u32 tail, u32 new_tail)

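The resilient rqspinlock variant can abandon a lock attempt (for example on timeout), which means surgically replacing the queue tail, but only while no one has queued behind: a compare-and-swap restricted to the tail field of the word. A sketch assuming the generic upper-bits tail placement (my_* names illustrative):

    #include <stdatomic.h>
    #include <stdbool.h>
    #include <stdint.h>

    #define TAIL_OFFSET 16
    #define TAIL_MASK   (~0u << TAIL_OFFSET)

    struct myqspinlock { _Atomic uint32_t val; };

    /* Replace the tail only if it still equals `tail`; locked/pending bits
     * may change concurrently, so retry while the tail itself matches. */
    static bool my_try_cmpxchg_tail(struct myqspinlock *lock, uint32_t tail,
                                    uint32_t new_tail)
    {
        uint32_t old = atomic_load_explicit(&lock->val, memory_order_relaxed);
        do {
            if ((old & TAIL_MASK) != tail)
                return false;        /* someone queued behind us; can't unlink */
        } while (!atomic_compare_exchange_weak_explicit(&lock->val, &old,
                    (old & ~TAIL_MASK) | new_tail,
                    memory_order_relaxed, memory_order_relaxed));
        return true;
    }
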
/linux/arch/x86/kernel/

paravirt-spinlocks.c
     12  __visible void __native_queued_spin_unlock(struct qspinlock *lock)

/linux/arch/xtensa/include/asm/

Kbuild
      8  generic-y += qspinlock.h

/linux/arch/riscv/include/asm/

Kbuild
     16  generic-y += qspinlock.h