search for: lock_slowpath

Displaying 4 results from an estimated 7 matches for "lock_slowpath".

2020 Jul 21
2
[PATCH v3 0/6] powerpc: queued spinlocks and rwlocks
...de/asm/qspinlock.h b/arch/powerpc/include/asm/qspinlock.h
index b752d34517b3..26d8766a1106 100644
--- a/arch/powerpc/include/asm/qspinlock.h
+++ b/arch/powerpc/include/asm/qspinlock.h
@@ -31,16 +31,57 @@ static inline void queued_spin_unlock(struct qspinlock *lock)
 
 #else
 extern void queued_spin_lock_slowpath(struct qspinlock *lock, u32 val);
+extern void queued_spin_lock_slowpath_queue(struct qspinlock *lock);
 #endif
 
 static __always_inline void queued_spin_lock(struct qspinlock *lock)
 {
-	u32 val = 0;
-
-	if (likely(atomic_try_cmpxchg_lock(&lock->val, &val, _Q_LOCKED_VAL)))
+	atomic_t *...
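For context, the "-" lines above are the tail of the pre-patch cmpxchg fastpath. A minimal sketch of that fastpath as it would sit in arch/powerpc/include/asm/qspinlock.h; the opening lines come straight from the removed hunk, while the final slowpath call is not visible in the truncated snippet and is assumed from the generic include/asm-generic/qspinlock.h shape:

/*
 * Sketch of the pre-patch fastpath. The body up to the cmpxchg is taken
 * from the "-" lines of the hunk; the trailing queued_spin_lock_slowpath()
 * call is an assumption based on the generic qspinlock fastpath, since
 * the excerpt is truncated before it.
 */
static __always_inline void queued_spin_lock(struct qspinlock *lock)
{
	u32 val = 0;

	/* Uncontended case: atomically flip the lock word 0 -> _Q_LOCKED_VAL. */
	if (likely(atomic_try_cmpxchg_lock(&lock->val, &val, _Q_LOCKED_VAL)))
		return;

	/* Contended: val now holds the lock word we observed; queue up. */
	queued_spin_lock_slowpath(lock, val);
}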
2020 Jul 21
0
[PATCH v3 0/6] powerpc: queued spinlocks and rwlocks
...ude/asm/qspinlock.h
> index b752d34517b3..26d8766a1106 100644
> --- a/arch/powerpc/include/asm/qspinlock.h
> +++ b/arch/powerpc/include/asm/qspinlock.h
> @@ -31,16 +31,57 @@ static inline void queued_spin_unlock(struct qspinlock *lock)
>
> #else
> extern void queued_spin_lock_slowpath(struct qspinlock *lock, u32 val);
> +extern void queued_spin_lock_slowpath_queue(struct qspinlock *lock);
> #endif
>
> static __always_inline void queued_spin_lock(struct qspinlock *lock)
> {
> -	u32 val = 0;
> -
> -	if (likely(atomic_try_cmpxchg_lock(&lock->...
2020 Jul 23
2
[PATCH v3 0/6] powerpc: queued spinlocks and rwlocks
...index b752d34517b3..26d8766a1106 100644
>> --- a/arch/powerpc/include/asm/qspinlock.h
>> +++ b/arch/powerpc/include/asm/qspinlock.h
>> @@ -31,16 +31,57 @@ static inline void queued_spin_unlock(struct qspinlock *lock)
>>
>> #else
>> extern void queued_spin_lock_slowpath(struct qspinlock *lock, u32 val);
>> +extern void queued_spin_lock_slowpath_queue(struct qspinlock *lock);
>> #endif
>>
>> static __always_inline void queued_spin_lock(struct qspinlock *lock)
>> {
>> -	u32 val = 0;
>> -
>> -	if (likely(atomic...
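The excerpt cuts off at the new "+ atomic_t *..." body, which the patch open-codes in powerpc inline asm. Purely as a hypothetical sketch of the split that the new queued_spin_lock_slowpath_queue() extern implies (pending-bit trylock kept inline, only MCS queueing out of line), using the generic qspinlock names _Q_PENDING_VAL, _Q_LOCKED_MASK, and the little-endian locked_pending halfword; none of this body is from the patch itself:

/*
 * HYPOTHETICAL sketch only: the real patch body is truncated above and
 * uses powerpc larx/stcx. inline asm. This just illustrates the shape.
 */
static __always_inline void queued_spin_lock(struct qspinlock *lock)
{
	atomic_t *a = &lock->val;
	u32 val = 0;

	/* Uncontended: 0 -> locked, as before. */
	if (likely(atomic_try_cmpxchg_lock(a, &val, _Q_LOCKED_VAL)))
		return;

	/* One holder, no waiters: grab the pending bit inline and spin
	 * until the holder releases the lock byte. */
	if (val == _Q_LOCKED_VAL &&
	    atomic_try_cmpxchg_lock(a, &val, _Q_LOCKED_VAL | _Q_PENDING_VAL)) {
		atomic_cond_read_acquire(a, !(VAL & _Q_LOCKED_MASK));
		/* Clear pending and set locked in one halfword store. */
		WRITE_ONCE(lock->locked_pending, _Q_LOCKED_VAL);
		return;
	}

	/* Real contention: only the MCS queueing stays out of line. */
	queued_spin_lock_slowpath_queue(lock);
}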
2020 Jul 07
6
[PATCH v3 0/6] powerpc: queued spinlocks and rwlocks
Excerpts from Waiman Long's message of July 7, 2020 4:39 am:
> On 7/6/20 12:35 AM, Nicholas Piggin wrote:
>> v3 is updated to use __pv_queued_spin_unlock, noticed by Waiman (thank you).
>>
>> Thanks,
>> Nick
>>
>> Nicholas Piggin (6):
>>   powerpc/powernv: must include hvcall.h to get PAPR defines
>>   powerpc/pseries: move some PAPR