Lines matching refs:tail in kernel/locking/qspinlock.c
117 u32 tail; in encode_tail() local
119 tail = (cpu + 1) << _Q_TAIL_CPU_OFFSET; in encode_tail()
120 tail |= idx << _Q_TAIL_IDX_OFFSET; /* assume < 4 */ in encode_tail()
122 return tail; in encode_tail()
125 static inline __pure struct mcs_spinlock *decode_tail(u32 tail) in decode_tail() argument
127 int cpu = (tail >> _Q_TAIL_CPU_OFFSET) - 1; in decode_tail()
128 int idx = (tail & _Q_TAIL_IDX_MASK) >> _Q_TAIL_IDX_OFFSET; in decode_tail()
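
Lines 117-128 above cover the tail encode/decode pair. Below is a minimal user-space sketch of the same round trip, assuming the usual bit layout (2 index bits at offset 16, CPU bits above them); the per-CPU qnodes lookup that the real decode_tail() finishes with is replaced by simply returning the decoded (cpu, idx) pair, since that array is not part of the listing.

/* Sketch: tail encode/decode round trip, bit layout assumed as described above. */
#include <assert.h>
#include <stdint.h>
#include <stdio.h>

#define _Q_TAIL_IDX_OFFSET	16		/* assumed: above the 8+8 locked/pending bits */
#define _Q_TAIL_IDX_BITS	2		/* up to 4 nesting levels per CPU */
#define _Q_TAIL_IDX_MASK	(((1U << _Q_TAIL_IDX_BITS) - 1) << _Q_TAIL_IDX_OFFSET)
#define _Q_TAIL_CPU_OFFSET	(_Q_TAIL_IDX_OFFSET + _Q_TAIL_IDX_BITS)

static uint32_t encode_tail(int cpu, int idx)
{
	uint32_t tail;

	tail  = (cpu + 1) << _Q_TAIL_CPU_OFFSET;	/* +1 so tail == 0 means "no queue" */
	tail |= idx << _Q_TAIL_IDX_OFFSET;		/* assume idx < 4 */
	return tail;
}

static void decode_tail(uint32_t tail, int *cpu, int *idx)
{
	*cpu = (tail >> _Q_TAIL_CPU_OFFSET) - 1;	/* undo the +1 bias */
	*idx = (tail & _Q_TAIL_IDX_MASK) >> _Q_TAIL_IDX_OFFSET;
}

int main(void)
{
	int cpu, idx;

	decode_tail(encode_tail(7, 2), &cpu, &idx);
	assert(cpu == 7 && idx == 2);
	printf("cpu=%d idx=%d\n", cpu, idx);
	return 0;
}

The +1 bias on the CPU number is what lets an all-zero tail stand for "no queue" without giving up CPU 0's encoding.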
176 static __always_inline u32 xchg_tail(struct qspinlock *lock, u32 tail) in xchg_tail() argument
182 return (u32)xchg_relaxed(&lock->tail, in xchg_tail()
183 tail >> _Q_TAIL_OFFSET) << _Q_TAIL_OFFSET; in xchg_tail()
220 static __always_inline u32 xchg_tail(struct qspinlock *lock, u32 tail) in xchg_tail() argument
225 new = (val & _Q_LOCKED_PENDING_MASK) | tail; in xchg_tail()
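
Two xchg_tail() variants show up above: lines 176-183 exchange the 16-bit tail halfword directly, which works when the tail occupies a halfword of its own and the locked/pending bytes need not be touched, while line 225 belongs to the fallback that has to cmpxchg the whole lock word. A hedged user-space sketch of that fallback loop, with C11 atomics standing in for the kernel's atomic_cmpxchg_relaxed() and the mask values assumed:

/* Sketch: cmpxchg-based xchg_tail() fallback loop (illustration only). */
#include <stdatomic.h>
#include <stdint.h>

#define _Q_LOCKED_PENDING_MASK	0x0000ffffU	/* assumed: low 16 bits = locked + pending */
#define _Q_TAIL_MASK		0xffff0000U	/* assumed: high 16 bits = idx + cpu */

struct qspinlock { _Atomic uint32_t val; };

/*
 * Publish our tail while preserving the locked/pending bits, and return
 * the previous tail so the caller can link behind the old queue tail.
 */
static uint32_t xchg_tail(struct qspinlock *lock, uint32_t tail)
{
	uint32_t val = atomic_load_explicit(&lock->val, memory_order_relaxed);
	uint32_t new;

	for (;;) {
		new = (val & _Q_LOCKED_PENDING_MASK) | tail;
		/* Relaxed is enough here: the MCS node was initialized beforehand. */
		if (atomic_compare_exchange_weak_explicit(&lock->val, &val, new,
							  memory_order_relaxed,
							  memory_order_relaxed))
			return val & _Q_TAIL_MASK;
		/* On failure, val now holds the freshly observed lock word; retry. */
	}
}

int main(void)
{
	struct qspinlock lock = { .val = 0 };
	uint32_t prev = xchg_tail(&lock, 0x00220000U);	/* e.g. encode_tail(7, 2) */

	return prev != 0;	/* first waiter: previous tail was empty */
}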
318 u32 old, tail; in queued_spin_lock_slowpath() local
402 tail = encode_tail(smp_processor_id(), idx); in queued_spin_lock_slowpath()
460 old = xchg_tail(lock, tail); in queued_spin_lock_slowpath()
535 if ((val & _Q_TAIL_MASK) == tail) { in queued_spin_lock_slowpath()
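
The remaining hits are the slowpath's use of the encoded tail: line 402 builds this CPU's tail, line 460 publishes it and fetches the previous tail, and line 535 checks whether the lock word still holds our own tail, i.e. whether we are the last queued waiter and may take the lock and clear the tail in a single atomic step. A simplified sketch of just that last check, with the helper name and mask values invented for illustration:

/* Sketch: the "are we still the queue tail?" check from the slowpath. */
#include <stdatomic.h>
#include <stdbool.h>
#include <stdint.h>

#define _Q_LOCKED_VAL	1U		/* assumed: locked byte set, no pending, no tail */
#define _Q_TAIL_MASK	0xffff0000U	/* assumed: high 16 bits = idx + cpu */

struct qspinlock { _Atomic uint32_t val; };

/*
 * At the head of the MCS queue, re-read the lock word (val). If the tail
 * bits still match the tail we published earlier, nobody queued behind us,
 * so grab the lock and clear the tail in one cmpxchg; otherwise the caller
 * only sets the locked byte and hands the MCS node to its successor.
 */
static bool try_clear_tail_and_lock(struct qspinlock *lock,
				    uint32_t val, uint32_t tail)
{
	if ((val & _Q_TAIL_MASK) != tail)
		return false;	/* someone is queued behind us */

	return atomic_compare_exchange_strong_explicit(&lock->val, &val,
						       _Q_LOCKED_VAL,
						       memory_order_relaxed,
						       memory_order_relaxed);
}

int main(void)
{
	/* Lock released, our tail (encode_tail(7, 2)) still the only one queued. */
	struct qspinlock lock = { .val = 0x00220000U };

	return !try_clear_tail_and_lock(&lock, 0x00220000U, 0x00220000U);
}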