/*
 * Copyright (c) 2013-2019, ARM Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <asm_macros.S>

	.globl	spin_lock
	.globl	spin_unlock

#if USE_SPINLOCK_CAS
#if !ARM_ARCH_AT_LEAST(8, 1)
#error USE_SPINLOCK_CAS option requires at least an ARMv8.1 platform
#endif

/*
 * When compiled for ARMv8.1 or later, choose spin locks based on Compare and
 * Swap instruction.
 */

/*
 * Acquire lock using Compare and Swap instruction.
 *
 * Compare for 0 with acquire semantics, and swap 1. If failed to acquire, use
 * load exclusive semantics to monitor the address and enter WFE.
 *
 * void spin_lock(spinlock_t *lock);
 *
 * In:    x0 = pointer to the lock variable (0 = free, 1 = held)
 * Clobb: w1, w2
 */
func spin_lock
	mov	w2, #1			/* w2 = value to store when claiming the lock */
1:	mov	w1, wzr			/* w1 = expected value: 0 means "free" */
2:	casa	w1, w2, [x0]		/* if *lock == w1(0): *lock = w2(1), acquire
					 * semantics; w1 receives the old value */
	cbz	w1, 3f			/* old value was 0 -> we own the lock */
	/*
	 * CAS failed: arm the exclusive monitor on the lock address so that
	 * the unlocker's store generates the event that wakes our WFE.
	 */
	ldxr	w1, [x0]
	cbz	w1, 2b			/* freed in the meantime -> retry CAS now */
	wfe				/* sleep until the monitored address is written */
	b	1b
3:
	ret
endfunc spin_lock

#else /* !USE_SPINLOCK_CAS */

/*
 * Acquire lock using load-/store-exclusive instruction pair.
 *
 * void spin_lock(spinlock_t *lock);
 *
 * In:    x0 = pointer to the lock variable (0 = free, 1 = held)
 * Clobb: w1, w2
 */
func spin_lock
	mov	w2, #1			/* w2 = value to store when claiming the lock */
	sevl				/* set local event so the first WFE falls through */
l1:	wfe				/* wait for a store to the monitored lock address */
l2:	ldaxr	w1, [x0]		/* load-acquire exclusive: reads lock and arms
					 * the exclusive monitor */
	cbnz	w1, l1			/* still held -> back to sleep */
	stxr	w1, w2, [x0]		/* try to claim; w1 = 0 only if the exclusive
					 * access succeeded */
	cbnz	w1, l2			/* lost exclusivity to another CPU -> retry */
	ret
endfunc spin_lock

#endif /* USE_SPINLOCK_CAS */

/*
 * Release lock previously acquired by spin_lock.
 *
 * Use store-release to unconditionally clear the spinlock variable.
 * Store operation generates an event to all cores waiting in WFE
 * when address is monitored by the global monitor.
 *
 * void spin_unlock(spinlock_t *lock);
 *
 * In:    x0 = pointer to the lock variable
 * Clobb: none
 */
func spin_unlock
	stlr	wzr, [x0]		/* release semantics: critical-section accesses
					 * are ordered before the lock is cleared */
	ret
endfunc spin_unlock