/* SPDX-License-Identifier: GPL-2.0
 *
 * (C) 2014 Karim Allah Ahmed <karim.allah.ahmed@gmail.com>
 * (C) 2020, EPAM Systems Inc.
 */
#ifndef _ASM_ARM_XEN_SYSTEM_H
#define _ASM_ARM_XEN_SYSTEM_H

#include <compiler.h>
#include <asm/bitops.h>

/* If *ptr == old, then atomically store new there and return new.
 * Otherwise, leave *ptr unchanged and return old (the caller's expected
 * value), not the current contents of *ptr.
 */
#define synch_cmpxchg(ptr, old, new) \
({ __typeof__(*ptr) stored = old; \
   __atomic_compare_exchange_n(ptr, &stored, new, 0, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST) ? new : old; \
})
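
/*
 * Illustrative sketch (not used by this header): a try-lock built on
 * synch_cmpxchg(). The macro yields new on success and the expected old
 * value on failure, so the comparison below only distinguishes the two
 * because old != new. The example_trylock() name is hypothetical.
 *
 *	static inline int example_trylock(int *lock)
 *	{
 *		return synch_cmpxchg(lock, 0, 1) == 1;
 *	}
 */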

/* As test_and_clear_bit, but using __ATOMIC_SEQ_CST */
static inline int synch_test_and_clear_bit(int nr, volatile void *addr)
{
	u8 *byte = ((u8 *)addr) + (nr >> 3);
	u8 bit = 1 << (nr & 7);
	u8 orig;

	orig = __atomic_fetch_and(byte, ~bit, __ATOMIC_SEQ_CST);

	return (orig & bit) != 0;
}
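
/*
 * Illustrative sketch (not used by this header): draining a shared bitmap of
 * pending flags, as an event-channel style consumer might. The
 * example_pending bitmap and example_handle() callback are hypothetical.
 *
 *	static u8 example_pending[4];
 *
 *	static void example_drain(void (*example_handle)(int nr))
 *	{
 *		for (int nr = 0; nr < 32; nr++)
 *			if (synch_test_and_clear_bit(nr, example_pending))
 *				example_handle(nr);
 *	}
 */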

/* As test_and_set_bit, but using __ATOMIC_SEQ_CST */
static inline int synch_test_and_set_bit(int nr, volatile void *base)
{
	u8 *byte = ((u8 *)base) + (nr >> 3);
	u8 bit = 1 << (nr & 7);
	u8 orig;

	orig = __atomic_fetch_or(byte, bit, __ATOMIC_SEQ_CST);

	return (orig & bit) != 0;
}
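
/*
 * Illustrative sketch (not used by this header): using the returned old bit
 * value so that exactly one caller wins a slot; example_claim() returns 1
 * only for the caller that flipped the bit from 0 to 1. The example_claimed
 * bitmap is hypothetical.
 *
 *	static u8 example_claimed[4];
 *
 *	static int example_claim(int nr)
 *	{
 *		return !synch_test_and_set_bit(nr, example_claimed);
 *	}
 */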

/* As set_bit, but using __ATOMIC_SEQ_CST */
static inline void synch_set_bit(int nr, volatile void *addr)
{
	synch_test_and_set_bit(nr, addr);
}

/* As clear_bit, but using __ATOMIC_SEQ_CST */
static inline void synch_clear_bit(int nr, volatile void *addr)
{
	synch_test_and_clear_bit(nr, addr);
}
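
/*
 * Illustrative sketch (not used by this header): publishing and retiring a
 * flag that another CPU or the hypervisor may poll concurrently. The
 * example_flags byte and EXAMPLE_READY bit are hypothetical.
 *
 *	#define EXAMPLE_READY	0
 *	static u8 example_flags;
 *
 *	static void example_mark_ready(void)
 *	{
 *		synch_set_bit(EXAMPLE_READY, &example_flags);
 *	}
 *
 *	static void example_mark_busy(void)
 *	{
 *		synch_clear_bit(EXAMPLE_READY, &example_flags);
 *	}
 */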

/* As test_bit, but with a following memory barrier. */
static inline int synch_test_bit(int nr, const void *addr)
{
	int result;

	result = test_bit(nr, addr);
	barrier();
	return result;
}
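
/*
 * Illustrative sketch (not used by this header): polling a bit before acting
 * on data published by the other side; the barrier() in synch_test_bit()
 * keeps the compiler from hoisting later accesses above the check. It reuses
 * the hypothetical example_flags/EXAMPLE_READY names from the sketch above.
 *
 *	while (!synch_test_bit(EXAMPLE_READY, &example_flags))
 *		;
 */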

#define xchg(ptr, v)	__atomic_exchange_n(ptr, v, __ATOMIC_SEQ_CST)
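
/*
 * Illustrative sketch (not used by this header): atomically taking ownership
 * of a pointer while leaving NULL behind, so only one caller ever acts on it.
 * The example_slot and example_consume() names are hypothetical.
 *
 *	static void *example_slot;
 *
 *	static void *example_consume(void)
 *	{
 *		return xchg(&example_slot, NULL);
 *	}
 */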

#define xen_mb()	mb()
#define xen_rmb()	rmb()
#define xen_wmb()	wmb()
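
/*
 * Illustrative sketch (not used by this header): a producer-style pattern in
 * the spirit of the Xen shared-ring code. The slot contents are written
 * first, xen_wmb() orders that write, and only then is the producer index
 * advanced to publish the slot. The ring, req, prod_idx and i names are
 * hypothetical.
 *
 *	ring->req[i] = *req;
 *	xen_wmb();
 *	ring->prod_idx = i + 1;
 */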

#define to_phys(x)		((unsigned long)(x))
#define to_virt(x)		((void *)(x))

#define PFN_UP(x)	(unsigned long)(((x) + PAGE_SIZE - 1) >> PAGE_SHIFT)
#define PFN_DOWN(x)	(unsigned long)((x) >> PAGE_SHIFT)
#define PFN_PHYS(x)	((unsigned long)(x) << PAGE_SHIFT)
#define PHYS_PFN(x)	(unsigned long)((x) >> PAGE_SHIFT)

#define virt_to_pfn(_virt)	(PFN_DOWN(to_phys(_virt)))
#define virt_to_mfn(_virt)	(PFN_DOWN(to_phys(_virt)))
#define mfn_to_virt(_mfn)	(to_virt(PFN_PHYS(_mfn)))
#define pfn_to_virt(_pfn)	(to_virt(PFN_PHYS(_pfn)))
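
/*
 * Illustrative sketch (not used by this header): with the identity to_phys()
 * and to_virt() above and an assumed 4 KiB page (PAGE_SHIFT == 12), a buffer
 * at virtual address 0x40001234 converts as follows; the addresses are made
 * up.
 *
 *	virt_to_pfn((void *)0x40001234)	== 0x40001
 *	pfn_to_virt(0x40001)		== (void *)0x40001000
 *	PFN_UP(0x40001234)		== 0x40002	(rounds up to the next page boundary)
 */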

#endif