#ifndef __X86_ASM_DEFNS_H__
#define __X86_ASM_DEFNS_H__

#ifndef COMPILE_OFFSETS
/* NB. Auto-generated from arch/.../asm-offsets.c */
#include <asm/asm-offsets.h>
#endif
#include <asm/bug.h>
#include <asm/x86-defns.h>
#include <xen/stringify.h>
#include <asm/cpufeature.h>
#include <asm/alternative.h>

#ifdef __ASSEMBLY__
#ifndef CONFIG_INDIRECT_THUNK
.equ CONFIG_INDIRECT_THUNK, 0
#endif
#else
asm ( "\t.equ CONFIG_INDIRECT_THUNK, "
      __stringify(IS_ENABLED(CONFIG_INDIRECT_THUNK)) );
#endif
#include <asm/indirect_thunk_asm.h>

#ifndef __ASSEMBLY__
void ret_from_intr(void);

/*
 * This output constraint should be used for any inline asm which has a "call"
 * instruction.  Otherwise the asm may be inserted before the frame pointer
 * gets set up by the containing function.
 */
#ifdef CONFIG_FRAME_POINTER
register unsigned long current_stack_pointer asm("rsp");
# define ASM_CALL_CONSTRAINT , "+r" (current_stack_pointer)
#else
# define ASM_CALL_CONSTRAINT
#endif
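
/*
 * Illustrative use only (function and operand names are placeholders):
 *     asm volatile ( "call some_function"
 *                    : "=a" (ret) ASM_CALL_CONSTRAINT
 *                    : "D" (arg) );
 * Note the leading comma in the non-empty definition above: the constraint
 * is appended to an existing output operand list.
 */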

#endif

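/*
 * EFLAGS.IF is bit 9, i.e. bit 1 of the second byte of the value PUSHF
 * stores, which is what the "testb $X86_EFLAGS_IF>>8, 1(%rsp)" below
 * inspects before deciding whether to fail the assertion.
 */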
#ifndef NDEBUG
#define ASSERT_INTERRUPT_STATUS(x, msg)         \
        pushf;                                  \
        testb $X86_EFLAGS_IF>>8,1(%rsp);        \
        j##x  1f;                               \
        ASSERT_FAILED(msg);                     \
1:      addq  $8,%rsp;
#else
#define ASSERT_INTERRUPT_STATUS(x, msg)
#endif

#define ASSERT_INTERRUPTS_ENABLED \
    ASSERT_INTERRUPT_STATUS(nz, "INTERRUPTS ENABLED")
#define ASSERT_INTERRUPTS_DISABLED \
    ASSERT_INTERRUPT_STATUS(z, "INTERRUPTS DISABLED")

#ifdef __ASSEMBLY__
# define _ASM_EX(p) p-.
#else
# define _ASM_EX(p) #p "-."
#endif

/* Exception table entry */
#ifdef __ASSEMBLY__
# define _ASM__EXTABLE(sfx, from, to)             \
    .section .ex_table##sfx, "a" ;                \
    .balign 4 ;                                   \
    .long _ASM_EX(from), _ASM_EX(to) ;            \
    .previous
#else
# define _ASM__EXTABLE(sfx, from, to)             \
    " .section .ex_table" #sfx ",\"a\"\n"         \
    " .balign 4\n"                                \
    " .long " _ASM_EX(from) ", " _ASM_EX(to) "\n" \
    " .previous\n"
#endif

#define _ASM_EXTABLE(from, to)     _ASM__EXTABLE(, from, to)
#define _ASM_PRE_EXTABLE(from, to) _ASM__EXTABLE(.pre, from, to)
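
/*
 * An exception table entry pairs the address of a potentially faulting
 * instruction with the address at which execution resumes if it does fault.
 * Illustrative use from C (variable names are placeholders, labels are local
 * to the asm):
 *     asm volatile ( "1: mov %[ptr], %[val]\n"
 *                    "2:\n"
 *                    _ASM_EXTABLE(1b, 2b)
 *                    : [val] "=r" (val) : [ptr] "m" (*ptr) );
 */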

#ifdef __ASSEMBLY__

#ifdef HAVE_AS_QUOTED_SYM
#define SUBSECTION_LBL(tag)                        \
        .ifndef .L.tag;                            \
        .equ .L.tag, 1;                            \
        .equ __stringify(__OBJECT_LABEL__.tag), .; \
        .endif
#else
#define SUBSECTION_LBL(tag)                        \
        .ifndef __OBJECT_LABEL__.tag;              \
        __OBJECT_LABEL__.tag:;                     \
        .endif
#endif

#define UNLIKELY_START(cond, tag) \
        .Ldispatch.tag:           \
        j##cond .Lunlikely.tag;   \
        .subsection 1;            \
        SUBSECTION_LBL(unlikely); \
        .Lunlikely.tag:

#define UNLIKELY_DISPATCH_LABEL(tag) \
        .Ldispatch.tag

#define UNLIKELY_DONE(cond, tag)  \
        j##cond .Llikely.tag

#define __UNLIKELY_END(tag)       \
        .subsection 0;            \
        .Llikely.tag:

#define UNLIKELY_END(tag)         \
        UNLIKELY_DONE(mp, tag);   \
        __UNLIKELY_END(tag)

        .equ .Lrax, 0
        .equ .Lrcx, 1
        .equ .Lrdx, 2
        .equ .Lrbx, 3
        .equ .Lrsp, 4
        .equ .Lrbp, 5
        .equ .Lrsi, 6
        .equ .Lrdi, 7
        .equ .Lr8,  8
        .equ .Lr9,  9
        .equ .Lr10, 10
        .equ .Lr11, 11
        .equ .Lr12, 12
        .equ .Lr13, 13
        .equ .Lr14, 14
        .equ .Lr15, 15

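/*
 * The .Lr<name> equates above give each register its hardware encoding, so
 * GET_STACK_END() can tell the legacy registers (which have %e<name> forms)
 * from %r8..%r15.  The per-CPU struct cpu_info lives at the top of the
 * stack, so ORing %rsp with STACK_SIZE-1 yields the stack's last byte, and
 * STACK_CPUINFO_FIELD() converts that into the offset of a cpu_info field
 * relative to it (hence the "1 - CPUINFO_sizeof" bias).
 */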
#define STACK_CPUINFO_FIELD(field) (1 - CPUINFO_sizeof + CPUINFO_##field)
#define GET_STACK_END(reg)                        \
        .if .Lr##reg >= 8;                        \
        movq $STACK_SIZE-1, %r##reg;              \
        .else;                                    \
        movl $STACK_SIZE-1, %e##reg;              \
        .endif;                                   \
        orq  %rsp, %r##reg

#define GET_CPUINFO_FIELD(field, reg)             \
        GET_STACK_END(reg);                       \
        addq $STACK_CPUINFO_FIELD(field), %r##reg

#define GET_CURRENT(reg)                          \
        GET_STACK_END(reg);                       \
        movq STACK_CPUINFO_FIELD(current_vcpu)(%r##reg), %r##reg

#ifndef NDEBUG
#define ASSERT_NOT_IN_ATOMIC                                             \
    sti; /* sometimes called with interrupts disabled: safe to enable */ \
    call ASSERT_NOT_IN_ATOMIC
#else
#define ASSERT_NOT_IN_ATOMIC
#endif

#define CPUINFO_FEATURE_OFFSET(feature)           \
    (CPUINFO_features + (cpufeat_word(feature) * 4))

#else

#ifdef HAVE_AS_QUOTED_SYM
#define SUBSECTION_LBL(tag)                                          \
        ".ifndef .L." #tag "\n\t"                                    \
        ".equ .L." #tag ", 1\n\t"                                    \
        ".equ \"" __stringify(__OBJECT_LABEL__) "." #tag "\", .\n\t" \
        ".endif"
#else
#define SUBSECTION_LBL(tag)                                          \
        ".ifndef " __stringify(__OBJECT_LABEL__) "." #tag "\n\t"     \
        __stringify(__OBJECT_LABEL__) "." #tag ":\n\t"               \
        ".endif"
#endif

#ifdef __clang__ /* clang's builtin assembler can't do .subsection */

#define UNLIKELY_START_SECTION ".pushsection .text.unlikely,\"ax\""
#define UNLIKELY_END_SECTION   ".popsection"

#else

#define UNLIKELY_START_SECTION ".subsection 1"
#define UNLIKELY_END_SECTION   ".subsection 0"

#endif

#define UNLIKELY_START(cond, tag)                   \
        "j" #cond " .Lunlikely." #tag ".%=;\n\t"    \
        UNLIKELY_START_SECTION "\n\t"               \
        SUBSECTION_LBL(unlikely) "\n"               \
        ".Lunlikely." #tag ".%=:"

#define UNLIKELY_END(tag)                  \
        "jmp .Llikely." #tag ".%=;\n\t"    \
        UNLIKELY_END_SECTION "\n"          \
        ".Llikely." #tag ".%=:"

#endif

/* "Raw" instruction opcodes */
#define __ASM_CLAC      ".byte 0x0f,0x01,0xca"
#define __ASM_STAC      ".byte 0x0f,0x01,0xcb"
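
/*
 * 0f 01 ca and 0f 01 cb are the encodings of CLAC and STAC respectively,
 * expressed as raw bytes rather than mnemonics (avoiding any dependence on
 * assembler support for the SMAP instructions).
 */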

#ifdef __ASSEMBLY__
.macro ASM_STAC
    ALTERNATIVE "", __ASM_STAC, X86_FEATURE_XEN_SMAP
.endm
.macro ASM_CLAC
    ALTERNATIVE "", __ASM_CLAC, X86_FEATURE_XEN_SMAP
.endm
#else
static always_inline void clac(void)
{
    /* Note: a barrier is implicit in alternative() */
    alternative("", __ASM_CLAC, X86_FEATURE_XEN_SMAP);
}

static always_inline void stac(void)
{
    /* Note: a barrier is implicit in alternative() */
    alternative("", __ASM_STAC, X86_FEATURE_XEN_SMAP);
}
#endif

#undef __ASM_STAC
#undef __ASM_CLAC

#ifdef __ASSEMBLY__
.macro SAVE_ALL op, compat=0
.ifeqs "\op", "CLAC"
        ASM_CLAC
.else
.ifeqs "\op", "STAC"
        ASM_STAC
.else
.ifnb \op
        .err
.endif
.endif
.endif
        addq  $-(UREGS_error_code-UREGS_r15), %rsp
        cld
        movq  %rdi,UREGS_rdi(%rsp)
        xor   %edi, %edi
        movq  %rsi,UREGS_rsi(%rsp)
        xor   %esi, %esi
        movq  %rdx,UREGS_rdx(%rsp)
        xor   %edx, %edx
        movq  %rcx,UREGS_rcx(%rsp)
        xor   %ecx, %ecx
        movq  %rax,UREGS_rax(%rsp)
        xor   %eax, %eax
.if !\compat
        movq  %r8,UREGS_r8(%rsp)
        movq  %r9,UREGS_r9(%rsp)
        movq  %r10,UREGS_r10(%rsp)
        movq  %r11,UREGS_r11(%rsp)
.endif
        xor   %r8d, %r8d
        xor   %r9d, %r9d
        xor   %r10d, %r10d
        xor   %r11d, %r11d
        movq  %rbx,UREGS_rbx(%rsp)
        xor   %ebx, %ebx
        movq  %rbp,UREGS_rbp(%rsp)
#ifdef CONFIG_FRAME_POINTER
/* Indicate special exception stack frame by inverting the frame pointer. */
        leaq  UREGS_rbp(%rsp), %rbp
        notq  %rbp
#else
        xor   %ebp, %ebp
#endif
.if !\compat
        movq  %r12,UREGS_r12(%rsp)
        movq  %r13,UREGS_r13(%rsp)
        movq  %r14,UREGS_r14(%rsp)
        movq  %r15,UREGS_r15(%rsp)
.endif
        xor   %r12d, %r12d
        xor   %r13d, %r13d
        xor   %r14d, %r14d
        xor   %r15d, %r15d
.endm

#define LOAD_ONE_REG(reg, compat) \
.if !(compat); \
        movq  UREGS_r##reg(%rsp),%r##reg; \
.else; \
        movl  UREGS_r##reg(%rsp),%e##reg; \
.endif

/*
 * Restore all previously saved registers.
 *
 * @adj: extra stack pointer adjustment to be folded into the adjustment done
 *       anyway at the end of the macro
 * @compat: R8-R15 don't need reloading, but they are clobbered for added
 *          safety against information leaks.
 */
.macro RESTORE_ALL adj=0 compat=0
.if !\compat
        movq  UREGS_r15(%rsp), %r15
        movq  UREGS_r14(%rsp), %r14
        movq  UREGS_r13(%rsp), %r13
        movq  UREGS_r12(%rsp), %r12
.else
        xor %r15d, %r15d
        xor %r14d, %r14d
        xor %r13d, %r13d
        xor %r12d, %r12d
.endif
        LOAD_ONE_REG(bp, \compat)
        LOAD_ONE_REG(bx, \compat)
.if !\compat
        movq  UREGS_r11(%rsp),%r11
        movq  UREGS_r10(%rsp),%r10
        movq  UREGS_r9(%rsp),%r9
        movq  UREGS_r8(%rsp),%r8
.else
        xor %r11d, %r11d
        xor %r10d, %r10d
        xor %r9d, %r9d
        xor %r8d, %r8d
.endif
        LOAD_ONE_REG(ax, \compat)
        LOAD_ONE_REG(cx, \compat)
        LOAD_ONE_REG(dx, \compat)
        LOAD_ONE_REG(si, \compat)
        LOAD_ONE_REG(di, \compat)
        subq  $-(UREGS_error_code-UREGS_r15+\adj), %rsp
.endm
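
/*
 * The final subq above pops the saved-GPR area in one go, with @adj folded
 * in.  Illustrative use: "RESTORE_ALL adj=8" additionally discards the
 * 8-byte error code / entry vector slot, leaving %rsp at UREGS_rip.
 */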

#ifdef CONFIG_PV
#define CR4_PV32_RESTORE                               \
    ALTERNATIVE_2 "",                                  \
        "call cr4_pv32_restore", X86_FEATURE_XEN_SMEP, \
        "call cr4_pv32_restore", X86_FEATURE_XEN_SMAP
#else
#define CR4_PV32_RESTORE
#endif

#include <asm/spec_ctrl_asm.h>

#endif

/* Work around AMD erratum #88 */
#define safe_swapgs                             \
        "mfence; swapgs;"

#ifdef __sun__
#define REX64_PREFIX "rex64\\"
#elif defined(__clang__)
#define REX64_PREFIX ".byte 0x48; "
#else
#define REX64_PREFIX "rex64/"
#endif

#define ELFNOTE(name, type, desc)           \
    .pushsection .note.name, "a", @note   ; \
    .p2align 2                            ; \
    .long 2f - 1f       /* namesz */      ; \
    .long 4f - 3f       /* descsz */      ; \
    .long type          /* type   */      ; \
1:  .asciz #name        /* name   */      ; \
2:  .p2align 2                            ; \
3:  desc                /* desc   */      ; \
4:  .p2align 2                            ; \
    .popsection
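
/*
 * Illustrative use from a .S file (name, type and payload are placeholders):
 *     ELFNOTE(Example, 1, .asciz "some data")
 * The namesz/descsz fields are computed from the local labels above.
 */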

#define ASM_INT(label, val)                 \
    .p2align 2;                             \
label: .long (val);                         \
    .size label, . - label;                 \
    .type label, @object

#define ASM_CONSTANT(name, value)                \
    asm ( ".equ " #name ", %P0; .global " #name  \
          :: "i" ((value)) );
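
/*
 * ASM_CONSTANT() makes a C-evaluated constant expression visible to assembly
 * code as a global absolute symbol; the %P0 operand modifier prints the
 * constant without the usual '$' immediate prefix.  Illustrative use (the
 * symbol name is a placeholder):
 *     ASM_CONSTANT(example_stack_size, STACK_SIZE)
 */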
#endif /* __X86_ASM_DEFNS_H__ */