1 #include <assert.h>
2 #include <stdbool.h>
3 #include <stddef.h>
4 #include <stdint.h>
5 #include <stdlib.h>
6 /*
7 * Use of sse registers must be disabled prior to the definition of
8 * always_inline functions that would use them (memcpy, memset, etc),
9 * so do this as early as possible, aiming to be before any always_inline
10 * functions that are used are declared.
11 * Unfortunately, this cannot be done prior to inclusion of <stdlib.h>
12 * due to functions such as 'atof' that have SSE register return declared,
13 * so do so here, immediately after that.
14 */
15 #if __GNUC__ >= 6
16 # pragma GCC target("no-sse")
17 #endif
18 /*
19 * Attempt detection of unwanted prior inclusion of some headers known to use
20 * always_inline with SSE registers in some library / compiler / optimization
21 * combinations.
22 */
23 #ifdef _STRING_H
24 # error "Must not include <string.h> before x86-emulate.h"
25 #endif
26 #include <string.h>
27
28 /* EOF is a standard macro defined in <stdio.h> so use it for detection */
29 #ifdef EOF
30 # error "Must not include <stdio.h> before x86-emulate.h"
31 #endif
32 #ifdef WRAP
33 # include <stdio.h>
34 #endif
35
36 #include <xen/xen.h>
37
38 #include <xen/asm/msr-index.h>
39 #include <xen/asm/x86-defns.h>
40 #include <xen/asm/x86-vendors.h>
41
42 #include <xen-tools/libs.h>
43
/* Map hypervisor-style fatal-error macros onto the host C runtime. */
#define BUG() abort()
#define ASSERT assert
/* assert(!__LINE__) always fails and reports the offending line number. */
#define ASSERT_UNREACHABLE() assert(!__LINE__)

/*
 * Extract/insert a bitfield described by mask m from/into value v.
 * (m) & -(m) isolates the lowest set bit of the mask, so the divide /
 * multiply shift the field down to / up from bit 0.
 */
#define MASK_EXTR(v, m) (((v) & (m)) / ((m) & -(m)))
#define MASK_INSR(v, m) (((v) * ((m) & -(m))) & (m))

/* Xen annotations that have no meaning in a userspace harness. */
#define __init
#define __maybe_unused __attribute__((__unused__))

/* Branch-prediction hints, as used by the emulator sources. */
#define likely(x) __builtin_expect(!!(x), true)
#define unlikely(x) __builtin_expect(!!(x), false)

/* Recover a pointer to the enclosing structure from a member pointer. */
#define container_of(ptr, type, member) ({ \
    typeof(((type *)0)->member) *mptr__ = (ptr); \
    (type *)((char *)mptr__ - offsetof(type, member)); \
})

/* Paste an integer literal with a type suffix, e.g. _AC(1, UL) -> 1UL. */
#define AC_(n,t) (n##t)
#define _AC(n,t) AC_(n,t)

/* Population count (number of set bits). */
#define hweight32 __builtin_popcount
#define hweight64 __builtin_popcountll

/* An x86-64 canonical address has bits 47..63 all equal to bit 47. */
#define is_canonical_address(x) (((int64_t)(x) >> 47) == ((int64_t)(x) >> 63))
69
/* Host-supported MXCSR bits, probed at start of day (see emul_test_init()). */
extern uint32_t mxcsr_mask;
/* CPUID policy describing the (possibly faked) CPU the tests run against. */
extern struct cpuid_policy cp;

/* Size of the executable scratch mapping used to place test instructions. */
#define MMAP_SZ 16384
/* One-time harness setup; returns false on failure. */
bool emul_test_init(void);

/* Must save and restore FPU state between any call into libc. */
void emul_save_fpu_state(void);
void emul_restore_fpu_state(void);
79
80 /*
81 * In order to reasonably use the above, wrap library calls we use and which we
82 * think might access any of the FPU state into wrappers saving/restoring state
83 * around the actual function.
84 */
#ifndef WRAP
# if 0 /* This only works for explicit calls, not for compiler generated ones. */
#  define WRAP(x) typeof(x) x asm("emul_" #x)
# else
/*
 * Redirect the symbol at the assembler level: every reference to x (including
 * compiler-generated ones, e.g. from printf -> puts folding) resolves to the
 * FPU-state-preserving emul_<x> wrapper instead.
 */
#  define WRAP(x) asm(".equ " #x ", emul_" #x)
# endif
#endif

/* The set of libc entry points the harness routes through wrappers. */
WRAP(fwrite);
WRAP(memcmp);
WRAP(memcpy);
WRAP(memset);
WRAP(printf);
WRAP(putchar);
WRAP(puts);

/* Done aliasing - later code must not WRAP() further symbols. */
#undef WRAP
102
103 #include "x86_emulate/x86_emulate.h"
104
/* Exercise EVEX disp8 (compressed displacement) decoding against @ops. */
void evex_disp8_test(void *instr, struct x86_emulate_ctxt *ctxt,
                     const struct x86_emulate_ops *ops);
/* Exercise instruction-predicate handling, fetching bytes via @fetch. */
void predicates_test(void *instr, struct x86_emulate_ctxt *ctxt,
                     int (*fetch)(enum x86_segment seg,
                                  unsigned long offset,
                                  void *p_data,
                                  unsigned int bytes,
                                  struct x86_emulate_ctxt *ctxt));
113
/*
 * Read extended control register @xcr (XGETBV).  Encoded as raw bytes so
 * the build does not depend on assembler support for the mnemonic.
 * NOTE(review): callers are expected to gate on cpu_has_xsave (see
 * xcr0_mask() below) since the insn is only usable with OSXSAVE enabled.
 */
static inline uint64_t xgetbv(uint32_t xcr)
{
    uint32_t lo, hi;

    /* xgetbv: EDX:EAX <- XCR[%ecx]. */
    asm ( ".byte 0x0f, 0x01, 0xd0" : "=a" (lo), "=d" (hi) : "c" (xcr) );

    return ((uint64_t)hi << 32) | lo;
}
122
123 /* Intentionally checking OSXSAVE here. */
124 #define cpu_has_xsave (cp.basic.raw[1].c & (1u << 27))
125
xcr0_mask(uint64_t mask)126 static inline bool xcr0_mask(uint64_t mask)
127 {
128 return cpu_has_xsave && ((xgetbv(0) & mask) == mask);
129 }
130
/* CLFLUSH line size is reported in units of 8 bytes. */
#define cache_line_size() (cp.basic.clflush_size * 8)

/*
 * Feature tests derived from the CPUID policy.  Vector extensions
 * additionally require the relevant XCR0 state components to be enabled:
 * mask 6 = x87-adjacent SSE+YMM states, mask 0xe6 adds the AVX-512
 * opmask/ZMM states.
 */
#define cpu_has_fpu cp.basic.fpu
#define cpu_has_mmx cp.basic.mmx
#define cpu_has_fxsr cp.basic.fxsr
#define cpu_has_sse cp.basic.sse
#define cpu_has_sse2 cp.basic.sse2
#define cpu_has_sse3 cp.basic.sse3
#define cpu_has_pclmulqdq cp.basic.pclmulqdq
#define cpu_has_ssse3 cp.basic.ssse3
#define cpu_has_fma (cp.basic.fma && xcr0_mask(6))
#define cpu_has_sse4_1 cp.basic.sse4_1
#define cpu_has_sse4_2 cp.basic.sse4_2
#define cpu_has_popcnt cp.basic.popcnt
#define cpu_has_aesni cp.basic.aesni
#define cpu_has_avx (cp.basic.avx && xcr0_mask(6))
#define cpu_has_f16c (cp.basic.f16c && xcr0_mask(6))

/* Structured extended features (CPUID leaf 7). */
#define cpu_has_avx2 (cp.feat.avx2 && xcr0_mask(6))
#define cpu_has_bmi1 cp.feat.bmi1
#define cpu_has_bmi2 cp.feat.bmi2
#define cpu_has_avx512f (cp.feat.avx512f && xcr0_mask(0xe6))
#define cpu_has_avx512dq (cp.feat.avx512dq && xcr0_mask(0xe6))
#define cpu_has_avx512_ifma (cp.feat.avx512_ifma && xcr0_mask(0xe6))
#define cpu_has_avx512er (cp.feat.avx512er && xcr0_mask(0xe6))
#define cpu_has_avx512cd (cp.feat.avx512cd && xcr0_mask(0xe6))
#define cpu_has_sha cp.feat.sha
#define cpu_has_avx512bw (cp.feat.avx512bw && xcr0_mask(0xe6))
#define cpu_has_avx512vl (cp.feat.avx512vl && xcr0_mask(0xe6))
#define cpu_has_avx512_vbmi (cp.feat.avx512_vbmi && xcr0_mask(0xe6))
#define cpu_has_avx512_vbmi2 (cp.feat.avx512_vbmi2 && xcr0_mask(0xe6))
#define cpu_has_gfni cp.feat.gfni
#define cpu_has_vaes (cp.feat.vaes && xcr0_mask(6))
#define cpu_has_vpclmulqdq (cp.feat.vpclmulqdq && xcr0_mask(6))
#define cpu_has_avx512_vnni (cp.feat.avx512_vnni && xcr0_mask(0xe6))
#define cpu_has_avx512_bitalg (cp.feat.avx512_bitalg && xcr0_mask(0xe6))
#define cpu_has_avx512_vpopcntdq (cp.feat.avx512_vpopcntdq && xcr0_mask(0xe6))
#define cpu_has_movdiri cp.feat.movdiri
#define cpu_has_movdir64b cp.feat.movdir64b
#define cpu_has_avx512_4vnniw (cp.feat.avx512_4vnniw && xcr0_mask(0xe6))
#define cpu_has_avx512_4fmaps (cp.feat.avx512_4fmaps && xcr0_mask(0xe6))
#define cpu_has_serialize cp.feat.serialize
#define cpu_has_avx512_bf16 (cp.feat.avx512_bf16 && xcr0_mask(0xe6))

/* XSAVE sub-leaf features; meaningless without XSAVE itself. */
#define cpu_has_xgetbv1 (cpu_has_xsave && cp.xstate.xgetbv1)

/* Extended (AMD-originated) leaves. */
#define cpu_has_3dnow_ext cp.extd._3dnowext
#define cpu_has_sse4a cp.extd.sse4a
#define cpu_has_xop (cp.extd.xop && xcr0_mask(6))
#define cpu_has_fma4 (cp.extd.fma4 && xcr0_mask(6))
#define cpu_has_tbm cp.extd.tbm
181
/*
 * Emulator callback hooks backed by the harness.  These plug into
 * struct x86_emulate_ops and return X86EMUL_* status codes.
 */

/* CPUID hook: fill *res from the harness's cpuid policy. */
int emul_test_cpuid(
    uint32_t leaf,
    uint32_t subleaf,
    struct cpuid_leaf *res,
    struct x86_emulate_ctxt *ctxt);

/* Control register read hook. */
int emul_test_read_cr(
    unsigned int reg,
    unsigned long *val,
    struct x86_emulate_ctxt *ctxt);

/* Extended control register (XCR) read hook. */
int emul_test_read_xcr(
    unsigned int reg,
    uint64_t *val,
    struct x86_emulate_ctxt *ctxt);

/* Prepare the FPU for emulated use of the given state type. */
int emul_test_get_fpu(
    enum x86_emulate_fpu_type type,
    struct x86_emulate_ctxt *ctxt);

/* Release the FPU, optionally backing out partially-committed state. */
void emul_test_put_fpu(
    struct x86_emulate_ctxt *ctxt,
    enum x86_emulate_fpu_type backout,
    const struct x86_emul_fpu_aux *aux);