1*4f1223e8SApple OSS Distributions #ifdef T_NAMESPACE
2*4f1223e8SApple OSS Distributions #undef T_NAMESPACE
3*4f1223e8SApple OSS Distributions #endif
4*4f1223e8SApple OSS Distributions
5*4f1223e8SApple OSS Distributions #include <darwintest.h>
6*4f1223e8SApple OSS Distributions #include <unistd.h>
7*4f1223e8SApple OSS Distributions #include <signal.h>
8*4f1223e8SApple OSS Distributions #include <sys/time.h>
9*4f1223e8SApple OSS Distributions #include <sys/mman.h>
10*4f1223e8SApple OSS Distributions #include <immintrin.h>
11*4f1223e8SApple OSS Distributions #include <mach/mach.h>
12*4f1223e8SApple OSS Distributions #include <stdio.h>
13*4f1223e8SApple OSS Distributions #include <string.h>
14*4f1223e8SApple OSS Distributions #include <err.h>
15*4f1223e8SApple OSS Distributions #include <i386/cpu_capabilities.h>
16*4f1223e8SApple OSS Distributions
17*4f1223e8SApple OSS Distributions T_GLOBAL_META(
18*4f1223e8SApple OSS Distributions T_META_NAMESPACE("xnu.intel"),
19*4f1223e8SApple OSS Distributions T_META_CHECK_LEAKS(false),
20*4f1223e8SApple OSS Distributions T_META_RADAR_COMPONENT_NAME("xnu"),
21*4f1223e8SApple OSS Distributions T_META_RADAR_COMPONENT_VERSION("intel"),
22*4f1223e8SApple OSS Distributions T_META_OWNER("seth_goldberg"),
23*4f1223e8SApple OSS Distributions T_META_RUN_CONCURRENTLY(true)
24*4f1223e8SApple OSS Distributions );
25*4f1223e8SApple OSS Distributions
/* Durations (seconds) for the register-checking loops; TIMEOUT_OVERHEAD is scheduling slop. */
#define QUICK_RUN_TIME (2)
#define NORMAL_RUN_TIME (10)
#define LONG_RUN_TIME (10*60)
#define TIMEOUT_OVERHEAD (10)

/* Cleared by the SIGALRM handlers to break the main checking loop. */
volatile boolean_t checking = true;
/* Scratch buffer for logging vector register contents (8196 (sic) — presumably 8192 intended; harmless). */
char vec_str_buf[8196];
/* Scratch buffer for logging AVX-512 opmask register contents. */
char karray_str_buf[1024];
34*4f1223e8SApple OSS Distributions
35*4f1223e8SApple OSS Distributions /*
36*4f1223e8SApple OSS Distributions * ymm defines/globals/prototypes
37*4f1223e8SApple OSS Distributions */
/* Sentinel value the signal handler plants in %xmm7/%ymmh7 to stop the loop. */
#define STOP_COOKIE_256 0x01234567
#if defined(__x86_64__)
/* 64-bit mode: %ymm0-%ymm15, 64-bit AVX thread-state flavor. */
#define YMM_MAX 16
#define X86_AVX_STATE_T x86_avx_state64_t
#define X86_AVX_STATE_COUNT x86_AVX_STATE64_COUNT
#define X86_AVX_STATE_FLAVOR x86_AVX_STATE64
#define MCONTEXT_SIZE_256 sizeof(struct __darwin_mcontext_avx64)
#else
/* 32-bit mode: only %ymm0-%ymm7, 32-bit AVX thread-state flavor. */
#define YMM_MAX 8
#define X86_AVX_STATE_T x86_avx_state32_t
#define X86_AVX_STATE_COUNT x86_AVX_STATE32_COUNT
#define X86_AVX_STATE_FLAVOR x86_AVX_STATE32
#define MCONTEXT_SIZE_256 sizeof(struct __darwin_mcontext_avx32)
#endif
#define VECTOR256 __m256
#define VEC256ALIGN __attribute ((aligned(32)))
static inline void populate_ymm(void);
static inline void check_ymm(void);
/* Register snapshots: array0 holds the expected pattern, array1-3 are scratch. */
VECTOR256 vec256array0[YMM_MAX] VEC256ALIGN;
VECTOR256 vec256array1[YMM_MAX] VEC256ALIGN;
VECTOR256 vec256array2[YMM_MAX] VEC256ALIGN;
VECTOR256 vec256array3[YMM_MAX] VEC256ALIGN;
60*4f1223e8SApple OSS Distributions
61*4f1223e8SApple OSS Distributions /*
62*4f1223e8SApple OSS Distributions * zmm defines/globals/prototypes
63*4f1223e8SApple OSS Distributions */
/* Sentinel value planted in saved zmm/opmask state to stop the checking loop. */
#define STOP_COOKIE_512 0x0123456789abcdefULL
#if defined(__x86_64__)
/* 64-bit mode: %zmm0-%zmm31, 64-bit AVX-512 thread-state flavor. */
#define ZMM_MAX 32
#define X86_AVX512_STATE_T x86_avx512_state64_t
#define X86_AVX512_STATE_COUNT x86_AVX512_STATE64_COUNT
#define X86_AVX512_STATE_FLAVOR x86_AVX512_STATE64
#define MCONTEXT_SIZE_512 sizeof(struct __darwin_mcontext_avx512_64)
#else
/* 32-bit mode: only %zmm0-%zmm7, 32-bit AVX-512 thread-state flavor. */
#define ZMM_MAX 8
#define X86_AVX512_STATE_T x86_avx512_state32_t
#define X86_AVX512_STATE_COUNT x86_AVX512_STATE32_COUNT
#define X86_AVX512_STATE_FLAVOR x86_AVX512_STATE32
#define MCONTEXT_SIZE_512 sizeof(struct __darwin_mcontext_avx512_32)
#endif
#define VECTOR512 __m512
#define VEC512ALIGN __attribute ((aligned(64)))
#define OPMASK uint64_t
#define KARRAY_MAX 8
static inline void zero_zmm(void);
static inline void zero_opmask(void);
static inline void populate_zmm(void);
static inline void populate_opmask(void);
static inline void check_zmm(boolean_t check_cookie);
/* Register snapshots: array0 holds the expected pattern, array1-3 are scratch. */
VECTOR512 vec512array0[ZMM_MAX] VEC512ALIGN;
VECTOR512 vec512array1[ZMM_MAX] VEC512ALIGN;
VECTOR512 vec512array2[ZMM_MAX] VEC512ALIGN;
VECTOR512 vec512array3[ZMM_MAX] VEC512ALIGN;
/* Opmask (%k0-%k7) snapshots, same roles as the vector arrays above. */
OPMASK karray0[8];
OPMASK karray1[8];
OPMASK karray2[8];
OPMASK karray3[8];

/*
 * Wrappers around thread_get_state() that preserve the live vector
 * registers across the call (see comments at the definitions).
 */
kern_return_t _thread_get_state_avx(thread_t thread, int flavor, thread_state_t state,
    mach_msg_type_number_t *state_count);
kern_return_t _thread_get_state_avx512(thread_t thread, int flavor, thread_state_t state,
    mach_msg_type_number_t *state_count);
100*4f1223e8SApple OSS Distributions
101*4f1223e8SApple OSS Distributions /*
102*4f1223e8SApple OSS Distributions * Common functions
103*4f1223e8SApple OSS Distributions */
104*4f1223e8SApple OSS Distributions
/*
 * Plain byte-by-byte memcmp replacement.  Deliberately hand-rolled —
 * presumably so the comparison itself does not go through the libc
 * (vectorized) memcmp and perturb the registers under test; confirm
 * against the libc implementation if this is ever revisited.
 * Returns 0 on equality, else the difference of the first mismatching
 * bytes (as unsigned chars), matching memcmp(3) sign semantics.
 */
int
memcmp_unoptimized(const void *s1, const void *s2, size_t n)
{
	const unsigned char *a = s1;
	const unsigned char *b = s2;

	for (size_t i = 0; i < n; i++) {
		if (a[i] != b[i]) {
			return a[i] - b[i];
		}
	}
	return 0;
}
118*4f1223e8SApple OSS Distributions
119*4f1223e8SApple OSS Distributions void
start_timer(int seconds,void (* handler)(int,siginfo_t *,void *))120*4f1223e8SApple OSS Distributions start_timer(int seconds, void (*handler)(int, siginfo_t *, void *))
121*4f1223e8SApple OSS Distributions {
122*4f1223e8SApple OSS Distributions struct sigaction sigalrm_action = {
123*4f1223e8SApple OSS Distributions .sa_sigaction = handler,
124*4f1223e8SApple OSS Distributions .sa_flags = SA_RESTART,
125*4f1223e8SApple OSS Distributions .sa_mask = 0
126*4f1223e8SApple OSS Distributions };
127*4f1223e8SApple OSS Distributions struct itimerval timer = {
128*4f1223e8SApple OSS Distributions .it_value.tv_sec = seconds,
129*4f1223e8SApple OSS Distributions .it_value.tv_usec = 0,
130*4f1223e8SApple OSS Distributions .it_interval.tv_sec = 0,
131*4f1223e8SApple OSS Distributions .it_interval.tv_usec = 0
132*4f1223e8SApple OSS Distributions };
133*4f1223e8SApple OSS Distributions T_QUIET; T_WITH_ERRNO;
134*4f1223e8SApple OSS Distributions T_ASSERT_NE(sigaction(SIGALRM, &sigalrm_action, NULL), -1, NULL);
135*4f1223e8SApple OSS Distributions T_QUIET; T_WITH_ERRNO;
136*4f1223e8SApple OSS Distributions T_ASSERT_NE(setitimer(ITIMER_REAL, &timer, NULL), -1, NULL);
137*4f1223e8SApple OSS Distributions }
138*4f1223e8SApple OSS Distributions
139*4f1223e8SApple OSS Distributions void
require_avx(void)140*4f1223e8SApple OSS Distributions require_avx(void)
141*4f1223e8SApple OSS Distributions {
142*4f1223e8SApple OSS Distributions if ((_get_cpu_capabilities() & kHasAVX1_0) != kHasAVX1_0) {
143*4f1223e8SApple OSS Distributions T_SKIP("AVX not supported on this system");
144*4f1223e8SApple OSS Distributions }
145*4f1223e8SApple OSS Distributions }
146*4f1223e8SApple OSS Distributions
147*4f1223e8SApple OSS Distributions void
require_avx512(void)148*4f1223e8SApple OSS Distributions require_avx512(void)
149*4f1223e8SApple OSS Distributions {
150*4f1223e8SApple OSS Distributions if ((_get_cpu_capabilities() & kHasAVX512F) != kHasAVX512F) {
151*4f1223e8SApple OSS Distributions T_SKIP("AVX-512 not supported on this system");
152*4f1223e8SApple OSS Distributions }
153*4f1223e8SApple OSS Distributions }
154*4f1223e8SApple OSS Distributions
155*4f1223e8SApple OSS Distributions /*
156*4f1223e8SApple OSS Distributions * ymm functions
157*4f1223e8SApple OSS Distributions */
158*4f1223e8SApple OSS Distributions
/*
 * Snapshot the live %ymm register file into the caller's 32-byte-aligned
 * array, one register per slot (%ymm0-%ymm7, plus %ymm8-%ymm15 on x86_64).
 * Each register number is hard-coded in its asm template, so the i++
 * sequencing below must stay exactly in this order.
 */
static inline void
store_ymm(VECTOR256 *vec256array)
{
	int i = 0;
	__asm__ volatile ("vmovaps %%ymm0, %0" :"=m" (vec256array[i]));
	i++; __asm__ volatile ("vmovaps %%ymm1, %0" :"=m" (vec256array[i]));
	i++; __asm__ volatile ("vmovaps %%ymm2, %0" :"=m" (vec256array[i]));
	i++; __asm__ volatile ("vmovaps %%ymm3, %0" :"=m" (vec256array[i]));
	i++; __asm__ volatile ("vmovaps %%ymm4, %0" :"=m" (vec256array[i]));
	i++; __asm__ volatile ("vmovaps %%ymm5, %0" :"=m" (vec256array[i]));
	i++; __asm__ volatile ("vmovaps %%ymm6, %0" :"=m" (vec256array[i]));
	i++; __asm__ volatile ("vmovaps %%ymm7, %0" :"=m" (vec256array[i]));
#if defined(__x86_64__)
	i++; __asm__ volatile ("vmovaps %%ymm8, %0" :"=m" (vec256array[i]));
	i++; __asm__ volatile ("vmovaps %%ymm9, %0" :"=m" (vec256array[i]));
	i++; __asm__ volatile ("vmovaps %%ymm10, %0" :"=m" (vec256array[i]));
	i++; __asm__ volatile ("vmovaps %%ymm11, %0" :"=m" (vec256array[i]));
	i++; __asm__ volatile ("vmovaps %%ymm12, %0" :"=m" (vec256array[i]));
	i++; __asm__ volatile ("vmovaps %%ymm13, %0" :"=m" (vec256array[i]));
	i++; __asm__ volatile ("vmovaps %%ymm14, %0" :"=m" (vec256array[i]));
	i++; __asm__ volatile ("vmovaps %%ymm15, %0" :"=m" (vec256array[i]));
#endif
}
182*4f1223e8SApple OSS Distributions
/*
 * Load the %ymm register file from the caller's 32-byte-aligned array
 * (inverse of store_ymm).  The pointer is advanced one VECTOR256 slot
 * per register; the p++/++p sequencing must remain exactly as written
 * so each register receives its own slot.
 */
static inline void
restore_ymm(VECTOR256 *vec256array)
{
	VECTOR256 *p = vec256array;

	__asm__ volatile ("vmovaps %0, %%ymm0" :: "m" (*(__m256i*)p) : "ymm0"); p++;
	__asm__ volatile ("vmovaps %0, %%ymm1" :: "m" (*(__m256i*)p) : "ymm1"); p++;
	__asm__ volatile ("vmovaps %0, %%ymm2" :: "m" (*(__m256i*)p) : "ymm2"); p++;
	__asm__ volatile ("vmovaps %0, %%ymm3" :: "m" (*(__m256i*)p) : "ymm3"); p++;
	__asm__ volatile ("vmovaps %0, %%ymm4" :: "m" (*(__m256i*)p) : "ymm4"); p++;
	__asm__ volatile ("vmovaps %0, %%ymm5" :: "m" (*(__m256i*)p) : "ymm5"); p++;
	__asm__ volatile ("vmovaps %0, %%ymm6" :: "m" (*(__m256i*)p) : "ymm6"); p++;
	__asm__ volatile ("vmovaps %0, %%ymm7" :: "m" (*(__m256i*)p) : "ymm7");

#if defined(__x86_64__)
	/* Pre-increment here steps past the ymm7 slot before loading ymm8. */
	++p; __asm__ volatile ("vmovaps %0, %%ymm8" :: "m" (*(__m256i*)p) : "ymm8"); p++;
	__asm__ volatile ("vmovaps %0, %%ymm9" :: "m" (*(__m256i*)p) : "ymm9"); p++;
	__asm__ volatile ("vmovaps %0, %%ymm10" :: "m" (*(__m256i*)p) : "ymm10"); p++;
	__asm__ volatile ("vmovaps %0, %%ymm11" :: "m" (*(__m256i*)p) : "ymm11"); p++;
	__asm__ volatile ("vmovaps %0, %%ymm12" :: "m" (*(__m256i*)p) : "ymm12"); p++;
	__asm__ volatile ("vmovaps %0, %%ymm13" :: "m" (*(__m256i*)p) : "ymm13"); p++;
	__asm__ volatile ("vmovaps %0, %%ymm14" :: "m" (*(__m256i*)p) : "ymm14"); p++;
	__asm__ volatile ("vmovaps %0, %%ymm15" :: "m" (*(__m256i*)p) : "ymm15");
#endif
}
208*4f1223e8SApple OSS Distributions
/*
 * Fill every %ymm register with a recognizable pattern: each 32-bit lane
 * holds getpid(), except the first and last lanes which carry per-group
 * marker constants (so registers from different groups are distinguishable
 * in logs).  Finally snapshot the registers into vec256array0, which the
 * checkers treat as the expected state.
 */
static inline void
populate_ymm(void)
{
	int j;
	uint32_t p[8] VEC256ALIGN;

	for (j = 0; j < (int) (sizeof(p) / sizeof(p[0])); j++) {
		p[j] = getpid();
	}

	p[0] = 0x22222222;
	p[7] = 0x77777777;
	__asm__ volatile ("vmovaps %0, %%ymm0" :: "m" (*(__m256i*)p) : "ymm0");
	__asm__ volatile ("vmovaps %0, %%ymm1" :: "m" (*(__m256i*)p) : "ymm1");
	__asm__ volatile ("vmovaps %0, %%ymm2" :: "m" (*(__m256i*)p) : "ymm2");
	__asm__ volatile ("vmovaps %0, %%ymm3" :: "m" (*(__m256i*)p) : "ymm3");

	p[0] = 0x44444444;
	p[7] = 0xEEEEEEEE;
	__asm__ volatile ("vmovaps %0, %%ymm4" :: "m" (*(__m256i*)p) : "ymm4");
	__asm__ volatile ("vmovaps %0, %%ymm5" :: "m" (*(__m256i*)p) : "ymm5");
	__asm__ volatile ("vmovaps %0, %%ymm6" :: "m" (*(__m256i*)p) : "ymm6");
	__asm__ volatile ("vmovaps %0, %%ymm7" :: "m" (*(__m256i*)p) : "ymm7");

#if defined(__x86_64__)
	p[0] = 0x88888888;
	p[7] = 0xAAAAAAAA;
	__asm__ volatile ("vmovaps %0, %%ymm8" :: "m" (*(__m256i*)p) : "ymm8");
	__asm__ volatile ("vmovaps %0, %%ymm9" :: "m" (*(__m256i*)p) : "ymm9");
	__asm__ volatile ("vmovaps %0, %%ymm10" :: "m" (*(__m256i*)p) : "ymm10");
	__asm__ volatile ("vmovaps %0, %%ymm11" :: "m" (*(__m256i*)p) : "ymm11");

	p[0] = 0xBBBBBBBB;
	p[7] = 0xCCCCCCCC;
	__asm__ volatile ("vmovaps %0, %%ymm12" :: "m" (*(__m256i*)p) : "ymm12");
	__asm__ volatile ("vmovaps %0, %%ymm13" :: "m" (*(__m256i*)p) : "ymm13");
	__asm__ volatile ("vmovaps %0, %%ymm14" :: "m" (*(__m256i*)p) : "ymm14");
	__asm__ volatile ("vmovaps %0, %%ymm15" :: "m" (*(__m256i*)p) : "ymm15");
#endif

	/* Record the freshly loaded registers as the expected snapshot. */
	store_ymm(vec256array0);
}
251*4f1223e8SApple OSS Distributions
252*4f1223e8SApple OSS Distributions void
vec256_to_string(VECTOR256 * vec,char * buf)253*4f1223e8SApple OSS Distributions vec256_to_string(VECTOR256 *vec, char *buf)
254*4f1223e8SApple OSS Distributions {
255*4f1223e8SApple OSS Distributions unsigned int vec_idx = 0;
256*4f1223e8SApple OSS Distributions unsigned int buf_idx = 0;
257*4f1223e8SApple OSS Distributions int ret = 0;
258*4f1223e8SApple OSS Distributions
259*4f1223e8SApple OSS Distributions for (vec_idx = 0; vec_idx < YMM_MAX; vec_idx++) {
260*4f1223e8SApple OSS Distributions uint64_t a[4];
261*4f1223e8SApple OSS Distributions bcopy(&vec[vec_idx], &a[0], sizeof(a));
262*4f1223e8SApple OSS Distributions ret = sprintf(
263*4f1223e8SApple OSS Distributions buf + buf_idx,
264*4f1223e8SApple OSS Distributions "0x%016llx:%016llx:%016llx:%016llx\n",
265*4f1223e8SApple OSS Distributions a[0], a[1], a[2], a[3]
266*4f1223e8SApple OSS Distributions );
267*4f1223e8SApple OSS Distributions T_QUIET; T_ASSERT_POSIX_SUCCESS(ret, "sprintf()");
268*4f1223e8SApple OSS Distributions buf_idx += ret;
269*4f1223e8SApple OSS Distributions }
270*4f1223e8SApple OSS Distributions }
271*4f1223e8SApple OSS Distributions
272*4f1223e8SApple OSS Distributions void
assert_ymm_eq(void * a,void * b,int c)273*4f1223e8SApple OSS Distributions assert_ymm_eq(void *a, void *b, int c)
274*4f1223e8SApple OSS Distributions {
275*4f1223e8SApple OSS Distributions if (memcmp_unoptimized(a, b, c)) {
276*4f1223e8SApple OSS Distributions vec256_to_string(a, vec_str_buf);
277*4f1223e8SApple OSS Distributions T_LOG("Compare failed, vector A:\n%s", vec_str_buf);
278*4f1223e8SApple OSS Distributions vec256_to_string(b, vec_str_buf);
279*4f1223e8SApple OSS Distributions T_LOG("Compare failed, vector B:\n%s", vec_str_buf);
280*4f1223e8SApple OSS Distributions T_ASSERT_FAIL("vectors not equal");
281*4f1223e8SApple OSS Distributions }
282*4f1223e8SApple OSS Distributions }
283*4f1223e8SApple OSS Distributions
284*4f1223e8SApple OSS Distributions void
check_ymm(void)285*4f1223e8SApple OSS Distributions check_ymm(void)
286*4f1223e8SApple OSS Distributions {
287*4f1223e8SApple OSS Distributions uint32_t *p = (uint32_t *) &vec256array1[7];
288*4f1223e8SApple OSS Distributions store_ymm(vec256array1);
289*4f1223e8SApple OSS Distributions if (p[0] == STOP_COOKIE_256) {
290*4f1223e8SApple OSS Distributions return;
291*4f1223e8SApple OSS Distributions }
292*4f1223e8SApple OSS Distributions assert_ymm_eq(vec256array0, vec256array1, sizeof(vec256array0));
293*4f1223e8SApple OSS Distributions }
294*4f1223e8SApple OSS Distributions
/*
 * Reassemble full 256-bit registers from a Mach AVX thread-state into the
 * vector array: for register i, the low 128 bits come from the xmm field
 * and the high 128 bits from the ymmh field, placed at byte offset 16
 * inside vp[i].  Relies on the __fpu_xmm0.../__fpu_ymmh0... fields being
 * laid out as consecutive arrays in X86_AVX_STATE_T — NOTE(review):
 * confirm against the mach/i386 thread_status definitions.
 */
static void
copy_ymm_state_to_vector(X86_AVX_STATE_T *sp, VECTOR256 *vp)
{
	int i;
	struct __darwin_xmm_reg *xmm = &sp->__fpu_xmm0;
	struct __darwin_xmm_reg *ymmh = &sp->__fpu_ymmh0;

	for (i = 0; i < YMM_MAX; i++) {
		/* Low 128 bits at offset 0, high 128 bits at offset sizeof(*ymmh) == 16. */
		bcopy(&xmm[i], &vp[i], sizeof(*xmm));
		bcopy(&ymmh[i], (void *) ((uint64_t)&vp[i] + sizeof(*ymmh)), sizeof(*ymmh));
	}
}
307*4f1223e8SApple OSS Distributions
/*
 * SIGALRM handler for the ymm integrity test.  Verifies that the AVX
 * state captured in the signal's saved context matches the pattern set
 * by populate_ymm(), then plants STOP_COOKIE_256 in the saved %xmm7 and
 * %ymmh7 (so sigreturn's register restore can be observed by the caller)
 * and clears `checking' to break the main loop.
 */
static void
ymm_sigalrm_handler(int signum __unused, siginfo_t *info __unused, void *ctx)
{
	ucontext_t *contextp = (ucontext_t *) ctx;
	mcontext_t mcontext = contextp->uc_mcontext;
	/* AVX register file lives in the FP/vector area of the mcontext. */
	X86_AVX_STATE_T *avx_state = (X86_AVX_STATE_T *) &mcontext->__fs;
	uint32_t *xp = (uint32_t *) &avx_state->__fpu_xmm7;
	uint32_t *yp = (uint32_t *) &avx_state->__fpu_ymmh7;

	T_LOG("Got SIGALRM");

	/* Check that the kernel delivered a context large enough to hold AVX state. */
	T_QUIET;
	T_ASSERT_GE(contextp->uc_mcsize, MCONTEXT_SIZE_256, "check context size");

	/* Check that the state in the context is what's set and expected. */
	copy_ymm_state_to_vector(avx_state, vec256array3);
	assert_ymm_eq(vec256array3, vec256array0, sizeof(vec256array1));

	/* Change the context and break the main loop. */
	xp[0] = STOP_COOKIE_256;
	yp[0] = STOP_COOKIE_256;
	checking = FALSE;
}
332*4f1223e8SApple OSS Distributions
333*4f1223e8SApple OSS Distributions kern_return_t
_thread_get_state_avx(thread_t thread,int flavor,thread_state_t state,mach_msg_type_number_t * state_count)334*4f1223e8SApple OSS Distributions _thread_get_state_avx(
335*4f1223e8SApple OSS Distributions thread_t thread,
336*4f1223e8SApple OSS Distributions int flavor,
337*4f1223e8SApple OSS Distributions thread_state_t state, /* pointer to OUT array */
338*4f1223e8SApple OSS Distributions mach_msg_type_number_t *state_count) /*IN/OUT*/
339*4f1223e8SApple OSS Distributions {
340*4f1223e8SApple OSS Distributions kern_return_t rv;
341*4f1223e8SApple OSS Distributions VECTOR256 ymms[YMM_MAX];
342*4f1223e8SApple OSS Distributions
343*4f1223e8SApple OSS Distributions /*
344*4f1223e8SApple OSS Distributions * We must save and restore the YMMs across thread_get_state() because
345*4f1223e8SApple OSS Distributions * code in thread_get_state changes at least one xmm register AFTER the
346*4f1223e8SApple OSS Distributions * thread_get_state has saved the state in userspace. While it's still
347*4f1223e8SApple OSS Distributions * possible for something to muck with %xmms BEFORE making the mach
348*4f1223e8SApple OSS Distributions * system call (and rendering this save/restore useless), that does not
349*4f1223e8SApple OSS Distributions * currently occur, and since we depend on the avx state saved in the
350*4f1223e8SApple OSS Distributions * thread_get_state to be the same as that manually copied from YMMs after
351*4f1223e8SApple OSS Distributions * thread_get_state returns, we have to go through these machinations.
352*4f1223e8SApple OSS Distributions */
353*4f1223e8SApple OSS Distributions store_ymm(ymms);
354*4f1223e8SApple OSS Distributions
355*4f1223e8SApple OSS Distributions rv = thread_get_state(thread, flavor, state, state_count);
356*4f1223e8SApple OSS Distributions
357*4f1223e8SApple OSS Distributions restore_ymm(ymms);
358*4f1223e8SApple OSS Distributions
359*4f1223e8SApple OSS Distributions return rv;
360*4f1223e8SApple OSS Distributions }
361*4f1223e8SApple OSS Distributions
362*4f1223e8SApple OSS Distributions void
ymm_integrity(int time)363*4f1223e8SApple OSS Distributions ymm_integrity(int time)
364*4f1223e8SApple OSS Distributions {
365*4f1223e8SApple OSS Distributions mach_msg_type_number_t avx_count = X86_AVX_STATE_COUNT;
366*4f1223e8SApple OSS Distributions kern_return_t kret;
367*4f1223e8SApple OSS Distributions X86_AVX_STATE_T avx_state, avx_state2;
368*4f1223e8SApple OSS Distributions mach_port_t ts = mach_thread_self();
369*4f1223e8SApple OSS Distributions
370*4f1223e8SApple OSS Distributions bzero(&avx_state, sizeof(avx_state));
371*4f1223e8SApple OSS Distributions bzero(&avx_state2, sizeof(avx_state));
372*4f1223e8SApple OSS Distributions
373*4f1223e8SApple OSS Distributions kret = _thread_get_state_avx(
374*4f1223e8SApple OSS Distributions ts, X86_AVX_STATE_FLAVOR, (thread_state_t)&avx_state, &avx_count
375*4f1223e8SApple OSS Distributions );
376*4f1223e8SApple OSS Distributions
377*4f1223e8SApple OSS Distributions store_ymm(vec256array2);
378*4f1223e8SApple OSS Distributions
379*4f1223e8SApple OSS Distributions T_QUIET; T_ASSERT_MACH_SUCCESS(kret, "thread_get_state()");
380*4f1223e8SApple OSS Distributions vec256_to_string(vec256array2, vec_str_buf);
381*4f1223e8SApple OSS Distributions T_LOG("Initial state:\n%s", vec_str_buf);
382*4f1223e8SApple OSS Distributions
383*4f1223e8SApple OSS Distributions copy_ymm_state_to_vector(&avx_state, vec256array1);
384*4f1223e8SApple OSS Distributions assert_ymm_eq(vec256array2, vec256array1, sizeof(vec256array1));
385*4f1223e8SApple OSS Distributions
386*4f1223e8SApple OSS Distributions populate_ymm();
387*4f1223e8SApple OSS Distributions
388*4f1223e8SApple OSS Distributions kret = _thread_get_state_avx(
389*4f1223e8SApple OSS Distributions ts, X86_AVX_STATE_FLAVOR, (thread_state_t)&avx_state2, &avx_count
390*4f1223e8SApple OSS Distributions );
391*4f1223e8SApple OSS Distributions
392*4f1223e8SApple OSS Distributions store_ymm(vec256array2);
393*4f1223e8SApple OSS Distributions
394*4f1223e8SApple OSS Distributions T_QUIET; T_ASSERT_MACH_SUCCESS(kret, "thread_get_state()");
395*4f1223e8SApple OSS Distributions vec256_to_string(vec256array2, vec_str_buf);
396*4f1223e8SApple OSS Distributions T_LOG("Populated state:\n%s", vec_str_buf);
397*4f1223e8SApple OSS Distributions
398*4f1223e8SApple OSS Distributions copy_ymm_state_to_vector(&avx_state2, vec256array1);
399*4f1223e8SApple OSS Distributions assert_ymm_eq(vec256array2, vec256array1, sizeof(vec256array0));
400*4f1223e8SApple OSS Distributions
401*4f1223e8SApple OSS Distributions T_LOG("Running for %ds…", time);
402*4f1223e8SApple OSS Distributions start_timer(time, ymm_sigalrm_handler);
403*4f1223e8SApple OSS Distributions
404*4f1223e8SApple OSS Distributions /* re-populate because printing mucks up XMMs */
405*4f1223e8SApple OSS Distributions populate_ymm();
406*4f1223e8SApple OSS Distributions
407*4f1223e8SApple OSS Distributions /* Check state until timer fires */
408*4f1223e8SApple OSS Distributions while (checking) {
409*4f1223e8SApple OSS Distributions check_ymm();
410*4f1223e8SApple OSS Distributions }
411*4f1223e8SApple OSS Distributions
412*4f1223e8SApple OSS Distributions /* Check that the sig handler changed out AVX state */
413*4f1223e8SApple OSS Distributions store_ymm(vec256array1);
414*4f1223e8SApple OSS Distributions
415*4f1223e8SApple OSS Distributions uint32_t *p = (uint32_t *) &vec256array1[7];
416*4f1223e8SApple OSS Distributions if (p[0] != STOP_COOKIE_256 ||
417*4f1223e8SApple OSS Distributions p[4] != STOP_COOKIE_256) {
418*4f1223e8SApple OSS Distributions vec256_to_string(vec256array1, vec_str_buf);
419*4f1223e8SApple OSS Distributions T_ASSERT_FAIL("sigreturn failed to stick");
420*4f1223e8SApple OSS Distributions T_LOG("State:\n%s", vec_str_buf);
421*4f1223e8SApple OSS Distributions }
422*4f1223e8SApple OSS Distributions
423*4f1223e8SApple OSS Distributions T_LOG("Ran for %ds", time);
424*4f1223e8SApple OSS Distributions T_PASS("No ymm register corruption occurred");
425*4f1223e8SApple OSS Distributions }
426*4f1223e8SApple OSS Distributions
427*4f1223e8SApple OSS Distributions /*
428*4f1223e8SApple OSS Distributions * zmm functions
429*4f1223e8SApple OSS Distributions */
430*4f1223e8SApple OSS Distributions
/*
 * Save the eight AVX-512 opmask registers %k0-%k7 into the caller's
 * array, one 64-bit kmovq per register.
 */
static inline void
store_opmask(OPMASK k[])
{
	__asm__ volatile ("kmovq %%k0, %0" :"=m" (k[0]));
	__asm__ volatile ("kmovq %%k1, %0" :"=m" (k[1]));
	__asm__ volatile ("kmovq %%k2, %0" :"=m" (k[2]));
	__asm__ volatile ("kmovq %%k3, %0" :"=m" (k[3]));
	__asm__ volatile ("kmovq %%k4, %0" :"=m" (k[4]));
	__asm__ volatile ("kmovq %%k5, %0" :"=m" (k[5]));
	__asm__ volatile ("kmovq %%k6, %0" :"=m" (k[6]));
	__asm__ volatile ("kmovq %%k7, %0" :"=m" (k[7]));
}
443*4f1223e8SApple OSS Distributions
/*
 * Snapshot the live %zmm register file into the caller's 64-byte-aligned
 * array, one register per slot (%zmm0-%zmm7, plus %zmm8-%zmm31 on x86_64).
 * Each register number is hard-coded in its asm template, so the i++
 * sequencing below must stay exactly in this order.
 */
static inline void
store_zmm(VECTOR512 *vecarray)
{
	int i = 0;
	__asm__ volatile ("vmovaps %%zmm0, %0" :"=m" (vecarray[i]));
	i++; __asm__ volatile ("vmovaps %%zmm1, %0" :"=m" (vecarray[i]));
	i++; __asm__ volatile ("vmovaps %%zmm2, %0" :"=m" (vecarray[i]));
	i++; __asm__ volatile ("vmovaps %%zmm3, %0" :"=m" (vecarray[i]));
	i++; __asm__ volatile ("vmovaps %%zmm4, %0" :"=m" (vecarray[i]));
	i++; __asm__ volatile ("vmovaps %%zmm5, %0" :"=m" (vecarray[i]));
	i++; __asm__ volatile ("vmovaps %%zmm6, %0" :"=m" (vecarray[i]));
	i++; __asm__ volatile ("vmovaps %%zmm7, %0" :"=m" (vecarray[i]));
#if defined(__x86_64__)
	i++; __asm__ volatile ("vmovaps %%zmm8, %0" :"=m" (vecarray[i]));
	i++; __asm__ volatile ("vmovaps %%zmm9, %0" :"=m" (vecarray[i]));
	i++; __asm__ volatile ("vmovaps %%zmm10, %0" :"=m" (vecarray[i]));
	i++; __asm__ volatile ("vmovaps %%zmm11, %0" :"=m" (vecarray[i]));
	i++; __asm__ volatile ("vmovaps %%zmm12, %0" :"=m" (vecarray[i]));
	i++; __asm__ volatile ("vmovaps %%zmm13, %0" :"=m" (vecarray[i]));
	i++; __asm__ volatile ("vmovaps %%zmm14, %0" :"=m" (vecarray[i]));
	i++; __asm__ volatile ("vmovaps %%zmm15, %0" :"=m" (vecarray[i]));
	i++; __asm__ volatile ("vmovaps %%zmm16, %0" :"=m" (vecarray[i]));
	i++; __asm__ volatile ("vmovaps %%zmm17, %0" :"=m" (vecarray[i]));
	i++; __asm__ volatile ("vmovaps %%zmm18, %0" :"=m" (vecarray[i]));
	i++; __asm__ volatile ("vmovaps %%zmm19, %0" :"=m" (vecarray[i]));
	i++; __asm__ volatile ("vmovaps %%zmm20, %0" :"=m" (vecarray[i]));
	i++; __asm__ volatile ("vmovaps %%zmm21, %0" :"=m" (vecarray[i]));
	i++; __asm__ volatile ("vmovaps %%zmm22, %0" :"=m" (vecarray[i]));
	i++; __asm__ volatile ("vmovaps %%zmm23, %0" :"=m" (vecarray[i]));
	i++; __asm__ volatile ("vmovaps %%zmm24, %0" :"=m" (vecarray[i]));
	i++; __asm__ volatile ("vmovaps %%zmm25, %0" :"=m" (vecarray[i]));
	i++; __asm__ volatile ("vmovaps %%zmm26, %0" :"=m" (vecarray[i]));
	i++; __asm__ volatile ("vmovaps %%zmm27, %0" :"=m" (vecarray[i]));
	i++; __asm__ volatile ("vmovaps %%zmm28, %0" :"=m" (vecarray[i]));
	i++; __asm__ volatile ("vmovaps %%zmm29, %0" :"=m" (vecarray[i]));
	i++; __asm__ volatile ("vmovaps %%zmm30, %0" :"=m" (vecarray[i]));
	i++; __asm__ volatile ("vmovaps %%zmm31, %0" :"=m" (vecarray[i]));
#endif
}
483*4f1223e8SApple OSS Distributions
484*4f1223e8SApple OSS Distributions static inline void
restore_zmm(VECTOR512 * vecarray)485*4f1223e8SApple OSS Distributions restore_zmm(VECTOR512 *vecarray)
486*4f1223e8SApple OSS Distributions {
487*4f1223e8SApple OSS Distributions VECTOR512 *p = vecarray;
488*4f1223e8SApple OSS Distributions
489*4f1223e8SApple OSS Distributions __asm__ volatile ("vmovaps %0, %%zmm0" :: "m" (*(__m512i*)p) : "zmm0"); p++;
490*4f1223e8SApple OSS Distributions __asm__ volatile ("vmovaps %0, %%zmm1" :: "m" (*(__m512i*)p) : "zmm1"); p++;
491*4f1223e8SApple OSS Distributions __asm__ volatile ("vmovaps %0, %%zmm2" :: "m" (*(__m512i*)p) : "zmm2"); p++;
492*4f1223e8SApple OSS Distributions __asm__ volatile ("vmovaps %0, %%zmm3" :: "m" (*(__m512i*)p) : "zmm3"); p++;
493*4f1223e8SApple OSS Distributions __asm__ volatile ("vmovaps %0, %%zmm4" :: "m" (*(__m512i*)p) : "zmm4"); p++;
494*4f1223e8SApple OSS Distributions __asm__ volatile ("vmovaps %0, %%zmm5" :: "m" (*(__m512i*)p) : "zmm5"); p++;
495*4f1223e8SApple OSS Distributions __asm__ volatile ("vmovaps %0, %%zmm6" :: "m" (*(__m512i*)p) : "zmm6"); p++;
496*4f1223e8SApple OSS Distributions __asm__ volatile ("vmovaps %0, %%zmm7" :: "m" (*(__m512i*)p) : "zmm7");
497*4f1223e8SApple OSS Distributions
498*4f1223e8SApple OSS Distributions #if defined(__x86_64__)
499*4f1223e8SApple OSS Distributions ++p; __asm__ volatile ("vmovaps %0, %%zmm8" :: "m" (*(__m512i*)p) : "zmm8"); p++;
500*4f1223e8SApple OSS Distributions __asm__ volatile ("vmovaps %0, %%zmm9" :: "m" (*(__m512i*)p) : "zmm9"); p++;
501*4f1223e8SApple OSS Distributions __asm__ volatile ("vmovaps %0, %%zmm10" :: "m" (*(__m512i*)p) : "zmm10"); p++;
502*4f1223e8SApple OSS Distributions __asm__ volatile ("vmovaps %0, %%zmm11" :: "m" (*(__m512i*)p) : "zmm11"); p++;
503*4f1223e8SApple OSS Distributions __asm__ volatile ("vmovaps %0, %%zmm12" :: "m" (*(__m512i*)p) : "zmm12"); p++;
504*4f1223e8SApple OSS Distributions __asm__ volatile ("vmovaps %0, %%zmm13" :: "m" (*(__m512i*)p) : "zmm13"); p++;
505*4f1223e8SApple OSS Distributions __asm__ volatile ("vmovaps %0, %%zmm14" :: "m" (*(__m512i*)p) : "zmm14"); p++;
506*4f1223e8SApple OSS Distributions __asm__ volatile ("vmovaps %0, %%zmm15" :: "m" (*(__m512i*)p) : "zmm15"); p++;
507*4f1223e8SApple OSS Distributions __asm__ volatile ("vmovaps %0, %%zmm16" :: "m" (*(__m512i*)p) : "zmm16"); p++;
508*4f1223e8SApple OSS Distributions __asm__ volatile ("vmovaps %0, %%zmm17" :: "m" (*(__m512i*)p) : "zmm17"); p++;
509*4f1223e8SApple OSS Distributions __asm__ volatile ("vmovaps %0, %%zmm18" :: "m" (*(__m512i*)p) : "zmm18"); p++;
510*4f1223e8SApple OSS Distributions __asm__ volatile ("vmovaps %0, %%zmm19" :: "m" (*(__m512i*)p) : "zmm19"); p++;
511*4f1223e8SApple OSS Distributions __asm__ volatile ("vmovaps %0, %%zmm20" :: "m" (*(__m512i*)p) : "zmm20"); p++;
512*4f1223e8SApple OSS Distributions __asm__ volatile ("vmovaps %0, %%zmm21" :: "m" (*(__m512i*)p) : "zmm21"); p++;
513*4f1223e8SApple OSS Distributions __asm__ volatile ("vmovaps %0, %%zmm22" :: "m" (*(__m512i*)p) : "zmm22"); p++;
514*4f1223e8SApple OSS Distributions __asm__ volatile ("vmovaps %0, %%zmm23" :: "m" (*(__m512i*)p) : "zmm23"); p++;
515*4f1223e8SApple OSS Distributions __asm__ volatile ("vmovaps %0, %%zmm24" :: "m" (*(__m512i*)p) : "zmm24"); p++;
516*4f1223e8SApple OSS Distributions __asm__ volatile ("vmovaps %0, %%zmm25" :: "m" (*(__m512i*)p) : "zmm25"); p++;
517*4f1223e8SApple OSS Distributions __asm__ volatile ("vmovaps %0, %%zmm26" :: "m" (*(__m512i*)p) : "zmm26"); p++;
518*4f1223e8SApple OSS Distributions __asm__ volatile ("vmovaps %0, %%zmm27" :: "m" (*(__m512i*)p) : "zmm27"); p++;
519*4f1223e8SApple OSS Distributions __asm__ volatile ("vmovaps %0, %%zmm28" :: "m" (*(__m512i*)p) : "zmm28"); p++;
520*4f1223e8SApple OSS Distributions __asm__ volatile ("vmovaps %0, %%zmm29" :: "m" (*(__m512i*)p) : "zmm29"); p++;
521*4f1223e8SApple OSS Distributions __asm__ volatile ("vmovaps %0, %%zmm30" :: "m" (*(__m512i*)p) : "zmm30"); p++;
522*4f1223e8SApple OSS Distributions __asm__ volatile ("vmovaps %0, %%zmm31" :: "m" (*(__m512i*)p) : "zmm31");
523*4f1223e8SApple OSS Distributions #endif
524*4f1223e8SApple OSS Distributions }
525*4f1223e8SApple OSS Distributions
526*4f1223e8SApple OSS Distributions static inline void
zero_opmask(void)527*4f1223e8SApple OSS Distributions zero_opmask(void)
528*4f1223e8SApple OSS Distributions {
529*4f1223e8SApple OSS Distributions uint64_t zero = 0x0000000000000000ULL;
530*4f1223e8SApple OSS Distributions
531*4f1223e8SApple OSS Distributions __asm__ volatile ("kmovq %0, %%k0" : :"m" (zero) : "k0");
532*4f1223e8SApple OSS Distributions __asm__ volatile ("kmovq %0, %%k1" : :"m" (zero) : "k1");
533*4f1223e8SApple OSS Distributions __asm__ volatile ("kmovq %0, %%k2" : :"m" (zero) : "k2");
534*4f1223e8SApple OSS Distributions __asm__ volatile ("kmovq %0, %%k3" : :"m" (zero) : "k3");
535*4f1223e8SApple OSS Distributions __asm__ volatile ("kmovq %0, %%k4" : :"m" (zero) : "k4");
536*4f1223e8SApple OSS Distributions __asm__ volatile ("kmovq %0, %%k5" : :"m" (zero) : "k5");
537*4f1223e8SApple OSS Distributions __asm__ volatile ("kmovq %0, %%k6" : :"m" (zero) : "k6");
538*4f1223e8SApple OSS Distributions __asm__ volatile ("kmovq %0, %%k7" : :"m" (zero) : "k7");
539*4f1223e8SApple OSS Distributions store_opmask(karray0);
540*4f1223e8SApple OSS Distributions }
541*4f1223e8SApple OSS Distributions
542*4f1223e8SApple OSS Distributions static inline void
populate_opmask(void)543*4f1223e8SApple OSS Distributions populate_opmask(void)
544*4f1223e8SApple OSS Distributions {
545*4f1223e8SApple OSS Distributions uint64_t k[8];
546*4f1223e8SApple OSS Distributions
547*4f1223e8SApple OSS Distributions for (int j = 0; j < 8; j++) {
548*4f1223e8SApple OSS Distributions k[j] = ((uint64_t) getpid() << 32) + (0x11111111 * j);
549*4f1223e8SApple OSS Distributions }
550*4f1223e8SApple OSS Distributions
551*4f1223e8SApple OSS Distributions __asm__ volatile ("kmovq %0, %%k0" : :"m" (k[0]) : "k0");
552*4f1223e8SApple OSS Distributions __asm__ volatile ("kmovq %0, %%k1" : :"m" (k[1]) : "k1");
553*4f1223e8SApple OSS Distributions __asm__ volatile ("kmovq %0, %%k2" : :"m" (k[2]) : "k2");
554*4f1223e8SApple OSS Distributions __asm__ volatile ("kmovq %0, %%k3" : :"m" (k[3]) : "k3");
555*4f1223e8SApple OSS Distributions __asm__ volatile ("kmovq %0, %%k4" : :"m" (k[4]) : "k4");
556*4f1223e8SApple OSS Distributions __asm__ volatile ("kmovq %0, %%k5" : :"m" (k[5]) : "k5");
557*4f1223e8SApple OSS Distributions __asm__ volatile ("kmovq %0, %%k6" : :"m" (k[6]) : "k6");
558*4f1223e8SApple OSS Distributions __asm__ volatile ("kmovq %0, %%k7" : :"m" (k[7]) : "k7");
559*4f1223e8SApple OSS Distributions
560*4f1223e8SApple OSS Distributions store_opmask(karray0);
561*4f1223e8SApple OSS Distributions }
562*4f1223e8SApple OSS Distributions
563*4f1223e8SApple OSS Distributions kern_return_t
_thread_get_state_avx512(thread_t thread,int flavor,thread_state_t state,mach_msg_type_number_t * state_count)564*4f1223e8SApple OSS Distributions _thread_get_state_avx512(
565*4f1223e8SApple OSS Distributions thread_t thread,
566*4f1223e8SApple OSS Distributions int flavor,
567*4f1223e8SApple OSS Distributions thread_state_t state, /* pointer to OUT array */
568*4f1223e8SApple OSS Distributions mach_msg_type_number_t *state_count) /*IN/OUT*/
569*4f1223e8SApple OSS Distributions {
570*4f1223e8SApple OSS Distributions kern_return_t rv;
571*4f1223e8SApple OSS Distributions VECTOR512 zmms[ZMM_MAX];
572*4f1223e8SApple OSS Distributions
573*4f1223e8SApple OSS Distributions /*
574*4f1223e8SApple OSS Distributions * We must save and restore the ZMMs across thread_get_state() because
575*4f1223e8SApple OSS Distributions * code in thread_get_state changes at least one xmm register AFTER the
576*4f1223e8SApple OSS Distributions * thread_get_state has saved the state in userspace. While it's still
577*4f1223e8SApple OSS Distributions * possible for something to muck with %XMMs BEFORE making the mach
578*4f1223e8SApple OSS Distributions * system call (and rendering this save/restore useless), that does not
579*4f1223e8SApple OSS Distributions * currently occur, and since we depend on the avx512 state saved in the
580*4f1223e8SApple OSS Distributions * thread_get_state to be the same as that manually copied from ZMMs after
581*4f1223e8SApple OSS Distributions * thread_get_state returns, we have to go through these machinations.
582*4f1223e8SApple OSS Distributions */
583*4f1223e8SApple OSS Distributions store_zmm(zmms);
584*4f1223e8SApple OSS Distributions
585*4f1223e8SApple OSS Distributions rv = thread_get_state(thread, flavor, state, state_count);
586*4f1223e8SApple OSS Distributions
587*4f1223e8SApple OSS Distributions restore_zmm(zmms);
588*4f1223e8SApple OSS Distributions
589*4f1223e8SApple OSS Distributions return rv;
590*4f1223e8SApple OSS Distributions }
591*4f1223e8SApple OSS Distributions
592*4f1223e8SApple OSS Distributions static inline void
zero_zmm(void)593*4f1223e8SApple OSS Distributions zero_zmm(void)
594*4f1223e8SApple OSS Distributions {
595*4f1223e8SApple OSS Distributions uint64_t zero[8] VEC512ALIGN = {0};
596*4f1223e8SApple OSS Distributions
597*4f1223e8SApple OSS Distributions __asm__ volatile ("vmovaps %0, %%zmm0" :: "m" (zero) : "zmm0");
598*4f1223e8SApple OSS Distributions __asm__ volatile ("vmovaps %0, %%zmm1" :: "m" (zero) : "zmm1");
599*4f1223e8SApple OSS Distributions __asm__ volatile ("vmovaps %0, %%zmm2" :: "m" (zero) : "zmm2");
600*4f1223e8SApple OSS Distributions __asm__ volatile ("vmovaps %0, %%zmm3" :: "m" (zero) : "zmm3");
601*4f1223e8SApple OSS Distributions __asm__ volatile ("vmovaps %0, %%zmm4" :: "m" (zero) : "zmm4");
602*4f1223e8SApple OSS Distributions __asm__ volatile ("vmovaps %0, %%zmm5" :: "m" (zero) : "zmm5");
603*4f1223e8SApple OSS Distributions __asm__ volatile ("vmovaps %0, %%zmm6" :: "m" (zero) : "zmm6");
604*4f1223e8SApple OSS Distributions __asm__ volatile ("vmovaps %0, %%zmm7" :: "m" (zero) : "zmm7");
605*4f1223e8SApple OSS Distributions
606*4f1223e8SApple OSS Distributions #if defined(__x86_64__)
607*4f1223e8SApple OSS Distributions __asm__ volatile ("vmovaps %0, %%zmm8" :: "m" (zero) : "zmm8");
608*4f1223e8SApple OSS Distributions __asm__ volatile ("vmovaps %0, %%zmm9" :: "m" (zero) : "zmm9");
609*4f1223e8SApple OSS Distributions __asm__ volatile ("vmovaps %0, %%zmm10" :: "m" (zero) : "zmm10");
610*4f1223e8SApple OSS Distributions __asm__ volatile ("vmovaps %0, %%zmm11" :: "m" (zero) : "zmm11");
611*4f1223e8SApple OSS Distributions __asm__ volatile ("vmovaps %0, %%zmm12" :: "m" (zero) : "zmm12");
612*4f1223e8SApple OSS Distributions __asm__ volatile ("vmovaps %0, %%zmm13" :: "m" (zero) : "zmm13");
613*4f1223e8SApple OSS Distributions __asm__ volatile ("vmovaps %0, %%zmm14" :: "m" (zero) : "zmm14");
614*4f1223e8SApple OSS Distributions __asm__ volatile ("vmovaps %0, %%zmm15" :: "m" (zero) : "zmm15");
615*4f1223e8SApple OSS Distributions __asm__ volatile ("vmovaps %0, %%zmm16" :: "m" (zero) : "zmm16");
616*4f1223e8SApple OSS Distributions __asm__ volatile ("vmovaps %0, %%zmm17" :: "m" (zero) : "zmm17");
617*4f1223e8SApple OSS Distributions __asm__ volatile ("vmovaps %0, %%zmm18" :: "m" (zero) : "zmm18");
618*4f1223e8SApple OSS Distributions __asm__ volatile ("vmovaps %0, %%zmm19" :: "m" (zero) : "zmm19");
619*4f1223e8SApple OSS Distributions __asm__ volatile ("vmovaps %0, %%zmm20" :: "m" (zero) : "zmm20");
620*4f1223e8SApple OSS Distributions __asm__ volatile ("vmovaps %0, %%zmm21" :: "m" (zero) : "zmm21");
621*4f1223e8SApple OSS Distributions __asm__ volatile ("vmovaps %0, %%zmm22" :: "m" (zero) : "zmm22");
622*4f1223e8SApple OSS Distributions __asm__ volatile ("vmovaps %0, %%zmm23" :: "m" (zero) : "zmm23");
623*4f1223e8SApple OSS Distributions __asm__ volatile ("vmovaps %0, %%zmm24" :: "m" (zero) : "zmm24");
624*4f1223e8SApple OSS Distributions __asm__ volatile ("vmovaps %0, %%zmm25" :: "m" (zero) : "zmm25");
625*4f1223e8SApple OSS Distributions __asm__ volatile ("vmovaps %0, %%zmm26" :: "m" (zero) : "zmm26");
626*4f1223e8SApple OSS Distributions __asm__ volatile ("vmovaps %0, %%zmm27" :: "m" (zero) : "zmm27");
627*4f1223e8SApple OSS Distributions __asm__ volatile ("vmovaps %0, %%zmm28" :: "m" (zero) : "zmm28");
628*4f1223e8SApple OSS Distributions __asm__ volatile ("vmovaps %0, %%zmm29" :: "m" (zero) : "zmm29");
629*4f1223e8SApple OSS Distributions __asm__ volatile ("vmovaps %0, %%zmm30" :: "m" (zero) : "zmm30");
630*4f1223e8SApple OSS Distributions __asm__ volatile ("vmovaps %0, %%zmm31" :: "m" (zero) : "zmm31");
631*4f1223e8SApple OSS Distributions #endif
632*4f1223e8SApple OSS Distributions
633*4f1223e8SApple OSS Distributions store_zmm(vec512array0);
634*4f1223e8SApple OSS Distributions }
635*4f1223e8SApple OSS Distributions
636*4f1223e8SApple OSS Distributions static inline void
populate_zmm(void)637*4f1223e8SApple OSS Distributions populate_zmm(void)
638*4f1223e8SApple OSS Distributions {
639*4f1223e8SApple OSS Distributions int j;
640*4f1223e8SApple OSS Distributions uint64_t p[8] VEC512ALIGN;
641*4f1223e8SApple OSS Distributions
642*4f1223e8SApple OSS Distributions for (j = 0; j < (int) (sizeof(p) / sizeof(p[0])); j++) {
643*4f1223e8SApple OSS Distributions p[j] = ((uint64_t) getpid() << 32) + getpid();
644*4f1223e8SApple OSS Distributions }
645*4f1223e8SApple OSS Distributions
646*4f1223e8SApple OSS Distributions p[0] = 0x0000000000000000ULL;
647*4f1223e8SApple OSS Distributions p[2] = 0x4444444444444444ULL;
648*4f1223e8SApple OSS Distributions p[4] = 0x8888888888888888ULL;
649*4f1223e8SApple OSS Distributions p[7] = 0xCCCCCCCCCCCCCCCCULL;
650*4f1223e8SApple OSS Distributions __asm__ volatile ("vmovaps %0, %%zmm0" :: "m" (*(__m512i*)p) : "zmm0");
651*4f1223e8SApple OSS Distributions __asm__ volatile ("vmovaps %0, %%zmm1" :: "m" (*(__m512i*)p) : "zmm1");
652*4f1223e8SApple OSS Distributions __asm__ volatile ("vmovaps %0, %%zmm2" :: "m" (*(__m512i*)p) : "zmm2");
653*4f1223e8SApple OSS Distributions __asm__ volatile ("vmovaps %0, %%zmm3" :: "m" (*(__m512i*)p) : "zmm3");
654*4f1223e8SApple OSS Distributions __asm__ volatile ("vmovaps %0, %%zmm4" :: "m" (*(__m512i*)p) : "zmm4");
655*4f1223e8SApple OSS Distributions __asm__ volatile ("vmovaps %0, %%zmm5" :: "m" (*(__m512i*)p) : "zmm5");
656*4f1223e8SApple OSS Distributions __asm__ volatile ("vmovaps %0, %%zmm6" :: "m" (*(__m512i*)p) : "zmm6");
657*4f1223e8SApple OSS Distributions __asm__ volatile ("vmovaps %0, %%zmm7" :: "m" (*(__m512i*)p) : "zmm7");
658*4f1223e8SApple OSS Distributions
659*4f1223e8SApple OSS Distributions #if defined(__x86_64__)
660*4f1223e8SApple OSS Distributions p[0] = 0x1111111111111111ULL;
661*4f1223e8SApple OSS Distributions p[2] = 0x5555555555555555ULL;
662*4f1223e8SApple OSS Distributions p[4] = 0x9999999999999999ULL;
663*4f1223e8SApple OSS Distributions p[7] = 0xDDDDDDDDDDDDDDDDULL;
664*4f1223e8SApple OSS Distributions __asm__ volatile ("vmovaps %0, %%zmm8" :: "m" (*(__m512i*)p) : "zmm8");
665*4f1223e8SApple OSS Distributions __asm__ volatile ("vmovaps %0, %%zmm9" :: "m" (*(__m512i*)p) : "zmm9");
666*4f1223e8SApple OSS Distributions __asm__ volatile ("vmovaps %0, %%zmm10" :: "m" (*(__m512i*)p) : "zmm10");
667*4f1223e8SApple OSS Distributions __asm__ volatile ("vmovaps %0, %%zmm11" :: "m" (*(__m512i*)p) : "zmm11");
668*4f1223e8SApple OSS Distributions __asm__ volatile ("vmovaps %0, %%zmm12" :: "m" (*(__m512i*)p) : "zmm12");
669*4f1223e8SApple OSS Distributions __asm__ volatile ("vmovaps %0, %%zmm13" :: "m" (*(__m512i*)p) : "zmm13");
670*4f1223e8SApple OSS Distributions __asm__ volatile ("vmovaps %0, %%zmm14" :: "m" (*(__m512i*)p) : "zmm14");
671*4f1223e8SApple OSS Distributions __asm__ volatile ("vmovaps %0, %%zmm15" :: "m" (*(__m512i*)p) : "zmm15");
672*4f1223e8SApple OSS Distributions
673*4f1223e8SApple OSS Distributions p[0] = 0x2222222222222222ULL;
674*4f1223e8SApple OSS Distributions p[2] = 0x6666666666666666ULL;
675*4f1223e8SApple OSS Distributions p[4] = 0xAAAAAAAAAAAAAAAAULL;
676*4f1223e8SApple OSS Distributions p[7] = 0xEEEEEEEEEEEEEEEEULL;
677*4f1223e8SApple OSS Distributions __asm__ volatile ("vmovaps %0, %%zmm16" :: "m" (*(__m512i*)p) : "zmm16");
678*4f1223e8SApple OSS Distributions __asm__ volatile ("vmovaps %0, %%zmm17" :: "m" (*(__m512i*)p) : "zmm17");
679*4f1223e8SApple OSS Distributions __asm__ volatile ("vmovaps %0, %%zmm18" :: "m" (*(__m512i*)p) : "zmm18");
680*4f1223e8SApple OSS Distributions __asm__ volatile ("vmovaps %0, %%zmm19" :: "m" (*(__m512i*)p) : "zmm19");
681*4f1223e8SApple OSS Distributions __asm__ volatile ("vmovaps %0, %%zmm20" :: "m" (*(__m512i*)p) : "zmm20");
682*4f1223e8SApple OSS Distributions __asm__ volatile ("vmovaps %0, %%zmm21" :: "m" (*(__m512i*)p) : "zmm21");
683*4f1223e8SApple OSS Distributions __asm__ volatile ("vmovaps %0, %%zmm22" :: "m" (*(__m512i*)p) : "zmm22");
684*4f1223e8SApple OSS Distributions __asm__ volatile ("vmovaps %0, %%zmm23" :: "m" (*(__m512i*)p) : "zmm23");
685*4f1223e8SApple OSS Distributions
686*4f1223e8SApple OSS Distributions p[0] = 0x3333333333333333ULL;
687*4f1223e8SApple OSS Distributions p[2] = 0x7777777777777777ULL;
688*4f1223e8SApple OSS Distributions p[4] = 0xBBBBBBBBBBBBBBBBULL;
689*4f1223e8SApple OSS Distributions p[7] = 0xFFFFFFFFFFFFFFFFULL;
690*4f1223e8SApple OSS Distributions __asm__ volatile ("vmovaps %0, %%zmm24" :: "m" (*(__m512i*)p) : "zmm24");
691*4f1223e8SApple OSS Distributions __asm__ volatile ("vmovaps %0, %%zmm25" :: "m" (*(__m512i*)p) : "zmm25");
692*4f1223e8SApple OSS Distributions __asm__ volatile ("vmovaps %0, %%zmm26" :: "m" (*(__m512i*)p) : "zmm26");
693*4f1223e8SApple OSS Distributions __asm__ volatile ("vmovaps %0, %%zmm27" :: "m" (*(__m512i*)p) : "zmm27");
694*4f1223e8SApple OSS Distributions __asm__ volatile ("vmovaps %0, %%zmm28" :: "m" (*(__m512i*)p) : "zmm28");
695*4f1223e8SApple OSS Distributions __asm__ volatile ("vmovaps %0, %%zmm29" :: "m" (*(__m512i*)p) : "zmm29");
696*4f1223e8SApple OSS Distributions __asm__ volatile ("vmovaps %0, %%zmm30" :: "m" (*(__m512i*)p) : "zmm30");
697*4f1223e8SApple OSS Distributions __asm__ volatile ("vmovaps %0, %%zmm31" :: "m" (*(__m512i*)p) : "zmm31");
698*4f1223e8SApple OSS Distributions #endif
699*4f1223e8SApple OSS Distributions
700*4f1223e8SApple OSS Distributions store_zmm(vec512array0);
701*4f1223e8SApple OSS Distributions }
702*4f1223e8SApple OSS Distributions
703*4f1223e8SApple OSS Distributions void
vec512_to_string(VECTOR512 * vec,char * buf)704*4f1223e8SApple OSS Distributions vec512_to_string(VECTOR512 *vec, char *buf)
705*4f1223e8SApple OSS Distributions {
706*4f1223e8SApple OSS Distributions unsigned int vec_idx = 0;
707*4f1223e8SApple OSS Distributions unsigned int buf_idx = 0;
708*4f1223e8SApple OSS Distributions int ret = 0;
709*4f1223e8SApple OSS Distributions
710*4f1223e8SApple OSS Distributions for (vec_idx = 0; vec_idx < ZMM_MAX; vec_idx++) {
711*4f1223e8SApple OSS Distributions uint64_t a[8];
712*4f1223e8SApple OSS Distributions bcopy(&vec[vec_idx], &a[0], sizeof(a));
713*4f1223e8SApple OSS Distributions ret = sprintf(
714*4f1223e8SApple OSS Distributions buf + buf_idx,
715*4f1223e8SApple OSS Distributions "0x%016llx:%016llx:%016llx:%016llx:"
716*4f1223e8SApple OSS Distributions "%016llx:%016llx:%016llx:%016llx%s",
717*4f1223e8SApple OSS Distributions a[0], a[1], a[2], a[3], a[4], a[5], a[6], a[7],
718*4f1223e8SApple OSS Distributions vec_idx < ZMM_MAX - 1 ? "\n" : ""
719*4f1223e8SApple OSS Distributions );
720*4f1223e8SApple OSS Distributions T_QUIET; T_ASSERT_POSIX_SUCCESS(ret, "sprintf()");
721*4f1223e8SApple OSS Distributions buf_idx += ret;
722*4f1223e8SApple OSS Distributions }
723*4f1223e8SApple OSS Distributions }
724*4f1223e8SApple OSS Distributions
725*4f1223e8SApple OSS Distributions void
opmask_to_string(OPMASK * karray,char * buf)726*4f1223e8SApple OSS Distributions opmask_to_string(OPMASK *karray, char *buf)
727*4f1223e8SApple OSS Distributions {
728*4f1223e8SApple OSS Distributions unsigned int karray_idx = 0;
729*4f1223e8SApple OSS Distributions unsigned int buf_idx = 0;
730*4f1223e8SApple OSS Distributions int ret = 0;
731*4f1223e8SApple OSS Distributions
732*4f1223e8SApple OSS Distributions for (karray_idx = 0; karray_idx < KARRAY_MAX; karray_idx++) {
733*4f1223e8SApple OSS Distributions ret = sprintf(
734*4f1223e8SApple OSS Distributions buf + buf_idx,
735*4f1223e8SApple OSS Distributions "k%d: 0x%016llx%s",
736*4f1223e8SApple OSS Distributions karray_idx, karray[karray_idx],
737*4f1223e8SApple OSS Distributions karray_idx < KARRAY_MAX ? "\n" : ""
738*4f1223e8SApple OSS Distributions );
739*4f1223e8SApple OSS Distributions T_QUIET; T_ASSERT_POSIX_SUCCESS(ret, "sprintf()");
740*4f1223e8SApple OSS Distributions buf_idx += ret;
741*4f1223e8SApple OSS Distributions }
742*4f1223e8SApple OSS Distributions }
743*4f1223e8SApple OSS Distributions
744*4f1223e8SApple OSS Distributions static void
assert_zmm_eq(void * a,void * b,int c)745*4f1223e8SApple OSS Distributions assert_zmm_eq(void *a, void *b, int c)
746*4f1223e8SApple OSS Distributions {
747*4f1223e8SApple OSS Distributions if (memcmp_unoptimized(a, b, c)) {
748*4f1223e8SApple OSS Distributions vec512_to_string(a, vec_str_buf);
749*4f1223e8SApple OSS Distributions T_LOG("Compare failed, vector A:\n%s", vec_str_buf);
750*4f1223e8SApple OSS Distributions vec512_to_string(b, vec_str_buf);
751*4f1223e8SApple OSS Distributions T_LOG("Compare failed, vector B:\n%s", vec_str_buf);
752*4f1223e8SApple OSS Distributions T_ASSERT_FAIL("Vectors not equal");
753*4f1223e8SApple OSS Distributions }
754*4f1223e8SApple OSS Distributions }
755*4f1223e8SApple OSS Distributions
756*4f1223e8SApple OSS Distributions static void
assert_opmask_eq(OPMASK * a,OPMASK * b)757*4f1223e8SApple OSS Distributions assert_opmask_eq(OPMASK *a, OPMASK *b)
758*4f1223e8SApple OSS Distributions {
759*4f1223e8SApple OSS Distributions for (int i = 0; i < KARRAY_MAX; i++) {
760*4f1223e8SApple OSS Distributions if (a[i] != b[i]) {
761*4f1223e8SApple OSS Distributions opmask_to_string(a, karray_str_buf);
762*4f1223e8SApple OSS Distributions T_LOG("Compare failed, opmask A:\n%s", karray_str_buf);
763*4f1223e8SApple OSS Distributions opmask_to_string(b, karray_str_buf);
764*4f1223e8SApple OSS Distributions T_LOG("Compare failed, opmask B:\n%s", karray_str_buf);
765*4f1223e8SApple OSS Distributions T_ASSERT_FAIL("opmasks not equal");
766*4f1223e8SApple OSS Distributions }
767*4f1223e8SApple OSS Distributions }
768*4f1223e8SApple OSS Distributions }
769*4f1223e8SApple OSS Distributions
770*4f1223e8SApple OSS Distributions void
check_zmm(boolean_t check_cookie)771*4f1223e8SApple OSS Distributions check_zmm(boolean_t check_cookie)
772*4f1223e8SApple OSS Distributions {
773*4f1223e8SApple OSS Distributions uint64_t *p = (uint64_t *) &vec512array1[7];
774*4f1223e8SApple OSS Distributions store_opmask(karray1);
775*4f1223e8SApple OSS Distributions store_zmm(vec512array1);
776*4f1223e8SApple OSS Distributions if (check_cookie && p[0] == STOP_COOKIE_512) {
777*4f1223e8SApple OSS Distributions return;
778*4f1223e8SApple OSS Distributions }
779*4f1223e8SApple OSS Distributions
780*4f1223e8SApple OSS Distributions assert_zmm_eq(vec512array0, vec512array1, sizeof(vec512array0));
781*4f1223e8SApple OSS Distributions assert_opmask_eq(karray0, karray1);
782*4f1223e8SApple OSS Distributions }
783*4f1223e8SApple OSS Distributions
784*4f1223e8SApple OSS Distributions static void
copy_state_to_opmask(X86_AVX512_STATE_T * sp,OPMASK * op)785*4f1223e8SApple OSS Distributions copy_state_to_opmask(X86_AVX512_STATE_T *sp, OPMASK *op)
786*4f1223e8SApple OSS Distributions {
787*4f1223e8SApple OSS Distributions OPMASK *k = (OPMASK *) &sp->__fpu_k0;
788*4f1223e8SApple OSS Distributions for (int i = 0; i < KARRAY_MAX; i++) {
789*4f1223e8SApple OSS Distributions bcopy(&k[i], &op[i], sizeof(*op));
790*4f1223e8SApple OSS Distributions }
791*4f1223e8SApple OSS Distributions }
792*4f1223e8SApple OSS Distributions
793*4f1223e8SApple OSS Distributions static void
copy_zmm_state_to_vector(X86_AVX512_STATE_T * sp,VECTOR512 * vp)794*4f1223e8SApple OSS Distributions copy_zmm_state_to_vector(X86_AVX512_STATE_T *sp, VECTOR512 *vp)
795*4f1223e8SApple OSS Distributions {
796*4f1223e8SApple OSS Distributions int i;
797*4f1223e8SApple OSS Distributions struct __darwin_xmm_reg *xmm = &sp->__fpu_xmm0;
798*4f1223e8SApple OSS Distributions struct __darwin_xmm_reg *ymmh = &sp->__fpu_ymmh0;
799*4f1223e8SApple OSS Distributions struct __darwin_ymm_reg *zmmh = &sp->__fpu_zmmh0;
800*4f1223e8SApple OSS Distributions #if defined(__x86_64__)
801*4f1223e8SApple OSS Distributions struct __darwin_zmm_reg *zmm = &sp->__fpu_zmm16;
802*4f1223e8SApple OSS Distributions
803*4f1223e8SApple OSS Distributions for (i = 0; i < ZMM_MAX / 2; i++) {
804*4f1223e8SApple OSS Distributions bcopy(&xmm[i], &vp[i], sizeof(*xmm));
805*4f1223e8SApple OSS Distributions bcopy(&ymmh[i], (void *) ((uint64_t)&vp[i] + sizeof(*ymmh)), sizeof(*ymmh));
806*4f1223e8SApple OSS Distributions bcopy(&zmmh[i], (void *) ((uint64_t)&vp[i] + sizeof(*zmmh)), sizeof(*zmmh));
807*4f1223e8SApple OSS Distributions bcopy(&zmm[i], &vp[(ZMM_MAX / 2) + i], sizeof(*zmm));
808*4f1223e8SApple OSS Distributions }
809*4f1223e8SApple OSS Distributions #else
810*4f1223e8SApple OSS Distributions for (i = 0; i < ZMM_MAX; i++) {
811*4f1223e8SApple OSS Distributions bcopy(&xmm[i], &vp[i], sizeof(*xmm));
812*4f1223e8SApple OSS Distributions bcopy(&ymmh[i], (void *) ((uint64_t)&vp[i] + sizeof(*ymmh)), sizeof(*ymmh));
813*4f1223e8SApple OSS Distributions bcopy(&zmmh[i], (void *) ((uint64_t)&vp[i] + sizeof(*zmmh)), sizeof(*zmmh));
814*4f1223e8SApple OSS Distributions }
815*4f1223e8SApple OSS Distributions #endif
816*4f1223e8SApple OSS Distributions }
817*4f1223e8SApple OSS Distributions
818*4f1223e8SApple OSS Distributions static void
zmm_sigalrm_handler(int signum __unused,siginfo_t * info __unused,void * ctx)819*4f1223e8SApple OSS Distributions zmm_sigalrm_handler(int signum __unused, siginfo_t *info __unused, void *ctx)
820*4f1223e8SApple OSS Distributions {
821*4f1223e8SApple OSS Distributions ucontext_t *contextp = (ucontext_t *) ctx;
822*4f1223e8SApple OSS Distributions mcontext_t mcontext = contextp->uc_mcontext;
823*4f1223e8SApple OSS Distributions X86_AVX512_STATE_T *avx_state = (X86_AVX512_STATE_T *) &mcontext->__fs;
824*4f1223e8SApple OSS Distributions uint64_t *xp = (uint64_t *) &avx_state->__fpu_xmm7;
825*4f1223e8SApple OSS Distributions uint64_t *yp = (uint64_t *) &avx_state->__fpu_ymmh7;
826*4f1223e8SApple OSS Distributions uint64_t *zp = (uint64_t *) &avx_state->__fpu_zmmh7;
827*4f1223e8SApple OSS Distributions uint64_t *kp = (uint64_t *) &avx_state->__fpu_k0;
828*4f1223e8SApple OSS Distributions
829*4f1223e8SApple OSS Distributions /* Check for AVX512 state */
830*4f1223e8SApple OSS Distributions T_QUIET;
831*4f1223e8SApple OSS Distributions T_ASSERT_GE(contextp->uc_mcsize, MCONTEXT_SIZE_512, "check context size");
832*4f1223e8SApple OSS Distributions
833*4f1223e8SApple OSS Distributions /* Check that the state in the context is what's set and expected */
834*4f1223e8SApple OSS Distributions copy_zmm_state_to_vector(avx_state, vec512array3);
835*4f1223e8SApple OSS Distributions assert_zmm_eq(vec512array3, vec512array0, sizeof(vec512array3));
836*4f1223e8SApple OSS Distributions copy_state_to_opmask(avx_state, karray3);
837*4f1223e8SApple OSS Distributions assert_opmask_eq(karray3, karray0);
838*4f1223e8SApple OSS Distributions
839*4f1223e8SApple OSS Distributions /* Change the context and break the main loop */
840*4f1223e8SApple OSS Distributions xp[0] = STOP_COOKIE_512;
841*4f1223e8SApple OSS Distributions yp[0] = STOP_COOKIE_512;
842*4f1223e8SApple OSS Distributions zp[0] = STOP_COOKIE_512;
843*4f1223e8SApple OSS Distributions kp[7] = STOP_COOKIE_512;
844*4f1223e8SApple OSS Distributions checking = FALSE;
845*4f1223e8SApple OSS Distributions }
846*4f1223e8SApple OSS Distributions
847*4f1223e8SApple OSS Distributions static void
zmm_sigalrm_handler_no_mod(int signum __unused,siginfo_t * info __unused,void * ctx)848*4f1223e8SApple OSS Distributions zmm_sigalrm_handler_no_mod(int signum __unused, siginfo_t *info __unused, void *ctx)
849*4f1223e8SApple OSS Distributions {
850*4f1223e8SApple OSS Distributions ucontext_t *contextp = (ucontext_t *) ctx;
851*4f1223e8SApple OSS Distributions mcontext_t mcontext = contextp->uc_mcontext;
852*4f1223e8SApple OSS Distributions X86_AVX512_STATE_T *avx_state = (X86_AVX512_STATE_T *) &mcontext->__fs;
853*4f1223e8SApple OSS Distributions uint64_t *xp = (uint64_t *) &avx_state->__fpu_xmm7;
854*4f1223e8SApple OSS Distributions uint64_t *yp = (uint64_t *) &avx_state->__fpu_ymmh7;
855*4f1223e8SApple OSS Distributions uint64_t *zp = (uint64_t *) &avx_state->__fpu_zmmh7;
856*4f1223e8SApple OSS Distributions uint64_t *kp = (uint64_t *) &avx_state->__fpu_k0;
857*4f1223e8SApple OSS Distributions
858*4f1223e8SApple OSS Distributions /* Check for AVX512 state */
859*4f1223e8SApple OSS Distributions T_QUIET;
860*4f1223e8SApple OSS Distributions T_ASSERT_GE(contextp->uc_mcsize, MCONTEXT_SIZE_512, "check context size");
861*4f1223e8SApple OSS Distributions
862*4f1223e8SApple OSS Distributions /* Check that the state in the context is what's set and expected */
863*4f1223e8SApple OSS Distributions copy_zmm_state_to_vector(avx_state, vec512array3);
864*4f1223e8SApple OSS Distributions assert_zmm_eq(vec512array3, vec512array0, sizeof(vec512array3));
865*4f1223e8SApple OSS Distributions copy_state_to_opmask(avx_state, karray3);
866*4f1223e8SApple OSS Distributions assert_opmask_eq(karray3, karray0);
867*4f1223e8SApple OSS Distributions
868*4f1223e8SApple OSS Distributions /* Change the context and break the main loop */
869*4f1223e8SApple OSS Distributions checking = FALSE;
870*4f1223e8SApple OSS Distributions }
871*4f1223e8SApple OSS Distributions
872*4f1223e8SApple OSS Distributions
873*4f1223e8SApple OSS Distributions void
zmm_integrity(int time)874*4f1223e8SApple OSS Distributions zmm_integrity(int time)
875*4f1223e8SApple OSS Distributions {
876*4f1223e8SApple OSS Distributions mach_msg_type_number_t avx_count = X86_AVX512_STATE_COUNT;
877*4f1223e8SApple OSS Distributions kern_return_t kret;
878*4f1223e8SApple OSS Distributions X86_AVX512_STATE_T avx_state, avx_state2;
879*4f1223e8SApple OSS Distributions mach_port_t ts = mach_thread_self();
880*4f1223e8SApple OSS Distributions
881*4f1223e8SApple OSS Distributions bzero(&avx_state, sizeof(avx_state));
882*4f1223e8SApple OSS Distributions bzero(&avx_state2, sizeof(avx_state));
883*4f1223e8SApple OSS Distributions
884*4f1223e8SApple OSS Distributions store_zmm(vec512array2);
885*4f1223e8SApple OSS Distributions store_opmask(karray2);
886*4f1223e8SApple OSS Distributions
887*4f1223e8SApple OSS Distributions kret = _thread_get_state_avx512(
888*4f1223e8SApple OSS Distributions ts, X86_AVX512_STATE_FLAVOR, (thread_state_t)&avx_state, &avx_count
889*4f1223e8SApple OSS Distributions );
890*4f1223e8SApple OSS Distributions
891*4f1223e8SApple OSS Distributions T_QUIET; T_ASSERT_MACH_SUCCESS(kret, "thread_get_state()");
892*4f1223e8SApple OSS Distributions vec512_to_string(vec512array2, vec_str_buf);
893*4f1223e8SApple OSS Distributions opmask_to_string(karray2, karray_str_buf);
894*4f1223e8SApple OSS Distributions T_LOG("Initial state:\n%s\n%s", vec_str_buf, karray_str_buf);
895*4f1223e8SApple OSS Distributions
896*4f1223e8SApple OSS Distributions copy_zmm_state_to_vector(&avx_state, vec512array1);
897*4f1223e8SApple OSS Distributions assert_zmm_eq(vec512array2, vec512array1, sizeof(vec512array1));
898*4f1223e8SApple OSS Distributions copy_state_to_opmask(&avx_state, karray1);
899*4f1223e8SApple OSS Distributions assert_opmask_eq(karray2, karray1);
900*4f1223e8SApple OSS Distributions
901*4f1223e8SApple OSS Distributions populate_zmm();
902*4f1223e8SApple OSS Distributions populate_opmask();
903*4f1223e8SApple OSS Distributions
904*4f1223e8SApple OSS Distributions kret = _thread_get_state_avx512(
905*4f1223e8SApple OSS Distributions ts, X86_AVX512_STATE_FLAVOR, (thread_state_t)&avx_state2, &avx_count
906*4f1223e8SApple OSS Distributions );
907*4f1223e8SApple OSS Distributions
908*4f1223e8SApple OSS Distributions store_zmm(vec512array2);
909*4f1223e8SApple OSS Distributions store_opmask(karray2);
910*4f1223e8SApple OSS Distributions
911*4f1223e8SApple OSS Distributions T_QUIET; T_ASSERT_MACH_SUCCESS(kret, "thread_get_state()");
912*4f1223e8SApple OSS Distributions vec512_to_string(vec512array2, vec_str_buf);
913*4f1223e8SApple OSS Distributions opmask_to_string(karray2, karray_str_buf);
914*4f1223e8SApple OSS Distributions T_LOG("Populated state:\n%s\n%s", vec_str_buf, karray_str_buf);
915*4f1223e8SApple OSS Distributions
916*4f1223e8SApple OSS Distributions copy_zmm_state_to_vector(&avx_state2, vec512array1);
917*4f1223e8SApple OSS Distributions assert_zmm_eq(vec512array2, vec512array1, sizeof(vec512array1));
918*4f1223e8SApple OSS Distributions copy_state_to_opmask(&avx_state2, karray1);
919*4f1223e8SApple OSS Distributions assert_opmask_eq(karray2, karray1);
920*4f1223e8SApple OSS Distributions
921*4f1223e8SApple OSS Distributions T_LOG("Running for %ds…", time);
922*4f1223e8SApple OSS Distributions start_timer(time, zmm_sigalrm_handler);
923*4f1223e8SApple OSS Distributions
924*4f1223e8SApple OSS Distributions /* re-populate because printing mucks up XMMs */
925*4f1223e8SApple OSS Distributions populate_zmm();
926*4f1223e8SApple OSS Distributions populate_opmask();
927*4f1223e8SApple OSS Distributions
928*4f1223e8SApple OSS Distributions /* Check state until timer fires */
929*4f1223e8SApple OSS Distributions while (checking) {
930*4f1223e8SApple OSS Distributions check_zmm(TRUE);
931*4f1223e8SApple OSS Distributions }
932*4f1223e8SApple OSS Distributions
933*4f1223e8SApple OSS Distributions /* Check that the sig handler changed our AVX state */
934*4f1223e8SApple OSS Distributions store_zmm(vec512array1);
935*4f1223e8SApple OSS Distributions store_opmask(karray1);
936*4f1223e8SApple OSS Distributions
937*4f1223e8SApple OSS Distributions uint64_t *p = (uint64_t *) &vec512array1[7];
938*4f1223e8SApple OSS Distributions if (p[0] != STOP_COOKIE_512 ||
939*4f1223e8SApple OSS Distributions p[2] != STOP_COOKIE_512 ||
940*4f1223e8SApple OSS Distributions p[4] != STOP_COOKIE_512 ||
941*4f1223e8SApple OSS Distributions karray1[7] != STOP_COOKIE_512) {
942*4f1223e8SApple OSS Distributions vec512_to_string(vec512array1, vec_str_buf);
943*4f1223e8SApple OSS Distributions opmask_to_string(karray1, karray_str_buf);
944*4f1223e8SApple OSS Distributions T_ASSERT_FAIL("sigreturn failed to stick");
945*4f1223e8SApple OSS Distributions T_LOG("State:\n%s\n%s", vec_str_buf, karray_str_buf);
946*4f1223e8SApple OSS Distributions }
947*4f1223e8SApple OSS Distributions
948*4f1223e8SApple OSS Distributions T_LOG("Ran for %ds", time);
949*4f1223e8SApple OSS Distributions T_PASS("No zmm register corruption occurred");
950*4f1223e8SApple OSS Distributions }
951*4f1223e8SApple OSS Distributions
952*4f1223e8SApple OSS Distributions void
zmm_zeroing_optimization_integrity(int time)953*4f1223e8SApple OSS Distributions zmm_zeroing_optimization_integrity(int time)
954*4f1223e8SApple OSS Distributions {
955*4f1223e8SApple OSS Distributions /*
956*4f1223e8SApple OSS Distributions * Check ZMM zero and OpMask zero
957*4f1223e8SApple OSS Distributions */
958*4f1223e8SApple OSS Distributions T_LOG("Checking ZMM zero and OpMask zero");
959*4f1223e8SApple OSS Distributions checking = true;
960*4f1223e8SApple OSS Distributions zero_zmm();
961*4f1223e8SApple OSS Distributions zero_opmask();
962*4f1223e8SApple OSS Distributions
963*4f1223e8SApple OSS Distributions T_LOG("Running for %ds…", time);
964*4f1223e8SApple OSS Distributions start_timer(time, zmm_sigalrm_handler_no_mod);
965*4f1223e8SApple OSS Distributions
966*4f1223e8SApple OSS Distributions /* re-populate because printing mucks up XMMs */
967*4f1223e8SApple OSS Distributions zero_zmm();
968*4f1223e8SApple OSS Distributions zero_opmask();
969*4f1223e8SApple OSS Distributions
970*4f1223e8SApple OSS Distributions /* Check state until timer fires */
971*4f1223e8SApple OSS Distributions while (checking) {
972*4f1223e8SApple OSS Distributions check_zmm(FALSE);
973*4f1223e8SApple OSS Distributions }
974*4f1223e8SApple OSS Distributions
975*4f1223e8SApple OSS Distributions /* Check that sig handler did not changed our AVX state */
976*4f1223e8SApple OSS Distributions store_zmm(vec512array2);
977*4f1223e8SApple OSS Distributions store_opmask(karray2);
978*4f1223e8SApple OSS Distributions
979*4f1223e8SApple OSS Distributions assert_zmm_eq(vec512array0, vec512array2, sizeof(vec512array2));
980*4f1223e8SApple OSS Distributions assert_opmask_eq(karray0, karray2);
981*4f1223e8SApple OSS Distributions
982*4f1223e8SApple OSS Distributions T_LOG("Ran for %ds", time);
983*4f1223e8SApple OSS Distributions T_PASS("ZMM zero and OpMask zero");
984*4f1223e8SApple OSS Distributions
985*4f1223e8SApple OSS Distributions
986*4f1223e8SApple OSS Distributions /*
987*4f1223e8SApple OSS Distributions * Check ZMM zero and OpMask non-zero
988*4f1223e8SApple OSS Distributions */
989*4f1223e8SApple OSS Distributions T_LOG("Checking ZMM zero and OpMask non-zero");
990*4f1223e8SApple OSS Distributions checking = true;
991*4f1223e8SApple OSS Distributions zero_zmm();
992*4f1223e8SApple OSS Distributions populate_opmask();
993*4f1223e8SApple OSS Distributions
994*4f1223e8SApple OSS Distributions T_LOG("Running for %ds…", time);
995*4f1223e8SApple OSS Distributions start_timer(time, zmm_sigalrm_handler_no_mod);
996*4f1223e8SApple OSS Distributions
997*4f1223e8SApple OSS Distributions /* re-populate because printing mucks up XMMs */
998*4f1223e8SApple OSS Distributions zero_zmm();
999*4f1223e8SApple OSS Distributions populate_opmask();
1000*4f1223e8SApple OSS Distributions
1001*4f1223e8SApple OSS Distributions /* Check state until timer fires */
1002*4f1223e8SApple OSS Distributions while (checking) {
1003*4f1223e8SApple OSS Distributions check_zmm(FALSE);
1004*4f1223e8SApple OSS Distributions }
1005*4f1223e8SApple OSS Distributions
1006*4f1223e8SApple OSS Distributions /* Check that sig handler did not changed our AVX state */
1007*4f1223e8SApple OSS Distributions store_zmm(vec512array2);
1008*4f1223e8SApple OSS Distributions store_opmask(karray2);
1009*4f1223e8SApple OSS Distributions
1010*4f1223e8SApple OSS Distributions assert_zmm_eq(vec512array0, vec512array2, sizeof(vec512array2));
1011*4f1223e8SApple OSS Distributions assert_opmask_eq(karray0, karray2);
1012*4f1223e8SApple OSS Distributions
1013*4f1223e8SApple OSS Distributions T_LOG("Ran for %ds", time);
1014*4f1223e8SApple OSS Distributions T_PASS("ZMM zero and OpMask non-zero");
1015*4f1223e8SApple OSS Distributions
1016*4f1223e8SApple OSS Distributions
1017*4f1223e8SApple OSS Distributions /*
1018*4f1223e8SApple OSS Distributions * Check ZMM non-zero and OpMask zero
1019*4f1223e8SApple OSS Distributions */
1020*4f1223e8SApple OSS Distributions T_LOG("Checking ZMM non-zero and OpMask zero");
1021*4f1223e8SApple OSS Distributions checking = true;
1022*4f1223e8SApple OSS Distributions populate_zmm();
1023*4f1223e8SApple OSS Distributions zero_opmask();
1024*4f1223e8SApple OSS Distributions
1025*4f1223e8SApple OSS Distributions T_LOG("Running for %ds…", time);
1026*4f1223e8SApple OSS Distributions start_timer(time, zmm_sigalrm_handler_no_mod);
1027*4f1223e8SApple OSS Distributions
1028*4f1223e8SApple OSS Distributions /* re-populate because printing mucks up XMMs */
1029*4f1223e8SApple OSS Distributions populate_zmm();
1030*4f1223e8SApple OSS Distributions zero_opmask();
1031*4f1223e8SApple OSS Distributions
1032*4f1223e8SApple OSS Distributions /* Check state until timer fires */
1033*4f1223e8SApple OSS Distributions while (checking) {
1034*4f1223e8SApple OSS Distributions check_zmm(FALSE);
1035*4f1223e8SApple OSS Distributions }
1036*4f1223e8SApple OSS Distributions
1037*4f1223e8SApple OSS Distributions /* Check that sig handler did not changed our AVX state */
1038*4f1223e8SApple OSS Distributions store_zmm(vec512array2);
1039*4f1223e8SApple OSS Distributions store_opmask(karray2);
1040*4f1223e8SApple OSS Distributions
1041*4f1223e8SApple OSS Distributions assert_zmm_eq(vec512array0, vec512array2, sizeof(vec512array2));
1042*4f1223e8SApple OSS Distributions assert_opmask_eq(karray0, karray2);
1043*4f1223e8SApple OSS Distributions
1044*4f1223e8SApple OSS Distributions T_LOG("Ran for %ds", time);
1045*4f1223e8SApple OSS Distributions T_PASS("ZMM non-zero and OpMask zero");
1046*4f1223e8SApple OSS Distributions
1047*4f1223e8SApple OSS Distributions
1048*4f1223e8SApple OSS Distributions /*
1049*4f1223e8SApple OSS Distributions * Check ZMM non-zero and OpMask non-zero
1050*4f1223e8SApple OSS Distributions */
1051*4f1223e8SApple OSS Distributions T_LOG("Checking ZMM non-zero and OpMask non-zero");
1052*4f1223e8SApple OSS Distributions checking = true;
1053*4f1223e8SApple OSS Distributions populate_zmm();
1054*4f1223e8SApple OSS Distributions populate_opmask();
1055*4f1223e8SApple OSS Distributions
1056*4f1223e8SApple OSS Distributions T_LOG("Running for %ds…", time);
1057*4f1223e8SApple OSS Distributions start_timer(time, zmm_sigalrm_handler_no_mod);
1058*4f1223e8SApple OSS Distributions
1059*4f1223e8SApple OSS Distributions /* re-populate because printing mucks up XMMs */
1060*4f1223e8SApple OSS Distributions populate_zmm();
1061*4f1223e8SApple OSS Distributions populate_opmask();
1062*4f1223e8SApple OSS Distributions
1063*4f1223e8SApple OSS Distributions /* Check state until timer fires */
1064*4f1223e8SApple OSS Distributions while (checking) {
1065*4f1223e8SApple OSS Distributions check_zmm(FALSE);
1066*4f1223e8SApple OSS Distributions }
1067*4f1223e8SApple OSS Distributions
1068*4f1223e8SApple OSS Distributions /* Check that sig handler did not changed our AVX state */
1069*4f1223e8SApple OSS Distributions store_zmm(vec512array2);
1070*4f1223e8SApple OSS Distributions store_opmask(karray2);
1071*4f1223e8SApple OSS Distributions
1072*4f1223e8SApple OSS Distributions assert_zmm_eq(vec512array0, vec512array2, sizeof(vec512array2));
1073*4f1223e8SApple OSS Distributions assert_opmask_eq(karray0, karray2);
1074*4f1223e8SApple OSS Distributions
1075*4f1223e8SApple OSS Distributions T_LOG("Ran for %ds", time);
1076*4f1223e8SApple OSS Distributions T_PASS("ZMM non-zero and OpMask non-zero");
1077*4f1223e8SApple OSS Distributions }
1078*4f1223e8SApple OSS Distributions
1079*4f1223e8SApple OSS Distributions /*
1080*4f1223e8SApple OSS Distributions * Main test declarations
1081*4f1223e8SApple OSS Distributions */
1082*4f1223e8SApple OSS Distributions T_DECL(ymm_integrity,
1083*4f1223e8SApple OSS Distributions "Quick soak test to verify that AVX "
1084*4f1223e8SApple OSS Distributions "register state is maintained correctly",
1085*4f1223e8SApple OSS Distributions T_META_TIMEOUT(NORMAL_RUN_TIME + TIMEOUT_OVERHEAD)) {
1086*4f1223e8SApple OSS Distributions require_avx();
1087*4f1223e8SApple OSS Distributions ymm_integrity(NORMAL_RUN_TIME);
1088*4f1223e8SApple OSS Distributions }
1089*4f1223e8SApple OSS Distributions
1090*4f1223e8SApple OSS Distributions T_DECL(ymm_integrity_stress,
1091*4f1223e8SApple OSS Distributions "Extended soak test to verify that AVX "
1092*4f1223e8SApple OSS Distributions "register state is maintained correctly",
1093*4f1223e8SApple OSS Distributions T_META_TIMEOUT(LONG_RUN_TIME + TIMEOUT_OVERHEAD),
1094*4f1223e8SApple OSS Distributions T_META_ENABLED(false)) {
1095*4f1223e8SApple OSS Distributions require_avx();
1096*4f1223e8SApple OSS Distributions ymm_integrity(LONG_RUN_TIME);
1097*4f1223e8SApple OSS Distributions }
1098*4f1223e8SApple OSS Distributions
1099*4f1223e8SApple OSS Distributions T_DECL(zmm_integrity,
1100*4f1223e8SApple OSS Distributions "Quick soak test to verify that AVX-512 "
1101*4f1223e8SApple OSS Distributions "register state is maintained correctly",
1102*4f1223e8SApple OSS Distributions T_META_TIMEOUT(NORMAL_RUN_TIME + TIMEOUT_OVERHEAD)) {
1103*4f1223e8SApple OSS Distributions require_avx512();
1104*4f1223e8SApple OSS Distributions zmm_integrity(NORMAL_RUN_TIME);
1105*4f1223e8SApple OSS Distributions }
1106*4f1223e8SApple OSS Distributions
1107*4f1223e8SApple OSS Distributions T_DECL(zmm_integrity_stress,
1108*4f1223e8SApple OSS Distributions "Extended soak test to verify that AVX-512 "
1109*4f1223e8SApple OSS Distributions "register state is maintained correctly",
1110*4f1223e8SApple OSS Distributions T_META_TIMEOUT(LONG_RUN_TIME + TIMEOUT_OVERHEAD),
1111*4f1223e8SApple OSS Distributions T_META_ENABLED(false)) {
1112*4f1223e8SApple OSS Distributions require_avx512();
1113*4f1223e8SApple OSS Distributions zmm_integrity(LONG_RUN_TIME);
1114*4f1223e8SApple OSS Distributions }
1115*4f1223e8SApple OSS Distributions
1116*4f1223e8SApple OSS Distributions T_DECL(zmm_zeroing_optimization_integrity,
1117*4f1223e8SApple OSS Distributions "Quick soak test to verify AVX-512 "
1118*4f1223e8SApple OSS Distributions "register state is maintained with "
1119*4f1223e8SApple OSS Distributions "zeroing optimizations enabled",
1120*4f1223e8SApple OSS Distributions T_META_TIMEOUT(QUICK_RUN_TIME + TIMEOUT_OVERHEAD)) {
1121*4f1223e8SApple OSS Distributions require_avx512();
1122*4f1223e8SApple OSS Distributions zmm_zeroing_optimization_integrity(QUICK_RUN_TIME);
1123*4f1223e8SApple OSS Distributions }
1124