xref: /xnu-11417.101.15/tests/avx.c (revision e3723e1f17661b24996789d8afc084c0c3303b26)
1*e3723e1fSApple OSS Distributions #ifdef T_NAMESPACE
2*e3723e1fSApple OSS Distributions #undef T_NAMESPACE
3*e3723e1fSApple OSS Distributions #endif
4*e3723e1fSApple OSS Distributions 
5*e3723e1fSApple OSS Distributions #include <darwintest.h>
6*e3723e1fSApple OSS Distributions #include <unistd.h>
7*e3723e1fSApple OSS Distributions #include <signal.h>
8*e3723e1fSApple OSS Distributions #include <sys/time.h>
9*e3723e1fSApple OSS Distributions #include <sys/mman.h>
10*e3723e1fSApple OSS Distributions #include <immintrin.h>
11*e3723e1fSApple OSS Distributions #include <mach/mach.h>
12*e3723e1fSApple OSS Distributions #include <stdio.h>
13*e3723e1fSApple OSS Distributions #include <string.h>
14*e3723e1fSApple OSS Distributions #include <err.h>
15*e3723e1fSApple OSS Distributions #include <i386/cpu_capabilities.h>
16*e3723e1fSApple OSS Distributions 
17*e3723e1fSApple OSS Distributions T_GLOBAL_META(
18*e3723e1fSApple OSS Distributions 	T_META_NAMESPACE("xnu.intel"),
19*e3723e1fSApple OSS Distributions 	T_META_CHECK_LEAKS(false),
20*e3723e1fSApple OSS Distributions 	T_META_RADAR_COMPONENT_NAME("xnu"),
21*e3723e1fSApple OSS Distributions 	T_META_RADAR_COMPONENT_VERSION("intel"),
22*e3723e1fSApple OSS Distributions 	T_META_OWNER("seth_goldberg"),
23*e3723e1fSApple OSS Distributions 	T_META_RUN_CONCURRENTLY(true)
24*e3723e1fSApple OSS Distributions 	);
25*e3723e1fSApple OSS Distributions 
/* Test durations in seconds; TIMEOUT_OVERHEAD is slack added on top of a
 * run time when computing a darwintest timeout. */
#define QUICK_RUN_TIME   (2)
#define NORMAL_RUN_TIME  (10)
#define LONG_RUN_TIME    (10*60)
#define TIMEOUT_OVERHEAD (10)

/* Spin-loop flag: the SIGALRM handlers clear this to end a test run. */
volatile boolean_t checking = true;
/* Scratch buffer for printing vector register contents.
 * NOTE(review): 8196 looks like a typo for 8192, but it is only a buffer
 * size, so it is harmless — confirm before "fixing". */
char vec_str_buf[8196];
/* Scratch buffer for printing AVX-512 opmask (k-register) contents. */
char karray_str_buf[1024];

/*
 * ymm defines/globals/prototypes
 */
/* Sentinel the signal handler writes into the saved xmm7/ymmh7 so the main
 * loop can detect that the modified signal context took effect. */
#define STOP_COOKIE_256 0x01234567
#if defined(__x86_64__)
#define YMM_MAX                 16      /* ymm0..ymm15 in 64-bit mode */
#define X86_AVX_STATE_T         x86_avx_state64_t
#define X86_AVX_STATE_COUNT     x86_AVX_STATE64_COUNT
#define X86_AVX_STATE_FLAVOR    x86_AVX_STATE64
#define MCONTEXT_SIZE_256       sizeof(struct __darwin_mcontext_avx64)
#else
#define YMM_MAX                 8       /* only ymm0..ymm7 in 32-bit mode */
#define X86_AVX_STATE_T         x86_avx_state32_t
#define X86_AVX_STATE_COUNT     x86_AVX_STATE32_COUNT
#define X86_AVX_STATE_FLAVOR    x86_AVX_STATE32
#define MCONTEXT_SIZE_256       sizeof(struct __darwin_mcontext_avx32)
#endif
#define VECTOR256 __m256
/* vmovaps on ymm requires 32-byte-aligned memory operands */
#define VEC256ALIGN __attribute ((aligned(32)))
static inline void populate_ymm(void);
static inline void check_ymm(void);
/* Reference pattern (array0) and scratch snapshots for the ymm tests. */
VECTOR256       vec256array0[YMM_MAX] VEC256ALIGN;
VECTOR256       vec256array1[YMM_MAX] VEC256ALIGN;
VECTOR256       vec256array2[YMM_MAX] VEC256ALIGN;
VECTOR256       vec256array3[YMM_MAX] VEC256ALIGN;

/*
 * zmm defines/globals/prototypes
 */
#define STOP_COOKIE_512 0x0123456789abcdefULL
#if defined(__x86_64__)
#define ZMM_MAX                 32      /* zmm0..zmm31 in 64-bit mode */
#define X86_AVX512_STATE_T      x86_avx512_state64_t
#define X86_AVX512_STATE_COUNT  x86_AVX512_STATE64_COUNT
#define X86_AVX512_STATE_FLAVOR x86_AVX512_STATE64
#define MCONTEXT_SIZE_512       sizeof(struct __darwin_mcontext_avx512_64)
#else
#define ZMM_MAX                 8
#define X86_AVX512_STATE_T      x86_avx512_state32_t
#define X86_AVX512_STATE_COUNT  x86_AVX512_STATE32_COUNT
#define X86_AVX512_STATE_FLAVOR x86_AVX512_STATE32
#define MCONTEXT_SIZE_512       sizeof(struct __darwin_mcontext_avx512_32)
#endif
#define VECTOR512 __m512
/* vmovaps on zmm requires 64-byte-aligned memory operands */
#define VEC512ALIGN __attribute ((aligned(64)))
#define OPMASK uint64_t         /* an opmask (k) register is at most 64 bits */
#define KARRAY_MAX              8
static inline void zero_zmm(void);
static inline void zero_opmask(void);
static inline void populate_zmm(void);
static inline void populate_opmask(void);
static inline void check_zmm(boolean_t check_cookie);
VECTOR512       vec512array0[ZMM_MAX] VEC512ALIGN;
VECTOR512       vec512array1[ZMM_MAX] VEC512ALIGN;
VECTOR512       vec512array2[ZMM_MAX] VEC512ALIGN;
VECTOR512       vec512array3[ZMM_MAX] VEC512ALIGN;
/* Snapshots of the eight k-registers (k0..k7). */
OPMASK karray0[8];
OPMASK karray1[8];
OPMASK karray2[8];
OPMASK karray3[8];

/* Wrappers that preserve live vector state across thread_get_state(). */
kern_return_t _thread_get_state_avx(thread_t thread, int flavor, thread_state_t state,
    mach_msg_type_number_t *state_count);
kern_return_t _thread_get_state_avx512(thread_t thread, int flavor, thread_state_t state,
    mach_msg_type_number_t *state_count);
100*e3723e1fSApple OSS Distributions 
101*e3723e1fSApple OSS Distributions /*
102*e3723e1fSApple OSS Distributions  * Common functions
103*e3723e1fSApple OSS Distributions  */
104*e3723e1fSApple OSS Distributions 
/*
 * Byte-wise buffer comparison with memcmp(3) semantics.
 *
 * A private implementation is used instead of libc memcmp, presumably so
 * the comparison is not routed to an optimized library routine that could
 * itself use the vector registers this test is monitoring — confirm.
 *
 * Returns 0 when the first n bytes of s1 and s2 are equal, otherwise the
 * difference of the first mismatching bytes (compared as unsigned chars).
 */
int
memcmp_unoptimized(const void *s1, const void *s2, size_t n)
{
	const unsigned char *a = s1;
	const unsigned char *b = s2;
	size_t idx;

	for (idx = 0; idx < n; idx++) {
		if (a[idx] != b[idx]) {
			return a[idx] - b[idx];
		}
	}
	return 0;
}
118*e3723e1fSApple OSS Distributions 
119*e3723e1fSApple OSS Distributions void
start_timer(int seconds,void (* handler)(int,siginfo_t *,void *))120*e3723e1fSApple OSS Distributions start_timer(int seconds, void (*handler)(int, siginfo_t *, void *))
121*e3723e1fSApple OSS Distributions {
122*e3723e1fSApple OSS Distributions 	struct sigaction sigalrm_action = {
123*e3723e1fSApple OSS Distributions 		.sa_sigaction = handler,
124*e3723e1fSApple OSS Distributions 		.sa_flags = SA_RESTART,
125*e3723e1fSApple OSS Distributions 		.sa_mask = 0
126*e3723e1fSApple OSS Distributions 	};
127*e3723e1fSApple OSS Distributions 	struct itimerval timer = {
128*e3723e1fSApple OSS Distributions 		.it_value.tv_sec = seconds,
129*e3723e1fSApple OSS Distributions 		.it_value.tv_usec = 0,
130*e3723e1fSApple OSS Distributions 		.it_interval.tv_sec = 0,
131*e3723e1fSApple OSS Distributions 		.it_interval.tv_usec = 0
132*e3723e1fSApple OSS Distributions 	};
133*e3723e1fSApple OSS Distributions 	T_QUIET; T_WITH_ERRNO;
134*e3723e1fSApple OSS Distributions 	T_ASSERT_NE(sigaction(SIGALRM, &sigalrm_action, NULL), -1, NULL);
135*e3723e1fSApple OSS Distributions 	T_QUIET; T_WITH_ERRNO;
136*e3723e1fSApple OSS Distributions 	T_ASSERT_NE(setitimer(ITIMER_REAL, &timer, NULL), -1, NULL);
137*e3723e1fSApple OSS Distributions }
138*e3723e1fSApple OSS Distributions 
139*e3723e1fSApple OSS Distributions void
require_avx(void)140*e3723e1fSApple OSS Distributions require_avx(void)
141*e3723e1fSApple OSS Distributions {
142*e3723e1fSApple OSS Distributions 	if ((_get_cpu_capabilities() & kHasAVX1_0) != kHasAVX1_0) {
143*e3723e1fSApple OSS Distributions 		T_SKIP("AVX not supported on this system");
144*e3723e1fSApple OSS Distributions 	}
145*e3723e1fSApple OSS Distributions }
146*e3723e1fSApple OSS Distributions 
147*e3723e1fSApple OSS Distributions void
require_avx512(void)148*e3723e1fSApple OSS Distributions require_avx512(void)
149*e3723e1fSApple OSS Distributions {
150*e3723e1fSApple OSS Distributions 	if ((_get_cpu_capabilities() & kHasAVX512F) != kHasAVX512F) {
151*e3723e1fSApple OSS Distributions 		T_SKIP("AVX-512 not supported on this system");
152*e3723e1fSApple OSS Distributions 	}
153*e3723e1fSApple OSS Distributions }
154*e3723e1fSApple OSS Distributions 
155*e3723e1fSApple OSS Distributions /*
156*e3723e1fSApple OSS Distributions  * ymm functions
157*e3723e1fSApple OSS Distributions  */
158*e3723e1fSApple OSS Distributions 
/*
 * Snapshot the live contents of ymm0..ymm(YMM_MAX-1) into `vec256array`.
 * vmovaps requires the destination to be 32-byte aligned, so callers must
 * pass VEC256ALIGN arrays.
 */
static inline void
store_ymm(VECTOR256 *vec256array)
{
	int i = 0;
	__asm__ volatile ("vmovaps  %%ymm0, %0" :"=m" (vec256array[i]));
	i++; __asm__ volatile ("vmovaps  %%ymm1, %0" :"=m" (vec256array[i]));
	i++; __asm__ volatile ("vmovaps  %%ymm2, %0" :"=m" (vec256array[i]));
	i++; __asm__ volatile ("vmovaps  %%ymm3, %0" :"=m" (vec256array[i]));
	i++; __asm__ volatile ("vmovaps  %%ymm4, %0" :"=m" (vec256array[i]));
	i++; __asm__ volatile ("vmovaps  %%ymm5, %0" :"=m" (vec256array[i]));
	i++; __asm__ volatile ("vmovaps  %%ymm6, %0" :"=m" (vec256array[i]));
	i++; __asm__ volatile ("vmovaps  %%ymm7, %0" :"=m" (vec256array[i]));
#if defined(__x86_64__)
	/* ymm8..ymm15 only exist in 64-bit mode */
	i++; __asm__ volatile ("vmovaps  %%ymm8, %0" :"=m" (vec256array[i]));
	i++; __asm__ volatile ("vmovaps  %%ymm9, %0" :"=m" (vec256array[i]));
	i++; __asm__ volatile ("vmovaps  %%ymm10, %0" :"=m" (vec256array[i]));
	i++; __asm__ volatile ("vmovaps  %%ymm11, %0" :"=m" (vec256array[i]));
	i++; __asm__ volatile ("vmovaps  %%ymm12, %0" :"=m" (vec256array[i]));
	i++; __asm__ volatile ("vmovaps  %%ymm13, %0" :"=m" (vec256array[i]));
	i++; __asm__ volatile ("vmovaps  %%ymm14, %0" :"=m" (vec256array[i]));
	i++; __asm__ volatile ("vmovaps  %%ymm15, %0" :"=m" (vec256array[i]));
#endif
}
182*e3723e1fSApple OSS Distributions 
/*
 * Load ymm0..ymm(YMM_MAX-1) from the 32-byte-aligned array `vec256array`
 * (the inverse of store_ymm).  Each asm names the target ymm as a clobber
 * so the compiler knows that register's prior value is destroyed.
 */
static inline void
restore_ymm(VECTOR256 *vec256array)
{
	VECTOR256 *p = vec256array;

	__asm__ volatile ("vmovaps  %0, %%ymm0" :: "m" (*(__m256i*)p) : "ymm0"); p++;
	__asm__ volatile ("vmovaps  %0, %%ymm1" :: "m" (*(__m256i*)p) : "ymm1"); p++;
	__asm__ volatile ("vmovaps  %0, %%ymm2" :: "m" (*(__m256i*)p) : "ymm2"); p++;
	__asm__ volatile ("vmovaps  %0, %%ymm3" :: "m" (*(__m256i*)p) : "ymm3"); p++;
	__asm__ volatile ("vmovaps  %0, %%ymm4" :: "m" (*(__m256i*)p) : "ymm4"); p++;
	__asm__ volatile ("vmovaps  %0, %%ymm5" :: "m" (*(__m256i*)p) : "ymm5"); p++;
	__asm__ volatile ("vmovaps  %0, %%ymm6" :: "m" (*(__m256i*)p) : "ymm6"); p++;
	__asm__ volatile ("vmovaps  %0, %%ymm7" :: "m" (*(__m256i*)p) : "ymm7");

#if defined(__x86_64__)
	/* no p++ after ymm7 above; the pre-increment here advances to slot 8 */
	++p; __asm__ volatile ("vmovaps  %0, %%ymm8" :: "m" (*(__m256i*)p) : "ymm8"); p++;
	__asm__ volatile ("vmovaps  %0, %%ymm9" :: "m" (*(__m256i*)p) : "ymm9"); p++;
	__asm__ volatile ("vmovaps  %0, %%ymm10" :: "m" (*(__m256i*)p) : "ymm10"); p++;
	__asm__ volatile ("vmovaps  %0, %%ymm11" :: "m" (*(__m256i*)p) : "ymm11"); p++;
	__asm__ volatile ("vmovaps  %0, %%ymm12" :: "m" (*(__m256i*)p) : "ymm12"); p++;
	__asm__ volatile ("vmovaps  %0, %%ymm13" :: "m" (*(__m256i*)p) : "ymm13"); p++;
	__asm__ volatile ("vmovaps  %0, %%ymm14" :: "m" (*(__m256i*)p) : "ymm14"); p++;
	__asm__ volatile ("vmovaps  %0, %%ymm15" :: "m" (*(__m256i*)p) : "ymm15");
#endif
}
208*e3723e1fSApple OSS Distributions 
/*
 * Load a recognizable pattern into every YMM register and record it in
 * vec256array0 as the reference for later comparisons.  Each group of
 * registers gets getpid() in the middle lanes with distinctive marker
 * values in lanes 0 and 7, so corruption is attributable to a register
 * group when it is dumped.
 */
static inline void
populate_ymm(void)
{
	int j;
	uint32_t p[8] VEC256ALIGN;

	/* base pattern: every 32-bit lane holds this process's pid */
	for (j = 0; j < (int) (sizeof(p) / sizeof(p[0])); j++) {
		p[j] = getpid();
	}

	p[0] = 0x22222222;
	p[7] = 0x77777777;
	__asm__ volatile ("vmovaps  %0, %%ymm0" :: "m" (*(__m256i*)p) : "ymm0");
	__asm__ volatile ("vmovaps  %0, %%ymm1" :: "m" (*(__m256i*)p) : "ymm1");
	__asm__ volatile ("vmovaps  %0, %%ymm2" :: "m" (*(__m256i*)p) : "ymm2");
	__asm__ volatile ("vmovaps  %0, %%ymm3" :: "m" (*(__m256i*)p) : "ymm3");

	p[0] = 0x44444444;
	p[7] = 0xEEEEEEEE;
	__asm__ volatile ("vmovaps  %0, %%ymm4" :: "m" (*(__m256i*)p) : "ymm4");
	__asm__ volatile ("vmovaps  %0, %%ymm5" :: "m" (*(__m256i*)p) : "ymm5");
	__asm__ volatile ("vmovaps  %0, %%ymm6" :: "m" (*(__m256i*)p) : "ymm6");
	__asm__ volatile ("vmovaps  %0, %%ymm7" :: "m" (*(__m256i*)p) : "ymm7");

#if defined(__x86_64__)
	p[0] = 0x88888888;
	p[7] = 0xAAAAAAAA;
	__asm__ volatile ("vmovaps  %0, %%ymm8" :: "m" (*(__m256i*)p) : "ymm8");
	__asm__ volatile ("vmovaps  %0, %%ymm9" :: "m" (*(__m256i*)p) : "ymm9");
	__asm__ volatile ("vmovaps  %0, %%ymm10" :: "m" (*(__m256i*)p) : "ymm10");
	__asm__ volatile ("vmovaps  %0, %%ymm11" :: "m" (*(__m256i*)p) : "ymm11");

	p[0] = 0xBBBBBBBB;
	p[7] = 0xCCCCCCCC;
	__asm__ volatile ("vmovaps  %0, %%ymm12" :: "m" (*(__m256i*)p) : "ymm12");
	__asm__ volatile ("vmovaps  %0, %%ymm13" :: "m" (*(__m256i*)p) : "ymm13");
	__asm__ volatile ("vmovaps  %0, %%ymm14" :: "m" (*(__m256i*)p) : "ymm14");
	__asm__ volatile ("vmovaps  %0, %%ymm15" :: "m" (*(__m256i*)p) : "ymm15");
#endif

	/* capture the freshly-loaded registers as the reference pattern */
	store_ymm(vec256array0);
}
251*e3723e1fSApple OSS Distributions 
252*e3723e1fSApple OSS Distributions void
vec256_to_string(VECTOR256 * vec,char * buf)253*e3723e1fSApple OSS Distributions vec256_to_string(VECTOR256 *vec, char *buf)
254*e3723e1fSApple OSS Distributions {
255*e3723e1fSApple OSS Distributions 	unsigned int vec_idx = 0;
256*e3723e1fSApple OSS Distributions 	unsigned int buf_idx = 0;
257*e3723e1fSApple OSS Distributions 	int ret = 0;
258*e3723e1fSApple OSS Distributions 
259*e3723e1fSApple OSS Distributions 	for (vec_idx = 0; vec_idx < YMM_MAX; vec_idx++) {
260*e3723e1fSApple OSS Distributions 		uint64_t a[4];
261*e3723e1fSApple OSS Distributions 		bcopy(&vec[vec_idx], &a[0], sizeof(a));
262*e3723e1fSApple OSS Distributions 		ret = sprintf(
263*e3723e1fSApple OSS Distributions 			buf + buf_idx,
264*e3723e1fSApple OSS Distributions 			"0x%016llx:%016llx:%016llx:%016llx\n",
265*e3723e1fSApple OSS Distributions 			a[0], a[1], a[2], a[3]
266*e3723e1fSApple OSS Distributions 			);
267*e3723e1fSApple OSS Distributions 		T_QUIET; T_ASSERT_POSIX_SUCCESS(ret, "sprintf()");
268*e3723e1fSApple OSS Distributions 		buf_idx += ret;
269*e3723e1fSApple OSS Distributions 	}
270*e3723e1fSApple OSS Distributions }
271*e3723e1fSApple OSS Distributions 
272*e3723e1fSApple OSS Distributions void
assert_ymm_eq(void * a,void * b,int c)273*e3723e1fSApple OSS Distributions assert_ymm_eq(void *a, void *b, int c)
274*e3723e1fSApple OSS Distributions {
275*e3723e1fSApple OSS Distributions 	if (memcmp_unoptimized(a, b, c)) {
276*e3723e1fSApple OSS Distributions 		vec256_to_string(a, vec_str_buf);
277*e3723e1fSApple OSS Distributions 		T_LOG("Compare failed, vector A:\n%s", vec_str_buf);
278*e3723e1fSApple OSS Distributions 		vec256_to_string(b, vec_str_buf);
279*e3723e1fSApple OSS Distributions 		T_LOG("Compare failed, vector B:\n%s", vec_str_buf);
280*e3723e1fSApple OSS Distributions 		T_ASSERT_FAIL("vectors not equal");
281*e3723e1fSApple OSS Distributions 	}
282*e3723e1fSApple OSS Distributions }
283*e3723e1fSApple OSS Distributions 
/*
 * One iteration of the integrity loop: snapshot the current YMM registers
 * into vec256array1 and compare against the reference in vec256array0.
 * If the low dword of the slot-7 snapshot already holds STOP_COOKIE_256,
 * the signal handler has rewritten the register state, so the comparison
 * is skipped (the main loop will exit shortly via `checking`).
 *
 * Note `p` is taken before store_ymm(); it aliases the array element, so
 * it observes the value written by the store.
 */
void
check_ymm(void)
{
	uint32_t *p = (uint32_t *) &vec256array1[7];
	store_ymm(vec256array1);
	if (p[0] == STOP_COOKIE_256) {
		return;
	}
	assert_ymm_eq(vec256array0, vec256array1, sizeof(vec256array0));
}
294*e3723e1fSApple OSS Distributions 
295*e3723e1fSApple OSS Distributions static void
copy_ymm_state_to_vector(X86_AVX_STATE_T * sp,VECTOR256 * vp)296*e3723e1fSApple OSS Distributions copy_ymm_state_to_vector(X86_AVX_STATE_T *sp, VECTOR256 *vp)
297*e3723e1fSApple OSS Distributions {
298*e3723e1fSApple OSS Distributions 	int     i;
299*e3723e1fSApple OSS Distributions 	struct  __darwin_xmm_reg *xmm  = &sp->__fpu_xmm0;
300*e3723e1fSApple OSS Distributions 	struct  __darwin_xmm_reg *ymmh = &sp->__fpu_ymmh0;
301*e3723e1fSApple OSS Distributions 
302*e3723e1fSApple OSS Distributions 	for (i = 0; i < YMM_MAX; i++) {
303*e3723e1fSApple OSS Distributions 		bcopy(&xmm[i], &vp[i], sizeof(*xmm));
304*e3723e1fSApple OSS Distributions 		bcopy(&ymmh[i], (void *) ((uint64_t)&vp[i] + sizeof(*ymmh)), sizeof(*ymmh));
305*e3723e1fSApple OSS Distributions 	}
306*e3723e1fSApple OSS Distributions }
307*e3723e1fSApple OSS Distributions 
/*
 * SIGALRM handler for the ymm integrity test.
 *
 * Verifies that the AVX state the kernel saved into the signal context
 * matches the pattern the main loop planted, then writes STOP_COOKIE_256
 * into the saved xmm7/ymmh7 so that sigreturn restores the modified
 * values into the live registers, and clears `checking` to end the loop.
 *
 * NOTE(review): T_LOG and the other darwintest calls here are not
 * async-signal-safe; acceptable in a test, but worth knowing.
 */
static void
ymm_sigalrm_handler(int signum __unused, siginfo_t *info __unused, void *ctx)
{
	ucontext_t *contextp = (ucontext_t *) ctx;
	mcontext_t mcontext = contextp->uc_mcontext;
	X86_AVX_STATE_T *avx_state = (X86_AVX_STATE_T *) &mcontext->__fs;
	uint32_t *xp = (uint32_t *) &avx_state->__fpu_xmm7;
	uint32_t *yp = (uint32_t *) &avx_state->__fpu_ymmh7;

	T_LOG("Got SIGALRM");

	/* Check for AVX state */
	T_QUIET;
	T_ASSERT_GE(contextp->uc_mcsize, MCONTEXT_SIZE_256, "check context size");

	/* Check that the state in the context is what's set and expected */
	copy_ymm_state_to_vector(avx_state, vec256array3);
	assert_ymm_eq(vec256array3, vec256array0, sizeof(vec256array1));

	/* Change the context and break the main loop */
	xp[0] = STOP_COOKIE_256;
	yp[0] = STOP_COOKIE_256;
	checking = FALSE;
}
332*e3723e1fSApple OSS Distributions 
/*
 * Wrapper around thread_get_state() for the AVX flavor that preserves the
 * caller's live YMM registers across the call — see the comment in the
 * body for why this is necessary.  Arguments and return value are exactly
 * those of thread_get_state().
 */
kern_return_t
_thread_get_state_avx(
	thread_t                thread,
	int                     flavor,
	thread_state_t          state,          /* pointer to OUT array */
	mach_msg_type_number_t  *state_count)   /*IN/OUT*/
{
	kern_return_t rv;
	VECTOR256 ymms[YMM_MAX];

	/*
	 * We must save and restore the YMMs across thread_get_state() because
	 * code in thread_get_state changes at least one xmm register AFTER the
	 * thread_get_state has saved the state in userspace.  While it's still
	 * possible for something to muck with %xmms BEFORE making the mach
	 * system call (and rendering this save/restore useless), that does not
	 * currently occur, and since we depend on the avx state saved in the
	 * thread_get_state to be the same as that manually copied from YMMs after
	 * thread_get_state returns, we have to go through these machinations.
	 */
	store_ymm(ymms);

	rv = thread_get_state(thread, flavor, state, state_count);

	restore_ymm(ymms);

	return rv;
}
361*e3723e1fSApple OSS Distributions 
362*e3723e1fSApple OSS Distributions void
ymm_integrity(int time)363*e3723e1fSApple OSS Distributions ymm_integrity(int time)
364*e3723e1fSApple OSS Distributions {
365*e3723e1fSApple OSS Distributions 	mach_msg_type_number_t avx_count = X86_AVX_STATE_COUNT;
366*e3723e1fSApple OSS Distributions 	kern_return_t kret;
367*e3723e1fSApple OSS Distributions 	X86_AVX_STATE_T avx_state, avx_state2;
368*e3723e1fSApple OSS Distributions 	mach_port_t ts = mach_thread_self();
369*e3723e1fSApple OSS Distributions 
370*e3723e1fSApple OSS Distributions 	bzero(&avx_state, sizeof(avx_state));
371*e3723e1fSApple OSS Distributions 	bzero(&avx_state2, sizeof(avx_state));
372*e3723e1fSApple OSS Distributions 
373*e3723e1fSApple OSS Distributions 	kret = _thread_get_state_avx(
374*e3723e1fSApple OSS Distributions 		ts, X86_AVX_STATE_FLAVOR, (thread_state_t)&avx_state, &avx_count
375*e3723e1fSApple OSS Distributions 		);
376*e3723e1fSApple OSS Distributions 
377*e3723e1fSApple OSS Distributions 	store_ymm(vec256array2);
378*e3723e1fSApple OSS Distributions 
379*e3723e1fSApple OSS Distributions 	T_QUIET; T_ASSERT_MACH_SUCCESS(kret, "thread_get_state()");
380*e3723e1fSApple OSS Distributions 	vec256_to_string(vec256array2, vec_str_buf);
381*e3723e1fSApple OSS Distributions 	T_LOG("Initial state:\n%s", vec_str_buf);
382*e3723e1fSApple OSS Distributions 
383*e3723e1fSApple OSS Distributions 	copy_ymm_state_to_vector(&avx_state, vec256array1);
384*e3723e1fSApple OSS Distributions 	assert_ymm_eq(vec256array2, vec256array1, sizeof(vec256array1));
385*e3723e1fSApple OSS Distributions 
386*e3723e1fSApple OSS Distributions 	populate_ymm();
387*e3723e1fSApple OSS Distributions 
388*e3723e1fSApple OSS Distributions 	kret = _thread_get_state_avx(
389*e3723e1fSApple OSS Distributions 		ts, X86_AVX_STATE_FLAVOR, (thread_state_t)&avx_state2, &avx_count
390*e3723e1fSApple OSS Distributions 		);
391*e3723e1fSApple OSS Distributions 
392*e3723e1fSApple OSS Distributions 	store_ymm(vec256array2);
393*e3723e1fSApple OSS Distributions 
394*e3723e1fSApple OSS Distributions 	T_QUIET; T_ASSERT_MACH_SUCCESS(kret, "thread_get_state()");
395*e3723e1fSApple OSS Distributions 	vec256_to_string(vec256array2, vec_str_buf);
396*e3723e1fSApple OSS Distributions 	T_LOG("Populated state:\n%s", vec_str_buf);
397*e3723e1fSApple OSS Distributions 
398*e3723e1fSApple OSS Distributions 	copy_ymm_state_to_vector(&avx_state2, vec256array1);
399*e3723e1fSApple OSS Distributions 	assert_ymm_eq(vec256array2, vec256array1, sizeof(vec256array0));
400*e3723e1fSApple OSS Distributions 
401*e3723e1fSApple OSS Distributions 	T_LOG("Running for %ds…", time);
402*e3723e1fSApple OSS Distributions 	start_timer(time, ymm_sigalrm_handler);
403*e3723e1fSApple OSS Distributions 
404*e3723e1fSApple OSS Distributions 	/* re-populate because printing mucks up XMMs */
405*e3723e1fSApple OSS Distributions 	populate_ymm();
406*e3723e1fSApple OSS Distributions 
407*e3723e1fSApple OSS Distributions 	/* Check state until timer fires */
408*e3723e1fSApple OSS Distributions 	while (checking) {
409*e3723e1fSApple OSS Distributions 		check_ymm();
410*e3723e1fSApple OSS Distributions 	}
411*e3723e1fSApple OSS Distributions 
412*e3723e1fSApple OSS Distributions 	/* Check that the sig handler changed out AVX state */
413*e3723e1fSApple OSS Distributions 	store_ymm(vec256array1);
414*e3723e1fSApple OSS Distributions 
415*e3723e1fSApple OSS Distributions 	uint32_t *p = (uint32_t *) &vec256array1[7];
416*e3723e1fSApple OSS Distributions 	if (p[0] != STOP_COOKIE_256 ||
417*e3723e1fSApple OSS Distributions 	    p[4] != STOP_COOKIE_256) {
418*e3723e1fSApple OSS Distributions 		vec256_to_string(vec256array1, vec_str_buf);
419*e3723e1fSApple OSS Distributions 		T_ASSERT_FAIL("sigreturn failed to stick");
420*e3723e1fSApple OSS Distributions 		T_LOG("State:\n%s", vec_str_buf);
421*e3723e1fSApple OSS Distributions 	}
422*e3723e1fSApple OSS Distributions 
423*e3723e1fSApple OSS Distributions 	T_LOG("Ran for %ds", time);
424*e3723e1fSApple OSS Distributions 	T_PASS("No ymm register corruption occurred");
425*e3723e1fSApple OSS Distributions }
426*e3723e1fSApple OSS Distributions 
427*e3723e1fSApple OSS Distributions /*
428*e3723e1fSApple OSS Distributions  * zmm functions
429*e3723e1fSApple OSS Distributions  */
430*e3723e1fSApple OSS Distributions 
/*
 * Snapshot the AVX-512 opmask registers k0..k7 into the array `k`.
 * kmovq moves the full 64-bit mask, so OPMASK is uint64_t.
 */
static inline void
store_opmask(OPMASK k[])
{
	__asm__ volatile ("kmovq %%k0, %0" :"=m" (k[0]));
	__asm__ volatile ("kmovq %%k1, %0" :"=m" (k[1]));
	__asm__ volatile ("kmovq %%k2, %0" :"=m" (k[2]));
	__asm__ volatile ("kmovq %%k3, %0" :"=m" (k[3]));
	__asm__ volatile ("kmovq %%k4, %0" :"=m" (k[4]));
	__asm__ volatile ("kmovq %%k5, %0" :"=m" (k[5]));
	__asm__ volatile ("kmovq %%k6, %0" :"=m" (k[6]));
	__asm__ volatile ("kmovq %%k7, %0" :"=m" (k[7]));
}
443*e3723e1fSApple OSS Distributions 
444*e3723e1fSApple OSS Distributions static inline void
store_zmm(VECTOR512 * vecarray)445*e3723e1fSApple OSS Distributions store_zmm(VECTOR512 *vecarray)
446*e3723e1fSApple OSS Distributions {
447*e3723e1fSApple OSS Distributions 	int i = 0;
448*e3723e1fSApple OSS Distributions 	__asm__ volatile ("vmovaps  %%zmm0, %0" :"=m" (vecarray[i]));
449*e3723e1fSApple OSS Distributions 	i++; __asm__ volatile ("vmovaps  %%zmm1, %0" :"=m" (vecarray[i]));
450*e3723e1fSApple OSS Distributions 	i++; __asm__ volatile ("vmovaps  %%zmm2, %0" :"=m" (vecarray[i]));
451*e3723e1fSApple OSS Distributions 	i++; __asm__ volatile ("vmovaps  %%zmm3, %0" :"=m" (vecarray[i]));
452*e3723e1fSApple OSS Distributions 	i++; __asm__ volatile ("vmovaps  %%zmm4, %0" :"=m" (vecarray[i]));
453*e3723e1fSApple OSS Distributions 	i++; __asm__ volatile ("vmovaps  %%zmm5, %0" :"=m" (vecarray[i]));
454*e3723e1fSApple OSS Distributions 	i++; __asm__ volatile ("vmovaps  %%zmm6, %0" :"=m" (vecarray[i]));
455*e3723e1fSApple OSS Distributions 	i++; __asm__ volatile ("vmovaps  %%zmm7, %0" :"=m" (vecarray[i]));
456*e3723e1fSApple OSS Distributions #if defined(__x86_64__)
457*e3723e1fSApple OSS Distributions 	i++; __asm__ volatile ("vmovaps  %%zmm8, %0" :"=m" (vecarray[i]));
458*e3723e1fSApple OSS Distributions 	i++; __asm__ volatile ("vmovaps  %%zmm9, %0" :"=m" (vecarray[i]));
459*e3723e1fSApple OSS Distributions 	i++; __asm__ volatile ("vmovaps  %%zmm10, %0" :"=m" (vecarray[i]));
460*e3723e1fSApple OSS Distributions 	i++; __asm__ volatile ("vmovaps  %%zmm11, %0" :"=m" (vecarray[i]));
461*e3723e1fSApple OSS Distributions 	i++; __asm__ volatile ("vmovaps  %%zmm12, %0" :"=m" (vecarray[i]));
462*e3723e1fSApple OSS Distributions 	i++; __asm__ volatile ("vmovaps  %%zmm13, %0" :"=m" (vecarray[i]));
463*e3723e1fSApple OSS Distributions 	i++; __asm__ volatile ("vmovaps  %%zmm14, %0" :"=m" (vecarray[i]));
464*e3723e1fSApple OSS Distributions 	i++; __asm__ volatile ("vmovaps  %%zmm15, %0" :"=m" (vecarray[i]));
465*e3723e1fSApple OSS Distributions 	i++; __asm__ volatile ("vmovaps  %%zmm16, %0" :"=m" (vecarray[i]));
466*e3723e1fSApple OSS Distributions 	i++; __asm__ volatile ("vmovaps  %%zmm17, %0" :"=m" (vecarray[i]));
467*e3723e1fSApple OSS Distributions 	i++; __asm__ volatile ("vmovaps  %%zmm18, %0" :"=m" (vecarray[i]));
468*e3723e1fSApple OSS Distributions 	i++; __asm__ volatile ("vmovaps  %%zmm19, %0" :"=m" (vecarray[i]));
469*e3723e1fSApple OSS Distributions 	i++; __asm__ volatile ("vmovaps  %%zmm20, %0" :"=m" (vecarray[i]));
470*e3723e1fSApple OSS Distributions 	i++; __asm__ volatile ("vmovaps  %%zmm21, %0" :"=m" (vecarray[i]));
471*e3723e1fSApple OSS Distributions 	i++; __asm__ volatile ("vmovaps  %%zmm22, %0" :"=m" (vecarray[i]));
472*e3723e1fSApple OSS Distributions 	i++; __asm__ volatile ("vmovaps  %%zmm23, %0" :"=m" (vecarray[i]));
473*e3723e1fSApple OSS Distributions 	i++; __asm__ volatile ("vmovaps  %%zmm24, %0" :"=m" (vecarray[i]));
474*e3723e1fSApple OSS Distributions 	i++; __asm__ volatile ("vmovaps  %%zmm25, %0" :"=m" (vecarray[i]));
475*e3723e1fSApple OSS Distributions 	i++; __asm__ volatile ("vmovaps  %%zmm26, %0" :"=m" (vecarray[i]));
476*e3723e1fSApple OSS Distributions 	i++; __asm__ volatile ("vmovaps  %%zmm27, %0" :"=m" (vecarray[i]));
477*e3723e1fSApple OSS Distributions 	i++; __asm__ volatile ("vmovaps  %%zmm28, %0" :"=m" (vecarray[i]));
478*e3723e1fSApple OSS Distributions 	i++; __asm__ volatile ("vmovaps  %%zmm29, %0" :"=m" (vecarray[i]));
479*e3723e1fSApple OSS Distributions 	i++; __asm__ volatile ("vmovaps  %%zmm30, %0" :"=m" (vecarray[i]));
480*e3723e1fSApple OSS Distributions 	i++; __asm__ volatile ("vmovaps  %%zmm31, %0" :"=m" (vecarray[i]));
481*e3723e1fSApple OSS Distributions #endif
482*e3723e1fSApple OSS Distributions }
483*e3723e1fSApple OSS Distributions 
484*e3723e1fSApple OSS Distributions static inline void
restore_zmm(VECTOR512 * vecarray)485*e3723e1fSApple OSS Distributions restore_zmm(VECTOR512 *vecarray)
486*e3723e1fSApple OSS Distributions {
487*e3723e1fSApple OSS Distributions 	VECTOR512 *p = vecarray;
488*e3723e1fSApple OSS Distributions 
489*e3723e1fSApple OSS Distributions 	__asm__ volatile ("vmovaps  %0, %%zmm0" :: "m" (*(__m512i*)p) : "zmm0"); p++;
490*e3723e1fSApple OSS Distributions 	__asm__ volatile ("vmovaps  %0, %%zmm1" :: "m" (*(__m512i*)p) : "zmm1"); p++;
491*e3723e1fSApple OSS Distributions 	__asm__ volatile ("vmovaps  %0, %%zmm2" :: "m" (*(__m512i*)p) : "zmm2"); p++;
492*e3723e1fSApple OSS Distributions 	__asm__ volatile ("vmovaps  %0, %%zmm3" :: "m" (*(__m512i*)p) : "zmm3"); p++;
493*e3723e1fSApple OSS Distributions 	__asm__ volatile ("vmovaps  %0, %%zmm4" :: "m" (*(__m512i*)p) : "zmm4"); p++;
494*e3723e1fSApple OSS Distributions 	__asm__ volatile ("vmovaps  %0, %%zmm5" :: "m" (*(__m512i*)p) : "zmm5"); p++;
495*e3723e1fSApple OSS Distributions 	__asm__ volatile ("vmovaps  %0, %%zmm6" :: "m" (*(__m512i*)p) : "zmm6"); p++;
496*e3723e1fSApple OSS Distributions 	__asm__ volatile ("vmovaps  %0, %%zmm7" :: "m" (*(__m512i*)p) : "zmm7");
497*e3723e1fSApple OSS Distributions 
498*e3723e1fSApple OSS Distributions #if defined(__x86_64__)
499*e3723e1fSApple OSS Distributions 	++p; __asm__ volatile ("vmovaps  %0, %%zmm8" :: "m" (*(__m512i*)p) : "zmm8"); p++;
500*e3723e1fSApple OSS Distributions 	__asm__ volatile ("vmovaps  %0, %%zmm9" :: "m" (*(__m512i*)p) : "zmm9"); p++;
501*e3723e1fSApple OSS Distributions 	__asm__ volatile ("vmovaps  %0, %%zmm10" :: "m" (*(__m512i*)p) : "zmm10"); p++;
502*e3723e1fSApple OSS Distributions 	__asm__ volatile ("vmovaps  %0, %%zmm11" :: "m" (*(__m512i*)p) : "zmm11"); p++;
503*e3723e1fSApple OSS Distributions 	__asm__ volatile ("vmovaps  %0, %%zmm12" :: "m" (*(__m512i*)p) : "zmm12"); p++;
504*e3723e1fSApple OSS Distributions 	__asm__ volatile ("vmovaps  %0, %%zmm13" :: "m" (*(__m512i*)p) : "zmm13"); p++;
505*e3723e1fSApple OSS Distributions 	__asm__ volatile ("vmovaps  %0, %%zmm14" :: "m" (*(__m512i*)p) : "zmm14"); p++;
506*e3723e1fSApple OSS Distributions 	__asm__ volatile ("vmovaps  %0, %%zmm15" :: "m" (*(__m512i*)p) : "zmm15"); p++;
507*e3723e1fSApple OSS Distributions 	__asm__ volatile ("vmovaps  %0, %%zmm16" :: "m" (*(__m512i*)p) : "zmm16"); p++;
508*e3723e1fSApple OSS Distributions 	__asm__ volatile ("vmovaps  %0, %%zmm17" :: "m" (*(__m512i*)p) : "zmm17"); p++;
509*e3723e1fSApple OSS Distributions 	__asm__ volatile ("vmovaps  %0, %%zmm18" :: "m" (*(__m512i*)p) : "zmm18"); p++;
510*e3723e1fSApple OSS Distributions 	__asm__ volatile ("vmovaps  %0, %%zmm19" :: "m" (*(__m512i*)p) : "zmm19"); p++;
511*e3723e1fSApple OSS Distributions 	__asm__ volatile ("vmovaps  %0, %%zmm20" :: "m" (*(__m512i*)p) : "zmm20"); p++;
512*e3723e1fSApple OSS Distributions 	__asm__ volatile ("vmovaps  %0, %%zmm21" :: "m" (*(__m512i*)p) : "zmm21"); p++;
513*e3723e1fSApple OSS Distributions 	__asm__ volatile ("vmovaps  %0, %%zmm22" :: "m" (*(__m512i*)p) : "zmm22"); p++;
514*e3723e1fSApple OSS Distributions 	__asm__ volatile ("vmovaps  %0, %%zmm23" :: "m" (*(__m512i*)p) : "zmm23"); p++;
515*e3723e1fSApple OSS Distributions 	__asm__ volatile ("vmovaps  %0, %%zmm24" :: "m" (*(__m512i*)p) : "zmm24"); p++;
516*e3723e1fSApple OSS Distributions 	__asm__ volatile ("vmovaps  %0, %%zmm25" :: "m" (*(__m512i*)p) : "zmm25"); p++;
517*e3723e1fSApple OSS Distributions 	__asm__ volatile ("vmovaps  %0, %%zmm26" :: "m" (*(__m512i*)p) : "zmm26"); p++;
518*e3723e1fSApple OSS Distributions 	__asm__ volatile ("vmovaps  %0, %%zmm27" :: "m" (*(__m512i*)p) : "zmm27"); p++;
519*e3723e1fSApple OSS Distributions 	__asm__ volatile ("vmovaps  %0, %%zmm28" :: "m" (*(__m512i*)p) : "zmm28"); p++;
520*e3723e1fSApple OSS Distributions 	__asm__ volatile ("vmovaps  %0, %%zmm29" :: "m" (*(__m512i*)p) : "zmm29"); p++;
521*e3723e1fSApple OSS Distributions 	__asm__ volatile ("vmovaps  %0, %%zmm30" :: "m" (*(__m512i*)p) : "zmm30"); p++;
522*e3723e1fSApple OSS Distributions 	__asm__ volatile ("vmovaps  %0, %%zmm31" :: "m" (*(__m512i*)p) : "zmm31");
523*e3723e1fSApple OSS Distributions #endif
524*e3723e1fSApple OSS Distributions }
525*e3723e1fSApple OSS Distributions 
/*
 * Clear all eight AVX-512 opmask registers (k0-k7) to zero, then snapshot
 * the (now zero) opmask state into the global karray0 reference array via
 * store_opmask().
 */
static inline void
zero_opmask(void)
{
	uint64_t zero = 0x0000000000000000ULL;

	/* kmovq loads the full 64-bit mask from memory into each k register. */
	__asm__ volatile ("kmovq %0, %%k0" : :"m" (zero) : "k0");
	__asm__ volatile ("kmovq %0, %%k1" : :"m" (zero) : "k1");
	__asm__ volatile ("kmovq %0, %%k2" : :"m" (zero) : "k2");
	__asm__ volatile ("kmovq %0, %%k3" : :"m" (zero) : "k3");
	__asm__ volatile ("kmovq %0, %%k4" : :"m" (zero) : "k4");
	__asm__ volatile ("kmovq %0, %%k5" : :"m" (zero) : "k5");
	__asm__ volatile ("kmovq %0, %%k6" : :"m" (zero) : "k6");
	__asm__ volatile ("kmovq %0, %%k7" : :"m" (zero) : "k7");
	/* Record the reference state that later checks compare against. */
	store_opmask(karray0);
}
541*e3723e1fSApple OSS Distributions 
/*
 * Load each opmask register k0-k7 with a distinct, recognizable value
 * (pid in the high 32 bits, 0x11111111 * register-index in the low 32),
 * then snapshot the state into the global karray0 reference array.
 * The pid component makes values differ across processes, so state leaks
 * between processes/threads are detectable.
 */
static inline void
populate_opmask(void)
{
	uint64_t k[8];

	for (int j = 0; j < 8; j++) {
		k[j] = ((uint64_t) getpid() << 32) + (0x11111111 * j);
	}

	__asm__ volatile ("kmovq %0, %%k0" : :"m" (k[0]) : "k0");
	__asm__ volatile ("kmovq %0, %%k1" : :"m" (k[1]) : "k1");
	__asm__ volatile ("kmovq %0, %%k2" : :"m" (k[2]) : "k2");
	__asm__ volatile ("kmovq %0, %%k3" : :"m" (k[3]) : "k3");
	__asm__ volatile ("kmovq %0, %%k4" : :"m" (k[4]) : "k4");
	__asm__ volatile ("kmovq %0, %%k5" : :"m" (k[5]) : "k5");
	__asm__ volatile ("kmovq %0, %%k6" : :"m" (k[6]) : "k6");
	__asm__ volatile ("kmovq %0, %%k7" : :"m" (k[7]) : "k7");

	/* Record the reference state that later checks compare against. */
	store_opmask(karray0);
}
562*e3723e1fSApple OSS Distributions 
/*
 * Wrapper around thread_get_state() for the AVX512 flavor that preserves
 * the caller's live ZMM registers across the Mach call.
 *
 * Returns the kern_return_t from thread_get_state(); on success, *state /
 * *state_count hold the retrieved AVX512 thread state.
 */
kern_return_t
_thread_get_state_avx512(
	thread_t                thread,
	int                     flavor,
	thread_state_t          state,          /* pointer to OUT array */
	mach_msg_type_number_t  *state_count)   /*IN/OUT*/
{
	kern_return_t rv;
	VECTOR512 zmms[ZMM_MAX];

	/*
	 * We must save and restore the ZMMs across thread_get_state() because
	 * code in thread_get_state changes at least one xmm register AFTER the
	 * thread_get_state has saved the state in userspace.  While it's still
	 * possible for something to muck with %XMMs BEFORE making the mach
	 * system call (and rendering this save/restore useless), that does not
	 * currently occur, and since we depend on the avx512 state saved in the
	 * thread_get_state to be the same as that manually copied from ZMMs after
	 * thread_get_state returns, we have to go through these machinations.
	 */
	store_zmm(zmms);

	rv = thread_get_state(thread, flavor, state, state_count);

	restore_zmm(zmms);

	return rv;
}
591*e3723e1fSApple OSS Distributions 
/*
 * Clear every ZMM register to zero (zmm0-zmm31 on 64-bit, zmm0-zmm7 on
 * 32-bit), then snapshot the zeroed state into the global vec512array0
 * reference array via store_zmm().
 */
static inline void
zero_zmm(void)
{
	/* 64 bytes of zeros, 64-byte aligned as required by vmovaps. */
	uint64_t zero[8] VEC512ALIGN = {0};

	__asm__ volatile ("vmovaps  %0, %%zmm0" :: "m" (zero) : "zmm0");
	__asm__ volatile ("vmovaps  %0, %%zmm1" :: "m" (zero) : "zmm1");
	__asm__ volatile ("vmovaps  %0, %%zmm2" :: "m" (zero) : "zmm2");
	__asm__ volatile ("vmovaps  %0, %%zmm3" :: "m" (zero) : "zmm3");
	__asm__ volatile ("vmovaps  %0, %%zmm4" :: "m" (zero) : "zmm4");
	__asm__ volatile ("vmovaps  %0, %%zmm5" :: "m" (zero) : "zmm5");
	__asm__ volatile ("vmovaps  %0, %%zmm6" :: "m" (zero) : "zmm6");
	__asm__ volatile ("vmovaps  %0, %%zmm7" :: "m" (zero) : "zmm7");

#if defined(__x86_64__)
	__asm__ volatile ("vmovaps  %0, %%zmm8" :: "m" (zero) : "zmm8");
	__asm__ volatile ("vmovaps  %0, %%zmm9" :: "m" (zero) : "zmm9");
	__asm__ volatile ("vmovaps  %0, %%zmm10" :: "m" (zero) : "zmm10");
	__asm__ volatile ("vmovaps  %0, %%zmm11" :: "m" (zero) : "zmm11");
	__asm__ volatile ("vmovaps  %0, %%zmm12" :: "m" (zero) : "zmm12");
	__asm__ volatile ("vmovaps  %0, %%zmm13" :: "m" (zero) : "zmm13");
	__asm__ volatile ("vmovaps  %0, %%zmm14" :: "m" (zero) : "zmm14");
	__asm__ volatile ("vmovaps  %0, %%zmm15" :: "m" (zero) : "zmm15");
	__asm__ volatile ("vmovaps  %0, %%zmm16" :: "m" (zero) : "zmm16");
	__asm__ volatile ("vmovaps  %0, %%zmm17" :: "m" (zero) : "zmm17");
	__asm__ volatile ("vmovaps  %0, %%zmm18" :: "m" (zero) : "zmm18");
	__asm__ volatile ("vmovaps  %0, %%zmm19" :: "m" (zero) : "zmm19");
	__asm__ volatile ("vmovaps  %0, %%zmm20" :: "m" (zero) : "zmm20");
	__asm__ volatile ("vmovaps  %0, %%zmm21" :: "m" (zero) : "zmm21");
	__asm__ volatile ("vmovaps  %0, %%zmm22" :: "m" (zero) : "zmm22");
	__asm__ volatile ("vmovaps  %0, %%zmm23" :: "m" (zero) : "zmm23");
	__asm__ volatile ("vmovaps  %0, %%zmm24" :: "m" (zero) : "zmm24");
	__asm__ volatile ("vmovaps  %0, %%zmm25" :: "m" (zero) : "zmm25");
	__asm__ volatile ("vmovaps  %0, %%zmm26" :: "m" (zero) : "zmm26");
	__asm__ volatile ("vmovaps  %0, %%zmm27" :: "m" (zero) : "zmm27");
	__asm__ volatile ("vmovaps  %0, %%zmm28" :: "m" (zero) : "zmm28");
	__asm__ volatile ("vmovaps  %0, %%zmm29" :: "m" (zero) : "zmm29");
	__asm__ volatile ("vmovaps  %0, %%zmm30" :: "m" (zero) : "zmm30");
	__asm__ volatile ("vmovaps  %0, %%zmm31" :: "m" (zero) : "zmm31");
#endif

	/* Record the reference state that later checks compare against. */
	store_zmm(vec512array0);
}
635*e3723e1fSApple OSS Distributions 
/*
 * Fill the ZMM registers with recognizable, per-process patterns, then
 * snapshot the state into the global vec512array0 reference array.
 *
 * The base pattern is (pid << 32 | pid) in every lane; lanes 0, 2, 4 and 7
 * are then overwritten with distinctive nibble patterns that change for
 * each group of eight registers (0x0/4/8/C..., 0x1/5/9/D..., etc.), so a
 * register restored into the wrong slot is easy to spot in the dumps.
 */
static inline void
populate_zmm(void)
{
	int j;
	uint64_t p[8] VEC512ALIGN;

	for (j = 0; j < (int) (sizeof(p) / sizeof(p[0])); j++) {
		p[j] = ((uint64_t) getpid() << 32) + getpid();
	}

	/* Group 1 markers: zmm0-zmm7 */
	p[0] = 0x0000000000000000ULL;
	p[2] = 0x4444444444444444ULL;
	p[4] = 0x8888888888888888ULL;
	p[7] = 0xCCCCCCCCCCCCCCCCULL;
	__asm__ volatile ("vmovaps  %0, %%zmm0" :: "m" (*(__m512i*)p) : "zmm0");
	__asm__ volatile ("vmovaps  %0, %%zmm1" :: "m" (*(__m512i*)p) : "zmm1");
	__asm__ volatile ("vmovaps  %0, %%zmm2" :: "m" (*(__m512i*)p) : "zmm2");
	__asm__ volatile ("vmovaps  %0, %%zmm3" :: "m" (*(__m512i*)p) : "zmm3");
	__asm__ volatile ("vmovaps  %0, %%zmm4" :: "m" (*(__m512i*)p) : "zmm4");
	__asm__ volatile ("vmovaps  %0, %%zmm5" :: "m" (*(__m512i*)p) : "zmm5");
	__asm__ volatile ("vmovaps  %0, %%zmm6" :: "m" (*(__m512i*)p) : "zmm6");
	__asm__ volatile ("vmovaps  %0, %%zmm7" :: "m" (*(__m512i*)p) : "zmm7");

#if defined(__x86_64__)
	/* Group 2 markers: zmm8-zmm15 */
	p[0] = 0x1111111111111111ULL;
	p[2] = 0x5555555555555555ULL;
	p[4] = 0x9999999999999999ULL;
	p[7] = 0xDDDDDDDDDDDDDDDDULL;
	__asm__ volatile ("vmovaps  %0, %%zmm8" :: "m" (*(__m512i*)p) : "zmm8");
	__asm__ volatile ("vmovaps  %0, %%zmm9" :: "m" (*(__m512i*)p) : "zmm9");
	__asm__ volatile ("vmovaps  %0, %%zmm10" :: "m" (*(__m512i*)p) : "zmm10");
	__asm__ volatile ("vmovaps  %0, %%zmm11" :: "m" (*(__m512i*)p) : "zmm11");
	__asm__ volatile ("vmovaps  %0, %%zmm12" :: "m" (*(__m512i*)p) : "zmm12");
	__asm__ volatile ("vmovaps  %0, %%zmm13" :: "m" (*(__m512i*)p) : "zmm13");
	__asm__ volatile ("vmovaps  %0, %%zmm14" :: "m" (*(__m512i*)p) : "zmm14");
	__asm__ volatile ("vmovaps  %0, %%zmm15" :: "m" (*(__m512i*)p) : "zmm15");

	/* Group 3 markers: zmm16-zmm23 */
	p[0] = 0x2222222222222222ULL;
	p[2] = 0x6666666666666666ULL;
	p[4] = 0xAAAAAAAAAAAAAAAAULL;
	p[7] = 0xEEEEEEEEEEEEEEEEULL;
	__asm__ volatile ("vmovaps  %0, %%zmm16" :: "m" (*(__m512i*)p) : "zmm16");
	__asm__ volatile ("vmovaps  %0, %%zmm17" :: "m" (*(__m512i*)p) : "zmm17");
	__asm__ volatile ("vmovaps  %0, %%zmm18" :: "m" (*(__m512i*)p) : "zmm18");
	__asm__ volatile ("vmovaps  %0, %%zmm19" :: "m" (*(__m512i*)p) : "zmm19");
	__asm__ volatile ("vmovaps  %0, %%zmm20" :: "m" (*(__m512i*)p) : "zmm20");
	__asm__ volatile ("vmovaps  %0, %%zmm21" :: "m" (*(__m512i*)p) : "zmm21");
	__asm__ volatile ("vmovaps  %0, %%zmm22" :: "m" (*(__m512i*)p) : "zmm22");
	__asm__ volatile ("vmovaps  %0, %%zmm23" :: "m" (*(__m512i*)p) : "zmm23");

	/* Group 4 markers: zmm24-zmm31 */
	p[0] = 0x3333333333333333ULL;
	p[2] = 0x7777777777777777ULL;
	p[4] = 0xBBBBBBBBBBBBBBBBULL;
	p[7] = 0xFFFFFFFFFFFFFFFFULL;
	__asm__ volatile ("vmovaps  %0, %%zmm24" :: "m" (*(__m512i*)p) : "zmm24");
	__asm__ volatile ("vmovaps  %0, %%zmm25" :: "m" (*(__m512i*)p) : "zmm25");
	__asm__ volatile ("vmovaps  %0, %%zmm26" :: "m" (*(__m512i*)p) : "zmm26");
	__asm__ volatile ("vmovaps  %0, %%zmm27" :: "m" (*(__m512i*)p) : "zmm27");
	__asm__ volatile ("vmovaps  %0, %%zmm28" :: "m" (*(__m512i*)p) : "zmm28");
	__asm__ volatile ("vmovaps  %0, %%zmm29" :: "m" (*(__m512i*)p) : "zmm29");
	__asm__ volatile ("vmovaps  %0, %%zmm30" :: "m" (*(__m512i*)p) : "zmm30");
	__asm__ volatile ("vmovaps  %0, %%zmm31" :: "m" (*(__m512i*)p) : "zmm31");
#endif

	/* Record the reference state that later checks compare against. */
	store_zmm(vec512array0);
}
702*e3723e1fSApple OSS Distributions 
703*e3723e1fSApple OSS Distributions void
vec512_to_string(VECTOR512 * vec,char * buf)704*e3723e1fSApple OSS Distributions vec512_to_string(VECTOR512 *vec, char *buf)
705*e3723e1fSApple OSS Distributions {
706*e3723e1fSApple OSS Distributions 	unsigned int vec_idx = 0;
707*e3723e1fSApple OSS Distributions 	unsigned int buf_idx = 0;
708*e3723e1fSApple OSS Distributions 	int ret = 0;
709*e3723e1fSApple OSS Distributions 
710*e3723e1fSApple OSS Distributions 	for (vec_idx = 0; vec_idx < ZMM_MAX; vec_idx++) {
711*e3723e1fSApple OSS Distributions 		uint64_t a[8];
712*e3723e1fSApple OSS Distributions 		bcopy(&vec[vec_idx], &a[0], sizeof(a));
713*e3723e1fSApple OSS Distributions 		ret = sprintf(
714*e3723e1fSApple OSS Distributions 			buf + buf_idx,
715*e3723e1fSApple OSS Distributions 			"0x%016llx:%016llx:%016llx:%016llx:"
716*e3723e1fSApple OSS Distributions 			"%016llx:%016llx:%016llx:%016llx%s",
717*e3723e1fSApple OSS Distributions 			a[0], a[1], a[2], a[3], a[4], a[5], a[6], a[7],
718*e3723e1fSApple OSS Distributions 			vec_idx < ZMM_MAX - 1 ? "\n" : ""
719*e3723e1fSApple OSS Distributions 			);
720*e3723e1fSApple OSS Distributions 		T_QUIET; T_ASSERT_POSIX_SUCCESS(ret, "sprintf()");
721*e3723e1fSApple OSS Distributions 		buf_idx += ret;
722*e3723e1fSApple OSS Distributions 	}
723*e3723e1fSApple OSS Distributions }
724*e3723e1fSApple OSS Distributions 
725*e3723e1fSApple OSS Distributions void
opmask_to_string(OPMASK * karray,char * buf)726*e3723e1fSApple OSS Distributions opmask_to_string(OPMASK *karray, char *buf)
727*e3723e1fSApple OSS Distributions {
728*e3723e1fSApple OSS Distributions 	unsigned int karray_idx = 0;
729*e3723e1fSApple OSS Distributions 	unsigned int buf_idx = 0;
730*e3723e1fSApple OSS Distributions 	int ret = 0;
731*e3723e1fSApple OSS Distributions 
732*e3723e1fSApple OSS Distributions 	for (karray_idx = 0; karray_idx < KARRAY_MAX; karray_idx++) {
733*e3723e1fSApple OSS Distributions 		ret = sprintf(
734*e3723e1fSApple OSS Distributions 			buf + buf_idx,
735*e3723e1fSApple OSS Distributions 			"k%d: 0x%016llx%s",
736*e3723e1fSApple OSS Distributions 			karray_idx, karray[karray_idx],
737*e3723e1fSApple OSS Distributions 			karray_idx < KARRAY_MAX ? "\n" : ""
738*e3723e1fSApple OSS Distributions 			);
739*e3723e1fSApple OSS Distributions 		T_QUIET; T_ASSERT_POSIX_SUCCESS(ret, "sprintf()");
740*e3723e1fSApple OSS Distributions 		buf_idx += ret;
741*e3723e1fSApple OSS Distributions 	}
742*e3723e1fSApple OSS Distributions }
743*e3723e1fSApple OSS Distributions 
744*e3723e1fSApple OSS Distributions static void
assert_zmm_eq(void * a,void * b,int c)745*e3723e1fSApple OSS Distributions assert_zmm_eq(void *a, void *b, int c)
746*e3723e1fSApple OSS Distributions {
747*e3723e1fSApple OSS Distributions 	if (memcmp_unoptimized(a, b, c)) {
748*e3723e1fSApple OSS Distributions 		vec512_to_string(a, vec_str_buf);
749*e3723e1fSApple OSS Distributions 		T_LOG("Compare failed, vector A:\n%s", vec_str_buf);
750*e3723e1fSApple OSS Distributions 		vec512_to_string(b, vec_str_buf);
751*e3723e1fSApple OSS Distributions 		T_LOG("Compare failed, vector B:\n%s", vec_str_buf);
752*e3723e1fSApple OSS Distributions 		T_ASSERT_FAIL("Vectors not equal");
753*e3723e1fSApple OSS Distributions 	}
754*e3723e1fSApple OSS Distributions }
755*e3723e1fSApple OSS Distributions 
756*e3723e1fSApple OSS Distributions static void
assert_opmask_eq(OPMASK * a,OPMASK * b)757*e3723e1fSApple OSS Distributions assert_opmask_eq(OPMASK *a, OPMASK *b)
758*e3723e1fSApple OSS Distributions {
759*e3723e1fSApple OSS Distributions 	for (int i = 0; i < KARRAY_MAX; i++) {
760*e3723e1fSApple OSS Distributions 		if (a[i] != b[i]) {
761*e3723e1fSApple OSS Distributions 			opmask_to_string(a, karray_str_buf);
762*e3723e1fSApple OSS Distributions 			T_LOG("Compare failed, opmask A:\n%s", karray_str_buf);
763*e3723e1fSApple OSS Distributions 			opmask_to_string(b, karray_str_buf);
764*e3723e1fSApple OSS Distributions 			T_LOG("Compare failed, opmask B:\n%s", karray_str_buf);
765*e3723e1fSApple OSS Distributions 			T_ASSERT_FAIL("opmasks not equal");
766*e3723e1fSApple OSS Distributions 		}
767*e3723e1fSApple OSS Distributions 	}
768*e3723e1fSApple OSS Distributions }
769*e3723e1fSApple OSS Distributions 
/*
 * Snapshot the current ZMM/opmask state into the "observed" globals
 * (vec512array1 / karray1) and compare against the reference state
 * (vec512array0 / karray0) captured by populate_zmm()/populate_opmask().
 *
 * If check_cookie is true and the first 64-bit word of zmm7's slot holds
 * STOP_COOKIE_512 (planted by the signal handler to end the loop), the
 * comparison is skipped — the state was deliberately modified.
 */
void
check_zmm(boolean_t check_cookie)
{
	uint64_t *p = (uint64_t *) &vec512array1[7];
	store_opmask(karray1);
	store_zmm(vec512array1);
	if (check_cookie && p[0] == STOP_COOKIE_512) {
		return;
	}

	assert_zmm_eq(vec512array0, vec512array1, sizeof(vec512array0));
	assert_opmask_eq(karray0, karray1);
}
783*e3723e1fSApple OSS Distributions 
784*e3723e1fSApple OSS Distributions static void
copy_state_to_opmask(X86_AVX512_STATE_T * sp,OPMASK * op)785*e3723e1fSApple OSS Distributions copy_state_to_opmask(X86_AVX512_STATE_T *sp, OPMASK *op)
786*e3723e1fSApple OSS Distributions {
787*e3723e1fSApple OSS Distributions 	OPMASK *k = (OPMASK *) &sp->__fpu_k0;
788*e3723e1fSApple OSS Distributions 	for (int i = 0; i < KARRAY_MAX; i++) {
789*e3723e1fSApple OSS Distributions 		bcopy(&k[i], &op[i], sizeof(*op));
790*e3723e1fSApple OSS Distributions 	}
791*e3723e1fSApple OSS Distributions }
792*e3723e1fSApple OSS Distributions 
/*
 * Reassemble full 512-bit vectors from the fragmented AVX512 thread state.
 *
 * In X86_AVX512_STATE_T the low ZMM registers are stored as three separate
 * per-register fragments: the XMM portion, the high YMM half (__fpu_ymmhN)
 * and the high ZMM half (__fpu_zmmhN).  Each destination vp[i] is built as
 * [xmm | ymmh | zmmh]; the destination byte offsets are expressed as
 * sizeof(*ymmh) and sizeof(*zmmh), i.e. the combined size of the fragments
 * that precede the one being copied (16 and 32 bytes respectively, given
 * the darwin register-struct sizes).
 */
static void
copy_zmm_state_to_vector(X86_AVX512_STATE_T *sp, VECTOR512 *vp)
{
	int     i;
	struct  __darwin_xmm_reg *xmm  = &sp->__fpu_xmm0;
	struct  __darwin_xmm_reg *ymmh = &sp->__fpu_ymmh0;
	struct  __darwin_ymm_reg *zmmh = &sp->__fpu_zmmh0;
#if defined(__x86_64__)
	/* On 64-bit, zmm16-zmm31 are stored whole starting at __fpu_zmm16. */
	struct  __darwin_zmm_reg *zmm  = &sp->__fpu_zmm16;

	for (i = 0; i < ZMM_MAX / 2; i++) {
		bcopy(&xmm[i], &vp[i], sizeof(*xmm));
		bcopy(&ymmh[i], (void *) ((uint64_t)&vp[i] + sizeof(*ymmh)), sizeof(*ymmh));
		bcopy(&zmmh[i], (void *) ((uint64_t)&vp[i] + sizeof(*zmmh)), sizeof(*zmmh));
		bcopy(&zmm[i], &vp[(ZMM_MAX / 2) + i], sizeof(*zmm));
	}
#else
	/* 32-bit only exposes zmm0-zmm7, all in fragmented form. */
	for (i = 0; i < ZMM_MAX; i++) {
		bcopy(&xmm[i], &vp[i], sizeof(*xmm));
		bcopy(&ymmh[i], (void *) ((uint64_t)&vp[i] + sizeof(*ymmh)), sizeof(*ymmh));
		bcopy(&zmmh[i], (void *) ((uint64_t)&vp[i] + sizeof(*zmmh)), sizeof(*zmmh));
	}
#endif
}
817*e3723e1fSApple OSS Distributions 
/*
 * SIGALRM handler: verify that the AVX512 state delivered in the signal
 * context matches the reference state, then plant STOP_COOKIE_512 markers
 * in xmm7/ymmh7/zmmh7 and k7 of the context (so sigreturn restores them
 * into the live registers, which check_zmm()/check_opmask() detect) and
 * clear the global `checking` flag to break the main test loop.
 */
static void
zmm_sigalrm_handler(int signum __unused, siginfo_t *info __unused, void *ctx)
{
	ucontext_t *contextp = (ucontext_t *) ctx;
	mcontext_t mcontext = contextp->uc_mcontext;
	X86_AVX512_STATE_T *avx_state = (X86_AVX512_STATE_T *) &mcontext->__fs;
	uint64_t *xp = (uint64_t *) &avx_state->__fpu_xmm7;
	uint64_t *yp = (uint64_t *) &avx_state->__fpu_ymmh7;
	uint64_t *zp = (uint64_t *) &avx_state->__fpu_zmmh7;
	uint64_t *kp = (uint64_t *) &avx_state->__fpu_k0;

	/* Check for AVX512 state */
	T_QUIET;
	T_ASSERT_GE(contextp->uc_mcsize, MCONTEXT_SIZE_512, "check context size");

	/* Check that the state in the context is what's set and expected */
	copy_zmm_state_to_vector(avx_state, vec512array3);
	assert_zmm_eq(vec512array3, vec512array0, sizeof(vec512array3));
	copy_state_to_opmask(avx_state, karray3);
	assert_opmask_eq(karray3, karray0);

	/* Change the context and break the main loop */
	xp[0] = STOP_COOKIE_512;
	yp[0] = STOP_COOKIE_512;
	zp[0] = STOP_COOKIE_512;
	kp[7] = STOP_COOKIE_512;   /* kp indexes from __fpu_k0, so [7] is k7 */
	checking = FALSE;
}
846*e3723e1fSApple OSS Distributions 
847*e3723e1fSApple OSS Distributions static void
zmm_sigalrm_handler_no_mod(int signum __unused,siginfo_t * info __unused,void * ctx)848*e3723e1fSApple OSS Distributions zmm_sigalrm_handler_no_mod(int signum __unused, siginfo_t *info __unused, void *ctx)
849*e3723e1fSApple OSS Distributions {
850*e3723e1fSApple OSS Distributions 	ucontext_t *contextp = (ucontext_t *) ctx;
851*e3723e1fSApple OSS Distributions 	mcontext_t mcontext = contextp->uc_mcontext;
852*e3723e1fSApple OSS Distributions 	X86_AVX512_STATE_T *avx_state = (X86_AVX512_STATE_T *) &mcontext->__fs;
853*e3723e1fSApple OSS Distributions 	uint64_t *xp = (uint64_t *) &avx_state->__fpu_xmm7;
854*e3723e1fSApple OSS Distributions 	uint64_t *yp = (uint64_t *) &avx_state->__fpu_ymmh7;
855*e3723e1fSApple OSS Distributions 	uint64_t *zp = (uint64_t *) &avx_state->__fpu_zmmh7;
856*e3723e1fSApple OSS Distributions 	uint64_t *kp = (uint64_t *) &avx_state->__fpu_k0;
857*e3723e1fSApple OSS Distributions 
858*e3723e1fSApple OSS Distributions 	/* Check for AVX512 state */
859*e3723e1fSApple OSS Distributions 	T_QUIET;
860*e3723e1fSApple OSS Distributions 	T_ASSERT_GE(contextp->uc_mcsize, MCONTEXT_SIZE_512, "check context size");
861*e3723e1fSApple OSS Distributions 
862*e3723e1fSApple OSS Distributions 	/* Check that the state in the context is what's set and expected */
863*e3723e1fSApple OSS Distributions 	copy_zmm_state_to_vector(avx_state, vec512array3);
864*e3723e1fSApple OSS Distributions 	assert_zmm_eq(vec512array3, vec512array0, sizeof(vec512array3));
865*e3723e1fSApple OSS Distributions 	copy_state_to_opmask(avx_state, karray3);
866*e3723e1fSApple OSS Distributions 	assert_opmask_eq(karray3, karray0);
867*e3723e1fSApple OSS Distributions 
868*e3723e1fSApple OSS Distributions 	/* Change the context and break the main loop */
869*e3723e1fSApple OSS Distributions 	checking = FALSE;
870*e3723e1fSApple OSS Distributions }
871*e3723e1fSApple OSS Distributions 
872*e3723e1fSApple OSS Distributions 
873*e3723e1fSApple OSS Distributions void
zmm_integrity(int time)874*e3723e1fSApple OSS Distributions zmm_integrity(int time)
875*e3723e1fSApple OSS Distributions {
876*e3723e1fSApple OSS Distributions 	mach_msg_type_number_t avx_count = X86_AVX512_STATE_COUNT;
877*e3723e1fSApple OSS Distributions 	kern_return_t kret;
878*e3723e1fSApple OSS Distributions 	X86_AVX512_STATE_T avx_state, avx_state2;
879*e3723e1fSApple OSS Distributions 	mach_port_t ts = mach_thread_self();
880*e3723e1fSApple OSS Distributions 
881*e3723e1fSApple OSS Distributions 	bzero(&avx_state, sizeof(avx_state));
882*e3723e1fSApple OSS Distributions 	bzero(&avx_state2, sizeof(avx_state));
883*e3723e1fSApple OSS Distributions 
884*e3723e1fSApple OSS Distributions 	store_zmm(vec512array2);
885*e3723e1fSApple OSS Distributions 	store_opmask(karray2);
886*e3723e1fSApple OSS Distributions 
887*e3723e1fSApple OSS Distributions 	kret = _thread_get_state_avx512(
888*e3723e1fSApple OSS Distributions 		ts, X86_AVX512_STATE_FLAVOR, (thread_state_t)&avx_state, &avx_count
889*e3723e1fSApple OSS Distributions 		);
890*e3723e1fSApple OSS Distributions 
891*e3723e1fSApple OSS Distributions 	T_QUIET; T_ASSERT_MACH_SUCCESS(kret, "thread_get_state()");
892*e3723e1fSApple OSS Distributions 	vec512_to_string(vec512array2, vec_str_buf);
893*e3723e1fSApple OSS Distributions 	opmask_to_string(karray2, karray_str_buf);
894*e3723e1fSApple OSS Distributions 	T_LOG("Initial state:\n%s\n%s", vec_str_buf, karray_str_buf);
895*e3723e1fSApple OSS Distributions 
896*e3723e1fSApple OSS Distributions 	copy_zmm_state_to_vector(&avx_state, vec512array1);
897*e3723e1fSApple OSS Distributions 	assert_zmm_eq(vec512array2, vec512array1, sizeof(vec512array1));
898*e3723e1fSApple OSS Distributions 	copy_state_to_opmask(&avx_state, karray1);
899*e3723e1fSApple OSS Distributions 	assert_opmask_eq(karray2, karray1);
900*e3723e1fSApple OSS Distributions 
901*e3723e1fSApple OSS Distributions 	populate_zmm();
902*e3723e1fSApple OSS Distributions 	populate_opmask();
903*e3723e1fSApple OSS Distributions 
904*e3723e1fSApple OSS Distributions 	kret = _thread_get_state_avx512(
905*e3723e1fSApple OSS Distributions 		ts, X86_AVX512_STATE_FLAVOR, (thread_state_t)&avx_state2, &avx_count
906*e3723e1fSApple OSS Distributions 		);
907*e3723e1fSApple OSS Distributions 
908*e3723e1fSApple OSS Distributions 	store_zmm(vec512array2);
909*e3723e1fSApple OSS Distributions 	store_opmask(karray2);
910*e3723e1fSApple OSS Distributions 
911*e3723e1fSApple OSS Distributions 	T_QUIET; T_ASSERT_MACH_SUCCESS(kret, "thread_get_state()");
912*e3723e1fSApple OSS Distributions 	vec512_to_string(vec512array2, vec_str_buf);
913*e3723e1fSApple OSS Distributions 	opmask_to_string(karray2, karray_str_buf);
914*e3723e1fSApple OSS Distributions 	T_LOG("Populated state:\n%s\n%s", vec_str_buf, karray_str_buf);
915*e3723e1fSApple OSS Distributions 
916*e3723e1fSApple OSS Distributions 	copy_zmm_state_to_vector(&avx_state2, vec512array1);
917*e3723e1fSApple OSS Distributions 	assert_zmm_eq(vec512array2, vec512array1, sizeof(vec512array1));
918*e3723e1fSApple OSS Distributions 	copy_state_to_opmask(&avx_state2, karray1);
919*e3723e1fSApple OSS Distributions 	assert_opmask_eq(karray2, karray1);
920*e3723e1fSApple OSS Distributions 
921*e3723e1fSApple OSS Distributions 	T_LOG("Running for %ds…", time);
922*e3723e1fSApple OSS Distributions 	start_timer(time, zmm_sigalrm_handler);
923*e3723e1fSApple OSS Distributions 
924*e3723e1fSApple OSS Distributions 	/* re-populate because printing mucks up XMMs */
925*e3723e1fSApple OSS Distributions 	populate_zmm();
926*e3723e1fSApple OSS Distributions 	populate_opmask();
927*e3723e1fSApple OSS Distributions 
928*e3723e1fSApple OSS Distributions 	/* Check state until timer fires */
929*e3723e1fSApple OSS Distributions 	while (checking) {
930*e3723e1fSApple OSS Distributions 		check_zmm(TRUE);
931*e3723e1fSApple OSS Distributions 	}
932*e3723e1fSApple OSS Distributions 
933*e3723e1fSApple OSS Distributions 	/* Check that the sig handler changed our AVX state */
934*e3723e1fSApple OSS Distributions 	store_zmm(vec512array1);
935*e3723e1fSApple OSS Distributions 	store_opmask(karray1);
936*e3723e1fSApple OSS Distributions 
937*e3723e1fSApple OSS Distributions 	uint64_t *p = (uint64_t *) &vec512array1[7];
938*e3723e1fSApple OSS Distributions 	if (p[0] != STOP_COOKIE_512 ||
939*e3723e1fSApple OSS Distributions 	    p[2] != STOP_COOKIE_512 ||
940*e3723e1fSApple OSS Distributions 	    p[4] != STOP_COOKIE_512 ||
941*e3723e1fSApple OSS Distributions 	    karray1[7] != STOP_COOKIE_512) {
942*e3723e1fSApple OSS Distributions 		vec512_to_string(vec512array1, vec_str_buf);
943*e3723e1fSApple OSS Distributions 		opmask_to_string(karray1, karray_str_buf);
944*e3723e1fSApple OSS Distributions 		T_ASSERT_FAIL("sigreturn failed to stick");
945*e3723e1fSApple OSS Distributions 		T_LOG("State:\n%s\n%s", vec_str_buf, karray_str_buf);
946*e3723e1fSApple OSS Distributions 	}
947*e3723e1fSApple OSS Distributions 
948*e3723e1fSApple OSS Distributions 	T_LOG("Ran for %ds", time);
949*e3723e1fSApple OSS Distributions 	T_PASS("No zmm register corruption occurred");
950*e3723e1fSApple OSS Distributions }
951*e3723e1fSApple OSS Distributions 
952*e3723e1fSApple OSS Distributions void
zmm_zeroing_optimization_integrity(int time)953*e3723e1fSApple OSS Distributions zmm_zeroing_optimization_integrity(int time)
954*e3723e1fSApple OSS Distributions {
955*e3723e1fSApple OSS Distributions 	/*
956*e3723e1fSApple OSS Distributions 	 * Check ZMM zero and OpMask zero
957*e3723e1fSApple OSS Distributions 	 */
958*e3723e1fSApple OSS Distributions 	T_LOG("Checking ZMM zero and OpMask zero");
959*e3723e1fSApple OSS Distributions 	checking = true;
960*e3723e1fSApple OSS Distributions 	zero_zmm();
961*e3723e1fSApple OSS Distributions 	zero_opmask();
962*e3723e1fSApple OSS Distributions 
963*e3723e1fSApple OSS Distributions 	T_LOG("Running for %ds…", time);
964*e3723e1fSApple OSS Distributions 	start_timer(time, zmm_sigalrm_handler_no_mod);
965*e3723e1fSApple OSS Distributions 
966*e3723e1fSApple OSS Distributions 	/* re-populate because printing mucks up XMMs */
967*e3723e1fSApple OSS Distributions 	zero_zmm();
968*e3723e1fSApple OSS Distributions 	zero_opmask();
969*e3723e1fSApple OSS Distributions 
970*e3723e1fSApple OSS Distributions 	/* Check state until timer fires */
971*e3723e1fSApple OSS Distributions 	while (checking) {
972*e3723e1fSApple OSS Distributions 		check_zmm(FALSE);
973*e3723e1fSApple OSS Distributions 	}
974*e3723e1fSApple OSS Distributions 
975*e3723e1fSApple OSS Distributions 	/* Check that sig handler did not changed our AVX state */
976*e3723e1fSApple OSS Distributions 	store_zmm(vec512array2);
977*e3723e1fSApple OSS Distributions 	store_opmask(karray2);
978*e3723e1fSApple OSS Distributions 
979*e3723e1fSApple OSS Distributions 	assert_zmm_eq(vec512array0, vec512array2, sizeof(vec512array2));
980*e3723e1fSApple OSS Distributions 	assert_opmask_eq(karray0, karray2);
981*e3723e1fSApple OSS Distributions 
982*e3723e1fSApple OSS Distributions 	T_LOG("Ran for %ds", time);
983*e3723e1fSApple OSS Distributions 	T_PASS("ZMM zero and OpMask zero");
984*e3723e1fSApple OSS Distributions 
985*e3723e1fSApple OSS Distributions 
986*e3723e1fSApple OSS Distributions 	/*
987*e3723e1fSApple OSS Distributions 	 * Check ZMM zero and OpMask non-zero
988*e3723e1fSApple OSS Distributions 	 */
989*e3723e1fSApple OSS Distributions 	T_LOG("Checking ZMM zero and OpMask non-zero");
990*e3723e1fSApple OSS Distributions 	checking = true;
991*e3723e1fSApple OSS Distributions 	zero_zmm();
992*e3723e1fSApple OSS Distributions 	populate_opmask();
993*e3723e1fSApple OSS Distributions 
994*e3723e1fSApple OSS Distributions 	T_LOG("Running for %ds…", time);
995*e3723e1fSApple OSS Distributions 	start_timer(time, zmm_sigalrm_handler_no_mod);
996*e3723e1fSApple OSS Distributions 
997*e3723e1fSApple OSS Distributions 	/* re-populate because printing mucks up XMMs */
998*e3723e1fSApple OSS Distributions 	zero_zmm();
999*e3723e1fSApple OSS Distributions 	populate_opmask();
1000*e3723e1fSApple OSS Distributions 
1001*e3723e1fSApple OSS Distributions 	/* Check state until timer fires */
1002*e3723e1fSApple OSS Distributions 	while (checking) {
1003*e3723e1fSApple OSS Distributions 		check_zmm(FALSE);
1004*e3723e1fSApple OSS Distributions 	}
1005*e3723e1fSApple OSS Distributions 
1006*e3723e1fSApple OSS Distributions 	/* Check that sig handler did not changed our AVX state */
1007*e3723e1fSApple OSS Distributions 	store_zmm(vec512array2);
1008*e3723e1fSApple OSS Distributions 	store_opmask(karray2);
1009*e3723e1fSApple OSS Distributions 
1010*e3723e1fSApple OSS Distributions 	assert_zmm_eq(vec512array0, vec512array2, sizeof(vec512array2));
1011*e3723e1fSApple OSS Distributions 	assert_opmask_eq(karray0, karray2);
1012*e3723e1fSApple OSS Distributions 
1013*e3723e1fSApple OSS Distributions 	T_LOG("Ran for %ds", time);
1014*e3723e1fSApple OSS Distributions 	T_PASS("ZMM zero and OpMask non-zero");
1015*e3723e1fSApple OSS Distributions 
1016*e3723e1fSApple OSS Distributions 
1017*e3723e1fSApple OSS Distributions 	/*
1018*e3723e1fSApple OSS Distributions 	 * Check ZMM non-zero and OpMask zero
1019*e3723e1fSApple OSS Distributions 	 */
1020*e3723e1fSApple OSS Distributions 	T_LOG("Checking ZMM non-zero and OpMask zero");
1021*e3723e1fSApple OSS Distributions 	checking = true;
1022*e3723e1fSApple OSS Distributions 	populate_zmm();
1023*e3723e1fSApple OSS Distributions 	zero_opmask();
1024*e3723e1fSApple OSS Distributions 
1025*e3723e1fSApple OSS Distributions 	T_LOG("Running for %ds…", time);
1026*e3723e1fSApple OSS Distributions 	start_timer(time, zmm_sigalrm_handler_no_mod);
1027*e3723e1fSApple OSS Distributions 
1028*e3723e1fSApple OSS Distributions 	/* re-populate because printing mucks up XMMs */
1029*e3723e1fSApple OSS Distributions 	populate_zmm();
1030*e3723e1fSApple OSS Distributions 	zero_opmask();
1031*e3723e1fSApple OSS Distributions 
1032*e3723e1fSApple OSS Distributions 	/* Check state until timer fires */
1033*e3723e1fSApple OSS Distributions 	while (checking) {
1034*e3723e1fSApple OSS Distributions 		check_zmm(FALSE);
1035*e3723e1fSApple OSS Distributions 	}
1036*e3723e1fSApple OSS Distributions 
1037*e3723e1fSApple OSS Distributions 	/* Check that sig handler did not changed our AVX state */
1038*e3723e1fSApple OSS Distributions 	store_zmm(vec512array2);
1039*e3723e1fSApple OSS Distributions 	store_opmask(karray2);
1040*e3723e1fSApple OSS Distributions 
1041*e3723e1fSApple OSS Distributions 	assert_zmm_eq(vec512array0, vec512array2, sizeof(vec512array2));
1042*e3723e1fSApple OSS Distributions 	assert_opmask_eq(karray0, karray2);
1043*e3723e1fSApple OSS Distributions 
1044*e3723e1fSApple OSS Distributions 	T_LOG("Ran for %ds", time);
1045*e3723e1fSApple OSS Distributions 	T_PASS("ZMM non-zero and OpMask zero");
1046*e3723e1fSApple OSS Distributions 
1047*e3723e1fSApple OSS Distributions 
1048*e3723e1fSApple OSS Distributions 	/*
1049*e3723e1fSApple OSS Distributions 	 * Check ZMM non-zero and OpMask non-zero
1050*e3723e1fSApple OSS Distributions 	 */
1051*e3723e1fSApple OSS Distributions 	T_LOG("Checking ZMM non-zero and OpMask non-zero");
1052*e3723e1fSApple OSS Distributions 	checking = true;
1053*e3723e1fSApple OSS Distributions 	populate_zmm();
1054*e3723e1fSApple OSS Distributions 	populate_opmask();
1055*e3723e1fSApple OSS Distributions 
1056*e3723e1fSApple OSS Distributions 	T_LOG("Running for %ds…", time);
1057*e3723e1fSApple OSS Distributions 	start_timer(time, zmm_sigalrm_handler_no_mod);
1058*e3723e1fSApple OSS Distributions 
1059*e3723e1fSApple OSS Distributions 	/* re-populate because printing mucks up XMMs */
1060*e3723e1fSApple OSS Distributions 	populate_zmm();
1061*e3723e1fSApple OSS Distributions 	populate_opmask();
1062*e3723e1fSApple OSS Distributions 
1063*e3723e1fSApple OSS Distributions 	/* Check state until timer fires */
1064*e3723e1fSApple OSS Distributions 	while (checking) {
1065*e3723e1fSApple OSS Distributions 		check_zmm(FALSE);
1066*e3723e1fSApple OSS Distributions 	}
1067*e3723e1fSApple OSS Distributions 
1068*e3723e1fSApple OSS Distributions 	/* Check that sig handler did not changed our AVX state */
1069*e3723e1fSApple OSS Distributions 	store_zmm(vec512array2);
1070*e3723e1fSApple OSS Distributions 	store_opmask(karray2);
1071*e3723e1fSApple OSS Distributions 
1072*e3723e1fSApple OSS Distributions 	assert_zmm_eq(vec512array0, vec512array2, sizeof(vec512array2));
1073*e3723e1fSApple OSS Distributions 	assert_opmask_eq(karray0, karray2);
1074*e3723e1fSApple OSS Distributions 
1075*e3723e1fSApple OSS Distributions 	T_LOG("Ran for %ds", time);
1076*e3723e1fSApple OSS Distributions 	T_PASS("ZMM non-zero and OpMask non-zero");
1077*e3723e1fSApple OSS Distributions }
1078*e3723e1fSApple OSS Distributions 
1079*e3723e1fSApple OSS Distributions /*
1080*e3723e1fSApple OSS Distributions  * Main test declarations
1081*e3723e1fSApple OSS Distributions  */
1082*e3723e1fSApple OSS Distributions T_DECL(ymm_integrity,
1083*e3723e1fSApple OSS Distributions     "Quick soak test to verify that AVX "
1084*e3723e1fSApple OSS Distributions     "register state is maintained correctly",
1085*e3723e1fSApple OSS Distributions     T_META_TIMEOUT(NORMAL_RUN_TIME + TIMEOUT_OVERHEAD)) {
1086*e3723e1fSApple OSS Distributions 	require_avx();
1087*e3723e1fSApple OSS Distributions 	ymm_integrity(NORMAL_RUN_TIME);
1088*e3723e1fSApple OSS Distributions }
1089*e3723e1fSApple OSS Distributions 
1090*e3723e1fSApple OSS Distributions T_DECL(ymm_integrity_stress,
1091*e3723e1fSApple OSS Distributions     "Extended soak test to verify that AVX "
1092*e3723e1fSApple OSS Distributions     "register state is maintained correctly",
1093*e3723e1fSApple OSS Distributions     T_META_TIMEOUT(LONG_RUN_TIME + TIMEOUT_OVERHEAD),
1094*e3723e1fSApple OSS Distributions     T_META_ENABLED(false)) {
1095*e3723e1fSApple OSS Distributions 	require_avx();
1096*e3723e1fSApple OSS Distributions 	ymm_integrity(LONG_RUN_TIME);
1097*e3723e1fSApple OSS Distributions }
1098*e3723e1fSApple OSS Distributions 
1099*e3723e1fSApple OSS Distributions T_DECL(zmm_integrity,
1100*e3723e1fSApple OSS Distributions     "Quick soak test to verify that AVX-512 "
1101*e3723e1fSApple OSS Distributions     "register state is maintained correctly",
1102*e3723e1fSApple OSS Distributions     T_META_TIMEOUT(NORMAL_RUN_TIME + TIMEOUT_OVERHEAD)) {
1103*e3723e1fSApple OSS Distributions 	require_avx512();
1104*e3723e1fSApple OSS Distributions 	zmm_integrity(NORMAL_RUN_TIME);
1105*e3723e1fSApple OSS Distributions }
1106*e3723e1fSApple OSS Distributions 
1107*e3723e1fSApple OSS Distributions T_DECL(zmm_integrity_stress,
1108*e3723e1fSApple OSS Distributions     "Extended soak test to verify that AVX-512 "
1109*e3723e1fSApple OSS Distributions     "register state is maintained correctly",
1110*e3723e1fSApple OSS Distributions     T_META_TIMEOUT(LONG_RUN_TIME + TIMEOUT_OVERHEAD),
1111*e3723e1fSApple OSS Distributions     T_META_ENABLED(false)) {
1112*e3723e1fSApple OSS Distributions 	require_avx512();
1113*e3723e1fSApple OSS Distributions 	zmm_integrity(LONG_RUN_TIME);
1114*e3723e1fSApple OSS Distributions }
1115*e3723e1fSApple OSS Distributions 
1116*e3723e1fSApple OSS Distributions T_DECL(zmm_zeroing_optimization_integrity,
1117*e3723e1fSApple OSS Distributions     "Quick soak test to verify AVX-512 "
1118*e3723e1fSApple OSS Distributions     "register state is maintained with "
1119*e3723e1fSApple OSS Distributions     "zeroing optimizations enabled",
1120*e3723e1fSApple OSS Distributions     T_META_TIMEOUT(QUICK_RUN_TIME + TIMEOUT_OVERHEAD)) {
1121*e3723e1fSApple OSS Distributions 	require_avx512();
1122*e3723e1fSApple OSS Distributions 	zmm_zeroing_optimization_integrity(QUICK_RUN_TIME);
1123*e3723e1fSApple OSS Distributions }
1124