/* 2015-12-10  Vladimir Makarov  <vmakarov@redhat.com>
   gcc/testsuite/gcc.target/i386/avx-vzeroall-1.c
   blob 996357a7e02bbfba90ce6e738a1c37f7b9876d22  */
1 /* { dg-do run } */
2 /* { dg-require-effective-target avx } */
3 /* { dg-options "-O2 -mavx" } */
5 #include "avx-check.h"
7 static void
8 avx_test (void)
10 __m256i src;
11 #ifdef __x86_64__
12 char reg_save[16][32];
13 char d[16][32];
14 #else
15 char reg_save[8][32];
16 char d[8][32];
17 #endif
19 int s[8] = {1, 2, 3, 4, 5, 6, 7, 8};
21 __builtin_memset (d, 0, sizeof d);
22 __builtin_memset (reg_save, -1, sizeof reg_save);
24 src = _mm256_loadu_si256 ((__m256i*) s);
26 _mm256_zeroall ();
28 __asm__ __volatile__ ("vmovdqu %%ymm0,%0":"=m"(reg_save[0]));
29 __asm__ __volatile__ ("vmovdqu %%ymm1,%0":"=m"(reg_save[1]));
30 __asm__ __volatile__ ("vmovdqu %%ymm2,%0":"=m"(reg_save[2]));
31 __asm__ __volatile__ ("vmovdqu %%ymm3,%0":"=m"(reg_save[3]));
32 __asm__ __volatile__ ("vmovdqu %%ymm4,%0":"=m"(reg_save[4]));
33 __asm__ __volatile__ ("vmovdqu %%ymm5,%0":"=m"(reg_save[5]));
34 __asm__ __volatile__ ("vmovdqu %%ymm6,%0":"=m"(reg_save[6]));
35 __asm__ __volatile__ ("vmovdqu %%ymm7,%0":"=m"(reg_save[7]));
36 #ifdef __x86_64__
37 __asm__ __volatile__ ("vmovdqu %%ymm8,%0":"=m"(reg_save[8]));
38 __asm__ __volatile__ ("vmovdqu %%ymm9,%0":"=m"(reg_save[9]));
39 __asm__ __volatile__ ("vmovdqu %%ymm10,%0":"=m"(reg_save[10]));
40 __asm__ __volatile__ ("vmovdqu %%ymm11,%0":"=m"(reg_save[11]));
41 __asm__ __volatile__ ("vmovdqu %%ymm12,%0":"=m"(reg_save[12]));
42 __asm__ __volatile__ ("vmovdqu %%ymm13,%0":"=m"(reg_save[13]));
43 __asm__ __volatile__ ("vmovdqu %%ymm14,%0":"=m"(reg_save[14]));
44 __asm__ __volatile__ ("vmovdqu %%ymm15,%0":"=m"(reg_save[15]));
45 #endif
47 if (__builtin_memcmp (reg_save, d, sizeof d))
48 abort ();
50 _mm256_storeu_si256 ((__m256i*) d, src);