/* { dg-do compile } */
/* { dg-options "-w -O1 -fdump-tree-optimized" } */
/* { dg-options "-w -O1 -fdump-tree-optimized -msse" { target { i?86-*-* x86_64-*-* } } } */
#define vector __attribute__(( vector_size(16) ))

/* Both vector parameters are overwritten with all-zero vectors before
   they are ever read, so the element loads below always produce 0.0
   and the whole function should fold to "return 0.0" (see the
   dg-final scans at the end of the file).  The pointer-cast element
   access is deliberate: it is the pattern that produces the
   BIT_FIELD_REF the test expects the optimizers to eliminate.  */
float f(vector float a, int b, vector float c)
{
  a = (vector float){0,0,0,0};
  c = (vector float){0,0,0,0};

  /* Read element 0 of each zeroed vector.  */
  float d = ((float*)&a)[0];
  float d1 = ((float*)&c)[0];

  return d + d1;
}
/* We should be able to optimize this to just "return 0.0;" */
/* { dg-final { scan-tree-dump-not "BIT_FIELD_REF" "optimized"} } */
/* { dg-final { scan-tree-dump-times "return 0.0" 1 "optimized"} } */