2 * TCC - Tiny C Compiler
4 * Copyright (c) 2001-2004 Fabrice Bellard
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
24 /********************************************************/
25 /* global variables */
27 /* loc : local variable index
28 ind : output code index
30 anon_sym: anonymous symbol index
32 ST_DATA
int rsym
, anon_sym
, ind
, loc
;
34 ST_DATA Sym
*global_stack
;
35 ST_DATA Sym
*local_stack
;
36 ST_DATA Sym
*define_stack
;
37 ST_DATA Sym
*global_label_stack
;
38 ST_DATA Sym
*local_label_stack
;
40 static Sym
*sym_free_first
;
41 static void **sym_pools
;
42 static int nb_sym_pools
;
44 static Sym
*all_cleanups
, *pending_gotos
;
45 static int local_scope
;
47 static int constant_p
;
48 ST_DATA
char debug_modes
;
51 static SValue _vstack
[1 + VSTACK_SIZE
];
52 #define vstack (_vstack + 1)
54 ST_DATA
int const_wanted
; /* true if constant wanted */
55 ST_DATA
int nocode_wanted
; /* no code generation wanted */
56 #define unevalmask 0xffff /* unevaluated subexpression */
57 #define NODATA_WANTED (nocode_wanted > 0) /* no static data output wanted either */
58 #define DATA_ONLY_WANTED 0x80000000 /* ON outside of functions and for static initializers */
59 #define CODE_OFF() if(!nocode_wanted)(nocode_wanted |= 0x20000000)
60 #define CODE_ON() (nocode_wanted &= ~0x20000000)
62 ST_DATA
int global_expr
; /* true if compound literals must be allocated globally (used during initializers parsing */
63 ST_DATA CType func_vt
; /* current function return type (used by return instruction) */
64 ST_DATA
int func_var
; /* true if current function is variadic (used by return instruction) */
67 ST_DATA
const char *funcname
;
68 ST_DATA CType int_type
, func_old_type
, char_type
, char_pointer_type
;
69 static CString initstr
;
72 #define VT_SIZE_T (VT_INT | VT_UNSIGNED)
73 #define VT_PTRDIFF_T VT_INT
75 #define VT_SIZE_T (VT_LLONG | VT_UNSIGNED)
76 #define VT_PTRDIFF_T VT_LLONG
78 #define VT_SIZE_T (VT_LONG | VT_LLONG | VT_UNSIGNED)
79 #define VT_PTRDIFF_T (VT_LONG | VT_LLONG)
82 static struct switch_t
{
86 } **p
; int n
; /* list of case ranges */
87 int def_sym
; /* default symbol */
91 struct switch_t
*prev
;
93 } *cur_switch
; /* current switch */
95 #define MAX_TEMP_LOCAL_VARIABLE_NUMBER 8
96 /*list of temporary local variables on the stack in current function. */
97 static struct temp_local_variable
{
98 int location
; //offset on stack. Svalue.c.i
101 } arr_temp_local_vars
[MAX_TEMP_LOCAL_VARIABLE_NUMBER
];
102 static int nb_temp_local_vars
;
104 static struct scope
{
106 struct { int loc
, locorig
, num
; } vla
;
107 struct { Sym
*s
; int n
; } cl
;
110 } *cur_scope
, *loop_scope
, *root_scope
;
119 #define precedence_parser
120 static void init_prec(void);
123 static void gen_cast(CType
*type
);
124 static void gen_cast_s(int t
);
125 static inline CType
*pointed_type(CType
*type
);
126 static int is_compatible_types(CType
*type1
, CType
*type2
);
127 static int parse_btype(CType
*type
, AttributeDef
*ad
, int ignore_label
);
128 static CType
*type_decl(CType
*type
, AttributeDef
*ad
, int *v
, int td
);
129 static void parse_expr_type(CType
*type
);
130 static void init_putv(init_params
*p
, CType
*type
, unsigned long c
);
131 static void decl_initializer(init_params
*p
, CType
*type
, unsigned long c
, int flags
);
132 static void block(int is_expr
);
133 static void decl_initializer_alloc(CType
*type
, AttributeDef
*ad
, int r
, int has_init
, int v
, int scope
);
134 static int decl(int l
);
135 static void expr_eq(void);
136 static void vpush_type_size(CType
*type
, int *a
);
137 static int is_compatible_unqualified_types(CType
*type1
, CType
*type2
);
138 static inline int64_t expr_const64(void);
139 static void vpush64(int ty
, unsigned long long v
);
140 static void vpush(CType
*type
);
141 static int gvtst(int inv
, int t
);
142 static void gen_inline_functions(TCCState
*s
);
143 static void free_inline_functions(TCCState
*s
);
144 static void skip_or_save_block(TokenString
**str
);
145 static void gv_dup(void);
146 static int get_temp_local_var(int size
,int align
);
147 static void clear_temp_local_var_list();
148 static void cast_error(CType
*st
, CType
*dt
);
150 /* ------------------------------------------------------------------------- */
151 /* Automagical code suppression */
153 /* Clear 'nocode_wanted' at forward label if it was used */
154 ST_FUNC
void gsym(int t
)
162 /* Clear 'nocode_wanted' if current pc is a label */
168 tcc_tcov_block_begin(tcc_state
);
172 /* Set 'nocode_wanted' after unconditional (backwards) jump */
173 static void gjmp_addr_acs(int t
)
179 /* Set 'nocode_wanted' after unconditional (forwards) jump */
180 static int gjmp_acs(int t
)
/* These are #undef'd at the end of this file */
#define gjmp_addr gjmp_addr_acs
#define gjmp gjmp_acs
190 /* ------------------------------------------------------------------------- */
192 ST_INLN
int is_float(int t
)
194 int bt
= t
& VT_BTYPE
;
195 return bt
== VT_LDOUBLE
201 static inline int is_integer_btype(int bt
)
210 static int btype_size(int bt
)
212 return bt
== VT_BYTE
|| bt
== VT_BOOL
? 1 :
216 bt
== VT_PTR
? PTR_SIZE
: 0;
219 /* returns function return register from type */
220 static int R_RET(int t
)
224 #ifdef TCC_TARGET_X86_64
225 if ((t
& VT_BTYPE
) == VT_LDOUBLE
)
227 #elif defined TCC_TARGET_RISCV64
228 if ((t
& VT_BTYPE
) == VT_LDOUBLE
)
234 /* returns 2nd function return register, if any */
235 static int R2_RET(int t
)
241 #elif defined TCC_TARGET_X86_64
246 #elif defined TCC_TARGET_RISCV64
253 /* returns true for two-word types */
254 #define USING_TWO_WORDS(t) (R2_RET(t) != VT_CONST)
256 /* put function return registers to stack value */
257 static void PUT_R_RET(SValue
*sv
, int t
)
259 sv
->r
= R_RET(t
), sv
->r2
= R2_RET(t
);
262 /* returns function return register class for type t */
263 static int RC_RET(int t
)
265 return reg_classes
[R_RET(t
)] & ~(RC_FLOAT
| RC_INT
);
268 /* returns generic register class for type t */
269 static int RC_TYPE(int t
)
273 #ifdef TCC_TARGET_X86_64
274 if ((t
& VT_BTYPE
) == VT_LDOUBLE
)
276 if ((t
& VT_BTYPE
) == VT_QFLOAT
)
278 #elif defined TCC_TARGET_RISCV64
279 if ((t
& VT_BTYPE
) == VT_LDOUBLE
)
285 /* returns 2nd register class corresponding to t and rc */
286 static int RC2_TYPE(int t
, int rc
)
288 if (!USING_TWO_WORDS(t
))
303 /* we use our own 'finite' function to avoid potential problems with
304 non standard math libs */
305 /* XXX: endianness dependent */
306 ST_FUNC
int ieee_finite(double d
)
309 memcpy(p
, &d
, sizeof(double));
310 return ((unsigned)((p
[1] | 0x800fffff) + 1)) >> 31;
313 /* compiling intel long double natively */
314 #if (defined __i386__ || defined __x86_64__) \
315 && (defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64)
316 # define TCC_IS_NATIVE_387
319 ST_FUNC
void test_lvalue(void)
321 if (!(vtop
->r
& VT_LVAL
))
325 ST_FUNC
void check_vstack(void)
327 if (vtop
!= vstack
- 1)
328 tcc_error("internal compiler error: vstack leak (%d)",
329 (int)(vtop
- vstack
+ 1));
332 /* vstack debugging aid */
334 void pv (const char *lbl
, int a
, int b
)
337 for (i
= a
; i
< a
+ b
; ++i
) {
338 SValue
*p
= &vtop
[-i
];
339 printf("%s vtop[-%d] : type.t:%04x r:%04x r2:%04x c.i:%d\n",
340 lbl
, i
, p
->type
.t
, p
->r
, p
->r2
, (int)p
->c
.i
);
345 /* ------------------------------------------------------------------------- */
346 /* initialize vstack and types. This must be done also for tcc -E */
347 ST_FUNC
void tccgen_init(TCCState
*s1
)
350 memset(vtop
, 0, sizeof *vtop
);
352 /* define some often used types */
355 char_type
.t
= VT_BYTE
;
356 if (s1
->char_is_unsigned
)
357 char_type
.t
|= VT_UNSIGNED
;
358 char_pointer_type
= char_type
;
359 mk_pointer(&char_pointer_type
);
361 func_old_type
.t
= VT_FUNC
;
362 func_old_type
.ref
= sym_push(SYM_FIELD
, &int_type
, 0, 0);
363 func_old_type
.ref
->f
.func_call
= FUNC_CDECL
;
364 func_old_type
.ref
->f
.func_type
= FUNC_OLD
;
365 #ifdef precedence_parser
371 ST_FUNC
int tccgen_compile(TCCState
*s1
)
373 cur_text_section
= NULL
;
376 anon_sym
= SYM_FIRST_ANOM
;
378 nocode_wanted
= DATA_ONLY_WANTED
; /* no code outside of functions */
380 debug_modes
= (s1
->do_debug
? 1 : 0) | s1
->test_coverage
<< 1;
384 #ifdef TCC_TARGET_ARM
388 printf("%s: **** new file\n", file
->filename
);
390 parse_flags
= PARSE_FLAG_PREPROCESS
| PARSE_FLAG_TOK_NUM
| PARSE_FLAG_TOK_STR
;
393 gen_inline_functions(s1
);
395 /* end of translation unit info */
401 ST_FUNC
void tccgen_finish(TCCState
*s1
)
404 free_inline_functions(s1
);
405 sym_pop(&global_stack
, NULL
, 0);
406 sym_pop(&local_stack
, NULL
, 0);
407 /* free preprocessor macros */
410 dynarray_reset(&sym_pools
, &nb_sym_pools
);
411 sym_free_first
= NULL
;
414 /* ------------------------------------------------------------------------- */
415 ST_FUNC ElfSym
*elfsym(Sym
*s
)
419 return &((ElfSym
*)symtab_section
->data
)[s
->c
];
422 /* apply storage attributes to Elf symbol */
423 ST_FUNC
void update_storage(Sym
*sym
)
426 int sym_bind
, old_sym_bind
;
432 if (sym
->a
.visibility
)
433 esym
->st_other
= (esym
->st_other
& ~ELFW(ST_VISIBILITY
)(-1))
436 if (sym
->type
.t
& (VT_STATIC
| VT_INLINE
))
437 sym_bind
= STB_LOCAL
;
438 else if (sym
->a
.weak
)
441 sym_bind
= STB_GLOBAL
;
442 old_sym_bind
= ELFW(ST_BIND
)(esym
->st_info
);
443 if (sym_bind
!= old_sym_bind
) {
444 esym
->st_info
= ELFW(ST_INFO
)(sym_bind
, ELFW(ST_TYPE
)(esym
->st_info
));
448 if (sym
->a
.dllimport
)
449 esym
->st_other
|= ST_PE_IMPORT
;
450 if (sym
->a
.dllexport
)
451 esym
->st_other
|= ST_PE_EXPORT
;
455 printf("storage %s: bind=%c vis=%d exp=%d imp=%d\n",
456 get_tok_str(sym
->v
, NULL
),
457 sym_bind
== STB_WEAK
? 'w' : sym_bind
== STB_LOCAL
? 'l' : 'g',
465 /* ------------------------------------------------------------------------- */
466 /* update sym->c so that it points to an external symbol in section
467 'section' with value 'value' */
469 ST_FUNC
void put_extern_sym2(Sym
*sym
, int sh_num
,
470 addr_t value
, unsigned long size
,
471 int can_add_underscore
)
473 int sym_type
, sym_bind
, info
, other
, t
;
479 name
= get_tok_str(sym
->v
, NULL
);
481 if ((t
& VT_BTYPE
) == VT_FUNC
) {
483 } else if ((t
& VT_BTYPE
) == VT_VOID
) {
484 sym_type
= STT_NOTYPE
;
485 if ((t
& (VT_BTYPE
|VT_ASM_FUNC
)) == VT_ASM_FUNC
)
488 sym_type
= STT_OBJECT
;
490 if (t
& (VT_STATIC
| VT_INLINE
))
491 sym_bind
= STB_LOCAL
;
493 sym_bind
= STB_GLOBAL
;
497 if (sym_type
== STT_FUNC
&& sym
->type
.ref
) {
498 Sym
*ref
= sym
->type
.ref
;
499 if (ref
->a
.nodecorate
) {
500 can_add_underscore
= 0;
502 if (ref
->f
.func_call
== FUNC_STDCALL
&& can_add_underscore
) {
503 sprintf(buf1
, "_%s@%d", name
, ref
->f
.func_args
* PTR_SIZE
);
505 other
|= ST_PE_STDCALL
;
506 can_add_underscore
= 0;
511 if (sym
->asm_label
) {
512 name
= get_tok_str(sym
->asm_label
, NULL
);
513 can_add_underscore
= 0;
516 if (tcc_state
->leading_underscore
&& can_add_underscore
) {
518 pstrcpy(buf1
+ 1, sizeof(buf1
) - 1, name
);
522 info
= ELFW(ST_INFO
)(sym_bind
, sym_type
);
523 sym
->c
= put_elf_sym(symtab_section
, value
, size
, info
, other
, sh_num
, name
);
526 tcc_debug_extern_sym(tcc_state
, sym
, sh_num
, sym_bind
, sym_type
);
530 esym
->st_value
= value
;
531 esym
->st_size
= size
;
532 esym
->st_shndx
= sh_num
;
537 ST_FUNC
void put_extern_sym(Sym
*sym
, Section
*s
, addr_t value
, unsigned long size
)
539 if (nocode_wanted
&& (NODATA_WANTED
|| (s
&& s
== cur_text_section
)))
541 put_extern_sym2(sym
, s
? s
->sh_num
: SHN_UNDEF
, value
, size
, 1);
544 /* add a new relocation entry to symbol 'sym' in section 's' */
545 ST_FUNC
void greloca(Section
*s
, Sym
*sym
, unsigned long offset
, int type
,
550 if (nocode_wanted
&& s
== cur_text_section
)
555 put_extern_sym(sym
, NULL
, 0, 0);
559 /* now we can add ELF relocation info */
560 put_elf_reloca(symtab_section
, s
, offset
, type
, c
, addend
);
564 ST_FUNC
void greloc(Section
*s
, Sym
*sym
, unsigned long offset
, int type
)
566 greloca(s
, sym
, offset
, type
, 0);
570 /* ------------------------------------------------------------------------- */
571 /* symbol allocator */
572 static Sym
*__sym_malloc(void)
574 Sym
*sym_pool
, *sym
, *last_sym
;
577 sym_pool
= tcc_malloc(SYM_POOL_NB
* sizeof(Sym
));
578 dynarray_add(&sym_pools
, &nb_sym_pools
, sym_pool
);
580 last_sym
= sym_free_first
;
582 for(i
= 0; i
< SYM_POOL_NB
; i
++) {
583 sym
->next
= last_sym
;
587 sym_free_first
= last_sym
;
591 static inline Sym
*sym_malloc(void)
595 sym
= sym_free_first
;
597 sym
= __sym_malloc();
598 sym_free_first
= sym
->next
;
601 sym
= tcc_malloc(sizeof(Sym
));
606 ST_INLN
void sym_free(Sym
*sym
)
609 sym
->next
= sym_free_first
;
610 sym_free_first
= sym
;
616 /* push, without hashing */
617 ST_FUNC Sym
*sym_push2(Sym
**ps
, int v
, int t
, int c
)
622 memset(s
, 0, sizeof *s
);
632 /* find a symbol and return its associated structure. 's' is the top
633 of the symbol stack */
634 ST_FUNC Sym
*sym_find2(Sym
*s
, int v
)
646 /* structure lookup */
647 ST_INLN Sym
*struct_find(int v
)
650 if ((unsigned)v
>= (unsigned)(tok_ident
- TOK_IDENT
))
652 return table_ident
[v
]->sym_struct
;
655 /* find an identifier */
656 ST_INLN Sym
*sym_find(int v
)
659 if ((unsigned)v
>= (unsigned)(tok_ident
- TOK_IDENT
))
661 return table_ident
[v
]->sym_identifier
;
664 static int sym_scope(Sym
*s
)
666 if (IS_ENUM_VAL (s
->type
.t
))
667 return s
->type
.ref
->sym_scope
;
672 /* push a given symbol on the symbol stack */
673 ST_FUNC Sym
*sym_push(int v
, CType
*type
, int r
, int c
)
682 s
= sym_push2(ps
, v
, type
->t
, c
);
683 s
->type
.ref
= type
->ref
;
685 /* don't record fields or anonymous symbols */
687 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
688 /* record symbol in token array */
689 ts
= table_ident
[(v
& ~SYM_STRUCT
) - TOK_IDENT
];
691 ps
= &ts
->sym_struct
;
693 ps
= &ts
->sym_identifier
;
696 s
->sym_scope
= local_scope
;
697 if (s
->prev_tok
&& sym_scope(s
->prev_tok
) == s
->sym_scope
)
698 tcc_error("redeclaration of '%s'",
699 get_tok_str(v
& ~SYM_STRUCT
, NULL
));
704 /* push a global identifier */
705 ST_FUNC Sym
*global_identifier_push(int v
, int t
, int c
)
708 s
= sym_push2(&global_stack
, v
, t
, c
);
709 s
->r
= VT_CONST
| VT_SYM
;
710 /* don't record anonymous symbol */
711 if (v
< SYM_FIRST_ANOM
) {
712 ps
= &table_ident
[v
- TOK_IDENT
]->sym_identifier
;
713 /* modify the top most local identifier, so that sym_identifier will
714 point to 's' when popped; happens when called from inline asm */
715 while (*ps
!= NULL
&& (*ps
)->sym_scope
)
716 ps
= &(*ps
)->prev_tok
;
723 /* pop symbols until top reaches 'b'. If KEEP is non-zero don't really
724 pop them yet from the list, but do remove them from the token array. */
725 ST_FUNC
void sym_pop(Sym
**ptop
, Sym
*b
, int keep
)
735 /* remove symbol in token array */
737 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
738 ts
= table_ident
[(v
& ~SYM_STRUCT
) - TOK_IDENT
];
740 ps
= &ts
->sym_struct
;
742 ps
= &ts
->sym_identifier
;
753 /* ------------------------------------------------------------------------- */
754 static void vcheck_cmp(void)
756 /* cannot let cpu flags if other instruction are generated. Also
757 avoid leaving VT_JMP anywhere except on the top of the stack
758 because it would complicate the code generator.
760 Don't do this when nocode_wanted. vtop might come from
761 !nocode_wanted regions (see 88_codeopt.c) and transforming
762 it to a register without actually generating code is wrong
763 as their value might still be used for real. All values
764 we push under nocode_wanted will eventually be popped
765 again, so that the VT_CMP/VT_JMP value will be in vtop
766 when code is unsuppressed again. */
768 if (vtop
->r
== VT_CMP
&& !nocode_wanted
)
772 static void vsetc(CType
*type
, int r
, CValue
*vc
)
774 if (vtop
>= vstack
+ (VSTACK_SIZE
- 1))
775 tcc_error("memory full (vstack)");
785 ST_FUNC
void vswap(void)
795 /* pop stack value */
796 ST_FUNC
void vpop(void)
799 v
= vtop
->r
& VT_VALMASK
;
800 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
801 /* for x86, we need to pop the FP stack */
803 o(0xd8dd); /* fstp %st(0) */
807 /* need to put correct jump if && or || without test */
814 /* push constant of type "type" with useless value */
815 static void vpush(CType
*type
)
817 vset(type
, VT_CONST
, 0);
820 /* push arbitrary 64bit constant */
821 static void vpush64(int ty
, unsigned long long v
)
828 vsetc(&ctype
, VT_CONST
, &cval
);
831 /* push integer constant */
832 ST_FUNC
void vpushi(int v
)
837 /* push a pointer sized constant */
838 static void vpushs(addr_t v
)
840 vpush64(VT_SIZE_T
, v
);
843 /* push long long constant */
844 static inline void vpushll(long long v
)
846 vpush64(VT_LLONG
, v
);
849 ST_FUNC
void vset(CType
*type
, int r
, int v
)
853 vsetc(type
, r
, &cval
);
856 static void vseti(int r
, int v
)
864 ST_FUNC
void vpushv(SValue
*v
)
866 if (vtop
>= vstack
+ (VSTACK_SIZE
- 1))
867 tcc_error("memory full (vstack)");
872 static void vdup(void)
877 /* rotate n first stack elements to the bottom
878 I1 ... In -> I2 ... In I1 [top is right]
880 ST_FUNC
void vrotb(int n
)
892 /* rotate the n elements before entry e towards the top
893 I1 ... In ... -> In I1 ... I(n-1) ... [top is right]
895 ST_FUNC
void vrote(SValue
*e
, int n
)
902 for(i
= 0;i
< n
- 1; i
++)
907 /* rotate n first stack elements to the top
908 I1 ... In -> In I1 ... I(n-1) [top is right]
910 ST_FUNC
void vrott(int n
)
915 /* ------------------------------------------------------------------------- */
916 /* vtop->r = VT_CMP means CPU-flags have been set from comparison or test. */
918 /* called from generators to set the result from relational ops */
919 ST_FUNC
void vset_VT_CMP(int op
)
927 /* called once before asking generators to load VT_CMP to a register */
928 static void vset_VT_JMP(void)
930 int op
= vtop
->cmp_op
;
932 if (vtop
->jtrue
|| vtop
->jfalse
) {
933 /* we need to jump to 'mov $0,%R' or 'mov $1,%R' */
934 int inv
= op
& (op
< 2); /* small optimization */
935 vseti(VT_JMP
+inv
, gvtst(inv
, 0));
937 /* otherwise convert flags (rsp. 0/1) to register */
939 if (op
< 2) /* doesn't seem to happen */
944 /* Set CPU Flags, doesn't yet jump */
945 static void gvtst_set(int inv
, int t
)
949 if (vtop
->r
!= VT_CMP
) {
952 if (vtop
->r
!= VT_CMP
) /* must be VT_CONST then */
953 vset_VT_CMP(vtop
->c
.i
!= 0);
956 p
= inv
? &vtop
->jfalse
: &vtop
->jtrue
;
957 *p
= gjmp_append(*p
, t
);
960 /* Generate value test
962 * Generate a test for any value (jump, comparison and integers) */
963 static int gvtst(int inv
, int t
)
968 t
= vtop
->jtrue
, u
= vtop
->jfalse
;
973 /* jump to the wanted target */
975 t
= gjmp_cond(op
^ inv
, t
);
978 /* resolve complementary jumps to here */
985 /* generate a zero or nozero test */
986 static void gen_test_zero(int op
)
988 if (vtop
->r
== VT_CMP
) {
992 vtop
->jfalse
= vtop
->jtrue
;
1002 /* ------------------------------------------------------------------------- */
1003 /* push a symbol value of TYPE */
1004 ST_FUNC
void vpushsym(CType
*type
, Sym
*sym
)
1008 vsetc(type
, VT_CONST
| VT_SYM
, &cval
);
1012 /* Return a static symbol pointing to a section */
1013 ST_FUNC Sym
*get_sym_ref(CType
*type
, Section
*sec
, unsigned long offset
, unsigned long size
)
1019 sym
= sym_push(v
, type
, VT_CONST
| VT_SYM
, 0);
1020 sym
->type
.t
|= VT_STATIC
;
1021 put_extern_sym(sym
, sec
, offset
, size
);
1025 /* push a reference to a section offset by adding a dummy symbol */
1026 static void vpush_ref(CType
*type
, Section
*sec
, unsigned long offset
, unsigned long size
)
1028 vpushsym(type
, get_sym_ref(type
, sec
, offset
, size
));
1031 /* define a new external reference to a symbol 'v' of type 'u' */
1032 ST_FUNC Sym
*external_global_sym(int v
, CType
*type
)
1038 /* push forward reference */
1039 s
= global_identifier_push(v
, type
->t
| VT_EXTERN
, 0);
1040 s
->type
.ref
= type
->ref
;
1041 } else if (IS_ASM_SYM(s
)) {
1042 s
->type
.t
= type
->t
| (s
->type
.t
& VT_EXTERN
);
1043 s
->type
.ref
= type
->ref
;
1049 /* create an external reference with no specific type similar to asm labels.
1050 This avoids type conflicts if the symbol is used from C too */
1051 ST_FUNC Sym
*external_helper_sym(int v
)
1053 CType ct
= { VT_ASM_FUNC
, NULL
};
1054 return external_global_sym(v
, &ct
);
1057 /* push a reference to an helper function (such as memmove) */
1058 ST_FUNC
void vpush_helper_func(int v
)
1060 vpushsym(&func_old_type
, external_helper_sym(v
));
1063 /* Merge symbol attributes. */
1064 static void merge_symattr(struct SymAttr
*sa
, struct SymAttr
*sa1
)
1066 if (sa1
->aligned
&& !sa
->aligned
)
1067 sa
->aligned
= sa1
->aligned
;
1068 sa
->packed
|= sa1
->packed
;
1069 sa
->weak
|= sa1
->weak
;
1070 if (sa1
->visibility
!= STV_DEFAULT
) {
1071 int vis
= sa
->visibility
;
1072 if (vis
== STV_DEFAULT
1073 || vis
> sa1
->visibility
)
1074 vis
= sa1
->visibility
;
1075 sa
->visibility
= vis
;
1077 sa
->dllexport
|= sa1
->dllexport
;
1078 sa
->nodecorate
|= sa1
->nodecorate
;
1079 sa
->dllimport
|= sa1
->dllimport
;
1082 /* Merge function attributes. */
1083 static void merge_funcattr(struct FuncAttr
*fa
, struct FuncAttr
*fa1
)
1085 if (fa1
->func_call
&& !fa
->func_call
)
1086 fa
->func_call
= fa1
->func_call
;
1087 if (fa1
->func_type
&& !fa
->func_type
)
1088 fa
->func_type
= fa1
->func_type
;
1089 if (fa1
->func_args
&& !fa
->func_args
)
1090 fa
->func_args
= fa1
->func_args
;
1091 if (fa1
->func_noreturn
)
1092 fa
->func_noreturn
= 1;
1099 /* Merge attributes. */
1100 static void merge_attr(AttributeDef
*ad
, AttributeDef
*ad1
)
1102 merge_symattr(&ad
->a
, &ad1
->a
);
1103 merge_funcattr(&ad
->f
, &ad1
->f
);
1106 ad
->section
= ad1
->section
;
1107 if (ad1
->alias_target
)
1108 ad
->alias_target
= ad1
->alias_target
;
1110 ad
->asm_label
= ad1
->asm_label
;
1112 ad
->attr_mode
= ad1
->attr_mode
;
1115 /* Merge some type attributes. */
1116 static void patch_type(Sym
*sym
, CType
*type
)
1118 if (!(type
->t
& VT_EXTERN
) || IS_ENUM_VAL(sym
->type
.t
)) {
1119 if (!(sym
->type
.t
& VT_EXTERN
))
1120 tcc_error("redefinition of '%s'", get_tok_str(sym
->v
, NULL
));
1121 sym
->type
.t
&= ~VT_EXTERN
;
1124 if (IS_ASM_SYM(sym
)) {
1125 /* stay static if both are static */
1126 sym
->type
.t
= type
->t
& (sym
->type
.t
| ~VT_STATIC
);
1127 sym
->type
.ref
= type
->ref
;
1130 if (!is_compatible_types(&sym
->type
, type
)) {
1131 tcc_error("incompatible types for redefinition of '%s'",
1132 get_tok_str(sym
->v
, NULL
));
1134 } else if ((sym
->type
.t
& VT_BTYPE
) == VT_FUNC
) {
1135 int static_proto
= sym
->type
.t
& VT_STATIC
;
1136 /* warn if static follows non-static function declaration */
1137 if ((type
->t
& VT_STATIC
) && !static_proto
1138 /* XXX this test for inline shouldn't be here. Until we
1139 implement gnu-inline mode again it silences a warning for
1140 mingw caused by our workarounds. */
1141 && !((type
->t
| sym
->type
.t
) & VT_INLINE
))
1142 tcc_warning("static storage ignored for redefinition of '%s'",
1143 get_tok_str(sym
->v
, NULL
));
1145 /* set 'inline' if both agree or if one has static */
1146 if ((type
->t
| sym
->type
.t
) & VT_INLINE
) {
1147 if (!((type
->t
^ sym
->type
.t
) & VT_INLINE
)
1148 || ((type
->t
| sym
->type
.t
) & VT_STATIC
))
1149 static_proto
|= VT_INLINE
;
1152 if (0 == (type
->t
& VT_EXTERN
)) {
1153 struct FuncAttr f
= sym
->type
.ref
->f
;
1154 /* put complete type, use static from prototype */
1155 sym
->type
.t
= (type
->t
& ~(VT_STATIC
|VT_INLINE
)) | static_proto
;
1156 sym
->type
.ref
= type
->ref
;
1157 merge_funcattr(&sym
->type
.ref
->f
, &f
);
1159 sym
->type
.t
&= ~VT_INLINE
| static_proto
;
1162 if (sym
->type
.ref
->f
.func_type
== FUNC_OLD
1163 && type
->ref
->f
.func_type
!= FUNC_OLD
) {
1164 sym
->type
.ref
= type
->ref
;
1168 if ((sym
->type
.t
& VT_ARRAY
) && type
->ref
->c
>= 0) {
1169 /* set array size if it was omitted in extern declaration */
1170 sym
->type
.ref
->c
= type
->ref
->c
;
1172 if ((type
->t
^ sym
->type
.t
) & VT_STATIC
)
1173 tcc_warning("storage mismatch for redefinition of '%s'",
1174 get_tok_str(sym
->v
, NULL
));
1178 /* Merge some storage attributes. */
1179 static void patch_storage(Sym
*sym
, AttributeDef
*ad
, CType
*type
)
1182 patch_type(sym
, type
);
1184 #ifdef TCC_TARGET_PE
1185 if (sym
->a
.dllimport
!= ad
->a
.dllimport
)
1186 tcc_error("incompatible dll linkage for redefinition of '%s'",
1187 get_tok_str(sym
->v
, NULL
));
1189 merge_symattr(&sym
->a
, &ad
->a
);
1191 sym
->asm_label
= ad
->asm_label
;
1192 update_storage(sym
);
1195 /* copy sym to other stack */
1196 static Sym
*sym_copy(Sym
*s0
, Sym
**ps
)
1199 s
= sym_malloc(), *s
= *s0
;
1200 s
->prev
= *ps
, *ps
= s
;
1201 if (s
->v
< SYM_FIRST_ANOM
) {
1202 ps
= &table_ident
[s
->v
- TOK_IDENT
]->sym_identifier
;
1203 s
->prev_tok
= *ps
, *ps
= s
;
1208 /* copy s->type.ref to stack 'ps' for VT_FUNC and VT_PTR */
1209 static void sym_copy_ref(Sym
*s
, Sym
**ps
)
1211 int bt
= s
->type
.t
& VT_BTYPE
;
1212 if (bt
== VT_FUNC
|| bt
== VT_PTR
|| (bt
== VT_STRUCT
&& s
->sym_scope
)) {
1213 Sym
**sp
= &s
->type
.ref
;
1214 for (s
= *sp
, *sp
= NULL
; s
; s
= s
->next
) {
1215 Sym
*s2
= sym_copy(s
, ps
);
1216 sp
= &(*sp
= s2
)->next
;
1217 sym_copy_ref(s2
, ps
);
1222 /* define a new external reference to a symbol 'v' */
1223 static Sym
*external_sym(int v
, CType
*type
, int r
, AttributeDef
*ad
)
1227 /* look for global symbol */
1229 while (s
&& s
->sym_scope
)
1233 /* push forward reference */
1234 s
= global_identifier_push(v
, type
->t
, 0);
1237 s
->asm_label
= ad
->asm_label
;
1238 s
->type
.ref
= type
->ref
;
1239 /* copy type to the global stack */
1241 sym_copy_ref(s
, &global_stack
);
1243 patch_storage(s
, ad
, type
);
1245 /* push variables on local_stack if any */
1246 if (local_stack
&& (s
->type
.t
& VT_BTYPE
) != VT_FUNC
)
1247 s
= sym_copy(s
, &local_stack
);
1251 /* save registers up to (vtop - n) stack entry */
1252 ST_FUNC
void save_regs(int n
)
1255 for(p
= vstack
, p1
= vtop
- n
; p
<= p1
; p
++)
1259 /* save r to the memory stack, and mark it as being free */
1260 ST_FUNC
void save_reg(int r
)
1262 save_reg_upstack(r
, 0);
1265 /* save r to the memory stack, and mark it as being free,
1266 if seen up to (vtop - n) stack entry */
1267 ST_FUNC
void save_reg_upstack(int r
, int n
)
1269 int l
, size
, align
, bt
;
1272 if ((r
&= VT_VALMASK
) >= VT_CONST
)
1277 for(p
= vstack
, p1
= vtop
- n
; p
<= p1
; p
++) {
1278 if ((p
->r
& VT_VALMASK
) == r
|| p
->r2
== r
) {
1279 /* must save value on stack if not already done */
1281 bt
= p
->type
.t
& VT_BTYPE
;
1284 if ((p
->r
& VT_LVAL
) || bt
== VT_FUNC
)
1287 size
= type_size(&sv
.type
, &align
);
1288 l
= get_temp_local_var(size
,align
);
1289 sv
.r
= VT_LOCAL
| VT_LVAL
;
1291 store(p
->r
& VT_VALMASK
, &sv
);
1292 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
1293 /* x86 specific: need to pop fp register ST0 if saved */
1294 if (r
== TREG_ST0
) {
1295 o(0xd8dd); /* fstp %st(0) */
1298 /* special long long case */
1299 if (p
->r2
< VT_CONST
&& USING_TWO_WORDS(bt
)) {
1304 /* mark that stack entry as being saved on the stack */
1305 if (p
->r
& VT_LVAL
) {
1306 /* also clear the bounded flag because the
1307 relocation address of the function was stored in
1309 p
->r
= (p
->r
& ~(VT_VALMASK
| VT_BOUNDED
)) | VT_LLOCAL
;
1311 p
->r
= VT_LVAL
| VT_LOCAL
;
1320 #ifdef TCC_TARGET_ARM
1321 /* find a register of class 'rc2' with at most one reference on stack.
1322 * If none, call get_reg(rc) */
1323 ST_FUNC
int get_reg_ex(int rc
, int rc2
)
1328 for(r
=0;r
<NB_REGS
;r
++) {
1329 if (reg_classes
[r
] & rc2
) {
1332 for(p
= vstack
; p
<= vtop
; p
++) {
1333 if ((p
->r
& VT_VALMASK
) == r
||
1345 /* find a free register of class 'rc'. If none, save one register */
1346 ST_FUNC
int get_reg(int rc
)
1351 /* find a free register */
1352 for(r
=0;r
<NB_REGS
;r
++) {
1353 if (reg_classes
[r
] & rc
) {
1356 for(p
=vstack
;p
<=vtop
;p
++) {
1357 if ((p
->r
& VT_VALMASK
) == r
||
1366 /* no register left : free the first one on the stack (VERY
1367 IMPORTANT to start from the bottom to ensure that we don't
1368 spill registers used in gen_opi()) */
1369 for(p
=vstack
;p
<=vtop
;p
++) {
1370 /* look at second register (if long long) */
1372 if (r
< VT_CONST
&& (reg_classes
[r
] & rc
))
1374 r
= p
->r
& VT_VALMASK
;
1375 if (r
< VT_CONST
&& (reg_classes
[r
] & rc
)) {
1381 /* Should never comes here */
1385 /* find a free temporary local variable (return the offset on stack) match the size and align. If none, add new temporary stack variable*/
1386 static int get_temp_local_var(int size
,int align
){
1388 struct temp_local_variable
*temp_var
;
1395 for(i
=0;i
<nb_temp_local_vars
;i
++){
1396 temp_var
=&arr_temp_local_vars
[i
];
1397 if(temp_var
->size
<size
||align
!=temp_var
->align
){
1400 /*check if temp_var is free*/
1402 for(p
=vstack
;p
<=vtop
;p
++) {
1404 if(r
==VT_LOCAL
||r
==VT_LLOCAL
){
1405 if(p
->c
.i
==temp_var
->location
){
1412 found_var
=temp_var
->location
;
1418 loc
= (loc
- size
) & -align
;
1419 if(nb_temp_local_vars
<MAX_TEMP_LOCAL_VARIABLE_NUMBER
){
1420 temp_var
=&arr_temp_local_vars
[i
];
1421 temp_var
->location
=loc
;
1422 temp_var
->size
=size
;
1423 temp_var
->align
=align
;
1424 nb_temp_local_vars
++;
1431 static void clear_temp_local_var_list(){
1432 nb_temp_local_vars
=0;
1435 /* move register 's' (of type 't') to 'r', and flush previous value of r to memory
1437 static void move_reg(int r
, int s
, int t
)
1451 /* get address of vtop (vtop MUST BE an lvalue) */
1452 ST_FUNC
void gaddrof(void)
1454 vtop
->r
&= ~VT_LVAL
;
1455 /* tricky: if saved lvalue, then we can go back to lvalue */
1456 if ((vtop
->r
& VT_VALMASK
) == VT_LLOCAL
)
1457 vtop
->r
= (vtop
->r
& ~VT_VALMASK
) | VT_LOCAL
| VT_LVAL
;
1460 #ifdef CONFIG_TCC_BCHECK
1461 /* generate a bounded pointer addition */
1462 static void gen_bounded_ptr_add(void)
1464 int save
= (vtop
[-1].r
& VT_VALMASK
) == VT_LOCAL
;
1469 vpush_helper_func(TOK___bound_ptr_add
);
1474 /* returned pointer is in REG_IRET */
1475 vtop
->r
= REG_IRET
| VT_BOUNDED
;
1478 /* relocation offset of the bounding function call point */
1479 vtop
->c
.i
= (cur_text_section
->reloc
->data_offset
- sizeof(ElfW_Rel
));
1482 /* patch pointer addition in vtop so that pointer dereferencing is
1484 static void gen_bounded_ptr_deref(void)
1494 size
= type_size(&vtop
->type
, &align
);
1496 case 1: func
= TOK___bound_ptr_indir1
; break;
1497 case 2: func
= TOK___bound_ptr_indir2
; break;
1498 case 4: func
= TOK___bound_ptr_indir4
; break;
1499 case 8: func
= TOK___bound_ptr_indir8
; break;
1500 case 12: func
= TOK___bound_ptr_indir12
; break;
1501 case 16: func
= TOK___bound_ptr_indir16
; break;
1503 /* may happen with struct member access */
1506 sym
= external_helper_sym(func
);
1508 put_extern_sym(sym
, NULL
, 0, 0);
1509 /* patch relocation */
1510 /* XXX: find a better solution ? */
1511 rel
= (ElfW_Rel
*)(cur_text_section
->reloc
->data
+ vtop
->c
.i
);
1512 rel
->r_info
= ELFW(R_INFO
)(sym
->c
, ELFW(R_TYPE
)(rel
->r_info
));
1515 /* generate lvalue bound code */
1516 static void gbound(void)
1520 vtop
->r
&= ~VT_MUSTBOUND
;
1521 /* if lvalue, then use checking code before dereferencing */
1522 if (vtop
->r
& VT_LVAL
) {
1523 /* if not VT_BOUNDED value, then make one */
1524 if (!(vtop
->r
& VT_BOUNDED
)) {
1525 /* must save type because we must set it to int to get pointer */
1527 vtop
->type
.t
= VT_PTR
;
1530 gen_bounded_ptr_add();
1534 /* then check for dereferencing */
1535 gen_bounded_ptr_deref();
1539 /* we need to call __bound_ptr_add before we start to load function
1540 args into registers */
1541 ST_FUNC
void gbound_args(int nb_args
)
1546 for (i
= 1; i
<= nb_args
; ++i
)
1547 if (vtop
[1 - i
].r
& VT_MUSTBOUND
) {
1553 sv
= vtop
- nb_args
;
1554 if (sv
->r
& VT_SYM
) {
1558 #ifndef TCC_TARGET_PE
1559 || v
== TOK_sigsetjmp
1560 || v
== TOK___sigsetjmp
1563 vpush_helper_func(TOK___bound_setjmp
);
1566 func_bound_add_epilog
= 1;
1568 #if defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64
1569 if (v
== TOK_alloca
)
1570 func_bound_add_epilog
= 1;
1573 if (v
== TOK_longjmp
) /* undo rename to __longjmp14 */
1574 sv
->sym
->asm_label
= TOK___bound_longjmp
;
1579 /* Add bounds for local symbols from S to E (via ->prev) */
1580 static void add_local_bounds(Sym
*s
, Sym
*e
)
1582 for (; s
!= e
; s
= s
->prev
) {
1583 if (!s
->v
|| (s
->r
& VT_VALMASK
) != VT_LOCAL
)
1585 /* Add arrays/structs/unions because we always take address */
1586 if ((s
->type
.t
& VT_ARRAY
)
1587 || (s
->type
.t
& VT_BTYPE
) == VT_STRUCT
1588 || s
->a
.addrtaken
) {
1589 /* add local bound info */
1590 int align
, size
= type_size(&s
->type
, &align
);
1591 addr_t
*bounds_ptr
= section_ptr_add(lbounds_section
,
1592 2 * sizeof(addr_t
));
1593 bounds_ptr
[0] = s
->c
;
1594 bounds_ptr
[1] = size
;
1600 /* Wrapper around sym_pop, that potentially also registers local bounds. */
1601 static void pop_local_syms(Sym
*b
, int keep
)
1603 #ifdef CONFIG_TCC_BCHECK
1604 if (tcc_state
->do_bounds_check
&& !keep
&& (local_scope
|| !func_var
))
1605 add_local_bounds(local_stack
, b
);
1608 tcc_add_debug_info (tcc_state
, !local_scope
, local_stack
, b
);
1609 sym_pop(&local_stack
, b
, keep
);
1612 static void incr_bf_adr(int o
)
1614 vtop
->type
= char_pointer_type
;
1618 vtop
->type
.t
= VT_BYTE
| VT_UNSIGNED
;
1622 /* single-byte load mode for packed or otherwise unaligned bitfields */
1623 static void load_packed_bf(CType
*type
, int bit_pos
, int bit_size
)
1626 save_reg_upstack(vtop
->r
, 1);
1627 vpush64(type
->t
& VT_BTYPE
, 0); // B X
1628 bits
= 0, o
= bit_pos
>> 3, bit_pos
&= 7;
1637 vpushi(bit_pos
), gen_op(TOK_SHR
), bit_pos
= 0; // X B Y
1639 vpushi((1 << n
) - 1), gen_op('&');
1642 vpushi(bits
), gen_op(TOK_SHL
);
1645 bits
+= n
, bit_size
-= n
, o
= 1;
1648 if (!(type
->t
& VT_UNSIGNED
)) {
1649 n
= ((type
->t
& VT_BTYPE
) == VT_LLONG
? 64 : 32) - bits
;
1650 vpushi(n
), gen_op(TOK_SHL
);
1651 vpushi(n
), gen_op(TOK_SAR
);
1655 /* single-byte store mode for packed or otherwise unaligned bitfields */
1656 static void store_packed_bf(int bit_pos
, int bit_size
)
1658 int bits
, n
, o
, m
, c
;
1659 c
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
1661 save_reg_upstack(vtop
->r
, 1);
1662 bits
= 0, o
= bit_pos
>> 3, bit_pos
&= 7;
1664 incr_bf_adr(o
); // X B
1666 c
? vdup() : gv_dup(); // B V X
1669 vpushi(bits
), gen_op(TOK_SHR
);
1671 vpushi(bit_pos
), gen_op(TOK_SHL
);
1676 m
= ((1 << n
) - 1) << bit_pos
;
1677 vpushi(m
), gen_op('&'); // X B V1
1678 vpushv(vtop
-1); // X B V1 B
1679 vpushi(m
& 0x80 ? ~m
& 0x7f : ~m
);
1680 gen_op('&'); // X B V1 B1
1681 gen_op('|'); // X B V2
1683 vdup(), vtop
[-1] = vtop
[-2]; // X B B V2
1684 vstore(), vpop(); // X B
1685 bits
+= n
, bit_size
-= n
, bit_pos
= 0, o
= 1;
1690 static int adjust_bf(SValue
*sv
, int bit_pos
, int bit_size
)
1693 if (0 == sv
->type
.ref
)
1695 t
= sv
->type
.ref
->auxtype
;
1696 if (t
!= -1 && t
!= VT_STRUCT
) {
1697 sv
->type
.t
= (sv
->type
.t
& ~(VT_BTYPE
| VT_LONG
)) | t
;
1703 /* store vtop a register belonging to class 'rc'. lvalues are
1704 converted to values. Cannot be used if cannot be converted to
1705 register value (such as structures). */
1706 ST_FUNC
int gv(int rc
)
1708 int r
, r2
, r_ok
, r2_ok
, rc2
, bt
;
1709 int bit_pos
, bit_size
, size
, align
;
1711 /* NOTE: get_reg can modify vstack[] */
1712 if (vtop
->type
.t
& VT_BITFIELD
) {
1715 bit_pos
= BIT_POS(vtop
->type
.t
);
1716 bit_size
= BIT_SIZE(vtop
->type
.t
);
1717 /* remove bit field info to avoid loops */
1718 vtop
->type
.t
&= ~VT_STRUCT_MASK
;
1721 type
.t
= vtop
->type
.t
& VT_UNSIGNED
;
1722 if ((vtop
->type
.t
& VT_BTYPE
) == VT_BOOL
)
1723 type
.t
|= VT_UNSIGNED
;
1725 r
= adjust_bf(vtop
, bit_pos
, bit_size
);
1727 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
)
1732 if (r
== VT_STRUCT
) {
1733 load_packed_bf(&type
, bit_pos
, bit_size
);
1735 int bits
= (type
.t
& VT_BTYPE
) == VT_LLONG
? 64 : 32;
1736 /* cast to int to propagate signedness in following ops */
1738 /* generate shifts */
1739 vpushi(bits
- (bit_pos
+ bit_size
));
1741 vpushi(bits
- bit_size
);
1742 /* NOTE: transformed to SHR if unsigned */
1747 if (is_float(vtop
->type
.t
) &&
1748 (vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
) {
1749 /* CPUs usually cannot use float constants, so we store them
1750 generically in data segment */
1751 init_params p
= { rodata_section
};
1752 unsigned long offset
;
1753 size
= type_size(&vtop
->type
, &align
);
1755 size
= 0, align
= 1;
1756 offset
= section_add(p
.sec
, size
, align
);
1757 vpush_ref(&vtop
->type
, p
.sec
, offset
, size
);
1759 init_putv(&p
, &vtop
->type
, offset
);
1762 #ifdef CONFIG_TCC_BCHECK
1763 if (vtop
->r
& VT_MUSTBOUND
)
1767 bt
= vtop
->type
.t
& VT_BTYPE
;
1769 #ifdef TCC_TARGET_RISCV64
1771 if (bt
== VT_LDOUBLE
&& rc
== RC_FLOAT
)
1774 rc2
= RC2_TYPE(bt
, rc
);
1776 /* need to reload if:
1778 - lvalue (need to dereference pointer)
1779 - already a register, but not in the right class */
1780 r
= vtop
->r
& VT_VALMASK
;
1781 r_ok
= !(vtop
->r
& VT_LVAL
) && (r
< VT_CONST
) && (reg_classes
[r
] & rc
);
1782 r2_ok
= !rc2
|| ((vtop
->r2
< VT_CONST
) && (reg_classes
[vtop
->r2
] & rc2
));
1784 if (!r_ok
|| !r2_ok
) {
1788 int load_type
= (bt
== VT_QFLOAT
) ? VT_DOUBLE
: VT_PTRDIFF_T
;
1789 int original_type
= vtop
->type
.t
;
1791 /* two register type load :
1792 expand to two words temporarily */
1793 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
) {
1795 unsigned long long ll
= vtop
->c
.i
;
1796 vtop
->c
.i
= ll
; /* first word */
1798 vtop
->r
= r
; /* save register value */
1799 vpushi(ll
>> 32); /* second word */
1800 } else if (vtop
->r
& VT_LVAL
) {
1801 /* We do not want to modify the long long pointer here.
1802 So we save any other instances down the stack */
1803 save_reg_upstack(vtop
->r
, 1);
1804 /* load from memory */
1805 vtop
->type
.t
= load_type
;
1808 vtop
[-1].r
= r
; /* save register value */
1809 /* increment pointer to get second word */
1810 vtop
->type
.t
= VT_PTRDIFF_T
;
1815 vtop
->type
.t
= load_type
;
1817 /* move registers */
1820 if (r2_ok
&& vtop
->r2
< VT_CONST
)
1823 vtop
[-1].r
= r
; /* save register value */
1824 vtop
->r
= vtop
[-1].r2
;
1826 /* Allocate second register. Here we rely on the fact that
1827 get_reg() tries first to free r2 of an SValue. */
1831 /* write second register */
1834 vtop
->type
.t
= original_type
;
1836 if (vtop
->r
== VT_CMP
)
1838 /* one register type load */
1843 #ifdef TCC_TARGET_C67
1844 /* uses register pairs for doubles */
1845 if (bt
== VT_DOUBLE
)
1852 /* generate vtop[-1] and vtop[0] in resp. classes rc1 and rc2 */
1853 ST_FUNC
void gv2(int rc1
, int rc2
)
1855 /* generate more generic register first. But VT_JMP or VT_CMP
1856 values must be generated first in all cases to avoid possible
1858 if (vtop
->r
!= VT_CMP
&& rc1
<= rc2
) {
1863 /* test if reload is needed for first register */
1864 if ((vtop
[-1].r
& VT_VALMASK
) >= VT_CONST
) {
1874 /* test if reload is needed for first register */
1875 if ((vtop
[0].r
& VT_VALMASK
) >= VT_CONST
) {
1882 /* expand 64bit on stack in two ints */
1883 ST_FUNC
void lexpand(void)
1886 u
= vtop
->type
.t
& (VT_DEFSIGN
| VT_UNSIGNED
);
1887 v
= vtop
->r
& (VT_VALMASK
| VT_LVAL
);
1888 if (v
== VT_CONST
) {
1891 } else if (v
== (VT_LVAL
|VT_CONST
) || v
== (VT_LVAL
|VT_LOCAL
)) {
1897 vtop
[0].r
= vtop
[-1].r2
;
1898 vtop
[0].r2
= vtop
[-1].r2
= VT_CONST
;
1900 vtop
[0].type
.t
= vtop
[-1].type
.t
= VT_INT
| u
;
1905 /* build a long long from two ints */
1906 static void lbuild(int t
)
1908 gv2(RC_INT
, RC_INT
);
1909 vtop
[-1].r2
= vtop
[0].r
;
1910 vtop
[-1].type
.t
= t
;
1915 /* convert stack entry to register and duplicate its value in another
1917 static void gv_dup(void)
1923 if ((t
& VT_BTYPE
) == VT_LLONG
) {
1924 if (t
& VT_BITFIELD
) {
1934 /* stack: H L L1 H1 */
1944 /* duplicate value */
1954 /* generate CPU independent (unsigned) long long operations */
1955 static void gen_opl(int op
)
1957 int t
, a
, b
, op1
, c
, i
;
1959 unsigned short reg_iret
= REG_IRET
;
1960 unsigned short reg_lret
= REG_IRE2
;
1966 func
= TOK___divdi3
;
1969 func
= TOK___udivdi3
;
1972 func
= TOK___moddi3
;
1975 func
= TOK___umoddi3
;
1982 /* call generic long long function */
1983 vpush_helper_func(func
);
1988 vtop
->r2
= reg_lret
;
1996 //pv("gen_opl A",0,2);
2002 /* stack: L1 H1 L2 H2 */
2007 vtop
[-2] = vtop
[-3];
2010 /* stack: H1 H2 L1 L2 */
2011 //pv("gen_opl B",0,4);
2017 /* stack: H1 H2 L1 L2 ML MH */
2020 /* stack: ML MH H1 H2 L1 L2 */
2024 /* stack: ML MH H1 L2 H2 L1 */
2029 /* stack: ML MH M1 M2 */
2032 } else if (op
== '+' || op
== '-') {
2033 /* XXX: add non carry method too (for MIPS or alpha) */
2039 /* stack: H1 H2 (L1 op L2) */
2042 gen_op(op1
+ 1); /* TOK_xxxC2 */
2045 /* stack: H1 H2 (L1 op L2) */
2048 /* stack: (L1 op L2) H1 H2 */
2050 /* stack: (L1 op L2) (H1 op H2) */
2058 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
2059 t
= vtop
[-1].type
.t
;
2063 /* stack: L H shift */
2065 /* constant: simpler */
2066 /* NOTE: all comments are for SHL. the other cases are
2067 done by swapping words */
2078 if (op
!= TOK_SAR
) {
2111 /* XXX: should provide a faster fallback on x86 ? */
2114 func
= TOK___ashrdi3
;
2117 func
= TOK___lshrdi3
;
2120 func
= TOK___ashldi3
;
2126 /* compare operations */
2132 /* stack: L1 H1 L2 H2 */
2134 vtop
[-1] = vtop
[-2];
2136 /* stack: L1 L2 H1 H2 */
2140 /* when values are equal, we need to compare low words. since
2141 the jump is inverted, we invert the test too. */
2144 else if (op1
== TOK_GT
)
2146 else if (op1
== TOK_ULT
)
2148 else if (op1
== TOK_UGT
)
2158 /* generate non equal test */
2160 vset_VT_CMP(TOK_NE
);
2164 /* compare low. Always unsigned */
2168 else if (op1
== TOK_LE
)
2170 else if (op1
== TOK_GT
)
2172 else if (op1
== TOK_GE
)
2175 #if 0//def TCC_TARGET_I386
2176 if (op
== TOK_NE
) { gsym(b
); break; }
2177 if (op
== TOK_EQ
) { gsym(a
); break; }
/* Signed 64-bit division emulated on unsigned operands: divide the
   magnitudes, then negate the quotient iff the operand signs differ.
   Unsigned negation wraps, so INT64_MIN is handled correctly.
   Caller must guarantee b != 0. */
static uint64_t gen_opic_sdiv(uint64_t a, uint64_t b)
{
    uint64_t ua = (a >> 63) ? 0 - a : a;  /* |a| */
    uint64_t ub = (b >> 63) ? 0 - b : b;  /* |b| */
    uint64_t q = ua / ub;
    return ((a ^ b) >> 63) ? 0 - q : q;
}
/* Signed 64-bit "less than" computed on unsigned operands: XORing the
   sign bit maps the signed ordering onto the unsigned ordering. */
static int gen_opic_lt(uint64_t a, uint64_t b)
{
    const uint64_t sign = (uint64_t)1 << 63;
    return (a ^ sign) < (b ^ sign);
}
2197 /* handle integer constant optimizations and various machine
2199 static void gen_opic(int op
)
2201 SValue
*v1
= vtop
- 1;
2203 int t1
= v1
->type
.t
& VT_BTYPE
;
2204 int t2
= v2
->type
.t
& VT_BTYPE
;
2205 int c1
= (v1
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2206 int c2
= (v2
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2207 int nonconst
= (v1
->r
| v2
->r
) & VT_NONCONST
;
2208 uint64_t l1
= c1
? v1
->c
.i
: 0;
2209 uint64_t l2
= c2
? v2
->c
.i
: 0;
2210 int shm
= (t1
== VT_LLONG
) ? 63 : 31;
2212 if (t1
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t1
!= VT_PTR
))
2213 l1
= ((uint32_t)l1
|
2214 (v1
->type
.t
& VT_UNSIGNED
? 0 : -(l1
& 0x80000000)));
2215 if (t2
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t2
!= VT_PTR
))
2216 l2
= ((uint32_t)l2
|
2217 (v2
->type
.t
& VT_UNSIGNED
? 0 : -(l2
& 0x80000000)));
2221 case '+': l1
+= l2
; break;
2222 case '-': l1
-= l2
; break;
2223 case '&': l1
&= l2
; break;
2224 case '^': l1
^= l2
; break;
2225 case '|': l1
|= l2
; break;
2226 case '*': l1
*= l2
; break;
2233 /* if division by zero, generate explicit division */
2235 if (const_wanted
&& !(nocode_wanted
& unevalmask
))
2236 tcc_error("division by zero in constant");
2240 default: l1
= gen_opic_sdiv(l1
, l2
); break;
2241 case '%': l1
= l1
- l2
* gen_opic_sdiv(l1
, l2
); break;
2242 case TOK_UDIV
: l1
= l1
/ l2
; break;
2243 case TOK_UMOD
: l1
= l1
% l2
; break;
2246 case TOK_SHL
: l1
<<= (l2
& shm
); break;
2247 case TOK_SHR
: l1
>>= (l2
& shm
); break;
2249 l1
= (l1
>> 63) ? ~(~l1
>> (l2
& shm
)) : l1
>> (l2
& shm
);
2252 case TOK_ULT
: l1
= l1
< l2
; break;
2253 case TOK_UGE
: l1
= l1
>= l2
; break;
2254 case TOK_EQ
: l1
= l1
== l2
; break;
2255 case TOK_NE
: l1
= l1
!= l2
; break;
2256 case TOK_ULE
: l1
= l1
<= l2
; break;
2257 case TOK_UGT
: l1
= l1
> l2
; break;
2258 case TOK_LT
: l1
= gen_opic_lt(l1
, l2
); break;
2259 case TOK_GE
: l1
= !gen_opic_lt(l1
, l2
); break;
2260 case TOK_LE
: l1
= !gen_opic_lt(l2
, l1
); break;
2261 case TOK_GT
: l1
= gen_opic_lt(l2
, l1
); break;
2263 case TOK_LAND
: l1
= l1
&& l2
; break;
2264 case TOK_LOR
: l1
= l1
|| l2
; break;
2268 if (t1
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t1
!= VT_PTR
))
2269 l1
= ((uint32_t)l1
|
2270 (v1
->type
.t
& VT_UNSIGNED
? 0 : -(l1
& 0x80000000)));
2274 nonconst
= VT_NONCONST
;
2275 /* if commutative ops, put c2 as constant */
2276 if (c1
&& (op
== '+' || op
== '&' || op
== '^' ||
2277 op
== '|' || op
== '*' || op
== TOK_EQ
|| op
== TOK_NE
)) {
2279 c2
= c1
; //c = c1, c1 = c2, c2 = c;
2280 l2
= l1
; //l = l1, l1 = l2, l2 = l;
2282 if (!const_wanted
&&
2284 (op
== TOK_SHL
|| op
== TOK_SHR
|| op
== TOK_SAR
)) ||
2285 (l1
== -1 && op
== TOK_SAR
))) {
2286 /* treat (0 << x), (0 >> x) and (-1 >> x) as constant */
2288 } else if (!const_wanted
&&
2289 c2
&& ((l2
== 0 && (op
== '&' || op
== '*')) ||
2291 (l2
== -1 || (l2
== 0xFFFFFFFF && t2
!= VT_LLONG
))) ||
2292 (l2
== 1 && (op
== '%' || op
== TOK_UMOD
)))) {
2293 /* treat (x & 0), (x * 0), (x | -1) and (x % 1) as constant */
2298 } else if (c2
&& (((op
== '*' || op
== '/' || op
== TOK_UDIV
||
2301 ((op
== '+' || op
== '-' || op
== '|' || op
== '^' ||
2302 op
== TOK_SHL
|| op
== TOK_SHR
|| op
== TOK_SAR
) &&
2305 (l2
== -1 || (l2
== 0xFFFFFFFF && t2
!= VT_LLONG
))))) {
2306 /* filter out NOP operations like x*1, x-0, x&-1... */
2308 } else if (c2
&& (op
== '*' || op
== TOK_PDIV
|| op
== TOK_UDIV
)) {
2309 /* try to use shifts instead of muls or divs */
2310 if (l2
> 0 && (l2
& (l2
- 1)) == 0) {
2319 else if (op
== TOK_PDIV
)
2325 } else if (c2
&& (op
== '+' || op
== '-') &&
2326 (((vtop
[-1].r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == (VT_CONST
| VT_SYM
))
2327 || (vtop
[-1].r
& (VT_VALMASK
| VT_LVAL
)) == VT_LOCAL
)) {
2328 /* symbol + constant case */
2332 /* The backends can't always deal with addends to symbols
2333 larger than +-1<<31. Don't construct such. */
2340 /* call low level op generator */
2341 if (t1
== VT_LLONG
|| t2
== VT_LLONG
||
2342 (PTR_SIZE
== 8 && (t1
== VT_PTR
|| t2
== VT_PTR
)))
2348 if (vtop
->r
== VT_CONST
)
2349 vtop
->r
|= nonconst
;
2352 #if defined TCC_TARGET_X86_64 || defined TCC_TARGET_I386
2353 # define gen_negf gen_opf
2354 #elif defined TCC_TARGET_ARM
/* Floating-point negation for the ARM backend: emit "0 - x"; the ARM
   code generator recognizes this pattern and replaces it by vneg.
   (op is unused by this variant.) */
void gen_negf(int op)
{
    vpushi(0);
    vswap();
    gen_op('-');
}
2361 /* XXX: implement in gen_opf() for other backends too */
2362 void gen_negf(int op
)
2364 /* In IEEE negate(x) isn't subtract(0,x). Without NaNs it's
2365 subtract(-0, x), but with them it's really a sign flip
2366 operation. We implement this with bit manipulation and have
2367 to do some type reinterpretation for this, which TCC can do
2370 int align
, size
, bt
;
2372 size
= type_size(&vtop
->type
, &align
);
2373 bt
= vtop
->type
.t
& VT_BTYPE
;
2374 save_reg(gv(RC_TYPE(bt
)));
2376 incr_bf_adr(size
- 1);
2378 vpushi(0x80); /* flip sign */
2385 /* generate a floating point operation with constant propagation */
2386 static void gen_opif(int op
)
2390 #if defined _MSC_VER && defined __x86_64__
2391 /* avoid bad optimization with f1 -= f2 for f1:-0.0, f2:0.0 */
2401 /* currently, we cannot do computations with forward symbols */
2402 c1
= (v1
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2403 c2
= (v2
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2405 if (v1
->type
.t
== VT_FLOAT
) {
2408 } else if (v1
->type
.t
== VT_DOUBLE
) {
2415 /* NOTE: we only do constant propagation if finite number (not
2416 NaN or infinity) (ANSI spec) */
2417 if (!(ieee_finite(f1
) || !ieee_finite(f2
)) && !const_wanted
)
2420 case '+': f1
+= f2
; break;
2421 case '-': f1
-= f2
; break;
2422 case '*': f1
*= f2
; break;
2425 union { float f
; unsigned u
; } x1
, x2
, y
;
2426 /* If not in initializer we need to potentially generate
2427 FP exceptions at runtime, otherwise we want to fold. */
2430 /* the run-time result of 0.0/0.0 on x87, also of other compilers
2431 when used to compile the f1 /= f2 below, would be -nan */
2432 x1
.f
= f1
, x2
.f
= f2
;
2434 y
.u
= 0x7fc00000; /* nan */
2436 y
.u
= 0x7f800000; /* infinity */
2437 y
.u
|= (x1
.u
^ x2
.u
) & 0x80000000; /* set sign */
2446 /* XXX: also handles tests ? */
2452 /* XXX: overflow test ? */
2453 if (v1
->type
.t
== VT_FLOAT
) {
2455 } else if (v1
->type
.t
== VT_DOUBLE
) {
2462 if (op
== TOK_NEG
) {
2470 /* print a type. If 'varstr' is not NULL, then the variable is also
2471 printed in the type */
2473 /* XXX: add array and function pointers */
2474 static void type_to_str(char *buf
, int buf_size
,
2475 CType
*type
, const char *varstr
)
2487 pstrcat(buf
, buf_size
, "extern ");
2489 pstrcat(buf
, buf_size
, "static ");
2491 pstrcat(buf
, buf_size
, "typedef ");
2493 pstrcat(buf
, buf_size
, "inline ");
2495 if (t
& VT_VOLATILE
)
2496 pstrcat(buf
, buf_size
, "volatile ");
2497 if (t
& VT_CONSTANT
)
2498 pstrcat(buf
, buf_size
, "const ");
2500 if (((t
& VT_DEFSIGN
) && bt
== VT_BYTE
)
2501 || ((t
& VT_UNSIGNED
)
2502 && (bt
== VT_SHORT
|| bt
== VT_INT
|| bt
== VT_LLONG
)
2505 pstrcat(buf
, buf_size
, (t
& VT_UNSIGNED
) ? "unsigned " : "signed ");
2507 buf_size
-= strlen(buf
);
2543 tstr
= "long double";
2545 pstrcat(buf
, buf_size
, tstr
);
2552 pstrcat(buf
, buf_size
, tstr
);
2553 v
= type
->ref
->v
& ~SYM_STRUCT
;
2554 if (v
>= SYM_FIRST_ANOM
)
2555 pstrcat(buf
, buf_size
, "<anonymous>");
2557 pstrcat(buf
, buf_size
, get_tok_str(v
, NULL
));
2562 if (varstr
&& '*' == *varstr
) {
2563 pstrcat(buf1
, sizeof(buf1
), "(");
2564 pstrcat(buf1
, sizeof(buf1
), varstr
);
2565 pstrcat(buf1
, sizeof(buf1
), ")");
2567 pstrcat(buf1
, buf_size
, "(");
2569 while (sa
!= NULL
) {
2571 type_to_str(buf2
, sizeof(buf2
), &sa
->type
, NULL
);
2572 pstrcat(buf1
, sizeof(buf1
), buf2
);
2575 pstrcat(buf1
, sizeof(buf1
), ", ");
2577 if (s
->f
.func_type
== FUNC_ELLIPSIS
)
2578 pstrcat(buf1
, sizeof(buf1
), ", ...");
2579 pstrcat(buf1
, sizeof(buf1
), ")");
2580 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
2584 if (t
& (VT_ARRAY
|VT_VLA
)) {
2585 if (varstr
&& '*' == *varstr
)
2586 snprintf(buf1
, sizeof(buf1
), "(%s)[%d]", varstr
, s
->c
);
2588 snprintf(buf1
, sizeof(buf1
), "%s[%d]", varstr
? varstr
: "", s
->c
);
2589 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
2592 pstrcpy(buf1
, sizeof(buf1
), "*");
2593 if (t
& VT_CONSTANT
)
2594 pstrcat(buf1
, buf_size
, "const ");
2595 if (t
& VT_VOLATILE
)
2596 pstrcat(buf1
, buf_size
, "volatile ");
2598 pstrcat(buf1
, sizeof(buf1
), varstr
);
2599 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
2603 pstrcat(buf
, buf_size
, " ");
2604 pstrcat(buf
, buf_size
, varstr
);
2609 static void type_incompatibility_error(CType
* st
, CType
* dt
, const char* fmt
)
2611 char buf1
[256], buf2
[256];
2612 type_to_str(buf1
, sizeof(buf1
), st
, NULL
);
2613 type_to_str(buf2
, sizeof(buf2
), dt
, NULL
);
2614 tcc_error(fmt
, buf1
, buf2
);
2617 static void type_incompatibility_warning(CType
* st
, CType
* dt
, const char* fmt
)
2619 char buf1
[256], buf2
[256];
2620 type_to_str(buf1
, sizeof(buf1
), st
, NULL
);
2621 type_to_str(buf2
, sizeof(buf2
), dt
, NULL
);
2622 tcc_warning(fmt
, buf1
, buf2
);
2625 static int pointed_size(CType
*type
)
2628 return type_size(pointed_type(type
), &align
);
2631 static inline int is_null_pointer(SValue
*p
)
2633 if ((p
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
| VT_NONCONST
)) != VT_CONST
)
2635 return ((p
->type
.t
& VT_BTYPE
) == VT_INT
&& (uint32_t)p
->c
.i
== 0) ||
2636 ((p
->type
.t
& VT_BTYPE
) == VT_LLONG
&& p
->c
.i
== 0) ||
2637 ((p
->type
.t
& VT_BTYPE
) == VT_PTR
&&
2638 (PTR_SIZE
== 4 ? (uint32_t)p
->c
.i
== 0 : p
->c
.i
== 0) &&
2639 ((pointed_type(&p
->type
)->t
& VT_BTYPE
) == VT_VOID
) &&
2640 0 == (pointed_type(&p
->type
)->t
& (VT_CONSTANT
| VT_VOLATILE
))
2644 /* compare function types. OLD functions match any new functions */
2645 static int is_compatible_func(CType
*type1
, CType
*type2
)
2651 if (s1
->f
.func_call
!= s2
->f
.func_call
)
2653 if (s1
->f
.func_type
!= s2
->f
.func_type
2654 && s1
->f
.func_type
!= FUNC_OLD
2655 && s2
->f
.func_type
!= FUNC_OLD
)
2658 if (!is_compatible_unqualified_types(&s1
->type
, &s2
->type
))
2660 if (s1
->f
.func_type
== FUNC_OLD
|| s2
->f
.func_type
== FUNC_OLD
)
2671 /* return true if type1 and type2 are the same. If unqualified is
2672 true, qualifiers on the types are ignored.
2674 static int compare_types(CType
*type1
, CType
*type2
, int unqualified
)
2678 t1
= type1
->t
& VT_TYPE
;
2679 t2
= type2
->t
& VT_TYPE
;
2681 /* strip qualifiers before comparing */
2682 t1
&= ~(VT_CONSTANT
| VT_VOLATILE
);
2683 t2
&= ~(VT_CONSTANT
| VT_VOLATILE
);
2686 /* Default Vs explicit signedness only matters for char */
2687 if ((t1
& VT_BTYPE
) != VT_BYTE
) {
2691 /* XXX: bitfields ? */
2696 && !(type1
->ref
->c
< 0
2697 || type2
->ref
->c
< 0
2698 || type1
->ref
->c
== type2
->ref
->c
))
2701 /* test more complicated cases */
2702 bt1
= t1
& VT_BTYPE
;
2703 if (bt1
== VT_PTR
) {
2704 type1
= pointed_type(type1
);
2705 type2
= pointed_type(type2
);
2706 return is_compatible_types(type1
, type2
);
2707 } else if (bt1
== VT_STRUCT
) {
2708 return (type1
->ref
== type2
->ref
);
2709 } else if (bt1
== VT_FUNC
) {
2710 return is_compatible_func(type1
, type2
);
2711 } else if (IS_ENUM(type1
->t
) && IS_ENUM(type2
->t
)) {
2712 /* If both are enums then they must be the same, if only one is then
2713 t1 and t2 must be equal, which was checked above already. */
2714 return type1
->ref
== type2
->ref
;
2720 /* Check if OP1 and OP2 can be "combined" with operation OP, the combined
2721 type is stored in DEST if non-null (except for pointer plus/minus) . */
2722 static int combine_types(CType
*dest
, SValue
*op1
, SValue
*op2
, int op
)
2724 CType
*type1
= &op1
->type
, *type2
= &op2
->type
, type
;
2725 int t1
= type1
->t
, t2
= type2
->t
, bt1
= t1
& VT_BTYPE
, bt2
= t2
& VT_BTYPE
;
2731 if (bt1
== VT_VOID
|| bt2
== VT_VOID
) {
2732 ret
= op
== '?' ? 1 : 0;
2733 /* NOTE: as an extension, we accept void on only one side */
2735 } else if (bt1
== VT_PTR
|| bt2
== VT_PTR
) {
2736 if (op
== '+') ; /* Handled in caller */
2737 /* http://port70.net/~nsz/c/c99/n1256.html#6.5.15p6 */
2738 /* If one is a null ptr constant the result type is the other. */
2739 else if (is_null_pointer (op2
)) type
= *type1
;
2740 else if (is_null_pointer (op1
)) type
= *type2
;
2741 else if (bt1
!= bt2
) {
2742 /* accept comparison or cond-expr between pointer and integer
2744 if ((op
== '?' || TOK_ISCOND(op
))
2745 && (is_integer_btype(bt1
) || is_integer_btype(bt2
)))
2746 tcc_warning("pointer/integer mismatch in %s",
2747 op
== '?' ? "conditional expression" : "comparison");
2748 else if (op
!= '-' || !is_integer_btype(bt2
))
2750 type
= *(bt1
== VT_PTR
? type1
: type2
);
2752 CType
*pt1
= pointed_type(type1
);
2753 CType
*pt2
= pointed_type(type2
);
2754 int pbt1
= pt1
->t
& VT_BTYPE
;
2755 int pbt2
= pt2
->t
& VT_BTYPE
;
2756 int newquals
, copied
= 0;
2757 if (pbt1
!= VT_VOID
&& pbt2
!= VT_VOID
2758 && !compare_types(pt1
, pt2
, 1/*unqualif*/)) {
2759 if (op
!= '?' && !TOK_ISCOND(op
))
2762 type_incompatibility_warning(type1
, type2
,
2764 ? "pointer type mismatch in conditional expression ('%s' and '%s')"
2765 : "pointer type mismatch in comparison('%s' and '%s')");
2768 /* pointers to void get preferred, otherwise the
2769 pointed to types minus qualifs should be compatible */
2770 type
= *((pbt1
== VT_VOID
) ? type1
: type2
);
2771 /* combine qualifs */
2772 newquals
= ((pt1
->t
| pt2
->t
) & (VT_CONSTANT
| VT_VOLATILE
));
2773 if ((~pointed_type(&type
)->t
& (VT_CONSTANT
| VT_VOLATILE
))
2776 /* copy the pointer target symbol */
2777 type
.ref
= sym_push(SYM_FIELD
, &type
.ref
->type
,
2780 pointed_type(&type
)->t
|= newquals
;
2782 /* pointers to incomplete arrays get converted to
2783 pointers to completed ones if possible */
2784 if (pt1
->t
& VT_ARRAY
2785 && pt2
->t
& VT_ARRAY
2786 && pointed_type(&type
)->ref
->c
< 0
2787 && (pt1
->ref
->c
> 0 || pt2
->ref
->c
> 0))
2790 type
.ref
= sym_push(SYM_FIELD
, &type
.ref
->type
,
2792 pointed_type(&type
)->ref
=
2793 sym_push(SYM_FIELD
, &pointed_type(&type
)->ref
->type
,
2794 0, pointed_type(&type
)->ref
->c
);
2795 pointed_type(&type
)->ref
->c
=
2796 0 < pt1
->ref
->c
? pt1
->ref
->c
: pt2
->ref
->c
;
2802 } else if (bt1
== VT_STRUCT
|| bt2
== VT_STRUCT
) {
2803 if (op
!= '?' || !compare_types(type1
, type2
, 1))
2806 } else if (is_float(bt1
) || is_float(bt2
)) {
2807 if (bt1
== VT_LDOUBLE
|| bt2
== VT_LDOUBLE
) {
2808 type
.t
= VT_LDOUBLE
;
2809 } else if (bt1
== VT_DOUBLE
|| bt2
== VT_DOUBLE
) {
2814 } else if (bt1
== VT_LLONG
|| bt2
== VT_LLONG
) {
2815 /* cast to biggest op */
2816 type
.t
= VT_LLONG
| VT_LONG
;
2817 if (bt1
== VT_LLONG
)
2819 if (bt2
== VT_LLONG
)
2821 /* convert to unsigned if it does not fit in a long long */
2822 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
) ||
2823 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
))
2824 type
.t
|= VT_UNSIGNED
;
2826 /* integer operations */
2827 type
.t
= VT_INT
| (VT_LONG
& (t1
| t2
));
2828 /* convert to unsigned if it does not fit in an integer */
2829 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
) ||
2830 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
))
2831 type
.t
|= VT_UNSIGNED
;
2838 /* generic gen_op: handles types problems */
2839 ST_FUNC
void gen_op(int op
)
2841 int t1
, t2
, bt1
, bt2
, t
;
2842 CType type1
, combtype
;
2845 t1
= vtop
[-1].type
.t
;
2846 t2
= vtop
[0].type
.t
;
2847 bt1
= t1
& VT_BTYPE
;
2848 bt2
= t2
& VT_BTYPE
;
2850 if (bt1
== VT_FUNC
|| bt2
== VT_FUNC
) {
2851 if (bt2
== VT_FUNC
) {
2852 mk_pointer(&vtop
->type
);
2855 if (bt1
== VT_FUNC
) {
2857 mk_pointer(&vtop
->type
);
2862 } else if (!combine_types(&combtype
, vtop
- 1, vtop
, op
)) {
2863 tcc_error_noabort("invalid operand types for binary operation");
2865 } else if (bt1
== VT_PTR
|| bt2
== VT_PTR
) {
2866 /* at least one operand is a pointer */
2867 /* relational op: must be both pointers */
2871 /* if both pointers, then it must be the '-' op */
2872 if (bt1
== VT_PTR
&& bt2
== VT_PTR
) {
2874 tcc_error("cannot use pointers here");
2875 vpush_type_size(pointed_type(&vtop
[-1].type
), &align
);
2878 vtop
->type
.t
= VT_PTRDIFF_T
;
2882 /* exactly one pointer : must be '+' or '-'. */
2883 if (op
!= '-' && op
!= '+')
2884 tcc_error("cannot use pointers here");
2885 /* Put pointer as first operand */
2886 if (bt2
== VT_PTR
) {
2888 t
= t1
, t1
= t2
, t2
= t
;
2891 if ((vtop
[0].type
.t
& VT_BTYPE
) == VT_LLONG
)
2892 /* XXX: truncate here because gen_opl can't handle ptr + long long */
2895 type1
= vtop
[-1].type
;
2896 vpush_type_size(pointed_type(&vtop
[-1].type
), &align
);
2898 #ifdef CONFIG_TCC_BCHECK
2899 if (tcc_state
->do_bounds_check
&& !const_wanted
) {
2900 /* if bounded pointers, we generate a special code to
2907 gen_bounded_ptr_add();
2913 type1
.t
&= ~(VT_ARRAY
|VT_VLA
);
2914 /* put again type if gen_opic() swaped operands */
2918 /* floats can only be used for a few operations */
2919 if (is_float(combtype
.t
)
2920 && op
!= '+' && op
!= '-' && op
!= '*' && op
!= '/'
2922 tcc_error("invalid operands for binary operation");
2923 else if (op
== TOK_SHR
|| op
== TOK_SAR
|| op
== TOK_SHL
) {
2924 t
= bt1
== VT_LLONG
? VT_LLONG
: VT_INT
;
2925 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (t
| VT_UNSIGNED
))
2927 t
|= (VT_LONG
& t1
);
2931 t
= t2
= combtype
.t
;
2932 /* XXX: currently, some unsigned operations are explicit, so
2933 we modify them here */
2934 if (t
& VT_UNSIGNED
) {
2941 else if (op
== TOK_LT
)
2943 else if (op
== TOK_GT
)
2945 else if (op
== TOK_LE
)
2947 else if (op
== TOK_GE
)
2953 /* special case for shifts and long long: we keep the shift as
2955 if (op
== TOK_SHR
|| op
== TOK_SAR
|| op
== TOK_SHL
)
2962 if (TOK_ISCOND(op
)) {
2963 /* relational op: the result is an int */
2964 vtop
->type
.t
= VT_INT
;
2969 // Make sure that we have converted to an rvalue:
2970 if (vtop
->r
& VT_LVAL
)
2971 gv(is_float(vtop
->type
.t
& VT_BTYPE
) ? RC_FLOAT
: RC_INT
);
2974 #if defined TCC_TARGET_ARM64 || defined TCC_TARGET_RISCV64 || defined TCC_TARGET_ARM
2975 #define gen_cvt_itof1 gen_cvt_itof
2977 /* generic itof for unsigned long long case */
2978 static void gen_cvt_itof1(int t
)
2980 if ((vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
)) ==
2981 (VT_LLONG
| VT_UNSIGNED
)) {
2984 vpush_helper_func(TOK___floatundisf
);
2985 #if LDOUBLE_SIZE != 8
2986 else if (t
== VT_LDOUBLE
)
2987 vpush_helper_func(TOK___floatundixf
);
2990 vpush_helper_func(TOK___floatundidf
);
3001 #if defined TCC_TARGET_ARM64 || defined TCC_TARGET_RISCV64
3002 #define gen_cvt_ftoi1 gen_cvt_ftoi
3004 /* generic ftoi for unsigned long long case */
3005 static void gen_cvt_ftoi1(int t
)
3008 if (t
== (VT_LLONG
| VT_UNSIGNED
)) {
3009 /* not handled natively */
3010 st
= vtop
->type
.t
& VT_BTYPE
;
3012 vpush_helper_func(TOK___fixunssfdi
);
3013 #if LDOUBLE_SIZE != 8
3014 else if (st
== VT_LDOUBLE
)
3015 vpush_helper_func(TOK___fixunsxfdi
);
3018 vpush_helper_func(TOK___fixunsdfdi
);
3029 /* special delayed cast for char/short */
3030 static void force_charshort_cast(void)
3032 int sbt
= BFGET(vtop
->r
, VT_MUSTCAST
) == 2 ? VT_LLONG
: VT_INT
;
3033 int dbt
= vtop
->type
.t
;
3034 vtop
->r
&= ~VT_MUSTCAST
;
3036 gen_cast_s(dbt
== VT_BOOL
? VT_BYTE
|VT_UNSIGNED
: dbt
);
3040 static void gen_cast_s(int t
)
3048 /* cast 'vtop' to 'type'. Casting to bitfields is forbidden. */
3049 static void gen_cast(CType
*type
)
3051 int sbt
, dbt
, sf
, df
, c
;
3052 int dbt_bt
, sbt_bt
, ds
, ss
, bits
, trunc
;
3054 /* special delayed cast for char/short */
3055 if (vtop
->r
& VT_MUSTCAST
)
3056 force_charshort_cast();
3058 /* bitfields first get cast to ints */
3059 if (vtop
->type
.t
& VT_BITFIELD
)
3062 dbt
= type
->t
& (VT_BTYPE
| VT_UNSIGNED
);
3063 sbt
= vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
);
3071 dbt_bt
= dbt
& VT_BTYPE
;
3072 sbt_bt
= sbt
& VT_BTYPE
;
3073 if (dbt_bt
== VT_VOID
)
3075 if (sbt_bt
== VT_VOID
) {
3077 cast_error(&vtop
->type
, type
);
3080 c
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
3081 #if !defined TCC_IS_NATIVE && !defined TCC_IS_NATIVE_387
3082 c
&= (dbt
!= VT_LDOUBLE
) | !!nocode_wanted
;
3085 /* constant case: we can do it now */
3086 /* XXX: in ISOC, cannot do it if error in convert */
3087 if (sbt
== VT_FLOAT
)
3088 vtop
->c
.ld
= vtop
->c
.f
;
3089 else if (sbt
== VT_DOUBLE
)
3090 vtop
->c
.ld
= vtop
->c
.d
;
3093 if (sbt_bt
== VT_LLONG
) {
3094 if ((sbt
& VT_UNSIGNED
) || !(vtop
->c
.i
>> 63))
3095 vtop
->c
.ld
= vtop
->c
.i
;
3097 vtop
->c
.ld
= -(long double)-vtop
->c
.i
;
3099 if ((sbt
& VT_UNSIGNED
) || !(vtop
->c
.i
>> 31))
3100 vtop
->c
.ld
= (uint32_t)vtop
->c
.i
;
3102 vtop
->c
.ld
= -(long double)-(uint32_t)vtop
->c
.i
;
3105 if (dbt
== VT_FLOAT
)
3106 vtop
->c
.f
= (float)vtop
->c
.ld
;
3107 else if (dbt
== VT_DOUBLE
)
3108 vtop
->c
.d
= (double)vtop
->c
.ld
;
3109 } else if (sf
&& dbt
== VT_BOOL
) {
3110 vtop
->c
.i
= (vtop
->c
.ld
!= 0);
3113 vtop
->c
.i
= vtop
->c
.ld
;
3114 else if (sbt_bt
== VT_LLONG
|| (PTR_SIZE
== 8 && sbt
== VT_PTR
))
3116 else if (sbt
& VT_UNSIGNED
)
3117 vtop
->c
.i
= (uint32_t)vtop
->c
.i
;
3119 vtop
->c
.i
= ((uint32_t)vtop
->c
.i
| -(vtop
->c
.i
& 0x80000000));
3121 if (dbt_bt
== VT_LLONG
|| (PTR_SIZE
== 8 && dbt
== VT_PTR
))
3123 else if (dbt
== VT_BOOL
)
3124 vtop
->c
.i
= (vtop
->c
.i
!= 0);
3126 uint32_t m
= dbt_bt
== VT_BYTE
? 0xff :
3127 dbt_bt
== VT_SHORT
? 0xffff :
3130 if (!(dbt
& VT_UNSIGNED
))
3131 vtop
->c
.i
|= -(vtop
->c
.i
& ((m
>> 1) + 1));
3136 } else if (dbt
== VT_BOOL
3137 && (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
))
3138 == (VT_CONST
| VT_SYM
)) {
3139 /* addresses are considered non-zero (see tcctest.c:sinit23) */
3145 /* cannot generate code for global or static initializers */
3146 if (nocode_wanted
& DATA_ONLY_WANTED
)
3149 /* non constant case: generate code */
3150 if (dbt
== VT_BOOL
) {
3151 gen_test_zero(TOK_NE
);
3157 /* convert from fp to fp */
3160 /* convert int to fp */
3163 /* convert fp to int */
3165 if (dbt_bt
!= VT_LLONG
&& dbt_bt
!= VT_INT
)
3168 goto again
; /* may need char/short cast */
3173 ds
= btype_size(dbt_bt
);
3174 ss
= btype_size(sbt_bt
);
3175 if (ds
== 0 || ss
== 0)
3178 if (IS_ENUM(type
->t
) && type
->ref
->c
< 0)
3179 tcc_error("cast to incomplete type");
3181 /* same size and no sign conversion needed */
3182 if (ds
== ss
&& ds
>= 4)
3184 if (dbt_bt
== VT_PTR
|| sbt_bt
== VT_PTR
) {
3185 tcc_warning("cast between pointer and integer of different size");
3186 if (sbt_bt
== VT_PTR
) {
3187 /* put integer type to allow logical operations below */
3188 vtop
->type
.t
= (PTR_SIZE
== 8 ? VT_LLONG
: VT_INT
);
3192 /* processor allows { int a = 0, b = *(char*)&a; }
3193 That means that if we cast to less width, we can just
3194 change the type and read it still later. */
3195 #define ALLOW_SUBTYPE_ACCESS 1
3197 if (ALLOW_SUBTYPE_ACCESS
&& (vtop
->r
& VT_LVAL
)) {
3198 /* value still in memory */
3202 if (ds
<= 4 && !(dbt
== (VT_SHORT
| VT_UNSIGNED
) && sbt
== VT_BYTE
)) {
3204 goto done
; /* no 64bit envolved */
3212 /* generate high word */
3213 if (sbt
& VT_UNSIGNED
) {
3222 } else if (ss
== 8) {
3223 /* from long long: just take low order word */
3231 /* need to convert from 32bit to 64bit */
3232 if (sbt
& VT_UNSIGNED
) {
3233 #if defined(TCC_TARGET_RISCV64)
3234 /* RISC-V keeps 32bit vals in registers sign-extended.
3235 So here we need a zero-extension. */
3244 ss
= ds
, ds
= 4, dbt
= sbt
;
3245 } else if (ss
== 8) {
3246 /* RISC-V keeps 32bit vals in registers sign-extended.
3247 So here we need a sign-extension for signed types and
3248 zero-extension. for unsigned types. */
3249 #if !defined(TCC_TARGET_RISCV64)
3250 trunc
= 32; /* zero upper 32 bits for non RISC-V targets */
3259 #if defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64 || defined TCC_TARGET_ARM64
3265 bits
= (ss
- ds
) * 8;
3266 /* for unsigned, gen_op will convert SAR to SHR */
3267 vtop
->type
.t
= (ss
== 8 ? VT_LLONG
: VT_INT
) | (dbt
& VT_UNSIGNED
);
3270 vpushi(bits
- trunc
);
3277 vtop
->type
.t
&= ~ ( VT_CONSTANT
| VT_VOLATILE
| VT_ARRAY
);
3280 /* return type size as known at compile time. Put alignment at 'a' */
3281 ST_FUNC
int type_size(CType
*type
, int *a
)
3286 bt
= type
->t
& VT_BTYPE
;
3287 if (bt
== VT_STRUCT
) {
3292 } else if (bt
== VT_PTR
) {
3293 if (type
->t
& VT_ARRAY
) {
3297 ts
= type_size(&s
->type
, a
);
3299 if (ts
< 0 && s
->c
< 0)
3307 } else if (IS_ENUM(type
->t
) && type
->ref
->c
< 0) {
3309 return -1; /* incomplete enum */
3310 } else if (bt
== VT_LDOUBLE
) {
3312 return LDOUBLE_SIZE
;
3313 } else if (bt
== VT_DOUBLE
|| bt
== VT_LLONG
) {
3314 #ifdef TCC_TARGET_I386
3315 #ifdef TCC_TARGET_PE
3320 #elif defined(TCC_TARGET_ARM)
3330 } else if (bt
== VT_INT
|| bt
== VT_FLOAT
) {
3333 } else if (bt
== VT_SHORT
) {
3336 } else if (bt
== VT_QLONG
|| bt
== VT_QFLOAT
) {
3340 /* char, void, function, _Bool */
3346 /* push type size as known at runtime time on top of value stack. Put
3348 static void vpush_type_size(CType
*type
, int *a
)
3350 if (type
->t
& VT_VLA
) {
3351 type_size(&type
->ref
->type
, a
);
3352 vset(&int_type
, VT_LOCAL
|VT_LVAL
, type
->ref
->c
);
3354 int size
= type_size(type
, a
);
3356 tcc_error("unknown type size");
3365 /* return the pointed type of t */
3366 static inline CType
*pointed_type(CType
*type
)
3368 return &type
->ref
->type
;
3371 /* Modify 'type' in place so that it becomes "pointer to the original
   type": push an anonymous SYM_FIELD symbol carrying the pointed-to type,
   then rebuild type->t as VT_PTR, keeping only the storage-class bits
   (VT_STORAGE) of the original.
   NOTE(review): the extracted text looks truncated here -- the pushed
   symbol 's' is presumably stored into type->ref afterwards; confirm
   against the complete source. */
3372 ST_FUNC
void mk_pointer(CType
*type
)
3375 s
= sym_push(SYM_FIELD
, type
, 0, -1);
3376 type
->t
= VT_PTR
| (type
->t
& VT_STORAGE
);
3380 /* return true if type1 and type2 are exactly the same (including
3383 static int is_compatible_types(CType
*type1
, CType
*type2
)
3385 return compare_types(type1
,type2
,0);
3388 /* return true if type1 and type2 are the same (ignoring qualifiers).
3390 static int is_compatible_unqualified_types(CType
*type1
, CType
*type2
)
3392 return compare_types(type1
,type2
,1);
3395 static void cast_error(CType
*st
, CType
*dt
)
3397 type_incompatibility_error(st
, dt
, "cannot convert '%s' to '%s'");
3400 /* verify type compatibility to store vtop in 'dt' type */
3401 static void verify_assign_cast(CType
*dt
)
3403 CType
*st
, *type1
, *type2
;
3404 int dbt
, sbt
, qualwarn
, lvl
;
3406 st
= &vtop
->type
; /* source type */
3407 dbt
= dt
->t
& VT_BTYPE
;
3408 sbt
= st
->t
& VT_BTYPE
;
3409 if (dt
->t
& VT_CONSTANT
)
3410 tcc_warning("assignment of read-only location");
3414 tcc_error("assignment to void expression");
3417 /* special cases for pointers */
3418 /* '0' can also be a pointer */
3419 if (is_null_pointer(vtop
))
3421 /* accept implicit pointer to integer cast with warning */
3422 if (is_integer_btype(sbt
)) {
3423 tcc_warning("assignment makes pointer from integer without a cast");
3426 type1
= pointed_type(dt
);
3428 type2
= pointed_type(st
);
3429 else if (sbt
== VT_FUNC
)
3430 type2
= st
; /* a function is implicitly a function pointer */
3433 if (is_compatible_types(type1
, type2
))
3435 for (qualwarn
= lvl
= 0;; ++lvl
) {
3436 if (((type2
->t
& VT_CONSTANT
) && !(type1
->t
& VT_CONSTANT
)) ||
3437 ((type2
->t
& VT_VOLATILE
) && !(type1
->t
& VT_VOLATILE
)))
3439 dbt
= type1
->t
& (VT_BTYPE
|VT_LONG
);
3440 sbt
= type2
->t
& (VT_BTYPE
|VT_LONG
);
3441 if (dbt
!= VT_PTR
|| sbt
!= VT_PTR
)
3443 type1
= pointed_type(type1
);
3444 type2
= pointed_type(type2
);
3446 if (!is_compatible_unqualified_types(type1
, type2
)) {
3447 if ((dbt
== VT_VOID
|| sbt
== VT_VOID
) && lvl
== 0) {
3448 /* void * can match anything */
3449 } else if (dbt
== sbt
3450 && is_integer_btype(sbt
& VT_BTYPE
)
3451 && IS_ENUM(type1
->t
) + IS_ENUM(type2
->t
)
3452 + !!((type1
->t
^ type2
->t
) & VT_UNSIGNED
) < 2) {
3453 /* Like GCC don't warn by default for merely changes
3454 in pointer target signedness. Do warn for different
3455 base types, though, in particular for unsigned enums
3456 and signed int targets. */
3458 tcc_warning("assignment from incompatible pointer type");
3463 tcc_warning_c(warn_discarded_qualifiers
)("assignment discards qualifiers from pointer target type");
3469 if (sbt
== VT_PTR
|| sbt
== VT_FUNC
) {
3470 tcc_warning("assignment makes integer from pointer without a cast");
3471 } else if (sbt
== VT_STRUCT
) {
3472 goto case_VT_STRUCT
;
3474 /* XXX: more tests */
3478 if (!is_compatible_unqualified_types(dt
, st
)) {
/* Cast vtop for an assignment into an lvalue of type 'dt', first
   verifying that the assignment is legal (verify_assign_cast emits the
   relevant warnings/errors).
   NOTE(review): the extracted text looks truncated -- the original
   presumably performs the actual cast after the verification step;
   confirm against the complete source. */
3486 static void gen_assign_cast(CType
*dt
)
3488 verify_assign_cast(dt
);
3492 /* store vtop in lvalue pushed on stack */
3493 ST_FUNC
void vstore(void)
3495 int sbt
, dbt
, ft
, r
, size
, align
, bit_size
, bit_pos
, delayed_cast
;
3497 ft
= vtop
[-1].type
.t
;
3498 sbt
= vtop
->type
.t
& VT_BTYPE
;
3499 dbt
= ft
& VT_BTYPE
;
3500 verify_assign_cast(&vtop
[-1].type
);
3502 if (sbt
== VT_STRUCT
) {
3503 /* if structure, only generate pointer */
3504 /* structure assignment : generate memcpy */
3505 size
= type_size(&vtop
->type
, &align
);
3506 /* destination, keep on stack() as result */
3508 #ifdef CONFIG_TCC_BCHECK
3509 if (vtop
->r
& VT_MUSTBOUND
)
3510 gbound(); /* check would be wrong after gaddrof() */
3512 vtop
->type
.t
= VT_PTR
;
3516 #ifdef CONFIG_TCC_BCHECK
3517 if (vtop
->r
& VT_MUSTBOUND
)
3520 vtop
->type
.t
= VT_PTR
;
3523 #ifdef TCC_TARGET_NATIVE_STRUCT_COPY
3525 #ifdef CONFIG_TCC_BCHECK
3526 && !tcc_state
->do_bounds_check
3529 gen_struct_copy(size
);
3535 /* Use memmove, rather than memcpy, as dest and src may be same: */
3538 vpush_helper_func(TOK_memmove8
);
3539 else if(!(align
& 3))
3540 vpush_helper_func(TOK_memmove4
);
3543 vpush_helper_func(TOK_memmove
);
3548 } else if (ft
& VT_BITFIELD
) {
3549 /* bitfield store handling */
3551 /* save lvalue as expression result (example: s.b = s.a = n;) */
3552 vdup(), vtop
[-1] = vtop
[-2];
3554 bit_pos
= BIT_POS(ft
);
3555 bit_size
= BIT_SIZE(ft
);
3556 /* remove bit field info to avoid loops */
3557 vtop
[-1].type
.t
= ft
& ~VT_STRUCT_MASK
;
3559 if (dbt
== VT_BOOL
) {
3560 gen_cast(&vtop
[-1].type
);
3561 vtop
[-1].type
.t
= (vtop
[-1].type
.t
& ~VT_BTYPE
) | (VT_BYTE
| VT_UNSIGNED
);
3563 r
= adjust_bf(vtop
- 1, bit_pos
, bit_size
);
3564 if (dbt
!= VT_BOOL
) {
3565 gen_cast(&vtop
[-1].type
);
3566 dbt
= vtop
[-1].type
.t
& VT_BTYPE
;
3568 if (r
== VT_STRUCT
) {
3569 store_packed_bf(bit_pos
, bit_size
);
3571 unsigned long long mask
= (1ULL << bit_size
) - 1;
3572 if (dbt
!= VT_BOOL
) {
3574 if (dbt
== VT_LLONG
)
3577 vpushi((unsigned)mask
);
3584 /* duplicate destination */
3587 /* load destination, mask and or with source */
3588 if (dbt
== VT_LLONG
)
3589 vpushll(~(mask
<< bit_pos
));
3591 vpushi(~((unsigned)mask
<< bit_pos
));
3596 /* ... and discard */
3599 } else if (dbt
== VT_VOID
) {
3602 /* optimize char/short casts */
3604 if ((dbt
== VT_BYTE
|| dbt
== VT_SHORT
)
3605 && is_integer_btype(sbt
)
3607 if ((vtop
->r
& VT_MUSTCAST
)
3608 && btype_size(dbt
) > btype_size(sbt
)
3610 force_charshort_cast();
3613 gen_cast(&vtop
[-1].type
);
3616 #ifdef CONFIG_TCC_BCHECK
3617 /* bound check case */
3618 if (vtop
[-1].r
& VT_MUSTBOUND
) {
3624 gv(RC_TYPE(dbt
)); /* generate value */
3627 vtop
->r
|= BFVAL(VT_MUSTCAST
, (sbt
== VT_LLONG
) + 1);
3628 //tcc_warning("deley cast %x -> %x", sbt, dbt);
3629 vtop
->type
.t
= ft
& VT_TYPE
;
3632 /* if lvalue was saved on stack, must read it */
3633 if ((vtop
[-1].r
& VT_VALMASK
) == VT_LLOCAL
) {
3635 r
= get_reg(RC_INT
);
3636 sv
.type
.t
= VT_PTRDIFF_T
;
3637 sv
.r
= VT_LOCAL
| VT_LVAL
;
3638 sv
.c
.i
= vtop
[-1].c
.i
;
3640 vtop
[-1].r
= r
| VT_LVAL
;
3643 r
= vtop
->r
& VT_VALMASK
;
3644 /* two word case handling :
3645 store second register at word + 4 (or +8 for x86-64) */
3646 if (USING_TWO_WORDS(dbt
)) {
3647 int load_type
= (dbt
== VT_QFLOAT
) ? VT_DOUBLE
: VT_PTRDIFF_T
;
3648 vtop
[-1].type
.t
= load_type
;
3651 /* convert to int to increment easily */
3652 vtop
->type
.t
= VT_PTRDIFF_T
;
3658 vtop
[-1].type
.t
= load_type
;
3659 /* XXX: it works because r2 is spilled last ! */
3660 store(vtop
->r2
, vtop
- 1);
3666 vtop
--; /* NOT vpop() because on x86 it would flush the fp stack */
3670 /* post defines POST/PRE add. c is the token ++ or -- */
3671 ST_FUNC
void inc(int post
, int c
)
3674 vdup(); /* save lvalue */
3676 gv_dup(); /* duplicate value */
3681 vpushi(c
- TOK_MID
);
3683 vstore(); /* store value */
3685 vpop(); /* if post op, return saved value */
3688 ST_FUNC
void parse_mult_str (CString
*astr
, const char *msg
)
3690 /* read the string */
3694 while (tok
== TOK_STR
) {
3695 /* XXX: add \0 handling too ? */
3696 cstr_cat(astr
, tokc
.str
.data
, -1);
3699 cstr_ccat(astr
, '\0');
3702 /* If I is >= 1 and a power of two, returns log2(i)+1.
3703 If I is 0 returns 0. */
3704 ST_FUNC
int exact_log2p1(int i
)
/* Coarse scan, 8 bits at a time: each whole byte of magnitude adds 8
   to the result.  The remaining bits are presumably handled after this
   loop (extraction truncated here -- confirm against the complete
   source). */
3709 for (ret
= 1; i
>= 1 << 8; ret
+= 8)
3720 /* Parse __attribute__((...)) GNUC extension. */
3721 static void parse_attribute(AttributeDef
*ad
)
3727 if (tok
!= TOK_ATTRIBUTE1
&& tok
!= TOK_ATTRIBUTE2
)
3732 while (tok
!= ')') {
3733 if (tok
< TOK_IDENT
)
3734 expect("attribute name");
3746 tcc_warning_c(warn_implicit_function_declaration
)(
3747 "implicit declaration of function '%s'", get_tok_str(tok
, &tokc
));
3748 s
= external_global_sym(tok
, &func_old_type
);
3749 } else if ((s
->type
.t
& VT_BTYPE
) != VT_FUNC
)
3750 tcc_error("'%s' is not declared as function", get_tok_str(tok
, &tokc
));
3751 ad
->cleanup_func
= s
;
3756 case TOK_CONSTRUCTOR1
:
3757 case TOK_CONSTRUCTOR2
:
3758 ad
->f
.func_ctor
= 1;
3760 case TOK_DESTRUCTOR1
:
3761 case TOK_DESTRUCTOR2
:
3762 ad
->f
.func_dtor
= 1;
3764 case TOK_ALWAYS_INLINE1
:
3765 case TOK_ALWAYS_INLINE2
:
3766 ad
->f
.func_alwinl
= 1;
3771 parse_mult_str(&astr
, "section name");
3772 ad
->section
= find_section(tcc_state
, (char *)astr
.data
);
3779 parse_mult_str(&astr
, "alias(\"target\")");
3780 ad
->alias_target
= /* save string as token, for later */
3781 tok_alloc((char*)astr
.data
, astr
.size
-1)->tok
;
3785 case TOK_VISIBILITY1
:
3786 case TOK_VISIBILITY2
:
3788 parse_mult_str(&astr
,
3789 "visibility(\"default|hidden|internal|protected\")");
3790 if (!strcmp (astr
.data
, "default"))
3791 ad
->a
.visibility
= STV_DEFAULT
;
3792 else if (!strcmp (astr
.data
, "hidden"))
3793 ad
->a
.visibility
= STV_HIDDEN
;
3794 else if (!strcmp (astr
.data
, "internal"))
3795 ad
->a
.visibility
= STV_INTERNAL
;
3796 else if (!strcmp (astr
.data
, "protected"))
3797 ad
->a
.visibility
= STV_PROTECTED
;
3799 expect("visibility(\"default|hidden|internal|protected\")");
3808 if (n
<= 0 || (n
& (n
- 1)) != 0)
3809 tcc_error("alignment must be a positive power of two");
3814 ad
->a
.aligned
= exact_log2p1(n
);
3815 if (n
!= 1 << (ad
->a
.aligned
- 1))
3816 tcc_error("alignment of %d is larger than implemented", n
);
3828 /* currently, no need to handle it because tcc does not
3829 track unused objects */
3833 ad
->f
.func_noreturn
= 1;
3838 ad
->f
.func_call
= FUNC_CDECL
;
3843 ad
->f
.func_call
= FUNC_STDCALL
;
3845 #ifdef TCC_TARGET_I386
3855 ad
->f
.func_call
= FUNC_FASTCALL1
+ n
- 1;
3861 ad
->f
.func_call
= FUNC_FASTCALLW
;
3868 ad
->attr_mode
= VT_LLONG
+ 1;
3871 ad
->attr_mode
= VT_BYTE
+ 1;
3874 ad
->attr_mode
= VT_SHORT
+ 1;
3878 ad
->attr_mode
= VT_INT
+ 1;
3881 tcc_warning("__mode__(%s) not supported\n", get_tok_str(tok
, NULL
));
3888 ad
->a
.dllexport
= 1;
3890 case TOK_NODECORATE
:
3891 ad
->a
.nodecorate
= 1;
3894 ad
->a
.dllimport
= 1;
3897 tcc_warning_c(warn_unsupported
)("'%s' attribute ignored", get_tok_str(t
, NULL
));
3898 /* skip parameters */
3900 int parenthesis
= 0;
3904 else if (tok
== ')')
3907 } while (parenthesis
&& tok
!= -1);
3920 static Sym
* find_field (CType
*type
, int v
, int *cumofs
)
3923 int v1
= v
| SYM_FIELD
;
3925 while ((s
= s
->next
) != NULL
) {
3930 if ((s
->type
.t
& VT_BTYPE
) == VT_STRUCT
3931 && s
->v
>= (SYM_FIRST_ANOM
| SYM_FIELD
)) {
3932 /* try to find field in anonymous sub-struct/union */
3933 Sym
*ret
= find_field (&s
->type
, v1
, cumofs
);
3941 if (!(v
& SYM_FIELD
)) { /* top-level call */
3944 tcc_error("dereferencing incomplete type '%s'",
3945 get_tok_str(s
->v
& ~SYM_STRUCT
, 0));
3947 tcc_error("field not found: %s",
3948 get_tok_str(v
, &tokc
));
/* Walk all members of struct/union 'type' (recursing into anonymous
   sub-structs) and toggle the SYM_FIELD marker bit on each named
   member's identifier-table entry.  Called twice by the caller: first
   with check != 0, where finding the bit already set means the same
   name was seen before (duplicate member); then with check == 0 to
   clear the marker bits again. */
3953 static void check_fields (CType
*type
, int check
)
3957 while ((s
= s
->next
) != NULL
) {
/* strip the field flag to recover the plain identifier token */
3958 int v
= s
->v
& ~SYM_FIELD
;
3959 if (v
< SYM_FIRST_ANOM
) {
/* named member: toggle its marker bit in the identifier table */
3960 TokenSym
*ts
= table_ident
[v
- TOK_IDENT
];
3961 if (check
&& (ts
->tok
& SYM_FIELD
))
3962 tcc_error("duplicate member '%s'", get_tok_str(v
, NULL
));
3963 ts
->tok
^= SYM_FIELD
;
3964 } else if ((s
->type
.t
& VT_BTYPE
) == VT_STRUCT
)
/* anonymous struct/union member: check its fields recursively */
3965 check_fields (&s
->type
, check
);
3969 static void struct_layout(CType
*type
, AttributeDef
*ad
)
3971 int size
, align
, maxalign
, offset
, c
, bit_pos
, bit_size
;
3972 int packed
, a
, bt
, prevbt
, prev_bit_size
;
3973 int pcc
= !tcc_state
->ms_bitfields
;
3974 int pragma_pack
= *tcc_state
->pack_stack_ptr
;
3981 prevbt
= VT_STRUCT
; /* make it never match */
3986 for (f
= type
->ref
->next
; f
; f
= f
->next
) {
3987 if (f
->type
.t
& VT_BITFIELD
)
3988 bit_size
= BIT_SIZE(f
->type
.t
);
3991 size
= type_size(&f
->type
, &align
);
3992 a
= f
->a
.aligned
? 1 << (f
->a
.aligned
- 1) : 0;
3995 if (pcc
&& bit_size
== 0) {
3996 /* in pcc mode, packing does not affect zero-width bitfields */
3999 /* in pcc mode, attribute packed overrides if set. */
4000 if (pcc
&& (f
->a
.packed
|| ad
->a
.packed
))
4003 /* pragma pack overrides align if lesser and packs bitfields always */
4006 if (pragma_pack
< align
)
4007 align
= pragma_pack
;
4008 /* in pcc mode pragma pack also overrides individual align */
4009 if (pcc
&& pragma_pack
< a
)
4013 /* some individual align was specified */
4017 if (type
->ref
->type
.t
== VT_UNION
) {
4018 if (pcc
&& bit_size
>= 0)
4019 size
= (bit_size
+ 7) >> 3;
4024 } else if (bit_size
< 0) {
4026 c
+= (bit_pos
+ 7) >> 3;
4027 c
= (c
+ align
- 1) & -align
;
4036 /* A bit-field. Layout is more complicated. There are two
4037 options: PCC (GCC) compatible and MS compatible */
4039 /* In PCC layout a bit-field is placed adjacent to the
4040 preceding bit-fields, except if:
4042 - an individual alignment was given
4043 - it would overflow its base type container and
4044 there is no packing */
4045 if (bit_size
== 0) {
4047 c
= (c
+ ((bit_pos
+ 7) >> 3) + align
- 1) & -align
;
4049 } else if (f
->a
.aligned
) {
4051 } else if (!packed
) {
4053 int ofs
= ((c
* 8 + bit_pos
) % a8
+ bit_size
+ a8
- 1) / a8
;
4054 if (ofs
> size
/ align
)
4058 /* in pcc mode, long long bitfields have type int if they fit */
4059 if (size
== 8 && bit_size
<= 32)
4060 f
->type
.t
= (f
->type
.t
& ~VT_BTYPE
) | VT_INT
, size
= 4;
4062 while (bit_pos
>= align
* 8)
4063 c
+= align
, bit_pos
-= align
* 8;
4066 /* In PCC layout named bit-fields influence the alignment
4067 of the containing struct using the base types alignment,
4068 except for packed fields (which here have correct align). */
4069 if (f
->v
& SYM_FIRST_ANOM
4070 // && bit_size // ??? gcc on ARM/rpi does that
4075 bt
= f
->type
.t
& VT_BTYPE
;
4076 if ((bit_pos
+ bit_size
> size
* 8)
4077 || (bit_size
> 0) == (bt
!= prevbt
)
4079 c
= (c
+ align
- 1) & -align
;
4082 /* In MS bitfield mode a bit-field run always uses
4083 at least as many bits as the underlying type.
4084 To start a new run it's also required that this
4085 or the last bit-field had non-zero width. */
4086 if (bit_size
|| prev_bit_size
)
4089 /* In MS layout the records alignment is normally
4090 influenced by the field, except for a zero-width
4091 field at the start of a run (but by further zero-width
4092 fields it is again). */
4093 if (bit_size
== 0 && prevbt
!= bt
)
4096 prev_bit_size
= bit_size
;
4099 f
->type
.t
= (f
->type
.t
& ~(0x3f << VT_STRUCT_SHIFT
))
4100 | (bit_pos
<< VT_STRUCT_SHIFT
);
4101 bit_pos
+= bit_size
;
4103 if (align
> maxalign
)
4107 printf("set field %s offset %-2d size %-2d align %-2d",
4108 get_tok_str(f
->v
& ~SYM_FIELD
, NULL
), offset
, size
, align
);
4109 if (f
->type
.t
& VT_BITFIELD
) {
4110 printf(" pos %-2d bits %-2d",
4123 c
+= (bit_pos
+ 7) >> 3;
4125 /* store size and alignment */
4126 a
= bt
= ad
->a
.aligned
? 1 << (ad
->a
.aligned
- 1) : 1;
4130 if (pragma_pack
&& pragma_pack
< maxalign
&& 0 == pcc
) {
4131 /* can happen if individual align for some member was given. In
4132 this case MSVC ignores maxalign when aligning the size */
4137 c
= (c
+ a
- 1) & -a
;
4141 printf("struct size %-2d align %-2d\n\n", c
, a
), fflush(stdout
);
4144 /* check whether we can access bitfields by their type */
4145 for (f
= type
->ref
->next
; f
; f
= f
->next
) {
4149 if (0 == (f
->type
.t
& VT_BITFIELD
))
4153 bit_size
= BIT_SIZE(f
->type
.t
);
4156 bit_pos
= BIT_POS(f
->type
.t
);
4157 size
= type_size(&f
->type
, &align
);
4159 if (bit_pos
+ bit_size
<= size
* 8 && f
->c
+ size
<= c
4160 #ifdef TCC_TARGET_ARM
4161 && !(f
->c
& (align
- 1))
4166 /* try to access the field using a different type */
4167 c0
= -1, s
= align
= 1;
4170 px
= f
->c
* 8 + bit_pos
;
4171 cx
= (px
>> 3) & -align
;
4172 px
= px
- (cx
<< 3);
4175 s
= (px
+ bit_size
+ 7) >> 3;
4185 s
= type_size(&t
, &align
);
4189 if (px
+ bit_size
<= s
* 8 && cx
+ s
<= c
4190 #ifdef TCC_TARGET_ARM
4191 && !(cx
& (align
- 1))
4194 /* update offset and bit position */
4197 f
->type
.t
= (f
->type
.t
& ~(0x3f << VT_STRUCT_SHIFT
))
4198 | (bit_pos
<< VT_STRUCT_SHIFT
);
4202 printf("FIX field %s offset %-2d size %-2d align %-2d "
4203 "pos %-2d bits %-2d\n",
4204 get_tok_str(f
->v
& ~SYM_FIELD
, NULL
),
4205 cx
, s
, align
, px
, bit_size
);
4208 /* fall back to load/store single-byte wise */
4209 f
->auxtype
= VT_STRUCT
;
4211 printf("FIX field %s : load byte-wise\n",
4212 get_tok_str(f
->v
& ~SYM_FIELD
, NULL
));
4218 static void do_Static_assert(void);
4220 /* enum/struct/union declaration. u is VT_ENUM/VT_STRUCT/VT_UNION */
4221 static void struct_decl(CType
*type
, int u
)
4223 int v
, c
, size
, align
, flexible
;
4224 int bit_size
, bsize
, bt
;
4226 AttributeDef ad
, ad1
;
4229 memset(&ad
, 0, sizeof ad
);
4231 parse_attribute(&ad
);
4235 /* struct already defined ? return it */
4237 expect("struct/union/enum name");
4239 if (s
&& (s
->sym_scope
== local_scope
|| tok
!= '{')) {
4242 if (u
== VT_ENUM
&& IS_ENUM(s
->type
.t
))
4244 tcc_error("redefinition of '%s'", get_tok_str(v
, NULL
));
4249 /* Record the original enum/struct/union token. */
4250 type1
.t
= u
== VT_ENUM
? u
| VT_INT
| VT_UNSIGNED
: u
;
4252 /* we put an undefined size for struct/union */
4253 s
= sym_push(v
| SYM_STRUCT
, &type1
, 0, -1);
4254 s
->r
= 0; /* default alignment is zero as gcc */
4256 type
->t
= s
->type
.t
;
4262 tcc_error("struct/union/enum already defined");
4264 /* cannot be empty */
4265 /* non empty enums are not allowed */
4268 long long ll
= 0, pl
= 0, nl
= 0;
4271 /* enum symbols have static storage */
4272 t
.t
= VT_INT
|VT_STATIC
|VT_ENUM_VAL
;
4276 expect("identifier");
4278 if (ss
&& !local_stack
)
4279 tcc_error("redefinition of enumerator '%s'",
4280 get_tok_str(v
, NULL
));
4284 ll
= expr_const64();
4286 ss
= sym_push(v
, &t
, VT_CONST
, 0);
4288 *ps
= ss
, ps
= &ss
->next
;
4297 /* NOTE: we accept a trailing comma */
4302 /* set integral type of the enum */
4305 if (pl
!= (unsigned)pl
)
4306 t
.t
= (LONG_SIZE
==8 ? VT_LLONG
|VT_LONG
: VT_LLONG
);
4308 } else if (pl
!= (int)pl
|| nl
!= (int)nl
)
4309 t
.t
= (LONG_SIZE
==8 ? VT_LLONG
|VT_LONG
: VT_LLONG
);
4310 s
->type
.t
= type
->t
= t
.t
| VT_ENUM
;
4312 /* set type for enum members */
4313 for (ss
= s
->next
; ss
; ss
= ss
->next
) {
4315 if (ll
== (int)ll
) /* default is int if it fits */
4317 if (t
.t
& VT_UNSIGNED
) {
4318 ss
->type
.t
|= VT_UNSIGNED
;
4319 if (ll
== (unsigned)ll
)
4322 ss
->type
.t
= (ss
->type
.t
& ~VT_BTYPE
)
4323 | (LONG_SIZE
==8 ? VT_LLONG
|VT_LONG
: VT_LLONG
);
4328 while (tok
!= '}') {
4329 if (tok
== TOK_STATIC_ASSERT
) {
4333 if (!parse_btype(&btype
, &ad1
, 0)) {
4339 tcc_error("flexible array member '%s' not at the end of struct",
4340 get_tok_str(v
, NULL
));
4346 type_decl(&type1
, &ad1
, &v
, TYPE_DIRECT
);
4348 if ((type1
.t
& VT_BTYPE
) != VT_STRUCT
)
4349 expect("identifier");
4351 int v
= btype
.ref
->v
;
4352 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
4353 if (tcc_state
->ms_extensions
== 0)
4354 expect("identifier");
4358 if (type_size(&type1
, &align
) < 0) {
4359 if ((u
== VT_STRUCT
) && (type1
.t
& VT_ARRAY
) && c
)
4362 tcc_error("field '%s' has incomplete type",
4363 get_tok_str(v
, NULL
));
4365 if ((type1
.t
& VT_BTYPE
) == VT_FUNC
||
4366 (type1
.t
& VT_BTYPE
) == VT_VOID
||
4367 (type1
.t
& VT_STORAGE
))
4368 tcc_error("invalid type for '%s'",
4369 get_tok_str(v
, NULL
));
4373 bit_size
= expr_const();
4374 /* XXX: handle v = 0 case for messages */
4376 tcc_error("negative width in bit-field '%s'",
4377 get_tok_str(v
, NULL
));
4378 if (v
&& bit_size
== 0)
4379 tcc_error("zero width for bit-field '%s'",
4380 get_tok_str(v
, NULL
));
4381 parse_attribute(&ad1
);
4383 size
= type_size(&type1
, &align
);
4384 if (bit_size
>= 0) {
4385 bt
= type1
.t
& VT_BTYPE
;
4391 tcc_error("bitfields must have scalar type");
4393 if (bit_size
> bsize
) {
4394 tcc_error("width of '%s' exceeds its type",
4395 get_tok_str(v
, NULL
));
4396 } else if (bit_size
== bsize
4397 && !ad
.a
.packed
&& !ad1
.a
.packed
) {
4398 /* no need for bit fields */
4400 } else if (bit_size
== 64) {
4401 tcc_error("field width 64 not implemented");
4403 type1
.t
= (type1
.t
& ~VT_STRUCT_MASK
)
4405 | (bit_size
<< (VT_STRUCT_SHIFT
+ 6));
4408 if (v
!= 0 || (type1
.t
& VT_BTYPE
) == VT_STRUCT
) {
4409 /* Remember we've seen a real field to check
4410 for placement of flexible array member. */
4413 /* If member is a struct or bit-field, enforce
4414 placing into the struct (as anonymous). */
4416 ((type1
.t
& VT_BTYPE
) == VT_STRUCT
||
4421 ss
= sym_push(v
| SYM_FIELD
, &type1
, 0, 0);
4426 if (tok
== ';' || tok
== TOK_EOF
)
4433 parse_attribute(&ad
);
4434 if (ad
.cleanup_func
) {
4435 tcc_warning("attribute '__cleanup__' ignored on type");
4437 check_fields(type
, 1);
4438 check_fields(type
, 0);
4439 struct_layout(type
, &ad
);
4441 tcc_debug_fix_anon(tcc_state
, type
);
4446 static void sym_to_attr(AttributeDef
*ad
, Sym
*s
)
4448 merge_symattr(&ad
->a
, &s
->a
);
4449 merge_funcattr(&ad
->f
, &s
->f
);
4452 /* Add type qualifiers to a type. If the type is an array then the qualifiers
4453 are added to the element type, copied because it could be a typedef. */
4454 static void parse_btype_qualify(CType
*type
, int qualifiers
)
4456 while (type
->t
& VT_ARRAY
) {
4457 type
->ref
= sym_push(SYM_FIELD
, &type
->ref
->type
, 0, type
->ref
->c
);
4458 type
= &type
->ref
->type
;
4460 type
->t
|= qualifiers
;
4463 /* return 0 if no type declaration. otherwise, return the basic type
4466 static int parse_btype(CType
*type
, AttributeDef
*ad
, int ignore_label
)
4468 int t
, u
, bt
, st
, type_found
, typespec_found
, g
, n
;
4472 memset(ad
, 0, sizeof(AttributeDef
));
4482 /* currently, we really ignore extension */
4492 if (u
== VT_SHORT
|| u
== VT_LONG
) {
4493 if (st
!= -1 || (bt
!= -1 && bt
!= VT_INT
))
4494 tmbt
: tcc_error("too many basic types");
4497 if (bt
!= -1 || (st
!= -1 && u
!= VT_INT
))
4502 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | u
;
4519 memset(&ad1
, 0, sizeof(AttributeDef
));
4520 if (parse_btype(&type1
, &ad1
, 0)) {
4521 type_decl(&type1
, &ad1
, &n
, TYPE_ABSTRACT
);
4523 n
= 1 << (ad1
.a
.aligned
- 1);
4525 type_size(&type1
, &n
);
4528 if (n
< 0 || (n
& (n
- 1)) != 0)
4529 tcc_error("alignment must be a positive power of two");
4532 ad
->a
.aligned
= exact_log2p1(n
);
4536 if ((t
& VT_BTYPE
) == VT_DOUBLE
) {
4537 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_LDOUBLE
;
4538 } else if ((t
& (VT_BTYPE
|VT_LONG
)) == VT_LONG
) {
4539 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_LLONG
;
4546 #ifdef TCC_TARGET_ARM64
4548 /* GCC's __uint128_t appears in some Linux header files. Make it a
4549 synonym for long double to get the size and alignment right. */
4557 tcc_error("_Complex is not yet supported");
4562 if ((t
& (VT_BTYPE
|VT_LONG
)) == VT_LONG
) {
4563 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_LDOUBLE
;
4571 struct_decl(&type1
, VT_ENUM
);
4574 type
->ref
= type1
.ref
;
4577 struct_decl(&type1
, VT_STRUCT
);
4580 struct_decl(&type1
, VT_UNION
);
4583 /* type modifiers */
4587 parse_btype_qualify(type
, VT_ATOMIC
);
4590 parse_expr_type(&type1
);
4591 /* remove all storage modifiers except typedef */
4592 type1
.t
&= ~(VT_STORAGE
&~VT_TYPEDEF
);
4594 sym_to_attr(ad
, type1
.ref
);
4602 parse_btype_qualify(type
, VT_CONSTANT
);
4610 parse_btype_qualify(type
, VT_VOLATILE
);
4617 if ((t
& (VT_DEFSIGN
|VT_UNSIGNED
)) == (VT_DEFSIGN
|VT_UNSIGNED
))
4618 tcc_error("signed and unsigned modifier");
4631 if ((t
& (VT_DEFSIGN
|VT_UNSIGNED
)) == VT_DEFSIGN
)
4632 tcc_error("signed and unsigned modifier");
4633 t
|= VT_DEFSIGN
| VT_UNSIGNED
;
4649 if (t
& (VT_EXTERN
|VT_STATIC
|VT_TYPEDEF
) & ~g
)
4650 tcc_error("multiple storage classes");
4662 ad
->f
.func_noreturn
= 1;
4664 /* GNUC attribute */
4665 case TOK_ATTRIBUTE1
:
4666 case TOK_ATTRIBUTE2
:
4667 parse_attribute(ad
);
4668 if (ad
->attr_mode
) {
4669 u
= ad
->attr_mode
-1;
4670 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | u
;
4678 parse_expr_type(&type1
);
4679 /* remove all storage modifiers except typedef */
4680 type1
.t
&= ~(VT_STORAGE
&~VT_TYPEDEF
);
4682 sym_to_attr(ad
, type1
.ref
);
4684 case TOK_THREAD_LOCAL
:
4685 tcc_error("_Thread_local is not implemented");
4690 if (!s
|| !(s
->type
.t
& VT_TYPEDEF
))
4694 if (tok
== ':' && ignore_label
) {
4695 /* ignore if it's a label */
4700 t
&= ~(VT_BTYPE
|VT_LONG
);
4701 u
= t
& ~(VT_CONSTANT
| VT_VOLATILE
), t
^= u
;
4702 type
->t
= (s
->type
.t
& ~VT_TYPEDEF
) | u
;
4703 type
->ref
= s
->type
.ref
;
4705 parse_btype_qualify(type
, t
);
4707 /* get attributes from typedef */
4716 if (tcc_state
->char_is_unsigned
) {
4717 if ((t
& (VT_DEFSIGN
|VT_BTYPE
)) == VT_BYTE
)
4720 /* VT_LONG is used just as a modifier for VT_INT / VT_LLONG */
4721 bt
= t
& (VT_BTYPE
|VT_LONG
);
4723 t
|= LONG_SIZE
== 8 ? VT_LLONG
: VT_INT
;
4724 #ifdef TCC_USING_DOUBLE_FOR_LDOUBLE
4725 if (bt
== VT_LDOUBLE
)
4726 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | (VT_DOUBLE
|VT_LONG
);
4732 /* convert a function parameter type (array to pointer and function to
4733 function pointer) */
4734 static inline void convert_parameter_type(CType
*pt
)
4736 /* remove const and volatile qualifiers (XXX: const could be used
4737 to indicate a const function parameter */
4738 pt
->t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
4739 /* array must be transformed to pointer according to ANSI C */
4741 if ((pt
->t
& VT_BTYPE
) == VT_FUNC
) {
/* a function parameter decays to pointer-to-function; the call that
   performs the decay (presumably mk_pointer) is elided in this
   extraction -- confirm against the complete source. */
/* Read the string operand of an asm statement into 'astr' (one or
   more concatenated string literals, via parse_mult_str).
   NOTE(review): extraction appears truncated before the call -- the
   original presumably skips a leading separator token first; confirm
   against the complete source. */
4746 ST_FUNC
void parse_asm_str(CString
*astr
)
4749 parse_mult_str(astr
, "string constant");
4752 /* Parse an asm label and return the token */
4753 static int asm_label_instr(void)
4759 parse_asm_str(&astr
);
4762 printf("asm_alias: \"%s\"\n", (char *)astr
.data
);
4764 v
= tok_alloc(astr
.data
, astr
.size
- 1)->tok
;
4769 static int post_type(CType
*type
, AttributeDef
*ad
, int storage
, int td
)
4771 int n
, l
, t1
, arg_size
, align
;
4772 Sym
**plast
, *s
, *first
;
4775 TokenString
*vla_array_tok
= NULL
;
4776 int *vla_array_str
= NULL
;
4779 /* function type, or recursive declarator (return if so) */
4781 if (TYPE_DIRECT
== (td
& (TYPE_DIRECT
|TYPE_ABSTRACT
)))
4785 else if (parse_btype(&pt
, &ad1
, 0))
4787 else if (td
& (TYPE_DIRECT
|TYPE_ABSTRACT
)) {
4788 merge_attr (ad
, &ad1
);
4799 /* read param name and compute offset */
4800 if (l
!= FUNC_OLD
) {
4801 if ((pt
.t
& VT_BTYPE
) == VT_VOID
&& tok
== ')')
4803 type_decl(&pt
, &ad1
, &n
, TYPE_DIRECT
| TYPE_ABSTRACT
| TYPE_PARAM
);
4804 if ((pt
.t
& VT_BTYPE
) == VT_VOID
)
4805 tcc_error("parameter declared as void");
4810 pt
.t
= VT_VOID
; /* invalid type */
4815 expect("identifier");
4816 convert_parameter_type(&pt
);
4817 arg_size
+= (type_size(&pt
, &align
) + PTR_SIZE
- 1) / PTR_SIZE
;
4818 /* these symbols may be evaluated for VLArrays (see below, under
4819 nocode_wanted) which is why we push them here as normal symbols
4820 temporarily. Example: int func(int a, int b[++a]); */
4821 s
= sym_push(n
, &pt
, VT_LOCAL
|VT_LVAL
, 0);
4827 if (l
== FUNC_NEW
&& tok
== TOK_DOTS
) {
4832 if (l
== FUNC_NEW
&& !parse_btype(&pt
, &ad1
, 0))
4833 tcc_error("invalid type");
4836 /* if no parameters, then old type prototype */
4839 /* remove parameter symbols from token table, keep on stack */
4841 sym_pop(local_stack
? &local_stack
: &global_stack
, first
->prev
, 1);
4842 for (s
= first
; s
; s
= s
->next
)
4846 /* NOTE: const is ignored in returned type as it has a special
4847 meaning in gcc / C++ */
4848 type
->t
&= ~VT_CONSTANT
;
4849 /* some ancient pre-K&R C allows a function to return an array
4850 and the array brackets to be put after the arguments, such
4851 that "int c()[]" means something like "int[] c()" */
4854 skip(']'); /* only handle simple "[]" */
4857 /* we push a anonymous symbol which will contain the function prototype */
4858 ad
->f
.func_args
= arg_size
;
4859 ad
->f
.func_type
= l
;
4860 s
= sym_push(SYM_FIELD
, type
, 0, 0);
4866 } else if (tok
== '[') {
4867 int saved_nocode_wanted
= nocode_wanted
;
4868 /* array definition */
4872 if (td
& TYPE_PARAM
) while (1) {
4873 /* XXX The optional type-quals and static should only be accepted
4874 in parameter decls. The '*' as well, and then even only
4875 in prototypes (not function defs). */
4877 case TOK_RESTRICT1
: case TOK_RESTRICT2
: case TOK_RESTRICT3
:
4888 /* Code generation is not done now but has to be done
4889 at start of function. Save code here for later use. */
4891 skip_or_save_block(&vla_array_tok
);
4893 vla_array_str
= vla_array_tok
->str
;
4894 begin_macro(vla_array_tok
, 2);
4903 } else if (tok
!= ']') {
4904 if (!local_stack
|| (storage
& VT_STATIC
))
4905 vpushi(expr_const());
4907 /* VLAs (which can only happen with local_stack && !VT_STATIC)
4908 length must always be evaluated, even under nocode_wanted,
4909 so that its size slot is initialized (e.g. under sizeof
4915 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
4918 tcc_error("invalid array size");
4920 if (!is_integer_btype(vtop
->type
.t
& VT_BTYPE
))
4921 tcc_error("size of variable length array should be an integer");
4927 /* parse next post type */
4928 post_type(type
, ad
, storage
, (td
& ~(TYPE_DIRECT
|TYPE_ABSTRACT
)) | TYPE_NEST
);
4930 if ((type
->t
& VT_BTYPE
) == VT_FUNC
)
4931 tcc_error("declaration of an array of functions");
4932 if ((type
->t
& VT_BTYPE
) == VT_VOID
4933 || type_size(type
, &align
) < 0)
4934 tcc_error("declaration of an array of incomplete type elements");
4936 t1
|= type
->t
& VT_VLA
;
4941 tcc_error("need explicit inner array size in VLAs");
4944 loc
-= type_size(&int_type
, &align
);
4948 vpush_type_size(type
, &align
);
4950 vset(&int_type
, VT_LOCAL
|VT_LVAL
, n
);
4957 nocode_wanted
= saved_nocode_wanted
;
4959 /* we push an anonymous symbol which will contain the array
4961 s
= sym_push(SYM_FIELD
, type
, 0, n
);
4962 type
->t
= (t1
? VT_VLA
: VT_ARRAY
) | VT_PTR
;
4965 if (vla_array_str
) {
4967 s
->vla_array_str
= vla_array_str
;
4969 tok_str_free_str(vla_array_str
);
4975 /* Parse a type declarator (except basic type), and return the type
4976 in 'type'. 'td' is a bitmask indicating which kind of type decl is
4977 expected. 'type' should contain the basic type. 'ad' is the
4978 attribute definition of the basic type. It can be modified by
4979 type_decl(). If this (possibly abstract) declarator is a pointer chain
4980 it returns the innermost pointed to type (equals *type, but is a different
4981 pointer), otherwise returns type itself, that's used for recursive calls. */
4982 static CType
*type_decl(CType
*type
, AttributeDef
*ad
, int *v
, int td
)
4985 int qualifiers
, storage
;
4987 /* recursive type, remove storage bits first, apply them later again */
4988 storage
= type
->t
& VT_STORAGE
;
4989 type
->t
&= ~VT_STORAGE
;
4992 while (tok
== '*') {
4998 qualifiers
|= VT_ATOMIC
;
5003 qualifiers
|= VT_CONSTANT
;
5008 qualifiers
|= VT_VOLATILE
;
5014 /* XXX: clarify attribute handling */
5015 case TOK_ATTRIBUTE1
:
5016 case TOK_ATTRIBUTE2
:
5017 parse_attribute(ad
);
5021 type
->t
|= qualifiers
;
5023 /* innermost pointed to type is the one for the first derivation */
5024 ret
= pointed_type(type
);
5028 /* This is possibly a parameter type list for abstract declarators
5029 ('int ()'), use post_type for testing this. */
5030 if (!post_type(type
, ad
, 0, td
)) {
5031 /* It's not, so it's a nested declarator, and the post operations
5032 apply to the innermost pointed to type (if any). */
5033 /* XXX: this is not correct to modify 'ad' at this point, but
5034 the syntax is not clear */
5035 parse_attribute(ad
);
5036 post
= type_decl(type
, ad
, v
, td
);
5040 } else if (tok
>= TOK_IDENT
&& (td
& TYPE_DIRECT
)) {
5041 /* type identifier */
5046 if (!(td
& TYPE_ABSTRACT
))
5047 expect("identifier");
5050 post_type(post
, ad
, post
!= ret
? 0 : storage
,
5051 td
& ~(TYPE_DIRECT
|TYPE_ABSTRACT
));
5052 parse_attribute(ad
);
5057 /* indirection with full error checking and bound check */
5058 ST_FUNC
void indir(void)
5060 if ((vtop
->type
.t
& VT_BTYPE
) != VT_PTR
) {
5061 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FUNC
)
5065 if (vtop
->r
& VT_LVAL
)
5067 vtop
->type
= *pointed_type(&vtop
->type
);
5068 /* Arrays and functions are never lvalues */
5069 if (!(vtop
->type
.t
& (VT_ARRAY
| VT_VLA
))
5070 && (vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
) {
5072 /* if bound checking, the referenced pointer must be checked */
5073 #ifdef CONFIG_TCC_BCHECK
5074 if (tcc_state
->do_bounds_check
)
5075 vtop
->r
|= VT_MUSTBOUND
;
5080 /* pass a parameter to a function and do type checking and casting */
5081 static void gfunc_param_typed(Sym
*func
, Sym
*arg
)
5086 func_type
= func
->f
.func_type
;
5087 if (func_type
== FUNC_OLD
||
5088 (func_type
== FUNC_ELLIPSIS
&& arg
== NULL
)) {
5089 /* default casting : only need to convert float to double */
5090 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FLOAT
) {
5091 gen_cast_s(VT_DOUBLE
);
5092 } else if (vtop
->type
.t
& VT_BITFIELD
) {
5093 type
.t
= vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
);
5094 type
.ref
= vtop
->type
.ref
;
5096 } else if (vtop
->r
& VT_MUSTCAST
) {
5097 force_charshort_cast();
5099 } else if (arg
== NULL
) {
5100 tcc_error("too many arguments to function");
5103 type
.t
&= ~VT_CONSTANT
; /* need to do that to avoid false warning */
5104 gen_assign_cast(&type
);
5108 /* parse an expression and return its type without any side effect. */
5109 static void expr_type(CType
*type
, void (*expr_fn
)(void))
5118 /* parse an expression of the form '(type)' or '(expr)' and return its
5120 static void parse_expr_type(CType
*type
)
5126 if (parse_btype(type
, &ad
, 0)) {
5127 type_decl(type
, &ad
, &n
, TYPE_ABSTRACT
);
5129 expr_type(type
, gexpr
);
5134 static void parse_type(CType
*type
)
5139 if (!parse_btype(type
, &ad
, 0)) {
5142 type_decl(type
, &ad
, &n
, TYPE_ABSTRACT
);
5145 static void parse_builtin_params(int nc
, const char *args
)
5154 while ((c
= *args
++)) {
5169 type
.t
= VT_CONSTANT
;
5175 type
.t
= VT_CONSTANT
;
5177 type
.t
|= char_type
.t
;
5189 gen_assign_cast(&type
);
5196 static void parse_atomic(int atok
)
5198 int size
, align
, arg
, t
, save
= 0;
5199 CType
*atom
, *atom_ptr
, ct
= {0};
5202 static const char *const templates
[] = {
5204 * Each entry consists of callback and function template.
5205 * The template represents argument types and return type.
5207 * ? void (return-only)
5210 * A read-only atomic
5211 * p pointer to memory
5218 /* keep in order of appearance in tcctok.h: */
5219 /* __atomic_store */ "alm.?",
5220 /* __atomic_load */ "Asm.v",
5221 /* __atomic_exchange */ "alsm.v",
5222 /* __atomic_compare_exchange */ "aplbmm.b",
5223 /* __atomic_fetch_add */ "avm.v",
5224 /* __atomic_fetch_sub */ "avm.v",
5225 /* __atomic_fetch_or */ "avm.v",
5226 /* __atomic_fetch_xor */ "avm.v",
5227 /* __atomic_fetch_and */ "avm.v",
5228 /* __atomic_fetch_nand */ "avm.v",
5229 /* __atomic_and_fetch */ "avm.v",
5230 /* __atomic_sub_fetch */ "avm.v",
5231 /* __atomic_or_fetch */ "avm.v",
5232 /* __atomic_xor_fetch */ "avm.v",
5233 /* __atomic_and_fetch */ "avm.v",
5234 /* __atomic_nand_fetch */ "avm.v"
5236 const char *template = templates
[(atok
- TOK___atomic_store
)];
5238 atom
= atom_ptr
= NULL
;
5239 size
= 0; /* pacify compiler */
5244 switch (template[arg
]) {
5247 atom_ptr
= &vtop
->type
;
5248 if ((atom_ptr
->t
& VT_BTYPE
) != VT_PTR
)
5250 atom
= pointed_type(atom_ptr
);
5251 size
= type_size(atom
, &align
);
5253 || (size
& (size
- 1))
5254 || (atok
> TOK___atomic_compare_exchange
5255 && (0 == btype_size(atom
->t
& VT_BTYPE
)
5256 || (atom
->t
& VT_BTYPE
) == VT_PTR
)))
5257 expect("integral or integer-sized pointer target type");
5258 /* GCC does not care either: */
5259 /* if (!(atom->t & VT_ATOMIC))
5260 tcc_warning("pointer target declaration is missing '_Atomic'"); */
5264 if ((vtop
->type
.t
& VT_BTYPE
) != VT_PTR
5265 || type_size(pointed_type(&vtop
->type
), &align
) != size
)
5266 tcc_error("pointer target type mismatch in argument %d", arg
+ 1);
5267 gen_assign_cast(atom_ptr
);
5270 gen_assign_cast(atom
);
5274 gen_assign_cast(atom
);
5283 gen_assign_cast(&int_type
);
5287 gen_assign_cast(&ct
);
5290 if ('.' == template[++arg
])
5297 switch (template[arg
+ 1]) {
5306 sprintf(buf
, "%s_%d", get_tok_str(atok
, 0), size
);
5307 vpush_helper_func(tok_alloc_const(buf
));
5308 vrott(arg
- save
+ 1);
5309 gfunc_call(arg
- save
);
5312 PUT_R_RET(vtop
, ct
.t
);
5313 t
= ct
.t
& VT_BTYPE
;
5314 if (t
== VT_BYTE
|| t
== VT_SHORT
|| t
== VT_BOOL
) {
5316 vtop
->r
|= BFVAL(VT_MUSTCAST
, 1);
5318 vtop
->type
.t
= VT_INT
;
5330 ST_FUNC
void unary(void)
5332 int n
, t
, align
, size
, r
, sizeof_caller
;
5337 /* generate line number info */
5339 tcc_debug_line(tcc_state
), tcc_tcov_check_line (tcc_state
, 1);
5341 sizeof_caller
= in_sizeof
;
5344 /* XXX: GCC 2.95.3 does not generate a table although it should be
5352 #ifdef TCC_TARGET_PE
5353 t
= VT_SHORT
|VT_UNSIGNED
;
5361 vsetc(&type
, VT_CONST
, &tokc
);
5365 t
= VT_INT
| VT_UNSIGNED
;
5371 t
= VT_LLONG
| VT_UNSIGNED
;
5383 t
= (LONG_SIZE
== 8 ? VT_LLONG
: VT_INT
) | VT_LONG
;
5386 t
= (LONG_SIZE
== 8 ? VT_LLONG
: VT_INT
) | VT_LONG
| VT_UNSIGNED
;
5388 case TOK___FUNCTION__
:
5390 goto tok_identifier
;
5396 /* special function name identifier */
5397 len
= strlen(funcname
) + 1;
5398 /* generate char[len] type */
5399 type
.t
= char_type
.t
;
5400 if (tcc_state
->warn_write_strings
& WARN_ON
)
5401 type
.t
|= VT_CONSTANT
;
5405 sec
= rodata_section
;
5406 vpush_ref(&type
, sec
, sec
->data_offset
, len
);
5408 memcpy(section_ptr_add(sec
, len
), funcname
, len
);
5413 #ifdef TCC_TARGET_PE
5414 t
= VT_SHORT
| VT_UNSIGNED
;
5420 /* string parsing */
5423 if (tcc_state
->warn_write_strings
& WARN_ON
)
5428 memset(&ad
, 0, sizeof(AttributeDef
));
5429 ad
.section
= rodata_section
;
5430 decl_initializer_alloc(&type
, &ad
, VT_CONST
, 2, 0, 0);
5435 if (parse_btype(&type
, &ad
, 0)) {
5436 type_decl(&type
, &ad
, &n
, TYPE_ABSTRACT
);
5438 /* check ISOC99 compound literal */
5440 /* data is allocated locally by default */
5445 /* all except arrays are lvalues */
5446 if (!(type
.t
& VT_ARRAY
))
5448 memset(&ad
, 0, sizeof(AttributeDef
));
5449 decl_initializer_alloc(&type
, &ad
, r
, 1, 0, 0);
5451 if (sizeof_caller
) {
5458 } else if (tok
== '{') {
5459 int saved_nocode_wanted
= nocode_wanted
;
5460 if (const_wanted
&& !(nocode_wanted
& unevalmask
))
5462 if (0 == local_scope
)
5463 tcc_error("statement expression outside of function");
5464 /* save all registers */
5466 /* statement expression : we do not accept break/continue
5467 inside as GCC does. We do retain the nocode_wanted state,
5468 as statement expressions can't ever be entered from the
5469 outside, so any reactivation of code emission (from labels
5470 or loop heads) can be disabled again after the end of it. */
5472 /* If the statement expr can be entered, then we retain the current
5473 nocode_wanted state (from e.g. a 'return 0;' in the stmt-expr).
5474 If it can't be entered then the state is that from before the
5475 statement expression. */
5476 if (saved_nocode_wanted
)
5477 nocode_wanted
= saved_nocode_wanted
;
5492 /* functions names must be treated as function pointers,
5493 except for unary '&' and sizeof. Since we consider that
5494 functions are not lvalues, we only have to handle it
5495 there and in function calls. */
5496 /* arrays can also be used although they are not lvalues */
5497 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
&&
5498 !(vtop
->type
.t
& (VT_ARRAY
| VT_VLA
)))
5501 vtop
->sym
->a
.addrtaken
= 1;
5502 mk_pointer(&vtop
->type
);
5508 gen_test_zero(TOK_EQ
);
5519 if ((vtop
->type
.t
& VT_BTYPE
) == VT_PTR
)
5520 tcc_error("pointer not accepted for unary plus");
5521 /* In order to force cast, we add zero, except for floating point
5522 where we really need an noop (otherwise -0.0 will be transformed
5524 if (!is_float(vtop
->type
.t
)) {
5536 expr_type(&type
, unary
); /* Perform a in_sizeof = 0; */
5537 if (t
== TOK_SIZEOF
) {
5538 vpush_type_size(&type
, &align
);
5539 gen_cast_s(VT_SIZE_T
);
5541 type_size(&type
, &align
);
5543 if (vtop
[1].r
& VT_SYM
)
5544 s
= vtop
[1].sym
; /* hack: accessing previous vtop */
5545 if (s
&& s
->a
.aligned
)
5546 align
= 1 << (s
->a
.aligned
- 1);
5551 case TOK_builtin_expect
:
5552 /* __builtin_expect is a no-op for now */
5553 parse_builtin_params(0, "ee");
5556 case TOK_builtin_types_compatible_p
:
5557 parse_builtin_params(0, "tt");
5558 vtop
[-1].type
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
5559 vtop
[0].type
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
5560 n
= is_compatible_types(&vtop
[-1].type
, &vtop
[0].type
);
5564 case TOK_builtin_choose_expr
:
5591 case TOK_builtin_constant_p
:
5593 parse_builtin_params(1, "e");
5595 (vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
&&
5596 !((vtop
->r
& VT_SYM
) && vtop
->sym
->a
.addrtaken
);
5600 case TOK_builtin_frame_address
:
5601 case TOK_builtin_return_address
:
5607 level
= expr_const64();
5609 tcc_error("%s only takes positive integers",
5610 tok1
== TOK_builtin_return_address
?
5611 "__builtin_return_address" :
5612 "__builtin_frame_address");
5617 vset(&type
, VT_LOCAL
, 0); /* local frame */
5619 #ifdef TCC_TARGET_RISCV64
5623 mk_pointer(&vtop
->type
);
5624 indir(); /* -> parent frame */
5626 if (tok1
== TOK_builtin_return_address
) {
5627 // assume return address is just above frame pointer on stack
5628 #ifdef TCC_TARGET_ARM
5631 #elif defined TCC_TARGET_RISCV64
5638 mk_pointer(&vtop
->type
);
5643 #ifdef TCC_TARGET_RISCV64
5644 case TOK_builtin_va_start
:
5645 parse_builtin_params(0, "ee");
5646 r
= vtop
->r
& VT_VALMASK
;
5650 tcc_error("__builtin_va_start expects a local variable");
5655 #ifdef TCC_TARGET_X86_64
5656 #ifdef TCC_TARGET_PE
5657 case TOK_builtin_va_start
:
5658 parse_builtin_params(0, "ee");
5659 r
= vtop
->r
& VT_VALMASK
;
5663 tcc_error("__builtin_va_start expects a local variable");
5665 vtop
->type
= char_pointer_type
;
5670 case TOK_builtin_va_arg_types
:
5671 parse_builtin_params(0, "t");
5672 vpushi(classify_x86_64_va_arg(&vtop
->type
));
5679 #ifdef TCC_TARGET_ARM64
5680 case TOK_builtin_va_start
: {
5681 parse_builtin_params(0, "ee");
5685 vtop
->type
.t
= VT_VOID
;
5688 case TOK_builtin_va_arg
: {
5689 parse_builtin_params(0, "et");
5697 case TOK___arm64_clear_cache
: {
5698 parse_builtin_params(0, "ee");
5701 vtop
->type
.t
= VT_VOID
;
5706 /* atomic operations */
5707 case TOK___atomic_store
:
5708 case TOK___atomic_load
:
5709 case TOK___atomic_exchange
:
5710 case TOK___atomic_compare_exchange
:
5711 case TOK___atomic_fetch_add
:
5712 case TOK___atomic_fetch_sub
:
5713 case TOK___atomic_fetch_or
:
5714 case TOK___atomic_fetch_xor
:
5715 case TOK___atomic_fetch_and
:
5716 case TOK___atomic_fetch_nand
:
5717 case TOK___atomic_add_fetch
:
5718 case TOK___atomic_sub_fetch
:
5719 case TOK___atomic_or_fetch
:
5720 case TOK___atomic_xor_fetch
:
5721 case TOK___atomic_and_fetch
:
5722 case TOK___atomic_nand_fetch
:
5726 /* pre operations */
5737 if (is_float(vtop
->type
.t
)) {
5747 goto tok_identifier
;
5749 /* allow to take the address of a label */
5750 if (tok
< TOK_UIDENT
)
5751 expect("label identifier");
5752 s
= label_find(tok
);
5754 s
= label_push(&global_label_stack
, tok
, LABEL_FORWARD
);
5756 if (s
->r
== LABEL_DECLARED
)
5757 s
->r
= LABEL_FORWARD
;
5759 if ((s
->type
.t
& VT_BTYPE
) != VT_PTR
) {
5760 s
->type
.t
= VT_VOID
;
5761 mk_pointer(&s
->type
);
5762 s
->type
.t
|= VT_STATIC
;
5764 vpushsym(&s
->type
, s
);
5770 CType controlling_type
;
5771 int has_default
= 0;
5774 TokenString
*str
= NULL
;
5775 int saved_const_wanted
= const_wanted
;
5780 expr_type(&controlling_type
, expr_eq
);
5781 controlling_type
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
| VT_ARRAY
);
5782 if ((controlling_type
.t
& VT_BTYPE
) == VT_FUNC
)
5783 mk_pointer(&controlling_type
);
5784 const_wanted
= saved_const_wanted
;
5788 if (tok
== TOK_DEFAULT
) {
5790 tcc_error("too many 'default'");
5796 AttributeDef ad_tmp
;
5800 parse_btype(&cur_type
, &ad_tmp
, 0);
5801 type_decl(&cur_type
, &ad_tmp
, &itmp
, TYPE_ABSTRACT
);
5802 if (compare_types(&controlling_type
, &cur_type
, 0)) {
5804 tcc_error("type match twice");
5814 skip_or_save_block(&str
);
5816 skip_or_save_block(NULL
);
5823 type_to_str(buf
, sizeof buf
, &controlling_type
, NULL
);
5824 tcc_error("type '%s' does not match any association", buf
);
5826 begin_macro(str
, 1);
5835 // special qnan , snan and infinity values
5840 vtop
->type
.t
= VT_FLOAT
;
5845 goto special_math_val
;
5848 goto special_math_val
;
5855 expect("identifier");
5857 if (!s
|| IS_ASM_SYM(s
)) {
5858 const char *name
= get_tok_str(t
, NULL
);
5860 tcc_error("'%s' undeclared", name
);
5861 /* for simple function calls, we tolerate undeclared
5862 external reference to int() function */
5863 tcc_warning_c(warn_implicit_function_declaration
)(
5864 "implicit declaration of function '%s'", name
);
5865 s
= external_global_sym(t
, &func_old_type
);
5869 /* A symbol that has a register is a local register variable,
5870 which starts out as VT_LOCAL value. */
5871 if ((r
& VT_VALMASK
) < VT_CONST
)
5872 r
= (r
& ~VT_VALMASK
) | VT_LOCAL
;
5874 vset(&s
->type
, r
, s
->c
);
5875 /* Point to s as backpointer (even without r&VT_SYM).
5876 Will be used by at least the x86 inline asm parser for
5882 } else if (r
== VT_CONST
&& IS_ENUM_VAL(s
->type
.t
)) {
5883 vtop
->c
.i
= s
->enum_val
;
5888 /* post operations */
5890 if (tok
== TOK_INC
|| tok
== TOK_DEC
) {
5893 } else if (tok
== '.' || tok
== TOK_ARROW
|| tok
== TOK_CDOUBLE
) {
5894 int qualifiers
, cumofs
= 0;
5896 if (tok
== TOK_ARROW
)
5898 qualifiers
= vtop
->type
.t
& (VT_CONSTANT
| VT_VOLATILE
);
5901 /* expect pointer on structure */
5902 if ((vtop
->type
.t
& VT_BTYPE
) != VT_STRUCT
)
5903 expect("struct or union");
5904 if (tok
== TOK_CDOUBLE
)
5905 expect("field name");
5907 if (tok
== TOK_CINT
|| tok
== TOK_CUINT
)
5908 expect("field name");
5909 s
= find_field(&vtop
->type
, tok
, &cumofs
);
5910 /* add field offset to pointer */
5911 vtop
->type
= char_pointer_type
; /* change type to 'char *' */
5914 /* change type to field type, and set to lvalue */
5915 vtop
->type
= s
->type
;
5916 vtop
->type
.t
|= qualifiers
;
5917 /* an array is never an lvalue */
5918 if (!(vtop
->type
.t
& VT_ARRAY
)) {
5920 #ifdef CONFIG_TCC_BCHECK
5921 /* if bound checking, the referenced pointer must be checked */
5922 if (tcc_state
->do_bounds_check
)
5923 vtop
->r
|= VT_MUSTBOUND
;
5927 } else if (tok
== '[') {
5933 } else if (tok
== '(') {
5936 int nb_args
, ret_nregs
, ret_align
, regsize
, variadic
;
5939 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
) {
5940 /* pointer test (no array accepted) */
5941 if ((vtop
->type
.t
& (VT_BTYPE
| VT_ARRAY
)) == VT_PTR
) {
5942 vtop
->type
= *pointed_type(&vtop
->type
);
5943 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
)
5947 expect("function pointer");
5950 vtop
->r
&= ~VT_LVAL
; /* no lvalue */
5952 /* get return type */
5955 sa
= s
->next
; /* first parameter */
5956 nb_args
= regsize
= 0;
5958 /* compute first implicit argument if a structure is returned */
5959 if ((s
->type
.t
& VT_BTYPE
) == VT_STRUCT
) {
5960 variadic
= (s
->f
.func_type
== FUNC_ELLIPSIS
);
5961 ret_nregs
= gfunc_sret(&s
->type
, variadic
, &ret
.type
,
5962 &ret_align
, ®size
);
5963 if (ret_nregs
<= 0) {
5964 /* get some space for the returned structure */
5965 size
= type_size(&s
->type
, &align
);
5966 #ifdef TCC_TARGET_ARM64
5967 /* On arm64, a small struct is return in registers.
5968 It is much easier to write it to memory if we know
5969 that we are allowed to write some extra bytes, so
5970 round the allocated space up to a power of 2: */
5972 while (size
& (size
- 1))
5973 size
= (size
| (size
- 1)) + 1;
5975 loc
= (loc
- size
) & -align
;
5977 ret
.r
= VT_LOCAL
| VT_LVAL
;
5978 /* pass it as 'int' to avoid structure arg passing
5980 vseti(VT_LOCAL
, loc
);
5981 #ifdef CONFIG_TCC_BCHECK
5982 if (tcc_state
->do_bounds_check
)
5996 if (ret_nregs
> 0) {
5997 /* return in register */
5999 PUT_R_RET(&ret
, ret
.type
.t
);
6004 gfunc_param_typed(s
, sa
);
6014 tcc_error("too few arguments to function");
6016 gfunc_call(nb_args
);
6018 if (ret_nregs
< 0) {
6019 vsetc(&ret
.type
, ret
.r
, &ret
.c
);
6020 #ifdef TCC_TARGET_RISCV64
6021 arch_transfer_ret_regs(1);
6025 for (r
= ret
.r
+ ret_nregs
+ !ret_nregs
; r
-- > ret
.r
;) {
6026 vsetc(&ret
.type
, r
, &ret
.c
);
6027 vtop
->r2
= ret
.r2
; /* Loop only happens when r2 is VT_CONST */
6030 /* handle packed struct return */
6031 if (((s
->type
.t
& VT_BTYPE
) == VT_STRUCT
) && ret_nregs
) {
6034 size
= type_size(&s
->type
, &align
);
6035 /* We're writing whole regs often, make sure there's enough
6036 space. Assume register size is power of 2. */
6037 if (regsize
> align
)
6039 loc
= (loc
- size
) & -align
;
6043 vset(&ret
.type
, VT_LOCAL
| VT_LVAL
, addr
+ offset
);
6047 if (--ret_nregs
== 0)
6051 vset(&s
->type
, VT_LOCAL
| VT_LVAL
, addr
);
6054 /* Promote char/short return values. This is matters only
6055 for calling function that were not compiled by TCC and
6056 only on some architectures. For those where it doesn't
6057 matter we expect things to be already promoted to int,
6059 t
= s
->type
.t
& VT_BTYPE
;
6060 if (t
== VT_BYTE
|| t
== VT_SHORT
|| t
== VT_BOOL
) {
6062 vtop
->r
|= BFVAL(VT_MUSTCAST
, 1);
6064 vtop
->type
.t
= VT_INT
;
6068 if (s
->f
.func_noreturn
) {
6070 tcc_tcov_block_end(tcc_state
, -1);
6079 #ifndef precedence_parser /* original top-down parser */
6081 static void expr_prod(void)
6086 while ((t
= tok
) == '*' || t
== '/' || t
== '%') {
6093 static void expr_sum(void)
6098 while ((t
= tok
) == '+' || t
== '-') {
6105 static void expr_shift(void)
6110 while ((t
= tok
) == TOK_SHL
|| t
== TOK_SAR
) {
6117 static void expr_cmp(void)
6122 while (((t
= tok
) >= TOK_ULE
&& t
<= TOK_GT
) ||
6123 t
== TOK_ULT
|| t
== TOK_UGE
) {
6130 static void expr_cmpeq(void)
6135 while ((t
= tok
) == TOK_EQ
|| t
== TOK_NE
) {
6142 static void expr_and(void)
6145 while (tok
== '&') {
6152 static void expr_xor(void)
6155 while (tok
== '^') {
6162 static void expr_or(void)
6165 while (tok
== '|') {
6172 static void expr_landor(int op
);
6174 static void expr_land(void)
6177 if (tok
== TOK_LAND
)
6181 static void expr_lor(void)
6188 # define expr_landor_next(op) op == TOK_LAND ? expr_or() : expr_land()
6189 #else /* defined precedence_parser */
6190 # define expr_landor_next(op) unary(), expr_infix(precedence(op) + 1)
6191 # define expr_lor() unary(), expr_infix(1)
6193 static int precedence(int tok
)
6196 case TOK_LOR
: return 1;
6197 case TOK_LAND
: return 2;
6201 case TOK_EQ
: case TOK_NE
: return 6;
6202 relat
: case TOK_ULT
: case TOK_UGE
: return 7;
6203 case TOK_SHL
: case TOK_SAR
: return 8;
6204 case '+': case '-': return 9;
6205 case '*': case '/': case '%': return 10;
6207 if (tok
>= TOK_ULE
&& tok
<= TOK_GT
)
6212 static unsigned char prec
[256];
6213 static void init_prec(void)
6216 for (i
= 0; i
< 256; i
++)
6217 prec
[i
] = precedence(i
);
6219 #define precedence(i) ((unsigned)i < 256 ? prec[i] : 0)
6221 static void expr_landor(int op
);
6223 static void expr_infix(int p
)
6226 while ((p2
= precedence(t
)) >= p
) {
6227 if (t
== TOK_LOR
|| t
== TOK_LAND
) {
6232 if (precedence(tok
) > p2
)
6241 /* Assuming vtop is a value used in a conditional context
6242 (i.e. compared with zero) return 0 if it's false, 1 if
6243 true and -1 if it can't be statically determined. */
6244 static int condition_3way(void)
6247 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
&&
6248 (!(vtop
->r
& VT_SYM
) || !vtop
->sym
->a
.weak
)) {
6250 gen_cast_s(VT_BOOL
);
6257 static void expr_landor(int op
)
6259 int t
= 0, cc
= 1, f
= 0, i
= op
== TOK_LAND
, c
;
6261 c
= f
? i
: condition_3way();
6263 save_regs(1), cc
= 0;
6265 nocode_wanted
++, f
= 1;
6273 expr_landor_next(op
);
6285 static int is_cond_bool(SValue
*sv
)
6287 if ((sv
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
6288 && (sv
->type
.t
& VT_BTYPE
) == VT_INT
)
6289 return (unsigned)sv
->c
.i
< 2;
6290 if (sv
->r
== VT_CMP
)
6295 static void expr_cond(void)
6297 int tt
, u
, r1
, r2
, rc
, t1
, t2
, islv
, c
, g
;
6305 c
= condition_3way();
6306 g
= (tok
== ':' && gnu_ext
);
6316 /* needed to avoid having different registers saved in
6323 ncw_prev
= nocode_wanted
;
6329 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FUNC
)
6330 mk_pointer(&vtop
->type
);
6331 sv
= *vtop
; /* save value to handle it later */
6332 vtop
--; /* no vpop so that FP stack is not flushed */
6342 nocode_wanted
= ncw_prev
;
6348 if (c
< 0 && is_cond_bool(vtop
) && is_cond_bool(&sv
)) {
6349 /* optimize "if (f ? a > b : c || d) ..." for example, where normally
6350 "a < b" and "c || d" would be forced to "(int)0/1" first, whereas
6351 this code jumps directly to the if's then/else branches. */
6356 /* combine jump targets of 2nd op with VT_CMP of 1st op */
6359 nocode_wanted
= ncw_prev
;
6360 // tcc_warning("two conditions expr_cond");
6364 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FUNC
)
6365 mk_pointer(&vtop
->type
);
6367 /* cast operands to correct type according to ISOC rules */
6368 if (!combine_types(&type
, &sv
, vtop
, '?'))
6369 type_incompatibility_error(&sv
.type
, &vtop
->type
,
6370 "type mismatch in conditional expression (have '%s' and '%s')");
6371 /* keep structs lvalue by transforming `(expr ? a : b)` to `*(expr ? &a : &b)` so
6372 that `(expr ? a : b).mem` does not error with "lvalue expected" */
6373 islv
= (vtop
->r
& VT_LVAL
) && (sv
.r
& VT_LVAL
) && VT_STRUCT
== (type
.t
& VT_BTYPE
);
6375 /* now we convert second operand */
6379 mk_pointer(&vtop
->type
);
6381 } else if (VT_STRUCT
== (vtop
->type
.t
& VT_BTYPE
))
6385 rc
= RC_TYPE(type
.t
);
6386 /* for long longs, we use fixed registers to avoid having
6387 to handle a complicated move */
6388 if (USING_TWO_WORDS(type
.t
))
6389 rc
= RC_RET(type
.t
);
6397 nocode_wanted
= ncw_prev
;
6399 /* this is horrible, but we must also convert first
6405 mk_pointer(&vtop
->type
);
6407 } else if (VT_STRUCT
== (vtop
->type
.t
& VT_BTYPE
))
6413 move_reg(r2
, r1
, islv
? VT_PTR
: type
.t
);
6423 static void expr_eq(void)
6428 if ((t
= tok
) == '=' || TOK_ASSIGN(t
)) {
6436 gen_op(TOK_ASSIGN_OP(t
));
6442 ST_FUNC
void gexpr(void)
6448 constant_p
&= (vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
&&
6449 !((vtop
->r
& VT_SYM
) && vtop
->sym
->a
.addrtaken
);
6455 /* parse a constant expression and return value in vtop. */
6456 static void expr_const1(void)
6459 nocode_wanted
+= unevalmask
+ 1;
6461 nocode_wanted
-= unevalmask
+ 1;
6465 /* parse an integer constant and return its value. */
6466 static inline int64_t expr_const64(void)
6470 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) != VT_CONST
)
6471 expect("constant expression");
6477 /* parse an integer constant and return its value.
6478 Complain if it doesn't fit 32bit (signed or unsigned). */
6479 ST_FUNC
int expr_const(void)
6482 int64_t wc
= expr_const64();
6484 if (c
!= wc
&& (unsigned)c
!= wc
)
6485 tcc_error("constant exceeds 32 bit");
6489 /* ------------------------------------------------------------------------- */
6490 /* return from function */
6492 #ifndef TCC_TARGET_ARM64
6493 static void gfunc_return(CType
*func_type
)
6495 if ((func_type
->t
& VT_BTYPE
) == VT_STRUCT
) {
6496 CType type
, ret_type
;
6497 int ret_align
, ret_nregs
, regsize
;
6498 ret_nregs
= gfunc_sret(func_type
, func_var
, &ret_type
,
6499 &ret_align
, ®size
);
6500 if (ret_nregs
< 0) {
6501 #ifdef TCC_TARGET_RISCV64
6502 arch_transfer_ret_regs(0);
6504 } else if (0 == ret_nregs
) {
6505 /* if returning structure, must copy it to implicit
6506 first pointer arg location */
6509 vset(&type
, VT_LOCAL
| VT_LVAL
, func_vc
);
6512 /* copy structure value to pointer */
6515 /* returning structure packed into registers */
6516 int size
, addr
, align
, rc
;
6517 size
= type_size(func_type
,&align
);
6518 if ((vtop
->r
!= (VT_LOCAL
| VT_LVAL
) ||
6519 (vtop
->c
.i
& (ret_align
-1)))
6520 && (align
& (ret_align
-1))) {
6521 loc
= (loc
- size
) & -ret_align
;
6524 vset(&type
, VT_LOCAL
| VT_LVAL
, addr
);
6528 vset(&ret_type
, VT_LOCAL
| VT_LVAL
, addr
);
6530 vtop
->type
= ret_type
;
6531 rc
= RC_RET(ret_type
.t
);
6539 if (--ret_nregs
== 0)
6541 /* We assume that when a structure is returned in multiple
6542 registers, their classes are consecutive values of the
6545 vtop
->c
.i
+= regsize
;
6550 gv(RC_RET(func_type
->t
));
6552 vtop
--; /* NOT vpop() because on x86 it would flush the fp stack */
6556 static void check_func_return(void)
6558 if ((func_vt
.t
& VT_BTYPE
) == VT_VOID
)
6560 if (!strcmp (funcname
, "main")
6561 && (func_vt
.t
& VT_BTYPE
) == VT_INT
) {
6562 /* main returns 0 by default */
6564 gen_assign_cast(&func_vt
);
6565 gfunc_return(&func_vt
);
6567 tcc_warning("function might return no value: '%s'", funcname
);
6571 /* ------------------------------------------------------------------------- */
6574 static int case_cmpi(const void *pa
, const void *pb
)
6576 int64_t a
= (*(struct case_t
**) pa
)->v1
;
6577 int64_t b
= (*(struct case_t
**) pb
)->v1
;
6578 return a
< b
? -1 : a
> b
;
6581 static int case_cmpu(const void *pa
, const void *pb
)
6583 uint64_t a
= (uint64_t)(*(struct case_t
**) pa
)->v1
;
6584 uint64_t b
= (uint64_t)(*(struct case_t
**) pb
)->v1
;
6585 return a
< b
? -1 : a
> b
;
/* Emit a test of vtop (jump-if-true, chained through 't') and resolve
   the resulting forward jump to the already-known address 'a'. */
static void gtst_addr(int t, int a)
{
    int j = gvtst(0, t);
    gsym_addr(j, a);
}
6593 static void gcase(struct case_t
**base
, int len
, int *bsym
)
6597 int ll
= (vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
;
6614 gtst_addr(0, p
->sym
); /* v1 <= x <= v2 */
6616 gcase(base
, len
/2, bsym
);
6620 base
+= e
; len
-= e
;
6630 if (p
->v1
== p
->v2
) {
6632 gtst_addr(0, p
->sym
);
6642 gtst_addr(0, p
->sym
);
6646 *bsym
= gjmp(*bsym
);
6649 /* ------------------------------------------------------------------------- */
6650 /* __attribute__((cleanup(fn))) */
6652 static void try_call_scope_cleanup(Sym
*stop
)
6654 Sym
*cls
= cur_scope
->cl
.s
;
6656 for (; cls
!= stop
; cls
= cls
->ncl
) {
6657 Sym
*fs
= cls
->next
;
6658 Sym
*vs
= cls
->prev_tok
;
6660 vpushsym(&fs
->type
, fs
);
6661 vset(&vs
->type
, vs
->r
, vs
->c
);
6663 mk_pointer(&vtop
->type
);
6669 static void try_call_cleanup_goto(Sym
*cleanupstate
)
6674 if (!cur_scope
->cl
.s
)
6677 /* search NCA of both cleanup chains given parents and initial depth */
6678 ocd
= cleanupstate
? cleanupstate
->v
& ~SYM_FIELD
: 0;
6679 for (ccd
= cur_scope
->cl
.n
, oc
= cleanupstate
; ocd
> ccd
; --ocd
, oc
= oc
->ncl
)
6681 for (cc
= cur_scope
->cl
.s
; ccd
> ocd
; --ccd
, cc
= cc
->ncl
)
6683 for (; cc
!= oc
; cc
= cc
->ncl
, oc
= oc
->ncl
, --ccd
)
6686 try_call_scope_cleanup(cc
);
6689 /* call 'func' for each __attribute__((cleanup(func))) */
6690 static void block_cleanup(struct scope
*o
)
6694 for (pg
= &pending_gotos
; (g
= *pg
) && g
->c
> o
->cl
.n
;) {
6695 if (g
->prev_tok
->r
& LABEL_FORWARD
) {
6700 try_call_scope_cleanup(o
->cl
.s
);
6701 pcl
->jnext
= gjmp(0);
6703 goto remove_pending
;
6713 try_call_scope_cleanup(o
->cl
.s
);
6716 /* ------------------------------------------------------------------------- */
6719 static void vla_restore(int loc
)
6722 gen_vla_sp_restore(loc
);
6725 static void vla_leave(struct scope
*o
)
6727 struct scope
*c
= cur_scope
, *v
= NULL
;
6728 for (; c
!= o
&& c
; c
= c
->prev
)
6732 vla_restore(v
->vla
.locorig
);
6735 /* ------------------------------------------------------------------------- */
6738 static void new_scope(struct scope
*o
)
6740 /* copy and link previous scope */
6742 o
->prev
= cur_scope
;
6744 cur_scope
->vla
.num
= 0;
6746 /* record local declaration stack position */
6747 o
->lstk
= local_stack
;
6748 o
->llstk
= local_label_stack
;
6752 tcc_debug_stabn(tcc_state
, N_LBRAC
, ind
- func_ind
);
6755 static void prev_scope(struct scope
*o
, int is_expr
)
6759 if (o
->cl
.s
!= o
->prev
->cl
.s
)
6760 block_cleanup(o
->prev
);
6762 /* pop locally defined labels */
6763 label_pop(&local_label_stack
, o
->llstk
, is_expr
);
6765 /* In the is_expr case (a statement expression is finished here),
6766 vtop might refer to symbols on the local_stack. Either via the
6767 type or via vtop->sym. We can't pop those nor any that in turn
6768 might be referred to. To make it easier we don't roll back
6769 any symbols in that case; some upper level call to block() will
6770 do that. We do have to remove such symbols from the lookup
6771 tables, though. sym_pop will do that. */
6773 /* pop locally defined symbols */
6774 pop_local_syms(o
->lstk
, is_expr
);
6775 cur_scope
= o
->prev
;
6779 tcc_debug_stabn(tcc_state
, N_RBRAC
, ind
- func_ind
);
6782 /* leave a scope via break/continue(/goto) */
6783 static void leave_scope(struct scope
*o
)
6787 try_call_scope_cleanup(o
->cl
.s
);
6791 /* ------------------------------------------------------------------------- */
6792 /* call block from 'for do while' loops */
6794 static void lblock(int *bsym
, int *csym
)
6796 struct scope
*lo
= loop_scope
, *co
= cur_scope
;
6797 int *b
= co
->bsym
, *c
= co
->csym
;
6811 static void block(int is_expr
)
6813 int a
, b
, c
, d
, e
, t
;
6818 /* default return value is (void) */
6820 vtop
->type
.t
= VT_VOID
;
6825 /* If the token carries a value, next() might destroy it. Only with
6826 invalid code such as f(){"123"4;} */
6827 if (TOK_HAS_VALUE(t
))
6832 tcc_tcov_check_line (tcc_state
, 0), tcc_tcov_block_begin (tcc_state
);
6835 //new_scope(&o); //?? breaks tests2.122
6841 if (tok
== TOK_ELSE
) {
6846 gsym(d
); /* patch else jmp */
6850 //prev_scope(&o,0); //?? breaks tests2.122
6852 } else if (t
== TOK_WHILE
) {
6865 } else if (t
== '{') {
6868 /* handle local labels declarations */
6869 while (tok
== TOK_LABEL
) {
6872 if (tok
< TOK_UIDENT
)
6873 expect("label identifier");
6874 label_push(&local_label_stack
, tok
, LABEL_DECLARED
);
6876 } while (tok
== ',');
6880 while (tok
!= '}') {
6889 prev_scope(&o
, is_expr
);
6892 else if (!nocode_wanted
)
6893 check_func_return();
6895 } else if (t
== TOK_RETURN
) {
6896 b
= (func_vt
.t
& VT_BTYPE
) != VT_VOID
;
6900 gen_assign_cast(&func_vt
);
6902 if (vtop
->type
.t
!= VT_VOID
)
6903 tcc_warning("void function returns a value");
6907 tcc_warning("'return' with no value");
6910 leave_scope(root_scope
);
6912 gfunc_return(&func_vt
);
6914 /* jump unless last stmt in top-level block */
6915 if (tok
!= '}' || local_scope
!= 1)
6918 tcc_tcov_block_end (tcc_state
, -1);
6921 } else if (t
== TOK_BREAK
) {
6923 if (!cur_scope
->bsym
)
6924 tcc_error("cannot break");
6925 if (cur_switch
&& cur_scope
->bsym
== cur_switch
->bsym
)
6926 leave_scope(cur_switch
->scope
);
6928 leave_scope(loop_scope
);
6929 *cur_scope
->bsym
= gjmp(*cur_scope
->bsym
);
6932 } else if (t
== TOK_CONTINUE
) {
6934 if (!cur_scope
->csym
)
6935 tcc_error("cannot continue");
6936 leave_scope(loop_scope
);
6937 *cur_scope
->csym
= gjmp(*cur_scope
->csym
);
6940 } else if (t
== TOK_FOR
) {
6945 /* c99 for-loop init decl? */
6946 if (!decl(VT_JMP
)) {
6947 /* no, regular for-loop init expr */
6975 } else if (t
== TOK_DO
) {
6991 } else if (t
== TOK_SWITCH
) {
6992 struct switch_t
*sw
;
6995 sw
= tcc_mallocz(sizeof *sw
);
6997 sw
->scope
= cur_scope
;
6998 sw
->prev
= cur_switch
;
6999 sw
->nocode_wanted
= nocode_wanted
;
7005 sw
->sv
= *vtop
--; /* save switch value */
7008 b
= gjmp(0); /* jump to first case */
7010 a
= gjmp(a
); /* add implicit break */
7014 if (sw
->nocode_wanted
)
7016 if (sw
->sv
.type
.t
& VT_UNSIGNED
)
7017 qsort(sw
->p
, sw
->n
, sizeof(void*), case_cmpu
);
7019 qsort(sw
->p
, sw
->n
, sizeof(void*), case_cmpi
);
7020 for (b
= 1; b
< sw
->n
; b
++)
7021 if (sw
->sv
.type
.t
& VT_UNSIGNED
7022 ? (uint64_t)sw
->p
[b
- 1]->v2
>= (uint64_t)sw
->p
[b
]->v1
7023 : sw
->p
[b
- 1]->v2
>= sw
->p
[b
]->v1
)
7024 tcc_error("duplicate case value");
7027 d
= 0, gcase(sw
->p
, sw
->n
, &d
);
7030 gsym_addr(d
, sw
->def_sym
);
7037 dynarray_reset(&sw
->p
, &sw
->n
);
7038 cur_switch
= sw
->prev
;
7042 } else if (t
== TOK_CASE
) {
7043 struct case_t
*cr
= tcc_malloc(sizeof(struct case_t
));
7046 cr
->v1
= cr
->v2
= expr_const64();
7047 if (gnu_ext
&& tok
== TOK_DOTS
) {
7049 cr
->v2
= expr_const64();
7050 if ((!(cur_switch
->sv
.type
.t
& VT_UNSIGNED
) && cr
->v2
< cr
->v1
)
7051 || (cur_switch
->sv
.type
.t
& VT_UNSIGNED
&& (uint64_t)cr
->v2
< (uint64_t)cr
->v1
))
7052 tcc_warning("empty case range");
7054 /* case and default are unreachable from a switch under nocode_wanted */
7055 if (!cur_switch
->nocode_wanted
)
7057 dynarray_add(&cur_switch
->p
, &cur_switch
->n
, cr
);
7060 goto block_after_label
;
7062 } else if (t
== TOK_DEFAULT
) {
7065 if (cur_switch
->def_sym
)
7066 tcc_error("too many 'default'");
7067 cur_switch
->def_sym
= cur_switch
->nocode_wanted
? 1 : gind();
7070 goto block_after_label
;
7072 } else if (t
== TOK_GOTO
) {
7073 if (cur_scope
->vla
.num
)
7074 vla_restore(cur_scope
->vla
.locorig
);
7075 if (tok
== '*' && gnu_ext
) {
7079 if ((vtop
->type
.t
& VT_BTYPE
) != VT_PTR
)
7083 } else if (tok
>= TOK_UIDENT
) {
7084 s
= label_find(tok
);
7085 /* put forward definition if needed */
7087 s
= label_push(&global_label_stack
, tok
, LABEL_FORWARD
);
7088 else if (s
->r
== LABEL_DECLARED
)
7089 s
->r
= LABEL_FORWARD
;
7091 if (s
->r
& LABEL_FORWARD
) {
7092 /* start new goto chain for cleanups, linked via label->next */
7093 if (cur_scope
->cl
.s
&& !nocode_wanted
) {
7094 sym_push2(&pending_gotos
, SYM_FIELD
, 0, cur_scope
->cl
.n
);
7095 pending_gotos
->prev_tok
= s
;
7096 s
= sym_push2(&s
->next
, SYM_FIELD
, 0, 0);
7097 pending_gotos
->next
= s
;
7099 s
->jnext
= gjmp(s
->jnext
);
7101 try_call_cleanup_goto(s
->cleanupstate
);
7102 gjmp_addr(s
->jnext
);
7107 expect("label identifier");
7111 } else if (t
== TOK_ASM1
|| t
== TOK_ASM2
|| t
== TOK_ASM3
) {
7115 if (tok
== ':' && t
>= TOK_UIDENT
) {
7120 if (s
->r
== LABEL_DEFINED
)
7121 tcc_error("duplicate label '%s'", get_tok_str(s
->v
, NULL
));
7122 s
->r
= LABEL_DEFINED
;
7124 Sym
*pcl
; /* pending cleanup goto */
7125 for (pcl
= s
->next
; pcl
; pcl
= pcl
->prev
)
7127 sym_pop(&s
->next
, NULL
, 0);
7131 s
= label_push(&global_label_stack
, t
, LABEL_DEFINED
);
7134 s
->cleanupstate
= cur_scope
->cl
.s
;
7138 /* Accept attributes after labels (e.g. 'unused') */
7139 AttributeDef ad_tmp
;
7140 parse_attribute(&ad_tmp
);
7143 tcc_tcov_reset_ind(tcc_state
);
7144 vla_restore(cur_scope
->vla
.loc
);
7147 /* we accept this, but it is a mistake */
7148 tcc_warning_c(warn_all
)("deprecated use of label at end of compound statement");
7151 /* expression case */
7168 tcc_tcov_check_line (tcc_state
, 0), tcc_tcov_block_end (tcc_state
, 0);
/* NOTE(review): this extraction is word-wrapped and elides many original
   source lines (the embedded line numbers jump, e.g. 7181 -> 7193);
   code below is kept byte-identical to the fragment. */
7171 /* This skips over a stream of tokens containing balanced {} and ()
7172 pairs, stopping at outer ',' ';' and '}' (or matching '}' if we started
7173 with a '{'). If STR then allocates and stores the skipped tokens
7174 in *STR. This doesn't check if () and {} are nested correctly,
7175 i.e. "({)}" is accepted. */
7176 static void skip_or_save_block(TokenString
**str
)
/* remember whether we entered on a '{' so the matching '}' ends the scan */
7178 int braces
= tok
== '{';
7181 *str
= tok_str_alloc();
/* EOF while saving tokens or inside unbalanced brackets is an error */
7193 if (str
|| level
> 0)
7194 tcc_error("unexpected end of file");
7199 tok_str_add_tok(*str
);
/* bracket-nesting bookkeeping (level updates elided in this extraction) */
7201 if (t
== '{' || t
== '(' || t
== '[') {
7203 } else if (t
== '}' || t
== ')' || t
== ']') {
7205 if (level
== 0 && braces
&& t
== '}')
/* terminate the saved token string (-1 = line info end, 0 = terminator) */
7210 tok_str_add(*str
, -1);
7211 tok_str_add(*str
, 0);
7215 #define EXPR_CONST 1
/* Parse one initializer element.  Saves and restores global_expr so that
   compound literals inside constant initializers are allocated globally,
   then rejects elements that are not link-time constants.
   NOTE(review): extraction is fragmented and elides original lines;
   code kept byte-identical. */
7218 static void parse_init_elem(int expr_type
)
7220 int saved_global_expr
;
7223 /* compound literals must be allocated globally in this case */
7224 saved_global_expr
= global_expr
;
7227 global_expr
= saved_global_expr
;
7228 /* NOTE: symbols are accepted, as well as lvalue for anon symbols
7229 (compound literals). */
/* reject anything that is neither a plain constant nor an anonymous
   symbol lvalue (PE additionally rejects dllimport symbols) */
7230 if (((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) != VT_CONST
7231 && ((vtop
->r
& (VT_SYM
|VT_LVAL
)) != (VT_SYM
|VT_LVAL
)
7232 || vtop
->sym
->v
< SYM_FIRST_ANOM
))
7233 #ifdef TCC_TARGET_PE
7234 || ((vtop
->r
& VT_SYM
) && vtop
->sym
->a
.dllimport
)
7237 tcc_error("initializer element is not constant");
/* Internal sanity check: an initializer write at 'offset' must stay within
   the space already reserved (section data_offset for static data, or the
   local stack offset for automatic storage).  Compiled out to an empty
   macro in the non-debug configuration (see #define below).
   NOTE(review): fragmented extraction; code kept byte-identical. */
7246 static void init_assert(init_params
*p
, int offset
)
7248 if (p
->sec
? !NODATA_WANTED
&& offset
> p
->sec
->data_offset
7249 : !nocode_wanted
&& offset
> p
->local_offset
)
7250 tcc_internal_error("initializer overflow");
7253 #define init_assert(sec, offset)
/* NOTE(review): body is elided after the vpush of the memset helper —
   presumably the memset call arguments follow; confirm against full source.
   Code kept byte-identical. */
7256 /* put zeros for variable based init */
7257 static void init_putz(init_params
*p
, unsigned long c
, int size
)
7259 init_assert(p
, c
+ size
);
7261 /* nothing to do because globals are already set to zero */
/* local (stack) case: zero via a runtime call to memset */
7263 vpush_helper_func(TOK_memset
);
7265 #ifdef TCC_TARGET_ARM
7277 #define DIF_SIZE_ONLY 2
7278 #define DIF_HAVE_ELEM 4
/* NOTE(review): fragmented extraction with elided lines; code kept
   byte-identical.  Compacts the relocation table in place, dropping
   entries whose r_offset falls in [c, c + size). */
7281 /* delete relocations for specified range c ... c + size. Unfortunatly
7282 in very special cases, relocations may occur unordered */
7283 static void decl_design_delrels(Section
*sec
, int c
, int size
)
7285 ElfW_Rel
*rel
, *rel2
, *rel_end
;
/* nothing to do without a relocation section */
7286 if (!sec
|| !sec
->reloc
)
7288 rel
= rel2
= (ElfW_Rel
*)sec
->reloc
->data
;
7289 rel_end
= (ElfW_Rel
*)(sec
->reloc
->data
+ sec
->reloc
->data_offset
);
7290 while (rel
< rel_end
) {
/* entry inside the overwritten range: drop it (shrink data_offset) */
7291 if (rel
->r_offset
>= c
&& rel
->r_offset
< c
+ size
) {
7292 sec
->reloc
->data_offset
-= sizeof *rel
;
/* keep entry: slide it down to the write cursor rel2 */
7295 memcpy(rel2
, rel
, sizeof *rel
);
/* Track the size of the flexible array member being initialized:
   if 'ref' is the tracked flex-array ref and 'index' reaches past the
   currently known element count, (elided code presumably grows ref->c —
   TODO confirm against full source); otherwise a negative element count
   means an uninitializable flexible array in this context.
   NOTE(review): fragmented extraction; code kept byte-identical. */
7302 static void decl_design_flex(init_params
*p
, Sym
*ref
, int index
)
7304 if (ref
== p
->flex_array_ref
) {
7305 if (index
>= ref
->c
)
7307 } else if (ref
->c
< 0)
7308 tcc_error("flexible array has zero size in this context");
/* NOTE(review): this extraction is word-wrapped and elides many original
   source lines (embedded line numbers jump); the control flow below is
   therefore incomplete.  Code kept byte-identical; only comments added. */
7311 /* t is the array or struct type. c is the array or struct
7312 address. cur_field is the pointer to the current
7313 field, for arrays the 'c' member contains the current start
7314 index. 'flags' is as in decl_initializer.
7315 'al' contains the already initialized length of the
7316 current container (starting at c). This returns the new length of that. */
7317 static int decl_designator(init_params
*p
, CType
*type
, unsigned long c
,
7318 Sym
**cur_field
, int flags
, int al
)
7321 int index
, index_last
, align
, l
, nb_elems
, elem_size
;
7322 unsigned long corig
= c
;
7327 if (flags
& DIF_HAVE_ELEM
)
/* GNU extension: 'fieldname:' designator syntax (old style) */
7330 if (gnu_ext
&& tok
>= TOK_UIDENT
) {
7337 /* NOTE: we only support ranges for last designator */
7338 while (nb_elems
== 1 && (tok
== '[' || tok
== '.')) {
7340 if (!(type
->t
& VT_ARRAY
))
7341 expect("array type");
7343 index
= index_last
= expr_const();
/* GNU case-range style designator: [first ... last] */
7344 if (tok
== TOK_DOTS
&& gnu_ext
) {
7346 index_last
= expr_const();
7350 decl_design_flex(p
, s
, index_last
);
7351 if (index
< 0 || index_last
>= s
->c
|| index_last
< index
)
7352 tcc_error("index exceeds array bounds or range is empty");
7354 (*cur_field
)->c
= index_last
;
7355 type
= pointed_type(type
);
7356 elem_size
= type_size(type
, &align
);
7357 c
+= index
* elem_size
;
7358 nb_elems
= index_last
- index
+ 1;
/* '.' designator requires a struct/union */
7365 if ((type
->t
& VT_BTYPE
) != VT_STRUCT
)
7366 expect("struct/union type");
7368 f
= find_field(type
, l
, &cumofs
);
7379 } else if (!gnu_ext
) {
/* no designator: advance to the next positional element */
7384 if (type
->t
& VT_ARRAY
) {
7385 index
= (*cur_field
)->c
;
7387 decl_design_flex(p
, s
, index
);
7389 tcc_error("too many initializers");
7390 type
= pointed_type(type
);
7391 elem_size
= type_size(type
, &align
);
7392 c
+= index
* elem_size
;
7395 /* Skip bitfield padding. Also with size 32 and 64. */
7396 while (f
&& (f
->v
& SYM_FIRST_ANOM
) &&
7397 is_integer_btype(f
->type
.t
& VT_BTYPE
))
7398 *cur_field
= f
= f
->next
;
7400 tcc_error("too many initializers");
7406 if (!elem_size
) /* for structs */
7407 elem_size
= type_size(type
, &align
);
7409 /* Using designators the same element can be initialized more
7410 than once. In that case we need to delete possibly already
7411 existing relocations. */
7412 if (!(flags
& DIF_SIZE_ONLY
) && c
- corig
< al
) {
7413 decl_design_delrels(p
->sec
, c
, elem_size
* nb_elems
);
7414 flags
&= ~DIF_CLEAR
; /* mark stack dirty too */
7417 decl_initializer(p
, type
, c
, flags
& ~DIF_FIRST
);
/* range designator: replicate the first element over the whole range */
7419 if (!(flags
& DIF_SIZE_ONLY
) && nb_elems
> 1) {
7423 if (p
->sec
|| (type
->t
& VT_ARRAY
)) {
7424 /* make init_putv/vstore believe it were a struct */
7426 t1
.t
= VT_STRUCT
, t1
.ref
= &aref
;
7430 vpush_ref(type
, p
->sec
, c
, elem_size
);
7432 vset(type
, VT_LOCAL
|VT_LVAL
, c
);
7433 for (i
= 1; i
< nb_elems
; i
++) {
7435 init_putv(p
, type
, c
+ elem_size
* i
);
7440 c
+= nb_elems
* elem_size
;
/* NOTE(review): this extraction is word-wrapped and elides many original
   source lines (embedded line numbers jump, e.g. the switch on the basic
   type is missing its case labels).  Code kept byte-identical. */
7446 /* store a value or an expression directly in global data or in local array */
7447 static void init_putv(init_params
*p
, CType
*type
, unsigned long c
)
7453 Section
*sec
= p
->sec
;
7457 dtype
.t
&= ~VT_CONSTANT
; /* need to do that to avoid false warning */
7459 size
= type_size(type
, &align
);
/* a bitfield's store size is derived from its bit position + width */
7460 if (type
->t
& VT_BITFIELD
)
7461 size
= (BIT_POS(type
->t
) + BIT_SIZE(type
->t
) + 7) / 8;
7462 init_assert(p
, c
+ size
);
7465 /* XXX: not portable */
7466 /* XXX: generate error if incorrect relocation */
7467 gen_assign_cast(&dtype
);
7468 bt
= type
->t
& VT_BTYPE
;
/* a symbol-relative value can only be emitted as a pointer-sized
   relocation; anything else cannot be resolved at load time */
7470 if ((vtop
->r
& VT_SYM
)
7472 && (bt
!= (PTR_SIZE
== 8 ? VT_LLONG
: VT_INT
)
7473 || (type
->t
& VT_BITFIELD
))
7474 && !((vtop
->r
& VT_CONST
) && vtop
->sym
->v
>= SYM_FIRST_ANOM
)
7476 tcc_error("initializer element is not computable at load time");
7478 if (NODATA_WANTED
) {
7483 ptr
= sec
->data
+ c
;
7486 /* XXX: make code faster ? */
7487 if ((vtop
->r
& (VT_SYM
|VT_CONST
)) == (VT_SYM
|VT_CONST
) &&
7488 vtop
->sym
->v
>= SYM_FIRST_ANOM
&&
7489 /* XXX This rejects compound literals like
7490 '(void *){ptr}'. The problem is that '&sym' is
7491 represented the same way, which would be ruled out
7492 by the SYM_FIRST_ANOM check above, but also '"string"'
7493 in 'char *p = "string"' is represented the same
7494 with the type being VT_PTR and the symbol being an
7495 anonymous one. That is, there's no difference in vtop
7496 between '(void *){x}' and '&(void *){x}'. Ignore
7497 pointer typed entities here. Hopefully no real code
7498 will ever use compound literals with scalar type. */
7499 (vtop
->type
.t
& VT_BTYPE
) != VT_PTR
) {
7500 /* These come from compound literals, memcpy stuff over. */
7504 esym
= elfsym(vtop
->sym
);
7505 ssec
= tcc_state
->sections
[esym
->st_shndx
];
7506 memmove (ptr
, ssec
->data
+ esym
->st_value
+ (int)vtop
->c
.i
, size
);
7508 /* We need to copy over all memory contents, and that
7509 includes relocations. Use the fact that relocs are
7510 created it order, so look from the end of relocs
7511 until we hit one before the copied region. */
7512 unsigned long relofs
= ssec
->reloc
->data_offset
;
7513 while (relofs
>= sizeof(*rel
)) {
7514 relofs
-= sizeof(*rel
);
7515 rel
= (ElfW_Rel
*)(ssec
->reloc
->data
+ relofs
);
7516 if (rel
->r_offset
>= esym
->st_value
+ size
)
7518 if (rel
->r_offset
< esym
->st_value
)
/* re-emit the copied relocation at its new offset in 'sec' */
7520 put_elf_reloca(symtab_section
, sec
,
7521 c
+ rel
->r_offset
- esym
->st_value
,
7522 ELFW(R_TYPE
)(rel
->r_info
),
7523 ELFW(R_SYM
)(rel
->r_info
),
/* bitfield store: merge 'val' into the target bytes chunk by chunk */
7533 if (type
->t
& VT_BITFIELD
) {
7534 int bit_pos
, bit_size
, bits
, n
;
7535 unsigned char *p
, v
, m
;
7536 bit_pos
= BIT_POS(vtop
->type
.t
);
7537 bit_size
= BIT_SIZE(vtop
->type
.t
);
7538 p
= (unsigned char*)ptr
+ (bit_pos
>> 3);
7539 bit_pos
&= 7, bits
= 0;
7544 v
= val
>> bits
<< bit_pos
;
7545 m
= ((1 << n
) - 1) << bit_pos
;
7546 *p
= (*p
& ~m
) | (v
& m
);
7547 bits
+= n
, bit_size
-= n
, bit_pos
= 0, ++p
;
/* scalar stores by size (the switch's case labels are elided here) */
7552 *(char *)ptr
= val
!= 0;
7558 write16le(ptr
, val
);
7561 write32le(ptr
, val
);
7564 write64le(ptr
, val
);
7567 #if defined TCC_IS_NATIVE_387
7568 /* Host and target platform may be different but both have x87.
7569 On windows, tcc does not use VT_LDOUBLE, except when it is a
7570 cross compiler. In this case a mingw gcc as host compiler
7571 comes here with 10-byte long doubles, while msvc or tcc won't.
7572 tcc itself can still translate by asm.
7573 In any case we avoid possibly random bytes 11 and 12.
7575 if (sizeof (long double) >= 10)
7576 memcpy(ptr
, &vtop
->c
.ld
, 10);
7578 else if (sizeof (long double) == sizeof (double))
7579 __asm__("fldl %1\nfstpt %0\n" : "=m" (*ptr
) : "m" (vtop
->c
.ld
));
7581 else if (vtop
->c
.ld
== 0.0)
7585 /* For other platforms it should work natively, but may not work
7586 for cross compilers */
7587 if (sizeof(long double) == LDOUBLE_SIZE
)
7588 memcpy(ptr
, &vtop
->c
.ld
, LDOUBLE_SIZE
);
7589 else if (sizeof(double) == LDOUBLE_SIZE
)
7590 memcpy(ptr
, &vtop
->c
.ld
, LDOUBLE_SIZE
);
7591 #ifndef TCC_CROSS_TEST
7593 tcc_error("can't cross compile long double constants");
7598 /* intptr_t may need a reloc too, see tcctest.c:relocation_test() */
7601 if (vtop
->r
& VT_SYM
)
7602 greloca(sec
, vtop
->sym
, c
, R_DATA_PTR
, val
);
7604 write64le(ptr
, val
);
7607 write32le(ptr
, val
);
7611 write64le(ptr
, val
);
/* pointer-sized case: emit relocation for symbol-relative values */
7615 if (vtop
->r
& VT_SYM
)
7616 greloc(sec
, vtop
->sym
, c
, R_DATA_PTR
);
7617 write32le(ptr
, val
);
7621 //tcc_internal_error("unexpected type");
/* local (stack) case: generate a normal store through an lvalue */
7627 vset(&dtype
, VT_LOCAL
|VT_LVAL
, c
);
/* NOTE(review): this extraction is word-wrapped and elides many original
   source lines (embedded line numbers jump); code kept byte-identical,
   comments only added. */
7634 /* 't' contains the type and storage info. 'c' is the offset of the
7635 object in section 'sec'. If 'sec' is NULL, it means stack based
7636 allocation. 'flags & DIF_FIRST' is true if array '{' must be read (multi
7637 dimension implicit array init handling). 'flags & DIF_SIZE_ONLY' is true if
7638 size only evaluation is wanted (only for arrays). */
7639 static void decl_initializer(init_params
*p
, CType
*type
, unsigned long c
, int flags
)
7641 int len
, n
, no_oblock
, i
;
7647 /* generate line number info */
7648 if (debug_modes
&& !p
->sec
)
7649 tcc_debug_line(tcc_state
), tcc_tcov_check_line (tcc_state
, 1);
/* eagerly parse a scalar element unless it is a string literal
   (strings get the special array handling further below) */
7651 if (!(flags
& DIF_HAVE_ELEM
) && tok
!= '{' &&
7652 /* In case of strings we have special handling for arrays, so
7653 don't consume them as initializer value (which would commit them
7654 to some anonymous symbol). */
7655 tok
!= TOK_LSTR
&& tok
!= TOK_STR
&&
7656 (!(flags
& DIF_SIZE_ONLY
)
7657 /* a struct may be initialized from a struct of same type, as in
7658 struct {int x,y;} a = {1,2}, b = {3,4}, c[] = {a,b};
7659 In that case we need to parse the element in order to check
7660 it for compatibility below */
7661 || (type
->t
& VT_BTYPE
) == VT_STRUCT
)
7663 int ncw_prev
= nocode_wanted
;
7664 if ((flags
& DIF_SIZE_ONLY
) && !p
->sec
)
7666 parse_init_elem(!p
->sec
? EXPR_ANY
: EXPR_CONST
);
7667 nocode_wanted
= ncw_prev
;
7668 flags
|= DIF_HAVE_ELEM
;
7671 if (type
->t
& VT_ARRAY
) {
7673 if (((flags
& DIF_FIRST
) && tok
!= TOK_LSTR
&& tok
!= TOK_STR
) ||
7681 t1
= pointed_type(type
);
7682 size1
= type_size(t1
, &align1
);
7684 /* only parse strings here if correct type (otherwise: handle
7685 them as ((w)char *) expressions */
7686 if ((tok
== TOK_LSTR
&&
7687 #ifdef TCC_TARGET_PE
7688 (t1
->t
& VT_BTYPE
) == VT_SHORT
&& (t1
->t
& VT_UNSIGNED
)
7690 (t1
->t
& VT_BTYPE
) == VT_INT
7692 ) || (tok
== TOK_STR
&& (t1
->t
& VT_BTYPE
) == VT_BYTE
)) {
/* concatenate adjacent string literals into 'initstr' */
7694 cstr_reset(&initstr
);
7695 if (size1
!= (tok
== TOK_STR
? 1 : sizeof(nwchar_t
)))
7696 tcc_error("unhandled string literal merging");
7697 while (tok
== TOK_STR
|| tok
== TOK_LSTR
) {
7699 initstr
.size
-= size1
;
7701 len
+= tokc
.str
.size
;
7703 len
+= tokc
.str
.size
/ sizeof(nwchar_t
);
7705 cstr_cat(&initstr
, tokc
.str
.data
, tokc
.str
.size
);
7708 if (tok
!= ')' && tok
!= '}' && tok
!= ',' && tok
!= ';'
7709 && tok
!= TOK_EOF
) {
7710 /* Not a lone literal but part of a bigger expression. */
7711 unget_tok(size1
== 1 ? TOK_STR
: TOK_LSTR
);
7712 tokc
.str
.size
= initstr
.size
;
7713 tokc
.str
.data
= initstr
.data
;
7717 decl_design_flex(p
, s
, len
);
7718 if (!(flags
& DIF_SIZE_ONLY
)) {
7723 tcc_warning("initializer-string for array is too long");
7724 /* in order to go faster for common case (char
7725 string in global variable, we handle it
7727 if (p
->sec
&& size1
== 1) {
7728 init_assert(p
, c
+ nb
);
7730 memcpy(p
->sec
->data
+ c
, initstr
.data
, nb
);
7734 /* only add trailing zero if enough storage (no
7735 warning in this case since it is standard) */
7736 if (flags
& DIF_CLEAR
)
7739 init_putz(p
, c
+ i
* size1
, (n
- i
) * size1
);
/* slow path: store each (w)char element individually */
7743 } else if (size1
== 1)
7744 ch
= ((unsigned char *)initstr
.data
)[i
];
7746 ch
= ((nwchar_t
*)initstr
.data
)[i
];
7748 init_putv(p
, t1
, c
+ i
* size1
);
7759 /* zero memory once in advance */
7760 if (!(flags
& (DIF_CLEAR
| DIF_SIZE_ONLY
))) {
7761 init_putz(p
, c
, n
*size1
);
7766 /* GNU extension: if the initializer is empty for a flex array,
7767 it's size is zero. We won't enter the loop, so set the size
7769 decl_design_flex(p
, s
, len
);
/* main element loop: one decl_designator() per initializer item */
7770 while (tok
!= '}' || (flags
& DIF_HAVE_ELEM
)) {
7771 len
= decl_designator(p
, type
, c
, &f
, flags
, len
);
7772 flags
&= ~DIF_HAVE_ELEM
;
7773 if (type
->t
& VT_ARRAY
) {
7775 /* special test for multi dimensional arrays (may not
7776 be strictly correct if designators are used at the
7778 if (no_oblock
&& len
>= n
*size1
)
7781 if (s
->type
.t
== VT_UNION
)
7785 if (no_oblock
&& f
== NULL
)
7797 } else if ((flags
& DIF_HAVE_ELEM
)
7798 /* Use i_c_parameter_t, to strip toplevel qualifiers.
7799 The source type might have VT_CONSTANT set, which is
7800 of course assignable to non-const elements. */
7801 && is_compatible_unqualified_types(type
, &vtop
->type
)) {
7804 } else if ((type
->t
& VT_BTYPE
) == VT_STRUCT
) {
7806 if ((flags
& DIF_FIRST
) || tok
== '{') {
/* scalar wrapped in braces: int x = { 1 }; */
7816 } else if (tok
== '{') {
7817 if (flags
& DIF_HAVE_ELEM
)
7820 decl_initializer(p
, type
, c
, flags
& ~DIF_HAVE_ELEM
);
7823 } else one_elem
: if ((flags
& DIF_SIZE_ONLY
)) {
7824 /* If we supported only ISO C we wouldn't have to accept calling
7825 this on anything than an array if DIF_SIZE_ONLY (and even then
7826 only on the outermost level, so no recursion would be needed),
7827 because initializing a flex array member isn't supported.
7828 But GNU C supports it, so we need to recurse even into
7829 subfields of structs and arrays when DIF_SIZE_ONLY is set. */
7830 /* just skip expression */
7831 if (flags
& DIF_HAVE_ELEM
)
7834 skip_or_save_block(NULL
);
7837 if (!(flags
& DIF_HAVE_ELEM
)) {
7838 /* This should happen only when we haven't parsed
7839 the init element above for fear of committing a
7840 string constant to memory too early. */
7841 if (tok
!= TOK_STR
&& tok
!= TOK_LSTR
)
7842 expect("string constant");
7843 parse_init_elem(!p
->sec
? EXPR_ANY
: EXPR_CONST
);
/* skip redundant zero store into already-cleared local storage */
7845 if (!p
->sec
&& (flags
& DIF_CLEAR
) /* container was already zero'd */
7846 && (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
7848 && btype_size(type
->t
& VT_BTYPE
) /* not for fp constants */
7852 init_putv(p
, type
, c
);
/* NOTE(review): this extraction is word-wrapped and elides many original
   source lines (embedded line numbers jump); code kept byte-identical,
   comments only added. */
7856 /* parse an initializer for type 't' if 'has_init' is non zero, and
7857 allocate space in local or global data space ('r' is either
7858 VT_LOCAL or VT_CONST). If 'v' is non zero, then an associated
7859 variable 'v' of scope 'scope' is declared before initializers
7860 are parsed. If 'v' is zero, then a reference to the new object
7861 is put in the value stack. If 'has_init' is 2, a special parsing
7862 is done to handle string constants. */
7863 static void decl_initializer_alloc(CType
*type
, AttributeDef
*ad
, int r
,
7864 int has_init
, int v
, int global
)
7866 int size
, align
, addr
;
7867 TokenString
*init_str
= NULL
;
7870 Sym
*flexible_array
;
7872 int saved_nocode_wanted
= nocode_wanted
;
7873 #ifdef CONFIG_TCC_BCHECK
7874 int bcheck
= tcc_state
->do_bounds_check
&& !NODATA_WANTED
;
7876 init_params p
= {0};
7878 /* Always allocate static or global variables */
7879 if (v
&& (r
& VT_VALMASK
) == VT_CONST
)
7880 nocode_wanted
|= DATA_ONLY_WANTED
;
7882 flexible_array
= NULL
;
7883 size
= type_size(type
, &align
);
7885 /* exactly one flexible array may be initialized, either the
7886 toplevel array or the last member of the toplevel struct */
7889 /* If the base type itself was an array type of unspecified size
7890 (like in 'typedef int arr[]; arr x = {1};') then we will
7891 overwrite the unknown size by the real one for this decl.
7892 We need to unshare the ref symbol holding that size. */
7893 type
->ref
= sym_push(SYM_FIELD
, &type
->ref
->type
, 0, type
->ref
->c
);
7894 p
.flex_array_ref
= type
->ref
;
7896 } else if (has_init
&& (type
->t
& VT_BTYPE
) == VT_STRUCT
) {
/* find a trailing flexible array member of the struct, if any */
7897 Sym
*field
= type
->ref
->next
;
7900 field
= field
->next
;
7901 if (field
->type
.t
& VT_ARRAY
&& field
->type
.ref
->c
< 0) {
7902 flexible_array
= field
;
7903 p
.flex_array_ref
= field
->type
.ref
;
7910 /* If unknown size, do a dry-run 1st pass */
7912 tcc_error("unknown type size");
7913 if (has_init
== 2) {
7914 /* only get strings */
7915 init_str
= tok_str_alloc();
7916 while (tok
== TOK_STR
|| tok
== TOK_LSTR
) {
7917 tok_str_add_tok(init_str
);
7920 tok_str_add(init_str
, -1);
7921 tok_str_add(init_str
, 0);
7923 skip_or_save_block(&init_str
);
/* replay the saved tokens in DIF_SIZE_ONLY mode to learn the size */
7927 begin_macro(init_str
, 1);
7929 decl_initializer(&p
, type
, 0, DIF_FIRST
| DIF_SIZE_ONLY
);
7930 /* prepare second initializer parsing */
7931 macro_ptr
= init_str
->str
;
7934 /* if still unknown size, error */
7935 size
= type_size(type
, &align
);
7937 tcc_error("unknown type size");
7939 /* If there's a flex member and it was used in the initializer
7941 if (flexible_array
&& flexible_array
->type
.ref
->c
> 0)
7942 size
+= flexible_array
->type
.ref
->c
7943 * pointed_size(&flexible_array
->type
);
7946 /* take into account specified alignment if bigger */
7947 if (ad
->a
.aligned
) {
7948 int speca
= 1 << (ad
->a
.aligned
- 1);
7951 } else if (ad
->a
.packed
) {
7955 if (!v
&& NODATA_WANTED
)
7956 size
= 0, align
= 1;
/* local (stack) allocation path */
7958 if ((r
& VT_VALMASK
) == VT_LOCAL
) {
7960 #ifdef CONFIG_TCC_BCHECK
7962 /* add padding between stack variables for bound checking */
7966 loc
= (loc
- size
) & -align
;
7968 p
.local_offset
= addr
+ size
;
7969 #ifdef CONFIG_TCC_BCHECK
7971 /* add padding between stack variables for bound checking */
7976 /* local variable */
7977 #ifdef CONFIG_TCC_ASM
7978 if (ad
->asm_label
) {
7979 int reg
= asm_parse_regvar(ad
->asm_label
);
7981 r
= (r
& ~VT_VALMASK
) | reg
;
7984 sym
= sym_push(v
, type
, r
, addr
);
/* register a cleanup handler (attribute((cleanup))) for this var */
7985 if (ad
->cleanup_func
) {
7986 Sym
*cls
= sym_push2(&all_cleanups
,
7987 SYM_FIELD
| ++cur_scope
->cl
.n
, 0, 0);
7988 cls
->prev_tok
= sym
;
7989 cls
->next
= ad
->cleanup_func
;
7990 cls
->ncl
= cur_scope
->cl
.s
;
7991 cur_scope
->cl
.s
= cls
;
7996 /* push local reference */
7997 vset(type
, r
, addr
);
8002 /* see if the symbol was already defined */
8005 if (p
.flex_array_ref
&& (sym
->type
.t
& type
->t
& VT_ARRAY
)
8006 && sym
->type
.ref
->c
> type
->ref
->c
) {
8007 /* flex array was already declared with explicit size
8009 int arr[] = { 1,2,3 }; */
8010 type
->ref
->c
= sym
->type
.ref
->c
;
8011 size
= type_size(type
, &align
);
8013 patch_storage(sym
, ad
, type
);
8014 /* we accept several definitions of the same global variable. */
8015 if (!has_init
&& sym
->c
&& elfsym(sym
)->st_shndx
!= SHN_UNDEF
)
8020 /* allocate symbol in corresponding section */
/* arrays of const elements land in .rodata */
8024 while ((tp
->t
& (VT_BTYPE
|VT_ARRAY
)) == (VT_PTR
|VT_ARRAY
))
8025 tp
= &tp
->ref
->type
;
8026 if (tp
->t
& VT_CONSTANT
) {
8027 sec
= rodata_section
;
8028 } else if (has_init
) {
8030 /*if (tcc_state->g_debug & 4)
8031 tcc_warning("rw data: %s", get_tok_str(v, 0));*/
8032 } else if (tcc_state
->nocommon
)
8037 addr
= section_add(sec
, size
, align
);
8038 #ifdef CONFIG_TCC_BCHECK
8039 /* add padding if bound check */
8041 section_add(sec
, 1, 1);
8044 addr
= align
; /* SHN_COMMON is special, symbol value is align */
8045 sec
= common_section
;
8050 sym
= sym_push(v
, type
, r
| VT_SYM
, 0);
8051 patch_storage(sym
, ad
, NULL
);
8053 /* update symbol definition */
8054 put_extern_sym(sym
, sec
, addr
, size
);
8056 /* push global reference */
8057 vpush_ref(type
, sec
, addr
, size
);
8062 #ifdef CONFIG_TCC_BCHECK
8063 /* handles bounds now because the symbol must be defined
8064 before for the relocation */
8068 greloca(bounds_section
, sym
, bounds_section
->data_offset
, R_DATA_PTR
, 0);
8069 /* then add global bound info */
8070 bounds_ptr
= section_ptr_add(bounds_section
, 2 * sizeof(addr_t
));
8071 bounds_ptr
[0] = 0; /* relocated */
8072 bounds_ptr
[1] = size
;
/* VLA: allocate at runtime, track per-scope saved stack pointers */
8077 if (type
->t
& VT_VLA
) {
8083 /* save before-VLA stack pointer if needed */
8084 if (cur_scope
->vla
.num
== 0) {
8085 if (cur_scope
->prev
&& cur_scope
->prev
->vla
.num
) {
8086 cur_scope
->vla
.locorig
= cur_scope
->prev
->vla
.loc
;
8088 gen_vla_sp_save(loc
-= PTR_SIZE
);
8089 cur_scope
->vla
.locorig
= loc
;
8093 vpush_type_size(type
, &a
);
8094 gen_vla_alloc(type
, a
);
8095 #if defined TCC_TARGET_PE && defined TCC_TARGET_X86_64
8096 /* on _WIN64, because of the function args scratch area, the
8097 result of alloca differs from RSP and is returned in RAX. */
8098 gen_vla_result(addr
), addr
= (loc
-= PTR_SIZE
);
8100 gen_vla_sp_save(addr
);
8101 cur_scope
->vla
.loc
= addr
;
8102 cur_scope
->vla
.num
++;
8103 } else if (has_init
) {
8105 decl_initializer(&p
, type
, addr
, DIF_FIRST
);
8106 /* patch flexible array member size back to -1, */
8107 /* for possible subsequent similar declarations */
8109 flexible_array
->type
.ref
->c
= -1;
8113 /* restore parse state if needed */
8119 nocode_wanted
= saved_nocode_wanted
;
/* NOTE(review): fragmented extraction with elided lines; code kept
   byte-identical.  Recurses through nested array refs, then for each VLA
   dimension replays the saved size-expression tokens and stores the
   computed size into a stack slot (ref->c). */
8122 /* generate vla code saved in post_type() */
8123 static void func_vla_arg_code(Sym
*arg
)
8126 TokenString
*vla_array_tok
= NULL
;
/* handle inner dimensions first */
8129 func_vla_arg_code(arg
->type
.ref
);
8131 if ((arg
->type
.t
& VT_VLA
) && arg
->type
.ref
->vla_array_str
) {
/* reserve an int-sized stack slot to hold the evaluated dimension */
8132 loc
-= type_size(&int_type
, &align
);
8134 arg
->type
.ref
->c
= loc
;
/* replay the size expression saved during prototype parsing */
8137 vla_array_tok
= tok_str_alloc();
8138 vla_array_tok
->str
= arg
->type
.ref
->vla_array_str
;
8139 begin_macro(vla_array_tok
, 1);
8144 vpush_type_size(&arg
->type
.ref
->type
, &align
);
8146 vset(&int_type
, VT_LOCAL
|VT_LVAL
, arg
->type
.ref
->c
);
/* Walk the parameter list of function symbol 'sym' and generate the saved
   VLA sizing code for every variably-modified parameter.
   NOTE(review): fragmented extraction; code kept byte-identical. */
8153 static void func_vla_arg(Sym
*sym
)
8157 for (arg
= sym
->type
.ref
->next
; arg
; arg
= arg
->next
)
8158 if (arg
->type
.t
& VT_VLA
)
8159 func_vla_arg_code(arg
);
/* NOTE(review): this extraction is word-wrapped and elides several original
   source lines; code kept byte-identical, comments only added. */
8162 /* parse a function defined by symbol 'sym' and generate its code in
8163 'cur_text_section' */
8164 static void gen_function(Sym
*sym
)
/* fresh root scope for the function body */
8166 struct scope f
= { 0 };
8167 cur_scope
= root_scope
= &f
;
8169 ind
= cur_text_section
->data_offset
;
/* honor __attribute__((aligned)) on the function entry point */
8170 if (sym
->a
.aligned
) {
8171 size_t newoff
= section_add(cur_text_section
, 0,
8172 1 << (sym
->a
.aligned
- 1));
8173 gen_fill_nops(newoff
- ind
);
8175 /* NOTE: we patch the symbol size later */
8176 put_extern_sym(sym
, cur_text_section
, ind
, 0);
/* constructors/destructors are registered in the init/fini arrays */
8177 if (sym
->type
.ref
->f
.func_ctor
)
8178 add_array (tcc_state
, ".init_array", sym
->c
);
8179 if (sym
->type
.ref
->f
.func_dtor
)
8180 add_array (tcc_state
, ".fini_array", sym
->c
);
8182 funcname
= get_tok_str(sym
->v
, NULL
);
8184 func_vt
= sym
->type
.ref
->type
;
8185 func_var
= sym
->type
.ref
->f
.func_type
== FUNC_ELLIPSIS
;
8187 /* put debug symbol */
8188 tcc_debug_funcstart(tcc_state
, sym
);
8189 /* push a dummy symbol to enable local sym storage */
8190 sym_push2(&local_stack
, SYM_FIELD
, 0, 0);
8191 local_scope
= 1; /* for function parameters */
8193 tcc_debug_prolog_epilog(tcc_state
, 0);
8196 clear_temp_local_var_list();
8201 /* reset local stack */
8202 pop_local_syms(NULL
, 0);
8203 tcc_debug_prolog_epilog(tcc_state
, 1);
8205 cur_text_section
->data_offset
= ind
;
8207 label_pop(&global_label_stack
, NULL
, 0);
8208 sym_pop(&all_cleanups
, NULL
, 0);
8209 /* patch symbol size */
8210 elfsym(sym
)->st_size
= ind
- func_ind
;
8211 /* end of function */
8212 tcc_debug_funcend(tcc_state
, ind
- func_ind
);
8213 /* It's better to crash than to generate wrong code */
8214 cur_text_section
= NULL
;
8215 funcname
= ""; /* for safety */
8216 func_vt
.t
= VT_VOID
; /* for safety */
8217 func_var
= 0; /* for safety */
8218 ind
= 0; /* for safety */
/* back outside a function: only static data output allowed */
8220 nocode_wanted
= DATA_ONLY_WANTED
;
8222 /* do this after funcend debug info */
/* Generate code for every recorded inline function that was actually
   referenced (or forced non-internal).  Repeats until a full pass
   generates nothing, since generating one inline fn may reference others.
   NOTE(review): fragmented extraction; code kept byte-identical. */
8226 static void gen_inline_functions(TCCState
*s
)
8229 int inline_generated
, i
;
8230 struct InlineFunc
*fn
;
/* dummy buffer so error messages have a file context */
8232 tcc_open_bf(s
, ":inline:", 0);
8233 /* iterate while inline function are referenced */
8235 inline_generated
= 0;
8236 for (i
= 0; i
< s
->nb_inline_fns
; ++i
) {
8237 fn
= s
->inline_fns
[i
];
8239 if (sym
&& (sym
->c
|| !(sym
->type
.t
& VT_INLINE
))) {
8240 /* the function was used or forced (and then not internal):
8241 generate its code and convert it to a normal function */
8243 tcc_debug_putfile(s
, fn
->filename
);
/* replay the saved function-body tokens as macro input */
8244 begin_macro(fn
->func_str
, 1);
8246 cur_text_section
= text_section
;
8250 inline_generated
= 1;
8253 } while (inline_generated
);
/* Release the saved token strings of inline functions that were never
   emitted, then free the inline-function table itself.
   NOTE(review): fragmented extraction; code kept byte-identical. */
8257 static void free_inline_functions(TCCState
*s
)
8260 /* free tokens of unused inline functions */
8261 for (i
= 0; i
< s
->nb_inline_fns
; ++i
) {
8262 struct InlineFunc
*fn
= s
->inline_fns
[i
];
8264 tok_str_free(fn
->func_str
);
8266 dynarray_reset(&s
->inline_fns
, &s
->nb_inline_fns
);
/* Handle a _Static_assert(expr [, "message"]) declaration: on a failing
   constant expression report an error, using the optional string-literal
   message when present.  (Expression parsing is elided in this extraction;
   code kept byte-identical.) */
8269 static void do_Static_assert(void){
/* no message form: generic diagnostic */
8279 tcc_error("_Static_assert fail");
8281 goto static_assert_out
;
/* message form: concatenate adjacent string literals, then report them */
8285 parse_mult_str(&error_str
, "string constant");
8287 tcc_error("%s", (char *)error_str
.data
);
8288 cstr_free(&error_str
);
8294 /* 'l' is VT_LOCAL or VT_CONST to define default storage type
8295 or VT_CMP if parsing old style parameter list
8296 or VT_JMP if parsing c99 for decl: for (int i = 0, ...) */
8297 static int decl(int l
)
8299 int v
, has_init
, r
, oldint
;
8302 AttributeDef ad
, adbase
;
8305 if (tok
== TOK_STATIC_ASSERT
) {
8311 if (!parse_btype(&btype
, &adbase
, l
== VT_LOCAL
)) {
8314 /* skip redundant ';' if not in old parameter decl scope */
8315 if (tok
== ';' && l
!= VT_CMP
) {
8321 if (tok
== TOK_ASM1
|| tok
== TOK_ASM2
|| tok
== TOK_ASM3
) {
8322 /* global asm block */
8326 if (tok
>= TOK_UIDENT
) {
8327 /* special test for old K&R protos without explicit int
8328 type. Only accepted when defining global data */
8333 expect("declaration");
8339 if ((btype
.t
& VT_BTYPE
) == VT_STRUCT
) {
8341 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) >= SYM_FIRST_ANOM
)
8342 tcc_warning("unnamed struct/union that defines no instances");
8346 if (IS_ENUM(btype
.t
)) {
8352 while (1) { /* iterate thru each declaration */
8355 type_decl(&type
, &ad
, &v
, TYPE_DIRECT
);
8359 type_to_str(buf
, sizeof(buf
), &type
, get_tok_str(v
, NULL
));
8360 printf("type = '%s'\n", buf
);
8363 if ((type
.t
& VT_BTYPE
) == VT_FUNC
) {
8364 if ((type
.t
& VT_STATIC
) && (l
!= VT_CONST
))
8365 tcc_error("function without file scope cannot be static");
8366 /* if old style function prototype, we accept a
8369 if (sym
->f
.func_type
== FUNC_OLD
&& l
== VT_CONST
) {
8373 #if defined TCC_TARGET_MACHO || defined TARGETOS_ANDROID
8374 if (sym
->f
.func_alwinl
8375 && ((type
.t
& (VT_EXTERN
| VT_INLINE
))
8376 == (VT_EXTERN
| VT_INLINE
))) {
8377 /* always_inline functions must be handled as if they
8378 don't generate multiple global defs, even if extern
8379 inline, i.e. GNU inline semantics for those. Rewrite
8380 them into static inline. */
8381 type
.t
&= ~VT_EXTERN
;
8382 type
.t
|= VT_STATIC
;
8385 /* always compile 'extern inline' */
8386 if (type
.t
& VT_EXTERN
)
8387 type
.t
&= ~VT_INLINE
;
8389 } else if (oldint
) {
8390 tcc_warning("type defaults to int");
8393 if (gnu_ext
&& (tok
== TOK_ASM1
|| tok
== TOK_ASM2
|| tok
== TOK_ASM3
)) {
8394 ad
.asm_label
= asm_label_instr();
8395 /* parse one last attribute list, after asm label */
8396 parse_attribute(&ad
);
8398 /* gcc does not allow __asm__("label") with function definition,
8405 #ifdef TCC_TARGET_PE
8406 if (ad
.a
.dllimport
|| ad
.a
.dllexport
) {
8407 if (type
.t
& VT_STATIC
)
8408 tcc_error("cannot have dll linkage with static");
8409 if (type
.t
& VT_TYPEDEF
) {
8410 tcc_warning("'%s' attribute ignored for typedef",
8411 ad
.a
.dllimport
? (ad
.a
.dllimport
= 0, "dllimport") :
8412 (ad
.a
.dllexport
= 0, "dllexport"));
8413 } else if (ad
.a
.dllimport
) {
8414 if ((type
.t
& VT_BTYPE
) == VT_FUNC
)
8417 type
.t
|= VT_EXTERN
;
8423 tcc_error("cannot use local functions");
8424 if ((type
.t
& VT_BTYPE
) != VT_FUNC
)
8425 expect("function definition");
8427 /* reject abstract declarators in function definition
8428 make old style params without decl have int type */
8430 while ((sym
= sym
->next
) != NULL
) {
8431 if (!(sym
->v
& ~SYM_FIELD
))
8432 expect("identifier");
8433 if (sym
->type
.t
== VT_VOID
)
8434 sym
->type
= int_type
;
8437 /* apply post-declaraton attributes */
8438 merge_funcattr(&type
.ref
->f
, &ad
.f
);
8440 /* put function symbol */
8441 type
.t
&= ~VT_EXTERN
;
8442 sym
= external_sym(v
, &type
, 0, &ad
);
8444 /* static inline functions are just recorded as a kind
8445 of macro. Their code will be emitted at the end of
8446 the compilation unit only if they are used */
8447 if (sym
->type
.t
& VT_INLINE
) {
8448 struct InlineFunc
*fn
;
8449 fn
= tcc_malloc(sizeof *fn
+ strlen(file
->filename
));
8450 strcpy(fn
->filename
, file
->filename
);
8452 skip_or_save_block(&fn
->func_str
);
8453 dynarray_add(&tcc_state
->inline_fns
,
8454 &tcc_state
->nb_inline_fns
, fn
);
8456 /* compute text section */
8457 cur_text_section
= ad
.section
;
8458 if (!cur_text_section
)
8459 cur_text_section
= text_section
;
8465 /* find parameter in function parameter list */
8466 for (sym
= func_vt
.ref
->next
; sym
; sym
= sym
->next
)
8467 if ((sym
->v
& ~SYM_FIELD
) == v
)
8469 tcc_error("declaration for parameter '%s' but no such parameter",
8470 get_tok_str(v
, NULL
));
8472 if (type
.t
& VT_STORAGE
) /* 'register' is okay */
8473 tcc_error("storage class specified for '%s'",
8474 get_tok_str(v
, NULL
));
8475 if (sym
->type
.t
!= VT_VOID
)
8476 tcc_error("redefinition of parameter '%s'",
8477 get_tok_str(v
, NULL
));
8478 convert_parameter_type(&type
);
8480 } else if (type
.t
& VT_TYPEDEF
) {
8481 /* save typedefed type */
8482 /* XXX: test storage specifiers ? */
8484 if (sym
&& sym
->sym_scope
== local_scope
) {
8485 if (!is_compatible_types(&sym
->type
, &type
)
8486 || !(sym
->type
.t
& VT_TYPEDEF
))
8487 tcc_error("incompatible redefinition of '%s'",
8488 get_tok_str(v
, NULL
));
8491 sym
= sym_push(v
, &type
, 0, 0);
8494 if ((type
.t
& VT_BTYPE
) == VT_FUNC
)
8495 merge_funcattr(&sym
->type
.ref
->f
, &ad
.f
);
8497 tcc_debug_typedef (tcc_state
, sym
);
8498 } else if ((type
.t
& VT_BTYPE
) == VT_VOID
8499 && !(type
.t
& VT_EXTERN
)) {
8500 tcc_error("declaration of void object");
8503 if ((type
.t
& VT_BTYPE
) == VT_FUNC
) {
8504 /* external function definition */
8505 /* specific case for func_call attribute */
8506 merge_funcattr(&type
.ref
->f
, &ad
.f
);
8507 } else if (!(type
.t
& VT_ARRAY
)) {
8508 /* not lvalue if array */
8511 has_init
= (tok
== '=');
8512 if (has_init
&& (type
.t
& VT_VLA
))
8513 tcc_error("variable length array cannot be initialized");
8514 if (((type
.t
& VT_EXTERN
) && (!has_init
|| l
!= VT_CONST
))
8515 || (type
.t
& VT_BTYPE
) == VT_FUNC
8516 /* as with GCC, uninitialized global arrays with no size
8517 are considered extern: */
8518 || ((type
.t
& VT_ARRAY
) && !has_init
8519 && l
== VT_CONST
&& type
.ref
->c
< 0)
8521 /* external variable or function */
8522 type
.t
|= VT_EXTERN
;
8523 sym
= external_sym(v
, &type
, r
, &ad
);
8524 if (ad
.alias_target
) {
8525 /* Aliases need to be emitted when their target
8526 symbol is emitted, even if perhaps unreferenced.
8527 We only support the case where the base is
8528 already defined, otherwise we would need
8529 deferring to emit the aliases until the end of
8530 the compile unit. */
8531 Sym
*alias_target
= sym_find(ad
.alias_target
);
8532 ElfSym
*esym
= elfsym(alias_target
);
8534 tcc_error("unsupported forward __alias__ attribute");
8535 put_extern_sym2(sym
, esym
->st_shndx
,
8536 esym
->st_value
, esym
->st_size
, 1);
8539 if (l
== VT_CONST
|| (type
.t
& VT_STATIC
))
8545 else if (l
== VT_CONST
)
8546 /* uninitialized global variables may be overridden */
8547 type
.t
|= VT_EXTERN
;
8548 decl_initializer_alloc(&type
, &ad
, r
, has_init
, v
, l
== VT_CONST
);
8564 /* ------------------------------------------------------------------------- */
8567 /* ------------------------------------------------------------------------- */