2 * TCC - Tiny C Compiler
4 * Copyright (c) 2001-2004 Fabrice Bellard
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
24 /********************************************************/
25 /* global variables */
/* loc : local variable index
   ind : output code index
   anon_sym: anonymous symbol index */
32 ST_DATA
int rsym
, anon_sym
, ind
, loc
;
34 ST_DATA Sym
*global_stack
;
35 ST_DATA Sym
*local_stack
;
36 ST_DATA Sym
*define_stack
;
37 ST_DATA Sym
*global_label_stack
;
38 ST_DATA Sym
*local_label_stack
;
40 static Sym
*sym_free_first
;
41 static void **sym_pools
;
42 static int nb_sym_pools
;
44 static Sym
*all_cleanups
, *pending_gotos
;
45 static int local_scope
;
47 static int in_generic
;
48 ST_DATA
char debug_modes
;
51 static SValue _vstack
[1 + VSTACK_SIZE
];
52 #define vstack (_vstack + 1)
54 ST_DATA
int const_wanted
; /* true if constant wanted */
55 ST_DATA
int nocode_wanted
; /* no code generation wanted */
56 #define unevalmask 0xffff /* unevaluated subexpression */
57 #define NODATA_WANTED (nocode_wanted > 0) /* no static data output wanted either */
58 #define STATIC_DATA_WANTED (nocode_wanted & 0xC0000000) /* only static data output */
60 /* Automagical code suppression ----> */
61 #define CODE_OFF() (nocode_wanted |= 0x20000000)
62 #define CODE_ON() (nocode_wanted &= ~0x20000000)
64 /* Clear 'nocode_wanted' at label if it was used */
65 ST_FUNC
void gsym(int t
) { if (t
) { gsym_addr(t
, ind
); CODE_ON(); }}
66 static int gind(void) { int t
= ind
; CODE_ON(); if (debug_modes
) tcc_tcov_block_begin(tcc_state
); return t
; }
/* Set 'nocode_wanted' after unconditional jumps */
static void gjmp_addr_acs(int t)
{
    gjmp_addr(t);
    CODE_OFF();
}
/* Emit a forward jump appended to chain 't', then suppress code
   generation (the following code is unreachable until a label). */
static int gjmp_acs(int t)
{
    int chain = gjmp(t);
    CODE_OFF();
    return chain;
}
72 /* These are #undef'd at the end of this file */
73 #define gjmp_addr gjmp_addr_acs
77 ST_DATA
int global_expr
; /* true if compound literals must be allocated globally (used during initializers parsing */
78 ST_DATA CType func_vt
; /* current function return type (used by return instruction) */
79 ST_DATA
int func_var
; /* true if current function is variadic (used by return instruction) */
82 ST_DATA
const char *funcname
;
83 ST_DATA CType int_type
, func_old_type
, char_type
, char_pointer_type
;
84 static CString initstr
;
87 #define VT_SIZE_T (VT_INT | VT_UNSIGNED)
88 #define VT_PTRDIFF_T VT_INT
90 #define VT_SIZE_T (VT_LLONG | VT_UNSIGNED)
91 #define VT_PTRDIFF_T VT_LLONG
93 #define VT_SIZE_T (VT_LONG | VT_LLONG | VT_UNSIGNED)
94 #define VT_PTRDIFF_T (VT_LONG | VT_LLONG)
97 static struct switch_t
{
101 } **p
; int n
; /* list of case ranges */
102 int def_sym
; /* default symbol */
105 struct switch_t
*prev
;
107 } *cur_switch
; /* current switch */
109 #define MAX_TEMP_LOCAL_VARIABLE_NUMBER 8
110 /*list of temporary local variables on the stack in current function. */
111 static struct temp_local_variable
{
112 int location
; //offset on stack. Svalue.c.i
115 } arr_temp_local_vars
[MAX_TEMP_LOCAL_VARIABLE_NUMBER
];
116 static int nb_temp_local_vars
;
118 static struct scope
{
120 struct { int loc
, locorig
, num
; } vla
;
121 struct { Sym
*s
; int n
; } cl
;
124 } *cur_scope
, *loop_scope
, *root_scope
;
133 #define precedence_parser
134 static void init_prec(void);
137 static void gen_cast(CType
*type
);
138 static void gen_cast_s(int t
);
139 static inline CType
*pointed_type(CType
*type
);
140 static int is_compatible_types(CType
*type1
, CType
*type2
);
141 static int parse_btype(CType
*type
, AttributeDef
*ad
);
142 static CType
*type_decl(CType
*type
, AttributeDef
*ad
, int *v
, int td
);
143 static void parse_expr_type(CType
*type
);
144 static void init_putv(init_params
*p
, CType
*type
, unsigned long c
);
145 static void decl_initializer(init_params
*p
, CType
*type
, unsigned long c
, int flags
);
146 static void block(int is_expr
);
147 static void decl_initializer_alloc(CType
*type
, AttributeDef
*ad
, int r
, int has_init
, int v
, int scope
);
148 static void decl(int l
);
149 static int decl0(int l
, int is_for_loop_init
, Sym
*);
150 static void expr_eq(void);
151 static void vpush_type_size(CType
*type
, int *a
);
152 static int is_compatible_unqualified_types(CType
*type1
, CType
*type2
);
153 static inline int64_t expr_const64(void);
154 static void vpush64(int ty
, unsigned long long v
);
155 static void vpush(CType
*type
);
156 static int gvtst(int inv
, int t
);
157 static void gen_inline_functions(TCCState
*s
);
158 static void free_inline_functions(TCCState
*s
);
159 static void skip_or_save_block(TokenString
**str
);
160 static void gv_dup(void);
161 static int get_temp_local_var(int size
,int align
);
162 static void clear_temp_local_var_list();
163 static void cast_error(CType
*st
, CType
*dt
);
165 /* ------------------------------------------------------------------------- */
167 ST_INLN
int is_float(int t
)
169 int bt
= t
& VT_BTYPE
;
170 return bt
== VT_LDOUBLE
176 static inline int is_integer_btype(int bt
)
185 static int btype_size(int bt
)
187 return bt
== VT_BYTE
|| bt
== VT_BOOL
? 1 :
191 bt
== VT_PTR
? PTR_SIZE
: 0;
194 /* returns function return register from type */
195 static int R_RET(int t
)
199 #ifdef TCC_TARGET_X86_64
200 if ((t
& VT_BTYPE
) == VT_LDOUBLE
)
202 #elif defined TCC_TARGET_RISCV64
203 if ((t
& VT_BTYPE
) == VT_LDOUBLE
)
209 /* returns 2nd function return register, if any */
210 static int R2_RET(int t
)
216 #elif defined TCC_TARGET_X86_64
221 #elif defined TCC_TARGET_RISCV64
228 /* returns true for two-word types */
229 #define USING_TWO_WORDS(t) (R2_RET(t) != VT_CONST)
231 /* put function return registers to stack value */
232 static void PUT_R_RET(SValue
*sv
, int t
)
234 sv
->r
= R_RET(t
), sv
->r2
= R2_RET(t
);
237 /* returns function return register class for type t */
238 static int RC_RET(int t
)
240 return reg_classes
[R_RET(t
)] & ~(RC_FLOAT
| RC_INT
);
243 /* returns generic register class for type t */
244 static int RC_TYPE(int t
)
248 #ifdef TCC_TARGET_X86_64
249 if ((t
& VT_BTYPE
) == VT_LDOUBLE
)
251 if ((t
& VT_BTYPE
) == VT_QFLOAT
)
253 #elif defined TCC_TARGET_RISCV64
254 if ((t
& VT_BTYPE
) == VT_LDOUBLE
)
260 /* returns 2nd register class corresponding to t and rc */
261 static int RC2_TYPE(int t
, int rc
)
263 if (!USING_TWO_WORDS(t
))
278 /* we use our own 'finite' function to avoid potential problems with
279 non standard math libs */
280 /* XXX: endianness dependent */
281 ST_FUNC
int ieee_finite(double d
)
284 memcpy(p
, &d
, sizeof(double));
285 return ((unsigned)((p
[1] | 0x800fffff) + 1)) >> 31;
288 /* compiling intel long double natively */
289 #if (defined __i386__ || defined __x86_64__) \
290 && (defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64)
291 # define TCC_IS_NATIVE_387
294 ST_FUNC
void test_lvalue(void)
296 if (!(vtop
->r
& VT_LVAL
))
300 ST_FUNC
void check_vstack(void)
302 if (vtop
!= vstack
- 1)
303 tcc_error("internal compiler error: vstack leak (%d)",
304 (int)(vtop
- vstack
+ 1));
307 /* vstack debugging aid */
309 void pv (const char *lbl
, int a
, int b
)
312 for (i
= a
; i
< a
+ b
; ++i
) {
313 SValue
*p
= &vtop
[-i
];
314 printf("%s vtop[-%d] : type.t:%04x r:%04x r2:%04x c.i:%d\n",
315 lbl
, i
, p
->type
.t
, p
->r
, p
->r2
, (int)p
->c
.i
);
320 /* ------------------------------------------------------------------------- */
321 /* initialize vstack and types. This must be done also for tcc -E */
322 ST_FUNC
void tccgen_init(TCCState
*s1
)
325 memset(vtop
, 0, sizeof *vtop
);
327 /* define some often used types */
330 char_type
.t
= VT_BYTE
;
331 if (s1
->char_is_unsigned
)
332 char_type
.t
|= VT_UNSIGNED
;
333 char_pointer_type
= char_type
;
334 mk_pointer(&char_pointer_type
);
336 func_old_type
.t
= VT_FUNC
;
337 func_old_type
.ref
= sym_push(SYM_FIELD
, &int_type
, 0, 0);
338 func_old_type
.ref
->f
.func_call
= FUNC_CDECL
;
339 func_old_type
.ref
->f
.func_type
= FUNC_OLD
;
340 #ifdef precedence_parser
346 ST_FUNC
int tccgen_compile(TCCState
*s1
)
348 cur_text_section
= NULL
;
351 anon_sym
= SYM_FIRST_ANOM
;
353 nocode_wanted
= 0x80000000;
355 debug_modes
= (s1
->do_debug
? 1 : 0) | s1
->test_coverage
<< 1;
359 #ifdef TCC_TARGET_ARM
363 printf("%s: **** new file\n", file
->filename
);
365 parse_flags
= PARSE_FLAG_PREPROCESS
| PARSE_FLAG_TOK_NUM
| PARSE_FLAG_TOK_STR
;
368 gen_inline_functions(s1
);
370 /* end of translation unit info */
376 ST_FUNC
void tccgen_finish(TCCState
*s1
)
379 free_inline_functions(s1
);
380 sym_pop(&global_stack
, NULL
, 0);
381 sym_pop(&local_stack
, NULL
, 0);
382 /* free preprocessor macros */
385 dynarray_reset(&sym_pools
, &nb_sym_pools
);
386 sym_free_first
= NULL
;
389 /* ------------------------------------------------------------------------- */
390 ST_FUNC ElfSym
*elfsym(Sym
*s
)
394 return &((ElfSym
*)symtab_section
->data
)[s
->c
];
397 /* apply storage attributes to Elf symbol */
398 ST_FUNC
void update_storage(Sym
*sym
)
401 int sym_bind
, old_sym_bind
;
407 if (sym
->a
.visibility
)
408 esym
->st_other
= (esym
->st_other
& ~ELFW(ST_VISIBILITY
)(-1))
411 if (sym
->type
.t
& (VT_STATIC
| VT_INLINE
))
412 sym_bind
= STB_LOCAL
;
413 else if (sym
->a
.weak
)
416 sym_bind
= STB_GLOBAL
;
417 old_sym_bind
= ELFW(ST_BIND
)(esym
->st_info
);
418 if (sym_bind
!= old_sym_bind
) {
419 esym
->st_info
= ELFW(ST_INFO
)(sym_bind
, ELFW(ST_TYPE
)(esym
->st_info
));
423 if (sym
->a
.dllimport
)
424 esym
->st_other
|= ST_PE_IMPORT
;
425 if (sym
->a
.dllexport
)
426 esym
->st_other
|= ST_PE_EXPORT
;
430 printf("storage %s: bind=%c vis=%d exp=%d imp=%d\n",
431 get_tok_str(sym
->v
, NULL
),
432 sym_bind
== STB_WEAK
? 'w' : sym_bind
== STB_LOCAL
? 'l' : 'g',
440 /* ------------------------------------------------------------------------- */
441 /* update sym->c so that it points to an external symbol in section
442 'section' with value 'value' */
444 ST_FUNC
void put_extern_sym2(Sym
*sym
, int sh_num
,
445 addr_t value
, unsigned long size
,
446 int can_add_underscore
)
448 int sym_type
, sym_bind
, info
, other
, t
;
454 name
= get_tok_str(sym
->v
, NULL
);
456 if ((t
& VT_BTYPE
) == VT_FUNC
) {
458 } else if ((t
& VT_BTYPE
) == VT_VOID
) {
459 sym_type
= STT_NOTYPE
;
460 if ((t
& (VT_BTYPE
|VT_ASM_FUNC
)) == VT_ASM_FUNC
)
463 sym_type
= STT_OBJECT
;
465 if (t
& (VT_STATIC
| VT_INLINE
))
466 sym_bind
= STB_LOCAL
;
468 sym_bind
= STB_GLOBAL
;
472 if (sym_type
== STT_FUNC
&& sym
->type
.ref
) {
473 Sym
*ref
= sym
->type
.ref
;
474 if (ref
->a
.nodecorate
) {
475 can_add_underscore
= 0;
477 if (ref
->f
.func_call
== FUNC_STDCALL
&& can_add_underscore
) {
478 sprintf(buf1
, "_%s@%d", name
, ref
->f
.func_args
* PTR_SIZE
);
480 other
|= ST_PE_STDCALL
;
481 can_add_underscore
= 0;
486 if (sym
->asm_label
) {
487 name
= get_tok_str(sym
->asm_label
, NULL
);
488 can_add_underscore
= 0;
491 if (tcc_state
->leading_underscore
&& can_add_underscore
) {
493 pstrcpy(buf1
+ 1, sizeof(buf1
) - 1, name
);
497 info
= ELFW(ST_INFO
)(sym_bind
, sym_type
);
498 sym
->c
= put_elf_sym(symtab_section
, value
, size
, info
, other
, sh_num
, name
);
501 tcc_debug_extern_sym(tcc_state
, sym
, sh_num
, sym_bind
, sym_type
);
505 esym
->st_value
= value
;
506 esym
->st_size
= size
;
507 esym
->st_shndx
= sh_num
;
512 ST_FUNC
void put_extern_sym(Sym
*sym
, Section
*section
,
513 addr_t value
, unsigned long size
)
515 int sh_num
= section
? section
->sh_num
: SHN_UNDEF
;
516 put_extern_sym2(sym
, sh_num
, value
, size
, 1);
519 /* add a new relocation entry to symbol 'sym' in section 's' */
520 ST_FUNC
void greloca(Section
*s
, Sym
*sym
, unsigned long offset
, int type
,
525 if (nocode_wanted
&& s
== cur_text_section
)
530 put_extern_sym(sym
, NULL
, 0, 0);
534 /* now we can add ELF relocation info */
535 put_elf_reloca(symtab_section
, s
, offset
, type
, c
, addend
);
539 ST_FUNC
void greloc(Section
*s
, Sym
*sym
, unsigned long offset
, int type
)
541 greloca(s
, sym
, offset
, type
, 0);
545 /* ------------------------------------------------------------------------- */
546 /* symbol allocator */
547 static Sym
*__sym_malloc(void)
549 Sym
*sym_pool
, *sym
, *last_sym
;
552 sym_pool
= tcc_malloc(SYM_POOL_NB
* sizeof(Sym
));
553 dynarray_add(&sym_pools
, &nb_sym_pools
, sym_pool
);
555 last_sym
= sym_free_first
;
557 for(i
= 0; i
< SYM_POOL_NB
; i
++) {
558 sym
->next
= last_sym
;
562 sym_free_first
= last_sym
;
566 static inline Sym
*sym_malloc(void)
570 sym
= sym_free_first
;
572 sym
= __sym_malloc();
573 sym_free_first
= sym
->next
;
576 sym
= tcc_malloc(sizeof(Sym
));
581 ST_INLN
void sym_free(Sym
*sym
)
584 sym
->next
= sym_free_first
;
585 sym_free_first
= sym
;
591 /* push, without hashing */
592 ST_FUNC Sym
*sym_push2(Sym
**ps
, int v
, int t
, int c
)
597 memset(s
, 0, sizeof *s
);
607 /* find a symbol and return its associated structure. 's' is the top
608 of the symbol stack */
609 ST_FUNC Sym
*sym_find2(Sym
*s
, int v
)
621 /* structure lookup */
622 ST_INLN Sym
*struct_find(int v
)
625 if ((unsigned)v
>= (unsigned)(tok_ident
- TOK_IDENT
))
627 return table_ident
[v
]->sym_struct
;
630 /* find an identifier */
631 ST_INLN Sym
*sym_find(int v
)
634 if ((unsigned)v
>= (unsigned)(tok_ident
- TOK_IDENT
))
636 return table_ident
[v
]->sym_identifier
;
639 static int sym_scope(Sym
*s
)
641 if (IS_ENUM_VAL (s
->type
.t
))
642 return s
->type
.ref
->sym_scope
;
647 /* push a given symbol on the symbol stack */
648 ST_FUNC Sym
*sym_push(int v
, CType
*type
, int r
, int c
)
657 s
= sym_push2(ps
, v
, type
->t
, c
);
658 s
->type
.ref
= type
->ref
;
660 /* don't record fields or anonymous symbols */
662 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
663 /* record symbol in token array */
664 ts
= table_ident
[(v
& ~SYM_STRUCT
) - TOK_IDENT
];
666 ps
= &ts
->sym_struct
;
668 ps
= &ts
->sym_identifier
;
671 s
->sym_scope
= local_scope
;
672 if (s
->prev_tok
&& sym_scope(s
->prev_tok
) == s
->sym_scope
)
673 tcc_error("redeclaration of '%s'",
674 get_tok_str(v
& ~SYM_STRUCT
, NULL
));
679 /* push a global identifier */
680 ST_FUNC Sym
*global_identifier_push(int v
, int t
, int c
)
683 s
= sym_push2(&global_stack
, v
, t
, c
);
684 s
->r
= VT_CONST
| VT_SYM
;
685 /* don't record anonymous symbol */
686 if (v
< SYM_FIRST_ANOM
) {
687 ps
= &table_ident
[v
- TOK_IDENT
]->sym_identifier
;
688 /* modify the top most local identifier, so that sym_identifier will
689 point to 's' when popped; happens when called from inline asm */
690 while (*ps
!= NULL
&& (*ps
)->sym_scope
)
691 ps
= &(*ps
)->prev_tok
;
698 /* pop symbols until top reaches 'b'. If KEEP is non-zero don't really
699 pop them yet from the list, but do remove them from the token array. */
700 ST_FUNC
void sym_pop(Sym
**ptop
, Sym
*b
, int keep
)
710 /* remove symbol in token array */
712 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
713 ts
= table_ident
[(v
& ~SYM_STRUCT
) - TOK_IDENT
];
715 ps
= &ts
->sym_struct
;
717 ps
= &ts
->sym_identifier
;
728 /* ------------------------------------------------------------------------- */
729 static void vcheck_cmp(void)
731 /* cannot let cpu flags if other instruction are generated. Also
732 avoid leaving VT_JMP anywhere except on the top of the stack
733 because it would complicate the code generator.
735 Don't do this when nocode_wanted. vtop might come from
736 !nocode_wanted regions (see 88_codeopt.c) and transforming
737 it to a register without actually generating code is wrong
738 as their value might still be used for real. All values
739 we push under nocode_wanted will eventually be popped
740 again, so that the VT_CMP/VT_JMP value will be in vtop
741 when code is unsuppressed again. */
743 if (vtop
->r
== VT_CMP
&& !nocode_wanted
)
747 static void vsetc(CType
*type
, int r
, CValue
*vc
)
749 if (vtop
>= vstack
+ (VSTACK_SIZE
- 1))
750 tcc_error("memory full (vstack)");
760 ST_FUNC
void vswap(void)
770 /* pop stack value */
771 ST_FUNC
void vpop(void)
774 v
= vtop
->r
& VT_VALMASK
;
775 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
776 /* for x86, we need to pop the FP stack */
778 o(0xd8dd); /* fstp %st(0) */
782 /* need to put correct jump if && or || without test */
789 /* push constant of type "type" with useless value */
790 static void vpush(CType
*type
)
792 vset(type
, VT_CONST
, 0);
795 /* push arbitrary 64bit constant */
796 static void vpush64(int ty
, unsigned long long v
)
803 vsetc(&ctype
, VT_CONST
, &cval
);
806 /* push integer constant */
807 ST_FUNC
void vpushi(int v
)
812 /* push a pointer sized constant */
813 static void vpushs(addr_t v
)
815 vpush64(VT_SIZE_T
, v
);
818 /* push long long constant */
819 static inline void vpushll(long long v
)
821 vpush64(VT_LLONG
, v
);
824 ST_FUNC
void vset(CType
*type
, int r
, int v
)
828 vsetc(type
, r
, &cval
);
831 static void vseti(int r
, int v
)
839 ST_FUNC
void vpushv(SValue
*v
)
841 if (vtop
>= vstack
+ (VSTACK_SIZE
- 1))
842 tcc_error("memory full (vstack)");
847 static void vdup(void)
852 /* rotate n first stack elements to the bottom
853 I1 ... In -> I2 ... In I1 [top is right]
855 ST_FUNC
void vrotb(int n
)
867 /* rotate the n elements before entry e towards the top
868 I1 ... In ... -> In I1 ... I(n-1) ... [top is right]
870 ST_FUNC
void vrote(SValue
*e
, int n
)
877 for(i
= 0;i
< n
- 1; i
++)
882 /* rotate n first stack elements to the top
883 I1 ... In -> In I1 ... I(n-1) [top is right]
885 ST_FUNC
void vrott(int n
)
890 /* ------------------------------------------------------------------------- */
891 /* vtop->r = VT_CMP means CPU-flags have been set from comparison or test. */
893 /* called from generators to set the result from relational ops */
894 ST_FUNC
void vset_VT_CMP(int op
)
902 /* called once before asking generators to load VT_CMP to a register */
903 static void vset_VT_JMP(void)
905 int op
= vtop
->cmp_op
;
907 if (vtop
->jtrue
|| vtop
->jfalse
) {
908 /* we need to jump to 'mov $0,%R' or 'mov $1,%R' */
909 int inv
= op
& (op
< 2); /* small optimization */
910 vseti(VT_JMP
+inv
, gvtst(inv
, 0));
912 /* otherwise convert flags (rsp. 0/1) to register */
914 if (op
< 2) /* doesn't seem to happen */
919 /* Set CPU Flags, doesn't yet jump */
920 static void gvtst_set(int inv
, int t
)
924 if (vtop
->r
!= VT_CMP
) {
927 if (vtop
->r
!= VT_CMP
) /* must be VT_CONST then */
928 vset_VT_CMP(vtop
->c
.i
!= 0);
931 p
= inv
? &vtop
->jfalse
: &vtop
->jtrue
;
932 *p
= gjmp_append(*p
, t
);
935 /* Generate value test
937 * Generate a test for any value (jump, comparison and integers) */
938 static int gvtst(int inv
, int t
)
943 t
= vtop
->jtrue
, u
= vtop
->jfalse
;
948 /* jump to the wanted target */
950 t
= gjmp_cond(op
^ inv
, t
);
953 /* resolve complementary jumps to here */
960 /* generate a zero or nozero test */
961 static void gen_test_zero(int op
)
963 if (vtop
->r
== VT_CMP
) {
967 vtop
->jfalse
= vtop
->jtrue
;
977 /* ------------------------------------------------------------------------- */
978 /* push a symbol value of TYPE */
979 ST_FUNC
void vpushsym(CType
*type
, Sym
*sym
)
983 vsetc(type
, VT_CONST
| VT_SYM
, &cval
);
987 /* Return a static symbol pointing to a section */
988 ST_FUNC Sym
*get_sym_ref(CType
*type
, Section
*sec
, unsigned long offset
, unsigned long size
)
994 sym
= sym_push(v
, type
, VT_CONST
| VT_SYM
, 0);
995 sym
->type
.t
|= VT_STATIC
;
996 put_extern_sym(sym
, sec
, offset
, size
);
1000 /* push a reference to a section offset by adding a dummy symbol */
1001 static void vpush_ref(CType
*type
, Section
*sec
, unsigned long offset
, unsigned long size
)
1003 vpushsym(type
, get_sym_ref(type
, sec
, offset
, size
));
1006 /* define a new external reference to a symbol 'v' of type 'u' */
1007 ST_FUNC Sym
*external_global_sym(int v
, CType
*type
)
1013 /* push forward reference */
1014 s
= global_identifier_push(v
, type
->t
| VT_EXTERN
, 0);
1015 s
->type
.ref
= type
->ref
;
1016 } else if (IS_ASM_SYM(s
)) {
1017 s
->type
.t
= type
->t
| (s
->type
.t
& VT_EXTERN
);
1018 s
->type
.ref
= type
->ref
;
1024 /* create an external reference with no specific type similar to asm labels.
1025 This avoids type conflicts if the symbol is used from C too */
1026 ST_FUNC Sym
*external_helper_sym(int v
)
1028 CType ct
= { VT_ASM_FUNC
, NULL
};
1029 return external_global_sym(v
, &ct
);
1032 /* push a reference to an helper function (such as memmove) */
1033 ST_FUNC
void vpush_helper_func(int v
)
1035 vpushsym(&func_old_type
, external_helper_sym(v
));
1038 /* Merge symbol attributes. */
1039 static void merge_symattr(struct SymAttr
*sa
, struct SymAttr
*sa1
)
1041 if (sa1
->aligned
&& !sa
->aligned
)
1042 sa
->aligned
= sa1
->aligned
;
1043 sa
->packed
|= sa1
->packed
;
1044 sa
->weak
|= sa1
->weak
;
1045 if (sa1
->visibility
!= STV_DEFAULT
) {
1046 int vis
= sa
->visibility
;
1047 if (vis
== STV_DEFAULT
1048 || vis
> sa1
->visibility
)
1049 vis
= sa1
->visibility
;
1050 sa
->visibility
= vis
;
1052 sa
->dllexport
|= sa1
->dllexport
;
1053 sa
->nodecorate
|= sa1
->nodecorate
;
1054 sa
->dllimport
|= sa1
->dllimport
;
1057 /* Merge function attributes. */
1058 static void merge_funcattr(struct FuncAttr
*fa
, struct FuncAttr
*fa1
)
1060 if (fa1
->func_call
&& !fa
->func_call
)
1061 fa
->func_call
= fa1
->func_call
;
1062 if (fa1
->func_type
&& !fa
->func_type
)
1063 fa
->func_type
= fa1
->func_type
;
1064 if (fa1
->func_args
&& !fa
->func_args
)
1065 fa
->func_args
= fa1
->func_args
;
1066 if (fa1
->func_noreturn
)
1067 fa
->func_noreturn
= 1;
1074 /* Merge attributes. */
1075 static void merge_attr(AttributeDef
*ad
, AttributeDef
*ad1
)
1077 merge_symattr(&ad
->a
, &ad1
->a
);
1078 merge_funcattr(&ad
->f
, &ad1
->f
);
1081 ad
->section
= ad1
->section
;
1082 if (ad1
->alias_target
)
1083 ad
->alias_target
= ad1
->alias_target
;
1085 ad
->asm_label
= ad1
->asm_label
;
1087 ad
->attr_mode
= ad1
->attr_mode
;
1090 /* Merge some type attributes. */
1091 static void patch_type(Sym
*sym
, CType
*type
)
1093 if (!(type
->t
& VT_EXTERN
) || IS_ENUM_VAL(sym
->type
.t
)) {
1094 if (!(sym
->type
.t
& VT_EXTERN
))
1095 tcc_error("redefinition of '%s'", get_tok_str(sym
->v
, NULL
));
1096 sym
->type
.t
&= ~VT_EXTERN
;
1099 if (IS_ASM_SYM(sym
)) {
1100 /* stay static if both are static */
1101 sym
->type
.t
= type
->t
& (sym
->type
.t
| ~VT_STATIC
);
1102 sym
->type
.ref
= type
->ref
;
1105 if (!is_compatible_types(&sym
->type
, type
)) {
1106 tcc_error("incompatible types for redefinition of '%s'",
1107 get_tok_str(sym
->v
, NULL
));
1109 } else if ((sym
->type
.t
& VT_BTYPE
) == VT_FUNC
) {
1110 int static_proto
= sym
->type
.t
& VT_STATIC
;
1111 /* warn if static follows non-static function declaration */
1112 if ((type
->t
& VT_STATIC
) && !static_proto
1113 /* XXX this test for inline shouldn't be here. Until we
1114 implement gnu-inline mode again it silences a warning for
1115 mingw caused by our workarounds. */
1116 && !((type
->t
| sym
->type
.t
) & VT_INLINE
))
1117 tcc_warning("static storage ignored for redefinition of '%s'",
1118 get_tok_str(sym
->v
, NULL
));
1120 /* set 'inline' if both agree or if one has static */
1121 if ((type
->t
| sym
->type
.t
) & VT_INLINE
) {
1122 if (!((type
->t
^ sym
->type
.t
) & VT_INLINE
)
1123 || ((type
->t
| sym
->type
.t
) & VT_STATIC
))
1124 static_proto
|= VT_INLINE
;
1127 if (0 == (type
->t
& VT_EXTERN
)) {
1128 struct FuncAttr f
= sym
->type
.ref
->f
;
1129 /* put complete type, use static from prototype */
1130 sym
->type
.t
= (type
->t
& ~(VT_STATIC
|VT_INLINE
)) | static_proto
;
1131 sym
->type
.ref
= type
->ref
;
1132 merge_funcattr(&sym
->type
.ref
->f
, &f
);
1134 sym
->type
.t
&= ~VT_INLINE
| static_proto
;
1137 if (sym
->type
.ref
->f
.func_type
== FUNC_OLD
1138 && type
->ref
->f
.func_type
!= FUNC_OLD
) {
1139 sym
->type
.ref
= type
->ref
;
1143 if ((sym
->type
.t
& VT_ARRAY
) && type
->ref
->c
>= 0) {
1144 /* set array size if it was omitted in extern declaration */
1145 sym
->type
.ref
->c
= type
->ref
->c
;
1147 if ((type
->t
^ sym
->type
.t
) & VT_STATIC
)
1148 tcc_warning("storage mismatch for redefinition of '%s'",
1149 get_tok_str(sym
->v
, NULL
));
1153 /* Merge some storage attributes. */
1154 static void patch_storage(Sym
*sym
, AttributeDef
*ad
, CType
*type
)
1157 patch_type(sym
, type
);
1159 #ifdef TCC_TARGET_PE
1160 if (sym
->a
.dllimport
!= ad
->a
.dllimport
)
1161 tcc_error("incompatible dll linkage for redefinition of '%s'",
1162 get_tok_str(sym
->v
, NULL
));
1164 merge_symattr(&sym
->a
, &ad
->a
);
1166 sym
->asm_label
= ad
->asm_label
;
1167 update_storage(sym
);
1170 /* copy sym to other stack */
1171 static Sym
*sym_copy(Sym
*s0
, Sym
**ps
)
1174 s
= sym_malloc(), *s
= *s0
;
1175 s
->prev
= *ps
, *ps
= s
;
1176 if (s
->v
< SYM_FIRST_ANOM
) {
1177 ps
= &table_ident
[s
->v
- TOK_IDENT
]->sym_identifier
;
1178 s
->prev_tok
= *ps
, *ps
= s
;
1183 /* copy s->type.ref to stack 'ps' for VT_FUNC and VT_PTR */
1184 static void sym_copy_ref(Sym
*s
, Sym
**ps
)
1186 int bt
= s
->type
.t
& VT_BTYPE
;
1187 if (bt
== VT_FUNC
|| bt
== VT_PTR
|| (bt
== VT_STRUCT
&& s
->sym_scope
)) {
1188 Sym
**sp
= &s
->type
.ref
;
1189 for (s
= *sp
, *sp
= NULL
; s
; s
= s
->next
) {
1190 Sym
*s2
= sym_copy(s
, ps
);
1191 sp
= &(*sp
= s2
)->next
;
1192 sym_copy_ref(s2
, ps
);
1197 /* define a new external reference to a symbol 'v' */
1198 static Sym
*external_sym(int v
, CType
*type
, int r
, AttributeDef
*ad
)
1202 /* look for global symbol */
1204 while (s
&& s
->sym_scope
)
1208 /* push forward reference */
1209 s
= global_identifier_push(v
, type
->t
, 0);
1212 s
->asm_label
= ad
->asm_label
;
1213 s
->type
.ref
= type
->ref
;
1214 /* copy type to the global stack */
1216 sym_copy_ref(s
, &global_stack
);
1218 patch_storage(s
, ad
, type
);
1220 /* push variables on local_stack if any */
1221 if (local_stack
&& (s
->type
.t
& VT_BTYPE
) != VT_FUNC
)
1222 s
= sym_copy(s
, &local_stack
);
1226 /* save registers up to (vtop - n) stack entry */
1227 ST_FUNC
void save_regs(int n
)
1230 for(p
= vstack
, p1
= vtop
- n
; p
<= p1
; p
++)
1234 /* save r to the memory stack, and mark it as being free */
1235 ST_FUNC
void save_reg(int r
)
1237 save_reg_upstack(r
, 0);
1240 /* save r to the memory stack, and mark it as being free,
1241 if seen up to (vtop - n) stack entry */
1242 ST_FUNC
void save_reg_upstack(int r
, int n
)
1244 int l
, size
, align
, bt
;
1247 if ((r
&= VT_VALMASK
) >= VT_CONST
)
1252 for(p
= vstack
, p1
= vtop
- n
; p
<= p1
; p
++) {
1253 if ((p
->r
& VT_VALMASK
) == r
|| p
->r2
== r
) {
1254 /* must save value on stack if not already done */
1256 bt
= p
->type
.t
& VT_BTYPE
;
1259 if ((p
->r
& VT_LVAL
) || bt
== VT_FUNC
)
1262 size
= type_size(&sv
.type
, &align
);
1263 l
= get_temp_local_var(size
,align
);
1264 sv
.r
= VT_LOCAL
| VT_LVAL
;
1266 store(p
->r
& VT_VALMASK
, &sv
);
1267 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
1268 /* x86 specific: need to pop fp register ST0 if saved */
1269 if (r
== TREG_ST0
) {
1270 o(0xd8dd); /* fstp %st(0) */
1273 /* special long long case */
1274 if (p
->r2
< VT_CONST
&& USING_TWO_WORDS(bt
)) {
1279 /* mark that stack entry as being saved on the stack */
1280 if (p
->r
& VT_LVAL
) {
1281 /* also clear the bounded flag because the
1282 relocation address of the function was stored in
1284 p
->r
= (p
->r
& ~(VT_VALMASK
| VT_BOUNDED
)) | VT_LLOCAL
;
1286 p
->r
= VT_LVAL
| VT_LOCAL
;
1295 #ifdef TCC_TARGET_ARM
1296 /* find a register of class 'rc2' with at most one reference on stack.
1297 * If none, call get_reg(rc) */
1298 ST_FUNC
int get_reg_ex(int rc
, int rc2
)
1303 for(r
=0;r
<NB_REGS
;r
++) {
1304 if (reg_classes
[r
] & rc2
) {
1307 for(p
= vstack
; p
<= vtop
; p
++) {
1308 if ((p
->r
& VT_VALMASK
) == r
||
1320 /* find a free register of class 'rc'. If none, save one register */
1321 ST_FUNC
int get_reg(int rc
)
1326 /* find a free register */
1327 for(r
=0;r
<NB_REGS
;r
++) {
1328 if (reg_classes
[r
] & rc
) {
1331 for(p
=vstack
;p
<=vtop
;p
++) {
1332 if ((p
->r
& VT_VALMASK
) == r
||
1341 /* no register left : free the first one on the stack (VERY
1342 IMPORTANT to start from the bottom to ensure that we don't
1343 spill registers used in gen_opi()) */
1344 for(p
=vstack
;p
<=vtop
;p
++) {
1345 /* look at second register (if long long) */
1347 if (r
< VT_CONST
&& (reg_classes
[r
] & rc
))
1349 r
= p
->r
& VT_VALMASK
;
1350 if (r
< VT_CONST
&& (reg_classes
[r
] & rc
)) {
1356 /* Should never comes here */
1360 /* find a free temporary local variable (return the offset on stack) match the size and align. If none, add new temporary stack variable*/
1361 static int get_temp_local_var(int size
,int align
){
1363 struct temp_local_variable
*temp_var
;
1370 for(i
=0;i
<nb_temp_local_vars
;i
++){
1371 temp_var
=&arr_temp_local_vars
[i
];
1372 if(temp_var
->size
<size
||align
!=temp_var
->align
){
1375 /*check if temp_var is free*/
1377 for(p
=vstack
;p
<=vtop
;p
++) {
1379 if(r
==VT_LOCAL
||r
==VT_LLOCAL
){
1380 if(p
->c
.i
==temp_var
->location
){
1387 found_var
=temp_var
->location
;
1393 loc
= (loc
- size
) & -align
;
1394 if(nb_temp_local_vars
<MAX_TEMP_LOCAL_VARIABLE_NUMBER
){
1395 temp_var
=&arr_temp_local_vars
[i
];
1396 temp_var
->location
=loc
;
1397 temp_var
->size
=size
;
1398 temp_var
->align
=align
;
1399 nb_temp_local_vars
++;
1406 static void clear_temp_local_var_list(){
1407 nb_temp_local_vars
=0;
1410 /* move register 's' (of type 't') to 'r', and flush previous value of r to memory
1412 static void move_reg(int r
, int s
, int t
)
1426 /* get address of vtop (vtop MUST BE an lvalue) */
1427 ST_FUNC
void gaddrof(void)
1429 vtop
->r
&= ~VT_LVAL
;
1430 /* tricky: if saved lvalue, then we can go back to lvalue */
1431 if ((vtop
->r
& VT_VALMASK
) == VT_LLOCAL
)
1432 vtop
->r
= (vtop
->r
& ~VT_VALMASK
) | VT_LOCAL
| VT_LVAL
;
1435 #ifdef CONFIG_TCC_BCHECK
1436 /* generate a bounded pointer addition */
1437 static void gen_bounded_ptr_add(void)
1439 int save
= (vtop
[-1].r
& VT_VALMASK
) == VT_LOCAL
;
1444 vpush_helper_func(TOK___bound_ptr_add
);
1449 /* returned pointer is in REG_IRET */
1450 vtop
->r
= REG_IRET
| VT_BOUNDED
;
1453 /* relocation offset of the bounding function call point */
1454 vtop
->c
.i
= (cur_text_section
->reloc
->data_offset
- sizeof(ElfW_Rel
));
1457 /* patch pointer addition in vtop so that pointer dereferencing is
1459 static void gen_bounded_ptr_deref(void)
1469 size
= type_size(&vtop
->type
, &align
);
1471 case 1: func
= TOK___bound_ptr_indir1
; break;
1472 case 2: func
= TOK___bound_ptr_indir2
; break;
1473 case 4: func
= TOK___bound_ptr_indir4
; break;
1474 case 8: func
= TOK___bound_ptr_indir8
; break;
1475 case 12: func
= TOK___bound_ptr_indir12
; break;
1476 case 16: func
= TOK___bound_ptr_indir16
; break;
1478 /* may happen with struct member access */
1481 sym
= external_helper_sym(func
);
1483 put_extern_sym(sym
, NULL
, 0, 0);
1484 /* patch relocation */
1485 /* XXX: find a better solution ? */
1486 rel
= (ElfW_Rel
*)(cur_text_section
->reloc
->data
+ vtop
->c
.i
);
1487 rel
->r_info
= ELFW(R_INFO
)(sym
->c
, ELFW(R_TYPE
)(rel
->r_info
));
1490 /* generate lvalue bound code */
1491 static void gbound(void)
1495 vtop
->r
&= ~VT_MUSTBOUND
;
1496 /* if lvalue, then use checking code before dereferencing */
1497 if (vtop
->r
& VT_LVAL
) {
1498 /* if not VT_BOUNDED value, then make one */
1499 if (!(vtop
->r
& VT_BOUNDED
)) {
1500 /* must save type because we must set it to int to get pointer */
1502 vtop
->type
.t
= VT_PTR
;
1505 gen_bounded_ptr_add();
1509 /* then check for dereferencing */
1510 gen_bounded_ptr_deref();
1514 /* we need to call __bound_ptr_add before we start to load function
1515 args into registers */
1516 ST_FUNC
void gbound_args(int nb_args
)
1521 for (i
= 1; i
<= nb_args
; ++i
)
1522 if (vtop
[1 - i
].r
& VT_MUSTBOUND
) {
1528 sv
= vtop
- nb_args
;
1529 if (sv
->r
& VT_SYM
) {
1533 #ifndef TCC_TARGET_PE
1534 || v
== TOK_sigsetjmp
1535 || v
== TOK___sigsetjmp
1538 vpush_helper_func(TOK___bound_setjmp
);
1541 func_bound_add_epilog
= 1;
1543 #if defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64
1544 if (v
== TOK_alloca
)
1545 func_bound_add_epilog
= 1;
1548 if (v
== TOK_longjmp
) /* undo rename to __longjmp14 */
1549 sv
->sym
->asm_label
= TOK___bound_longjmp
;
1554 /* Add bounds for local symbols from S to E (via ->prev) */
1555 static void add_local_bounds(Sym
*s
, Sym
*e
)
1557 for (; s
!= e
; s
= s
->prev
) {
1558 if (!s
->v
|| (s
->r
& VT_VALMASK
) != VT_LOCAL
)
1560 /* Add arrays/structs/unions because we always take address */
1561 if ((s
->type
.t
& VT_ARRAY
)
1562 || (s
->type
.t
& VT_BTYPE
) == VT_STRUCT
1563 || s
->a
.addrtaken
) {
1564 /* add local bound info */
1565 int align
, size
= type_size(&s
->type
, &align
);
1566 addr_t
*bounds_ptr
= section_ptr_add(lbounds_section
,
1567 2 * sizeof(addr_t
));
1568 bounds_ptr
[0] = s
->c
;
1569 bounds_ptr
[1] = size
;
1575 /* Wrapper around sym_pop, that potentially also registers local bounds. */
1576 static void pop_local_syms(Sym
*b
, int keep
)
1578 #ifdef CONFIG_TCC_BCHECK
1579 if (tcc_state
->do_bounds_check
&& !keep
&& (local_scope
|| !func_var
))
1580 add_local_bounds(local_stack
, b
);
1583 tcc_add_debug_info (tcc_state
, !local_scope
, local_stack
, b
);
1584 sym_pop(&local_stack
, b
, keep
);
1587 static void incr_bf_adr(int o
)
1589 vtop
->type
= char_pointer_type
;
1593 vtop
->type
.t
= VT_BYTE
| VT_UNSIGNED
;
1597 /* single-byte load mode for packed or otherwise unaligned bitfields */
1598 static void load_packed_bf(CType
*type
, int bit_pos
, int bit_size
)
1601 save_reg_upstack(vtop
->r
, 1);
1602 vpush64(type
->t
& VT_BTYPE
, 0); // B X
1603 bits
= 0, o
= bit_pos
>> 3, bit_pos
&= 7;
1612 vpushi(bit_pos
), gen_op(TOK_SHR
), bit_pos
= 0; // X B Y
1614 vpushi((1 << n
) - 1), gen_op('&');
1617 vpushi(bits
), gen_op(TOK_SHL
);
1620 bits
+= n
, bit_size
-= n
, o
= 1;
1623 if (!(type
->t
& VT_UNSIGNED
)) {
1624 n
= ((type
->t
& VT_BTYPE
) == VT_LLONG
? 64 : 32) - bits
;
1625 vpushi(n
), gen_op(TOK_SHL
);
1626 vpushi(n
), gen_op(TOK_SAR
);
1630 /* single-byte store mode for packed or otherwise unaligned bitfields */
1631 static void store_packed_bf(int bit_pos
, int bit_size
)
1633 int bits
, n
, o
, m
, c
;
1634 c
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
1636 save_reg_upstack(vtop
->r
, 1);
1637 bits
= 0, o
= bit_pos
>> 3, bit_pos
&= 7;
1639 incr_bf_adr(o
); // X B
1641 c
? vdup() : gv_dup(); // B V X
1644 vpushi(bits
), gen_op(TOK_SHR
);
1646 vpushi(bit_pos
), gen_op(TOK_SHL
);
1651 m
= ((1 << n
) - 1) << bit_pos
;
1652 vpushi(m
), gen_op('&'); // X B V1
1653 vpushv(vtop
-1); // X B V1 B
1654 vpushi(m
& 0x80 ? ~m
& 0x7f : ~m
);
1655 gen_op('&'); // X B V1 B1
1656 gen_op('|'); // X B V2
1658 vdup(), vtop
[-1] = vtop
[-2]; // X B B V2
1659 vstore(), vpop(); // X B
1660 bits
+= n
, bit_size
-= n
, bit_pos
= 0, o
= 1;
1665 static int adjust_bf(SValue
*sv
, int bit_pos
, int bit_size
)
1668 if (0 == sv
->type
.ref
)
1670 t
= sv
->type
.ref
->auxtype
;
1671 if (t
!= -1 && t
!= VT_STRUCT
) {
1672 sv
->type
.t
= (sv
->type
.t
& ~(VT_BTYPE
| VT_LONG
)) | t
;
1678 /* store vtop a register belonging to class 'rc'. lvalues are
1679 converted to values. Cannot be used if cannot be converted to
1680 register value (such as structures). */
1681 ST_FUNC
int gv(int rc
)
1683 int r
, r2
, r_ok
, r2_ok
, rc2
, bt
;
1684 int bit_pos
, bit_size
, size
, align
;
1686 /* NOTE: get_reg can modify vstack[] */
1687 if (vtop
->type
.t
& VT_BITFIELD
) {
1690 bit_pos
= BIT_POS(vtop
->type
.t
);
1691 bit_size
= BIT_SIZE(vtop
->type
.t
);
1692 /* remove bit field info to avoid loops */
1693 vtop
->type
.t
&= ~VT_STRUCT_MASK
;
1696 type
.t
= vtop
->type
.t
& VT_UNSIGNED
;
1697 if ((vtop
->type
.t
& VT_BTYPE
) == VT_BOOL
)
1698 type
.t
|= VT_UNSIGNED
;
1700 r
= adjust_bf(vtop
, bit_pos
, bit_size
);
1702 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
)
1707 if (r
== VT_STRUCT
) {
1708 load_packed_bf(&type
, bit_pos
, bit_size
);
1710 int bits
= (type
.t
& VT_BTYPE
) == VT_LLONG
? 64 : 32;
1711 /* cast to int to propagate signedness in following ops */
1713 /* generate shifts */
1714 vpushi(bits
- (bit_pos
+ bit_size
));
1716 vpushi(bits
- bit_size
);
1717 /* NOTE: transformed to SHR if unsigned */
1722 if (is_float(vtop
->type
.t
) &&
1723 (vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
) {
1724 /* CPUs usually cannot use float constants, so we store them
1725 generically in data segment */
1726 init_params p
= { rodata_section
};
1727 unsigned long offset
;
1728 size
= type_size(&vtop
->type
, &align
);
1730 size
= 0, align
= 1;
1731 offset
= section_add(p
.sec
, size
, align
);
1732 vpush_ref(&vtop
->type
, p
.sec
, offset
, size
);
1734 init_putv(&p
, &vtop
->type
, offset
);
1737 #ifdef CONFIG_TCC_BCHECK
1738 if (vtop
->r
& VT_MUSTBOUND
)
1742 bt
= vtop
->type
.t
& VT_BTYPE
;
1744 #ifdef TCC_TARGET_RISCV64
1746 if (bt
== VT_LDOUBLE
&& rc
== RC_FLOAT
)
1749 rc2
= RC2_TYPE(bt
, rc
);
1751 /* need to reload if:
1753 - lvalue (need to dereference pointer)
1754 - already a register, but not in the right class */
1755 r
= vtop
->r
& VT_VALMASK
;
1756 r_ok
= !(vtop
->r
& VT_LVAL
) && (r
< VT_CONST
) && (reg_classes
[r
] & rc
);
1757 r2_ok
= !rc2
|| ((vtop
->r2
< VT_CONST
) && (reg_classes
[vtop
->r2
] & rc2
));
1759 if (!r_ok
|| !r2_ok
) {
1763 int load_type
= (bt
== VT_QFLOAT
) ? VT_DOUBLE
: VT_PTRDIFF_T
;
1764 int original_type
= vtop
->type
.t
;
1766 /* two register type load :
1767 expand to two words temporarily */
1768 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
) {
1770 unsigned long long ll
= vtop
->c
.i
;
1771 vtop
->c
.i
= ll
; /* first word */
1773 vtop
->r
= r
; /* save register value */
1774 vpushi(ll
>> 32); /* second word */
1775 } else if (vtop
->r
& VT_LVAL
) {
1776 /* We do not want to modifier the long long pointer here.
1777 So we save any other instances down the stack */
1778 save_reg_upstack(vtop
->r
, 1);
1779 /* load from memory */
1780 vtop
->type
.t
= load_type
;
1783 vtop
[-1].r
= r
; /* save register value */
1784 /* increment pointer to get second word */
1785 vtop
->type
.t
= VT_PTRDIFF_T
;
1790 vtop
->type
.t
= load_type
;
1792 /* move registers */
1795 if (r2_ok
&& vtop
->r2
< VT_CONST
)
1798 vtop
[-1].r
= r
; /* save register value */
1799 vtop
->r
= vtop
[-1].r2
;
1801 /* Allocate second register. Here we rely on the fact that
1802 get_reg() tries first to free r2 of an SValue. */
1806 /* write second register */
1809 vtop
->type
.t
= original_type
;
1811 if (vtop
->r
== VT_CMP
)
1813 /* one register type load */
1818 #ifdef TCC_TARGET_C67
1819 /* uses register pairs for doubles */
1820 if (bt
== VT_DOUBLE
)
1827 /* generate vtop[-1] and vtop[0] in resp. classes rc1 and rc2 */
1828 ST_FUNC
void gv2(int rc1
, int rc2
)
1830 /* generate more generic register first. But VT_JMP or VT_CMP
1831 values must be generated first in all cases to avoid possible
1833 if (vtop
->r
!= VT_CMP
&& rc1
<= rc2
) {
1838 /* test if reload is needed for first register */
1839 if ((vtop
[-1].r
& VT_VALMASK
) >= VT_CONST
) {
1849 /* test if reload is needed for first register */
1850 if ((vtop
[0].r
& VT_VALMASK
) >= VT_CONST
) {
1857 /* expand 64bit on stack in two ints */
1858 ST_FUNC
void lexpand(void)
1861 u
= vtop
->type
.t
& (VT_DEFSIGN
| VT_UNSIGNED
);
1862 v
= vtop
->r
& (VT_VALMASK
| VT_LVAL
);
1863 if (v
== VT_CONST
) {
1866 } else if (v
== (VT_LVAL
|VT_CONST
) || v
== (VT_LVAL
|VT_LOCAL
)) {
1872 vtop
[0].r
= vtop
[-1].r2
;
1873 vtop
[0].r2
= vtop
[-1].r2
= VT_CONST
;
1875 vtop
[0].type
.t
= vtop
[-1].type
.t
= VT_INT
| u
;
1880 /* build a long long from two ints */
1881 static void lbuild(int t
)
1883 gv2(RC_INT
, RC_INT
);
1884 vtop
[-1].r2
= vtop
[0].r
;
1885 vtop
[-1].type
.t
= t
;
1890 /* convert stack entry to register and duplicate its value in another
1892 static void gv_dup(void)
1898 if ((t
& VT_BTYPE
) == VT_LLONG
) {
1899 if (t
& VT_BITFIELD
) {
1909 /* stack: H L L1 H1 */
1919 /* duplicate value */
1929 /* generate CPU independent (unsigned) long long operations */
1930 static void gen_opl(int op
)
1932 int t
, a
, b
, op1
, c
, i
;
1934 unsigned short reg_iret
= REG_IRET
;
1935 unsigned short reg_lret
= REG_IRE2
;
1941 func
= TOK___divdi3
;
1944 func
= TOK___udivdi3
;
1947 func
= TOK___moddi3
;
1950 func
= TOK___umoddi3
;
1957 /* call generic long long function */
1958 vpush_helper_func(func
);
1963 vtop
->r2
= reg_lret
;
1971 //pv("gen_opl A",0,2);
1977 /* stack: L1 H1 L2 H2 */
1982 vtop
[-2] = vtop
[-3];
1985 /* stack: H1 H2 L1 L2 */
1986 //pv("gen_opl B",0,4);
1992 /* stack: H1 H2 L1 L2 ML MH */
1995 /* stack: ML MH H1 H2 L1 L2 */
1999 /* stack: ML MH H1 L2 H2 L1 */
2004 /* stack: ML MH M1 M2 */
2007 } else if (op
== '+' || op
== '-') {
2008 /* XXX: add non carry method too (for MIPS or alpha) */
2014 /* stack: H1 H2 (L1 op L2) */
2017 gen_op(op1
+ 1); /* TOK_xxxC2 */
2020 /* stack: H1 H2 (L1 op L2) */
2023 /* stack: (L1 op L2) H1 H2 */
2025 /* stack: (L1 op L2) (H1 op H2) */
2033 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
2034 t
= vtop
[-1].type
.t
;
2038 /* stack: L H shift */
2040 /* constant: simpler */
2041 /* NOTE: all comments are for SHL. the other cases are
2042 done by swapping words */
2053 if (op
!= TOK_SAR
) {
2086 /* XXX: should provide a faster fallback on x86 ? */
2089 func
= TOK___ashrdi3
;
2092 func
= TOK___lshrdi3
;
2095 func
= TOK___ashldi3
;
2101 /* compare operations */
2107 /* stack: L1 H1 L2 H2 */
2109 vtop
[-1] = vtop
[-2];
2111 /* stack: L1 L2 H1 H2 */
2115 /* when values are equal, we need to compare low words. since
2116 the jump is inverted, we invert the test too. */
2119 else if (op1
== TOK_GT
)
2121 else if (op1
== TOK_ULT
)
2123 else if (op1
== TOK_UGT
)
2133 /* generate non equal test */
2135 vset_VT_CMP(TOK_NE
);
2139 /* compare low. Always unsigned */
2143 else if (op1
== TOK_LE
)
2145 else if (op1
== TOK_GT
)
2147 else if (op1
== TOK_GE
)
2150 #if 0//def TCC_TARGET_I386
2151 if (op
== TOK_NE
) { gsym(b
); break; }
2152 if (op
== TOK_EQ
) { gsym(a
); break; }
/* Emulate a signed 64-bit division on unsigned operands: divide the
   magnitudes, then negate the quotient when the operand signs differ.
   Unsigned wrap-around makes the INT64_MIN cases well-defined. */
static uint64_t gen_opic_sdiv(uint64_t a, uint64_t b)
{
    uint64_t ua = (a >> 63) ? 0 - a : a;   /* |a| */
    uint64_t ub = (b >> 63) ? 0 - b : b;   /* |b| */
    uint64_t q = ua / ub;

    if ((a ^ b) >> 63)  /* signs differ -> negative quotient */
        q = 0 - q;
    return q;
}
/* Signed 64-bit "less than" computed on unsigned operands: XOR-ing the
   sign bit maps the signed ordering onto the unsigned ordering. */
static int gen_opic_lt(uint64_t a, uint64_t b)
{
    const uint64_t sign_bit = (uint64_t)1 << 63;
    return (a ^ sign_bit) < (b ^ sign_bit);
}
2172 /* handle integer constant optimizations and various machine
2174 static void gen_opic(int op
)
2176 SValue
*v1
= vtop
- 1;
2178 int t1
= v1
->type
.t
& VT_BTYPE
;
2179 int t2
= v2
->type
.t
& VT_BTYPE
;
2180 int c1
= (v1
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2181 int c2
= (v2
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2182 uint64_t l1
= c1
? v1
->c
.i
: 0;
2183 uint64_t l2
= c2
? v2
->c
.i
: 0;
2184 int shm
= (t1
== VT_LLONG
) ? 63 : 31;
2186 if (t1
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t1
!= VT_PTR
))
2187 l1
= ((uint32_t)l1
|
2188 (v1
->type
.t
& VT_UNSIGNED
? 0 : -(l1
& 0x80000000)));
2189 if (t2
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t2
!= VT_PTR
))
2190 l2
= ((uint32_t)l2
|
2191 (v2
->type
.t
& VT_UNSIGNED
? 0 : -(l2
& 0x80000000)));
2195 case '+': l1
+= l2
; break;
2196 case '-': l1
-= l2
; break;
2197 case '&': l1
&= l2
; break;
2198 case '^': l1
^= l2
; break;
2199 case '|': l1
|= l2
; break;
2200 case '*': l1
*= l2
; break;
2207 /* if division by zero, generate explicit division */
2209 if (const_wanted
&& !(nocode_wanted
& unevalmask
))
2210 tcc_error("division by zero in constant");
2214 default: l1
= gen_opic_sdiv(l1
, l2
); break;
2215 case '%': l1
= l1
- l2
* gen_opic_sdiv(l1
, l2
); break;
2216 case TOK_UDIV
: l1
= l1
/ l2
; break;
2217 case TOK_UMOD
: l1
= l1
% l2
; break;
2220 case TOK_SHL
: l1
<<= (l2
& shm
); break;
2221 case TOK_SHR
: l1
>>= (l2
& shm
); break;
2223 l1
= (l1
>> 63) ? ~(~l1
>> (l2
& shm
)) : l1
>> (l2
& shm
);
2226 case TOK_ULT
: l1
= l1
< l2
; break;
2227 case TOK_UGE
: l1
= l1
>= l2
; break;
2228 case TOK_EQ
: l1
= l1
== l2
; break;
2229 case TOK_NE
: l1
= l1
!= l2
; break;
2230 case TOK_ULE
: l1
= l1
<= l2
; break;
2231 case TOK_UGT
: l1
= l1
> l2
; break;
2232 case TOK_LT
: l1
= gen_opic_lt(l1
, l2
); break;
2233 case TOK_GE
: l1
= !gen_opic_lt(l1
, l2
); break;
2234 case TOK_LE
: l1
= !gen_opic_lt(l2
, l1
); break;
2235 case TOK_GT
: l1
= gen_opic_lt(l2
, l1
); break;
2237 case TOK_LAND
: l1
= l1
&& l2
; break;
2238 case TOK_LOR
: l1
= l1
|| l2
; break;
2242 if (t1
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t1
!= VT_PTR
))
2243 l1
= ((uint32_t)l1
|
2244 (v1
->type
.t
& VT_UNSIGNED
? 0 : -(l1
& 0x80000000)));
2248 /* if commutative ops, put c2 as constant */
2249 if (c1
&& (op
== '+' || op
== '&' || op
== '^' ||
2250 op
== '|' || op
== '*' || op
== TOK_EQ
|| op
== TOK_NE
)) {
2252 c2
= c1
; //c = c1, c1 = c2, c2 = c;
2253 l2
= l1
; //l = l1, l1 = l2, l2 = l;
2255 if (!const_wanted
&&
2257 (op
== TOK_SHL
|| op
== TOK_SHR
|| op
== TOK_SAR
)) ||
2258 (l1
== -1 && op
== TOK_SAR
))) {
2259 /* treat (0 << x), (0 >> x) and (-1 >> x) as constant */
2261 } else if (!const_wanted
&&
2262 c2
&& ((l2
== 0 && (op
== '&' || op
== '*')) ||
2264 (l2
== -1 || (l2
== 0xFFFFFFFF && t2
!= VT_LLONG
))) ||
2265 (l2
== 1 && (op
== '%' || op
== TOK_UMOD
)))) {
2266 /* treat (x & 0), (x * 0), (x | -1) and (x % 1) as constant */
2271 } else if (c2
&& (((op
== '*' || op
== '/' || op
== TOK_UDIV
||
2274 ((op
== '+' || op
== '-' || op
== '|' || op
== '^' ||
2275 op
== TOK_SHL
|| op
== TOK_SHR
|| op
== TOK_SAR
) &&
2278 (l2
== -1 || (l2
== 0xFFFFFFFF && t2
!= VT_LLONG
))))) {
2279 /* filter out NOP operations like x*1, x-0, x&-1... */
2281 } else if (c2
&& (op
== '*' || op
== TOK_PDIV
|| op
== TOK_UDIV
)) {
2282 /* try to use shifts instead of muls or divs */
2283 if (l2
> 0 && (l2
& (l2
- 1)) == 0) {
2292 else if (op
== TOK_PDIV
)
2298 } else if (c2
&& (op
== '+' || op
== '-') &&
2299 (((vtop
[-1].r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == (VT_CONST
| VT_SYM
))
2300 || (vtop
[-1].r
& (VT_VALMASK
| VT_LVAL
)) == VT_LOCAL
)) {
2301 /* symbol + constant case */
2305 /* The backends can't always deal with addends to symbols
2306 larger than +-1<<31. Don't construct such. */
2313 /* call low level op generator */
2314 if (t1
== VT_LLONG
|| t2
== VT_LLONG
||
2315 (PTR_SIZE
== 8 && (t1
== VT_PTR
|| t2
== VT_PTR
)))
2323 #if defined TCC_TARGET_X86_64 || defined TCC_TARGET_I386
2324 # define gen_negf gen_opf
2325 #elif defined TCC_TARGET_ARM
2326 void gen_negf(int op
)
2328 /* arm will detect 0-x and replace by vneg */
2329 vpushi(0), vswap(), gen_op('-');
2332 /* XXX: implement in gen_opf() for other backends too */
2333 void gen_negf(int op
)
2335 /* In IEEE negate(x) isn't subtract(0,x). Without NaNs it's
2336 subtract(-0, x), but with them it's really a sign flip
2337 operation. We implement this with bit manipulation and have
2338 to do some type reinterpretation for this, which TCC can do
2341 int align
, size
, bt
;
2343 size
= type_size(&vtop
->type
, &align
);
2344 bt
= vtop
->type
.t
& VT_BTYPE
;
2345 save_reg(gv(RC_TYPE(bt
)));
2347 incr_bf_adr(size
- 1);
2349 vpushi(0x80); /* flip sign */
2356 /* generate a floating point operation with constant propagation */
2357 static void gen_opif(int op
)
2361 #if defined _MSC_VER && defined __x86_64__
2362 /* avoid bad optimization with f1 -= f2 for f1:-0.0, f2:0.0 */
2372 /* currently, we cannot do computations with forward symbols */
2373 c1
= (v1
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2374 c2
= (v2
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2376 if (v1
->type
.t
== VT_FLOAT
) {
2379 } else if (v1
->type
.t
== VT_DOUBLE
) {
2386 /* NOTE: we only do constant propagation if finite number (not
2387 NaN or infinity) (ANSI spec) */
2388 if (!(ieee_finite(f1
) || !ieee_finite(f2
)) && !const_wanted
)
2391 case '+': f1
+= f2
; break;
2392 case '-': f1
-= f2
; break;
2393 case '*': f1
*= f2
; break;
2396 union { float f
; unsigned u
; } x1
, x2
, y
;
2397 /* If not in initializer we need to potentially generate
2398 FP exceptions at runtime, otherwise we want to fold. */
2401 /* the run-time result of 0.0/0.0 on x87, also of other compilers
2402 when used to compile the f1 /= f2 below, would be -nan */
2403 x1
.f
= f1
, x2
.f
= f2
;
2405 y
.u
= 0x7fc00000; /* nan */
2407 y
.u
= 0x7f800000; /* infinity */
2408 y
.u
|= (x1
.u
^ x2
.u
) & 0x80000000; /* set sign */
2417 /* XXX: also handles tests ? */
2423 /* XXX: overflow test ? */
2424 if (v1
->type
.t
== VT_FLOAT
) {
2426 } else if (v1
->type
.t
== VT_DOUBLE
) {
2433 if (op
== TOK_NEG
) {
2441 /* print a type. If 'varstr' is not NULL, then the variable is also
2442 printed in the type */
2444 /* XXX: add array and function pointers */
2445 static void type_to_str(char *buf
, int buf_size
,
2446 CType
*type
, const char *varstr
)
2458 pstrcat(buf
, buf_size
, "extern ");
2460 pstrcat(buf
, buf_size
, "static ");
2462 pstrcat(buf
, buf_size
, "typedef ");
2464 pstrcat(buf
, buf_size
, "inline ");
2466 if (t
& VT_VOLATILE
)
2467 pstrcat(buf
, buf_size
, "volatile ");
2468 if (t
& VT_CONSTANT
)
2469 pstrcat(buf
, buf_size
, "const ");
2471 if (((t
& VT_DEFSIGN
) && bt
== VT_BYTE
)
2472 || ((t
& VT_UNSIGNED
)
2473 && (bt
== VT_SHORT
|| bt
== VT_INT
|| bt
== VT_LLONG
)
2476 pstrcat(buf
, buf_size
, (t
& VT_UNSIGNED
) ? "unsigned " : "signed ");
2478 buf_size
-= strlen(buf
);
2514 tstr
= "long double";
2516 pstrcat(buf
, buf_size
, tstr
);
2523 pstrcat(buf
, buf_size
, tstr
);
2524 v
= type
->ref
->v
& ~SYM_STRUCT
;
2525 if (v
>= SYM_FIRST_ANOM
)
2526 pstrcat(buf
, buf_size
, "<anonymous>");
2528 pstrcat(buf
, buf_size
, get_tok_str(v
, NULL
));
2533 if (varstr
&& '*' == *varstr
) {
2534 pstrcat(buf1
, sizeof(buf1
), "(");
2535 pstrcat(buf1
, sizeof(buf1
), varstr
);
2536 pstrcat(buf1
, sizeof(buf1
), ")");
2538 pstrcat(buf1
, buf_size
, "(");
2540 while (sa
!= NULL
) {
2542 type_to_str(buf2
, sizeof(buf2
), &sa
->type
, NULL
);
2543 pstrcat(buf1
, sizeof(buf1
), buf2
);
2546 pstrcat(buf1
, sizeof(buf1
), ", ");
2548 if (s
->f
.func_type
== FUNC_ELLIPSIS
)
2549 pstrcat(buf1
, sizeof(buf1
), ", ...");
2550 pstrcat(buf1
, sizeof(buf1
), ")");
2551 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
2555 if (t
& (VT_ARRAY
|VT_VLA
)) {
2556 if (varstr
&& '*' == *varstr
)
2557 snprintf(buf1
, sizeof(buf1
), "(%s)[%d]", varstr
, s
->c
);
2559 snprintf(buf1
, sizeof(buf1
), "%s[%d]", varstr
? varstr
: "", s
->c
);
2560 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
2563 pstrcpy(buf1
, sizeof(buf1
), "*");
2564 if (t
& VT_CONSTANT
)
2565 pstrcat(buf1
, buf_size
, "const ");
2566 if (t
& VT_VOLATILE
)
2567 pstrcat(buf1
, buf_size
, "volatile ");
2569 pstrcat(buf1
, sizeof(buf1
), varstr
);
2570 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
2574 pstrcat(buf
, buf_size
, " ");
2575 pstrcat(buf
, buf_size
, varstr
);
2580 static void type_incompatibility_error(CType
* st
, CType
* dt
, const char* fmt
)
2582 char buf1
[256], buf2
[256];
2583 type_to_str(buf1
, sizeof(buf1
), st
, NULL
);
2584 type_to_str(buf2
, sizeof(buf2
), dt
, NULL
);
2585 tcc_error(fmt
, buf1
, buf2
);
2588 static void type_incompatibility_warning(CType
* st
, CType
* dt
, const char* fmt
)
2590 char buf1
[256], buf2
[256];
2591 type_to_str(buf1
, sizeof(buf1
), st
, NULL
);
2592 type_to_str(buf2
, sizeof(buf2
), dt
, NULL
);
2593 tcc_warning(fmt
, buf1
, buf2
);
2596 static int pointed_size(CType
*type
)
2599 return type_size(pointed_type(type
), &align
);
2602 static inline int is_null_pointer(SValue
*p
)
2604 if ((p
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) != VT_CONST
)
2606 return ((p
->type
.t
& VT_BTYPE
) == VT_INT
&& (uint32_t)p
->c
.i
== 0) ||
2607 ((p
->type
.t
& VT_BTYPE
) == VT_LLONG
&& p
->c
.i
== 0) ||
2608 ((p
->type
.t
& VT_BTYPE
) == VT_PTR
&&
2609 (PTR_SIZE
== 4 ? (uint32_t)p
->c
.i
== 0 : p
->c
.i
== 0) &&
2610 ((pointed_type(&p
->type
)->t
& VT_BTYPE
) == VT_VOID
) &&
2611 0 == (pointed_type(&p
->type
)->t
& (VT_CONSTANT
| VT_VOLATILE
))
2615 /* compare function types. OLD functions match any new functions */
2616 static int is_compatible_func(CType
*type1
, CType
*type2
)
2622 if (s1
->f
.func_call
!= s2
->f
.func_call
)
2624 if (s1
->f
.func_type
!= s2
->f
.func_type
2625 && s1
->f
.func_type
!= FUNC_OLD
2626 && s2
->f
.func_type
!= FUNC_OLD
)
2629 if (!is_compatible_unqualified_types(&s1
->type
, &s2
->type
))
2631 if (s1
->f
.func_type
== FUNC_OLD
|| s2
->f
.func_type
== FUNC_OLD
)
2642 /* return true if type1 and type2 are the same. If unqualified is
2643 true, qualifiers on the types are ignored.
2645 static int compare_types(CType
*type1
, CType
*type2
, int unqualified
)
2649 t1
= type1
->t
& VT_TYPE
;
2650 t2
= type2
->t
& VT_TYPE
;
2652 /* strip qualifiers before comparing */
2653 t1
&= ~(VT_CONSTANT
| VT_VOLATILE
);
2654 t2
&= ~(VT_CONSTANT
| VT_VOLATILE
);
2657 /* Default Vs explicit signedness only matters for char */
2658 if ((t1
& VT_BTYPE
) != VT_BYTE
) {
2662 /* XXX: bitfields ? */
2667 && !(type1
->ref
->c
< 0
2668 || type2
->ref
->c
< 0
2669 || type1
->ref
->c
== type2
->ref
->c
))
2672 /* test more complicated cases */
2673 bt1
= t1
& VT_BTYPE
;
2674 if (bt1
== VT_PTR
) {
2675 type1
= pointed_type(type1
);
2676 type2
= pointed_type(type2
);
2677 return is_compatible_types(type1
, type2
);
2678 } else if (bt1
== VT_STRUCT
) {
2679 return (type1
->ref
== type2
->ref
);
2680 } else if (bt1
== VT_FUNC
) {
2681 return is_compatible_func(type1
, type2
);
2682 } else if (IS_ENUM(type1
->t
) && IS_ENUM(type2
->t
)) {
2683 /* If both are enums then they must be the same, if only one is then
2684 t1 and t2 must be equal, which was checked above already. */
2685 return type1
->ref
== type2
->ref
;
2691 /* Check if OP1 and OP2 can be "combined" with operation OP, the combined
2692 type is stored in DEST if non-null (except for pointer plus/minus) . */
2693 static int combine_types(CType
*dest
, SValue
*op1
, SValue
*op2
, int op
)
2695 CType
*type1
= &op1
->type
, *type2
= &op2
->type
, type
;
2696 int t1
= type1
->t
, t2
= type2
->t
, bt1
= t1
& VT_BTYPE
, bt2
= t2
& VT_BTYPE
;
2702 if (bt1
== VT_VOID
|| bt2
== VT_VOID
) {
2703 ret
= op
== '?' ? 1 : 0;
2704 /* NOTE: as an extension, we accept void on only one side */
2706 } else if (bt1
== VT_PTR
|| bt2
== VT_PTR
) {
2707 if (op
== '+') ; /* Handled in caller */
2708 /* http://port70.net/~nsz/c/c99/n1256.html#6.5.15p6 */
2709 /* If one is a null ptr constant the result type is the other. */
2710 else if (is_null_pointer (op2
)) type
= *type1
;
2711 else if (is_null_pointer (op1
)) type
= *type2
;
2712 else if (bt1
!= bt2
) {
2713 /* accept comparison or cond-expr between pointer and integer
2715 if ((op
== '?' || TOK_ISCOND(op
))
2716 && (is_integer_btype(bt1
) || is_integer_btype(bt2
)))
2717 tcc_warning("pointer/integer mismatch in %s",
2718 op
== '?' ? "conditional expression" : "comparison");
2719 else if (op
!= '-' || !is_integer_btype(bt2
))
2721 type
= *(bt1
== VT_PTR
? type1
: type2
);
2723 CType
*pt1
= pointed_type(type1
);
2724 CType
*pt2
= pointed_type(type2
);
2725 int pbt1
= pt1
->t
& VT_BTYPE
;
2726 int pbt2
= pt2
->t
& VT_BTYPE
;
2727 int newquals
, copied
= 0;
2728 if (pbt1
!= VT_VOID
&& pbt2
!= VT_VOID
2729 && !compare_types(pt1
, pt2
, 1/*unqualif*/)) {
2730 if (op
!= '?' && !TOK_ISCOND(op
))
2733 type_incompatibility_warning(type1
, type2
,
2735 ? "pointer type mismatch in conditional expression ('%s' and '%s')"
2736 : "pointer type mismatch in comparison('%s' and '%s')");
2739 /* pointers to void get preferred, otherwise the
2740 pointed to types minus qualifs should be compatible */
2741 type
= *((pbt1
== VT_VOID
) ? type1
: type2
);
2742 /* combine qualifs */
2743 newquals
= ((pt1
->t
| pt2
->t
) & (VT_CONSTANT
| VT_VOLATILE
));
2744 if ((~pointed_type(&type
)->t
& (VT_CONSTANT
| VT_VOLATILE
))
2747 /* copy the pointer target symbol */
2748 type
.ref
= sym_push(SYM_FIELD
, &type
.ref
->type
,
2751 pointed_type(&type
)->t
|= newquals
;
2753 /* pointers to incomplete arrays get converted to
2754 pointers to completed ones if possible */
2755 if (pt1
->t
& VT_ARRAY
2756 && pt2
->t
& VT_ARRAY
2757 && pointed_type(&type
)->ref
->c
< 0
2758 && (pt1
->ref
->c
> 0 || pt2
->ref
->c
> 0))
2761 type
.ref
= sym_push(SYM_FIELD
, &type
.ref
->type
,
2763 pointed_type(&type
)->ref
=
2764 sym_push(SYM_FIELD
, &pointed_type(&type
)->ref
->type
,
2765 0, pointed_type(&type
)->ref
->c
);
2766 pointed_type(&type
)->ref
->c
=
2767 0 < pt1
->ref
->c
? pt1
->ref
->c
: pt2
->ref
->c
;
2773 } else if (bt1
== VT_STRUCT
|| bt2
== VT_STRUCT
) {
2774 if (op
!= '?' || !compare_types(type1
, type2
, 1))
2777 } else if (is_float(bt1
) || is_float(bt2
)) {
2778 if (bt1
== VT_LDOUBLE
|| bt2
== VT_LDOUBLE
) {
2779 type
.t
= VT_LDOUBLE
;
2780 } else if (bt1
== VT_DOUBLE
|| bt2
== VT_DOUBLE
) {
2785 } else if (bt1
== VT_LLONG
|| bt2
== VT_LLONG
) {
2786 /* cast to biggest op */
2787 type
.t
= VT_LLONG
| VT_LONG
;
2788 if (bt1
== VT_LLONG
)
2790 if (bt2
== VT_LLONG
)
2792 /* convert to unsigned if it does not fit in a long long */
2793 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
) ||
2794 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
))
2795 type
.t
|= VT_UNSIGNED
;
2797 /* integer operations */
2798 type
.t
= VT_INT
| (VT_LONG
& (t1
| t2
));
2799 /* convert to unsigned if it does not fit in an integer */
2800 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
) ||
2801 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
))
2802 type
.t
|= VT_UNSIGNED
;
2809 /* generic gen_op: handles types problems */
2810 ST_FUNC
void gen_op(int op
)
2812 int t1
, t2
, bt1
, bt2
, t
;
2813 CType type1
, combtype
;
2816 t1
= vtop
[-1].type
.t
;
2817 t2
= vtop
[0].type
.t
;
2818 bt1
= t1
& VT_BTYPE
;
2819 bt2
= t2
& VT_BTYPE
;
2821 if (bt1
== VT_FUNC
|| bt2
== VT_FUNC
) {
2822 if (bt2
== VT_FUNC
) {
2823 mk_pointer(&vtop
->type
);
2826 if (bt1
== VT_FUNC
) {
2828 mk_pointer(&vtop
->type
);
2833 } else if (!combine_types(&combtype
, vtop
- 1, vtop
, op
)) {
2834 tcc_error_noabort("invalid operand types for binary operation");
2836 } else if (bt1
== VT_PTR
|| bt2
== VT_PTR
) {
2837 /* at least one operand is a pointer */
2838 /* relational op: must be both pointers */
2842 /* if both pointers, then it must be the '-' op */
2843 if (bt1
== VT_PTR
&& bt2
== VT_PTR
) {
2845 tcc_error("cannot use pointers here");
2846 vpush_type_size(pointed_type(&vtop
[-1].type
), &align
);
2849 vtop
->type
.t
= VT_PTRDIFF_T
;
2853 /* exactly one pointer : must be '+' or '-'. */
2854 if (op
!= '-' && op
!= '+')
2855 tcc_error("cannot use pointers here");
2856 /* Put pointer as first operand */
2857 if (bt2
== VT_PTR
) {
2859 t
= t1
, t1
= t2
, t2
= t
;
2862 if ((vtop
[0].type
.t
& VT_BTYPE
) == VT_LLONG
)
2863 /* XXX: truncate here because gen_opl can't handle ptr + long long */
2866 type1
= vtop
[-1].type
;
2867 vpush_type_size(pointed_type(&vtop
[-1].type
), &align
);
2869 #ifdef CONFIG_TCC_BCHECK
2870 if (tcc_state
->do_bounds_check
&& !const_wanted
) {
2871 /* if bounded pointers, we generate a special code to
2878 gen_bounded_ptr_add();
2884 type1
.t
&= ~(VT_ARRAY
|VT_VLA
);
2885 /* put again type if gen_opic() swaped operands */
2889 /* floats can only be used for a few operations */
2890 if (is_float(combtype
.t
)
2891 && op
!= '+' && op
!= '-' && op
!= '*' && op
!= '/'
2893 tcc_error("invalid operands for binary operation");
2894 else if (op
== TOK_SHR
|| op
== TOK_SAR
|| op
== TOK_SHL
) {
2895 t
= bt1
== VT_LLONG
? VT_LLONG
: VT_INT
;
2896 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (t
| VT_UNSIGNED
))
2898 t
|= (VT_LONG
& t1
);
2902 t
= t2
= combtype
.t
;
2903 /* XXX: currently, some unsigned operations are explicit, so
2904 we modify them here */
2905 if (t
& VT_UNSIGNED
) {
2912 else if (op
== TOK_LT
)
2914 else if (op
== TOK_GT
)
2916 else if (op
== TOK_LE
)
2918 else if (op
== TOK_GE
)
2924 /* special case for shifts and long long: we keep the shift as
2926 if (op
== TOK_SHR
|| op
== TOK_SAR
|| op
== TOK_SHL
)
2933 if (TOK_ISCOND(op
)) {
2934 /* relational op: the result is an int */
2935 vtop
->type
.t
= VT_INT
;
2940 // Make sure that we have converted to an rvalue:
2941 if (vtop
->r
& VT_LVAL
)
2942 gv(is_float(vtop
->type
.t
& VT_BTYPE
) ? RC_FLOAT
: RC_INT
);
2945 #if defined TCC_TARGET_ARM64 || defined TCC_TARGET_RISCV64 || defined TCC_TARGET_ARM
2946 #define gen_cvt_itof1 gen_cvt_itof
2948 /* generic itof for unsigned long long case */
2949 static void gen_cvt_itof1(int t
)
2951 if ((vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
)) ==
2952 (VT_LLONG
| VT_UNSIGNED
)) {
2955 vpush_helper_func(TOK___floatundisf
);
2956 #if LDOUBLE_SIZE != 8
2957 else if (t
== VT_LDOUBLE
)
2958 vpush_helper_func(TOK___floatundixf
);
2961 vpush_helper_func(TOK___floatundidf
);
2972 #if defined TCC_TARGET_ARM64 || defined TCC_TARGET_RISCV64
2973 #define gen_cvt_ftoi1 gen_cvt_ftoi
2975 /* generic ftoi for unsigned long long case */
2976 static void gen_cvt_ftoi1(int t
)
2979 if (t
== (VT_LLONG
| VT_UNSIGNED
)) {
2980 /* not handled natively */
2981 st
= vtop
->type
.t
& VT_BTYPE
;
2983 vpush_helper_func(TOK___fixunssfdi
);
2984 #if LDOUBLE_SIZE != 8
2985 else if (st
== VT_LDOUBLE
)
2986 vpush_helper_func(TOK___fixunsxfdi
);
2989 vpush_helper_func(TOK___fixunsdfdi
);
3000 /* special delayed cast for char/short */
3001 static void force_charshort_cast(void)
3003 int sbt
= BFGET(vtop
->r
, VT_MUSTCAST
) == 2 ? VT_LLONG
: VT_INT
;
3004 int dbt
= vtop
->type
.t
;
3005 vtop
->r
&= ~VT_MUSTCAST
;
3007 gen_cast_s(dbt
== VT_BOOL
? VT_BYTE
|VT_UNSIGNED
: dbt
);
3011 static void gen_cast_s(int t
)
3019 /* cast 'vtop' to 'type'. Casting to bitfields is forbidden. */
3020 static void gen_cast(CType
*type
)
3022 int sbt
, dbt
, sf
, df
, c
;
3023 int dbt_bt
, sbt_bt
, ds
, ss
, bits
, trunc
;
3025 /* special delayed cast for char/short */
3026 if (vtop
->r
& VT_MUSTCAST
)
3027 force_charshort_cast();
3029 /* bitfields first get cast to ints */
3030 if (vtop
->type
.t
& VT_BITFIELD
)
3033 dbt
= type
->t
& (VT_BTYPE
| VT_UNSIGNED
);
3034 sbt
= vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
);
3042 dbt_bt
= dbt
& VT_BTYPE
;
3043 sbt_bt
= sbt
& VT_BTYPE
;
3044 if (dbt_bt
== VT_VOID
)
3046 if (sbt_bt
== VT_VOID
) {
3048 cast_error(&vtop
->type
, type
);
3051 c
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
3052 #if !defined TCC_IS_NATIVE && !defined TCC_IS_NATIVE_387
3053 c
&= (dbt
!= VT_LDOUBLE
) | !!nocode_wanted
;
3056 /* constant case: we can do it now */
3057 /* XXX: in ISOC, cannot do it if error in convert */
3058 if (sbt
== VT_FLOAT
)
3059 vtop
->c
.ld
= vtop
->c
.f
;
3060 else if (sbt
== VT_DOUBLE
)
3061 vtop
->c
.ld
= vtop
->c
.d
;
3064 if (sbt_bt
== VT_LLONG
) {
3065 if ((sbt
& VT_UNSIGNED
) || !(vtop
->c
.i
>> 63))
3066 vtop
->c
.ld
= vtop
->c
.i
;
3068 vtop
->c
.ld
= -(long double)-vtop
->c
.i
;
3070 if ((sbt
& VT_UNSIGNED
) || !(vtop
->c
.i
>> 31))
3071 vtop
->c
.ld
= (uint32_t)vtop
->c
.i
;
3073 vtop
->c
.ld
= -(long double)-(uint32_t)vtop
->c
.i
;
3076 if (dbt
== VT_FLOAT
)
3077 vtop
->c
.f
= (float)vtop
->c
.ld
;
3078 else if (dbt
== VT_DOUBLE
)
3079 vtop
->c
.d
= (double)vtop
->c
.ld
;
3080 } else if (sf
&& dbt
== VT_BOOL
) {
3081 vtop
->c
.i
= (vtop
->c
.ld
!= 0);
3084 vtop
->c
.i
= vtop
->c
.ld
;
3085 else if (sbt_bt
== VT_LLONG
|| (PTR_SIZE
== 8 && sbt
== VT_PTR
))
3087 else if (sbt
& VT_UNSIGNED
)
3088 vtop
->c
.i
= (uint32_t)vtop
->c
.i
;
3090 vtop
->c
.i
= ((uint32_t)vtop
->c
.i
| -(vtop
->c
.i
& 0x80000000));
3092 if (dbt_bt
== VT_LLONG
|| (PTR_SIZE
== 8 && dbt
== VT_PTR
))
3094 else if (dbt
== VT_BOOL
)
3095 vtop
->c
.i
= (vtop
->c
.i
!= 0);
3097 uint32_t m
= dbt_bt
== VT_BYTE
? 0xff :
3098 dbt_bt
== VT_SHORT
? 0xffff :
3101 if (!(dbt
& VT_UNSIGNED
))
3102 vtop
->c
.i
|= -(vtop
->c
.i
& ((m
>> 1) + 1));
3107 } else if (dbt
== VT_BOOL
3108 && (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
))
3109 == (VT_CONST
| VT_SYM
)) {
3110 /* addresses are considered non-zero (see tcctest.c:sinit23) */
3116 /* cannot generate code for global or static initializers */
3117 if (STATIC_DATA_WANTED
)
3120 /* non constant case: generate code */
3121 if (dbt
== VT_BOOL
) {
3122 gen_test_zero(TOK_NE
);
3128 /* convert from fp to fp */
3131 /* convert int to fp */
3134 /* convert fp to int */
3136 if (dbt_bt
!= VT_LLONG
&& dbt_bt
!= VT_INT
)
3139 goto again
; /* may need char/short cast */
3144 ds
= btype_size(dbt_bt
);
3145 ss
= btype_size(sbt_bt
);
3146 if (ds
== 0 || ss
== 0)
3149 if (IS_ENUM(type
->t
) && type
->ref
->c
< 0)
3150 tcc_error("cast to incomplete type");
3152 /* same size and no sign conversion needed */
3153 if (ds
== ss
&& ds
>= 4)
3155 if (dbt_bt
== VT_PTR
|| sbt_bt
== VT_PTR
) {
3156 tcc_warning("cast between pointer and integer of different size");
3157 if (sbt_bt
== VT_PTR
) {
3158 /* put integer type to allow logical operations below */
3159 vtop
->type
.t
= (PTR_SIZE
== 8 ? VT_LLONG
: VT_INT
);
3163 /* processor allows { int a = 0, b = *(char*)&a; }
3164 That means that if we cast to less width, we can just
3165 change the type and read it still later. */
3166 #define ALLOW_SUBTYPE_ACCESS 1
3168 if (ALLOW_SUBTYPE_ACCESS
&& (vtop
->r
& VT_LVAL
)) {
3169 /* value still in memory */
3173 if (ds
<= 4 && !(dbt
== (VT_SHORT
| VT_UNSIGNED
) && sbt
== VT_BYTE
)) {
3175 goto done
; /* no 64bit envolved */
3183 /* generate high word */
3184 if (sbt
& VT_UNSIGNED
) {
3193 } else if (ss
== 8) {
3194 /* from long long: just take low order word */
3202 /* need to convert from 32bit to 64bit */
3203 if (sbt
& VT_UNSIGNED
) {
3204 #if defined(TCC_TARGET_RISCV64)
3205 /* RISC-V keeps 32bit vals in registers sign-extended.
3206 So here we need a zero-extension. */
3215 ss
= ds
, ds
= 4, dbt
= sbt
;
3216 } else if (ss
== 8) {
3217 /* RISC-V keeps 32bit vals in registers sign-extended.
3218 So here we need a sign-extension for signed types and
3219 zero-extension. for unsigned types. */
3220 #if !defined(TCC_TARGET_RISCV64)
3221 trunc
= 32; /* zero upper 32 bits for non RISC-V targets */
3230 #if defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64 || defined TCC_TARGET_ARM64
3236 bits
= (ss
- ds
) * 8;
3237 /* for unsigned, gen_op will convert SAR to SHR */
3238 vtop
->type
.t
= (ss
== 8 ? VT_LLONG
: VT_INT
) | (dbt
& VT_UNSIGNED
);
3241 vpushi(bits
- trunc
);
3248 vtop
->type
.t
&= ~ ( VT_CONSTANT
| VT_VOLATILE
| VT_ARRAY
);
3251 /* return type size as known at compile time. Put alignment at 'a' */
3252 ST_FUNC
int type_size(CType
*type
, int *a
)
3257 bt
= type
->t
& VT_BTYPE
;
3258 if (bt
== VT_STRUCT
) {
3263 } else if (bt
== VT_PTR
) {
3264 if (type
->t
& VT_ARRAY
) {
3268 ts
= type_size(&s
->type
, a
);
3270 if (ts
< 0 && s
->c
< 0)
3278 } else if (IS_ENUM(type
->t
) && type
->ref
->c
< 0) {
3280 return -1; /* incomplete enum */
3281 } else if (bt
== VT_LDOUBLE
) {
3283 return LDOUBLE_SIZE
;
3284 } else if (bt
== VT_DOUBLE
|| bt
== VT_LLONG
) {
3285 #ifdef TCC_TARGET_I386
3286 #ifdef TCC_TARGET_PE
3291 #elif defined(TCC_TARGET_ARM)
3301 } else if (bt
== VT_INT
|| bt
== VT_FLOAT
) {
3304 } else if (bt
== VT_SHORT
) {
3307 } else if (bt
== VT_QLONG
|| bt
== VT_QFLOAT
) {
3311 /* char, void, function, _Bool */
3317 /* push type size as known at runtime time on top of value stack. Put
3319 static void vpush_type_size(CType
*type
, int *a
)
3321 if (type
->t
& VT_VLA
) {
3322 type_size(&type
->ref
->type
, a
);
3323 vset(&int_type
, VT_LOCAL
|VT_LVAL
, type
->ref
->c
);
3325 int size
= type_size(type
, a
);
3327 tcc_error("unknown type size");
3336 /* return the pointed type of t */
3337 static inline CType
*pointed_type(CType
*type
)
3339 return &type
->ref
->type
;
3342 /* modify type so that its it is a pointer to type. */
3343 ST_FUNC
void mk_pointer(CType
*type
)
3346 s
= sym_push(SYM_FIELD
, type
, 0, -1);
3347 type
->t
= VT_PTR
| (type
->t
& VT_STORAGE
);
3351 /* return true if type1 and type2 are exactly the same (including
3354 static int is_compatible_types(CType
*type1
, CType
*type2
)
3356 return compare_types(type1
,type2
,0);
3359 /* return true if type1 and type2 are the same (ignoring qualifiers).
3361 static int is_compatible_unqualified_types(CType
*type1
, CType
*type2
)
3363 return compare_types(type1
,type2
,1);
3366 static void cast_error(CType
*st
, CType
*dt
)
3368 type_incompatibility_error(st
, dt
, "cannot convert '%s' to '%s'");
3371 /* verify type compatibility to store vtop in 'dt' type */
3372 static void verify_assign_cast(CType
*dt
)
3374 CType
*st
, *type1
, *type2
;
3375 int dbt
, sbt
, qualwarn
, lvl
;
3377 st
= &vtop
->type
; /* source type */
3378 dbt
= dt
->t
& VT_BTYPE
;
3379 sbt
= st
->t
& VT_BTYPE
;
3380 if (dt
->t
& VT_CONSTANT
)
3381 tcc_warning("assignment of read-only location");
3385 tcc_error("assignment to void expression");
3388 /* special cases for pointers */
3389 /* '0' can also be a pointer */
3390 if (is_null_pointer(vtop
))
3392 /* accept implicit pointer to integer cast with warning */
3393 if (is_integer_btype(sbt
)) {
3394 tcc_warning("assignment makes pointer from integer without a cast");
3397 type1
= pointed_type(dt
);
3399 type2
= pointed_type(st
);
3400 else if (sbt
== VT_FUNC
)
3401 type2
= st
; /* a function is implicitly a function pointer */
3404 if (is_compatible_types(type1
, type2
))
3406 for (qualwarn
= lvl
= 0;; ++lvl
) {
3407 if (((type2
->t
& VT_CONSTANT
) && !(type1
->t
& VT_CONSTANT
)) ||
3408 ((type2
->t
& VT_VOLATILE
) && !(type1
->t
& VT_VOLATILE
)))
3410 dbt
= type1
->t
& (VT_BTYPE
|VT_LONG
);
3411 sbt
= type2
->t
& (VT_BTYPE
|VT_LONG
);
3412 if (dbt
!= VT_PTR
|| sbt
!= VT_PTR
)
3414 type1
= pointed_type(type1
);
3415 type2
= pointed_type(type2
);
3417 if (!is_compatible_unqualified_types(type1
, type2
)) {
3418 if ((dbt
== VT_VOID
|| sbt
== VT_VOID
) && lvl
== 0) {
3419 /* void * can match anything */
3420 } else if (dbt
== sbt
3421 && is_integer_btype(sbt
& VT_BTYPE
)
3422 && IS_ENUM(type1
->t
) + IS_ENUM(type2
->t
)
3423 + !!((type1
->t
^ type2
->t
) & VT_UNSIGNED
) < 2) {
3424 /* Like GCC don't warn by default for merely changes
3425 in pointer target signedness. Do warn for different
3426 base types, though, in particular for unsigned enums
3427 and signed int targets. */
3429 tcc_warning("assignment from incompatible pointer type");
3434 tcc_warning_c(warn_discarded_qualifiers
)("assignment discards qualifiers from pointer target type");
3440 if (sbt
== VT_PTR
|| sbt
== VT_FUNC
) {
3441 tcc_warning("assignment makes integer from pointer without a cast");
3442 } else if (sbt
== VT_STRUCT
) {
3443 goto case_VT_STRUCT
;
3445 /* XXX: more tests */
3449 if (!is_compatible_unqualified_types(dt
, st
)) {
3457 static void gen_assign_cast(CType
*dt
)
3459 verify_assign_cast(dt
);
3463 /* store vtop in lvalue pushed on stack */
3464 ST_FUNC
void vstore(void)
3466 int sbt
, dbt
, ft
, r
, size
, align
, bit_size
, bit_pos
, delayed_cast
;
3468 ft
= vtop
[-1].type
.t
;
3469 sbt
= vtop
->type
.t
& VT_BTYPE
;
3470 dbt
= ft
& VT_BTYPE
;
3472 verify_assign_cast(&vtop
[-1].type
);
3474 if (sbt
== VT_STRUCT
) {
3475 /* if structure, only generate pointer */
3476 /* structure assignment : generate memcpy */
3477 /* XXX: optimize if small size */
3478 size
= type_size(&vtop
->type
, &align
);
3482 #ifdef CONFIG_TCC_BCHECK
3483 if (vtop
->r
& VT_MUSTBOUND
)
3484 gbound(); /* check would be wrong after gaddrof() */
3486 vtop
->type
.t
= VT_PTR
;
3489 /* address of memcpy() */
3492 vpush_helper_func(TOK_memmove8
);
3493 else if(!(align
& 3))
3494 vpush_helper_func(TOK_memmove4
);
3497 /* Use memmove, rather than memcpy, as dest and src may be same: */
3498 vpush_helper_func(TOK_memmove
);
3503 #ifdef CONFIG_TCC_BCHECK
3504 if (vtop
->r
& VT_MUSTBOUND
)
3507 vtop
->type
.t
= VT_PTR
;
3512 /* leave source on stack */
3514 } else if (ft
& VT_BITFIELD
) {
3515 /* bitfield store handling */
3517 /* save lvalue as expression result (example: s.b = s.a = n;) */
3518 vdup(), vtop
[-1] = vtop
[-2];
3520 bit_pos
= BIT_POS(ft
);
3521 bit_size
= BIT_SIZE(ft
);
3522 /* remove bit field info to avoid loops */
3523 vtop
[-1].type
.t
= ft
& ~VT_STRUCT_MASK
;
3525 if (dbt
== VT_BOOL
) {
3526 gen_cast(&vtop
[-1].type
);
3527 vtop
[-1].type
.t
= (vtop
[-1].type
.t
& ~VT_BTYPE
) | (VT_BYTE
| VT_UNSIGNED
);
3529 r
= adjust_bf(vtop
- 1, bit_pos
, bit_size
);
3530 if (dbt
!= VT_BOOL
) {
3531 gen_cast(&vtop
[-1].type
);
3532 dbt
= vtop
[-1].type
.t
& VT_BTYPE
;
3534 if (r
== VT_STRUCT
) {
3535 store_packed_bf(bit_pos
, bit_size
);
3537 unsigned long long mask
= (1ULL << bit_size
) - 1;
3538 if (dbt
!= VT_BOOL
) {
3540 if (dbt
== VT_LLONG
)
3543 vpushi((unsigned)mask
);
3550 /* duplicate destination */
3553 /* load destination, mask and or with source */
3554 if (dbt
== VT_LLONG
)
3555 vpushll(~(mask
<< bit_pos
));
3557 vpushi(~((unsigned)mask
<< bit_pos
));
3562 /* ... and discard */
3565 } else if (dbt
== VT_VOID
) {
3568 /* optimize char/short casts */
3570 if ((dbt
== VT_BYTE
|| dbt
== VT_SHORT
)
3571 && is_integer_btype(sbt
)
3573 if ((vtop
->r
& VT_MUSTCAST
)
3574 && btype_size(dbt
) > btype_size(sbt
)
3576 force_charshort_cast();
3579 gen_cast(&vtop
[-1].type
);
3582 #ifdef CONFIG_TCC_BCHECK
3583 /* bound check case */
3584 if (vtop
[-1].r
& VT_MUSTBOUND
) {
3590 gv(RC_TYPE(dbt
)); /* generate value */
3593 vtop
->r
|= BFVAL(VT_MUSTCAST
, (sbt
== VT_LLONG
) + 1);
3594 //tcc_warning("deley cast %x -> %x", sbt, dbt);
3595 vtop
->type
.t
= ft
& VT_TYPE
;
3598 /* if lvalue was saved on stack, must read it */
3599 if ((vtop
[-1].r
& VT_VALMASK
) == VT_LLOCAL
) {
3601 r
= get_reg(RC_INT
);
3602 sv
.type
.t
= VT_PTRDIFF_T
;
3603 sv
.r
= VT_LOCAL
| VT_LVAL
;
3604 sv
.c
.i
= vtop
[-1].c
.i
;
3606 vtop
[-1].r
= r
| VT_LVAL
;
3609 r
= vtop
->r
& VT_VALMASK
;
3610 /* two word case handling :
3611 store second register at word + 4 (or +8 for x86-64) */
3612 if (USING_TWO_WORDS(dbt
)) {
3613 int load_type
= (dbt
== VT_QFLOAT
) ? VT_DOUBLE
: VT_PTRDIFF_T
;
3614 vtop
[-1].type
.t
= load_type
;
3617 /* convert to int to increment easily */
3618 vtop
->type
.t
= VT_PTRDIFF_T
;
3624 vtop
[-1].type
.t
= load_type
;
3625 /* XXX: it works because r2 is spilled last ! */
3626 store(vtop
->r2
, vtop
- 1);
3632 vtop
--; /* NOT vpop() because on x86 it would flush the fp stack */
3636 /* post defines POST/PRE add. c is the token ++ or -- */
3637 ST_FUNC
void inc(int post
, int c
)
3640 vdup(); /* save lvalue */
3642 gv_dup(); /* duplicate value */
3647 vpushi(c
- TOK_MID
);
3649 vstore(); /* store value */
3651 vpop(); /* if post op, return saved value */
3654 ST_FUNC
void parse_mult_str (CString
*astr
, const char *msg
)
3656 /* read the string */
3660 while (tok
== TOK_STR
) {
3661 /* XXX: add \0 handling too ? */
3662 cstr_cat(astr
, tokc
.str
.data
, -1);
3665 cstr_ccat(astr
, '\0');
3668 /* If I is >= 1 and a power of two, returns log2(i)+1.
3669 If I is 0 returns 0. */
3670 ST_FUNC
int exact_log2p1(int i
)
3675 for (ret
= 1; i
>= 1 << 8; ret
+= 8)
3686 /* Parse __attribute__((...)) GNUC extension. */
3687 static void parse_attribute(AttributeDef
*ad
)
3693 if (tok
!= TOK_ATTRIBUTE1
&& tok
!= TOK_ATTRIBUTE2
)
3698 while (tok
!= ')') {
3699 if (tok
< TOK_IDENT
)
3700 expect("attribute name");
3712 tcc_warning_c(warn_implicit_function_declaration
)(
3713 "implicit declaration of function '%s'", get_tok_str(tok
, &tokc
));
3714 s
= external_global_sym(tok
, &func_old_type
);
3715 } else if ((s
->type
.t
& VT_BTYPE
) != VT_FUNC
)
3716 tcc_error("'%s' is not declared as function", get_tok_str(tok
, &tokc
));
3717 ad
->cleanup_func
= s
;
3722 case TOK_CONSTRUCTOR1
:
3723 case TOK_CONSTRUCTOR2
:
3724 ad
->f
.func_ctor
= 1;
3726 case TOK_DESTRUCTOR1
:
3727 case TOK_DESTRUCTOR2
:
3728 ad
->f
.func_dtor
= 1;
3730 case TOK_ALWAYS_INLINE1
:
3731 case TOK_ALWAYS_INLINE2
:
3732 ad
->f
.func_alwinl
= 1;
3737 parse_mult_str(&astr
, "section name");
3738 ad
->section
= find_section(tcc_state
, (char *)astr
.data
);
3745 parse_mult_str(&astr
, "alias(\"target\")");
3746 ad
->alias_target
= /* save string as token, for later */
3747 tok_alloc((char*)astr
.data
, astr
.size
-1)->tok
;
3751 case TOK_VISIBILITY1
:
3752 case TOK_VISIBILITY2
:
3754 parse_mult_str(&astr
,
3755 "visibility(\"default|hidden|internal|protected\")");
3756 if (!strcmp (astr
.data
, "default"))
3757 ad
->a
.visibility
= STV_DEFAULT
;
3758 else if (!strcmp (astr
.data
, "hidden"))
3759 ad
->a
.visibility
= STV_HIDDEN
;
3760 else if (!strcmp (astr
.data
, "internal"))
3761 ad
->a
.visibility
= STV_INTERNAL
;
3762 else if (!strcmp (astr
.data
, "protected"))
3763 ad
->a
.visibility
= STV_PROTECTED
;
3765 expect("visibility(\"default|hidden|internal|protected\")");
3774 if (n
<= 0 || (n
& (n
- 1)) != 0)
3775 tcc_error("alignment must be a positive power of two");
3780 ad
->a
.aligned
= exact_log2p1(n
);
3781 if (n
!= 1 << (ad
->a
.aligned
- 1))
3782 tcc_error("alignment of %d is larger than implemented", n
);
3794 /* currently, no need to handle it because tcc does not
3795 track unused objects */
3799 ad
->f
.func_noreturn
= 1;
3804 ad
->f
.func_call
= FUNC_CDECL
;
3809 ad
->f
.func_call
= FUNC_STDCALL
;
3811 #ifdef TCC_TARGET_I386
3821 ad
->f
.func_call
= FUNC_FASTCALL1
+ n
- 1;
3827 ad
->f
.func_call
= FUNC_FASTCALLW
;
3834 ad
->attr_mode
= VT_LLONG
+ 1;
3837 ad
->attr_mode
= VT_BYTE
+ 1;
3840 ad
->attr_mode
= VT_SHORT
+ 1;
3844 ad
->attr_mode
= VT_INT
+ 1;
3847 tcc_warning("__mode__(%s) not supported\n", get_tok_str(tok
, NULL
));
3854 ad
->a
.dllexport
= 1;
3856 case TOK_NODECORATE
:
3857 ad
->a
.nodecorate
= 1;
3860 ad
->a
.dllimport
= 1;
3863 tcc_warning_c(warn_unsupported
)("'%s' attribute ignored", get_tok_str(t
, NULL
));
3864 /* skip parameters */
3866 int parenthesis
= 0;
3870 else if (tok
== ')')
3873 } while (parenthesis
&& tok
!= -1);
3886 static Sym
* find_field (CType
*type
, int v
, int *cumofs
)
3890 while ((s
= s
->next
) != NULL
) {
3891 if ((s
->v
& SYM_FIELD
) &&
3892 (s
->type
.t
& VT_BTYPE
) == VT_STRUCT
&&
3893 (s
->v
& ~SYM_FIELD
) >= SYM_FIRST_ANOM
) {
3894 Sym
*ret
= find_field (&s
->type
, v
, cumofs
);
3906 static void check_fields (CType
*type
, int check
)
3910 while ((s
= s
->next
) != NULL
) {
3911 int v
= s
->v
& ~SYM_FIELD
;
3912 if (v
< SYM_FIRST_ANOM
) {
3913 TokenSym
*ts
= table_ident
[v
- TOK_IDENT
];
3914 if (check
&& (ts
->tok
& SYM_FIELD
))
3915 tcc_error("duplicate member '%s'", get_tok_str(v
, NULL
));
3916 ts
->tok
^= SYM_FIELD
;
3917 } else if ((s
->type
.t
& VT_BTYPE
) == VT_STRUCT
)
3918 check_fields (&s
->type
, check
);
3922 static void struct_layout(CType
*type
, AttributeDef
*ad
)
3924 int size
, align
, maxalign
, offset
, c
, bit_pos
, bit_size
;
3925 int packed
, a
, bt
, prevbt
, prev_bit_size
;
3926 int pcc
= !tcc_state
->ms_bitfields
;
3927 int pragma_pack
= *tcc_state
->pack_stack_ptr
;
3934 prevbt
= VT_STRUCT
; /* make it never match */
3939 for (f
= type
->ref
->next
; f
; f
= f
->next
) {
3940 if (f
->type
.t
& VT_BITFIELD
)
3941 bit_size
= BIT_SIZE(f
->type
.t
);
3944 size
= type_size(&f
->type
, &align
);
3945 a
= f
->a
.aligned
? 1 << (f
->a
.aligned
- 1) : 0;
3948 if (pcc
&& bit_size
== 0) {
3949 /* in pcc mode, packing does not affect zero-width bitfields */
3952 /* in pcc mode, attribute packed overrides if set. */
3953 if (pcc
&& (f
->a
.packed
|| ad
->a
.packed
))
3956 /* pragma pack overrides align if lesser and packs bitfields always */
3959 if (pragma_pack
< align
)
3960 align
= pragma_pack
;
3961 /* in pcc mode pragma pack also overrides individual align */
3962 if (pcc
&& pragma_pack
< a
)
3966 /* some individual align was specified */
3970 if (type
->ref
->type
.t
== VT_UNION
) {
3971 if (pcc
&& bit_size
>= 0)
3972 size
= (bit_size
+ 7) >> 3;
3977 } else if (bit_size
< 0) {
3979 c
+= (bit_pos
+ 7) >> 3;
3980 c
= (c
+ align
- 1) & -align
;
3989 /* A bit-field. Layout is more complicated. There are two
3990 options: PCC (GCC) compatible and MS compatible */
3992 /* In PCC layout a bit-field is placed adjacent to the
3993 preceding bit-fields, except if:
3995 - an individual alignment was given
3996 - it would overflow its base type container and
3997 there is no packing */
3998 if (bit_size
== 0) {
4000 c
= (c
+ ((bit_pos
+ 7) >> 3) + align
- 1) & -align
;
4002 } else if (f
->a
.aligned
) {
4004 } else if (!packed
) {
4006 int ofs
= ((c
* 8 + bit_pos
) % a8
+ bit_size
+ a8
- 1) / a8
;
4007 if (ofs
> size
/ align
)
4011 /* in pcc mode, long long bitfields have type int if they fit */
4012 if (size
== 8 && bit_size
<= 32)
4013 f
->type
.t
= (f
->type
.t
& ~VT_BTYPE
) | VT_INT
, size
= 4;
4015 while (bit_pos
>= align
* 8)
4016 c
+= align
, bit_pos
-= align
* 8;
4019 /* In PCC layout named bit-fields influence the alignment
4020 of the containing struct using the base types alignment,
4021 except for packed fields (which here have correct align). */
4022 if (f
->v
& SYM_FIRST_ANOM
4023 // && bit_size // ??? gcc on ARM/rpi does that
4028 bt
= f
->type
.t
& VT_BTYPE
;
4029 if ((bit_pos
+ bit_size
> size
* 8)
4030 || (bit_size
> 0) == (bt
!= prevbt
)
4032 c
= (c
+ align
- 1) & -align
;
4035 /* In MS bitfield mode a bit-field run always uses
4036 at least as many bits as the underlying type.
4037 To start a new run it's also required that this
4038 or the last bit-field had non-zero width. */
4039 if (bit_size
|| prev_bit_size
)
4042 /* In MS layout the records alignment is normally
4043 influenced by the field, except for a zero-width
4044 field at the start of a run (but by further zero-width
4045 fields it is again). */
4046 if (bit_size
== 0 && prevbt
!= bt
)
4049 prev_bit_size
= bit_size
;
4052 f
->type
.t
= (f
->type
.t
& ~(0x3f << VT_STRUCT_SHIFT
))
4053 | (bit_pos
<< VT_STRUCT_SHIFT
);
4054 bit_pos
+= bit_size
;
4056 if (align
> maxalign
)
4060 printf("set field %s offset %-2d size %-2d align %-2d",
4061 get_tok_str(f
->v
& ~SYM_FIELD
, NULL
), offset
, size
, align
);
4062 if (f
->type
.t
& VT_BITFIELD
) {
4063 printf(" pos %-2d bits %-2d",
4076 c
+= (bit_pos
+ 7) >> 3;
4078 /* store size and alignment */
4079 a
= bt
= ad
->a
.aligned
? 1 << (ad
->a
.aligned
- 1) : 1;
4083 if (pragma_pack
&& pragma_pack
< maxalign
&& 0 == pcc
) {
4084 /* can happen if individual align for some member was given. In
4085 this case MSVC ignores maxalign when aligning the size */
4090 c
= (c
+ a
- 1) & -a
;
4094 printf("struct size %-2d align %-2d\n\n", c
, a
), fflush(stdout
);
4097 /* check whether we can access bitfields by their type */
4098 for (f
= type
->ref
->next
; f
; f
= f
->next
) {
4102 if (0 == (f
->type
.t
& VT_BITFIELD
))
4106 bit_size
= BIT_SIZE(f
->type
.t
);
4109 bit_pos
= BIT_POS(f
->type
.t
);
4110 size
= type_size(&f
->type
, &align
);
4112 if (bit_pos
+ bit_size
<= size
* 8 && f
->c
+ size
<= c
4113 #ifdef TCC_TARGET_ARM
4114 && !(f
->c
& (align
- 1))
4119 /* try to access the field using a different type */
4120 c0
= -1, s
= align
= 1;
4123 px
= f
->c
* 8 + bit_pos
;
4124 cx
= (px
>> 3) & -align
;
4125 px
= px
- (cx
<< 3);
4128 s
= (px
+ bit_size
+ 7) >> 3;
4138 s
= type_size(&t
, &align
);
4142 if (px
+ bit_size
<= s
* 8 && cx
+ s
<= c
4143 #ifdef TCC_TARGET_ARM
4144 && !(cx
& (align
- 1))
4147 /* update offset and bit position */
4150 f
->type
.t
= (f
->type
.t
& ~(0x3f << VT_STRUCT_SHIFT
))
4151 | (bit_pos
<< VT_STRUCT_SHIFT
);
4155 printf("FIX field %s offset %-2d size %-2d align %-2d "
4156 "pos %-2d bits %-2d\n",
4157 get_tok_str(f
->v
& ~SYM_FIELD
, NULL
),
4158 cx
, s
, align
, px
, bit_size
);
4161 /* fall back to load/store single-byte wise */
4162 f
->auxtype
= VT_STRUCT
;
4164 printf("FIX field %s : load byte-wise\n",
4165 get_tok_str(f
->v
& ~SYM_FIELD
, NULL
));
4171 /* enum/struct/union declaration. u is VT_ENUM/VT_STRUCT/VT_UNION */
4172 static void struct_decl(CType
*type
, int u
)
4174 int v
, c
, size
, align
, flexible
;
4175 int bit_size
, bsize
, bt
;
4177 AttributeDef ad
, ad1
;
4180 memset(&ad
, 0, sizeof ad
);
4182 parse_attribute(&ad
);
4186 /* struct already defined ? return it */
4188 expect("struct/union/enum name");
4190 if (s
&& (s
->sym_scope
== local_scope
|| tok
!= '{')) {
4193 if (u
== VT_ENUM
&& IS_ENUM(s
->type
.t
))
4195 tcc_error("redefinition of '%s'", get_tok_str(v
, NULL
));
4200 /* Record the original enum/struct/union token. */
4201 type1
.t
= u
== VT_ENUM
? u
| VT_INT
| VT_UNSIGNED
: u
;
4203 /* we put an undefined size for struct/union */
4204 s
= sym_push(v
| SYM_STRUCT
, &type1
, 0, -1);
4205 s
->r
= 0; /* default alignment is zero as gcc */
4207 type
->t
= s
->type
.t
;
4213 tcc_error("struct/union/enum already defined");
4215 /* cannot be empty */
4216 /* non empty enums are not allowed */
4219 long long ll
= 0, pl
= 0, nl
= 0;
4222 /* enum symbols have static storage */
4223 t
.t
= VT_INT
|VT_STATIC
|VT_ENUM_VAL
;
4227 expect("identifier");
4229 if (ss
&& !local_stack
)
4230 tcc_error("redefinition of enumerator '%s'",
4231 get_tok_str(v
, NULL
));
4235 ll
= expr_const64();
4237 ss
= sym_push(v
, &t
, VT_CONST
, 0);
4239 *ps
= ss
, ps
= &ss
->next
;
4248 /* NOTE: we accept a trailing comma */
4253 /* set integral type of the enum */
4256 if (pl
!= (unsigned)pl
)
4257 t
.t
= (LONG_SIZE
==8 ? VT_LLONG
|VT_LONG
: VT_LLONG
);
4259 } else if (pl
!= (int)pl
|| nl
!= (int)nl
)
4260 t
.t
= (LONG_SIZE
==8 ? VT_LLONG
|VT_LONG
: VT_LLONG
);
4261 s
->type
.t
= type
->t
= t
.t
| VT_ENUM
;
4263 /* set type for enum members */
4264 for (ss
= s
->next
; ss
; ss
= ss
->next
) {
4266 if (ll
== (int)ll
) /* default is int if it fits */
4268 if (t
.t
& VT_UNSIGNED
) {
4269 ss
->type
.t
|= VT_UNSIGNED
;
4270 if (ll
== (unsigned)ll
)
4273 ss
->type
.t
= (ss
->type
.t
& ~VT_BTYPE
)
4274 | (LONG_SIZE
==8 ? VT_LLONG
|VT_LONG
: VT_LLONG
);
4279 while (tok
!= '}') {
4280 if (!parse_btype(&btype
, &ad1
)) {
4286 tcc_error("flexible array member '%s' not at the end of struct",
4287 get_tok_str(v
, NULL
));
4293 type_decl(&type1
, &ad1
, &v
, TYPE_DIRECT
);
4295 if ((type1
.t
& VT_BTYPE
) != VT_STRUCT
)
4296 expect("identifier");
4298 int v
= btype
.ref
->v
;
4299 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
4300 if (tcc_state
->ms_extensions
== 0)
4301 expect("identifier");
4305 if (type_size(&type1
, &align
) < 0) {
4306 if ((u
== VT_STRUCT
) && (type1
.t
& VT_ARRAY
) && c
)
4309 tcc_error("field '%s' has incomplete type",
4310 get_tok_str(v
, NULL
));
4312 if ((type1
.t
& VT_BTYPE
) == VT_FUNC
||
4313 (type1
.t
& VT_BTYPE
) == VT_VOID
||
4314 (type1
.t
& VT_STORAGE
))
4315 tcc_error("invalid type for '%s'",
4316 get_tok_str(v
, NULL
));
4320 bit_size
= expr_const();
4321 /* XXX: handle v = 0 case for messages */
4323 tcc_error("negative width in bit-field '%s'",
4324 get_tok_str(v
, NULL
));
4325 if (v
&& bit_size
== 0)
4326 tcc_error("zero width for bit-field '%s'",
4327 get_tok_str(v
, NULL
));
4328 parse_attribute(&ad1
);
4330 size
= type_size(&type1
, &align
);
4331 if (bit_size
>= 0) {
4332 bt
= type1
.t
& VT_BTYPE
;
4338 tcc_error("bitfields must have scalar type");
4340 if (bit_size
> bsize
) {
4341 tcc_error("width of '%s' exceeds its type",
4342 get_tok_str(v
, NULL
));
4343 } else if (bit_size
== bsize
4344 && !ad
.a
.packed
&& !ad1
.a
.packed
) {
4345 /* no need for bit fields */
4347 } else if (bit_size
== 64) {
4348 tcc_error("field width 64 not implemented");
4350 type1
.t
= (type1
.t
& ~VT_STRUCT_MASK
)
4352 | (bit_size
<< (VT_STRUCT_SHIFT
+ 6));
4355 if (v
!= 0 || (type1
.t
& VT_BTYPE
) == VT_STRUCT
) {
4356 /* Remember we've seen a real field to check
4357 for placement of flexible array member. */
4360 /* If member is a struct or bit-field, enforce
4361 placing into the struct (as anonymous). */
4363 ((type1
.t
& VT_BTYPE
) == VT_STRUCT
||
4368 ss
= sym_push(v
| SYM_FIELD
, &type1
, 0, 0);
4373 if (tok
== ';' || tok
== TOK_EOF
)
4380 parse_attribute(&ad
);
4381 if (ad
.cleanup_func
) {
4382 tcc_warning("attribute '__cleanup__' ignored on type");
4384 check_fields(type
, 1);
4385 check_fields(type
, 0);
4386 struct_layout(type
, &ad
);
4388 tcc_debug_fix_anon(tcc_state
, type
);
4393 static void sym_to_attr(AttributeDef
*ad
, Sym
*s
)
4395 merge_symattr(&ad
->a
, &s
->a
);
4396 merge_funcattr(&ad
->f
, &s
->f
);
4399 /* Add type qualifiers to a type. If the type is an array then the qualifiers
4400 are added to the element type, copied because it could be a typedef. */
4401 static void parse_btype_qualify(CType
*type
, int qualifiers
)
4403 while (type
->t
& VT_ARRAY
) {
4404 type
->ref
= sym_push(SYM_FIELD
, &type
->ref
->type
, 0, type
->ref
->c
);
4405 type
= &type
->ref
->type
;
4407 type
->t
|= qualifiers
;
4410 /* return 0 if no type declaration. otherwise, return the basic type
4413 static int parse_btype(CType
*type
, AttributeDef
*ad
)
4415 int t
, u
, bt
, st
, type_found
, typespec_found
, g
, n
;
4419 memset(ad
, 0, sizeof(AttributeDef
));
4429 /* currently, we really ignore extension */
4439 if (u
== VT_SHORT
|| u
== VT_LONG
) {
4440 if (st
!= -1 || (bt
!= -1 && bt
!= VT_INT
))
4441 tmbt
: tcc_error("too many basic types");
4444 if (bt
!= -1 || (st
!= -1 && u
!= VT_INT
))
4449 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | u
;
4466 memset(&ad1
, 0, sizeof(AttributeDef
));
4467 if (parse_btype(&type1
, &ad1
)) {
4468 type_decl(&type1
, &ad1
, &n
, TYPE_ABSTRACT
);
4470 n
= 1 << (ad1
.a
.aligned
- 1);
4472 type_size(&type1
, &n
);
4475 if (n
<= 0 || (n
& (n
- 1)) != 0)
4476 tcc_error("alignment must be a positive power of two");
4479 ad
->a
.aligned
= exact_log2p1(n
);
4483 if ((t
& VT_BTYPE
) == VT_DOUBLE
) {
4484 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_LDOUBLE
;
4485 } else if ((t
& (VT_BTYPE
|VT_LONG
)) == VT_LONG
) {
4486 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_LLONG
;
4493 #ifdef TCC_TARGET_ARM64
4495 /* GCC's __uint128_t appears in some Linux header files. Make it a
4496 synonym for long double to get the size and alignment right. */
4507 if ((t
& (VT_BTYPE
|VT_LONG
)) == VT_LONG
) {
4508 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_LDOUBLE
;
4516 struct_decl(&type1
, VT_ENUM
);
4519 type
->ref
= type1
.ref
;
4522 struct_decl(&type1
, VT_STRUCT
);
4525 struct_decl(&type1
, VT_UNION
);
4528 /* type modifiers */
4532 parse_btype_qualify(type
, VT_ATOMIC
);
4535 parse_expr_type(&type1
);
4536 /* remove all storage modifiers except typedef */
4537 type1
.t
&= ~(VT_STORAGE
&~VT_TYPEDEF
);
4539 sym_to_attr(ad
, type1
.ref
);
4547 parse_btype_qualify(type
, VT_CONSTANT
);
4555 parse_btype_qualify(type
, VT_VOLATILE
);
4562 if ((t
& (VT_DEFSIGN
|VT_UNSIGNED
)) == (VT_DEFSIGN
|VT_UNSIGNED
))
4563 tcc_error("signed and unsigned modifier");
4576 if ((t
& (VT_DEFSIGN
|VT_UNSIGNED
)) == VT_DEFSIGN
)
4577 tcc_error("signed and unsigned modifier");
4578 t
|= VT_DEFSIGN
| VT_UNSIGNED
;
4594 if (t
& (VT_EXTERN
|VT_STATIC
|VT_TYPEDEF
) & ~g
)
4595 tcc_error("multiple storage classes");
4607 ad
->f
.func_noreturn
= 1;
4609 /* GNUC attribute */
4610 case TOK_ATTRIBUTE1
:
4611 case TOK_ATTRIBUTE2
:
4612 parse_attribute(ad
);
4613 if (ad
->attr_mode
) {
4614 u
= ad
->attr_mode
-1;
4615 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | u
;
4623 parse_expr_type(&type1
);
4624 /* remove all storage modifiers except typedef */
4625 type1
.t
&= ~(VT_STORAGE
&~VT_TYPEDEF
);
4627 sym_to_attr(ad
, type1
.ref
);
4633 if (!s
|| !(s
->type
.t
& VT_TYPEDEF
))
4637 if (tok
== ':' && !in_generic
) {
4638 /* ignore if it's a label */
4643 t
&= ~(VT_BTYPE
|VT_LONG
);
4644 u
= t
& ~(VT_CONSTANT
| VT_VOLATILE
), t
^= u
;
4645 type
->t
= (s
->type
.t
& ~VT_TYPEDEF
) | u
;
4646 type
->ref
= s
->type
.ref
;
4648 parse_btype_qualify(type
, t
);
4650 /* get attributes from typedef */
4659 if (tcc_state
->char_is_unsigned
) {
4660 if ((t
& (VT_DEFSIGN
|VT_BTYPE
)) == VT_BYTE
)
4663 /* VT_LONG is used just as a modifier for VT_INT / VT_LLONG */
4664 bt
= t
& (VT_BTYPE
|VT_LONG
);
4666 t
|= LONG_SIZE
== 8 ? VT_LLONG
: VT_INT
;
4667 #ifdef TCC_USING_DOUBLE_FOR_LDOUBLE
4668 if (bt
== VT_LDOUBLE
)
4669 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | (VT_DOUBLE
|VT_LONG
);
4675 /* convert a function parameter type (array to pointer and function to
4676 function pointer) */
4677 static inline void convert_parameter_type(CType
*pt
)
4679 /* remove const and volatile qualifiers (XXX: const could be used
4680 to indicate a const function parameter */
4681 pt
->t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
4682 /* array must be transformed to pointer according to ANSI C */
4684 if ((pt
->t
& VT_BTYPE
) == VT_FUNC
) {
4689 ST_FUNC
void parse_asm_str(CString
*astr
)
4692 parse_mult_str(astr
, "string constant");
4695 /* Parse an asm label and return the token */
4696 static int asm_label_instr(void)
4702 parse_asm_str(&astr
);
4705 printf("asm_alias: \"%s\"\n", (char *)astr
.data
);
4707 v
= tok_alloc(astr
.data
, astr
.size
- 1)->tok
;
4712 static int post_type(CType
*type
, AttributeDef
*ad
, int storage
, int td
)
4714 int n
, l
, t1
, arg_size
, align
, unused_align
;
4715 Sym
**plast
, *s
, *first
;
4718 TokenString
*vla_array_tok
= NULL
;
4719 int *vla_array_str
= NULL
;
4722 /* function type, or recursive declarator (return if so) */
4724 if (TYPE_DIRECT
== (td
& (TYPE_DIRECT
|TYPE_ABSTRACT
)))
4728 else if (parse_btype(&pt
, &ad1
))
4730 else if (td
& (TYPE_DIRECT
|TYPE_ABSTRACT
)) {
4731 merge_attr (ad
, &ad1
);
4742 /* read param name and compute offset */
4743 if (l
!= FUNC_OLD
) {
4744 if ((pt
.t
& VT_BTYPE
) == VT_VOID
&& tok
== ')')
4746 type_decl(&pt
, &ad1
, &n
, TYPE_DIRECT
| TYPE_ABSTRACT
| TYPE_PARAM
);
4747 if ((pt
.t
& VT_BTYPE
) == VT_VOID
)
4748 tcc_error("parameter declared as void");
4753 pt
.t
= VT_VOID
; /* invalid type */
4758 expect("identifier");
4759 convert_parameter_type(&pt
);
4760 arg_size
+= (type_size(&pt
, &align
) + PTR_SIZE
- 1) / PTR_SIZE
;
4761 s
= sym_push(n
, &pt
, 0, 0);
4767 if (l
== FUNC_NEW
&& tok
== TOK_DOTS
) {
4772 if (l
== FUNC_NEW
&& !parse_btype(&pt
, &ad1
))
4773 tcc_error("invalid type");
4776 /* if no parameters, then old type prototype */
4779 /* remove parameter symbols from token table, keep on stack */
4781 sym_pop(local_stack
? &local_stack
: &global_stack
, first
->prev
, 1);
4782 for (s
= first
; s
; s
= s
->next
)
4786 /* NOTE: const is ignored in returned type as it has a special
4787 meaning in gcc / C++ */
4788 type
->t
&= ~VT_CONSTANT
;
4789 /* some ancient pre-K&R C allows a function to return an array
4790 and the array brackets to be put after the arguments, such
4791 that "int c()[]" means something like "int[] c()" */
4794 skip(']'); /* only handle simple "[]" */
4797 /* we push a anonymous symbol which will contain the function prototype */
4798 ad
->f
.func_args
= arg_size
;
4799 ad
->f
.func_type
= l
;
4800 s
= sym_push(SYM_FIELD
, type
, 0, 0);
4806 } else if (tok
== '[') {
4807 int saved_nocode_wanted
= nocode_wanted
;
4808 /* array definition */
4812 if (td
& TYPE_PARAM
) while (1) {
4813 /* XXX The optional type-quals and static should only be accepted
4814 in parameter decls. The '*' as well, and then even only
4815 in prototypes (not function defs). */
4817 case TOK_RESTRICT1
: case TOK_RESTRICT2
: case TOK_RESTRICT3
:
4830 /* Code generation is not done now but has to be done
4831 at start of function. Save code here for later use. */
4833 vla_array_tok
= tok_str_alloc();
4842 tok_str_add_tok(vla_array_tok
);
4846 tok_str_add(vla_array_tok
, -1);
4847 tok_str_add(vla_array_tok
, 0);
4848 vla_array_str
= vla_array_tok
->str
;
4849 begin_macro(vla_array_tok
, 2);
4858 } else if (tok
!= ']') {
4859 if (!local_stack
|| (storage
& VT_STATIC
))
4860 vpushi(expr_const());
4862 /* VLAs (which can only happen with local_stack && !VT_STATIC)
4863 length must always be evaluated, even under nocode_wanted,
4864 so that its size slot is initialized (e.g. under sizeof
4870 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
4873 tcc_error("invalid array size");
4875 if (!is_integer_btype(vtop
->type
.t
& VT_BTYPE
))
4876 tcc_error("size of variable length array should be an integer");
4882 /* parse next post type */
4883 post_type(type
, ad
, storage
, (td
& ~(TYPE_DIRECT
|TYPE_ABSTRACT
)) | TYPE_NEST
);
4885 if ((type
->t
& VT_BTYPE
) == VT_FUNC
)
4886 tcc_error("declaration of an array of functions");
4887 if ((type
->t
& VT_BTYPE
) == VT_VOID
4888 || type_size(type
, &unused_align
) < 0)
4889 tcc_error("declaration of an array of incomplete type elements");
4891 t1
|= type
->t
& VT_VLA
;
4896 tcc_error("need explicit inner array size in VLAs");
4899 loc
-= type_size(&int_type
, &align
);
4903 vpush_type_size(type
, &align
);
4905 vset(&int_type
, VT_LOCAL
|VT_LVAL
, n
);
4912 nocode_wanted
= saved_nocode_wanted
;
4914 /* we push an anonymous symbol which will contain the array
4916 s
= sym_push(SYM_FIELD
, type
, 0, n
);
4917 type
->t
= (t1
? VT_VLA
: VT_ARRAY
) | VT_PTR
;
4919 if (vla_array_str
) {
4921 s
->vla_array_str
= vla_array_str
;
4923 tok_str_free_str(vla_array_str
);
4929 /* Parse a type declarator (except basic type), and return the type
4930 in 'type'. 'td' is a bitmask indicating which kind of type decl is
4931 expected. 'type' should contain the basic type. 'ad' is the
4932 attribute definition of the basic type. It can be modified by
4933 type_decl(). If this (possibly abstract) declarator is a pointer chain
4934 it returns the innermost pointed to type (equals *type, but is a different
4935 pointer), otherwise returns type itself, that's used for recursive calls. */
4936 static CType
*type_decl(CType
*type
, AttributeDef
*ad
, int *v
, int td
)
4939 int qualifiers
, storage
;
4941 /* recursive type, remove storage bits first, apply them later again */
4942 storage
= type
->t
& VT_STORAGE
;
4943 type
->t
&= ~VT_STORAGE
;
4946 while (tok
== '*') {
4952 qualifiers
|= VT_ATOMIC
;
4957 qualifiers
|= VT_CONSTANT
;
4962 qualifiers
|= VT_VOLATILE
;
4968 /* XXX: clarify attribute handling */
4969 case TOK_ATTRIBUTE1
:
4970 case TOK_ATTRIBUTE2
:
4971 parse_attribute(ad
);
4975 type
->t
|= qualifiers
;
4977 /* innermost pointed to type is the one for the first derivation */
4978 ret
= pointed_type(type
);
4982 /* This is possibly a parameter type list for abstract declarators
4983 ('int ()'), use post_type for testing this. */
4984 if (!post_type(type
, ad
, 0, td
)) {
4985 /* It's not, so it's a nested declarator, and the post operations
4986 apply to the innermost pointed to type (if any). */
4987 /* XXX: this is not correct to modify 'ad' at this point, but
4988 the syntax is not clear */
4989 parse_attribute(ad
);
4990 post
= type_decl(type
, ad
, v
, td
);
4994 } else if (tok
>= TOK_IDENT
&& (td
& TYPE_DIRECT
)) {
4995 /* type identifier */
5000 if (!(td
& TYPE_ABSTRACT
))
5001 expect("identifier");
5004 post_type(post
, ad
, storage
, td
& ~(TYPE_DIRECT
|TYPE_ABSTRACT
));
5005 parse_attribute(ad
);
/* NOTE(review): extraction dropped interior lines of this function
   (e.g. original 5012, 5015-5017, 5019, 5024, 5029-5031 are absent);
   the bytes below are kept untouched. */
5010 /* indirection with full error checking and bound check */
5011 ST_FUNC
void indir(void)
/* dereference requires a pointer on vtop; non-pointers are rejected */
5013 if ((vtop
->type
.t
& VT_BTYPE
) != VT_PTR
) {
5014 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FUNC
)
5018 if (vtop
->r
& VT_LVAL
)
/* replace the pointer value by the pointed-to type */
5020 vtop
->type
= *pointed_type(&vtop
->type
);
5021 /* Arrays and functions are never lvalues */
5022 if (!(vtop
->type
.t
& (VT_ARRAY
| VT_VLA
))
5023 && (vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
) {
5025 /* if bound checking, the referenced pointer must be checked */
5026 #ifdef CONFIG_TCC_BCHECK
5027 if (tcc_state
->do_bounds_check
)
5028 vtop
->r
|= VT_MUSTBOUND
;
/* NOTE(review): interior lines are missing from this extraction
   (e.g. original 5035-5038, 5048, 5051, 5054-5055, 5058); bytes kept as-is. */
5033 /* pass a parameter to a function and do type checking and casting */
5034 static void gfunc_param_typed(Sym
*func
, Sym
*arg
)
5039 func_type
= func
->f
.func_type
;
/* old-style (K&R) or variadic-tail arguments get default promotions only */
5040 if (func_type
== FUNC_OLD
||
5041 (func_type
== FUNC_ELLIPSIS
&& arg
== NULL
)) {
5042 /* default casting : only need to convert float to double */
5043 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FLOAT
) {
5044 gen_cast_s(VT_DOUBLE
);
5045 } else if (vtop
->type
.t
& VT_BITFIELD
) {
/* bit-field argument: cast to its underlying (possibly unsigned) base type */
5046 type
.t
= vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
);
5047 type
.ref
= vtop
->type
.ref
;
5049 } else if (vtop
->r
& VT_MUSTCAST
) {
5050 force_charshort_cast();
5052 } else if (arg
== NULL
) {
5053 tcc_error("too many arguments to function");
5056 type
.t
&= ~VT_CONSTANT
; /* need to do that to avoid false warning */
5057 gen_assign_cast(&type
);
/* NOTE(review): the body of this function (original 5063-5068) is missing
   from this extraction; only the signature survives. */
5061 /* parse an expression and return its type without any side effect. */
5062 static void expr_type(CType
*type
, void (*expr_fn
)(void))
/* NOTE(review): several interior lines (e.g. original 5072, 5074-5078, 5081)
   are missing from this extraction; bytes kept unchanged. */
5071 /* parse an expression of the form '(type)' or '(expr)' and return its
5073 static void parse_expr_type(CType
*type
)
/* if a type name follows, parse the (abstract) declarator into *type... */
5079 if (parse_btype(type
, &ad
)) {
5080 type_decl(type
, &ad
, &n
, TYPE_ABSTRACT
);
/* ...otherwise evaluate the expression's type without side effects */
5082 expr_type(type
, gexpr
);
/* Parse a type name (basic type + abstract declarator) into *type.
   NOTE(review): interior lines (original 5088-5091, 5093-5094, 5096) are
   missing from this extraction; bytes kept unchanged. */
5087 static void parse_type(CType
*type
)
5092 if (!parse_btype(type
, &ad
)) {
5095 type_decl(type
, &ad
, &n
, TYPE_ABSTRACT
);
5098 static void parse_builtin_params(int nc
, const char *args
)
5107 while ((c
= *args
++)) {
5122 type
.t
= VT_CONSTANT
;
5128 type
.t
= VT_CONSTANT
;
5130 type
.t
|= char_type
.t
;
5142 gen_assign_cast(&type
);
5149 static void parse_atomic(int atok
)
5151 int size
, align
, arg
;
5152 CType
*atom
, *atom_ptr
, ct
= {0};
5154 static const char *const templates
[] = {
5156 * Each entry consists of callback and function template.
5157 * The template represents argument types and return type.
5159 * ? void (return-only)
5162 * A read-only atomic
5163 * p pointer to memory
5168 /* keep in order of appearance in tcctok.h: */
5169 /* __atomic_store */ "avm.?",
5170 /* __atomic_load */ "Am.v",
5171 /* __atomic_exchange */ "avm.v",
5172 /* __atomic_compare_exchange */ "apvbmm.b",
5173 /* __atomic_fetch_add */ "avm.v",
5174 /* __atomic_fetch_sub */ "avm.v",
5175 /* __atomic_fetch_or */ "avm.v",
5176 /* __atomic_fetch_xor */ "avm.v",
5177 /* __atomic_fetch_and */ "avm.v"
5179 const char *template = templates
[(atok
- TOK___atomic_store
)];
5181 atom
= atom_ptr
= NULL
;
5182 size
= 0; /* pacify compiler */
5187 switch (template[arg
]) {
5190 atom_ptr
= &vtop
->type
;
5191 if ((atom_ptr
->t
& VT_BTYPE
) != VT_PTR
)
5193 atom
= pointed_type(atom_ptr
);
5194 size
= type_size(atom
, &align
);
5196 || (size
& (size
- 1))
5197 || (atok
> TOK___atomic_compare_exchange
5198 && (0 == btype_size(atom
->t
& VT_BTYPE
)
5199 || (atom
->t
& VT_BTYPE
) == VT_PTR
)))
5200 expect("integral or integer-sized pointer target type");
5201 /* GCC does not care either: */
5202 /* if (!(atom->t & VT_ATOMIC))
5203 tcc_warning("pointer target declaration is missing '_Atomic'"); */
5207 if ((vtop
->type
.t
& VT_BTYPE
) != VT_PTR
5208 || type_size(pointed_type(&vtop
->type
), &align
) != size
)
5209 tcc_error("pointer target type mismatch in argument %d", arg
+ 1);
5210 gen_assign_cast(atom_ptr
);
5213 gen_assign_cast(atom
);
5216 gen_assign_cast(&int_type
);
5220 gen_assign_cast(&ct
);
5223 if ('.' == template[++arg
])
5230 switch (template[arg
+ 1]) {
5239 sprintf(buf
, "%s_%d", get_tok_str(atok
, 0), size
);
5240 vpush_helper_func(tok_alloc_const(buf
));
5245 PUT_R_RET(vtop
, ct
.t
);
5246 if (ct
.t
== VT_BOOL
) {
5248 vtop
->r
|= BFVAL(VT_MUSTCAST
, 1);
5250 vtop
->type
.t
= VT_INT
;
5255 ST_FUNC
void unary(void)
5257 int n
, t
, align
, size
, r
, sizeof_caller
;
5262 /* generate line number info */
5264 tcc_debug_line(tcc_state
), tcc_tcov_check_line (tcc_state
, 1);
5266 sizeof_caller
= in_sizeof
;
5269 /* XXX: GCC 2.95.3 does not generate a table although it should be
5277 #ifdef TCC_TARGET_PE
5278 t
= VT_SHORT
|VT_UNSIGNED
;
5286 vsetc(&type
, VT_CONST
, &tokc
);
5290 t
= VT_INT
| VT_UNSIGNED
;
5296 t
= VT_LLONG
| VT_UNSIGNED
;
5308 t
= (LONG_SIZE
== 8 ? VT_LLONG
: VT_INT
) | VT_LONG
;
5311 t
= (LONG_SIZE
== 8 ? VT_LLONG
: VT_INT
) | VT_LONG
| VT_UNSIGNED
;
5313 case TOK___FUNCTION__
:
5315 goto tok_identifier
;
5321 /* special function name identifier */
5322 len
= strlen(funcname
) + 1;
5323 /* generate char[len] type */
5324 type
.t
= char_type
.t
;
5325 if (tcc_state
->warn_write_strings
& WARN_ON
)
5326 type
.t
|= VT_CONSTANT
;
5330 sec
= rodata_section
;
5331 vpush_ref(&type
, sec
, sec
->data_offset
, NODATA_WANTED
? 0 : len
);
5333 memcpy(section_ptr_add(sec
, len
), funcname
, len
);
5338 #ifdef TCC_TARGET_PE
5339 t
= VT_SHORT
| VT_UNSIGNED
;
5345 /* string parsing */
5348 if (tcc_state
->warn_write_strings
& WARN_ON
)
5353 memset(&ad
, 0, sizeof(AttributeDef
));
5354 ad
.section
= rodata_section
;
5355 decl_initializer_alloc(&type
, &ad
, VT_CONST
, 2, 0, 0);
5360 if (parse_btype(&type
, &ad
)) {
5361 type_decl(&type
, &ad
, &n
, TYPE_ABSTRACT
);
5363 /* check ISOC99 compound literal */
5365 /* data is allocated locally by default */
5370 /* all except arrays are lvalues */
5371 if (!(type
.t
& VT_ARRAY
))
5373 memset(&ad
, 0, sizeof(AttributeDef
));
5374 decl_initializer_alloc(&type
, &ad
, r
, 1, 0, 0);
5376 if (sizeof_caller
) {
5383 } else if (tok
== '{') {
5384 int saved_nocode_wanted
= nocode_wanted
;
5385 if (const_wanted
&& !(nocode_wanted
& unevalmask
))
5387 if (0 == local_scope
)
5388 tcc_error("statement expression outside of function");
5389 /* save all registers */
5391 /* statement expression : we do not accept break/continue
5392 inside as GCC does. We do retain the nocode_wanted state,
5393 as statement expressions can't ever be entered from the
5394 outside, so any reactivation of code emission (from labels
5395 or loop heads) can be disabled again after the end of it. */
5397 /* If the statement expr can be entered, then we retain the current
5398 nocode_wanted state (from e.g. a 'return 0;' in the stmt-expr).
5399 If it can't be entered then the state is that from before the
5400 statement expression. */
5401 if (saved_nocode_wanted
)
5402 nocode_wanted
= saved_nocode_wanted
;
5417 /* functions names must be treated as function pointers,
5418 except for unary '&' and sizeof. Since we consider that
5419 functions are not lvalues, we only have to handle it
5420 there and in function calls. */
5421 /* arrays can also be used although they are not lvalues */
5422 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
&&
5423 !(vtop
->type
.t
& VT_ARRAY
))
5426 vtop
->sym
->a
.addrtaken
= 1;
5427 mk_pointer(&vtop
->type
);
5433 gen_test_zero(TOK_EQ
);
5444 if ((vtop
->type
.t
& VT_BTYPE
) == VT_PTR
)
5445 tcc_error("pointer not accepted for unary plus");
5446 /* In order to force cast, we add zero, except for floating point
5447 where we really need an noop (otherwise -0.0 will be transformed
5449 if (!is_float(vtop
->type
.t
)) {
5461 expr_type(&type
, unary
); /* Perform a in_sizeof = 0; */
5462 if (t
== TOK_SIZEOF
) {
5463 vpush_type_size(&type
, &align
);
5464 gen_cast_s(VT_SIZE_T
);
5466 type_size(&type
, &align
);
5468 if (vtop
[1].r
& VT_SYM
)
5469 s
= vtop
[1].sym
; /* hack: accessing previous vtop */
5470 if (s
&& s
->a
.aligned
)
5471 align
= 1 << (s
->a
.aligned
- 1);
5476 case TOK_builtin_expect
:
5477 /* __builtin_expect is a no-op for now */
5478 parse_builtin_params(0, "ee");
5481 case TOK_builtin_types_compatible_p
:
5482 parse_builtin_params(0, "tt");
5483 vtop
[-1].type
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
5484 vtop
[0].type
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
5485 n
= is_compatible_types(&vtop
[-1].type
, &vtop
[0].type
);
5489 case TOK_builtin_choose_expr
:
5516 case TOK_builtin_constant_p
:
5517 parse_builtin_params(1, "e");
5518 n
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
&&
5519 !((vtop
->r
& VT_SYM
) && vtop
->sym
->a
.addrtaken
);
5523 case TOK_builtin_frame_address
:
5524 case TOK_builtin_return_address
:
5530 if (tok
!= TOK_CINT
) {
5531 tcc_error("%s only takes positive integers",
5532 tok1
== TOK_builtin_return_address
?
5533 "__builtin_return_address" :
5534 "__builtin_frame_address");
5536 level
= (uint32_t)tokc
.i
;
5541 vset(&type
, VT_LOCAL
, 0); /* local frame */
5543 #ifdef TCC_TARGET_RISCV64
5547 mk_pointer(&vtop
->type
);
5548 indir(); /* -> parent frame */
5550 if (tok1
== TOK_builtin_return_address
) {
5551 // assume return address is just above frame pointer on stack
5552 #ifdef TCC_TARGET_ARM
5555 #elif defined TCC_TARGET_RISCV64
5562 mk_pointer(&vtop
->type
);
5567 #ifdef TCC_TARGET_RISCV64
5568 case TOK_builtin_va_start
:
5569 parse_builtin_params(0, "ee");
5570 r
= vtop
->r
& VT_VALMASK
;
5574 tcc_error("__builtin_va_start expects a local variable");
5579 #ifdef TCC_TARGET_X86_64
5580 #ifdef TCC_TARGET_PE
5581 case TOK_builtin_va_start
:
5582 parse_builtin_params(0, "ee");
5583 r
= vtop
->r
& VT_VALMASK
;
5587 tcc_error("__builtin_va_start expects a local variable");
5589 vtop
->type
= char_pointer_type
;
5594 case TOK_builtin_va_arg_types
:
5595 parse_builtin_params(0, "t");
5596 vpushi(classify_x86_64_va_arg(&vtop
->type
));
5603 #ifdef TCC_TARGET_ARM64
5604 case TOK_builtin_va_start
: {
5605 parse_builtin_params(0, "ee");
5609 vtop
->type
.t
= VT_VOID
;
5612 case TOK_builtin_va_arg
: {
5613 parse_builtin_params(0, "et");
5621 case TOK___arm64_clear_cache
: {
5622 parse_builtin_params(0, "ee");
5625 vtop
->type
.t
= VT_VOID
;
5630 /* atomic operations */
5631 case TOK___atomic_store
:
5632 case TOK___atomic_load
:
5633 case TOK___atomic_exchange
:
5634 case TOK___atomic_compare_exchange
:
5635 case TOK___atomic_fetch_add
:
5636 case TOK___atomic_fetch_sub
:
5637 case TOK___atomic_fetch_or
:
5638 case TOK___atomic_fetch_xor
:
5639 case TOK___atomic_fetch_and
:
5643 /* pre operations */
5654 if (is_float(vtop
->type
.t
)) {
5664 goto tok_identifier
;
5666 /* allow to take the address of a label */
5667 if (tok
< TOK_UIDENT
)
5668 expect("label identifier");
5669 s
= label_find(tok
);
5671 s
= label_push(&global_label_stack
, tok
, LABEL_FORWARD
);
5673 if (s
->r
== LABEL_DECLARED
)
5674 s
->r
= LABEL_FORWARD
;
5677 s
->type
.t
= VT_VOID
;
5678 mk_pointer(&s
->type
);
5679 s
->type
.t
|= VT_STATIC
;
5681 vpushsym(&s
->type
, s
);
5687 CType controlling_type
;
5688 int has_default
= 0;
5691 TokenString
*str
= NULL
;
5692 int saved_const_wanted
= const_wanted
;
5697 expr_type(&controlling_type
, expr_eq
);
5698 controlling_type
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
| VT_ARRAY
);
5699 if ((controlling_type
.t
& VT_BTYPE
) == VT_FUNC
)
5700 mk_pointer(&controlling_type
);
5701 const_wanted
= saved_const_wanted
;
5705 if (tok
== TOK_DEFAULT
) {
5707 tcc_error("too many 'default'");
5713 AttributeDef ad_tmp
;
5718 parse_btype(&cur_type
, &ad_tmp
);
5721 type_decl(&cur_type
, &ad_tmp
, &itmp
, TYPE_ABSTRACT
);
5722 if (compare_types(&controlling_type
, &cur_type
, 0)) {
5724 tcc_error("type match twice");
5734 skip_or_save_block(&str
);
5736 skip_or_save_block(NULL
);
5743 type_to_str(buf
, sizeof buf
, &controlling_type
, NULL
);
5744 tcc_error("type '%s' does not match any association", buf
);
5746 begin_macro(str
, 1);
5755 // special qnan , snan and infinity values
5760 vtop
->type
.t
= VT_FLOAT
;
5765 goto special_math_val
;
5768 goto special_math_val
;
5775 expect("identifier");
5777 if (!s
|| IS_ASM_SYM(s
)) {
5778 const char *name
= get_tok_str(t
, NULL
);
5780 tcc_error("'%s' undeclared", name
);
5781 /* for simple function calls, we tolerate undeclared
5782 external reference to int() function */
5783 tcc_warning_c(warn_implicit_function_declaration
)(
5784 "implicit declaration of function '%s'", name
);
5785 s
= external_global_sym(t
, &func_old_type
);
5789 /* A symbol that has a register is a local register variable,
5790 which starts out as VT_LOCAL value. */
5791 if ((r
& VT_VALMASK
) < VT_CONST
)
5792 r
= (r
& ~VT_VALMASK
) | VT_LOCAL
;
5794 vset(&s
->type
, r
, s
->c
);
5795 /* Point to s as backpointer (even without r&VT_SYM).
5796 Will be used by at least the x86 inline asm parser for
5802 } else if (r
== VT_CONST
&& IS_ENUM_VAL(s
->type
.t
)) {
5803 vtop
->c
.i
= s
->enum_val
;
5808 /* post operations */
5810 if (tok
== TOK_INC
|| tok
== TOK_DEC
) {
5813 } else if (tok
== '.' || tok
== TOK_ARROW
|| tok
== TOK_CDOUBLE
) {
5814 int qualifiers
, cumofs
= 0;
5816 if (tok
== TOK_ARROW
)
5818 qualifiers
= vtop
->type
.t
& (VT_CONSTANT
| VT_VOLATILE
);
5821 /* expect pointer on structure */
5822 if ((vtop
->type
.t
& VT_BTYPE
) != VT_STRUCT
)
5823 expect("struct or union");
5824 if (tok
== TOK_CDOUBLE
)
5825 expect("field name");
5827 if (tok
== TOK_CINT
|| tok
== TOK_CUINT
)
5828 expect("field name");
5829 s
= find_field(&vtop
->type
, tok
, &cumofs
);
5831 tcc_error("field not found: %s", get_tok_str(tok
& ~SYM_FIELD
, &tokc
));
5832 /* add field offset to pointer */
5833 vtop
->type
= char_pointer_type
; /* change type to 'char *' */
5834 vpushi(cumofs
+ s
->c
);
5836 /* change type to field type, and set to lvalue */
5837 vtop
->type
= s
->type
;
5838 vtop
->type
.t
|= qualifiers
;
5839 /* an array is never an lvalue */
5840 if (!(vtop
->type
.t
& VT_ARRAY
)) {
5842 #ifdef CONFIG_TCC_BCHECK
5843 /* if bound checking, the referenced pointer must be checked */
5844 if (tcc_state
->do_bounds_check
)
5845 vtop
->r
|= VT_MUSTBOUND
;
5849 } else if (tok
== '[') {
5855 } else if (tok
== '(') {
5858 int nb_args
, ret_nregs
, ret_align
, regsize
, variadic
;
5861 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
) {
5862 /* pointer test (no array accepted) */
5863 if ((vtop
->type
.t
& (VT_BTYPE
| VT_ARRAY
)) == VT_PTR
) {
5864 vtop
->type
= *pointed_type(&vtop
->type
);
5865 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
)
5869 expect("function pointer");
5872 vtop
->r
&= ~VT_LVAL
; /* no lvalue */
5874 /* get return type */
5877 sa
= s
->next
; /* first parameter */
5878 nb_args
= regsize
= 0;
5880 /* compute first implicit argument if a structure is returned */
5881 if ((s
->type
.t
& VT_BTYPE
) == VT_STRUCT
) {
5882 variadic
= (s
->f
.func_type
== FUNC_ELLIPSIS
);
5883 ret_nregs
= gfunc_sret(&s
->type
, variadic
, &ret
.type
,
5884 &ret_align
, ®size
);
5885 if (ret_nregs
<= 0) {
5886 /* get some space for the returned structure */
5887 size
= type_size(&s
->type
, &align
);
5888 #ifdef TCC_TARGET_ARM64
5889 /* On arm64, a small struct is return in registers.
5890 It is much easier to write it to memory if we know
5891 that we are allowed to write some extra bytes, so
5892 round the allocated space up to a power of 2: */
5894 while (size
& (size
- 1))
5895 size
= (size
| (size
- 1)) + 1;
5897 loc
= (loc
- size
) & -align
;
5899 ret
.r
= VT_LOCAL
| VT_LVAL
;
5900 /* pass it as 'int' to avoid structure arg passing
5902 vseti(VT_LOCAL
, loc
);
5903 #ifdef CONFIG_TCC_BCHECK
5904 if (tcc_state
->do_bounds_check
)
5918 if (ret_nregs
> 0) {
5919 /* return in register */
5921 PUT_R_RET(&ret
, ret
.type
.t
);
5926 gfunc_param_typed(s
, sa
);
5936 tcc_error("too few arguments to function");
5938 gfunc_call(nb_args
);
5940 if (ret_nregs
< 0) {
5941 vsetc(&ret
.type
, ret
.r
, &ret
.c
);
5942 #ifdef TCC_TARGET_RISCV64
5943 arch_transfer_ret_regs(1);
5947 for (r
= ret
.r
+ ret_nregs
+ !ret_nregs
; r
-- > ret
.r
;) {
5948 vsetc(&ret
.type
, r
, &ret
.c
);
5949 vtop
->r2
= ret
.r2
; /* Loop only happens when r2 is VT_CONST */
5952 /* handle packed struct return */
5953 if (((s
->type
.t
& VT_BTYPE
) == VT_STRUCT
) && ret_nregs
) {
5956 size
= type_size(&s
->type
, &align
);
5957 /* We're writing whole regs often, make sure there's enough
5958 space. Assume register size is power of 2. */
5959 if (regsize
> align
)
5961 loc
= (loc
- size
) & -align
;
5965 vset(&ret
.type
, VT_LOCAL
| VT_LVAL
, addr
+ offset
);
5969 if (--ret_nregs
== 0)
5973 vset(&s
->type
, VT_LOCAL
| VT_LVAL
, addr
);
5976 /* Promote char/short return values. This is matters only
5977 for calling function that were not compiled by TCC and
5978 only on some architectures. For those where it doesn't
5979 matter we expect things to be already promoted to int,
5981 t
= s
->type
.t
& VT_BTYPE
;
5982 if (t
== VT_BYTE
|| t
== VT_SHORT
|| t
== VT_BOOL
) {
5984 vtop
->r
|= BFVAL(VT_MUSTCAST
, 1);
5986 vtop
->type
.t
= VT_INT
;
5990 if (s
->f
.func_noreturn
) {
5992 tcc_tcov_block_end(tcc_state
, -1);
6001 #ifndef precedence_parser /* original top-down parser */
/* Multiplicative level of the top-down parser: handles '*', '/', '%'.
   NOTE(review): loop body lines are missing from this extraction. */
6003 static void expr_prod(void)
6008 while ((t
= tok
) == '*' || t
== '/' || t
== '%') {
/* Additive level: handles binary '+' and '-'.
   NOTE(review): loop body lines are missing from this extraction. */
6015 static void expr_sum(void)
6020 while ((t
= tok
) == '+' || t
== '-') {
/* Shift level: handles '<<' (TOK_SHL) and '>>' (TOK_SAR).
   NOTE(review): loop body lines are missing from this extraction. */
6027 static void expr_shift(void)
6032 while ((t
= tok
) == TOK_SHL
|| t
== TOK_SAR
) {
/* Relational level: handles <, <=, >, >= (both the TOK_ULE..TOK_GT range
   and TOK_ULT/TOK_UGE variants).
   NOTE(review): loop body lines are missing from this extraction. */
6039 static void expr_cmp(void)
6044 while (((t
= tok
) >= TOK_ULE
&& t
<= TOK_GT
) ||
6045 t
== TOK_ULT
|| t
== TOK_UGE
) {
/* Equality level: handles '==' and '!='.
   NOTE(review): loop body lines are missing from this extraction. */
6052 static void expr_cmpeq(void)
6057 while ((t
= tok
) == TOK_EQ
|| t
== TOK_NE
) {
/* Bitwise-AND level. NOTE(review): loop body missing from extraction. */
6064 static void expr_and(void)
6067 while (tok
== '&') {
/* Bitwise-XOR level. NOTE(review): loop body missing from extraction. */
6074 static void expr_xor(void)
6077 while (tok
== '^') {
/* Bitwise-OR level. NOTE(review): loop body missing from extraction. */
6084 static void expr_or(void)
6087 while (tok
== '|') {
6094 static void expr_landor(int op
);
/* Logical-AND level ('&&'). NOTE(review): body largely missing; the
   visible test presumably dispatches to expr_landor — confirm upstream. */
6096 static void expr_land(void)
6099 if (tok
== TOK_LAND
)
/* Logical-OR level ('||'). NOTE(review): entire body (original 6104-6108)
   is missing from this extraction. */
6103 static void expr_lor(void)
6110 # define expr_landor_next(op) op == TOK_LAND ? expr_or() : expr_land()
6111 #else /* defined precedence_parser */
6112 # define expr_landor_next(op) unary(), expr_infix(precedence(op) + 1)
6113 # define expr_lor() unary(), expr_infix(1)
/* Map an operator token to its binding strength for the precedence-climbing
   parser; higher binds tighter, 0 means "not a binary operator".
   NOTE(review): several case lines (e.g. original 6116-6117, 6120-6122,
   6128, 6130-6132) are missing from this extraction; bytes kept as-is. */
6115 static int precedence(int tok
)
6118 case TOK_LOR
: return 1;
6119 case TOK_LAND
: return 2;
6123 case TOK_EQ
: case TOK_NE
: return 6;
6124 relat
: case TOK_ULT
: case TOK_UGE
: return 7;
6125 case TOK_SHL
: case TOK_SAR
: return 8;
6126 case '+': case '-': return 9;
6127 case '*': case '/': case '%': return 10;
/* the TOK_ULE..TOK_GT range shares the relational precedence (label above) */
6129 if (tok
>= TOK_ULE
&& tok
<= TOK_GT
)
/* 256-entry cache of precedence() for single-byte tokens; filled once by
   init_prec(); the macro below then answers in O(1) and returns 0 for
   multi-byte tokens >= 256. */
6134 static unsigned char prec
[256];
6135 static void init_prec(void)
6138 for (i
= 0; i
< 256; i
++)
6139 prec
[i
] = precedence(i
);
6141 #define precedence(i) ((unsigned)i < 256 ? prec[i] : 0)
6143 static void expr_landor(int op
);
6145 static void expr_infix(int p
)
6148 while ((p2
= precedence(t
)) >= p
) {
6149 if (t
== TOK_LOR
|| t
== TOK_LAND
) {
6154 if (precedence(tok
) > p2
)
/* NOTE(review): interior lines (e.g. original 6167-6168, 6171, 6173-6176)
   are missing from this extraction; bytes kept unchanged. */
6163 /* Assuming vtop is a value used in a conditional context
6164 (i.e. compared with zero) return 0 if it's false, 1 if
6165 true and -1 if it can't be statically determined. */
6166 static int condition_3way(void)
/* a plain constant (no lvalue, and not a weak symbol whose address could
   legitimately be null at runtime) can be decided at compile time */
6169 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
&&
6170 (!(vtop
->r
& VT_SYM
) || !vtop
->sym
->a
.weak
)) {
6172 gen_cast_s(VT_BOOL
);
6179 static void expr_landor(int op
)
6181 int t
= 0, cc
= 1, f
= 0, i
= op
== TOK_LAND
, c
;
6183 c
= f
? i
: condition_3way();
6185 save_regs(1), cc
= 0;
6187 nocode_wanted
++, f
= 1;
6195 expr_landor_next(op
);
/* Return nonzero if *sv is already a boolean-like value: either an int
   constant equal to 0 or 1, or a pending comparison result (VT_CMP).
   NOTE(review): lines around the function (original 6208, 6213-6215) are
   missing from this extraction; bytes kept unchanged. */
6207 static int is_cond_bool(SValue
*sv
)
6209 if ((sv
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
6210 && (sv
->type
.t
& VT_BTYPE
) == VT_INT
)
6211 return (unsigned)sv
->c
.i
< 2;
6212 if (sv
->r
== VT_CMP
)
6217 static void expr_cond(void)
6219 int tt
, u
, r1
, r2
, rc
, t1
, t2
, islv
, c
, g
;
6227 c
= condition_3way();
6228 g
= (tok
== ':' && gnu_ext
);
6238 /* needed to avoid having different registers saved in
6245 ncw_prev
= nocode_wanted
;
6251 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FUNC
)
6252 mk_pointer(&vtop
->type
);
6253 sv
= *vtop
; /* save value to handle it later */
6254 vtop
--; /* no vpop so that FP stack is not flushed */
6264 nocode_wanted
= ncw_prev
;
6270 if (c
< 0 && is_cond_bool(vtop
) && is_cond_bool(&sv
)) {
6271 /* optimize "if (f ? a > b : c || d) ..." for example, where normally
6272 "a < b" and "c || d" would be forced to "(int)0/1" first, whereas
6273 this code jumps directly to the if's then/else branches. */
6278 /* combine jump targets of 2nd op with VT_CMP of 1st op */
6281 nocode_wanted
= ncw_prev
;
6282 // tcc_warning("two conditions expr_cond");
6286 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FUNC
)
6287 mk_pointer(&vtop
->type
);
6289 /* cast operands to correct type according to ISOC rules */
6290 if (!combine_types(&type
, &sv
, vtop
, '?'))
6291 type_incompatibility_error(&sv
.type
, &vtop
->type
,
6292 "type mismatch in conditional expression (have '%s' and '%s')");
6293 /* keep structs lvalue by transforming `(expr ? a : b)` to `*(expr ? &a : &b)` so
6294 that `(expr ? a : b).mem` does not error with "lvalue expected" */
6295 islv
= (vtop
->r
& VT_LVAL
) && (sv
.r
& VT_LVAL
) && VT_STRUCT
== (type
.t
& VT_BTYPE
);
6297 /* now we convert second operand */
6301 mk_pointer(&vtop
->type
);
6303 } else if (VT_STRUCT
== (vtop
->type
.t
& VT_BTYPE
))
6307 rc
= RC_TYPE(type
.t
);
6308 /* for long longs, we use fixed registers to avoid having
6309 to handle a complicated move */
6310 if (USING_TWO_WORDS(type
.t
))
6311 rc
= RC_RET(type
.t
);
6319 nocode_wanted
= ncw_prev
;
6321 /* this is horrible, but we must also convert first
6327 mk_pointer(&vtop
->type
);
6329 } else if (VT_STRUCT
== (vtop
->type
.t
& VT_BTYPE
))
6335 move_reg(r2
, r1
, islv
? VT_PTR
: type
.t
);
6345 static void expr_eq(void)
6350 if ((t
= tok
) == '=' || TOK_ASSIGN(t
)) {
6358 gen_op(TOK_ASSIGN_OP(t
));
6364 ST_FUNC
void gexpr(void)
/* NOTE(review): the middle of this function (original 6377-6378, 6380)
   is missing from this extraction; bytes kept unchanged. */
6375 /* parse a constant expression and return value in vtop. */
6376 static void expr_const1(void)
/* raise nocode_wanted past unevalmask so no code/data is emitted while
   the constant expression is evaluated, then restore it symmetrically */
6379 nocode_wanted
+= unevalmask
+ 1;
6381 nocode_wanted
-= unevalmask
+ 1;
/* NOTE(review): surrounding lines (original 6387-6389, 6392-6394) are
   missing from this extraction; bytes kept unchanged. */
6385 /* parse an integer constant and return its value. */
6386 static inline int64_t expr_const64(void)
/* the parsed expression must have folded to a plain constant
   (no register, no lvalue, no relocation symbol) */
6390 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) != VT_CONST
)
6391 expect("constant expression");
/* NOTE(review): interior lines (original 6400-6401, 6403, 6406) are
   missing from this extraction; bytes kept unchanged. */
6397 /* parse an integer constant and return its value.
6398 Complain if it doesn't fit 32bit (signed or unsigned). */
6399 ST_FUNC
int expr_const(void)
6402 int64_t wc
= expr_const64();
/* accept the value if it round-trips through either int or unsigned int */
6404 if (c
!= wc
&& (unsigned)c
!= wc
)
6405 tcc_error("constant exceeds 32 bit");
6409 /* ------------------------------------------------------------------------- */
6410 /* return from function */
6412 #ifndef TCC_TARGET_ARM64
6413 static void gfunc_return(CType
*func_type
)
6415 if ((func_type
->t
& VT_BTYPE
) == VT_STRUCT
) {
6416 CType type
, ret_type
;
6417 int ret_align
, ret_nregs
, regsize
;
6418 ret_nregs
= gfunc_sret(func_type
, func_var
, &ret_type
,
6419 &ret_align
, ®size
);
6420 if (ret_nregs
< 0) {
6421 #ifdef TCC_TARGET_RISCV64
6422 arch_transfer_ret_regs(0);
6424 } else if (0 == ret_nregs
) {
6425 /* if returning structure, must copy it to implicit
6426 first pointer arg location */
6429 vset(&type
, VT_LOCAL
| VT_LVAL
, func_vc
);
6432 /* copy structure value to pointer */
6435 /* returning structure packed into registers */
6436 int size
, addr
, align
, rc
;
6437 size
= type_size(func_type
,&align
);
6438 if ((vtop
->r
!= (VT_LOCAL
| VT_LVAL
) ||
6439 (vtop
->c
.i
& (ret_align
-1)))
6440 && (align
& (ret_align
-1))) {
6441 loc
= (loc
- size
) & -ret_align
;
6444 vset(&type
, VT_LOCAL
| VT_LVAL
, addr
);
6448 vset(&ret_type
, VT_LOCAL
| VT_LVAL
, addr
);
6450 vtop
->type
= ret_type
;
6451 rc
= RC_RET(ret_type
.t
);
6459 if (--ret_nregs
== 0)
6461 /* We assume that when a structure is returned in multiple
6462 registers, their classes are consecutive values of the
6465 vtop
->c
.i
+= regsize
;
6470 gv(RC_RET(func_type
->t
));
6472 vtop
--; /* NOT vpop() because on x86 it would flush the fp stack */
/* Called at the end of a function body when control can fall off the end:
   void functions need nothing, main() gets an implicit 'return 0' (C99),
   anything else draws a warning.
   NOTE(review): interior lines (original 6477, 6479, 6483, 6486, 6488) are
   missing from this extraction; bytes kept unchanged. */
6476 static void check_func_return(void)
6478 if ((func_vt
.t
& VT_BTYPE
) == VT_VOID
)
6480 if (!strcmp (funcname
, "main")
6481 && (func_vt
.t
& VT_BTYPE
) == VT_INT
) {
6482 /* main returns 0 by default */
6484 gen_assign_cast(&func_vt
);
6485 gfunc_return(&func_vt
);
6487 tcc_warning("function might return no value: '%s'", funcname
);
6491 /* ------------------------------------------------------------------------- */
6494 static int case_cmpi(const void *pa
, const void *pb
)
6496 int64_t a
= (*(struct case_t
**) pa
)->v1
;
6497 int64_t b
= (*(struct case_t
**) pb
)->v1
;
6498 return a
< b
? -1 : a
> b
;
6501 static int case_cmpu(const void *pa
, const void *pb
)
6503 uint64_t a
= (uint64_t)(*(struct case_t
**) pa
)->v1
;
6504 uint64_t b
= (uint64_t)(*(struct case_t
**) pb
)->v1
;
6505 return a
< b
? -1 : a
> b
;
/* Emit a conditional test of the top-of-stack value (via gvtst) and bind
   the resulting forward jump to the already-known code address 'a'. */
static void gtst_addr(int t, int a)
{
    int jmp = gvtst(0, t);
    gsym_addr(jmp, a);
}
6513 static void gcase(struct case_t
**base
, int len
, int *bsym
)
6517 int ll
= (vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
;
6534 gtst_addr(0, p
->sym
); /* v1 <= x <= v2 */
6536 gcase(base
, len
/2, bsym
);
6540 base
+= e
; len
-= e
;
6550 if (p
->v1
== p
->v2
) {
6552 gtst_addr(0, p
->sym
);
6562 gtst_addr(0, p
->sym
);
6566 *bsym
= gjmp(*bsym
);
6569 /* ------------------------------------------------------------------------- */
6570 /* __attribute__((cleanup(fn))) */
/* Run __attribute__((cleanup(fn))) handlers registered in the current
   scope's cleanup chain, from most recent down to (but excluding) 'stop'.
   NOTE(review): interior lines (e.g. original 6573, 6575, 6579, 6582,
   6584-6586) are missing from this extraction; bytes kept unchanged. */
6572 static void try_call_scope_cleanup(Sym
*stop
)
6574 Sym
*cls
= cur_scope
->cl
.s
;
/* walk the cleanup chain via ->ncl links */
6576 for (; cls
!= stop
; cls
= cls
->ncl
) {
/* fs = cleanup function symbol, vs = the variable it applies to */
6577 Sym
*fs
= cls
->next
;
6578 Sym
*vs
= cls
->prev_tok
;
6580 vpushsym(&fs
->type
, fs
);
6581 vset(&vs
->type
, vs
->r
, vs
->c
);
/* cleanup functions take the variable's address */
6583 mk_pointer(&vtop
->type
);
6589 static void try_call_cleanup_goto(Sym
*cleanupstate
)
6594 if (!cur_scope
->cl
.s
)
6597 /* search NCA of both cleanup chains given parents and initial depth */
6598 ocd
= cleanupstate
? cleanupstate
->v
& ~SYM_FIELD
: 0;
6599 for (ccd
= cur_scope
->cl
.n
, oc
= cleanupstate
; ocd
> ccd
; --ocd
, oc
= oc
->ncl
)
6601 for (cc
= cur_scope
->cl
.s
; ccd
> ocd
; --ccd
, cc
= cc
->ncl
)
6603 for (; cc
!= oc
; cc
= cc
->ncl
, oc
= oc
->ncl
, --ccd
)
6606 try_call_scope_cleanup(cc
);
6609 /* call 'func' for each __attribute__((cleanup(func))) */
6610 static void block_cleanup(struct scope
*o
)
6614 for (pg
= &pending_gotos
; (g
= *pg
) && g
->c
> o
->cl
.n
;) {
6615 if (g
->prev_tok
->r
& LABEL_FORWARD
) {
6620 try_call_scope_cleanup(o
->cl
.s
);
6621 pcl
->jnext
= gjmp(0);
6623 goto remove_pending
;
6633 try_call_scope_cleanup(o
->cl
.s
);
6636 /* ------------------------------------------------------------------------- */
/* Restore the stack pointer from the saved VLA location.
   NOTE(review): original lines 6640-6641 are missing from this extraction —
   presumably the opening brace and a guard on 'loc'; confirm upstream. */
6639 static void vla_restore(int loc
)
6642 gen_vla_sp_restore(loc
);
/* On leaving scopes up to (but excluding) 'o', restore the stack pointer
   saved by the outermost VLA-using scope crossed.
   NOTE(review): interior lines (original 6646, 6649-6651, 6653) are missing
   from this extraction; bytes kept unchanged. */
6645 static void vla_leave(struct scope
*o
)
6647 struct scope
*c
= cur_scope
, *v
= NULL
;
/* walk outward from the current scope toward 'o' */
6648 for (; c
!= o
&& c
; c
= c
->prev
)
6652 vla_restore(v
->vla
.locorig
);
6655 /* ------------------------------------------------------------------------- */
/* Enter a new lexical scope: link it onto the scope chain, reset its VLA
   count, and snapshot the local symbol/label stacks so prev_scope() can
   roll them back.
   NOTE(review): interior lines (original 6659, 6661, 6663, 6665, 6669-6671)
   are missing from this extraction; bytes kept unchanged. */
6658 static void new_scope(struct scope
*o
)
6660 /* copy and link previous scope */
6662 o
->prev
= cur_scope
;
6664 cur_scope
->vla
.num
= 0;
6666 /* record local declaration stack position */
6667 o
->lstk
= local_stack
;
6668 o
->llstk
= local_label_stack
;
/* debug info: mark the opening brace of the lexical block */
6672 tcc_debug_stabn(tcc_state
, N_LBRAC
, ind
- func_ind
);
/* Leave scope 'o': run pending cleanups if its cleanup chain differs from
   the parent's, pop locally defined labels and symbols, and unlink it.
   NOTE(review): interior lines (e.g. original 6676-6678, 6681, 6684,
   6696-6698) are missing from this extraction; bytes kept unchanged. */
6675 static void prev_scope(struct scope
*o
, int is_expr
)
6679 if (o
->cl
.s
!= o
->prev
->cl
.s
)
6680 block_cleanup(o
->prev
);
6682 /* pop locally defined labels */
6683 label_pop(&local_label_stack
, o
->llstk
, is_expr
);
6685 /* In the is_expr case (a statement expression is finished here),
6686 vtop might refer to symbols on the local_stack. Either via the
6687 type or via vtop->sym. We can't pop those nor any that in turn
6688 might be referred to. To make it easier we don't roll back
6689 any symbols in that case; some upper level call to block() will
6690 do that. We do have to remove such symbols from the lookup
6691 tables, though. sym_pop will do that. */
6693 /* pop locally defined symbols */
6694 pop_local_syms(o
->lstk
, is_expr
);
6695 cur_scope
= o
->prev
;
/* debug info: mark the closing brace of the lexical block */
6699 tcc_debug_stabn(tcc_state
, N_RBRAC
, ind
- func_ind
);
/* NOTE(review): interior lines (original 6704-6706, 6708-6709) are missing
   from this extraction; bytes kept unchanged. */
6702 /* leave a scope via break/continue(/goto) */
6703 static void leave_scope(struct scope
*o
)
/* run cleanup handlers down to the target scope's chain */
6707 try_call_scope_cleanup(o
->cl
.s
);
6711 /* ------------------------------------------------------------------------- */
6712 /* call block from 'for do while' loops */
/* Parse the body of a 'for'/'do'/'while' loop: temporarily install the
   given break (*bsym) and continue (*csym) jump lists on the current
   scope, then restore the previous ones.
   NOTE(review): the rest of the body (original 6718-6729) is missing from
   this extraction; bytes kept unchanged. */
6714 static void lblock(int *bsym
, int *csym
)
6716 struct scope
*lo
= loop_scope
, *co
= cur_scope
;
/* save the scope's current break/continue targets for restoration */
6717 int *b
= co
->bsym
, *c
= co
->csym
;
6731 static void block(int is_expr
)
6733 int a
, b
, c
, d
, e
, t
;
6738 /* default return value is (void) */
6740 vtop
->type
.t
= VT_VOID
;
6745 /* If the token carries a value, next() might destroy it. Only with
6746 invalid code such as f(){"123"4;} */
6747 if (TOK_HAS_VALUE(t
))
6752 tcc_tcov_check_line (tcc_state
, 0), tcc_tcov_block_begin (tcc_state
);
6760 if (tok
== TOK_ELSE
) {
6765 gsym(d
); /* patch else jmp */
6770 } else if (t
== TOK_WHILE
) {
6782 } else if (t
== '{') {
6785 /* handle local labels declarations */
6786 while (tok
== TOK_LABEL
) {
6789 if (tok
< TOK_UIDENT
)
6790 expect("label identifier");
6791 label_push(&local_label_stack
, tok
, LABEL_DECLARED
);
6793 } while (tok
== ',');
6797 while (tok
!= '}') {
6806 prev_scope(&o
, is_expr
);
6809 else if (!nocode_wanted
)
6810 check_func_return();
6812 } else if (t
== TOK_RETURN
) {
6813 b
= (func_vt
.t
& VT_BTYPE
) != VT_VOID
;
6817 gen_assign_cast(&func_vt
);
6819 if (vtop
->type
.t
!= VT_VOID
)
6820 tcc_warning("void function returns a value");
6824 tcc_warning("'return' with no value");
6827 leave_scope(root_scope
);
6829 gfunc_return(&func_vt
);
6831 /* jump unless last stmt in top-level block */
6832 if (tok
!= '}' || local_scope
!= 1)
6835 tcc_tcov_block_end (tcc_state
, -1);
6838 } else if (t
== TOK_BREAK
) {
6840 if (!cur_scope
->bsym
)
6841 tcc_error("cannot break");
6842 if (cur_switch
&& cur_scope
->bsym
== cur_switch
->bsym
)
6843 leave_scope(cur_switch
->scope
);
6845 leave_scope(loop_scope
);
6846 *cur_scope
->bsym
= gjmp(*cur_scope
->bsym
);
6849 } else if (t
== TOK_CONTINUE
) {
6851 if (!cur_scope
->csym
)
6852 tcc_error("cannot continue");
6853 leave_scope(loop_scope
);
6854 *cur_scope
->csym
= gjmp(*cur_scope
->csym
);
6857 } else if (t
== TOK_FOR
) {
6862 /* c99 for-loop init decl? */
6863 if (!decl0(VT_LOCAL
, 1, NULL
)) {
6864 /* no, regular for-loop init expr */
6892 } else if (t
== TOK_DO
) {
6906 } else if (t
== TOK_SWITCH
) {
6907 struct switch_t
*sw
;
6909 sw
= tcc_mallocz(sizeof *sw
);
6911 sw
->scope
= cur_scope
;
6912 sw
->prev
= cur_switch
;
6918 sw
->sv
= *vtop
--; /* save switch value */
6921 b
= gjmp(0); /* jump to first case */
6923 a
= gjmp(a
); /* add implicit break */
6927 if (sw
->sv
.type
.t
& VT_UNSIGNED
)
6928 qsort(sw
->p
, sw
->n
, sizeof(void*), case_cmpu
);
6930 qsort(sw
->p
, sw
->n
, sizeof(void*), case_cmpi
);
6932 for (b
= 1; b
< sw
->n
; b
++)
6933 if (sw
->sv
.type
.t
& VT_UNSIGNED
6934 ? (uint64_t)sw
->p
[b
- 1]->v2
>= (uint64_t)sw
->p
[b
]->v1
6935 : sw
->p
[b
- 1]->v2
>= sw
->p
[b
]->v1
)
6936 tcc_error("duplicate case value");
6940 d
= 0, gcase(sw
->p
, sw
->n
, &d
);
6943 gsym_addr(d
, sw
->def_sym
);
6949 dynarray_reset(&sw
->p
, &sw
->n
);
6950 cur_switch
= sw
->prev
;
6953 } else if (t
== TOK_CASE
) {
6954 struct case_t
*cr
= tcc_malloc(sizeof(struct case_t
));
6957 cr
->v1
= cr
->v2
= expr_const64();
6958 if (gnu_ext
&& tok
== TOK_DOTS
) {
6960 cr
->v2
= expr_const64();
6961 if ((!(cur_switch
->sv
.type
.t
& VT_UNSIGNED
) && cr
->v2
< cr
->v1
)
6962 || (cur_switch
->sv
.type
.t
& VT_UNSIGNED
&& (uint64_t)cr
->v2
< (uint64_t)cr
->v1
))
6963 tcc_warning("empty case range");
6966 tcc_tcov_reset_ind(tcc_state
);
6968 dynarray_add(&cur_switch
->p
, &cur_switch
->n
, cr
);
6971 goto block_after_label
;
6973 } else if (t
== TOK_DEFAULT
) {
6976 if (cur_switch
->def_sym
)
6977 tcc_error("too many 'default'");
6979 tcc_tcov_reset_ind(tcc_state
);
6980 cur_switch
->def_sym
= gind();
6983 goto block_after_label
;
6985 } else if (t
== TOK_GOTO
) {
6986 if (cur_scope
->vla
.num
)
6987 vla_restore(cur_scope
->vla
.locorig
);
6988 if (tok
== '*' && gnu_ext
) {
6992 if ((vtop
->type
.t
& VT_BTYPE
) != VT_PTR
)
6996 } else if (tok
>= TOK_UIDENT
) {
6997 s
= label_find(tok
);
6998 /* put forward definition if needed */
7000 s
= label_push(&global_label_stack
, tok
, LABEL_FORWARD
);
7001 else if (s
->r
== LABEL_DECLARED
)
7002 s
->r
= LABEL_FORWARD
;
7004 if (s
->r
& LABEL_FORWARD
) {
7005 /* start new goto chain for cleanups, linked via label->next */
7006 if (cur_scope
->cl
.s
&& !nocode_wanted
) {
7007 sym_push2(&pending_gotos
, SYM_FIELD
, 0, cur_scope
->cl
.n
);
7008 pending_gotos
->prev_tok
= s
;
7009 s
= sym_push2(&s
->next
, SYM_FIELD
, 0, 0);
7010 pending_gotos
->next
= s
;
7012 s
->jnext
= gjmp(s
->jnext
);
7014 try_call_cleanup_goto(s
->cleanupstate
);
7015 gjmp_addr(s
->jnext
);
7020 expect("label identifier");
7024 } else if (t
== TOK_ASM1
|| t
== TOK_ASM2
|| t
== TOK_ASM3
) {
7028 if (tok
== ':' && t
>= TOK_UIDENT
) {
7033 if (s
->r
== LABEL_DEFINED
)
7034 tcc_error("duplicate label '%s'", get_tok_str(s
->v
, NULL
));
7035 s
->r
= LABEL_DEFINED
;
7037 Sym
*pcl
; /* pending cleanup goto */
7038 for (pcl
= s
->next
; pcl
; pcl
= pcl
->prev
)
7040 sym_pop(&s
->next
, NULL
, 0);
7044 s
= label_push(&global_label_stack
, t
, LABEL_DEFINED
);
7047 s
->cleanupstate
= cur_scope
->cl
.s
;
7050 vla_restore(cur_scope
->vla
.loc
);
7053 /* we accept this, but it is a mistake */
7054 tcc_warning_c(warn_all
)("deprecated use of label at end of compound statement");
7057 /* expression case */
7074 tcc_tcov_check_line (tcc_state
, 0), tcc_tcov_block_end (tcc_state
, 0);
/* This skips over a stream of tokens containing balanced {} and ()
   pairs, stopping at outer ',' ';' and '}' (or matching '}' if we started
   with a '{'). If STR then allocates and stores the skipped tokens
   in *STR. This doesn't check if () and {} are nested correctly,
   i.e. "({)}" is accepted. */
/* NOTE(review): some interior lines (nesting-level bookkeeping, token
   skipping) are elided in this excerpt. */
static void skip_or_save_block(TokenString **str)
    /* remember whether we entered on a '{' so its matching '}' ends the scan */
    int braces = tok == '{';
        *str = tok_str_alloc();
    /* scan until an outer separator is reached at nesting level 0 */
    while ((level > 0 || (tok != '}' && tok != ',' && tok != ';' && tok != ')'))) {
        if (tok == TOK_EOF) {
            /* EOF is fatal when saving tokens or still inside a nest */
            if (str || level > 0)
                tcc_error("unexpected end of file");
            tok_str_add_tok(*str);
        if (t == '{' || t == '(') {
        } else if (t == '}' || t == ')') {
            /* stop after consuming the '}' matching the initial '{' */
            if (level == 0 && braces && t == '}')
        tok_str_add(*str, -1);  /* end-of-stream marker */
        tok_str_add(*str, 0);
#define EXPR_CONST 1

/* Parse one initializer element. expr_type selects constant-only
   (EXPR_CONST) vs. unrestricted (EXPR_ANY) expression parsing; in the
   constant case the result on vtop must be addressable at load time. */
static void parse_init_elem(int expr_type)
    int saved_global_expr;
        /* compound literals must be allocated globally in this case */
        saved_global_expr = global_expr;
        global_expr = saved_global_expr;  /* restore after parsing the element */
    /* NOTE: symbols are accepted, as well as lvalue for anon symbols
       (compound literals). */
    if (((vtop->r & (VT_VALMASK | VT_LVAL)) != VT_CONST
         && ((vtop->r & (VT_SYM|VT_LVAL)) != (VT_SYM|VT_LVAL)
             || vtop->sym->v < SYM_FIRST_ANOM))
#ifdef TCC_TARGET_PE
        /* dllimport symbols are resolved at run time, never constant */
        || ((vtop->r & VT_SYM) && vtop->sym->a.dllimport)
        tcc_error("initializer element is not constant");
/* Internal sanity check: a store at 'offset' must stay inside the storage
   already reserved for the object being initialized — section data for
   static objects, the local stack frame for automatics. */
static void init_assert(init_params *p, int offset)
    if (p->sec ? !NODATA_WANTED && offset > p->sec->data_offset
               : !nocode_wanted && offset > p->local_offset)
        tcc_internal_error("initializer overflow");
/* no-op variant when the check is compiled out */
#define init_assert(sec, offset)
/* put zeros for variable based init */
static void init_putz(init_params *p, unsigned long c, int size)
    init_assert(p, c + size);
        /* nothing to do because globals are already set to zero */
        /* local storage: emit a call to the memset helper instead */
        vpush_helper_func(TOK_memset);
#ifdef TCC_TARGET_ARM
/* flags for decl_initializer()/decl_designator() */
#define DIF_SIZE_ONLY 2
#define DIF_HAVE_ELEM 4
/* delete relocations for specified range c ... c + size. Unfortunatly
   in very special cases, relocations may occur unordered */
static void decl_design_delrels(Section *sec, int c, int size)
    ElfW_Rel *rel, *rel2, *rel_end;
    /* nothing to do when the section has no relocation table */
    if (!sec || !sec->reloc)
    /* rel scans every reloc; rel2 is the write cursor used to compact
       the table in place over deleted entries */
    rel = rel2 = (ElfW_Rel *)sec->reloc->data;
    rel_end = (ElfW_Rel *)(sec->reloc->data + sec->reloc->data_offset);
    while (rel < rel_end) {
        if (rel->r_offset >= c && rel->r_offset < c + size) {
            /* reloc targets the overwritten range: drop it */
            sec->reloc->data_offset -= sizeof *rel;
            /* keep this reloc: move it down to the write cursor */
            memcpy(rel2, rel, sizeof *rel);
/* Track the size of the flexible array member currently being
   initialized: grow its element count to cover 'index'. Any other
   unsized array reaching here is an error. */
static void decl_design_flex(init_params *p, Sym *ref, int index)
    if (ref == p->flex_array_ref) {
        /* extend the flex array to the highest index seen so far */
        if (index >= ref->c)
    } else if (ref->c < 0)
        tcc_error("flexible array has zero size in this context");
/* t is the array or struct type. c is the array or struct
   address. cur_field is the pointer to the current
   field, for arrays the 'c' member contains the current start
   index. 'flags' is as in decl_initializer.
   'al' contains the already initialized length of the
   current container (starting at c). This returns the new length of that. */
static int decl_designator(init_params *p, CType *type, unsigned long c,
                           Sym **cur_field, int flags, int al)
    int index, index_last, align, l, nb_elems, elem_size;
    unsigned long corig = c;  /* container base, to detect re-initialization */
    if (flags & DIF_HAVE_ELEM)
    /* GNU extension: 'field: value' designator syntax */
    if (gnu_ext && tok >= TOK_UIDENT) {
    /* NOTE: we only support ranges for last designator */
    while (nb_elems == 1 && (tok == '[' || tok == '.')) {
        /* '[constant]' (or GNU '[first ... last]') array designator */
        if (!(type->t & VT_ARRAY))
            expect("array type");
        index = index_last = expr_const();
        if (tok == TOK_DOTS && gnu_ext) {
            index_last = expr_const();
        decl_design_flex(p, s, index_last);
        if (index < 0 || index_last >= s->c || index_last < index)
            tcc_error("index exceeds array bounds or range is empty");
        /* remember position so the next positional element follows the range */
        (*cur_field)->c = index_last;
        type = pointed_type(type);
        elem_size = type_size(type, &align);
        c += index * elem_size;
        nb_elems = index_last - index + 1;
        /* '.field' designator: only valid on struct/union types */
        if ((type->t & VT_BTYPE) != VT_STRUCT)
            expect("struct/union type");
        f = find_field(type, l, &cumofs);
    } else if (!gnu_ext) {
    /* no designator: advance to the next positional element */
    if (type->t & VT_ARRAY) {
        index = (*cur_field)->c;
        decl_design_flex(p, s, index);
        tcc_error("too many initializers");
        type = pointed_type(type);
        elem_size = type_size(type, &align);
        c += index * elem_size;
        /* skip anonymous bitfield padding members */
        while (f && (f->v & SYM_FIRST_ANOM) && (f->type.t & VT_BITFIELD))
            *cur_field = f = f->next;
        tcc_error("too many initializers");
    if (!elem_size) /* for structs */
        elem_size = type_size(type, &align);
    /* Using designators the same element can be initialized more
       than once. In that case we need to delete possibly already
       existing relocations. */
    if (!(flags & DIF_SIZE_ONLY) && c - corig < al) {
        decl_design_delrels(p->sec, c, elem_size * nb_elems);
        flags &= ~DIF_CLEAR; /* mark stack dirty too */
    decl_initializer(p, type, c, flags & ~DIF_FIRST);
    /* for a range designator, replicate the single parsed value into
       every element of the range */
    if (!(flags & DIF_SIZE_ONLY) && nb_elems > 1) {
        if (p->sec || (type->t & VT_ARRAY)) {
            /* make init_putv/vstore believe it were a struct */
            t1.t = VT_STRUCT, t1.ref = &aref;
            vpush_ref(type, p->sec, c, elem_size);
            vset(type, VT_LOCAL|VT_LVAL, c);
        for (i = 1; i < nb_elems; i++) {
            init_putv(p, type, c + elem_size * i);
    c += nb_elems * elem_size;
/* store a value or an expression directly in global data or in local array */
static void init_putv(init_params *p, CType *type, unsigned long c)
    Section *sec = p->sec;
    dtype.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */
    size = type_size(type, &align);
    if (type->t & VT_BITFIELD)
        /* byte span actually touched by the bitfield */
        size = (BIT_POS(type->t) + BIT_SIZE(type->t) + 7) / 8;
    init_assert(p, c + size);
        /* static storage: write the bytes into the section directly */
        /* XXX: not portable */
        /* XXX: generate error if incorrect relocation */
        gen_assign_cast(&dtype);
        bt = type->t & VT_BTYPE;
        /* a symbolic address can only be stored into a pointer-sized,
           non-bitfield slot (the reloc patches the whole word) */
        if ((vtop->r & VT_SYM)
            && (bt != (PTR_SIZE == 8 ? VT_LLONG : VT_INT)
                || (type->t & VT_BITFIELD))
            && !((vtop->r & VT_CONST) && vtop->sym->v >= SYM_FIRST_ANOM)
            tcc_error("initializer element is not computable at load time");
        if (NODATA_WANTED) {
        ptr = sec->data + c;
        /* XXX: make code faster ? */
        if ((vtop->r & (VT_SYM|VT_CONST)) == (VT_SYM|VT_CONST) &&
            vtop->sym->v >= SYM_FIRST_ANOM &&
            /* XXX This rejects compound literals like
               '(void *){ptr}'. The problem is that '&sym' is
               represented the same way, which would be ruled out
               by the SYM_FIRST_ANOM check above, but also '"string"'
               in 'char *p = "string"' is represented the same
               with the type being VT_PTR and the symbol being an
               anonymous one. That is, there's no difference in vtop
               between '(void *){x}' and '&(void *){x}'. Ignore
               pointer typed entities here. Hopefully no real code
               will ever use compound literals with scalar type. */
            (vtop->type.t & VT_BTYPE) != VT_PTR) {
            /* These come from compound literals, memcpy stuff over. */
            esym = elfsym(vtop->sym);
            ssec = tcc_state->sections[esym->st_shndx];
            memmove (ptr, ssec->data + esym->st_value + (int)vtop->c.i, size);
                /* We need to copy over all memory contents, and that
                   includes relocations. Use the fact that relocs are
                   created it order, so look from the end of relocs
                   until we hit one before the copied region. */
                unsigned long relofs = ssec->reloc->data_offset;
                while (relofs >= sizeof(*rel)) {
                    relofs -= sizeof(*rel);
                    rel = (ElfW_Rel *)(ssec->reloc->data + relofs);
                    /* reloc past the copied region: keep scanning back */
                    if (rel->r_offset >= esym->st_value + size)
                    /* reloc before the copied region: done */
                    if (rel->r_offset < esym->st_value)
                    /* duplicate the reloc, rebased onto our destination */
                    put_elf_reloca(symtab_section, sec,
                                   c + rel->r_offset - esym->st_value,
                                   ELFW(R_TYPE)(rel->r_info),
                                   ELFW(R_SYM)(rel->r_info),
            if (type->t & VT_BITFIELD) {
                int bit_pos, bit_size, bits, n;
                unsigned char *p, v, m;
                bit_pos = BIT_POS(vtop->type.t);
                bit_size = BIT_SIZE(vtop->type.t);
                p = (unsigned char*)ptr + (bit_pos >> 3);
                bit_pos &= 7, bits = 0;
                    /* merge up to n bits of the value into the current byte */
                    v = val >> bits << bit_pos;
                    m = ((1 << n) - 1) << bit_pos;
                    *p = (*p & ~m) | (v & m);
                    bits += n, bit_size -= n, bit_pos = 0, ++p;
                    /* _Bool normalizes any non-zero value to 1 */
                    *(char *)ptr = val != 0;
                    write16le(ptr, val);
                    write32le(ptr, val);
                    write64le(ptr, val);
#if defined TCC_IS_NATIVE_387
                    /* Host and target platform may be different but both have x87.
                       On windows, tcc does not use VT_LDOUBLE, except when it is a
                       cross compiler. In this case a mingw gcc as host compiler
                       comes here with 10-byte long doubles, while msvc or tcc won't.
                       tcc itself can still translate by asm.
                       In any case we avoid possibly random bytes 11 and 12.
                    if (sizeof (long double) >= 10)
                        memcpy(ptr, &vtop->c.ld, 10);
                    else if (sizeof (long double) == sizeof (double))
                        __asm__("fldl %1\nfstpt %0\n" : "=m" (*ptr) : "m" (vtop->c.ld));
                    else if (vtop->c.ld == 0.0)
                    /* For other platforms it should work natively, but may not work
                       for cross compilers */
                    if (sizeof(long double) == LDOUBLE_SIZE)
                        memcpy(ptr, &vtop->c.ld, LDOUBLE_SIZE);
                    else if (sizeof(double) == LDOUBLE_SIZE)
                        memcpy(ptr, &vtop->c.ld, LDOUBLE_SIZE);
#ifndef TCC_CROSS_TEST
                        tcc_error("can't cross compile long double constants");
                    /* intptr_t may need a reloc too, see tcctest.c:relocation_test() */
                    if (vtop->r & VT_SYM)
                        greloca(sec, vtop->sym, c, R_DATA_PTR, val);
                    write64le(ptr, val);
                    write32le(ptr, val);
                    write64le(ptr, val);
                    /* pointer: emit relocation for symbolic addresses */
                    if (vtop->r & VT_SYM)
                        greloc(sec, vtop->sym, c, R_DATA_PTR);
                    write32le(ptr, val);
                //tcc_internal_error("unexpectedted type");
        /* automatic storage: generate an ordinary store to the stack slot */
        vset(&dtype, VT_LOCAL|VT_LVAL, c);
/* 't' contains the type and storage info. 'c' is the offset of the
   object in section 'sec'. If 'sec' is NULL, it means stack based
   allocation. 'flags & DIF_FIRST' is true if array '{' must be read (multi
   dimension implicit array init handling). 'flags & DIF_SIZE_ONLY' is true if
   size only evaluation is wanted (only for arrays). */
static void decl_initializer(init_params *p, CType *type, unsigned long c, int flags)
    int len, n, no_oblock, i;
    /* generate line number info */
    if (debug_modes && !p->sec)
        tcc_debug_line(tcc_state), tcc_tcov_check_line (tcc_state, 1);
    if (!(flags & DIF_HAVE_ELEM) && tok != '{' &&
        /* In case of strings we have special handling for arrays, so
           don't consume them as initializer value (which would commit them
           to some anonymous symbol). */
        tok != TOK_LSTR && tok != TOK_STR &&
        (!(flags & DIF_SIZE_ONLY)
        /* a struct may be initialized from a struct of same type, as in
           struct {int x,y;} a = {1,2}, b = {3,4}, c[] = {a,b};
           In that case we need to parse the element in order to check
           it for compatibility below */
         || (type->t & VT_BTYPE) == VT_STRUCT)
        int ncw_prev = nocode_wanted;
        /* size-only pass on the stack: parse without emitting code */
        if ((flags & DIF_SIZE_ONLY) && !p->sec)
        parse_init_elem(!p->sec ? EXPR_ANY : EXPR_CONST);
        nocode_wanted = ncw_prev;
        flags |= DIF_HAVE_ELEM;
    if (type->t & VT_ARRAY) {
        if (((flags & DIF_FIRST) && tok != TOK_LSTR && tok != TOK_STR) ||
        t1 = pointed_type(type);
        size1 = type_size(t1, &align1);
        /* only parse strings here if correct type (otherwise: handle
           them as ((w)char *) expressions */
        if ((tok == TOK_LSTR &&
#ifdef TCC_TARGET_PE
             (t1->t & VT_BTYPE) == VT_SHORT && (t1->t & VT_UNSIGNED)
             (t1->t & VT_BTYPE) == VT_INT
            ) || (tok == TOK_STR && (t1->t & VT_BTYPE) == VT_BYTE)) {
            /* concatenate adjacent string literals into initstr */
            cstr_reset(&initstr);
            if (size1 != (tok == TOK_STR ? 1 : sizeof(nwchar_t)))
                tcc_error("unhandled string literal merging");
            while (tok == TOK_STR || tok == TOK_LSTR) {
                    /* drop previous piece's terminating zero before appending */
                    initstr.size -= size1;
                    len += tokc.str.size;
                    len += tokc.str.size / sizeof(nwchar_t);
                cstr_cat(&initstr, tokc.str.data, tokc.str.size);
            if (tok != ')' && tok != '}' && tok != ',' && tok != ';'
                && tok != TOK_EOF) {
                /* Not a lone literal but part of a bigger expression. */
                unget_tok(size1 == 1 ? TOK_STR : TOK_LSTR);
                tokc.str.size = initstr.size;
                tokc.str.data = initstr.data;
            decl_design_flex(p, s, len);
            if (!(flags & DIF_SIZE_ONLY)) {
                    tcc_warning("initializer-string for array is too long");
                /* in order to go faster for common case (char
                   string in global variable, we handle it
                if (p->sec && size1 == 1) {
                    /* fast path: raw memcpy into the section data */
                    init_assert(p, c + nb);
                        memcpy(p->sec->data + c, initstr.data, nb);
                        /* only add trailing zero if enough storage (no
                           warning in this case since it is standard) */
                        if (flags & DIF_CLEAR)
                            init_putz(p, c + i * size1, (n - i) * size1);
                        } else if (size1 == 1)
                            ch = ((unsigned char *)initstr.data)[i];
                            ch = ((nwchar_t *)initstr.data)[i];
                        init_putv(p, t1, c + i * size1);
            /* zero memory once in advance */
            if (!(flags & (DIF_CLEAR | DIF_SIZE_ONLY))) {
                init_putz(p, c, n*size1);
            /* GNU extension: if the initializer is empty for a flex array,
               it's size is zero. We won't enter the loop, so set the size
            decl_design_flex(p, s, len);
            /* per-element (possibly designated) initializer loop */
            while (tok != '}' || (flags & DIF_HAVE_ELEM)) {
                len = decl_designator(p, type, c, &f, flags, len);
                flags &= ~DIF_HAVE_ELEM;
                if (type->t & VT_ARRAY) {
                    /* special test for multi dimensional arrays (may not
                       be strictly correct if designators are used at the
                    if (no_oblock && len >= n*size1)
                    /* a union initializes only its first named member */
                    if (s->type.t == VT_UNION)
            if (no_oblock && f == NULL)
    } else if ((flags & DIF_HAVE_ELEM)
        /* Use i_c_parameter_t, to strip toplevel qualifiers.
           The source type might have VT_CONSTANT set, which is
           of course assignable to non-const elements. */
               && is_compatible_unqualified_types(type, &vtop->type)) {
    } else if ((type->t & VT_BTYPE) == VT_STRUCT) {
        if ((flags & DIF_FIRST) || tok == '{') {
    } else if (tok == '{') {
        if (flags & DIF_HAVE_ELEM)
        /* scalar wrapped in redundant braces: recurse on the inner value */
        decl_initializer(p, type, c, flags & ~DIF_HAVE_ELEM);
    } else one_elem: if ((flags & DIF_SIZE_ONLY)) {
        /* If we supported only ISO C we wouldn't have to accept calling
           this on anything than an array if DIF_SIZE_ONLY (and even then
           only on the outermost level, so no recursion would be needed),
           because initializing a flex array member isn't supported.
           But GNU C supports it, so we need to recurse even into
           subfields of structs and arrays when DIF_SIZE_ONLY is set. */
        /* just skip expression */
        if (flags & DIF_HAVE_ELEM)
        skip_or_save_block(NULL);
        if (!(flags & DIF_HAVE_ELEM)) {
            /* This should happen only when we haven't parsed
               the init element above for fear of committing a
               string constant to memory too early. */
            if (tok != TOK_STR && tok != TOK_LSTR)
                expect("string constant");
            parse_init_elem(!p->sec ? EXPR_ANY : EXPR_CONST);
        /* skip storing a zero into already-zeroed local storage */
        if (!p->sec && (flags & DIF_CLEAR) /* container was already zero'd */
            && (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST
            && btype_size(type->t & VT_BTYPE) /* not for fp constants */
            init_putv(p, type, c);
/* parse an initializer for type 't' if 'has_init' is non zero, and
   allocate space in local or global data space ('r' is either
   VT_LOCAL or VT_CONST). If 'v' is non zero, then an associated
   variable 'v' of scope 'scope' is declared before initializers
   are parsed. If 'v' is zero, then a reference to the new object
   is put in the value stack. If 'has_init' is 2, a special parsing
   is done to handle string constants. */
static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r,
                                   int has_init, int v, int scope)
    int size, align, addr;
    TokenString *init_str = NULL;
    Sym *flexible_array;
    int saved_nocode_wanted = nocode_wanted;
#ifdef CONFIG_TCC_BCHECK
    int bcheck = tcc_state->do_bounds_check && !NODATA_WANTED;
    init_params p = {0};
    /* Always allocate static or global variables */
    if (v && (r & VT_VALMASK) == VT_CONST)
        nocode_wanted |= 0x80000000;
    flexible_array = NULL;
    size = type_size(type, &align);
    /* exactly one flexible array may be initialized, either the
       toplevel array or the last member of the toplevel struct */
        /* If the base type itself was an array type of unspecified size
           (like in 'typedef int arr[]; arr x = {1};') then we will
           overwrite the unknown size by the real one for this decl.
           We need to unshare the ref symbol holding that size. */
        type->ref = sym_push(SYM_FIELD, &type->ref->type, 0, type->ref->c);
        p.flex_array_ref = type->ref;
    } else if (has_init && (type->t & VT_BTYPE) == VT_STRUCT) {
        /* look for a trailing flexible array member of the struct */
        Sym *field = type->ref->next;
            field = field->next;
        if (field->type.t & VT_ARRAY && field->type.ref->c < 0) {
            flexible_array = field;
            p.flex_array_ref = field->type.ref;
    /* If unknown size, do a dry-run 1st pass */
        tcc_error("unknown type size");
        if (has_init == 2) {
            /* only get strings */
            init_str = tok_str_alloc();
            while (tok == TOK_STR || tok == TOK_LSTR) {
                tok_str_add_tok(init_str);
            tok_str_add(init_str, -1);
            tok_str_add(init_str, 0);
            skip_or_save_block(&init_str);
        /* replay the saved initializer tokens for a size-only pass */
        begin_macro(init_str, 1);
        decl_initializer(&p, type, 0, DIF_FIRST | DIF_SIZE_ONLY);
        /* prepare second initializer parsing */
        macro_ptr = init_str->str;
        /* if still unknown size, error */
        size = type_size(type, &align);
            tcc_error("unknown type size");
        /* If there's a flex member and it was used in the initializer
        if (flexible_array && flexible_array->type.ref->c > 0)
            size += flexible_array->type.ref->c
                    * pointed_size(&flexible_array->type);
    /* take into account specified alignment if bigger */
    if (ad->a.aligned) {
        int speca = 1 << (ad->a.aligned - 1);
    } else if (ad->a.packed) {
    if (!v && NODATA_WANTED)
        size = 0, align = 1;
    if ((r & VT_VALMASK) == VT_LOCAL) {
#ifdef CONFIG_TCC_BCHECK
            /* add padding between stack variables for bound checking */
        /* grow the frame downwards, keeping the requested alignment */
        loc = (loc - size) & -align;
        p.local_offset = addr + size;
#ifdef CONFIG_TCC_BCHECK
            /* add padding between stack variables for bound checking */
            /* local variable */
#ifdef CONFIG_TCC_ASM
        if (ad->asm_label) {
            /* asm("reg") register variable: bind to that register */
            int reg = asm_parse_regvar(ad->asm_label);
                r = (r & ~VT_VALMASK) | reg;
            sym = sym_push(v, type, r, addr);
            if (ad->cleanup_func) {
                /* register a __attribute__((cleanup)) handler for this sym */
                Sym *cls = sym_push2(&all_cleanups,
                                     SYM_FIELD | ++cur_scope->cl.n, 0, 0);
                cls->prev_tok = sym;
                cls->next = ad->cleanup_func;
                cls->ncl = cur_scope->cl.s;
                cur_scope->cl.s = cls;
            /* push local reference */
            vset(type, r, addr);
        if (v && scope == VT_CONST) {
            /* see if the symbol was already defined */
                if (p.flex_array_ref && (sym->type.t & type->t & VT_ARRAY)
                    && sym->type.ref->c > type->ref->c) {
                    /* flex array was already declared with explicit size
                       int arr[] = { 1,2,3 }; */
                    type->ref->c = sym->type.ref->c;
                    size = type_size(type, &align);
                patch_storage(sym, ad, type);
                /* we accept several definitions of the same global variable. */
                if (!has_init && sym->c && elfsym(sym)->st_shndx != SHN_UNDEF)
        /* allocate symbol in corresponding section */
            /* strip array-of-... layers to find the element's qualifiers */
            while ((tp->t & (VT_BTYPE|VT_ARRAY)) == (VT_PTR|VT_ARRAY))
                tp = &tp->ref->type;
            if (tp->t & VT_CONSTANT) {
                sec = rodata_section;
            } else if (has_init) {
                /*if (tcc_state->g_debug & 4)
                    tcc_warning("rw data: %s", get_tok_str(v, 0));*/
            } else if (tcc_state->nocommon)
            addr = section_add(sec, size, align);
#ifdef CONFIG_TCC_BCHECK
            /* add padding if bound check */
                section_add(sec, 1, 1);
            addr = align; /* SHN_COMMON is special, symbol value is align */
            sec = common_section;
            sym = sym_push(v, type, r | VT_SYM, 0);
            patch_storage(sym, ad, NULL);
            /* update symbol definition */
            put_extern_sym(sym, sec, addr, size);
            /* push global reference */
            vpush_ref(type, sec, addr, size);
#ifdef CONFIG_TCC_BCHECK
        /* handles bounds now because the symbol must be defined
           before for the relocation */
            greloca(bounds_section, sym, bounds_section->data_offset, R_DATA_PTR, 0);
            /* then add global bound info */
            bounds_ptr = section_ptr_add(bounds_section, 2 * sizeof(addr_t));
            bounds_ptr[0] = 0; /* relocated */
            bounds_ptr[1] = size;
    if (type->t & VT_VLA) {
        /* save before-VLA stack pointer if needed */
        if (cur_scope->vla.num == 0) {
            if (cur_scope->prev && cur_scope->prev->vla.num) {
                cur_scope->vla.locorig = cur_scope->prev->vla.loc;
                gen_vla_sp_save(loc -= PTR_SIZE);
                cur_scope->vla.locorig = loc;
        /* compute the runtime size and allocate on the stack */
        vpush_type_size(type, &a);
        gen_vla_alloc(type, a);
#if defined TCC_TARGET_PE && defined TCC_TARGET_X86_64
        /* on _WIN64, because of the function args scratch area, the
           result of alloca differs from RSP and is returned in RAX. */
        gen_vla_result(addr), addr = (loc -= PTR_SIZE);
        gen_vla_sp_save(addr);
        cur_scope->vla.loc = addr;
        cur_scope->vla.num++;
    } else if (has_init) {
        decl_initializer(&p, type, addr, DIF_FIRST);
        /* patch flexible array member size back to -1, */
        /* for possible subsequent similar declarations */
            flexible_array->type.ref->c = -1;
    /* restore parse state if needed */
    nocode_wanted = saved_nocode_wanted;
/* generate vla code saved in post_type() */
/* Emit, at function entry, the deferred size computation for one VLA
   parameter dimension; recurses into nested dimensions first. */
static void func_vla_arg_code(Sym *arg)
    TokenString *vla_array_tok = NULL;
    /* evaluate inner dimensions before the outer one */
    func_vla_arg_code(arg->type.ref);
    if ((arg->type.t & VT_VLA) && arg->type.ref->vla_array_str) {
        /* reserve a stack slot to hold the evaluated dimension */
        loc -= type_size(&int_type, &align);
        arg->type.ref->c = loc;
        /* replay the saved size-expression tokens as a macro stream */
        vla_array_tok = tok_str_alloc();
        vla_array_tok->str = arg->type.ref->vla_array_str;
        begin_macro(vla_array_tok, 1);
        vpush_type_size(&arg->type.ref->type, &align);
        /* store the computed size into the reserved slot */
        vset(&int_type, VT_LOCAL|VT_LVAL, arg->type.ref->c);
/* Walk the parameter list of function 'sym' and emit the saved VLA size
   computations for every variably-modified parameter. */
static void func_vla_arg(Sym *sym)
    for (arg = sym->type.ref->next; arg; arg = arg->next)
        if (arg->type.t & VT_VLA)
            func_vla_arg_code(arg);
/* parse a function defined by symbol 'sym' and generate its code in
   'cur_text_section' */
static void gen_function(Sym *sym)
    /* fresh root scope for this function body */
    struct scope f = { 0 };
    cur_scope = root_scope = &f;
    ind = cur_text_section->data_offset;
    if (sym->a.aligned) {
        /* pad the section up to the requested code alignment */
        size_t newoff = section_add(cur_text_section, 0,
                                    1 << (sym->a.aligned - 1));
        gen_fill_nops(newoff - ind);
    /* NOTE: we patch the symbol size later */
    put_extern_sym(sym, cur_text_section, ind, 0);
    /* register constructor/destructor functions */
    if (sym->type.ref->f.func_ctor)
        add_array (tcc_state, ".init_array", sym->c);
    if (sym->type.ref->f.func_dtor)
        add_array (tcc_state, ".fini_array", sym->c);
    funcname = get_tok_str(sym->v, NULL);
    /* cache return type and variadic flag for the code generator */
    func_vt = sym->type.ref->type;
    func_var = sym->type.ref->f.func_type == FUNC_ELLIPSIS;
    /* put debug symbol */
    tcc_debug_funcstart(tcc_state, sym);
    /* push a dummy symbol to enable local sym storage */
    sym_push2(&local_stack, SYM_FIELD, 0, 0);
    local_scope = 1; /* for function parameters */
    clear_temp_local_var_list();
    /* reset local stack */
    pop_local_syms(NULL, 0);
    cur_text_section->data_offset = ind;
    label_pop(&global_label_stack, NULL, 0);
    sym_pop(&all_cleanups, NULL, 0);
    /* patch symbol size */
    elfsym(sym)->st_size = ind - func_ind;
    /* end of function */
    tcc_debug_funcend(tcc_state, ind - func_ind);
    /* It's better to crash than to generate wrong code */
    cur_text_section = NULL;
    funcname = ""; /* for safety */
    func_vt.t = VT_VOID; /* for safety */
    func_var = 0; /* for safety */
    ind = 0; /* for safety */
    /* outside functions: suppress code, keep static data output
       (see STATIC_DATA_WANTED / NODATA_WANTED) */
    nocode_wanted = 0x80000000;
    /* do this after funcend debug info */
/* Emit code for all referenced 'static inline' functions whose bodies
   were recorded as token strings during parsing. Iterates to a fixed
   point because generating one inline function may reference another. */
static void gen_inline_functions(TCCState *s)
    int inline_generated, i;
    struct InlineFunc *fn;
    /* compile from a pseudo-file; bodies come from saved token streams */
    tcc_open_bf(s, ":inline:", 0);
    /* iterate while inline function are referenced */
        inline_generated = 0;
        for (i = 0; i < s->nb_inline_fns; ++i) {
            fn = s->inline_fns[i];
            if (sym && (sym->c || !(sym->type.t & VT_INLINE))) {
                /* the function was used or forced (and then not internal):
                   generate its code and convert it to a normal function */
                tcc_debug_putfile(s, fn->filename);
                /* replay the saved token stream of the function body */
                begin_macro(fn->func_str, 1);
                cur_text_section = text_section;
                inline_generated = 1;  /* may have created new references */
    } while (inline_generated);
/* Release the recorded bodies of inline functions that were never
   emitted, then drop the whole inline-function array. */
static void free_inline_functions(TCCState *s)
    /* free tokens of unused inline functions */
    for (i = 0; i < s->nb_inline_fns; ++i) {
        struct InlineFunc *fn = s->inline_fns[i];
            tok_str_free(fn->func_str);
    dynarray_reset(&s->inline_fns, &s->nb_inline_fns);
8167 /* 'l' is VT_LOCAL or VT_CONST to define default storage type, or VT_CMP
8168 if parsing old style parameter decl list (and FUNC_SYM is set then) */
8169 static int decl0(int l
, int is_for_loop_init
, Sym
*func_sym
)
8171 int v
, has_init
, r
, oldint
;
8174 AttributeDef ad
, adbase
;
8177 if (tok
== TOK_STATIC_ASSERT
) {
8187 tcc_error("_Static_assert fail");
8189 goto static_assert_out
;
8193 parse_mult_str(&error_str
, "string constant");
8195 tcc_error("%s", (char *)error_str
.data
);
8196 cstr_free(&error_str
);
8204 if (!parse_btype(&btype
, &adbase
)) {
8205 if (is_for_loop_init
)
8207 /* skip redundant ';' if not in old parameter decl scope */
8208 if (tok
== ';' && l
!= VT_CMP
) {
8214 if (tok
== TOK_ASM1
|| tok
== TOK_ASM2
|| tok
== TOK_ASM3
) {
8215 /* global asm block */
8219 if (tok
>= TOK_UIDENT
) {
8220 /* special test for old K&R protos without explicit int
8221 type. Only accepted when defining global data */
8226 expect("declaration");
8232 if ((btype
.t
& VT_BTYPE
) == VT_STRUCT
) {
8234 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) >= SYM_FIRST_ANOM
)
8235 tcc_warning("unnamed struct/union that defines no instances");
8239 if (IS_ENUM(btype
.t
)) {
8245 while (1) { /* iterate thru each declaration */
8248 type_decl(&type
, &ad
, &v
, TYPE_DIRECT
);
8252 type_to_str(buf
, sizeof(buf
), &type
, get_tok_str(v
, NULL
));
8253 printf("type = '%s'\n", buf
);
8256 if ((type
.t
& VT_BTYPE
) == VT_FUNC
) {
8257 if ((type
.t
& VT_STATIC
) && (l
== VT_LOCAL
))
8258 tcc_error("function without file scope cannot be static");
8259 /* if old style function prototype, we accept a
8262 if (sym
->f
.func_type
== FUNC_OLD
&& l
== VT_CONST
)
8263 decl0(VT_CMP
, 0, sym
);
8264 #ifdef TCC_TARGET_MACHO
8265 if (sym
->f
.func_alwinl
8266 && ((type
.t
& (VT_EXTERN
| VT_INLINE
))
8267 == (VT_EXTERN
| VT_INLINE
))) {
8268 /* always_inline functions must be handled as if they
8269 don't generate multiple global defs, even if extern
8270 inline, i.e. GNU inline semantics for those. Rewrite
8271 them into static inline. */
8272 type
.t
&= ~VT_EXTERN
;
8273 type
.t
|= VT_STATIC
;
8276 /* always compile 'extern inline' */
8277 if (type
.t
& VT_EXTERN
)
8278 type
.t
&= ~VT_INLINE
;
8280 } else if (oldint
) {
8281 tcc_warning("type defaults to int");
8284 if (gnu_ext
&& (tok
== TOK_ASM1
|| tok
== TOK_ASM2
|| tok
== TOK_ASM3
)) {
8285 ad
.asm_label
= asm_label_instr();
8286 /* parse one last attribute list, after asm label */
8287 parse_attribute(&ad
);
8289 /* gcc does not allow __asm__("label") with function definition,
8296 #ifdef TCC_TARGET_PE
8297 if (ad
.a
.dllimport
|| ad
.a
.dllexport
) {
8298 if (type
.t
& VT_STATIC
)
8299 tcc_error("cannot have dll linkage with static");
8300 if (type
.t
& VT_TYPEDEF
) {
8301 tcc_warning("'%s' attribute ignored for typedef",
8302 ad
.a
.dllimport
? (ad
.a
.dllimport
= 0, "dllimport") :
8303 (ad
.a
.dllexport
= 0, "dllexport"));
8304 } else if (ad
.a
.dllimport
) {
8305 if ((type
.t
& VT_BTYPE
) == VT_FUNC
)
8308 type
.t
|= VT_EXTERN
;
8314 tcc_error("cannot use local functions");
8315 if ((type
.t
& VT_BTYPE
) != VT_FUNC
)
8316 expect("function definition");
8318 /* reject abstract declarators in function definition
8319 make old style params without decl have int type */
8321 while ((sym
= sym
->next
) != NULL
) {
8322 if (!(sym
->v
& ~SYM_FIELD
))
8323 expect("identifier");
8324 if (sym
->type
.t
== VT_VOID
)
8325 sym
->type
= int_type
;
8328 /* apply post-declaraton attributes */
8329 merge_funcattr(&type
.ref
->f
, &ad
.f
);
8331 /* put function symbol */
8332 type
.t
&= ~VT_EXTERN
;
8333 sym
= external_sym(v
, &type
, 0, &ad
);
8335 /* static inline functions are just recorded as a kind
8336 of macro. Their code will be emitted at the end of
8337 the compilation unit only if they are used */
8338 if (sym
->type
.t
& VT_INLINE
) {
8339 struct InlineFunc
*fn
;
8340 fn
= tcc_malloc(sizeof *fn
+ strlen(file
->filename
));
8341 strcpy(fn
->filename
, file
->filename
);
8343 skip_or_save_block(&fn
->func_str
);
8344 dynarray_add(&tcc_state
->inline_fns
,
8345 &tcc_state
->nb_inline_fns
, fn
);
8347 /* compute text section */
8348 cur_text_section
= ad
.section
;
8349 if (!cur_text_section
)
8350 cur_text_section
= text_section
;
8356 /* find parameter in function parameter list */
8357 for (sym
= func_sym
->next
; sym
; sym
= sym
->next
)
8358 if ((sym
->v
& ~SYM_FIELD
) == v
)
8360 tcc_error("declaration for parameter '%s' but no such parameter",
8361 get_tok_str(v
, NULL
));
8363 if (type
.t
& VT_STORAGE
) /* 'register' is okay */
8364 tcc_error("storage class specified for '%s'",
8365 get_tok_str(v
, NULL
));
8366 if (sym
->type
.t
!= VT_VOID
)
8367 tcc_error("redefinition of parameter '%s'",
8368 get_tok_str(v
, NULL
));
8369 convert_parameter_type(&type
);
8371 } else if (type
.t
& VT_TYPEDEF
) {
8372 /* save typedefed type */
8373 /* XXX: test storage specifiers ? */
8375 if (sym
&& sym
->sym_scope
== local_scope
) {
8376 if (!is_compatible_types(&sym
->type
, &type
)
8377 || !(sym
->type
.t
& VT_TYPEDEF
))
8378 tcc_error("incompatible redefinition of '%s'",
8379 get_tok_str(v
, NULL
));
8382 sym
= sym_push(v
, &type
, 0, 0);
8387 tcc_debug_typedef (tcc_state
, sym
);
8388 } else if ((type
.t
& VT_BTYPE
) == VT_VOID
8389 && !(type
.t
& VT_EXTERN
)) {
8390 tcc_error("declaration of void object");
8393 if ((type
.t
& VT_BTYPE
) == VT_FUNC
) {
8394 /* external function definition */
8395 /* specific case for func_call attribute */
8397 } else if (!(type
.t
& VT_ARRAY
)) {
8398 /* not lvalue if array */
8401 has_init
= (tok
== '=');
8402 if (has_init
&& (type
.t
& VT_VLA
))
8403 tcc_error("variable length array cannot be initialized");
8404 if (((type
.t
& VT_EXTERN
) && (!has_init
|| l
!= VT_CONST
))
8405 || (type
.t
& VT_BTYPE
) == VT_FUNC
8406 /* as with GCC, uninitialized global arrays with no size
8407 are considered extern: */
8408 || ((type
.t
& VT_ARRAY
) && !has_init
8409 && l
== VT_CONST
&& type
.ref
->c
< 0)
8411 /* external variable or function */
8412 type
.t
|= VT_EXTERN
;
8413 sym
= external_sym(v
, &type
, r
, &ad
);
8414 if (ad
.alias_target
) {
8415 /* Aliases need to be emitted when their target
8416 symbol is emitted, even if perhaps unreferenced.
8417 We only support the case where the base is
8418 already defined, otherwise we would need
8419 deferring to emit the aliases until the end of
8420 the compile unit. */
8421 Sym
*alias_target
= sym_find(ad
.alias_target
);
8422 ElfSym
*esym
= elfsym(alias_target
);
8424 tcc_error("unsupported forward __alias__ attribute");
8425 put_extern_sym2(sym
, esym
->st_shndx
,
8426 esym
->st_value
, esym
->st_size
, 1);
8429 if (type
.t
& VT_STATIC
)
8435 else if (l
== VT_CONST
)
8436 /* uninitialized global variables may be overridden */
8437 type
.t
|= VT_EXTERN
;
8438 decl_initializer_alloc(&type
, &ad
, r
, has_init
, v
, l
);
8442 if (is_for_loop_init
)
8454 static void decl(int l
)
8459 /* ------------------------------------------------------------------------- */
8462 /* ------------------------------------------------------------------------- */