2 * TCC - Tiny C Compiler
4 * Copyright (c) 2001-2004 Fabrice Bellard
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
24 /********************************************************/
25 /* global variables */
/* loc : local variable index
   ind : output code index
   anon_sym: anonymous symbol index
   rsym: NOTE(review): presumably the return-label symbol of the current
   function — not visible in this chunk, confirm against users of rsym */
ST_DATA int rsym, anon_sym, ind, loc;
/* symbol stacks (singly linked Sym lists, most recently pushed first) */
ST_DATA Sym *global_stack;        /* file-scope symbols */
ST_DATA Sym *local_stack;         /* function/block-scope symbols */
ST_DATA Sym *define_stack;        /* preprocessor macro definitions */
ST_DATA Sym *global_label_stack;  /* NOTE(review): goto-label stacks — confirm */
ST_DATA Sym *local_label_stack;
/* Sym allocator state: Syms are carved from pools and recycled via a
   free list threaded through Sym->next (see __sym_malloc/sym_free) */
static Sym *sym_free_first;   /* head of the free list */
static void **sym_pools;      /* dynarray of allocated pools */
static int nb_sym_pools;      /* number of entries in sym_pools */

static Sym *all_cleanups, *pending_gotos;
static int local_scope;       /* current lexical scope depth; 0 at file scope */
/* bit 0: -g debug info requested, bit 1: test coverage (set in tccgen_compile
   from s1->do_debug / s1->test_coverage) */
ST_DATA char debug_modes;

/* value stack; one extra slot below vstack[0] so that vtop may legally
   point to vstack - 1 when the stack is empty (see check_vstack) */
static SValue _vstack[1 + VSTACK_SIZE];
#define vstack (_vstack + 1)

ST_DATA int const_wanted; /* true if constant wanted */
ST_DATA int nocode_wanted; /* no code generation wanted */
#define unevalmask 0xffff /* unevaluated subexpression */
#define NODATA_WANTED (nocode_wanted > 0) /* no static data output wanted either */
#define STATIC_DATA_WANTED (nocode_wanted & 0xC0000000) /* only static data output */
/* Automagical code suppression ----> */
/* CODE_OFF sets, and CODE_ON clears, a dedicated suppression bit inside
   nocode_wanted; other bits of nocode_wanted are left untouched */
#define CODE_OFF() (nocode_wanted |= 0x20000000)
#define CODE_ON() (nocode_wanted &= ~0x20000000)
63 /* Clear 'nocode_wanted' at label if it was used */
64 ST_FUNC
void gsym(int t
) { if (t
) { gsym_addr(t
, ind
); CODE_ON(); }}
65 static int gind(void) { int t
= ind
; CODE_ON(); if (debug_modes
) tcc_tcov_block_begin(tcc_state
); return t
; }
/* Set 'nocode_wanted' after unconditional jumps */
/* Emit an unconditional jump to address 't', then suppress code
   generation for the now-unreachable fallthrough. */
static void gjmp_addr_acs(int t)
{
    gjmp_addr(t);
    CODE_OFF();
}
/* Emit a forward jump chained onto 't', suppress code generation for the
   unreachable fallthrough, and return the new jump-chain head. */
static int gjmp_acs(int t)
{
    int chain = gjmp(t);
    CODE_OFF();
    return chain;
}
71 /* These are #undef'd at the end of this file */
72 #define gjmp_addr gjmp_addr_acs
76 ST_DATA
int global_expr
; /* true if compound literals must be allocated globally (used during initializers parsing */
77 ST_DATA CType func_vt
; /* current function return type (used by return instruction) */
78 ST_DATA
int func_var
; /* true if current function is variadic (used by return instruction) */
81 ST_DATA
const char *funcname
;
82 ST_DATA CType int_type
, func_old_type
, char_type
, char_pointer_type
;
83 static CString initstr
;
86 #define VT_SIZE_T (VT_INT | VT_UNSIGNED)
87 #define VT_PTRDIFF_T VT_INT
89 #define VT_SIZE_T (VT_LLONG | VT_UNSIGNED)
90 #define VT_PTRDIFF_T VT_LLONG
92 #define VT_SIZE_T (VT_LONG | VT_LLONG | VT_UNSIGNED)
93 #define VT_PTRDIFF_T (VT_LONG | VT_LLONG)
96 static struct switch_t
{
100 } **p
; int n
; /* list of case ranges */
101 int def_sym
; /* default symbol */
104 struct switch_t
*prev
;
106 } *cur_switch
; /* current switch */
108 #define MAX_TEMP_LOCAL_VARIABLE_NUMBER 8
109 /*list of temporary local variables on the stack in current function. */
110 static struct temp_local_variable
{
111 int location
; //offset on stack. Svalue.c.i
114 } arr_temp_local_vars
[MAX_TEMP_LOCAL_VARIABLE_NUMBER
];
115 static int nb_temp_local_vars
;
117 static struct scope
{
119 struct { int loc
, locorig
, num
; } vla
;
120 struct { Sym
*s
; int n
; } cl
;
123 } *cur_scope
, *loop_scope
, *root_scope
;
#define precedence_parser
static void init_prec(void);

/* ---- forward declarations (definitions appear later in this file) ---- */
static void gen_cast(CType *type);
static void gen_cast_s(int t);
static inline CType *pointed_type(CType *type);
static int is_compatible_types(CType *type1, CType *type2);
static int parse_btype(CType *type, AttributeDef *ad, int ignore_label);
static CType *type_decl(CType *type, AttributeDef *ad, int *v, int td);
static void parse_expr_type(CType *type);
static void init_putv(init_params *p, CType *type, unsigned long c);
static void decl_initializer(init_params *p, CType *type, unsigned long c, int flags);
static void block(int is_expr);
static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r, int has_init, int v, int scope);
static void decl(int l);
static int decl0(int l, int is_for_loop_init, Sym *);
static void expr_eq(void);
static void vpush_type_size(CType *type, int *a);
static int is_compatible_unqualified_types(CType *type1, CType *type2);
static inline int64_t expr_const64(void);
static void vpush64(int ty, unsigned long long v);
static void vpush(CType *type);
static int gvtst(int inv, int t);
static void gen_inline_functions(TCCState *s);
static void free_inline_functions(TCCState *s);
static void skip_or_save_block(TokenString **str);
static void gv_dup(void);
static int get_temp_local_var(int size,int align);
static void clear_temp_local_var_list();
static void cast_error(CType *st, CType *dt);
164 /* ------------------------------------------------------------------------- */
166 ST_INLN
int is_float(int t
)
168 int bt
= t
& VT_BTYPE
;
169 return bt
== VT_LDOUBLE
175 static inline int is_integer_btype(int bt
)
184 static int btype_size(int bt
)
186 return bt
== VT_BYTE
|| bt
== VT_BOOL
? 1 :
190 bt
== VT_PTR
? PTR_SIZE
: 0;
193 /* returns function return register from type */
194 static int R_RET(int t
)
198 #ifdef TCC_TARGET_X86_64
199 if ((t
& VT_BTYPE
) == VT_LDOUBLE
)
201 #elif defined TCC_TARGET_RISCV64
202 if ((t
& VT_BTYPE
) == VT_LDOUBLE
)
208 /* returns 2nd function return register, if any */
209 static int R2_RET(int t
)
215 #elif defined TCC_TARGET_X86_64
220 #elif defined TCC_TARGET_RISCV64
227 /* returns true for two-word types */
228 #define USING_TWO_WORDS(t) (R2_RET(t) != VT_CONST)
230 /* put function return registers to stack value */
231 static void PUT_R_RET(SValue
*sv
, int t
)
233 sv
->r
= R_RET(t
), sv
->r2
= R2_RET(t
);
236 /* returns function return register class for type t */
237 static int RC_RET(int t
)
239 return reg_classes
[R_RET(t
)] & ~(RC_FLOAT
| RC_INT
);
242 /* returns generic register class for type t */
243 static int RC_TYPE(int t
)
247 #ifdef TCC_TARGET_X86_64
248 if ((t
& VT_BTYPE
) == VT_LDOUBLE
)
250 if ((t
& VT_BTYPE
) == VT_QFLOAT
)
252 #elif defined TCC_TARGET_RISCV64
253 if ((t
& VT_BTYPE
) == VT_LDOUBLE
)
259 /* returns 2nd register class corresponding to t and rc */
260 static int RC2_TYPE(int t
, int rc
)
262 if (!USING_TWO_WORDS(t
))
277 /* we use our own 'finite' function to avoid potential problems with
278 non standard math libs */
279 /* XXX: endianness dependent */
280 ST_FUNC
int ieee_finite(double d
)
283 memcpy(p
, &d
, sizeof(double));
284 return ((unsigned)((p
[1] | 0x800fffff) + 1)) >> 31;
287 /* compiling intel long double natively */
288 #if (defined __i386__ || defined __x86_64__) \
289 && (defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64)
290 # define TCC_IS_NATIVE_387
293 ST_FUNC
void test_lvalue(void)
295 if (!(vtop
->r
& VT_LVAL
))
299 ST_FUNC
void check_vstack(void)
301 if (vtop
!= vstack
- 1)
302 tcc_error("internal compiler error: vstack leak (%d)",
303 (int)(vtop
- vstack
+ 1));
306 /* vstack debugging aid */
308 void pv (const char *lbl
, int a
, int b
)
311 for (i
= a
; i
< a
+ b
; ++i
) {
312 SValue
*p
= &vtop
[-i
];
313 printf("%s vtop[-%d] : type.t:%04x r:%04x r2:%04x c.i:%d\n",
314 lbl
, i
, p
->type
.t
, p
->r
, p
->r2
, (int)p
->c
.i
);
319 /* ------------------------------------------------------------------------- */
320 /* initialize vstack and types. This must be done also for tcc -E */
321 ST_FUNC
void tccgen_init(TCCState
*s1
)
324 memset(vtop
, 0, sizeof *vtop
);
326 /* define some often used types */
329 char_type
.t
= VT_BYTE
;
330 if (s1
->char_is_unsigned
)
331 char_type
.t
|= VT_UNSIGNED
;
332 char_pointer_type
= char_type
;
333 mk_pointer(&char_pointer_type
);
335 func_old_type
.t
= VT_FUNC
;
336 func_old_type
.ref
= sym_push(SYM_FIELD
, &int_type
, 0, 0);
337 func_old_type
.ref
->f
.func_call
= FUNC_CDECL
;
338 func_old_type
.ref
->f
.func_type
= FUNC_OLD
;
339 #ifdef precedence_parser
345 ST_FUNC
int tccgen_compile(TCCState
*s1
)
347 cur_text_section
= NULL
;
350 anon_sym
= SYM_FIRST_ANOM
;
352 nocode_wanted
= 0x80000000;
354 debug_modes
= (s1
->do_debug
? 1 : 0) | s1
->test_coverage
<< 1;
358 #ifdef TCC_TARGET_ARM
362 printf("%s: **** new file\n", file
->filename
);
364 parse_flags
= PARSE_FLAG_PREPROCESS
| PARSE_FLAG_TOK_NUM
| PARSE_FLAG_TOK_STR
;
367 gen_inline_functions(s1
);
369 /* end of translation unit info */
375 ST_FUNC
void tccgen_finish(TCCState
*s1
)
378 free_inline_functions(s1
);
379 sym_pop(&global_stack
, NULL
, 0);
380 sym_pop(&local_stack
, NULL
, 0);
381 /* free preprocessor macros */
384 dynarray_reset(&sym_pools
, &nb_sym_pools
);
385 sym_free_first
= NULL
;
388 /* ------------------------------------------------------------------------- */
389 ST_FUNC ElfSym
*elfsym(Sym
*s
)
393 return &((ElfSym
*)symtab_section
->data
)[s
->c
];
396 /* apply storage attributes to Elf symbol */
397 ST_FUNC
void update_storage(Sym
*sym
)
400 int sym_bind
, old_sym_bind
;
406 if (sym
->a
.visibility
)
407 esym
->st_other
= (esym
->st_other
& ~ELFW(ST_VISIBILITY
)(-1))
410 if (sym
->type
.t
& (VT_STATIC
| VT_INLINE
))
411 sym_bind
= STB_LOCAL
;
412 else if (sym
->a
.weak
)
415 sym_bind
= STB_GLOBAL
;
416 old_sym_bind
= ELFW(ST_BIND
)(esym
->st_info
);
417 if (sym_bind
!= old_sym_bind
) {
418 esym
->st_info
= ELFW(ST_INFO
)(sym_bind
, ELFW(ST_TYPE
)(esym
->st_info
));
422 if (sym
->a
.dllimport
)
423 esym
->st_other
|= ST_PE_IMPORT
;
424 if (sym
->a
.dllexport
)
425 esym
->st_other
|= ST_PE_EXPORT
;
429 printf("storage %s: bind=%c vis=%d exp=%d imp=%d\n",
430 get_tok_str(sym
->v
, NULL
),
431 sym_bind
== STB_WEAK
? 'w' : sym_bind
== STB_LOCAL
? 'l' : 'g',
439 /* ------------------------------------------------------------------------- */
440 /* update sym->c so that it points to an external symbol in section
441 'section' with value 'value' */
443 ST_FUNC
void put_extern_sym2(Sym
*sym
, int sh_num
,
444 addr_t value
, unsigned long size
,
445 int can_add_underscore
)
447 int sym_type
, sym_bind
, info
, other
, t
;
453 name
= get_tok_str(sym
->v
, NULL
);
455 if ((t
& VT_BTYPE
) == VT_FUNC
) {
457 } else if ((t
& VT_BTYPE
) == VT_VOID
) {
458 sym_type
= STT_NOTYPE
;
459 if ((t
& (VT_BTYPE
|VT_ASM_FUNC
)) == VT_ASM_FUNC
)
462 sym_type
= STT_OBJECT
;
464 if (t
& (VT_STATIC
| VT_INLINE
))
465 sym_bind
= STB_LOCAL
;
467 sym_bind
= STB_GLOBAL
;
471 if (sym_type
== STT_FUNC
&& sym
->type
.ref
) {
472 Sym
*ref
= sym
->type
.ref
;
473 if (ref
->a
.nodecorate
) {
474 can_add_underscore
= 0;
476 if (ref
->f
.func_call
== FUNC_STDCALL
&& can_add_underscore
) {
477 sprintf(buf1
, "_%s@%d", name
, ref
->f
.func_args
* PTR_SIZE
);
479 other
|= ST_PE_STDCALL
;
480 can_add_underscore
= 0;
485 if (sym
->asm_label
) {
486 name
= get_tok_str(sym
->asm_label
, NULL
);
487 can_add_underscore
= 0;
490 if (tcc_state
->leading_underscore
&& can_add_underscore
) {
492 pstrcpy(buf1
+ 1, sizeof(buf1
) - 1, name
);
496 info
= ELFW(ST_INFO
)(sym_bind
, sym_type
);
497 sym
->c
= put_elf_sym(symtab_section
, value
, size
, info
, other
, sh_num
, name
);
500 tcc_debug_extern_sym(tcc_state
, sym
, sh_num
, sym_bind
, sym_type
);
504 esym
->st_value
= value
;
505 esym
->st_size
= size
;
506 esym
->st_shndx
= sh_num
;
511 ST_FUNC
void put_extern_sym(Sym
*sym
, Section
*s
, addr_t value
, unsigned long size
)
513 if (nocode_wanted
&& (NODATA_WANTED
|| (s
&& s
== cur_text_section
)))
515 put_extern_sym2(sym
, s
? s
->sh_num
: SHN_UNDEF
, value
, size
, 1);
518 /* add a new relocation entry to symbol 'sym' in section 's' */
519 ST_FUNC
void greloca(Section
*s
, Sym
*sym
, unsigned long offset
, int type
,
524 if (nocode_wanted
&& s
== cur_text_section
)
529 put_extern_sym(sym
, NULL
, 0, 0);
533 /* now we can add ELF relocation info */
534 put_elf_reloca(symtab_section
, s
, offset
, type
, c
, addend
);
538 ST_FUNC
void greloc(Section
*s
, Sym
*sym
, unsigned long offset
, int type
)
540 greloca(s
, sym
, offset
, type
, 0);
544 /* ------------------------------------------------------------------------- */
545 /* symbol allocator */
546 static Sym
*__sym_malloc(void)
548 Sym
*sym_pool
, *sym
, *last_sym
;
551 sym_pool
= tcc_malloc(SYM_POOL_NB
* sizeof(Sym
));
552 dynarray_add(&sym_pools
, &nb_sym_pools
, sym_pool
);
554 last_sym
= sym_free_first
;
556 for(i
= 0; i
< SYM_POOL_NB
; i
++) {
557 sym
->next
= last_sym
;
561 sym_free_first
= last_sym
;
565 static inline Sym
*sym_malloc(void)
569 sym
= sym_free_first
;
571 sym
= __sym_malloc();
572 sym_free_first
= sym
->next
;
575 sym
= tcc_malloc(sizeof(Sym
));
580 ST_INLN
void sym_free(Sym
*sym
)
583 sym
->next
= sym_free_first
;
584 sym_free_first
= sym
;
590 /* push, without hashing */
591 ST_FUNC Sym
*sym_push2(Sym
**ps
, int v
, int t
, int c
)
596 memset(s
, 0, sizeof *s
);
606 /* find a symbol and return its associated structure. 's' is the top
607 of the symbol stack */
608 ST_FUNC Sym
*sym_find2(Sym
*s
, int v
)
620 /* structure lookup */
621 ST_INLN Sym
*struct_find(int v
)
624 if ((unsigned)v
>= (unsigned)(tok_ident
- TOK_IDENT
))
626 return table_ident
[v
]->sym_struct
;
629 /* find an identifier */
630 ST_INLN Sym
*sym_find(int v
)
633 if ((unsigned)v
>= (unsigned)(tok_ident
- TOK_IDENT
))
635 return table_ident
[v
]->sym_identifier
;
638 static int sym_scope(Sym
*s
)
640 if (IS_ENUM_VAL (s
->type
.t
))
641 return s
->type
.ref
->sym_scope
;
646 /* push a given symbol on the symbol stack */
647 ST_FUNC Sym
*sym_push(int v
, CType
*type
, int r
, int c
)
656 s
= sym_push2(ps
, v
, type
->t
, c
);
657 s
->type
.ref
= type
->ref
;
659 /* don't record fields or anonymous symbols */
661 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
662 /* record symbol in token array */
663 ts
= table_ident
[(v
& ~SYM_STRUCT
) - TOK_IDENT
];
665 ps
= &ts
->sym_struct
;
667 ps
= &ts
->sym_identifier
;
670 s
->sym_scope
= local_scope
;
671 if (s
->prev_tok
&& sym_scope(s
->prev_tok
) == s
->sym_scope
)
672 tcc_error("redeclaration of '%s'",
673 get_tok_str(v
& ~SYM_STRUCT
, NULL
));
678 /* push a global identifier */
679 ST_FUNC Sym
*global_identifier_push(int v
, int t
, int c
)
682 s
= sym_push2(&global_stack
, v
, t
, c
);
683 s
->r
= VT_CONST
| VT_SYM
;
684 /* don't record anonymous symbol */
685 if (v
< SYM_FIRST_ANOM
) {
686 ps
= &table_ident
[v
- TOK_IDENT
]->sym_identifier
;
687 /* modify the top most local identifier, so that sym_identifier will
688 point to 's' when popped; happens when called from inline asm */
689 while (*ps
!= NULL
&& (*ps
)->sym_scope
)
690 ps
= &(*ps
)->prev_tok
;
697 /* pop symbols until top reaches 'b'. If KEEP is non-zero don't really
698 pop them yet from the list, but do remove them from the token array. */
699 ST_FUNC
void sym_pop(Sym
**ptop
, Sym
*b
, int keep
)
709 /* remove symbol in token array */
711 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
712 ts
= table_ident
[(v
& ~SYM_STRUCT
) - TOK_IDENT
];
714 ps
= &ts
->sym_struct
;
716 ps
= &ts
->sym_identifier
;
727 /* ------------------------------------------------------------------------- */
728 static void vcheck_cmp(void)
730 /* cannot let cpu flags if other instruction are generated. Also
731 avoid leaving VT_JMP anywhere except on the top of the stack
732 because it would complicate the code generator.
734 Don't do this when nocode_wanted. vtop might come from
735 !nocode_wanted regions (see 88_codeopt.c) and transforming
736 it to a register without actually generating code is wrong
737 as their value might still be used for real. All values
738 we push under nocode_wanted will eventually be popped
739 again, so that the VT_CMP/VT_JMP value will be in vtop
740 when code is unsuppressed again. */
742 if (vtop
->r
== VT_CMP
&& !nocode_wanted
)
746 static void vsetc(CType
*type
, int r
, CValue
*vc
)
748 if (vtop
>= vstack
+ (VSTACK_SIZE
- 1))
749 tcc_error("memory full (vstack)");
759 ST_FUNC
void vswap(void)
769 /* pop stack value */
770 ST_FUNC
void vpop(void)
773 v
= vtop
->r
& VT_VALMASK
;
774 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
775 /* for x86, we need to pop the FP stack */
777 o(0xd8dd); /* fstp %st(0) */
781 /* need to put correct jump if && or || without test */
788 /* push constant of type "type" with useless value */
789 static void vpush(CType
*type
)
791 vset(type
, VT_CONST
, 0);
794 /* push arbitrary 64bit constant */
795 static void vpush64(int ty
, unsigned long long v
)
802 vsetc(&ctype
, VT_CONST
, &cval
);
805 /* push integer constant */
806 ST_FUNC
void vpushi(int v
)
811 /* push a pointer sized constant */
812 static void vpushs(addr_t v
)
814 vpush64(VT_SIZE_T
, v
);
817 /* push long long constant */
818 static inline void vpushll(long long v
)
820 vpush64(VT_LLONG
, v
);
823 ST_FUNC
void vset(CType
*type
, int r
, int v
)
827 vsetc(type
, r
, &cval
);
830 static void vseti(int r
, int v
)
838 ST_FUNC
void vpushv(SValue
*v
)
840 if (vtop
>= vstack
+ (VSTACK_SIZE
- 1))
841 tcc_error("memory full (vstack)");
846 static void vdup(void)
851 /* rotate n first stack elements to the bottom
852 I1 ... In -> I2 ... In I1 [top is right]
854 ST_FUNC
void vrotb(int n
)
866 /* rotate the n elements before entry e towards the top
867 I1 ... In ... -> In I1 ... I(n-1) ... [top is right]
869 ST_FUNC
void vrote(SValue
*e
, int n
)
876 for(i
= 0;i
< n
- 1; i
++)
881 /* rotate n first stack elements to the top
882 I1 ... In -> In I1 ... I(n-1) [top is right]
884 ST_FUNC
void vrott(int n
)
889 /* ------------------------------------------------------------------------- */
890 /* vtop->r = VT_CMP means CPU-flags have been set from comparison or test. */
892 /* called from generators to set the result from relational ops */
893 ST_FUNC
void vset_VT_CMP(int op
)
901 /* called once before asking generators to load VT_CMP to a register */
902 static void vset_VT_JMP(void)
904 int op
= vtop
->cmp_op
;
906 if (vtop
->jtrue
|| vtop
->jfalse
) {
907 /* we need to jump to 'mov $0,%R' or 'mov $1,%R' */
908 int inv
= op
& (op
< 2); /* small optimization */
909 vseti(VT_JMP
+inv
, gvtst(inv
, 0));
911 /* otherwise convert flags (rsp. 0/1) to register */
913 if (op
< 2) /* doesn't seem to happen */
918 /* Set CPU Flags, doesn't yet jump */
919 static void gvtst_set(int inv
, int t
)
923 if (vtop
->r
!= VT_CMP
) {
926 if (vtop
->r
!= VT_CMP
) /* must be VT_CONST then */
927 vset_VT_CMP(vtop
->c
.i
!= 0);
930 p
= inv
? &vtop
->jfalse
: &vtop
->jtrue
;
931 *p
= gjmp_append(*p
, t
);
934 /* Generate value test
936 * Generate a test for any value (jump, comparison and integers) */
937 static int gvtst(int inv
, int t
)
942 t
= vtop
->jtrue
, u
= vtop
->jfalse
;
947 /* jump to the wanted target */
949 t
= gjmp_cond(op
^ inv
, t
);
952 /* resolve complementary jumps to here */
959 /* generate a zero or nozero test */
960 static void gen_test_zero(int op
)
962 if (vtop
->r
== VT_CMP
) {
966 vtop
->jfalse
= vtop
->jtrue
;
976 /* ------------------------------------------------------------------------- */
977 /* push a symbol value of TYPE */
978 ST_FUNC
void vpushsym(CType
*type
, Sym
*sym
)
982 vsetc(type
, VT_CONST
| VT_SYM
, &cval
);
986 /* Return a static symbol pointing to a section */
987 ST_FUNC Sym
*get_sym_ref(CType
*type
, Section
*sec
, unsigned long offset
, unsigned long size
)
993 sym
= sym_push(v
, type
, VT_CONST
| VT_SYM
, 0);
994 sym
->type
.t
|= VT_STATIC
;
995 put_extern_sym(sym
, sec
, offset
, size
);
999 /* push a reference to a section offset by adding a dummy symbol */
1000 static void vpush_ref(CType
*type
, Section
*sec
, unsigned long offset
, unsigned long size
)
1002 vpushsym(type
, get_sym_ref(type
, sec
, offset
, size
));
1005 /* define a new external reference to a symbol 'v' of type 'u' */
1006 ST_FUNC Sym
*external_global_sym(int v
, CType
*type
)
1012 /* push forward reference */
1013 s
= global_identifier_push(v
, type
->t
| VT_EXTERN
, 0);
1014 s
->type
.ref
= type
->ref
;
1015 } else if (IS_ASM_SYM(s
)) {
1016 s
->type
.t
= type
->t
| (s
->type
.t
& VT_EXTERN
);
1017 s
->type
.ref
= type
->ref
;
1023 /* create an external reference with no specific type similar to asm labels.
1024 This avoids type conflicts if the symbol is used from C too */
1025 ST_FUNC Sym
*external_helper_sym(int v
)
1027 CType ct
= { VT_ASM_FUNC
, NULL
};
1028 return external_global_sym(v
, &ct
);
1031 /* push a reference to an helper function (such as memmove) */
1032 ST_FUNC
void vpush_helper_func(int v
)
1034 vpushsym(&func_old_type
, external_helper_sym(v
));
1037 /* Merge symbol attributes. */
1038 static void merge_symattr(struct SymAttr
*sa
, struct SymAttr
*sa1
)
1040 if (sa1
->aligned
&& !sa
->aligned
)
1041 sa
->aligned
= sa1
->aligned
;
1042 sa
->packed
|= sa1
->packed
;
1043 sa
->weak
|= sa1
->weak
;
1044 if (sa1
->visibility
!= STV_DEFAULT
) {
1045 int vis
= sa
->visibility
;
1046 if (vis
== STV_DEFAULT
1047 || vis
> sa1
->visibility
)
1048 vis
= sa1
->visibility
;
1049 sa
->visibility
= vis
;
1051 sa
->dllexport
|= sa1
->dllexport
;
1052 sa
->nodecorate
|= sa1
->nodecorate
;
1053 sa
->dllimport
|= sa1
->dllimport
;
1056 /* Merge function attributes. */
1057 static void merge_funcattr(struct FuncAttr
*fa
, struct FuncAttr
*fa1
)
1059 if (fa1
->func_call
&& !fa
->func_call
)
1060 fa
->func_call
= fa1
->func_call
;
1061 if (fa1
->func_type
&& !fa
->func_type
)
1062 fa
->func_type
= fa1
->func_type
;
1063 if (fa1
->func_args
&& !fa
->func_args
)
1064 fa
->func_args
= fa1
->func_args
;
1065 if (fa1
->func_noreturn
)
1066 fa
->func_noreturn
= 1;
1073 /* Merge attributes. */
1074 static void merge_attr(AttributeDef
*ad
, AttributeDef
*ad1
)
1076 merge_symattr(&ad
->a
, &ad1
->a
);
1077 merge_funcattr(&ad
->f
, &ad1
->f
);
1080 ad
->section
= ad1
->section
;
1081 if (ad1
->alias_target
)
1082 ad
->alias_target
= ad1
->alias_target
;
1084 ad
->asm_label
= ad1
->asm_label
;
1086 ad
->attr_mode
= ad1
->attr_mode
;
1089 /* Merge some type attributes. */
1090 static void patch_type(Sym
*sym
, CType
*type
)
1092 if (!(type
->t
& VT_EXTERN
) || IS_ENUM_VAL(sym
->type
.t
)) {
1093 if (!(sym
->type
.t
& VT_EXTERN
))
1094 tcc_error("redefinition of '%s'", get_tok_str(sym
->v
, NULL
));
1095 sym
->type
.t
&= ~VT_EXTERN
;
1098 if (IS_ASM_SYM(sym
)) {
1099 /* stay static if both are static */
1100 sym
->type
.t
= type
->t
& (sym
->type
.t
| ~VT_STATIC
);
1101 sym
->type
.ref
= type
->ref
;
1104 if (!is_compatible_types(&sym
->type
, type
)) {
1105 tcc_error("incompatible types for redefinition of '%s'",
1106 get_tok_str(sym
->v
, NULL
));
1108 } else if ((sym
->type
.t
& VT_BTYPE
) == VT_FUNC
) {
1109 int static_proto
= sym
->type
.t
& VT_STATIC
;
1110 /* warn if static follows non-static function declaration */
1111 if ((type
->t
& VT_STATIC
) && !static_proto
1112 /* XXX this test for inline shouldn't be here. Until we
1113 implement gnu-inline mode again it silences a warning for
1114 mingw caused by our workarounds. */
1115 && !((type
->t
| sym
->type
.t
) & VT_INLINE
))
1116 tcc_warning("static storage ignored for redefinition of '%s'",
1117 get_tok_str(sym
->v
, NULL
));
1119 /* set 'inline' if both agree or if one has static */
1120 if ((type
->t
| sym
->type
.t
) & VT_INLINE
) {
1121 if (!((type
->t
^ sym
->type
.t
) & VT_INLINE
)
1122 || ((type
->t
| sym
->type
.t
) & VT_STATIC
))
1123 static_proto
|= VT_INLINE
;
1126 if (0 == (type
->t
& VT_EXTERN
)) {
1127 struct FuncAttr f
= sym
->type
.ref
->f
;
1128 /* put complete type, use static from prototype */
1129 sym
->type
.t
= (type
->t
& ~(VT_STATIC
|VT_INLINE
)) | static_proto
;
1130 sym
->type
.ref
= type
->ref
;
1131 merge_funcattr(&sym
->type
.ref
->f
, &f
);
1133 sym
->type
.t
&= ~VT_INLINE
| static_proto
;
1136 if (sym
->type
.ref
->f
.func_type
== FUNC_OLD
1137 && type
->ref
->f
.func_type
!= FUNC_OLD
) {
1138 sym
->type
.ref
= type
->ref
;
1142 if ((sym
->type
.t
& VT_ARRAY
) && type
->ref
->c
>= 0) {
1143 /* set array size if it was omitted in extern declaration */
1144 sym
->type
.ref
->c
= type
->ref
->c
;
1146 if ((type
->t
^ sym
->type
.t
) & VT_STATIC
)
1147 tcc_warning("storage mismatch for redefinition of '%s'",
1148 get_tok_str(sym
->v
, NULL
));
1152 /* Merge some storage attributes. */
1153 static void patch_storage(Sym
*sym
, AttributeDef
*ad
, CType
*type
)
1156 patch_type(sym
, type
);
1158 #ifdef TCC_TARGET_PE
1159 if (sym
->a
.dllimport
!= ad
->a
.dllimport
)
1160 tcc_error("incompatible dll linkage for redefinition of '%s'",
1161 get_tok_str(sym
->v
, NULL
));
1163 merge_symattr(&sym
->a
, &ad
->a
);
1165 sym
->asm_label
= ad
->asm_label
;
1166 update_storage(sym
);
1169 /* copy sym to other stack */
1170 static Sym
*sym_copy(Sym
*s0
, Sym
**ps
)
1173 s
= sym_malloc(), *s
= *s0
;
1174 s
->prev
= *ps
, *ps
= s
;
1175 if (s
->v
< SYM_FIRST_ANOM
) {
1176 ps
= &table_ident
[s
->v
- TOK_IDENT
]->sym_identifier
;
1177 s
->prev_tok
= *ps
, *ps
= s
;
1182 /* copy s->type.ref to stack 'ps' for VT_FUNC and VT_PTR */
1183 static void sym_copy_ref(Sym
*s
, Sym
**ps
)
1185 int bt
= s
->type
.t
& VT_BTYPE
;
1186 if (bt
== VT_FUNC
|| bt
== VT_PTR
|| (bt
== VT_STRUCT
&& s
->sym_scope
)) {
1187 Sym
**sp
= &s
->type
.ref
;
1188 for (s
= *sp
, *sp
= NULL
; s
; s
= s
->next
) {
1189 Sym
*s2
= sym_copy(s
, ps
);
1190 sp
= &(*sp
= s2
)->next
;
1191 sym_copy_ref(s2
, ps
);
1196 /* define a new external reference to a symbol 'v' */
1197 static Sym
*external_sym(int v
, CType
*type
, int r
, AttributeDef
*ad
)
1201 /* look for global symbol */
1203 while (s
&& s
->sym_scope
)
1207 /* push forward reference */
1208 s
= global_identifier_push(v
, type
->t
, 0);
1211 s
->asm_label
= ad
->asm_label
;
1212 s
->type
.ref
= type
->ref
;
1213 /* copy type to the global stack */
1215 sym_copy_ref(s
, &global_stack
);
1217 patch_storage(s
, ad
, type
);
1219 /* push variables on local_stack if any */
1220 if (local_stack
&& (s
->type
.t
& VT_BTYPE
) != VT_FUNC
)
1221 s
= sym_copy(s
, &local_stack
);
1225 /* save registers up to (vtop - n) stack entry */
1226 ST_FUNC
void save_regs(int n
)
1229 for(p
= vstack
, p1
= vtop
- n
; p
<= p1
; p
++)
1233 /* save r to the memory stack, and mark it as being free */
1234 ST_FUNC
void save_reg(int r
)
1236 save_reg_upstack(r
, 0);
1239 /* save r to the memory stack, and mark it as being free,
1240 if seen up to (vtop - n) stack entry */
1241 ST_FUNC
void save_reg_upstack(int r
, int n
)
1243 int l
, size
, align
, bt
;
1246 if ((r
&= VT_VALMASK
) >= VT_CONST
)
1251 for(p
= vstack
, p1
= vtop
- n
; p
<= p1
; p
++) {
1252 if ((p
->r
& VT_VALMASK
) == r
|| p
->r2
== r
) {
1253 /* must save value on stack if not already done */
1255 bt
= p
->type
.t
& VT_BTYPE
;
1258 if ((p
->r
& VT_LVAL
) || bt
== VT_FUNC
)
1261 size
= type_size(&sv
.type
, &align
);
1262 l
= get_temp_local_var(size
,align
);
1263 sv
.r
= VT_LOCAL
| VT_LVAL
;
1265 store(p
->r
& VT_VALMASK
, &sv
);
1266 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
1267 /* x86 specific: need to pop fp register ST0 if saved */
1268 if (r
== TREG_ST0
) {
1269 o(0xd8dd); /* fstp %st(0) */
1272 /* special long long case */
1273 if (p
->r2
< VT_CONST
&& USING_TWO_WORDS(bt
)) {
1278 /* mark that stack entry as being saved on the stack */
1279 if (p
->r
& VT_LVAL
) {
1280 /* also clear the bounded flag because the
1281 relocation address of the function was stored in
1283 p
->r
= (p
->r
& ~(VT_VALMASK
| VT_BOUNDED
)) | VT_LLOCAL
;
1285 p
->r
= VT_LVAL
| VT_LOCAL
;
1294 #ifdef TCC_TARGET_ARM
1295 /* find a register of class 'rc2' with at most one reference on stack.
1296 * If none, call get_reg(rc) */
1297 ST_FUNC
int get_reg_ex(int rc
, int rc2
)
1302 for(r
=0;r
<NB_REGS
;r
++) {
1303 if (reg_classes
[r
] & rc2
) {
1306 for(p
= vstack
; p
<= vtop
; p
++) {
1307 if ((p
->r
& VT_VALMASK
) == r
||
1319 /* find a free register of class 'rc'. If none, save one register */
1320 ST_FUNC
int get_reg(int rc
)
1325 /* find a free register */
1326 for(r
=0;r
<NB_REGS
;r
++) {
1327 if (reg_classes
[r
] & rc
) {
1330 for(p
=vstack
;p
<=vtop
;p
++) {
1331 if ((p
->r
& VT_VALMASK
) == r
||
1340 /* no register left : free the first one on the stack (VERY
1341 IMPORTANT to start from the bottom to ensure that we don't
1342 spill registers used in gen_opi()) */
1343 for(p
=vstack
;p
<=vtop
;p
++) {
1344 /* look at second register (if long long) */
1346 if (r
< VT_CONST
&& (reg_classes
[r
] & rc
))
1348 r
= p
->r
& VT_VALMASK
;
1349 if (r
< VT_CONST
&& (reg_classes
[r
] & rc
)) {
1355 /* Should never comes here */
1359 /* find a free temporary local variable (return the offset on stack) match the size and align. If none, add new temporary stack variable*/
1360 static int get_temp_local_var(int size
,int align
){
1362 struct temp_local_variable
*temp_var
;
1369 for(i
=0;i
<nb_temp_local_vars
;i
++){
1370 temp_var
=&arr_temp_local_vars
[i
];
1371 if(temp_var
->size
<size
||align
!=temp_var
->align
){
1374 /*check if temp_var is free*/
1376 for(p
=vstack
;p
<=vtop
;p
++) {
1378 if(r
==VT_LOCAL
||r
==VT_LLOCAL
){
1379 if(p
->c
.i
==temp_var
->location
){
1386 found_var
=temp_var
->location
;
1392 loc
= (loc
- size
) & -align
;
1393 if(nb_temp_local_vars
<MAX_TEMP_LOCAL_VARIABLE_NUMBER
){
1394 temp_var
=&arr_temp_local_vars
[i
];
1395 temp_var
->location
=loc
;
1396 temp_var
->size
=size
;
1397 temp_var
->align
=align
;
1398 nb_temp_local_vars
++;
1405 static void clear_temp_local_var_list(){
1406 nb_temp_local_vars
=0;
1409 /* move register 's' (of type 't') to 'r', and flush previous value of r to memory
1411 static void move_reg(int r
, int s
, int t
)
1425 /* get address of vtop (vtop MUST BE an lvalue) */
1426 ST_FUNC
void gaddrof(void)
1428 vtop
->r
&= ~VT_LVAL
;
1429 /* tricky: if saved lvalue, then we can go back to lvalue */
1430 if ((vtop
->r
& VT_VALMASK
) == VT_LLOCAL
)
1431 vtop
->r
= (vtop
->r
& ~VT_VALMASK
) | VT_LOCAL
| VT_LVAL
;
1434 #ifdef CONFIG_TCC_BCHECK
1435 /* generate a bounded pointer addition */
1436 static void gen_bounded_ptr_add(void)
1438 int save
= (vtop
[-1].r
& VT_VALMASK
) == VT_LOCAL
;
1443 vpush_helper_func(TOK___bound_ptr_add
);
1448 /* returned pointer is in REG_IRET */
1449 vtop
->r
= REG_IRET
| VT_BOUNDED
;
1452 /* relocation offset of the bounding function call point */
1453 vtop
->c
.i
= (cur_text_section
->reloc
->data_offset
- sizeof(ElfW_Rel
));
1456 /* patch pointer addition in vtop so that pointer dereferencing is
1458 static void gen_bounded_ptr_deref(void)
1468 size
= type_size(&vtop
->type
, &align
);
1470 case 1: func
= TOK___bound_ptr_indir1
; break;
1471 case 2: func
= TOK___bound_ptr_indir2
; break;
1472 case 4: func
= TOK___bound_ptr_indir4
; break;
1473 case 8: func
= TOK___bound_ptr_indir8
; break;
1474 case 12: func
= TOK___bound_ptr_indir12
; break;
1475 case 16: func
= TOK___bound_ptr_indir16
; break;
1477 /* may happen with struct member access */
1480 sym
= external_helper_sym(func
);
1482 put_extern_sym(sym
, NULL
, 0, 0);
1483 /* patch relocation */
1484 /* XXX: find a better solution ? */
1485 rel
= (ElfW_Rel
*)(cur_text_section
->reloc
->data
+ vtop
->c
.i
);
1486 rel
->r_info
= ELFW(R_INFO
)(sym
->c
, ELFW(R_TYPE
)(rel
->r_info
));
1489 /* generate lvalue bound code */
1490 static void gbound(void)
1494 vtop
->r
&= ~VT_MUSTBOUND
;
1495 /* if lvalue, then use checking code before dereferencing */
1496 if (vtop
->r
& VT_LVAL
) {
1497 /* if not VT_BOUNDED value, then make one */
1498 if (!(vtop
->r
& VT_BOUNDED
)) {
1499 /* must save type because we must set it to int to get pointer */
1501 vtop
->type
.t
= VT_PTR
;
1504 gen_bounded_ptr_add();
1508 /* then check for dereferencing */
1509 gen_bounded_ptr_deref();
1513 /* we need to call __bound_ptr_add before we start to load function
1514 args into registers */
1515 ST_FUNC
void gbound_args(int nb_args
)
1520 for (i
= 1; i
<= nb_args
; ++i
)
1521 if (vtop
[1 - i
].r
& VT_MUSTBOUND
) {
1527 sv
= vtop
- nb_args
;
1528 if (sv
->r
& VT_SYM
) {
1532 #ifndef TCC_TARGET_PE
1533 || v
== TOK_sigsetjmp
1534 || v
== TOK___sigsetjmp
1537 vpush_helper_func(TOK___bound_setjmp
);
1540 func_bound_add_epilog
= 1;
1542 #if defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64
1543 if (v
== TOK_alloca
)
1544 func_bound_add_epilog
= 1;
1547 if (v
== TOK_longjmp
) /* undo rename to __longjmp14 */
1548 sv
->sym
->asm_label
= TOK___bound_longjmp
;
1553 /* Add bounds for local symbols from S to E (via ->prev) */
1554 static void add_local_bounds(Sym
*s
, Sym
*e
)
1556 for (; s
!= e
; s
= s
->prev
) {
1557 if (!s
->v
|| (s
->r
& VT_VALMASK
) != VT_LOCAL
)
1559 /* Add arrays/structs/unions because we always take address */
1560 if ((s
->type
.t
& VT_ARRAY
)
1561 || (s
->type
.t
& VT_BTYPE
) == VT_STRUCT
1562 || s
->a
.addrtaken
) {
1563 /* add local bound info */
1564 int align
, size
= type_size(&s
->type
, &align
);
1565 addr_t
*bounds_ptr
= section_ptr_add(lbounds_section
,
1566 2 * sizeof(addr_t
));
1567 bounds_ptr
[0] = s
->c
;
1568 bounds_ptr
[1] = size
;
1574 /* Wrapper around sym_pop, that potentially also registers local bounds. */
1575 static void pop_local_syms(Sym
*b
, int keep
)
1577 #ifdef CONFIG_TCC_BCHECK
1578 if (tcc_state
->do_bounds_check
&& !keep
&& (local_scope
|| !func_var
))
1579 add_local_bounds(local_stack
, b
);
1582 tcc_add_debug_info (tcc_state
, !local_scope
, local_stack
, b
);
1583 sym_pop(&local_stack
, b
, keep
);
1586 static void incr_bf_adr(int o
)
1588 vtop
->type
= char_pointer_type
;
1592 vtop
->type
.t
= VT_BYTE
| VT_UNSIGNED
;
1596 /* single-byte load mode for packed or otherwise unaligned bitfields */
1597 static void load_packed_bf(CType
*type
, int bit_pos
, int bit_size
)
1600 save_reg_upstack(vtop
->r
, 1);
1601 vpush64(type
->t
& VT_BTYPE
, 0); // B X
1602 bits
= 0, o
= bit_pos
>> 3, bit_pos
&= 7;
1611 vpushi(bit_pos
), gen_op(TOK_SHR
), bit_pos
= 0; // X B Y
1613 vpushi((1 << n
) - 1), gen_op('&');
1616 vpushi(bits
), gen_op(TOK_SHL
);
1619 bits
+= n
, bit_size
-= n
, o
= 1;
1622 if (!(type
->t
& VT_UNSIGNED
)) {
1623 n
= ((type
->t
& VT_BTYPE
) == VT_LLONG
? 64 : 32) - bits
;
1624 vpushi(n
), gen_op(TOK_SHL
);
1625 vpushi(n
), gen_op(TOK_SAR
);
1629 /* single-byte store mode for packed or otherwise unaligned bitfields */
1630 static void store_packed_bf(int bit_pos
, int bit_size
)
1632 int bits
, n
, o
, m
, c
;
1633 c
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
1635 save_reg_upstack(vtop
->r
, 1);
1636 bits
= 0, o
= bit_pos
>> 3, bit_pos
&= 7;
1638 incr_bf_adr(o
); // X B
1640 c
? vdup() : gv_dup(); // B V X
1643 vpushi(bits
), gen_op(TOK_SHR
);
1645 vpushi(bit_pos
), gen_op(TOK_SHL
);
1650 m
= ((1 << n
) - 1) << bit_pos
;
1651 vpushi(m
), gen_op('&'); // X B V1
1652 vpushv(vtop
-1); // X B V1 B
1653 vpushi(m
& 0x80 ? ~m
& 0x7f : ~m
);
1654 gen_op('&'); // X B V1 B1
1655 gen_op('|'); // X B V2
1657 vdup(), vtop
[-1] = vtop
[-2]; // X B B V2
1658 vstore(), vpop(); // X B
1659 bits
+= n
, bit_size
-= n
, bit_pos
= 0, o
= 1;
1664 static int adjust_bf(SValue
*sv
, int bit_pos
, int bit_size
)
1667 if (0 == sv
->type
.ref
)
1669 t
= sv
->type
.ref
->auxtype
;
1670 if (t
!= -1 && t
!= VT_STRUCT
) {
1671 sv
->type
.t
= (sv
->type
.t
& ~(VT_BTYPE
| VT_LONG
)) | t
;
1677 /* store vtop in a register belonging to class 'rc'. lvalues are
1678 converted to values. Cannot be used if cannot be converted to
1679 register value (such as structures). */
1680 ST_FUNC
int gv(int rc
)
1682 int r
, r2
, r_ok
, r2_ok
, rc2
, bt
;
1683 int bit_pos
, bit_size
, size
, align
;
1685 /* NOTE: get_reg can modify vstack[] */
1686 if (vtop
->type
.t
& VT_BITFIELD
) {
1689 bit_pos
= BIT_POS(vtop
->type
.t
);
1690 bit_size
= BIT_SIZE(vtop
->type
.t
);
1691 /* remove bit field info to avoid loops */
1692 vtop
->type
.t
&= ~VT_STRUCT_MASK
;
1695 type
.t
= vtop
->type
.t
& VT_UNSIGNED
;
1696 if ((vtop
->type
.t
& VT_BTYPE
) == VT_BOOL
)
1697 type
.t
|= VT_UNSIGNED
;
1699 r
= adjust_bf(vtop
, bit_pos
, bit_size
);
1701 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
)
1706 if (r
== VT_STRUCT
) {
1707 load_packed_bf(&type
, bit_pos
, bit_size
);
1709 int bits
= (type
.t
& VT_BTYPE
) == VT_LLONG
? 64 : 32;
1710 /* cast to int to propagate signedness in following ops */
1712 /* generate shifts */
1713 vpushi(bits
- (bit_pos
+ bit_size
));
1715 vpushi(bits
- bit_size
);
1716 /* NOTE: transformed to SHR if unsigned */
1721 if (is_float(vtop
->type
.t
) &&
1722 (vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
) {
1723 /* CPUs usually cannot use float constants, so we store them
1724 generically in data segment */
1725 init_params p
= { rodata_section
};
1726 unsigned long offset
;
1727 size
= type_size(&vtop
->type
, &align
);
1729 size
= 0, align
= 1;
1730 offset
= section_add(p
.sec
, size
, align
);
1731 vpush_ref(&vtop
->type
, p
.sec
, offset
, size
);
1733 init_putv(&p
, &vtop
->type
, offset
);
1736 #ifdef CONFIG_TCC_BCHECK
1737 if (vtop
->r
& VT_MUSTBOUND
)
1741 bt
= vtop
->type
.t
& VT_BTYPE
;
1743 #ifdef TCC_TARGET_RISCV64
1745 if (bt
== VT_LDOUBLE
&& rc
== RC_FLOAT
)
1748 rc2
= RC2_TYPE(bt
, rc
);
1750 /* need to reload if:
1752 - lvalue (need to dereference pointer)
1753 - already a register, but not in the right class */
1754 r
= vtop
->r
& VT_VALMASK
;
1755 r_ok
= !(vtop
->r
& VT_LVAL
) && (r
< VT_CONST
) && (reg_classes
[r
] & rc
);
1756 r2_ok
= !rc2
|| ((vtop
->r2
< VT_CONST
) && (reg_classes
[vtop
->r2
] & rc2
));
1758 if (!r_ok
|| !r2_ok
) {
1762 int load_type
= (bt
== VT_QFLOAT
) ? VT_DOUBLE
: VT_PTRDIFF_T
;
1763 int original_type
= vtop
->type
.t
;
1765 /* two register type load :
1766 expand to two words temporarily */
1767 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
) {
1769 unsigned long long ll
= vtop
->c
.i
;
1770 vtop
->c
.i
= ll
; /* first word */
1772 vtop
->r
= r
; /* save register value */
1773 vpushi(ll
>> 32); /* second word */
1774 } else if (vtop
->r
& VT_LVAL
) {
1775 /* We do not want to modify the long long pointer here.
1776 So we save any other instances down the stack */
1777 save_reg_upstack(vtop
->r
, 1);
1778 /* load from memory */
1779 vtop
->type
.t
= load_type
;
1782 vtop
[-1].r
= r
; /* save register value */
1783 /* increment pointer to get second word */
1784 vtop
->type
.t
= VT_PTRDIFF_T
;
1789 vtop
->type
.t
= load_type
;
1791 /* move registers */
1794 if (r2_ok
&& vtop
->r2
< VT_CONST
)
1797 vtop
[-1].r
= r
; /* save register value */
1798 vtop
->r
= vtop
[-1].r2
;
1800 /* Allocate second register. Here we rely on the fact that
1801 get_reg() tries first to free r2 of an SValue. */
1805 /* write second register */
1808 vtop
->type
.t
= original_type
;
1810 if (vtop
->r
== VT_CMP
)
1812 /* one register type load */
1817 #ifdef TCC_TARGET_C67
1818 /* uses register pairs for doubles */
1819 if (bt
== VT_DOUBLE
)
1826 /* generate vtop[-1] and vtop[0] in resp. classes rc1 and rc2 */
1827 ST_FUNC
void gv2(int rc1
, int rc2
)
1829 /* generate more generic register first. But VT_JMP or VT_CMP
1830 values must be generated first in all cases to avoid possible
1832 if (vtop
->r
!= VT_CMP
&& rc1
<= rc2
) {
1837 /* test if reload is needed for first register */
1838 if ((vtop
[-1].r
& VT_VALMASK
) >= VT_CONST
) {
1848 /* test if reload is needed for first register */
1849 if ((vtop
[0].r
& VT_VALMASK
) >= VT_CONST
) {
1856 /* expand 64bit on stack in two ints */
1857 ST_FUNC
void lexpand(void)
1860 u
= vtop
->type
.t
& (VT_DEFSIGN
| VT_UNSIGNED
);
1861 v
= vtop
->r
& (VT_VALMASK
| VT_LVAL
);
1862 if (v
== VT_CONST
) {
1865 } else if (v
== (VT_LVAL
|VT_CONST
) || v
== (VT_LVAL
|VT_LOCAL
)) {
1871 vtop
[0].r
= vtop
[-1].r2
;
1872 vtop
[0].r2
= vtop
[-1].r2
= VT_CONST
;
1874 vtop
[0].type
.t
= vtop
[-1].type
.t
= VT_INT
| u
;
1879 /* build a long long from two ints */
1880 static void lbuild(int t
)
1882 gv2(RC_INT
, RC_INT
);
1883 vtop
[-1].r2
= vtop
[0].r
;
1884 vtop
[-1].type
.t
= t
;
1889 /* convert stack entry to register and duplicate its value in another
1891 static void gv_dup(void)
1897 if ((t
& VT_BTYPE
) == VT_LLONG
) {
1898 if (t
& VT_BITFIELD
) {
1908 /* stack: H L L1 H1 */
1918 /* duplicate value */
1928 /* generate CPU independent (unsigned) long long operations */
1929 static void gen_opl(int op
)
1931 int t
, a
, b
, op1
, c
, i
;
1933 unsigned short reg_iret
= REG_IRET
;
1934 unsigned short reg_lret
= REG_IRE2
;
1940 func
= TOK___divdi3
;
1943 func
= TOK___udivdi3
;
1946 func
= TOK___moddi3
;
1949 func
= TOK___umoddi3
;
1956 /* call generic long long function */
1957 vpush_helper_func(func
);
1962 vtop
->r2
= reg_lret
;
1970 //pv("gen_opl A",0,2);
1976 /* stack: L1 H1 L2 H2 */
1981 vtop
[-2] = vtop
[-3];
1984 /* stack: H1 H2 L1 L2 */
1985 //pv("gen_opl B",0,4);
1991 /* stack: H1 H2 L1 L2 ML MH */
1994 /* stack: ML MH H1 H2 L1 L2 */
1998 /* stack: ML MH H1 L2 H2 L1 */
2003 /* stack: ML MH M1 M2 */
2006 } else if (op
== '+' || op
== '-') {
2007 /* XXX: add non carry method too (for MIPS or alpha) */
2013 /* stack: H1 H2 (L1 op L2) */
2016 gen_op(op1
+ 1); /* TOK_xxxC2 */
2019 /* stack: H1 H2 (L1 op L2) */
2022 /* stack: (L1 op L2) H1 H2 */
2024 /* stack: (L1 op L2) (H1 op H2) */
2032 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
2033 t
= vtop
[-1].type
.t
;
2037 /* stack: L H shift */
2039 /* constant: simpler */
2040 /* NOTE: all comments are for SHL. the other cases are
2041 done by swapping words */
2052 if (op
!= TOK_SAR
) {
2085 /* XXX: should provide a faster fallback on x86 ? */
2088 func
= TOK___ashrdi3
;
2091 func
= TOK___lshrdi3
;
2094 func
= TOK___ashldi3
;
2100 /* compare operations */
2106 /* stack: L1 H1 L2 H2 */
2108 vtop
[-1] = vtop
[-2];
2110 /* stack: L1 L2 H1 H2 */
2114 /* when values are equal, we need to compare low words. since
2115 the jump is inverted, we invert the test too. */
2118 else if (op1
== TOK_GT
)
2120 else if (op1
== TOK_ULT
)
2122 else if (op1
== TOK_UGT
)
2132 /* generate non equal test */
2134 vset_VT_CMP(TOK_NE
);
2138 /* compare low. Always unsigned */
2142 else if (op1
== TOK_LE
)
2144 else if (op1
== TOK_GT
)
2146 else if (op1
== TOK_GE
)
2149 #if 0//def TCC_TARGET_I386
2150 if (op
== TOK_NE
) { gsym(b
); break; }
2151 if (op
== TOK_EQ
) { gsym(a
); break; }
/* Signed 64-bit division emulated with unsigned arithmetic so that no
   signed-overflow UB can occur (e.g. INT64_MIN / -1 simply wraps).
   Inputs and result are two's-complement values carried in uint64_t. */
static uint64_t gen_opic_sdiv(uint64_t a, uint64_t b)
{
    uint64_t ua = a, ub = b, q;

    /* take magnitudes; unsigned negation is well-defined for all values */
    if (ua >> 63)
        ua = -ua;
    if (ub >> 63)
        ub = -ub;
    q = ua / ub;
    /* quotient is negative exactly when the operand signs differ */
    if ((a ^ b) >> 63)
        q = -q;
    return q;
}
/* Signed 64-bit '<' on values carried in uint64_t: XOR-ing the sign bit
   maps the signed range onto the unsigned range order-preservingly, so an
   ordinary unsigned compare gives the signed result. */
static int gen_opic_lt(uint64_t a, uint64_t b)
{
    const uint64_t sign_bias = (uint64_t)1 << 63;
    return (a ^ sign_bias) < (b ^ sign_bias);
}
2171 /* handle integer constant optimizations and various machine
2173 static void gen_opic(int op
)
2175 SValue
*v1
= vtop
- 1;
2177 int t1
= v1
->type
.t
& VT_BTYPE
;
2178 int t2
= v2
->type
.t
& VT_BTYPE
;
2179 int c1
= (v1
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2180 int c2
= (v2
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2181 uint64_t l1
= c1
? v1
->c
.i
: 0;
2182 uint64_t l2
= c2
? v2
->c
.i
: 0;
2183 int shm
= (t1
== VT_LLONG
) ? 63 : 31;
2185 if (t1
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t1
!= VT_PTR
))
2186 l1
= ((uint32_t)l1
|
2187 (v1
->type
.t
& VT_UNSIGNED
? 0 : -(l1
& 0x80000000)));
2188 if (t2
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t2
!= VT_PTR
))
2189 l2
= ((uint32_t)l2
|
2190 (v2
->type
.t
& VT_UNSIGNED
? 0 : -(l2
& 0x80000000)));
2194 case '+': l1
+= l2
; break;
2195 case '-': l1
-= l2
; break;
2196 case '&': l1
&= l2
; break;
2197 case '^': l1
^= l2
; break;
2198 case '|': l1
|= l2
; break;
2199 case '*': l1
*= l2
; break;
2206 /* if division by zero, generate explicit division */
2208 if (const_wanted
&& !(nocode_wanted
& unevalmask
))
2209 tcc_error("division by zero in constant");
2213 default: l1
= gen_opic_sdiv(l1
, l2
); break;
2214 case '%': l1
= l1
- l2
* gen_opic_sdiv(l1
, l2
); break;
2215 case TOK_UDIV
: l1
= l1
/ l2
; break;
2216 case TOK_UMOD
: l1
= l1
% l2
; break;
2219 case TOK_SHL
: l1
<<= (l2
& shm
); break;
2220 case TOK_SHR
: l1
>>= (l2
& shm
); break;
2222 l1
= (l1
>> 63) ? ~(~l1
>> (l2
& shm
)) : l1
>> (l2
& shm
);
2225 case TOK_ULT
: l1
= l1
< l2
; break;
2226 case TOK_UGE
: l1
= l1
>= l2
; break;
2227 case TOK_EQ
: l1
= l1
== l2
; break;
2228 case TOK_NE
: l1
= l1
!= l2
; break;
2229 case TOK_ULE
: l1
= l1
<= l2
; break;
2230 case TOK_UGT
: l1
= l1
> l2
; break;
2231 case TOK_LT
: l1
= gen_opic_lt(l1
, l2
); break;
2232 case TOK_GE
: l1
= !gen_opic_lt(l1
, l2
); break;
2233 case TOK_LE
: l1
= !gen_opic_lt(l2
, l1
); break;
2234 case TOK_GT
: l1
= gen_opic_lt(l2
, l1
); break;
2236 case TOK_LAND
: l1
= l1
&& l2
; break;
2237 case TOK_LOR
: l1
= l1
|| l2
; break;
2241 if (t1
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t1
!= VT_PTR
))
2242 l1
= ((uint32_t)l1
|
2243 (v1
->type
.t
& VT_UNSIGNED
? 0 : -(l1
& 0x80000000)));
2247 /* if commutative ops, put c2 as constant */
2248 if (c1
&& (op
== '+' || op
== '&' || op
== '^' ||
2249 op
== '|' || op
== '*' || op
== TOK_EQ
|| op
== TOK_NE
)) {
2251 c2
= c1
; //c = c1, c1 = c2, c2 = c;
2252 l2
= l1
; //l = l1, l1 = l2, l2 = l;
2254 if (!const_wanted
&&
2256 (op
== TOK_SHL
|| op
== TOK_SHR
|| op
== TOK_SAR
)) ||
2257 (l1
== -1 && op
== TOK_SAR
))) {
2258 /* treat (0 << x), (0 >> x) and (-1 >> x) as constant */
2260 } else if (!const_wanted
&&
2261 c2
&& ((l2
== 0 && (op
== '&' || op
== '*')) ||
2263 (l2
== -1 || (l2
== 0xFFFFFFFF && t2
!= VT_LLONG
))) ||
2264 (l2
== 1 && (op
== '%' || op
== TOK_UMOD
)))) {
2265 /* treat (x & 0), (x * 0), (x | -1) and (x % 1) as constant */
2270 } else if (c2
&& (((op
== '*' || op
== '/' || op
== TOK_UDIV
||
2273 ((op
== '+' || op
== '-' || op
== '|' || op
== '^' ||
2274 op
== TOK_SHL
|| op
== TOK_SHR
|| op
== TOK_SAR
) &&
2277 (l2
== -1 || (l2
== 0xFFFFFFFF && t2
!= VT_LLONG
))))) {
2278 /* filter out NOP operations like x*1, x-0, x&-1... */
2280 } else if (c2
&& (op
== '*' || op
== TOK_PDIV
|| op
== TOK_UDIV
)) {
2281 /* try to use shifts instead of muls or divs */
2282 if (l2
> 0 && (l2
& (l2
- 1)) == 0) {
2291 else if (op
== TOK_PDIV
)
2297 } else if (c2
&& (op
== '+' || op
== '-') &&
2298 (((vtop
[-1].r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == (VT_CONST
| VT_SYM
))
2299 || (vtop
[-1].r
& (VT_VALMASK
| VT_LVAL
)) == VT_LOCAL
)) {
2300 /* symbol + constant case */
2304 /* The backends can't always deal with addends to symbols
2305 larger than +-1<<31. Don't construct such. */
2312 /* call low level op generator */
2313 if (t1
== VT_LLONG
|| t2
== VT_LLONG
||
2314 (PTR_SIZE
== 8 && (t1
== VT_PTR
|| t2
== VT_PTR
)))
2322 #if defined TCC_TARGET_X86_64 || defined TCC_TARGET_I386
2323 # define gen_negf gen_opf
2324 #elif defined TCC_TARGET_ARM
2325 void gen_negf(int op
)
2327 /* arm will detect 0-x and replace by vneg */
2328 vpushi(0), vswap(), gen_op('-');
2331 /* XXX: implement in gen_opf() for other backends too */
2332 void gen_negf(int op
)
2334 /* In IEEE negate(x) isn't subtract(0,x). Without NaNs it's
2335 subtract(-0, x), but with them it's really a sign flip
2336 operation. We implement this with bit manipulation and have
2337 to do some type reinterpretation for this, which TCC can do
2340 int align
, size
, bt
;
2342 size
= type_size(&vtop
->type
, &align
);
2343 bt
= vtop
->type
.t
& VT_BTYPE
;
2344 save_reg(gv(RC_TYPE(bt
)));
2346 incr_bf_adr(size
- 1);
2348 vpushi(0x80); /* flip sign */
2355 /* generate a floating point operation with constant propagation */
2356 static void gen_opif(int op
)
2360 #if defined _MSC_VER && defined __x86_64__
2361 /* avoid bad optimization with f1 -= f2 for f1:-0.0, f2:0.0 */
2371 /* currently, we cannot do computations with forward symbols */
2372 c1
= (v1
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2373 c2
= (v2
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2375 if (v1
->type
.t
== VT_FLOAT
) {
2378 } else if (v1
->type
.t
== VT_DOUBLE
) {
2385 /* NOTE: we only do constant propagation if finite number (not
2386 NaN or infinity) (ANSI spec) */
2387 if (!(ieee_finite(f1
) || !ieee_finite(f2
)) && !const_wanted
)
2390 case '+': f1
+= f2
; break;
2391 case '-': f1
-= f2
; break;
2392 case '*': f1
*= f2
; break;
2395 union { float f
; unsigned u
; } x1
, x2
, y
;
2396 /* If not in initializer we need to potentially generate
2397 FP exceptions at runtime, otherwise we want to fold. */
2400 /* the run-time result of 0.0/0.0 on x87, also of other compilers
2401 when used to compile the f1 /= f2 below, would be -nan */
2402 x1
.f
= f1
, x2
.f
= f2
;
2404 y
.u
= 0x7fc00000; /* nan */
2406 y
.u
= 0x7f800000; /* infinity */
2407 y
.u
|= (x1
.u
^ x2
.u
) & 0x80000000; /* set sign */
2416 /* XXX: also handles tests ? */
2422 /* XXX: overflow test ? */
2423 if (v1
->type
.t
== VT_FLOAT
) {
2425 } else if (v1
->type
.t
== VT_DOUBLE
) {
2432 if (op
== TOK_NEG
) {
2440 /* print a type. If 'varstr' is not NULL, then the variable is also
2441 printed in the type */
2443 /* XXX: add array and function pointers */
2444 static void type_to_str(char *buf
, int buf_size
,
2445 CType
*type
, const char *varstr
)
2457 pstrcat(buf
, buf_size
, "extern ");
2459 pstrcat(buf
, buf_size
, "static ");
2461 pstrcat(buf
, buf_size
, "typedef ");
2463 pstrcat(buf
, buf_size
, "inline ");
2465 if (t
& VT_VOLATILE
)
2466 pstrcat(buf
, buf_size
, "volatile ");
2467 if (t
& VT_CONSTANT
)
2468 pstrcat(buf
, buf_size
, "const ");
2470 if (((t
& VT_DEFSIGN
) && bt
== VT_BYTE
)
2471 || ((t
& VT_UNSIGNED
)
2472 && (bt
== VT_SHORT
|| bt
== VT_INT
|| bt
== VT_LLONG
)
2475 pstrcat(buf
, buf_size
, (t
& VT_UNSIGNED
) ? "unsigned " : "signed ");
2477 buf_size
-= strlen(buf
);
2513 tstr
= "long double";
2515 pstrcat(buf
, buf_size
, tstr
);
2522 pstrcat(buf
, buf_size
, tstr
);
2523 v
= type
->ref
->v
& ~SYM_STRUCT
;
2524 if (v
>= SYM_FIRST_ANOM
)
2525 pstrcat(buf
, buf_size
, "<anonymous>");
2527 pstrcat(buf
, buf_size
, get_tok_str(v
, NULL
));
2532 if (varstr
&& '*' == *varstr
) {
2533 pstrcat(buf1
, sizeof(buf1
), "(");
2534 pstrcat(buf1
, sizeof(buf1
), varstr
);
2535 pstrcat(buf1
, sizeof(buf1
), ")");
2537 pstrcat(buf1
, buf_size
, "(");
2539 while (sa
!= NULL
) {
2541 type_to_str(buf2
, sizeof(buf2
), &sa
->type
, NULL
);
2542 pstrcat(buf1
, sizeof(buf1
), buf2
);
2545 pstrcat(buf1
, sizeof(buf1
), ", ");
2547 if (s
->f
.func_type
== FUNC_ELLIPSIS
)
2548 pstrcat(buf1
, sizeof(buf1
), ", ...");
2549 pstrcat(buf1
, sizeof(buf1
), ")");
2550 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
2554 if (t
& (VT_ARRAY
|VT_VLA
)) {
2555 if (varstr
&& '*' == *varstr
)
2556 snprintf(buf1
, sizeof(buf1
), "(%s)[%d]", varstr
, s
->c
);
2558 snprintf(buf1
, sizeof(buf1
), "%s[%d]", varstr
? varstr
: "", s
->c
);
2559 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
2562 pstrcpy(buf1
, sizeof(buf1
), "*");
2563 if (t
& VT_CONSTANT
)
2564 pstrcat(buf1
, buf_size
, "const ");
2565 if (t
& VT_VOLATILE
)
2566 pstrcat(buf1
, buf_size
, "volatile ");
2568 pstrcat(buf1
, sizeof(buf1
), varstr
);
2569 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
2573 pstrcat(buf
, buf_size
, " ");
2574 pstrcat(buf
, buf_size
, varstr
);
2579 static void type_incompatibility_error(CType
* st
, CType
* dt
, const char* fmt
)
2581 char buf1
[256], buf2
[256];
2582 type_to_str(buf1
, sizeof(buf1
), st
, NULL
);
2583 type_to_str(buf2
, sizeof(buf2
), dt
, NULL
);
2584 tcc_error(fmt
, buf1
, buf2
);
2587 static void type_incompatibility_warning(CType
* st
, CType
* dt
, const char* fmt
)
2589 char buf1
[256], buf2
[256];
2590 type_to_str(buf1
, sizeof(buf1
), st
, NULL
);
2591 type_to_str(buf2
, sizeof(buf2
), dt
, NULL
);
2592 tcc_warning(fmt
, buf1
, buf2
);
2595 static int pointed_size(CType
*type
)
2598 return type_size(pointed_type(type
), &align
);
2601 static inline int is_null_pointer(SValue
*p
)
2603 if ((p
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) != VT_CONST
)
2605 return ((p
->type
.t
& VT_BTYPE
) == VT_INT
&& (uint32_t)p
->c
.i
== 0) ||
2606 ((p
->type
.t
& VT_BTYPE
) == VT_LLONG
&& p
->c
.i
== 0) ||
2607 ((p
->type
.t
& VT_BTYPE
) == VT_PTR
&&
2608 (PTR_SIZE
== 4 ? (uint32_t)p
->c
.i
== 0 : p
->c
.i
== 0) &&
2609 ((pointed_type(&p
->type
)->t
& VT_BTYPE
) == VT_VOID
) &&
2610 0 == (pointed_type(&p
->type
)->t
& (VT_CONSTANT
| VT_VOLATILE
))
2614 /* compare function types. OLD functions match any new functions */
2615 static int is_compatible_func(CType
*type1
, CType
*type2
)
2621 if (s1
->f
.func_call
!= s2
->f
.func_call
)
2623 if (s1
->f
.func_type
!= s2
->f
.func_type
2624 && s1
->f
.func_type
!= FUNC_OLD
2625 && s2
->f
.func_type
!= FUNC_OLD
)
2628 if (!is_compatible_unqualified_types(&s1
->type
, &s2
->type
))
2630 if (s1
->f
.func_type
== FUNC_OLD
|| s2
->f
.func_type
== FUNC_OLD
)
2641 /* return true if type1 and type2 are the same. If unqualified is
2642 true, qualifiers on the types are ignored.
2644 static int compare_types(CType
*type1
, CType
*type2
, int unqualified
)
2648 t1
= type1
->t
& VT_TYPE
;
2649 t2
= type2
->t
& VT_TYPE
;
2651 /* strip qualifiers before comparing */
2652 t1
&= ~(VT_CONSTANT
| VT_VOLATILE
);
2653 t2
&= ~(VT_CONSTANT
| VT_VOLATILE
);
2656 /* Default Vs explicit signedness only matters for char */
2657 if ((t1
& VT_BTYPE
) != VT_BYTE
) {
2661 /* XXX: bitfields ? */
2666 && !(type1
->ref
->c
< 0
2667 || type2
->ref
->c
< 0
2668 || type1
->ref
->c
== type2
->ref
->c
))
2671 /* test more complicated cases */
2672 bt1
= t1
& VT_BTYPE
;
2673 if (bt1
== VT_PTR
) {
2674 type1
= pointed_type(type1
);
2675 type2
= pointed_type(type2
);
2676 return is_compatible_types(type1
, type2
);
2677 } else if (bt1
== VT_STRUCT
) {
2678 return (type1
->ref
== type2
->ref
);
2679 } else if (bt1
== VT_FUNC
) {
2680 return is_compatible_func(type1
, type2
);
2681 } else if (IS_ENUM(type1
->t
) && IS_ENUM(type2
->t
)) {
2682 /* If both are enums then they must be the same, if only one is then
2683 t1 and t2 must be equal, which was checked above already. */
2684 return type1
->ref
== type2
->ref
;
2690 /* Check if OP1 and OP2 can be "combined" with operation OP, the combined
2691 type is stored in DEST if non-null (except for pointer plus/minus) . */
2692 static int combine_types(CType
*dest
, SValue
*op1
, SValue
*op2
, int op
)
2694 CType
*type1
= &op1
->type
, *type2
= &op2
->type
, type
;
2695 int t1
= type1
->t
, t2
= type2
->t
, bt1
= t1
& VT_BTYPE
, bt2
= t2
& VT_BTYPE
;
2701 if (bt1
== VT_VOID
|| bt2
== VT_VOID
) {
2702 ret
= op
== '?' ? 1 : 0;
2703 /* NOTE: as an extension, we accept void on only one side */
2705 } else if (bt1
== VT_PTR
|| bt2
== VT_PTR
) {
2706 if (op
== '+') ; /* Handled in caller */
2707 /* http://port70.net/~nsz/c/c99/n1256.html#6.5.15p6 */
2708 /* If one is a null ptr constant the result type is the other. */
2709 else if (is_null_pointer (op2
)) type
= *type1
;
2710 else if (is_null_pointer (op1
)) type
= *type2
;
2711 else if (bt1
!= bt2
) {
2712 /* accept comparison or cond-expr between pointer and integer
2714 if ((op
== '?' || TOK_ISCOND(op
))
2715 && (is_integer_btype(bt1
) || is_integer_btype(bt2
)))
2716 tcc_warning("pointer/integer mismatch in %s",
2717 op
== '?' ? "conditional expression" : "comparison");
2718 else if (op
!= '-' || !is_integer_btype(bt2
))
2720 type
= *(bt1
== VT_PTR
? type1
: type2
);
2722 CType
*pt1
= pointed_type(type1
);
2723 CType
*pt2
= pointed_type(type2
);
2724 int pbt1
= pt1
->t
& VT_BTYPE
;
2725 int pbt2
= pt2
->t
& VT_BTYPE
;
2726 int newquals
, copied
= 0;
2727 if (pbt1
!= VT_VOID
&& pbt2
!= VT_VOID
2728 && !compare_types(pt1
, pt2
, 1/*unqualif*/)) {
2729 if (op
!= '?' && !TOK_ISCOND(op
))
2732 type_incompatibility_warning(type1
, type2
,
2734 ? "pointer type mismatch in conditional expression ('%s' and '%s')"
2735 : "pointer type mismatch in comparison('%s' and '%s')");
2738 /* pointers to void get preferred, otherwise the
2739 pointed to types minus qualifs should be compatible */
2740 type
= *((pbt1
== VT_VOID
) ? type1
: type2
);
2741 /* combine qualifs */
2742 newquals
= ((pt1
->t
| pt2
->t
) & (VT_CONSTANT
| VT_VOLATILE
));
2743 if ((~pointed_type(&type
)->t
& (VT_CONSTANT
| VT_VOLATILE
))
2746 /* copy the pointer target symbol */
2747 type
.ref
= sym_push(SYM_FIELD
, &type
.ref
->type
,
2750 pointed_type(&type
)->t
|= newquals
;
2752 /* pointers to incomplete arrays get converted to
2753 pointers to completed ones if possible */
2754 if (pt1
->t
& VT_ARRAY
2755 && pt2
->t
& VT_ARRAY
2756 && pointed_type(&type
)->ref
->c
< 0
2757 && (pt1
->ref
->c
> 0 || pt2
->ref
->c
> 0))
2760 type
.ref
= sym_push(SYM_FIELD
, &type
.ref
->type
,
2762 pointed_type(&type
)->ref
=
2763 sym_push(SYM_FIELD
, &pointed_type(&type
)->ref
->type
,
2764 0, pointed_type(&type
)->ref
->c
);
2765 pointed_type(&type
)->ref
->c
=
2766 0 < pt1
->ref
->c
? pt1
->ref
->c
: pt2
->ref
->c
;
2772 } else if (bt1
== VT_STRUCT
|| bt2
== VT_STRUCT
) {
2773 if (op
!= '?' || !compare_types(type1
, type2
, 1))
2776 } else if (is_float(bt1
) || is_float(bt2
)) {
2777 if (bt1
== VT_LDOUBLE
|| bt2
== VT_LDOUBLE
) {
2778 type
.t
= VT_LDOUBLE
;
2779 } else if (bt1
== VT_DOUBLE
|| bt2
== VT_DOUBLE
) {
2784 } else if (bt1
== VT_LLONG
|| bt2
== VT_LLONG
) {
2785 /* cast to biggest op */
2786 type
.t
= VT_LLONG
| VT_LONG
;
2787 if (bt1
== VT_LLONG
)
2789 if (bt2
== VT_LLONG
)
2791 /* convert to unsigned if it does not fit in a long long */
2792 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
) ||
2793 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
))
2794 type
.t
|= VT_UNSIGNED
;
2796 /* integer operations */
2797 type
.t
= VT_INT
| (VT_LONG
& (t1
| t2
));
2798 /* convert to unsigned if it does not fit in an integer */
2799 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
) ||
2800 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
))
2801 type
.t
|= VT_UNSIGNED
;
2808 /* generic gen_op: handles types problems */
2809 ST_FUNC
void gen_op(int op
)
2811 int t1
, t2
, bt1
, bt2
, t
;
2812 CType type1
, combtype
;
2815 t1
= vtop
[-1].type
.t
;
2816 t2
= vtop
[0].type
.t
;
2817 bt1
= t1
& VT_BTYPE
;
2818 bt2
= t2
& VT_BTYPE
;
2820 if (bt1
== VT_FUNC
|| bt2
== VT_FUNC
) {
2821 if (bt2
== VT_FUNC
) {
2822 mk_pointer(&vtop
->type
);
2825 if (bt1
== VT_FUNC
) {
2827 mk_pointer(&vtop
->type
);
2832 } else if (!combine_types(&combtype
, vtop
- 1, vtop
, op
)) {
2833 tcc_error_noabort("invalid operand types for binary operation");
2835 } else if (bt1
== VT_PTR
|| bt2
== VT_PTR
) {
2836 /* at least one operand is a pointer */
2837 /* relational op: must be both pointers */
2841 /* if both pointers, then it must be the '-' op */
2842 if (bt1
== VT_PTR
&& bt2
== VT_PTR
) {
2844 tcc_error("cannot use pointers here");
2845 vpush_type_size(pointed_type(&vtop
[-1].type
), &align
);
2848 vtop
->type
.t
= VT_PTRDIFF_T
;
2852 /* exactly one pointer : must be '+' or '-'. */
2853 if (op
!= '-' && op
!= '+')
2854 tcc_error("cannot use pointers here");
2855 /* Put pointer as first operand */
2856 if (bt2
== VT_PTR
) {
2858 t
= t1
, t1
= t2
, t2
= t
;
2861 if ((vtop
[0].type
.t
& VT_BTYPE
) == VT_LLONG
)
2862 /* XXX: truncate here because gen_opl can't handle ptr + long long */
2865 type1
= vtop
[-1].type
;
2866 vpush_type_size(pointed_type(&vtop
[-1].type
), &align
);
2868 #ifdef CONFIG_TCC_BCHECK
2869 if (tcc_state
->do_bounds_check
&& !const_wanted
) {
2870 /* if bounded pointers, we generate a special code to
2877 gen_bounded_ptr_add();
2883 type1
.t
&= ~(VT_ARRAY
|VT_VLA
);
2884 /* put again type if gen_opic() swapped operands */
2888 /* floats can only be used for a few operations */
2889 if (is_float(combtype
.t
)
2890 && op
!= '+' && op
!= '-' && op
!= '*' && op
!= '/'
2892 tcc_error("invalid operands for binary operation");
2893 else if (op
== TOK_SHR
|| op
== TOK_SAR
|| op
== TOK_SHL
) {
2894 t
= bt1
== VT_LLONG
? VT_LLONG
: VT_INT
;
2895 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (t
| VT_UNSIGNED
))
2897 t
|= (VT_LONG
& t1
);
2901 t
= t2
= combtype
.t
;
2902 /* XXX: currently, some unsigned operations are explicit, so
2903 we modify them here */
2904 if (t
& VT_UNSIGNED
) {
2911 else if (op
== TOK_LT
)
2913 else if (op
== TOK_GT
)
2915 else if (op
== TOK_LE
)
2917 else if (op
== TOK_GE
)
2923 /* special case for shifts and long long: we keep the shift as
2925 if (op
== TOK_SHR
|| op
== TOK_SAR
|| op
== TOK_SHL
)
2932 if (TOK_ISCOND(op
)) {
2933 /* relational op: the result is an int */
2934 vtop
->type
.t
= VT_INT
;
2939 // Make sure that we have converted to an rvalue:
2940 if (vtop
->r
& VT_LVAL
)
2941 gv(is_float(vtop
->type
.t
& VT_BTYPE
) ? RC_FLOAT
: RC_INT
);
2944 #if defined TCC_TARGET_ARM64 || defined TCC_TARGET_RISCV64 || defined TCC_TARGET_ARM
2945 #define gen_cvt_itof1 gen_cvt_itof
2947 /* generic itof for unsigned long long case */
2948 static void gen_cvt_itof1(int t
)
2950 if ((vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
)) ==
2951 (VT_LLONG
| VT_UNSIGNED
)) {
2954 vpush_helper_func(TOK___floatundisf
);
2955 #if LDOUBLE_SIZE != 8
2956 else if (t
== VT_LDOUBLE
)
2957 vpush_helper_func(TOK___floatundixf
);
2960 vpush_helper_func(TOK___floatundidf
);
2971 #if defined TCC_TARGET_ARM64 || defined TCC_TARGET_RISCV64
2972 #define gen_cvt_ftoi1 gen_cvt_ftoi
2974 /* generic ftoi for unsigned long long case */
2975 static void gen_cvt_ftoi1(int t
)
2978 if (t
== (VT_LLONG
| VT_UNSIGNED
)) {
2979 /* not handled natively */
2980 st
= vtop
->type
.t
& VT_BTYPE
;
2982 vpush_helper_func(TOK___fixunssfdi
);
2983 #if LDOUBLE_SIZE != 8
2984 else if (st
== VT_LDOUBLE
)
2985 vpush_helper_func(TOK___fixunsxfdi
);
2988 vpush_helper_func(TOK___fixunsdfdi
);
2999 /* special delayed cast for char/short */
3000 static void force_charshort_cast(void)
3002 int sbt
= BFGET(vtop
->r
, VT_MUSTCAST
) == 2 ? VT_LLONG
: VT_INT
;
3003 int dbt
= vtop
->type
.t
;
3004 vtop
->r
&= ~VT_MUSTCAST
;
3006 gen_cast_s(dbt
== VT_BOOL
? VT_BYTE
|VT_UNSIGNED
: dbt
);
3010 static void gen_cast_s(int t
)
3018 /* cast 'vtop' to 'type'. Casting to bitfields is forbidden. */
3019 static void gen_cast(CType
*type
)
3021 int sbt
, dbt
, sf
, df
, c
;
3022 int dbt_bt
, sbt_bt
, ds
, ss
, bits
, trunc
;
3024 /* special delayed cast for char/short */
3025 if (vtop
->r
& VT_MUSTCAST
)
3026 force_charshort_cast();
3028 /* bitfields first get cast to ints */
3029 if (vtop
->type
.t
& VT_BITFIELD
)
3032 dbt
= type
->t
& (VT_BTYPE
| VT_UNSIGNED
);
3033 sbt
= vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
);
3041 dbt_bt
= dbt
& VT_BTYPE
;
3042 sbt_bt
= sbt
& VT_BTYPE
;
3043 if (dbt_bt
== VT_VOID
)
3045 if (sbt_bt
== VT_VOID
) {
3047 cast_error(&vtop
->type
, type
);
3050 c
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
3051 #if !defined TCC_IS_NATIVE && !defined TCC_IS_NATIVE_387
3052 c
&= (dbt
!= VT_LDOUBLE
) | !!nocode_wanted
;
3055 /* constant case: we can do it now */
3056 /* XXX: in ISOC, cannot do it if error in convert */
3057 if (sbt
== VT_FLOAT
)
3058 vtop
->c
.ld
= vtop
->c
.f
;
3059 else if (sbt
== VT_DOUBLE
)
3060 vtop
->c
.ld
= vtop
->c
.d
;
3063 if (sbt_bt
== VT_LLONG
) {
3064 if ((sbt
& VT_UNSIGNED
) || !(vtop
->c
.i
>> 63))
3065 vtop
->c
.ld
= vtop
->c
.i
;
3067 vtop
->c
.ld
= -(long double)-vtop
->c
.i
;
3069 if ((sbt
& VT_UNSIGNED
) || !(vtop
->c
.i
>> 31))
3070 vtop
->c
.ld
= (uint32_t)vtop
->c
.i
;
3072 vtop
->c
.ld
= -(long double)-(uint32_t)vtop
->c
.i
;
3075 if (dbt
== VT_FLOAT
)
3076 vtop
->c
.f
= (float)vtop
->c
.ld
;
3077 else if (dbt
== VT_DOUBLE
)
3078 vtop
->c
.d
= (double)vtop
->c
.ld
;
3079 } else if (sf
&& dbt
== VT_BOOL
) {
3080 vtop
->c
.i
= (vtop
->c
.ld
!= 0);
3083 vtop
->c
.i
= vtop
->c
.ld
;
3084 else if (sbt_bt
== VT_LLONG
|| (PTR_SIZE
== 8 && sbt
== VT_PTR
))
3086 else if (sbt
& VT_UNSIGNED
)
3087 vtop
->c
.i
= (uint32_t)vtop
->c
.i
;
3089 vtop
->c
.i
= ((uint32_t)vtop
->c
.i
| -(vtop
->c
.i
& 0x80000000));
3091 if (dbt_bt
== VT_LLONG
|| (PTR_SIZE
== 8 && dbt
== VT_PTR
))
3093 else if (dbt
== VT_BOOL
)
3094 vtop
->c
.i
= (vtop
->c
.i
!= 0);
3096 uint32_t m
= dbt_bt
== VT_BYTE
? 0xff :
3097 dbt_bt
== VT_SHORT
? 0xffff :
3100 if (!(dbt
& VT_UNSIGNED
))
3101 vtop
->c
.i
|= -(vtop
->c
.i
& ((m
>> 1) + 1));
3106 } else if (dbt
== VT_BOOL
3107 && (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
))
3108 == (VT_CONST
| VT_SYM
)) {
3109 /* addresses are considered non-zero (see tcctest.c:sinit23) */
3115 /* cannot generate code for global or static initializers */
3116 if (STATIC_DATA_WANTED
)
3119 /* non constant case: generate code */
3120 if (dbt
== VT_BOOL
) {
3121 gen_test_zero(TOK_NE
);
3127 /* convert from fp to fp */
3130 /* convert int to fp */
3133 /* convert fp to int */
3135 if (dbt_bt
!= VT_LLONG
&& dbt_bt
!= VT_INT
)
3138 goto again
; /* may need char/short cast */
3143 ds
= btype_size(dbt_bt
);
3144 ss
= btype_size(sbt_bt
);
3145 if (ds
== 0 || ss
== 0)
3148 if (IS_ENUM(type
->t
) && type
->ref
->c
< 0)
3149 tcc_error("cast to incomplete type");
3151 /* same size and no sign conversion needed */
3152 if (ds
== ss
&& ds
>= 4)
3154 if (dbt_bt
== VT_PTR
|| sbt_bt
== VT_PTR
) {
3155 tcc_warning("cast between pointer and integer of different size");
3156 if (sbt_bt
== VT_PTR
) {
3157 /* put integer type to allow logical operations below */
3158 vtop
->type
.t
= (PTR_SIZE
== 8 ? VT_LLONG
: VT_INT
);
3162 /* processor allows { int a = 0, b = *(char*)&a; }
3163 That means that if we cast to less width, we can just
3164 change the type and read it still later. */
3165 #define ALLOW_SUBTYPE_ACCESS 1
3167 if (ALLOW_SUBTYPE_ACCESS
&& (vtop
->r
& VT_LVAL
)) {
3168 /* value still in memory */
3172 if (ds
<= 4 && !(dbt
== (VT_SHORT
| VT_UNSIGNED
) && sbt
== VT_BYTE
)) {
3174 goto done
; /* no 64bit envolved */
3182 /* generate high word */
3183 if (sbt
& VT_UNSIGNED
) {
3192 } else if (ss
== 8) {
3193 /* from long long: just take low order word */
3201 /* need to convert from 32bit to 64bit */
3202 if (sbt
& VT_UNSIGNED
) {
3203 #if defined(TCC_TARGET_RISCV64)
3204 /* RISC-V keeps 32bit vals in registers sign-extended.
3205 So here we need a zero-extension. */
3214 ss
= ds
, ds
= 4, dbt
= sbt
;
3215 } else if (ss
== 8) {
3216 /* RISC-V keeps 32bit vals in registers sign-extended.
3217 So here we need a sign-extension for signed types and
3218 zero-extension for unsigned types. */
3219 #if !defined(TCC_TARGET_RISCV64)
3220 trunc
= 32; /* zero upper 32 bits for non RISC-V targets */
3229 #if defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64 || defined TCC_TARGET_ARM64
3235 bits
= (ss
- ds
) * 8;
3236 /* for unsigned, gen_op will convert SAR to SHR */
3237 vtop
->type
.t
= (ss
== 8 ? VT_LLONG
: VT_INT
) | (dbt
& VT_UNSIGNED
);
3240 vpushi(bits
- trunc
);
3247 vtop
->type
.t
&= ~ ( VT_CONSTANT
| VT_VOLATILE
| VT_ARRAY
);
/* NOTE(review): damaged extraction of type_size() ("return type size as
   known at compile time, put alignment at 'a'").  The embedded original
   line numbers jump (3251 -> 3256, 3257 -> 3262, ...), i.e. many source
   lines (braces, declarations, returns) were dropped, and the surviving
   statements are wrapped mid-expression.  Do not edit in place; restore
   this function from a pristine copy of the file first. */
3250 /* return type size as known at compile time. Put alignment at 'a' */
3251 ST_FUNC
int type_size(CType
*type
, int *a
)
3256 bt
= type
->t
& VT_BTYPE
;
3257 if (bt
== VT_STRUCT
) {
3262 } else if (bt
== VT_PTR
) {
3263 if (type
->t
& VT_ARRAY
) {
3267 ts
= type_size(&s
->type
, a
);
3269 if (ts
< 0 && s
->c
< 0)
3277 } else if (IS_ENUM(type
->t
) && type
->ref
->c
< 0) {
3279 return -1; /* incomplete enum */
3280 } else if (bt
== VT_LDOUBLE
) {
3282 return LDOUBLE_SIZE
;
3283 } else if (bt
== VT_DOUBLE
|| bt
== VT_LLONG
) {
3284 #ifdef TCC_TARGET_I386
3285 #ifdef TCC_TARGET_PE
3290 #elif defined(TCC_TARGET_ARM)
3300 } else if (bt
== VT_INT
|| bt
== VT_FLOAT
) {
3303 } else if (bt
== VT_SHORT
) {
3306 } else if (bt
== VT_QLONG
|| bt
== VT_QFLOAT
) {
3310 /* char, void, function, _Bool */
/* NOTE(review): damaged extraction of vpush_type_size() (pushes the
   runtime size of 'type' on the value stack; for VT_VLA types it loads
   the saved size slot at type->ref->c, otherwise it pushes the constant
   type_size()).  Original lines are missing (3317, 3319, 3323, 3325)
   and statements are wrapped.  Restore from a pristine copy before
   editing. */
3316 /* push type size as known at runtime time on top of value stack. Put
3318 static void vpush_type_size(CType
*type
, int *a
)
3320 if (type
->t
& VT_VLA
) {
3321 type_size(&type
->ref
->type
, a
);
3322 vset(&int_type
, VT_LOCAL
|VT_LVAL
, type
->ref
->c
);
3324 int size
= type_size(type
, a
);
3326 tcc_error("unknown type size");
3335 /* return the pointed type of t */
3336 static inline CType
*pointed_type(CType
*type
)
3338 return &type
->ref
->type
;
3341 /* modify type so that its it is a pointer to type. */
3342 ST_FUNC
void mk_pointer(CType
*type
)
3345 s
= sym_push(SYM_FIELD
, type
, 0, -1);
3346 type
->t
= VT_PTR
| (type
->t
& VT_STORAGE
);
3350 /* return true if type1 and type2 are exactly the same (including
3353 static int is_compatible_types(CType
*type1
, CType
*type2
)
3355 return compare_types(type1
,type2
,0);
3358 /* return true if type1 and type2 are the same (ignoring qualifiers).
3360 static int is_compatible_unqualified_types(CType
*type1
, CType
*type2
)
3362 return compare_types(type1
,type2
,1);
3365 static void cast_error(CType
*st
, CType
*dt
)
3367 type_incompatibility_error(st
, dt
, "cannot convert '%s' to '%s'");
/* NOTE(review): damaged extraction of verify_assign_cast() (type-checks
   storing vtop into an lvalue of type 'dt', emitting the classic
   pointer/integer and qualifier-discard warnings).  Original line
   numbers jump (3380 -> 3384 -> 3387, ...), so branches, braces and the
   function tail are missing, and statements are wrapped mid-expression.
   Restore from a pristine copy before editing. */
3370 /* verify type compatibility to store vtop in 'dt' type */
3371 static void verify_assign_cast(CType
*dt
)
3373 CType
*st
, *type1
, *type2
;
3374 int dbt
, sbt
, qualwarn
, lvl
;
3376 st
= &vtop
->type
; /* source type */
3377 dbt
= dt
->t
& VT_BTYPE
;
3378 sbt
= st
->t
& VT_BTYPE
;
3379 if (dt
->t
& VT_CONSTANT
)
3380 tcc_warning("assignment of read-only location");
3384 tcc_error("assignment to void expression");
3387 /* special cases for pointers */
3388 /* '0' can also be a pointer */
3389 if (is_null_pointer(vtop
))
3391 /* accept implicit pointer to integer cast with warning */
3392 if (is_integer_btype(sbt
)) {
3393 tcc_warning("assignment makes pointer from integer without a cast");
3396 type1
= pointed_type(dt
);
3398 type2
= pointed_type(st
);
3399 else if (sbt
== VT_FUNC
)
3400 type2
= st
; /* a function is implicitly a function pointer */
3403 if (is_compatible_types(type1
, type2
))
3405 for (qualwarn
= lvl
= 0;; ++lvl
) {
3406 if (((type2
->t
& VT_CONSTANT
) && !(type1
->t
& VT_CONSTANT
)) ||
3407 ((type2
->t
& VT_VOLATILE
) && !(type1
->t
& VT_VOLATILE
)))
3409 dbt
= type1
->t
& (VT_BTYPE
|VT_LONG
);
3410 sbt
= type2
->t
& (VT_BTYPE
|VT_LONG
);
3411 if (dbt
!= VT_PTR
|| sbt
!= VT_PTR
)
3413 type1
= pointed_type(type1
);
3414 type2
= pointed_type(type2
);
3416 if (!is_compatible_unqualified_types(type1
, type2
)) {
3417 if ((dbt
== VT_VOID
|| sbt
== VT_VOID
) && lvl
== 0) {
3418 /* void * can match anything */
3419 } else if (dbt
== sbt
3420 && is_integer_btype(sbt
& VT_BTYPE
)
3421 && IS_ENUM(type1
->t
) + IS_ENUM(type2
->t
)
3422 + !!((type1
->t
^ type2
->t
) & VT_UNSIGNED
) < 2) {
3423 /* Like GCC don't warn by default for merely changes
3424 in pointer target signedness. Do warn for different
3425 base types, though, in particular for unsigned enums
3426 and signed int targets. */
3428 tcc_warning("assignment from incompatible pointer type");
3433 tcc_warning_c(warn_discarded_qualifiers
)("assignment discards qualifiers from pointer target type");
3439 if (sbt
== VT_PTR
|| sbt
== VT_FUNC
) {
3440 tcc_warning("assignment makes integer from pointer without a cast");
3441 } else if (sbt
== VT_STRUCT
) {
3442 goto case_VT_STRUCT
;
3444 /* XXX: more tests */
3448 if (!is_compatible_unqualified_types(dt
, st
)) {
3456 static void gen_assign_cast(CType
*dt
)
3458 verify_assign_cast(dt
);
/* NOTE(review): damaged extraction of vstore() ("store vtop in lvalue
   pushed on stack": struct assignment via memmove/native copy, bitfield
   stores, delayed char/short casts, two-word stores).  Large runs of
   original lines are missing (e.g. 3481-3485, 3500-3507, 3543-3556) and
   every statement is wrapped mid-expression.  Restore from a pristine
   copy before editing. */
3462 /* store vtop in lvalue pushed on stack */
3463 ST_FUNC
void vstore(void)
3465 int sbt
, dbt
, ft
, r
, size
, align
, bit_size
, bit_pos
, delayed_cast
;
3467 ft
= vtop
[-1].type
.t
;
3468 sbt
= vtop
->type
.t
& VT_BTYPE
;
3469 dbt
= ft
& VT_BTYPE
;
3470 verify_assign_cast(&vtop
[-1].type
);
3472 if (sbt
== VT_STRUCT
) {
3473 /* if structure, only generate pointer */
3474 /* structure assignment : generate memcpy */
3475 size
= type_size(&vtop
->type
, &align
);
3476 /* destination, keep on stack() as result */
3478 #ifdef CONFIG_TCC_BCHECK
3479 if (vtop
->r
& VT_MUSTBOUND
)
3480 gbound(); /* check would be wrong after gaddrof() */
3482 vtop
->type
.t
= VT_PTR
;
3486 #ifdef CONFIG_TCC_BCHECK
3487 if (vtop
->r
& VT_MUSTBOUND
)
3490 vtop
->type
.t
= VT_PTR
;
3493 #ifdef TCC_TARGET_NATIVE_STRUCT_COPY
3495 #ifdef CONFIG_TCC_BCHECK
3496 && !tcc_state
->do_bounds_check
3499 gen_struct_copy(size
);
3505 /* Use memmove, rather than memcpy, as dest and src may be same: */
3508 vpush_helper_func(TOK_memmove8
);
3509 else if(!(align
& 3))
3510 vpush_helper_func(TOK_memmove4
);
3513 vpush_helper_func(TOK_memmove
);
3518 } else if (ft
& VT_BITFIELD
) {
3519 /* bitfield store handling */
3521 /* save lvalue as expression result (example: s.b = s.a = n;) */
3522 vdup(), vtop
[-1] = vtop
[-2];
3524 bit_pos
= BIT_POS(ft
);
3525 bit_size
= BIT_SIZE(ft
);
3526 /* remove bit field info to avoid loops */
3527 vtop
[-1].type
.t
= ft
& ~VT_STRUCT_MASK
;
3529 if (dbt
== VT_BOOL
) {
3530 gen_cast(&vtop
[-1].type
);
3531 vtop
[-1].type
.t
= (vtop
[-1].type
.t
& ~VT_BTYPE
) | (VT_BYTE
| VT_UNSIGNED
);
3533 r
= adjust_bf(vtop
- 1, bit_pos
, bit_size
);
3534 if (dbt
!= VT_BOOL
) {
3535 gen_cast(&vtop
[-1].type
);
3536 dbt
= vtop
[-1].type
.t
& VT_BTYPE
;
3538 if (r
== VT_STRUCT
) {
3539 store_packed_bf(bit_pos
, bit_size
);
3541 unsigned long long mask
= (1ULL << bit_size
) - 1;
3542 if (dbt
!= VT_BOOL
) {
3544 if (dbt
== VT_LLONG
)
3547 vpushi((unsigned)mask
);
3554 /* duplicate destination */
3557 /* load destination, mask and or with source */
3558 if (dbt
== VT_LLONG
)
3559 vpushll(~(mask
<< bit_pos
));
3561 vpushi(~((unsigned)mask
<< bit_pos
));
3566 /* ... and discard */
3569 } else if (dbt
== VT_VOID
) {
3572 /* optimize char/short casts */
3574 if ((dbt
== VT_BYTE
|| dbt
== VT_SHORT
)
3575 && is_integer_btype(sbt
)
3577 if ((vtop
->r
& VT_MUSTCAST
)
3578 && btype_size(dbt
) > btype_size(sbt
)
3580 force_charshort_cast();
3583 gen_cast(&vtop
[-1].type
);
3586 #ifdef CONFIG_TCC_BCHECK
3587 /* bound check case */
3588 if (vtop
[-1].r
& VT_MUSTBOUND
) {
3594 gv(RC_TYPE(dbt
)); /* generate value */
3597 vtop
->r
|= BFVAL(VT_MUSTCAST
, (sbt
== VT_LLONG
) + 1);
3598 //tcc_warning("deley cast %x -> %x", sbt, dbt);
3599 vtop
->type
.t
= ft
& VT_TYPE
;
3602 /* if lvalue was saved on stack, must read it */
3603 if ((vtop
[-1].r
& VT_VALMASK
) == VT_LLOCAL
) {
3605 r
= get_reg(RC_INT
);
3606 sv
.type
.t
= VT_PTRDIFF_T
;
3607 sv
.r
= VT_LOCAL
| VT_LVAL
;
3608 sv
.c
.i
= vtop
[-1].c
.i
;
3610 vtop
[-1].r
= r
| VT_LVAL
;
3613 r
= vtop
->r
& VT_VALMASK
;
3614 /* two word case handling :
3615 store second register at word + 4 (or +8 for x86-64) */
3616 if (USING_TWO_WORDS(dbt
)) {
3617 int load_type
= (dbt
== VT_QFLOAT
) ? VT_DOUBLE
: VT_PTRDIFF_T
;
3618 vtop
[-1].type
.t
= load_type
;
3621 /* convert to int to increment easily */
3622 vtop
->type
.t
= VT_PTRDIFF_T
;
3628 vtop
[-1].type
.t
= load_type
;
3629 /* XXX: it works because r2 is spilled last ! */
3630 store(vtop
->r2
, vtop
- 1);
3636 vtop
--; /* NOT vpop() because on x86 it would flush the fp stack */
/* NOTE(review): damaged extraction of inc() (emits a pre/post ++/--
   on the lvalue at vtop; 'post' selects post-op semantics, 'c' is the
   TOK_INC/TOK_DEC token so 'c - TOK_MID' yields +1/-1).  Original
   lines 3642-3643, 3645, 3647-3650, 3652 and 3654 are missing.
   Restore from a pristine copy before editing. */
3640 /* post defines POST/PRE add. c is the token ++ or -- */
3641 ST_FUNC
void inc(int post
, int c
)
3644 vdup(); /* save lvalue */
3646 gv_dup(); /* duplicate value */
3651 vpushi(c
- TOK_MID
);
3653 vstore(); /* store value */
3655 vpop(); /* if post op, return saved value */
/* NOTE(review): damaged extraction of parse_mult_str() (reads one or
   more concatenated string-literal tokens into 'astr', NUL-terminating
   the result; 'msg' is the expect() diagnostic).  Original lines
   3659, 3661-3663, 3667-3668 are missing.  Restore from a pristine
   copy before editing. */
3658 ST_FUNC
void parse_mult_str (CString
*astr
, const char *msg
)
3660 /* read the string */
3664 while (tok
== TOK_STR
) {
3665 /* XXX: add \0 handling too ? */
3666 cstr_cat(astr
, tokc
.str
.data
, -1);
3669 cstr_ccat(astr
, '\0');
3672 /* If I is >= 1 and a power of two, returns log2(i)+1.
3673 If I is 0 returns 0. */
3674 ST_FUNC
int exact_log2p1(int i
)
3679 for (ret
= 1; i
>= 1 << 8; ret
+= 8)
/* NOTE(review): damaged extraction of parse_attribute() (parses the
   GNU __attribute__((...)) list into 'ad': cleanup, ctor/dtor,
   always_inline, section, alias, visibility, aligned, calling
   conventions, __mode__, dllexport/dllimport, etc., skipping unknown
   attributes).  Very large runs of original lines are missing (e.g.
   3692-3696, 3705-3715, 3787-3797) and the switch skeleton is gone.
   Restore from a pristine copy before editing. */
3690 /* Parse __attribute__((...)) GNUC extension. */
3691 static void parse_attribute(AttributeDef
*ad
)
3697 if (tok
!= TOK_ATTRIBUTE1
&& tok
!= TOK_ATTRIBUTE2
)
3702 while (tok
!= ')') {
3703 if (tok
< TOK_IDENT
)
3704 expect("attribute name");
3716 tcc_warning_c(warn_implicit_function_declaration
)(
3717 "implicit declaration of function '%s'", get_tok_str(tok
, &tokc
));
3718 s
= external_global_sym(tok
, &func_old_type
);
3719 } else if ((s
->type
.t
& VT_BTYPE
) != VT_FUNC
)
3720 tcc_error("'%s' is not declared as function", get_tok_str(tok
, &tokc
));
3721 ad
->cleanup_func
= s
;
3726 case TOK_CONSTRUCTOR1
:
3727 case TOK_CONSTRUCTOR2
:
3728 ad
->f
.func_ctor
= 1;
3730 case TOK_DESTRUCTOR1
:
3731 case TOK_DESTRUCTOR2
:
3732 ad
->f
.func_dtor
= 1;
3734 case TOK_ALWAYS_INLINE1
:
3735 case TOK_ALWAYS_INLINE2
:
3736 ad
->f
.func_alwinl
= 1;
3741 parse_mult_str(&astr
, "section name");
3742 ad
->section
= find_section(tcc_state
, (char *)astr
.data
);
3749 parse_mult_str(&astr
, "alias(\"target\")");
3750 ad
->alias_target
= /* save string as token, for later */
3751 tok_alloc((char*)astr
.data
, astr
.size
-1)->tok
;
3755 case TOK_VISIBILITY1
:
3756 case TOK_VISIBILITY2
:
3758 parse_mult_str(&astr
,
3759 "visibility(\"default|hidden|internal|protected\")");
3760 if (!strcmp (astr
.data
, "default"))
3761 ad
->a
.visibility
= STV_DEFAULT
;
3762 else if (!strcmp (astr
.data
, "hidden"))
3763 ad
->a
.visibility
= STV_HIDDEN
;
3764 else if (!strcmp (astr
.data
, "internal"))
3765 ad
->a
.visibility
= STV_INTERNAL
;
3766 else if (!strcmp (astr
.data
, "protected"))
3767 ad
->a
.visibility
= STV_PROTECTED
;
3769 expect("visibility(\"default|hidden|internal|protected\")");
3778 if (n
<= 0 || (n
& (n
- 1)) != 0)
3779 tcc_error("alignment must be a positive power of two");
3784 ad
->a
.aligned
= exact_log2p1(n
);
3785 if (n
!= 1 << (ad
->a
.aligned
- 1))
3786 tcc_error("alignment of %d is larger than implemented", n
);
3798 /* currently, no need to handle it because tcc does not
3799 track unused objects */
3803 ad
->f
.func_noreturn
= 1;
3808 ad
->f
.func_call
= FUNC_CDECL
;
3813 ad
->f
.func_call
= FUNC_STDCALL
;
3815 #ifdef TCC_TARGET_I386
3825 ad
->f
.func_call
= FUNC_FASTCALL1
+ n
- 1;
3831 ad
->f
.func_call
= FUNC_FASTCALLW
;
3838 ad
->attr_mode
= VT_LLONG
+ 1;
3841 ad
->attr_mode
= VT_BYTE
+ 1;
3844 ad
->attr_mode
= VT_SHORT
+ 1;
3848 ad
->attr_mode
= VT_INT
+ 1;
3851 tcc_warning("__mode__(%s) not supported\n", get_tok_str(tok
, NULL
));
3858 ad
->a
.dllexport
= 1;
3860 case TOK_NODECORATE
:
3861 ad
->a
.nodecorate
= 1;
3864 ad
->a
.dllimport
= 1;
3867 tcc_warning_c(warn_unsupported
)("'%s' attribute ignored", get_tok_str(t
, NULL
));
3868 /* skip parameters */
3870 int parenthesis
= 0;
3874 else if (tok
== ')')
3877 } while (parenthesis
&& tok
!= -1);
/* NOTE(review): damaged extraction of find_field() (looks up member
   'v' in struct/union 'type', recursing into anonymous sub-structs
   and accumulating the byte offset in *cumofs).  Original lines
   3891-3893 (locals / direct-hit test) and the tail from 3899 on are
   missing.  Restore from a pristine copy before editing. */
3890 static Sym
* find_field (CType
*type
, int v
, int *cumofs
)
3894 while ((s
= s
->next
) != NULL
) {
3895 if ((s
->v
& SYM_FIELD
) &&
3896 (s
->type
.t
& VT_BTYPE
) == VT_STRUCT
&&
3897 (s
->v
& ~SYM_FIELD
) >= SYM_FIRST_ANOM
) {
3898 Sym
*ret
= find_field (&s
->type
, v
, cumofs
);
3910 static void check_fields (CType
*type
, int check
)
3914 while ((s
= s
->next
) != NULL
) {
3915 int v
= s
->v
& ~SYM_FIELD
;
3916 if (v
< SYM_FIRST_ANOM
) {
3917 TokenSym
*ts
= table_ident
[v
- TOK_IDENT
];
3918 if (check
&& (ts
->tok
& SYM_FIELD
))
3919 tcc_error("duplicate member '%s'", get_tok_str(v
, NULL
));
3920 ts
->tok
^= SYM_FIELD
;
3921 } else if ((s
->type
.t
& VT_BTYPE
) == VT_STRUCT
)
3922 check_fields (&s
->type
, check
);
/* NOTE(review): damaged extraction of struct_layout() (computes field
   offsets, bit-field packing in both PCC/GCC and MS modes, struct size
   and alignment, then post-fixes bitfield access types).  Very large
   runs of original lines are missing (e.g. 3932-3937, 3977-3980,
   4068-4079, 4133-4141) and all statements are wrapped.  Restore from
   a pristine copy before editing. */
3926 static void struct_layout(CType
*type
, AttributeDef
*ad
)
3928 int size
, align
, maxalign
, offset
, c
, bit_pos
, bit_size
;
3929 int packed
, a
, bt
, prevbt
, prev_bit_size
;
3930 int pcc
= !tcc_state
->ms_bitfields
;
3931 int pragma_pack
= *tcc_state
->pack_stack_ptr
;
3938 prevbt
= VT_STRUCT
; /* make it never match */
3943 for (f
= type
->ref
->next
; f
; f
= f
->next
) {
3944 if (f
->type
.t
& VT_BITFIELD
)
3945 bit_size
= BIT_SIZE(f
->type
.t
);
3948 size
= type_size(&f
->type
, &align
);
3949 a
= f
->a
.aligned
? 1 << (f
->a
.aligned
- 1) : 0;
3952 if (pcc
&& bit_size
== 0) {
3953 /* in pcc mode, packing does not affect zero-width bitfields */
3956 /* in pcc mode, attribute packed overrides if set. */
3957 if (pcc
&& (f
->a
.packed
|| ad
->a
.packed
))
3960 /* pragma pack overrides align if lesser and packs bitfields always */
3963 if (pragma_pack
< align
)
3964 align
= pragma_pack
;
3965 /* in pcc mode pragma pack also overrides individual align */
3966 if (pcc
&& pragma_pack
< a
)
3970 /* some individual align was specified */
3974 if (type
->ref
->type
.t
== VT_UNION
) {
3975 if (pcc
&& bit_size
>= 0)
3976 size
= (bit_size
+ 7) >> 3;
3981 } else if (bit_size
< 0) {
3983 c
+= (bit_pos
+ 7) >> 3;
3984 c
= (c
+ align
- 1) & -align
;
3993 /* A bit-field. Layout is more complicated. There are two
3994 options: PCC (GCC) compatible and MS compatible */
3996 /* In PCC layout a bit-field is placed adjacent to the
3997 preceding bit-fields, except if:
3999 - an individual alignment was given
4000 - it would overflow its base type container and
4001 there is no packing */
4002 if (bit_size
== 0) {
4004 c
= (c
+ ((bit_pos
+ 7) >> 3) + align
- 1) & -align
;
4006 } else if (f
->a
.aligned
) {
4008 } else if (!packed
) {
4010 int ofs
= ((c
* 8 + bit_pos
) % a8
+ bit_size
+ a8
- 1) / a8
;
4011 if (ofs
> size
/ align
)
4015 /* in pcc mode, long long bitfields have type int if they fit */
4016 if (size
== 8 && bit_size
<= 32)
4017 f
->type
.t
= (f
->type
.t
& ~VT_BTYPE
) | VT_INT
, size
= 4;
4019 while (bit_pos
>= align
* 8)
4020 c
+= align
, bit_pos
-= align
* 8;
4023 /* In PCC layout named bit-fields influence the alignment
4024 of the containing struct using the base types alignment,
4025 except for packed fields (which here have correct align). */
4026 if (f
->v
& SYM_FIRST_ANOM
4027 // && bit_size // ??? gcc on ARM/rpi does that
4032 bt
= f
->type
.t
& VT_BTYPE
;
4033 if ((bit_pos
+ bit_size
> size
* 8)
4034 || (bit_size
> 0) == (bt
!= prevbt
)
4036 c
= (c
+ align
- 1) & -align
;
4039 /* In MS bitfield mode a bit-field run always uses
4040 at least as many bits as the underlying type.
4041 To start a new run it's also required that this
4042 or the last bit-field had non-zero width. */
4043 if (bit_size
|| prev_bit_size
)
4046 /* In MS layout the records alignment is normally
4047 influenced by the field, except for a zero-width
4048 field at the start of a run (but by further zero-width
4049 fields it is again). */
4050 if (bit_size
== 0 && prevbt
!= bt
)
4053 prev_bit_size
= bit_size
;
4056 f
->type
.t
= (f
->type
.t
& ~(0x3f << VT_STRUCT_SHIFT
))
4057 | (bit_pos
<< VT_STRUCT_SHIFT
);
4058 bit_pos
+= bit_size
;
4060 if (align
> maxalign
)
4064 printf("set field %s offset %-2d size %-2d align %-2d",
4065 get_tok_str(f
->v
& ~SYM_FIELD
, NULL
), offset
, size
, align
);
4066 if (f
->type
.t
& VT_BITFIELD
) {
4067 printf(" pos %-2d bits %-2d",
4080 c
+= (bit_pos
+ 7) >> 3;
4082 /* store size and alignment */
4083 a
= bt
= ad
->a
.aligned
? 1 << (ad
->a
.aligned
- 1) : 1;
4087 if (pragma_pack
&& pragma_pack
< maxalign
&& 0 == pcc
) {
4088 /* can happen if individual align for some member was given. In
4089 this case MSVC ignores maxalign when aligning the size */
4094 c
= (c
+ a
- 1) & -a
;
4098 printf("struct size %-2d align %-2d\n\n", c
, a
), fflush(stdout
);
4101 /* check whether we can access bitfields by their type */
4102 for (f
= type
->ref
->next
; f
; f
= f
->next
) {
4106 if (0 == (f
->type
.t
& VT_BITFIELD
))
4110 bit_size
= BIT_SIZE(f
->type
.t
);
4113 bit_pos
= BIT_POS(f
->type
.t
);
4114 size
= type_size(&f
->type
, &align
);
4116 if (bit_pos
+ bit_size
<= size
* 8 && f
->c
+ size
<= c
4117 #ifdef TCC_TARGET_ARM
4118 && !(f
->c
& (align
- 1))
4123 /* try to access the field using a different type */
4124 c0
= -1, s
= align
= 1;
4127 px
= f
->c
* 8 + bit_pos
;
4128 cx
= (px
>> 3) & -align
;
4129 px
= px
- (cx
<< 3);
4132 s
= (px
+ bit_size
+ 7) >> 3;
4142 s
= type_size(&t
, &align
);
4146 if (px
+ bit_size
<= s
* 8 && cx
+ s
<= c
4147 #ifdef TCC_TARGET_ARM
4148 && !(cx
& (align
- 1))
4151 /* update offset and bit position */
4154 f
->type
.t
= (f
->type
.t
& ~(0x3f << VT_STRUCT_SHIFT
))
4155 | (bit_pos
<< VT_STRUCT_SHIFT
);
4159 printf("FIX field %s offset %-2d size %-2d align %-2d "
4160 "pos %-2d bits %-2d\n",
4161 get_tok_str(f
->v
& ~SYM_FIELD
, NULL
),
4162 cx
, s
, align
, px
, bit_size
);
4165 /* fall back to load/store single-byte wise */
4166 f
->auxtype
= VT_STRUCT
;
4168 printf("FIX field %s : load byte-wise\n",
4169 get_tok_str(f
->v
& ~SYM_FIELD
, NULL
));
/* NOTE(review): damaged extraction of struct_decl() (parses an
   enum/struct/union declaration into 'type'; handles tag lookup,
   enumerator value ranges, member/bit-field parsing, flexible array
   member checks, then calls check_fields()/struct_layout()).  Very
   large runs of original lines are missing (e.g. 4180-4183, 4210-4216,
   4236-4238, 4244-4256) and all statements are wrapped.  Restore from
   a pristine copy before editing. */
4175 /* enum/struct/union declaration. u is VT_ENUM/VT_STRUCT/VT_UNION */
4176 static void struct_decl(CType
*type
, int u
)
4178 int v
, c
, size
, align
, flexible
;
4179 int bit_size
, bsize
, bt
;
4181 AttributeDef ad
, ad1
;
4184 memset(&ad
, 0, sizeof ad
);
4186 parse_attribute(&ad
);
4190 /* struct already defined ? return it */
4192 expect("struct/union/enum name");
4194 if (s
&& (s
->sym_scope
== local_scope
|| tok
!= '{')) {
4197 if (u
== VT_ENUM
&& IS_ENUM(s
->type
.t
))
4199 tcc_error("redefinition of '%s'", get_tok_str(v
, NULL
));
4204 /* Record the original enum/struct/union token. */
4205 type1
.t
= u
== VT_ENUM
? u
| VT_INT
| VT_UNSIGNED
: u
;
4207 /* we put an undefined size for struct/union */
4208 s
= sym_push(v
| SYM_STRUCT
, &type1
, 0, -1);
4209 s
->r
= 0; /* default alignment is zero as gcc */
4211 type
->t
= s
->type
.t
;
4217 tcc_error("struct/union/enum already defined");
4219 /* cannot be empty */
4220 /* non empty enums are not allowed */
4223 long long ll
= 0, pl
= 0, nl
= 0;
4226 /* enum symbols have static storage */
4227 t
.t
= VT_INT
|VT_STATIC
|VT_ENUM_VAL
;
4231 expect("identifier");
4233 if (ss
&& !local_stack
)
4234 tcc_error("redefinition of enumerator '%s'",
4235 get_tok_str(v
, NULL
));
4239 ll
= expr_const64();
4241 ss
= sym_push(v
, &t
, VT_CONST
, 0);
4243 *ps
= ss
, ps
= &ss
->next
;
4252 /* NOTE: we accept a trailing comma */
4257 /* set integral type of the enum */
4260 if (pl
!= (unsigned)pl
)
4261 t
.t
= (LONG_SIZE
==8 ? VT_LLONG
|VT_LONG
: VT_LLONG
);
4263 } else if (pl
!= (int)pl
|| nl
!= (int)nl
)
4264 t
.t
= (LONG_SIZE
==8 ? VT_LLONG
|VT_LONG
: VT_LLONG
);
4265 s
->type
.t
= type
->t
= t
.t
| VT_ENUM
;
4267 /* set type for enum members */
4268 for (ss
= s
->next
; ss
; ss
= ss
->next
) {
4270 if (ll
== (int)ll
) /* default is int if it fits */
4272 if (t
.t
& VT_UNSIGNED
) {
4273 ss
->type
.t
|= VT_UNSIGNED
;
4274 if (ll
== (unsigned)ll
)
4277 ss
->type
.t
= (ss
->type
.t
& ~VT_BTYPE
)
4278 | (LONG_SIZE
==8 ? VT_LLONG
|VT_LONG
: VT_LLONG
);
4283 while (tok
!= '}') {
4284 if (!parse_btype(&btype
, &ad1
, 0)) {
4290 tcc_error("flexible array member '%s' not at the end of struct",
4291 get_tok_str(v
, NULL
));
4297 type_decl(&type1
, &ad1
, &v
, TYPE_DIRECT
);
4299 if ((type1
.t
& VT_BTYPE
) != VT_STRUCT
)
4300 expect("identifier");
4302 int v
= btype
.ref
->v
;
4303 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
4304 if (tcc_state
->ms_extensions
== 0)
4305 expect("identifier");
4309 if (type_size(&type1
, &align
) < 0) {
4310 if ((u
== VT_STRUCT
) && (type1
.t
& VT_ARRAY
) && c
)
4313 tcc_error("field '%s' has incomplete type",
4314 get_tok_str(v
, NULL
));
4316 if ((type1
.t
& VT_BTYPE
) == VT_FUNC
||
4317 (type1
.t
& VT_BTYPE
) == VT_VOID
||
4318 (type1
.t
& VT_STORAGE
))
4319 tcc_error("invalid type for '%s'",
4320 get_tok_str(v
, NULL
));
4324 bit_size
= expr_const();
4325 /* XXX: handle v = 0 case for messages */
4327 tcc_error("negative width in bit-field '%s'",
4328 get_tok_str(v
, NULL
));
4329 if (v
&& bit_size
== 0)
4330 tcc_error("zero width for bit-field '%s'",
4331 get_tok_str(v
, NULL
));
4332 parse_attribute(&ad1
);
4334 size
= type_size(&type1
, &align
);
4335 if (bit_size
>= 0) {
4336 bt
= type1
.t
& VT_BTYPE
;
4342 tcc_error("bitfields must have scalar type");
4344 if (bit_size
> bsize
) {
4345 tcc_error("width of '%s' exceeds its type",
4346 get_tok_str(v
, NULL
));
4347 } else if (bit_size
== bsize
4348 && !ad
.a
.packed
&& !ad1
.a
.packed
) {
4349 /* no need for bit fields */
4351 } else if (bit_size
== 64) {
4352 tcc_error("field width 64 not implemented");
4354 type1
.t
= (type1
.t
& ~VT_STRUCT_MASK
)
4356 | (bit_size
<< (VT_STRUCT_SHIFT
+ 6));
4359 if (v
!= 0 || (type1
.t
& VT_BTYPE
) == VT_STRUCT
) {
4360 /* Remember we've seen a real field to check
4361 for placement of flexible array member. */
4364 /* If member is a struct or bit-field, enforce
4365 placing into the struct (as anonymous). */
4367 ((type1
.t
& VT_BTYPE
) == VT_STRUCT
||
4372 ss
= sym_push(v
| SYM_FIELD
, &type1
, 0, 0);
4377 if (tok
== ';' || tok
== TOK_EOF
)
4384 parse_attribute(&ad
);
4385 if (ad
.cleanup_func
) {
4386 tcc_warning("attribute '__cleanup__' ignored on type");
4388 check_fields(type
, 1);
4389 check_fields(type
, 0);
4390 struct_layout(type
, &ad
);
4392 tcc_debug_fix_anon(tcc_state
, type
);
4397 static void sym_to_attr(AttributeDef
*ad
, Sym
*s
)
4399 merge_symattr(&ad
->a
, &s
->a
);
4400 merge_funcattr(&ad
->f
, &s
->f
);
4403 /* Add type qualifiers to a type. If the type is an array then the qualifiers
4404 are added to the element type, copied because it could be a typedef. */
4405 static void parse_btype_qualify(CType
*type
, int qualifiers
)
4407 while (type
->t
& VT_ARRAY
) {
4408 type
->ref
= sym_push(SYM_FIELD
, &type
->ref
->type
, 0, type
->ref
->c
);
4409 type
= &type
->ref
->type
;
4411 type
->t
|= qualifiers
;
/* NOTE(review): damaged extraction of parse_btype() (parses a basic
   type specifier list into 'type'/'ad': builtin types, long/short
   combination rules, enum/struct/union, qualifiers, storage classes,
   typeof, attributes, typedef names; returns 0 if no type was found).
   Very large runs of original lines are missing (e.g. 4420-4432,
   4454-4469, 4600-4610, 4655-4662) including the dispatch switch.
   Restore from a pristine copy before editing. */
4414 /* return 0 if no type declaration. otherwise, return the basic type
4417 static int parse_btype(CType
*type
, AttributeDef
*ad
, int ignore_label
)
4419 int t
, u
, bt
, st
, type_found
, typespec_found
, g
, n
;
4423 memset(ad
, 0, sizeof(AttributeDef
));
4433 /* currently, we really ignore extension */
4443 if (u
== VT_SHORT
|| u
== VT_LONG
) {
4444 if (st
!= -1 || (bt
!= -1 && bt
!= VT_INT
))
4445 tmbt
: tcc_error("too many basic types");
4448 if (bt
!= -1 || (st
!= -1 && u
!= VT_INT
))
4453 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | u
;
4470 memset(&ad1
, 0, sizeof(AttributeDef
));
4471 if (parse_btype(&type1
, &ad1
, 0)) {
4472 type_decl(&type1
, &ad1
, &n
, TYPE_ABSTRACT
);
4474 n
= 1 << (ad1
.a
.aligned
- 1);
4476 type_size(&type1
, &n
);
4479 if (n
< 0 || (n
& (n
- 1)) != 0)
4480 tcc_error("alignment must be a positive power of two");
4483 ad
->a
.aligned
= exact_log2p1(n
);
4487 if ((t
& VT_BTYPE
) == VT_DOUBLE
) {
4488 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_LDOUBLE
;
4489 } else if ((t
& (VT_BTYPE
|VT_LONG
)) == VT_LONG
) {
4490 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_LLONG
;
4497 #ifdef TCC_TARGET_ARM64
4499 /* GCC's __uint128_t appears in some Linux header files. Make it a
4500 synonym for long double to get the size and alignment right. */
4511 if ((t
& (VT_BTYPE
|VT_LONG
)) == VT_LONG
) {
4512 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_LDOUBLE
;
4520 struct_decl(&type1
, VT_ENUM
);
4523 type
->ref
= type1
.ref
;
4526 struct_decl(&type1
, VT_STRUCT
);
4529 struct_decl(&type1
, VT_UNION
);
4532 /* type modifiers */
4536 parse_btype_qualify(type
, VT_ATOMIC
);
4539 parse_expr_type(&type1
);
4540 /* remove all storage modifiers except typedef */
4541 type1
.t
&= ~(VT_STORAGE
&~VT_TYPEDEF
);
4543 sym_to_attr(ad
, type1
.ref
);
4551 parse_btype_qualify(type
, VT_CONSTANT
);
4559 parse_btype_qualify(type
, VT_VOLATILE
);
4566 if ((t
& (VT_DEFSIGN
|VT_UNSIGNED
)) == (VT_DEFSIGN
|VT_UNSIGNED
))
4567 tcc_error("signed and unsigned modifier");
4580 if ((t
& (VT_DEFSIGN
|VT_UNSIGNED
)) == VT_DEFSIGN
)
4581 tcc_error("signed and unsigned modifier");
4582 t
|= VT_DEFSIGN
| VT_UNSIGNED
;
4598 if (t
& (VT_EXTERN
|VT_STATIC
|VT_TYPEDEF
) & ~g
)
4599 tcc_error("multiple storage classes");
4611 ad
->f
.func_noreturn
= 1;
4613 /* GNUC attribute */
4614 case TOK_ATTRIBUTE1
:
4615 case TOK_ATTRIBUTE2
:
4616 parse_attribute(ad
);
4617 if (ad
->attr_mode
) {
4618 u
= ad
->attr_mode
-1;
4619 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | u
;
4627 parse_expr_type(&type1
);
4628 /* remove all storage modifiers except typedef */
4629 type1
.t
&= ~(VT_STORAGE
&~VT_TYPEDEF
);
4631 sym_to_attr(ad
, type1
.ref
);
4637 if (!s
|| !(s
->type
.t
& VT_TYPEDEF
))
4641 if (tok
== ':' && ignore_label
) {
4642 /* ignore if it's a label */
4647 t
&= ~(VT_BTYPE
|VT_LONG
);
4648 u
= t
& ~(VT_CONSTANT
| VT_VOLATILE
), t
^= u
;
4649 type
->t
= (s
->type
.t
& ~VT_TYPEDEF
) | u
;
4650 type
->ref
= s
->type
.ref
;
4652 parse_btype_qualify(type
, t
);
4654 /* get attributes from typedef */
4663 if (tcc_state
->char_is_unsigned
) {
4664 if ((t
& (VT_DEFSIGN
|VT_BTYPE
)) == VT_BYTE
)
4667 /* VT_LONG is used just as a modifier for VT_INT / VT_LLONG */
4668 bt
= t
& (VT_BTYPE
|VT_LONG
);
4670 t
|= LONG_SIZE
== 8 ? VT_LLONG
: VT_INT
;
4671 #ifdef TCC_USING_DOUBLE_FOR_LDOUBLE
4672 if (bt
== VT_LDOUBLE
)
4673 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | (VT_DOUBLE
|VT_LONG
);
4679 /* convert a function parameter type (array to pointer and function to
4680 function pointer) */
4681 static inline void convert_parameter_type(CType
*pt
)
4683 /* remove const and volatile qualifiers (XXX: const could be used
4684 to indicate a const function parameter */
4685 pt
->t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
4686 /* array must be transformed to pointer according to ANSI C */
4688 if ((pt
->t
& VT_BTYPE
) == VT_FUNC
) {
4693 ST_FUNC
void parse_asm_str(CString
*astr
)
4696 parse_mult_str(astr
, "string constant");
/* NOTE(review): damaged extraction of asm_label_instr() (parses an
   asm("...") label, interns the string via tok_alloc and returns the
   token value).  Original lines 4701-4705, 4707-4708, 4710 and the
   tail (skip ')' / return) are missing.  Restore from a pristine copy
   before editing. */
4699 /* Parse an asm label and return the token */
4700 static int asm_label_instr(void)
4706 parse_asm_str(&astr
);
4709 printf("asm_alias: \"%s\"\n", (char *)astr
.data
);
4711 v
= tok_alloc(astr
.data
, astr
.size
- 1)->tok
;
/* NOTE(review): damaged extraction of post_type() (parses the suffix
   part of a declarator: '(' parameter lists, incl. old-style and
   ellipsis, and '[' array/VLA dimensions, then pushes an anonymous
   symbol holding the function prototype or array type).  Very large
   runs of original lines are missing (e.g. 4720-4721, 4736-4745,
   4820-4833, 4896-4915) and all statements are wrapped.  Restore from
   a pristine copy before editing. */
4716 static int post_type(CType
*type
, AttributeDef
*ad
, int storage
, int td
)
4718 int n
, l
, t1
, arg_size
, align
, unused_align
;
4719 Sym
**plast
, *s
, *first
;
4722 TokenString
*vla_array_tok
= NULL
;
4723 int *vla_array_str
= NULL
;
4726 /* function type, or recursive declarator (return if so) */
4728 if (TYPE_DIRECT
== (td
& (TYPE_DIRECT
|TYPE_ABSTRACT
)))
4732 else if (parse_btype(&pt
, &ad1
, 0))
4734 else if (td
& (TYPE_DIRECT
|TYPE_ABSTRACT
)) {
4735 merge_attr (ad
, &ad1
);
4746 /* read param name and compute offset */
4747 if (l
!= FUNC_OLD
) {
4748 if ((pt
.t
& VT_BTYPE
) == VT_VOID
&& tok
== ')')
4750 type_decl(&pt
, &ad1
, &n
, TYPE_DIRECT
| TYPE_ABSTRACT
| TYPE_PARAM
);
4751 if ((pt
.t
& VT_BTYPE
) == VT_VOID
)
4752 tcc_error("parameter declared as void");
4757 pt
.t
= VT_VOID
; /* invalid type */
4762 expect("identifier");
4763 convert_parameter_type(&pt
);
4764 arg_size
+= (type_size(&pt
, &align
) + PTR_SIZE
- 1) / PTR_SIZE
;
4765 s
= sym_push(n
, &pt
, 0, 0);
4771 if (l
== FUNC_NEW
&& tok
== TOK_DOTS
) {
4776 if (l
== FUNC_NEW
&& !parse_btype(&pt
, &ad1
, 0))
4777 tcc_error("invalid type");
4780 /* if no parameters, then old type prototype */
4783 /* remove parameter symbols from token table, keep on stack */
4785 sym_pop(local_stack
? &local_stack
: &global_stack
, first
->prev
, 1);
4786 for (s
= first
; s
; s
= s
->next
)
4790 /* NOTE: const is ignored in returned type as it has a special
4791 meaning in gcc / C++ */
4792 type
->t
&= ~VT_CONSTANT
;
4793 /* some ancient pre-K&R C allows a function to return an array
4794 and the array brackets to be put after the arguments, such
4795 that "int c()[]" means something like "int[] c()" */
4798 skip(']'); /* only handle simple "[]" */
4801 /* we push a anonymous symbol which will contain the function prototype */
4802 ad
->f
.func_args
= arg_size
;
4803 ad
->f
.func_type
= l
;
4804 s
= sym_push(SYM_FIELD
, type
, 0, 0);
4810 } else if (tok
== '[') {
4811 int saved_nocode_wanted
= nocode_wanted
;
4812 /* array definition */
4816 if (td
& TYPE_PARAM
) while (1) {
4817 /* XXX The optional type-quals and static should only be accepted
4818 in parameter decls. The '*' as well, and then even only
4819 in prototypes (not function defs). */
4821 case TOK_RESTRICT1
: case TOK_RESTRICT2
: case TOK_RESTRICT3
:
4834 /* Code generation is not done now but has to be done
4835 at start of function. Save code here for later use. */
4837 vla_array_tok
= tok_str_alloc();
4846 tok_str_add_tok(vla_array_tok
);
4850 tok_str_add(vla_array_tok
, -1);
4851 tok_str_add(vla_array_tok
, 0);
4852 vla_array_str
= vla_array_tok
->str
;
4853 begin_macro(vla_array_tok
, 2);
4862 } else if (tok
!= ']') {
4863 if (!local_stack
|| (storage
& VT_STATIC
))
4864 vpushi(expr_const());
4866 /* VLAs (which can only happen with local_stack && !VT_STATIC)
4867 length must always be evaluated, even under nocode_wanted,
4868 so that its size slot is initialized (e.g. under sizeof
4874 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
4877 tcc_error("invalid array size");
4879 if (!is_integer_btype(vtop
->type
.t
& VT_BTYPE
))
4880 tcc_error("size of variable length array should be an integer");
4886 /* parse next post type */
4887 post_type(type
, ad
, storage
, (td
& ~(TYPE_DIRECT
|TYPE_ABSTRACT
)) | TYPE_NEST
);
4889 if ((type
->t
& VT_BTYPE
) == VT_FUNC
)
4890 tcc_error("declaration of an array of functions");
4891 if ((type
->t
& VT_BTYPE
) == VT_VOID
4892 || type_size(type
, &unused_align
) < 0)
4893 tcc_error("declaration of an array of incomplete type elements");
4895 t1
|= type
->t
& VT_VLA
;
4900 tcc_error("need explicit inner array size in VLAs");
4903 loc
-= type_size(&int_type
, &align
);
4907 vpush_type_size(type
, &align
);
4909 vset(&int_type
, VT_LOCAL
|VT_LVAL
, n
);
4916 nocode_wanted
= saved_nocode_wanted
;
4918 /* we push an anonymous symbol which will contain the array
4920 s
= sym_push(SYM_FIELD
, type
, 0, n
);
4921 type
->t
= (t1
? VT_VLA
: VT_ARRAY
) | VT_PTR
;
4923 if (vla_array_str
) {
4925 s
->vla_array_str
= vla_array_str
;
4927 tok_str_free_str(vla_array_str
);
/* NOTE(review): damaged extraction of type_decl() (parses the
   declarator for a basic type: pointer chain with qualifiers,
   attributes, nested declarators, identifier, then post_type()).
   Original lines are missing (e.g. 4941-4942, 4944, 4948-4949,
   4951-4955, 4995-4997, 5000-5003, 5006-5007, 5010-5011) and all
   statements are wrapped.  Restore from a pristine copy before
   editing. */
4933 /* Parse a type declarator (except basic type), and return the type
4934 in 'type'. 'td' is a bitmask indicating which kind of type decl is
4935 expected. 'type' should contain the basic type. 'ad' is the
4936 attribute definition of the basic type. It can be modified by
4937 type_decl(). If this (possibly abstract) declarator is a pointer chain
4938 it returns the innermost pointed to type (equals *type, but is a different
4939 pointer), otherwise returns type itself, that's used for recursive calls. */
4940 static CType
*type_decl(CType
*type
, AttributeDef
*ad
, int *v
, int td
)
4943 int qualifiers
, storage
;
4945 /* recursive type, remove storage bits first, apply them later again */
4946 storage
= type
->t
& VT_STORAGE
;
4947 type
->t
&= ~VT_STORAGE
;
4950 while (tok
== '*') {
4956 qualifiers
|= VT_ATOMIC
;
4961 qualifiers
|= VT_CONSTANT
;
4966 qualifiers
|= VT_VOLATILE
;
4972 /* XXX: clarify attribute handling */
4973 case TOK_ATTRIBUTE1
:
4974 case TOK_ATTRIBUTE2
:
4975 parse_attribute(ad
);
4979 type
->t
|= qualifiers
;
4981 /* innermost pointed to type is the one for the first derivation */
4982 ret
= pointed_type(type
);
4986 /* This is possibly a parameter type list for abstract declarators
4987 ('int ()'), use post_type for testing this. */
4988 if (!post_type(type
, ad
, 0, td
)) {
4989 /* It's not, so it's a nested declarator, and the post operations
4990 apply to the innermost pointed to type (if any). */
4991 /* XXX: this is not correct to modify 'ad' at this point, but
4992 the syntax is not clear */
4993 parse_attribute(ad
);
4994 post
= type_decl(type
, ad
, v
, td
);
4998 } else if (tok
>= TOK_IDENT
&& (td
& TYPE_DIRECT
)) {
4999 /* type identifier */
5004 if (!(td
& TYPE_ABSTRACT
))
5005 expect("identifier");
5008 post_type(post
, ad
, storage
, td
& ~(TYPE_DIRECT
|TYPE_ABSTRACT
));
5009 parse_attribute(ad
);
/* NOTE(review): damaged extraction of indir() (dereferences the
   pointer at vtop with error checking; marks the result lvalue and
   flags VT_MUSTBOUND under bounds checking).  Original lines are
   missing (e.g. 5016, 5019-5021, 5023, 5028, 5033-5034) and all
   statements are wrapped.  Restore from a pristine copy before
   editing. */
5014 /* indirection with full error checking and bound check */
5015 ST_FUNC
void indir(void)
5017 if ((vtop
->type
.t
& VT_BTYPE
) != VT_PTR
) {
5018 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FUNC
)
5022 if (vtop
->r
& VT_LVAL
)
5024 vtop
->type
= *pointed_type(&vtop
->type
);
5025 /* Arrays and functions are never lvalues */
5026 if (!(vtop
->type
.t
& (VT_ARRAY
| VT_VLA
))
5027 && (vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
) {
5029 /* if bound checking, the referenced pointer must be checked */
5030 #ifdef CONFIG_TCC_BCHECK
5031 if (tcc_state
->do_bounds_check
)
5032 vtop
->r
|= VT_MUSTBOUND
;
5037 /* pass a parameter to a function and do type checking and casting */
5038 static void gfunc_param_typed(Sym
*func
, Sym
*arg
)
5043 func_type
= func
->f
.func_type
;
5044 if (func_type
== FUNC_OLD
||
5045 (func_type
== FUNC_ELLIPSIS
&& arg
== NULL
)) {
5046 /* default casting : only need to convert float to double */
5047 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FLOAT
) {
5048 gen_cast_s(VT_DOUBLE
);
5049 } else if (vtop
->type
.t
& VT_BITFIELD
) {
5050 type
.t
= vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
);
5051 type
.ref
= vtop
->type
.ref
;
5053 } else if (vtop
->r
& VT_MUSTCAST
) {
5054 force_charshort_cast();
5056 } else if (arg
== NULL
) {
5057 tcc_error("too many arguments to function");
5060 type
.t
&= ~VT_CONSTANT
; /* need to do that to avoid false warning */
5061 gen_assign_cast(&type
);
5065 /* parse an expression and return its type without any side effect. */
5066 static void expr_type(CType
*type
, void (*expr_fn
)(void))
5075 /* parse an expression of the form '(type)' or '(expr)' and return its
5077 static void parse_expr_type(CType
*type
)
5083 if (parse_btype(type
, &ad
, 0)) {
5084 type_decl(type
, &ad
, &n
, TYPE_ABSTRACT
);
5086 expr_type(type
, gexpr
);
5091 static void parse_type(CType
*type
)
5096 if (!parse_btype(type
, &ad
, 0)) {
5099 type_decl(type
, &ad
, &n
, TYPE_ABSTRACT
);
5102 static void parse_builtin_params(int nc
, const char *args
)
5111 while ((c
= *args
++)) {
5126 type
.t
= VT_CONSTANT
;
5132 type
.t
= VT_CONSTANT
;
5134 type
.t
|= char_type
.t
;
5146 gen_assign_cast(&type
);
5153 static void parse_atomic(int atok
)
5155 int size
, align
, arg
;
5156 CType
*atom
, *atom_ptr
, ct
= {0};
5158 static const char *const templates
[] = {
5160 * Each entry consists of callback and function template.
5161 * The template represents argument types and return type.
5163 * ? void (return-only)
5166 * A read-only atomic
5167 * p pointer to memory
5172 /* keep in order of appearance in tcctok.h: */
5173 /* __atomic_store */ "avm.?",
5174 /* __atomic_load */ "Am.v",
5175 /* __atomic_exchange */ "avm.v",
5176 /* __atomic_compare_exchange */ "apvbmm.b",
5177 /* __atomic_fetch_add */ "avm.v",
5178 /* __atomic_fetch_sub */ "avm.v",
5179 /* __atomic_fetch_or */ "avm.v",
5180 /* __atomic_fetch_xor */ "avm.v",
5181 /* __atomic_fetch_and */ "avm.v"
5183 const char *template = templates
[(atok
- TOK___atomic_store
)];
5185 atom
= atom_ptr
= NULL
;
5186 size
= 0; /* pacify compiler */
5191 switch (template[arg
]) {
5194 atom_ptr
= &vtop
->type
;
5195 if ((atom_ptr
->t
& VT_BTYPE
) != VT_PTR
)
5197 atom
= pointed_type(atom_ptr
);
5198 size
= type_size(atom
, &align
);
5200 || (size
& (size
- 1))
5201 || (atok
> TOK___atomic_compare_exchange
5202 && (0 == btype_size(atom
->t
& VT_BTYPE
)
5203 || (atom
->t
& VT_BTYPE
) == VT_PTR
)))
5204 expect("integral or integer-sized pointer target type");
5205 /* GCC does not care either: */
5206 /* if (!(atom->t & VT_ATOMIC))
5207 tcc_warning("pointer target declaration is missing '_Atomic'"); */
5211 if ((vtop
->type
.t
& VT_BTYPE
) != VT_PTR
5212 || type_size(pointed_type(&vtop
->type
), &align
) != size
)
5213 tcc_error("pointer target type mismatch in argument %d", arg
+ 1);
5214 gen_assign_cast(atom_ptr
);
5217 gen_assign_cast(atom
);
5220 gen_assign_cast(&int_type
);
5224 gen_assign_cast(&ct
);
5227 if ('.' == template[++arg
])
5234 switch (template[arg
+ 1]) {
5243 sprintf(buf
, "%s_%d", get_tok_str(atok
, 0), size
);
5244 vpush_helper_func(tok_alloc_const(buf
));
5249 PUT_R_RET(vtop
, ct
.t
);
5250 if (ct
.t
== VT_BOOL
) {
5252 vtop
->r
|= BFVAL(VT_MUSTCAST
, 1);
5254 vtop
->type
.t
= VT_INT
;
5259 ST_FUNC
void unary(void)
5261 int n
, t
, align
, size
, r
, sizeof_caller
;
5266 /* generate line number info */
5268 tcc_debug_line(tcc_state
), tcc_tcov_check_line (tcc_state
, 1);
5270 sizeof_caller
= in_sizeof
;
5273 /* XXX: GCC 2.95.3 does not generate a table although it should be
5281 #ifdef TCC_TARGET_PE
5282 t
= VT_SHORT
|VT_UNSIGNED
;
5290 vsetc(&type
, VT_CONST
, &tokc
);
5294 t
= VT_INT
| VT_UNSIGNED
;
5300 t
= VT_LLONG
| VT_UNSIGNED
;
5312 t
= (LONG_SIZE
== 8 ? VT_LLONG
: VT_INT
) | VT_LONG
;
5315 t
= (LONG_SIZE
== 8 ? VT_LLONG
: VT_INT
) | VT_LONG
| VT_UNSIGNED
;
5317 case TOK___FUNCTION__
:
5319 goto tok_identifier
;
5325 /* special function name identifier */
5326 len
= strlen(funcname
) + 1;
5327 /* generate char[len] type */
5328 type
.t
= char_type
.t
;
5329 if (tcc_state
->warn_write_strings
& WARN_ON
)
5330 type
.t
|= VT_CONSTANT
;
5334 sec
= rodata_section
;
5335 vpush_ref(&type
, sec
, sec
->data_offset
, len
);
5337 memcpy(section_ptr_add(sec
, len
), funcname
, len
);
5342 #ifdef TCC_TARGET_PE
5343 t
= VT_SHORT
| VT_UNSIGNED
;
5349 /* string parsing */
5352 if (tcc_state
->warn_write_strings
& WARN_ON
)
5357 memset(&ad
, 0, sizeof(AttributeDef
));
5358 ad
.section
= rodata_section
;
5359 decl_initializer_alloc(&type
, &ad
, VT_CONST
, 2, 0, 0);
5364 if (parse_btype(&type
, &ad
, 0)) {
5365 type_decl(&type
, &ad
, &n
, TYPE_ABSTRACT
);
5367 /* check ISOC99 compound literal */
5369 /* data is allocated locally by default */
5374 /* all except arrays are lvalues */
5375 if (!(type
.t
& VT_ARRAY
))
5377 memset(&ad
, 0, sizeof(AttributeDef
));
5378 decl_initializer_alloc(&type
, &ad
, r
, 1, 0, 0);
5380 if (sizeof_caller
) {
5387 } else if (tok
== '{') {
5388 int saved_nocode_wanted
= nocode_wanted
;
5389 if (const_wanted
&& !(nocode_wanted
& unevalmask
))
5391 if (0 == local_scope
)
5392 tcc_error("statement expression outside of function");
5393 /* save all registers */
5395 /* statement expression : we do not accept break/continue
5396 inside as GCC does. We do retain the nocode_wanted state,
5397 as statement expressions can't ever be entered from the
5398 outside, so any reactivation of code emission (from labels
5399 or loop heads) can be disabled again after the end of it. */
5401 /* If the statement expr can be entered, then we retain the current
5402 nocode_wanted state (from e.g. a 'return 0;' in the stmt-expr).
5403 If it can't be entered then the state is that from before the
5404 statement expression. */
5405 if (saved_nocode_wanted
)
5406 nocode_wanted
= saved_nocode_wanted
;
5421 /* functions names must be treated as function pointers,
5422 except for unary '&' and sizeof. Since we consider that
5423 functions are not lvalues, we only have to handle it
5424 there and in function calls. */
5425 /* arrays can also be used although they are not lvalues */
5426 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
&&
5427 !(vtop
->type
.t
& VT_ARRAY
))
5430 vtop
->sym
->a
.addrtaken
= 1;
5431 mk_pointer(&vtop
->type
);
5437 gen_test_zero(TOK_EQ
);
5448 if ((vtop
->type
.t
& VT_BTYPE
) == VT_PTR
)
5449 tcc_error("pointer not accepted for unary plus");
5450 /* In order to force cast, we add zero, except for floating point
5451 where we really need an noop (otherwise -0.0 will be transformed
5453 if (!is_float(vtop
->type
.t
)) {
5465 expr_type(&type
, unary
); /* Perform a in_sizeof = 0; */
5466 if (t
== TOK_SIZEOF
) {
5467 vpush_type_size(&type
, &align
);
5468 gen_cast_s(VT_SIZE_T
);
5470 type_size(&type
, &align
);
5472 if (vtop
[1].r
& VT_SYM
)
5473 s
= vtop
[1].sym
; /* hack: accessing previous vtop */
5474 if (s
&& s
->a
.aligned
)
5475 align
= 1 << (s
->a
.aligned
- 1);
5480 case TOK_builtin_expect
:
5481 /* __builtin_expect is a no-op for now */
5482 parse_builtin_params(0, "ee");
5485 case TOK_builtin_types_compatible_p
:
5486 parse_builtin_params(0, "tt");
5487 vtop
[-1].type
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
5488 vtop
[0].type
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
5489 n
= is_compatible_types(&vtop
[-1].type
, &vtop
[0].type
);
5493 case TOK_builtin_choose_expr
:
5520 case TOK_builtin_constant_p
:
5521 parse_builtin_params(1, "e");
5522 n
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
&&
5523 !((vtop
->r
& VT_SYM
) && vtop
->sym
->a
.addrtaken
);
5527 case TOK_builtin_frame_address
:
5528 case TOK_builtin_return_address
:
5534 if (tok
!= TOK_CINT
) {
5535 tcc_error("%s only takes positive integers",
5536 tok1
== TOK_builtin_return_address
?
5537 "__builtin_return_address" :
5538 "__builtin_frame_address");
5540 level
= (uint32_t)tokc
.i
;
5545 vset(&type
, VT_LOCAL
, 0); /* local frame */
5547 #ifdef TCC_TARGET_RISCV64
5551 mk_pointer(&vtop
->type
);
5552 indir(); /* -> parent frame */
5554 if (tok1
== TOK_builtin_return_address
) {
5555 // assume return address is just above frame pointer on stack
5556 #ifdef TCC_TARGET_ARM
5559 #elif defined TCC_TARGET_RISCV64
5566 mk_pointer(&vtop
->type
);
5571 #ifdef TCC_TARGET_RISCV64
5572 case TOK_builtin_va_start
:
5573 parse_builtin_params(0, "ee");
5574 r
= vtop
->r
& VT_VALMASK
;
5578 tcc_error("__builtin_va_start expects a local variable");
5583 #ifdef TCC_TARGET_X86_64
5584 #ifdef TCC_TARGET_PE
5585 case TOK_builtin_va_start
:
5586 parse_builtin_params(0, "ee");
5587 r
= vtop
->r
& VT_VALMASK
;
5591 tcc_error("__builtin_va_start expects a local variable");
5593 vtop
->type
= char_pointer_type
;
5598 case TOK_builtin_va_arg_types
:
5599 parse_builtin_params(0, "t");
5600 vpushi(classify_x86_64_va_arg(&vtop
->type
));
5607 #ifdef TCC_TARGET_ARM64
5608 case TOK_builtin_va_start
: {
5609 parse_builtin_params(0, "ee");
5613 vtop
->type
.t
= VT_VOID
;
5616 case TOK_builtin_va_arg
: {
5617 parse_builtin_params(0, "et");
5625 case TOK___arm64_clear_cache
: {
5626 parse_builtin_params(0, "ee");
5629 vtop
->type
.t
= VT_VOID
;
5634 /* atomic operations */
5635 case TOK___atomic_store
:
5636 case TOK___atomic_load
:
5637 case TOK___atomic_exchange
:
5638 case TOK___atomic_compare_exchange
:
5639 case TOK___atomic_fetch_add
:
5640 case TOK___atomic_fetch_sub
:
5641 case TOK___atomic_fetch_or
:
5642 case TOK___atomic_fetch_xor
:
5643 case TOK___atomic_fetch_and
:
5647 /* pre operations */
5658 if (is_float(vtop
->type
.t
)) {
5668 goto tok_identifier
;
5670 /* allow to take the address of a label */
5671 if (tok
< TOK_UIDENT
)
5672 expect("label identifier");
5673 s
= label_find(tok
);
5675 s
= label_push(&global_label_stack
, tok
, LABEL_FORWARD
);
5677 if (s
->r
== LABEL_DECLARED
)
5678 s
->r
= LABEL_FORWARD
;
5681 s
->type
.t
= VT_VOID
;
5682 mk_pointer(&s
->type
);
5683 s
->type
.t
|= VT_STATIC
;
5685 vpushsym(&s
->type
, s
);
5691 CType controlling_type
;
5692 int has_default
= 0;
5695 TokenString
*str
= NULL
;
5696 int saved_const_wanted
= const_wanted
;
5701 expr_type(&controlling_type
, expr_eq
);
5702 controlling_type
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
| VT_ARRAY
);
5703 if ((controlling_type
.t
& VT_BTYPE
) == VT_FUNC
)
5704 mk_pointer(&controlling_type
);
5705 const_wanted
= saved_const_wanted
;
5709 if (tok
== TOK_DEFAULT
) {
5711 tcc_error("too many 'default'");
5717 AttributeDef ad_tmp
;
5721 parse_btype(&cur_type
, &ad_tmp
, 0);
5722 type_decl(&cur_type
, &ad_tmp
, &itmp
, TYPE_ABSTRACT
);
5723 if (compare_types(&controlling_type
, &cur_type
, 0)) {
5725 tcc_error("type match twice");
5735 skip_or_save_block(&str
);
5737 skip_or_save_block(NULL
);
5744 type_to_str(buf
, sizeof buf
, &controlling_type
, NULL
);
5745 tcc_error("type '%s' does not match any association", buf
);
5747 begin_macro(str
, 1);
5756 // special qnan , snan and infinity values
5761 vtop
->type
.t
= VT_FLOAT
;
5766 goto special_math_val
;
5769 goto special_math_val
;
5776 expect("identifier");
5778 if (!s
|| IS_ASM_SYM(s
)) {
5779 const char *name
= get_tok_str(t
, NULL
);
5781 tcc_error("'%s' undeclared", name
);
5782 /* for simple function calls, we tolerate undeclared
5783 external reference to int() function */
5784 tcc_warning_c(warn_implicit_function_declaration
)(
5785 "implicit declaration of function '%s'", name
);
5786 s
= external_global_sym(t
, &func_old_type
);
5790 /* A symbol that has a register is a local register variable,
5791 which starts out as VT_LOCAL value. */
5792 if ((r
& VT_VALMASK
) < VT_CONST
)
5793 r
= (r
& ~VT_VALMASK
) | VT_LOCAL
;
5795 vset(&s
->type
, r
, s
->c
);
5796 /* Point to s as backpointer (even without r&VT_SYM).
5797 Will be used by at least the x86 inline asm parser for
5803 } else if (r
== VT_CONST
&& IS_ENUM_VAL(s
->type
.t
)) {
5804 vtop
->c
.i
= s
->enum_val
;
5809 /* post operations */
5811 if (tok
== TOK_INC
|| tok
== TOK_DEC
) {
5814 } else if (tok
== '.' || tok
== TOK_ARROW
|| tok
== TOK_CDOUBLE
) {
5815 int qualifiers
, cumofs
= 0;
5817 if (tok
== TOK_ARROW
)
5819 qualifiers
= vtop
->type
.t
& (VT_CONSTANT
| VT_VOLATILE
);
5822 /* expect pointer on structure */
5823 if ((vtop
->type
.t
& VT_BTYPE
) != VT_STRUCT
)
5824 expect("struct or union");
5825 if (tok
== TOK_CDOUBLE
)
5826 expect("field name");
5828 if (tok
== TOK_CINT
|| tok
== TOK_CUINT
)
5829 expect("field name");
5830 s
= find_field(&vtop
->type
, tok
, &cumofs
);
5832 tcc_error("field not found: %s", get_tok_str(tok
& ~SYM_FIELD
, &tokc
));
5833 /* add field offset to pointer */
5834 vtop
->type
= char_pointer_type
; /* change type to 'char *' */
5835 vpushi(cumofs
+ s
->c
);
5837 /* change type to field type, and set to lvalue */
5838 vtop
->type
= s
->type
;
5839 vtop
->type
.t
|= qualifiers
;
5840 /* an array is never an lvalue */
5841 if (!(vtop
->type
.t
& VT_ARRAY
)) {
5843 #ifdef CONFIG_TCC_BCHECK
5844 /* if bound checking, the referenced pointer must be checked */
5845 if (tcc_state
->do_bounds_check
)
5846 vtop
->r
|= VT_MUSTBOUND
;
5850 } else if (tok
== '[') {
5856 } else if (tok
== '(') {
5859 int nb_args
, ret_nregs
, ret_align
, regsize
, variadic
;
5862 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
) {
5863 /* pointer test (no array accepted) */
5864 if ((vtop
->type
.t
& (VT_BTYPE
| VT_ARRAY
)) == VT_PTR
) {
5865 vtop
->type
= *pointed_type(&vtop
->type
);
5866 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
)
5870 expect("function pointer");
5873 vtop
->r
&= ~VT_LVAL
; /* no lvalue */
5875 /* get return type */
5878 sa
= s
->next
; /* first parameter */
5879 nb_args
= regsize
= 0;
5881 /* compute first implicit argument if a structure is returned */
5882 if ((s
->type
.t
& VT_BTYPE
) == VT_STRUCT
) {
5883 variadic
= (s
->f
.func_type
== FUNC_ELLIPSIS
);
5884 ret_nregs
= gfunc_sret(&s
->type
, variadic
, &ret
.type
,
5885 &ret_align
, ®size
);
5886 if (ret_nregs
<= 0) {
5887 /* get some space for the returned structure */
5888 size
= type_size(&s
->type
, &align
);
5889 #ifdef TCC_TARGET_ARM64
5890 /* On arm64, a small struct is return in registers.
5891 It is much easier to write it to memory if we know
5892 that we are allowed to write some extra bytes, so
5893 round the allocated space up to a power of 2: */
5895 while (size
& (size
- 1))
5896 size
= (size
| (size
- 1)) + 1;
5898 loc
= (loc
- size
) & -align
;
5900 ret
.r
= VT_LOCAL
| VT_LVAL
;
5901 /* pass it as 'int' to avoid structure arg passing
5903 vseti(VT_LOCAL
, loc
);
5904 #ifdef CONFIG_TCC_BCHECK
5905 if (tcc_state
->do_bounds_check
)
5919 if (ret_nregs
> 0) {
5920 /* return in register */
5922 PUT_R_RET(&ret
, ret
.type
.t
);
5927 gfunc_param_typed(s
, sa
);
5937 tcc_error("too few arguments to function");
5939 gfunc_call(nb_args
);
5941 if (ret_nregs
< 0) {
5942 vsetc(&ret
.type
, ret
.r
, &ret
.c
);
5943 #ifdef TCC_TARGET_RISCV64
5944 arch_transfer_ret_regs(1);
5948 for (r
= ret
.r
+ ret_nregs
+ !ret_nregs
; r
-- > ret
.r
;) {
5949 vsetc(&ret
.type
, r
, &ret
.c
);
5950 vtop
->r2
= ret
.r2
; /* Loop only happens when r2 is VT_CONST */
5953 /* handle packed struct return */
5954 if (((s
->type
.t
& VT_BTYPE
) == VT_STRUCT
) && ret_nregs
) {
5957 size
= type_size(&s
->type
, &align
);
5958 /* We're writing whole regs often, make sure there's enough
5959 space. Assume register size is power of 2. */
5960 if (regsize
> align
)
5962 loc
= (loc
- size
) & -align
;
5966 vset(&ret
.type
, VT_LOCAL
| VT_LVAL
, addr
+ offset
);
5970 if (--ret_nregs
== 0)
5974 vset(&s
->type
, VT_LOCAL
| VT_LVAL
, addr
);
5977 /* Promote char/short return values. This is matters only
5978 for calling function that were not compiled by TCC and
5979 only on some architectures. For those where it doesn't
5980 matter we expect things to be already promoted to int,
5982 t
= s
->type
.t
& VT_BTYPE
;
5983 if (t
== VT_BYTE
|| t
== VT_SHORT
|| t
== VT_BOOL
) {
5985 vtop
->r
|= BFVAL(VT_MUSTCAST
, 1);
5987 vtop
->type
.t
= VT_INT
;
5991 if (s
->f
.func_noreturn
) {
5993 tcc_tcov_block_end(tcc_state
, -1);
6002 #ifndef precedence_parser /* original top-down parser */
6004 static void expr_prod(void)
6009 while ((t
= tok
) == '*' || t
== '/' || t
== '%') {
6016 static void expr_sum(void)
6021 while ((t
= tok
) == '+' || t
== '-') {
6028 static void expr_shift(void)
6033 while ((t
= tok
) == TOK_SHL
|| t
== TOK_SAR
) {
6040 static void expr_cmp(void)
6045 while (((t
= tok
) >= TOK_ULE
&& t
<= TOK_GT
) ||
6046 t
== TOK_ULT
|| t
== TOK_UGE
) {
6053 static void expr_cmpeq(void)
6058 while ((t
= tok
) == TOK_EQ
|| t
== TOK_NE
) {
6065 static void expr_and(void)
6068 while (tok
== '&') {
6075 static void expr_xor(void)
6078 while (tok
== '^') {
6085 static void expr_or(void)
6088 while (tok
== '|') {
6095 static void expr_landor(int op
);
6097 static void expr_land(void)
6100 if (tok
== TOK_LAND
)
6104 static void expr_lor(void)
6111 # define expr_landor_next(op) op == TOK_LAND ? expr_or() : expr_land()
6112 #else /* defined precedence_parser */
6113 # define expr_landor_next(op) unary(), expr_infix(precedence(op) + 1)
6114 # define expr_lor() unary(), expr_infix(1)
6116 static int precedence(int tok
)
6119 case TOK_LOR
: return 1;
6120 case TOK_LAND
: return 2;
6124 case TOK_EQ
: case TOK_NE
: return 6;
6125 relat
: case TOK_ULT
: case TOK_UGE
: return 7;
6126 case TOK_SHL
: case TOK_SAR
: return 8;
6127 case '+': case '-': return 9;
6128 case '*': case '/': case '%': return 10;
6130 if (tok
>= TOK_ULE
&& tok
<= TOK_GT
)
6135 static unsigned char prec
[256];
6136 static void init_prec(void)
6139 for (i
= 0; i
< 256; i
++)
6140 prec
[i
] = precedence(i
);
6142 #define precedence(i) ((unsigned)i < 256 ? prec[i] : 0)
6144 static void expr_landor(int op
);
6146 static void expr_infix(int p
)
6149 while ((p2
= precedence(t
)) >= p
) {
6150 if (t
== TOK_LOR
|| t
== TOK_LAND
) {
6155 if (precedence(tok
) > p2
)
6164 /* Assuming vtop is a value used in a conditional context
6165 (i.e. compared with zero) return 0 if it's false, 1 if
6166 true and -1 if it can't be statically determined. */
6167 static int condition_3way(void)
6170 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
&&
6171 (!(vtop
->r
& VT_SYM
) || !vtop
->sym
->a
.weak
)) {
6173 gen_cast_s(VT_BOOL
);
6180 static void expr_landor(int op
)
6182 int t
= 0, cc
= 1, f
= 0, i
= op
== TOK_LAND
, c
;
6184 c
= f
? i
: condition_3way();
6186 save_regs(1), cc
= 0;
6188 nocode_wanted
++, f
= 1;
6196 expr_landor_next(op
);
6208 static int is_cond_bool(SValue
*sv
)
6210 if ((sv
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
6211 && (sv
->type
.t
& VT_BTYPE
) == VT_INT
)
6212 return (unsigned)sv
->c
.i
< 2;
6213 if (sv
->r
== VT_CMP
)
6218 static void expr_cond(void)
6220 int tt
, u
, r1
, r2
, rc
, t1
, t2
, islv
, c
, g
;
6228 c
= condition_3way();
6229 g
= (tok
== ':' && gnu_ext
);
6239 /* needed to avoid having different registers saved in
6246 ncw_prev
= nocode_wanted
;
6252 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FUNC
)
6253 mk_pointer(&vtop
->type
);
6254 sv
= *vtop
; /* save value to handle it later */
6255 vtop
--; /* no vpop so that FP stack is not flushed */
6265 nocode_wanted
= ncw_prev
;
6271 if (c
< 0 && is_cond_bool(vtop
) && is_cond_bool(&sv
)) {
6272 /* optimize "if (f ? a > b : c || d) ..." for example, where normally
6273 "a < b" and "c || d" would be forced to "(int)0/1" first, whereas
6274 this code jumps directly to the if's then/else branches. */
6279 /* combine jump targets of 2nd op with VT_CMP of 1st op */
6282 nocode_wanted
= ncw_prev
;
6283 // tcc_warning("two conditions expr_cond");
6287 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FUNC
)
6288 mk_pointer(&vtop
->type
);
6290 /* cast operands to correct type according to ISOC rules */
6291 if (!combine_types(&type
, &sv
, vtop
, '?'))
6292 type_incompatibility_error(&sv
.type
, &vtop
->type
,
6293 "type mismatch in conditional expression (have '%s' and '%s')");
6294 /* keep structs lvalue by transforming `(expr ? a : b)` to `*(expr ? &a : &b)` so
6295 that `(expr ? a : b).mem` does not error with "lvalue expected" */
6296 islv
= (vtop
->r
& VT_LVAL
) && (sv
.r
& VT_LVAL
) && VT_STRUCT
== (type
.t
& VT_BTYPE
);
6298 /* now we convert second operand */
6302 mk_pointer(&vtop
->type
);
6304 } else if (VT_STRUCT
== (vtop
->type
.t
& VT_BTYPE
))
6308 rc
= RC_TYPE(type
.t
);
6309 /* for long longs, we use fixed registers to avoid having
6310 to handle a complicated move */
6311 if (USING_TWO_WORDS(type
.t
))
6312 rc
= RC_RET(type
.t
);
6320 nocode_wanted
= ncw_prev
;
6322 /* this is horrible, but we must also convert first
6328 mk_pointer(&vtop
->type
);
6330 } else if (VT_STRUCT
== (vtop
->type
.t
& VT_BTYPE
))
6336 move_reg(r2
, r1
, islv
? VT_PTR
: type
.t
);
6346 static void expr_eq(void)
6351 if ((t
= tok
) == '=' || TOK_ASSIGN(t
)) {
6359 gen_op(TOK_ASSIGN_OP(t
));
6365 ST_FUNC
void gexpr(void)
6376 /* parse a constant expression and return value in vtop. */
6377 static void expr_const1(void)
6380 nocode_wanted
+= unevalmask
+ 1;
6382 nocode_wanted
-= unevalmask
+ 1;
6386 /* parse an integer constant and return its value. */
6387 static inline int64_t expr_const64(void)
6391 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) != VT_CONST
)
6392 expect("constant expression");
6398 /* parse an integer constant and return its value.
6399 Complain if it doesn't fit 32bit (signed or unsigned). */
6400 ST_FUNC
int expr_const(void)
6403 int64_t wc
= expr_const64();
6405 if (c
!= wc
&& (unsigned)c
!= wc
)
6406 tcc_error("constant exceeds 32 bit");
6410 /* ------------------------------------------------------------------------- */
6411 /* return from function */
6413 #ifndef TCC_TARGET_ARM64
6414 static void gfunc_return(CType
*func_type
)
6416 if ((func_type
->t
& VT_BTYPE
) == VT_STRUCT
) {
6417 CType type
, ret_type
;
6418 int ret_align
, ret_nregs
, regsize
;
6419 ret_nregs
= gfunc_sret(func_type
, func_var
, &ret_type
,
6420 &ret_align
, ®size
);
6421 if (ret_nregs
< 0) {
6422 #ifdef TCC_TARGET_RISCV64
6423 arch_transfer_ret_regs(0);
6425 } else if (0 == ret_nregs
) {
6426 /* if returning structure, must copy it to implicit
6427 first pointer arg location */
6430 vset(&type
, VT_LOCAL
| VT_LVAL
, func_vc
);
6433 /* copy structure value to pointer */
6436 /* returning structure packed into registers */
6437 int size
, addr
, align
, rc
;
6438 size
= type_size(func_type
,&align
);
6439 if ((vtop
->r
!= (VT_LOCAL
| VT_LVAL
) ||
6440 (vtop
->c
.i
& (ret_align
-1)))
6441 && (align
& (ret_align
-1))) {
6442 loc
= (loc
- size
) & -ret_align
;
6445 vset(&type
, VT_LOCAL
| VT_LVAL
, addr
);
6449 vset(&ret_type
, VT_LOCAL
| VT_LVAL
, addr
);
6451 vtop
->type
= ret_type
;
6452 rc
= RC_RET(ret_type
.t
);
6460 if (--ret_nregs
== 0)
6462 /* We assume that when a structure is returned in multiple
6463 registers, their classes are consecutive values of the
6466 vtop
->c
.i
+= regsize
;
6471 gv(RC_RET(func_type
->t
));
6473 vtop
--; /* NOT vpop() because on x86 it would flush the fp stack */
6477 static void check_func_return(void)
6479 if ((func_vt
.t
& VT_BTYPE
) == VT_VOID
)
6481 if (!strcmp (funcname
, "main")
6482 && (func_vt
.t
& VT_BTYPE
) == VT_INT
) {
6483 /* main returns 0 by default */
6485 gen_assign_cast(&func_vt
);
6486 gfunc_return(&func_vt
);
6488 tcc_warning("function might return no value: '%s'", funcname
);
6492 /* ------------------------------------------------------------------------- */
6495 static int case_cmpi(const void *pa
, const void *pb
)
6497 int64_t a
= (*(struct case_t
**) pa
)->v1
;
6498 int64_t b
= (*(struct case_t
**) pb
)->v1
;
6499 return a
< b
? -1 : a
> b
;
6502 static int case_cmpu(const void *pa
, const void *pb
)
6504 uint64_t a
= (uint64_t)(*(struct case_t
**) pa
)->v1
;
6505 uint64_t b
= (uint64_t)(*(struct case_t
**) pb
)->v1
;
6506 return a
< b
? -1 : a
> b
;
6509 static void gtst_addr(int t
, int a
)
6511 gsym_addr(gvtst(0, t
), a
);
6514 static void gcase(struct case_t
**base
, int len
, int *bsym
)
6518 int ll
= (vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
;
6535 gtst_addr(0, p
->sym
); /* v1 <= x <= v2 */
6537 gcase(base
, len
/2, bsym
);
6541 base
+= e
; len
-= e
;
6551 if (p
->v1
== p
->v2
) {
6553 gtst_addr(0, p
->sym
);
6563 gtst_addr(0, p
->sym
);
6567 *bsym
= gjmp(*bsym
);
6570 /* ------------------------------------------------------------------------- */
6571 /* __attribute__((cleanup(fn))) */
6573 static void try_call_scope_cleanup(Sym
*stop
)
6575 Sym
*cls
= cur_scope
->cl
.s
;
6577 for (; cls
!= stop
; cls
= cls
->ncl
) {
6578 Sym
*fs
= cls
->next
;
6579 Sym
*vs
= cls
->prev_tok
;
6581 vpushsym(&fs
->type
, fs
);
6582 vset(&vs
->type
, vs
->r
, vs
->c
);
6584 mk_pointer(&vtop
->type
);
6590 static void try_call_cleanup_goto(Sym
*cleanupstate
)
6595 if (!cur_scope
->cl
.s
)
6598 /* search NCA of both cleanup chains given parents and initial depth */
6599 ocd
= cleanupstate
? cleanupstate
->v
& ~SYM_FIELD
: 0;
6600 for (ccd
= cur_scope
->cl
.n
, oc
= cleanupstate
; ocd
> ccd
; --ocd
, oc
= oc
->ncl
)
6602 for (cc
= cur_scope
->cl
.s
; ccd
> ocd
; --ccd
, cc
= cc
->ncl
)
6604 for (; cc
!= oc
; cc
= cc
->ncl
, oc
= oc
->ncl
, --ccd
)
6607 try_call_scope_cleanup(cc
);
6610 /* call 'func' for each __attribute__((cleanup(func))) */
6611 static void block_cleanup(struct scope
*o
)
6615 for (pg
= &pending_gotos
; (g
= *pg
) && g
->c
> o
->cl
.n
;) {
6616 if (g
->prev_tok
->r
& LABEL_FORWARD
) {
6621 try_call_scope_cleanup(o
->cl
.s
);
6622 pcl
->jnext
= gjmp(0);
6624 goto remove_pending
;
6634 try_call_scope_cleanup(o
->cl
.s
);
6637 /* ------------------------------------------------------------------------- */
6640 static void vla_restore(int loc
)
6643 gen_vla_sp_restore(loc
);
6646 static void vla_leave(struct scope
*o
)
6648 struct scope
*c
= cur_scope
, *v
= NULL
;
6649 for (; c
!= o
&& c
; c
= c
->prev
)
6653 vla_restore(v
->vla
.locorig
);
6656 /* ------------------------------------------------------------------------- */
6659 static void new_scope(struct scope
*o
)
6661 /* copy and link previous scope */
6663 o
->prev
= cur_scope
;
6665 cur_scope
->vla
.num
= 0;
6667 /* record local declaration stack position */
6668 o
->lstk
= local_stack
;
6669 o
->llstk
= local_label_stack
;
6673 tcc_debug_stabn(tcc_state
, N_LBRAC
, ind
- func_ind
);
6676 static void prev_scope(struct scope
*o
, int is_expr
)
6680 if (o
->cl
.s
!= o
->prev
->cl
.s
)
6681 block_cleanup(o
->prev
);
6683 /* pop locally defined labels */
6684 label_pop(&local_label_stack
, o
->llstk
, is_expr
);
6686 /* In the is_expr case (a statement expression is finished here),
6687 vtop might refer to symbols on the local_stack. Either via the
6688 type or via vtop->sym. We can't pop those nor any that in turn
6689 might be referred to. To make it easier we don't roll back
6690 any symbols in that case; some upper level call to block() will
6691 do that. We do have to remove such symbols from the lookup
6692 tables, though. sym_pop will do that. */
6694 /* pop locally defined symbols */
6695 pop_local_syms(o
->lstk
, is_expr
);
6696 cur_scope
= o
->prev
;
6700 tcc_debug_stabn(tcc_state
, N_RBRAC
, ind
- func_ind
);
6703 /* leave a scope via break/continue(/goto) */
6704 static void leave_scope(struct scope
*o
)
6708 try_call_scope_cleanup(o
->cl
.s
);
6712 /* ------------------------------------------------------------------------- */
6713 /* call block from 'for do while' loops */
6715 static void lblock(int *bsym
, int *csym
)
6717 struct scope
*lo
= loop_scope
, *co
= cur_scope
;
6718 int *b
= co
->bsym
, *c
= co
->csym
;
6732 static void block(int is_expr
)
6734 int a
, b
, c
, d
, e
, t
;
6739 /* default return value is (void) */
6741 vtop
->type
.t
= VT_VOID
;
6746 /* If the token carries a value, next() might destroy it. Only with
6747 invalid code such as f(){"123"4;} */
6748 if (TOK_HAS_VALUE(t
))
6753 tcc_tcov_check_line (tcc_state
, 0), tcc_tcov_block_begin (tcc_state
);
6761 if (tok
== TOK_ELSE
) {
6766 gsym(d
); /* patch else jmp */
6771 } else if (t
== TOK_WHILE
) {
6783 } else if (t
== '{') {
6786 /* handle local labels declarations */
6787 while (tok
== TOK_LABEL
) {
6790 if (tok
< TOK_UIDENT
)
6791 expect("label identifier");
6792 label_push(&local_label_stack
, tok
, LABEL_DECLARED
);
6794 } while (tok
== ',');
6798 while (tok
!= '}') {
6807 prev_scope(&o
, is_expr
);
6810 else if (!nocode_wanted
)
6811 check_func_return();
6813 } else if (t
== TOK_RETURN
) {
6814 b
= (func_vt
.t
& VT_BTYPE
) != VT_VOID
;
6818 gen_assign_cast(&func_vt
);
6820 if (vtop
->type
.t
!= VT_VOID
)
6821 tcc_warning("void function returns a value");
6825 tcc_warning("'return' with no value");
6828 leave_scope(root_scope
);
6830 gfunc_return(&func_vt
);
6832 /* jump unless last stmt in top-level block */
6833 if (tok
!= '}' || local_scope
!= 1)
6836 tcc_tcov_block_end (tcc_state
, -1);
6839 } else if (t
== TOK_BREAK
) {
6841 if (!cur_scope
->bsym
)
6842 tcc_error("cannot break");
6843 if (cur_switch
&& cur_scope
->bsym
== cur_switch
->bsym
)
6844 leave_scope(cur_switch
->scope
);
6846 leave_scope(loop_scope
);
6847 *cur_scope
->bsym
= gjmp(*cur_scope
->bsym
);
6850 } else if (t
== TOK_CONTINUE
) {
6852 if (!cur_scope
->csym
)
6853 tcc_error("cannot continue");
6854 leave_scope(loop_scope
);
6855 *cur_scope
->csym
= gjmp(*cur_scope
->csym
);
6858 } else if (t
== TOK_FOR
) {
6863 /* c99 for-loop init decl? */
6864 if (!decl0(VT_LOCAL
, 1, NULL
)) {
6865 /* no, regular for-loop init expr */
6893 } else if (t
== TOK_DO
) {
6907 } else if (t
== TOK_SWITCH
) {
6908 struct switch_t
*sw
;
6910 sw
= tcc_mallocz(sizeof *sw
);
6912 sw
->scope
= cur_scope
;
6913 sw
->prev
= cur_switch
;
6919 sw
->sv
= *vtop
--; /* save switch value */
6922 b
= gjmp(0); /* jump to first case */
6924 a
= gjmp(a
); /* add implicit break */
6928 if (sw
->sv
.type
.t
& VT_UNSIGNED
)
6929 qsort(sw
->p
, sw
->n
, sizeof(void*), case_cmpu
);
6931 qsort(sw
->p
, sw
->n
, sizeof(void*), case_cmpi
);
6933 for (b
= 1; b
< sw
->n
; b
++)
6934 if (sw
->sv
.type
.t
& VT_UNSIGNED
6935 ? (uint64_t)sw
->p
[b
- 1]->v2
>= (uint64_t)sw
->p
[b
]->v1
6936 : sw
->p
[b
- 1]->v2
>= sw
->p
[b
]->v1
)
6937 tcc_error("duplicate case value");
6941 d
= 0, gcase(sw
->p
, sw
->n
, &d
);
6944 gsym_addr(d
, sw
->def_sym
);
6950 dynarray_reset(&sw
->p
, &sw
->n
);
6951 cur_switch
= sw
->prev
;
6954 } else if (t
== TOK_CASE
) {
6955 struct case_t
*cr
= tcc_malloc(sizeof(struct case_t
));
6958 cr
->v1
= cr
->v2
= expr_const64();
6959 if (gnu_ext
&& tok
== TOK_DOTS
) {
6961 cr
->v2
= expr_const64();
6962 if ((!(cur_switch
->sv
.type
.t
& VT_UNSIGNED
) && cr
->v2
< cr
->v1
)
6963 || (cur_switch
->sv
.type
.t
& VT_UNSIGNED
&& (uint64_t)cr
->v2
< (uint64_t)cr
->v1
))
6964 tcc_warning("empty case range");
6967 tcc_tcov_reset_ind(tcc_state
);
6969 dynarray_add(&cur_switch
->p
, &cur_switch
->n
, cr
);
6972 goto block_after_label
;
6974 } else if (t
== TOK_DEFAULT
) {
6977 if (cur_switch
->def_sym
)
6978 tcc_error("too many 'default'");
6980 tcc_tcov_reset_ind(tcc_state
);
6981 cur_switch
->def_sym
= gind();
6984 goto block_after_label
;
6986 } else if (t
== TOK_GOTO
) {
6987 if (cur_scope
->vla
.num
)
6988 vla_restore(cur_scope
->vla
.locorig
);
6989 if (tok
== '*' && gnu_ext
) {
6993 if ((vtop
->type
.t
& VT_BTYPE
) != VT_PTR
)
6997 } else if (tok
>= TOK_UIDENT
) {
6998 s
= label_find(tok
);
6999 /* put forward definition if needed */
7001 s
= label_push(&global_label_stack
, tok
, LABEL_FORWARD
);
7002 else if (s
->r
== LABEL_DECLARED
)
7003 s
->r
= LABEL_FORWARD
;
7005 if (s
->r
& LABEL_FORWARD
) {
7006 /* start new goto chain for cleanups, linked via label->next */
7007 if (cur_scope
->cl
.s
&& !nocode_wanted
) {
7008 sym_push2(&pending_gotos
, SYM_FIELD
, 0, cur_scope
->cl
.n
);
7009 pending_gotos
->prev_tok
= s
;
7010 s
= sym_push2(&s
->next
, SYM_FIELD
, 0, 0);
7011 pending_gotos
->next
= s
;
7013 s
->jnext
= gjmp(s
->jnext
);
7015 try_call_cleanup_goto(s
->cleanupstate
);
7016 gjmp_addr(s
->jnext
);
7021 expect("label identifier");
7025 } else if (t
== TOK_ASM1
|| t
== TOK_ASM2
|| t
== TOK_ASM3
) {
7029 if (tok
== ':' && t
>= TOK_UIDENT
) {
7034 if (s
->r
== LABEL_DEFINED
)
7035 tcc_error("duplicate label '%s'", get_tok_str(s
->v
, NULL
));
7036 s
->r
= LABEL_DEFINED
;
7038 Sym
*pcl
; /* pending cleanup goto */
7039 for (pcl
= s
->next
; pcl
; pcl
= pcl
->prev
)
7041 sym_pop(&s
->next
, NULL
, 0);
7045 s
= label_push(&global_label_stack
, t
, LABEL_DEFINED
);
7048 s
->cleanupstate
= cur_scope
->cl
.s
;
7051 vla_restore(cur_scope
->vla
.loc
);
7054 /* we accept this, but it is a mistake */
7055 tcc_warning_c(warn_all
)("deprecated use of label at end of compound statement");
7058 /* expression case */
7075 tcc_tcov_check_line (tcc_state
, 0), tcc_tcov_block_end (tcc_state
, 0);
7078 /* This skips over a stream of tokens containing balanced {} and ()
7079 pairs, stopping at outer ',' ';' and '}' (or matching '}' if we started
7080 with a '{'). If STR then allocates and stores the skipped tokens
7081 in *STR. This doesn't check if () and {} are nested correctly,
7082 i.e. "({)}" is accepted. */
7083 static void skip_or_save_block(TokenString
**str
)
7085 int braces
= tok
== '{';
7088 *str
= tok_str_alloc();
7090 while ((level
> 0 || (tok
!= '}' && tok
!= ',' && tok
!= ';' && tok
!= ')'))) {
7092 if (tok
== TOK_EOF
) {
7093 if (str
|| level
> 0)
7094 tcc_error("unexpected end of file");
7099 tok_str_add_tok(*str
);
7102 if (t
== '{' || t
== '(') {
7104 } else if (t
== '}' || t
== ')') {
7106 if (level
== 0 && braces
&& t
== '}')
7111 tok_str_add(*str
, -1);
7112 tok_str_add(*str
, 0);
7116 #define EXPR_CONST 1
7119 static void parse_init_elem(int expr_type
)
7121 int saved_global_expr
;
7124 /* compound literals must be allocated globally in this case */
7125 saved_global_expr
= global_expr
;
7128 global_expr
= saved_global_expr
;
7129 /* NOTE: symbols are accepted, as well as lvalue for anon symbols
7130 (compound literals). */
7131 if (((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) != VT_CONST
7132 && ((vtop
->r
& (VT_SYM
|VT_LVAL
)) != (VT_SYM
|VT_LVAL
)
7133 || vtop
->sym
->v
< SYM_FIRST_ANOM
))
7134 #ifdef TCC_TARGET_PE
7135 || ((vtop
->r
& VT_SYM
) && vtop
->sym
->a
.dllimport
)
7138 tcc_error("initializer element is not constant");
7147 static void init_assert(init_params
*p
, int offset
)
7149 if (p
->sec
? !NODATA_WANTED
&& offset
> p
->sec
->data_offset
7150 : !nocode_wanted
&& offset
> p
->local_offset
)
7151 tcc_internal_error("initializer overflow");
7154 #define init_assert(sec, offset)
7157 /* put zeros for variable based init */
7158 static void init_putz(init_params
*p
, unsigned long c
, int size
)
7160 init_assert(p
, c
+ size
);
7162 /* nothing to do because globals are already set to zero */
7164 vpush_helper_func(TOK_memset
);
7166 #ifdef TCC_TARGET_ARM
7178 #define DIF_SIZE_ONLY 2
7179 #define DIF_HAVE_ELEM 4
7182 /* delete relocations for specified range c ... c + size. Unfortunatly
7183 in very special cases, relocations may occur unordered */
7184 static void decl_design_delrels(Section
*sec
, int c
, int size
)
7186 ElfW_Rel
*rel
, *rel2
, *rel_end
;
7187 if (!sec
|| !sec
->reloc
)
7189 rel
= rel2
= (ElfW_Rel
*)sec
->reloc
->data
;
7190 rel_end
= (ElfW_Rel
*)(sec
->reloc
->data
+ sec
->reloc
->data_offset
);
7191 while (rel
< rel_end
) {
7192 if (rel
->r_offset
>= c
&& rel
->r_offset
< c
+ size
) {
7193 sec
->reloc
->data_offset
-= sizeof *rel
;
7196 memcpy(rel2
, rel
, sizeof *rel
);
7203 static void decl_design_flex(init_params
*p
, Sym
*ref
, int index
)
7205 if (ref
== p
->flex_array_ref
) {
7206 if (index
>= ref
->c
)
7208 } else if (ref
->c
< 0)
7209 tcc_error("flexible array has zero size in this context");
7212 /* t is the array or struct type. c is the array or struct
7213 address. cur_field is the pointer to the current
7214 field, for arrays the 'c' member contains the current start
7215 index. 'flags' is as in decl_initializer.
7216 'al' contains the already initialized length of the
7217 current container (starting at c). This returns the new length of that. */
7218 static int decl_designator(init_params
*p
, CType
*type
, unsigned long c
,
7219 Sym
**cur_field
, int flags
, int al
)
7222 int index
, index_last
, align
, l
, nb_elems
, elem_size
;
7223 unsigned long corig
= c
;
7228 if (flags
& DIF_HAVE_ELEM
)
7231 if (gnu_ext
&& tok
>= TOK_UIDENT
) {
7238 /* NOTE: we only support ranges for last designator */
7239 while (nb_elems
== 1 && (tok
== '[' || tok
== '.')) {
7241 if (!(type
->t
& VT_ARRAY
))
7242 expect("array type");
7244 index
= index_last
= expr_const();
7245 if (tok
== TOK_DOTS
&& gnu_ext
) {
7247 index_last
= expr_const();
7251 decl_design_flex(p
, s
, index_last
);
7252 if (index
< 0 || index_last
>= s
->c
|| index_last
< index
)
7253 tcc_error("index exceeds array bounds or range is empty");
7255 (*cur_field
)->c
= index_last
;
7256 type
= pointed_type(type
);
7257 elem_size
= type_size(type
, &align
);
7258 c
+= index
* elem_size
;
7259 nb_elems
= index_last
- index
+ 1;
7266 if ((type
->t
& VT_BTYPE
) != VT_STRUCT
)
7267 expect("struct/union type");
7269 f
= find_field(type
, l
, &cumofs
);
7282 } else if (!gnu_ext
) {
7287 if (type
->t
& VT_ARRAY
) {
7288 index
= (*cur_field
)->c
;
7290 decl_design_flex(p
, s
, index
);
7292 tcc_error("too many initializers");
7293 type
= pointed_type(type
);
7294 elem_size
= type_size(type
, &align
);
7295 c
+= index
* elem_size
;
7298 while (f
&& (f
->v
& SYM_FIRST_ANOM
) && (f
->type
.t
& VT_BITFIELD
))
7299 *cur_field
= f
= f
->next
;
7301 tcc_error("too many initializers");
7307 if (!elem_size
) /* for structs */
7308 elem_size
= type_size(type
, &align
);
7310 /* Using designators the same element can be initialized more
7311 than once. In that case we need to delete possibly already
7312 existing relocations. */
7313 if (!(flags
& DIF_SIZE_ONLY
) && c
- corig
< al
) {
7314 decl_design_delrels(p
->sec
, c
, elem_size
* nb_elems
);
7315 flags
&= ~DIF_CLEAR
; /* mark stack dirty too */
7318 decl_initializer(p
, type
, c
, flags
& ~DIF_FIRST
);
7320 if (!(flags
& DIF_SIZE_ONLY
) && nb_elems
> 1) {
7324 if (p
->sec
|| (type
->t
& VT_ARRAY
)) {
7325 /* make init_putv/vstore believe it were a struct */
7327 t1
.t
= VT_STRUCT
, t1
.ref
= &aref
;
7331 vpush_ref(type
, p
->sec
, c
, elem_size
);
7333 vset(type
, VT_LOCAL
|VT_LVAL
, c
);
7334 for (i
= 1; i
< nb_elems
; i
++) {
7336 init_putv(p
, type
, c
+ elem_size
* i
);
7341 c
+= nb_elems
* elem_size
;
7347 /* store a value or an expression directly in global data or in local array */
7348 static void init_putv(init_params
*p
, CType
*type
, unsigned long c
)
7354 Section
*sec
= p
->sec
;
7358 dtype
.t
&= ~VT_CONSTANT
; /* need to do that to avoid false warning */
7360 size
= type_size(type
, &align
);
7361 if (type
->t
& VT_BITFIELD
)
7362 size
= (BIT_POS(type
->t
) + BIT_SIZE(type
->t
) + 7) / 8;
7363 init_assert(p
, c
+ size
);
7366 /* XXX: not portable */
7367 /* XXX: generate error if incorrect relocation */
7368 gen_assign_cast(&dtype
);
7369 bt
= type
->t
& VT_BTYPE
;
7371 if ((vtop
->r
& VT_SYM
)
7373 && (bt
!= (PTR_SIZE
== 8 ? VT_LLONG
: VT_INT
)
7374 || (type
->t
& VT_BITFIELD
))
7375 && !((vtop
->r
& VT_CONST
) && vtop
->sym
->v
>= SYM_FIRST_ANOM
)
7377 tcc_error("initializer element is not computable at load time");
7379 if (NODATA_WANTED
) {
7384 ptr
= sec
->data
+ c
;
7387 /* XXX: make code faster ? */
7388 if ((vtop
->r
& (VT_SYM
|VT_CONST
)) == (VT_SYM
|VT_CONST
) &&
7389 vtop
->sym
->v
>= SYM_FIRST_ANOM
&&
7390 /* XXX This rejects compound literals like
7391 '(void *){ptr}'. The problem is that '&sym' is
7392 represented the same way, which would be ruled out
7393 by the SYM_FIRST_ANOM check above, but also '"string"'
7394 in 'char *p = "string"' is represented the same
7395 with the type being VT_PTR and the symbol being an
7396 anonymous one. That is, there's no difference in vtop
7397 between '(void *){x}' and '&(void *){x}'. Ignore
7398 pointer typed entities here. Hopefully no real code
7399 will ever use compound literals with scalar type. */
7400 (vtop
->type
.t
& VT_BTYPE
) != VT_PTR
) {
7401 /* These come from compound literals, memcpy stuff over. */
7405 esym
= elfsym(vtop
->sym
);
7406 ssec
= tcc_state
->sections
[esym
->st_shndx
];
7407 memmove (ptr
, ssec
->data
+ esym
->st_value
+ (int)vtop
->c
.i
, size
);
7409 /* We need to copy over all memory contents, and that
7410 includes relocations. Use the fact that relocs are
7411 created it order, so look from the end of relocs
7412 until we hit one before the copied region. */
7413 unsigned long relofs
= ssec
->reloc
->data_offset
;
7414 while (relofs
>= sizeof(*rel
)) {
7415 relofs
-= sizeof(*rel
);
7416 rel
= (ElfW_Rel
*)(ssec
->reloc
->data
+ relofs
);
7417 if (rel
->r_offset
>= esym
->st_value
+ size
)
7419 if (rel
->r_offset
< esym
->st_value
)
7421 put_elf_reloca(symtab_section
, sec
,
7422 c
+ rel
->r_offset
- esym
->st_value
,
7423 ELFW(R_TYPE
)(rel
->r_info
),
7424 ELFW(R_SYM
)(rel
->r_info
),
7434 if (type
->t
& VT_BITFIELD
) {
7435 int bit_pos
, bit_size
, bits
, n
;
7436 unsigned char *p
, v
, m
;
7437 bit_pos
= BIT_POS(vtop
->type
.t
);
7438 bit_size
= BIT_SIZE(vtop
->type
.t
);
7439 p
= (unsigned char*)ptr
+ (bit_pos
>> 3);
7440 bit_pos
&= 7, bits
= 0;
7445 v
= val
>> bits
<< bit_pos
;
7446 m
= ((1 << n
) - 1) << bit_pos
;
7447 *p
= (*p
& ~m
) | (v
& m
);
7448 bits
+= n
, bit_size
-= n
, bit_pos
= 0, ++p
;
7453 *(char *)ptr
= val
!= 0;
7459 write16le(ptr
, val
);
7462 write32le(ptr
, val
);
7465 write64le(ptr
, val
);
7468 #if defined TCC_IS_NATIVE_387
7469 /* Host and target platform may be different but both have x87.
7470 On windows, tcc does not use VT_LDOUBLE, except when it is a
7471 cross compiler. In this case a mingw gcc as host compiler
7472 comes here with 10-byte long doubles, while msvc or tcc won't.
7473 tcc itself can still translate by asm.
7474 In any case we avoid possibly random bytes 11 and 12.
7476 if (sizeof (long double) >= 10)
7477 memcpy(ptr
, &vtop
->c
.ld
, 10);
7479 else if (sizeof (long double) == sizeof (double))
7480 __asm__("fldl %1\nfstpt %0\n" : "=m" (*ptr
) : "m" (vtop
->c
.ld
));
7482 else if (vtop
->c
.ld
== 0.0)
7486 /* For other platforms it should work natively, but may not work
7487 for cross compilers */
7488 if (sizeof(long double) == LDOUBLE_SIZE
)
7489 memcpy(ptr
, &vtop
->c
.ld
, LDOUBLE_SIZE
);
7490 else if (sizeof(double) == LDOUBLE_SIZE
)
7491 memcpy(ptr
, &vtop
->c
.ld
, LDOUBLE_SIZE
);
7492 #ifndef TCC_CROSS_TEST
7494 tcc_error("can't cross compile long double constants");
7499 /* intptr_t may need a reloc too, see tcctest.c:relocation_test() */
7502 if (vtop
->r
& VT_SYM
)
7503 greloca(sec
, vtop
->sym
, c
, R_DATA_PTR
, val
);
7505 write64le(ptr
, val
);
7508 write32le(ptr
, val
);
7512 write64le(ptr
, val
);
7516 if (vtop
->r
& VT_SYM
)
7517 greloc(sec
, vtop
->sym
, c
, R_DATA_PTR
);
7518 write32le(ptr
, val
);
7522 //tcc_internal_error("unexpected type");
7528 vset(&dtype
, VT_LOCAL
|VT_LVAL
, c
);
7535 /* 't' contains the type and storage info. 'c' is the offset of the
7536 object in section 'sec'. If 'sec' is NULL, it means stack based
7537 allocation. 'flags & DIF_FIRST' is true if array '{' must be read (multi
7538 dimension implicit array init handling). 'flags & DIF_SIZE_ONLY' is true if
7539 size only evaluation is wanted (only for arrays). */
7540 static void decl_initializer(init_params
*p
, CType
*type
, unsigned long c
, int flags
)
7542 int len
, n
, no_oblock
, i
;
7548 /* generate line number info */
7549 if (debug_modes
&& !p
->sec
)
7550 tcc_debug_line(tcc_state
), tcc_tcov_check_line (tcc_state
, 1);
7552 if (!(flags
& DIF_HAVE_ELEM
) && tok
!= '{' &&
7553 /* In case of strings we have special handling for arrays, so
7554 don't consume them as initializer value (which would commit them
7555 to some anonymous symbol). */
7556 tok
!= TOK_LSTR
&& tok
!= TOK_STR
&&
7557 (!(flags
& DIF_SIZE_ONLY
)
7558 /* a struct may be initialized from a struct of same type, as in
7559 struct {int x,y;} a = {1,2}, b = {3,4}, c[] = {a,b};
7560 In that case we need to parse the element in order to check
7561 it for compatibility below */
7562 || (type
->t
& VT_BTYPE
) == VT_STRUCT
)
7564 int ncw_prev
= nocode_wanted
;
7565 if ((flags
& DIF_SIZE_ONLY
) && !p
->sec
)
7567 parse_init_elem(!p
->sec
? EXPR_ANY
: EXPR_CONST
);
7568 nocode_wanted
= ncw_prev
;
7569 flags
|= DIF_HAVE_ELEM
;
7572 if (type
->t
& VT_ARRAY
) {
7574 if (((flags
& DIF_FIRST
) && tok
!= TOK_LSTR
&& tok
!= TOK_STR
) ||
7582 t1
= pointed_type(type
);
7583 size1
= type_size(t1
, &align1
);
7585 /* only parse strings here if correct type (otherwise: handle
7586 them as ((w)char *) expressions */
7587 if ((tok
== TOK_LSTR
&&
7588 #ifdef TCC_TARGET_PE
7589 (t1
->t
& VT_BTYPE
) == VT_SHORT
&& (t1
->t
& VT_UNSIGNED
)
7591 (t1
->t
& VT_BTYPE
) == VT_INT
7593 ) || (tok
== TOK_STR
&& (t1
->t
& VT_BTYPE
) == VT_BYTE
)) {
7595 cstr_reset(&initstr
);
7596 if (size1
!= (tok
== TOK_STR
? 1 : sizeof(nwchar_t
)))
7597 tcc_error("unhandled string literal merging");
7598 while (tok
== TOK_STR
|| tok
== TOK_LSTR
) {
7600 initstr
.size
-= size1
;
7602 len
+= tokc
.str
.size
;
7604 len
+= tokc
.str
.size
/ sizeof(nwchar_t
);
7606 cstr_cat(&initstr
, tokc
.str
.data
, tokc
.str
.size
);
7609 if (tok
!= ')' && tok
!= '}' && tok
!= ',' && tok
!= ';'
7610 && tok
!= TOK_EOF
) {
7611 /* Not a lone literal but part of a bigger expression. */
7612 unget_tok(size1
== 1 ? TOK_STR
: TOK_LSTR
);
7613 tokc
.str
.size
= initstr
.size
;
7614 tokc
.str
.data
= initstr
.data
;
7618 decl_design_flex(p
, s
, len
);
7619 if (!(flags
& DIF_SIZE_ONLY
)) {
7624 tcc_warning("initializer-string for array is too long");
7625 /* in order to go faster for common case (char
7626 string in global variable, we handle it
7628 if (p
->sec
&& size1
== 1) {
7629 init_assert(p
, c
+ nb
);
7631 memcpy(p
->sec
->data
+ c
, initstr
.data
, nb
);
7635 /* only add trailing zero if enough storage (no
7636 warning in this case since it is standard) */
7637 if (flags
& DIF_CLEAR
)
7640 init_putz(p
, c
+ i
* size1
, (n
- i
) * size1
);
7644 } else if (size1
== 1)
7645 ch
= ((unsigned char *)initstr
.data
)[i
];
7647 ch
= ((nwchar_t
*)initstr
.data
)[i
];
7649 init_putv(p
, t1
, c
+ i
* size1
);
7660 /* zero memory once in advance */
7661 if (!(flags
& (DIF_CLEAR
| DIF_SIZE_ONLY
))) {
7662 init_putz(p
, c
, n
*size1
);
7667 /* GNU extension: if the initializer is empty for a flex array,
7668 it's size is zero. We won't enter the loop, so set the size
7670 decl_design_flex(p
, s
, len
);
7671 while (tok
!= '}' || (flags
& DIF_HAVE_ELEM
)) {
7672 len
= decl_designator(p
, type
, c
, &f
, flags
, len
);
7673 flags
&= ~DIF_HAVE_ELEM
;
7674 if (type
->t
& VT_ARRAY
) {
7676 /* special test for multi dimensional arrays (may not
7677 be strictly correct if designators are used at the
7679 if (no_oblock
&& len
>= n
*size1
)
7682 if (s
->type
.t
== VT_UNION
)
7686 if (no_oblock
&& f
== NULL
)
7698 } else if ((flags
& DIF_HAVE_ELEM
)
7699 /* Use i_c_parameter_t, to strip toplevel qualifiers.
7700 The source type might have VT_CONSTANT set, which is
7701 of course assignable to non-const elements. */
7702 && is_compatible_unqualified_types(type
, &vtop
->type
)) {
7705 } else if ((type
->t
& VT_BTYPE
) == VT_STRUCT
) {
7707 if ((flags
& DIF_FIRST
) || tok
== '{') {
7717 } else if (tok
== '{') {
7718 if (flags
& DIF_HAVE_ELEM
)
7721 decl_initializer(p
, type
, c
, flags
& ~DIF_HAVE_ELEM
);
7724 } else one_elem
: if ((flags
& DIF_SIZE_ONLY
)) {
7725 /* If we supported only ISO C we wouldn't have to accept calling
7726 this on anything than an array if DIF_SIZE_ONLY (and even then
7727 only on the outermost level, so no recursion would be needed),
7728 because initializing a flex array member isn't supported.
7729 But GNU C supports it, so we need to recurse even into
7730 subfields of structs and arrays when DIF_SIZE_ONLY is set. */
7731 /* just skip expression */
7732 if (flags
& DIF_HAVE_ELEM
)
7735 skip_or_save_block(NULL
);
7738 if (!(flags
& DIF_HAVE_ELEM
)) {
7739 /* This should happen only when we haven't parsed
7740 the init element above for fear of committing a
7741 string constant to memory too early. */
7742 if (tok
!= TOK_STR
&& tok
!= TOK_LSTR
)
7743 expect("string constant");
7744 parse_init_elem(!p
->sec
? EXPR_ANY
: EXPR_CONST
);
7746 if (!p
->sec
&& (flags
& DIF_CLEAR
) /* container was already zero'd */
7747 && (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
7749 && btype_size(type
->t
& VT_BTYPE
) /* not for fp constants */
7753 init_putv(p
, type
, c
);
7757 /* parse an initializer for type 't' if 'has_init' is non zero, and
7758 allocate space in local or global data space ('r' is either
7759 VT_LOCAL or VT_CONST). If 'v' is non zero, then an associated
7760 variable 'v' of scope 'scope' is declared before initializers
7761 are parsed. If 'v' is zero, then a reference to the new object
7762 is put in the value stack. If 'has_init' is 2, a special parsing
7763 is done to handle string constants. */
7764 static void decl_initializer_alloc(CType
*type
, AttributeDef
*ad
, int r
,
7765 int has_init
, int v
, int scope
)
7767 int size
, align
, addr
;
7768 TokenString
*init_str
= NULL
;
7771 Sym
*flexible_array
;
7773 int saved_nocode_wanted
= nocode_wanted
;
7774 #ifdef CONFIG_TCC_BCHECK
7775 int bcheck
= tcc_state
->do_bounds_check
&& !NODATA_WANTED
;
7777 init_params p
= {0};
7779 /* Always allocate static or global variables */
7780 if (v
&& (r
& VT_VALMASK
) == VT_CONST
)
7781 nocode_wanted
|= 0x80000000;
7783 flexible_array
= NULL
;
7784 size
= type_size(type
, &align
);
7786 /* exactly one flexible array may be initialized, either the
7787 toplevel array or the last member of the toplevel struct */
7790 /* If the base type itself was an array type of unspecified size
7791 (like in 'typedef int arr[]; arr x = {1};') then we will
7792 overwrite the unknown size by the real one for this decl.
7793 We need to unshare the ref symbol holding that size. */
7794 type
->ref
= sym_push(SYM_FIELD
, &type
->ref
->type
, 0, type
->ref
->c
);
7795 p
.flex_array_ref
= type
->ref
;
7797 } else if (has_init
&& (type
->t
& VT_BTYPE
) == VT_STRUCT
) {
7798 Sym
*field
= type
->ref
->next
;
7801 field
= field
->next
;
7802 if (field
->type
.t
& VT_ARRAY
&& field
->type
.ref
->c
< 0) {
7803 flexible_array
= field
;
7804 p
.flex_array_ref
= field
->type
.ref
;
7811 /* If unknown size, do a dry-run 1st pass */
7813 tcc_error("unknown type size");
7814 if (has_init
== 2) {
7815 /* only get strings */
7816 init_str
= tok_str_alloc();
7817 while (tok
== TOK_STR
|| tok
== TOK_LSTR
) {
7818 tok_str_add_tok(init_str
);
7821 tok_str_add(init_str
, -1);
7822 tok_str_add(init_str
, 0);
7824 skip_or_save_block(&init_str
);
7828 begin_macro(init_str
, 1);
7830 decl_initializer(&p
, type
, 0, DIF_FIRST
| DIF_SIZE_ONLY
);
7831 /* prepare second initializer parsing */
7832 macro_ptr
= init_str
->str
;
7835 /* if still unknown size, error */
7836 size
= type_size(type
, &align
);
7838 tcc_error("unknown type size");
7840 /* If there's a flex member and it was used in the initializer
7842 if (flexible_array
&& flexible_array
->type
.ref
->c
> 0)
7843 size
+= flexible_array
->type
.ref
->c
7844 * pointed_size(&flexible_array
->type
);
7847 /* take into account specified alignment if bigger */
7848 if (ad
->a
.aligned
) {
7849 int speca
= 1 << (ad
->a
.aligned
- 1);
7852 } else if (ad
->a
.packed
) {
7856 if (!v
&& NODATA_WANTED
)
7857 size
= 0, align
= 1;
7859 if ((r
& VT_VALMASK
) == VT_LOCAL
) {
7861 #ifdef CONFIG_TCC_BCHECK
7863 /* add padding between stack variables for bound checking */
7867 loc
= (loc
- size
) & -align
;
7869 p
.local_offset
= addr
+ size
;
7870 #ifdef CONFIG_TCC_BCHECK
7872 /* add padding between stack variables for bound checking */
7877 /* local variable */
7878 #ifdef CONFIG_TCC_ASM
7879 if (ad
->asm_label
) {
7880 int reg
= asm_parse_regvar(ad
->asm_label
);
7882 r
= (r
& ~VT_VALMASK
) | reg
;
7885 sym
= sym_push(v
, type
, r
, addr
);
7886 if (ad
->cleanup_func
) {
7887 Sym
*cls
= sym_push2(&all_cleanups
,
7888 SYM_FIELD
| ++cur_scope
->cl
.n
, 0, 0);
7889 cls
->prev_tok
= sym
;
7890 cls
->next
= ad
->cleanup_func
;
7891 cls
->ncl
= cur_scope
->cl
.s
;
7892 cur_scope
->cl
.s
= cls
;
7897 /* push local reference */
7898 vset(type
, r
, addr
);
7902 if (v
&& scope
== VT_CONST
) {
7903 /* see if the symbol was already defined */
7906 if (p
.flex_array_ref
&& (sym
->type
.t
& type
->t
& VT_ARRAY
)
7907 && sym
->type
.ref
->c
> type
->ref
->c
) {
7908 /* flex array was already declared with explicit size
7910 int arr[] = { 1,2,3 }; */
7911 type
->ref
->c
= sym
->type
.ref
->c
;
7912 size
= type_size(type
, &align
);
7914 patch_storage(sym
, ad
, type
);
7915 /* we accept several definitions of the same global variable. */
7916 if (!has_init
&& sym
->c
&& elfsym(sym
)->st_shndx
!= SHN_UNDEF
)
7921 /* allocate symbol in corresponding section */
7925 while ((tp
->t
& (VT_BTYPE
|VT_ARRAY
)) == (VT_PTR
|VT_ARRAY
))
7926 tp
= &tp
->ref
->type
;
7927 if (tp
->t
& VT_CONSTANT
) {
7928 sec
= rodata_section
;
7929 } else if (has_init
) {
7931 /*if (tcc_state->g_debug & 4)
7932 tcc_warning("rw data: %s", get_tok_str(v, 0));*/
7933 } else if (tcc_state
->nocommon
)
7938 addr
= section_add(sec
, size
, align
);
7939 #ifdef CONFIG_TCC_BCHECK
7940 /* add padding if bound check */
7942 section_add(sec
, 1, 1);
7945 addr
= align
; /* SHN_COMMON is special, symbol value is align */
7946 sec
= common_section
;
7951 sym
= sym_push(v
, type
, r
| VT_SYM
, 0);
7952 patch_storage(sym
, ad
, NULL
);
7954 /* update symbol definition */
7955 put_extern_sym(sym
, sec
, addr
, size
);
7957 /* push global reference */
7958 vpush_ref(type
, sec
, addr
, size
);
7963 #ifdef CONFIG_TCC_BCHECK
7964 /* handles bounds now because the symbol must be defined
7965 before for the relocation */
7969 greloca(bounds_section
, sym
, bounds_section
->data_offset
, R_DATA_PTR
, 0);
7970 /* then add global bound info */
7971 bounds_ptr
= section_ptr_add(bounds_section
, 2 * sizeof(addr_t
));
7972 bounds_ptr
[0] = 0; /* relocated */
7973 bounds_ptr
[1] = size
;
7978 if (type
->t
& VT_VLA
) {
7984 /* save before-VLA stack pointer if needed */
7985 if (cur_scope
->vla
.num
== 0) {
7986 if (cur_scope
->prev
&& cur_scope
->prev
->vla
.num
) {
7987 cur_scope
->vla
.locorig
= cur_scope
->prev
->vla
.loc
;
7989 gen_vla_sp_save(loc
-= PTR_SIZE
);
7990 cur_scope
->vla
.locorig
= loc
;
7994 vpush_type_size(type
, &a
);
7995 gen_vla_alloc(type
, a
);
7996 #if defined TCC_TARGET_PE && defined TCC_TARGET_X86_64
7997 /* on _WIN64, because of the function args scratch area, the
7998 result of alloca differs from RSP and is returned in RAX. */
7999 gen_vla_result(addr
), addr
= (loc
-= PTR_SIZE
);
8001 gen_vla_sp_save(addr
);
8002 cur_scope
->vla
.loc
= addr
;
8003 cur_scope
->vla
.num
++;
8004 } else if (has_init
) {
8006 decl_initializer(&p
, type
, addr
, DIF_FIRST
);
8007 /* patch flexible array member size back to -1, */
8008 /* for possible subsequent similar declarations */
8010 flexible_array
->type
.ref
->c
= -1;
8014 /* restore parse state if needed */
8020 nocode_wanted
= saved_nocode_wanted
;
8023 /* generate vla code saved in post_type() */
8024 static void func_vla_arg_code(Sym
*arg
)
8027 TokenString
*vla_array_tok
= NULL
;
8030 func_vla_arg_code(arg
->type
.ref
);
8032 if ((arg
->type
.t
& VT_VLA
) && arg
->type
.ref
->vla_array_str
) {
8033 loc
-= type_size(&int_type
, &align
);
8035 arg
->type
.ref
->c
= loc
;
8038 vla_array_tok
= tok_str_alloc();
8039 vla_array_tok
->str
= arg
->type
.ref
->vla_array_str
;
8040 begin_macro(vla_array_tok
, 1);
8045 vpush_type_size(&arg
->type
.ref
->type
, &align
);
8047 vset(&int_type
, VT_LOCAL
|VT_LVAL
, arg
->type
.ref
->c
);
8054 static void func_vla_arg(Sym
*sym
)
8058 for (arg
= sym
->type
.ref
->next
; arg
; arg
= arg
->next
)
8059 if (arg
->type
.t
& VT_VLA
)
8060 func_vla_arg_code(arg
);
8063 /* parse a function defined by symbol 'sym' and generate its code in
8064 'cur_text_section' */
8065 static void gen_function(Sym
*sym
)
8067 struct scope f
= { 0 };
8068 cur_scope
= root_scope
= &f
;
8070 ind
= cur_text_section
->data_offset
;
8071 if (sym
->a
.aligned
) {
8072 size_t newoff
= section_add(cur_text_section
, 0,
8073 1 << (sym
->a
.aligned
- 1));
8074 gen_fill_nops(newoff
- ind
);
8076 /* NOTE: we patch the symbol size later */
8077 put_extern_sym(sym
, cur_text_section
, ind
, 0);
8078 if (sym
->type
.ref
->f
.func_ctor
)
8079 add_array (tcc_state
, ".init_array", sym
->c
);
8080 if (sym
->type
.ref
->f
.func_dtor
)
8081 add_array (tcc_state
, ".fini_array", sym
->c
);
8083 funcname
= get_tok_str(sym
->v
, NULL
);
8085 func_vt
= sym
->type
.ref
->type
;
8086 func_var
= sym
->type
.ref
->f
.func_type
== FUNC_ELLIPSIS
;
8088 /* put debug symbol */
8089 tcc_debug_funcstart(tcc_state
, sym
);
8090 /* push a dummy symbol to enable local sym storage */
8091 sym_push2(&local_stack
, SYM_FIELD
, 0, 0);
8092 local_scope
= 1; /* for function parameters */
8096 clear_temp_local_var_list();
8101 /* reset local stack */
8102 pop_local_syms(NULL
, 0);
8104 cur_text_section
->data_offset
= ind
;
8106 label_pop(&global_label_stack
, NULL
, 0);
8107 sym_pop(&all_cleanups
, NULL
, 0);
8108 /* patch symbol size */
8109 elfsym(sym
)->st_size
= ind
- func_ind
;
8110 /* end of function */
8111 tcc_debug_funcend(tcc_state
, ind
- func_ind
);
8112 /* It's better to crash than to generate wrong code */
8113 cur_text_section
= NULL
;
8114 funcname
= ""; /* for safety */
8115 func_vt
.t
= VT_VOID
; /* for safety */
8116 func_var
= 0; /* for safety */
8117 ind
= 0; /* for safety */
8119 nocode_wanted
= 0x80000000;
8121 /* do this after funcend debug info */
8125 static void gen_inline_functions(TCCState
*s
)
8128 int inline_generated
, i
;
8129 struct InlineFunc
*fn
;
8131 tcc_open_bf(s
, ":inline:", 0);
8132 /* iterate while inline function are referenced */
8134 inline_generated
= 0;
8135 for (i
= 0; i
< s
->nb_inline_fns
; ++i
) {
8136 fn
= s
->inline_fns
[i
];
8138 if (sym
&& (sym
->c
|| !(sym
->type
.t
& VT_INLINE
))) {
8139 /* the function was used or forced (and then not internal):
8140 generate its code and convert it to a normal function */
8142 tcc_debug_putfile(s
, fn
->filename
);
8143 begin_macro(fn
->func_str
, 1);
8145 cur_text_section
= text_section
;
8149 inline_generated
= 1;
8152 } while (inline_generated
);
8156 static void free_inline_functions(TCCState
*s
)
8159 /* free tokens of unused inline functions */
8160 for (i
= 0; i
< s
->nb_inline_fns
; ++i
) {
8161 struct InlineFunc
*fn
= s
->inline_fns
[i
];
8163 tok_str_free(fn
->func_str
);
8165 dynarray_reset(&s
->inline_fns
, &s
->nb_inline_fns
);
8168 /* 'l' is VT_LOCAL or VT_CONST to define default storage type, or VT_CMP
8169 if parsing old style parameter decl list (and FUNC_SYM is set then) */
8170 static int decl0(int l
, int is_for_loop_init
, Sym
*func_sym
)
8172 int v
, has_init
, r
, oldint
;
8175 AttributeDef ad
, adbase
;
8178 if (tok
== TOK_STATIC_ASSERT
) {
8188 tcc_error("_Static_assert fail");
8190 goto static_assert_out
;
8194 parse_mult_str(&error_str
, "string constant");
8196 tcc_error("%s", (char *)error_str
.data
);
8197 cstr_free(&error_str
);
8205 if (!parse_btype(&btype
, &adbase
, l
== VT_LOCAL
)) {
8206 if (is_for_loop_init
)
8208 /* skip redundant ';' if not in old parameter decl scope */
8209 if (tok
== ';' && l
!= VT_CMP
) {
8215 if (tok
== TOK_ASM1
|| tok
== TOK_ASM2
|| tok
== TOK_ASM3
) {
8216 /* global asm block */
8220 if (tok
>= TOK_UIDENT
) {
8221 /* special test for old K&R protos without explicit int
8222 type. Only accepted when defining global data */
8227 expect("declaration");
8233 if ((btype
.t
& VT_BTYPE
) == VT_STRUCT
) {
8235 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) >= SYM_FIRST_ANOM
)
8236 tcc_warning("unnamed struct/union that defines no instances");
8240 if (IS_ENUM(btype
.t
)) {
8246 while (1) { /* iterate thru each declaration */
8249 type_decl(&type
, &ad
, &v
, TYPE_DIRECT
);
8253 type_to_str(buf
, sizeof(buf
), &type
, get_tok_str(v
, NULL
));
8254 printf("type = '%s'\n", buf
);
8257 if ((type
.t
& VT_BTYPE
) == VT_FUNC
) {
8258 if ((type
.t
& VT_STATIC
) && (l
== VT_LOCAL
))
8259 tcc_error("function without file scope cannot be static");
8260 /* if old style function prototype, we accept a
8263 if (sym
->f
.func_type
== FUNC_OLD
&& l
== VT_CONST
)
8264 decl0(VT_CMP
, 0, sym
);
8265 #if defined TCC_TARGET_MACHO || defined TARGETOS_ANDROID
8266 if (sym
->f
.func_alwinl
8267 && ((type
.t
& (VT_EXTERN
| VT_INLINE
))
8268 == (VT_EXTERN
| VT_INLINE
))) {
8269 /* always_inline functions must be handled as if they
8270 don't generate multiple global defs, even if extern
8271 inline, i.e. GNU inline semantics for those. Rewrite
8272 them into static inline. */
8273 type
.t
&= ~VT_EXTERN
;
8274 type
.t
|= VT_STATIC
;
8277 /* always compile 'extern inline' */
8278 if (type
.t
& VT_EXTERN
)
8279 type
.t
&= ~VT_INLINE
;
8281 } else if (oldint
) {
8282 tcc_warning("type defaults to int");
8285 if (gnu_ext
&& (tok
== TOK_ASM1
|| tok
== TOK_ASM2
|| tok
== TOK_ASM3
)) {
8286 ad
.asm_label
= asm_label_instr();
8287 /* parse one last attribute list, after asm label */
8288 parse_attribute(&ad
);
8290 /* gcc does not allow __asm__("label") with function definition,
8297 #ifdef TCC_TARGET_PE
8298 if (ad
.a
.dllimport
|| ad
.a
.dllexport
) {
8299 if (type
.t
& VT_STATIC
)
8300 tcc_error("cannot have dll linkage with static");
8301 if (type
.t
& VT_TYPEDEF
) {
8302 tcc_warning("'%s' attribute ignored for typedef",
8303 ad
.a
.dllimport
? (ad
.a
.dllimport
= 0, "dllimport") :
8304 (ad
.a
.dllexport
= 0, "dllexport"));
8305 } else if (ad
.a
.dllimport
) {
8306 if ((type
.t
& VT_BTYPE
) == VT_FUNC
)
8309 type
.t
|= VT_EXTERN
;
8315 tcc_error("cannot use local functions");
8316 if ((type
.t
& VT_BTYPE
) != VT_FUNC
)
8317 expect("function definition");
8319 /* reject abstract declarators in function definition
8320 make old style params without decl have int type */
8322 while ((sym
= sym
->next
) != NULL
) {
8323 if (!(sym
->v
& ~SYM_FIELD
))
8324 expect("identifier");
8325 if (sym
->type
.t
== VT_VOID
)
8326 sym
->type
= int_type
;
8329 /* apply post-declaraton attributes */
8330 merge_funcattr(&type
.ref
->f
, &ad
.f
);
8332 /* put function symbol */
8333 type
.t
&= ~VT_EXTERN
;
8334 sym
= external_sym(v
, &type
, 0, &ad
);
8336 /* static inline functions are just recorded as a kind
8337 of macro. Their code will be emitted at the end of
8338 the compilation unit only if they are used */
8339 if (sym
->type
.t
& VT_INLINE
) {
8340 struct InlineFunc
*fn
;
8341 fn
= tcc_malloc(sizeof *fn
+ strlen(file
->filename
));
8342 strcpy(fn
->filename
, file
->filename
);
8344 skip_or_save_block(&fn
->func_str
);
8345 dynarray_add(&tcc_state
->inline_fns
,
8346 &tcc_state
->nb_inline_fns
, fn
);
8348 /* compute text section */
8349 cur_text_section
= ad
.section
;
8350 if (!cur_text_section
)
8351 cur_text_section
= text_section
;
8357 /* find parameter in function parameter list */
8358 for (sym
= func_sym
->next
; sym
; sym
= sym
->next
)
8359 if ((sym
->v
& ~SYM_FIELD
) == v
)
8361 tcc_error("declaration for parameter '%s' but no such parameter",
8362 get_tok_str(v
, NULL
));
8364 if (type
.t
& VT_STORAGE
) /* 'register' is okay */
8365 tcc_error("storage class specified for '%s'",
8366 get_tok_str(v
, NULL
));
8367 if (sym
->type
.t
!= VT_VOID
)
8368 tcc_error("redefinition of parameter '%s'",
8369 get_tok_str(v
, NULL
));
8370 convert_parameter_type(&type
);
8372 } else if (type
.t
& VT_TYPEDEF
) {
8373 /* save typedefed type */
8374 /* XXX: test storage specifiers ? */
8376 if (sym
&& sym
->sym_scope
== local_scope
) {
8377 if (!is_compatible_types(&sym
->type
, &type
)
8378 || !(sym
->type
.t
& VT_TYPEDEF
))
8379 tcc_error("incompatible redefinition of '%s'",
8380 get_tok_str(v
, NULL
));
8383 sym
= sym_push(v
, &type
, 0, 0);
8388 tcc_debug_typedef (tcc_state
, sym
);
8389 } else if ((type
.t
& VT_BTYPE
) == VT_VOID
8390 && !(type
.t
& VT_EXTERN
)) {
8391 tcc_error("declaration of void object");
8394 if ((type
.t
& VT_BTYPE
) == VT_FUNC
) {
8395 /* external function definition */
8396 /* specific case for func_call attribute */
8398 } else if (!(type
.t
& VT_ARRAY
)) {
8399 /* not lvalue if array */
8402 has_init
= (tok
== '=');
8403 if (has_init
&& (type
.t
& VT_VLA
))
8404 tcc_error("variable length array cannot be initialized");
8405 if (((type
.t
& VT_EXTERN
) && (!has_init
|| l
!= VT_CONST
))
8406 || (type
.t
& VT_BTYPE
) == VT_FUNC
8407 /* as with GCC, uninitialized global arrays with no size
8408 are considered extern: */
8409 || ((type
.t
& VT_ARRAY
) && !has_init
8410 && l
== VT_CONST
&& type
.ref
->c
< 0)
8412 /* external variable or function */
8413 type
.t
|= VT_EXTERN
;
8414 sym
= external_sym(v
, &type
, r
, &ad
);
8415 if (ad
.alias_target
) {
8416 /* Aliases need to be emitted when their target
8417 symbol is emitted, even if perhaps unreferenced.
8418 We only support the case where the base is
8419 already defined, otherwise we would need
8420 deferring to emit the aliases until the end of
8421 the compile unit. */
8422 Sym
*alias_target
= sym_find(ad
.alias_target
);
8423 ElfSym
*esym
= elfsym(alias_target
);
8425 tcc_error("unsupported forward __alias__ attribute");
8426 put_extern_sym2(sym
, esym
->st_shndx
,
8427 esym
->st_value
, esym
->st_size
, 1);
8430 if (type
.t
& VT_STATIC
)
8436 else if (l
== VT_CONST
)
8437 /* uninitialized global variables may be overridden */
8438 type
.t
|= VT_EXTERN
;
8439 decl_initializer_alloc(&type
, &ad
, r
, has_init
, v
, l
);
8443 if (is_for_loop_init
)
8455 static void decl(int l
)
8460 /* ------------------------------------------------------------------------- */
8463 /* ------------------------------------------------------------------------- */