2 * TCC - Tiny C Compiler
4 * Copyright (c) 2001-2004 Fabrice Bellard
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
24 /********************************************************/
25 /* global variables */
27 /* loc : local variable index
28 ind : output code index
30 anon_sym: anonymous symbol index
32 ST_DATA
int rsym
, anon_sym
, ind
, loc
;
34 ST_DATA Sym
*global_stack
;
35 ST_DATA Sym
*local_stack
;
36 ST_DATA Sym
*define_stack
;
37 ST_DATA Sym
*global_label_stack
;
38 ST_DATA Sym
*local_label_stack
;
40 static Sym
*sym_free_first
;
41 static void **sym_pools
;
42 static int nb_sym_pools
;
44 static Sym
*all_cleanups
, *pending_gotos
;
45 static int local_scope
;
47 ST_DATA
char debug_modes
;
50 static SValue _vstack
[1 + VSTACK_SIZE
];
51 #define vstack (_vstack + 1)
53 ST_DATA
int const_wanted
; /* true if constant wanted */
54 ST_DATA
int nocode_wanted
; /* no code generation wanted */
55 #define unevalmask 0xffff /* unevaluated subexpression */
56 #define NODATA_WANTED (nocode_wanted > 0) /* no static data output wanted either */
57 #define STATIC_DATA_WANTED (nocode_wanted & 0xC0000000) /* only static data output */
59 /* Automagical code suppression ----> */
60 #define CODE_OFF() (nocode_wanted |= 0x20000000)
61 #define CODE_ON() (nocode_wanted &= ~0x20000000)
63 /* Clear 'nocode_wanted' at label if it was used */
64 ST_FUNC
void gsym(int t
) { if (t
) { gsym_addr(t
, ind
); CODE_ON(); }}
65 static int gind(int known_unreachable
)
68 if (!known_unreachable
)
71 tcc_tcov_block_begin(tcc_state
);
75 /* Set 'nocode_wanted' after unconditional jumps */
76 static void gjmp_addr_acs(int t
) { gjmp_addr(t
); CODE_OFF(); }
/* Emit an unconditional jump chained to 't'; code following it is
   unreachable, so suppress generation until the next label. */
static int gjmp_acs(int t)
{
    int jmp = gjmp(t);
    CODE_OFF();
    return jmp;
}
79 /* These are #undef'd at the end of this file */
80 #define gjmp_addr gjmp_addr_acs
84 ST_DATA
int global_expr
; /* true if compound literals must be allocated globally (used during initializers parsing */
85 ST_DATA CType func_vt
; /* current function return type (used by return instruction) */
86 ST_DATA
int func_var
; /* true if current function is variadic (used by return instruction) */
89 ST_DATA
const char *funcname
;
90 ST_DATA CType int_type
, func_old_type
, char_type
, char_pointer_type
;
91 static CString initstr
;
94 #define VT_SIZE_T (VT_INT | VT_UNSIGNED)
95 #define VT_PTRDIFF_T VT_INT
97 #define VT_SIZE_T (VT_LLONG | VT_UNSIGNED)
98 #define VT_PTRDIFF_T VT_LLONG
100 #define VT_SIZE_T (VT_LONG | VT_LLONG | VT_UNSIGNED)
101 #define VT_PTRDIFF_T (VT_LONG | VT_LLONG)
104 static struct switch_t
{
108 } **p
; int n
; /* list of case ranges */
109 int def_sym
; /* default symbol */
113 struct switch_t
*prev
;
115 } *cur_switch
; /* current switch */
117 #define MAX_TEMP_LOCAL_VARIABLE_NUMBER 8
118 /*list of temporary local variables on the stack in current function. */
119 static struct temp_local_variable
{
120 int location
; //offset on stack. Svalue.c.i
123 } arr_temp_local_vars
[MAX_TEMP_LOCAL_VARIABLE_NUMBER
];
124 static int nb_temp_local_vars
;
126 static struct scope
{
128 struct { int loc
, locorig
, num
; } vla
;
129 struct { Sym
*s
; int n
; } cl
;
132 } *cur_scope
, *loop_scope
, *root_scope
;
141 #define precedence_parser
142 static void init_prec(void);
145 static void gen_cast(CType
*type
);
146 static void gen_cast_s(int t
);
147 static inline CType
*pointed_type(CType
*type
);
148 static int is_compatible_types(CType
*type1
, CType
*type2
);
149 static int parse_btype(CType
*type
, AttributeDef
*ad
, int ignore_label
);
150 static CType
*type_decl(CType
*type
, AttributeDef
*ad
, int *v
, int td
);
151 static void parse_expr_type(CType
*type
);
152 static void init_putv(init_params
*p
, CType
*type
, unsigned long c
);
153 static void decl_initializer(init_params
*p
, CType
*type
, unsigned long c
, int flags
);
154 static void block(int is_expr
);
155 static void decl_initializer_alloc(CType
*type
, AttributeDef
*ad
, int r
, int has_init
, int v
, int scope
);
156 static void decl(int l
);
157 static int decl0(int l
, int is_for_loop_init
, Sym
*);
158 static void expr_eq(void);
159 static void vpush_type_size(CType
*type
, int *a
);
160 static int is_compatible_unqualified_types(CType
*type1
, CType
*type2
);
161 static inline int64_t expr_const64(void);
162 static void vpush64(int ty
, unsigned long long v
);
163 static void vpush(CType
*type
);
164 static int gvtst(int inv
, int t
);
165 static void gen_inline_functions(TCCState
*s
);
166 static void free_inline_functions(TCCState
*s
);
167 static void skip_or_save_block(TokenString
**str
);
168 static void gv_dup(void);
169 static int get_temp_local_var(int size
,int align
);
170 static void clear_temp_local_var_list();
171 static void cast_error(CType
*st
, CType
*dt
);
173 /* ------------------------------------------------------------------------- */
175 ST_INLN
int is_float(int t
)
177 int bt
= t
& VT_BTYPE
;
178 return bt
== VT_LDOUBLE
184 static inline int is_integer_btype(int bt
)
193 static int btype_size(int bt
)
195 return bt
== VT_BYTE
|| bt
== VT_BOOL
? 1 :
199 bt
== VT_PTR
? PTR_SIZE
: 0;
202 /* returns function return register from type */
203 static int R_RET(int t
)
207 #ifdef TCC_TARGET_X86_64
208 if ((t
& VT_BTYPE
) == VT_LDOUBLE
)
210 #elif defined TCC_TARGET_RISCV64
211 if ((t
& VT_BTYPE
) == VT_LDOUBLE
)
217 /* returns 2nd function return register, if any */
218 static int R2_RET(int t
)
224 #elif defined TCC_TARGET_X86_64
229 #elif defined TCC_TARGET_RISCV64
236 /* returns true for two-word types */
237 #define USING_TWO_WORDS(t) (R2_RET(t) != VT_CONST)
239 /* put function return registers to stack value */
240 static void PUT_R_RET(SValue
*sv
, int t
)
242 sv
->r
= R_RET(t
), sv
->r2
= R2_RET(t
);
245 /* returns function return register class for type t */
246 static int RC_RET(int t
)
248 return reg_classes
[R_RET(t
)] & ~(RC_FLOAT
| RC_INT
);
251 /* returns generic register class for type t */
252 static int RC_TYPE(int t
)
256 #ifdef TCC_TARGET_X86_64
257 if ((t
& VT_BTYPE
) == VT_LDOUBLE
)
259 if ((t
& VT_BTYPE
) == VT_QFLOAT
)
261 #elif defined TCC_TARGET_RISCV64
262 if ((t
& VT_BTYPE
) == VT_LDOUBLE
)
268 /* returns 2nd register class corresponding to t and rc */
269 static int RC2_TYPE(int t
, int rc
)
271 if (!USING_TWO_WORDS(t
))
286 /* we use our own 'finite' function to avoid potential problems with
287 non standard math libs */
288 /* XXX: endianness dependent */
289 ST_FUNC
int ieee_finite(double d
)
292 memcpy(p
, &d
, sizeof(double));
293 return ((unsigned)((p
[1] | 0x800fffff) + 1)) >> 31;
296 /* compiling intel long double natively */
297 #if (defined __i386__ || defined __x86_64__) \
298 && (defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64)
299 # define TCC_IS_NATIVE_387
302 ST_FUNC
void test_lvalue(void)
304 if (!(vtop
->r
& VT_LVAL
))
308 ST_FUNC
void check_vstack(void)
310 if (vtop
!= vstack
- 1)
311 tcc_error("internal compiler error: vstack leak (%d)",
312 (int)(vtop
- vstack
+ 1));
315 /* vstack debugging aid */
317 void pv (const char *lbl
, int a
, int b
)
320 for (i
= a
; i
< a
+ b
; ++i
) {
321 SValue
*p
= &vtop
[-i
];
322 printf("%s vtop[-%d] : type.t:%04x r:%04x r2:%04x c.i:%d\n",
323 lbl
, i
, p
->type
.t
, p
->r
, p
->r2
, (int)p
->c
.i
);
328 /* ------------------------------------------------------------------------- */
329 /* initialize vstack and types. This must be done also for tcc -E */
330 ST_FUNC
void tccgen_init(TCCState
*s1
)
333 memset(vtop
, 0, sizeof *vtop
);
335 /* define some often used types */
338 char_type
.t
= VT_BYTE
;
339 if (s1
->char_is_unsigned
)
340 char_type
.t
|= VT_UNSIGNED
;
341 char_pointer_type
= char_type
;
342 mk_pointer(&char_pointer_type
);
344 func_old_type
.t
= VT_FUNC
;
345 func_old_type
.ref
= sym_push(SYM_FIELD
, &int_type
, 0, 0);
346 func_old_type
.ref
->f
.func_call
= FUNC_CDECL
;
347 func_old_type
.ref
->f
.func_type
= FUNC_OLD
;
348 #ifdef precedence_parser
354 ST_FUNC
int tccgen_compile(TCCState
*s1
)
356 cur_text_section
= NULL
;
359 anon_sym
= SYM_FIRST_ANOM
;
361 nocode_wanted
= 0x80000000;
363 debug_modes
= (s1
->do_debug
? 1 : 0) | s1
->test_coverage
<< 1;
367 #ifdef TCC_TARGET_ARM
371 printf("%s: **** new file\n", file
->filename
);
373 parse_flags
= PARSE_FLAG_PREPROCESS
| PARSE_FLAG_TOK_NUM
| PARSE_FLAG_TOK_STR
;
376 gen_inline_functions(s1
);
378 /* end of translation unit info */
384 ST_FUNC
void tccgen_finish(TCCState
*s1
)
387 free_inline_functions(s1
);
388 sym_pop(&global_stack
, NULL
, 0);
389 sym_pop(&local_stack
, NULL
, 0);
390 /* free preprocessor macros */
393 dynarray_reset(&sym_pools
, &nb_sym_pools
);
394 sym_free_first
= NULL
;
397 /* ------------------------------------------------------------------------- */
398 ST_FUNC ElfSym
*elfsym(Sym
*s
)
402 return &((ElfSym
*)symtab_section
->data
)[s
->c
];
405 /* apply storage attributes to Elf symbol */
406 ST_FUNC
void update_storage(Sym
*sym
)
409 int sym_bind
, old_sym_bind
;
415 if (sym
->a
.visibility
)
416 esym
->st_other
= (esym
->st_other
& ~ELFW(ST_VISIBILITY
)(-1))
419 if (sym
->type
.t
& (VT_STATIC
| VT_INLINE
))
420 sym_bind
= STB_LOCAL
;
421 else if (sym
->a
.weak
)
424 sym_bind
= STB_GLOBAL
;
425 old_sym_bind
= ELFW(ST_BIND
)(esym
->st_info
);
426 if (sym_bind
!= old_sym_bind
) {
427 esym
->st_info
= ELFW(ST_INFO
)(sym_bind
, ELFW(ST_TYPE
)(esym
->st_info
));
431 if (sym
->a
.dllimport
)
432 esym
->st_other
|= ST_PE_IMPORT
;
433 if (sym
->a
.dllexport
)
434 esym
->st_other
|= ST_PE_EXPORT
;
438 printf("storage %s: bind=%c vis=%d exp=%d imp=%d\n",
439 get_tok_str(sym
->v
, NULL
),
440 sym_bind
== STB_WEAK
? 'w' : sym_bind
== STB_LOCAL
? 'l' : 'g',
448 /* ------------------------------------------------------------------------- */
449 /* update sym->c so that it points to an external symbol in section
450 'section' with value 'value' */
452 ST_FUNC
void put_extern_sym2(Sym
*sym
, int sh_num
,
453 addr_t value
, unsigned long size
,
454 int can_add_underscore
)
456 int sym_type
, sym_bind
, info
, other
, t
;
462 name
= get_tok_str(sym
->v
, NULL
);
464 if ((t
& VT_BTYPE
) == VT_FUNC
) {
466 } else if ((t
& VT_BTYPE
) == VT_VOID
) {
467 sym_type
= STT_NOTYPE
;
468 if ((t
& (VT_BTYPE
|VT_ASM_FUNC
)) == VT_ASM_FUNC
)
471 sym_type
= STT_OBJECT
;
473 if (t
& (VT_STATIC
| VT_INLINE
))
474 sym_bind
= STB_LOCAL
;
476 sym_bind
= STB_GLOBAL
;
480 if (sym_type
== STT_FUNC
&& sym
->type
.ref
) {
481 Sym
*ref
= sym
->type
.ref
;
482 if (ref
->a
.nodecorate
) {
483 can_add_underscore
= 0;
485 if (ref
->f
.func_call
== FUNC_STDCALL
&& can_add_underscore
) {
486 sprintf(buf1
, "_%s@%d", name
, ref
->f
.func_args
* PTR_SIZE
);
488 other
|= ST_PE_STDCALL
;
489 can_add_underscore
= 0;
494 if (sym
->asm_label
) {
495 name
= get_tok_str(sym
->asm_label
, NULL
);
496 can_add_underscore
= 0;
499 if (tcc_state
->leading_underscore
&& can_add_underscore
) {
501 pstrcpy(buf1
+ 1, sizeof(buf1
) - 1, name
);
505 info
= ELFW(ST_INFO
)(sym_bind
, sym_type
);
506 sym
->c
= put_elf_sym(symtab_section
, value
, size
, info
, other
, sh_num
, name
);
509 tcc_debug_extern_sym(tcc_state
, sym
, sh_num
, sym_bind
, sym_type
);
513 esym
->st_value
= value
;
514 esym
->st_size
= size
;
515 esym
->st_shndx
= sh_num
;
520 ST_FUNC
void put_extern_sym(Sym
*sym
, Section
*s
, addr_t value
, unsigned long size
)
522 if (nocode_wanted
&& (NODATA_WANTED
|| (s
&& s
== cur_text_section
)))
524 put_extern_sym2(sym
, s
? s
->sh_num
: SHN_UNDEF
, value
, size
, 1);
527 /* add a new relocation entry to symbol 'sym' in section 's' */
528 ST_FUNC
void greloca(Section
*s
, Sym
*sym
, unsigned long offset
, int type
,
533 if (nocode_wanted
&& s
== cur_text_section
)
538 put_extern_sym(sym
, NULL
, 0, 0);
542 /* now we can add ELF relocation info */
543 put_elf_reloca(symtab_section
, s
, offset
, type
, c
, addend
);
547 ST_FUNC
void greloc(Section
*s
, Sym
*sym
, unsigned long offset
, int type
)
549 greloca(s
, sym
, offset
, type
, 0);
553 /* ------------------------------------------------------------------------- */
554 /* symbol allocator */
555 static Sym
*__sym_malloc(void)
557 Sym
*sym_pool
, *sym
, *last_sym
;
560 sym_pool
= tcc_malloc(SYM_POOL_NB
* sizeof(Sym
));
561 dynarray_add(&sym_pools
, &nb_sym_pools
, sym_pool
);
563 last_sym
= sym_free_first
;
565 for(i
= 0; i
< SYM_POOL_NB
; i
++) {
566 sym
->next
= last_sym
;
570 sym_free_first
= last_sym
;
574 static inline Sym
*sym_malloc(void)
578 sym
= sym_free_first
;
580 sym
= __sym_malloc();
581 sym_free_first
= sym
->next
;
584 sym
= tcc_malloc(sizeof(Sym
));
589 ST_INLN
void sym_free(Sym
*sym
)
592 sym
->next
= sym_free_first
;
593 sym_free_first
= sym
;
599 /* push, without hashing */
600 ST_FUNC Sym
*sym_push2(Sym
**ps
, int v
, int t
, int c
)
605 memset(s
, 0, sizeof *s
);
615 /* find a symbol and return its associated structure. 's' is the top
616 of the symbol stack */
617 ST_FUNC Sym
*sym_find2(Sym
*s
, int v
)
629 /* structure lookup */
630 ST_INLN Sym
*struct_find(int v
)
633 if ((unsigned)v
>= (unsigned)(tok_ident
- TOK_IDENT
))
635 return table_ident
[v
]->sym_struct
;
638 /* find an identifier */
639 ST_INLN Sym
*sym_find(int v
)
642 if ((unsigned)v
>= (unsigned)(tok_ident
- TOK_IDENT
))
644 return table_ident
[v
]->sym_identifier
;
647 static int sym_scope(Sym
*s
)
649 if (IS_ENUM_VAL (s
->type
.t
))
650 return s
->type
.ref
->sym_scope
;
655 /* push a given symbol on the symbol stack */
656 ST_FUNC Sym
*sym_push(int v
, CType
*type
, int r
, int c
)
665 s
= sym_push2(ps
, v
, type
->t
, c
);
666 s
->type
.ref
= type
->ref
;
668 /* don't record fields or anonymous symbols */
670 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
671 /* record symbol in token array */
672 ts
= table_ident
[(v
& ~SYM_STRUCT
) - TOK_IDENT
];
674 ps
= &ts
->sym_struct
;
676 ps
= &ts
->sym_identifier
;
679 s
->sym_scope
= local_scope
;
680 if (s
->prev_tok
&& sym_scope(s
->prev_tok
) == s
->sym_scope
)
681 tcc_error("redeclaration of '%s'",
682 get_tok_str(v
& ~SYM_STRUCT
, NULL
));
687 /* push a global identifier */
688 ST_FUNC Sym
*global_identifier_push(int v
, int t
, int c
)
691 s
= sym_push2(&global_stack
, v
, t
, c
);
692 s
->r
= VT_CONST
| VT_SYM
;
693 /* don't record anonymous symbol */
694 if (v
< SYM_FIRST_ANOM
) {
695 ps
= &table_ident
[v
- TOK_IDENT
]->sym_identifier
;
696 /* modify the top most local identifier, so that sym_identifier will
697 point to 's' when popped; happens when called from inline asm */
698 while (*ps
!= NULL
&& (*ps
)->sym_scope
)
699 ps
= &(*ps
)->prev_tok
;
706 /* pop symbols until top reaches 'b'. If KEEP is non-zero don't really
707 pop them yet from the list, but do remove them from the token array. */
708 ST_FUNC
void sym_pop(Sym
**ptop
, Sym
*b
, int keep
)
718 /* remove symbol in token array */
720 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
721 ts
= table_ident
[(v
& ~SYM_STRUCT
) - TOK_IDENT
];
723 ps
= &ts
->sym_struct
;
725 ps
= &ts
->sym_identifier
;
736 /* ------------------------------------------------------------------------- */
737 static void vcheck_cmp(void)
739 /* cannot let cpu flags if other instruction are generated. Also
740 avoid leaving VT_JMP anywhere except on the top of the stack
741 because it would complicate the code generator.
743 Don't do this when nocode_wanted. vtop might come from
744 !nocode_wanted regions (see 88_codeopt.c) and transforming
745 it to a register without actually generating code is wrong
746 as their value might still be used for real. All values
747 we push under nocode_wanted will eventually be popped
748 again, so that the VT_CMP/VT_JMP value will be in vtop
749 when code is unsuppressed again. */
751 if (vtop
->r
== VT_CMP
&& !nocode_wanted
)
755 static void vsetc(CType
*type
, int r
, CValue
*vc
)
757 if (vtop
>= vstack
+ (VSTACK_SIZE
- 1))
758 tcc_error("memory full (vstack)");
768 ST_FUNC
void vswap(void)
778 /* pop stack value */
779 ST_FUNC
void vpop(void)
782 v
= vtop
->r
& VT_VALMASK
;
783 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
784 /* for x86, we need to pop the FP stack */
786 o(0xd8dd); /* fstp %st(0) */
790 /* need to put correct jump if && or || without test */
797 /* push constant of type "type" with useless value */
798 static void vpush(CType
*type
)
800 vset(type
, VT_CONST
, 0);
803 /* push arbitrary 64bit constant */
804 static void vpush64(int ty
, unsigned long long v
)
811 vsetc(&ctype
, VT_CONST
, &cval
);
814 /* push integer constant */
815 ST_FUNC
void vpushi(int v
)
820 /* push a pointer sized constant */
821 static void vpushs(addr_t v
)
823 vpush64(VT_SIZE_T
, v
);
826 /* push long long constant */
827 static inline void vpushll(long long v
)
829 vpush64(VT_LLONG
, v
);
832 ST_FUNC
void vset(CType
*type
, int r
, int v
)
836 vsetc(type
, r
, &cval
);
839 static void vseti(int r
, int v
)
847 ST_FUNC
void vpushv(SValue
*v
)
849 if (vtop
>= vstack
+ (VSTACK_SIZE
- 1))
850 tcc_error("memory full (vstack)");
855 static void vdup(void)
860 /* rotate n first stack elements to the bottom
861 I1 ... In -> I2 ... In I1 [top is right]
863 ST_FUNC
void vrotb(int n
)
875 /* rotate the n elements before entry e towards the top
876 I1 ... In ... -> In I1 ... I(n-1) ... [top is right]
878 ST_FUNC
void vrote(SValue
*e
, int n
)
885 for(i
= 0;i
< n
- 1; i
++)
890 /* rotate n first stack elements to the top
891 I1 ... In -> In I1 ... I(n-1) [top is right]
893 ST_FUNC
void vrott(int n
)
898 /* ------------------------------------------------------------------------- */
899 /* vtop->r = VT_CMP means CPU-flags have been set from comparison or test. */
901 /* called from generators to set the result from relational ops */
902 ST_FUNC
void vset_VT_CMP(int op
)
910 /* called once before asking generators to load VT_CMP to a register */
911 static void vset_VT_JMP(void)
913 int op
= vtop
->cmp_op
;
915 if (vtop
->jtrue
|| vtop
->jfalse
) {
916 /* we need to jump to 'mov $0,%R' or 'mov $1,%R' */
917 int inv
= op
& (op
< 2); /* small optimization */
918 vseti(VT_JMP
+inv
, gvtst(inv
, 0));
920 /* otherwise convert flags (rsp. 0/1) to register */
922 if (op
< 2) /* doesn't seem to happen */
927 /* Set CPU Flags, doesn't yet jump */
928 static void gvtst_set(int inv
, int t
)
932 if (vtop
->r
!= VT_CMP
) {
935 if (vtop
->r
!= VT_CMP
) /* must be VT_CONST then */
936 vset_VT_CMP(vtop
->c
.i
!= 0);
939 p
= inv
? &vtop
->jfalse
: &vtop
->jtrue
;
940 *p
= gjmp_append(*p
, t
);
943 /* Generate value test
945 * Generate a test for any value (jump, comparison and integers) */
946 static int gvtst(int inv
, int t
)
951 t
= vtop
->jtrue
, u
= vtop
->jfalse
;
956 /* jump to the wanted target */
958 t
= gjmp_cond(op
^ inv
, t
);
961 /* resolve complementary jumps to here */
968 /* generate a zero or nozero test */
969 static void gen_test_zero(int op
)
971 if (vtop
->r
== VT_CMP
) {
975 vtop
->jfalse
= vtop
->jtrue
;
985 /* ------------------------------------------------------------------------- */
986 /* push a symbol value of TYPE */
987 ST_FUNC
void vpushsym(CType
*type
, Sym
*sym
)
991 vsetc(type
, VT_CONST
| VT_SYM
, &cval
);
995 /* Return a static symbol pointing to a section */
996 ST_FUNC Sym
*get_sym_ref(CType
*type
, Section
*sec
, unsigned long offset
, unsigned long size
)
1002 sym
= sym_push(v
, type
, VT_CONST
| VT_SYM
, 0);
1003 sym
->type
.t
|= VT_STATIC
;
1004 put_extern_sym(sym
, sec
, offset
, size
);
1008 /* push a reference to a section offset by adding a dummy symbol */
1009 static void vpush_ref(CType
*type
, Section
*sec
, unsigned long offset
, unsigned long size
)
1011 vpushsym(type
, get_sym_ref(type
, sec
, offset
, size
));
1014 /* define a new external reference to a symbol 'v' of type 'u' */
1015 ST_FUNC Sym
*external_global_sym(int v
, CType
*type
)
1021 /* push forward reference */
1022 s
= global_identifier_push(v
, type
->t
| VT_EXTERN
, 0);
1023 s
->type
.ref
= type
->ref
;
1024 } else if (IS_ASM_SYM(s
)) {
1025 s
->type
.t
= type
->t
| (s
->type
.t
& VT_EXTERN
);
1026 s
->type
.ref
= type
->ref
;
1032 /* create an external reference with no specific type similar to asm labels.
1033 This avoids type conflicts if the symbol is used from C too */
1034 ST_FUNC Sym
*external_helper_sym(int v
)
1036 CType ct
= { VT_ASM_FUNC
, NULL
};
1037 return external_global_sym(v
, &ct
);
1040 /* push a reference to an helper function (such as memmove) */
1041 ST_FUNC
void vpush_helper_func(int v
)
1043 vpushsym(&func_old_type
, external_helper_sym(v
));
1046 /* Merge symbol attributes. */
1047 static void merge_symattr(struct SymAttr
*sa
, struct SymAttr
*sa1
)
1049 if (sa1
->aligned
&& !sa
->aligned
)
1050 sa
->aligned
= sa1
->aligned
;
1051 sa
->packed
|= sa1
->packed
;
1052 sa
->weak
|= sa1
->weak
;
1053 if (sa1
->visibility
!= STV_DEFAULT
) {
1054 int vis
= sa
->visibility
;
1055 if (vis
== STV_DEFAULT
1056 || vis
> sa1
->visibility
)
1057 vis
= sa1
->visibility
;
1058 sa
->visibility
= vis
;
1060 sa
->dllexport
|= sa1
->dllexport
;
1061 sa
->nodecorate
|= sa1
->nodecorate
;
1062 sa
->dllimport
|= sa1
->dllimport
;
1065 /* Merge function attributes. */
1066 static void merge_funcattr(struct FuncAttr
*fa
, struct FuncAttr
*fa1
)
1068 if (fa1
->func_call
&& !fa
->func_call
)
1069 fa
->func_call
= fa1
->func_call
;
1070 if (fa1
->func_type
&& !fa
->func_type
)
1071 fa
->func_type
= fa1
->func_type
;
1072 if (fa1
->func_args
&& !fa
->func_args
)
1073 fa
->func_args
= fa1
->func_args
;
1074 if (fa1
->func_noreturn
)
1075 fa
->func_noreturn
= 1;
1082 /* Merge attributes. */
1083 static void merge_attr(AttributeDef
*ad
, AttributeDef
*ad1
)
1085 merge_symattr(&ad
->a
, &ad1
->a
);
1086 merge_funcattr(&ad
->f
, &ad1
->f
);
1089 ad
->section
= ad1
->section
;
1090 if (ad1
->alias_target
)
1091 ad
->alias_target
= ad1
->alias_target
;
1093 ad
->asm_label
= ad1
->asm_label
;
1095 ad
->attr_mode
= ad1
->attr_mode
;
1098 /* Merge some type attributes. */
1099 static void patch_type(Sym
*sym
, CType
*type
)
1101 if (!(type
->t
& VT_EXTERN
) || IS_ENUM_VAL(sym
->type
.t
)) {
1102 if (!(sym
->type
.t
& VT_EXTERN
))
1103 tcc_error("redefinition of '%s'", get_tok_str(sym
->v
, NULL
));
1104 sym
->type
.t
&= ~VT_EXTERN
;
1107 if (IS_ASM_SYM(sym
)) {
1108 /* stay static if both are static */
1109 sym
->type
.t
= type
->t
& (sym
->type
.t
| ~VT_STATIC
);
1110 sym
->type
.ref
= type
->ref
;
1113 if (!is_compatible_types(&sym
->type
, type
)) {
1114 tcc_error("incompatible types for redefinition of '%s'",
1115 get_tok_str(sym
->v
, NULL
));
1117 } else if ((sym
->type
.t
& VT_BTYPE
) == VT_FUNC
) {
1118 int static_proto
= sym
->type
.t
& VT_STATIC
;
1119 /* warn if static follows non-static function declaration */
1120 if ((type
->t
& VT_STATIC
) && !static_proto
1121 /* XXX this test for inline shouldn't be here. Until we
1122 implement gnu-inline mode again it silences a warning for
1123 mingw caused by our workarounds. */
1124 && !((type
->t
| sym
->type
.t
) & VT_INLINE
))
1125 tcc_warning("static storage ignored for redefinition of '%s'",
1126 get_tok_str(sym
->v
, NULL
));
1128 /* set 'inline' if both agree or if one has static */
1129 if ((type
->t
| sym
->type
.t
) & VT_INLINE
) {
1130 if (!((type
->t
^ sym
->type
.t
) & VT_INLINE
)
1131 || ((type
->t
| sym
->type
.t
) & VT_STATIC
))
1132 static_proto
|= VT_INLINE
;
1135 if (0 == (type
->t
& VT_EXTERN
)) {
1136 struct FuncAttr f
= sym
->type
.ref
->f
;
1137 /* put complete type, use static from prototype */
1138 sym
->type
.t
= (type
->t
& ~(VT_STATIC
|VT_INLINE
)) | static_proto
;
1139 sym
->type
.ref
= type
->ref
;
1140 merge_funcattr(&sym
->type
.ref
->f
, &f
);
1142 sym
->type
.t
&= ~VT_INLINE
| static_proto
;
1145 if (sym
->type
.ref
->f
.func_type
== FUNC_OLD
1146 && type
->ref
->f
.func_type
!= FUNC_OLD
) {
1147 sym
->type
.ref
= type
->ref
;
1151 if ((sym
->type
.t
& VT_ARRAY
) && type
->ref
->c
>= 0) {
1152 /* set array size if it was omitted in extern declaration */
1153 sym
->type
.ref
->c
= type
->ref
->c
;
1155 if ((type
->t
^ sym
->type
.t
) & VT_STATIC
)
1156 tcc_warning("storage mismatch for redefinition of '%s'",
1157 get_tok_str(sym
->v
, NULL
));
1161 /* Merge some storage attributes. */
1162 static void patch_storage(Sym
*sym
, AttributeDef
*ad
, CType
*type
)
1165 patch_type(sym
, type
);
1167 #ifdef TCC_TARGET_PE
1168 if (sym
->a
.dllimport
!= ad
->a
.dllimport
)
1169 tcc_error("incompatible dll linkage for redefinition of '%s'",
1170 get_tok_str(sym
->v
, NULL
));
1172 merge_symattr(&sym
->a
, &ad
->a
);
1174 sym
->asm_label
= ad
->asm_label
;
1175 update_storage(sym
);
1178 /* copy sym to other stack */
1179 static Sym
*sym_copy(Sym
*s0
, Sym
**ps
)
1182 s
= sym_malloc(), *s
= *s0
;
1183 s
->prev
= *ps
, *ps
= s
;
1184 if (s
->v
< SYM_FIRST_ANOM
) {
1185 ps
= &table_ident
[s
->v
- TOK_IDENT
]->sym_identifier
;
1186 s
->prev_tok
= *ps
, *ps
= s
;
1191 /* copy s->type.ref to stack 'ps' for VT_FUNC and VT_PTR */
1192 static void sym_copy_ref(Sym
*s
, Sym
**ps
)
1194 int bt
= s
->type
.t
& VT_BTYPE
;
1195 if (bt
== VT_FUNC
|| bt
== VT_PTR
|| (bt
== VT_STRUCT
&& s
->sym_scope
)) {
1196 Sym
**sp
= &s
->type
.ref
;
1197 for (s
= *sp
, *sp
= NULL
; s
; s
= s
->next
) {
1198 Sym
*s2
= sym_copy(s
, ps
);
1199 sp
= &(*sp
= s2
)->next
;
1200 sym_copy_ref(s2
, ps
);
1205 /* define a new external reference to a symbol 'v' */
1206 static Sym
*external_sym(int v
, CType
*type
, int r
, AttributeDef
*ad
)
1210 /* look for global symbol */
1212 while (s
&& s
->sym_scope
)
1216 /* push forward reference */
1217 s
= global_identifier_push(v
, type
->t
, 0);
1220 s
->asm_label
= ad
->asm_label
;
1221 s
->type
.ref
= type
->ref
;
1222 /* copy type to the global stack */
1224 sym_copy_ref(s
, &global_stack
);
1226 patch_storage(s
, ad
, type
);
1228 /* push variables on local_stack if any */
1229 if (local_stack
&& (s
->type
.t
& VT_BTYPE
) != VT_FUNC
)
1230 s
= sym_copy(s
, &local_stack
);
1234 /* save registers up to (vtop - n) stack entry */
1235 ST_FUNC
void save_regs(int n
)
1238 for(p
= vstack
, p1
= vtop
- n
; p
<= p1
; p
++)
1242 /* save r to the memory stack, and mark it as being free */
1243 ST_FUNC
void save_reg(int r
)
1245 save_reg_upstack(r
, 0);
1248 /* save r to the memory stack, and mark it as being free,
1249 if seen up to (vtop - n) stack entry */
1250 ST_FUNC
void save_reg_upstack(int r
, int n
)
1252 int l
, size
, align
, bt
;
1255 if ((r
&= VT_VALMASK
) >= VT_CONST
)
1260 for(p
= vstack
, p1
= vtop
- n
; p
<= p1
; p
++) {
1261 if ((p
->r
& VT_VALMASK
) == r
|| p
->r2
== r
) {
1262 /* must save value on stack if not already done */
1264 bt
= p
->type
.t
& VT_BTYPE
;
1267 if ((p
->r
& VT_LVAL
) || bt
== VT_FUNC
)
1270 size
= type_size(&sv
.type
, &align
);
1271 l
= get_temp_local_var(size
,align
);
1272 sv
.r
= VT_LOCAL
| VT_LVAL
;
1274 store(p
->r
& VT_VALMASK
, &sv
);
1275 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
1276 /* x86 specific: need to pop fp register ST0 if saved */
1277 if (r
== TREG_ST0
) {
1278 o(0xd8dd); /* fstp %st(0) */
1281 /* special long long case */
1282 if (p
->r2
< VT_CONST
&& USING_TWO_WORDS(bt
)) {
1287 /* mark that stack entry as being saved on the stack */
1288 if (p
->r
& VT_LVAL
) {
1289 /* also clear the bounded flag because the
1290 relocation address of the function was stored in
1292 p
->r
= (p
->r
& ~(VT_VALMASK
| VT_BOUNDED
)) | VT_LLOCAL
;
1294 p
->r
= VT_LVAL
| VT_LOCAL
;
1303 #ifdef TCC_TARGET_ARM
1304 /* find a register of class 'rc2' with at most one reference on stack.
1305 * If none, call get_reg(rc) */
1306 ST_FUNC
int get_reg_ex(int rc
, int rc2
)
1311 for(r
=0;r
<NB_REGS
;r
++) {
1312 if (reg_classes
[r
] & rc2
) {
1315 for(p
= vstack
; p
<= vtop
; p
++) {
1316 if ((p
->r
& VT_VALMASK
) == r
||
1328 /* find a free register of class 'rc'. If none, save one register */
1329 ST_FUNC
int get_reg(int rc
)
1334 /* find a free register */
1335 for(r
=0;r
<NB_REGS
;r
++) {
1336 if (reg_classes
[r
] & rc
) {
1339 for(p
=vstack
;p
<=vtop
;p
++) {
1340 if ((p
->r
& VT_VALMASK
) == r
||
1349 /* no register left : free the first one on the stack (VERY
1350 IMPORTANT to start from the bottom to ensure that we don't
1351 spill registers used in gen_opi()) */
1352 for(p
=vstack
;p
<=vtop
;p
++) {
1353 /* look at second register (if long long) */
1355 if (r
< VT_CONST
&& (reg_classes
[r
] & rc
))
1357 r
= p
->r
& VT_VALMASK
;
1358 if (r
< VT_CONST
&& (reg_classes
[r
] & rc
)) {
1364 /* Should never comes here */
1368 /* find a free temporary local variable (return the offset on stack) match the size and align. If none, add new temporary stack variable*/
1369 static int get_temp_local_var(int size
,int align
){
1371 struct temp_local_variable
*temp_var
;
1378 for(i
=0;i
<nb_temp_local_vars
;i
++){
1379 temp_var
=&arr_temp_local_vars
[i
];
1380 if(temp_var
->size
<size
||align
!=temp_var
->align
){
1383 /*check if temp_var is free*/
1385 for(p
=vstack
;p
<=vtop
;p
++) {
1387 if(r
==VT_LOCAL
||r
==VT_LLOCAL
){
1388 if(p
->c
.i
==temp_var
->location
){
1395 found_var
=temp_var
->location
;
1401 loc
= (loc
- size
) & -align
;
1402 if(nb_temp_local_vars
<MAX_TEMP_LOCAL_VARIABLE_NUMBER
){
1403 temp_var
=&arr_temp_local_vars
[i
];
1404 temp_var
->location
=loc
;
1405 temp_var
->size
=size
;
1406 temp_var
->align
=align
;
1407 nb_temp_local_vars
++;
1414 static void clear_temp_local_var_list(){
1415 nb_temp_local_vars
=0;
1418 /* move register 's' (of type 't') to 'r', and flush previous value of r to memory
1420 static void move_reg(int r
, int s
, int t
)
1434 /* get address of vtop (vtop MUST BE an lvalue) */
1435 ST_FUNC
void gaddrof(void)
1437 vtop
->r
&= ~VT_LVAL
;
1438 /* tricky: if saved lvalue, then we can go back to lvalue */
1439 if ((vtop
->r
& VT_VALMASK
) == VT_LLOCAL
)
1440 vtop
->r
= (vtop
->r
& ~VT_VALMASK
) | VT_LOCAL
| VT_LVAL
;
1443 #ifdef CONFIG_TCC_BCHECK
1444 /* generate a bounded pointer addition */
1445 static void gen_bounded_ptr_add(void)
1447 int save
= (vtop
[-1].r
& VT_VALMASK
) == VT_LOCAL
;
1452 vpush_helper_func(TOK___bound_ptr_add
);
1457 /* returned pointer is in REG_IRET */
1458 vtop
->r
= REG_IRET
| VT_BOUNDED
;
1461 /* relocation offset of the bounding function call point */
1462 vtop
->c
.i
= (cur_text_section
->reloc
->data_offset
- sizeof(ElfW_Rel
));
1465 /* patch pointer addition in vtop so that pointer dereferencing is
1467 static void gen_bounded_ptr_deref(void)
1477 size
= type_size(&vtop
->type
, &align
);
1479 case 1: func
= TOK___bound_ptr_indir1
; break;
1480 case 2: func
= TOK___bound_ptr_indir2
; break;
1481 case 4: func
= TOK___bound_ptr_indir4
; break;
1482 case 8: func
= TOK___bound_ptr_indir8
; break;
1483 case 12: func
= TOK___bound_ptr_indir12
; break;
1484 case 16: func
= TOK___bound_ptr_indir16
; break;
1486 /* may happen with struct member access */
1489 sym
= external_helper_sym(func
);
1491 put_extern_sym(sym
, NULL
, 0, 0);
1492 /* patch relocation */
1493 /* XXX: find a better solution ? */
1494 rel
= (ElfW_Rel
*)(cur_text_section
->reloc
->data
+ vtop
->c
.i
);
1495 rel
->r_info
= ELFW(R_INFO
)(sym
->c
, ELFW(R_TYPE
)(rel
->r_info
));
1498 /* generate lvalue bound code */
1499 static void gbound(void)
1503 vtop
->r
&= ~VT_MUSTBOUND
;
1504 /* if lvalue, then use checking code before dereferencing */
1505 if (vtop
->r
& VT_LVAL
) {
1506 /* if not VT_BOUNDED value, then make one */
1507 if (!(vtop
->r
& VT_BOUNDED
)) {
1508 /* must save type because we must set it to int to get pointer */
1510 vtop
->type
.t
= VT_PTR
;
1513 gen_bounded_ptr_add();
1517 /* then check for dereferencing */
1518 gen_bounded_ptr_deref();
1522 /* we need to call __bound_ptr_add before we start to load function
1523 args into registers */
1524 ST_FUNC
void gbound_args(int nb_args
)
1529 for (i
= 1; i
<= nb_args
; ++i
)
1530 if (vtop
[1 - i
].r
& VT_MUSTBOUND
) {
1536 sv
= vtop
- nb_args
;
1537 if (sv
->r
& VT_SYM
) {
1541 #ifndef TCC_TARGET_PE
1542 || v
== TOK_sigsetjmp
1543 || v
== TOK___sigsetjmp
1546 vpush_helper_func(TOK___bound_setjmp
);
1549 func_bound_add_epilog
= 1;
1551 #if defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64
1552 if (v
== TOK_alloca
)
1553 func_bound_add_epilog
= 1;
1556 if (v
== TOK_longjmp
) /* undo rename to __longjmp14 */
1557 sv
->sym
->asm_label
= TOK___bound_longjmp
;
1562 /* Add bounds for local symbols from S to E (via ->prev) */
1563 static void add_local_bounds(Sym
*s
, Sym
*e
)
1565 for (; s
!= e
; s
= s
->prev
) {
1566 if (!s
->v
|| (s
->r
& VT_VALMASK
) != VT_LOCAL
)
1568 /* Add arrays/structs/unions because we always take address */
1569 if ((s
->type
.t
& VT_ARRAY
)
1570 || (s
->type
.t
& VT_BTYPE
) == VT_STRUCT
1571 || s
->a
.addrtaken
) {
1572 /* add local bound info */
1573 int align
, size
= type_size(&s
->type
, &align
);
1574 addr_t
*bounds_ptr
= section_ptr_add(lbounds_section
,
1575 2 * sizeof(addr_t
));
1576 bounds_ptr
[0] = s
->c
;
1577 bounds_ptr
[1] = size
;
1583 /* Wrapper around sym_pop, that potentially also registers local bounds. */
1584 static void pop_local_syms(Sym
*b
, int keep
)
1586 #ifdef CONFIG_TCC_BCHECK
1587 if (tcc_state
->do_bounds_check
&& !keep
&& (local_scope
|| !func_var
))
1588 add_local_bounds(local_stack
, b
);
1591 tcc_add_debug_info (tcc_state
, !local_scope
, local_stack
, b
);
1592 sym_pop(&local_stack
, b
, keep
);
1595 static void incr_bf_adr(int o
)
1597 vtop
->type
= char_pointer_type
;
1601 vtop
->type
.t
= VT_BYTE
| VT_UNSIGNED
;
1605 /* single-byte load mode for packed or otherwise unaligned bitfields */
1606 static void load_packed_bf(CType
*type
, int bit_pos
, int bit_size
)
1609 save_reg_upstack(vtop
->r
, 1);
1610 vpush64(type
->t
& VT_BTYPE
, 0); // B X
1611 bits
= 0, o
= bit_pos
>> 3, bit_pos
&= 7;
1620 vpushi(bit_pos
), gen_op(TOK_SHR
), bit_pos
= 0; // X B Y
1622 vpushi((1 << n
) - 1), gen_op('&');
1625 vpushi(bits
), gen_op(TOK_SHL
);
1628 bits
+= n
, bit_size
-= n
, o
= 1;
1631 if (!(type
->t
& VT_UNSIGNED
)) {
1632 n
= ((type
->t
& VT_BTYPE
) == VT_LLONG
? 64 : 32) - bits
;
1633 vpushi(n
), gen_op(TOK_SHL
);
1634 vpushi(n
), gen_op(TOK_SAR
);
1638 /* single-byte store mode for packed or otherwise unaligned bitfields */
1639 static void store_packed_bf(int bit_pos
, int bit_size
)
1641 int bits
, n
, o
, m
, c
;
1642 c
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
1644 save_reg_upstack(vtop
->r
, 1);
1645 bits
= 0, o
= bit_pos
>> 3, bit_pos
&= 7;
1647 incr_bf_adr(o
); // X B
1649 c
? vdup() : gv_dup(); // B V X
1652 vpushi(bits
), gen_op(TOK_SHR
);
1654 vpushi(bit_pos
), gen_op(TOK_SHL
);
1659 m
= ((1 << n
) - 1) << bit_pos
;
1660 vpushi(m
), gen_op('&'); // X B V1
1661 vpushv(vtop
-1); // X B V1 B
1662 vpushi(m
& 0x80 ? ~m
& 0x7f : ~m
);
1663 gen_op('&'); // X B V1 B1
1664 gen_op('|'); // X B V2
1666 vdup(), vtop
[-1] = vtop
[-2]; // X B B V2
1667 vstore(), vpop(); // X B
1668 bits
+= n
, bit_size
-= n
, bit_pos
= 0, o
= 1;
1673 static int adjust_bf(SValue
*sv
, int bit_pos
, int bit_size
)
1676 if (0 == sv
->type
.ref
)
1678 t
= sv
->type
.ref
->auxtype
;
1679 if (t
!= -1 && t
!= VT_STRUCT
) {
1680 sv
->type
.t
= (sv
->type
.t
& ~(VT_BTYPE
| VT_LONG
)) | t
;
1686 /* store vtop a register belonging to class 'rc'. lvalues are
1687 converted to values. Cannot be used if cannot be converted to
1688 register value (such as structures). */
1689 ST_FUNC
int gv(int rc
)
1691 int r
, r2
, r_ok
, r2_ok
, rc2
, bt
;
1692 int bit_pos
, bit_size
, size
, align
;
1694 /* NOTE: get_reg can modify vstack[] */
1695 if (vtop
->type
.t
& VT_BITFIELD
) {
1698 bit_pos
= BIT_POS(vtop
->type
.t
);
1699 bit_size
= BIT_SIZE(vtop
->type
.t
);
1700 /* remove bit field info to avoid loops */
1701 vtop
->type
.t
&= ~VT_STRUCT_MASK
;
1704 type
.t
= vtop
->type
.t
& VT_UNSIGNED
;
1705 if ((vtop
->type
.t
& VT_BTYPE
) == VT_BOOL
)
1706 type
.t
|= VT_UNSIGNED
;
1708 r
= adjust_bf(vtop
, bit_pos
, bit_size
);
1710 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
)
1715 if (r
== VT_STRUCT
) {
1716 load_packed_bf(&type
, bit_pos
, bit_size
);
1718 int bits
= (type
.t
& VT_BTYPE
) == VT_LLONG
? 64 : 32;
1719 /* cast to int to propagate signedness in following ops */
1721 /* generate shifts */
1722 vpushi(bits
- (bit_pos
+ bit_size
));
1724 vpushi(bits
- bit_size
);
1725 /* NOTE: transformed to SHR if unsigned */
1730 if (is_float(vtop
->type
.t
) &&
1731 (vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
) {
1732 /* CPUs usually cannot use float constants, so we store them
1733 generically in data segment */
1734 init_params p
= { rodata_section
};
1735 unsigned long offset
;
1736 size
= type_size(&vtop
->type
, &align
);
1738 size
= 0, align
= 1;
1739 offset
= section_add(p
.sec
, size
, align
);
1740 vpush_ref(&vtop
->type
, p
.sec
, offset
, size
);
1742 init_putv(&p
, &vtop
->type
, offset
);
1745 #ifdef CONFIG_TCC_BCHECK
1746 if (vtop
->r
& VT_MUSTBOUND
)
1750 bt
= vtop
->type
.t
& VT_BTYPE
;
1752 #ifdef TCC_TARGET_RISCV64
1754 if (bt
== VT_LDOUBLE
&& rc
== RC_FLOAT
)
1757 rc2
= RC2_TYPE(bt
, rc
);
1759 /* need to reload if:
1761 - lvalue (need to dereference pointer)
1762 - already a register, but not in the right class */
1763 r
= vtop
->r
& VT_VALMASK
;
1764 r_ok
= !(vtop
->r
& VT_LVAL
) && (r
< VT_CONST
) && (reg_classes
[r
] & rc
);
1765 r2_ok
= !rc2
|| ((vtop
->r2
< VT_CONST
) && (reg_classes
[vtop
->r2
] & rc2
));
1767 if (!r_ok
|| !r2_ok
) {
1771 int load_type
= (bt
== VT_QFLOAT
) ? VT_DOUBLE
: VT_PTRDIFF_T
;
1772 int original_type
= vtop
->type
.t
;
1774 /* two register type load :
1775 expand to two words temporarily */
1776 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
) {
1778 unsigned long long ll
= vtop
->c
.i
;
1779 vtop
->c
.i
= ll
; /* first word */
1781 vtop
->r
= r
; /* save register value */
1782 vpushi(ll
>> 32); /* second word */
1783 } else if (vtop
->r
& VT_LVAL
) {
1784 /* We do not want to modifier the long long pointer here.
1785 So we save any other instances down the stack */
1786 save_reg_upstack(vtop
->r
, 1);
1787 /* load from memory */
1788 vtop
->type
.t
= load_type
;
1791 vtop
[-1].r
= r
; /* save register value */
1792 /* increment pointer to get second word */
1793 vtop
->type
.t
= VT_PTRDIFF_T
;
1798 vtop
->type
.t
= load_type
;
1800 /* move registers */
1803 if (r2_ok
&& vtop
->r2
< VT_CONST
)
1806 vtop
[-1].r
= r
; /* save register value */
1807 vtop
->r
= vtop
[-1].r2
;
1809 /* Allocate second register. Here we rely on the fact that
1810 get_reg() tries first to free r2 of an SValue. */
1814 /* write second register */
1817 vtop
->type
.t
= original_type
;
1819 if (vtop
->r
== VT_CMP
)
1821 /* one register type load */
1826 #ifdef TCC_TARGET_C67
1827 /* uses register pairs for doubles */
1828 if (bt
== VT_DOUBLE
)
1835 /* generate vtop[-1] and vtop[0] in resp. classes rc1 and rc2 */
1836 ST_FUNC
void gv2(int rc1
, int rc2
)
1838 /* generate more generic register first. But VT_JMP or VT_CMP
1839 values must be generated first in all cases to avoid possible
1841 if (vtop
->r
!= VT_CMP
&& rc1
<= rc2
) {
1846 /* test if reload is needed for first register */
1847 if ((vtop
[-1].r
& VT_VALMASK
) >= VT_CONST
) {
1857 /* test if reload is needed for first register */
1858 if ((vtop
[0].r
& VT_VALMASK
) >= VT_CONST
) {
1865 /* expand 64bit on stack in two ints */
1866 ST_FUNC
void lexpand(void)
1869 u
= vtop
->type
.t
& (VT_DEFSIGN
| VT_UNSIGNED
);
1870 v
= vtop
->r
& (VT_VALMASK
| VT_LVAL
);
1871 if (v
== VT_CONST
) {
1874 } else if (v
== (VT_LVAL
|VT_CONST
) || v
== (VT_LVAL
|VT_LOCAL
)) {
1880 vtop
[0].r
= vtop
[-1].r2
;
1881 vtop
[0].r2
= vtop
[-1].r2
= VT_CONST
;
1883 vtop
[0].type
.t
= vtop
[-1].type
.t
= VT_INT
| u
;
1888 /* build a long long from two ints */
1889 static void lbuild(int t
)
1891 gv2(RC_INT
, RC_INT
);
1892 vtop
[-1].r2
= vtop
[0].r
;
1893 vtop
[-1].type
.t
= t
;
1898 /* convert stack entry to register and duplicate its value in another
1900 static void gv_dup(void)
1906 if ((t
& VT_BTYPE
) == VT_LLONG
) {
1907 if (t
& VT_BITFIELD
) {
1917 /* stack: H L L1 H1 */
1927 /* duplicate value */
1937 /* generate CPU independent (unsigned) long long operations */
1938 static void gen_opl(int op
)
1940 int t
, a
, b
, op1
, c
, i
;
1942 unsigned short reg_iret
= REG_IRET
;
1943 unsigned short reg_lret
= REG_IRE2
;
1949 func
= TOK___divdi3
;
1952 func
= TOK___udivdi3
;
1955 func
= TOK___moddi3
;
1958 func
= TOK___umoddi3
;
1965 /* call generic long long function */
1966 vpush_helper_func(func
);
1971 vtop
->r2
= reg_lret
;
1979 //pv("gen_opl A",0,2);
1985 /* stack: L1 H1 L2 H2 */
1990 vtop
[-2] = vtop
[-3];
1993 /* stack: H1 H2 L1 L2 */
1994 //pv("gen_opl B",0,4);
2000 /* stack: H1 H2 L1 L2 ML MH */
2003 /* stack: ML MH H1 H2 L1 L2 */
2007 /* stack: ML MH H1 L2 H2 L1 */
2012 /* stack: ML MH M1 M2 */
2015 } else if (op
== '+' || op
== '-') {
2016 /* XXX: add non carry method too (for MIPS or alpha) */
2022 /* stack: H1 H2 (L1 op L2) */
2025 gen_op(op1
+ 1); /* TOK_xxxC2 */
2028 /* stack: H1 H2 (L1 op L2) */
2031 /* stack: (L1 op L2) H1 H2 */
2033 /* stack: (L1 op L2) (H1 op H2) */
2041 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
2042 t
= vtop
[-1].type
.t
;
2046 /* stack: L H shift */
2048 /* constant: simpler */
2049 /* NOTE: all comments are for SHL. the other cases are
2050 done by swapping words */
2061 if (op
!= TOK_SAR
) {
2094 /* XXX: should provide a faster fallback on x86 ? */
2097 func
= TOK___ashrdi3
;
2100 func
= TOK___lshrdi3
;
2103 func
= TOK___ashldi3
;
2109 /* compare operations */
2115 /* stack: L1 H1 L2 H2 */
2117 vtop
[-1] = vtop
[-2];
2119 /* stack: L1 L2 H1 H2 */
2123 /* when values are equal, we need to compare low words. since
2124 the jump is inverted, we invert the test too. */
2127 else if (op1
== TOK_GT
)
2129 else if (op1
== TOK_ULT
)
2131 else if (op1
== TOK_UGT
)
2141 /* generate non equal test */
2143 vset_VT_CMP(TOK_NE
);
2147 /* compare low. Always unsigned */
2151 else if (op1
== TOK_LE
)
2153 else if (op1
== TOK_GT
)
2155 else if (op1
== TOK_GE
)
2158 #if 0//def TCC_TARGET_I386
2159 if (op
== TOK_NE
) { gsym(b
); break; }
2160 if (op
== TOK_EQ
) { gsym(a
); break; }
2169 static uint64_t gen_opic_sdiv(uint64_t a
, uint64_t b
)
2171 uint64_t x
= (a
>> 63 ? -a
: a
) / (b
>> 63 ? -b
: b
);
2172 return (a
^ b
) >> 63 ? -x
: x
;
2175 static int gen_opic_lt(uint64_t a
, uint64_t b
)
2177 return (a
^ (uint64_t)1 << 63) < (b
^ (uint64_t)1 << 63);
2180 /* handle integer constant optimizations and various machine
2182 static void gen_opic(int op
)
2184 SValue
*v1
= vtop
- 1;
2186 int t1
= v1
->type
.t
& VT_BTYPE
;
2187 int t2
= v2
->type
.t
& VT_BTYPE
;
2188 int c1
= (v1
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2189 int c2
= (v2
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2190 int nonconst
= (v1
->r
| v2
->r
) & VT_NONCONST
;
2191 uint64_t l1
= c1
? v1
->c
.i
: 0;
2192 uint64_t l2
= c2
? v2
->c
.i
: 0;
2193 int shm
= (t1
== VT_LLONG
) ? 63 : 31;
2195 if (t1
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t1
!= VT_PTR
))
2196 l1
= ((uint32_t)l1
|
2197 (v1
->type
.t
& VT_UNSIGNED
? 0 : -(l1
& 0x80000000)));
2198 if (t2
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t2
!= VT_PTR
))
2199 l2
= ((uint32_t)l2
|
2200 (v2
->type
.t
& VT_UNSIGNED
? 0 : -(l2
& 0x80000000)));
2204 case '+': l1
+= l2
; break;
2205 case '-': l1
-= l2
; break;
2206 case '&': l1
&= l2
; break;
2207 case '^': l1
^= l2
; break;
2208 case '|': l1
|= l2
; break;
2209 case '*': l1
*= l2
; break;
2216 /* if division by zero, generate explicit division */
2218 if (const_wanted
&& !(nocode_wanted
& unevalmask
))
2219 tcc_error("division by zero in constant");
2223 default: l1
= gen_opic_sdiv(l1
, l2
); break;
2224 case '%': l1
= l1
- l2
* gen_opic_sdiv(l1
, l2
); break;
2225 case TOK_UDIV
: l1
= l1
/ l2
; break;
2226 case TOK_UMOD
: l1
= l1
% l2
; break;
2229 case TOK_SHL
: l1
<<= (l2
& shm
); break;
2230 case TOK_SHR
: l1
>>= (l2
& shm
); break;
2232 l1
= (l1
>> 63) ? ~(~l1
>> (l2
& shm
)) : l1
>> (l2
& shm
);
2235 case TOK_ULT
: l1
= l1
< l2
; break;
2236 case TOK_UGE
: l1
= l1
>= l2
; break;
2237 case TOK_EQ
: l1
= l1
== l2
; break;
2238 case TOK_NE
: l1
= l1
!= l2
; break;
2239 case TOK_ULE
: l1
= l1
<= l2
; break;
2240 case TOK_UGT
: l1
= l1
> l2
; break;
2241 case TOK_LT
: l1
= gen_opic_lt(l1
, l2
); break;
2242 case TOK_GE
: l1
= !gen_opic_lt(l1
, l2
); break;
2243 case TOK_LE
: l1
= !gen_opic_lt(l2
, l1
); break;
2244 case TOK_GT
: l1
= gen_opic_lt(l2
, l1
); break;
2246 case TOK_LAND
: l1
= l1
&& l2
; break;
2247 case TOK_LOR
: l1
= l1
|| l2
; break;
2251 if (t1
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t1
!= VT_PTR
))
2252 l1
= ((uint32_t)l1
|
2253 (v1
->type
.t
& VT_UNSIGNED
? 0 : -(l1
& 0x80000000)));
2257 nonconst
= VT_NONCONST
;
2258 /* if commutative ops, put c2 as constant */
2259 if (c1
&& (op
== '+' || op
== '&' || op
== '^' ||
2260 op
== '|' || op
== '*' || op
== TOK_EQ
|| op
== TOK_NE
)) {
2262 c2
= c1
; //c = c1, c1 = c2, c2 = c;
2263 l2
= l1
; //l = l1, l1 = l2, l2 = l;
2265 if (!const_wanted
&&
2267 (op
== TOK_SHL
|| op
== TOK_SHR
|| op
== TOK_SAR
)) ||
2268 (l1
== -1 && op
== TOK_SAR
))) {
2269 /* treat (0 << x), (0 >> x) and (-1 >> x) as constant */
2271 } else if (!const_wanted
&&
2272 c2
&& ((l2
== 0 && (op
== '&' || op
== '*')) ||
2274 (l2
== -1 || (l2
== 0xFFFFFFFF && t2
!= VT_LLONG
))) ||
2275 (l2
== 1 && (op
== '%' || op
== TOK_UMOD
)))) {
2276 /* treat (x & 0), (x * 0), (x | -1) and (x % 1) as constant */
2281 } else if (c2
&& (((op
== '*' || op
== '/' || op
== TOK_UDIV
||
2284 ((op
== '+' || op
== '-' || op
== '|' || op
== '^' ||
2285 op
== TOK_SHL
|| op
== TOK_SHR
|| op
== TOK_SAR
) &&
2288 (l2
== -1 || (l2
== 0xFFFFFFFF && t2
!= VT_LLONG
))))) {
2289 /* filter out NOP operations like x*1, x-0, x&-1... */
2291 } else if (c2
&& (op
== '*' || op
== TOK_PDIV
|| op
== TOK_UDIV
)) {
2292 /* try to use shifts instead of muls or divs */
2293 if (l2
> 0 && (l2
& (l2
- 1)) == 0) {
2302 else if (op
== TOK_PDIV
)
2308 } else if (c2
&& (op
== '+' || op
== '-') &&
2309 (((vtop
[-1].r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == (VT_CONST
| VT_SYM
))
2310 || (vtop
[-1].r
& (VT_VALMASK
| VT_LVAL
)) == VT_LOCAL
)) {
2311 /* symbol + constant case */
2315 /* The backends can't always deal with addends to symbols
2316 larger than +-1<<31. Don't construct such. */
2323 /* call low level op generator */
2324 if (t1
== VT_LLONG
|| t2
== VT_LLONG
||
2325 (PTR_SIZE
== 8 && (t1
== VT_PTR
|| t2
== VT_PTR
)))
2331 if (vtop
->r
== VT_CONST
)
2332 vtop
->r
|= nonconst
;
2335 #if defined TCC_TARGET_X86_64 || defined TCC_TARGET_I386
2336 # define gen_negf gen_opf
2337 #elif defined TCC_TARGET_ARM
2338 void gen_negf(int op
)
2340 /* arm will detect 0-x and replace by vneg */
2341 vpushi(0), vswap(), gen_op('-');
2344 /* XXX: implement in gen_opf() for other backends too */
2345 void gen_negf(int op
)
2347 /* In IEEE negate(x) isn't subtract(0,x). Without NaNs it's
2348 subtract(-0, x), but with them it's really a sign flip
2349 operation. We implement this with bit manipulation and have
2350 to do some type reinterpretation for this, which TCC can do
2353 int align
, size
, bt
;
2355 size
= type_size(&vtop
->type
, &align
);
2356 bt
= vtop
->type
.t
& VT_BTYPE
;
2357 save_reg(gv(RC_TYPE(bt
)));
2359 incr_bf_adr(size
- 1);
2361 vpushi(0x80); /* flip sign */
2368 /* generate a floating point operation with constant propagation */
2369 static void gen_opif(int op
)
2373 #if defined _MSC_VER && defined __x86_64__
2374 /* avoid bad optimization with f1 -= f2 for f1:-0.0, f2:0.0 */
2384 /* currently, we cannot do computations with forward symbols */
2385 c1
= (v1
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2386 c2
= (v2
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2388 if (v1
->type
.t
== VT_FLOAT
) {
2391 } else if (v1
->type
.t
== VT_DOUBLE
) {
2398 /* NOTE: we only do constant propagation if finite number (not
2399 NaN or infinity) (ANSI spec) */
2400 if (!(ieee_finite(f1
) || !ieee_finite(f2
)) && !const_wanted
)
2403 case '+': f1
+= f2
; break;
2404 case '-': f1
-= f2
; break;
2405 case '*': f1
*= f2
; break;
2408 union { float f
; unsigned u
; } x1
, x2
, y
;
2409 /* If not in initializer we need to potentially generate
2410 FP exceptions at runtime, otherwise we want to fold. */
2413 /* the run-time result of 0.0/0.0 on x87, also of other compilers
2414 when used to compile the f1 /= f2 below, would be -nan */
2415 x1
.f
= f1
, x2
.f
= f2
;
2417 y
.u
= 0x7fc00000; /* nan */
2419 y
.u
= 0x7f800000; /* infinity */
2420 y
.u
|= (x1
.u
^ x2
.u
) & 0x80000000; /* set sign */
2429 /* XXX: also handles tests ? */
2435 /* XXX: overflow test ? */
2436 if (v1
->type
.t
== VT_FLOAT
) {
2438 } else if (v1
->type
.t
== VT_DOUBLE
) {
2445 if (op
== TOK_NEG
) {
2453 /* print a type. If 'varstr' is not NULL, then the variable is also
2454 printed in the type */
2456 /* XXX: add array and function pointers */
2457 static void type_to_str(char *buf
, int buf_size
,
2458 CType
*type
, const char *varstr
)
2470 pstrcat(buf
, buf_size
, "extern ");
2472 pstrcat(buf
, buf_size
, "static ");
2474 pstrcat(buf
, buf_size
, "typedef ");
2476 pstrcat(buf
, buf_size
, "inline ");
2478 if (t
& VT_VOLATILE
)
2479 pstrcat(buf
, buf_size
, "volatile ");
2480 if (t
& VT_CONSTANT
)
2481 pstrcat(buf
, buf_size
, "const ");
2483 if (((t
& VT_DEFSIGN
) && bt
== VT_BYTE
)
2484 || ((t
& VT_UNSIGNED
)
2485 && (bt
== VT_SHORT
|| bt
== VT_INT
|| bt
== VT_LLONG
)
2488 pstrcat(buf
, buf_size
, (t
& VT_UNSIGNED
) ? "unsigned " : "signed ");
2490 buf_size
-= strlen(buf
);
2526 tstr
= "long double";
2528 pstrcat(buf
, buf_size
, tstr
);
2535 pstrcat(buf
, buf_size
, tstr
);
2536 v
= type
->ref
->v
& ~SYM_STRUCT
;
2537 if (v
>= SYM_FIRST_ANOM
)
2538 pstrcat(buf
, buf_size
, "<anonymous>");
2540 pstrcat(buf
, buf_size
, get_tok_str(v
, NULL
));
2545 if (varstr
&& '*' == *varstr
) {
2546 pstrcat(buf1
, sizeof(buf1
), "(");
2547 pstrcat(buf1
, sizeof(buf1
), varstr
);
2548 pstrcat(buf1
, sizeof(buf1
), ")");
2550 pstrcat(buf1
, buf_size
, "(");
2552 while (sa
!= NULL
) {
2554 type_to_str(buf2
, sizeof(buf2
), &sa
->type
, NULL
);
2555 pstrcat(buf1
, sizeof(buf1
), buf2
);
2558 pstrcat(buf1
, sizeof(buf1
), ", ");
2560 if (s
->f
.func_type
== FUNC_ELLIPSIS
)
2561 pstrcat(buf1
, sizeof(buf1
), ", ...");
2562 pstrcat(buf1
, sizeof(buf1
), ")");
2563 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
2567 if (t
& (VT_ARRAY
|VT_VLA
)) {
2568 if (varstr
&& '*' == *varstr
)
2569 snprintf(buf1
, sizeof(buf1
), "(%s)[%d]", varstr
, s
->c
);
2571 snprintf(buf1
, sizeof(buf1
), "%s[%d]", varstr
? varstr
: "", s
->c
);
2572 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
2575 pstrcpy(buf1
, sizeof(buf1
), "*");
2576 if (t
& VT_CONSTANT
)
2577 pstrcat(buf1
, buf_size
, "const ");
2578 if (t
& VT_VOLATILE
)
2579 pstrcat(buf1
, buf_size
, "volatile ");
2581 pstrcat(buf1
, sizeof(buf1
), varstr
);
2582 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
2586 pstrcat(buf
, buf_size
, " ");
2587 pstrcat(buf
, buf_size
, varstr
);
2592 static void type_incompatibility_error(CType
* st
, CType
* dt
, const char* fmt
)
2594 char buf1
[256], buf2
[256];
2595 type_to_str(buf1
, sizeof(buf1
), st
, NULL
);
2596 type_to_str(buf2
, sizeof(buf2
), dt
, NULL
);
2597 tcc_error(fmt
, buf1
, buf2
);
2600 static void type_incompatibility_warning(CType
* st
, CType
* dt
, const char* fmt
)
2602 char buf1
[256], buf2
[256];
2603 type_to_str(buf1
, sizeof(buf1
), st
, NULL
);
2604 type_to_str(buf2
, sizeof(buf2
), dt
, NULL
);
2605 tcc_warning(fmt
, buf1
, buf2
);
2608 static int pointed_size(CType
*type
)
2611 return type_size(pointed_type(type
), &align
);
2614 static inline int is_null_pointer(SValue
*p
)
2616 if ((p
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
| VT_NONCONST
)) != VT_CONST
)
2618 return ((p
->type
.t
& VT_BTYPE
) == VT_INT
&& (uint32_t)p
->c
.i
== 0) ||
2619 ((p
->type
.t
& VT_BTYPE
) == VT_LLONG
&& p
->c
.i
== 0) ||
2620 ((p
->type
.t
& VT_BTYPE
) == VT_PTR
&&
2621 (PTR_SIZE
== 4 ? (uint32_t)p
->c
.i
== 0 : p
->c
.i
== 0) &&
2622 ((pointed_type(&p
->type
)->t
& VT_BTYPE
) == VT_VOID
) &&
2623 0 == (pointed_type(&p
->type
)->t
& (VT_CONSTANT
| VT_VOLATILE
))
2627 /* compare function types. OLD functions match any new functions */
2628 static int is_compatible_func(CType
*type1
, CType
*type2
)
2634 if (s1
->f
.func_call
!= s2
->f
.func_call
)
2636 if (s1
->f
.func_type
!= s2
->f
.func_type
2637 && s1
->f
.func_type
!= FUNC_OLD
2638 && s2
->f
.func_type
!= FUNC_OLD
)
2641 if (!is_compatible_unqualified_types(&s1
->type
, &s2
->type
))
2643 if (s1
->f
.func_type
== FUNC_OLD
|| s2
->f
.func_type
== FUNC_OLD
)
2654 /* return true if type1 and type2 are the same. If unqualified is
2655 true, qualifiers on the types are ignored.
2657 static int compare_types(CType
*type1
, CType
*type2
, int unqualified
)
2661 t1
= type1
->t
& VT_TYPE
;
2662 t2
= type2
->t
& VT_TYPE
;
2664 /* strip qualifiers before comparing */
2665 t1
&= ~(VT_CONSTANT
| VT_VOLATILE
);
2666 t2
&= ~(VT_CONSTANT
| VT_VOLATILE
);
2669 /* Default Vs explicit signedness only matters for char */
2670 if ((t1
& VT_BTYPE
) != VT_BYTE
) {
2674 /* XXX: bitfields ? */
2679 && !(type1
->ref
->c
< 0
2680 || type2
->ref
->c
< 0
2681 || type1
->ref
->c
== type2
->ref
->c
))
2684 /* test more complicated cases */
2685 bt1
= t1
& VT_BTYPE
;
2686 if (bt1
== VT_PTR
) {
2687 type1
= pointed_type(type1
);
2688 type2
= pointed_type(type2
);
2689 return is_compatible_types(type1
, type2
);
2690 } else if (bt1
== VT_STRUCT
) {
2691 return (type1
->ref
== type2
->ref
);
2692 } else if (bt1
== VT_FUNC
) {
2693 return is_compatible_func(type1
, type2
);
2694 } else if (IS_ENUM(type1
->t
) && IS_ENUM(type2
->t
)) {
2695 /* If both are enums then they must be the same, if only one is then
2696 t1 and t2 must be equal, which was checked above already. */
2697 return type1
->ref
== type2
->ref
;
2703 /* Check if OP1 and OP2 can be "combined" with operation OP, the combined
2704 type is stored in DEST if non-null (except for pointer plus/minus) . */
2705 static int combine_types(CType
*dest
, SValue
*op1
, SValue
*op2
, int op
)
2707 CType
*type1
= &op1
->type
, *type2
= &op2
->type
, type
;
2708 int t1
= type1
->t
, t2
= type2
->t
, bt1
= t1
& VT_BTYPE
, bt2
= t2
& VT_BTYPE
;
2714 if (bt1
== VT_VOID
|| bt2
== VT_VOID
) {
2715 ret
= op
== '?' ? 1 : 0;
2716 /* NOTE: as an extension, we accept void on only one side */
2718 } else if (bt1
== VT_PTR
|| bt2
== VT_PTR
) {
2719 if (op
== '+') ; /* Handled in caller */
2720 /* http://port70.net/~nsz/c/c99/n1256.html#6.5.15p6 */
2721 /* If one is a null ptr constant the result type is the other. */
2722 else if (is_null_pointer (op2
)) type
= *type1
;
2723 else if (is_null_pointer (op1
)) type
= *type2
;
2724 else if (bt1
!= bt2
) {
2725 /* accept comparison or cond-expr between pointer and integer
2727 if ((op
== '?' || TOK_ISCOND(op
))
2728 && (is_integer_btype(bt1
) || is_integer_btype(bt2
)))
2729 tcc_warning("pointer/integer mismatch in %s",
2730 op
== '?' ? "conditional expression" : "comparison");
2731 else if (op
!= '-' || !is_integer_btype(bt2
))
2733 type
= *(bt1
== VT_PTR
? type1
: type2
);
2735 CType
*pt1
= pointed_type(type1
);
2736 CType
*pt2
= pointed_type(type2
);
2737 int pbt1
= pt1
->t
& VT_BTYPE
;
2738 int pbt2
= pt2
->t
& VT_BTYPE
;
2739 int newquals
, copied
= 0;
2740 if (pbt1
!= VT_VOID
&& pbt2
!= VT_VOID
2741 && !compare_types(pt1
, pt2
, 1/*unqualif*/)) {
2742 if (op
!= '?' && !TOK_ISCOND(op
))
2745 type_incompatibility_warning(type1
, type2
,
2747 ? "pointer type mismatch in conditional expression ('%s' and '%s')"
2748 : "pointer type mismatch in comparison('%s' and '%s')");
2751 /* pointers to void get preferred, otherwise the
2752 pointed to types minus qualifs should be compatible */
2753 type
= *((pbt1
== VT_VOID
) ? type1
: type2
);
2754 /* combine qualifs */
2755 newquals
= ((pt1
->t
| pt2
->t
) & (VT_CONSTANT
| VT_VOLATILE
));
2756 if ((~pointed_type(&type
)->t
& (VT_CONSTANT
| VT_VOLATILE
))
2759 /* copy the pointer target symbol */
2760 type
.ref
= sym_push(SYM_FIELD
, &type
.ref
->type
,
2763 pointed_type(&type
)->t
|= newquals
;
2765 /* pointers to incomplete arrays get converted to
2766 pointers to completed ones if possible */
2767 if (pt1
->t
& VT_ARRAY
2768 && pt2
->t
& VT_ARRAY
2769 && pointed_type(&type
)->ref
->c
< 0
2770 && (pt1
->ref
->c
> 0 || pt2
->ref
->c
> 0))
2773 type
.ref
= sym_push(SYM_FIELD
, &type
.ref
->type
,
2775 pointed_type(&type
)->ref
=
2776 sym_push(SYM_FIELD
, &pointed_type(&type
)->ref
->type
,
2777 0, pointed_type(&type
)->ref
->c
);
2778 pointed_type(&type
)->ref
->c
=
2779 0 < pt1
->ref
->c
? pt1
->ref
->c
: pt2
->ref
->c
;
2785 } else if (bt1
== VT_STRUCT
|| bt2
== VT_STRUCT
) {
2786 if (op
!= '?' || !compare_types(type1
, type2
, 1))
2789 } else if (is_float(bt1
) || is_float(bt2
)) {
2790 if (bt1
== VT_LDOUBLE
|| bt2
== VT_LDOUBLE
) {
2791 type
.t
= VT_LDOUBLE
;
2792 } else if (bt1
== VT_DOUBLE
|| bt2
== VT_DOUBLE
) {
2797 } else if (bt1
== VT_LLONG
|| bt2
== VT_LLONG
) {
2798 /* cast to biggest op */
2799 type
.t
= VT_LLONG
| VT_LONG
;
2800 if (bt1
== VT_LLONG
)
2802 if (bt2
== VT_LLONG
)
2804 /* convert to unsigned if it does not fit in a long long */
2805 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
) ||
2806 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
))
2807 type
.t
|= VT_UNSIGNED
;
2809 /* integer operations */
2810 type
.t
= VT_INT
| (VT_LONG
& (t1
| t2
));
2811 /* convert to unsigned if it does not fit in an integer */
2812 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
) ||
2813 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
))
2814 type
.t
|= VT_UNSIGNED
;
2821 /* generic gen_op: handles types problems */
2822 ST_FUNC
void gen_op(int op
)
2824 int t1
, t2
, bt1
, bt2
, t
;
2825 CType type1
, combtype
;
2828 t1
= vtop
[-1].type
.t
;
2829 t2
= vtop
[0].type
.t
;
2830 bt1
= t1
& VT_BTYPE
;
2831 bt2
= t2
& VT_BTYPE
;
2833 if (bt1
== VT_FUNC
|| bt2
== VT_FUNC
) {
2834 if (bt2
== VT_FUNC
) {
2835 mk_pointer(&vtop
->type
);
2838 if (bt1
== VT_FUNC
) {
2840 mk_pointer(&vtop
->type
);
2845 } else if (!combine_types(&combtype
, vtop
- 1, vtop
, op
)) {
2846 tcc_error_noabort("invalid operand types for binary operation");
2848 } else if (bt1
== VT_PTR
|| bt2
== VT_PTR
) {
2849 /* at least one operand is a pointer */
2850 /* relational op: must be both pointers */
2854 /* if both pointers, then it must be the '-' op */
2855 if (bt1
== VT_PTR
&& bt2
== VT_PTR
) {
2857 tcc_error("cannot use pointers here");
2858 vpush_type_size(pointed_type(&vtop
[-1].type
), &align
);
2861 vtop
->type
.t
= VT_PTRDIFF_T
;
2865 /* exactly one pointer : must be '+' or '-'. */
2866 if (op
!= '-' && op
!= '+')
2867 tcc_error("cannot use pointers here");
2868 /* Put pointer as first operand */
2869 if (bt2
== VT_PTR
) {
2871 t
= t1
, t1
= t2
, t2
= t
;
2874 if ((vtop
[0].type
.t
& VT_BTYPE
) == VT_LLONG
)
2875 /* XXX: truncate here because gen_opl can't handle ptr + long long */
2878 type1
= vtop
[-1].type
;
2879 vpush_type_size(pointed_type(&vtop
[-1].type
), &align
);
2881 #ifdef CONFIG_TCC_BCHECK
2882 if (tcc_state
->do_bounds_check
&& !const_wanted
) {
2883 /* if bounded pointers, we generate a special code to
2890 gen_bounded_ptr_add();
2896 type1
.t
&= ~(VT_ARRAY
|VT_VLA
);
2897 /* put again type if gen_opic() swaped operands */
2901 /* floats can only be used for a few operations */
2902 if (is_float(combtype
.t
)
2903 && op
!= '+' && op
!= '-' && op
!= '*' && op
!= '/'
2905 tcc_error("invalid operands for binary operation");
2906 else if (op
== TOK_SHR
|| op
== TOK_SAR
|| op
== TOK_SHL
) {
2907 t
= bt1
== VT_LLONG
? VT_LLONG
: VT_INT
;
2908 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (t
| VT_UNSIGNED
))
2910 t
|= (VT_LONG
& t1
);
2914 t
= t2
= combtype
.t
;
2915 /* XXX: currently, some unsigned operations are explicit, so
2916 we modify them here */
2917 if (t
& VT_UNSIGNED
) {
2924 else if (op
== TOK_LT
)
2926 else if (op
== TOK_GT
)
2928 else if (op
== TOK_LE
)
2930 else if (op
== TOK_GE
)
2936 /* special case for shifts and long long: we keep the shift as
2938 if (op
== TOK_SHR
|| op
== TOK_SAR
|| op
== TOK_SHL
)
2945 if (TOK_ISCOND(op
)) {
2946 /* relational op: the result is an int */
2947 vtop
->type
.t
= VT_INT
;
2952 // Make sure that we have converted to an rvalue:
2953 if (vtop
->r
& VT_LVAL
)
2954 gv(is_float(vtop
->type
.t
& VT_BTYPE
) ? RC_FLOAT
: RC_INT
);
2957 #if defined TCC_TARGET_ARM64 || defined TCC_TARGET_RISCV64 || defined TCC_TARGET_ARM
2958 #define gen_cvt_itof1 gen_cvt_itof
2960 /* generic itof for unsigned long long case */
2961 static void gen_cvt_itof1(int t
)
2963 if ((vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
)) ==
2964 (VT_LLONG
| VT_UNSIGNED
)) {
2967 vpush_helper_func(TOK___floatundisf
);
2968 #if LDOUBLE_SIZE != 8
2969 else if (t
== VT_LDOUBLE
)
2970 vpush_helper_func(TOK___floatundixf
);
2973 vpush_helper_func(TOK___floatundidf
);
2984 #if defined TCC_TARGET_ARM64 || defined TCC_TARGET_RISCV64
2985 #define gen_cvt_ftoi1 gen_cvt_ftoi
2987 /* generic ftoi for unsigned long long case */
2988 static void gen_cvt_ftoi1(int t
)
2991 if (t
== (VT_LLONG
| VT_UNSIGNED
)) {
2992 /* not handled natively */
2993 st
= vtop
->type
.t
& VT_BTYPE
;
2995 vpush_helper_func(TOK___fixunssfdi
);
2996 #if LDOUBLE_SIZE != 8
2997 else if (st
== VT_LDOUBLE
)
2998 vpush_helper_func(TOK___fixunsxfdi
);
3001 vpush_helper_func(TOK___fixunsdfdi
);
3012 /* special delayed cast for char/short */
3013 static void force_charshort_cast(void)
3015 int sbt
= BFGET(vtop
->r
, VT_MUSTCAST
) == 2 ? VT_LLONG
: VT_INT
;
3016 int dbt
= vtop
->type
.t
;
3017 vtop
->r
&= ~VT_MUSTCAST
;
3019 gen_cast_s(dbt
== VT_BOOL
? VT_BYTE
|VT_UNSIGNED
: dbt
);
3023 static void gen_cast_s(int t
)
3031 /* cast 'vtop' to 'type'. Casting to bitfields is forbidden. */
3032 static void gen_cast(CType
*type
)
3034 int sbt
, dbt
, sf
, df
, c
;
3035 int dbt_bt
, sbt_bt
, ds
, ss
, bits
, trunc
;
3037 /* special delayed cast for char/short */
3038 if (vtop
->r
& VT_MUSTCAST
)
3039 force_charshort_cast();
3041 /* bitfields first get cast to ints */
3042 if (vtop
->type
.t
& VT_BITFIELD
)
3045 dbt
= type
->t
& (VT_BTYPE
| VT_UNSIGNED
);
3046 sbt
= vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
);
3054 dbt_bt
= dbt
& VT_BTYPE
;
3055 sbt_bt
= sbt
& VT_BTYPE
;
3056 if (dbt_bt
== VT_VOID
)
3058 if (sbt_bt
== VT_VOID
) {
3060 cast_error(&vtop
->type
, type
);
3063 c
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
3064 #if !defined TCC_IS_NATIVE && !defined TCC_IS_NATIVE_387
3065 c
&= (dbt
!= VT_LDOUBLE
) | !!nocode_wanted
;
3068 /* constant case: we can do it now */
3069 /* XXX: in ISOC, cannot do it if error in convert */
3070 if (sbt
== VT_FLOAT
)
3071 vtop
->c
.ld
= vtop
->c
.f
;
3072 else if (sbt
== VT_DOUBLE
)
3073 vtop
->c
.ld
= vtop
->c
.d
;
3076 if (sbt_bt
== VT_LLONG
) {
3077 if ((sbt
& VT_UNSIGNED
) || !(vtop
->c
.i
>> 63))
3078 vtop
->c
.ld
= vtop
->c
.i
;
3080 vtop
->c
.ld
= -(long double)-vtop
->c
.i
;
3082 if ((sbt
& VT_UNSIGNED
) || !(vtop
->c
.i
>> 31))
3083 vtop
->c
.ld
= (uint32_t)vtop
->c
.i
;
3085 vtop
->c
.ld
= -(long double)-(uint32_t)vtop
->c
.i
;
3088 if (dbt
== VT_FLOAT
)
3089 vtop
->c
.f
= (float)vtop
->c
.ld
;
3090 else if (dbt
== VT_DOUBLE
)
3091 vtop
->c
.d
= (double)vtop
->c
.ld
;
3092 } else if (sf
&& dbt
== VT_BOOL
) {
3093 vtop
->c
.i
= (vtop
->c
.ld
!= 0);
3096 vtop
->c
.i
= vtop
->c
.ld
;
3097 else if (sbt_bt
== VT_LLONG
|| (PTR_SIZE
== 8 && sbt
== VT_PTR
))
3099 else if (sbt
& VT_UNSIGNED
)
3100 vtop
->c
.i
= (uint32_t)vtop
->c
.i
;
3102 vtop
->c
.i
= ((uint32_t)vtop
->c
.i
| -(vtop
->c
.i
& 0x80000000));
3104 if (dbt_bt
== VT_LLONG
|| (PTR_SIZE
== 8 && dbt
== VT_PTR
))
3106 else if (dbt
== VT_BOOL
)
3107 vtop
->c
.i
= (vtop
->c
.i
!= 0);
3109 uint32_t m
= dbt_bt
== VT_BYTE
? 0xff :
3110 dbt_bt
== VT_SHORT
? 0xffff :
3113 if (!(dbt
& VT_UNSIGNED
))
3114 vtop
->c
.i
|= -(vtop
->c
.i
& ((m
>> 1) + 1));
3119 } else if (dbt
== VT_BOOL
3120 && (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
))
3121 == (VT_CONST
| VT_SYM
)) {
3122 /* addresses are considered non-zero (see tcctest.c:sinit23) */
3128 /* cannot generate code for global or static initializers */
3129 if (STATIC_DATA_WANTED
)
3132 /* non constant case: generate code */
3133 if (dbt
== VT_BOOL
) {
3134 gen_test_zero(TOK_NE
);
3140 /* convert from fp to fp */
3143 /* convert int to fp */
3146 /* convert fp to int */
3148 if (dbt_bt
!= VT_LLONG
&& dbt_bt
!= VT_INT
)
3151 goto again
; /* may need char/short cast */
3156 ds
= btype_size(dbt_bt
);
3157 ss
= btype_size(sbt_bt
);
3158 if (ds
== 0 || ss
== 0)
3161 if (IS_ENUM(type
->t
) && type
->ref
->c
< 0)
3162 tcc_error("cast to incomplete type");
3164 /* same size and no sign conversion needed */
3165 if (ds
== ss
&& ds
>= 4)
3167 if (dbt_bt
== VT_PTR
|| sbt_bt
== VT_PTR
) {
3168 tcc_warning("cast between pointer and integer of different size");
3169 if (sbt_bt
== VT_PTR
) {
3170 /* put integer type to allow logical operations below */
3171 vtop
->type
.t
= (PTR_SIZE
== 8 ? VT_LLONG
: VT_INT
);
3175 /* processor allows { int a = 0, b = *(char*)&a; }
3176 That means that if we cast to less width, we can just
3177 change the type and read it still later. */
3178 #define ALLOW_SUBTYPE_ACCESS 1
3180 if (ALLOW_SUBTYPE_ACCESS
&& (vtop
->r
& VT_LVAL
)) {
3181 /* value still in memory */
3185 if (ds
<= 4 && !(dbt
== (VT_SHORT
| VT_UNSIGNED
) && sbt
== VT_BYTE
)) {
3187 goto done
; /* no 64bit envolved */
3195 /* generate high word */
3196 if (sbt
& VT_UNSIGNED
) {
3205 } else if (ss
== 8) {
3206 /* from long long: just take low order word */
3214 /* need to convert from 32bit to 64bit */
3215 if (sbt
& VT_UNSIGNED
) {
3216 #if defined(TCC_TARGET_RISCV64)
3217 /* RISC-V keeps 32bit vals in registers sign-extended.
3218 So here we need a zero-extension. */
3227 ss
= ds
, ds
= 4, dbt
= sbt
;
3228 } else if (ss
== 8) {
3229 /* RISC-V keeps 32bit vals in registers sign-extended.
3230 So here we need a sign-extension for signed types and
3231 zero-extension. for unsigned types. */
3232 #if !defined(TCC_TARGET_RISCV64)
3233 trunc
= 32; /* zero upper 32 bits for non RISC-V targets */
3242 #if defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64 || defined TCC_TARGET_ARM64
3248 bits
= (ss
- ds
) * 8;
3249 /* for unsigned, gen_op will convert SAR to SHR */
3250 vtop
->type
.t
= (ss
== 8 ? VT_LLONG
: VT_INT
) | (dbt
& VT_UNSIGNED
);
3253 vpushi(bits
- trunc
);
3260 vtop
->type
.t
&= ~ ( VT_CONSTANT
| VT_VOLATILE
| VT_ARRAY
);
3263 /* return type size as known at compile time. Put alignment at 'a' */
/* NOTE(review): fragmented extract -- several original lines are missing
   from this view; the visible code is kept verbatim. */
3264 ST_FUNC
int type_size(CType
*type
, int *a
)
/* dispatch on the basic-type bits of the type word */
3269 bt
= type
->t
& VT_BTYPE
;
3270 if (bt
== VT_STRUCT
) {
3275 } else if (bt
== VT_PTR
) {
/* array: size is element size times element count (count handling is on
   lines missing from this extract) */
3276 if (type
->t
& VT_ARRAY
) {
3280 ts
= type_size(&s
->type
, a
);
/* negative size with negative count: presumably an incomplete array of
   an incomplete element type -- confirm against full source */
3282 if (ts
< 0 && s
->c
< 0)
/* enum with no enumerators recorded yet (ref->c < 0) has no size */
3290 } else if (IS_ENUM(type
->t
) && type
->ref
->c
< 0) {
3292 return -1; /* incomplete enum */
3293 } else if (bt
== VT_LDOUBLE
) {
3295 return LDOUBLE_SIZE
;
/* 8-byte scalars: alignment differs per target, hence the #ifdefs */
3296 } else if (bt
== VT_DOUBLE
|| bt
== VT_LLONG
) {
3297 #ifdef TCC_TARGET_I386
3298 #ifdef TCC_TARGET_PE
3303 #elif defined(TCC_TARGET_ARM)
3313 } else if (bt
== VT_INT
|| bt
== VT_FLOAT
) {
3316 } else if (bt
== VT_SHORT
) {
3319 } else if (bt
== VT_QLONG
|| bt
== VT_QFLOAT
) {
3323 /* char, void, function, _Bool */
3329 /* push type size as known at runtime on top of value stack. Put
   alignment at 'a'. (continuation of original comment is missing here) */
3331 static void vpush_type_size(CType
*type
, int *a
)
/* VLA: push an lvalue on the local stack slot at type->ref->c, which
   presumably holds the runtime-computed size -- confirm in full source */
3333 if (type
->t
& VT_VLA
) {
3334 type_size(&type
->ref
->type
, a
);
3335 vset(&int_type
, VT_LOCAL
|VT_LVAL
, type
->ref
->c
);
/* non-VLA: the size is a compile-time constant */
3337 int size
= type_size(type
, a
);
/* size < 0 means incomplete type (see type_size) */
3339 tcc_error("unknown type size");
3348 /* return the pointed type of t */
3349 static inline CType
*pointed_type(CType
*type
)
3351 return &type
->ref
->type
;
3354 /* modify type so that it is a pointer to the previous 'type'. */
/* NOTE(review): fragmented extract -- the line that presumably stores
   's' into type->ref is not visible here; confirm in full source. */
3355 ST_FUNC
void mk_pointer(CType
*type
)
/* anonymous field symbol recording the pointed-to type */
3358 s
= sym_push(SYM_FIELD
, type
, 0, -1);
/* become a pointer, keeping only the storage-class bits of the old type */
3359 type
->t
= VT_PTR
| (type
->t
& VT_STORAGE
);
3363 /* return true if type1 and type2 are exactly the same (including
3366 static int is_compatible_types(CType
*type1
, CType
*type2
)
3368 return compare_types(type1
,type2
,0);
3371 /* return true if type1 and type2 are the same (ignoring qualifiers).
3373 static int is_compatible_unqualified_types(CType
*type1
, CType
*type2
)
3375 return compare_types(type1
,type2
,1);
3378 static void cast_error(CType
*st
, CType
*dt
)
3380 type_incompatibility_error(st
, dt
, "cannot convert '%s' to '%s'");
3383 /* verify type compatibility to store vtop in 'dt' type */
3384 static void verify_assign_cast(CType
*dt
)
3386 CType
*st
, *type1
, *type2
;
3387 int dbt
, sbt
, qualwarn
, lvl
;
3389 st
= &vtop
->type
; /* source type */
3390 dbt
= dt
->t
& VT_BTYPE
;
3391 sbt
= st
->t
& VT_BTYPE
;
3392 if (dt
->t
& VT_CONSTANT
)
3393 tcc_warning("assignment of read-only location");
3397 tcc_error("assignment to void expression");
3400 /* special cases for pointers */
3401 /* '0' can also be a pointer */
3402 if (is_null_pointer(vtop
))
3404 /* accept implicit pointer to integer cast with warning */
3405 if (is_integer_btype(sbt
)) {
3406 tcc_warning("assignment makes pointer from integer without a cast");
3409 type1
= pointed_type(dt
);
3411 type2
= pointed_type(st
);
3412 else if (sbt
== VT_FUNC
)
3413 type2
= st
; /* a function is implicitly a function pointer */
3416 if (is_compatible_types(type1
, type2
))
3418 for (qualwarn
= lvl
= 0;; ++lvl
) {
3419 if (((type2
->t
& VT_CONSTANT
) && !(type1
->t
& VT_CONSTANT
)) ||
3420 ((type2
->t
& VT_VOLATILE
) && !(type1
->t
& VT_VOLATILE
)))
3422 dbt
= type1
->t
& (VT_BTYPE
|VT_LONG
);
3423 sbt
= type2
->t
& (VT_BTYPE
|VT_LONG
);
3424 if (dbt
!= VT_PTR
|| sbt
!= VT_PTR
)
3426 type1
= pointed_type(type1
);
3427 type2
= pointed_type(type2
);
3429 if (!is_compatible_unqualified_types(type1
, type2
)) {
3430 if ((dbt
== VT_VOID
|| sbt
== VT_VOID
) && lvl
== 0) {
3431 /* void * can match anything */
3432 } else if (dbt
== sbt
3433 && is_integer_btype(sbt
& VT_BTYPE
)
3434 && IS_ENUM(type1
->t
) + IS_ENUM(type2
->t
)
3435 + !!((type1
->t
^ type2
->t
) & VT_UNSIGNED
) < 2) {
3436 /* Like GCC don't warn by default for merely changes
3437 in pointer target signedness. Do warn for different
3438 base types, though, in particular for unsigned enums
3439 and signed int targets. */
3441 tcc_warning("assignment from incompatible pointer type");
3446 tcc_warning_c(warn_discarded_qualifiers
)("assignment discards qualifiers from pointer target type");
3452 if (sbt
== VT_PTR
|| sbt
== VT_FUNC
) {
3453 tcc_warning("assignment makes integer from pointer without a cast");
3454 } else if (sbt
== VT_STRUCT
) {
3455 goto case_VT_STRUCT
;
3457 /* XXX: more tests */
3461 if (!is_compatible_unqualified_types(dt
, st
)) {
/* Check that vtop may legally be assigned to type 'dt', then cast it.
   NOTE(review): fragmented extract -- the rest of the body (presumably
   the actual cast) is missing from this view. */
3469 static void gen_assign_cast(CType
*dt
)
3471 verify_assign_cast(dt
);
3475 /* store vtop in lvalue pushed on stack */
3476 ST_FUNC
void vstore(void)
3478 int sbt
, dbt
, ft
, r
, size
, align
, bit_size
, bit_pos
, delayed_cast
;
3480 ft
= vtop
[-1].type
.t
;
3481 sbt
= vtop
->type
.t
& VT_BTYPE
;
3482 dbt
= ft
& VT_BTYPE
;
3483 verify_assign_cast(&vtop
[-1].type
);
3485 if (sbt
== VT_STRUCT
) {
3486 /* if structure, only generate pointer */
3487 /* structure assignment : generate memcpy */
3488 size
= type_size(&vtop
->type
, &align
);
3489 /* destination, keep on stack() as result */
3491 #ifdef CONFIG_TCC_BCHECK
3492 if (vtop
->r
& VT_MUSTBOUND
)
3493 gbound(); /* check would be wrong after gaddrof() */
3495 vtop
->type
.t
= VT_PTR
;
3499 #ifdef CONFIG_TCC_BCHECK
3500 if (vtop
->r
& VT_MUSTBOUND
)
3503 vtop
->type
.t
= VT_PTR
;
3506 #ifdef TCC_TARGET_NATIVE_STRUCT_COPY
3508 #ifdef CONFIG_TCC_BCHECK
3509 && !tcc_state
->do_bounds_check
3512 gen_struct_copy(size
);
3518 /* Use memmove, rather than memcpy, as dest and src may be same: */
3521 vpush_helper_func(TOK_memmove8
);
3522 else if(!(align
& 3))
3523 vpush_helper_func(TOK_memmove4
);
3526 vpush_helper_func(TOK_memmove
);
3531 } else if (ft
& VT_BITFIELD
) {
3532 /* bitfield store handling */
3534 /* save lvalue as expression result (example: s.b = s.a = n;) */
3535 vdup(), vtop
[-1] = vtop
[-2];
3537 bit_pos
= BIT_POS(ft
);
3538 bit_size
= BIT_SIZE(ft
);
3539 /* remove bit field info to avoid loops */
3540 vtop
[-1].type
.t
= ft
& ~VT_STRUCT_MASK
;
3542 if (dbt
== VT_BOOL
) {
3543 gen_cast(&vtop
[-1].type
);
3544 vtop
[-1].type
.t
= (vtop
[-1].type
.t
& ~VT_BTYPE
) | (VT_BYTE
| VT_UNSIGNED
);
3546 r
= adjust_bf(vtop
- 1, bit_pos
, bit_size
);
3547 if (dbt
!= VT_BOOL
) {
3548 gen_cast(&vtop
[-1].type
);
3549 dbt
= vtop
[-1].type
.t
& VT_BTYPE
;
3551 if (r
== VT_STRUCT
) {
3552 store_packed_bf(bit_pos
, bit_size
);
3554 unsigned long long mask
= (1ULL << bit_size
) - 1;
3555 if (dbt
!= VT_BOOL
) {
3557 if (dbt
== VT_LLONG
)
3560 vpushi((unsigned)mask
);
3567 /* duplicate destination */
3570 /* load destination, mask and or with source */
3571 if (dbt
== VT_LLONG
)
3572 vpushll(~(mask
<< bit_pos
));
3574 vpushi(~((unsigned)mask
<< bit_pos
));
3579 /* ... and discard */
3582 } else if (dbt
== VT_VOID
) {
3585 /* optimize char/short casts */
3587 if ((dbt
== VT_BYTE
|| dbt
== VT_SHORT
)
3588 && is_integer_btype(sbt
)
3590 if ((vtop
->r
& VT_MUSTCAST
)
3591 && btype_size(dbt
) > btype_size(sbt
)
3593 force_charshort_cast();
3596 gen_cast(&vtop
[-1].type
);
3599 #ifdef CONFIG_TCC_BCHECK
3600 /* bound check case */
3601 if (vtop
[-1].r
& VT_MUSTBOUND
) {
3607 gv(RC_TYPE(dbt
)); /* generate value */
3610 vtop
->r
|= BFVAL(VT_MUSTCAST
, (sbt
== VT_LLONG
) + 1);
3611 //tcc_warning("deley cast %x -> %x", sbt, dbt);
3612 vtop
->type
.t
= ft
& VT_TYPE
;
3615 /* if lvalue was saved on stack, must read it */
3616 if ((vtop
[-1].r
& VT_VALMASK
) == VT_LLOCAL
) {
3618 r
= get_reg(RC_INT
);
3619 sv
.type
.t
= VT_PTRDIFF_T
;
3620 sv
.r
= VT_LOCAL
| VT_LVAL
;
3621 sv
.c
.i
= vtop
[-1].c
.i
;
3623 vtop
[-1].r
= r
| VT_LVAL
;
3626 r
= vtop
->r
& VT_VALMASK
;
3627 /* two word case handling :
3628 store second register at word + 4 (or +8 for x86-64) */
3629 if (USING_TWO_WORDS(dbt
)) {
3630 int load_type
= (dbt
== VT_QFLOAT
) ? VT_DOUBLE
: VT_PTRDIFF_T
;
3631 vtop
[-1].type
.t
= load_type
;
3634 /* convert to int to increment easily */
3635 vtop
->type
.t
= VT_PTRDIFF_T
;
3641 vtop
[-1].type
.t
= load_type
;
3642 /* XXX: it works because r2 is spilled last ! */
3643 store(vtop
->r2
, vtop
- 1);
3649 vtop
--; /* NOT vpop() because on x86 it would flush the fp stack */
3653 /* post defines POST/PRE add. c is the token ++ or -- */
/* NOTE(review): fragmented extract -- interior lines (lvalue test,
   stack rotations, gen_op) are missing; visible code kept verbatim. */
3654 ST_FUNC
void inc(int post
, int c
)
3657 vdup(); /* save lvalue */
3659 gv_dup(); /* duplicate value */
/* c - TOK_MID presumably yields +1 or -1 -- confirm against token defs */
3664 vpushi(c
- TOK_MID
);
3666 vstore(); /* store value */
3668 vpop(); /* if post op, return saved value */
/* Collect one or more adjacent string-literal tokens into 'astr',
   NUL-terminated. 'msg' is the expected-token message (used on error,
   on a line missing from this extract). */
3671 ST_FUNC
void parse_mult_str (CString
*astr
, const char *msg
)
3673 /* read the string */
/* concatenate every consecutive string literal token */
3677 while (tok
== TOK_STR
) {
3678 /* XXX: add \0 handling too ? */
3679 cstr_cat(astr
, tokc
.str
.data
, -1);
/* terminate the accumulated string */
3682 cstr_ccat(astr
, '\0');
3685 /* If I is >= 1 and a power of two, returns log2(i)+1.
3686 If I is 0 returns 0. */
/* NOTE(review): fragmented extract -- the remaining narrowing steps and
   the return are on lines missing from this view. */
3687 ST_FUNC
int exact_log2p1(int i
)
/* count 8 bits at a time while i still has bits above the low byte */
3692 for (ret
= 1; i
>= 1 << 8; ret
+= 8)
3703 /* Parse __attribute__((...)) GNUC extension. */
3704 static void parse_attribute(AttributeDef
*ad
)
3710 if (tok
!= TOK_ATTRIBUTE1
&& tok
!= TOK_ATTRIBUTE2
)
3715 while (tok
!= ')') {
3716 if (tok
< TOK_IDENT
)
3717 expect("attribute name");
3729 tcc_warning_c(warn_implicit_function_declaration
)(
3730 "implicit declaration of function '%s'", get_tok_str(tok
, &tokc
));
3731 s
= external_global_sym(tok
, &func_old_type
);
3732 } else if ((s
->type
.t
& VT_BTYPE
) != VT_FUNC
)
3733 tcc_error("'%s' is not declared as function", get_tok_str(tok
, &tokc
));
3734 ad
->cleanup_func
= s
;
3739 case TOK_CONSTRUCTOR1
:
3740 case TOK_CONSTRUCTOR2
:
3741 ad
->f
.func_ctor
= 1;
3743 case TOK_DESTRUCTOR1
:
3744 case TOK_DESTRUCTOR2
:
3745 ad
->f
.func_dtor
= 1;
3747 case TOK_ALWAYS_INLINE1
:
3748 case TOK_ALWAYS_INLINE2
:
3749 ad
->f
.func_alwinl
= 1;
3754 parse_mult_str(&astr
, "section name");
3755 ad
->section
= find_section(tcc_state
, (char *)astr
.data
);
3762 parse_mult_str(&astr
, "alias(\"target\")");
3763 ad
->alias_target
= /* save string as token, for later */
3764 tok_alloc((char*)astr
.data
, astr
.size
-1)->tok
;
3768 case TOK_VISIBILITY1
:
3769 case TOK_VISIBILITY2
:
3771 parse_mult_str(&astr
,
3772 "visibility(\"default|hidden|internal|protected\")");
3773 if (!strcmp (astr
.data
, "default"))
3774 ad
->a
.visibility
= STV_DEFAULT
;
3775 else if (!strcmp (astr
.data
, "hidden"))
3776 ad
->a
.visibility
= STV_HIDDEN
;
3777 else if (!strcmp (astr
.data
, "internal"))
3778 ad
->a
.visibility
= STV_INTERNAL
;
3779 else if (!strcmp (astr
.data
, "protected"))
3780 ad
->a
.visibility
= STV_PROTECTED
;
3782 expect("visibility(\"default|hidden|internal|protected\")");
3791 if (n
<= 0 || (n
& (n
- 1)) != 0)
3792 tcc_error("alignment must be a positive power of two");
3797 ad
->a
.aligned
= exact_log2p1(n
);
3798 if (n
!= 1 << (ad
->a
.aligned
- 1))
3799 tcc_error("alignment of %d is larger than implemented", n
);
3811 /* currently, no need to handle it because tcc does not
3812 track unused objects */
3816 ad
->f
.func_noreturn
= 1;
3821 ad
->f
.func_call
= FUNC_CDECL
;
3826 ad
->f
.func_call
= FUNC_STDCALL
;
3828 #ifdef TCC_TARGET_I386
3838 ad
->f
.func_call
= FUNC_FASTCALL1
+ n
- 1;
3844 ad
->f
.func_call
= FUNC_FASTCALLW
;
3851 ad
->attr_mode
= VT_LLONG
+ 1;
3854 ad
->attr_mode
= VT_BYTE
+ 1;
3857 ad
->attr_mode
= VT_SHORT
+ 1;
3861 ad
->attr_mode
= VT_INT
+ 1;
3864 tcc_warning("__mode__(%s) not supported\n", get_tok_str(tok
, NULL
));
3871 ad
->a
.dllexport
= 1;
3873 case TOK_NODECORATE
:
3874 ad
->a
.nodecorate
= 1;
3877 ad
->a
.dllimport
= 1;
3880 tcc_warning_c(warn_unsupported
)("'%s' attribute ignored", get_tok_str(t
, NULL
));
3881 /* skip parameters */
3883 int parenthesis
= 0;
3887 else if (tok
== ')')
3890 } while (parenthesis
&& tok
!= -1);
/* Find field 'v' in struct/union 'type'. Anonymous struct/union members
   are searched recursively; 'cumofs' presumably accumulates their byte
   offset -- confirm in full source. (fragmented extract) */
3903 static Sym
* find_field (CType
*type
, int v
, int *cumofs
)
/* walk the member list */
3907 while ((s
= s
->next
) != NULL
) {
/* anonymous struct/union member (name is an anonymous symbol): recurse */
3908 if ((s
->v
& SYM_FIELD
) &&
3909 (s
->type
.t
& VT_BTYPE
) == VT_STRUCT
&&
3910 (s
->v
& ~SYM_FIELD
) >= SYM_FIRST_ANOM
) {
3911 Sym
*ret
= find_field (&s
->type
, v
, cumofs
);
/* Walk all members of struct/union 'type'. With check != 0 report
   duplicate member names; either way toggle a SYM_FIELD mark bit on the
   member's identifier entry (set on the check pass, cleared again by the
   following check==0 pass -- see the paired calls at the use site). */
3923 static void check_fields (CType
*type
, int check
)
3927 while ((s
= s
->next
) != NULL
) {
3928 int v
= s
->v
& ~SYM_FIELD
;
/* named (non-anonymous) member: inspect its TokenSym */
3929 if (v
< SYM_FIRST_ANOM
) {
3930 TokenSym
*ts
= table_ident
[v
- TOK_IDENT
];
/* mark already present means the name was seen before in this struct */
3931 if (check
&& (ts
->tok
& SYM_FIELD
))
3932 tcc_error("duplicate member '%s'", get_tok_str(v
, NULL
));
3933 ts
->tok
^= SYM_FIELD
;
/* anonymous struct/union member: recurse into its fields */
3934 } else if ((s
->type
.t
& VT_BTYPE
) == VT_STRUCT
)
3935 check_fields (&s
->type
, check
);
3939 static void struct_layout(CType
*type
, AttributeDef
*ad
)
3941 int size
, align
, maxalign
, offset
, c
, bit_pos
, bit_size
;
3942 int packed
, a
, bt
, prevbt
, prev_bit_size
;
3943 int pcc
= !tcc_state
->ms_bitfields
;
3944 int pragma_pack
= *tcc_state
->pack_stack_ptr
;
3951 prevbt
= VT_STRUCT
; /* make it never match */
3956 for (f
= type
->ref
->next
; f
; f
= f
->next
) {
3957 if (f
->type
.t
& VT_BITFIELD
)
3958 bit_size
= BIT_SIZE(f
->type
.t
);
3961 size
= type_size(&f
->type
, &align
);
3962 a
= f
->a
.aligned
? 1 << (f
->a
.aligned
- 1) : 0;
3965 if (pcc
&& bit_size
== 0) {
3966 /* in pcc mode, packing does not affect zero-width bitfields */
3969 /* in pcc mode, attribute packed overrides if set. */
3970 if (pcc
&& (f
->a
.packed
|| ad
->a
.packed
))
3973 /* pragma pack overrides align if lesser and packs bitfields always */
3976 if (pragma_pack
< align
)
3977 align
= pragma_pack
;
3978 /* in pcc mode pragma pack also overrides individual align */
3979 if (pcc
&& pragma_pack
< a
)
3983 /* some individual align was specified */
3987 if (type
->ref
->type
.t
== VT_UNION
) {
3988 if (pcc
&& bit_size
>= 0)
3989 size
= (bit_size
+ 7) >> 3;
3994 } else if (bit_size
< 0) {
3996 c
+= (bit_pos
+ 7) >> 3;
3997 c
= (c
+ align
- 1) & -align
;
4006 /* A bit-field. Layout is more complicated. There are two
4007 options: PCC (GCC) compatible and MS compatible */
4009 /* In PCC layout a bit-field is placed adjacent to the
4010 preceding bit-fields, except if:
4012 - an individual alignment was given
4013 - it would overflow its base type container and
4014 there is no packing */
4015 if (bit_size
== 0) {
4017 c
= (c
+ ((bit_pos
+ 7) >> 3) + align
- 1) & -align
;
4019 } else if (f
->a
.aligned
) {
4021 } else if (!packed
) {
4023 int ofs
= ((c
* 8 + bit_pos
) % a8
+ bit_size
+ a8
- 1) / a8
;
4024 if (ofs
> size
/ align
)
4028 /* in pcc mode, long long bitfields have type int if they fit */
4029 if (size
== 8 && bit_size
<= 32)
4030 f
->type
.t
= (f
->type
.t
& ~VT_BTYPE
) | VT_INT
, size
= 4;
4032 while (bit_pos
>= align
* 8)
4033 c
+= align
, bit_pos
-= align
* 8;
4036 /* In PCC layout named bit-fields influence the alignment
4037 of the containing struct using the base types alignment,
4038 except for packed fields (which here have correct align). */
4039 if (f
->v
& SYM_FIRST_ANOM
4040 // && bit_size // ??? gcc on ARM/rpi does that
4045 bt
= f
->type
.t
& VT_BTYPE
;
4046 if ((bit_pos
+ bit_size
> size
* 8)
4047 || (bit_size
> 0) == (bt
!= prevbt
)
4049 c
= (c
+ align
- 1) & -align
;
4052 /* In MS bitfield mode a bit-field run always uses
4053 at least as many bits as the underlying type.
4054 To start a new run it's also required that this
4055 or the last bit-field had non-zero width. */
4056 if (bit_size
|| prev_bit_size
)
4059 /* In MS layout the records alignment is normally
4060 influenced by the field, except for a zero-width
4061 field at the start of a run (but by further zero-width
4062 fields it is again). */
4063 if (bit_size
== 0 && prevbt
!= bt
)
4066 prev_bit_size
= bit_size
;
4069 f
->type
.t
= (f
->type
.t
& ~(0x3f << VT_STRUCT_SHIFT
))
4070 | (bit_pos
<< VT_STRUCT_SHIFT
);
4071 bit_pos
+= bit_size
;
4073 if (align
> maxalign
)
4077 printf("set field %s offset %-2d size %-2d align %-2d",
4078 get_tok_str(f
->v
& ~SYM_FIELD
, NULL
), offset
, size
, align
);
4079 if (f
->type
.t
& VT_BITFIELD
) {
4080 printf(" pos %-2d bits %-2d",
4093 c
+= (bit_pos
+ 7) >> 3;
4095 /* store size and alignment */
4096 a
= bt
= ad
->a
.aligned
? 1 << (ad
->a
.aligned
- 1) : 1;
4100 if (pragma_pack
&& pragma_pack
< maxalign
&& 0 == pcc
) {
4101 /* can happen if individual align for some member was given. In
4102 this case MSVC ignores maxalign when aligning the size */
4107 c
= (c
+ a
- 1) & -a
;
4111 printf("struct size %-2d align %-2d\n\n", c
, a
), fflush(stdout
);
4114 /* check whether we can access bitfields by their type */
4115 for (f
= type
->ref
->next
; f
; f
= f
->next
) {
4119 if (0 == (f
->type
.t
& VT_BITFIELD
))
4123 bit_size
= BIT_SIZE(f
->type
.t
);
4126 bit_pos
= BIT_POS(f
->type
.t
);
4127 size
= type_size(&f
->type
, &align
);
4129 if (bit_pos
+ bit_size
<= size
* 8 && f
->c
+ size
<= c
4130 #ifdef TCC_TARGET_ARM
4131 && !(f
->c
& (align
- 1))
4136 /* try to access the field using a different type */
4137 c0
= -1, s
= align
= 1;
4140 px
= f
->c
* 8 + bit_pos
;
4141 cx
= (px
>> 3) & -align
;
4142 px
= px
- (cx
<< 3);
4145 s
= (px
+ bit_size
+ 7) >> 3;
4155 s
= type_size(&t
, &align
);
4159 if (px
+ bit_size
<= s
* 8 && cx
+ s
<= c
4160 #ifdef TCC_TARGET_ARM
4161 && !(cx
& (align
- 1))
4164 /* update offset and bit position */
4167 f
->type
.t
= (f
->type
.t
& ~(0x3f << VT_STRUCT_SHIFT
))
4168 | (bit_pos
<< VT_STRUCT_SHIFT
);
4172 printf("FIX field %s offset %-2d size %-2d align %-2d "
4173 "pos %-2d bits %-2d\n",
4174 get_tok_str(f
->v
& ~SYM_FIELD
, NULL
),
4175 cx
, s
, align
, px
, bit_size
);
4178 /* fall back to load/store single-byte wise */
4179 f
->auxtype
= VT_STRUCT
;
4181 printf("FIX field %s : load byte-wise\n",
4182 get_tok_str(f
->v
& ~SYM_FIELD
, NULL
));
4188 /* enum/struct/union declaration. u is VT_ENUM/VT_STRUCT/VT_UNION */
4189 static void struct_decl(CType
*type
, int u
)
4191 int v
, c
, size
, align
, flexible
;
4192 int bit_size
, bsize
, bt
;
4194 AttributeDef ad
, ad1
;
4197 memset(&ad
, 0, sizeof ad
);
4199 parse_attribute(&ad
);
4203 /* struct already defined ? return it */
4205 expect("struct/union/enum name");
4207 if (s
&& (s
->sym_scope
== local_scope
|| tok
!= '{')) {
4210 if (u
== VT_ENUM
&& IS_ENUM(s
->type
.t
))
4212 tcc_error("redefinition of '%s'", get_tok_str(v
, NULL
));
4217 /* Record the original enum/struct/union token. */
4218 type1
.t
= u
== VT_ENUM
? u
| VT_INT
| VT_UNSIGNED
: u
;
4220 /* we put an undefined size for struct/union */
4221 s
= sym_push(v
| SYM_STRUCT
, &type1
, 0, -1);
4222 s
->r
= 0; /* default alignment is zero as gcc */
4224 type
->t
= s
->type
.t
;
4230 tcc_error("struct/union/enum already defined");
4232 /* cannot be empty */
4233 /* non empty enums are not allowed */
4236 long long ll
= 0, pl
= 0, nl
= 0;
4239 /* enum symbols have static storage */
4240 t
.t
= VT_INT
|VT_STATIC
|VT_ENUM_VAL
;
4244 expect("identifier");
4246 if (ss
&& !local_stack
)
4247 tcc_error("redefinition of enumerator '%s'",
4248 get_tok_str(v
, NULL
));
4252 ll
= expr_const64();
4254 ss
= sym_push(v
, &t
, VT_CONST
, 0);
4256 *ps
= ss
, ps
= &ss
->next
;
4265 /* NOTE: we accept a trailing comma */
4270 /* set integral type of the enum */
4273 if (pl
!= (unsigned)pl
)
4274 t
.t
= (LONG_SIZE
==8 ? VT_LLONG
|VT_LONG
: VT_LLONG
);
4276 } else if (pl
!= (int)pl
|| nl
!= (int)nl
)
4277 t
.t
= (LONG_SIZE
==8 ? VT_LLONG
|VT_LONG
: VT_LLONG
);
4278 s
->type
.t
= type
->t
= t
.t
| VT_ENUM
;
4280 /* set type for enum members */
4281 for (ss
= s
->next
; ss
; ss
= ss
->next
) {
4283 if (ll
== (int)ll
) /* default is int if it fits */
4285 if (t
.t
& VT_UNSIGNED
) {
4286 ss
->type
.t
|= VT_UNSIGNED
;
4287 if (ll
== (unsigned)ll
)
4290 ss
->type
.t
= (ss
->type
.t
& ~VT_BTYPE
)
4291 | (LONG_SIZE
==8 ? VT_LLONG
|VT_LONG
: VT_LLONG
);
4296 while (tok
!= '}') {
4297 if (!parse_btype(&btype
, &ad1
, 0)) {
4303 tcc_error("flexible array member '%s' not at the end of struct",
4304 get_tok_str(v
, NULL
));
4310 type_decl(&type1
, &ad1
, &v
, TYPE_DIRECT
);
4312 if ((type1
.t
& VT_BTYPE
) != VT_STRUCT
)
4313 expect("identifier");
4315 int v
= btype
.ref
->v
;
4316 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
4317 if (tcc_state
->ms_extensions
== 0)
4318 expect("identifier");
4322 if (type_size(&type1
, &align
) < 0) {
4323 if ((u
== VT_STRUCT
) && (type1
.t
& VT_ARRAY
) && c
)
4326 tcc_error("field '%s' has incomplete type",
4327 get_tok_str(v
, NULL
));
4329 if ((type1
.t
& VT_BTYPE
) == VT_FUNC
||
4330 (type1
.t
& VT_BTYPE
) == VT_VOID
||
4331 (type1
.t
& VT_STORAGE
))
4332 tcc_error("invalid type for '%s'",
4333 get_tok_str(v
, NULL
));
4337 bit_size
= expr_const();
4338 /* XXX: handle v = 0 case for messages */
4340 tcc_error("negative width in bit-field '%s'",
4341 get_tok_str(v
, NULL
));
4342 if (v
&& bit_size
== 0)
4343 tcc_error("zero width for bit-field '%s'",
4344 get_tok_str(v
, NULL
));
4345 parse_attribute(&ad1
);
4347 size
= type_size(&type1
, &align
);
4348 if (bit_size
>= 0) {
4349 bt
= type1
.t
& VT_BTYPE
;
4355 tcc_error("bitfields must have scalar type");
4357 if (bit_size
> bsize
) {
4358 tcc_error("width of '%s' exceeds its type",
4359 get_tok_str(v
, NULL
));
4360 } else if (bit_size
== bsize
4361 && !ad
.a
.packed
&& !ad1
.a
.packed
) {
4362 /* no need for bit fields */
4364 } else if (bit_size
== 64) {
4365 tcc_error("field width 64 not implemented");
4367 type1
.t
= (type1
.t
& ~VT_STRUCT_MASK
)
4369 | (bit_size
<< (VT_STRUCT_SHIFT
+ 6));
4372 if (v
!= 0 || (type1
.t
& VT_BTYPE
) == VT_STRUCT
) {
4373 /* Remember we've seen a real field to check
4374 for placement of flexible array member. */
4377 /* If member is a struct or bit-field, enforce
4378 placing into the struct (as anonymous). */
4380 ((type1
.t
& VT_BTYPE
) == VT_STRUCT
||
4385 ss
= sym_push(v
| SYM_FIELD
, &type1
, 0, 0);
4390 if (tok
== ';' || tok
== TOK_EOF
)
4397 parse_attribute(&ad
);
4398 if (ad
.cleanup_func
) {
4399 tcc_warning("attribute '__cleanup__' ignored on type");
4401 check_fields(type
, 1);
4402 check_fields(type
, 0);
4403 struct_layout(type
, &ad
);
4405 tcc_debug_fix_anon(tcc_state
, type
);
4410 static void sym_to_attr(AttributeDef
*ad
, Sym
*s
)
4412 merge_symattr(&ad
->a
, &s
->a
);
4413 merge_funcattr(&ad
->f
, &s
->f
);
4416 /* Add type qualifiers to a type. If the type is an array then the qualifiers
4417 are added to the element type, copied because it could be a typedef. */
4418 static void parse_btype_qualify(CType
*type
, int qualifiers
)
4420 while (type
->t
& VT_ARRAY
) {
4421 type
->ref
= sym_push(SYM_FIELD
, &type
->ref
->type
, 0, type
->ref
->c
);
4422 type
= &type
->ref
->type
;
4424 type
->t
|= qualifiers
;
4427 /* return 0 if no type declaration. otherwise, return the basic type
4430 static int parse_btype(CType
*type
, AttributeDef
*ad
, int ignore_label
)
4432 int t
, u
, bt
, st
, type_found
, typespec_found
, g
, n
;
4436 memset(ad
, 0, sizeof(AttributeDef
));
4446 /* currently, we really ignore extension */
4456 if (u
== VT_SHORT
|| u
== VT_LONG
) {
4457 if (st
!= -1 || (bt
!= -1 && bt
!= VT_INT
))
4458 tmbt
: tcc_error("too many basic types");
4461 if (bt
!= -1 || (st
!= -1 && u
!= VT_INT
))
4466 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | u
;
4483 memset(&ad1
, 0, sizeof(AttributeDef
));
4484 if (parse_btype(&type1
, &ad1
, 0)) {
4485 type_decl(&type1
, &ad1
, &n
, TYPE_ABSTRACT
);
4487 n
= 1 << (ad1
.a
.aligned
- 1);
4489 type_size(&type1
, &n
);
4492 if (n
< 0 || (n
& (n
- 1)) != 0)
4493 tcc_error("alignment must be a positive power of two");
4496 ad
->a
.aligned
= exact_log2p1(n
);
4500 if ((t
& VT_BTYPE
) == VT_DOUBLE
) {
4501 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_LDOUBLE
;
4502 } else if ((t
& (VT_BTYPE
|VT_LONG
)) == VT_LONG
) {
4503 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_LLONG
;
4510 #ifdef TCC_TARGET_ARM64
4512 /* GCC's __uint128_t appears in some Linux header files. Make it a
4513 synonym for long double to get the size and alignment right. */
4524 if ((t
& (VT_BTYPE
|VT_LONG
)) == VT_LONG
) {
4525 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_LDOUBLE
;
4533 struct_decl(&type1
, VT_ENUM
);
4536 type
->ref
= type1
.ref
;
4539 struct_decl(&type1
, VT_STRUCT
);
4542 struct_decl(&type1
, VT_UNION
);
4545 /* type modifiers */
4549 parse_btype_qualify(type
, VT_ATOMIC
);
4552 parse_expr_type(&type1
);
4553 /* remove all storage modifiers except typedef */
4554 type1
.t
&= ~(VT_STORAGE
&~VT_TYPEDEF
);
4556 sym_to_attr(ad
, type1
.ref
);
4564 parse_btype_qualify(type
, VT_CONSTANT
);
4572 parse_btype_qualify(type
, VT_VOLATILE
);
4579 if ((t
& (VT_DEFSIGN
|VT_UNSIGNED
)) == (VT_DEFSIGN
|VT_UNSIGNED
))
4580 tcc_error("signed and unsigned modifier");
4593 if ((t
& (VT_DEFSIGN
|VT_UNSIGNED
)) == VT_DEFSIGN
)
4594 tcc_error("signed and unsigned modifier");
4595 t
|= VT_DEFSIGN
| VT_UNSIGNED
;
4611 if (t
& (VT_EXTERN
|VT_STATIC
|VT_TYPEDEF
) & ~g
)
4612 tcc_error("multiple storage classes");
4624 ad
->f
.func_noreturn
= 1;
4626 /* GNUC attribute */
4627 case TOK_ATTRIBUTE1
:
4628 case TOK_ATTRIBUTE2
:
4629 parse_attribute(ad
);
4630 if (ad
->attr_mode
) {
4631 u
= ad
->attr_mode
-1;
4632 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | u
;
4640 parse_expr_type(&type1
);
4641 /* remove all storage modifiers except typedef */
4642 type1
.t
&= ~(VT_STORAGE
&~VT_TYPEDEF
);
4644 sym_to_attr(ad
, type1
.ref
);
4650 if (!s
|| !(s
->type
.t
& VT_TYPEDEF
))
4654 if (tok
== ':' && ignore_label
) {
4655 /* ignore if it's a label */
4660 t
&= ~(VT_BTYPE
|VT_LONG
);
4661 u
= t
& ~(VT_CONSTANT
| VT_VOLATILE
), t
^= u
;
4662 type
->t
= (s
->type
.t
& ~VT_TYPEDEF
) | u
;
4663 type
->ref
= s
->type
.ref
;
4665 parse_btype_qualify(type
, t
);
4667 /* get attributes from typedef */
4676 if (tcc_state
->char_is_unsigned
) {
4677 if ((t
& (VT_DEFSIGN
|VT_BTYPE
)) == VT_BYTE
)
4680 /* VT_LONG is used just as a modifier for VT_INT / VT_LLONG */
4681 bt
= t
& (VT_BTYPE
|VT_LONG
);
4683 t
|= LONG_SIZE
== 8 ? VT_LLONG
: VT_INT
;
4684 #ifdef TCC_USING_DOUBLE_FOR_LDOUBLE
4685 if (bt
== VT_LDOUBLE
)
4686 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | (VT_DOUBLE
|VT_LONG
);
4692 /* convert a function parameter type (array to pointer and function to
4693 function pointer) */
/* NOTE(review): fragmented extract -- the array-to-pointer conversion
   lines are missing from this view; visible code kept verbatim. */
4694 static inline void convert_parameter_type(CType
*pt
)
4696 /* remove const and volatile qualifiers (XXX: const could be used
4697 to indicate a const function parameter */
4698 pt
->t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
4699 /* array must be transformed to pointer according to ANSI C */
/* function parameter: decays to pointer-to-function */
4701 if ((pt
->t
& VT_BTYPE
) == VT_FUNC
) {
/* Parse the (possibly concatenated) string constant of an asm
   statement into 'astr'. */
4706 ST_FUNC
void parse_asm_str(CString
*astr
)
4709 parse_mult_str(astr
, "string constant");
4712 /* Parse an asm label and return the token */
/* NOTE(review): fragmented extract -- surrounding lines missing. */
4713 static int asm_label_instr(void)
4719 parse_asm_str(&astr
);
/* debug trace of the parsed alias string */
4722 printf("asm_alias: \"%s\"\n", (char *)astr
.data
);
/* intern the label text (minus the trailing NUL) as a token */
4724 v
= tok_alloc(astr
.data
, astr
.size
- 1)->tok
;
4729 static int post_type(CType
*type
, AttributeDef
*ad
, int storage
, int td
)
4731 int n
, l
, t1
, arg_size
, align
, unused_align
;
4732 Sym
**plast
, *s
, *first
;
4735 TokenString
*vla_array_tok
= NULL
;
4736 int *vla_array_str
= NULL
;
4739 /* function type, or recursive declarator (return if so) */
4741 if (TYPE_DIRECT
== (td
& (TYPE_DIRECT
|TYPE_ABSTRACT
)))
4745 else if (parse_btype(&pt
, &ad1
, 0))
4747 else if (td
& (TYPE_DIRECT
|TYPE_ABSTRACT
)) {
4748 merge_attr (ad
, &ad1
);
4759 /* read param name and compute offset */
4760 if (l
!= FUNC_OLD
) {
4761 if ((pt
.t
& VT_BTYPE
) == VT_VOID
&& tok
== ')')
4763 type_decl(&pt
, &ad1
, &n
, TYPE_DIRECT
| TYPE_ABSTRACT
| TYPE_PARAM
);
4764 if ((pt
.t
& VT_BTYPE
) == VT_VOID
)
4765 tcc_error("parameter declared as void");
4770 pt
.t
= VT_VOID
; /* invalid type */
4775 expect("identifier");
4776 convert_parameter_type(&pt
);
4777 arg_size
+= (type_size(&pt
, &align
) + PTR_SIZE
- 1) / PTR_SIZE
;
4778 s
= sym_push(n
, &pt
, 0, 0);
4784 if (l
== FUNC_NEW
&& tok
== TOK_DOTS
) {
4789 if (l
== FUNC_NEW
&& !parse_btype(&pt
, &ad1
, 0))
4790 tcc_error("invalid type");
4793 /* if no parameters, then old type prototype */
4796 /* remove parameter symbols from token table, keep on stack */
4798 sym_pop(local_stack
? &local_stack
: &global_stack
, first
->prev
, 1);
4799 for (s
= first
; s
; s
= s
->next
)
4803 /* NOTE: const is ignored in returned type as it has a special
4804 meaning in gcc / C++ */
4805 type
->t
&= ~VT_CONSTANT
;
4806 /* some ancient pre-K&R C allows a function to return an array
4807 and the array brackets to be put after the arguments, such
4808 that "int c()[]" means something like "int[] c()" */
4811 skip(']'); /* only handle simple "[]" */
4814 /* we push a anonymous symbol which will contain the function prototype */
4815 ad
->f
.func_args
= arg_size
;
4816 ad
->f
.func_type
= l
;
4817 s
= sym_push(SYM_FIELD
, type
, 0, 0);
4823 } else if (tok
== '[') {
4824 int saved_nocode_wanted
= nocode_wanted
;
4825 /* array definition */
4829 if (td
& TYPE_PARAM
) while (1) {
4830 /* XXX The optional type-quals and static should only be accepted
4831 in parameter decls. The '*' as well, and then even only
4832 in prototypes (not function defs). */
4834 case TOK_RESTRICT1
: case TOK_RESTRICT2
: case TOK_RESTRICT3
:
4847 /* Code generation is not done now but has to be done
4848 at start of function. Save code here for later use. */
4850 vla_array_tok
= tok_str_alloc();
4859 tok_str_add_tok(vla_array_tok
);
4863 tok_str_add(vla_array_tok
, -1);
4864 tok_str_add(vla_array_tok
, 0);
4865 vla_array_str
= vla_array_tok
->str
;
4866 begin_macro(vla_array_tok
, 2);
4875 } else if (tok
!= ']') {
4876 if (!local_stack
|| (storage
& VT_STATIC
))
4877 vpushi(expr_const());
4879 /* VLAs (which can only happen with local_stack && !VT_STATIC)
4880 length must always be evaluated, even under nocode_wanted,
4881 so that its size slot is initialized (e.g. under sizeof
4887 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
4890 tcc_error("invalid array size");
4892 if (!is_integer_btype(vtop
->type
.t
& VT_BTYPE
))
4893 tcc_error("size of variable length array should be an integer");
4899 /* parse next post type */
4900 post_type(type
, ad
, storage
, (td
& ~(TYPE_DIRECT
|TYPE_ABSTRACT
)) | TYPE_NEST
);
4902 if ((type
->t
& VT_BTYPE
) == VT_FUNC
)
4903 tcc_error("declaration of an array of functions");
4904 if ((type
->t
& VT_BTYPE
) == VT_VOID
4905 || type_size(type
, &unused_align
) < 0)
4906 tcc_error("declaration of an array of incomplete type elements");
4908 t1
|= type
->t
& VT_VLA
;
4913 tcc_error("need explicit inner array size in VLAs");
4916 loc
-= type_size(&int_type
, &align
);
4920 vpush_type_size(type
, &align
);
4922 vset(&int_type
, VT_LOCAL
|VT_LVAL
, n
);
4929 nocode_wanted
= saved_nocode_wanted
;
4931 /* we push an anonymous symbol which will contain the array
4933 s
= sym_push(SYM_FIELD
, type
, 0, n
);
4934 type
->t
= (t1
? VT_VLA
: VT_ARRAY
) | VT_PTR
;
4936 if (vla_array_str
) {
4938 s
->vla_array_str
= vla_array_str
;
4940 tok_str_free_str(vla_array_str
);
4946 /* Parse a type declarator (except basic type), and return the type
4947 in 'type'. 'td' is a bitmask indicating which kind of type decl is
4948 expected. 'type' should contain the basic type. 'ad' is the
4949 attribute definition of the basic type. It can be modified by
4950 type_decl(). If this (possibly abstract) declarator is a pointer chain
4951 it returns the innermost pointed to type (equals *type, but is a different
4952 pointer), otherwise returns type itself, that's used for recursive calls. */
4953 static CType
*type_decl(CType
*type
, AttributeDef
*ad
, int *v
, int td
)
4956 int qualifiers
, storage
;
4958 /* recursive type, remove storage bits first, apply them later again */
4959 storage
= type
->t
& VT_STORAGE
;
4960 type
->t
&= ~VT_STORAGE
;
4963 while (tok
== '*') {
4969 qualifiers
|= VT_ATOMIC
;
4974 qualifiers
|= VT_CONSTANT
;
4979 qualifiers
|= VT_VOLATILE
;
4985 /* XXX: clarify attribute handling */
4986 case TOK_ATTRIBUTE1
:
4987 case TOK_ATTRIBUTE2
:
4988 parse_attribute(ad
);
4992 type
->t
|= qualifiers
;
4994 /* innermost pointed to type is the one for the first derivation */
4995 ret
= pointed_type(type
);
4999 /* This is possibly a parameter type list for abstract declarators
5000 ('int ()'), use post_type for testing this. */
5001 if (!post_type(type
, ad
, 0, td
)) {
5002 /* It's not, so it's a nested declarator, and the post operations
5003 apply to the innermost pointed to type (if any). */
5004 /* XXX: this is not correct to modify 'ad' at this point, but
5005 the syntax is not clear */
5006 parse_attribute(ad
);
5007 post
= type_decl(type
, ad
, v
, td
);
5011 } else if (tok
>= TOK_IDENT
&& (td
& TYPE_DIRECT
)) {
5012 /* type identifier */
5017 if (!(td
& TYPE_ABSTRACT
))
5018 expect("identifier");
5021 post_type(post
, ad
, storage
, td
& ~(TYPE_DIRECT
|TYPE_ABSTRACT
));
5022 parse_attribute(ad
);
5027 /* indirection with full error checking and bound check */
/* NOTE(review): fragmented extract -- some interior lines are missing;
   visible code kept verbatim. */
5028 ST_FUNC
void indir(void)
/* operand must have pointer type (functions get special handling) */
5030 if ((vtop
->type
.t
& VT_BTYPE
) != VT_PTR
) {
5031 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FUNC
)
5035 if (vtop
->r
& VT_LVAL
)
/* replace the pointer type on vtop by the pointed-to type */
5037 vtop
->type
= *pointed_type(&vtop
->type
);
5038 /* Arrays and functions are never lvalues */
5039 if (!(vtop
->type
.t
& (VT_ARRAY
| VT_VLA
))
5040 && (vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
) {
5042 /* if bound checking, the referenced pointer must be checked */
5043 #ifdef CONFIG_TCC_BCHECK
5044 if (tcc_state
->do_bounds_check
)
5045 vtop
->r
|= VT_MUSTBOUND
;
5050 /* pass a parameter to a function and do type checking and casting */
5051 static void gfunc_param_typed(Sym
*func
, Sym
*arg
)
5056 func_type
= func
->f
.func_type
;
5057 if (func_type
== FUNC_OLD
||
5058 (func_type
== FUNC_ELLIPSIS
&& arg
== NULL
)) {
5059 /* default casting : only need to convert float to double */
5060 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FLOAT
) {
5061 gen_cast_s(VT_DOUBLE
);
5062 } else if (vtop
->type
.t
& VT_BITFIELD
) {
5063 type
.t
= vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
);
5064 type
.ref
= vtop
->type
.ref
;
5066 } else if (vtop
->r
& VT_MUSTCAST
) {
5067 force_charshort_cast();
5069 } else if (arg
== NULL
) {
5070 tcc_error("too many arguments to function");
5073 type
.t
&= ~VT_CONSTANT
; /* need to do that to avoid false warning */
5074 gen_assign_cast(&type
);
5078 /* parse an expression and return its type without any side effect. */
5079 static void expr_type(CType
*type
, void (*expr_fn
)(void))
5088 /* parse an expression of the form '(type)' or '(expr)' and return its
5090 static void parse_expr_type(CType
*type
)
5096 if (parse_btype(type
, &ad
, 0)) {
5097 type_decl(type
, &ad
, &n
, TYPE_ABSTRACT
);
5099 expr_type(type
, gexpr
);
5104 static void parse_type(CType
*type
)
5109 if (!parse_btype(type
, &ad
, 0)) {
5112 type_decl(type
, &ad
, &n
, TYPE_ABSTRACT
);
5115 static void parse_builtin_params(int nc
, const char *args
)
5124 while ((c
= *args
++)) {
5139 type
.t
= VT_CONSTANT
;
5145 type
.t
= VT_CONSTANT
;
5147 type
.t
|= char_type
.t
;
5159 gen_assign_cast(&type
);
5166 static void parse_atomic(int atok
)
5168 int size
, align
, arg
;
5169 CType
*atom
, *atom_ptr
, ct
= {0};
5171 static const char *const templates
[] = {
5173 * Each entry consists of callback and function template.
5174 * The template represents argument types and return type.
5176 * ? void (return-only)
5179 * A read-only atomic
5180 * p pointer to memory
5185 /* keep in order of appearance in tcctok.h: */
5186 /* __atomic_store */ "avm.?",
5187 /* __atomic_load */ "Am.v",
5188 /* __atomic_exchange */ "avm.v",
5189 /* __atomic_compare_exchange */ "apvbmm.b",
5190 /* __atomic_fetch_add */ "avm.v",
5191 /* __atomic_fetch_sub */ "avm.v",
5192 /* __atomic_fetch_or */ "avm.v",
5193 /* __atomic_fetch_xor */ "avm.v",
5194 /* __atomic_fetch_and */ "avm.v"
5196 const char *template = templates
[(atok
- TOK___atomic_store
)];
5198 atom
= atom_ptr
= NULL
;
5199 size
= 0; /* pacify compiler */
5204 switch (template[arg
]) {
5207 atom_ptr
= &vtop
->type
;
5208 if ((atom_ptr
->t
& VT_BTYPE
) != VT_PTR
)
5210 atom
= pointed_type(atom_ptr
);
5211 size
= type_size(atom
, &align
);
5213 || (size
& (size
- 1))
5214 || (atok
> TOK___atomic_compare_exchange
5215 && (0 == btype_size(atom
->t
& VT_BTYPE
)
5216 || (atom
->t
& VT_BTYPE
) == VT_PTR
)))
5217 expect("integral or integer-sized pointer target type");
5218 /* GCC does not care either: */
5219 /* if (!(atom->t & VT_ATOMIC))
5220 tcc_warning("pointer target declaration is missing '_Atomic'"); */
5224 if ((vtop
->type
.t
& VT_BTYPE
) != VT_PTR
5225 || type_size(pointed_type(&vtop
->type
), &align
) != size
)
5226 tcc_error("pointer target type mismatch in argument %d", arg
+ 1);
5227 gen_assign_cast(atom_ptr
);
5230 gen_assign_cast(atom
);
5233 gen_assign_cast(&int_type
);
5237 gen_assign_cast(&ct
);
5240 if ('.' == template[++arg
])
5247 switch (template[arg
+ 1]) {
5256 sprintf(buf
, "%s_%d", get_tok_str(atok
, 0), size
);
5257 vpush_helper_func(tok_alloc_const(buf
));
5262 PUT_R_RET(vtop
, ct
.t
);
5263 if (ct
.t
== VT_BOOL
) {
5265 vtop
->r
|= BFVAL(VT_MUSTCAST
, 1);
5267 vtop
->type
.t
= VT_INT
;
5272 ST_FUNC
void unary(void)
5274 int n
, t
, align
, size
, r
, sizeof_caller
;
5279 /* generate line number info */
5281 tcc_debug_line(tcc_state
), tcc_tcov_check_line (tcc_state
, 1);
5283 sizeof_caller
= in_sizeof
;
5286 /* XXX: GCC 2.95.3 does not generate a table although it should be
5294 #ifdef TCC_TARGET_PE
5295 t
= VT_SHORT
|VT_UNSIGNED
;
5303 vsetc(&type
, VT_CONST
, &tokc
);
5307 t
= VT_INT
| VT_UNSIGNED
;
5313 t
= VT_LLONG
| VT_UNSIGNED
;
5325 t
= (LONG_SIZE
== 8 ? VT_LLONG
: VT_INT
) | VT_LONG
;
5328 t
= (LONG_SIZE
== 8 ? VT_LLONG
: VT_INT
) | VT_LONG
| VT_UNSIGNED
;
5330 case TOK___FUNCTION__
:
5332 goto tok_identifier
;
5338 /* special function name identifier */
5339 len
= strlen(funcname
) + 1;
5340 /* generate char[len] type */
5341 type
.t
= char_type
.t
;
5342 if (tcc_state
->warn_write_strings
& WARN_ON
)
5343 type
.t
|= VT_CONSTANT
;
5347 sec
= rodata_section
;
5348 vpush_ref(&type
, sec
, sec
->data_offset
, len
);
5350 memcpy(section_ptr_add(sec
, len
), funcname
, len
);
5355 #ifdef TCC_TARGET_PE
5356 t
= VT_SHORT
| VT_UNSIGNED
;
5362 /* string parsing */
5365 if (tcc_state
->warn_write_strings
& WARN_ON
)
5370 memset(&ad
, 0, sizeof(AttributeDef
));
5371 ad
.section
= rodata_section
;
5372 decl_initializer_alloc(&type
, &ad
, VT_CONST
, 2, 0, 0);
5377 if (parse_btype(&type
, &ad
, 0)) {
5378 type_decl(&type
, &ad
, &n
, TYPE_ABSTRACT
);
5380 /* check ISOC99 compound literal */
5382 /* data is allocated locally by default */
5387 /* all except arrays are lvalues */
5388 if (!(type
.t
& VT_ARRAY
))
5390 memset(&ad
, 0, sizeof(AttributeDef
));
5391 decl_initializer_alloc(&type
, &ad
, r
, 1, 0, 0);
5393 if (sizeof_caller
) {
5400 } else if (tok
== '{') {
5401 int saved_nocode_wanted
= nocode_wanted
;
5402 if (const_wanted
&& !(nocode_wanted
& unevalmask
))
5404 if (0 == local_scope
)
5405 tcc_error("statement expression outside of function");
5406 /* save all registers */
5408 /* statement expression : we do not accept break/continue
5409 inside as GCC does. We do retain the nocode_wanted state,
5410 as statement expressions can't ever be entered from the
5411 outside, so any reactivation of code emission (from labels
5412 or loop heads) can be disabled again after the end of it. */
5414 /* If the statement expr can be entered, then we retain the current
5415 nocode_wanted state (from e.g. a 'return 0;' in the stmt-expr).
5416 If it can't be entered then the state is that from before the
5417 statement expression. */
5418 if (saved_nocode_wanted
)
5419 nocode_wanted
= saved_nocode_wanted
;
5434 /* functions names must be treated as function pointers,
5435 except for unary '&' and sizeof. Since we consider that
5436 functions are not lvalues, we only have to handle it
5437 there and in function calls. */
5438 /* arrays can also be used although they are not lvalues */
5439 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
&&
5440 !(vtop
->type
.t
& VT_ARRAY
))
5443 vtop
->sym
->a
.addrtaken
= 1;
5444 mk_pointer(&vtop
->type
);
5450 gen_test_zero(TOK_EQ
);
5461 if ((vtop
->type
.t
& VT_BTYPE
) == VT_PTR
)
5462 tcc_error("pointer not accepted for unary plus");
5463 /* In order to force cast, we add zero, except for floating point
5464 where we really need an noop (otherwise -0.0 will be transformed
5466 if (!is_float(vtop
->type
.t
)) {
5478 expr_type(&type
, unary
); /* Perform a in_sizeof = 0; */
5479 if (t
== TOK_SIZEOF
) {
5480 vpush_type_size(&type
, &align
);
5481 gen_cast_s(VT_SIZE_T
);
5483 type_size(&type
, &align
);
5485 if (vtop
[1].r
& VT_SYM
)
5486 s
= vtop
[1].sym
; /* hack: accessing previous vtop */
5487 if (s
&& s
->a
.aligned
)
5488 align
= 1 << (s
->a
.aligned
- 1);
5493 case TOK_builtin_expect
:
5494 /* __builtin_expect is a no-op for now */
5495 parse_builtin_params(0, "ee");
5498 case TOK_builtin_types_compatible_p
:
5499 parse_builtin_params(0, "tt");
5500 vtop
[-1].type
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
5501 vtop
[0].type
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
5502 n
= is_compatible_types(&vtop
[-1].type
, &vtop
[0].type
);
5506 case TOK_builtin_choose_expr
:
5533 case TOK_builtin_constant_p
:
5534 parse_builtin_params(1, "e");
5535 n
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
&&
5536 !((vtop
->r
& VT_SYM
) && vtop
->sym
->a
.addrtaken
);
5540 case TOK_builtin_frame_address
:
5541 case TOK_builtin_return_address
:
5547 if (tok
!= TOK_CINT
) {
5548 tcc_error("%s only takes positive integers",
5549 tok1
== TOK_builtin_return_address
?
5550 "__builtin_return_address" :
5551 "__builtin_frame_address");
5553 level
= (uint32_t)tokc
.i
;
5558 vset(&type
, VT_LOCAL
, 0); /* local frame */
5560 #ifdef TCC_TARGET_RISCV64
5564 mk_pointer(&vtop
->type
);
5565 indir(); /* -> parent frame */
5567 if (tok1
== TOK_builtin_return_address
) {
5568 // assume return address is just above frame pointer on stack
5569 #ifdef TCC_TARGET_ARM
5572 #elif defined TCC_TARGET_RISCV64
5579 mk_pointer(&vtop
->type
);
5584 #ifdef TCC_TARGET_RISCV64
5585 case TOK_builtin_va_start
:
5586 parse_builtin_params(0, "ee");
5587 r
= vtop
->r
& VT_VALMASK
;
5591 tcc_error("__builtin_va_start expects a local variable");
5596 #ifdef TCC_TARGET_X86_64
5597 #ifdef TCC_TARGET_PE
5598 case TOK_builtin_va_start
:
5599 parse_builtin_params(0, "ee");
5600 r
= vtop
->r
& VT_VALMASK
;
5604 tcc_error("__builtin_va_start expects a local variable");
5606 vtop
->type
= char_pointer_type
;
5611 case TOK_builtin_va_arg_types
:
5612 parse_builtin_params(0, "t");
5613 vpushi(classify_x86_64_va_arg(&vtop
->type
));
5620 #ifdef TCC_TARGET_ARM64
5621 case TOK_builtin_va_start
: {
5622 parse_builtin_params(0, "ee");
5626 vtop
->type
.t
= VT_VOID
;
5629 case TOK_builtin_va_arg
: {
5630 parse_builtin_params(0, "et");
5638 case TOK___arm64_clear_cache
: {
5639 parse_builtin_params(0, "ee");
5642 vtop
->type
.t
= VT_VOID
;
5647 /* atomic operations */
5648 case TOK___atomic_store
:
5649 case TOK___atomic_load
:
5650 case TOK___atomic_exchange
:
5651 case TOK___atomic_compare_exchange
:
5652 case TOK___atomic_fetch_add
:
5653 case TOK___atomic_fetch_sub
:
5654 case TOK___atomic_fetch_or
:
5655 case TOK___atomic_fetch_xor
:
5656 case TOK___atomic_fetch_and
:
5660 /* pre operations */
5671 if (is_float(vtop
->type
.t
)) {
5681 goto tok_identifier
;
5683 /* allow to take the address of a label */
5684 if (tok
< TOK_UIDENT
)
5685 expect("label identifier");
5686 s
= label_find(tok
);
5688 s
= label_push(&global_label_stack
, tok
, LABEL_FORWARD
);
5690 if (s
->r
== LABEL_DECLARED
)
5691 s
->r
= LABEL_FORWARD
;
5693 if ((s
->type
.t
& VT_BTYPE
) != VT_PTR
) {
5694 s
->type
.t
= VT_VOID
;
5695 mk_pointer(&s
->type
);
5696 s
->type
.t
|= VT_STATIC
;
5698 vpushsym(&s
->type
, s
);
5704 CType controlling_type
;
5705 int has_default
= 0;
5708 TokenString
*str
= NULL
;
5709 int saved_const_wanted
= const_wanted
;
5714 expr_type(&controlling_type
, expr_eq
);
5715 controlling_type
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
| VT_ARRAY
);
5716 if ((controlling_type
.t
& VT_BTYPE
) == VT_FUNC
)
5717 mk_pointer(&controlling_type
);
5718 const_wanted
= saved_const_wanted
;
5722 if (tok
== TOK_DEFAULT
) {
5724 tcc_error("too many 'default'");
5730 AttributeDef ad_tmp
;
5734 parse_btype(&cur_type
, &ad_tmp
, 0);
5735 type_decl(&cur_type
, &ad_tmp
, &itmp
, TYPE_ABSTRACT
);
5736 if (compare_types(&controlling_type
, &cur_type
, 0)) {
5738 tcc_error("type match twice");
5748 skip_or_save_block(&str
);
5750 skip_or_save_block(NULL
);
5757 type_to_str(buf
, sizeof buf
, &controlling_type
, NULL
);
5758 tcc_error("type '%s' does not match any association", buf
);
5760 begin_macro(str
, 1);
5769 // special qnan , snan and infinity values
5774 vtop
->type
.t
= VT_FLOAT
;
5779 goto special_math_val
;
5782 goto special_math_val
;
5789 expect("identifier");
5791 if (!s
|| IS_ASM_SYM(s
)) {
5792 const char *name
= get_tok_str(t
, NULL
);
5794 tcc_error("'%s' undeclared", name
);
5795 /* for simple function calls, we tolerate undeclared
5796 external reference to int() function */
5797 tcc_warning_c(warn_implicit_function_declaration
)(
5798 "implicit declaration of function '%s'", name
);
5799 s
= external_global_sym(t
, &func_old_type
);
5803 /* A symbol that has a register is a local register variable,
5804 which starts out as VT_LOCAL value. */
5805 if ((r
& VT_VALMASK
) < VT_CONST
)
5806 r
= (r
& ~VT_VALMASK
) | VT_LOCAL
;
5808 vset(&s
->type
, r
, s
->c
);
5809 /* Point to s as backpointer (even without r&VT_SYM).
5810 Will be used by at least the x86 inline asm parser for
5816 } else if (r
== VT_CONST
&& IS_ENUM_VAL(s
->type
.t
)) {
5817 vtop
->c
.i
= s
->enum_val
;
5822 /* post operations */
5824 if (tok
== TOK_INC
|| tok
== TOK_DEC
) {
5827 } else if (tok
== '.' || tok
== TOK_ARROW
|| tok
== TOK_CDOUBLE
) {
5828 int qualifiers
, cumofs
= 0;
5830 if (tok
== TOK_ARROW
)
5832 qualifiers
= vtop
->type
.t
& (VT_CONSTANT
| VT_VOLATILE
);
5835 /* expect pointer on structure */
5836 if ((vtop
->type
.t
& VT_BTYPE
) != VT_STRUCT
)
5837 expect("struct or union");
5838 if (tok
== TOK_CDOUBLE
)
5839 expect("field name");
5841 if (tok
== TOK_CINT
|| tok
== TOK_CUINT
)
5842 expect("field name");
5843 s
= find_field(&vtop
->type
, tok
, &cumofs
);
5845 tcc_error("field not found: %s", get_tok_str(tok
& ~SYM_FIELD
, &tokc
));
5846 /* add field offset to pointer */
5847 vtop
->type
= char_pointer_type
; /* change type to 'char *' */
5848 vpushi(cumofs
+ s
->c
);
5850 /* change type to field type, and set to lvalue */
5851 vtop
->type
= s
->type
;
5852 vtop
->type
.t
|= qualifiers
;
5853 /* an array is never an lvalue */
5854 if (!(vtop
->type
.t
& VT_ARRAY
)) {
5856 #ifdef CONFIG_TCC_BCHECK
5857 /* if bound checking, the referenced pointer must be checked */
5858 if (tcc_state
->do_bounds_check
)
5859 vtop
->r
|= VT_MUSTBOUND
;
5863 } else if (tok
== '[') {
5869 } else if (tok
== '(') {
5872 int nb_args
, ret_nregs
, ret_align
, regsize
, variadic
;
5875 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
) {
5876 /* pointer test (no array accepted) */
5877 if ((vtop
->type
.t
& (VT_BTYPE
| VT_ARRAY
)) == VT_PTR
) {
5878 vtop
->type
= *pointed_type(&vtop
->type
);
5879 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
)
5883 expect("function pointer");
5886 vtop
->r
&= ~VT_LVAL
; /* no lvalue */
5888 /* get return type */
5891 sa
= s
->next
; /* first parameter */
5892 nb_args
= regsize
= 0;
5894 /* compute first implicit argument if a structure is returned */
5895 if ((s
->type
.t
& VT_BTYPE
) == VT_STRUCT
) {
5896 variadic
= (s
->f
.func_type
== FUNC_ELLIPSIS
);
5897 ret_nregs
= gfunc_sret(&s
->type
, variadic
, &ret
.type
,
5898 &ret_align
, ®size
);
5899 if (ret_nregs
<= 0) {
5900 /* get some space for the returned structure */
5901 size
= type_size(&s
->type
, &align
);
5902 #ifdef TCC_TARGET_ARM64
5903 /* On arm64, a small struct is return in registers.
5904 It is much easier to write it to memory if we know
5905 that we are allowed to write some extra bytes, so
5906 round the allocated space up to a power of 2: */
5908 while (size
& (size
- 1))
5909 size
= (size
| (size
- 1)) + 1;
5911 loc
= (loc
- size
) & -align
;
5913 ret
.r
= VT_LOCAL
| VT_LVAL
;
5914 /* pass it as 'int' to avoid structure arg passing
5916 vseti(VT_LOCAL
, loc
);
5917 #ifdef CONFIG_TCC_BCHECK
5918 if (tcc_state
->do_bounds_check
)
5932 if (ret_nregs
> 0) {
5933 /* return in register */
5935 PUT_R_RET(&ret
, ret
.type
.t
);
5940 gfunc_param_typed(s
, sa
);
5950 tcc_error("too few arguments to function");
5952 gfunc_call(nb_args
);
5954 if (ret_nregs
< 0) {
5955 vsetc(&ret
.type
, ret
.r
, &ret
.c
);
5956 #ifdef TCC_TARGET_RISCV64
5957 arch_transfer_ret_regs(1);
5961 for (r
= ret
.r
+ ret_nregs
+ !ret_nregs
; r
-- > ret
.r
;) {
5962 vsetc(&ret
.type
, r
, &ret
.c
);
5963 vtop
->r2
= ret
.r2
; /* Loop only happens when r2 is VT_CONST */
5966 /* handle packed struct return */
5967 if (((s
->type
.t
& VT_BTYPE
) == VT_STRUCT
) && ret_nregs
) {
5970 size
= type_size(&s
->type
, &align
);
5971 /* We're writing whole regs often, make sure there's enough
5972 space. Assume register size is power of 2. */
5973 if (regsize
> align
)
5975 loc
= (loc
- size
) & -align
;
5979 vset(&ret
.type
, VT_LOCAL
| VT_LVAL
, addr
+ offset
);
5983 if (--ret_nregs
== 0)
5987 vset(&s
->type
, VT_LOCAL
| VT_LVAL
, addr
);
5990 /* Promote char/short return values. This is matters only
5991 for calling function that were not compiled by TCC and
5992 only on some architectures. For those where it doesn't
5993 matter we expect things to be already promoted to int,
5995 t
= s
->type
.t
& VT_BTYPE
;
5996 if (t
== VT_BYTE
|| t
== VT_SHORT
|| t
== VT_BOOL
) {
5998 vtop
->r
|= BFVAL(VT_MUSTCAST
, 1);
6000 vtop
->type
.t
= VT_INT
;
6004 if (s
->f
.func_noreturn
) {
6006 tcc_tcov_block_end(tcc_state
, -1);
6015 #ifndef precedence_parser /* original top-down parser */
6017 static void expr_prod(void)
6022 while ((t
= tok
) == '*' || t
== '/' || t
== '%') {
6029 static void expr_sum(void)
6034 while ((t
= tok
) == '+' || t
== '-') {
6041 static void expr_shift(void)
6046 while ((t
= tok
) == TOK_SHL
|| t
== TOK_SAR
) {
6053 static void expr_cmp(void)
6058 while (((t
= tok
) >= TOK_ULE
&& t
<= TOK_GT
) ||
6059 t
== TOK_ULT
|| t
== TOK_UGE
) {
6066 static void expr_cmpeq(void)
6071 while ((t
= tok
) == TOK_EQ
|| t
== TOK_NE
) {
6078 static void expr_and(void)
6081 while (tok
== '&') {
6088 static void expr_xor(void)
6091 while (tok
== '^') {
6098 static void expr_or(void)
6101 while (tok
== '|') {
6108 static void expr_landor(int op
);
6110 static void expr_land(void)
6113 if (tok
== TOK_LAND
)
6117 static void expr_lor(void)
6124 # define expr_landor_next(op) op == TOK_LAND ? expr_or() : expr_land()
6125 #else /* defined precedence_parser */
6126 # define expr_landor_next(op) unary(), expr_infix(precedence(op) + 1)
6127 # define expr_lor() unary(), expr_infix(1)
6129 static int precedence(int tok
)
6132 case TOK_LOR
: return 1;
6133 case TOK_LAND
: return 2;
6137 case TOK_EQ
: case TOK_NE
: return 6;
6138 relat
: case TOK_ULT
: case TOK_UGE
: return 7;
6139 case TOK_SHL
: case TOK_SAR
: return 8;
6140 case '+': case '-': return 9;
6141 case '*': case '/': case '%': return 10;
6143 if (tok
>= TOK_ULE
&& tok
<= TOK_GT
)
6148 static unsigned char prec
[256];
6149 static void init_prec(void)
6152 for (i
= 0; i
< 256; i
++)
6153 prec
[i
] = precedence(i
);
6155 #define precedence(i) ((unsigned)i < 256 ? prec[i] : 0)
6157 static void expr_landor(int op
);
6159 static void expr_infix(int p
)
6162 while ((p2
= precedence(t
)) >= p
) {
6163 if (t
== TOK_LOR
|| t
== TOK_LAND
) {
6168 if (precedence(tok
) > p2
)
6177 /* Assuming vtop is a value used in a conditional context
6178 (i.e. compared with zero) return 0 if it's false, 1 if
6179 true and -1 if it can't be statically determined. */
6180 static int condition_3way(void)
6183 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
&&
6184 (!(vtop
->r
& VT_SYM
) || !vtop
->sym
->a
.weak
)) {
6186 gen_cast_s(VT_BOOL
);
6193 static void expr_landor(int op
)
6195 int t
= 0, cc
= 1, f
= 0, i
= op
== TOK_LAND
, c
;
6197 c
= f
? i
: condition_3way();
6199 save_regs(1), cc
= 0;
6201 nocode_wanted
++, f
= 1;
6209 expr_landor_next(op
);
6221 static int is_cond_bool(SValue
*sv
)
6223 if ((sv
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
6224 && (sv
->type
.t
& VT_BTYPE
) == VT_INT
)
6225 return (unsigned)sv
->c
.i
< 2;
6226 if (sv
->r
== VT_CMP
)
6231 static void expr_cond(void)
6233 int tt
, u
, r1
, r2
, rc
, t1
, t2
, islv
, c
, g
;
6241 c
= condition_3way();
6242 g
= (tok
== ':' && gnu_ext
);
6252 /* needed to avoid having different registers saved in
6259 ncw_prev
= nocode_wanted
;
6265 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FUNC
)
6266 mk_pointer(&vtop
->type
);
6267 sv
= *vtop
; /* save value to handle it later */
6268 vtop
--; /* no vpop so that FP stack is not flushed */
6278 nocode_wanted
= ncw_prev
;
6284 if (c
< 0 && is_cond_bool(vtop
) && is_cond_bool(&sv
)) {
6285 /* optimize "if (f ? a > b : c || d) ..." for example, where normally
6286 "a < b" and "c || d" would be forced to "(int)0/1" first, whereas
6287 this code jumps directly to the if's then/else branches. */
6292 /* combine jump targets of 2nd op with VT_CMP of 1st op */
6295 nocode_wanted
= ncw_prev
;
6296 // tcc_warning("two conditions expr_cond");
6300 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FUNC
)
6301 mk_pointer(&vtop
->type
);
6303 /* cast operands to correct type according to ISOC rules */
6304 if (!combine_types(&type
, &sv
, vtop
, '?'))
6305 type_incompatibility_error(&sv
.type
, &vtop
->type
,
6306 "type mismatch in conditional expression (have '%s' and '%s')");
6307 /* keep structs lvalue by transforming `(expr ? a : b)` to `*(expr ? &a : &b)` so
6308 that `(expr ? a : b).mem` does not error with "lvalue expected" */
6309 islv
= (vtop
->r
& VT_LVAL
) && (sv
.r
& VT_LVAL
) && VT_STRUCT
== (type
.t
& VT_BTYPE
);
6311 /* now we convert second operand */
6315 mk_pointer(&vtop
->type
);
6317 } else if (VT_STRUCT
== (vtop
->type
.t
& VT_BTYPE
))
6321 rc
= RC_TYPE(type
.t
);
6322 /* for long longs, we use fixed registers to avoid having
6323 to handle a complicated move */
6324 if (USING_TWO_WORDS(type
.t
))
6325 rc
= RC_RET(type
.t
);
6333 nocode_wanted
= ncw_prev
;
6335 /* this is horrible, but we must also convert first
6341 mk_pointer(&vtop
->type
);
6343 } else if (VT_STRUCT
== (vtop
->type
.t
& VT_BTYPE
))
6349 move_reg(r2
, r1
, islv
? VT_PTR
: type
.t
);
6359 static void expr_eq(void)
6364 if ((t
= tok
) == '=' || TOK_ASSIGN(t
)) {
6372 gen_op(TOK_ASSIGN_OP(t
));
6378 ST_FUNC
void gexpr(void)
6389 /* parse a constant expression and return value in vtop. */
6390 static void expr_const1(void)
6393 nocode_wanted
+= unevalmask
+ 1;
6395 nocode_wanted
-= unevalmask
+ 1;
6399 /* parse an integer constant and return its value. */
6400 static inline int64_t expr_const64(void)
6404 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) != VT_CONST
)
6405 expect("constant expression");
6411 /* parse an integer constant and return its value.
6412 Complain if it doesn't fit 32bit (signed or unsigned). */
6413 ST_FUNC
int expr_const(void)
6416 int64_t wc
= expr_const64();
6418 if (c
!= wc
&& (unsigned)c
!= wc
)
6419 tcc_error("constant exceeds 32 bit");
6423 /* ------------------------------------------------------------------------- */
6424 /* return from function */
6426 #ifndef TCC_TARGET_ARM64
6427 static void gfunc_return(CType
*func_type
)
6429 if ((func_type
->t
& VT_BTYPE
) == VT_STRUCT
) {
6430 CType type
, ret_type
;
6431 int ret_align
, ret_nregs
, regsize
;
6432 ret_nregs
= gfunc_sret(func_type
, func_var
, &ret_type
,
6433 &ret_align
, ®size
);
6434 if (ret_nregs
< 0) {
6435 #ifdef TCC_TARGET_RISCV64
6436 arch_transfer_ret_regs(0);
6438 } else if (0 == ret_nregs
) {
6439 /* if returning structure, must copy it to implicit
6440 first pointer arg location */
6443 vset(&type
, VT_LOCAL
| VT_LVAL
, func_vc
);
6446 /* copy structure value to pointer */
6449 /* returning structure packed into registers */
6450 int size
, addr
, align
, rc
;
6451 size
= type_size(func_type
,&align
);
6452 if ((vtop
->r
!= (VT_LOCAL
| VT_LVAL
) ||
6453 (vtop
->c
.i
& (ret_align
-1)))
6454 && (align
& (ret_align
-1))) {
6455 loc
= (loc
- size
) & -ret_align
;
6458 vset(&type
, VT_LOCAL
| VT_LVAL
, addr
);
6462 vset(&ret_type
, VT_LOCAL
| VT_LVAL
, addr
);
6464 vtop
->type
= ret_type
;
6465 rc
= RC_RET(ret_type
.t
);
6473 if (--ret_nregs
== 0)
6475 /* We assume that when a structure is returned in multiple
6476 registers, their classes are consecutive values of the
6479 vtop
->c
.i
+= regsize
;
6484 gv(RC_RET(func_type
->t
));
6486 vtop
--; /* NOT vpop() because on x86 it would flush the fp stack */
6490 static void check_func_return(void)
6492 if ((func_vt
.t
& VT_BTYPE
) == VT_VOID
)
6494 if (!strcmp (funcname
, "main")
6495 && (func_vt
.t
& VT_BTYPE
) == VT_INT
) {
6496 /* main returns 0 by default */
6498 gen_assign_cast(&func_vt
);
6499 gfunc_return(&func_vt
);
6501 tcc_warning("function might return no value: '%s'", funcname
);
6505 /* ------------------------------------------------------------------------- */
6508 static int case_cmpi(const void *pa
, const void *pb
)
6510 int64_t a
= (*(struct case_t
**) pa
)->v1
;
6511 int64_t b
= (*(struct case_t
**) pb
)->v1
;
6512 return a
< b
? -1 : a
> b
;
6515 static int case_cmpu(const void *pa
, const void *pb
)
6517 uint64_t a
= (uint64_t)(*(struct case_t
**) pa
)->v1
;
6518 uint64_t b
= (uint64_t)(*(struct case_t
**) pb
)->v1
;
6519 return a
< b
? -1 : a
> b
;
/* Emit a conditional jump for the value on top of the value stack and
   resolve it to address 'a'; 't' is the chained jump target passed
   through to gvtst(). */
static void gtst_addr(int t, int a)
{
    int jmp = gvtst(0, t);
    gsym_addr(jmp, a);
}
6527 static void gcase(struct case_t
**base
, int len
, int *bsym
)
6531 int ll
= (vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
;
6548 gtst_addr(0, p
->sym
); /* v1 <= x <= v2 */
6550 gcase(base
, len
/2, bsym
);
6554 base
+= e
; len
-= e
;
6564 if (p
->v1
== p
->v2
) {
6566 gtst_addr(0, p
->sym
);
6576 gtst_addr(0, p
->sym
);
6580 *bsym
= gjmp(*bsym
);
6583 /* ------------------------------------------------------------------------- */
6584 /* __attribute__((cleanup(fn))) */
6586 static void try_call_scope_cleanup(Sym
*stop
)
6588 Sym
*cls
= cur_scope
->cl
.s
;
6590 for (; cls
!= stop
; cls
= cls
->ncl
) {
6591 Sym
*fs
= cls
->next
;
6592 Sym
*vs
= cls
->prev_tok
;
6594 vpushsym(&fs
->type
, fs
);
6595 vset(&vs
->type
, vs
->r
, vs
->c
);
6597 mk_pointer(&vtop
->type
);
6603 static void try_call_cleanup_goto(Sym
*cleanupstate
)
6608 if (!cur_scope
->cl
.s
)
6611 /* search NCA of both cleanup chains given parents and initial depth */
6612 ocd
= cleanupstate
? cleanupstate
->v
& ~SYM_FIELD
: 0;
6613 for (ccd
= cur_scope
->cl
.n
, oc
= cleanupstate
; ocd
> ccd
; --ocd
, oc
= oc
->ncl
)
6615 for (cc
= cur_scope
->cl
.s
; ccd
> ocd
; --ccd
, cc
= cc
->ncl
)
6617 for (; cc
!= oc
; cc
= cc
->ncl
, oc
= oc
->ncl
, --ccd
)
6620 try_call_scope_cleanup(cc
);
6623 /* call 'func' for each __attribute__((cleanup(func))) */
6624 static void block_cleanup(struct scope
*o
)
6628 for (pg
= &pending_gotos
; (g
= *pg
) && g
->c
> o
->cl
.n
;) {
6629 if (g
->prev_tok
->r
& LABEL_FORWARD
) {
6634 try_call_scope_cleanup(o
->cl
.s
);
6635 pcl
->jnext
= gjmp(0);
6637 goto remove_pending
;
6647 try_call_scope_cleanup(o
->cl
.s
);
6650 /* ------------------------------------------------------------------------- */
6653 static void vla_restore(int loc
)
6656 gen_vla_sp_restore(loc
);
6659 static void vla_leave(struct scope
*o
)
6661 struct scope
*c
= cur_scope
, *v
= NULL
;
6662 for (; c
!= o
&& c
; c
= c
->prev
)
6666 vla_restore(v
->vla
.locorig
);
6669 /* ------------------------------------------------------------------------- */
6672 static void new_scope(struct scope
*o
)
6674 /* copy and link previous scope */
6676 o
->prev
= cur_scope
;
6678 cur_scope
->vla
.num
= 0;
6680 /* record local declaration stack position */
6681 o
->lstk
= local_stack
;
6682 o
->llstk
= local_label_stack
;
6686 tcc_debug_stabn(tcc_state
, N_LBRAC
, ind
- func_ind
);
6689 static void prev_scope(struct scope
*o
, int is_expr
)
6693 if (o
->cl
.s
!= o
->prev
->cl
.s
)
6694 block_cleanup(o
->prev
);
6696 /* pop locally defined labels */
6697 label_pop(&local_label_stack
, o
->llstk
, is_expr
);
6699 /* In the is_expr case (a statement expression is finished here),
6700 vtop might refer to symbols on the local_stack. Either via the
6701 type or via vtop->sym. We can't pop those nor any that in turn
6702 might be referred to. To make it easier we don't roll back
6703 any symbols in that case; some upper level call to block() will
6704 do that. We do have to remove such symbols from the lookup
6705 tables, though. sym_pop will do that. */
6707 /* pop locally defined symbols */
6708 pop_local_syms(o
->lstk
, is_expr
);
6709 cur_scope
= o
->prev
;
6713 tcc_debug_stabn(tcc_state
, N_RBRAC
, ind
- func_ind
);
6716 /* leave a scope via break/continue(/goto) */
6717 static void leave_scope(struct scope
*o
)
6721 try_call_scope_cleanup(o
->cl
.s
);
6725 /* ------------------------------------------------------------------------- */
6726 /* call block from 'for do while' loops */
6728 static void lblock(int *bsym
, int *csym
)
6730 struct scope
*lo
= loop_scope
, *co
= cur_scope
;
6731 int *b
= co
->bsym
, *c
= co
->csym
;
6745 static void block(int is_expr
)
6747 int a
, b
, c
, d
, e
, t
;
6752 /* default return value is (void) */
6754 vtop
->type
.t
= VT_VOID
;
6759 /* If the token carries a value, next() might destroy it. Only with
6760 invalid code such as f(){"123"4;} */
6761 if (TOK_HAS_VALUE(t
))
6766 tcc_tcov_check_line (tcc_state
, 0), tcc_tcov_block_begin (tcc_state
);
6774 if (tok
== TOK_ELSE
) {
6779 gsym(d
); /* patch else jmp */
6784 } else if (t
== TOK_WHILE
) {
6796 } else if (t
== '{') {
6799 /* handle local labels declarations */
6800 while (tok
== TOK_LABEL
) {
6803 if (tok
< TOK_UIDENT
)
6804 expect("label identifier");
6805 label_push(&local_label_stack
, tok
, LABEL_DECLARED
);
6807 } while (tok
== ',');
6811 while (tok
!= '}') {
6820 prev_scope(&o
, is_expr
);
6823 else if (!nocode_wanted
)
6824 check_func_return();
6826 } else if (t
== TOK_RETURN
) {
6827 b
= (func_vt
.t
& VT_BTYPE
) != VT_VOID
;
6831 gen_assign_cast(&func_vt
);
6833 if (vtop
->type
.t
!= VT_VOID
)
6834 tcc_warning("void function returns a value");
6838 tcc_warning("'return' with no value");
6841 leave_scope(root_scope
);
6843 gfunc_return(&func_vt
);
6845 /* jump unless last stmt in top-level block */
6846 if (tok
!= '}' || local_scope
!= 1)
6849 tcc_tcov_block_end (tcc_state
, -1);
6852 } else if (t
== TOK_BREAK
) {
6854 if (!cur_scope
->bsym
)
6855 tcc_error("cannot break");
6856 if (cur_switch
&& cur_scope
->bsym
== cur_switch
->bsym
)
6857 leave_scope(cur_switch
->scope
);
6859 leave_scope(loop_scope
);
6860 *cur_scope
->bsym
= gjmp(*cur_scope
->bsym
);
6863 } else if (t
== TOK_CONTINUE
) {
6865 if (!cur_scope
->csym
)
6866 tcc_error("cannot continue");
6867 leave_scope(loop_scope
);
6868 *cur_scope
->csym
= gjmp(*cur_scope
->csym
);
6871 } else if (t
== TOK_FOR
) {
6876 /* c99 for-loop init decl? */
6877 if (!decl0(VT_LOCAL
, 1, NULL
)) {
6878 /* no, regular for-loop init expr */
6906 } else if (t
== TOK_DO
) {
6920 } else if (t
== TOK_SWITCH
) {
6921 struct switch_t
*sw
;
6923 sw
= tcc_mallocz(sizeof *sw
);
6925 sw
->scope
= cur_scope
;
6926 sw
->prev
= cur_switch
;
6927 sw
->nocode_wanted
= nocode_wanted
;
6933 sw
->sv
= *vtop
--; /* save switch value */
6936 b
= gjmp(0); /* jump to first case */
6938 a
= gjmp(a
); /* add implicit break */
6942 if (sw
->sv
.type
.t
& VT_UNSIGNED
)
6943 qsort(sw
->p
, sw
->n
, sizeof(void*), case_cmpu
);
6945 qsort(sw
->p
, sw
->n
, sizeof(void*), case_cmpi
);
6947 for (b
= 1; b
< sw
->n
; b
++)
6948 if (sw
->sv
.type
.t
& VT_UNSIGNED
6949 ? (uint64_t)sw
->p
[b
- 1]->v2
>= (uint64_t)sw
->p
[b
]->v1
6950 : sw
->p
[b
- 1]->v2
>= sw
->p
[b
]->v1
)
6951 tcc_error("duplicate case value");
6955 d
= 0, gcase(sw
->p
, sw
->n
, &d
);
6958 gsym_addr(d
, sw
->def_sym
);
6964 dynarray_reset(&sw
->p
, &sw
->n
);
6965 cur_switch
= sw
->prev
;
6968 } else if (t
== TOK_CASE
) {
6969 struct case_t
*cr
= tcc_malloc(sizeof(struct case_t
));
6972 cr
->v1
= cr
->v2
= expr_const64();
6973 if (gnu_ext
&& tok
== TOK_DOTS
) {
6975 cr
->v2
= expr_const64();
6976 if ((!(cur_switch
->sv
.type
.t
& VT_UNSIGNED
) && cr
->v2
< cr
->v1
)
6977 || (cur_switch
->sv
.type
.t
& VT_UNSIGNED
&& (uint64_t)cr
->v2
< (uint64_t)cr
->v1
))
6978 tcc_warning("empty case range");
6981 tcc_tcov_reset_ind(tcc_state
);
6982 cr
->sym
= gind(cur_switch
->nocode_wanted
);
6983 dynarray_add(&cur_switch
->p
, &cur_switch
->n
, cr
);
6986 goto block_after_label
;
6988 } else if (t
== TOK_DEFAULT
) {
6991 if (cur_switch
->def_sym
)
6992 tcc_error("too many 'default'");
6994 tcc_tcov_reset_ind(tcc_state
);
6995 cur_switch
->def_sym
= gind(cur_switch
->nocode_wanted
);
6998 goto block_after_label
;
7000 } else if (t
== TOK_GOTO
) {
7001 if (cur_scope
->vla
.num
)
7002 vla_restore(cur_scope
->vla
.locorig
);
7003 if (tok
== '*' && gnu_ext
) {
7007 if ((vtop
->type
.t
& VT_BTYPE
) != VT_PTR
)
7011 } else if (tok
>= TOK_UIDENT
) {
7012 s
= label_find(tok
);
7013 /* put forward definition if needed */
7015 s
= label_push(&global_label_stack
, tok
, LABEL_FORWARD
);
7016 else if (s
->r
== LABEL_DECLARED
)
7017 s
->r
= LABEL_FORWARD
;
7019 if (s
->r
& LABEL_FORWARD
) {
7020 /* start new goto chain for cleanups, linked via label->next */
7021 if (cur_scope
->cl
.s
&& !nocode_wanted
) {
7022 sym_push2(&pending_gotos
, SYM_FIELD
, 0, cur_scope
->cl
.n
);
7023 pending_gotos
->prev_tok
= s
;
7024 s
= sym_push2(&s
->next
, SYM_FIELD
, 0, 0);
7025 pending_gotos
->next
= s
;
7027 s
->jnext
= gjmp(s
->jnext
);
7029 try_call_cleanup_goto(s
->cleanupstate
);
7030 gjmp_addr(s
->jnext
);
7035 expect("label identifier");
7039 } else if (t
== TOK_ASM1
|| t
== TOK_ASM2
|| t
== TOK_ASM3
) {
7043 if (tok
== ':' && t
>= TOK_UIDENT
) {
7048 if (s
->r
== LABEL_DEFINED
)
7049 tcc_error("duplicate label '%s'", get_tok_str(s
->v
, NULL
));
7050 s
->r
= LABEL_DEFINED
;
7052 Sym
*pcl
; /* pending cleanup goto */
7053 for (pcl
= s
->next
; pcl
; pcl
= pcl
->prev
)
7055 sym_pop(&s
->next
, NULL
, 0);
7059 s
= label_push(&global_label_stack
, t
, LABEL_DEFINED
);
7062 s
->cleanupstate
= cur_scope
->cl
.s
;
7066 /* Accept attributes after labels (e.g. 'unused') */
7067 AttributeDef ad_tmp
;
7068 parse_attribute(&ad_tmp
);
7070 vla_restore(cur_scope
->vla
.loc
);
7073 /* we accept this, but it is a mistake */
7074 tcc_warning_c(warn_all
)("deprecated use of label at end of compound statement");
7077 /* expression case */
7094 tcc_tcov_check_line (tcc_state
, 0), tcc_tcov_block_end (tcc_state
, 0);
7097 /* This skips over a stream of tokens containing balanced {} and ()
7098 pairs, stopping at outer ',' ';' and '}' (or matching '}' if we started
7099 with a '{'). If STR then allocates and stores the skipped tokens
7100 in *STR. This doesn't check if () and {} are nested correctly,
7101 i.e. "({)}" is accepted. */
7102 static void skip_or_save_block(TokenString
**str
)
7104 int braces
= tok
== '{';
7107 *str
= tok_str_alloc();
7109 while ((level
> 0 || (tok
!= '}' && tok
!= ',' && tok
!= ';' && tok
!= ')'))) {
7111 if (tok
== TOK_EOF
) {
7112 if (str
|| level
> 0)
7113 tcc_error("unexpected end of file");
7118 tok_str_add_tok(*str
);
7121 if (t
== '{' || t
== '(') {
7123 } else if (t
== '}' || t
== ')') {
7125 if (level
== 0 && braces
&& t
== '}')
7130 tok_str_add(*str
, -1);
7131 tok_str_add(*str
, 0);
7135 #define EXPR_CONST 1
7138 static void parse_init_elem(int expr_type
)
7140 int saved_global_expr
;
7143 /* compound literals must be allocated globally in this case */
7144 saved_global_expr
= global_expr
;
7147 global_expr
= saved_global_expr
;
7148 /* NOTE: symbols are accepted, as well as lvalue for anon symbols
7149 (compound literals). */
7150 if (((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) != VT_CONST
7151 && ((vtop
->r
& (VT_SYM
|VT_LVAL
)) != (VT_SYM
|VT_LVAL
)
7152 || vtop
->sym
->v
< SYM_FIRST_ANOM
))
7153 #ifdef TCC_TARGET_PE
7154 || ((vtop
->r
& VT_SYM
) && vtop
->sym
->a
.dllimport
)
7157 tcc_error("initializer element is not constant");
7166 static void init_assert(init_params
*p
, int offset
)
7168 if (p
->sec
? !NODATA_WANTED
&& offset
> p
->sec
->data_offset
7169 : !nocode_wanted
&& offset
> p
->local_offset
)
7170 tcc_internal_error("initializer overflow");
7173 #define init_assert(sec, offset)
7176 /* put zeros for variable based init */
7177 static void init_putz(init_params
*p
, unsigned long c
, int size
)
7179 init_assert(p
, c
+ size
);
7181 /* nothing to do because globals are already set to zero */
7183 vpush_helper_func(TOK_memset
);
7185 #ifdef TCC_TARGET_ARM
7197 #define DIF_SIZE_ONLY 2
7198 #define DIF_HAVE_ELEM 4
7201 /* delete relocations for specified range c ... c + size. Unfortunatly
7202 in very special cases, relocations may occur unordered */
7203 static void decl_design_delrels(Section
*sec
, int c
, int size
)
7205 ElfW_Rel
*rel
, *rel2
, *rel_end
;
7206 if (!sec
|| !sec
->reloc
)
7208 rel
= rel2
= (ElfW_Rel
*)sec
->reloc
->data
;
7209 rel_end
= (ElfW_Rel
*)(sec
->reloc
->data
+ sec
->reloc
->data_offset
);
7210 while (rel
< rel_end
) {
7211 if (rel
->r_offset
>= c
&& rel
->r_offset
< c
+ size
) {
7212 sec
->reloc
->data_offset
-= sizeof *rel
;
7215 memcpy(rel2
, rel
, sizeof *rel
);
7222 static void decl_design_flex(init_params
*p
, Sym
*ref
, int index
)
7224 if (ref
== p
->flex_array_ref
) {
7225 if (index
>= ref
->c
)
7227 } else if (ref
->c
< 0)
7228 tcc_error("flexible array has zero size in this context");
7231 /* t is the array or struct type. c is the array or struct
7232 address. cur_field is the pointer to the current
7233 field, for arrays the 'c' member contains the current start
7234 index. 'flags' is as in decl_initializer.
7235 'al' contains the already initialized length of the
7236 current container (starting at c). This returns the new length of that. */
7237 static int decl_designator(init_params
*p
, CType
*type
, unsigned long c
,
7238 Sym
**cur_field
, int flags
, int al
)
7241 int index
, index_last
, align
, l
, nb_elems
, elem_size
;
7242 unsigned long corig
= c
;
7247 if (flags
& DIF_HAVE_ELEM
)
7250 if (gnu_ext
&& tok
>= TOK_UIDENT
) {
7257 /* NOTE: we only support ranges for last designator */
7258 while (nb_elems
== 1 && (tok
== '[' || tok
== '.')) {
7260 if (!(type
->t
& VT_ARRAY
))
7261 expect("array type");
7263 index
= index_last
= expr_const();
7264 if (tok
== TOK_DOTS
&& gnu_ext
) {
7266 index_last
= expr_const();
7270 decl_design_flex(p
, s
, index_last
);
7271 if (index
< 0 || index_last
>= s
->c
|| index_last
< index
)
7272 tcc_error("index exceeds array bounds or range is empty");
7274 (*cur_field
)->c
= index_last
;
7275 type
= pointed_type(type
);
7276 elem_size
= type_size(type
, &align
);
7277 c
+= index
* elem_size
;
7278 nb_elems
= index_last
- index
+ 1;
7285 if ((type
->t
& VT_BTYPE
) != VT_STRUCT
)
7286 expect("struct/union type");
7288 f
= find_field(type
, l
, &cumofs
);
7301 } else if (!gnu_ext
) {
7306 if (type
->t
& VT_ARRAY
) {
7307 index
= (*cur_field
)->c
;
7309 decl_design_flex(p
, s
, index
);
7311 tcc_error("too many initializers");
7312 type
= pointed_type(type
);
7313 elem_size
= type_size(type
, &align
);
7314 c
+= index
* elem_size
;
7317 while (f
&& (f
->v
& SYM_FIRST_ANOM
) && (f
->type
.t
& VT_BITFIELD
))
7318 *cur_field
= f
= f
->next
;
7320 tcc_error("too many initializers");
7326 if (!elem_size
) /* for structs */
7327 elem_size
= type_size(type
, &align
);
7329 /* Using designators the same element can be initialized more
7330 than once. In that case we need to delete possibly already
7331 existing relocations. */
7332 if (!(flags
& DIF_SIZE_ONLY
) && c
- corig
< al
) {
7333 decl_design_delrels(p
->sec
, c
, elem_size
* nb_elems
);
7334 flags
&= ~DIF_CLEAR
; /* mark stack dirty too */
7337 decl_initializer(p
, type
, c
, flags
& ~DIF_FIRST
);
7339 if (!(flags
& DIF_SIZE_ONLY
) && nb_elems
> 1) {
7343 if (p
->sec
|| (type
->t
& VT_ARRAY
)) {
7344 /* make init_putv/vstore believe it were a struct */
7346 t1
.t
= VT_STRUCT
, t1
.ref
= &aref
;
7350 vpush_ref(type
, p
->sec
, c
, elem_size
);
7352 vset(type
, VT_LOCAL
|VT_LVAL
, c
);
7353 for (i
= 1; i
< nb_elems
; i
++) {
7355 init_putv(p
, type
, c
+ elem_size
* i
);
7360 c
+= nb_elems
* elem_size
;
7366 /* store a value or an expression directly in global data or in local array */
7367 static void init_putv(init_params
*p
, CType
*type
, unsigned long c
)
7373 Section
*sec
= p
->sec
;
7377 dtype
.t
&= ~VT_CONSTANT
; /* need to do that to avoid false warning */
7379 size
= type_size(type
, &align
);
7380 if (type
->t
& VT_BITFIELD
)
7381 size
= (BIT_POS(type
->t
) + BIT_SIZE(type
->t
) + 7) / 8;
7382 init_assert(p
, c
+ size
);
7385 /* XXX: not portable */
7386 /* XXX: generate error if incorrect relocation */
7387 gen_assign_cast(&dtype
);
7388 bt
= type
->t
& VT_BTYPE
;
7390 if ((vtop
->r
& VT_SYM
)
7392 && (bt
!= (PTR_SIZE
== 8 ? VT_LLONG
: VT_INT
)
7393 || (type
->t
& VT_BITFIELD
))
7394 && !((vtop
->r
& VT_CONST
) && vtop
->sym
->v
>= SYM_FIRST_ANOM
)
7396 tcc_error("initializer element is not computable at load time");
7398 if (NODATA_WANTED
) {
7403 ptr
= sec
->data
+ c
;
7406 /* XXX: make code faster ? */
7407 if ((vtop
->r
& (VT_SYM
|VT_CONST
)) == (VT_SYM
|VT_CONST
) &&
7408 vtop
->sym
->v
>= SYM_FIRST_ANOM
&&
7409 /* XXX This rejects compound literals like
7410 '(void *){ptr}'. The problem is that '&sym' is
7411 represented the same way, which would be ruled out
7412 by the SYM_FIRST_ANOM check above, but also '"string"'
7413 in 'char *p = "string"' is represented the same
7414 with the type being VT_PTR and the symbol being an
7415 anonymous one. That is, there's no difference in vtop
7416 between '(void *){x}' and '&(void *){x}'. Ignore
7417 pointer typed entities here. Hopefully no real code
7418 will ever use compound literals with scalar type. */
7419 (vtop
->type
.t
& VT_BTYPE
) != VT_PTR
) {
7420 /* These come from compound literals, memcpy stuff over. */
7424 esym
= elfsym(vtop
->sym
);
7425 ssec
= tcc_state
->sections
[esym
->st_shndx
];
7426 memmove (ptr
, ssec
->data
+ esym
->st_value
+ (int)vtop
->c
.i
, size
);
7428 /* We need to copy over all memory contents, and that
7429 includes relocations. Use the fact that relocs are
7430 created it order, so look from the end of relocs
7431 until we hit one before the copied region. */
7432 unsigned long relofs
= ssec
->reloc
->data_offset
;
7433 while (relofs
>= sizeof(*rel
)) {
7434 relofs
-= sizeof(*rel
);
7435 rel
= (ElfW_Rel
*)(ssec
->reloc
->data
+ relofs
);
7436 if (rel
->r_offset
>= esym
->st_value
+ size
)
7438 if (rel
->r_offset
< esym
->st_value
)
7440 put_elf_reloca(symtab_section
, sec
,
7441 c
+ rel
->r_offset
- esym
->st_value
,
7442 ELFW(R_TYPE
)(rel
->r_info
),
7443 ELFW(R_SYM
)(rel
->r_info
),
7453 if (type
->t
& VT_BITFIELD
) {
7454 int bit_pos
, bit_size
, bits
, n
;
7455 unsigned char *p
, v
, m
;
7456 bit_pos
= BIT_POS(vtop
->type
.t
);
7457 bit_size
= BIT_SIZE(vtop
->type
.t
);
7458 p
= (unsigned char*)ptr
+ (bit_pos
>> 3);
7459 bit_pos
&= 7, bits
= 0;
7464 v
= val
>> bits
<< bit_pos
;
7465 m
= ((1 << n
) - 1) << bit_pos
;
7466 *p
= (*p
& ~m
) | (v
& m
);
7467 bits
+= n
, bit_size
-= n
, bit_pos
= 0, ++p
;
7472 *(char *)ptr
= val
!= 0;
7478 write16le(ptr
, val
);
7481 write32le(ptr
, val
);
7484 write64le(ptr
, val
);
7487 #if defined TCC_IS_NATIVE_387
7488 /* Host and target platform may be different but both have x87.
7489 On windows, tcc does not use VT_LDOUBLE, except when it is a
7490 cross compiler. In this case a mingw gcc as host compiler
7491 comes here with 10-byte long doubles, while msvc or tcc won't.
7492 tcc itself can still translate by asm.
7493 In any case we avoid possibly random bytes 11 and 12.
7495 if (sizeof (long double) >= 10)
7496 memcpy(ptr
, &vtop
->c
.ld
, 10);
7498 else if (sizeof (long double) == sizeof (double))
7499 __asm__("fldl %1\nfstpt %0\n" : "=m" (*ptr
) : "m" (vtop
->c
.ld
));
7501 else if (vtop
->c
.ld
== 0.0)
7505 /* For other platforms it should work natively, but may not work
7506 for cross compilers */
7507 if (sizeof(long double) == LDOUBLE_SIZE
)
7508 memcpy(ptr
, &vtop
->c
.ld
, LDOUBLE_SIZE
);
7509 else if (sizeof(double) == LDOUBLE_SIZE
)
7510 memcpy(ptr
, &vtop
->c
.ld
, LDOUBLE_SIZE
);
7511 #ifndef TCC_CROSS_TEST
7513 tcc_error("can't cross compile long double constants");
7518 /* intptr_t may need a reloc too, see tcctest.c:relocation_test() */
7521 if (vtop
->r
& VT_SYM
)
7522 greloca(sec
, vtop
->sym
, c
, R_DATA_PTR
, val
);
7524 write64le(ptr
, val
);
7527 write32le(ptr
, val
);
7531 write64le(ptr
, val
);
7535 if (vtop
->r
& VT_SYM
)
7536 greloc(sec
, vtop
->sym
, c
, R_DATA_PTR
);
7537 write32le(ptr
, val
);
7541 //tcc_internal_error("unexpected type");
7547 vset(&dtype
, VT_LOCAL
|VT_LVAL
, c
);
7554 /* 't' contains the type and storage info. 'c' is the offset of the
7555 object in section 'sec'. If 'sec' is NULL, it means stack based
7556 allocation. 'flags & DIF_FIRST' is true if array '{' must be read (multi
7557 dimension implicit array init handling). 'flags & DIF_SIZE_ONLY' is true if
7558 size only evaluation is wanted (only for arrays). */
7559 static void decl_initializer(init_params
*p
, CType
*type
, unsigned long c
, int flags
)
7561 int len
, n
, no_oblock
, i
;
7567 /* generate line number info */
7568 if (debug_modes
&& !p
->sec
)
7569 tcc_debug_line(tcc_state
), tcc_tcov_check_line (tcc_state
, 1);
7571 if (!(flags
& DIF_HAVE_ELEM
) && tok
!= '{' &&
7572 /* In case of strings we have special handling for arrays, so
7573 don't consume them as initializer value (which would commit them
7574 to some anonymous symbol). */
7575 tok
!= TOK_LSTR
&& tok
!= TOK_STR
&&
7576 (!(flags
& DIF_SIZE_ONLY
)
7577 /* a struct may be initialized from a struct of same type, as in
7578 struct {int x,y;} a = {1,2}, b = {3,4}, c[] = {a,b};
7579 In that case we need to parse the element in order to check
7580 it for compatibility below */
7581 || (type
->t
& VT_BTYPE
) == VT_STRUCT
)
7583 int ncw_prev
= nocode_wanted
;
7584 if ((flags
& DIF_SIZE_ONLY
) && !p
->sec
)
7586 parse_init_elem(!p
->sec
? EXPR_ANY
: EXPR_CONST
);
7587 nocode_wanted
= ncw_prev
;
7588 flags
|= DIF_HAVE_ELEM
;
7591 if (type
->t
& VT_ARRAY
) {
7593 if (((flags
& DIF_FIRST
) && tok
!= TOK_LSTR
&& tok
!= TOK_STR
) ||
7601 t1
= pointed_type(type
);
7602 size1
= type_size(t1
, &align1
);
7604 /* only parse strings here if correct type (otherwise: handle
7605 them as ((w)char *) expressions */
7606 if ((tok
== TOK_LSTR
&&
7607 #ifdef TCC_TARGET_PE
7608 (t1
->t
& VT_BTYPE
) == VT_SHORT
&& (t1
->t
& VT_UNSIGNED
)
7610 (t1
->t
& VT_BTYPE
) == VT_INT
7612 ) || (tok
== TOK_STR
&& (t1
->t
& VT_BTYPE
) == VT_BYTE
)) {
7614 cstr_reset(&initstr
);
7615 if (size1
!= (tok
== TOK_STR
? 1 : sizeof(nwchar_t
)))
7616 tcc_error("unhandled string literal merging");
7617 while (tok
== TOK_STR
|| tok
== TOK_LSTR
) {
7619 initstr
.size
-= size1
;
7621 len
+= tokc
.str
.size
;
7623 len
+= tokc
.str
.size
/ sizeof(nwchar_t
);
7625 cstr_cat(&initstr
, tokc
.str
.data
, tokc
.str
.size
);
7628 if (tok
!= ')' && tok
!= '}' && tok
!= ',' && tok
!= ';'
7629 && tok
!= TOK_EOF
) {
7630 /* Not a lone literal but part of a bigger expression. */
7631 unget_tok(size1
== 1 ? TOK_STR
: TOK_LSTR
);
7632 tokc
.str
.size
= initstr
.size
;
7633 tokc
.str
.data
= initstr
.data
;
7637 decl_design_flex(p
, s
, len
);
7638 if (!(flags
& DIF_SIZE_ONLY
)) {
7643 tcc_warning("initializer-string for array is too long");
7644 /* in order to go faster for common case (char
7645 string in global variable, we handle it
7647 if (p
->sec
&& size1
== 1) {
7648 init_assert(p
, c
+ nb
);
7650 memcpy(p
->sec
->data
+ c
, initstr
.data
, nb
);
7654 /* only add trailing zero if enough storage (no
7655 warning in this case since it is standard) */
7656 if (flags
& DIF_CLEAR
)
7659 init_putz(p
, c
+ i
* size1
, (n
- i
) * size1
);
7663 } else if (size1
== 1)
7664 ch
= ((unsigned char *)initstr
.data
)[i
];
7666 ch
= ((nwchar_t
*)initstr
.data
)[i
];
7668 init_putv(p
, t1
, c
+ i
* size1
);
7679 /* zero memory once in advance */
7680 if (!(flags
& (DIF_CLEAR
| DIF_SIZE_ONLY
))) {
7681 init_putz(p
, c
, n
*size1
);
7686 /* GNU extension: if the initializer is empty for a flex array,
7687 it's size is zero. We won't enter the loop, so set the size
7689 decl_design_flex(p
, s
, len
);
7690 while (tok
!= '}' || (flags
& DIF_HAVE_ELEM
)) {
7691 len
= decl_designator(p
, type
, c
, &f
, flags
, len
);
7692 flags
&= ~DIF_HAVE_ELEM
;
7693 if (type
->t
& VT_ARRAY
) {
7695 /* special test for multi dimensional arrays (may not
7696 be strictly correct if designators are used at the
7698 if (no_oblock
&& len
>= n
*size1
)
7701 if (s
->type
.t
== VT_UNION
)
7705 if (no_oblock
&& f
== NULL
)
7717 } else if ((flags
& DIF_HAVE_ELEM
)
7718 /* Use i_c_parameter_t, to strip toplevel qualifiers.
7719 The source type might have VT_CONSTANT set, which is
7720 of course assignable to non-const elements. */
7721 && is_compatible_unqualified_types(type
, &vtop
->type
)) {
7724 } else if ((type
->t
& VT_BTYPE
) == VT_STRUCT
) {
7726 if ((flags
& DIF_FIRST
) || tok
== '{') {
7736 } else if (tok
== '{') {
7737 if (flags
& DIF_HAVE_ELEM
)
7740 decl_initializer(p
, type
, c
, flags
& ~DIF_HAVE_ELEM
);
7743 } else one_elem
: if ((flags
& DIF_SIZE_ONLY
)) {
7744 /* If we supported only ISO C we wouldn't have to accept calling
7745 this on anything than an array if DIF_SIZE_ONLY (and even then
7746 only on the outermost level, so no recursion would be needed),
7747 because initializing a flex array member isn't supported.
7748 But GNU C supports it, so we need to recurse even into
7749 subfields of structs and arrays when DIF_SIZE_ONLY is set. */
7750 /* just skip expression */
7751 if (flags
& DIF_HAVE_ELEM
)
7754 skip_or_save_block(NULL
);
7757 if (!(flags
& DIF_HAVE_ELEM
)) {
7758 /* This should happen only when we haven't parsed
7759 the init element above for fear of committing a
7760 string constant to memory too early. */
7761 if (tok
!= TOK_STR
&& tok
!= TOK_LSTR
)
7762 expect("string constant");
7763 parse_init_elem(!p
->sec
? EXPR_ANY
: EXPR_CONST
);
7765 if (!p
->sec
&& (flags
& DIF_CLEAR
) /* container was already zero'd */
7766 && (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
7768 && btype_size(type
->t
& VT_BTYPE
) /* not for fp constants */
7772 init_putv(p
, type
, c
);
7776 /* parse an initializer for type 't' if 'has_init' is non zero, and
7777 allocate space in local or global data space ('r' is either
7778 VT_LOCAL or VT_CONST). If 'v' is non zero, then an associated
7779 variable 'v' of scope 'scope' is declared before initializers
7780 are parsed. If 'v' is zero, then a reference to the new object
7781 is put in the value stack. If 'has_init' is 2, a special parsing
7782 is done to handle string constants. */
7783 static void decl_initializer_alloc(CType
*type
, AttributeDef
*ad
, int r
,
7784 int has_init
, int v
, int scope
)
7786 int size
, align
, addr
;
7787 TokenString
*init_str
= NULL
;
7790 Sym
*flexible_array
;
7792 int saved_nocode_wanted
= nocode_wanted
;
7793 #ifdef CONFIG_TCC_BCHECK
7794 int bcheck
= tcc_state
->do_bounds_check
&& !NODATA_WANTED
;
7796 init_params p
= {0};
7798 /* Always allocate static or global variables */
7799 if (v
&& (r
& VT_VALMASK
) == VT_CONST
)
7800 nocode_wanted
|= 0x80000000;
7802 flexible_array
= NULL
;
7803 size
= type_size(type
, &align
);
7805 /* exactly one flexible array may be initialized, either the
7806 toplevel array or the last member of the toplevel struct */
7809 /* If the base type itself was an array type of unspecified size
7810 (like in 'typedef int arr[]; arr x = {1};') then we will
7811 overwrite the unknown size by the real one for this decl.
7812 We need to unshare the ref symbol holding that size. */
7813 type
->ref
= sym_push(SYM_FIELD
, &type
->ref
->type
, 0, type
->ref
->c
);
7814 p
.flex_array_ref
= type
->ref
;
7816 } else if (has_init
&& (type
->t
& VT_BTYPE
) == VT_STRUCT
) {
7817 Sym
*field
= type
->ref
->next
;
7820 field
= field
->next
;
7821 if (field
->type
.t
& VT_ARRAY
&& field
->type
.ref
->c
< 0) {
7822 flexible_array
= field
;
7823 p
.flex_array_ref
= field
->type
.ref
;
7830 /* If unknown size, do a dry-run 1st pass */
7832 tcc_error("unknown type size");
7833 if (has_init
== 2) {
7834 /* only get strings */
7835 init_str
= tok_str_alloc();
7836 while (tok
== TOK_STR
|| tok
== TOK_LSTR
) {
7837 tok_str_add_tok(init_str
);
7840 tok_str_add(init_str
, -1);
7841 tok_str_add(init_str
, 0);
7843 skip_or_save_block(&init_str
);
7847 begin_macro(init_str
, 1);
7849 decl_initializer(&p
, type
, 0, DIF_FIRST
| DIF_SIZE_ONLY
);
7850 /* prepare second initializer parsing */
7851 macro_ptr
= init_str
->str
;
7854 /* if still unknown size, error */
7855 size
= type_size(type
, &align
);
7857 tcc_error("unknown type size");
7859 /* If there's a flex member and it was used in the initializer
7861 if (flexible_array
&& flexible_array
->type
.ref
->c
> 0)
7862 size
+= flexible_array
->type
.ref
->c
7863 * pointed_size(&flexible_array
->type
);
7866 /* take into account specified alignment if bigger */
7867 if (ad
->a
.aligned
) {
7868 int speca
= 1 << (ad
->a
.aligned
- 1);
7871 } else if (ad
->a
.packed
) {
7875 if (!v
&& NODATA_WANTED
)
7876 size
= 0, align
= 1;
7878 if ((r
& VT_VALMASK
) == VT_LOCAL
) {
7880 #ifdef CONFIG_TCC_BCHECK
7882 /* add padding between stack variables for bound checking */
7886 loc
= (loc
- size
) & -align
;
7888 p
.local_offset
= addr
+ size
;
7889 #ifdef CONFIG_TCC_BCHECK
7891 /* add padding between stack variables for bound checking */
7896 /* local variable */
7897 #ifdef CONFIG_TCC_ASM
7898 if (ad
->asm_label
) {
7899 int reg
= asm_parse_regvar(ad
->asm_label
);
7901 r
= (r
& ~VT_VALMASK
) | reg
;
7904 sym
= sym_push(v
, type
, r
, addr
);
7905 if (ad
->cleanup_func
) {
7906 Sym
*cls
= sym_push2(&all_cleanups
,
7907 SYM_FIELD
| ++cur_scope
->cl
.n
, 0, 0);
7908 cls
->prev_tok
= sym
;
7909 cls
->next
= ad
->cleanup_func
;
7910 cls
->ncl
= cur_scope
->cl
.s
;
7911 cur_scope
->cl
.s
= cls
;
7916 /* push local reference */
7917 vset(type
, r
, addr
);
7921 if (v
&& scope
== VT_CONST
) {
7922 /* see if the symbol was already defined */
7925 if (p
.flex_array_ref
&& (sym
->type
.t
& type
->t
& VT_ARRAY
)
7926 && sym
->type
.ref
->c
> type
->ref
->c
) {
7927 /* flex array was already declared with explicit size
7929 int arr[] = { 1,2,3 }; */
7930 type
->ref
->c
= sym
->type
.ref
->c
;
7931 size
= type_size(type
, &align
);
7933 patch_storage(sym
, ad
, type
);
7934 /* we accept several definitions of the same global variable. */
7935 if (!has_init
&& sym
->c
&& elfsym(sym
)->st_shndx
!= SHN_UNDEF
)
7940 /* allocate symbol in corresponding section */
7944 while ((tp
->t
& (VT_BTYPE
|VT_ARRAY
)) == (VT_PTR
|VT_ARRAY
))
7945 tp
= &tp
->ref
->type
;
7946 if (tp
->t
& VT_CONSTANT
) {
7947 sec
= rodata_section
;
7948 } else if (has_init
) {
7950 /*if (tcc_state->g_debug & 4)
7951 tcc_warning("rw data: %s", get_tok_str(v, 0));*/
7952 } else if (tcc_state
->nocommon
)
7957 addr
= section_add(sec
, size
, align
);
7958 #ifdef CONFIG_TCC_BCHECK
7959 /* add padding if bound check */
7961 section_add(sec
, 1, 1);
7964 addr
= align
; /* SHN_COMMON is special, symbol value is align */
7965 sec
= common_section
;
7970 sym
= sym_push(v
, type
, r
| VT_SYM
, 0);
7971 patch_storage(sym
, ad
, NULL
);
7973 /* update symbol definition */
7974 put_extern_sym(sym
, sec
, addr
, size
);
7976 /* push global reference */
7977 vpush_ref(type
, sec
, addr
, size
);
7982 #ifdef CONFIG_TCC_BCHECK
7983 /* handles bounds now because the symbol must be defined
7984 before for the relocation */
7988 greloca(bounds_section
, sym
, bounds_section
->data_offset
, R_DATA_PTR
, 0);
7989 /* then add global bound info */
7990 bounds_ptr
= section_ptr_add(bounds_section
, 2 * sizeof(addr_t
));
7991 bounds_ptr
[0] = 0; /* relocated */
7992 bounds_ptr
[1] = size
;
7997 if (type
->t
& VT_VLA
) {
8003 /* save before-VLA stack pointer if needed */
8004 if (cur_scope
->vla
.num
== 0) {
8005 if (cur_scope
->prev
&& cur_scope
->prev
->vla
.num
) {
8006 cur_scope
->vla
.locorig
= cur_scope
->prev
->vla
.loc
;
8008 gen_vla_sp_save(loc
-= PTR_SIZE
);
8009 cur_scope
->vla
.locorig
= loc
;
8013 vpush_type_size(type
, &a
);
8014 gen_vla_alloc(type
, a
);
8015 #if defined TCC_TARGET_PE && defined TCC_TARGET_X86_64
8016 /* on _WIN64, because of the function args scratch area, the
8017 result of alloca differs from RSP and is returned in RAX. */
8018 gen_vla_result(addr
), addr
= (loc
-= PTR_SIZE
);
8020 gen_vla_sp_save(addr
);
8021 cur_scope
->vla
.loc
= addr
;
8022 cur_scope
->vla
.num
++;
8023 } else if (has_init
) {
8025 decl_initializer(&p
, type
, addr
, DIF_FIRST
);
8026 /* patch flexible array member size back to -1, */
8027 /* for possible subsequent similar declarations */
8029 flexible_array
->type
.ref
->c
= -1;
8033 /* restore parse state if needed */
8039 nocode_wanted
= saved_nocode_wanted
;
8042 /* generate vla code saved in post_type() */
8043 static void func_vla_arg_code(Sym
*arg
)
8046 TokenString
*vla_array_tok
= NULL
;
8049 func_vla_arg_code(arg
->type
.ref
);
8051 if ((arg
->type
.t
& VT_VLA
) && arg
->type
.ref
->vla_array_str
) {
8052 loc
-= type_size(&int_type
, &align
);
8054 arg
->type
.ref
->c
= loc
;
8057 vla_array_tok
= tok_str_alloc();
8058 vla_array_tok
->str
= arg
->type
.ref
->vla_array_str
;
8059 begin_macro(vla_array_tok
, 1);
8064 vpush_type_size(&arg
->type
.ref
->type
, &align
);
8066 vset(&int_type
, VT_LOCAL
|VT_LVAL
, arg
->type
.ref
->c
);
8073 static void func_vla_arg(Sym
*sym
)
8077 for (arg
= sym
->type
.ref
->next
; arg
; arg
= arg
->next
)
8078 if (arg
->type
.t
& VT_VLA
)
8079 func_vla_arg_code(arg
);
8082 /* parse a function defined by symbol 'sym' and generate its code in
8083 'cur_text_section' */
8084 static void gen_function(Sym
*sym
)
8086 struct scope f
= { 0 };
8087 cur_scope
= root_scope
= &f
;
8089 ind
= cur_text_section
->data_offset
;
8090 if (sym
->a
.aligned
) {
8091 size_t newoff
= section_add(cur_text_section
, 0,
8092 1 << (sym
->a
.aligned
- 1));
8093 gen_fill_nops(newoff
- ind
);
8095 /* NOTE: we patch the symbol size later */
8096 put_extern_sym(sym
, cur_text_section
, ind
, 0);
8097 if (sym
->type
.ref
->f
.func_ctor
)
8098 add_array (tcc_state
, ".init_array", sym
->c
);
8099 if (sym
->type
.ref
->f
.func_dtor
)
8100 add_array (tcc_state
, ".fini_array", sym
->c
);
8102 funcname
= get_tok_str(sym
->v
, NULL
);
8104 func_vt
= sym
->type
.ref
->type
;
8105 func_var
= sym
->type
.ref
->f
.func_type
== FUNC_ELLIPSIS
;
8107 /* put debug symbol */
8108 tcc_debug_funcstart(tcc_state
, sym
);
8109 /* push a dummy symbol to enable local sym storage */
8110 sym_push2(&local_stack
, SYM_FIELD
, 0, 0);
8111 local_scope
= 1; /* for function parameters */
8115 clear_temp_local_var_list();
8120 /* reset local stack */
8121 pop_local_syms(NULL
, 0);
8123 cur_text_section
->data_offset
= ind
;
8125 label_pop(&global_label_stack
, NULL
, 0);
8126 sym_pop(&all_cleanups
, NULL
, 0);
8127 /* patch symbol size */
8128 elfsym(sym
)->st_size
= ind
- func_ind
;
8129 /* end of function */
8130 tcc_debug_funcend(tcc_state
, ind
- func_ind
);
8131 /* It's better to crash than to generate wrong code */
8132 cur_text_section
= NULL
;
8133 funcname
= ""; /* for safety */
8134 func_vt
.t
= VT_VOID
; /* for safety */
8135 func_var
= 0; /* for safety */
8136 ind
= 0; /* for safety */
8138 nocode_wanted
= 0x80000000;
8140 /* do this after funcend debug info */
8144 static void gen_inline_functions(TCCState
*s
)
8147 int inline_generated
, i
;
8148 struct InlineFunc
*fn
;
8150 tcc_open_bf(s
, ":inline:", 0);
8151 /* iterate while inline function are referenced */
8153 inline_generated
= 0;
8154 for (i
= 0; i
< s
->nb_inline_fns
; ++i
) {
8155 fn
= s
->inline_fns
[i
];
8157 if (sym
&& (sym
->c
|| !(sym
->type
.t
& VT_INLINE
))) {
8158 /* the function was used or forced (and then not internal):
8159 generate its code and convert it to a normal function */
8161 tcc_debug_putfile(s
, fn
->filename
);
8162 begin_macro(fn
->func_str
, 1);
8164 cur_text_section
= text_section
;
8168 inline_generated
= 1;
8171 } while (inline_generated
);
8175 static void free_inline_functions(TCCState
*s
)
8178 /* free tokens of unused inline functions */
8179 for (i
= 0; i
< s
->nb_inline_fns
; ++i
) {
8180 struct InlineFunc
*fn
= s
->inline_fns
[i
];
8182 tok_str_free(fn
->func_str
);
8184 dynarray_reset(&s
->inline_fns
, &s
->nb_inline_fns
);
8187 /* 'l' is VT_LOCAL or VT_CONST to define default storage type, or VT_CMP
8188 if parsing old style parameter decl list (and FUNC_SYM is set then) */
8189 static int decl0(int l
, int is_for_loop_init
, Sym
*func_sym
)
8191 int v
, has_init
, r
, oldint
;
8194 AttributeDef ad
, adbase
;
8197 if (tok
== TOK_STATIC_ASSERT
) {
8207 tcc_error("_Static_assert fail");
8209 goto static_assert_out
;
8213 parse_mult_str(&error_str
, "string constant");
8215 tcc_error("%s", (char *)error_str
.data
);
8216 cstr_free(&error_str
);
8224 if (!parse_btype(&btype
, &adbase
, l
== VT_LOCAL
)) {
8225 if (is_for_loop_init
)
8227 /* skip redundant ';' if not in old parameter decl scope */
8228 if (tok
== ';' && l
!= VT_CMP
) {
8234 if (tok
== TOK_ASM1
|| tok
== TOK_ASM2
|| tok
== TOK_ASM3
) {
8235 /* global asm block */
8239 if (tok
>= TOK_UIDENT
) {
8240 /* special test for old K&R protos without explicit int
8241 type. Only accepted when defining global data */
8246 expect("declaration");
8252 if ((btype
.t
& VT_BTYPE
) == VT_STRUCT
) {
8254 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) >= SYM_FIRST_ANOM
)
8255 tcc_warning("unnamed struct/union that defines no instances");
8259 if (IS_ENUM(btype
.t
)) {
8265 while (1) { /* iterate thru each declaration */
8268 type_decl(&type
, &ad
, &v
, TYPE_DIRECT
);
8272 type_to_str(buf
, sizeof(buf
), &type
, get_tok_str(v
, NULL
));
8273 printf("type = '%s'\n", buf
);
8276 if ((type
.t
& VT_BTYPE
) == VT_FUNC
) {
8277 if ((type
.t
& VT_STATIC
) && (l
== VT_LOCAL
))
8278 tcc_error("function without file scope cannot be static");
8279 /* if old style function prototype, we accept a
8282 if (sym
->f
.func_type
== FUNC_OLD
&& l
== VT_CONST
)
8283 decl0(VT_CMP
, 0, sym
);
8284 #if defined TCC_TARGET_MACHO || defined TARGETOS_ANDROID
8285 if (sym
->f
.func_alwinl
8286 && ((type
.t
& (VT_EXTERN
| VT_INLINE
))
8287 == (VT_EXTERN
| VT_INLINE
))) {
8288 /* always_inline functions must be handled as if they
8289 don't generate multiple global defs, even if extern
8290 inline, i.e. GNU inline semantics for those. Rewrite
8291 them into static inline. */
8292 type
.t
&= ~VT_EXTERN
;
8293 type
.t
|= VT_STATIC
;
8296 /* always compile 'extern inline' */
8297 if (type
.t
& VT_EXTERN
)
8298 type
.t
&= ~VT_INLINE
;
8300 } else if (oldint
) {
8301 tcc_warning("type defaults to int");
8304 if (gnu_ext
&& (tok
== TOK_ASM1
|| tok
== TOK_ASM2
|| tok
== TOK_ASM3
)) {
8305 ad
.asm_label
= asm_label_instr();
8306 /* parse one last attribute list, after asm label */
8307 parse_attribute(&ad
);
8309 /* gcc does not allow __asm__("label") with function definition,
8316 #ifdef TCC_TARGET_PE
8317 if (ad
.a
.dllimport
|| ad
.a
.dllexport
) {
8318 if (type
.t
& VT_STATIC
)
8319 tcc_error("cannot have dll linkage with static");
8320 if (type
.t
& VT_TYPEDEF
) {
8321 tcc_warning("'%s' attribute ignored for typedef",
8322 ad
.a
.dllimport
? (ad
.a
.dllimport
= 0, "dllimport") :
8323 (ad
.a
.dllexport
= 0, "dllexport"));
8324 } else if (ad
.a
.dllimport
) {
8325 if ((type
.t
& VT_BTYPE
) == VT_FUNC
)
8328 type
.t
|= VT_EXTERN
;
8334 tcc_error("cannot use local functions");
8335 if ((type
.t
& VT_BTYPE
) != VT_FUNC
)
8336 expect("function definition");
8338 /* reject abstract declarators in function definition
8339 make old style params without decl have int type */
8341 while ((sym
= sym
->next
) != NULL
) {
8342 if (!(sym
->v
& ~SYM_FIELD
))
8343 expect("identifier");
8344 if (sym
->type
.t
== VT_VOID
)
8345 sym
->type
= int_type
;
8348 /* apply post-declaraton attributes */
8349 merge_funcattr(&type
.ref
->f
, &ad
.f
);
8351 /* put function symbol */
8352 type
.t
&= ~VT_EXTERN
;
8353 sym
= external_sym(v
, &type
, 0, &ad
);
8355 /* static inline functions are just recorded as a kind
8356 of macro. Their code will be emitted at the end of
8357 the compilation unit only if they are used */
8358 if (sym
->type
.t
& VT_INLINE
) {
8359 struct InlineFunc
*fn
;
8360 fn
= tcc_malloc(sizeof *fn
+ strlen(file
->filename
));
8361 strcpy(fn
->filename
, file
->filename
);
8363 skip_or_save_block(&fn
->func_str
);
8364 dynarray_add(&tcc_state
->inline_fns
,
8365 &tcc_state
->nb_inline_fns
, fn
);
8367 /* compute text section */
8368 cur_text_section
= ad
.section
;
8369 if (!cur_text_section
)
8370 cur_text_section
= text_section
;
8376 /* find parameter in function parameter list */
8377 for (sym
= func_sym
->next
; sym
; sym
= sym
->next
)
8378 if ((sym
->v
& ~SYM_FIELD
) == v
)
8380 tcc_error("declaration for parameter '%s' but no such parameter",
8381 get_tok_str(v
, NULL
));
8383 if (type
.t
& VT_STORAGE
) /* 'register' is okay */
8384 tcc_error("storage class specified for '%s'",
8385 get_tok_str(v
, NULL
));
8386 if (sym
->type
.t
!= VT_VOID
)
8387 tcc_error("redefinition of parameter '%s'",
8388 get_tok_str(v
, NULL
));
8389 convert_parameter_type(&type
);
8391 } else if (type
.t
& VT_TYPEDEF
) {
8392 /* save typedefed type */
8393 /* XXX: test storage specifiers ? */
8395 if (sym
&& sym
->sym_scope
== local_scope
) {
8396 if (!is_compatible_types(&sym
->type
, &type
)
8397 || !(sym
->type
.t
& VT_TYPEDEF
))
8398 tcc_error("incompatible redefinition of '%s'",
8399 get_tok_str(v
, NULL
));
8402 sym
= sym_push(v
, &type
, 0, 0);
8407 tcc_debug_typedef (tcc_state
, sym
);
8408 } else if ((type
.t
& VT_BTYPE
) == VT_VOID
8409 && !(type
.t
& VT_EXTERN
)) {
8410 tcc_error("declaration of void object");
8413 if ((type
.t
& VT_BTYPE
) == VT_FUNC
) {
8414 /* external function definition */
8415 /* specific case for func_call attribute */
8417 } else if (!(type
.t
& VT_ARRAY
)) {
8418 /* not lvalue if array */
8421 has_init
= (tok
== '=');
8422 if (has_init
&& (type
.t
& VT_VLA
))
8423 tcc_error("variable length array cannot be initialized");
8424 if (((type
.t
& VT_EXTERN
) && (!has_init
|| l
!= VT_CONST
))
8425 || (type
.t
& VT_BTYPE
) == VT_FUNC
8426 /* as with GCC, uninitialized global arrays with no size
8427 are considered extern: */
8428 || ((type
.t
& VT_ARRAY
) && !has_init
8429 && l
== VT_CONST
&& type
.ref
->c
< 0)
8431 /* external variable or function */
8432 type
.t
|= VT_EXTERN
;
8433 sym
= external_sym(v
, &type
, r
, &ad
);
8434 if (ad
.alias_target
) {
8435 /* Aliases need to be emitted when their target
8436 symbol is emitted, even if perhaps unreferenced.
8437 We only support the case where the base is
8438 already defined, otherwise we would need
8439 deferring to emit the aliases until the end of
8440 the compile unit. */
8441 Sym
*alias_target
= sym_find(ad
.alias_target
);
8442 ElfSym
*esym
= elfsym(alias_target
);
8444 tcc_error("unsupported forward __alias__ attribute");
8445 put_extern_sym2(sym
, esym
->st_shndx
,
8446 esym
->st_value
, esym
->st_size
, 1);
8449 if (type
.t
& VT_STATIC
)
8455 else if (l
== VT_CONST
)
8456 /* uninitialized global variables may be overridden */
8457 type
.t
|= VT_EXTERN
;
8458 decl_initializer_alloc(&type
, &ad
, r
, has_init
, v
, l
);
8462 if (is_for_loop_init
)
8474 static void decl(int l
)
8479 /* ------------------------------------------------------------------------- */
8482 /* ------------------------------------------------------------------------- */