2 * TCC - Tiny C Compiler
4 * Copyright (c) 2001-2004 Fabrice Bellard
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
24 /********************************************************/
25 /* global variables */
27 /* loc : local variable index
28 ind : output code index
30 anon_sym: anonymous symbol index
/* NOTE(review): this file appears to be a fragmented extraction of TinyCC's
 * tccgen.c -- statements are split across lines and the leading integers are
 * remnants of the original line numbering, not C tokens. Verify against the
 * upstream file before compiling. */
/* Core code-generation state:
 *   rsym     - presumably the return-label chain for the current function
 *              (TODO confirm upstream)
 *   anon_sym - anonymous symbol index
 *   ind      - output code index
 *   loc      - local variable index (stack offset) */
32 ST_DATA
int rsym
, anon_sym
, ind
, loc
;
/* symbol stacks: file-scope symbols, function-local symbols, preprocessor
 * defines, and global/local label namespaces */
34 ST_DATA Sym
*global_stack
;
35 ST_DATA Sym
*local_stack
;
36 ST_DATA Sym
*define_stack
;
37 ST_DATA Sym
*global_label_stack
;
38 ST_DATA Sym
*local_label_stack
;
/* Sym allocator: free list plus the pool pages it carves Syms from
 * (see __sym_malloc/sym_free below; pools are released in tccgen_finish) */
40 static Sym
*sym_free_first
;
41 static void **sym_pools
;
42 static int nb_sym_pools
;
/* presumably cleanup handlers and unresolved gotos -- confirm upstream */
44 static Sym
*all_cleanups
, *pending_gotos
;
45 static int local_scope
;
47 static int constant_p
;
/* bit 0: debug info requested, bit 1: test coverage
 * (set from s1->do_debug / s1->test_coverage in tccgen_compile) */
48 ST_DATA
char debug_modes
;
/* value stack used by the expression generator; _vstack[0] is an extra
 * leading slot (check_vstack compares vtop against vstack - 1), the working
 * stack starts at vstack == _vstack + 1 */
51 static SValue _vstack
[1 + VSTACK_SIZE
];
52 #define vstack (_vstack + 1)
54 ST_DATA
int const_wanted
; /* true if constant wanted */
55 ST_DATA
int nocode_wanted
; /* no code generation wanted */
56 #define unevalmask 0xffff /* unevaluated subexpression */
57 #define NODATA_WANTED (nocode_wanted > 0) /* no static data output wanted either */
58 #define DATA_ONLY_WANTED 0x80000000 /* ON outside of functions and for static initializers */
59 #define CODE_OFF_BIT 0x20000000
/* CODE_OFF sets the suppression bit only if code was not already suppressed;
 * CODE_ON clears just that bit, leaving the other nocode_wanted flags intact */
60 #define CODE_OFF() if(!nocode_wanted)(nocode_wanted |= CODE_OFF_BIT)
61 #define CODE_ON() (nocode_wanted &= ~CODE_OFF_BIT)
63 ST_DATA
int global_expr
; /* true if compound literals must be allocated globally (used during initializers parsing */
64 ST_DATA CType func_vt
; /* current function return type (used by return instruction) */
65 ST_DATA
int func_var
; /* true if current function is variadic (used by return instruction) */
68 ST_DATA
const char *funcname
;
/* frequently used predefined types, initialized in tccgen_init() */
69 ST_DATA CType int_type
, func_old_type
, char_type
, char_pointer_type
;
70 static CString initstr
;
/* NOTE(review): the three VT_SIZE_T / VT_PTRDIFF_T definition pairs below
 * were originally guarded by #if/#elif on the target pointer size; the
 * conditional lines are missing from this extraction -- restore them from
 * upstream, otherwise the macros are redefined. */
73 #define VT_SIZE_T (VT_INT | VT_UNSIGNED)
74 #define VT_PTRDIFF_T VT_INT
76 #define VT_SIZE_T (VT_LLONG | VT_UNSIGNED)
77 #define VT_PTRDIFF_T VT_LLONG
79 #define VT_SIZE_T (VT_LONG | VT_LLONG | VT_UNSIGNED)
80 #define VT_PTRDIFF_T (VT_LONG | VT_LLONG)
/* state of the innermost switch statement being compiled; switches nest via
 * the 'prev' pointer. NOTE(review): several member lines are missing from
 * this extraction -- compare with upstream before editing. */
83 static struct switch_t
{
87 } **p
; int n
; /* list of case ranges */
88 int def_sym
; /* default symbol */
92 struct switch_t
*prev
; /* enclosing switch, restored on exit */
94 } *cur_switch
; /* current switch */
96 #define MAX_TEMP_LOCAL_VARIABLE_NUMBER 8
97 /*list of temporary local variables on the stack in current function. */
98 static struct temp_local_variable
{
99 int location
; //offset on stack. Svalue.c.i
102 } arr_temp_local_vars
[MAX_TEMP_LOCAL_VARIABLE_NUMBER
];
103 static int nb_temp_local_vars
; /* number of slots of arr_temp_local_vars in use */
/* lexical scope chain: per-scope VLA bookkeeping and cleanup list
 * (NOTE(review): some member lines are missing from this extraction) */
105 static struct scope
{
107 struct { int loc
, locorig
, num
; } vla
;
108 struct { Sym
*s
; int n
; } cl
;
111 } *cur_scope
, *loop_scope
, *root_scope
;
/* ---- forward declarations for the parser / code generator ---- */
120 #define precedence_parser
121 static void init_prec(void);
/* cast generation */
124 static void gen_cast(CType
*type
);
125 static void gen_cast_s(int t
);
126 static inline CType
*pointed_type(CType
*type
);
/* type compatibility and declaration parsing */
127 static int is_compatible_types(CType
*type1
, CType
*type2
);
128 static int parse_btype(CType
*type
, AttributeDef
*ad
, int ignore_label
);
129 static CType
*type_decl(CType
*type
, AttributeDef
*ad
, int *v
, int td
);
130 static void parse_expr_type(CType
*type
);
/* initializer handling */
131 static void init_putv(init_params
*p
, CType
*type
, unsigned long c
);
132 static void decl_initializer(init_params
*p
, CType
*type
, unsigned long c
, int flags
);
133 static void block(int is_expr
);
134 static void decl_initializer_alloc(CType
*type
, AttributeDef
*ad
, int r
, int has_init
, int v
, int scope
);
135 static int decl(int l
);
136 static void expr_eq(void);
137 static void vpush_type_size(CType
*type
, int *a
);
138 static int is_compatible_unqualified_types(CType
*type1
, CType
*type2
);
/* constant expressions and value-stack helpers */
139 static inline int64_t expr_const64(void);
140 static void vpush64(int ty
, unsigned long long v
);
141 static void vpush(CType
*type
);
142 static int gvtst(int inv
, int t
);
/* inline function bookkeeping */
143 static void gen_inline_functions(TCCState
*s
);
144 static void free_inline_functions(TCCState
*s
);
145 static void skip_or_save_block(TokenString
**str
);
146 static void gv_dup(void);
/* temporary stack slots used when spilling registers */
147 static int get_temp_local_var(int size
,int align
);
148 static void clear_temp_local_var_list();
149 static void cast_error(CType
*st
, CType
*dt
);
151 /* ------------------------------------------------------------------------- */
152 /* Automagical code suppression */
154 /* Clear 'nocode_wanted' at forward label if it was used */
155 ST_FUNC
void gsym(int t
)
163 /* Clear 'nocode_wanted' if current pc is a label */
169 tcc_tcov_block_begin(tcc_state
);
173 /* Set 'nocode_wanted' after unconditional (backwards) jump */
174 static void gjmp_addr_acs(int t
)
180 /* Set 'nocode_wanted' after unconditional (forwards) jump */
181 static int gjmp_acs(int t
)
188 /* These are #undef'd at the end of this file */
189 #define gjmp_addr gjmp_addr_acs
190 #define gjmp gjmp_acs
191 /* ------------------------------------------------------------------------- */
193 ST_INLN
int is_float(int t
)
195 int bt
= t
& VT_BTYPE
;
196 return bt
== VT_LDOUBLE
202 static inline int is_integer_btype(int bt
)
211 static int btype_size(int bt
)
213 return bt
== VT_BYTE
|| bt
== VT_BOOL
? 1 :
217 bt
== VT_PTR
? PTR_SIZE
: 0;
220 /* returns function return register from type */
221 static int R_RET(int t
)
225 #ifdef TCC_TARGET_X86_64
226 if ((t
& VT_BTYPE
) == VT_LDOUBLE
)
228 #elif defined TCC_TARGET_RISCV64
229 if ((t
& VT_BTYPE
) == VT_LDOUBLE
)
235 /* returns 2nd function return register, if any */
236 static int R2_RET(int t
)
242 #elif defined TCC_TARGET_X86_64
247 #elif defined TCC_TARGET_RISCV64
254 /* returns true for two-word types */
255 #define USING_TWO_WORDS(t) (R2_RET(t) != VT_CONST)
257 /* put function return registers to stack value */
258 static void PUT_R_RET(SValue
*sv
, int t
)
260 sv
->r
= R_RET(t
), sv
->r2
= R2_RET(t
);
263 /* returns function return register class for type t */
264 static int RC_RET(int t
)
266 return reg_classes
[R_RET(t
)] & ~(RC_FLOAT
| RC_INT
);
269 /* returns generic register class for type t */
270 static int RC_TYPE(int t
)
274 #ifdef TCC_TARGET_X86_64
275 if ((t
& VT_BTYPE
) == VT_LDOUBLE
)
277 if ((t
& VT_BTYPE
) == VT_QFLOAT
)
279 #elif defined TCC_TARGET_RISCV64
280 if ((t
& VT_BTYPE
) == VT_LDOUBLE
)
286 /* returns 2nd register class corresponding to t and rc */
287 static int RC2_TYPE(int t
, int rc
)
289 if (!USING_TWO_WORDS(t
))
304 /* we use our own 'finite' function to avoid potential problems with
305 non standard math libs */
306 /* XXX: endianness dependent */
307 ST_FUNC
int ieee_finite(double d
)
310 memcpy(p
, &d
, sizeof(double));
311 return ((unsigned)((p
[1] | 0x800fffff) + 1)) >> 31;
314 /* compiling intel long double natively */
315 #if (defined __i386__ || defined __x86_64__) \
316 && (defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64)
317 # define TCC_IS_NATIVE_387
320 ST_FUNC
void test_lvalue(void)
322 if (!(vtop
->r
& VT_LVAL
))
326 ST_FUNC
void check_vstack(void)
328 if (vtop
!= vstack
- 1)
329 tcc_error("internal compiler error: vstack leak (%d)",
330 (int)(vtop
- vstack
+ 1));
333 /* vstack debugging aid */
335 void pv (const char *lbl
, int a
, int b
)
338 for (i
= a
; i
< a
+ b
; ++i
) {
339 SValue
*p
= &vtop
[-i
];
340 printf("%s vtop[-%d] : type.t:%04x r:%04x r2:%04x c.i:%d\n",
341 lbl
, i
, p
->type
.t
, p
->r
, p
->r2
, (int)p
->c
.i
);
346 /* ------------------------------------------------------------------------- */
347 /* initialize vstack and types. This must be done also for tcc -E */
348 ST_FUNC
void tccgen_init(TCCState
*s1
)
351 memset(vtop
, 0, sizeof *vtop
);
353 /* define some often used types */
356 char_type
.t
= VT_BYTE
;
357 if (s1
->char_is_unsigned
)
358 char_type
.t
|= VT_UNSIGNED
;
359 char_pointer_type
= char_type
;
360 mk_pointer(&char_pointer_type
);
362 func_old_type
.t
= VT_FUNC
;
363 func_old_type
.ref
= sym_push(SYM_FIELD
, &int_type
, 0, 0);
364 func_old_type
.ref
->f
.func_call
= FUNC_CDECL
;
365 func_old_type
.ref
->f
.func_type
= FUNC_OLD
;
366 #ifdef precedence_parser
372 ST_FUNC
int tccgen_compile(TCCState
*s1
)
374 cur_text_section
= NULL
;
377 anon_sym
= SYM_FIRST_ANOM
;
379 nocode_wanted
= DATA_ONLY_WANTED
; /* no code outside of functions */
381 debug_modes
= (s1
->do_debug
? 1 : 0) | s1
->test_coverage
<< 1;
385 #ifdef TCC_TARGET_ARM
389 printf("%s: **** new file\n", file
->filename
);
391 parse_flags
= PARSE_FLAG_PREPROCESS
| PARSE_FLAG_TOK_NUM
| PARSE_FLAG_TOK_STR
;
394 gen_inline_functions(s1
);
396 /* end of translation unit info */
402 ST_FUNC
void tccgen_finish(TCCState
*s1
)
405 free_inline_functions(s1
);
406 sym_pop(&global_stack
, NULL
, 0);
407 sym_pop(&local_stack
, NULL
, 0);
408 /* free preprocessor macros */
411 dynarray_reset(&sym_pools
, &nb_sym_pools
);
412 sym_free_first
= NULL
;
415 /* ------------------------------------------------------------------------- */
416 ST_FUNC ElfSym
*elfsym(Sym
*s
)
420 return &((ElfSym
*)symtab_section
->data
)[s
->c
];
423 /* apply storage attributes to Elf symbol */
424 ST_FUNC
void update_storage(Sym
*sym
)
427 int sym_bind
, old_sym_bind
;
433 if (sym
->a
.visibility
)
434 esym
->st_other
= (esym
->st_other
& ~ELFW(ST_VISIBILITY
)(-1))
437 if (sym
->type
.t
& (VT_STATIC
| VT_INLINE
))
438 sym_bind
= STB_LOCAL
;
439 else if (sym
->a
.weak
)
442 sym_bind
= STB_GLOBAL
;
443 old_sym_bind
= ELFW(ST_BIND
)(esym
->st_info
);
444 if (sym_bind
!= old_sym_bind
) {
445 esym
->st_info
= ELFW(ST_INFO
)(sym_bind
, ELFW(ST_TYPE
)(esym
->st_info
));
449 if (sym
->a
.dllimport
)
450 esym
->st_other
|= ST_PE_IMPORT
;
451 if (sym
->a
.dllexport
)
452 esym
->st_other
|= ST_PE_EXPORT
;
456 printf("storage %s: bind=%c vis=%d exp=%d imp=%d\n",
457 get_tok_str(sym
->v
, NULL
),
458 sym_bind
== STB_WEAK
? 'w' : sym_bind
== STB_LOCAL
? 'l' : 'g',
466 /* ------------------------------------------------------------------------- */
467 /* update sym->c so that it points to an external symbol in section
468 'section' with value 'value' */
470 ST_FUNC
void put_extern_sym2(Sym
*sym
, int sh_num
,
471 addr_t value
, unsigned long size
,
472 int can_add_underscore
)
474 int sym_type
, sym_bind
, info
, other
, t
;
480 name
= get_tok_str(sym
->v
, NULL
);
482 if ((t
& VT_BTYPE
) == VT_FUNC
) {
484 } else if ((t
& VT_BTYPE
) == VT_VOID
) {
485 sym_type
= STT_NOTYPE
;
486 if ((t
& (VT_BTYPE
|VT_ASM_FUNC
)) == VT_ASM_FUNC
)
489 sym_type
= STT_OBJECT
;
491 if (t
& (VT_STATIC
| VT_INLINE
))
492 sym_bind
= STB_LOCAL
;
494 sym_bind
= STB_GLOBAL
;
498 if (sym_type
== STT_FUNC
&& sym
->type
.ref
) {
499 Sym
*ref
= sym
->type
.ref
;
500 if (ref
->a
.nodecorate
) {
501 can_add_underscore
= 0;
503 if (ref
->f
.func_call
== FUNC_STDCALL
&& can_add_underscore
) {
504 sprintf(buf1
, "_%s@%d", name
, ref
->f
.func_args
* PTR_SIZE
);
506 other
|= ST_PE_STDCALL
;
507 can_add_underscore
= 0;
512 if (sym
->asm_label
) {
513 name
= get_tok_str(sym
->asm_label
, NULL
);
514 can_add_underscore
= 0;
517 if (tcc_state
->leading_underscore
&& can_add_underscore
) {
519 pstrcpy(buf1
+ 1, sizeof(buf1
) - 1, name
);
523 info
= ELFW(ST_INFO
)(sym_bind
, sym_type
);
524 sym
->c
= put_elf_sym(symtab_section
, value
, size
, info
, other
, sh_num
, name
);
527 tcc_debug_extern_sym(tcc_state
, sym
, sh_num
, sym_bind
, sym_type
);
531 esym
->st_value
= value
;
532 esym
->st_size
= size
;
533 esym
->st_shndx
= sh_num
;
538 ST_FUNC
void put_extern_sym(Sym
*sym
, Section
*s
, addr_t value
, unsigned long size
)
540 if (nocode_wanted
&& (NODATA_WANTED
|| (s
&& s
== cur_text_section
)))
542 put_extern_sym2(sym
, s
? s
->sh_num
: SHN_UNDEF
, value
, size
, 1);
545 /* add a new relocation entry to symbol 'sym' in section 's' */
546 ST_FUNC
void greloca(Section
*s
, Sym
*sym
, unsigned long offset
, int type
,
551 if (nocode_wanted
&& s
== cur_text_section
)
556 put_extern_sym(sym
, NULL
, 0, 0);
560 /* now we can add ELF relocation info */
561 put_elf_reloca(symtab_section
, s
, offset
, type
, c
, addend
);
565 ST_FUNC
void greloc(Section
*s
, Sym
*sym
, unsigned long offset
, int type
)
567 greloca(s
, sym
, offset
, type
, 0);
571 /* ------------------------------------------------------------------------- */
572 /* symbol allocator */
573 static Sym
*__sym_malloc(void)
575 Sym
*sym_pool
, *sym
, *last_sym
;
578 sym_pool
= tcc_malloc(SYM_POOL_NB
* sizeof(Sym
));
579 dynarray_add(&sym_pools
, &nb_sym_pools
, sym_pool
);
581 last_sym
= sym_free_first
;
583 for(i
= 0; i
< SYM_POOL_NB
; i
++) {
584 sym
->next
= last_sym
;
588 sym_free_first
= last_sym
;
592 static inline Sym
*sym_malloc(void)
596 sym
= sym_free_first
;
598 sym
= __sym_malloc();
599 sym_free_first
= sym
->next
;
602 sym
= tcc_malloc(sizeof(Sym
));
607 ST_INLN
void sym_free(Sym
*sym
)
610 sym
->next
= sym_free_first
;
611 sym_free_first
= sym
;
617 /* push, without hashing */
618 ST_FUNC Sym
*sym_push2(Sym
**ps
, int v
, int t
, int c
)
623 memset(s
, 0, sizeof *s
);
633 /* find a symbol and return its associated structure. 's' is the top
634 of the symbol stack */
635 ST_FUNC Sym
*sym_find2(Sym
*s
, int v
)
647 /* structure lookup */
648 ST_INLN Sym
*struct_find(int v
)
651 if ((unsigned)v
>= (unsigned)(tok_ident
- TOK_IDENT
))
653 return table_ident
[v
]->sym_struct
;
656 /* find an identifier */
657 ST_INLN Sym
*sym_find(int v
)
660 if ((unsigned)v
>= (unsigned)(tok_ident
- TOK_IDENT
))
662 return table_ident
[v
]->sym_identifier
;
665 static int sym_scope(Sym
*s
)
667 if (IS_ENUM_VAL (s
->type
.t
))
668 return s
->type
.ref
->sym_scope
;
673 /* push a given symbol on the symbol stack */
674 ST_FUNC Sym
*sym_push(int v
, CType
*type
, int r
, int c
)
683 s
= sym_push2(ps
, v
, type
->t
, c
);
684 s
->type
.ref
= type
->ref
;
686 /* don't record fields or anonymous symbols */
688 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
689 /* record symbol in token array */
690 ts
= table_ident
[(v
& ~SYM_STRUCT
) - TOK_IDENT
];
692 ps
= &ts
->sym_struct
;
694 ps
= &ts
->sym_identifier
;
697 s
->sym_scope
= local_scope
;
698 if (s
->prev_tok
&& sym_scope(s
->prev_tok
) == s
->sym_scope
)
699 tcc_error("redeclaration of '%s'",
700 get_tok_str(v
& ~SYM_STRUCT
, NULL
));
705 /* push a global identifier */
706 ST_FUNC Sym
*global_identifier_push(int v
, int t
, int c
)
709 s
= sym_push2(&global_stack
, v
, t
, c
);
710 s
->r
= VT_CONST
| VT_SYM
;
711 /* don't record anonymous symbol */
712 if (v
< SYM_FIRST_ANOM
) {
713 ps
= &table_ident
[v
- TOK_IDENT
]->sym_identifier
;
714 /* modify the top most local identifier, so that sym_identifier will
715 point to 's' when popped; happens when called from inline asm */
716 while (*ps
!= NULL
&& (*ps
)->sym_scope
)
717 ps
= &(*ps
)->prev_tok
;
724 /* pop symbols until top reaches 'b'. If KEEP is non-zero don't really
725 pop them yet from the list, but do remove them from the token array. */
726 ST_FUNC
void sym_pop(Sym
**ptop
, Sym
*b
, int keep
)
736 /* remove symbol in token array */
738 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
739 ts
= table_ident
[(v
& ~SYM_STRUCT
) - TOK_IDENT
];
741 ps
= &ts
->sym_struct
;
743 ps
= &ts
->sym_identifier
;
755 ST_FUNC Sym
*label_find(int v
)
758 if ((unsigned)v
>= (unsigned)(tok_ident
- TOK_IDENT
))
760 return table_ident
[v
]->sym_label
;
763 ST_FUNC Sym
*label_push(Sym
**ptop
, int v
, int flags
)
766 s
= sym_push2(ptop
, v
, VT_STATIC
, 0);
768 ps
= &table_ident
[v
- TOK_IDENT
]->sym_label
;
769 if (ptop
== &global_label_stack
) {
770 /* modify the top most local identifier, so that
771 sym_identifier will point to 's' when popped */
773 ps
= &(*ps
)->prev_tok
;
780 /* pop labels until element last is reached. Look if any labels are
781 undefined. Define symbols if '&&label' was used. */
782 ST_FUNC
void label_pop(Sym
**ptop
, Sym
*slast
, int keep
)
785 for(s
= *ptop
; s
!= slast
; s
= s1
) {
787 if (s
->r
== LABEL_DECLARED
) {
788 tcc_warning_c(warn_all
)("label '%s' declared but not used", get_tok_str(s
->v
, NULL
));
789 } else if (s
->r
== LABEL_FORWARD
) {
790 tcc_error("label '%s' used but not defined",
791 get_tok_str(s
->v
, NULL
));
794 /* define corresponding symbol. A size of
796 put_extern_sym(s
, cur_text_section
, s
->jnext
, 1);
800 if (s
->r
!= LABEL_GONE
)
801 table_ident
[s
->v
- TOK_IDENT
]->sym_label
= s
->prev_tok
;
811 /* ------------------------------------------------------------------------- */
812 static void vcheck_cmp(void)
814 /* cannot let cpu flags if other instruction are generated. Also
815 avoid leaving VT_JMP anywhere except on the top of the stack
816 because it would complicate the code generator.
818 Don't do this when nocode_wanted. vtop might come from
819 !nocode_wanted regions (see 88_codeopt.c) and transforming
820 it to a register without actually generating code is wrong
821 as their value might still be used for real. All values
822 we push under nocode_wanted will eventually be popped
823 again, so that the VT_CMP/VT_JMP value will be in vtop
824 when code is unsuppressed again. */
826 /* However if it's just automatic suppression via CODE_OFF/ON()
827 then it seems that we better let things work undisturbed.
828 How can it work at all under nocode_wanted? Well, gv() will
829 actually clear it at the gsym() in load()/VT_JMP in the
830 generator backends */
832 if (vtop
->r
== VT_CMP
&& 0 == (nocode_wanted
& ~CODE_OFF_BIT
))
836 static void vsetc(CType
*type
, int r
, CValue
*vc
)
838 if (vtop
>= vstack
+ (VSTACK_SIZE
- 1))
839 tcc_error("memory full (vstack)");
849 ST_FUNC
void vswap(void)
859 /* pop stack value */
860 ST_FUNC
void vpop(void)
863 v
= vtop
->r
& VT_VALMASK
;
864 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
865 /* for x86, we need to pop the FP stack */
867 o(0xd8dd); /* fstp %st(0) */
871 /* need to put correct jump if && or || without test */
878 /* push constant of type "type" with useless value */
879 static void vpush(CType
*type
)
881 vset(type
, VT_CONST
, 0);
884 /* push arbitrary 64bit constant */
885 static void vpush64(int ty
, unsigned long long v
)
892 vsetc(&ctype
, VT_CONST
, &cval
);
895 /* push integer constant */
896 ST_FUNC
void vpushi(int v
)
901 /* push a pointer sized constant */
902 static void vpushs(addr_t v
)
904 vpush64(VT_SIZE_T
, v
);
907 /* push long long constant */
908 static inline void vpushll(long long v
)
910 vpush64(VT_LLONG
, v
);
913 ST_FUNC
void vset(CType
*type
, int r
, int v
)
917 vsetc(type
, r
, &cval
);
920 static void vseti(int r
, int v
)
928 ST_FUNC
void vpushv(SValue
*v
)
930 if (vtop
>= vstack
+ (VSTACK_SIZE
- 1))
931 tcc_error("memory full (vstack)");
936 static void vdup(void)
941 /* rotate n first stack elements to the bottom
942 I1 ... In -> I2 ... In I1 [top is right]
944 ST_FUNC
void vrotb(int n
)
956 /* rotate the n elements before entry e towards the top
957 I1 ... In ... -> In I1 ... I(n-1) ... [top is right]
959 ST_FUNC
void vrote(SValue
*e
, int n
)
966 for(i
= 0;i
< n
- 1; i
++)
971 /* rotate n first stack elements to the top
972 I1 ... In -> In I1 ... I(n-1) [top is right]
974 ST_FUNC
void vrott(int n
)
979 /* ------------------------------------------------------------------------- */
980 /* vtop->r = VT_CMP means CPU-flags have been set from comparison or test. */
982 /* called from generators to set the result from relational ops */
983 ST_FUNC
void vset_VT_CMP(int op
)
991 /* called once before asking generators to load VT_CMP to a register */
992 static void vset_VT_JMP(void)
994 int op
= vtop
->cmp_op
;
996 if (vtop
->jtrue
|| vtop
->jfalse
) {
997 int origt
= vtop
->type
.t
;
998 /* we need to jump to 'mov $0,%R' or 'mov $1,%R' */
999 int inv
= op
& (op
< 2); /* small optimization */
1000 vseti(VT_JMP
+inv
, gvtst(inv
, 0));
1001 vtop
->type
.t
|= origt
& (VT_UNSIGNED
| VT_DEFSIGN
);
1003 /* otherwise convert flags (rsp. 0/1) to register */
1005 if (op
< 2) /* doesn't seem to happen */
1010 /* Set CPU Flags, doesn't yet jump */
1011 static void gvtst_set(int inv
, int t
)
1015 if (vtop
->r
!= VT_CMP
) {
1018 if (vtop
->r
!= VT_CMP
) /* must be VT_CONST then */
1019 vset_VT_CMP(vtop
->c
.i
!= 0);
1022 p
= inv
? &vtop
->jfalse
: &vtop
->jtrue
;
1023 *p
= gjmp_append(*p
, t
);
1026 /* Generate value test
1028 * Generate a test for any value (jump, comparison and integers) */
1029 static int gvtst(int inv
, int t
)
1034 t
= vtop
->jtrue
, u
= vtop
->jfalse
;
1036 x
= u
, u
= t
, t
= x
;
1039 /* jump to the wanted target */
1041 t
= gjmp_cond(op
^ inv
, t
);
1044 /* resolve complementary jumps to here */
1051 /* generate a zero or nozero test */
1052 static void gen_test_zero(int op
)
1054 if (vtop
->r
== VT_CMP
) {
1058 vtop
->jfalse
= vtop
->jtrue
;
1068 /* ------------------------------------------------------------------------- */
1069 /* push a symbol value of TYPE */
1070 ST_FUNC
void vpushsym(CType
*type
, Sym
*sym
)
1074 vsetc(type
, VT_CONST
| VT_SYM
, &cval
);
1078 /* Return a static symbol pointing to a section */
1079 ST_FUNC Sym
*get_sym_ref(CType
*type
, Section
*sec
, unsigned long offset
, unsigned long size
)
1085 sym
= sym_push(v
, type
, VT_CONST
| VT_SYM
, 0);
1086 sym
->type
.t
|= VT_STATIC
;
1087 put_extern_sym(sym
, sec
, offset
, size
);
1091 /* push a reference to a section offset by adding a dummy symbol */
1092 static void vpush_ref(CType
*type
, Section
*sec
, unsigned long offset
, unsigned long size
)
1094 vpushsym(type
, get_sym_ref(type
, sec
, offset
, size
));
1097 /* define a new external reference to a symbol 'v' of type 'u' */
1098 ST_FUNC Sym
*external_global_sym(int v
, CType
*type
)
1104 /* push forward reference */
1105 s
= global_identifier_push(v
, type
->t
| VT_EXTERN
, 0);
1106 s
->type
.ref
= type
->ref
;
1107 } else if (IS_ASM_SYM(s
)) {
1108 s
->type
.t
= type
->t
| (s
->type
.t
& VT_EXTERN
);
1109 s
->type
.ref
= type
->ref
;
1115 /* create an external reference with no specific type similar to asm labels.
1116 This avoids type conflicts if the symbol is used from C too */
1117 ST_FUNC Sym
*external_helper_sym(int v
)
1119 CType ct
= { VT_ASM_FUNC
, NULL
};
1120 return external_global_sym(v
, &ct
);
1123 /* push a reference to an helper function (such as memmove) */
1124 ST_FUNC
void vpush_helper_func(int v
)
1126 vpushsym(&func_old_type
, external_helper_sym(v
));
1129 /* Merge symbol attributes. */
/* Folds the attributes of sa1 into sa:
 *  - alignment is taken from sa1 only if sa has none;
 *  - boolean flags (packed, weak, nodebug, dllexport, nodecorate, dllimport)
 *    OR-combine;
 *  - visibility: sa1's value wins when sa's is STV_DEFAULT or numerically
 *    greater, i.e. the merge keeps the numerically smaller non-default value.
 * NOTE(review): the function's braces are on lines missing from this
 * extraction -- restore from upstream. */
1130 static void merge_symattr(struct SymAttr
*sa
, struct SymAttr
*sa1
)
1132 if (sa1
->aligned
&& !sa
->aligned
)
1133 sa
->aligned
= sa1
->aligned
;
1134 sa
->packed
|= sa1
->packed
;
1135 sa
->weak
|= sa1
->weak
;
1136 sa
->nodebug
|= sa1
->nodebug
;
1137 if (sa1
->visibility
!= STV_DEFAULT
) {
1138 int vis
= sa
->visibility
;
1139 if (vis
== STV_DEFAULT
1140 || vis
> sa1
->visibility
)
1141 vis
= sa1
->visibility
;
1142 sa
->visibility
= vis
;
1144 sa
->dllexport
|= sa1
->dllexport
;
1145 sa
->nodecorate
|= sa1
->nodecorate
;
1146 sa
->dllimport
|= sa1
->dllimport
;
1149 /* Merge function attributes. */
/* Folds fa1 into fa: calling convention, prototype kind and argument count
 * are taken from fa1 only when fa does not already have them (first setting
 * wins); func_noreturn is sticky once set by either side.
 * NOTE(review): the function's braces are on lines missing from this
 * extraction -- restore from upstream. */
1150 static void merge_funcattr(struct FuncAttr
*fa
, struct FuncAttr
*fa1
)
1152 if (fa1
->func_call
&& !fa
->func_call
)
1153 fa
->func_call
= fa1
->func_call
;
1154 if (fa1
->func_type
&& !fa
->func_type
)
1155 fa
->func_type
= fa1
->func_type
;
1156 if (fa1
->func_args
&& !fa
->func_args
)
1157 fa
->func_args
= fa1
->func_args
;
1158 if (fa1
->func_noreturn
)
1159 fa
->func_noreturn
= 1;
1166 /* Merge attributes. */
1167 static void merge_attr(AttributeDef
*ad
, AttributeDef
*ad1
)
1169 merge_symattr(&ad
->a
, &ad1
->a
);
1170 merge_funcattr(&ad
->f
, &ad1
->f
);
1173 ad
->section
= ad1
->section
;
1174 if (ad1
->alias_target
)
1175 ad
->alias_target
= ad1
->alias_target
;
1177 ad
->asm_label
= ad1
->asm_label
;
1179 ad
->attr_mode
= ad1
->attr_mode
;
1182 /* Merge some type attributes. */
1183 static void patch_type(Sym
*sym
, CType
*type
)
1185 if (!(type
->t
& VT_EXTERN
) || IS_ENUM_VAL(sym
->type
.t
)) {
1186 if (!(sym
->type
.t
& VT_EXTERN
))
1187 tcc_error("redefinition of '%s'", get_tok_str(sym
->v
, NULL
));
1188 sym
->type
.t
&= ~VT_EXTERN
;
1191 if (IS_ASM_SYM(sym
)) {
1192 /* stay static if both are static */
1193 sym
->type
.t
= type
->t
& (sym
->type
.t
| ~VT_STATIC
);
1194 sym
->type
.ref
= type
->ref
;
1197 if (!is_compatible_types(&sym
->type
, type
)) {
1198 tcc_error("incompatible types for redefinition of '%s'",
1199 get_tok_str(sym
->v
, NULL
));
1201 } else if ((sym
->type
.t
& VT_BTYPE
) == VT_FUNC
) {
1202 int static_proto
= sym
->type
.t
& VT_STATIC
;
1203 /* warn if static follows non-static function declaration */
1204 if ((type
->t
& VT_STATIC
) && !static_proto
1205 /* XXX this test for inline shouldn't be here. Until we
1206 implement gnu-inline mode again it silences a warning for
1207 mingw caused by our workarounds. */
1208 && !((type
->t
| sym
->type
.t
) & VT_INLINE
))
1209 tcc_warning("static storage ignored for redefinition of '%s'",
1210 get_tok_str(sym
->v
, NULL
));
1212 /* set 'inline' if both agree or if one has static */
1213 if ((type
->t
| sym
->type
.t
) & VT_INLINE
) {
1214 if (!((type
->t
^ sym
->type
.t
) & VT_INLINE
)
1215 || ((type
->t
| sym
->type
.t
) & VT_STATIC
))
1216 static_proto
|= VT_INLINE
;
1219 if (0 == (type
->t
& VT_EXTERN
)) {
1220 struct FuncAttr f
= sym
->type
.ref
->f
;
1221 /* put complete type, use static from prototype */
1222 sym
->type
.t
= (type
->t
& ~(VT_STATIC
|VT_INLINE
)) | static_proto
;
1223 sym
->type
.ref
= type
->ref
;
1224 merge_funcattr(&sym
->type
.ref
->f
, &f
);
1226 sym
->type
.t
&= ~VT_INLINE
| static_proto
;
1229 if (sym
->type
.ref
->f
.func_type
== FUNC_OLD
1230 && type
->ref
->f
.func_type
!= FUNC_OLD
) {
1231 sym
->type
.ref
= type
->ref
;
1235 if ((sym
->type
.t
& VT_ARRAY
) && type
->ref
->c
>= 0) {
1236 /* set array size if it was omitted in extern declaration */
1237 sym
->type
.ref
->c
= type
->ref
->c
;
1239 if ((type
->t
^ sym
->type
.t
) & VT_STATIC
)
1240 tcc_warning("storage mismatch for redefinition of '%s'",
1241 get_tok_str(sym
->v
, NULL
));
1245 /* Merge some storage attributes. */
1246 static void patch_storage(Sym
*sym
, AttributeDef
*ad
, CType
*type
)
1249 patch_type(sym
, type
);
1251 #ifdef TCC_TARGET_PE
1252 if (sym
->a
.dllimport
!= ad
->a
.dllimport
)
1253 tcc_error("incompatible dll linkage for redefinition of '%s'",
1254 get_tok_str(sym
->v
, NULL
));
1256 merge_symattr(&sym
->a
, &ad
->a
);
1258 sym
->asm_label
= ad
->asm_label
;
1259 update_storage(sym
);
1262 /* copy sym to other stack */
1263 static Sym
*sym_copy(Sym
*s0
, Sym
**ps
)
1266 s
= sym_malloc(), *s
= *s0
;
1267 s
->prev
= *ps
, *ps
= s
;
1268 if (s
->v
< SYM_FIRST_ANOM
) {
1269 ps
= &table_ident
[s
->v
- TOK_IDENT
]->sym_identifier
;
1270 s
->prev_tok
= *ps
, *ps
= s
;
1275 /* copy s->type.ref to stack 'ps' for VT_FUNC and VT_PTR */
1276 static void sym_copy_ref(Sym
*s
, Sym
**ps
)
1278 int bt
= s
->type
.t
& VT_BTYPE
;
1279 if (bt
== VT_FUNC
|| bt
== VT_PTR
|| (bt
== VT_STRUCT
&& s
->sym_scope
)) {
1280 Sym
**sp
= &s
->type
.ref
;
1281 for (s
= *sp
, *sp
= NULL
; s
; s
= s
->next
) {
1282 Sym
*s2
= sym_copy(s
, ps
);
1283 sp
= &(*sp
= s2
)->next
;
1284 sym_copy_ref(s2
, ps
);
1289 /* define a new external reference to a symbol 'v' */
1290 static Sym
*external_sym(int v
, CType
*type
, int r
, AttributeDef
*ad
)
1294 /* look for global symbol */
1296 while (s
&& s
->sym_scope
)
1300 /* push forward reference */
1301 s
= global_identifier_push(v
, type
->t
, 0);
1304 s
->asm_label
= ad
->asm_label
;
1305 s
->type
.ref
= type
->ref
;
1306 /* copy type to the global stack */
1308 sym_copy_ref(s
, &global_stack
);
1310 patch_storage(s
, ad
, type
);
1312 /* push variables on local_stack if any */
1313 if (local_stack
&& (s
->type
.t
& VT_BTYPE
) != VT_FUNC
)
1314 s
= sym_copy(s
, &local_stack
);
1318 /* save registers up to (vtop - n) stack entry */
1319 ST_FUNC
void save_regs(int n
)
1322 for(p
= vstack
, p1
= vtop
- n
; p
<= p1
; p
++)
1326 /* save r to the memory stack, and mark it as being free */
1327 ST_FUNC
void save_reg(int r
)
1329 save_reg_upstack(r
, 0);
1332 /* save r to the memory stack, and mark it as being free,
1333 if seen up to (vtop - n) stack entry */
1334 ST_FUNC
void save_reg_upstack(int r
, int n
)
1336 int l
, size
, align
, bt
;
1339 if ((r
&= VT_VALMASK
) >= VT_CONST
)
1344 for(p
= vstack
, p1
= vtop
- n
; p
<= p1
; p
++) {
1345 if ((p
->r
& VT_VALMASK
) == r
|| p
->r2
== r
) {
1346 /* must save value on stack if not already done */
1348 bt
= p
->type
.t
& VT_BTYPE
;
1351 if ((p
->r
& VT_LVAL
) || bt
== VT_FUNC
)
1354 size
= type_size(&sv
.type
, &align
);
1355 l
= get_temp_local_var(size
,align
);
1356 sv
.r
= VT_LOCAL
| VT_LVAL
;
1358 store(p
->r
& VT_VALMASK
, &sv
);
1359 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
1360 /* x86 specific: need to pop fp register ST0 if saved */
1361 if (r
== TREG_ST0
) {
1362 o(0xd8dd); /* fstp %st(0) */
1365 /* special long long case */
1366 if (p
->r2
< VT_CONST
&& USING_TWO_WORDS(bt
)) {
1371 /* mark that stack entry as being saved on the stack */
1372 if (p
->r
& VT_LVAL
) {
1373 /* also clear the bounded flag because the
1374 relocation address of the function was stored in
1376 p
->r
= (p
->r
& ~(VT_VALMASK
| VT_BOUNDED
)) | VT_LLOCAL
;
1378 p
->r
= VT_LVAL
| VT_LOCAL
;
1387 #ifdef TCC_TARGET_ARM
1388 /* find a register of class 'rc2' with at most one reference on stack.
1389 * If none, call get_reg(rc) */
1390 ST_FUNC
int get_reg_ex(int rc
, int rc2
)
1395 for(r
=0;r
<NB_REGS
;r
++) {
1396 if (reg_classes
[r
] & rc2
) {
1399 for(p
= vstack
; p
<= vtop
; p
++) {
1400 if ((p
->r
& VT_VALMASK
) == r
||
1412 /* find a free register of class 'rc'. If none, save one register */
1413 ST_FUNC
int get_reg(int rc
)
1418 /* find a free register */
1419 for(r
=0;r
<NB_REGS
;r
++) {
1420 if (reg_classes
[r
] & rc
) {
1423 for(p
=vstack
;p
<=vtop
;p
++) {
1424 if ((p
->r
& VT_VALMASK
) == r
||
1433 /* no register left : free the first one on the stack (VERY
1434 IMPORTANT to start from the bottom to ensure that we don't
1435 spill registers used in gen_opi()) */
1436 for(p
=vstack
;p
<=vtop
;p
++) {
1437 /* look at second register (if long long) */
1439 if (r
< VT_CONST
&& (reg_classes
[r
] & rc
))
1441 r
= p
->r
& VT_VALMASK
;
1442 if (r
< VT_CONST
&& (reg_classes
[r
] & rc
)) {
1448 /* Should never comes here */
1452 /* find a free temporary local variable (return the offset on stack) match the size and align. If none, add new temporary stack variable*/
1453 static int get_temp_local_var(int size
,int align
){
1455 struct temp_local_variable
*temp_var
;
1462 for(i
=0;i
<nb_temp_local_vars
;i
++){
1463 temp_var
=&arr_temp_local_vars
[i
];
1464 if(temp_var
->size
<size
||align
!=temp_var
->align
){
1467 /*check if temp_var is free*/
1469 for(p
=vstack
;p
<=vtop
;p
++) {
1471 if(r
==VT_LOCAL
||r
==VT_LLOCAL
){
1472 if(p
->c
.i
==temp_var
->location
){
1479 found_var
=temp_var
->location
;
1485 loc
= (loc
- size
) & -align
;
1486 if(nb_temp_local_vars
<MAX_TEMP_LOCAL_VARIABLE_NUMBER
){
1487 temp_var
=&arr_temp_local_vars
[i
];
1488 temp_var
->location
=loc
;
1489 temp_var
->size
=size
;
1490 temp_var
->align
=align
;
1491 nb_temp_local_vars
++;
1498 static void clear_temp_local_var_list(){
1499 nb_temp_local_vars
=0;
1502 /* move register 's' (of type 't') to 'r', and flush previous value of r to memory
1504 static void move_reg(int r
, int s
, int t
)
1518 /* get address of vtop (vtop MUST BE an lvalue) */
1519 ST_FUNC
void gaddrof(void)
1521 vtop
->r
&= ~VT_LVAL
;
1522 /* tricky: if saved lvalue, then we can go back to lvalue */
1523 if ((vtop
->r
& VT_VALMASK
) == VT_LLOCAL
)
1524 vtop
->r
= (vtop
->r
& ~VT_VALMASK
) | VT_LOCAL
| VT_LVAL
;
1527 #ifdef CONFIG_TCC_BCHECK
1528 /* generate a bounded pointer addition */
1529 static void gen_bounded_ptr_add(void)
1531 int save
= (vtop
[-1].r
& VT_VALMASK
) == VT_LOCAL
;
1536 vpush_helper_func(TOK___bound_ptr_add
);
1541 /* returned pointer is in REG_IRET */
1542 vtop
->r
= REG_IRET
| VT_BOUNDED
;
1545 /* relocation offset of the bounding function call point */
1546 vtop
->c
.i
= (cur_text_section
->reloc
->data_offset
- sizeof(ElfW_Rel
));
1549 /* patch pointer addition in vtop so that pointer dereferencing is
1551 static void gen_bounded_ptr_deref(void)
1561 size
= type_size(&vtop
->type
, &align
);
1563 case 1: func
= TOK___bound_ptr_indir1
; break;
1564 case 2: func
= TOK___bound_ptr_indir2
; break;
1565 case 4: func
= TOK___bound_ptr_indir4
; break;
1566 case 8: func
= TOK___bound_ptr_indir8
; break;
1567 case 12: func
= TOK___bound_ptr_indir12
; break;
1568 case 16: func
= TOK___bound_ptr_indir16
; break;
1570 /* may happen with struct member access */
1573 sym
= external_helper_sym(func
);
1575 put_extern_sym(sym
, NULL
, 0, 0);
1576 /* patch relocation */
1577 /* XXX: find a better solution ? */
1578 rel
= (ElfW_Rel
*)(cur_text_section
->reloc
->data
+ vtop
->c
.i
);
1579 rel
->r_info
= ELFW(R_INFO
)(sym
->c
, ELFW(R_TYPE
)(rel
->r_info
));
1582 /* generate lvalue bound code */
1583 static void gbound(void)
1587 vtop
->r
&= ~VT_MUSTBOUND
;
1588 /* if lvalue, then use checking code before dereferencing */
1589 if (vtop
->r
& VT_LVAL
) {
1590 /* if not VT_BOUNDED value, then make one */
1591 if (!(vtop
->r
& VT_BOUNDED
)) {
1592 /* must save type because we must set it to int to get pointer */
1594 vtop
->type
.t
= VT_PTR
;
1597 gen_bounded_ptr_add();
1601 /* then check for dereferencing */
1602 gen_bounded_ptr_deref();
1606 /* we need to call __bound_ptr_add before we start to load function
1607 args into registers */
1608 ST_FUNC
void gbound_args(int nb_args
)
1613 for (i
= 1; i
<= nb_args
; ++i
)
1614 if (vtop
[1 - i
].r
& VT_MUSTBOUND
) {
1620 sv
= vtop
- nb_args
;
1621 if (sv
->r
& VT_SYM
) {
1625 #ifndef TCC_TARGET_PE
1626 || v
== TOK_sigsetjmp
1627 || v
== TOK___sigsetjmp
1630 vpush_helper_func(TOK___bound_setjmp
);
1633 func_bound_add_epilog
= 1;
1635 #if defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64
1636 if (v
== TOK_alloca
)
1637 func_bound_add_epilog
= 1;
1640 if (v
== TOK_longjmp
) /* undo rename to __longjmp14 */
1641 sv
->sym
->asm_label
= TOK___bound_longjmp
;
1646 /* Add bounds for local symbols from S to E (via ->prev) */
1647 static void add_local_bounds(Sym
*s
, Sym
*e
)
1649 for (; s
!= e
; s
= s
->prev
) {
1650 if (!s
->v
|| (s
->r
& VT_VALMASK
) != VT_LOCAL
)
1652 /* Add arrays/structs/unions because we always take address */
1653 if ((s
->type
.t
& VT_ARRAY
)
1654 || (s
->type
.t
& VT_BTYPE
) == VT_STRUCT
1655 || s
->a
.addrtaken
) {
1656 /* add local bound info */
1657 int align
, size
= type_size(&s
->type
, &align
);
1658 addr_t
*bounds_ptr
= section_ptr_add(lbounds_section
,
1659 2 * sizeof(addr_t
));
1660 bounds_ptr
[0] = s
->c
;
1661 bounds_ptr
[1] = size
;
1667 /* Wrapper around sym_pop, that potentially also registers local bounds. */
1668 static void pop_local_syms(Sym
*b
, int keep
)
1670 #ifdef CONFIG_TCC_BCHECK
1671 if (tcc_state
->do_bounds_check
&& !keep
&& (local_scope
|| !func_var
))
1672 add_local_bounds(local_stack
, b
);
1675 tcc_add_debug_info (tcc_state
, !local_scope
, local_stack
, b
);
1676 sym_pop(&local_stack
, b
, keep
);
1679 static void incr_bf_adr(int o
)
1681 vtop
->type
= char_pointer_type
;
1685 vtop
->type
.t
= VT_BYTE
| VT_UNSIGNED
;
1689 /* single-byte load mode for packed or otherwise unaligned bitfields */
1690 static void load_packed_bf(CType
*type
, int bit_pos
, int bit_size
)
1693 save_reg_upstack(vtop
->r
, 1);
1694 vpush64(type
->t
& VT_BTYPE
, 0); // B X
1695 bits
= 0, o
= bit_pos
>> 3, bit_pos
&= 7;
1704 vpushi(bit_pos
), gen_op(TOK_SHR
), bit_pos
= 0; // X B Y
1706 vpushi((1 << n
) - 1), gen_op('&');
1709 vpushi(bits
), gen_op(TOK_SHL
);
1712 bits
+= n
, bit_size
-= n
, o
= 1;
1715 if (!(type
->t
& VT_UNSIGNED
)) {
1716 n
= ((type
->t
& VT_BTYPE
) == VT_LLONG
? 64 : 32) - bits
;
1717 vpushi(n
), gen_op(TOK_SHL
);
1718 vpushi(n
), gen_op(TOK_SAR
);
1722 /* single-byte store mode for packed or otherwise unaligned bitfields */
1723 static void store_packed_bf(int bit_pos
, int bit_size
)
1725 int bits
, n
, o
, m
, c
;
1726 c
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
1728 save_reg_upstack(vtop
->r
, 1);
1729 bits
= 0, o
= bit_pos
>> 3, bit_pos
&= 7;
1731 incr_bf_adr(o
); // X B
1733 c
? vdup() : gv_dup(); // B V X
1736 vpushi(bits
), gen_op(TOK_SHR
);
1738 vpushi(bit_pos
), gen_op(TOK_SHL
);
1743 m
= ((1 << n
) - 1) << bit_pos
;
1744 vpushi(m
), gen_op('&'); // X B V1
1745 vpushv(vtop
-1); // X B V1 B
1746 vpushi(m
& 0x80 ? ~m
& 0x7f : ~m
);
1747 gen_op('&'); // X B V1 B1
1748 gen_op('|'); // X B V2
1750 vdup(), vtop
[-1] = vtop
[-2]; // X B B V2
1751 vstore(), vpop(); // X B
1752 bits
+= n
, bit_size
-= n
, bit_pos
= 0, o
= 1;
1757 static int adjust_bf(SValue
*sv
, int bit_pos
, int bit_size
)
1760 if (0 == sv
->type
.ref
)
1762 t
= sv
->type
.ref
->auxtype
;
1763 if (t
!= -1 && t
!= VT_STRUCT
) {
1764 sv
->type
.t
= (sv
->type
.t
& ~(VT_BTYPE
| VT_LONG
)) | t
;
1770 /* store vtop a register belonging to class 'rc'. lvalues are
1771 converted to values. Cannot be used if cannot be converted to
1772 register value (such as structures). */
1773 ST_FUNC
int gv(int rc
)
1775 int r
, r2
, r_ok
, r2_ok
, rc2
, bt
;
1776 int bit_pos
, bit_size
, size
, align
;
1778 /* NOTE: get_reg can modify vstack[] */
1779 if (vtop
->type
.t
& VT_BITFIELD
) {
1782 bit_pos
= BIT_POS(vtop
->type
.t
);
1783 bit_size
= BIT_SIZE(vtop
->type
.t
);
1784 /* remove bit field info to avoid loops */
1785 vtop
->type
.t
&= ~VT_STRUCT_MASK
;
1788 type
.t
= vtop
->type
.t
& VT_UNSIGNED
;
1789 if ((vtop
->type
.t
& VT_BTYPE
) == VT_BOOL
)
1790 type
.t
|= VT_UNSIGNED
;
1792 r
= adjust_bf(vtop
, bit_pos
, bit_size
);
1794 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
)
1799 if (r
== VT_STRUCT
) {
1800 load_packed_bf(&type
, bit_pos
, bit_size
);
1802 int bits
= (type
.t
& VT_BTYPE
) == VT_LLONG
? 64 : 32;
1803 /* cast to int to propagate signedness in following ops */
1805 /* generate shifts */
1806 vpushi(bits
- (bit_pos
+ bit_size
));
1808 vpushi(bits
- bit_size
);
1809 /* NOTE: transformed to SHR if unsigned */
1814 if (is_float(vtop
->type
.t
) &&
1815 (vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
) {
1816 /* CPUs usually cannot use float constants, so we store them
1817 generically in data segment */
1818 init_params p
= { rodata_section
};
1819 unsigned long offset
;
1820 size
= type_size(&vtop
->type
, &align
);
1822 size
= 0, align
= 1;
1823 offset
= section_add(p
.sec
, size
, align
);
1824 vpush_ref(&vtop
->type
, p
.sec
, offset
, size
);
1826 init_putv(&p
, &vtop
->type
, offset
);
1829 #ifdef CONFIG_TCC_BCHECK
1830 if (vtop
->r
& VT_MUSTBOUND
)
1834 bt
= vtop
->type
.t
& VT_BTYPE
;
1836 #ifdef TCC_TARGET_RISCV64
1838 if (bt
== VT_LDOUBLE
&& rc
== RC_FLOAT
)
1841 rc2
= RC2_TYPE(bt
, rc
);
1843 /* need to reload if:
1845 - lvalue (need to dereference pointer)
1846 - already a register, but not in the right class */
1847 r
= vtop
->r
& VT_VALMASK
;
1848 r_ok
= !(vtop
->r
& VT_LVAL
) && (r
< VT_CONST
) && (reg_classes
[r
] & rc
);
1849 r2_ok
= !rc2
|| ((vtop
->r2
< VT_CONST
) && (reg_classes
[vtop
->r2
] & rc2
));
1851 if (!r_ok
|| !r2_ok
) {
1855 int load_type
= (bt
== VT_QFLOAT
) ? VT_DOUBLE
: VT_PTRDIFF_T
;
1856 int original_type
= vtop
->type
.t
;
1858 /* two register type load :
1859 expand to two words temporarily */
1860 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
) {
1862 unsigned long long ll
= vtop
->c
.i
;
1863 vtop
->c
.i
= ll
; /* first word */
1865 vtop
->r
= r
; /* save register value */
1866 vpushi(ll
>> 32); /* second word */
1867 } else if (vtop
->r
& VT_LVAL
) {
1868 /* We do not want to modifier the long long pointer here.
1869 So we save any other instances down the stack */
1870 save_reg_upstack(vtop
->r
, 1);
1871 /* load from memory */
1872 vtop
->type
.t
= load_type
;
1875 vtop
[-1].r
= r
; /* save register value */
1876 /* increment pointer to get second word */
1877 vtop
->type
.t
= VT_PTRDIFF_T
;
1882 vtop
->type
.t
= load_type
;
1884 /* move registers */
1887 if (r2_ok
&& vtop
->r2
< VT_CONST
)
1890 vtop
[-1].r
= r
; /* save register value */
1891 vtop
->r
= vtop
[-1].r2
;
1893 /* Allocate second register. Here we rely on the fact that
1894 get_reg() tries first to free r2 of an SValue. */
1898 /* write second register */
1901 vtop
->type
.t
= original_type
;
1903 if (vtop
->r
== VT_CMP
)
1905 /* one register type load */
1910 #ifdef TCC_TARGET_C67
1911 /* uses register pairs for doubles */
1912 if (bt
== VT_DOUBLE
)
1919 /* generate vtop[-1] and vtop[0] in resp. classes rc1 and rc2 */
1920 ST_FUNC
void gv2(int rc1
, int rc2
)
1922 /* generate more generic register first. But VT_JMP or VT_CMP
1923 values must be generated first in all cases to avoid possible
1925 if (vtop
->r
!= VT_CMP
&& rc1
<= rc2
) {
1930 /* test if reload is needed for first register */
1931 if ((vtop
[-1].r
& VT_VALMASK
) >= VT_CONST
) {
1941 /* test if reload is needed for first register */
1942 if ((vtop
[0].r
& VT_VALMASK
) >= VT_CONST
) {
1949 /* expand 64bit on stack in two ints */
1950 ST_FUNC
void lexpand(void)
1953 u
= vtop
->type
.t
& (VT_DEFSIGN
| VT_UNSIGNED
);
1954 v
= vtop
->r
& (VT_VALMASK
| VT_LVAL
);
1955 if (v
== VT_CONST
) {
1958 } else if (v
== (VT_LVAL
|VT_CONST
) || v
== (VT_LVAL
|VT_LOCAL
)) {
1964 vtop
[0].r
= vtop
[-1].r2
;
1965 vtop
[0].r2
= vtop
[-1].r2
= VT_CONST
;
1967 vtop
[0].type
.t
= vtop
[-1].type
.t
= VT_INT
| u
;
1972 /* build a long long from two ints */
1973 static void lbuild(int t
)
1975 gv2(RC_INT
, RC_INT
);
1976 vtop
[-1].r2
= vtop
[0].r
;
1977 vtop
[-1].type
.t
= t
;
1982 /* convert stack entry to register and duplicate its value in another
1984 static void gv_dup(void)
1990 if ((t
& VT_BTYPE
) == VT_LLONG
) {
1991 if (t
& VT_BITFIELD
) {
2001 /* stack: H L L1 H1 */
2011 /* duplicate value */
2021 /* generate CPU independent (unsigned) long long operations */
2022 static void gen_opl(int op
)
2024 int t
, a
, b
, op1
, c
, i
;
2026 unsigned short reg_iret
= REG_IRET
;
2027 unsigned short reg_lret
= REG_IRE2
;
2033 func
= TOK___divdi3
;
2036 func
= TOK___udivdi3
;
2039 func
= TOK___moddi3
;
2042 func
= TOK___umoddi3
;
2049 /* call generic long long function */
2050 vpush_helper_func(func
);
2055 vtop
->r2
= reg_lret
;
2063 //pv("gen_opl A",0,2);
2069 /* stack: L1 H1 L2 H2 */
2074 vtop
[-2] = vtop
[-3];
2077 /* stack: H1 H2 L1 L2 */
2078 //pv("gen_opl B",0,4);
2084 /* stack: H1 H2 L1 L2 ML MH */
2087 /* stack: ML MH H1 H2 L1 L2 */
2091 /* stack: ML MH H1 L2 H2 L1 */
2096 /* stack: ML MH M1 M2 */
2099 } else if (op
== '+' || op
== '-') {
2100 /* XXX: add non carry method too (for MIPS or alpha) */
2106 /* stack: H1 H2 (L1 op L2) */
2109 gen_op(op1
+ 1); /* TOK_xxxC2 */
2112 /* stack: H1 H2 (L1 op L2) */
2115 /* stack: (L1 op L2) H1 H2 */
2117 /* stack: (L1 op L2) (H1 op H2) */
2125 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
2126 t
= vtop
[-1].type
.t
;
2130 /* stack: L H shift */
2132 /* constant: simpler */
2133 /* NOTE: all comments are for SHL. the other cases are
2134 done by swapping words */
2145 if (op
!= TOK_SAR
) {
2178 /* XXX: should provide a faster fallback on x86 ? */
2181 func
= TOK___ashrdi3
;
2184 func
= TOK___lshrdi3
;
2187 func
= TOK___ashldi3
;
2193 /* compare operations */
2199 /* stack: L1 H1 L2 H2 */
2201 vtop
[-1] = vtop
[-2];
2203 /* stack: L1 L2 H1 H2 */
2207 /* when values are equal, we need to compare low words. since
2208 the jump is inverted, we invert the test too. */
2211 else if (op1
== TOK_GT
)
2213 else if (op1
== TOK_ULT
)
2215 else if (op1
== TOK_UGT
)
2225 /* generate non equal test */
2227 vset_VT_CMP(TOK_NE
);
2231 /* compare low. Always unsigned */
2235 else if (op1
== TOK_LE
)
2237 else if (op1
== TOK_GT
)
2239 else if (op1
== TOK_GE
)
2242 #if 0//def TCC_TARGET_I386
2243 if (op
== TOK_NE
) { gsym(b
); break; }
2244 if (op
== TOK_EQ
) { gsym(a
); break; }
/* Signed 64-bit division on values carried in uint64_t: divide the two
   magnitudes, then give the quotient a negative sign iff the operands'
   signs differ.  Working in unsigned arithmetic (where negation wraps
   modulo 2^64) avoids signed-overflow UB, including INT64_MIN / -1. */
static uint64_t gen_opic_sdiv(uint64_t a, uint64_t b)
{
    uint64_t ua = (a >> 63) ? 0 - a : a;   /* |a| */
    uint64_t ub = (b >> 63) ? 0 - b : b;   /* |b| */
    uint64_t q = ua / ub;

    if ((a ^ b) >> 63)                     /* signs differ -> negative */
        q = 0 - q;
    return q;
}
/* Signed 64-bit "less than" on values carried in uint64_t: XOR-ing the
   sign bit maps the signed ordering onto the unsigned one. */
static int gen_opic_lt(uint64_t a, uint64_t b)
{
    const uint64_t sign_bit = (uint64_t)1 << 63;
    return (a ^ sign_bit) < (b ^ sign_bit);
}
2264 /* handle integer constant optimizations and various machine
2266 static void gen_opic(int op
)
2268 SValue
*v1
= vtop
- 1;
2270 int t1
= v1
->type
.t
& VT_BTYPE
;
2271 int t2
= v2
->type
.t
& VT_BTYPE
;
2272 int c1
= (v1
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2273 int c2
= (v2
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2274 int nonconst
= (v1
->r
| v2
->r
) & VT_NONCONST
;
2275 uint64_t l1
= c1
? v1
->c
.i
: 0;
2276 uint64_t l2
= c2
? v2
->c
.i
: 0;
2277 int shm
= (t1
== VT_LLONG
) ? 63 : 31;
2279 if (t1
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t1
!= VT_PTR
))
2280 l1
= ((uint32_t)l1
|
2281 (v1
->type
.t
& VT_UNSIGNED
? 0 : -(l1
& 0x80000000)));
2282 if (t2
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t2
!= VT_PTR
))
2283 l2
= ((uint32_t)l2
|
2284 (v2
->type
.t
& VT_UNSIGNED
? 0 : -(l2
& 0x80000000)));
2288 case '+': l1
+= l2
; break;
2289 case '-': l1
-= l2
; break;
2290 case '&': l1
&= l2
; break;
2291 case '^': l1
^= l2
; break;
2292 case '|': l1
|= l2
; break;
2293 case '*': l1
*= l2
; break;
2300 /* if division by zero, generate explicit division */
2302 if (const_wanted
&& !(nocode_wanted
& unevalmask
))
2303 tcc_error("division by zero in constant");
2307 default: l1
= gen_opic_sdiv(l1
, l2
); break;
2308 case '%': l1
= l1
- l2
* gen_opic_sdiv(l1
, l2
); break;
2309 case TOK_UDIV
: l1
= l1
/ l2
; break;
2310 case TOK_UMOD
: l1
= l1
% l2
; break;
2313 case TOK_SHL
: l1
<<= (l2
& shm
); break;
2314 case TOK_SHR
: l1
>>= (l2
& shm
); break;
2316 l1
= (l1
>> 63) ? ~(~l1
>> (l2
& shm
)) : l1
>> (l2
& shm
);
2319 case TOK_ULT
: l1
= l1
< l2
; break;
2320 case TOK_UGE
: l1
= l1
>= l2
; break;
2321 case TOK_EQ
: l1
= l1
== l2
; break;
2322 case TOK_NE
: l1
= l1
!= l2
; break;
2323 case TOK_ULE
: l1
= l1
<= l2
; break;
2324 case TOK_UGT
: l1
= l1
> l2
; break;
2325 case TOK_LT
: l1
= gen_opic_lt(l1
, l2
); break;
2326 case TOK_GE
: l1
= !gen_opic_lt(l1
, l2
); break;
2327 case TOK_LE
: l1
= !gen_opic_lt(l2
, l1
); break;
2328 case TOK_GT
: l1
= gen_opic_lt(l2
, l1
); break;
2330 case TOK_LAND
: l1
= l1
&& l2
; break;
2331 case TOK_LOR
: l1
= l1
|| l2
; break;
2335 if (t1
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t1
!= VT_PTR
))
2336 l1
= ((uint32_t)l1
|
2337 (v1
->type
.t
& VT_UNSIGNED
? 0 : -(l1
& 0x80000000)));
2341 nonconst
= VT_NONCONST
;
2342 /* if commutative ops, put c2 as constant */
2343 if (c1
&& (op
== '+' || op
== '&' || op
== '^' ||
2344 op
== '|' || op
== '*' || op
== TOK_EQ
|| op
== TOK_NE
)) {
2346 c2
= c1
; //c = c1, c1 = c2, c2 = c;
2347 l2
= l1
; //l = l1, l1 = l2, l2 = l;
2349 if (!const_wanted
&&
2351 (op
== TOK_SHL
|| op
== TOK_SHR
|| op
== TOK_SAR
)) ||
2352 (l1
== -1 && op
== TOK_SAR
))) {
2353 /* treat (0 << x), (0 >> x) and (-1 >> x) as constant */
2355 } else if (!const_wanted
&&
2356 c2
&& ((l2
== 0 && (op
== '&' || op
== '*')) ||
2358 (l2
== -1 || (l2
== 0xFFFFFFFF && t2
!= VT_LLONG
))) ||
2359 (l2
== 1 && (op
== '%' || op
== TOK_UMOD
)))) {
2360 /* treat (x & 0), (x * 0), (x | -1) and (x % 1) as constant */
2365 } else if (c2
&& (((op
== '*' || op
== '/' || op
== TOK_UDIV
||
2368 ((op
== '+' || op
== '-' || op
== '|' || op
== '^' ||
2369 op
== TOK_SHL
|| op
== TOK_SHR
|| op
== TOK_SAR
) &&
2372 (l2
== -1 || (l2
== 0xFFFFFFFF && t2
!= VT_LLONG
))))) {
2373 /* filter out NOP operations like x*1, x-0, x&-1... */
2375 } else if (c2
&& (op
== '*' || op
== TOK_PDIV
|| op
== TOK_UDIV
)) {
2376 /* try to use shifts instead of muls or divs */
2377 if (l2
> 0 && (l2
& (l2
- 1)) == 0) {
2386 else if (op
== TOK_PDIV
)
2392 } else if (c2
&& (op
== '+' || op
== '-') &&
2393 (((vtop
[-1].r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == (VT_CONST
| VT_SYM
))
2394 || (vtop
[-1].r
& (VT_VALMASK
| VT_LVAL
)) == VT_LOCAL
)) {
2395 /* symbol + constant case */
2399 /* The backends can't always deal with addends to symbols
2400 larger than +-1<<31. Don't construct such. */
2407 /* call low level op generator */
2408 if (t1
== VT_LLONG
|| t2
== VT_LLONG
||
2409 (PTR_SIZE
== 8 && (t1
== VT_PTR
|| t2
== VT_PTR
)))
2415 if (vtop
->r
== VT_CONST
)
2416 vtop
->r
|= nonconst
;
2419 #if defined TCC_TARGET_X86_64 || defined TCC_TARGET_I386
2420 # define gen_negf gen_opf
2421 #elif defined TCC_TARGET_ARM
2422 void gen_negf(int op
)
2424 /* arm will detect 0-x and replace by vneg */
2425 vpushi(0), vswap(), gen_op('-');
2428 /* XXX: implement in gen_opf() for other backends too */
2429 void gen_negf(int op
)
2431 /* In IEEE negate(x) isn't subtract(0,x). Without NaNs it's
2432 subtract(-0, x), but with them it's really a sign flip
2433 operation. We implement this with bit manipulation and have
2434 to do some type reinterpretation for this, which TCC can do
2437 int align
, size
, bt
;
2439 size
= type_size(&vtop
->type
, &align
);
2440 bt
= vtop
->type
.t
& VT_BTYPE
;
2441 save_reg(gv(RC_TYPE(bt
)));
2443 incr_bf_adr(size
- 1);
2445 vpushi(0x80); /* flip sign */
2452 /* generate a floating point operation with constant propagation */
2453 static void gen_opif(int op
)
2457 #if defined _MSC_VER && defined __x86_64__
2458 /* avoid bad optimization with f1 -= f2 for f1:-0.0, f2:0.0 */
2468 /* currently, we cannot do computations with forward symbols */
2469 c1
= (v1
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2470 c2
= (v2
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2472 if (v1
->type
.t
== VT_FLOAT
) {
2475 } else if (v1
->type
.t
== VT_DOUBLE
) {
2482 /* NOTE: we only do constant propagation if finite number (not
2483 NaN or infinity) (ANSI spec) */
2484 if (!(ieee_finite(f1
) || !ieee_finite(f2
)) && !const_wanted
)
2487 case '+': f1
+= f2
; break;
2488 case '-': f1
-= f2
; break;
2489 case '*': f1
*= f2
; break;
2492 union { float f
; unsigned u
; } x1
, x2
, y
;
2493 /* If not in initializer we need to potentially generate
2494 FP exceptions at runtime, otherwise we want to fold. */
2497 /* the run-time result of 0.0/0.0 on x87, also of other compilers
2498 when used to compile the f1 /= f2 below, would be -nan */
2499 x1
.f
= f1
, x2
.f
= f2
;
2501 y
.u
= 0x7fc00000; /* nan */
2503 y
.u
= 0x7f800000; /* infinity */
2504 y
.u
|= (x1
.u
^ x2
.u
) & 0x80000000; /* set sign */
2513 /* XXX: also handles tests ? */
2519 /* XXX: overflow test ? */
2520 if (v1
->type
.t
== VT_FLOAT
) {
2522 } else if (v1
->type
.t
== VT_DOUBLE
) {
2529 if (op
== TOK_NEG
) {
2537 /* print a type. If 'varstr' is not NULL, then the variable is also
2538 printed in the type */
2540 /* XXX: add array and function pointers */
2541 static void type_to_str(char *buf
, int buf_size
,
2542 CType
*type
, const char *varstr
)
2554 pstrcat(buf
, buf_size
, "extern ");
2556 pstrcat(buf
, buf_size
, "static ");
2558 pstrcat(buf
, buf_size
, "typedef ");
2560 pstrcat(buf
, buf_size
, "inline ");
2562 if (t
& VT_VOLATILE
)
2563 pstrcat(buf
, buf_size
, "volatile ");
2564 if (t
& VT_CONSTANT
)
2565 pstrcat(buf
, buf_size
, "const ");
2567 if (((t
& VT_DEFSIGN
) && bt
== VT_BYTE
)
2568 || ((t
& VT_UNSIGNED
)
2569 && (bt
== VT_SHORT
|| bt
== VT_INT
|| bt
== VT_LLONG
)
2572 pstrcat(buf
, buf_size
, (t
& VT_UNSIGNED
) ? "unsigned " : "signed ");
2574 buf_size
-= strlen(buf
);
2610 tstr
= "long double";
2612 pstrcat(buf
, buf_size
, tstr
);
2619 pstrcat(buf
, buf_size
, tstr
);
2620 v
= type
->ref
->v
& ~SYM_STRUCT
;
2621 if (v
>= SYM_FIRST_ANOM
)
2622 pstrcat(buf
, buf_size
, "<anonymous>");
2624 pstrcat(buf
, buf_size
, get_tok_str(v
, NULL
));
2629 if (varstr
&& '*' == *varstr
) {
2630 pstrcat(buf1
, sizeof(buf1
), "(");
2631 pstrcat(buf1
, sizeof(buf1
), varstr
);
2632 pstrcat(buf1
, sizeof(buf1
), ")");
2634 pstrcat(buf1
, buf_size
, "(");
2636 while (sa
!= NULL
) {
2638 type_to_str(buf2
, sizeof(buf2
), &sa
->type
, NULL
);
2639 pstrcat(buf1
, sizeof(buf1
), buf2
);
2642 pstrcat(buf1
, sizeof(buf1
), ", ");
2644 if (s
->f
.func_type
== FUNC_ELLIPSIS
)
2645 pstrcat(buf1
, sizeof(buf1
), ", ...");
2646 pstrcat(buf1
, sizeof(buf1
), ")");
2647 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
2651 if (t
& (VT_ARRAY
|VT_VLA
)) {
2652 if (varstr
&& '*' == *varstr
)
2653 snprintf(buf1
, sizeof(buf1
), "(%s)[%d]", varstr
, s
->c
);
2655 snprintf(buf1
, sizeof(buf1
), "%s[%d]", varstr
? varstr
: "", s
->c
);
2656 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
2659 pstrcpy(buf1
, sizeof(buf1
), "*");
2660 if (t
& VT_CONSTANT
)
2661 pstrcat(buf1
, buf_size
, "const ");
2662 if (t
& VT_VOLATILE
)
2663 pstrcat(buf1
, buf_size
, "volatile ");
2665 pstrcat(buf1
, sizeof(buf1
), varstr
);
2666 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
2670 pstrcat(buf
, buf_size
, " ");
2671 pstrcat(buf
, buf_size
, varstr
);
2676 static void type_incompatibility_error(CType
* st
, CType
* dt
, const char* fmt
)
2678 char buf1
[256], buf2
[256];
2679 type_to_str(buf1
, sizeof(buf1
), st
, NULL
);
2680 type_to_str(buf2
, sizeof(buf2
), dt
, NULL
);
2681 tcc_error(fmt
, buf1
, buf2
);
2684 static void type_incompatibility_warning(CType
* st
, CType
* dt
, const char* fmt
)
2686 char buf1
[256], buf2
[256];
2687 type_to_str(buf1
, sizeof(buf1
), st
, NULL
);
2688 type_to_str(buf2
, sizeof(buf2
), dt
, NULL
);
2689 tcc_warning(fmt
, buf1
, buf2
);
2692 static int pointed_size(CType
*type
)
2695 return type_size(pointed_type(type
), &align
);
2698 static inline int is_null_pointer(SValue
*p
)
2700 if ((p
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
| VT_NONCONST
)) != VT_CONST
)
2702 return ((p
->type
.t
& VT_BTYPE
) == VT_INT
&& (uint32_t)p
->c
.i
== 0) ||
2703 ((p
->type
.t
& VT_BTYPE
) == VT_LLONG
&& p
->c
.i
== 0) ||
2704 ((p
->type
.t
& VT_BTYPE
) == VT_PTR
&&
2705 (PTR_SIZE
== 4 ? (uint32_t)p
->c
.i
== 0 : p
->c
.i
== 0) &&
2706 ((pointed_type(&p
->type
)->t
& VT_BTYPE
) == VT_VOID
) &&
2707 0 == (pointed_type(&p
->type
)->t
& (VT_CONSTANT
| VT_VOLATILE
))
2711 /* compare function types. OLD functions match any new functions */
2712 static int is_compatible_func(CType
*type1
, CType
*type2
)
2718 if (s1
->f
.func_call
!= s2
->f
.func_call
)
2720 if (s1
->f
.func_type
!= s2
->f
.func_type
2721 && s1
->f
.func_type
!= FUNC_OLD
2722 && s2
->f
.func_type
!= FUNC_OLD
)
2725 if (!is_compatible_unqualified_types(&s1
->type
, &s2
->type
))
2727 if (s1
->f
.func_type
== FUNC_OLD
|| s2
->f
.func_type
== FUNC_OLD
)
2738 /* return true if type1 and type2 are the same. If unqualified is
2739 true, qualifiers on the types are ignored.
2741 static int compare_types(CType
*type1
, CType
*type2
, int unqualified
)
2745 t1
= type1
->t
& VT_TYPE
;
2746 t2
= type2
->t
& VT_TYPE
;
2748 /* strip qualifiers before comparing */
2749 t1
&= ~(VT_CONSTANT
| VT_VOLATILE
);
2750 t2
&= ~(VT_CONSTANT
| VT_VOLATILE
);
2753 /* Default Vs explicit signedness only matters for char */
2754 if ((t1
& VT_BTYPE
) != VT_BYTE
) {
2758 /* XXX: bitfields ? */
2763 && !(type1
->ref
->c
< 0
2764 || type2
->ref
->c
< 0
2765 || type1
->ref
->c
== type2
->ref
->c
))
2768 /* test more complicated cases */
2769 bt1
= t1
& VT_BTYPE
;
2770 if (bt1
== VT_PTR
) {
2771 type1
= pointed_type(type1
);
2772 type2
= pointed_type(type2
);
2773 return is_compatible_types(type1
, type2
);
2774 } else if (bt1
== VT_STRUCT
) {
2775 return (type1
->ref
== type2
->ref
);
2776 } else if (bt1
== VT_FUNC
) {
2777 return is_compatible_func(type1
, type2
);
2778 } else if (IS_ENUM(type1
->t
) && IS_ENUM(type2
->t
)) {
2779 /* If both are enums then they must be the same, if only one is then
2780 t1 and t2 must be equal, which was checked above already. */
2781 return type1
->ref
== type2
->ref
;
2787 /* Check if OP1 and OP2 can be "combined" with operation OP, the combined
2788 type is stored in DEST if non-null (except for pointer plus/minus) . */
2789 static int combine_types(CType
*dest
, SValue
*op1
, SValue
*op2
, int op
)
2791 CType
*type1
= &op1
->type
, *type2
= &op2
->type
, type
;
2792 int t1
= type1
->t
, t2
= type2
->t
, bt1
= t1
& VT_BTYPE
, bt2
= t2
& VT_BTYPE
;
2798 if (bt1
== VT_VOID
|| bt2
== VT_VOID
) {
2799 ret
= op
== '?' ? 1 : 0;
2800 /* NOTE: as an extension, we accept void on only one side */
2802 } else if (bt1
== VT_PTR
|| bt2
== VT_PTR
) {
2803 if (op
== '+') ; /* Handled in caller */
2804 /* http://port70.net/~nsz/c/c99/n1256.html#6.5.15p6 */
2805 /* If one is a null ptr constant the result type is the other. */
2806 else if (is_null_pointer (op2
)) type
= *type1
;
2807 else if (is_null_pointer (op1
)) type
= *type2
;
2808 else if (bt1
!= bt2
) {
2809 /* accept comparison or cond-expr between pointer and integer
2811 if ((op
== '?' || TOK_ISCOND(op
))
2812 && (is_integer_btype(bt1
) || is_integer_btype(bt2
)))
2813 tcc_warning("pointer/integer mismatch in %s",
2814 op
== '?' ? "conditional expression" : "comparison");
2815 else if (op
!= '-' || !is_integer_btype(bt2
))
2817 type
= *(bt1
== VT_PTR
? type1
: type2
);
2819 CType
*pt1
= pointed_type(type1
);
2820 CType
*pt2
= pointed_type(type2
);
2821 int pbt1
= pt1
->t
& VT_BTYPE
;
2822 int pbt2
= pt2
->t
& VT_BTYPE
;
2823 int newquals
, copied
= 0;
2824 if (pbt1
!= VT_VOID
&& pbt2
!= VT_VOID
2825 && !compare_types(pt1
, pt2
, 1/*unqualif*/)) {
2826 if (op
!= '?' && !TOK_ISCOND(op
))
2829 type_incompatibility_warning(type1
, type2
,
2831 ? "pointer type mismatch in conditional expression ('%s' and '%s')"
2832 : "pointer type mismatch in comparison('%s' and '%s')");
2835 /* pointers to void get preferred, otherwise the
2836 pointed to types minus qualifs should be compatible */
2837 type
= *((pbt1
== VT_VOID
) ? type1
: type2
);
2838 /* combine qualifs */
2839 newquals
= ((pt1
->t
| pt2
->t
) & (VT_CONSTANT
| VT_VOLATILE
));
2840 if ((~pointed_type(&type
)->t
& (VT_CONSTANT
| VT_VOLATILE
))
2843 /* copy the pointer target symbol */
2844 type
.ref
= sym_push(SYM_FIELD
, &type
.ref
->type
,
2847 pointed_type(&type
)->t
|= newquals
;
2849 /* pointers to incomplete arrays get converted to
2850 pointers to completed ones if possible */
2851 if (pt1
->t
& VT_ARRAY
2852 && pt2
->t
& VT_ARRAY
2853 && pointed_type(&type
)->ref
->c
< 0
2854 && (pt1
->ref
->c
> 0 || pt2
->ref
->c
> 0))
2857 type
.ref
= sym_push(SYM_FIELD
, &type
.ref
->type
,
2859 pointed_type(&type
)->ref
=
2860 sym_push(SYM_FIELD
, &pointed_type(&type
)->ref
->type
,
2861 0, pointed_type(&type
)->ref
->c
);
2862 pointed_type(&type
)->ref
->c
=
2863 0 < pt1
->ref
->c
? pt1
->ref
->c
: pt2
->ref
->c
;
2869 } else if (bt1
== VT_STRUCT
|| bt2
== VT_STRUCT
) {
2870 if (op
!= '?' || !compare_types(type1
, type2
, 1))
2873 } else if (is_float(bt1
) || is_float(bt2
)) {
2874 if (bt1
== VT_LDOUBLE
|| bt2
== VT_LDOUBLE
) {
2875 type
.t
= VT_LDOUBLE
;
2876 } else if (bt1
== VT_DOUBLE
|| bt2
== VT_DOUBLE
) {
2881 } else if (bt1
== VT_LLONG
|| bt2
== VT_LLONG
) {
2882 /* cast to biggest op */
2883 type
.t
= VT_LLONG
| VT_LONG
;
2884 if (bt1
== VT_LLONG
)
2886 if (bt2
== VT_LLONG
)
2888 /* convert to unsigned if it does not fit in a long long */
2889 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
) ||
2890 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
))
2891 type
.t
|= VT_UNSIGNED
;
2893 /* integer operations */
2894 type
.t
= VT_INT
| (VT_LONG
& (t1
| t2
));
2895 /* convert to unsigned if it does not fit in an integer */
2896 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
) ||
2897 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
))
2898 type
.t
|= VT_UNSIGNED
;
2905 /* generic gen_op: handles types problems */
2906 ST_FUNC
void gen_op(int op
)
2908 int t1
, t2
, bt1
, bt2
, t
;
2909 CType type1
, combtype
;
2912 t1
= vtop
[-1].type
.t
;
2913 t2
= vtop
[0].type
.t
;
2914 bt1
= t1
& VT_BTYPE
;
2915 bt2
= t2
& VT_BTYPE
;
2917 if (bt1
== VT_FUNC
|| bt2
== VT_FUNC
) {
2918 if (bt2
== VT_FUNC
) {
2919 mk_pointer(&vtop
->type
);
2922 if (bt1
== VT_FUNC
) {
2924 mk_pointer(&vtop
->type
);
2929 } else if (!combine_types(&combtype
, vtop
- 1, vtop
, op
)) {
2930 tcc_error_noabort("invalid operand types for binary operation");
2932 } else if (bt1
== VT_PTR
|| bt2
== VT_PTR
) {
2933 /* at least one operand is a pointer */
2934 /* relational op: must be both pointers */
2938 /* if both pointers, then it must be the '-' op */
2939 if (bt1
== VT_PTR
&& bt2
== VT_PTR
) {
2941 tcc_error("cannot use pointers here");
2942 vpush_type_size(pointed_type(&vtop
[-1].type
), &align
);
2945 vtop
->type
.t
= VT_PTRDIFF_T
;
2949 /* exactly one pointer : must be '+' or '-'. */
2950 if (op
!= '-' && op
!= '+')
2951 tcc_error("cannot use pointers here");
2952 /* Put pointer as first operand */
2953 if (bt2
== VT_PTR
) {
2955 t
= t1
, t1
= t2
, t2
= t
;
2958 if ((vtop
[0].type
.t
& VT_BTYPE
) == VT_LLONG
)
2959 /* XXX: truncate here because gen_opl can't handle ptr + long long */
2962 type1
= vtop
[-1].type
;
2963 vpush_type_size(pointed_type(&vtop
[-1].type
), &align
);
2965 #ifdef CONFIG_TCC_BCHECK
2966 if (tcc_state
->do_bounds_check
&& !const_wanted
) {
2967 /* if bounded pointers, we generate a special code to
2974 gen_bounded_ptr_add();
2980 type1
.t
&= ~(VT_ARRAY
|VT_VLA
);
2981 /* put again type if gen_opic() swaped operands */
2985 /* floats can only be used for a few operations */
2986 if (is_float(combtype
.t
)
2987 && op
!= '+' && op
!= '-' && op
!= '*' && op
!= '/'
2989 tcc_error("invalid operands for binary operation");
2990 else if (op
== TOK_SHR
|| op
== TOK_SAR
|| op
== TOK_SHL
) {
2991 t
= bt1
== VT_LLONG
? VT_LLONG
: VT_INT
;
2992 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (t
| VT_UNSIGNED
))
2994 t
|= (VT_LONG
& t1
);
2998 t
= t2
= combtype
.t
;
2999 /* XXX: currently, some unsigned operations are explicit, so
3000 we modify them here */
3001 if (t
& VT_UNSIGNED
) {
3008 else if (op
== TOK_LT
)
3010 else if (op
== TOK_GT
)
3012 else if (op
== TOK_LE
)
3014 else if (op
== TOK_GE
)
3020 /* special case for shifts and long long: we keep the shift as
3022 if (op
== TOK_SHR
|| op
== TOK_SAR
|| op
== TOK_SHL
)
3029 if (TOK_ISCOND(op
)) {
3030 /* relational op: the result is an int */
3031 vtop
->type
.t
= VT_INT
;
3036 // Make sure that we have converted to an rvalue:
3037 if (vtop
->r
& VT_LVAL
)
3038 gv(is_float(vtop
->type
.t
& VT_BTYPE
) ? RC_FLOAT
: RC_INT
);
3041 #if defined TCC_TARGET_ARM64 || defined TCC_TARGET_RISCV64 || defined TCC_TARGET_ARM
3042 #define gen_cvt_itof1 gen_cvt_itof
/* Generic integer->float conversion: on targets whose backend cannot
   convert unsigned long long natively, that case is routed through
   libgcc-style __floatundi* helper functions chosen by the target
   float type; other cases fall through to the backend conversion.
   NOTE(review): several original source lines are elided in this
   extract (declarations, #else branches, the helper-call sequence);
   only the visible dispatch is documented here. */
3044 /* generic itof for unsigned long long case */
3045 static void gen_cvt_itof1(int t
)
/* unsigned 64-bit source is the special case needing a helper */
3047 if ((vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
)) ==
3048 (VT_LLONG
| VT_UNSIGNED
)) {
/* single-precision destination (__floatundisf) */
3051 vpush_helper_func(TOK___floatundisf
);
3052 #if LDOUBLE_SIZE != 8
/* long double destination, only when distinct from double */
3053 else if (t
== VT_LDOUBLE
)
3054 vpush_helper_func(TOK___floatundixf
);
/* default: double destination */
3057 vpush_helper_func(TOK___floatundidf
);
3068 #if defined TCC_TARGET_ARM64 || defined TCC_TARGET_RISCV64
3069 #define gen_cvt_ftoi1 gen_cvt_ftoi
/* Generic float->integer conversion: float -> unsigned long long is
   not handled natively and is routed through libgcc-style __fixuns*
   helpers chosen by the source float type.
   NOTE(review): several original lines are elided in this extract
   (declarations, #else branches, the generic fallback path). */
3071 /* generic ftoi for unsigned long long case */
3072 static void gen_cvt_ftoi1(int t
)
3075 if (t
== (VT_LLONG
| VT_UNSIGNED
)) {
3076 /* not handled natively */
/* the source's float basic type selects the helper */
3077 st
= vtop
->type
.t
& VT_BTYPE
;
/* single-precision source (__fixunssfdi) */
3079 vpush_helper_func(TOK___fixunssfdi
);
3080 #if LDOUBLE_SIZE != 8
3081 else if (st
== VT_LDOUBLE
)
3082 vpush_helper_func(TOK___fixunsxfdi
);
/* default: double-precision source */
3085 vpush_helper_func(TOK___fixunsdfdi
);
/* Apply the delayed char/short cast recorded in vtop->r's
   VT_MUSTCAST bit-field: determine the effective source width (long
   long when the recorded code is 2, int otherwise), clear the flag,
   then cast to the destination type; _Bool is materialized as an
   unsigned byte.
   NOTE(review): one intermediate statement of the original (between
   lines 3101 and 3103) is elided in this extract. */
3096 /* special delayed cast for char/short */
3097 static void force_charshort_cast(void)
/* source basic type: 64-bit if the recorded mustcast code is 2 */
3099 int sbt
= BFGET(vtop
->r
, VT_MUSTCAST
) == 2 ? VT_LLONG
: VT_INT
;
3100 int dbt
= vtop
->type
.t
;
/* clear the delayed-cast marker before generating the cast */
3101 vtop
->r
&= ~VT_MUSTCAST
;
3103 gen_cast_s(dbt
== VT_BOOL
? VT_BYTE
|VT_UNSIGNED
: dbt
);
/* Cast vtop to the plain basic type 't'.
   NOTE(review): the body is elided in this extract; presumably it
   builds a CType from 't' and forwards to gen_cast() -- confirm
   against the full source. */
3107 static void gen_cast_s(int t
)
3115 /* cast 'vtop' to 'type'. Casting to bitfields is forbidden. */
3116 static void gen_cast(CType
*type
)
3118 int sbt
, dbt
, sf
, df
, c
;
3119 int dbt_bt
, sbt_bt
, ds
, ss
, bits
, trunc
;
3121 /* special delayed cast for char/short */
3122 if (vtop
->r
& VT_MUSTCAST
)
3123 force_charshort_cast();
3125 /* bitfields first get cast to ints */
3126 if (vtop
->type
.t
& VT_BITFIELD
)
3129 dbt
= type
->t
& (VT_BTYPE
| VT_UNSIGNED
);
3130 sbt
= vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
);
3138 dbt_bt
= dbt
& VT_BTYPE
;
3139 sbt_bt
= sbt
& VT_BTYPE
;
3140 if (dbt_bt
== VT_VOID
)
3142 if (sbt_bt
== VT_VOID
) {
3144 cast_error(&vtop
->type
, type
);
3147 c
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
3148 #if !defined TCC_IS_NATIVE && !defined TCC_IS_NATIVE_387
3149 c
&= (dbt
!= VT_LDOUBLE
) | !!nocode_wanted
;
3152 /* constant case: we can do it now */
3153 /* XXX: in ISOC, cannot do it if error in convert */
3154 if (sbt
== VT_FLOAT
)
3155 vtop
->c
.ld
= vtop
->c
.f
;
3156 else if (sbt
== VT_DOUBLE
)
3157 vtop
->c
.ld
= vtop
->c
.d
;
3160 if (sbt_bt
== VT_LLONG
) {
3161 if ((sbt
& VT_UNSIGNED
) || !(vtop
->c
.i
>> 63))
3162 vtop
->c
.ld
= vtop
->c
.i
;
3164 vtop
->c
.ld
= -(long double)-vtop
->c
.i
;
3166 if ((sbt
& VT_UNSIGNED
) || !(vtop
->c
.i
>> 31))
3167 vtop
->c
.ld
= (uint32_t)vtop
->c
.i
;
3169 vtop
->c
.ld
= -(long double)-(uint32_t)vtop
->c
.i
;
3172 if (dbt
== VT_FLOAT
)
3173 vtop
->c
.f
= (float)vtop
->c
.ld
;
3174 else if (dbt
== VT_DOUBLE
)
3175 vtop
->c
.d
= (double)vtop
->c
.ld
;
3176 } else if (sf
&& dbt
== VT_BOOL
) {
3177 vtop
->c
.i
= (vtop
->c
.ld
!= 0);
3180 vtop
->c
.i
= vtop
->c
.ld
;
3181 else if (sbt_bt
== VT_LLONG
|| (PTR_SIZE
== 8 && sbt
== VT_PTR
))
3183 else if (sbt
& VT_UNSIGNED
)
3184 vtop
->c
.i
= (uint32_t)vtop
->c
.i
;
3186 vtop
->c
.i
= ((uint32_t)vtop
->c
.i
| -(vtop
->c
.i
& 0x80000000));
3188 if (dbt_bt
== VT_LLONG
|| (PTR_SIZE
== 8 && dbt
== VT_PTR
))
3190 else if (dbt
== VT_BOOL
)
3191 vtop
->c
.i
= (vtop
->c
.i
!= 0);
3193 uint32_t m
= dbt_bt
== VT_BYTE
? 0xff :
3194 dbt_bt
== VT_SHORT
? 0xffff :
3197 if (!(dbt
& VT_UNSIGNED
))
3198 vtop
->c
.i
|= -(vtop
->c
.i
& ((m
>> 1) + 1));
3203 } else if (dbt
== VT_BOOL
3204 && (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
))
3205 == (VT_CONST
| VT_SYM
)) {
3206 /* addresses are considered non-zero (see tcctest.c:sinit23) */
3212 /* cannot generate code for global or static initializers */
3213 if (nocode_wanted
& DATA_ONLY_WANTED
)
3216 /* non constant case: generate code */
3217 if (dbt
== VT_BOOL
) {
3218 gen_test_zero(TOK_NE
);
3224 /* convert from fp to fp */
3227 /* convert int to fp */
3230 /* convert fp to int */
3232 if (dbt_bt
!= VT_LLONG
&& dbt_bt
!= VT_INT
)
3235 goto again
; /* may need char/short cast */
3240 ds
= btype_size(dbt_bt
);
3241 ss
= btype_size(sbt_bt
);
3242 if (ds
== 0 || ss
== 0)
3245 if (IS_ENUM(type
->t
) && type
->ref
->c
< 0)
3246 tcc_error("cast to incomplete type");
3248 /* same size and no sign conversion needed */
3249 if (ds
== ss
&& ds
>= 4)
3251 if (dbt_bt
== VT_PTR
|| sbt_bt
== VT_PTR
) {
3252 tcc_warning("cast between pointer and integer of different size");
3253 if (sbt_bt
== VT_PTR
) {
3254 /* put integer type to allow logical operations below */
3255 vtop
->type
.t
= (PTR_SIZE
== 8 ? VT_LLONG
: VT_INT
);
3259 /* processor allows { int a = 0, b = *(char*)&a; }
3260 That means that if we cast to less width, we can just
3261 change the type and read it still later. */
3262 #define ALLOW_SUBTYPE_ACCESS 1
3264 if (ALLOW_SUBTYPE_ACCESS
&& (vtop
->r
& VT_LVAL
)) {
3265 /* value still in memory */
3269 if (ds
<= 4 && !(dbt
== (VT_SHORT
| VT_UNSIGNED
) && sbt
== VT_BYTE
)) {
3271 goto done
; /* no 64bit envolved */
3279 /* generate high word */
3280 if (sbt
& VT_UNSIGNED
) {
3289 } else if (ss
== 8) {
3290 /* from long long: just take low order word */
3298 /* need to convert from 32bit to 64bit */
3299 if (sbt
& VT_UNSIGNED
) {
3300 #if defined(TCC_TARGET_RISCV64)
3301 /* RISC-V keeps 32bit vals in registers sign-extended.
3302 So here we need a zero-extension. */
3311 ss
= ds
, ds
= 4, dbt
= sbt
;
3312 } else if (ss
== 8) {
3313 /* RISC-V keeps 32bit vals in registers sign-extended.
3314 So here we need a sign-extension for signed types and
3315 zero-extension. for unsigned types. */
3316 #if !defined(TCC_TARGET_RISCV64)
3317 trunc
= 32; /* zero upper 32 bits for non RISC-V targets */
3326 #if defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64 || defined TCC_TARGET_ARM64
3332 bits
= (ss
- ds
) * 8;
3333 /* for unsigned, gen_op will convert SAR to SHR */
3334 vtop
->type
.t
= (ss
== 8 ? VT_LLONG
: VT_INT
) | (dbt
& VT_UNSIGNED
);
3337 vpushi(bits
- trunc
);
3344 vtop
->type
.t
&= ~ ( VT_CONSTANT
| VT_VOLATILE
| VT_ARRAY
);
3347 /* return type size as known at compile time. Put alignment at 'a' */
3348 ST_FUNC
int type_size(CType
*type
, int *a
)
3353 bt
= type
->t
& VT_BTYPE
;
3354 if (bt
== VT_STRUCT
) {
3359 } else if (bt
== VT_PTR
) {
3360 if (type
->t
& VT_ARRAY
) {
3364 ts
= type_size(&s
->type
, a
);
3366 if (ts
< 0 && s
->c
< 0)
3374 } else if (IS_ENUM(type
->t
) && type
->ref
->c
< 0) {
3376 return -1; /* incomplete enum */
3377 } else if (bt
== VT_LDOUBLE
) {
3379 return LDOUBLE_SIZE
;
3380 } else if (bt
== VT_DOUBLE
|| bt
== VT_LLONG
) {
3381 #ifdef TCC_TARGET_I386
3382 #ifdef TCC_TARGET_PE
3387 #elif defined(TCC_TARGET_ARM)
3397 } else if (bt
== VT_INT
|| bt
== VT_FLOAT
) {
3400 } else if (bt
== VT_SHORT
) {
3403 } else if (bt
== VT_QLONG
|| bt
== VT_QFLOAT
) {
3407 /* char, void, function, _Bool */
3413 /* Push the size of 'type', as known at runtime, on top of the value
   stack and store its alignment at 'a'.  For a VLA the size lives in
   a local variable (type->ref->c holds its stack offset) and is
   pushed as an lvalue; for ordinary types the compile-time size is
   used.
   NOTE(review): the non-VLA push and the guard around the error path
   are partially elided in this extract. */
3415 static void vpush_type_size(CType
*type
, int *a
)
3417 if (type
->t
& VT_VLA
) {
/* alignment still comes from the element type */
3418 type_size(&type
->ref
->type
, a
);
/* runtime size: local int variable at offset type->ref->c */
3419 vset(&int_type
, VT_LOCAL
|VT_LVAL
, type
->ref
->c
);
3421 int size
= type_size(type
, a
);
/* a negative compile-time size means the type is incomplete */
3423 tcc_error("unknown type size");
3432 /* return the pointed type of t */
3433 static inline CType
*pointed_type(CType
*type
)
3435 return &type
->ref
->type
;
3438 /* Rewrite 'type' in place so that it becomes a pointer to the
   original type: an anonymous SYM_FIELD symbol carrying the old type
   is pushed and the type word is changed to VT_PTR, keeping only the
   storage-class bits.
   NOTE(review): the declaration of 's' and the final assignment of
   the pushed symbol to type->ref are elided in this extract. */
3439 ST_FUNC
void mk_pointer(CType
*type
)
/* anonymous field symbol recording the pointed-to type (size -1) */
3442 s
= sym_push(SYM_FIELD
, type
, 0, -1);
/* keep storage bits (static/extern/typedef...) on the pointer type */
3443 type
->t
= VT_PTR
| (type
->t
& VT_STORAGE
);
3447 /* return true if type1 and type2 are exactly the same (including
3450 static int is_compatible_types(CType
*type1
, CType
*type2
)
3452 return compare_types(type1
,type2
,0);
3455 /* return true if type1 and type2 are the same (ignoring qualifiers).
3457 static int is_compatible_unqualified_types(CType
*type1
, CType
*type2
)
3459 return compare_types(type1
,type2
,1);
3462 static void cast_error(CType
*st
, CType
*dt
)
3464 type_incompatibility_error(st
, dt
, "cannot convert '%s' to '%s'");
3467 /* verify type compatibility to store vtop in 'dt' type */
3468 static void verify_assign_cast(CType
*dt
)
3470 CType
*st
, *type1
, *type2
;
3471 int dbt
, sbt
, qualwarn
, lvl
;
3473 st
= &vtop
->type
; /* source type */
3474 dbt
= dt
->t
& VT_BTYPE
;
3475 sbt
= st
->t
& VT_BTYPE
;
3476 if (dt
->t
& VT_CONSTANT
)
3477 tcc_warning("assignment of read-only location");
3481 tcc_error("assignment to void expression");
3484 /* special cases for pointers */
3485 /* '0' can also be a pointer */
3486 if (is_null_pointer(vtop
))
3488 /* accept implicit pointer to integer cast with warning */
3489 if (is_integer_btype(sbt
)) {
3490 tcc_warning("assignment makes pointer from integer without a cast");
3493 type1
= pointed_type(dt
);
3495 type2
= pointed_type(st
);
3496 else if (sbt
== VT_FUNC
)
3497 type2
= st
; /* a function is implicitly a function pointer */
3500 if (is_compatible_types(type1
, type2
))
3502 for (qualwarn
= lvl
= 0;; ++lvl
) {
3503 if (((type2
->t
& VT_CONSTANT
) && !(type1
->t
& VT_CONSTANT
)) ||
3504 ((type2
->t
& VT_VOLATILE
) && !(type1
->t
& VT_VOLATILE
)))
3506 dbt
= type1
->t
& (VT_BTYPE
|VT_LONG
);
3507 sbt
= type2
->t
& (VT_BTYPE
|VT_LONG
);
3508 if (dbt
!= VT_PTR
|| sbt
!= VT_PTR
)
3510 type1
= pointed_type(type1
);
3511 type2
= pointed_type(type2
);
3513 if (!is_compatible_unqualified_types(type1
, type2
)) {
3514 if ((dbt
== VT_VOID
|| sbt
== VT_VOID
) && lvl
== 0) {
3515 /* void * can match anything */
3516 } else if (dbt
== sbt
3517 && is_integer_btype(sbt
& VT_BTYPE
)
3518 && IS_ENUM(type1
->t
) + IS_ENUM(type2
->t
)
3519 + !!((type1
->t
^ type2
->t
) & VT_UNSIGNED
) < 2) {
3520 /* Like GCC don't warn by default for merely changes
3521 in pointer target signedness. Do warn for different
3522 base types, though, in particular for unsigned enums
3523 and signed int targets. */
3525 tcc_warning("assignment from incompatible pointer type");
3530 tcc_warning_c(warn_discarded_qualifiers
)("assignment discards qualifiers from pointer target type");
3536 if (sbt
== VT_PTR
|| sbt
== VT_FUNC
) {
3537 tcc_warning("assignment makes integer from pointer without a cast");
3538 } else if (sbt
== VT_STRUCT
) {
3539 goto case_VT_STRUCT
;
3541 /* XXX: more tests */
3545 if (!is_compatible_unqualified_types(dt
, st
)) {
/* Check that vtop may legally be assigned to an object of type 'dt'
   and prepare the implicit conversion.
   NOTE(review): the body is partially elided in this extract; the
   original presumably also performs the actual cast after the
   verification -- confirm against the full source. */
3553 static void gen_assign_cast(CType
*dt
)
3555 verify_assign_cast(dt
);
3559 /* store vtop in lvalue pushed on stack */
3560 ST_FUNC
void vstore(void)
3562 int sbt
, dbt
, ft
, r
, size
, align
, bit_size
, bit_pos
, delayed_cast
;
3564 ft
= vtop
[-1].type
.t
;
3565 sbt
= vtop
->type
.t
& VT_BTYPE
;
3566 dbt
= ft
& VT_BTYPE
;
3567 verify_assign_cast(&vtop
[-1].type
);
3569 if (sbt
== VT_STRUCT
) {
3570 /* if structure, only generate pointer */
3571 /* structure assignment : generate memcpy */
3572 size
= type_size(&vtop
->type
, &align
);
3573 /* destination, keep on stack() as result */
3575 #ifdef CONFIG_TCC_BCHECK
3576 if (vtop
->r
& VT_MUSTBOUND
)
3577 gbound(); /* check would be wrong after gaddrof() */
3579 vtop
->type
.t
= VT_PTR
;
3583 #ifdef CONFIG_TCC_BCHECK
3584 if (vtop
->r
& VT_MUSTBOUND
)
3587 vtop
->type
.t
= VT_PTR
;
3590 #ifdef TCC_TARGET_NATIVE_STRUCT_COPY
3592 #ifdef CONFIG_TCC_BCHECK
3593 && !tcc_state
->do_bounds_check
3596 gen_struct_copy(size
);
3602 /* Use memmove, rather than memcpy, as dest and src may be same: */
3605 vpush_helper_func(TOK_memmove8
);
3606 else if(!(align
& 3))
3607 vpush_helper_func(TOK_memmove4
);
3610 vpush_helper_func(TOK_memmove
);
3615 } else if (ft
& VT_BITFIELD
) {
3616 /* bitfield store handling */
3618 /* save lvalue as expression result (example: s.b = s.a = n;) */
3619 vdup(), vtop
[-1] = vtop
[-2];
3621 bit_pos
= BIT_POS(ft
);
3622 bit_size
= BIT_SIZE(ft
);
3623 /* remove bit field info to avoid loops */
3624 vtop
[-1].type
.t
= ft
& ~VT_STRUCT_MASK
;
3626 if (dbt
== VT_BOOL
) {
3627 gen_cast(&vtop
[-1].type
);
3628 vtop
[-1].type
.t
= (vtop
[-1].type
.t
& ~VT_BTYPE
) | (VT_BYTE
| VT_UNSIGNED
);
3630 r
= adjust_bf(vtop
- 1, bit_pos
, bit_size
);
3631 if (dbt
!= VT_BOOL
) {
3632 gen_cast(&vtop
[-1].type
);
3633 dbt
= vtop
[-1].type
.t
& VT_BTYPE
;
3635 if (r
== VT_STRUCT
) {
3636 store_packed_bf(bit_pos
, bit_size
);
3638 unsigned long long mask
= (1ULL << bit_size
) - 1;
3639 if (dbt
!= VT_BOOL
) {
3641 if (dbt
== VT_LLONG
)
3644 vpushi((unsigned)mask
);
3651 /* duplicate destination */
3654 /* load destination, mask and or with source */
3655 if (dbt
== VT_LLONG
)
3656 vpushll(~(mask
<< bit_pos
));
3658 vpushi(~((unsigned)mask
<< bit_pos
));
3663 /* ... and discard */
3666 } else if (dbt
== VT_VOID
) {
3669 /* optimize char/short casts */
3671 if ((dbt
== VT_BYTE
|| dbt
== VT_SHORT
)
3672 && is_integer_btype(sbt
)
3674 if ((vtop
->r
& VT_MUSTCAST
)
3675 && btype_size(dbt
) > btype_size(sbt
)
3677 force_charshort_cast();
3680 gen_cast(&vtop
[-1].type
);
3683 #ifdef CONFIG_TCC_BCHECK
3684 /* bound check case */
3685 if (vtop
[-1].r
& VT_MUSTBOUND
) {
3691 gv(RC_TYPE(dbt
)); /* generate value */
3694 vtop
->r
|= BFVAL(VT_MUSTCAST
, (sbt
== VT_LLONG
) + 1);
3695 //tcc_warning("deley cast %x -> %x", sbt, dbt);
3696 vtop
->type
.t
= ft
& VT_TYPE
;
3699 /* if lvalue was saved on stack, must read it */
3700 if ((vtop
[-1].r
& VT_VALMASK
) == VT_LLOCAL
) {
3702 r
= get_reg(RC_INT
);
3703 sv
.type
.t
= VT_PTRDIFF_T
;
3704 sv
.r
= VT_LOCAL
| VT_LVAL
;
3705 sv
.c
.i
= vtop
[-1].c
.i
;
3707 vtop
[-1].r
= r
| VT_LVAL
;
3710 r
= vtop
->r
& VT_VALMASK
;
3711 /* two word case handling :
3712 store second register at word + 4 (or +8 for x86-64) */
3713 if (USING_TWO_WORDS(dbt
)) {
3714 int load_type
= (dbt
== VT_QFLOAT
) ? VT_DOUBLE
: VT_PTRDIFF_T
;
3715 vtop
[-1].type
.t
= load_type
;
3718 /* convert to int to increment easily */
3719 vtop
->type
.t
= VT_PTRDIFF_T
;
3725 vtop
[-1].type
.t
= load_type
;
3726 /* XXX: it works because r2 is spilled last ! */
3727 store(vtop
->r2
, vtop
- 1);
3733 vtop
--; /* NOT vpop() because on x86 it would flush the fp stack */
3737 /* Generate a pre/post increment or decrement of the lvalue on top of
   the value stack.  'post' selects post-fix semantics (the saved
   original value becomes the expression result); 'c' is the token
   TOK_INC or TOK_DEC.
   NOTE(review): the arithmetic gen_op() call and the test on 'post'
   are elided in this extract. */
3738 ST_FUNC
void inc(int post
, int c
)
3741 vdup(); /* save lvalue */
3743 gv_dup(); /* duplicate value */
/* push the increment amount derived from the token (presumably
   +/-1 relative to TOK_MID -- confirm TOK_MID's definition) */
3748 vpushi(c
- TOK_MID
);
3750 vstore(); /* store value */
3752 vpop(); /* if post op, return saved value */
/* Parse one or more adjacent string literals, concatenating them
   (plus a terminating NUL) into 'astr'.  'msg' names the expected
   construct for the diagnostic when no string is present.
   NOTE(review): the buffer initialization, the error path using
   'msg' and the token-advance calls are elided in this extract. */
3755 ST_FUNC
void parse_mult_str (CString
*astr
, const char *msg
)
3757 /* read the string */
3761 while (tok
== TOK_STR
) {
3762 /* XXX: add \0 handling too ? */
3763 cstr_cat(astr
, tokc
.str
.data
, -1);
/* always NUL-terminate the accumulated string */
3766 cstr_ccat(astr
, '\0');
3769 /* If I is >= 1 and a power of two, returns log2(i)+1.
3770 If I is 0 returns 0. */
/* Implemented as a highest-set-bit scan, reducing 8 bits at a time.
   NOTE(review): only the 8-bit reduction loop is visible in this
   extract; the finer 4/2/1-bit steps and the return statement are
   elided. */
3771 ST_FUNC
int exact_log2p1(int i
)
3776 for (ret
= 1; i
>= 1 << 8; ret
+= 8)
3787 /* Parse __attribute__((...)) GNUC extension. */
3788 static void parse_attribute(AttributeDef
*ad
)
3794 if (tok
!= TOK_ATTRIBUTE1
&& tok
!= TOK_ATTRIBUTE2
)
3799 while (tok
!= ')') {
3800 if (tok
< TOK_IDENT
)
3801 expect("attribute name");
3813 tcc_warning_c(warn_implicit_function_declaration
)(
3814 "implicit declaration of function '%s'", get_tok_str(tok
, &tokc
));
3815 s
= external_global_sym(tok
, &func_old_type
);
3816 } else if ((s
->type
.t
& VT_BTYPE
) != VT_FUNC
)
3817 tcc_error("'%s' is not declared as function", get_tok_str(tok
, &tokc
));
3818 ad
->cleanup_func
= s
;
3823 case TOK_CONSTRUCTOR1
:
3824 case TOK_CONSTRUCTOR2
:
3825 ad
->f
.func_ctor
= 1;
3827 case TOK_DESTRUCTOR1
:
3828 case TOK_DESTRUCTOR2
:
3829 ad
->f
.func_dtor
= 1;
3831 case TOK_ALWAYS_INLINE1
:
3832 case TOK_ALWAYS_INLINE2
:
3833 ad
->f
.func_alwinl
= 1;
3838 parse_mult_str(&astr
, "section name");
3839 ad
->section
= find_section(tcc_state
, (char *)astr
.data
);
3846 parse_mult_str(&astr
, "alias(\"target\")");
3847 ad
->alias_target
= /* save string as token, for later */
3848 tok_alloc((char*)astr
.data
, astr
.size
-1)->tok
;
3852 case TOK_VISIBILITY1
:
3853 case TOK_VISIBILITY2
:
3855 parse_mult_str(&astr
,
3856 "visibility(\"default|hidden|internal|protected\")");
3857 if (!strcmp (astr
.data
, "default"))
3858 ad
->a
.visibility
= STV_DEFAULT
;
3859 else if (!strcmp (astr
.data
, "hidden"))
3860 ad
->a
.visibility
= STV_HIDDEN
;
3861 else if (!strcmp (astr
.data
, "internal"))
3862 ad
->a
.visibility
= STV_INTERNAL
;
3863 else if (!strcmp (astr
.data
, "protected"))
3864 ad
->a
.visibility
= STV_PROTECTED
;
3866 expect("visibility(\"default|hidden|internal|protected\")");
3875 if (n
<= 0 || (n
& (n
- 1)) != 0)
3876 tcc_error("alignment must be a positive power of two");
3881 ad
->a
.aligned
= exact_log2p1(n
);
3882 if (n
!= 1 << (ad
->a
.aligned
- 1))
3883 tcc_error("alignment of %d is larger than implemented", n
);
3899 /* currently, no need to handle it because tcc does not
3900 track unused objects */
3904 ad
->f
.func_noreturn
= 1;
3909 ad
->f
.func_call
= FUNC_CDECL
;
3914 ad
->f
.func_call
= FUNC_STDCALL
;
3916 #ifdef TCC_TARGET_I386
3926 ad
->f
.func_call
= FUNC_FASTCALL1
+ n
- 1;
3932 ad
->f
.func_call
= FUNC_FASTCALLW
;
3939 ad
->attr_mode
= VT_LLONG
+ 1;
3942 ad
->attr_mode
= VT_BYTE
+ 1;
3945 ad
->attr_mode
= VT_SHORT
+ 1;
3949 ad
->attr_mode
= VT_INT
+ 1;
3952 tcc_warning("__mode__(%s) not supported\n", get_tok_str(tok
, NULL
));
3959 ad
->a
.dllexport
= 1;
3961 case TOK_NODECORATE
:
3962 ad
->a
.nodecorate
= 1;
3965 ad
->a
.dllimport
= 1;
3968 tcc_warning_c(warn_unsupported
)("'%s' attribute ignored", get_tok_str(t
, NULL
));
3969 /* skip parameters */
3971 int parenthesis
= 0;
3975 else if (tok
== ')')
3978 } while (parenthesis
&& tok
!= -1);
3991 static Sym
* find_field (CType
*type
, int v
, int *cumofs
)
3994 int v1
= v
| SYM_FIELD
;
3996 while ((s
= s
->next
) != NULL
) {
4001 if ((s
->type
.t
& VT_BTYPE
) == VT_STRUCT
4002 && s
->v
>= (SYM_FIRST_ANOM
| SYM_FIELD
)) {
4003 /* try to find field in anonymous sub-struct/union */
4004 Sym
*ret
= find_field (&s
->type
, v1
, cumofs
);
4012 if (!(v
& SYM_FIELD
)) { /* top-level call */
4015 tcc_error("dereferencing incomplete type '%s'",
4016 get_tok_str(s
->v
& ~SYM_STRUCT
, 0));
4018 tcc_error("field not found: %s",
4019 get_tok_str(v
, &tokc
));
/* Walk every member of struct/union 'type', recursing into members
   that are themselves anonymous structs/unions, and toggle the
   SYM_FIELD bit on each named member's token-table entry.  With
   check != 0 an already-set bit means the name was seen before and a
   duplicate-member error is raised; a matching check == 0 pass
   clears the bits again.
   NOTE(review): the initial cursor setup ('s' starting from the
   struct's member list) is elided in this extract. */
4024 static void check_fields (CType
*type
, int check
)
4028 while ((s
= s
->next
) != NULL
) {
4029 int v
= s
->v
& ~SYM_FIELD
;
/* only named (non-anonymous) members have token-table entries */
4030 if (v
< SYM_FIRST_ANOM
) {
4031 TokenSym
*ts
= table_ident
[v
- TOK_IDENT
];
4032 if (check
&& (ts
->tok
& SYM_FIELD
))
4033 tcc_error("duplicate member '%s'", get_tok_str(v
, NULL
));
/* toggle: set during the check pass, cleared by the cleanup pass */
4034 ts
->tok
^= SYM_FIELD
;
4035 } else if ((s
->type
.t
& VT_BTYPE
) == VT_STRUCT
)
/* anonymous sub-struct/union: its members share this name scope */
4036 check_fields (&s
->type
, check
);
4040 static void struct_layout(CType
*type
, AttributeDef
*ad
)
4042 int size
, align
, maxalign
, offset
, c
, bit_pos
, bit_size
;
4043 int packed
, a
, bt
, prevbt
, prev_bit_size
;
4044 int pcc
= !tcc_state
->ms_bitfields
;
4045 int pragma_pack
= *tcc_state
->pack_stack_ptr
;
4052 prevbt
= VT_STRUCT
; /* make it never match */
4057 for (f
= type
->ref
->next
; f
; f
= f
->next
) {
4058 if (f
->type
.t
& VT_BITFIELD
)
4059 bit_size
= BIT_SIZE(f
->type
.t
);
4062 size
= type_size(&f
->type
, &align
);
4063 a
= f
->a
.aligned
? 1 << (f
->a
.aligned
- 1) : 0;
4066 if (pcc
&& bit_size
== 0) {
4067 /* in pcc mode, packing does not affect zero-width bitfields */
4070 /* in pcc mode, attribute packed overrides if set. */
4071 if (pcc
&& (f
->a
.packed
|| ad
->a
.packed
))
4074 /* pragma pack overrides align if lesser and packs bitfields always */
4077 if (pragma_pack
< align
)
4078 align
= pragma_pack
;
4079 /* in pcc mode pragma pack also overrides individual align */
4080 if (pcc
&& pragma_pack
< a
)
4084 /* some individual align was specified */
4088 if (type
->ref
->type
.t
== VT_UNION
) {
4089 if (pcc
&& bit_size
>= 0)
4090 size
= (bit_size
+ 7) >> 3;
4095 } else if (bit_size
< 0) {
4097 c
+= (bit_pos
+ 7) >> 3;
4098 c
= (c
+ align
- 1) & -align
;
4107 /* A bit-field. Layout is more complicated. There are two
4108 options: PCC (GCC) compatible and MS compatible */
4110 /* In PCC layout a bit-field is placed adjacent to the
4111 preceding bit-fields, except if:
4113 - an individual alignment was given
4114 - it would overflow its base type container and
4115 there is no packing */
4116 if (bit_size
== 0) {
4118 c
= (c
+ ((bit_pos
+ 7) >> 3) + align
- 1) & -align
;
4120 } else if (f
->a
.aligned
) {
4122 } else if (!packed
) {
4124 int ofs
= ((c
* 8 + bit_pos
) % a8
+ bit_size
+ a8
- 1) / a8
;
4125 if (ofs
> size
/ align
)
4129 /* in pcc mode, long long bitfields have type int if they fit */
4130 if (size
== 8 && bit_size
<= 32)
4131 f
->type
.t
= (f
->type
.t
& ~VT_BTYPE
) | VT_INT
, size
= 4;
4133 while (bit_pos
>= align
* 8)
4134 c
+= align
, bit_pos
-= align
* 8;
4137 /* In PCC layout named bit-fields influence the alignment
4138 of the containing struct using the base types alignment,
4139 except for packed fields (which here have correct align). */
4140 if (f
->v
& SYM_FIRST_ANOM
4141 // && bit_size // ??? gcc on ARM/rpi does that
4146 bt
= f
->type
.t
& VT_BTYPE
;
4147 if ((bit_pos
+ bit_size
> size
* 8)
4148 || (bit_size
> 0) == (bt
!= prevbt
)
4150 c
= (c
+ align
- 1) & -align
;
4153 /* In MS bitfield mode a bit-field run always uses
4154 at least as many bits as the underlying type.
4155 To start a new run it's also required that this
4156 or the last bit-field had non-zero width. */
4157 if (bit_size
|| prev_bit_size
)
4160 /* In MS layout the records alignment is normally
4161 influenced by the field, except for a zero-width
4162 field at the start of a run (but by further zero-width
4163 fields it is again). */
4164 if (bit_size
== 0 && prevbt
!= bt
)
4167 prev_bit_size
= bit_size
;
4170 f
->type
.t
= (f
->type
.t
& ~(0x3f << VT_STRUCT_SHIFT
))
4171 | (bit_pos
<< VT_STRUCT_SHIFT
);
4172 bit_pos
+= bit_size
;
4174 if (align
> maxalign
)
4178 printf("set field %s offset %-2d size %-2d align %-2d",
4179 get_tok_str(f
->v
& ~SYM_FIELD
, NULL
), offset
, size
, align
);
4180 if (f
->type
.t
& VT_BITFIELD
) {
4181 printf(" pos %-2d bits %-2d",
4194 c
+= (bit_pos
+ 7) >> 3;
4196 /* store size and alignment */
4197 a
= bt
= ad
->a
.aligned
? 1 << (ad
->a
.aligned
- 1) : 1;
4201 if (pragma_pack
&& pragma_pack
< maxalign
&& 0 == pcc
) {
4202 /* can happen if individual align for some member was given. In
4203 this case MSVC ignores maxalign when aligning the size */
4208 c
= (c
+ a
- 1) & -a
;
4212 printf("struct size %-2d align %-2d\n\n", c
, a
), fflush(stdout
);
4215 /* check whether we can access bitfields by their type */
4216 for (f
= type
->ref
->next
; f
; f
= f
->next
) {
4220 if (0 == (f
->type
.t
& VT_BITFIELD
))
4224 bit_size
= BIT_SIZE(f
->type
.t
);
4227 bit_pos
= BIT_POS(f
->type
.t
);
4228 size
= type_size(&f
->type
, &align
);
4230 if (bit_pos
+ bit_size
<= size
* 8 && f
->c
+ size
<= c
4231 #ifdef TCC_TARGET_ARM
4232 && !(f
->c
& (align
- 1))
4237 /* try to access the field using a different type */
4238 c0
= -1, s
= align
= 1;
4241 px
= f
->c
* 8 + bit_pos
;
4242 cx
= (px
>> 3) & -align
;
4243 px
= px
- (cx
<< 3);
4246 s
= (px
+ bit_size
+ 7) >> 3;
4256 s
= type_size(&t
, &align
);
4260 if (px
+ bit_size
<= s
* 8 && cx
+ s
<= c
4261 #ifdef TCC_TARGET_ARM
4262 && !(cx
& (align
- 1))
4265 /* update offset and bit position */
4268 f
->type
.t
= (f
->type
.t
& ~(0x3f << VT_STRUCT_SHIFT
))
4269 | (bit_pos
<< VT_STRUCT_SHIFT
);
4273 printf("FIX field %s offset %-2d size %-2d align %-2d "
4274 "pos %-2d bits %-2d\n",
4275 get_tok_str(f
->v
& ~SYM_FIELD
, NULL
),
4276 cx
, s
, align
, px
, bit_size
);
4279 /* fall back to load/store single-byte wise */
4280 f
->auxtype
= VT_STRUCT
;
4282 printf("FIX field %s : load byte-wise\n",
4283 get_tok_str(f
->v
& ~SYM_FIELD
, NULL
));
4289 static void do_Static_assert(void);
4291 /* enum/struct/union declaration. u is VT_ENUM/VT_STRUCT/VT_UNION */
4292 static void struct_decl(CType
*type
, int u
)
4294 int v
, c
, size
, align
, flexible
;
4295 int bit_size
, bsize
, bt
;
4297 AttributeDef ad
, ad1
;
4300 memset(&ad
, 0, sizeof ad
);
4302 parse_attribute(&ad
);
4306 /* struct already defined ? return it */
4308 expect("struct/union/enum name");
4310 if (s
&& (s
->sym_scope
== local_scope
|| tok
!= '{')) {
4313 if (u
== VT_ENUM
&& IS_ENUM(s
->type
.t
))
4315 tcc_error("redefinition of '%s'", get_tok_str(v
, NULL
));
4320 /* Record the original enum/struct/union token. */
4321 type1
.t
= u
== VT_ENUM
? u
| VT_INT
| VT_UNSIGNED
: u
;
4323 /* we put an undefined size for struct/union */
4324 s
= sym_push(v
| SYM_STRUCT
, &type1
, 0, -1);
4325 s
->r
= 0; /* default alignment is zero as gcc */
4327 type
->t
= s
->type
.t
;
4333 tcc_error("struct/union/enum already defined");
4335 /* cannot be empty */
4336 /* non empty enums are not allowed */
4339 long long ll
= 0, pl
= 0, nl
= 0;
4342 /* enum symbols have static storage */
4343 t
.t
= VT_INT
|VT_STATIC
|VT_ENUM_VAL
;
4347 expect("identifier");
4349 if (ss
&& !local_stack
)
4350 tcc_error("redefinition of enumerator '%s'",
4351 get_tok_str(v
, NULL
));
4355 ll
= expr_const64();
4357 ss
= sym_push(v
, &t
, VT_CONST
, 0);
4359 *ps
= ss
, ps
= &ss
->next
;
4368 /* NOTE: we accept a trailing comma */
4373 /* set integral type of the enum */
4376 if (pl
!= (unsigned)pl
)
4377 t
.t
= (LONG_SIZE
==8 ? VT_LLONG
|VT_LONG
: VT_LLONG
);
4379 } else if (pl
!= (int)pl
|| nl
!= (int)nl
)
4380 t
.t
= (LONG_SIZE
==8 ? VT_LLONG
|VT_LONG
: VT_LLONG
);
4381 s
->type
.t
= type
->t
= t
.t
| VT_ENUM
;
4383 /* set type for enum members */
4384 for (ss
= s
->next
; ss
; ss
= ss
->next
) {
4386 if (ll
== (int)ll
) /* default is int if it fits */
4388 if (t
.t
& VT_UNSIGNED
) {
4389 ss
->type
.t
|= VT_UNSIGNED
;
4390 if (ll
== (unsigned)ll
)
4393 ss
->type
.t
= (ss
->type
.t
& ~VT_BTYPE
)
4394 | (LONG_SIZE
==8 ? VT_LLONG
|VT_LONG
: VT_LLONG
);
4399 while (tok
!= '}') {
4400 if (tok
== TOK_STATIC_ASSERT
) {
4404 if (!parse_btype(&btype
, &ad1
, 0)) {
4410 tcc_error("flexible array member '%s' not at the end of struct",
4411 get_tok_str(v
, NULL
));
4417 type_decl(&type1
, &ad1
, &v
, TYPE_DIRECT
);
4419 if ((type1
.t
& VT_BTYPE
) != VT_STRUCT
)
4420 expect("identifier");
4422 int v
= btype
.ref
->v
;
4423 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
4424 if (tcc_state
->ms_extensions
== 0)
4425 expect("identifier");
4429 if (type_size(&type1
, &align
) < 0) {
4430 if ((u
== VT_STRUCT
) && (type1
.t
& VT_ARRAY
) && c
)
4433 tcc_error("field '%s' has incomplete type",
4434 get_tok_str(v
, NULL
));
4436 if ((type1
.t
& VT_BTYPE
) == VT_FUNC
||
4437 (type1
.t
& VT_BTYPE
) == VT_VOID
||
4438 (type1
.t
& VT_STORAGE
))
4439 tcc_error("invalid type for '%s'",
4440 get_tok_str(v
, NULL
));
4444 bit_size
= expr_const();
4445 /* XXX: handle v = 0 case for messages */
4447 tcc_error("negative width in bit-field '%s'",
4448 get_tok_str(v
, NULL
));
4449 if (v
&& bit_size
== 0)
4450 tcc_error("zero width for bit-field '%s'",
4451 get_tok_str(v
, NULL
));
4452 parse_attribute(&ad1
);
4454 size
= type_size(&type1
, &align
);
4455 if (bit_size
>= 0) {
4456 bt
= type1
.t
& VT_BTYPE
;
4462 tcc_error("bitfields must have scalar type");
4464 if (bit_size
> bsize
) {
4465 tcc_error("width of '%s' exceeds its type",
4466 get_tok_str(v
, NULL
));
4467 } else if (bit_size
== bsize
4468 && !ad
.a
.packed
&& !ad1
.a
.packed
) {
4469 /* no need for bit fields */
4471 } else if (bit_size
== 64) {
4472 tcc_error("field width 64 not implemented");
4474 type1
.t
= (type1
.t
& ~VT_STRUCT_MASK
)
4476 | (bit_size
<< (VT_STRUCT_SHIFT
+ 6));
4479 if (v
!= 0 || (type1
.t
& VT_BTYPE
) == VT_STRUCT
) {
4480 /* Remember we've seen a real field to check
4481 for placement of flexible array member. */
4484 /* If member is a struct or bit-field, enforce
4485 placing into the struct (as anonymous). */
4487 ((type1
.t
& VT_BTYPE
) == VT_STRUCT
||
4492 ss
= sym_push(v
| SYM_FIELD
, &type1
, 0, 0);
4497 if (tok
== ';' || tok
== TOK_EOF
)
4504 parse_attribute(&ad
);
4505 if (ad
.cleanup_func
) {
4506 tcc_warning("attribute '__cleanup__' ignored on type");
4508 check_fields(type
, 1);
4509 check_fields(type
, 0);
4510 struct_layout(type
, &ad
);
4512 tcc_debug_fix_anon(tcc_state
, type
);
4517 static void sym_to_attr(AttributeDef
*ad
, Sym
*s
)
4519 merge_symattr(&ad
->a
, &s
->a
);
4520 merge_funcattr(&ad
->f
, &s
->f
);
4523 /* Add type qualifiers to a type. If the type is an array then the qualifiers
4524 are added to the element type, copied because it could be a typedef. */
4525 static void parse_btype_qualify(CType
*type
, int qualifiers
)
4527 while (type
->t
& VT_ARRAY
) {
4528 type
->ref
= sym_push(SYM_FIELD
, &type
->ref
->type
, 0, type
->ref
->c
);
4529 type
= &type
->ref
->type
;
4531 type
->t
|= qualifiers
;
4534 /* return 0 if no type declaration. otherwise, return the basic type
4537 static int parse_btype(CType
*type
, AttributeDef
*ad
, int ignore_label
)
4539 int t
, u
, bt
, st
, type_found
, typespec_found
, g
, n
;
4543 memset(ad
, 0, sizeof(AttributeDef
));
4553 /* currently, we really ignore extension */
4563 if (u
== VT_SHORT
|| u
== VT_LONG
) {
4564 if (st
!= -1 || (bt
!= -1 && bt
!= VT_INT
))
4565 tmbt
: tcc_error("too many basic types");
4568 if (bt
!= -1 || (st
!= -1 && u
!= VT_INT
))
4573 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | u
;
4590 memset(&ad1
, 0, sizeof(AttributeDef
));
4591 if (parse_btype(&type1
, &ad1
, 0)) {
4592 type_decl(&type1
, &ad1
, &n
, TYPE_ABSTRACT
);
4594 n
= 1 << (ad1
.a
.aligned
- 1);
4596 type_size(&type1
, &n
);
4599 if (n
< 0 || (n
& (n
- 1)) != 0)
4600 tcc_error("alignment must be a positive power of two");
4603 ad
->a
.aligned
= exact_log2p1(n
);
4607 if ((t
& VT_BTYPE
) == VT_DOUBLE
) {
4608 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_LDOUBLE
;
4609 } else if ((t
& (VT_BTYPE
|VT_LONG
)) == VT_LONG
) {
4610 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_LLONG
;
4617 #ifdef TCC_TARGET_ARM64
4619 /* GCC's __uint128_t appears in some Linux header files. Make it a
4620 synonym for long double to get the size and alignment right. */
4628 tcc_error("_Complex is not yet supported");
4633 if ((t
& (VT_BTYPE
|VT_LONG
)) == VT_LONG
) {
4634 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_LDOUBLE
;
4642 struct_decl(&type1
, VT_ENUM
);
4645 type
->ref
= type1
.ref
;
4648 struct_decl(&type1
, VT_STRUCT
);
4651 struct_decl(&type1
, VT_UNION
);
4654 /* type modifiers */
4658 parse_btype_qualify(type
, VT_ATOMIC
);
4661 parse_expr_type(&type1
);
4662 /* remove all storage modifiers except typedef */
4663 type1
.t
&= ~(VT_STORAGE
&~VT_TYPEDEF
);
4665 sym_to_attr(ad
, type1
.ref
);
4673 parse_btype_qualify(type
, VT_CONSTANT
);
4681 parse_btype_qualify(type
, VT_VOLATILE
);
4688 if ((t
& (VT_DEFSIGN
|VT_UNSIGNED
)) == (VT_DEFSIGN
|VT_UNSIGNED
))
4689 tcc_error("signed and unsigned modifier");
4702 if ((t
& (VT_DEFSIGN
|VT_UNSIGNED
)) == VT_DEFSIGN
)
4703 tcc_error("signed and unsigned modifier");
4704 t
|= VT_DEFSIGN
| VT_UNSIGNED
;
4720 if (t
& (VT_EXTERN
|VT_STATIC
|VT_TYPEDEF
) & ~g
)
4721 tcc_error("multiple storage classes");
4733 ad
->f
.func_noreturn
= 1;
4735 /* GNUC attribute */
4736 case TOK_ATTRIBUTE1
:
4737 case TOK_ATTRIBUTE2
:
4738 parse_attribute(ad
);
4739 if (ad
->attr_mode
) {
4740 u
= ad
->attr_mode
-1;
4741 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | u
;
4749 parse_expr_type(&type1
);
4750 /* remove all storage modifiers except typedef */
4751 type1
.t
&= ~(VT_STORAGE
&~VT_TYPEDEF
);
4753 sym_to_attr(ad
, type1
.ref
);
4755 case TOK_THREAD_LOCAL
:
4756 tcc_error("_Thread_local is not implemented");
4761 if (!s
|| !(s
->type
.t
& VT_TYPEDEF
))
4765 if (tok
== ':' && ignore_label
) {
4766 /* ignore if it's a label */
4771 t
&= ~(VT_BTYPE
|VT_LONG
);
4772 u
= t
& ~(VT_CONSTANT
| VT_VOLATILE
), t
^= u
;
4773 type
->t
= (s
->type
.t
& ~VT_TYPEDEF
) | u
;
4774 type
->ref
= s
->type
.ref
;
4776 parse_btype_qualify(type
, t
);
4778 /* get attributes from typedef */
4787 if (tcc_state
->char_is_unsigned
) {
4788 if ((t
& (VT_DEFSIGN
|VT_BTYPE
)) == VT_BYTE
)
4791 /* VT_LONG is used just as a modifier for VT_INT / VT_LLONG */
4792 bt
= t
& (VT_BTYPE
|VT_LONG
);
4794 t
|= LONG_SIZE
== 8 ? VT_LLONG
: VT_INT
;
4795 #ifdef TCC_USING_DOUBLE_FOR_LDOUBLE
4796 if (bt
== VT_LDOUBLE
)
4797 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | (VT_DOUBLE
|VT_LONG
);
4803 /* convert a function parameter type (array to pointer and function to
4804 function pointer) */
/* NOTE(review): the array-decay statement and the mk_pointer() call
   inside the VT_FUNC branch are elided in this extract; only the
   qualifier stripping and the function-type test are visible. */
4805 static inline void convert_parameter_type(CType
*pt
)
4807 /* remove const and volatile qualifiers (XXX: const could be used
4808 to indicate a const function parameter) */
4809 pt
->t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
4810 /* array must be transformed to pointer according to ANSI C */
4812 if ((pt
->t
& VT_BTYPE
) == VT_FUNC
) {
/* Parse the parenthesized string-literal operand of an asm construct
   into 'astr'.
   NOTE(review): the skip of the opening parenthesis in the original
   is elided in this extract. */
4817 ST_FUNC
void parse_asm_str(CString
*astr
)
4820 parse_mult_str(astr
, "string constant");
4823 /* Parse an asm label and return the token */
4824 static int asm_label_instr(void)
4830 parse_asm_str(&astr
);
4833 printf("asm_alias: \"%s\"\n", (char *)astr
.data
);
4835 v
= tok_alloc(astr
.data
, astr
.size
- 1)->tok
;
4840 static int post_type(CType
*type
, AttributeDef
*ad
, int storage
, int td
)
4842 int n
, l
, t1
, arg_size
, align
;
4843 Sym
**plast
, *s
, *first
;
4846 TokenString
*vla_array_tok
= NULL
;
4847 int *vla_array_str
= NULL
;
4850 /* function type, or recursive declarator (return if so) */
4852 if (TYPE_DIRECT
== (td
& (TYPE_DIRECT
|TYPE_ABSTRACT
)))
4856 else if (parse_btype(&pt
, &ad1
, 0))
4858 else if (td
& (TYPE_DIRECT
|TYPE_ABSTRACT
)) {
4859 merge_attr (ad
, &ad1
);
4870 /* read param name and compute offset */
4871 if (l
!= FUNC_OLD
) {
4872 if ((pt
.t
& VT_BTYPE
) == VT_VOID
&& tok
== ')')
4874 type_decl(&pt
, &ad1
, &n
, TYPE_DIRECT
| TYPE_ABSTRACT
| TYPE_PARAM
);
4875 if ((pt
.t
& VT_BTYPE
) == VT_VOID
)
4876 tcc_error("parameter declared as void");
4881 pt
.t
= VT_VOID
; /* invalid type */
4886 expect("identifier");
4887 convert_parameter_type(&pt
);
4888 arg_size
+= (type_size(&pt
, &align
) + PTR_SIZE
- 1) / PTR_SIZE
;
4889 /* these symbols may be evaluated for VLArrays (see below, under
4890 nocode_wanted) which is why we push them here as normal symbols
4891 temporarily. Example: int func(int a, int b[++a]); */
4892 s
= sym_push(n
, &pt
, VT_LOCAL
|VT_LVAL
, 0);
4898 if (l
== FUNC_NEW
&& tok
== TOK_DOTS
) {
4903 if (l
== FUNC_NEW
&& !parse_btype(&pt
, &ad1
, 0))
4904 tcc_error("invalid type");
4907 /* if no parameters, then old type prototype */
4910 /* remove parameter symbols from token table, keep on stack */
4912 sym_pop(local_stack
? &local_stack
: &global_stack
, first
->prev
, 1);
4913 for (s
= first
; s
; s
= s
->next
)
4917 /* NOTE: const is ignored in returned type as it has a special
4918 meaning in gcc / C++ */
4919 type
->t
&= ~VT_CONSTANT
;
4920 /* some ancient pre-K&R C allows a function to return an array
4921 and the array brackets to be put after the arguments, such
4922 that "int c()[]" means something like "int[] c()" */
4925 skip(']'); /* only handle simple "[]" */
4928 /* we push a anonymous symbol which will contain the function prototype */
4929 ad
->f
.func_args
= arg_size
;
4930 ad
->f
.func_type
= l
;
4931 s
= sym_push(SYM_FIELD
, type
, 0, 0);
4937 } else if (tok
== '[') {
4938 int saved_nocode_wanted
= nocode_wanted
;
4939 /* array definition */
4943 if (td
& TYPE_PARAM
) while (1) {
4944 /* XXX The optional type-quals and static should only be accepted
4945 in parameter decls. The '*' as well, and then even only
4946 in prototypes (not function defs). */
4948 case TOK_RESTRICT1
: case TOK_RESTRICT2
: case TOK_RESTRICT3
:
4959 /* Code generation is not done now but has to be done
4960 at start of function. Save code here for later use. */
4962 skip_or_save_block(&vla_array_tok
);
4964 vla_array_str
= vla_array_tok
->str
;
4965 begin_macro(vla_array_tok
, 2);
4974 } else if (tok
!= ']') {
4975 if (!local_stack
|| (storage
& VT_STATIC
))
4976 vpushi(expr_const());
4978 /* VLAs (which can only happen with local_stack && !VT_STATIC)
4979 length must always be evaluated, even under nocode_wanted,
4980 so that its size slot is initialized (e.g. under sizeof
4986 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
4989 tcc_error("invalid array size");
4991 if (!is_integer_btype(vtop
->type
.t
& VT_BTYPE
))
4992 tcc_error("size of variable length array should be an integer");
4998 /* parse next post type */
4999 post_type(type
, ad
, storage
, (td
& ~(TYPE_DIRECT
|TYPE_ABSTRACT
)) | TYPE_NEST
);
5001 if ((type
->t
& VT_BTYPE
) == VT_FUNC
)
5002 tcc_error("declaration of an array of functions");
5003 if ((type
->t
& VT_BTYPE
) == VT_VOID
5004 || type_size(type
, &align
) < 0)
5005 tcc_error("declaration of an array of incomplete type elements");
5007 t1
|= type
->t
& VT_VLA
;
5012 tcc_error("need explicit inner array size in VLAs");
5015 loc
-= type_size(&int_type
, &align
);
5019 vpush_type_size(type
, &align
);
5021 vset(&int_type
, VT_LOCAL
|VT_LVAL
, n
);
5028 nocode_wanted
= saved_nocode_wanted
;
5030 /* we push an anonymous symbol which will contain the array
5032 s
= sym_push(SYM_FIELD
, type
, 0, n
);
5033 type
->t
= (t1
? VT_VLA
: VT_ARRAY
) | VT_PTR
;
5036 if (vla_array_str
) {
5038 s
->vla_array_str
= vla_array_str
;
5040 tok_str_free_str(vla_array_str
);
5046 /* Parse a type declarator (except basic type), and return the type
5047 in 'type'. 'td' is a bitmask indicating which kind of type decl is
5048 expected. 'type' should contain the basic type. 'ad' is the
5049 attribute definition of the basic type. It can be modified by
5050 type_decl(). If this (possibly abstract) declarator is a pointer chain
5051 it returns the innermost pointed to type (equals *type, but is a different
5052 pointer), otherwise returns type itself, that's used for recursive calls. */
5053 static CType
*type_decl(CType
*type
, AttributeDef
*ad
, int *v
, int td
)
5056 int qualifiers
, storage
;
5058 /* recursive type, remove storage bits first, apply them later again */
5059 storage
= type
->t
& VT_STORAGE
;
5060 type
->t
&= ~VT_STORAGE
;
5063 while (tok
== '*') {
5069 qualifiers
|= VT_ATOMIC
;
5074 qualifiers
|= VT_CONSTANT
;
5079 qualifiers
|= VT_VOLATILE
;
5085 /* XXX: clarify attribute handling */
5086 case TOK_ATTRIBUTE1
:
5087 case TOK_ATTRIBUTE2
:
5088 parse_attribute(ad
);
5092 type
->t
|= qualifiers
;
5094 /* innermost pointed to type is the one for the first derivation */
5095 ret
= pointed_type(type
);
5099 /* This is possibly a parameter type list for abstract declarators
5100 ('int ()'), use post_type for testing this. */
5101 if (!post_type(type
, ad
, 0, td
)) {
5102 /* It's not, so it's a nested declarator, and the post operations
5103 apply to the innermost pointed to type (if any). */
5104 /* XXX: this is not correct to modify 'ad' at this point, but
5105 the syntax is not clear */
5106 parse_attribute(ad
);
5107 post
= type_decl(type
, ad
, v
, td
);
5111 } else if (tok
>= TOK_IDENT
&& (td
& TYPE_DIRECT
)) {
5112 /* type identifier */
5117 if (!(td
& TYPE_ABSTRACT
))
5118 expect("identifier");
5121 post_type(post
, ad
, post
!= ret
? 0 : storage
,
5122 td
& ~(TYPE_DIRECT
|TYPE_ABSTRACT
));
5123 parse_attribute(ad
);
5128 /* indirection with full error checking and bound check */
5129 ST_FUNC
void indir(void)
5131 if ((vtop
->type
.t
& VT_BTYPE
) != VT_PTR
) {
5132 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FUNC
)
5136 if (vtop
->r
& VT_LVAL
)
5138 vtop
->type
= *pointed_type(&vtop
->type
);
5139 /* Arrays and functions are never lvalues */
5140 if (!(vtop
->type
.t
& (VT_ARRAY
| VT_VLA
))
5141 && (vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
) {
5143 /* if bound checking, the referenced pointer must be checked */
5144 #ifdef CONFIG_TCC_BCHECK
5145 if (tcc_state
->do_bounds_check
)
5146 vtop
->r
|= VT_MUSTBOUND
;
5151 /* pass a parameter to a function and do type checking and casting */
5152 static void gfunc_param_typed(Sym
*func
, Sym
*arg
)
5157 func_type
= func
->f
.func_type
;
5158 if (func_type
== FUNC_OLD
||
5159 (func_type
== FUNC_ELLIPSIS
&& arg
== NULL
)) {
5160 /* default casting : only need to convert float to double */
5161 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FLOAT
) {
5162 gen_cast_s(VT_DOUBLE
);
5163 } else if (vtop
->type
.t
& VT_BITFIELD
) {
5164 type
.t
= vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
);
5165 type
.ref
= vtop
->type
.ref
;
5167 } else if (vtop
->r
& VT_MUSTCAST
) {
5168 force_charshort_cast();
5170 } else if (arg
== NULL
) {
5171 tcc_error("too many arguments to function");
5174 type
.t
&= ~VT_CONSTANT
; /* need to do that to avoid false warning */
5175 gen_assign_cast(&type
);
5179 /* parse an expression and return its type without any side effect. */
5180 static void expr_type(CType
*type
, void (*expr_fn
)(void))
5189 /* parse an expression of the form '(type)' or '(expr)' and return its
5191 static void parse_expr_type(CType
*type
)
5197 if (parse_btype(type
, &ad
, 0)) {
5198 type_decl(type
, &ad
, &n
, TYPE_ABSTRACT
);
5200 expr_type(type
, gexpr
);
5205 static void parse_type(CType
*type
)
5210 if (!parse_btype(type
, &ad
, 0)) {
5213 type_decl(type
, &ad
, &n
, TYPE_ABSTRACT
);
5216 static void parse_builtin_params(int nc
, const char *args
)
5225 while ((c
= *args
++)) {
5240 type
.t
= VT_CONSTANT
;
5246 type
.t
= VT_CONSTANT
;
5248 type
.t
|= char_type
.t
;
5260 gen_assign_cast(&type
);
5267 static void parse_atomic(int atok
)
5269 int size
, align
, arg
, t
, save
= 0;
5270 CType
*atom
, *atom_ptr
, ct
= {0};
5273 static const char *const templates
[] = {
5275 * Each entry consists of callback and function template.
5276 * The template represents argument types and return type.
5278 * ? void (return-only)
5281 * A read-only atomic
5282 * p pointer to memory
5289 /* keep in order of appearance in tcctok.h: */
5290 /* __atomic_store */ "alm.?",
5291 /* __atomic_load */ "Asm.v",
5292 /* __atomic_exchange */ "alsm.v",
5293 /* __atomic_compare_exchange */ "aplbmm.b",
5294 /* __atomic_fetch_add */ "avm.v",
5295 /* __atomic_fetch_sub */ "avm.v",
5296 /* __atomic_fetch_or */ "avm.v",
5297 /* __atomic_fetch_xor */ "avm.v",
5298 /* __atomic_fetch_and */ "avm.v",
5299 /* __atomic_fetch_nand */ "avm.v",
5300 /* __atomic_and_fetch */ "avm.v",
5301 /* __atomic_sub_fetch */ "avm.v",
5302 /* __atomic_or_fetch */ "avm.v",
5303 /* __atomic_xor_fetch */ "avm.v",
5304 /* __atomic_and_fetch */ "avm.v",
5305 /* __atomic_nand_fetch */ "avm.v"
5307 const char *template = templates
[(atok
- TOK___atomic_store
)];
5309 atom
= atom_ptr
= NULL
;
5310 size
= 0; /* pacify compiler */
5315 switch (template[arg
]) {
5318 atom_ptr
= &vtop
->type
;
5319 if ((atom_ptr
->t
& VT_BTYPE
) != VT_PTR
)
5321 atom
= pointed_type(atom_ptr
);
5322 size
= type_size(atom
, &align
);
5324 || (size
& (size
- 1))
5325 || (atok
> TOK___atomic_compare_exchange
5326 && (0 == btype_size(atom
->t
& VT_BTYPE
)
5327 || (atom
->t
& VT_BTYPE
) == VT_PTR
)))
5328 expect("integral or integer-sized pointer target type");
5329 /* GCC does not care either: */
5330 /* if (!(atom->t & VT_ATOMIC))
5331 tcc_warning("pointer target declaration is missing '_Atomic'"); */
5335 if ((vtop
->type
.t
& VT_BTYPE
) != VT_PTR
5336 || type_size(pointed_type(&vtop
->type
), &align
) != size
)
5337 tcc_error("pointer target type mismatch in argument %d", arg
+ 1);
5338 gen_assign_cast(atom_ptr
);
5341 gen_assign_cast(atom
);
5345 gen_assign_cast(atom
);
5354 gen_assign_cast(&int_type
);
5358 gen_assign_cast(&ct
);
5361 if ('.' == template[++arg
])
5368 switch (template[arg
+ 1]) {
5377 sprintf(buf
, "%s_%d", get_tok_str(atok
, 0), size
);
5378 vpush_helper_func(tok_alloc_const(buf
));
5379 vrott(arg
- save
+ 1);
5380 gfunc_call(arg
- save
);
5383 PUT_R_RET(vtop
, ct
.t
);
5384 t
= ct
.t
& VT_BTYPE
;
5385 if (t
== VT_BYTE
|| t
== VT_SHORT
|| t
== VT_BOOL
) {
5387 vtop
->r
|= BFVAL(VT_MUSTCAST
, 1);
5389 vtop
->type
.t
= VT_INT
;
5401 ST_FUNC
void unary(void)
5403 int n
, t
, align
, size
, r
, sizeof_caller
;
5408 /* generate line number info */
5410 tcc_debug_line(tcc_state
), tcc_tcov_check_line (tcc_state
, 1);
5412 sizeof_caller
= in_sizeof
;
5415 /* XXX: GCC 2.95.3 does not generate a table although it should be
5423 #ifdef TCC_TARGET_PE
5424 t
= VT_SHORT
|VT_UNSIGNED
;
5432 vsetc(&type
, VT_CONST
, &tokc
);
5436 t
= VT_INT
| VT_UNSIGNED
;
5442 t
= VT_LLONG
| VT_UNSIGNED
;
5454 t
= (LONG_SIZE
== 8 ? VT_LLONG
: VT_INT
) | VT_LONG
;
5457 t
= (LONG_SIZE
== 8 ? VT_LLONG
: VT_INT
) | VT_LONG
| VT_UNSIGNED
;
5459 case TOK___FUNCTION__
:
5461 goto tok_identifier
;
5467 /* special function name identifier */
5468 len
= strlen(funcname
) + 1;
5469 /* generate char[len] type */
5470 type
.t
= char_type
.t
;
5471 if (tcc_state
->warn_write_strings
& WARN_ON
)
5472 type
.t
|= VT_CONSTANT
;
5476 sec
= rodata_section
;
5477 vpush_ref(&type
, sec
, sec
->data_offset
, len
);
5479 memcpy(section_ptr_add(sec
, len
), funcname
, len
);
5484 #ifdef TCC_TARGET_PE
5485 t
= VT_SHORT
| VT_UNSIGNED
;
5491 /* string parsing */
5494 if (tcc_state
->warn_write_strings
& WARN_ON
)
5499 memset(&ad
, 0, sizeof(AttributeDef
));
5500 ad
.section
= rodata_section
;
5501 decl_initializer_alloc(&type
, &ad
, VT_CONST
, 2, 0, 0);
5506 if (parse_btype(&type
, &ad
, 0)) {
5507 type_decl(&type
, &ad
, &n
, TYPE_ABSTRACT
);
5509 /* check ISOC99 compound literal */
5511 /* data is allocated locally by default */
5516 /* all except arrays are lvalues */
5517 if (!(type
.t
& VT_ARRAY
))
5519 memset(&ad
, 0, sizeof(AttributeDef
));
5520 decl_initializer_alloc(&type
, &ad
, r
, 1, 0, 0);
5522 if (sizeof_caller
) {
5529 } else if (tok
== '{') {
5530 int saved_nocode_wanted
= nocode_wanted
;
5531 if (const_wanted
&& !(nocode_wanted
& unevalmask
))
5533 if (0 == local_scope
)
5534 tcc_error("statement expression outside of function");
5535 /* save all registers */
5537 /* statement expression : we do not accept break/continue
5538 inside as GCC does. We do retain the nocode_wanted state,
5539 as statement expressions can't ever be entered from the
5540 outside, so any reactivation of code emission (from labels
5541 or loop heads) can be disabled again after the end of it. */
5543 /* If the statement expr can be entered, then we retain the current
5544 nocode_wanted state (from e.g. a 'return 0;' in the stmt-expr).
5545 If it can't be entered then the state is that from before the
5546 statement expression. */
5547 if (saved_nocode_wanted
)
5548 nocode_wanted
= saved_nocode_wanted
;
5563 /* functions names must be treated as function pointers,
5564 except for unary '&' and sizeof. Since we consider that
5565 functions are not lvalues, we only have to handle it
5566 there and in function calls. */
5567 /* arrays can also be used although they are not lvalues */
5568 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
&&
5569 !(vtop
->type
.t
& (VT_ARRAY
| VT_VLA
)))
5572 vtop
->sym
->a
.addrtaken
= 1;
5573 mk_pointer(&vtop
->type
);
5579 gen_test_zero(TOK_EQ
);
5590 if ((vtop
->type
.t
& VT_BTYPE
) == VT_PTR
)
5591 tcc_error("pointer not accepted for unary plus");
5592 /* In order to force cast, we add zero, except for floating point
5593 where we really need an noop (otherwise -0.0 will be transformed
5595 if (!is_float(vtop
->type
.t
)) {
5607 expr_type(&type
, unary
); /* Perform a in_sizeof = 0; */
5608 if (t
== TOK_SIZEOF
) {
5609 vpush_type_size(&type
, &align
);
5610 gen_cast_s(VT_SIZE_T
);
5612 type_size(&type
, &align
);
5614 if (vtop
[1].r
& VT_SYM
)
5615 s
= vtop
[1].sym
; /* hack: accessing previous vtop */
5616 if (s
&& s
->a
.aligned
)
5617 align
= 1 << (s
->a
.aligned
- 1);
5622 case TOK_builtin_expect
:
5623 /* __builtin_expect is a no-op for now */
5624 parse_builtin_params(0, "ee");
5627 case TOK_builtin_types_compatible_p
:
5628 parse_builtin_params(0, "tt");
5629 vtop
[-1].type
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
5630 vtop
[0].type
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
5631 n
= is_compatible_types(&vtop
[-1].type
, &vtop
[0].type
);
5635 case TOK_builtin_choose_expr
:
5662 case TOK_builtin_constant_p
:
5664 parse_builtin_params(1, "e");
5666 (vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
&&
5667 !((vtop
->r
& VT_SYM
) && vtop
->sym
->a
.addrtaken
);
5671 case TOK_builtin_frame_address
:
5672 case TOK_builtin_return_address
:
5678 level
= expr_const64();
5680 tcc_error("%s only takes positive integers",
5681 tok1
== TOK_builtin_return_address
?
5682 "__builtin_return_address" :
5683 "__builtin_frame_address");
5688 vset(&type
, VT_LOCAL
, 0); /* local frame */
5690 #ifdef TCC_TARGET_RISCV64
5694 mk_pointer(&vtop
->type
);
5695 indir(); /* -> parent frame */
5697 if (tok1
== TOK_builtin_return_address
) {
5698 // assume return address is just above frame pointer on stack
5699 #ifdef TCC_TARGET_ARM
5702 #elif defined TCC_TARGET_RISCV64
5709 mk_pointer(&vtop
->type
);
5714 #ifdef TCC_TARGET_RISCV64
5715 case TOK_builtin_va_start
:
5716 parse_builtin_params(0, "ee");
5717 r
= vtop
->r
& VT_VALMASK
;
5721 tcc_error("__builtin_va_start expects a local variable");
5726 #ifdef TCC_TARGET_X86_64
5727 #ifdef TCC_TARGET_PE
5728 case TOK_builtin_va_start
:
5729 parse_builtin_params(0, "ee");
5730 r
= vtop
->r
& VT_VALMASK
;
5734 tcc_error("__builtin_va_start expects a local variable");
5736 vtop
->type
= char_pointer_type
;
5741 case TOK_builtin_va_arg_types
:
5742 parse_builtin_params(0, "t");
5743 vpushi(classify_x86_64_va_arg(&vtop
->type
));
5750 #ifdef TCC_TARGET_ARM64
5751 case TOK_builtin_va_start
: {
5752 parse_builtin_params(0, "ee");
5756 vtop
->type
.t
= VT_VOID
;
5759 case TOK_builtin_va_arg
: {
5760 parse_builtin_params(0, "et");
5768 case TOK___arm64_clear_cache
: {
5769 parse_builtin_params(0, "ee");
5772 vtop
->type
.t
= VT_VOID
;
5777 /* atomic operations */
5778 case TOK___atomic_store
:
5779 case TOK___atomic_load
:
5780 case TOK___atomic_exchange
:
5781 case TOK___atomic_compare_exchange
:
5782 case TOK___atomic_fetch_add
:
5783 case TOK___atomic_fetch_sub
:
5784 case TOK___atomic_fetch_or
:
5785 case TOK___atomic_fetch_xor
:
5786 case TOK___atomic_fetch_and
:
5787 case TOK___atomic_fetch_nand
:
5788 case TOK___atomic_add_fetch
:
5789 case TOK___atomic_sub_fetch
:
5790 case TOK___atomic_or_fetch
:
5791 case TOK___atomic_xor_fetch
:
5792 case TOK___atomic_and_fetch
:
5793 case TOK___atomic_nand_fetch
:
5797 /* pre operations */
5808 if (is_float(vtop
->type
.t
)) {
5818 goto tok_identifier
;
5820 /* allow to take the address of a label */
5821 if (tok
< TOK_UIDENT
)
5822 expect("label identifier");
5823 s
= label_find(tok
);
5825 s
= label_push(&global_label_stack
, tok
, LABEL_FORWARD
);
5827 if (s
->r
== LABEL_DECLARED
)
5828 s
->r
= LABEL_FORWARD
;
5830 if ((s
->type
.t
& VT_BTYPE
) != VT_PTR
) {
5831 s
->type
.t
= VT_VOID
;
5832 mk_pointer(&s
->type
);
5833 s
->type
.t
|= VT_STATIC
;
5835 vpushsym(&s
->type
, s
);
5841 CType controlling_type
;
5842 int has_default
= 0;
5845 TokenString
*str
= NULL
;
5846 int saved_const_wanted
= const_wanted
;
5851 expr_type(&controlling_type
, expr_eq
);
5852 controlling_type
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
| VT_ARRAY
);
5853 if ((controlling_type
.t
& VT_BTYPE
) == VT_FUNC
)
5854 mk_pointer(&controlling_type
);
5855 const_wanted
= saved_const_wanted
;
5859 if (tok
== TOK_DEFAULT
) {
5861 tcc_error("too many 'default'");
5867 AttributeDef ad_tmp
;
5871 parse_btype(&cur_type
, &ad_tmp
, 0);
5872 type_decl(&cur_type
, &ad_tmp
, &itmp
, TYPE_ABSTRACT
);
5873 if (compare_types(&controlling_type
, &cur_type
, 0)) {
5875 tcc_error("type match twice");
5885 skip_or_save_block(&str
);
5887 skip_or_save_block(NULL
);
5894 type_to_str(buf
, sizeof buf
, &controlling_type
, NULL
);
5895 tcc_error("type '%s' does not match any association", buf
);
5897 begin_macro(str
, 1);
5906 // special qnan , snan and infinity values
5911 vtop
->type
.t
= VT_FLOAT
;
5916 goto special_math_val
;
5919 goto special_math_val
;
5926 expect("identifier");
5928 if (!s
|| IS_ASM_SYM(s
)) {
5929 const char *name
= get_tok_str(t
, NULL
);
5931 tcc_error("'%s' undeclared", name
);
5932 /* for simple function calls, we tolerate undeclared
5933 external reference to int() function */
5934 tcc_warning_c(warn_implicit_function_declaration
)(
5935 "implicit declaration of function '%s'", name
);
5936 s
= external_global_sym(t
, &func_old_type
);
5940 /* A symbol that has a register is a local register variable,
5941 which starts out as VT_LOCAL value. */
5942 if ((r
& VT_VALMASK
) < VT_CONST
)
5943 r
= (r
& ~VT_VALMASK
) | VT_LOCAL
;
5945 vset(&s
->type
, r
, s
->c
);
5946 /* Point to s as backpointer (even without r&VT_SYM).
5947 Will be used by at least the x86 inline asm parser for
5953 } else if (r
== VT_CONST
&& IS_ENUM_VAL(s
->type
.t
)) {
5954 vtop
->c
.i
= s
->enum_val
;
5959 /* post operations */
5961 if (tok
== TOK_INC
|| tok
== TOK_DEC
) {
5964 } else if (tok
== '.' || tok
== TOK_ARROW
|| tok
== TOK_CDOUBLE
) {
5965 int qualifiers
, cumofs
= 0;
5967 if (tok
== TOK_ARROW
)
5969 qualifiers
= vtop
->type
.t
& (VT_CONSTANT
| VT_VOLATILE
);
5972 /* expect pointer on structure */
5973 if ((vtop
->type
.t
& VT_BTYPE
) != VT_STRUCT
)
5974 expect("struct or union");
5975 if (tok
== TOK_CDOUBLE
)
5976 expect("field name");
5978 if (tok
== TOK_CINT
|| tok
== TOK_CUINT
)
5979 expect("field name");
5980 s
= find_field(&vtop
->type
, tok
, &cumofs
);
5981 /* add field offset to pointer */
5982 vtop
->type
= char_pointer_type
; /* change type to 'char *' */
5985 /* change type to field type, and set to lvalue */
5986 vtop
->type
= s
->type
;
5987 vtop
->type
.t
|= qualifiers
;
5988 /* an array is never an lvalue */
5989 if (!(vtop
->type
.t
& VT_ARRAY
)) {
5991 #ifdef CONFIG_TCC_BCHECK
5992 /* if bound checking, the referenced pointer must be checked */
5993 if (tcc_state
->do_bounds_check
)
5994 vtop
->r
|= VT_MUSTBOUND
;
5998 } else if (tok
== '[') {
6004 } else if (tok
== '(') {
6007 int nb_args
, ret_nregs
, ret_align
, regsize
, variadic
;
6010 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
) {
6011 /* pointer test (no array accepted) */
6012 if ((vtop
->type
.t
& (VT_BTYPE
| VT_ARRAY
)) == VT_PTR
) {
6013 vtop
->type
= *pointed_type(&vtop
->type
);
6014 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
)
6018 expect("function pointer");
6021 vtop
->r
&= ~VT_LVAL
; /* no lvalue */
6023 /* get return type */
6026 sa
= s
->next
; /* first parameter */
6027 nb_args
= regsize
= 0;
6029 /* compute first implicit argument if a structure is returned */
6030 if ((s
->type
.t
& VT_BTYPE
) == VT_STRUCT
) {
6031 variadic
= (s
->f
.func_type
== FUNC_ELLIPSIS
);
6032 ret_nregs
= gfunc_sret(&s
->type
, variadic
, &ret
.type
,
6033 &ret_align
, ®size
);
6034 if (ret_nregs
<= 0) {
6035 /* get some space for the returned structure */
6036 size
= type_size(&s
->type
, &align
);
6037 #ifdef TCC_TARGET_ARM64
6038 /* On arm64, a small struct is return in registers.
6039 It is much easier to write it to memory if we know
6040 that we are allowed to write some extra bytes, so
6041 round the allocated space up to a power of 2: */
6043 while (size
& (size
- 1))
6044 size
= (size
| (size
- 1)) + 1;
6046 loc
= (loc
- size
) & -align
;
6048 ret
.r
= VT_LOCAL
| VT_LVAL
;
6049 /* pass it as 'int' to avoid structure arg passing
6051 vseti(VT_LOCAL
, loc
);
6052 #ifdef CONFIG_TCC_BCHECK
6053 if (tcc_state
->do_bounds_check
)
6067 if (ret_nregs
> 0) {
6068 /* return in register */
6070 PUT_R_RET(&ret
, ret
.type
.t
);
6075 gfunc_param_typed(s
, sa
);
6085 tcc_error("too few arguments to function");
6087 gfunc_call(nb_args
);
6089 if (ret_nregs
< 0) {
6090 vsetc(&ret
.type
, ret
.r
, &ret
.c
);
6091 #ifdef TCC_TARGET_RISCV64
6092 arch_transfer_ret_regs(1);
6096 for (r
= ret
.r
+ ret_nregs
+ !ret_nregs
; r
-- > ret
.r
;) {
6097 vsetc(&ret
.type
, r
, &ret
.c
);
6098 vtop
->r2
= ret
.r2
; /* Loop only happens when r2 is VT_CONST */
6101 /* handle packed struct return */
6102 if (((s
->type
.t
& VT_BTYPE
) == VT_STRUCT
) && ret_nregs
) {
6105 size
= type_size(&s
->type
, &align
);
6106 /* We're writing whole regs often, make sure there's enough
6107 space. Assume register size is power of 2. */
6108 if (regsize
> align
)
6110 loc
= (loc
- size
) & -align
;
6114 vset(&ret
.type
, VT_LOCAL
| VT_LVAL
, addr
+ offset
);
6118 if (--ret_nregs
== 0)
6122 vset(&s
->type
, VT_LOCAL
| VT_LVAL
, addr
);
6125 /* Promote char/short return values. This is matters only
6126 for calling function that were not compiled by TCC and
6127 only on some architectures. For those where it doesn't
6128 matter we expect things to be already promoted to int,
6130 t
= s
->type
.t
& VT_BTYPE
;
6131 if (t
== VT_BYTE
|| t
== VT_SHORT
|| t
== VT_BOOL
) {
6133 vtop
->r
|= BFVAL(VT_MUSTCAST
, 1);
6135 vtop
->type
.t
= VT_INT
;
6139 if (s
->f
.func_noreturn
) {
6141 tcc_tcov_block_end(tcc_state
, -1);
6150 #ifndef precedence_parser /* original top-down parser */
6152 static void expr_prod(void)
6157 while ((t
= tok
) == '*' || t
== '/' || t
== '%') {
6164 static void expr_sum(void)
6169 while ((t
= tok
) == '+' || t
== '-') {
6176 static void expr_shift(void)
6181 while ((t
= tok
) == TOK_SHL
|| t
== TOK_SAR
) {
6188 static void expr_cmp(void)
6193 while (((t
= tok
) >= TOK_ULE
&& t
<= TOK_GT
) ||
6194 t
== TOK_ULT
|| t
== TOK_UGE
) {
6201 static void expr_cmpeq(void)
6206 while ((t
= tok
) == TOK_EQ
|| t
== TOK_NE
) {
6213 static void expr_and(void)
6216 while (tok
== '&') {
6223 static void expr_xor(void)
6226 while (tok
== '^') {
6233 static void expr_or(void)
6236 while (tok
== '|') {
6243 static void expr_landor(int op
);
6245 static void expr_land(void)
6248 if (tok
== TOK_LAND
)
6252 static void expr_lor(void)
6259 # define expr_landor_next(op) op == TOK_LAND ? expr_or() : expr_land()
6260 #else /* defined precedence_parser */
6261 # define expr_landor_next(op) unary(), expr_infix(precedence(op) + 1)
6262 # define expr_lor() unary(), expr_infix(1)
6264 static int precedence(int tok
)
6267 case TOK_LOR
: return 1;
6268 case TOK_LAND
: return 2;
6272 case TOK_EQ
: case TOK_NE
: return 6;
6273 relat
: case TOK_ULT
: case TOK_UGE
: return 7;
6274 case TOK_SHL
: case TOK_SAR
: return 8;
6275 case '+': case '-': return 9;
6276 case '*': case '/': case '%': return 10;
6278 if (tok
>= TOK_ULE
&& tok
<= TOK_GT
)
6283 static unsigned char prec
[256];
6284 static void init_prec(void)
6287 for (i
= 0; i
< 256; i
++)
6288 prec
[i
] = precedence(i
);
6290 #define precedence(i) ((unsigned)i < 256 ? prec[i] : 0)
6292 static void expr_landor(int op
);
6294 static void expr_infix(int p
)
6297 while ((p2
= precedence(t
)) >= p
) {
6298 if (t
== TOK_LOR
|| t
== TOK_LAND
) {
6303 if (precedence(tok
) > p2
)
6312 /* Assuming vtop is a value used in a conditional context
6313 (i.e. compared with zero) return 0 if it's false, 1 if
6314 true and -1 if it can't be statically determined. */
6315 static int condition_3way(void)
6318 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
&&
6319 (!(vtop
->r
& VT_SYM
) || !vtop
->sym
->a
.weak
)) {
6321 gen_cast_s(VT_BOOL
);
6328 static void expr_landor(int op
)
6330 int t
= 0, cc
= 1, f
= 0, i
= op
== TOK_LAND
, c
;
6332 c
= f
? i
: condition_3way();
6334 save_regs(1), cc
= 0;
6336 nocode_wanted
++, f
= 1;
6344 expr_landor_next(op
);
6356 static int is_cond_bool(SValue
*sv
)
6358 if ((sv
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
6359 && (sv
->type
.t
& VT_BTYPE
) == VT_INT
)
6360 return (unsigned)sv
->c
.i
< 2;
6361 if (sv
->r
== VT_CMP
)
6366 static void expr_cond(void)
6368 int tt
, u
, r1
, r2
, rc
, t1
, t2
, islv
, c
, g
;
6375 c
= condition_3way();
6376 g
= (tok
== ':' && gnu_ext
);
6386 /* needed to avoid having different registers saved in
6398 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FUNC
)
6399 mk_pointer(&vtop
->type
);
6400 sv
= *vtop
; /* save value to handle it later */
6401 vtop
--; /* no vpop so that FP stack is not flushed */
6418 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FUNC
)
6419 mk_pointer(&vtop
->type
);
6421 /* cast operands to correct type according to ISOC rules */
6422 if (!combine_types(&type
, &sv
, vtop
, '?'))
6423 type_incompatibility_error(&sv
.type
, &vtop
->type
,
6424 "type mismatch in conditional expression (have '%s' and '%s')");
6426 if (c
< 0 && is_cond_bool(vtop
) && is_cond_bool(&sv
)) {
6427 /* optimize "if (f ? a > b : c || d) ..." for example, where normally
6428 "a < b" and "c || d" would be forced to "(int)0/1" first, whereas
6429 this code jumps directly to the if's then/else branches. */
6434 /* combine jump targets of 2nd op with VT_CMP of 1st op */
6438 // tcc_warning("two conditions expr_cond");
6442 /* keep structs lvalue by transforming `(expr ? a : b)` to `*(expr ? &a : &b)` so
6443 that `(expr ? a : b).mem` does not error with "lvalue expected" */
6444 islv
= (vtop
->r
& VT_LVAL
) && (sv
.r
& VT_LVAL
) && VT_STRUCT
== (type
.t
& VT_BTYPE
);
6446 /* now we convert second operand */
6450 mk_pointer(&vtop
->type
);
6452 } else if (VT_STRUCT
== (vtop
->type
.t
& VT_BTYPE
))
6456 rc
= RC_TYPE(type
.t
);
6457 /* for long longs, we use fixed registers to avoid having
6458 to handle a complicated move */
6459 if (USING_TWO_WORDS(type
.t
))
6460 rc
= RC_RET(type
.t
);
6471 /* this is horrible, but we must also convert first
6477 mk_pointer(&vtop
->type
);
6479 } else if (VT_STRUCT
== (vtop
->type
.t
& VT_BTYPE
))
6485 move_reg(r2
, r1
, islv
? VT_PTR
: type
.t
);
6495 static void expr_eq(void)
6500 if ((t
= tok
) == '=' || TOK_ASSIGN(t
)) {
6508 gen_op(TOK_ASSIGN_OP(t
));
6514 ST_FUNC
void gexpr(void)
6520 constant_p
&= (vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
&&
6521 !((vtop
->r
& VT_SYM
) && vtop
->sym
->a
.addrtaken
);
6527 /* parse a constant expression and return value in vtop. */
6528 static void expr_const1(void)
6531 nocode_wanted
+= unevalmask
+ 1;
6533 nocode_wanted
-= unevalmask
+ 1;
6537 /* parse an integer constant and return its value. */
6538 static inline int64_t expr_const64(void)
6542 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) != VT_CONST
)
6543 expect("constant expression");
6549 /* parse an integer constant and return its value.
6550 Complain if it doesn't fit 32bit (signed or unsigned). */
6551 ST_FUNC
int expr_const(void)
6554 int64_t wc
= expr_const64();
6556 if (c
!= wc
&& (unsigned)c
!= wc
)
6557 tcc_error("constant exceeds 32 bit");
6561 /* ------------------------------------------------------------------------- */
6562 /* return from function */
6564 #ifndef TCC_TARGET_ARM64
6565 static void gfunc_return(CType
*func_type
)
6567 if ((func_type
->t
& VT_BTYPE
) == VT_STRUCT
) {
6568 CType type
, ret_type
;
6569 int ret_align
, ret_nregs
, regsize
;
6570 ret_nregs
= gfunc_sret(func_type
, func_var
, &ret_type
,
6571 &ret_align
, ®size
);
6572 if (ret_nregs
< 0) {
6573 #ifdef TCC_TARGET_RISCV64
6574 arch_transfer_ret_regs(0);
6576 } else if (0 == ret_nregs
) {
6577 /* if returning structure, must copy it to implicit
6578 first pointer arg location */
6581 vset(&type
, VT_LOCAL
| VT_LVAL
, func_vc
);
6584 /* copy structure value to pointer */
6587 /* returning structure packed into registers */
6588 int size
, addr
, align
, rc
;
6589 size
= type_size(func_type
,&align
);
6590 if ((vtop
->r
!= (VT_LOCAL
| VT_LVAL
) ||
6591 (vtop
->c
.i
& (ret_align
-1)))
6592 && (align
& (ret_align
-1))) {
6593 loc
= (loc
- size
) & -ret_align
;
6596 vset(&type
, VT_LOCAL
| VT_LVAL
, addr
);
6600 vset(&ret_type
, VT_LOCAL
| VT_LVAL
, addr
);
6602 vtop
->type
= ret_type
;
6603 rc
= RC_RET(ret_type
.t
);
6611 if (--ret_nregs
== 0)
6613 /* We assume that when a structure is returned in multiple
6614 registers, their classes are consecutive values of the
6617 vtop
->c
.i
+= regsize
;
6622 gv(RC_RET(func_type
->t
));
6624 vtop
--; /* NOT vpop() because on x86 it would flush the fp stack */
6628 static void check_func_return(void)
6630 if ((func_vt
.t
& VT_BTYPE
) == VT_VOID
)
6632 if (!strcmp (funcname
, "main")
6633 && (func_vt
.t
& VT_BTYPE
) == VT_INT
) {
6634 /* main returns 0 by default */
6636 gen_assign_cast(&func_vt
);
6637 gfunc_return(&func_vt
);
6639 tcc_warning("function might return no value: '%s'", funcname
);
6643 /* ------------------------------------------------------------------------- */
6646 static int case_cmpi(const void *pa
, const void *pb
)
6648 int64_t a
= (*(struct case_t
**) pa
)->v1
;
6649 int64_t b
= (*(struct case_t
**) pb
)->v1
;
6650 return a
< b
? -1 : a
> b
;
6653 static int case_cmpu(const void *pa
, const void *pb
)
6655 uint64_t a
= (uint64_t)(*(struct case_t
**) pa
)->v1
;
6656 uint64_t b
= (uint64_t)(*(struct case_t
**) pb
)->v1
;
6657 return a
< b
? -1 : a
> b
;
/* Generate a test of vtop (non-inverted, chained with jump list 't')
   and patch the resulting forward jump to the known address 'a'.
   Used by gcase() to branch to an already-emitted case body. */
static void gtst_addr(int t, int a)
{
    gsym_addr(gvtst(0, t), a);
}
6665 static void gcase(struct case_t
**base
, int len
, int *bsym
)
6669 int ll
= (vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
;
6686 gtst_addr(0, p
->sym
); /* v1 <= x <= v2 */
6688 gcase(base
, len
/2, bsym
);
6692 base
+= e
; len
-= e
;
6702 if (p
->v1
== p
->v2
) {
6704 gtst_addr(0, p
->sym
);
6714 gtst_addr(0, p
->sym
);
6718 *bsym
= gjmp(*bsym
);
6721 /* ------------------------------------------------------------------------- */
6722 /* __attribute__((cleanup(fn))) */
6724 static void try_call_scope_cleanup(Sym
*stop
)
6726 Sym
*cls
= cur_scope
->cl
.s
;
6728 for (; cls
!= stop
; cls
= cls
->ncl
) {
6729 Sym
*fs
= cls
->next
;
6730 Sym
*vs
= cls
->prev_tok
;
6732 vpushsym(&fs
->type
, fs
);
6733 vset(&vs
->type
, vs
->r
, vs
->c
);
6735 mk_pointer(&vtop
->type
);
6741 static void try_call_cleanup_goto(Sym
*cleanupstate
)
6746 if (!cur_scope
->cl
.s
)
6749 /* search NCA of both cleanup chains given parents and initial depth */
6750 ocd
= cleanupstate
? cleanupstate
->v
& ~SYM_FIELD
: 0;
6751 for (ccd
= cur_scope
->cl
.n
, oc
= cleanupstate
; ocd
> ccd
; --ocd
, oc
= oc
->ncl
)
6753 for (cc
= cur_scope
->cl
.s
; ccd
> ocd
; --ccd
, cc
= cc
->ncl
)
6755 for (; cc
!= oc
; cc
= cc
->ncl
, oc
= oc
->ncl
, --ccd
)
6758 try_call_scope_cleanup(cc
);
6761 /* call 'func' for each __attribute__((cleanup(func))) */
6762 static void block_cleanup(struct scope
*o
)
6766 for (pg
= &pending_gotos
; (g
= *pg
) && g
->c
> o
->cl
.n
;) {
6767 if (g
->prev_tok
->r
& LABEL_FORWARD
) {
6772 try_call_scope_cleanup(o
->cl
.s
);
6773 pcl
->jnext
= gjmp(0);
6775 goto remove_pending
;
6785 try_call_scope_cleanup(o
->cl
.s
);
6788 /* ------------------------------------------------------------------------- */
6791 static void vla_restore(int loc
)
6794 gen_vla_sp_restore(loc
);
6797 static void vla_leave(struct scope
*o
)
6799 struct scope
*c
= cur_scope
, *v
= NULL
;
6800 for (; c
!= o
&& c
; c
= c
->prev
)
6804 vla_restore(v
->vla
.locorig
);
6807 /* ------------------------------------------------------------------------- */
6810 static void new_scope(struct scope
*o
)
6812 /* copy and link previous scope */
6814 o
->prev
= cur_scope
;
6816 cur_scope
->vla
.num
= 0;
6818 /* record local declaration stack position */
6819 o
->lstk
= local_stack
;
6820 o
->llstk
= local_label_stack
;
6824 static void prev_scope(struct scope
*o
, int is_expr
)
6828 if (o
->cl
.s
!= o
->prev
->cl
.s
)
6829 block_cleanup(o
->prev
);
6831 /* pop locally defined labels */
6832 label_pop(&local_label_stack
, o
->llstk
, is_expr
);
6834 /* In the is_expr case (a statement expression is finished here),
6835 vtop might refer to symbols on the local_stack. Either via the
6836 type or via vtop->sym. We can't pop those nor any that in turn
6837 might be referred to. To make it easier we don't roll back
6838 any symbols in that case; some upper level call to block() will
6839 do that. We do have to remove such symbols from the lookup
6840 tables, though. sym_pop will do that. */
6842 /* pop locally defined symbols */
6843 pop_local_syms(o
->lstk
, is_expr
);
6844 cur_scope
= o
->prev
;
6848 /* leave a scope via break/continue(/goto) */
6849 static void leave_scope(struct scope
*o
)
6853 try_call_scope_cleanup(o
->cl
.s
);
6857 /* short versiona for scopes with 'if/do/while/switch' which can
6858 declare only types (of struct/union/enum) */
6859 static void new_scope_s(struct scope
*o
)
6861 o
->lstk
= local_stack
;
6865 static void prev_scope_s(struct scope
*o
)
6867 sym_pop(&local_stack
, o
->lstk
, 0);
6871 /* ------------------------------------------------------------------------- */
6872 /* call block from 'for do while' loops */
6874 static void lblock(int *bsym
, int *csym
)
6876 struct scope
*lo
= loop_scope
, *co
= cur_scope
;
6877 int *b
= co
->bsym
, *c
= co
->csym
;
6891 static void block(int is_expr
)
6893 int a
, b
, c
, d
, e
, t
;
6898 /* default return value is (void) */
6900 vtop
->type
.t
= VT_VOID
;
6905 /* If the token carries a value, next() might destroy it. Only with
6906 invalid code such as f(){"123"4;} */
6907 if (TOK_HAS_VALUE(t
))
6912 tcc_tcov_check_line (tcc_state
, 0), tcc_tcov_block_begin (tcc_state
);
6921 if (tok
== TOK_ELSE
) {
6926 gsym(d
); /* patch else jmp */
6932 } else if (t
== TOK_WHILE
) {
6946 } else if (t
== '{') {
6948 tcc_debug_stabn(tcc_state
, N_LBRAC
, ind
- func_ind
);
6951 /* handle local labels declarations */
6952 while (tok
== TOK_LABEL
) {
6955 if (tok
< TOK_UIDENT
)
6956 expect("label identifier");
6957 label_push(&local_label_stack
, tok
, LABEL_DECLARED
);
6959 } while (tok
== ',');
6963 while (tok
!= '}') {
6972 prev_scope(&o
, is_expr
);
6974 tcc_debug_stabn(tcc_state
, N_RBRAC
, ind
- func_ind
);
6977 else if (!nocode_wanted
)
6978 check_func_return();
6980 } else if (t
== TOK_RETURN
) {
6981 b
= (func_vt
.t
& VT_BTYPE
) != VT_VOID
;
6985 gen_assign_cast(&func_vt
);
6987 if (vtop
->type
.t
!= VT_VOID
)
6988 tcc_warning("void function returns a value");
6992 tcc_warning("'return' with no value");
6995 leave_scope(root_scope
);
6997 gfunc_return(&func_vt
);
6999 /* jump unless last stmt in top-level block */
7000 if (tok
!= '}' || local_scope
!= 1)
7003 tcc_tcov_block_end (tcc_state
, -1);
7006 } else if (t
== TOK_BREAK
) {
7008 if (!cur_scope
->bsym
)
7009 tcc_error("cannot break");
7010 if (cur_switch
&& cur_scope
->bsym
== cur_switch
->bsym
)
7011 leave_scope(cur_switch
->scope
);
7013 leave_scope(loop_scope
);
7014 *cur_scope
->bsym
= gjmp(*cur_scope
->bsym
);
7017 } else if (t
== TOK_CONTINUE
) {
7019 if (!cur_scope
->csym
)
7020 tcc_error("cannot continue");
7021 leave_scope(loop_scope
);
7022 *cur_scope
->csym
= gjmp(*cur_scope
->csym
);
7025 } else if (t
== TOK_FOR
) {
7030 /* c99 for-loop init decl? */
7031 if (!decl(VT_JMP
)) {
7032 /* no, regular for-loop init expr */
7060 } else if (t
== TOK_DO
) {
7076 } else if (t
== TOK_SWITCH
) {
7077 struct switch_t
*sw
;
7079 sw
= tcc_mallocz(sizeof *sw
);
7081 sw
->scope
= cur_scope
;
7082 sw
->prev
= cur_switch
;
7083 sw
->nocode_wanted
= nocode_wanted
;
7090 sw
->sv
= *vtop
--; /* save switch value */
7092 b
= gjmp(0); /* jump to first case */
7094 a
= gjmp(a
); /* add implicit break */
7099 if (sw
->nocode_wanted
)
7101 if (sw
->sv
.type
.t
& VT_UNSIGNED
)
7102 qsort(sw
->p
, sw
->n
, sizeof(void*), case_cmpu
);
7104 qsort(sw
->p
, sw
->n
, sizeof(void*), case_cmpi
);
7105 for (b
= 1; b
< sw
->n
; b
++)
7106 if (sw
->sv
.type
.t
& VT_UNSIGNED
7107 ? (uint64_t)sw
->p
[b
- 1]->v2
>= (uint64_t)sw
->p
[b
]->v1
7108 : sw
->p
[b
- 1]->v2
>= sw
->p
[b
]->v1
)
7109 tcc_error("duplicate case value");
7112 d
= 0, gcase(sw
->p
, sw
->n
, &d
);
7115 gsym_addr(d
, sw
->def_sym
);
7122 dynarray_reset(&sw
->p
, &sw
->n
);
7123 cur_switch
= sw
->prev
;
7126 } else if (t
== TOK_CASE
) {
7127 struct case_t
*cr
= tcc_malloc(sizeof(struct case_t
));
7130 cr
->v1
= cr
->v2
= expr_const64();
7131 if (gnu_ext
&& tok
== TOK_DOTS
) {
7133 cr
->v2
= expr_const64();
7134 if ((!(cur_switch
->sv
.type
.t
& VT_UNSIGNED
) && cr
->v2
< cr
->v1
)
7135 || (cur_switch
->sv
.type
.t
& VT_UNSIGNED
&& (uint64_t)cr
->v2
< (uint64_t)cr
->v1
))
7136 tcc_warning("empty case range");
7138 /* case and default are unreachable from a switch under nocode_wanted */
7139 if (!cur_switch
->nocode_wanted
)
7141 dynarray_add(&cur_switch
->p
, &cur_switch
->n
, cr
);
7144 goto block_after_label
;
7146 } else if (t
== TOK_DEFAULT
) {
7149 if (cur_switch
->def_sym
)
7150 tcc_error("too many 'default'");
7151 cur_switch
->def_sym
= cur_switch
->nocode_wanted
? 1 : gind();
7154 goto block_after_label
;
7156 } else if (t
== TOK_GOTO
) {
7157 vla_restore(cur_scope
->vla
.locorig
);
7158 if (tok
== '*' && gnu_ext
) {
7162 if ((vtop
->type
.t
& VT_BTYPE
) != VT_PTR
)
7166 } else if (tok
>= TOK_UIDENT
) {
7167 s
= label_find(tok
);
7168 /* put forward definition if needed */
7170 s
= label_push(&global_label_stack
, tok
, LABEL_FORWARD
);
7171 else if (s
->r
== LABEL_DECLARED
)
7172 s
->r
= LABEL_FORWARD
;
7174 if (s
->r
& LABEL_FORWARD
) {
7175 /* start new goto chain for cleanups, linked via label->next */
7176 if (cur_scope
->cl
.s
&& !nocode_wanted
) {
7177 sym_push2(&pending_gotos
, SYM_FIELD
, 0, cur_scope
->cl
.n
);
7178 pending_gotos
->prev_tok
= s
;
7179 s
= sym_push2(&s
->next
, SYM_FIELD
, 0, 0);
7180 pending_gotos
->next
= s
;
7182 s
->jnext
= gjmp(s
->jnext
);
7184 try_call_cleanup_goto(s
->cleanupstate
);
7185 gjmp_addr(s
->jnext
);
7190 expect("label identifier");
7194 } else if (t
== TOK_ASM1
|| t
== TOK_ASM2
|| t
== TOK_ASM3
) {
7198 if (tok
== ':' && t
>= TOK_UIDENT
) {
7203 if (s
->r
== LABEL_DEFINED
)
7204 tcc_error("duplicate label '%s'", get_tok_str(s
->v
, NULL
));
7205 s
->r
= LABEL_DEFINED
;
7207 Sym
*pcl
; /* pending cleanup goto */
7208 for (pcl
= s
->next
; pcl
; pcl
= pcl
->prev
)
7210 sym_pop(&s
->next
, NULL
, 0);
7214 s
= label_push(&global_label_stack
, t
, LABEL_DEFINED
);
7217 s
->cleanupstate
= cur_scope
->cl
.s
;
7221 /* Accept attributes after labels (e.g. 'unused') */
7222 AttributeDef ad_tmp
;
7223 parse_attribute(&ad_tmp
);
7226 tcc_tcov_reset_ind(tcc_state
);
7227 vla_restore(cur_scope
->vla
.loc
);
7230 /* we accept this, but it is a mistake */
7231 tcc_warning_c(warn_all
)("deprecated use of label at end of compound statement");
7234 /* expression case */
7251 tcc_tcov_check_line (tcc_state
, 0), tcc_tcov_block_end (tcc_state
, 0);
7254 /* This skips over a stream of tokens containing balanced {} and ()
7255 pairs, stopping at outer ',' ';' and '}' (or matching '}' if we started
7256 with a '{'). If STR then allocates and stores the skipped tokens
7257 in *STR. This doesn't check if () and {} are nested correctly,
7258 i.e. "({)}" is accepted. */
7259 static void skip_or_save_block(TokenString
**str
)
7261 int braces
= tok
== '{';
7264 *str
= tok_str_alloc();
7276 if (str
|| level
> 0)
7277 tcc_error("unexpected end of file");
7282 tok_str_add_tok(*str
);
7284 if (t
== '{' || t
== '(' || t
== '[') {
7286 } else if (t
== '}' || t
== ')' || t
== ']') {
7288 if (level
== 0 && braces
&& t
== '}')
7293 tok_str_add(*str
, -1);
7294 tok_str_add(*str
, 0);
7298 #define EXPR_CONST 1
7301 static void parse_init_elem(int expr_type
)
7303 int saved_global_expr
;
7306 /* compound literals must be allocated globally in this case */
7307 saved_global_expr
= global_expr
;
7310 global_expr
= saved_global_expr
;
7311 /* NOTE: symbols are accepted, as well as lvalue for anon symbols
7312 (compound literals). */
7313 if (((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) != VT_CONST
7314 && ((vtop
->r
& (VT_SYM
|VT_LVAL
)) != (VT_SYM
|VT_LVAL
)
7315 || vtop
->sym
->v
< SYM_FIRST_ANOM
))
7316 #ifdef TCC_TARGET_PE
7317 || ((vtop
->r
& VT_SYM
) && vtop
->sym
->a
.dllimport
)
7320 tcc_error("initializer element is not constant");
7329 static void init_assert(init_params
*p
, int offset
)
7331 if (p
->sec
? !NODATA_WANTED
&& offset
> p
->sec
->data_offset
7332 : !nocode_wanted
&& offset
> p
->local_offset
)
7333 tcc_internal_error("initializer overflow");
7336 #define init_assert(sec, offset)
7339 /* put zeros for variable based init */
7340 static void init_putz(init_params
*p
, unsigned long c
, int size
)
7342 init_assert(p
, c
+ size
);
7344 /* nothing to do because globals are already set to zero */
7346 vpush_helper_func(TOK_memset
);
7348 #ifdef TCC_TARGET_ARM
7360 #define DIF_SIZE_ONLY 2
7361 #define DIF_HAVE_ELEM 4
7364 /* delete relocations for specified range c ... c + size. Unfortunatly
7365 in very special cases, relocations may occur unordered */
7366 static void decl_design_delrels(Section
*sec
, int c
, int size
)
7368 ElfW_Rel
*rel
, *rel2
, *rel_end
;
7369 if (!sec
|| !sec
->reloc
)
7371 rel
= rel2
= (ElfW_Rel
*)sec
->reloc
->data
;
7372 rel_end
= (ElfW_Rel
*)(sec
->reloc
->data
+ sec
->reloc
->data_offset
);
7373 while (rel
< rel_end
) {
7374 if (rel
->r_offset
>= c
&& rel
->r_offset
< c
+ size
) {
7375 sec
->reloc
->data_offset
-= sizeof *rel
;
7378 memcpy(rel2
, rel
, sizeof *rel
);
7385 static void decl_design_flex(init_params
*p
, Sym
*ref
, int index
)
7387 if (ref
== p
->flex_array_ref
) {
7388 if (index
>= ref
->c
)
7390 } else if (ref
->c
< 0)
7391 tcc_error("flexible array has zero size in this context");
7394 /* t is the array or struct type. c is the array or struct
7395 address. cur_field is the pointer to the current
7396 field, for arrays the 'c' member contains the current start
7397 index. 'flags' is as in decl_initializer.
7398 'al' contains the already initialized length of the
7399 current container (starting at c). This returns the new length of that. */
7400 static int decl_designator(init_params
*p
, CType
*type
, unsigned long c
,
7401 Sym
**cur_field
, int flags
, int al
)
7404 int index
, index_last
, align
, l
, nb_elems
, elem_size
;
7405 unsigned long corig
= c
;
7410 if (flags
& DIF_HAVE_ELEM
)
7413 if (gnu_ext
&& tok
>= TOK_UIDENT
) {
7420 /* NOTE: we only support ranges for last designator */
7421 while (nb_elems
== 1 && (tok
== '[' || tok
== '.')) {
7423 if (!(type
->t
& VT_ARRAY
))
7424 expect("array type");
7426 index
= index_last
= expr_const();
7427 if (tok
== TOK_DOTS
&& gnu_ext
) {
7429 index_last
= expr_const();
7433 decl_design_flex(p
, s
, index_last
);
7434 if (index
< 0 || index_last
>= s
->c
|| index_last
< index
)
7435 tcc_error("index exceeds array bounds or range is empty");
7437 (*cur_field
)->c
= index_last
;
7438 type
= pointed_type(type
);
7439 elem_size
= type_size(type
, &align
);
7440 c
+= index
* elem_size
;
7441 nb_elems
= index_last
- index
+ 1;
7448 if ((type
->t
& VT_BTYPE
) != VT_STRUCT
)
7449 expect("struct/union type");
7451 f
= find_field(type
, l
, &cumofs
);
7462 } else if (!gnu_ext
) {
7467 if (type
->t
& VT_ARRAY
) {
7468 index
= (*cur_field
)->c
;
7470 decl_design_flex(p
, s
, index
);
7472 tcc_error("too many initializers");
7473 type
= pointed_type(type
);
7474 elem_size
= type_size(type
, &align
);
7475 c
+= index
* elem_size
;
7478 /* Skip bitfield padding. Also with size 32 and 64. */
7479 while (f
&& (f
->v
& SYM_FIRST_ANOM
) &&
7480 is_integer_btype(f
->type
.t
& VT_BTYPE
))
7481 *cur_field
= f
= f
->next
;
7483 tcc_error("too many initializers");
7489 if (!elem_size
) /* for structs */
7490 elem_size
= type_size(type
, &align
);
7492 /* Using designators the same element can be initialized more
7493 than once. In that case we need to delete possibly already
7494 existing relocations. */
7495 if (!(flags
& DIF_SIZE_ONLY
) && c
- corig
< al
) {
7496 decl_design_delrels(p
->sec
, c
, elem_size
* nb_elems
);
7497 flags
&= ~DIF_CLEAR
; /* mark stack dirty too */
7500 decl_initializer(p
, type
, c
, flags
& ~DIF_FIRST
);
7502 if (!(flags
& DIF_SIZE_ONLY
) && nb_elems
> 1) {
7506 if (p
->sec
|| (type
->t
& VT_ARRAY
)) {
7507 /* make init_putv/vstore believe it were a struct */
7509 t1
.t
= VT_STRUCT
, t1
.ref
= &aref
;
7513 vpush_ref(type
, p
->sec
, c
, elem_size
);
7515 vset(type
, VT_LOCAL
|VT_LVAL
, c
);
7516 for (i
= 1; i
< nb_elems
; i
++) {
7518 init_putv(p
, type
, c
+ elem_size
* i
);
7523 c
+= nb_elems
* elem_size
;
7529 /* store a value or an expression directly in global data or in local array */
7530 static void init_putv(init_params
*p
, CType
*type
, unsigned long c
)
7536 Section
*sec
= p
->sec
;
7540 dtype
.t
&= ~VT_CONSTANT
; /* need to do that to avoid false warning */
7542 size
= type_size(type
, &align
);
7543 if (type
->t
& VT_BITFIELD
)
7544 size
= (BIT_POS(type
->t
) + BIT_SIZE(type
->t
) + 7) / 8;
7545 init_assert(p
, c
+ size
);
7548 /* XXX: not portable */
7549 /* XXX: generate error if incorrect relocation */
7550 gen_assign_cast(&dtype
);
7551 bt
= type
->t
& VT_BTYPE
;
7553 if ((vtop
->r
& VT_SYM
)
7555 && (bt
!= (PTR_SIZE
== 8 ? VT_LLONG
: VT_INT
)
7556 || (type
->t
& VT_BITFIELD
))
7557 && !((vtop
->r
& VT_CONST
) && vtop
->sym
->v
>= SYM_FIRST_ANOM
)
7559 tcc_error("initializer element is not computable at load time");
7561 if (NODATA_WANTED
) {
7566 ptr
= sec
->data
+ c
;
7569 /* XXX: make code faster ? */
7570 if ((vtop
->r
& (VT_SYM
|VT_CONST
)) == (VT_SYM
|VT_CONST
) &&
7571 vtop
->sym
->v
>= SYM_FIRST_ANOM
&&
7572 /* XXX This rejects compound literals like
7573 '(void *){ptr}'. The problem is that '&sym' is
7574 represented the same way, which would be ruled out
7575 by the SYM_FIRST_ANOM check above, but also '"string"'
7576 in 'char *p = "string"' is represented the same
7577 with the type being VT_PTR and the symbol being an
7578 anonymous one. That is, there's no difference in vtop
7579 between '(void *){x}' and '&(void *){x}'. Ignore
7580 pointer typed entities here. Hopefully no real code
7581 will ever use compound literals with scalar type. */
7582 (vtop
->type
.t
& VT_BTYPE
) != VT_PTR
) {
7583 /* These come from compound literals, memcpy stuff over. */
7587 esym
= elfsym(vtop
->sym
);
7588 ssec
= tcc_state
->sections
[esym
->st_shndx
];
7589 memmove (ptr
, ssec
->data
+ esym
->st_value
+ (int)vtop
->c
.i
, size
);
7591 /* We need to copy over all memory contents, and that
7592 includes relocations. Use the fact that relocs are
7593 created it order, so look from the end of relocs
7594 until we hit one before the copied region. */
7595 unsigned long relofs
= ssec
->reloc
->data_offset
;
7596 while (relofs
>= sizeof(*rel
)) {
7597 relofs
-= sizeof(*rel
);
7598 rel
= (ElfW_Rel
*)(ssec
->reloc
->data
+ relofs
);
7599 if (rel
->r_offset
>= esym
->st_value
+ size
)
7601 if (rel
->r_offset
< esym
->st_value
)
7603 put_elf_reloca(symtab_section
, sec
,
7604 c
+ rel
->r_offset
- esym
->st_value
,
7605 ELFW(R_TYPE
)(rel
->r_info
),
7606 ELFW(R_SYM
)(rel
->r_info
),
7616 if (type
->t
& VT_BITFIELD
) {
7617 int bit_pos
, bit_size
, bits
, n
;
7618 unsigned char *p
, v
, m
;
7619 bit_pos
= BIT_POS(vtop
->type
.t
);
7620 bit_size
= BIT_SIZE(vtop
->type
.t
);
7621 p
= (unsigned char*)ptr
+ (bit_pos
>> 3);
7622 bit_pos
&= 7, bits
= 0;
7627 v
= val
>> bits
<< bit_pos
;
7628 m
= ((1 << n
) - 1) << bit_pos
;
7629 *p
= (*p
& ~m
) | (v
& m
);
7630 bits
+= n
, bit_size
-= n
, bit_pos
= 0, ++p
;
7635 *(char *)ptr
= val
!= 0;
7641 write16le(ptr
, val
);
7644 write32le(ptr
, val
);
7647 write64le(ptr
, val
);
7650 #if defined TCC_IS_NATIVE_387
7651 /* Host and target platform may be different but both have x87.
7652 On windows, tcc does not use VT_LDOUBLE, except when it is a
7653 cross compiler. In this case a mingw gcc as host compiler
7654 comes here with 10-byte long doubles, while msvc or tcc won't.
7655 tcc itself can still translate by asm.
7656 In any case we avoid possibly random bytes 11 and 12.
7658 if (sizeof (long double) >= 10)
7659 memcpy(ptr
, &vtop
->c
.ld
, 10);
7661 else if (sizeof (long double) == sizeof (double))
7662 __asm__("fldl %1\nfstpt %0\n" : "=m" (*ptr
) : "m" (vtop
->c
.ld
));
7664 else if (vtop
->c
.ld
== 0.0)
7668 /* For other platforms it should work natively, but may not work
7669 for cross compilers */
7670 if (sizeof(long double) == LDOUBLE_SIZE
)
7671 memcpy(ptr
, &vtop
->c
.ld
, LDOUBLE_SIZE
);
7672 else if (sizeof(double) == LDOUBLE_SIZE
)
7673 memcpy(ptr
, &vtop
->c
.ld
, LDOUBLE_SIZE
);
7674 #ifndef TCC_CROSS_TEST
7676 tcc_error("can't cross compile long double constants");
7681 /* intptr_t may need a reloc too, see tcctest.c:relocation_test() */
7684 if (vtop
->r
& VT_SYM
)
7685 greloca(sec
, vtop
->sym
, c
, R_DATA_PTR
, val
);
7687 write64le(ptr
, val
);
7690 write32le(ptr
, val
);
7694 write64le(ptr
, val
);
7698 if (vtop
->r
& VT_SYM
)
7699 greloc(sec
, vtop
->sym
, c
, R_DATA_PTR
);
7700 write32le(ptr
, val
);
7704 //tcc_internal_error("unexpected type");
7710 vset(&dtype
, VT_LOCAL
|VT_LVAL
, c
);
7717 /* 't' contains the type and storage info. 'c' is the offset of the
7718 object in section 'sec'. If 'sec' is NULL, it means stack based
7719 allocation. 'flags & DIF_FIRST' is true if array '{' must be read (multi
7720 dimension implicit array init handling). 'flags & DIF_SIZE_ONLY' is true if
7721 size only evaluation is wanted (only for arrays). */
7722 static void decl_initializer(init_params
*p
, CType
*type
, unsigned long c
, int flags
)
7724 int len
, n
, no_oblock
, i
;
7730 /* generate line number info */
7731 if (debug_modes
&& !p
->sec
)
7732 tcc_debug_line(tcc_state
), tcc_tcov_check_line (tcc_state
, 1);
7734 if (!(flags
& DIF_HAVE_ELEM
) && tok
!= '{' &&
7735 /* In case of strings we have special handling for arrays, so
7736 don't consume them as initializer value (which would commit them
7737 to some anonymous symbol). */
7738 tok
!= TOK_LSTR
&& tok
!= TOK_STR
&&
7739 (!(flags
& DIF_SIZE_ONLY
)
7740 /* a struct may be initialized from a struct of same type, as in
7741 struct {int x,y;} a = {1,2}, b = {3,4}, c[] = {a,b};
7742 In that case we need to parse the element in order to check
7743 it for compatibility below */
7744 || (type
->t
& VT_BTYPE
) == VT_STRUCT
)
7746 int ncw_prev
= nocode_wanted
;
7747 if ((flags
& DIF_SIZE_ONLY
) && !p
->sec
)
7749 parse_init_elem(!p
->sec
? EXPR_ANY
: EXPR_CONST
);
7750 nocode_wanted
= ncw_prev
;
7751 flags
|= DIF_HAVE_ELEM
;
7754 if (type
->t
& VT_ARRAY
) {
7756 if (((flags
& DIF_FIRST
) && tok
!= TOK_LSTR
&& tok
!= TOK_STR
) ||
7764 t1
= pointed_type(type
);
7765 size1
= type_size(t1
, &align1
);
7767 /* only parse strings here if correct type (otherwise: handle
7768 them as ((w)char *) expressions */
7769 if ((tok
== TOK_LSTR
&&
7770 #ifdef TCC_TARGET_PE
7771 (t1
->t
& VT_BTYPE
) == VT_SHORT
&& (t1
->t
& VT_UNSIGNED
)
7773 (t1
->t
& VT_BTYPE
) == VT_INT
7775 ) || (tok
== TOK_STR
&& (t1
->t
& VT_BTYPE
) == VT_BYTE
)) {
7777 cstr_reset(&initstr
);
7778 if (size1
!= (tok
== TOK_STR
? 1 : sizeof(nwchar_t
)))
7779 tcc_error("unhandled string literal merging");
7780 while (tok
== TOK_STR
|| tok
== TOK_LSTR
) {
7782 initstr
.size
-= size1
;
7784 len
+= tokc
.str
.size
;
7786 len
+= tokc
.str
.size
/ sizeof(nwchar_t
);
7788 cstr_cat(&initstr
, tokc
.str
.data
, tokc
.str
.size
);
7791 if (tok
!= ')' && tok
!= '}' && tok
!= ',' && tok
!= ';'
7792 && tok
!= TOK_EOF
) {
7793 /* Not a lone literal but part of a bigger expression. */
7794 unget_tok(size1
== 1 ? TOK_STR
: TOK_LSTR
);
7795 tokc
.str
.size
= initstr
.size
;
7796 tokc
.str
.data
= initstr
.data
;
7800 decl_design_flex(p
, s
, len
);
7801 if (!(flags
& DIF_SIZE_ONLY
)) {
7806 tcc_warning("initializer-string for array is too long");
7807 /* in order to go faster for common case (char
7808 string in global variable, we handle it
7810 if (p
->sec
&& size1
== 1) {
7811 init_assert(p
, c
+ nb
);
7813 memcpy(p
->sec
->data
+ c
, initstr
.data
, nb
);
7817 /* only add trailing zero if enough storage (no
7818 warning in this case since it is standard) */
7819 if (flags
& DIF_CLEAR
)
7822 init_putz(p
, c
+ i
* size1
, (n
- i
) * size1
);
7826 } else if (size1
== 1)
7827 ch
= ((unsigned char *)initstr
.data
)[i
];
7829 ch
= ((nwchar_t
*)initstr
.data
)[i
];
7831 init_putv(p
, t1
, c
+ i
* size1
);
7842 /* zero memory once in advance */
7843 if (!(flags
& (DIF_CLEAR
| DIF_SIZE_ONLY
))) {
7844 init_putz(p
, c
, n
*size1
);
7849 /* GNU extension: if the initializer is empty for a flex array,
7850 it's size is zero. We won't enter the loop, so set the size
7852 decl_design_flex(p
, s
, len
);
7853 while (tok
!= '}' || (flags
& DIF_HAVE_ELEM
)) {
7854 len
= decl_designator(p
, type
, c
, &f
, flags
, len
);
7855 flags
&= ~DIF_HAVE_ELEM
;
7856 if (type
->t
& VT_ARRAY
) {
7858 /* special test for multi dimensional arrays (may not
7859 be strictly correct if designators are used at the
7861 if (no_oblock
&& len
>= n
*size1
)
7864 if (s
->type
.t
== VT_UNION
)
7868 if (no_oblock
&& f
== NULL
)
7880 } else if ((flags
& DIF_HAVE_ELEM
)
7881 /* Use i_c_parameter_t, to strip toplevel qualifiers.
7882 The source type might have VT_CONSTANT set, which is
7883 of course assignable to non-const elements. */
7884 && is_compatible_unqualified_types(type
, &vtop
->type
)) {
7887 } else if ((type
->t
& VT_BTYPE
) == VT_STRUCT
) {
7889 if ((flags
& DIF_FIRST
) || tok
== '{') {
7899 } else if (tok
== '{') {
7900 if (flags
& DIF_HAVE_ELEM
)
7903 decl_initializer(p
, type
, c
, flags
& ~DIF_HAVE_ELEM
);
7906 } else one_elem
: if ((flags
& DIF_SIZE_ONLY
)) {
7907 /* If we supported only ISO C we wouldn't have to accept calling
7908 this on anything than an array if DIF_SIZE_ONLY (and even then
7909 only on the outermost level, so no recursion would be needed),
7910 because initializing a flex array member isn't supported.
7911 But GNU C supports it, so we need to recurse even into
7912 subfields of structs and arrays when DIF_SIZE_ONLY is set. */
7913 /* just skip expression */
7914 if (flags
& DIF_HAVE_ELEM
)
7917 skip_or_save_block(NULL
);
7920 if (!(flags
& DIF_HAVE_ELEM
)) {
7921 /* This should happen only when we haven't parsed
7922 the init element above for fear of committing a
7923 string constant to memory too early. */
7924 if (tok
!= TOK_STR
&& tok
!= TOK_LSTR
)
7925 expect("string constant");
7926 parse_init_elem(!p
->sec
? EXPR_ANY
: EXPR_CONST
);
7928 if (!p
->sec
&& (flags
& DIF_CLEAR
) /* container was already zero'd */
7929 && (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
7931 && btype_size(type
->t
& VT_BTYPE
) /* not for fp constants */
7935 init_putv(p
, type
, c
);
7939 /* parse an initializer for type 't' if 'has_init' is non zero, and
7940 allocate space in local or global data space ('r' is either
7941 VT_LOCAL or VT_CONST). If 'v' is non zero, then an associated
7942 variable 'v' of scope 'scope' is declared before initializers
7943 are parsed. If 'v' is zero, then a reference to the new object
7944 is put in the value stack. If 'has_init' is 2, a special parsing
7945 is done to handle string constants. */
7946 static void decl_initializer_alloc(CType
*type
, AttributeDef
*ad
, int r
,
7947 int has_init
, int v
, int global
)
7949 int size
, align
, addr
;
7950 TokenString
*init_str
= NULL
;
7953 Sym
*flexible_array
;
7955 int saved_nocode_wanted
= nocode_wanted
;
7956 #ifdef CONFIG_TCC_BCHECK
7957 int bcheck
= tcc_state
->do_bounds_check
&& !NODATA_WANTED
;
7959 init_params p
= {0};
7961 /* Always allocate static or global variables */
7962 if (v
&& (r
& VT_VALMASK
) == VT_CONST
)
7963 nocode_wanted
|= DATA_ONLY_WANTED
;
7965 flexible_array
= NULL
;
7966 size
= type_size(type
, &align
);
7968 /* exactly one flexible array may be initialized, either the
7969 toplevel array or the last member of the toplevel struct */
7972 /* If the base type itself was an array type of unspecified size
7973 (like in 'typedef int arr[]; arr x = {1};') then we will
7974 overwrite the unknown size by the real one for this decl.
7975 We need to unshare the ref symbol holding that size. */
7976 type
->ref
= sym_push(SYM_FIELD
, &type
->ref
->type
, 0, type
->ref
->c
);
7977 p
.flex_array_ref
= type
->ref
;
7979 } else if (has_init
&& (type
->t
& VT_BTYPE
) == VT_STRUCT
) {
7980 Sym
*field
= type
->ref
->next
;
7983 field
= field
->next
;
7984 if (field
->type
.t
& VT_ARRAY
&& field
->type
.ref
->c
< 0) {
7985 flexible_array
= field
;
7986 p
.flex_array_ref
= field
->type
.ref
;
7993 /* If unknown size, do a dry-run 1st pass */
7995 tcc_error("unknown type size");
7996 if (has_init
== 2) {
7997 /* only get strings */
7998 init_str
= tok_str_alloc();
7999 while (tok
== TOK_STR
|| tok
== TOK_LSTR
) {
8000 tok_str_add_tok(init_str
);
8003 tok_str_add(init_str
, -1);
8004 tok_str_add(init_str
, 0);
8006 skip_or_save_block(&init_str
);
8010 begin_macro(init_str
, 1);
8012 decl_initializer(&p
, type
, 0, DIF_FIRST
| DIF_SIZE_ONLY
);
8013 /* prepare second initializer parsing */
8014 macro_ptr
= init_str
->str
;
8017 /* if still unknown size, error */
8018 size
= type_size(type
, &align
);
8020 tcc_error("unknown type size");
8022 /* If there's a flex member and it was used in the initializer
8024 if (flexible_array
&& flexible_array
->type
.ref
->c
> 0)
8025 size
+= flexible_array
->type
.ref
->c
8026 * pointed_size(&flexible_array
->type
);
8029 /* take into account specified alignment if bigger */
8030 if (ad
->a
.aligned
) {
8031 int speca
= 1 << (ad
->a
.aligned
- 1);
8034 } else if (ad
->a
.packed
) {
8038 if (!v
&& NODATA_WANTED
)
8039 size
= 0, align
= 1;
8041 if ((r
& VT_VALMASK
) == VT_LOCAL
) {
8043 #ifdef CONFIG_TCC_BCHECK
8045 /* add padding between stack variables for bound checking */
8049 loc
= (loc
- size
) & -align
;
8051 p
.local_offset
= addr
+ size
;
8052 #ifdef CONFIG_TCC_BCHECK
8054 /* add padding between stack variables for bound checking */
8059 /* local variable */
8060 #ifdef CONFIG_TCC_ASM
8061 if (ad
->asm_label
) {
8062 int reg
= asm_parse_regvar(ad
->asm_label
);
8064 r
= (r
& ~VT_VALMASK
) | reg
;
8067 sym
= sym_push(v
, type
, r
, addr
);
8068 if (ad
->cleanup_func
) {
8069 Sym
*cls
= sym_push2(&all_cleanups
,
8070 SYM_FIELD
| ++cur_scope
->cl
.n
, 0, 0);
8071 cls
->prev_tok
= sym
;
8072 cls
->next
= ad
->cleanup_func
;
8073 cls
->ncl
= cur_scope
->cl
.s
;
8074 cur_scope
->cl
.s
= cls
;
8079 /* push local reference */
8080 vset(type
, r
, addr
);
8085 /* see if the symbol was already defined */
8088 if (p
.flex_array_ref
&& (sym
->type
.t
& type
->t
& VT_ARRAY
)
8089 && sym
->type
.ref
->c
> type
->ref
->c
) {
8090 /* flex array was already declared with explicit size
8092 int arr[] = { 1,2,3 }; */
8093 type
->ref
->c
= sym
->type
.ref
->c
;
8094 size
= type_size(type
, &align
);
8096 patch_storage(sym
, ad
, type
);
8097 /* we accept several definitions of the same global variable. */
8098 if (!has_init
&& sym
->c
&& elfsym(sym
)->st_shndx
!= SHN_UNDEF
)
8103 /* allocate symbol in corresponding section */
8107 while ((tp
->t
& (VT_BTYPE
|VT_ARRAY
)) == (VT_PTR
|VT_ARRAY
))
8108 tp
= &tp
->ref
->type
;
8109 if (tp
->t
& VT_CONSTANT
) {
8110 sec
= rodata_section
;
8111 } else if (has_init
) {
8113 /*if (tcc_state->g_debug & 4)
8114 tcc_warning("rw data: %s", get_tok_str(v, 0));*/
8115 } else if (tcc_state
->nocommon
)
8120 addr
= section_add(sec
, size
, align
);
8121 #ifdef CONFIG_TCC_BCHECK
8122 /* add padding if bound check */
8124 section_add(sec
, 1, 1);
8127 addr
= align
; /* SHN_COMMON is special, symbol value is align */
8128 sec
= common_section
;
8133 sym
= sym_push(v
, type
, r
| VT_SYM
, 0);
8134 patch_storage(sym
, ad
, NULL
);
8136 /* update symbol definition */
8137 put_extern_sym(sym
, sec
, addr
, size
);
8139 /* push global reference */
8140 vpush_ref(type
, sec
, addr
, size
);
8145 #ifdef CONFIG_TCC_BCHECK
8146 /* handles bounds now because the symbol must be defined
8147 before for the relocation */
8151 greloca(bounds_section
, sym
, bounds_section
->data_offset
, R_DATA_PTR
, 0);
8152 /* then add global bound info */
8153 bounds_ptr
= section_ptr_add(bounds_section
, 2 * sizeof(addr_t
));
8154 bounds_ptr
[0] = 0; /* relocated */
8155 bounds_ptr
[1] = size
;
8160 if (type
->t
& VT_VLA
) {
8166 /* save before-VLA stack pointer if needed */
8167 if (cur_scope
->vla
.num
== 0) {
8168 if (cur_scope
->prev
&& cur_scope
->prev
->vla
.num
) {
8169 cur_scope
->vla
.locorig
= cur_scope
->prev
->vla
.loc
;
8171 gen_vla_sp_save(loc
-= PTR_SIZE
);
8172 cur_scope
->vla
.locorig
= loc
;
8176 vpush_type_size(type
, &a
);
8177 gen_vla_alloc(type
, a
);
8178 #if defined TCC_TARGET_PE && defined TCC_TARGET_X86_64
8179 /* on _WIN64, because of the function args scratch area, the
8180 result of alloca differs from RSP and is returned in RAX. */
8181 gen_vla_result(addr
), addr
= (loc
-= PTR_SIZE
);
8183 gen_vla_sp_save(addr
);
8184 cur_scope
->vla
.loc
= addr
;
8185 cur_scope
->vla
.num
++;
8186 } else if (has_init
) {
8188 decl_initializer(&p
, type
, addr
, DIF_FIRST
);
8189 /* patch flexible array member size back to -1, */
8190 /* for possible subsequent similar declarations */
8192 flexible_array
->type
.ref
->c
= -1;
8196 /* restore parse state if needed */
8202 nocode_wanted
= saved_nocode_wanted
;
8205 /* generate vla code saved in post_type() */
/* Emit, inside the function body, the size-evaluation code for one VLA
   parameter whose dimension expression was tokenized and saved by
   post_type() during prototype parsing.  Recurses through arg->type.ref
   first so inner array dimensions are evaluated before outer ones.
   NOTE(review): this extraction dropped several lines of the function
   (local declarations, braces, a recursion guard, and the statements
   following the final vset) — comments describe only the visible code. */
8206 static void func_vla_arg_code(Sym
*arg
)
8209 TokenString
*vla_array_tok
= NULL
;
/* recurse into the referenced type first (innermost dimension first) */
8212 func_vla_arg_code(arg
->type
.ref
);
/* only VLA parameters that carry a saved dimension token string */
8214 if ((arg
->type
.t
& VT_VLA
) && arg
->type
.ref
->vla_array_str
) {
/* reserve an int-sized slot on the local stack for the computed size */
8215 loc
-= type_size(&int_type
, &align
);
/* record the slot offset in the type so later size uses can load it */
8217 arg
->type
.ref
->c
= loc
;
/* replay the saved dimension tokens as a one-shot macro stream */
8220 vla_array_tok
= tok_str_alloc();
8221 vla_array_tok
->str
= arg
->type
.ref
->vla_array_str
;
8222 begin_macro(vla_array_tok
, 1);
/* push the (possibly runtime) size of the array's element type */
8227 vpush_type_size(&arg
->type
.ref
->type
, &align
);
/* lvalue designating the reserved local slot; presumably the computed
   size is stored into it just below — TODO confirm (the store/pop
   sequence is not visible in this extraction) */
8229 vset(&int_type
, VT_LOCAL
|VT_LVAL
, arg
->type
.ref
->c
);
/* Walk the parameter list of function symbol 'sym' and generate the
   saved size-evaluation code for every VLA-typed parameter.
   NOTE(review): the extraction dropped this function's braces and the
   declaration of the local 'arg'. */
8236 static void func_vla_arg(Sym
*sym
)
/* sym->type.ref is the function type; ->next starts the parameter list */
8240 for (arg
= sym
->type
.ref
->next
; arg
; arg
= arg
->next
)
8241 if (arg
->type
.t
& VT_VLA
)
8242 func_vla_arg_code(arg
);
8245 /* parse a function defined by symbol 'sym' and generate its code in
8246 'cur_text_section' */
/* Sets up the per-function global state (scope, output offset, debug
   info, local symbol stack), emits the body, then tears everything back
   down and poisons the globals so stray code generation crashes early.
   NOTE(review): the extraction dropped some lines of this function
   (braces and a few statements between the visible ones); comments
   describe only the visible code. */
8247 static void gen_function(Sym
*sym
)
/* fresh root scope for the function body */
8249 struct scope f
= { 0 };
8250 cur_scope
= root_scope
= &f
;
/* code is appended at the current end of the text section */
8252 ind
= cur_text_section
->data_offset
;
/* honor __attribute__((aligned(N))): pad up to the boundary with nops */
8253 if (sym
->a
.aligned
) {
8254 size_t newoff
= section_add(cur_text_section
, 0,
8255 1 << (sym
->a
.aligned
- 1));
8256 gen_fill_nops(newoff
- ind
);
8258 /* NOTE: we patch the symbol size later */
8259 put_extern_sym(sym
, cur_text_section
, ind
, 0);
/* constructors/destructors get registered in the init/fini arrays */
8260 if (sym
->type
.ref
->f
.func_ctor
)
8261 add_array (tcc_state
, ".init_array", sym
->c
);
8262 if (sym
->type
.ref
->f
.func_dtor
)
8263 add_array (tcc_state
, ".fini_array", sym
->c
);
8265 funcname
= get_tok_str(sym
->v
, NULL
);
/* publish return type and variadic flag for the code generators */
8267 func_vt
= sym
->type
.ref
->type
;
8268 func_var
= sym
->type
.ref
->f
.func_type
== FUNC_ELLIPSIS
;
8270 /* put debug symbol */
8271 tcc_debug_funcstart(tcc_state
, sym
);
8272 /* push a dummy symbol to enable local sym storage */
8273 sym_push2(&local_stack
, SYM_FIELD
, 0, 0);
8274 local_scope
= 1; /* for function parameters */
8276 tcc_debug_prolog_epilog(tcc_state
, 0);
8279 clear_temp_local_var_list();
8284 /* reset local stack */
8285 pop_local_syms(NULL
, 0);
8286 tcc_debug_prolog_epilog(tcc_state
, 1);
/* commit the emitted code size back to the section */
8288 cur_text_section
->data_offset
= ind
;
8290 label_pop(&global_label_stack
, NULL
, 0);
8291 sym_pop(&all_cleanups
, NULL
, 0);
8292 /* patch symbol size */
8293 elfsym(sym
)->st_size
= ind
- func_ind
;
8294 /* end of function */
8295 tcc_debug_funcend(tcc_state
, ind
- func_ind
);
8296 /* It's better to crash than to generate wrong code */
8297 cur_text_section
= NULL
;
8298 funcname
= ""; /* for safety */
8299 func_vt
.t
= VT_VOID
; /* for safety */
8300 func_var
= 0; /* for safety */
8301 ind
= 0; /* for safety */
/* outside of functions only static data output is allowed */
8303 nocode_wanted
= DATA_ONLY_WANTED
;
8305 /* do this after funcend debug info */
/* Generate code for all referenced 'static inline' functions whose
   bodies were recorded as token strings during parsing.  Iterates to a
   fixed point because emitting one inline function may reference
   another not yet emitted.
   NOTE(review): the extraction dropped some lines here (the do-loop
   header, the symbol lookup that sets 'sym', and the per-function
   generation calls); comments describe only the visible code. */
8309 static void gen_inline_functions(TCCState
*s
)
8312 int inline_generated
, i
;
8313 struct InlineFunc
*fn
;
/* parse from an in-memory pseudo-file named ":inline:" */
8315 tcc_open_bf(s
, ":inline:", 0);
8316 /* iterate while inline function are referenced */
8318 inline_generated
= 0;
8319 for (i
= 0; i
< s
->nb_inline_fns
; ++i
) {
8320 fn
= s
->inline_fns
[i
];
/* emit if the function was used (sym->c set) or is no longer inline */
8322 if (sym
&& (sym
->c
|| !(sym
->type
.t
& VT_INLINE
))) {
8323 /* the function was used or forced (and then not internal):
8324 generate its code and convert it to a normal function */
8326 tcc_debug_putfile(s
, fn
->filename
);
/* replay the saved function-body tokens through the macro machinery */
8327 begin_macro(fn
->func_str
, 1);
8329 cur_text_section
= text_section
;
/* remember that something was emitted so the outer loop runs again */
8333 inline_generated
= 1;
8336 } while (inline_generated
);
/* Release the token strings of inline functions that were never emitted,
   then free the inline-function table itself.
   NOTE(review): the extraction dropped this function's braces and likely
   a guard around the tok_str_free call. */
8340 static void free_inline_functions(TCCState
*s
)
8343 /* free tokens of unused inline functions */
8344 for (i
= 0; i
< s
->nb_inline_fns
; ++i
) {
8345 struct InlineFunc
*fn
= s
->inline_fns
[i
];
8347 tok_str_free(fn
->func_str
);
/* frees both the array and each element, and zeroes the count */
8349 dynarray_reset(&s
->inline_fns
, &s
->nb_inline_fns
);
/* Parse and check a C11 _Static_assert declaration.  On failure, report
   either the user-supplied message string or a generic error.
   NOTE(review): the extraction dropped the lines that parse and test the
   constant expression; only the failure/reporting paths are visible. */
8352 static void do_Static_assert(void){
/* assertion failed and no message string was given */
8362 tcc_error("_Static_assert fail");
8364 goto static_assert_out
;
/* collect the (possibly concatenated) message string literal */
8368 parse_mult_str(&error_str
, "string constant");
/* report the user-supplied message, then release the buffer */
8370 tcc_error("%s", (char *)error_str
.data
);
8371 cstr_free(&error_str
);
8377 /* 'l' is VT_LOCAL or VT_CONST to define default storage type
8378 or VT_CMP if parsing old style parameter list
8379 or VT_JMP if parsing c99 for decl: for (int i = 0, ...) */
/* Parse one declaration list (or a full function definition) at storage
   level 'l'.  Dispatches on the declared type: function definitions,
   typedefs, old-style parameter declarations, and plain variables
   (local, global, extern).
   NOTE(review): many lines of this function were dropped by the
   extraction (braces, several conditions, and whole statement runs);
   the added comments describe only the visible code. */
8380 static int decl(int l
)
8382 int v
, has_init
, r
, oldint
;
8385 AttributeDef ad
, adbase
;
/* C11 _Static_assert may appear wherever a declaration may */
8388 if (tok
== TOK_STATIC_ASSERT
) {
/* parse the base type; third arg: warn about implicit int at local scope */
8394 if (!parse_btype(&btype
, &adbase
, l
== VT_LOCAL
)) {
8397 /* skip redundant ';' if not in old parameter decl scope */
8398 if (tok
== ';' && l
!= VT_CMP
) {
8404 if (tok
== TOK_ASM1
|| tok
== TOK_ASM2
|| tok
== TOK_ASM3
) {
8405 /* global asm block */
8409 if (tok
>= TOK_UIDENT
) {
8410 /* special test for old K&R protos without explicit int
8411 type. Only accepted when defining global data */
8416 expect("declaration");
/* base type with no declarator: warn for useless anonymous structs */
8422 if ((btype
.t
& VT_BTYPE
) == VT_STRUCT
) {
8424 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) >= SYM_FIRST_ANOM
)
8425 tcc_warning("unnamed struct/union that defines no instances");
8429 if (IS_ENUM(btype
.t
)) {
8435 while (1) { /* iterate thru each declaration */
8438 type_decl(&type
, &ad
, &v
, TYPE_DIRECT
);
/* debug aid: dump the parsed declaration (presumably under -verbose) */
8442 type_to_str(buf
, sizeof(buf
), &type
, get_tok_str(v
, NULL
));
8443 printf("type = '%s'\n", buf
);
/* --- function declarator --- */
8446 if ((type
.t
& VT_BTYPE
) == VT_FUNC
) {
8447 if ((type
.t
& VT_STATIC
) && (l
!= VT_CONST
))
8448 tcc_error("function without file scope cannot be static");
8449 /* if old style function prototype, we accept a
8452 if (sym
->f
.func_type
== FUNC_OLD
&& l
== VT_CONST
) {
8456 #if defined TCC_TARGET_MACHO || defined TARGETOS_ANDROID
8457 if (sym
->f
.func_alwinl
8458 && ((type
.t
& (VT_EXTERN
| VT_INLINE
))
8459 == (VT_EXTERN
| VT_INLINE
))) {
8460 /* always_inline functions must be handled as if they
8461 don't generate multiple global defs, even if extern
8462 inline, i.e. GNU inline semantics for those. Rewrite
8463 them into static inline. */
8464 type
.t
&= ~VT_EXTERN
;
8465 type
.t
|= VT_STATIC
;
8468 /* always compile 'extern inline' */
8469 if (type
.t
& VT_EXTERN
)
8470 type
.t
&= ~VT_INLINE
;
8472 } else if (oldint
) {
8473 tcc_warning("type defaults to int");
/* GNU extension: __asm__("label") after the declarator */
8476 if (gnu_ext
&& (tok
== TOK_ASM1
|| tok
== TOK_ASM2
|| tok
== TOK_ASM3
)) {
8477 ad
.asm_label
= asm_label_instr();
8478 /* parse one last attribute list, after asm label */
8479 parse_attribute(&ad
);
8481 /* gcc does not allow __asm__("label") with function definition,
8488 #ifdef TCC_TARGET_PE
/* dllimport/dllexport sanity checks (PE targets only) */
8489 if (ad
.a
.dllimport
|| ad
.a
.dllexport
) {
8490 if (type
.t
& VT_STATIC
)
8491 tcc_error("cannot have dll linkage with static");
8492 if (type
.t
& VT_TYPEDEF
) {
8493 tcc_warning("'%s' attribute ignored for typedef",
8494 ad
.a
.dllimport
? (ad
.a
.dllimport
= 0, "dllimport") :
8495 (ad
.a
.dllexport
= 0, "dllexport"));
8496 } else if (ad
.a
.dllimport
) {
8497 if ((type
.t
& VT_BTYPE
) == VT_FUNC
)
8500 type
.t
|= VT_EXTERN
;
/* --- function definition (body follows) --- */
8506 tcc_error("cannot use local functions");
8507 if ((type
.t
& VT_BTYPE
) != VT_FUNC
)
8508 expect("function definition");
8510 /* reject abstract declarators in function definition
8511 make old style params without decl have int type */
8513 while ((sym
= sym
->next
) != NULL
) {
8514 if (!(sym
->v
& ~SYM_FIELD
))
8515 expect("identifier");
8516 if (sym
->type
.t
== VT_VOID
)
8517 sym
->type
= int_type
;
8520 /* apply post-declaraton attributes */
8521 merge_funcattr(&type
.ref
->f
, &ad
.f
);
8523 /* put function symbol */
8524 type
.t
&= ~VT_EXTERN
;
8525 sym
= external_sym(v
, &type
, 0, &ad
);
8527 /* static inline functions are just recorded as a kind
8528 of macro. Their code will be emitted at the end of
8529 the compilation unit only if they are used */
8530 if (sym
->type
.t
& VT_INLINE
) {
8531 struct InlineFunc
*fn
;
/* struct has a flexible filename buffer: alloc struct + string */
8532 fn
= tcc_malloc(sizeof *fn
+ strlen(file
->filename
));
8533 strcpy(fn
->filename
, file
->filename
);
/* save (not compile) the body; emitted later if referenced */
8535 skip_or_save_block(&fn
->func_str
);
8536 dynarray_add(&tcc_state
->inline_fns
,
8537 &tcc_state
->nb_inline_fns
, fn
);
8539 /* compute text section */
8540 cur_text_section
= ad
.section
;
8541 if (!cur_text_section
)
8542 cur_text_section
= text_section
;
/* --- old-style (K&R) parameter declaration --- */
8548 /* find parameter in function parameter list */
8549 for (sym
= func_vt
.ref
->next
; sym
; sym
= sym
->next
)
8550 if ((sym
->v
& ~SYM_FIELD
) == v
)
8552 tcc_error("declaration for parameter '%s' but no such parameter",
8553 get_tok_str(v
, NULL
));
8555 if (type
.t
& VT_STORAGE
) /* 'register' is okay */
8556 tcc_error("storage class specified for '%s'",
8557 get_tok_str(v
, NULL
));
/* VT_VOID here marks a not-yet-declared old-style parameter */
8558 if (sym
->type
.t
!= VT_VOID
)
8559 tcc_error("redefinition of parameter '%s'",
8560 get_tok_str(v
, NULL
));
8561 convert_parameter_type(&type
);
/* --- typedef declaration --- */
8563 } else if (type
.t
& VT_TYPEDEF
) {
8564 /* save typedefed type */
8565 /* XXX: test storage specifiers ? */
8567 if (sym
&& sym
->sym_scope
== local_scope
) {
8568 if (!is_compatible_types(&sym
->type
, &type
)
8569 || !(sym
->type
.t
& VT_TYPEDEF
))
8570 tcc_error("incompatible redefinition of '%s'",
8571 get_tok_str(v
, NULL
));
8574 sym
= sym_push(v
, &type
, 0, 0);
8577 if ((type
.t
& VT_BTYPE
) == VT_FUNC
)
8578 merge_funcattr(&sym
->type
.ref
->f
, &ad
.f
);
8580 tcc_debug_typedef (tcc_state
, sym
);
8581 } else if ((type
.t
& VT_BTYPE
) == VT_VOID
8582 && !(type
.t
& VT_EXTERN
)) {
8583 tcc_error("declaration of void object");
/* --- plain variable or function declaration (no body) --- */
8586 if ((type
.t
& VT_BTYPE
) == VT_FUNC
) {
8587 /* external function definition */
8588 /* specific case for func_call attribute */
8589 merge_funcattr(&type
.ref
->f
, &ad
.f
);
8590 } else if (!(type
.t
& VT_ARRAY
)) {
8591 /* not lvalue if array */
8594 has_init
= (tok
== '=');
8595 if (has_init
&& (type
.t
& VT_VLA
))
8596 tcc_error("variable length array cannot be initialized");
/* decide whether this is only an external reference (no storage here) */
8597 if (((type
.t
& VT_EXTERN
) && (!has_init
|| l
!= VT_CONST
))
8598 || (type
.t
& VT_BTYPE
) == VT_FUNC
8599 /* as with GCC, uninitialized global arrays with no size
8600 are considered extern: */
8601 || ((type
.t
& VT_ARRAY
) && !has_init
8602 && l
== VT_CONST
&& type
.ref
->c
< 0)
8604 /* external variable or function */
8605 type
.t
|= VT_EXTERN
;
8606 sym
= external_sym(v
, &type
, r
, &ad
);
8607 if (ad
.alias_target
) {
8608 /* Aliases need to be emitted when their target
8609 symbol is emitted, even if perhaps unreferenced.
8610 We only support the case where the base is
8611 already defined, otherwise we would need
8612 deferring to emit the aliases until the end of
8613 the compile unit. */
8614 Sym
*alias_target
= sym_find(ad
.alias_target
);
8615 ElfSym
*esym
= elfsym(alias_target
);
/* presumably guarded by a check that esym is defined — the guard line
   is missing from this extraction */
8617 tcc_error("unsupported forward __alias__ attribute");
8618 put_extern_sym2(sym
, esym
->st_shndx
,
8619 esym
->st_value
, esym
->st_size
, 1);
/* --- variable with storage allocated here --- */
8622 if (l
== VT_CONST
|| (type
.t
& VT_STATIC
))
8628 else if (l
== VT_CONST
)
8629 /* uninitialized global variables may be overridden */
8630 type
.t
|= VT_EXTERN
;
8631 decl_initializer_alloc(&type
, &ad
, r
, has_init
, v
, l
== VT_CONST
);
8647 /* ------------------------------------------------------------------------- */
8650 /* ------------------------------------------------------------------------- */