2 * TCC - Tiny C Compiler
4 * Copyright (c) 2001-2004 Fabrice Bellard
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
24 /********************************************************/
25 /* global variables */
/* loc     : local variable index (stack offset allocator; see get_temp_local_var)
   ind     : output code index
   rsym    : return symbol -- presumably the function-return jump label; not shown in this chunk
   anon_sym: anonymous symbol index (reset to SYM_FIRST_ANOM in tccgen_compile) */
ST_DATA int rsym, anon_sym, ind, loc;
/* symbol stacks; global_stack/local_stack are popped in tccgen_finish() */
ST_DATA Sym *global_stack;        /* file-scope symbols */
ST_DATA Sym *local_stack;         /* symbols local to the current function */
ST_DATA Sym *define_stack;        /* presumably preprocessor defines -- not used in this chunk, confirm */
ST_DATA Sym *global_label_stack;
ST_DATA Sym *local_label_stack;

/* Sym allocator: free list plus the backing pool array
   (see __sym_malloc/sym_malloc/sym_free; pools freed in tccgen_finish) */
static Sym *sym_free_first;
static void **sym_pools;
static int nb_sym_pools;

/* NOTE(review): presumably cleanup-attribute symbols and unresolved gotos -- confirm */
static Sym *all_cleanups, *pending_gotos;
static int local_scope;           /* block-scope nesting depth; 0 at file scope (recorded in sym_push) */
static int constant_p;            /* NOTE(review): meaning not visible in this chunk */
ST_DATA char debug_modes;         /* bit 0: do_debug, bit 1: test_coverage (set in tccgen_compile) */
/* value stack; slot [0] is a guard entry so that the stack-empty state
   is vtop == vstack - 1 (see check_vstack) */
static SValue _vstack[1 + VSTACK_SIZE];
#define vstack (_vstack + 1)
ST_DATA int const_wanted; /* true if constant wanted */
ST_DATA int nocode_wanted; /* no code generation wanted */
#define unevalmask 0xffff /* unevaluated subexpression */
#define NODATA_WANTED (nocode_wanted > 0) /* no static data output wanted either */
#define DATA_ONLY_WANTED 0x80000000 /* ON outside of functions and for static initializers */
#define CODE_OFF_BIT 0x20000000
/* NOTE(review): an 'if' without 'else' in a macro is a dangling-else hazard at
   call sites such as 'if (x) CODE_OFF(); else ...' -- verify no such caller exists */
#define CODE_OFF() if(!nocode_wanted)(nocode_wanted |= CODE_OFF_BIT)
#define CODE_ON() (nocode_wanted &= ~CODE_OFF_BIT)
ST_DATA int global_expr;  /* true if compound literals must be allocated globally (used during initializers parsing) */
ST_DATA CType func_vt;    /* current function return type (used by return instruction) */
ST_DATA int func_var;     /* true if current function is variadic (used by return instruction) */
ST_DATA const char *funcname;
/* often-used predefined types, initialized in tccgen_init() */
ST_DATA CType int_type, func_old_type, char_type, char_pointer_type;
static CString initstr;
73 #define VT_SIZE_T (VT_INT | VT_UNSIGNED)
74 #define VT_PTRDIFF_T VT_INT
76 #define VT_SIZE_T (VT_LLONG | VT_UNSIGNED)
77 #define VT_PTRDIFF_T VT_LLONG
79 #define VT_SIZE_T (VT_LONG | VT_LLONG | VT_UNSIGNED)
80 #define VT_PTRDIFF_T (VT_LONG | VT_LLONG)
83 static struct switch_t
{
87 } **p
; int n
; /* list of case ranges */
88 int def_sym
; /* default symbol */
92 struct switch_t
*prev
;
94 } *cur_switch
; /* current switch */
96 #define MAX_TEMP_LOCAL_VARIABLE_NUMBER 8
97 /*list of temporary local variables on the stack in current function. */
98 static struct temp_local_variable
{
99 int location
; //offset on stack. Svalue.c.i
102 } arr_temp_local_vars
[MAX_TEMP_LOCAL_VARIABLE_NUMBER
];
103 static int nb_temp_local_vars
;
105 static struct scope
{
107 struct { int loc
, locorig
, num
; } vla
;
108 struct { Sym
*s
; int n
; } cl
;
111 } *cur_scope
, *loop_scope
, *root_scope
;
/* enables the precedence-climbing expression parser (tested via #ifdef below) */
#define precedence_parser
static void init_prec(void);

/* ---- forward declarations ---- */
static void gen_cast(CType *type);
static void gen_cast_s(int t);
static inline CType *pointed_type(CType *type);
static int is_compatible_types(CType *type1, CType *type2);
static int parse_btype(CType *type, AttributeDef *ad, int ignore_label);
static CType *type_decl(CType *type, AttributeDef *ad, int *v, int td);
static void parse_expr_type(CType *type);
static void init_putv(init_params *p, CType *type, unsigned long c);
static void decl_initializer(init_params *p, CType *type, unsigned long c, int flags);
static void block(int is_expr);
static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r, int has_init, int v, int scope);
static int decl(int l);
static void expr_eq(void);
static void vpush_type_size(CType *type, int *a);
static int is_compatible_unqualified_types(CType *type1, CType *type2);
static inline int64_t expr_const64(void);
static void vpush64(int ty, unsigned long long v);
static void vpush(CType *type);
static int gvtst(int inv, int t);
static void gen_inline_functions(TCCState *s);
static void free_inline_functions(TCCState *s);
static void skip_or_save_block(TokenString **str);
static void gv_dup(void);
static int get_temp_local_var(int size,int align);
static void clear_temp_local_var_list();
static void cast_error(CType *st, CType *dt);
151 /* ------------------------------------------------------------------------- */
152 /* Automagical code suppression */
154 /* Clear 'nocode_wanted' at forward label if it was used */
155 ST_FUNC
void gsym(int t
)
163 /* Clear 'nocode_wanted' if current pc is a label */
169 tcc_tcov_block_begin(tcc_state
);
173 /* Set 'nocode_wanted' after unconditional (backwards) jump */
174 static void gjmp_addr_acs(int t
)
180 /* Set 'nocode_wanted' after unconditional (forwards) jump */
181 static int gjmp_acs(int t
)
188 /* These are #undef'd at the end of this file */
189 #define gjmp_addr gjmp_addr_acs
190 #define gjmp gjmp_acs
191 /* ------------------------------------------------------------------------- */
193 ST_INLN
int is_float(int t
)
195 int bt
= t
& VT_BTYPE
;
196 return bt
== VT_LDOUBLE
202 static inline int is_integer_btype(int bt
)
211 static int btype_size(int bt
)
213 return bt
== VT_BYTE
|| bt
== VT_BOOL
? 1 :
217 bt
== VT_PTR
? PTR_SIZE
: 0;
220 /* returns function return register from type */
221 static int R_RET(int t
)
225 #ifdef TCC_TARGET_X86_64
226 if ((t
& VT_BTYPE
) == VT_LDOUBLE
)
228 #elif defined TCC_TARGET_RISCV64
229 if ((t
& VT_BTYPE
) == VT_LDOUBLE
)
235 /* returns 2nd function return register, if any */
236 static int R2_RET(int t
)
242 #elif defined TCC_TARGET_X86_64
247 #elif defined TCC_TARGET_RISCV64
254 /* returns true for two-word types */
255 #define USING_TWO_WORDS(t) (R2_RET(t) != VT_CONST)
257 /* put function return registers to stack value */
258 static void PUT_R_RET(SValue
*sv
, int t
)
260 sv
->r
= R_RET(t
), sv
->r2
= R2_RET(t
);
263 /* returns function return register class for type t */
264 static int RC_RET(int t
)
266 return reg_classes
[R_RET(t
)] & ~(RC_FLOAT
| RC_INT
);
269 /* returns generic register class for type t */
270 static int RC_TYPE(int t
)
274 #ifdef TCC_TARGET_X86_64
275 if ((t
& VT_BTYPE
) == VT_LDOUBLE
)
277 if ((t
& VT_BTYPE
) == VT_QFLOAT
)
279 #elif defined TCC_TARGET_RISCV64
280 if ((t
& VT_BTYPE
) == VT_LDOUBLE
)
286 /* returns 2nd register class corresponding to t and rc */
287 static int RC2_TYPE(int t
, int rc
)
289 if (!USING_TWO_WORDS(t
))
304 /* we use our own 'finite' function to avoid potential problems with
305 non standard math libs */
306 /* XXX: endianness dependent */
307 ST_FUNC
int ieee_finite(double d
)
310 memcpy(p
, &d
, sizeof(double));
311 return ((unsigned)((p
[1] | 0x800fffff) + 1)) >> 31;
314 /* compiling intel long double natively */
315 #if (defined __i386__ || defined __x86_64__) \
316 && (defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64)
317 # define TCC_IS_NATIVE_387
320 ST_FUNC
void test_lvalue(void)
322 if (!(vtop
->r
& VT_LVAL
))
326 ST_FUNC
void check_vstack(void)
328 if (vtop
!= vstack
- 1)
329 tcc_error("internal compiler error: vstack leak (%d)",
330 (int)(vtop
- vstack
+ 1));
333 /* vstack debugging aid */
335 void pv (const char *lbl
, int a
, int b
)
338 for (i
= a
; i
< a
+ b
; ++i
) {
339 SValue
*p
= &vtop
[-i
];
340 printf("%s vtop[-%d] : type.t:%04x r:%04x r2:%04x c.i:%d\n",
341 lbl
, i
, p
->type
.t
, p
->r
, p
->r2
, (int)p
->c
.i
);
346 /* ------------------------------------------------------------------------- */
347 /* initialize vstack and types. This must be done also for tcc -E */
348 ST_FUNC
void tccgen_init(TCCState
*s1
)
351 memset(vtop
, 0, sizeof *vtop
);
353 /* define some often used types */
356 char_type
.t
= VT_BYTE
;
357 if (s1
->char_is_unsigned
)
358 char_type
.t
|= VT_UNSIGNED
;
359 char_pointer_type
= char_type
;
360 mk_pointer(&char_pointer_type
);
362 func_old_type
.t
= VT_FUNC
;
363 func_old_type
.ref
= sym_push(SYM_FIELD
, &int_type
, 0, 0);
364 func_old_type
.ref
->f
.func_call
= FUNC_CDECL
;
365 func_old_type
.ref
->f
.func_type
= FUNC_OLD
;
366 #ifdef precedence_parser
372 ST_FUNC
int tccgen_compile(TCCState
*s1
)
374 cur_text_section
= NULL
;
377 anon_sym
= SYM_FIRST_ANOM
;
379 nocode_wanted
= DATA_ONLY_WANTED
; /* no code outside of functions */
381 debug_modes
= (s1
->do_debug
? 1 : 0) | s1
->test_coverage
<< 1;
385 #ifdef TCC_TARGET_ARM
389 printf("%s: **** new file\n", file
->filename
);
391 parse_flags
= PARSE_FLAG_PREPROCESS
| PARSE_FLAG_TOK_NUM
| PARSE_FLAG_TOK_STR
;
394 gen_inline_functions(s1
);
396 /* end of translation unit info */
402 ST_FUNC
void tccgen_finish(TCCState
*s1
)
405 free_inline_functions(s1
);
406 sym_pop(&global_stack
, NULL
, 0);
407 sym_pop(&local_stack
, NULL
, 0);
408 /* free preprocessor macros */
411 dynarray_reset(&sym_pools
, &nb_sym_pools
);
412 sym_free_first
= NULL
;
415 /* ------------------------------------------------------------------------- */
416 ST_FUNC ElfSym
*elfsym(Sym
*s
)
420 return &((ElfSym
*)symtab_section
->data
)[s
->c
];
423 /* apply storage attributes to Elf symbol */
424 ST_FUNC
void update_storage(Sym
*sym
)
427 int sym_bind
, old_sym_bind
;
433 if (sym
->a
.visibility
)
434 esym
->st_other
= (esym
->st_other
& ~ELFW(ST_VISIBILITY
)(-1))
437 if (sym
->type
.t
& (VT_STATIC
| VT_INLINE
))
438 sym_bind
= STB_LOCAL
;
439 else if (sym
->a
.weak
)
442 sym_bind
= STB_GLOBAL
;
443 old_sym_bind
= ELFW(ST_BIND
)(esym
->st_info
);
444 if (sym_bind
!= old_sym_bind
) {
445 esym
->st_info
= ELFW(ST_INFO
)(sym_bind
, ELFW(ST_TYPE
)(esym
->st_info
));
449 if (sym
->a
.dllimport
)
450 esym
->st_other
|= ST_PE_IMPORT
;
451 if (sym
->a
.dllexport
)
452 esym
->st_other
|= ST_PE_EXPORT
;
456 printf("storage %s: bind=%c vis=%d exp=%d imp=%d\n",
457 get_tok_str(sym
->v
, NULL
),
458 sym_bind
== STB_WEAK
? 'w' : sym_bind
== STB_LOCAL
? 'l' : 'g',
466 /* ------------------------------------------------------------------------- */
467 /* update sym->c so that it points to an external symbol in section
468 'section' with value 'value' */
470 ST_FUNC
void put_extern_sym2(Sym
*sym
, int sh_num
,
471 addr_t value
, unsigned long size
,
472 int can_add_underscore
)
474 int sym_type
, sym_bind
, info
, other
, t
;
480 name
= get_tok_str(sym
->v
, NULL
);
482 if ((t
& VT_BTYPE
) == VT_FUNC
) {
484 } else if ((t
& VT_BTYPE
) == VT_VOID
) {
485 sym_type
= STT_NOTYPE
;
486 if ((t
& (VT_BTYPE
|VT_ASM_FUNC
)) == VT_ASM_FUNC
)
489 sym_type
= STT_OBJECT
;
491 if (t
& (VT_STATIC
| VT_INLINE
))
492 sym_bind
= STB_LOCAL
;
494 sym_bind
= STB_GLOBAL
;
498 if (sym_type
== STT_FUNC
&& sym
->type
.ref
) {
499 Sym
*ref
= sym
->type
.ref
;
500 if (ref
->a
.nodecorate
) {
501 can_add_underscore
= 0;
503 if (ref
->f
.func_call
== FUNC_STDCALL
&& can_add_underscore
) {
504 sprintf(buf1
, "_%s@%d", name
, ref
->f
.func_args
* PTR_SIZE
);
506 other
|= ST_PE_STDCALL
;
507 can_add_underscore
= 0;
512 if (sym
->asm_label
) {
513 name
= get_tok_str(sym
->asm_label
, NULL
);
514 can_add_underscore
= 0;
517 if (tcc_state
->leading_underscore
&& can_add_underscore
) {
519 pstrcpy(buf1
+ 1, sizeof(buf1
) - 1, name
);
523 info
= ELFW(ST_INFO
)(sym_bind
, sym_type
);
524 sym
->c
= put_elf_sym(symtab_section
, value
, size
, info
, other
, sh_num
, name
);
527 tcc_debug_extern_sym(tcc_state
, sym
, sh_num
, sym_bind
, sym_type
);
531 esym
->st_value
= value
;
532 esym
->st_size
= size
;
533 esym
->st_shndx
= sh_num
;
538 ST_FUNC
void put_extern_sym(Sym
*sym
, Section
*s
, addr_t value
, unsigned long size
)
540 if (nocode_wanted
&& (NODATA_WANTED
|| (s
&& s
== cur_text_section
)))
542 put_extern_sym2(sym
, s
? s
->sh_num
: SHN_UNDEF
, value
, size
, 1);
545 /* add a new relocation entry to symbol 'sym' in section 's' */
546 ST_FUNC
void greloca(Section
*s
, Sym
*sym
, unsigned long offset
, int type
,
551 if (nocode_wanted
&& s
== cur_text_section
)
556 put_extern_sym(sym
, NULL
, 0, 0);
560 /* now we can add ELF relocation info */
561 put_elf_reloca(symtab_section
, s
, offset
, type
, c
, addend
);
/* add a new relocation entry to symbol 'sym' in section 's'
   (convenience wrapper: greloca() with a zero addend) */
ST_FUNC void greloc(Section *s, Sym *sym, unsigned long offset, int type)
{
    greloca(s, sym, offset, type, 0);
}
571 /* ------------------------------------------------------------------------- */
572 /* symbol allocator */
573 static Sym
*__sym_malloc(void)
575 Sym
*sym_pool
, *sym
, *last_sym
;
578 sym_pool
= tcc_malloc(SYM_POOL_NB
* sizeof(Sym
));
579 dynarray_add(&sym_pools
, &nb_sym_pools
, sym_pool
);
581 last_sym
= sym_free_first
;
583 for(i
= 0; i
< SYM_POOL_NB
; i
++) {
584 sym
->next
= last_sym
;
588 sym_free_first
= last_sym
;
592 static inline Sym
*sym_malloc(void)
596 sym
= sym_free_first
;
598 sym
= __sym_malloc();
599 sym_free_first
= sym
->next
;
602 sym
= tcc_malloc(sizeof(Sym
));
607 ST_INLN
void sym_free(Sym
*sym
)
610 sym
->next
= sym_free_first
;
611 sym_free_first
= sym
;
617 /* push, without hashing */
618 ST_FUNC Sym
*sym_push2(Sym
**ps
, int v
, int t
, int c
)
623 memset(s
, 0, sizeof *s
);
633 /* find a symbol and return its associated structure. 's' is the top
634 of the symbol stack */
635 ST_FUNC Sym
*sym_find2(Sym
*s
, int v
)
647 /* structure lookup */
648 ST_INLN Sym
*struct_find(int v
)
651 if ((unsigned)v
>= (unsigned)(tok_ident
- TOK_IDENT
))
653 return table_ident
[v
]->sym_struct
;
656 /* find an identifier */
657 ST_INLN Sym
*sym_find(int v
)
660 if ((unsigned)v
>= (unsigned)(tok_ident
- TOK_IDENT
))
662 return table_ident
[v
]->sym_identifier
;
665 static int sym_scope(Sym
*s
)
667 if (IS_ENUM_VAL (s
->type
.t
))
668 return s
->type
.ref
->sym_scope
;
673 /* push a given symbol on the symbol stack */
674 ST_FUNC Sym
*sym_push(int v
, CType
*type
, int r
, int c
)
683 s
= sym_push2(ps
, v
, type
->t
, c
);
684 s
->type
.ref
= type
->ref
;
686 /* don't record fields or anonymous symbols */
688 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
689 /* record symbol in token array */
690 ts
= table_ident
[(v
& ~SYM_STRUCT
) - TOK_IDENT
];
692 ps
= &ts
->sym_struct
;
694 ps
= &ts
->sym_identifier
;
697 s
->sym_scope
= local_scope
;
698 if (s
->prev_tok
&& sym_scope(s
->prev_tok
) == s
->sym_scope
)
699 tcc_error("redeclaration of '%s'",
700 get_tok_str(v
& ~SYM_STRUCT
, NULL
));
705 /* push a global identifier */
706 ST_FUNC Sym
*global_identifier_push(int v
, int t
, int c
)
709 s
= sym_push2(&global_stack
, v
, t
, c
);
710 s
->r
= VT_CONST
| VT_SYM
;
711 /* don't record anonymous symbol */
712 if (v
< SYM_FIRST_ANOM
) {
713 ps
= &table_ident
[v
- TOK_IDENT
]->sym_identifier
;
714 /* modify the top most local identifier, so that sym_identifier will
715 point to 's' when popped; happens when called from inline asm */
716 while (*ps
!= NULL
&& (*ps
)->sym_scope
)
717 ps
= &(*ps
)->prev_tok
;
724 /* pop symbols until top reaches 'b'. If KEEP is non-zero don't really
725 pop them yet from the list, but do remove them from the token array. */
726 ST_FUNC
void sym_pop(Sym
**ptop
, Sym
*b
, int keep
)
736 /* remove symbol in token array */
738 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
739 ts
= table_ident
[(v
& ~SYM_STRUCT
) - TOK_IDENT
];
741 ps
= &ts
->sym_struct
;
743 ps
= &ts
->sym_identifier
;
754 /* ------------------------------------------------------------------------- */
755 static void vcheck_cmp(void)
757 /* cannot let cpu flags if other instruction are generated. Also
758 avoid leaving VT_JMP anywhere except on the top of the stack
759 because it would complicate the code generator.
761 Don't do this when nocode_wanted. vtop might come from
762 !nocode_wanted regions (see 88_codeopt.c) and transforming
763 it to a register without actually generating code is wrong
764 as their value might still be used for real. All values
765 we push under nocode_wanted will eventually be popped
766 again, so that the VT_CMP/VT_JMP value will be in vtop
767 when code is unsuppressed again. */
769 /* However if it's just automatic suppression via CODE_OFF/ON()
770 then it seems that we better let things work undisturbed.
771 How can it work at all under nocode_wanted? Well, gv() will
772 actually clear it at the gsym() in load()/VT_JMP in the
773 generator backends */
775 if (vtop
->r
== VT_CMP
&& 0 == (nocode_wanted
& ~CODE_OFF_BIT
))
779 static void vsetc(CType
*type
, int r
, CValue
*vc
)
781 if (vtop
>= vstack
+ (VSTACK_SIZE
- 1))
782 tcc_error("memory full (vstack)");
792 ST_FUNC
void vswap(void)
802 /* pop stack value */
803 ST_FUNC
void vpop(void)
806 v
= vtop
->r
& VT_VALMASK
;
807 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
808 /* for x86, we need to pop the FP stack */
810 o(0xd8dd); /* fstp %st(0) */
814 /* need to put correct jump if && or || without test */
/* push constant of type "type" with useless (zero) value */
static void vpush(CType *type)
{
    vset(type, VT_CONST, 0);
}
827 /* push arbitrary 64bit constant */
828 static void vpush64(int ty
, unsigned long long v
)
835 vsetc(&ctype
, VT_CONST
, &cval
);
838 /* push integer constant */
839 ST_FUNC
void vpushi(int v
)
/* push a pointer-sized (size_t) constant */
static void vpushs(addr_t v)
{
    vpush64(VT_SIZE_T, v);
}
/* push long long constant */
static inline void vpushll(long long v)
{
    vpush64(VT_LLONG, v);
}
856 ST_FUNC
void vset(CType
*type
, int r
, int v
)
860 vsetc(type
, r
, &cval
);
863 static void vseti(int r
, int v
)
871 ST_FUNC
void vpushv(SValue
*v
)
873 if (vtop
>= vstack
+ (VSTACK_SIZE
- 1))
874 tcc_error("memory full (vstack)");
879 static void vdup(void)
884 /* rotate n first stack elements to the bottom
885 I1 ... In -> I2 ... In I1 [top is right]
887 ST_FUNC
void vrotb(int n
)
899 /* rotate the n elements before entry e towards the top
900 I1 ... In ... -> In I1 ... I(n-1) ... [top is right]
902 ST_FUNC
void vrote(SValue
*e
, int n
)
909 for(i
= 0;i
< n
- 1; i
++)
914 /* rotate n first stack elements to the top
915 I1 ... In -> In I1 ... I(n-1) [top is right]
917 ST_FUNC
void vrott(int n
)
922 /* ------------------------------------------------------------------------- */
923 /* vtop->r = VT_CMP means CPU-flags have been set from comparison or test. */
925 /* called from generators to set the result from relational ops */
926 ST_FUNC
void vset_VT_CMP(int op
)
934 /* called once before asking generators to load VT_CMP to a register */
935 static void vset_VT_JMP(void)
937 int op
= vtop
->cmp_op
;
939 if (vtop
->jtrue
|| vtop
->jfalse
) {
940 int origt
= vtop
->type
.t
;
941 /* we need to jump to 'mov $0,%R' or 'mov $1,%R' */
942 int inv
= op
& (op
< 2); /* small optimization */
943 vseti(VT_JMP
+inv
, gvtst(inv
, 0));
944 vtop
->type
.t
|= origt
& (VT_UNSIGNED
| VT_DEFSIGN
);
946 /* otherwise convert flags (rsp. 0/1) to register */
948 if (op
< 2) /* doesn't seem to happen */
953 /* Set CPU Flags, doesn't yet jump */
954 static void gvtst_set(int inv
, int t
)
958 if (vtop
->r
!= VT_CMP
) {
961 if (vtop
->r
!= VT_CMP
) /* must be VT_CONST then */
962 vset_VT_CMP(vtop
->c
.i
!= 0);
965 p
= inv
? &vtop
->jfalse
: &vtop
->jtrue
;
966 *p
= gjmp_append(*p
, t
);
969 /* Generate value test
971 * Generate a test for any value (jump, comparison and integers) */
972 static int gvtst(int inv
, int t
)
977 t
= vtop
->jtrue
, u
= vtop
->jfalse
;
982 /* jump to the wanted target */
984 t
= gjmp_cond(op
^ inv
, t
);
987 /* resolve complementary jumps to here */
994 /* generate a zero or nozero test */
995 static void gen_test_zero(int op
)
997 if (vtop
->r
== VT_CMP
) {
1001 vtop
->jfalse
= vtop
->jtrue
;
1011 /* ------------------------------------------------------------------------- */
1012 /* push a symbol value of TYPE */
1013 ST_FUNC
void vpushsym(CType
*type
, Sym
*sym
)
1017 vsetc(type
, VT_CONST
| VT_SYM
, &cval
);
1021 /* Return a static symbol pointing to a section */
1022 ST_FUNC Sym
*get_sym_ref(CType
*type
, Section
*sec
, unsigned long offset
, unsigned long size
)
1028 sym
= sym_push(v
, type
, VT_CONST
| VT_SYM
, 0);
1029 sym
->type
.t
|= VT_STATIC
;
1030 put_extern_sym(sym
, sec
, offset
, size
);
/* push a reference to a section offset by adding a dummy symbol
   (the anonymous symbol comes from get_sym_ref) */
static void vpush_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
{
    vpushsym(type, get_sym_ref(type, sec, offset, size));
}
1040 /* define a new external reference to a symbol 'v' of type 'u' */
1041 ST_FUNC Sym
*external_global_sym(int v
, CType
*type
)
1047 /* push forward reference */
1048 s
= global_identifier_push(v
, type
->t
| VT_EXTERN
, 0);
1049 s
->type
.ref
= type
->ref
;
1050 } else if (IS_ASM_SYM(s
)) {
1051 s
->type
.t
= type
->t
| (s
->type
.t
& VT_EXTERN
);
1052 s
->type
.ref
= type
->ref
;
/* create an external reference with no specific type similar to asm labels.
   This avoids type conflicts if the symbol is used from C too */
ST_FUNC Sym *external_helper_sym(int v)
{
    CType ct = { VT_ASM_FUNC, NULL };
    return external_global_sym(v, &ct);
}
/* push a reference to a helper function (such as memmove),
   typed with the old-style (unprototyped) function type */
ST_FUNC void vpush_helper_func(int v)
{
    vpushsym(&func_old_type, external_helper_sym(v));
}
/* Merge symbol attributes: fold the attributes of 'sa1' into 'sa'. */
static void merge_symattr(struct SymAttr *sa, struct SymAttr *sa1)
{
    /* an explicit alignment is only taken if none was set before */
    if (sa1->aligned && !sa->aligned)
        sa->aligned = sa1->aligned;
    sa->packed |= sa1->packed;
    sa->weak |= sa1->weak;
    sa->nodebug |= sa1->nodebug;
    if (sa1->visibility != STV_DEFAULT) {
        /* keep the numerically smaller non-default visibility value */
        int vis = sa->visibility;
        if (vis == STV_DEFAULT
            || vis > sa1->visibility)
            vis = sa1->visibility;
        sa->visibility = vis;
    }
    sa->dllexport |= sa1->dllexport;
    sa->nodecorate |= sa1->nodecorate;
    sa->dllimport |= sa1->dllimport;
}
1092 /* Merge function attributes. */
1093 static void merge_funcattr(struct FuncAttr
*fa
, struct FuncAttr
*fa1
)
1095 if (fa1
->func_call
&& !fa
->func_call
)
1096 fa
->func_call
= fa1
->func_call
;
1097 if (fa1
->func_type
&& !fa
->func_type
)
1098 fa
->func_type
= fa1
->func_type
;
1099 if (fa1
->func_args
&& !fa
->func_args
)
1100 fa
->func_args
= fa1
->func_args
;
1101 if (fa1
->func_noreturn
)
1102 fa
->func_noreturn
= 1;
1109 /* Merge attributes. */
1110 static void merge_attr(AttributeDef
*ad
, AttributeDef
*ad1
)
1112 merge_symattr(&ad
->a
, &ad1
->a
);
1113 merge_funcattr(&ad
->f
, &ad1
->f
);
1116 ad
->section
= ad1
->section
;
1117 if (ad1
->alias_target
)
1118 ad
->alias_target
= ad1
->alias_target
;
1120 ad
->asm_label
= ad1
->asm_label
;
1122 ad
->attr_mode
= ad1
->attr_mode
;
1125 /* Merge some type attributes. */
1126 static void patch_type(Sym
*sym
, CType
*type
)
1128 if (!(type
->t
& VT_EXTERN
) || IS_ENUM_VAL(sym
->type
.t
)) {
1129 if (!(sym
->type
.t
& VT_EXTERN
))
1130 tcc_error("redefinition of '%s'", get_tok_str(sym
->v
, NULL
));
1131 sym
->type
.t
&= ~VT_EXTERN
;
1134 if (IS_ASM_SYM(sym
)) {
1135 /* stay static if both are static */
1136 sym
->type
.t
= type
->t
& (sym
->type
.t
| ~VT_STATIC
);
1137 sym
->type
.ref
= type
->ref
;
1140 if (!is_compatible_types(&sym
->type
, type
)) {
1141 tcc_error("incompatible types for redefinition of '%s'",
1142 get_tok_str(sym
->v
, NULL
));
1144 } else if ((sym
->type
.t
& VT_BTYPE
) == VT_FUNC
) {
1145 int static_proto
= sym
->type
.t
& VT_STATIC
;
1146 /* warn if static follows non-static function declaration */
1147 if ((type
->t
& VT_STATIC
) && !static_proto
1148 /* XXX this test for inline shouldn't be here. Until we
1149 implement gnu-inline mode again it silences a warning for
1150 mingw caused by our workarounds. */
1151 && !((type
->t
| sym
->type
.t
) & VT_INLINE
))
1152 tcc_warning("static storage ignored for redefinition of '%s'",
1153 get_tok_str(sym
->v
, NULL
));
1155 /* set 'inline' if both agree or if one has static */
1156 if ((type
->t
| sym
->type
.t
) & VT_INLINE
) {
1157 if (!((type
->t
^ sym
->type
.t
) & VT_INLINE
)
1158 || ((type
->t
| sym
->type
.t
) & VT_STATIC
))
1159 static_proto
|= VT_INLINE
;
1162 if (0 == (type
->t
& VT_EXTERN
)) {
1163 struct FuncAttr f
= sym
->type
.ref
->f
;
1164 /* put complete type, use static from prototype */
1165 sym
->type
.t
= (type
->t
& ~(VT_STATIC
|VT_INLINE
)) | static_proto
;
1166 sym
->type
.ref
= type
->ref
;
1167 merge_funcattr(&sym
->type
.ref
->f
, &f
);
1169 sym
->type
.t
&= ~VT_INLINE
| static_proto
;
1172 if (sym
->type
.ref
->f
.func_type
== FUNC_OLD
1173 && type
->ref
->f
.func_type
!= FUNC_OLD
) {
1174 sym
->type
.ref
= type
->ref
;
1178 if ((sym
->type
.t
& VT_ARRAY
) && type
->ref
->c
>= 0) {
1179 /* set array size if it was omitted in extern declaration */
1180 sym
->type
.ref
->c
= type
->ref
->c
;
1182 if ((type
->t
^ sym
->type
.t
) & VT_STATIC
)
1183 tcc_warning("storage mismatch for redefinition of '%s'",
1184 get_tok_str(sym
->v
, NULL
));
1188 /* Merge some storage attributes. */
1189 static void patch_storage(Sym
*sym
, AttributeDef
*ad
, CType
*type
)
1192 patch_type(sym
, type
);
1194 #ifdef TCC_TARGET_PE
1195 if (sym
->a
.dllimport
!= ad
->a
.dllimport
)
1196 tcc_error("incompatible dll linkage for redefinition of '%s'",
1197 get_tok_str(sym
->v
, NULL
));
1199 merge_symattr(&sym
->a
, &ad
->a
);
1201 sym
->asm_label
= ad
->asm_label
;
1202 update_storage(sym
);
1205 /* copy sym to other stack */
1206 static Sym
*sym_copy(Sym
*s0
, Sym
**ps
)
1209 s
= sym_malloc(), *s
= *s0
;
1210 s
->prev
= *ps
, *ps
= s
;
1211 if (s
->v
< SYM_FIRST_ANOM
) {
1212 ps
= &table_ident
[s
->v
- TOK_IDENT
]->sym_identifier
;
1213 s
->prev_tok
= *ps
, *ps
= s
;
/* copy s->type.ref to stack 'ps' for VT_FUNC and VT_PTR */
static void sym_copy_ref(Sym *s, Sym **ps)
{
    int bt = s->type.t & VT_BTYPE;
    /* only function/pointer types and locally-scoped structs carry a
       ref chain that needs to live on the destination stack */
    if (bt == VT_FUNC || bt == VT_PTR || (bt == VT_STRUCT && s->sym_scope)) {
        Sym **sp = &s->type.ref;
        /* detach the original chain (*sp = NULL), then rebuild it from
           copies, recursing so nested parameter/field types are copied too */
        for (s = *sp, *sp = NULL; s; s = s->next) {
            Sym *s2 = sym_copy(s, ps);
            sp = &(*sp = s2)->next;
            sym_copy_ref(s2, ps);
        }
    }
}
1232 /* define a new external reference to a symbol 'v' */
1233 static Sym
*external_sym(int v
, CType
*type
, int r
, AttributeDef
*ad
)
1237 /* look for global symbol */
1239 while (s
&& s
->sym_scope
)
1243 /* push forward reference */
1244 s
= global_identifier_push(v
, type
->t
, 0);
1247 s
->asm_label
= ad
->asm_label
;
1248 s
->type
.ref
= type
->ref
;
1249 /* copy type to the global stack */
1251 sym_copy_ref(s
, &global_stack
);
1253 patch_storage(s
, ad
, type
);
1255 /* push variables on local_stack if any */
1256 if (local_stack
&& (s
->type
.t
& VT_BTYPE
) != VT_FUNC
)
1257 s
= sym_copy(s
, &local_stack
);
1261 /* save registers up to (vtop - n) stack entry */
1262 ST_FUNC
void save_regs(int n
)
1265 for(p
= vstack
, p1
= vtop
- n
; p
<= p1
; p
++)
/* save register 'r' to the memory stack, and mark it as being free
   (considers the whole value stack; see save_reg_upstack) */
ST_FUNC void save_reg(int r)
{
    save_reg_upstack(r, 0);
}
1275 /* save r to the memory stack, and mark it as being free,
1276 if seen up to (vtop - n) stack entry */
1277 ST_FUNC
void save_reg_upstack(int r
, int n
)
1279 int l
, size
, align
, bt
;
1282 if ((r
&= VT_VALMASK
) >= VT_CONST
)
1287 for(p
= vstack
, p1
= vtop
- n
; p
<= p1
; p
++) {
1288 if ((p
->r
& VT_VALMASK
) == r
|| p
->r2
== r
) {
1289 /* must save value on stack if not already done */
1291 bt
= p
->type
.t
& VT_BTYPE
;
1294 if ((p
->r
& VT_LVAL
) || bt
== VT_FUNC
)
1297 size
= type_size(&sv
.type
, &align
);
1298 l
= get_temp_local_var(size
,align
);
1299 sv
.r
= VT_LOCAL
| VT_LVAL
;
1301 store(p
->r
& VT_VALMASK
, &sv
);
1302 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
1303 /* x86 specific: need to pop fp register ST0 if saved */
1304 if (r
== TREG_ST0
) {
1305 o(0xd8dd); /* fstp %st(0) */
1308 /* special long long case */
1309 if (p
->r2
< VT_CONST
&& USING_TWO_WORDS(bt
)) {
1314 /* mark that stack entry as being saved on the stack */
1315 if (p
->r
& VT_LVAL
) {
1316 /* also clear the bounded flag because the
1317 relocation address of the function was stored in
1319 p
->r
= (p
->r
& ~(VT_VALMASK
| VT_BOUNDED
)) | VT_LLOCAL
;
1321 p
->r
= VT_LVAL
| VT_LOCAL
;
1330 #ifdef TCC_TARGET_ARM
1331 /* find a register of class 'rc2' with at most one reference on stack.
1332 * If none, call get_reg(rc) */
1333 ST_FUNC
int get_reg_ex(int rc
, int rc2
)
1338 for(r
=0;r
<NB_REGS
;r
++) {
1339 if (reg_classes
[r
] & rc2
) {
1342 for(p
= vstack
; p
<= vtop
; p
++) {
1343 if ((p
->r
& VT_VALMASK
) == r
||
1355 /* find a free register of class 'rc'. If none, save one register */
1356 ST_FUNC
int get_reg(int rc
)
1361 /* find a free register */
1362 for(r
=0;r
<NB_REGS
;r
++) {
1363 if (reg_classes
[r
] & rc
) {
1366 for(p
=vstack
;p
<=vtop
;p
++) {
1367 if ((p
->r
& VT_VALMASK
) == r
||
1376 /* no register left : free the first one on the stack (VERY
1377 IMPORTANT to start from the bottom to ensure that we don't
1378 spill registers used in gen_opi()) */
1379 for(p
=vstack
;p
<=vtop
;p
++) {
1380 /* look at second register (if long long) */
1382 if (r
< VT_CONST
&& (reg_classes
[r
] & rc
))
1384 r
= p
->r
& VT_VALMASK
;
1385 if (r
< VT_CONST
&& (reg_classes
[r
] & rc
)) {
1391 /* Should never comes here */
1395 /* find a free temporary local variable (return the offset on stack) match the size and align. If none, add new temporary stack variable*/
1396 static int get_temp_local_var(int size
,int align
){
1398 struct temp_local_variable
*temp_var
;
1405 for(i
=0;i
<nb_temp_local_vars
;i
++){
1406 temp_var
=&arr_temp_local_vars
[i
];
1407 if(temp_var
->size
<size
||align
!=temp_var
->align
){
1410 /*check if temp_var is free*/
1412 for(p
=vstack
;p
<=vtop
;p
++) {
1414 if(r
==VT_LOCAL
||r
==VT_LLOCAL
){
1415 if(p
->c
.i
==temp_var
->location
){
1422 found_var
=temp_var
->location
;
1428 loc
= (loc
- size
) & -align
;
1429 if(nb_temp_local_vars
<MAX_TEMP_LOCAL_VARIABLE_NUMBER
){
1430 temp_var
=&arr_temp_local_vars
[i
];
1431 temp_var
->location
=loc
;
1432 temp_var
->size
=size
;
1433 temp_var
->align
=align
;
1434 nb_temp_local_vars
++;
1441 static void clear_temp_local_var_list(){
1442 nb_temp_local_vars
=0;
1445 /* move register 's' (of type 't') to 'r', and flush previous value of r to memory
1447 static void move_reg(int r
, int s
, int t
)
1461 /* get address of vtop (vtop MUST BE an lvalue) */
1462 ST_FUNC
void gaddrof(void)
1464 vtop
->r
&= ~VT_LVAL
;
1465 /* tricky: if saved lvalue, then we can go back to lvalue */
1466 if ((vtop
->r
& VT_VALMASK
) == VT_LLOCAL
)
1467 vtop
->r
= (vtop
->r
& ~VT_VALMASK
) | VT_LOCAL
| VT_LVAL
;
1470 #ifdef CONFIG_TCC_BCHECK
1471 /* generate a bounded pointer addition */
1472 static void gen_bounded_ptr_add(void)
1474 int save
= (vtop
[-1].r
& VT_VALMASK
) == VT_LOCAL
;
1479 vpush_helper_func(TOK___bound_ptr_add
);
1484 /* returned pointer is in REG_IRET */
1485 vtop
->r
= REG_IRET
| VT_BOUNDED
;
1488 /* relocation offset of the bounding function call point */
1489 vtop
->c
.i
= (cur_text_section
->reloc
->data_offset
- sizeof(ElfW_Rel
));
1492 /* patch pointer addition in vtop so that pointer dereferencing is
1494 static void gen_bounded_ptr_deref(void)
1504 size
= type_size(&vtop
->type
, &align
);
1506 case 1: func
= TOK___bound_ptr_indir1
; break;
1507 case 2: func
= TOK___bound_ptr_indir2
; break;
1508 case 4: func
= TOK___bound_ptr_indir4
; break;
1509 case 8: func
= TOK___bound_ptr_indir8
; break;
1510 case 12: func
= TOK___bound_ptr_indir12
; break;
1511 case 16: func
= TOK___bound_ptr_indir16
; break;
1513 /* may happen with struct member access */
1516 sym
= external_helper_sym(func
);
1518 put_extern_sym(sym
, NULL
, 0, 0);
1519 /* patch relocation */
1520 /* XXX: find a better solution ? */
1521 rel
= (ElfW_Rel
*)(cur_text_section
->reloc
->data
+ vtop
->c
.i
);
1522 rel
->r_info
= ELFW(R_INFO
)(sym
->c
, ELFW(R_TYPE
)(rel
->r_info
));
1525 /* generate lvalue bound code */
1526 static void gbound(void)
1530 vtop
->r
&= ~VT_MUSTBOUND
;
1531 /* if lvalue, then use checking code before dereferencing */
1532 if (vtop
->r
& VT_LVAL
) {
1533 /* if not VT_BOUNDED value, then make one */
1534 if (!(vtop
->r
& VT_BOUNDED
)) {
1535 /* must save type because we must set it to int to get pointer */
1537 vtop
->type
.t
= VT_PTR
;
1540 gen_bounded_ptr_add();
1544 /* then check for dereferencing */
1545 gen_bounded_ptr_deref();
1549 /* we need to call __bound_ptr_add before we start to load function
1550 args into registers */
1551 ST_FUNC
void gbound_args(int nb_args
)
1556 for (i
= 1; i
<= nb_args
; ++i
)
1557 if (vtop
[1 - i
].r
& VT_MUSTBOUND
) {
1563 sv
= vtop
- nb_args
;
1564 if (sv
->r
& VT_SYM
) {
1568 #ifndef TCC_TARGET_PE
1569 || v
== TOK_sigsetjmp
1570 || v
== TOK___sigsetjmp
1573 vpush_helper_func(TOK___bound_setjmp
);
1576 func_bound_add_epilog
= 1;
1578 #if defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64
1579 if (v
== TOK_alloca
)
1580 func_bound_add_epilog
= 1;
1583 if (v
== TOK_longjmp
) /* undo rename to __longjmp14 */
1584 sv
->sym
->asm_label
= TOK___bound_longjmp
;
1589 /* Add bounds for local symbols from S to E (via ->prev) */
1590 static void add_local_bounds(Sym
*s
, Sym
*e
)
1592 for (; s
!= e
; s
= s
->prev
) {
1593 if (!s
->v
|| (s
->r
& VT_VALMASK
) != VT_LOCAL
)
1595 /* Add arrays/structs/unions because we always take address */
1596 if ((s
->type
.t
& VT_ARRAY
)
1597 || (s
->type
.t
& VT_BTYPE
) == VT_STRUCT
1598 || s
->a
.addrtaken
) {
1599 /* add local bound info */
1600 int align
, size
= type_size(&s
->type
, &align
);
1601 addr_t
*bounds_ptr
= section_ptr_add(lbounds_section
,
1602 2 * sizeof(addr_t
));
1603 bounds_ptr
[0] = s
->c
;
1604 bounds_ptr
[1] = size
;
1610 /* Wrapper around sym_pop, that potentially also registers local bounds. */
1611 static void pop_local_syms(Sym
*b
, int keep
)
1613 #ifdef CONFIG_TCC_BCHECK
1614 if (tcc_state
->do_bounds_check
&& !keep
&& (local_scope
|| !func_var
))
1615 add_local_bounds(local_stack
, b
);
1618 tcc_add_debug_info (tcc_state
, !local_scope
, local_stack
, b
);
1619 sym_pop(&local_stack
, b
, keep
);
1622 static void incr_bf_adr(int o
)
1624 vtop
->type
= char_pointer_type
;
1628 vtop
->type
.t
= VT_BYTE
| VT_UNSIGNED
;
1632 /* single-byte load mode for packed or otherwise unaligned bitfields */
1633 static void load_packed_bf(CType
*type
, int bit_pos
, int bit_size
)
1636 save_reg_upstack(vtop
->r
, 1);
1637 vpush64(type
->t
& VT_BTYPE
, 0); // B X
1638 bits
= 0, o
= bit_pos
>> 3, bit_pos
&= 7;
1647 vpushi(bit_pos
), gen_op(TOK_SHR
), bit_pos
= 0; // X B Y
1649 vpushi((1 << n
) - 1), gen_op('&');
1652 vpushi(bits
), gen_op(TOK_SHL
);
1655 bits
+= n
, bit_size
-= n
, o
= 1;
1658 if (!(type
->t
& VT_UNSIGNED
)) {
1659 n
= ((type
->t
& VT_BTYPE
) == VT_LLONG
? 64 : 32) - bits
;
1660 vpushi(n
), gen_op(TOK_SHL
);
1661 vpushi(n
), gen_op(TOK_SAR
);
1665 /* single-byte store mode for packed or otherwise unaligned bitfields */
1666 static void store_packed_bf(int bit_pos
, int bit_size
)
1668 int bits
, n
, o
, m
, c
;
1669 c
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
1671 save_reg_upstack(vtop
->r
, 1);
1672 bits
= 0, o
= bit_pos
>> 3, bit_pos
&= 7;
1674 incr_bf_adr(o
); // X B
1676 c
? vdup() : gv_dup(); // B V X
1679 vpushi(bits
), gen_op(TOK_SHR
);
1681 vpushi(bit_pos
), gen_op(TOK_SHL
);
1686 m
= ((1 << n
) - 1) << bit_pos
;
1687 vpushi(m
), gen_op('&'); // X B V1
1688 vpushv(vtop
-1); // X B V1 B
1689 vpushi(m
& 0x80 ? ~m
& 0x7f : ~m
);
1690 gen_op('&'); // X B V1 B1
1691 gen_op('|'); // X B V2
1693 vdup(), vtop
[-1] = vtop
[-2]; // X B B V2
1694 vstore(), vpop(); // X B
1695 bits
+= n
, bit_size
-= n
, bit_pos
= 0, o
= 1;
1700 static int adjust_bf(SValue
*sv
, int bit_pos
, int bit_size
)
1703 if (0 == sv
->type
.ref
)
1705 t
= sv
->type
.ref
->auxtype
;
1706 if (t
!= -1 && t
!= VT_STRUCT
) {
1707 sv
->type
.t
= (sv
->type
.t
& ~(VT_BTYPE
| VT_LONG
)) | t
;
1713 /* store vtop a register belonging to class 'rc'. lvalues are
1714 converted to values. Cannot be used if cannot be converted to
1715 register value (such as structures). */
1716 ST_FUNC
int gv(int rc
)
1718 int r
, r2
, r_ok
, r2_ok
, rc2
, bt
;
1719 int bit_pos
, bit_size
, size
, align
;
1721 /* NOTE: get_reg can modify vstack[] */
1722 if (vtop
->type
.t
& VT_BITFIELD
) {
1725 bit_pos
= BIT_POS(vtop
->type
.t
);
1726 bit_size
= BIT_SIZE(vtop
->type
.t
);
1727 /* remove bit field info to avoid loops */
1728 vtop
->type
.t
&= ~VT_STRUCT_MASK
;
1731 type
.t
= vtop
->type
.t
& VT_UNSIGNED
;
1732 if ((vtop
->type
.t
& VT_BTYPE
) == VT_BOOL
)
1733 type
.t
|= VT_UNSIGNED
;
1735 r
= adjust_bf(vtop
, bit_pos
, bit_size
);
1737 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
)
1742 if (r
== VT_STRUCT
) {
1743 load_packed_bf(&type
, bit_pos
, bit_size
);
1745 int bits
= (type
.t
& VT_BTYPE
) == VT_LLONG
? 64 : 32;
1746 /* cast to int to propagate signedness in following ops */
1748 /* generate shifts */
1749 vpushi(bits
- (bit_pos
+ bit_size
));
1751 vpushi(bits
- bit_size
);
1752 /* NOTE: transformed to SHR if unsigned */
1757 if (is_float(vtop
->type
.t
) &&
1758 (vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
) {
1759 /* CPUs usually cannot use float constants, so we store them
1760 generically in data segment */
1761 init_params p
= { rodata_section
};
1762 unsigned long offset
;
1763 size
= type_size(&vtop
->type
, &align
);
1765 size
= 0, align
= 1;
1766 offset
= section_add(p
.sec
, size
, align
);
1767 vpush_ref(&vtop
->type
, p
.sec
, offset
, size
);
1769 init_putv(&p
, &vtop
->type
, offset
);
1772 #ifdef CONFIG_TCC_BCHECK
1773 if (vtop
->r
& VT_MUSTBOUND
)
1777 bt
= vtop
->type
.t
& VT_BTYPE
;
1779 #ifdef TCC_TARGET_RISCV64
1781 if (bt
== VT_LDOUBLE
&& rc
== RC_FLOAT
)
1784 rc2
= RC2_TYPE(bt
, rc
);
1786 /* need to reload if:
1788 - lvalue (need to dereference pointer)
1789 - already a register, but not in the right class */
1790 r
= vtop
->r
& VT_VALMASK
;
1791 r_ok
= !(vtop
->r
& VT_LVAL
) && (r
< VT_CONST
) && (reg_classes
[r
] & rc
);
1792 r2_ok
= !rc2
|| ((vtop
->r2
< VT_CONST
) && (reg_classes
[vtop
->r2
] & rc2
));
1794 if (!r_ok
|| !r2_ok
) {
1798 int load_type
= (bt
== VT_QFLOAT
) ? VT_DOUBLE
: VT_PTRDIFF_T
;
1799 int original_type
= vtop
->type
.t
;
1801 /* two register type load :
1802 expand to two words temporarily */
1803 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
) {
1805 unsigned long long ll
= vtop
->c
.i
;
1806 vtop
->c
.i
= ll
; /* first word */
1808 vtop
->r
= r
; /* save register value */
1809 vpushi(ll
>> 32); /* second word */
1810 } else if (vtop
->r
& VT_LVAL
) {
1811 /* We do not want to modify the long long pointer here.
1812 So we save any other instances down the stack */
1813 save_reg_upstack(vtop
->r
, 1);
1814 /* load from memory */
1815 vtop
->type
.t
= load_type
;
1818 vtop
[-1].r
= r
; /* save register value */
1819 /* increment pointer to get second word */
1820 vtop
->type
.t
= VT_PTRDIFF_T
;
1825 vtop
->type
.t
= load_type
;
1827 /* move registers */
1830 if (r2_ok
&& vtop
->r2
< VT_CONST
)
1833 vtop
[-1].r
= r
; /* save register value */
1834 vtop
->r
= vtop
[-1].r2
;
1836 /* Allocate second register. Here we rely on the fact that
1837 get_reg() tries first to free r2 of an SValue. */
1841 /* write second register */
1844 vtop
->type
.t
= original_type
;
1846 if (vtop
->r
== VT_CMP
)
1848 /* one register type load */
1853 #ifdef TCC_TARGET_C67
1854 /* uses register pairs for doubles */
1855 if (bt
== VT_DOUBLE
)
1862 /* generate vtop[-1] and vtop[0] in resp. classes rc1 and rc2 */
1863 ST_FUNC
void gv2(int rc1
, int rc2
)
1865 /* generate more generic register first. But VT_JMP or VT_CMP
1866 values must be generated first in all cases to avoid possible
1868 if (vtop
->r
!= VT_CMP
&& rc1
<= rc2
) {
1873 /* test if reload is needed for first register */
1874 if ((vtop
[-1].r
& VT_VALMASK
) >= VT_CONST
) {
1884 /* test if reload is needed for first register */
1885 if ((vtop
[0].r
& VT_VALMASK
) >= VT_CONST
) {
1892 /* expand 64bit on stack in two ints */
1893 ST_FUNC
void lexpand(void)
1896 u
= vtop
->type
.t
& (VT_DEFSIGN
| VT_UNSIGNED
);
1897 v
= vtop
->r
& (VT_VALMASK
| VT_LVAL
);
1898 if (v
== VT_CONST
) {
1901 } else if (v
== (VT_LVAL
|VT_CONST
) || v
== (VT_LVAL
|VT_LOCAL
)) {
1907 vtop
[0].r
= vtop
[-1].r2
;
1908 vtop
[0].r2
= vtop
[-1].r2
= VT_CONST
;
1910 vtop
[0].type
.t
= vtop
[-1].type
.t
= VT_INT
| u
;
1915 /* build a long long from two ints */
1916 static void lbuild(int t
)
1918 gv2(RC_INT
, RC_INT
);
1919 vtop
[-1].r2
= vtop
[0].r
;
1920 vtop
[-1].type
.t
= t
;
1925 /* convert stack entry to register and duplicate its value in another
1927 static void gv_dup(void)
1933 if ((t
& VT_BTYPE
) == VT_LLONG
) {
1934 if (t
& VT_BITFIELD
) {
1944 /* stack: H L L1 H1 */
1954 /* duplicate value */
1964 /* generate CPU independent (unsigned) long long operations */
1965 static void gen_opl(int op
)
1967 int t
, a
, b
, op1
, c
, i
;
1969 unsigned short reg_iret
= REG_IRET
;
1970 unsigned short reg_lret
= REG_IRE2
;
1976 func
= TOK___divdi3
;
1979 func
= TOK___udivdi3
;
1982 func
= TOK___moddi3
;
1985 func
= TOK___umoddi3
;
1992 /* call generic long long function */
1993 vpush_helper_func(func
);
1998 vtop
->r2
= reg_lret
;
2006 //pv("gen_opl A",0,2);
2012 /* stack: L1 H1 L2 H2 */
2017 vtop
[-2] = vtop
[-3];
2020 /* stack: H1 H2 L1 L2 */
2021 //pv("gen_opl B",0,4);
2027 /* stack: H1 H2 L1 L2 ML MH */
2030 /* stack: ML MH H1 H2 L1 L2 */
2034 /* stack: ML MH H1 L2 H2 L1 */
2039 /* stack: ML MH M1 M2 */
2042 } else if (op
== '+' || op
== '-') {
2043 /* XXX: add non carry method too (for MIPS or alpha) */
2049 /* stack: H1 H2 (L1 op L2) */
2052 gen_op(op1
+ 1); /* TOK_xxxC2 */
2055 /* stack: H1 H2 (L1 op L2) */
2058 /* stack: (L1 op L2) H1 H2 */
2060 /* stack: (L1 op L2) (H1 op H2) */
2068 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
2069 t
= vtop
[-1].type
.t
;
2073 /* stack: L H shift */
2075 /* constant: simpler */
2076 /* NOTE: all comments are for SHL. the other cases are
2077 done by swapping words */
2088 if (op
!= TOK_SAR
) {
2121 /* XXX: should provide a faster fallback on x86 ? */
2124 func
= TOK___ashrdi3
;
2127 func
= TOK___lshrdi3
;
2130 func
= TOK___ashldi3
;
2136 /* compare operations */
2142 /* stack: L1 H1 L2 H2 */
2144 vtop
[-1] = vtop
[-2];
2146 /* stack: L1 L2 H1 H2 */
2150 /* when values are equal, we need to compare low words. since
2151 the jump is inverted, we invert the test too. */
2154 else if (op1
== TOK_GT
)
2156 else if (op1
== TOK_ULT
)
2158 else if (op1
== TOK_UGT
)
2168 /* generate non equal test */
2170 vset_VT_CMP(TOK_NE
);
2174 /* compare low. Always unsigned */
2178 else if (op1
== TOK_LE
)
2180 else if (op1
== TOK_GT
)
2182 else if (op1
== TOK_GE
)
2185 #if 0//def TCC_TARGET_I386
2186 if (op
== TOK_NE
) { gsym(b
); break; }
2187 if (op
== TOK_EQ
) { gsym(a
); break; }
2196 static uint64_t gen_opic_sdiv(uint64_t a
, uint64_t b
)
2198 uint64_t x
= (a
>> 63 ? -a
: a
) / (b
>> 63 ? -b
: b
);
2199 return (a
^ b
) >> 63 ? -x
: x
;
2202 static int gen_opic_lt(uint64_t a
, uint64_t b
)
2204 return (a
^ (uint64_t)1 << 63) < (b
^ (uint64_t)1 << 63);
2207 /* handle integer constant optimizations and various machine
2209 static void gen_opic(int op
)
2211 SValue
*v1
= vtop
- 1;
2213 int t1
= v1
->type
.t
& VT_BTYPE
;
2214 int t2
= v2
->type
.t
& VT_BTYPE
;
2215 int c1
= (v1
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2216 int c2
= (v2
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2217 int nonconst
= (v1
->r
| v2
->r
) & VT_NONCONST
;
2218 uint64_t l1
= c1
? v1
->c
.i
: 0;
2219 uint64_t l2
= c2
? v2
->c
.i
: 0;
2220 int shm
= (t1
== VT_LLONG
) ? 63 : 31;
2222 if (t1
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t1
!= VT_PTR
))
2223 l1
= ((uint32_t)l1
|
2224 (v1
->type
.t
& VT_UNSIGNED
? 0 : -(l1
& 0x80000000)));
2225 if (t2
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t2
!= VT_PTR
))
2226 l2
= ((uint32_t)l2
|
2227 (v2
->type
.t
& VT_UNSIGNED
? 0 : -(l2
& 0x80000000)));
2231 case '+': l1
+= l2
; break;
2232 case '-': l1
-= l2
; break;
2233 case '&': l1
&= l2
; break;
2234 case '^': l1
^= l2
; break;
2235 case '|': l1
|= l2
; break;
2236 case '*': l1
*= l2
; break;
2243 /* if division by zero, generate explicit division */
2245 if (const_wanted
&& !(nocode_wanted
& unevalmask
))
2246 tcc_error("division by zero in constant");
2250 default: l1
= gen_opic_sdiv(l1
, l2
); break;
2251 case '%': l1
= l1
- l2
* gen_opic_sdiv(l1
, l2
); break;
2252 case TOK_UDIV
: l1
= l1
/ l2
; break;
2253 case TOK_UMOD
: l1
= l1
% l2
; break;
2256 case TOK_SHL
: l1
<<= (l2
& shm
); break;
2257 case TOK_SHR
: l1
>>= (l2
& shm
); break;
2259 l1
= (l1
>> 63) ? ~(~l1
>> (l2
& shm
)) : l1
>> (l2
& shm
);
2262 case TOK_ULT
: l1
= l1
< l2
; break;
2263 case TOK_UGE
: l1
= l1
>= l2
; break;
2264 case TOK_EQ
: l1
= l1
== l2
; break;
2265 case TOK_NE
: l1
= l1
!= l2
; break;
2266 case TOK_ULE
: l1
= l1
<= l2
; break;
2267 case TOK_UGT
: l1
= l1
> l2
; break;
2268 case TOK_LT
: l1
= gen_opic_lt(l1
, l2
); break;
2269 case TOK_GE
: l1
= !gen_opic_lt(l1
, l2
); break;
2270 case TOK_LE
: l1
= !gen_opic_lt(l2
, l1
); break;
2271 case TOK_GT
: l1
= gen_opic_lt(l2
, l1
); break;
2273 case TOK_LAND
: l1
= l1
&& l2
; break;
2274 case TOK_LOR
: l1
= l1
|| l2
; break;
2278 if (t1
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t1
!= VT_PTR
))
2279 l1
= ((uint32_t)l1
|
2280 (v1
->type
.t
& VT_UNSIGNED
? 0 : -(l1
& 0x80000000)));
2284 nonconst
= VT_NONCONST
;
2285 /* if commutative ops, put c2 as constant */
2286 if (c1
&& (op
== '+' || op
== '&' || op
== '^' ||
2287 op
== '|' || op
== '*' || op
== TOK_EQ
|| op
== TOK_NE
)) {
2289 c2
= c1
; //c = c1, c1 = c2, c2 = c;
2290 l2
= l1
; //l = l1, l1 = l2, l2 = l;
2292 if (!const_wanted
&&
2294 (op
== TOK_SHL
|| op
== TOK_SHR
|| op
== TOK_SAR
)) ||
2295 (l1
== -1 && op
== TOK_SAR
))) {
2296 /* treat (0 << x), (0 >> x) and (-1 >> x) as constant */
2298 } else if (!const_wanted
&&
2299 c2
&& ((l2
== 0 && (op
== '&' || op
== '*')) ||
2301 (l2
== -1 || (l2
== 0xFFFFFFFF && t2
!= VT_LLONG
))) ||
2302 (l2
== 1 && (op
== '%' || op
== TOK_UMOD
)))) {
2303 /* treat (x & 0), (x * 0), (x | -1) and (x % 1) as constant */
2308 } else if (c2
&& (((op
== '*' || op
== '/' || op
== TOK_UDIV
||
2311 ((op
== '+' || op
== '-' || op
== '|' || op
== '^' ||
2312 op
== TOK_SHL
|| op
== TOK_SHR
|| op
== TOK_SAR
) &&
2315 (l2
== -1 || (l2
== 0xFFFFFFFF && t2
!= VT_LLONG
))))) {
2316 /* filter out NOP operations like x*1, x-0, x&-1... */
2318 } else if (c2
&& (op
== '*' || op
== TOK_PDIV
|| op
== TOK_UDIV
)) {
2319 /* try to use shifts instead of muls or divs */
2320 if (l2
> 0 && (l2
& (l2
- 1)) == 0) {
2329 else if (op
== TOK_PDIV
)
2335 } else if (c2
&& (op
== '+' || op
== '-') &&
2336 (((vtop
[-1].r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == (VT_CONST
| VT_SYM
))
2337 || (vtop
[-1].r
& (VT_VALMASK
| VT_LVAL
)) == VT_LOCAL
)) {
2338 /* symbol + constant case */
2342 /* The backends can't always deal with addends to symbols
2343 larger than +-1<<31. Don't construct such. */
2350 /* call low level op generator */
2351 if (t1
== VT_LLONG
|| t2
== VT_LLONG
||
2352 (PTR_SIZE
== 8 && (t1
== VT_PTR
|| t2
== VT_PTR
)))
2358 if (vtop
->r
== VT_CONST
)
2359 vtop
->r
|= nonconst
;
2362 #if defined TCC_TARGET_X86_64 || defined TCC_TARGET_I386
2363 # define gen_negf gen_opf
2364 #elif defined TCC_TARGET_ARM
2365 void gen_negf(int op
)
2367 /* arm will detect 0-x and replace by vneg */
2368 vpushi(0), vswap(), gen_op('-');
2371 /* XXX: implement in gen_opf() for other backends too */
2372 void gen_negf(int op
)
2374 /* In IEEE negate(x) isn't subtract(0,x). Without NaNs it's
2375 subtract(-0, x), but with them it's really a sign flip
2376 operation. We implement this with bit manipulation and have
2377 to do some type reinterpretation for this, which TCC can do
2380 int align
, size
, bt
;
2382 size
= type_size(&vtop
->type
, &align
);
2383 bt
= vtop
->type
.t
& VT_BTYPE
;
2384 save_reg(gv(RC_TYPE(bt
)));
2386 incr_bf_adr(size
- 1);
2388 vpushi(0x80); /* flip sign */
2395 /* generate a floating point operation with constant propagation */
2396 static void gen_opif(int op
)
2400 #if defined _MSC_VER && defined __x86_64__
2401 /* avoid bad optimization with f1 -= f2 for f1:-0.0, f2:0.0 */
2411 /* currently, we cannot do computations with forward symbols */
2412 c1
= (v1
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2413 c2
= (v2
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2415 if (v1
->type
.t
== VT_FLOAT
) {
2418 } else if (v1
->type
.t
== VT_DOUBLE
) {
2425 /* NOTE: we only do constant propagation if finite number (not
2426 NaN or infinity) (ANSI spec) */
2427 if (!(ieee_finite(f1
) || !ieee_finite(f2
)) && !const_wanted
)
2430 case '+': f1
+= f2
; break;
2431 case '-': f1
-= f2
; break;
2432 case '*': f1
*= f2
; break;
2435 union { float f
; unsigned u
; } x1
, x2
, y
;
2436 /* If not in initializer we need to potentially generate
2437 FP exceptions at runtime, otherwise we want to fold. */
2440 /* the run-time result of 0.0/0.0 on x87, also of other compilers
2441 when used to compile the f1 /= f2 below, would be -nan */
2442 x1
.f
= f1
, x2
.f
= f2
;
2444 y
.u
= 0x7fc00000; /* nan */
2446 y
.u
= 0x7f800000; /* infinity */
2447 y
.u
|= (x1
.u
^ x2
.u
) & 0x80000000; /* set sign */
2456 /* XXX: also handles tests ? */
2462 /* XXX: overflow test ? */
2463 if (v1
->type
.t
== VT_FLOAT
) {
2465 } else if (v1
->type
.t
== VT_DOUBLE
) {
2472 if (op
== TOK_NEG
) {
2480 /* print a type. If 'varstr' is not NULL, then the variable is also
2481 printed in the type */
2483 /* XXX: add array and function pointers */
2484 static void type_to_str(char *buf
, int buf_size
,
2485 CType
*type
, const char *varstr
)
2497 pstrcat(buf
, buf_size
, "extern ");
2499 pstrcat(buf
, buf_size
, "static ");
2501 pstrcat(buf
, buf_size
, "typedef ");
2503 pstrcat(buf
, buf_size
, "inline ");
2505 if (t
& VT_VOLATILE
)
2506 pstrcat(buf
, buf_size
, "volatile ");
2507 if (t
& VT_CONSTANT
)
2508 pstrcat(buf
, buf_size
, "const ");
2510 if (((t
& VT_DEFSIGN
) && bt
== VT_BYTE
)
2511 || ((t
& VT_UNSIGNED
)
2512 && (bt
== VT_SHORT
|| bt
== VT_INT
|| bt
== VT_LLONG
)
2515 pstrcat(buf
, buf_size
, (t
& VT_UNSIGNED
) ? "unsigned " : "signed ");
2517 buf_size
-= strlen(buf
);
2553 tstr
= "long double";
2555 pstrcat(buf
, buf_size
, tstr
);
2562 pstrcat(buf
, buf_size
, tstr
);
2563 v
= type
->ref
->v
& ~SYM_STRUCT
;
2564 if (v
>= SYM_FIRST_ANOM
)
2565 pstrcat(buf
, buf_size
, "<anonymous>");
2567 pstrcat(buf
, buf_size
, get_tok_str(v
, NULL
));
2572 if (varstr
&& '*' == *varstr
) {
2573 pstrcat(buf1
, sizeof(buf1
), "(");
2574 pstrcat(buf1
, sizeof(buf1
), varstr
);
2575 pstrcat(buf1
, sizeof(buf1
), ")");
2577 pstrcat(buf1
, buf_size
, "(");
2579 while (sa
!= NULL
) {
2581 type_to_str(buf2
, sizeof(buf2
), &sa
->type
, NULL
);
2582 pstrcat(buf1
, sizeof(buf1
), buf2
);
2585 pstrcat(buf1
, sizeof(buf1
), ", ");
2587 if (s
->f
.func_type
== FUNC_ELLIPSIS
)
2588 pstrcat(buf1
, sizeof(buf1
), ", ...");
2589 pstrcat(buf1
, sizeof(buf1
), ")");
2590 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
2594 if (t
& (VT_ARRAY
|VT_VLA
)) {
2595 if (varstr
&& '*' == *varstr
)
2596 snprintf(buf1
, sizeof(buf1
), "(%s)[%d]", varstr
, s
->c
);
2598 snprintf(buf1
, sizeof(buf1
), "%s[%d]", varstr
? varstr
: "", s
->c
);
2599 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
2602 pstrcpy(buf1
, sizeof(buf1
), "*");
2603 if (t
& VT_CONSTANT
)
2604 pstrcat(buf1
, buf_size
, "const ");
2605 if (t
& VT_VOLATILE
)
2606 pstrcat(buf1
, buf_size
, "volatile ");
2608 pstrcat(buf1
, sizeof(buf1
), varstr
);
2609 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
2613 pstrcat(buf
, buf_size
, " ");
2614 pstrcat(buf
, buf_size
, varstr
);
2619 static void type_incompatibility_error(CType
* st
, CType
* dt
, const char* fmt
)
2621 char buf1
[256], buf2
[256];
2622 type_to_str(buf1
, sizeof(buf1
), st
, NULL
);
2623 type_to_str(buf2
, sizeof(buf2
), dt
, NULL
);
2624 tcc_error(fmt
, buf1
, buf2
);
2627 static void type_incompatibility_warning(CType
* st
, CType
* dt
, const char* fmt
)
2629 char buf1
[256], buf2
[256];
2630 type_to_str(buf1
, sizeof(buf1
), st
, NULL
);
2631 type_to_str(buf2
, sizeof(buf2
), dt
, NULL
);
2632 tcc_warning(fmt
, buf1
, buf2
);
2635 static int pointed_size(CType
*type
)
2638 return type_size(pointed_type(type
), &align
);
2641 static inline int is_null_pointer(SValue
*p
)
2643 if ((p
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
| VT_NONCONST
)) != VT_CONST
)
2645 return ((p
->type
.t
& VT_BTYPE
) == VT_INT
&& (uint32_t)p
->c
.i
== 0) ||
2646 ((p
->type
.t
& VT_BTYPE
) == VT_LLONG
&& p
->c
.i
== 0) ||
2647 ((p
->type
.t
& VT_BTYPE
) == VT_PTR
&&
2648 (PTR_SIZE
== 4 ? (uint32_t)p
->c
.i
== 0 : p
->c
.i
== 0) &&
2649 ((pointed_type(&p
->type
)->t
& VT_BTYPE
) == VT_VOID
) &&
2650 0 == (pointed_type(&p
->type
)->t
& (VT_CONSTANT
| VT_VOLATILE
))
2654 /* compare function types. OLD functions match any new functions */
2655 static int is_compatible_func(CType
*type1
, CType
*type2
)
2661 if (s1
->f
.func_call
!= s2
->f
.func_call
)
2663 if (s1
->f
.func_type
!= s2
->f
.func_type
2664 && s1
->f
.func_type
!= FUNC_OLD
2665 && s2
->f
.func_type
!= FUNC_OLD
)
2668 if (!is_compatible_unqualified_types(&s1
->type
, &s2
->type
))
2670 if (s1
->f
.func_type
== FUNC_OLD
|| s2
->f
.func_type
== FUNC_OLD
)
2681 /* return true if type1 and type2 are the same. If unqualified is
2682 true, qualifiers on the types are ignored.
2684 static int compare_types(CType
*type1
, CType
*type2
, int unqualified
)
2688 t1
= type1
->t
& VT_TYPE
;
2689 t2
= type2
->t
& VT_TYPE
;
2691 /* strip qualifiers before comparing */
2692 t1
&= ~(VT_CONSTANT
| VT_VOLATILE
);
2693 t2
&= ~(VT_CONSTANT
| VT_VOLATILE
);
2696 /* Default vs. explicit signedness only matters for char */
2697 if ((t1
& VT_BTYPE
) != VT_BYTE
) {
2701 /* XXX: bitfields ? */
2706 && !(type1
->ref
->c
< 0
2707 || type2
->ref
->c
< 0
2708 || type1
->ref
->c
== type2
->ref
->c
))
2711 /* test more complicated cases */
2712 bt1
= t1
& VT_BTYPE
;
2713 if (bt1
== VT_PTR
) {
2714 type1
= pointed_type(type1
);
2715 type2
= pointed_type(type2
);
2716 return is_compatible_types(type1
, type2
);
2717 } else if (bt1
== VT_STRUCT
) {
2718 return (type1
->ref
== type2
->ref
);
2719 } else if (bt1
== VT_FUNC
) {
2720 return is_compatible_func(type1
, type2
);
2721 } else if (IS_ENUM(type1
->t
) && IS_ENUM(type2
->t
)) {
2722 /* If both are enums then they must be the same, if only one is then
2723 t1 and t2 must be equal, which was checked above already. */
2724 return type1
->ref
== type2
->ref
;
2730 /* Check if OP1 and OP2 can be "combined" with operation OP, the combined
2731 type is stored in DEST if non-null (except for pointer plus/minus) . */
2732 static int combine_types(CType
*dest
, SValue
*op1
, SValue
*op2
, int op
)
2734 CType
*type1
= &op1
->type
, *type2
= &op2
->type
, type
;
2735 int t1
= type1
->t
, t2
= type2
->t
, bt1
= t1
& VT_BTYPE
, bt2
= t2
& VT_BTYPE
;
2741 if (bt1
== VT_VOID
|| bt2
== VT_VOID
) {
2742 ret
= op
== '?' ? 1 : 0;
2743 /* NOTE: as an extension, we accept void on only one side */
2745 } else if (bt1
== VT_PTR
|| bt2
== VT_PTR
) {
2746 if (op
== '+') ; /* Handled in caller */
2747 /* http://port70.net/~nsz/c/c99/n1256.html#6.5.15p6 */
2748 /* If one is a null ptr constant the result type is the other. */
2749 else if (is_null_pointer (op2
)) type
= *type1
;
2750 else if (is_null_pointer (op1
)) type
= *type2
;
2751 else if (bt1
!= bt2
) {
2752 /* accept comparison or cond-expr between pointer and integer
2754 if ((op
== '?' || TOK_ISCOND(op
))
2755 && (is_integer_btype(bt1
) || is_integer_btype(bt2
)))
2756 tcc_warning("pointer/integer mismatch in %s",
2757 op
== '?' ? "conditional expression" : "comparison");
2758 else if (op
!= '-' || !is_integer_btype(bt2
))
2760 type
= *(bt1
== VT_PTR
? type1
: type2
);
2762 CType
*pt1
= pointed_type(type1
);
2763 CType
*pt2
= pointed_type(type2
);
2764 int pbt1
= pt1
->t
& VT_BTYPE
;
2765 int pbt2
= pt2
->t
& VT_BTYPE
;
2766 int newquals
, copied
= 0;
2767 if (pbt1
!= VT_VOID
&& pbt2
!= VT_VOID
2768 && !compare_types(pt1
, pt2
, 1/*unqualif*/)) {
2769 if (op
!= '?' && !TOK_ISCOND(op
))
2772 type_incompatibility_warning(type1
, type2
,
2774 ? "pointer type mismatch in conditional expression ('%s' and '%s')"
2775 : "pointer type mismatch in comparison('%s' and '%s')");
2778 /* pointers to void get preferred, otherwise the
2779 pointed to types minus qualifs should be compatible */
2780 type
= *((pbt1
== VT_VOID
) ? type1
: type2
);
2781 /* combine qualifs */
2782 newquals
= ((pt1
->t
| pt2
->t
) & (VT_CONSTANT
| VT_VOLATILE
));
2783 if ((~pointed_type(&type
)->t
& (VT_CONSTANT
| VT_VOLATILE
))
2786 /* copy the pointer target symbol */
2787 type
.ref
= sym_push(SYM_FIELD
, &type
.ref
->type
,
2790 pointed_type(&type
)->t
|= newquals
;
2792 /* pointers to incomplete arrays get converted to
2793 pointers to completed ones if possible */
2794 if (pt1
->t
& VT_ARRAY
2795 && pt2
->t
& VT_ARRAY
2796 && pointed_type(&type
)->ref
->c
< 0
2797 && (pt1
->ref
->c
> 0 || pt2
->ref
->c
> 0))
2800 type
.ref
= sym_push(SYM_FIELD
, &type
.ref
->type
,
2802 pointed_type(&type
)->ref
=
2803 sym_push(SYM_FIELD
, &pointed_type(&type
)->ref
->type
,
2804 0, pointed_type(&type
)->ref
->c
);
2805 pointed_type(&type
)->ref
->c
=
2806 0 < pt1
->ref
->c
? pt1
->ref
->c
: pt2
->ref
->c
;
2812 } else if (bt1
== VT_STRUCT
|| bt2
== VT_STRUCT
) {
2813 if (op
!= '?' || !compare_types(type1
, type2
, 1))
2816 } else if (is_float(bt1
) || is_float(bt2
)) {
2817 if (bt1
== VT_LDOUBLE
|| bt2
== VT_LDOUBLE
) {
2818 type
.t
= VT_LDOUBLE
;
2819 } else if (bt1
== VT_DOUBLE
|| bt2
== VT_DOUBLE
) {
2824 } else if (bt1
== VT_LLONG
|| bt2
== VT_LLONG
) {
2825 /* cast to biggest op */
2826 type
.t
= VT_LLONG
| VT_LONG
;
2827 if (bt1
== VT_LLONG
)
2829 if (bt2
== VT_LLONG
)
2831 /* convert to unsigned if it does not fit in a long long */
2832 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
) ||
2833 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
))
2834 type
.t
|= VT_UNSIGNED
;
2836 /* integer operations */
2837 type
.t
= VT_INT
| (VT_LONG
& (t1
| t2
));
2838 /* convert to unsigned if it does not fit in an integer */
2839 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
) ||
2840 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
))
2841 type
.t
|= VT_UNSIGNED
;
2848 /* generic gen_op: handles types problems */
2849 ST_FUNC
void gen_op(int op
)
2851 int t1
, t2
, bt1
, bt2
, t
;
2852 CType type1
, combtype
;
2855 t1
= vtop
[-1].type
.t
;
2856 t2
= vtop
[0].type
.t
;
2857 bt1
= t1
& VT_BTYPE
;
2858 bt2
= t2
& VT_BTYPE
;
2860 if (bt1
== VT_FUNC
|| bt2
== VT_FUNC
) {
2861 if (bt2
== VT_FUNC
) {
2862 mk_pointer(&vtop
->type
);
2865 if (bt1
== VT_FUNC
) {
2867 mk_pointer(&vtop
->type
);
2872 } else if (!combine_types(&combtype
, vtop
- 1, vtop
, op
)) {
2873 tcc_error_noabort("invalid operand types for binary operation");
2875 } else if (bt1
== VT_PTR
|| bt2
== VT_PTR
) {
2876 /* at least one operand is a pointer */
2877 /* relational op: must be both pointers */
2881 /* if both pointers, then it must be the '-' op */
2882 if (bt1
== VT_PTR
&& bt2
== VT_PTR
) {
2884 tcc_error("cannot use pointers here");
2885 vpush_type_size(pointed_type(&vtop
[-1].type
), &align
);
2888 vtop
->type
.t
= VT_PTRDIFF_T
;
2892 /* exactly one pointer : must be '+' or '-'. */
2893 if (op
!= '-' && op
!= '+')
2894 tcc_error("cannot use pointers here");
2895 /* Put pointer as first operand */
2896 if (bt2
== VT_PTR
) {
2898 t
= t1
, t1
= t2
, t2
= t
;
2901 if ((vtop
[0].type
.t
& VT_BTYPE
) == VT_LLONG
)
2902 /* XXX: truncate here because gen_opl can't handle ptr + long long */
2905 type1
= vtop
[-1].type
;
2906 vpush_type_size(pointed_type(&vtop
[-1].type
), &align
);
2908 #ifdef CONFIG_TCC_BCHECK
2909 if (tcc_state
->do_bounds_check
&& !const_wanted
) {
2910 /* if bounded pointers, we generate a special code to
2917 gen_bounded_ptr_add();
2923 type1
.t
&= ~(VT_ARRAY
|VT_VLA
);
2924 /* restore the type if gen_opic() swapped operands */
2928 /* floats can only be used for a few operations */
2929 if (is_float(combtype
.t
)
2930 && op
!= '+' && op
!= '-' && op
!= '*' && op
!= '/'
2932 tcc_error("invalid operands for binary operation");
2933 else if (op
== TOK_SHR
|| op
== TOK_SAR
|| op
== TOK_SHL
) {
2934 t
= bt1
== VT_LLONG
? VT_LLONG
: VT_INT
;
2935 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (t
| VT_UNSIGNED
))
2937 t
|= (VT_LONG
& t1
);
2941 t
= t2
= combtype
.t
;
2942 /* XXX: currently, some unsigned operations are explicit, so
2943 we modify them here */
2944 if (t
& VT_UNSIGNED
) {
2951 else if (op
== TOK_LT
)
2953 else if (op
== TOK_GT
)
2955 else if (op
== TOK_LE
)
2957 else if (op
== TOK_GE
)
2963 /* special case for shifts and long long: we keep the shift as
2965 if (op
== TOK_SHR
|| op
== TOK_SAR
|| op
== TOK_SHL
)
2972 if (TOK_ISCOND(op
)) {
2973 /* relational op: the result is an int */
2974 vtop
->type
.t
= VT_INT
;
2979 // Make sure that we have converted to an rvalue:
2980 if (vtop
->r
& VT_LVAL
)
2981 gv(is_float(vtop
->type
.t
& VT_BTYPE
) ? RC_FLOAT
: RC_INT
);
2984 #if defined TCC_TARGET_ARM64 || defined TCC_TARGET_RISCV64 || defined TCC_TARGET_ARM
2985 #define gen_cvt_itof1 gen_cvt_itof
2987 /* generic itof for unsigned long long case */
2988 static void gen_cvt_itof1(int t
)
2990 if ((vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
)) ==
2991 (VT_LLONG
| VT_UNSIGNED
)) {
2994 vpush_helper_func(TOK___floatundisf
);
2995 #if LDOUBLE_SIZE != 8
2996 else if (t
== VT_LDOUBLE
)
2997 vpush_helper_func(TOK___floatundixf
);
3000 vpush_helper_func(TOK___floatundidf
);
3011 #if defined TCC_TARGET_ARM64 || defined TCC_TARGET_RISCV64
3012 #define gen_cvt_ftoi1 gen_cvt_ftoi
3014 /* generic ftoi for unsigned long long case */
3015 static void gen_cvt_ftoi1(int t
)
3018 if (t
== (VT_LLONG
| VT_UNSIGNED
)) {
3019 /* not handled natively */
3020 st
= vtop
->type
.t
& VT_BTYPE
;
3022 vpush_helper_func(TOK___fixunssfdi
);
3023 #if LDOUBLE_SIZE != 8
3024 else if (st
== VT_LDOUBLE
)
3025 vpush_helper_func(TOK___fixunsxfdi
);
3028 vpush_helper_func(TOK___fixunsdfdi
);
3039 /* special delayed cast for char/short */
3040 static void force_charshort_cast(void)
3042 int sbt
= BFGET(vtop
->r
, VT_MUSTCAST
) == 2 ? VT_LLONG
: VT_INT
;
3043 int dbt
= vtop
->type
.t
;
3044 vtop
->r
&= ~VT_MUSTCAST
;
3046 gen_cast_s(dbt
== VT_BOOL
? VT_BYTE
|VT_UNSIGNED
: dbt
);
3050 static void gen_cast_s(int t
)
3058 /* cast 'vtop' to 'type'. Casting to bitfields is forbidden. */
3059 static void gen_cast(CType
*type
)
3061 int sbt
, dbt
, sf
, df
, c
;
3062 int dbt_bt
, sbt_bt
, ds
, ss
, bits
, trunc
;
3064 /* special delayed cast for char/short */
3065 if (vtop
->r
& VT_MUSTCAST
)
3066 force_charshort_cast();
3068 /* bitfields first get cast to ints */
3069 if (vtop
->type
.t
& VT_BITFIELD
)
3072 dbt
= type
->t
& (VT_BTYPE
| VT_UNSIGNED
);
3073 sbt
= vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
);
3081 dbt_bt
= dbt
& VT_BTYPE
;
3082 sbt_bt
= sbt
& VT_BTYPE
;
3083 if (dbt_bt
== VT_VOID
)
3085 if (sbt_bt
== VT_VOID
) {
3087 cast_error(&vtop
->type
, type
);
3090 c
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
3091 #if !defined TCC_IS_NATIVE && !defined TCC_IS_NATIVE_387
3092 c
&= (dbt
!= VT_LDOUBLE
) | !!nocode_wanted
;
3095 /* constant case: we can do it now */
3096 /* XXX: in ISOC, cannot do it if error in convert */
3097 if (sbt
== VT_FLOAT
)
3098 vtop
->c
.ld
= vtop
->c
.f
;
3099 else if (sbt
== VT_DOUBLE
)
3100 vtop
->c
.ld
= vtop
->c
.d
;
3103 if (sbt_bt
== VT_LLONG
) {
3104 if ((sbt
& VT_UNSIGNED
) || !(vtop
->c
.i
>> 63))
3105 vtop
->c
.ld
= vtop
->c
.i
;
3107 vtop
->c
.ld
= -(long double)-vtop
->c
.i
;
3109 if ((sbt
& VT_UNSIGNED
) || !(vtop
->c
.i
>> 31))
3110 vtop
->c
.ld
= (uint32_t)vtop
->c
.i
;
3112 vtop
->c
.ld
= -(long double)-(uint32_t)vtop
->c
.i
;
3115 if (dbt
== VT_FLOAT
)
3116 vtop
->c
.f
= (float)vtop
->c
.ld
;
3117 else if (dbt
== VT_DOUBLE
)
3118 vtop
->c
.d
= (double)vtop
->c
.ld
;
3119 } else if (sf
&& dbt
== VT_BOOL
) {
3120 vtop
->c
.i
= (vtop
->c
.ld
!= 0);
3123 vtop
->c
.i
= vtop
->c
.ld
;
3124 else if (sbt_bt
== VT_LLONG
|| (PTR_SIZE
== 8 && sbt
== VT_PTR
))
3126 else if (sbt
& VT_UNSIGNED
)
3127 vtop
->c
.i
= (uint32_t)vtop
->c
.i
;
3129 vtop
->c
.i
= ((uint32_t)vtop
->c
.i
| -(vtop
->c
.i
& 0x80000000));
3131 if (dbt_bt
== VT_LLONG
|| (PTR_SIZE
== 8 && dbt
== VT_PTR
))
3133 else if (dbt
== VT_BOOL
)
3134 vtop
->c
.i
= (vtop
->c
.i
!= 0);
3136 uint32_t m
= dbt_bt
== VT_BYTE
? 0xff :
3137 dbt_bt
== VT_SHORT
? 0xffff :
3140 if (!(dbt
& VT_UNSIGNED
))
3141 vtop
->c
.i
|= -(vtop
->c
.i
& ((m
>> 1) + 1));
3146 } else if (dbt
== VT_BOOL
3147 && (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
))
3148 == (VT_CONST
| VT_SYM
)) {
3149 /* addresses are considered non-zero (see tcctest.c:sinit23) */
3155 /* cannot generate code for global or static initializers */
3156 if (nocode_wanted
& DATA_ONLY_WANTED
)
3159 /* non constant case: generate code */
3160 if (dbt
== VT_BOOL
) {
3161 gen_test_zero(TOK_NE
);
3167 /* convert from fp to fp */
3170 /* convert int to fp */
3173 /* convert fp to int */
3175 if (dbt_bt
!= VT_LLONG
&& dbt_bt
!= VT_INT
)
3178 goto again
; /* may need char/short cast */
3183 ds
= btype_size(dbt_bt
);
3184 ss
= btype_size(sbt_bt
);
3185 if (ds
== 0 || ss
== 0)
3188 if (IS_ENUM(type
->t
) && type
->ref
->c
< 0)
3189 tcc_error("cast to incomplete type");
3191 /* same size and no sign conversion needed */
3192 if (ds
== ss
&& ds
>= 4)
3194 if (dbt_bt
== VT_PTR
|| sbt_bt
== VT_PTR
) {
3195 tcc_warning("cast between pointer and integer of different size");
3196 if (sbt_bt
== VT_PTR
) {
3197 /* put integer type to allow logical operations below */
3198 vtop
->type
.t
= (PTR_SIZE
== 8 ? VT_LLONG
: VT_INT
);
3202 /* processor allows { int a = 0, b = *(char*)&a; }
3203 That means that if we cast to less width, we can just
3204 change the type and read it still later. */
3205 #define ALLOW_SUBTYPE_ACCESS 1
3207 if (ALLOW_SUBTYPE_ACCESS
&& (vtop
->r
& VT_LVAL
)) {
3208 /* value still in memory */
3212 if (ds
<= 4 && !(dbt
== (VT_SHORT
| VT_UNSIGNED
) && sbt
== VT_BYTE
)) {
3214 goto done
; /* no 64bit involved */
3222 /* generate high word */
3223 if (sbt
& VT_UNSIGNED
) {
3232 } else if (ss
== 8) {
3233 /* from long long: just take low order word */
3241 /* need to convert from 32bit to 64bit */
3242 if (sbt
& VT_UNSIGNED
) {
3243 #if defined(TCC_TARGET_RISCV64)
3244 /* RISC-V keeps 32bit vals in registers sign-extended.
3245 So here we need a zero-extension. */
3254 ss
= ds
, ds
= 4, dbt
= sbt
;
3255 } else if (ss
== 8) {
3256 /* RISC-V keeps 32bit vals in registers sign-extended.
3257 So here we need a sign-extension for signed types and
3258 zero-extension. for unsigned types. */
3259 #if !defined(TCC_TARGET_RISCV64)
3260 trunc
= 32; /* zero upper 32 bits for non RISC-V targets */
3269 #if defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64 || defined TCC_TARGET_ARM64
3275 bits
= (ss
- ds
) * 8;
3276 /* for unsigned, gen_op will convert SAR to SHR */
3277 vtop
->type
.t
= (ss
== 8 ? VT_LLONG
: VT_INT
) | (dbt
& VT_UNSIGNED
);
3280 vpushi(bits
- trunc
);
3287 vtop
->type
.t
&= ~ ( VT_CONSTANT
| VT_VOLATILE
| VT_ARRAY
);
3290 /* return type size as known at compile time. Put alignment at 'a' */
3291 ST_FUNC
int type_size(CType
*type
, int *a
)
3296 bt
= type
->t
& VT_BTYPE
;
3297 if (bt
== VT_STRUCT
) {
3302 } else if (bt
== VT_PTR
) {
3303 if (type
->t
& VT_ARRAY
) {
3307 ts
= type_size(&s
->type
, a
);
3309 if (ts
< 0 && s
->c
< 0)
3317 } else if (IS_ENUM(type
->t
) && type
->ref
->c
< 0) {
3319 return -1; /* incomplete enum */
3320 } else if (bt
== VT_LDOUBLE
) {
3322 return LDOUBLE_SIZE
;
3323 } else if (bt
== VT_DOUBLE
|| bt
== VT_LLONG
) {
3324 #ifdef TCC_TARGET_I386
3325 #ifdef TCC_TARGET_PE
3330 #elif defined(TCC_TARGET_ARM)
3340 } else if (bt
== VT_INT
|| bt
== VT_FLOAT
) {
3343 } else if (bt
== VT_SHORT
) {
3346 } else if (bt
== VT_QLONG
|| bt
== VT_QFLOAT
) {
3350 /* char, void, function, _Bool */
3356 /* push type size as known at runtime time on top of value stack. Put
3358 static void vpush_type_size(CType
*type
, int *a
)
3360 if (type
->t
& VT_VLA
) {
3361 type_size(&type
->ref
->type
, a
);
3362 vset(&int_type
, VT_LOCAL
|VT_LVAL
, type
->ref
->c
);
3364 int size
= type_size(type
, a
);
3366 tcc_error("unknown type size");
3375 /* return the pointed type of t */
3376 static inline CType
*pointed_type(CType
*type
)
3378 return &type
->ref
->type
;
3381 /* modify type so that its it is a pointer to type. */
3382 ST_FUNC
void mk_pointer(CType
*type
)
3385 s
= sym_push(SYM_FIELD
, type
, 0, -1);
3386 type
->t
= VT_PTR
| (type
->t
& VT_STORAGE
);
3390 /* return true if type1 and type2 are exactly the same (including
3393 static int is_compatible_types(CType
*type1
, CType
*type2
)
3395 return compare_types(type1
,type2
,0);
3398 /* return true if type1 and type2 are the same (ignoring qualifiers).
3400 static int is_compatible_unqualified_types(CType
*type1
, CType
*type2
)
3402 return compare_types(type1
,type2
,1);
3405 static void cast_error(CType
*st
, CType
*dt
)
3407 type_incompatibility_error(st
, dt
, "cannot convert '%s' to '%s'");
3410 /* verify type compatibility to store vtop in 'dt' type */
3411 static void verify_assign_cast(CType
*dt
)
3413 CType
*st
, *type1
, *type2
;
3414 int dbt
, sbt
, qualwarn
, lvl
;
3416 st
= &vtop
->type
; /* source type */
3417 dbt
= dt
->t
& VT_BTYPE
;
3418 sbt
= st
->t
& VT_BTYPE
;
3419 if (dt
->t
& VT_CONSTANT
)
3420 tcc_warning("assignment of read-only location");
3424 tcc_error("assignment to void expression");
3427 /* special cases for pointers */
3428 /* '0' can also be a pointer */
3429 if (is_null_pointer(vtop
))
3431 /* accept implicit pointer to integer cast with warning */
3432 if (is_integer_btype(sbt
)) {
3433 tcc_warning("assignment makes pointer from integer without a cast");
3436 type1
= pointed_type(dt
);
3438 type2
= pointed_type(st
);
3439 else if (sbt
== VT_FUNC
)
3440 type2
= st
; /* a function is implicitly a function pointer */
3443 if (is_compatible_types(type1
, type2
))
3445 for (qualwarn
= lvl
= 0;; ++lvl
) {
3446 if (((type2
->t
& VT_CONSTANT
) && !(type1
->t
& VT_CONSTANT
)) ||
3447 ((type2
->t
& VT_VOLATILE
) && !(type1
->t
& VT_VOLATILE
)))
3449 dbt
= type1
->t
& (VT_BTYPE
|VT_LONG
);
3450 sbt
= type2
->t
& (VT_BTYPE
|VT_LONG
);
3451 if (dbt
!= VT_PTR
|| sbt
!= VT_PTR
)
3453 type1
= pointed_type(type1
);
3454 type2
= pointed_type(type2
);
3456 if (!is_compatible_unqualified_types(type1
, type2
)) {
3457 if ((dbt
== VT_VOID
|| sbt
== VT_VOID
) && lvl
== 0) {
3458 /* void * can match anything */
3459 } else if (dbt
== sbt
3460 && is_integer_btype(sbt
& VT_BTYPE
)
3461 && IS_ENUM(type1
->t
) + IS_ENUM(type2
->t
)
3462 + !!((type1
->t
^ type2
->t
) & VT_UNSIGNED
) < 2) {
3463 /* Like GCC don't warn by default for merely changes
3464 in pointer target signedness. Do warn for different
3465 base types, though, in particular for unsigned enums
3466 and signed int targets. */
3468 tcc_warning("assignment from incompatible pointer type");
3473 tcc_warning_c(warn_discarded_qualifiers
)("assignment discards qualifiers from pointer target type");
3479 if (sbt
== VT_PTR
|| sbt
== VT_FUNC
) {
3480 tcc_warning("assignment makes integer from pointer without a cast");
3481 } else if (sbt
== VT_STRUCT
) {
3482 goto case_VT_STRUCT
;
3484 /* XXX: more tests */
3488 if (!is_compatible_unqualified_types(dt
, st
)) {
3496 static void gen_assign_cast(CType
*dt
)
3498 verify_assign_cast(dt
);
3502 /* store vtop in lvalue pushed on stack */
3503 ST_FUNC
void vstore(void)
3505 int sbt
, dbt
, ft
, r
, size
, align
, bit_size
, bit_pos
, delayed_cast
;
3507 ft
= vtop
[-1].type
.t
;
3508 sbt
= vtop
->type
.t
& VT_BTYPE
;
3509 dbt
= ft
& VT_BTYPE
;
3510 verify_assign_cast(&vtop
[-1].type
);
3512 if (sbt
== VT_STRUCT
) {
3513 /* if structure, only generate pointer */
3514 /* structure assignment : generate memcpy */
3515 size
= type_size(&vtop
->type
, &align
);
3516 /* destination, keep on stack() as result */
3518 #ifdef CONFIG_TCC_BCHECK
3519 if (vtop
->r
& VT_MUSTBOUND
)
3520 gbound(); /* check would be wrong after gaddrof() */
3522 vtop
->type
.t
= VT_PTR
;
3526 #ifdef CONFIG_TCC_BCHECK
3527 if (vtop
->r
& VT_MUSTBOUND
)
3530 vtop
->type
.t
= VT_PTR
;
3533 #ifdef TCC_TARGET_NATIVE_STRUCT_COPY
3535 #ifdef CONFIG_TCC_BCHECK
3536 && !tcc_state
->do_bounds_check
3539 gen_struct_copy(size
);
3545 /* Use memmove, rather than memcpy, as dest and src may be same: */
3548 vpush_helper_func(TOK_memmove8
);
3549 else if(!(align
& 3))
3550 vpush_helper_func(TOK_memmove4
);
3553 vpush_helper_func(TOK_memmove
);
3558 } else if (ft
& VT_BITFIELD
) {
3559 /* bitfield store handling */
3561 /* save lvalue as expression result (example: s.b = s.a = n;) */
3562 vdup(), vtop
[-1] = vtop
[-2];
3564 bit_pos
= BIT_POS(ft
);
3565 bit_size
= BIT_SIZE(ft
);
3566 /* remove bit field info to avoid loops */
3567 vtop
[-1].type
.t
= ft
& ~VT_STRUCT_MASK
;
3569 if (dbt
== VT_BOOL
) {
3570 gen_cast(&vtop
[-1].type
);
3571 vtop
[-1].type
.t
= (vtop
[-1].type
.t
& ~VT_BTYPE
) | (VT_BYTE
| VT_UNSIGNED
);
3573 r
= adjust_bf(vtop
- 1, bit_pos
, bit_size
);
3574 if (dbt
!= VT_BOOL
) {
3575 gen_cast(&vtop
[-1].type
);
3576 dbt
= vtop
[-1].type
.t
& VT_BTYPE
;
3578 if (r
== VT_STRUCT
) {
3579 store_packed_bf(bit_pos
, bit_size
);
3581 unsigned long long mask
= (1ULL << bit_size
) - 1;
3582 if (dbt
!= VT_BOOL
) {
3584 if (dbt
== VT_LLONG
)
3587 vpushi((unsigned)mask
);
3594 /* duplicate destination */
3597 /* load destination, mask and or with source */
3598 if (dbt
== VT_LLONG
)
3599 vpushll(~(mask
<< bit_pos
));
3601 vpushi(~((unsigned)mask
<< bit_pos
));
3606 /* ... and discard */
3609 } else if (dbt
== VT_VOID
) {
3612 /* optimize char/short casts */
3614 if ((dbt
== VT_BYTE
|| dbt
== VT_SHORT
)
3615 && is_integer_btype(sbt
)
3617 if ((vtop
->r
& VT_MUSTCAST
)
3618 && btype_size(dbt
) > btype_size(sbt
)
3620 force_charshort_cast();
3623 gen_cast(&vtop
[-1].type
);
3626 #ifdef CONFIG_TCC_BCHECK
3627 /* bound check case */
3628 if (vtop
[-1].r
& VT_MUSTBOUND
) {
3634 gv(RC_TYPE(dbt
)); /* generate value */
3637 vtop
->r
|= BFVAL(VT_MUSTCAST
, (sbt
== VT_LLONG
) + 1);
3638 //tcc_warning("deley cast %x -> %x", sbt, dbt);
3639 vtop
->type
.t
= ft
& VT_TYPE
;
3642 /* if lvalue was saved on stack, must read it */
3643 if ((vtop
[-1].r
& VT_VALMASK
) == VT_LLOCAL
) {
3645 r
= get_reg(RC_INT
);
3646 sv
.type
.t
= VT_PTRDIFF_T
;
3647 sv
.r
= VT_LOCAL
| VT_LVAL
;
3648 sv
.c
.i
= vtop
[-1].c
.i
;
3650 vtop
[-1].r
= r
| VT_LVAL
;
3653 r
= vtop
->r
& VT_VALMASK
;
3654 /* two word case handling :
3655 store second register at word + 4 (or +8 for x86-64) */
3656 if (USING_TWO_WORDS(dbt
)) {
3657 int load_type
= (dbt
== VT_QFLOAT
) ? VT_DOUBLE
: VT_PTRDIFF_T
;
3658 vtop
[-1].type
.t
= load_type
;
3661 /* convert to int to increment easily */
3662 vtop
->type
.t
= VT_PTRDIFF_T
;
3668 vtop
[-1].type
.t
= load_type
;
3669 /* XXX: it works because r2 is spilled last ! */
3670 store(vtop
->r2
, vtop
- 1);
3676 vtop
--; /* NOT vpop() because on x86 it would flush the fp stack */
3680 /* post defines POST/PRE add. c is the token ++ or -- */
3681 ST_FUNC
void inc(int post
, int c
)
3684 vdup(); /* save lvalue */
3686 gv_dup(); /* duplicate value */
3691 vpushi(c
- TOK_MID
);
3693 vstore(); /* store value */
3695 vpop(); /* if post op, return saved value */
3698 ST_FUNC
void parse_mult_str (CString
*astr
, const char *msg
)
3700 /* read the string */
3704 while (tok
== TOK_STR
) {
3705 /* XXX: add \0 handling too ? */
3706 cstr_cat(astr
, tokc
.str
.data
, -1);
3709 cstr_ccat(astr
, '\0');
3712 /* If I is >= 1 and a power of two, returns log2(i)+1.
3713 If I is 0 returns 0. */
3714 ST_FUNC
int exact_log2p1(int i
)
3719 for (ret
= 1; i
>= 1 << 8; ret
+= 8)
3730 /* Parse __attribute__((...)) GNUC extension. */
3731 static void parse_attribute(AttributeDef
*ad
)
3737 if (tok
!= TOK_ATTRIBUTE1
&& tok
!= TOK_ATTRIBUTE2
)
3742 while (tok
!= ')') {
3743 if (tok
< TOK_IDENT
)
3744 expect("attribute name");
3756 tcc_warning_c(warn_implicit_function_declaration
)(
3757 "implicit declaration of function '%s'", get_tok_str(tok
, &tokc
));
3758 s
= external_global_sym(tok
, &func_old_type
);
3759 } else if ((s
->type
.t
& VT_BTYPE
) != VT_FUNC
)
3760 tcc_error("'%s' is not declared as function", get_tok_str(tok
, &tokc
));
3761 ad
->cleanup_func
= s
;
3766 case TOK_CONSTRUCTOR1
:
3767 case TOK_CONSTRUCTOR2
:
3768 ad
->f
.func_ctor
= 1;
3770 case TOK_DESTRUCTOR1
:
3771 case TOK_DESTRUCTOR2
:
3772 ad
->f
.func_dtor
= 1;
3774 case TOK_ALWAYS_INLINE1
:
3775 case TOK_ALWAYS_INLINE2
:
3776 ad
->f
.func_alwinl
= 1;
3781 parse_mult_str(&astr
, "section name");
3782 ad
->section
= find_section(tcc_state
, (char *)astr
.data
);
3789 parse_mult_str(&astr
, "alias(\"target\")");
3790 ad
->alias_target
= /* save string as token, for later */
3791 tok_alloc((char*)astr
.data
, astr
.size
-1)->tok
;
3795 case TOK_VISIBILITY1
:
3796 case TOK_VISIBILITY2
:
3798 parse_mult_str(&astr
,
3799 "visibility(\"default|hidden|internal|protected\")");
3800 if (!strcmp (astr
.data
, "default"))
3801 ad
->a
.visibility
= STV_DEFAULT
;
3802 else if (!strcmp (astr
.data
, "hidden"))
3803 ad
->a
.visibility
= STV_HIDDEN
;
3804 else if (!strcmp (astr
.data
, "internal"))
3805 ad
->a
.visibility
= STV_INTERNAL
;
3806 else if (!strcmp (astr
.data
, "protected"))
3807 ad
->a
.visibility
= STV_PROTECTED
;
3809 expect("visibility(\"default|hidden|internal|protected\")");
3818 if (n
<= 0 || (n
& (n
- 1)) != 0)
3819 tcc_error("alignment must be a positive power of two");
3824 ad
->a
.aligned
= exact_log2p1(n
);
3825 if (n
!= 1 << (ad
->a
.aligned
- 1))
3826 tcc_error("alignment of %d is larger than implemented", n
);
3842 /* currently, no need to handle it because tcc does not
3843 track unused objects */
3847 ad
->f
.func_noreturn
= 1;
3852 ad
->f
.func_call
= FUNC_CDECL
;
3857 ad
->f
.func_call
= FUNC_STDCALL
;
3859 #ifdef TCC_TARGET_I386
3869 ad
->f
.func_call
= FUNC_FASTCALL1
+ n
- 1;
3875 ad
->f
.func_call
= FUNC_FASTCALLW
;
3882 ad
->attr_mode
= VT_LLONG
+ 1;
3885 ad
->attr_mode
= VT_BYTE
+ 1;
3888 ad
->attr_mode
= VT_SHORT
+ 1;
3892 ad
->attr_mode
= VT_INT
+ 1;
3895 tcc_warning("__mode__(%s) not supported\n", get_tok_str(tok
, NULL
));
3902 ad
->a
.dllexport
= 1;
3904 case TOK_NODECORATE
:
3905 ad
->a
.nodecorate
= 1;
3908 ad
->a
.dllimport
= 1;
3911 tcc_warning_c(warn_unsupported
)("'%s' attribute ignored", get_tok_str(t
, NULL
));
3912 /* skip parameters */
3914 int parenthesis
= 0;
3918 else if (tok
== ')')
3921 } while (parenthesis
&& tok
!= -1);
3934 static Sym
* find_field (CType
*type
, int v
, int *cumofs
)
3937 int v1
= v
| SYM_FIELD
;
3939 while ((s
= s
->next
) != NULL
) {
3944 if ((s
->type
.t
& VT_BTYPE
) == VT_STRUCT
3945 && s
->v
>= (SYM_FIRST_ANOM
| SYM_FIELD
)) {
3946 /* try to find field in anonymous sub-struct/union */
3947 Sym
*ret
= find_field (&s
->type
, v1
, cumofs
);
3955 if (!(v
& SYM_FIELD
)) { /* top-level call */
3958 tcc_error("dereferencing incomplete type '%s'",
3959 get_tok_str(s
->v
& ~SYM_STRUCT
, 0));
3961 tcc_error("field not found: %s",
3962 get_tok_str(v
, &tokc
));
3967 static void check_fields (CType
*type
, int check
)
3971 while ((s
= s
->next
) != NULL
) {
3972 int v
= s
->v
& ~SYM_FIELD
;
3973 if (v
< SYM_FIRST_ANOM
) {
3974 TokenSym
*ts
= table_ident
[v
- TOK_IDENT
];
3975 if (check
&& (ts
->tok
& SYM_FIELD
))
3976 tcc_error("duplicate member '%s'", get_tok_str(v
, NULL
));
3977 ts
->tok
^= SYM_FIELD
;
3978 } else if ((s
->type
.t
& VT_BTYPE
) == VT_STRUCT
)
3979 check_fields (&s
->type
, check
);
3983 static void struct_layout(CType
*type
, AttributeDef
*ad
)
3985 int size
, align
, maxalign
, offset
, c
, bit_pos
, bit_size
;
3986 int packed
, a
, bt
, prevbt
, prev_bit_size
;
3987 int pcc
= !tcc_state
->ms_bitfields
;
3988 int pragma_pack
= *tcc_state
->pack_stack_ptr
;
3995 prevbt
= VT_STRUCT
; /* make it never match */
4000 for (f
= type
->ref
->next
; f
; f
= f
->next
) {
4001 if (f
->type
.t
& VT_BITFIELD
)
4002 bit_size
= BIT_SIZE(f
->type
.t
);
4005 size
= type_size(&f
->type
, &align
);
4006 a
= f
->a
.aligned
? 1 << (f
->a
.aligned
- 1) : 0;
4009 if (pcc
&& bit_size
== 0) {
4010 /* in pcc mode, packing does not affect zero-width bitfields */
4013 /* in pcc mode, attribute packed overrides if set. */
4014 if (pcc
&& (f
->a
.packed
|| ad
->a
.packed
))
4017 /* pragma pack overrides align if lesser and packs bitfields always */
4020 if (pragma_pack
< align
)
4021 align
= pragma_pack
;
4022 /* in pcc mode pragma pack also overrides individual align */
4023 if (pcc
&& pragma_pack
< a
)
4027 /* some individual align was specified */
4031 if (type
->ref
->type
.t
== VT_UNION
) {
4032 if (pcc
&& bit_size
>= 0)
4033 size
= (bit_size
+ 7) >> 3;
4038 } else if (bit_size
< 0) {
4040 c
+= (bit_pos
+ 7) >> 3;
4041 c
= (c
+ align
- 1) & -align
;
4050 /* A bit-field. Layout is more complicated. There are two
4051 options: PCC (GCC) compatible and MS compatible */
4053 /* In PCC layout a bit-field is placed adjacent to the
4054 preceding bit-fields, except if:
4056 - an individual alignment was given
4057 - it would overflow its base type container and
4058 there is no packing */
4059 if (bit_size
== 0) {
4061 c
= (c
+ ((bit_pos
+ 7) >> 3) + align
- 1) & -align
;
4063 } else if (f
->a
.aligned
) {
4065 } else if (!packed
) {
4067 int ofs
= ((c
* 8 + bit_pos
) % a8
+ bit_size
+ a8
- 1) / a8
;
4068 if (ofs
> size
/ align
)
4072 /* in pcc mode, long long bitfields have type int if they fit */
4073 if (size
== 8 && bit_size
<= 32)
4074 f
->type
.t
= (f
->type
.t
& ~VT_BTYPE
) | VT_INT
, size
= 4;
4076 while (bit_pos
>= align
* 8)
4077 c
+= align
, bit_pos
-= align
* 8;
4080 /* In PCC layout named bit-fields influence the alignment
4081 of the containing struct using the base types alignment,
4082 except for packed fields (which here have correct align). */
4083 if (f
->v
& SYM_FIRST_ANOM
4084 // && bit_size // ??? gcc on ARM/rpi does that
4089 bt
= f
->type
.t
& VT_BTYPE
;
4090 if ((bit_pos
+ bit_size
> size
* 8)
4091 || (bit_size
> 0) == (bt
!= prevbt
)
4093 c
= (c
+ align
- 1) & -align
;
4096 /* In MS bitfield mode a bit-field run always uses
4097 at least as many bits as the underlying type.
4098 To start a new run it's also required that this
4099 or the last bit-field had non-zero width. */
4100 if (bit_size
|| prev_bit_size
)
4103 /* In MS layout the records alignment is normally
4104 influenced by the field, except for a zero-width
4105 field at the start of a run (but by further zero-width
4106 fields it is again). */
4107 if (bit_size
== 0 && prevbt
!= bt
)
4110 prev_bit_size
= bit_size
;
4113 f
->type
.t
= (f
->type
.t
& ~(0x3f << VT_STRUCT_SHIFT
))
4114 | (bit_pos
<< VT_STRUCT_SHIFT
);
4115 bit_pos
+= bit_size
;
4117 if (align
> maxalign
)
4121 printf("set field %s offset %-2d size %-2d align %-2d",
4122 get_tok_str(f
->v
& ~SYM_FIELD
, NULL
), offset
, size
, align
);
4123 if (f
->type
.t
& VT_BITFIELD
) {
4124 printf(" pos %-2d bits %-2d",
4137 c
+= (bit_pos
+ 7) >> 3;
4139 /* store size and alignment */
4140 a
= bt
= ad
->a
.aligned
? 1 << (ad
->a
.aligned
- 1) : 1;
4144 if (pragma_pack
&& pragma_pack
< maxalign
&& 0 == pcc
) {
4145 /* can happen if individual align for some member was given. In
4146 this case MSVC ignores maxalign when aligning the size */
4151 c
= (c
+ a
- 1) & -a
;
4155 printf("struct size %-2d align %-2d\n\n", c
, a
), fflush(stdout
);
4158 /* check whether we can access bitfields by their type */
4159 for (f
= type
->ref
->next
; f
; f
= f
->next
) {
4163 if (0 == (f
->type
.t
& VT_BITFIELD
))
4167 bit_size
= BIT_SIZE(f
->type
.t
);
4170 bit_pos
= BIT_POS(f
->type
.t
);
4171 size
= type_size(&f
->type
, &align
);
4173 if (bit_pos
+ bit_size
<= size
* 8 && f
->c
+ size
<= c
4174 #ifdef TCC_TARGET_ARM
4175 && !(f
->c
& (align
- 1))
4180 /* try to access the field using a different type */
4181 c0
= -1, s
= align
= 1;
4184 px
= f
->c
* 8 + bit_pos
;
4185 cx
= (px
>> 3) & -align
;
4186 px
= px
- (cx
<< 3);
4189 s
= (px
+ bit_size
+ 7) >> 3;
4199 s
= type_size(&t
, &align
);
4203 if (px
+ bit_size
<= s
* 8 && cx
+ s
<= c
4204 #ifdef TCC_TARGET_ARM
4205 && !(cx
& (align
- 1))
4208 /* update offset and bit position */
4211 f
->type
.t
= (f
->type
.t
& ~(0x3f << VT_STRUCT_SHIFT
))
4212 | (bit_pos
<< VT_STRUCT_SHIFT
);
4216 printf("FIX field %s offset %-2d size %-2d align %-2d "
4217 "pos %-2d bits %-2d\n",
4218 get_tok_str(f
->v
& ~SYM_FIELD
, NULL
),
4219 cx
, s
, align
, px
, bit_size
);
4222 /* fall back to load/store single-byte wise */
4223 f
->auxtype
= VT_STRUCT
;
4225 printf("FIX field %s : load byte-wise\n",
4226 get_tok_str(f
->v
& ~SYM_FIELD
, NULL
));
4232 static void do_Static_assert(void);
4234 /* enum/struct/union declaration. u is VT_ENUM/VT_STRUCT/VT_UNION */
4235 static void struct_decl(CType
*type
, int u
)
4237 int v
, c
, size
, align
, flexible
;
4238 int bit_size
, bsize
, bt
;
4240 AttributeDef ad
, ad1
;
4243 memset(&ad
, 0, sizeof ad
);
4245 parse_attribute(&ad
);
4249 /* struct already defined ? return it */
4251 expect("struct/union/enum name");
4253 if (s
&& (s
->sym_scope
== local_scope
|| tok
!= '{')) {
4256 if (u
== VT_ENUM
&& IS_ENUM(s
->type
.t
))
4258 tcc_error("redefinition of '%s'", get_tok_str(v
, NULL
));
4263 /* Record the original enum/struct/union token. */
4264 type1
.t
= u
== VT_ENUM
? u
| VT_INT
| VT_UNSIGNED
: u
;
4266 /* we put an undefined size for struct/union */
4267 s
= sym_push(v
| SYM_STRUCT
, &type1
, 0, -1);
4268 s
->r
= 0; /* default alignment is zero as gcc */
4270 type
->t
= s
->type
.t
;
4276 tcc_error("struct/union/enum already defined");
4278 /* cannot be empty */
4279 /* non empty enums are not allowed */
4282 long long ll
= 0, pl
= 0, nl
= 0;
4285 /* enum symbols have static storage */
4286 t
.t
= VT_INT
|VT_STATIC
|VT_ENUM_VAL
;
4290 expect("identifier");
4292 if (ss
&& !local_stack
)
4293 tcc_error("redefinition of enumerator '%s'",
4294 get_tok_str(v
, NULL
));
4298 ll
= expr_const64();
4300 ss
= sym_push(v
, &t
, VT_CONST
, 0);
4302 *ps
= ss
, ps
= &ss
->next
;
4311 /* NOTE: we accept a trailing comma */
4316 /* set integral type of the enum */
4319 if (pl
!= (unsigned)pl
)
4320 t
.t
= (LONG_SIZE
==8 ? VT_LLONG
|VT_LONG
: VT_LLONG
);
4322 } else if (pl
!= (int)pl
|| nl
!= (int)nl
)
4323 t
.t
= (LONG_SIZE
==8 ? VT_LLONG
|VT_LONG
: VT_LLONG
);
4324 s
->type
.t
= type
->t
= t
.t
| VT_ENUM
;
4326 /* set type for enum members */
4327 for (ss
= s
->next
; ss
; ss
= ss
->next
) {
4329 if (ll
== (int)ll
) /* default is int if it fits */
4331 if (t
.t
& VT_UNSIGNED
) {
4332 ss
->type
.t
|= VT_UNSIGNED
;
4333 if (ll
== (unsigned)ll
)
4336 ss
->type
.t
= (ss
->type
.t
& ~VT_BTYPE
)
4337 | (LONG_SIZE
==8 ? VT_LLONG
|VT_LONG
: VT_LLONG
);
4342 while (tok
!= '}') {
4343 if (tok
== TOK_STATIC_ASSERT
) {
4347 if (!parse_btype(&btype
, &ad1
, 0)) {
4353 tcc_error("flexible array member '%s' not at the end of struct",
4354 get_tok_str(v
, NULL
));
4360 type_decl(&type1
, &ad1
, &v
, TYPE_DIRECT
);
4362 if ((type1
.t
& VT_BTYPE
) != VT_STRUCT
)
4363 expect("identifier");
4365 int v
= btype
.ref
->v
;
4366 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
4367 if (tcc_state
->ms_extensions
== 0)
4368 expect("identifier");
4372 if (type_size(&type1
, &align
) < 0) {
4373 if ((u
== VT_STRUCT
) && (type1
.t
& VT_ARRAY
) && c
)
4376 tcc_error("field '%s' has incomplete type",
4377 get_tok_str(v
, NULL
));
4379 if ((type1
.t
& VT_BTYPE
) == VT_FUNC
||
4380 (type1
.t
& VT_BTYPE
) == VT_VOID
||
4381 (type1
.t
& VT_STORAGE
))
4382 tcc_error("invalid type for '%s'",
4383 get_tok_str(v
, NULL
));
4387 bit_size
= expr_const();
4388 /* XXX: handle v = 0 case for messages */
4390 tcc_error("negative width in bit-field '%s'",
4391 get_tok_str(v
, NULL
));
4392 if (v
&& bit_size
== 0)
4393 tcc_error("zero width for bit-field '%s'",
4394 get_tok_str(v
, NULL
));
4395 parse_attribute(&ad1
);
4397 size
= type_size(&type1
, &align
);
4398 if (bit_size
>= 0) {
4399 bt
= type1
.t
& VT_BTYPE
;
4405 tcc_error("bitfields must have scalar type");
4407 if (bit_size
> bsize
) {
4408 tcc_error("width of '%s' exceeds its type",
4409 get_tok_str(v
, NULL
));
4410 } else if (bit_size
== bsize
4411 && !ad
.a
.packed
&& !ad1
.a
.packed
) {
4412 /* no need for bit fields */
4414 } else if (bit_size
== 64) {
4415 tcc_error("field width 64 not implemented");
4417 type1
.t
= (type1
.t
& ~VT_STRUCT_MASK
)
4419 | (bit_size
<< (VT_STRUCT_SHIFT
+ 6));
4422 if (v
!= 0 || (type1
.t
& VT_BTYPE
) == VT_STRUCT
) {
4423 /* Remember we've seen a real field to check
4424 for placement of flexible array member. */
4427 /* If member is a struct or bit-field, enforce
4428 placing into the struct (as anonymous). */
4430 ((type1
.t
& VT_BTYPE
) == VT_STRUCT
||
4435 ss
= sym_push(v
| SYM_FIELD
, &type1
, 0, 0);
4440 if (tok
== ';' || tok
== TOK_EOF
)
4447 parse_attribute(&ad
);
4448 if (ad
.cleanup_func
) {
4449 tcc_warning("attribute '__cleanup__' ignored on type");
4451 check_fields(type
, 1);
4452 check_fields(type
, 0);
4453 struct_layout(type
, &ad
);
4455 tcc_debug_fix_anon(tcc_state
, type
);
4460 static void sym_to_attr(AttributeDef
*ad
, Sym
*s
)
4462 merge_symattr(&ad
->a
, &s
->a
);
4463 merge_funcattr(&ad
->f
, &s
->f
);
4466 /* Add type qualifiers to a type. If the type is an array then the qualifiers
4467 are added to the element type, copied because it could be a typedef. */
4468 static void parse_btype_qualify(CType
*type
, int qualifiers
)
4470 while (type
->t
& VT_ARRAY
) {
4471 type
->ref
= sym_push(SYM_FIELD
, &type
->ref
->type
, 0, type
->ref
->c
);
4472 type
= &type
->ref
->type
;
4474 type
->t
|= qualifiers
;
4477 /* return 0 if no type declaration. otherwise, return the basic type
4480 static int parse_btype(CType
*type
, AttributeDef
*ad
, int ignore_label
)
4482 int t
, u
, bt
, st
, type_found
, typespec_found
, g
, n
;
4486 memset(ad
, 0, sizeof(AttributeDef
));
4496 /* currently, we really ignore extension */
4506 if (u
== VT_SHORT
|| u
== VT_LONG
) {
4507 if (st
!= -1 || (bt
!= -1 && bt
!= VT_INT
))
4508 tmbt
: tcc_error("too many basic types");
4511 if (bt
!= -1 || (st
!= -1 && u
!= VT_INT
))
4516 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | u
;
4533 memset(&ad1
, 0, sizeof(AttributeDef
));
4534 if (parse_btype(&type1
, &ad1
, 0)) {
4535 type_decl(&type1
, &ad1
, &n
, TYPE_ABSTRACT
);
4537 n
= 1 << (ad1
.a
.aligned
- 1);
4539 type_size(&type1
, &n
);
4542 if (n
< 0 || (n
& (n
- 1)) != 0)
4543 tcc_error("alignment must be a positive power of two");
4546 ad
->a
.aligned
= exact_log2p1(n
);
4550 if ((t
& VT_BTYPE
) == VT_DOUBLE
) {
4551 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_LDOUBLE
;
4552 } else if ((t
& (VT_BTYPE
|VT_LONG
)) == VT_LONG
) {
4553 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_LLONG
;
4560 #ifdef TCC_TARGET_ARM64
4562 /* GCC's __uint128_t appears in some Linux header files. Make it a
4563 synonym for long double to get the size and alignment right. */
4571 tcc_error("_Complex is not yet supported");
4576 if ((t
& (VT_BTYPE
|VT_LONG
)) == VT_LONG
) {
4577 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_LDOUBLE
;
4585 struct_decl(&type1
, VT_ENUM
);
4588 type
->ref
= type1
.ref
;
4591 struct_decl(&type1
, VT_STRUCT
);
4594 struct_decl(&type1
, VT_UNION
);
4597 /* type modifiers */
4601 parse_btype_qualify(type
, VT_ATOMIC
);
4604 parse_expr_type(&type1
);
4605 /* remove all storage modifiers except typedef */
4606 type1
.t
&= ~(VT_STORAGE
&~VT_TYPEDEF
);
4608 sym_to_attr(ad
, type1
.ref
);
4616 parse_btype_qualify(type
, VT_CONSTANT
);
4624 parse_btype_qualify(type
, VT_VOLATILE
);
4631 if ((t
& (VT_DEFSIGN
|VT_UNSIGNED
)) == (VT_DEFSIGN
|VT_UNSIGNED
))
4632 tcc_error("signed and unsigned modifier");
4645 if ((t
& (VT_DEFSIGN
|VT_UNSIGNED
)) == VT_DEFSIGN
)
4646 tcc_error("signed and unsigned modifier");
4647 t
|= VT_DEFSIGN
| VT_UNSIGNED
;
4663 if (t
& (VT_EXTERN
|VT_STATIC
|VT_TYPEDEF
) & ~g
)
4664 tcc_error("multiple storage classes");
4676 ad
->f
.func_noreturn
= 1;
4678 /* GNUC attribute */
4679 case TOK_ATTRIBUTE1
:
4680 case TOK_ATTRIBUTE2
:
4681 parse_attribute(ad
);
4682 if (ad
->attr_mode
) {
4683 u
= ad
->attr_mode
-1;
4684 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | u
;
4692 parse_expr_type(&type1
);
4693 /* remove all storage modifiers except typedef */
4694 type1
.t
&= ~(VT_STORAGE
&~VT_TYPEDEF
);
4696 sym_to_attr(ad
, type1
.ref
);
4698 case TOK_THREAD_LOCAL
:
4699 tcc_error("_Thread_local is not implemented");
4704 if (!s
|| !(s
->type
.t
& VT_TYPEDEF
))
4708 if (tok
== ':' && ignore_label
) {
4709 /* ignore if it's a label */
4714 t
&= ~(VT_BTYPE
|VT_LONG
);
4715 u
= t
& ~(VT_CONSTANT
| VT_VOLATILE
), t
^= u
;
4716 type
->t
= (s
->type
.t
& ~VT_TYPEDEF
) | u
;
4717 type
->ref
= s
->type
.ref
;
4719 parse_btype_qualify(type
, t
);
4721 /* get attributes from typedef */
4730 if (tcc_state
->char_is_unsigned
) {
4731 if ((t
& (VT_DEFSIGN
|VT_BTYPE
)) == VT_BYTE
)
4734 /* VT_LONG is used just as a modifier for VT_INT / VT_LLONG */
4735 bt
= t
& (VT_BTYPE
|VT_LONG
);
4737 t
|= LONG_SIZE
== 8 ? VT_LLONG
: VT_INT
;
4738 #ifdef TCC_USING_DOUBLE_FOR_LDOUBLE
4739 if (bt
== VT_LDOUBLE
)
4740 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | (VT_DOUBLE
|VT_LONG
);
4746 /* convert a function parameter type (array to pointer and function to
4747 function pointer) */
4748 static inline void convert_parameter_type(CType
*pt
)
4750 /* remove const and volatile qualifiers (XXX: const could be used
4751 to indicate a const function parameter */
4752 pt
->t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
4753 /* array must be transformed to pointer according to ANSI C */
4755 if ((pt
->t
& VT_BTYPE
) == VT_FUNC
) {
4760 ST_FUNC
void parse_asm_str(CString
*astr
)
4763 parse_mult_str(astr
, "string constant");
4766 /* Parse an asm label and return the token */
4767 static int asm_label_instr(void)
4773 parse_asm_str(&astr
);
4776 printf("asm_alias: \"%s\"\n", (char *)astr
.data
);
4778 v
= tok_alloc(astr
.data
, astr
.size
- 1)->tok
;
4783 static int post_type(CType
*type
, AttributeDef
*ad
, int storage
, int td
)
4785 int n
, l
, t1
, arg_size
, align
;
4786 Sym
**plast
, *s
, *first
;
4789 TokenString
*vla_array_tok
= NULL
;
4790 int *vla_array_str
= NULL
;
4793 /* function type, or recursive declarator (return if so) */
4795 if (TYPE_DIRECT
== (td
& (TYPE_DIRECT
|TYPE_ABSTRACT
)))
4799 else if (parse_btype(&pt
, &ad1
, 0))
4801 else if (td
& (TYPE_DIRECT
|TYPE_ABSTRACT
)) {
4802 merge_attr (ad
, &ad1
);
4813 /* read param name and compute offset */
4814 if (l
!= FUNC_OLD
) {
4815 if ((pt
.t
& VT_BTYPE
) == VT_VOID
&& tok
== ')')
4817 type_decl(&pt
, &ad1
, &n
, TYPE_DIRECT
| TYPE_ABSTRACT
| TYPE_PARAM
);
4818 if ((pt
.t
& VT_BTYPE
) == VT_VOID
)
4819 tcc_error("parameter declared as void");
4824 pt
.t
= VT_VOID
; /* invalid type */
4829 expect("identifier");
4830 convert_parameter_type(&pt
);
4831 arg_size
+= (type_size(&pt
, &align
) + PTR_SIZE
- 1) / PTR_SIZE
;
4832 /* these symbols may be evaluated for VLArrays (see below, under
4833 nocode_wanted) which is why we push them here as normal symbols
4834 temporarily. Example: int func(int a, int b[++a]); */
4835 s
= sym_push(n
, &pt
, VT_LOCAL
|VT_LVAL
, 0);
4841 if (l
== FUNC_NEW
&& tok
== TOK_DOTS
) {
4846 if (l
== FUNC_NEW
&& !parse_btype(&pt
, &ad1
, 0))
4847 tcc_error("invalid type");
4850 /* if no parameters, then old type prototype */
4853 /* remove parameter symbols from token table, keep on stack */
4855 sym_pop(local_stack
? &local_stack
: &global_stack
, first
->prev
, 1);
4856 for (s
= first
; s
; s
= s
->next
)
4860 /* NOTE: const is ignored in returned type as it has a special
4861 meaning in gcc / C++ */
4862 type
->t
&= ~VT_CONSTANT
;
4863 /* some ancient pre-K&R C allows a function to return an array
4864 and the array brackets to be put after the arguments, such
4865 that "int c()[]" means something like "int[] c()" */
4868 skip(']'); /* only handle simple "[]" */
4871 /* we push a anonymous symbol which will contain the function prototype */
4872 ad
->f
.func_args
= arg_size
;
4873 ad
->f
.func_type
= l
;
4874 s
= sym_push(SYM_FIELD
, type
, 0, 0);
4880 } else if (tok
== '[') {
4881 int saved_nocode_wanted
= nocode_wanted
;
4882 /* array definition */
4886 if (td
& TYPE_PARAM
) while (1) {
4887 /* XXX The optional type-quals and static should only be accepted
4888 in parameter decls. The '*' as well, and then even only
4889 in prototypes (not function defs). */
4891 case TOK_RESTRICT1
: case TOK_RESTRICT2
: case TOK_RESTRICT3
:
4902 /* Code generation is not done now but has to be done
4903 at start of function. Save code here for later use. */
4905 skip_or_save_block(&vla_array_tok
);
4907 vla_array_str
= vla_array_tok
->str
;
4908 begin_macro(vla_array_tok
, 2);
4917 } else if (tok
!= ']') {
4918 if (!local_stack
|| (storage
& VT_STATIC
))
4919 vpushi(expr_const());
4921 /* VLAs (which can only happen with local_stack && !VT_STATIC)
4922 length must always be evaluated, even under nocode_wanted,
4923 so that its size slot is initialized (e.g. under sizeof
4929 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
4932 tcc_error("invalid array size");
4934 if (!is_integer_btype(vtop
->type
.t
& VT_BTYPE
))
4935 tcc_error("size of variable length array should be an integer");
4941 /* parse next post type */
4942 post_type(type
, ad
, storage
, (td
& ~(TYPE_DIRECT
|TYPE_ABSTRACT
)) | TYPE_NEST
);
4944 if ((type
->t
& VT_BTYPE
) == VT_FUNC
)
4945 tcc_error("declaration of an array of functions");
4946 if ((type
->t
& VT_BTYPE
) == VT_VOID
4947 || type_size(type
, &align
) < 0)
4948 tcc_error("declaration of an array of incomplete type elements");
4950 t1
|= type
->t
& VT_VLA
;
4955 tcc_error("need explicit inner array size in VLAs");
4958 loc
-= type_size(&int_type
, &align
);
4962 vpush_type_size(type
, &align
);
4964 vset(&int_type
, VT_LOCAL
|VT_LVAL
, n
);
4971 nocode_wanted
= saved_nocode_wanted
;
4973 /* we push an anonymous symbol which will contain the array
4975 s
= sym_push(SYM_FIELD
, type
, 0, n
);
4976 type
->t
= (t1
? VT_VLA
: VT_ARRAY
) | VT_PTR
;
4979 if (vla_array_str
) {
4981 s
->vla_array_str
= vla_array_str
;
4983 tok_str_free_str(vla_array_str
);
4989 /* Parse a type declarator (except basic type), and return the type
4990 in 'type'. 'td' is a bitmask indicating which kind of type decl is
4991 expected. 'type' should contain the basic type. 'ad' is the
4992 attribute definition of the basic type. It can be modified by
4993 type_decl(). If this (possibly abstract) declarator is a pointer chain
4994 it returns the innermost pointed to type (equals *type, but is a different
4995 pointer), otherwise returns type itself, that's used for recursive calls. */
4996 static CType
*type_decl(CType
*type
, AttributeDef
*ad
, int *v
, int td
)
4999 int qualifiers
, storage
;
5001 /* recursive type, remove storage bits first, apply them later again */
5002 storage
= type
->t
& VT_STORAGE
;
5003 type
->t
&= ~VT_STORAGE
;
5006 while (tok
== '*') {
5012 qualifiers
|= VT_ATOMIC
;
5017 qualifiers
|= VT_CONSTANT
;
5022 qualifiers
|= VT_VOLATILE
;
5028 /* XXX: clarify attribute handling */
5029 case TOK_ATTRIBUTE1
:
5030 case TOK_ATTRIBUTE2
:
5031 parse_attribute(ad
);
5035 type
->t
|= qualifiers
;
5037 /* innermost pointed to type is the one for the first derivation */
5038 ret
= pointed_type(type
);
5042 /* This is possibly a parameter type list for abstract declarators
5043 ('int ()'), use post_type for testing this. */
5044 if (!post_type(type
, ad
, 0, td
)) {
5045 /* It's not, so it's a nested declarator, and the post operations
5046 apply to the innermost pointed to type (if any). */
5047 /* XXX: this is not correct to modify 'ad' at this point, but
5048 the syntax is not clear */
5049 parse_attribute(ad
);
5050 post
= type_decl(type
, ad
, v
, td
);
5054 } else if (tok
>= TOK_IDENT
&& (td
& TYPE_DIRECT
)) {
5055 /* type identifier */
5060 if (!(td
& TYPE_ABSTRACT
))
5061 expect("identifier");
5064 post_type(post
, ad
, post
!= ret
? 0 : storage
,
5065 td
& ~(TYPE_DIRECT
|TYPE_ABSTRACT
));
5066 parse_attribute(ad
);
5071 /* indirection with full error checking and bound check */
5072 ST_FUNC
void indir(void)
5074 if ((vtop
->type
.t
& VT_BTYPE
) != VT_PTR
) {
5075 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FUNC
)
5079 if (vtop
->r
& VT_LVAL
)
5081 vtop
->type
= *pointed_type(&vtop
->type
);
5082 /* Arrays and functions are never lvalues */
5083 if (!(vtop
->type
.t
& (VT_ARRAY
| VT_VLA
))
5084 && (vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
) {
5086 /* if bound checking, the referenced pointer must be checked */
5087 #ifdef CONFIG_TCC_BCHECK
5088 if (tcc_state
->do_bounds_check
)
5089 vtop
->r
|= VT_MUSTBOUND
;
5094 /* pass a parameter to a function and do type checking and casting */
5095 static void gfunc_param_typed(Sym
*func
, Sym
*arg
)
5100 func_type
= func
->f
.func_type
;
5101 if (func_type
== FUNC_OLD
||
5102 (func_type
== FUNC_ELLIPSIS
&& arg
== NULL
)) {
5103 /* default casting : only need to convert float to double */
5104 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FLOAT
) {
5105 gen_cast_s(VT_DOUBLE
);
5106 } else if (vtop
->type
.t
& VT_BITFIELD
) {
5107 type
.t
= vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
);
5108 type
.ref
= vtop
->type
.ref
;
5110 } else if (vtop
->r
& VT_MUSTCAST
) {
5111 force_charshort_cast();
5113 } else if (arg
== NULL
) {
5114 tcc_error("too many arguments to function");
5117 type
.t
&= ~VT_CONSTANT
; /* need to do that to avoid false warning */
5118 gen_assign_cast(&type
);
5122 /* parse an expression and return its type without any side effect. */
5123 static void expr_type(CType
*type
, void (*expr_fn
)(void))
5132 /* parse an expression of the form '(type)' or '(expr)' and return its
5134 static void parse_expr_type(CType
*type
)
5140 if (parse_btype(type
, &ad
, 0)) {
5141 type_decl(type
, &ad
, &n
, TYPE_ABSTRACT
);
5143 expr_type(type
, gexpr
);
5148 static void parse_type(CType
*type
)
5153 if (!parse_btype(type
, &ad
, 0)) {
5156 type_decl(type
, &ad
, &n
, TYPE_ABSTRACT
);
5159 static void parse_builtin_params(int nc
, const char *args
)
5168 while ((c
= *args
++)) {
5183 type
.t
= VT_CONSTANT
;
5189 type
.t
= VT_CONSTANT
;
5191 type
.t
|= char_type
.t
;
5203 gen_assign_cast(&type
);
5210 static void parse_atomic(int atok
)
5212 int size
, align
, arg
, t
, save
= 0;
5213 CType
*atom
, *atom_ptr
, ct
= {0};
5216 static const char *const templates
[] = {
5218 * Each entry consists of callback and function template.
5219 * The template represents argument types and return type.
5221 * ? void (return-only)
5224 * A read-only atomic
5225 * p pointer to memory
5232 /* keep in order of appearance in tcctok.h: */
5233 /* __atomic_store */ "alm.?",
5234 /* __atomic_load */ "Asm.v",
5235 /* __atomic_exchange */ "alsm.v",
5236 /* __atomic_compare_exchange */ "aplbmm.b",
5237 /* __atomic_fetch_add */ "avm.v",
5238 /* __atomic_fetch_sub */ "avm.v",
5239 /* __atomic_fetch_or */ "avm.v",
5240 /* __atomic_fetch_xor */ "avm.v",
5241 /* __atomic_fetch_and */ "avm.v",
5242 /* __atomic_fetch_nand */ "avm.v",
5243 /* __atomic_and_fetch */ "avm.v",
5244 /* __atomic_sub_fetch */ "avm.v",
5245 /* __atomic_or_fetch */ "avm.v",
5246 /* __atomic_xor_fetch */ "avm.v",
5247 /* __atomic_and_fetch */ "avm.v",
5248 /* __atomic_nand_fetch */ "avm.v"
5250 const char *template = templates
[(atok
- TOK___atomic_store
)];
5252 atom
= atom_ptr
= NULL
;
5253 size
= 0; /* pacify compiler */
5258 switch (template[arg
]) {
5261 atom_ptr
= &vtop
->type
;
5262 if ((atom_ptr
->t
& VT_BTYPE
) != VT_PTR
)
5264 atom
= pointed_type(atom_ptr
);
5265 size
= type_size(atom
, &align
);
5267 || (size
& (size
- 1))
5268 || (atok
> TOK___atomic_compare_exchange
5269 && (0 == btype_size(atom
->t
& VT_BTYPE
)
5270 || (atom
->t
& VT_BTYPE
) == VT_PTR
)))
5271 expect("integral or integer-sized pointer target type");
5272 /* GCC does not care either: */
5273 /* if (!(atom->t & VT_ATOMIC))
5274 tcc_warning("pointer target declaration is missing '_Atomic'"); */
5278 if ((vtop
->type
.t
& VT_BTYPE
) != VT_PTR
5279 || type_size(pointed_type(&vtop
->type
), &align
) != size
)
5280 tcc_error("pointer target type mismatch in argument %d", arg
+ 1);
5281 gen_assign_cast(atom_ptr
);
5284 gen_assign_cast(atom
);
5288 gen_assign_cast(atom
);
5297 gen_assign_cast(&int_type
);
5301 gen_assign_cast(&ct
);
5304 if ('.' == template[++arg
])
5311 switch (template[arg
+ 1]) {
5320 sprintf(buf
, "%s_%d", get_tok_str(atok
, 0), size
);
5321 vpush_helper_func(tok_alloc_const(buf
));
5322 vrott(arg
- save
+ 1);
5323 gfunc_call(arg
- save
);
5326 PUT_R_RET(vtop
, ct
.t
);
5327 t
= ct
.t
& VT_BTYPE
;
5328 if (t
== VT_BYTE
|| t
== VT_SHORT
|| t
== VT_BOOL
) {
5330 vtop
->r
|= BFVAL(VT_MUSTCAST
, 1);
5332 vtop
->type
.t
= VT_INT
;
5344 ST_FUNC
void unary(void)
5346 int n
, t
, align
, size
, r
, sizeof_caller
;
5351 /* generate line number info */
5353 tcc_debug_line(tcc_state
), tcc_tcov_check_line (tcc_state
, 1);
5355 sizeof_caller
= in_sizeof
;
5358 /* XXX: GCC 2.95.3 does not generate a table although it should be
5366 #ifdef TCC_TARGET_PE
5367 t
= VT_SHORT
|VT_UNSIGNED
;
5375 vsetc(&type
, VT_CONST
, &tokc
);
5379 t
= VT_INT
| VT_UNSIGNED
;
5385 t
= VT_LLONG
| VT_UNSIGNED
;
5397 t
= (LONG_SIZE
== 8 ? VT_LLONG
: VT_INT
) | VT_LONG
;
5400 t
= (LONG_SIZE
== 8 ? VT_LLONG
: VT_INT
) | VT_LONG
| VT_UNSIGNED
;
5402 case TOK___FUNCTION__
:
5404 goto tok_identifier
;
5410 /* special function name identifier */
5411 len
= strlen(funcname
) + 1;
5412 /* generate char[len] type */
5413 type
.t
= char_type
.t
;
5414 if (tcc_state
->warn_write_strings
& WARN_ON
)
5415 type
.t
|= VT_CONSTANT
;
5419 sec
= rodata_section
;
5420 vpush_ref(&type
, sec
, sec
->data_offset
, len
);
5422 memcpy(section_ptr_add(sec
, len
), funcname
, len
);
5427 #ifdef TCC_TARGET_PE
5428 t
= VT_SHORT
| VT_UNSIGNED
;
5434 /* string parsing */
5437 if (tcc_state
->warn_write_strings
& WARN_ON
)
5442 memset(&ad
, 0, sizeof(AttributeDef
));
5443 ad
.section
= rodata_section
;
5444 decl_initializer_alloc(&type
, &ad
, VT_CONST
, 2, 0, 0);
5449 if (parse_btype(&type
, &ad
, 0)) {
5450 type_decl(&type
, &ad
, &n
, TYPE_ABSTRACT
);
5452 /* check ISOC99 compound literal */
5454 /* data is allocated locally by default */
5459 /* all except arrays are lvalues */
5460 if (!(type
.t
& VT_ARRAY
))
5462 memset(&ad
, 0, sizeof(AttributeDef
));
5463 decl_initializer_alloc(&type
, &ad
, r
, 1, 0, 0);
5465 if (sizeof_caller
) {
5472 } else if (tok
== '{') {
5473 int saved_nocode_wanted
= nocode_wanted
;
5474 if (const_wanted
&& !(nocode_wanted
& unevalmask
))
5476 if (0 == local_scope
)
5477 tcc_error("statement expression outside of function");
5478 /* save all registers */
5480 /* statement expression : we do not accept break/continue
5481 inside as GCC does. We do retain the nocode_wanted state,
5482 as statement expressions can't ever be entered from the
5483 outside, so any reactivation of code emission (from labels
5484 or loop heads) can be disabled again after the end of it. */
5486 /* If the statement expr can be entered, then we retain the current
5487 nocode_wanted state (from e.g. a 'return 0;' in the stmt-expr).
5488 If it can't be entered then the state is that from before the
5489 statement expression. */
5490 if (saved_nocode_wanted
)
5491 nocode_wanted
= saved_nocode_wanted
;
5506 /* functions names must be treated as function pointers,
5507 except for unary '&' and sizeof. Since we consider that
5508 functions are not lvalues, we only have to handle it
5509 there and in function calls. */
5510 /* arrays can also be used although they are not lvalues */
5511 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
&&
5512 !(vtop
->type
.t
& (VT_ARRAY
| VT_VLA
)))
5515 vtop
->sym
->a
.addrtaken
= 1;
5516 mk_pointer(&vtop
->type
);
5522 gen_test_zero(TOK_EQ
);
5533 if ((vtop
->type
.t
& VT_BTYPE
) == VT_PTR
)
5534 tcc_error("pointer not accepted for unary plus");
5535 /* In order to force cast, we add zero, except for floating point
5536 where we really need an noop (otherwise -0.0 will be transformed
5538 if (!is_float(vtop
->type
.t
)) {
5550 expr_type(&type
, unary
); /* Perform a in_sizeof = 0; */
5551 if (t
== TOK_SIZEOF
) {
5552 vpush_type_size(&type
, &align
);
5553 gen_cast_s(VT_SIZE_T
);
5555 type_size(&type
, &align
);
5557 if (vtop
[1].r
& VT_SYM
)
5558 s
= vtop
[1].sym
; /* hack: accessing previous vtop */
5559 if (s
&& s
->a
.aligned
)
5560 align
= 1 << (s
->a
.aligned
- 1);
5565 case TOK_builtin_expect
:
5566 /* __builtin_expect is a no-op for now */
5567 parse_builtin_params(0, "ee");
5570 case TOK_builtin_types_compatible_p
:
5571 parse_builtin_params(0, "tt");
5572 vtop
[-1].type
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
5573 vtop
[0].type
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
5574 n
= is_compatible_types(&vtop
[-1].type
, &vtop
[0].type
);
5578 case TOK_builtin_choose_expr
:
5605 case TOK_builtin_constant_p
:
5607 parse_builtin_params(1, "e");
5609 (vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
&&
5610 !((vtop
->r
& VT_SYM
) && vtop
->sym
->a
.addrtaken
);
5614 case TOK_builtin_frame_address
:
5615 case TOK_builtin_return_address
:
5621 level
= expr_const64();
5623 tcc_error("%s only takes positive integers",
5624 tok1
== TOK_builtin_return_address
?
5625 "__builtin_return_address" :
5626 "__builtin_frame_address");
5631 vset(&type
, VT_LOCAL
, 0); /* local frame */
5633 #ifdef TCC_TARGET_RISCV64
5637 mk_pointer(&vtop
->type
);
5638 indir(); /* -> parent frame */
5640 if (tok1
== TOK_builtin_return_address
) {
5641 // assume return address is just above frame pointer on stack
5642 #ifdef TCC_TARGET_ARM
5645 #elif defined TCC_TARGET_RISCV64
5652 mk_pointer(&vtop
->type
);
5657 #ifdef TCC_TARGET_RISCV64
5658 case TOK_builtin_va_start
:
5659 parse_builtin_params(0, "ee");
5660 r
= vtop
->r
& VT_VALMASK
;
5664 tcc_error("__builtin_va_start expects a local variable");
5669 #ifdef TCC_TARGET_X86_64
5670 #ifdef TCC_TARGET_PE
5671 case TOK_builtin_va_start
:
5672 parse_builtin_params(0, "ee");
5673 r
= vtop
->r
& VT_VALMASK
;
5677 tcc_error("__builtin_va_start expects a local variable");
5679 vtop
->type
= char_pointer_type
;
5684 case TOK_builtin_va_arg_types
:
5685 parse_builtin_params(0, "t");
5686 vpushi(classify_x86_64_va_arg(&vtop
->type
));
5693 #ifdef TCC_TARGET_ARM64
5694 case TOK_builtin_va_start
: {
5695 parse_builtin_params(0, "ee");
5699 vtop
->type
.t
= VT_VOID
;
5702 case TOK_builtin_va_arg
: {
5703 parse_builtin_params(0, "et");
5711 case TOK___arm64_clear_cache
: {
5712 parse_builtin_params(0, "ee");
5715 vtop
->type
.t
= VT_VOID
;
5720 /* atomic operations */
5721 case TOK___atomic_store
:
5722 case TOK___atomic_load
:
5723 case TOK___atomic_exchange
:
5724 case TOK___atomic_compare_exchange
:
5725 case TOK___atomic_fetch_add
:
5726 case TOK___atomic_fetch_sub
:
5727 case TOK___atomic_fetch_or
:
5728 case TOK___atomic_fetch_xor
:
5729 case TOK___atomic_fetch_and
:
5730 case TOK___atomic_fetch_nand
:
5731 case TOK___atomic_add_fetch
:
5732 case TOK___atomic_sub_fetch
:
5733 case TOK___atomic_or_fetch
:
5734 case TOK___atomic_xor_fetch
:
5735 case TOK___atomic_and_fetch
:
5736 case TOK___atomic_nand_fetch
:
5740 /* pre operations */
5751 if (is_float(vtop
->type
.t
)) {
5761 goto tok_identifier
;
5763 /* allow to take the address of a label */
5764 if (tok
< TOK_UIDENT
)
5765 expect("label identifier");
5766 s
= label_find(tok
);
5768 s
= label_push(&global_label_stack
, tok
, LABEL_FORWARD
);
5770 if (s
->r
== LABEL_DECLARED
)
5771 s
->r
= LABEL_FORWARD
;
5773 if ((s
->type
.t
& VT_BTYPE
) != VT_PTR
) {
5774 s
->type
.t
= VT_VOID
;
5775 mk_pointer(&s
->type
);
5776 s
->type
.t
|= VT_STATIC
;
5778 vpushsym(&s
->type
, s
);
5784 CType controlling_type
;
5785 int has_default
= 0;
5788 TokenString
*str
= NULL
;
5789 int saved_const_wanted
= const_wanted
;
5794 expr_type(&controlling_type
, expr_eq
);
5795 controlling_type
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
| VT_ARRAY
);
5796 if ((controlling_type
.t
& VT_BTYPE
) == VT_FUNC
)
5797 mk_pointer(&controlling_type
);
5798 const_wanted
= saved_const_wanted
;
5802 if (tok
== TOK_DEFAULT
) {
5804 tcc_error("too many 'default'");
5810 AttributeDef ad_tmp
;
5814 parse_btype(&cur_type
, &ad_tmp
, 0);
5815 type_decl(&cur_type
, &ad_tmp
, &itmp
, TYPE_ABSTRACT
);
5816 if (compare_types(&controlling_type
, &cur_type
, 0)) {
5818 tcc_error("type match twice");
5828 skip_or_save_block(&str
);
5830 skip_or_save_block(NULL
);
5837 type_to_str(buf
, sizeof buf
, &controlling_type
, NULL
);
5838 tcc_error("type '%s' does not match any association", buf
);
5840 begin_macro(str
, 1);
5849 // special qnan , snan and infinity values
5854 vtop
->type
.t
= VT_FLOAT
;
5859 goto special_math_val
;
5862 goto special_math_val
;
5869 expect("identifier");
5871 if (!s
|| IS_ASM_SYM(s
)) {
5872 const char *name
= get_tok_str(t
, NULL
);
5874 tcc_error("'%s' undeclared", name
);
5875 /* for simple function calls, we tolerate undeclared
5876 external reference to int() function */
5877 tcc_warning_c(warn_implicit_function_declaration
)(
5878 "implicit declaration of function '%s'", name
);
5879 s
= external_global_sym(t
, &func_old_type
);
5883 /* A symbol that has a register is a local register variable,
5884 which starts out as VT_LOCAL value. */
5885 if ((r
& VT_VALMASK
) < VT_CONST
)
5886 r
= (r
& ~VT_VALMASK
) | VT_LOCAL
;
5888 vset(&s
->type
, r
, s
->c
);
5889 /* Point to s as backpointer (even without r&VT_SYM).
5890 Will be used by at least the x86 inline asm parser for
5896 } else if (r
== VT_CONST
&& IS_ENUM_VAL(s
->type
.t
)) {
5897 vtop
->c
.i
= s
->enum_val
;
5902 /* post operations */
5904 if (tok
== TOK_INC
|| tok
== TOK_DEC
) {
5907 } else if (tok
== '.' || tok
== TOK_ARROW
|| tok
== TOK_CDOUBLE
) {
5908 int qualifiers
, cumofs
= 0;
5910 if (tok
== TOK_ARROW
)
5912 qualifiers
= vtop
->type
.t
& (VT_CONSTANT
| VT_VOLATILE
);
5915 /* expect pointer on structure */
5916 if ((vtop
->type
.t
& VT_BTYPE
) != VT_STRUCT
)
5917 expect("struct or union");
5918 if (tok
== TOK_CDOUBLE
)
5919 expect("field name");
5921 if (tok
== TOK_CINT
|| tok
== TOK_CUINT
)
5922 expect("field name");
5923 s
= find_field(&vtop
->type
, tok
, &cumofs
);
5924 /* add field offset to pointer */
5925 vtop
->type
= char_pointer_type
; /* change type to 'char *' */
5928 /* change type to field type, and set to lvalue */
5929 vtop
->type
= s
->type
;
5930 vtop
->type
.t
|= qualifiers
;
5931 /* an array is never an lvalue */
5932 if (!(vtop
->type
.t
& VT_ARRAY
)) {
5934 #ifdef CONFIG_TCC_BCHECK
5935 /* if bound checking, the referenced pointer must be checked */
5936 if (tcc_state
->do_bounds_check
)
5937 vtop
->r
|= VT_MUSTBOUND
;
5941 } else if (tok
== '[') {
5947 } else if (tok
== '(') {
5950 int nb_args
, ret_nregs
, ret_align
, regsize
, variadic
;
5953 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
) {
5954 /* pointer test (no array accepted) */
5955 if ((vtop
->type
.t
& (VT_BTYPE
| VT_ARRAY
)) == VT_PTR
) {
5956 vtop
->type
= *pointed_type(&vtop
->type
);
5957 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
)
5961 expect("function pointer");
5964 vtop
->r
&= ~VT_LVAL
; /* no lvalue */
5966 /* get return type */
5969 sa
= s
->next
; /* first parameter */
5970 nb_args
= regsize
= 0;
5972 /* compute first implicit argument if a structure is returned */
5973 if ((s
->type
.t
& VT_BTYPE
) == VT_STRUCT
) {
5974 variadic
= (s
->f
.func_type
== FUNC_ELLIPSIS
);
5975 ret_nregs
= gfunc_sret(&s
->type
, variadic
, &ret
.type
,
5976 &ret_align
, ®size
);
5977 if (ret_nregs
<= 0) {
5978 /* get some space for the returned structure */
5979 size
= type_size(&s
->type
, &align
);
5980 #ifdef TCC_TARGET_ARM64
5981 /* On arm64, a small struct is return in registers.
5982 It is much easier to write it to memory if we know
5983 that we are allowed to write some extra bytes, so
5984 round the allocated space up to a power of 2: */
5986 while (size
& (size
- 1))
5987 size
= (size
| (size
- 1)) + 1;
5989 loc
= (loc
- size
) & -align
;
5991 ret
.r
= VT_LOCAL
| VT_LVAL
;
5992 /* pass it as 'int' to avoid structure arg passing
5994 vseti(VT_LOCAL
, loc
);
5995 #ifdef CONFIG_TCC_BCHECK
5996 if (tcc_state
->do_bounds_check
)
6010 if (ret_nregs
> 0) {
6011 /* return in register */
6013 PUT_R_RET(&ret
, ret
.type
.t
);
6018 gfunc_param_typed(s
, sa
);
6028 tcc_error("too few arguments to function");
6030 gfunc_call(nb_args
);
6032 if (ret_nregs
< 0) {
6033 vsetc(&ret
.type
, ret
.r
, &ret
.c
);
6034 #ifdef TCC_TARGET_RISCV64
6035 arch_transfer_ret_regs(1);
6039 for (r
= ret
.r
+ ret_nregs
+ !ret_nregs
; r
-- > ret
.r
;) {
6040 vsetc(&ret
.type
, r
, &ret
.c
);
6041 vtop
->r2
= ret
.r2
; /* Loop only happens when r2 is VT_CONST */
6044 /* handle packed struct return */
6045 if (((s
->type
.t
& VT_BTYPE
) == VT_STRUCT
) && ret_nregs
) {
6048 size
= type_size(&s
->type
, &align
);
6049 /* We're writing whole regs often, make sure there's enough
6050 space. Assume register size is power of 2. */
6051 if (regsize
> align
)
6053 loc
= (loc
- size
) & -align
;
6057 vset(&ret
.type
, VT_LOCAL
| VT_LVAL
, addr
+ offset
);
6061 if (--ret_nregs
== 0)
6065 vset(&s
->type
, VT_LOCAL
| VT_LVAL
, addr
);
6068 /* Promote char/short return values. This is matters only
6069 for calling function that were not compiled by TCC and
6070 only on some architectures. For those where it doesn't
6071 matter we expect things to be already promoted to int,
6073 t
= s
->type
.t
& VT_BTYPE
;
6074 if (t
== VT_BYTE
|| t
== VT_SHORT
|| t
== VT_BOOL
) {
6076 vtop
->r
|= BFVAL(VT_MUSTCAST
, 1);
6078 vtop
->type
.t
= VT_INT
;
6082 if (s
->f
.func_noreturn
) {
6084 tcc_tcov_block_end(tcc_state
, -1);
6093 #ifndef precedence_parser /* original top-down parser */
6095 static void expr_prod(void)
6100 while ((t
= tok
) == '*' || t
== '/' || t
== '%') {
6107 static void expr_sum(void)
6112 while ((t
= tok
) == '+' || t
== '-') {
6119 static void expr_shift(void)
6124 while ((t
= tok
) == TOK_SHL
|| t
== TOK_SAR
) {
6131 static void expr_cmp(void)
6136 while (((t
= tok
) >= TOK_ULE
&& t
<= TOK_GT
) ||
6137 t
== TOK_ULT
|| t
== TOK_UGE
) {
6144 static void expr_cmpeq(void)
6149 while ((t
= tok
) == TOK_EQ
|| t
== TOK_NE
) {
6156 static void expr_and(void)
6159 while (tok
== '&') {
6166 static void expr_xor(void)
6169 while (tok
== '^') {
6176 static void expr_or(void)
6179 while (tok
== '|') {
6186 static void expr_landor(int op
);
6188 static void expr_land(void)
6191 if (tok
== TOK_LAND
)
6195 static void expr_lor(void)
6202 # define expr_landor_next(op) op == TOK_LAND ? expr_or() : expr_land()
6203 #else /* defined precedence_parser */
6204 # define expr_landor_next(op) unary(), expr_infix(precedence(op) + 1)
6205 # define expr_lor() unary(), expr_infix(1)
6207 static int precedence(int tok
)
6210 case TOK_LOR
: return 1;
6211 case TOK_LAND
: return 2;
6215 case TOK_EQ
: case TOK_NE
: return 6;
6216 relat
: case TOK_ULT
: case TOK_UGE
: return 7;
6217 case TOK_SHL
: case TOK_SAR
: return 8;
6218 case '+': case '-': return 9;
6219 case '*': case '/': case '%': return 10;
6221 if (tok
>= TOK_ULE
&& tok
<= TOK_GT
)
6226 static unsigned char prec
[256];
6227 static void init_prec(void)
6230 for (i
= 0; i
< 256; i
++)
6231 prec
[i
] = precedence(i
);
6233 #define precedence(i) ((unsigned)i < 256 ? prec[i] : 0)
6235 static void expr_landor(int op
);
6237 static void expr_infix(int p
)
6240 while ((p2
= precedence(t
)) >= p
) {
6241 if (t
== TOK_LOR
|| t
== TOK_LAND
) {
6246 if (precedence(tok
) > p2
)
6255 /* Assuming vtop is a value used in a conditional context
6256 (i.e. compared with zero) return 0 if it's false, 1 if
6257 true and -1 if it can't be statically determined. */
6258 static int condition_3way(void)
6261 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
&&
6262 (!(vtop
->r
& VT_SYM
) || !vtop
->sym
->a
.weak
)) {
6264 gen_cast_s(VT_BOOL
);
6271 static void expr_landor(int op
)
6273 int t
= 0, cc
= 1, f
= 0, i
= op
== TOK_LAND
, c
;
6275 c
= f
? i
: condition_3way();
6277 save_regs(1), cc
= 0;
6279 nocode_wanted
++, f
= 1;
6287 expr_landor_next(op
);
6299 static int is_cond_bool(SValue
*sv
)
6301 if ((sv
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
6302 && (sv
->type
.t
& VT_BTYPE
) == VT_INT
)
6303 return (unsigned)sv
->c
.i
< 2;
6304 if (sv
->r
== VT_CMP
)
6309 static void expr_cond(void)
6311 int tt
, u
, r1
, r2
, rc
, t1
, t2
, islv
, c
, g
;
6318 c
= condition_3way();
6319 g
= (tok
== ':' && gnu_ext
);
6329 /* needed to avoid having different registers saved in
6341 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FUNC
)
6342 mk_pointer(&vtop
->type
);
6343 sv
= *vtop
; /* save value to handle it later */
6344 vtop
--; /* no vpop so that FP stack is not flushed */
6361 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FUNC
)
6362 mk_pointer(&vtop
->type
);
6364 /* cast operands to correct type according to ISOC rules */
6365 if (!combine_types(&type
, &sv
, vtop
, '?'))
6366 type_incompatibility_error(&sv
.type
, &vtop
->type
,
6367 "type mismatch in conditional expression (have '%s' and '%s')");
6369 if (c
< 0 && is_cond_bool(vtop
) && is_cond_bool(&sv
)) {
6370 /* optimize "if (f ? a > b : c || d) ..." for example, where normally
6371 "a < b" and "c || d" would be forced to "(int)0/1" first, whereas
6372 this code jumps directly to the if's then/else branches. */
6377 /* combine jump targets of 2nd op with VT_CMP of 1st op */
6381 // tcc_warning("two conditions expr_cond");
6385 /* keep structs lvalue by transforming `(expr ? a : b)` to `*(expr ? &a : &b)` so
6386 that `(expr ? a : b).mem` does not error with "lvalue expected" */
6387 islv
= (vtop
->r
& VT_LVAL
) && (sv
.r
& VT_LVAL
) && VT_STRUCT
== (type
.t
& VT_BTYPE
);
6389 /* now we convert second operand */
6393 mk_pointer(&vtop
->type
);
6395 } else if (VT_STRUCT
== (vtop
->type
.t
& VT_BTYPE
))
6399 rc
= RC_TYPE(type
.t
);
6400 /* for long longs, we use fixed registers to avoid having
6401 to handle a complicated move */
6402 if (USING_TWO_WORDS(type
.t
))
6403 rc
= RC_RET(type
.t
);
6414 /* this is horrible, but we must also convert first
6420 mk_pointer(&vtop
->type
);
6422 } else if (VT_STRUCT
== (vtop
->type
.t
& VT_BTYPE
))
6428 move_reg(r2
, r1
, islv
? VT_PTR
: type
.t
);
6438 static void expr_eq(void)
6443 if ((t
= tok
) == '=' || TOK_ASSIGN(t
)) {
6451 gen_op(TOK_ASSIGN_OP(t
));
6457 ST_FUNC
void gexpr(void)
6463 constant_p
&= (vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
&&
6464 !((vtop
->r
& VT_SYM
) && vtop
->sym
->a
.addrtaken
);
6470 /* parse a constant expression and return value in vtop. */
6471 static void expr_const1(void)
6474 nocode_wanted
+= unevalmask
+ 1;
6476 nocode_wanted
-= unevalmask
+ 1;
6480 /* parse an integer constant and return its value. */
6481 static inline int64_t expr_const64(void)
6485 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) != VT_CONST
)
6486 expect("constant expression");
6492 /* parse an integer constant and return its value.
6493 Complain if it doesn't fit 32bit (signed or unsigned). */
6494 ST_FUNC
int expr_const(void)
6497 int64_t wc
= expr_const64();
6499 if (c
!= wc
&& (unsigned)c
!= wc
)
6500 tcc_error("constant exceeds 32 bit");
6504 /* ------------------------------------------------------------------------- */
6505 /* return from function */
6507 #ifndef TCC_TARGET_ARM64
6508 static void gfunc_return(CType
*func_type
)
6510 if ((func_type
->t
& VT_BTYPE
) == VT_STRUCT
) {
6511 CType type
, ret_type
;
6512 int ret_align
, ret_nregs
, regsize
;
6513 ret_nregs
= gfunc_sret(func_type
, func_var
, &ret_type
,
6514 &ret_align
, ®size
);
6515 if (ret_nregs
< 0) {
6516 #ifdef TCC_TARGET_RISCV64
6517 arch_transfer_ret_regs(0);
6519 } else if (0 == ret_nregs
) {
6520 /* if returning structure, must copy it to implicit
6521 first pointer arg location */
6524 vset(&type
, VT_LOCAL
| VT_LVAL
, func_vc
);
6527 /* copy structure value to pointer */
6530 /* returning structure packed into registers */
6531 int size
, addr
, align
, rc
;
6532 size
= type_size(func_type
,&align
);
6533 if ((vtop
->r
!= (VT_LOCAL
| VT_LVAL
) ||
6534 (vtop
->c
.i
& (ret_align
-1)))
6535 && (align
& (ret_align
-1))) {
6536 loc
= (loc
- size
) & -ret_align
;
6539 vset(&type
, VT_LOCAL
| VT_LVAL
, addr
);
6543 vset(&ret_type
, VT_LOCAL
| VT_LVAL
, addr
);
6545 vtop
->type
= ret_type
;
6546 rc
= RC_RET(ret_type
.t
);
6554 if (--ret_nregs
== 0)
6556 /* We assume that when a structure is returned in multiple
6557 registers, their classes are consecutive values of the
6560 vtop
->c
.i
+= regsize
;
6565 gv(RC_RET(func_type
->t
));
6567 vtop
--; /* NOT vpop() because on x86 it would flush the fp stack */
6571 static void check_func_return(void)
6573 if ((func_vt
.t
& VT_BTYPE
) == VT_VOID
)
6575 if (!strcmp (funcname
, "main")
6576 && (func_vt
.t
& VT_BTYPE
) == VT_INT
) {
6577 /* main returns 0 by default */
6579 gen_assign_cast(&func_vt
);
6580 gfunc_return(&func_vt
);
6582 tcc_warning("function might return no value: '%s'", funcname
);
6586 /* ------------------------------------------------------------------------- */
6589 static int case_cmpi(const void *pa
, const void *pb
)
6591 int64_t a
= (*(struct case_t
**) pa
)->v1
;
6592 int64_t b
= (*(struct case_t
**) pb
)->v1
;
6593 return a
< b
? -1 : a
> b
;
6596 static int case_cmpu(const void *pa
, const void *pb
)
6598 uint64_t a
= (uint64_t)(*(struct case_t
**) pa
)->v1
;
6599 uint64_t b
= (uint64_t)(*(struct case_t
**) pb
)->v1
;
6600 return a
< b
? -1 : a
> b
;
6603 static void gtst_addr(int t
, int a
)
6605 gsym_addr(gvtst(0, t
), a
);
6608 static void gcase(struct case_t
**base
, int len
, int *bsym
)
6612 int ll
= (vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
;
6629 gtst_addr(0, p
->sym
); /* v1 <= x <= v2 */
6631 gcase(base
, len
/2, bsym
);
6635 base
+= e
; len
-= e
;
6645 if (p
->v1
== p
->v2
) {
6647 gtst_addr(0, p
->sym
);
6657 gtst_addr(0, p
->sym
);
6661 *bsym
= gjmp(*bsym
);
6664 /* ------------------------------------------------------------------------- */
6665 /* __attribute__((cleanup(fn))) */
6667 static void try_call_scope_cleanup(Sym
*stop
)
6669 Sym
*cls
= cur_scope
->cl
.s
;
6671 for (; cls
!= stop
; cls
= cls
->ncl
) {
6672 Sym
*fs
= cls
->next
;
6673 Sym
*vs
= cls
->prev_tok
;
6675 vpushsym(&fs
->type
, fs
);
6676 vset(&vs
->type
, vs
->r
, vs
->c
);
6678 mk_pointer(&vtop
->type
);
6684 static void try_call_cleanup_goto(Sym
*cleanupstate
)
6689 if (!cur_scope
->cl
.s
)
6692 /* search NCA of both cleanup chains given parents and initial depth */
6693 ocd
= cleanupstate
? cleanupstate
->v
& ~SYM_FIELD
: 0;
6694 for (ccd
= cur_scope
->cl
.n
, oc
= cleanupstate
; ocd
> ccd
; --ocd
, oc
= oc
->ncl
)
6696 for (cc
= cur_scope
->cl
.s
; ccd
> ocd
; --ccd
, cc
= cc
->ncl
)
6698 for (; cc
!= oc
; cc
= cc
->ncl
, oc
= oc
->ncl
, --ccd
)
6701 try_call_scope_cleanup(cc
);
6704 /* call 'func' for each __attribute__((cleanup(func))) */
6705 static void block_cleanup(struct scope
*o
)
6709 for (pg
= &pending_gotos
; (g
= *pg
) && g
->c
> o
->cl
.n
;) {
6710 if (g
->prev_tok
->r
& LABEL_FORWARD
) {
6715 try_call_scope_cleanup(o
->cl
.s
);
6716 pcl
->jnext
= gjmp(0);
6718 goto remove_pending
;
6728 try_call_scope_cleanup(o
->cl
.s
);
6731 /* ------------------------------------------------------------------------- */
6734 static void vla_restore(int loc
)
6737 gen_vla_sp_restore(loc
);
6740 static void vla_leave(struct scope
*o
)
6742 struct scope
*c
= cur_scope
, *v
= NULL
;
6743 for (; c
!= o
&& c
; c
= c
->prev
)
6747 vla_restore(v
->vla
.locorig
);
6750 /* ------------------------------------------------------------------------- */
6753 static void new_scope(struct scope
*o
)
6755 /* copy and link previous scope */
6757 o
->prev
= cur_scope
;
6759 cur_scope
->vla
.num
= 0;
6761 /* record local declaration stack position */
6762 o
->lstk
= local_stack
;
6763 o
->llstk
= local_label_stack
;
6767 tcc_debug_stabn(tcc_state
, N_LBRAC
, ind
- func_ind
);
6770 static void prev_scope(struct scope
*o
, int is_expr
)
6774 if (o
->cl
.s
!= o
->prev
->cl
.s
)
6775 block_cleanup(o
->prev
);
6777 /* pop locally defined labels */
6778 label_pop(&local_label_stack
, o
->llstk
, is_expr
);
6780 /* In the is_expr case (a statement expression is finished here),
6781 vtop might refer to symbols on the local_stack. Either via the
6782 type or via vtop->sym. We can't pop those nor any that in turn
6783 might be referred to. To make it easier we don't roll back
6784 any symbols in that case; some upper level call to block() will
6785 do that. We do have to remove such symbols from the lookup
6786 tables, though. sym_pop will do that. */
6788 /* pop locally defined symbols */
6789 pop_local_syms(o
->lstk
, is_expr
);
6790 cur_scope
= o
->prev
;
6794 tcc_debug_stabn(tcc_state
, N_RBRAC
, ind
- func_ind
);
6797 /* leave a scope via break/continue(/goto) */
6798 static void leave_scope(struct scope
*o
)
6802 try_call_scope_cleanup(o
->cl
.s
);
6806 /* ------------------------------------------------------------------------- */
6807 /* call block from 'for do while' loops */
6809 static void lblock(int *bsym
, int *csym
)
6811 struct scope
*lo
= loop_scope
, *co
= cur_scope
;
6812 int *b
= co
->bsym
, *c
= co
->csym
;
6826 static void block(int is_expr
)
6828 int a
, b
, c
, d
, e
, t
;
6833 /* default return value is (void) */
6835 vtop
->type
.t
= VT_VOID
;
6840 /* If the token carries a value, next() might destroy it. Only with
6841 invalid code such as f(){"123"4;} */
6842 if (TOK_HAS_VALUE(t
))
6847 tcc_tcov_check_line (tcc_state
, 0), tcc_tcov_block_begin (tcc_state
);
6850 //new_scope(&o); //?? breaks tests2.122
6856 if (tok
== TOK_ELSE
) {
6861 gsym(d
); /* patch else jmp */
6865 //prev_scope(&o,0); //?? breaks tests2.122
6867 } else if (t
== TOK_WHILE
) {
6880 } else if (t
== '{') {
6883 /* handle local labels declarations */
6884 while (tok
== TOK_LABEL
) {
6887 if (tok
< TOK_UIDENT
)
6888 expect("label identifier");
6889 label_push(&local_label_stack
, tok
, LABEL_DECLARED
);
6891 } while (tok
== ',');
6895 while (tok
!= '}') {
6904 prev_scope(&o
, is_expr
);
6907 else if (!nocode_wanted
)
6908 check_func_return();
6910 } else if (t
== TOK_RETURN
) {
6911 b
= (func_vt
.t
& VT_BTYPE
) != VT_VOID
;
6915 gen_assign_cast(&func_vt
);
6917 if (vtop
->type
.t
!= VT_VOID
)
6918 tcc_warning("void function returns a value");
6922 tcc_warning("'return' with no value");
6925 leave_scope(root_scope
);
6927 gfunc_return(&func_vt
);
6929 /* jump unless last stmt in top-level block */
6930 if (tok
!= '}' || local_scope
!= 1)
6933 tcc_tcov_block_end (tcc_state
, -1);
6936 } else if (t
== TOK_BREAK
) {
6938 if (!cur_scope
->bsym
)
6939 tcc_error("cannot break");
6940 if (cur_switch
&& cur_scope
->bsym
== cur_switch
->bsym
)
6941 leave_scope(cur_switch
->scope
);
6943 leave_scope(loop_scope
);
6944 *cur_scope
->bsym
= gjmp(*cur_scope
->bsym
);
6947 } else if (t
== TOK_CONTINUE
) {
6949 if (!cur_scope
->csym
)
6950 tcc_error("cannot continue");
6951 leave_scope(loop_scope
);
6952 *cur_scope
->csym
= gjmp(*cur_scope
->csym
);
6955 } else if (t
== TOK_FOR
) {
6960 /* c99 for-loop init decl? */
6961 if (!decl(VT_JMP
)) {
6962 /* no, regular for-loop init expr */
6990 } else if (t
== TOK_DO
) {
7006 } else if (t
== TOK_SWITCH
) {
7007 struct switch_t
*sw
;
7010 sw
= tcc_mallocz(sizeof *sw
);
7012 sw
->scope
= cur_scope
;
7013 sw
->prev
= cur_switch
;
7014 sw
->nocode_wanted
= nocode_wanted
;
7020 sw
->sv
= *vtop
--; /* save switch value */
7023 b
= gjmp(0); /* jump to first case */
7025 a
= gjmp(a
); /* add implicit break */
7029 if (sw
->nocode_wanted
)
7031 if (sw
->sv
.type
.t
& VT_UNSIGNED
)
7032 qsort(sw
->p
, sw
->n
, sizeof(void*), case_cmpu
);
7034 qsort(sw
->p
, sw
->n
, sizeof(void*), case_cmpi
);
7035 for (b
= 1; b
< sw
->n
; b
++)
7036 if (sw
->sv
.type
.t
& VT_UNSIGNED
7037 ? (uint64_t)sw
->p
[b
- 1]->v2
>= (uint64_t)sw
->p
[b
]->v1
7038 : sw
->p
[b
- 1]->v2
>= sw
->p
[b
]->v1
)
7039 tcc_error("duplicate case value");
7042 d
= 0, gcase(sw
->p
, sw
->n
, &d
);
7045 gsym_addr(d
, sw
->def_sym
);
7052 dynarray_reset(&sw
->p
, &sw
->n
);
7053 cur_switch
= sw
->prev
;
7057 } else if (t
== TOK_CASE
) {
7058 struct case_t
*cr
= tcc_malloc(sizeof(struct case_t
));
7061 cr
->v1
= cr
->v2
= expr_const64();
7062 if (gnu_ext
&& tok
== TOK_DOTS
) {
7064 cr
->v2
= expr_const64();
7065 if ((!(cur_switch
->sv
.type
.t
& VT_UNSIGNED
) && cr
->v2
< cr
->v1
)
7066 || (cur_switch
->sv
.type
.t
& VT_UNSIGNED
&& (uint64_t)cr
->v2
< (uint64_t)cr
->v1
))
7067 tcc_warning("empty case range");
7069 /* case and default are unreachable from a switch under nocode_wanted */
7070 if (!cur_switch
->nocode_wanted
)
7072 dynarray_add(&cur_switch
->p
, &cur_switch
->n
, cr
);
7075 goto block_after_label
;
7077 } else if (t
== TOK_DEFAULT
) {
7080 if (cur_switch
->def_sym
)
7081 tcc_error("too many 'default'");
7082 cur_switch
->def_sym
= cur_switch
->nocode_wanted
? 1 : gind();
7085 goto block_after_label
;
7087 } else if (t
== TOK_GOTO
) {
7088 if (cur_scope
->vla
.num
)
7089 vla_restore(cur_scope
->vla
.locorig
);
7090 if (tok
== '*' && gnu_ext
) {
7094 if ((vtop
->type
.t
& VT_BTYPE
) != VT_PTR
)
7098 } else if (tok
>= TOK_UIDENT
) {
7099 s
= label_find(tok
);
7100 /* put forward definition if needed */
7102 s
= label_push(&global_label_stack
, tok
, LABEL_FORWARD
);
7103 else if (s
->r
== LABEL_DECLARED
)
7104 s
->r
= LABEL_FORWARD
;
7106 if (s
->r
& LABEL_FORWARD
) {
7107 /* start new goto chain for cleanups, linked via label->next */
7108 if (cur_scope
->cl
.s
&& !nocode_wanted
) {
7109 sym_push2(&pending_gotos
, SYM_FIELD
, 0, cur_scope
->cl
.n
);
7110 pending_gotos
->prev_tok
= s
;
7111 s
= sym_push2(&s
->next
, SYM_FIELD
, 0, 0);
7112 pending_gotos
->next
= s
;
7114 s
->jnext
= gjmp(s
->jnext
);
7116 try_call_cleanup_goto(s
->cleanupstate
);
7117 gjmp_addr(s
->jnext
);
7122 expect("label identifier");
7126 } else if (t
== TOK_ASM1
|| t
== TOK_ASM2
|| t
== TOK_ASM3
) {
7130 if (tok
== ':' && t
>= TOK_UIDENT
) {
7135 if (s
->r
== LABEL_DEFINED
)
7136 tcc_error("duplicate label '%s'", get_tok_str(s
->v
, NULL
));
7137 s
->r
= LABEL_DEFINED
;
7139 Sym
*pcl
; /* pending cleanup goto */
7140 for (pcl
= s
->next
; pcl
; pcl
= pcl
->prev
)
7142 sym_pop(&s
->next
, NULL
, 0);
7146 s
= label_push(&global_label_stack
, t
, LABEL_DEFINED
);
7149 s
->cleanupstate
= cur_scope
->cl
.s
;
7153 /* Accept attributes after labels (e.g. 'unused') */
7154 AttributeDef ad_tmp
;
7155 parse_attribute(&ad_tmp
);
7158 tcc_tcov_reset_ind(tcc_state
);
7159 vla_restore(cur_scope
->vla
.loc
);
7162 /* we accept this, but it is a mistake */
7163 tcc_warning_c(warn_all
)("deprecated use of label at end of compound statement");
7166 /* expression case */
7183 tcc_tcov_check_line (tcc_state
, 0), tcc_tcov_block_end (tcc_state
, 0);
/* NOTE(review): this extraction is garbled -- original file line numbers are
   fused into the text, statements are split across physical lines, and
   interior lines are missing (the embedded numbering skips). Code is kept
   verbatim; comments describe only what is visible. */
7186 /* This skips over a stream of tokens containing balanced {} and ()
7187 pairs, stopping at outer ',' ';' and '}' (or matching '}' if we started
7188 with a '{'). If STR then allocates and stores the skipped tokens
7189 in *STR. This doesn't check if () and {} are nested correctly,
7190 i.e. "({)}" is accepted. */
7191 static void skip_or_save_block(TokenString
**str
)
/* remember whether we started on '{' -- a matching '}' then terminates */
7193 int braces
= tok
== '{';
7196 *str
= tok_str_alloc();
/* hitting EOF while saving, or while inside unbalanced pairs, is an error */
7208 if (str
|| level
> 0)
7209 tcc_error("unexpected end of file");
7214 tok_str_add_tok(*str
);
7216 if (t
== '{' || t
== '(' || t
== '[') {
7218 } else if (t
== '}' || t
== ')' || t
== ']') {
7220 if (level
== 0 && braces
&& t
== '}')
/* terminate the saved stream (assumed -1/0 sentinel pair -- TODO confirm) */
7225 tok_str_add(*str
, -1);
7226 tok_str_add(*str
, 0);
/* NOTE(review): garbled extraction -- embedded numbering skips show interior
   lines are missing. Code kept verbatim; comments cover visible logic only. */
7230 #define EXPR_CONST 1
/* Parse one initializer element. Saves and restores 'global_expr' around the
   parse (compound literals must then be allocated globally), and rejects
   elements that are not load-time constants with a tcc_error. */
7233 static void parse_init_elem(int expr_type
)
7235 int saved_global_expr
;
7238 /* compound literals must be allocated globally in this case */
7239 saved_global_expr
= global_expr
;
7242 global_expr
= saved_global_expr
;
7243 /* NOTE: symbols are accepted, as well as lvalue for anon symbols
7244 (compound literals). */
7245 if (((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) != VT_CONST
7246 && ((vtop
->r
& (VT_SYM
|VT_LVAL
)) != (VT_SYM
|VT_LVAL
)
7247 || vtop
->sym
->v
< SYM_FIRST_ANOM
))
7248 #ifdef TCC_TARGET_PE
/* on PE, dllimport'ed symbol addresses are not link-time constants either */
7249 || ((vtop
->r
& VT_SYM
) && vtop
->sym
->a
.dllimport
)
7252 tcc_error("initializer element is not constant");
/* NOTE(review): garbled extraction; code kept verbatim.
   Internal sanity check for initializer writes: raises
   "initializer overflow" when the write offset exceeds the section's
   data_offset (static data) or the reserved local_offset (stack data).
   The second definition below is the no-op variant of the same macro
   (presumably the non-debug build -- TODO confirm against full file). */
7261 static void init_assert(init_params
*p
, int offset
)
7263 if (p
->sec
? !NODATA_WANTED
&& offset
> p
->sec
->data_offset
7264 : !nocode_wanted
&& offset
> p
->local_offset
)
7265 tcc_internal_error("initializer overflow");
7268 #define init_assert(sec, offset)
/* NOTE(review): garbled extraction (embedded numbering skips); code verbatim.
   Zero-fills [c, c+size) of a stack-based object. Globals need no work (their
   sections start zeroed); locals emit a call to the memset helper. */
7271 /* put zeros for variable based init */
7272 static void init_putz(init_params
*p
, unsigned long c
, int size
)
7274 init_assert(p
, c
+ size
);
7276 /* nothing to do because globals are already set to zero */
7278 vpush_helper_func(TOK_memset
);
7280 #ifdef TCC_TARGET_ARM
/* DIF_* flags used by decl_initializer()/decl_designator() below */
7292 #define DIF_SIZE_ONLY 2
7293 #define DIF_HAVE_ELEM 4
/* NOTE(review): garbled extraction; code kept verbatim.
   Deletes relocation entries whose r_offset falls in [c, c+size): compacts
   the reloc array in place (memcpy survivors to rel2) and shrinks
   reloc->data_offset by one entry per deletion. Needed when a designated
   initializer overwrites an element that already had a relocation. */
7296 /* delete relocations for specified range c ... c + size. Unfortunatly
7297 in very special cases, relocations may occur unordered */
7298 static void decl_design_delrels(Section
*sec
, int c
, int size
)
7300 ElfW_Rel
*rel
, *rel2
, *rel_end
;
/* nothing to do if there is no section or it has no reloc section */
7301 if (!sec
|| !sec
->reloc
)
7303 rel
= rel2
= (ElfW_Rel
*)sec
->reloc
->data
;
7304 rel_end
= (ElfW_Rel
*)(sec
->reloc
->data
+ sec
->reloc
->data_offset
);
7305 while (rel
< rel_end
) {
7306 if (rel
->r_offset
>= c
&& rel
->r_offset
< c
+ size
) {
7307 sec
->reloc
->data_offset
-= sizeof *rel
;
7310 memcpy(rel2
, rel
, sizeof *rel
);
/* NOTE(review): garbled extraction (an interior line is missing between the
   two branches); code kept verbatim.
   When 'ref' is the one flexible-array ref being initialized
   (p->flex_array_ref), grow its element count as indices are seen;
   for any other zero-sized array, initializing it is an error. */
7317 static void decl_design_flex(init_params
*p
, Sym
*ref
, int index
)
7319 if (ref
== p
->flex_array_ref
) {
7320 if (index
>= ref
->c
)
7322 } else if (ref
->c
< 0)
7323 tcc_error("flexible array has zero size in this context");
/* NOTE(review): garbled extraction -- the embedded numbering skips many
   interior lines; code is preserved verbatim and comments only describe
   what is visible. Handles C99 designators '[index]', GNU '[a ... b]'
   ranges, and '.field'; deletes stale relocations when an element is
   re-initialized; replicates one parsed element across a range. */
7326 /* t is the array or struct type. c is the array or struct
7327 address. cur_field is the pointer to the current
7328 field, for arrays the 'c' member contains the current start
7329 index. 'flags' is as in decl_initializer.
7330 'al' contains the already initialized length of the
7331 current container (starting at c). This returns the new length of that. */
7332 static int decl_designator(init_params
*p
, CType
*type
, unsigned long c
,
7333 Sym
**cur_field
, int flags
, int al
)
7336 int index
, index_last
, align
, l
, nb_elems
, elem_size
;
7337 unsigned long corig
= c
;
7342 if (flags
& DIF_HAVE_ELEM
)
7345 if (gnu_ext
&& tok
>= TOK_UIDENT
) {
7352 /* NOTE: we only support ranges for last designator */
7353 while (nb_elems
== 1 && (tok
== '[' || tok
== '.')) {
7355 if (!(type
->t
& VT_ARRAY
))
7356 expect("array type");
7358 index
= index_last
= expr_const();
/* GNU range designator: [index ... index_last] */
7359 if (tok
== TOK_DOTS
&& gnu_ext
) {
7361 index_last
= expr_const();
7365 decl_design_flex(p
, s
, index_last
);
7366 if (index
< 0 || index_last
>= s
->c
|| index_last
< index
)
7367 tcc_error("index exceeds array bounds or range is empty");
7369 (*cur_field
)->c
= index_last
;
7370 type
= pointed_type(type
);
7371 elem_size
= type_size(type
, &align
);
7372 c
+= index
* elem_size
;
7373 nb_elems
= index_last
- index
+ 1;
/* '.field' designator: only valid on struct/union types */
7380 if ((type
->t
& VT_BTYPE
) != VT_STRUCT
)
7381 expect("struct/union type");
7383 f
= find_field(type
, l
, &cumofs
);
7394 } else if (!gnu_ext
) {
/* no designator: advance to the next positional element/field */
7399 if (type
->t
& VT_ARRAY
) {
7400 index
= (*cur_field
)->c
;
7402 decl_design_flex(p
, s
, index
);
7404 tcc_error("too many initializers");
7405 type
= pointed_type(type
);
7406 elem_size
= type_size(type
, &align
);
7407 c
+= index
* elem_size
;
7410 /* Skip bitfield padding. Also with size 32 and 64. */
7411 while (f
&& (f
->v
& SYM_FIRST_ANOM
) &&
7412 is_integer_btype(f
->type
.t
& VT_BTYPE
))
7413 *cur_field
= f
= f
->next
;
7415 tcc_error("too many initializers");
7421 if (!elem_size
) /* for structs */
7422 elem_size
= type_size(type
, &align
);
7424 /* Using designators the same element can be initialized more
7425 than once. In that case we need to delete possibly already
7426 existing relocations. */
7427 if (!(flags
& DIF_SIZE_ONLY
) && c
- corig
< al
) {
7428 decl_design_delrels(p
->sec
, c
, elem_size
* nb_elems
);
7429 flags
&= ~DIF_CLEAR
; /* mark stack dirty too */
7432 decl_initializer(p
, type
, c
, flags
& ~DIF_FIRST
);
/* replicate the single parsed element over the rest of a [a...b] range */
7434 if (!(flags
& DIF_SIZE_ONLY
) && nb_elems
> 1) {
7438 if (p
->sec
|| (type
->t
& VT_ARRAY
)) {
7439 /* make init_putv/vstore believe it were a struct */
7441 t1
.t
= VT_STRUCT
, t1
.ref
= &aref
;
7445 vpush_ref(type
, p
->sec
, c
, elem_size
);
7447 vset(type
, VT_LOCAL
|VT_LVAL
, c
);
7448 for (i
= 1; i
< nb_elems
; i
++) {
7450 init_putv(p
, type
, c
+ elem_size
* i
);
7455 c
+= nb_elems
* elem_size
;
/* NOTE(review): garbled extraction -- many interior lines missing (embedded
   numbering skips); code preserved verbatim, comments limited to visible
   behavior. Writes one constant value (possibly with a relocation) of the
   given type at offset 'c' of the target section, or falls back to a
   runtime store for stack-based objects. Handles bitfields, 1/2/4/8-byte
   scalars, long-double representation quirks, and copying compound
   literals together with their relocations. */
7461 /* store a value or an expression directly in global data or in local array */
7462 static void init_putv(init_params
*p
, CType
*type
, unsigned long c
)
7468 Section
*sec
= p
->sec
;
7472 dtype
.t
&= ~VT_CONSTANT
; /* need to do that to avoid false warning */
7474 size
= type_size(type
, &align
);
/* for a bitfield, only the bytes actually covered by the field are written */
7475 if (type
->t
& VT_BITFIELD
)
7476 size
= (BIT_POS(type
->t
) + BIT_SIZE(type
->t
) + 7) / 8;
7477 init_assert(p
, c
+ size
);
7480 /* XXX: not portable */
7481 /* XXX: generate error if incorrect relocation */
7482 gen_assign_cast(&dtype
);
7483 bt
= type
->t
& VT_BTYPE
;
/* a symbol reference can only be stored into a pointer-sized field */
7485 if ((vtop
->r
& VT_SYM
)
7487 && (bt
!= (PTR_SIZE
== 8 ? VT_LLONG
: VT_INT
)
7488 || (type
->t
& VT_BITFIELD
))
7489 && !((vtop
->r
& VT_CONST
) && vtop
->sym
->v
>= SYM_FIRST_ANOM
)
7491 tcc_error("initializer element is not computable at load time");
7493 if (NODATA_WANTED
) {
7498 ptr
= sec
->data
+ c
;
7501 /* XXX: make code faster ? */
7502 if ((vtop
->r
& (VT_SYM
|VT_CONST
)) == (VT_SYM
|VT_CONST
) &&
7503 vtop
->sym
->v
>= SYM_FIRST_ANOM
&&
7504 /* XXX This rejects compound literals like
7505 '(void *){ptr}'. The problem is that '&sym' is
7506 represented the same way, which would be ruled out
7507 by the SYM_FIRST_ANOM check above, but also '"string"'
7508 in 'char *p = "string"' is represented the same
7509 with the type being VT_PTR and the symbol being an
7510 anonymous one. That is, there's no difference in vtop
7511 between '(void *){x}' and '&(void *){x}'. Ignore
7512 pointer typed entities here. Hopefully no real code
7513 will ever use compound literals with scalar type. */
7514 (vtop
->type
.t
& VT_BTYPE
) != VT_PTR
) {
7515 /* These come from compound literals, memcpy stuff over. */
7519 esym
= elfsym(vtop
->sym
);
7520 ssec
= tcc_state
->sections
[esym
->st_shndx
];
7521 memmove (ptr
, ssec
->data
+ esym
->st_value
+ (int)vtop
->c
.i
, size
);
7523 /* We need to copy over all memory contents, and that
7524 includes relocations. Use the fact that relocs are
7525 created it order, so look from the end of relocs
7526 until we hit one before the copied region. */
7527 unsigned long relofs
= ssec
->reloc
->data_offset
;
7528 while (relofs
>= sizeof(*rel
)) {
7529 relofs
-= sizeof(*rel
);
7530 rel
= (ElfW_Rel
*)(ssec
->reloc
->data
+ relofs
);
7531 if (rel
->r_offset
>= esym
->st_value
+ size
)
7533 if (rel
->r_offset
< esym
->st_value
)
/* duplicate the relocation, rebased onto the destination offset */
7535 put_elf_reloca(symtab_section
, sec
,
7536 c
+ rel
->r_offset
- esym
->st_value
,
7537 ELFW(R_TYPE
)(rel
->r_info
),
7538 ELFW(R_SYM
)(rel
->r_info
),
/* bitfield store: read-modify-write byte by byte */
7548 if (type
->t
& VT_BITFIELD
) {
7549 int bit_pos
, bit_size
, bits
, n
;
7550 unsigned char *p
, v
, m
;
7551 bit_pos
= BIT_POS(vtop
->type
.t
);
7552 bit_size
= BIT_SIZE(vtop
->type
.t
);
7553 p
= (unsigned char*)ptr
+ (bit_pos
>> 3);
7554 bit_pos
&= 7, bits
= 0;
7559 v
= val
>> bits
<< bit_pos
;
7560 m
= ((1 << n
) - 1) << bit_pos
;
7561 *p
= (*p
& ~m
) | (v
& m
);
7562 bits
+= n
, bit_size
-= n
, bit_pos
= 0, ++p
;
7567 *(char *)ptr
= val
!= 0;
7573 write16le(ptr
, val
);
7576 write32le(ptr
, val
);
7579 write64le(ptr
, val
);
7582 #if defined TCC_IS_NATIVE_387
7583 /* Host and target platform may be different but both have x87.
7584 On windows, tcc does not use VT_LDOUBLE, except when it is a
7585 cross compiler. In this case a mingw gcc as host compiler
7586 comes here with 10-byte long doubles, while msvc or tcc won't.
7587 tcc itself can still translate by asm.
7588 In any case we avoid possibly random bytes 11 and 12.
7590 if (sizeof (long double) >= 10)
7591 memcpy(ptr
, &vtop
->c
.ld
, 10);
7593 else if (sizeof (long double) == sizeof (double))
7594 __asm__("fldl %1\nfstpt %0\n" : "=m" (*ptr
) : "m" (vtop
->c
.ld
));
7596 else if (vtop
->c
.ld
== 0.0)
7600 /* For other platforms it should work natively, but may not work
7601 for cross compilers */
7602 if (sizeof(long double) == LDOUBLE_SIZE
)
7603 memcpy(ptr
, &vtop
->c
.ld
, LDOUBLE_SIZE
);
7604 else if (sizeof(double) == LDOUBLE_SIZE
)
7605 memcpy(ptr
, &vtop
->c
.ld
, LDOUBLE_SIZE
);
7606 #ifndef TCC_CROSS_TEST
7608 tcc_error("can't cross compile long double constants");
7613 /* intptr_t may need a reloc too, see tcctest.c:relocation_test() */
7616 if (vtop
->r
& VT_SYM
)
7617 greloca(sec
, vtop
->sym
, c
, R_DATA_PTR
, val
);
7619 write64le(ptr
, val
);
7622 write32le(ptr
, val
);
7626 write64le(ptr
, val
);
7630 if (vtop
->r
& VT_SYM
)
7631 greloc(sec
, vtop
->sym
, c
, R_DATA_PTR
);
7632 write32le(ptr
, val
);
7636 //tcc_internal_error("unexpected type");
/* stack-based object: emit a normal runtime store instead */
7642 vset(&dtype
, VT_LOCAL
|VT_LVAL
, c
);
/* NOTE(review): garbled extraction -- embedded numbering skips show many
   interior lines are missing; code preserved verbatim. Recursive initializer
   parser: arrays (with merged string-literal fast paths), structs/unions via
   decl_designator, plain scalars; behavior is steered by the DIF_* flags
   (FIRST, SIZE_ONLY, HAVE_ELEM, CLEAR). */
7649 /* 't' contains the type and storage info. 'c' is the offset of the
7650 object in section 'sec'. If 'sec' is NULL, it means stack based
7651 allocation. 'flags & DIF_FIRST' is true if array '{' must be read (multi
7652 dimension implicit array init handling). 'flags & DIF_SIZE_ONLY' is true if
7653 size only evaluation is wanted (only for arrays). */
7654 static void decl_initializer(init_params
*p
, CType
*type
, unsigned long c
, int flags
)
7656 int len
, n
, no_oblock
, i
;
7662 /* generate line number info */
7663 if (debug_modes
&& !p
->sec
)
7664 tcc_debug_line(tcc_state
), tcc_tcov_check_line (tcc_state
, 1);
/* eagerly parse one element when it cannot be a brace block or a string */
7666 if (!(flags
& DIF_HAVE_ELEM
) && tok
!= '{' &&
7667 /* In case of strings we have special handling for arrays, so
7668 don't consume them as initializer value (which would commit them
7669 to some anonymous symbol). */
7670 tok
!= TOK_LSTR
&& tok
!= TOK_STR
&&
7671 (!(flags
& DIF_SIZE_ONLY
)
7672 /* a struct may be initialized from a struct of same type, as in
7673 struct {int x,y;} a = {1,2}, b = {3,4}, c[] = {a,b};
7674 In that case we need to parse the element in order to check
7675 it for compatibility below */
7676 || (type
->t
& VT_BTYPE
) == VT_STRUCT
)
7678 int ncw_prev
= nocode_wanted
;
7679 if ((flags
& DIF_SIZE_ONLY
) && !p
->sec
)
7681 parse_init_elem(!p
->sec
? EXPR_ANY
: EXPR_CONST
);
7682 nocode_wanted
= ncw_prev
;
7683 flags
|= DIF_HAVE_ELEM
;
7686 if (type
->t
& VT_ARRAY
) {
7688 if (((flags
& DIF_FIRST
) && tok
!= TOK_LSTR
&& tok
!= TOK_STR
) ||
7696 t1
= pointed_type(type
);
7697 size1
= type_size(t1
, &align1
);
7699 /* only parse strings here if correct type (otherwise: handle
7700 them as ((w)char *) expressions */
7701 if ((tok
== TOK_LSTR
&&
7702 #ifdef TCC_TARGET_PE
7703 (t1
->t
& VT_BTYPE
) == VT_SHORT
&& (t1
->t
& VT_UNSIGNED
)
7705 (t1
->t
& VT_BTYPE
) == VT_INT
7707 ) || (tok
== TOK_STR
&& (t1
->t
& VT_BTYPE
) == VT_BYTE
)) {
/* concatenate adjacent string literals into initstr */
7709 cstr_reset(&initstr
);
7710 if (size1
!= (tok
== TOK_STR
? 1 : sizeof(nwchar_t
)))
7711 tcc_error("unhandled string literal merging");
7712 while (tok
== TOK_STR
|| tok
== TOK_LSTR
) {
7714 initstr
.size
-= size1
;
7716 len
+= tokc
.str
.size
;
7718 len
+= tokc
.str
.size
/ sizeof(nwchar_t
);
7720 cstr_cat(&initstr
, tokc
.str
.data
, tokc
.str
.size
);
7723 if (tok
!= ')' && tok
!= '}' && tok
!= ',' && tok
!= ';'
7724 && tok
!= TOK_EOF
) {
7725 /* Not a lone literal but part of a bigger expression. */
7726 unget_tok(size1
== 1 ? TOK_STR
: TOK_LSTR
);
7727 tokc
.str
.size
= initstr
.size
;
7728 tokc
.str
.data
= initstr
.data
;
7732 decl_design_flex(p
, s
, len
);
7733 if (!(flags
& DIF_SIZE_ONLY
)) {
7738 tcc_warning("initializer-string for array is too long");
7739 /* in order to go faster for common case (char
7740 string in global variable, we handle it
7742 if (p
->sec
&& size1
== 1) {
7743 init_assert(p
, c
+ nb
);
7745 memcpy(p
->sec
->data
+ c
, initstr
.data
, nb
);
7749 /* only add trailing zero if enough storage (no
7750 warning in this case since it is standard) */
7751 if (flags
& DIF_CLEAR
)
7754 init_putz(p
, c
+ i
* size1
, (n
- i
) * size1
);
7758 } else if (size1
== 1)
7759 ch
= ((unsigned char *)initstr
.data
)[i
];
7761 ch
= ((nwchar_t
*)initstr
.data
)[i
];
7763 init_putv(p
, t1
, c
+ i
* size1
);
7774 /* zero memory once in advance */
7775 if (!(flags
& (DIF_CLEAR
| DIF_SIZE_ONLY
))) {
7776 init_putz(p
, c
, n
*size1
);
7781 /* GNU extension: if the initializer is empty for a flex array,
7782 it's size is zero. We won't enter the loop, so set the size
7784 decl_design_flex(p
, s
, len
);
/* main element loop: one decl_designator() call per element/field */
7785 while (tok
!= '}' || (flags
& DIF_HAVE_ELEM
)) {
7786 len
= decl_designator(p
, type
, c
, &f
, flags
, len
);
7787 flags
&= ~DIF_HAVE_ELEM
;
7788 if (type
->t
& VT_ARRAY
) {
7790 /* special test for multi dimensional arrays (may not
7791 be strictly correct if designators are used at the
7793 if (no_oblock
&& len
>= n
*size1
)
7796 if (s
->type
.t
== VT_UNION
)
7800 if (no_oblock
&& f
== NULL
)
7812 } else if ((flags
& DIF_HAVE_ELEM
)
7813 /* Use i_c_parameter_t, to strip toplevel qualifiers.
7814 The source type might have VT_CONSTANT set, which is
7815 of course assignable to non-const elements. */
7816 && is_compatible_unqualified_types(type
, &vtop
->type
)) {
7819 } else if ((type
->t
& VT_BTYPE
) == VT_STRUCT
) {
7821 if ((flags
& DIF_FIRST
) || tok
== '{') {
7831 } else if (tok
== '{') {
7832 if (flags
& DIF_HAVE_ELEM
)
/* scalar wrapped in redundant braces: recurse without HAVE_ELEM */
7835 decl_initializer(p
, type
, c
, flags
& ~DIF_HAVE_ELEM
);
7838 } else one_elem
: if ((flags
& DIF_SIZE_ONLY
)) {
7839 /* If we supported only ISO C we wouldn't have to accept calling
7840 this on anything than an array if DIF_SIZE_ONLY (and even then
7841 only on the outermost level, so no recursion would be needed),
7842 because initializing a flex array member isn't supported.
7843 But GNU C supports it, so we need to recurse even into
7844 subfields of structs and arrays when DIF_SIZE_ONLY is set. */
7845 /* just skip expression */
7846 if (flags
& DIF_HAVE_ELEM
)
7849 skip_or_save_block(NULL
);
7852 if (!(flags
& DIF_HAVE_ELEM
)) {
7853 /* This should happen only when we haven't parsed
7854 the init element above for fear of committing a
7855 string constant to memory too early. */
7856 if (tok
!= TOK_STR
&& tok
!= TOK_LSTR
)
7857 expect("string constant");
7858 parse_init_elem(!p
->sec
? EXPR_ANY
: EXPR_CONST
);
/* skip storing zeros into an already-cleared stack container */
7860 if (!p
->sec
&& (flags
& DIF_CLEAR
) /* container was already zero'd */
7861 && (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
7863 && btype_size(type
->t
& VT_BTYPE
) /* not for fp constants */
7867 init_putv(p
, type
, c
);
/* NOTE(review): garbled extraction -- embedded numbering skips show many
   interior lines are missing; code preserved verbatim. Allocates storage
   for a declared object (stack slot, data/rodata/bss/common section, or
   VLA), registers cleanup-attribute handlers, optionally emits
   bounds-check info, then parses the initializer via decl_initializer(). */
7871 /* parse an initializer for type 't' if 'has_init' is non zero, and
7872 allocate space in local or global data space ('r' is either
7873 VT_LOCAL or VT_CONST). If 'v' is non zero, then an associated
7874 variable 'v' of scope 'scope' is declared before initializers
7875 are parsed. If 'v' is zero, then a reference to the new object
7876 is put in the value stack. If 'has_init' is 2, a special parsing
7877 is done to handle string constants. */
7878 static void decl_initializer_alloc(CType
*type
, AttributeDef
*ad
, int r
,
7879 int has_init
, int v
, int global
)
7881 int size
, align
, addr
;
7882 TokenString
*init_str
= NULL
;
7885 Sym
*flexible_array
;
7887 int saved_nocode_wanted
= nocode_wanted
;
7888 #ifdef CONFIG_TCC_BCHECK
7889 int bcheck
= tcc_state
->do_bounds_check
&& !NODATA_WANTED
;
7891 init_params p
= {0};
7893 /* Always allocate static or global variables */
7894 if (v
&& (r
& VT_VALMASK
) == VT_CONST
)
7895 nocode_wanted
|= DATA_ONLY_WANTED
;
7897 flexible_array
= NULL
;
7898 size
= type_size(type
, &align
);
7900 /* exactly one flexible array may be initialized, either the
7901 toplevel array or the last member of the toplevel struct */
7904 /* If the base type itself was an array type of unspecified size
7905 (like in 'typedef int arr[]; arr x = {1};') then we will
7906 overwrite the unknown size by the real one for this decl.
7907 We need to unshare the ref symbol holding that size. */
7908 type
->ref
= sym_push(SYM_FIELD
, &type
->ref
->type
, 0, type
->ref
->c
);
7909 p
.flex_array_ref
= type
->ref
;
7911 } else if (has_init
&& (type
->t
& VT_BTYPE
) == VT_STRUCT
) {
7912 Sym
*field
= type
->ref
->next
;
7915 field
= field
->next
;
/* a negative element count marks a flexible array member */
7916 if (field
->type
.t
& VT_ARRAY
&& field
->type
.ref
->c
< 0) {
7917 flexible_array
= field
;
7918 p
.flex_array_ref
= field
->type
.ref
;
7925 /* If unknown size, do a dry-run 1st pass */
7927 tcc_error("unknown type size");
7928 if (has_init
== 2) {
7929 /* only get strings */
7930 init_str
= tok_str_alloc();
7931 while (tok
== TOK_STR
|| tok
== TOK_LSTR
) {
7932 tok_str_add_tok(init_str
);
7935 tok_str_add(init_str
, -1);
7936 tok_str_add(init_str
, 0);
7938 skip_or_save_block(&init_str
);
/* replay the saved initializer tokens for the sizing pass */
7942 begin_macro(init_str
, 1);
7944 decl_initializer(&p
, type
, 0, DIF_FIRST
| DIF_SIZE_ONLY
);
7945 /* prepare second initializer parsing */
7946 macro_ptr
= init_str
->str
;
7949 /* if still unknown size, error */
7950 size
= type_size(type
, &align
);
7952 tcc_error("unknown type size");
7954 /* If there's a flex member and it was used in the initializer
7956 if (flexible_array
&& flexible_array
->type
.ref
->c
> 0)
7957 size
+= flexible_array
->type
.ref
->c
7958 * pointed_size(&flexible_array
->type
);
7961 /* take into account specified alignment if bigger */
7962 if (ad
->a
.aligned
) {
7963 int speca
= 1 << (ad
->a
.aligned
- 1);
7966 } else if (ad
->a
.packed
) {
7970 if (!v
&& NODATA_WANTED
)
7971 size
= 0, align
= 1;
/* stack allocation path */
7973 if ((r
& VT_VALMASK
) == VT_LOCAL
) {
7975 #ifdef CONFIG_TCC_BCHECK
7977 /* add padding between stack variables for bound checking */
7981 loc
= (loc
- size
) & -align
;
7983 p
.local_offset
= addr
+ size
;
7984 #ifdef CONFIG_TCC_BCHECK
7986 /* add padding between stack variables for bound checking */
7991 /* local variable */
7992 #ifdef CONFIG_TCC_ASM
7993 if (ad
->asm_label
) {
7994 int reg
= asm_parse_regvar(ad
->asm_label
);
7996 r
= (r
& ~VT_VALMASK
) | reg
;
7999 sym
= sym_push(v
, type
, r
, addr
);
/* register a __attribute__((cleanup)) handler for this scope */
8000 if (ad
->cleanup_func
) {
8001 Sym
*cls
= sym_push2(&all_cleanups
,
8002 SYM_FIELD
| ++cur_scope
->cl
.n
, 0, 0);
8003 cls
->prev_tok
= sym
;
8004 cls
->next
= ad
->cleanup_func
;
8005 cls
->ncl
= cur_scope
->cl
.s
;
8006 cur_scope
->cl
.s
= cls
;
8011 /* push local reference */
8012 vset(type
, r
, addr
);
8017 /* see if the symbol was already defined */
8020 if (p
.flex_array_ref
&& (sym
->type
.t
& type
->t
& VT_ARRAY
)
8021 && sym
->type
.ref
->c
> type
->ref
->c
) {
8022 /* flex array was already declared with explicit size
8024 int arr[] = { 1,2,3 }; */
8025 type
->ref
->c
= sym
->type
.ref
->c
;
8026 size
= type_size(type
, &align
);
8028 patch_storage(sym
, ad
, type
);
8029 /* we accept several definitions of the same global variable. */
8030 if (!has_init
&& sym
->c
&& elfsym(sym
)->st_shndx
!= SHN_UNDEF
)
8035 /* allocate symbol in corresponding section */
8039 while ((tp
->t
& (VT_BTYPE
|VT_ARRAY
)) == (VT_PTR
|VT_ARRAY
))
8040 tp
= &tp
->ref
->type
;
8041 if (tp
->t
& VT_CONSTANT
) {
8042 sec
= rodata_section
;
8043 } else if (has_init
) {
8045 /*if (tcc_state->g_debug & 4)
8046 tcc_warning("rw data: %s", get_tok_str(v, 0));*/
8047 } else if (tcc_state
->nocommon
)
8052 addr
= section_add(sec
, size
, align
);
8053 #ifdef CONFIG_TCC_BCHECK
8054 /* add padding if bound check */
8056 section_add(sec
, 1, 1);
8059 addr
= align
; /* SHN_COMMON is special, symbol value is align */
8060 sec
= common_section
;
8065 sym
= sym_push(v
, type
, r
| VT_SYM
, 0);
8066 patch_storage(sym
, ad
, NULL
);
8068 /* update symbol definition */
8069 put_extern_sym(sym
, sec
, addr
, size
);
8071 /* push global reference */
8072 vpush_ref(type
, sec
, addr
, size
);
8077 #ifdef CONFIG_TCC_BCHECK
8078 /* handles bounds now because the symbol must be defined
8079 before for the relocation */
8083 greloca(bounds_section
, sym
, bounds_section
->data_offset
, R_DATA_PTR
, 0);
8084 /* then add global bound info */
8085 bounds_ptr
= section_ptr_add(bounds_section
, 2 * sizeof(addr_t
));
8086 bounds_ptr
[0] = 0; /* relocated */
8087 bounds_ptr
[1] = size
;
/* VLA path: runtime stack allocation */
8092 if (type
->t
& VT_VLA
) {
8098 /* save before-VLA stack pointer if needed */
8099 if (cur_scope
->vla
.num
== 0) {
8100 if (cur_scope
->prev
&& cur_scope
->prev
->vla
.num
) {
8101 cur_scope
->vla
.locorig
= cur_scope
->prev
->vla
.loc
;
8103 gen_vla_sp_save(loc
-= PTR_SIZE
);
8104 cur_scope
->vla
.locorig
= loc
;
8108 vpush_type_size(type
, &a
);
8109 gen_vla_alloc(type
, a
);
8110 #if defined TCC_TARGET_PE && defined TCC_TARGET_X86_64
8111 /* on _WIN64, because of the function args scratch area, the
8112 result of alloca differs from RSP and is returned in RAX. */
8113 gen_vla_result(addr
), addr
= (loc
-= PTR_SIZE
);
8115 gen_vla_sp_save(addr
);
8116 cur_scope
->vla
.loc
= addr
;
8117 cur_scope
->vla
.num
++;
8118 } else if (has_init
) {
8120 decl_initializer(&p
, type
, addr
, DIF_FIRST
);
8121 /* patch flexible array member size back to -1, */
8122 /* for possible subsequent similar declarations */
8124 flexible_array
->type
.ref
->c
= -1;
8128 /* restore parse state if needed */
8134 nocode_wanted
= saved_nocode_wanted
;
/* NOTE(review): garbled extraction (interior lines missing); code verbatim.
   Emits, at function entry, the size computation for a VLA-typed parameter
   whose dimension expression tokens were saved during post_type(): reserves
   an int-sized stack slot, replays the saved tokens via begin_macro, and
   stores the computed size there. Recurses first into the referenced type. */
8137 /* generate vla code saved in post_type() */
8138 static void func_vla_arg_code(Sym
*arg
)
8141 TokenString
*vla_array_tok
= NULL
;
8144 func_vla_arg_code(arg
->type
.ref
);
8146 if ((arg
->type
.t
& VT_VLA
) && arg
->type
.ref
->vla_array_str
) {
/* reserve a local int slot to hold the evaluated dimension */
8147 loc
-= type_size(&int_type
, &align
);
8149 arg
->type
.ref
->c
= loc
;
8152 vla_array_tok
= tok_str_alloc();
8153 vla_array_tok
->str
= arg
->type
.ref
->vla_array_str
;
8154 begin_macro(vla_array_tok
, 1);
8159 vpush_type_size(&arg
->type
.ref
->type
, &align
);
8161 vset(&int_type
, VT_LOCAL
|VT_LVAL
, arg
->type
.ref
->c
);
/* NOTE(review): garbled extraction; code verbatim.
   Walks the parameter list of function symbol 'sym' and generates VLA
   sizing code (func_vla_arg_code) for each VLA-typed parameter. */
8168 static void func_vla_arg(Sym
*sym
)
8172 for (arg
= sym
->type
.ref
->next
; arg
; arg
= arg
->next
)
8173 if (arg
->type
.t
& VT_VLA
)
8174 func_vla_arg_code(arg
);
/* NOTE(review): garbled extraction -- embedded numbering skips show interior
   lines are missing; code preserved verbatim. Per-function code generation
   driver: sets up the root scope, pads for requested alignment, emits the
   function symbol and ctor/dtor array entries, sets func_vt/func_var and
   debug info, generates the body, then tears down local state and poisons
   globals "for safety". */
8177 /* parse a function defined by symbol 'sym' and generate its code in
8178 'cur_text_section' */
8179 static void gen_function(Sym
*sym
)
8181 struct scope f
= { 0 };
8182 cur_scope
= root_scope
= &f
;
8184 ind
= cur_text_section
->data_offset
;
/* honor __attribute__((aligned)) by NOP-padding to the boundary */
8185 if (sym
->a
.aligned
) {
8186 size_t newoff
= section_add(cur_text_section
, 0,
8187 1 << (sym
->a
.aligned
- 1));
8188 gen_fill_nops(newoff
- ind
);
8190 /* NOTE: we patch the symbol size later */
8191 put_extern_sym(sym
, cur_text_section
, ind
, 0);
8192 if (sym
->type
.ref
->f
.func_ctor
)
8193 add_array (tcc_state
, ".init_array", sym
->c
);
8194 if (sym
->type
.ref
->f
.func_dtor
)
8195 add_array (tcc_state
, ".fini_array", sym
->c
);
8197 funcname
= get_tok_str(sym
->v
, NULL
);
8199 func_vt
= sym
->type
.ref
->type
;
8200 func_var
= sym
->type
.ref
->f
.func_type
== FUNC_ELLIPSIS
;
8202 /* put debug symbol */
8203 tcc_debug_funcstart(tcc_state
, sym
);
8204 /* push a dummy symbol to enable local sym storage */
8205 sym_push2(&local_stack
, SYM_FIELD
, 0, 0);
8206 local_scope
= 1; /* for function parameters */
8208 tcc_debug_prolog_epilog(tcc_state
, 0);
8211 clear_temp_local_var_list();
8216 /* reset local stack */
8217 pop_local_syms(NULL
, 0);
8218 tcc_debug_prolog_epilog(tcc_state
, 1);
8220 cur_text_section
->data_offset
= ind
;
8222 label_pop(&global_label_stack
, NULL
, 0);
8223 sym_pop(&all_cleanups
, NULL
, 0);
8224 /* patch symbol size */
8225 elfsym(sym
)->st_size
= ind
- func_ind
;
8226 /* end of function */
8227 tcc_debug_funcend(tcc_state
, ind
- func_ind
);
8228 /* It's better to crash than to generate wrong code */
8229 cur_text_section
= NULL
;
8230 funcname
= ""; /* for safety */
8231 func_vt
.t
= VT_VOID
; /* for safety */
8232 func_var
= 0; /* for safety */
8233 ind
= 0; /* for safety */
8235 nocode_wanted
= DATA_ONLY_WANTED
;
8237 /* do this after funcend debug info */
/* NOTE(review): garbled extraction (interior lines missing); code verbatim.
   Generates code for saved inline functions: iterates the recorded
   InlineFunc list and, for each function that was referenced (sym->c) or
   must be emitted anyway (not VT_INLINE), replays its saved token string
   via begin_macro into text_section. Loops until a full pass generates
   nothing new, since generating one inline body may reference another. */
8241 static void gen_inline_functions(TCCState
*s
)
8244 int inline_generated
, i
;
8245 struct InlineFunc
*fn
;
8247 tcc_open_bf(s
, ":inline:", 0);
8248 /* iterate while inline function are referenced */
8250 inline_generated
= 0;
8251 for (i
= 0; i
= 0; i
< s
->nb_inline_fns
; ++i
) {
8252 fn
= s
->inline_fns
[i
];
8254 if (sym
&& (sym
->c
|| !(sym
->type
.t
& VT_INLINE
))) {
8255 /* the function was used or forced (and then not internal):
8256 generate its code and convert it to a normal function */
8258 tcc_debug_putfile(s
, fn
->filename
);
8259 begin_macro(fn
->func_str
, 1);
8261 cur_text_section
= text_section
;
8265 inline_generated
= 1;
8268 } while (inline_generated
);
/* NOTE(review): garbled extraction; code verbatim.
   Frees the token strings of inline functions that were never emitted,
   then releases the inline_fns dynarray itself. */
8272 static void free_inline_functions(TCCState
*s
)
8275 /* free tokens of unused inline functions */
8276 for (i
= 0; i
< s
->nb_inline_fns
; ++i
) {
8277 struct InlineFunc
*fn
= s
->inline_fns
[i
];
8279 tok_str_free(fn
->func_str
);
8281 dynarray_reset(&s
->inline_fns
, &s
->nb_inline_fns
);
/* NOTE(review): garbled extraction (interior lines missing); code verbatim.
   Handles a _Static_assert declaration: on a failed condition, reports
   either the default message or, when a message operand is present, the
   user-supplied string collected by parse_mult_str (freed afterwards). */
8284 static void do_Static_assert(void){
8294 tcc_error("_Static_assert fail");
8296 goto static_assert_out
;
8300 parse_mult_str(&error_str
, "string constant");
8302 tcc_error("%s", (char *)error_str
.data
);
8303 cstr_free(&error_str
);
8309 /* 'l' is VT_LOCAL or VT_CONST to define default storage type
8310 or VT_CMP if parsing old style parameter list
8311 or VT_JMP if parsing c99 for decl: for (int i = 0, ...) */
8312 static int decl(int l
)
8314 int v
, has_init
, r
, oldint
;
8317 AttributeDef ad
, adbase
;
8320 if (tok
== TOK_STATIC_ASSERT
) {
8326 if (!parse_btype(&btype
, &adbase
, l
== VT_LOCAL
)) {
8329 /* skip redundant ';' if not in old parameter decl scope */
8330 if (tok
== ';' && l
!= VT_CMP
) {
8336 if (tok
== TOK_ASM1
|| tok
== TOK_ASM2
|| tok
== TOK_ASM3
) {
8337 /* global asm block */
8341 if (tok
>= TOK_UIDENT
) {
8342 /* special test for old K&R protos without explicit int
8343 type. Only accepted when defining global data */
8348 expect("declaration");
8354 if ((btype
.t
& VT_BTYPE
) == VT_STRUCT
) {
8356 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) >= SYM_FIRST_ANOM
)
8357 tcc_warning("unnamed struct/union that defines no instances");
8361 if (IS_ENUM(btype
.t
)) {
8367 while (1) { /* iterate thru each declaration */
8370 type_decl(&type
, &ad
, &v
, TYPE_DIRECT
);
8374 type_to_str(buf
, sizeof(buf
), &type
, get_tok_str(v
, NULL
));
8375 printf("type = '%s'\n", buf
);
8378 if ((type
.t
& VT_BTYPE
) == VT_FUNC
) {
8379 if ((type
.t
& VT_STATIC
) && (l
!= VT_CONST
))
8380 tcc_error("function without file scope cannot be static");
8381 /* if old style function prototype, we accept a
8384 if (sym
->f
.func_type
== FUNC_OLD
&& l
== VT_CONST
) {
8388 #if defined TCC_TARGET_MACHO || defined TARGETOS_ANDROID
8389 if (sym
->f
.func_alwinl
8390 && ((type
.t
& (VT_EXTERN
| VT_INLINE
))
8391 == (VT_EXTERN
| VT_INLINE
))) {
8392 /* always_inline functions must be handled as if they
8393 don't generate multiple global defs, even if extern
8394 inline, i.e. GNU inline semantics for those. Rewrite
8395 them into static inline. */
8396 type
.t
&= ~VT_EXTERN
;
8397 type
.t
|= VT_STATIC
;
8400 /* always compile 'extern inline' */
8401 if (type
.t
& VT_EXTERN
)
8402 type
.t
&= ~VT_INLINE
;
8404 } else if (oldint
) {
8405 tcc_warning("type defaults to int");
8408 if (gnu_ext
&& (tok
== TOK_ASM1
|| tok
== TOK_ASM2
|| tok
== TOK_ASM3
)) {
8409 ad
.asm_label
= asm_label_instr();
8410 /* parse one last attribute list, after asm label */
8411 parse_attribute(&ad
);
8413 /* gcc does not allow __asm__("label") with function definition,
8420 #ifdef TCC_TARGET_PE
8421 if (ad
.a
.dllimport
|| ad
.a
.dllexport
) {
8422 if (type
.t
& VT_STATIC
)
8423 tcc_error("cannot have dll linkage with static");
8424 if (type
.t
& VT_TYPEDEF
) {
8425 tcc_warning("'%s' attribute ignored for typedef",
8426 ad
.a
.dllimport
? (ad
.a
.dllimport
= 0, "dllimport") :
8427 (ad
.a
.dllexport
= 0, "dllexport"));
8428 } else if (ad
.a
.dllimport
) {
8429 if ((type
.t
& VT_BTYPE
) == VT_FUNC
)
8432 type
.t
|= VT_EXTERN
;
8438 tcc_error("cannot use local functions");
8439 if ((type
.t
& VT_BTYPE
) != VT_FUNC
)
8440 expect("function definition");
8442 /* reject abstract declarators in function definition
8443 make old style params without decl have int type */
8445 while ((sym
= sym
->next
) != NULL
) {
8446 if (!(sym
->v
& ~SYM_FIELD
))
8447 expect("identifier");
8448 if (sym
->type
.t
== VT_VOID
)
8449 sym
->type
= int_type
;
8452 /* apply post-declaraton attributes */
8453 merge_funcattr(&type
.ref
->f
, &ad
.f
);
8455 /* put function symbol */
8456 type
.t
&= ~VT_EXTERN
;
8457 sym
= external_sym(v
, &type
, 0, &ad
);
8459 /* static inline functions are just recorded as a kind
8460 of macro. Their code will be emitted at the end of
8461 the compilation unit only if they are used */
8462 if (sym
->type
.t
& VT_INLINE
) {
8463 struct InlineFunc
*fn
;
8464 fn
= tcc_malloc(sizeof *fn
+ strlen(file
->filename
));
8465 strcpy(fn
->filename
, file
->filename
);
8467 skip_or_save_block(&fn
->func_str
);
8468 dynarray_add(&tcc_state
->inline_fns
,
8469 &tcc_state
->nb_inline_fns
, fn
);
8471 /* compute text section */
8472 cur_text_section
= ad
.section
;
8473 if (!cur_text_section
)
8474 cur_text_section
= text_section
;
8480 /* find parameter in function parameter list */
8481 for (sym
= func_vt
.ref
->next
; sym
; sym
= sym
->next
)
8482 if ((sym
->v
& ~SYM_FIELD
) == v
)
8484 tcc_error("declaration for parameter '%s' but no such parameter",
8485 get_tok_str(v
, NULL
));
8487 if (type
.t
& VT_STORAGE
) /* 'register' is okay */
8488 tcc_error("storage class specified for '%s'",
8489 get_tok_str(v
, NULL
));
8490 if (sym
->type
.t
!= VT_VOID
)
8491 tcc_error("redefinition of parameter '%s'",
8492 get_tok_str(v
, NULL
));
8493 convert_parameter_type(&type
);
8495 } else if (type
.t
& VT_TYPEDEF
) {
8496 /* save typedefed type */
8497 /* XXX: test storage specifiers ? */
8499 if (sym
&& sym
->sym_scope
== local_scope
) {
8500 if (!is_compatible_types(&sym
->type
, &type
)
8501 || !(sym
->type
.t
& VT_TYPEDEF
))
8502 tcc_error("incompatible redefinition of '%s'",
8503 get_tok_str(v
, NULL
));
8506 sym
= sym_push(v
, &type
, 0, 0);
8509 if ((type
.t
& VT_BTYPE
) == VT_FUNC
)
8510 merge_funcattr(&sym
->type
.ref
->f
, &ad
.f
);
8512 tcc_debug_typedef (tcc_state
, sym
);
8513 } else if ((type
.t
& VT_BTYPE
) == VT_VOID
8514 && !(type
.t
& VT_EXTERN
)) {
8515 tcc_error("declaration of void object");
8518 if ((type
.t
& VT_BTYPE
) == VT_FUNC
) {
8519 /* external function definition */
8520 /* specific case for func_call attribute */
8521 merge_funcattr(&type
.ref
->f
, &ad
.f
);
8522 } else if (!(type
.t
& VT_ARRAY
)) {
8523 /* not lvalue if array */
8526 has_init
= (tok
== '=');
8527 if (has_init
&& (type
.t
& VT_VLA
))
8528 tcc_error("variable length array cannot be initialized");
8529 if (((type
.t
& VT_EXTERN
) && (!has_init
|| l
!= VT_CONST
))
8530 || (type
.t
& VT_BTYPE
) == VT_FUNC
8531 /* as with GCC, uninitialized global arrays with no size
8532 are considered extern: */
8533 || ((type
.t
& VT_ARRAY
) && !has_init
8534 && l
== VT_CONST
&& type
.ref
->c
< 0)
8536 /* external variable or function */
8537 type
.t
|= VT_EXTERN
;
8538 sym
= external_sym(v
, &type
, r
, &ad
);
8539 if (ad
.alias_target
) {
8540 /* Aliases need to be emitted when their target
8541 symbol is emitted, even if perhaps unreferenced.
8542 We only support the case where the base is
8543 already defined, otherwise we would need
8544 deferring to emit the aliases until the end of
8545 the compile unit. */
8546 Sym
*alias_target
= sym_find(ad
.alias_target
);
8547 ElfSym
*esym
= elfsym(alias_target
);
8549 tcc_error("unsupported forward __alias__ attribute");
8550 put_extern_sym2(sym
, esym
->st_shndx
,
8551 esym
->st_value
, esym
->st_size
, 1);
8554 if (l
== VT_CONST
|| (type
.t
& VT_STATIC
))
8560 else if (l
== VT_CONST
)
8561 /* uninitialized global variables may be overridden */
8562 type
.t
|= VT_EXTERN
;
8563 decl_initializer_alloc(&type
, &ad
, r
, has_init
, v
, l
== VT_CONST
);
8579 /* ------------------------------------------------------------------------- */
8582 /* ------------------------------------------------------------------------- */