2 * TCC - Tiny C Compiler
4 * Copyright (c) 2001-2004 Fabrice Bellard
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
24 /********************************************************/
25 /* global variables */
27 /* loc : local variable index
28 ind : output code index
30 anon_sym: anonymous symbol index
/* Core code-generation counters (see the partial comment above:
   loc = local variable index, ind = output code index,
   anon_sym = anonymous symbol index).
   rsym: presumably the current function's return-label symbol —
   not documented in the visible header, TODO confirm. */
32 ST_DATA
int rsym
, anon_sym
, ind
, loc
;
/* Symbol stacks. Each is a linked list of Sym, pushed/popped as
   scopes open and close (see sym_push/sym_pop below). */
/* identifiers with file/global scope */
34 ST_DATA Sym
*global_stack
;
/* identifiers in the current block scope */
35 ST_DATA Sym
*local_stack
;
/* preprocessor #define symbols — NOTE(review): freed elsewhere
   ("free preprocessor macros" in tccgen_finish), confirm owner */
36 ST_DATA Sym
*define_stack
;
/* C labels visible across the whole function */
37 ST_DATA Sym
*global_label_stack
;
/* __label__ declarations local to a block */
38 ST_DATA Sym
*local_label_stack
;
/* Symbol allocator state: Syms are carved from pooled chunks
   (see __sym_malloc) and recycled through a free list. */
/* head of the free list of recycled Sym entries */
40 static Sym
*sym_free_first
;
/* dynarray of allocated pools, released in tccgen_finish */
41 static void **sym_pools
;
42 static int nb_sym_pools
;
/* pending cleanup handlers and unresolved gotos — semantics not
   visible in this chunk, TODO confirm against full tccgen.c */
44 static Sym
*all_cleanups
, *pending_gotos
;
/* current lexical scope nesting depth (0 = file scope) */
45 static int local_scope
;
47 static int constant_p
;
/* bit 0: debug info requested; bit 1: test coverage
   (set in tccgen_compile from s1->do_debug / s1->test_coverage) */
48 ST_DATA
char debug_modes
;
/* The value stack used by the code generator. One extra slot is
   reserved below vstack[0] so that vtop may legally point to
   vstack - 1 when the stack is empty (see check_vstack). */
51 static SValue _vstack
[1 + VSTACK_SIZE
];
52 #define vstack (_vstack + 1)
54 ST_DATA
int const_wanted
; /* true if constant wanted */
/* nocode_wanted is a bit-set, not a plain boolean: any nonzero
   value suppresses code emission; DATA_ONLY_WANTED additionally
   flips it negative so NODATA_WANTED stays false for static data. */
55 ST_DATA
int nocode_wanted
; /* no code generation wanted */
56 #define unevalmask 0xffff /* unevaluated subexpression */
57 #define NODATA_WANTED (nocode_wanted > 0) /* no static data output wanted either */
58 #define DATA_ONLY_WANTED 0x80000000 /* ON outside of functions and for static initializers */
/* CODE_OFF sets the suppression bit only if nothing else already
   suppresses code; CODE_ON clears exactly that bit again. */
59 #define CODE_OFF() if(!nocode_wanted)(nocode_wanted |= 0x20000000)
60 #define CODE_ON() (nocode_wanted &= ~0x20000000)
62 ST_DATA
int global_expr
; /* true if compound literals must be allocated globally (used during initializers parsing */
63 ST_DATA CType func_vt
; /* current function return type (used by return instruction) */
64 ST_DATA
int func_var
; /* true if current function is variadic (used by return instruction) */
/* name of the function currently being compiled */
67 ST_DATA
const char *funcname
;
/* frequently used prebuilt types, initialized in tccgen_init */
68 ST_DATA CType int_type
, func_old_type
, char_type
, char_pointer_type
;
69 static CString initstr
;
/* NOTE(review): the three VT_SIZE_T/VT_PTRDIFF_T definition pairs
   below are alternatives selected by preprocessor conditionals on
   the target pointer size; the #if/#elif lines are missing from
   this extraction — do not treat these as plain redefinitions. */
72 #define VT_SIZE_T (VT_INT | VT_UNSIGNED)
73 #define VT_PTRDIFF_T VT_INT
75 #define VT_SIZE_T (VT_LLONG | VT_UNSIGNED)
76 #define VT_PTRDIFF_T VT_LLONG
78 #define VT_SIZE_T (VT_LONG | VT_LLONG | VT_UNSIGNED)
79 #define VT_PTRDIFF_T (VT_LONG | VT_LLONG)
82 static struct switch_t
{
86 } **p
; int n
; /* list of case ranges */
87 int def_sym
; /* default symbol */
91 struct switch_t
*prev
;
93 } *cur_switch
; /* current switch */
95 #define MAX_TEMP_LOCAL_VARIABLE_NUMBER 8
96 /*list of temporary local variables on the stack in current function. */
97 static struct temp_local_variable
{
98 int location
; //offset on stack. Svalue.c.i
101 } arr_temp_local_vars
[MAX_TEMP_LOCAL_VARIABLE_NUMBER
];
102 static int nb_temp_local_vars
;
104 static struct scope
{
106 struct { int loc
, locorig
, num
; } vla
;
107 struct { Sym
*s
; int n
; } cl
;
110 } *cur_scope
, *loop_scope
, *root_scope
;
/* ------------------------------------------------------------- */
/* Forward declarations for the recursive-descent parser and the
   code generator below. */
119 #define precedence_parser
120 static void init_prec(void);
/* type conversion and type-query helpers */
123 static void gen_cast(CType
*type
);
124 static void gen_cast_s(int t
);
125 static inline CType
*pointed_type(CType
*type
);
126 static int is_compatible_types(CType
*type1
, CType
*type2
);
/* declaration and initializer parsing */
127 static int parse_btype(CType
*type
, AttributeDef
*ad
, int ignore_label
);
128 static CType
*type_decl(CType
*type
, AttributeDef
*ad
, int *v
, int td
);
129 static void parse_expr_type(CType
*type
);
130 static void init_putv(init_params
*p
, CType
*type
, unsigned long c
);
131 static void decl_initializer(init_params
*p
, CType
*type
, unsigned long c
, int flags
);
132 static void block(int is_expr
);
133 static void decl_initializer_alloc(CType
*type
, AttributeDef
*ad
, int r
, int has_init
, int v
, int scope
);
134 static int decl(int l
);
135 static void expr_eq(void);
136 static void vpush_type_size(CType
*type
, int *a
);
137 static int is_compatible_unqualified_types(CType
*type1
, CType
*type2
);
138 static inline int64_t expr_const64(void);
/* value-stack (vstack) manipulation */
139 static void vpush64(int ty
, unsigned long long v
);
140 static void vpush(CType
*type
);
141 static int gvtst(int inv
, int t
);
/* inline function bookkeeping */
142 static void gen_inline_functions(TCCState
*s
);
143 static void free_inline_functions(TCCState
*s
);
144 static void skip_or_save_block(TokenString
**str
);
145 static void gv_dup(void);
/* temporary stack-slot reuse for register spills */
146 static int get_temp_local_var(int size
,int align
);
147 static void clear_temp_local_var_list();
148 static void cast_error(CType
*st
, CType
*dt
);
150 /* ------------------------------------------------------------------------- */
151 /* Automagical code suppression */
153 /* Clear 'nocode_wanted' at forward label if it was used */
154 ST_FUNC
void gsym(int t
)
162 /* Clear 'nocode_wanted' if current pc is a label */
168 tcc_tcov_block_begin(tcc_state
);
172 /* Set 'nocode_wanted' after unconditional (backwards) jump */
173 static void gjmp_addr_acs(int t
)
179 /* Set 'nocode_wanted' after unconditional (forwards) jump */
180 static int gjmp_acs(int t
)
187 /* These are #undef'd at the end of this file */
188 #define gjmp_addr gjmp_addr_acs
189 #define gjmp gjmp_acs
190 /* ------------------------------------------------------------------------- */
192 ST_INLN
int is_float(int t
)
194 int bt
= t
& VT_BTYPE
;
195 return bt
== VT_LDOUBLE
201 static inline int is_integer_btype(int bt
)
210 static int btype_size(int bt
)
212 return bt
== VT_BYTE
|| bt
== VT_BOOL
? 1 :
216 bt
== VT_PTR
? PTR_SIZE
: 0;
219 /* returns function return register from type */
220 static int R_RET(int t
)
224 #ifdef TCC_TARGET_X86_64
225 if ((t
& VT_BTYPE
) == VT_LDOUBLE
)
227 #elif defined TCC_TARGET_RISCV64
228 if ((t
& VT_BTYPE
) == VT_LDOUBLE
)
234 /* returns 2nd function return register, if any */
235 static int R2_RET(int t
)
241 #elif defined TCC_TARGET_X86_64
246 #elif defined TCC_TARGET_RISCV64
253 /* returns true for two-word types */
254 #define USING_TWO_WORDS(t) (R2_RET(t) != VT_CONST)
256 /* put function return registers to stack value */
257 static void PUT_R_RET(SValue
*sv
, int t
)
259 sv
->r
= R_RET(t
), sv
->r2
= R2_RET(t
);
262 /* returns function return register class for type t */
263 static int RC_RET(int t
)
265 return reg_classes
[R_RET(t
)] & ~(RC_FLOAT
| RC_INT
);
268 /* returns generic register class for type t */
269 static int RC_TYPE(int t
)
273 #ifdef TCC_TARGET_X86_64
274 if ((t
& VT_BTYPE
) == VT_LDOUBLE
)
276 if ((t
& VT_BTYPE
) == VT_QFLOAT
)
278 #elif defined TCC_TARGET_RISCV64
279 if ((t
& VT_BTYPE
) == VT_LDOUBLE
)
285 /* returns 2nd register class corresponding to t and rc */
286 static int RC2_TYPE(int t
, int rc
)
288 if (!USING_TWO_WORDS(t
))
303 /* we use our own 'finite' function to avoid potential problems with
304 non standard math libs */
305 /* XXX: endianness dependent */
306 ST_FUNC
int ieee_finite(double d
)
309 memcpy(p
, &d
, sizeof(double));
310 return ((unsigned)((p
[1] | 0x800fffff) + 1)) >> 31;
313 /* compiling intel long double natively */
314 #if (defined __i386__ || defined __x86_64__) \
315 && (defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64)
316 # define TCC_IS_NATIVE_387
319 ST_FUNC
void test_lvalue(void)
321 if (!(vtop
->r
& VT_LVAL
))
325 ST_FUNC
void check_vstack(void)
327 if (vtop
!= vstack
- 1)
328 tcc_error("internal compiler error: vstack leak (%d)",
329 (int)(vtop
- vstack
+ 1));
332 /* vstack debugging aid */
334 void pv (const char *lbl
, int a
, int b
)
337 for (i
= a
; i
< a
+ b
; ++i
) {
338 SValue
*p
= &vtop
[-i
];
339 printf("%s vtop[-%d] : type.t:%04x r:%04x r2:%04x c.i:%d\n",
340 lbl
, i
, p
->type
.t
, p
->r
, p
->r2
, (int)p
->c
.i
);
345 /* ------------------------------------------------------------------------- */
346 /* initialize vstack and types. This must be done also for tcc -E */
347 ST_FUNC
void tccgen_init(TCCState
*s1
)
350 memset(vtop
, 0, sizeof *vtop
);
352 /* define some often used types */
355 char_type
.t
= VT_BYTE
;
356 if (s1
->char_is_unsigned
)
357 char_type
.t
|= VT_UNSIGNED
;
358 char_pointer_type
= char_type
;
359 mk_pointer(&char_pointer_type
);
361 func_old_type
.t
= VT_FUNC
;
362 func_old_type
.ref
= sym_push(SYM_FIELD
, &int_type
, 0, 0);
363 func_old_type
.ref
->f
.func_call
= FUNC_CDECL
;
364 func_old_type
.ref
->f
.func_type
= FUNC_OLD
;
365 #ifdef precedence_parser
371 ST_FUNC
int tccgen_compile(TCCState
*s1
)
373 cur_text_section
= NULL
;
376 anon_sym
= SYM_FIRST_ANOM
;
378 nocode_wanted
= DATA_ONLY_WANTED
; /* no code outside of functions */
380 debug_modes
= (s1
->do_debug
? 1 : 0) | s1
->test_coverage
<< 1;
384 #ifdef TCC_TARGET_ARM
388 printf("%s: **** new file\n", file
->filename
);
390 parse_flags
= PARSE_FLAG_PREPROCESS
| PARSE_FLAG_TOK_NUM
| PARSE_FLAG_TOK_STR
;
393 gen_inline_functions(s1
);
395 /* end of translation unit info */
401 ST_FUNC
void tccgen_finish(TCCState
*s1
)
404 free_inline_functions(s1
);
405 sym_pop(&global_stack
, NULL
, 0);
406 sym_pop(&local_stack
, NULL
, 0);
407 /* free preprocessor macros */
410 dynarray_reset(&sym_pools
, &nb_sym_pools
);
411 sym_free_first
= NULL
;
414 /* ------------------------------------------------------------------------- */
415 ST_FUNC ElfSym
*elfsym(Sym
*s
)
419 return &((ElfSym
*)symtab_section
->data
)[s
->c
];
422 /* apply storage attributes to Elf symbol */
423 ST_FUNC
void update_storage(Sym
*sym
)
426 int sym_bind
, old_sym_bind
;
432 if (sym
->a
.visibility
)
433 esym
->st_other
= (esym
->st_other
& ~ELFW(ST_VISIBILITY
)(-1))
436 if (sym
->type
.t
& (VT_STATIC
| VT_INLINE
))
437 sym_bind
= STB_LOCAL
;
438 else if (sym
->a
.weak
)
441 sym_bind
= STB_GLOBAL
;
442 old_sym_bind
= ELFW(ST_BIND
)(esym
->st_info
);
443 if (sym_bind
!= old_sym_bind
) {
444 esym
->st_info
= ELFW(ST_INFO
)(sym_bind
, ELFW(ST_TYPE
)(esym
->st_info
));
448 if (sym
->a
.dllimport
)
449 esym
->st_other
|= ST_PE_IMPORT
;
450 if (sym
->a
.dllexport
)
451 esym
->st_other
|= ST_PE_EXPORT
;
455 printf("storage %s: bind=%c vis=%d exp=%d imp=%d\n",
456 get_tok_str(sym
->v
, NULL
),
457 sym_bind
== STB_WEAK
? 'w' : sym_bind
== STB_LOCAL
? 'l' : 'g',
465 /* ------------------------------------------------------------------------- */
466 /* update sym->c so that it points to an external symbol in section
467 'section' with value 'value' */
469 ST_FUNC
void put_extern_sym2(Sym
*sym
, int sh_num
,
470 addr_t value
, unsigned long size
,
471 int can_add_underscore
)
473 int sym_type
, sym_bind
, info
, other
, t
;
479 name
= get_tok_str(sym
->v
, NULL
);
481 if ((t
& VT_BTYPE
) == VT_FUNC
) {
483 } else if ((t
& VT_BTYPE
) == VT_VOID
) {
484 sym_type
= STT_NOTYPE
;
485 if ((t
& (VT_BTYPE
|VT_ASM_FUNC
)) == VT_ASM_FUNC
)
488 sym_type
= STT_OBJECT
;
490 if (t
& (VT_STATIC
| VT_INLINE
))
491 sym_bind
= STB_LOCAL
;
493 sym_bind
= STB_GLOBAL
;
497 if (sym_type
== STT_FUNC
&& sym
->type
.ref
) {
498 Sym
*ref
= sym
->type
.ref
;
499 if (ref
->a
.nodecorate
) {
500 can_add_underscore
= 0;
502 if (ref
->f
.func_call
== FUNC_STDCALL
&& can_add_underscore
) {
503 sprintf(buf1
, "_%s@%d", name
, ref
->f
.func_args
* PTR_SIZE
);
505 other
|= ST_PE_STDCALL
;
506 can_add_underscore
= 0;
511 if (sym
->asm_label
) {
512 name
= get_tok_str(sym
->asm_label
, NULL
);
513 can_add_underscore
= 0;
516 if (tcc_state
->leading_underscore
&& can_add_underscore
) {
518 pstrcpy(buf1
+ 1, sizeof(buf1
) - 1, name
);
522 info
= ELFW(ST_INFO
)(sym_bind
, sym_type
);
523 sym
->c
= put_elf_sym(symtab_section
, value
, size
, info
, other
, sh_num
, name
);
526 tcc_debug_extern_sym(tcc_state
, sym
, sh_num
, sym_bind
, sym_type
);
530 esym
->st_value
= value
;
531 esym
->st_size
= size
;
532 esym
->st_shndx
= sh_num
;
537 ST_FUNC
void put_extern_sym(Sym
*sym
, Section
*s
, addr_t value
, unsigned long size
)
539 if (nocode_wanted
&& (NODATA_WANTED
|| (s
&& s
== cur_text_section
)))
541 put_extern_sym2(sym
, s
? s
->sh_num
: SHN_UNDEF
, value
, size
, 1);
544 /* add a new relocation entry to symbol 'sym' in section 's' */
545 ST_FUNC
void greloca(Section
*s
, Sym
*sym
, unsigned long offset
, int type
,
550 if (nocode_wanted
&& s
== cur_text_section
)
555 put_extern_sym(sym
, NULL
, 0, 0);
559 /* now we can add ELF relocation info */
560 put_elf_reloca(symtab_section
, s
, offset
, type
, c
, addend
);
564 ST_FUNC
void greloc(Section
*s
, Sym
*sym
, unsigned long offset
, int type
)
566 greloca(s
, sym
, offset
, type
, 0);
570 /* ------------------------------------------------------------------------- */
571 /* symbol allocator */
572 static Sym
*__sym_malloc(void)
574 Sym
*sym_pool
, *sym
, *last_sym
;
577 sym_pool
= tcc_malloc(SYM_POOL_NB
* sizeof(Sym
));
578 dynarray_add(&sym_pools
, &nb_sym_pools
, sym_pool
);
580 last_sym
= sym_free_first
;
582 for(i
= 0; i
< SYM_POOL_NB
; i
++) {
583 sym
->next
= last_sym
;
587 sym_free_first
= last_sym
;
591 static inline Sym
*sym_malloc(void)
595 sym
= sym_free_first
;
597 sym
= __sym_malloc();
598 sym_free_first
= sym
->next
;
601 sym
= tcc_malloc(sizeof(Sym
));
606 ST_INLN
void sym_free(Sym
*sym
)
609 sym
->next
= sym_free_first
;
610 sym_free_first
= sym
;
616 /* push, without hashing */
617 ST_FUNC Sym
*sym_push2(Sym
**ps
, int v
, int t
, int c
)
622 memset(s
, 0, sizeof *s
);
632 /* find a symbol and return its associated structure. 's' is the top
633 of the symbol stack */
634 ST_FUNC Sym
*sym_find2(Sym
*s
, int v
)
646 /* structure lookup */
647 ST_INLN Sym
*struct_find(int v
)
650 if ((unsigned)v
>= (unsigned)(tok_ident
- TOK_IDENT
))
652 return table_ident
[v
]->sym_struct
;
655 /* find an identifier */
656 ST_INLN Sym
*sym_find(int v
)
659 if ((unsigned)v
>= (unsigned)(tok_ident
- TOK_IDENT
))
661 return table_ident
[v
]->sym_identifier
;
664 static int sym_scope(Sym
*s
)
666 if (IS_ENUM_VAL (s
->type
.t
))
667 return s
->type
.ref
->sym_scope
;
672 /* push a given symbol on the symbol stack */
673 ST_FUNC Sym
*sym_push(int v
, CType
*type
, int r
, int c
)
682 s
= sym_push2(ps
, v
, type
->t
, c
);
683 s
->type
.ref
= type
->ref
;
685 /* don't record fields or anonymous symbols */
687 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
688 /* record symbol in token array */
689 ts
= table_ident
[(v
& ~SYM_STRUCT
) - TOK_IDENT
];
691 ps
= &ts
->sym_struct
;
693 ps
= &ts
->sym_identifier
;
696 s
->sym_scope
= local_scope
;
697 if (s
->prev_tok
&& sym_scope(s
->prev_tok
) == s
->sym_scope
)
698 tcc_error("redeclaration of '%s'",
699 get_tok_str(v
& ~SYM_STRUCT
, NULL
));
704 /* push a global identifier */
705 ST_FUNC Sym
*global_identifier_push(int v
, int t
, int c
)
708 s
= sym_push2(&global_stack
, v
, t
, c
);
709 s
->r
= VT_CONST
| VT_SYM
;
710 /* don't record anonymous symbol */
711 if (v
< SYM_FIRST_ANOM
) {
712 ps
= &table_ident
[v
- TOK_IDENT
]->sym_identifier
;
713 /* modify the top most local identifier, so that sym_identifier will
714 point to 's' when popped; happens when called from inline asm */
715 while (*ps
!= NULL
&& (*ps
)->sym_scope
)
716 ps
= &(*ps
)->prev_tok
;
723 /* pop symbols until top reaches 'b'. If KEEP is non-zero don't really
724 pop them yet from the list, but do remove them from the token array. */
725 ST_FUNC
void sym_pop(Sym
**ptop
, Sym
*b
, int keep
)
735 /* remove symbol in token array */
737 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
738 ts
= table_ident
[(v
& ~SYM_STRUCT
) - TOK_IDENT
];
740 ps
= &ts
->sym_struct
;
742 ps
= &ts
->sym_identifier
;
753 /* ------------------------------------------------------------------------- */
754 static void vcheck_cmp(void)
756 /* cannot let cpu flags if other instruction are generated. Also
757 avoid leaving VT_JMP anywhere except on the top of the stack
758 because it would complicate the code generator.
760 Don't do this when nocode_wanted. vtop might come from
761 !nocode_wanted regions (see 88_codeopt.c) and transforming
762 it to a register without actually generating code is wrong
763 as their value might still be used for real. All values
764 we push under nocode_wanted will eventually be popped
765 again, so that the VT_CMP/VT_JMP value will be in vtop
766 when code is unsuppressed again. */
768 if (vtop
->r
== VT_CMP
&& !nocode_wanted
)
772 static void vsetc(CType
*type
, int r
, CValue
*vc
)
774 if (vtop
>= vstack
+ (VSTACK_SIZE
- 1))
775 tcc_error("memory full (vstack)");
785 ST_FUNC
void vswap(void)
795 /* pop stack value */
796 ST_FUNC
void vpop(void)
799 v
= vtop
->r
& VT_VALMASK
;
800 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
801 /* for x86, we need to pop the FP stack */
803 o(0xd8dd); /* fstp %st(0) */
807 /* need to put correct jump if && or || without test */
814 /* push constant of type "type" with useless value */
815 static void vpush(CType
*type
)
817 vset(type
, VT_CONST
, 0);
820 /* push arbitrary 64bit constant */
821 static void vpush64(int ty
, unsigned long long v
)
828 vsetc(&ctype
, VT_CONST
, &cval
);
831 /* push integer constant */
832 ST_FUNC
void vpushi(int v
)
837 /* push a pointer sized constant */
838 static void vpushs(addr_t v
)
840 vpush64(VT_SIZE_T
, v
);
843 /* push long long constant */
844 static inline void vpushll(long long v
)
846 vpush64(VT_LLONG
, v
);
849 ST_FUNC
void vset(CType
*type
, int r
, int v
)
853 vsetc(type
, r
, &cval
);
856 static void vseti(int r
, int v
)
864 ST_FUNC
void vpushv(SValue
*v
)
866 if (vtop
>= vstack
+ (VSTACK_SIZE
- 1))
867 tcc_error("memory full (vstack)");
872 static void vdup(void)
877 /* rotate n first stack elements to the bottom
878 I1 ... In -> I2 ... In I1 [top is right]
880 ST_FUNC
void vrotb(int n
)
892 /* rotate the n elements before entry e towards the top
893 I1 ... In ... -> In I1 ... I(n-1) ... [top is right]
895 ST_FUNC
void vrote(SValue
*e
, int n
)
902 for(i
= 0;i
< n
- 1; i
++)
907 /* rotate n first stack elements to the top
908 I1 ... In -> In I1 ... I(n-1) [top is right]
910 ST_FUNC
void vrott(int n
)
915 /* ------------------------------------------------------------------------- */
916 /* vtop->r = VT_CMP means CPU-flags have been set from comparison or test. */
918 /* called from generators to set the result from relational ops */
919 ST_FUNC
void vset_VT_CMP(int op
)
927 /* called once before asking generators to load VT_CMP to a register */
928 static void vset_VT_JMP(void)
930 int op
= vtop
->cmp_op
;
932 if (vtop
->jtrue
|| vtop
->jfalse
) {
933 int origt
= vtop
->type
.t
;
934 /* we need to jump to 'mov $0,%R' or 'mov $1,%R' */
935 int inv
= op
& (op
< 2); /* small optimization */
936 vseti(VT_JMP
+inv
, gvtst(inv
, 0));
937 vtop
->type
.t
|= origt
& (VT_UNSIGNED
| VT_DEFSIGN
);
939 /* otherwise convert flags (rsp. 0/1) to register */
941 if (op
< 2) /* doesn't seem to happen */
946 /* Set CPU Flags, doesn't yet jump */
947 static void gvtst_set(int inv
, int t
)
951 if (vtop
->r
!= VT_CMP
) {
954 if (vtop
->r
!= VT_CMP
) /* must be VT_CONST then */
955 vset_VT_CMP(vtop
->c
.i
!= 0);
958 p
= inv
? &vtop
->jfalse
: &vtop
->jtrue
;
959 *p
= gjmp_append(*p
, t
);
962 /* Generate value test
964 * Generate a test for any value (jump, comparison and integers) */
965 static int gvtst(int inv
, int t
)
970 t
= vtop
->jtrue
, u
= vtop
->jfalse
;
975 /* jump to the wanted target */
977 t
= gjmp_cond(op
^ inv
, t
);
980 /* resolve complementary jumps to here */
987 /* generate a zero or nozero test */
988 static void gen_test_zero(int op
)
990 if (vtop
->r
== VT_CMP
) {
994 vtop
->jfalse
= vtop
->jtrue
;
1004 /* ------------------------------------------------------------------------- */
1005 /* push a symbol value of TYPE */
1006 ST_FUNC
void vpushsym(CType
*type
, Sym
*sym
)
1010 vsetc(type
, VT_CONST
| VT_SYM
, &cval
);
1014 /* Return a static symbol pointing to a section */
1015 ST_FUNC Sym
*get_sym_ref(CType
*type
, Section
*sec
, unsigned long offset
, unsigned long size
)
1021 sym
= sym_push(v
, type
, VT_CONST
| VT_SYM
, 0);
1022 sym
->type
.t
|= VT_STATIC
;
1023 put_extern_sym(sym
, sec
, offset
, size
);
1027 /* push a reference to a section offset by adding a dummy symbol */
1028 static void vpush_ref(CType
*type
, Section
*sec
, unsigned long offset
, unsigned long size
)
1030 vpushsym(type
, get_sym_ref(type
, sec
, offset
, size
));
1033 /* define a new external reference to a symbol 'v' of type 'u' */
1034 ST_FUNC Sym
*external_global_sym(int v
, CType
*type
)
1040 /* push forward reference */
1041 s
= global_identifier_push(v
, type
->t
| VT_EXTERN
, 0);
1042 s
->type
.ref
= type
->ref
;
1043 } else if (IS_ASM_SYM(s
)) {
1044 s
->type
.t
= type
->t
| (s
->type
.t
& VT_EXTERN
);
1045 s
->type
.ref
= type
->ref
;
1051 /* create an external reference with no specific type similar to asm labels.
1052 This avoids type conflicts if the symbol is used from C too */
1053 ST_FUNC Sym
*external_helper_sym(int v
)
1055 CType ct
= { VT_ASM_FUNC
, NULL
};
1056 return external_global_sym(v
, &ct
);
1059 /* push a reference to an helper function (such as memmove) */
1060 ST_FUNC
void vpush_helper_func(int v
)
1062 vpushsym(&func_old_type
, external_helper_sym(v
));
1065 /* Merge symbol attributes. */
1066 static void merge_symattr(struct SymAttr
*sa
, struct SymAttr
*sa1
)
1068 if (sa1
->aligned
&& !sa
->aligned
)
1069 sa
->aligned
= sa1
->aligned
;
1070 sa
->packed
|= sa1
->packed
;
1071 sa
->weak
|= sa1
->weak
;
1072 sa
->nodebug
|= sa1
->nodebug
;
1073 if (sa1
->visibility
!= STV_DEFAULT
) {
1074 int vis
= sa
->visibility
;
1075 if (vis
== STV_DEFAULT
1076 || vis
> sa1
->visibility
)
1077 vis
= sa1
->visibility
;
1078 sa
->visibility
= vis
;
1080 sa
->dllexport
|= sa1
->dllexport
;
1081 sa
->nodecorate
|= sa1
->nodecorate
;
1082 sa
->dllimport
|= sa1
->dllimport
;
1085 /* Merge function attributes. */
1086 static void merge_funcattr(struct FuncAttr
*fa
, struct FuncAttr
*fa1
)
1088 if (fa1
->func_call
&& !fa
->func_call
)
1089 fa
->func_call
= fa1
->func_call
;
1090 if (fa1
->func_type
&& !fa
->func_type
)
1091 fa
->func_type
= fa1
->func_type
;
1092 if (fa1
->func_args
&& !fa
->func_args
)
1093 fa
->func_args
= fa1
->func_args
;
1094 if (fa1
->func_noreturn
)
1095 fa
->func_noreturn
= 1;
1102 /* Merge attributes. */
1103 static void merge_attr(AttributeDef
*ad
, AttributeDef
*ad1
)
1105 merge_symattr(&ad
->a
, &ad1
->a
);
1106 merge_funcattr(&ad
->f
, &ad1
->f
);
1109 ad
->section
= ad1
->section
;
1110 if (ad1
->alias_target
)
1111 ad
->alias_target
= ad1
->alias_target
;
1113 ad
->asm_label
= ad1
->asm_label
;
1115 ad
->attr_mode
= ad1
->attr_mode
;
1118 /* Merge some type attributes. */
1119 static void patch_type(Sym
*sym
, CType
*type
)
1121 if (!(type
->t
& VT_EXTERN
) || IS_ENUM_VAL(sym
->type
.t
)) {
1122 if (!(sym
->type
.t
& VT_EXTERN
))
1123 tcc_error("redefinition of '%s'", get_tok_str(sym
->v
, NULL
));
1124 sym
->type
.t
&= ~VT_EXTERN
;
1127 if (IS_ASM_SYM(sym
)) {
1128 /* stay static if both are static */
1129 sym
->type
.t
= type
->t
& (sym
->type
.t
| ~VT_STATIC
);
1130 sym
->type
.ref
= type
->ref
;
1133 if (!is_compatible_types(&sym
->type
, type
)) {
1134 tcc_error("incompatible types for redefinition of '%s'",
1135 get_tok_str(sym
->v
, NULL
));
1137 } else if ((sym
->type
.t
& VT_BTYPE
) == VT_FUNC
) {
1138 int static_proto
= sym
->type
.t
& VT_STATIC
;
1139 /* warn if static follows non-static function declaration */
1140 if ((type
->t
& VT_STATIC
) && !static_proto
1141 /* XXX this test for inline shouldn't be here. Until we
1142 implement gnu-inline mode again it silences a warning for
1143 mingw caused by our workarounds. */
1144 && !((type
->t
| sym
->type
.t
) & VT_INLINE
))
1145 tcc_warning("static storage ignored for redefinition of '%s'",
1146 get_tok_str(sym
->v
, NULL
));
1148 /* set 'inline' if both agree or if one has static */
1149 if ((type
->t
| sym
->type
.t
) & VT_INLINE
) {
1150 if (!((type
->t
^ sym
->type
.t
) & VT_INLINE
)
1151 || ((type
->t
| sym
->type
.t
) & VT_STATIC
))
1152 static_proto
|= VT_INLINE
;
1155 if (0 == (type
->t
& VT_EXTERN
)) {
1156 struct FuncAttr f
= sym
->type
.ref
->f
;
1157 /* put complete type, use static from prototype */
1158 sym
->type
.t
= (type
->t
& ~(VT_STATIC
|VT_INLINE
)) | static_proto
;
1159 sym
->type
.ref
= type
->ref
;
1160 merge_funcattr(&sym
->type
.ref
->f
, &f
);
1162 sym
->type
.t
&= ~VT_INLINE
| static_proto
;
1165 if (sym
->type
.ref
->f
.func_type
== FUNC_OLD
1166 && type
->ref
->f
.func_type
!= FUNC_OLD
) {
1167 sym
->type
.ref
= type
->ref
;
1171 if ((sym
->type
.t
& VT_ARRAY
) && type
->ref
->c
>= 0) {
1172 /* set array size if it was omitted in extern declaration */
1173 sym
->type
.ref
->c
= type
->ref
->c
;
1175 if ((type
->t
^ sym
->type
.t
) & VT_STATIC
)
1176 tcc_warning("storage mismatch for redefinition of '%s'",
1177 get_tok_str(sym
->v
, NULL
));
1181 /* Merge some storage attributes. */
1182 static void patch_storage(Sym
*sym
, AttributeDef
*ad
, CType
*type
)
1185 patch_type(sym
, type
);
1187 #ifdef TCC_TARGET_PE
1188 if (sym
->a
.dllimport
!= ad
->a
.dllimport
)
1189 tcc_error("incompatible dll linkage for redefinition of '%s'",
1190 get_tok_str(sym
->v
, NULL
));
1192 merge_symattr(&sym
->a
, &ad
->a
);
1194 sym
->asm_label
= ad
->asm_label
;
1195 update_storage(sym
);
1198 /* copy sym to other stack */
1199 static Sym
*sym_copy(Sym
*s0
, Sym
**ps
)
1202 s
= sym_malloc(), *s
= *s0
;
1203 s
->prev
= *ps
, *ps
= s
;
1204 if (s
->v
< SYM_FIRST_ANOM
) {
1205 ps
= &table_ident
[s
->v
- TOK_IDENT
]->sym_identifier
;
1206 s
->prev_tok
= *ps
, *ps
= s
;
1211 /* copy s->type.ref to stack 'ps' for VT_FUNC and VT_PTR */
1212 static void sym_copy_ref(Sym
*s
, Sym
**ps
)
1214 int bt
= s
->type
.t
& VT_BTYPE
;
1215 if (bt
== VT_FUNC
|| bt
== VT_PTR
|| (bt
== VT_STRUCT
&& s
->sym_scope
)) {
1216 Sym
**sp
= &s
->type
.ref
;
1217 for (s
= *sp
, *sp
= NULL
; s
; s
= s
->next
) {
1218 Sym
*s2
= sym_copy(s
, ps
);
1219 sp
= &(*sp
= s2
)->next
;
1220 sym_copy_ref(s2
, ps
);
1225 /* define a new external reference to a symbol 'v' */
1226 static Sym
*external_sym(int v
, CType
*type
, int r
, AttributeDef
*ad
)
1230 /* look for global symbol */
1232 while (s
&& s
->sym_scope
)
1236 /* push forward reference */
1237 s
= global_identifier_push(v
, type
->t
, 0);
1240 s
->asm_label
= ad
->asm_label
;
1241 s
->type
.ref
= type
->ref
;
1242 /* copy type to the global stack */
1244 sym_copy_ref(s
, &global_stack
);
1246 patch_storage(s
, ad
, type
);
1248 /* push variables on local_stack if any */
1249 if (local_stack
&& (s
->type
.t
& VT_BTYPE
) != VT_FUNC
)
1250 s
= sym_copy(s
, &local_stack
);
1254 /* save registers up to (vtop - n) stack entry */
1255 ST_FUNC
void save_regs(int n
)
1258 for(p
= vstack
, p1
= vtop
- n
; p
<= p1
; p
++)
1262 /* save r to the memory stack, and mark it as being free */
1263 ST_FUNC
void save_reg(int r
)
1265 save_reg_upstack(r
, 0);
1268 /* save r to the memory stack, and mark it as being free,
1269 if seen up to (vtop - n) stack entry */
1270 ST_FUNC
void save_reg_upstack(int r
, int n
)
1272 int l
, size
, align
, bt
;
1275 if ((r
&= VT_VALMASK
) >= VT_CONST
)
1280 for(p
= vstack
, p1
= vtop
- n
; p
<= p1
; p
++) {
1281 if ((p
->r
& VT_VALMASK
) == r
|| p
->r2
== r
) {
1282 /* must save value on stack if not already done */
1284 bt
= p
->type
.t
& VT_BTYPE
;
1287 if ((p
->r
& VT_LVAL
) || bt
== VT_FUNC
)
1290 size
= type_size(&sv
.type
, &align
);
1291 l
= get_temp_local_var(size
,align
);
1292 sv
.r
= VT_LOCAL
| VT_LVAL
;
1294 store(p
->r
& VT_VALMASK
, &sv
);
1295 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
1296 /* x86 specific: need to pop fp register ST0 if saved */
1297 if (r
== TREG_ST0
) {
1298 o(0xd8dd); /* fstp %st(0) */
1301 /* special long long case */
1302 if (p
->r2
< VT_CONST
&& USING_TWO_WORDS(bt
)) {
1307 /* mark that stack entry as being saved on the stack */
1308 if (p
->r
& VT_LVAL
) {
1309 /* also clear the bounded flag because the
1310 relocation address of the function was stored in
1312 p
->r
= (p
->r
& ~(VT_VALMASK
| VT_BOUNDED
)) | VT_LLOCAL
;
1314 p
->r
= VT_LVAL
| VT_LOCAL
;
1323 #ifdef TCC_TARGET_ARM
1324 /* find a register of class 'rc2' with at most one reference on stack.
1325 * If none, call get_reg(rc) */
1326 ST_FUNC
int get_reg_ex(int rc
, int rc2
)
1331 for(r
=0;r
<NB_REGS
;r
++) {
1332 if (reg_classes
[r
] & rc2
) {
1335 for(p
= vstack
; p
<= vtop
; p
++) {
1336 if ((p
->r
& VT_VALMASK
) == r
||
1348 /* find a free register of class 'rc'. If none, save one register */
1349 ST_FUNC
int get_reg(int rc
)
1354 /* find a free register */
1355 for(r
=0;r
<NB_REGS
;r
++) {
1356 if (reg_classes
[r
] & rc
) {
1359 for(p
=vstack
;p
<=vtop
;p
++) {
1360 if ((p
->r
& VT_VALMASK
) == r
||
1369 /* no register left : free the first one on the stack (VERY
1370 IMPORTANT to start from the bottom to ensure that we don't
1371 spill registers used in gen_opi()) */
1372 for(p
=vstack
;p
<=vtop
;p
++) {
1373 /* look at second register (if long long) */
1375 if (r
< VT_CONST
&& (reg_classes
[r
] & rc
))
1377 r
= p
->r
& VT_VALMASK
;
1378 if (r
< VT_CONST
&& (reg_classes
[r
] & rc
)) {
1384 /* Should never comes here */
1388 /* find a free temporary local variable (return the offset on stack) match the size and align. If none, add new temporary stack variable*/
1389 static int get_temp_local_var(int size
,int align
){
1391 struct temp_local_variable
*temp_var
;
1398 for(i
=0;i
<nb_temp_local_vars
;i
++){
1399 temp_var
=&arr_temp_local_vars
[i
];
1400 if(temp_var
->size
<size
||align
!=temp_var
->align
){
1403 /*check if temp_var is free*/
1405 for(p
=vstack
;p
<=vtop
;p
++) {
1407 if(r
==VT_LOCAL
||r
==VT_LLOCAL
){
1408 if(p
->c
.i
==temp_var
->location
){
1415 found_var
=temp_var
->location
;
1421 loc
= (loc
- size
) & -align
;
1422 if(nb_temp_local_vars
<MAX_TEMP_LOCAL_VARIABLE_NUMBER
){
1423 temp_var
=&arr_temp_local_vars
[i
];
1424 temp_var
->location
=loc
;
1425 temp_var
->size
=size
;
1426 temp_var
->align
=align
;
1427 nb_temp_local_vars
++;
1434 static void clear_temp_local_var_list(){
1435 nb_temp_local_vars
=0;
1438 /* move register 's' (of type 't') to 'r', and flush previous value of r to memory
1440 static void move_reg(int r
, int s
, int t
)
1454 /* get address of vtop (vtop MUST BE an lvalue) */
1455 ST_FUNC
void gaddrof(void)
1457 vtop
->r
&= ~VT_LVAL
;
1458 /* tricky: if saved lvalue, then we can go back to lvalue */
1459 if ((vtop
->r
& VT_VALMASK
) == VT_LLOCAL
)
1460 vtop
->r
= (vtop
->r
& ~VT_VALMASK
) | VT_LOCAL
| VT_LVAL
;
1463 #ifdef CONFIG_TCC_BCHECK
1464 /* generate a bounded pointer addition */
1465 static void gen_bounded_ptr_add(void)
1467 int save
= (vtop
[-1].r
& VT_VALMASK
) == VT_LOCAL
;
1472 vpush_helper_func(TOK___bound_ptr_add
);
1477 /* returned pointer is in REG_IRET */
1478 vtop
->r
= REG_IRET
| VT_BOUNDED
;
1481 /* relocation offset of the bounding function call point */
1482 vtop
->c
.i
= (cur_text_section
->reloc
->data_offset
- sizeof(ElfW_Rel
));
1485 /* patch pointer addition in vtop so that pointer dereferencing is
1487 static void gen_bounded_ptr_deref(void)
1497 size
= type_size(&vtop
->type
, &align
);
1499 case 1: func
= TOK___bound_ptr_indir1
; break;
1500 case 2: func
= TOK___bound_ptr_indir2
; break;
1501 case 4: func
= TOK___bound_ptr_indir4
; break;
1502 case 8: func
= TOK___bound_ptr_indir8
; break;
1503 case 12: func
= TOK___bound_ptr_indir12
; break;
1504 case 16: func
= TOK___bound_ptr_indir16
; break;
1506 /* may happen with struct member access */
1509 sym
= external_helper_sym(func
);
1511 put_extern_sym(sym
, NULL
, 0, 0);
1512 /* patch relocation */
1513 /* XXX: find a better solution ? */
1514 rel
= (ElfW_Rel
*)(cur_text_section
->reloc
->data
+ vtop
->c
.i
);
1515 rel
->r_info
= ELFW(R_INFO
)(sym
->c
, ELFW(R_TYPE
)(rel
->r_info
));
1518 /* generate lvalue bound code */
1519 static void gbound(void)
1523 vtop
->r
&= ~VT_MUSTBOUND
;
1524 /* if lvalue, then use checking code before dereferencing */
1525 if (vtop
->r
& VT_LVAL
) {
1526 /* if not VT_BOUNDED value, then make one */
1527 if (!(vtop
->r
& VT_BOUNDED
)) {
1528 /* must save type because we must set it to int to get pointer */
1530 vtop
->type
.t
= VT_PTR
;
1533 gen_bounded_ptr_add();
1537 /* then check for dereferencing */
1538 gen_bounded_ptr_deref();
1542 /* we need to call __bound_ptr_add before we start to load function
1543 args into registers */
1544 ST_FUNC
void gbound_args(int nb_args
)
1549 for (i
= 1; i
<= nb_args
; ++i
)
1550 if (vtop
[1 - i
].r
& VT_MUSTBOUND
) {
1556 sv
= vtop
- nb_args
;
1557 if (sv
->r
& VT_SYM
) {
1561 #ifndef TCC_TARGET_PE
1562 || v
== TOK_sigsetjmp
1563 || v
== TOK___sigsetjmp
1566 vpush_helper_func(TOK___bound_setjmp
);
1569 func_bound_add_epilog
= 1;
1571 #if defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64
1572 if (v
== TOK_alloca
)
1573 func_bound_add_epilog
= 1;
1576 if (v
== TOK_longjmp
) /* undo rename to __longjmp14 */
1577 sv
->sym
->asm_label
= TOK___bound_longjmp
;
1582 /* Add bounds for local symbols from S to E (via ->prev) */
1583 static void add_local_bounds(Sym
*s
, Sym
*e
)
1585 for (; s
!= e
; s
= s
->prev
) {
1586 if (!s
->v
|| (s
->r
& VT_VALMASK
) != VT_LOCAL
)
1588 /* Add arrays/structs/unions because we always take address */
1589 if ((s
->type
.t
& VT_ARRAY
)
1590 || (s
->type
.t
& VT_BTYPE
) == VT_STRUCT
1591 || s
->a
.addrtaken
) {
1592 /* add local bound info */
1593 int align
, size
= type_size(&s
->type
, &align
);
1594 addr_t
*bounds_ptr
= section_ptr_add(lbounds_section
,
1595 2 * sizeof(addr_t
));
1596 bounds_ptr
[0] = s
->c
;
1597 bounds_ptr
[1] = size
;
1603 /* Wrapper around sym_pop, that potentially also registers local bounds. */
1604 static void pop_local_syms(Sym
*b
, int keep
)
1606 #ifdef CONFIG_TCC_BCHECK
1607 if (tcc_state
->do_bounds_check
&& !keep
&& (local_scope
|| !func_var
))
1608 add_local_bounds(local_stack
, b
);
1611 tcc_add_debug_info (tcc_state
, !local_scope
, local_stack
, b
);
1612 sym_pop(&local_stack
, b
, keep
);
1615 static void incr_bf_adr(int o
)
1617 vtop
->type
= char_pointer_type
;
1621 vtop
->type
.t
= VT_BYTE
| VT_UNSIGNED
;
1625 /* single-byte load mode for packed or otherwise unaligned bitfields */
1626 static void load_packed_bf(CType
*type
, int bit_pos
, int bit_size
)
1629 save_reg_upstack(vtop
->r
, 1);
1630 vpush64(type
->t
& VT_BTYPE
, 0); // B X
1631 bits
= 0, o
= bit_pos
>> 3, bit_pos
&= 7;
1640 vpushi(bit_pos
), gen_op(TOK_SHR
), bit_pos
= 0; // X B Y
1642 vpushi((1 << n
) - 1), gen_op('&');
1645 vpushi(bits
), gen_op(TOK_SHL
);
1648 bits
+= n
, bit_size
-= n
, o
= 1;
1651 if (!(type
->t
& VT_UNSIGNED
)) {
1652 n
= ((type
->t
& VT_BTYPE
) == VT_LLONG
? 64 : 32) - bits
;
1653 vpushi(n
), gen_op(TOK_SHL
);
1654 vpushi(n
), gen_op(TOK_SAR
);
1658 /* single-byte store mode for packed or otherwise unaligned bitfields */
1659 static void store_packed_bf(int bit_pos
, int bit_size
)
1661 int bits
, n
, o
, m
, c
;
1662 c
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
1664 save_reg_upstack(vtop
->r
, 1);
1665 bits
= 0, o
= bit_pos
>> 3, bit_pos
&= 7;
1667 incr_bf_adr(o
); // X B
1669 c
? vdup() : gv_dup(); // B V X
1672 vpushi(bits
), gen_op(TOK_SHR
);
1674 vpushi(bit_pos
), gen_op(TOK_SHL
);
1679 m
= ((1 << n
) - 1) << bit_pos
;
1680 vpushi(m
), gen_op('&'); // X B V1
1681 vpushv(vtop
-1); // X B V1 B
1682 vpushi(m
& 0x80 ? ~m
& 0x7f : ~m
);
1683 gen_op('&'); // X B V1 B1
1684 gen_op('|'); // X B V2
1686 vdup(), vtop
[-1] = vtop
[-2]; // X B B V2
1687 vstore(), vpop(); // X B
1688 bits
+= n
, bit_size
-= n
, bit_pos
= 0, o
= 1;
1693 static int adjust_bf(SValue
*sv
, int bit_pos
, int bit_size
)
1696 if (0 == sv
->type
.ref
)
1698 t
= sv
->type
.ref
->auxtype
;
1699 if (t
!= -1 && t
!= VT_STRUCT
) {
1700 sv
->type
.t
= (sv
->type
.t
& ~(VT_BTYPE
| VT_LONG
)) | t
;
1706 /* store vtop a register belonging to class 'rc'. lvalues are
1707 converted to values. Cannot be used if cannot be converted to
1708 register value (such as structures). */
1709 ST_FUNC
int gv(int rc
)
1711 int r
, r2
, r_ok
, r2_ok
, rc2
, bt
;
1712 int bit_pos
, bit_size
, size
, align
;
1714 /* NOTE: get_reg can modify vstack[] */
1715 if (vtop
->type
.t
& VT_BITFIELD
) {
1718 bit_pos
= BIT_POS(vtop
->type
.t
);
1719 bit_size
= BIT_SIZE(vtop
->type
.t
);
1720 /* remove bit field info to avoid loops */
1721 vtop
->type
.t
&= ~VT_STRUCT_MASK
;
1724 type
.t
= vtop
->type
.t
& VT_UNSIGNED
;
1725 if ((vtop
->type
.t
& VT_BTYPE
) == VT_BOOL
)
1726 type
.t
|= VT_UNSIGNED
;
1728 r
= adjust_bf(vtop
, bit_pos
, bit_size
);
1730 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
)
1735 if (r
== VT_STRUCT
) {
1736 load_packed_bf(&type
, bit_pos
, bit_size
);
1738 int bits
= (type
.t
& VT_BTYPE
) == VT_LLONG
? 64 : 32;
1739 /* cast to int to propagate signedness in following ops */
1741 /* generate shifts */
1742 vpushi(bits
- (bit_pos
+ bit_size
));
1744 vpushi(bits
- bit_size
);
1745 /* NOTE: transformed to SHR if unsigned */
1750 if (is_float(vtop
->type
.t
) &&
1751 (vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
) {
1752 /* CPUs usually cannot use float constants, so we store them
1753 generically in data segment */
1754 init_params p
= { rodata_section
};
1755 unsigned long offset
;
1756 size
= type_size(&vtop
->type
, &align
);
1758 size
= 0, align
= 1;
1759 offset
= section_add(p
.sec
, size
, align
);
1760 vpush_ref(&vtop
->type
, p
.sec
, offset
, size
);
1762 init_putv(&p
, &vtop
->type
, offset
);
1765 #ifdef CONFIG_TCC_BCHECK
1766 if (vtop
->r
& VT_MUSTBOUND
)
1770 bt
= vtop
->type
.t
& VT_BTYPE
;
1772 #ifdef TCC_TARGET_RISCV64
1774 if (bt
== VT_LDOUBLE
&& rc
== RC_FLOAT
)
1777 rc2
= RC2_TYPE(bt
, rc
);
1779 /* need to reload if:
1781 - lvalue (need to dereference pointer)
1782 - already a register, but not in the right class */
1783 r
= vtop
->r
& VT_VALMASK
;
1784 r_ok
= !(vtop
->r
& VT_LVAL
) && (r
< VT_CONST
) && (reg_classes
[r
] & rc
);
1785 r2_ok
= !rc2
|| ((vtop
->r2
< VT_CONST
) && (reg_classes
[vtop
->r2
] & rc2
));
1787 if (!r_ok
|| !r2_ok
) {
1791 int load_type
= (bt
== VT_QFLOAT
) ? VT_DOUBLE
: VT_PTRDIFF_T
;
1792 int original_type
= vtop
->type
.t
;
1794 /* two register type load :
1795 expand to two words temporarily */
1796 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
) {
1798 unsigned long long ll
= vtop
->c
.i
;
1799 vtop
->c
.i
= ll
; /* first word */
1801 vtop
->r
= r
; /* save register value */
1802 vpushi(ll
>> 32); /* second word */
1803 } else if (vtop
->r
& VT_LVAL
) {
1804 /* We do not want to modify the long long pointer here.
1805 So we save any other instances down the stack */
1806 save_reg_upstack(vtop
->r
, 1);
1807 /* load from memory */
1808 vtop
->type
.t
= load_type
;
1811 vtop
[-1].r
= r
; /* save register value */
1812 /* increment pointer to get second word */
1813 vtop
->type
.t
= VT_PTRDIFF_T
;
1818 vtop
->type
.t
= load_type
;
1820 /* move registers */
1823 if (r2_ok
&& vtop
->r2
< VT_CONST
)
1826 vtop
[-1].r
= r
; /* save register value */
1827 vtop
->r
= vtop
[-1].r2
;
1829 /* Allocate second register. Here we rely on the fact that
1830 get_reg() tries first to free r2 of an SValue. */
1834 /* write second register */
1837 vtop
->type
.t
= original_type
;
1839 if (vtop
->r
== VT_CMP
)
1841 /* one register type load */
1846 #ifdef TCC_TARGET_C67
1847 /* uses register pairs for doubles */
1848 if (bt
== VT_DOUBLE
)
1855 /* generate vtop[-1] and vtop[0] in resp. classes rc1 and rc2 */
1856 ST_FUNC
void gv2(int rc1
, int rc2
)
1858 /* generate more generic register first. But VT_JMP or VT_CMP
1859 values must be generated first in all cases to avoid possible
1861 if (vtop
->r
!= VT_CMP
&& rc1
<= rc2
) {
1866 /* test if reload is needed for first register */
1867 if ((vtop
[-1].r
& VT_VALMASK
) >= VT_CONST
) {
1877 /* test if reload is needed for first register */
1878 if ((vtop
[0].r
& VT_VALMASK
) >= VT_CONST
) {
1885 /* expand 64bit on stack in two ints */
1886 ST_FUNC
void lexpand(void)
1889 u
= vtop
->type
.t
& (VT_DEFSIGN
| VT_UNSIGNED
);
1890 v
= vtop
->r
& (VT_VALMASK
| VT_LVAL
);
1891 if (v
== VT_CONST
) {
1894 } else if (v
== (VT_LVAL
|VT_CONST
) || v
== (VT_LVAL
|VT_LOCAL
)) {
1900 vtop
[0].r
= vtop
[-1].r2
;
1901 vtop
[0].r2
= vtop
[-1].r2
= VT_CONST
;
1903 vtop
[0].type
.t
= vtop
[-1].type
.t
= VT_INT
| u
;
1908 /* build a long long from two ints */
1909 static void lbuild(int t
)
1911 gv2(RC_INT
, RC_INT
);
1912 vtop
[-1].r2
= vtop
[0].r
;
1913 vtop
[-1].type
.t
= t
;
1918 /* convert stack entry to register and duplicate its value in another
1920 static void gv_dup(void)
1926 if ((t
& VT_BTYPE
) == VT_LLONG
) {
1927 if (t
& VT_BITFIELD
) {
1937 /* stack: H L L1 H1 */
1947 /* duplicate value */
1957 /* generate CPU independent (unsigned) long long operations */
1958 static void gen_opl(int op
)
1960 int t
, a
, b
, op1
, c
, i
;
1962 unsigned short reg_iret
= REG_IRET
;
1963 unsigned short reg_lret
= REG_IRE2
;
1969 func
= TOK___divdi3
;
1972 func
= TOK___udivdi3
;
1975 func
= TOK___moddi3
;
1978 func
= TOK___umoddi3
;
1985 /* call generic long long function */
1986 vpush_helper_func(func
);
1991 vtop
->r2
= reg_lret
;
1999 //pv("gen_opl A",0,2);
2005 /* stack: L1 H1 L2 H2 */
2010 vtop
[-2] = vtop
[-3];
2013 /* stack: H1 H2 L1 L2 */
2014 //pv("gen_opl B",0,4);
2020 /* stack: H1 H2 L1 L2 ML MH */
2023 /* stack: ML MH H1 H2 L1 L2 */
2027 /* stack: ML MH H1 L2 H2 L1 */
2032 /* stack: ML MH M1 M2 */
2035 } else if (op
== '+' || op
== '-') {
2036 /* XXX: add non carry method too (for MIPS or alpha) */
2042 /* stack: H1 H2 (L1 op L2) */
2045 gen_op(op1
+ 1); /* TOK_xxxC2 */
2048 /* stack: H1 H2 (L1 op L2) */
2051 /* stack: (L1 op L2) H1 H2 */
2053 /* stack: (L1 op L2) (H1 op H2) */
2061 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
2062 t
= vtop
[-1].type
.t
;
2066 /* stack: L H shift */
2068 /* constant: simpler */
2069 /* NOTE: all comments are for SHL. the other cases are
2070 done by swapping words */
2081 if (op
!= TOK_SAR
) {
2114 /* XXX: should provide a faster fallback on x86 ? */
2117 func
= TOK___ashrdi3
;
2120 func
= TOK___lshrdi3
;
2123 func
= TOK___ashldi3
;
2129 /* compare operations */
2135 /* stack: L1 H1 L2 H2 */
2137 vtop
[-1] = vtop
[-2];
2139 /* stack: L1 L2 H1 H2 */
2143 /* when values are equal, we need to compare low words. since
2144 the jump is inverted, we invert the test too. */
2147 else if (op1
== TOK_GT
)
2149 else if (op1
== TOK_ULT
)
2151 else if (op1
== TOK_UGT
)
2161 /* generate non equal test */
2163 vset_VT_CMP(TOK_NE
);
2167 /* compare low. Always unsigned */
2171 else if (op1
== TOK_LE
)
2173 else if (op1
== TOK_GT
)
2175 else if (op1
== TOK_GE
)
2178 #if 0//def TCC_TARGET_I386
2179 if (op
== TOK_NE
) { gsym(b
); break; }
2180 if (op
== TOK_EQ
) { gsym(a
); break; }
/* Signed 64-bit division carried out on unsigned operands.
   Working on magnitudes and re-applying the sign afterwards avoids the
   undefined behavior a native signed division would have for
   INT64_MIN / -1; results truncate toward zero exactly like C's '/'. */
static uint64_t gen_opic_sdiv(uint64_t a, uint64_t b)
{
    int neg_a = (int)(a >> 63);
    int neg_b = (int)(b >> 63);
    uint64_t quot = (neg_a ? 0 - a : a) / (neg_b ? 0 - b : b);
    /* quotient is negative exactly when operand signs differ */
    return neg_a != neg_b ? 0 - quot : quot;
}
/* Signed "a < b" evaluated on the unsigned 64-bit representations:
   XOR-ing the sign bit maps the signed ordering onto the unsigned one. */
static int gen_opic_lt(uint64_t a, uint64_t b)
{
    const uint64_t sign = (uint64_t)1 << 63;
    return (a ^ sign) < (b ^ sign);
}
2200 /* handle integer constant optimizations and various machine
2202 static void gen_opic(int op
)
2204 SValue
*v1
= vtop
- 1;
2206 int t1
= v1
->type
.t
& VT_BTYPE
;
2207 int t2
= v2
->type
.t
& VT_BTYPE
;
2208 int c1
= (v1
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2209 int c2
= (v2
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2210 int nonconst
= (v1
->r
| v2
->r
) & VT_NONCONST
;
2211 uint64_t l1
= c1
? v1
->c
.i
: 0;
2212 uint64_t l2
= c2
? v2
->c
.i
: 0;
2213 int shm
= (t1
== VT_LLONG
) ? 63 : 31;
2215 if (t1
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t1
!= VT_PTR
))
2216 l1
= ((uint32_t)l1
|
2217 (v1
->type
.t
& VT_UNSIGNED
? 0 : -(l1
& 0x80000000)));
2218 if (t2
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t2
!= VT_PTR
))
2219 l2
= ((uint32_t)l2
|
2220 (v2
->type
.t
& VT_UNSIGNED
? 0 : -(l2
& 0x80000000)));
2224 case '+': l1
+= l2
; break;
2225 case '-': l1
-= l2
; break;
2226 case '&': l1
&= l2
; break;
2227 case '^': l1
^= l2
; break;
2228 case '|': l1
|= l2
; break;
2229 case '*': l1
*= l2
; break;
2236 /* if division by zero, generate explicit division */
2238 if (const_wanted
&& !(nocode_wanted
& unevalmask
))
2239 tcc_error("division by zero in constant");
2243 default: l1
= gen_opic_sdiv(l1
, l2
); break;
2244 case '%': l1
= l1
- l2
* gen_opic_sdiv(l1
, l2
); break;
2245 case TOK_UDIV
: l1
= l1
/ l2
; break;
2246 case TOK_UMOD
: l1
= l1
% l2
; break;
2249 case TOK_SHL
: l1
<<= (l2
& shm
); break;
2250 case TOK_SHR
: l1
>>= (l2
& shm
); break;
2252 l1
= (l1
>> 63) ? ~(~l1
>> (l2
& shm
)) : l1
>> (l2
& shm
);
2255 case TOK_ULT
: l1
= l1
< l2
; break;
2256 case TOK_UGE
: l1
= l1
>= l2
; break;
2257 case TOK_EQ
: l1
= l1
== l2
; break;
2258 case TOK_NE
: l1
= l1
!= l2
; break;
2259 case TOK_ULE
: l1
= l1
<= l2
; break;
2260 case TOK_UGT
: l1
= l1
> l2
; break;
2261 case TOK_LT
: l1
= gen_opic_lt(l1
, l2
); break;
2262 case TOK_GE
: l1
= !gen_opic_lt(l1
, l2
); break;
2263 case TOK_LE
: l1
= !gen_opic_lt(l2
, l1
); break;
2264 case TOK_GT
: l1
= gen_opic_lt(l2
, l1
); break;
2266 case TOK_LAND
: l1
= l1
&& l2
; break;
2267 case TOK_LOR
: l1
= l1
|| l2
; break;
2271 if (t1
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t1
!= VT_PTR
))
2272 l1
= ((uint32_t)l1
|
2273 (v1
->type
.t
& VT_UNSIGNED
? 0 : -(l1
& 0x80000000)));
2277 nonconst
= VT_NONCONST
;
2278 /* if commutative ops, put c2 as constant */
2279 if (c1
&& (op
== '+' || op
== '&' || op
== '^' ||
2280 op
== '|' || op
== '*' || op
== TOK_EQ
|| op
== TOK_NE
)) {
2282 c2
= c1
; //c = c1, c1 = c2, c2 = c;
2283 l2
= l1
; //l = l1, l1 = l2, l2 = l;
2285 if (!const_wanted
&&
2287 (op
== TOK_SHL
|| op
== TOK_SHR
|| op
== TOK_SAR
)) ||
2288 (l1
== -1 && op
== TOK_SAR
))) {
2289 /* treat (0 << x), (0 >> x) and (-1 >> x) as constant */
2291 } else if (!const_wanted
&&
2292 c2
&& ((l2
== 0 && (op
== '&' || op
== '*')) ||
2294 (l2
== -1 || (l2
== 0xFFFFFFFF && t2
!= VT_LLONG
))) ||
2295 (l2
== 1 && (op
== '%' || op
== TOK_UMOD
)))) {
2296 /* treat (x & 0), (x * 0), (x | -1) and (x % 1) as constant */
2301 } else if (c2
&& (((op
== '*' || op
== '/' || op
== TOK_UDIV
||
2304 ((op
== '+' || op
== '-' || op
== '|' || op
== '^' ||
2305 op
== TOK_SHL
|| op
== TOK_SHR
|| op
== TOK_SAR
) &&
2308 (l2
== -1 || (l2
== 0xFFFFFFFF && t2
!= VT_LLONG
))))) {
2309 /* filter out NOP operations like x*1, x-0, x&-1... */
2311 } else if (c2
&& (op
== '*' || op
== TOK_PDIV
|| op
== TOK_UDIV
)) {
2312 /* try to use shifts instead of muls or divs */
2313 if (l2
> 0 && (l2
& (l2
- 1)) == 0) {
2322 else if (op
== TOK_PDIV
)
2328 } else if (c2
&& (op
== '+' || op
== '-') &&
2329 (((vtop
[-1].r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == (VT_CONST
| VT_SYM
))
2330 || (vtop
[-1].r
& (VT_VALMASK
| VT_LVAL
)) == VT_LOCAL
)) {
2331 /* symbol + constant case */
2335 /* The backends can't always deal with addends to symbols
2336 larger than +-1<<31. Don't construct such. */
2343 /* call low level op generator */
2344 if (t1
== VT_LLONG
|| t2
== VT_LLONG
||
2345 (PTR_SIZE
== 8 && (t1
== VT_PTR
|| t2
== VT_PTR
)))
2351 if (vtop
->r
== VT_CONST
)
2352 vtop
->r
|= nonconst
;
2355 #if defined TCC_TARGET_X86_64 || defined TCC_TARGET_I386
2356 # define gen_negf gen_opf
2357 #elif defined TCC_TARGET_ARM
2358 void gen_negf(int op
)
2360 /* arm will detect 0-x and replace by vneg */
2361 vpushi(0), vswap(), gen_op('-');
2364 /* XXX: implement in gen_opf() for other backends too */
2365 void gen_negf(int op
)
2367 /* In IEEE negate(x) isn't subtract(0,x). Without NaNs it's
2368 subtract(-0, x), but with them it's really a sign flip
2369 operation. We implement this with bit manipulation and have
2370 to do some type reinterpretation for this, which TCC can do
2373 int align
, size
, bt
;
2375 size
= type_size(&vtop
->type
, &align
);
2376 bt
= vtop
->type
.t
& VT_BTYPE
;
2377 save_reg(gv(RC_TYPE(bt
)));
2379 incr_bf_adr(size
- 1);
2381 vpushi(0x80); /* flip sign */
2388 /* generate a floating point operation with constant propagation */
2389 static void gen_opif(int op
)
2393 #if defined _MSC_VER && defined __x86_64__
2394 /* avoid bad optimization with f1 -= f2 for f1:-0.0, f2:0.0 */
2404 /* currently, we cannot do computations with forward symbols */
2405 c1
= (v1
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2406 c2
= (v2
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2408 if (v1
->type
.t
== VT_FLOAT
) {
2411 } else if (v1
->type
.t
== VT_DOUBLE
) {
2418 /* NOTE: we only do constant propagation if finite number (not
2419 NaN or infinity) (ANSI spec) */
2420 if (!(ieee_finite(f1
) || !ieee_finite(f2
)) && !const_wanted
)
2423 case '+': f1
+= f2
; break;
2424 case '-': f1
-= f2
; break;
2425 case '*': f1
*= f2
; break;
2428 union { float f
; unsigned u
; } x1
, x2
, y
;
2429 /* If not in initializer we need to potentially generate
2430 FP exceptions at runtime, otherwise we want to fold. */
2433 /* the run-time result of 0.0/0.0 on x87, also of other compilers
2434 when used to compile the f1 /= f2 below, would be -nan */
2435 x1
.f
= f1
, x2
.f
= f2
;
2437 y
.u
= 0x7fc00000; /* nan */
2439 y
.u
= 0x7f800000; /* infinity */
2440 y
.u
|= (x1
.u
^ x2
.u
) & 0x80000000; /* set sign */
2449 /* XXX: also handles tests ? */
2455 /* XXX: overflow test ? */
2456 if (v1
->type
.t
== VT_FLOAT
) {
2458 } else if (v1
->type
.t
== VT_DOUBLE
) {
2465 if (op
== TOK_NEG
) {
2473 /* print a type. If 'varstr' is not NULL, then the variable is also
2474 printed in the type */
2476 /* XXX: add array and function pointers */
2477 static void type_to_str(char *buf
, int buf_size
,
2478 CType
*type
, const char *varstr
)
2490 pstrcat(buf
, buf_size
, "extern ");
2492 pstrcat(buf
, buf_size
, "static ");
2494 pstrcat(buf
, buf_size
, "typedef ");
2496 pstrcat(buf
, buf_size
, "inline ");
2498 if (t
& VT_VOLATILE
)
2499 pstrcat(buf
, buf_size
, "volatile ");
2500 if (t
& VT_CONSTANT
)
2501 pstrcat(buf
, buf_size
, "const ");
2503 if (((t
& VT_DEFSIGN
) && bt
== VT_BYTE
)
2504 || ((t
& VT_UNSIGNED
)
2505 && (bt
== VT_SHORT
|| bt
== VT_INT
|| bt
== VT_LLONG
)
2508 pstrcat(buf
, buf_size
, (t
& VT_UNSIGNED
) ? "unsigned " : "signed ");
2510 buf_size
-= strlen(buf
);
2546 tstr
= "long double";
2548 pstrcat(buf
, buf_size
, tstr
);
2555 pstrcat(buf
, buf_size
, tstr
);
2556 v
= type
->ref
->v
& ~SYM_STRUCT
;
2557 if (v
>= SYM_FIRST_ANOM
)
2558 pstrcat(buf
, buf_size
, "<anonymous>");
2560 pstrcat(buf
, buf_size
, get_tok_str(v
, NULL
));
2565 if (varstr
&& '*' == *varstr
) {
2566 pstrcat(buf1
, sizeof(buf1
), "(");
2567 pstrcat(buf1
, sizeof(buf1
), varstr
);
2568 pstrcat(buf1
, sizeof(buf1
), ")");
2570 pstrcat(buf1
, buf_size
, "(");
2572 while (sa
!= NULL
) {
2574 type_to_str(buf2
, sizeof(buf2
), &sa
->type
, NULL
);
2575 pstrcat(buf1
, sizeof(buf1
), buf2
);
2578 pstrcat(buf1
, sizeof(buf1
), ", ");
2580 if (s
->f
.func_type
== FUNC_ELLIPSIS
)
2581 pstrcat(buf1
, sizeof(buf1
), ", ...");
2582 pstrcat(buf1
, sizeof(buf1
), ")");
2583 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
2587 if (t
& (VT_ARRAY
|VT_VLA
)) {
2588 if (varstr
&& '*' == *varstr
)
2589 snprintf(buf1
, sizeof(buf1
), "(%s)[%d]", varstr
, s
->c
);
2591 snprintf(buf1
, sizeof(buf1
), "%s[%d]", varstr
? varstr
: "", s
->c
);
2592 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
2595 pstrcpy(buf1
, sizeof(buf1
), "*");
2596 if (t
& VT_CONSTANT
)
2597 pstrcat(buf1
, buf_size
, "const ");
2598 if (t
& VT_VOLATILE
)
2599 pstrcat(buf1
, buf_size
, "volatile ");
2601 pstrcat(buf1
, sizeof(buf1
), varstr
);
2602 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
2606 pstrcat(buf
, buf_size
, " ");
2607 pstrcat(buf
, buf_size
, varstr
);
2612 static void type_incompatibility_error(CType
* st
, CType
* dt
, const char* fmt
)
2614 char buf1
[256], buf2
[256];
2615 type_to_str(buf1
, sizeof(buf1
), st
, NULL
);
2616 type_to_str(buf2
, sizeof(buf2
), dt
, NULL
);
2617 tcc_error(fmt
, buf1
, buf2
);
2620 static void type_incompatibility_warning(CType
* st
, CType
* dt
, const char* fmt
)
2622 char buf1
[256], buf2
[256];
2623 type_to_str(buf1
, sizeof(buf1
), st
, NULL
);
2624 type_to_str(buf2
, sizeof(buf2
), dt
, NULL
);
2625 tcc_warning(fmt
, buf1
, buf2
);
2628 static int pointed_size(CType
*type
)
2631 return type_size(pointed_type(type
), &align
);
2634 static inline int is_null_pointer(SValue
*p
)
2636 if ((p
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
| VT_NONCONST
)) != VT_CONST
)
2638 return ((p
->type
.t
& VT_BTYPE
) == VT_INT
&& (uint32_t)p
->c
.i
== 0) ||
2639 ((p
->type
.t
& VT_BTYPE
) == VT_LLONG
&& p
->c
.i
== 0) ||
2640 ((p
->type
.t
& VT_BTYPE
) == VT_PTR
&&
2641 (PTR_SIZE
== 4 ? (uint32_t)p
->c
.i
== 0 : p
->c
.i
== 0) &&
2642 ((pointed_type(&p
->type
)->t
& VT_BTYPE
) == VT_VOID
) &&
2643 0 == (pointed_type(&p
->type
)->t
& (VT_CONSTANT
| VT_VOLATILE
))
2647 /* compare function types. OLD functions match any new functions */
2648 static int is_compatible_func(CType
*type1
, CType
*type2
)
2654 if (s1
->f
.func_call
!= s2
->f
.func_call
)
2656 if (s1
->f
.func_type
!= s2
->f
.func_type
2657 && s1
->f
.func_type
!= FUNC_OLD
2658 && s2
->f
.func_type
!= FUNC_OLD
)
2661 if (!is_compatible_unqualified_types(&s1
->type
, &s2
->type
))
2663 if (s1
->f
.func_type
== FUNC_OLD
|| s2
->f
.func_type
== FUNC_OLD
)
2674 /* return true if type1 and type2 are the same. If unqualified is
2675 true, qualifiers on the types are ignored.
2677 static int compare_types(CType
*type1
, CType
*type2
, int unqualified
)
2681 t1
= type1
->t
& VT_TYPE
;
2682 t2
= type2
->t
& VT_TYPE
;
2684 /* strip qualifiers before comparing */
2685 t1
&= ~(VT_CONSTANT
| VT_VOLATILE
);
2686 t2
&= ~(VT_CONSTANT
| VT_VOLATILE
);
2689 /* Default Vs explicit signedness only matters for char */
2690 if ((t1
& VT_BTYPE
) != VT_BYTE
) {
2694 /* XXX: bitfields ? */
2699 && !(type1
->ref
->c
< 0
2700 || type2
->ref
->c
< 0
2701 || type1
->ref
->c
== type2
->ref
->c
))
2704 /* test more complicated cases */
2705 bt1
= t1
& VT_BTYPE
;
2706 if (bt1
== VT_PTR
) {
2707 type1
= pointed_type(type1
);
2708 type2
= pointed_type(type2
);
2709 return is_compatible_types(type1
, type2
);
2710 } else if (bt1
== VT_STRUCT
) {
2711 return (type1
->ref
== type2
->ref
);
2712 } else if (bt1
== VT_FUNC
) {
2713 return is_compatible_func(type1
, type2
);
2714 } else if (IS_ENUM(type1
->t
) && IS_ENUM(type2
->t
)) {
2715 /* If both are enums then they must be the same, if only one is then
2716 t1 and t2 must be equal, which was checked above already. */
2717 return type1
->ref
== type2
->ref
;
2723 /* Check if OP1 and OP2 can be "combined" with operation OP, the combined
2724 type is stored in DEST if non-null (except for pointer plus/minus) . */
2725 static int combine_types(CType
*dest
, SValue
*op1
, SValue
*op2
, int op
)
2727 CType
*type1
= &op1
->type
, *type2
= &op2
->type
, type
;
2728 int t1
= type1
->t
, t2
= type2
->t
, bt1
= t1
& VT_BTYPE
, bt2
= t2
& VT_BTYPE
;
2734 if (bt1
== VT_VOID
|| bt2
== VT_VOID
) {
2735 ret
= op
== '?' ? 1 : 0;
2736 /* NOTE: as an extension, we accept void on only one side */
2738 } else if (bt1
== VT_PTR
|| bt2
== VT_PTR
) {
2739 if (op
== '+') ; /* Handled in caller */
2740 /* http://port70.net/~nsz/c/c99/n1256.html#6.5.15p6 */
2741 /* If one is a null ptr constant the result type is the other. */
2742 else if (is_null_pointer (op2
)) type
= *type1
;
2743 else if (is_null_pointer (op1
)) type
= *type2
;
2744 else if (bt1
!= bt2
) {
2745 /* accept comparison or cond-expr between pointer and integer
2747 if ((op
== '?' || TOK_ISCOND(op
))
2748 && (is_integer_btype(bt1
) || is_integer_btype(bt2
)))
2749 tcc_warning("pointer/integer mismatch in %s",
2750 op
== '?' ? "conditional expression" : "comparison");
2751 else if (op
!= '-' || !is_integer_btype(bt2
))
2753 type
= *(bt1
== VT_PTR
? type1
: type2
);
2755 CType
*pt1
= pointed_type(type1
);
2756 CType
*pt2
= pointed_type(type2
);
2757 int pbt1
= pt1
->t
& VT_BTYPE
;
2758 int pbt2
= pt2
->t
& VT_BTYPE
;
2759 int newquals
, copied
= 0;
2760 if (pbt1
!= VT_VOID
&& pbt2
!= VT_VOID
2761 && !compare_types(pt1
, pt2
, 1/*unqualif*/)) {
2762 if (op
!= '?' && !TOK_ISCOND(op
))
2765 type_incompatibility_warning(type1
, type2
,
2767 ? "pointer type mismatch in conditional expression ('%s' and '%s')"
2768 : "pointer type mismatch in comparison('%s' and '%s')");
2771 /* pointers to void get preferred, otherwise the
2772 pointed to types minus qualifs should be compatible */
2773 type
= *((pbt1
== VT_VOID
) ? type1
: type2
);
2774 /* combine qualifs */
2775 newquals
= ((pt1
->t
| pt2
->t
) & (VT_CONSTANT
| VT_VOLATILE
));
2776 if ((~pointed_type(&type
)->t
& (VT_CONSTANT
| VT_VOLATILE
))
2779 /* copy the pointer target symbol */
2780 type
.ref
= sym_push(SYM_FIELD
, &type
.ref
->type
,
2783 pointed_type(&type
)->t
|= newquals
;
2785 /* pointers to incomplete arrays get converted to
2786 pointers to completed ones if possible */
2787 if (pt1
->t
& VT_ARRAY
2788 && pt2
->t
& VT_ARRAY
2789 && pointed_type(&type
)->ref
->c
< 0
2790 && (pt1
->ref
->c
> 0 || pt2
->ref
->c
> 0))
2793 type
.ref
= sym_push(SYM_FIELD
, &type
.ref
->type
,
2795 pointed_type(&type
)->ref
=
2796 sym_push(SYM_FIELD
, &pointed_type(&type
)->ref
->type
,
2797 0, pointed_type(&type
)->ref
->c
);
2798 pointed_type(&type
)->ref
->c
=
2799 0 < pt1
->ref
->c
? pt1
->ref
->c
: pt2
->ref
->c
;
2805 } else if (bt1
== VT_STRUCT
|| bt2
== VT_STRUCT
) {
2806 if (op
!= '?' || !compare_types(type1
, type2
, 1))
2809 } else if (is_float(bt1
) || is_float(bt2
)) {
2810 if (bt1
== VT_LDOUBLE
|| bt2
== VT_LDOUBLE
) {
2811 type
.t
= VT_LDOUBLE
;
2812 } else if (bt1
== VT_DOUBLE
|| bt2
== VT_DOUBLE
) {
2817 } else if (bt1
== VT_LLONG
|| bt2
== VT_LLONG
) {
2818 /* cast to biggest op */
2819 type
.t
= VT_LLONG
| VT_LONG
;
2820 if (bt1
== VT_LLONG
)
2822 if (bt2
== VT_LLONG
)
2824 /* convert to unsigned if it does not fit in a long long */
2825 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
) ||
2826 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
))
2827 type
.t
|= VT_UNSIGNED
;
2829 /* integer operations */
2830 type
.t
= VT_INT
| (VT_LONG
& (t1
| t2
));
2831 /* convert to unsigned if it does not fit in an integer */
2832 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
) ||
2833 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
))
2834 type
.t
|= VT_UNSIGNED
;
2841 /* generic gen_op: handles types problems */
2842 ST_FUNC
void gen_op(int op
)
2844 int t1
, t2
, bt1
, bt2
, t
;
2845 CType type1
, combtype
;
2848 t1
= vtop
[-1].type
.t
;
2849 t2
= vtop
[0].type
.t
;
2850 bt1
= t1
& VT_BTYPE
;
2851 bt2
= t2
& VT_BTYPE
;
2853 if (bt1
== VT_FUNC
|| bt2
== VT_FUNC
) {
2854 if (bt2
== VT_FUNC
) {
2855 mk_pointer(&vtop
->type
);
2858 if (bt1
== VT_FUNC
) {
2860 mk_pointer(&vtop
->type
);
2865 } else if (!combine_types(&combtype
, vtop
- 1, vtop
, op
)) {
2866 tcc_error_noabort("invalid operand types for binary operation");
2868 } else if (bt1
== VT_PTR
|| bt2
== VT_PTR
) {
2869 /* at least one operand is a pointer */
2870 /* relational op: must be both pointers */
2874 /* if both pointers, then it must be the '-' op */
2875 if (bt1
== VT_PTR
&& bt2
== VT_PTR
) {
2877 tcc_error("cannot use pointers here");
2878 vpush_type_size(pointed_type(&vtop
[-1].type
), &align
);
2881 vtop
->type
.t
= VT_PTRDIFF_T
;
2885 /* exactly one pointer : must be '+' or '-'. */
2886 if (op
!= '-' && op
!= '+')
2887 tcc_error("cannot use pointers here");
2888 /* Put pointer as first operand */
2889 if (bt2
== VT_PTR
) {
2891 t
= t1
, t1
= t2
, t2
= t
;
2894 if ((vtop
[0].type
.t
& VT_BTYPE
) == VT_LLONG
)
2895 /* XXX: truncate here because gen_opl can't handle ptr + long long */
2898 type1
= vtop
[-1].type
;
2899 vpush_type_size(pointed_type(&vtop
[-1].type
), &align
);
2901 #ifdef CONFIG_TCC_BCHECK
2902 if (tcc_state
->do_bounds_check
&& !const_wanted
) {
2903 /* if bounded pointers, we generate a special code to
2910 gen_bounded_ptr_add();
2916 type1
.t
&= ~(VT_ARRAY
|VT_VLA
);
2917 /* put again type if gen_opic() swapped operands */
2921 /* floats can only be used for a few operations */
2922 if (is_float(combtype
.t
)
2923 && op
!= '+' && op
!= '-' && op
!= '*' && op
!= '/'
2925 tcc_error("invalid operands for binary operation");
2926 else if (op
== TOK_SHR
|| op
== TOK_SAR
|| op
== TOK_SHL
) {
2927 t
= bt1
== VT_LLONG
? VT_LLONG
: VT_INT
;
2928 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (t
| VT_UNSIGNED
))
2930 t
|= (VT_LONG
& t1
);
2934 t
= t2
= combtype
.t
;
2935 /* XXX: currently, some unsigned operations are explicit, so
2936 we modify them here */
2937 if (t
& VT_UNSIGNED
) {
2944 else if (op
== TOK_LT
)
2946 else if (op
== TOK_GT
)
2948 else if (op
== TOK_LE
)
2950 else if (op
== TOK_GE
)
2956 /* special case for shifts and long long: we keep the shift as
2958 if (op
== TOK_SHR
|| op
== TOK_SAR
|| op
== TOK_SHL
)
2965 if (TOK_ISCOND(op
)) {
2966 /* relational op: the result is an int */
2967 vtop
->type
.t
= VT_INT
;
2972 // Make sure that we have converted to an rvalue:
2973 if (vtop
->r
& VT_LVAL
)
2974 gv(is_float(vtop
->type
.t
& VT_BTYPE
) ? RC_FLOAT
: RC_INT
);
2977 #if defined TCC_TARGET_ARM64 || defined TCC_TARGET_RISCV64 || defined TCC_TARGET_ARM
2978 #define gen_cvt_itof1 gen_cvt_itof
2980 /* generic itof for unsigned long long case */
2981 static void gen_cvt_itof1(int t
)
2983 if ((vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
)) ==
2984 (VT_LLONG
| VT_UNSIGNED
)) {
2987 vpush_helper_func(TOK___floatundisf
);
2988 #if LDOUBLE_SIZE != 8
2989 else if (t
== VT_LDOUBLE
)
2990 vpush_helper_func(TOK___floatundixf
);
2993 vpush_helper_func(TOK___floatundidf
);
3004 #if defined TCC_TARGET_ARM64 || defined TCC_TARGET_RISCV64
3005 #define gen_cvt_ftoi1 gen_cvt_ftoi
3007 /* generic ftoi for unsigned long long case */
3008 static void gen_cvt_ftoi1(int t
)
3011 if (t
== (VT_LLONG
| VT_UNSIGNED
)) {
3012 /* not handled natively */
3013 st
= vtop
->type
.t
& VT_BTYPE
;
3015 vpush_helper_func(TOK___fixunssfdi
);
3016 #if LDOUBLE_SIZE != 8
3017 else if (st
== VT_LDOUBLE
)
3018 vpush_helper_func(TOK___fixunsxfdi
);
3021 vpush_helper_func(TOK___fixunsdfdi
);
3032 /* special delayed cast for char/short */
3033 static void force_charshort_cast(void)
3035 int sbt
= BFGET(vtop
->r
, VT_MUSTCAST
) == 2 ? VT_LLONG
: VT_INT
;
3036 int dbt
= vtop
->type
.t
;
3037 vtop
->r
&= ~VT_MUSTCAST
;
3039 gen_cast_s(dbt
== VT_BOOL
? VT_BYTE
|VT_UNSIGNED
: dbt
);
3043 static void gen_cast_s(int t
)
3051 /* cast 'vtop' to 'type'. Casting to bitfields is forbidden. */
3052 static void gen_cast(CType
*type
)
3054 int sbt
, dbt
, sf
, df
, c
;
3055 int dbt_bt
, sbt_bt
, ds
, ss
, bits
, trunc
;
3057 /* special delayed cast for char/short */
3058 if (vtop
->r
& VT_MUSTCAST
)
3059 force_charshort_cast();
3061 /* bitfields first get cast to ints */
3062 if (vtop
->type
.t
& VT_BITFIELD
)
3065 dbt
= type
->t
& (VT_BTYPE
| VT_UNSIGNED
);
3066 sbt
= vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
);
3074 dbt_bt
= dbt
& VT_BTYPE
;
3075 sbt_bt
= sbt
& VT_BTYPE
;
3076 if (dbt_bt
== VT_VOID
)
3078 if (sbt_bt
== VT_VOID
) {
3080 cast_error(&vtop
->type
, type
);
3083 c
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
3084 #if !defined TCC_IS_NATIVE && !defined TCC_IS_NATIVE_387
3085 c
&= (dbt
!= VT_LDOUBLE
) | !!nocode_wanted
;
3088 /* constant case: we can do it now */
3089 /* XXX: in ISOC, cannot do it if error in convert */
3090 if (sbt
== VT_FLOAT
)
3091 vtop
->c
.ld
= vtop
->c
.f
;
3092 else if (sbt
== VT_DOUBLE
)
3093 vtop
->c
.ld
= vtop
->c
.d
;
3096 if (sbt_bt
== VT_LLONG
) {
3097 if ((sbt
& VT_UNSIGNED
) || !(vtop
->c
.i
>> 63))
3098 vtop
->c
.ld
= vtop
->c
.i
;
3100 vtop
->c
.ld
= -(long double)-vtop
->c
.i
;
3102 if ((sbt
& VT_UNSIGNED
) || !(vtop
->c
.i
>> 31))
3103 vtop
->c
.ld
= (uint32_t)vtop
->c
.i
;
3105 vtop
->c
.ld
= -(long double)-(uint32_t)vtop
->c
.i
;
3108 if (dbt
== VT_FLOAT
)
3109 vtop
->c
.f
= (float)vtop
->c
.ld
;
3110 else if (dbt
== VT_DOUBLE
)
3111 vtop
->c
.d
= (double)vtop
->c
.ld
;
3112 } else if (sf
&& dbt
== VT_BOOL
) {
3113 vtop
->c
.i
= (vtop
->c
.ld
!= 0);
3116 vtop
->c
.i
= vtop
->c
.ld
;
3117 else if (sbt_bt
== VT_LLONG
|| (PTR_SIZE
== 8 && sbt
== VT_PTR
))
3119 else if (sbt
& VT_UNSIGNED
)
3120 vtop
->c
.i
= (uint32_t)vtop
->c
.i
;
3122 vtop
->c
.i
= ((uint32_t)vtop
->c
.i
| -(vtop
->c
.i
& 0x80000000));
3124 if (dbt_bt
== VT_LLONG
|| (PTR_SIZE
== 8 && dbt
== VT_PTR
))
3126 else if (dbt
== VT_BOOL
)
3127 vtop
->c
.i
= (vtop
->c
.i
!= 0);
3129 uint32_t m
= dbt_bt
== VT_BYTE
? 0xff :
3130 dbt_bt
== VT_SHORT
? 0xffff :
3133 if (!(dbt
& VT_UNSIGNED
))
3134 vtop
->c
.i
|= -(vtop
->c
.i
& ((m
>> 1) + 1));
3139 } else if (dbt
== VT_BOOL
3140 && (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
))
3141 == (VT_CONST
| VT_SYM
)) {
3142 /* addresses are considered non-zero (see tcctest.c:sinit23) */
3148 /* cannot generate code for global or static initializers */
3149 if (nocode_wanted
& DATA_ONLY_WANTED
)
3152 /* non constant case: generate code */
3153 if (dbt
== VT_BOOL
) {
3154 gen_test_zero(TOK_NE
);
3160 /* convert from fp to fp */
3163 /* convert int to fp */
3166 /* convert fp to int */
3168 if (dbt_bt
!= VT_LLONG
&& dbt_bt
!= VT_INT
)
3171 goto again
; /* may need char/short cast */
3176 ds
= btype_size(dbt_bt
);
3177 ss
= btype_size(sbt_bt
);
3178 if (ds
== 0 || ss
== 0)
3181 if (IS_ENUM(type
->t
) && type
->ref
->c
< 0)
3182 tcc_error("cast to incomplete type");
3184 /* same size and no sign conversion needed */
3185 if (ds
== ss
&& ds
>= 4)
3187 if (dbt_bt
== VT_PTR
|| sbt_bt
== VT_PTR
) {
3188 tcc_warning("cast between pointer and integer of different size");
3189 if (sbt_bt
== VT_PTR
) {
3190 /* put integer type to allow logical operations below */
3191 vtop
->type
.t
= (PTR_SIZE
== 8 ? VT_LLONG
: VT_INT
);
3195 /* processor allows { int a = 0, b = *(char*)&a; }
3196 That means that if we cast to less width, we can just
3197 change the type and read it still later. */
3198 #define ALLOW_SUBTYPE_ACCESS 1
3200 if (ALLOW_SUBTYPE_ACCESS
&& (vtop
->r
& VT_LVAL
)) {
3201 /* value still in memory */
3205 if (ds
<= 4 && !(dbt
== (VT_SHORT
| VT_UNSIGNED
) && sbt
== VT_BYTE
)) {
3207 goto done
; /* no 64bit envolved */
3215 /* generate high word */
3216 if (sbt
& VT_UNSIGNED
) {
3225 } else if (ss
== 8) {
3226 /* from long long: just take low order word */
3234 /* need to convert from 32bit to 64bit */
3235 if (sbt
& VT_UNSIGNED
) {
3236 #if defined(TCC_TARGET_RISCV64)
3237 /* RISC-V keeps 32bit vals in registers sign-extended.
3238 So here we need a zero-extension. */
3247 ss
= ds
, ds
= 4, dbt
= sbt
;
3248 } else if (ss
== 8) {
3249 /* RISC-V keeps 32bit vals in registers sign-extended.
3250 So here we need a sign-extension for signed types and
3251 zero-extension for unsigned types. */
3252 #if !defined(TCC_TARGET_RISCV64)
3253 trunc
= 32; /* zero upper 32 bits for non RISC-V targets */
3262 #if defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64 || defined TCC_TARGET_ARM64
3268 bits
= (ss
- ds
) * 8;
3269 /* for unsigned, gen_op will convert SAR to SHR */
3270 vtop
->type
.t
= (ss
== 8 ? VT_LLONG
: VT_INT
) | (dbt
& VT_UNSIGNED
);
3273 vpushi(bits
- trunc
);
3280 vtop
->type
.t
&= ~ ( VT_CONSTANT
| VT_VOLATILE
| VT_ARRAY
);
3283 /* return type size as known at compile time. Put alignment at 'a' */
3284 ST_FUNC
int type_size(CType
*type
, int *a
)
3289 bt
= type
->t
& VT_BTYPE
;
3290 if (bt
== VT_STRUCT
) {
3295 } else if (bt
== VT_PTR
) {
3296 if (type
->t
& VT_ARRAY
) {
3300 ts
= type_size(&s
->type
, a
);
3302 if (ts
< 0 && s
->c
< 0)
3310 } else if (IS_ENUM(type
->t
) && type
->ref
->c
< 0) {
3312 return -1; /* incomplete enum */
3313 } else if (bt
== VT_LDOUBLE
) {
3315 return LDOUBLE_SIZE
;
3316 } else if (bt
== VT_DOUBLE
|| bt
== VT_LLONG
) {
3317 #ifdef TCC_TARGET_I386
3318 #ifdef TCC_TARGET_PE
3323 #elif defined(TCC_TARGET_ARM)
3333 } else if (bt
== VT_INT
|| bt
== VT_FLOAT
) {
3336 } else if (bt
== VT_SHORT
) {
3339 } else if (bt
== VT_QLONG
|| bt
== VT_QFLOAT
) {
3343 /* char, void, function, _Bool */
3349 /* push type size as known at runtime time on top of value stack. Put
3351 static void vpush_type_size(CType
*type
, int *a
)
3353 if (type
->t
& VT_VLA
) {
3354 type_size(&type
->ref
->type
, a
);
3355 vset(&int_type
, VT_LOCAL
|VT_LVAL
, type
->ref
->c
);
3357 int size
= type_size(type
, a
);
3359 tcc_error("unknown type size");
3368 /* return the pointed type of t */
3369 static inline CType
*pointed_type(CType
*type
)
3371 return &type
->ref
->type
;
3374 /* modify type so that its it is a pointer to type. */
3375 ST_FUNC
void mk_pointer(CType
*type
)
3378 s
= sym_push(SYM_FIELD
, type
, 0, -1);
3379 type
->t
= VT_PTR
| (type
->t
& VT_STORAGE
);
3383 /* return true if type1 and type2 are exactly the same (including
3386 static int is_compatible_types(CType
*type1
, CType
*type2
)
3388 return compare_types(type1
,type2
,0);
3391 /* return true if type1 and type2 are the same (ignoring qualifiers).
3393 static int is_compatible_unqualified_types(CType
*type1
, CType
*type2
)
3395 return compare_types(type1
,type2
,1);
3398 static void cast_error(CType
*st
, CType
*dt
)
3400 type_incompatibility_error(st
, dt
, "cannot convert '%s' to '%s'");
3403 /* verify type compatibility to store vtop in 'dt' type */
3404 static void verify_assign_cast(CType
*dt
)
3406 CType
*st
, *type1
, *type2
;
3407 int dbt
, sbt
, qualwarn
, lvl
;
3409 st
= &vtop
->type
; /* source type */
3410 dbt
= dt
->t
& VT_BTYPE
;
3411 sbt
= st
->t
& VT_BTYPE
;
3412 if (dt
->t
& VT_CONSTANT
)
3413 tcc_warning("assignment of read-only location");
3417 tcc_error("assignment to void expression");
3420 /* special cases for pointers */
3421 /* '0' can also be a pointer */
3422 if (is_null_pointer(vtop
))
3424 /* accept implicit pointer to integer cast with warning */
3425 if (is_integer_btype(sbt
)) {
3426 tcc_warning("assignment makes pointer from integer without a cast");
3429 type1
= pointed_type(dt
);
3431 type2
= pointed_type(st
);
3432 else if (sbt
== VT_FUNC
)
3433 type2
= st
; /* a function is implicitly a function pointer */
3436 if (is_compatible_types(type1
, type2
))
3438 for (qualwarn
= lvl
= 0;; ++lvl
) {
3439 if (((type2
->t
& VT_CONSTANT
) && !(type1
->t
& VT_CONSTANT
)) ||
3440 ((type2
->t
& VT_VOLATILE
) && !(type1
->t
& VT_VOLATILE
)))
3442 dbt
= type1
->t
& (VT_BTYPE
|VT_LONG
);
3443 sbt
= type2
->t
& (VT_BTYPE
|VT_LONG
);
3444 if (dbt
!= VT_PTR
|| sbt
!= VT_PTR
)
3446 type1
= pointed_type(type1
);
3447 type2
= pointed_type(type2
);
3449 if (!is_compatible_unqualified_types(type1
, type2
)) {
3450 if ((dbt
== VT_VOID
|| sbt
== VT_VOID
) && lvl
== 0) {
3451 /* void * can match anything */
3452 } else if (dbt
== sbt
3453 && is_integer_btype(sbt
& VT_BTYPE
)
3454 && IS_ENUM(type1
->t
) + IS_ENUM(type2
->t
)
3455 + !!((type1
->t
^ type2
->t
) & VT_UNSIGNED
) < 2) {
3456 /* Like GCC don't warn by default for merely changes
3457 in pointer target signedness. Do warn for different
3458 base types, though, in particular for unsigned enums
3459 and signed int targets. */
3461 tcc_warning("assignment from incompatible pointer type");
3466 tcc_warning_c(warn_discarded_qualifiers
)("assignment discards qualifiers from pointer target type");
3472 if (sbt
== VT_PTR
|| sbt
== VT_FUNC
) {
3473 tcc_warning("assignment makes integer from pointer without a cast");
3474 } else if (sbt
== VT_STRUCT
) {
3475 goto case_VT_STRUCT
;
3477 /* XXX: more tests */
3481 if (!is_compatible_unqualified_types(dt
, st
)) {
3489 static void gen_assign_cast(CType
*dt
)
3491 verify_assign_cast(dt
);
3495 /* store vtop in lvalue pushed on stack */
3496 ST_FUNC
void vstore(void)
3498 int sbt
, dbt
, ft
, r
, size
, align
, bit_size
, bit_pos
, delayed_cast
;
3500 ft
= vtop
[-1].type
.t
;
3501 sbt
= vtop
->type
.t
& VT_BTYPE
;
3502 dbt
= ft
& VT_BTYPE
;
3503 verify_assign_cast(&vtop
[-1].type
);
3505 if (sbt
== VT_STRUCT
) {
3506 /* if structure, only generate pointer */
3507 /* structure assignment : generate memcpy */
3508 size
= type_size(&vtop
->type
, &align
);
3509 /* destination, keep on stack() as result */
3511 #ifdef CONFIG_TCC_BCHECK
3512 if (vtop
->r
& VT_MUSTBOUND
)
3513 gbound(); /* check would be wrong after gaddrof() */
3515 vtop
->type
.t
= VT_PTR
;
3519 #ifdef CONFIG_TCC_BCHECK
3520 if (vtop
->r
& VT_MUSTBOUND
)
3523 vtop
->type
.t
= VT_PTR
;
3526 #ifdef TCC_TARGET_NATIVE_STRUCT_COPY
3528 #ifdef CONFIG_TCC_BCHECK
3529 && !tcc_state
->do_bounds_check
3532 gen_struct_copy(size
);
3538 /* Use memmove, rather than memcpy, as dest and src may be same: */
3541 vpush_helper_func(TOK_memmove8
);
3542 else if(!(align
& 3))
3543 vpush_helper_func(TOK_memmove4
);
3546 vpush_helper_func(TOK_memmove
);
3551 } else if (ft
& VT_BITFIELD
) {
3552 /* bitfield store handling */
3554 /* save lvalue as expression result (example: s.b = s.a = n;) */
3555 vdup(), vtop
[-1] = vtop
[-2];
3557 bit_pos
= BIT_POS(ft
);
3558 bit_size
= BIT_SIZE(ft
);
3559 /* remove bit field info to avoid loops */
3560 vtop
[-1].type
.t
= ft
& ~VT_STRUCT_MASK
;
3562 if (dbt
== VT_BOOL
) {
3563 gen_cast(&vtop
[-1].type
);
3564 vtop
[-1].type
.t
= (vtop
[-1].type
.t
& ~VT_BTYPE
) | (VT_BYTE
| VT_UNSIGNED
);
3566 r
= adjust_bf(vtop
- 1, bit_pos
, bit_size
);
3567 if (dbt
!= VT_BOOL
) {
3568 gen_cast(&vtop
[-1].type
);
3569 dbt
= vtop
[-1].type
.t
& VT_BTYPE
;
3571 if (r
== VT_STRUCT
) {
3572 store_packed_bf(bit_pos
, bit_size
);
3574 unsigned long long mask
= (1ULL << bit_size
) - 1;
3575 if (dbt
!= VT_BOOL
) {
3577 if (dbt
== VT_LLONG
)
3580 vpushi((unsigned)mask
);
3587 /* duplicate destination */
3590 /* load destination, mask and or with source */
3591 if (dbt
== VT_LLONG
)
3592 vpushll(~(mask
<< bit_pos
));
3594 vpushi(~((unsigned)mask
<< bit_pos
));
3599 /* ... and discard */
3602 } else if (dbt
== VT_VOID
) {
3605 /* optimize char/short casts */
3607 if ((dbt
== VT_BYTE
|| dbt
== VT_SHORT
)
3608 && is_integer_btype(sbt
)
3610 if ((vtop
->r
& VT_MUSTCAST
)
3611 && btype_size(dbt
) > btype_size(sbt
)
3613 force_charshort_cast();
3616 gen_cast(&vtop
[-1].type
);
3619 #ifdef CONFIG_TCC_BCHECK
3620 /* bound check case */
3621 if (vtop
[-1].r
& VT_MUSTBOUND
) {
3627 gv(RC_TYPE(dbt
)); /* generate value */
3630 vtop
->r
|= BFVAL(VT_MUSTCAST
, (sbt
== VT_LLONG
) + 1);
3631 //tcc_warning("deley cast %x -> %x", sbt, dbt);
3632 vtop
->type
.t
= ft
& VT_TYPE
;
3635 /* if lvalue was saved on stack, must read it */
3636 if ((vtop
[-1].r
& VT_VALMASK
) == VT_LLOCAL
) {
3638 r
= get_reg(RC_INT
);
3639 sv
.type
.t
= VT_PTRDIFF_T
;
3640 sv
.r
= VT_LOCAL
| VT_LVAL
;
3641 sv
.c
.i
= vtop
[-1].c
.i
;
3643 vtop
[-1].r
= r
| VT_LVAL
;
3646 r
= vtop
->r
& VT_VALMASK
;
3647 /* two word case handling :
3648 store second register at word + 4 (or +8 for x86-64) */
3649 if (USING_TWO_WORDS(dbt
)) {
3650 int load_type
= (dbt
== VT_QFLOAT
) ? VT_DOUBLE
: VT_PTRDIFF_T
;
3651 vtop
[-1].type
.t
= load_type
;
3654 /* convert to int to increment easily */
3655 vtop
->type
.t
= VT_PTRDIFF_T
;
3661 vtop
[-1].type
.t
= load_type
;
3662 /* XXX: it works because r2 is spilled last ! */
3663 store(vtop
->r2
, vtop
- 1);
3669 vtop
--; /* NOT vpop() because on x86 it would flush the fp stack */
3673 /* post defines POST/PRE add. c is the token ++ or -- */
3674 ST_FUNC
void inc(int post
, int c
)
3677 vdup(); /* save lvalue */
3679 gv_dup(); /* duplicate value */
3684 vpushi(c
- TOK_MID
);
3686 vstore(); /* store value */
3688 vpop(); /* if post op, return saved value */
3691 ST_FUNC
void parse_mult_str (CString
*astr
, const char *msg
)
3693 /* read the string */
3697 while (tok
== TOK_STR
) {
3698 /* XXX: add \0 handling too ? */
3699 cstr_cat(astr
, tokc
.str
.data
, -1);
3702 cstr_ccat(astr
, '\0');
3705 /* If I is >= 1 and a power of two, returns log2(i)+1.
3706 If I is 0 returns 0. */
3707 ST_FUNC
int exact_log2p1(int i
)
3712 for (ret
= 1; i
>= 1 << 8; ret
+= 8)
3723 /* Parse __attribute__((...)) GNUC extension. */
3724 static void parse_attribute(AttributeDef
*ad
)
3730 if (tok
!= TOK_ATTRIBUTE1
&& tok
!= TOK_ATTRIBUTE2
)
3735 while (tok
!= ')') {
3736 if (tok
< TOK_IDENT
)
3737 expect("attribute name");
3749 tcc_warning_c(warn_implicit_function_declaration
)(
3750 "implicit declaration of function '%s'", get_tok_str(tok
, &tokc
));
3751 s
= external_global_sym(tok
, &func_old_type
);
3752 } else if ((s
->type
.t
& VT_BTYPE
) != VT_FUNC
)
3753 tcc_error("'%s' is not declared as function", get_tok_str(tok
, &tokc
));
3754 ad
->cleanup_func
= s
;
3759 case TOK_CONSTRUCTOR1
:
3760 case TOK_CONSTRUCTOR2
:
3761 ad
->f
.func_ctor
= 1;
3763 case TOK_DESTRUCTOR1
:
3764 case TOK_DESTRUCTOR2
:
3765 ad
->f
.func_dtor
= 1;
3767 case TOK_ALWAYS_INLINE1
:
3768 case TOK_ALWAYS_INLINE2
:
3769 ad
->f
.func_alwinl
= 1;
3774 parse_mult_str(&astr
, "section name");
3775 ad
->section
= find_section(tcc_state
, (char *)astr
.data
);
3782 parse_mult_str(&astr
, "alias(\"target\")");
3783 ad
->alias_target
= /* save string as token, for later */
3784 tok_alloc((char*)astr
.data
, astr
.size
-1)->tok
;
3788 case TOK_VISIBILITY1
:
3789 case TOK_VISIBILITY2
:
3791 parse_mult_str(&astr
,
3792 "visibility(\"default|hidden|internal|protected\")");
3793 if (!strcmp (astr
.data
, "default"))
3794 ad
->a
.visibility
= STV_DEFAULT
;
3795 else if (!strcmp (astr
.data
, "hidden"))
3796 ad
->a
.visibility
= STV_HIDDEN
;
3797 else if (!strcmp (astr
.data
, "internal"))
3798 ad
->a
.visibility
= STV_INTERNAL
;
3799 else if (!strcmp (astr
.data
, "protected"))
3800 ad
->a
.visibility
= STV_PROTECTED
;
3802 expect("visibility(\"default|hidden|internal|protected\")");
3811 if (n
<= 0 || (n
& (n
- 1)) != 0)
3812 tcc_error("alignment must be a positive power of two");
3817 ad
->a
.aligned
= exact_log2p1(n
);
3818 if (n
!= 1 << (ad
->a
.aligned
- 1))
3819 tcc_error("alignment of %d is larger than implemented", n
);
3835 /* currently, no need to handle it because tcc does not
3836 track unused objects */
3840 ad
->f
.func_noreturn
= 1;
3845 ad
->f
.func_call
= FUNC_CDECL
;
3850 ad
->f
.func_call
= FUNC_STDCALL
;
3852 #ifdef TCC_TARGET_I386
3862 ad
->f
.func_call
= FUNC_FASTCALL1
+ n
- 1;
3868 ad
->f
.func_call
= FUNC_FASTCALLW
;
3875 ad
->attr_mode
= VT_LLONG
+ 1;
3878 ad
->attr_mode
= VT_BYTE
+ 1;
3881 ad
->attr_mode
= VT_SHORT
+ 1;
3885 ad
->attr_mode
= VT_INT
+ 1;
3888 tcc_warning("__mode__(%s) not supported\n", get_tok_str(tok
, NULL
));
3895 ad
->a
.dllexport
= 1;
3897 case TOK_NODECORATE
:
3898 ad
->a
.nodecorate
= 1;
3901 ad
->a
.dllimport
= 1;
3904 tcc_warning_c(warn_unsupported
)("'%s' attribute ignored", get_tok_str(t
, NULL
));
3905 /* skip parameters */
3907 int parenthesis
= 0;
3911 else if (tok
== ')')
3914 } while (parenthesis
&& tok
!= -1);
3927 static Sym
* find_field (CType
*type
, int v
, int *cumofs
)
3930 int v1
= v
| SYM_FIELD
;
3932 while ((s
= s
->next
) != NULL
) {
3937 if ((s
->type
.t
& VT_BTYPE
) == VT_STRUCT
3938 && s
->v
>= (SYM_FIRST_ANOM
| SYM_FIELD
)) {
3939 /* try to find field in anonymous sub-struct/union */
3940 Sym
*ret
= find_field (&s
->type
, v1
, cumofs
);
3948 if (!(v
& SYM_FIELD
)) { /* top-level call */
3951 tcc_error("dereferencing incomplete type '%s'",
3952 get_tok_str(s
->v
& ~SYM_STRUCT
, 0));
3954 tcc_error("field not found: %s",
3955 get_tok_str(v
, &tokc
));
3960 static void check_fields (CType
*type
, int check
)
3964 while ((s
= s
->next
) != NULL
) {
3965 int v
= s
->v
& ~SYM_FIELD
;
3966 if (v
< SYM_FIRST_ANOM
) {
3967 TokenSym
*ts
= table_ident
[v
- TOK_IDENT
];
3968 if (check
&& (ts
->tok
& SYM_FIELD
))
3969 tcc_error("duplicate member '%s'", get_tok_str(v
, NULL
));
3970 ts
->tok
^= SYM_FIELD
;
3971 } else if ((s
->type
.t
& VT_BTYPE
) == VT_STRUCT
)
3972 check_fields (&s
->type
, check
);
3976 static void struct_layout(CType
*type
, AttributeDef
*ad
)
3978 int size
, align
, maxalign
, offset
, c
, bit_pos
, bit_size
;
3979 int packed
, a
, bt
, prevbt
, prev_bit_size
;
3980 int pcc
= !tcc_state
->ms_bitfields
;
3981 int pragma_pack
= *tcc_state
->pack_stack_ptr
;
3988 prevbt
= VT_STRUCT
; /* make it never match */
3993 for (f
= type
->ref
->next
; f
; f
= f
->next
) {
3994 if (f
->type
.t
& VT_BITFIELD
)
3995 bit_size
= BIT_SIZE(f
->type
.t
);
3998 size
= type_size(&f
->type
, &align
);
3999 a
= f
->a
.aligned
? 1 << (f
->a
.aligned
- 1) : 0;
4002 if (pcc
&& bit_size
== 0) {
4003 /* in pcc mode, packing does not affect zero-width bitfields */
4006 /* in pcc mode, attribute packed overrides if set. */
4007 if (pcc
&& (f
->a
.packed
|| ad
->a
.packed
))
4010 /* pragma pack overrides align if lesser and packs bitfields always */
4013 if (pragma_pack
< align
)
4014 align
= pragma_pack
;
4015 /* in pcc mode pragma pack also overrides individual align */
4016 if (pcc
&& pragma_pack
< a
)
4020 /* some individual align was specified */
4024 if (type
->ref
->type
.t
== VT_UNION
) {
4025 if (pcc
&& bit_size
>= 0)
4026 size
= (bit_size
+ 7) >> 3;
4031 } else if (bit_size
< 0) {
4033 c
+= (bit_pos
+ 7) >> 3;
4034 c
= (c
+ align
- 1) & -align
;
4043 /* A bit-field. Layout is more complicated. There are two
4044 options: PCC (GCC) compatible and MS compatible */
4046 /* In PCC layout a bit-field is placed adjacent to the
4047 preceding bit-fields, except if:
4049 - an individual alignment was given
4050 - it would overflow its base type container and
4051 there is no packing */
4052 if (bit_size
== 0) {
4054 c
= (c
+ ((bit_pos
+ 7) >> 3) + align
- 1) & -align
;
4056 } else if (f
->a
.aligned
) {
4058 } else if (!packed
) {
4060 int ofs
= ((c
* 8 + bit_pos
) % a8
+ bit_size
+ a8
- 1) / a8
;
4061 if (ofs
> size
/ align
)
4065 /* in pcc mode, long long bitfields have type int if they fit */
4066 if (size
== 8 && bit_size
<= 32)
4067 f
->type
.t
= (f
->type
.t
& ~VT_BTYPE
) | VT_INT
, size
= 4;
4069 while (bit_pos
>= align
* 8)
4070 c
+= align
, bit_pos
-= align
* 8;
4073 /* In PCC layout named bit-fields influence the alignment
4074 of the containing struct using the base types alignment,
4075 except for packed fields (which here have correct align). */
4076 if (f
->v
& SYM_FIRST_ANOM
4077 // && bit_size // ??? gcc on ARM/rpi does that
4082 bt
= f
->type
.t
& VT_BTYPE
;
4083 if ((bit_pos
+ bit_size
> size
* 8)
4084 || (bit_size
> 0) == (bt
!= prevbt
)
4086 c
= (c
+ align
- 1) & -align
;
4089 /* In MS bitfield mode a bit-field run always uses
4090 at least as many bits as the underlying type.
4091 To start a new run it's also required that this
4092 or the last bit-field had non-zero width. */
4093 if (bit_size
|| prev_bit_size
)
4096 /* In MS layout the records alignment is normally
4097 influenced by the field, except for a zero-width
4098 field at the start of a run (but by further zero-width
4099 fields it is again). */
4100 if (bit_size
== 0 && prevbt
!= bt
)
4103 prev_bit_size
= bit_size
;
4106 f
->type
.t
= (f
->type
.t
& ~(0x3f << VT_STRUCT_SHIFT
))
4107 | (bit_pos
<< VT_STRUCT_SHIFT
);
4108 bit_pos
+= bit_size
;
4110 if (align
> maxalign
)
4114 printf("set field %s offset %-2d size %-2d align %-2d",
4115 get_tok_str(f
->v
& ~SYM_FIELD
, NULL
), offset
, size
, align
);
4116 if (f
->type
.t
& VT_BITFIELD
) {
4117 printf(" pos %-2d bits %-2d",
4130 c
+= (bit_pos
+ 7) >> 3;
4132 /* store size and alignment */
4133 a
= bt
= ad
->a
.aligned
? 1 << (ad
->a
.aligned
- 1) : 1;
4137 if (pragma_pack
&& pragma_pack
< maxalign
&& 0 == pcc
) {
4138 /* can happen if individual align for some member was given. In
4139 this case MSVC ignores maxalign when aligning the size */
4144 c
= (c
+ a
- 1) & -a
;
4148 printf("struct size %-2d align %-2d\n\n", c
, a
), fflush(stdout
);
4151 /* check whether we can access bitfields by their type */
4152 for (f
= type
->ref
->next
; f
; f
= f
->next
) {
4156 if (0 == (f
->type
.t
& VT_BITFIELD
))
4160 bit_size
= BIT_SIZE(f
->type
.t
);
4163 bit_pos
= BIT_POS(f
->type
.t
);
4164 size
= type_size(&f
->type
, &align
);
4166 if (bit_pos
+ bit_size
<= size
* 8 && f
->c
+ size
<= c
4167 #ifdef TCC_TARGET_ARM
4168 && !(f
->c
& (align
- 1))
4173 /* try to access the field using a different type */
4174 c0
= -1, s
= align
= 1;
4177 px
= f
->c
* 8 + bit_pos
;
4178 cx
= (px
>> 3) & -align
;
4179 px
= px
- (cx
<< 3);
4182 s
= (px
+ bit_size
+ 7) >> 3;
4192 s
= type_size(&t
, &align
);
4196 if (px
+ bit_size
<= s
* 8 && cx
+ s
<= c
4197 #ifdef TCC_TARGET_ARM
4198 && !(cx
& (align
- 1))
4201 /* update offset and bit position */
4204 f
->type
.t
= (f
->type
.t
& ~(0x3f << VT_STRUCT_SHIFT
))
4205 | (bit_pos
<< VT_STRUCT_SHIFT
);
4209 printf("FIX field %s offset %-2d size %-2d align %-2d "
4210 "pos %-2d bits %-2d\n",
4211 get_tok_str(f
->v
& ~SYM_FIELD
, NULL
),
4212 cx
, s
, align
, px
, bit_size
);
4215 /* fall back to load/store single-byte wise */
4216 f
->auxtype
= VT_STRUCT
;
4218 printf("FIX field %s : load byte-wise\n",
4219 get_tok_str(f
->v
& ~SYM_FIELD
, NULL
));
4225 static void do_Static_assert(void);
4227 /* enum/struct/union declaration. u is VT_ENUM/VT_STRUCT/VT_UNION */
4228 static void struct_decl(CType
*type
, int u
)
4230 int v
, c
, size
, align
, flexible
;
4231 int bit_size
, bsize
, bt
;
4233 AttributeDef ad
, ad1
;
4236 memset(&ad
, 0, sizeof ad
);
4238 parse_attribute(&ad
);
4242 /* struct already defined ? return it */
4244 expect("struct/union/enum name");
4246 if (s
&& (s
->sym_scope
== local_scope
|| tok
!= '{')) {
4249 if (u
== VT_ENUM
&& IS_ENUM(s
->type
.t
))
4251 tcc_error("redefinition of '%s'", get_tok_str(v
, NULL
));
4256 /* Record the original enum/struct/union token. */
4257 type1
.t
= u
== VT_ENUM
? u
| VT_INT
| VT_UNSIGNED
: u
;
4259 /* we put an undefined size for struct/union */
4260 s
= sym_push(v
| SYM_STRUCT
, &type1
, 0, -1);
4261 s
->r
= 0; /* default alignment is zero as gcc */
4263 type
->t
= s
->type
.t
;
4269 tcc_error("struct/union/enum already defined");
4271 /* cannot be empty */
4272 /* non empty enums are not allowed */
4275 long long ll
= 0, pl
= 0, nl
= 0;
4278 /* enum symbols have static storage */
4279 t
.t
= VT_INT
|VT_STATIC
|VT_ENUM_VAL
;
4283 expect("identifier");
4285 if (ss
&& !local_stack
)
4286 tcc_error("redefinition of enumerator '%s'",
4287 get_tok_str(v
, NULL
));
4291 ll
= expr_const64();
4293 ss
= sym_push(v
, &t
, VT_CONST
, 0);
4295 *ps
= ss
, ps
= &ss
->next
;
4304 /* NOTE: we accept a trailing comma */
4309 /* set integral type of the enum */
4312 if (pl
!= (unsigned)pl
)
4313 t
.t
= (LONG_SIZE
==8 ? VT_LLONG
|VT_LONG
: VT_LLONG
);
4315 } else if (pl
!= (int)pl
|| nl
!= (int)nl
)
4316 t
.t
= (LONG_SIZE
==8 ? VT_LLONG
|VT_LONG
: VT_LLONG
);
4317 s
->type
.t
= type
->t
= t
.t
| VT_ENUM
;
4319 /* set type for enum members */
4320 for (ss
= s
->next
; ss
; ss
= ss
->next
) {
4322 if (ll
== (int)ll
) /* default is int if it fits */
4324 if (t
.t
& VT_UNSIGNED
) {
4325 ss
->type
.t
|= VT_UNSIGNED
;
4326 if (ll
== (unsigned)ll
)
4329 ss
->type
.t
= (ss
->type
.t
& ~VT_BTYPE
)
4330 | (LONG_SIZE
==8 ? VT_LLONG
|VT_LONG
: VT_LLONG
);
4335 while (tok
!= '}') {
4336 if (tok
== TOK_STATIC_ASSERT
) {
4340 if (!parse_btype(&btype
, &ad1
, 0)) {
4346 tcc_error("flexible array member '%s' not at the end of struct",
4347 get_tok_str(v
, NULL
));
4353 type_decl(&type1
, &ad1
, &v
, TYPE_DIRECT
);
4355 if ((type1
.t
& VT_BTYPE
) != VT_STRUCT
)
4356 expect("identifier");
4358 int v
= btype
.ref
->v
;
4359 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
4360 if (tcc_state
->ms_extensions
== 0)
4361 expect("identifier");
4365 if (type_size(&type1
, &align
) < 0) {
4366 if ((u
== VT_STRUCT
) && (type1
.t
& VT_ARRAY
) && c
)
4369 tcc_error("field '%s' has incomplete type",
4370 get_tok_str(v
, NULL
));
4372 if ((type1
.t
& VT_BTYPE
) == VT_FUNC
||
4373 (type1
.t
& VT_BTYPE
) == VT_VOID
||
4374 (type1
.t
& VT_STORAGE
))
4375 tcc_error("invalid type for '%s'",
4376 get_tok_str(v
, NULL
));
4380 bit_size
= expr_const();
4381 /* XXX: handle v = 0 case for messages */
4383 tcc_error("negative width in bit-field '%s'",
4384 get_tok_str(v
, NULL
));
4385 if (v
&& bit_size
== 0)
4386 tcc_error("zero width for bit-field '%s'",
4387 get_tok_str(v
, NULL
));
4388 parse_attribute(&ad1
);
4390 size
= type_size(&type1
, &align
);
4391 if (bit_size
>= 0) {
4392 bt
= type1
.t
& VT_BTYPE
;
4398 tcc_error("bitfields must have scalar type");
4400 if (bit_size
> bsize
) {
4401 tcc_error("width of '%s' exceeds its type",
4402 get_tok_str(v
, NULL
));
4403 } else if (bit_size
== bsize
4404 && !ad
.a
.packed
&& !ad1
.a
.packed
) {
4405 /* no need for bit fields */
4407 } else if (bit_size
== 64) {
4408 tcc_error("field width 64 not implemented");
4410 type1
.t
= (type1
.t
& ~VT_STRUCT_MASK
)
4412 | (bit_size
<< (VT_STRUCT_SHIFT
+ 6));
4415 if (v
!= 0 || (type1
.t
& VT_BTYPE
) == VT_STRUCT
) {
4416 /* Remember we've seen a real field to check
4417 for placement of flexible array member. */
4420 /* If member is a struct or bit-field, enforce
4421 placing into the struct (as anonymous). */
4423 ((type1
.t
& VT_BTYPE
) == VT_STRUCT
||
4428 ss
= sym_push(v
| SYM_FIELD
, &type1
, 0, 0);
4433 if (tok
== ';' || tok
== TOK_EOF
)
4440 parse_attribute(&ad
);
4441 if (ad
.cleanup_func
) {
4442 tcc_warning("attribute '__cleanup__' ignored on type");
4444 check_fields(type
, 1);
4445 check_fields(type
, 0);
4446 struct_layout(type
, &ad
);
4448 tcc_debug_fix_anon(tcc_state
, type
);
4453 static void sym_to_attr(AttributeDef
*ad
, Sym
*s
)
4455 merge_symattr(&ad
->a
, &s
->a
);
4456 merge_funcattr(&ad
->f
, &s
->f
);
4459 /* Add type qualifiers to a type. If the type is an array then the qualifiers
4460 are added to the element type, copied because it could be a typedef. */
4461 static void parse_btype_qualify(CType
*type
, int qualifiers
)
4463 while (type
->t
& VT_ARRAY
) {
4464 type
->ref
= sym_push(SYM_FIELD
, &type
->ref
->type
, 0, type
->ref
->c
);
4465 type
= &type
->ref
->type
;
4467 type
->t
|= qualifiers
;
4470 /* return 0 if no type declaration. otherwise, return the basic type
4473 static int parse_btype(CType
*type
, AttributeDef
*ad
, int ignore_label
)
4475 int t
, u
, bt
, st
, type_found
, typespec_found
, g
, n
;
4479 memset(ad
, 0, sizeof(AttributeDef
));
4489 /* currently, we really ignore extension */
4499 if (u
== VT_SHORT
|| u
== VT_LONG
) {
4500 if (st
!= -1 || (bt
!= -1 && bt
!= VT_INT
))
4501 tmbt
: tcc_error("too many basic types");
4504 if (bt
!= -1 || (st
!= -1 && u
!= VT_INT
))
4509 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | u
;
4526 memset(&ad1
, 0, sizeof(AttributeDef
));
4527 if (parse_btype(&type1
, &ad1
, 0)) {
4528 type_decl(&type1
, &ad1
, &n
, TYPE_ABSTRACT
);
4530 n
= 1 << (ad1
.a
.aligned
- 1);
4532 type_size(&type1
, &n
);
4535 if (n
< 0 || (n
& (n
- 1)) != 0)
4536 tcc_error("alignment must be a positive power of two");
4539 ad
->a
.aligned
= exact_log2p1(n
);
4543 if ((t
& VT_BTYPE
) == VT_DOUBLE
) {
4544 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_LDOUBLE
;
4545 } else if ((t
& (VT_BTYPE
|VT_LONG
)) == VT_LONG
) {
4546 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_LLONG
;
4553 #ifdef TCC_TARGET_ARM64
4555 /* GCC's __uint128_t appears in some Linux header files. Make it a
4556 synonym for long double to get the size and alignment right. */
4564 tcc_error("_Complex is not yet supported");
4569 if ((t
& (VT_BTYPE
|VT_LONG
)) == VT_LONG
) {
4570 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_LDOUBLE
;
4578 struct_decl(&type1
, VT_ENUM
);
4581 type
->ref
= type1
.ref
;
4584 struct_decl(&type1
, VT_STRUCT
);
4587 struct_decl(&type1
, VT_UNION
);
4590 /* type modifiers */
4594 parse_btype_qualify(type
, VT_ATOMIC
);
4597 parse_expr_type(&type1
);
4598 /* remove all storage modifiers except typedef */
4599 type1
.t
&= ~(VT_STORAGE
&~VT_TYPEDEF
);
4601 sym_to_attr(ad
, type1
.ref
);
4609 parse_btype_qualify(type
, VT_CONSTANT
);
4617 parse_btype_qualify(type
, VT_VOLATILE
);
4624 if ((t
& (VT_DEFSIGN
|VT_UNSIGNED
)) == (VT_DEFSIGN
|VT_UNSIGNED
))
4625 tcc_error("signed and unsigned modifier");
4638 if ((t
& (VT_DEFSIGN
|VT_UNSIGNED
)) == VT_DEFSIGN
)
4639 tcc_error("signed and unsigned modifier");
4640 t
|= VT_DEFSIGN
| VT_UNSIGNED
;
4656 if (t
& (VT_EXTERN
|VT_STATIC
|VT_TYPEDEF
) & ~g
)
4657 tcc_error("multiple storage classes");
4669 ad
->f
.func_noreturn
= 1;
4671 /* GNUC attribute */
4672 case TOK_ATTRIBUTE1
:
4673 case TOK_ATTRIBUTE2
:
4674 parse_attribute(ad
);
4675 if (ad
->attr_mode
) {
4676 u
= ad
->attr_mode
-1;
4677 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | u
;
4685 parse_expr_type(&type1
);
4686 /* remove all storage modifiers except typedef */
4687 type1
.t
&= ~(VT_STORAGE
&~VT_TYPEDEF
);
4689 sym_to_attr(ad
, type1
.ref
);
4691 case TOK_THREAD_LOCAL
:
4692 tcc_error("_Thread_local is not implemented");
4697 if (!s
|| !(s
->type
.t
& VT_TYPEDEF
))
4701 if (tok
== ':' && ignore_label
) {
4702 /* ignore if it's a label */
4707 t
&= ~(VT_BTYPE
|VT_LONG
);
4708 u
= t
& ~(VT_CONSTANT
| VT_VOLATILE
), t
^= u
;
4709 type
->t
= (s
->type
.t
& ~VT_TYPEDEF
) | u
;
4710 type
->ref
= s
->type
.ref
;
4712 parse_btype_qualify(type
, t
);
4714 /* get attributes from typedef */
4723 if (tcc_state
->char_is_unsigned
) {
4724 if ((t
& (VT_DEFSIGN
|VT_BTYPE
)) == VT_BYTE
)
4727 /* VT_LONG is used just as a modifier for VT_INT / VT_LLONG */
4728 bt
= t
& (VT_BTYPE
|VT_LONG
);
4730 t
|= LONG_SIZE
== 8 ? VT_LLONG
: VT_INT
;
4731 #ifdef TCC_USING_DOUBLE_FOR_LDOUBLE
4732 if (bt
== VT_LDOUBLE
)
4733 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | (VT_DOUBLE
|VT_LONG
);
4739 /* convert a function parameter type (array to pointer and function to
4740 function pointer) */
4741 static inline void convert_parameter_type(CType
*pt
)
4743 /* remove const and volatile qualifiers (XXX: const could be used
4744 to indicate a const function parameter */
4745 pt
->t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
4746 /* array must be transformed to pointer according to ANSI C */
4748 if ((pt
->t
& VT_BTYPE
) == VT_FUNC
) {
4753 ST_FUNC
void parse_asm_str(CString
*astr
)
4756 parse_mult_str(astr
, "string constant");
4759 /* Parse an asm label and return the token */
4760 static int asm_label_instr(void)
4766 parse_asm_str(&astr
);
4769 printf("asm_alias: \"%s\"\n", (char *)astr
.data
);
4771 v
= tok_alloc(astr
.data
, astr
.size
- 1)->tok
;
4776 static int post_type(CType
*type
, AttributeDef
*ad
, int storage
, int td
)
4778 int n
, l
, t1
, arg_size
, align
;
4779 Sym
**plast
, *s
, *first
;
4782 TokenString
*vla_array_tok
= NULL
;
4783 int *vla_array_str
= NULL
;
4786 /* function type, or recursive declarator (return if so) */
4788 if (TYPE_DIRECT
== (td
& (TYPE_DIRECT
|TYPE_ABSTRACT
)))
4792 else if (parse_btype(&pt
, &ad1
, 0))
4794 else if (td
& (TYPE_DIRECT
|TYPE_ABSTRACT
)) {
4795 merge_attr (ad
, &ad1
);
4806 /* read param name and compute offset */
4807 if (l
!= FUNC_OLD
) {
4808 if ((pt
.t
& VT_BTYPE
) == VT_VOID
&& tok
== ')')
4810 type_decl(&pt
, &ad1
, &n
, TYPE_DIRECT
| TYPE_ABSTRACT
| TYPE_PARAM
);
4811 if ((pt
.t
& VT_BTYPE
) == VT_VOID
)
4812 tcc_error("parameter declared as void");
4817 pt
.t
= VT_VOID
; /* invalid type */
4822 expect("identifier");
4823 convert_parameter_type(&pt
);
4824 arg_size
+= (type_size(&pt
, &align
) + PTR_SIZE
- 1) / PTR_SIZE
;
4825 /* these symbols may be evaluated for VLArrays (see below, under
4826 nocode_wanted) which is why we push them here as normal symbols
4827 temporarily. Example: int func(int a, int b[++a]); */
4828 s
= sym_push(n
, &pt
, VT_LOCAL
|VT_LVAL
, 0);
4834 if (l
== FUNC_NEW
&& tok
== TOK_DOTS
) {
4839 if (l
== FUNC_NEW
&& !parse_btype(&pt
, &ad1
, 0))
4840 tcc_error("invalid type");
4843 /* if no parameters, then old type prototype */
4846 /* remove parameter symbols from token table, keep on stack */
4848 sym_pop(local_stack
? &local_stack
: &global_stack
, first
->prev
, 1);
4849 for (s
= first
; s
; s
= s
->next
)
4853 /* NOTE: const is ignored in returned type as it has a special
4854 meaning in gcc / C++ */
4855 type
->t
&= ~VT_CONSTANT
;
4856 /* some ancient pre-K&R C allows a function to return an array
4857 and the array brackets to be put after the arguments, such
4858 that "int c()[]" means something like "int[] c()" */
4861 skip(']'); /* only handle simple "[]" */
4864 /* we push a anonymous symbol which will contain the function prototype */
4865 ad
->f
.func_args
= arg_size
;
4866 ad
->f
.func_type
= l
;
4867 s
= sym_push(SYM_FIELD
, type
, 0, 0);
4873 } else if (tok
== '[') {
4874 int saved_nocode_wanted
= nocode_wanted
;
4875 /* array definition */
4879 if (td
& TYPE_PARAM
) while (1) {
4880 /* XXX The optional type-quals and static should only be accepted
4881 in parameter decls. The '*' as well, and then even only
4882 in prototypes (not function defs). */
4884 case TOK_RESTRICT1
: case TOK_RESTRICT2
: case TOK_RESTRICT3
:
4895 /* Code generation is not done now but has to be done
4896 at start of function. Save code here for later use. */
4898 skip_or_save_block(&vla_array_tok
);
4900 vla_array_str
= vla_array_tok
->str
;
4901 begin_macro(vla_array_tok
, 2);
4910 } else if (tok
!= ']') {
4911 if (!local_stack
|| (storage
& VT_STATIC
))
4912 vpushi(expr_const());
4914 /* VLAs (which can only happen with local_stack && !VT_STATIC)
4915 length must always be evaluated, even under nocode_wanted,
4916 so that its size slot is initialized (e.g. under sizeof
4922 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
4925 tcc_error("invalid array size");
4927 if (!is_integer_btype(vtop
->type
.t
& VT_BTYPE
))
4928 tcc_error("size of variable length array should be an integer");
4934 /* parse next post type */
4935 post_type(type
, ad
, storage
, (td
& ~(TYPE_DIRECT
|TYPE_ABSTRACT
)) | TYPE_NEST
);
4937 if ((type
->t
& VT_BTYPE
) == VT_FUNC
)
4938 tcc_error("declaration of an array of functions");
4939 if ((type
->t
& VT_BTYPE
) == VT_VOID
4940 || type_size(type
, &align
) < 0)
4941 tcc_error("declaration of an array of incomplete type elements");
4943 t1
|= type
->t
& VT_VLA
;
4948 tcc_error("need explicit inner array size in VLAs");
4951 loc
-= type_size(&int_type
, &align
);
4955 vpush_type_size(type
, &align
);
4957 vset(&int_type
, VT_LOCAL
|VT_LVAL
, n
);
4964 nocode_wanted
= saved_nocode_wanted
;
4966 /* we push an anonymous symbol which will contain the array
4968 s
= sym_push(SYM_FIELD
, type
, 0, n
);
4969 type
->t
= (t1
? VT_VLA
: VT_ARRAY
) | VT_PTR
;
4972 if (vla_array_str
) {
4974 s
->vla_array_str
= vla_array_str
;
4976 tok_str_free_str(vla_array_str
);
4982 /* Parse a type declarator (except basic type), and return the type
4983 in 'type'. 'td' is a bitmask indicating which kind of type decl is
4984 expected. 'type' should contain the basic type. 'ad' is the
4985 attribute definition of the basic type. It can be modified by
4986 type_decl(). If this (possibly abstract) declarator is a pointer chain
4987 it returns the innermost pointed to type (equals *type, but is a different
4988 pointer), otherwise returns type itself, that's used for recursive calls. */
4989 static CType
*type_decl(CType
*type
, AttributeDef
*ad
, int *v
, int td
)
4992 int qualifiers
, storage
;
4994 /* recursive type, remove storage bits first, apply them later again */
4995 storage
= type
->t
& VT_STORAGE
;
4996 type
->t
&= ~VT_STORAGE
;
4999 while (tok
== '*') {
5005 qualifiers
|= VT_ATOMIC
;
5010 qualifiers
|= VT_CONSTANT
;
5015 qualifiers
|= VT_VOLATILE
;
5021 /* XXX: clarify attribute handling */
5022 case TOK_ATTRIBUTE1
:
5023 case TOK_ATTRIBUTE2
:
5024 parse_attribute(ad
);
5028 type
->t
|= qualifiers
;
5030 /* innermost pointed to type is the one for the first derivation */
5031 ret
= pointed_type(type
);
5035 /* This is possibly a parameter type list for abstract declarators
5036 ('int ()'), use post_type for testing this. */
5037 if (!post_type(type
, ad
, 0, td
)) {
5038 /* It's not, so it's a nested declarator, and the post operations
5039 apply to the innermost pointed to type (if any). */
5040 /* XXX: this is not correct to modify 'ad' at this point, but
5041 the syntax is not clear */
5042 parse_attribute(ad
);
5043 post
= type_decl(type
, ad
, v
, td
);
5047 } else if (tok
>= TOK_IDENT
&& (td
& TYPE_DIRECT
)) {
5048 /* type identifier */
5053 if (!(td
& TYPE_ABSTRACT
))
5054 expect("identifier");
5057 post_type(post
, ad
, post
!= ret
? 0 : storage
,
5058 td
& ~(TYPE_DIRECT
|TYPE_ABSTRACT
));
5059 parse_attribute(ad
);
5064 /* indirection with full error checking and bound check */
5065 ST_FUNC
void indir(void)
5067 if ((vtop
->type
.t
& VT_BTYPE
) != VT_PTR
) {
5068 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FUNC
)
5072 if (vtop
->r
& VT_LVAL
)
5074 vtop
->type
= *pointed_type(&vtop
->type
);
5075 /* Arrays and functions are never lvalues */
5076 if (!(vtop
->type
.t
& (VT_ARRAY
| VT_VLA
))
5077 && (vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
) {
5079 /* if bound checking, the referenced pointer must be checked */
5080 #ifdef CONFIG_TCC_BCHECK
5081 if (tcc_state
->do_bounds_check
)
5082 vtop
->r
|= VT_MUSTBOUND
;
5087 /* pass a parameter to a function and do type checking and casting */
5088 static void gfunc_param_typed(Sym
*func
, Sym
*arg
)
5093 func_type
= func
->f
.func_type
;
5094 if (func_type
== FUNC_OLD
||
5095 (func_type
== FUNC_ELLIPSIS
&& arg
== NULL
)) {
5096 /* default casting : only need to convert float to double */
5097 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FLOAT
) {
5098 gen_cast_s(VT_DOUBLE
);
5099 } else if (vtop
->type
.t
& VT_BITFIELD
) {
5100 type
.t
= vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
);
5101 type
.ref
= vtop
->type
.ref
;
5103 } else if (vtop
->r
& VT_MUSTCAST
) {
5104 force_charshort_cast();
5106 } else if (arg
== NULL
) {
5107 tcc_error("too many arguments to function");
5110 type
.t
&= ~VT_CONSTANT
; /* need to do that to avoid false warning */
5111 gen_assign_cast(&type
);
5115 /* parse an expression and return its type without any side effect. */
5116 static void expr_type(CType
*type
, void (*expr_fn
)(void))
5125 /* parse an expression of the form '(type)' or '(expr)' and return its
5127 static void parse_expr_type(CType
*type
)
5133 if (parse_btype(type
, &ad
, 0)) {
5134 type_decl(type
, &ad
, &n
, TYPE_ABSTRACT
);
5136 expr_type(type
, gexpr
);
5141 static void parse_type(CType
*type
)
5146 if (!parse_btype(type
, &ad
, 0)) {
5149 type_decl(type
, &ad
, &n
, TYPE_ABSTRACT
);
5152 static void parse_builtin_params(int nc
, const char *args
)
5161 while ((c
= *args
++)) {
5176 type
.t
= VT_CONSTANT
;
5182 type
.t
= VT_CONSTANT
;
5184 type
.t
|= char_type
.t
;
5196 gen_assign_cast(&type
);
5203 static void parse_atomic(int atok
)
5205 int size
, align
, arg
, t
, save
= 0;
5206 CType
*atom
, *atom_ptr
, ct
= {0};
5209 static const char *const templates
[] = {
5211 * Each entry consists of callback and function template.
5212 * The template represents argument types and return type.
5214 * ? void (return-only)
5217 * A read-only atomic
5218 * p pointer to memory
5225 /* keep in order of appearance in tcctok.h: */
5226 /* __atomic_store */ "alm.?",
5227 /* __atomic_load */ "Asm.v",
5228 /* __atomic_exchange */ "alsm.v",
5229 /* __atomic_compare_exchange */ "aplbmm.b",
5230 /* __atomic_fetch_add */ "avm.v",
5231 /* __atomic_fetch_sub */ "avm.v",
5232 /* __atomic_fetch_or */ "avm.v",
5233 /* __atomic_fetch_xor */ "avm.v",
5234 /* __atomic_fetch_and */ "avm.v",
5235 /* __atomic_fetch_nand */ "avm.v",
5236 /* __atomic_and_fetch */ "avm.v",
5237 /* __atomic_sub_fetch */ "avm.v",
5238 /* __atomic_or_fetch */ "avm.v",
5239 /* __atomic_xor_fetch */ "avm.v",
5240 /* __atomic_and_fetch */ "avm.v",
5241 /* __atomic_nand_fetch */ "avm.v"
5243 const char *template = templates
[(atok
- TOK___atomic_store
)];
5245 atom
= atom_ptr
= NULL
;
5246 size
= 0; /* pacify compiler */
5251 switch (template[arg
]) {
5254 atom_ptr
= &vtop
->type
;
5255 if ((atom_ptr
->t
& VT_BTYPE
) != VT_PTR
)
5257 atom
= pointed_type(atom_ptr
);
5258 size
= type_size(atom
, &align
);
5260 || (size
& (size
- 1))
5261 || (atok
> TOK___atomic_compare_exchange
5262 && (0 == btype_size(atom
->t
& VT_BTYPE
)
5263 || (atom
->t
& VT_BTYPE
) == VT_PTR
)))
5264 expect("integral or integer-sized pointer target type");
5265 /* GCC does not care either: */
5266 /* if (!(atom->t & VT_ATOMIC))
5267 tcc_warning("pointer target declaration is missing '_Atomic'"); */
5271 if ((vtop
->type
.t
& VT_BTYPE
) != VT_PTR
5272 || type_size(pointed_type(&vtop
->type
), &align
) != size
)
5273 tcc_error("pointer target type mismatch in argument %d", arg
+ 1);
5274 gen_assign_cast(atom_ptr
);
5277 gen_assign_cast(atom
);
5281 gen_assign_cast(atom
);
5290 gen_assign_cast(&int_type
);
5294 gen_assign_cast(&ct
);
5297 if ('.' == template[++arg
])
5304 switch (template[arg
+ 1]) {
5313 sprintf(buf
, "%s_%d", get_tok_str(atok
, 0), size
);
5314 vpush_helper_func(tok_alloc_const(buf
));
5315 vrott(arg
- save
+ 1);
5316 gfunc_call(arg
- save
);
5319 PUT_R_RET(vtop
, ct
.t
);
5320 t
= ct
.t
& VT_BTYPE
;
5321 if (t
== VT_BYTE
|| t
== VT_SHORT
|| t
== VT_BOOL
) {
5323 vtop
->r
|= BFVAL(VT_MUSTCAST
, 1);
5325 vtop
->type
.t
= VT_INT
;
5337 ST_FUNC
void unary(void)
5339 int n
, t
, align
, size
, r
, sizeof_caller
;
5344 /* generate line number info */
5346 tcc_debug_line(tcc_state
), tcc_tcov_check_line (tcc_state
, 1);
5348 sizeof_caller
= in_sizeof
;
5351 /* XXX: GCC 2.95.3 does not generate a table although it should be
5359 #ifdef TCC_TARGET_PE
5360 t
= VT_SHORT
|VT_UNSIGNED
;
5368 vsetc(&type
, VT_CONST
, &tokc
);
5372 t
= VT_INT
| VT_UNSIGNED
;
5378 t
= VT_LLONG
| VT_UNSIGNED
;
5390 t
= (LONG_SIZE
== 8 ? VT_LLONG
: VT_INT
) | VT_LONG
;
5393 t
= (LONG_SIZE
== 8 ? VT_LLONG
: VT_INT
) | VT_LONG
| VT_UNSIGNED
;
5395 case TOK___FUNCTION__
:
5397 goto tok_identifier
;
5403 /* special function name identifier */
5404 len
= strlen(funcname
) + 1;
5405 /* generate char[len] type */
5406 type
.t
= char_type
.t
;
5407 if (tcc_state
->warn_write_strings
& WARN_ON
)
5408 type
.t
|= VT_CONSTANT
;
5412 sec
= rodata_section
;
5413 vpush_ref(&type
, sec
, sec
->data_offset
, len
);
5415 memcpy(section_ptr_add(sec
, len
), funcname
, len
);
5420 #ifdef TCC_TARGET_PE
5421 t
= VT_SHORT
| VT_UNSIGNED
;
5427 /* string parsing */
5430 if (tcc_state
->warn_write_strings
& WARN_ON
)
5435 memset(&ad
, 0, sizeof(AttributeDef
));
5436 ad
.section
= rodata_section
;
5437 decl_initializer_alloc(&type
, &ad
, VT_CONST
, 2, 0, 0);
5442 if (parse_btype(&type
, &ad
, 0)) {
5443 type_decl(&type
, &ad
, &n
, TYPE_ABSTRACT
);
5445 /* check ISOC99 compound literal */
5447 /* data is allocated locally by default */
5452 /* all except arrays are lvalues */
5453 if (!(type
.t
& VT_ARRAY
))
5455 memset(&ad
, 0, sizeof(AttributeDef
));
5456 decl_initializer_alloc(&type
, &ad
, r
, 1, 0, 0);
5458 if (sizeof_caller
) {
5465 } else if (tok
== '{') {
5466 int saved_nocode_wanted
= nocode_wanted
;
5467 if (const_wanted
&& !(nocode_wanted
& unevalmask
))
5469 if (0 == local_scope
)
5470 tcc_error("statement expression outside of function");
5471 /* save all registers */
5473 /* statement expression : we do not accept break/continue
5474 inside as GCC does. We do retain the nocode_wanted state,
5475 as statement expressions can't ever be entered from the
5476 outside, so any reactivation of code emission (from labels
5477 or loop heads) can be disabled again after the end of it. */
5479 /* If the statement expr can be entered, then we retain the current
5480 nocode_wanted state (from e.g. a 'return 0;' in the stmt-expr).
5481 If it can't be entered then the state is that from before the
5482 statement expression. */
5483 if (saved_nocode_wanted
)
5484 nocode_wanted
= saved_nocode_wanted
;
5499 /* functions names must be treated as function pointers,
5500 except for unary '&' and sizeof. Since we consider that
5501 functions are not lvalues, we only have to handle it
5502 there and in function calls. */
5503 /* arrays can also be used although they are not lvalues */
5504 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
&&
5505 !(vtop
->type
.t
& (VT_ARRAY
| VT_VLA
)))
5508 vtop
->sym
->a
.addrtaken
= 1;
5509 mk_pointer(&vtop
->type
);
5515 gen_test_zero(TOK_EQ
);
5526 if ((vtop
->type
.t
& VT_BTYPE
) == VT_PTR
)
5527 tcc_error("pointer not accepted for unary plus");
5528 /* In order to force cast, we add zero, except for floating point
5529 where we really need an noop (otherwise -0.0 will be transformed
5531 if (!is_float(vtop
->type
.t
)) {
5543 expr_type(&type
, unary
); /* Perform a in_sizeof = 0; */
5544 if (t
== TOK_SIZEOF
) {
5545 vpush_type_size(&type
, &align
);
5546 gen_cast_s(VT_SIZE_T
);
5548 type_size(&type
, &align
);
5550 if (vtop
[1].r
& VT_SYM
)
5551 s
= vtop
[1].sym
; /* hack: accessing previous vtop */
5552 if (s
&& s
->a
.aligned
)
5553 align
= 1 << (s
->a
.aligned
- 1);
5558 case TOK_builtin_expect
:
5559 /* __builtin_expect is a no-op for now */
5560 parse_builtin_params(0, "ee");
5563 case TOK_builtin_types_compatible_p
:
5564 parse_builtin_params(0, "tt");
5565 vtop
[-1].type
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
5566 vtop
[0].type
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
5567 n
= is_compatible_types(&vtop
[-1].type
, &vtop
[0].type
);
5571 case TOK_builtin_choose_expr
:
5598 case TOK_builtin_constant_p
:
5600 parse_builtin_params(1, "e");
5602 (vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
&&
5603 !((vtop
->r
& VT_SYM
) && vtop
->sym
->a
.addrtaken
);
5607 case TOK_builtin_frame_address
:
5608 case TOK_builtin_return_address
:
5614 level
= expr_const64();
5616 tcc_error("%s only takes positive integers",
5617 tok1
== TOK_builtin_return_address
?
5618 "__builtin_return_address" :
5619 "__builtin_frame_address");
5624 vset(&type
, VT_LOCAL
, 0); /* local frame */
5626 #ifdef TCC_TARGET_RISCV64
5630 mk_pointer(&vtop
->type
);
5631 indir(); /* -> parent frame */
5633 if (tok1
== TOK_builtin_return_address
) {
5634 // assume return address is just above frame pointer on stack
5635 #ifdef TCC_TARGET_ARM
5638 #elif defined TCC_TARGET_RISCV64
5645 mk_pointer(&vtop
->type
);
5650 #ifdef TCC_TARGET_RISCV64
5651 case TOK_builtin_va_start
:
5652 parse_builtin_params(0, "ee");
5653 r
= vtop
->r
& VT_VALMASK
;
5657 tcc_error("__builtin_va_start expects a local variable");
5662 #ifdef TCC_TARGET_X86_64
5663 #ifdef TCC_TARGET_PE
5664 case TOK_builtin_va_start
:
5665 parse_builtin_params(0, "ee");
5666 r
= vtop
->r
& VT_VALMASK
;
5670 tcc_error("__builtin_va_start expects a local variable");
5672 vtop
->type
= char_pointer_type
;
5677 case TOK_builtin_va_arg_types
:
5678 parse_builtin_params(0, "t");
5679 vpushi(classify_x86_64_va_arg(&vtop
->type
));
5686 #ifdef TCC_TARGET_ARM64
5687 case TOK_builtin_va_start
: {
5688 parse_builtin_params(0, "ee");
5692 vtop
->type
.t
= VT_VOID
;
5695 case TOK_builtin_va_arg
: {
5696 parse_builtin_params(0, "et");
5704 case TOK___arm64_clear_cache
: {
5705 parse_builtin_params(0, "ee");
5708 vtop
->type
.t
= VT_VOID
;
5713 /* atomic operations */
5714 case TOK___atomic_store
:
5715 case TOK___atomic_load
:
5716 case TOK___atomic_exchange
:
5717 case TOK___atomic_compare_exchange
:
5718 case TOK___atomic_fetch_add
:
5719 case TOK___atomic_fetch_sub
:
5720 case TOK___atomic_fetch_or
:
5721 case TOK___atomic_fetch_xor
:
5722 case TOK___atomic_fetch_and
:
5723 case TOK___atomic_fetch_nand
:
5724 case TOK___atomic_add_fetch
:
5725 case TOK___atomic_sub_fetch
:
5726 case TOK___atomic_or_fetch
:
5727 case TOK___atomic_xor_fetch
:
5728 case TOK___atomic_and_fetch
:
5729 case TOK___atomic_nand_fetch
:
5733 /* pre operations */
5744 if (is_float(vtop
->type
.t
)) {
5754 goto tok_identifier
;
5756 /* allow to take the address of a label */
5757 if (tok
< TOK_UIDENT
)
5758 expect("label identifier");
5759 s
= label_find(tok
);
5761 s
= label_push(&global_label_stack
, tok
, LABEL_FORWARD
);
5763 if (s
->r
== LABEL_DECLARED
)
5764 s
->r
= LABEL_FORWARD
;
5766 if ((s
->type
.t
& VT_BTYPE
) != VT_PTR
) {
5767 s
->type
.t
= VT_VOID
;
5768 mk_pointer(&s
->type
);
5769 s
->type
.t
|= VT_STATIC
;
5771 vpushsym(&s
->type
, s
);
5777 CType controlling_type
;
5778 int has_default
= 0;
5781 TokenString
*str
= NULL
;
5782 int saved_const_wanted
= const_wanted
;
5787 expr_type(&controlling_type
, expr_eq
);
5788 controlling_type
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
| VT_ARRAY
);
5789 if ((controlling_type
.t
& VT_BTYPE
) == VT_FUNC
)
5790 mk_pointer(&controlling_type
);
5791 const_wanted
= saved_const_wanted
;
5795 if (tok
== TOK_DEFAULT
) {
5797 tcc_error("too many 'default'");
5803 AttributeDef ad_tmp
;
5807 parse_btype(&cur_type
, &ad_tmp
, 0);
5808 type_decl(&cur_type
, &ad_tmp
, &itmp
, TYPE_ABSTRACT
);
5809 if (compare_types(&controlling_type
, &cur_type
, 0)) {
5811 tcc_error("type match twice");
5821 skip_or_save_block(&str
);
5823 skip_or_save_block(NULL
);
5830 type_to_str(buf
, sizeof buf
, &controlling_type
, NULL
);
5831 tcc_error("type '%s' does not match any association", buf
);
5833 begin_macro(str
, 1);
5842 // special qnan , snan and infinity values
5847 vtop
->type
.t
= VT_FLOAT
;
5852 goto special_math_val
;
5855 goto special_math_val
;
5862 expect("identifier");
5864 if (!s
|| IS_ASM_SYM(s
)) {
5865 const char *name
= get_tok_str(t
, NULL
);
5867 tcc_error("'%s' undeclared", name
);
5868 /* for simple function calls, we tolerate undeclared
5869 external reference to int() function */
5870 tcc_warning_c(warn_implicit_function_declaration
)(
5871 "implicit declaration of function '%s'", name
);
5872 s
= external_global_sym(t
, &func_old_type
);
5876 /* A symbol that has a register is a local register variable,
5877 which starts out as VT_LOCAL value. */
5878 if ((r
& VT_VALMASK
) < VT_CONST
)
5879 r
= (r
& ~VT_VALMASK
) | VT_LOCAL
;
5881 vset(&s
->type
, r
, s
->c
);
5882 /* Point to s as backpointer (even without r&VT_SYM).
5883 Will be used by at least the x86 inline asm parser for
5889 } else if (r
== VT_CONST
&& IS_ENUM_VAL(s
->type
.t
)) {
5890 vtop
->c
.i
= s
->enum_val
;
5895 /* post operations */
5897 if (tok
== TOK_INC
|| tok
== TOK_DEC
) {
5900 } else if (tok
== '.' || tok
== TOK_ARROW
|| tok
== TOK_CDOUBLE
) {
5901 int qualifiers
, cumofs
= 0;
5903 if (tok
== TOK_ARROW
)
5905 qualifiers
= vtop
->type
.t
& (VT_CONSTANT
| VT_VOLATILE
);
5908 /* expect pointer on structure */
5909 if ((vtop
->type
.t
& VT_BTYPE
) != VT_STRUCT
)
5910 expect("struct or union");
5911 if (tok
== TOK_CDOUBLE
)
5912 expect("field name");
5914 if (tok
== TOK_CINT
|| tok
== TOK_CUINT
)
5915 expect("field name");
5916 s
= find_field(&vtop
->type
, tok
, &cumofs
);
5917 /* add field offset to pointer */
5918 vtop
->type
= char_pointer_type
; /* change type to 'char *' */
5921 /* change type to field type, and set to lvalue */
5922 vtop
->type
= s
->type
;
5923 vtop
->type
.t
|= qualifiers
;
5924 /* an array is never an lvalue */
5925 if (!(vtop
->type
.t
& VT_ARRAY
)) {
5927 #ifdef CONFIG_TCC_BCHECK
5928 /* if bound checking, the referenced pointer must be checked */
5929 if (tcc_state
->do_bounds_check
)
5930 vtop
->r
|= VT_MUSTBOUND
;
5934 } else if (tok
== '[') {
5940 } else if (tok
== '(') {
5943 int nb_args
, ret_nregs
, ret_align
, regsize
, variadic
;
5946 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
) {
5947 /* pointer test (no array accepted) */
5948 if ((vtop
->type
.t
& (VT_BTYPE
| VT_ARRAY
)) == VT_PTR
) {
5949 vtop
->type
= *pointed_type(&vtop
->type
);
5950 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
)
5954 expect("function pointer");
5957 vtop
->r
&= ~VT_LVAL
; /* no lvalue */
5959 /* get return type */
5962 sa
= s
->next
; /* first parameter */
5963 nb_args
= regsize
= 0;
5965 /* compute first implicit argument if a structure is returned */
5966 if ((s
->type
.t
& VT_BTYPE
) == VT_STRUCT
) {
5967 variadic
= (s
->f
.func_type
== FUNC_ELLIPSIS
);
5968 ret_nregs
= gfunc_sret(&s
->type
, variadic
, &ret
.type
,
5969 &ret_align
, ®size
);
5970 if (ret_nregs
<= 0) {
5971 /* get some space for the returned structure */
5972 size
= type_size(&s
->type
, &align
);
5973 #ifdef TCC_TARGET_ARM64
5974 /* On arm64, a small struct is return in registers.
5975 It is much easier to write it to memory if we know
5976 that we are allowed to write some extra bytes, so
5977 round the allocated space up to a power of 2: */
5979 while (size
& (size
- 1))
5980 size
= (size
| (size
- 1)) + 1;
5982 loc
= (loc
- size
) & -align
;
5984 ret
.r
= VT_LOCAL
| VT_LVAL
;
5985 /* pass it as 'int' to avoid structure arg passing
5987 vseti(VT_LOCAL
, loc
);
5988 #ifdef CONFIG_TCC_BCHECK
5989 if (tcc_state
->do_bounds_check
)
6003 if (ret_nregs
> 0) {
6004 /* return in register */
6006 PUT_R_RET(&ret
, ret
.type
.t
);
6011 gfunc_param_typed(s
, sa
);
6021 tcc_error("too few arguments to function");
6023 gfunc_call(nb_args
);
6025 if (ret_nregs
< 0) {
6026 vsetc(&ret
.type
, ret
.r
, &ret
.c
);
6027 #ifdef TCC_TARGET_RISCV64
6028 arch_transfer_ret_regs(1);
6032 for (r
= ret
.r
+ ret_nregs
+ !ret_nregs
; r
-- > ret
.r
;) {
6033 vsetc(&ret
.type
, r
, &ret
.c
);
6034 vtop
->r2
= ret
.r2
; /* Loop only happens when r2 is VT_CONST */
6037 /* handle packed struct return */
6038 if (((s
->type
.t
& VT_BTYPE
) == VT_STRUCT
) && ret_nregs
) {
6041 size
= type_size(&s
->type
, &align
);
6042 /* We're writing whole regs often, make sure there's enough
6043 space. Assume register size is power of 2. */
6044 if (regsize
> align
)
6046 loc
= (loc
- size
) & -align
;
6050 vset(&ret
.type
, VT_LOCAL
| VT_LVAL
, addr
+ offset
);
6054 if (--ret_nregs
== 0)
6058 vset(&s
->type
, VT_LOCAL
| VT_LVAL
, addr
);
6061 /* Promote char/short return values. This is matters only
6062 for calling function that were not compiled by TCC and
6063 only on some architectures. For those where it doesn't
6064 matter we expect things to be already promoted to int,
6066 t
= s
->type
.t
& VT_BTYPE
;
6067 if (t
== VT_BYTE
|| t
== VT_SHORT
|| t
== VT_BOOL
) {
6069 vtop
->r
|= BFVAL(VT_MUSTCAST
, 1);
6071 vtop
->type
.t
= VT_INT
;
6075 if (s
->f
.func_noreturn
) {
6077 tcc_tcov_block_end(tcc_state
, -1);
6086 #ifndef precedence_parser /* original top-down parser */
6088 static void expr_prod(void)
6093 while ((t
= tok
) == '*' || t
== '/' || t
== '%') {
6100 static void expr_sum(void)
6105 while ((t
= tok
) == '+' || t
== '-') {
6112 static void expr_shift(void)
6117 while ((t
= tok
) == TOK_SHL
|| t
== TOK_SAR
) {
6124 static void expr_cmp(void)
6129 while (((t
= tok
) >= TOK_ULE
&& t
<= TOK_GT
) ||
6130 t
== TOK_ULT
|| t
== TOK_UGE
) {
6137 static void expr_cmpeq(void)
6142 while ((t
= tok
) == TOK_EQ
|| t
== TOK_NE
) {
6149 static void expr_and(void)
6152 while (tok
== '&') {
6159 static void expr_xor(void)
6162 while (tok
== '^') {
6169 static void expr_or(void)
6172 while (tok
== '|') {
6179 static void expr_landor(int op
);
6181 static void expr_land(void)
6184 if (tok
== TOK_LAND
)
6188 static void expr_lor(void)
6195 # define expr_landor_next(op) op == TOK_LAND ? expr_or() : expr_land()
6196 #else /* defined precedence_parser */
6197 # define expr_landor_next(op) unary(), expr_infix(precedence(op) + 1)
6198 # define expr_lor() unary(), expr_infix(1)
6200 static int precedence(int tok
)
6203 case TOK_LOR
: return 1;
6204 case TOK_LAND
: return 2;
6208 case TOK_EQ
: case TOK_NE
: return 6;
6209 relat
: case TOK_ULT
: case TOK_UGE
: return 7;
6210 case TOK_SHL
: case TOK_SAR
: return 8;
6211 case '+': case '-': return 9;
6212 case '*': case '/': case '%': return 10;
6214 if (tok
>= TOK_ULE
&& tok
<= TOK_GT
)
6219 static unsigned char prec
[256];
6220 static void init_prec(void)
6223 for (i
= 0; i
< 256; i
++)
6224 prec
[i
] = precedence(i
);
6226 #define precedence(i) ((unsigned)i < 256 ? prec[i] : 0)
6228 static void expr_landor(int op
);
6230 static void expr_infix(int p
)
6233 while ((p2
= precedence(t
)) >= p
) {
6234 if (t
== TOK_LOR
|| t
== TOK_LAND
) {
6239 if (precedence(tok
) > p2
)
6248 /* Assuming vtop is a value used in a conditional context
6249 (i.e. compared with zero) return 0 if it's false, 1 if
6250 true and -1 if it can't be statically determined. */
6251 static int condition_3way(void)
6254 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
&&
6255 (!(vtop
->r
& VT_SYM
) || !vtop
->sym
->a
.weak
)) {
6257 gen_cast_s(VT_BOOL
);
6264 static void expr_landor(int op
)
6266 int t
= 0, cc
= 1, f
= 0, i
= op
== TOK_LAND
, c
;
6268 c
= f
? i
: condition_3way();
6270 save_regs(1), cc
= 0;
6272 nocode_wanted
++, f
= 1;
6280 expr_landor_next(op
);
6292 static int is_cond_bool(SValue
*sv
)
6294 if ((sv
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
6295 && (sv
->type
.t
& VT_BTYPE
) == VT_INT
)
6296 return (unsigned)sv
->c
.i
< 2;
6297 if (sv
->r
== VT_CMP
)
6302 static void expr_cond(void)
6304 int tt
, u
, r1
, r2
, rc
, t1
, t2
, islv
, c
, g
;
6311 c
= condition_3way();
6312 g
= (tok
== ':' && gnu_ext
);
6322 /* needed to avoid having different registers saved in
6334 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FUNC
)
6335 mk_pointer(&vtop
->type
);
6336 sv
= *vtop
; /* save value to handle it later */
6337 vtop
--; /* no vpop so that FP stack is not flushed */
6354 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FUNC
)
6355 mk_pointer(&vtop
->type
);
6357 /* cast operands to correct type according to ISOC rules */
6358 if (!combine_types(&type
, &sv
, vtop
, '?'))
6359 type_incompatibility_error(&sv
.type
, &vtop
->type
,
6360 "type mismatch in conditional expression (have '%s' and '%s')");
6362 if (c
< 0 && is_cond_bool(vtop
) && is_cond_bool(&sv
)) {
6363 /* optimize "if (f ? a > b : c || d) ..." for example, where normally
6364 "a < b" and "c || d" would be forced to "(int)0/1" first, whereas
6365 this code jumps directly to the if's then/else branches. */
6370 /* combine jump targets of 2nd op with VT_CMP of 1st op */
6374 // tcc_warning("two conditions expr_cond");
6378 /* keep structs lvalue by transforming `(expr ? a : b)` to `*(expr ? &a : &b)` so
6379 that `(expr ? a : b).mem` does not error with "lvalue expected" */
6380 islv
= (vtop
->r
& VT_LVAL
) && (sv
.r
& VT_LVAL
) && VT_STRUCT
== (type
.t
& VT_BTYPE
);
6382 /* now we convert second operand */
6386 mk_pointer(&vtop
->type
);
6388 } else if (VT_STRUCT
== (vtop
->type
.t
& VT_BTYPE
))
6392 rc
= RC_TYPE(type
.t
);
6393 /* for long longs, we use fixed registers to avoid having
6394 to handle a complicated move */
6395 if (USING_TWO_WORDS(type
.t
))
6396 rc
= RC_RET(type
.t
);
6407 /* this is horrible, but we must also convert first
6413 mk_pointer(&vtop
->type
);
6415 } else if (VT_STRUCT
== (vtop
->type
.t
& VT_BTYPE
))
6421 move_reg(r2
, r1
, islv
? VT_PTR
: type
.t
);
6431 static void expr_eq(void)
6436 if ((t
= tok
) == '=' || TOK_ASSIGN(t
)) {
6444 gen_op(TOK_ASSIGN_OP(t
));
6450 ST_FUNC
void gexpr(void)
6456 constant_p
&= (vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
&&
6457 !((vtop
->r
& VT_SYM
) && vtop
->sym
->a
.addrtaken
);
6463 /* parse a constant expression and return value in vtop. */
6464 static void expr_const1(void)
6467 nocode_wanted
+= unevalmask
+ 1;
6469 nocode_wanted
-= unevalmask
+ 1;
6473 /* parse an integer constant and return its value. */
6474 static inline int64_t expr_const64(void)
6478 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) != VT_CONST
)
6479 expect("constant expression");
6485 /* parse an integer constant and return its value.
6486 Complain if it doesn't fit 32bit (signed or unsigned). */
6487 ST_FUNC
int expr_const(void)
6490 int64_t wc
= expr_const64();
6492 if (c
!= wc
&& (unsigned)c
!= wc
)
6493 tcc_error("constant exceeds 32 bit");
6497 /* ------------------------------------------------------------------------- */
6498 /* return from function */
6500 #ifndef TCC_TARGET_ARM64
6501 static void gfunc_return(CType
*func_type
)
6503 if ((func_type
->t
& VT_BTYPE
) == VT_STRUCT
) {
6504 CType type
, ret_type
;
6505 int ret_align
, ret_nregs
, regsize
;
6506 ret_nregs
= gfunc_sret(func_type
, func_var
, &ret_type
,
6507 &ret_align
, ®size
);
6508 if (ret_nregs
< 0) {
6509 #ifdef TCC_TARGET_RISCV64
6510 arch_transfer_ret_regs(0);
6512 } else if (0 == ret_nregs
) {
6513 /* if returning structure, must copy it to implicit
6514 first pointer arg location */
6517 vset(&type
, VT_LOCAL
| VT_LVAL
, func_vc
);
6520 /* copy structure value to pointer */
6523 /* returning structure packed into registers */
6524 int size
, addr
, align
, rc
;
6525 size
= type_size(func_type
,&align
);
6526 if ((vtop
->r
!= (VT_LOCAL
| VT_LVAL
) ||
6527 (vtop
->c
.i
& (ret_align
-1)))
6528 && (align
& (ret_align
-1))) {
6529 loc
= (loc
- size
) & -ret_align
;
6532 vset(&type
, VT_LOCAL
| VT_LVAL
, addr
);
6536 vset(&ret_type
, VT_LOCAL
| VT_LVAL
, addr
);
6538 vtop
->type
= ret_type
;
6539 rc
= RC_RET(ret_type
.t
);
6547 if (--ret_nregs
== 0)
6549 /* We assume that when a structure is returned in multiple
6550 registers, their classes are consecutive values of the
6553 vtop
->c
.i
+= regsize
;
6558 gv(RC_RET(func_type
->t
));
6560 vtop
--; /* NOT vpop() because on x86 it would flush the fp stack */
6564 static void check_func_return(void)
6566 if ((func_vt
.t
& VT_BTYPE
) == VT_VOID
)
6568 if (!strcmp (funcname
, "main")
6569 && (func_vt
.t
& VT_BTYPE
) == VT_INT
) {
6570 /* main returns 0 by default */
6572 gen_assign_cast(&func_vt
);
6573 gfunc_return(&func_vt
);
6575 tcc_warning("function might return no value: '%s'", funcname
);
6579 /* ------------------------------------------------------------------------- */
6582 static int case_cmpi(const void *pa
, const void *pb
)
6584 int64_t a
= (*(struct case_t
**) pa
)->v1
;
6585 int64_t b
= (*(struct case_t
**) pb
)->v1
;
6586 return a
< b
? -1 : a
> b
;
6589 static int case_cmpu(const void *pa
, const void *pb
)
6591 uint64_t a
= (uint64_t)(*(struct case_t
**) pa
)->v1
;
6592 uint64_t b
= (uint64_t)(*(struct case_t
**) pb
)->v1
;
6593 return a
< b
? -1 : a
> b
;
6596 static void gtst_addr(int t
, int a
)
6598 gsym_addr(gvtst(0, t
), a
);
6601 static void gcase(struct case_t
**base
, int len
, int *bsym
)
6605 int ll
= (vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
;
6622 gtst_addr(0, p
->sym
); /* v1 <= x <= v2 */
6624 gcase(base
, len
/2, bsym
);
6628 base
+= e
; len
-= e
;
6638 if (p
->v1
== p
->v2
) {
6640 gtst_addr(0, p
->sym
);
6650 gtst_addr(0, p
->sym
);
6654 *bsym
= gjmp(*bsym
);
6657 /* ------------------------------------------------------------------------- */
6658 /* __attribute__((cleanup(fn))) */
6660 static void try_call_scope_cleanup(Sym
*stop
)
6662 Sym
*cls
= cur_scope
->cl
.s
;
6664 for (; cls
!= stop
; cls
= cls
->ncl
) {
6665 Sym
*fs
= cls
->next
;
6666 Sym
*vs
= cls
->prev_tok
;
6668 vpushsym(&fs
->type
, fs
);
6669 vset(&vs
->type
, vs
->r
, vs
->c
);
6671 mk_pointer(&vtop
->type
);
6677 static void try_call_cleanup_goto(Sym
*cleanupstate
)
6682 if (!cur_scope
->cl
.s
)
6685 /* search NCA of both cleanup chains given parents and initial depth */
6686 ocd
= cleanupstate
? cleanupstate
->v
& ~SYM_FIELD
: 0;
6687 for (ccd
= cur_scope
->cl
.n
, oc
= cleanupstate
; ocd
> ccd
; --ocd
, oc
= oc
->ncl
)
6689 for (cc
= cur_scope
->cl
.s
; ccd
> ocd
; --ccd
, cc
= cc
->ncl
)
6691 for (; cc
!= oc
; cc
= cc
->ncl
, oc
= oc
->ncl
, --ccd
)
6694 try_call_scope_cleanup(cc
);
6697 /* call 'func' for each __attribute__((cleanup(func))) */
6698 static void block_cleanup(struct scope
*o
)
6702 for (pg
= &pending_gotos
; (g
= *pg
) && g
->c
> o
->cl
.n
;) {
6703 if (g
->prev_tok
->r
& LABEL_FORWARD
) {
6708 try_call_scope_cleanup(o
->cl
.s
);
6709 pcl
->jnext
= gjmp(0);
6711 goto remove_pending
;
6721 try_call_scope_cleanup(o
->cl
.s
);
6724 /* ------------------------------------------------------------------------- */
6727 static void vla_restore(int loc
)
6730 gen_vla_sp_restore(loc
);
6733 static void vla_leave(struct scope
*o
)
6735 struct scope
*c
= cur_scope
, *v
= NULL
;
6736 for (; c
!= o
&& c
; c
= c
->prev
)
6740 vla_restore(v
->vla
.locorig
);
6743 /* ------------------------------------------------------------------------- */
6746 static void new_scope(struct scope
*o
)
6748 /* copy and link previous scope */
6750 o
->prev
= cur_scope
;
6752 cur_scope
->vla
.num
= 0;
6754 /* record local declaration stack position */
6755 o
->lstk
= local_stack
;
6756 o
->llstk
= local_label_stack
;
6760 tcc_debug_stabn(tcc_state
, N_LBRAC
, ind
- func_ind
);
6763 static void prev_scope(struct scope
*o
, int is_expr
)
6767 if (o
->cl
.s
!= o
->prev
->cl
.s
)
6768 block_cleanup(o
->prev
);
6770 /* pop locally defined labels */
6771 label_pop(&local_label_stack
, o
->llstk
, is_expr
);
6773 /* In the is_expr case (a statement expression is finished here),
6774 vtop might refer to symbols on the local_stack. Either via the
6775 type or via vtop->sym. We can't pop those nor any that in turn
6776 might be referred to. To make it easier we don't roll back
6777 any symbols in that case; some upper level call to block() will
6778 do that. We do have to remove such symbols from the lookup
6779 tables, though. sym_pop will do that. */
6781 /* pop locally defined symbols */
6782 pop_local_syms(o
->lstk
, is_expr
);
6783 cur_scope
= o
->prev
;
6787 tcc_debug_stabn(tcc_state
, N_RBRAC
, ind
- func_ind
);
6790 /* leave a scope via break/continue(/goto) */
6791 static void leave_scope(struct scope
*o
)
6795 try_call_scope_cleanup(o
->cl
.s
);
6799 /* ------------------------------------------------------------------------- */
6800 /* call block from 'for do while' loops */
6802 static void lblock(int *bsym
, int *csym
)
6804 struct scope
*lo
= loop_scope
, *co
= cur_scope
;
6805 int *b
= co
->bsym
, *c
= co
->csym
;
6819 static void block(int is_expr
)
6821 int a
, b
, c
, d
, e
, t
;
6826 /* default return value is (void) */
6828 vtop
->type
.t
= VT_VOID
;
6833 /* If the token carries a value, next() might destroy it. Only with
6834 invalid code such as f(){"123"4;} */
6835 if (TOK_HAS_VALUE(t
))
6840 tcc_tcov_check_line (tcc_state
, 0), tcc_tcov_block_begin (tcc_state
);
6843 //new_scope(&o); //?? breaks tests2.122
6849 if (tok
== TOK_ELSE
) {
6854 gsym(d
); /* patch else jmp */
6858 //prev_scope(&o,0); //?? breaks tests2.122
6860 } else if (t
== TOK_WHILE
) {
6873 } else if (t
== '{') {
6876 /* handle local labels declarations */
6877 while (tok
== TOK_LABEL
) {
6880 if (tok
< TOK_UIDENT
)
6881 expect("label identifier");
6882 label_push(&local_label_stack
, tok
, LABEL_DECLARED
);
6884 } while (tok
== ',');
6888 while (tok
!= '}') {
6897 prev_scope(&o
, is_expr
);
6900 else if (!nocode_wanted
)
6901 check_func_return();
6903 } else if (t
== TOK_RETURN
) {
6904 b
= (func_vt
.t
& VT_BTYPE
) != VT_VOID
;
6908 gen_assign_cast(&func_vt
);
6910 if (vtop
->type
.t
!= VT_VOID
)
6911 tcc_warning("void function returns a value");
6915 tcc_warning("'return' with no value");
6918 leave_scope(root_scope
);
6920 gfunc_return(&func_vt
);
6922 /* jump unless last stmt in top-level block */
6923 if (tok
!= '}' || local_scope
!= 1)
6926 tcc_tcov_block_end (tcc_state
, -1);
6929 } else if (t
== TOK_BREAK
) {
6931 if (!cur_scope
->bsym
)
6932 tcc_error("cannot break");
6933 if (cur_switch
&& cur_scope
->bsym
== cur_switch
->bsym
)
6934 leave_scope(cur_switch
->scope
);
6936 leave_scope(loop_scope
);
6937 *cur_scope
->bsym
= gjmp(*cur_scope
->bsym
);
6940 } else if (t
== TOK_CONTINUE
) {
6942 if (!cur_scope
->csym
)
6943 tcc_error("cannot continue");
6944 leave_scope(loop_scope
);
6945 *cur_scope
->csym
= gjmp(*cur_scope
->csym
);
6948 } else if (t
== TOK_FOR
) {
6953 /* c99 for-loop init decl? */
6954 if (!decl(VT_JMP
)) {
6955 /* no, regular for-loop init expr */
6983 } else if (t
== TOK_DO
) {
6999 } else if (t
== TOK_SWITCH
) {
7000 struct switch_t
*sw
;
7003 sw
= tcc_mallocz(sizeof *sw
);
7005 sw
->scope
= cur_scope
;
7006 sw
->prev
= cur_switch
;
7007 sw
->nocode_wanted
= nocode_wanted
;
7013 sw
->sv
= *vtop
--; /* save switch value */
7016 b
= gjmp(0); /* jump to first case */
7018 a
= gjmp(a
); /* add implicit break */
7022 if (sw
->nocode_wanted
)
7024 if (sw
->sv
.type
.t
& VT_UNSIGNED
)
7025 qsort(sw
->p
, sw
->n
, sizeof(void*), case_cmpu
);
7027 qsort(sw
->p
, sw
->n
, sizeof(void*), case_cmpi
);
7028 for (b
= 1; b
< sw
->n
; b
++)
7029 if (sw
->sv
.type
.t
& VT_UNSIGNED
7030 ? (uint64_t)sw
->p
[b
- 1]->v2
>= (uint64_t)sw
->p
[b
]->v1
7031 : sw
->p
[b
- 1]->v2
>= sw
->p
[b
]->v1
)
7032 tcc_error("duplicate case value");
7035 d
= 0, gcase(sw
->p
, sw
->n
, &d
);
7038 gsym_addr(d
, sw
->def_sym
);
7045 dynarray_reset(&sw
->p
, &sw
->n
);
7046 cur_switch
= sw
->prev
;
7050 } else if (t
== TOK_CASE
) {
7051 struct case_t
*cr
= tcc_malloc(sizeof(struct case_t
));
7054 cr
->v1
= cr
->v2
= expr_const64();
7055 if (gnu_ext
&& tok
== TOK_DOTS
) {
7057 cr
->v2
= expr_const64();
7058 if ((!(cur_switch
->sv
.type
.t
& VT_UNSIGNED
) && cr
->v2
< cr
->v1
)
7059 || (cur_switch
->sv
.type
.t
& VT_UNSIGNED
&& (uint64_t)cr
->v2
< (uint64_t)cr
->v1
))
7060 tcc_warning("empty case range");
7062 /* case and default are unreachable from a switch under nocode_wanted */
7063 if (!cur_switch
->nocode_wanted
)
7065 dynarray_add(&cur_switch
->p
, &cur_switch
->n
, cr
);
7068 goto block_after_label
;
7070 } else if (t
== TOK_DEFAULT
) {
7073 if (cur_switch
->def_sym
)
7074 tcc_error("too many 'default'");
7075 cur_switch
->def_sym
= cur_switch
->nocode_wanted
? 1 : gind();
7078 goto block_after_label
;
7080 } else if (t
== TOK_GOTO
) {
7081 if (cur_scope
->vla
.num
)
7082 vla_restore(cur_scope
->vla
.locorig
);
7083 if (tok
== '*' && gnu_ext
) {
7087 if ((vtop
->type
.t
& VT_BTYPE
) != VT_PTR
)
7091 } else if (tok
>= TOK_UIDENT
) {
7092 s
= label_find(tok
);
7093 /* put forward definition if needed */
7095 s
= label_push(&global_label_stack
, tok
, LABEL_FORWARD
);
7096 else if (s
->r
== LABEL_DECLARED
)
7097 s
->r
= LABEL_FORWARD
;
7099 if (s
->r
& LABEL_FORWARD
) {
7100 /* start new goto chain for cleanups, linked via label->next */
7101 if (cur_scope
->cl
.s
&& !nocode_wanted
) {
7102 sym_push2(&pending_gotos
, SYM_FIELD
, 0, cur_scope
->cl
.n
);
7103 pending_gotos
->prev_tok
= s
;
7104 s
= sym_push2(&s
->next
, SYM_FIELD
, 0, 0);
7105 pending_gotos
->next
= s
;
7107 s
->jnext
= gjmp(s
->jnext
);
7109 try_call_cleanup_goto(s
->cleanupstate
);
7110 gjmp_addr(s
->jnext
);
7115 expect("label identifier");
7119 } else if (t
== TOK_ASM1
|| t
== TOK_ASM2
|| t
== TOK_ASM3
) {
7123 if (tok
== ':' && t
>= TOK_UIDENT
) {
7128 if (s
->r
== LABEL_DEFINED
)
7129 tcc_error("duplicate label '%s'", get_tok_str(s
->v
, NULL
));
7130 s
->r
= LABEL_DEFINED
;
7132 Sym
*pcl
; /* pending cleanup goto */
7133 for (pcl
= s
->next
; pcl
; pcl
= pcl
->prev
)
7135 sym_pop(&s
->next
, NULL
, 0);
7139 s
= label_push(&global_label_stack
, t
, LABEL_DEFINED
);
7142 s
->cleanupstate
= cur_scope
->cl
.s
;
7146 /* Accept attributes after labels (e.g. 'unused') */
7147 AttributeDef ad_tmp
;
7148 parse_attribute(&ad_tmp
);
7151 tcc_tcov_reset_ind(tcc_state
);
7152 vla_restore(cur_scope
->vla
.loc
);
7155 /* we accept this, but it is a mistake */
7156 tcc_warning_c(warn_all
)("deprecated use of label at end of compound statement");
7159 /* expression case */
7176 tcc_tcov_check_line (tcc_state
, 0), tcc_tcov_block_end (tcc_state
, 0);
7179 /* This skips over a stream of tokens containing balanced {} and ()
7180 pairs, stopping at outer ',' ';' and '}' (or matching '}' if we started
7181 with a '{'). If STR then allocates and stores the skipped tokens
7182 in *STR. This doesn't check if () and {} are nested correctly,
7183 i.e. "({)}" is accepted. */
7184 static void skip_or_save_block(TokenString
**str
)
7186 int braces
= tok
== '{';
7189 *str
= tok_str_alloc();
7201 if (str
|| level
> 0)
7202 tcc_error("unexpected end of file");
7207 tok_str_add_tok(*str
);
7209 if (t
== '{' || t
== '(' || t
== '[') {
7211 } else if (t
== '}' || t
== ')' || t
== ']') {
7213 if (level
== 0 && braces
&& t
== '}')
7218 tok_str_add(*str
, -1);
7219 tok_str_add(*str
, 0);
7223 #define EXPR_CONST 1
7226 static void parse_init_elem(int expr_type
)
7228 int saved_global_expr
;
7231 /* compound literals must be allocated globally in this case */
7232 saved_global_expr
= global_expr
;
7235 global_expr
= saved_global_expr
;
7236 /* NOTE: symbols are accepted, as well as lvalue for anon symbols
7237 (compound literals). */
7238 if (((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) != VT_CONST
7239 && ((vtop
->r
& (VT_SYM
|VT_LVAL
)) != (VT_SYM
|VT_LVAL
)
7240 || vtop
->sym
->v
< SYM_FIRST_ANOM
))
7241 #ifdef TCC_TARGET_PE
7242 || ((vtop
->r
& VT_SYM
) && vtop
->sym
->a
.dllimport
)
7245 tcc_error("initializer element is not constant");
7254 static void init_assert(init_params
*p
, int offset
)
7256 if (p
->sec
? !NODATA_WANTED
&& offset
> p
->sec
->data_offset
7257 : !nocode_wanted
&& offset
> p
->local_offset
)
7258 tcc_internal_error("initializer overflow");
7261 #define init_assert(sec, offset)
7264 /* put zeros for variable based init */
7265 static void init_putz(init_params
*p
, unsigned long c
, int size
)
7267 init_assert(p
, c
+ size
);
7269 /* nothing to do because globals are already set to zero */
7271 vpush_helper_func(TOK_memset
);
7273 #ifdef TCC_TARGET_ARM
7285 #define DIF_SIZE_ONLY 2
7286 #define DIF_HAVE_ELEM 4
7289 /* delete relocations for specified range c ... c + size. Unfortunatly
7290 in very special cases, relocations may occur unordered */
7291 static void decl_design_delrels(Section
*sec
, int c
, int size
)
7293 ElfW_Rel
*rel
, *rel2
, *rel_end
;
7294 if (!sec
|| !sec
->reloc
)
7296 rel
= rel2
= (ElfW_Rel
*)sec
->reloc
->data
;
7297 rel_end
= (ElfW_Rel
*)(sec
->reloc
->data
+ sec
->reloc
->data_offset
);
7298 while (rel
< rel_end
) {
7299 if (rel
->r_offset
>= c
&& rel
->r_offset
< c
+ size
) {
7300 sec
->reloc
->data_offset
-= sizeof *rel
;
7303 memcpy(rel2
, rel
, sizeof *rel
);
7310 static void decl_design_flex(init_params
*p
, Sym
*ref
, int index
)
7312 if (ref
== p
->flex_array_ref
) {
7313 if (index
>= ref
->c
)
7315 } else if (ref
->c
< 0)
7316 tcc_error("flexible array has zero size in this context");
7319 /* t is the array or struct type. c is the array or struct
7320 address. cur_field is the pointer to the current
7321 field, for arrays the 'c' member contains the current start
7322 index. 'flags' is as in decl_initializer.
7323 'al' contains the already initialized length of the
7324 current container (starting at c). This returns the new length of that. */
7325 static int decl_designator(init_params
*p
, CType
*type
, unsigned long c
,
7326 Sym
**cur_field
, int flags
, int al
)
7329 int index
, index_last
, align
, l
, nb_elems
, elem_size
;
7330 unsigned long corig
= c
;
7335 if (flags
& DIF_HAVE_ELEM
)
7338 if (gnu_ext
&& tok
>= TOK_UIDENT
) {
7345 /* NOTE: we only support ranges for last designator */
7346 while (nb_elems
== 1 && (tok
== '[' || tok
== '.')) {
7348 if (!(type
->t
& VT_ARRAY
))
7349 expect("array type");
7351 index
= index_last
= expr_const();
7352 if (tok
== TOK_DOTS
&& gnu_ext
) {
7354 index_last
= expr_const();
7358 decl_design_flex(p
, s
, index_last
);
7359 if (index
< 0 || index_last
>= s
->c
|| index_last
< index
)
7360 tcc_error("index exceeds array bounds or range is empty");
7362 (*cur_field
)->c
= index_last
;
7363 type
= pointed_type(type
);
7364 elem_size
= type_size(type
, &align
);
7365 c
+= index
* elem_size
;
7366 nb_elems
= index_last
- index
+ 1;
7373 if ((type
->t
& VT_BTYPE
) != VT_STRUCT
)
7374 expect("struct/union type");
7376 f
= find_field(type
, l
, &cumofs
);
7387 } else if (!gnu_ext
) {
7392 if (type
->t
& VT_ARRAY
) {
7393 index
= (*cur_field
)->c
;
7395 decl_design_flex(p
, s
, index
);
7397 tcc_error("too many initializers");
7398 type
= pointed_type(type
);
7399 elem_size
= type_size(type
, &align
);
7400 c
+= index
* elem_size
;
7403 /* Skip bitfield padding. Also with size 32 and 64. */
7404 while (f
&& (f
->v
& SYM_FIRST_ANOM
) &&
7405 is_integer_btype(f
->type
.t
& VT_BTYPE
))
7406 *cur_field
= f
= f
->next
;
7408 tcc_error("too many initializers");
7414 if (!elem_size
) /* for structs */
7415 elem_size
= type_size(type
, &align
);
7417 /* Using designators the same element can be initialized more
7418 than once. In that case we need to delete possibly already
7419 existing relocations. */
7420 if (!(flags
& DIF_SIZE_ONLY
) && c
- corig
< al
) {
7421 decl_design_delrels(p
->sec
, c
, elem_size
* nb_elems
);
7422 flags
&= ~DIF_CLEAR
; /* mark stack dirty too */
7425 decl_initializer(p
, type
, c
, flags
& ~DIF_FIRST
);
7427 if (!(flags
& DIF_SIZE_ONLY
) && nb_elems
> 1) {
7431 if (p
->sec
|| (type
->t
& VT_ARRAY
)) {
7432 /* make init_putv/vstore believe it were a struct */
7434 t1
.t
= VT_STRUCT
, t1
.ref
= &aref
;
7438 vpush_ref(type
, p
->sec
, c
, elem_size
);
7440 vset(type
, VT_LOCAL
|VT_LVAL
, c
);
7441 for (i
= 1; i
< nb_elems
; i
++) {
7443 init_putv(p
, type
, c
+ elem_size
* i
);
7448 c
+= nb_elems
* elem_size
;
7454 /* store a value or an expression directly in global data or in local array */
7455 static void init_putv(init_params
*p
, CType
*type
, unsigned long c
)
7461 Section
*sec
= p
->sec
;
7465 dtype
.t
&= ~VT_CONSTANT
; /* need to do that to avoid false warning */
7467 size
= type_size(type
, &align
);
7468 if (type
->t
& VT_BITFIELD
)
7469 size
= (BIT_POS(type
->t
) + BIT_SIZE(type
->t
) + 7) / 8;
7470 init_assert(p
, c
+ size
);
7473 /* XXX: not portable */
7474 /* XXX: generate error if incorrect relocation */
7475 gen_assign_cast(&dtype
);
7476 bt
= type
->t
& VT_BTYPE
;
7478 if ((vtop
->r
& VT_SYM
)
7480 && (bt
!= (PTR_SIZE
== 8 ? VT_LLONG
: VT_INT
)
7481 || (type
->t
& VT_BITFIELD
))
7482 && !((vtop
->r
& VT_CONST
) && vtop
->sym
->v
>= SYM_FIRST_ANOM
)
7484 tcc_error("initializer element is not computable at load time");
7486 if (NODATA_WANTED
) {
7491 ptr
= sec
->data
+ c
;
7494 /* XXX: make code faster ? */
7495 if ((vtop
->r
& (VT_SYM
|VT_CONST
)) == (VT_SYM
|VT_CONST
) &&
7496 vtop
->sym
->v
>= SYM_FIRST_ANOM
&&
7497 /* XXX This rejects compound literals like
7498 '(void *){ptr}'. The problem is that '&sym' is
7499 represented the same way, which would be ruled out
7500 by the SYM_FIRST_ANOM check above, but also '"string"'
7501 in 'char *p = "string"' is represented the same
7502 with the type being VT_PTR and the symbol being an
7503 anonymous one. That is, there's no difference in vtop
7504 between '(void *){x}' and '&(void *){x}'. Ignore
7505 pointer typed entities here. Hopefully no real code
7506 will ever use compound literals with scalar type. */
7507 (vtop
->type
.t
& VT_BTYPE
) != VT_PTR
) {
7508 /* These come from compound literals, memcpy stuff over. */
7512 esym
= elfsym(vtop
->sym
);
7513 ssec
= tcc_state
->sections
[esym
->st_shndx
];
7514 memmove (ptr
, ssec
->data
+ esym
->st_value
+ (int)vtop
->c
.i
, size
);
7516 /* We need to copy over all memory contents, and that
7517 includes relocations. Use the fact that relocs are
7518 created it order, so look from the end of relocs
7519 until we hit one before the copied region. */
7520 unsigned long relofs
= ssec
->reloc
->data_offset
;
7521 while (relofs
>= sizeof(*rel
)) {
7522 relofs
-= sizeof(*rel
);
7523 rel
= (ElfW_Rel
*)(ssec
->reloc
->data
+ relofs
);
7524 if (rel
->r_offset
>= esym
->st_value
+ size
)
7526 if (rel
->r_offset
< esym
->st_value
)
7528 put_elf_reloca(symtab_section
, sec
,
7529 c
+ rel
->r_offset
- esym
->st_value
,
7530 ELFW(R_TYPE
)(rel
->r_info
),
7531 ELFW(R_SYM
)(rel
->r_info
),
7541 if (type
->t
& VT_BITFIELD
) {
7542 int bit_pos
, bit_size
, bits
, n
;
7543 unsigned char *p
, v
, m
;
7544 bit_pos
= BIT_POS(vtop
->type
.t
);
7545 bit_size
= BIT_SIZE(vtop
->type
.t
);
7546 p
= (unsigned char*)ptr
+ (bit_pos
>> 3);
7547 bit_pos
&= 7, bits
= 0;
7552 v
= val
>> bits
<< bit_pos
;
7553 m
= ((1 << n
) - 1) << bit_pos
;
7554 *p
= (*p
& ~m
) | (v
& m
);
7555 bits
+= n
, bit_size
-= n
, bit_pos
= 0, ++p
;
7560 *(char *)ptr
= val
!= 0;
7566 write16le(ptr
, val
);
7569 write32le(ptr
, val
);
7572 write64le(ptr
, val
);
7575 #if defined TCC_IS_NATIVE_387
7576 /* Host and target platform may be different but both have x87.
7577 On windows, tcc does not use VT_LDOUBLE, except when it is a
7578 cross compiler. In this case a mingw gcc as host compiler
7579 comes here with 10-byte long doubles, while msvc or tcc won't.
7580 tcc itself can still translate by asm.
7581 In any case we avoid possibly random bytes 11 and 12.
7583 if (sizeof (long double) >= 10)
7584 memcpy(ptr
, &vtop
->c
.ld
, 10);
7586 else if (sizeof (long double) == sizeof (double))
7587 __asm__("fldl %1\nfstpt %0\n" : "=m" (*ptr
) : "m" (vtop
->c
.ld
));
7589 else if (vtop
->c
.ld
== 0.0)
7593 /* For other platforms it should work natively, but may not work
7594 for cross compilers */
7595 if (sizeof(long double) == LDOUBLE_SIZE
)
7596 memcpy(ptr
, &vtop
->c
.ld
, LDOUBLE_SIZE
);
7597 else if (sizeof(double) == LDOUBLE_SIZE
)
7598 memcpy(ptr
, &vtop
->c
.ld
, LDOUBLE_SIZE
);
7599 #ifndef TCC_CROSS_TEST
7601 tcc_error("can't cross compile long double constants");
7606 /* intptr_t may need a reloc too, see tcctest.c:relocation_test() */
7609 if (vtop
->r
& VT_SYM
)
7610 greloca(sec
, vtop
->sym
, c
, R_DATA_PTR
, val
);
7612 write64le(ptr
, val
);
7615 write32le(ptr
, val
);
7619 write64le(ptr
, val
);
7623 if (vtop
->r
& VT_SYM
)
7624 greloc(sec
, vtop
->sym
, c
, R_DATA_PTR
);
7625 write32le(ptr
, val
);
7629 //tcc_internal_error("unexpected type");
7635 vset(&dtype
, VT_LOCAL
|VT_LVAL
, c
);
7642 /* 't' contains the type and storage info. 'c' is the offset of the
7643 object in section 'sec'. If 'sec' is NULL, it means stack based
7644 allocation. 'flags & DIF_FIRST' is true if array '{' must be read (multi
7645 dimension implicit array init handling). 'flags & DIF_SIZE_ONLY' is true if
7646 size only evaluation is wanted (only for arrays). */
7647 static void decl_initializer(init_params
*p
, CType
*type
, unsigned long c
, int flags
)
7649 int len
, n
, no_oblock
, i
;
7655 /* generate line number info */
7656 if (debug_modes
&& !p
->sec
)
7657 tcc_debug_line(tcc_state
), tcc_tcov_check_line (tcc_state
, 1);
7659 if (!(flags
& DIF_HAVE_ELEM
) && tok
!= '{' &&
7660 /* In case of strings we have special handling for arrays, so
7661 don't consume them as initializer value (which would commit them
7662 to some anonymous symbol). */
7663 tok
!= TOK_LSTR
&& tok
!= TOK_STR
&&
7664 (!(flags
& DIF_SIZE_ONLY
)
7665 /* a struct may be initialized from a struct of same type, as in
7666 struct {int x,y;} a = {1,2}, b = {3,4}, c[] = {a,b};
7667 In that case we need to parse the element in order to check
7668 it for compatibility below */
7669 || (type
->t
& VT_BTYPE
) == VT_STRUCT
)
7671 int ncw_prev
= nocode_wanted
;
7672 if ((flags
& DIF_SIZE_ONLY
) && !p
->sec
)
7674 parse_init_elem(!p
->sec
? EXPR_ANY
: EXPR_CONST
);
7675 nocode_wanted
= ncw_prev
;
7676 flags
|= DIF_HAVE_ELEM
;
7679 if (type
->t
& VT_ARRAY
) {
7681 if (((flags
& DIF_FIRST
) && tok
!= TOK_LSTR
&& tok
!= TOK_STR
) ||
7689 t1
= pointed_type(type
);
7690 size1
= type_size(t1
, &align1
);
7692 /* only parse strings here if correct type (otherwise: handle
7693 them as ((w)char *) expressions */
7694 if ((tok
== TOK_LSTR
&&
7695 #ifdef TCC_TARGET_PE
7696 (t1
->t
& VT_BTYPE
) == VT_SHORT
&& (t1
->t
& VT_UNSIGNED
)
7698 (t1
->t
& VT_BTYPE
) == VT_INT
7700 ) || (tok
== TOK_STR
&& (t1
->t
& VT_BTYPE
) == VT_BYTE
)) {
7702 cstr_reset(&initstr
);
7703 if (size1
!= (tok
== TOK_STR
? 1 : sizeof(nwchar_t
)))
7704 tcc_error("unhandled string literal merging");
7705 while (tok
== TOK_STR
|| tok
== TOK_LSTR
) {
7707 initstr
.size
-= size1
;
7709 len
+= tokc
.str
.size
;
7711 len
+= tokc
.str
.size
/ sizeof(nwchar_t
);
7713 cstr_cat(&initstr
, tokc
.str
.data
, tokc
.str
.size
);
7716 if (tok
!= ')' && tok
!= '}' && tok
!= ',' && tok
!= ';'
7717 && tok
!= TOK_EOF
) {
7718 /* Not a lone literal but part of a bigger expression. */
7719 unget_tok(size1
== 1 ? TOK_STR
: TOK_LSTR
);
7720 tokc
.str
.size
= initstr
.size
;
7721 tokc
.str
.data
= initstr
.data
;
7725 decl_design_flex(p
, s
, len
);
7726 if (!(flags
& DIF_SIZE_ONLY
)) {
7731 tcc_warning("initializer-string for array is too long");
7732 /* in order to go faster for common case (char
7733 string in global variable, we handle it
7735 if (p
->sec
&& size1
== 1) {
7736 init_assert(p
, c
+ nb
);
7738 memcpy(p
->sec
->data
+ c
, initstr
.data
, nb
);
7742 /* only add trailing zero if enough storage (no
7743 warning in this case since it is standard) */
7744 if (flags
& DIF_CLEAR
)
7747 init_putz(p
, c
+ i
* size1
, (n
- i
) * size1
);
7751 } else if (size1
== 1)
7752 ch
= ((unsigned char *)initstr
.data
)[i
];
7754 ch
= ((nwchar_t
*)initstr
.data
)[i
];
7756 init_putv(p
, t1
, c
+ i
* size1
);
7767 /* zero memory once in advance */
7768 if (!(flags
& (DIF_CLEAR
| DIF_SIZE_ONLY
))) {
7769 init_putz(p
, c
, n
*size1
);
7774 /* GNU extension: if the initializer is empty for a flex array,
7775 it's size is zero. We won't enter the loop, so set the size
7777 decl_design_flex(p
, s
, len
);
7778 while (tok
!= '}' || (flags
& DIF_HAVE_ELEM
)) {
7779 len
= decl_designator(p
, type
, c
, &f
, flags
, len
);
7780 flags
&= ~DIF_HAVE_ELEM
;
7781 if (type
->t
& VT_ARRAY
) {
7783 /* special test for multi dimensional arrays (may not
7784 be strictly correct if designators are used at the
7786 if (no_oblock
&& len
>= n
*size1
)
7789 if (s
->type
.t
== VT_UNION
)
7793 if (no_oblock
&& f
== NULL
)
7805 } else if ((flags
& DIF_HAVE_ELEM
)
7806 /* Use i_c_parameter_t, to strip toplevel qualifiers.
7807 The source type might have VT_CONSTANT set, which is
7808 of course assignable to non-const elements. */
7809 && is_compatible_unqualified_types(type
, &vtop
->type
)) {
7812 } else if ((type
->t
& VT_BTYPE
) == VT_STRUCT
) {
7814 if ((flags
& DIF_FIRST
) || tok
== '{') {
7824 } else if (tok
== '{') {
7825 if (flags
& DIF_HAVE_ELEM
)
7828 decl_initializer(p
, type
, c
, flags
& ~DIF_HAVE_ELEM
);
7831 } else one_elem
: if ((flags
& DIF_SIZE_ONLY
)) {
7832 /* If we supported only ISO C we wouldn't have to accept calling
7833 this on anything than an array if DIF_SIZE_ONLY (and even then
7834 only on the outermost level, so no recursion would be needed),
7835 because initializing a flex array member isn't supported.
7836 But GNU C supports it, so we need to recurse even into
7837 subfields of structs and arrays when DIF_SIZE_ONLY is set. */
7838 /* just skip expression */
7839 if (flags
& DIF_HAVE_ELEM
)
7842 skip_or_save_block(NULL
);
7845 if (!(flags
& DIF_HAVE_ELEM
)) {
7846 /* This should happen only when we haven't parsed
7847 the init element above for fear of committing a
7848 string constant to memory too early. */
7849 if (tok
!= TOK_STR
&& tok
!= TOK_LSTR
)
7850 expect("string constant");
7851 parse_init_elem(!p
->sec
? EXPR_ANY
: EXPR_CONST
);
7853 if (!p
->sec
&& (flags
& DIF_CLEAR
) /* container was already zero'd */
7854 && (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
7856 && btype_size(type
->t
& VT_BTYPE
) /* not for fp constants */
7860 init_putv(p
, type
, c
);
7864 /* parse an initializer for type 't' if 'has_init' is non zero, and
7865 allocate space in local or global data space ('r' is either
7866 VT_LOCAL or VT_CONST). If 'v' is non zero, then an associated
7867 variable 'v' of scope 'scope' is declared before initializers
7868 are parsed. If 'v' is zero, then a reference to the new object
7869 is put in the value stack. If 'has_init' is 2, a special parsing
7870 is done to handle string constants. */
7871 static void decl_initializer_alloc(CType
*type
, AttributeDef
*ad
, int r
,
7872 int has_init
, int v
, int global
)
7874 int size
, align
, addr
;
7875 TokenString
*init_str
= NULL
;
7878 Sym
*flexible_array
;
7880 int saved_nocode_wanted
= nocode_wanted
;
7881 #ifdef CONFIG_TCC_BCHECK
7882 int bcheck
= tcc_state
->do_bounds_check
&& !NODATA_WANTED
;
7884 init_params p
= {0};
7886 /* Always allocate static or global variables */
7887 if (v
&& (r
& VT_VALMASK
) == VT_CONST
)
7888 nocode_wanted
|= DATA_ONLY_WANTED
;
7890 flexible_array
= NULL
;
7891 size
= type_size(type
, &align
);
7893 /* exactly one flexible array may be initialized, either the
7894 toplevel array or the last member of the toplevel struct */
7897 /* If the base type itself was an array type of unspecified size
7898 (like in 'typedef int arr[]; arr x = {1};') then we will
7899 overwrite the unknown size by the real one for this decl.
7900 We need to unshare the ref symbol holding that size. */
7901 type
->ref
= sym_push(SYM_FIELD
, &type
->ref
->type
, 0, type
->ref
->c
);
7902 p
.flex_array_ref
= type
->ref
;
7904 } else if (has_init
&& (type
->t
& VT_BTYPE
) == VT_STRUCT
) {
7905 Sym
*field
= type
->ref
->next
;
7908 field
= field
->next
;
7909 if (field
->type
.t
& VT_ARRAY
&& field
->type
.ref
->c
< 0) {
7910 flexible_array
= field
;
7911 p
.flex_array_ref
= field
->type
.ref
;
7918 /* If unknown size, do a dry-run 1st pass */
7920 tcc_error("unknown type size");
7921 if (has_init
== 2) {
7922 /* only get strings */
7923 init_str
= tok_str_alloc();
7924 while (tok
== TOK_STR
|| tok
== TOK_LSTR
) {
7925 tok_str_add_tok(init_str
);
7928 tok_str_add(init_str
, -1);
7929 tok_str_add(init_str
, 0);
7931 skip_or_save_block(&init_str
);
7935 begin_macro(init_str
, 1);
7937 decl_initializer(&p
, type
, 0, DIF_FIRST
| DIF_SIZE_ONLY
);
7938 /* prepare second initializer parsing */
7939 macro_ptr
= init_str
->str
;
7942 /* if still unknown size, error */
7943 size
= type_size(type
, &align
);
7945 tcc_error("unknown type size");
7947 /* If there's a flex member and it was used in the initializer
7949 if (flexible_array
&& flexible_array
->type
.ref
->c
> 0)
7950 size
+= flexible_array
->type
.ref
->c
7951 * pointed_size(&flexible_array
->type
);
7954 /* take into account specified alignment if bigger */
7955 if (ad
->a
.aligned
) {
7956 int speca
= 1 << (ad
->a
.aligned
- 1);
7959 } else if (ad
->a
.packed
) {
7963 if (!v
&& NODATA_WANTED
)
7964 size
= 0, align
= 1;
7966 if ((r
& VT_VALMASK
) == VT_LOCAL
) {
7968 #ifdef CONFIG_TCC_BCHECK
7970 /* add padding between stack variables for bound checking */
7974 loc
= (loc
- size
) & -align
;
7976 p
.local_offset
= addr
+ size
;
7977 #ifdef CONFIG_TCC_BCHECK
7979 /* add padding between stack variables for bound checking */
7984 /* local variable */
7985 #ifdef CONFIG_TCC_ASM
7986 if (ad
->asm_label
) {
7987 int reg
= asm_parse_regvar(ad
->asm_label
);
7989 r
= (r
& ~VT_VALMASK
) | reg
;
7992 sym
= sym_push(v
, type
, r
, addr
);
7993 if (ad
->cleanup_func
) {
7994 Sym
*cls
= sym_push2(&all_cleanups
,
7995 SYM_FIELD
| ++cur_scope
->cl
.n
, 0, 0);
7996 cls
->prev_tok
= sym
;
7997 cls
->next
= ad
->cleanup_func
;
7998 cls
->ncl
= cur_scope
->cl
.s
;
7999 cur_scope
->cl
.s
= cls
;
8004 /* push local reference */
8005 vset(type
, r
, addr
);
8010 /* see if the symbol was already defined */
8013 if (p
.flex_array_ref
&& (sym
->type
.t
& type
->t
& VT_ARRAY
)
8014 && sym
->type
.ref
->c
> type
->ref
->c
) {
8015 /* flex array was already declared with explicit size
8017 int arr[] = { 1,2,3 }; */
8018 type
->ref
->c
= sym
->type
.ref
->c
;
8019 size
= type_size(type
, &align
);
8021 patch_storage(sym
, ad
, type
);
8022 /* we accept several definitions of the same global variable. */
8023 if (!has_init
&& sym
->c
&& elfsym(sym
)->st_shndx
!= SHN_UNDEF
)
8028 /* allocate symbol in corresponding section */
8032 while ((tp
->t
& (VT_BTYPE
|VT_ARRAY
)) == (VT_PTR
|VT_ARRAY
))
8033 tp
= &tp
->ref
->type
;
8034 if (tp
->t
& VT_CONSTANT
) {
8035 sec
= rodata_section
;
8036 } else if (has_init
) {
8038 /*if (tcc_state->g_debug & 4)
8039 tcc_warning("rw data: %s", get_tok_str(v, 0));*/
8040 } else if (tcc_state
->nocommon
)
8045 addr
= section_add(sec
, size
, align
);
8046 #ifdef CONFIG_TCC_BCHECK
8047 /* add padding if bound check */
8049 section_add(sec
, 1, 1);
8052 addr
= align
; /* SHN_COMMON is special, symbol value is align */
8053 sec
= common_section
;
8058 sym
= sym_push(v
, type
, r
| VT_SYM
, 0);
8059 patch_storage(sym
, ad
, NULL
);
8061 /* update symbol definition */
8062 put_extern_sym(sym
, sec
, addr
, size
);
8064 /* push global reference */
8065 vpush_ref(type
, sec
, addr
, size
);
8070 #ifdef CONFIG_TCC_BCHECK
8071 /* handles bounds now because the symbol must be defined
8072 before for the relocation */
8076 greloca(bounds_section
, sym
, bounds_section
->data_offset
, R_DATA_PTR
, 0);
8077 /* then add global bound info */
8078 bounds_ptr
= section_ptr_add(bounds_section
, 2 * sizeof(addr_t
));
8079 bounds_ptr
[0] = 0; /* relocated */
8080 bounds_ptr
[1] = size
;
8085 if (type
->t
& VT_VLA
) {
8091 /* save before-VLA stack pointer if needed */
8092 if (cur_scope
->vla
.num
== 0) {
8093 if (cur_scope
->prev
&& cur_scope
->prev
->vla
.num
) {
8094 cur_scope
->vla
.locorig
= cur_scope
->prev
->vla
.loc
;
8096 gen_vla_sp_save(loc
-= PTR_SIZE
);
8097 cur_scope
->vla
.locorig
= loc
;
8101 vpush_type_size(type
, &a
);
8102 gen_vla_alloc(type
, a
);
8103 #if defined TCC_TARGET_PE && defined TCC_TARGET_X86_64
8104 /* on _WIN64, because of the function args scratch area, the
8105 result of alloca differs from RSP and is returned in RAX. */
8106 gen_vla_result(addr
), addr
= (loc
-= PTR_SIZE
);
8108 gen_vla_sp_save(addr
);
8109 cur_scope
->vla
.loc
= addr
;
8110 cur_scope
->vla
.num
++;
8111 } else if (has_init
) {
8113 decl_initializer(&p
, type
, addr
, DIF_FIRST
);
8114 /* patch flexible array member size back to -1, */
8115 /* for possible subsequent similar declarations */
8117 flexible_array
->type
.ref
->c
= -1;
8121 /* restore parse state if needed */
8127 nocode_wanted
= saved_nocode_wanted
;
8130 /* generate vla code saved in post_type() */
8131 static void func_vla_arg_code(Sym
*arg
)
8134 TokenString
*vla_array_tok
= NULL
;
8137 func_vla_arg_code(arg
->type
.ref
);
8139 if ((arg
->type
.t
& VT_VLA
) && arg
->type
.ref
->vla_array_str
) {
8140 loc
-= type_size(&int_type
, &align
);
8142 arg
->type
.ref
->c
= loc
;
8145 vla_array_tok
= tok_str_alloc();
8146 vla_array_tok
->str
= arg
->type
.ref
->vla_array_str
;
8147 begin_macro(vla_array_tok
, 1);
8152 vpush_type_size(&arg
->type
.ref
->type
, &align
);
8154 vset(&int_type
, VT_LOCAL
|VT_LVAL
, arg
->type
.ref
->c
);
8161 static void func_vla_arg(Sym
*sym
)
8165 for (arg
= sym
->type
.ref
->next
; arg
; arg
= arg
->next
)
8166 if (arg
->type
.t
& VT_VLA
)
8167 func_vla_arg_code(arg
);
8170 /* parse a function defined by symbol 'sym' and generate its code in
8171 'cur_text_section' */
8172 static void gen_function(Sym
*sym
)
8174 struct scope f
= { 0 };
8175 cur_scope
= root_scope
= &f
;
8177 ind
= cur_text_section
->data_offset
;
8178 if (sym
->a
.aligned
) {
8179 size_t newoff
= section_add(cur_text_section
, 0,
8180 1 << (sym
->a
.aligned
- 1));
8181 gen_fill_nops(newoff
- ind
);
8183 /* NOTE: we patch the symbol size later */
8184 put_extern_sym(sym
, cur_text_section
, ind
, 0);
8185 if (sym
->type
.ref
->f
.func_ctor
)
8186 add_array (tcc_state
, ".init_array", sym
->c
);
8187 if (sym
->type
.ref
->f
.func_dtor
)
8188 add_array (tcc_state
, ".fini_array", sym
->c
);
8190 funcname
= get_tok_str(sym
->v
, NULL
);
8192 func_vt
= sym
->type
.ref
->type
;
8193 func_var
= sym
->type
.ref
->f
.func_type
== FUNC_ELLIPSIS
;
8195 /* put debug symbol */
8196 tcc_debug_funcstart(tcc_state
, sym
);
8197 /* push a dummy symbol to enable local sym storage */
8198 sym_push2(&local_stack
, SYM_FIELD
, 0, 0);
8199 local_scope
= 1; /* for function parameters */
8201 tcc_debug_prolog_epilog(tcc_state
, 0);
8204 clear_temp_local_var_list();
8209 /* reset local stack */
8210 pop_local_syms(NULL
, 0);
8211 tcc_debug_prolog_epilog(tcc_state
, 1);
8213 cur_text_section
->data_offset
= ind
;
8215 label_pop(&global_label_stack
, NULL
, 0);
8216 sym_pop(&all_cleanups
, NULL
, 0);
8217 /* patch symbol size */
8218 elfsym(sym
)->st_size
= ind
- func_ind
;
8219 /* end of function */
8220 tcc_debug_funcend(tcc_state
, ind
- func_ind
);
8221 /* It's better to crash than to generate wrong code */
8222 cur_text_section
= NULL
;
8223 funcname
= ""; /* for safety */
8224 func_vt
.t
= VT_VOID
; /* for safety */
8225 func_var
= 0; /* for safety */
8226 ind
= 0; /* for safety */
8228 nocode_wanted
= DATA_ONLY_WANTED
;
8230 /* do this after funcend debug info */
8234 static void gen_inline_functions(TCCState
*s
)
8237 int inline_generated
, i
;
8238 struct InlineFunc
*fn
;
8240 tcc_open_bf(s
, ":inline:", 0);
8241 /* iterate while inline function are referenced */
8243 inline_generated
= 0;
8244 for (i
= 0; i
< s
->nb_inline_fns
; ++i
) {
8245 fn
= s
->inline_fns
[i
];
8247 if (sym
&& (sym
->c
|| !(sym
->type
.t
& VT_INLINE
))) {
8248 /* the function was used or forced (and then not internal):
8249 generate its code and convert it to a normal function */
8251 tcc_debug_putfile(s
, fn
->filename
);
8252 begin_macro(fn
->func_str
, 1);
8254 cur_text_section
= text_section
;
8258 inline_generated
= 1;
8261 } while (inline_generated
);
8265 static void free_inline_functions(TCCState
*s
)
8268 /* free tokens of unused inline functions */
8269 for (i
= 0; i
< s
->nb_inline_fns
; ++i
) {
8270 struct InlineFunc
*fn
= s
->inline_fns
[i
];
8272 tok_str_free(fn
->func_str
);
8274 dynarray_reset(&s
->inline_fns
, &s
->nb_inline_fns
);
8277 static void do_Static_assert(void){
8287 tcc_error("_Static_assert fail");
8289 goto static_assert_out
;
8293 parse_mult_str(&error_str
, "string constant");
8295 tcc_error("%s", (char *)error_str
.data
);
8296 cstr_free(&error_str
);
8302 /* 'l' is VT_LOCAL or VT_CONST to define default storage type
8303 or VT_CMP if parsing old style parameter list
8304 or VT_JMP if parsing c99 for decl: for (int i = 0, ...) */
8305 static int decl(int l
)
8307 int v
, has_init
, r
, oldint
;
8310 AttributeDef ad
, adbase
;
8313 if (tok
== TOK_STATIC_ASSERT
) {
8319 if (!parse_btype(&btype
, &adbase
, l
== VT_LOCAL
)) {
8322 /* skip redundant ';' if not in old parameter decl scope */
8323 if (tok
== ';' && l
!= VT_CMP
) {
8329 if (tok
== TOK_ASM1
|| tok
== TOK_ASM2
|| tok
== TOK_ASM3
) {
8330 /* global asm block */
8334 if (tok
>= TOK_UIDENT
) {
8335 /* special test for old K&R protos without explicit int
8336 type. Only accepted when defining global data */
8341 expect("declaration");
8347 if ((btype
.t
& VT_BTYPE
) == VT_STRUCT
) {
8349 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) >= SYM_FIRST_ANOM
)
8350 tcc_warning("unnamed struct/union that defines no instances");
8354 if (IS_ENUM(btype
.t
)) {
8360 while (1) { /* iterate thru each declaration */
8363 type_decl(&type
, &ad
, &v
, TYPE_DIRECT
);
8367 type_to_str(buf
, sizeof(buf
), &type
, get_tok_str(v
, NULL
));
8368 printf("type = '%s'\n", buf
);
8371 if ((type
.t
& VT_BTYPE
) == VT_FUNC
) {
8372 if ((type
.t
& VT_STATIC
) && (l
!= VT_CONST
))
8373 tcc_error("function without file scope cannot be static");
8374 /* if old style function prototype, we accept a
8377 if (sym
->f
.func_type
== FUNC_OLD
&& l
== VT_CONST
) {
8381 #if defined TCC_TARGET_MACHO || defined TARGETOS_ANDROID
8382 if (sym
->f
.func_alwinl
8383 && ((type
.t
& (VT_EXTERN
| VT_INLINE
))
8384 == (VT_EXTERN
| VT_INLINE
))) {
8385 /* always_inline functions must be handled as if they
8386 don't generate multiple global defs, even if extern
8387 inline, i.e. GNU inline semantics for those. Rewrite
8388 them into static inline. */
8389 type
.t
&= ~VT_EXTERN
;
8390 type
.t
|= VT_STATIC
;
8393 /* always compile 'extern inline' */
8394 if (type
.t
& VT_EXTERN
)
8395 type
.t
&= ~VT_INLINE
;
8397 } else if (oldint
) {
8398 tcc_warning("type defaults to int");
8401 if (gnu_ext
&& (tok
== TOK_ASM1
|| tok
== TOK_ASM2
|| tok
== TOK_ASM3
)) {
8402 ad
.asm_label
= asm_label_instr();
8403 /* parse one last attribute list, after asm label */
8404 parse_attribute(&ad
);
8406 /* gcc does not allow __asm__("label") with function definition,
8413 #ifdef TCC_TARGET_PE
8414 if (ad
.a
.dllimport
|| ad
.a
.dllexport
) {
8415 if (type
.t
& VT_STATIC
)
8416 tcc_error("cannot have dll linkage with static");
8417 if (type
.t
& VT_TYPEDEF
) {
8418 tcc_warning("'%s' attribute ignored for typedef",
8419 ad
.a
.dllimport
? (ad
.a
.dllimport
= 0, "dllimport") :
8420 (ad
.a
.dllexport
= 0, "dllexport"));
8421 } else if (ad
.a
.dllimport
) {
8422 if ((type
.t
& VT_BTYPE
) == VT_FUNC
)
8425 type
.t
|= VT_EXTERN
;
8431 tcc_error("cannot use local functions");
8432 if ((type
.t
& VT_BTYPE
) != VT_FUNC
)
8433 expect("function definition");
8435 /* reject abstract declarators in function definition
8436 make old style params without decl have int type */
8438 while ((sym
= sym
->next
) != NULL
) {
8439 if (!(sym
->v
& ~SYM_FIELD
))
8440 expect("identifier");
8441 if (sym
->type
.t
== VT_VOID
)
8442 sym
->type
= int_type
;
8445 /* apply post-declaraton attributes */
8446 merge_funcattr(&type
.ref
->f
, &ad
.f
);
8448 /* put function symbol */
8449 type
.t
&= ~VT_EXTERN
;
8450 sym
= external_sym(v
, &type
, 0, &ad
);
8452 /* static inline functions are just recorded as a kind
8453 of macro. Their code will be emitted at the end of
8454 the compilation unit only if they are used */
8455 if (sym
->type
.t
& VT_INLINE
) {
8456 struct InlineFunc
*fn
;
8457 fn
= tcc_malloc(sizeof *fn
+ strlen(file
->filename
));
8458 strcpy(fn
->filename
, file
->filename
);
8460 skip_or_save_block(&fn
->func_str
);
8461 dynarray_add(&tcc_state
->inline_fns
,
8462 &tcc_state
->nb_inline_fns
, fn
);
8464 /* compute text section */
8465 cur_text_section
= ad
.section
;
8466 if (!cur_text_section
)
8467 cur_text_section
= text_section
;
8473 /* find parameter in function parameter list */
8474 for (sym
= func_vt
.ref
->next
; sym
; sym
= sym
->next
)
8475 if ((sym
->v
& ~SYM_FIELD
) == v
)
8477 tcc_error("declaration for parameter '%s' but no such parameter",
8478 get_tok_str(v
, NULL
));
8480 if (type
.t
& VT_STORAGE
) /* 'register' is okay */
8481 tcc_error("storage class specified for '%s'",
8482 get_tok_str(v
, NULL
));
8483 if (sym
->type
.t
!= VT_VOID
)
8484 tcc_error("redefinition of parameter '%s'",
8485 get_tok_str(v
, NULL
));
8486 convert_parameter_type(&type
);
8488 } else if (type
.t
& VT_TYPEDEF
) {
8489 /* save typedefed type */
8490 /* XXX: test storage specifiers ? */
8492 if (sym
&& sym
->sym_scope
== local_scope
) {
8493 if (!is_compatible_types(&sym
->type
, &type
)
8494 || !(sym
->type
.t
& VT_TYPEDEF
))
8495 tcc_error("incompatible redefinition of '%s'",
8496 get_tok_str(v
, NULL
));
8499 sym
= sym_push(v
, &type
, 0, 0);
8502 if ((type
.t
& VT_BTYPE
) == VT_FUNC
)
8503 merge_funcattr(&sym
->type
.ref
->f
, &ad
.f
);
8505 tcc_debug_typedef (tcc_state
, sym
);
8506 } else if ((type
.t
& VT_BTYPE
) == VT_VOID
8507 && !(type
.t
& VT_EXTERN
)) {
8508 tcc_error("declaration of void object");
8511 if ((type
.t
& VT_BTYPE
) == VT_FUNC
) {
8512 /* external function definition */
8513 /* specific case for func_call attribute */
8514 merge_funcattr(&type
.ref
->f
, &ad
.f
);
8515 } else if (!(type
.t
& VT_ARRAY
)) {
8516 /* not lvalue if array */
8519 has_init
= (tok
== '=');
8520 if (has_init
&& (type
.t
& VT_VLA
))
8521 tcc_error("variable length array cannot be initialized");
8522 if (((type
.t
& VT_EXTERN
) && (!has_init
|| l
!= VT_CONST
))
8523 || (type
.t
& VT_BTYPE
) == VT_FUNC
8524 /* as with GCC, uninitialized global arrays with no size
8525 are considered extern: */
8526 || ((type
.t
& VT_ARRAY
) && !has_init
8527 && l
== VT_CONST
&& type
.ref
->c
< 0)
8529 /* external variable or function */
8530 type
.t
|= VT_EXTERN
;
8531 sym
= external_sym(v
, &type
, r
, &ad
);
8532 if (ad
.alias_target
) {
8533 /* Aliases need to be emitted when their target
8534 symbol is emitted, even if perhaps unreferenced.
8535 We only support the case where the base is
8536 already defined, otherwise we would need
8537 deferring to emit the aliases until the end of
8538 the compile unit. */
8539 Sym
*alias_target
= sym_find(ad
.alias_target
);
8540 ElfSym
*esym
= elfsym(alias_target
);
8542 tcc_error("unsupported forward __alias__ attribute");
8543 put_extern_sym2(sym
, esym
->st_shndx
,
8544 esym
->st_value
, esym
->st_size
, 1);
8547 if (l
== VT_CONST
|| (type
.t
& VT_STATIC
))
8553 else if (l
== VT_CONST
)
8554 /* uninitialized global variables may be overridden */
8555 type
.t
|= VT_EXTERN
;
8556 decl_initializer_alloc(&type
, &ad
, r
, has_init
, v
, l
== VT_CONST
);
8572 /* ------------------------------------------------------------------------- */
8575 /* ------------------------------------------------------------------------- */