/*
 *  TCC - Tiny C Compiler
 *
 *  Copyright (c) 2001-2004 Fabrice Bellard
 *
 *  This library is free software; you can redistribute it and/or
 *  modify it under the terms of the GNU Lesser General Public
 *  License as published by the Free Software Foundation; either
 *  version 2 of the License, or (at your option) any later version.
 *
 *  This library is distributed in the hope that it will be useful,
 *  but WITHOUT ANY WARRANTY; without even the implied warranty of
 *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 *  Lesser General Public License for more details.
 *
 *  You should have received a copy of the GNU Lesser General Public
 *  License along with this library; if not, write to the Free Software
 *  Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
 */
/********************************************************/
/* global variables */

/* loc : local variable index
   ind : output code index
   anon_sym: anonymous symbol index
*/
32 ST_DATA
int rsym
, anon_sym
, ind
, loc
;
34 ST_DATA Sym
*global_stack
;
35 ST_DATA Sym
*local_stack
;
36 ST_DATA Sym
*define_stack
;
37 ST_DATA Sym
*global_label_stack
;
38 ST_DATA Sym
*local_label_stack
;
40 static Sym
*sym_free_first
;
41 static void **sym_pools
;
42 static int nb_sym_pools
;
44 static Sym
*all_cleanups
, *pending_gotos
;
45 static int local_scope
;
47 static int constant_p
;
48 ST_DATA
char debug_modes
;
51 static SValue _vstack
[1 + VSTACK_SIZE
];
52 #define vstack (_vstack + 1)
54 ST_DATA
int const_wanted
; /* true if constant wanted */
55 ST_DATA
int nocode_wanted
; /* no code generation wanted */
56 #define unevalmask 0xffff /* unevaluated subexpression */
57 #define NODATA_WANTED (nocode_wanted > 0) /* no static data output wanted either */
58 #define DATA_ONLY_WANTED 0x80000000 /* ON outside of functions and for static initializers */
59 #define CODE_OFF() (nocode_wanted |= 0x20000000)
60 #define CODE_ON() (nocode_wanted &= ~0x20000000)
62 ST_DATA
int global_expr
; /* true if compound literals must be allocated globally (used during initializers parsing */
63 ST_DATA CType func_vt
; /* current function return type (used by return instruction) */
64 ST_DATA
int func_var
; /* true if current function is variadic (used by return instruction) */
67 ST_DATA
const char *funcname
;
68 ST_DATA CType int_type
, func_old_type
, char_type
, char_pointer_type
;
69 static CString initstr
;
72 #define VT_SIZE_T (VT_INT | VT_UNSIGNED)
73 #define VT_PTRDIFF_T VT_INT
75 #define VT_SIZE_T (VT_LLONG | VT_UNSIGNED)
76 #define VT_PTRDIFF_T VT_LLONG
78 #define VT_SIZE_T (VT_LONG | VT_LLONG | VT_UNSIGNED)
79 #define VT_PTRDIFF_T (VT_LONG | VT_LLONG)
82 static struct switch_t
{
86 } **p
; int n
; /* list of case ranges */
87 int def_sym
; /* default symbol */
91 struct switch_t
*prev
;
93 } *cur_switch
; /* current switch */
95 #define MAX_TEMP_LOCAL_VARIABLE_NUMBER 8
96 /*list of temporary local variables on the stack in current function. */
97 static struct temp_local_variable
{
98 int location
; //offset on stack. Svalue.c.i
101 } arr_temp_local_vars
[MAX_TEMP_LOCAL_VARIABLE_NUMBER
];
102 static int nb_temp_local_vars
;
104 static struct scope
{
106 struct { int loc
, locorig
, num
; } vla
;
107 struct { Sym
*s
; int n
; } cl
;
110 } *cur_scope
, *loop_scope
, *root_scope
;
119 #define precedence_parser
120 static void init_prec(void);
123 static void gen_cast(CType
*type
);
124 static void gen_cast_s(int t
);
125 static inline CType
*pointed_type(CType
*type
);
126 static int is_compatible_types(CType
*type1
, CType
*type2
);
127 static int parse_btype(CType
*type
, AttributeDef
*ad
, int ignore_label
);
128 static CType
*type_decl(CType
*type
, AttributeDef
*ad
, int *v
, int td
);
129 static void parse_expr_type(CType
*type
);
130 static void init_putv(init_params
*p
, CType
*type
, unsigned long c
);
131 static void decl_initializer(init_params
*p
, CType
*type
, unsigned long c
, int flags
);
132 static void block(int is_expr
);
133 static void decl_initializer_alloc(CType
*type
, AttributeDef
*ad
, int r
, int has_init
, int v
, int scope
);
134 static int decl(int l
);
135 static void expr_eq(void);
136 static void vpush_type_size(CType
*type
, int *a
);
137 static int is_compatible_unqualified_types(CType
*type1
, CType
*type2
);
138 static inline int64_t expr_const64(void);
139 static void vpush64(int ty
, unsigned long long v
);
140 static void vpush(CType
*type
);
141 static int gvtst(int inv
, int t
);
142 static void gen_inline_functions(TCCState
*s
);
143 static void free_inline_functions(TCCState
*s
);
144 static void skip_or_save_block(TokenString
**str
);
145 static void gv_dup(void);
146 static int get_temp_local_var(int size
,int align
);
147 static void clear_temp_local_var_list();
148 static void cast_error(CType
*st
, CType
*dt
);
150 /* ------------------------------------------------------------------------- */
151 /* Automagical code suppression */
153 /* Clear 'nocode_wanted' at forward label if it was used */
154 ST_FUNC
void gsym(int t
)
162 /* Clear 'nocode_wanted' if current pc is a label */
168 tcc_tcov_block_begin(tcc_state
);
172 /* Set 'nocode_wanted' after unconditional (backwards) jump */
173 static void gjmp_addr_acs(int t
)
179 /* Set 'nocode_wanted' after unconditional (forwards) jump */
180 static int gjmp_acs(int t
)
187 /* These are #undef'd at the end of this file */
188 #define gjmp_addr gjmp_addr_acs
189 #define gjmp gjmp_acs
190 /* ------------------------------------------------------------------------- */
192 ST_INLN
int is_float(int t
)
194 int bt
= t
& VT_BTYPE
;
195 return bt
== VT_LDOUBLE
201 static inline int is_integer_btype(int bt
)
210 static int btype_size(int bt
)
212 return bt
== VT_BYTE
|| bt
== VT_BOOL
? 1 :
216 bt
== VT_PTR
? PTR_SIZE
: 0;
219 /* returns function return register from type */
220 static int R_RET(int t
)
224 #ifdef TCC_TARGET_X86_64
225 if ((t
& VT_BTYPE
) == VT_LDOUBLE
)
227 #elif defined TCC_TARGET_RISCV64
228 if ((t
& VT_BTYPE
) == VT_LDOUBLE
)
234 /* returns 2nd function return register, if any */
235 static int R2_RET(int t
)
241 #elif defined TCC_TARGET_X86_64
246 #elif defined TCC_TARGET_RISCV64
253 /* returns true for two-word types */
254 #define USING_TWO_WORDS(t) (R2_RET(t) != VT_CONST)
256 /* put function return registers to stack value */
257 static void PUT_R_RET(SValue
*sv
, int t
)
259 sv
->r
= R_RET(t
), sv
->r2
= R2_RET(t
);
262 /* returns function return register class for type t */
263 static int RC_RET(int t
)
265 return reg_classes
[R_RET(t
)] & ~(RC_FLOAT
| RC_INT
);
268 /* returns generic register class for type t */
269 static int RC_TYPE(int t
)
273 #ifdef TCC_TARGET_X86_64
274 if ((t
& VT_BTYPE
) == VT_LDOUBLE
)
276 if ((t
& VT_BTYPE
) == VT_QFLOAT
)
278 #elif defined TCC_TARGET_RISCV64
279 if ((t
& VT_BTYPE
) == VT_LDOUBLE
)
285 /* returns 2nd register class corresponding to t and rc */
286 static int RC2_TYPE(int t
, int rc
)
288 if (!USING_TWO_WORDS(t
))
303 /* we use our own 'finite' function to avoid potential problems with
304 non standard math libs */
305 /* XXX: endianness dependent */
306 ST_FUNC
int ieee_finite(double d
)
309 memcpy(p
, &d
, sizeof(double));
310 return ((unsigned)((p
[1] | 0x800fffff) + 1)) >> 31;
313 /* compiling intel long double natively */
314 #if (defined __i386__ || defined __x86_64__) \
315 && (defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64)
316 # define TCC_IS_NATIVE_387
319 ST_FUNC
void test_lvalue(void)
321 if (!(vtop
->r
& VT_LVAL
))
325 ST_FUNC
void check_vstack(void)
327 if (vtop
!= vstack
- 1)
328 tcc_error("internal compiler error: vstack leak (%d)",
329 (int)(vtop
- vstack
+ 1));
332 /* vstack debugging aid */
334 void pv (const char *lbl
, int a
, int b
)
337 for (i
= a
; i
< a
+ b
; ++i
) {
338 SValue
*p
= &vtop
[-i
];
339 printf("%s vtop[-%d] : type.t:%04x r:%04x r2:%04x c.i:%d\n",
340 lbl
, i
, p
->type
.t
, p
->r
, p
->r2
, (int)p
->c
.i
);
345 /* ------------------------------------------------------------------------- */
346 /* initialize vstack and types. This must be done also for tcc -E */
347 ST_FUNC
void tccgen_init(TCCState
*s1
)
350 memset(vtop
, 0, sizeof *vtop
);
352 /* define some often used types */
355 char_type
.t
= VT_BYTE
;
356 if (s1
->char_is_unsigned
)
357 char_type
.t
|= VT_UNSIGNED
;
358 char_pointer_type
= char_type
;
359 mk_pointer(&char_pointer_type
);
361 func_old_type
.t
= VT_FUNC
;
362 func_old_type
.ref
= sym_push(SYM_FIELD
, &int_type
, 0, 0);
363 func_old_type
.ref
->f
.func_call
= FUNC_CDECL
;
364 func_old_type
.ref
->f
.func_type
= FUNC_OLD
;
365 #ifdef precedence_parser
371 ST_FUNC
int tccgen_compile(TCCState
*s1
)
373 cur_text_section
= NULL
;
376 anon_sym
= SYM_FIRST_ANOM
;
378 nocode_wanted
= DATA_ONLY_WANTED
; /* no code outside of functions */
380 debug_modes
= (s1
->do_debug
? 1 : 0) | s1
->test_coverage
<< 1;
384 #ifdef TCC_TARGET_ARM
388 printf("%s: **** new file\n", file
->filename
);
390 parse_flags
= PARSE_FLAG_PREPROCESS
| PARSE_FLAG_TOK_NUM
| PARSE_FLAG_TOK_STR
;
393 gen_inline_functions(s1
);
395 /* end of translation unit info */
401 ST_FUNC
void tccgen_finish(TCCState
*s1
)
404 free_inline_functions(s1
);
405 sym_pop(&global_stack
, NULL
, 0);
406 sym_pop(&local_stack
, NULL
, 0);
407 /* free preprocessor macros */
410 dynarray_reset(&sym_pools
, &nb_sym_pools
);
411 sym_free_first
= NULL
;
414 /* ------------------------------------------------------------------------- */
415 ST_FUNC ElfSym
*elfsym(Sym
*s
)
419 return &((ElfSym
*)symtab_section
->data
)[s
->c
];
422 /* apply storage attributes to Elf symbol */
423 ST_FUNC
void update_storage(Sym
*sym
)
426 int sym_bind
, old_sym_bind
;
432 if (sym
->a
.visibility
)
433 esym
->st_other
= (esym
->st_other
& ~ELFW(ST_VISIBILITY
)(-1))
436 if (sym
->type
.t
& (VT_STATIC
| VT_INLINE
))
437 sym_bind
= STB_LOCAL
;
438 else if (sym
->a
.weak
)
441 sym_bind
= STB_GLOBAL
;
442 old_sym_bind
= ELFW(ST_BIND
)(esym
->st_info
);
443 if (sym_bind
!= old_sym_bind
) {
444 esym
->st_info
= ELFW(ST_INFO
)(sym_bind
, ELFW(ST_TYPE
)(esym
->st_info
));
448 if (sym
->a
.dllimport
)
449 esym
->st_other
|= ST_PE_IMPORT
;
450 if (sym
->a
.dllexport
)
451 esym
->st_other
|= ST_PE_EXPORT
;
455 printf("storage %s: bind=%c vis=%d exp=%d imp=%d\n",
456 get_tok_str(sym
->v
, NULL
),
457 sym_bind
== STB_WEAK
? 'w' : sym_bind
== STB_LOCAL
? 'l' : 'g',
465 /* ------------------------------------------------------------------------- */
466 /* update sym->c so that it points to an external symbol in section
467 'section' with value 'value' */
469 ST_FUNC
void put_extern_sym2(Sym
*sym
, int sh_num
,
470 addr_t value
, unsigned long size
,
471 int can_add_underscore
)
473 int sym_type
, sym_bind
, info
, other
, t
;
479 name
= get_tok_str(sym
->v
, NULL
);
481 if ((t
& VT_BTYPE
) == VT_FUNC
) {
483 } else if ((t
& VT_BTYPE
) == VT_VOID
) {
484 sym_type
= STT_NOTYPE
;
485 if ((t
& (VT_BTYPE
|VT_ASM_FUNC
)) == VT_ASM_FUNC
)
488 sym_type
= STT_OBJECT
;
490 if (t
& (VT_STATIC
| VT_INLINE
))
491 sym_bind
= STB_LOCAL
;
493 sym_bind
= STB_GLOBAL
;
497 if (sym_type
== STT_FUNC
&& sym
->type
.ref
) {
498 Sym
*ref
= sym
->type
.ref
;
499 if (ref
->a
.nodecorate
) {
500 can_add_underscore
= 0;
502 if (ref
->f
.func_call
== FUNC_STDCALL
&& can_add_underscore
) {
503 sprintf(buf1
, "_%s@%d", name
, ref
->f
.func_args
* PTR_SIZE
);
505 other
|= ST_PE_STDCALL
;
506 can_add_underscore
= 0;
511 if (sym
->asm_label
) {
512 name
= get_tok_str(sym
->asm_label
, NULL
);
513 can_add_underscore
= 0;
516 if (tcc_state
->leading_underscore
&& can_add_underscore
) {
518 pstrcpy(buf1
+ 1, sizeof(buf1
) - 1, name
);
522 info
= ELFW(ST_INFO
)(sym_bind
, sym_type
);
523 sym
->c
= put_elf_sym(symtab_section
, value
, size
, info
, other
, sh_num
, name
);
526 tcc_debug_extern_sym(tcc_state
, sym
, sh_num
, sym_bind
, sym_type
);
530 esym
->st_value
= value
;
531 esym
->st_size
= size
;
532 esym
->st_shndx
= sh_num
;
537 ST_FUNC
void put_extern_sym(Sym
*sym
, Section
*s
, addr_t value
, unsigned long size
)
539 if (nocode_wanted
&& (NODATA_WANTED
|| (s
&& s
== cur_text_section
)))
541 put_extern_sym2(sym
, s
? s
->sh_num
: SHN_UNDEF
, value
, size
, 1);
544 /* add a new relocation entry to symbol 'sym' in section 's' */
545 ST_FUNC
void greloca(Section
*s
, Sym
*sym
, unsigned long offset
, int type
,
550 if (nocode_wanted
&& s
== cur_text_section
)
555 put_extern_sym(sym
, NULL
, 0, 0);
559 /* now we can add ELF relocation info */
560 put_elf_reloca(symtab_section
, s
, offset
, type
, c
, addend
);
564 ST_FUNC
void greloc(Section
*s
, Sym
*sym
, unsigned long offset
, int type
)
566 greloca(s
, sym
, offset
, type
, 0);
570 /* ------------------------------------------------------------------------- */
571 /* symbol allocator */
572 static Sym
*__sym_malloc(void)
574 Sym
*sym_pool
, *sym
, *last_sym
;
577 sym_pool
= tcc_malloc(SYM_POOL_NB
* sizeof(Sym
));
578 dynarray_add(&sym_pools
, &nb_sym_pools
, sym_pool
);
580 last_sym
= sym_free_first
;
582 for(i
= 0; i
< SYM_POOL_NB
; i
++) {
583 sym
->next
= last_sym
;
587 sym_free_first
= last_sym
;
591 static inline Sym
*sym_malloc(void)
595 sym
= sym_free_first
;
597 sym
= __sym_malloc();
598 sym_free_first
= sym
->next
;
601 sym
= tcc_malloc(sizeof(Sym
));
606 ST_INLN
void sym_free(Sym
*sym
)
609 sym
->next
= sym_free_first
;
610 sym_free_first
= sym
;
616 /* push, without hashing */
617 ST_FUNC Sym
*sym_push2(Sym
**ps
, int v
, int t
, int c
)
622 memset(s
, 0, sizeof *s
);
632 /* find a symbol and return its associated structure. 's' is the top
633 of the symbol stack */
634 ST_FUNC Sym
*sym_find2(Sym
*s
, int v
)
646 /* structure lookup */
647 ST_INLN Sym
*struct_find(int v
)
650 if ((unsigned)v
>= (unsigned)(tok_ident
- TOK_IDENT
))
652 return table_ident
[v
]->sym_struct
;
655 /* find an identifier */
656 ST_INLN Sym
*sym_find(int v
)
659 if ((unsigned)v
>= (unsigned)(tok_ident
- TOK_IDENT
))
661 return table_ident
[v
]->sym_identifier
;
664 static int sym_scope(Sym
*s
)
666 if (IS_ENUM_VAL (s
->type
.t
))
667 return s
->type
.ref
->sym_scope
;
672 /* push a given symbol on the symbol stack */
673 ST_FUNC Sym
*sym_push(int v
, CType
*type
, int r
, int c
)
682 s
= sym_push2(ps
, v
, type
->t
, c
);
683 s
->type
.ref
= type
->ref
;
685 /* don't record fields or anonymous symbols */
687 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
688 /* record symbol in token array */
689 ts
= table_ident
[(v
& ~SYM_STRUCT
) - TOK_IDENT
];
691 ps
= &ts
->sym_struct
;
693 ps
= &ts
->sym_identifier
;
696 s
->sym_scope
= local_scope
;
697 if (s
->prev_tok
&& sym_scope(s
->prev_tok
) == s
->sym_scope
)
698 tcc_error("redeclaration of '%s'",
699 get_tok_str(v
& ~SYM_STRUCT
, NULL
));
704 /* push a global identifier */
705 ST_FUNC Sym
*global_identifier_push(int v
, int t
, int c
)
708 s
= sym_push2(&global_stack
, v
, t
, c
);
709 s
->r
= VT_CONST
| VT_SYM
;
710 /* don't record anonymous symbol */
711 if (v
< SYM_FIRST_ANOM
) {
712 ps
= &table_ident
[v
- TOK_IDENT
]->sym_identifier
;
713 /* modify the top most local identifier, so that sym_identifier will
714 point to 's' when popped; happens when called from inline asm */
715 while (*ps
!= NULL
&& (*ps
)->sym_scope
)
716 ps
= &(*ps
)->prev_tok
;
723 /* pop symbols until top reaches 'b'. If KEEP is non-zero don't really
724 pop them yet from the list, but do remove them from the token array. */
725 ST_FUNC
void sym_pop(Sym
**ptop
, Sym
*b
, int keep
)
735 /* remove symbol in token array */
737 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
738 ts
= table_ident
[(v
& ~SYM_STRUCT
) - TOK_IDENT
];
740 ps
= &ts
->sym_struct
;
742 ps
= &ts
->sym_identifier
;
753 /* ------------------------------------------------------------------------- */
754 static void vcheck_cmp(void)
756 /* cannot let cpu flags if other instruction are generated. Also
757 avoid leaving VT_JMP anywhere except on the top of the stack
758 because it would complicate the code generator.
760 Don't do this when nocode_wanted. vtop might come from
761 !nocode_wanted regions (see 88_codeopt.c) and transforming
762 it to a register without actually generating code is wrong
763 as their value might still be used for real. All values
764 we push under nocode_wanted will eventually be popped
765 again, so that the VT_CMP/VT_JMP value will be in vtop
766 when code is unsuppressed again. */
768 if (vtop
->r
== VT_CMP
&& !nocode_wanted
)
772 static void vsetc(CType
*type
, int r
, CValue
*vc
)
774 if (vtop
>= vstack
+ (VSTACK_SIZE
- 1))
775 tcc_error("memory full (vstack)");
785 ST_FUNC
void vswap(void)
795 /* pop stack value */
796 ST_FUNC
void vpop(void)
799 v
= vtop
->r
& VT_VALMASK
;
800 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
801 /* for x86, we need to pop the FP stack */
803 o(0xd8dd); /* fstp %st(0) */
807 /* need to put correct jump if && or || without test */
814 /* push constant of type "type" with useless value */
815 static void vpush(CType
*type
)
817 vset(type
, VT_CONST
, 0);
820 /* push arbitrary 64bit constant */
821 static void vpush64(int ty
, unsigned long long v
)
828 vsetc(&ctype
, VT_CONST
, &cval
);
831 /* push integer constant */
832 ST_FUNC
void vpushi(int v
)
837 /* push a pointer sized constant */
838 static void vpushs(addr_t v
)
840 vpush64(VT_SIZE_T
, v
);
843 /* push long long constant */
844 static inline void vpushll(long long v
)
846 vpush64(VT_LLONG
, v
);
849 ST_FUNC
void vset(CType
*type
, int r
, int v
)
853 vsetc(type
, r
, &cval
);
856 static void vseti(int r
, int v
)
864 ST_FUNC
void vpushv(SValue
*v
)
866 if (vtop
>= vstack
+ (VSTACK_SIZE
- 1))
867 tcc_error("memory full (vstack)");
872 static void vdup(void)
877 /* rotate n first stack elements to the bottom
878 I1 ... In -> I2 ... In I1 [top is right]
880 ST_FUNC
void vrotb(int n
)
892 /* rotate the n elements before entry e towards the top
893 I1 ... In ... -> In I1 ... I(n-1) ... [top is right]
895 ST_FUNC
void vrote(SValue
*e
, int n
)
902 for(i
= 0;i
< n
- 1; i
++)
907 /* rotate n first stack elements to the top
908 I1 ... In -> In I1 ... I(n-1) [top is right]
910 ST_FUNC
void vrott(int n
)
915 /* ------------------------------------------------------------------------- */
916 /* vtop->r = VT_CMP means CPU-flags have been set from comparison or test. */
918 /* called from generators to set the result from relational ops */
919 ST_FUNC
void vset_VT_CMP(int op
)
927 /* called once before asking generators to load VT_CMP to a register */
928 static void vset_VT_JMP(void)
930 int op
= vtop
->cmp_op
;
932 if (vtop
->jtrue
|| vtop
->jfalse
) {
933 /* we need to jump to 'mov $0,%R' or 'mov $1,%R' */
934 int inv
= op
& (op
< 2); /* small optimization */
935 vseti(VT_JMP
+inv
, gvtst(inv
, 0));
937 /* otherwise convert flags (rsp. 0/1) to register */
939 if (op
< 2) /* doesn't seem to happen */
944 /* Set CPU Flags, doesn't yet jump */
945 static void gvtst_set(int inv
, int t
)
949 if (vtop
->r
!= VT_CMP
) {
952 if (vtop
->r
!= VT_CMP
) /* must be VT_CONST then */
953 vset_VT_CMP(vtop
->c
.i
!= 0);
956 p
= inv
? &vtop
->jfalse
: &vtop
->jtrue
;
957 *p
= gjmp_append(*p
, t
);
960 /* Generate value test
962 * Generate a test for any value (jump, comparison and integers) */
963 static int gvtst(int inv
, int t
)
968 t
= vtop
->jtrue
, u
= vtop
->jfalse
;
973 /* jump to the wanted target */
975 t
= gjmp_cond(op
^ inv
, t
);
978 /* resolve complementary jumps to here */
985 /* generate a zero or nozero test */
986 static void gen_test_zero(int op
)
988 if (vtop
->r
== VT_CMP
) {
992 vtop
->jfalse
= vtop
->jtrue
;
1002 /* ------------------------------------------------------------------------- */
1003 /* push a symbol value of TYPE */
1004 ST_FUNC
void vpushsym(CType
*type
, Sym
*sym
)
1008 vsetc(type
, VT_CONST
| VT_SYM
, &cval
);
1012 /* Return a static symbol pointing to a section */
1013 ST_FUNC Sym
*get_sym_ref(CType
*type
, Section
*sec
, unsigned long offset
, unsigned long size
)
1019 sym
= sym_push(v
, type
, VT_CONST
| VT_SYM
, 0);
1020 sym
->type
.t
|= VT_STATIC
;
1021 put_extern_sym(sym
, sec
, offset
, size
);
1025 /* push a reference to a section offset by adding a dummy symbol */
1026 static void vpush_ref(CType
*type
, Section
*sec
, unsigned long offset
, unsigned long size
)
1028 vpushsym(type
, get_sym_ref(type
, sec
, offset
, size
));
1031 /* define a new external reference to a symbol 'v' of type 'u' */
1032 ST_FUNC Sym
*external_global_sym(int v
, CType
*type
)
1038 /* push forward reference */
1039 s
= global_identifier_push(v
, type
->t
| VT_EXTERN
, 0);
1040 s
->type
.ref
= type
->ref
;
1041 } else if (IS_ASM_SYM(s
)) {
1042 s
->type
.t
= type
->t
| (s
->type
.t
& VT_EXTERN
);
1043 s
->type
.ref
= type
->ref
;
1049 /* create an external reference with no specific type similar to asm labels.
1050 This avoids type conflicts if the symbol is used from C too */
1051 ST_FUNC Sym
*external_helper_sym(int v
)
1053 CType ct
= { VT_ASM_FUNC
, NULL
};
1054 return external_global_sym(v
, &ct
);
1057 /* push a reference to an helper function (such as memmove) */
1058 ST_FUNC
void vpush_helper_func(int v
)
1060 vpushsym(&func_old_type
, external_helper_sym(v
));
1063 /* Merge symbol attributes. */
1064 static void merge_symattr(struct SymAttr
*sa
, struct SymAttr
*sa1
)
1066 if (sa1
->aligned
&& !sa
->aligned
)
1067 sa
->aligned
= sa1
->aligned
;
1068 sa
->packed
|= sa1
->packed
;
1069 sa
->weak
|= sa1
->weak
;
1070 if (sa1
->visibility
!= STV_DEFAULT
) {
1071 int vis
= sa
->visibility
;
1072 if (vis
== STV_DEFAULT
1073 || vis
> sa1
->visibility
)
1074 vis
= sa1
->visibility
;
1075 sa
->visibility
= vis
;
1077 sa
->dllexport
|= sa1
->dllexport
;
1078 sa
->nodecorate
|= sa1
->nodecorate
;
1079 sa
->dllimport
|= sa1
->dllimport
;
1082 /* Merge function attributes. */
1083 static void merge_funcattr(struct FuncAttr
*fa
, struct FuncAttr
*fa1
)
1085 if (fa1
->func_call
&& !fa
->func_call
)
1086 fa
->func_call
= fa1
->func_call
;
1087 if (fa1
->func_type
&& !fa
->func_type
)
1088 fa
->func_type
= fa1
->func_type
;
1089 if (fa1
->func_args
&& !fa
->func_args
)
1090 fa
->func_args
= fa1
->func_args
;
1091 if (fa1
->func_noreturn
)
1092 fa
->func_noreturn
= 1;
1099 /* Merge attributes. */
1100 static void merge_attr(AttributeDef
*ad
, AttributeDef
*ad1
)
1102 merge_symattr(&ad
->a
, &ad1
->a
);
1103 merge_funcattr(&ad
->f
, &ad1
->f
);
1106 ad
->section
= ad1
->section
;
1107 if (ad1
->alias_target
)
1108 ad
->alias_target
= ad1
->alias_target
;
1110 ad
->asm_label
= ad1
->asm_label
;
1112 ad
->attr_mode
= ad1
->attr_mode
;
1115 /* Merge some type attributes. */
1116 static void patch_type(Sym
*sym
, CType
*type
)
1118 if (!(type
->t
& VT_EXTERN
) || IS_ENUM_VAL(sym
->type
.t
)) {
1119 if (!(sym
->type
.t
& VT_EXTERN
))
1120 tcc_error("redefinition of '%s'", get_tok_str(sym
->v
, NULL
));
1121 sym
->type
.t
&= ~VT_EXTERN
;
1124 if (IS_ASM_SYM(sym
)) {
1125 /* stay static if both are static */
1126 sym
->type
.t
= type
->t
& (sym
->type
.t
| ~VT_STATIC
);
1127 sym
->type
.ref
= type
->ref
;
1130 if (!is_compatible_types(&sym
->type
, type
)) {
1131 tcc_error("incompatible types for redefinition of '%s'",
1132 get_tok_str(sym
->v
, NULL
));
1134 } else if ((sym
->type
.t
& VT_BTYPE
) == VT_FUNC
) {
1135 int static_proto
= sym
->type
.t
& VT_STATIC
;
1136 /* warn if static follows non-static function declaration */
1137 if ((type
->t
& VT_STATIC
) && !static_proto
1138 /* XXX this test for inline shouldn't be here. Until we
1139 implement gnu-inline mode again it silences a warning for
1140 mingw caused by our workarounds. */
1141 && !((type
->t
| sym
->type
.t
) & VT_INLINE
))
1142 tcc_warning("static storage ignored for redefinition of '%s'",
1143 get_tok_str(sym
->v
, NULL
));
1145 /* set 'inline' if both agree or if one has static */
1146 if ((type
->t
| sym
->type
.t
) & VT_INLINE
) {
1147 if (!((type
->t
^ sym
->type
.t
) & VT_INLINE
)
1148 || ((type
->t
| sym
->type
.t
) & VT_STATIC
))
1149 static_proto
|= VT_INLINE
;
1152 if (0 == (type
->t
& VT_EXTERN
)) {
1153 struct FuncAttr f
= sym
->type
.ref
->f
;
1154 /* put complete type, use static from prototype */
1155 sym
->type
.t
= (type
->t
& ~(VT_STATIC
|VT_INLINE
)) | static_proto
;
1156 sym
->type
.ref
= type
->ref
;
1157 merge_funcattr(&sym
->type
.ref
->f
, &f
);
1159 sym
->type
.t
&= ~VT_INLINE
| static_proto
;
1162 if (sym
->type
.ref
->f
.func_type
== FUNC_OLD
1163 && type
->ref
->f
.func_type
!= FUNC_OLD
) {
1164 sym
->type
.ref
= type
->ref
;
1168 if ((sym
->type
.t
& VT_ARRAY
) && type
->ref
->c
>= 0) {
1169 /* set array size if it was omitted in extern declaration */
1170 sym
->type
.ref
->c
= type
->ref
->c
;
1172 if ((type
->t
^ sym
->type
.t
) & VT_STATIC
)
1173 tcc_warning("storage mismatch for redefinition of '%s'",
1174 get_tok_str(sym
->v
, NULL
));
1178 /* Merge some storage attributes. */
1179 static void patch_storage(Sym
*sym
, AttributeDef
*ad
, CType
*type
)
1182 patch_type(sym
, type
);
1184 #ifdef TCC_TARGET_PE
1185 if (sym
->a
.dllimport
!= ad
->a
.dllimport
)
1186 tcc_error("incompatible dll linkage for redefinition of '%s'",
1187 get_tok_str(sym
->v
, NULL
));
1189 merge_symattr(&sym
->a
, &ad
->a
);
1191 sym
->asm_label
= ad
->asm_label
;
1192 update_storage(sym
);
1195 /* copy sym to other stack */
1196 static Sym
*sym_copy(Sym
*s0
, Sym
**ps
)
1199 s
= sym_malloc(), *s
= *s0
;
1200 s
->prev
= *ps
, *ps
= s
;
1201 if (s
->v
< SYM_FIRST_ANOM
) {
1202 ps
= &table_ident
[s
->v
- TOK_IDENT
]->sym_identifier
;
1203 s
->prev_tok
= *ps
, *ps
= s
;
1208 /* copy s->type.ref to stack 'ps' for VT_FUNC and VT_PTR */
1209 static void sym_copy_ref(Sym
*s
, Sym
**ps
)
1211 int bt
= s
->type
.t
& VT_BTYPE
;
1212 if (bt
== VT_FUNC
|| bt
== VT_PTR
|| (bt
== VT_STRUCT
&& s
->sym_scope
)) {
1213 Sym
**sp
= &s
->type
.ref
;
1214 for (s
= *sp
, *sp
= NULL
; s
; s
= s
->next
) {
1215 Sym
*s2
= sym_copy(s
, ps
);
1216 sp
= &(*sp
= s2
)->next
;
1217 sym_copy_ref(s2
, ps
);
1222 /* define a new external reference to a symbol 'v' */
1223 static Sym
*external_sym(int v
, CType
*type
, int r
, AttributeDef
*ad
)
1227 /* look for global symbol */
1229 while (s
&& s
->sym_scope
)
1233 /* push forward reference */
1234 s
= global_identifier_push(v
, type
->t
, 0);
1237 s
->asm_label
= ad
->asm_label
;
1238 s
->type
.ref
= type
->ref
;
1239 /* copy type to the global stack */
1241 sym_copy_ref(s
, &global_stack
);
1243 patch_storage(s
, ad
, type
);
1245 /* push variables on local_stack if any */
1246 if (local_stack
&& (s
->type
.t
& VT_BTYPE
) != VT_FUNC
)
1247 s
= sym_copy(s
, &local_stack
);
1251 /* save registers up to (vtop - n) stack entry */
1252 ST_FUNC
void save_regs(int n
)
1255 for(p
= vstack
, p1
= vtop
- n
; p
<= p1
; p
++)
1259 /* save r to the memory stack, and mark it as being free */
1260 ST_FUNC
void save_reg(int r
)
1262 save_reg_upstack(r
, 0);
1265 /* save r to the memory stack, and mark it as being free,
1266 if seen up to (vtop - n) stack entry */
1267 ST_FUNC
void save_reg_upstack(int r
, int n
)
1269 int l
, size
, align
, bt
;
1272 if ((r
&= VT_VALMASK
) >= VT_CONST
)
1277 for(p
= vstack
, p1
= vtop
- n
; p
<= p1
; p
++) {
1278 if ((p
->r
& VT_VALMASK
) == r
|| p
->r2
== r
) {
1279 /* must save value on stack if not already done */
1281 bt
= p
->type
.t
& VT_BTYPE
;
1284 if ((p
->r
& VT_LVAL
) || bt
== VT_FUNC
)
1287 size
= type_size(&sv
.type
, &align
);
1288 l
= get_temp_local_var(size
,align
);
1289 sv
.r
= VT_LOCAL
| VT_LVAL
;
1291 store(p
->r
& VT_VALMASK
, &sv
);
1292 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
1293 /* x86 specific: need to pop fp register ST0 if saved */
1294 if (r
== TREG_ST0
) {
1295 o(0xd8dd); /* fstp %st(0) */
1298 /* special long long case */
1299 if (p
->r2
< VT_CONST
&& USING_TWO_WORDS(bt
)) {
1304 /* mark that stack entry as being saved on the stack */
1305 if (p
->r
& VT_LVAL
) {
1306 /* also clear the bounded flag because the
1307 relocation address of the function was stored in
1309 p
->r
= (p
->r
& ~(VT_VALMASK
| VT_BOUNDED
)) | VT_LLOCAL
;
1311 p
->r
= VT_LVAL
| VT_LOCAL
;
1320 #ifdef TCC_TARGET_ARM
1321 /* find a register of class 'rc2' with at most one reference on stack.
1322 * If none, call get_reg(rc) */
1323 ST_FUNC
int get_reg_ex(int rc
, int rc2
)
1328 for(r
=0;r
<NB_REGS
;r
++) {
1329 if (reg_classes
[r
] & rc2
) {
1332 for(p
= vstack
; p
<= vtop
; p
++) {
1333 if ((p
->r
& VT_VALMASK
) == r
||
1345 /* find a free register of class 'rc'. If none, save one register */
1346 ST_FUNC
int get_reg(int rc
)
1351 /* find a free register */
1352 for(r
=0;r
<NB_REGS
;r
++) {
1353 if (reg_classes
[r
] & rc
) {
1356 for(p
=vstack
;p
<=vtop
;p
++) {
1357 if ((p
->r
& VT_VALMASK
) == r
||
1366 /* no register left : free the first one on the stack (VERY
1367 IMPORTANT to start from the bottom to ensure that we don't
1368 spill registers used in gen_opi()) */
1369 for(p
=vstack
;p
<=vtop
;p
++) {
1370 /* look at second register (if long long) */
1372 if (r
< VT_CONST
&& (reg_classes
[r
] & rc
))
1374 r
= p
->r
& VT_VALMASK
;
1375 if (r
< VT_CONST
&& (reg_classes
[r
] & rc
)) {
1381 /* Should never comes here */
1385 /* find a free temporary local variable (return the offset on stack) match the size and align. If none, add new temporary stack variable*/
1386 static int get_temp_local_var(int size
,int align
){
1388 struct temp_local_variable
*temp_var
;
1395 for(i
=0;i
<nb_temp_local_vars
;i
++){
1396 temp_var
=&arr_temp_local_vars
[i
];
1397 if(temp_var
->size
<size
||align
!=temp_var
->align
){
1400 /*check if temp_var is free*/
1402 for(p
=vstack
;p
<=vtop
;p
++) {
1404 if(r
==VT_LOCAL
||r
==VT_LLOCAL
){
1405 if(p
->c
.i
==temp_var
->location
){
1412 found_var
=temp_var
->location
;
1418 loc
= (loc
- size
) & -align
;
1419 if(nb_temp_local_vars
<MAX_TEMP_LOCAL_VARIABLE_NUMBER
){
1420 temp_var
=&arr_temp_local_vars
[i
];
1421 temp_var
->location
=loc
;
1422 temp_var
->size
=size
;
1423 temp_var
->align
=align
;
1424 nb_temp_local_vars
++;
1431 static void clear_temp_local_var_list(){
1432 nb_temp_local_vars
=0;
1435 /* move register 's' (of type 't') to 'r', and flush previous value of r to memory
1437 static void move_reg(int r
, int s
, int t
)
1451 /* get address of vtop (vtop MUST BE an lvalue) */
1452 ST_FUNC
void gaddrof(void)
1454 vtop
->r
&= ~VT_LVAL
;
1455 /* tricky: if saved lvalue, then we can go back to lvalue */
1456 if ((vtop
->r
& VT_VALMASK
) == VT_LLOCAL
)
1457 vtop
->r
= (vtop
->r
& ~VT_VALMASK
) | VT_LOCAL
| VT_LVAL
;
1460 #ifdef CONFIG_TCC_BCHECK
1461 /* generate a bounded pointer addition */
1462 static void gen_bounded_ptr_add(void)
1464 int save
= (vtop
[-1].r
& VT_VALMASK
) == VT_LOCAL
;
1469 vpush_helper_func(TOK___bound_ptr_add
);
1474 /* returned pointer is in REG_IRET */
1475 vtop
->r
= REG_IRET
| VT_BOUNDED
;
1478 /* relocation offset of the bounding function call point */
1479 vtop
->c
.i
= (cur_text_section
->reloc
->data_offset
- sizeof(ElfW_Rel
));
1482 /* patch pointer addition in vtop so that pointer dereferencing is
1484 static void gen_bounded_ptr_deref(void)
1494 size
= type_size(&vtop
->type
, &align
);
1496 case 1: func
= TOK___bound_ptr_indir1
; break;
1497 case 2: func
= TOK___bound_ptr_indir2
; break;
1498 case 4: func
= TOK___bound_ptr_indir4
; break;
1499 case 8: func
= TOK___bound_ptr_indir8
; break;
1500 case 12: func
= TOK___bound_ptr_indir12
; break;
1501 case 16: func
= TOK___bound_ptr_indir16
; break;
1503 /* may happen with struct member access */
1506 sym
= external_helper_sym(func
);
1508 put_extern_sym(sym
, NULL
, 0, 0);
1509 /* patch relocation */
1510 /* XXX: find a better solution ? */
1511 rel
= (ElfW_Rel
*)(cur_text_section
->reloc
->data
+ vtop
->c
.i
);
1512 rel
->r_info
= ELFW(R_INFO
)(sym
->c
, ELFW(R_TYPE
)(rel
->r_info
));
1515 /* generate lvalue bound code */
1516 static void gbound(void)
1520 vtop
->r
&= ~VT_MUSTBOUND
;
1521 /* if lvalue, then use checking code before dereferencing */
1522 if (vtop
->r
& VT_LVAL
) {
1523 /* if not VT_BOUNDED value, then make one */
1524 if (!(vtop
->r
& VT_BOUNDED
)) {
1525 /* must save type because we must set it to int to get pointer */
1527 vtop
->type
.t
= VT_PTR
;
1530 gen_bounded_ptr_add();
1534 /* then check for dereferencing */
1535 gen_bounded_ptr_deref();
1539 /* we need to call __bound_ptr_add before we start to load function
1540 args into registers */
1541 ST_FUNC
void gbound_args(int nb_args
)
1546 for (i
= 1; i
<= nb_args
; ++i
)
1547 if (vtop
[1 - i
].r
& VT_MUSTBOUND
) {
1553 sv
= vtop
- nb_args
;
1554 if (sv
->r
& VT_SYM
) {
1558 #ifndef TCC_TARGET_PE
1559 || v
== TOK_sigsetjmp
1560 || v
== TOK___sigsetjmp
1563 vpush_helper_func(TOK___bound_setjmp
);
1566 func_bound_add_epilog
= 1;
1568 #if defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64
1569 if (v
== TOK_alloca
)
1570 func_bound_add_epilog
= 1;
1573 if (v
== TOK_longjmp
) /* undo rename to __longjmp14 */
1574 sv
->sym
->asm_label
= TOK___bound_longjmp
;
1579 /* Add bounds for local symbols from S to E (via ->prev) */
1580 static void add_local_bounds(Sym
*s
, Sym
*e
)
1582 for (; s
!= e
; s
= s
->prev
) {
1583 if (!s
->v
|| (s
->r
& VT_VALMASK
) != VT_LOCAL
)
1585 /* Add arrays/structs/unions because we always take address */
1586 if ((s
->type
.t
& VT_ARRAY
)
1587 || (s
->type
.t
& VT_BTYPE
) == VT_STRUCT
1588 || s
->a
.addrtaken
) {
1589 /* add local bound info */
1590 int align
, size
= type_size(&s
->type
, &align
);
1591 addr_t
*bounds_ptr
= section_ptr_add(lbounds_section
,
1592 2 * sizeof(addr_t
));
1593 bounds_ptr
[0] = s
->c
;
1594 bounds_ptr
[1] = size
;
1600 /* Wrapper around sym_pop, that potentially also registers local bounds. */
1601 static void pop_local_syms(Sym
*b
, int keep
)
1603 #ifdef CONFIG_TCC_BCHECK
1604 if (tcc_state
->do_bounds_check
&& !keep
&& (local_scope
|| !func_var
))
1605 add_local_bounds(local_stack
, b
);
1608 tcc_add_debug_info (tcc_state
, !local_scope
, local_stack
, b
);
1609 sym_pop(&local_stack
, b
, keep
);
1612 static void incr_bf_adr(int o
)
1614 vtop
->type
= char_pointer_type
;
1618 vtop
->type
.t
= VT_BYTE
| VT_UNSIGNED
;
1622 /* single-byte load mode for packed or otherwise unaligned bitfields */
1623 static void load_packed_bf(CType
*type
, int bit_pos
, int bit_size
)
1626 save_reg_upstack(vtop
->r
, 1);
1627 vpush64(type
->t
& VT_BTYPE
, 0); // B X
1628 bits
= 0, o
= bit_pos
>> 3, bit_pos
&= 7;
1637 vpushi(bit_pos
), gen_op(TOK_SHR
), bit_pos
= 0; // X B Y
1639 vpushi((1 << n
) - 1), gen_op('&');
1642 vpushi(bits
), gen_op(TOK_SHL
);
1645 bits
+= n
, bit_size
-= n
, o
= 1;
1648 if (!(type
->t
& VT_UNSIGNED
)) {
1649 n
= ((type
->t
& VT_BTYPE
) == VT_LLONG
? 64 : 32) - bits
;
1650 vpushi(n
), gen_op(TOK_SHL
);
1651 vpushi(n
), gen_op(TOK_SAR
);
1655 /* single-byte store mode for packed or otherwise unaligned bitfields */
1656 static void store_packed_bf(int bit_pos
, int bit_size
)
1658 int bits
, n
, o
, m
, c
;
1659 c
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
1661 save_reg_upstack(vtop
->r
, 1);
1662 bits
= 0, o
= bit_pos
>> 3, bit_pos
&= 7;
1664 incr_bf_adr(o
); // X B
1666 c
? vdup() : gv_dup(); // B V X
1669 vpushi(bits
), gen_op(TOK_SHR
);
1671 vpushi(bit_pos
), gen_op(TOK_SHL
);
1676 m
= ((1 << n
) - 1) << bit_pos
;
1677 vpushi(m
), gen_op('&'); // X B V1
1678 vpushv(vtop
-1); // X B V1 B
1679 vpushi(m
& 0x80 ? ~m
& 0x7f : ~m
);
1680 gen_op('&'); // X B V1 B1
1681 gen_op('|'); // X B V2
1683 vdup(), vtop
[-1] = vtop
[-2]; // X B B V2
1684 vstore(), vpop(); // X B
1685 bits
+= n
, bit_size
-= n
, bit_pos
= 0, o
= 1;
1690 static int adjust_bf(SValue
*sv
, int bit_pos
, int bit_size
)
1693 if (0 == sv
->type
.ref
)
1695 t
= sv
->type
.ref
->auxtype
;
1696 if (t
!= -1 && t
!= VT_STRUCT
) {
1697 sv
->type
.t
= (sv
->type
.t
& ~(VT_BTYPE
| VT_LONG
)) | t
;
1703 /* store vtop a register belonging to class 'rc'. lvalues are
1704 converted to values. Cannot be used if cannot be converted to
1705 register value (such as structures). */
1706 ST_FUNC
int gv(int rc
)
1708 int r
, r2
, r_ok
, r2_ok
, rc2
, bt
;
1709 int bit_pos
, bit_size
, size
, align
;
1711 /* NOTE: get_reg can modify vstack[] */
1712 if (vtop
->type
.t
& VT_BITFIELD
) {
1715 bit_pos
= BIT_POS(vtop
->type
.t
);
1716 bit_size
= BIT_SIZE(vtop
->type
.t
);
1717 /* remove bit field info to avoid loops */
1718 vtop
->type
.t
&= ~VT_STRUCT_MASK
;
1721 type
.t
= vtop
->type
.t
& VT_UNSIGNED
;
1722 if ((vtop
->type
.t
& VT_BTYPE
) == VT_BOOL
)
1723 type
.t
|= VT_UNSIGNED
;
1725 r
= adjust_bf(vtop
, bit_pos
, bit_size
);
1727 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
)
1732 if (r
== VT_STRUCT
) {
1733 load_packed_bf(&type
, bit_pos
, bit_size
);
1735 int bits
= (type
.t
& VT_BTYPE
) == VT_LLONG
? 64 : 32;
1736 /* cast to int to propagate signedness in following ops */
1738 /* generate shifts */
1739 vpushi(bits
- (bit_pos
+ bit_size
));
1741 vpushi(bits
- bit_size
);
1742 /* NOTE: transformed to SHR if unsigned */
1747 if (is_float(vtop
->type
.t
) &&
1748 (vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
) {
1749 /* CPUs usually cannot use float constants, so we store them
1750 generically in data segment */
1751 init_params p
= { rodata_section
};
1752 unsigned long offset
;
1753 size
= type_size(&vtop
->type
, &align
);
1755 size
= 0, align
= 1;
1756 offset
= section_add(p
.sec
, size
, align
);
1757 vpush_ref(&vtop
->type
, p
.sec
, offset
, size
);
1759 init_putv(&p
, &vtop
->type
, offset
);
1762 #ifdef CONFIG_TCC_BCHECK
1763 if (vtop
->r
& VT_MUSTBOUND
)
1767 bt
= vtop
->type
.t
& VT_BTYPE
;
1769 #ifdef TCC_TARGET_RISCV64
1771 if (bt
== VT_LDOUBLE
&& rc
== RC_FLOAT
)
1774 rc2
= RC2_TYPE(bt
, rc
);
1776 /* need to reload if:
1778 - lvalue (need to dereference pointer)
1779 - already a register, but not in the right class */
1780 r
= vtop
->r
& VT_VALMASK
;
1781 r_ok
= !(vtop
->r
& VT_LVAL
) && (r
< VT_CONST
) && (reg_classes
[r
] & rc
);
1782 r2_ok
= !rc2
|| ((vtop
->r2
< VT_CONST
) && (reg_classes
[vtop
->r2
] & rc2
));
1784 if (!r_ok
|| !r2_ok
) {
1788 int load_type
= (bt
== VT_QFLOAT
) ? VT_DOUBLE
: VT_PTRDIFF_T
;
1789 int original_type
= vtop
->type
.t
;
1791 /* two register type load :
1792 expand to two words temporarily */
1793 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
) {
1795 unsigned long long ll
= vtop
->c
.i
;
1796 vtop
->c
.i
= ll
; /* first word */
1798 vtop
->r
= r
; /* save register value */
1799 vpushi(ll
>> 32); /* second word */
1800 } else if (vtop
->r
& VT_LVAL
) {
1801 /* We do not want to modify the long long pointer here.
1802 So we save any other instances down the stack */
1803 save_reg_upstack(vtop
->r
, 1);
1804 /* load from memory */
1805 vtop
->type
.t
= load_type
;
1808 vtop
[-1].r
= r
; /* save register value */
1809 /* increment pointer to get second word */
1810 vtop
->type
.t
= VT_PTRDIFF_T
;
1815 vtop
->type
.t
= load_type
;
1817 /* move registers */
1820 if (r2_ok
&& vtop
->r2
< VT_CONST
)
1823 vtop
[-1].r
= r
; /* save register value */
1824 vtop
->r
= vtop
[-1].r2
;
1826 /* Allocate second register. Here we rely on the fact that
1827 get_reg() tries first to free r2 of an SValue. */
1831 /* write second register */
1834 vtop
->type
.t
= original_type
;
1836 if (vtop
->r
== VT_CMP
)
1838 /* one register type load */
1843 #ifdef TCC_TARGET_C67
1844 /* uses register pairs for doubles */
1845 if (bt
== VT_DOUBLE
)
1852 /* generate vtop[-1] and vtop[0] in resp. classes rc1 and rc2 */
1853 ST_FUNC
void gv2(int rc1
, int rc2
)
1855 /* generate more generic register first. But VT_JMP or VT_CMP
1856 values must be generated first in all cases to avoid possible
1858 if (vtop
->r
!= VT_CMP
&& rc1
<= rc2
) {
1863 /* test if reload is needed for first register */
1864 if ((vtop
[-1].r
& VT_VALMASK
) >= VT_CONST
) {
1874 /* test if reload is needed for first register */
1875 if ((vtop
[0].r
& VT_VALMASK
) >= VT_CONST
) {
1882 /* expand 64bit on stack in two ints */
1883 ST_FUNC
void lexpand(void)
1886 u
= vtop
->type
.t
& (VT_DEFSIGN
| VT_UNSIGNED
);
1887 v
= vtop
->r
& (VT_VALMASK
| VT_LVAL
);
1888 if (v
== VT_CONST
) {
1891 } else if (v
== (VT_LVAL
|VT_CONST
) || v
== (VT_LVAL
|VT_LOCAL
)) {
1897 vtop
[0].r
= vtop
[-1].r2
;
1898 vtop
[0].r2
= vtop
[-1].r2
= VT_CONST
;
1900 vtop
[0].type
.t
= vtop
[-1].type
.t
= VT_INT
| u
;
1905 /* build a long long from two ints */
1906 static void lbuild(int t
)
1908 gv2(RC_INT
, RC_INT
);
1909 vtop
[-1].r2
= vtop
[0].r
;
1910 vtop
[-1].type
.t
= t
;
1915 /* convert stack entry to register and duplicate its value in another
1917 static void gv_dup(void)
1923 if ((t
& VT_BTYPE
) == VT_LLONG
) {
1924 if (t
& VT_BITFIELD
) {
1934 /* stack: H L L1 H1 */
1944 /* duplicate value */
1954 /* generate CPU independent (unsigned) long long operations */
1955 static void gen_opl(int op
)
1957 int t
, a
, b
, op1
, c
, i
;
1959 unsigned short reg_iret
= REG_IRET
;
1960 unsigned short reg_lret
= REG_IRE2
;
1966 func
= TOK___divdi3
;
1969 func
= TOK___udivdi3
;
1972 func
= TOK___moddi3
;
1975 func
= TOK___umoddi3
;
1982 /* call generic long long function */
1983 vpush_helper_func(func
);
1988 vtop
->r2
= reg_lret
;
1996 //pv("gen_opl A",0,2);
2002 /* stack: L1 H1 L2 H2 */
2007 vtop
[-2] = vtop
[-3];
2010 /* stack: H1 H2 L1 L2 */
2011 //pv("gen_opl B",0,4);
2017 /* stack: H1 H2 L1 L2 ML MH */
2020 /* stack: ML MH H1 H2 L1 L2 */
2024 /* stack: ML MH H1 L2 H2 L1 */
2029 /* stack: ML MH M1 M2 */
2032 } else if (op
== '+' || op
== '-') {
2033 /* XXX: add non carry method too (for MIPS or alpha) */
2039 /* stack: H1 H2 (L1 op L2) */
2042 gen_op(op1
+ 1); /* TOK_xxxC2 */
2045 /* stack: H1 H2 (L1 op L2) */
2048 /* stack: (L1 op L2) H1 H2 */
2050 /* stack: (L1 op L2) (H1 op H2) */
2058 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
2059 t
= vtop
[-1].type
.t
;
2063 /* stack: L H shift */
2065 /* constant: simpler */
2066 /* NOTE: all comments are for SHL. the other cases are
2067 done by swapping words */
2078 if (op
!= TOK_SAR
) {
2111 /* XXX: should provide a faster fallback on x86 ? */
2114 func
= TOK___ashrdi3
;
2117 func
= TOK___lshrdi3
;
2120 func
= TOK___ashldi3
;
2126 /* compare operations */
2132 /* stack: L1 H1 L2 H2 */
2134 vtop
[-1] = vtop
[-2];
2136 /* stack: L1 L2 H1 H2 */
2140 /* when values are equal, we need to compare low words. since
2141 the jump is inverted, we invert the test too. */
2144 else if (op1
== TOK_GT
)
2146 else if (op1
== TOK_ULT
)
2148 else if (op1
== TOK_UGT
)
2158 /* generate non equal test */
2160 vset_VT_CMP(TOK_NE
);
2164 /* compare low. Always unsigned */
2168 else if (op1
== TOK_LE
)
2170 else if (op1
== TOK_GT
)
2172 else if (op1
== TOK_GE
)
2175 #if 0//def TCC_TARGET_I386
2176 if (op
== TOK_NE
) { gsym(b
); break; }
2177 if (op
== TOK_EQ
) { gsym(a
); break; }
/* Emulate a signed 64-bit division using only unsigned arithmetic:
   divide the magnitudes, then negate the quotient when the operand
   signs differ.  Unsigned negation (-x) is well defined and yields the
   two's-complement representation we want. */
static uint64_t gen_opic_sdiv(uint64_t a, uint64_t b)
{
    uint64_t num = a >> 63 ? -a : a;    /* |a| */
    uint64_t den = b >> 63 ? -b : b;    /* |b| */
    uint64_t quot = num / den;
    if ((a ^ b) >> 63)                  /* signs differ -> negative result */
        quot = -quot;
    return quot;
}
/* Signed 64-bit "less than" computed on unsigned bit patterns: XORing
   the sign bit into both operands maps the signed ordering onto the
   unsigned one, so a plain unsigned compare gives the signed answer. */
static int gen_opic_lt(uint64_t a, uint64_t b)
{
    const uint64_t sign_bit = (uint64_t)1 << 63;
    return (a ^ sign_bit) < (b ^ sign_bit);
}
2197 /* handle integer constant optimizations and various machine
2199 static void gen_opic(int op
)
2201 SValue
*v1
= vtop
- 1;
2203 int t1
= v1
->type
.t
& VT_BTYPE
;
2204 int t2
= v2
->type
.t
& VT_BTYPE
;
2205 int c1
= (v1
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2206 int c2
= (v2
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2207 int nonconst
= (v1
->r
| v2
->r
) & VT_NONCONST
;
2208 uint64_t l1
= c1
? v1
->c
.i
: 0;
2209 uint64_t l2
= c2
? v2
->c
.i
: 0;
2210 int shm
= (t1
== VT_LLONG
) ? 63 : 31;
2212 if (t1
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t1
!= VT_PTR
))
2213 l1
= ((uint32_t)l1
|
2214 (v1
->type
.t
& VT_UNSIGNED
? 0 : -(l1
& 0x80000000)));
2215 if (t2
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t2
!= VT_PTR
))
2216 l2
= ((uint32_t)l2
|
2217 (v2
->type
.t
& VT_UNSIGNED
? 0 : -(l2
& 0x80000000)));
2221 case '+': l1
+= l2
; break;
2222 case '-': l1
-= l2
; break;
2223 case '&': l1
&= l2
; break;
2224 case '^': l1
^= l2
; break;
2225 case '|': l1
|= l2
; break;
2226 case '*': l1
*= l2
; break;
2233 /* if division by zero, generate explicit division */
2235 if (const_wanted
&& !(nocode_wanted
& unevalmask
))
2236 tcc_error("division by zero in constant");
2240 default: l1
= gen_opic_sdiv(l1
, l2
); break;
2241 case '%': l1
= l1
- l2
* gen_opic_sdiv(l1
, l2
); break;
2242 case TOK_UDIV
: l1
= l1
/ l2
; break;
2243 case TOK_UMOD
: l1
= l1
% l2
; break;
2246 case TOK_SHL
: l1
<<= (l2
& shm
); break;
2247 case TOK_SHR
: l1
>>= (l2
& shm
); break;
2249 l1
= (l1
>> 63) ? ~(~l1
>> (l2
& shm
)) : l1
>> (l2
& shm
);
2252 case TOK_ULT
: l1
= l1
< l2
; break;
2253 case TOK_UGE
: l1
= l1
>= l2
; break;
2254 case TOK_EQ
: l1
= l1
== l2
; break;
2255 case TOK_NE
: l1
= l1
!= l2
; break;
2256 case TOK_ULE
: l1
= l1
<= l2
; break;
2257 case TOK_UGT
: l1
= l1
> l2
; break;
2258 case TOK_LT
: l1
= gen_opic_lt(l1
, l2
); break;
2259 case TOK_GE
: l1
= !gen_opic_lt(l1
, l2
); break;
2260 case TOK_LE
: l1
= !gen_opic_lt(l2
, l1
); break;
2261 case TOK_GT
: l1
= gen_opic_lt(l2
, l1
); break;
2263 case TOK_LAND
: l1
= l1
&& l2
; break;
2264 case TOK_LOR
: l1
= l1
|| l2
; break;
2268 if (t1
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t1
!= VT_PTR
))
2269 l1
= ((uint32_t)l1
|
2270 (v1
->type
.t
& VT_UNSIGNED
? 0 : -(l1
& 0x80000000)));
2274 nonconst
= VT_NONCONST
;
2275 /* if commutative ops, put c2 as constant */
2276 if (c1
&& (op
== '+' || op
== '&' || op
== '^' ||
2277 op
== '|' || op
== '*' || op
== TOK_EQ
|| op
== TOK_NE
)) {
2279 c2
= c1
; //c = c1, c1 = c2, c2 = c;
2280 l2
= l1
; //l = l1, l1 = l2, l2 = l;
2282 if (!const_wanted
&&
2284 (op
== TOK_SHL
|| op
== TOK_SHR
|| op
== TOK_SAR
)) ||
2285 (l1
== -1 && op
== TOK_SAR
))) {
2286 /* treat (0 << x), (0 >> x) and (-1 >> x) as constant */
2288 } else if (!const_wanted
&&
2289 c2
&& ((l2
== 0 && (op
== '&' || op
== '*')) ||
2291 (l2
== -1 || (l2
== 0xFFFFFFFF && t2
!= VT_LLONG
))) ||
2292 (l2
== 1 && (op
== '%' || op
== TOK_UMOD
)))) {
2293 /* treat (x & 0), (x * 0), (x | -1) and (x % 1) as constant */
2298 } else if (c2
&& (((op
== '*' || op
== '/' || op
== TOK_UDIV
||
2301 ((op
== '+' || op
== '-' || op
== '|' || op
== '^' ||
2302 op
== TOK_SHL
|| op
== TOK_SHR
|| op
== TOK_SAR
) &&
2305 (l2
== -1 || (l2
== 0xFFFFFFFF && t2
!= VT_LLONG
))))) {
2306 /* filter out NOP operations like x*1, x-0, x&-1... */
2308 } else if (c2
&& (op
== '*' || op
== TOK_PDIV
|| op
== TOK_UDIV
)) {
2309 /* try to use shifts instead of muls or divs */
2310 if (l2
> 0 && (l2
& (l2
- 1)) == 0) {
2319 else if (op
== TOK_PDIV
)
2325 } else if (c2
&& (op
== '+' || op
== '-') &&
2326 (((vtop
[-1].r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == (VT_CONST
| VT_SYM
))
2327 || (vtop
[-1].r
& (VT_VALMASK
| VT_LVAL
)) == VT_LOCAL
)) {
2328 /* symbol + constant case */
2332 /* The backends can't always deal with addends to symbols
2333 larger than +-1<<31. Don't construct such. */
2340 /* call low level op generator */
2341 if (t1
== VT_LLONG
|| t2
== VT_LLONG
||
2342 (PTR_SIZE
== 8 && (t1
== VT_PTR
|| t2
== VT_PTR
)))
2348 if (vtop
->r
== VT_CONST
)
2349 vtop
->r
|= nonconst
;
2352 #if defined TCC_TARGET_X86_64 || defined TCC_TARGET_I386
2353 # define gen_negf gen_opf
2354 #elif defined TCC_TARGET_ARM
2355 void gen_negf(int op
)
2357 /* arm will detect 0-x and replace by vneg */
2358 vpushi(0), vswap(), gen_op('-');
2361 /* XXX: implement in gen_opf() for other backends too */
2362 void gen_negf(int op
)
2364 /* In IEEE negate(x) isn't subtract(0,x). Without NaNs it's
2365 subtract(-0, x), but with them it's really a sign flip
2366 operation. We implement this with bit manipulation and have
2367 to do some type reinterpretation for this, which TCC can do
2370 int align
, size
, bt
;
2372 size
= type_size(&vtop
->type
, &align
);
2373 bt
= vtop
->type
.t
& VT_BTYPE
;
2374 save_reg(gv(RC_TYPE(bt
)));
2376 incr_bf_adr(size
- 1);
2378 vpushi(0x80); /* flip sign */
2385 /* generate a floating point operation with constant propagation */
2386 static void gen_opif(int op
)
2390 #if defined _MSC_VER && defined __x86_64__
2391 /* avoid bad optimization with f1 -= f2 for f1:-0.0, f2:0.0 */
2401 /* currently, we cannot do computations with forward symbols */
2402 c1
= (v1
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2403 c2
= (v2
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2405 if (v1
->type
.t
== VT_FLOAT
) {
2408 } else if (v1
->type
.t
== VT_DOUBLE
) {
2415 /* NOTE: we only do constant propagation if finite number (not
2416 NaN or infinity) (ANSI spec) */
2417 if (!(ieee_finite(f1
) || !ieee_finite(f2
)) && !const_wanted
)
2420 case '+': f1
+= f2
; break;
2421 case '-': f1
-= f2
; break;
2422 case '*': f1
*= f2
; break;
2425 union { float f
; unsigned u
; } x1
, x2
, y
;
2426 /* If not in initializer we need to potentially generate
2427 FP exceptions at runtime, otherwise we want to fold. */
2430 /* the run-time result of 0.0/0.0 on x87, also of other compilers
2431 when used to compile the f1 /= f2 below, would be -nan */
2432 x1
.f
= f1
, x2
.f
= f2
;
2434 y
.u
= 0x7fc00000; /* nan */
2436 y
.u
= 0x7f800000; /* infinity */
2437 y
.u
|= (x1
.u
^ x2
.u
) & 0x80000000; /* set sign */
2446 /* XXX: also handles tests ? */
2452 /* XXX: overflow test ? */
2453 if (v1
->type
.t
== VT_FLOAT
) {
2455 } else if (v1
->type
.t
== VT_DOUBLE
) {
2462 if (op
== TOK_NEG
) {
2470 /* print a type. If 'varstr' is not NULL, then the variable is also
2471 printed in the type */
2473 /* XXX: add array and function pointers */
2474 static void type_to_str(char *buf
, int buf_size
,
2475 CType
*type
, const char *varstr
)
2487 pstrcat(buf
, buf_size
, "extern ");
2489 pstrcat(buf
, buf_size
, "static ");
2491 pstrcat(buf
, buf_size
, "typedef ");
2493 pstrcat(buf
, buf_size
, "inline ");
2495 if (t
& VT_VOLATILE
)
2496 pstrcat(buf
, buf_size
, "volatile ");
2497 if (t
& VT_CONSTANT
)
2498 pstrcat(buf
, buf_size
, "const ");
2500 if (((t
& VT_DEFSIGN
) && bt
== VT_BYTE
)
2501 || ((t
& VT_UNSIGNED
)
2502 && (bt
== VT_SHORT
|| bt
== VT_INT
|| bt
== VT_LLONG
)
2505 pstrcat(buf
, buf_size
, (t
& VT_UNSIGNED
) ? "unsigned " : "signed ");
2507 buf_size
-= strlen(buf
);
2543 tstr
= "long double";
2545 pstrcat(buf
, buf_size
, tstr
);
2552 pstrcat(buf
, buf_size
, tstr
);
2553 v
= type
->ref
->v
& ~SYM_STRUCT
;
2554 if (v
>= SYM_FIRST_ANOM
)
2555 pstrcat(buf
, buf_size
, "<anonymous>");
2557 pstrcat(buf
, buf_size
, get_tok_str(v
, NULL
));
2562 if (varstr
&& '*' == *varstr
) {
2563 pstrcat(buf1
, sizeof(buf1
), "(");
2564 pstrcat(buf1
, sizeof(buf1
), varstr
);
2565 pstrcat(buf1
, sizeof(buf1
), ")");
2567 pstrcat(buf1
, buf_size
, "(");
2569 while (sa
!= NULL
) {
2571 type_to_str(buf2
, sizeof(buf2
), &sa
->type
, NULL
);
2572 pstrcat(buf1
, sizeof(buf1
), buf2
);
2575 pstrcat(buf1
, sizeof(buf1
), ", ");
2577 if (s
->f
.func_type
== FUNC_ELLIPSIS
)
2578 pstrcat(buf1
, sizeof(buf1
), ", ...");
2579 pstrcat(buf1
, sizeof(buf1
), ")");
2580 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
2584 if (t
& (VT_ARRAY
|VT_VLA
)) {
2585 if (varstr
&& '*' == *varstr
)
2586 snprintf(buf1
, sizeof(buf1
), "(%s)[%d]", varstr
, s
->c
);
2588 snprintf(buf1
, sizeof(buf1
), "%s[%d]", varstr
? varstr
: "", s
->c
);
2589 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
2592 pstrcpy(buf1
, sizeof(buf1
), "*");
2593 if (t
& VT_CONSTANT
)
2594 pstrcat(buf1
, buf_size
, "const ");
2595 if (t
& VT_VOLATILE
)
2596 pstrcat(buf1
, buf_size
, "volatile ");
2598 pstrcat(buf1
, sizeof(buf1
), varstr
);
2599 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
2603 pstrcat(buf
, buf_size
, " ");
2604 pstrcat(buf
, buf_size
, varstr
);
2609 static void type_incompatibility_error(CType
* st
, CType
* dt
, const char* fmt
)
2611 char buf1
[256], buf2
[256];
2612 type_to_str(buf1
, sizeof(buf1
), st
, NULL
);
2613 type_to_str(buf2
, sizeof(buf2
), dt
, NULL
);
2614 tcc_error(fmt
, buf1
, buf2
);
2617 static void type_incompatibility_warning(CType
* st
, CType
* dt
, const char* fmt
)
2619 char buf1
[256], buf2
[256];
2620 type_to_str(buf1
, sizeof(buf1
), st
, NULL
);
2621 type_to_str(buf2
, sizeof(buf2
), dt
, NULL
);
2622 tcc_warning(fmt
, buf1
, buf2
);
2625 static int pointed_size(CType
*type
)
2628 return type_size(pointed_type(type
), &align
);
2631 static inline int is_null_pointer(SValue
*p
)
2633 if ((p
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
| VT_NONCONST
)) != VT_CONST
)
2635 return ((p
->type
.t
& VT_BTYPE
) == VT_INT
&& (uint32_t)p
->c
.i
== 0) ||
2636 ((p
->type
.t
& VT_BTYPE
) == VT_LLONG
&& p
->c
.i
== 0) ||
2637 ((p
->type
.t
& VT_BTYPE
) == VT_PTR
&&
2638 (PTR_SIZE
== 4 ? (uint32_t)p
->c
.i
== 0 : p
->c
.i
== 0) &&
2639 ((pointed_type(&p
->type
)->t
& VT_BTYPE
) == VT_VOID
) &&
2640 0 == (pointed_type(&p
->type
)->t
& (VT_CONSTANT
| VT_VOLATILE
))
2644 /* compare function types. OLD functions match any new functions */
2645 static int is_compatible_func(CType
*type1
, CType
*type2
)
2651 if (s1
->f
.func_call
!= s2
->f
.func_call
)
2653 if (s1
->f
.func_type
!= s2
->f
.func_type
2654 && s1
->f
.func_type
!= FUNC_OLD
2655 && s2
->f
.func_type
!= FUNC_OLD
)
2658 if (!is_compatible_unqualified_types(&s1
->type
, &s2
->type
))
2660 if (s1
->f
.func_type
== FUNC_OLD
|| s2
->f
.func_type
== FUNC_OLD
)
2671 /* return true if type1 and type2 are the same. If unqualified is
2672 true, qualifiers on the types are ignored.
2674 static int compare_types(CType
*type1
, CType
*type2
, int unqualified
)
2678 t1
= type1
->t
& VT_TYPE
;
2679 t2
= type2
->t
& VT_TYPE
;
2681 /* strip qualifiers before comparing */
2682 t1
&= ~(VT_CONSTANT
| VT_VOLATILE
);
2683 t2
&= ~(VT_CONSTANT
| VT_VOLATILE
);
2686 /* Default Vs explicit signedness only matters for char */
2687 if ((t1
& VT_BTYPE
) != VT_BYTE
) {
2691 /* XXX: bitfields ? */
2696 && !(type1
->ref
->c
< 0
2697 || type2
->ref
->c
< 0
2698 || type1
->ref
->c
== type2
->ref
->c
))
2701 /* test more complicated cases */
2702 bt1
= t1
& VT_BTYPE
;
2703 if (bt1
== VT_PTR
) {
2704 type1
= pointed_type(type1
);
2705 type2
= pointed_type(type2
);
2706 return is_compatible_types(type1
, type2
);
2707 } else if (bt1
== VT_STRUCT
) {
2708 return (type1
->ref
== type2
->ref
);
2709 } else if (bt1
== VT_FUNC
) {
2710 return is_compatible_func(type1
, type2
);
2711 } else if (IS_ENUM(type1
->t
) && IS_ENUM(type2
->t
)) {
2712 /* If both are enums then they must be the same, if only one is then
2713 t1 and t2 must be equal, which was checked above already. */
2714 return type1
->ref
== type2
->ref
;
2720 /* Check if OP1 and OP2 can be "combined" with operation OP, the combined
2721 type is stored in DEST if non-null (except for pointer plus/minus) . */
2722 static int combine_types(CType
*dest
, SValue
*op1
, SValue
*op2
, int op
)
2724 CType
*type1
= &op1
->type
, *type2
= &op2
->type
, type
;
2725 int t1
= type1
->t
, t2
= type2
->t
, bt1
= t1
& VT_BTYPE
, bt2
= t2
& VT_BTYPE
;
2731 if (bt1
== VT_VOID
|| bt2
== VT_VOID
) {
2732 ret
= op
== '?' ? 1 : 0;
2733 /* NOTE: as an extension, we accept void on only one side */
2735 } else if (bt1
== VT_PTR
|| bt2
== VT_PTR
) {
2736 if (op
== '+') ; /* Handled in caller */
2737 /* http://port70.net/~nsz/c/c99/n1256.html#6.5.15p6 */
2738 /* If one is a null ptr constant the result type is the other. */
2739 else if (is_null_pointer (op2
)) type
= *type1
;
2740 else if (is_null_pointer (op1
)) type
= *type2
;
2741 else if (bt1
!= bt2
) {
2742 /* accept comparison or cond-expr between pointer and integer
2744 if ((op
== '?' || TOK_ISCOND(op
))
2745 && (is_integer_btype(bt1
) || is_integer_btype(bt2
)))
2746 tcc_warning("pointer/integer mismatch in %s",
2747 op
== '?' ? "conditional expression" : "comparison");
2748 else if (op
!= '-' || !is_integer_btype(bt2
))
2750 type
= *(bt1
== VT_PTR
? type1
: type2
);
2752 CType
*pt1
= pointed_type(type1
);
2753 CType
*pt2
= pointed_type(type2
);
2754 int pbt1
= pt1
->t
& VT_BTYPE
;
2755 int pbt2
= pt2
->t
& VT_BTYPE
;
2756 int newquals
, copied
= 0;
2757 if (pbt1
!= VT_VOID
&& pbt2
!= VT_VOID
2758 && !compare_types(pt1
, pt2
, 1/*unqualif*/)) {
2759 if (op
!= '?' && !TOK_ISCOND(op
))
2762 type_incompatibility_warning(type1
, type2
,
2764 ? "pointer type mismatch in conditional expression ('%s' and '%s')"
2765 : "pointer type mismatch in comparison('%s' and '%s')");
2768 /* pointers to void get preferred, otherwise the
2769 pointed to types minus qualifs should be compatible */
2770 type
= *((pbt1
== VT_VOID
) ? type1
: type2
);
2771 /* combine qualifs */
2772 newquals
= ((pt1
->t
| pt2
->t
) & (VT_CONSTANT
| VT_VOLATILE
));
2773 if ((~pointed_type(&type
)->t
& (VT_CONSTANT
| VT_VOLATILE
))
2776 /* copy the pointer target symbol */
2777 type
.ref
= sym_push(SYM_FIELD
, &type
.ref
->type
,
2780 pointed_type(&type
)->t
|= newquals
;
2782 /* pointers to incomplete arrays get converted to
2783 pointers to completed ones if possible */
2784 if (pt1
->t
& VT_ARRAY
2785 && pt2
->t
& VT_ARRAY
2786 && pointed_type(&type
)->ref
->c
< 0
2787 && (pt1
->ref
->c
> 0 || pt2
->ref
->c
> 0))
2790 type
.ref
= sym_push(SYM_FIELD
, &type
.ref
->type
,
2792 pointed_type(&type
)->ref
=
2793 sym_push(SYM_FIELD
, &pointed_type(&type
)->ref
->type
,
2794 0, pointed_type(&type
)->ref
->c
);
2795 pointed_type(&type
)->ref
->c
=
2796 0 < pt1
->ref
->c
? pt1
->ref
->c
: pt2
->ref
->c
;
2802 } else if (bt1
== VT_STRUCT
|| bt2
== VT_STRUCT
) {
2803 if (op
!= '?' || !compare_types(type1
, type2
, 1))
2806 } else if (is_float(bt1
) || is_float(bt2
)) {
2807 if (bt1
== VT_LDOUBLE
|| bt2
== VT_LDOUBLE
) {
2808 type
.t
= VT_LDOUBLE
;
2809 } else if (bt1
== VT_DOUBLE
|| bt2
== VT_DOUBLE
) {
2814 } else if (bt1
== VT_LLONG
|| bt2
== VT_LLONG
) {
2815 /* cast to biggest op */
2816 type
.t
= VT_LLONG
| VT_LONG
;
2817 if (bt1
== VT_LLONG
)
2819 if (bt2
== VT_LLONG
)
2821 /* convert to unsigned if it does not fit in a long long */
2822 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
) ||
2823 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
))
2824 type
.t
|= VT_UNSIGNED
;
2826 /* integer operations */
2827 type
.t
= VT_INT
| (VT_LONG
& (t1
| t2
));
2828 /* convert to unsigned if it does not fit in an integer */
2829 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
) ||
2830 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
))
2831 type
.t
|= VT_UNSIGNED
;
2838 /* generic gen_op: handles types problems */
2839 ST_FUNC
void gen_op(int op
)
2841 int t1
, t2
, bt1
, bt2
, t
;
2842 CType type1
, combtype
;
2845 t1
= vtop
[-1].type
.t
;
2846 t2
= vtop
[0].type
.t
;
2847 bt1
= t1
& VT_BTYPE
;
2848 bt2
= t2
& VT_BTYPE
;
2850 if (bt1
== VT_FUNC
|| bt2
== VT_FUNC
) {
2851 if (bt2
== VT_FUNC
) {
2852 mk_pointer(&vtop
->type
);
2855 if (bt1
== VT_FUNC
) {
2857 mk_pointer(&vtop
->type
);
2862 } else if (!combine_types(&combtype
, vtop
- 1, vtop
, op
)) {
2863 tcc_error_noabort("invalid operand types for binary operation");
2865 } else if (bt1
== VT_PTR
|| bt2
== VT_PTR
) {
2866 /* at least one operand is a pointer */
2867 /* relational op: must be both pointers */
2871 /* if both pointers, then it must be the '-' op */
2872 if (bt1
== VT_PTR
&& bt2
== VT_PTR
) {
2874 tcc_error("cannot use pointers here");
2875 vpush_type_size(pointed_type(&vtop
[-1].type
), &align
);
2878 vtop
->type
.t
= VT_PTRDIFF_T
;
2882 /* exactly one pointer : must be '+' or '-'. */
2883 if (op
!= '-' && op
!= '+')
2884 tcc_error("cannot use pointers here");
2885 /* Put pointer as first operand */
2886 if (bt2
== VT_PTR
) {
2888 t
= t1
, t1
= t2
, t2
= t
;
2891 if ((vtop
[0].type
.t
& VT_BTYPE
) == VT_LLONG
)
2892 /* XXX: truncate here because gen_opl can't handle ptr + long long */
2895 type1
= vtop
[-1].type
;
2896 vpush_type_size(pointed_type(&vtop
[-1].type
), &align
);
2898 #ifdef CONFIG_TCC_BCHECK
2899 if (tcc_state
->do_bounds_check
&& !const_wanted
) {
2900 /* if bounded pointers, we generate a special code to
2907 gen_bounded_ptr_add();
2913 type1
.t
&= ~(VT_ARRAY
|VT_VLA
);
2914 /* restore the type in case gen_opic() swapped the operands */
2918 /* floats can only be used for a few operations */
2919 if (is_float(combtype
.t
)
2920 && op
!= '+' && op
!= '-' && op
!= '*' && op
!= '/'
2922 tcc_error("invalid operands for binary operation");
2923 else if (op
== TOK_SHR
|| op
== TOK_SAR
|| op
== TOK_SHL
) {
2924 t
= bt1
== VT_LLONG
? VT_LLONG
: VT_INT
;
2925 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (t
| VT_UNSIGNED
))
2927 t
|= (VT_LONG
& t1
);
2931 t
= t2
= combtype
.t
;
2932 /* XXX: currently, some unsigned operations are explicit, so
2933 we modify them here */
2934 if (t
& VT_UNSIGNED
) {
2941 else if (op
== TOK_LT
)
2943 else if (op
== TOK_GT
)
2945 else if (op
== TOK_LE
)
2947 else if (op
== TOK_GE
)
2953 /* special case for shifts and long long: we keep the shift as
2955 if (op
== TOK_SHR
|| op
== TOK_SAR
|| op
== TOK_SHL
)
2962 if (TOK_ISCOND(op
)) {
2963 /* relational op: the result is an int */
2964 vtop
->type
.t
= VT_INT
;
2969 // Make sure that we have converted to an rvalue:
2970 if (vtop
->r
& VT_LVAL
)
2971 gv(is_float(vtop
->type
.t
& VT_BTYPE
) ? RC_FLOAT
: RC_INT
);
2974 #if defined TCC_TARGET_ARM64 || defined TCC_TARGET_RISCV64 || defined TCC_TARGET_ARM
2975 #define gen_cvt_itof1 gen_cvt_itof
2977 /* generic itof for unsigned long long case */
2978 static void gen_cvt_itof1(int t
)
2980 if ((vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
)) ==
2981 (VT_LLONG
| VT_UNSIGNED
)) {
2984 vpush_helper_func(TOK___floatundisf
);
2985 #if LDOUBLE_SIZE != 8
2986 else if (t
== VT_LDOUBLE
)
2987 vpush_helper_func(TOK___floatundixf
);
2990 vpush_helper_func(TOK___floatundidf
);
3001 #if defined TCC_TARGET_ARM64 || defined TCC_TARGET_RISCV64
3002 #define gen_cvt_ftoi1 gen_cvt_ftoi
3004 /* generic ftoi for unsigned long long case */
3005 static void gen_cvt_ftoi1(int t
)
3008 if (t
== (VT_LLONG
| VT_UNSIGNED
)) {
3009 /* not handled natively */
3010 st
= vtop
->type
.t
& VT_BTYPE
;
3012 vpush_helper_func(TOK___fixunssfdi
);
3013 #if LDOUBLE_SIZE != 8
3014 else if (st
== VT_LDOUBLE
)
3015 vpush_helper_func(TOK___fixunsxfdi
);
3018 vpush_helper_func(TOK___fixunsdfdi
);
3029 /* special delayed cast for char/short */
3030 static void force_charshort_cast(void)
3032 int sbt
= BFGET(vtop
->r
, VT_MUSTCAST
) == 2 ? VT_LLONG
: VT_INT
;
3033 int dbt
= vtop
->type
.t
;
3034 vtop
->r
&= ~VT_MUSTCAST
;
3036 gen_cast_s(dbt
== VT_BOOL
? VT_BYTE
|VT_UNSIGNED
: dbt
);
3040 static void gen_cast_s(int t
)
3048 /* cast 'vtop' to 'type'. Casting to bitfields is forbidden. */
3049 static void gen_cast(CType
*type
)
3051 int sbt
, dbt
, sf
, df
, c
;
3052 int dbt_bt
, sbt_bt
, ds
, ss
, bits
, trunc
;
3054 /* special delayed cast for char/short */
3055 if (vtop
->r
& VT_MUSTCAST
)
3056 force_charshort_cast();
3058 /* bitfields first get cast to ints */
3059 if (vtop
->type
.t
& VT_BITFIELD
)
3062 dbt
= type
->t
& (VT_BTYPE
| VT_UNSIGNED
);
3063 sbt
= vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
);
3071 dbt_bt
= dbt
& VT_BTYPE
;
3072 sbt_bt
= sbt
& VT_BTYPE
;
3073 if (dbt_bt
== VT_VOID
)
3075 if (sbt_bt
== VT_VOID
) {
3077 cast_error(&vtop
->type
, type
);
3080 c
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
3081 #if !defined TCC_IS_NATIVE && !defined TCC_IS_NATIVE_387
3082 c
&= (dbt
!= VT_LDOUBLE
) | !!nocode_wanted
;
3085 /* constant case: we can do it now */
3086 /* XXX: in ISOC, cannot do it if error in convert */
3087 if (sbt
== VT_FLOAT
)
3088 vtop
->c
.ld
= vtop
->c
.f
;
3089 else if (sbt
== VT_DOUBLE
)
3090 vtop
->c
.ld
= vtop
->c
.d
;
3093 if (sbt_bt
== VT_LLONG
) {
3094 if ((sbt
& VT_UNSIGNED
) || !(vtop
->c
.i
>> 63))
3095 vtop
->c
.ld
= vtop
->c
.i
;
3097 vtop
->c
.ld
= -(long double)-vtop
->c
.i
;
3099 if ((sbt
& VT_UNSIGNED
) || !(vtop
->c
.i
>> 31))
3100 vtop
->c
.ld
= (uint32_t)vtop
->c
.i
;
3102 vtop
->c
.ld
= -(long double)-(uint32_t)vtop
->c
.i
;
3105 if (dbt
== VT_FLOAT
)
3106 vtop
->c
.f
= (float)vtop
->c
.ld
;
3107 else if (dbt
== VT_DOUBLE
)
3108 vtop
->c
.d
= (double)vtop
->c
.ld
;
3109 } else if (sf
&& dbt
== VT_BOOL
) {
3110 vtop
->c
.i
= (vtop
->c
.ld
!= 0);
3113 vtop
->c
.i
= vtop
->c
.ld
;
3114 else if (sbt_bt
== VT_LLONG
|| (PTR_SIZE
== 8 && sbt
== VT_PTR
))
3116 else if (sbt
& VT_UNSIGNED
)
3117 vtop
->c
.i
= (uint32_t)vtop
->c
.i
;
3119 vtop
->c
.i
= ((uint32_t)vtop
->c
.i
| -(vtop
->c
.i
& 0x80000000));
3121 if (dbt_bt
== VT_LLONG
|| (PTR_SIZE
== 8 && dbt
== VT_PTR
))
3123 else if (dbt
== VT_BOOL
)
3124 vtop
->c
.i
= (vtop
->c
.i
!= 0);
3126 uint32_t m
= dbt_bt
== VT_BYTE
? 0xff :
3127 dbt_bt
== VT_SHORT
? 0xffff :
3130 if (!(dbt
& VT_UNSIGNED
))
3131 vtop
->c
.i
|= -(vtop
->c
.i
& ((m
>> 1) + 1));
3136 } else if (dbt
== VT_BOOL
3137 && (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
))
3138 == (VT_CONST
| VT_SYM
)) {
3139 /* addresses are considered non-zero (see tcctest.c:sinit23) */
3145 /* cannot generate code for global or static initializers */
3146 if (nocode_wanted
& DATA_ONLY_WANTED
)
3149 /* non constant case: generate code */
3150 if (dbt
== VT_BOOL
) {
3151 gen_test_zero(TOK_NE
);
3157 /* convert from fp to fp */
3160 /* convert int to fp */
3163 /* convert fp to int */
3165 if (dbt_bt
!= VT_LLONG
&& dbt_bt
!= VT_INT
)
3168 goto again
; /* may need char/short cast */
3173 ds
= btype_size(dbt_bt
);
3174 ss
= btype_size(sbt_bt
);
3175 if (ds
== 0 || ss
== 0)
3178 if (IS_ENUM(type
->t
) && type
->ref
->c
< 0)
3179 tcc_error("cast to incomplete type");
3181 /* same size and no sign conversion needed */
3182 if (ds
== ss
&& ds
>= 4)
3184 if (dbt_bt
== VT_PTR
|| sbt_bt
== VT_PTR
) {
3185 tcc_warning("cast between pointer and integer of different size");
3186 if (sbt_bt
== VT_PTR
) {
3187 /* put integer type to allow logical operations below */
3188 vtop
->type
.t
= (PTR_SIZE
== 8 ? VT_LLONG
: VT_INT
);
3192 /* processor allows { int a = 0, b = *(char*)&a; }
3193 That means that if we cast to less width, we can just
3194 change the type and read it still later. */
3195 #define ALLOW_SUBTYPE_ACCESS 1
3197 if (ALLOW_SUBTYPE_ACCESS
&& (vtop
->r
& VT_LVAL
)) {
3198 /* value still in memory */
3202 if (ds
<= 4 && !(dbt
== (VT_SHORT
| VT_UNSIGNED
) && sbt
== VT_BYTE
)) {
3204 goto done
; /* no 64bit involved */
3212 /* generate high word */
3213 if (sbt
& VT_UNSIGNED
) {
3222 } else if (ss
== 8) {
3223 /* from long long: just take low order word */
3231 /* need to convert from 32bit to 64bit */
3232 if (sbt
& VT_UNSIGNED
) {
3233 #if defined(TCC_TARGET_RISCV64)
3234 /* RISC-V keeps 32bit vals in registers sign-extended.
3235 So here we need a zero-extension. */
3244 ss
= ds
, ds
= 4, dbt
= sbt
;
3245 } else if (ss
== 8) {
3246 /* RISC-V keeps 32bit vals in registers sign-extended.
3247 So here we need a sign-extension for signed types and
3248 zero-extension for unsigned types. */
3249 #if !defined(TCC_TARGET_RISCV64)
3250 trunc
= 32; /* zero upper 32 bits for non RISC-V targets */
3259 #if defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64 || defined TCC_TARGET_ARM64
3265 bits
= (ss
- ds
) * 8;
3266 /* for unsigned, gen_op will convert SAR to SHR */
3267 vtop
->type
.t
= (ss
== 8 ? VT_LLONG
: VT_INT
) | (dbt
& VT_UNSIGNED
);
3270 vpushi(bits
- trunc
);
3277 vtop
->type
.t
&= ~ ( VT_CONSTANT
| VT_VOLATILE
| VT_ARRAY
);
3280 /* return type size as known at compile time. Put alignment at 'a' */
3281 ST_FUNC
int type_size(CType
*type
, int *a
)
3286 bt
= type
->t
& VT_BTYPE
;
3287 if (bt
== VT_STRUCT
) {
3292 } else if (bt
== VT_PTR
) {
3293 if (type
->t
& VT_ARRAY
) {
3297 ts
= type_size(&s
->type
, a
);
3299 if (ts
< 0 && s
->c
< 0)
3307 } else if (IS_ENUM(type
->t
) && type
->ref
->c
< 0) {
3309 return -1; /* incomplete enum */
3310 } else if (bt
== VT_LDOUBLE
) {
3312 return LDOUBLE_SIZE
;
3313 } else if (bt
== VT_DOUBLE
|| bt
== VT_LLONG
) {
3314 #ifdef TCC_TARGET_I386
3315 #ifdef TCC_TARGET_PE
3320 #elif defined(TCC_TARGET_ARM)
3330 } else if (bt
== VT_INT
|| bt
== VT_FLOAT
) {
3333 } else if (bt
== VT_SHORT
) {
3336 } else if (bt
== VT_QLONG
|| bt
== VT_QFLOAT
) {
3340 /* char, void, function, _Bool */
3346 /* push type size as known at runtime time on top of value stack. Put
3348 static void vpush_type_size(CType
*type
, int *a
)
3350 if (type
->t
& VT_VLA
) {
3351 type_size(&type
->ref
->type
, a
);
3352 vset(&int_type
, VT_LOCAL
|VT_LVAL
, type
->ref
->c
);
3354 int size
= type_size(type
, a
);
3356 tcc_error("unknown type size");
3365 /* return the pointed type of t */
3366 static inline CType
*pointed_type(CType
*type
)
3368 return &type
->ref
->type
;
3371 /* modify type so that its it is a pointer to type. */
3372 ST_FUNC
void mk_pointer(CType
*type
)
3375 s
= sym_push(SYM_FIELD
, type
, 0, -1);
3376 type
->t
= VT_PTR
| (type
->t
& VT_STORAGE
);
3380 /* return true if type1 and type2 are exactly the same (including
3383 static int is_compatible_types(CType
*type1
, CType
*type2
)
3385 return compare_types(type1
,type2
,0);
3388 /* return true if type1 and type2 are the same (ignoring qualifiers).
3390 static int is_compatible_unqualified_types(CType
*type1
, CType
*type2
)
3392 return compare_types(type1
,type2
,1);
3395 static void cast_error(CType
*st
, CType
*dt
)
3397 type_incompatibility_error(st
, dt
, "cannot convert '%s' to '%s'");
3400 /* verify type compatibility to store vtop in 'dt' type */
3401 static void verify_assign_cast(CType
*dt
)
3403 CType
*st
, *type1
, *type2
;
3404 int dbt
, sbt
, qualwarn
, lvl
;
3406 st
= &vtop
->type
; /* source type */
3407 dbt
= dt
->t
& VT_BTYPE
;
3408 sbt
= st
->t
& VT_BTYPE
;
3409 if (dt
->t
& VT_CONSTANT
)
3410 tcc_warning("assignment of read-only location");
3414 tcc_error("assignment to void expression");
3417 /* special cases for pointers */
3418 /* '0' can also be a pointer */
3419 if (is_null_pointer(vtop
))
3421 /* accept implicit pointer to integer cast with warning */
3422 if (is_integer_btype(sbt
)) {
3423 tcc_warning("assignment makes pointer from integer without a cast");
3426 type1
= pointed_type(dt
);
3428 type2
= pointed_type(st
);
3429 else if (sbt
== VT_FUNC
)
3430 type2
= st
; /* a function is implicitly a function pointer */
3433 if (is_compatible_types(type1
, type2
))
3435 for (qualwarn
= lvl
= 0;; ++lvl
) {
3436 if (((type2
->t
& VT_CONSTANT
) && !(type1
->t
& VT_CONSTANT
)) ||
3437 ((type2
->t
& VT_VOLATILE
) && !(type1
->t
& VT_VOLATILE
)))
3439 dbt
= type1
->t
& (VT_BTYPE
|VT_LONG
);
3440 sbt
= type2
->t
& (VT_BTYPE
|VT_LONG
);
3441 if (dbt
!= VT_PTR
|| sbt
!= VT_PTR
)
3443 type1
= pointed_type(type1
);
3444 type2
= pointed_type(type2
);
3446 if (!is_compatible_unqualified_types(type1
, type2
)) {
3447 if ((dbt
== VT_VOID
|| sbt
== VT_VOID
) && lvl
== 0) {
3448 /* void * can match anything */
3449 } else if (dbt
== sbt
3450 && is_integer_btype(sbt
& VT_BTYPE
)
3451 && IS_ENUM(type1
->t
) + IS_ENUM(type2
->t
)
3452 + !!((type1
->t
^ type2
->t
) & VT_UNSIGNED
) < 2) {
3453 /* Like GCC don't warn by default for merely changes
3454 in pointer target signedness. Do warn for different
3455 base types, though, in particular for unsigned enums
3456 and signed int targets. */
3458 tcc_warning("assignment from incompatible pointer type");
3463 tcc_warning_c(warn_discarded_qualifiers
)("assignment discards qualifiers from pointer target type");
3469 if (sbt
== VT_PTR
|| sbt
== VT_FUNC
) {
3470 tcc_warning("assignment makes integer from pointer without a cast");
3471 } else if (sbt
== VT_STRUCT
) {
3472 goto case_VT_STRUCT
;
3474 /* XXX: more tests */
3478 if (!is_compatible_unqualified_types(dt
, st
)) {
3486 static void gen_assign_cast(CType
*dt
)
3488 verify_assign_cast(dt
);
3492 /* store vtop in lvalue pushed on stack */
3493 ST_FUNC
void vstore(void)
3495 int sbt
, dbt
, ft
, r
, size
, align
, bit_size
, bit_pos
, delayed_cast
;
3497 ft
= vtop
[-1].type
.t
;
3498 sbt
= vtop
->type
.t
& VT_BTYPE
;
3499 dbt
= ft
& VT_BTYPE
;
3500 verify_assign_cast(&vtop
[-1].type
);
3502 if (sbt
== VT_STRUCT
) {
3503 /* if structure, only generate pointer */
3504 /* structure assignment : generate memcpy */
3505 size
= type_size(&vtop
->type
, &align
);
3506 /* destination, keep on stack() as result */
3508 #ifdef CONFIG_TCC_BCHECK
3509 if (vtop
->r
& VT_MUSTBOUND
)
3510 gbound(); /* check would be wrong after gaddrof() */
3512 vtop
->type
.t
= VT_PTR
;
3516 #ifdef CONFIG_TCC_BCHECK
3517 if (vtop
->r
& VT_MUSTBOUND
)
3520 vtop
->type
.t
= VT_PTR
;
3523 #ifdef TCC_TARGET_NATIVE_STRUCT_COPY
3525 #ifdef CONFIG_TCC_BCHECK
3526 && !tcc_state
->do_bounds_check
3529 gen_struct_copy(size
);
3535 /* Use memmove, rather than memcpy, as dest and src may be same: */
3538 vpush_helper_func(TOK_memmove8
);
3539 else if(!(align
& 3))
3540 vpush_helper_func(TOK_memmove4
);
3543 vpush_helper_func(TOK_memmove
);
3548 } else if (ft
& VT_BITFIELD
) {
3549 /* bitfield store handling */
3551 /* save lvalue as expression result (example: s.b = s.a = n;) */
3552 vdup(), vtop
[-1] = vtop
[-2];
3554 bit_pos
= BIT_POS(ft
);
3555 bit_size
= BIT_SIZE(ft
);
3556 /* remove bit field info to avoid loops */
3557 vtop
[-1].type
.t
= ft
& ~VT_STRUCT_MASK
;
3559 if (dbt
== VT_BOOL
) {
3560 gen_cast(&vtop
[-1].type
);
3561 vtop
[-1].type
.t
= (vtop
[-1].type
.t
& ~VT_BTYPE
) | (VT_BYTE
| VT_UNSIGNED
);
3563 r
= adjust_bf(vtop
- 1, bit_pos
, bit_size
);
3564 if (dbt
!= VT_BOOL
) {
3565 gen_cast(&vtop
[-1].type
);
3566 dbt
= vtop
[-1].type
.t
& VT_BTYPE
;
3568 if (r
== VT_STRUCT
) {
3569 store_packed_bf(bit_pos
, bit_size
);
3571 unsigned long long mask
= (1ULL << bit_size
) - 1;
3572 if (dbt
!= VT_BOOL
) {
3574 if (dbt
== VT_LLONG
)
3577 vpushi((unsigned)mask
);
3584 /* duplicate destination */
3587 /* load destination, mask and or with source */
3588 if (dbt
== VT_LLONG
)
3589 vpushll(~(mask
<< bit_pos
));
3591 vpushi(~((unsigned)mask
<< bit_pos
));
3596 /* ... and discard */
3599 } else if (dbt
== VT_VOID
) {
3602 /* optimize char/short casts */
3604 if ((dbt
== VT_BYTE
|| dbt
== VT_SHORT
)
3605 && is_integer_btype(sbt
)
3607 if ((vtop
->r
& VT_MUSTCAST
)
3608 && btype_size(dbt
) > btype_size(sbt
)
3610 force_charshort_cast();
3613 gen_cast(&vtop
[-1].type
);
3616 #ifdef CONFIG_TCC_BCHECK
3617 /* bound check case */
3618 if (vtop
[-1].r
& VT_MUSTBOUND
) {
3624 gv(RC_TYPE(dbt
)); /* generate value */
3627 vtop
->r
|= BFVAL(VT_MUSTCAST
, (sbt
== VT_LLONG
) + 1);
3628 //tcc_warning("deley cast %x -> %x", sbt, dbt);
3629 vtop
->type
.t
= ft
& VT_TYPE
;
3632 /* if lvalue was saved on stack, must read it */
3633 if ((vtop
[-1].r
& VT_VALMASK
) == VT_LLOCAL
) {
3635 r
= get_reg(RC_INT
);
3636 sv
.type
.t
= VT_PTRDIFF_T
;
3637 sv
.r
= VT_LOCAL
| VT_LVAL
;
3638 sv
.c
.i
= vtop
[-1].c
.i
;
3640 vtop
[-1].r
= r
| VT_LVAL
;
3643 r
= vtop
->r
& VT_VALMASK
;
3644 /* two word case handling :
3645 store second register at word + 4 (or +8 for x86-64) */
3646 if (USING_TWO_WORDS(dbt
)) {
3647 int load_type
= (dbt
== VT_QFLOAT
) ? VT_DOUBLE
: VT_PTRDIFF_T
;
3648 vtop
[-1].type
.t
= load_type
;
3651 /* convert to int to increment easily */
3652 vtop
->type
.t
= VT_PTRDIFF_T
;
3658 vtop
[-1].type
.t
= load_type
;
3659 /* XXX: it works because r2 is spilled last ! */
3660 store(vtop
->r2
, vtop
- 1);
3666 vtop
--; /* NOT vpop() because on x86 it would flush the fp stack */
3670 /* post defines POST/PRE add. c is the token ++ or -- */
3671 ST_FUNC
void inc(int post
, int c
)
3674 vdup(); /* save lvalue */
3676 gv_dup(); /* duplicate value */
3681 vpushi(c
- TOK_MID
);
3683 vstore(); /* store value */
3685 vpop(); /* if post op, return saved value */
3688 ST_FUNC
void parse_mult_str (CString
*astr
, const char *msg
)
3690 /* read the string */
3694 while (tok
== TOK_STR
) {
3695 /* XXX: add \0 handling too ? */
3696 cstr_cat(astr
, tokc
.str
.data
, -1);
3699 cstr_ccat(astr
, '\0');
3702 /* If I is >= 1 and a power of two, returns log2(i)+1.
3703 If I is 0 returns 0. */
3704 ST_FUNC
int exact_log2p1(int i
)
3709 for (ret
= 1; i
>= 1 << 8; ret
+= 8)
3720 /* Parse __attribute__((...)) GNUC extension. */
3721 static void parse_attribute(AttributeDef
*ad
)
3727 if (tok
!= TOK_ATTRIBUTE1
&& tok
!= TOK_ATTRIBUTE2
)
3732 while (tok
!= ')') {
3733 if (tok
< TOK_IDENT
)
3734 expect("attribute name");
3746 tcc_warning_c(warn_implicit_function_declaration
)(
3747 "implicit declaration of function '%s'", get_tok_str(tok
, &tokc
));
3748 s
= external_global_sym(tok
, &func_old_type
);
3749 } else if ((s
->type
.t
& VT_BTYPE
) != VT_FUNC
)
3750 tcc_error("'%s' is not declared as function", get_tok_str(tok
, &tokc
));
3751 ad
->cleanup_func
= s
;
3756 case TOK_CONSTRUCTOR1
:
3757 case TOK_CONSTRUCTOR2
:
3758 ad
->f
.func_ctor
= 1;
3760 case TOK_DESTRUCTOR1
:
3761 case TOK_DESTRUCTOR2
:
3762 ad
->f
.func_dtor
= 1;
3764 case TOK_ALWAYS_INLINE1
:
3765 case TOK_ALWAYS_INLINE2
:
3766 ad
->f
.func_alwinl
= 1;
3771 parse_mult_str(&astr
, "section name");
3772 ad
->section
= find_section(tcc_state
, (char *)astr
.data
);
3779 parse_mult_str(&astr
, "alias(\"target\")");
3780 ad
->alias_target
= /* save string as token, for later */
3781 tok_alloc((char*)astr
.data
, astr
.size
-1)->tok
;
3785 case TOK_VISIBILITY1
:
3786 case TOK_VISIBILITY2
:
3788 parse_mult_str(&astr
,
3789 "visibility(\"default|hidden|internal|protected\")");
3790 if (!strcmp (astr
.data
, "default"))
3791 ad
->a
.visibility
= STV_DEFAULT
;
3792 else if (!strcmp (astr
.data
, "hidden"))
3793 ad
->a
.visibility
= STV_HIDDEN
;
3794 else if (!strcmp (astr
.data
, "internal"))
3795 ad
->a
.visibility
= STV_INTERNAL
;
3796 else if (!strcmp (astr
.data
, "protected"))
3797 ad
->a
.visibility
= STV_PROTECTED
;
3799 expect("visibility(\"default|hidden|internal|protected\")");
3808 if (n
<= 0 || (n
& (n
- 1)) != 0)
3809 tcc_error("alignment must be a positive power of two");
3814 ad
->a
.aligned
= exact_log2p1(n
);
3815 if (n
!= 1 << (ad
->a
.aligned
- 1))
3816 tcc_error("alignment of %d is larger than implemented", n
);
3828 /* currently, no need to handle it because tcc does not
3829 track unused objects */
3833 ad
->f
.func_noreturn
= 1;
3838 ad
->f
.func_call
= FUNC_CDECL
;
3843 ad
->f
.func_call
= FUNC_STDCALL
;
3845 #ifdef TCC_TARGET_I386
3855 ad
->f
.func_call
= FUNC_FASTCALL1
+ n
- 1;
3861 ad
->f
.func_call
= FUNC_FASTCALLW
;
3868 ad
->attr_mode
= VT_LLONG
+ 1;
3871 ad
->attr_mode
= VT_BYTE
+ 1;
3874 ad
->attr_mode
= VT_SHORT
+ 1;
3878 ad
->attr_mode
= VT_INT
+ 1;
3881 tcc_warning("__mode__(%s) not supported\n", get_tok_str(tok
, NULL
));
3888 ad
->a
.dllexport
= 1;
3890 case TOK_NODECORATE
:
3891 ad
->a
.nodecorate
= 1;
3894 ad
->a
.dllimport
= 1;
3897 tcc_warning_c(warn_unsupported
)("'%s' attribute ignored", get_tok_str(t
, NULL
));
3898 /* skip parameters */
3900 int parenthesis
= 0;
3904 else if (tok
== ')')
3907 } while (parenthesis
&& tok
!= -1);
3920 static Sym
* find_field (CType
*type
, int v
, int *cumofs
)
3924 while ((s
= s
->next
) != NULL
) {
3925 if ((s
->v
& SYM_FIELD
) &&
3926 (s
->type
.t
& VT_BTYPE
) == VT_STRUCT
&&
3927 (s
->v
& ~SYM_FIELD
) >= SYM_FIRST_ANOM
) {
3928 Sym
*ret
= find_field (&s
->type
, v
, cumofs
);
3940 static void check_fields (CType
*type
, int check
)
3944 while ((s
= s
->next
) != NULL
) {
3945 int v
= s
->v
& ~SYM_FIELD
;
3946 if (v
< SYM_FIRST_ANOM
) {
3947 TokenSym
*ts
= table_ident
[v
- TOK_IDENT
];
3948 if (check
&& (ts
->tok
& SYM_FIELD
))
3949 tcc_error("duplicate member '%s'", get_tok_str(v
, NULL
));
3950 ts
->tok
^= SYM_FIELD
;
3951 } else if ((s
->type
.t
& VT_BTYPE
) == VT_STRUCT
)
3952 check_fields (&s
->type
, check
);
3956 static void struct_layout(CType
*type
, AttributeDef
*ad
)
3958 int size
, align
, maxalign
, offset
, c
, bit_pos
, bit_size
;
3959 int packed
, a
, bt
, prevbt
, prev_bit_size
;
3960 int pcc
= !tcc_state
->ms_bitfields
;
3961 int pragma_pack
= *tcc_state
->pack_stack_ptr
;
3968 prevbt
= VT_STRUCT
; /* make it never match */
3973 for (f
= type
->ref
->next
; f
; f
= f
->next
) {
3974 if (f
->type
.t
& VT_BITFIELD
)
3975 bit_size
= BIT_SIZE(f
->type
.t
);
3978 size
= type_size(&f
->type
, &align
);
3979 a
= f
->a
.aligned
? 1 << (f
->a
.aligned
- 1) : 0;
3982 if (pcc
&& bit_size
== 0) {
3983 /* in pcc mode, packing does not affect zero-width bitfields */
3986 /* in pcc mode, attribute packed overrides if set. */
3987 if (pcc
&& (f
->a
.packed
|| ad
->a
.packed
))
3990 /* pragma pack overrides align if lesser and packs bitfields always */
3993 if (pragma_pack
< align
)
3994 align
= pragma_pack
;
3995 /* in pcc mode pragma pack also overrides individual align */
3996 if (pcc
&& pragma_pack
< a
)
4000 /* some individual align was specified */
4004 if (type
->ref
->type
.t
== VT_UNION
) {
4005 if (pcc
&& bit_size
>= 0)
4006 size
= (bit_size
+ 7) >> 3;
4011 } else if (bit_size
< 0) {
4013 c
+= (bit_pos
+ 7) >> 3;
4014 c
= (c
+ align
- 1) & -align
;
4023 /* A bit-field. Layout is more complicated. There are two
4024 options: PCC (GCC) compatible and MS compatible */
4026 /* In PCC layout a bit-field is placed adjacent to the
4027 preceding bit-fields, except if:
4029 - an individual alignment was given
4030 - it would overflow its base type container and
4031 there is no packing */
4032 if (bit_size
== 0) {
4034 c
= (c
+ ((bit_pos
+ 7) >> 3) + align
- 1) & -align
;
4036 } else if (f
->a
.aligned
) {
4038 } else if (!packed
) {
4040 int ofs
= ((c
* 8 + bit_pos
) % a8
+ bit_size
+ a8
- 1) / a8
;
4041 if (ofs
> size
/ align
)
4045 /* in pcc mode, long long bitfields have type int if they fit */
4046 if (size
== 8 && bit_size
<= 32)
4047 f
->type
.t
= (f
->type
.t
& ~VT_BTYPE
) | VT_INT
, size
= 4;
4049 while (bit_pos
>= align
* 8)
4050 c
+= align
, bit_pos
-= align
* 8;
4053 /* In PCC layout named bit-fields influence the alignment
4054 of the containing struct using the base types alignment,
4055 except for packed fields (which here have correct align). */
4056 if (f
->v
& SYM_FIRST_ANOM
4057 // && bit_size // ??? gcc on ARM/rpi does that
4062 bt
= f
->type
.t
& VT_BTYPE
;
4063 if ((bit_pos
+ bit_size
> size
* 8)
4064 || (bit_size
> 0) == (bt
!= prevbt
)
4066 c
= (c
+ align
- 1) & -align
;
4069 /* In MS bitfield mode a bit-field run always uses
4070 at least as many bits as the underlying type.
4071 To start a new run it's also required that this
4072 or the last bit-field had non-zero width. */
4073 if (bit_size
|| prev_bit_size
)
4076 /* In MS layout the records alignment is normally
4077 influenced by the field, except for a zero-width
4078 field at the start of a run (but by further zero-width
4079 fields it is again). */
4080 if (bit_size
== 0 && prevbt
!= bt
)
4083 prev_bit_size
= bit_size
;
4086 f
->type
.t
= (f
->type
.t
& ~(0x3f << VT_STRUCT_SHIFT
))
4087 | (bit_pos
<< VT_STRUCT_SHIFT
);
4088 bit_pos
+= bit_size
;
4090 if (align
> maxalign
)
4094 printf("set field %s offset %-2d size %-2d align %-2d",
4095 get_tok_str(f
->v
& ~SYM_FIELD
, NULL
), offset
, size
, align
);
4096 if (f
->type
.t
& VT_BITFIELD
) {
4097 printf(" pos %-2d bits %-2d",
4110 c
+= (bit_pos
+ 7) >> 3;
4112 /* store size and alignment */
4113 a
= bt
= ad
->a
.aligned
? 1 << (ad
->a
.aligned
- 1) : 1;
4117 if (pragma_pack
&& pragma_pack
< maxalign
&& 0 == pcc
) {
4118 /* can happen if individual align for some member was given. In
4119 this case MSVC ignores maxalign when aligning the size */
4124 c
= (c
+ a
- 1) & -a
;
4128 printf("struct size %-2d align %-2d\n\n", c
, a
), fflush(stdout
);
4131 /* check whether we can access bitfields by their type */
4132 for (f
= type
->ref
->next
; f
; f
= f
->next
) {
4136 if (0 == (f
->type
.t
& VT_BITFIELD
))
4140 bit_size
= BIT_SIZE(f
->type
.t
);
4143 bit_pos
= BIT_POS(f
->type
.t
);
4144 size
= type_size(&f
->type
, &align
);
4146 if (bit_pos
+ bit_size
<= size
* 8 && f
->c
+ size
<= c
4147 #ifdef TCC_TARGET_ARM
4148 && !(f
->c
& (align
- 1))
4153 /* try to access the field using a different type */
4154 c0
= -1, s
= align
= 1;
4157 px
= f
->c
* 8 + bit_pos
;
4158 cx
= (px
>> 3) & -align
;
4159 px
= px
- (cx
<< 3);
4162 s
= (px
+ bit_size
+ 7) >> 3;
4172 s
= type_size(&t
, &align
);
4176 if (px
+ bit_size
<= s
* 8 && cx
+ s
<= c
4177 #ifdef TCC_TARGET_ARM
4178 && !(cx
& (align
- 1))
4181 /* update offset and bit position */
4184 f
->type
.t
= (f
->type
.t
& ~(0x3f << VT_STRUCT_SHIFT
))
4185 | (bit_pos
<< VT_STRUCT_SHIFT
);
4189 printf("FIX field %s offset %-2d size %-2d align %-2d "
4190 "pos %-2d bits %-2d\n",
4191 get_tok_str(f
->v
& ~SYM_FIELD
, NULL
),
4192 cx
, s
, align
, px
, bit_size
);
4195 /* fall back to load/store single-byte wise */
4196 f
->auxtype
= VT_STRUCT
;
4198 printf("FIX field %s : load byte-wise\n",
4199 get_tok_str(f
->v
& ~SYM_FIELD
, NULL
));
4205 static void do_Static_assert(void);
4207 /* enum/struct/union declaration. u is VT_ENUM/VT_STRUCT/VT_UNION */
4208 static void struct_decl(CType
*type
, int u
)
4210 int v
, c
, size
, align
, flexible
;
4211 int bit_size
, bsize
, bt
;
4213 AttributeDef ad
, ad1
;
4216 memset(&ad
, 0, sizeof ad
);
4218 parse_attribute(&ad
);
4222 /* struct already defined ? return it */
4224 expect("struct/union/enum name");
4226 if (s
&& (s
->sym_scope
== local_scope
|| tok
!= '{')) {
4229 if (u
== VT_ENUM
&& IS_ENUM(s
->type
.t
))
4231 tcc_error("redefinition of '%s'", get_tok_str(v
, NULL
));
4236 /* Record the original enum/struct/union token. */
4237 type1
.t
= u
== VT_ENUM
? u
| VT_INT
| VT_UNSIGNED
: u
;
4239 /* we put an undefined size for struct/union */
4240 s
= sym_push(v
| SYM_STRUCT
, &type1
, 0, -1);
4241 s
->r
= 0; /* default alignment is zero as gcc */
4243 type
->t
= s
->type
.t
;
4249 tcc_error("struct/union/enum already defined");
4251 /* cannot be empty */
4252 /* non empty enums are not allowed */
4255 long long ll
= 0, pl
= 0, nl
= 0;
4258 /* enum symbols have static storage */
4259 t
.t
= VT_INT
|VT_STATIC
|VT_ENUM_VAL
;
4263 expect("identifier");
4265 if (ss
&& !local_stack
)
4266 tcc_error("redefinition of enumerator '%s'",
4267 get_tok_str(v
, NULL
));
4271 ll
= expr_const64();
4273 ss
= sym_push(v
, &t
, VT_CONST
, 0);
4275 *ps
= ss
, ps
= &ss
->next
;
4284 /* NOTE: we accept a trailing comma */
4289 /* set integral type of the enum */
4292 if (pl
!= (unsigned)pl
)
4293 t
.t
= (LONG_SIZE
==8 ? VT_LLONG
|VT_LONG
: VT_LLONG
);
4295 } else if (pl
!= (int)pl
|| nl
!= (int)nl
)
4296 t
.t
= (LONG_SIZE
==8 ? VT_LLONG
|VT_LONG
: VT_LLONG
);
4297 s
->type
.t
= type
->t
= t
.t
| VT_ENUM
;
4299 /* set type for enum members */
4300 for (ss
= s
->next
; ss
; ss
= ss
->next
) {
4302 if (ll
== (int)ll
) /* default is int if it fits */
4304 if (t
.t
& VT_UNSIGNED
) {
4305 ss
->type
.t
|= VT_UNSIGNED
;
4306 if (ll
== (unsigned)ll
)
4309 ss
->type
.t
= (ss
->type
.t
& ~VT_BTYPE
)
4310 | (LONG_SIZE
==8 ? VT_LLONG
|VT_LONG
: VT_LLONG
);
4315 while (tok
!= '}') {
4316 if (tok
== TOK_STATIC_ASSERT
) {
4320 if (!parse_btype(&btype
, &ad1
, 0)) {
4326 tcc_error("flexible array member '%s' not at the end of struct",
4327 get_tok_str(v
, NULL
));
4333 type_decl(&type1
, &ad1
, &v
, TYPE_DIRECT
);
4335 if ((type1
.t
& VT_BTYPE
) != VT_STRUCT
)
4336 expect("identifier");
4338 int v
= btype
.ref
->v
;
4339 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
4340 if (tcc_state
->ms_extensions
== 0)
4341 expect("identifier");
4345 if (type_size(&type1
, &align
) < 0) {
4346 if ((u
== VT_STRUCT
) && (type1
.t
& VT_ARRAY
) && c
)
4349 tcc_error("field '%s' has incomplete type",
4350 get_tok_str(v
, NULL
));
4352 if ((type1
.t
& VT_BTYPE
) == VT_FUNC
||
4353 (type1
.t
& VT_BTYPE
) == VT_VOID
||
4354 (type1
.t
& VT_STORAGE
))
4355 tcc_error("invalid type for '%s'",
4356 get_tok_str(v
, NULL
));
4360 bit_size
= expr_const();
4361 /* XXX: handle v = 0 case for messages */
4363 tcc_error("negative width in bit-field '%s'",
4364 get_tok_str(v
, NULL
));
4365 if (v
&& bit_size
== 0)
4366 tcc_error("zero width for bit-field '%s'",
4367 get_tok_str(v
, NULL
));
4368 parse_attribute(&ad1
);
4370 size
= type_size(&type1
, &align
);
4371 if (bit_size
>= 0) {
4372 bt
= type1
.t
& VT_BTYPE
;
4378 tcc_error("bitfields must have scalar type");
4380 if (bit_size
> bsize
) {
4381 tcc_error("width of '%s' exceeds its type",
4382 get_tok_str(v
, NULL
));
4383 } else if (bit_size
== bsize
4384 && !ad
.a
.packed
&& !ad1
.a
.packed
) {
4385 /* no need for bit fields */
4387 } else if (bit_size
== 64) {
4388 tcc_error("field width 64 not implemented");
4390 type1
.t
= (type1
.t
& ~VT_STRUCT_MASK
)
4392 | (bit_size
<< (VT_STRUCT_SHIFT
+ 6));
4395 if (v
!= 0 || (type1
.t
& VT_BTYPE
) == VT_STRUCT
) {
4396 /* Remember we've seen a real field to check
4397 for placement of flexible array member. */
4400 /* If member is a struct or bit-field, enforce
4401 placing into the struct (as anonymous). */
4403 ((type1
.t
& VT_BTYPE
) == VT_STRUCT
||
4408 ss
= sym_push(v
| SYM_FIELD
, &type1
, 0, 0);
4413 if (tok
== ';' || tok
== TOK_EOF
)
4420 parse_attribute(&ad
);
4421 if (ad
.cleanup_func
) {
4422 tcc_warning("attribute '__cleanup__' ignored on type");
4424 check_fields(type
, 1);
4425 check_fields(type
, 0);
4426 struct_layout(type
, &ad
);
4428 tcc_debug_fix_anon(tcc_state
, type
);
4433 static void sym_to_attr(AttributeDef
*ad
, Sym
*s
)
4435 merge_symattr(&ad
->a
, &s
->a
);
4436 merge_funcattr(&ad
->f
, &s
->f
);
4439 /* Add type qualifiers to a type. If the type is an array then the qualifiers
4440 are added to the element type, copied because it could be a typedef. */
4441 static void parse_btype_qualify(CType
*type
, int qualifiers
)
4443 while (type
->t
& VT_ARRAY
) {
4444 type
->ref
= sym_push(SYM_FIELD
, &type
->ref
->type
, 0, type
->ref
->c
);
4445 type
= &type
->ref
->type
;
4447 type
->t
|= qualifiers
;
4450 /* return 0 if no type declaration. otherwise, return the basic type
4453 static int parse_btype(CType
*type
, AttributeDef
*ad
, int ignore_label
)
4455 int t
, u
, bt
, st
, type_found
, typespec_found
, g
, n
;
4459 memset(ad
, 0, sizeof(AttributeDef
));
4469 /* currently, we really ignore extension */
4479 if (u
== VT_SHORT
|| u
== VT_LONG
) {
4480 if (st
!= -1 || (bt
!= -1 && bt
!= VT_INT
))
4481 tmbt
: tcc_error("too many basic types");
4484 if (bt
!= -1 || (st
!= -1 && u
!= VT_INT
))
4489 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | u
;
4506 memset(&ad1
, 0, sizeof(AttributeDef
));
4507 if (parse_btype(&type1
, &ad1
, 0)) {
4508 type_decl(&type1
, &ad1
, &n
, TYPE_ABSTRACT
);
4510 n
= 1 << (ad1
.a
.aligned
- 1);
4512 type_size(&type1
, &n
);
4515 if (n
< 0 || (n
& (n
- 1)) != 0)
4516 tcc_error("alignment must be a positive power of two");
4519 ad
->a
.aligned
= exact_log2p1(n
);
4523 if ((t
& VT_BTYPE
) == VT_DOUBLE
) {
4524 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_LDOUBLE
;
4525 } else if ((t
& (VT_BTYPE
|VT_LONG
)) == VT_LONG
) {
4526 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_LLONG
;
4533 #ifdef TCC_TARGET_ARM64
4535 /* GCC's __uint128_t appears in some Linux header files. Make it a
4536 synonym for long double to get the size and alignment right. */
4544 tcc_error("_Complex is not yet supported");
4549 if ((t
& (VT_BTYPE
|VT_LONG
)) == VT_LONG
) {
4550 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_LDOUBLE
;
4558 struct_decl(&type1
, VT_ENUM
);
4561 type
->ref
= type1
.ref
;
4564 struct_decl(&type1
, VT_STRUCT
);
4567 struct_decl(&type1
, VT_UNION
);
4570 /* type modifiers */
4574 parse_btype_qualify(type
, VT_ATOMIC
);
4577 parse_expr_type(&type1
);
4578 /* remove all storage modifiers except typedef */
4579 type1
.t
&= ~(VT_STORAGE
&~VT_TYPEDEF
);
4581 sym_to_attr(ad
, type1
.ref
);
4589 parse_btype_qualify(type
, VT_CONSTANT
);
4597 parse_btype_qualify(type
, VT_VOLATILE
);
4604 if ((t
& (VT_DEFSIGN
|VT_UNSIGNED
)) == (VT_DEFSIGN
|VT_UNSIGNED
))
4605 tcc_error("signed and unsigned modifier");
4618 if ((t
& (VT_DEFSIGN
|VT_UNSIGNED
)) == VT_DEFSIGN
)
4619 tcc_error("signed and unsigned modifier");
4620 t
|= VT_DEFSIGN
| VT_UNSIGNED
;
4636 if (t
& (VT_EXTERN
|VT_STATIC
|VT_TYPEDEF
) & ~g
)
4637 tcc_error("multiple storage classes");
4649 ad
->f
.func_noreturn
= 1;
4651 /* GNUC attribute */
4652 case TOK_ATTRIBUTE1
:
4653 case TOK_ATTRIBUTE2
:
4654 parse_attribute(ad
);
4655 if (ad
->attr_mode
) {
4656 u
= ad
->attr_mode
-1;
4657 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | u
;
4665 parse_expr_type(&type1
);
4666 /* remove all storage modifiers except typedef */
4667 type1
.t
&= ~(VT_STORAGE
&~VT_TYPEDEF
);
4669 sym_to_attr(ad
, type1
.ref
);
4671 case TOK_THREAD_LOCAL
:
4672 tcc_error("_Thread_local is not implemented");
4677 if (!s
|| !(s
->type
.t
& VT_TYPEDEF
))
4681 if (tok
== ':' && ignore_label
) {
4682 /* ignore if it's a label */
4687 t
&= ~(VT_BTYPE
|VT_LONG
);
4688 u
= t
& ~(VT_CONSTANT
| VT_VOLATILE
), t
^= u
;
4689 type
->t
= (s
->type
.t
& ~VT_TYPEDEF
) | u
;
4690 type
->ref
= s
->type
.ref
;
4692 parse_btype_qualify(type
, t
);
4694 /* get attributes from typedef */
4703 if (tcc_state
->char_is_unsigned
) {
4704 if ((t
& (VT_DEFSIGN
|VT_BTYPE
)) == VT_BYTE
)
4707 /* VT_LONG is used just as a modifier for VT_INT / VT_LLONG */
4708 bt
= t
& (VT_BTYPE
|VT_LONG
);
4710 t
|= LONG_SIZE
== 8 ? VT_LLONG
: VT_INT
;
4711 #ifdef TCC_USING_DOUBLE_FOR_LDOUBLE
4712 if (bt
== VT_LDOUBLE
)
4713 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | (VT_DOUBLE
|VT_LONG
);
4719 /* convert a function parameter type (array to pointer and function to
4720 function pointer) */
4721 static inline void convert_parameter_type(CType
*pt
)
4723 /* remove const and volatile qualifiers (XXX: const could be used
4724 to indicate a const function parameter */
4725 pt
->t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
4726 /* array must be transformed to pointer according to ANSI C */
4728 if ((pt
->t
& VT_BTYPE
) == VT_FUNC
) {
4733 ST_FUNC
void parse_asm_str(CString
*astr
)
4736 parse_mult_str(astr
, "string constant");
4739 /* Parse an asm label and return the token */
4740 static int asm_label_instr(void)
4746 parse_asm_str(&astr
);
4749 printf("asm_alias: \"%s\"\n", (char *)astr
.data
);
4751 v
= tok_alloc(astr
.data
, astr
.size
- 1)->tok
;
4756 static int post_type(CType
*type
, AttributeDef
*ad
, int storage
, int td
)
4758 int n
, l
, t1
, arg_size
, align
;
4759 Sym
**plast
, *s
, *first
;
4762 TokenString
*vla_array_tok
= NULL
;
4763 int *vla_array_str
= NULL
;
4766 /* function type, or recursive declarator (return if so) */
4768 if (TYPE_DIRECT
== (td
& (TYPE_DIRECT
|TYPE_ABSTRACT
)))
4772 else if (parse_btype(&pt
, &ad1
, 0))
4774 else if (td
& (TYPE_DIRECT
|TYPE_ABSTRACT
)) {
4775 merge_attr (ad
, &ad1
);
4786 /* read param name and compute offset */
4787 if (l
!= FUNC_OLD
) {
4788 if ((pt
.t
& VT_BTYPE
) == VT_VOID
&& tok
== ')')
4790 type_decl(&pt
, &ad1
, &n
, TYPE_DIRECT
| TYPE_ABSTRACT
| TYPE_PARAM
);
4791 if ((pt
.t
& VT_BTYPE
) == VT_VOID
)
4792 tcc_error("parameter declared as void");
4797 pt
.t
= VT_VOID
; /* invalid type */
4802 expect("identifier");
4803 convert_parameter_type(&pt
);
4804 arg_size
+= (type_size(&pt
, &align
) + PTR_SIZE
- 1) / PTR_SIZE
;
4805 /* these symbols may be evaluated for VLArrays (see below, under
4806 nocode_wanted) which is why we push them here as normal symbols
4807 temporarily. Example: int func(int a, int b[++a]); */
4808 s
= sym_push(n
, &pt
, VT_LOCAL
|VT_LVAL
, 0);
4814 if (l
== FUNC_NEW
&& tok
== TOK_DOTS
) {
4819 if (l
== FUNC_NEW
&& !parse_btype(&pt
, &ad1
, 0))
4820 tcc_error("invalid type");
4823 /* if no parameters, then old type prototype */
4826 /* remove parameter symbols from token table, keep on stack */
4828 sym_pop(local_stack
? &local_stack
: &global_stack
, first
->prev
, 1);
4829 for (s
= first
; s
; s
= s
->next
)
4833 /* NOTE: const is ignored in returned type as it has a special
4834 meaning in gcc / C++ */
4835 type
->t
&= ~VT_CONSTANT
;
4836 /* some ancient pre-K&R C allows a function to return an array
4837 and the array brackets to be put after the arguments, such
4838 that "int c()[]" means something like "int[] c()" */
4841 skip(']'); /* only handle simple "[]" */
4844 /* we push a anonymous symbol which will contain the function prototype */
4845 ad
->f
.func_args
= arg_size
;
4846 ad
->f
.func_type
= l
;
4847 s
= sym_push(SYM_FIELD
, type
, 0, 0);
4853 } else if (tok
== '[') {
4854 int saved_nocode_wanted
= nocode_wanted
;
4855 /* array definition */
4859 if (td
& TYPE_PARAM
) while (1) {
4860 /* XXX The optional type-quals and static should only be accepted
4861 in parameter decls. The '*' as well, and then even only
4862 in prototypes (not function defs). */
4864 case TOK_RESTRICT1
: case TOK_RESTRICT2
: case TOK_RESTRICT3
:
4875 /* Code generation is not done now but has to be done
4876 at start of function. Save code here for later use. */
4878 skip_or_save_block(&vla_array_tok
);
4880 vla_array_str
= vla_array_tok
->str
;
4881 begin_macro(vla_array_tok
, 2);
4890 } else if (tok
!= ']') {
4891 if (!local_stack
|| (storage
& VT_STATIC
))
4892 vpushi(expr_const());
4894 /* VLAs (which can only happen with local_stack && !VT_STATIC)
4895 length must always be evaluated, even under nocode_wanted,
4896 so that its size slot is initialized (e.g. under sizeof
4902 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
4905 tcc_error("invalid array size");
4907 if (!is_integer_btype(vtop
->type
.t
& VT_BTYPE
))
4908 tcc_error("size of variable length array should be an integer");
4914 /* parse next post type */
4915 post_type(type
, ad
, storage
, (td
& ~(TYPE_DIRECT
|TYPE_ABSTRACT
)) | TYPE_NEST
);
4917 if ((type
->t
& VT_BTYPE
) == VT_FUNC
)
4918 tcc_error("declaration of an array of functions");
4919 if ((type
->t
& VT_BTYPE
) == VT_VOID
4920 || type_size(type
, &align
) < 0)
4921 tcc_error("declaration of an array of incomplete type elements");
4923 t1
|= type
->t
& VT_VLA
;
4928 tcc_error("need explicit inner array size in VLAs");
4931 loc
-= type_size(&int_type
, &align
);
4935 vpush_type_size(type
, &align
);
4937 vset(&int_type
, VT_LOCAL
|VT_LVAL
, n
);
4944 nocode_wanted
= saved_nocode_wanted
;
4946 /* we push an anonymous symbol which will contain the array
4948 s
= sym_push(SYM_FIELD
, type
, 0, n
);
4949 type
->t
= (t1
? VT_VLA
: VT_ARRAY
) | VT_PTR
;
4952 if (vla_array_str
) {
4954 s
->vla_array_str
= vla_array_str
;
4956 tok_str_free_str(vla_array_str
);
4962 /* Parse a type declarator (except basic type), and return the type
4963 in 'type'. 'td' is a bitmask indicating which kind of type decl is
4964 expected. 'type' should contain the basic type. 'ad' is the
4965 attribute definition of the basic type. It can be modified by
4966 type_decl(). If this (possibly abstract) declarator is a pointer chain
4967 it returns the innermost pointed to type (equals *type, but is a different
4968 pointer), otherwise returns type itself, that's used for recursive calls. */
4969 static CType
*type_decl(CType
*type
, AttributeDef
*ad
, int *v
, int td
)
4972 int qualifiers
, storage
;
4974 /* recursive type, remove storage bits first, apply them later again */
4975 storage
= type
->t
& VT_STORAGE
;
4976 type
->t
&= ~VT_STORAGE
;
4979 while (tok
== '*') {
4985 qualifiers
|= VT_ATOMIC
;
4990 qualifiers
|= VT_CONSTANT
;
4995 qualifiers
|= VT_VOLATILE
;
5001 /* XXX: clarify attribute handling */
5002 case TOK_ATTRIBUTE1
:
5003 case TOK_ATTRIBUTE2
:
5004 parse_attribute(ad
);
5008 type
->t
|= qualifiers
;
5010 /* innermost pointed to type is the one for the first derivation */
5011 ret
= pointed_type(type
);
5015 /* This is possibly a parameter type list for abstract declarators
5016 ('int ()'), use post_type for testing this. */
5017 if (!post_type(type
, ad
, 0, td
)) {
5018 /* It's not, so it's a nested declarator, and the post operations
5019 apply to the innermost pointed to type (if any). */
5020 /* XXX: this is not correct to modify 'ad' at this point, but
5021 the syntax is not clear */
5022 parse_attribute(ad
);
5023 post
= type_decl(type
, ad
, v
, td
);
5027 } else if (tok
>= TOK_IDENT
&& (td
& TYPE_DIRECT
)) {
5028 /* type identifier */
5033 if (!(td
& TYPE_ABSTRACT
))
5034 expect("identifier");
5037 post_type(post
, ad
, post
!= ret
? 0 : storage
,
5038 td
& ~(TYPE_DIRECT
|TYPE_ABSTRACT
));
5039 parse_attribute(ad
);
5044 /* indirection with full error checking and bound check */
/* Dereference the value on top of the value stack (vtop): vtop must have
   pointer type (VT_PTR); the pointed-to type replaces vtop->type.
   NOTE(review): fragment — original lines 5046, 5049-5051, 5053, 5058 and
   5063-5066 are missing from this extraction. */
5045 ST_FUNC
void indir(void)
/* error path: only a pointer (not a function) may be dereferenced */
5047 if ((vtop
->type
.t
& VT_BTYPE
) != VT_PTR
) {
5048 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FUNC
)
/* load the pointer value itself before replacing the type */
5052 if (vtop
->r
& VT_LVAL
)
5054 vtop
->type
= *pointed_type(&vtop
->type
);
5055 /* Arrays and functions are never lvalues */
5056 if (!(vtop
->type
.t
& (VT_ARRAY
| VT_VLA
))
5057 && (vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
) {
5059 /* if bound checking, the referenced pointer must be checked */
5060 #ifdef CONFIG_TCC_BCHECK
5061 if (tcc_state
->do_bounds_check
)
5062 vtop
->r
|= VT_MUSTBOUND
;
5067 /* pass a parameter to a function and do type checking and casting */
/* func: function symbol (func->f.func_type selects old-style/ellipsis
   handling); arg: declared parameter symbol, or NULL past the declared
   list.  Operates on vtop (the argument value being passed).
   NOTE(review): fragment — original lines 5069-5072, 5082, 5085, 5088-5089
   are missing from this extraction. */
5068 static void gfunc_param_typed(Sym
*func
, Sym
*arg
)
5073 func_type
= func
->f
.func_type
;
/* old-style declaration, or variadic part of a prototype: apply the
   default argument promotions instead of a declared-type cast */
5074 if (func_type
== FUNC_OLD
||
5075 (func_type
== FUNC_ELLIPSIS
&& arg
== NULL
)) {
5076 /* default casting : only need to convert float to double */
5077 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FLOAT
) {
5078 gen_cast_s(VT_DOUBLE
);
/* bit-field argument: widen to its base integer type */
5079 } else if (vtop
->type
.t
& VT_BITFIELD
) {
5080 type
.t
= vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
);
5081 type
.ref
= vtop
->type
.ref
;
/* pending char/short truncation must be materialized before the call */
5083 } else if (vtop
->r
& VT_MUSTCAST
) {
5084 force_charshort_cast();
/* prototyped call with no parameter left to match this argument */
5086 } else if (arg
== NULL
) {
5087 tcc_error("too many arguments to function");
5090 type
.t
&= ~VT_CONSTANT
; /* need to do that to avoid false warning */
5091 gen_assign_cast(&type
);
5095 /* parse an expression and return its type without any side effect. */
/* expr_fn is the parser entry point to use (e.g. gexpr or expr_eq);
   the resulting type is stored in *type.
   NOTE(review): the function body (original lines 5097-5103) is missing
   from this extraction. */
5096 static void expr_type(CType
*type
, void (*expr_fn
)(void))
5105 /* parse an expression of the form '(type)' or '(expr)' and return its
/* type in *type.  If a basic type parses, read a full abstract declarator;
   otherwise evaluate the parenthesized expression for its type only. */
5107 static void parse_expr_type(CType
*type
)
5113 if (parse_btype(type
, &ad
, 0)) {
5114 type_decl(type
, &ad
, &n
, TYPE_ABSTRACT
);
5116 expr_type(type
, gexpr
);
/* Parse a complete (abstract) type name into *type: basic type followed
   by an abstract declarator.  Errors out (via the missing branch at
   original line 5127) if no basic type is present. */
5121 static void parse_type(CType
*type
)
5126 if (!parse_btype(type
, &ad
, 0)) {
5129 type_decl(type
, &ad
, &n
, TYPE_ABSTRACT
);
/* Parse the parenthesized argument list of a __builtin_* call.
   'args' is a template string consumed one code letter per argument;
   'nc' presumably suppresses code generation while parsing — TODO confirm
   (its use is in lines missing from this extraction).
   NOTE(review): fragment — most interior lines (5133-5140, 5142-5175)
   are absent here. */
5132 static void parse_builtin_params(int nc
, const char *args
)
5141 while ((c
= *args
++)) {
5156 type
.t
= VT_CONSTANT
;
5162 type
.t
= VT_CONSTANT
;
5164 type
.t
|= char_type
.t
;
5176 gen_assign_cast(&type
);
5183 static void parse_atomic(int atok
)
5185 int size
, align
, arg
, t
, save
= 0;
5186 CType
*atom
, *atom_ptr
, ct
= {0};
5189 static const char *const templates
[] = {
5191 * Each entry consists of callback and function template.
5192 * The template represents argument types and return type.
5194 * ? void (return-only)
5197 * A read-only atomic
5198 * p pointer to memory
5205 /* keep in order of appearance in tcctok.h: */
5206 /* __atomic_store */ "alm.?",
5207 /* __atomic_load */ "Asm.v",
5208 /* __atomic_exchange */ "alsm.v",
5209 /* __atomic_compare_exchange */ "aplbmm.b",
5210 /* __atomic_fetch_add */ "avm.v",
5211 /* __atomic_fetch_sub */ "avm.v",
5212 /* __atomic_fetch_or */ "avm.v",
5213 /* __atomic_fetch_xor */ "avm.v",
5214 /* __atomic_fetch_and */ "avm.v",
5215 /* __atomic_fetch_nand */ "avm.v",
5216 /* __atomic_and_fetch */ "avm.v",
5217 /* __atomic_sub_fetch */ "avm.v",
5218 /* __atomic_or_fetch */ "avm.v",
5219 /* __atomic_xor_fetch */ "avm.v",
5220 /* __atomic_and_fetch */ "avm.v",
5221 /* __atomic_nand_fetch */ "avm.v"
5223 const char *template = templates
[(atok
- TOK___atomic_store
)];
5225 atom
= atom_ptr
= NULL
;
5226 size
= 0; /* pacify compiler */
5231 switch (template[arg
]) {
5234 atom_ptr
= &vtop
->type
;
5235 if ((atom_ptr
->t
& VT_BTYPE
) != VT_PTR
)
5237 atom
= pointed_type(atom_ptr
);
5238 size
= type_size(atom
, &align
);
5240 || (size
& (size
- 1))
5241 || (atok
> TOK___atomic_compare_exchange
5242 && (0 == btype_size(atom
->t
& VT_BTYPE
)
5243 || (atom
->t
& VT_BTYPE
) == VT_PTR
)))
5244 expect("integral or integer-sized pointer target type");
5245 /* GCC does not care either: */
5246 /* if (!(atom->t & VT_ATOMIC))
5247 tcc_warning("pointer target declaration is missing '_Atomic'"); */
5251 if ((vtop
->type
.t
& VT_BTYPE
) != VT_PTR
5252 || type_size(pointed_type(&vtop
->type
), &align
) != size
)
5253 tcc_error("pointer target type mismatch in argument %d", arg
+ 1);
5254 gen_assign_cast(atom_ptr
);
5257 gen_assign_cast(atom
);
5261 gen_assign_cast(atom
);
5270 gen_assign_cast(&int_type
);
5274 gen_assign_cast(&ct
);
5277 if ('.' == template[++arg
])
5284 switch (template[arg
+ 1]) {
5293 sprintf(buf
, "%s_%d", get_tok_str(atok
, 0), size
);
5294 vpush_helper_func(tok_alloc_const(buf
));
5295 vrott(arg
- save
+ 1);
5296 gfunc_call(arg
- save
);
5299 PUT_R_RET(vtop
, ct
.t
);
5300 t
= ct
.t
& VT_BTYPE
;
5301 if (t
== VT_BYTE
|| t
== VT_SHORT
|| t
== VT_BOOL
) {
5303 vtop
->r
|= BFVAL(VT_MUSTCAST
, 1);
5305 vtop
->type
.t
= VT_INT
;
5317 ST_FUNC
void unary(void)
5319 int n
, t
, align
, size
, r
, sizeof_caller
;
5324 /* generate line number info */
5326 tcc_debug_line(tcc_state
), tcc_tcov_check_line (tcc_state
, 1);
5328 sizeof_caller
= in_sizeof
;
5331 /* XXX: GCC 2.95.3 does not generate a table although it should be
5339 #ifdef TCC_TARGET_PE
5340 t
= VT_SHORT
|VT_UNSIGNED
;
5348 vsetc(&type
, VT_CONST
, &tokc
);
5352 t
= VT_INT
| VT_UNSIGNED
;
5358 t
= VT_LLONG
| VT_UNSIGNED
;
5370 t
= (LONG_SIZE
== 8 ? VT_LLONG
: VT_INT
) | VT_LONG
;
5373 t
= (LONG_SIZE
== 8 ? VT_LLONG
: VT_INT
) | VT_LONG
| VT_UNSIGNED
;
5375 case TOK___FUNCTION__
:
5377 goto tok_identifier
;
5383 /* special function name identifier */
5384 len
= strlen(funcname
) + 1;
5385 /* generate char[len] type */
5386 type
.t
= char_type
.t
;
5387 if (tcc_state
->warn_write_strings
& WARN_ON
)
5388 type
.t
|= VT_CONSTANT
;
5392 sec
= rodata_section
;
5393 vpush_ref(&type
, sec
, sec
->data_offset
, len
);
5395 memcpy(section_ptr_add(sec
, len
), funcname
, len
);
5400 #ifdef TCC_TARGET_PE
5401 t
= VT_SHORT
| VT_UNSIGNED
;
5407 /* string parsing */
5410 if (tcc_state
->warn_write_strings
& WARN_ON
)
5415 memset(&ad
, 0, sizeof(AttributeDef
));
5416 ad
.section
= rodata_section
;
5417 decl_initializer_alloc(&type
, &ad
, VT_CONST
, 2, 0, 0);
5422 if (parse_btype(&type
, &ad
, 0)) {
5423 type_decl(&type
, &ad
, &n
, TYPE_ABSTRACT
);
5425 /* check ISOC99 compound literal */
5427 /* data is allocated locally by default */
5432 /* all except arrays are lvalues */
5433 if (!(type
.t
& VT_ARRAY
))
5435 memset(&ad
, 0, sizeof(AttributeDef
));
5436 decl_initializer_alloc(&type
, &ad
, r
, 1, 0, 0);
5438 if (sizeof_caller
) {
5445 } else if (tok
== '{') {
5446 int saved_nocode_wanted
= nocode_wanted
;
5447 if (const_wanted
&& !(nocode_wanted
& unevalmask
))
5449 if (0 == local_scope
)
5450 tcc_error("statement expression outside of function");
5451 /* save all registers */
5453 /* statement expression : we do not accept break/continue
5454 inside as GCC does. We do retain the nocode_wanted state,
5455 as statement expressions can't ever be entered from the
5456 outside, so any reactivation of code emission (from labels
5457 or loop heads) can be disabled again after the end of it. */
5459 /* If the statement expr can be entered, then we retain the current
5460 nocode_wanted state (from e.g. a 'return 0;' in the stmt-expr).
5461 If it can't be entered then the state is that from before the
5462 statement expression. */
5463 if (saved_nocode_wanted
)
5464 nocode_wanted
= saved_nocode_wanted
;
5479 /* functions names must be treated as function pointers,
5480 except for unary '&' and sizeof. Since we consider that
5481 functions are not lvalues, we only have to handle it
5482 there and in function calls. */
5483 /* arrays can also be used although they are not lvalues */
5484 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
&&
5485 !(vtop
->type
.t
& (VT_ARRAY
| VT_VLA
)))
5488 vtop
->sym
->a
.addrtaken
= 1;
5489 mk_pointer(&vtop
->type
);
5495 gen_test_zero(TOK_EQ
);
5506 if ((vtop
->type
.t
& VT_BTYPE
) == VT_PTR
)
5507 tcc_error("pointer not accepted for unary plus");
5508 /* In order to force cast, we add zero, except for floating point
5509 where we really need an noop (otherwise -0.0 will be transformed
5511 if (!is_float(vtop
->type
.t
)) {
5523 expr_type(&type
, unary
); /* Perform a in_sizeof = 0; */
5524 if (t
== TOK_SIZEOF
) {
5525 vpush_type_size(&type
, &align
);
5526 gen_cast_s(VT_SIZE_T
);
5528 type_size(&type
, &align
);
5530 if (vtop
[1].r
& VT_SYM
)
5531 s
= vtop
[1].sym
; /* hack: accessing previous vtop */
5532 if (s
&& s
->a
.aligned
)
5533 align
= 1 << (s
->a
.aligned
- 1);
5538 case TOK_builtin_expect
:
5539 /* __builtin_expect is a no-op for now */
5540 parse_builtin_params(0, "ee");
5543 case TOK_builtin_types_compatible_p
:
5544 parse_builtin_params(0, "tt");
5545 vtop
[-1].type
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
5546 vtop
[0].type
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
5547 n
= is_compatible_types(&vtop
[-1].type
, &vtop
[0].type
);
5551 case TOK_builtin_choose_expr
:
5578 case TOK_builtin_constant_p
:
5580 parse_builtin_params(1, "e");
5582 (vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
&&
5583 !((vtop
->r
& VT_SYM
) && vtop
->sym
->a
.addrtaken
);
5587 case TOK_builtin_frame_address
:
5588 case TOK_builtin_return_address
:
5594 if (tok
!= TOK_CINT
) {
5595 tcc_error("%s only takes positive integers",
5596 tok1
== TOK_builtin_return_address
?
5597 "__builtin_return_address" :
5598 "__builtin_frame_address");
5600 level
= (uint32_t)tokc
.i
;
5605 vset(&type
, VT_LOCAL
, 0); /* local frame */
5607 #ifdef TCC_TARGET_RISCV64
5611 mk_pointer(&vtop
->type
);
5612 indir(); /* -> parent frame */
5614 if (tok1
== TOK_builtin_return_address
) {
5615 // assume return address is just above frame pointer on stack
5616 #ifdef TCC_TARGET_ARM
5619 #elif defined TCC_TARGET_RISCV64
5626 mk_pointer(&vtop
->type
);
5631 #ifdef TCC_TARGET_RISCV64
5632 case TOK_builtin_va_start
:
5633 parse_builtin_params(0, "ee");
5634 r
= vtop
->r
& VT_VALMASK
;
5638 tcc_error("__builtin_va_start expects a local variable");
5643 #ifdef TCC_TARGET_X86_64
5644 #ifdef TCC_TARGET_PE
5645 case TOK_builtin_va_start
:
5646 parse_builtin_params(0, "ee");
5647 r
= vtop
->r
& VT_VALMASK
;
5651 tcc_error("__builtin_va_start expects a local variable");
5653 vtop
->type
= char_pointer_type
;
5658 case TOK_builtin_va_arg_types
:
5659 parse_builtin_params(0, "t");
5660 vpushi(classify_x86_64_va_arg(&vtop
->type
));
5667 #ifdef TCC_TARGET_ARM64
5668 case TOK_builtin_va_start
: {
5669 parse_builtin_params(0, "ee");
5673 vtop
->type
.t
= VT_VOID
;
5676 case TOK_builtin_va_arg
: {
5677 parse_builtin_params(0, "et");
5685 case TOK___arm64_clear_cache
: {
5686 parse_builtin_params(0, "ee");
5689 vtop
->type
.t
= VT_VOID
;
5694 /* atomic operations */
5695 case TOK___atomic_store
:
5696 case TOK___atomic_load
:
5697 case TOK___atomic_exchange
:
5698 case TOK___atomic_compare_exchange
:
5699 case TOK___atomic_fetch_add
:
5700 case TOK___atomic_fetch_sub
:
5701 case TOK___atomic_fetch_or
:
5702 case TOK___atomic_fetch_xor
:
5703 case TOK___atomic_fetch_and
:
5704 case TOK___atomic_fetch_nand
:
5705 case TOK___atomic_add_fetch
:
5706 case TOK___atomic_sub_fetch
:
5707 case TOK___atomic_or_fetch
:
5708 case TOK___atomic_xor_fetch
:
5709 case TOK___atomic_and_fetch
:
5710 case TOK___atomic_nand_fetch
:
5714 /* pre operations */
5725 if (is_float(vtop
->type
.t
)) {
5735 goto tok_identifier
;
5737 /* allow to take the address of a label */
5738 if (tok
< TOK_UIDENT
)
5739 expect("label identifier");
5740 s
= label_find(tok
);
5742 s
= label_push(&global_label_stack
, tok
, LABEL_FORWARD
);
5744 if (s
->r
== LABEL_DECLARED
)
5745 s
->r
= LABEL_FORWARD
;
5747 if ((s
->type
.t
& VT_BTYPE
) != VT_PTR
) {
5748 s
->type
.t
= VT_VOID
;
5749 mk_pointer(&s
->type
);
5750 s
->type
.t
|= VT_STATIC
;
5752 vpushsym(&s
->type
, s
);
5758 CType controlling_type
;
5759 int has_default
= 0;
5762 TokenString
*str
= NULL
;
5763 int saved_const_wanted
= const_wanted
;
5768 expr_type(&controlling_type
, expr_eq
);
5769 controlling_type
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
| VT_ARRAY
);
5770 if ((controlling_type
.t
& VT_BTYPE
) == VT_FUNC
)
5771 mk_pointer(&controlling_type
);
5772 const_wanted
= saved_const_wanted
;
5776 if (tok
== TOK_DEFAULT
) {
5778 tcc_error("too many 'default'");
5784 AttributeDef ad_tmp
;
5788 parse_btype(&cur_type
, &ad_tmp
, 0);
5789 type_decl(&cur_type
, &ad_tmp
, &itmp
, TYPE_ABSTRACT
);
5790 if (compare_types(&controlling_type
, &cur_type
, 0)) {
5792 tcc_error("type match twice");
5802 skip_or_save_block(&str
);
5804 skip_or_save_block(NULL
);
5811 type_to_str(buf
, sizeof buf
, &controlling_type
, NULL
);
5812 tcc_error("type '%s' does not match any association", buf
);
5814 begin_macro(str
, 1);
5823 // special qnan , snan and infinity values
5828 vtop
->type
.t
= VT_FLOAT
;
5833 goto special_math_val
;
5836 goto special_math_val
;
5843 expect("identifier");
5845 if (!s
|| IS_ASM_SYM(s
)) {
5846 const char *name
= get_tok_str(t
, NULL
);
5848 tcc_error("'%s' undeclared", name
);
5849 /* for simple function calls, we tolerate undeclared
5850 external reference to int() function */
5851 tcc_warning_c(warn_implicit_function_declaration
)(
5852 "implicit declaration of function '%s'", name
);
5853 s
= external_global_sym(t
, &func_old_type
);
5857 /* A symbol that has a register is a local register variable,
5858 which starts out as VT_LOCAL value. */
5859 if ((r
& VT_VALMASK
) < VT_CONST
)
5860 r
= (r
& ~VT_VALMASK
) | VT_LOCAL
;
5862 vset(&s
->type
, r
, s
->c
);
5863 /* Point to s as backpointer (even without r&VT_SYM).
5864 Will be used by at least the x86 inline asm parser for
5870 } else if (r
== VT_CONST
&& IS_ENUM_VAL(s
->type
.t
)) {
5871 vtop
->c
.i
= s
->enum_val
;
5876 /* post operations */
5878 if (tok
== TOK_INC
|| tok
== TOK_DEC
) {
5881 } else if (tok
== '.' || tok
== TOK_ARROW
|| tok
== TOK_CDOUBLE
) {
5882 int qualifiers
, cumofs
= 0;
5884 if (tok
== TOK_ARROW
)
5886 qualifiers
= vtop
->type
.t
& (VT_CONSTANT
| VT_VOLATILE
);
5889 /* expect pointer on structure */
5890 if ((vtop
->type
.t
& VT_BTYPE
) != VT_STRUCT
)
5891 expect("struct or union");
5892 if (tok
== TOK_CDOUBLE
)
5893 expect("field name");
5895 if (tok
== TOK_CINT
|| tok
== TOK_CUINT
)
5896 expect("field name");
5897 s
= find_field(&vtop
->type
, tok
, &cumofs
);
5899 tcc_error("field not found: %s", get_tok_str(tok
& ~SYM_FIELD
, &tokc
));
5900 /* add field offset to pointer */
5901 vtop
->type
= char_pointer_type
; /* change type to 'char *' */
5902 vpushi(cumofs
+ s
->c
);
5904 /* change type to field type, and set to lvalue */
5905 vtop
->type
= s
->type
;
5906 vtop
->type
.t
|= qualifiers
;
5907 /* an array is never an lvalue */
5908 if (!(vtop
->type
.t
& VT_ARRAY
)) {
5910 #ifdef CONFIG_TCC_BCHECK
5911 /* if bound checking, the referenced pointer must be checked */
5912 if (tcc_state
->do_bounds_check
)
5913 vtop
->r
|= VT_MUSTBOUND
;
5917 } else if (tok
== '[') {
5923 } else if (tok
== '(') {
5926 int nb_args
, ret_nregs
, ret_align
, regsize
, variadic
;
5929 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
) {
5930 /* pointer test (no array accepted) */
5931 if ((vtop
->type
.t
& (VT_BTYPE
| VT_ARRAY
)) == VT_PTR
) {
5932 vtop
->type
= *pointed_type(&vtop
->type
);
5933 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
)
5937 expect("function pointer");
5940 vtop
->r
&= ~VT_LVAL
; /* no lvalue */
5942 /* get return type */
5945 sa
= s
->next
; /* first parameter */
5946 nb_args
= regsize
= 0;
5948 /* compute first implicit argument if a structure is returned */
5949 if ((s
->type
.t
& VT_BTYPE
) == VT_STRUCT
) {
5950 variadic
= (s
->f
.func_type
== FUNC_ELLIPSIS
);
5951 ret_nregs
= gfunc_sret(&s
->type
, variadic
, &ret
.type
,
5952 &ret_align
, ®size
);
5953 if (ret_nregs
<= 0) {
5954 /* get some space for the returned structure */
5955 size
= type_size(&s
->type
, &align
);
5956 #ifdef TCC_TARGET_ARM64
5957 /* On arm64, a small struct is return in registers.
5958 It is much easier to write it to memory if we know
5959 that we are allowed to write some extra bytes, so
5960 round the allocated space up to a power of 2: */
5962 while (size
& (size
- 1))
5963 size
= (size
| (size
- 1)) + 1;
5965 loc
= (loc
- size
) & -align
;
5967 ret
.r
= VT_LOCAL
| VT_LVAL
;
5968 /* pass it as 'int' to avoid structure arg passing
5970 vseti(VT_LOCAL
, loc
);
5971 #ifdef CONFIG_TCC_BCHECK
5972 if (tcc_state
->do_bounds_check
)
5986 if (ret_nregs
> 0) {
5987 /* return in register */
5989 PUT_R_RET(&ret
, ret
.type
.t
);
5994 gfunc_param_typed(s
, sa
);
6004 tcc_error("too few arguments to function");
6006 gfunc_call(nb_args
);
6008 if (ret_nregs
< 0) {
6009 vsetc(&ret
.type
, ret
.r
, &ret
.c
);
6010 #ifdef TCC_TARGET_RISCV64
6011 arch_transfer_ret_regs(1);
6015 for (r
= ret
.r
+ ret_nregs
+ !ret_nregs
; r
-- > ret
.r
;) {
6016 vsetc(&ret
.type
, r
, &ret
.c
);
6017 vtop
->r2
= ret
.r2
; /* Loop only happens when r2 is VT_CONST */
6020 /* handle packed struct return */
6021 if (((s
->type
.t
& VT_BTYPE
) == VT_STRUCT
) && ret_nregs
) {
6024 size
= type_size(&s
->type
, &align
);
6025 /* We're writing whole regs often, make sure there's enough
6026 space. Assume register size is power of 2. */
6027 if (regsize
> align
)
6029 loc
= (loc
- size
) & -align
;
6033 vset(&ret
.type
, VT_LOCAL
| VT_LVAL
, addr
+ offset
);
6037 if (--ret_nregs
== 0)
6041 vset(&s
->type
, VT_LOCAL
| VT_LVAL
, addr
);
6044 /* Promote char/short return values. This is matters only
6045 for calling function that were not compiled by TCC and
6046 only on some architectures. For those where it doesn't
6047 matter we expect things to be already promoted to int,
6049 t
= s
->type
.t
& VT_BTYPE
;
6050 if (t
== VT_BYTE
|| t
== VT_SHORT
|| t
== VT_BOOL
) {
6052 vtop
->r
|= BFVAL(VT_MUSTCAST
, 1);
6054 vtop
->type
.t
= VT_INT
;
6058 if (s
->f
.func_noreturn
) {
6060 tcc_tcov_block_end(tcc_state
, -1);
6069 #ifndef precedence_parser /* original top-down parser */
/* Classic recursive-descent binary-operator ladder, one function per
   precedence level, each looping while the current token matches its
   operators.  NOTE(review): fragment — the loop bodies (calls to the
   next-lower level and gen_op) fall on lines missing from this extraction. */
/* multiplicative: * / % */
6071 static void expr_prod(void)
6076 while ((t
= tok
) == '*' || t
== '/' || t
== '%') {
/* additive: + - */
6083 static void expr_sum(void)
6088 while ((t
= tok
) == '+' || t
== '-') {
/* shifts: << >> */
6095 static void expr_shift(void)
6100 while ((t
= tok
) == TOK_SHL
|| t
== TOK_SAR
) {
/* relational: <= >= < > plus unsigned variants */
6107 static void expr_cmp(void)
6112 while (((t
= tok
) >= TOK_ULE
&& t
<= TOK_GT
) ||
6113 t
== TOK_ULT
|| t
== TOK_UGE
) {
/* equality: == != */
6120 static void expr_cmpeq(void)
6125 while ((t
= tok
) == TOK_EQ
|| t
== TOK_NE
) {
/* bitwise and */
6132 static void expr_and(void)
6135 while (tok
== '&') {
/* bitwise xor */
6142 static void expr_xor(void)
6145 while (tok
== '^') {
/* bitwise or */
6152 static void expr_or(void)
6155 while (tok
== '|') {
6162 static void expr_landor(int op
);
/* logical && — short-circuit handling done by expr_landor */
6164 static void expr_land(void)
6167 if (tok
== TOK_LAND
)
/* logical || — top of the ladder below the conditional operator */
6171 static void expr_lor(void)
6178 # define expr_landor_next(op) op == TOK_LAND ? expr_or() : expr_land()
6179 #else /* defined precedence_parser */
6180 # define expr_landor_next(op) unary(), expr_infix(precedence(op) + 1)
6181 # define expr_lor() unary(), expr_infix(1)
6183 static int precedence(int tok
)
6186 case TOK_LOR
: return 1;
6187 case TOK_LAND
: return 2;
6191 case TOK_EQ
: case TOK_NE
: return 6;
6192 relat
: case TOK_ULT
: case TOK_UGE
: return 7;
6193 case TOK_SHL
: case TOK_SAR
: return 8;
6194 case '+': case '-': return 9;
6195 case '*': case '/': case '%': return 10;
6197 if (tok
>= TOK_ULE
&& tok
<= TOK_GT
)
6202 static unsigned char prec
[256];
6203 static void init_prec(void)
6206 for (i
= 0; i
< 256; i
++)
6207 prec
[i
] = precedence(i
);
6209 #define precedence(i) ((unsigned)i < 256 ? prec[i] : 0)
6211 static void expr_landor(int op
);
6213 static void expr_infix(int p
)
6216 while ((p2
= precedence(t
)) >= p
) {
6217 if (t
== TOK_LOR
|| t
== TOK_LAND
) {
6222 if (precedence(tok
) > p2
)
6231 /* Assuming vtop is a value used in a conditional context
6232 (i.e. compared with zero) return 0 if it's false, 1 if
6233 true and -1 if it can't be statically determined. */
/* Constant-foldable only when vtop is a plain constant and not a weak
   symbol (a weak symbol's address may legitimately be null at runtime). */
6234 static int condition_3way(void)
6237 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
&&
6238 (!(vtop
->r
& VT_SYM
) || !vtop
->sym
->a
.weak
)) {
/* normalize to 0/1 before reading the constant */
6240 gen_cast_s(VT_BOOL
);
/* Generate code for a chain of '&&' (op == TOK_LAND) or '||' operands
   with C short-circuit semantics.  'i' is the operator's neutral result
   (1 for &&, 0 for ||); once an operand statically decides the outcome,
   remaining operands are parsed with nocode_wanted raised so no code is
   emitted for them.  NOTE(review): fragment — several lines of the loop
   are missing from this extraction. */
6247 static void expr_landor(int op
)
6249 int t
= 0, cc
= 1, f
= 0, i
= op
== TOK_LAND
, c
;
/* once short-circuited (f set), the chain's value is the neutral 'i' */
6251 c
= f
? i
: condition_3way();
/* runtime-determined operand: registers must be saved across the jump */
6253 save_regs(1), cc
= 0;
/* statically decided: evaluate the rest without emitting code */
6255 nocode_wanted
++, f
= 1;
/* parse the next operand at the next-lower precedence level */
6263 expr_landor_next(op
);
/* Return nonzero if *sv is already a boolean-like value usable directly
   in a conditional: either an int constant 0/1 (no symbol, no lvalue),
   or a pending comparison result (VT_CMP). */
6275 static int is_cond_bool(SValue
*sv
)
6277 if ((sv
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
6278 && (sv
->type
.t
& VT_BTYPE
) == VT_INT
)
/* unsigned compare: true exactly for the constants 0 and 1 */
6279 return (unsigned)sv
->c
.i
< 2;
6280 if (sv
->r
== VT_CMP
)
6285 static void expr_cond(void)
6287 int tt
, u
, r1
, r2
, rc
, t1
, t2
, islv
, c
, g
;
6295 c
= condition_3way();
6296 g
= (tok
== ':' && gnu_ext
);
6306 /* needed to avoid having different registers saved in
6313 ncw_prev
= nocode_wanted
;
6319 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FUNC
)
6320 mk_pointer(&vtop
->type
);
6321 sv
= *vtop
; /* save value to handle it later */
6322 vtop
--; /* no vpop so that FP stack is not flushed */
6332 nocode_wanted
= ncw_prev
;
6338 if (c
< 0 && is_cond_bool(vtop
) && is_cond_bool(&sv
)) {
6339 /* optimize "if (f ? a > b : c || d) ..." for example, where normally
6340 "a < b" and "c || d" would be forced to "(int)0/1" first, whereas
6341 this code jumps directly to the if's then/else branches. */
6346 /* combine jump targets of 2nd op with VT_CMP of 1st op */
6349 nocode_wanted
= ncw_prev
;
6350 // tcc_warning("two conditions expr_cond");
6354 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FUNC
)
6355 mk_pointer(&vtop
->type
);
6357 /* cast operands to correct type according to ISOC rules */
6358 if (!combine_types(&type
, &sv
, vtop
, '?'))
6359 type_incompatibility_error(&sv
.type
, &vtop
->type
,
6360 "type mismatch in conditional expression (have '%s' and '%s')");
6361 /* keep structs lvalue by transforming `(expr ? a : b)` to `*(expr ? &a : &b)` so
6362 that `(expr ? a : b).mem` does not error with "lvalue expected" */
6363 islv
= (vtop
->r
& VT_LVAL
) && (sv
.r
& VT_LVAL
) && VT_STRUCT
== (type
.t
& VT_BTYPE
);
6365 /* now we convert second operand */
6369 mk_pointer(&vtop
->type
);
6371 } else if (VT_STRUCT
== (vtop
->type
.t
& VT_BTYPE
))
6375 rc
= RC_TYPE(type
.t
);
6376 /* for long longs, we use fixed registers to avoid having
6377 to handle a complicated move */
6378 if (USING_TWO_WORDS(type
.t
))
6379 rc
= RC_RET(type
.t
);
6387 nocode_wanted
= ncw_prev
;
6389 /* this is horrible, but we must also convert first
6395 mk_pointer(&vtop
->type
);
6397 } else if (VT_STRUCT
== (vtop
->type
.t
& VT_BTYPE
))
6403 move_reg(r2
, r1
, islv
? VT_PTR
: type
.t
);
/* Parse an assignment-expression: a conditional expression optionally
   followed by '=' or a compound assignment operator (+=, -=, ...).
   For compound assignments, the operation is applied via gen_op before
   storing.  NOTE(review): fragment — lines 6414-6417 and 6419-6425 are
   missing from this extraction. */
6413 static void expr_eq(void)
6418 if ((t
= tok
) == '=' || TOK_ASSIGN(t
)) {
/* compound assignment: compute lhs OP rhs, then store */
6426 gen_op(TOK_ASSIGN_OP(t
));
/* Parse a full (comma) expression.  Maintains the global constant_p
   flag: it stays set only while every sub-expression so far is a plain
   constant that is not an address-taken symbol. */
6432 ST_FUNC
void gexpr(void)
6438 constant_p
&= (vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
&&
6439 !((vtop
->r
& VT_SYM
) && vtop
->sym
->a
.addrtaken
);
6445 /* parse a constant expression and return value in vtop. */
/* Raising nocode_wanted by unevalmask+1 marks the whole expression as
   unevaluated, so no code is generated while folding it; the matching
   decrement restores the previous state. */
6446 static void expr_const1(void)
6449 nocode_wanted
+= unevalmask
+ 1;
6451 nocode_wanted
-= unevalmask
+ 1;
6455 /* parse an integer constant and return its value. */
/* After folding, the result must be a pure constant: no register, no
   lvalue, no relocation symbol — otherwise it is not a valid integer
   constant expression. */
6456 static inline int64_t expr_const64(void)
6460 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) != VT_CONST
)
6461 expect("constant expression");
6467 /* parse an integer constant and return its value.
6468 Complain if it doesn't fit 32bit (signed or unsigned). */
6469 ST_FUNC
int expr_const(void)
6472 int64_t wc
= expr_const64();
/* accept values representable as either int32 or uint32: the truncated
   value must round-trip under one of the two interpretations */
6474 if (c
!= wc
&& (unsigned)c
!= wc
)
6475 tcc_error("constant exceeds 32 bit");
6479 /* ------------------------------------------------------------------------- */
6480 /* return from function */
6482 #ifndef TCC_TARGET_ARM64
6483 static void gfunc_return(CType
*func_type
)
6485 if ((func_type
->t
& VT_BTYPE
) == VT_STRUCT
) {
6486 CType type
, ret_type
;
6487 int ret_align
, ret_nregs
, regsize
;
6488 ret_nregs
= gfunc_sret(func_type
, func_var
, &ret_type
,
6489 &ret_align
, ®size
);
6490 if (ret_nregs
< 0) {
6491 #ifdef TCC_TARGET_RISCV64
6492 arch_transfer_ret_regs(0);
6494 } else if (0 == ret_nregs
) {
6495 /* if returning structure, must copy it to implicit
6496 first pointer arg location */
6499 vset(&type
, VT_LOCAL
| VT_LVAL
, func_vc
);
6502 /* copy structure value to pointer */
6505 /* returning structure packed into registers */
6506 int size
, addr
, align
, rc
;
6507 size
= type_size(func_type
,&align
);
6508 if ((vtop
->r
!= (VT_LOCAL
| VT_LVAL
) ||
6509 (vtop
->c
.i
& (ret_align
-1)))
6510 && (align
& (ret_align
-1))) {
6511 loc
= (loc
- size
) & -ret_align
;
6514 vset(&type
, VT_LOCAL
| VT_LVAL
, addr
);
6518 vset(&ret_type
, VT_LOCAL
| VT_LVAL
, addr
);
6520 vtop
->type
= ret_type
;
6521 rc
= RC_RET(ret_type
.t
);
6529 if (--ret_nregs
== 0)
6531 /* We assume that when a structure is returned in multiple
6532 registers, their classes are consecutive values of the
6535 vtop
->c
.i
+= regsize
;
6540 gv(RC_RET(func_type
->t
));
6542 vtop
--; /* NOT vpop() because on x86 it would flush the fp stack */
/* Called at the end of a function body when control can reach the
   closing brace: for main() returning int, synthesize 'return 0' as C99
   requires; for any other non-void function, only warn. */
6546 static void check_func_return(void)
/* nothing to do for void functions */
6548 if ((func_vt
.t
& VT_BTYPE
) == VT_VOID
)
6550 if (!strcmp (funcname
, "main")
6551 && (func_vt
.t
& VT_BTYPE
) == VT_INT
) {
6552 /* main returns 0 by default */
6554 gen_assign_cast(&func_vt
);
6555 gfunc_return(&func_vt
);
6557 tcc_warning("function might return no value: '%s'", funcname
);
6561 /* ------------------------------------------------------------------------- */
/* qsort comparator: order case_t entries by v1 ascending, comparing as
   signed 64-bit values (used for signed switch expressions). */
6564 static int case_cmpi(const void *pa
, const void *pb
)
6566 int64_t a
= (*(struct case_t
**) pa
)->v1
;
6567 int64_t b
= (*(struct case_t
**) pb
)->v1
;
/* -1 / 0 / 1 without overflow risk from subtraction */
6568 return a
< b
? -1 : a
> b
;
/* qsort comparator: order case_t entries by v1 ascending, comparing as
   unsigned 64-bit values (used for unsigned switch expressions). */
6571 static int case_cmpu(const void *pa
, const void *pb
)
6573 uint64_t a
= (uint64_t)(*(struct case_t
**) pa
)->v1
;
6574 uint64_t b
= (uint64_t)(*(struct case_t
**) pb
)->v1
;
/* -1 / 0 / 1 without overflow risk from subtraction */
6575 return a
< b
? -1 : a
> b
;
6578 static void gtst_addr(int t
, int a
)
6580 gsym_addr(gvtst(0, t
), a
);
6583 static void gcase(struct case_t
**base
, int len
, int *bsym
)
6587 int ll
= (vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
;
6604 gtst_addr(0, p
->sym
); /* v1 <= x <= v2 */
6606 gcase(base
, len
/2, bsym
);
6610 base
+= e
; len
-= e
;
6620 if (p
->v1
== p
->v2
) {
6622 gtst_addr(0, p
->sym
);
6632 gtst_addr(0, p
->sym
);
6636 *bsym
= gjmp(*bsym
);
6639 /* ------------------------------------------------------------------------- */
6640 /* __attribute__((cleanup(fn))) */
/* Walk the current scope's cleanup chain (cur_scope->cl.s) and, for each
   entry until 'stop', emit a call of the cleanup function (cls->next)
   with the address of its guarded variable (cls->prev_tok).
   NOTE(review): fragment — the lines emitting the actual call after
   mk_pointer are missing from this extraction. */
6642 static void try_call_scope_cleanup(Sym
*stop
)
6644 Sym
*cls
= cur_scope
->cl
.s
;
6646 for (; cls
!= stop
; cls
= cls
->ncl
) {
6647 Sym
*fs
= cls
->next
;
6648 Sym
*vs
= cls
->prev_tok
;
/* push the cleanup function, then the variable it guards */
6650 vpushsym(&fs
->type
, fs
);
6651 vset(&vs
->type
, vs
->r
, vs
->c
);
/* the cleanup function receives a pointer to the variable */
6653 mk_pointer(&vtop
->type
);
6659 static void try_call_cleanup_goto(Sym
*cleanupstate
)
6664 if (!cur_scope
->cl
.s
)
6667 /* search NCA of both cleanup chains given parents and initial depth */
6668 ocd
= cleanupstate
? cleanupstate
->v
& ~SYM_FIELD
: 0;
6669 for (ccd
= cur_scope
->cl
.n
, oc
= cleanupstate
; ocd
> ccd
; --ocd
, oc
= oc
->ncl
)
6671 for (cc
= cur_scope
->cl
.s
; ccd
> ocd
; --ccd
, cc
= cc
->ncl
)
6673 for (; cc
!= oc
; cc
= cc
->ncl
, oc
= oc
->ncl
, --ccd
)
6676 try_call_scope_cleanup(cc
);
6679 /* call 'func' for each __attribute__((cleanup(func))) */
6680 static void block_cleanup(struct scope
*o
)
6684 for (pg
= &pending_gotos
; (g
= *pg
) && g
->c
> o
->cl
.n
;) {
6685 if (g
->prev_tok
->r
& LABEL_FORWARD
) {
6690 try_call_scope_cleanup(o
->cl
.s
);
6691 pcl
->jnext
= gjmp(0);
6693 goto remove_pending
;
6703 try_call_scope_cleanup(o
->cl
.s
);
6706 /* ------------------------------------------------------------------------- */
/* Restore the stack pointer from the saved location 'loc' when leaving
   a scope that allocated variable-length arrays. */
6709 static void vla_restore(int loc
)
6712 gen_vla_sp_restore(loc
);
/* When leaving scopes up to (but not including) 'o', find the outermost
   scope 'v' in that range that made VLA allocations and restore the
   stack pointer it saved.  NOTE(review): fragment — the loop body that
   sets 'v' (original lines 6719-6721) is missing from this extraction. */
6715 static void vla_leave(struct scope
*o
)
6717 struct scope
*c
= cur_scope
, *v
= NULL
;
6718 for (; c
!= o
&& c
; c
= c
->prev
)
6722 vla_restore(v
->vla
.locorig
);
6725 /* ------------------------------------------------------------------------- */
/* Enter a new block scope: link *o onto the scope chain, reset its VLA
   counter, and snapshot the local symbol and label stacks so prev_scope
   can pop back to them.  Emits an N_LBRAC stab for debug info. */
6728 static void new_scope(struct scope
*o
)
6730 /* copy and link previous scope */
6732 o
->prev
= cur_scope
;
6734 cur_scope
->vla
.num
= 0;
6736 /* record local declaration stack position */
6737 o
->lstk
= local_stack
;
6738 o
->llstk
= local_label_stack
;
/* debug: mark block start relative to function entry */
6742 tcc_debug_stabn(tcc_state
, N_LBRAC
, ind
- func_ind
);
/* Leave block scope *o: run its pending __attribute__((cleanup)) calls,
   pop its local labels and symbols, and restore cur_scope.  'is_expr'
   is set when the block is a GNU statement expression (see comment at
   original lines 6755-6761 below).  Emits an N_RBRAC stab. */
6745 static void prev_scope(struct scope
*o
, int is_expr
)
/* only run cleanups if this scope actually registered any */
6749 if (o
->cl
.s
!= o
->prev
->cl
.s
)
6750 block_cleanup(o
->prev
);
6752 /* pop locally defined labels */
6753 label_pop(&local_label_stack
, o
->llstk
, is_expr
);
6755 /* In the is_expr case (a statement expression is finished here),
6756 vtop might refer to symbols on the local_stack. Either via the
6757 type or via vtop->sym. We can't pop those nor any that in turn
6758 might be referred to. To make it easier we don't roll back
6759 any symbols in that case; some upper level call to block() will
6760 do that. We do have to remove such symbols from the lookup
6761 tables, though. sym_pop will do that. */
6763 /* pop locally defined symbols */
6764 pop_local_syms(o
->lstk
, is_expr
);
6765 cur_scope
= o
->prev
;
/* debug: mark block end relative to function entry */
6769 tcc_debug_stabn(tcc_state
, N_RBRAC
, ind
- func_ind
);
6772 /* leave a scope via break/continue(/goto) */
/* Unlike prev_scope, this only runs the cleanup handlers down to scope
   *o — the symbol/label stacks are left alone because control merely
   jumps out, the scope is not syntactically closed here. */
6773 static void leave_scope(struct scope
*o
)
6777 try_call_scope_cleanup(o
->cl
.s
);
6781 /* ------------------------------------------------------------------------- */
6782 /* call block from 'for do while' loops */
/* Parse a loop body: temporarily install bsym/csym as the current
   break/continue jump targets and mark the current scope as the loop
   scope, restoring the previous targets afterwards.
   NOTE(review): fragment — the lines installing/restoring the targets
   (original 6788-6799) are missing from this extraction. */
6784 static void lblock(int *bsym
, int *csym
)
6786 struct scope
*lo
= loop_scope
, *co
= cur_scope
;
/* remember the enclosing loop's break/continue chains */
6787 int *b
= co
->bsym
, *c
= co
->csym
;
6801 static void block(int is_expr
)
6803 int a
, b
, c
, d
, e
, t
;
6808 /* default return value is (void) */
6810 vtop
->type
.t
= VT_VOID
;
6815 /* If the token carries a value, next() might destroy it. Only with
6816 invalid code such as f(){"123"4;} */
6817 if (TOK_HAS_VALUE(t
))
6822 tcc_tcov_check_line (tcc_state
, 0), tcc_tcov_block_begin (tcc_state
);
6830 if (tok
== TOK_ELSE
) {
6835 gsym(d
); /* patch else jmp */
6840 } else if (t
== TOK_WHILE
) {
6852 } else if (t
== '{') {
6855 /* handle local labels declarations */
6856 while (tok
== TOK_LABEL
) {
6859 if (tok
< TOK_UIDENT
)
6860 expect("label identifier");
6861 label_push(&local_label_stack
, tok
, LABEL_DECLARED
);
6863 } while (tok
== ',');
6867 while (tok
!= '}') {
6876 prev_scope(&o
, is_expr
);
6879 else if (!nocode_wanted
)
6880 check_func_return();
6882 } else if (t
== TOK_RETURN
) {
6883 b
= (func_vt
.t
& VT_BTYPE
) != VT_VOID
;
6887 gen_assign_cast(&func_vt
);
6889 if (vtop
->type
.t
!= VT_VOID
)
6890 tcc_warning("void function returns a value");
6894 tcc_warning("'return' with no value");
6897 leave_scope(root_scope
);
6899 gfunc_return(&func_vt
);
6901 /* jump unless last stmt in top-level block */
6902 if (tok
!= '}' || local_scope
!= 1)
6905 tcc_tcov_block_end (tcc_state
, -1);
6908 } else if (t
== TOK_BREAK
) {
6910 if (!cur_scope
->bsym
)
6911 tcc_error("cannot break");
6912 if (cur_switch
&& cur_scope
->bsym
== cur_switch
->bsym
)
6913 leave_scope(cur_switch
->scope
);
6915 leave_scope(loop_scope
);
6916 *cur_scope
->bsym
= gjmp(*cur_scope
->bsym
);
6919 } else if (t
== TOK_CONTINUE
) {
6921 if (!cur_scope
->csym
)
6922 tcc_error("cannot continue");
6923 leave_scope(loop_scope
);
6924 *cur_scope
->csym
= gjmp(*cur_scope
->csym
);
6927 } else if (t
== TOK_FOR
) {
6932 /* c99 for-loop init decl? */
6933 if (!decl(VT_JMP
)) {
6934 /* no, regular for-loop init expr */
6962 } else if (t
== TOK_DO
) {
6976 } else if (t
== TOK_SWITCH
) {
6977 struct switch_t
*sw
;
6979 sw
= tcc_mallocz(sizeof *sw
);
6981 sw
->scope
= cur_scope
;
6982 sw
->prev
= cur_switch
;
6983 sw
->nocode_wanted
= nocode_wanted
;
6989 sw
->sv
= *vtop
--; /* save switch value */
6992 b
= gjmp(0); /* jump to first case */
6994 a
= gjmp(a
); /* add implicit break */
6998 if (sw
->nocode_wanted
)
7000 if (sw
->sv
.type
.t
& VT_UNSIGNED
)
7001 qsort(sw
->p
, sw
->n
, sizeof(void*), case_cmpu
);
7003 qsort(sw
->p
, sw
->n
, sizeof(void*), case_cmpi
);
7004 for (b
= 1; b
< sw
->n
; b
++)
7005 if (sw
->sv
.type
.t
& VT_UNSIGNED
7006 ? (uint64_t)sw
->p
[b
- 1]->v2
>= (uint64_t)sw
->p
[b
]->v1
7007 : sw
->p
[b
- 1]->v2
>= sw
->p
[b
]->v1
)
7008 tcc_error("duplicate case value");
7011 d
= 0, gcase(sw
->p
, sw
->n
, &d
);
7014 gsym_addr(d
, sw
->def_sym
);
7021 dynarray_reset(&sw
->p
, &sw
->n
);
7022 cur_switch
= sw
->prev
;
7025 } else if (t
== TOK_CASE
) {
7026 struct case_t
*cr
= tcc_malloc(sizeof(struct case_t
));
7029 cr
->v1
= cr
->v2
= expr_const64();
7030 if (gnu_ext
&& tok
== TOK_DOTS
) {
7032 cr
->v2
= expr_const64();
7033 if ((!(cur_switch
->sv
.type
.t
& VT_UNSIGNED
) && cr
->v2
< cr
->v1
)
7034 || (cur_switch
->sv
.type
.t
& VT_UNSIGNED
&& (uint64_t)cr
->v2
< (uint64_t)cr
->v1
))
7035 tcc_warning("empty case range");
7037 /* case and default are unreachable from a switch under nocode_wanted */
7038 if (!cur_switch
->nocode_wanted
)
7040 dynarray_add(&cur_switch
->p
, &cur_switch
->n
, cr
);
7043 goto block_after_label
;
7045 } else if (t
== TOK_DEFAULT
) {
7048 if (cur_switch
->def_sym
)
7049 tcc_error("too many 'default'");
7050 cur_switch
->def_sym
= cur_switch
->nocode_wanted
? 1 : gind();
7053 goto block_after_label
;
7055 } else if (t
== TOK_GOTO
) {
7056 if (cur_scope
->vla
.num
)
7057 vla_restore(cur_scope
->vla
.locorig
);
7058 if (tok
== '*' && gnu_ext
) {
7062 if ((vtop
->type
.t
& VT_BTYPE
) != VT_PTR
)
7066 } else if (tok
>= TOK_UIDENT
) {
7067 s
= label_find(tok
);
7068 /* put forward definition if needed */
7070 s
= label_push(&global_label_stack
, tok
, LABEL_FORWARD
);
7071 else if (s
->r
== LABEL_DECLARED
)
7072 s
->r
= LABEL_FORWARD
;
7074 if (s
->r
& LABEL_FORWARD
) {
7075 /* start new goto chain for cleanups, linked via label->next */
7076 if (cur_scope
->cl
.s
&& !nocode_wanted
) {
7077 sym_push2(&pending_gotos
, SYM_FIELD
, 0, cur_scope
->cl
.n
);
7078 pending_gotos
->prev_tok
= s
;
7079 s
= sym_push2(&s
->next
, SYM_FIELD
, 0, 0);
7080 pending_gotos
->next
= s
;
7082 s
->jnext
= gjmp(s
->jnext
);
7084 try_call_cleanup_goto(s
->cleanupstate
);
7085 gjmp_addr(s
->jnext
);
7090 expect("label identifier");
7094 } else if (t
== TOK_ASM1
|| t
== TOK_ASM2
|| t
== TOK_ASM3
) {
7098 if (tok
== ':' && t
>= TOK_UIDENT
) {
7103 if (s
->r
== LABEL_DEFINED
)
7104 tcc_error("duplicate label '%s'", get_tok_str(s
->v
, NULL
));
7105 s
->r
= LABEL_DEFINED
;
7107 Sym
*pcl
; /* pending cleanup goto */
7108 for (pcl
= s
->next
; pcl
; pcl
= pcl
->prev
)
7110 sym_pop(&s
->next
, NULL
, 0);
7114 s
= label_push(&global_label_stack
, t
, LABEL_DEFINED
);
7117 s
->cleanupstate
= cur_scope
->cl
.s
;
7121 /* Accept attributes after labels (e.g. 'unused') */
7122 AttributeDef ad_tmp
;
7123 parse_attribute(&ad_tmp
);
7126 tcc_tcov_reset_ind(tcc_state
);
7127 vla_restore(cur_scope
->vla
.loc
);
7130 /* we accept this, but it is a mistake */
7131 tcc_warning_c(warn_all
)("deprecated use of label at end of compound statement");
7134 /* expression case */
7151 tcc_tcov_check_line (tcc_state
, 0), tcc_tcov_block_end (tcc_state
, 0);
7154 /* This skips over a stream of tokens containing balanced {} and ()
7155 pairs, stopping at outer ',' ';' and '}' (or matching '}' if we started
7156 with a '{'). If STR then allocates and stores the skipped tokens
7157 in *STR. This doesn't check if () and {} are nested correctly,
7158 i.e. "({)}" is accepted. */
7159 static void skip_or_save_block(TokenString
**str
)
7161 int braces
= tok
== '{';
7164 *str
= tok_str_alloc();
7176 if (str
|| level
> 0)
7177 tcc_error("unexpected end of file");
7182 tok_str_add_tok(*str
);
7184 if (t
== '{' || t
== '(' || t
== '[') {
7186 } else if (t
== '}' || t
== ')' || t
== ']') {
7188 if (level
== 0 && braces
&& t
== '}')
7193 tok_str_add(*str
, -1);
7194 tok_str_add(*str
, 0);
7198 #define EXPR_CONST 1
7201 static void parse_init_elem(int expr_type
)
7203 int saved_global_expr
;
7206 /* compound literals must be allocated globally in this case */
7207 saved_global_expr
= global_expr
;
7210 global_expr
= saved_global_expr
;
7211 /* NOTE: symbols are accepted, as well as lvalue for anon symbols
7212 (compound literals). */
7213 if (((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) != VT_CONST
7214 && ((vtop
->r
& (VT_SYM
|VT_LVAL
)) != (VT_SYM
|VT_LVAL
)
7215 || vtop
->sym
->v
< SYM_FIRST_ANOM
))
7216 #ifdef TCC_TARGET_PE
7217 || ((vtop
->r
& VT_SYM
) && vtop
->sym
->a
.dllimport
)
7220 tcc_error("initializer element is not constant");
7229 static void init_assert(init_params
*p
, int offset
)
7231 if (p
->sec
? !NODATA_WANTED
&& offset
> p
->sec
->data_offset
7232 : !nocode_wanted
&& offset
> p
->local_offset
)
7233 tcc_internal_error("initializer overflow");
7236 #define init_assert(sec, offset)
7239 /* put zeros for variable based init */
7240 static void init_putz(init_params
*p
, unsigned long c
, int size
)
7242 init_assert(p
, c
+ size
);
7244 /* nothing to do because globals are already set to zero */
7246 vpush_helper_func(TOK_memset
);
7248 #ifdef TCC_TARGET_ARM
7260 #define DIF_SIZE_ONLY 2
7261 #define DIF_HAVE_ELEM 4
7264 /* delete relocations for specified range c ... c + size. Unfortunatly
7265 in very special cases, relocations may occur unordered */
7266 static void decl_design_delrels(Section
*sec
, int c
, int size
)
7268 ElfW_Rel
*rel
, *rel2
, *rel_end
;
7269 if (!sec
|| !sec
->reloc
)
7271 rel
= rel2
= (ElfW_Rel
*)sec
->reloc
->data
;
7272 rel_end
= (ElfW_Rel
*)(sec
->reloc
->data
+ sec
->reloc
->data_offset
);
7273 while (rel
< rel_end
) {
7274 if (rel
->r_offset
>= c
&& rel
->r_offset
< c
+ size
) {
7275 sec
->reloc
->data_offset
-= sizeof *rel
;
7278 memcpy(rel2
, rel
, sizeof *rel
);
7285 static void decl_design_flex(init_params
*p
, Sym
*ref
, int index
)
7287 if (ref
== p
->flex_array_ref
) {
7288 if (index
>= ref
->c
)
7290 } else if (ref
->c
< 0)
7291 tcc_error("flexible array has zero size in this context");
7294 /* t is the array or struct type. c is the array or struct
7295 address. cur_field is the pointer to the current
7296 field, for arrays the 'c' member contains the current start
7297 index. 'flags' is as in decl_initializer.
7298 'al' contains the already initialized length of the
7299 current container (starting at c). This returns the new length of that. */
7300 static int decl_designator(init_params
*p
, CType
*type
, unsigned long c
,
7301 Sym
**cur_field
, int flags
, int al
)
7304 int index
, index_last
, align
, l
, nb_elems
, elem_size
;
7305 unsigned long corig
= c
;
7310 if (flags
& DIF_HAVE_ELEM
)
7313 if (gnu_ext
&& tok
>= TOK_UIDENT
) {
7320 /* NOTE: we only support ranges for last designator */
7321 while (nb_elems
== 1 && (tok
== '[' || tok
== '.')) {
7323 if (!(type
->t
& VT_ARRAY
))
7324 expect("array type");
7326 index
= index_last
= expr_const();
7327 if (tok
== TOK_DOTS
&& gnu_ext
) {
7329 index_last
= expr_const();
7333 decl_design_flex(p
, s
, index_last
);
7334 if (index
< 0 || index_last
>= s
->c
|| index_last
< index
)
7335 tcc_error("index exceeds array bounds or range is empty");
7337 (*cur_field
)->c
= index_last
;
7338 type
= pointed_type(type
);
7339 elem_size
= type_size(type
, &align
);
7340 c
+= index
* elem_size
;
7341 nb_elems
= index_last
- index
+ 1;
7348 if ((type
->t
& VT_BTYPE
) != VT_STRUCT
)
7349 expect("struct/union type");
7351 f
= find_field(type
, l
, &cumofs
);
7364 } else if (!gnu_ext
) {
7369 if (type
->t
& VT_ARRAY
) {
7370 index
= (*cur_field
)->c
;
7372 decl_design_flex(p
, s
, index
);
7374 tcc_error("too many initializers");
7375 type
= pointed_type(type
);
7376 elem_size
= type_size(type
, &align
);
7377 c
+= index
* elem_size
;
7380 while (f
&& (f
->v
& SYM_FIRST_ANOM
) && (f
->type
.t
& VT_BITFIELD
))
7381 *cur_field
= f
= f
->next
;
7383 tcc_error("too many initializers");
7389 if (!elem_size
) /* for structs */
7390 elem_size
= type_size(type
, &align
);
7392 /* Using designators the same element can be initialized more
7393 than once. In that case we need to delete possibly already
7394 existing relocations. */
7395 if (!(flags
& DIF_SIZE_ONLY
) && c
- corig
< al
) {
7396 decl_design_delrels(p
->sec
, c
, elem_size
* nb_elems
);
7397 flags
&= ~DIF_CLEAR
; /* mark stack dirty too */
7400 decl_initializer(p
, type
, c
, flags
& ~DIF_FIRST
);
7402 if (!(flags
& DIF_SIZE_ONLY
) && nb_elems
> 1) {
7406 if (p
->sec
|| (type
->t
& VT_ARRAY
)) {
7407 /* make init_putv/vstore believe it were a struct */
7409 t1
.t
= VT_STRUCT
, t1
.ref
= &aref
;
7413 vpush_ref(type
, p
->sec
, c
, elem_size
);
7415 vset(type
, VT_LOCAL
|VT_LVAL
, c
);
7416 for (i
= 1; i
< nb_elems
; i
++) {
7418 init_putv(p
, type
, c
+ elem_size
* i
);
7423 c
+= nb_elems
* elem_size
;
7429 /* store a value or an expression directly in global data or in local array */
7430 static void init_putv(init_params
*p
, CType
*type
, unsigned long c
)
7436 Section
*sec
= p
->sec
;
7440 dtype
.t
&= ~VT_CONSTANT
; /* need to do that to avoid false warning */
7442 size
= type_size(type
, &align
);
7443 if (type
->t
& VT_BITFIELD
)
7444 size
= (BIT_POS(type
->t
) + BIT_SIZE(type
->t
) + 7) / 8;
7445 init_assert(p
, c
+ size
);
7448 /* XXX: not portable */
7449 /* XXX: generate error if incorrect relocation */
7450 gen_assign_cast(&dtype
);
7451 bt
= type
->t
& VT_BTYPE
;
7453 if ((vtop
->r
& VT_SYM
)
7455 && (bt
!= (PTR_SIZE
== 8 ? VT_LLONG
: VT_INT
)
7456 || (type
->t
& VT_BITFIELD
))
7457 && !((vtop
->r
& VT_CONST
) && vtop
->sym
->v
>= SYM_FIRST_ANOM
)
7459 tcc_error("initializer element is not computable at load time");
7461 if (NODATA_WANTED
) {
7466 ptr
= sec
->data
+ c
;
7469 /* XXX: make code faster ? */
7470 if ((vtop
->r
& (VT_SYM
|VT_CONST
)) == (VT_SYM
|VT_CONST
) &&
7471 vtop
->sym
->v
>= SYM_FIRST_ANOM
&&
7472 /* XXX This rejects compound literals like
7473 '(void *){ptr}'. The problem is that '&sym' is
7474 represented the same way, which would be ruled out
7475 by the SYM_FIRST_ANOM check above, but also '"string"'
7476 in 'char *p = "string"' is represented the same
7477 with the type being VT_PTR and the symbol being an
7478 anonymous one. That is, there's no difference in vtop
7479 between '(void *){x}' and '&(void *){x}'. Ignore
7480 pointer typed entities here. Hopefully no real code
7481 will ever use compound literals with scalar type. */
7482 (vtop
->type
.t
& VT_BTYPE
) != VT_PTR
) {
7483 /* These come from compound literals, memcpy stuff over. */
7487 esym
= elfsym(vtop
->sym
);
7488 ssec
= tcc_state
->sections
[esym
->st_shndx
];
7489 memmove (ptr
, ssec
->data
+ esym
->st_value
+ (int)vtop
->c
.i
, size
);
7491 /* We need to copy over all memory contents, and that
7492 includes relocations. Use the fact that relocs are
7493 created it order, so look from the end of relocs
7494 until we hit one before the copied region. */
7495 unsigned long relofs
= ssec
->reloc
->data_offset
;
7496 while (relofs
>= sizeof(*rel
)) {
7497 relofs
-= sizeof(*rel
);
7498 rel
= (ElfW_Rel
*)(ssec
->reloc
->data
+ relofs
);
7499 if (rel
->r_offset
>= esym
->st_value
+ size
)
7501 if (rel
->r_offset
< esym
->st_value
)
7503 put_elf_reloca(symtab_section
, sec
,
7504 c
+ rel
->r_offset
- esym
->st_value
,
7505 ELFW(R_TYPE
)(rel
->r_info
),
7506 ELFW(R_SYM
)(rel
->r_info
),
7516 if (type
->t
& VT_BITFIELD
) {
7517 int bit_pos
, bit_size
, bits
, n
;
7518 unsigned char *p
, v
, m
;
7519 bit_pos
= BIT_POS(vtop
->type
.t
);
7520 bit_size
= BIT_SIZE(vtop
->type
.t
);
7521 p
= (unsigned char*)ptr
+ (bit_pos
>> 3);
7522 bit_pos
&= 7, bits
= 0;
7527 v
= val
>> bits
<< bit_pos
;
7528 m
= ((1 << n
) - 1) << bit_pos
;
7529 *p
= (*p
& ~m
) | (v
& m
);
7530 bits
+= n
, bit_size
-= n
, bit_pos
= 0, ++p
;
7535 *(char *)ptr
= val
!= 0;
7541 write16le(ptr
, val
);
7544 write32le(ptr
, val
);
7547 write64le(ptr
, val
);
7550 #if defined TCC_IS_NATIVE_387
7551 /* Host and target platform may be different but both have x87.
7552 On windows, tcc does not use VT_LDOUBLE, except when it is a
7553 cross compiler. In this case a mingw gcc as host compiler
7554 comes here with 10-byte long doubles, while msvc or tcc won't.
7555 tcc itself can still translate by asm.
7556 In any case we avoid possibly random bytes 11 and 12.
7558 if (sizeof (long double) >= 10)
7559 memcpy(ptr
, &vtop
->c
.ld
, 10);
7561 else if (sizeof (long double) == sizeof (double))
7562 __asm__("fldl %1\nfstpt %0\n" : "=m" (*ptr
) : "m" (vtop
->c
.ld
));
7564 else if (vtop
->c
.ld
== 0.0)
7568 /* For other platforms it should work natively, but may not work
7569 for cross compilers */
7570 if (sizeof(long double) == LDOUBLE_SIZE
)
7571 memcpy(ptr
, &vtop
->c
.ld
, LDOUBLE_SIZE
);
7572 else if (sizeof(double) == LDOUBLE_SIZE
)
7573 memcpy(ptr
, &vtop
->c
.ld
, LDOUBLE_SIZE
);
7574 #ifndef TCC_CROSS_TEST
7576 tcc_error("can't cross compile long double constants");
7581 /* intptr_t may need a reloc too, see tcctest.c:relocation_test() */
7584 if (vtop
->r
& VT_SYM
)
7585 greloca(sec
, vtop
->sym
, c
, R_DATA_PTR
, val
);
7587 write64le(ptr
, val
);
7590 write32le(ptr
, val
);
7594 write64le(ptr
, val
);
7598 if (vtop
->r
& VT_SYM
)
7599 greloc(sec
, vtop
->sym
, c
, R_DATA_PTR
);
7600 write32le(ptr
, val
);
7604 //tcc_internal_error("unexpected type");
7610 vset(&dtype
, VT_LOCAL
|VT_LVAL
, c
);
7617 /* 't' contains the type and storage info. 'c' is the offset of the
7618 object in section 'sec'. If 'sec' is NULL, it means stack based
7619 allocation. 'flags & DIF_FIRST' is true if array '{' must be read (multi
7620 dimension implicit array init handling). 'flags & DIF_SIZE_ONLY' is true if
7621 size only evaluation is wanted (only for arrays). */
7622 static void decl_initializer(init_params
*p
, CType
*type
, unsigned long c
, int flags
)
7624 int len
, n
, no_oblock
, i
;
7630 /* generate line number info */
7631 if (debug_modes
&& !p
->sec
)
7632 tcc_debug_line(tcc_state
), tcc_tcov_check_line (tcc_state
, 1);
7634 if (!(flags
& DIF_HAVE_ELEM
) && tok
!= '{' &&
7635 /* In case of strings we have special handling for arrays, so
7636 don't consume them as initializer value (which would commit them
7637 to some anonymous symbol). */
7638 tok
!= TOK_LSTR
&& tok
!= TOK_STR
&&
7639 (!(flags
& DIF_SIZE_ONLY
)
7640 /* a struct may be initialized from a struct of same type, as in
7641 struct {int x,y;} a = {1,2}, b = {3,4}, c[] = {a,b};
7642 In that case we need to parse the element in order to check
7643 it for compatibility below */
7644 || (type
->t
& VT_BTYPE
) == VT_STRUCT
)
7646 int ncw_prev
= nocode_wanted
;
7647 if ((flags
& DIF_SIZE_ONLY
) && !p
->sec
)
7649 parse_init_elem(!p
->sec
? EXPR_ANY
: EXPR_CONST
);
7650 nocode_wanted
= ncw_prev
;
7651 flags
|= DIF_HAVE_ELEM
;
7654 if (type
->t
& VT_ARRAY
) {
7656 if (((flags
& DIF_FIRST
) && tok
!= TOK_LSTR
&& tok
!= TOK_STR
) ||
7664 t1
= pointed_type(type
);
7665 size1
= type_size(t1
, &align1
);
7667 /* only parse strings here if correct type (otherwise: handle
7668 them as ((w)char *) expressions */
7669 if ((tok
== TOK_LSTR
&&
7670 #ifdef TCC_TARGET_PE
7671 (t1
->t
& VT_BTYPE
) == VT_SHORT
&& (t1
->t
& VT_UNSIGNED
)
7673 (t1
->t
& VT_BTYPE
) == VT_INT
7675 ) || (tok
== TOK_STR
&& (t1
->t
& VT_BTYPE
) == VT_BYTE
)) {
7677 cstr_reset(&initstr
);
7678 if (size1
!= (tok
== TOK_STR
? 1 : sizeof(nwchar_t
)))
7679 tcc_error("unhandled string literal merging");
7680 while (tok
== TOK_STR
|| tok
== TOK_LSTR
) {
7682 initstr
.size
-= size1
;
7684 len
+= tokc
.str
.size
;
7686 len
+= tokc
.str
.size
/ sizeof(nwchar_t
);
7688 cstr_cat(&initstr
, tokc
.str
.data
, tokc
.str
.size
);
7691 if (tok
!= ')' && tok
!= '}' && tok
!= ',' && tok
!= ';'
7692 && tok
!= TOK_EOF
) {
7693 /* Not a lone literal but part of a bigger expression. */
7694 unget_tok(size1
== 1 ? TOK_STR
: TOK_LSTR
);
7695 tokc
.str
.size
= initstr
.size
;
7696 tokc
.str
.data
= initstr
.data
;
7700 decl_design_flex(p
, s
, len
);
7701 if (!(flags
& DIF_SIZE_ONLY
)) {
7706 tcc_warning("initializer-string for array is too long");
7707 /* in order to go faster for common case (char
7708 string in global variable, we handle it
7710 if (p
->sec
&& size1
== 1) {
7711 init_assert(p
, c
+ nb
);
7713 memcpy(p
->sec
->data
+ c
, initstr
.data
, nb
);
7717 /* only add trailing zero if enough storage (no
7718 warning in this case since it is standard) */
7719 if (flags
& DIF_CLEAR
)
7722 init_putz(p
, c
+ i
* size1
, (n
- i
) * size1
);
7726 } else if (size1
== 1)
7727 ch
= ((unsigned char *)initstr
.data
)[i
];
7729 ch
= ((nwchar_t
*)initstr
.data
)[i
];
7731 init_putv(p
, t1
, c
+ i
* size1
);
7742 /* zero memory once in advance */
7743 if (!(flags
& (DIF_CLEAR
| DIF_SIZE_ONLY
))) {
7744 init_putz(p
, c
, n
*size1
);
7749 /* GNU extension: if the initializer is empty for a flex array,
7750 it's size is zero. We won't enter the loop, so set the size
7752 decl_design_flex(p
, s
, len
);
7753 while (tok
!= '}' || (flags
& DIF_HAVE_ELEM
)) {
7754 len
= decl_designator(p
, type
, c
, &f
, flags
, len
);
7755 flags
&= ~DIF_HAVE_ELEM
;
7756 if (type
->t
& VT_ARRAY
) {
7758 /* special test for multi dimensional arrays (may not
7759 be strictly correct if designators are used at the
7761 if (no_oblock
&& len
>= n
*size1
)
7764 if (s
->type
.t
== VT_UNION
)
7768 if (no_oblock
&& f
== NULL
)
7780 } else if ((flags
& DIF_HAVE_ELEM
)
7781 /* Use i_c_parameter_t, to strip toplevel qualifiers.
7782 The source type might have VT_CONSTANT set, which is
7783 of course assignable to non-const elements. */
7784 && is_compatible_unqualified_types(type
, &vtop
->type
)) {
7787 } else if ((type
->t
& VT_BTYPE
) == VT_STRUCT
) {
7789 if ((flags
& DIF_FIRST
) || tok
== '{') {
7799 } else if (tok
== '{') {
7800 if (flags
& DIF_HAVE_ELEM
)
7803 decl_initializer(p
, type
, c
, flags
& ~DIF_HAVE_ELEM
);
7806 } else one_elem
: if ((flags
& DIF_SIZE_ONLY
)) {
7807 /* If we supported only ISO C we wouldn't have to accept calling
7808 this on anything than an array if DIF_SIZE_ONLY (and even then
7809 only on the outermost level, so no recursion would be needed),
7810 because initializing a flex array member isn't supported.
7811 But GNU C supports it, so we need to recurse even into
7812 subfields of structs and arrays when DIF_SIZE_ONLY is set. */
7813 /* just skip expression */
7814 if (flags
& DIF_HAVE_ELEM
)
7817 skip_or_save_block(NULL
);
7820 if (!(flags
& DIF_HAVE_ELEM
)) {
7821 /* This should happen only when we haven't parsed
7822 the init element above for fear of committing a
7823 string constant to memory too early. */
7824 if (tok
!= TOK_STR
&& tok
!= TOK_LSTR
)
7825 expect("string constant");
7826 parse_init_elem(!p
->sec
? EXPR_ANY
: EXPR_CONST
);
7828 if (!p
->sec
&& (flags
& DIF_CLEAR
) /* container was already zero'd */
7829 && (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
7831 && btype_size(type
->t
& VT_BTYPE
) /* not for fp constants */
7835 init_putv(p
, type
, c
);
7839 /* parse an initializer for type 't' if 'has_init' is non zero, and
7840 allocate space in local or global data space ('r' is either
7841 VT_LOCAL or VT_CONST). If 'v' is non zero, then an associated
7842 variable 'v' of scope 'scope' is declared before initializers
7843 are parsed. If 'v' is zero, then a reference to the new object
7844 is put in the value stack. If 'has_init' is 2, a special parsing
7845 is done to handle string constants. */
7846 static void decl_initializer_alloc(CType
*type
, AttributeDef
*ad
, int r
,
7847 int has_init
, int v
, int global
)
7849 int size
, align
, addr
;
7850 TokenString
*init_str
= NULL
;
7853 Sym
*flexible_array
;
7855 int saved_nocode_wanted
= nocode_wanted
;
7856 #ifdef CONFIG_TCC_BCHECK
7857 int bcheck
= tcc_state
->do_bounds_check
&& !NODATA_WANTED
;
7859 init_params p
= {0};
7861 /* Always allocate static or global variables */
7862 if (v
&& (r
& VT_VALMASK
) == VT_CONST
)
7863 nocode_wanted
|= DATA_ONLY_WANTED
;
7865 flexible_array
= NULL
;
7866 size
= type_size(type
, &align
);
7868 /* exactly one flexible array may be initialized, either the
7869 toplevel array or the last member of the toplevel struct */
7872 /* If the base type itself was an array type of unspecified size
7873 (like in 'typedef int arr[]; arr x = {1};') then we will
7874 overwrite the unknown size by the real one for this decl.
7875 We need to unshare the ref symbol holding that size. */
7876 type
->ref
= sym_push(SYM_FIELD
, &type
->ref
->type
, 0, type
->ref
->c
);
7877 p
.flex_array_ref
= type
->ref
;
7879 } else if (has_init
&& (type
->t
& VT_BTYPE
) == VT_STRUCT
) {
7880 Sym
*field
= type
->ref
->next
;
7883 field
= field
->next
;
7884 if (field
->type
.t
& VT_ARRAY
&& field
->type
.ref
->c
< 0) {
7885 flexible_array
= field
;
7886 p
.flex_array_ref
= field
->type
.ref
;
7893 /* If unknown size, do a dry-run 1st pass */
7895 tcc_error("unknown type size");
7896 if (has_init
== 2) {
7897 /* only get strings */
7898 init_str
= tok_str_alloc();
7899 while (tok
== TOK_STR
|| tok
== TOK_LSTR
) {
7900 tok_str_add_tok(init_str
);
7903 tok_str_add(init_str
, -1);
7904 tok_str_add(init_str
, 0);
7906 skip_or_save_block(&init_str
);
7910 begin_macro(init_str
, 1);
7912 decl_initializer(&p
, type
, 0, DIF_FIRST
| DIF_SIZE_ONLY
);
7913 /* prepare second initializer parsing */
7914 macro_ptr
= init_str
->str
;
7917 /* if still unknown size, error */
7918 size
= type_size(type
, &align
);
7920 tcc_error("unknown type size");
7922 /* If there's a flex member and it was used in the initializer
7924 if (flexible_array
&& flexible_array
->type
.ref
->c
> 0)
7925 size
+= flexible_array
->type
.ref
->c
7926 * pointed_size(&flexible_array
->type
);
7929 /* take into account specified alignment if bigger */
7930 if (ad
->a
.aligned
) {
7931 int speca
= 1 << (ad
->a
.aligned
- 1);
7934 } else if (ad
->a
.packed
) {
7938 if (!v
&& NODATA_WANTED
)
7939 size
= 0, align
= 1;
7941 if ((r
& VT_VALMASK
) == VT_LOCAL
) {
7943 #ifdef CONFIG_TCC_BCHECK
7945 /* add padding between stack variables for bound checking */
7949 loc
= (loc
- size
) & -align
;
7951 p
.local_offset
= addr
+ size
;
7952 #ifdef CONFIG_TCC_BCHECK
7954 /* add padding between stack variables for bound checking */
7959 /* local variable */
7960 #ifdef CONFIG_TCC_ASM
7961 if (ad
->asm_label
) {
7962 int reg
= asm_parse_regvar(ad
->asm_label
);
7964 r
= (r
& ~VT_VALMASK
) | reg
;
7967 sym
= sym_push(v
, type
, r
, addr
);
7968 if (ad
->cleanup_func
) {
7969 Sym
*cls
= sym_push2(&all_cleanups
,
7970 SYM_FIELD
| ++cur_scope
->cl
.n
, 0, 0);
7971 cls
->prev_tok
= sym
;
7972 cls
->next
= ad
->cleanup_func
;
7973 cls
->ncl
= cur_scope
->cl
.s
;
7974 cur_scope
->cl
.s
= cls
;
7979 /* push local reference */
7980 vset(type
, r
, addr
);
7985 /* see if the symbol was already defined */
7988 if (p
.flex_array_ref
&& (sym
->type
.t
& type
->t
& VT_ARRAY
)
7989 && sym
->type
.ref
->c
> type
->ref
->c
) {
7990 /* flex array was already declared with explicit size
7992 int arr[] = { 1,2,3 }; */
7993 type
->ref
->c
= sym
->type
.ref
->c
;
7994 size
= type_size(type
, &align
);
7996 patch_storage(sym
, ad
, type
);
7997 /* we accept several definitions of the same global variable. */
7998 if (!has_init
&& sym
->c
&& elfsym(sym
)->st_shndx
!= SHN_UNDEF
)
8003 /* allocate symbol in corresponding section */
8007 while ((tp
->t
& (VT_BTYPE
|VT_ARRAY
)) == (VT_PTR
|VT_ARRAY
))
8008 tp
= &tp
->ref
->type
;
8009 if (tp
->t
& VT_CONSTANT
) {
8010 sec
= rodata_section
;
8011 } else if (has_init
) {
8013 /*if (tcc_state->g_debug & 4)
8014 tcc_warning("rw data: %s", get_tok_str(v, 0));*/
8015 } else if (tcc_state
->nocommon
)
8020 addr
= section_add(sec
, size
, align
);
8021 #ifdef CONFIG_TCC_BCHECK
8022 /* add padding if bound check */
8024 section_add(sec
, 1, 1);
8027 addr
= align
; /* SHN_COMMON is special, symbol value is align */
8028 sec
= common_section
;
8033 sym
= sym_push(v
, type
, r
| VT_SYM
, 0);
8034 patch_storage(sym
, ad
, NULL
);
8036 /* update symbol definition */
8037 put_extern_sym(sym
, sec
, addr
, size
);
8039 /* push global reference */
8040 vpush_ref(type
, sec
, addr
, size
);
8045 #ifdef CONFIG_TCC_BCHECK
8046 /* handles bounds now because the symbol must be defined
8047 before for the relocation */
8051 greloca(bounds_section
, sym
, bounds_section
->data_offset
, R_DATA_PTR
, 0);
8052 /* then add global bound info */
8053 bounds_ptr
= section_ptr_add(bounds_section
, 2 * sizeof(addr_t
));
8054 bounds_ptr
[0] = 0; /* relocated */
8055 bounds_ptr
[1] = size
;
8060 if (type
->t
& VT_VLA
) {
8066 /* save before-VLA stack pointer if needed */
8067 if (cur_scope
->vla
.num
== 0) {
8068 if (cur_scope
->prev
&& cur_scope
->prev
->vla
.num
) {
8069 cur_scope
->vla
.locorig
= cur_scope
->prev
->vla
.loc
;
8071 gen_vla_sp_save(loc
-= PTR_SIZE
);
8072 cur_scope
->vla
.locorig
= loc
;
8076 vpush_type_size(type
, &a
);
8077 gen_vla_alloc(type
, a
);
8078 #if defined TCC_TARGET_PE && defined TCC_TARGET_X86_64
8079 /* on _WIN64, because of the function args scratch area, the
8080 result of alloca differs from RSP and is returned in RAX. */
8081 gen_vla_result(addr
), addr
= (loc
-= PTR_SIZE
);
8083 gen_vla_sp_save(addr
);
8084 cur_scope
->vla
.loc
= addr
;
8085 cur_scope
->vla
.num
++;
8086 } else if (has_init
) {
8088 decl_initializer(&p
, type
, addr
, DIF_FIRST
);
8089 /* patch flexible array member size back to -1, */
8090 /* for possible subsequent similar declarations */
8092 flexible_array
->type
.ref
->c
= -1;
8096 /* restore parse state if needed */
8102 nocode_wanted
= saved_nocode_wanted
;
8105 /* generate vla code saved in post_type() */
8106 static void func_vla_arg_code(Sym
*arg
)
8109 TokenString
*vla_array_tok
= NULL
;
8112 func_vla_arg_code(arg
->type
.ref
);
8114 if ((arg
->type
.t
& VT_VLA
) && arg
->type
.ref
->vla_array_str
) {
8115 loc
-= type_size(&int_type
, &align
);
8117 arg
->type
.ref
->c
= loc
;
8120 vla_array_tok
= tok_str_alloc();
8121 vla_array_tok
->str
= arg
->type
.ref
->vla_array_str
;
8122 begin_macro(vla_array_tok
, 1);
8127 vpush_type_size(&arg
->type
.ref
->type
, &align
);
8129 vset(&int_type
, VT_LOCAL
|VT_LVAL
, arg
->type
.ref
->c
);
8136 static void func_vla_arg(Sym
*sym
)
8140 for (arg
= sym
->type
.ref
->next
; arg
; arg
= arg
->next
)
8141 if (arg
->type
.t
& VT_VLA
)
8142 func_vla_arg_code(arg
);
8145 /* parse a function defined by symbol 'sym' and generate its code in
8146 'cur_text_section' */
8147 static void gen_function(Sym
*sym
)
8149 struct scope f
= { 0 };
8150 cur_scope
= root_scope
= &f
;
8152 ind
= cur_text_section
->data_offset
;
8153 if (sym
->a
.aligned
) {
8154 size_t newoff
= section_add(cur_text_section
, 0,
8155 1 << (sym
->a
.aligned
- 1));
8156 gen_fill_nops(newoff
- ind
);
8158 /* NOTE: we patch the symbol size later */
8159 put_extern_sym(sym
, cur_text_section
, ind
, 0);
8160 if (sym
->type
.ref
->f
.func_ctor
)
8161 add_array (tcc_state
, ".init_array", sym
->c
);
8162 if (sym
->type
.ref
->f
.func_dtor
)
8163 add_array (tcc_state
, ".fini_array", sym
->c
);
8165 funcname
= get_tok_str(sym
->v
, NULL
);
8167 func_vt
= sym
->type
.ref
->type
;
8168 func_var
= sym
->type
.ref
->f
.func_type
== FUNC_ELLIPSIS
;
8170 /* put debug symbol */
8171 tcc_debug_funcstart(tcc_state
, sym
);
8172 /* push a dummy symbol to enable local sym storage */
8173 sym_push2(&local_stack
, SYM_FIELD
, 0, 0);
8174 local_scope
= 1; /* for function parameters */
8176 tcc_debug_prolog_epilog(tcc_state
, 0);
8179 clear_temp_local_var_list();
8184 /* reset local stack */
8185 pop_local_syms(NULL
, 0);
8186 tcc_debug_prolog_epilog(tcc_state
, 1);
8188 cur_text_section
->data_offset
= ind
;
8190 label_pop(&global_label_stack
, NULL
, 0);
8191 sym_pop(&all_cleanups
, NULL
, 0);
8192 /* patch symbol size */
8193 elfsym(sym
)->st_size
= ind
- func_ind
;
8194 /* end of function */
8195 tcc_debug_funcend(tcc_state
, ind
- func_ind
);
8196 /* It's better to crash than to generate wrong code */
8197 cur_text_section
= NULL
;
8198 funcname
= ""; /* for safety */
8199 func_vt
.t
= VT_VOID
; /* for safety */
8200 func_var
= 0; /* for safety */
8201 ind
= 0; /* for safety */
8203 nocode_wanted
= DATA_ONLY_WANTED
;
8205 /* do this after funcend debug info */
8209 static void gen_inline_functions(TCCState
*s
)
8212 int inline_generated
, i
;
8213 struct InlineFunc
*fn
;
8215 tcc_open_bf(s
, ":inline:", 0);
8216 /* iterate while inline function are referenced */
8218 inline_generated
= 0;
8219 for (i
= 0; i
< s
->nb_inline_fns
; ++i
) {
8220 fn
= s
->inline_fns
[i
];
8222 if (sym
&& (sym
->c
|| !(sym
->type
.t
& VT_INLINE
))) {
8223 /* the function was used or forced (and then not internal):
8224 generate its code and convert it to a normal function */
8226 tcc_debug_putfile(s
, fn
->filename
);
8227 begin_macro(fn
->func_str
, 1);
8229 cur_text_section
= text_section
;
8233 inline_generated
= 1;
8236 } while (inline_generated
);
8240 static void free_inline_functions(TCCState
*s
)
8243 /* free tokens of unused inline functions */
8244 for (i
= 0; i
< s
->nb_inline_fns
; ++i
) {
8245 struct InlineFunc
*fn
= s
->inline_fns
[i
];
8247 tok_str_free(fn
->func_str
);
8249 dynarray_reset(&s
->inline_fns
, &s
->nb_inline_fns
);
8252 static void do_Static_assert(void){
8262 tcc_error("_Static_assert fail");
8264 goto static_assert_out
;
8268 parse_mult_str(&error_str
, "string constant");
8270 tcc_error("%s", (char *)error_str
.data
);
8271 cstr_free(&error_str
);
8277 /* 'l' is VT_LOCAL or VT_CONST to define default storage type
8278 or VT_CMP if parsing old style parameter list
8279 or VT_JMP if parsing c99 for decl: for (int i = 0, ...) */
8280 static int decl(int l
)
8282 int v
, has_init
, r
, oldint
;
8285 AttributeDef ad
, adbase
;
8288 if (tok
== TOK_STATIC_ASSERT
) {
8294 if (!parse_btype(&btype
, &adbase
, l
== VT_LOCAL
)) {
8297 /* skip redundant ';' if not in old parameter decl scope */
8298 if (tok
== ';' && l
!= VT_CMP
) {
8304 if (tok
== TOK_ASM1
|| tok
== TOK_ASM2
|| tok
== TOK_ASM3
) {
8305 /* global asm block */
8309 if (tok
>= TOK_UIDENT
) {
8310 /* special test for old K&R protos without explicit int
8311 type. Only accepted when defining global data */
8316 expect("declaration");
8322 if ((btype
.t
& VT_BTYPE
) == VT_STRUCT
) {
8324 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) >= SYM_FIRST_ANOM
)
8325 tcc_warning("unnamed struct/union that defines no instances");
8329 if (IS_ENUM(btype
.t
)) {
8335 while (1) { /* iterate thru each declaration */
8338 type_decl(&type
, &ad
, &v
, TYPE_DIRECT
);
8342 type_to_str(buf
, sizeof(buf
), &type
, get_tok_str(v
, NULL
));
8343 printf("type = '%s'\n", buf
);
8346 if ((type
.t
& VT_BTYPE
) == VT_FUNC
) {
8347 if ((type
.t
& VT_STATIC
) && (l
!= VT_CONST
))
8348 tcc_error("function without file scope cannot be static");
8349 /* if old style function prototype, we accept a
8352 if (sym
->f
.func_type
== FUNC_OLD
&& l
== VT_CONST
) {
8356 #if defined TCC_TARGET_MACHO || defined TARGETOS_ANDROID
8357 if (sym
->f
.func_alwinl
8358 && ((type
.t
& (VT_EXTERN
| VT_INLINE
))
8359 == (VT_EXTERN
| VT_INLINE
))) {
8360 /* always_inline functions must be handled as if they
8361 don't generate multiple global defs, even if extern
8362 inline, i.e. GNU inline semantics for those. Rewrite
8363 them into static inline. */
8364 type
.t
&= ~VT_EXTERN
;
8365 type
.t
|= VT_STATIC
;
8368 /* always compile 'extern inline' */
8369 if (type
.t
& VT_EXTERN
)
8370 type
.t
&= ~VT_INLINE
;
8372 } else if (oldint
) {
8373 tcc_warning("type defaults to int");
8376 if (gnu_ext
&& (tok
== TOK_ASM1
|| tok
== TOK_ASM2
|| tok
== TOK_ASM3
)) {
8377 ad
.asm_label
= asm_label_instr();
8378 /* parse one last attribute list, after asm label */
8379 parse_attribute(&ad
);
8381 /* gcc does not allow __asm__("label") with function definition,
8388 #ifdef TCC_TARGET_PE
8389 if (ad
.a
.dllimport
|| ad
.a
.dllexport
) {
8390 if (type
.t
& VT_STATIC
)
8391 tcc_error("cannot have dll linkage with static");
8392 if (type
.t
& VT_TYPEDEF
) {
8393 tcc_warning("'%s' attribute ignored for typedef",
8394 ad
.a
.dllimport
? (ad
.a
.dllimport
= 0, "dllimport") :
8395 (ad
.a
.dllexport
= 0, "dllexport"));
8396 } else if (ad
.a
.dllimport
) {
8397 if ((type
.t
& VT_BTYPE
) == VT_FUNC
)
8400 type
.t
|= VT_EXTERN
;
8406 tcc_error("cannot use local functions");
8407 if ((type
.t
& VT_BTYPE
) != VT_FUNC
)
8408 expect("function definition");
8410 /* reject abstract declarators in function definition
8411 make old style params without decl have int type */
8413 while ((sym
= sym
->next
) != NULL
) {
8414 if (!(sym
->v
& ~SYM_FIELD
))
8415 expect("identifier");
8416 if (sym
->type
.t
== VT_VOID
)
8417 sym
->type
= int_type
;
8420 /* apply post-declaraton attributes */
8421 merge_funcattr(&type
.ref
->f
, &ad
.f
);
8423 /* put function symbol */
8424 type
.t
&= ~VT_EXTERN
;
8425 sym
= external_sym(v
, &type
, 0, &ad
);
8427 /* static inline functions are just recorded as a kind
8428 of macro. Their code will be emitted at the end of
8429 the compilation unit only if they are used */
8430 if (sym
->type
.t
& VT_INLINE
) {
8431 struct InlineFunc
*fn
;
8432 fn
= tcc_malloc(sizeof *fn
+ strlen(file
->filename
));
8433 strcpy(fn
->filename
, file
->filename
);
8435 skip_or_save_block(&fn
->func_str
);
8436 dynarray_add(&tcc_state
->inline_fns
,
8437 &tcc_state
->nb_inline_fns
, fn
);
8439 /* compute text section */
8440 cur_text_section
= ad
.section
;
8441 if (!cur_text_section
)
8442 cur_text_section
= text_section
;
8448 /* find parameter in function parameter list */
8449 for (sym
= func_vt
.ref
->next
; sym
; sym
= sym
->next
)
8450 if ((sym
->v
& ~SYM_FIELD
) == v
)
8452 tcc_error("declaration for parameter '%s' but no such parameter",
8453 get_tok_str(v
, NULL
));
8455 if (type
.t
& VT_STORAGE
) /* 'register' is okay */
8456 tcc_error("storage class specified for '%s'",
8457 get_tok_str(v
, NULL
));
8458 if (sym
->type
.t
!= VT_VOID
)
8459 tcc_error("redefinition of parameter '%s'",
8460 get_tok_str(v
, NULL
));
8461 convert_parameter_type(&type
);
8463 } else if (type
.t
& VT_TYPEDEF
) {
8464 /* save typedefed type */
8465 /* XXX: test storage specifiers ? */
8467 if (sym
&& sym
->sym_scope
== local_scope
) {
8468 if (!is_compatible_types(&sym
->type
, &type
)
8469 || !(sym
->type
.t
& VT_TYPEDEF
))
8470 tcc_error("incompatible redefinition of '%s'",
8471 get_tok_str(v
, NULL
));
8474 sym
= sym_push(v
, &type
, 0, 0);
8479 tcc_debug_typedef (tcc_state
, sym
);
8480 } else if ((type
.t
& VT_BTYPE
) == VT_VOID
8481 && !(type
.t
& VT_EXTERN
)) {
8482 tcc_error("declaration of void object");
8485 if ((type
.t
& VT_BTYPE
) == VT_FUNC
) {
8486 /* external function definition */
8487 /* specific case for func_call attribute */
8489 } else if (!(type
.t
& VT_ARRAY
)) {
8490 /* not lvalue if array */
8493 has_init
= (tok
== '=');
8494 if (has_init
&& (type
.t
& VT_VLA
))
8495 tcc_error("variable length array cannot be initialized");
8496 if (((type
.t
& VT_EXTERN
) && (!has_init
|| l
!= VT_CONST
))
8497 || (type
.t
& VT_BTYPE
) == VT_FUNC
8498 /* as with GCC, uninitialized global arrays with no size
8499 are considered extern: */
8500 || ((type
.t
& VT_ARRAY
) && !has_init
8501 && l
== VT_CONST
&& type
.ref
->c
< 0)
8503 /* external variable or function */
8504 type
.t
|= VT_EXTERN
;
8505 sym
= external_sym(v
, &type
, r
, &ad
);
8506 if (ad
.alias_target
) {
8507 /* Aliases need to be emitted when their target
8508 symbol is emitted, even if perhaps unreferenced.
8509 We only support the case where the base is
8510 already defined, otherwise we would need
8511 deferring to emit the aliases until the end of
8512 the compile unit. */
8513 Sym
*alias_target
= sym_find(ad
.alias_target
);
8514 ElfSym
*esym
= elfsym(alias_target
);
8516 tcc_error("unsupported forward __alias__ attribute");
8517 put_extern_sym2(sym
, esym
->st_shndx
,
8518 esym
->st_value
, esym
->st_size
, 1);
8521 if (l
== VT_CONST
|| (type
.t
& VT_STATIC
))
8527 else if (l
== VT_CONST
)
8528 /* uninitialized global variables may be overridden */
8529 type
.t
|= VT_EXTERN
;
8530 decl_initializer_alloc(&type
, &ad
, r
, has_init
, v
, l
== VT_CONST
);
8546 /* ------------------------------------------------------------------------- */
8549 /* ------------------------------------------------------------------------- */