2 * TCC - Tiny C Compiler
4 * Copyright (c) 2001-2004 Fabrice Bellard
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
24 /********************************************************/
25 /* global variables */
27 /* loc : local variable index
28 ind : output code index
30 anon_sym: anonymous symbol index
32 ST_DATA
int rsym
, anon_sym
, ind
, loc
;
34 ST_DATA Sym
*global_stack
;
35 ST_DATA Sym
*local_stack
;
36 ST_DATA Sym
*define_stack
;
37 ST_DATA Sym
*global_label_stack
;
38 ST_DATA Sym
*local_label_stack
;
40 static Sym
*sym_free_first
;
41 static void **sym_pools
;
42 static int nb_sym_pools
;
44 static Sym
*all_cleanups
, *pending_gotos
;
45 static int local_scope
;
46 ST_DATA
char debug_modes
;
49 static SValue _vstack
[1 + VSTACK_SIZE
];
50 #define vstack (_vstack + 1)
52 ST_DATA
int nocode_wanted
; /* no code generation wanted */
53 #define NODATA_WANTED (nocode_wanted > 0) /* no static data output wanted either */
54 #define DATA_ONLY_WANTED 0x80000000 /* ON outside of functions and for static initializers */
56 /* no code output after unconditional jumps such as with if (0) ... */
57 #define CODE_OFF_BIT 0x20000000
58 #define CODE_OFF() if(!nocode_wanted)(nocode_wanted |= CODE_OFF_BIT)
59 #define CODE_ON() (nocode_wanted &= ~CODE_OFF_BIT)
61 /* no code output when parsing sizeof()/typeof() etc. (using nocode_wanted++/--) */
62 #define NOEVAL_MASK 0x0000FFFF
63 #define NOEVAL_WANTED (nocode_wanted & NOEVAL_MASK)
65 /* no code output when parsing constant expressions */
66 #define CONST_WANTED_BIT 0x00010000
67 #define CONST_WANTED_MASK 0x0FFF0000
68 #define CONST_WANTED (nocode_wanted & CONST_WANTED_MASK)
70 ST_DATA
int global_expr
; /* true if compound literals must be allocated globally (used during initializers parsing) */
71 ST_DATA CType func_vt
; /* current function return type (used by return instruction) */
72 ST_DATA
int func_var
; /* true if current function is variadic (used by return instruction) */
75 ST_DATA
const char *funcname
;
76 ST_DATA CType int_type
, func_old_type
, char_type
, char_pointer_type
;
77 static CString initstr
;
80 #define VT_SIZE_T (VT_INT | VT_UNSIGNED)
81 #define VT_PTRDIFF_T VT_INT
83 #define VT_SIZE_T (VT_LLONG | VT_UNSIGNED)
84 #define VT_PTRDIFF_T VT_LLONG
86 #define VT_SIZE_T (VT_LONG | VT_LLONG | VT_UNSIGNED)
87 #define VT_PTRDIFF_T (VT_LONG | VT_LLONG)
90 static struct switch_t
{
94 } **p
; int n
; /* list of case ranges */
95 int def_sym
; /* default symbol */
99 struct switch_t
*prev
;
101 } *cur_switch
; /* current switch */
103 #define MAX_TEMP_LOCAL_VARIABLE_NUMBER 8
104 /* list of temporary local variables on the stack in current function. */
105 static struct temp_local_variable
{
106 int location
; //offset on stack. Svalue.c.i
109 } arr_temp_local_vars
[MAX_TEMP_LOCAL_VARIABLE_NUMBER
];
110 static int nb_temp_local_vars
;
112 static struct scope
{
114 struct { int loc
, locorig
, num
; } vla
;
115 struct { Sym
*s
; int n
; } cl
;
118 } *cur_scope
, *loop_scope
, *root_scope
;
127 #define precedence_parser
128 static void init_prec(void);
131 static void block(int flags
);
133 #define STMT_COMPOUND 2
135 static void gen_cast(CType
*type
);
136 static void gen_cast_s(int t
);
137 static inline CType
*pointed_type(CType
*type
);
138 static int is_compatible_types(CType
*type1
, CType
*type2
);
139 static int parse_btype(CType
*type
, AttributeDef
*ad
, int ignore_label
);
140 static CType
*type_decl(CType
*type
, AttributeDef
*ad
, int *v
, int td
);
141 static void parse_expr_type(CType
*type
);
142 static void init_putv(init_params
*p
, CType
*type
, unsigned long c
);
143 static void decl_initializer(init_params
*p
, CType
*type
, unsigned long c
, int flags
);
144 static void decl_initializer_alloc(CType
*type
, AttributeDef
*ad
, int r
, int has_init
, int v
, int scope
);
145 static int decl(int l
);
146 static void expr_eq(void);
147 static void vpush_type_size(CType
*type
, int *a
);
148 static int is_compatible_unqualified_types(CType
*type1
, CType
*type2
);
149 static inline int64_t expr_const64(void);
150 static void vpush64(int ty
, unsigned long long v
);
151 static void vpush(CType
*type
);
152 static int gvtst(int inv
, int t
);
153 static void gen_inline_functions(TCCState
*s
);
154 static void free_inline_functions(TCCState
*s
);
155 static void skip_or_save_block(TokenString
**str
);
156 static void gv_dup(void);
157 static int get_temp_local_var(int size
,int align
,int *r2
);
158 static void cast_error(CType
*st
, CType
*dt
);
159 static void end_switch(void);
160 static void do_Static_assert(void);
162 /* ------------------------------------------------------------------------- */
163 /* Automagical code suppression */
165 /* Clear 'nocode_wanted' at forward label if it was used */
166 ST_FUNC
void gsym(int t
)
174 /* Clear 'nocode_wanted' if current pc is a label */
180 tcc_tcov_block_begin(tcc_state
);
184 /* Set 'nocode_wanted' after unconditional (backwards) jump */
185 static void gjmp_addr_acs(int t
)
191 /* Set 'nocode_wanted' after unconditional (forwards) jump */
192 static int gjmp_acs(int t
)
199 /* These are #undef'd at the end of this file */
200 #define gjmp_addr gjmp_addr_acs
201 #define gjmp gjmp_acs
202 /* ------------------------------------------------------------------------- */
204 ST_INLN
int is_float(int t
)
206 int bt
= t
& VT_BTYPE
;
207 return bt
== VT_LDOUBLE
213 static inline int is_integer_btype(int bt
)
222 static int btype_size(int bt
)
224 return bt
== VT_BYTE
|| bt
== VT_BOOL
? 1 :
228 bt
== VT_PTR
? PTR_SIZE
: 0;
231 /* returns function return register from type */
232 static int R_RET(int t
)
236 #ifdef TCC_TARGET_X86_64
237 if ((t
& VT_BTYPE
) == VT_LDOUBLE
)
239 #elif defined TCC_TARGET_RISCV64
240 if ((t
& VT_BTYPE
) == VT_LDOUBLE
)
246 /* returns 2nd function return register, if any */
247 static int R2_RET(int t
)
253 #elif defined TCC_TARGET_X86_64
258 #elif defined TCC_TARGET_RISCV64
265 /* returns true for two-word types */
266 #define USING_TWO_WORDS(t) (R2_RET(t) != VT_CONST)
268 /* put function return registers to stack value */
269 static void PUT_R_RET(SValue
*sv
, int t
)
271 sv
->r
= R_RET(t
), sv
->r2
= R2_RET(t
);
274 /* returns function return register class for type t */
275 static int RC_RET(int t
)
277 return reg_classes
[R_RET(t
)] & ~(RC_FLOAT
| RC_INT
);
280 /* returns generic register class for type t */
281 static int RC_TYPE(int t
)
285 #ifdef TCC_TARGET_X86_64
286 if ((t
& VT_BTYPE
) == VT_LDOUBLE
)
288 if ((t
& VT_BTYPE
) == VT_QFLOAT
)
290 #elif defined TCC_TARGET_RISCV64
291 if ((t
& VT_BTYPE
) == VT_LDOUBLE
)
297 /* returns 2nd register class corresponding to t and rc */
298 static int RC2_TYPE(int t
, int rc
)
300 if (!USING_TWO_WORDS(t
))
315 /* we use our own 'finite' function to avoid potential problems with
316 non standard math libs */
317 /* XXX: endianness dependent */
318 ST_FUNC
int ieee_finite(double d
)
321 memcpy(p
, &d
, sizeof(double));
322 return ((unsigned)((p
[1] | 0x800fffff) + 1)) >> 31;
325 /* compiling intel long double natively */
326 #if (defined __i386__ || defined __x86_64__) \
327 && (defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64)
328 # define TCC_IS_NATIVE_387
331 ST_FUNC
void test_lvalue(void)
333 if (!(vtop
->r
& VT_LVAL
))
337 ST_FUNC
void check_vstack(void)
339 if (vtop
!= vstack
- 1)
340 tcc_error("internal compiler error: vstack leak (%d)",
341 (int)(vtop
- vstack
+ 1));
344 /* vstack debugging aid */
346 void pv (const char *lbl
, int a
, int b
)
349 for (i
= a
; i
< a
+ b
; ++i
) {
350 SValue
*p
= &vtop
[-i
];
351 printf("%s vtop[-%d] : type.t:%04x r:%04x r2:%04x c.i:%d\n",
352 lbl
, i
, p
->type
.t
, p
->r
, p
->r2
, (int)p
->c
.i
);
357 /* ------------------------------------------------------------------------- */
358 /* initialize vstack and types. This must be done also for tcc -E */
359 ST_FUNC
void tccgen_init(TCCState
*s1
)
362 memset(vtop
, 0, sizeof *vtop
);
364 /* define some often used types */
367 char_type
.t
= VT_BYTE
;
368 if (s1
->char_is_unsigned
)
369 char_type
.t
|= VT_UNSIGNED
;
370 char_pointer_type
= char_type
;
371 mk_pointer(&char_pointer_type
);
373 func_old_type
.t
= VT_FUNC
;
374 func_old_type
.ref
= sym_push(SYM_FIELD
, &int_type
, 0, 0);
375 func_old_type
.ref
->f
.func_call
= FUNC_CDECL
;
376 func_old_type
.ref
->f
.func_type
= FUNC_OLD
;
377 #ifdef precedence_parser
383 ST_FUNC
int tccgen_compile(TCCState
*s1
)
387 anon_sym
= SYM_FIRST_ANOM
;
388 nocode_wanted
= DATA_ONLY_WANTED
; /* no code outside of functions */
389 debug_modes
= (s1
->do_debug
? 1 : 0) | s1
->test_coverage
<< 1;
393 #ifdef TCC_TARGET_ARM
397 printf("%s: **** new file\n", file
->filename
);
399 parse_flags
= PARSE_FLAG_PREPROCESS
| PARSE_FLAG_TOK_NUM
| PARSE_FLAG_TOK_STR
;
402 gen_inline_functions(s1
);
404 /* end of translation unit info */
406 tcc_eh_frame_end(s1
);
413 ST_FUNC
void tccgen_finish(TCCState
*s1
)
415 tcc_debug_end(s1
); /* just in case of errors: free memory */
416 free_inline_functions(s1
);
417 sym_pop(&global_stack
, NULL
, 0);
418 sym_pop(&local_stack
, NULL
, 0);
419 /* free preprocessor macros */
422 dynarray_reset(&sym_pools
, &nb_sym_pools
);
424 dynarray_reset(&stk_data
, &nb_stk_data
);
430 pending_gotos
= NULL
;
431 nb_temp_local_vars
= 0;
432 global_label_stack
= NULL
;
433 local_label_stack
= NULL
;
434 cur_text_section
= NULL
;
435 sym_free_first
= NULL
;
438 /* ------------------------------------------------------------------------- */
439 ST_FUNC ElfSym
*elfsym(Sym
*s
)
443 return &((ElfSym
*)symtab_section
->data
)[s
->c
];
446 /* apply storage attributes to Elf symbol */
447 ST_FUNC
void update_storage(Sym
*sym
)
450 int sym_bind
, old_sym_bind
;
456 if (sym
->a
.visibility
)
457 esym
->st_other
= (esym
->st_other
& ~ELFW(ST_VISIBILITY
)(-1))
460 if (sym
->type
.t
& (VT_STATIC
| VT_INLINE
))
461 sym_bind
= STB_LOCAL
;
462 else if (sym
->a
.weak
)
465 sym_bind
= STB_GLOBAL
;
466 old_sym_bind
= ELFW(ST_BIND
)(esym
->st_info
);
467 if (sym_bind
!= old_sym_bind
) {
468 esym
->st_info
= ELFW(ST_INFO
)(sym_bind
, ELFW(ST_TYPE
)(esym
->st_info
));
472 if (sym
->a
.dllimport
)
473 esym
->st_other
|= ST_PE_IMPORT
;
474 if (sym
->a
.dllexport
)
475 esym
->st_other
|= ST_PE_EXPORT
;
479 printf("storage %s: bind=%c vis=%d exp=%d imp=%d\n",
480 get_tok_str(sym
->v
, NULL
),
481 sym_bind
== STB_WEAK
? 'w' : sym_bind
== STB_LOCAL
? 'l' : 'g',
489 /* ------------------------------------------------------------------------- */
490 /* update sym->c so that it points to an external symbol in section
491 'section' with value 'value' */
493 ST_FUNC
void put_extern_sym2(Sym
*sym
, int sh_num
,
494 addr_t value
, unsigned long size
,
495 int can_add_underscore
)
497 int sym_type
, sym_bind
, info
, other
, t
;
503 name
= get_tok_str(sym
->v
, NULL
);
505 if ((t
& VT_BTYPE
) == VT_FUNC
) {
507 } else if ((t
& VT_BTYPE
) == VT_VOID
) {
508 sym_type
= STT_NOTYPE
;
509 if ((t
& (VT_BTYPE
|VT_ASM_FUNC
)) == VT_ASM_FUNC
)
512 sym_type
= STT_OBJECT
;
514 if (t
& (VT_STATIC
| VT_INLINE
))
515 sym_bind
= STB_LOCAL
;
517 sym_bind
= STB_GLOBAL
;
521 if (sym_type
== STT_FUNC
&& sym
->type
.ref
) {
522 Sym
*ref
= sym
->type
.ref
;
523 if (ref
->a
.nodecorate
) {
524 can_add_underscore
= 0;
526 if (ref
->f
.func_call
== FUNC_STDCALL
&& can_add_underscore
) {
527 sprintf(buf1
, "_%s@%d", name
, ref
->f
.func_args
* PTR_SIZE
);
529 other
|= ST_PE_STDCALL
;
530 can_add_underscore
= 0;
535 if (sym
->asm_label
) {
536 name
= get_tok_str(sym
->asm_label
, NULL
);
537 can_add_underscore
= 0;
540 if (tcc_state
->leading_underscore
&& can_add_underscore
) {
542 pstrcpy(buf1
+ 1, sizeof(buf1
) - 1, name
);
546 info
= ELFW(ST_INFO
)(sym_bind
, sym_type
);
547 sym
->c
= put_elf_sym(symtab_section
, value
, size
, info
, other
, sh_num
, name
);
550 tcc_debug_extern_sym(tcc_state
, sym
, sh_num
, sym_bind
, sym_type
);
554 esym
->st_value
= value
;
555 esym
->st_size
= size
;
556 esym
->st_shndx
= sh_num
;
561 ST_FUNC
void put_extern_sym(Sym
*sym
, Section
*s
, addr_t value
, unsigned long size
)
563 if (nocode_wanted
&& (NODATA_WANTED
|| (s
&& s
== cur_text_section
)))
565 put_extern_sym2(sym
, s
? s
->sh_num
: SHN_UNDEF
, value
, size
, 1);
568 /* add a new relocation entry to symbol 'sym' in section 's' */
569 ST_FUNC
void greloca(Section
*s
, Sym
*sym
, unsigned long offset
, int type
,
574 if (nocode_wanted
&& s
== cur_text_section
)
579 put_extern_sym(sym
, NULL
, 0, 0);
583 /* now we can add ELF relocation info */
584 put_elf_reloca(symtab_section
, s
, offset
, type
, c
, addend
);
588 ST_FUNC
void greloc(Section
*s
, Sym
*sym
, unsigned long offset
, int type
)
590 greloca(s
, sym
, offset
, type
, 0);
594 /* ------------------------------------------------------------------------- */
595 /* symbol allocator */
596 static Sym
*__sym_malloc(void)
598 Sym
*sym_pool
, *sym
, *last_sym
;
601 sym_pool
= tcc_malloc(SYM_POOL_NB
* sizeof(Sym
));
602 dynarray_add(&sym_pools
, &nb_sym_pools
, sym_pool
);
604 last_sym
= sym_free_first
;
606 for(i
= 0; i
< SYM_POOL_NB
; i
++) {
607 sym
->next
= last_sym
;
611 sym_free_first
= last_sym
;
615 static inline Sym
*sym_malloc(void)
619 sym
= sym_free_first
;
621 sym
= __sym_malloc();
622 sym_free_first
= sym
->next
;
625 sym
= tcc_malloc(sizeof(Sym
));
630 ST_INLN
void sym_free(Sym
*sym
)
633 sym
->next
= sym_free_first
;
634 sym_free_first
= sym
;
640 /* push, without hashing */
641 ST_FUNC Sym
*sym_push2(Sym
**ps
, int v
, int t
, int c
)
646 memset(s
, 0, sizeof *s
);
656 /* find a symbol and return its associated structure. 's' is the top
657 of the symbol stack */
658 ST_FUNC Sym
*sym_find2(Sym
*s
, int v
)
668 /* structure lookup */
669 ST_INLN Sym
*struct_find(int v
)
672 if ((unsigned)v
>= (unsigned)(tok_ident
- TOK_IDENT
))
674 return table_ident
[v
]->sym_struct
;
677 /* find an identifier */
678 ST_INLN Sym
*sym_find(int v
)
681 if ((unsigned)v
>= (unsigned)(tok_ident
- TOK_IDENT
))
683 return table_ident
[v
]->sym_identifier
;
686 static int sym_scope(Sym
*s
)
688 if (IS_ENUM_VAL (s
->type
.t
))
689 return s
->type
.ref
->sym_scope
;
694 /* push a given symbol on the symbol stack */
695 ST_FUNC Sym
*sym_push(int v
, CType
*type
, int r
, int c
)
704 s
= sym_push2(ps
, v
, type
->t
, c
);
705 s
->type
.ref
= type
->ref
;
707 /* don't record fields or anonymous symbols */
709 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
710 /* record symbol in token array */
711 ts
= table_ident
[(v
& ~SYM_STRUCT
) - TOK_IDENT
];
713 ps
= &ts
->sym_struct
;
715 ps
= &ts
->sym_identifier
;
718 s
->sym_scope
= local_scope
;
719 if (s
->prev_tok
&& sym_scope(s
->prev_tok
) == s
->sym_scope
)
720 tcc_error("redeclaration of '%s'",
721 get_tok_str(v
& ~SYM_STRUCT
, NULL
));
726 /* push a global identifier */
727 ST_FUNC Sym
*global_identifier_push(int v
, int t
, int c
)
730 s
= sym_push2(&global_stack
, v
, t
, c
);
731 s
->r
= VT_CONST
| VT_SYM
;
732 /* don't record anonymous symbol */
733 if (v
< SYM_FIRST_ANOM
) {
734 ps
= &table_ident
[v
- TOK_IDENT
]->sym_identifier
;
735 /* modify the top most local identifier, so that sym_identifier will
736 point to 's' when popped; happens when called from inline asm */
737 while (*ps
!= NULL
&& (*ps
)->sym_scope
)
738 ps
= &(*ps
)->prev_tok
;
745 /* pop symbols until top reaches 'b'. If KEEP is non-zero don't really
746 pop them yet from the list, but do remove them from the token array. */
747 ST_FUNC
void sym_pop(Sym
**ptop
, Sym
*b
, int keep
)
757 /* remove symbol in token array */
759 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
760 ts
= table_ident
[(v
& ~SYM_STRUCT
) - TOK_IDENT
];
762 ps
= &ts
->sym_struct
;
764 ps
= &ts
->sym_identifier
;
776 ST_FUNC Sym
*label_find(int v
)
779 if ((unsigned)v
>= (unsigned)(tok_ident
- TOK_IDENT
))
781 return table_ident
[v
]->sym_label
;
784 ST_FUNC Sym
*label_push(Sym
**ptop
, int v
, int flags
)
787 s
= sym_push2(ptop
, v
, VT_STATIC
, 0);
789 ps
= &table_ident
[v
- TOK_IDENT
]->sym_label
;
790 if (ptop
== &global_label_stack
) {
791 /* modify the top most local identifier, so that
792 sym_identifier will point to 's' when popped */
794 ps
= &(*ps
)->prev_tok
;
801 /* pop labels until element last is reached. Look if any labels are
802 undefined. Define symbols if '&&label' was used. */
803 ST_FUNC
void label_pop(Sym
**ptop
, Sym
*slast
, int keep
)
806 for(s
= *ptop
; s
!= slast
; s
= s1
) {
808 if (s
->r
== LABEL_DECLARED
) {
809 tcc_warning_c(warn_all
)("label '%s' declared but not used", get_tok_str(s
->v
, NULL
));
810 } else if (s
->r
== LABEL_FORWARD
) {
811 tcc_error("label '%s' used but not defined",
812 get_tok_str(s
->v
, NULL
));
815 /* define corresponding symbol. A size of
817 put_extern_sym(s
, cur_text_section
, s
->jnext
, 1);
821 if (s
->r
!= LABEL_GONE
)
822 table_ident
[s
->v
- TOK_IDENT
]->sym_label
= s
->prev_tok
;
832 /* ------------------------------------------------------------------------- */
833 static void vcheck_cmp(void)
835 /* cannot leave cpu flags set if other instructions are generated. Also
836 avoid leaving VT_JMP anywhere except on the top of the stack
837 because it would complicate the code generator.
839 Don't do this when nocode_wanted. vtop might come from
840 !nocode_wanted regions (see 88_codeopt.c) and transforming
841 it to a register without actually generating code is wrong
842 as their value might still be used for real. All values
843 we push under nocode_wanted will eventually be popped
844 again, so that the VT_CMP/VT_JMP value will be in vtop
845 when code is unsuppressed again. */
847 /* However if it's just automatic suppression via CODE_OFF/ON()
848 then it seems that we better let things work undisturbed.
849 How can it work at all under nocode_wanted? Well, gv() will
850 actually clear it at the gsym() in load()/VT_JMP in the
851 generator backends */
853 if (vtop
->r
== VT_CMP
&& 0 == (nocode_wanted
& ~CODE_OFF_BIT
))
857 static void vsetc(CType
*type
, int r
, CValue
*vc
)
859 if (vtop
>= vstack
+ (VSTACK_SIZE
- 1))
860 tcc_error("memory full (vstack)");
870 ST_FUNC
void vswap(void)
880 /* pop stack value */
881 ST_FUNC
void vpop(void)
884 v
= vtop
->r
& VT_VALMASK
;
885 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
886 /* for x86, we need to pop the FP stack */
888 o(0xd8dd); /* fstp %st(0) */
892 /* need to put correct jump if && or || without test */
899 /* push constant of type "type" with useless value */
900 static void vpush(CType
*type
)
902 vset(type
, VT_CONST
, 0);
905 /* push arbitrary 64bit constant */
906 static void vpush64(int ty
, unsigned long long v
)
913 vsetc(&ctype
, VT_CONST
, &cval
);
916 /* push integer constant */
917 ST_FUNC
void vpushi(int v
)
922 /* push a pointer sized constant */
923 static void vpushs(addr_t v
)
925 vpush64(VT_SIZE_T
, v
);
928 /* push long long constant */
929 static inline void vpushll(long long v
)
931 vpush64(VT_LLONG
, v
);
934 ST_FUNC
void vset(CType
*type
, int r
, int v
)
938 vsetc(type
, r
, &cval
);
941 static void vseti(int r
, int v
)
949 ST_FUNC
void vpushv(SValue
*v
)
951 if (vtop
>= vstack
+ (VSTACK_SIZE
- 1))
952 tcc_error("memory full (vstack)");
957 static void vdup(void)
962 /* rotate the stack element at position n-1 to the top */
963 ST_FUNC
void vrotb(int n
)
970 memmove(vtop
- n
, vtop
- n
+ 1, sizeof *vtop
* n
);
974 /* rotate the top stack element into position n-1 */
975 ST_FUNC
void vrott(int n
)
982 memmove(vtop
- n
+ 1, vtop
- n
, sizeof *vtop
* n
);
986 /* reverse order of the first n stack elements */
987 ST_FUNC
void vrev(int n
)
992 for (i
= 0, n
= -n
; i
> ++n
; --i
)
993 tmp
= vtop
[i
], vtop
[i
] = vtop
[n
], vtop
[n
] = tmp
;
996 /* ------------------------------------------------------------------------- */
997 /* vtop->r = VT_CMP means CPU-flags have been set from comparison or test. */
999 /* called from generators to set the result from relational ops */
1000 ST_FUNC
void vset_VT_CMP(int op
)
1008 /* called once before asking generators to load VT_CMP to a register */
1009 static void vset_VT_JMP(void)
1011 int op
= vtop
->cmp_op
;
1013 if (vtop
->jtrue
|| vtop
->jfalse
) {
1014 int origt
= vtop
->type
.t
;
1015 /* we need to jump to 'mov $0,%R' or 'mov $1,%R' */
1016 int inv
= op
& (op
< 2); /* small optimization */
1017 vseti(VT_JMP
+inv
, gvtst(inv
, 0));
1018 vtop
->type
.t
|= origt
& (VT_UNSIGNED
| VT_DEFSIGN
);
1020 /* otherwise convert flags (rsp. 0/1) to register */
1022 if (op
< 2) /* doesn't seem to happen */
1027 /* Set CPU Flags, doesn't yet jump */
1028 static void gvtst_set(int inv
, int t
)
1032 if (vtop
->r
!= VT_CMP
) {
1035 if (vtop
->r
!= VT_CMP
) /* must be VT_CONST then */
1036 vset_VT_CMP(vtop
->c
.i
!= 0);
1039 p
= inv
? &vtop
->jfalse
: &vtop
->jtrue
;
1040 *p
= gjmp_append(*p
, t
);
1043 /* Generate value test
1045 * Generate a test for any value (jump, comparison and integers) */
1046 static int gvtst(int inv
, int t
)
1051 t
= vtop
->jtrue
, u
= vtop
->jfalse
;
1053 x
= u
, u
= t
, t
= x
;
1056 /* jump to the wanted target */
1058 t
= gjmp_cond(op
^ inv
, t
);
1061 /* resolve complementary jumps to here */
1068 /* generate a zero or nozero test */
1069 static void gen_test_zero(int op
)
1071 if (vtop
->r
== VT_CMP
) {
1075 vtop
->jfalse
= vtop
->jtrue
;
1085 /* ------------------------------------------------------------------------- */
1086 /* push a symbol value of TYPE */
1087 ST_FUNC
void vpushsym(CType
*type
, Sym
*sym
)
1091 vsetc(type
, VT_CONST
| VT_SYM
, &cval
);
1095 /* Return a static symbol pointing to a section */
1096 ST_FUNC Sym
*get_sym_ref(CType
*type
, Section
*sec
, unsigned long offset
, unsigned long size
)
1102 sym
= sym_push(v
, type
, VT_CONST
| VT_SYM
, 0);
1103 sym
->type
.t
|= VT_STATIC
;
1104 put_extern_sym(sym
, sec
, offset
, size
);
1108 /* push a reference to a section offset by adding a dummy symbol */
1109 static void vpush_ref(CType
*type
, Section
*sec
, unsigned long offset
, unsigned long size
)
1111 vpushsym(type
, get_sym_ref(type
, sec
, offset
, size
));
1114 /* define a new external reference to a symbol 'v' of type 'u' */
1115 ST_FUNC Sym
*external_global_sym(int v
, CType
*type
)
1121 /* push forward reference */
1122 s
= global_identifier_push(v
, type
->t
| VT_EXTERN
, 0);
1123 s
->type
.ref
= type
->ref
;
1124 } else if (IS_ASM_SYM(s
)) {
1125 s
->type
.t
= type
->t
| (s
->type
.t
& VT_EXTERN
);
1126 s
->type
.ref
= type
->ref
;
1132 /* create an external reference with no specific type similar to asm labels.
1133 This avoids type conflicts if the symbol is used from C too */
1134 ST_FUNC Sym
*external_helper_sym(int v
)
1136 CType ct
= { VT_ASM_FUNC
, NULL
};
1137 return external_global_sym(v
, &ct
);
1140 /* push a reference to a helper function (such as memmove) */
1141 ST_FUNC
void vpush_helper_func(int v
)
1143 vpushsym(&func_old_type
, external_helper_sym(v
));
1146 /* Merge symbol attributes. */
1147 static void merge_symattr(struct SymAttr
*sa
, struct SymAttr
*sa1
)
1149 if (sa1
->aligned
&& !sa
->aligned
)
1150 sa
->aligned
= sa1
->aligned
;
1151 sa
->packed
|= sa1
->packed
;
1152 sa
->weak
|= sa1
->weak
;
1153 sa
->nodebug
|= sa1
->nodebug
;
1154 if (sa1
->visibility
!= STV_DEFAULT
) {
1155 int vis
= sa
->visibility
;
1156 if (vis
== STV_DEFAULT
1157 || vis
> sa1
->visibility
)
1158 vis
= sa1
->visibility
;
1159 sa
->visibility
= vis
;
1161 sa
->dllexport
|= sa1
->dllexport
;
1162 sa
->nodecorate
|= sa1
->nodecorate
;
1163 sa
->dllimport
|= sa1
->dllimport
;
1166 /* Merge function attributes. */
1167 static void merge_funcattr(struct FuncAttr
*fa
, struct FuncAttr
*fa1
)
1169 if (fa1
->func_call
&& !fa
->func_call
)
1170 fa
->func_call
= fa1
->func_call
;
1171 if (fa1
->func_type
&& !fa
->func_type
)
1172 fa
->func_type
= fa1
->func_type
;
1173 if (fa1
->func_args
&& !fa
->func_args
)
1174 fa
->func_args
= fa1
->func_args
;
1175 if (fa1
->func_noreturn
)
1176 fa
->func_noreturn
= 1;
1183 /* Merge attributes. */
1184 static void merge_attr(AttributeDef
*ad
, AttributeDef
*ad1
)
1186 merge_symattr(&ad
->a
, &ad1
->a
);
1187 merge_funcattr(&ad
->f
, &ad1
->f
);
1190 ad
->section
= ad1
->section
;
1191 if (ad1
->alias_target
)
1192 ad
->alias_target
= ad1
->alias_target
;
1194 ad
->asm_label
= ad1
->asm_label
;
1196 ad
->attr_mode
= ad1
->attr_mode
;
1199 /* Merge some type attributes. */
1200 static void patch_type(Sym
*sym
, CType
*type
)
1202 if (!(type
->t
& VT_EXTERN
) || IS_ENUM_VAL(sym
->type
.t
)) {
1203 if (!(sym
->type
.t
& VT_EXTERN
))
1204 tcc_error("redefinition of '%s'", get_tok_str(sym
->v
, NULL
));
1205 sym
->type
.t
&= ~VT_EXTERN
;
1208 if (IS_ASM_SYM(sym
)) {
1209 /* stay static if both are static */
1210 sym
->type
.t
= type
->t
& (sym
->type
.t
| ~VT_STATIC
);
1211 sym
->type
.ref
= type
->ref
;
1212 if ((type
->t
& VT_BTYPE
) != VT_FUNC
&& !(type
->t
& VT_ARRAY
))
1216 if (!is_compatible_types(&sym
->type
, type
)) {
1217 tcc_error("incompatible types for redefinition of '%s'",
1218 get_tok_str(sym
->v
, NULL
));
1220 } else if ((sym
->type
.t
& VT_BTYPE
) == VT_FUNC
) {
1221 int static_proto
= sym
->type
.t
& VT_STATIC
;
1222 /* warn if static follows non-static function declaration */
1223 if ((type
->t
& VT_STATIC
) && !static_proto
1224 /* XXX this test for inline shouldn't be here. Until we
1225 implement gnu-inline mode again it silences a warning for
1226 mingw caused by our workarounds. */
1227 && !((type
->t
| sym
->type
.t
) & VT_INLINE
))
1228 tcc_warning("static storage ignored for redefinition of '%s'",
1229 get_tok_str(sym
->v
, NULL
));
1231 /* set 'inline' if both agree or if one has static */
1232 if ((type
->t
| sym
->type
.t
) & VT_INLINE
) {
1233 if (!((type
->t
^ sym
->type
.t
) & VT_INLINE
)
1234 || ((type
->t
| sym
->type
.t
) & VT_STATIC
))
1235 static_proto
|= VT_INLINE
;
1238 if (0 == (type
->t
& VT_EXTERN
)) {
1239 struct FuncAttr f
= sym
->type
.ref
->f
;
1240 /* put complete type, use static from prototype */
1241 sym
->type
.t
= (type
->t
& ~(VT_STATIC
|VT_INLINE
)) | static_proto
;
1242 sym
->type
.ref
= type
->ref
;
1243 merge_funcattr(&sym
->type
.ref
->f
, &f
);
1245 sym
->type
.t
&= ~VT_INLINE
| static_proto
;
1248 if (sym
->type
.ref
->f
.func_type
== FUNC_OLD
1249 && type
->ref
->f
.func_type
!= FUNC_OLD
) {
1250 sym
->type
.ref
= type
->ref
;
1254 if ((sym
->type
.t
& VT_ARRAY
) && type
->ref
->c
>= 0) {
1255 /* set array size if it was omitted in extern declaration */
1256 sym
->type
.ref
->c
= type
->ref
->c
;
1258 if ((type
->t
^ sym
->type
.t
) & VT_STATIC
)
1259 tcc_warning("storage mismatch for redefinition of '%s'",
1260 get_tok_str(sym
->v
, NULL
));
1264 /* Merge some storage attributes. */
1265 static void patch_storage(Sym
*sym
, AttributeDef
*ad
, CType
*type
)
1268 patch_type(sym
, type
);
1270 #ifdef TCC_TARGET_PE
1271 if (sym
->a
.dllimport
!= ad
->a
.dllimport
)
1272 tcc_error("incompatible dll linkage for redefinition of '%s'",
1273 get_tok_str(sym
->v
, NULL
));
1275 merge_symattr(&sym
->a
, &ad
->a
);
1277 sym
->asm_label
= ad
->asm_label
;
1278 update_storage(sym
);
1281 /* copy sym to other stack */
1282 static Sym
*sym_copy(Sym
*s0
, Sym
**ps
)
1285 s
= sym_malloc(), *s
= *s0
;
1286 s
->prev
= *ps
, *ps
= s
;
1287 if (s
->v
< SYM_FIRST_ANOM
) {
1288 ps
= &table_ident
[s
->v
- TOK_IDENT
]->sym_identifier
;
1289 s
->prev_tok
= *ps
, *ps
= s
;
1294 /* copy s->type.ref to stack 'ps' for VT_FUNC and VT_PTR */
1295 static void sym_copy_ref(Sym
*s
, Sym
**ps
)
1297 int bt
= s
->type
.t
& VT_BTYPE
;
1298 if (bt
== VT_FUNC
|| bt
== VT_PTR
|| (bt
== VT_STRUCT
&& s
->sym_scope
)) {
1299 Sym
**sp
= &s
->type
.ref
;
1300 for (s
= *sp
, *sp
= NULL
; s
; s
= s
->next
) {
1301 Sym
*s2
= sym_copy(s
, ps
);
1302 sp
= &(*sp
= s2
)->next
;
1303 sym_copy_ref(s2
, ps
);
1308 /* define a new external reference to a symbol 'v' */
1309 static Sym
*external_sym(int v
, CType
*type
, int r
, AttributeDef
*ad
)
1313 /* look for global symbol */
1315 while (s
&& s
->sym_scope
)
1319 /* push forward reference */
1320 s
= global_identifier_push(v
, type
->t
, 0);
1323 s
->asm_label
= ad
->asm_label
;
1324 s
->type
.ref
= type
->ref
;
1325 /* copy type to the global stack */
1327 sym_copy_ref(s
, &global_stack
);
1329 patch_storage(s
, ad
, type
);
1331 /* push variables on local_stack if any */
1332 if (local_stack
&& (s
->type
.t
& VT_BTYPE
) != VT_FUNC
)
1333 s
= sym_copy(s
, &local_stack
);
1337 /* save registers up to (vtop - n) stack entry */
1338 ST_FUNC
void save_regs(int n
)
1341 for(p
= vstack
, p1
= vtop
- n
; p
<= p1
; p
++)
1345 /* save r to the memory stack, and mark it as being free */
1346 ST_FUNC
void save_reg(int r
)
1348 save_reg_upstack(r
, 0);
1351 /* save r to the memory stack, and mark it as being free,
1352 if seen up to (vtop - n) stack entry */
1353 ST_FUNC
void save_reg_upstack(int r
, int n
)
1355 int l
, size
, align
, bt
, r2
;
1358 if ((r
&= VT_VALMASK
) >= VT_CONST
)
1363 for(p
= vstack
, p1
= vtop
- n
; p
<= p1
; p
++) {
1364 if ((p
->r
& VT_VALMASK
) == r
|| p
->r2
== r
) {
1365 /* must save value on stack if not already done */
1367 bt
= p
->type
.t
& VT_BTYPE
;
1370 if ((p
->r
& VT_LVAL
) || bt
== VT_FUNC
)
1373 size
= type_size(&sv
.type
, &align
);
1374 l
= get_temp_local_var(size
, align
, &r2
);
1375 sv
.r
= VT_LOCAL
| VT_LVAL
;
1377 store(p
->r
& VT_VALMASK
, &sv
);
1378 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
1379 /* x86 specific: need to pop fp register ST0 if saved */
1380 if (r
== TREG_ST0
) {
1381 o(0xd8dd); /* fstp %st(0) */
1384 /* special long long case */
1385 if (p
->r2
< VT_CONST
&& USING_TWO_WORDS(bt
)) {
1390 /* mark that stack entry as being saved on the stack */
1391 if (p
->r
& VT_LVAL
) {
1392 /* also clear the bounded flag because the
1393 relocation address of the function was stored in
1395 p
->r
= (p
->r
& ~(VT_VALMASK
| VT_BOUNDED
)) | VT_LLOCAL
;
1397 p
->r
= VT_LVAL
| VT_LOCAL
;
1398 p
->type
.t
&= ~VT_ARRAY
; /* cannot combine VT_LVAL with VT_ARRAY */
1407 #ifdef TCC_TARGET_ARM
1408 /* find a register of class 'rc2' with at most one reference on stack.
1409 * If none, call get_reg(rc) */
1410 ST_FUNC
int get_reg_ex(int rc
, int rc2
)
1415 for(r
=0;r
<NB_REGS
;r
++) {
1416 if (reg_classes
[r
] & rc2
) {
1419 for(p
= vstack
; p
<= vtop
; p
++) {
1420 if ((p
->r
& VT_VALMASK
) == r
||
1432 /* find a free register of class 'rc'. If none, save one register */
1433 ST_FUNC
int get_reg(int rc
)
1438 /* find a free register */
1439 for(r
=0;r
<NB_REGS
;r
++) {
1440 if (reg_classes
[r
] & rc
) {
1443 for(p
=vstack
;p
<=vtop
;p
++) {
1444 if ((p
->r
& VT_VALMASK
) == r
||
1453 /* no register left : free the first one on the stack (VERY
1454 IMPORTANT to start from the bottom to ensure that we don't
1455 spill registers used in gen_opi()) */
1456 for(p
=vstack
;p
<=vtop
;p
++) {
1457 /* look at second register (if long long) */
1459 if (r
< VT_CONST
&& (reg_classes
[r
] & rc
))
1461 r
= p
->r
& VT_VALMASK
;
1462 if (r
< VT_CONST
&& (reg_classes
[r
] & rc
)) {
1468 /* Should never comes here */
1472 /* find a free temporary local variable (return the offset on stack) match
1473 size and align. If none, add new temporary stack variable */
1474 static int get_temp_local_var(int size
,int align
, int *r2
)
1477 struct temp_local_variable
*temp_var
;
1482 /* mark locations that are still in use */
1483 for (p
= vstack
; p
<= vtop
; p
++) {
1484 r
= p
->r
& VT_VALMASK
;
1485 if (r
== VT_LOCAL
|| r
== VT_LLOCAL
) {
1486 r
= p
->r2
- (VT_CONST
+ 1);
1487 if (r
>= 0 && r
< MAX_TEMP_LOCAL_VARIABLE_NUMBER
)
1491 for (i
=0;i
<nb_temp_local_vars
;i
++) {
1492 temp_var
=&arr_temp_local_vars
[i
];
1494 && temp_var
->size
>=size
1495 && temp_var
->align
>=align
) {
1497 *r2
= (VT_CONST
+ 1) + i
;
1498 return temp_var
->location
;
1501 loc
= (loc
- size
) & -align
;
1502 if (nb_temp_local_vars
<MAX_TEMP_LOCAL_VARIABLE_NUMBER
) {
1503 temp_var
=&arr_temp_local_vars
[i
];
1504 temp_var
->location
=loc
;
1505 temp_var
->size
=size
;
1506 temp_var
->align
=align
;
1507 nb_temp_local_vars
++;
1514 /* move register 's' (of type 't') to 'r', and flush previous value of r to memory
1516 static void move_reg(int r
, int s
, int t
)
1530 /* get address of vtop (vtop MUST BE an lvalue) */
1531 ST_FUNC
void gaddrof(void)
1533 vtop
->r
&= ~VT_LVAL
;
1534 /* tricky: if saved lvalue, then we can go back to lvalue */
1535 if ((vtop
->r
& VT_VALMASK
) == VT_LLOCAL
)
1536 vtop
->r
= (vtop
->r
& ~VT_VALMASK
) | VT_LOCAL
| VT_LVAL
;
1539 #ifdef CONFIG_TCC_BCHECK
1540 /* generate a bounded pointer addition */
1541 static void gen_bounded_ptr_add(void)
1543 int save
= (vtop
[-1].r
& VT_VALMASK
) == VT_LOCAL
;
1548 vpush_helper_func(TOK___bound_ptr_add
);
1553 /* returned pointer is in REG_IRET */
1554 vtop
->r
= REG_IRET
| VT_BOUNDED
;
1557 /* relocation offset of the bounding function call point */
1558 vtop
->c
.i
= (cur_text_section
->reloc
->data_offset
- sizeof(ElfW_Rel
));
1561 /* patch pointer addition in vtop so that pointer dereferencing is
1563 static void gen_bounded_ptr_deref(void)
1573 size
= type_size(&vtop
->type
, &align
);
1575 case 1: func
= TOK___bound_ptr_indir1
; break;
1576 case 2: func
= TOK___bound_ptr_indir2
; break;
1577 case 4: func
= TOK___bound_ptr_indir4
; break;
1578 case 8: func
= TOK___bound_ptr_indir8
; break;
1579 case 12: func
= TOK___bound_ptr_indir12
; break;
1580 case 16: func
= TOK___bound_ptr_indir16
; break;
1582 /* may happen with struct member access */
1585 sym
= external_helper_sym(func
);
1587 put_extern_sym(sym
, NULL
, 0, 0);
1588 /* patch relocation */
1589 /* XXX: find a better solution ? */
1590 rel
= (ElfW_Rel
*)(cur_text_section
->reloc
->data
+ vtop
->c
.i
);
1591 rel
->r_info
= ELFW(R_INFO
)(sym
->c
, ELFW(R_TYPE
)(rel
->r_info
));
1594 /* generate lvalue bound code */
1595 static void gbound(void)
1599 vtop
->r
&= ~VT_MUSTBOUND
;
1600 /* if lvalue, then use checking code before dereferencing */
1601 if (vtop
->r
& VT_LVAL
) {
1602 /* if not VT_BOUNDED value, then make one */
1603 if (!(vtop
->r
& VT_BOUNDED
)) {
1604 /* must save type because we must set it to int to get pointer */
1606 vtop
->type
.t
= VT_PTR
;
1609 gen_bounded_ptr_add();
1613 /* then check for dereferencing */
1614 gen_bounded_ptr_deref();
1618 /* we need to call __bound_ptr_add before we start to load function
1619 args into registers */
1620 ST_FUNC
void gbound_args(int nb_args
)
1625 for (i
= 1; i
<= nb_args
; ++i
)
1626 if (vtop
[1 - i
].r
& VT_MUSTBOUND
) {
1632 sv
= vtop
- nb_args
;
1633 if (sv
->r
& VT_SYM
) {
1637 #ifndef TCC_TARGET_PE
1638 || v
== TOK_sigsetjmp
1639 || v
== TOK___sigsetjmp
1642 vpush_helper_func(TOK___bound_setjmp
);
1645 func_bound_add_epilog
= 1;
1647 #if defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64
1648 if (v
== TOK_alloca
)
1649 func_bound_add_epilog
= 1;
1652 if (v
== TOK_longjmp
) /* undo rename to __longjmp14 */
1653 sv
->sym
->asm_label
= TOK___bound_longjmp
;
1658 /* Add bounds for local symbols from S to E (via ->prev) */
1659 static void add_local_bounds(Sym
*s
, Sym
*e
)
1661 for (; s
!= e
; s
= s
->prev
) {
1662 if (!s
->v
|| (s
->r
& VT_VALMASK
) != VT_LOCAL
)
1664 /* Add arrays/structs/unions because we always take address */
1665 if ((s
->type
.t
& VT_ARRAY
)
1666 || (s
->type
.t
& VT_BTYPE
) == VT_STRUCT
1667 || s
->a
.addrtaken
) {
1668 /* add local bound info */
1669 int align
, size
= type_size(&s
->type
, &align
);
1670 addr_t
*bounds_ptr
= section_ptr_add(lbounds_section
,
1671 2 * sizeof(addr_t
));
1672 bounds_ptr
[0] = s
->c
;
1673 bounds_ptr
[1] = size
;
1679 /* Wrapper around sym_pop, that potentially also registers local bounds. */
1680 static void pop_local_syms(Sym
*b
, int keep
)
1682 #ifdef CONFIG_TCC_BCHECK
1683 if (tcc_state
->do_bounds_check
&& !keep
&& (local_scope
|| !func_var
))
1684 add_local_bounds(local_stack
, b
);
1687 tcc_add_debug_info (tcc_state
, !local_scope
, local_stack
, b
);
1688 sym_pop(&local_stack
, b
, keep
);
1691 /* increment an lvalue pointer */
1692 static void incr_offset(int offset
)
1694 int t
= vtop
->type
.t
;
1695 gaddrof(); /* remove VT_LVAL */
1696 vtop
->type
.t
= VT_PTRDIFF_T
; /* set scalar type */
1703 static void incr_bf_adr(int o
)
1705 vtop
->type
.t
= VT_BYTE
| VT_UNSIGNED
;
1709 /* single-byte load mode for packed or otherwise unaligned bitfields */
1710 static void load_packed_bf(CType
*type
, int bit_pos
, int bit_size
)
1713 save_reg_upstack(vtop
->r
, 1);
1714 vpush64(type
->t
& VT_BTYPE
, 0); // B X
1715 bits
= 0, o
= bit_pos
>> 3, bit_pos
&= 7;
1724 vpushi(bit_pos
), gen_op(TOK_SHR
), bit_pos
= 0; // X B Y
1726 vpushi((1 << n
) - 1), gen_op('&');
1729 vpushi(bits
), gen_op(TOK_SHL
);
1732 bits
+= n
, bit_size
-= n
, o
= 1;
1735 if (!(type
->t
& VT_UNSIGNED
)) {
1736 n
= ((type
->t
& VT_BTYPE
) == VT_LLONG
? 64 : 32) - bits
;
1737 vpushi(n
), gen_op(TOK_SHL
);
1738 vpushi(n
), gen_op(TOK_SAR
);
1742 /* single-byte store mode for packed or otherwise unaligned bitfields */
1743 static void store_packed_bf(int bit_pos
, int bit_size
)
1745 int bits
, n
, o
, m
, c
;
1746 c
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
1748 save_reg_upstack(vtop
->r
, 1);
1749 bits
= 0, o
= bit_pos
>> 3, bit_pos
&= 7;
1751 incr_bf_adr(o
); // X B
1753 c
? vdup() : gv_dup(); // B V X
1756 vpushi(bits
), gen_op(TOK_SHR
);
1758 vpushi(bit_pos
), gen_op(TOK_SHL
);
1763 m
= ((1 << n
) - 1) << bit_pos
;
1764 vpushi(m
), gen_op('&'); // X B V1
1765 vpushv(vtop
-1); // X B V1 B
1766 vpushi(m
& 0x80 ? ~m
& 0x7f : ~m
);
1767 gen_op('&'); // X B V1 B1
1768 gen_op('|'); // X B V2
1770 vdup(), vtop
[-1] = vtop
[-2]; // X B B V2
1771 vstore(), vpop(); // X B
1772 bits
+= n
, bit_size
-= n
, bit_pos
= 0, o
= 1;
1777 static int adjust_bf(SValue
*sv
, int bit_pos
, int bit_size
)
1780 if (0 == sv
->type
.ref
)
1782 t
= sv
->type
.ref
->auxtype
;
1783 if (t
!= -1 && t
!= VT_STRUCT
) {
1784 sv
->type
.t
= (sv
->type
.t
& ~(VT_BTYPE
| VT_LONG
)) | t
;
1790 /* store vtop a register belonging to class 'rc'. lvalues are
1791 converted to values. Cannot be used if cannot be converted to
1792 register value (such as structures). */
1793 ST_FUNC
int gv(int rc
)
1795 int r
, r2
, r_ok
, r2_ok
, rc2
, bt
;
1796 int bit_pos
, bit_size
, size
, align
;
1798 /* NOTE: get_reg can modify vstack[] */
1799 if (vtop
->type
.t
& VT_BITFIELD
) {
1802 bit_pos
= BIT_POS(vtop
->type
.t
);
1803 bit_size
= BIT_SIZE(vtop
->type
.t
);
1804 /* remove bit field info to avoid loops */
1805 vtop
->type
.t
&= ~VT_STRUCT_MASK
;
1808 type
.t
= vtop
->type
.t
& VT_UNSIGNED
;
1809 if ((vtop
->type
.t
& VT_BTYPE
) == VT_BOOL
)
1810 type
.t
|= VT_UNSIGNED
;
1812 r
= adjust_bf(vtop
, bit_pos
, bit_size
);
1814 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
)
1819 if (r
== VT_STRUCT
) {
1820 load_packed_bf(&type
, bit_pos
, bit_size
);
1822 int bits
= (type
.t
& VT_BTYPE
) == VT_LLONG
? 64 : 32;
1823 /* cast to int to propagate signedness in following ops */
1825 /* generate shifts */
1826 vpushi(bits
- (bit_pos
+ bit_size
));
1828 vpushi(bits
- bit_size
);
1829 /* NOTE: transformed to SHR if unsigned */
1834 if (is_float(vtop
->type
.t
) &&
1835 (vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
) {
1836 /* CPUs usually cannot use float constants, so we store them
1837 generically in data segment */
1838 init_params p
= { rodata_section
};
1839 unsigned long offset
;
1840 size
= type_size(&vtop
->type
, &align
);
1842 size
= 0, align
= 1;
1843 offset
= section_add(p
.sec
, size
, align
);
1844 vpush_ref(&vtop
->type
, p
.sec
, offset
, size
);
1846 init_putv(&p
, &vtop
->type
, offset
);
1849 #ifdef CONFIG_TCC_BCHECK
1850 if (vtop
->r
& VT_MUSTBOUND
)
1854 bt
= vtop
->type
.t
& VT_BTYPE
;
1856 #ifdef TCC_TARGET_RISCV64
1858 if (bt
== VT_LDOUBLE
&& rc
== RC_FLOAT
)
1861 rc2
= RC2_TYPE(bt
, rc
);
1863 /* need to reload if:
1865 - lvalue (need to dereference pointer)
1866 - already a register, but not in the right class */
1867 r
= vtop
->r
& VT_VALMASK
;
1868 r_ok
= !(vtop
->r
& VT_LVAL
) && (r
< VT_CONST
) && (reg_classes
[r
] & rc
);
1869 r2_ok
= !rc2
|| ((vtop
->r2
< VT_CONST
) && (reg_classes
[vtop
->r2
] & rc2
));
1871 if (!r_ok
|| !r2_ok
) {
1874 if (1 /* we can 'mov (r),r' in cases */
1876 && (reg_classes
[r
] & rc
)
1879 save_reg_upstack(r
, 1);
1885 int load_type
= (bt
== VT_QFLOAT
) ? VT_DOUBLE
: VT_PTRDIFF_T
;
1886 int original_type
= vtop
->type
.t
;
1888 /* two register type load :
1889 expand to two words temporarily */
1890 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
) {
1892 unsigned long long ll
= vtop
->c
.i
;
1893 vtop
->c
.i
= ll
; /* first word */
1895 vtop
->r
= r
; /* save register value */
1896 vpushi(ll
>> 32); /* second word */
1897 } else if (vtop
->r
& VT_LVAL
) {
1898 /* We do not want to modify the long long pointer here.
1899 So we save any other instances down the stack */
1900 save_reg_upstack(vtop
->r
, 1);
1901 /* load from memory */
1902 vtop
->type
.t
= load_type
;
1905 vtop
[-1].r
= r
; /* save register value */
1906 /* increment pointer to get second word */
1907 incr_offset(PTR_SIZE
);
1909 /* move registers */
1912 if (r2_ok
&& vtop
->r2
< VT_CONST
)
1915 vtop
[-1].r
= r
; /* save register value */
1916 vtop
->r
= vtop
[-1].r2
;
1918 /* Allocate second register. Here we rely on the fact that
1919 get_reg() tries first to free r2 of an SValue. */
1923 /* write second register */
1926 vtop
->type
.t
= original_type
;
1928 if (vtop
->r
== VT_CMP
)
1930 /* one register type load */
1935 #ifdef TCC_TARGET_C67
1936 /* uses register pairs for doubles */
1937 if (bt
== VT_DOUBLE
)
1944 /* generate vtop[-1] and vtop[0] in resp. classes rc1 and rc2 */
1945 ST_FUNC
void gv2(int rc1
, int rc2
)
1947 /* generate more generic register first. But VT_JMP or VT_CMP
1948 values must be generated first in all cases to avoid possible
1950 if (vtop
->r
!= VT_CMP
&& rc1
<= rc2
) {
1955 /* test if reload is needed for first register */
1956 if ((vtop
[-1].r
& VT_VALMASK
) >= VT_CONST
) {
1966 /* test if reload is needed for first register */
1967 if ((vtop
[0].r
& VT_VALMASK
) >= VT_CONST
) {
1974 /* expand 64bit on stack in two ints */
1975 ST_FUNC
void lexpand(void)
1978 u
= vtop
->type
.t
& (VT_DEFSIGN
| VT_UNSIGNED
);
1979 v
= vtop
->r
& (VT_VALMASK
| VT_LVAL
);
1980 if (v
== VT_CONST
) {
1983 } else if (v
== (VT_LVAL
|VT_CONST
) || v
== (VT_LVAL
|VT_LOCAL
)) {
1989 vtop
[0].r
= vtop
[-1].r2
;
1990 vtop
[0].r2
= vtop
[-1].r2
= VT_CONST
;
1992 vtop
[0].type
.t
= vtop
[-1].type
.t
= VT_INT
| u
;
1997 /* build a long long from two ints */
1998 static void lbuild(int t
)
2000 gv2(RC_INT
, RC_INT
);
2001 vtop
[-1].r2
= vtop
[0].r
;
2002 vtop
[-1].type
.t
= t
;
2007 /* convert stack entry to register and duplicate its value in another
2009 static void gv_dup(void)
2015 if ((t
& VT_BTYPE
) == VT_LLONG
) {
2016 if (t
& VT_BITFIELD
) {
2026 /* stack: H L L1 H1 */
2036 /* duplicate value */
2046 /* generate CPU independent (unsigned) long long operations */
2047 static void gen_opl(int op
)
2049 int t
, a
, b
, op1
, c
, i
;
2051 unsigned short reg_iret
= REG_IRET
;
2052 unsigned short reg_lret
= REG_IRE2
;
2058 func
= TOK___divdi3
;
2061 func
= TOK___udivdi3
;
2064 func
= TOK___moddi3
;
2067 func
= TOK___umoddi3
;
2074 /* call generic long long function */
2075 vpush_helper_func(func
);
2080 vtop
->r2
= reg_lret
;
2088 //pv("gen_opl A",0,2);
2094 /* stack: L1 H1 L2 H2 */
2099 vtop
[-2] = vtop
[-3];
2102 /* stack: H1 H2 L1 L2 */
2103 //pv("gen_opl B",0,4);
2109 /* stack: H1 H2 L1 L2 ML MH */
2112 /* stack: ML MH H1 H2 L1 L2 */
2116 /* stack: ML MH H1 L2 H2 L1 */
2121 /* stack: ML MH M1 M2 */
2124 } else if (op
== '+' || op
== '-') {
2125 /* XXX: add non carry method too (for MIPS or alpha) */
2131 /* stack: H1 H2 (L1 op L2) */
2134 gen_op(op1
+ 1); /* TOK_xxxC2 */
2137 /* stack: H1 H2 (L1 op L2) */
2140 /* stack: (L1 op L2) H1 H2 */
2142 /* stack: (L1 op L2) (H1 op H2) */
2150 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
2151 t
= vtop
[-1].type
.t
;
2155 /* stack: L H shift */
2157 /* constant: simpler */
2158 /* NOTE: all comments are for SHL. the other cases are
2159 done by swapping words */
2170 if (op
!= TOK_SAR
) {
2203 /* XXX: should provide a faster fallback on x86 ? */
2206 func
= TOK___ashrdi3
;
2209 func
= TOK___lshrdi3
;
2212 func
= TOK___ashldi3
;
2218 /* compare operations */
2224 /* stack: L1 H1 L2 H2 */
2226 vtop
[-1] = vtop
[-2];
2228 /* stack: L1 L2 H1 H2 */
2229 if (!cur_switch
|| cur_switch
->bsym
) {
2230 /* avoid different registers being saved in branches.
2231 This is not needed when comparing switch cases */
2236 /* when values are equal, we need to compare low words. since
2237 the jump is inverted, we invert the test too. */
2240 else if (op1
== TOK_GT
)
2242 else if (op1
== TOK_ULT
)
2244 else if (op1
== TOK_UGT
)
2254 /* generate non equal test */
2256 vset_VT_CMP(TOK_NE
);
2260 /* compare low. Always unsigned */
2264 else if (op1
== TOK_LE
)
2266 else if (op1
== TOK_GT
)
2268 else if (op1
== TOK_GE
)
2271 #if 0//def TCC_TARGET_I386
2272 if (op
== TOK_NE
) { gsym(b
); break; }
2273 if (op
== TOK_EQ
) { gsym(a
); break; }
2282 /* normalize values */
2283 static uint64_t value64(uint64_t l1
, int t
)
2285 if ((t
& VT_BTYPE
) == VT_LLONG
2286 || (PTR_SIZE
== 8 && (t
& VT_BTYPE
) == VT_PTR
))
2288 else if (t
& VT_UNSIGNED
)
2289 return (uint32_t)l1
;
2291 return (uint32_t)l1
| -(l1
& 0x80000000);
/* signed 64-bit division on values carried in uint64_t:
   divide the magnitudes, then restore the quotient's sign
   (sign of a XOR sign of b). Avoids signed-overflow UB. */
static uint64_t gen_opic_sdiv(uint64_t a, uint64_t b)
{
    uint64_t am = (a >> 63) ? -a : a;   /* |a| (unsigned negate is well-defined) */
    uint64_t bm = (b >> 63) ? -b : b;   /* |b| */
    uint64_t q = am / bm;
    return ((a ^ b) >> 63) ? -q : q;
}
/* signed '<' on values carried in uint64_t: flipping the sign bit of
   both operands makes the unsigned comparison order them as signed */
static int gen_opic_lt(uint64_t a, uint64_t b)
{
    const uint64_t sign_bit = (uint64_t)1 << 63;
    return (a ^ sign_bit) < (b ^ sign_bit);
}
2305 /* handle integer constant optimizations and various machine
2307 static void gen_opic(int op
)
2309 SValue
*v1
= vtop
- 1;
2311 int t1
= v1
->type
.t
& VT_BTYPE
;
2312 int t2
= v2
->type
.t
& VT_BTYPE
;
2313 int c1
= (v1
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2314 int c2
= (v2
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2315 uint64_t l1
= c1
? value64(v1
->c
.i
, v1
->type
.t
) : 0;
2316 uint64_t l2
= c2
? value64(v2
->c
.i
, v2
->type
.t
) : 0;
2317 int shm
= (t1
== VT_LLONG
) ? 63 : 31;
2322 case '+': l1
+= l2
; break;
2323 case '-': l1
-= l2
; break;
2324 case '&': l1
&= l2
; break;
2325 case '^': l1
^= l2
; break;
2326 case '|': l1
|= l2
; break;
2327 case '*': l1
*= l2
; break;
2334 /* if division by zero, generate explicit division */
2336 if (CONST_WANTED
&& !NOEVAL_WANTED
)
2337 tcc_error("division by zero in constant");
2341 default: l1
= gen_opic_sdiv(l1
, l2
); break;
2342 case '%': l1
= l1
- l2
* gen_opic_sdiv(l1
, l2
); break;
2343 case TOK_UDIV
: l1
= l1
/ l2
; break;
2344 case TOK_UMOD
: l1
= l1
% l2
; break;
2347 case TOK_SHL
: l1
<<= (l2
& shm
); break;
2348 case TOK_SHR
: l1
>>= (l2
& shm
); break;
2350 l1
= (l1
>> 63) ? ~(~l1
>> (l2
& shm
)) : l1
>> (l2
& shm
);
2353 case TOK_ULT
: l1
= l1
< l2
; break;
2354 case TOK_UGE
: l1
= l1
>= l2
; break;
2355 case TOK_EQ
: l1
= l1
== l2
; break;
2356 case TOK_NE
: l1
= l1
!= l2
; break;
2357 case TOK_ULE
: l1
= l1
<= l2
; break;
2358 case TOK_UGT
: l1
= l1
> l2
; break;
2359 case TOK_LT
: l1
= gen_opic_lt(l1
, l2
); break;
2360 case TOK_GE
: l1
= !gen_opic_lt(l1
, l2
); break;
2361 case TOK_LE
: l1
= !gen_opic_lt(l2
, l1
); break;
2362 case TOK_GT
: l1
= gen_opic_lt(l2
, l1
); break;
2364 case TOK_LAND
: l1
= l1
&& l2
; break;
2365 case TOK_LOR
: l1
= l1
|| l2
; break;
2369 v1
->c
.i
= value64(l1
, v1
->type
.t
);
2370 v1
->r
|= v2
->r
& VT_NONCONST
;
2373 /* if commutative ops, put c2 as constant */
2374 if (c1
&& (op
== '+' || op
== '&' || op
== '^' ||
2375 op
== '|' || op
== '*' || op
== TOK_EQ
|| op
== TOK_NE
)) {
2377 c2
= c1
; //c = c1, c1 = c2, c2 = c;
2378 l2
= l1
; //l = l1, l1 = l2, l2 = l;
2380 if (c1
&& ((l1
== 0 &&
2381 (op
== TOK_SHL
|| op
== TOK_SHR
|| op
== TOK_SAR
)) ||
2382 (l1
== -1 && op
== TOK_SAR
))) {
2383 /* treat (0 << x), (0 >> x) and (-1 >> x) as constant */
2385 } else if (c2
&& ((l2
== 0 && (op
== '&' || op
== '*')) ||
2387 (l2
== -1 || (l2
== 0xFFFFFFFF && t2
!= VT_LLONG
))) ||
2388 (l2
== 1 && (op
== '%' || op
== TOK_UMOD
)))) {
2389 /* treat (x & 0), (x * 0), (x | -1) and (x % 1) as constant */
2394 } else if (c2
&& (((op
== '*' || op
== '/' || op
== TOK_UDIV
||
2397 ((op
== '+' || op
== '-' || op
== '|' || op
== '^' ||
2398 op
== TOK_SHL
|| op
== TOK_SHR
|| op
== TOK_SAR
) &&
2401 (l2
== -1 || (l2
== 0xFFFFFFFF && t2
!= VT_LLONG
))))) {
2402 /* filter out NOP operations like x*1, x-0, x&-1... */
2404 } else if (c2
&& (op
== '*' || op
== TOK_PDIV
|| op
== TOK_UDIV
)) {
2405 /* try to use shifts instead of muls or divs */
2406 if (l2
> 0 && (l2
& (l2
- 1)) == 0) {
2415 else if (op
== TOK_PDIV
)
2421 } else if (c2
&& (op
== '+' || op
== '-') &&
2422 (r
= vtop
[-1].r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
),
2423 r
== (VT_CONST
| VT_SYM
) || r
== VT_LOCAL
)) {
2424 /* symbol + constant case */
2428 /* The backends can't always deal with addends to symbols
2429 larger than +-1<<31. Don't construct such. */
2436 /* call low level op generator */
2437 if (t1
== VT_LLONG
|| t2
== VT_LLONG
||
2438 (PTR_SIZE
== 8 && (t1
== VT_PTR
|| t2
== VT_PTR
)))
2443 if (vtop
->r
== VT_CONST
)
2444 vtop
->r
|= VT_NONCONST
; /* is const, but only by optimization */
2448 #if defined TCC_TARGET_X86_64 || defined TCC_TARGET_I386
2449 # define gen_negf gen_opf
2450 #elif defined TCC_TARGET_ARM
void gen_negf(int op)
{
    /* compute 0 - x; the arm backend detects this pattern and
       replaces it by vneg */
    vpushi(0);
    vswap();
    gen_op('-');
}
2457 /* XXX: implement in gen_opf() for other backends too */
2458 void gen_negf(int op
)
2460 /* In IEEE negate(x) isn't subtract(0,x). Without NaNs it's
2461 subtract(-0, x), but with them it's really a sign flip
2462 operation. We implement this with bit manipulation and have
2463 to do some type reinterpretation for this, which TCC can do
2466 int align
, size
, bt
;
2468 size
= type_size(&vtop
->type
, &align
);
2469 bt
= vtop
->type
.t
& VT_BTYPE
;
2470 save_reg(gv(RC_TYPE(bt
)));
2472 incr_bf_adr(size
- 1);
2474 vpushi(0x80); /* flip sign */
2481 /* generate a floating point operation with constant propagation */
2482 static void gen_opif(int op
)
2486 #if defined _MSC_VER && defined __x86_64__
2487 /* avoid bad optimization with f1 -= f2 for f1:-0.0, f2:0.0 */
2496 bt
= v1
->type
.t
& VT_BTYPE
;
2498 /* currently, we cannot do computations with forward symbols */
2499 c1
= (v1
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2500 c2
= (v2
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2502 if (bt
== VT_FLOAT
) {
2505 } else if (bt
== VT_DOUBLE
) {
2512 /* NOTE: we only do constant propagation if finite number (not
2513 NaN or infinity) (ANSI spec) */
2514 if (!(ieee_finite(f1
) || !ieee_finite(f2
)) && !CONST_WANTED
)
2517 case '+': f1
+= f2
; break;
2518 case '-': f1
-= f2
; break;
2519 case '*': f1
*= f2
; break;
2522 union { float f
; unsigned u
; } x1
, x2
, y
;
2523 /* If not in initializer we need to potentially generate
2524 FP exceptions at runtime, otherwise we want to fold. */
2527 /* the run-time result of 0.0/0.0 on x87, also of other compilers
2528 when used to compile the f1 /= f2 below, would be -nan */
2529 x1
.f
= f1
, x2
.f
= f2
;
2531 y
.u
= 0x7fc00000; /* nan */
2533 y
.u
= 0x7f800000; /* infinity */
2534 y
.u
|= (x1
.u
^ x2
.u
) & 0x80000000; /* set sign */
2569 /* XXX: overflow test ? */
2570 if (bt
== VT_FLOAT
) {
2572 } else if (bt
== VT_DOUBLE
) {
2579 if (op
== TOK_NEG
) {
2587 /* print a type. If 'varstr' is not NULL, then the variable is also
2588 printed in the type */
2590 /* XXX: add array and function pointers */
2591 static void type_to_str(char *buf
, int buf_size
,
2592 CType
*type
, const char *varstr
)
2604 pstrcat(buf
, buf_size
, "extern ");
2606 pstrcat(buf
, buf_size
, "static ");
2608 pstrcat(buf
, buf_size
, "typedef ");
2610 pstrcat(buf
, buf_size
, "inline ");
2612 if (t
& VT_VOLATILE
)
2613 pstrcat(buf
, buf_size
, "volatile ");
2614 if (t
& VT_CONSTANT
)
2615 pstrcat(buf
, buf_size
, "const ");
2617 if (((t
& VT_DEFSIGN
) && bt
== VT_BYTE
)
2618 || ((t
& VT_UNSIGNED
)
2619 && (bt
== VT_SHORT
|| bt
== VT_INT
|| bt
== VT_LLONG
)
2622 pstrcat(buf
, buf_size
, (t
& VT_UNSIGNED
) ? "unsigned " : "signed ");
2624 buf_size
-= strlen(buf
);
2660 tstr
= "long double";
2662 pstrcat(buf
, buf_size
, tstr
);
2669 pstrcat(buf
, buf_size
, tstr
);
2670 v
= type
->ref
->v
& ~SYM_STRUCT
;
2671 if (v
>= SYM_FIRST_ANOM
)
2672 pstrcat(buf
, buf_size
, "<anonymous>");
2674 pstrcat(buf
, buf_size
, get_tok_str(v
, NULL
));
2679 if (varstr
&& '*' == *varstr
) {
2680 pstrcat(buf1
, sizeof(buf1
), "(");
2681 pstrcat(buf1
, sizeof(buf1
), varstr
);
2682 pstrcat(buf1
, sizeof(buf1
), ")");
2684 pstrcat(buf1
, buf_size
, "(");
2686 while (sa
!= NULL
) {
2688 type_to_str(buf2
, sizeof(buf2
), &sa
->type
, NULL
);
2689 pstrcat(buf1
, sizeof(buf1
), buf2
);
2692 pstrcat(buf1
, sizeof(buf1
), ", ");
2694 if (s
->f
.func_type
== FUNC_ELLIPSIS
)
2695 pstrcat(buf1
, sizeof(buf1
), ", ...");
2696 pstrcat(buf1
, sizeof(buf1
), ")");
2697 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
2701 if (t
& (VT_ARRAY
|VT_VLA
)) {
2702 if (varstr
&& '*' == *varstr
)
2703 snprintf(buf1
, sizeof(buf1
), "(%s)[%d]", varstr
, s
->c
);
2705 snprintf(buf1
, sizeof(buf1
), "%s[%d]", varstr
? varstr
: "", s
->c
);
2706 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
2709 pstrcpy(buf1
, sizeof(buf1
), "*");
2710 if (t
& VT_CONSTANT
)
2711 pstrcat(buf1
, buf_size
, "const ");
2712 if (t
& VT_VOLATILE
)
2713 pstrcat(buf1
, buf_size
, "volatile ");
2715 pstrcat(buf1
, sizeof(buf1
), varstr
);
2716 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
2720 pstrcat(buf
, buf_size
, " ");
2721 pstrcat(buf
, buf_size
, varstr
);
2726 static void type_incompatibility_error(CType
* st
, CType
* dt
, const char* fmt
)
2728 char buf1
[256], buf2
[256];
2729 type_to_str(buf1
, sizeof(buf1
), st
, NULL
);
2730 type_to_str(buf2
, sizeof(buf2
), dt
, NULL
);
2731 tcc_error(fmt
, buf1
, buf2
);
2734 static void type_incompatibility_warning(CType
* st
, CType
* dt
, const char* fmt
)
2736 char buf1
[256], buf2
[256];
2737 type_to_str(buf1
, sizeof(buf1
), st
, NULL
);
2738 type_to_str(buf2
, sizeof(buf2
), dt
, NULL
);
2739 tcc_warning(fmt
, buf1
, buf2
);
2742 static int pointed_size(CType
*type
)
2745 return type_size(pointed_type(type
), &align
);
2748 static inline int is_null_pointer(SValue
*p
)
2750 if ((p
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
| VT_NONCONST
)) != VT_CONST
)
2752 return ((p
->type
.t
& VT_BTYPE
) == VT_INT
&& (uint32_t)p
->c
.i
== 0) ||
2753 ((p
->type
.t
& VT_BTYPE
) == VT_LLONG
&& p
->c
.i
== 0) ||
2754 ((p
->type
.t
& VT_BTYPE
) == VT_PTR
&&
2755 (PTR_SIZE
== 4 ? (uint32_t)p
->c
.i
== 0 : p
->c
.i
== 0) &&
2756 ((pointed_type(&p
->type
)->t
& VT_BTYPE
) == VT_VOID
) &&
2757 0 == (pointed_type(&p
->type
)->t
& (VT_CONSTANT
| VT_VOLATILE
))
2761 /* compare function types. OLD functions match any new functions */
2762 static int is_compatible_func(CType
*type1
, CType
*type2
)
2768 if (s1
->f
.func_call
!= s2
->f
.func_call
)
2770 if (s1
->f
.func_type
!= s2
->f
.func_type
2771 && s1
->f
.func_type
!= FUNC_OLD
2772 && s2
->f
.func_type
!= FUNC_OLD
)
2775 if (!is_compatible_unqualified_types(&s1
->type
, &s2
->type
))
2777 if (s1
->f
.func_type
== FUNC_OLD
|| s2
->f
.func_type
== FUNC_OLD
)
2788 /* return true if type1 and type2 are the same. If unqualified is
2789 true, qualifiers on the types are ignored.
2791 static int compare_types(CType
*type1
, CType
*type2
, int unqualified
)
2795 if (IS_ENUM(type1
->t
)) {
2796 if (IS_ENUM(type2
->t
))
2797 return type1
->ref
== type2
->ref
;
2798 type1
= &type1
->ref
->type
;
2799 } else if (IS_ENUM(type2
->t
))
2800 type2
= &type2
->ref
->type
;
2802 t1
= type1
->t
& VT_TYPE
;
2803 t2
= type2
->t
& VT_TYPE
;
2805 /* strip qualifiers before comparing */
2806 t1
&= ~(VT_CONSTANT
| VT_VOLATILE
);
2807 t2
&= ~(VT_CONSTANT
| VT_VOLATILE
);
2810 /* Default Vs explicit signedness only matters for char */
2811 if ((t1
& VT_BTYPE
) != VT_BYTE
) {
2815 /* XXX: bitfields ? */
2820 && !(type1
->ref
->c
< 0
2821 || type2
->ref
->c
< 0
2822 || type1
->ref
->c
== type2
->ref
->c
))
2825 /* test more complicated cases */
2826 bt1
= t1
& VT_BTYPE
;
2827 if (bt1
== VT_PTR
) {
2828 type1
= pointed_type(type1
);
2829 type2
= pointed_type(type2
);
2830 return is_compatible_types(type1
, type2
);
2831 } else if (bt1
== VT_STRUCT
) {
2832 return (type1
->ref
== type2
->ref
);
2833 } else if (bt1
== VT_FUNC
) {
2834 return is_compatible_func(type1
, type2
);
2841 #define SHIFT_OP 'S'
2843 /* Check if OP1 and OP2 can be "combined" with operation OP, the combined
2844 type is stored in DEST if non-null (except for pointer plus/minus) . */
2845 static int combine_types(CType
*dest
, SValue
*op1
, SValue
*op2
, int op
)
2847 CType
*type1
, *type2
, type
;
2848 int t1
, t2
, bt1
, bt2
;
2851 /* for shifts, 'combine' only left operand */
2855 type1
= &op1
->type
, type2
= &op2
->type
;
2856 t1
= type1
->t
, t2
= type2
->t
;
2857 bt1
= t1
& VT_BTYPE
, bt2
= t2
& VT_BTYPE
;
2862 if (bt1
== VT_VOID
|| bt2
== VT_VOID
) {
2863 ret
= op
== '?' ? 1 : 0;
2864 /* NOTE: as an extension, we accept void on only one side */
2866 } else if (bt1
== VT_PTR
|| bt2
== VT_PTR
) {
2868 if (!is_integer_btype(bt1
== VT_PTR
? bt2
: bt1
))
2871 /* http://port70.net/~nsz/c/c99/n1256.html#6.5.15p6 */
2872 /* If one is a null ptr constant the result type is the other. */
2873 else if (is_null_pointer (op2
)) type
= *type1
;
2874 else if (is_null_pointer (op1
)) type
= *type2
;
2875 else if (bt1
!= bt2
) {
2876 /* accept comparison or cond-expr between pointer and integer
2878 if ((op
== '?' || op
== CMP_OP
)
2879 && (is_integer_btype(bt1
) || is_integer_btype(bt2
)))
2880 tcc_warning("pointer/integer mismatch in %s",
2881 op
== '?' ? "conditional expression" : "comparison");
2882 else if (op
!= '-' || !is_integer_btype(bt2
))
2884 type
= *(bt1
== VT_PTR
? type1
: type2
);
2886 CType
*pt1
= pointed_type(type1
);
2887 CType
*pt2
= pointed_type(type2
);
2888 int pbt1
= pt1
->t
& VT_BTYPE
;
2889 int pbt2
= pt2
->t
& VT_BTYPE
;
2890 int newquals
, copied
= 0;
2891 if (pbt1
!= VT_VOID
&& pbt2
!= VT_VOID
2892 && !compare_types(pt1
, pt2
, 1/*unqualif*/)) {
2893 if (op
!= '?' && op
!= CMP_OP
)
2896 type_incompatibility_warning(type1
, type2
,
2898 ? "pointer type mismatch in conditional expression ('%s' and '%s')"
2899 : "pointer type mismatch in comparison('%s' and '%s')");
2902 /* pointers to void get preferred, otherwise the
2903 pointed to types minus qualifs should be compatible */
2904 type
= *((pbt1
== VT_VOID
) ? type1
: type2
);
2905 /* combine qualifs */
2906 newquals
= ((pt1
->t
| pt2
->t
) & (VT_CONSTANT
| VT_VOLATILE
));
2907 if ((~pointed_type(&type
)->t
& (VT_CONSTANT
| VT_VOLATILE
))
2910 /* copy the pointer target symbol */
2911 type
.ref
= sym_push(SYM_FIELD
, &type
.ref
->type
,
2914 pointed_type(&type
)->t
|= newquals
;
2916 /* pointers to incomplete arrays get converted to
2917 pointers to completed ones if possible */
2918 if (pt1
->t
& VT_ARRAY
2919 && pt2
->t
& VT_ARRAY
2920 && pointed_type(&type
)->ref
->c
< 0
2921 && (pt1
->ref
->c
> 0 || pt2
->ref
->c
> 0))
2924 type
.ref
= sym_push(SYM_FIELD
, &type
.ref
->type
,
2926 pointed_type(&type
)->ref
=
2927 sym_push(SYM_FIELD
, &pointed_type(&type
)->ref
->type
,
2928 0, pointed_type(&type
)->ref
->c
);
2929 pointed_type(&type
)->ref
->c
=
2930 0 < pt1
->ref
->c
? pt1
->ref
->c
: pt2
->ref
->c
;
2936 } else if (bt1
== VT_STRUCT
|| bt2
== VT_STRUCT
) {
2937 if (op
!= '?' || !compare_types(type1
, type2
, 1))
2940 } else if (is_float(bt1
) || is_float(bt2
)) {
2941 if (bt1
== VT_LDOUBLE
|| bt2
== VT_LDOUBLE
) {
2942 type
.t
= VT_LDOUBLE
;
2943 } else if (bt1
== VT_DOUBLE
|| bt2
== VT_DOUBLE
) {
2948 } else if (bt1
== VT_LLONG
|| bt2
== VT_LLONG
) {
2949 /* cast to biggest op */
2950 type
.t
= VT_LLONG
| VT_LONG
;
2951 if (bt1
== VT_LLONG
)
2953 if (bt2
== VT_LLONG
)
2955 /* convert to unsigned if it does not fit in a long long */
2956 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
) ||
2957 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
))
2958 type
.t
|= VT_UNSIGNED
;
2960 /* integer operations */
2961 type
.t
= VT_INT
| (VT_LONG
& (t1
| t2
));
2962 /* convert to unsigned if it does not fit in an integer */
2963 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
) ||
2964 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
))
2965 type
.t
|= VT_UNSIGNED
;
2972 /* generic gen_op: handles types problems */
2973 ST_FUNC
void gen_op(int op
)
2975 int t1
, t2
, bt1
, bt2
, t
;
2976 CType type1
, combtype
;
2979 if (op
== TOK_SHR
|| op
== TOK_SAR
|| op
== TOK_SHL
)
2980 op_class
= SHIFT_OP
;
2981 else if (TOK_ISCOND(op
)) /* == != > ... */
2985 t1
= vtop
[-1].type
.t
;
2986 t2
= vtop
[0].type
.t
;
2987 bt1
= t1
& VT_BTYPE
;
2988 bt2
= t2
& VT_BTYPE
;
2990 if (bt1
== VT_FUNC
|| bt2
== VT_FUNC
) {
2991 if (bt2
== VT_FUNC
) {
2992 mk_pointer(&vtop
->type
);
2995 if (bt1
== VT_FUNC
) {
2997 mk_pointer(&vtop
->type
);
3002 } else if (!combine_types(&combtype
, vtop
- 1, vtop
, op_class
)) {
3004 tcc_error("invalid operand types for binary operation");
3005 } else if (bt1
== VT_PTR
|| bt2
== VT_PTR
) {
3006 /* at least one operand is a pointer */
3007 /* relational op: must be both pointers */
3009 if (op_class
== CMP_OP
)
3011 /* if both pointers, then it must be the '-' op */
3012 if (bt1
== VT_PTR
&& bt2
== VT_PTR
) {
3015 vpush_type_size(pointed_type(&vtop
[-1].type
), &align
);
3016 vtop
->type
.t
&= ~VT_UNSIGNED
;
3019 vtop
->type
.t
= VT_PTRDIFF_T
;
3023 /* exactly one pointer : must be '+' or '-'. */
3024 if (op
!= '-' && op
!= '+')
3026 /* Put pointer as first operand */
3027 if (bt2
== VT_PTR
) {
3029 t
= t1
, t1
= t2
, t2
= t
;
3033 if (bt2
== VT_LLONG
)
3034 /* XXX: truncate here because gen_opl can't handle ptr + long long */
3037 type1
= vtop
[-1].type
;
3038 vpush_type_size(pointed_type(&vtop
[-1].type
), &align
);
3040 #ifdef CONFIG_TCC_BCHECK
3041 if (tcc_state
->do_bounds_check
&& !CONST_WANTED
) {
3042 /* if bounded pointers, we generate a special code to
3049 gen_bounded_ptr_add();
3055 type1
.t
&= ~(VT_ARRAY
|VT_VLA
);
3056 /* put again type if gen_opic() swaped operands */
3060 /* floats can only be used for a few operations */
3061 if (is_float(combtype
.t
)
3062 && op
!= '+' && op
!= '-' && op
!= '*' && op
!= '/'
3063 && op_class
!= CMP_OP
) {
3067 t
= t2
= combtype
.t
;
3068 /* special case for shifts and long long: we keep the shift as
3070 if (op_class
== SHIFT_OP
)
3072 /* XXX: currently, some unsigned operations are explicit, so
3073 we modify them here */
3074 if (t
& VT_UNSIGNED
) {
3081 else if (op
== TOK_LT
)
3083 else if (op
== TOK_GT
)
3085 else if (op
== TOK_LE
)
3087 else if (op
== TOK_GE
)
3098 if (op_class
== CMP_OP
) {
3099 /* relational op: the result is an int */
3100 vtop
->type
.t
= VT_INT
;
3105 // Make sure that we have converted to an rvalue:
3106 if (vtop
->r
& VT_LVAL
)
3107 gv(is_float(vtop
->type
.t
& VT_BTYPE
) ? RC_FLOAT
: RC_INT
);
3110 #if defined TCC_TARGET_ARM64 || defined TCC_TARGET_RISCV64 || defined TCC_TARGET_ARM
3111 #define gen_cvt_itof1 gen_cvt_itof
3113 /* generic itof for unsigned long long case */
3114 static void gen_cvt_itof1(int t
)
3116 if ((vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
)) ==
3117 (VT_LLONG
| VT_UNSIGNED
)) {
3120 vpush_helper_func(TOK___floatundisf
);
3121 #if LDOUBLE_SIZE != 8
3122 else if (t
== VT_LDOUBLE
)
3123 vpush_helper_func(TOK___floatundixf
);
3126 vpush_helper_func(TOK___floatundidf
);
3137 #if defined TCC_TARGET_ARM64 || defined TCC_TARGET_RISCV64
3138 #define gen_cvt_ftoi1 gen_cvt_ftoi
3140 /* generic ftoi for unsigned long long case */
3141 static void gen_cvt_ftoi1(int t
)
3144 if (t
== (VT_LLONG
| VT_UNSIGNED
)) {
3145 /* not handled natively */
3146 st
= vtop
->type
.t
& VT_BTYPE
;
3148 vpush_helper_func(TOK___fixunssfdi
);
3149 #if LDOUBLE_SIZE != 8
3150 else if (st
== VT_LDOUBLE
)
3151 vpush_helper_func(TOK___fixunsxfdi
);
3154 vpush_helper_func(TOK___fixunsdfdi
);
3165 /* special delayed cast for char/short */
3166 static void force_charshort_cast(void)
3168 int sbt
= BFGET(vtop
->r
, VT_MUSTCAST
) == 2 ? VT_LLONG
: VT_INT
;
3169 int dbt
= vtop
->type
.t
;
3170 vtop
->r
&= ~VT_MUSTCAST
;
3172 gen_cast_s(dbt
== VT_BOOL
? VT_BYTE
|VT_UNSIGNED
: dbt
);
3176 static void gen_cast_s(int t
)
3184 /* cast 'vtop' to 'type'. Casting to bitfields is forbidden. */
3185 static void gen_cast(CType
*type
)
3187 int sbt
, dbt
, sf
, df
, c
;
3188 int dbt_bt
, sbt_bt
, ds
, ss
, bits
, trunc
;
3190 /* special delayed cast for char/short */
3191 if (vtop
->r
& VT_MUSTCAST
)
3192 force_charshort_cast();
3194 /* bitfields first get cast to ints */
3195 if (vtop
->type
.t
& VT_BITFIELD
)
3198 if (IS_ENUM(type
->t
) && type
->ref
->c
< 0)
3199 tcc_error("cast to incomplete type");
3201 dbt
= type
->t
& (VT_BTYPE
| VT_UNSIGNED
);
3202 sbt
= vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
);
3210 dbt_bt
= dbt
& VT_BTYPE
;
3211 sbt_bt
= sbt
& VT_BTYPE
;
3212 if (dbt_bt
== VT_VOID
)
3214 if (sbt_bt
== VT_VOID
) {
3216 cast_error(&vtop
->type
, type
);
3219 c
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
3220 #if !defined TCC_IS_NATIVE && !defined TCC_IS_NATIVE_387
3221 /* don't try to convert to ldouble when cross-compiling
3222 (except when it's '0' which is needed for arm:gen_negf()) */
3223 if (dbt_bt
== VT_LDOUBLE
&& !nocode_wanted
&& (sf
|| vtop
->c
.i
!= 0))
3227 /* constant case: we can do it now */
3228 /* XXX: in ISOC, cannot do it if error in convert */
3229 if (sbt
== VT_FLOAT
)
3230 vtop
->c
.ld
= vtop
->c
.f
;
3231 else if (sbt
== VT_DOUBLE
)
3232 vtop
->c
.ld
= vtop
->c
.d
;
3235 if (sbt_bt
== VT_LLONG
) {
3236 if ((sbt
& VT_UNSIGNED
) || !(vtop
->c
.i
>> 63))
3237 vtop
->c
.ld
= vtop
->c
.i
;
3239 vtop
->c
.ld
= -(long double)-vtop
->c
.i
;
3241 if ((sbt
& VT_UNSIGNED
) || !(vtop
->c
.i
>> 31))
3242 vtop
->c
.ld
= (uint32_t)vtop
->c
.i
;
3244 vtop
->c
.ld
= -(long double)-(uint32_t)vtop
->c
.i
;
3247 if (dbt
== VT_FLOAT
)
3248 vtop
->c
.f
= (float)vtop
->c
.ld
;
3249 else if (dbt
== VT_DOUBLE
)
3250 vtop
->c
.d
= (double)vtop
->c
.ld
;
3251 } else if (sf
&& dbt
== VT_BOOL
) {
3252 vtop
->c
.i
= (vtop
->c
.ld
!= 0);
3255 if (dbt
& VT_UNSIGNED
)
3256 vtop
->c
.i
= (uint64_t)vtop
->c
.ld
;
3258 vtop
->c
.i
= (int64_t)vtop
->c
.ld
;
3260 else if (sbt_bt
== VT_LLONG
|| (PTR_SIZE
== 8 && sbt
== VT_PTR
))
3262 else if (sbt
& VT_UNSIGNED
)
3263 vtop
->c
.i
= (uint32_t)vtop
->c
.i
;
3265 vtop
->c
.i
= ((uint32_t)vtop
->c
.i
| -(vtop
->c
.i
& 0x80000000));
3267 if (dbt_bt
== VT_LLONG
|| (PTR_SIZE
== 8 && dbt
== VT_PTR
))
3269 else if (dbt
== VT_BOOL
)
3270 vtop
->c
.i
= (vtop
->c
.i
!= 0);
3272 uint32_t m
= dbt_bt
== VT_BYTE
? 0xff :
3273 dbt_bt
== VT_SHORT
? 0xffff :
3276 if (!(dbt
& VT_UNSIGNED
))
3277 vtop
->c
.i
|= -(vtop
->c
.i
& ((m
>> 1) + 1));
3282 } else if (dbt
== VT_BOOL
3283 && (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
))
3284 == (VT_CONST
| VT_SYM
)) {
3285 /* addresses are considered non-zero (see tcctest.c:sinit23) */
3291 /* cannot generate code for global or static initializers */
3292 if (nocode_wanted
& DATA_ONLY_WANTED
)
3295 /* non constant case: generate code */
3296 if (dbt
== VT_BOOL
) {
3297 gen_test_zero(TOK_NE
);
3303 /* convert from fp to fp */
3306 /* convert int to fp */
3309 /* convert fp to int */
3311 if (dbt_bt
!= VT_LLONG
&& dbt_bt
!= VT_INT
)
3314 goto again
; /* may need char/short cast */
3319 ds
= btype_size(dbt_bt
);
3320 ss
= btype_size(sbt_bt
);
3321 if (ds
== 0 || ss
== 0)
3324 /* same size and no sign conversion needed */
3325 if (ds
== ss
&& ds
>= 4)
3327 if (dbt_bt
== VT_PTR
|| sbt_bt
== VT_PTR
) {
3328 tcc_warning("cast between pointer and integer of different size");
3329 if (sbt_bt
== VT_PTR
) {
3330 /* put integer type to allow logical operations below */
3331 vtop
->type
.t
= (PTR_SIZE
== 8 ? VT_LLONG
: VT_INT
);
3335 /* processor allows { int a = 0, b = *(char*)&a; }
3336 That means that if we cast to less width, we can just
3337 change the type and read it still later. */
3338 #define ALLOW_SUBTYPE_ACCESS 1
3340 if (ALLOW_SUBTYPE_ACCESS
&& (vtop
->r
& VT_LVAL
)) {
3341 /* value still in memory */
3345 if (ds
<= 4 && !(dbt
== (VT_SHORT
| VT_UNSIGNED
) && sbt
== VT_BYTE
)) {
3347 goto done
; /* no 64bit envolved */
3355 /* generate high word */
3356 if (sbt
& VT_UNSIGNED
) {
3365 } else if (ss
== 8) {
3366 /* from long long: just take low order word */
3374 /* need to convert from 32bit to 64bit */
3375 if (sbt
& VT_UNSIGNED
) {
3376 #if defined(TCC_TARGET_RISCV64)
3377 /* RISC-V keeps 32bit vals in registers sign-extended.
3378 So here we need a zero-extension. */
3387 ss
= ds
, ds
= 4, dbt
= sbt
;
3388 } else if (ss
== 8) {
3389 /* RISC-V keeps 32bit vals in registers sign-extended.
3390 So here we need a sign-extension for signed types and
3391 zero-extension. for unsigned types. */
3392 #if !defined(TCC_TARGET_RISCV64)
3393 trunc
= 32; /* zero upper 32 bits for non RISC-V targets */
3402 #if defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64 || defined TCC_TARGET_ARM64
3408 bits
= (ss
- ds
) * 8;
3409 /* for unsigned, gen_op will convert SAR to SHR */
3410 vtop
->type
.t
= (ss
== 8 ? VT_LLONG
: VT_INT
) | (dbt
& VT_UNSIGNED
);
3413 vpushi(bits
- trunc
);
3420 vtop
->type
.t
&= ~ ( VT_CONSTANT
| VT_VOLATILE
| VT_ARRAY
);
3423 /* return type size as known at compile time. Put alignment at 'a' */
3424 ST_FUNC
int type_size(CType
*type
, int *a
)
3429 bt
= type
->t
& VT_BTYPE
;
3430 if (bt
== VT_STRUCT
) {
3435 } else if (bt
== VT_PTR
) {
3436 if (type
->t
& VT_ARRAY
) {
3439 ts
= type_size(&s
->type
, a
);
3440 if (ts
< 0 && s
->c
< 0)
3447 } else if (IS_ENUM(type
->t
) && type
->ref
->c
< 0) {
3449 return -1; /* incomplete enum */
3450 } else if (bt
== VT_LDOUBLE
) {
3452 return LDOUBLE_SIZE
;
3453 } else if (bt
== VT_DOUBLE
|| bt
== VT_LLONG
) {
3454 #if (defined TCC_TARGET_I386 && !defined TCC_TARGET_PE) \
3455 || (defined TCC_TARGET_ARM && !defined TCC_ARM_EABI)
3461 } else if (bt
== VT_INT
|| bt
== VT_FLOAT
) {
3464 } else if (bt
== VT_SHORT
) {
3467 } else if (bt
== VT_QLONG
|| bt
== VT_QFLOAT
) {
3471 /* char, void, function, _Bool */
3477 /* push type size as known at runtime time on top of value stack. Put
3479 static void vpush_type_size(CType
*type
, int *a
)
3481 if (type
->t
& VT_VLA
) {
3482 type_size(&type
->ref
->type
, a
);
3483 vset(&int_type
, VT_LOCAL
|VT_LVAL
, type
->ref
->c
);
3485 int size
= type_size(type
, a
);
3487 tcc_error("unknown type size");
3492 /* return the pointed type of t */
3493 static inline CType
*pointed_type(CType
*type
)
3495 return &type
->ref
->type
;
3498 /* modify type so that its it is a pointer to type. */
3499 ST_FUNC
void mk_pointer(CType
*type
)
3502 s
= sym_push(SYM_FIELD
, type
, 0, -1);
3503 type
->t
= VT_PTR
| (type
->t
& VT_STORAGE
);
3507 /* return true if type1 and type2 are exactly the same (including
3510 static int is_compatible_types(CType
*type1
, CType
*type2
)
3512 return compare_types(type1
,type2
,0);
3515 /* return true if type1 and type2 are the same (ignoring qualifiers).
3517 static int is_compatible_unqualified_types(CType
*type1
, CType
*type2
)
3519 return compare_types(type1
,type2
,1);
3522 static void cast_error(CType
*st
, CType
*dt
)
3524 type_incompatibility_error(st
, dt
, "cannot convert '%s' to '%s'");
3527 /* verify type compatibility to store vtop in 'dt' type */
3528 static void verify_assign_cast(CType
*dt
)
3530 CType
*st
, *type1
, *type2
;
3531 int dbt
, sbt
, qualwarn
, lvl
;
3533 st
= &vtop
->type
; /* source type */
3534 dbt
= dt
->t
& VT_BTYPE
;
3535 sbt
= st
->t
& VT_BTYPE
;
3536 if (dt
->t
& VT_CONSTANT
)
3537 tcc_warning("assignment of read-only location");
3541 tcc_error("assignment to void expression");
3544 /* special cases for pointers */
3545 /* '0' can also be a pointer */
3546 if (is_null_pointer(vtop
))
3548 /* accept implicit pointer to integer cast with warning */
3549 if (is_integer_btype(sbt
)) {
3550 tcc_warning("assignment makes pointer from integer without a cast");
3553 type1
= pointed_type(dt
);
3555 type2
= pointed_type(st
);
3556 else if (sbt
== VT_FUNC
)
3557 type2
= st
; /* a function is implicitly a function pointer */
3560 if (is_compatible_types(type1
, type2
))
3562 for (qualwarn
= lvl
= 0;; ++lvl
) {
3563 if (((type2
->t
& VT_CONSTANT
) && !(type1
->t
& VT_CONSTANT
)) ||
3564 ((type2
->t
& VT_VOLATILE
) && !(type1
->t
& VT_VOLATILE
)))
3566 dbt
= type1
->t
& (VT_BTYPE
|VT_LONG
);
3567 sbt
= type2
->t
& (VT_BTYPE
|VT_LONG
);
3568 if (dbt
!= VT_PTR
|| sbt
!= VT_PTR
)
3570 type1
= pointed_type(type1
);
3571 type2
= pointed_type(type2
);
3573 if (!is_compatible_unqualified_types(type1
, type2
)) {
3574 if ((dbt
== VT_VOID
|| sbt
== VT_VOID
) && lvl
== 0) {
3575 /* void * can match anything */
3576 } else if (dbt
== sbt
3577 && is_integer_btype(sbt
& VT_BTYPE
)
3578 && IS_ENUM(type1
->t
) + IS_ENUM(type2
->t
)
3579 + !!((type1
->t
^ type2
->t
) & VT_UNSIGNED
) < 2) {
3580 /* Like GCC don't warn by default for merely changes
3581 in pointer target signedness. Do warn for different
3582 base types, though, in particular for unsigned enums
3583 and signed int targets. */
3585 tcc_warning("assignment from incompatible pointer type");
3590 tcc_warning_c(warn_discarded_qualifiers
)("assignment discards qualifiers from pointer target type");
3596 if (sbt
== VT_PTR
|| sbt
== VT_FUNC
) {
3597 tcc_warning("assignment makes integer from pointer without a cast");
3598 } else if (sbt
== VT_STRUCT
) {
3599 goto case_VT_STRUCT
;
3601 /* XXX: more tests */
3605 if (!is_compatible_unqualified_types(dt
, st
)) {
3613 static void gen_assign_cast(CType
*dt
)
3615 verify_assign_cast(dt
);
3619 /* store vtop in lvalue pushed on stack */
3620 ST_FUNC
void vstore(void)
3622 int sbt
, dbt
, ft
, r
, size
, align
, bit_size
, bit_pos
, delayed_cast
;
3624 ft
= vtop
[-1].type
.t
;
3625 sbt
= vtop
->type
.t
& VT_BTYPE
;
3626 dbt
= ft
& VT_BTYPE
;
3627 verify_assign_cast(&vtop
[-1].type
);
3629 if (sbt
== VT_STRUCT
) {
3630 /* if structure, only generate pointer */
3631 /* structure assignment : generate memcpy */
3632 size
= type_size(&vtop
->type
, &align
);
3633 /* destination, keep on stack() as result */
3635 #ifdef CONFIG_TCC_BCHECK
3636 if (vtop
->r
& VT_MUSTBOUND
)
3637 gbound(); /* check would be wrong after gaddrof() */
3639 vtop
->type
.t
= VT_PTR
;
3643 #ifdef CONFIG_TCC_BCHECK
3644 if (vtop
->r
& VT_MUSTBOUND
)
3647 vtop
->type
.t
= VT_PTR
;
3650 #ifdef TCC_TARGET_NATIVE_STRUCT_COPY
3652 #ifdef CONFIG_TCC_BCHECK
3653 && !tcc_state
->do_bounds_check
3656 gen_struct_copy(size
);
3662 /* Use memmove, rather than memcpy, as dest and src may be same: */
3665 vpush_helper_func(TOK_memmove8
);
3666 else if(!(align
& 3))
3667 vpush_helper_func(TOK_memmove4
);
3670 vpush_helper_func(TOK_memmove
);
3675 } else if (ft
& VT_BITFIELD
) {
3676 /* bitfield store handling */
3678 /* save lvalue as expression result (example: s.b = s.a = n;) */
3679 vdup(), vtop
[-1] = vtop
[-2];
3681 bit_pos
= BIT_POS(ft
);
3682 bit_size
= BIT_SIZE(ft
);
3683 /* remove bit field info to avoid loops */
3684 vtop
[-1].type
.t
= ft
& ~VT_STRUCT_MASK
;
3686 if (dbt
== VT_BOOL
) {
3687 gen_cast(&vtop
[-1].type
);
3688 vtop
[-1].type
.t
= (vtop
[-1].type
.t
& ~VT_BTYPE
) | (VT_BYTE
| VT_UNSIGNED
);
3690 r
= adjust_bf(vtop
- 1, bit_pos
, bit_size
);
3691 if (dbt
!= VT_BOOL
) {
3692 gen_cast(&vtop
[-1].type
);
3693 dbt
= vtop
[-1].type
.t
& VT_BTYPE
;
3695 if (r
== VT_STRUCT
) {
3696 store_packed_bf(bit_pos
, bit_size
);
3698 unsigned long long mask
= (1ULL << bit_size
) - 1;
3699 if (dbt
!= VT_BOOL
) {
3701 if (dbt
== VT_LLONG
)
3704 vpushi((unsigned)mask
);
3711 /* duplicate destination */
3714 /* load destination, mask and or with source */
3715 if (dbt
== VT_LLONG
)
3716 vpushll(~(mask
<< bit_pos
));
3718 vpushi(~((unsigned)mask
<< bit_pos
));
3723 /* ... and discard */
3726 } else if (dbt
== VT_VOID
) {
3729 /* optimize char/short casts */
3731 if ((dbt
== VT_BYTE
|| dbt
== VT_SHORT
)
3732 && is_integer_btype(sbt
)
3734 if ((vtop
->r
& VT_MUSTCAST
)
3735 && btype_size(dbt
) > btype_size(sbt
)
3737 force_charshort_cast();
3740 gen_cast(&vtop
[-1].type
);
3743 #ifdef CONFIG_TCC_BCHECK
3744 /* bound check case */
3745 if (vtop
[-1].r
& VT_MUSTBOUND
) {
3751 gv(RC_TYPE(dbt
)); /* generate value */
3754 vtop
->r
|= BFVAL(VT_MUSTCAST
, (sbt
== VT_LLONG
) + 1);
3755 //tcc_warning("deley cast %x -> %x", sbt, dbt);
3756 vtop
->type
.t
= ft
& VT_TYPE
;
3759 /* if lvalue was saved on stack, must read it */
3760 if ((vtop
[-1].r
& VT_VALMASK
) == VT_LLOCAL
) {
3762 r
= get_reg(RC_INT
);
3763 sv
.type
.t
= VT_PTRDIFF_T
;
3764 sv
.r
= VT_LOCAL
| VT_LVAL
;
3765 sv
.c
.i
= vtop
[-1].c
.i
;
3767 vtop
[-1].r
= r
| VT_LVAL
;
3770 r
= vtop
->r
& VT_VALMASK
;
3771 /* two word case handling :
3772 store second register at word + 4 (or +8 for x86-64) */
3773 if (USING_TWO_WORDS(dbt
)) {
3774 int load_type
= (dbt
== VT_QFLOAT
) ? VT_DOUBLE
: VT_PTRDIFF_T
;
3775 vtop
[-1].type
.t
= load_type
;
3778 incr_offset(PTR_SIZE
);
3780 /* XXX: it works because r2 is spilled last ! */
3781 store(vtop
->r2
, vtop
- 1);
3787 vtop
--; /* NOT vpop() because on x86 it would flush the fp stack */
3791 /* post defines POST/PRE add. c is the token ++ or -- */
3792 ST_FUNC
void inc(int post
, int c
)
3795 vdup(); /* save lvalue */
3797 gv_dup(); /* duplicate value */
3802 vpushi(c
- TOK_MID
);
3804 vstore(); /* store value */
3806 vpop(); /* if post op, return saved value */
3809 ST_FUNC CString
* parse_mult_str (const char *msg
)
3811 /* read the string */
3814 cstr_reset(&initstr
);
3815 while (tok
== TOK_STR
) {
3816 /* XXX: add \0 handling too ? */
3817 cstr_cat(&initstr
, tokc
.str
.data
, -1);
3820 cstr_ccat(&initstr
, '\0');
3824 /* If I is >= 1 and a power of two, returns log2(i)+1.
3825 If I is 0 returns 0. */
3826 ST_FUNC
int exact_log2p1(int i
)
3831 for (ret
= 1; i
>= 1 << 8; ret
+= 8)
3842 /* Parse __attribute__((...)) GNUC extension. */
3843 static void parse_attribute(AttributeDef
*ad
)
3849 if (tok
!= TOK_ATTRIBUTE1
&& tok
!= TOK_ATTRIBUTE2
)
3854 while (tok
!= ')') {
3855 if (tok
< TOK_IDENT
)
3856 expect("attribute name");
3868 tcc_warning_c(warn_implicit_function_declaration
)(
3869 "implicit declaration of function '%s'", get_tok_str(tok
, &tokc
));
3870 s
= external_global_sym(tok
, &func_old_type
);
3871 } else if ((s
->type
.t
& VT_BTYPE
) != VT_FUNC
)
3872 tcc_error("'%s' is not declared as function", get_tok_str(tok
, &tokc
));
3873 ad
->cleanup_func
= s
;
3878 case TOK_CONSTRUCTOR1
:
3879 case TOK_CONSTRUCTOR2
:
3880 ad
->f
.func_ctor
= 1;
3882 case TOK_DESTRUCTOR1
:
3883 case TOK_DESTRUCTOR2
:
3884 ad
->f
.func_dtor
= 1;
3886 case TOK_ALWAYS_INLINE1
:
3887 case TOK_ALWAYS_INLINE2
:
3888 ad
->f
.func_alwinl
= 1;
3893 astr
= parse_mult_str("section name")->data
;
3894 ad
->section
= find_section(tcc_state
, astr
);
3900 astr
= parse_mult_str("alias(\"target\")")->data
;
3901 /* save string as token, for later */
3902 ad
->alias_target
= tok_alloc_const(astr
);
3905 case TOK_VISIBILITY1
:
3906 case TOK_VISIBILITY2
:
3908 astr
= parse_mult_str("visibility(\"default|hidden|internal|protected\")")->data
;
3909 if (!strcmp (astr
, "default"))
3910 ad
->a
.visibility
= STV_DEFAULT
;
3911 else if (!strcmp (astr
, "hidden"))
3912 ad
->a
.visibility
= STV_HIDDEN
;
3913 else if (!strcmp (astr
, "internal"))
3914 ad
->a
.visibility
= STV_INTERNAL
;
3915 else if (!strcmp (astr
, "protected"))
3916 ad
->a
.visibility
= STV_PROTECTED
;
3918 expect("visibility(\"default|hidden|internal|protected\")");
3926 if (n
<= 0 || (n
& (n
- 1)) != 0)
3927 tcc_error("alignment must be a positive power of two");
3932 ad
->a
.aligned
= exact_log2p1(n
);
3933 if (n
!= 1 << (ad
->a
.aligned
- 1))
3934 tcc_error("alignment of %d is larger than implemented", n
);
3950 /* currently, no need to handle it because tcc does not
3951 track unused objects */
3955 ad
->f
.func_noreturn
= 1;
3960 ad
->f
.func_call
= FUNC_CDECL
;
3965 ad
->f
.func_call
= FUNC_STDCALL
;
3967 #ifdef TCC_TARGET_I386
3977 ad
->f
.func_call
= FUNC_FASTCALL1
+ n
- 1;
3983 ad
->f
.func_call
= FUNC_FASTCALLW
;
3988 ad
->f
.func_call
= FUNC_THISCALL
;
3995 ad
->attr_mode
= VT_LLONG
+ 1;
3998 ad
->attr_mode
= VT_BYTE
+ 1;
4001 ad
->attr_mode
= VT_SHORT
+ 1;
4005 ad
->attr_mode
= VT_INT
+ 1;
4008 tcc_warning("__mode__(%s) not supported\n", get_tok_str(tok
, NULL
));
4015 ad
->a
.dllexport
= 1;
4017 case TOK_NODECORATE
:
4018 ad
->a
.nodecorate
= 1;
4021 ad
->a
.dllimport
= 1;
4024 tcc_warning_c(warn_unsupported
)("'%s' attribute ignored", get_tok_str(t
, NULL
));
4025 /* skip parameters */
4027 int parenthesis
= 0;
4031 else if (tok
== ')')
4034 } while (parenthesis
&& tok
!= -1);
4047 static Sym
* find_field (CType
*type
, int v
, int *cumofs
)
4050 int v1
= v
| SYM_FIELD
;
4051 if (!(v
& SYM_FIELD
)) { /* top-level call */
4052 if ((type
->t
& VT_BTYPE
) != VT_STRUCT
)
4053 expect("struct or union");
4055 expect("field name");
4057 tcc_error("dereferencing incomplete type '%s'",
4058 get_tok_str(s
->v
& ~SYM_STRUCT
, 0));
4060 while ((s
= s
->next
) != NULL
) {
4065 if ((s
->type
.t
& VT_BTYPE
) == VT_STRUCT
4066 && s
->v
>= (SYM_FIRST_ANOM
| SYM_FIELD
)) {
4067 /* try to find field in anonymous sub-struct/union */
4068 Sym
*ret
= find_field (&s
->type
, v1
, cumofs
);
4075 if (!(v
& SYM_FIELD
))
4076 tcc_error("field not found: %s", get_tok_str(v
, NULL
));
4080 static void check_fields (CType
*type
, int check
)
4084 while ((s
= s
->next
) != NULL
) {
4085 int v
= s
->v
& ~SYM_FIELD
;
4086 if (v
< SYM_FIRST_ANOM
) {
4087 TokenSym
*ts
= table_ident
[v
- TOK_IDENT
];
4088 if (check
&& (ts
->tok
& SYM_FIELD
))
4089 tcc_error("duplicate member '%s'", get_tok_str(v
, NULL
));
4090 ts
->tok
^= SYM_FIELD
;
4091 } else if ((s
->type
.t
& VT_BTYPE
) == VT_STRUCT
)
4092 check_fields (&s
->type
, check
);
4096 static void struct_layout(CType
*type
, AttributeDef
*ad
)
4098 int size
, align
, maxalign
, offset
, c
, bit_pos
, bit_size
;
4099 int packed
, a
, bt
, prevbt
, prev_bit_size
;
4100 int pcc
= !tcc_state
->ms_bitfields
;
4101 int pragma_pack
= *tcc_state
->pack_stack_ptr
;
4108 prevbt
= VT_STRUCT
; /* make it never match */
4113 for (f
= type
->ref
->next
; f
; f
= f
->next
) {
4114 if (f
->type
.t
& VT_BITFIELD
)
4115 bit_size
= BIT_SIZE(f
->type
.t
);
4118 size
= type_size(&f
->type
, &align
);
4119 a
= f
->a
.aligned
? 1 << (f
->a
.aligned
- 1) : 0;
4122 if (pcc
&& bit_size
== 0) {
4123 /* in pcc mode, packing does not affect zero-width bitfields */
4126 /* in pcc mode, attribute packed overrides if set. */
4127 if (pcc
&& (f
->a
.packed
|| ad
->a
.packed
))
4130 /* pragma pack overrides align if lesser and packs bitfields always */
4133 if (pragma_pack
< align
)
4134 align
= pragma_pack
;
4135 /* in pcc mode pragma pack also overrides individual align */
4136 if (pcc
&& pragma_pack
< a
)
4140 /* some individual align was specified */
4144 if (type
->ref
->type
.t
== VT_UNION
) {
4145 if (pcc
&& bit_size
>= 0)
4146 size
= (bit_size
+ 7) >> 3;
4151 } else if (bit_size
< 0) {
4153 c
+= (bit_pos
+ 7) >> 3;
4154 c
= (c
+ align
- 1) & -align
;
4163 /* A bit-field. Layout is more complicated. There are two
4164 options: PCC (GCC) compatible and MS compatible */
4166 /* In PCC layout a bit-field is placed adjacent to the
4167 preceding bit-fields, except if:
4169 - an individual alignment was given
4170 - it would overflow its base type container and
4171 there is no packing */
4172 if (bit_size
== 0) {
4174 c
= (c
+ ((bit_pos
+ 7) >> 3) + align
- 1) & -align
;
4176 } else if (f
->a
.aligned
) {
4178 } else if (!packed
) {
4180 int ofs
= ((c
* 8 + bit_pos
) % a8
+ bit_size
+ a8
- 1) / a8
;
4181 if (ofs
> size
/ align
)
4185 /* in pcc mode, long long bitfields have type int if they fit */
4186 if (size
== 8 && bit_size
<= 32)
4187 f
->type
.t
= (f
->type
.t
& ~VT_BTYPE
) | VT_INT
, size
= 4;
4189 while (bit_pos
>= align
* 8)
4190 c
+= align
, bit_pos
-= align
* 8;
4193 /* In PCC layout named bit-fields influence the alignment
4194 of the containing struct using the base types alignment,
4195 except for packed fields (which here have correct align). */
4196 if (f
->v
& SYM_FIRST_ANOM
4197 // && bit_size // ??? gcc on ARM/rpi does that
4202 bt
= f
->type
.t
& VT_BTYPE
;
4203 if ((bit_pos
+ bit_size
> size
* 8)
4204 || (bit_size
> 0) == (bt
!= prevbt
)
4206 c
= (c
+ align
- 1) & -align
;
4209 /* In MS bitfield mode a bit-field run always uses
4210 at least as many bits as the underlying type.
4211 To start a new run it's also required that this
4212 or the last bit-field had non-zero width. */
4213 if (bit_size
|| prev_bit_size
)
4216 /* In MS layout the records alignment is normally
4217 influenced by the field, except for a zero-width
4218 field at the start of a run (but by further zero-width
4219 fields it is again). */
4220 if (bit_size
== 0 && prevbt
!= bt
)
4223 prev_bit_size
= bit_size
;
4226 f
->type
.t
= (f
->type
.t
& ~(0x3f << VT_STRUCT_SHIFT
))
4227 | (bit_pos
<< VT_STRUCT_SHIFT
);
4228 bit_pos
+= bit_size
;
4230 if (align
> maxalign
)
4234 printf("set field %s offset %-2d size %-2d align %-2d",
4235 get_tok_str(f
->v
& ~SYM_FIELD
, NULL
), offset
, size
, align
);
4236 if (f
->type
.t
& VT_BITFIELD
) {
4237 printf(" pos %-2d bits %-2d",
4250 c
+= (bit_pos
+ 7) >> 3;
4252 /* store size and alignment */
4253 a
= bt
= ad
->a
.aligned
? 1 << (ad
->a
.aligned
- 1) : 1;
4257 if (pragma_pack
&& pragma_pack
< maxalign
&& 0 == pcc
) {
4258 /* can happen if individual align for some member was given. In
4259 this case MSVC ignores maxalign when aligning the size */
4264 c
= (c
+ a
- 1) & -a
;
4268 printf("struct size %-2d align %-2d\n\n", c
, a
), fflush(stdout
);
4271 /* check whether we can access bitfields by their type */
4272 for (f
= type
->ref
->next
; f
; f
= f
->next
) {
4276 if (0 == (f
->type
.t
& VT_BITFIELD
))
4280 bit_size
= BIT_SIZE(f
->type
.t
);
4283 bit_pos
= BIT_POS(f
->type
.t
);
4284 size
= type_size(&f
->type
, &align
);
4286 if (bit_pos
+ bit_size
<= size
* 8 && f
->c
+ size
<= c
4287 #ifdef TCC_TARGET_ARM
4288 && !(f
->c
& (align
- 1))
4293 /* try to access the field using a different type */
4294 c0
= -1, s
= align
= 1;
4297 px
= f
->c
* 8 + bit_pos
;
4298 cx
= (px
>> 3) & -align
;
4299 px
= px
- (cx
<< 3);
4302 s
= (px
+ bit_size
+ 7) >> 3;
4312 s
= type_size(&t
, &align
);
4316 if (px
+ bit_size
<= s
* 8 && cx
+ s
<= c
4317 #ifdef TCC_TARGET_ARM
4318 && !(cx
& (align
- 1))
4321 /* update offset and bit position */
4324 f
->type
.t
= (f
->type
.t
& ~(0x3f << VT_STRUCT_SHIFT
))
4325 | (bit_pos
<< VT_STRUCT_SHIFT
);
4329 printf("FIX field %s offset %-2d size %-2d align %-2d "
4330 "pos %-2d bits %-2d\n",
4331 get_tok_str(f
->v
& ~SYM_FIELD
, NULL
),
4332 cx
, s
, align
, px
, bit_size
);
4335 /* fall back to load/store single-byte wise */
4336 f
->auxtype
= VT_STRUCT
;
4338 printf("FIX field %s : load byte-wise\n",
4339 get_tok_str(f
->v
& ~SYM_FIELD
, NULL
));
4345 /* enum/struct/union declaration. u is VT_ENUM/VT_STRUCT/VT_UNION */
4346 static void struct_decl(CType
*type
, int u
)
4348 int v
, c
, size
, align
, flexible
;
4349 int bit_size
, bsize
, bt
, ut
;
4351 AttributeDef ad
, ad1
;
4354 memset(&ad
, 0, sizeof ad
);
4356 parse_attribute(&ad
);
4359 if (tok
>= TOK_IDENT
) /* struct/enum tag */
4365 if (tok
== ':') { /* C2x enum : <type> ... */
4367 if (!parse_btype(&btype
, &ad1
, 0)
4368 || !is_integer_btype(btype
.t
& VT_BTYPE
))
4369 expect("enum type");
4370 bt
= ut
= btype
.t
& (VT_BTYPE
|VT_LONG
|VT_UNSIGNED
|VT_DEFSIGN
);
4375 /* struct already defined ? return it */
4377 if (s
&& (s
->sym_scope
== local_scope
|| (tok
!= '{' && tok
!= ';'))) {
4380 if (u
== VT_ENUM
&& IS_ENUM(s
->type
.t
)) /* XXX: check integral types */
4382 tcc_error("redeclaration of '%s'", get_tok_str(v
, NULL
));
4386 expect("struct/union/enum name");
4389 /* Record the original enum/struct/union token. */
4392 /* we put an undefined size for struct/union */
4393 s
= sym_push(v
| SYM_STRUCT
, &type1
, 0, bt
? 0 : -1);
4394 s
->r
= 0; /* default alignment is zero as gcc */
4396 type
->t
= s
->type
.t
;
4402 && !(u
== VT_ENUM
&& s
->c
== 0)) /* not yet defined typed enum */
4403 tcc_error("struct/union/enum already defined");
4405 /* cannot be empty */
4406 /* non empty enums are not allowed */
4409 long long ll
= 0, pl
= 0, nl
= 0;
4412 /* enum symbols have static storage */
4413 t
.t
= VT_INT
|VT_STATIC
|VT_ENUM_VAL
;
4415 t
.t
= bt
|VT_STATIC
|VT_ENUM_VAL
;
4419 expect("identifier");
4421 if (ss
&& !local_stack
)
4422 tcc_error("redefinition of enumerator '%s'",
4423 get_tok_str(v
, NULL
));
4427 ll
= expr_const64();
4429 ss
= sym_push(v
, &t
, VT_CONST
, 0);
4431 *ps
= ss
, ps
= &ss
->next
;
4440 /* NOTE: we accept a trailing comma */
4452 /* set integral type of the enum */
4455 if (pl
!= (unsigned)pl
)
4456 t
.t
= (LONG_SIZE
==8 ? VT_LLONG
|VT_LONG
: VT_LLONG
);
4458 } else if (pl
!= (int)pl
|| nl
!= (int)nl
)
4459 t
.t
= (LONG_SIZE
==8 ? VT_LLONG
|VT_LONG
: VT_LLONG
);
4461 /* set type for enum members */
4462 for (ss
= s
->next
; ss
; ss
= ss
->next
) {
4464 if (ll
== (int)ll
) /* default is int if it fits */
4466 if (t
.t
& VT_UNSIGNED
) {
4467 ss
->type
.t
|= VT_UNSIGNED
;
4468 if (ll
== (unsigned)ll
)
4471 ss
->type
.t
= (ss
->type
.t
& ~VT_BTYPE
)
4472 | (LONG_SIZE
==8 ? VT_LLONG
|VT_LONG
: VT_LLONG
);
4476 s
->type
.t
= type
->t
= t
.t
| VT_ENUM
;
4481 while (tok
!= '}') {
4482 if (!parse_btype(&btype
, &ad1
, 0)) {
4483 if (tok
== TOK_STATIC_ASSERT
) {
4492 tcc_error("flexible array member '%s' not at the end of struct",
4493 get_tok_str(v
, NULL
));
4499 type_decl(&type1
, &ad1
, &v
, TYPE_DIRECT
);
4501 if ((type1
.t
& VT_BTYPE
) != VT_STRUCT
)
4502 expect("identifier");
4504 int v
= btype
.ref
->v
;
4505 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
4506 if (tcc_state
->ms_extensions
== 0)
4507 expect("identifier");
4511 if (type_size(&type1
, &align
) < 0) {
4512 if ((u
== VT_STRUCT
) && (type1
.t
& VT_ARRAY
) && c
)
4515 tcc_error("field '%s' has incomplete type",
4516 get_tok_str(v
, NULL
));
4518 if ((type1
.t
& VT_BTYPE
) == VT_FUNC
||
4519 (type1
.t
& VT_BTYPE
) == VT_VOID
||
4520 (type1
.t
& VT_STORAGE
))
4521 tcc_error("invalid type for '%s'",
4522 get_tok_str(v
, NULL
));
4526 bit_size
= expr_const();
4527 /* XXX: handle v = 0 case for messages */
4529 tcc_error("negative width in bit-field '%s'",
4530 get_tok_str(v
, NULL
));
4531 if (v
&& bit_size
== 0)
4532 tcc_error("zero width for bit-field '%s'",
4533 get_tok_str(v
, NULL
));
4534 parse_attribute(&ad1
);
4536 size
= type_size(&type1
, &align
);
4537 if (bit_size
>= 0) {
4538 bt
= type1
.t
& VT_BTYPE
;
4544 tcc_error("bitfields must have scalar type");
4546 if (bit_size
> bsize
) {
4547 tcc_error("width of '%s' exceeds its type",
4548 get_tok_str(v
, NULL
));
4549 } else if (bit_size
== bsize
4550 && !ad
.a
.packed
&& !ad1
.a
.packed
) {
4551 /* no need for bit fields */
4553 } else if (bit_size
== 64) {
4554 tcc_error("field width 64 not implemented");
4556 type1
.t
= (type1
.t
& ~VT_STRUCT_MASK
)
4558 | (bit_size
<< (VT_STRUCT_SHIFT
+ 6));
4561 if (v
!= 0 || (type1
.t
& VT_BTYPE
) == VT_STRUCT
) {
4562 /* Remember we've seen a real field to check
4563 for placement of flexible array member. */
4566 /* If member is a struct or bit-field, enforce
4567 placing into the struct (as anonymous). */
4569 ((type1
.t
& VT_BTYPE
) == VT_STRUCT
||
4574 ss
= sym_push(v
| SYM_FIELD
, &type1
, 0, 0);
4579 if (tok
== ';' || tok
== TOK_EOF
)
4586 parse_attribute(&ad
);
4587 if (ad
.cleanup_func
) {
4588 tcc_warning("attribute '__cleanup__' ignored on type");
4590 check_fields(type
, 1);
4591 check_fields(type
, 0);
4592 struct_layout(type
, &ad
);
4594 tcc_debug_fix_anon(tcc_state
, type
);
4599 static void sym_to_attr(AttributeDef
*ad
, Sym
*s
)
4601 merge_symattr(&ad
->a
, &s
->a
);
4602 merge_funcattr(&ad
->f
, &s
->f
);
4605 /* Add type qualifiers to a type. If the type is an array then the qualifiers
4606 are added to the element type, copied because it could be a typedef. */
4607 static void parse_btype_qualify(CType
*type
, int qualifiers
)
4609 while (type
->t
& VT_ARRAY
) {
4610 type
->ref
= sym_push(SYM_FIELD
, &type
->ref
->type
, 0, type
->ref
->c
);
4611 type
= &type
->ref
->type
;
4613 type
->t
|= qualifiers
;
4616 /* return 0 if no type declaration. otherwise, return the basic type
4619 static int parse_btype(CType
*type
, AttributeDef
*ad
, int ignore_label
)
4621 int t
, u
, bt
, st
, type_found
, typespec_found
, g
, n
;
4625 memset(ad
, 0, sizeof(AttributeDef
));
4635 /* currently, we really ignore extension */
4645 if (u
== VT_SHORT
|| u
== VT_LONG
) {
4646 if (st
!= -1 || (bt
!= -1 && bt
!= VT_INT
))
4647 tmbt
: tcc_error("too many basic types");
4650 if (bt
!= -1 || (st
!= -1 && u
!= VT_INT
))
4655 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | u
;
4672 memset(&ad1
, 0, sizeof(AttributeDef
));
4673 if (parse_btype(&type1
, &ad1
, 0)) {
4674 type_decl(&type1
, &ad1
, &n
, TYPE_ABSTRACT
);
4676 n
= 1 << (ad1
.a
.aligned
- 1);
4678 type_size(&type1
, &n
);
4681 if (n
< 0 || (n
& (n
- 1)) != 0)
4682 tcc_error("alignment must be a positive power of two");
4685 ad
->a
.aligned
= exact_log2p1(n
);
4689 if ((t
& VT_BTYPE
) == VT_DOUBLE
) {
4690 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_LDOUBLE
;
4691 } else if ((t
& (VT_BTYPE
|VT_LONG
)) == VT_LONG
) {
4692 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_LLONG
;
4699 #ifdef TCC_TARGET_ARM64
4701 /* GCC's __uint128_t appears in some Linux header files. Make it a
4702 synonym for long double to get the size and alignment right. */
4710 tcc_error("_Complex is not yet supported");
4715 if ((t
& (VT_BTYPE
|VT_LONG
)) == VT_LONG
) {
4716 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_LDOUBLE
;
4724 struct_decl(&type1
, VT_ENUM
);
4727 type
->ref
= type1
.ref
;
4730 struct_decl(&type1
, VT_STRUCT
);
4733 struct_decl(&type1
, VT_UNION
);
4736 /* type modifiers */
4740 parse_btype_qualify(type
, VT_ATOMIC
);
4743 parse_expr_type(&type1
);
4744 /* remove all storage modifiers except typedef */
4745 type1
.t
&= ~(VT_STORAGE
&~VT_TYPEDEF
);
4747 sym_to_attr(ad
, type1
.ref
);
4755 parse_btype_qualify(type
, VT_CONSTANT
);
4763 parse_btype_qualify(type
, VT_VOLATILE
);
4770 if ((t
& (VT_DEFSIGN
|VT_UNSIGNED
)) == (VT_DEFSIGN
|VT_UNSIGNED
))
4771 tcc_error("signed and unsigned modifier");
4784 if ((t
& (VT_DEFSIGN
|VT_UNSIGNED
)) == VT_DEFSIGN
)
4785 tcc_error("signed and unsigned modifier");
4786 t
|= VT_DEFSIGN
| VT_UNSIGNED
;
4802 if (t
& (VT_EXTERN
|VT_STATIC
|VT_TYPEDEF
) & ~g
)
4803 tcc_error("multiple storage classes");
4815 ad
->f
.func_noreturn
= 1;
4817 /* GNUC attribute */
4818 case TOK_ATTRIBUTE1
:
4819 case TOK_ATTRIBUTE2
:
4820 parse_attribute(ad
);
4821 if (ad
->attr_mode
) {
4822 u
= ad
->attr_mode
-1;
4823 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | u
;
4831 parse_expr_type(&type1
);
4832 /* remove all storage modifiers except typedef */
4833 type1
.t
&= ~(VT_STORAGE
&~VT_TYPEDEF
);
4835 sym_to_attr(ad
, type1
.ref
);
4837 case TOK_THREAD_LOCAL
:
4838 tcc_error("_Thread_local is not implemented");
4843 if (!s
|| !(s
->type
.t
& VT_TYPEDEF
))
4847 if (tok
== ':' && ignore_label
) {
4848 /* ignore if it's a label */
4853 t
&= ~(VT_BTYPE
|VT_LONG
);
4854 u
= t
& ~(VT_CONSTANT
| VT_VOLATILE
), t
^= u
;
4855 type
->t
= (s
->type
.t
& ~VT_TYPEDEF
) | u
;
4856 type
->ref
= s
->type
.ref
;
4858 parse_btype_qualify(type
, t
);
4860 /* get attributes from typedef */
4869 if (tcc_state
->char_is_unsigned
) {
4870 if ((t
& (VT_DEFSIGN
|VT_BTYPE
)) == VT_BYTE
)
4873 /* VT_LONG is used just as a modifier for VT_INT / VT_LLONG */
4874 bt
= t
& (VT_BTYPE
|VT_LONG
);
4876 t
|= LONG_SIZE
== 8 ? VT_LLONG
: VT_INT
;
4877 #ifdef TCC_USING_DOUBLE_FOR_LDOUBLE
4878 if (bt
== VT_LDOUBLE
)
4879 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | (VT_DOUBLE
|VT_LONG
);
4885 /* convert a function parameter type (array to pointer and function to
4886 function pointer) */
4887 static inline void convert_parameter_type(CType
*pt
)
4889 /* remove const and volatile qualifiers (XXX: const could be used
4890 to indicate a const function parameter */
4891 pt
->t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
4892 /* array must be transformed to pointer according to ANSI C */
4893 pt
->t
&= ~(VT_ARRAY
| VT_VLA
);
4894 if ((pt
->t
& VT_BTYPE
) == VT_FUNC
) {
4899 ST_FUNC CString
* parse_asm_str(void)
4902 return parse_mult_str("string constant");
4905 /* Parse an asm label and return the token */
4906 static int asm_label_instr(void)
4912 astr
= parse_asm_str()->data
;
4915 printf("asm_alias: \"%s\"\n", astr
);
4917 v
= tok_alloc_const(astr
);
4921 static int post_type(CType
*type
, AttributeDef
*ad
, int storage
, int td
)
4923 int n
, l
, t1
, arg_size
, align
;
4924 Sym
**plast
, *s
, *first
;
4927 TokenString
*vla_array_tok
= NULL
;
4928 int *vla_array_str
= NULL
;
4931 /* function type, or recursive declarator (return if so) */
4933 if (TYPE_DIRECT
== (td
& (TYPE_DIRECT
|TYPE_ABSTRACT
)))
4937 else if (parse_btype(&pt
, &ad1
, 0))
4939 else if (td
& (TYPE_DIRECT
|TYPE_ABSTRACT
)) {
4940 merge_attr (ad
, &ad1
);
4951 /* read param name and compute offset */
4952 if (l
!= FUNC_OLD
) {
4953 if ((pt
.t
& VT_BTYPE
) == VT_VOID
&& tok
== ')')
4955 type_decl(&pt
, &ad1
, &n
, TYPE_DIRECT
| TYPE_ABSTRACT
| TYPE_PARAM
);
4956 if ((pt
.t
& VT_BTYPE
) == VT_VOID
)
4957 tcc_error("parameter declared as void");
4962 pt
.t
= VT_VOID
; /* invalid type */
4967 expect("identifier");
4968 convert_parameter_type(&pt
);
4969 arg_size
+= (type_size(&pt
, &align
) + PTR_SIZE
- 1) / PTR_SIZE
;
4970 /* these symbols may be evaluated for VLArrays (see below, under
4971 nocode_wanted) which is why we push them here as normal symbols
4972 temporarily. Example: int func(int a, int b[++a]); */
4973 s
= sym_push(n
, &pt
, VT_LOCAL
|VT_LVAL
, 0);
4979 if (l
== FUNC_NEW
&& tok
== TOK_DOTS
) {
4984 if (l
== FUNC_NEW
&& !parse_btype(&pt
, &ad1
, 0))
4985 tcc_error("invalid type");
4988 /* if no parameters, then old type prototype */
4991 /* remove parameter symbols from token table, keep on stack */
4993 sym_pop(local_stack
? &local_stack
: &global_stack
, first
->prev
, 1);
4994 for (s
= first
; s
; s
= s
->next
)
4998 /* NOTE: const is ignored in returned type as it has a special
4999 meaning in gcc / C++ */
5000 type
->t
&= ~VT_CONSTANT
;
5001 /* some ancient pre-K&R C allows a function to return an array
5002 and the array brackets to be put after the arguments, such
5003 that "int c()[]" means something like "int[] c()" */
5006 skip(']'); /* only handle simple "[]" */
5009 /* we push a anonymous symbol which will contain the function prototype */
5010 ad
->f
.func_args
= arg_size
;
5011 ad
->f
.func_type
= l
;
5012 s
= sym_push(SYM_FIELD
, type
, 0, 0);
5018 } else if (tok
== '[') {
5019 int saved_nocode_wanted
= nocode_wanted
;
5020 /* array definition */
5024 if (td
& TYPE_PARAM
) while (1) {
5025 /* XXX The optional type-quals and static should only be accepted
5026 in parameter decls. The '*' as well, and then even only
5027 in prototypes (not function defs). */
5029 case TOK_RESTRICT1
: case TOK_RESTRICT2
: case TOK_RESTRICT3
:
5040 /* Code generation is not done now but has to be done
5041 at start of function. Save code here for later use. */
5043 skip_or_save_block(&vla_array_tok
);
5045 vla_array_str
= vla_array_tok
->str
;
5046 begin_macro(vla_array_tok
, 2);
5055 } else if (tok
!= ']') {
5056 if (!local_stack
|| (storage
& VT_STATIC
))
5057 vpushi(expr_const());
5059 /* VLAs (which can only happen with local_stack && !VT_STATIC)
5060 length must always be evaluated, even under nocode_wanted,
5061 so that its size slot is initialized (e.g. under sizeof
5067 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
5070 tcc_error("invalid array size");
5072 if (!is_integer_btype(vtop
->type
.t
& VT_BTYPE
))
5073 tcc_error("size of variable length array should be an integer");
5079 /* parse next post type */
5080 post_type(type
, ad
, storage
, (td
& ~(TYPE_DIRECT
|TYPE_ABSTRACT
)) | TYPE_NEST
);
5082 if ((type
->t
& VT_BTYPE
) == VT_FUNC
)
5083 tcc_error("declaration of an array of functions");
5084 if ((type
->t
& VT_BTYPE
) == VT_VOID
5085 || type_size(type
, &align
) < 0)
5086 tcc_error("declaration of an array of incomplete type elements");
5088 t1
|= type
->t
& VT_VLA
;
5093 tcc_error("need explicit inner array size in VLAs");
5096 loc
-= type_size(&int_type
, &align
);
5100 vpush_type_size(type
, &align
);
5102 vset(&int_type
, VT_LOCAL
|VT_LVAL
, n
);
5109 nocode_wanted
= saved_nocode_wanted
;
5111 /* we push an anonymous symbol which will contain the array
5113 s
= sym_push(SYM_FIELD
, type
, 0, n
);
5114 type
->t
= (t1
? VT_VLA
: VT_ARRAY
) | VT_PTR
;
5117 if (vla_array_str
) {
5118 /* for function args, the top dimension is converted to pointer */
5119 if ((t1
& VT_VLA
) && (td
& TYPE_NEST
))
5120 s
->vla_array_str
= vla_array_str
;
5122 tok_str_free_str(vla_array_str
);
5128 /* Parse a type declarator (except basic type), and return the type
5129 in 'type'. 'td' is a bitmask indicating which kind of type decl is
5130 expected. 'type' should contain the basic type. 'ad' is the
5131 attribute definition of the basic type. It can be modified by
5132 type_decl(). If this (possibly abstract) declarator is a pointer chain
5133 it returns the innermost pointed to type (equals *type, but is a different
5134 pointer), otherwise returns type itself, that's used for recursive calls. */
5135 static CType
*type_decl(CType
*type
, AttributeDef
*ad
, int *v
, int td
)
5138 int qualifiers
, storage
;
5140 /* recursive type, remove storage bits first, apply them later again */
5141 storage
= type
->t
& VT_STORAGE
;
5142 type
->t
&= ~VT_STORAGE
;
5145 while (tok
== '*') {
5151 qualifiers
|= VT_ATOMIC
;
5156 qualifiers
|= VT_CONSTANT
;
5161 qualifiers
|= VT_VOLATILE
;
5167 /* XXX: clarify attribute handling */
5168 case TOK_ATTRIBUTE1
:
5169 case TOK_ATTRIBUTE2
:
5170 parse_attribute(ad
);
5174 type
->t
|= qualifiers
;
5176 /* innermost pointed to type is the one for the first derivation */
5177 ret
= pointed_type(type
);
5181 /* This is possibly a parameter type list for abstract declarators
5182 ('int ()'), use post_type for testing this. */
5183 if (!post_type(type
, ad
, 0, td
)) {
5184 /* It's not, so it's a nested declarator, and the post operations
5185 apply to the innermost pointed to type (if any). */
5186 /* XXX: this is not correct to modify 'ad' at this point, but
5187 the syntax is not clear */
5188 parse_attribute(ad
);
5189 post
= type_decl(type
, ad
, v
, td
);
5193 } else if (tok
>= TOK_IDENT
&& (td
& TYPE_DIRECT
)) {
5194 /* type identifier */
5199 if (!(td
& TYPE_ABSTRACT
))
5200 expect("identifier");
5203 post_type(post
, ad
, post
!= ret
? 0 : storage
,
5204 td
& ~(TYPE_DIRECT
|TYPE_ABSTRACT
));
5205 parse_attribute(ad
);
5210 /* indirection with full error checking and bound check */
5211 ST_FUNC
void indir(void)
5213 if ((vtop
->type
.t
& VT_BTYPE
) != VT_PTR
) {
5214 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FUNC
)
5218 if (vtop
->r
& VT_LVAL
)
5220 vtop
->type
= *pointed_type(&vtop
->type
);
5221 /* Arrays and functions are never lvalues */
5222 if (!(vtop
->type
.t
& (VT_ARRAY
| VT_VLA
))
5223 && (vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
) {
5225 /* if bound checking, the referenced pointer must be checked */
5226 #ifdef CONFIG_TCC_BCHECK
5227 if (tcc_state
->do_bounds_check
)
5228 vtop
->r
|= VT_MUSTBOUND
;
5233 /* pass a parameter to a function and do type checking and casting */
5234 static void gfunc_param_typed(Sym
*func
, Sym
*arg
)
5239 func_type
= func
->f
.func_type
;
5240 if (func_type
== FUNC_OLD
||
5241 (func_type
== FUNC_ELLIPSIS
&& arg
== NULL
)) {
5242 /* default casting : only need to convert float to double */
5243 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FLOAT
) {
5244 gen_cast_s(VT_DOUBLE
);
5245 } else if (vtop
->type
.t
& VT_BITFIELD
) {
5246 type
.t
= vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
);
5247 type
.ref
= vtop
->type
.ref
;
5249 } else if (vtop
->r
& VT_MUSTCAST
) {
5250 force_charshort_cast();
5252 } else if (arg
== NULL
) {
5253 tcc_error("too many arguments to function");
5256 type
.t
&= ~VT_CONSTANT
; /* need to do that to avoid false warning */
5257 gen_assign_cast(&type
);
5261 /* parse an expression and return its type without any side effect. */
5262 static void expr_type(CType
*type
, void (*expr_fn
)(void))
5271 /* parse an expression of the form '(type)' or '(expr)' and return its
5273 static void parse_expr_type(CType
*type
)
5279 if (parse_btype(type
, &ad
, 0)) {
5280 type_decl(type
, &ad
, &n
, TYPE_ABSTRACT
);
5282 expr_type(type
, gexpr
);
5287 static void parse_type(CType
*type
)
5292 if (!parse_btype(type
, &ad
, 0)) {
5295 type_decl(type
, &ad
, &n
, TYPE_ABSTRACT
);
5298 static void parse_builtin_params(int nc
, const char *args
)
5307 while ((c
= *args
++)) {
5322 type
.t
= VT_CONSTANT
;
5328 type
.t
= VT_CONSTANT
;
5330 type
.t
|= char_type
.t
;
5342 gen_assign_cast(&type
);
5349 static void parse_atomic(int atok
)
5351 int size
, align
, arg
, t
, save
= 0;
5352 CType
*atom
, *atom_ptr
, ct
= {0};
5355 static const char *const templates
[] = {
5357 * Each entry consists of callback and function template.
5358 * The template represents argument types and return type.
5360 * ? void (return-only)
5363 * A read-only atomic
5364 * p pointer to memory
5371 /* keep in order of appearance in tcctok.h: */
5372 /* __atomic_store */ "alm.?",
5373 /* __atomic_load */ "Asm.v",
5374 /* __atomic_exchange */ "alsm.v",
5375 /* __atomic_compare_exchange */ "aplbmm.b",
5376 /* __atomic_fetch_add */ "avm.v",
5377 /* __atomic_fetch_sub */ "avm.v",
5378 /* __atomic_fetch_or */ "avm.v",
5379 /* __atomic_fetch_xor */ "avm.v",
5380 /* __atomic_fetch_and */ "avm.v",
5381 /* __atomic_fetch_nand */ "avm.v",
5382 /* __atomic_and_fetch */ "avm.v",
5383 /* __atomic_sub_fetch */ "avm.v",
5384 /* __atomic_or_fetch */ "avm.v",
5385 /* __atomic_xor_fetch */ "avm.v",
5386 /* __atomic_and_fetch */ "avm.v",
5387 /* __atomic_nand_fetch */ "avm.v"
5389 const char *template = templates
[(atok
- TOK___atomic_store
)];
5391 atom
= atom_ptr
= NULL
;
5392 size
= 0; /* pacify compiler */
5397 switch (template[arg
]) {
5400 atom_ptr
= &vtop
->type
;
5401 if ((atom_ptr
->t
& VT_BTYPE
) != VT_PTR
)
5403 atom
= pointed_type(atom_ptr
);
5404 size
= type_size(atom
, &align
);
5406 || (size
& (size
- 1))
5407 || (atok
> TOK___atomic_compare_exchange
5408 && (0 == btype_size(atom
->t
& VT_BTYPE
)
5409 || (atom
->t
& VT_BTYPE
) == VT_PTR
)))
5410 expect("integral or integer-sized pointer target type");
5411 /* GCC does not care either: */
5412 /* if (!(atom->t & VT_ATOMIC))
5413 tcc_warning("pointer target declaration is missing '_Atomic'"); */
5417 if ((vtop
->type
.t
& VT_BTYPE
) != VT_PTR
5418 || type_size(pointed_type(&vtop
->type
), &align
) != size
)
5419 tcc_error("pointer target type mismatch in argument %d", arg
+ 1);
5420 gen_assign_cast(atom_ptr
);
5423 gen_assign_cast(atom
);
5427 gen_assign_cast(atom
);
5436 gen_assign_cast(&int_type
);
5440 gen_assign_cast(&ct
);
5443 if ('.' == template[++arg
])
5450 switch (template[arg
+ 1]) {
5459 sprintf(buf
, "%s_%d", get_tok_str(atok
, 0), size
);
5460 vpush_helper_func(tok_alloc_const(buf
));
5461 vrott(arg
- save
+ 1);
5462 gfunc_call(arg
- save
);
5465 PUT_R_RET(vtop
, ct
.t
);
5466 t
= ct
.t
& VT_BTYPE
;
5467 if (t
== VT_BYTE
|| t
== VT_SHORT
|| t
== VT_BOOL
) {
5469 vtop
->r
|= BFVAL(VT_MUSTCAST
, 1);
5471 vtop
->type
.t
= VT_INT
;
5483 ST_FUNC
void unary(void)
5485 int n
, t
, align
, size
, r
;
5490 /* generate line number info */
5492 tcc_debug_line(tcc_state
), tcc_tcov_check_line (tcc_state
, 1);
5495 /* XXX: GCC 2.95.3 does not generate a table although it should be
5503 #ifdef TCC_TARGET_PE
5504 t
= VT_SHORT
|VT_UNSIGNED
;
5512 vsetc(&type
, VT_CONST
, &tokc
);
5516 t
= VT_INT
| VT_UNSIGNED
;
5522 t
= VT_LLONG
| VT_UNSIGNED
;
5531 #ifdef TCC_USING_DOUBLE_FOR_LDOUBLE
5532 t
= VT_DOUBLE
| VT_LONG
;
5538 t
= (LONG_SIZE
== 8 ? VT_LLONG
: VT_INT
) | VT_LONG
;
5541 t
= (LONG_SIZE
== 8 ? VT_LLONG
: VT_INT
) | VT_LONG
| VT_UNSIGNED
;
5543 case TOK___FUNCTION__
:
5545 goto tok_identifier
;
5549 cstr_reset(&tokcstr
);
5550 cstr_cat(&tokcstr
, funcname
, 0);
5551 tokc
.str
.size
= tokcstr
.size
;
5552 tokc
.str
.data
= tokcstr
.data
;
5555 #ifdef TCC_TARGET_PE
5556 t
= VT_SHORT
| VT_UNSIGNED
;
5563 /* string parsing */
5566 if (tcc_state
->warn_write_strings
& WARN_ON
)
5571 memset(&ad
, 0, sizeof(AttributeDef
));
5572 ad
.section
= rodata_section
;
5573 decl_initializer_alloc(&type
, &ad
, VT_CONST
, 2, 0, 0);
5580 if (parse_btype(&type
, &ad
, 0)) {
5581 type_decl(&type
, &ad
, &n
, TYPE_ABSTRACT
);
5583 /* check ISOC99 compound literal */
5585 /* data is allocated locally by default */
5590 /* all except arrays are lvalues */
5591 if (!(type
.t
& VT_ARRAY
))
5593 memset(&ad
, 0, sizeof(AttributeDef
));
5594 decl_initializer_alloc(&type
, &ad
, r
, 1, 0, 0);
5595 } else if (t
== TOK_SOTYPE
) { /* from sizeof/alignof (...) */
5602 } else if (tok
== '{') {
5603 int saved_nocode_wanted
= nocode_wanted
;
5604 if (CONST_WANTED
&& !NOEVAL_WANTED
)
5606 if (0 == local_scope
)
5607 tcc_error("statement expression outside of function");
5608 /* save all registers */
5610 /* statement expression : we do not accept break/continue
5611 inside as GCC does. We do retain the nocode_wanted state,
5612 as statement expressions can't ever be entered from the
5613 outside, so any reactivation of code emission (from labels
5614 or loop heads) can be disabled again after the end of it. */
5616 /* If the statement expr can be entered, then we retain the current
5617 nocode_wanted state (from e.g. a 'return 0;' in the stmt-expr).
5618 If it can't be entered then the state is that from before the
5619 statement expression. */
5620 if (saved_nocode_wanted
)
5621 nocode_wanted
= saved_nocode_wanted
;
5636 /* functions names must be treated as function pointers,
5637 except for unary '&' and sizeof. Since we consider that
5638 functions are not lvalues, we only have to handle it
5639 there and in function calls. */
5640 /* arrays can also be used although they are not lvalues */
5641 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
&&
5642 !(vtop
->type
.t
& (VT_ARRAY
| VT_VLA
)))
5645 vtop
->sym
->a
.addrtaken
= 1;
5646 mk_pointer(&vtop
->type
);
5652 gen_test_zero(TOK_EQ
);
5663 if ((vtop
->type
.t
& VT_BTYPE
) == VT_PTR
)
5664 tcc_error("pointer not accepted for unary plus");
5665 /* In order to force cast, we add zero, except for floating point
5666 where we really need an noop (otherwise -0.0 will be transformed
5668 if (!is_float(vtop
->type
.t
)) {
5681 expr_type(&type
, unary
);
5682 if (t
== TOK_SIZEOF
) {
5683 vpush_type_size(&type
, &align
);
5684 gen_cast_s(VT_SIZE_T
);
5686 type_size(&type
, &align
);
5688 if (vtop
[1].r
& VT_SYM
)
5689 s
= vtop
[1].sym
; /* hack: accessing previous vtop */
5690 if (s
&& s
->a
.aligned
)
5691 align
= 1 << (s
->a
.aligned
- 1);
5696 case TOK_builtin_expect
:
5697 /* __builtin_expect is a no-op for now */
5698 parse_builtin_params(0, "ee");
5701 case TOK_builtin_types_compatible_p
:
5702 parse_builtin_params(0, "tt");
5703 vtop
[-1].type
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
5704 vtop
[0].type
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
5705 n
= is_compatible_types(&vtop
[-1].type
, &vtop
[0].type
);
5709 case TOK_builtin_choose_expr
:
5736 case TOK_builtin_constant_p
:
5737 parse_builtin_params(1, "e");
5739 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) != VT_CONST
5740 || ((vtop
->r
& VT_SYM
) && vtop
->sym
->a
.addrtaken
)
5746 case TOK_builtin_unreachable
:
5747 parse_builtin_params(0, ""); /* just skip '()' */
5752 case TOK_builtin_frame_address
:
5753 case TOK_builtin_return_address
:
5759 level
= expr_const();
5761 tcc_error("%s only takes positive integers", get_tok_str(tok1
, 0));
5765 vset(&type
, VT_LOCAL
, 0); /* local frame */
5767 #ifdef TCC_TARGET_RISCV64
5771 mk_pointer(&vtop
->type
);
5772 indir(); /* -> parent frame */
5774 if (tok1
== TOK_builtin_return_address
) {
5775 // assume return address is just above frame pointer on stack
5776 #ifdef TCC_TARGET_ARM
5779 #elif defined TCC_TARGET_RISCV64
5786 mk_pointer(&vtop
->type
);
5791 #ifdef TCC_TARGET_RISCV64
5792 case TOK_builtin_va_start
:
5793 parse_builtin_params(0, "ee");
5794 r
= vtop
->r
& VT_VALMASK
;
5798 tcc_error("__builtin_va_start expects a local variable");
5803 #ifdef TCC_TARGET_X86_64
5804 #ifdef TCC_TARGET_PE
5805 case TOK_builtin_va_start
:
5806 parse_builtin_params(0, "ee");
5807 r
= vtop
->r
& VT_VALMASK
;
5811 tcc_error("__builtin_va_start expects a local variable");
5813 vtop
->type
= char_pointer_type
;
5818 case TOK_builtin_va_arg_types
:
5819 parse_builtin_params(0, "t");
5820 vpushi(classify_x86_64_va_arg(&vtop
->type
));
5827 #ifdef TCC_TARGET_ARM64
5828 case TOK_builtin_va_start
: {
5829 parse_builtin_params(0, "ee");
5833 vtop
->type
.t
= VT_VOID
;
5836 case TOK_builtin_va_arg
: {
5837 parse_builtin_params(0, "et");
5845 case TOK___arm64_clear_cache
: {
5846 parse_builtin_params(0, "ee");
5849 vtop
->type
.t
= VT_VOID
;
5854 /* atomic operations */
5855 case TOK___atomic_store
:
5856 case TOK___atomic_load
:
5857 case TOK___atomic_exchange
:
5858 case TOK___atomic_compare_exchange
:
5859 case TOK___atomic_fetch_add
:
5860 case TOK___atomic_fetch_sub
:
5861 case TOK___atomic_fetch_or
:
5862 case TOK___atomic_fetch_xor
:
5863 case TOK___atomic_fetch_and
:
5864 case TOK___atomic_fetch_nand
:
5865 case TOK___atomic_add_fetch
:
5866 case TOK___atomic_sub_fetch
:
5867 case TOK___atomic_or_fetch
:
5868 case TOK___atomic_xor_fetch
:
5869 case TOK___atomic_and_fetch
:
5870 case TOK___atomic_nand_fetch
:
5874 /* pre operations */
5885 if (is_float(vtop
->type
.t
)) {
5895 goto tok_identifier
;
5897 /* allow to take the address of a label */
5898 if (tok
< TOK_UIDENT
)
5899 expect("label identifier");
5900 s
= label_find(tok
);
5902 s
= label_push(&global_label_stack
, tok
, LABEL_FORWARD
);
5904 if (s
->r
== LABEL_DECLARED
)
5905 s
->r
= LABEL_FORWARD
;
5907 if ((s
->type
.t
& VT_BTYPE
) != VT_PTR
) {
5908 s
->type
.t
= VT_VOID
;
5909 mk_pointer(&s
->type
);
5910 s
->type
.t
|= VT_STATIC
;
5912 vpushsym(&s
->type
, s
);
5918 CType controlling_type
;
5919 int has_default
= 0;
5922 TokenString
*str
= NULL
;
5923 int saved_nocode_wanted
= nocode_wanted
;
5924 nocode_wanted
&= ~CONST_WANTED_MASK
;
5928 expr_type(&controlling_type
, expr_eq
);
5929 convert_parameter_type (&controlling_type
);
5931 nocode_wanted
= saved_nocode_wanted
;
5936 if (tok
== TOK_DEFAULT
) {
5938 tcc_error("too many 'default'");
5944 AttributeDef ad_tmp
;
5948 parse_btype(&cur_type
, &ad_tmp
, 0);
5949 type_decl(&cur_type
, &ad_tmp
, &itmp
, TYPE_ABSTRACT
);
5950 if (compare_types(&controlling_type
, &cur_type
, 0)) {
5952 tcc_error("type match twice");
5962 skip_or_save_block(&str
);
5964 skip_or_save_block(NULL
);
5971 type_to_str(buf
, sizeof buf
, &controlling_type
, NULL
);
5972 tcc_error("type '%s' does not match any association", buf
);
5974 begin_macro(str
, 1);
5983 // special qnan , snan and infinity values
5988 vtop
->type
.t
= VT_FLOAT
;
5993 goto special_math_val
;
5996 goto special_math_val
;
6000 if (tok
< TOK_UIDENT
)
6001 tcc_error("expression expected before '%s'", get_tok_str(tok
, &tokc
));
6005 if (!s
|| IS_ASM_SYM(s
)) {
6006 const char *name
= get_tok_str(t
, NULL
);
6008 tcc_error("'%s' undeclared", name
);
6009 /* for simple function calls, we tolerate undeclared
6010 external reference to int() function */
6011 tcc_warning_c(warn_implicit_function_declaration
)(
6012 "implicit declaration of function '%s'", name
);
6013 s
= external_global_sym(t
, &func_old_type
);
6017 /* A symbol that has a register is a local register variable,
6018 which starts out as VT_LOCAL value. */
6019 if ((r
& VT_VALMASK
) < VT_CONST
)
6020 r
= (r
& ~VT_VALMASK
) | VT_LOCAL
;
6022 vset(&s
->type
, r
, s
->c
);
6023 /* Point to s as backpointer (even without r&VT_SYM).
6024 Will be used by at least the x86 inline asm parser for
6030 #ifdef TCC_TARGET_PE
6031 if (s
->a
.dllimport
) {
6032 mk_pointer(&vtop
->type
);
6037 } else if (r
== VT_CONST
&& IS_ENUM_VAL(s
->type
.t
)) {
6038 vtop
->c
.i
= s
->enum_val
;
6043 /* post operations */
6045 if (tok
== TOK_INC
|| tok
== TOK_DEC
) {
6048 } else if (tok
== '.' || tok
== TOK_ARROW
) {
6049 int qualifiers
, cumofs
;
6051 if (tok
== TOK_ARROW
)
6053 qualifiers
= vtop
->type
.t
& (VT_CONSTANT
| VT_VOLATILE
);
6055 /* expect pointer on structure */
6057 s
= find_field(&vtop
->type
, tok
, &cumofs
);
6058 /* add field offset to pointer */
6060 vtop
->type
= char_pointer_type
; /* change type to 'char *' */
6063 /* change type to field type, and set to lvalue */
6064 vtop
->type
= s
->type
;
6065 vtop
->type
.t
|= qualifiers
;
6066 /* an array is never an lvalue */
6067 if (!(vtop
->type
.t
& VT_ARRAY
)) {
6069 #ifdef CONFIG_TCC_BCHECK
6070 /* if bound checking, the referenced pointer must be checked */
6071 if (tcc_state
->do_bounds_check
)
6072 vtop
->r
|= VT_MUSTBOUND
;
6076 } else if (tok
== '[') {
6082 } else if (tok
== '(') {
6085 int nb_args
, ret_nregs
, ret_align
, regsize
, variadic
;
6086 TokenString
*p
, *p2
;
6089 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
) {
6090 /* pointer test (no array accepted) */
6091 if ((vtop
->type
.t
& (VT_BTYPE
| VT_ARRAY
)) == VT_PTR
) {
6092 vtop
->type
= *pointed_type(&vtop
->type
);
6093 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
)
6097 expect("function pointer");
6100 vtop
->r
&= ~VT_LVAL
; /* no lvalue */
6102 /* get return type */
6105 sa
= s
->next
; /* first parameter */
6106 nb_args
= regsize
= 0;
6108 /* compute first implicit argument if a structure is returned */
6109 if ((s
->type
.t
& VT_BTYPE
) == VT_STRUCT
) {
6110 variadic
= (s
->f
.func_type
== FUNC_ELLIPSIS
);
6111 ret_nregs
= gfunc_sret(&s
->type
, variadic
, &ret
.type
,
6112 &ret_align
, ®size
);
6113 if (ret_nregs
<= 0) {
6114 /* get some space for the returned structure */
6115 size
= type_size(&s
->type
, &align
);
6116 #ifdef TCC_TARGET_ARM64
6117 /* On arm64, a small struct is return in registers.
6118 It is much easier to write it to memory if we know
6119 that we are allowed to write some extra bytes, so
6120 round the allocated space up to a power of 2: */
6122 while (size
& (size
- 1))
6123 size
= (size
| (size
- 1)) + 1;
6125 loc
= (loc
- size
) & -align
;
6127 ret
.r
= VT_LOCAL
| VT_LVAL
;
6128 /* pass it as 'int' to avoid structure arg passing
6130 vseti(VT_LOCAL
, loc
);
6131 #ifdef CONFIG_TCC_BCHECK
6132 if (tcc_state
->do_bounds_check
)
6146 if (ret_nregs
> 0) {
6147 /* return in register */
6149 PUT_R_RET(&ret
, ret
.type
.t
);
6154 r
= tcc_state
->reverse_funcargs
;
6157 skip_or_save_block(&p2
);
6158 p2
->prev
= p
, p
= p2
;
6161 gfunc_param_typed(s
, sa
);
6172 tcc_error("too few arguments to function");
6174 if (p
) { /* with reverse_funcargs */
6175 for (n
= 0; p
; p
= p2
, ++n
) {
6178 sa
= sa
->next
, p2
= p2
->prev
;
6181 begin_macro(p
, 1), next();
6183 gfunc_param_typed(s
, sa
);
6190 vcheck_cmp(); /* the generators don't like VT_CMP on vtop */
6191 gfunc_call(nb_args
);
6193 if (ret_nregs
< 0) {
6194 vsetc(&ret
.type
, ret
.r
, &ret
.c
);
6195 #ifdef TCC_TARGET_RISCV64
6196 arch_transfer_ret_regs(1);
6202 int rc
= reg_classes
[ret
.r
] & ~(RC_INT
| RC_FLOAT
);
6203 /* We assume that when a structure is returned in multiple
6204 registers, their classes are consecutive values of the
6207 for (r
= 0; r
< NB_REGS
; ++r
)
6208 if (reg_classes
[r
] & rc
)
6210 vsetc(&ret
.type
, r
, &ret
.c
);
6212 vsetc(&ret
.type
, ret
.r
, &ret
.c
);
6215 /* handle packed struct return */
6216 if (((s
->type
.t
& VT_BTYPE
) == VT_STRUCT
) && ret_nregs
) {
6219 size
= type_size(&s
->type
, &align
);
6220 /* We're writing whole regs often, make sure there's enough
6221 space. Assume register size is power of 2. */
6222 size
= (size
+ regsize
- 1) & -regsize
;
6223 if (ret_align
> align
)
6225 loc
= (loc
- size
) & -align
;
6229 vset(&ret
.type
, VT_LOCAL
| VT_LVAL
, addr
+ offset
);
6233 if (--ret_nregs
== 0)
6237 vset(&s
->type
, VT_LOCAL
| VT_LVAL
, addr
);
6240 /* Promote char/short return values. This is matters only
6241 for calling function that were not compiled by TCC and
6242 only on some architectures. For those where it doesn't
6243 matter we expect things to be already promoted to int,
6245 t
= s
->type
.t
& VT_BTYPE
;
6246 if (t
== VT_BYTE
|| t
== VT_SHORT
|| t
== VT_BOOL
) {
6248 vtop
->r
|= BFVAL(VT_MUSTCAST
, 1);
6250 vtop
->type
.t
= VT_INT
;
6254 if (s
->f
.func_noreturn
) {
6256 tcc_tcov_block_end(tcc_state
, -1);
6265 #ifndef precedence_parser /* original top-down parser */
6267 static void expr_prod(void)
6272 while ((t
= tok
) == '*' || t
== '/' || t
== '%') {
6279 static void expr_sum(void)
6284 while ((t
= tok
) == '+' || t
== '-') {
6291 static void expr_shift(void)
6296 while ((t
= tok
) == TOK_SHL
|| t
== TOK_SAR
) {
6303 static void expr_cmp(void)
6308 while (((t
= tok
) >= TOK_ULE
&& t
<= TOK_GT
) ||
6309 t
== TOK_ULT
|| t
== TOK_UGE
) {
6316 static void expr_cmpeq(void)
6321 while ((t
= tok
) == TOK_EQ
|| t
== TOK_NE
) {
6328 static void expr_and(void)
6331 while (tok
== '&') {
6338 static void expr_xor(void)
6341 while (tok
== '^') {
6348 static void expr_or(void)
6351 while (tok
== '|') {
6358 static void expr_landor(int op
);
6360 static void expr_land(void)
6363 if (tok
== TOK_LAND
)
6367 static void expr_lor(void)
6374 # define expr_landor_next(op) op == TOK_LAND ? expr_or() : expr_land()
6375 #else /* defined precedence_parser */
6376 # define expr_landor_next(op) unary(), expr_infix(precedence(op) + 1)
6377 # define expr_lor() unary(), expr_infix(1)
6379 static int precedence(int tok
)
6382 case TOK_LOR
: return 1;
6383 case TOK_LAND
: return 2;
6387 case TOK_EQ
: case TOK_NE
: return 6;
6388 relat
: case TOK_ULT
: case TOK_UGE
: return 7;
6389 case TOK_SHL
: case TOK_SAR
: return 8;
6390 case '+': case '-': return 9;
6391 case '*': case '/': case '%': return 10;
6393 if (tok
>= TOK_ULE
&& tok
<= TOK_GT
)
/* Operator precedence table for the precedence-climbing parser,
   indexed by token value; only single-byte tokens (< 256) are cached,
   larger token values fall back to the precedence() function. */
static unsigned char prec[256];

/* Fill the precedence cache once, by evaluating the precedence()
   function for every possible single-byte token value. */
static void init_prec(void)
{
    int i;
    for (i = 0; i < 256; i++)
        prec[i] = precedence(i);
}
6405 #define precedence(i) ((unsigned)i < 256 ? prec[i] : 0)
6407 static void expr_landor(int op
);
6409 static void expr_infix(int p
)
6412 while ((p2
= precedence(t
)) >= p
) {
6413 if (t
== TOK_LOR
|| t
== TOK_LAND
) {
6418 if (precedence(tok
) > p2
)
6427 /* Assuming vtop is a value used in a conditional context
6428 (i.e. compared with zero) return 0 if it's false, 1 if
6429 true and -1 if it can't be statically determined. */
6430 static int condition_3way(void)
6433 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
&&
6434 (!(vtop
->r
& VT_SYM
) || !vtop
->sym
->a
.weak
)) {
6436 gen_cast_s(VT_BOOL
);
6443 static void expr_landor(int op
)
6445 int t
= 0, cc
= 1, f
= 0, i
= op
== TOK_LAND
, c
;
6447 c
= f
? i
: condition_3way();
6449 save_regs(1), cc
= 0;
6451 nocode_wanted
++, f
= 1;
6459 expr_landor_next(op
);
6471 static int is_cond_bool(SValue
*sv
)
6473 if ((sv
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
6474 && (sv
->type
.t
& VT_BTYPE
) == VT_INT
)
6475 return (unsigned)sv
->c
.i
< 2;
6476 if (sv
->r
== VT_CMP
)
6481 static void expr_cond(void)
6483 int tt
, u
, r1
, r2
, rc
, t1
, t2
, islv
, c
, g
;
6490 c
= condition_3way();
6491 g
= (tok
== ':' && gnu_ext
);
6501 /* needed to avoid having different registers saved in
6513 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FUNC
)
6514 mk_pointer(&vtop
->type
);
6515 sv
= *vtop
; /* save value to handle it later */
6516 vtop
--; /* no vpop so that FP stack is not flushed */
6533 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FUNC
)
6534 mk_pointer(&vtop
->type
);
6536 /* cast operands to correct type according to ISOC rules */
6537 if (!combine_types(&type
, &sv
, vtop
, '?'))
6538 type_incompatibility_error(&sv
.type
, &vtop
->type
,
6539 "type mismatch in conditional expression (have '%s' and '%s')");
6541 if (c
< 0 && is_cond_bool(vtop
) && is_cond_bool(&sv
)) {
6542 /* optimize "if (f ? a > b : c || d) ..." for example, where normally
6543 "a < b" and "c || d" would be forced to "(int)0/1" first, whereas
6544 this code jumps directly to the if's then/else branches. */
6549 /* combine jump targets of 2nd op with VT_CMP of 1st op */
6553 // tcc_warning("two conditions expr_cond");
6557 /* keep structs lvalue by transforming `(expr ? a : b)` to `*(expr ? &a : &b)` so
6558 that `(expr ? a : b).mem` does not error with "lvalue expected" */
6559 islv
= (vtop
->r
& VT_LVAL
) && (sv
.r
& VT_LVAL
) && VT_STRUCT
== (type
.t
& VT_BTYPE
);
6561 /* now we convert second operand */
6565 mk_pointer(&vtop
->type
);
6567 } else if (VT_STRUCT
== (vtop
->type
.t
& VT_BTYPE
))
6571 rc
= RC_TYPE(type
.t
);
6572 /* for long longs, we use fixed registers to avoid having
6573 to handle a complicated move */
6574 if (USING_TWO_WORDS(type
.t
))
6575 rc
= RC_RET(type
.t
);
6586 /* this is horrible, but we must also convert first
6592 mk_pointer(&vtop
->type
);
6594 } else if (VT_STRUCT
== (vtop
->type
.t
& VT_BTYPE
))
6600 move_reg(r2
, r1
, islv
? VT_PTR
: type
.t
);
6610 static void expr_eq(void)
6615 if ((t
= tok
) == '=' || TOK_ASSIGN(t
)) {
6623 gen_op(TOK_ASSIGN_OP(t
));
/* Parse a full expression, i.e. a comma-separated list of
   assignment expressions; the value of the last one is left in vtop.
   NOTE(review): interior original lines are missing in this
   extraction; fragment kept verbatim. */
6629 ST_FUNC
void gexpr(void)
6637 } while (tok
== ',');
6639 /* convert array & function to pointer */
6640 convert_parameter_type(&vtop
->type
);
6642 /* make builtin_constant_p((1,2)) return 0 (like on gcc) */
6643 if ((vtop
->r
& VT_VALMASK
) == VT_CONST
&& nocode_wanted
&& !CONST_WANTED
)
6644 gv(RC_TYPE(vtop
->type
.t
));
6648 /* parse a constant expression and return value in vtop. */
/* Parse an expression in "constant wanted" mode: the CONST_WANTED_BIT
   in nocode_wanted is raised around the parse so no code is generated.
   NOTE(review): the line between the += and -= (the actual parse call,
   orig 6652) is missing from this extraction; kept verbatim. */
6649 static void expr_const1(void)
6651 nocode_wanted
+= CONST_WANTED_BIT
;
6653 nocode_wanted
-= CONST_WANTED_BIT
;
6656 /* parse an integer constant and return its value. */
/* Parse an integer constant expression and return its 64-bit value;
   errors out unless vtop holds a plain VT_CONST (no lvalue, symbol
   reference, or non-constant marker).
   NOTE(review): surrounding original lines (parse call, value fetch,
   return) are missing from this extraction; fragment kept verbatim. */
6657 static inline int64_t expr_const64(void)
6661 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
| VT_NONCONST
)) != VT_CONST
)
6662 expect("constant expression");
6668 /* parse an integer constant and return its value.
6669 Complain if it doesn't fit 32bit (signed or unsigned). */
/* Parse an integer constant expression, diagnosing values that fit
   neither in signed nor in unsigned 32 bits; the visible comparison
   checks the 64-bit value wc against its truncated 32-bit copy c.
   NOTE(review): the declaration/assignment of c and the return
   statement are missing from this extraction; kept verbatim. */
6670 ST_FUNC
int expr_const(void)
6673 int64_t wc
= expr_const64();
6675 if (c
!= wc
&& (unsigned)c
!= wc
)
6676 tcc_error("constant exceeds 32 bit");
6680 /* ------------------------------------------------------------------------- */
6681 /* return from function */
6683 #ifndef TCC_TARGET_ARM64
6684 static void gfunc_return(CType
*func_type
)
6686 if ((func_type
->t
& VT_BTYPE
) == VT_STRUCT
) {
6687 CType type
, ret_type
;
6688 int ret_align
, ret_nregs
, regsize
;
6689 ret_nregs
= gfunc_sret(func_type
, func_var
, &ret_type
,
6690 &ret_align
, ®size
);
6691 if (ret_nregs
< 0) {
6692 #ifdef TCC_TARGET_RISCV64
6693 arch_transfer_ret_regs(0);
6695 } else if (0 == ret_nregs
) {
6696 /* if returning structure, must copy it to implicit
6697 first pointer arg location */
6700 vset(&type
, VT_LOCAL
| VT_LVAL
, func_vc
);
6703 /* copy structure value to pointer */
6706 /* returning structure packed into registers */
6707 int size
, addr
, align
, rc
, n
;
6708 size
= type_size(func_type
,&align
);
6709 if ((align
& (ret_align
- 1))
6710 && ((vtop
->r
& VT_VALMASK
) < VT_CONST
/* pointer to struct */
6711 || (vtop
->c
.i
& (ret_align
- 1))
6713 loc
= (loc
- size
) & -ret_align
;
6716 vset(&type
, VT_LOCAL
| VT_LVAL
, addr
);
6720 vset(&ret_type
, VT_LOCAL
| VT_LVAL
, addr
);
6722 vtop
->type
= ret_type
;
6723 rc
= RC_RET(ret_type
.t
);
6724 //printf("struct return: n:%d t:%02x rc:%02x\n", ret_nregs, ret_type.t, rc);
6725 for (n
= ret_nregs
; --n
> 0;) {
6729 incr_offset(regsize
);
6730 /* We assume that when a structure is returned in multiple
6731 registers, their classes are consecutive values of the
6736 vtop
-= ret_nregs
- 1;
6739 gv(RC_RET(func_type
->t
));
6741 vtop
--; /* NOT vpop() because on x86 it would flush the fp stack */
/* Emitted at the end of a function body when control can fall off the
   end: void functions need nothing, main() gets an implicit "return 0"
   (cast through gen_assign_cast + gfunc_return), anything else draws a
   "might return no value" warning.
   NOTE(review): some interior original lines are missing from this
   extraction; fragment kept verbatim. */
6745 static void check_func_return(void)
6747 if ((func_vt
.t
& VT_BTYPE
) == VT_VOID
)
6749 if (!strcmp (funcname
, "main")
6750 && (func_vt
.t
& VT_BTYPE
) == VT_INT
) {
6751 /* main returns 0 by default */
6753 gen_assign_cast(&func_vt
);
6754 gfunc_return(&func_vt
);
6756 tcc_warning("function might return no value: '%s'", funcname
);
6760 /* ------------------------------------------------------------------------- */
6763 static int case_cmp(uint64_t a
, uint64_t b
)
6765 if (cur_switch
->sv
.type
.t
& VT_UNSIGNED
)
6766 return a
< b
? -1 : a
> b
;
6768 return (int64_t)a
< (int64_t)b
? -1 : (int64_t)a
> (int64_t)b
;
6771 static int case_cmp_qs(const void *pa
, const void *pb
)
6773 return case_cmp((*(struct case_t
**)pa
)->v1
, (*(struct case_t
**)pb
)->v1
);
/* Sort the collected case ranges of a switch by lower bound, diagnose
   overlapping ranges as duplicate case values (reporting the later
   source line), and merge adjacent ranges that jump to the same label.
   NOTE(review): the local declaration of p and the loop-advance lines
   are missing from this extraction; fragment kept verbatim. */
6776 static void case_sort(struct switch_t
*sw
)
6781 qsort(sw
->p
, sw
->n
, sizeof *sw
->p
, case_cmp_qs
);
6783 while (p
< sw
->p
+ sw
->n
- 1) {
/* overlap between consecutive sorted ranges => duplicate case value */
6784 if (case_cmp(p
[0]->v2
, p
[1]->v1
) >= 0) {
6785 int l1
= p
[0]->line
, l2
= p
[1]->line
;
6786 /* using special format "%i:..." to show specific line */
6787 tcc_error("%i:duplicate case value", l1
> l2
? l1
: l2
);
/* adjacent ranges with identical target: coalesce into one range */
6788 } else if (p
[0]->v2
+ 1 == p
[1]->v1
&& p
[0]->ind
== p
[1]->ind
) {
6789 /* treat "case 1: case 2: case 3:" like "case 1 ... 3: */
6790 p
[1]->v1
= p
[0]->v1
;
6792 memmove(p
, p
+ 1, (--sw
->n
- (p
- sw
->p
)) * sizeof *p
);
/* Generate comparison/jump code for the sorted case list of a switch:
   binary search (recursing on the lower half) while len > 8, then
   linear tests; single-value cases use TOK_EQ, ranges use TOK_GT /
   TOK_GE pairs. Returns the accumulated jump chain to the default.
   NOTE(review): several interior original lines (declarations, loop
   framing) are missing from this extraction; fragment kept verbatim. */
6798 static int gcase(struct case_t
**base
, int len
, int dsym
)
6803 t
= vtop
->type
.t
& VT_BTYPE
;
6807 /* binary search while len > 8, else linear */
6808 l2
= len
> 8 ? len
/2 : 0;
6810 vdup(), vpush64(t
, p
->v2
);
6811 if (l2
== 0 && p
->v1
== p
->v2
) {
6812 gen_op(TOK_EQ
); /* jmp to case when equal */
6813 gsym_addr(gvtst(0, 0), p
->ind
);
6815 /* case v1 ... v2 */
6816 gen_op(TOK_GT
); /* jmp over when > V2 */
6817 if (len
== 1) /* last case test jumps to default when false */
6818 dsym
= gvtst(0, dsym
), e
= 0;
6821 vdup(), vpush64(t
, p
->v1
);
6822 gen_op(TOK_GE
); /* jmp to case when >= V1 */
6823 gsym_addr(gvtst(0, 0), p
->ind
);
/* recurse into the lower half of the sorted cases */
6824 dsym
= gcase(base
, l2
, dsym
);
/* continue with the upper half */
6827 ++l2
, base
+= l2
, len
-= l2
;
6829 /* jump automagically will suppress more jumps */
/* Tear down the current switch context: free its case array and pop
   it off the cur_switch stack.
   NOTE(review): the final line freeing sw itself (orig 6838) is
   missing from this extraction; fragment kept verbatim. */
6833 static void end_switch(void)
6835 struct switch_t
*sw
= cur_switch
;
6836 dynarray_reset(&sw
->p
, &sw
->n
);
6837 cur_switch
= sw
->prev
;
6841 /* ------------------------------------------------------------------------- */
6842 /* __attribute__((cleanup(fn))) */
/* Emit calls to __attribute__((cleanup(fn))) handlers for every
   pending cleanup on the current scope's chain, walking cls->next
   until 'stop'. For each entry the cleanup function (cleanup_func)
   is pushed and called with the address of the guarded variable
   (prev_tok), hence the mk_pointer on vtop.
   NOTE(review): interior original lines (the actual call emission)
   are missing from this extraction; fragment kept verbatim. */
6844 static void try_call_scope_cleanup(Sym
*stop
)
6846 Sym
*cls
= cur_scope
->cl
.s
;
6848 for (; cls
!= stop
; cls
= cls
->next
) {
6849 Sym
*fs
= cls
->cleanup_func
;
6850 Sym
*vs
= cls
->prev_tok
;
6852 vpushsym(&fs
->type
, fs
);
6853 vset(&vs
->type
, vs
->r
, vs
->c
);
/* pass the variable by address to the cleanup function */
6855 mk_pointer(&vtop
->type
);
/* For a backward goto: find the nearest common ancestor (NCA) of the
   current cleanup chain and the chain recorded at the label
   (cleanupstate), by first equalising depths (ocd/ccd, encoded in the
   Sym 'v' field without SYM_FIELD) and then walking both chains in
   lockstep, then run cleanups down to that ancestor.
   NOTE(review): declarations and the early-return line are missing
   from this extraction; fragment kept verbatim. */
6861 static void try_call_cleanup_goto(Sym
*cleanupstate
)
6866 if (!cur_scope
->cl
.s
)
6869 /* search NCA of both cleanup chains given parents and initial depth */
6870 ocd
= cleanupstate
? cleanupstate
->v
& ~SYM_FIELD
: 0;
6871 for (ccd
= cur_scope
->cl
.n
, oc
= cleanupstate
; ocd
> ccd
; --ocd
, oc
= oc
->next
)
6873 for (cc
= cur_scope
->cl
.s
; ccd
> ocd
; --ccd
, cc
= cc
->next
)
6875 for (; cc
!= oc
; cc
= cc
->next
, oc
= oc
->next
, --ccd
)
/* cc is now the NCA: run cleanups from the current scope down to it */
6878 try_call_scope_cleanup(cc
);
6881 /* call 'func' for each __attribute__((cleanup(func))) */
/* On leaving a scope: resolve pending forward gotos whose cleanup
   depth (g->c) exceeds the target scope's depth (o->cl.n) — run the
   scope's cleanups on their path and re-emit their jump — then run the
   scope's own cleanups.
   NOTE(review): interior original lines of the loop body are missing
   from this extraction; fragment kept verbatim. */
6882 static void block_cleanup(struct scope
*o
)
6886 for (pg
= &pending_gotos
; (g
= *pg
) && g
->c
> o
->cl
.n
;) {
6887 if (g
->prev_tok
->r
& LABEL_FORWARD
) {
6892 try_call_scope_cleanup(o
->cl
.s
);
6893 pcl
->jnext
= gjmp(0);
6895 goto remove_pending
;
/* finally run cleanups registered in this scope itself */
6905 try_call_scope_cleanup(o
->cl
.s
);
6908 /* ------------------------------------------------------------------------- */
6911 static void vla_restore(int loc
)
6914 gen_vla_sp_restore(loc
);
/* When leaving scopes up to (not including) 'o', restore the stack
   pointer of the outermost scope that allocated VLAs, using its saved
   locorig slot.
   NOTE(review): the loop-body lines selecting 'v' are missing from
   this extraction; fragment kept verbatim. */
6917 static void vla_leave(struct scope
*o
)
6919 struct scope
*c
= cur_scope
, *v
= NULL
;
6920 for (; c
!= o
&& c
; c
= c
->prev
)
6924 vla_restore(v
->vla
.locorig
);
6927 /* ------------------------------------------------------------------------- */
/* Enter a new block scope: link 'o' onto the scope stack, reset its
   VLA counter, and snapshot the local symbol and local label stacks
   so prev_scope() can pop back to them.
   NOTE(review): a couple of interior original lines are missing from
   this extraction; fragment kept verbatim. */
6930 static void new_scope(struct scope
*o
)
6932 /* copy and link previous scope */
6934 o
->prev
= cur_scope
;
6936 cur_scope
->vla
.num
= 0;
6938 /* record local declaration stack position */
6939 o
->lstk
= local_stack
;
6940 o
->llstk
= local_label_stack
;
/* Leave block scope 'o': run cleanups registered since the parent
   scope, pop locally declared labels and symbols back to the
   snapshots taken in new_scope(), and restore cur_scope.
   NOTE(review): some interior original lines are missing from this
   extraction; fragment kept verbatim. */
6944 static void prev_scope(struct scope
*o
, int is_expr
)
6948 if (o
->cl
.s
!= o
->prev
->cl
.s
)
6949 block_cleanup(o
->prev
);
6951 /* pop locally defined labels */
6952 label_pop(&local_label_stack
, o
->llstk
, is_expr
);
6954 /* In the is_expr case (a statement expression is finished here),
6955 vtop might refer to symbols on the local_stack. Either via the
6956 type or via vtop->sym. We can't pop those nor any that in turn
6957 might be referred to. To make it easier we don't roll back
6958 any symbols in that case; some upper level call to block() will
6959 do that. We do have to remove such symbols from the lookup
6960 tables, though. sym_pop will do that. */
6962 /* pop locally defined symbols */
6963 pop_local_syms(o
->lstk
, is_expr
);
6964 cur_scope
= o
->prev
;
6968 /* leave a scope via break/continue(/goto) */
/* Leave scope 'o' via break/continue/goto: run its pending
   __attribute__((cleanup)) handlers.
   NOTE(review): additional lines of this function (e.g. VLA handling)
   are missing from this extraction; fragment kept verbatim. */
6969 static void leave_scope(struct scope
*o
)
6973 try_call_scope_cleanup(o
->cl
.s
);
6977 /* short versiona for scopes with 'if/do/while/switch' which can
6978 declare only types (of struct/union/enum) */
/* Short scope entry for if/do/while/switch headers, which can declare
   only struct/union/enum types: just snapshot the local symbol stack. */
6979 static void new_scope_s(struct scope
*o
)
6981 o
->lstk
= local_stack
;
/* Counterpart of new_scope_s(): pop symbols declared in the short
   scope back to the recorded local_stack position. */
6985 static void prev_scope_s(struct scope
*o
)
6987 sym_pop(&local_stack
, o
->lstk
, 0);
6991 /* ------------------------------------------------------------------------- */
6992 /* call block from 'for do while' loops */
/* Parse a loop body: temporarily install bsym/csym as the current
   break/continue jump chains (saving the enclosing ones in b/c and
   the enclosing loop scope in lo/co), then call block().
   NOTE(review): the save/restore and block() call lines are missing
   from this extraction; fragment kept verbatim. */
6994 static void lblock(int *bsym
, int *csym
)
6996 struct scope
*lo
= loop_scope
, *co
= cur_scope
;
6997 int *b
= co
->bsym
, *c
= co
->csym
;
7011 static void block(int flags
)
7013 int a
, b
, c
, d
, e
, t
;
7017 if (flags
& STMT_EXPR
) {
7018 /* default return value is (void) */
7020 vtop
->type
.t
= VT_VOID
;
7025 /* If the token carries a value, next() might destroy it. Only with
7026 invalid code such as f(){"123"4;} */
7027 if (TOK_HAS_VALUE(t
))
7032 tcc_tcov_check_line (tcc_state
, 0), tcc_tcov_block_begin (tcc_state
);
7041 if (tok
== TOK_ELSE
) {
7046 gsym(d
); /* patch else jmp */
7052 } else if (t
== TOK_WHILE
) {
7066 } else if (t
== '{') {
7068 tcc_debug_stabn(tcc_state
, N_LBRAC
, ind
- func_ind
);
7071 /* handle local labels declarations */
7072 while (tok
== TOK_LABEL
) {
7075 if (tok
< TOK_UIDENT
)
7076 expect("label identifier");
7077 label_push(&local_label_stack
, tok
, LABEL_DECLARED
);
7079 } while (tok
== ',');
7083 while (tok
!= '}') {
7086 if (flags
& STMT_EXPR
)
7088 block(flags
| STMT_COMPOUND
);
7092 prev_scope(&o
, flags
& STMT_EXPR
);
7094 tcc_debug_stabn(tcc_state
, N_RBRAC
, ind
- func_ind
);
7097 else if (!nocode_wanted
)
7098 check_func_return();
7100 } else if (t
== TOK_RETURN
) {
7101 b
= (func_vt
.t
& VT_BTYPE
) != VT_VOID
;
7105 gen_assign_cast(&func_vt
);
7107 if (vtop
->type
.t
!= VT_VOID
)
7108 tcc_warning("void function returns a value");
7112 tcc_warning("'return' with no value");
7115 leave_scope(root_scope
);
7117 gfunc_return(&func_vt
);
7119 /* jump unless last stmt in top-level block */
7120 if (tok
!= '}' || local_scope
!= 1)
7123 tcc_tcov_block_end (tcc_state
, -1);
7126 } else if (t
== TOK_BREAK
) {
7128 if (!cur_scope
->bsym
)
7129 tcc_error("cannot break");
7130 if (cur_switch
&& cur_scope
->bsym
== cur_switch
->bsym
)
7131 leave_scope(cur_switch
->scope
);
7133 leave_scope(loop_scope
);
7134 *cur_scope
->bsym
= gjmp(*cur_scope
->bsym
);
7137 } else if (t
== TOK_CONTINUE
) {
7139 if (!cur_scope
->csym
)
7140 tcc_error("cannot continue");
7141 leave_scope(loop_scope
);
7142 *cur_scope
->csym
= gjmp(*cur_scope
->csym
);
7145 } else if (t
== TOK_FOR
) {
7150 /* c99 for-loop init decl? */
7151 if (!decl(VT_JMP
)) {
7152 /* no, regular for-loop init expr */
7180 } else if (t
== TOK_DO
) {
7196 } else if (t
== TOK_SWITCH
) {
7197 struct switch_t
*sw
;
7199 sw
= tcc_mallocz(sizeof *sw
);
7201 sw
->scope
= cur_scope
;
7202 sw
->prev
= cur_switch
;
7203 sw
->nocode_wanted
= nocode_wanted
;
7210 if (!is_integer_btype(vtop
->type
.t
& VT_BTYPE
))
7211 tcc_error("switch value not an integer");
7212 sw
->sv
= *vtop
--; /* save switch value */
7214 b
= gjmp(0); /* jump to first case */
7216 a
= gjmp(a
); /* add implicit break */
7220 if (sw
->nocode_wanted
)
7223 sw
->bsym
= NULL
; /* marker for 32bit:gen_opl() */
7226 d
= gcase(sw
->p
, sw
->n
, 0);
7229 gsym_addr(d
, sw
->def_sym
);
7237 } else if (t
== TOK_CASE
) {
7241 cr
= tcc_malloc(sizeof(struct case_t
));
7242 dynarray_add(&cur_switch
->p
, &cur_switch
->n
, cr
);
7243 t
= cur_switch
->sv
.type
.t
;
7244 cr
->v1
= cr
->v2
= value64(expr_const64(), t
);
7245 if (tok
== TOK_DOTS
&& gnu_ext
) {
7247 cr
->v2
= value64(expr_const64(), t
);
7248 if (case_cmp(cr
->v2
, cr
->v1
) < 0)
7249 tcc_warning("empty case range");
7251 /* case and default are unreachable from a switch under nocode_wanted */
7252 if (!cur_switch
->nocode_wanted
)
7254 cr
->line
= file
->line_num
;
7256 goto block_after_label
;
7258 } else if (t
== TOK_DEFAULT
) {
7261 if (cur_switch
->def_sym
)
7262 tcc_error("too many 'default'");
7263 cur_switch
->def_sym
= cur_switch
->nocode_wanted
? -1 : gind();
7265 goto block_after_label
;
7267 } else if (t
== TOK_GOTO
) {
7268 vla_restore(cur_scope
->vla
.locorig
);
7269 if (tok
== '*' && gnu_ext
) {
7273 if ((vtop
->type
.t
& VT_BTYPE
) != VT_PTR
)
7277 } else if (tok
>= TOK_UIDENT
) {
7278 s
= label_find(tok
);
7279 /* put forward definition if needed */
7281 s
= label_push(&global_label_stack
, tok
, LABEL_FORWARD
);
7282 else if (s
->r
== LABEL_DECLARED
)
7283 s
->r
= LABEL_FORWARD
;
7285 if (s
->r
& LABEL_FORWARD
) {
7286 /* start new goto chain for cleanups, linked via label->next */
7287 if (cur_scope
->cl
.s
&& !nocode_wanted
) {
7288 sym_push2(&pending_gotos
, SYM_FIELD
, 0, cur_scope
->cl
.n
);
7289 pending_gotos
->prev_tok
= s
;
7290 s
= sym_push2(&s
->next
, SYM_FIELD
, 0, 0);
7291 pending_gotos
->next
= s
;
7293 s
->jnext
= gjmp(s
->jnext
);
7295 try_call_cleanup_goto(s
->cleanupstate
);
7301 expect("label identifier");
7305 } else if (t
== TOK_ASM1
|| t
== TOK_ASM2
|| t
== TOK_ASM3
) {
7309 if (tok
== ':' && t
>= TOK_UIDENT
) {
7314 if (s
->r
== LABEL_DEFINED
)
7315 tcc_error("duplicate label '%s'", get_tok_str(s
->v
, NULL
));
7316 s
->r
= LABEL_DEFINED
;
7318 Sym
*pcl
; /* pending cleanup goto */
7319 for (pcl
= s
->next
; pcl
; pcl
= pcl
->prev
)
7321 sym_pop(&s
->next
, NULL
, 0);
7325 s
= label_push(&global_label_stack
, t
, LABEL_DEFINED
);
7328 s
->cleanupstate
= cur_scope
->cl
.s
;
7332 /* Accept attributes after labels (e.g. 'unused') */
7333 AttributeDef ad_tmp
;
7334 parse_attribute(&ad_tmp
);
7337 tcc_tcov_reset_ind(tcc_state
);
7338 vla_restore(cur_scope
->vla
.loc
);
7341 if (0 == (flags
& STMT_COMPOUND
))
7343 /* C23: insert implicit null-statement whithin compound statement */
7345 /* we accept this, but it is a mistake */
7346 tcc_warning_c(warn_all
)("deprecated use of label at end of compound statement");
7349 /* expression case */
7353 if (flags
& STMT_EXPR
) {
7366 tcc_tcov_check_line (tcc_state
, 0), tcc_tcov_block_end (tcc_state
, 0);
7369 /* This skips over a stream of tokens containing balanced {} and ()
7370 pairs, stopping at outer ',' ';' and '}' (or matching '}' if we started
7371 with a '{'). If STR then allocates and stores the skipped tokens
7372 in *STR. This doesn't check if () and {} are nested correctly,
7373 i.e. "({)}" is accepted. */
/* Skip (or, when str != NULL, record into a TokenString) a run of
   tokens with balanced {} () [] nesting, stopping at an outer ','/';'
   or at the '}' matching an initial '{'; EOF inside is an error, and
   the saved stream is terminated with TOK_EOF.
   NOTE(review): interior original lines (loop framing, level updates)
   are missing from this extraction; fragment kept verbatim. */
7374 static void skip_or_save_block(TokenString
**str
)
7376 int braces
= tok
== '{';
7379 *str
= tok_str_alloc();
7391 if (str
|| level
> 0)
7392 tcc_error("unexpected end of file");
7397 tok_str_add_tok(*str
);
7399 if (t
== '{' || t
== '(' || t
== '[') {
7401 } else if (t
== '}' || t
== ')' || t
== ']') {
7403 if (level
== 0 && braces
&& t
== '}')
7408 tok_str_add(*str
, TOK_EOF
);
7411 #define EXPR_CONST 1
/* Parse one initializer element. In the constant case (EXPR_CONST,
   presumably selected by expr_type — confirm against missing lines)
   global_expr is forced around the parse so compound literals are
   allocated globally, and the result must be an address constant:
   either VT_CONST, or a symbol-relative lvalue of an anonymous symbol
   (compound literal / string); on PE, dllimport symbols are rejected.
   NOTE(review): interior original lines are missing from this
   extraction; fragment kept verbatim. */
7414 static void parse_init_elem(int expr_type
)
7416 int saved_global_expr
;
7419 /* compound literals must be allocated globally in this case */
7420 saved_global_expr
= global_expr
;
7423 global_expr
= saved_global_expr
;
7424 /* NOTE: symbols are accepted, as well as lvalue for anon symbols
7425 (compound literals). */
7426 if (((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) != VT_CONST
7427 && ((vtop
->r
& (VT_SYM
|VT_LVAL
)) != (VT_SYM
|VT_LVAL
)
7428 || vtop
->sym
->v
< SYM_FIRST_ANOM
))
7429 #ifdef TCC_TARGET_PE
7430 || ((vtop
->r
& VT_SYM
) && vtop
->sym
->a
.dllimport
)
7433 tcc_error("initializer element is not constant");
/* Internal sanity check for initializer emission: the write offset
   must stay within the target section's data_offset (static data) or
   within the reserved local stack area (p->local_offset); otherwise
   it is a compiler bug. Compiled out (empty macro below) when the
   enclosing #if (not visible here) disables it.
   NOTE(review): surrounding #if/#else lines are missing from this
   extraction; fragment kept verbatim. */
7442 static void init_assert(init_params
*p
, int offset
)
7444 if (p
->sec
? !NODATA_WANTED
&& offset
> p
->sec
->data_offset
7445 : !nocode_wanted
&& offset
> p
->local_offset
)
7446 tcc_internal_error("initializer overflow");
7449 #define init_assert(sec, offset)
7452 /* put zeros for variable based init */
/* Zero-fill 'size' bytes at offset 'c' of the object being
   initialized: a no-op for static data (sections start zeroed), a
   call to memset (via vpush_helper_func) for stack objects; on ARM
   EABI the argument order is adapted for __aeabi_memset.
   NOTE(review): interior original lines (argument pushes, the call)
   are missing from this extraction; fragment kept verbatim. */
7453 static void init_putz(init_params
*p
, unsigned long c
, int size
)
7455 init_assert(p
, c
+ size
);
7457 /* nothing to do because globals are already set to zero */
7459 vpush_helper_func(TOK_memset
);
7463 #if defined TCC_TARGET_ARM && defined TCC_ARM_EABI
7464 vswap(); /* using __aeabi_memset(void*, size_t, int) */
7471 #define DIF_SIZE_ONLY 2
7472 #define DIF_HAVE_ELEM 4
7475 /* delete relocations for specified range c ... c + size. Unfortunatly
7476 in very special cases, relocations may occur unordered */
/* Delete relocations overlapping [c, c+size) in sec's reloc section,
   needed when a designated initializer overwrites an already
   initialized element. Compacts the relocation array in place
   (rel2 is the write cursor, rel the read cursor); relocations may
   appear unordered, so every entry is scanned.
   NOTE(review): a few interior lines (early return, cursor advances)
   are missing from this extraction; fragment kept verbatim. */
7477 static void decl_design_delrels(Section
*sec
, int c
, int size
)
7479 ElfW_Rel
*rel
, *rel2
, *rel_end
;
7480 if (!sec
|| !sec
->reloc
)
7482 rel
= rel2
= (ElfW_Rel
*)sec
->reloc
->data
;
7483 rel_end
= (ElfW_Rel
*)(sec
->reloc
->data
+ sec
->reloc
->data_offset
);
7484 while (rel
< rel_end
) {
/* reloc inside the overwritten range: drop it, shrink the section */
7485 if (rel
->r_offset
>= c
&& rel
->r_offset
< c
+ size
) {
7486 sec
->reloc
->data_offset
-= sizeof *rel
;
/* keep this reloc: compact it down to the write cursor */
7489 memcpy(rel2
, rel
, sizeof *rel
);
/* Track the size of the flexible array being initialized: when 'ref'
   is the flex array reference recorded in 'p', grow its element count
   to cover 'index'; any other array with unknown size (ref->c < 0)
   cannot be initialized here.
   NOTE(review): the line growing ref->c (orig 7500) is missing from
   this extraction; fragment kept verbatim. */
7496 static void decl_design_flex(init_params
*p
, Sym
*ref
, int index
)
7498 if (ref
== p
->flex_array_ref
) {
7499 if (index
>= ref
->c
)
7501 } else if (ref
->c
< 0)
7502 tcc_error("flexible array has zero size in this context");
7505 /* t is the array or struct type. c is the array or struct
7506 address. cur_field is the pointer to the current
7507 field, for arrays the 'c' member contains the current start
7508 index. 'flags' is as in decl_initializer.
7509 'al' contains the already initialized length of the
7510 current container (starting at c). This returns the new length of that. */
7511 static int decl_designator(init_params
*p
, CType
*type
, unsigned long c
,
7512 Sym
**cur_field
, int flags
, int al
)
7515 int index
, index_last
, align
, l
, nb_elems
, elem_size
;
7516 unsigned long corig
= c
;
7521 if (flags
& DIF_HAVE_ELEM
)
7524 if (gnu_ext
&& tok
>= TOK_UIDENT
) {
7531 /* NOTE: we only support ranges for last designator */
7532 while (nb_elems
== 1 && (tok
== '[' || tok
== '.')) {
7534 if (!(type
->t
& VT_ARRAY
))
7535 expect("array type");
7537 index
= index_last
= expr_const();
7538 if (tok
== TOK_DOTS
&& gnu_ext
) {
7540 index_last
= expr_const();
7544 decl_design_flex(p
, s
, index_last
);
7545 if (index
< 0 || index_last
>= s
->c
|| index_last
< index
)
7546 tcc_error("index exceeds array bounds or range is empty");
7548 (*cur_field
)->c
= index_last
;
7549 type
= pointed_type(type
);
7550 elem_size
= type_size(type
, &align
);
7551 c
+= index
* elem_size
;
7552 nb_elems
= index_last
- index
+ 1;
7559 f
= find_field(type
, l
, &cumofs
);
7570 } else if (!gnu_ext
) {
7575 if (type
->t
& VT_ARRAY
) {
7576 index
= (*cur_field
)->c
;
7578 decl_design_flex(p
, s
, index
);
7580 tcc_error("too many initializers");
7581 type
= pointed_type(type
);
7582 elem_size
= type_size(type
, &align
);
7583 c
+= index
* elem_size
;
7586 /* Skip bitfield padding. Also with size 32 and 64. */
7587 while (f
&& (f
->v
& SYM_FIRST_ANOM
) &&
7588 is_integer_btype(f
->type
.t
& VT_BTYPE
))
7589 *cur_field
= f
= f
->next
;
7591 tcc_error("too many initializers");
7597 if (!elem_size
) /* for structs */
7598 elem_size
= type_size(type
, &align
);
7600 /* Using designators the same element can be initialized more
7601 than once. In that case we need to delete possibly already
7602 existing relocations. */
7603 if (!(flags
& DIF_SIZE_ONLY
) && c
- corig
< al
) {
7604 decl_design_delrels(p
->sec
, c
, elem_size
* nb_elems
);
7605 flags
&= ~DIF_CLEAR
; /* mark stack dirty too */
7608 decl_initializer(p
, type
, c
, flags
& ~DIF_FIRST
);
7610 if (!(flags
& DIF_SIZE_ONLY
) && nb_elems
> 1) {
7614 if (p
->sec
|| (type
->t
& VT_ARRAY
)) {
7615 /* make init_putv/vstore believe it were a struct */
7617 t1
.t
= VT_STRUCT
, t1
.ref
= &aref
;
7621 vpush_ref(type
, p
->sec
, c
, elem_size
);
7623 vset(type
, VT_LOCAL
|VT_LVAL
, c
);
7624 for (i
= 1; i
< nb_elems
; i
++) {
7626 init_putv(p
, type
, c
+ elem_size
* i
);
7631 c
+= nb_elems
* elem_size
;
7637 /* store a value or an expression directly in global data or in local array */
7638 static void init_putv(init_params
*p
, CType
*type
, unsigned long c
)
7644 Section
*sec
= p
->sec
;
7648 dtype
.t
&= ~VT_CONSTANT
; /* need to do that to avoid false warning */
7650 size
= type_size(type
, &align
);
7651 if (type
->t
& VT_BITFIELD
)
7652 size
= (BIT_POS(type
->t
) + BIT_SIZE(type
->t
) + 7) / 8;
7653 init_assert(p
, c
+ size
);
7656 /* XXX: not portable */
7657 /* XXX: generate error if incorrect relocation */
7658 gen_assign_cast(&dtype
);
7659 bt
= type
->t
& VT_BTYPE
;
7661 if ((vtop
->r
& VT_SYM
)
7663 && (bt
!= (PTR_SIZE
== 8 ? VT_LLONG
: VT_INT
)
7664 || (type
->t
& VT_BITFIELD
))
7665 && !((vtop
->r
& VT_CONST
) && vtop
->sym
->v
>= SYM_FIRST_ANOM
)
7667 tcc_error("initializer element is not computable at load time");
7669 if (NODATA_WANTED
) {
7674 ptr
= sec
->data
+ c
;
7677 /* XXX: make code faster ? */
7678 if ((vtop
->r
& (VT_SYM
|VT_CONST
)) == (VT_SYM
|VT_CONST
) &&
7679 vtop
->sym
->v
>= SYM_FIRST_ANOM
&&
7680 /* XXX This rejects compound literals like
7681 '(void *){ptr}'. The problem is that '&sym' is
7682 represented the same way, which would be ruled out
7683 by the SYM_FIRST_ANOM check above, but also '"string"'
7684 in 'char *p = "string"' is represented the same
7685 with the type being VT_PTR and the symbol being an
7686 anonymous one. That is, there's no difference in vtop
7687 between '(void *){x}' and '&(void *){x}'. Ignore
7688 pointer typed entities here. Hopefully no real code
7689 will ever use compound literals with scalar type. */
7690 (vtop
->type
.t
& VT_BTYPE
) != VT_PTR
) {
7691 /* These come from compound literals, memcpy stuff over. */
7695 esym
= elfsym(vtop
->sym
);
7696 ssec
= tcc_state
->sections
[esym
->st_shndx
];
7697 memmove (ptr
, ssec
->data
+ esym
->st_value
+ (int)vtop
->c
.i
, size
);
7699 /* We need to copy over all memory contents, and that
7700 includes relocations. Use the fact that relocs are
7701 created it order, so look from the end of relocs
7702 until we hit one before the copied region. */
7703 unsigned long relofs
= ssec
->reloc
->data_offset
;
7704 while (relofs
>= sizeof(*rel
)) {
7705 relofs
-= sizeof(*rel
);
7706 rel
= (ElfW_Rel
*)(ssec
->reloc
->data
+ relofs
);
7707 if (rel
->r_offset
>= esym
->st_value
+ size
)
7709 if (rel
->r_offset
< esym
->st_value
)
7711 put_elf_reloca(symtab_section
, sec
,
7712 c
+ rel
->r_offset
- esym
->st_value
,
7713 ELFW(R_TYPE
)(rel
->r_info
),
7714 ELFW(R_SYM
)(rel
->r_info
),
7724 if (type
->t
& VT_BITFIELD
) {
7725 int bit_pos
, bit_size
, bits
, n
;
7726 unsigned char *p
, v
, m
;
7727 bit_pos
= BIT_POS(vtop
->type
.t
);
7728 bit_size
= BIT_SIZE(vtop
->type
.t
);
7729 p
= (unsigned char*)ptr
+ (bit_pos
>> 3);
7730 bit_pos
&= 7, bits
= 0;
7735 v
= val
>> bits
<< bit_pos
;
7736 m
= ((1 << n
) - 1) << bit_pos
;
7737 *p
= (*p
& ~m
) | (v
& m
);
7738 bits
+= n
, bit_size
-= n
, bit_pos
= 0, ++p
;
7743 *(char *)ptr
= val
!= 0;
7749 write16le(ptr
, val
);
7752 write32le(ptr
, val
);
7755 write64le(ptr
, val
);
7758 #if defined TCC_IS_NATIVE_387
7759 /* Host and target platform may be different but both have x87.
7760 On windows, tcc does not use VT_LDOUBLE, except when it is a
7761 cross compiler. In this case a mingw gcc as host compiler
7762 comes here with 10-byte long doubles, while msvc or tcc won't.
7763 tcc itself can still translate by asm.
7764 In any case we avoid possibly random bytes 11 and 12.
7766 if (sizeof (long double) >= 10)
7767 memcpy(ptr
, &vtop
->c
.ld
, 10);
7769 else if (sizeof (long double) == sizeof (double))
7770 __asm__("fldl %1\nfstpt %0\n" : "=m" (*ptr
) : "m" (vtop
->c
.ld
));
7774 /* For other platforms it should work natively, but may not work
7775 for cross compilers */
7776 if (sizeof(long double) == LDOUBLE_SIZE
)
7777 memcpy(ptr
, &vtop
->c
.ld
, LDOUBLE_SIZE
);
7778 else if (sizeof(double) == LDOUBLE_SIZE
)
7779 *(double*)ptr
= (double)vtop
->c
.ld
;
7780 else if (0 == memcmp(ptr
, &vtop
->c
.ld
, LDOUBLE_SIZE
))
7781 ; /* nothing to do for 0.0 */
7782 #ifndef TCC_CROSS_TEST
7784 tcc_error("can't cross compile long double constants");
7789 /* intptr_t may need a reloc too, see tcctest.c:relocation_test() */
7792 if (vtop
->r
& VT_SYM
)
7793 greloca(sec
, vtop
->sym
, c
, R_DATA_PTR
, val
);
7795 write64le(ptr
, val
);
7798 write32le(ptr
, val
);
7802 write64le(ptr
, val
);
7806 if (vtop
->r
& VT_SYM
)
7807 greloc(sec
, vtop
->sym
, c
, R_DATA_PTR
);
7808 write32le(ptr
, val
);
7812 //tcc_internal_error("unexpected type");
7818 vset(&dtype
, VT_LOCAL
|VT_LVAL
, c
);
7825 /* 't' contains the type and storage info. 'c' is the offset of the
7826 object in section 'sec'. If 'sec' is NULL, it means stack based
7827 allocation. 'flags & DIF_FIRST' is true if array '{' must be read (multi
7828 dimension implicit array init handling). 'flags & DIF_SIZE_ONLY' is true if
7829 size only evaluation is wanted (only for arrays). */
7830 static void decl_initializer(init_params
*p
, CType
*type
, unsigned long c
, int flags
)
7832 int len
, n
, no_oblock
, i
;
7838 /* generate line number info */
7839 if (debug_modes
&& !(flags
& DIF_SIZE_ONLY
) && !p
->sec
)
7840 tcc_debug_line(tcc_state
), tcc_tcov_check_line (tcc_state
, 1);
7842 if (!(flags
& DIF_HAVE_ELEM
) && tok
!= '{' &&
7843 /* In case of strings we have special handling for arrays, so
7844 don't consume them as initializer value (which would commit them
7845 to some anonymous symbol). */
7846 tok
!= TOK_LSTR
&& tok
!= TOK_STR
&&
7847 (!(flags
& DIF_SIZE_ONLY
)
7848 /* a struct may be initialized from a struct of same type, as in
7849 struct {int x,y;} a = {1,2}, b = {3,4}, c[] = {a,b};
7850 In that case we need to parse the element in order to check
7851 it for compatibility below */
7852 || (type
->t
& VT_BTYPE
) == VT_STRUCT
)
7854 int ncw_prev
= nocode_wanted
;
7855 if ((flags
& DIF_SIZE_ONLY
) && !p
->sec
)
7857 parse_init_elem(!p
->sec
? EXPR_ANY
: EXPR_CONST
);
7858 nocode_wanted
= ncw_prev
;
7859 flags
|= DIF_HAVE_ELEM
;
7862 if (type
->t
& VT_ARRAY
) {
7864 if (((flags
& DIF_FIRST
) && tok
!= TOK_LSTR
&& tok
!= TOK_STR
) ||
7872 t1
= pointed_type(type
);
7873 size1
= type_size(t1
, &align1
);
7875 /* only parse strings here if correct type (otherwise: handle
7876 them as ((w)char *) expressions */
7877 if ((tok
== TOK_LSTR
&&
7878 #ifdef TCC_TARGET_PE
7879 (t1
->t
& VT_BTYPE
) == VT_SHORT
&& (t1
->t
& VT_UNSIGNED
)
7881 (t1
->t
& VT_BTYPE
) == VT_INT
7883 ) || (tok
== TOK_STR
&& (t1
->t
& VT_BTYPE
) == VT_BYTE
)) {
7885 cstr_reset(&initstr
);
7886 if (size1
!= (tok
== TOK_STR
? 1 : sizeof(nwchar_t
)))
7887 tcc_error("unhandled string literal merging");
7888 while (tok
== TOK_STR
|| tok
== TOK_LSTR
) {
7890 initstr
.size
-= size1
;
7892 len
+= tokc
.str
.size
;
7894 len
+= tokc
.str
.size
/ sizeof(nwchar_t
);
7896 cstr_cat(&initstr
, tokc
.str
.data
, tokc
.str
.size
);
7899 if (tok
!= ')' && tok
!= '}' && tok
!= ',' && tok
!= ';'
7900 && tok
!= TOK_EOF
) {
7901 /* Not a lone literal but part of a bigger expression. */
7902 unget_tok(size1
== 1 ? TOK_STR
: TOK_LSTR
);
7903 tokc
.str
.size
= initstr
.size
;
7904 tokc
.str
.data
= initstr
.data
;
7908 decl_design_flex(p
, s
, len
);
7909 if (!(flags
& DIF_SIZE_ONLY
)) {
7914 tcc_warning("initializer-string for array is too long");
7915 /* in order to go faster for common case (char
7916 string in global variable, we handle it
7918 if (p
->sec
&& size1
== 1) {
7919 init_assert(p
, c
+ nb
);
7921 memcpy(p
->sec
->data
+ c
, initstr
.data
, nb
);
7925 /* only add trailing zero if enough storage (no
7926 warning in this case since it is standard) */
7927 if (flags
& DIF_CLEAR
)
7930 init_putz(p
, c
+ i
* size1
, (n
- i
) * size1
);
7934 } else if (size1
== 1)
7935 ch
= ((unsigned char *)initstr
.data
)[i
];
7937 ch
= ((nwchar_t
*)initstr
.data
)[i
];
7939 init_putv(p
, t1
, c
+ i
* size1
);
7950 /* zero memory once in advance */
7951 if (!(flags
& (DIF_CLEAR
| DIF_SIZE_ONLY
))) {
7952 init_putz(p
, c
, n
*size1
);
7957 /* GNU extension: if the initializer is empty for a flex array,
7958 it's size is zero. We won't enter the loop, so set the size
7960 decl_design_flex(p
, s
, len
);
7961 while (tok
!= '}' || (flags
& DIF_HAVE_ELEM
)) {
7962 len
= decl_designator(p
, type
, c
, &f
, flags
, len
);
7963 flags
&= ~DIF_HAVE_ELEM
;
7964 if (type
->t
& VT_ARRAY
) {
7966 /* special test for multi dimensional arrays (may not
7967 be strictly correct if designators are used at the
7969 if (no_oblock
&& len
>= n
*size1
)
7972 if (s
->type
.t
== VT_UNION
)
7976 if (no_oblock
&& f
== NULL
)
7988 } else if ((flags
& DIF_HAVE_ELEM
)
7989 /* Use i_c_parameter_t, to strip toplevel qualifiers.
7990 The source type might have VT_CONSTANT set, which is
7991 of course assignable to non-const elements. */
7992 && is_compatible_unqualified_types(type
, &vtop
->type
)) {
7995 } else if ((type
->t
& VT_BTYPE
) == VT_STRUCT
) {
7997 if ((flags
& DIF_FIRST
) || tok
== '{') {
8007 } else if (tok
== '{') {
8008 if (flags
& DIF_HAVE_ELEM
)
8011 decl_initializer(p
, type
, c
, flags
& ~DIF_HAVE_ELEM
);
8014 } else one_elem
: if ((flags
& DIF_SIZE_ONLY
)) {
8015 /* If we supported only ISO C we wouldn't have to accept calling
8016 this on anything than an array if DIF_SIZE_ONLY (and even then
8017 only on the outermost level, so no recursion would be needed),
8018 because initializing a flex array member isn't supported.
8019 But GNU C supports it, so we need to recurse even into
8020 subfields of structs and arrays when DIF_SIZE_ONLY is set. */
8021 /* just skip expression */
8022 if (flags
& DIF_HAVE_ELEM
)
8025 skip_or_save_block(NULL
);
8028 if (!(flags
& DIF_HAVE_ELEM
)) {
8029 /* This should happen only when we haven't parsed
8030 the init element above for fear of committing a
8031 string constant to memory too early. */
8032 if (tok
!= TOK_STR
&& tok
!= TOK_LSTR
)
8033 expect("string constant");
8034 parse_init_elem(!p
->sec
? EXPR_ANY
: EXPR_CONST
);
8036 if (!p
->sec
&& (flags
& DIF_CLEAR
) /* container was already zero'd */
8037 && (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
8039 && btype_size(type
->t
& VT_BTYPE
) /* not for fp constants */
8043 init_putv(p
, type
, c
);
8047 /* parse an initializer for type 't' if 'has_init' is non zero, and
8048 allocate space in local or global data space ('r' is either
8049 VT_LOCAL or VT_CONST). If 'v' is non zero, then an associated
8050 variable 'v' of scope 'scope' is declared before initializers
8051 are parsed. If 'v' is zero, then a reference to the new object
8052 is put in the value stack. If 'has_init' is 2, a special parsing
8053 is done to handle string constants. */
8054 static void decl_initializer_alloc(CType
*type
, AttributeDef
*ad
, int r
,
8055 int has_init
, int v
, int global
)
8057 int size
, align
, addr
;
8058 TokenString
*init_str
= NULL
;
8061 Sym
*flexible_array
;
8063 int saved_nocode_wanted
= nocode_wanted
;
8064 #ifdef CONFIG_TCC_BCHECK
8065 int bcheck
= tcc_state
->do_bounds_check
&& !NODATA_WANTED
;
8067 init_params p
= {0};
8069 /* Always allocate static or global variables */
8070 if (v
&& (r
& VT_VALMASK
) == VT_CONST
)
8071 nocode_wanted
|= DATA_ONLY_WANTED
;
8073 flexible_array
= NULL
;
8074 size
= type_size(type
, &align
);
8076 /* exactly one flexible array may be initialized, either the
8077 toplevel array or the last member of the toplevel struct */
8080 // error out except for top-level incomplete arrays
8081 // (arrays of incomplete types are handled in array parsing)
8082 if (!(type
->t
& VT_ARRAY
))
8083 tcc_error("initialization of incomplete type");
8085 /* If the base type itself was an array type of unspecified size
8086 (like in 'typedef int arr[]; arr x = {1};') then we will
8087 overwrite the unknown size by the real one for this decl.
8088 We need to unshare the ref symbol holding that size. */
8089 type
->ref
= sym_push(SYM_FIELD
, &type
->ref
->type
, 0, type
->ref
->c
);
8090 p
.flex_array_ref
= type
->ref
;
8092 } else if (has_init
&& (type
->t
& VT_BTYPE
) == VT_STRUCT
) {
8093 Sym
*field
= type
->ref
->next
;
8096 field
= field
->next
;
8097 if (field
->type
.t
& VT_ARRAY
&& field
->type
.ref
->c
< 0) {
8098 flexible_array
= field
;
8099 p
.flex_array_ref
= field
->type
.ref
;
8106 /* If unknown size, do a dry-run 1st pass */
8108 tcc_error("unknown type size");
8109 if (has_init
== 2) {
8110 /* only get strings */
8111 init_str
= tok_str_alloc();
8112 while (tok
== TOK_STR
|| tok
== TOK_LSTR
) {
8113 tok_str_add_tok(init_str
);
8116 tok_str_add(init_str
, TOK_EOF
);
8118 skip_or_save_block(&init_str
);
8122 begin_macro(init_str
, 1);
8124 decl_initializer(&p
, type
, 0, DIF_FIRST
| DIF_SIZE_ONLY
);
8125 /* prepare second initializer parsing */
8126 macro_ptr
= init_str
->str
;
8129 /* if still unknown size, error */
8130 size
= type_size(type
, &align
);
8132 tcc_error("unknown type size");
8134 /* If there's a flex member and it was used in the initializer
8136 if (flexible_array
&& flexible_array
->type
.ref
->c
> 0)
8137 size
+= flexible_array
->type
.ref
->c
8138 * pointed_size(&flexible_array
->type
);
8141 /* take into account specified alignment if bigger */
8142 if (ad
->a
.aligned
) {
8143 int speca
= 1 << (ad
->a
.aligned
- 1);
8146 } else if (ad
->a
.packed
) {
8150 if (!v
&& NODATA_WANTED
)
8151 size
= 0, align
= 1;
8153 if ((r
& VT_VALMASK
) == VT_LOCAL
) {
8155 #ifdef CONFIG_TCC_BCHECK
8157 /* add padding between stack variables for bound checking */
8161 loc
= (loc
- size
) & -align
;
8163 p
.local_offset
= addr
+ size
;
8164 #ifdef CONFIG_TCC_BCHECK
8166 /* add padding between stack variables for bound checking */
8171 /* local variable */
8172 #ifdef CONFIG_TCC_ASM
8173 if (ad
->asm_label
) {
8174 int reg
= asm_parse_regvar(ad
->asm_label
);
8176 r
= (r
& ~VT_VALMASK
) | reg
;
8179 sym
= sym_push(v
, type
, r
, addr
);
8180 if (ad
->cleanup_func
) {
8181 Sym
*cls
= sym_push2(&all_cleanups
,
8182 SYM_FIELD
| ++cur_scope
->cl
.n
, 0, 0);
8183 cls
->prev_tok
= sym
;
8184 cls
->cleanup_func
= ad
->cleanup_func
;
8185 cls
->next
= cur_scope
->cl
.s
;
8186 cur_scope
->cl
.s
= cls
;
8191 /* push local reference */
8192 vset(type
, r
, addr
);
8197 /* see if the symbol was already defined */
8200 if (p
.flex_array_ref
&& (sym
->type
.t
& type
->t
& VT_ARRAY
)
8201 && sym
->type
.ref
->c
> type
->ref
->c
) {
8202 /* flex array was already declared with explicit size
8204 int arr[] = { 1,2,3 }; */
8205 type
->ref
->c
= sym
->type
.ref
->c
;
8206 size
= type_size(type
, &align
);
8208 patch_storage(sym
, ad
, type
);
8209 /* we accept several definitions of the same global variable. */
8210 if (!has_init
&& sym
->c
&& elfsym(sym
)->st_shndx
!= SHN_UNDEF
)
8215 /* allocate symbol in corresponding section */
8219 while ((tp
->t
& (VT_BTYPE
|VT_ARRAY
)) == (VT_PTR
|VT_ARRAY
))
8220 tp
= &tp
->ref
->type
;
8221 if (tp
->t
& VT_CONSTANT
) {
8222 sec
= rodata_section
;
8223 } else if (has_init
) {
8226 tcc_warning("rw data: %s", get_tok_str(v, 0));*/
8227 } else if (tcc_state
->nocommon
)
8232 addr
= section_add(sec
, size
, align
);
8233 #ifdef CONFIG_TCC_BCHECK
8234 /* add padding if bound check */
8236 section_add(sec
, 1, 1);
8239 addr
= align
; /* SHN_COMMON is special, symbol value is align */
8240 sec
= common_section
;
8245 sym
= sym_push(v
, type
, r
| VT_SYM
, 0);
8246 patch_storage(sym
, ad
, NULL
);
8248 /* update symbol definition */
8249 put_extern_sym(sym
, sec
, addr
, size
);
8251 /* push global reference */
8252 vpush_ref(type
, sec
, addr
, size
);
8257 #ifdef CONFIG_TCC_BCHECK
8258 /* handles bounds now because the symbol must be defined
8259 before for the relocation */
8263 greloca(bounds_section
, sym
, bounds_section
->data_offset
, R_DATA_PTR
, 0);
8264 /* then add global bound info */
8265 bounds_ptr
= section_ptr_add(bounds_section
, 2 * sizeof(addr_t
));
8266 bounds_ptr
[0] = 0; /* relocated */
8267 bounds_ptr
[1] = size
;
8272 if (type
->t
& VT_VLA
) {
8278 /* save before-VLA stack pointer if needed */
8279 if (cur_scope
->vla
.num
== 0) {
8280 if (cur_scope
->prev
&& cur_scope
->prev
->vla
.num
) {
8281 cur_scope
->vla
.locorig
= cur_scope
->prev
->vla
.loc
;
8283 gen_vla_sp_save(loc
-= PTR_SIZE
);
8284 cur_scope
->vla
.locorig
= loc
;
8288 vpush_type_size(type
, &a
);
8289 gen_vla_alloc(type
, a
);
8290 #if defined TCC_TARGET_PE && defined TCC_TARGET_X86_64
8291 /* on _WIN64, because of the function args scratch area, the
8292 result of alloca differs from RSP and is returned in RAX. */
8293 gen_vla_result(addr
), addr
= (loc
-= PTR_SIZE
);
8295 gen_vla_sp_save(addr
);
8296 cur_scope
->vla
.loc
= addr
;
8297 cur_scope
->vla
.num
++;
8298 } else if (has_init
) {
8300 decl_initializer(&p
, type
, addr
, DIF_FIRST
);
8301 /* patch flexible array member size back to -1, */
8302 /* for possible subsequent similar declarations */
8304 flexible_array
->type
.ref
->c
= -1;
8308 /* restore parse state if needed */
8314 nocode_wanted
= saved_nocode_wanted
;
8317 /* generate vla code saved in post_type() */
8318 static void func_vla_arg_code(Sym
*arg
)
8321 TokenString
*vla_array_tok
= NULL
;
8324 func_vla_arg_code(arg
->type
.ref
);
8326 if ((arg
->type
.t
& VT_VLA
) && arg
->type
.ref
->vla_array_str
) {
8327 loc
-= type_size(&int_type
, &align
);
8329 arg
->type
.ref
->c
= loc
;
8332 vla_array_tok
= tok_str_alloc();
8333 vla_array_tok
->str
= arg
->type
.ref
->vla_array_str
;
8334 begin_macro(vla_array_tok
, 1);
8339 vpush_type_size(&arg
->type
.ref
->type
, &align
);
8341 vset(&int_type
, VT_LOCAL
|VT_LVAL
, arg
->type
.ref
->c
);
8348 static void func_vla_arg(Sym
*sym
)
8352 for (arg
= sym
->type
.ref
->next
; arg
; arg
= arg
->next
)
8353 if ((arg
->type
.t
& VT_BTYPE
) == VT_PTR
&& (arg
->type
.ref
->type
.t
& VT_VLA
))
8354 func_vla_arg_code(arg
->type
.ref
);
8357 /* parse a function defined by symbol 'sym' and generate its code in
8358 'cur_text_section' */
8359 static void gen_function(Sym
*sym
)
8361 struct scope f
= { 0 };
8362 cur_scope
= root_scope
= &f
;
8365 ind
= cur_text_section
->data_offset
;
8366 if (sym
->a
.aligned
) {
8367 size_t newoff
= section_add(cur_text_section
, 0,
8368 1 << (sym
->a
.aligned
- 1));
8369 gen_fill_nops(newoff
- ind
);
8372 funcname
= get_tok_str(sym
->v
, NULL
);
8374 func_vt
= sym
->type
.ref
->type
;
8375 func_var
= sym
->type
.ref
->f
.func_type
== FUNC_ELLIPSIS
;
8377 /* NOTE: we patch the symbol size later */
8378 put_extern_sym(sym
, cur_text_section
, ind
, 0);
8380 if (sym
->type
.ref
->f
.func_ctor
)
8381 add_array (tcc_state
, ".init_array", sym
->c
);
8382 if (sym
->type
.ref
->f
.func_dtor
)
8383 add_array (tcc_state
, ".fini_array", sym
->c
);
8385 /* put debug symbol */
8386 tcc_debug_funcstart(tcc_state
, sym
);
8388 /* push a dummy symbol to enable local sym storage */
8389 sym_push2(&local_stack
, SYM_FIELD
, 0, 0);
8390 local_scope
= 1; /* for function parameters */
8391 nb_temp_local_vars
= 0;
8393 tcc_debug_prolog_epilog(tcc_state
, 0);
8402 /* reset local stack */
8403 pop_local_syms(NULL
, 0);
8404 tcc_debug_prolog_epilog(tcc_state
, 1);
8407 /* end of function */
8408 tcc_debug_funcend(tcc_state
, ind
- func_ind
);
8410 /* patch symbol size */
8411 elfsym(sym
)->st_size
= ind
- func_ind
;
8413 cur_text_section
->data_offset
= ind
;
8415 label_pop(&global_label_stack
, NULL
, 0);
8416 sym_pop(&all_cleanups
, NULL
, 0);
8418 /* It's better to crash than to generate wrong code */
8419 cur_text_section
= NULL
;
8420 funcname
= ""; /* for safety */
8421 func_vt
.t
= VT_VOID
; /* for safety */
8422 func_var
= 0; /* for safety */
8423 ind
= 0; /* for safety */
8425 nocode_wanted
= DATA_ONLY_WANTED
;
8428 /* do this after funcend debug info */
8432 static void gen_inline_functions(TCCState
*s
)
8435 int inline_generated
, i
;
8436 struct InlineFunc
*fn
;
8438 tcc_open_bf(s
, ":inline:", 0);
8439 /* iterate while inline function are referenced */
8441 inline_generated
= 0;
8442 for (i
= 0; i
< s
->nb_inline_fns
; ++i
) {
8443 fn
= s
->inline_fns
[i
];
8445 if (sym
&& (sym
->c
|| !(sym
->type
.t
& VT_INLINE
))) {
8446 /* the function was used or forced (and then not internal):
8447 generate its code and convert it to a normal function */
8449 tccpp_putfile(fn
->filename
);
8450 begin_macro(fn
->func_str
, 1);
8452 cur_text_section
= text_section
;
8456 inline_generated
= 1;
8459 } while (inline_generated
);
8463 static void free_inline_functions(TCCState
*s
)
8466 /* free tokens of unused inline functions */
8467 for (i
= 0; i
< s
->nb_inline_fns
; ++i
) {
8468 struct InlineFunc
*fn
= s
->inline_fns
[i
];
8470 tok_str_free(fn
->func_str
);
8472 dynarray_reset(&s
->inline_fns
, &s
->nb_inline_fns
);
8475 static void do_Static_assert(void)
8483 msg
= "_Static_assert fail";
8486 msg
= parse_mult_str("string constant")->data
;
8490 tcc_error("%s", msg
);
8494 /* 'l' is VT_LOCAL or VT_CONST to define default storage type
8495 or VT_CMP if parsing old style parameter list
8496 or VT_JMP if parsing c99 for decl: for (int i = 0, ...) */
8497 static int decl(int l
)
8499 int v
, has_init
, r
, oldint
;
8502 AttributeDef ad
, adbase
;
8508 if (!parse_btype(&btype
, &adbase
, l
== VT_LOCAL
)) {
8511 /* skip redundant ';' if not in old parameter decl scope */
8512 if (tok
== ';' && l
!= VT_CMP
) {
8516 if (tok
== TOK_STATIC_ASSERT
) {
8522 if (tok
== TOK_ASM1
|| tok
== TOK_ASM2
|| tok
== TOK_ASM3
) {
8523 /* global asm block */
8527 if (tok
>= TOK_UIDENT
) {
8528 /* special test for old K&R protos without explicit int
8529 type. Only accepted when defining global data */
8534 expect("declaration");
8540 if ((btype
.t
& VT_BTYPE
) == VT_STRUCT
) {
8542 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) >= SYM_FIRST_ANOM
)
8543 tcc_warning("unnamed struct/union that defines no instances");
8547 if (IS_ENUM(btype
.t
)) {
8553 while (1) { /* iterate thru each declaration */
8556 type_decl(&type
, &ad
, &v
, TYPE_DIRECT
);
8560 type_to_str(buf
, sizeof(buf
), &type
, get_tok_str(v
, NULL
));
8561 printf("type = '%s'\n", buf
);
8564 if ((type
.t
& VT_BTYPE
) == VT_FUNC
) {
8565 if ((type
.t
& VT_STATIC
) && (l
!= VT_CONST
))
8566 tcc_error("function without file scope cannot be static");
8567 /* if old style function prototype, we accept a
8570 if (sym
->f
.func_type
== FUNC_OLD
&& l
== VT_CONST
) {
8575 if ((type
.t
& (VT_EXTERN
|VT_INLINE
)) == (VT_EXTERN
|VT_INLINE
)) {
8576 /* always_inline functions must be handled as if they
8577 don't generate multiple global defs, even if extern
8578 inline, i.e. GNU inline semantics for those. Rewrite
8579 them into static inline. */
8580 if (tcc_state
->gnu89_inline
|| sym
->f
.func_alwinl
)
8581 type
.t
= (type
.t
& ~VT_EXTERN
) | VT_STATIC
;
8583 type
.t
&= ~VT_INLINE
; /* always compile otherwise */
8586 } else if (oldint
) {
8587 tcc_warning("type defaults to int");
8590 if (gnu_ext
&& (tok
== TOK_ASM1
|| tok
== TOK_ASM2
|| tok
== TOK_ASM3
)) {
8591 ad
.asm_label
= asm_label_instr();
8592 /* parse one last attribute list, after asm label */
8593 parse_attribute(&ad
);
8595 /* gcc does not allow __asm__("label") with function definition,
8602 #ifdef TCC_TARGET_PE
8603 if (ad
.a
.dllimport
|| ad
.a
.dllexport
) {
8604 if (type
.t
& VT_STATIC
)
8605 tcc_error("cannot have dll linkage with static");
8606 if (type
.t
& VT_TYPEDEF
) {
8607 tcc_warning("'%s' attribute ignored for typedef",
8608 ad
.a
.dllimport
? (ad
.a
.dllimport
= 0, "dllimport") :
8609 (ad
.a
.dllexport
= 0, "dllexport"));
8610 } else if (ad
.a
.dllimport
) {
8611 if ((type
.t
& VT_BTYPE
) == VT_FUNC
)
8614 type
.t
|= VT_EXTERN
;
8620 tcc_error("cannot use local functions");
8621 if ((type
.t
& VT_BTYPE
) != VT_FUNC
)
8622 expect("function definition");
8624 /* reject abstract declarators in function definition
8625 make old style params without decl have int type */
8627 while ((sym
= sym
->next
) != NULL
) {
8628 if (!(sym
->v
& ~SYM_FIELD
))
8629 expect("identifier");
8630 if (sym
->type
.t
== VT_VOID
)
8631 sym
->type
= int_type
;
8634 /* apply post-declaraton attributes */
8635 merge_funcattr(&type
.ref
->f
, &ad
.f
);
8637 /* put function symbol */
8638 type
.t
&= ~VT_EXTERN
;
8639 sym
= external_sym(v
, &type
, 0, &ad
);
8641 /* static inline functions are just recorded as a kind
8642 of macro. Their code will be emitted at the end of
8643 the compilation unit only if they are used */
8644 if (sym
->type
.t
& VT_INLINE
) {
8645 struct InlineFunc
*fn
;
8646 fn
= tcc_malloc(sizeof *fn
+ strlen(file
->filename
));
8647 strcpy(fn
->filename
, file
->filename
);
8649 dynarray_add(&tcc_state
->inline_fns
,
8650 &tcc_state
->nb_inline_fns
, fn
);
8651 skip_or_save_block(&fn
->func_str
);
8653 /* compute text section */
8654 cur_text_section
= ad
.section
;
8655 if (!cur_text_section
)
8656 cur_text_section
= text_section
;
8657 else if (cur_text_section
->sh_num
> bss_section
->sh_num
)
8658 cur_text_section
->sh_flags
= text_section
->sh_flags
;
8664 /* find parameter in function parameter list */
8665 for (sym
= func_vt
.ref
->next
; sym
; sym
= sym
->next
)
8666 if ((sym
->v
& ~SYM_FIELD
) == v
)
8668 tcc_error("declaration for parameter '%s' but no such parameter",
8669 get_tok_str(v
, NULL
));
8671 if (type
.t
& VT_STORAGE
) /* 'register' is okay */
8672 tcc_error("storage class specified for '%s'",
8673 get_tok_str(v
, NULL
));
8674 if (sym
->type
.t
!= VT_VOID
)
8675 tcc_error("redefinition of parameter '%s'",
8676 get_tok_str(v
, NULL
));
8677 convert_parameter_type(&type
);
8679 } else if (type
.t
& VT_TYPEDEF
) {
8680 /* save typedefed type */
8681 /* XXX: test storage specifiers ? */
8683 if (sym
&& sym
->sym_scope
== local_scope
) {
8684 if (!is_compatible_types(&sym
->type
, &type
)
8685 || !(sym
->type
.t
& VT_TYPEDEF
))
8686 tcc_error("incompatible redefinition of '%s'",
8687 get_tok_str(v
, NULL
));
8690 sym
= sym_push(v
, &type
, 0, 0);
8693 if ((type
.t
& VT_BTYPE
) == VT_FUNC
)
8694 merge_funcattr(&sym
->type
.ref
->f
, &ad
.f
);
8696 tcc_debug_typedef (tcc_state
, sym
);
8697 } else if ((type
.t
& VT_BTYPE
) == VT_VOID
8698 && !(type
.t
& VT_EXTERN
)) {
8699 tcc_error("declaration of void object");
8702 if ((type
.t
& VT_BTYPE
) == VT_FUNC
) {
8703 /* external function definition */
8704 /* specific case for func_call attribute */
8705 merge_funcattr(&type
.ref
->f
, &ad
.f
);
8706 } else if (!(type
.t
& VT_ARRAY
)) {
8707 /* not lvalue if array */
8710 has_init
= (tok
== '=');
8711 if (has_init
&& (type
.t
& VT_VLA
))
8712 tcc_error("variable length array cannot be initialized");
8714 if (((type
.t
& VT_EXTERN
) && (!has_init
|| l
!= VT_CONST
))
8715 || (type
.t
& VT_BTYPE
) == VT_FUNC
8716 /* as with GCC, uninitialized global arrays with no size
8717 are considered extern: */
8718 || ((type
.t
& VT_ARRAY
) && !has_init
8719 && l
== VT_CONST
&& type
.ref
->c
< 0)
8721 /* external variable or function */
8722 type
.t
|= VT_EXTERN
;
8723 external_sym(v
, &type
, r
, &ad
);
8725 if (l
== VT_CONST
|| (type
.t
& VT_STATIC
))
8731 else if (l
== VT_CONST
)
8732 /* uninitialized global variables may be overridden */
8733 type
.t
|= VT_EXTERN
;
8734 decl_initializer_alloc(&type
, &ad
, r
, has_init
, v
, l
== VT_CONST
);
8737 if (ad
.alias_target
&& l
== VT_CONST
) {
8738 /* Aliases need to be emitted when their target symbol
8739 is emitted, even if perhaps unreferenced.
8740 We only support the case where the base is already
8741 defined, otherwise we would need deferring to emit
8742 the aliases until the end of the compile unit. */
8743 esym
= elfsym(sym_find(ad
.alias_target
));
8745 tcc_error("unsupported forward __alias__ attribute");
8746 put_extern_sym2(sym_find(v
), esym
->st_shndx
,
8747 esym
->st_value
, esym
->st_size
, 1);
8763 /* ------------------------------------------------------------------------- */
8766 /* ------------------------------------------------------------------------- */