2 * TCC - Tiny C Compiler
4 * Copyright (c) 2001-2004 Fabrice Bellard
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
24 /********************************************************/
25 /* global variables */
27 /* loc : local variable index
28 ind : output code index
30 anon_sym: anonymous symbol index
32 ST_DATA
int rsym
, anon_sym
, ind
, loc
;
34 ST_DATA Sym
*global_stack
;
35 ST_DATA Sym
*local_stack
;
36 ST_DATA Sym
*define_stack
;
37 ST_DATA Sym
*global_label_stack
;
38 ST_DATA Sym
*local_label_stack
;
40 static Sym
*sym_free_first
;
41 static void **sym_pools
;
42 static int nb_sym_pools
;
44 static Sym
*all_cleanups
, *pending_gotos
;
45 static int local_scope
;
47 static int in_generic
;
48 static int section_sym
;
51 static SValue _vstack
[1 + VSTACK_SIZE
];
52 #define vstack (_vstack + 1)
54 ST_DATA
int const_wanted
; /* true if constant wanted */
55 ST_DATA
int nocode_wanted
; /* no code generation wanted */
56 #define NODATA_WANTED (nocode_wanted > 0) /* no static data output wanted either */
57 #define STATIC_DATA_WANTED (nocode_wanted & 0xC0000000) /* only static data output */
59 /* Automagical code suppression ----> */
60 #define CODE_OFF() (nocode_wanted |= 0x20000000)
61 #define CODE_ON() (nocode_wanted &= ~0x20000000)
63 /* Clear 'nocode_wanted' at label if it was used */
64 ST_FUNC
void gsym(int t
) { if (t
) { gsym_addr(t
, ind
); CODE_ON(); }}
65 static int gind(void) { CODE_ON(); return ind
; }
67 /* Set 'nocode_wanted' after unconditional jumps */
/* Emit an unconditional backward jump to address 't', then suppress
   code generation: everything following the jump is unreachable
   until a label (gsym/gind) turns it back on. */
static void gjmp_addr_acs(int t)
{
    gjmp_addr(t);
    CODE_OFF();
}
/* Emit an unconditional forward jump chained onto 't' and return the
   new chain head. Code after the jump is unreachable, so generation
   is switched off until the chain is resolved. */
static int gjmp_acs(int t)
{
    int chain = gjmp(t);
    CODE_OFF();
    return chain;
}
71 /* These are #undef'd at the end of this file */
72 #define gjmp_addr gjmp_addr_acs
76 ST_DATA
int global_expr
; /* true if compound literals must be allocated globally (used during initializers parsing */
77 ST_DATA CType func_vt
; /* current function return type (used by return instruction) */
78 ST_DATA
int func_var
; /* true if current function is variadic (used by return instruction) */
80 static int last_line_num
, new_file
, func_ind
; /* debug info control */
81 ST_DATA
const char *funcname
;
83 ST_DATA CType char_pointer_type
, func_old_type
, int_type
, size_type
, ptrdiff_type
;
85 ST_DATA
struct switch_t
{
89 } **p
; int n
; /* list of case ranges */
90 int def_sym
; /* default symbol */
93 } *cur_switch
; /* current switch */
95 #define MAX_TEMP_LOCAL_VARIABLE_NUMBER 0x4
96 /*list of temporary local variables on the stack in current function. */
97 ST_DATA
struct temp_local_variable
{
98 int location
; //offset on stack. Svalue.c.i
101 } arr_temp_local_vars
[MAX_TEMP_LOCAL_VARIABLE_NUMBER
];
102 short nb_temp_local_vars
;
104 static struct scope
{
106 struct { int loc
, num
; } vla
;
107 struct { Sym
*s
; int n
; } cl
;
110 } *cur_scope
, *loop_scope
, *root_scope
;
112 /********************************************************/
113 #ifndef CONFIG_TCC_ASM
114 ST_FUNC
void asm_instr(void)
116 tcc_error("inline asm() not supported");
118 ST_FUNC
void asm_global_instr(void)
120 tcc_error("inline asm() not supported");
124 /* ------------------------------------------------------------------------- */
126 static void gen_cast(CType
*type
);
127 static void gen_cast_s(int t
);
128 static inline CType
*pointed_type(CType
*type
);
129 static int is_compatible_types(CType
*type1
, CType
*type2
);
130 static int parse_btype(CType
*type
, AttributeDef
*ad
);
131 static CType
*type_decl(CType
*type
, AttributeDef
*ad
, int *v
, int td
);
132 static void parse_expr_type(CType
*type
);
133 static void init_putv(CType
*type
, Section
*sec
, unsigned long c
);
134 static void decl_initializer(CType
*type
, Section
*sec
, unsigned long c
, int flags
);
135 static void block(int is_expr
);
136 static void decl_initializer_alloc(CType
*type
, AttributeDef
*ad
, int r
, int has_init
, int v
, int scope
);
137 static void decl(int l
);
138 static int decl0(int l
, int is_for_loop_init
, Sym
*);
139 static void expr_eq(void);
140 static void vla_runtime_type_size(CType
*type
, int *a
);
141 static int is_compatible_unqualified_types(CType
*type1
, CType
*type2
);
142 static inline int64_t expr_const64(void);
143 static void vpush64(int ty
, unsigned long long v
);
144 static void vpush(CType
*type
);
145 static int gvtst(int inv
, int t
);
146 static void gen_inline_functions(TCCState
*s
);
147 static void free_inline_functions(TCCState
*s
);
148 static void skip_or_save_block(TokenString
**str
);
149 static void gv_dup(void);
150 static int get_temp_local_var(int size
,int align
);
151 static void clear_temp_local_var_list();
153 ST_INLN
int is_float(int t
)
155 int bt
= t
& VT_BTYPE
;
156 return bt
== VT_LDOUBLE
162 static inline int is_integer_btype(int bt
)
170 static int btype_size(int bt
)
172 return bt
== VT_BYTE
|| bt
== VT_BOOL
? 1 :
176 bt
== VT_PTR
? PTR_SIZE
: 0;
179 /* returns function return register from type */
180 static int R_RET(int t
)
184 #ifdef TCC_TARGET_X86_64
185 if ((t
& VT_BTYPE
) == VT_LDOUBLE
)
187 #elif defined TCC_TARGET_RISCV64
188 if ((t
& VT_BTYPE
) == VT_LDOUBLE
)
194 /* returns 2nd function return register, if any */
195 static int R2_RET(int t
)
201 #elif defined TCC_TARGET_X86_64
206 #elif defined TCC_TARGET_RISCV64
213 /* returns true for two-word types */
214 #define USING_TWO_WORDS(t) (R2_RET(t) != VT_CONST)
216 /* put function return registers to stack value */
217 static void PUT_R_RET(SValue
*sv
, int t
)
219 sv
->r
= R_RET(t
), sv
->r2
= R2_RET(t
);
222 /* returns function return register class for type t */
223 static int RC_RET(int t
)
225 return reg_classes
[R_RET(t
)] & ~(RC_FLOAT
| RC_INT
);
228 /* returns generic register class for type t */
229 static int RC_TYPE(int t
)
233 #ifdef TCC_TARGET_X86_64
234 if ((t
& VT_BTYPE
) == VT_LDOUBLE
)
236 if ((t
& VT_BTYPE
) == VT_QFLOAT
)
238 #elif defined TCC_TARGET_RISCV64
239 if ((t
& VT_BTYPE
) == VT_LDOUBLE
)
245 /* returns 2nd register class corresponding to t and rc */
246 static int RC2_TYPE(int t
, int rc
)
248 if (!USING_TWO_WORDS(t
))
263 /* we use our own 'finite' function to avoid potential problems with
264 non standard math libs */
265 /* XXX: endianness dependent */
266 ST_FUNC
int ieee_finite(double d
)
269 memcpy(p
, &d
, sizeof(double));
270 return ((unsigned)((p
[1] | 0x800fffff) + 1)) >> 31;
273 /* compiling intel long double natively */
274 #if (defined __i386__ || defined __x86_64__) \
275 && (defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64)
276 # define TCC_IS_NATIVE_387
279 ST_FUNC
void test_lvalue(void)
281 if (!(vtop
->r
& VT_LVAL
))
285 ST_FUNC
void check_vstack(void)
287 if (vtop
!= vstack
- 1)
288 tcc_error("internal compiler error: vstack leak (%d)", vtop
- vstack
+ 1);
291 /* ------------------------------------------------------------------------- */
292 /* vstack debugging aid */
295 void pv (const char *lbl
, int a
, int b
)
298 for (i
= a
; i
< a
+ b
; ++i
) {
299 SValue
*p
= &vtop
[-i
];
300 printf("%s vtop[-%d] : type.t:%04x r:%04x r2:%04x c.i:%d\n",
301 lbl
, i
, p
->type
.t
, p
->r
, p
->r2
, (int)p
->c
.i
);
306 /* ------------------------------------------------------------------------- */
307 /* start of translation unit info */
308 ST_FUNC
void tcc_debug_start(TCCState
*s1
)
313 /* file info: full path + filename */
314 section_sym
= put_elf_sym(symtab_section
, 0, 0,
315 ELFW(ST_INFO
)(STB_LOCAL
, STT_SECTION
), 0,
316 text_section
->sh_num
, NULL
);
317 getcwd(buf
, sizeof(buf
));
319 normalize_slashes(buf
);
321 pstrcat(buf
, sizeof(buf
), "/");
322 put_stabs_r(s1
, buf
, N_SO
, 0, 0,
323 text_section
->data_offset
, text_section
, section_sym
);
324 put_stabs_r(s1
, file
->prev
->filename
, N_SO
, 0, 0,
325 text_section
->data_offset
, text_section
, section_sym
);
326 new_file
= last_line_num
= 0;
328 /* we're currently 'including' the <command line> */
332 /* an elf symbol of type STT_FILE must be put so that STB_LOCAL
333 symbols can be safely used */
334 put_elf_sym(symtab_section
, 0, 0,
335 ELFW(ST_INFO
)(STB_LOCAL
, STT_FILE
), 0,
336 SHN_ABS
, file
->filename
);
339 /* put end of translation unit info */
340 ST_FUNC
void tcc_debug_end(TCCState
*s1
)
344 put_stabs_r(s1
, NULL
, N_SO
, 0, 0,
345 text_section
->data_offset
, text_section
, section_sym
);
348 static BufferedFile
* put_new_file(TCCState
*s1
)
350 BufferedFile
*f
= file
;
351 /* use upper file if from inline ":asm:" */
352 if (f
->filename
[0] == ':')
355 put_stabs_r(s1
, f
->filename
, N_SOL
, 0, 0, ind
, text_section
, section_sym
);
356 new_file
= last_line_num
= 0;
361 /* generate line number info */
362 ST_FUNC
void tcc_debug_line(TCCState
*s1
)
365 if (!s1
->do_debug
|| !(f
= put_new_file(s1
)))
367 if (last_line_num
== f
->line_num
)
369 if (text_section
!= cur_text_section
)
371 if (func_ind
!= -1) {
372 put_stabn(s1
, N_SLINE
, 0, f
->line_num
, ind
- func_ind
);
374 /* from tcc_assemble */
375 put_stabs_r(s1
, NULL
, N_SLINE
, 0, f
->line_num
, ind
, text_section
, section_sym
);
377 last_line_num
= f
->line_num
;
380 /* put function symbol */
381 ST_FUNC
void tcc_debug_funcstart(TCCState
*s1
, Sym
*sym
)
385 if (!s1
->do_debug
|| !(f
= put_new_file(s1
)))
387 /* XXX: we put here a dummy type */
388 snprintf(buf
, sizeof(buf
), "%s:%c1",
389 funcname
, sym
->type
.t
& VT_STATIC
? 'f' : 'F');
390 put_stabs_r(s1
, buf
, N_FUN
, 0, f
->line_num
, 0, cur_text_section
, sym
->c
);
394 /* put function size */
395 ST_FUNC
void tcc_debug_funcend(TCCState
*s1
, int size
)
399 #if 0 // this seems to confuse gnu tools
400 put_stabn(s1
, N_FUN
, 0, 0, size
);
404 /* put alternative filename */
405 ST_FUNC
void tcc_debug_putfile(TCCState
*s1
, const char *filename
)
407 if (0 == strcmp(file
->filename
, filename
))
409 pstrcpy(file
->filename
, sizeof(file
->filename
), filename
);
413 /* begin of #include */
414 ST_FUNC
void tcc_debug_bincl(TCCState
*s1
)
418 put_stabs(s1
, file
->filename
, N_BINCL
, 0, 0, 0);
422 /* end of #include */
423 ST_FUNC
void tcc_debug_eincl(TCCState
*s1
)
427 put_stabn(s1
, N_EINCL
, 0, 0, 0);
431 /* ------------------------------------------------------------------------- */
432 /* initialize vstack and types. This must be done also for tcc -E */
433 ST_FUNC
void tccgen_init(TCCState
*s1
)
436 memset(vtop
, 0, sizeof *vtop
);
438 /* define some often used types */
440 char_pointer_type
.t
= VT_BYTE
;
441 mk_pointer(&char_pointer_type
);
443 size_type
.t
= VT_INT
| VT_UNSIGNED
;
444 ptrdiff_type
.t
= VT_INT
;
446 size_type
.t
= VT_LLONG
| VT_UNSIGNED
;
447 ptrdiff_type
.t
= VT_LLONG
;
449 size_type
.t
= VT_LONG
| VT_LLONG
| VT_UNSIGNED
;
450 ptrdiff_type
.t
= VT_LONG
| VT_LLONG
;
452 func_old_type
.t
= VT_FUNC
;
453 func_old_type
.ref
= sym_push(SYM_FIELD
, &int_type
, 0, 0);
454 func_old_type
.ref
->f
.func_call
= FUNC_CDECL
;
455 func_old_type
.ref
->f
.func_type
= FUNC_OLD
;
458 ST_FUNC
int tccgen_compile(TCCState
*s1
)
460 cur_text_section
= NULL
;
462 anon_sym
= SYM_FIRST_ANOM
;
465 nocode_wanted
= 0x80000000;
469 #ifdef TCC_TARGET_ARM
473 printf("%s: **** new file\n", file
->filename
);
475 parse_flags
= PARSE_FLAG_PREPROCESS
| PARSE_FLAG_TOK_NUM
| PARSE_FLAG_TOK_STR
;
478 gen_inline_functions(s1
);
480 /* end of translation unit info */
485 ST_FUNC
void tccgen_finish(TCCState
*s1
)
487 free_inline_functions(s1
);
488 sym_pop(&global_stack
, NULL
, 0);
489 sym_pop(&local_stack
, NULL
, 0);
490 /* free preprocessor macros */
493 dynarray_reset(&sym_pools
, &nb_sym_pools
);
494 sym_free_first
= NULL
;
497 /* ------------------------------------------------------------------------- */
498 ST_FUNC ElfSym
*elfsym(Sym
*s
)
502 return &((ElfSym
*)symtab_section
->data
)[s
->c
];
505 /* apply storage attributes to Elf symbol */
506 ST_FUNC
void update_storage(Sym
*sym
)
509 int sym_bind
, old_sym_bind
;
515 if (sym
->a
.visibility
)
516 esym
->st_other
= (esym
->st_other
& ~ELFW(ST_VISIBILITY
)(-1))
519 if (sym
->type
.t
& (VT_STATIC
| VT_INLINE
))
520 sym_bind
= STB_LOCAL
;
521 else if (sym
->a
.weak
)
524 sym_bind
= STB_GLOBAL
;
525 old_sym_bind
= ELFW(ST_BIND
)(esym
->st_info
);
526 if (sym_bind
!= old_sym_bind
) {
527 esym
->st_info
= ELFW(ST_INFO
)(sym_bind
, ELFW(ST_TYPE
)(esym
->st_info
));
531 if (sym
->a
.dllimport
)
532 esym
->st_other
|= ST_PE_IMPORT
;
533 if (sym
->a
.dllexport
)
534 esym
->st_other
|= ST_PE_EXPORT
;
538 printf("storage %s: bind=%c vis=%d exp=%d imp=%d\n",
539 get_tok_str(sym
->v
, NULL
),
540 sym_bind
== STB_WEAK
? 'w' : sym_bind
== STB_LOCAL
? 'l' : 'g',
548 /* ------------------------------------------------------------------------- */
549 /* update sym->c so that it points to an external symbol in section
550 'section' with value 'value' */
552 ST_FUNC
void put_extern_sym2(Sym
*sym
, int sh_num
,
553 addr_t value
, unsigned long size
,
554 int can_add_underscore
)
556 int sym_type
, sym_bind
, info
, other
, t
;
560 #ifdef CONFIG_TCC_BCHECK
565 name
= get_tok_str(sym
->v
, NULL
);
566 #ifdef CONFIG_TCC_BCHECK
567 if (tcc_state
->do_bounds_check
) {
568 /* XXX: avoid doing that for statics ? */
569 /* if bound checking is activated, we change some function
570 names by adding the "__bound" prefix */
573 /* XXX: we rely only on malloc hooks */
595 strcpy(buf
, "__bound_");
603 if ((t
& VT_BTYPE
) == VT_FUNC
) {
605 } else if ((t
& VT_BTYPE
) == VT_VOID
) {
606 sym_type
= STT_NOTYPE
;
608 sym_type
= STT_OBJECT
;
610 if (t
& (VT_STATIC
| VT_INLINE
))
611 sym_bind
= STB_LOCAL
;
613 sym_bind
= STB_GLOBAL
;
616 if (sym_type
== STT_FUNC
&& sym
->type
.ref
) {
617 Sym
*ref
= sym
->type
.ref
;
618 if (ref
->a
.nodecorate
) {
619 can_add_underscore
= 0;
621 if (ref
->f
.func_call
== FUNC_STDCALL
&& can_add_underscore
) {
622 sprintf(buf1
, "_%s@%d", name
, ref
->f
.func_args
* PTR_SIZE
);
624 other
|= ST_PE_STDCALL
;
625 can_add_underscore
= 0;
629 if (tcc_state
->leading_underscore
&& can_add_underscore
) {
631 pstrcpy(buf1
+ 1, sizeof(buf1
) - 1, name
);
635 name
= get_tok_str(sym
->asm_label
, NULL
);
636 info
= ELFW(ST_INFO
)(sym_bind
, sym_type
);
637 sym
->c
= put_elf_sym(symtab_section
, value
, size
, info
, other
, sh_num
, name
);
640 esym
->st_value
= value
;
641 esym
->st_size
= size
;
642 esym
->st_shndx
= sh_num
;
647 ST_FUNC
void put_extern_sym(Sym
*sym
, Section
*section
,
648 addr_t value
, unsigned long size
)
650 int sh_num
= section
? section
->sh_num
: SHN_UNDEF
;
651 put_extern_sym2(sym
, sh_num
, value
, size
, 1);
654 /* add a new relocation entry to symbol 'sym' in section 's' */
655 ST_FUNC
void greloca(Section
*s
, Sym
*sym
, unsigned long offset
, int type
,
660 if (nocode_wanted
&& s
== cur_text_section
)
665 put_extern_sym(sym
, NULL
, 0, 0);
669 /* now we can add ELF relocation info */
670 put_elf_reloca(symtab_section
, s
, offset
, type
, c
, addend
);
674 ST_FUNC
void greloc(Section
*s
, Sym
*sym
, unsigned long offset
, int type
)
676 greloca(s
, sym
, offset
, type
, 0);
680 /* ------------------------------------------------------------------------- */
681 /* symbol allocator */
682 static Sym
*__sym_malloc(void)
684 Sym
*sym_pool
, *sym
, *last_sym
;
687 sym_pool
= tcc_malloc(SYM_POOL_NB
* sizeof(Sym
));
688 dynarray_add(&sym_pools
, &nb_sym_pools
, sym_pool
);
690 last_sym
= sym_free_first
;
692 for(i
= 0; i
< SYM_POOL_NB
; i
++) {
693 sym
->next
= last_sym
;
697 sym_free_first
= last_sym
;
701 static inline Sym
*sym_malloc(void)
705 sym
= sym_free_first
;
707 sym
= __sym_malloc();
708 sym_free_first
= sym
->next
;
711 sym
= tcc_malloc(sizeof(Sym
));
716 ST_INLN
void sym_free(Sym
*sym
)
719 sym
->next
= sym_free_first
;
720 sym_free_first
= sym
;
726 /* push, without hashing */
727 ST_FUNC Sym
*sym_push2(Sym
**ps
, int v
, int t
, int c
)
732 memset(s
, 0, sizeof *s
);
742 /* find a symbol and return its associated structure. 's' is the top
743 of the symbol stack */
744 ST_FUNC Sym
*sym_find2(Sym
*s
, int v
)
756 /* structure lookup */
757 ST_INLN Sym
*struct_find(int v
)
760 if ((unsigned)v
>= (unsigned)(tok_ident
- TOK_IDENT
))
762 return table_ident
[v
]->sym_struct
;
765 /* find an identifier */
766 ST_INLN Sym
*sym_find(int v
)
769 if ((unsigned)v
>= (unsigned)(tok_ident
- TOK_IDENT
))
771 return table_ident
[v
]->sym_identifier
;
774 static int sym_scope(Sym
*s
)
776 if (IS_ENUM_VAL (s
->type
.t
))
777 return s
->type
.ref
->sym_scope
;
782 /* push a given symbol on the symbol stack */
783 ST_FUNC Sym
*sym_push(int v
, CType
*type
, int r
, int c
)
792 s
= sym_push2(ps
, v
, type
->t
, c
);
793 s
->type
.ref
= type
->ref
;
795 /* don't record fields or anonymous symbols */
797 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
798 /* record symbol in token array */
799 ts
= table_ident
[(v
& ~SYM_STRUCT
) - TOK_IDENT
];
801 ps
= &ts
->sym_struct
;
803 ps
= &ts
->sym_identifier
;
806 s
->sym_scope
= local_scope
;
807 if (s
->prev_tok
&& sym_scope(s
->prev_tok
) == s
->sym_scope
)
808 tcc_error("redeclaration of '%s'",
809 get_tok_str(v
& ~SYM_STRUCT
, NULL
));
814 /* push a global identifier */
815 ST_FUNC Sym
*global_identifier_push(int v
, int t
, int c
)
818 s
= sym_push2(&global_stack
, v
, t
, c
);
819 s
->r
= VT_CONST
| VT_SYM
;
820 /* don't record anonymous symbol */
821 if (v
< SYM_FIRST_ANOM
) {
822 ps
= &table_ident
[v
- TOK_IDENT
]->sym_identifier
;
823 /* modify the top most local identifier, so that sym_identifier will
824 point to 's' when popped; happens when called from inline asm */
825 while (*ps
!= NULL
&& (*ps
)->sym_scope
)
826 ps
= &(*ps
)->prev_tok
;
833 /* pop symbols until top reaches 'b'. If KEEP is non-zero don't really
834 pop them yet from the list, but do remove them from the token array. */
835 ST_FUNC
void sym_pop(Sym
**ptop
, Sym
*b
, int keep
)
845 /* remove symbol in token array */
847 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
848 ts
= table_ident
[(v
& ~SYM_STRUCT
) - TOK_IDENT
];
850 ps
= &ts
->sym_struct
;
852 ps
= &ts
->sym_identifier
;
863 /* ------------------------------------------------------------------------- */
864 static void vcheck_cmp(void)
866 /* cannot let cpu flags if other instruction are generated. Also
867 avoid leaving VT_JMP anywhere except on the top of the stack
868 because it would complicate the code generator.
870 Don't do this when nocode_wanted. vtop might come from
871 !nocode_wanted regions (see 88_codeopt.c) and transforming
872 it to a register without actually generating code is wrong
873 as their value might still be used for real. All values
874 we push under nocode_wanted will eventually be popped
875 again, so that the VT_CMP/VT_JMP value will be in vtop
876 when code is unsuppressed again. */
878 if (vtop
->r
== VT_CMP
&& !nocode_wanted
)
882 static void vsetc(CType
*type
, int r
, CValue
*vc
)
884 if (vtop
>= vstack
+ (VSTACK_SIZE
- 1))
885 tcc_error("memory full (vstack)");
895 ST_FUNC
void vswap(void)
905 /* pop stack value */
906 ST_FUNC
void vpop(void)
909 v
= vtop
->r
& VT_VALMASK
;
910 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
911 /* for x86, we need to pop the FP stack */
913 o(0xd8dd); /* fstp %st(0) */
917 /* need to put correct jump if && or || without test */
924 /* push constant of type "type" with useless value */
925 ST_FUNC
void vpush(CType
*type
)
927 vset(type
, VT_CONST
, 0);
930 /* push integer constant */
931 ST_FUNC
void vpushi(int v
)
935 vsetc(&int_type
, VT_CONST
, &cval
);
938 /* push a pointer sized constant */
939 static void vpushs(addr_t v
)
943 vsetc(&size_type
, VT_CONST
, &cval
);
946 /* push arbitrary 64bit constant */
947 ST_FUNC
void vpush64(int ty
, unsigned long long v
)
954 vsetc(&ctype
, VT_CONST
, &cval
);
957 /* push long long constant */
958 static inline void vpushll(long long v
)
960 vpush64(VT_LLONG
, v
);
963 ST_FUNC
void vset(CType
*type
, int r
, int v
)
968 vsetc(type
, r
, &cval
);
971 static void vseti(int r
, int v
)
979 ST_FUNC
void vpushv(SValue
*v
)
981 if (vtop
>= vstack
+ (VSTACK_SIZE
- 1))
982 tcc_error("memory full (vstack)");
987 static void vdup(void)
992 /* rotate n first stack elements to the bottom
993 I1 ... In -> I2 ... In I1 [top is right]
995 ST_FUNC
void vrotb(int n
)
1002 for(i
=-n
+1;i
!=0;i
++)
1003 vtop
[i
] = vtop
[i
+1];
1007 /* rotate the n elements before entry e towards the top
1008 I1 ... In ... -> In I1 ... I(n-1) ... [top is right]
1010 ST_FUNC
void vrote(SValue
*e
, int n
)
1017 for(i
= 0;i
< n
- 1; i
++)
1022 /* rotate n first stack elements to the top
1023 I1 ... In -> In I1 ... I(n-1) [top is right]
1025 ST_FUNC
void vrott(int n
)
1030 /* ------------------------------------------------------------------------- */
1031 /* vtop->r = VT_CMP means CPU-flags have been set from comparison or test. */
1033 /* called from generators to set the result from relational ops */
1034 ST_FUNC
void vset_VT_CMP(int op
)
1042 /* called once before asking generators to load VT_CMP to a register */
1043 static void vset_VT_JMP(void)
1045 int op
= vtop
->cmp_op
;
1046 if (vtop
->jtrue
|| vtop
->jfalse
) {
1047 /* we need to jump to 'mov $0,%R' or 'mov $1,%R' */
1048 int inv
= op
& (op
< 2); /* small optimization */
1049 vseti(VT_JMP
+inv
, gvtst(inv
, 0));
1051 /* otherwise convert flags (rsp. 0/1) to register */
1053 if (op
< 2) /* doesn't seem to happen */
1058 /* Set CPU Flags, doesn't yet jump */
1059 static void gvtst_set(int inv
, int t
)
1062 if (vtop
->r
!= VT_CMP
) {
1065 if (vtop
->r
== VT_CMP
) /* must be VT_CONST otherwise */
1067 else if (vtop
->r
== VT_CONST
)
1068 vset_VT_CMP(vtop
->c
.i
!= 0);
1072 p
= inv
? &vtop
->jfalse
: &vtop
->jtrue
;
1073 *p
= gjmp_append(*p
, t
);
1076 /* Generate value test
1078 * Generate a test for any value (jump, comparison and integers) */
1079 static int gvtst(int inv
, int t
)
1085 t
= vtop
->jtrue
, u
= vtop
->jfalse
;
1087 x
= u
, u
= t
, t
= x
;
1090 /* jump to the wanted target */
1092 t
= gjmp_cond(op
^ inv
, t
);
1095 /* resolve complementary jumps to here */
1102 /* ------------------------------------------------------------------------- */
1103 /* push a symbol value of TYPE */
1104 static inline void vpushsym(CType
*type
, Sym
*sym
)
1108 vsetc(type
, VT_CONST
| VT_SYM
, &cval
);
1112 /* Return a static symbol pointing to a section */
1113 ST_FUNC Sym
*get_sym_ref(CType
*type
, Section
*sec
, unsigned long offset
, unsigned long size
)
1119 sym
= sym_push(v
, type
, VT_CONST
| VT_SYM
, 0);
1120 sym
->type
.t
|= VT_STATIC
;
1121 put_extern_sym(sym
, sec
, offset
, size
);
1125 /* push a reference to a section offset by adding a dummy symbol */
1126 static void vpush_ref(CType
*type
, Section
*sec
, unsigned long offset
, unsigned long size
)
1128 vpushsym(type
, get_sym_ref(type
, sec
, offset
, size
));
1131 /* define a new external reference to a symbol 'v' of type 'u' */
1132 ST_FUNC Sym
*external_global_sym(int v
, CType
*type
)
1138 /* push forward reference */
1139 s
= global_identifier_push(v
, type
->t
| VT_EXTERN
, 0);
1140 s
->type
.ref
= type
->ref
;
1141 } else if (IS_ASM_SYM(s
)) {
1142 s
->type
.t
= type
->t
| (s
->type
.t
& VT_EXTERN
);
1143 s
->type
.ref
= type
->ref
;
1149 /* Merge symbol attributes. */
1150 static void merge_symattr(struct SymAttr
*sa
, struct SymAttr
*sa1
)
1152 if (sa1
->aligned
&& !sa
->aligned
)
1153 sa
->aligned
= sa1
->aligned
;
1154 sa
->packed
|= sa1
->packed
;
1155 sa
->weak
|= sa1
->weak
;
1156 if (sa1
->visibility
!= STV_DEFAULT
) {
1157 int vis
= sa
->visibility
;
1158 if (vis
== STV_DEFAULT
1159 || vis
> sa1
->visibility
)
1160 vis
= sa1
->visibility
;
1161 sa
->visibility
= vis
;
1163 sa
->dllexport
|= sa1
->dllexport
;
1164 sa
->nodecorate
|= sa1
->nodecorate
;
1165 sa
->dllimport
|= sa1
->dllimport
;
1168 /* Merge function attributes. */
1169 static void merge_funcattr(struct FuncAttr
*fa
, struct FuncAttr
*fa1
)
1171 if (fa1
->func_call
&& !fa
->func_call
)
1172 fa
->func_call
= fa1
->func_call
;
1173 if (fa1
->func_type
&& !fa
->func_type
)
1174 fa
->func_type
= fa1
->func_type
;
1175 if (fa1
->func_args
&& !fa
->func_args
)
1176 fa
->func_args
= fa1
->func_args
;
1179 /* Merge attributes. */
1180 static void merge_attr(AttributeDef
*ad
, AttributeDef
*ad1
)
1182 merge_symattr(&ad
->a
, &ad1
->a
);
1183 merge_funcattr(&ad
->f
, &ad1
->f
);
1186 ad
->section
= ad1
->section
;
1187 if (ad1
->alias_target
)
1188 ad
->alias_target
= ad1
->alias_target
;
1190 ad
->asm_label
= ad1
->asm_label
;
1192 ad
->attr_mode
= ad1
->attr_mode
;
1195 /* Merge some type attributes. */
1196 static void patch_type(Sym
*sym
, CType
*type
)
1198 if (!(type
->t
& VT_EXTERN
) || IS_ENUM_VAL(sym
->type
.t
)) {
1199 if (!(sym
->type
.t
& VT_EXTERN
))
1200 tcc_error("redefinition of '%s'", get_tok_str(sym
->v
, NULL
));
1201 sym
->type
.t
&= ~VT_EXTERN
;
1204 if (IS_ASM_SYM(sym
)) {
1205 /* stay static if both are static */
1206 sym
->type
.t
= type
->t
& (sym
->type
.t
| ~VT_STATIC
);
1207 sym
->type
.ref
= type
->ref
;
1210 if (!is_compatible_types(&sym
->type
, type
)) {
1211 tcc_error("incompatible types for redefinition of '%s'",
1212 get_tok_str(sym
->v
, NULL
));
1214 } else if ((sym
->type
.t
& VT_BTYPE
) == VT_FUNC
) {
1215 int static_proto
= sym
->type
.t
& VT_STATIC
;
1216 /* warn if static follows non-static function declaration */
1217 if ((type
->t
& VT_STATIC
) && !static_proto
1218 /* XXX this test for inline shouldn't be here. Until we
1219 implement gnu-inline mode again it silences a warning for
1220 mingw caused by our workarounds. */
1221 && !((type
->t
| sym
->type
.t
) & VT_INLINE
))
1222 tcc_warning("static storage ignored for redefinition of '%s'",
1223 get_tok_str(sym
->v
, NULL
));
1225 /* set 'inline' if both agree or if one has static */
1226 if ((type
->t
| sym
->type
.t
) & VT_INLINE
) {
1227 if (!((type
->t
^ sym
->type
.t
) & VT_INLINE
)
1228 || ((type
->t
| sym
->type
.t
) & VT_STATIC
))
1229 static_proto
|= VT_INLINE
;
1232 if (0 == (type
->t
& VT_EXTERN
)) {
1233 /* put complete type, use static from prototype */
1234 sym
->type
.t
= (type
->t
& ~(VT_STATIC
|VT_INLINE
)) | static_proto
;
1235 sym
->type
.ref
= type
->ref
;
1237 sym
->type
.t
&= ~VT_INLINE
| static_proto
;
1240 if (sym
->type
.ref
->f
.func_type
== FUNC_OLD
1241 && type
->ref
->f
.func_type
!= FUNC_OLD
) {
1242 sym
->type
.ref
= type
->ref
;
1246 if ((sym
->type
.t
& VT_ARRAY
) && type
->ref
->c
>= 0) {
1247 /* set array size if it was omitted in extern declaration */
1248 sym
->type
.ref
->c
= type
->ref
->c
;
1250 if ((type
->t
^ sym
->type
.t
) & VT_STATIC
)
1251 tcc_warning("storage mismatch for redefinition of '%s'",
1252 get_tok_str(sym
->v
, NULL
));
1256 /* Merge some storage attributes. */
1257 static void patch_storage(Sym
*sym
, AttributeDef
*ad
, CType
*type
)
1260 patch_type(sym
, type
);
1262 #ifdef TCC_TARGET_PE
1263 if (sym
->a
.dllimport
!= ad
->a
.dllimport
)
1264 tcc_error("incompatible dll linkage for redefinition of '%s'",
1265 get_tok_str(sym
->v
, NULL
));
1267 merge_symattr(&sym
->a
, &ad
->a
);
1269 sym
->asm_label
= ad
->asm_label
;
1270 update_storage(sym
);
1273 /* copy sym to other stack */
1274 static Sym
*sym_copy(Sym
*s0
, Sym
**ps
)
1277 s
= sym_malloc(), *s
= *s0
;
1278 s
->prev
= *ps
, *ps
= s
;
1279 if (s
->v
< SYM_FIRST_ANOM
) {
1280 ps
= &table_ident
[s
->v
- TOK_IDENT
]->sym_identifier
;
1281 s
->prev_tok
= *ps
, *ps
= s
;
1286 /* copy s->type.ref to stack 'ps' for VT_FUNC and VT_PTR */
1287 static void sym_copy_ref(Sym
*s
, Sym
**ps
)
1289 int bt
= s
->type
.t
& VT_BTYPE
;
1290 if (bt
== VT_FUNC
|| bt
== VT_PTR
) {
1291 Sym
**sp
= &s
->type
.ref
;
1292 for (s
= *sp
, *sp
= NULL
; s
; s
= s
->next
) {
1293 Sym
*s2
= sym_copy(s
, ps
);
1294 sp
= &(*sp
= s2
)->next
;
1295 sym_copy_ref(s2
, ps
);
1300 /* define a new external reference to a symbol 'v' */
1301 static Sym
*external_sym(int v
, CType
*type
, int r
, AttributeDef
*ad
)
1305 /* look for global symbol */
1307 while (s
&& s
->sym_scope
)
1311 /* push forward reference */
1312 s
= global_identifier_push(v
, type
->t
, 0);
1315 s
->asm_label
= ad
->asm_label
;
1316 s
->type
.ref
= type
->ref
;
1317 /* copy type to the global stack */
1319 sym_copy_ref(s
, &global_stack
);
1321 patch_storage(s
, ad
, type
);
1323 /* push variables on local_stack if any */
1324 if (local_stack
&& (s
->type
.t
& VT_BTYPE
) != VT_FUNC
)
1325 s
= sym_copy(s
, &local_stack
);
1329 /* push a reference to global symbol v */
1330 ST_FUNC
void vpush_global_sym(CType
*type
, int v
)
1332 vpushsym(type
, external_global_sym(v
, type
));
1335 /* save registers up to (vtop - n) stack entry */
1336 ST_FUNC
void save_regs(int n
)
1339 for(p
= vstack
, p1
= vtop
- n
; p
<= p1
; p
++)
1343 /* save r to the memory stack, and mark it as being free */
1344 ST_FUNC
void save_reg(int r
)
1346 save_reg_upstack(r
, 0);
1349 /* save r to the memory stack, and mark it as being free,
1350 if seen up to (vtop - n) stack entry */
1351 ST_FUNC
void save_reg_upstack(int r
, int n
)
1353 int l
, saved
, size
, align
;
1357 if ((r
&= VT_VALMASK
) >= VT_CONST
)
1362 /* modify all stack values */
1365 for(p
= vstack
, p1
= vtop
- n
; p
<= p1
; p
++) {
1366 if ((p
->r
& VT_VALMASK
) == r
|| (p
->r2
& VT_VALMASK
) == r
) {
1367 /* must save value on stack if not already done */
1369 /* NOTE: must reload 'r' because r might be equal to r2 */
1370 r
= p
->r
& VT_VALMASK
;
1371 /* store register in the stack */
1373 if ((p
->r
& VT_LVAL
) ||
1374 (!is_float(type
->t
) && (type
->t
& VT_BTYPE
) != VT_LLONG
))
1376 type
= &char_pointer_type
;
1380 size
= type_size(type
, &align
);
1381 #ifdef CONFIG_TCC_BCHECK
1382 if (tcc_state
->do_bounds_check
)
1383 l
= loc
= (loc
- size
) & -align
;
1386 l
=get_temp_local_var(size
,align
);
1387 sv
.type
.t
= type
->t
;
1388 sv
.r
= VT_LOCAL
| VT_LVAL
;
1391 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
1392 /* x86 specific: need to pop fp register ST0 if saved */
1393 if (r
== TREG_ST0
) {
1394 o(0xd8dd); /* fstp %st(0) */
1397 /* special long long case */
1398 if (p
->r2
< VT_CONST
&& USING_TWO_WORDS(type
->t
)) {
1404 /* mark that stack entry as being saved on the stack */
1405 if (p
->r
& VT_LVAL
) {
1406 /* also clear the bounded flag because the
1407 relocation address of the function was stored in
1409 p
->r
= (p
->r
& ~(VT_VALMASK
| VT_BOUNDED
)) | VT_LLOCAL
;
1411 p
->r
= lvalue_type(p
->type
.t
) | VT_LOCAL
;
1419 #ifdef TCC_TARGET_ARM
1420 /* find a register of class 'rc2' with at most one reference on stack.
1421 * If none, call get_reg(rc) */
1422 ST_FUNC
int get_reg_ex(int rc
, int rc2
)
1427 for(r
=0;r
<NB_REGS
;r
++) {
1428 if (reg_classes
[r
] & rc2
) {
1431 for(p
= vstack
; p
<= vtop
; p
++) {
1432 if ((p
->r
& VT_VALMASK
) == r
||
1433 (p
->r2
& VT_VALMASK
) == r
)
1444 /* find a free register of class 'rc'. If none, save one register */
1445 ST_FUNC
int get_reg(int rc
)
1450 /* find a free register */
1451 for(r
=0;r
<NB_REGS
;r
++) {
1452 if (reg_classes
[r
] & rc
) {
1455 for(p
=vstack
;p
<=vtop
;p
++) {
1456 if ((p
->r
& VT_VALMASK
) == r
||
1457 (p
->r2
& VT_VALMASK
) == r
)
1465 /* no register left : free the first one on the stack (VERY
1466 IMPORTANT to start from the bottom to ensure that we don't
1467 spill registers used in gen_opi()) */
1468 for(p
=vstack
;p
<=vtop
;p
++) {
1469 /* look at second register (if long long) */
1470 r
= p
->r2
& VT_VALMASK
;
1471 if (r
< VT_CONST
&& (reg_classes
[r
] & rc
))
1473 r
= p
->r
& VT_VALMASK
;
1474 if (r
< VT_CONST
&& (reg_classes
[r
] & rc
)) {
1480 /* Should never comes here */
1484 /* find a free temporary local variable (return the offset on stack) match the size and align. If none, add new temporary stack variable*/
1485 static int get_temp_local_var(int size
,int align
){
1487 struct temp_local_variable
*temp_var
;
1494 for(i
=0;i
<nb_temp_local_vars
;i
++){
1495 temp_var
=&arr_temp_local_vars
[i
];
1496 if(temp_var
->size
<size
||align
!=temp_var
->align
){
1499 /*check if temp_var is free*/
1501 for(p
=vstack
;p
<=vtop
;p
++) {
1503 if(r
==VT_LOCAL
||r
==VT_LLOCAL
){
1504 if(p
->c
.i
==temp_var
->location
){
1511 found_var
=temp_var
->location
;
1517 loc
= (loc
- size
) & -align
;
1518 if(nb_temp_local_vars
<MAX_TEMP_LOCAL_VARIABLE_NUMBER
){
1519 temp_var
=&arr_temp_local_vars
[i
];
1520 temp_var
->location
=loc
;
1521 temp_var
->size
=size
;
1522 temp_var
->align
=align
;
1523 nb_temp_local_vars
++;
1530 static void clear_temp_local_var_list(){
1531 nb_temp_local_vars
=0;
1534 /* move register 's' (of type 't') to 'r', and flush previous value of r to memory
1536 static void move_reg(int r
, int s
, int t
)
/* NOTE(review): the function body was lost by the line-mangling extraction.
   From the header comment: 'r' is the destination register, 's' the source
   register, 't' the value type used to pick the move width -- confirm the
   body against upstream tccgen.c before relying on details. */
1550 /* get address of vtop (vtop MUST BE an lvalue) */
1551 ST_FUNC
void gaddrof(void)
1553 vtop
->r
&= ~VT_LVAL
;
1554 /* tricky: if saved lvalue, then we can go back to lvalue */
1555 if ((vtop
->r
& VT_VALMASK
) == VT_LLOCAL
)
1556 vtop
->r
= (vtop
->r
& ~(VT_VALMASK
| VT_LVAL_TYPE
)) | VT_LOCAL
| VT_LVAL
;
1561 #ifdef CONFIG_TCC_BCHECK
1562 /* generate lvalue bound code */
1563 static void gbound(void)
1568 vtop
->r
&= ~VT_MUSTBOUND
;
1569 /* if lvalue, then use checking code before dereferencing */
1570 if (vtop
->r
& VT_LVAL
) {
1571 /* if not VT_BOUNDED value, then make one */
1572 if (!(vtop
->r
& VT_BOUNDED
)) {
1573 lval_type
= vtop
->r
& (VT_LVAL_TYPE
| VT_LVAL
);
1574 /* must save type because we must set it to int to get pointer */
1576 vtop
->type
.t
= VT_PTR
;
1579 gen_bounded_ptr_add();
1580 vtop
->r
|= lval_type
;
1583 /* then check for dereferencing */
1584 gen_bounded_ptr_deref();
1588 /* we need to call __bound_ptr_add before we start to load function
1589 args into registers */
1590 ST_FUNC
void gbound_args(int nb_args
)
/* Walk the top 'nb_args' value-stack entries (vtop[0] is the last
   argument pushed, vtop[1 - nb_args] the first); any entry still
   flagged VT_MUSTBOUND has its pointer bound-check emitted here,
   before argument registers are allocated. */
1593 for (i
= 1; i
<= nb_args
; ++i
)
1594 if (vtop
[1 - i
].r
& VT_MUSTBOUND
) {
/* NOTE(review): the loop body that actually emits the bound check was
   lost by the line-mangling extraction -- confirm against upstream
   tccgen.c. */
1602 static void incr_bf_adr(int o
)
/* Advance the bitfield address held on vtop by 'o' bytes and retype the
   lvalue as an unsigned byte, for the single-byte packed-bitfield
   access helpers (load_packed_bf / store_packed_bf). */
1604 vtop
->type
= char_pointer_type
;
/* NOTE(review): the pointer-advance statements between retyping and the
   masking below were lost by the line-mangling extraction -- confirm
   against upstream tccgen.c. */
1608 vtop
->type
.t
= (vtop
->type
.t
& ~(VT_BTYPE
|VT_DEFSIGN
))
1609 | (VT_BYTE
|VT_UNSIGNED
);
/* mark the lvalue itself as an unsigned byte access */
1610 vtop
->r
= (vtop
->r
& ~VT_LVAL_TYPE
)
1611 | (VT_LVAL_BYTE
|VT_LVAL_UNSIGNED
|VT_LVAL
);
1614 /* single-byte load mode for packed or otherwise unaligned bitfields */
1615 static void load_packed_bf(CType
*type
, int bit_pos
, int bit_size
)
1618 save_reg_upstack(vtop
->r
, 1);
1619 vpush64(type
->t
& VT_BTYPE
, 0); // B X
1620 bits
= 0, o
= bit_pos
>> 3, bit_pos
&= 7;
1629 vpushi(bit_pos
), gen_op(TOK_SHR
), bit_pos
= 0; // X B Y
1631 vpushi((1 << n
) - 1), gen_op('&');
1634 vpushi(bits
), gen_op(TOK_SHL
);
1637 bits
+= n
, bit_size
-= n
, o
= 1;
1640 if (!(type
->t
& VT_UNSIGNED
)) {
1641 n
= ((type
->t
& VT_BTYPE
) == VT_LLONG
? 64 : 32) - bits
;
1642 vpushi(n
), gen_op(TOK_SHL
);
1643 vpushi(n
), gen_op(TOK_SAR
);
1647 /* single-byte store mode for packed or otherwise unaligned bitfields */
1648 static void store_packed_bf(int bit_pos
, int bit_size
)
1650 int bits
, n
, o
, m
, c
;
1652 c
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
1654 save_reg_upstack(vtop
->r
, 1);
1655 bits
= 0, o
= bit_pos
>> 3, bit_pos
&= 7;
1657 incr_bf_adr(o
); // X B
1659 c
? vdup() : gv_dup(); // B V X
1662 vpushi(bits
), gen_op(TOK_SHR
);
1664 vpushi(bit_pos
), gen_op(TOK_SHL
);
1669 m
= ((1 << n
) - 1) << bit_pos
;
1670 vpushi(m
), gen_op('&'); // X B V1
1671 vpushv(vtop
-1); // X B V1 B
1672 vpushi(m
& 0x80 ? ~m
& 0x7f : ~m
);
1673 gen_op('&'); // X B V1 B1
1674 gen_op('|'); // X B V2
1676 vdup(), vtop
[-1] = vtop
[-2]; // X B B V2
1677 vstore(), vpop(); // X B
1678 bits
+= n
, bit_size
-= n
, bit_pos
= 0, o
= 1;
1683 static int adjust_bf(SValue
*sv
, int bit_pos
, int bit_size
)
/* Adjust the access type of the bitfield value 'sv' according to the
   auxiliary type recorded on its type's Sym (ref->auxtype): when a
   concrete basic type was recorded, rewrite sv to access the field
   through that type. */
1686 if (0 == sv
->type
.ref
)
/* NOTE(review): the early-return for the no-ref case was lost by the
   line-mangling extraction. */
1688 t
= sv
->type
.ref
->auxtype
;
1689 if (t
!= -1 && t
!= VT_STRUCT
) {
/* replace the basic type by the recorded access type */
1690 sv
->type
.t
= (sv
->type
.t
& ~VT_BTYPE
) | t
;
/* and refresh the lvalue size bits to match the new type */
1691 sv
->r
= (sv
->r
& ~VT_LVAL_TYPE
) | lvalue_type(sv
->type
.t
);
/* NOTE(review): the final 'return t;' (consumed by gv() to detect the
   VT_STRUCT packed case) was lost in extraction -- confirm upstream. */
1696 /* store vtop a register belonging to class 'rc'. lvalues are
1697 converted to values. Cannot be used if cannot be converted to
1698 register value (such as structures). */
1699 ST_FUNC
int gv(int rc
)
1701 int r
, r2
, r_ok
, r2_ok
, rc2
, bt
;
1702 int bit_pos
, bit_size
, size
, align
;
1704 /* NOTE: get_reg can modify vstack[] */
1705 if (vtop
->type
.t
& VT_BITFIELD
) {
1708 bit_pos
= BIT_POS(vtop
->type
.t
);
1709 bit_size
= BIT_SIZE(vtop
->type
.t
);
1710 /* remove bit field info to avoid loops */
1711 vtop
->type
.t
&= ~VT_STRUCT_MASK
;
1714 type
.t
= vtop
->type
.t
& VT_UNSIGNED
;
1715 if ((vtop
->type
.t
& VT_BTYPE
) == VT_BOOL
)
1716 type
.t
|= VT_UNSIGNED
;
1718 r
= adjust_bf(vtop
, bit_pos
, bit_size
);
1720 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
)
1725 if (r
== VT_STRUCT
) {
1726 load_packed_bf(&type
, bit_pos
, bit_size
);
1728 int bits
= (type
.t
& VT_BTYPE
) == VT_LLONG
? 64 : 32;
1729 /* cast to int to propagate signedness in following ops */
1731 /* generate shifts */
1732 vpushi(bits
- (bit_pos
+ bit_size
));
1734 vpushi(bits
- bit_size
);
1735 /* NOTE: transformed to SHR if unsigned */
1740 if (is_float(vtop
->type
.t
) &&
1741 (vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
) {
1742 unsigned long offset
;
1743 /* CPUs usually cannot use float constants, so we store them
1744 generically in data segment */
1745 size
= type_size(&vtop
->type
, &align
);
1747 size
= 0, align
= 1;
1748 offset
= section_add(data_section
, size
, align
);
1749 vpush_ref(&vtop
->type
, data_section
, offset
, size
);
1751 init_putv(&vtop
->type
, data_section
, offset
);
1754 #ifdef CONFIG_TCC_BCHECK
1755 if (vtop
->r
& VT_MUSTBOUND
)
1759 bt
= vtop
->type
.t
& VT_BTYPE
;
1761 #ifdef TCC_TARGET_RISCV64
1763 if (bt
== VT_LDOUBLE
&& rc
== RC_FLOAT
)
1766 rc2
= RC2_TYPE(bt
, rc
);
1768 /* need to reload if:
1770 - lvalue (need to dereference pointer)
1771 - already a register, but not in the right class */
1772 r
= vtop
->r
& VT_VALMASK
;
1773 r_ok
= !(vtop
->r
& VT_LVAL
) && (r
< VT_CONST
) && (reg_classes
[r
] & rc
);
1774 r2_ok
= !rc2
|| ((vtop
->r2
< VT_CONST
) && (reg_classes
[vtop
->r2
] & rc2
));
1776 if (!r_ok
|| !r2_ok
) {
1780 int load_type
= (bt
== VT_QFLOAT
) ? VT_DOUBLE
: ptrdiff_type
.t
;
1781 int original_type
= vtop
->type
.t
;
1783 /* two register type load :
1784 expand to two words temporarily */
1785 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
) {
1787 unsigned long long ll
= vtop
->c
.i
;
1788 vtop
->c
.i
= ll
; /* first word */
1790 vtop
->r
= r
; /* save register value */
1791 vpushi(ll
>> 32); /* second word */
1792 } else if (vtop
->r
& VT_LVAL
) {
1793 /* We do not want to modifier the long long pointer here.
1794 So we save any other instances down the stack */
1795 save_reg_upstack(vtop
->r
, 1);
1796 /* load from memory */
1797 vtop
->type
.t
= load_type
;
1800 vtop
[-1].r
= r
; /* save register value */
1801 /* increment pointer to get second word */
1802 vtop
->type
.t
= ptrdiff_type
.t
;
1807 vtop
->type
.t
= load_type
;
1809 /* move registers */
1812 if (r2_ok
&& vtop
->r2
< VT_CONST
)
1815 vtop
[-1].r
= r
; /* save register value */
1816 vtop
->r
= vtop
[-1].r2
;
1818 /* Allocate second register. Here we rely on the fact that
1819 get_reg() tries first to free r2 of an SValue. */
1823 /* write second register */
1826 vtop
->type
.t
= original_type
;
1827 } else if ((vtop
->r
& VT_LVAL
) && !is_float(vtop
->type
.t
)) {
1829 /* lvalue of scalar type : need to use lvalue type
1830 because of possible cast */
1833 /* compute memory access type */
1834 if (vtop
->r
& VT_LVAL_BYTE
)
1836 else if (vtop
->r
& VT_LVAL_SHORT
)
1838 if (vtop
->r
& VT_LVAL_UNSIGNED
)
1842 /* restore wanted type */
1845 if (vtop
->r
== VT_CMP
)
1847 /* one register type load */
1852 #ifdef TCC_TARGET_C67
1853 /* uses register pairs for doubles */
1854 if (bt
== VT_DOUBLE
)
1861 /* generate vtop[-1] and vtop[0] in resp. classes rc1 and rc2 */
1862 ST_FUNC
void gv2(int rc1
, int rc2
)
1864 /* generate more generic register first. But VT_JMP or VT_CMP
1865 values must be generated first in all cases to avoid possible
1867 if (vtop
->r
!= VT_CMP
&& rc1
<= rc2
) {
1872 /* test if reload is needed for first register */
1873 if ((vtop
[-1].r
& VT_VALMASK
) >= VT_CONST
) {
1883 /* test if reload is needed for first register */
1884 if ((vtop
[0].r
& VT_VALMASK
) >= VT_CONST
) {
1891 /* expand 64bit on stack in two ints */
1892 ST_FUNC
void lexpand(void)
1895 u
= vtop
->type
.t
& (VT_DEFSIGN
| VT_UNSIGNED
);
1896 v
= vtop
->r
& (VT_VALMASK
| VT_LVAL
);
1897 if (v
== VT_CONST
) {
1900 } else if (v
== (VT_LVAL
|VT_CONST
) || v
== (VT_LVAL
|VT_LOCAL
)) {
1906 vtop
[0].r
= vtop
[-1].r2
;
1907 vtop
[0].r2
= vtop
[-1].r2
= VT_CONST
;
1909 vtop
[0].type
.t
= vtop
[-1].type
.t
= VT_INT
| u
;
1914 /* build a long long from two ints */
1915 static void lbuild(int t
)
/* The two 32-bit halves are the top two value-stack entries; force both
   into integer registers, then fold them into one entry of type 't'
   (low word's register plus high word's register as r2). */
1917 gv2(RC_INT
, RC_INT
);
1918 vtop
[-1].r2
= vtop
[0].r
;
1919 vtop
[-1].type
.t
= t
;
/* NOTE(review): the trailing vpop() that removes the high-word entry
   was lost by the line-mangling extraction -- confirm upstream. */
1924 /* convert stack entry to register and duplicate its value in another
1926 static void gv_dup(void)
1932 if ((t
& VT_BTYPE
) == VT_LLONG
) {
1933 if (t
& VT_BITFIELD
) {
1943 /* stack: H L L1 H1 */
1953 /* duplicate value */
1963 /* generate CPU independent (unsigned) long long operations */
1964 static void gen_opl(int op
)
1966 int t
, a
, b
, op1
, c
, i
;
1968 unsigned short reg_iret
= REG_IRET
;
1969 unsigned short reg_lret
= REG_IRE2
;
1975 func
= TOK___divdi3
;
1978 func
= TOK___udivdi3
;
1981 func
= TOK___moddi3
;
1984 func
= TOK___umoddi3
;
1991 /* call generic long long function */
1992 vpush_global_sym(&func_old_type
, func
);
1997 vtop
->r2
= reg_lret
;
2005 //pv("gen_opl A",0,2);
2011 /* stack: L1 H1 L2 H2 */
2016 vtop
[-2] = vtop
[-3];
2019 /* stack: H1 H2 L1 L2 */
2020 //pv("gen_opl B",0,4);
2026 /* stack: H1 H2 L1 L2 ML MH */
2029 /* stack: ML MH H1 H2 L1 L2 */
2033 /* stack: ML MH H1 L2 H2 L1 */
2038 /* stack: ML MH M1 M2 */
2041 } else if (op
== '+' || op
== '-') {
2042 /* XXX: add non carry method too (for MIPS or alpha) */
2048 /* stack: H1 H2 (L1 op L2) */
2051 gen_op(op1
+ 1); /* TOK_xxxC2 */
2054 /* stack: H1 H2 (L1 op L2) */
2057 /* stack: (L1 op L2) H1 H2 */
2059 /* stack: (L1 op L2) (H1 op H2) */
2067 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
2068 t
= vtop
[-1].type
.t
;
2072 /* stack: L H shift */
2074 /* constant: simpler */
2075 /* NOTE: all comments are for SHL. the other cases are
2076 done by swapping words */
2087 if (op
!= TOK_SAR
) {
2120 /* XXX: should provide a faster fallback on x86 ? */
2123 func
= TOK___ashrdi3
;
2126 func
= TOK___lshrdi3
;
2129 func
= TOK___ashldi3
;
2135 /* compare operations */
2141 /* stack: L1 H1 L2 H2 */
2143 vtop
[-1] = vtop
[-2];
2145 /* stack: L1 L2 H1 H2 */
2149 /* when values are equal, we need to compare low words. since
2150 the jump is inverted, we invert the test too. */
2153 else if (op1
== TOK_GT
)
2155 else if (op1
== TOK_ULT
)
2157 else if (op1
== TOK_UGT
)
2167 /* generate non equal test */
2169 vset_VT_CMP(TOK_NE
);
2173 /* compare low. Always unsigned */
2177 else if (op1
== TOK_LE
)
2179 else if (op1
== TOK_GT
)
2181 else if (op1
== TOK_GE
)
2184 #if 0//def TCC_TARGET_I386
2185 if (op
== TOK_NE
) { gsym(b
); break; }
2186 if (op
== TOK_EQ
) { gsym(a
); break; }
/* Signed 64-bit division evaluated on values carried in unsigned
   words: divide the magnitudes, then restore the sign of the quotient
   (negative exactly when the operand signs differ). Truncates toward
   zero, like C signed division. */
static uint64_t gen_opic_sdiv(uint64_t a, uint64_t b)
{
    uint64_t ua = (a >> 63) ? 0 - a : a;   /* |a| */
    uint64_t ub = (b >> 63) ? 0 - b : b;   /* |b| */
    uint64_t q = ua / ub;
    return ((a ^ b) >> 63) ? 0 - q : q;
}
/* Signed "a < b" computed on values stored in unsigned 64-bit words:
   XOR-ing the sign bit maps the signed ordering onto the unsigned
   ordering, so a plain unsigned compare gives the signed result. */
static int gen_opic_lt(uint64_t a, uint64_t b)
{
    const uint64_t sign_bit = (uint64_t)1 << 63;
    return (a ^ sign_bit) < (b ^ sign_bit);
}
2206 /* handle integer constant optimizations and various machine
2208 static void gen_opic(int op
)
2210 SValue
*v1
= vtop
- 1;
2212 int t1
= v1
->type
.t
& VT_BTYPE
;
2213 int t2
= v2
->type
.t
& VT_BTYPE
;
2214 int c1
= (v1
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2215 int c2
= (v2
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2216 uint64_t l1
= c1
? v1
->c
.i
: 0;
2217 uint64_t l2
= c2
? v2
->c
.i
: 0;
2218 int shm
= (t1
== VT_LLONG
) ? 63 : 31;
2220 if (t1
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t1
!= VT_PTR
))
2221 l1
= ((uint32_t)l1
|
2222 (v1
->type
.t
& VT_UNSIGNED
? 0 : -(l1
& 0x80000000)));
2223 if (t2
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t2
!= VT_PTR
))
2224 l2
= ((uint32_t)l2
|
2225 (v2
->type
.t
& VT_UNSIGNED
? 0 : -(l2
& 0x80000000)));
2229 case '+': l1
+= l2
; break;
2230 case '-': l1
-= l2
; break;
2231 case '&': l1
&= l2
; break;
2232 case '^': l1
^= l2
; break;
2233 case '|': l1
|= l2
; break;
2234 case '*': l1
*= l2
; break;
2241 /* if division by zero, generate explicit division */
2244 tcc_error("division by zero in constant");
2248 default: l1
= gen_opic_sdiv(l1
, l2
); break;
2249 case '%': l1
= l1
- l2
* gen_opic_sdiv(l1
, l2
); break;
2250 case TOK_UDIV
: l1
= l1
/ l2
; break;
2251 case TOK_UMOD
: l1
= l1
% l2
; break;
2254 case TOK_SHL
: l1
<<= (l2
& shm
); break;
2255 case TOK_SHR
: l1
>>= (l2
& shm
); break;
2257 l1
= (l1
>> 63) ? ~(~l1
>> (l2
& shm
)) : l1
>> (l2
& shm
);
2260 case TOK_ULT
: l1
= l1
< l2
; break;
2261 case TOK_UGE
: l1
= l1
>= l2
; break;
2262 case TOK_EQ
: l1
= l1
== l2
; break;
2263 case TOK_NE
: l1
= l1
!= l2
; break;
2264 case TOK_ULE
: l1
= l1
<= l2
; break;
2265 case TOK_UGT
: l1
= l1
> l2
; break;
2266 case TOK_LT
: l1
= gen_opic_lt(l1
, l2
); break;
2267 case TOK_GE
: l1
= !gen_opic_lt(l1
, l2
); break;
2268 case TOK_LE
: l1
= !gen_opic_lt(l2
, l1
); break;
2269 case TOK_GT
: l1
= gen_opic_lt(l2
, l1
); break;
2271 case TOK_LAND
: l1
= l1
&& l2
; break;
2272 case TOK_LOR
: l1
= l1
|| l2
; break;
2276 if (t1
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t1
!= VT_PTR
))
2277 l1
= ((uint32_t)l1
|
2278 (v1
->type
.t
& VT_UNSIGNED
? 0 : -(l1
& 0x80000000)));
2282 /* if commutative ops, put c2 as constant */
2283 if (c1
&& (op
== '+' || op
== '&' || op
== '^' ||
2284 op
== '|' || op
== '*' || op
== TOK_EQ
|| op
== TOK_NE
)) {
2286 c2
= c1
; //c = c1, c1 = c2, c2 = c;
2287 l2
= l1
; //l = l1, l1 = l2, l2 = l;
2289 if (!const_wanted
&&
2291 (op
== TOK_SHL
|| op
== TOK_SHR
|| op
== TOK_SAR
)) ||
2292 (l1
== -1 && op
== TOK_SAR
))) {
2293 /* treat (0 << x), (0 >> x) and (-1 >> x) as constant */
2295 } else if (!const_wanted
&&
2296 c2
&& ((l2
== 0 && (op
== '&' || op
== '*')) ||
2298 (l2
== -1 || (l2
== 0xFFFFFFFF && t2
!= VT_LLONG
))) ||
2299 (l2
== 1 && (op
== '%' || op
== TOK_UMOD
)))) {
2300 /* treat (x & 0), (x * 0), (x | -1) and (x % 1) as constant */
2305 } else if (c2
&& (((op
== '*' || op
== '/' || op
== TOK_UDIV
||
2308 ((op
== '+' || op
== '-' || op
== '|' || op
== '^' ||
2309 op
== TOK_SHL
|| op
== TOK_SHR
|| op
== TOK_SAR
) &&
2312 (l2
== -1 || (l2
== 0xFFFFFFFF && t2
!= VT_LLONG
))))) {
2313 /* filter out NOP operations like x*1, x-0, x&-1... */
2315 } else if (c2
&& (op
== '*' || op
== TOK_PDIV
|| op
== TOK_UDIV
)) {
2316 /* try to use shifts instead of muls or divs */
2317 if (l2
> 0 && (l2
& (l2
- 1)) == 0) {
2326 else if (op
== TOK_PDIV
)
2332 } else if (c2
&& (op
== '+' || op
== '-') &&
2333 (((vtop
[-1].r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == (VT_CONST
| VT_SYM
))
2334 || (vtop
[-1].r
& (VT_VALMASK
| VT_LVAL
)) == VT_LOCAL
)) {
2335 /* symbol + constant case */
2339 /* The backends can't always deal with addends to symbols
2340 larger than +-1<<31. Don't construct such. */
2347 /* call low level op generator */
2348 if (t1
== VT_LLONG
|| t2
== VT_LLONG
||
2349 (PTR_SIZE
== 8 && (t1
== VT_PTR
|| t2
== VT_PTR
)))
2357 /* generate a floating point operation with constant propagation */
2358 static void gen_opif(int op
)
2362 #if defined _MSC_VER && defined __x86_64__
2363 /* avoid bad optimization with f1 -= f2 for f1:-0.0, f2:0.0 */
2370 /* currently, we cannot do computations with forward symbols */
2371 c1
= (v1
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2372 c2
= (v2
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2374 if (v1
->type
.t
== VT_FLOAT
) {
2377 } else if (v1
->type
.t
== VT_DOUBLE
) {
2385 /* NOTE: we only do constant propagation if finite number (not
2386 NaN or infinity) (ANSI spec) */
2387 if (!ieee_finite(f1
) || !ieee_finite(f2
))
2391 case '+': f1
+= f2
; break;
2392 case '-': f1
-= f2
; break;
2393 case '*': f1
*= f2
; break;
2396 /* If not in initializer we need to potentially generate
2397 FP exceptions at runtime, otherwise we want to fold. */
2403 /* XXX: also handles tests ? */
2407 /* XXX: overflow test ? */
2408 if (v1
->type
.t
== VT_FLOAT
) {
2410 } else if (v1
->type
.t
== VT_DOUBLE
) {
2422 static int pointed_size(CType
*type
)
/* Compile-time size, in bytes, of the type that pointer type 'type'
   points to; the alignment output of type_size() is discarded.
   NOTE(review): the local 'int align;' declaration was lost by the
   line-mangling extraction. */
2425 return type_size(pointed_type(type
), &align
);
2428 static void vla_runtime_pointed_size(CType
*type
)
/* Push the runtime size of the (possibly VLA) type pointed to by
   'type' onto the value stack; the alignment output is discarded.
   NOTE(review): the local 'int align;' declaration was lost by the
   line-mangling extraction. */
2431 vla_runtime_type_size(pointed_type(type
), &align
);
2434 static inline int is_null_pointer(SValue
*p
)
/* True when 'p' is a null-pointer constant in the C sense: the integer
   constant 0 (int or long long), or a zero-valued pointer to
   unqualified void. Non-constant values never qualify. */
2436 if ((p
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) != VT_CONST
)
/* NOTE(review): the 'return 0;' for the non-constant case was lost by
   the line-mangling extraction. */
2438 return ((p
->type
.t
& VT_BTYPE
) == VT_INT
&& (uint32_t)p
->c
.i
== 0) ||
2439 ((p
->type
.t
& VT_BTYPE
) == VT_LLONG
&& p
->c
.i
== 0) ||
2440 ((p
->type
.t
& VT_BTYPE
) == VT_PTR
&&
2441 (PTR_SIZE
== 4 ? (uint32_t)p
->c
.i
== 0 : p
->c
.i
== 0) &&
2442 ((pointed_type(&p
->type
)->t
& VT_BTYPE
) == VT_VOID
) &&
2443 0 == (pointed_type(&p
->type
)->t
& (VT_CONSTANT
| VT_VOLATILE
))
2447 /* check types for comparison or subtraction of pointers */
2448 static void check_comparison_pointer_types(SValue
*p1
, SValue
*p2
, int op
)
2450 CType
*type1
, *type2
, tmp_type1
, tmp_type2
;
2453 /* null pointers are accepted for all comparisons as gcc */
2454 if (is_null_pointer(p1
) || is_null_pointer(p2
))
2458 bt1
= type1
->t
& VT_BTYPE
;
2459 bt2
= type2
->t
& VT_BTYPE
;
2460 /* accept comparison between pointer and integer with a warning */
2461 if ((is_integer_btype(bt1
) || is_integer_btype(bt2
)) && op
!= '-') {
2462 if (op
!= TOK_LOR
&& op
!= TOK_LAND
)
2463 tcc_warning("comparison between pointer and integer");
2467 /* both must be pointers or implicit function pointers */
2468 if (bt1
== VT_PTR
) {
2469 type1
= pointed_type(type1
);
2470 } else if (bt1
!= VT_FUNC
)
2471 goto invalid_operands
;
2473 if (bt2
== VT_PTR
) {
2474 type2
= pointed_type(type2
);
2475 } else if (bt2
!= VT_FUNC
) {
2477 tcc_error("invalid operands to binary %s", get_tok_str(op
, NULL
));
2479 if ((type1
->t
& VT_BTYPE
) == VT_VOID
||
2480 (type2
->t
& VT_BTYPE
) == VT_VOID
)
2484 tmp_type1
.t
&= ~(VT_DEFSIGN
| VT_UNSIGNED
| VT_CONSTANT
| VT_VOLATILE
);
2485 tmp_type2
.t
&= ~(VT_DEFSIGN
| VT_UNSIGNED
| VT_CONSTANT
| VT_VOLATILE
);
2486 if (!is_compatible_types(&tmp_type1
, &tmp_type2
)) {
2487 /* gcc-like error if '-' is used */
2489 goto invalid_operands
;
2491 tcc_warning("comparison of distinct pointer types lacks a cast");
2495 /* generic gen_op: handles types problems */
2496 ST_FUNC
void gen_op(int op
)
2498 int u
, t1
, t2
, bt1
, bt2
, t
;
2502 t1
= vtop
[-1].type
.t
;
2503 t2
= vtop
[0].type
.t
;
2504 bt1
= t1
& VT_BTYPE
;
2505 bt2
= t2
& VT_BTYPE
;
2507 if (bt1
== VT_STRUCT
|| bt2
== VT_STRUCT
) {
2508 tcc_error("operation on a struct");
2509 } else if (bt1
== VT_FUNC
|| bt2
== VT_FUNC
) {
2510 if (bt2
== VT_FUNC
) {
2511 mk_pointer(&vtop
->type
);
2514 if (bt1
== VT_FUNC
) {
2516 mk_pointer(&vtop
->type
);
2521 } else if (bt1
== VT_PTR
|| bt2
== VT_PTR
) {
2522 /* at least one operand is a pointer */
2523 /* relational op: must be both pointers */
2524 if (op
>= TOK_ULT
&& op
<= TOK_LOR
) {
2525 check_comparison_pointer_types(vtop
- 1, vtop
, op
);
2526 /* pointers are handled are unsigned */
2528 t
= VT_LLONG
| VT_UNSIGNED
;
2530 t
= VT_INT
| VT_UNSIGNED
;
2534 /* if both pointers, then it must be the '-' op */
2535 if (bt1
== VT_PTR
&& bt2
== VT_PTR
) {
2537 tcc_error("cannot use pointers here");
2538 check_comparison_pointer_types(vtop
- 1, vtop
, op
);
2539 /* XXX: check that types are compatible */
2540 if (vtop
[-1].type
.t
& VT_VLA
) {
2541 vla_runtime_pointed_size(&vtop
[-1].type
);
2543 vpushi(pointed_size(&vtop
[-1].type
));
2547 vtop
->type
.t
= ptrdiff_type
.t
;
2551 /* exactly one pointer : must be '+' or '-'. */
2552 if (op
!= '-' && op
!= '+')
2553 tcc_error("cannot use pointers here");
2554 /* Put pointer as first operand */
2555 if (bt2
== VT_PTR
) {
2557 t
= t1
, t1
= t2
, t2
= t
;
2560 if ((vtop
[0].type
.t
& VT_BTYPE
) == VT_LLONG
)
2561 /* XXX: truncate here because gen_opl can't handle ptr + long long */
2564 type1
= vtop
[-1].type
;
2565 type1
.t
&= ~VT_ARRAY
;
2566 if (vtop
[-1].type
.t
& VT_VLA
)
2567 vla_runtime_pointed_size(&vtop
[-1].type
);
2569 u
= pointed_size(&vtop
[-1].type
);
2571 tcc_error("unknown array element size");
2575 /* XXX: cast to int ? (long long case) */
2580 #ifdef CONFIG_TCC_BCHECK
2581 if (tcc_state
->do_bounds_check
&& !const_wanted
) {
2582 /* if bounded pointers, we generate a special code to
2589 vtop
[-1].r
&= ~VT_MUSTBOUND
;
2590 gen_bounded_ptr_add();
2596 /* put again type if gen_opic() swaped operands */
2599 } else if (is_float(bt1
) || is_float(bt2
)) {
2600 /* compute bigger type and do implicit casts */
2601 if (bt1
== VT_LDOUBLE
|| bt2
== VT_LDOUBLE
) {
2603 } else if (bt1
== VT_DOUBLE
|| bt2
== VT_DOUBLE
) {
2608 /* floats can only be used for a few operations */
2609 if (op
!= '+' && op
!= '-' && op
!= '*' && op
!= '/' &&
2610 (op
< TOK_ULT
|| op
> TOK_GT
))
2611 tcc_error("invalid operands for binary operation");
2613 } else if (op
== TOK_SHR
|| op
== TOK_SAR
|| op
== TOK_SHL
) {
2614 t
= bt1
== VT_LLONG
? VT_LLONG
: VT_INT
;
2615 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (t
| VT_UNSIGNED
))
2617 t
|= (VT_LONG
& t1
);
2619 } else if (bt1
== VT_LLONG
|| bt2
== VT_LLONG
) {
2620 /* cast to biggest op */
2621 t
= VT_LLONG
| VT_LONG
;
2622 if (bt1
== VT_LLONG
)
2624 if (bt2
== VT_LLONG
)
2626 /* convert to unsigned if it does not fit in a long long */
2627 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
) ||
2628 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
))
2632 /* integer operations */
2633 t
= VT_INT
| (VT_LONG
& (t1
| t2
));
2634 /* convert to unsigned if it does not fit in an integer */
2635 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
) ||
2636 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
))
2639 /* XXX: currently, some unsigned operations are explicit, so
2640 we modify them here */
2641 if (t
& VT_UNSIGNED
) {
2648 else if (op
== TOK_LT
)
2650 else if (op
== TOK_GT
)
2652 else if (op
== TOK_LE
)
2654 else if (op
== TOK_GE
)
2662 /* special case for shifts and long long: we keep the shift as
2664 if (op
== TOK_SHR
|| op
== TOK_SAR
|| op
== TOK_SHL
)
2671 if (op
>= TOK_ULT
&& op
<= TOK_GT
) {
2672 /* relational op: the result is an int */
2673 vtop
->type
.t
= VT_INT
;
2678 // Make sure that we have converted to an rvalue:
2679 if (vtop
->r
& VT_LVAL
)
2680 gv(is_float(vtop
->type
.t
& VT_BTYPE
) ? RC_FLOAT
: RC_INT
);
2683 #if defined TCC_TARGET_ARM64 || defined TCC_TARGET_RISCV64 || defined TCC_TARGET_ARM
2684 #define gen_cvt_itof1 gen_cvt_itof
2686 /* generic itof for unsigned long long case */
2687 static void gen_cvt_itof1(int t
)
2689 if ((vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
)) ==
2690 (VT_LLONG
| VT_UNSIGNED
)) {
2693 vpush_global_sym(&func_old_type
, TOK___floatundisf
);
2694 #if LDOUBLE_SIZE != 8
2695 else if (t
== VT_LDOUBLE
)
2696 vpush_global_sym(&func_old_type
, TOK___floatundixf
);
2699 vpush_global_sym(&func_old_type
, TOK___floatundidf
);
2710 #if defined TCC_TARGET_ARM64 || defined TCC_TARGET_RISCV64
2711 #define gen_cvt_ftoi1 gen_cvt_ftoi
2713 /* generic ftoi for unsigned long long case */
2714 static void gen_cvt_ftoi1(int t
)
2717 if (t
== (VT_LLONG
| VT_UNSIGNED
)) {
2718 /* not handled natively */
2719 st
= vtop
->type
.t
& VT_BTYPE
;
2721 vpush_global_sym(&func_old_type
, TOK___fixunssfdi
);
2722 #if LDOUBLE_SIZE != 8
2723 else if (st
== VT_LDOUBLE
)
2724 vpush_global_sym(&func_old_type
, TOK___fixunsxfdi
);
2727 vpush_global_sym(&func_old_type
, TOK___fixunsdfdi
);
2738 /* force char or short cast */
2739 static void force_charshort_cast(int t
)
2743 /* cannot cast static initializers */
2744 if (STATIC_DATA_WANTED
)
2748 /* XXX: add optimization if lvalue : just change type and offset */
2753 if (t
& VT_UNSIGNED
) {
2754 vpushi((1 << bits
) - 1);
2757 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
)
2763 /* result must be signed or the SAR is converted to an SHL
2764 This was not the case when "t" was a signed short
2765 and the last value on the stack was an unsigned int */
2766 vtop
->type
.t
&= ~VT_UNSIGNED
;
2772 /* cast 'vtop' to 'type'. Casting to bitfields is forbidden. */
2773 static void gen_cast_s(int t
)
/* NOTE(review): the wrapper body was lost by the line-mangling
   extraction; presumably it builds a CType with .t = t (no ref) and
   forwards to gen_cast() -- confirm against upstream tccgen.c. */
2781 static void gen_cast(CType
*type
)
2783 int sbt
, dbt
, sf
, df
, c
, p
;
2785 /* special delayed cast for char/short */
2786 /* XXX: in some cases (multiple cascaded casts), it may still
2788 if (vtop
->r
& VT_MUSTCAST
) {
2789 vtop
->r
&= ~VT_MUSTCAST
;
2790 force_charshort_cast(vtop
->type
.t
);
2793 /* bitfields first get cast to ints */
2794 if (vtop
->type
.t
& VT_BITFIELD
) {
2798 dbt
= type
->t
& (VT_BTYPE
| VT_UNSIGNED
);
2799 sbt
= vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
);
2804 c
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2805 p
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == (VT_CONST
| VT_SYM
);
2806 #if !defined TCC_IS_NATIVE && !defined TCC_IS_NATIVE_387
2807 c
&= (dbt
!= VT_LDOUBLE
) | !!nocode_wanted
;
2810 /* constant case: we can do it now */
2811 /* XXX: in ISOC, cannot do it if error in convert */
2812 if (sbt
== VT_FLOAT
)
2813 vtop
->c
.ld
= vtop
->c
.f
;
2814 else if (sbt
== VT_DOUBLE
)
2815 vtop
->c
.ld
= vtop
->c
.d
;
2818 if ((sbt
& VT_BTYPE
) == VT_LLONG
) {
2819 if ((sbt
& VT_UNSIGNED
) || !(vtop
->c
.i
>> 63))
2820 vtop
->c
.ld
= vtop
->c
.i
;
2822 vtop
->c
.ld
= -(long double)-vtop
->c
.i
;
2824 if ((sbt
& VT_UNSIGNED
) || !(vtop
->c
.i
>> 31))
2825 vtop
->c
.ld
= (uint32_t)vtop
->c
.i
;
2827 vtop
->c
.ld
= -(long double)-(uint32_t)vtop
->c
.i
;
2830 if (dbt
== VT_FLOAT
)
2831 vtop
->c
.f
= (float)vtop
->c
.ld
;
2832 else if (dbt
== VT_DOUBLE
)
2833 vtop
->c
.d
= (double)vtop
->c
.ld
;
2834 } else if (sf
&& dbt
== (VT_LLONG
|VT_UNSIGNED
)) {
2835 vtop
->c
.i
= vtop
->c
.ld
;
2836 } else if (sf
&& dbt
== VT_BOOL
) {
2837 vtop
->c
.i
= (vtop
->c
.ld
!= 0);
2840 vtop
->c
.i
= vtop
->c
.ld
;
2841 else if (sbt
== (VT_LLONG
|VT_UNSIGNED
))
2843 else if (sbt
& VT_UNSIGNED
)
2844 vtop
->c
.i
= (uint32_t)vtop
->c
.i
;
2846 else if (sbt
== VT_PTR
)
2849 else if (sbt
!= VT_LLONG
)
2850 vtop
->c
.i
= ((uint32_t)vtop
->c
.i
|
2851 -(vtop
->c
.i
& 0x80000000));
2853 if (dbt
== (VT_LLONG
|VT_UNSIGNED
))
2855 else if (dbt
== VT_BOOL
)
2856 vtop
->c
.i
= (vtop
->c
.i
!= 0);
2858 else if (dbt
== VT_PTR
)
2861 else if (dbt
!= VT_LLONG
) {
2862 uint32_t m
= ((dbt
& VT_BTYPE
) == VT_BYTE
? 0xff :
2863 (dbt
& VT_BTYPE
) == VT_SHORT
? 0xffff :
2866 if (!(dbt
& VT_UNSIGNED
))
2867 vtop
->c
.i
|= -(vtop
->c
.i
& ((m
>> 1) + 1));
2870 } else if (p
&& dbt
== VT_BOOL
) {
2874 /* non constant case: generate code */
2876 /* convert from fp to fp */
2879 /* convert int to fp */
2882 /* convert fp to int */
2883 if (dbt
== VT_BOOL
) {
2887 /* we handle char/short/etc... with generic code */
2888 if (dbt
!= (VT_INT
| VT_UNSIGNED
) &&
2889 dbt
!= (VT_LLONG
| VT_UNSIGNED
) &&
2893 if (dbt
== VT_INT
&& (type
->t
& (VT_BTYPE
| VT_UNSIGNED
)) != dbt
) {
2894 /* additional cast for char/short... */
2900 } else if ((dbt
& VT_BTYPE
) == VT_LLONG
) {
2901 if ((sbt
& VT_BTYPE
) != VT_LLONG
) {
2902 /* scalar to long long */
2903 /* machine independent conversion */
2905 /* generate high word */
2906 if (sbt
== (VT_INT
| VT_UNSIGNED
)) {
2910 if (sbt
== VT_PTR
) {
2911 /* cast from pointer to int before we apply
2912 shift operation, which pointers don't support*/
2919 /* patch second register */
2920 vtop
[-1].r2
= vtop
->r
;
2924 } else if ((dbt
& VT_BTYPE
) == VT_LLONG
||
2925 (dbt
& VT_BTYPE
) == VT_PTR
||
2926 (dbt
& VT_BTYPE
) == VT_FUNC
) {
2927 if ((sbt
& VT_BTYPE
) != VT_LLONG
&&
2928 (sbt
& VT_BTYPE
) != VT_PTR
&&
2929 (sbt
& VT_BTYPE
) != VT_FUNC
) {
2930 /* need to convert from 32bit to 64bit */
2932 if (sbt
!= (VT_INT
| VT_UNSIGNED
)) {
2933 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_RISCV64)
2935 #elif defined(TCC_TARGET_X86_64)
2937 /* x86_64 specific: movslq */
2939 o(0xc0 + (REG_VALUE(r
) << 3) + REG_VALUE(r
));
2943 } else if (sbt
& VT_UNSIGNED
) {
2944 #if defined(TCC_TARGET_RISCV64)
2945 /* RISC-V keeps 32bit vals in registers sign-extended.
2946 So here we need a zero-extension. */
2947 vtop
->type
.t
= VT_LLONG
;
2956 } else if (dbt
== VT_BOOL
) {
2957 /* scalar to bool */
2960 } else if ((dbt
& VT_BTYPE
) == VT_BYTE
||
2961 (dbt
& VT_BTYPE
) == VT_SHORT
) {
2962 if (sbt
== VT_PTR
) {
2963 vtop
->type
.t
= VT_INT
;
2964 tcc_warning("nonportable conversion from pointer to char/short");
2966 force_charshort_cast(dbt
);
2967 } else if ((dbt
& VT_BTYPE
) == VT_INT
) {
2969 if ((sbt
& VT_BTYPE
) == VT_LLONG
) {
2971 /* from long long: just take low order word */
2975 if (dbt
& VT_UNSIGNED
) {
2976 /* XXX some architectures (e.g. risc-v) would like it
2977 better for this merely being a 32-to-64 sign or zero-
2980 vtop
->type
.t
|= VT_UNSIGNED
;
2986 /* if lvalue and single word type, nothing to do because
2987 the lvalue already contains the real type size (see
2988 VT_LVAL_xxx constants) */
2991 } else if ((dbt
& VT_BTYPE
) == VT_PTR
&& !(vtop
->r
& VT_LVAL
)) {
2992 /* if we are casting between pointer types,
2993 we must update the VT_LVAL_xxx size */
2994 vtop
->r
= (vtop
->r
& ~VT_LVAL_TYPE
)
2995 | (lvalue_type(type
->ref
->type
.t
) & VT_LVAL_TYPE
);
2998 vtop
->type
.t
&= ~ ( VT_CONSTANT
| VT_VOLATILE
| VT_ARRAY
);
3001 /* return type size as known at compile time. Put alignment at 'a' */
3002 ST_FUNC
int type_size(CType
*type
, int *a
)
3007 bt
= type
->t
& VT_BTYPE
;
3008 if (bt
== VT_STRUCT
) {
3013 } else if (bt
== VT_PTR
) {
3014 if (type
->t
& VT_ARRAY
) {
3018 ts
= type_size(&s
->type
, a
);
3020 if (ts
< 0 && s
->c
< 0)
3028 } else if (IS_ENUM(type
->t
) && type
->ref
->c
< 0) {
3029 return -1; /* incomplete enum */
3030 } else if (bt
== VT_LDOUBLE
) {
3032 return LDOUBLE_SIZE
;
3033 } else if (bt
== VT_DOUBLE
|| bt
== VT_LLONG
) {
3034 #ifdef TCC_TARGET_I386
3035 #ifdef TCC_TARGET_PE
3040 #elif defined(TCC_TARGET_ARM)
3050 } else if (bt
== VT_INT
|| bt
== VT_FLOAT
) {
3053 } else if (bt
== VT_SHORT
) {
3056 } else if (bt
== VT_QLONG
|| bt
== VT_QFLOAT
) {
3060 /* char, void, function, _Bool */
3066 /* push type size as known at runtime time on top of value stack. Put
3068 ST_FUNC
void vla_runtime_type_size(CType
*type
, int *a
)
/* For a VLA: compute the element alignment into *a, then push the
   runtime size previously stored in the local variable at stack offset
   type->ref->c. For a non-VLA type: push the compile-time size. */
3070 if (type
->t
& VT_VLA
) {
3071 type_size(&type
->ref
->type
, a
);
3072 vset(&int_type
, VT_LOCAL
|VT_LVAL
, type
->ref
->c
);
/* NOTE(review): the 'else' introducing the non-VLA branch was lost by
   the line-mangling extraction. */
3074 vpushi(type_size(type
, a
));
3078 /* return the pointed type of t */
3079 static inline CType
*pointed_type(CType
*type
)
3081 return &type
->ref
->type
;
3084 /* modify type so that it is a pointer to type. */
3085 ST_FUNC
void mk_pointer(CType
*type
)
/* Push an anonymous SYM_FIELD symbol carrying the original type, then
   rewrite 'type' in place as VT_PTR while keeping its storage bits.
   NOTE(review): the 'Sym *s;' declaration and the 'type->ref = s;'
   assignment were lost by the line-mangling extraction -- confirm
   against upstream tccgen.c. */
3088 s
= sym_push(SYM_FIELD
, type
, 0, -1);
3089 type
->t
= VT_PTR
| (type
->t
& VT_STORAGE
);
3093 /* compare function types. OLD functions match any new functions */
3094 static int is_compatible_func(CType
*type1
, CType
*type2
)
3100 if (s1
->f
.func_call
!= s2
->f
.func_call
)
3102 if (s1
->f
.func_type
!= s2
->f
.func_type
3103 && s1
->f
.func_type
!= FUNC_OLD
3104 && s2
->f
.func_type
!= FUNC_OLD
)
3106 /* we should check the function return type for FUNC_OLD too
3107 but that causes problems with the internally used support
3108 functions such as TOK_memmove */
3109 if (s1
->f
.func_type
== FUNC_OLD
&& !s1
->next
)
3111 if (s2
->f
.func_type
== FUNC_OLD
&& !s2
->next
)
3114 if (!is_compatible_unqualified_types(&s1
->type
, &s2
->type
))
3125 /* return true if type1 and type2 are the same. If unqualified is
3126 true, qualifiers on the types are ignored.
3128 static int compare_types(CType
*type1
, CType
*type2
, int unqualified
)
3132 t1
= type1
->t
& VT_TYPE
;
3133 t2
= type2
->t
& VT_TYPE
;
3135 /* strip qualifiers before comparing */
3136 t1
&= ~(VT_CONSTANT
| VT_VOLATILE
);
3137 t2
&= ~(VT_CONSTANT
| VT_VOLATILE
);
3140 /* Default Vs explicit signedness only matters for char */
3141 if ((t1
& VT_BTYPE
) != VT_BYTE
) {
3145 /* XXX: bitfields ? */
3150 && !(type1
->ref
->c
< 0
3151 || type2
->ref
->c
< 0
3152 || type1
->ref
->c
== type2
->ref
->c
))
3155 /* test more complicated cases */
3156 bt1
= t1
& VT_BTYPE
;
3157 if (bt1
== VT_PTR
) {
3158 type1
= pointed_type(type1
);
3159 type2
= pointed_type(type2
);
3160 return is_compatible_types(type1
, type2
);
3161 } else if (bt1
== VT_STRUCT
) {
3162 return (type1
->ref
== type2
->ref
);
3163 } else if (bt1
== VT_FUNC
) {
3164 return is_compatible_func(type1
, type2
);
3165 } else if (IS_ENUM(type1
->t
) || IS_ENUM(type2
->t
)) {
3166 return type1
->ref
== type2
->ref
;
3172 /* return true if type1 and type2 are exactly the same (including
3175 static int is_compatible_types(CType
*type1
, CType
*type2
)
3177 return compare_types(type1
,type2
,0);
3180 /* return true if type1 and type2 are the same (ignoring qualifiers).
3182 static int is_compatible_unqualified_types(CType
*type1
, CType
*type2
)
3184 return compare_types(type1
,type2
,1);
3187 /* print a type. If 'varstr' is not NULL, then the variable is also
3188 printed in the type */
3190 /* XXX: add array and function pointers */
3191 static void type_to_str(char *buf
, int buf_size
,
3192 CType
*type
, const char *varstr
)
3204 pstrcat(buf
, buf_size
, "extern ");
3206 pstrcat(buf
, buf_size
, "static ");
3208 pstrcat(buf
, buf_size
, "typedef ");
3210 pstrcat(buf
, buf_size
, "inline ");
3211 if (t
& VT_VOLATILE
)
3212 pstrcat(buf
, buf_size
, "volatile ");
3213 if (t
& VT_CONSTANT
)
3214 pstrcat(buf
, buf_size
, "const ");
3216 if (((t
& VT_DEFSIGN
) && bt
== VT_BYTE
)
3217 || ((t
& VT_UNSIGNED
)
3218 && (bt
== VT_SHORT
|| bt
== VT_INT
|| bt
== VT_LLONG
)
3221 pstrcat(buf
, buf_size
, (t
& VT_UNSIGNED
) ? "unsigned " : "signed ");
3223 buf_size
-= strlen(buf
);
3258 tstr
= "long double";
3260 pstrcat(buf
, buf_size
, tstr
);
3267 pstrcat(buf
, buf_size
, tstr
);
3268 v
= type
->ref
->v
& ~SYM_STRUCT
;
3269 if (v
>= SYM_FIRST_ANOM
)
3270 pstrcat(buf
, buf_size
, "<anonymous>");
3272 pstrcat(buf
, buf_size
, get_tok_str(v
, NULL
));
3277 if (varstr
&& '*' == *varstr
) {
3278 pstrcat(buf1
, sizeof(buf1
), "(");
3279 pstrcat(buf1
, sizeof(buf1
), varstr
);
3280 pstrcat(buf1
, sizeof(buf1
), ")");
3282 pstrcat(buf1
, buf_size
, "(");
3284 while (sa
!= NULL
) {
3286 type_to_str(buf2
, sizeof(buf2
), &sa
->type
, NULL
);
3287 pstrcat(buf1
, sizeof(buf1
), buf2
);
3290 pstrcat(buf1
, sizeof(buf1
), ", ");
3292 if (s
->f
.func_type
== FUNC_ELLIPSIS
)
3293 pstrcat(buf1
, sizeof(buf1
), ", ...");
3294 pstrcat(buf1
, sizeof(buf1
), ")");
3295 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
3300 if (varstr
&& '*' == *varstr
)
3301 snprintf(buf1
, sizeof(buf1
), "(%s)[%d]", varstr
, s
->c
);
3303 snprintf(buf1
, sizeof(buf1
), "%s[%d]", varstr
? varstr
: "", s
->c
);
3304 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
3307 pstrcpy(buf1
, sizeof(buf1
), "*");
3308 if (t
& VT_CONSTANT
)
3309 pstrcat(buf1
, buf_size
, "const ");
3310 if (t
& VT_VOLATILE
)
3311 pstrcat(buf1
, buf_size
, "volatile ");
3313 pstrcat(buf1
, sizeof(buf1
), varstr
);
3314 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
3318 pstrcat(buf
, buf_size
, " ");
3319 pstrcat(buf
, buf_size
, varstr
);
3324 /* verify type compatibility to store vtop in 'dt' type */
3325 static void verify_assign_cast(CType
*dt
)
3327 CType
*st
, *type1
, *type2
;
3328 char buf1
[256], buf2
[256];
3329 int dbt
, sbt
, qualwarn
, lvl
;
3331 st
= &vtop
->type
; /* source type */
3332 dbt
= dt
->t
& VT_BTYPE
;
3333 sbt
= st
->t
& VT_BTYPE
;
3334 if (sbt
== VT_VOID
|| dbt
== VT_VOID
) {
3335 if (sbt
== VT_VOID
&& dbt
== VT_VOID
)
3336 ; /* It is Ok if both are void */
3338 tcc_error("cannot cast from/to void");
3340 if (dt
->t
& VT_CONSTANT
)
3341 tcc_warning("assignment of read-only location");
3344 /* special cases for pointers */
3345 /* '0' can also be a pointer */
3346 if (is_null_pointer(vtop
))
3348 /* accept implicit pointer to integer cast with warning */
3349 if (is_integer_btype(sbt
)) {
3350 tcc_warning("assignment makes pointer from integer without a cast");
3353 type1
= pointed_type(dt
);
3355 type2
= pointed_type(st
);
3356 else if (sbt
== VT_FUNC
)
3357 type2
= st
; /* a function is implicitly a function pointer */
3360 if (is_compatible_types(type1
, type2
))
3362 for (qualwarn
= lvl
= 0;; ++lvl
) {
3363 if (((type2
->t
& VT_CONSTANT
) && !(type1
->t
& VT_CONSTANT
)) ||
3364 ((type2
->t
& VT_VOLATILE
) && !(type1
->t
& VT_VOLATILE
)))
3366 dbt
= type1
->t
& (VT_BTYPE
|VT_LONG
);
3367 sbt
= type2
->t
& (VT_BTYPE
|VT_LONG
);
3368 if (dbt
!= VT_PTR
|| sbt
!= VT_PTR
)
3370 type1
= pointed_type(type1
);
3371 type2
= pointed_type(type2
);
3373 if (!is_compatible_unqualified_types(type1
, type2
)) {
3374 if ((dbt
== VT_VOID
|| sbt
== VT_VOID
) && lvl
== 0) {
3375 /* void * can match anything */
3376 } else if (dbt
== sbt
3377 && is_integer_btype(sbt
& VT_BTYPE
)
3378 && IS_ENUM(type1
->t
) + IS_ENUM(type2
->t
)
3379 + !!((type1
->t
^ type2
->t
) & VT_UNSIGNED
) < 2) {
3380 /* Like GCC don't warn by default for merely changes
3381 in pointer target signedness. Do warn for different
3382 base types, though, in particular for unsigned enums
3383 and signed int targets. */
3385 tcc_warning("assignment from incompatible pointer type");
3390 tcc_warning("assignment discards qualifiers from pointer target type");
3396 if (sbt
== VT_PTR
|| sbt
== VT_FUNC
) {
3397 tcc_warning("assignment makes integer from pointer without a cast");
3398 } else if (sbt
== VT_STRUCT
) {
3399 goto case_VT_STRUCT
;
3401 /* XXX: more tests */
3405 if (!is_compatible_unqualified_types(dt
, st
)) {
3407 type_to_str(buf1
, sizeof(buf1
), st
, NULL
);
3408 type_to_str(buf2
, sizeof(buf2
), dt
, NULL
);
3409 tcc_error("cannot cast '%s' to '%s'", buf1
, buf2
);
3415 static void gen_assign_cast(CType
*dt
)
3417 verify_assign_cast(dt
);
3421 /* store vtop in lvalue pushed on stack */
3422 ST_FUNC
void vstore(void)
3424 int sbt
, dbt
, ft
, r
, size
, align
, bit_size
, bit_pos
, delayed_cast
;
3426 ft
= vtop
[-1].type
.t
;
3427 sbt
= vtop
->type
.t
& VT_BTYPE
;
3428 dbt
= ft
& VT_BTYPE
;
3430 verify_assign_cast(&vtop
[-1].type
);
3432 if (sbt
== VT_STRUCT
) {
3433 /* if structure, only generate pointer */
3434 /* structure assignment : generate memcpy */
3435 /* XXX: optimize if small size */
3436 size
= type_size(&vtop
->type
, &align
);
3440 vtop
->type
.t
= VT_PTR
;
3443 /* address of memcpy() */
3446 vpush_global_sym(&func_old_type
, TOK_memcpy8
);
3447 else if(!(align
& 3))
3448 vpush_global_sym(&func_old_type
, TOK_memcpy4
);
3451 /* Use memmove, rather than memcpy, as dest and src may be same: */
3452 vpush_global_sym(&func_old_type
, TOK_memmove
);
3457 vtop
->type
.t
= VT_PTR
;
3462 /* leave source on stack */
3464 } else if (ft
& VT_BITFIELD
) {
3465 /* bitfield store handling */
3467 /* save lvalue as expression result (example: s.b = s.a = n;) */
3468 vdup(), vtop
[-1] = vtop
[-2];
3470 bit_pos
= BIT_POS(ft
);
3471 bit_size
= BIT_SIZE(ft
);
3472 /* remove bit field info to avoid loops */
3473 vtop
[-1].type
.t
= ft
& ~VT_STRUCT_MASK
;
3475 if (dbt
== VT_BOOL
) {
3476 gen_cast(&vtop
[-1].type
);
3477 vtop
[-1].type
.t
= (vtop
[-1].type
.t
& ~VT_BTYPE
) | (VT_BYTE
| VT_UNSIGNED
);
3479 r
= adjust_bf(vtop
- 1, bit_pos
, bit_size
);
3480 if (dbt
!= VT_BOOL
) {
3481 gen_cast(&vtop
[-1].type
);
3482 dbt
= vtop
[-1].type
.t
& VT_BTYPE
;
3484 if (r
== VT_STRUCT
) {
3485 store_packed_bf(bit_pos
, bit_size
);
3487 unsigned long long mask
= (1ULL << bit_size
) - 1;
3488 if (dbt
!= VT_BOOL
) {
3490 if (dbt
== VT_LLONG
)
3493 vpushi((unsigned)mask
);
3500 /* duplicate destination */
3503 /* load destination, mask and or with source */
3504 if (dbt
== VT_LLONG
)
3505 vpushll(~(mask
<< bit_pos
));
3507 vpushi(~((unsigned)mask
<< bit_pos
));
3512 /* ... and discard */
3515 } else if (dbt
== VT_VOID
) {
3518 /* optimize char/short casts */
3520 if ((dbt
== VT_BYTE
|| dbt
== VT_SHORT
)
3521 && is_integer_btype(sbt
)
3523 if ((vtop
->r
& VT_MUSTCAST
)
3524 && btype_size(dbt
) > btype_size(sbt
)
3526 force_charshort_cast(dbt
);
3529 gen_cast(&vtop
[-1].type
);
3532 #ifdef CONFIG_TCC_BCHECK
3533 /* bound check case */
3534 if (vtop
[-1].r
& VT_MUSTBOUND
) {
3540 gv(RC_TYPE(dbt
)); /* generate value */
3543 vtop
->r
|= VT_MUSTCAST
;
3544 //tcc_warning("deley cast %x -> %x", sbt, dbt);
3545 vtop
->type
.t
= ft
& VT_TYPE
;
3548 /* if lvalue was saved on stack, must read it */
3549 if ((vtop
[-1].r
& VT_VALMASK
) == VT_LLOCAL
) {
3551 r
= get_reg(RC_INT
);
3552 sv
.type
.t
= ptrdiff_type
.t
;
3553 sv
.r
= VT_LOCAL
| VT_LVAL
;
3554 sv
.c
.i
= vtop
[-1].c
.i
;
3556 vtop
[-1].r
= r
| VT_LVAL
;
3559 r
= vtop
->r
& VT_VALMASK
;
3560 /* two word case handling :
3561 store second register at word + 4 (or +8 for x86-64) */
3562 if (USING_TWO_WORDS(dbt
)) {
3563 int load_type
= (dbt
== VT_QFLOAT
) ? VT_DOUBLE
: ptrdiff_type
.t
;
3564 vtop
[-1].type
.t
= load_type
;
3567 /* convert to int to increment easily */
3568 vtop
->type
.t
= ptrdiff_type
.t
;
3574 vtop
[-1].type
.t
= load_type
;
3575 /* XXX: it works because r2 is spilled last ! */
3576 store(vtop
->r2
, vtop
- 1);
3582 vtop
--; /* NOT vpop() because on x86 it would flush the fp stack */
3586 /* post defines POST/PRE add. c is the token ++ or -- */
3587 ST_FUNC
void inc(int post
, int c
)
3590 vdup(); /* save lvalue */
3592 gv_dup(); /* duplicate value */
3597 vpushi(c
- TOK_MID
);
3599 vstore(); /* store value */
3601 vpop(); /* if post op, return saved value */
3604 ST_FUNC
void parse_mult_str (CString
*astr
, const char *msg
)
3606 /* read the string */
3610 while (tok
== TOK_STR
) {
3611 /* XXX: add \0 handling too ? */
3612 cstr_cat(astr
, tokc
.str
.data
, -1);
3615 cstr_ccat(astr
, '\0');
3618 /* If I is >= 1 and a power of two, returns log2(i)+1.
3619 If I is 0 returns 0. */
3620 static int exact_log2p1(int i
)
3625 for (ret
= 1; i
>= 1 << 8; ret
+= 8)
3636 /* Parse __attribute__((...)) GNUC extension. */
3637 static void parse_attribute(AttributeDef
*ad
)
3643 if (tok
!= TOK_ATTRIBUTE1
&& tok
!= TOK_ATTRIBUTE2
)
3648 while (tok
!= ')') {
3649 if (tok
< TOK_IDENT
)
3650 expect("attribute name");
3662 tcc_warning("implicit declaration of function '%s'",
3663 get_tok_str(tok
, &tokc
));
3664 s
= external_global_sym(tok
, &func_old_type
);
3666 ad
->cleanup_func
= s
;
3671 case TOK_CONSTRUCTOR1
:
3672 case TOK_CONSTRUCTOR2
:
3673 ad
->a
.constructor
= 1;
3675 case TOK_DESTRUCTOR1
:
3676 case TOK_DESTRUCTOR2
:
3677 ad
->a
.destructor
= 1;
3682 parse_mult_str(&astr
, "section name");
3683 ad
->section
= find_section(tcc_state
, (char *)astr
.data
);
3690 parse_mult_str(&astr
, "alias(\"target\")");
3691 ad
->alias_target
= /* save string as token, for later */
3692 tok_alloc((char*)astr
.data
, astr
.size
-1)->tok
;
3696 case TOK_VISIBILITY1
:
3697 case TOK_VISIBILITY2
:
3699 parse_mult_str(&astr
,
3700 "visibility(\"default|hidden|internal|protected\")");
3701 if (!strcmp (astr
.data
, "default"))
3702 ad
->a
.visibility
= STV_DEFAULT
;
3703 else if (!strcmp (astr
.data
, "hidden"))
3704 ad
->a
.visibility
= STV_HIDDEN
;
3705 else if (!strcmp (astr
.data
, "internal"))
3706 ad
->a
.visibility
= STV_INTERNAL
;
3707 else if (!strcmp (astr
.data
, "protected"))
3708 ad
->a
.visibility
= STV_PROTECTED
;
3710 expect("visibility(\"default|hidden|internal|protected\")");
3719 if (n
<= 0 || (n
& (n
- 1)) != 0)
3720 tcc_error("alignment must be a positive power of two");
3725 ad
->a
.aligned
= exact_log2p1(n
);
3726 if (n
!= 1 << (ad
->a
.aligned
- 1))
3727 tcc_error("alignment of %d is larger than implemented", n
);
3739 /* currently, no need to handle it because tcc does not
3740 track unused objects */
3744 ad
->f
.func_noreturn
= 1;
3749 ad
->f
.func_call
= FUNC_CDECL
;
3754 ad
->f
.func_call
= FUNC_STDCALL
;
3756 #ifdef TCC_TARGET_I386
3766 ad
->f
.func_call
= FUNC_FASTCALL1
+ n
- 1;
3772 ad
->f
.func_call
= FUNC_FASTCALLW
;
3779 ad
->attr_mode
= VT_LLONG
+ 1;
3782 ad
->attr_mode
= VT_BYTE
+ 1;
3785 ad
->attr_mode
= VT_SHORT
+ 1;
3789 ad
->attr_mode
= VT_INT
+ 1;
3792 tcc_warning("__mode__(%s) not supported\n", get_tok_str(tok
, NULL
));
3799 ad
->a
.dllexport
= 1;
3801 case TOK_NODECORATE
:
3802 ad
->a
.nodecorate
= 1;
3805 ad
->a
.dllimport
= 1;
3808 if (tcc_state
->warn_unsupported
)
3809 tcc_warning("'%s' attribute ignored", get_tok_str(t
, NULL
));
3810 /* skip parameters */
3812 int parenthesis
= 0;
3816 else if (tok
== ')')
3819 } while (parenthesis
&& tok
!= -1);
3832 static Sym
* find_field (CType
*type
, int v
, int *cumofs
)
3836 while ((s
= s
->next
) != NULL
) {
3837 if ((s
->v
& SYM_FIELD
) &&
3838 (s
->type
.t
& VT_BTYPE
) == VT_STRUCT
&&
3839 (s
->v
& ~SYM_FIELD
) >= SYM_FIRST_ANOM
) {
3840 Sym
*ret
= find_field (&s
->type
, v
, cumofs
);
3852 static void struct_layout(CType
*type
, AttributeDef
*ad
)
3854 int size
, align
, maxalign
, offset
, c
, bit_pos
, bit_size
;
3855 int packed
, a
, bt
, prevbt
, prev_bit_size
;
3856 int pcc
= !tcc_state
->ms_bitfields
;
3857 int pragma_pack
= *tcc_state
->pack_stack_ptr
;
3864 prevbt
= VT_STRUCT
; /* make it never match */
3869 for (f
= type
->ref
->next
; f
; f
= f
->next
) {
3870 if (f
->type
.t
& VT_BITFIELD
)
3871 bit_size
= BIT_SIZE(f
->type
.t
);
3874 size
= type_size(&f
->type
, &align
);
3875 a
= f
->a
.aligned
? 1 << (f
->a
.aligned
- 1) : 0;
3878 if (pcc
&& bit_size
== 0) {
3879 /* in pcc mode, packing does not affect zero-width bitfields */
3882 /* in pcc mode, attribute packed overrides if set. */
3883 if (pcc
&& (f
->a
.packed
|| ad
->a
.packed
))
3886 /* pragma pack overrides align if lesser and packs bitfields always */
3889 if (pragma_pack
< align
)
3890 align
= pragma_pack
;
3891 /* in pcc mode pragma pack also overrides individual align */
3892 if (pcc
&& pragma_pack
< a
)
3896 /* some individual align was specified */
3900 if (type
->ref
->type
.t
== VT_UNION
) {
3901 if (pcc
&& bit_size
>= 0)
3902 size
= (bit_size
+ 7) >> 3;
3907 } else if (bit_size
< 0) {
3909 c
+= (bit_pos
+ 7) >> 3;
3910 c
= (c
+ align
- 1) & -align
;
3919 /* A bit-field. Layout is more complicated. There are two
3920 options: PCC (GCC) compatible and MS compatible */
3922 /* In PCC layout a bit-field is placed adjacent to the
3923 preceding bit-fields, except if:
3925 - an individual alignment was given
3926 - it would overflow its base type container and
3927 there is no packing */
3928 if (bit_size
== 0) {
3930 c
= (c
+ ((bit_pos
+ 7) >> 3) + align
- 1) & -align
;
3932 } else if (f
->a
.aligned
) {
3934 } else if (!packed
) {
3936 int ofs
= ((c
* 8 + bit_pos
) % a8
+ bit_size
+ a8
- 1) / a8
;
3937 if (ofs
> size
/ align
)
3941 /* in pcc mode, long long bitfields have type int if they fit */
3942 if (size
== 8 && bit_size
<= 32)
3943 f
->type
.t
= (f
->type
.t
& ~VT_BTYPE
) | VT_INT
, size
= 4;
3945 while (bit_pos
>= align
* 8)
3946 c
+= align
, bit_pos
-= align
* 8;
3949 /* In PCC layout named bit-fields influence the alignment
3950 of the containing struct using the base types alignment,
3951 except for packed fields (which here have correct align). */
3952 if (f
->v
& SYM_FIRST_ANOM
3953 // && bit_size // ??? gcc on ARM/rpi does that
3958 bt
= f
->type
.t
& VT_BTYPE
;
3959 if ((bit_pos
+ bit_size
> size
* 8)
3960 || (bit_size
> 0) == (bt
!= prevbt
)
3962 c
= (c
+ align
- 1) & -align
;
3965 /* In MS bitfield mode a bit-field run always uses
3966 at least as many bits as the underlying type.
3967 To start a new run it's also required that this
3968 or the last bit-field had non-zero width. */
3969 if (bit_size
|| prev_bit_size
)
3972 /* In MS layout the records alignment is normally
3973 influenced by the field, except for a zero-width
3974 field at the start of a run (but by further zero-width
3975 fields it is again). */
3976 if (bit_size
== 0 && prevbt
!= bt
)
3979 prev_bit_size
= bit_size
;
3982 f
->type
.t
= (f
->type
.t
& ~(0x3f << VT_STRUCT_SHIFT
))
3983 | (bit_pos
<< VT_STRUCT_SHIFT
);
3984 bit_pos
+= bit_size
;
3986 if (align
> maxalign
)
3990 printf("set field %s offset %-2d size %-2d align %-2d",
3991 get_tok_str(f
->v
& ~SYM_FIELD
, NULL
), offset
, size
, align
);
3992 if (f
->type
.t
& VT_BITFIELD
) {
3993 printf(" pos %-2d bits %-2d",
4006 c
+= (bit_pos
+ 7) >> 3;
4008 /* store size and alignment */
4009 a
= bt
= ad
->a
.aligned
? 1 << (ad
->a
.aligned
- 1) : 1;
4013 if (pragma_pack
&& pragma_pack
< maxalign
&& 0 == pcc
) {
4014 /* can happen if individual align for some member was given. In
4015 this case MSVC ignores maxalign when aligning the size */
4020 c
= (c
+ a
- 1) & -a
;
4024 printf("struct size %-2d align %-2d\n\n", c
, a
), fflush(stdout
);
4027 /* check whether we can access bitfields by their type */
4028 for (f
= type
->ref
->next
; f
; f
= f
->next
) {
4032 if (0 == (f
->type
.t
& VT_BITFIELD
))
4036 bit_size
= BIT_SIZE(f
->type
.t
);
4039 bit_pos
= BIT_POS(f
->type
.t
);
4040 size
= type_size(&f
->type
, &align
);
4041 if (bit_pos
+ bit_size
<= size
* 8 && f
->c
+ size
<= c
)
4044 /* try to access the field using a different type */
4045 c0
= -1, s
= align
= 1;
4047 px
= f
->c
* 8 + bit_pos
;
4048 cx
= (px
>> 3) & -align
;
4049 px
= px
- (cx
<< 3);
4052 s
= (px
+ bit_size
+ 7) >> 3;
4062 s
= type_size(&t
, &align
);
4066 if (px
+ bit_size
<= s
* 8 && cx
+ s
<= c
) {
4067 /* update offset and bit position */
4070 f
->type
.t
= (f
->type
.t
& ~(0x3f << VT_STRUCT_SHIFT
))
4071 | (bit_pos
<< VT_STRUCT_SHIFT
);
4075 printf("FIX field %s offset %-2d size %-2d align %-2d "
4076 "pos %-2d bits %-2d\n",
4077 get_tok_str(f
->v
& ~SYM_FIELD
, NULL
),
4078 cx
, s
, align
, px
, bit_size
);
4081 /* fall back to load/store single-byte wise */
4082 f
->auxtype
= VT_STRUCT
;
4084 printf("FIX field %s : load byte-wise\n",
4085 get_tok_str(f
->v
& ~SYM_FIELD
, NULL
));
4091 /* enum/struct/union declaration. u is VT_ENUM/VT_STRUCT/VT_UNION */
4092 static void struct_decl(CType
*type
, int u
)
4094 int v
, c
, size
, align
, flexible
;
4095 int bit_size
, bsize
, bt
;
4097 AttributeDef ad
, ad1
;
4100 memset(&ad
, 0, sizeof ad
);
4102 parse_attribute(&ad
);
4106 /* struct already defined ? return it */
4108 expect("struct/union/enum name");
4110 if (s
&& (s
->sym_scope
== local_scope
|| tok
!= '{')) {
4113 if (u
== VT_ENUM
&& IS_ENUM(s
->type
.t
))
4115 tcc_error("redefinition of '%s'", get_tok_str(v
, NULL
));
4120 /* Record the original enum/struct/union token. */
4121 type1
.t
= u
== VT_ENUM
? u
| VT_INT
| VT_UNSIGNED
: u
;
4123 /* we put an undefined size for struct/union */
4124 s
= sym_push(v
| SYM_STRUCT
, &type1
, 0, -1);
4125 s
->r
= 0; /* default alignment is zero as gcc */
4127 type
->t
= s
->type
.t
;
4133 tcc_error("struct/union/enum already defined");
4135 /* cannot be empty */
4136 /* non empty enums are not allowed */
4139 long long ll
= 0, pl
= 0, nl
= 0;
4142 /* enum symbols have static storage */
4143 t
.t
= VT_INT
|VT_STATIC
|VT_ENUM_VAL
;
4147 expect("identifier");
4149 if (ss
&& !local_stack
)
4150 tcc_error("redefinition of enumerator '%s'",
4151 get_tok_str(v
, NULL
));
4155 ll
= expr_const64();
4157 ss
= sym_push(v
, &t
, VT_CONST
, 0);
4159 *ps
= ss
, ps
= &ss
->next
;
4168 /* NOTE: we accept a trailing comma */
4173 /* set integral type of the enum */
4176 if (pl
!= (unsigned)pl
)
4177 t
.t
= (LONG_SIZE
==8 ? VT_LLONG
|VT_LONG
: VT_LLONG
);
4179 } else if (pl
!= (int)pl
|| nl
!= (int)nl
)
4180 t
.t
= (LONG_SIZE
==8 ? VT_LLONG
|VT_LONG
: VT_LLONG
);
4181 s
->type
.t
= type
->t
= t
.t
| VT_ENUM
;
4183 /* set type for enum members */
4184 for (ss
= s
->next
; ss
; ss
= ss
->next
) {
4186 if (ll
== (int)ll
) /* default is int if it fits */
4188 if (t
.t
& VT_UNSIGNED
) {
4189 ss
->type
.t
|= VT_UNSIGNED
;
4190 if (ll
== (unsigned)ll
)
4193 ss
->type
.t
= (ss
->type
.t
& ~VT_BTYPE
)
4194 | (LONG_SIZE
==8 ? VT_LLONG
|VT_LONG
: VT_LLONG
);
4199 while (tok
!= '}') {
4200 if (!parse_btype(&btype
, &ad1
)) {
4206 tcc_error("flexible array member '%s' not at the end of struct",
4207 get_tok_str(v
, NULL
));
4213 type_decl(&type1
, &ad1
, &v
, TYPE_DIRECT
);
4215 if ((type1
.t
& VT_BTYPE
) != VT_STRUCT
)
4216 expect("identifier");
4218 int v
= btype
.ref
->v
;
4219 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
4220 if (tcc_state
->ms_extensions
== 0)
4221 expect("identifier");
4225 if (type_size(&type1
, &align
) < 0) {
4226 if ((u
== VT_STRUCT
) && (type1
.t
& VT_ARRAY
) && c
)
4229 tcc_error("field '%s' has incomplete type",
4230 get_tok_str(v
, NULL
));
4232 if ((type1
.t
& VT_BTYPE
) == VT_FUNC
||
4233 (type1
.t
& VT_BTYPE
) == VT_VOID
||
4234 (type1
.t
& VT_STORAGE
))
4235 tcc_error("invalid type for '%s'",
4236 get_tok_str(v
, NULL
));
4240 bit_size
= expr_const();
4241 /* XXX: handle v = 0 case for messages */
4243 tcc_error("negative width in bit-field '%s'",
4244 get_tok_str(v
, NULL
));
4245 if (v
&& bit_size
== 0)
4246 tcc_error("zero width for bit-field '%s'",
4247 get_tok_str(v
, NULL
));
4248 parse_attribute(&ad1
);
4250 size
= type_size(&type1
, &align
);
4251 if (bit_size
>= 0) {
4252 bt
= type1
.t
& VT_BTYPE
;
4258 tcc_error("bitfields must have scalar type");
4260 if (bit_size
> bsize
) {
4261 tcc_error("width of '%s' exceeds its type",
4262 get_tok_str(v
, NULL
));
4263 } else if (bit_size
== bsize
4264 && !ad
.a
.packed
&& !ad1
.a
.packed
) {
4265 /* no need for bit fields */
4267 } else if (bit_size
== 64) {
4268 tcc_error("field width 64 not implemented");
4270 type1
.t
= (type1
.t
& ~VT_STRUCT_MASK
)
4272 | (bit_size
<< (VT_STRUCT_SHIFT
+ 6));
4275 if (v
!= 0 || (type1
.t
& VT_BTYPE
) == VT_STRUCT
) {
4276 /* Remember we've seen a real field to check
4277 for placement of flexible array member. */
4280 /* If member is a struct or bit-field, enforce
4281 placing into the struct (as anonymous). */
4283 ((type1
.t
& VT_BTYPE
) == VT_STRUCT
||
4288 ss
= sym_push(v
| SYM_FIELD
, &type1
, 0, 0);
4293 if (tok
== ';' || tok
== TOK_EOF
)
4300 parse_attribute(&ad
);
4301 struct_layout(type
, &ad
);
4306 static void sym_to_attr(AttributeDef
*ad
, Sym
*s
)
4308 merge_symattr(&ad
->a
, &s
->a
);
4309 merge_funcattr(&ad
->f
, &s
->f
);
4312 /* Add type qualifiers to a type. If the type is an array then the qualifiers
4313 are added to the element type, copied because it could be a typedef. */
4314 static void parse_btype_qualify(CType
*type
, int qualifiers
)
4316 while (type
->t
& VT_ARRAY
) {
4317 type
->ref
= sym_push(SYM_FIELD
, &type
->ref
->type
, 0, type
->ref
->c
);
4318 type
= &type
->ref
->type
;
4320 type
->t
|= qualifiers
;
4323 /* return 0 if no type declaration. otherwise, return the basic type
4326 static int parse_btype(CType
*type
, AttributeDef
*ad
)
4328 int t
, u
, bt
, st
, type_found
, typespec_found
, g
, n
;
4332 memset(ad
, 0, sizeof(AttributeDef
));
4342 /* currently, we really ignore extension */
4352 if (u
== VT_SHORT
|| u
== VT_LONG
) {
4353 if (st
!= -1 || (bt
!= -1 && bt
!= VT_INT
))
4354 tmbt
: tcc_error("too many basic types");
4357 if (bt
!= -1 || (st
!= -1 && u
!= VT_INT
))
4362 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | u
;
4379 memset(&ad1
, 0, sizeof(AttributeDef
));
4380 if (parse_btype(&type1
, &ad1
)) {
4381 type_decl(&type1
, &ad1
, &n
, TYPE_ABSTRACT
);
4383 n
= 1 << (ad1
.a
.aligned
- 1);
4385 type_size(&type1
, &n
);
4388 if (n
<= 0 || (n
& (n
- 1)) != 0)
4389 tcc_error("alignment must be a positive power of two");
4392 ad
->a
.aligned
= exact_log2p1(n
);
4396 if ((t
& VT_BTYPE
) == VT_DOUBLE
) {
4397 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_LDOUBLE
;
4398 } else if ((t
& (VT_BTYPE
|VT_LONG
)) == VT_LONG
) {
4399 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_LLONG
;
4406 #ifdef TCC_TARGET_ARM64
4408 /* GCC's __uint128_t appears in some Linux header files. Make it a
4409 synonym for long double to get the size and alignment right. */
4420 if ((t
& (VT_BTYPE
|VT_LONG
)) == VT_LONG
) {
4421 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_LDOUBLE
;
4429 struct_decl(&type1
, VT_ENUM
);
4432 type
->ref
= type1
.ref
;
4435 struct_decl(&type1
, VT_STRUCT
);
4438 struct_decl(&type1
, VT_UNION
);
4441 /* type modifiers */
4446 parse_btype_qualify(type
, VT_CONSTANT
);
4454 parse_btype_qualify(type
, VT_VOLATILE
);
4461 if ((t
& (VT_DEFSIGN
|VT_UNSIGNED
)) == (VT_DEFSIGN
|VT_UNSIGNED
))
4462 tcc_error("signed and unsigned modifier");
4475 if ((t
& (VT_DEFSIGN
|VT_UNSIGNED
)) == VT_DEFSIGN
)
4476 tcc_error("signed and unsigned modifier");
4477 t
|= VT_DEFSIGN
| VT_UNSIGNED
;
4493 if (t
& (VT_EXTERN
|VT_STATIC
|VT_TYPEDEF
) & ~g
)
4494 tcc_error("multiple storage classes");
4506 ad
->f
.func_noreturn
= 1;
4508 /* GNUC attribute */
4509 case TOK_ATTRIBUTE1
:
4510 case TOK_ATTRIBUTE2
:
4511 parse_attribute(ad
);
4512 if (ad
->attr_mode
) {
4513 u
= ad
->attr_mode
-1;
4514 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | u
;
4522 parse_expr_type(&type1
);
4523 /* remove all storage modifiers except typedef */
4524 type1
.t
&= ~(VT_STORAGE
&~VT_TYPEDEF
);
4526 sym_to_attr(ad
, type1
.ref
);
4532 if (!s
|| !(s
->type
.t
& VT_TYPEDEF
))
4536 if (tok
== ':' && !in_generic
) {
4537 /* ignore if it's a label */
4542 t
&= ~(VT_BTYPE
|VT_LONG
);
4543 u
= t
& ~(VT_CONSTANT
| VT_VOLATILE
), t
^= u
;
4544 type
->t
= (s
->type
.t
& ~VT_TYPEDEF
) | u
;
4545 type
->ref
= s
->type
.ref
;
4547 parse_btype_qualify(type
, t
);
4549 /* get attributes from typedef */
4558 if (tcc_state
->char_is_unsigned
) {
4559 if ((t
& (VT_DEFSIGN
|VT_BTYPE
)) == VT_BYTE
)
4562 /* VT_LONG is used just as a modifier for VT_INT / VT_LLONG */
4563 bt
= t
& (VT_BTYPE
|VT_LONG
);
4565 t
|= LONG_SIZE
== 8 ? VT_LLONG
: VT_INT
;
4566 #ifdef TCC_TARGET_PE
4567 if (bt
== VT_LDOUBLE
)
4568 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_DOUBLE
;
4574 /* convert a function parameter type (array to pointer and function to
4575 function pointer) */
4576 static inline void convert_parameter_type(CType
*pt
)
4578 /* remove const and volatile qualifiers (XXX: const could be used
4579 to indicate a const function parameter */
4580 pt
->t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
4581 /* array must be transformed to pointer according to ANSI C */
4583 if ((pt
->t
& VT_BTYPE
) == VT_FUNC
) {
4588 ST_FUNC
void parse_asm_str(CString
*astr
)
4591 parse_mult_str(astr
, "string constant");
4594 /* Parse an asm label and return the token */
4595 static int asm_label_instr(void)
4601 parse_asm_str(&astr
);
4604 printf("asm_alias: \"%s\"\n", (char *)astr
.data
);
4606 v
= tok_alloc(astr
.data
, astr
.size
- 1)->tok
;
4611 static int post_type(CType
*type
, AttributeDef
*ad
, int storage
, int td
)
4613 int n
, l
, t1
, arg_size
, align
, unused_align
;
4614 Sym
**plast
, *s
, *first
;
4619 /* function type, or recursive declarator (return if so) */
4621 if (td
&& !(td
& TYPE_ABSTRACT
))
4625 else if (parse_btype(&pt
, &ad1
))
4628 merge_attr (ad
, &ad1
);
4637 /* read param name and compute offset */
4638 if (l
!= FUNC_OLD
) {
4639 if ((pt
.t
& VT_BTYPE
) == VT_VOID
&& tok
== ')')
4641 type_decl(&pt
, &ad1
, &n
, TYPE_DIRECT
| TYPE_ABSTRACT
);
4642 if ((pt
.t
& VT_BTYPE
) == VT_VOID
)
4643 tcc_error("parameter declared as void");
4647 expect("identifier");
4648 pt
.t
= VT_VOID
; /* invalid type */
4652 convert_parameter_type(&pt
);
4653 arg_size
+= (type_size(&pt
, &align
) + PTR_SIZE
- 1) / PTR_SIZE
;
4654 s
= sym_push(n
| SYM_FIELD
, &pt
, 0, 0);
4660 if (l
== FUNC_NEW
&& tok
== TOK_DOTS
) {
4665 if (l
== FUNC_NEW
&& !parse_btype(&pt
, &ad1
))
4666 tcc_error("invalid type");
4669 /* if no parameters, then old type prototype */
4672 /* NOTE: const is ignored in returned type as it has a special
4673 meaning in gcc / C++ */
4674 type
->t
&= ~VT_CONSTANT
;
4675 /* some ancient pre-K&R C allows a function to return an array
4676 and the array brackets to be put after the arguments, such
4677 that "int c()[]" means something like "int[] c()" */
4680 skip(']'); /* only handle simple "[]" */
4683 /* we push a anonymous symbol which will contain the function prototype */
4684 ad
->f
.func_args
= arg_size
;
4685 ad
->f
.func_type
= l
;
4686 s
= sym_push(SYM_FIELD
, type
, 0, 0);
4692 } else if (tok
== '[') {
4693 int saved_nocode_wanted
= nocode_wanted
;
4694 /* array definition */
4697 /* XXX The optional type-quals and static should only be accepted
4698 in parameter decls. The '*' as well, and then even only
4699 in prototypes (not function defs). */
4701 case TOK_RESTRICT1
: case TOK_RESTRICT2
: case TOK_RESTRICT3
:
4716 if (!local_stack
|| (storage
& VT_STATIC
))
4717 vpushi(expr_const());
4719 /* VLAs (which can only happen with local_stack && !VT_STATIC)
4720 length must always be evaluated, even under nocode_wanted,
4721 so that its size slot is initialized (e.g. under sizeof
4726 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
4729 tcc_error("invalid array size");
4731 if (!is_integer_btype(vtop
->type
.t
& VT_BTYPE
))
4732 tcc_error("size of variable length array should be an integer");
4738 /* parse next post type */
4739 post_type(type
, ad
, storage
, 0);
4741 if ((type
->t
& VT_BTYPE
) == VT_FUNC
)
4742 tcc_error("declaration of an array of functions");
4743 if ((type
->t
& VT_BTYPE
) == VT_VOID
4744 || type_size(type
, &unused_align
) < 0)
4745 tcc_error("declaration of an array of incomplete type elements");
4747 t1
|= type
->t
& VT_VLA
;
4751 tcc_error("need explicit inner array size in VLAs");
4752 loc
-= type_size(&int_type
, &align
);
4756 vla_runtime_type_size(type
, &align
);
4758 vset(&int_type
, VT_LOCAL
|VT_LVAL
, n
);
4764 nocode_wanted
= saved_nocode_wanted
;
4766 /* we push an anonymous symbol which will contain the array
4768 s
= sym_push(SYM_FIELD
, type
, 0, n
);
4769 type
->t
= (t1
? VT_VLA
: VT_ARRAY
) | VT_PTR
;
4775 /* Parse a type declarator (except basic type), and return the type
4776 in 'type'. 'td' is a bitmask indicating which kind of type decl is
4777 expected. 'type' should contain the basic type. 'ad' is the
4778 attribute definition of the basic type. It can be modified by
4779 type_decl(). If this (possibly abstract) declarator is a pointer chain
4780 it returns the innermost pointed to type (equals *type, but is a different
4781 pointer), otherwise returns type itself, that's used for recursive calls. */
4782 static CType
*type_decl(CType
*type
, AttributeDef
*ad
, int *v
, int td
)
4785 int qualifiers
, storage
;
4787 /* recursive type, remove storage bits first, apply them later again */
4788 storage
= type
->t
& VT_STORAGE
;
4789 type
->t
&= ~VT_STORAGE
;
4792 while (tok
== '*') {
4800 qualifiers
|= VT_CONSTANT
;
4805 qualifiers
|= VT_VOLATILE
;
4811 /* XXX: clarify attribute handling */
4812 case TOK_ATTRIBUTE1
:
4813 case TOK_ATTRIBUTE2
:
4814 parse_attribute(ad
);
4818 type
->t
|= qualifiers
;
4820 /* innermost pointed to type is the one for the first derivation */
4821 ret
= pointed_type(type
);
4825 /* This is possibly a parameter type list for abstract declarators
4826 ('int ()'), use post_type for testing this. */
4827 if (!post_type(type
, ad
, 0, td
)) {
4828 /* It's not, so it's a nested declarator, and the post operations
4829 apply to the innermost pointed to type (if any). */
4830 /* XXX: this is not correct to modify 'ad' at this point, but
4831 the syntax is not clear */
4832 parse_attribute(ad
);
4833 post
= type_decl(type
, ad
, v
, td
);
4837 } else if (tok
>= TOK_IDENT
&& (td
& TYPE_DIRECT
)) {
4838 /* type identifier */
4843 if (!(td
& TYPE_ABSTRACT
))
4844 expect("identifier");
4847 post_type(post
, ad
, storage
, 0);
4848 parse_attribute(ad
);
4853 /* compute the lvalue VT_LVAL_xxx needed to match type t. */
4854 ST_FUNC
int lvalue_type(int t
)
4859 if (bt
== VT_BYTE
|| bt
== VT_BOOL
)
4861 else if (bt
== VT_SHORT
)
4865 if (t
& VT_UNSIGNED
)
4866 r
|= VT_LVAL_UNSIGNED
;
4870 /* indirection with full error checking and bound check */
4871 ST_FUNC
void indir(void)
4873 if ((vtop
->type
.t
& VT_BTYPE
) != VT_PTR
) {
4874 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FUNC
)
4878 if (vtop
->r
& VT_LVAL
)
4880 vtop
->type
= *pointed_type(&vtop
->type
);
4881 /* Arrays and functions are never lvalues */
4882 if (!(vtop
->type
.t
& VT_ARRAY
) && !(vtop
->type
.t
& VT_VLA
)
4883 && (vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
) {
4884 vtop
->r
|= lvalue_type(vtop
->type
.t
);
4885 /* if bound checking, the referenced pointer must be checked */
4886 #ifdef CONFIG_TCC_BCHECK
4887 if (tcc_state
->do_bounds_check
)
4888 vtop
->r
|= VT_MUSTBOUND
;
4893 /* pass a parameter to a function and do type checking and casting */
4894 static void gfunc_param_typed(Sym
*func
, Sym
*arg
)
4899 func_type
= func
->f
.func_type
;
4900 if (func_type
== FUNC_OLD
||
4901 (func_type
== FUNC_ELLIPSIS
&& arg
== NULL
)) {
4902 /* default casting : only need to convert float to double */
4903 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FLOAT
) {
4904 gen_cast_s(VT_DOUBLE
);
4905 } else if (vtop
->type
.t
& VT_BITFIELD
) {
4906 type
.t
= vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
);
4907 type
.ref
= vtop
->type
.ref
;
4910 } else if (arg
== NULL
) {
4911 tcc_error("too many arguments to function");
4914 type
.t
&= ~VT_CONSTANT
; /* need to do that to avoid false warning */
4915 gen_assign_cast(&type
);
4919 /* parse an expression and return its type without any side effect. */
4920 static void expr_type(CType
*type
, void (*expr_fn
)(void))
4929 /* parse an expression of the form '(type)' or '(expr)' and return its
4931 static void parse_expr_type(CType
*type
)
4937 if (parse_btype(type
, &ad
)) {
4938 type_decl(type
, &ad
, &n
, TYPE_ABSTRACT
);
4940 expr_type(type
, gexpr
);
4945 static void parse_type(CType
*type
)
4950 if (!parse_btype(type
, &ad
)) {
4953 type_decl(type
, &ad
, &n
, TYPE_ABSTRACT
);
4956 static void parse_builtin_params(int nc
, const char *args
)
4963 while ((c
= *args
++)) {
4967 case 'e': expr_eq(); continue;
4968 case 't': parse_type(&t
); vpush(&t
); continue;
4969 default: tcc_error("internal error"); break;
4977 ST_FUNC
void unary(void)
4979 int n
, t
, align
, size
, r
, sizeof_caller
;
4984 sizeof_caller
= in_sizeof
;
4987 /* XXX: GCC 2.95.3 does not generate a table although it should be
4995 #ifdef TCC_TARGET_PE
4996 t
= VT_SHORT
|VT_UNSIGNED
;
5004 vsetc(&type
, VT_CONST
, &tokc
);
5008 t
= VT_INT
| VT_UNSIGNED
;
5014 t
= VT_LLONG
| VT_UNSIGNED
;
5026 t
= (LONG_SIZE
== 8 ? VT_LLONG
: VT_INT
) | VT_LONG
;
5029 t
= (LONG_SIZE
== 8 ? VT_LLONG
: VT_INT
) | VT_LONG
| VT_UNSIGNED
;
5031 case TOK___FUNCTION__
:
5033 goto tok_identifier
;
5039 /* special function name identifier */
5040 len
= strlen(funcname
) + 1;
5041 /* generate char[len] type */
5046 vpush_ref(&type
, data_section
, data_section
->data_offset
, len
);
5047 if (!NODATA_WANTED
) {
5048 ptr
= section_ptr_add(data_section
, len
);
5049 memcpy(ptr
, funcname
, len
);
5055 #ifdef TCC_TARGET_PE
5056 t
= VT_SHORT
| VT_UNSIGNED
;
5062 /* string parsing */
5064 if (tcc_state
->char_is_unsigned
)
5065 t
= VT_BYTE
| VT_UNSIGNED
;
5067 if (tcc_state
->warn_write_strings
)
5072 memset(&ad
, 0, sizeof(AttributeDef
));
5073 decl_initializer_alloc(&type
, &ad
, VT_CONST
, 2, 0, 0);
5078 if (parse_btype(&type
, &ad
)) {
5079 type_decl(&type
, &ad
, &n
, TYPE_ABSTRACT
);
5081 /* check ISOC99 compound literal */
5083 /* data is allocated locally by default */
5088 /* all except arrays are lvalues */
5089 if (!(type
.t
& VT_ARRAY
))
5090 r
|= lvalue_type(type
.t
);
5091 memset(&ad
, 0, sizeof(AttributeDef
));
5092 decl_initializer_alloc(&type
, &ad
, r
, 1, 0, 0);
5094 if (sizeof_caller
) {
5101 } else if (tok
== '{') {
5102 int saved_nocode_wanted
= nocode_wanted
;
5104 tcc_error("expected constant");
5105 /* save all registers */
5107 /* statement expression : we do not accept break/continue
5108 inside as GCC does. We do retain the nocode_wanted state,
5109 as statement expressions can't ever be entered from the
5110 outside, so any reactivation of code emission (from labels
5111 or loop heads) can be disabled again after the end of it. */
5113 nocode_wanted
= saved_nocode_wanted
;
5128 /* functions names must be treated as function pointers,
5129 except for unary '&' and sizeof. Since we consider that
5130 functions are not lvalues, we only have to handle it
5131 there and in function calls. */
5132 /* arrays can also be used although they are not lvalues */
5133 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
&&
5134 !(vtop
->type
.t
& VT_ARRAY
))
5136 mk_pointer(&vtop
->type
);
5142 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
5143 gen_cast_s(VT_BOOL
);
5144 vtop
->c
.i
= !vtop
->c
.i
;
5145 } else if (vtop
->r
== VT_CMP
) {
5147 n
= vtop
->jfalse
, vtop
->jfalse
= vtop
->jtrue
, vtop
->jtrue
= n
;
5162 if ((vtop
->type
.t
& VT_BTYPE
) == VT_PTR
)
5163 tcc_error("pointer not accepted for unary plus");
5164 /* In order to force cast, we add zero, except for floating point
5165 where we really need an noop (otherwise -0.0 will be transformed
5167 if (!is_float(vtop
->type
.t
)) {
5179 expr_type(&type
, unary
); /* Perform a in_sizeof = 0; */
5181 if (vtop
[1].r
& VT_SYM
)
5182 s
= vtop
[1].sym
; /* hack: accessing previous vtop */
5183 size
= type_size(&type
, &align
);
5184 if (s
&& s
->a
.aligned
)
5185 align
= 1 << (s
->a
.aligned
- 1);
5186 if (t
== TOK_SIZEOF
) {
5187 if (!(type
.t
& VT_VLA
)) {
5189 tcc_error("sizeof applied to an incomplete type");
5192 vla_runtime_type_size(&type
, &align
);
5197 vtop
->type
.t
|= VT_UNSIGNED
;
5200 case TOK_builtin_expect
:
5201 /* __builtin_expect is a no-op for now */
5202 parse_builtin_params(0, "ee");
5205 case TOK_builtin_types_compatible_p
:
5206 parse_builtin_params(0, "tt");
5207 vtop
[-1].type
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
5208 vtop
[0].type
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
5209 n
= is_compatible_types(&vtop
[-1].type
, &vtop
[0].type
);
5213 case TOK_builtin_choose_expr
:
5240 case TOK_builtin_constant_p
:
5241 parse_builtin_params(1, "e");
5242 n
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
5246 case TOK_builtin_frame_address
:
5247 case TOK_builtin_return_address
:
5253 if (tok
!= TOK_CINT
) {
5254 tcc_error("%s only takes positive integers",
5255 tok1
== TOK_builtin_return_address
?
5256 "__builtin_return_address" :
5257 "__builtin_frame_address");
5259 level
= (uint32_t)tokc
.i
;
5264 vset(&type
, VT_LOCAL
, 0); /* local frame */
5266 mk_pointer(&vtop
->type
);
5267 indir(); /* -> parent frame */
5269 if (tok1
== TOK_builtin_return_address
) {
5270 // assume return address is just above frame pointer on stack
5273 mk_pointer(&vtop
->type
);
5278 #ifdef TCC_TARGET_RISCV64
5279 case TOK_builtin_va_start
:
5280 parse_builtin_params(0, "ee");
5281 r
= vtop
->r
& VT_VALMASK
;
5285 tcc_error("__builtin_va_start expects a local variable");
5290 #ifdef TCC_TARGET_X86_64
5291 #ifdef TCC_TARGET_PE
5292 case TOK_builtin_va_start
:
5293 parse_builtin_params(0, "ee");
5294 r
= vtop
->r
& VT_VALMASK
;
5298 tcc_error("__builtin_va_start expects a local variable");
5300 vtop
->type
= char_pointer_type
;
5305 case TOK_builtin_va_arg_types
:
5306 parse_builtin_params(0, "t");
5307 vpushi(classify_x86_64_va_arg(&vtop
->type
));
5314 #ifdef TCC_TARGET_ARM64
5315 case TOK___va_start
: {
5316 parse_builtin_params(0, "ee");
5320 vtop
->type
.t
= VT_VOID
;
5323 case TOK___va_arg
: {
5324 parse_builtin_params(0, "et");
5332 case TOK___arm64_clear_cache
: {
5333 parse_builtin_params(0, "ee");
5336 vtop
->type
.t
= VT_VOID
;
5340 /* pre operations */
5351 t
= vtop
->type
.t
& VT_BTYPE
;
5353 /* In IEEE negate(x) isn't subtract(0,x), but rather
5357 vtop
->c
.f
= -1.0 * 0.0;
5358 else if (t
== VT_DOUBLE
)
5359 vtop
->c
.d
= -1.0 * 0.0;
5361 vtop
->c
.ld
= -1.0 * 0.0;
5369 goto tok_identifier
;
5371 /* allow to take the address of a label */
5372 if (tok
< TOK_UIDENT
)
5373 expect("label identifier");
5374 s
= label_find(tok
);
5376 s
= label_push(&global_label_stack
, tok
, LABEL_FORWARD
);
5378 if (s
->r
== LABEL_DECLARED
)
5379 s
->r
= LABEL_FORWARD
;
5382 s
->type
.t
= VT_VOID
;
5383 mk_pointer(&s
->type
);
5384 s
->type
.t
|= VT_STATIC
;
5386 vpushsym(&s
->type
, s
);
5392 CType controlling_type
;
5393 int has_default
= 0;
5396 TokenString
*str
= NULL
;
5397 int saved_const_wanted
= const_wanted
;
5402 expr_type(&controlling_type
, expr_eq
);
5403 controlling_type
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
| VT_ARRAY
);
5404 if ((controlling_type
.t
& VT_BTYPE
) == VT_FUNC
)
5405 mk_pointer(&controlling_type
);
5406 const_wanted
= saved_const_wanted
;
5410 if (tok
== TOK_DEFAULT
) {
5412 tcc_error("too many 'default'");
5418 AttributeDef ad_tmp
;
5423 parse_btype(&cur_type
, &ad_tmp
);
5426 type_decl(&cur_type
, &ad_tmp
, &itmp
, TYPE_ABSTRACT
);
5427 if (compare_types(&controlling_type
, &cur_type
, 0)) {
5429 tcc_error("type match twice");
5439 skip_or_save_block(&str
);
5441 skip_or_save_block(NULL
);
5448 type_to_str(buf
, sizeof buf
, &controlling_type
, NULL
);
5449 tcc_error("type '%s' does not match any association", buf
);
5451 begin_macro(str
, 1);
5460 // special qnan , snan and infinity values
5465 vtop
->type
.t
= VT_FLOAT
;
5470 goto special_math_val
;
5473 goto special_math_val
;
5480 expect("identifier");
5482 if (!s
|| IS_ASM_SYM(s
)) {
5483 const char *name
= get_tok_str(t
, NULL
);
5485 tcc_error("'%s' undeclared", name
);
5486 /* for simple function calls, we tolerate undeclared
5487 external reference to int() function */
5488 if (tcc_state
->warn_implicit_function_declaration
5489 #ifdef TCC_TARGET_PE
5490 /* people must be warned about using undeclared WINAPI functions
5491 (which usually start with uppercase letter) */
5492 || (name
[0] >= 'A' && name
[0] <= 'Z')
5495 tcc_warning("implicit declaration of function '%s'", name
);
5496 s
= external_global_sym(t
, &func_old_type
);
5500 /* A symbol that has a register is a local register variable,
5501 which starts out as VT_LOCAL value. */
5502 if ((r
& VT_VALMASK
) < VT_CONST
)
5503 r
= (r
& ~VT_VALMASK
) | VT_LOCAL
;
5505 vset(&s
->type
, r
, s
->c
);
5506 /* Point to s as backpointer (even without r&VT_SYM).
5507 Will be used by at least the x86 inline asm parser for
5513 } else if (r
== VT_CONST
&& IS_ENUM_VAL(s
->type
.t
)) {
5514 vtop
->c
.i
= s
->enum_val
;
5519 /* post operations */
5521 if (tok
== TOK_INC
|| tok
== TOK_DEC
) {
5524 } else if (tok
== '.' || tok
== TOK_ARROW
|| tok
== TOK_CDOUBLE
) {
5525 int qualifiers
, cumofs
= 0;
5527 if (tok
== TOK_ARROW
)
5529 qualifiers
= vtop
->type
.t
& (VT_CONSTANT
| VT_VOLATILE
);
5532 /* expect pointer on structure */
5533 if ((vtop
->type
.t
& VT_BTYPE
) != VT_STRUCT
)
5534 expect("struct or union");
5535 if (tok
== TOK_CDOUBLE
)
5536 expect("field name");
5538 if (tok
== TOK_CINT
|| tok
== TOK_CUINT
)
5539 expect("field name");
5540 s
= find_field(&vtop
->type
, tok
, &cumofs
);
5542 tcc_error("field not found: %s", get_tok_str(tok
& ~SYM_FIELD
, &tokc
));
5543 /* add field offset to pointer */
5544 vtop
->type
= char_pointer_type
; /* change type to 'char *' */
5545 vpushi(cumofs
+ s
->c
);
5547 /* change type to field type, and set to lvalue */
5548 vtop
->type
= s
->type
;
5549 vtop
->type
.t
|= qualifiers
;
5550 /* an array is never an lvalue */
5551 if (!(vtop
->type
.t
& VT_ARRAY
)) {
5552 vtop
->r
|= lvalue_type(vtop
->type
.t
);
5553 #ifdef CONFIG_TCC_BCHECK
5554 /* if bound checking, the referenced pointer must be checked */
5555 if (tcc_state
->do_bounds_check
&& (vtop
->r
& VT_VALMASK
) != VT_LOCAL
)
5556 vtop
->r
|= VT_MUSTBOUND
;
5560 } else if (tok
== '[') {
5566 } else if (tok
== '(') {
5569 int nb_args
, ret_nregs
, ret_align
, regsize
, variadic
;
5571 #ifdef CONFIG_TCC_BCHECK
5572 if (tcc_state
->do_bounds_check
&& (vtop
->r
& VT_SYM
) && vtop
->sym
->v
== TOK_alloca
) {
5575 bounds_ptr
= section_ptr_add(lbounds_section
, 2 * sizeof(addr_t
));
5576 bounds_ptr
[0] = 1; /* marks alloca/vla used */
5581 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
) {
5582 /* pointer test (no array accepted) */
5583 if ((vtop
->type
.t
& (VT_BTYPE
| VT_ARRAY
)) == VT_PTR
) {
5584 vtop
->type
= *pointed_type(&vtop
->type
);
5585 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
)
5589 expect("function pointer");
5592 vtop
->r
&= ~VT_LVAL
; /* no lvalue */
5594 /* get return type */
5597 sa
= s
->next
; /* first parameter */
5598 nb_args
= regsize
= 0;
5600 /* compute first implicit argument if a structure is returned */
5601 if ((s
->type
.t
& VT_BTYPE
) == VT_STRUCT
) {
5602 variadic
= (s
->f
.func_type
== FUNC_ELLIPSIS
);
5603 ret_nregs
= gfunc_sret(&s
->type
, variadic
, &ret
.type
,
5604 &ret_align
, ®size
);
5605 if (ret_nregs
<= 0) {
5606 /* get some space for the returned structure */
5607 size
= type_size(&s
->type
, &align
);
5608 #ifdef TCC_TARGET_ARM64
5609 /* On arm64, a small struct is return in registers.
5610 It is much easier to write it to memory if we know
5611 that we are allowed to write some extra bytes, so
5612 round the allocated space up to a power of 2: */
5614 while (size
& (size
- 1))
5615 size
= (size
| (size
- 1)) + 1;
5617 loc
= (loc
- size
) & -align
;
5619 ret
.r
= VT_LOCAL
| VT_LVAL
;
5620 /* pass it as 'int' to avoid structure arg passing
5622 vseti(VT_LOCAL
, loc
);
5634 if (ret_nregs
> 0) {
5635 /* return in register */
5637 PUT_R_RET(&ret
, ret
.type
.t
);
5642 gfunc_param_typed(s
, sa
);
5652 tcc_error("too few arguments to function");
5654 gfunc_call(nb_args
);
5656 if (ret_nregs
< 0) {
5657 vsetc(&ret
.type
, ret
.r
, &ret
.c
);
5658 #ifdef TCC_TARGET_RISCV64
5659 arch_transfer_ret_regs(1);
5663 for (r
= ret
.r
+ ret_nregs
+ !ret_nregs
; r
-- > ret
.r
;) {
5664 vsetc(&ret
.type
, r
, &ret
.c
);
5665 vtop
->r2
= ret
.r2
; /* Loop only happens when r2 is VT_CONST */
5668 /* handle packed struct return */
5669 if (((s
->type
.t
& VT_BTYPE
) == VT_STRUCT
) && ret_nregs
) {
5672 size
= type_size(&s
->type
, &align
);
5673 /* We're writing whole regs often, make sure there's enough
5674 space. Assume register size is power of 2. */
5675 if (regsize
> align
)
5677 loc
= (loc
- size
) & -align
;
5681 vset(&ret
.type
, VT_LOCAL
| VT_LVAL
, addr
+ offset
);
5685 if (--ret_nregs
== 0)
5689 vset(&s
->type
, VT_LOCAL
| VT_LVAL
, addr
);
5692 if (s
->f
.func_noreturn
)
5700 ST_FUNC
void expr_prod(void)
5705 while (tok
== '*' || tok
== '/' || tok
== '%') {
5713 ST_FUNC
void expr_sum(void)
5718 while (tok
== '+' || tok
== '-') {
5726 static void expr_shift(void)
5731 while (tok
== TOK_SHL
|| tok
== TOK_SAR
) {
5739 static void expr_cmp(void)
5744 while ((tok
>= TOK_ULE
&& tok
<= TOK_GT
) ||
5745 tok
== TOK_ULT
|| tok
== TOK_UGE
) {
5753 static void expr_cmpeq(void)
5758 while (tok
== TOK_EQ
|| tok
== TOK_NE
) {
5766 static void expr_and(void)
5769 while (tok
== '&') {
5776 static void expr_xor(void)
5779 while (tok
== '^') {
5786 static void expr_or(void)
5789 while (tok
== '|') {
5796 static int condition_3way(void);
5798 static void expr_landor(void(*e_fn
)(void), int e_op
, int i
)
5800 int t
= 0, cc
= 1, f
= 0, c
;
5802 c
= f
? i
: condition_3way();
5804 save_regs(1), cc
= 0;
5805 } else if (c
!= i
) {
5806 nocode_wanted
++, f
= 1;
5828 static void expr_land(void)
5831 if (tok
== TOK_LAND
)
5832 expr_landor(expr_or
, TOK_LAND
, 1);
5835 static void expr_lor(void)
5839 expr_landor(expr_land
, TOK_LOR
, 0);
5842 /* Assuming vtop is a value used in a conditional context
5843 (i.e. compared with zero) return 0 if it's false, 1 if
5844 true and -1 if it can't be statically determined. */
5845 static int condition_3way(void)
5848 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
&&
5849 (!(vtop
->r
& VT_SYM
) || !vtop
->sym
->a
.weak
)) {
5851 gen_cast_s(VT_BOOL
);
5858 static int is_cond_bool(SValue
*sv
)
5860 if ((sv
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
5861 && (sv
->type
.t
& VT_BTYPE
) == VT_INT
)
5862 return (unsigned)sv
->c
.i
< 2;
5863 if (sv
->r
== VT_CMP
)
5868 static void expr_cond(void)
5870 int tt
, u
, r1
, r2
, rc
, t1
, t2
, bt1
, bt2
, islv
, c
, g
;
5872 CType type
, type1
, type2
;
5878 c
= condition_3way();
5879 g
= (tok
== ':' && gnu_ext
);
5889 /* needed to avoid having different registers saved in
5896 ncw_prev
= nocode_wanted
;
5903 if (c
< 0 && vtop
->r
== VT_CMP
) {
5909 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FUNC
)
5910 mk_pointer(&vtop
->type
);
5912 sv
= *vtop
; /* save value to handle it later */
5913 vtop
--; /* no vpop so that FP stack is not flushed */
5923 nocode_wanted
= ncw_prev
;
5929 if (c
< 0 && is_cond_bool(vtop
) && is_cond_bool(&sv
)) {
5930 if (sv
.r
== VT_CMP
) {
5941 nocode_wanted
= ncw_prev
;
5942 // tcc_warning("two conditions expr_cond");
5946 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FUNC
)
5947 mk_pointer(&vtop
->type
);
5950 bt1
= t1
& VT_BTYPE
;
5952 bt2
= t2
& VT_BTYPE
;
5955 /* cast operands to correct type according to ISOC rules */
5956 if (bt1
== VT_VOID
|| bt2
== VT_VOID
) {
5957 type
.t
= VT_VOID
; /* NOTE: as an extension, we accept void on only one side */
5958 } else if (is_float(bt1
) || is_float(bt2
)) {
5959 if (bt1
== VT_LDOUBLE
|| bt2
== VT_LDOUBLE
) {
5960 type
.t
= VT_LDOUBLE
;
5962 } else if (bt1
== VT_DOUBLE
|| bt2
== VT_DOUBLE
) {
5967 } else if (bt1
== VT_LLONG
|| bt2
== VT_LLONG
) {
5968 /* cast to biggest op */
5969 type
.t
= VT_LLONG
| VT_LONG
;
5970 if (bt1
== VT_LLONG
)
5972 if (bt2
== VT_LLONG
)
5974 /* convert to unsigned if it does not fit in a long long */
5975 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
) ||
5976 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
))
5977 type
.t
|= VT_UNSIGNED
;
5978 } else if (bt1
== VT_PTR
|| bt2
== VT_PTR
) {
5979 /* http://port70.net/~nsz/c/c99/n1256.html#6.5.15p6 */
5980 /* If one is a null ptr constant the result type
5982 if (is_null_pointer (vtop
)) type
= type1
;
5983 else if (is_null_pointer (&sv
)) type
= type2
;
5984 else if (bt1
!= bt2
)
5985 tcc_error("incompatible types in conditional expressions");
5987 CType
*pt1
= pointed_type(&type1
);
5988 CType
*pt2
= pointed_type(&type2
);
5989 int pbt1
= pt1
->t
& VT_BTYPE
;
5990 int pbt2
= pt2
->t
& VT_BTYPE
;
5991 int newquals
, copied
= 0;
5992 /* pointers to void get preferred, otherwise the
5993 pointed to types minus qualifs should be compatible */
5994 type
= (pbt1
== VT_VOID
) ? type1
: type2
;
5995 if (pbt1
!= VT_VOID
&& pbt2
!= VT_VOID
) {
5996 if(!compare_types(pt1
, pt2
, 1/*unqualif*/))
5997 tcc_warning("pointer type mismatch in conditional expression\n");
5999 /* combine qualifs */
6000 newquals
= ((pt1
->t
| pt2
->t
) & (VT_CONSTANT
| VT_VOLATILE
));
6001 if ((~pointed_type(&type
)->t
& (VT_CONSTANT
| VT_VOLATILE
))
6004 /* copy the pointer target symbol */
6005 type
.ref
= sym_push(SYM_FIELD
, &type
.ref
->type
,
6008 pointed_type(&type
)->t
|= newquals
;
6010 /* pointers to incomplete arrays get converted to
6011 pointers to completed ones if possible */
6012 if (pt1
->t
& VT_ARRAY
6013 && pt2
->t
& VT_ARRAY
6014 && pointed_type(&type
)->ref
->c
< 0
6015 && (pt1
->ref
->c
> 0 || pt2
->ref
->c
> 0))
6018 type
.ref
= sym_push(SYM_FIELD
, &type
.ref
->type
,
6020 pointed_type(&type
)->ref
=
6021 sym_push(SYM_FIELD
, &pointed_type(&type
)->ref
->type
,
6022 0, pointed_type(&type
)->ref
->c
);
6023 pointed_type(&type
)->ref
->c
=
6024 0 < pt1
->ref
->c
? pt1
->ref
->c
: pt2
->ref
->c
;
6027 } else if (bt1
== VT_STRUCT
|| bt2
== VT_STRUCT
) {
6028 /* XXX: test structure compatibility */
6029 type
= bt1
== VT_STRUCT
? type1
: type2
;
6031 /* integer operations */
6032 type
.t
= VT_INT
| (VT_LONG
& (t1
| t2
));
6033 /* convert to unsigned if it does not fit in an integer */
6034 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
) ||
6035 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
))
6036 type
.t
|= VT_UNSIGNED
;
6038 /* keep structs lvalue by transforming `(expr ? a : b)` to `*(expr ? &a : &b)` so
6039 that `(expr ? a : b).mem` does not error with "lvalue expected" */
6040 islv
= (vtop
->r
& VT_LVAL
) && (sv
.r
& VT_LVAL
) && VT_STRUCT
== (type
.t
& VT_BTYPE
);
6042 /* now we convert second operand */
6046 mk_pointer(&vtop
->type
);
6048 } else if (VT_STRUCT
== (vtop
->type
.t
& VT_BTYPE
))
6052 rc
= RC_TYPE(type
.t
);
6053 /* for long longs, we use fixed registers to avoid having
6054 to handle a complicated move */
6055 if (USING_TWO_WORDS(type
.t
))
6056 rc
= RC_RET(type
.t
);
6064 nocode_wanted
= ncw_prev
;
6066 /* this is horrible, but we must also convert first
6072 mk_pointer(&vtop
->type
);
6074 } else if (VT_STRUCT
== (vtop
->type
.t
& VT_BTYPE
))
6080 move_reg(r2
, r1
, islv
? VT_PTR
: type
.t
);
6091 static void expr_eq(void)
6097 (tok
>= TOK_A_MOD
&& tok
<= TOK_A_DIV
) ||
6098 tok
== TOK_A_XOR
|| tok
== TOK_A_OR
||
6099 tok
== TOK_A_SHL
|| tok
== TOK_A_SAR
) {
6114 ST_FUNC
void gexpr(void)
6125 /* parse a constant expression and return value in vtop. */
6126 static void expr_const1(void)
6135 /* parse an integer constant and return its value. */
6136 static inline int64_t expr_const64(void)
6140 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) != VT_CONST
)
6141 expect("constant expression");
6147 /* parse an integer constant and return its value.
6148 Complain if it doesn't fit 32bit (signed or unsigned). */
6149 ST_FUNC
int expr_const(void)
6152 int64_t wc
= expr_const64();
6154 if (c
!= wc
&& (unsigned)c
!= wc
)
6155 tcc_error("constant exceeds 32 bit");
6159 /* ------------------------------------------------------------------------- */
6160 /* return from function */
6162 #ifndef TCC_TARGET_ARM64
6163 static void gfunc_return(CType
*func_type
)
6165 if ((func_type
->t
& VT_BTYPE
) == VT_STRUCT
) {
6166 CType type
, ret_type
;
6167 int ret_align
, ret_nregs
, regsize
;
6168 ret_nregs
= gfunc_sret(func_type
, func_var
, &ret_type
,
6169 &ret_align
, ®size
);
6170 if (ret_nregs
< 0) {
6171 #ifdef TCC_TARGET_RISCV64
6172 arch_transfer_ret_regs(0);
6174 } else if (0 == ret_nregs
) {
6175 /* if returning structure, must copy it to implicit
6176 first pointer arg location */
6179 vset(&type
, VT_LOCAL
| VT_LVAL
, func_vc
);
6182 /* copy structure value to pointer */
6185 /* returning structure packed into registers */
6186 int size
, addr
, align
, rc
;
6187 size
= type_size(func_type
,&align
);
6188 if ((vtop
->r
!= (VT_LOCAL
| VT_LVAL
) ||
6189 (vtop
->c
.i
& (ret_align
-1)))
6190 && (align
& (ret_align
-1))) {
6191 loc
= (loc
- size
) & -ret_align
;
6194 vset(&type
, VT_LOCAL
| VT_LVAL
, addr
);
6198 vset(&ret_type
, VT_LOCAL
| VT_LVAL
, addr
);
6200 vtop
->type
= ret_type
;
6201 rc
= RC_RET(ret_type
.t
);
6209 if (--ret_nregs
== 0)
6211 /* We assume that when a structure is returned in multiple
6212 registers, their classes are consecutive values of the
6215 vtop
->c
.i
+= regsize
;
6220 gv(RC_RET(func_type
->t
));
6222 vtop
--; /* NOT vpop() because on x86 it would flush the fp stack */
6226 static void check_func_return(void)
6228 if ((func_vt
.t
& VT_BTYPE
) == VT_VOID
)
6230 if (!strcmp (funcname
, "main")
6231 && (func_vt
.t
& VT_BTYPE
) == VT_INT
) {
6232 /* main returns 0 by default */
6234 gen_assign_cast(&func_vt
);
6235 gfunc_return(&func_vt
);
6237 tcc_warning("function might return no value: '%s'", funcname
);
6241 /* ------------------------------------------------------------------------- */
6244 static int case_cmp(const void *pa
, const void *pb
)
6246 int64_t a
= (*(struct case_t
**) pa
)->v1
;
6247 int64_t b
= (*(struct case_t
**) pb
)->v1
;
6248 return a
< b
? -1 : a
> b
;
/* Emit a conditional test of the value on top of the value stack and
   bind the resulting jump to code address 'a'; 't' is an existing
   jump chain that is passed through to gvtst().
   NOTE(review): exact gvtst()/gsym_addr() semantics are defined
   elsewhere in this file — confirm against their definitions. */
static void gtst_addr(int t, int a)
{
    gsym_addr(gvtst(0, t), a);
}
6256 static void gcase(struct case_t
**base
, int len
, int *bsym
)
6260 int ll
= (vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
;
6277 gtst_addr(0, p
->sym
); /* v1 <= x <= v2 */
6279 gcase(base
, len
/2, bsym
);
6283 base
+= e
; len
-= e
;
6293 if (p
->v1
== p
->v2
) {
6295 gtst_addr(0, p
->sym
);
6305 gtst_addr(0, p
->sym
);
6309 *bsym
= gjmp(*bsym
);
6312 /* ------------------------------------------------------------------------- */
6313 /* __attribute__((cleanup(fn))) */
6315 static void try_call_scope_cleanup(Sym
*stop
)
6317 Sym
*cls
= cur_scope
->cl
.s
;
6319 for (; cls
!= stop
; cls
= cls
->ncl
) {
6320 Sym
*fs
= cls
->next
;
6321 Sym
*vs
= cls
->prev_tok
;
6323 vpushsym(&fs
->type
, fs
);
6324 vset(&vs
->type
, vs
->r
, vs
->c
);
6326 mk_pointer(&vtop
->type
);
6332 static void try_call_cleanup_goto(Sym
*cleanupstate
)
6337 if (!cur_scope
->cl
.s
)
6340 /* search NCA of both cleanup chains given parents and initial depth */
6341 ocd
= cleanupstate
? cleanupstate
->v
& ~SYM_FIELD
: 0;
6342 for (ccd
= cur_scope
->cl
.n
, oc
= cleanupstate
; ocd
> ccd
; --ocd
, oc
= oc
->ncl
)
6344 for (cc
= cur_scope
->cl
.s
; ccd
> ocd
; --ccd
, cc
= cc
->ncl
)
6346 for (; cc
!= oc
; cc
= cc
->ncl
, oc
= oc
->ncl
, --ccd
)
6349 try_call_scope_cleanup(cc
);
6352 /* call 'func' for each __attribute__((cleanup(func))) */
6353 static void block_cleanup(struct scope
*o
)
6357 for (pg
= &pending_gotos
; (g
= *pg
) && g
->c
> o
->cl
.n
;) {
6358 if (g
->prev_tok
->r
& LABEL_FORWARD
) {
6363 try_call_scope_cleanup(o
->cl
.s
);
6364 pcl
->jnext
= gjmp(0);
6366 goto remove_pending
;
6376 try_call_scope_cleanup(o
->cl
.s
);
6379 /* ------------------------------------------------------------------------- */
6382 static void vla_restore(int loc
)
6385 gen_vla_sp_restore(loc
);
6388 static void vla_leave(struct scope
*o
)
6390 if (o
->vla
.num
< cur_scope
->vla
.num
)
6391 vla_restore(o
->vla
.loc
);
6394 /* ------------------------------------------------------------------------- */
6397 void new_scope(struct scope
*o
)
6399 /* copy and link previous scope */
6401 o
->prev
= cur_scope
;
6404 /* record local declaration stack position */
6405 o
->lstk
= local_stack
;
6406 o
->llstk
= local_label_stack
;
6411 void prev_scope(struct scope
*o
, int is_expr
)
6415 if (o
->cl
.s
!= o
->prev
->cl
.s
)
6416 block_cleanup(o
->prev
);
6418 /* pop locally defined labels */
6419 label_pop(&local_label_stack
, o
->llstk
, is_expr
);
6421 /* In the is_expr case (a statement expression is finished here),
6422 vtop might refer to symbols on the local_stack. Either via the
6423 type or via vtop->sym. We can't pop those nor any that in turn
6424 might be referred to. To make it easier we don't roll back
6425 any symbols in that case; some upper level call to block() will
6426 do that. We do have to remove such symbols from the lookup
6427 tables, though. sym_pop will do that. */
6429 /* pop locally defined symbols */
6430 sym_pop(&local_stack
, o
->lstk
, is_expr
);
6432 cur_scope
= o
->prev
;
6436 /* leave a scope via break/continue(/goto) */
6437 void leave_scope(struct scope
*o
)
6441 try_call_scope_cleanup(o
->cl
.s
);
6445 /* ------------------------------------------------------------------------- */
6446 /* call block from 'for do while' loops */
6448 static void lblock(int *bsym
, int *csym
)
6450 struct scope
*lo
= loop_scope
, *co
= cur_scope
;
6451 int *b
= co
->bsym
, *c
= co
->csym
;
6465 static void block(int is_expr
)
6467 int a
, b
, c
, d
, e
, t
;
6471 /* default return value is (void) */
6473 vtop
->type
.t
= VT_VOID
;
6485 if (tok
== TOK_ELSE
) {
6490 gsym(d
); /* patch else jmp */
6495 } else if (t
== TOK_WHILE
) {
6507 } else if (t
== '{') {
6511 /* handle local labels declarations */
6512 while (tok
== TOK_LABEL
) {
6515 if (tok
< TOK_UIDENT
)
6516 expect("label identifier");
6517 label_push(&local_label_stack
, tok
, LABEL_DECLARED
);
6519 } while (tok
== ',');
6523 while (tok
!= '}') {
6532 prev_scope(&o
, is_expr
);
6534 if (0 == local_scope
&& !nocode_wanted
)
6535 check_func_return();
6538 } else if (t
== TOK_RETURN
) {
6540 b
= (func_vt
.t
& VT_BTYPE
) != VT_VOID
;
6542 gexpr(), gen_assign_cast(&func_vt
);
6543 leave_scope(root_scope
);
6545 gfunc_return(&func_vt
);
6549 tcc_warning("'return' with no value.");
6551 /* jump unless last stmt in top-level block */
6552 if (tok
!= '}' || local_scope
!= 1)
6556 } else if (t
== TOK_BREAK
) {
6558 if (!cur_scope
->bsym
)
6559 tcc_error("cannot break");
6560 if (!cur_switch
|| cur_scope
->bsym
!= cur_switch
->bsym
)
6561 leave_scope(loop_scope
);
6563 leave_scope(cur_switch
->scope
);
6564 *cur_scope
->bsym
= gjmp(*cur_scope
->bsym
);
6567 } else if (t
== TOK_CONTINUE
) {
6569 if (!cur_scope
->csym
)
6570 tcc_error("cannot continue");
6571 leave_scope(loop_scope
);
6572 *cur_scope
->csym
= gjmp(*cur_scope
->csym
);
6575 } else if (t
== TOK_FOR
) {
6581 /* c99 for-loop init decl? */
6582 if (!decl0(VT_LOCAL
, 1, NULL
)) {
6583 /* no, regular for-loop init expr */
6611 } else if (t
== TOK_DO
) {
6625 } else if (t
== TOK_SWITCH
) {
6626 struct switch_t
*saved
, sw
;
6633 sw
.scope
= cur_scope
;
6641 switchval
= *vtop
--;
6644 b
= gjmp(0); /* jump to first case */
6646 a
= gjmp(a
); /* add implicit break */
6650 qsort(sw
.p
, sw
.n
, sizeof(void*), case_cmp
);
6651 for (b
= 1; b
< sw
.n
; b
++)
6652 if (sw
.p
[b
- 1]->v2
>= sw
.p
[b
]->v1
)
6653 tcc_error("duplicate case value");
6655 /* Our switch table sorting is signed, so the compared
6656 value needs to be as well when it's 64bit. */
6657 if ((switchval
.type
.t
& VT_BTYPE
) == VT_LLONG
)
6658 switchval
.type
.t
&= ~VT_UNSIGNED
;
6661 d
= 0, gcase(sw
.p
, sw
.n
, &d
);
6664 gsym_addr(d
, sw
.def_sym
);
6670 dynarray_reset(&sw
.p
, &sw
.n
);
6673 } else if (t
== TOK_CASE
) {
6674 struct case_t
*cr
= tcc_malloc(sizeof(struct case_t
));
6677 cr
->v1
= cr
->v2
= expr_const64();
6678 if (gnu_ext
&& tok
== TOK_DOTS
) {
6680 cr
->v2
= expr_const64();
6681 if (cr
->v2
< cr
->v1
)
6682 tcc_warning("empty case range");
6685 dynarray_add(&cur_switch
->p
, &cur_switch
->n
, cr
);
6688 goto block_after_label
;
6690 } else if (t
== TOK_DEFAULT
) {
6693 if (cur_switch
->def_sym
)
6694 tcc_error("too many 'default'");
6695 cur_switch
->def_sym
= gind();
6698 goto block_after_label
;
6700 } else if (t
== TOK_GOTO
) {
6701 vla_restore(root_scope
->vla
.loc
);
6702 if (tok
== '*' && gnu_ext
) {
6706 if ((vtop
->type
.t
& VT_BTYPE
) != VT_PTR
)
6710 } else if (tok
>= TOK_UIDENT
) {
6711 s
= label_find(tok
);
6712 /* put forward definition if needed */
6714 s
= label_push(&global_label_stack
, tok
, LABEL_FORWARD
);
6715 else if (s
->r
== LABEL_DECLARED
)
6716 s
->r
= LABEL_FORWARD
;
6718 if (s
->r
& LABEL_FORWARD
) {
6719 /* start new goto chain for cleanups, linked via label->next */
6720 if (cur_scope
->cl
.s
&& !nocode_wanted
) {
6721 sym_push2(&pending_gotos
, SYM_FIELD
, 0, cur_scope
->cl
.n
);
6722 pending_gotos
->prev_tok
= s
;
6723 s
= sym_push2(&s
->next
, SYM_FIELD
, 0, 0);
6724 pending_gotos
->next
= s
;
6726 s
->jnext
= gjmp(s
->jnext
);
6728 try_call_cleanup_goto(s
->cleanupstate
);
6729 gjmp_addr(s
->jnext
);
6734 expect("label identifier");
6738 } else if (t
== TOK_ASM1
|| t
== TOK_ASM2
|| t
== TOK_ASM3
) {
6742 if (tok
== ':' && t
>= TOK_UIDENT
) {
6747 if (s
->r
== LABEL_DEFINED
)
6748 tcc_error("duplicate label '%s'", get_tok_str(s
->v
, NULL
));
6749 s
->r
= LABEL_DEFINED
;
6751 Sym
*pcl
; /* pending cleanup goto */
6752 for (pcl
= s
->next
; pcl
; pcl
= pcl
->prev
)
6754 sym_pop(&s
->next
, NULL
, 0);
6758 s
= label_push(&global_label_stack
, t
, LABEL_DEFINED
);
6761 s
->cleanupstate
= cur_scope
->cl
.s
;
6764 vla_restore(cur_scope
->vla
.loc
);
6765 /* we accept this, but it is a mistake */
6767 tcc_warning("deprecated use of label at end of compound statement");
6773 /* expression case */
6789 /* This skips over a stream of tokens containing balanced {} and ()
6790 pairs, stopping at outer ',' ';' and '}' (or matching '}' if we started
6791 with a '{'). If STR then allocates and stores the skipped tokens
6792 in *STR. This doesn't check if () and {} are nested correctly,
6793 i.e. "({)}" is accepted. */
/* NOTE(review): this extraction is partial -- several original source
   lines (e.g. 6795, 6797-6798, 6800) are elided, so only the visible
   tokens are annotated below. */
6794 static void skip_or_save_block(TokenString
**str
)
/* remember whether we started on '{' so its matching '}' ends the scan */
6796 int braces
= tok
== '{';
/* allocate the output token string only when the caller wants the tokens */
6799 *str
= tok_str_alloc();
/* scan until an outer ',', ';', '}' or ')' at nesting level 0 */
6801 while ((level
> 0 || (tok
!= '}' && tok
!= ',' && tok
!= ';' && tok
!= ')'))) {
6803 if (tok
== TOK_EOF
) {
/* EOF while nested, or while saving tokens, is a hard error */
6804 if (str
|| level
> 0)
6805 tcc_error("unexpected end of file");
6810 tok_str_add_tok(*str
);
/* one counter tracks nesting of both {} and () */
6813 if (t
== '{' || t
== '(') {
6815 } else if (t
== '}' || t
== ')') {
6817 if (level
== 0 && braces
&& t
== '}')
/* terminate the saved stream (tok_str convention: -1 then 0) */
6822 tok_str_add(*str
, -1);
6823 tok_str_add(*str
, 0);
6827 #define EXPR_CONST 1
/* Parse one initializer element. For constant contexts (expr_type ==
   EXPR_CONST, used for static/global data) the parsed value must be a
   link-time constant: either a plain constant, or an lvalue on an
   anonymous symbol (a compound literal).
   NOTE(review): interior source lines (6831, 6833-6834, 6837-6838, ...)
   are elided in this extraction. */
6830 static void parse_init_elem(int expr_type
)
6832 int saved_global_expr
;
6835 /* compound literals must be allocated globally in this case */
/* save/restore global_expr around the constant-expression parse */
6836 saved_global_expr
= global_expr
;
6839 global_expr
= saved_global_expr
;
6840 /* NOTE: symbols are accepted, as well as lvalue for anon symbols
6841 (compound literals). */
6842 if (((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) != VT_CONST
6843 && ((vtop
->r
& (VT_SYM
|VT_LVAL
)) != (VT_SYM
|VT_LVAL
)
6844 || vtop
->sym
->v
< SYM_FIRST_ANOM
))
6845 #ifdef TCC_TARGET_PE
/* on PE targets a dllimport symbol is not a load-time constant either */
6846 || ((vtop
->r
& VT_SYM
) && vtop
->sym
->a
.dllimport
)
6849 tcc_error("initializer element is not constant");
6857 /* put zeros for variable based init */
/* Zero-fill 'size' bytes at offset 'c'. For section-backed (static)
   data nothing is emitted since sections start zeroed; otherwise a
   call to memset is generated (argument pushing elided here).
   NOTE(review): lines 6859-6860, 6862, 6864 and the function tail are
   missing from this extraction. */
6858 static void init_putz(Section
*sec
, unsigned long c
, int size
)
6861 /* nothing to do because globals are already set to zero */
/* local case: generate a runtime memset() call */
6863 vpush_global_sym(&func_old_type
, TOK_memset
);
6865 #ifdef TCC_TARGET_ARM
6877 #define DIF_SIZE_ONLY 2
6878 #define DIF_HAVE_ELEM 4
6880 /* t is the array or struct type. c is the array or struct
6881 address. cur_field is the pointer to the current
6882 field, for arrays the 'c' member contains the current start
6883 index. 'flags' is as in decl_initializer.
6884 'al' contains the already initialized length of the
6885 current container (starting at c). This returns the new length of that. */
/* NOTE(review): this extraction elides many interior lines; the
   annotations below cover only the visible tokens. */
6886 static int decl_designator(CType
*type
, Section
*sec
, unsigned long c
,
6887 Sym
**cur_field
, int flags
, int al
)
6890 int index
, index_last
, align
, l
, nb_elems
, elem_size
;
/* remember the container base so holes before this element can be zeroed */
6891 unsigned long corig
= c
;
6896 if (flags
& DIF_HAVE_ELEM
)
/* old GNU-style 'field:' designator (gnu_ext) */
6899 if (gnu_ext
&& tok
>= TOK_UIDENT
) {
6906 /* NOTE: we only support ranges for last designator */
/* parse a chain of [index], [lo ... hi] and .field designators */
6907 while (nb_elems
== 1 && (tok
== '[' || tok
== '.')) {
6909 if (!(type
->t
& VT_ARRAY
))
6910 expect("array type");
6912 index
= index_last
= expr_const();
/* GNU range designator: [lo ... hi] */
6913 if (tok
== TOK_DOTS
&& gnu_ext
) {
6915 index_last
= expr_const();
6919 if (index
< 0 || (s
->c
>= 0 && index_last
>= s
->c
) ||
6921 tcc_error("invalid index");
6923 (*cur_field
)->c
= index_last
;
6924 type
= pointed_type(type
);
6925 elem_size
= type_size(type
, &align
);
6926 c
+= index
* elem_size
;
/* a range initializes index_last - index + 1 consecutive elements */
6927 nb_elems
= index_last
- index
+ 1;
6934 if ((type
->t
& VT_BTYPE
) != VT_STRUCT
)
6935 expect("struct/union type");
6937 f
= find_field(type
, l
, &cumofs
);
6950 } else if (!gnu_ext
) {
/* no designator: advance to the next positional element/field */
6955 if (type
->t
& VT_ARRAY
) {
6956 index
= (*cur_field
)->c
;
6957 if (type
->ref
->c
>= 0 && index
>= type
->ref
->c
)
6958 tcc_error("index too large");
6959 type
= pointed_type(type
);
6960 c
+= index
* type_size(type
, &align
);
/* skip anonymous bitfield padding members */
6963 while (f
&& (f
->v
& SYM_FIRST_ANOM
) && (f
->type
.t
& VT_BITFIELD
))
6964 *cur_field
= f
= f
->next
;
6966 tcc_error("too many field init");
6971 /* must put zero in holes (note that doing it that way
6972 ensures that it even works with designators) */
6973 if (!(flags
& DIF_SIZE_ONLY
) && c
- corig
> al
)
6974 init_putz(sec
, corig
+ al
, c
- corig
- al
);
/* recurse to parse the element's own initializer */
6975 decl_initializer(type
, sec
, c
, flags
& ~DIF_FIRST
);
6977 /* XXX: make it more general */
/* range designator: replicate the first element into the rest */
6978 if (!(flags
& DIF_SIZE_ONLY
) && nb_elems
> 1) {
6979 unsigned long c_end
;
/* local object: copy element 0 to each slot via the value stack */
6984 vset(type
, VT_LOCAL
|VT_LVAL
, c
);
6985 for (i
= 1; i
< nb_elems
; i
++) {
6986 vset(type
, VT_LOCAL
|VT_LVAL
, c
+ elem_size
* i
);
/* static data: memcpy the bytes directly inside the section */
6991 } else if (!NODATA_WANTED
) {
6992 c_end
= c
+ nb_elems
* elem_size
;
6993 if (c_end
> sec
->data_allocated
)
6994 section_realloc(sec
, c_end
);
6995 src
= sec
->data
+ c
;
6997 for(i
= 1; i
< nb_elems
; i
++) {
6999 memcpy(dst
, src
, elem_size
);
/* return value: new initialized length of the container (c - corig) */
7003 c
+= nb_elems
* type_size(type
, &align
);
7009 /* store a value or an expression directly in global data or in local array */
/* Pops vtop and stores it at offset 'c': for section-backed data the
   bytes (and any needed relocations) are written directly into
   sec->data; for the local case (elided tail) a store is generated.
   NOTE(review): many interior lines are elided in this extraction. */
7010 static void init_putv(CType
*type
, Section
*sec
, unsigned long c
)
7017 dtype
.t
&= ~VT_CONSTANT
; /* need to do that to avoid false warning */
7021 /* XXX: not portable */
7022 /* XXX: generate error if incorrect relocation */
7023 gen_assign_cast(&dtype
);
7024 bt
= type
->t
& VT_BTYPE
;
/* a symbolic value can only be stored into a pointer-sized,
   non-bitfield slot (it becomes a relocation) */
7026 if ((vtop
->r
& VT_SYM
)
7029 && (bt
!= (PTR_SIZE
== 8 ? VT_LLONG
: VT_INT
)
7030 || (type
->t
& VT_BITFIELD
))
7031 && !((vtop
->r
& VT_CONST
) && vtop
->sym
->v
>= SYM_FIRST_ANOM
)
7033 tcc_error("initializer element is not computable at load time");
7035 if (NODATA_WANTED
) {
7040 size
= type_size(type
, &align
);
7041 section_reserve(sec
, c
+ size
);
7042 ptr
= sec
->data
+ c
;
7044 /* XXX: make code faster ? */
/* anonymous non-pointer symbol: a compound literal already emitted
   elsewhere -- copy its bytes and relocations over */
7045 if ((vtop
->r
& (VT_SYM
|VT_CONST
)) == (VT_SYM
|VT_CONST
) &&
7046 vtop
->sym
->v
>= SYM_FIRST_ANOM
&&
7047 /* XXX This rejects compound literals like
7048 '(void *){ptr}'. The problem is that '&sym' is
7049 represented the same way, which would be ruled out
7050 by the SYM_FIRST_ANOM check above, but also '"string"'
7051 in 'char *p = "string"' is represented the same
7052 with the type being VT_PTR and the symbol being an
7053 anonymous one. That is, there's no difference in vtop
7054 between '(void *){x}' and '&(void *){x}'. Ignore
7055 pointer typed entities here. Hopefully no real code
7056 will ever use compound literals with scalar type. */
7057 (vtop
->type
.t
& VT_BTYPE
) != VT_PTR
) {
7058 /* These come from compound literals, memcpy stuff over. */
7062 esym
= elfsym(vtop
->sym
);
7063 ssec
= tcc_state
->sections
[esym
->st_shndx
];
7064 memmove (ptr
, ssec
->data
+ esym
->st_value
, size
);
7066 /* We need to copy over all memory contents, and that
7067 includes relocations. Use the fact that relocs are
7068 created in order, so look from the end of relocs
7069 until we hit one before the copied region. */
7070 int num_relocs
= ssec
->reloc
->data_offset
/ sizeof(*rel
);
7071 rel
= (ElfW_Rel
*)(ssec
->reloc
->data
+ ssec
->reloc
->data_offset
);
7072 while (num_relocs
--) {
/* stop conditions: reloc past or before the copied byte range */
7074 if (rel
->r_offset
>= esym
->st_value
+ size
)
7076 if (rel
->r_offset
< esym
->st_value
)
7078 /* Note: if the same fields are initialized multiple
7079 times (possible with designators) then we possibly
7080 add multiple relocations for the same offset here.
7081 That would lead to wrong code, the last reloc needs
7082 to win. We clean this up later after the whole
7083 initializer is parsed. */
/* re-emit the reloc rebased onto our destination offset */
7084 put_elf_reloca(symtab_section
, sec
,
7085 c
+ rel
->r_offset
- esym
->st_value
,
7086 ELFW(R_TYPE
)(rel
->r_info
),
7087 ELFW(R_SYM
)(rel
->r_info
),
/* bitfield store: write bit_size bits at bit_pos, byte by byte,
   OR-merging with whatever earlier initializers already wrote */
7097 if (type
->t
& VT_BITFIELD
) {
7098 int bit_pos
, bit_size
, bits
, n
;
7099 unsigned char *p
, v
, m
;
7100 bit_pos
= BIT_POS(vtop
->type
.t
);
7101 bit_size
= BIT_SIZE(vtop
->type
.t
);
7102 p
= (unsigned char*)ptr
+ (bit_pos
>> 3);
7103 bit_pos
&= 7, bits
= 0;
7108 v
= vtop
->c
.i
>> bits
<< bit_pos
;
7109 m
= ((1 << n
) - 1) << bit_pos
;
7110 *p
= (*p
& ~m
) | (v
& m
);
7111 bits
+= n
, bit_size
-= n
, bit_pos
= 0, ++p
;
7115 /* XXX: when cross-compiling we assume that each type has the
7116 same representation on host and target, which is likely to
7117 be wrong in the case of long double */
/* _Bool: normalize to 0/1 before the byte store */
7119 vtop
->c
.i
= vtop
->c
.i
!= 0;
/* scalar stores use |= so designated re-inits merge (cleaned later) */
7121 *(char *)ptr
|= vtop
->c
.i
;
7124 *(short *)ptr
|= vtop
->c
.i
;
7127 *(float*)ptr
= vtop
->c
.f
;
7130 *(double *)ptr
= vtop
->c
.d
;
7133 #if defined TCC_IS_NATIVE_387
7134 if (sizeof (long double) >= 10) /* zero pad ten-byte LD */
7135 memcpy(ptr
, &vtop
->c
.ld
, 10);
7137 else if (sizeof (long double) == sizeof (double))
/* host x87: convert host double to 80-bit target long double */
7138 __asm__("fldl %1\nfstpt %0\n" : "=m" (*ptr
) : "m" (vtop
->c
.ld
));
7140 else if (vtop
->c
.ld
== 0.0)
/* non-387 host: only exact-size matches can be emitted */
7144 if (sizeof(long double) == LDOUBLE_SIZE
)
7145 *(long double*)ptr
= vtop
->c
.ld
;
7146 else if (sizeof(double) == LDOUBLE_SIZE
)
7147 *(double *)ptr
= (double)vtop
->c
.ld
;
7149 tcc_error("can't cross compile long double constants");
7153 *(long long *)ptr
|= vtop
->c
.i
;
/* pointer-sized slot: emit a relocation when a symbol is involved */
7160 addr_t val
= vtop
->c
.i
;
7162 if (vtop
->r
& VT_SYM
)
7163 greloca(sec
, vtop
->sym
, c
, R_DATA_PTR
, val
);
7165 *(addr_t
*)ptr
|= val
;
/* variant without explicit addend (REL-style relocation) */
7167 if (vtop
->r
& VT_SYM
)
7168 greloc(sec
, vtop
->sym
, c
, R_DATA_PTR
);
7169 *(addr_t
*)ptr
|= val
;
7175 int val
= vtop
->c
.i
;
7177 if (vtop
->r
& VT_SYM
)
7178 greloca(sec
, vtop
->sym
, c
, R_DATA_PTR
, val
);
7182 if (vtop
->r
& VT_SYM
)
7183 greloc(sec
, vtop
->sym
, c
, R_DATA_PTR
);
/* local (stack) case: set up an lvalue and generate a normal store */
7192 vset(&dtype
, VT_LOCAL
|VT_LVAL
, c
);
7199 /* 't' contains the type and storage info. 'c' is the offset of the
7200 object in section 'sec'. If 'sec' is NULL, it means stack based
7201 allocation. 'flags & DIF_FIRST' is true if array '{' must be read (multi
7202 dimension implicit array init handling). 'flags & DIF_SIZE_ONLY' is true if
7203 size only evaluation is wanted (only for arrays). */
/* NOTE(review): interior lines are elided in this extraction; the
   annotations below cover only the visible tokens. */
7204 static void decl_initializer(CType
*type
, Section
*sec
, unsigned long c
,
7207 int len
, n
, no_oblock
, nb
, i
;
/* eagerly parse a scalar element unless it is a string literal (those
   get array-specific handling below) or we only measure the size */
7213 if (!(flags
& DIF_HAVE_ELEM
) && tok
!= '{' &&
7214 /* In case of strings we have special handling for arrays, so
7215 don't consume them as initializer value (which would commit them
7216 to some anonymous symbol). */
7217 tok
!= TOK_LSTR
&& tok
!= TOK_STR
&&
7218 !(flags
& DIF_SIZE_ONLY
)) {
7219 parse_init_elem(!sec
? EXPR_ANY
: EXPR_CONST
);
7220 flags
|= DIF_HAVE_ELEM
;
/* simple case: element type matches the target type -- store it */
7223 if ((flags
& DIF_HAVE_ELEM
) &&
7224 !(type
->t
& VT_ARRAY
) &&
7225 /* Use i_c_parameter_t, to strip toplevel qualifiers.
7226 The source type might have VT_CONSTANT set, which is
7227 of course assignable to non-const elements. */
7228 is_compatible_unqualified_types(type
, &vtop
->type
)) {
7229 init_putv(type
, sec
, c
);
7230 } else if (type
->t
& VT_ARRAY
) {
7233 t1
= pointed_type(type
);
7234 size1
= type_size(t1
, &align1
);
7237 if (((flags
& DIF_FIRST
) && tok
!= TOK_LSTR
&& tok
!= TOK_STR
) ||
7240 tcc_error("character array initializer must be a literal,"
7241 " optionally enclosed in braces");
7246 /* only parse strings here if correct type (otherwise: handle
7247 them as ((w)char *) expressions */
7248 if ((tok
== TOK_LSTR
&&
7249 #ifdef TCC_TARGET_PE
/* PE: wchar_t is unsigned short; elsewhere it is int */
7250 (t1
->t
& VT_BTYPE
) == VT_SHORT
&& (t1
->t
& VT_UNSIGNED
)
7252 (t1
->t
& VT_BTYPE
) == VT_INT
7254 ) || (tok
== TOK_STR
&& (t1
->t
& VT_BTYPE
) == VT_BYTE
)) {
/* concatenate adjacent string literals into the array */
7256 while (tok
== TOK_STR
|| tok
== TOK_LSTR
) {
7259 /* compute maximum number of chars wanted */
7261 cstr_len
= tokc
.str
.size
;
7263 cstr_len
= tokc
.str
.size
/ sizeof(nwchar_t
);
/* clamp to the declared array size when it is known */
7266 if (n
>= 0 && nb
> (n
- len
))
7268 if (!(flags
& DIF_SIZE_ONLY
)) {
7270 tcc_warning("initializer-string for array is too long");
7271 /* in order to go faster for common case (char
7272 string in global variable, we handle it
7274 if (sec
&& tok
== TOK_STR
&& size1
== 1) {
/* fast path: raw memcpy of the bytes into the section */
7276 memcpy(sec
->data
+ c
+ len
, tokc
.str
.data
, nb
);
/* slow path: store character by character */
7280 ch
= ((unsigned char *)tokc
.str
.data
)[i
];
7282 ch
= ((nwchar_t
*)tokc
.str
.data
)[i
];
7284 init_putv(t1
, sec
, c
+ (len
+ i
) * size1
);
7291 /* only add trailing zero if enough storage (no
7292 warning in this case since it is standard) */
7293 if (n
< 0 || len
< n
) {
7294 if (!(flags
& DIF_SIZE_ONLY
)) {
7296 init_putv(t1
, sec
, c
+ (len
* size1
));
/* brace-enclosed list: each iteration handles one designated or
   positional element via decl_designator() */
7307 while (tok
!= '}' || (flags
& DIF_HAVE_ELEM
)) {
7308 len
= decl_designator(type
, sec
, c
, &f
, flags
, len
);
7309 flags
&= ~DIF_HAVE_ELEM
;
7310 if (type
->t
& VT_ARRAY
) {
7312 /* special test for multi dimensional arrays (may not
7313 be strictly correct if designators are used at the
7315 if (no_oblock
&& len
>= n
*size1
)
/* unions take a single initializer */
7318 if (s
->type
.t
== VT_UNION
)
7322 if (no_oblock
&& f
== NULL
)
7331 /* put zeros at the end */
7332 if (!(flags
& DIF_SIZE_ONLY
) && len
< n
*size1
)
7333 init_putz(sec
, c
+ len
, n
*size1
- len
);
7336 /* patch type size if needed, which happens only for array types */
7338 s
->c
= size1
== 1 ? len
: ((len
+ size1
- 1)/size1
);
7339 } else if ((type
->t
& VT_BTYPE
) == VT_STRUCT
) {
7342 if ((flags
& DIF_FIRST
) || tok
== '{') {
7350 } else if (tok
== '{') {
/* scalar in braces: { expr } -- recurse without DIF_HAVE_ELEM */
7351 if (flags
& DIF_HAVE_ELEM
)
7354 decl_initializer(type
, sec
, c
, flags
& ~DIF_HAVE_ELEM
);
7356 } else if ((flags
& DIF_SIZE_ONLY
)) {
7357 /* If we supported only ISO C we wouldn't have to accept calling
7358 this on anything than an array if DIF_SIZE_ONLY (and even then
7359 only on the outermost level, so no recursion would be needed),
7360 because initializing a flex array member isn't supported.
7361 But GNU C supports it, so we need to recurse even into
7362 subfields of structs and arrays when DIF_SIZE_ONLY is set. */
7363 /* just skip expression */
7364 skip_or_save_block(NULL
);
7366 if (!(flags
& DIF_HAVE_ELEM
)) {
7367 /* This should happen only when we haven't parsed
7368 the init element above for fear of committing a
7369 string constant to memory too early. */
7370 if (tok
!= TOK_STR
&& tok
!= TOK_LSTR
)
7371 expect("string constant");
7372 parse_init_elem(!sec
? EXPR_ANY
: EXPR_CONST
);
7374 init_putv(type
, sec
, c
);
7378 /* parse an initializer for type 't' if 'has_init' is non zero, and
7379 allocate space in local or global data space ('r' is either
7380 VT_LOCAL or VT_CONST). If 'v' is non zero, then an associated
7381 variable 'v' of scope 'scope' is declared before initializers
7382 are parsed. If 'v' is zero, then a reference to the new object
7383 is put in the value stack. If 'has_init' is 2, a special parsing
7384 is done to handle string constants. */
/* NOTE(review): interior lines are elided in this extraction; the
   annotations below cover only the visible tokens. */
7385 static void decl_initializer_alloc(CType
*type
, AttributeDef
*ad
, int r
,
7386 int has_init
, int v
, int scope
)
7388 int size
, align
, addr
;
7389 TokenString
*init_str
= NULL
;
7392 Sym
*flexible_array
;
7394 int saved_nocode_wanted
= nocode_wanted
;
7395 #ifdef CONFIG_TCC_BCHECK
7399 /* Always allocate static or global variables */
/* high bit of nocode_wanted forces static data output (see
   STATIC_DATA_WANTED at top of file) */
7400 if (v
&& (r
& VT_VALMASK
) == VT_CONST
)
7401 nocode_wanted
|= 0x80000000;
7403 #ifdef CONFIG_TCC_BCHECK
7404 bcheck
= tcc_state
->do_bounds_check
&& !NODATA_WANTED
;
/* detect a trailing flexible array member (type.ref->c < 0) */
7407 flexible_array
= NULL
;
7408 if ((type
->t
& VT_BTYPE
) == VT_STRUCT
) {
7409 Sym
*field
= type
->ref
->next
;
7412 field
= field
->next
;
7413 if (field
->type
.t
& VT_ARRAY
&& field
->type
.ref
->c
< 0)
7414 flexible_array
= field
;
7418 size
= type_size(type
, &align
);
7419 /* If unknown size, we must evaluate it before
7420 evaluating initializers because
7421 initializers can generate global data too
7422 (e.g. string pointers or ISOC99 compound
7423 literals). It also simplifies local
7424 initializers handling */
7425 if (size
< 0 || (flexible_array
&& has_init
)) {
7427 tcc_error("unknown type size");
7428 /* get all init string */
7429 if (has_init
== 2) {
7430 init_str
= tok_str_alloc();
7431 /* only get strings */
7432 while (tok
== TOK_STR
|| tok
== TOK_LSTR
) {
7433 tok_str_add_tok(init_str
);
7436 tok_str_add(init_str
, -1);
7437 tok_str_add(init_str
, 0);
7439 skip_or_save_block(&init_str
);
/* first pass over the saved tokens: size-only evaluation */
7444 begin_macro(init_str
, 1);
7446 decl_initializer(type
, NULL
, 0, DIF_FIRST
| DIF_SIZE_ONLY
);
7447 /* prepare second initializer parsing */
7448 macro_ptr
= init_str
->str
;
7451 /* if still unknown size, error */
7452 size
= type_size(type
, &align
);
7454 tcc_error("unknown type size");
7456 /* If there's a flex member and it was used in the initializer
7458 if (flexible_array
&&
7459 flexible_array
->type
.ref
->c
> 0)
7460 size
+= flexible_array
->type
.ref
->c
7461 * pointed_size(&flexible_array
->type
);
7462 /* take into account specified alignment if bigger */
7463 if (ad
->a
.aligned
) {
7464 int speca
= 1 << (ad
->a
.aligned
- 1);
7467 } else if (ad
->a
.packed
) {
/* anonymous object while suppressing data: allocate nothing */
7471 if (!v
&& NODATA_WANTED
)
7472 size
= 0, align
= 1;
/* ---- stack-based (local) allocation ---- */
7474 if ((r
& VT_VALMASK
) == VT_LOCAL
) {
7476 #ifdef CONFIG_TCC_BCHECK
7477 if (bcheck
&& ((type
->t
& VT_ARRAY
) ||
7478 (type
->t
& VT_BTYPE
) == VT_STRUCT
)) {
/* reserve aligned stack space; loc grows downward */
7482 loc
= (loc
- size
) & -align
;
7484 #ifdef CONFIG_TCC_BCHECK
7485 /* handles bounds */
7486 /* XXX: currently, since we do only one pass, we cannot track
7487 '&' operators, so we add only arrays/structs/unions */
7488 if (bcheck
&& ((type
->t
& VT_ARRAY
) ||
7489 (type
->t
& VT_BTYPE
) == VT_STRUCT
)) {
7491 /* add padding between regions */
7493 /* then add local bound info */
7494 bounds_ptr
= section_ptr_add(lbounds_section
, 2 * sizeof(addr_t
));
7495 bounds_ptr
[0] = addr
;
7496 bounds_ptr
[1] = size
;
7500 /* local variable */
7501 #ifdef CONFIG_TCC_ASM
/* GNU register variable: asm("reg") names the backing register */
7502 if (ad
->asm_label
) {
7503 int reg
= asm_parse_regvar(ad
->asm_label
);
7505 r
= (r
& ~VT_VALMASK
) | reg
;
7508 sym
= sym_push(v
, type
, r
, addr
);
/* __attribute__((cleanup(f))): record on the scope's cleanup chain */
7509 if (ad
->cleanup_func
) {
7510 Sym
*cls
= sym_push2(&all_cleanups
,
7511 SYM_FIELD
| ++cur_scope
->cl
.n
, 0, 0);
7512 cls
->prev_tok
= sym
;
7513 cls
->next
= ad
->cleanup_func
;
7514 cls
->ncl
= cur_scope
->cl
.s
;
7515 cur_scope
->cl
.s
= cls
;
7520 /* push local reference */
7521 vset(type
, r
, addr
);
/* ---- static/global allocation ---- */
7524 if (v
&& scope
== VT_CONST
) {
7525 /* see if the symbol was already defined */
7528 patch_storage(sym
, ad
, type
);
7529 /* we accept several definitions of the same global variable. */
7530 if (!has_init
&& sym
->c
&& elfsym(sym
)->st_shndx
!= SHN_UNDEF
)
7535 /* allocate symbol in corresponding section */
7540 else if (tcc_state
->nocommon
)
7545 addr
= section_add(sec
, size
, align
);
7546 #ifdef CONFIG_TCC_BCHECK
7547 /* add padding if bound check */
7549 section_add(sec
, 1, 1);
7552 addr
= align
; /* SHN_COMMON is special, symbol value is align */
7553 sec
= common_section
;
7558 sym
= sym_push(v
, type
, r
| VT_SYM
, 0);
7559 patch_storage(sym
, ad
, NULL
);
7561 /* update symbol definition */
7562 put_extern_sym(sym
, sec
, addr
, size
);
7564 /* push global reference */
7565 vpush_ref(type
, sec
, addr
, size
);
7570 #ifdef CONFIG_TCC_BCHECK
7571 /* handles bounds now because the symbol must be defined
7572 before for the relocation */
7576 greloca(bounds_section
, sym
, bounds_section
->data_offset
, R_DATA_PTR
, 0);
7577 /* then add global bound info */
7578 bounds_ptr
= section_ptr_add(bounds_section
, 2 * sizeof(addr_t
));
7579 bounds_ptr
[0] = 0; /* relocated */
7580 bounds_ptr
[1] = size
;
/* ---- variable length array: runtime allocation ---- */
7585 if (type
->t
& VT_VLA
) {
7591 /* save current stack pointer */
7592 if (root_scope
->vla
.loc
== 0) {
7593 struct scope
*v
= cur_scope
;
7594 gen_vla_sp_save(loc
-= PTR_SIZE
);
/* propagate the save slot to every scope up to the root */
7595 do v
->vla
.loc
= loc
; while ((v
= v
->prev
));
7598 vla_runtime_type_size(type
, &a
);
7599 gen_vla_alloc(type
, a
);
7600 #if defined TCC_TARGET_PE && defined TCC_TARGET_X86_64
7601 /* on _WIN64, because of the function args scratch area, the
7602 result of alloca differs from RSP and is returned in RAX. */
7603 gen_vla_result(addr
), addr
= (loc
-= PTR_SIZE
);
7605 gen_vla_sp_save(addr
);
7606 cur_scope
->vla
.loc
= addr
;
7607 cur_scope
->vla
.num
++;
7608 #ifdef CONFIG_TCC_BCHECK
7612 bounds_ptr
= section_ptr_add(lbounds_section
, 2 * sizeof(addr_t
));
7613 bounds_ptr
[0] = 1; /* marks alloca/vla used */
/* ---- ordinary object with an initializer ---- */
7618 } else if (has_init
) {
7619 size_t oldreloc_offset
= 0;
7620 if (sec
&& sec
->reloc
)
7621 oldreloc_offset
= sec
->reloc
->data_offset
;
7622 decl_initializer(type
, sec
, addr
, DIF_FIRST
);
/* collapse duplicate relocations left by designated re-inits
   (see the note inside init_putv) */
7623 if (sec
&& sec
->reloc
)
7624 squeeze_multi_relocs(sec
, oldreloc_offset
);
7625 /* patch flexible array member size back to -1, */
7626 /* for possible subsequent similar declarations */
7628 flexible_array
->type
.ref
->c
= -1;
7632 /* restore parse state if needed */
7638 nocode_wanted
= saved_nocode_wanted
;
7641 /* parse a function defined by symbol 'sym' and generate its code in
7642 'cur_text_section' */
/* NOTE(review): interior lines are elided in this extraction; the
   annotations below cover only the visible tokens. */
7643 static void gen_function(Sym
*sym
, AttributeDef
*ad
)
7645 /* Initialize VLA state */
/* a fresh root scope lives on this stack frame for the whole body */
7646 struct scope f
= { 0 };
7647 cur_scope
= root_scope
= &f
;
7650 ind
= cur_text_section
->data_offset
;
/* honor __attribute__((aligned(n))) by nop-padding the entry point */
7651 if (sym
->a
.aligned
) {
7652 size_t newoff
= section_add(cur_text_section
, 0,
7653 1 << (sym
->a
.aligned
- 1));
7654 gen_fill_nops(newoff
- ind
);
7656 /* NOTE: we patch the symbol size later */
7657 put_extern_sym(sym
, cur_text_section
, ind
, 0);
/* constructor/destructor attributes register the function in the
   init/fini arrays */
7659 if (ad
&& ad
->a
.constructor
) {
7660 add_init_array (tcc_state
, sym
);
7662 if (ad
&& ad
->a
.destructor
) {
7663 add_fini_array (tcc_state
, sym
);
7666 funcname
= get_tok_str(sym
->v
, NULL
);
7669 /* put debug symbol */
7670 tcc_debug_funcstart(tcc_state
, sym
);
7671 /* push a dummy symbol to enable local sym storage */
7672 sym_push2(&local_stack
, SYM_FIELD
, 0, 0);
7673 local_scope
= 1; /* for function parameters */
7677 clear_temp_local_var_list();
/* body compiled here (elided); record the final code size */
7682 cur_text_section
->data_offset
= ind
;
7683 /* reset local stack */
7684 sym_pop(&local_stack
, NULL
, 0);
/* labels have function scope: drop them all now */
7686 label_pop(&global_label_stack
, NULL
, 0);
7687 sym_pop(&all_cleanups
, NULL
, 0);
7688 /* patch symbol size */
7689 elfsym(sym
)->st_size
= ind
- func_ind
;
7690 /* end of function */
7691 tcc_debug_funcend(tcc_state
, ind
- func_ind
);
7692 /* It's better to crash than to generate wrong code */
7693 cur_text_section
= NULL
;
7694 funcname
= ""; /* for safety */
7695 func_vt
.t
= VT_VOID
; /* for safety */
7696 func_var
= 0; /* for safety */
7697 ind
= 0; /* for safety */
/* suppress code generation until the next function starts */
7698 nocode_wanted
= 0x80000000;
/* Emit code for every recorded 'static inline' function that was
   actually referenced. Generating one inline function can reference
   another, so iterate until a full pass generates nothing new.
   NOTE(review): some interior lines are elided in this extraction. */
7702 static void gen_inline_functions(TCCState
*s
)
7705 int inline_generated
, i
;
7706 struct InlineFunc
*fn
;
/* synthetic input file so diagnostics have a name */
7708 tcc_open_bf(s
, ":inline:", 0);
7709 /* iterate while inline functions are referenced */
7711 inline_generated
= 0;
7712 for (i
= 0; i
< s
->nb_inline_fns
; ++i
) {
7713 fn
= s
->inline_fns
[i
];
/* sym->c set (symbol referenced) or inline-ness dropped => emit */
7715 if (sym
&& (sym
->c
|| !(sym
->type
.t
& VT_INLINE
))) {
7716 /* the function was used or forced (and then not internal):
7717 generate its code and convert it to a normal function */
7719 tcc_debug_putfile(s
, fn
->filename
);
/* replay the saved token string as macro input */
7720 begin_macro(fn
->func_str
, 1);
7722 cur_text_section
= text_section
;
7723 gen_function(sym
, NULL
);
7726 inline_generated
= 1;
7729 } while (inline_generated
);
/* Release the token strings of inline functions that were never
   emitted, then free the inline_fns array itself.
   NOTE(review): a few interior lines are elided in this extraction. */
7733 static void free_inline_functions(TCCState
*s
)
7736 /* free tokens of unused inline functions */
7737 for (i
= 0; i
< s
->nb_inline_fns
; ++i
) {
7738 struct InlineFunc
*fn
= s
->inline_fns
[i
];
7740 tok_str_free(fn
->func_str
);
/* frees each element and resets the dynarray to empty */
7742 dynarray_reset(&s
->inline_fns
, &s
->nb_inline_fns
);
7745 /* 'l' is VT_LOCAL or VT_CONST to define default storage type, or VT_CMP
7746 if parsing old style parameter decl list (and FUNC_SYM is set then) */
/* NOTE(review): interior lines are elided in this extraction; the
   annotations below cover only the visible tokens. */
7747 static int decl0(int l
, int is_for_loop_init
, Sym
*func_sym
)
7752 AttributeDef ad
, adbase
;
/* C11 _Static_assert handling */
7755 if (tok
== TOK_STATIC_ASSERT
) {
7763 tcc_error("%s", get_tok_str(tok
, &tokc
));
/* no base type could be parsed: decide how to recover */
7769 if (!parse_btype(&btype
, &adbase
)) {
7770 if (is_for_loop_init
)
7772 /* skip redundant ';' if not in old parameter decl scope */
7773 if (tok
== ';' && l
!= VT_CMP
) {
7779 if (tok
== TOK_ASM1
|| tok
== TOK_ASM2
|| tok
== TOK_ASM3
) {
7780 /* global asm block */
7784 if (tok
>= TOK_UIDENT
) {
7785 /* special test for old K&R protos without explicit int
7786 type. Only accepted when defining global data */
7790 expect("declaration");
/* bare 'struct {...};' with no declarator: warn if it is unnamed
   and thus declares nothing usable */
7795 if ((btype
.t
& VT_BTYPE
) == VT_STRUCT
) {
7796 int v
= btype
.ref
->v
;
7797 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) >= SYM_FIRST_ANOM
)
7798 tcc_warning("unnamed struct/union that defines no instances");
7802 if (IS_ENUM(btype
.t
)) {
7807 while (1) { /* iterate thru each declaration */
7809 /* If the base type itself was an array type of unspecified
7810 size (like in 'typedef int arr[]; arr x = {1};') then
7811 we will overwrite the unknown size by the real one for
7812 this decl. We need to unshare the ref symbol holding
7814 if ((type
.t
& VT_ARRAY
) && type
.ref
->c
< 0) {
7815 type
.ref
= sym_push(SYM_FIELD
, &type
.ref
->type
, 0, type
.ref
->c
);
7818 type_decl(&type
, &ad
, &v
, TYPE_DIRECT
);
/* debug dump of the parsed declarator (compiled conditionally) */
7822 type_to_str(buf
, sizeof(buf
), &type
, get_tok_str(v
, NULL
));
7823 printf("type = '%s'\n", buf
);
7826 if ((type
.t
& VT_BTYPE
) == VT_FUNC
) {
7827 if ((type
.t
& VT_STATIC
) && (l
== VT_LOCAL
))
7828 tcc_error("function without file scope cannot be static");
7829 /* if old style function prototype, we accept a
/* K&R prototype: recurse to read the old-style param decls */
7832 if (sym
->f
.func_type
== FUNC_OLD
&& l
== VT_CONST
)
7833 decl0(VT_CMP
, 0, sym
);
7834 /* always compile 'extern inline' */
7835 if (type
.t
& VT_EXTERN
)
7836 type
.t
&= ~VT_INLINE
;
/* GNU asm label after the declarator: asm("name") */
7839 if (gnu_ext
&& (tok
== TOK_ASM1
|| tok
== TOK_ASM2
|| tok
== TOK_ASM3
)) {
7840 ad
.asm_label
= asm_label_instr();
7841 /* parse one last attribute list, after asm label */
7842 parse_attribute(&ad
);
7844 /* gcc does not allow __asm__("label") with function definition,
7851 #ifdef TCC_TARGET_PE
7852 if (ad
.a
.dllimport
|| ad
.a
.dllexport
) {
7853 if (type
.t
& VT_STATIC
)
7854 tcc_error("cannot have dll linkage with static");
7855 if (type
.t
& VT_TYPEDEF
) {
7856 tcc_warning("'%s' attribute ignored for typedef",
7857 ad
.a
.dllimport
? (ad
.a
.dllimport
= 0, "dllimport") :
7858 (ad
.a
.dllexport
= 0, "dllexport"));
7859 } else if (ad
.a
.dllimport
) {
7860 if ((type
.t
& VT_BTYPE
) == VT_FUNC
)
7863 type
.t
|= VT_EXTERN
;
/* function DEFINITION path (body follows) */
7869 tcc_error("cannot use local functions");
7870 if ((type
.t
& VT_BTYPE
) != VT_FUNC
)
7871 expect("function definition");
7873 /* reject abstract declarators in function definition
7874 make old style params without decl have int type */
7876 while ((sym
= sym
->next
) != NULL
) {
7877 if (!(sym
->v
& ~SYM_FIELD
))
7878 expect("identifier");
7879 if (sym
->type
.t
== VT_VOID
)
7880 sym
->type
= int_type
;
7883 /* put function symbol */
7884 type
.t
&= ~VT_EXTERN
;
7885 sym
= external_sym(v
, &type
, 0, &ad
);
7886 /* static inline functions are just recorded as a kind
7887 of macro. Their code will be emitted at the end of
7888 the compilation unit only if they are used */
7889 if (sym
->type
.t
& VT_INLINE
) {
7890 struct InlineFunc
*fn
;
7891 const char *filename
;
/* InlineFunc carries its filename inline (flexible member) */
7893 filename
= file
? file
->filename
: "";
7894 fn
= tcc_malloc(sizeof *fn
+ strlen(filename
));
7895 strcpy(fn
->filename
, filename
);
/* save the body tokens; emitted later by gen_inline_functions */
7897 skip_or_save_block(&fn
->func_str
);
7898 dynarray_add(&tcc_state
->inline_fns
,
7899 &tcc_state
->nb_inline_fns
, fn
);
7901 /* compute text section */
7902 cur_text_section
= ad
.section
;
7903 if (!cur_text_section
)
7904 cur_text_section
= text_section
;
7905 gen_function(sym
, &ad
);
/* old-style (K&R) parameter declaration path: l == VT_CMP */
7910 /* find parameter in function parameter list */
7911 for (sym
= func_sym
->next
; sym
; sym
= sym
->next
)
7912 if ((sym
->v
& ~SYM_FIELD
) == v
)
7914 tcc_error("declaration for parameter '%s' but no such parameter",
7915 get_tok_str(v
, NULL
));
7917 if (type
.t
& VT_STORAGE
) /* 'register' is okay */
7918 tcc_error("storage class specified for '%s'",
7919 get_tok_str(v
, NULL
));
7920 if (sym
->type
.t
!= VT_VOID
)
7921 tcc_error("redefinition of parameter '%s'",
7922 get_tok_str(v
, NULL
));
7923 convert_parameter_type(&type
);
7925 } else if (type
.t
& VT_TYPEDEF
) {
7926 /* save typedefed type */
7927 /* XXX: test storage specifiers ? */
/* same-scope redefinition only legal when types are compatible */
7929 if (sym
&& sym
->sym_scope
== local_scope
) {
7930 if (!is_compatible_types(&sym
->type
, &type
)
7931 || !(sym
->type
.t
& VT_TYPEDEF
))
7932 tcc_error("incompatible redefinition of '%s'",
7933 get_tok_str(v
, NULL
));
7936 sym
= sym_push(v
, &type
, 0, 0);
7940 } else if ((type
.t
& VT_BTYPE
) == VT_VOID
7941 && !(type
.t
& VT_EXTERN
)) {
7942 tcc_error("declaration of void object");
7945 if ((type
.t
& VT_BTYPE
) == VT_FUNC
) {
7946 /* external function definition */
7947 /* specific case for func_call attribute */
7949 } else if (!(type
.t
& VT_ARRAY
)) {
7950 /* not lvalue if array */
7951 r
|= lvalue_type(type
.t
);
7953 has_init
= (tok
== '=');
7954 if (has_init
&& (type
.t
& VT_VLA
))
7955 tcc_error("variable length array cannot be initialized");
7956 if (((type
.t
& VT_EXTERN
) && (!has_init
|| l
!= VT_CONST
))
7957 || (type
.t
& VT_BTYPE
) == VT_FUNC
7958 /* as with GCC, uninitialized global arrays with no size
7959 are considered extern: */
7960 || ((type
.t
& VT_ARRAY
) && !has_init
7961 && l
== VT_CONST
&& type
.ref
->c
< 0)
7963 /* external variable or function */
7964 type
.t
|= VT_EXTERN
;
7965 sym
= external_sym(v
, &type
, r
, &ad
);
/* __attribute__((alias("target"))): reuse the target's ELF info */
7966 if (ad
.alias_target
) {
7969 alias_target
= sym_find(ad
.alias_target
);
7970 esym
= elfsym(alias_target
);
7972 tcc_error("unsupported forward __alias__ attribute");
7973 put_extern_sym2(sym
, esym
->st_shndx
, esym
->st_value
, esym
->st_size
, 0);
7976 if (type
.t
& VT_STATIC
)
7982 else if (l
== VT_CONST
)
7983 /* uninitialized global variables may be overridden */
7984 type
.t
|= VT_EXTERN
;
/* allocate storage and parse the initializer, if any */
7985 decl_initializer_alloc(&type
, &ad
, r
, has_init
, v
, l
);
7989 if (is_for_loop_init
)
/* Entry point for parsing declarations at storage level 'l'.
   NOTE(review): the body of this function is entirely elided in this
   extraction -- presumably it forwards to decl0(); verify against the
   full source. */
8001 static void decl(int l
)
8006 /* ------------------------------------------------------------------------- */
8009 /* ------------------------------------------------------------------------- */