/*
 * TCC - Tiny C Compiler
 *
 * Copyright (c) 2001-2004 Fabrice Bellard
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 */
24 /********************************************************/
25 /* global variables */
27 /* loc : local variable index
28 ind : output code index
30 anon_sym: anonymous symbol index
32 ST_DATA
int rsym
, anon_sym
, ind
, loc
;
34 ST_DATA Sym
*global_stack
;
35 ST_DATA Sym
*local_stack
;
36 ST_DATA Sym
*define_stack
;
37 ST_DATA Sym
*global_label_stack
;
38 ST_DATA Sym
*local_label_stack
;
40 static Sym
*sym_free_first
;
41 static void **sym_pools
;
42 static int nb_sym_pools
;
44 static Sym
*all_cleanups
, *pending_gotos
;
45 static int local_scope
;
47 static int in_generic
;
48 static int section_sym
;
51 static SValue _vstack
[1 + VSTACK_SIZE
];
52 #define vstack (_vstack + 1)
54 ST_DATA
int const_wanted
; /* true if constant wanted */
55 ST_DATA
int nocode_wanted
; /* no code generation wanted */
56 #define NODATA_WANTED (nocode_wanted > 0) /* no static data output wanted either */
57 #define STATIC_DATA_WANTED (nocode_wanted & 0xC0000000) /* only static data output */
59 /* Automagical code suppression ----> */
60 #define CODE_OFF() (nocode_wanted |= 0x20000000)
61 #define CODE_ON() (nocode_wanted &= ~0x20000000)
63 /* Clear 'nocode_wanted' at label if it was used */
64 ST_FUNC
void gsym(int t
) { if (t
) { gsym_addr(t
, ind
); CODE_ON(); }}
65 static int gind(void) { CODE_ON(); return ind
; }
67 /* Set 'nocode_wanted' after unconditional jumps */
/* Emit an unconditional jump to the known address 't', then suppress
   further code generation — everything after an unconditional jump is
   unreachable until a label (gsym/gind) turns it back on. */
static void gjmp_addr_acs(int t)
{
    gjmp_addr(t);
    CODE_OFF();
}
/* Emit a forward jump chained onto list 't', mark the following code
   as unreachable, and return the updated jump-chain handle. */
static int gjmp_acs(int t)
{
    int chain = gjmp(t);
    CODE_OFF();
    return chain;
}
71 /* These are #undef'd at the end of this file */
72 #define gjmp_addr gjmp_addr_acs
76 ST_DATA
int global_expr
; /* true if compound literals must be allocated globally (used during initializers parsing */
77 ST_DATA CType func_vt
; /* current function return type (used by return instruction) */
78 ST_DATA
int func_var
; /* true if current function is variadic (used by return instruction) */
80 static int last_line_num
, new_file
, func_ind
; /* debug info control */
81 ST_DATA
const char *funcname
;
82 ST_DATA CType int_type
, func_old_type
, char_pointer_type
;
85 #define VT_SIZE_T (VT_INT | VT_UNSIGNED)
86 #define VT_PTRDIFF_T VT_INT
88 #define VT_SIZE_T (VT_LLONG | VT_UNSIGNED)
89 #define VT_PTRDIFF_T VT_LLONG
91 #define VT_SIZE_T (VT_LONG | VT_LLONG | VT_UNSIGNED)
92 #define VT_PTRDIFF_T (VT_LONG | VT_LLONG)
95 ST_DATA
struct switch_t
{
99 } **p
; int n
; /* list of case ranges */
100 int def_sym
; /* default symbol */
103 } *cur_switch
; /* current switch */
105 #define MAX_TEMP_LOCAL_VARIABLE_NUMBER 0x4
106 /*list of temporary local variables on the stack in current function. */
107 ST_DATA
struct temp_local_variable
{
108 int location
; //offset on stack. Svalue.c.i
111 } arr_temp_local_vars
[MAX_TEMP_LOCAL_VARIABLE_NUMBER
];
112 short nb_temp_local_vars
;
114 static struct scope
{
116 struct { int loc
, num
; } vla
;
117 struct { Sym
*s
; int n
; } cl
;
120 } *cur_scope
, *loop_scope
, *root_scope
;
122 /********************************************************/
123 #ifndef CONFIG_TCC_ASM
124 ST_FUNC
void asm_instr(void)
126 tcc_error("inline asm() not supported");
128 ST_FUNC
void asm_global_instr(void)
130 tcc_error("inline asm() not supported");
134 /* ------------------------------------------------------------------------- */
136 static void gen_cast(CType
*type
);
137 static void gen_cast_s(int t
);
138 static inline CType
*pointed_type(CType
*type
);
139 static int is_compatible_types(CType
*type1
, CType
*type2
);
140 static int parse_btype(CType
*type
, AttributeDef
*ad
);
141 static CType
*type_decl(CType
*type
, AttributeDef
*ad
, int *v
, int td
);
142 static void parse_expr_type(CType
*type
);
143 static void init_putv(CType
*type
, Section
*sec
, unsigned long c
);
144 static void decl_initializer(CType
*type
, Section
*sec
, unsigned long c
, int flags
);
145 static void block(int is_expr
);
146 static void decl_initializer_alloc(CType
*type
, AttributeDef
*ad
, int r
, int has_init
, int v
, int scope
);
147 static void decl(int l
);
148 static int decl0(int l
, int is_for_loop_init
, Sym
*);
149 static void expr_eq(void);
150 static void vla_runtime_type_size(CType
*type
, int *a
);
151 static int is_compatible_unqualified_types(CType
*type1
, CType
*type2
);
152 static inline int64_t expr_const64(void);
153 static void vpush64(int ty
, unsigned long long v
);
154 static void vpush(CType
*type
);
155 static int gvtst(int inv
, int t
);
156 static void gen_inline_functions(TCCState
*s
);
157 static void free_inline_functions(TCCState
*s
);
158 static void skip_or_save_block(TokenString
**str
);
159 static void gv_dup(void);
160 static int get_temp_local_var(int size
,int align
);
161 static void clear_temp_local_var_list();
162 static void cast_error(CType
*st
, CType
*dt
);
164 ST_INLN
int is_float(int t
)
166 int bt
= t
& VT_BTYPE
;
167 return bt
== VT_LDOUBLE
173 static inline int is_integer_btype(int bt
)
182 static int btype_size(int bt
)
184 return bt
== VT_BYTE
|| bt
== VT_BOOL
? 1 :
188 bt
== VT_PTR
? PTR_SIZE
: 0;
191 /* returns function return register from type */
192 static int R_RET(int t
)
196 #ifdef TCC_TARGET_X86_64
197 if ((t
& VT_BTYPE
) == VT_LDOUBLE
)
199 #elif defined TCC_TARGET_RISCV64
200 if ((t
& VT_BTYPE
) == VT_LDOUBLE
)
206 /* returns 2nd function return register, if any */
207 static int R2_RET(int t
)
213 #elif defined TCC_TARGET_X86_64
218 #elif defined TCC_TARGET_RISCV64
225 /* returns true for two-word types */
226 #define USING_TWO_WORDS(t) (R2_RET(t) != VT_CONST)
228 /* put function return registers to stack value */
229 static void PUT_R_RET(SValue
*sv
, int t
)
231 sv
->r
= R_RET(t
), sv
->r2
= R2_RET(t
);
234 /* returns function return register class for type t */
235 static int RC_RET(int t
)
237 return reg_classes
[R_RET(t
)] & ~(RC_FLOAT
| RC_INT
);
240 /* returns generic register class for type t */
241 static int RC_TYPE(int t
)
245 #ifdef TCC_TARGET_X86_64
246 if ((t
& VT_BTYPE
) == VT_LDOUBLE
)
248 if ((t
& VT_BTYPE
) == VT_QFLOAT
)
250 #elif defined TCC_TARGET_RISCV64
251 if ((t
& VT_BTYPE
) == VT_LDOUBLE
)
257 /* returns 2nd register class corresponding to t and rc */
258 static int RC2_TYPE(int t
, int rc
)
260 if (!USING_TWO_WORDS(t
))
275 /* we use our own 'finite' function to avoid potential problems with
276 non standard math libs */
277 /* XXX: endianness dependent */
278 ST_FUNC
int ieee_finite(double d
)
281 memcpy(p
, &d
, sizeof(double));
282 return ((unsigned)((p
[1] | 0x800fffff) + 1)) >> 31;
285 /* compiling intel long double natively */
286 #if (defined __i386__ || defined __x86_64__) \
287 && (defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64)
288 # define TCC_IS_NATIVE_387
291 ST_FUNC
void test_lvalue(void)
293 if (!(vtop
->r
& VT_LVAL
))
297 ST_FUNC
void check_vstack(void)
299 if (vtop
!= vstack
- 1)
300 tcc_error("internal compiler error: vstack leak (%d)", vtop
- vstack
+ 1);
303 /* ------------------------------------------------------------------------- */
304 /* vstack debugging aid */
307 void pv (const char *lbl
, int a
, int b
)
310 for (i
= a
; i
< a
+ b
; ++i
) {
311 SValue
*p
= &vtop
[-i
];
312 printf("%s vtop[-%d] : type.t:%04x r:%04x r2:%04x c.i:%d\n",
313 lbl
, i
, p
->type
.t
, p
->r
, p
->r2
, (int)p
->c
.i
);
318 /* ------------------------------------------------------------------------- */
319 /* start of translation unit info */
320 ST_FUNC
void tcc_debug_start(TCCState
*s1
)
325 /* file info: full path + filename */
326 section_sym
= put_elf_sym(symtab_section
, 0, 0,
327 ELFW(ST_INFO
)(STB_LOCAL
, STT_SECTION
), 0,
328 text_section
->sh_num
, NULL
);
329 getcwd(buf
, sizeof(buf
));
331 normalize_slashes(buf
);
333 pstrcat(buf
, sizeof(buf
), "/");
334 put_stabs_r(s1
, buf
, N_SO
, 0, 0,
335 text_section
->data_offset
, text_section
, section_sym
);
336 put_stabs_r(s1
, file
->prev
->filename
, N_SO
, 0, 0,
337 text_section
->data_offset
, text_section
, section_sym
);
338 new_file
= last_line_num
= 0;
340 /* we're currently 'including' the <command line> */
344 /* an elf symbol of type STT_FILE must be put so that STB_LOCAL
345 symbols can be safely used */
346 put_elf_sym(symtab_section
, 0, 0,
347 ELFW(ST_INFO
)(STB_LOCAL
, STT_FILE
), 0,
348 SHN_ABS
, file
->filename
);
351 /* put end of translation unit info */
352 ST_FUNC
void tcc_debug_end(TCCState
*s1
)
356 put_stabs_r(s1
, NULL
, N_SO
, 0, 0,
357 text_section
->data_offset
, text_section
, section_sym
);
360 static BufferedFile
* put_new_file(TCCState
*s1
)
362 BufferedFile
*f
= file
;
363 /* use upper file if from inline ":asm:" */
364 if (f
->filename
[0] == ':')
367 put_stabs_r(s1
, f
->filename
, N_SOL
, 0, 0, ind
, text_section
, section_sym
);
368 new_file
= last_line_num
= 0;
373 /* generate line number info */
374 ST_FUNC
void tcc_debug_line(TCCState
*s1
)
377 if (!s1
->do_debug
|| !(f
= put_new_file(s1
)))
379 if (last_line_num
== f
->line_num
)
381 if (text_section
!= cur_text_section
)
383 if (func_ind
!= -1) {
384 put_stabn(s1
, N_SLINE
, 0, f
->line_num
, ind
- func_ind
);
386 /* from tcc_assemble */
387 put_stabs_r(s1
, NULL
, N_SLINE
, 0, f
->line_num
, ind
, text_section
, section_sym
);
389 last_line_num
= f
->line_num
;
392 /* put function symbol */
393 ST_FUNC
void tcc_debug_funcstart(TCCState
*s1
, Sym
*sym
)
397 if (!s1
->do_debug
|| !(f
= put_new_file(s1
)))
399 /* XXX: we put here a dummy type */
400 snprintf(buf
, sizeof(buf
), "%s:%c1",
401 funcname
, sym
->type
.t
& VT_STATIC
? 'f' : 'F');
402 put_stabs_r(s1
, buf
, N_FUN
, 0, f
->line_num
, 0, cur_text_section
, sym
->c
);
406 /* put function size */
407 ST_FUNC
void tcc_debug_funcend(TCCState
*s1
, int size
)
411 #if 0 // this seems to confuse gnu tools
412 put_stabn(s1
, N_FUN
, 0, 0, size
);
416 /* put alternative filename */
417 ST_FUNC
void tcc_debug_putfile(TCCState
*s1
, const char *filename
)
419 if (0 == strcmp(file
->filename
, filename
))
421 pstrcpy(file
->filename
, sizeof(file
->filename
), filename
);
425 /* begin of #include */
426 ST_FUNC
void tcc_debug_bincl(TCCState
*s1
)
430 put_stabs(s1
, file
->filename
, N_BINCL
, 0, 0, 0);
434 /* end of #include */
435 ST_FUNC
void tcc_debug_eincl(TCCState
*s1
)
439 put_stabn(s1
, N_EINCL
, 0, 0, 0);
443 /* ------------------------------------------------------------------------- */
444 /* initialize vstack and types. This must be done also for tcc -E */
445 ST_FUNC
void tccgen_init(TCCState
*s1
)
448 memset(vtop
, 0, sizeof *vtop
);
450 /* define some often used types */
452 char_pointer_type
.t
= VT_BYTE
;
453 mk_pointer(&char_pointer_type
);
454 func_old_type
.t
= VT_FUNC
;
455 func_old_type
.ref
= sym_push(SYM_FIELD
, &int_type
, 0, 0);
456 func_old_type
.ref
->f
.func_call
= FUNC_CDECL
;
457 func_old_type
.ref
->f
.func_type
= FUNC_OLD
;
460 ST_FUNC
int tccgen_compile(TCCState
*s1
)
462 cur_text_section
= NULL
;
464 anon_sym
= SYM_FIRST_ANOM
;
467 nocode_wanted
= 0x80000000;
471 #ifdef TCC_TARGET_ARM
475 printf("%s: **** new file\n", file
->filename
);
477 parse_flags
= PARSE_FLAG_PREPROCESS
| PARSE_FLAG_TOK_NUM
| PARSE_FLAG_TOK_STR
;
480 gen_inline_functions(s1
);
482 /* end of translation unit info */
487 ST_FUNC
void tccgen_finish(TCCState
*s1
)
489 free_inline_functions(s1
);
490 sym_pop(&global_stack
, NULL
, 0);
491 sym_pop(&local_stack
, NULL
, 0);
492 /* free preprocessor macros */
495 dynarray_reset(&sym_pools
, &nb_sym_pools
);
496 sym_free_first
= NULL
;
499 /* ------------------------------------------------------------------------- */
500 ST_FUNC ElfSym
*elfsym(Sym
*s
)
504 return &((ElfSym
*)symtab_section
->data
)[s
->c
];
507 /* apply storage attributes to Elf symbol */
508 ST_FUNC
void update_storage(Sym
*sym
)
511 int sym_bind
, old_sym_bind
;
517 if (sym
->a
.visibility
)
518 esym
->st_other
= (esym
->st_other
& ~ELFW(ST_VISIBILITY
)(-1))
521 if (sym
->type
.t
& (VT_STATIC
| VT_INLINE
))
522 sym_bind
= STB_LOCAL
;
523 else if (sym
->a
.weak
)
526 sym_bind
= STB_GLOBAL
;
527 old_sym_bind
= ELFW(ST_BIND
)(esym
->st_info
);
528 if (sym_bind
!= old_sym_bind
) {
529 esym
->st_info
= ELFW(ST_INFO
)(sym_bind
, ELFW(ST_TYPE
)(esym
->st_info
));
533 if (sym
->a
.dllimport
)
534 esym
->st_other
|= ST_PE_IMPORT
;
535 if (sym
->a
.dllexport
)
536 esym
->st_other
|= ST_PE_EXPORT
;
540 printf("storage %s: bind=%c vis=%d exp=%d imp=%d\n",
541 get_tok_str(sym
->v
, NULL
),
542 sym_bind
== STB_WEAK
? 'w' : sym_bind
== STB_LOCAL
? 'l' : 'g',
550 /* ------------------------------------------------------------------------- */
551 /* update sym->c so that it points to an external symbol in section
552 'section' with value 'value' */
554 ST_FUNC
void put_extern_sym2(Sym
*sym
, int sh_num
,
555 addr_t value
, unsigned long size
,
556 int can_add_underscore
)
558 int sym_type
, sym_bind
, info
, other
, t
;
562 #ifdef CONFIG_TCC_BCHECK
567 name
= get_tok_str(sym
->v
, NULL
);
568 #ifdef CONFIG_TCC_BCHECK
569 if (tcc_state
->do_bounds_check
) {
570 /* XXX: avoid doing that for statics ? */
571 /* if bound checking is activated, we change some function
572 names by adding the "__bound" prefix */
575 /* XXX: we rely only on malloc hooks */
597 strcpy(buf
, "__bound_");
605 if ((t
& VT_BTYPE
) == VT_FUNC
) {
607 } else if ((t
& VT_BTYPE
) == VT_VOID
) {
608 sym_type
= STT_NOTYPE
;
610 sym_type
= STT_OBJECT
;
612 if (t
& (VT_STATIC
| VT_INLINE
))
613 sym_bind
= STB_LOCAL
;
615 sym_bind
= STB_GLOBAL
;
618 if (sym_type
== STT_FUNC
&& sym
->type
.ref
) {
619 Sym
*ref
= sym
->type
.ref
;
620 if (ref
->a
.nodecorate
) {
621 can_add_underscore
= 0;
623 if (ref
->f
.func_call
== FUNC_STDCALL
&& can_add_underscore
) {
624 sprintf(buf1
, "_%s@%d", name
, ref
->f
.func_args
* PTR_SIZE
);
626 other
|= ST_PE_STDCALL
;
627 can_add_underscore
= 0;
631 if (tcc_state
->leading_underscore
&& can_add_underscore
) {
633 pstrcpy(buf1
+ 1, sizeof(buf1
) - 1, name
);
637 name
= get_tok_str(sym
->asm_label
, NULL
);
638 info
= ELFW(ST_INFO
)(sym_bind
, sym_type
);
639 sym
->c
= put_elf_sym(symtab_section
, value
, size
, info
, other
, sh_num
, name
);
642 esym
->st_value
= value
;
643 esym
->st_size
= size
;
644 esym
->st_shndx
= sh_num
;
649 ST_FUNC
void put_extern_sym(Sym
*sym
, Section
*section
,
650 addr_t value
, unsigned long size
)
652 int sh_num
= section
? section
->sh_num
: SHN_UNDEF
;
653 put_extern_sym2(sym
, sh_num
, value
, size
, 1);
656 /* add a new relocation entry to symbol 'sym' in section 's' */
657 ST_FUNC
void greloca(Section
*s
, Sym
*sym
, unsigned long offset
, int type
,
662 if (nocode_wanted
&& s
== cur_text_section
)
667 put_extern_sym(sym
, NULL
, 0, 0);
671 /* now we can add ELF relocation info */
672 put_elf_reloca(symtab_section
, s
, offset
, type
, c
, addend
);
676 ST_FUNC
void greloc(Section
*s
, Sym
*sym
, unsigned long offset
, int type
)
678 greloca(s
, sym
, offset
, type
, 0);
682 /* ------------------------------------------------------------------------- */
683 /* symbol allocator */
684 static Sym
*__sym_malloc(void)
686 Sym
*sym_pool
, *sym
, *last_sym
;
689 sym_pool
= tcc_malloc(SYM_POOL_NB
* sizeof(Sym
));
690 dynarray_add(&sym_pools
, &nb_sym_pools
, sym_pool
);
692 last_sym
= sym_free_first
;
694 for(i
= 0; i
< SYM_POOL_NB
; i
++) {
695 sym
->next
= last_sym
;
699 sym_free_first
= last_sym
;
703 static inline Sym
*sym_malloc(void)
707 sym
= sym_free_first
;
709 sym
= __sym_malloc();
710 sym_free_first
= sym
->next
;
713 sym
= tcc_malloc(sizeof(Sym
));
718 ST_INLN
void sym_free(Sym
*sym
)
721 sym
->next
= sym_free_first
;
722 sym_free_first
= sym
;
728 /* push, without hashing */
729 ST_FUNC Sym
*sym_push2(Sym
**ps
, int v
, int t
, int c
)
734 memset(s
, 0, sizeof *s
);
744 /* find a symbol and return its associated structure. 's' is the top
745 of the symbol stack */
746 ST_FUNC Sym
*sym_find2(Sym
*s
, int v
)
758 /* structure lookup */
759 ST_INLN Sym
*struct_find(int v
)
762 if ((unsigned)v
>= (unsigned)(tok_ident
- TOK_IDENT
))
764 return table_ident
[v
]->sym_struct
;
767 /* find an identifier */
768 ST_INLN Sym
*sym_find(int v
)
771 if ((unsigned)v
>= (unsigned)(tok_ident
- TOK_IDENT
))
773 return table_ident
[v
]->sym_identifier
;
776 static int sym_scope(Sym
*s
)
778 if (IS_ENUM_VAL (s
->type
.t
))
779 return s
->type
.ref
->sym_scope
;
784 /* push a given symbol on the symbol stack */
785 ST_FUNC Sym
*sym_push(int v
, CType
*type
, int r
, int c
)
794 s
= sym_push2(ps
, v
, type
->t
, c
);
795 s
->type
.ref
= type
->ref
;
797 /* don't record fields or anonymous symbols */
799 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
800 /* record symbol in token array */
801 ts
= table_ident
[(v
& ~SYM_STRUCT
) - TOK_IDENT
];
803 ps
= &ts
->sym_struct
;
805 ps
= &ts
->sym_identifier
;
808 s
->sym_scope
= local_scope
;
809 if (s
->prev_tok
&& sym_scope(s
->prev_tok
) == s
->sym_scope
)
810 tcc_error("redeclaration of '%s'",
811 get_tok_str(v
& ~SYM_STRUCT
, NULL
));
816 /* push a global identifier */
817 ST_FUNC Sym
*global_identifier_push(int v
, int t
, int c
)
820 s
= sym_push2(&global_stack
, v
, t
, c
);
821 s
->r
= VT_CONST
| VT_SYM
;
822 /* don't record anonymous symbol */
823 if (v
< SYM_FIRST_ANOM
) {
824 ps
= &table_ident
[v
- TOK_IDENT
]->sym_identifier
;
825 /* modify the top most local identifier, so that sym_identifier will
826 point to 's' when popped; happens when called from inline asm */
827 while (*ps
!= NULL
&& (*ps
)->sym_scope
)
828 ps
= &(*ps
)->prev_tok
;
835 /* pop symbols until top reaches 'b'. If KEEP is non-zero don't really
836 pop them yet from the list, but do remove them from the token array. */
837 ST_FUNC
void sym_pop(Sym
**ptop
, Sym
*b
, int keep
)
847 /* remove symbol in token array */
849 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
850 ts
= table_ident
[(v
& ~SYM_STRUCT
) - TOK_IDENT
];
852 ps
= &ts
->sym_struct
;
854 ps
= &ts
->sym_identifier
;
865 /* ------------------------------------------------------------------------- */
866 static void vcheck_cmp(void)
868 /* cannot let cpu flags if other instruction are generated. Also
869 avoid leaving VT_JMP anywhere except on the top of the stack
870 because it would complicate the code generator.
872 Don't do this when nocode_wanted. vtop might come from
873 !nocode_wanted regions (see 88_codeopt.c) and transforming
874 it to a register without actually generating code is wrong
875 as their value might still be used for real. All values
876 we push under nocode_wanted will eventually be popped
877 again, so that the VT_CMP/VT_JMP value will be in vtop
878 when code is unsuppressed again. */
880 if (vtop
->r
== VT_CMP
&& !nocode_wanted
)
884 static void vsetc(CType
*type
, int r
, CValue
*vc
)
886 if (vtop
>= vstack
+ (VSTACK_SIZE
- 1))
887 tcc_error("memory full (vstack)");
897 ST_FUNC
void vswap(void)
907 /* pop stack value */
908 ST_FUNC
void vpop(void)
911 v
= vtop
->r
& VT_VALMASK
;
912 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
913 /* for x86, we need to pop the FP stack */
915 o(0xd8dd); /* fstp %st(0) */
919 /* need to put correct jump if && or || without test */
926 /* push constant of type "type" with useless value */
927 static void vpush(CType
*type
)
929 vset(type
, VT_CONST
, 0);
932 /* push arbitrary 64bit constant */
933 static void vpush64(int ty
, unsigned long long v
)
940 vsetc(&ctype
, VT_CONST
, &cval
);
943 /* push integer constant */
944 ST_FUNC
void vpushi(int v
)
949 /* push a pointer sized constant */
950 static void vpushs(addr_t v
)
952 vpush64(VT_SIZE_T
, v
);
955 /* push long long constant */
956 static inline void vpushll(long long v
)
958 vpush64(VT_LLONG
, v
);
961 ST_FUNC
void vset(CType
*type
, int r
, int v
)
965 vsetc(type
, r
, &cval
);
968 static void vseti(int r
, int v
)
976 ST_FUNC
void vpushv(SValue
*v
)
978 if (vtop
>= vstack
+ (VSTACK_SIZE
- 1))
979 tcc_error("memory full (vstack)");
984 static void vdup(void)
989 /* rotate n first stack elements to the bottom
990 I1 ... In -> I2 ... In I1 [top is right]
992 ST_FUNC
void vrotb(int n
)
1000 vtop
[i
] = vtop
[i
+1];
1004 /* rotate the n elements before entry e towards the top
1005 I1 ... In ... -> In I1 ... I(n-1) ... [top is right]
1007 ST_FUNC
void vrote(SValue
*e
, int n
)
1014 for(i
= 0;i
< n
- 1; i
++)
1019 /* rotate n first stack elements to the top
1020 I1 ... In -> In I1 ... I(n-1) [top is right]
1022 ST_FUNC
void vrott(int n
)
1027 /* ------------------------------------------------------------------------- */
1028 /* vtop->r = VT_CMP means CPU-flags have been set from comparison or test. */
1030 /* called from generators to set the result from relational ops */
1031 ST_FUNC
void vset_VT_CMP(int op
)
1039 /* called once before asking generators to load VT_CMP to a register */
1040 static void vset_VT_JMP(void)
1042 int op
= vtop
->cmp_op
;
1043 if (vtop
->jtrue
|| vtop
->jfalse
) {
1044 /* we need to jump to 'mov $0,%R' or 'mov $1,%R' */
1045 int inv
= op
& (op
< 2); /* small optimization */
1046 vseti(VT_JMP
+inv
, gvtst(inv
, 0));
1048 /* otherwise convert flags (rsp. 0/1) to register */
1050 if (op
< 2) /* doesn't seem to happen */
1055 /* Set CPU Flags, doesn't yet jump */
1056 static void gvtst_set(int inv
, int t
)
1059 if (vtop
->r
!= VT_CMP
) {
1062 if (vtop
->r
== VT_CMP
) /* must be VT_CONST otherwise */
1064 else if (vtop
->r
== VT_CONST
)
1065 vset_VT_CMP(vtop
->c
.i
!= 0);
1069 p
= inv
? &vtop
->jfalse
: &vtop
->jtrue
;
1070 *p
= gjmp_append(*p
, t
);
1073 /* Generate value test
1075 * Generate a test for any value (jump, comparison and integers) */
1076 static int gvtst(int inv
, int t
)
1082 t
= vtop
->jtrue
, u
= vtop
->jfalse
;
1084 x
= u
, u
= t
, t
= x
;
1087 /* jump to the wanted target */
1089 t
= gjmp_cond(op
^ inv
, t
);
1092 /* resolve complementary jumps to here */
1099 /* generate a zero or nozero test */
1100 static void gen_test_zero(int op
)
1102 if (vtop
->r
== VT_CMP
) {
1106 vtop
->jfalse
= vtop
->jtrue
;
1116 /* ------------------------------------------------------------------------- */
1117 /* push a symbol value of TYPE */
1118 static inline void vpushsym(CType
*type
, Sym
*sym
)
1122 vsetc(type
, VT_CONST
| VT_SYM
, &cval
);
1126 /* Return a static symbol pointing to a section */
1127 ST_FUNC Sym
*get_sym_ref(CType
*type
, Section
*sec
, unsigned long offset
, unsigned long size
)
1133 sym
= sym_push(v
, type
, VT_CONST
| VT_SYM
, 0);
1134 sym
->type
.t
|= VT_STATIC
;
1135 put_extern_sym(sym
, sec
, offset
, size
);
1139 /* push a reference to a section offset by adding a dummy symbol */
1140 static void vpush_ref(CType
*type
, Section
*sec
, unsigned long offset
, unsigned long size
)
1142 vpushsym(type
, get_sym_ref(type
, sec
, offset
, size
));
1145 /* define a new external reference to a symbol 'v' of type 'u' */
1146 ST_FUNC Sym
*external_global_sym(int v
, CType
*type
)
1152 /* push forward reference */
1153 s
= global_identifier_push(v
, type
->t
| VT_EXTERN
, 0);
1154 s
->type
.ref
= type
->ref
;
1155 } else if (IS_ASM_SYM(s
)) {
1156 s
->type
.t
= type
->t
| (s
->type
.t
& VT_EXTERN
);
1157 s
->type
.ref
= type
->ref
;
1163 /* Merge symbol attributes. */
1164 static void merge_symattr(struct SymAttr
*sa
, struct SymAttr
*sa1
)
1166 if (sa1
->aligned
&& !sa
->aligned
)
1167 sa
->aligned
= sa1
->aligned
;
1168 sa
->packed
|= sa1
->packed
;
1169 sa
->weak
|= sa1
->weak
;
1170 if (sa1
->visibility
!= STV_DEFAULT
) {
1171 int vis
= sa
->visibility
;
1172 if (vis
== STV_DEFAULT
1173 || vis
> sa1
->visibility
)
1174 vis
= sa1
->visibility
;
1175 sa
->visibility
= vis
;
1177 sa
->dllexport
|= sa1
->dllexport
;
1178 sa
->nodecorate
|= sa1
->nodecorate
;
1179 sa
->dllimport
|= sa1
->dllimport
;
1182 /* Merge function attributes. */
1183 static void merge_funcattr(struct FuncAttr
*fa
, struct FuncAttr
*fa1
)
1185 if (fa1
->func_call
&& !fa
->func_call
)
1186 fa
->func_call
= fa1
->func_call
;
1187 if (fa1
->func_type
&& !fa
->func_type
)
1188 fa
->func_type
= fa1
->func_type
;
1189 if (fa1
->func_args
&& !fa
->func_args
)
1190 fa
->func_args
= fa1
->func_args
;
1193 /* Merge attributes. */
1194 static void merge_attr(AttributeDef
*ad
, AttributeDef
*ad1
)
1196 merge_symattr(&ad
->a
, &ad1
->a
);
1197 merge_funcattr(&ad
->f
, &ad1
->f
);
1200 ad
->section
= ad1
->section
;
1201 if (ad1
->alias_target
)
1202 ad
->alias_target
= ad1
->alias_target
;
1204 ad
->asm_label
= ad1
->asm_label
;
1206 ad
->attr_mode
= ad1
->attr_mode
;
1209 /* Merge some type attributes. */
1210 static void patch_type(Sym
*sym
, CType
*type
)
1212 if (!(type
->t
& VT_EXTERN
) || IS_ENUM_VAL(sym
->type
.t
)) {
1213 if (!(sym
->type
.t
& VT_EXTERN
))
1214 tcc_error("redefinition of '%s'", get_tok_str(sym
->v
, NULL
));
1215 sym
->type
.t
&= ~VT_EXTERN
;
1218 if (IS_ASM_SYM(sym
)) {
1219 /* stay static if both are static */
1220 sym
->type
.t
= type
->t
& (sym
->type
.t
| ~VT_STATIC
);
1221 sym
->type
.ref
= type
->ref
;
1224 if (!is_compatible_types(&sym
->type
, type
)) {
1225 tcc_error("incompatible types for redefinition of '%s'",
1226 get_tok_str(sym
->v
, NULL
));
1228 } else if ((sym
->type
.t
& VT_BTYPE
) == VT_FUNC
) {
1229 int static_proto
= sym
->type
.t
& VT_STATIC
;
1230 /* warn if static follows non-static function declaration */
1231 if ((type
->t
& VT_STATIC
) && !static_proto
1232 /* XXX this test for inline shouldn't be here. Until we
1233 implement gnu-inline mode again it silences a warning for
1234 mingw caused by our workarounds. */
1235 && !((type
->t
| sym
->type
.t
) & VT_INLINE
))
1236 tcc_warning("static storage ignored for redefinition of '%s'",
1237 get_tok_str(sym
->v
, NULL
));
1239 /* set 'inline' if both agree or if one has static */
1240 if ((type
->t
| sym
->type
.t
) & VT_INLINE
) {
1241 if (!((type
->t
^ sym
->type
.t
) & VT_INLINE
)
1242 || ((type
->t
| sym
->type
.t
) & VT_STATIC
))
1243 static_proto
|= VT_INLINE
;
1246 if (0 == (type
->t
& VT_EXTERN
)) {
1247 /* put complete type, use static from prototype */
1248 sym
->type
.t
= (type
->t
& ~(VT_STATIC
|VT_INLINE
)) | static_proto
;
1249 sym
->type
.ref
= type
->ref
;
1251 sym
->type
.t
&= ~VT_INLINE
| static_proto
;
1254 if (sym
->type
.ref
->f
.func_type
== FUNC_OLD
1255 && type
->ref
->f
.func_type
!= FUNC_OLD
) {
1256 sym
->type
.ref
= type
->ref
;
1260 if ((sym
->type
.t
& VT_ARRAY
) && type
->ref
->c
>= 0) {
1261 /* set array size if it was omitted in extern declaration */
1262 sym
->type
.ref
->c
= type
->ref
->c
;
1264 if ((type
->t
^ sym
->type
.t
) & VT_STATIC
)
1265 tcc_warning("storage mismatch for redefinition of '%s'",
1266 get_tok_str(sym
->v
, NULL
));
1270 /* Merge some storage attributes. */
1271 static void patch_storage(Sym
*sym
, AttributeDef
*ad
, CType
*type
)
1274 patch_type(sym
, type
);
1276 #ifdef TCC_TARGET_PE
1277 if (sym
->a
.dllimport
!= ad
->a
.dllimport
)
1278 tcc_error("incompatible dll linkage for redefinition of '%s'",
1279 get_tok_str(sym
->v
, NULL
));
1281 merge_symattr(&sym
->a
, &ad
->a
);
1283 sym
->asm_label
= ad
->asm_label
;
1284 update_storage(sym
);
1287 /* copy sym to other stack */
1288 static Sym
*sym_copy(Sym
*s0
, Sym
**ps
)
1291 s
= sym_malloc(), *s
= *s0
;
1292 s
->prev
= *ps
, *ps
= s
;
1293 if (s
->v
< SYM_FIRST_ANOM
) {
1294 ps
= &table_ident
[s
->v
- TOK_IDENT
]->sym_identifier
;
1295 s
->prev_tok
= *ps
, *ps
= s
;
1300 /* copy s->type.ref to stack 'ps' for VT_FUNC and VT_PTR */
1301 static void sym_copy_ref(Sym
*s
, Sym
**ps
)
1303 int bt
= s
->type
.t
& VT_BTYPE
;
1304 if (bt
== VT_FUNC
|| bt
== VT_PTR
) {
1305 Sym
**sp
= &s
->type
.ref
;
1306 for (s
= *sp
, *sp
= NULL
; s
; s
= s
->next
) {
1307 Sym
*s2
= sym_copy(s
, ps
);
1308 sp
= &(*sp
= s2
)->next
;
1309 sym_copy_ref(s2
, ps
);
1314 /* define a new external reference to a symbol 'v' */
1315 static Sym
*external_sym(int v
, CType
*type
, int r
, AttributeDef
*ad
)
1319 /* look for global symbol */
1321 while (s
&& s
->sym_scope
)
1325 /* push forward reference */
1326 s
= global_identifier_push(v
, type
->t
, 0);
1329 s
->asm_label
= ad
->asm_label
;
1330 s
->type
.ref
= type
->ref
;
1331 /* copy type to the global stack */
1333 sym_copy_ref(s
, &global_stack
);
1335 patch_storage(s
, ad
, type
);
1337 /* push variables on local_stack if any */
1338 if (local_stack
&& (s
->type
.t
& VT_BTYPE
) != VT_FUNC
)
1339 s
= sym_copy(s
, &local_stack
);
1343 /* push a reference to global symbol v */
1344 ST_FUNC
void vpush_global_sym(CType
*type
, int v
)
1346 vpushsym(type
, external_global_sym(v
, type
));
1349 /* save registers up to (vtop - n) stack entry */
1350 ST_FUNC
void save_regs(int n
)
1353 for(p
= vstack
, p1
= vtop
- n
; p
<= p1
; p
++)
1357 /* save r to the memory stack, and mark it as being free */
1358 ST_FUNC
void save_reg(int r
)
1360 save_reg_upstack(r
, 0);
1363 /* save r to the memory stack, and mark it as being free,
1364 if seen up to (vtop - n) stack entry */
1365 ST_FUNC
void save_reg_upstack(int r
, int n
)
1367 int l
, size
, align
, bt
;
1370 if ((r
&= VT_VALMASK
) >= VT_CONST
)
1375 for(p
= vstack
, p1
= vtop
- n
; p
<= p1
; p
++) {
1376 if ((p
->r
& VT_VALMASK
) == r
|| p
->r2
== r
) {
1377 /* must save value on stack if not already done */
1379 bt
= p
->type
.t
& VT_BTYPE
;
1382 if ((p
->r
& VT_LVAL
) || bt
== VT_FUNC
)
1385 size
= type_size(&sv
.type
, &align
);
1386 #ifdef CONFIG_TCC_BCHECK
1387 if (tcc_state
->do_bounds_check
)
1388 l
= loc
= (loc
- size
) & -align
;
1391 l
= get_temp_local_var(size
,align
);
1392 sv
.r
= VT_LOCAL
| VT_LVAL
;
1394 store(p
->r
& VT_VALMASK
, &sv
);
1395 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
1396 /* x86 specific: need to pop fp register ST0 if saved */
1397 if (r
== TREG_ST0
) {
1398 o(0xd8dd); /* fstp %st(0) */
1401 /* special long long case */
1402 if (p
->r2
< VT_CONST
&& USING_TWO_WORDS(bt
)) {
1407 /* mark that stack entry as being saved on the stack */
1408 if (p
->r
& VT_LVAL
) {
1409 /* also clear the bounded flag because the
1410 relocation address of the function was stored in
1412 p
->r
= (p
->r
& ~(VT_VALMASK
| VT_BOUNDED
)) | VT_LLOCAL
;
1414 p
->r
= VT_LVAL
| VT_LOCAL
;
1422 #ifdef TCC_TARGET_ARM
1423 /* find a register of class 'rc2' with at most one reference on stack.
1424 * If none, call get_reg(rc) */
1425 ST_FUNC
int get_reg_ex(int rc
, int rc2
)
1430 for(r
=0;r
<NB_REGS
;r
++) {
1431 if (reg_classes
[r
] & rc2
) {
1434 for(p
= vstack
; p
<= vtop
; p
++) {
1435 if ((p
->r
& VT_VALMASK
) == r
||
1447 /* find a free register of class 'rc'. If none, save one register */
1448 ST_FUNC
int get_reg(int rc
)
1453 /* find a free register */
1454 for(r
=0;r
<NB_REGS
;r
++) {
1455 if (reg_classes
[r
] & rc
) {
1458 for(p
=vstack
;p
<=vtop
;p
++) {
1459 if ((p
->r
& VT_VALMASK
) == r
||
1468 /* no register left : free the first one on the stack (VERY
1469 IMPORTANT to start from the bottom to ensure that we don't
1470 spill registers used in gen_opi()) */
1471 for(p
=vstack
;p
<=vtop
;p
++) {
1472 /* look at second register (if long long) */
1474 if (r
< VT_CONST
&& (reg_classes
[r
] & rc
))
1476 r
= p
->r
& VT_VALMASK
;
1477 if (r
< VT_CONST
&& (reg_classes
[r
] & rc
)) {
1483 /* Should never come here */
1487 /* find a free temporary local variable (return its offset on the stack) matching the size and alignment. If none fits, add a new temporary stack variable. */
1488 static int get_temp_local_var(int size
,int align
){
1490 struct temp_local_variable
*temp_var
;
1497 for(i
=0;i
<nb_temp_local_vars
;i
++){
1498 temp_var
=&arr_temp_local_vars
[i
];
1499 if(temp_var
->size
<size
||align
!=temp_var
->align
){
1502 /*check if temp_var is free*/
1504 for(p
=vstack
;p
<=vtop
;p
++) {
1506 if(r
==VT_LOCAL
||r
==VT_LLOCAL
){
1507 if(p
->c
.i
==temp_var
->location
){
1514 found_var
=temp_var
->location
;
1520 loc
= (loc
- size
) & -align
;
1521 if(nb_temp_local_vars
<MAX_TEMP_LOCAL_VARIABLE_NUMBER
){
1522 temp_var
=&arr_temp_local_vars
[i
];
1523 temp_var
->location
=loc
;
1524 temp_var
->size
=size
;
1525 temp_var
->align
=align
;
1526 nb_temp_local_vars
++;
1533 static void clear_temp_local_var_list(){
1534 nb_temp_local_vars
=0;
1537 /* move register 's' (of type 't') to 'r', and flush previous value of r to memory
1539 static void move_reg(int r
, int s
, int t
)
1553 /* get address of vtop (vtop MUST BE an lvalue) */
1554 ST_FUNC
void gaddrof(void)
1556 vtop
->r
&= ~VT_LVAL
;
1557 /* tricky: if saved lvalue, then we can go back to lvalue */
1558 if ((vtop
->r
& VT_VALMASK
) == VT_LLOCAL
)
1559 vtop
->r
= (vtop
->r
& ~VT_VALMASK
) | VT_LOCAL
| VT_LVAL
;
1562 #ifdef CONFIG_TCC_BCHECK
1563 /* generate lvalue bound code */
1564 static void gbound(void)
1568 vtop
->r
&= ~VT_MUSTBOUND
;
1569 /* if lvalue, then use checking code before dereferencing */
1570 if (vtop
->r
& VT_LVAL
) {
1571 /* if not VT_BOUNDED value, then make one */
1572 if (!(vtop
->r
& VT_BOUNDED
)) {
1573 /* must save type because we must set it to int to get pointer */
1575 vtop
->type
.t
= VT_PTR
;
1578 gen_bounded_ptr_add();
1582 /* then check for dereferencing */
1583 gen_bounded_ptr_deref();
1587 /* we need to call __bound_ptr_add before we start to load function
1588 args into registers */
1589 ST_FUNC
void gbound_args(int nb_args
)
1592 for (i
= 1; i
<= nb_args
; ++i
)
1593 if (vtop
[1 - i
].r
& VT_MUSTBOUND
) {
1601 static void incr_bf_adr(int o
)
1603 vtop
->type
= char_pointer_type
;
1607 vtop
->type
.t
= VT_BYTE
| VT_UNSIGNED
;
1611 /* single-byte load mode for packed or otherwise unaligned bitfields */
1612 static void load_packed_bf(CType
*type
, int bit_pos
, int bit_size
)
1615 save_reg_upstack(vtop
->r
, 1);
1616 vpush64(type
->t
& VT_BTYPE
, 0); // B X
1617 bits
= 0, o
= bit_pos
>> 3, bit_pos
&= 7;
1626 vpushi(bit_pos
), gen_op(TOK_SHR
), bit_pos
= 0; // X B Y
1628 vpushi((1 << n
) - 1), gen_op('&');
1631 vpushi(bits
), gen_op(TOK_SHL
);
1634 bits
+= n
, bit_size
-= n
, o
= 1;
1637 if (!(type
->t
& VT_UNSIGNED
)) {
1638 n
= ((type
->t
& VT_BTYPE
) == VT_LLONG
? 64 : 32) - bits
;
1639 vpushi(n
), gen_op(TOK_SHL
);
1640 vpushi(n
), gen_op(TOK_SAR
);
1644 /* single-byte store mode for packed or otherwise unaligned bitfields */
1645 static void store_packed_bf(int bit_pos
, int bit_size
)
1647 int bits
, n
, o
, m
, c
;
1649 c
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
1651 save_reg_upstack(vtop
->r
, 1);
1652 bits
= 0, o
= bit_pos
>> 3, bit_pos
&= 7;
1654 incr_bf_adr(o
); // X B
1656 c
? vdup() : gv_dup(); // B V X
1659 vpushi(bits
), gen_op(TOK_SHR
);
1661 vpushi(bit_pos
), gen_op(TOK_SHL
);
1666 m
= ((1 << n
) - 1) << bit_pos
;
1667 vpushi(m
), gen_op('&'); // X B V1
1668 vpushv(vtop
-1); // X B V1 B
1669 vpushi(m
& 0x80 ? ~m
& 0x7f : ~m
);
1670 gen_op('&'); // X B V1 B1
1671 gen_op('|'); // X B V2
1673 vdup(), vtop
[-1] = vtop
[-2]; // X B B V2
1674 vstore(), vpop(); // X B
1675 bits
+= n
, bit_size
-= n
, bit_pos
= 0, o
= 1;
1680 static int adjust_bf(SValue
*sv
, int bit_pos
, int bit_size
)
1683 if (0 == sv
->type
.ref
)
1685 t
= sv
->type
.ref
->auxtype
;
1686 if (t
!= -1 && t
!= VT_STRUCT
) {
1687 sv
->type
.t
= (sv
->type
.t
& ~VT_BTYPE
) | t
;
1693 /* store vtop a register belonging to class 'rc'. lvalues are
1694 converted to values. Cannot be used if cannot be converted to
1695 register value (such as structures). */
1696 ST_FUNC
int gv(int rc
)
1698 int r
, r2
, r_ok
, r2_ok
, rc2
, bt
;
1699 int bit_pos
, bit_size
, size
, align
;
1701 /* NOTE: get_reg can modify vstack[] */
1702 if (vtop
->type
.t
& VT_BITFIELD
) {
1705 bit_pos
= BIT_POS(vtop
->type
.t
);
1706 bit_size
= BIT_SIZE(vtop
->type
.t
);
1707 /* remove bit field info to avoid loops */
1708 vtop
->type
.t
&= ~VT_STRUCT_MASK
;
1711 type
.t
= vtop
->type
.t
& VT_UNSIGNED
;
1712 if ((vtop
->type
.t
& VT_BTYPE
) == VT_BOOL
)
1713 type
.t
|= VT_UNSIGNED
;
1715 r
= adjust_bf(vtop
, bit_pos
, bit_size
);
1717 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
)
1722 if (r
== VT_STRUCT
) {
1723 load_packed_bf(&type
, bit_pos
, bit_size
);
1725 int bits
= (type
.t
& VT_BTYPE
) == VT_LLONG
? 64 : 32;
1726 /* cast to int to propagate signedness in following ops */
1728 /* generate shifts */
1729 vpushi(bits
- (bit_pos
+ bit_size
));
1731 vpushi(bits
- bit_size
);
1732 /* NOTE: transformed to SHR if unsigned */
1737 if (is_float(vtop
->type
.t
) &&
1738 (vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
) {
1739 unsigned long offset
;
1740 /* CPUs usually cannot use float constants, so we store them
1741 generically in data segment */
1742 size
= type_size(&vtop
->type
, &align
);
1744 size
= 0, align
= 1;
1745 offset
= section_add(data_section
, size
, align
);
1746 vpush_ref(&vtop
->type
, data_section
, offset
, size
);
1748 init_putv(&vtop
->type
, data_section
, offset
);
1751 #ifdef CONFIG_TCC_BCHECK
1752 if (vtop
->r
& VT_MUSTBOUND
)
1756 bt
= vtop
->type
.t
& VT_BTYPE
;
1758 #ifdef TCC_TARGET_RISCV64
1760 if (bt
== VT_LDOUBLE
&& rc
== RC_FLOAT
)
1763 rc2
= RC2_TYPE(bt
, rc
);
1765 /* need to reload if:
1767 - lvalue (need to dereference pointer)
1768 - already a register, but not in the right class */
1769 r
= vtop
->r
& VT_VALMASK
;
1770 r_ok
= !(vtop
->r
& VT_LVAL
) && (r
< VT_CONST
) && (reg_classes
[r
] & rc
);
1771 r2_ok
= !rc2
|| ((vtop
->r2
< VT_CONST
) && (reg_classes
[vtop
->r2
] & rc2
));
1773 if (!r_ok
|| !r2_ok
) {
1777 int load_type
= (bt
== VT_QFLOAT
) ? VT_DOUBLE
: VT_PTRDIFF_T
;
1778 int original_type
= vtop
->type
.t
;
1780 /* two register type load :
1781 expand to two words temporarily */
1782 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
) {
1784 unsigned long long ll
= vtop
->c
.i
;
1785 vtop
->c
.i
= ll
; /* first word */
1787 vtop
->r
= r
; /* save register value */
1788 vpushi(ll
>> 32); /* second word */
1789 } else if (vtop
->r
& VT_LVAL
) {
1790 /* We do not want to modify the long long pointer here.
1791 So we save any other instances down the stack */
1792 save_reg_upstack(vtop
->r
, 1);
1793 /* load from memory */
1794 vtop
->type
.t
= load_type
;
1797 vtop
[-1].r
= r
; /* save register value */
1798 /* increment pointer to get second word */
1799 vtop
->type
.t
= VT_PTRDIFF_T
;
1804 vtop
->type
.t
= load_type
;
1806 /* move registers */
1809 if (r2_ok
&& vtop
->r2
< VT_CONST
)
1812 vtop
[-1].r
= r
; /* save register value */
1813 vtop
->r
= vtop
[-1].r2
;
1815 /* Allocate second register. Here we rely on the fact that
1816 get_reg() tries first to free r2 of an SValue. */
1820 /* write second register */
1823 vtop
->type
.t
= original_type
;
1825 if (vtop
->r
== VT_CMP
)
1827 /* one register type load */
1832 #ifdef TCC_TARGET_C67
1833 /* uses register pairs for doubles */
1834 if (bt
== VT_DOUBLE
)
1841 /* generate vtop[-1] and vtop[0] in resp. classes rc1 and rc2 */
1842 ST_FUNC
void gv2(int rc1
, int rc2
)
1844 /* generate more generic register first. But VT_JMP or VT_CMP
1845 values must be generated first in all cases to avoid possible
1847 if (vtop
->r
!= VT_CMP
&& rc1
<= rc2
) {
1852 /* test if reload is needed for first register */
1853 if ((vtop
[-1].r
& VT_VALMASK
) >= VT_CONST
) {
1863 /* test if reload is needed for second register */
1864 if ((vtop
[0].r
& VT_VALMASK
) >= VT_CONST
) {
1871 /* expand 64bit on stack in two ints */
1872 ST_FUNC
void lexpand(void)
1875 u
= vtop
->type
.t
& (VT_DEFSIGN
| VT_UNSIGNED
);
1876 v
= vtop
->r
& (VT_VALMASK
| VT_LVAL
);
1877 if (v
== VT_CONST
) {
1880 } else if (v
== (VT_LVAL
|VT_CONST
) || v
== (VT_LVAL
|VT_LOCAL
)) {
1886 vtop
[0].r
= vtop
[-1].r2
;
1887 vtop
[0].r2
= vtop
[-1].r2
= VT_CONST
;
1889 vtop
[0].type
.t
= vtop
[-1].type
.t
= VT_INT
| u
;
1894 /* build a long long from two ints */
1895 static void lbuild(int t
)
1897 gv2(RC_INT
, RC_INT
);
1898 vtop
[-1].r2
= vtop
[0].r
;
1899 vtop
[-1].type
.t
= t
;
1904 /* convert stack entry to register and duplicate its value in another
1906 static void gv_dup(void)
1912 if ((t
& VT_BTYPE
) == VT_LLONG
) {
1913 if (t
& VT_BITFIELD
) {
1923 /* stack: H L L1 H1 */
1933 /* duplicate value */
1943 /* generate CPU independent (unsigned) long long operations */
1944 static void gen_opl(int op
)
1946 int t
, a
, b
, op1
, c
, i
;
1948 unsigned short reg_iret
= REG_IRET
;
1949 unsigned short reg_lret
= REG_IRE2
;
1955 func
= TOK___divdi3
;
1958 func
= TOK___udivdi3
;
1961 func
= TOK___moddi3
;
1964 func
= TOK___umoddi3
;
1971 /* call generic long long function */
1972 vpush_global_sym(&func_old_type
, func
);
1977 vtop
->r2
= reg_lret
;
1985 //pv("gen_opl A",0,2);
1991 /* stack: L1 H1 L2 H2 */
1996 vtop
[-2] = vtop
[-3];
1999 /* stack: H1 H2 L1 L2 */
2000 //pv("gen_opl B",0,4);
2006 /* stack: H1 H2 L1 L2 ML MH */
2009 /* stack: ML MH H1 H2 L1 L2 */
2013 /* stack: ML MH H1 L2 H2 L1 */
2018 /* stack: ML MH M1 M2 */
2021 } else if (op
== '+' || op
== '-') {
2022 /* XXX: add non carry method too (for MIPS or alpha) */
2028 /* stack: H1 H2 (L1 op L2) */
2031 gen_op(op1
+ 1); /* TOK_xxxC2 */
2034 /* stack: H1 H2 (L1 op L2) */
2037 /* stack: (L1 op L2) H1 H2 */
2039 /* stack: (L1 op L2) (H1 op H2) */
2047 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
2048 t
= vtop
[-1].type
.t
;
2052 /* stack: L H shift */
2054 /* constant: simpler */
2055 /* NOTE: all comments are for SHL. the other cases are
2056 done by swapping words */
2067 if (op
!= TOK_SAR
) {
2100 /* XXX: should provide a faster fallback on x86 ? */
2103 func
= TOK___ashrdi3
;
2106 func
= TOK___lshrdi3
;
2109 func
= TOK___ashldi3
;
2115 /* compare operations */
2121 /* stack: L1 H1 L2 H2 */
2123 vtop
[-1] = vtop
[-2];
2125 /* stack: L1 L2 H1 H2 */
2129 /* when values are equal, we need to compare low words. since
2130 the jump is inverted, we invert the test too. */
2133 else if (op1
== TOK_GT
)
2135 else if (op1
== TOK_ULT
)
2137 else if (op1
== TOK_UGT
)
2147 /* generate non equal test */
2149 vset_VT_CMP(TOK_NE
);
2153 /* compare low. Always unsigned */
2157 else if (op1
== TOK_LE
)
2159 else if (op1
== TOK_GT
)
2161 else if (op1
== TOK_GE
)
2164 #if 0//def TCC_TARGET_I386
2165 if (op
== TOK_NE
) { gsym(b
); break; }
2166 if (op
== TOK_EQ
) { gsym(a
); break; }
2175 static uint64_t gen_opic_sdiv(uint64_t a
, uint64_t b
)
2177 uint64_t x
= (a
>> 63 ? -a
: a
) / (b
>> 63 ? -b
: b
);
2178 return (a
^ b
) >> 63 ? -x
: x
;
2181 static int gen_opic_lt(uint64_t a
, uint64_t b
)
2183 return (a
^ (uint64_t)1 << 63) < (b
^ (uint64_t)1 << 63);
2186 /* handle integer constant optimizations and various machine
2188 static void gen_opic(int op
)
2190 SValue
*v1
= vtop
- 1;
2192 int t1
= v1
->type
.t
& VT_BTYPE
;
2193 int t2
= v2
->type
.t
& VT_BTYPE
;
2194 int c1
= (v1
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2195 int c2
= (v2
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2196 uint64_t l1
= c1
? v1
->c
.i
: 0;
2197 uint64_t l2
= c2
? v2
->c
.i
: 0;
2198 int shm
= (t1
== VT_LLONG
) ? 63 : 31;
2200 if (t1
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t1
!= VT_PTR
))
2201 l1
= ((uint32_t)l1
|
2202 (v1
->type
.t
& VT_UNSIGNED
? 0 : -(l1
& 0x80000000)));
2203 if (t2
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t2
!= VT_PTR
))
2204 l2
= ((uint32_t)l2
|
2205 (v2
->type
.t
& VT_UNSIGNED
? 0 : -(l2
& 0x80000000)));
2209 case '+': l1
+= l2
; break;
2210 case '-': l1
-= l2
; break;
2211 case '&': l1
&= l2
; break;
2212 case '^': l1
^= l2
; break;
2213 case '|': l1
|= l2
; break;
2214 case '*': l1
*= l2
; break;
2221 /* if division by zero, generate explicit division */
2224 tcc_error("division by zero in constant");
2228 default: l1
= gen_opic_sdiv(l1
, l2
); break;
2229 case '%': l1
= l1
- l2
* gen_opic_sdiv(l1
, l2
); break;
2230 case TOK_UDIV
: l1
= l1
/ l2
; break;
2231 case TOK_UMOD
: l1
= l1
% l2
; break;
2234 case TOK_SHL
: l1
<<= (l2
& shm
); break;
2235 case TOK_SHR
: l1
>>= (l2
& shm
); break;
2237 l1
= (l1
>> 63) ? ~(~l1
>> (l2
& shm
)) : l1
>> (l2
& shm
);
2240 case TOK_ULT
: l1
= l1
< l2
; break;
2241 case TOK_UGE
: l1
= l1
>= l2
; break;
2242 case TOK_EQ
: l1
= l1
== l2
; break;
2243 case TOK_NE
: l1
= l1
!= l2
; break;
2244 case TOK_ULE
: l1
= l1
<= l2
; break;
2245 case TOK_UGT
: l1
= l1
> l2
; break;
2246 case TOK_LT
: l1
= gen_opic_lt(l1
, l2
); break;
2247 case TOK_GE
: l1
= !gen_opic_lt(l1
, l2
); break;
2248 case TOK_LE
: l1
= !gen_opic_lt(l2
, l1
); break;
2249 case TOK_GT
: l1
= gen_opic_lt(l2
, l1
); break;
2251 case TOK_LAND
: l1
= l1
&& l2
; break;
2252 case TOK_LOR
: l1
= l1
|| l2
; break;
2256 if (t1
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t1
!= VT_PTR
))
2257 l1
= ((uint32_t)l1
|
2258 (v1
->type
.t
& VT_UNSIGNED
? 0 : -(l1
& 0x80000000)));
2262 /* if commutative ops, put c2 as constant */
2263 if (c1
&& (op
== '+' || op
== '&' || op
== '^' ||
2264 op
== '|' || op
== '*' || op
== TOK_EQ
|| op
== TOK_NE
)) {
2266 c2
= c1
; //c = c1, c1 = c2, c2 = c;
2267 l2
= l1
; //l = l1, l1 = l2, l2 = l;
2269 if (!const_wanted
&&
2271 (op
== TOK_SHL
|| op
== TOK_SHR
|| op
== TOK_SAR
)) ||
2272 (l1
== -1 && op
== TOK_SAR
))) {
2273 /* treat (0 << x), (0 >> x) and (-1 >> x) as constant */
2275 } else if (!const_wanted
&&
2276 c2
&& ((l2
== 0 && (op
== '&' || op
== '*')) ||
2278 (l2
== -1 || (l2
== 0xFFFFFFFF && t2
!= VT_LLONG
))) ||
2279 (l2
== 1 && (op
== '%' || op
== TOK_UMOD
)))) {
2280 /* treat (x & 0), (x * 0), (x | -1) and (x % 1) as constant */
2285 } else if (c2
&& (((op
== '*' || op
== '/' || op
== TOK_UDIV
||
2288 ((op
== '+' || op
== '-' || op
== '|' || op
== '^' ||
2289 op
== TOK_SHL
|| op
== TOK_SHR
|| op
== TOK_SAR
) &&
2292 (l2
== -1 || (l2
== 0xFFFFFFFF && t2
!= VT_LLONG
))))) {
2293 /* filter out NOP operations like x*1, x-0, x&-1... */
2295 } else if (c2
&& (op
== '*' || op
== TOK_PDIV
|| op
== TOK_UDIV
)) {
2296 /* try to use shifts instead of muls or divs */
2297 if (l2
> 0 && (l2
& (l2
- 1)) == 0) {
2306 else if (op
== TOK_PDIV
)
2312 } else if (c2
&& (op
== '+' || op
== '-') &&
2313 (((vtop
[-1].r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == (VT_CONST
| VT_SYM
))
2314 || (vtop
[-1].r
& (VT_VALMASK
| VT_LVAL
)) == VT_LOCAL
)) {
2315 /* symbol + constant case */
2319 /* The backends can't always deal with addends to symbols
2320 larger than +-1<<31. Don't construct such. */
2327 /* call low level op generator */
2328 if (t1
== VT_LLONG
|| t2
== VT_LLONG
||
2329 (PTR_SIZE
== 8 && (t1
== VT_PTR
|| t2
== VT_PTR
)))
2337 /* generate a floating point operation with constant propagation */
2338 static void gen_opif(int op
)
2342 #if defined _MSC_VER && defined __x86_64__
2343 /* avoid bad optimization with f1 -= f2 for f1:-0.0, f2:0.0 */
2350 /* currently, we cannot do computations with forward symbols */
2351 c1
= (v1
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2352 c2
= (v2
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2354 if (v1
->type
.t
== VT_FLOAT
) {
2357 } else if (v1
->type
.t
== VT_DOUBLE
) {
2365 /* NOTE: we only do constant propagation if finite number (not
2366 NaN or infinity) (ANSI spec) */
2367 if (!ieee_finite(f1
) || !ieee_finite(f2
))
2371 case '+': f1
+= f2
; break;
2372 case '-': f1
-= f2
; break;
2373 case '*': f1
*= f2
; break;
2376 /* If not in initializer we need to potentially generate
2377 FP exceptions at runtime, otherwise we want to fold. */
2383 /* XXX: also handles tests ? */
2387 /* XXX: overflow test ? */
2388 if (v1
->type
.t
== VT_FLOAT
) {
2390 } else if (v1
->type
.t
== VT_DOUBLE
) {
2402 static int pointed_size(CType
*type
)
2405 return type_size(pointed_type(type
), &align
);
2408 static void vla_runtime_pointed_size(CType
*type
)
2411 vla_runtime_type_size(pointed_type(type
), &align
);
2414 static inline int is_null_pointer(SValue
*p
)
2416 if ((p
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) != VT_CONST
)
2418 return ((p
->type
.t
& VT_BTYPE
) == VT_INT
&& (uint32_t)p
->c
.i
== 0) ||
2419 ((p
->type
.t
& VT_BTYPE
) == VT_LLONG
&& p
->c
.i
== 0) ||
2420 ((p
->type
.t
& VT_BTYPE
) == VT_PTR
&&
2421 (PTR_SIZE
== 4 ? (uint32_t)p
->c
.i
== 0 : p
->c
.i
== 0) &&
2422 ((pointed_type(&p
->type
)->t
& VT_BTYPE
) == VT_VOID
) &&
2423 0 == (pointed_type(&p
->type
)->t
& (VT_CONSTANT
| VT_VOLATILE
))
2427 /* check types for comparison or subtraction of pointers */
2428 static void check_comparison_pointer_types(SValue
*p1
, SValue
*p2
, int op
)
2430 CType
*type1
, *type2
, tmp_type1
, tmp_type2
;
2433 /* null pointers are accepted for all comparisons as gcc */
2434 if (is_null_pointer(p1
) || is_null_pointer(p2
))
2438 bt1
= type1
->t
& VT_BTYPE
;
2439 bt2
= type2
->t
& VT_BTYPE
;
2440 /* accept comparison between pointer and integer with a warning */
2441 if ((is_integer_btype(bt1
) || is_integer_btype(bt2
)) && op
!= '-') {
2442 if (op
!= TOK_LOR
&& op
!= TOK_LAND
)
2443 tcc_warning("comparison between pointer and integer");
2447 /* both must be pointers or implicit function pointers */
2448 if (bt1
== VT_PTR
) {
2449 type1
= pointed_type(type1
);
2450 } else if (bt1
!= VT_FUNC
)
2451 goto invalid_operands
;
2453 if (bt2
== VT_PTR
) {
2454 type2
= pointed_type(type2
);
2455 } else if (bt2
!= VT_FUNC
) {
2457 tcc_error("invalid operands to binary %s", get_tok_str(op
, NULL
));
2459 if ((type1
->t
& VT_BTYPE
) == VT_VOID
||
2460 (type2
->t
& VT_BTYPE
) == VT_VOID
)
2464 tmp_type1
.t
&= ~(VT_DEFSIGN
| VT_UNSIGNED
| VT_CONSTANT
| VT_VOLATILE
);
2465 tmp_type2
.t
&= ~(VT_DEFSIGN
| VT_UNSIGNED
| VT_CONSTANT
| VT_VOLATILE
);
2466 if (!is_compatible_types(&tmp_type1
, &tmp_type2
)) {
2467 /* gcc-like error if '-' is used */
2469 goto invalid_operands
;
2471 tcc_warning("comparison of distinct pointer types lacks a cast");
2475 /* generic gen_op: handles types problems */
2476 ST_FUNC
void gen_op(int op
)
2478 int u
, t1
, t2
, bt1
, bt2
, t
;
2482 t1
= vtop
[-1].type
.t
;
2483 t2
= vtop
[0].type
.t
;
2484 bt1
= t1
& VT_BTYPE
;
2485 bt2
= t2
& VT_BTYPE
;
2487 if (bt1
== VT_STRUCT
|| bt2
== VT_STRUCT
) {
2488 tcc_error("operation on a struct");
2489 } else if (bt1
== VT_FUNC
|| bt2
== VT_FUNC
) {
2490 if (bt2
== VT_FUNC
) {
2491 mk_pointer(&vtop
->type
);
2494 if (bt1
== VT_FUNC
) {
2496 mk_pointer(&vtop
->type
);
2501 } else if (bt1
== VT_PTR
|| bt2
== VT_PTR
) {
2502 /* at least one operand is a pointer */
2503 /* relational op: both operands must be pointers */
2504 if (op
>= TOK_ULT
&& op
<= TOK_LOR
) {
2505 check_comparison_pointer_types(vtop
- 1, vtop
, op
);
2506 /* pointers are handled as unsigned */
2508 t
= VT_LLONG
| VT_UNSIGNED
;
2510 t
= VT_INT
| VT_UNSIGNED
;
2514 /* if both pointers, then it must be the '-' op */
2515 if (bt1
== VT_PTR
&& bt2
== VT_PTR
) {
2517 tcc_error("cannot use pointers here");
2518 check_comparison_pointer_types(vtop
- 1, vtop
, op
);
2519 /* XXX: check that types are compatible */
2520 if (vtop
[-1].type
.t
& VT_VLA
) {
2521 vla_runtime_pointed_size(&vtop
[-1].type
);
2523 vpushi(pointed_size(&vtop
[-1].type
));
2527 vtop
->type
.t
= VT_PTRDIFF_T
;
2531 /* exactly one pointer : must be '+' or '-'. */
2532 if (op
!= '-' && op
!= '+')
2533 tcc_error("cannot use pointers here");
2534 /* Put pointer as first operand */
2535 if (bt2
== VT_PTR
) {
2537 t
= t1
, t1
= t2
, t2
= t
;
2540 if ((vtop
[0].type
.t
& VT_BTYPE
) == VT_LLONG
)
2541 /* XXX: truncate here because gen_opl can't handle ptr + long long */
2544 type1
= vtop
[-1].type
;
2545 type1
.t
&= ~VT_ARRAY
;
2546 if (vtop
[-1].type
.t
& VT_VLA
)
2547 vla_runtime_pointed_size(&vtop
[-1].type
);
2549 u
= pointed_size(&vtop
[-1].type
);
2551 tcc_error("unknown array element size");
2555 /* XXX: cast to int ? (long long case) */
2560 #ifdef CONFIG_TCC_BCHECK
2561 if (tcc_state
->do_bounds_check
&& !const_wanted
) {
2562 /* if bounded pointers, we generate a special code to
2569 vtop
[-1].r
&= ~VT_MUSTBOUND
;
2570 gen_bounded_ptr_add();
2576 /* restore the type in case gen_opic() swapped the operands */
2579 } else if (is_float(bt1
) || is_float(bt2
)) {
2580 /* compute bigger type and do implicit casts */
2581 if (bt1
== VT_LDOUBLE
|| bt2
== VT_LDOUBLE
) {
2583 } else if (bt1
== VT_DOUBLE
|| bt2
== VT_DOUBLE
) {
2588 /* floats can only be used for a few operations */
2589 if (op
!= '+' && op
!= '-' && op
!= '*' && op
!= '/' &&
2590 (op
< TOK_ULT
|| op
> TOK_GT
))
2591 tcc_error("invalid operands for binary operation");
2593 } else if (op
== TOK_SHR
|| op
== TOK_SAR
|| op
== TOK_SHL
) {
2594 t
= bt1
== VT_LLONG
? VT_LLONG
: VT_INT
;
2595 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (t
| VT_UNSIGNED
))
2597 t
|= (VT_LONG
& t1
);
2599 } else if (bt1
== VT_LLONG
|| bt2
== VT_LLONG
) {
2600 /* cast to biggest op */
2601 t
= VT_LLONG
| VT_LONG
;
2602 if (bt1
== VT_LLONG
)
2604 if (bt2
== VT_LLONG
)
2606 /* convert to unsigned if it does not fit in a long long */
2607 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
) ||
2608 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
))
2612 /* integer operations */
2613 t
= VT_INT
| (VT_LONG
& (t1
| t2
));
2614 /* convert to unsigned if it does not fit in an integer */
2615 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
) ||
2616 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
))
2619 /* XXX: currently, some unsigned operations are explicit, so
2620 we modify them here */
2621 if (t
& VT_UNSIGNED
) {
2628 else if (op
== TOK_LT
)
2630 else if (op
== TOK_GT
)
2632 else if (op
== TOK_LE
)
2634 else if (op
== TOK_GE
)
2642 /* special case for shifts and long long: we keep the shift as
2644 if (op
== TOK_SHR
|| op
== TOK_SAR
|| op
== TOK_SHL
)
2651 if (op
>= TOK_ULT
&& op
<= TOK_GT
) {
2652 /* relational op: the result is an int */
2653 vtop
->type
.t
= VT_INT
;
2658 // Make sure that we have converted to an rvalue:
2659 if (vtop
->r
& VT_LVAL
)
2660 gv(is_float(vtop
->type
.t
& VT_BTYPE
) ? RC_FLOAT
: RC_INT
);
2663 #if defined TCC_TARGET_ARM64 || defined TCC_TARGET_RISCV64 || defined TCC_TARGET_ARM
2664 #define gen_cvt_itof1 gen_cvt_itof
2666 /* generic itof for unsigned long long case */
2667 static void gen_cvt_itof1(int t
)
2669 if ((vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
)) ==
2670 (VT_LLONG
| VT_UNSIGNED
)) {
2673 vpush_global_sym(&func_old_type
, TOK___floatundisf
);
2674 #if LDOUBLE_SIZE != 8
2675 else if (t
== VT_LDOUBLE
)
2676 vpush_global_sym(&func_old_type
, TOK___floatundixf
);
2679 vpush_global_sym(&func_old_type
, TOK___floatundidf
);
2690 #if defined TCC_TARGET_ARM64 || defined TCC_TARGET_RISCV64
2691 #define gen_cvt_ftoi1 gen_cvt_ftoi
2693 /* generic ftoi for unsigned long long case */
2694 static void gen_cvt_ftoi1(int t
)
2697 if (t
== (VT_LLONG
| VT_UNSIGNED
)) {
2698 /* not handled natively */
2699 st
= vtop
->type
.t
& VT_BTYPE
;
2701 vpush_global_sym(&func_old_type
, TOK___fixunssfdi
);
2702 #if LDOUBLE_SIZE != 8
2703 else if (st
== VT_LDOUBLE
)
2704 vpush_global_sym(&func_old_type
, TOK___fixunsxfdi
);
2707 vpush_global_sym(&func_old_type
, TOK___fixunsdfdi
);
2718 /* special delayed cast for char/short */
2719 static void force_charshort_cast(void)
2721 int sbt
= BFGET(vtop
->r
, VT_MUSTCAST
) == 2 ? VT_LLONG
: VT_INT
;
2722 int dbt
= vtop
->type
.t
;
2723 vtop
->r
&= ~VT_MUSTCAST
;
2725 gen_cast_s(dbt
== VT_BOOL
? VT_BYTE
|VT_UNSIGNED
: dbt
);
2729 static void gen_cast_s(int t
)
2737 /* cast 'vtop' to 'type'. Casting to bitfields is forbidden. */
2738 static void gen_cast(CType
*type
)
2740 int sbt
, dbt
, sf
, df
, c
;
2741 int dbt_bt
, sbt_bt
, ds
, ss
, bits
, trunc
;
2743 /* special delayed cast for char/short */
2744 if (vtop
->r
& VT_MUSTCAST
)
2745 force_charshort_cast();
2747 /* bitfields first get cast to ints */
2748 if (vtop
->type
.t
& VT_BITFIELD
)
2751 dbt
= type
->t
& (VT_BTYPE
| VT_UNSIGNED
);
2752 sbt
= vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
);
2760 dbt_bt
= dbt
& VT_BTYPE
;
2761 sbt_bt
= sbt
& VT_BTYPE
;
2763 c
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2764 #if !defined TCC_IS_NATIVE && !defined TCC_IS_NATIVE_387
2765 c
&= (dbt
!= VT_LDOUBLE
) | !!nocode_wanted
;
2768 /* constant case: we can do it now */
2769 /* XXX: in ISOC, cannot do it if error in convert */
2770 if (sbt
== VT_FLOAT
)
2771 vtop
->c
.ld
= vtop
->c
.f
;
2772 else if (sbt
== VT_DOUBLE
)
2773 vtop
->c
.ld
= vtop
->c
.d
;
2776 if (sbt_bt
== VT_LLONG
) {
2777 if ((sbt
& VT_UNSIGNED
) || !(vtop
->c
.i
>> 63))
2778 vtop
->c
.ld
= vtop
->c
.i
;
2780 vtop
->c
.ld
= -(long double)-vtop
->c
.i
;
2782 if ((sbt
& VT_UNSIGNED
) || !(vtop
->c
.i
>> 31))
2783 vtop
->c
.ld
= (uint32_t)vtop
->c
.i
;
2785 vtop
->c
.ld
= -(long double)-(uint32_t)vtop
->c
.i
;
2788 if (dbt
== VT_FLOAT
)
2789 vtop
->c
.f
= (float)vtop
->c
.ld
;
2790 else if (dbt
== VT_DOUBLE
)
2791 vtop
->c
.d
= (double)vtop
->c
.ld
;
2792 } else if (sf
&& dbt
== VT_BOOL
) {
2793 vtop
->c
.i
= (vtop
->c
.ld
!= 0);
2796 vtop
->c
.i
= vtop
->c
.ld
;
2797 else if (sbt_bt
== VT_LLONG
|| (PTR_SIZE
== 8 && sbt
== VT_PTR
))
2799 else if (sbt
& VT_UNSIGNED
)
2800 vtop
->c
.i
= (uint32_t)vtop
->c
.i
;
2802 vtop
->c
.i
= ((uint32_t)vtop
->c
.i
| -(vtop
->c
.i
& 0x80000000));
2804 if (dbt_bt
== VT_LLONG
|| (PTR_SIZE
== 8 && dbt
== VT_PTR
))
2806 else if (dbt
== VT_BOOL
)
2807 vtop
->c
.i
= (vtop
->c
.i
!= 0);
2809 uint32_t m
= dbt_bt
== VT_BYTE
? 0xff :
2810 dbt_bt
== VT_SHORT
? 0xffff :
2813 if (!(dbt
& VT_UNSIGNED
))
2814 vtop
->c
.i
|= -(vtop
->c
.i
& ((m
>> 1) + 1));
2819 } else if (dbt
== VT_BOOL
2820 && (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
))
2821 == (VT_CONST
| VT_SYM
)) {
2822 /* addresses are considered non-zero (see tcctest.c:sinit23) */
2828 /* cannot generate code for global or static initializers */
2829 if (STATIC_DATA_WANTED
)
2832 /* non constant case: generate code */
2833 if (dbt
== VT_BOOL
) {
2834 gen_test_zero(TOK_NE
);
2840 /* convert from fp to fp */
2843 /* convert int to fp */
2846 /* convert fp to int */
2848 if (dbt_bt
!= VT_LLONG
&& dbt_bt
!= VT_INT
)
2851 goto again
; /* may need char/short cast */
2856 ds
= btype_size(dbt_bt
);
2857 ss
= btype_size(sbt_bt
);
2858 if (ds
== 0 || ss
== 0) {
2859 if (dbt_bt
== VT_VOID
)
2861 cast_error(&vtop
->type
, type
);
2863 if (IS_ENUM(type
->t
) && type
->ref
->c
< 0)
2864 tcc_error("cast to incomplete type");
2866 /* same size and no sign conversion needed */
2867 if (ds
== ss
&& ds
>= 4)
2869 if (dbt_bt
== VT_PTR
|| sbt_bt
== VT_PTR
) {
2870 tcc_warning("cast between pointer and integer of different size");
2871 if (sbt_bt
== VT_PTR
) {
2872 /* put integer type to allow logical operations below */
2873 vtop
->type
.t
= (PTR_SIZE
== 8 ? VT_LLONG
: VT_INT
);
2877 /* processor allows { int a = 0, b = *(char*)&a; }
2878 That means that if we cast to less width, we can just
2879 change the type and read it still later. */
2880 #define ALLOW_SUBTYPE_ACCESS 1
2882 if (ALLOW_SUBTYPE_ACCESS
&& (vtop
->r
& VT_LVAL
)) {
2883 /* value still in memory */
2889 goto done
; /* no 64bit envolved */
2897 /* generate high word */
2898 if (sbt
& VT_UNSIGNED
) {
2907 } else if (ss
== 8) {
2908 /* from long long: just take low order word */
2916 /* need to convert from 32bit to 64bit */
2917 if (sbt
& VT_UNSIGNED
) {
2918 #if defined(TCC_TARGET_RISCV64)
2919 /* RISC-V keeps 32bit vals in registers sign-extended.
2920 So here we need a zero-extension. */
2929 ss
= ds
, ds
= 4, dbt
= sbt
;
2930 } else if (ss
== 8) {
2931 /* XXX some architectures (e.g. risc-v) would like it
2932 better for this merely being a 32-to-64 sign or zero-
2934 trunc
= 32; /* zero upper 32 bits */
2942 #if defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64 || defined TCC_TARGET_ARM64
2948 bits
= (ss
- ds
) * 8;
2949 /* for unsigned, gen_op will convert SAR to SHR */
2950 vtop
->type
.t
= (ss
== 8 ? VT_LLONG
: VT_INT
) | (dbt
& VT_UNSIGNED
);
2953 vpushi(bits
- trunc
);
2960 vtop
->type
.t
&= ~ ( VT_CONSTANT
| VT_VOLATILE
| VT_ARRAY
);
2963 /* return type size as known at compile time. Put alignment at 'a' */
2964 ST_FUNC
int type_size(CType
*type
, int *a
)
2969 bt
= type
->t
& VT_BTYPE
;
2970 if (bt
== VT_STRUCT
) {
2975 } else if (bt
== VT_PTR
) {
2976 if (type
->t
& VT_ARRAY
) {
2980 ts
= type_size(&s
->type
, a
);
2982 if (ts
< 0 && s
->c
< 0)
2990 } else if (IS_ENUM(type
->t
) && type
->ref
->c
< 0) {
2991 return -1; /* incomplete enum */
2992 } else if (bt
== VT_LDOUBLE
) {
2994 return LDOUBLE_SIZE
;
2995 } else if (bt
== VT_DOUBLE
|| bt
== VT_LLONG
) {
2996 #ifdef TCC_TARGET_I386
2997 #ifdef TCC_TARGET_PE
3002 #elif defined(TCC_TARGET_ARM)
3012 } else if (bt
== VT_INT
|| bt
== VT_FLOAT
) {
3015 } else if (bt
== VT_SHORT
) {
3018 } else if (bt
== VT_QLONG
|| bt
== VT_QFLOAT
) {
3022 /* char, void, function, _Bool */
3028 /* push type size as known at runtime time on top of value stack. Put
3030 ST_FUNC
void vla_runtime_type_size(CType
*type
, int *a
)
3032 if (type
->t
& VT_VLA
) {
3033 type_size(&type
->ref
->type
, a
);
3034 vset(&int_type
, VT_LOCAL
|VT_LVAL
, type
->ref
->c
);
3036 vpushi(type_size(type
, a
));
3040 /* return the pointed type of t */
3041 static inline CType
*pointed_type(CType
*type
)
3043 return &type
->ref
->type
;
3046 /* modify type so that it is a pointer to type. */
3047 ST_FUNC
void mk_pointer(CType
*type
)
3050 s
= sym_push(SYM_FIELD
, type
, 0, -1);
3051 type
->t
= VT_PTR
| (type
->t
& VT_STORAGE
);
3055 /* compare function types. OLD functions match any new functions */
3056 static int is_compatible_func(CType
*type1
, CType
*type2
)
3062 if (s1
->f
.func_call
!= s2
->f
.func_call
)
3064 if (s1
->f
.func_type
!= s2
->f
.func_type
3065 && s1
->f
.func_type
!= FUNC_OLD
3066 && s2
->f
.func_type
!= FUNC_OLD
)
3068 /* we should check the function return type for FUNC_OLD too
3069 but that causes problems with the internally used support
3070 functions such as TOK_memmove */
3071 if (s1
->f
.func_type
== FUNC_OLD
&& !s1
->next
)
3073 if (s2
->f
.func_type
== FUNC_OLD
&& !s2
->next
)
3076 if (!is_compatible_unqualified_types(&s1
->type
, &s2
->type
))
3087 /* return true if type1 and type2 are the same. If unqualified is
3088 true, qualifiers on the types are ignored.
3090 static int compare_types(CType
*type1
, CType
*type2
, int unqualified
)
3094 t1
= type1
->t
& VT_TYPE
;
3095 t2
= type2
->t
& VT_TYPE
;
3097 /* strip qualifiers before comparing */
3098 t1
&= ~(VT_CONSTANT
| VT_VOLATILE
);
3099 t2
&= ~(VT_CONSTANT
| VT_VOLATILE
);
3102 /* Default Vs explicit signedness only matters for char */
3103 if ((t1
& VT_BTYPE
) != VT_BYTE
) {
3107 /* XXX: bitfields ? */
3112 && !(type1
->ref
->c
< 0
3113 || type2
->ref
->c
< 0
3114 || type1
->ref
->c
== type2
->ref
->c
))
3117 /* test more complicated cases */
3118 bt1
= t1
& VT_BTYPE
;
3119 if (bt1
== VT_PTR
) {
3120 type1
= pointed_type(type1
);
3121 type2
= pointed_type(type2
);
3122 return is_compatible_types(type1
, type2
);
3123 } else if (bt1
== VT_STRUCT
) {
3124 return (type1
->ref
== type2
->ref
);
3125 } else if (bt1
== VT_FUNC
) {
3126 return is_compatible_func(type1
, type2
);
3127 } else if (IS_ENUM(type1
->t
) || IS_ENUM(type2
->t
)) {
3128 return type1
->ref
== type2
->ref
;
3134 /* return true if type1 and type2 are exactly the same (including
3137 static int is_compatible_types(CType
*type1
, CType
*type2
)
3139 return compare_types(type1
,type2
,0);
3142 /* return true if type1 and type2 are the same (ignoring qualifiers).
3144 static int is_compatible_unqualified_types(CType
*type1
, CType
*type2
)
3146 return compare_types(type1
,type2
,1);
3149 /* print a type. If 'varstr' is not NULL, then the variable is also
3150 printed in the type */
3152 /* XXX: add array and function pointers */
3153 static void type_to_str(char *buf
, int buf_size
,
3154 CType
*type
, const char *varstr
)
3166 pstrcat(buf
, buf_size
, "extern ");
3168 pstrcat(buf
, buf_size
, "static ");
3170 pstrcat(buf
, buf_size
, "typedef ");
3172 pstrcat(buf
, buf_size
, "inline ");
3173 if (t
& VT_VOLATILE
)
3174 pstrcat(buf
, buf_size
, "volatile ");
3175 if (t
& VT_CONSTANT
)
3176 pstrcat(buf
, buf_size
, "const ");
3178 if (((t
& VT_DEFSIGN
) && bt
== VT_BYTE
)
3179 || ((t
& VT_UNSIGNED
)
3180 && (bt
== VT_SHORT
|| bt
== VT_INT
|| bt
== VT_LLONG
)
3183 pstrcat(buf
, buf_size
, (t
& VT_UNSIGNED
) ? "unsigned " : "signed ");
3185 buf_size
-= strlen(buf
);
3220 tstr
= "long double";
3222 pstrcat(buf
, buf_size
, tstr
);
3229 pstrcat(buf
, buf_size
, tstr
);
3230 v
= type
->ref
->v
& ~SYM_STRUCT
;
3231 if (v
>= SYM_FIRST_ANOM
)
3232 pstrcat(buf
, buf_size
, "<anonymous>");
3234 pstrcat(buf
, buf_size
, get_tok_str(v
, NULL
));
3239 if (varstr
&& '*' == *varstr
) {
3240 pstrcat(buf1
, sizeof(buf1
), "(");
3241 pstrcat(buf1
, sizeof(buf1
), varstr
);
3242 pstrcat(buf1
, sizeof(buf1
), ")");
3244 pstrcat(buf1
, buf_size
, "(");
3246 while (sa
!= NULL
) {
3248 type_to_str(buf2
, sizeof(buf2
), &sa
->type
, NULL
);
3249 pstrcat(buf1
, sizeof(buf1
), buf2
);
3252 pstrcat(buf1
, sizeof(buf1
), ", ");
3254 if (s
->f
.func_type
== FUNC_ELLIPSIS
)
3255 pstrcat(buf1
, sizeof(buf1
), ", ...");
3256 pstrcat(buf1
, sizeof(buf1
), ")");
3257 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
3262 if (varstr
&& '*' == *varstr
)
3263 snprintf(buf1
, sizeof(buf1
), "(%s)[%d]", varstr
, s
->c
);
3265 snprintf(buf1
, sizeof(buf1
), "%s[%d]", varstr
? varstr
: "", s
->c
);
3266 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
3269 pstrcpy(buf1
, sizeof(buf1
), "*");
3270 if (t
& VT_CONSTANT
)
3271 pstrcat(buf1
, buf_size
, "const ");
3272 if (t
& VT_VOLATILE
)
3273 pstrcat(buf1
, buf_size
, "volatile ");
3275 pstrcat(buf1
, sizeof(buf1
), varstr
);
3276 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
3280 pstrcat(buf
, buf_size
, " ");
3281 pstrcat(buf
, buf_size
, varstr
);
3286 static void cast_error(CType
*st
, CType
*dt
)
3288 char buf1
[256], buf2
[256];
3289 type_to_str(buf1
, sizeof(buf1
), st
, NULL
);
3290 type_to_str(buf2
, sizeof(buf2
), dt
, NULL
);
3291 tcc_error("cannot convert '%s' to '%s'", buf1
, buf2
);
3294 /* verify type compatibility to store vtop in 'dt' type */
3295 static void verify_assign_cast(CType
*dt
)
3297 CType
*st
, *type1
, *type2
;
3298 int dbt
, sbt
, qualwarn
, lvl
;
3300 st
= &vtop
->type
; /* source type */
3301 dbt
= dt
->t
& VT_BTYPE
;
3302 sbt
= st
->t
& VT_BTYPE
;
3303 if (dt
->t
& VT_CONSTANT
)
3304 tcc_warning("assignment of read-only location");
3308 tcc_error("assignment to void expression");
3311 /* special cases for pointers */
3312 /* '0' can also be a pointer */
3313 if (is_null_pointer(vtop
))
3315 /* accept implicit pointer to integer cast with warning */
3316 if (is_integer_btype(sbt
)) {
3317 tcc_warning("assignment makes pointer from integer without a cast");
3320 type1
= pointed_type(dt
);
3322 type2
= pointed_type(st
);
3323 else if (sbt
== VT_FUNC
)
3324 type2
= st
; /* a function is implicitly a function pointer */
3327 if (is_compatible_types(type1
, type2
))
3329 for (qualwarn
= lvl
= 0;; ++lvl
) {
3330 if (((type2
->t
& VT_CONSTANT
) && !(type1
->t
& VT_CONSTANT
)) ||
3331 ((type2
->t
& VT_VOLATILE
) && !(type1
->t
& VT_VOLATILE
)))
3333 dbt
= type1
->t
& (VT_BTYPE
|VT_LONG
);
3334 sbt
= type2
->t
& (VT_BTYPE
|VT_LONG
);
3335 if (dbt
!= VT_PTR
|| sbt
!= VT_PTR
)
3337 type1
= pointed_type(type1
);
3338 type2
= pointed_type(type2
);
3340 if (!is_compatible_unqualified_types(type1
, type2
)) {
3341 if ((dbt
== VT_VOID
|| sbt
== VT_VOID
) && lvl
== 0) {
3342 /* void * can match anything */
3343 } else if (dbt
== sbt
3344 && is_integer_btype(sbt
& VT_BTYPE
)
3345 && IS_ENUM(type1
->t
) + IS_ENUM(type2
->t
)
3346 + !!((type1
->t
^ type2
->t
) & VT_UNSIGNED
) < 2) {
3347 /* Like GCC don't warn by default for merely changes
3348 in pointer target signedness. Do warn for different
3349 base types, though, in particular for unsigned enums
3350 and signed int targets. */
3352 tcc_warning("assignment from incompatible pointer type");
3357 tcc_warning("assignment discards qualifiers from pointer target type");
3363 if (sbt
== VT_PTR
|| sbt
== VT_FUNC
) {
3364 tcc_warning("assignment makes integer from pointer without a cast");
3365 } else if (sbt
== VT_STRUCT
) {
3366 goto case_VT_STRUCT
;
3368 /* XXX: more tests */
3372 if (!is_compatible_unqualified_types(dt
, st
)) {
3380 static void gen_assign_cast(CType
*dt
)
3382 verify_assign_cast(dt
);
3386 /* store vtop in lvalue pushed on stack */
3387 ST_FUNC
void vstore(void)
3389 int sbt
, dbt
, ft
, r
, size
, align
, bit_size
, bit_pos
, delayed_cast
;
3391 ft
= vtop
[-1].type
.t
;
3392 sbt
= vtop
->type
.t
& VT_BTYPE
;
3393 dbt
= ft
& VT_BTYPE
;
3395 verify_assign_cast(&vtop
[-1].type
);
3397 if (sbt
== VT_STRUCT
) {
3398 /* if structure, only generate pointer */
3399 /* structure assignment : generate memcpy */
3400 /* XXX: optimize if small size */
3401 size
= type_size(&vtop
->type
, &align
);
3405 vtop
->type
.t
= VT_PTR
;
3408 /* address of memcpy() */
3411 vpush_global_sym(&func_old_type
, TOK_memcpy8
);
3412 else if(!(align
& 3))
3413 vpush_global_sym(&func_old_type
, TOK_memcpy4
);
3416 /* Use memmove, rather than memcpy, as dest and src may be same: */
3417 vpush_global_sym(&func_old_type
, TOK_memmove
);
3422 vtop
->type
.t
= VT_PTR
;
3427 /* leave source on stack */
3429 } else if (ft
& VT_BITFIELD
) {
3430 /* bitfield store handling */
3432 /* save lvalue as expression result (example: s.b = s.a = n;) */
3433 vdup(), vtop
[-1] = vtop
[-2];
3435 bit_pos
= BIT_POS(ft
);
3436 bit_size
= BIT_SIZE(ft
);
3437 /* remove bit field info to avoid loops */
3438 vtop
[-1].type
.t
= ft
& ~VT_STRUCT_MASK
;
3440 if (dbt
== VT_BOOL
) {
3441 gen_cast(&vtop
[-1].type
);
3442 vtop
[-1].type
.t
= (vtop
[-1].type
.t
& ~VT_BTYPE
) | (VT_BYTE
| VT_UNSIGNED
);
3444 r
= adjust_bf(vtop
- 1, bit_pos
, bit_size
);
3445 if (dbt
!= VT_BOOL
) {
3446 gen_cast(&vtop
[-1].type
);
3447 dbt
= vtop
[-1].type
.t
& VT_BTYPE
;
3449 if (r
== VT_STRUCT
) {
3450 store_packed_bf(bit_pos
, bit_size
);
3452 unsigned long long mask
= (1ULL << bit_size
) - 1;
3453 if (dbt
!= VT_BOOL
) {
3455 if (dbt
== VT_LLONG
)
3458 vpushi((unsigned)mask
);
3465 /* duplicate destination */
3468 /* load destination, mask and or with source */
3469 if (dbt
== VT_LLONG
)
3470 vpushll(~(mask
<< bit_pos
));
3472 vpushi(~((unsigned)mask
<< bit_pos
));
3477 /* ... and discard */
3480 } else if (dbt
== VT_VOID
) {
3483 /* optimize char/short casts */
3485 if ((dbt
== VT_BYTE
|| dbt
== VT_SHORT
)
3486 && is_integer_btype(sbt
)
3488 if ((vtop
->r
& VT_MUSTCAST
)
3489 && btype_size(dbt
) > btype_size(sbt
)
3491 force_charshort_cast();
3494 gen_cast(&vtop
[-1].type
);
3497 #ifdef CONFIG_TCC_BCHECK
3498 /* bound check case */
3499 if (vtop
[-1].r
& VT_MUSTBOUND
) {
3505 gv(RC_TYPE(dbt
)); /* generate value */
3508 vtop
->r
|= BFVAL(VT_MUSTCAST
, (sbt
== VT_LLONG
) + 1);
3509 //tcc_warning("deley cast %x -> %x", sbt, dbt);
3510 vtop
->type
.t
= ft
& VT_TYPE
;
3513 /* if lvalue was saved on stack, must read it */
3514 if ((vtop
[-1].r
& VT_VALMASK
) == VT_LLOCAL
) {
3516 r
= get_reg(RC_INT
);
3517 sv
.type
.t
= VT_PTRDIFF_T
;
3518 sv
.r
= VT_LOCAL
| VT_LVAL
;
3519 sv
.c
.i
= vtop
[-1].c
.i
;
3521 vtop
[-1].r
= r
| VT_LVAL
;
3524 r
= vtop
->r
& VT_VALMASK
;
3525 /* two word case handling :
3526 store second register at word + 4 (or +8 for x86-64) */
3527 if (USING_TWO_WORDS(dbt
)) {
3528 int load_type
= (dbt
== VT_QFLOAT
) ? VT_DOUBLE
: VT_PTRDIFF_T
;
3529 vtop
[-1].type
.t
= load_type
;
3532 /* convert to int to increment easily */
3533 vtop
->type
.t
= VT_PTRDIFF_T
;
3539 vtop
[-1].type
.t
= load_type
;
3540 /* XXX: it works because r2 is spilled last ! */
3541 store(vtop
->r2
, vtop
- 1);
3547 vtop
--; /* NOT vpop() because on x86 it would flush the fp stack */
3551 /* post defines POST/PRE add. c is the token ++ or -- */
3552 ST_FUNC
void inc(int post
, int c
)
3555 vdup(); /* save lvalue */
3557 gv_dup(); /* duplicate value */
3562 vpushi(c
- TOK_MID
);
3564 vstore(); /* store value */
3566 vpop(); /* if post op, return saved value */
3569 ST_FUNC
void parse_mult_str (CString
*astr
, const char *msg
)
3571 /* read the string */
3575 while (tok
== TOK_STR
) {
3576 /* XXX: add \0 handling too ? */
3577 cstr_cat(astr
, tokc
.str
.data
, -1);
3580 cstr_ccat(astr
, '\0');
3583 /* If I is >= 1 and a power of two, returns log2(i)+1.
3584 If I is 0 returns 0. */
3585 static int exact_log2p1(int i
)
3590 for (ret
= 1; i
>= 1 << 8; ret
+= 8)
3601 /* Parse __attribute__((...)) GNUC extension. */
3602 static void parse_attribute(AttributeDef
*ad
)
3608 if (tok
!= TOK_ATTRIBUTE1
&& tok
!= TOK_ATTRIBUTE2
)
3613 while (tok
!= ')') {
3614 if (tok
< TOK_IDENT
)
3615 expect("attribute name");
3627 tcc_warning("implicit declaration of function '%s'",
3628 get_tok_str(tok
, &tokc
));
3629 s
= external_global_sym(tok
, &func_old_type
);
3631 ad
->cleanup_func
= s
;
3636 case TOK_CONSTRUCTOR1
:
3637 case TOK_CONSTRUCTOR2
:
3638 ad
->a
.constructor
= 1;
3640 case TOK_DESTRUCTOR1
:
3641 case TOK_DESTRUCTOR2
:
3642 ad
->a
.destructor
= 1;
3647 parse_mult_str(&astr
, "section name");
3648 ad
->section
= find_section(tcc_state
, (char *)astr
.data
);
3655 parse_mult_str(&astr
, "alias(\"target\")");
3656 ad
->alias_target
= /* save string as token, for later */
3657 tok_alloc((char*)astr
.data
, astr
.size
-1)->tok
;
3661 case TOK_VISIBILITY1
:
3662 case TOK_VISIBILITY2
:
3664 parse_mult_str(&astr
,
3665 "visibility(\"default|hidden|internal|protected\")");
3666 if (!strcmp (astr
.data
, "default"))
3667 ad
->a
.visibility
= STV_DEFAULT
;
3668 else if (!strcmp (astr
.data
, "hidden"))
3669 ad
->a
.visibility
= STV_HIDDEN
;
3670 else if (!strcmp (astr
.data
, "internal"))
3671 ad
->a
.visibility
= STV_INTERNAL
;
3672 else if (!strcmp (astr
.data
, "protected"))
3673 ad
->a
.visibility
= STV_PROTECTED
;
3675 expect("visibility(\"default|hidden|internal|protected\")");
3684 if (n
<= 0 || (n
& (n
- 1)) != 0)
3685 tcc_error("alignment must be a positive power of two");
3690 ad
->a
.aligned
= exact_log2p1(n
);
3691 if (n
!= 1 << (ad
->a
.aligned
- 1))
3692 tcc_error("alignment of %d is larger than implemented", n
);
3704 /* currently, no need to handle it because tcc does not
3705 track unused objects */
3709 ad
->f
.func_noreturn
= 1;
3714 ad
->f
.func_call
= FUNC_CDECL
;
3719 ad
->f
.func_call
= FUNC_STDCALL
;
3721 #ifdef TCC_TARGET_I386
3731 ad
->f
.func_call
= FUNC_FASTCALL1
+ n
- 1;
3737 ad
->f
.func_call
= FUNC_FASTCALLW
;
3744 ad
->attr_mode
= VT_LLONG
+ 1;
3747 ad
->attr_mode
= VT_BYTE
+ 1;
3750 ad
->attr_mode
= VT_SHORT
+ 1;
3754 ad
->attr_mode
= VT_INT
+ 1;
3757 tcc_warning("__mode__(%s) not supported\n", get_tok_str(tok
, NULL
));
3764 ad
->a
.dllexport
= 1;
3766 case TOK_NODECORATE
:
3767 ad
->a
.nodecorate
= 1;
3770 ad
->a
.dllimport
= 1;
3773 if (tcc_state
->warn_unsupported
)
3774 tcc_warning("'%s' attribute ignored", get_tok_str(t
, NULL
));
3775 /* skip parameters */
3777 int parenthesis
= 0;
3781 else if (tok
== ')')
3784 } while (parenthesis
&& tok
!= -1);
3797 static Sym
* find_field (CType
*type
, int v
, int *cumofs
)
3801 while ((s
= s
->next
) != NULL
) {
3802 if ((s
->v
& SYM_FIELD
) &&
3803 (s
->type
.t
& VT_BTYPE
) == VT_STRUCT
&&
3804 (s
->v
& ~SYM_FIELD
) >= SYM_FIRST_ANOM
) {
3805 Sym
*ret
= find_field (&s
->type
, v
, cumofs
);
3817 static void struct_layout(CType
*type
, AttributeDef
*ad
)
3819 int size
, align
, maxalign
, offset
, c
, bit_pos
, bit_size
;
3820 int packed
, a
, bt
, prevbt
, prev_bit_size
;
3821 int pcc
= !tcc_state
->ms_bitfields
;
3822 int pragma_pack
= *tcc_state
->pack_stack_ptr
;
3829 prevbt
= VT_STRUCT
; /* make it never match */
3834 for (f
= type
->ref
->next
; f
; f
= f
->next
) {
3835 if (f
->type
.t
& VT_BITFIELD
)
3836 bit_size
= BIT_SIZE(f
->type
.t
);
3839 size
= type_size(&f
->type
, &align
);
3840 a
= f
->a
.aligned
? 1 << (f
->a
.aligned
- 1) : 0;
3843 if (pcc
&& bit_size
== 0) {
3844 /* in pcc mode, packing does not affect zero-width bitfields */
3847 /* in pcc mode, attribute packed overrides if set. */
3848 if (pcc
&& (f
->a
.packed
|| ad
->a
.packed
))
3851 /* pragma pack overrides align if lesser and packs bitfields always */
3854 if (pragma_pack
< align
)
3855 align
= pragma_pack
;
3856 /* in pcc mode pragma pack also overrides individual align */
3857 if (pcc
&& pragma_pack
< a
)
3861 /* some individual align was specified */
3865 if (type
->ref
->type
.t
== VT_UNION
) {
3866 if (pcc
&& bit_size
>= 0)
3867 size
= (bit_size
+ 7) >> 3;
3872 } else if (bit_size
< 0) {
3874 c
+= (bit_pos
+ 7) >> 3;
3875 c
= (c
+ align
- 1) & -align
;
3884 /* A bit-field. Layout is more complicated. There are two
3885 options: PCC (GCC) compatible and MS compatible */
3887 /* In PCC layout a bit-field is placed adjacent to the
3888 preceding bit-fields, except if:
3890 - an individual alignment was given
3891 - it would overflow its base type container and
3892 there is no packing */
3893 if (bit_size
== 0) {
3895 c
= (c
+ ((bit_pos
+ 7) >> 3) + align
- 1) & -align
;
3897 } else if (f
->a
.aligned
) {
3899 } else if (!packed
) {
3901 int ofs
= ((c
* 8 + bit_pos
) % a8
+ bit_size
+ a8
- 1) / a8
;
3902 if (ofs
> size
/ align
)
3906 /* in pcc mode, long long bitfields have type int if they fit */
3907 if (size
== 8 && bit_size
<= 32)
3908 f
->type
.t
= (f
->type
.t
& ~VT_BTYPE
) | VT_INT
, size
= 4;
3910 while (bit_pos
>= align
* 8)
3911 c
+= align
, bit_pos
-= align
* 8;
3914 /* In PCC layout named bit-fields influence the alignment
3915 of the containing struct using the base types alignment,
3916 except for packed fields (which here have correct align). */
3917 if (f
->v
& SYM_FIRST_ANOM
3918 // && bit_size // ??? gcc on ARM/rpi does that
3923 bt
= f
->type
.t
& VT_BTYPE
;
3924 if ((bit_pos
+ bit_size
> size
* 8)
3925 || (bit_size
> 0) == (bt
!= prevbt
)
3927 c
= (c
+ align
- 1) & -align
;
3930 /* In MS bitfield mode a bit-field run always uses
3931 at least as many bits as the underlying type.
3932 To start a new run it's also required that this
3933 or the last bit-field had non-zero width. */
3934 if (bit_size
|| prev_bit_size
)
3937 /* In MS layout the records alignment is normally
3938 influenced by the field, except for a zero-width
3939 field at the start of a run (but by further zero-width
3940 fields it is again). */
3941 if (bit_size
== 0 && prevbt
!= bt
)
3944 prev_bit_size
= bit_size
;
3947 f
->type
.t
= (f
->type
.t
& ~(0x3f << VT_STRUCT_SHIFT
))
3948 | (bit_pos
<< VT_STRUCT_SHIFT
);
3949 bit_pos
+= bit_size
;
3951 if (align
> maxalign
)
3955 printf("set field %s offset %-2d size %-2d align %-2d",
3956 get_tok_str(f
->v
& ~SYM_FIELD
, NULL
), offset
, size
, align
);
3957 if (f
->type
.t
& VT_BITFIELD
) {
3958 printf(" pos %-2d bits %-2d",
3971 c
+= (bit_pos
+ 7) >> 3;
3973 /* store size and alignment */
3974 a
= bt
= ad
->a
.aligned
? 1 << (ad
->a
.aligned
- 1) : 1;
3978 if (pragma_pack
&& pragma_pack
< maxalign
&& 0 == pcc
) {
3979 /* can happen if individual align for some member was given. In
3980 this case MSVC ignores maxalign when aligning the size */
3985 c
= (c
+ a
- 1) & -a
;
3989 printf("struct size %-2d align %-2d\n\n", c
, a
), fflush(stdout
);
3992 /* check whether we can access bitfields by their type */
3993 for (f
= type
->ref
->next
; f
; f
= f
->next
) {
3997 if (0 == (f
->type
.t
& VT_BITFIELD
))
4001 bit_size
= BIT_SIZE(f
->type
.t
);
4004 bit_pos
= BIT_POS(f
->type
.t
);
4005 size
= type_size(&f
->type
, &align
);
4006 if (bit_pos
+ bit_size
<= size
* 8 && f
->c
+ size
<= c
)
4009 /* try to access the field using a different type */
4010 c0
= -1, s
= align
= 1;
4012 px
= f
->c
* 8 + bit_pos
;
4013 cx
= (px
>> 3) & -align
;
4014 px
= px
- (cx
<< 3);
4017 s
= (px
+ bit_size
+ 7) >> 3;
4027 s
= type_size(&t
, &align
);
4031 if (px
+ bit_size
<= s
* 8 && cx
+ s
<= c
) {
4032 /* update offset and bit position */
4035 f
->type
.t
= (f
->type
.t
& ~(0x3f << VT_STRUCT_SHIFT
))
4036 | (bit_pos
<< VT_STRUCT_SHIFT
);
4040 printf("FIX field %s offset %-2d size %-2d align %-2d "
4041 "pos %-2d bits %-2d\n",
4042 get_tok_str(f
->v
& ~SYM_FIELD
, NULL
),
4043 cx
, s
, align
, px
, bit_size
);
4046 /* fall back to load/store single-byte wise */
4047 f
->auxtype
= VT_STRUCT
;
4049 printf("FIX field %s : load byte-wise\n",
4050 get_tok_str(f
->v
& ~SYM_FIELD
, NULL
));
4056 /* enum/struct/union declaration. u is VT_ENUM/VT_STRUCT/VT_UNION */
4057 static void struct_decl(CType
*type
, int u
)
4059 int v
, c
, size
, align
, flexible
;
4060 int bit_size
, bsize
, bt
;
4062 AttributeDef ad
, ad1
;
4065 memset(&ad
, 0, sizeof ad
);
4067 parse_attribute(&ad
);
4071 /* struct already defined ? return it */
4073 expect("struct/union/enum name");
4075 if (s
&& (s
->sym_scope
== local_scope
|| tok
!= '{')) {
4078 if (u
== VT_ENUM
&& IS_ENUM(s
->type
.t
))
4080 tcc_error("redefinition of '%s'", get_tok_str(v
, NULL
));
4085 /* Record the original enum/struct/union token. */
4086 type1
.t
= u
== VT_ENUM
? u
| VT_INT
| VT_UNSIGNED
: u
;
4088 /* we put an undefined size for struct/union */
4089 s
= sym_push(v
| SYM_STRUCT
, &type1
, 0, -1);
4090 s
->r
= 0; /* default alignment is zero as gcc */
4092 type
->t
= s
->type
.t
;
4098 tcc_error("struct/union/enum already defined");
4100 /* cannot be empty */
4101 /* non empty enums are not allowed */
4104 long long ll
= 0, pl
= 0, nl
= 0;
4107 /* enum symbols have static storage */
4108 t
.t
= VT_INT
|VT_STATIC
|VT_ENUM_VAL
;
4112 expect("identifier");
4114 if (ss
&& !local_stack
)
4115 tcc_error("redefinition of enumerator '%s'",
4116 get_tok_str(v
, NULL
));
4120 ll
= expr_const64();
4122 ss
= sym_push(v
, &t
, VT_CONST
, 0);
4124 *ps
= ss
, ps
= &ss
->next
;
4133 /* NOTE: we accept a trailing comma */
4138 /* set integral type of the enum */
4141 if (pl
!= (unsigned)pl
)
4142 t
.t
= (LONG_SIZE
==8 ? VT_LLONG
|VT_LONG
: VT_LLONG
);
4144 } else if (pl
!= (int)pl
|| nl
!= (int)nl
)
4145 t
.t
= (LONG_SIZE
==8 ? VT_LLONG
|VT_LONG
: VT_LLONG
);
4146 s
->type
.t
= type
->t
= t
.t
| VT_ENUM
;
4148 /* set type for enum members */
4149 for (ss
= s
->next
; ss
; ss
= ss
->next
) {
4151 if (ll
== (int)ll
) /* default is int if it fits */
4153 if (t
.t
& VT_UNSIGNED
) {
4154 ss
->type
.t
|= VT_UNSIGNED
;
4155 if (ll
== (unsigned)ll
)
4158 ss
->type
.t
= (ss
->type
.t
& ~VT_BTYPE
)
4159 | (LONG_SIZE
==8 ? VT_LLONG
|VT_LONG
: VT_LLONG
);
4164 while (tok
!= '}') {
4165 if (!parse_btype(&btype
, &ad1
)) {
4171 tcc_error("flexible array member '%s' not at the end of struct",
4172 get_tok_str(v
, NULL
));
4178 type_decl(&type1
, &ad1
, &v
, TYPE_DIRECT
);
4180 if ((type1
.t
& VT_BTYPE
) != VT_STRUCT
)
4181 expect("identifier");
4183 int v
= btype
.ref
->v
;
4184 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
4185 if (tcc_state
->ms_extensions
== 0)
4186 expect("identifier");
4190 if (type_size(&type1
, &align
) < 0) {
4191 if ((u
== VT_STRUCT
) && (type1
.t
& VT_ARRAY
) && c
)
4194 tcc_error("field '%s' has incomplete type",
4195 get_tok_str(v
, NULL
));
4197 if ((type1
.t
& VT_BTYPE
) == VT_FUNC
||
4198 (type1
.t
& VT_BTYPE
) == VT_VOID
||
4199 (type1
.t
& VT_STORAGE
))
4200 tcc_error("invalid type for '%s'",
4201 get_tok_str(v
, NULL
));
4205 bit_size
= expr_const();
4206 /* XXX: handle v = 0 case for messages */
4208 tcc_error("negative width in bit-field '%s'",
4209 get_tok_str(v
, NULL
));
4210 if (v
&& bit_size
== 0)
4211 tcc_error("zero width for bit-field '%s'",
4212 get_tok_str(v
, NULL
));
4213 parse_attribute(&ad1
);
4215 size
= type_size(&type1
, &align
);
4216 if (bit_size
>= 0) {
4217 bt
= type1
.t
& VT_BTYPE
;
4223 tcc_error("bitfields must have scalar type");
4225 if (bit_size
> bsize
) {
4226 tcc_error("width of '%s' exceeds its type",
4227 get_tok_str(v
, NULL
));
4228 } else if (bit_size
== bsize
4229 && !ad
.a
.packed
&& !ad1
.a
.packed
) {
4230 /* no need for bit fields */
4232 } else if (bit_size
== 64) {
4233 tcc_error("field width 64 not implemented");
4235 type1
.t
= (type1
.t
& ~VT_STRUCT_MASK
)
4237 | (bit_size
<< (VT_STRUCT_SHIFT
+ 6));
4240 if (v
!= 0 || (type1
.t
& VT_BTYPE
) == VT_STRUCT
) {
4241 /* Remember we've seen a real field to check
4242 for placement of flexible array member. */
4245 /* If member is a struct or bit-field, enforce
4246 placing into the struct (as anonymous). */
4248 ((type1
.t
& VT_BTYPE
) == VT_STRUCT
||
4253 ss
= sym_push(v
| SYM_FIELD
, &type1
, 0, 0);
4258 if (tok
== ';' || tok
== TOK_EOF
)
4265 parse_attribute(&ad
);
4266 struct_layout(type
, &ad
);
4271 static void sym_to_attr(AttributeDef
*ad
, Sym
*s
)
4273 merge_symattr(&ad
->a
, &s
->a
);
4274 merge_funcattr(&ad
->f
, &s
->f
);
4277 /* Add type qualifiers to a type. If the type is an array then the qualifiers
4278 are added to the element type, copied because it could be a typedef. */
4279 static void parse_btype_qualify(CType
*type
, int qualifiers
)
4281 while (type
->t
& VT_ARRAY
) {
4282 type
->ref
= sym_push(SYM_FIELD
, &type
->ref
->type
, 0, type
->ref
->c
);
4283 type
= &type
->ref
->type
;
4285 type
->t
|= qualifiers
;
4288 /* return 0 if no type declaration. otherwise, return the basic type
4291 static int parse_btype(CType
*type
, AttributeDef
*ad
)
4293 int t
, u
, bt
, st
, type_found
, typespec_found
, g
, n
;
4297 memset(ad
, 0, sizeof(AttributeDef
));
4307 /* currently, we really ignore extension */
4317 if (u
== VT_SHORT
|| u
== VT_LONG
) {
4318 if (st
!= -1 || (bt
!= -1 && bt
!= VT_INT
))
4319 tmbt
: tcc_error("too many basic types");
4322 if (bt
!= -1 || (st
!= -1 && u
!= VT_INT
))
4327 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | u
;
4344 memset(&ad1
, 0, sizeof(AttributeDef
));
4345 if (parse_btype(&type1
, &ad1
)) {
4346 type_decl(&type1
, &ad1
, &n
, TYPE_ABSTRACT
);
4348 n
= 1 << (ad1
.a
.aligned
- 1);
4350 type_size(&type1
, &n
);
4353 if (n
<= 0 || (n
& (n
- 1)) != 0)
4354 tcc_error("alignment must be a positive power of two");
4357 ad
->a
.aligned
= exact_log2p1(n
);
4361 if ((t
& VT_BTYPE
) == VT_DOUBLE
) {
4362 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_LDOUBLE
;
4363 } else if ((t
& (VT_BTYPE
|VT_LONG
)) == VT_LONG
) {
4364 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_LLONG
;
4371 #ifdef TCC_TARGET_ARM64
4373 /* GCC's __uint128_t appears in some Linux header files. Make it a
4374 synonym for long double to get the size and alignment right. */
4385 if ((t
& (VT_BTYPE
|VT_LONG
)) == VT_LONG
) {
4386 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_LDOUBLE
;
4394 struct_decl(&type1
, VT_ENUM
);
4397 type
->ref
= type1
.ref
;
4400 struct_decl(&type1
, VT_STRUCT
);
4403 struct_decl(&type1
, VT_UNION
);
4406 /* type modifiers */
4411 parse_btype_qualify(type
, VT_CONSTANT
);
4419 parse_btype_qualify(type
, VT_VOLATILE
);
4426 if ((t
& (VT_DEFSIGN
|VT_UNSIGNED
)) == (VT_DEFSIGN
|VT_UNSIGNED
))
4427 tcc_error("signed and unsigned modifier");
4440 if ((t
& (VT_DEFSIGN
|VT_UNSIGNED
)) == VT_DEFSIGN
)
4441 tcc_error("signed and unsigned modifier");
4442 t
|= VT_DEFSIGN
| VT_UNSIGNED
;
4458 if (t
& (VT_EXTERN
|VT_STATIC
|VT_TYPEDEF
) & ~g
)
4459 tcc_error("multiple storage classes");
4471 ad
->f
.func_noreturn
= 1;
4473 /* GNUC attribute */
4474 case TOK_ATTRIBUTE1
:
4475 case TOK_ATTRIBUTE2
:
4476 parse_attribute(ad
);
4477 if (ad
->attr_mode
) {
4478 u
= ad
->attr_mode
-1;
4479 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | u
;
4487 parse_expr_type(&type1
);
4488 /* remove all storage modifiers except typedef */
4489 type1
.t
&= ~(VT_STORAGE
&~VT_TYPEDEF
);
4491 sym_to_attr(ad
, type1
.ref
);
4497 if (!s
|| !(s
->type
.t
& VT_TYPEDEF
))
4501 if (tok
== ':' && !in_generic
) {
4502 /* ignore if it's a label */
4507 t
&= ~(VT_BTYPE
|VT_LONG
);
4508 u
= t
& ~(VT_CONSTANT
| VT_VOLATILE
), t
^= u
;
4509 type
->t
= (s
->type
.t
& ~VT_TYPEDEF
) | u
;
4510 type
->ref
= s
->type
.ref
;
4512 parse_btype_qualify(type
, t
);
4514 /* get attributes from typedef */
4523 if (tcc_state
->char_is_unsigned
) {
4524 if ((t
& (VT_DEFSIGN
|VT_BTYPE
)) == VT_BYTE
)
4527 /* VT_LONG is used just as a modifier for VT_INT / VT_LLONG */
4528 bt
= t
& (VT_BTYPE
|VT_LONG
);
4530 t
|= LONG_SIZE
== 8 ? VT_LLONG
: VT_INT
;
4531 #ifdef TCC_TARGET_PE
4532 if (bt
== VT_LDOUBLE
)
4533 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_DOUBLE
;
4539 /* convert a function parameter type (array to pointer and function to
4540 function pointer) */
4541 static inline void convert_parameter_type(CType
*pt
)
4543 /* remove const and volatile qualifiers (XXX: const could be used
4544 to indicate a const function parameter */
4545 pt
->t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
4546 /* array must be transformed to pointer according to ANSI C */
4548 if ((pt
->t
& VT_BTYPE
) == VT_FUNC
) {
4553 ST_FUNC
void parse_asm_str(CString
*astr
)
4556 parse_mult_str(astr
, "string constant");
4559 /* Parse an asm label and return the token */
4560 static int asm_label_instr(void)
4566 parse_asm_str(&astr
);
4569 printf("asm_alias: \"%s\"\n", (char *)astr
.data
);
4571 v
= tok_alloc(astr
.data
, astr
.size
- 1)->tok
;
4576 static int post_type(CType
*type
, AttributeDef
*ad
, int storage
, int td
)
4578 int n
, l
, t1
, arg_size
, align
, unused_align
;
4579 Sym
**plast
, *s
, *first
;
4584 /* function type, or recursive declarator (return if so) */
4586 if (td
&& !(td
& TYPE_ABSTRACT
))
4590 else if (parse_btype(&pt
, &ad1
))
4593 merge_attr (ad
, &ad1
);
4602 /* read param name and compute offset */
4603 if (l
!= FUNC_OLD
) {
4604 if ((pt
.t
& VT_BTYPE
) == VT_VOID
&& tok
== ')')
4606 type_decl(&pt
, &ad1
, &n
, TYPE_DIRECT
| TYPE_ABSTRACT
);
4607 if ((pt
.t
& VT_BTYPE
) == VT_VOID
)
4608 tcc_error("parameter declared as void");
4612 expect("identifier");
4613 pt
.t
= VT_VOID
; /* invalid type */
4617 convert_parameter_type(&pt
);
4618 arg_size
+= (type_size(&pt
, &align
) + PTR_SIZE
- 1) / PTR_SIZE
;
4619 s
= sym_push(n
| SYM_FIELD
, &pt
, 0, 0);
4625 if (l
== FUNC_NEW
&& tok
== TOK_DOTS
) {
4630 if (l
== FUNC_NEW
&& !parse_btype(&pt
, &ad1
))
4631 tcc_error("invalid type");
4634 /* if no parameters, then old type prototype */
4637 /* NOTE: const is ignored in returned type as it has a special
4638 meaning in gcc / C++ */
4639 type
->t
&= ~VT_CONSTANT
;
4640 /* some ancient pre-K&R C allows a function to return an array
4641 and the array brackets to be put after the arguments, such
4642 that "int c()[]" means something like "int[] c()" */
4645 skip(']'); /* only handle simple "[]" */
4648 /* we push a anonymous symbol which will contain the function prototype */
4649 ad
->f
.func_args
= arg_size
;
4650 ad
->f
.func_type
= l
;
4651 s
= sym_push(SYM_FIELD
, type
, 0, 0);
4657 } else if (tok
== '[') {
4658 int saved_nocode_wanted
= nocode_wanted
;
4659 /* array definition */
4662 /* XXX The optional type-quals and static should only be accepted
4663 in parameter decls. The '*' as well, and then even only
4664 in prototypes (not function defs). */
4666 case TOK_RESTRICT1
: case TOK_RESTRICT2
: case TOK_RESTRICT3
:
4681 if (!local_stack
|| (storage
& VT_STATIC
))
4682 vpushi(expr_const());
4684 /* VLAs (which can only happen with local_stack && !VT_STATIC)
4685 length must always be evaluated, even under nocode_wanted,
4686 so that its size slot is initialized (e.g. under sizeof
4691 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
4694 tcc_error("invalid array size");
4696 if (!is_integer_btype(vtop
->type
.t
& VT_BTYPE
))
4697 tcc_error("size of variable length array should be an integer");
4703 /* parse next post type */
4704 post_type(type
, ad
, storage
, 0);
4706 if ((type
->t
& VT_BTYPE
) == VT_FUNC
)
4707 tcc_error("declaration of an array of functions");
4708 if ((type
->t
& VT_BTYPE
) == VT_VOID
4709 || type_size(type
, &unused_align
) < 0)
4710 tcc_error("declaration of an array of incomplete type elements");
4712 t1
|= type
->t
& VT_VLA
;
4716 tcc_error("need explicit inner array size in VLAs");
4717 loc
-= type_size(&int_type
, &align
);
4721 vla_runtime_type_size(type
, &align
);
4723 vset(&int_type
, VT_LOCAL
|VT_LVAL
, n
);
4729 nocode_wanted
= saved_nocode_wanted
;
4731 /* we push an anonymous symbol which will contain the array
4733 s
= sym_push(SYM_FIELD
, type
, 0, n
);
4734 type
->t
= (t1
? VT_VLA
: VT_ARRAY
) | VT_PTR
;
4740 /* Parse a type declarator (except basic type), and return the type
4741 in 'type'. 'td' is a bitmask indicating which kind of type decl is
4742 expected. 'type' should contain the basic type. 'ad' is the
4743 attribute definition of the basic type. It can be modified by
4744 type_decl(). If this (possibly abstract) declarator is a pointer chain
4745 it returns the innermost pointed to type (equals *type, but is a different
4746 pointer), otherwise returns type itself, that's used for recursive calls. */
4747 static CType
*type_decl(CType
*type
, AttributeDef
*ad
, int *v
, int td
)
4750 int qualifiers
, storage
;
4752 /* recursive type, remove storage bits first, apply them later again */
4753 storage
= type
->t
& VT_STORAGE
;
4754 type
->t
&= ~VT_STORAGE
;
4757 while (tok
== '*') {
4765 qualifiers
|= VT_CONSTANT
;
4770 qualifiers
|= VT_VOLATILE
;
4776 /* XXX: clarify attribute handling */
4777 case TOK_ATTRIBUTE1
:
4778 case TOK_ATTRIBUTE2
:
4779 parse_attribute(ad
);
4783 type
->t
|= qualifiers
;
4785 /* innermost pointed to type is the one for the first derivation */
4786 ret
= pointed_type(type
);
4790 /* This is possibly a parameter type list for abstract declarators
4791 ('int ()'), use post_type for testing this. */
4792 if (!post_type(type
, ad
, 0, td
)) {
4793 /* It's not, so it's a nested declarator, and the post operations
4794 apply to the innermost pointed to type (if any). */
4795 /* XXX: this is not correct to modify 'ad' at this point, but
4796 the syntax is not clear */
4797 parse_attribute(ad
);
4798 post
= type_decl(type
, ad
, v
, td
);
4802 } else if (tok
>= TOK_IDENT
&& (td
& TYPE_DIRECT
)) {
4803 /* type identifier */
4808 if (!(td
& TYPE_ABSTRACT
))
4809 expect("identifier");
4812 post_type(post
, ad
, storage
, 0);
4813 parse_attribute(ad
);
4818 /* indirection with full error checking and bound check */
4819 ST_FUNC
void indir(void)
4821 if ((vtop
->type
.t
& VT_BTYPE
) != VT_PTR
) {
4822 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FUNC
)
4826 if (vtop
->r
& VT_LVAL
)
4828 vtop
->type
= *pointed_type(&vtop
->type
);
4829 /* Arrays and functions are never lvalues */
4830 if (!(vtop
->type
.t
& (VT_ARRAY
| VT_VLA
))
4831 && (vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
) {
4833 /* if bound checking, the referenced pointer must be checked */
4834 #ifdef CONFIG_TCC_BCHECK
4835 if (tcc_state
->do_bounds_check
)
4836 vtop
->r
|= VT_MUSTBOUND
;
4841 /* pass a parameter to a function and do type checking and casting */
4842 static void gfunc_param_typed(Sym
*func
, Sym
*arg
)
4847 func_type
= func
->f
.func_type
;
4848 if (func_type
== FUNC_OLD
||
4849 (func_type
== FUNC_ELLIPSIS
&& arg
== NULL
)) {
4850 /* default casting : only need to convert float to double */
4851 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FLOAT
) {
4852 gen_cast_s(VT_DOUBLE
);
4853 } else if (vtop
->type
.t
& VT_BITFIELD
) {
4854 type
.t
= vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
);
4855 type
.ref
= vtop
->type
.ref
;
4857 } else if (vtop
->r
& VT_MUSTCAST
) {
4858 force_charshort_cast();
4860 } else if (arg
== NULL
) {
4861 tcc_error("too many arguments to function");
4864 type
.t
&= ~VT_CONSTANT
; /* need to do that to avoid false warning */
4865 gen_assign_cast(&type
);
4869 /* parse an expression and return its type without any side effect. */
4870 static void expr_type(CType
*type
, void (*expr_fn
)(void))
4879 /* parse an expression of the form '(type)' or '(expr)' and return its
4881 static void parse_expr_type(CType
*type
)
4887 if (parse_btype(type
, &ad
)) {
4888 type_decl(type
, &ad
, &n
, TYPE_ABSTRACT
);
4890 expr_type(type
, gexpr
);
4895 static void parse_type(CType
*type
)
4900 if (!parse_btype(type
, &ad
)) {
4903 type_decl(type
, &ad
, &n
, TYPE_ABSTRACT
);
4906 static void parse_builtin_params(int nc
, const char *args
)
4913 while ((c
= *args
++)) {
4917 case 'e': expr_eq(); continue;
4918 case 't': parse_type(&t
); vpush(&t
); continue;
4919 default: tcc_error("internal error"); break;
4927 ST_FUNC
void unary(void)
4929 int n
, t
, align
, size
, r
, sizeof_caller
;
4934 sizeof_caller
= in_sizeof
;
4937 /* XXX: GCC 2.95.3 does not generate a table although it should be
4945 #ifdef TCC_TARGET_PE
4946 t
= VT_SHORT
|VT_UNSIGNED
;
4954 vsetc(&type
, VT_CONST
, &tokc
);
4958 t
= VT_INT
| VT_UNSIGNED
;
4964 t
= VT_LLONG
| VT_UNSIGNED
;
4976 t
= (LONG_SIZE
== 8 ? VT_LLONG
: VT_INT
) | VT_LONG
;
4979 t
= (LONG_SIZE
== 8 ? VT_LLONG
: VT_INT
) | VT_LONG
| VT_UNSIGNED
;
4981 case TOK___FUNCTION__
:
4983 goto tok_identifier
;
4989 /* special function name identifier */
4990 len
= strlen(funcname
) + 1;
4991 /* generate char[len] type */
4996 vpush_ref(&type
, data_section
, data_section
->data_offset
, len
);
4997 if (!NODATA_WANTED
) {
4998 ptr
= section_ptr_add(data_section
, len
);
4999 memcpy(ptr
, funcname
, len
);
5005 #ifdef TCC_TARGET_PE
5006 t
= VT_SHORT
| VT_UNSIGNED
;
5012 /* string parsing */
5014 if (tcc_state
->char_is_unsigned
)
5015 t
= VT_BYTE
| VT_UNSIGNED
;
5017 if (tcc_state
->warn_write_strings
)
5022 memset(&ad
, 0, sizeof(AttributeDef
));
5023 decl_initializer_alloc(&type
, &ad
, VT_CONST
, 2, 0, 0);
5028 if (parse_btype(&type
, &ad
)) {
5029 type_decl(&type
, &ad
, &n
, TYPE_ABSTRACT
);
5031 /* check ISOC99 compound literal */
5033 /* data is allocated locally by default */
5038 /* all except arrays are lvalues */
5039 if (!(type
.t
& VT_ARRAY
))
5041 memset(&ad
, 0, sizeof(AttributeDef
));
5042 decl_initializer_alloc(&type
, &ad
, r
, 1, 0, 0);
5044 if (sizeof_caller
) {
5051 } else if (tok
== '{') {
5052 int saved_nocode_wanted
= nocode_wanted
;
5054 tcc_error("expected constant");
5055 /* save all registers */
5057 /* statement expression : we do not accept break/continue
5058 inside as GCC does. We do retain the nocode_wanted state,
5059 as statement expressions can't ever be entered from the
5060 outside, so any reactivation of code emission (from labels
5061 or loop heads) can be disabled again after the end of it. */
5063 nocode_wanted
= saved_nocode_wanted
;
5078 /* functions names must be treated as function pointers,
5079 except for unary '&' and sizeof. Since we consider that
5080 functions are not lvalues, we only have to handle it
5081 there and in function calls. */
5082 /* arrays can also be used although they are not lvalues */
5083 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
&&
5084 !(vtop
->type
.t
& VT_ARRAY
))
5086 mk_pointer(&vtop
->type
);
5092 gen_test_zero(TOK_EQ
);
5103 if ((vtop
->type
.t
& VT_BTYPE
) == VT_PTR
)
5104 tcc_error("pointer not accepted for unary plus");
5105 /* In order to force cast, we add zero, except for floating point
5106 where we really need an noop (otherwise -0.0 will be transformed
5108 if (!is_float(vtop
->type
.t
)) {
5120 expr_type(&type
, unary
); /* Perform a in_sizeof = 0; */
5122 if (vtop
[1].r
& VT_SYM
)
5123 s
= vtop
[1].sym
; /* hack: accessing previous vtop */
5124 size
= type_size(&type
, &align
);
5125 if (s
&& s
->a
.aligned
)
5126 align
= 1 << (s
->a
.aligned
- 1);
5127 if (t
== TOK_SIZEOF
) {
5128 if (!(type
.t
& VT_VLA
)) {
5130 tcc_error("sizeof applied to an incomplete type");
5133 vla_runtime_type_size(&type
, &align
);
5138 vtop
->type
.t
|= VT_UNSIGNED
;
5141 case TOK_builtin_expect
:
5142 /* __builtin_expect is a no-op for now */
5143 parse_builtin_params(0, "ee");
5146 case TOK_builtin_types_compatible_p
:
5147 parse_builtin_params(0, "tt");
5148 vtop
[-1].type
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
5149 vtop
[0].type
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
5150 n
= is_compatible_types(&vtop
[-1].type
, &vtop
[0].type
);
5154 case TOK_builtin_choose_expr
:
5181 case TOK_builtin_constant_p
:
5182 parse_builtin_params(1, "e");
5183 n
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
5187 case TOK_builtin_frame_address
:
5188 case TOK_builtin_return_address
:
5194 if (tok
!= TOK_CINT
) {
5195 tcc_error("%s only takes positive integers",
5196 tok1
== TOK_builtin_return_address
?
5197 "__builtin_return_address" :
5198 "__builtin_frame_address");
5200 level
= (uint32_t)tokc
.i
;
5205 vset(&type
, VT_LOCAL
, 0); /* local frame */
5207 mk_pointer(&vtop
->type
);
5208 indir(); /* -> parent frame */
5210 if (tok1
== TOK_builtin_return_address
) {
5211 // assume return address is just above frame pointer on stack
5214 mk_pointer(&vtop
->type
);
5219 #ifdef TCC_TARGET_RISCV64
5220 case TOK_builtin_va_start
:
5221 parse_builtin_params(0, "ee");
5222 r
= vtop
->r
& VT_VALMASK
;
5226 tcc_error("__builtin_va_start expects a local variable");
5231 #ifdef TCC_TARGET_X86_64
5232 #ifdef TCC_TARGET_PE
5233 case TOK_builtin_va_start
:
5234 parse_builtin_params(0, "ee");
5235 r
= vtop
->r
& VT_VALMASK
;
5239 tcc_error("__builtin_va_start expects a local variable");
5241 vtop
->type
= char_pointer_type
;
5246 case TOK_builtin_va_arg_types
:
5247 parse_builtin_params(0, "t");
5248 vpushi(classify_x86_64_va_arg(&vtop
->type
));
5255 #ifdef TCC_TARGET_ARM64
5256 case TOK___va_start
: {
5257 parse_builtin_params(0, "ee");
5261 vtop
->type
.t
= VT_VOID
;
5264 case TOK___va_arg
: {
5265 parse_builtin_params(0, "et");
5273 case TOK___arm64_clear_cache
: {
5274 parse_builtin_params(0, "ee");
5277 vtop
->type
.t
= VT_VOID
;
5281 /* pre operations */
5292 t
= vtop
->type
.t
& VT_BTYPE
;
5294 /* In IEEE negate(x) isn't subtract(0,x), but rather
5298 vtop
->c
.f
= -1.0 * 0.0;
5299 else if (t
== VT_DOUBLE
)
5300 vtop
->c
.d
= -1.0 * 0.0;
5302 vtop
->c
.ld
= -1.0 * 0.0;
5310 goto tok_identifier
;
5312 /* allow to take the address of a label */
5313 if (tok
< TOK_UIDENT
)
5314 expect("label identifier");
5315 s
= label_find(tok
);
5317 s
= label_push(&global_label_stack
, tok
, LABEL_FORWARD
);
5319 if (s
->r
== LABEL_DECLARED
)
5320 s
->r
= LABEL_FORWARD
;
5323 s
->type
.t
= VT_VOID
;
5324 mk_pointer(&s
->type
);
5325 s
->type
.t
|= VT_STATIC
;
5327 vpushsym(&s
->type
, s
);
5333 CType controlling_type
;
5334 int has_default
= 0;
5337 TokenString
*str
= NULL
;
5338 int saved_const_wanted
= const_wanted
;
5343 expr_type(&controlling_type
, expr_eq
);
5344 controlling_type
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
| VT_ARRAY
);
5345 if ((controlling_type
.t
& VT_BTYPE
) == VT_FUNC
)
5346 mk_pointer(&controlling_type
);
5347 const_wanted
= saved_const_wanted
;
5351 if (tok
== TOK_DEFAULT
) {
5353 tcc_error("too many 'default'");
5359 AttributeDef ad_tmp
;
5364 parse_btype(&cur_type
, &ad_tmp
);
5367 type_decl(&cur_type
, &ad_tmp
, &itmp
, TYPE_ABSTRACT
);
5368 if (compare_types(&controlling_type
, &cur_type
, 0)) {
5370 tcc_error("type match twice");
5380 skip_or_save_block(&str
);
5382 skip_or_save_block(NULL
);
5389 type_to_str(buf
, sizeof buf
, &controlling_type
, NULL
);
5390 tcc_error("type '%s' does not match any association", buf
);
5392 begin_macro(str
, 1);
5401 // special qnan , snan and infinity values
5406 vtop
->type
.t
= VT_FLOAT
;
5411 goto special_math_val
;
5414 goto special_math_val
;
5421 expect("identifier");
5423 if (!s
|| IS_ASM_SYM(s
)) {
5424 const char *name
= get_tok_str(t
, NULL
);
5426 tcc_error("'%s' undeclared", name
);
5427 /* for simple function calls, we tolerate undeclared
5428 external reference to int() function */
5429 if (tcc_state
->warn_implicit_function_declaration
5430 #ifdef TCC_TARGET_PE
5431 /* people must be warned about using undeclared WINAPI functions
5432 (which usually start with uppercase letter) */
5433 || (name
[0] >= 'A' && name
[0] <= 'Z')
5436 tcc_warning("implicit declaration of function '%s'", name
);
5437 s
= external_global_sym(t
, &func_old_type
);
5441 /* A symbol that has a register is a local register variable,
5442 which starts out as VT_LOCAL value. */
5443 if ((r
& VT_VALMASK
) < VT_CONST
)
5444 r
= (r
& ~VT_VALMASK
) | VT_LOCAL
;
5446 vset(&s
->type
, r
, s
->c
);
5447 /* Point to s as backpointer (even without r&VT_SYM).
5448 Will be used by at least the x86 inline asm parser for
5454 } else if (r
== VT_CONST
&& IS_ENUM_VAL(s
->type
.t
)) {
5455 vtop
->c
.i
= s
->enum_val
;
5460 /* post operations */
5462 if (tok
== TOK_INC
|| tok
== TOK_DEC
) {
5465 } else if (tok
== '.' || tok
== TOK_ARROW
|| tok
== TOK_CDOUBLE
) {
5466 int qualifiers
, cumofs
= 0;
5468 if (tok
== TOK_ARROW
)
5470 qualifiers
= vtop
->type
.t
& (VT_CONSTANT
| VT_VOLATILE
);
5473 /* expect pointer on structure */
5474 if ((vtop
->type
.t
& VT_BTYPE
) != VT_STRUCT
)
5475 expect("struct or union");
5476 if (tok
== TOK_CDOUBLE
)
5477 expect("field name");
5479 if (tok
== TOK_CINT
|| tok
== TOK_CUINT
)
5480 expect("field name");
5481 s
= find_field(&vtop
->type
, tok
, &cumofs
);
5483 tcc_error("field not found: %s", get_tok_str(tok
& ~SYM_FIELD
, &tokc
));
5484 /* add field offset to pointer */
5485 vtop
->type
= char_pointer_type
; /* change type to 'char *' */
5486 vpushi(cumofs
+ s
->c
);
5488 /* change type to field type, and set to lvalue */
5489 vtop
->type
= s
->type
;
5490 vtop
->type
.t
|= qualifiers
;
5491 /* an array is never an lvalue */
5492 if (!(vtop
->type
.t
& VT_ARRAY
)) {
5494 #ifdef CONFIG_TCC_BCHECK
5495 /* if bound checking, the referenced pointer must be checked */
5496 if (tcc_state
->do_bounds_check
&& (vtop
->r
& VT_VALMASK
) != VT_LOCAL
)
5497 vtop
->r
|= VT_MUSTBOUND
;
5501 } else if (tok
== '[') {
5507 } else if (tok
== '(') {
5510 int nb_args
, ret_nregs
, ret_align
, regsize
, variadic
;
5512 #ifdef CONFIG_TCC_BCHECK
5513 if (tcc_state
->do_bounds_check
&& (vtop
->r
& VT_SYM
) && vtop
->sym
->v
== TOK_alloca
) {
5516 bounds_ptr
= section_ptr_add(lbounds_section
, 2 * sizeof(addr_t
));
5517 bounds_ptr
[0] = 1; /* marks alloca/vla used */
5522 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
) {
5523 /* pointer test (no array accepted) */
5524 if ((vtop
->type
.t
& (VT_BTYPE
| VT_ARRAY
)) == VT_PTR
) {
5525 vtop
->type
= *pointed_type(&vtop
->type
);
5526 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
)
5530 expect("function pointer");
5533 vtop
->r
&= ~VT_LVAL
; /* no lvalue */
5535 /* get return type */
5538 sa
= s
->next
; /* first parameter */
5539 nb_args
= regsize
= 0;
5541 /* compute first implicit argument if a structure is returned */
5542 if ((s
->type
.t
& VT_BTYPE
) == VT_STRUCT
) {
5543 variadic
= (s
->f
.func_type
== FUNC_ELLIPSIS
);
5544 ret_nregs
= gfunc_sret(&s
->type
, variadic
, &ret
.type
,
5545 &ret_align
, ®size
);
5546 if (ret_nregs
<= 0) {
5547 /* get some space for the returned structure */
5548 size
= type_size(&s
->type
, &align
);
5549 #ifdef TCC_TARGET_ARM64
5550 /* On arm64, a small struct is return in registers.
5551 It is much easier to write it to memory if we know
5552 that we are allowed to write some extra bytes, so
5553 round the allocated space up to a power of 2: */
5555 while (size
& (size
- 1))
5556 size
= (size
| (size
- 1)) + 1;
5558 loc
= (loc
- size
) & -align
;
5560 ret
.r
= VT_LOCAL
| VT_LVAL
;
5561 /* pass it as 'int' to avoid structure arg passing
5563 vseti(VT_LOCAL
, loc
);
5575 if (ret_nregs
> 0) {
5576 /* return in register */
5578 PUT_R_RET(&ret
, ret
.type
.t
);
5583 gfunc_param_typed(s
, sa
);
5593 tcc_error("too few arguments to function");
5595 gfunc_call(nb_args
);
5597 if (ret_nregs
< 0) {
5598 vsetc(&ret
.type
, ret
.r
, &ret
.c
);
5599 #ifdef TCC_TARGET_RISCV64
5600 arch_transfer_ret_regs(1);
5604 for (r
= ret
.r
+ ret_nregs
+ !ret_nregs
; r
-- > ret
.r
;) {
5605 vsetc(&ret
.type
, r
, &ret
.c
);
5606 vtop
->r2
= ret
.r2
; /* Loop only happens when r2 is VT_CONST */
5609 /* handle packed struct return */
5610 if (((s
->type
.t
& VT_BTYPE
) == VT_STRUCT
) && ret_nregs
) {
5613 size
= type_size(&s
->type
, &align
);
5614 /* We're writing whole regs often, make sure there's enough
5615 space. Assume register size is power of 2. */
5616 if (regsize
> align
)
5618 loc
= (loc
- size
) & -align
;
5622 vset(&ret
.type
, VT_LOCAL
| VT_LVAL
, addr
+ offset
);
5626 if (--ret_nregs
== 0)
5630 vset(&s
->type
, VT_LOCAL
| VT_LVAL
, addr
);
5633 /* Promote char/short return values. This is matters only
5634 for calling function that were not compiled by TCC */
5635 t
= s
->type
.t
& VT_BTYPE
;
5636 if (t
== VT_BYTE
|| t
== VT_SHORT
|| t
== VT_BOOL
)
5637 vtop
->r
|= BFVAL(VT_MUSTCAST
, 1);
5639 if (s
->f
.func_noreturn
)
5647 ST_FUNC
void expr_prod(void)
5652 while (tok
== '*' || tok
== '/' || tok
== '%') {
5660 ST_FUNC
void expr_sum(void)
5665 while (tok
== '+' || tok
== '-') {
5673 static void expr_shift(void)
5678 while (tok
== TOK_SHL
|| tok
== TOK_SAR
) {
5686 static void expr_cmp(void)
5691 while ((tok
>= TOK_ULE
&& tok
<= TOK_GT
) ||
5692 tok
== TOK_ULT
|| tok
== TOK_UGE
) {
5700 static void expr_cmpeq(void)
5705 while (tok
== TOK_EQ
|| tok
== TOK_NE
) {
5713 static void expr_and(void)
5716 while (tok
== '&') {
5723 static void expr_xor(void)
5726 while (tok
== '^') {
5733 static void expr_or(void)
5736 while (tok
== '|') {
5743 static int condition_3way(void);
5745 static void expr_landor(void(*e_fn
)(void), int e_op
, int i
)
5747 int t
= 0, cc
= 1, f
= 0, c
;
5749 c
= f
? i
: condition_3way();
5751 save_regs(1), cc
= 0;
5752 } else if (c
!= i
) {
5753 nocode_wanted
++, f
= 1;
5775 static void expr_land(void)
5778 if (tok
== TOK_LAND
)
5779 expr_landor(expr_or
, TOK_LAND
, 1);
5782 static void expr_lor(void)
5786 expr_landor(expr_land
, TOK_LOR
, 0);
5789 /* Assuming vtop is a value used in a conditional context
5790 (i.e. compared with zero) return 0 if it's false, 1 if
5791 true and -1 if it can't be statically determined. */
5792 static int condition_3way(void)
5795 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
&&
5796 (!(vtop
->r
& VT_SYM
) || !vtop
->sym
->a
.weak
)) {
5798 gen_cast_s(VT_BOOL
);
5805 static int is_cond_bool(SValue
*sv
)
5807 if ((sv
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
5808 && (sv
->type
.t
& VT_BTYPE
) == VT_INT
)
5809 return (unsigned)sv
->c
.i
< 2;
5810 if (sv
->r
== VT_CMP
)
5815 static void expr_cond(void)
5817 int tt
, u
, r1
, r2
, rc
, t1
, t2
, bt1
, bt2
, islv
, c
, g
;
5819 CType type
, type1
, type2
;
5825 c
= condition_3way();
5826 g
= (tok
== ':' && gnu_ext
);
5836 /* needed to avoid having different registers saved in
5843 ncw_prev
= nocode_wanted
;
5850 if (c
< 0 && vtop
->r
== VT_CMP
) {
5856 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FUNC
)
5857 mk_pointer(&vtop
->type
);
5859 sv
= *vtop
; /* save value to handle it later */
5860 vtop
--; /* no vpop so that FP stack is not flushed */
5870 nocode_wanted
= ncw_prev
;
5876 if (c
< 0 && is_cond_bool(vtop
) && is_cond_bool(&sv
)) {
5877 if (sv
.r
== VT_CMP
) {
5888 nocode_wanted
= ncw_prev
;
5889 // tcc_warning("two conditions expr_cond");
5893 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FUNC
)
5894 mk_pointer(&vtop
->type
);
5897 bt1
= t1
& VT_BTYPE
;
5899 bt2
= t2
& VT_BTYPE
;
5902 /* cast operands to correct type according to ISOC rules */
5903 if (bt1
== VT_VOID
|| bt2
== VT_VOID
) {
5904 type
.t
= VT_VOID
; /* NOTE: as an extension, we accept void on only one side */
5905 } else if (is_float(bt1
) || is_float(bt2
)) {
5906 if (bt1
== VT_LDOUBLE
|| bt2
== VT_LDOUBLE
) {
5907 type
.t
= VT_LDOUBLE
;
5909 } else if (bt1
== VT_DOUBLE
|| bt2
== VT_DOUBLE
) {
5914 } else if (bt1
== VT_LLONG
|| bt2
== VT_LLONG
) {
5915 /* cast to biggest op */
5916 type
.t
= VT_LLONG
| VT_LONG
;
5917 if (bt1
== VT_LLONG
)
5919 if (bt2
== VT_LLONG
)
5921 /* convert to unsigned if it does not fit in a long long */
5922 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
) ||
5923 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
))
5924 type
.t
|= VT_UNSIGNED
;
5925 } else if (bt1
== VT_PTR
|| bt2
== VT_PTR
) {
5926 /* http://port70.net/~nsz/c/c99/n1256.html#6.5.15p6 */
5927 /* If one is a null ptr constant the result type
5929 if (is_null_pointer (vtop
)) type
= type1
;
5930 else if (is_null_pointer (&sv
)) type
= type2
;
5931 else if (bt1
!= bt2
)
5932 tcc_error("incompatible types in conditional expressions");
5934 CType
*pt1
= pointed_type(&type1
);
5935 CType
*pt2
= pointed_type(&type2
);
5936 int pbt1
= pt1
->t
& VT_BTYPE
;
5937 int pbt2
= pt2
->t
& VT_BTYPE
;
5938 int newquals
, copied
= 0;
5939 /* pointers to void get preferred, otherwise the
5940 pointed to types minus qualifs should be compatible */
5941 type
= (pbt1
== VT_VOID
) ? type1
: type2
;
5942 if (pbt1
!= VT_VOID
&& pbt2
!= VT_VOID
) {
5943 if(!compare_types(pt1
, pt2
, 1/*unqualif*/))
5944 tcc_warning("pointer type mismatch in conditional expression\n");
5946 /* combine qualifs */
5947 newquals
= ((pt1
->t
| pt2
->t
) & (VT_CONSTANT
| VT_VOLATILE
));
5948 if ((~pointed_type(&type
)->t
& (VT_CONSTANT
| VT_VOLATILE
))
5951 /* copy the pointer target symbol */
5952 type
.ref
= sym_push(SYM_FIELD
, &type
.ref
->type
,
5955 pointed_type(&type
)->t
|= newquals
;
5957 /* pointers to incomplete arrays get converted to
5958 pointers to completed ones if possible */
5959 if (pt1
->t
& VT_ARRAY
5960 && pt2
->t
& VT_ARRAY
5961 && pointed_type(&type
)->ref
->c
< 0
5962 && (pt1
->ref
->c
> 0 || pt2
->ref
->c
> 0))
5965 type
.ref
= sym_push(SYM_FIELD
, &type
.ref
->type
,
5967 pointed_type(&type
)->ref
=
5968 sym_push(SYM_FIELD
, &pointed_type(&type
)->ref
->type
,
5969 0, pointed_type(&type
)->ref
->c
);
5970 pointed_type(&type
)->ref
->c
=
5971 0 < pt1
->ref
->c
? pt1
->ref
->c
: pt2
->ref
->c
;
5974 } else if (bt1
== VT_STRUCT
|| bt2
== VT_STRUCT
) {
5975 /* XXX: test structure compatibility */
5976 type
= bt1
== VT_STRUCT
? type1
: type2
;
5978 /* integer operations */
5979 type
.t
= VT_INT
| (VT_LONG
& (t1
| t2
));
5980 /* convert to unsigned if it does not fit in an integer */
5981 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
) ||
5982 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
))
5983 type
.t
|= VT_UNSIGNED
;
5985 /* keep structs lvalue by transforming `(expr ? a : b)` to `*(expr ? &a : &b)` so
5986 that `(expr ? a : b).mem` does not error with "lvalue expected" */
5987 islv
= (vtop
->r
& VT_LVAL
) && (sv
.r
& VT_LVAL
) && VT_STRUCT
== (type
.t
& VT_BTYPE
);
5989 /* now we convert second operand */
5993 mk_pointer(&vtop
->type
);
5995 } else if (VT_STRUCT
== (vtop
->type
.t
& VT_BTYPE
))
5999 rc
= RC_TYPE(type
.t
);
6000 /* for long longs, we use fixed registers to avoid having
6001 to handle a complicated move */
6002 if (USING_TWO_WORDS(type
.t
))
6003 rc
= RC_RET(type
.t
);
6011 nocode_wanted
= ncw_prev
;
6013 /* this is horrible, but we must also convert first
6019 mk_pointer(&vtop
->type
);
6021 } else if (VT_STRUCT
== (vtop
->type
.t
& VT_BTYPE
))
6027 move_reg(r2
, r1
, islv
? VT_PTR
: type
.t
);
6038 static void expr_eq(void)
6044 (tok
>= TOK_A_MOD
&& tok
<= TOK_A_DIV
) ||
6045 tok
== TOK_A_XOR
|| tok
== TOK_A_OR
||
6046 tok
== TOK_A_SHL
|| tok
== TOK_A_SAR
) {
6061 ST_FUNC
void gexpr(void)
6072 /* parse a constant expression and return value in vtop. */
6073 static void expr_const1(void)
6082 /* parse an integer constant and return its value. */
6083 static inline int64_t expr_const64(void)
6087 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) != VT_CONST
)
6088 expect("constant expression");
6094 /* parse an integer constant and return its value.
6095 Complain if it doesn't fit 32bit (signed or unsigned). */
6096 ST_FUNC
int expr_const(void)
6099 int64_t wc
= expr_const64();
6101 if (c
!= wc
&& (unsigned)c
!= wc
)
6102 tcc_error("constant exceeds 32 bit");
6106 /* ------------------------------------------------------------------------- */
6107 /* return from function */
6109 #ifndef TCC_TARGET_ARM64
6110 static void gfunc_return(CType
*func_type
)
6112 if ((func_type
->t
& VT_BTYPE
) == VT_STRUCT
) {
6113 CType type
, ret_type
;
6114 int ret_align
, ret_nregs
, regsize
;
6115 ret_nregs
= gfunc_sret(func_type
, func_var
, &ret_type
,
6116 &ret_align
, ®size
);
6117 if (ret_nregs
< 0) {
6118 #ifdef TCC_TARGET_RISCV64
6119 arch_transfer_ret_regs(0);
6121 } else if (0 == ret_nregs
) {
6122 /* if returning structure, must copy it to implicit
6123 first pointer arg location */
6126 vset(&type
, VT_LOCAL
| VT_LVAL
, func_vc
);
6129 /* copy structure value to pointer */
6132 /* returning structure packed into registers */
6133 int size
, addr
, align
, rc
;
6134 size
= type_size(func_type
,&align
);
6135 if ((vtop
->r
!= (VT_LOCAL
| VT_LVAL
) ||
6136 (vtop
->c
.i
& (ret_align
-1)))
6137 && (align
& (ret_align
-1))) {
6138 loc
= (loc
- size
) & -ret_align
;
6141 vset(&type
, VT_LOCAL
| VT_LVAL
, addr
);
6145 vset(&ret_type
, VT_LOCAL
| VT_LVAL
, addr
);
6147 vtop
->type
= ret_type
;
6148 rc
= RC_RET(ret_type
.t
);
6156 if (--ret_nregs
== 0)
6158 /* We assume that when a structure is returned in multiple
6159 registers, their classes are consecutive values of the
6162 vtop
->c
.i
+= regsize
;
6167 gv(RC_RET(func_type
->t
));
6169 vtop
--; /* NOT vpop() because on x86 it would flush the fp stack */
6173 static void check_func_return(void)
6175 if ((func_vt
.t
& VT_BTYPE
) == VT_VOID
)
6177 if (!strcmp (funcname
, "main")
6178 && (func_vt
.t
& VT_BTYPE
) == VT_INT
) {
6179 /* main returns 0 by default */
6181 gen_assign_cast(&func_vt
);
6182 gfunc_return(&func_vt
);
6184 tcc_warning("function might return no value: '%s'", funcname
);
6188 /* ------------------------------------------------------------------------- */
6191 static int case_cmp(const void *pa
, const void *pb
)
6193 int64_t a
= (*(struct case_t
**) pa
)->v1
;
6194 int64_t b
= (*(struct case_t
**) pb
)->v1
;
6195 return a
< b
? -1 : a
> b
;
/* Emit a conditional test of vtop (via gvtst) and bind the resulting
   forward jump to the already-known code address 'a'.
   t: jump chain passed through to gvtst; a: target address for gsym_addr. */
static void gtst_addr(int t, int a)
{
    int jmp;

    jmp = gvtst(0, t);
    gsym_addr(jmp, a);
}
6203 static void gcase(struct case_t
**base
, int len
, int *bsym
)
6207 int ll
= (vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
;
6224 gtst_addr(0, p
->sym
); /* v1 <= x <= v2 */
6226 gcase(base
, len
/2, bsym
);
6230 base
+= e
; len
-= e
;
6240 if (p
->v1
== p
->v2
) {
6242 gtst_addr(0, p
->sym
);
6252 gtst_addr(0, p
->sym
);
6256 *bsym
= gjmp(*bsym
);
6259 /* ------------------------------------------------------------------------- */
6260 /* __attribute__((cleanup(fn))) */
6262 static void try_call_scope_cleanup(Sym
*stop
)
6264 Sym
*cls
= cur_scope
->cl
.s
;
6266 for (; cls
!= stop
; cls
= cls
->ncl
) {
6267 Sym
*fs
= cls
->next
;
6268 Sym
*vs
= cls
->prev_tok
;
6270 vpushsym(&fs
->type
, fs
);
6271 vset(&vs
->type
, vs
->r
, vs
->c
);
6273 mk_pointer(&vtop
->type
);
6279 static void try_call_cleanup_goto(Sym
*cleanupstate
)
6284 if (!cur_scope
->cl
.s
)
6287 /* search NCA of both cleanup chains given parents and initial depth */
6288 ocd
= cleanupstate
? cleanupstate
->v
& ~SYM_FIELD
: 0;
6289 for (ccd
= cur_scope
->cl
.n
, oc
= cleanupstate
; ocd
> ccd
; --ocd
, oc
= oc
->ncl
)
6291 for (cc
= cur_scope
->cl
.s
; ccd
> ocd
; --ccd
, cc
= cc
->ncl
)
6293 for (; cc
!= oc
; cc
= cc
->ncl
, oc
= oc
->ncl
, --ccd
)
6296 try_call_scope_cleanup(cc
);
6299 /* call 'func' for each __attribute__((cleanup(func))) */
6300 static void block_cleanup(struct scope
*o
)
6304 for (pg
= &pending_gotos
; (g
= *pg
) && g
->c
> o
->cl
.n
;) {
6305 if (g
->prev_tok
->r
& LABEL_FORWARD
) {
6310 try_call_scope_cleanup(o
->cl
.s
);
6311 pcl
->jnext
= gjmp(0);
6313 goto remove_pending
;
6323 try_call_scope_cleanup(o
->cl
.s
);
6326 /* ------------------------------------------------------------------------- */
6329 static void vla_restore(int loc
)
6332 gen_vla_sp_restore(loc
);
6335 static void vla_leave(struct scope
*o
)
6337 if (o
->vla
.num
< cur_scope
->vla
.num
)
6338 vla_restore(o
->vla
.loc
);
6341 /* ------------------------------------------------------------------------- */
6344 void new_scope(struct scope
*o
)
6346 /* copy and link previous scope */
6348 o
->prev
= cur_scope
;
6351 /* record local declaration stack position */
6352 o
->lstk
= local_stack
;
6353 o
->llstk
= local_label_stack
;
6358 void prev_scope(struct scope
*o
, int is_expr
)
6362 if (o
->cl
.s
!= o
->prev
->cl
.s
)
6363 block_cleanup(o
->prev
);
6365 /* pop locally defined labels */
6366 label_pop(&local_label_stack
, o
->llstk
, is_expr
);
6368 /* In the is_expr case (a statement expression is finished here),
6369 vtop might refer to symbols on the local_stack. Either via the
6370 type or via vtop->sym. We can't pop those nor any that in turn
6371 might be referred to. To make it easier we don't roll back
6372 any symbols in that case; some upper level call to block() will
6373 do that. We do have to remove such symbols from the lookup
6374 tables, though. sym_pop will do that. */
6376 /* pop locally defined symbols */
6377 sym_pop(&local_stack
, o
->lstk
, is_expr
);
6379 cur_scope
= o
->prev
;
6383 /* leave a scope via break/continue(/goto) */
6384 void leave_scope(struct scope
*o
)
6388 try_call_scope_cleanup(o
->cl
.s
);
6392 /* ------------------------------------------------------------------------- */
6393 /* call block from 'for do while' loops */
6395 static void lblock(int *bsym
, int *csym
)
6397 struct scope
*lo
= loop_scope
, *co
= cur_scope
;
6398 int *b
= co
->bsym
, *c
= co
->csym
;
6412 static void block(int is_expr
)
6414 int a
, b
, c
, d
, e
, t
;
6418 /* default return value is (void) */
6420 vtop
->type
.t
= VT_VOID
;
6432 if (tok
== TOK_ELSE
) {
6437 gsym(d
); /* patch else jmp */
6442 } else if (t
== TOK_WHILE
) {
6454 } else if (t
== '{') {
6458 /* handle local labels declarations */
6459 while (tok
== TOK_LABEL
) {
6462 if (tok
< TOK_UIDENT
)
6463 expect("label identifier");
6464 label_push(&local_label_stack
, tok
, LABEL_DECLARED
);
6466 } while (tok
== ',');
6470 while (tok
!= '}') {
6479 prev_scope(&o
, is_expr
);
6481 if (0 == local_scope
&& !nocode_wanted
)
6482 check_func_return();
6485 } else if (t
== TOK_RETURN
) {
6486 b
= (func_vt
.t
& VT_BTYPE
) != VT_VOID
;
6490 gen_assign_cast(&func_vt
);
6492 if (vtop
->type
.t
!= VT_VOID
)
6493 tcc_warning("void function returns a value");
6497 tcc_warning("'return' with no value");
6500 leave_scope(root_scope
);
6502 gfunc_return(&func_vt
);
6504 /* jump unless last stmt in top-level block */
6505 if (tok
!= '}' || local_scope
!= 1)
6509 } else if (t
== TOK_BREAK
) {
6511 if (!cur_scope
->bsym
)
6512 tcc_error("cannot break");
6513 if (!cur_switch
|| cur_scope
->bsym
!= cur_switch
->bsym
)
6514 leave_scope(loop_scope
);
6516 leave_scope(cur_switch
->scope
);
6517 *cur_scope
->bsym
= gjmp(*cur_scope
->bsym
);
6520 } else if (t
== TOK_CONTINUE
) {
6522 if (!cur_scope
->csym
)
6523 tcc_error("cannot continue");
6524 leave_scope(loop_scope
);
6525 *cur_scope
->csym
= gjmp(*cur_scope
->csym
);
6528 } else if (t
== TOK_FOR
) {
6534 /* c99 for-loop init decl? */
6535 if (!decl0(VT_LOCAL
, 1, NULL
)) {
6536 /* no, regular for-loop init expr */
6564 } else if (t
== TOK_DO
) {
6578 } else if (t
== TOK_SWITCH
) {
6579 struct switch_t
*saved
, sw
;
6586 sw
.scope
= cur_scope
;
6594 switchval
= *vtop
--;
6597 b
= gjmp(0); /* jump to first case */
6599 a
= gjmp(a
); /* add implicit break */
6603 qsort(sw
.p
, sw
.n
, sizeof(void*), case_cmp
);
6604 for (b
= 1; b
< sw
.n
; b
++)
6605 if (sw
.p
[b
- 1]->v2
>= sw
.p
[b
]->v1
)
6606 tcc_error("duplicate case value");
6608 /* Our switch table sorting is signed, so the compared
6609 value needs to be as well when it's 64bit. */
6610 if ((switchval
.type
.t
& VT_BTYPE
) == VT_LLONG
)
6611 switchval
.type
.t
&= ~VT_UNSIGNED
;
6614 d
= 0, gcase(sw
.p
, sw
.n
, &d
);
6617 gsym_addr(d
, sw
.def_sym
);
6623 dynarray_reset(&sw
.p
, &sw
.n
);
6626 } else if (t
== TOK_CASE
) {
6627 struct case_t
*cr
= tcc_malloc(sizeof(struct case_t
));
6630 cr
->v1
= cr
->v2
= expr_const64();
6631 if (gnu_ext
&& tok
== TOK_DOTS
) {
6633 cr
->v2
= expr_const64();
6634 if (cr
->v2
< cr
->v1
)
6635 tcc_warning("empty case range");
6638 dynarray_add(&cur_switch
->p
, &cur_switch
->n
, cr
);
6641 goto block_after_label
;
6643 } else if (t
== TOK_DEFAULT
) {
6646 if (cur_switch
->def_sym
)
6647 tcc_error("too many 'default'");
6648 cur_switch
->def_sym
= gind();
6651 goto block_after_label
;
6653 } else if (t
== TOK_GOTO
) {
6654 vla_restore(root_scope
->vla
.loc
);
6655 if (tok
== '*' && gnu_ext
) {
6659 if ((vtop
->type
.t
& VT_BTYPE
) != VT_PTR
)
6663 } else if (tok
>= TOK_UIDENT
) {
6664 s
= label_find(tok
);
6665 /* put forward definition if needed */
6667 s
= label_push(&global_label_stack
, tok
, LABEL_FORWARD
);
6668 else if (s
->r
== LABEL_DECLARED
)
6669 s
->r
= LABEL_FORWARD
;
6671 if (s
->r
& LABEL_FORWARD
) {
6672 /* start new goto chain for cleanups, linked via label->next */
6673 if (cur_scope
->cl
.s
&& !nocode_wanted
) {
6674 sym_push2(&pending_gotos
, SYM_FIELD
, 0, cur_scope
->cl
.n
);
6675 pending_gotos
->prev_tok
= s
;
6676 s
= sym_push2(&s
->next
, SYM_FIELD
, 0, 0);
6677 pending_gotos
->next
= s
;
6679 s
->jnext
= gjmp(s
->jnext
);
6681 try_call_cleanup_goto(s
->cleanupstate
);
6682 gjmp_addr(s
->jnext
);
6687 expect("label identifier");
6691 } else if (t
== TOK_ASM1
|| t
== TOK_ASM2
|| t
== TOK_ASM3
) {
6695 if (tok
== ':' && t
>= TOK_UIDENT
) {
6700 if (s
->r
== LABEL_DEFINED
)
6701 tcc_error("duplicate label '%s'", get_tok_str(s
->v
, NULL
));
6702 s
->r
= LABEL_DEFINED
;
6704 Sym
*pcl
; /* pending cleanup goto */
6705 for (pcl
= s
->next
; pcl
; pcl
= pcl
->prev
)
6707 sym_pop(&s
->next
, NULL
, 0);
6711 s
= label_push(&global_label_stack
, t
, LABEL_DEFINED
);
6714 s
->cleanupstate
= cur_scope
->cl
.s
;
6717 vla_restore(cur_scope
->vla
.loc
);
6718 /* we accept this, but it is a mistake */
6720 tcc_warning("deprecated use of label at end of compound statement");
6726 /* expression case */
6742 /* This skips over a stream of tokens containing balanced {} and ()
6743 pairs, stopping at outer ',' ';' and '}' (or matching '}' if we started
6744 with a '{'). If STR then allocates and stores the skipped tokens
6745 in *STR. This doesn't check if () and {} are nested correctly,
6746 i.e. "({)}" is accepted. */
/* NOTE(review): this extract elides several original lines (e.g. the
   declaration and updates of 'level' and the next() calls); code text
   below is kept byte-identical, comments only added. */
6747 static void skip_or_save_block(TokenString
**str
)
/* remember whether we started on '{' so the matching '}' ends the scan */
6749 int braces
= tok
== '{';
6752 *str
= tok_str_alloc();
/* scan until nesting depth returns to 0 and an outer terminator is seen */
6754 while ((level
> 0 || (tok
!= '}' && tok
!= ',' && tok
!= ';' && tok
!= ')'))) {
6756 if (tok
== TOK_EOF
) {
/* EOF inside an unbalanced block (or while saving) is fatal */
6757 if (str
|| level
> 0)
6758 tcc_error("unexpected end of file");
/* record the token when saving */
6763 tok_str_add_tok(*str
);
/* track nesting: presumably 'level' is incremented/decremented on the
   elided lines — TODO confirm against the full source */
6766 if (t
== '{' || t
== '(') {
6768 } else if (t
== '}' || t
== ')') {
6770 if (level
== 0 && braces
&& t
== '}')
6775 tok_str_add(*str
, -1);
6776 tok_str_add(*str
, 0);
6780 #define EXPR_CONST 1
/* Parse a single initializer element. In constant-expression mode it
   forces compound literals to be allocated globally (via 'global_expr'),
   then rejects any value that is not an address constant.
   NOTE(review): extract is elided — lines between the numbered fragments
   are missing; comments only added, code unchanged. */
6783 static void parse_init_elem(int expr_type
)
6785 int saved_global_expr
;
6788 /* compound literals must be allocated globally in this case */
6789 saved_global_expr
= global_expr
;
6792 global_expr
= saved_global_expr
;
6793 /* NOTE: symbols are accepted, as well as lvalue for anon symbols
6794 (compound literals). */
/* reject: not a plain constant, and not a symbol-relative lvalue on an
   anonymous symbol (i.e. not computable at link time) */
6795 if (((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) != VT_CONST
6796 && ((vtop
->r
& (VT_SYM
|VT_LVAL
)) != (VT_SYM
|VT_LVAL
)
6797 || vtop
->sym
->v
< SYM_FIRST_ANOM
))
6798 #ifdef TCC_TARGET_PE
/* on PE, dllimport symbols have no load-time address either */
6799 || ((vtop
->r
& VT_SYM
) && vtop
->sym
->a
.dllimport
)
6802 tcc_error("initializer element is not constant");
6810 /* put zeros for variable based init */
/* Zero-fill 'size' bytes at offset 'c'. For section (static) data this is
   a no-op since sections start zeroed; for stack-based init it emits a
   call to memset (see the TOK_memset push below).
   NOTE(review): extract is elided; comments only added, code unchanged. */
6811 static void init_putz(Section
*sec
, unsigned long c
, int size
)
6814 /* nothing to do because globals are already set to zero */
/* local case: build a memset(addr, 0, size) call */
6816 vpush_global_sym(&func_old_type
, TOK_memset
);
6818 #ifdef TCC_TARGET_ARM
6830 #define DIF_SIZE_ONLY 2
6831 #define DIF_HAVE_ELEM 4
6833 /* t is the array or struct type. c is the array or struct
6834 address. cur_field is the pointer to the current
6835 field, for arrays the 'c' member contains the current start
6836 index. 'flags' is as in decl_initializer.
6837 'al' contains the already initialized length of the
6838 current container (starting at c). This returns the new length of that. */
/* NOTE(review): this extract elides many original lines (several closing
   braces, the 'f'/'s'/'l' setup, the return); code text below is kept
   byte-identical, comments only added. */
6839 static int decl_designator(CType
*type
, Section
*sec
, unsigned long c
,
6840 Sym
**cur_field
, int flags
, int al
)
6843 int index
, index_last
, align
, l
, nb_elems
, elem_size
;
6844 unsigned long corig
= c
;
6849 if (flags
& DIF_HAVE_ELEM
)
/* GNU extension: 'field:' designator syntax */
6852 if (gnu_ext
&& tok
>= TOK_UIDENT
) {
6859 /* NOTE: we only support ranges for last designator */
/* parse a chain of '[index]' / '.field' designators */
6860 while (nb_elems
== 1 && (tok
== '[' || tok
== '.')) {
6862 if (!(type
->t
& VT_ARRAY
))
6863 expect("array type");
6865 index
= index_last
= expr_const();
/* GNU range designator: [first ... last] */
6866 if (tok
== TOK_DOTS
&& gnu_ext
) {
6868 index_last
= expr_const();
/* bounds check against the array size (s->c < 0 means unknown size) */
6872 if (index
< 0 || (s
->c
>= 0 && index_last
>= s
->c
) ||
6874 tcc_error("invalid index");
6876 (*cur_field
)->c
= index_last
;
6877 type
= pointed_type(type
);
6878 elem_size
= type_size(type
, &align
);
6879 c
+= index
* elem_size
;
/* a range initializes (last - first + 1) elements */
6880 nb_elems
= index_last
- index
+ 1;
6887 if ((type
->t
& VT_BTYPE
) != VT_STRUCT
)
6888 expect("struct/union type");
6890 f
= find_field(type
, l
, &cumofs
);
6903 } else if (!gnu_ext
) {
/* no designator: advance to the next array index / struct field */
6908 if (type
->t
& VT_ARRAY
) {
6909 index
= (*cur_field
)->c
;
6910 if (type
->ref
->c
>= 0 && index
>= type
->ref
->c
)
6911 tcc_error("index too large");
6912 type
= pointed_type(type
);
6913 c
+= index
* type_size(type
, &align
);
/* skip anonymous bitfield padding members */
6916 while (f
&& (f
->v
& SYM_FIRST_ANOM
) && (f
->type
.t
& VT_BITFIELD
))
6917 *cur_field
= f
= f
->next
;
6919 tcc_error("too many field init");
6924 /* must put zero in holes (note that doing it that way
6925 ensures that it even works with designators) */
6926 if (!(flags
& DIF_SIZE_ONLY
) && c
- corig
> al
)
6927 init_putz(sec
, corig
+ al
, c
- corig
- al
);
6928 decl_initializer(type
, sec
, c
, flags
& ~DIF_FIRST
);
6930 /* XXX: make it more general */
/* range designator: replicate the first element across the range */
6931 if (!(flags
& DIF_SIZE_ONLY
) && nb_elems
> 1) {
6932 unsigned long c_end
;
/* local (stack) case: copy element by element via the value stack */
6937 vset(type
, VT_LOCAL
|VT_LVAL
, c
);
6938 for (i
= 1; i
< nb_elems
; i
++) {
6939 vset(type
, VT_LOCAL
|VT_LVAL
, c
+ elem_size
* i
);
/* static data case: memcpy the first element over the rest */
6944 } else if (!NODATA_WANTED
) {
6945 c_end
= c
+ nb_elems
* elem_size
;
6946 if (c_end
> sec
->data_allocated
)
6947 section_realloc(sec
, c_end
);
6948 src
= sec
->data
+ c
;
6950 for(i
= 1; i
< nb_elems
; i
++) {
6952 memcpy(dst
, src
, elem_size
);
/* return value is derived from the new initialized length (elided) */
6956 c
+= nb_elems
* type_size(type
, &align
);
6962 /* store a value or an expression directly in global data or in local array */
/* NOTE(review): this extract elides many original lines (switch labels,
   braces, the vtop pop); code text below is kept byte-identical,
   comments only added. */
6963 static void init_putv(CType
*type
, Section
*sec
, unsigned long c
)
6970 dtype
.t
&= ~VT_CONSTANT
; /* need to do that to avoid false warning */
/* static data path (sec != NULL): write the constant into section memory */
6974 /* XXX: not portable */
6975 /* XXX: generate error if incorrect relocation */
6976 gen_assign_cast(&dtype
);
6977 bt
= type
->t
& VT_BTYPE
;
/* symbol-relative values are only storable in pointer-sized slots */
6979 if ((vtop
->r
& VT_SYM
)
6982 && (bt
!= (PTR_SIZE
== 8 ? VT_LLONG
: VT_INT
)
6983 || (type
->t
& VT_BITFIELD
))
6984 && !((vtop
->r
& VT_CONST
) && vtop
->sym
->v
>= SYM_FIRST_ANOM
)
6986 tcc_error("initializer element is not computable at load time");
6988 if (NODATA_WANTED
) {
6993 size
= type_size(type
, &align
);
6994 section_reserve(sec
, c
+ size
);
6995 ptr
= sec
->data
+ c
;
6997 /* XXX: make code faster ? */
/* anonymous-symbol constants (compound literals) already have their bytes
   in another section: copy data and relocations over instead */
6998 if ((vtop
->r
& (VT_SYM
|VT_CONST
)) == (VT_SYM
|VT_CONST
) &&
6999 vtop
->sym
->v
>= SYM_FIRST_ANOM
&&
7000 /* XXX This rejects compound literals like
7001 '(void *){ptr}'. The problem is that '&sym' is
7002 represented the same way, which would be ruled out
7003 by the SYM_FIRST_ANOM check above, but also '"string"'
7004 in 'char *p = "string"' is represented the same
7005 with the type being VT_PTR and the symbol being an
7006 anonymous one. That is, there's no difference in vtop
7007 between '(void *){x}' and '&(void *){x}'. Ignore
7008 pointer typed entities here. Hopefully no real code
7009 will every use compound literals with scalar type. */
7010 (vtop
->type
.t
& VT_BTYPE
) != VT_PTR
) {
7011 /* These come from compound literals, memcpy stuff over. */
7015 esym
= elfsym(vtop
->sym
);
7016 ssec
= tcc_state
->sections
[esym
->st_shndx
];
7017 memmove (ptr
, ssec
->data
+ esym
->st_value
, size
);
7019 /* We need to copy over all memory contents, and that
7020 includes relocations. Use the fact that relocs are
7021 created it order, so look from the end of relocs
7022 until we hit one before the copied region. */
7023 int num_relocs
= ssec
->reloc
->data_offset
/ sizeof(*rel
);
7024 rel
= (ElfW_Rel
*)(ssec
->reloc
->data
+ ssec
->reloc
->data_offset
);
/* walk relocations backwards; keep only those inside the copied span */
7025 while (num_relocs
--) {
7027 if (rel
->r_offset
>= esym
->st_value
+ size
)
7029 if (rel
->r_offset
< esym
->st_value
)
7031 /* Note: if the same fields are initialized multiple
7032 times (possible with designators) then we possibly
7033 add multiple relocations for the same offset here.
7034 That would lead to wrong code, the last reloc needs
7035 to win. We clean this up later after the whole
7036 initializer is parsed. */
7037 put_elf_reloca(symtab_section
, sec
,
7038 c
+ rel
->r_offset
- esym
->st_value
,
7039 ELFW(R_TYPE
)(rel
->r_info
),
7040 ELFW(R_SYM
)(rel
->r_info
),
/* bitfield store: merge the value into the byte stream under a mask */
7050 if (type
->t
& VT_BITFIELD
) {
7051 int bit_pos
, bit_size
, bits
, n
;
7052 unsigned char *p
, v
, m
;
7053 bit_pos
= BIT_POS(vtop
->type
.t
);
7054 bit_size
= BIT_SIZE(vtop
->type
.t
);
7055 p
= (unsigned char*)ptr
+ (bit_pos
>> 3);
7056 bit_pos
&= 7, bits
= 0;
/* per-byte loop: 'n' bits from the value go under mask 'm' */
7061 v
= vtop
->c
.i
>> bits
<< bit_pos
;
7062 m
= ((1 << n
) - 1) << bit_pos
;
7063 *p
= (*p
& ~m
) | (v
& m
);
7064 bits
+= n
, bit_size
-= n
, bit_pos
= 0, ++p
;
7068 /* XXX: when cross-compiling we assume that each type has the
7069 same representation on host and target, which is likely to
7070 be wrong in the case of long double */
/* _Bool: normalize to 0/1 before the store */
7072 vtop
->c
.i
= vtop
->c
.i
!= 0;
7074 *(char *)ptr
|= vtop
->c
.i
;
7077 *(short *)ptr
|= vtop
->c
.i
;
7080 *(float*)ptr
= vtop
->c
.f
;
7083 *(double *)ptr
= vtop
->c
.d
;
/* long double: layout depends on host/target FP representation */
7086 #if defined TCC_IS_NATIVE_387
7087 if (sizeof (long double) >= 10) /* zero pad ten-byte LD */
7088 memcpy(ptr
, &vtop
->c
.ld
, 10);
7090 else if (sizeof (long double) == sizeof (double))
7091 __asm__("fldl %1\nfstpt %0\n" : "=m" (*ptr
) : "m" (vtop
->c
.ld
));
7093 else if (vtop
->c
.ld
== 0.0)
7097 if (sizeof(long double) == LDOUBLE_SIZE
)
7098 *(long double*)ptr
= vtop
->c
.ld
;
7099 else if (sizeof(double) == LDOUBLE_SIZE
)
7100 *(double *)ptr
= (double)vtop
->c
.ld
;
7102 tcc_error("can't cross compile long double constants");
7106 *(long long *)ptr
|= vtop
->c
.i
;
/* pointer-sized case: emit a relocation when the value is symbol-relative */
7113 addr_t val
= vtop
->c
.i
;
7115 if (vtop
->r
& VT_SYM
)
7116 greloca(sec
, vtop
->sym
, c
, R_DATA_PTR
, val
);
7118 *(addr_t
*)ptr
|= val
;
/* Rel (non-addend) variant of the same store */
7120 if (vtop
->r
& VT_SYM
)
7121 greloc(sec
, vtop
->sym
, c
, R_DATA_PTR
);
7122 *(addr_t
*)ptr
|= val
;
7128 int val
= vtop
->c
.i
;
7130 if (vtop
->r
& VT_SYM
)
7131 greloca(sec
, vtop
->sym
, c
, R_DATA_PTR
, val
);
7135 if (vtop
->r
& VT_SYM
)
7136 greloc(sec
, vtop
->sym
, c
, R_DATA_PTR
);
/* dynamic (stack) path: generate a normal store through an lvalue */
7145 vset(&dtype
, VT_LOCAL
|VT_LVAL
, c
);
7152 /* 't' contains the type and storage info. 'c' is the offset of the
7153 object in section 'sec'. If 'sec' is NULL, it means stack based
7154 allocation. 'flags & DIF_FIRST' is true if array '{' must be read (multi
7155 dimension implicit array init handling). 'flags & DIF_SIZE_ONLY' is true if
7156 size only evaluation is wanted (only for arrays). */
/* NOTE(review): this extract elides many original lines (the 'flags'
   parameter line, skip() calls, brace handling); code text below is kept
   byte-identical, comments only added. */
7157 static void decl_initializer(CType
*type
, Section
*sec
, unsigned long c
,
7160 int len
, n
, no_oblock
, nb
, i
;
/* eagerly parse a scalar initializer element unless it may be a string
   literal destined for an array, or we're only computing sizes */
7166 if (!(flags
& DIF_HAVE_ELEM
) && tok
!= '{' &&
7167 /* In case of strings we have special handling for arrays, so
7168 don't consume them as initializer value (which would commit them
7169 to some anonymous symbol). */
7170 tok
!= TOK_LSTR
&& tok
!= TOK_STR
&&
7171 !(flags
& DIF_SIZE_ONLY
)) {
7172 parse_init_elem(!sec
? EXPR_ANY
: EXPR_CONST
);
7173 flags
|= DIF_HAVE_ELEM
;
/* scalar-to-scalar (or compatible aggregate) direct store */
7176 if ((flags
& DIF_HAVE_ELEM
) &&
7177 !(type
->t
& VT_ARRAY
) &&
7178 /* Use i_c_parameter_t, to strip toplevel qualifiers.
7179 The source type might have VT_CONSTANT set, which is
7180 of course assignable to non-const elements. */
7181 is_compatible_unqualified_types(type
, &vtop
->type
)) {
7182 init_putv(type
, sec
, c
);
7183 } else if (type
->t
& VT_ARRAY
) {
7186 t1
= pointed_type(type
);
7187 size1
= type_size(t1
, &align1
);
7190 if (((flags
& DIF_FIRST
) && tok
!= TOK_LSTR
&& tok
!= TOK_STR
) ||
7193 tcc_error("character array initializer must be a literal,"
7194 " optionally enclosed in braces");
7199 /* only parse strings here if correct type (otherwise: handle
7200 them as ((w)char *) expressions */
7201 if ((tok
== TOK_LSTR
&&
7202 #ifdef TCC_TARGET_PE
/* PE wchar_t is unsigned short; elsewhere it is int */
7203 (t1
->t
& VT_BTYPE
) == VT_SHORT
&& (t1
->t
& VT_UNSIGNED
)
7205 (t1
->t
& VT_BTYPE
) == VT_INT
7207 ) || (tok
== TOK_STR
&& (t1
->t
& VT_BTYPE
) == VT_BYTE
)) {
/* concatenate adjacent string literals into the array */
7209 while (tok
== TOK_STR
|| tok
== TOK_LSTR
) {
7212 /* compute maximum number of chars wanted */
7214 cstr_len
= tokc
.str
.size
;
7216 cstr_len
= tokc
.str
.size
/ sizeof(nwchar_t
);
/* clamp to declared array size n (n < 0 means unknown) */
7219 if (n
>= 0 && nb
> (n
- len
))
7221 if (!(flags
& DIF_SIZE_ONLY
)) {
7223 tcc_warning("initializer-string for array is too long");
7224 /* in order to go faster for common case (char
7225 string in global variable, we handle it
7227 if (sec
&& tok
== TOK_STR
&& size1
== 1) {
7229 memcpy(sec
->data
+ c
+ len
, tokc
.str
.data
, nb
);
/* slow path: store each (w)char individually */
7233 ch
= ((unsigned char *)tokc
.str
.data
)[i
];
7235 ch
= ((nwchar_t
*)tokc
.str
.data
)[i
];
7237 init_putv(t1
, sec
, c
+ (len
+ i
) * size1
);
7244 /* only add trailing zero if enough storage (no
7245 warning in this case since it is standard) */
7246 if (n
< 0 || len
< n
) {
7247 if (!(flags
& DIF_SIZE_ONLY
)) {
7249 init_putv(t1
, sec
, c
+ (len
* size1
));
/* brace-enclosed aggregate: iterate designators/elements */
7260 while (tok
!= '}' || (flags
& DIF_HAVE_ELEM
)) {
7261 len
= decl_designator(type
, sec
, c
, &f
, flags
, len
);
7262 flags
&= ~DIF_HAVE_ELEM
;
7263 if (type
->t
& VT_ARRAY
) {
7265 /* special test for multi dimensional arrays (may not
7266 be strictly correct if designators are used at the
7268 if (no_oblock
&& len
>= n
*size1
)
/* unions consume only their first member */
7271 if (s
->type
.t
== VT_UNION
)
7275 if (no_oblock
&& f
== NULL
)
7284 /* put zeros at the end */
7285 if (!(flags
& DIF_SIZE_ONLY
) && len
< n
*size1
)
7286 init_putz(sec
, c
+ len
, n
*size1
- len
);
7289 /* patch type size if needed, which happens only for array types */
7291 s
->c
= size1
== 1 ? len
: ((len
+ size1
- 1)/size1
);
7292 } else if ((type
->t
& VT_BTYPE
) == VT_STRUCT
) {
7295 if ((flags
& DIF_FIRST
) || tok
== '{') {
7303 } else if (tok
== '{') {
/* scalar in braces: e.g. 'int x = {1};' */
7304 if (flags
& DIF_HAVE_ELEM
)
7307 decl_initializer(type
, sec
, c
, flags
& ~DIF_HAVE_ELEM
);
7309 } else if ((flags
& DIF_SIZE_ONLY
)) {
7310 /* If we supported only ISO C we wouldn't have to accept calling
7311 this on anything than an array if DIF_SIZE_ONLY (and even then
7312 only on the outermost level, so no recursion would be needed),
7313 because initializing a flex array member isn't supported.
7314 But GNU C supports it, so we need to recurse even into
7315 subfields of structs and arrays when DIF_SIZE_ONLY is set. */
7316 /* just skip expression */
7317 skip_or_save_block(NULL
);
7319 if (!(flags
& DIF_HAVE_ELEM
)) {
7320 /* This should happen only when we haven't parsed
7321 the init element above for fear of committing a
7322 string constant to memory too early. */
7323 if (tok
!= TOK_STR
&& tok
!= TOK_LSTR
)
7324 expect("string constant");
7325 parse_init_elem(!sec
? EXPR_ANY
: EXPR_CONST
);
7327 init_putv(type
, sec
, c
);
7331 /* parse an initializer for type 't' if 'has_init' is non zero, and
7332 allocate space in local or global data space ('r' is either
7333 VT_LOCAL or VT_CONST). If 'v' is non zero, then an associated
7334 variable 'v' of scope 'scope' is declared before initializers
7335 are parsed. If 'v' is zero, then a reference to the new object
7336 is put in the value stack. If 'has_init' is 2, a special parsing
7337 is done to handle string constants. */
/* NOTE(review): this extract elides many original lines (section choice,
   several braces/else branches, end_macro cleanup); code text below is
   kept byte-identical, comments only added. */
7338 static void decl_initializer_alloc(CType
*type
, AttributeDef
*ad
, int r
,
7339 int has_init
, int v
, int scope
)
7341 int size
, align
, addr
;
7342 TokenString
*init_str
= NULL
;
7345 Sym
*flexible_array
;
7347 int saved_nocode_wanted
= nocode_wanted
;
7348 #ifdef CONFIG_TCC_BCHECK
7352 /* Always allocate static or global variables */
7353 if (v
&& (r
& VT_VALMASK
) == VT_CONST
)
7354 nocode_wanted
|= 0x80000000;
7356 #ifdef CONFIG_TCC_BCHECK
7357 bcheck
= tcc_state
->do_bounds_check
&& !NODATA_WANTED
;
/* detect a trailing flexible array member (type.ref->c < 0) in a struct */
7360 flexible_array
= NULL
;
7361 if ((type
->t
& VT_BTYPE
) == VT_STRUCT
) {
7362 Sym
*field
= type
->ref
->next
;
7365 field
= field
->next
;
7366 if (field
->type
.t
& VT_ARRAY
&& field
->type
.ref
->c
< 0)
7367 flexible_array
= field
;
7371 size
= type_size(type
, &align
);
7372 /* If unknown size, we must evaluate it before
7373 evaluating initializers because
7374 initializers can generate global data too
7375 (e.g. string pointers or ISOC99 compound
7376 literals). It also simplifies local
7377 initializers handling */
/* unknown size: pre-scan the initializer tokens (saved into init_str)
   with DIF_SIZE_ONLY to learn the size, then re-parse them for real */
7378 if (size
< 0 || (flexible_array
&& has_init
)) {
7380 tcc_error("unknown type size");
7381 /* get all init string */
7382 if (has_init
== 2) {
7383 init_str
= tok_str_alloc();
7384 /* only get strings */
7385 while (tok
== TOK_STR
|| tok
== TOK_LSTR
) {
7386 tok_str_add_tok(init_str
);
7389 tok_str_add(init_str
, -1);
7390 tok_str_add(init_str
, 0);
7392 skip_or_save_block(&init_str
);
7397 begin_macro(init_str
, 1);
7399 decl_initializer(type
, NULL
, 0, DIF_FIRST
| DIF_SIZE_ONLY
);
7400 /* prepare second initializer parsing */
7401 macro_ptr
= init_str
->str
;
7404 /* if still unknown size, error */
7405 size
= type_size(type
, &align
);
7407 tcc_error("unknown type size");
7409 /* If there's a flex member and it was used in the initializer
7411 if (flexible_array
&&
7412 flexible_array
->type
.ref
->c
> 0)
7413 size
+= flexible_array
->type
.ref
->c
7414 * pointed_size(&flexible_array
->type
);
7415 /* take into account specified alignment if bigger */
7416 if (ad
->a
.aligned
) {
7417 int speca
= 1 << (ad
->a
.aligned
- 1);
7420 } else if (ad
->a
.packed
) {
7424 if (!v
&& NODATA_WANTED
)
7425 size
= 0, align
= 1;
/* ---- local (stack) allocation ---- */
7427 if ((r
& VT_VALMASK
) == VT_LOCAL
) {
7429 #ifdef CONFIG_TCC_BCHECK
7430 if (bcheck
&& ((type
->t
& VT_ARRAY
) ||
7431 (type
->t
& VT_BTYPE
) == VT_STRUCT
)) {
/* carve aligned space out of the frame ('loc' grows downward) */
7435 loc
= (loc
- size
) & -align
;
7437 #ifdef CONFIG_TCC_BCHECK
7438 /* handles bounds */
7439 /* XXX: currently, since we do only one pass, we cannot track
7440 '&' operators, so we add only arrays/structs/unions */
7441 if (bcheck
&& ((type
->t
& VT_ARRAY
) ||
7442 (type
->t
& VT_BTYPE
) == VT_STRUCT
)) {
7444 /* add padding between regions */
7446 /* then add local bound info */
7447 bounds_ptr
= section_ptr_add(lbounds_section
, 2 * sizeof(addr_t
));
7448 bounds_ptr
[0] = addr
;
7449 bounds_ptr
[1] = size
;
7453 /* local variable */
7454 #ifdef CONFIG_TCC_ASM
/* 'register' variable pinned via asm("reg") label */
7455 if (ad
->asm_label
) {
7456 int reg
= asm_parse_regvar(ad
->asm_label
);
7458 r
= (r
& ~VT_VALMASK
) | reg
;
7461 sym
= sym_push(v
, type
, r
, addr
);
/* __attribute__((cleanup(f))): chain a cleanup record onto the scope */
7462 if (ad
->cleanup_func
) {
7463 Sym
*cls
= sym_push2(&all_cleanups
,
7464 SYM_FIELD
| ++cur_scope
->cl
.n
, 0, 0);
7465 cls
->prev_tok
= sym
;
7466 cls
->next
= ad
->cleanup_func
;
7467 cls
->ncl
= cur_scope
->cl
.s
;
7468 cur_scope
->cl
.s
= cls
;
7473 /* push local reference */
7474 vset(type
, r
, addr
);
/* ---- static/global allocation ---- */
7477 if (v
&& scope
== VT_CONST
) {
7478 /* see if the symbol was already defined */
7481 patch_storage(sym
, ad
, type
);
7482 /* we accept several definitions of the same global variable. */
7483 if (!has_init
&& sym
->c
&& elfsym(sym
)->st_shndx
!= SHN_UNDEF
)
7488 /* allocate symbol in corresponding section */
7493 else if (tcc_state
->nocommon
)
7498 addr
= section_add(sec
, size
, align
);
7499 #ifdef CONFIG_TCC_BCHECK
7500 /* add padding if bound check */
7502 section_add(sec
, 1, 1);
7505 addr
= align
; /* SHN_COMMON is special, symbol value is align */
7506 sec
= common_section
;
7511 sym
= sym_push(v
, type
, r
| VT_SYM
, 0);
7512 patch_storage(sym
, ad
, NULL
);
7514 /* update symbol definition */
7515 put_extern_sym(sym
, sec
, addr
, size
);
7517 /* push global reference */
7518 vpush_ref(type
, sec
, addr
, size
);
7523 #ifdef CONFIG_TCC_BCHECK
7524 /* handles bounds now because the symbol must be defined
7525 before for the relocation */
7529 greloca(bounds_section
, sym
, bounds_section
->data_offset
, R_DATA_PTR
, 0);
7530 /* then add global bound info */
7531 bounds_ptr
= section_ptr_add(bounds_section
, 2 * sizeof(addr_t
));
7532 bounds_ptr
[0] = 0; /* relocated */
7533 bounds_ptr
[1] = size
;
/* ---- VLA: runtime allocation via alloca-style code ---- */
7538 if (type
->t
& VT_VLA
) {
7544 /* save current stack pointer */
7545 if (root_scope
->vla
.loc
== 0) {
7546 struct scope
*v
= cur_scope
;
7547 gen_vla_sp_save(loc
-= PTR_SIZE
);
7548 do v
->vla
.loc
= loc
; while ((v
= v
->prev
));
7551 vla_runtime_type_size(type
, &a
);
7552 gen_vla_alloc(type
, a
);
7553 #if defined TCC_TARGET_PE && defined TCC_TARGET_X86_64
7554 /* on _WIN64, because of the function args scratch area, the
7555 result of alloca differs from RSP and is returned in RAX. */
7556 gen_vla_result(addr
), addr
= (loc
-= PTR_SIZE
);
7558 gen_vla_sp_save(addr
);
7559 cur_scope
->vla
.loc
= addr
;
7560 cur_scope
->vla
.num
++;
7561 #ifdef CONFIG_TCC_BCHECK
7565 bounds_ptr
= section_ptr_add(lbounds_section
, 2 * sizeof(addr_t
));
7566 bounds_ptr
[0] = 1; /* marks alloca/vla used */
/* ---- ordinary initializer parsing ---- */
7571 } else if (has_init
) {
7572 size_t oldreloc_offset
= 0;
7573 if (sec
&& sec
->reloc
)
7574 oldreloc_offset
= sec
->reloc
->data_offset
;
7575 decl_initializer(type
, sec
, addr
, DIF_FIRST
);
/* designators may have produced duplicate relocs; keep only the last */
7576 if (sec
&& sec
->reloc
)
7577 squeeze_multi_relocs(sec
, oldreloc_offset
);
7578 /* patch flexible array member size back to -1, */
7579 /* for possible subsequent similar declarations */
7581 flexible_array
->type
.ref
->c
= -1;
7585 /* restore parse state if needed */
7591 nocode_wanted
= saved_nocode_wanted
;
7594 /* parse a function defined by symbol 'sym' and generate its code in
7595 'cur_text_section' */
/* NOTE(review): extract is elided (prologue/body generation calls are
   missing between fragments); comments only added, code unchanged. */
7596 static void gen_function(Sym
*sym
, AttributeDef
*ad
)
7598 /* Initialize VLA state */
7599 struct scope f
= { 0 };
7600 cur_scope
= root_scope
= &f
;
/* code is emitted at the current end of the text section */
7603 ind
= cur_text_section
->data_offset
;
/* honor __attribute__((aligned(n))) by nop-padding up to the boundary */
7604 if (sym
->a
.aligned
) {
7605 size_t newoff
= section_add(cur_text_section
, 0,
7606 1 << (sym
->a
.aligned
- 1));
7607 gen_fill_nops(newoff
- ind
);
7609 /* NOTE: we patch the symbol size later */
7610 put_extern_sym(sym
, cur_text_section
, ind
, 0);
/* register constructor/destructor entries if requested by attributes */
7612 if (ad
&& ad
->a
.constructor
) {
7613 add_init_array (tcc_state
, sym
);
7615 if (ad
&& ad
->a
.destructor
) {
7616 add_fini_array (tcc_state
, sym
);
7619 funcname
= get_tok_str(sym
->v
, NULL
);
7622 /* put debug symbol */
7623 tcc_debug_funcstart(tcc_state
, sym
);
7624 /* push a dummy symbol to enable local sym storage */
7625 sym_push2(&local_stack
, SYM_FIELD
, 0, 0);
7626 local_scope
= 1; /* for function parameters */
7630 clear_temp_local_var_list();
/* finalize: record the emitted size back into the section */
7635 cur_text_section
->data_offset
= ind
;
7636 /* reset local stack */
7637 sym_pop(&local_stack
, NULL
, 0);
7639 label_pop(&global_label_stack
, NULL
, 0);
7640 sym_pop(&all_cleanups
, NULL
, 0);
7641 /* patch symbol size */
7642 elfsym(sym
)->st_size
= ind
- func_ind
;
7643 /* end of function */
7644 tcc_debug_funcend(tcc_state
, ind
- func_ind
);
7645 /* It's better to crash than to generate wrong code */
7646 cur_text_section
= NULL
;
7647 funcname
= ""; /* for safety */
7648 func_vt
.t
= VT_VOID
; /* for safety */
7649 func_var
= 0; /* for safety */
7650 ind
= 0; /* for safety */
/* suppress any further code emission until the next function starts */
7651 nocode_wanted
= 0x80000000;
/* Emit code for all recorded (static) inline functions that turned out to
   be referenced. Repeats until a pass generates nothing new, since
   generating one inline function can reference another.
   NOTE(review): extract is elided (the do{ opener, sym lookup, end_macro
   are missing); comments only added, code unchanged. */
7655 static void gen_inline_functions(TCCState
*s
)
7658 int inline_generated
, i
;
7659 struct InlineFunc
*fn
;
/* synthetic buffer so diagnostics have a file context */
7661 tcc_open_bf(s
, ":inline:", 0);
7662 /* iterate while inline function are referenced */
7664 inline_generated
= 0;
7665 for (i
= 0; i
< s
->nb_inline_fns
; ++i
) {
7666 fn
= s
->inline_fns
[i
];
/* sym->c set (symbol used) or no longer inline => must emit it */
7668 if (sym
&& (sym
->c
|| !(sym
->type
.t
& VT_INLINE
))) {
7669 /* the function was used or forced (and then not internal):
7670 generate its code and convert it to a normal function */
7672 tcc_debug_putfile(s
, fn
->filename
);
/* replay the saved token string as macro input and compile it */
7673 begin_macro(fn
->func_str
, 1);
7675 cur_text_section
= text_section
;
7676 gen_function(sym
, NULL
);
7679 inline_generated
= 1;
7682 } while (inline_generated
);
/* Release the token strings saved for inline functions that were never
   emitted, then free the inline-function array itself.
   NOTE(review): extract is elided ('int i;' and braces are missing);
   comments only added, code unchanged. */
7686 static void free_inline_functions(TCCState
*s
)
7689 /* free tokens of unused inline functions */
7690 for (i
= 0; i
< s
->nb_inline_fns
; ++i
) {
7691 struct InlineFunc
*fn
= s
->inline_fns
[i
];
7693 tok_str_free(fn
->func_str
);
7695 dynarray_reset(&s
->inline_fns
, &s
->nb_inline_fns
);
7698 /* 'l' is VT_LOCAL or VT_CONST to define default storage type, or VT_CMP
7699 if parsing old style parameter decl list (and FUNC_SYM is set then) */
/* Top-level declaration parser: loops over declarations, dispatching to
   function definitions, typedefs, parameter redeclarations (K&R), and
   variable definitions via decl_initializer_alloc().
   NOTE(review): this extract elides many original lines (skip() calls,
   several braces/else arms, the return statements); code text below is
   kept byte-identical, comments only added. */
7700 static int decl0(int l
, int is_for_loop_init
, Sym
*func_sym
)
7705 AttributeDef ad
, adbase
;
/* C11 _Static_assert handling */
7708 if (tok
== TOK_STATIC_ASSERT
) {
7716 tcc_error("%s", get_tok_str(tok
, &tokc
));
7722 if (!parse_btype(&btype
, &adbase
)) {
7723 if (is_for_loop_init
)
7725 /* skip redundant ';' if not in old parameter decl scope */
7726 if (tok
== ';' && l
!= VT_CMP
) {
7732 if (tok
== TOK_ASM1
|| tok
== TOK_ASM2
|| tok
== TOK_ASM3
) {
7733 /* global asm block */
7737 if (tok
>= TOK_UIDENT
) {
7738 /* special test for old K&R protos without explicit int
7739 type. Only accepted when defining global data */
7743 expect("declaration");
/* declaration with no declarator: warn about useless anonymous structs */
7748 if ((btype
.t
& VT_BTYPE
) == VT_STRUCT
) {
7749 int v
= btype
.ref
->v
;
7750 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) >= SYM_FIRST_ANOM
)
7751 tcc_warning("unnamed struct/union that defines no instances");
7755 if (IS_ENUM(btype
.t
)) {
7760 while (1) { /* iterate thru each declaration */
7762 /* If the base type itself was an array type of unspecified
7763 size (like in 'typedef int arr[]; arr x = {1};') then
7764 we will overwrite the unknown size by the real one for
7765 this decl. We need to unshare the ref symbol holding
7767 if ((type
.t
& VT_ARRAY
) && type
.ref
->c
< 0) {
7768 type
.ref
= sym_push(SYM_FIELD
, &type
.ref
->type
, 0, type
.ref
->c
);
7771 type_decl(&type
, &ad
, &v
, TYPE_DIRECT
);
/* (debug output, presumably guarded by a PARSE_DEBUG-style #if —
   the guard lines are elided from this extract) */
7775 type_to_str(buf
, sizeof(buf
), &type
, get_tok_str(v
, NULL
));
7776 printf("type = '%s'\n", buf
);
7779 if ((type
.t
& VT_BTYPE
) == VT_FUNC
) {
7780 if ((type
.t
& VT_STATIC
) && (l
== VT_LOCAL
))
7781 tcc_error("function without file scope cannot be static");
7782 /* if old style function prototype, we accept a
/* K&R prototype: recurse to read the old-style parameter decls */
7785 if (sym
->f
.func_type
== FUNC_OLD
&& l
== VT_CONST
)
7786 decl0(VT_CMP
, 0, sym
);
7787 /* always compile 'extern inline' */
7788 if (type
.t
& VT_EXTERN
)
7789 type
.t
&= ~VT_INLINE
;
7792 if (gnu_ext
&& (tok
== TOK_ASM1
|| tok
== TOK_ASM2
|| tok
== TOK_ASM3
)) {
7793 ad
.asm_label
= asm_label_instr();
7794 /* parse one last attribute list, after asm label */
7795 parse_attribute(&ad
);
7797 /* gcc does not allow __asm__("label") with function definition,
7804 #ifdef TCC_TARGET_PE
/* dllimport/dllexport sanity checks (PE targets only) */
7805 if (ad
.a
.dllimport
|| ad
.a
.dllexport
) {
7806 if (type
.t
& VT_STATIC
)
7807 tcc_error("cannot have dll linkage with static");
7808 if (type
.t
& VT_TYPEDEF
) {
7809 tcc_warning("'%s' attribute ignored for typedef",
7810 ad
.a
.dllimport
? (ad
.a
.dllimport
= 0, "dllimport") :
7811 (ad
.a
.dllexport
= 0, "dllexport"));
7812 } else if (ad
.a
.dllimport
) {
7813 if ((type
.t
& VT_BTYPE
) == VT_FUNC
)
7816 type
.t
|= VT_EXTERN
;
/* function DEFINITION ('{' follows): only allowed at file scope */
7822 tcc_error("cannot use local functions");
7823 if ((type
.t
& VT_BTYPE
) != VT_FUNC
)
7824 expect("function definition");
7826 /* reject abstract declarators in function definition
7827 make old style params without decl have int type */
7829 while ((sym
= sym
->next
) != NULL
) {
7830 if (!(sym
->v
& ~SYM_FIELD
))
7831 expect("identifier");
7832 if (sym
->type
.t
== VT_VOID
)
7833 sym
->type
= int_type
;
7836 /* put function symbol */
7837 type
.t
&= ~VT_EXTERN
;
7838 sym
= external_sym(v
, &type
, 0, &ad
);
7839 /* static inline functions are just recorded as a kind
7840 of macro. Their code will be emitted at the end of
7841 the compilation unit only if they are used */
7842 if (sym
->type
.t
& VT_INLINE
) {
7843 struct InlineFunc
*fn
;
7844 const char *filename
;
7846 filename
= file
? file
->filename
: "";
/* struct InlineFunc has a flexible filename buffer at its end */
7847 fn
= tcc_malloc(sizeof *fn
+ strlen(filename
));
7848 strcpy(fn
->filename
, filename
);
/* save the body tokens for later replay by gen_inline_functions() */
7850 skip_or_save_block(&fn
->func_str
);
7851 dynarray_add(&tcc_state
->inline_fns
,
7852 &tcc_state
->nb_inline_fns
, fn
);
7854 /* compute text section */
7855 cur_text_section
= ad
.section
;
7856 if (!cur_text_section
)
7857 cur_text_section
= text_section
;
7858 gen_function(sym
, &ad
);
/* old-style (K&R) parameter declaration: attach type to the parameter */
7863 /* find parameter in function parameter list */
7864 for (sym
= func_sym
->next
; sym
; sym
= sym
->next
)
7865 if ((sym
->v
& ~SYM_FIELD
) == v
)
7867 tcc_error("declaration for parameter '%s' but no such parameter",
7868 get_tok_str(v
, NULL
));
7870 if (type
.t
& VT_STORAGE
) /* 'register' is okay */
7871 tcc_error("storage class specified for '%s'",
7872 get_tok_str(v
, NULL
));
7873 if (sym
->type
.t
!= VT_VOID
)
7874 tcc_error("redefinition of parameter '%s'",
7875 get_tok_str(v
, NULL
));
7876 convert_parameter_type(&type
);
7878 } else if (type
.t
& VT_TYPEDEF
) {
7879 /* save typedefed type */
7880 /* XXX: test storage specifiers ? */
/* allow exact redefinition of a typedef in the same scope */
7882 if (sym
&& sym
->sym_scope
== local_scope
) {
7883 if (!is_compatible_types(&sym
->type
, &type
)
7884 || !(sym
->type
.t
& VT_TYPEDEF
))
7885 tcc_error("incompatible redefinition of '%s'",
7886 get_tok_str(v
, NULL
));
7889 sym
= sym_push(v
, &type
, 0, 0);
7893 } else if ((type
.t
& VT_BTYPE
) == VT_VOID
7894 && !(type
.t
& VT_EXTERN
)) {
7895 tcc_error("declaration of void object");
7898 if ((type
.t
& VT_BTYPE
) == VT_FUNC
) {
7899 /* external function definition */
7900 /* specific case for func_call attribute */
7902 } else if (!(type
.t
& VT_ARRAY
)) {
7903 /* not lvalue if array */
7906 has_init
= (tok
== '=');
7907 if (has_init
&& (type
.t
& VT_VLA
))
7908 tcc_error("variable length array cannot be initialized");
/* decide whether this is only a reference to an external object */
7909 if (((type
.t
& VT_EXTERN
) && (!has_init
|| l
!= VT_CONST
))
7910 || (type
.t
& VT_BTYPE
) == VT_FUNC
7911 /* as with GCC, uninitialized global arrays with no size
7912 are considered extern: */
7913 || ((type
.t
& VT_ARRAY
) && !has_init
7914 && l
== VT_CONST
&& type
.ref
->c
< 0)
7916 /* external variable or function */
7917 type
.t
|= VT_EXTERN
;
7918 sym
= external_sym(v
, &type
, r
, &ad
);
/* __attribute__((alias("target"))): copy target's ELF definition */
7919 if (ad
.alias_target
) {
7922 alias_target
= sym_find(ad
.alias_target
);
7923 esym
= elfsym(alias_target
);
7925 tcc_error("unsupported forward __alias__ attribute");
7926 put_extern_sym2(sym
, esym
->st_shndx
, esym
->st_value
, esym
->st_size
, 0);
7929 if (type
.t
& VT_STATIC
)
7935 else if (l
== VT_CONST
)
7936 /* uninitialized global variables may be overridden */
7937 type
.t
|= VT_EXTERN
;
7938 decl_initializer_alloc(&type
, &ad
, r
, has_init
, v
, l
);
7942 if (is_for_loop_init
)
/* Convenience wrapper over decl0(); body is elided from this extract —
   presumably forwards 'l' with no function symbol. TODO confirm. */
7954 static void decl(int l
)
7959 /* ------------------------------------------------------------------------- */
7962 /* ------------------------------------------------------------------------- */