/*
 *  TCC - Tiny C Compiler
 *
 *  Copyright (c) 2001-2004 Fabrice Bellard
 *
 *  This library is free software; you can redistribute it and/or
 *  modify it under the terms of the GNU Lesser General Public
 *  License as published by the Free Software Foundation; either
 *  version 2 of the License, or (at your option) any later version.
 *
 *  This library is distributed in the hope that it will be useful,
 *  but WITHOUT ANY WARRANTY; without even the implied warranty of
 *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 *  Lesser General Public License for more details.
 *
 *  You should have received a copy of the GNU Lesser General Public
 *  License along with this library; if not, write to the Free Software
 *  Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
 */
/********************************************************/
/* global variables */

/* loc : local variable index
   ind : output code index
   anon_sym: anonymous symbol index
*/
30 ST_DATA
int rsym
, anon_sym
, ind
, loc
;
32 ST_DATA Sym
*sym_free_first
;
33 ST_DATA
void **sym_pools
;
34 ST_DATA
int nb_sym_pools
;
36 ST_DATA Sym
*global_stack
;
37 ST_DATA Sym
*local_stack
;
38 ST_DATA Sym
*define_stack
;
39 ST_DATA Sym
*global_label_stack
;
40 ST_DATA Sym
*local_label_stack
;
42 static Sym
*all_cleanups
, *current_cleanups
, *pending_gotos
;
45 static int local_scope
;
47 static int section_sym
;
49 ST_DATA
int vlas_in_scope
; /* number of VLAs that are currently in scope */
50 ST_DATA
int vla_sp_root_loc
; /* vla_sp_loc for SP before any VLAs were pushed */
51 ST_DATA
int vla_sp_loc
; /* Pointer to variable holding location to store stack pointer on the stack when modifying stack pointer */
53 ST_DATA SValue __vstack
[1+VSTACK_SIZE
], *vtop
, *pvtop
;
55 ST_DATA
int const_wanted
; /* true if constant wanted */
56 ST_DATA
int nocode_wanted
; /* no code generation wanted */
57 #define NODATA_WANTED (nocode_wanted > 0) /* no static data output wanted either */
58 #define STATIC_DATA_WANTED (nocode_wanted & 0xC0000000) /* only static data output */
59 ST_DATA
int global_expr
; /* true if compound literals must be allocated globally (used during initializers parsing */
60 ST_DATA CType func_vt
; /* current function return type (used by return instruction) */
61 ST_DATA
int func_var
; /* true if current function is variadic (used by return instruction) */
63 ST_DATA
int last_line_num
, last_ind
, func_ind
; /* debug last line number and pc */
64 ST_DATA
const char *funcname
;
67 ST_DATA CType char_pointer_type
, func_old_type
, int_type
, size_type
, ptrdiff_type
;
69 ST_DATA
struct switch_t
{
73 } **p
; int n
; /* list of case ranges */
74 int def_sym
; /* default symbol */
75 } *cur_switch
; /* current switch */
/* Pool of reusable temporary stack slots in the current function; filled
   by get_temp_local_var() and reset by clear_temp_local_var_list(). */
/* NOTE(review): this struct looks truncated in this chunk --
   get_temp_local_var() below reads temp_var->size and temp_var->align,
   but no such fields are visible here; confirm against the full source. */
77 #define MAX_TEMP_LOCAL_VARIABLE_NUMBER 0x4
78 /*list of temporary local variables on the stack in current function. */
79 ST_DATA
struct temp_local_variable
{
80 int location
; //offset on stack. Svalue.c.i
83 } arr_temp_local_vars
[MAX_TEMP_LOCAL_VARIABLE_NUMBER
];
84 short nb_temp_local_vars
;
86 /* ------------------------------------------------------------------------- */
88 static void gen_cast(CType
*type
);
89 static void gen_cast_s(int t
);
90 static inline CType
*pointed_type(CType
*type
);
91 static int is_compatible_types(CType
*type1
, CType
*type2
);
92 static int parse_btype(CType
*type
, AttributeDef
*ad
);
93 static CType
*type_decl(CType
*type
, AttributeDef
*ad
, int *v
, int td
);
94 static void parse_expr_type(CType
*type
);
95 static void init_putv(CType
*type
, Section
*sec
, unsigned long c
);
96 static void decl_initializer(CType
*type
, Section
*sec
, unsigned long c
, int flags
);
97 static void block(int *bsym
, Sym
*bcl
, int *csym
, Sym
*ccl
, int is_expr
);
98 static void decl_initializer_alloc(CType
*type
, AttributeDef
*ad
, int r
, int has_init
, int v
, int scope
);
99 static void decl(int l
);
100 static int decl0(int l
, int is_for_loop_init
, Sym
*);
101 static void expr_eq(void);
102 static void vla_runtime_type_size(CType
*type
, int *a
);
103 static void vla_sp_restore(void);
104 static void vla_sp_restore_root(void);
105 static int is_compatible_unqualified_types(CType
*type1
, CType
*type2
);
106 static inline int64_t expr_const64(void);
107 static void vpush64(int ty
, unsigned long long v
);
108 static void vpush(CType
*type
);
109 static int gvtst(int inv
, int t
);
110 static void gen_inline_functions(TCCState
*s
);
111 static void skip_or_save_block(TokenString
**str
);
112 static void gv_dup(void);
113 static int get_temp_local_var(int size
,int align
);
114 static void clear_temp_local_var_list();
/* Tear down per-function scope state: raise an internal compiler error
   if any cleanup handler is still registered, then pop every symbol
   accumulated on the cleanup list. */
/* NOTE(review): the body appears truncated in this chunk (no braces
   visible; presumably it also resets local_scope) -- confirm against
   the complete source. */
117 static void reset_local_scope(void)
119 if (current_cleanups
)
120 tcc_error("ICE current_cleanups");
121 sym_pop(&all_cleanups
, NULL
, 0);
/* Return nonzero when the basic type of 't' is a floating-point type
   (long double, double, float, or the qfloat pseudo-type). */
/* NOTE(review): truncated here -- 'bt' is used below but its
   declaration/derivation (presumably bt = t & VT_BTYPE) is not
   visible in this chunk; confirm against the complete source. */
125 ST_INLN
int is_float(int t
)
129 return bt
== VT_LDOUBLE
|| bt
== VT_DOUBLE
|| bt
== VT_FLOAT
|| bt
== VT_QFLOAT
;
/* Return 1 when 'd' is finite, 0 for Inf/NaN: ORs the sign and
   mantissa bits of the high word into a mask so that adding 1
   overflows to 0 exactly when the exponent field is all ones
   (Inf/NaN); the final >> 31 extracts that result.
   Assumes IEEE-754 doubles with the high word in p[1] (i.e. a
   little-endian layout), as the XXX comment below warns. */
/* NOTE(review): truncated here -- the declaration of 'p' (presumably
   an int array aliasing 'd' via memcpy) is not visible in this chunk;
   confirm against the complete source. */
132 /* we use our own 'finite' function to avoid potential problems with
133 non standard math libs */
134 /* XXX: endianness dependent */
135 ST_FUNC
int ieee_finite(double d
)
138 memcpy(p
, &d
, sizeof(double));
139 return ((unsigned)((p
[1] | 0x800fffff) + 1)) >> 31;
142 /* compiling intel long double natively */
143 #if (defined __i386__ || defined __x86_64__) \
144 && (defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64)
145 # define TCC_IS_NATIVE_387
148 ST_FUNC
void test_lvalue(void)
150 if (!(vtop
->r
& VT_LVAL
))
154 ST_FUNC
void check_vstack(void)
157 tcc_error("internal compiler error: vstack leak (%d)", vtop
- pvtop
);
160 /* ------------------------------------------------------------------------- */
161 /* vstack debugging aid */
164 void pv (const char *lbl
, int a
, int b
)
167 for (i
= a
; i
< a
+ b
; ++i
) {
168 SValue
*p
= &vtop
[-i
];
169 printf("%s vtop[-%d] : type.t:%04x r:%04x r2:%04x c.i:%d\n",
170 lbl
, i
, p
->type
.t
, p
->r
, p
->r2
, (int)p
->c
.i
);
175 /* ------------------------------------------------------------------------- */
176 /* start of translation unit info */
177 ST_FUNC
void tcc_debug_start(TCCState
*s1
)
182 /* file info: full path + filename */
183 section_sym
= put_elf_sym(symtab_section
, 0, 0,
184 ELFW(ST_INFO
)(STB_LOCAL
, STT_SECTION
), 0,
185 text_section
->sh_num
, NULL
);
186 getcwd(buf
, sizeof(buf
));
188 normalize_slashes(buf
);
190 pstrcat(buf
, sizeof(buf
), "/");
191 put_stabs_r(buf
, N_SO
, 0, 0,
192 text_section
->data_offset
, text_section
, section_sym
);
193 put_stabs_r(file
->filename
, N_SO
, 0, 0,
194 text_section
->data_offset
, text_section
, section_sym
);
199 /* an elf symbol of type STT_FILE must be put so that STB_LOCAL
200 symbols can be safely used */
201 put_elf_sym(symtab_section
, 0, 0,
202 ELFW(ST_INFO
)(STB_LOCAL
, STT_FILE
), 0,
203 SHN_ABS
, file
->filename
);
206 /* put end of translation unit info */
207 ST_FUNC
void tcc_debug_end(TCCState
*s1
)
211 put_stabs_r(NULL
, N_SO
, 0, 0,
212 text_section
->data_offset
, text_section
, section_sym
);
216 /* generate line number info */
217 ST_FUNC
void tcc_debug_line(TCCState
*s1
)
221 if ((last_line_num
!= file
->line_num
|| last_ind
!= ind
)) {
222 put_stabn(N_SLINE
, 0, file
->line_num
, ind
- func_ind
);
224 last_line_num
= file
->line_num
;
228 /* put function symbol */
229 ST_FUNC
void tcc_debug_funcstart(TCCState
*s1
, Sym
*sym
)
237 /* XXX: we put here a dummy type */
238 snprintf(buf
, sizeof(buf
), "%s:%c1",
239 funcname
, sym
->type
.t
& VT_STATIC
? 'f' : 'F');
240 put_stabs_r(buf
, N_FUN
, 0, file
->line_num
, 0,
241 cur_text_section
, sym
->c
);
242 /* //gr gdb wants a line at the function */
243 put_stabn(N_SLINE
, 0, file
->line_num
, 0);
249 /* put function size */
250 ST_FUNC
void tcc_debug_funcend(TCCState
*s1
, int size
)
254 put_stabn(N_FUN
, 0, 0, size
);
257 /* ------------------------------------------------------------------------- */
258 ST_FUNC
int tccgen_compile(TCCState
*s1
)
260 cur_text_section
= NULL
;
262 anon_sym
= SYM_FIRST_ANOM
;
265 nocode_wanted
= 0x80000000;
268 /* define some often used types */
270 char_pointer_type
.t
= VT_BYTE
;
271 mk_pointer(&char_pointer_type
);
273 size_type
.t
= VT_INT
| VT_UNSIGNED
;
274 ptrdiff_type
.t
= VT_INT
;
276 size_type
.t
= VT_LLONG
| VT_UNSIGNED
;
277 ptrdiff_type
.t
= VT_LLONG
;
279 size_type
.t
= VT_LONG
| VT_LLONG
| VT_UNSIGNED
;
280 ptrdiff_type
.t
= VT_LONG
| VT_LLONG
;
282 func_old_type
.t
= VT_FUNC
;
283 func_old_type
.ref
= sym_push(SYM_FIELD
, &int_type
, 0, 0);
284 func_old_type
.ref
->f
.func_call
= FUNC_CDECL
;
285 func_old_type
.ref
->f
.func_type
= FUNC_OLD
;
289 #ifdef TCC_TARGET_ARM
294 printf("%s: **** new file\n", file
->filename
);
297 parse_flags
= PARSE_FLAG_PREPROCESS
| PARSE_FLAG_TOK_NUM
| PARSE_FLAG_TOK_STR
;
300 gen_inline_functions(s1
);
302 /* end of translation unit info */
307 /* ------------------------------------------------------------------------- */
308 ST_FUNC ElfSym
*elfsym(Sym
*s
)
312 return &((ElfSym
*)symtab_section
->data
)[s
->c
];
315 /* apply storage attributes to Elf symbol */
316 ST_FUNC
void update_storage(Sym
*sym
)
319 int sym_bind
, old_sym_bind
;
325 if (sym
->a
.visibility
)
326 esym
->st_other
= (esym
->st_other
& ~ELFW(ST_VISIBILITY
)(-1))
329 if (sym
->type
.t
& (VT_STATIC
| VT_INLINE
))
330 sym_bind
= STB_LOCAL
;
331 else if (sym
->a
.weak
)
334 sym_bind
= STB_GLOBAL
;
335 old_sym_bind
= ELFW(ST_BIND
)(esym
->st_info
);
336 if (sym_bind
!= old_sym_bind
) {
337 esym
->st_info
= ELFW(ST_INFO
)(sym_bind
, ELFW(ST_TYPE
)(esym
->st_info
));
341 if (sym
->a
.dllimport
)
342 esym
->st_other
|= ST_PE_IMPORT
;
343 if (sym
->a
.dllexport
)
344 esym
->st_other
|= ST_PE_EXPORT
;
348 printf("storage %s: bind=%c vis=%d exp=%d imp=%d\n",
349 get_tok_str(sym
->v
, NULL
),
350 sym_bind
== STB_WEAK
? 'w' : sym_bind
== STB_LOCAL
? 'l' : 'g',
358 /* ------------------------------------------------------------------------- */
359 /* update sym->c so that it points to an external symbol in section
360 'section' with value 'value' */
362 ST_FUNC
void put_extern_sym2(Sym
*sym
, int sh_num
,
363 addr_t value
, unsigned long size
,
364 int can_add_underscore
)
366 int sym_type
, sym_bind
, info
, other
, t
;
370 #ifdef CONFIG_TCC_BCHECK
375 name
= get_tok_str(sym
->v
, NULL
);
376 #ifdef CONFIG_TCC_BCHECK
377 if (tcc_state
->do_bounds_check
) {
378 /* XXX: avoid doing that for statics ? */
379 /* if bound checking is activated, we change some function
380 names by adding the "__bound" prefix */
383 /* XXX: we rely only on malloc hooks */
396 strcpy(buf
, "__bound_");
404 if ((t
& VT_BTYPE
) == VT_FUNC
) {
406 } else if ((t
& VT_BTYPE
) == VT_VOID
) {
407 sym_type
= STT_NOTYPE
;
409 sym_type
= STT_OBJECT
;
411 if (t
& (VT_STATIC
| VT_INLINE
))
412 sym_bind
= STB_LOCAL
;
414 sym_bind
= STB_GLOBAL
;
417 if (sym_type
== STT_FUNC
&& sym
->type
.ref
) {
418 Sym
*ref
= sym
->type
.ref
;
419 if (ref
->a
.nodecorate
) {
420 can_add_underscore
= 0;
422 if (ref
->f
.func_call
== FUNC_STDCALL
&& can_add_underscore
) {
423 sprintf(buf1
, "_%s@%d", name
, ref
->f
.func_args
* PTR_SIZE
);
425 other
|= ST_PE_STDCALL
;
426 can_add_underscore
= 0;
430 if (tcc_state
->leading_underscore
&& can_add_underscore
) {
432 pstrcpy(buf1
+ 1, sizeof(buf1
) - 1, name
);
436 name
= get_tok_str(sym
->asm_label
, NULL
);
437 info
= ELFW(ST_INFO
)(sym_bind
, sym_type
);
438 sym
->c
= put_elf_sym(symtab_section
, value
, size
, info
, other
, sh_num
, name
);
441 esym
->st_value
= value
;
442 esym
->st_size
= size
;
443 esym
->st_shndx
= sh_num
;
448 ST_FUNC
void put_extern_sym(Sym
*sym
, Section
*section
,
449 addr_t value
, unsigned long size
)
451 int sh_num
= section
? section
->sh_num
: SHN_UNDEF
;
452 put_extern_sym2(sym
, sh_num
, value
, size
, 1);
455 /* add a new relocation entry to symbol 'sym' in section 's' */
456 ST_FUNC
void greloca(Section
*s
, Sym
*sym
, unsigned long offset
, int type
,
461 if (nocode_wanted
&& s
== cur_text_section
)
466 put_extern_sym(sym
, NULL
, 0, 0);
470 /* now we can add ELF relocation info */
471 put_elf_reloca(symtab_section
, s
, offset
, type
, c
, addend
);
475 ST_FUNC
void greloc(Section
*s
, Sym
*sym
, unsigned long offset
, int type
)
477 greloca(s
, sym
, offset
, type
, 0);
481 /* ------------------------------------------------------------------------- */
482 /* symbol allocator */
483 static Sym
*__sym_malloc(void)
485 Sym
*sym_pool
, *sym
, *last_sym
;
488 sym_pool
= tcc_malloc(SYM_POOL_NB
* sizeof(Sym
));
489 dynarray_add(&sym_pools
, &nb_sym_pools
, sym_pool
);
491 last_sym
= sym_free_first
;
493 for(i
= 0; i
< SYM_POOL_NB
; i
++) {
494 sym
->next
= last_sym
;
498 sym_free_first
= last_sym
;
502 static inline Sym
*sym_malloc(void)
506 sym
= sym_free_first
;
508 sym
= __sym_malloc();
509 sym_free_first
= sym
->next
;
512 sym
= tcc_malloc(sizeof(Sym
));
517 ST_INLN
void sym_free(Sym
*sym
)
520 sym
->next
= sym_free_first
;
521 sym_free_first
= sym
;
527 /* push, without hashing */
528 ST_FUNC Sym
*sym_push2(Sym
**ps
, int v
, int t
, int c
)
533 memset(s
, 0, sizeof *s
);
543 /* find a symbol and return its associated structure. 's' is the top
544 of the symbol stack */
545 ST_FUNC Sym
*sym_find2(Sym
*s
, int v
)
557 /* structure lookup */
558 ST_INLN Sym
*struct_find(int v
)
561 if ((unsigned)v
>= (unsigned)(tok_ident
- TOK_IDENT
))
563 return table_ident
[v
]->sym_struct
;
566 /* find an identifier */
567 ST_INLN Sym
*sym_find(int v
)
570 if ((unsigned)v
>= (unsigned)(tok_ident
- TOK_IDENT
))
572 return table_ident
[v
]->sym_identifier
;
575 static int sym_scope(Sym
*s
)
577 if (IS_ENUM_VAL (s
->type
.t
))
578 return s
->type
.ref
->sym_scope
;
583 /* push a given symbol on the symbol stack */
584 ST_FUNC Sym
*sym_push(int v
, CType
*type
, int r
, int c
)
593 s
= sym_push2(ps
, v
, type
->t
, c
);
594 s
->type
.ref
= type
->ref
;
596 /* don't record fields or anonymous symbols */
598 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
599 /* record symbol in token array */
600 ts
= table_ident
[(v
& ~SYM_STRUCT
) - TOK_IDENT
];
602 ps
= &ts
->sym_struct
;
604 ps
= &ts
->sym_identifier
;
607 s
->sym_scope
= local_scope
;
608 if (s
->prev_tok
&& sym_scope(s
->prev_tok
) == s
->sym_scope
)
609 tcc_error("redeclaration of '%s'",
610 get_tok_str(v
& ~SYM_STRUCT
, NULL
));
615 /* push a global identifier */
616 ST_FUNC Sym
*global_identifier_push(int v
, int t
, int c
)
619 s
= sym_push2(&global_stack
, v
, t
, c
);
620 s
->r
= VT_CONST
| VT_SYM
;
621 /* don't record anonymous symbol */
622 if (v
< SYM_FIRST_ANOM
) {
623 ps
= &table_ident
[v
- TOK_IDENT
]->sym_identifier
;
624 /* modify the top most local identifier, so that sym_identifier will
625 point to 's' when popped; happens when called from inline asm */
626 while (*ps
!= NULL
&& (*ps
)->sym_scope
)
627 ps
= &(*ps
)->prev_tok
;
634 /* pop symbols until top reaches 'b'. If KEEP is non-zero don't really
635 pop them yet from the list, but do remove them from the token array. */
636 ST_FUNC
void sym_pop(Sym
**ptop
, Sym
*b
, int keep
)
646 /* remove symbol in token array */
648 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
649 ts
= table_ident
[(v
& ~SYM_STRUCT
) - TOK_IDENT
];
651 ps
= &ts
->sym_struct
;
653 ps
= &ts
->sym_identifier
;
664 /* ------------------------------------------------------------------------- */
666 static void vsetc(CType
*type
, int r
, CValue
*vc
)
670 if (vtop
>= vstack
+ (VSTACK_SIZE
- 1))
671 tcc_error("memory full (vstack)");
672 /* cannot let cpu flags if other instruction are generated. Also
673 avoid leaving VT_JMP anywhere except on the top of the stack
674 because it would complicate the code generator.
676 Don't do this when nocode_wanted. vtop might come from
677 !nocode_wanted regions (see 88_codeopt.c) and transforming
678 it to a register without actually generating code is wrong
679 as their value might still be used for real. All values
680 we push under nocode_wanted will eventually be popped
681 again, so that the VT_CMP/VT_JMP value will be in vtop
682 when code is unsuppressed again.
684 Same logic below in vswap(); */
685 if (vtop
>= vstack
&& !nocode_wanted
) {
686 v
= vtop
->r
& VT_VALMASK
;
687 if (v
== VT_CMP
|| (v
& ~1) == VT_JMP
)
699 ST_FUNC
void vswap(void)
702 /* cannot vswap cpu flags. See comment at vsetc() above */
703 if (vtop
>= vstack
&& !nocode_wanted
) {
704 int v
= vtop
->r
& VT_VALMASK
;
705 if (v
== VT_CMP
|| (v
& ~1) == VT_JMP
)
713 /* pop stack value */
714 ST_FUNC
void vpop(void)
717 v
= vtop
->r
& VT_VALMASK
;
718 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
719 /* for x86, we need to pop the FP stack */
721 o(0xd8dd); /* fstp %st(0) */
724 if (v
== VT_JMP
|| v
== VT_JMPI
) {
725 /* need to put correct jump if && or || without test */
731 /* push constant of type "type" with useless value */
732 ST_FUNC
void vpush(CType
*type
)
734 vset(type
, VT_CONST
, 0);
737 /* push integer constant */
738 ST_FUNC
void vpushi(int v
)
742 vsetc(&int_type
, VT_CONST
, &cval
);
745 /* push a pointer sized constant */
746 static void vpushs(addr_t v
)
750 vsetc(&size_type
, VT_CONST
, &cval
);
753 /* push arbitrary 64bit constant */
754 ST_FUNC
void vpush64(int ty
, unsigned long long v
)
761 vsetc(&ctype
, VT_CONST
, &cval
);
764 /* push long long constant */
765 static inline void vpushll(long long v
)
767 vpush64(VT_LLONG
, v
);
770 ST_FUNC
void vset(CType
*type
, int r
, int v
)
775 vsetc(type
, r
, &cval
);
778 static void vseti(int r
, int v
)
786 ST_FUNC
void vpushv(SValue
*v
)
788 if (vtop
>= vstack
+ (VSTACK_SIZE
- 1))
789 tcc_error("memory full (vstack)");
794 static void vdup(void)
799 /* rotate n first stack elements to the bottom
800 I1 ... In -> I2 ... In I1 [top is right]
802 ST_FUNC
void vrotb(int n
)
813 /* rotate the n elements before entry e towards the top
814 I1 ... In ... -> In I1 ... I(n-1) ... [top is right]
816 ST_FUNC
void vrote(SValue
*e
, int n
)
822 for(i
= 0;i
< n
- 1; i
++)
827 /* rotate n first stack elements to the top
828 I1 ... In -> In I1 ... I(n-1) [top is right]
830 ST_FUNC
void vrott(int n
)
835 /* push a symbol value of TYPE */
836 static inline void vpushsym(CType
*type
, Sym
*sym
)
840 vsetc(type
, VT_CONST
| VT_SYM
, &cval
);
844 /* Return a static symbol pointing to a section */
845 ST_FUNC Sym
*get_sym_ref(CType
*type
, Section
*sec
, unsigned long offset
, unsigned long size
)
851 sym
= sym_push(v
, type
, VT_CONST
| VT_SYM
, 0);
852 sym
->type
.t
|= VT_STATIC
;
853 put_extern_sym(sym
, sec
, offset
, size
);
857 /* push a reference to a section offset by adding a dummy symbol */
858 static void vpush_ref(CType
*type
, Section
*sec
, unsigned long offset
, unsigned long size
)
860 vpushsym(type
, get_sym_ref(type
, sec
, offset
, size
));
863 /* define a new external reference to a symbol 'v' of type 'u' */
864 ST_FUNC Sym
*external_global_sym(int v
, CType
*type
)
870 /* push forward reference */
871 s
= global_identifier_push(v
, type
->t
| VT_EXTERN
, 0);
872 s
->type
.ref
= type
->ref
;
873 } else if (IS_ASM_SYM(s
)) {
874 s
->type
.t
= type
->t
| (s
->type
.t
& VT_EXTERN
);
875 s
->type
.ref
= type
->ref
;
881 /* Merge symbol attributes. */
882 static void merge_symattr(struct SymAttr
*sa
, struct SymAttr
*sa1
)
884 if (sa1
->aligned
&& !sa
->aligned
)
885 sa
->aligned
= sa1
->aligned
;
886 sa
->packed
|= sa1
->packed
;
887 sa
->weak
|= sa1
->weak
;
888 if (sa1
->visibility
!= STV_DEFAULT
) {
889 int vis
= sa
->visibility
;
890 if (vis
== STV_DEFAULT
891 || vis
> sa1
->visibility
)
892 vis
= sa1
->visibility
;
893 sa
->visibility
= vis
;
895 sa
->dllexport
|= sa1
->dllexport
;
896 sa
->nodecorate
|= sa1
->nodecorate
;
897 sa
->dllimport
|= sa1
->dllimport
;
900 /* Merge function attributes. */
901 static void merge_funcattr(struct FuncAttr
*fa
, struct FuncAttr
*fa1
)
903 if (fa1
->func_call
&& !fa
->func_call
)
904 fa
->func_call
= fa1
->func_call
;
905 if (fa1
->func_type
&& !fa
->func_type
)
906 fa
->func_type
= fa1
->func_type
;
907 if (fa1
->func_args
&& !fa
->func_args
)
908 fa
->func_args
= fa1
->func_args
;
911 /* Merge attributes. */
912 static void merge_attr(AttributeDef
*ad
, AttributeDef
*ad1
)
914 merge_symattr(&ad
->a
, &ad1
->a
);
915 merge_funcattr(&ad
->f
, &ad1
->f
);
918 ad
->section
= ad1
->section
;
919 if (ad1
->alias_target
)
920 ad
->alias_target
= ad1
->alias_target
;
922 ad
->asm_label
= ad1
->asm_label
;
924 ad
->attr_mode
= ad1
->attr_mode
;
927 /* Merge some type attributes. */
928 static void patch_type(Sym
*sym
, CType
*type
)
930 if (!(type
->t
& VT_EXTERN
) || IS_ENUM_VAL(sym
->type
.t
)) {
931 if (!(sym
->type
.t
& VT_EXTERN
))
932 tcc_error("redefinition of '%s'", get_tok_str(sym
->v
, NULL
));
933 sym
->type
.t
&= ~VT_EXTERN
;
936 if (IS_ASM_SYM(sym
)) {
937 /* stay static if both are static */
938 sym
->type
.t
= type
->t
& (sym
->type
.t
| ~VT_STATIC
);
939 sym
->type
.ref
= type
->ref
;
942 if (!is_compatible_types(&sym
->type
, type
)) {
943 tcc_error("incompatible types for redefinition of '%s'",
944 get_tok_str(sym
->v
, NULL
));
946 } else if ((sym
->type
.t
& VT_BTYPE
) == VT_FUNC
) {
947 int static_proto
= sym
->type
.t
& VT_STATIC
;
948 /* warn if static follows non-static function declaration */
949 if ((type
->t
& VT_STATIC
) && !static_proto
950 /* XXX this test for inline shouldn't be here. Until we
951 implement gnu-inline mode again it silences a warning for
952 mingw caused by our workarounds. */
953 && !((type
->t
| sym
->type
.t
) & VT_INLINE
))
954 tcc_warning("static storage ignored for redefinition of '%s'",
955 get_tok_str(sym
->v
, NULL
));
957 /* set 'inline' if both agree or if one has static */
958 if ((type
->t
| sym
->type
.t
) & VT_INLINE
) {
959 if (!((type
->t
^ sym
->type
.t
) & VT_INLINE
)
960 || ((type
->t
| sym
->type
.t
) & VT_STATIC
))
961 static_proto
|= VT_INLINE
;
964 if (0 == (type
->t
& VT_EXTERN
)) {
965 /* put complete type, use static from prototype */
966 sym
->type
.t
= (type
->t
& ~(VT_STATIC
|VT_INLINE
)) | static_proto
;
967 sym
->type
.ref
= type
->ref
;
969 sym
->type
.t
&= ~VT_INLINE
| static_proto
;
972 if (sym
->type
.ref
->f
.func_type
== FUNC_OLD
973 && type
->ref
->f
.func_type
!= FUNC_OLD
) {
974 sym
->type
.ref
= type
->ref
;
978 if ((sym
->type
.t
& VT_ARRAY
) && type
->ref
->c
>= 0) {
979 /* set array size if it was omitted in extern declaration */
980 sym
->type
.ref
->c
= type
->ref
->c
;
982 if ((type
->t
^ sym
->type
.t
) & VT_STATIC
)
983 tcc_warning("storage mismatch for redefinition of '%s'",
984 get_tok_str(sym
->v
, NULL
));
988 /* Merge some storage attributes. */
989 static void patch_storage(Sym
*sym
, AttributeDef
*ad
, CType
*type
)
992 patch_type(sym
, type
);
995 if (sym
->a
.dllimport
!= ad
->a
.dllimport
)
996 tcc_error("incompatible dll linkage for redefinition of '%s'",
997 get_tok_str(sym
->v
, NULL
));
999 merge_symattr(&sym
->a
, &ad
->a
);
1001 sym
->asm_label
= ad
->asm_label
;
1002 update_storage(sym
);
1005 /* copy sym to other stack */
1006 static Sym
*sym_copy(Sym
*s0
, Sym
**ps
)
1009 s
= sym_malloc(), *s
= *s0
;
1010 s
->prev
= *ps
, *ps
= s
;
1011 if (s
->v
< SYM_FIRST_ANOM
) {
1012 ps
= &table_ident
[s
->v
- TOK_IDENT
]->sym_identifier
;
1013 s
->prev_tok
= *ps
, *ps
= s
;
1018 /* copy a list of syms */
1019 static void sym_copy_ref(Sym
*s0
, Sym
**ps
)
1021 Sym
*s
, **sp
= &s0
->type
.ref
;
1022 for (s
= *sp
, *sp
= NULL
; s
; s
= s
->next
)
1023 sp
= &(*sp
= sym_copy(s
, ps
))->next
;
1026 /* define a new external reference to a symbol 'v' */
1027 static Sym
*external_sym(int v
, CType
*type
, int r
, AttributeDef
*ad
)
1031 /* look for global symbol */
1033 while (s
&& s
->sym_scope
)
1037 /* push forward reference */
1038 s
= global_identifier_push(v
, type
->t
, 0);
1041 s
->asm_label
= ad
->asm_label
;
1042 s
->type
.ref
= type
->ref
;
1043 bt
= s
->type
.t
& (VT_BTYPE
|VT_ARRAY
);
1044 /* copy type to the global stack also */
1045 if (local_scope
&& (bt
== VT_FUNC
|| (bt
& VT_ARRAY
)))
1046 sym_copy_ref(s
, &global_stack
);
1048 patch_storage(s
, ad
, type
);
1049 bt
= s
->type
.t
& VT_BTYPE
;
1051 /* push variables to local scope if any */
1052 if (local_stack
&& bt
!= VT_FUNC
)
1053 s
= sym_copy(s
, &local_stack
);
1057 /* push a reference to global symbol v */
1058 ST_FUNC
void vpush_global_sym(CType
*type
, int v
)
1060 vpushsym(type
, external_global_sym(v
, type
));
1063 /* save registers up to (vtop - n) stack entry */
1064 ST_FUNC
void save_regs(int n
)
1067 for(p
= vstack
, p1
= vtop
- n
; p
<= p1
; p
++)
1071 /* save r to the memory stack, and mark it as being free */
1072 ST_FUNC
void save_reg(int r
)
1074 save_reg_upstack(r
, 0);
1077 /* save r to the memory stack, and mark it as being free,
1078 if seen up to (vtop - n) stack entry */
1079 ST_FUNC
void save_reg_upstack(int r
, int n
)
1081 int l
, saved
, size
, align
;
1085 if ((r
&= VT_VALMASK
) >= VT_CONST
)
1090 /* modify all stack values */
1093 for(p
= vstack
, p1
= vtop
- n
; p
<= p1
; p
++) {
1094 if ((p
->r
& VT_VALMASK
) == r
||
1095 ((p
->type
.t
& VT_BTYPE
) == VT_LLONG
&& (p
->r2
& VT_VALMASK
) == r
)) {
1096 /* must save value on stack if not already done */
1098 /* NOTE: must reload 'r' because r might be equal to r2 */
1099 r
= p
->r
& VT_VALMASK
;
1100 /* store register in the stack */
1102 if ((p
->r
& VT_LVAL
) ||
1103 (!is_float(type
->t
) && (type
->t
& VT_BTYPE
) != VT_LLONG
))
1105 type
= &char_pointer_type
;
1109 size
= type_size(type
, &align
);
1110 l
=get_temp_local_var(size
,align
);
1111 sv
.type
.t
= type
->t
;
1112 sv
.r
= VT_LOCAL
| VT_LVAL
;
1115 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
1116 /* x86 specific: need to pop fp register ST0 if saved */
1117 if (r
== TREG_ST0
) {
1118 o(0xd8dd); /* fstp %st(0) */
1122 /* special long long case */
1123 if ((type
->t
& VT_BTYPE
) == VT_LLONG
) {
1130 /* mark that stack entry as being saved on the stack */
1131 if (p
->r
& VT_LVAL
) {
1132 /* also clear the bounded flag because the
1133 relocation address of the function was stored in
1135 p
->r
= (p
->r
& ~(VT_VALMASK
| VT_BOUNDED
)) | VT_LLOCAL
;
1137 p
->r
= lvalue_type(p
->type
.t
) | VT_LOCAL
;
1145 #ifdef TCC_TARGET_ARM
1146 /* find a register of class 'rc2' with at most one reference on stack.
1147 * If none, call get_reg(rc) */
1148 ST_FUNC
int get_reg_ex(int rc
, int rc2
)
1153 for(r
=0;r
<NB_REGS
;r
++) {
1154 if (reg_classes
[r
] & rc2
) {
1157 for(p
= vstack
; p
<= vtop
; p
++) {
1158 if ((p
->r
& VT_VALMASK
) == r
||
1159 (p
->r2
& VT_VALMASK
) == r
)
1170 /* find a free register of class 'rc'. If none, save one register */
1171 ST_FUNC
int get_reg(int rc
)
1176 /* find a free register */
1177 for(r
=0;r
<NB_REGS
;r
++) {
1178 if (reg_classes
[r
] & rc
) {
1181 for(p
=vstack
;p
<=vtop
;p
++) {
1182 if ((p
->r
& VT_VALMASK
) == r
||
1183 (p
->r2
& VT_VALMASK
) == r
)
1191 /* no register left : free the first one on the stack (VERY
1192 IMPORTANT to start from the bottom to ensure that we don't
1193 spill registers used in gen_opi()) */
1194 for(p
=vstack
;p
<=vtop
;p
++) {
1195 /* look at second register (if long long) */
1196 r
= p
->r2
& VT_VALMASK
;
1197 if (r
< VT_CONST
&& (reg_classes
[r
] & rc
))
1199 r
= p
->r
& VT_VALMASK
;
1200 if (r
< VT_CONST
&& (reg_classes
[r
] & rc
)) {
1206 /* Should never comes here */
1210 /* find a free temporary local variable (return the offset on stack) match the size and align. If none, add new temporary stack variable*/
1211 static int get_temp_local_var(int size
,int align
){
1213 struct temp_local_variable
*temp_var
;
1220 for(i
=0;i
<nb_temp_local_vars
;i
++){
1221 temp_var
=&arr_temp_local_vars
[i
];
1222 if(temp_var
->size
<size
||align
!=temp_var
->align
){
1225 /*check if temp_var is free*/
1227 for(p
=vstack
;p
<=vtop
;p
++) {
1229 if(r
==VT_LOCAL
||r
==VT_LLOCAL
){
1230 if(p
->c
.i
==temp_var
->location
){
1237 found_var
=temp_var
->location
;
1243 loc
= (loc
- size
) & -align
;
1244 if(nb_temp_local_vars
<MAX_TEMP_LOCAL_VARIABLE_NUMBER
){
1245 temp_var
=&arr_temp_local_vars
[i
];
1246 temp_var
->location
=loc
;
1247 temp_var
->size
=size
;
1248 temp_var
->align
=align
;
1249 nb_temp_local_vars
++;
1256 static void clear_temp_local_var_list(){
1257 nb_temp_local_vars
=0;
1260 /* move register 's' (of type 't') to 'r', and flush previous value of r to memory
1262 static void move_reg(int r
, int s
, int t
)
1276 /* get address of vtop (vtop MUST BE an lvalue) */
1277 ST_FUNC
void gaddrof(void)
1279 vtop
->r
&= ~VT_LVAL
;
1280 /* tricky: if saved lvalue, then we can go back to lvalue */
1281 if ((vtop
->r
& VT_VALMASK
) == VT_LLOCAL
)
1282 vtop
->r
= (vtop
->r
& ~(VT_VALMASK
| VT_LVAL_TYPE
)) | VT_LOCAL
| VT_LVAL
;
1287 #ifdef CONFIG_TCC_BCHECK
1288 /* generate lvalue bound code */
1289 static void gbound(void)
1294 vtop
->r
&= ~VT_MUSTBOUND
;
1295 /* if lvalue, then use checking code before dereferencing */
1296 if (vtop
->r
& VT_LVAL
) {
1297 /* if not VT_BOUNDED value, then make one */
1298 if (!(vtop
->r
& VT_BOUNDED
)) {
1299 lval_type
= vtop
->r
& (VT_LVAL_TYPE
| VT_LVAL
);
1300 /* must save type because we must set it to int to get pointer */
1302 vtop
->type
.t
= VT_PTR
;
1305 gen_bounded_ptr_add();
1306 vtop
->r
|= lval_type
;
1309 /* then check for dereferencing */
1310 gen_bounded_ptr_deref();
1315 static void incr_bf_adr(int o
)
1317 vtop
->type
= char_pointer_type
;
1321 vtop
->type
.t
= (vtop
->type
.t
& ~(VT_BTYPE
|VT_DEFSIGN
))
1322 | (VT_BYTE
|VT_UNSIGNED
);
1323 vtop
->r
= (vtop
->r
& ~VT_LVAL_TYPE
)
1324 | (VT_LVAL_BYTE
|VT_LVAL_UNSIGNED
|VT_LVAL
);
1327 /* single-byte load mode for packed or otherwise unaligned bitfields */
1328 static void load_packed_bf(CType
*type
, int bit_pos
, int bit_size
)
1331 save_reg_upstack(vtop
->r
, 1);
1332 vpush64(type
->t
& VT_BTYPE
, 0); // B X
1333 bits
= 0, o
= bit_pos
>> 3, bit_pos
&= 7;
1342 vpushi(bit_pos
), gen_op(TOK_SHR
), bit_pos
= 0; // X B Y
1344 vpushi((1 << n
) - 1), gen_op('&');
1347 vpushi(bits
), gen_op(TOK_SHL
);
1350 bits
+= n
, bit_size
-= n
, o
= 1;
1353 if (!(type
->t
& VT_UNSIGNED
)) {
1354 n
= ((type
->t
& VT_BTYPE
) == VT_LLONG
? 64 : 32) - bits
;
1355 vpushi(n
), gen_op(TOK_SHL
);
1356 vpushi(n
), gen_op(TOK_SAR
);
1360 /* single-byte store mode for packed or otherwise unaligned bitfields */
1361 static void store_packed_bf(int bit_pos
, int bit_size
)
1363 int bits
, n
, o
, m
, c
;
1365 c
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
1367 save_reg_upstack(vtop
->r
, 1);
1368 bits
= 0, o
= bit_pos
>> 3, bit_pos
&= 7;
1370 incr_bf_adr(o
); // X B
1372 c
? vdup() : gv_dup(); // B V X
1375 vpushi(bits
), gen_op(TOK_SHR
);
1377 vpushi(bit_pos
), gen_op(TOK_SHL
);
1382 m
= ((1 << n
) - 1) << bit_pos
;
1383 vpushi(m
), gen_op('&'); // X B V1
1384 vpushv(vtop
-1); // X B V1 B
1385 vpushi(m
& 0x80 ? ~m
& 0x7f : ~m
);
1386 gen_op('&'); // X B V1 B1
1387 gen_op('|'); // X B V2
1389 vdup(), vtop
[-1] = vtop
[-2]; // X B B V2
1390 vstore(), vpop(); // X B
1391 bits
+= n
, bit_size
-= n
, bit_pos
= 0, o
= 1;
1396 static int adjust_bf(SValue
*sv
, int bit_pos
, int bit_size
)
1399 if (0 == sv
->type
.ref
)
1401 t
= sv
->type
.ref
->auxtype
;
1402 if (t
!= -1 && t
!= VT_STRUCT
) {
1403 sv
->type
.t
= (sv
->type
.t
& ~VT_BTYPE
) | t
;
1404 sv
->r
= (sv
->r
& ~VT_LVAL_TYPE
) | lvalue_type(sv
->type
.t
);
1409 /* store vtop a register belonging to class 'rc'. lvalues are
1410 converted to values. Cannot be used if cannot be converted to
1411 register value (such as structures). */
1412 ST_FUNC
int gv(int rc
)
1414 int r
, bit_pos
, bit_size
, size
, align
, rc2
;
1416 /* NOTE: get_reg can modify vstack[] */
1417 if (vtop
->type
.t
& VT_BITFIELD
) {
1420 bit_pos
= BIT_POS(vtop
->type
.t
);
1421 bit_size
= BIT_SIZE(vtop
->type
.t
);
1422 /* remove bit field info to avoid loops */
1423 vtop
->type
.t
&= ~VT_STRUCT_MASK
;
1426 type
.t
= vtop
->type
.t
& VT_UNSIGNED
;
1427 if ((vtop
->type
.t
& VT_BTYPE
) == VT_BOOL
)
1428 type
.t
|= VT_UNSIGNED
;
1430 r
= adjust_bf(vtop
, bit_pos
, bit_size
);
1432 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
)
1437 if (r
== VT_STRUCT
) {
1438 load_packed_bf(&type
, bit_pos
, bit_size
);
1440 int bits
= (type
.t
& VT_BTYPE
) == VT_LLONG
? 64 : 32;
1441 /* cast to int to propagate signedness in following ops */
1443 /* generate shifts */
1444 vpushi(bits
- (bit_pos
+ bit_size
));
1446 vpushi(bits
- bit_size
);
1447 /* NOTE: transformed to SHR if unsigned */
1452 if (is_float(vtop
->type
.t
) &&
1453 (vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
) {
1454 unsigned long offset
;
1455 /* CPUs usually cannot use float constants, so we store them
1456 generically in data segment */
1457 size
= type_size(&vtop
->type
, &align
);
1459 size
= 0, align
= 1;
1460 offset
= section_add(data_section
, size
, align
);
1461 vpush_ref(&vtop
->type
, data_section
, offset
, size
);
1463 init_putv(&vtop
->type
, data_section
, offset
);
1466 #ifdef CONFIG_TCC_BCHECK
1467 if (vtop
->r
& VT_MUSTBOUND
)
1471 r
= vtop
->r
& VT_VALMASK
;
1472 rc2
= (rc
& RC_FLOAT
) ? RC_FLOAT
: RC_INT
;
1473 #ifndef TCC_TARGET_ARM64
1476 #ifdef TCC_TARGET_X86_64
1477 else if (rc
== RC_FRET
)
1481 /* need to reload if:
1483 - lvalue (need to dereference pointer)
1484 - already a register, but not in the right class */
1486 || (vtop
->r
& VT_LVAL
)
1487 || !(reg_classes
[r
] & rc
)
1489 || ((vtop
->type
.t
& VT_BTYPE
) == VT_QLONG
&& !(reg_classes
[vtop
->r2
] & rc2
))
1490 || ((vtop
->type
.t
& VT_BTYPE
) == VT_QFLOAT
&& !(reg_classes
[vtop
->r2
] & rc2
))
1492 || ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
&& !(reg_classes
[vtop
->r2
] & rc2
))
1498 if (((vtop
->type
.t
& VT_BTYPE
) == VT_QLONG
) || ((vtop
->type
.t
& VT_BTYPE
) == VT_QFLOAT
)) {
1499 int addr_type
= VT_LLONG
, load_size
= 8, load_type
= ((vtop
->type
.t
& VT_BTYPE
) == VT_QLONG
) ? VT_LLONG
: VT_DOUBLE
;
1501 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
) {
1502 int addr_type
= VT_INT
, load_size
= 4, load_type
= VT_INT
;
1503 unsigned long long ll
;
1505 int r2
, original_type
;
1506 original_type
= vtop
->type
.t
;
1507 /* two register type load : expand to two words
1510 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
) {
1513 vtop
->c
.i
= ll
; /* first word */
1515 vtop
->r
= r
; /* save register value */
1516 vpushi(ll
>> 32); /* second word */
1519 if (vtop
->r
& VT_LVAL
) {
1520 /* We do not want to modifier the long long
1521 pointer here, so the safest (and less
1522 efficient) is to save all the other registers
1523 in the stack. XXX: totally inefficient. */
1527 /* lvalue_save: save only if used further down the stack */
1528 save_reg_upstack(vtop
->r
, 1);
1530 /* load from memory */
1531 vtop
->type
.t
= load_type
;
1534 vtop
[-1].r
= r
; /* save register value */
1535 /* increment pointer to get second word */
1536 vtop
->type
.t
= addr_type
;
1541 vtop
->type
.t
= load_type
;
1543 /* move registers */
1546 vtop
[-1].r
= r
; /* save register value */
1547 vtop
->r
= vtop
[-1].r2
;
1549 /* Allocate second register. Here we rely on the fact that
1550 get_reg() tries first to free r2 of an SValue. */
1554 /* write second register */
1556 vtop
->type
.t
= original_type
;
1557 } else if ((vtop
->r
& VT_LVAL
) && !is_float(vtop
->type
.t
)) {
1559 /* lvalue of scalar type : need to use lvalue type
1560 because of possible cast */
1563 /* compute memory access type */
1564 if (vtop
->r
& VT_LVAL_BYTE
)
1566 else if (vtop
->r
& VT_LVAL_SHORT
)
1568 if (vtop
->r
& VT_LVAL_UNSIGNED
)
1572 /* restore wanted type */
1575 /* one register type load */
1580 #ifdef TCC_TARGET_C67
1581 /* uses register pairs for doubles */
1582 if ((vtop
->type
.t
& VT_BTYPE
) == VT_DOUBLE
)
1589 /* generate vtop[-1] and vtop[0] in resp. classes rc1 and rc2 */
1590 ST_FUNC
void gv2(int rc1
, int rc2
)
1594 /* generate more generic register first. But VT_JMP or VT_CMP
1595 values must be generated first in all cases to avoid possible
1597 v
= vtop
[0].r
& VT_VALMASK
;
1598 if (v
!= VT_CMP
&& (v
& ~1) != VT_JMP
&& rc1
<= rc2
) {
1603 /* test if reload is needed for first register */
1604 if ((vtop
[-1].r
& VT_VALMASK
) >= VT_CONST
) {
1614 /* test if reload is needed for first register */
1615 if ((vtop
[0].r
& VT_VALMASK
) >= VT_CONST
) {
1621 #ifndef TCC_TARGET_ARM64
1622 /* wrapper around RC_FRET to return a register by type */
1623 static int rc_fret(int t
)
1625 #ifdef TCC_TARGET_X86_64
1626 if (t
== VT_LDOUBLE
) {
1634 /* wrapper around REG_FRET to return a register by type */
1635 static int reg_fret(int t
)
1637 #ifdef TCC_TARGET_X86_64
1638 if (t
== VT_LDOUBLE
) {
1646 /* expand 64bit on stack in two ints */
1647 ST_FUNC
void lexpand(void)
1650 u
= vtop
->type
.t
& (VT_DEFSIGN
| VT_UNSIGNED
);
1651 v
= vtop
->r
& (VT_VALMASK
| VT_LVAL
);
1652 if (v
== VT_CONST
) {
1655 } else if (v
== (VT_LVAL
|VT_CONST
) || v
== (VT_LVAL
|VT_LOCAL
)) {
1661 vtop
[0].r
= vtop
[-1].r2
;
1662 vtop
[0].r2
= vtop
[-1].r2
= VT_CONST
;
1664 vtop
[0].type
.t
= vtop
[-1].type
.t
= VT_INT
| u
;
1669 /* build a long long from two ints */
1670 static void lbuild(int t
)
1672 gv2(RC_INT
, RC_INT
);
1673 vtop
[-1].r2
= vtop
[0].r
;
1674 vtop
[-1].type
.t
= t
;
1679 /* convert stack entry to register and duplicate its value in another
1681 static void gv_dup(void)
1688 if ((t
& VT_BTYPE
) == VT_LLONG
) {
1689 if (t
& VT_BITFIELD
) {
1699 /* stack: H L L1 H1 */
1709 /* duplicate value */
1714 #ifdef TCC_TARGET_X86_64
1715 if ((t
& VT_BTYPE
) == VT_LDOUBLE
) {
1725 load(r1
, &sv
); /* move r to r1 */
1727 /* duplicates value */
1733 /* Generate value test
1735 * Generate a test for any value (jump, comparison and integers) */
1736 ST_FUNC
int gvtst(int inv
, int t
)
1738 int v
= vtop
->r
& VT_VALMASK
;
1739 if (v
!= VT_CMP
&& v
!= VT_JMP
&& v
!= VT_JMPI
) {
1743 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
1744 /* constant jmp optimization */
1745 if ((vtop
->c
.i
!= 0) != inv
)
1750 return gtst(inv
, t
);
1754 /* generate CPU independent (unsigned) long long operations */
1755 static void gen_opl(int op
)
1757 int t
, a
, b
, op1
, c
, i
;
1759 unsigned short reg_iret
= REG_IRET
;
1760 unsigned short reg_lret
= REG_LRET
;
1766 func
= TOK___divdi3
;
1769 func
= TOK___udivdi3
;
1772 func
= TOK___moddi3
;
1775 func
= TOK___umoddi3
;
1782 /* call generic long long function */
1783 vpush_global_sym(&func_old_type
, func
);
1788 vtop
->r2
= reg_lret
;
1796 //pv("gen_opl A",0,2);
1802 /* stack: L1 H1 L2 H2 */
1807 vtop
[-2] = vtop
[-3];
1810 /* stack: H1 H2 L1 L2 */
1811 //pv("gen_opl B",0,4);
1817 /* stack: H1 H2 L1 L2 ML MH */
1820 /* stack: ML MH H1 H2 L1 L2 */
1824 /* stack: ML MH H1 L2 H2 L1 */
1829 /* stack: ML MH M1 M2 */
1832 } else if (op
== '+' || op
== '-') {
1833 /* XXX: add non carry method too (for MIPS or alpha) */
1839 /* stack: H1 H2 (L1 op L2) */
1842 gen_op(op1
+ 1); /* TOK_xxxC2 */
1845 /* stack: H1 H2 (L1 op L2) */
1848 /* stack: (L1 op L2) H1 H2 */
1850 /* stack: (L1 op L2) (H1 op H2) */
1858 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
1859 t
= vtop
[-1].type
.t
;
1863 /* stack: L H shift */
1865 /* constant: simpler */
1866 /* NOTE: all comments are for SHL. the other cases are
1867 done by swapping words */
1878 if (op
!= TOK_SAR
) {
1911 /* XXX: should provide a faster fallback on x86 ? */
1914 func
= TOK___ashrdi3
;
1917 func
= TOK___lshrdi3
;
1920 func
= TOK___ashldi3
;
1926 /* compare operations */
1932 /* stack: L1 H1 L2 H2 */
1934 vtop
[-1] = vtop
[-2];
1936 /* stack: L1 L2 H1 H2 */
1939 /* when values are equal, we need to compare low words. since
1940 the jump is inverted, we invert the test too. */
1943 else if (op1
== TOK_GT
)
1945 else if (op1
== TOK_ULT
)
1947 else if (op1
== TOK_UGT
)
1957 /* generate non equal test */
1963 /* compare low. Always unsigned */
1967 else if (op1
== TOK_LE
)
1969 else if (op1
== TOK_GT
)
1971 else if (op1
== TOK_GE
)
1982 static uint64_t gen_opic_sdiv(uint64_t a
, uint64_t b
)
1984 uint64_t x
= (a
>> 63 ? -a
: a
) / (b
>> 63 ? -b
: b
);
1985 return (a
^ b
) >> 63 ? -x
: x
;
1988 static int gen_opic_lt(uint64_t a
, uint64_t b
)
1990 return (a
^ (uint64_t)1 << 63) < (b
^ (uint64_t)1 << 63);
1993 /* handle integer constant optimizations and various machine
1995 static void gen_opic(int op
)
1997 SValue
*v1
= vtop
- 1;
1999 int t1
= v1
->type
.t
& VT_BTYPE
;
2000 int t2
= v2
->type
.t
& VT_BTYPE
;
2001 int c1
= (v1
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2002 int c2
= (v2
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2003 uint64_t l1
= c1
? v1
->c
.i
: 0;
2004 uint64_t l2
= c2
? v2
->c
.i
: 0;
2005 int shm
= (t1
== VT_LLONG
) ? 63 : 31;
2007 if (t1
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t1
!= VT_PTR
))
2008 l1
= ((uint32_t)l1
|
2009 (v1
->type
.t
& VT_UNSIGNED
? 0 : -(l1
& 0x80000000)));
2010 if (t2
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t2
!= VT_PTR
))
2011 l2
= ((uint32_t)l2
|
2012 (v2
->type
.t
& VT_UNSIGNED
? 0 : -(l2
& 0x80000000)));
2016 case '+': l1
+= l2
; break;
2017 case '-': l1
-= l2
; break;
2018 case '&': l1
&= l2
; break;
2019 case '^': l1
^= l2
; break;
2020 case '|': l1
|= l2
; break;
2021 case '*': l1
*= l2
; break;
2028 /* if division by zero, generate explicit division */
2031 tcc_error("division by zero in constant");
2035 default: l1
= gen_opic_sdiv(l1
, l2
); break;
2036 case '%': l1
= l1
- l2
* gen_opic_sdiv(l1
, l2
); break;
2037 case TOK_UDIV
: l1
= l1
/ l2
; break;
2038 case TOK_UMOD
: l1
= l1
% l2
; break;
2041 case TOK_SHL
: l1
<<= (l2
& shm
); break;
2042 case TOK_SHR
: l1
>>= (l2
& shm
); break;
2044 l1
= (l1
>> 63) ? ~(~l1
>> (l2
& shm
)) : l1
>> (l2
& shm
);
2047 case TOK_ULT
: l1
= l1
< l2
; break;
2048 case TOK_UGE
: l1
= l1
>= l2
; break;
2049 case TOK_EQ
: l1
= l1
== l2
; break;
2050 case TOK_NE
: l1
= l1
!= l2
; break;
2051 case TOK_ULE
: l1
= l1
<= l2
; break;
2052 case TOK_UGT
: l1
= l1
> l2
; break;
2053 case TOK_LT
: l1
= gen_opic_lt(l1
, l2
); break;
2054 case TOK_GE
: l1
= !gen_opic_lt(l1
, l2
); break;
2055 case TOK_LE
: l1
= !gen_opic_lt(l2
, l1
); break;
2056 case TOK_GT
: l1
= gen_opic_lt(l2
, l1
); break;
2058 case TOK_LAND
: l1
= l1
&& l2
; break;
2059 case TOK_LOR
: l1
= l1
|| l2
; break;
2063 if (t1
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t1
!= VT_PTR
))
2064 l1
= ((uint32_t)l1
|
2065 (v1
->type
.t
& VT_UNSIGNED
? 0 : -(l1
& 0x80000000)));
2069 /* if commutative ops, put c2 as constant */
2070 if (c1
&& (op
== '+' || op
== '&' || op
== '^' ||
2071 op
== '|' || op
== '*')) {
2073 c2
= c1
; //c = c1, c1 = c2, c2 = c;
2074 l2
= l1
; //l = l1, l1 = l2, l2 = l;
2076 if (!const_wanted
&&
2078 (op
== TOK_SHL
|| op
== TOK_SHR
|| op
== TOK_SAR
)) ||
2079 (l1
== -1 && op
== TOK_SAR
))) {
2080 /* treat (0 << x), (0 >> x) and (-1 >> x) as constant */
2082 } else if (!const_wanted
&&
2083 c2
&& ((l2
== 0 && (op
== '&' || op
== '*')) ||
2085 (l2
== -1 || (l2
== 0xFFFFFFFF && t2
!= VT_LLONG
))) ||
2086 (l2
== 1 && (op
== '%' || op
== TOK_UMOD
)))) {
2087 /* treat (x & 0), (x * 0), (x | -1) and (x % 1) as constant */
2092 } else if (c2
&& (((op
== '*' || op
== '/' || op
== TOK_UDIV
||
2095 ((op
== '+' || op
== '-' || op
== '|' || op
== '^' ||
2096 op
== TOK_SHL
|| op
== TOK_SHR
|| op
== TOK_SAR
) &&
2099 (l2
== -1 || (l2
== 0xFFFFFFFF && t2
!= VT_LLONG
))))) {
2100 /* filter out NOP operations like x*1, x-0, x&-1... */
2102 } else if (c2
&& (op
== '*' || op
== TOK_PDIV
|| op
== TOK_UDIV
)) {
2103 /* try to use shifts instead of muls or divs */
2104 if (l2
> 0 && (l2
& (l2
- 1)) == 0) {
2113 else if (op
== TOK_PDIV
)
2119 } else if (c2
&& (op
== '+' || op
== '-') &&
2120 (((vtop
[-1].r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == (VT_CONST
| VT_SYM
))
2121 || (vtop
[-1].r
& (VT_VALMASK
| VT_LVAL
)) == VT_LOCAL
)) {
2122 /* symbol + constant case */
2126 /* The backends can't always deal with addends to symbols
2127 larger than +-1<<31. Don't construct such. */
2134 /* call low level op generator */
2135 if (t1
== VT_LLONG
|| t2
== VT_LLONG
||
2136 (PTR_SIZE
== 8 && (t1
== VT_PTR
|| t2
== VT_PTR
)))
2144 /* generate a floating point operation with constant propagation */
2145 static void gen_opif(int op
)
2149 #if defined _MSC_VER && defined _AMD64_
2150 /* avoid bad optimization with f1 -= f2 for f1:-0.0, f2:0.0 */
2157 /* currently, we cannot do computations with forward symbols */
2158 c1
= (v1
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2159 c2
= (v2
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2161 if (v1
->type
.t
== VT_FLOAT
) {
2164 } else if (v1
->type
.t
== VT_DOUBLE
) {
2172 /* NOTE: we only do constant propagation if finite number (not
2173 NaN or infinity) (ANSI spec) */
2174 if (!ieee_finite(f1
) || !ieee_finite(f2
))
2178 case '+': f1
+= f2
; break;
2179 case '-': f1
-= f2
; break;
2180 case '*': f1
*= f2
; break;
2183 /* If not in initializer we need to potentially generate
2184 FP exceptions at runtime, otherwise we want to fold. */
2190 /* XXX: also handles tests ? */
2194 /* XXX: overflow test ? */
2195 if (v1
->type
.t
== VT_FLOAT
) {
2197 } else if (v1
->type
.t
== VT_DOUBLE
) {
2209 static int pointed_size(CType
*type
)
2212 return type_size(pointed_type(type
), &align
);
2215 static void vla_runtime_pointed_size(CType
*type
)
2218 vla_runtime_type_size(pointed_type(type
), &align
);
2221 static inline int is_null_pointer(SValue
*p
)
2223 if ((p
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) != VT_CONST
)
2225 return ((p
->type
.t
& VT_BTYPE
) == VT_INT
&& (uint32_t)p
->c
.i
== 0) ||
2226 ((p
->type
.t
& VT_BTYPE
) == VT_LLONG
&& p
->c
.i
== 0) ||
2227 ((p
->type
.t
& VT_BTYPE
) == VT_PTR
&&
2228 (PTR_SIZE
== 4 ? (uint32_t)p
->c
.i
== 0 : p
->c
.i
== 0) &&
2229 ((pointed_type(&p
->type
)->t
& VT_BTYPE
) == VT_VOID
) &&
2230 0 == (pointed_type(&p
->type
)->t
& (VT_CONSTANT
| VT_VOLATILE
))
2233 static inline int is_integer_btype(int bt
)
2235 return (bt
== VT_BYTE
|| bt
== VT_SHORT
||
2236 bt
== VT_INT
|| bt
== VT_LLONG
);
2239 /* check types for comparison or subtraction of pointers */
2240 static void check_comparison_pointer_types(SValue
*p1
, SValue
*p2
, int op
)
2242 CType
*type1
, *type2
, tmp_type1
, tmp_type2
;
2245 /* null pointers are accepted for all comparisons as gcc */
2246 if (is_null_pointer(p1
) || is_null_pointer(p2
))
2250 bt1
= type1
->t
& VT_BTYPE
;
2251 bt2
= type2
->t
& VT_BTYPE
;
2252 /* accept comparison between pointer and integer with a warning */
2253 if ((is_integer_btype(bt1
) || is_integer_btype(bt2
)) && op
!= '-') {
2254 if (op
!= TOK_LOR
&& op
!= TOK_LAND
)
2255 tcc_warning("comparison between pointer and integer");
2259 /* both must be pointers or implicit function pointers */
2260 if (bt1
== VT_PTR
) {
2261 type1
= pointed_type(type1
);
2262 } else if (bt1
!= VT_FUNC
)
2263 goto invalid_operands
;
2265 if (bt2
== VT_PTR
) {
2266 type2
= pointed_type(type2
);
2267 } else if (bt2
!= VT_FUNC
) {
2269 tcc_error("invalid operands to binary %s", get_tok_str(op
, NULL
));
2271 if ((type1
->t
& VT_BTYPE
) == VT_VOID
||
2272 (type2
->t
& VT_BTYPE
) == VT_VOID
)
2276 tmp_type1
.t
&= ~(VT_DEFSIGN
| VT_UNSIGNED
| VT_CONSTANT
| VT_VOLATILE
);
2277 tmp_type2
.t
&= ~(VT_DEFSIGN
| VT_UNSIGNED
| VT_CONSTANT
| VT_VOLATILE
);
2278 if (!is_compatible_types(&tmp_type1
, &tmp_type2
)) {
2279 /* gcc-like error if '-' is used */
2281 goto invalid_operands
;
2283 tcc_warning("comparison of distinct pointer types lacks a cast");
2287 /* generic gen_op: handles types problems */
2288 ST_FUNC
void gen_op(int op
)
2290 int u
, t1
, t2
, bt1
, bt2
, t
;
2294 t1
= vtop
[-1].type
.t
;
2295 t2
= vtop
[0].type
.t
;
2296 bt1
= t1
& VT_BTYPE
;
2297 bt2
= t2
& VT_BTYPE
;
2299 if (bt1
== VT_STRUCT
|| bt2
== VT_STRUCT
) {
2300 tcc_error("operation on a struct");
2301 } else if (bt1
== VT_FUNC
|| bt2
== VT_FUNC
) {
2302 if (bt2
== VT_FUNC
) {
2303 mk_pointer(&vtop
->type
);
2306 if (bt1
== VT_FUNC
) {
2308 mk_pointer(&vtop
->type
);
2313 } else if (bt1
== VT_PTR
|| bt2
== VT_PTR
) {
2314 /* at least one operand is a pointer */
2315 /* relational op: must be both pointers */
2316 if (op
>= TOK_ULT
&& op
<= TOK_LOR
) {
2317 check_comparison_pointer_types(vtop
- 1, vtop
, op
);
2318 /* pointers are handled are unsigned */
2320 t
= VT_LLONG
| VT_UNSIGNED
;
2322 t
= VT_INT
| VT_UNSIGNED
;
2326 /* if both pointers, then it must be the '-' op */
2327 if (bt1
== VT_PTR
&& bt2
== VT_PTR
) {
2329 tcc_error("cannot use pointers here");
2330 check_comparison_pointer_types(vtop
- 1, vtop
, op
);
2331 /* XXX: check that types are compatible */
2332 if (vtop
[-1].type
.t
& VT_VLA
) {
2333 vla_runtime_pointed_size(&vtop
[-1].type
);
2335 vpushi(pointed_size(&vtop
[-1].type
));
2339 vtop
->type
.t
= ptrdiff_type
.t
;
2343 /* exactly one pointer : must be '+' or '-'. */
2344 if (op
!= '-' && op
!= '+')
2345 tcc_error("cannot use pointers here");
2346 /* Put pointer as first operand */
2347 if (bt2
== VT_PTR
) {
2349 t
= t1
, t1
= t2
, t2
= t
;
2352 if ((vtop
[0].type
.t
& VT_BTYPE
) == VT_LLONG
)
2353 /* XXX: truncate here because gen_opl can't handle ptr + long long */
2356 type1
= vtop
[-1].type
;
2357 type1
.t
&= ~VT_ARRAY
;
2358 if (vtop
[-1].type
.t
& VT_VLA
)
2359 vla_runtime_pointed_size(&vtop
[-1].type
);
2361 u
= pointed_size(&vtop
[-1].type
);
2363 tcc_error("unknown array element size");
2367 /* XXX: cast to int ? (long long case) */
2373 /* #ifdef CONFIG_TCC_BCHECK
2374 The main reason to removing this code:
2381 fprintf(stderr, "v+i-j = %p\n", v+i-j);
2382 fprintf(stderr, "v+(i-j) = %p\n", v+(i-j));
2384 When this code is on. then the output looks like
2386 v+(i-j) = 0xbff84000
2388 /* if evaluating constant expression, no code should be
2389 generated, so no bound check */
2390 if (tcc_state
->do_bounds_check
&& !const_wanted
) {
2391 /* if bounded pointers, we generate a special code to
2398 gen_bounded_ptr_add();
2404 /* put again type if gen_opic() swaped operands */
2407 } else if (is_float(bt1
) || is_float(bt2
)) {
2408 /* compute bigger type and do implicit casts */
2409 if (bt1
== VT_LDOUBLE
|| bt2
== VT_LDOUBLE
) {
2411 } else if (bt1
== VT_DOUBLE
|| bt2
== VT_DOUBLE
) {
2416 /* floats can only be used for a few operations */
2417 if (op
!= '+' && op
!= '-' && op
!= '*' && op
!= '/' &&
2418 (op
< TOK_ULT
|| op
> TOK_GT
))
2419 tcc_error("invalid operands for binary operation");
2421 } else if (op
== TOK_SHR
|| op
== TOK_SAR
|| op
== TOK_SHL
) {
2422 t
= bt1
== VT_LLONG
? VT_LLONG
: VT_INT
;
2423 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (t
| VT_UNSIGNED
))
2425 t
|= (VT_LONG
& t1
);
2427 } else if (bt1
== VT_LLONG
|| bt2
== VT_LLONG
) {
2428 /* cast to biggest op */
2429 t
= VT_LLONG
| VT_LONG
;
2430 if (bt1
== VT_LLONG
)
2432 if (bt2
== VT_LLONG
)
2434 /* convert to unsigned if it does not fit in a long long */
2435 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
) ||
2436 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
))
2440 /* integer operations */
2441 t
= VT_INT
| (VT_LONG
& (t1
| t2
));
2442 /* convert to unsigned if it does not fit in an integer */
2443 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
) ||
2444 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
))
2447 /* XXX: currently, some unsigned operations are explicit, so
2448 we modify them here */
2449 if (t
& VT_UNSIGNED
) {
2456 else if (op
== TOK_LT
)
2458 else if (op
== TOK_GT
)
2460 else if (op
== TOK_LE
)
2462 else if (op
== TOK_GE
)
2470 /* special case for shifts and long long: we keep the shift as
2472 if (op
== TOK_SHR
|| op
== TOK_SAR
|| op
== TOK_SHL
)
2479 if (op
>= TOK_ULT
&& op
<= TOK_GT
) {
2480 /* relational op: the result is an int */
2481 vtop
->type
.t
= VT_INT
;
2486 // Make sure that we have converted to an rvalue:
2487 if (vtop
->r
& VT_LVAL
)
2488 gv(is_float(vtop
->type
.t
& VT_BTYPE
) ? RC_FLOAT
: RC_INT
);
2491 #ifndef TCC_TARGET_ARM
2492 /* generic itof for unsigned long long case */
2493 static void gen_cvt_itof1(int t
)
2495 #ifdef TCC_TARGET_ARM64
2498 if ((vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
)) ==
2499 (VT_LLONG
| VT_UNSIGNED
)) {
2502 vpush_global_sym(&func_old_type
, TOK___floatundisf
);
2503 #if LDOUBLE_SIZE != 8
2504 else if (t
== VT_LDOUBLE
)
2505 vpush_global_sym(&func_old_type
, TOK___floatundixf
);
2508 vpush_global_sym(&func_old_type
, TOK___floatundidf
);
2512 vtop
->r
= reg_fret(t
);
2520 /* generic ftoi for unsigned long long case */
2521 static void gen_cvt_ftoi1(int t
)
2523 #ifdef TCC_TARGET_ARM64
2528 if (t
== (VT_LLONG
| VT_UNSIGNED
)) {
2529 /* not handled natively */
2530 st
= vtop
->type
.t
& VT_BTYPE
;
2532 vpush_global_sym(&func_old_type
, TOK___fixunssfdi
);
2533 #if LDOUBLE_SIZE != 8
2534 else if (st
== VT_LDOUBLE
)
2535 vpush_global_sym(&func_old_type
, TOK___fixunsxfdi
);
2538 vpush_global_sym(&func_old_type
, TOK___fixunsdfdi
);
2543 vtop
->r2
= REG_LRET
;
2550 /* force char or short cast */
2551 static void force_charshort_cast(int t
)
2555 /* cannot cast static initializers */
2556 if (STATIC_DATA_WANTED
)
2560 /* XXX: add optimization if lvalue : just change type and offset */
2565 if (t
& VT_UNSIGNED
) {
2566 vpushi((1 << bits
) - 1);
2569 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
)
2575 /* result must be signed or the SAR is converted to an SHL
2576 This was not the case when "t" was a signed short
2577 and the last value on the stack was an unsigned int */
2578 vtop
->type
.t
&= ~VT_UNSIGNED
;
2584 /* cast 'vtop' to 'type'. Casting to bitfields is forbidden. */
2585 static void gen_cast_s(int t
)
2593 static void gen_cast(CType
*type
)
2595 int sbt
, dbt
, sf
, df
, c
, p
;
2597 /* special delayed cast for char/short */
2598 /* XXX: in some cases (multiple cascaded casts), it may still
2600 if (vtop
->r
& VT_MUSTCAST
) {
2601 vtop
->r
&= ~VT_MUSTCAST
;
2602 force_charshort_cast(vtop
->type
.t
);
2605 /* bitfields first get cast to ints */
2606 if (vtop
->type
.t
& VT_BITFIELD
) {
2610 dbt
= type
->t
& (VT_BTYPE
| VT_UNSIGNED
);
2611 sbt
= vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
);
2616 c
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2617 p
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == (VT_CONST
| VT_SYM
);
2618 #if !defined TCC_IS_NATIVE && !defined TCC_IS_NATIVE_387
2619 c
&= dbt
!= VT_LDOUBLE
;
2622 /* constant case: we can do it now */
2623 /* XXX: in ISOC, cannot do it if error in convert */
2624 if (sbt
== VT_FLOAT
)
2625 vtop
->c
.ld
= vtop
->c
.f
;
2626 else if (sbt
== VT_DOUBLE
)
2627 vtop
->c
.ld
= vtop
->c
.d
;
2630 if ((sbt
& VT_BTYPE
) == VT_LLONG
) {
2631 if ((sbt
& VT_UNSIGNED
) || !(vtop
->c
.i
>> 63))
2632 vtop
->c
.ld
= vtop
->c
.i
;
2634 vtop
->c
.ld
= -(long double)-vtop
->c
.i
;
2636 if ((sbt
& VT_UNSIGNED
) || !(vtop
->c
.i
>> 31))
2637 vtop
->c
.ld
= (uint32_t)vtop
->c
.i
;
2639 vtop
->c
.ld
= -(long double)-(uint32_t)vtop
->c
.i
;
2642 if (dbt
== VT_FLOAT
)
2643 vtop
->c
.f
= (float)vtop
->c
.ld
;
2644 else if (dbt
== VT_DOUBLE
)
2645 vtop
->c
.d
= (double)vtop
->c
.ld
;
2646 } else if (sf
&& dbt
== (VT_LLONG
|VT_UNSIGNED
)) {
2647 vtop
->c
.i
= vtop
->c
.ld
;
2648 } else if (sf
&& dbt
== VT_BOOL
) {
2649 vtop
->c
.i
= (vtop
->c
.ld
!= 0);
2652 vtop
->c
.i
= vtop
->c
.ld
;
2653 else if (sbt
== (VT_LLONG
|VT_UNSIGNED
))
2655 else if (sbt
& VT_UNSIGNED
)
2656 vtop
->c
.i
= (uint32_t)vtop
->c
.i
;
2658 else if (sbt
== VT_PTR
)
2661 else if (sbt
!= VT_LLONG
)
2662 vtop
->c
.i
= ((uint32_t)vtop
->c
.i
|
2663 -(vtop
->c
.i
& 0x80000000));
2665 if (dbt
== (VT_LLONG
|VT_UNSIGNED
))
2667 else if (dbt
== VT_BOOL
)
2668 vtop
->c
.i
= (vtop
->c
.i
!= 0);
2670 else if (dbt
== VT_PTR
)
2673 else if (dbt
!= VT_LLONG
) {
2674 uint32_t m
= ((dbt
& VT_BTYPE
) == VT_BYTE
? 0xff :
2675 (dbt
& VT_BTYPE
) == VT_SHORT
? 0xffff :
2678 if (!(dbt
& VT_UNSIGNED
))
2679 vtop
->c
.i
|= -(vtop
->c
.i
& ((m
>> 1) + 1));
2682 } else if (p
&& dbt
== VT_BOOL
) {
2686 /* non constant case: generate code */
2688 /* convert from fp to fp */
2691 /* convert int to fp */
2694 /* convert fp to int */
2695 if (dbt
== VT_BOOL
) {
2699 /* we handle char/short/etc... with generic code */
2700 if (dbt
!= (VT_INT
| VT_UNSIGNED
) &&
2701 dbt
!= (VT_LLONG
| VT_UNSIGNED
) &&
2705 if (dbt
== VT_INT
&& (type
->t
& (VT_BTYPE
| VT_UNSIGNED
)) != dbt
) {
2706 /* additional cast for char/short... */
2712 } else if ((dbt
& VT_BTYPE
) == VT_LLONG
) {
2713 if ((sbt
& VT_BTYPE
) != VT_LLONG
) {
2714 /* scalar to long long */
2715 /* machine independent conversion */
2717 /* generate high word */
2718 if (sbt
== (VT_INT
| VT_UNSIGNED
)) {
2722 if (sbt
== VT_PTR
) {
2723 /* cast from pointer to int before we apply
2724 shift operation, which pointers don't support*/
2731 /* patch second register */
2732 vtop
[-1].r2
= vtop
->r
;
2736 } else if ((dbt
& VT_BTYPE
) == VT_LLONG
||
2737 (dbt
& VT_BTYPE
) == VT_PTR
||
2738 (dbt
& VT_BTYPE
) == VT_FUNC
) {
2739 if ((sbt
& VT_BTYPE
) != VT_LLONG
&&
2740 (sbt
& VT_BTYPE
) != VT_PTR
&&
2741 (sbt
& VT_BTYPE
) != VT_FUNC
) {
2742 /* need to convert from 32bit to 64bit */
2744 if (sbt
!= (VT_INT
| VT_UNSIGNED
)) {
2745 #if defined(TCC_TARGET_ARM64)
2747 #elif defined(TCC_TARGET_X86_64)
2749 /* x86_64 specific: movslq */
2751 o(0xc0 + (REG_VALUE(r
) << 3) + REG_VALUE(r
));
2758 } else if (dbt
== VT_BOOL
) {
2759 /* scalar to bool */
2762 } else if ((dbt
& VT_BTYPE
) == VT_BYTE
||
2763 (dbt
& VT_BTYPE
) == VT_SHORT
) {
2764 if (sbt
== VT_PTR
) {
2765 vtop
->type
.t
= VT_INT
;
2766 tcc_warning("nonportable conversion from pointer to char/short");
2768 force_charshort_cast(dbt
);
2769 } else if ((dbt
& VT_BTYPE
) == VT_INT
) {
2771 if ((sbt
& VT_BTYPE
) == VT_LLONG
) {
2773 /* from long long: just take low order word */
2778 vtop
->type
.t
|= VT_UNSIGNED
;
2782 /* if lvalue and single word type, nothing to do because
2783 the lvalue already contains the real type size (see
2784 VT_LVAL_xxx constants) */
2787 } else if ((dbt
& VT_BTYPE
) == VT_PTR
&& !(vtop
->r
& VT_LVAL
)) {
2788 /* if we are casting between pointer types,
2789 we must update the VT_LVAL_xxx size */
2790 vtop
->r
= (vtop
->r
& ~VT_LVAL_TYPE
)
2791 | (lvalue_type(type
->ref
->type
.t
) & VT_LVAL_TYPE
);
2794 vtop
->type
.t
&= ~ ( VT_CONSTANT
| VT_VOLATILE
| VT_ARRAY
);
2797 /* return type size as known at compile time. Put alignment at 'a' */
2798 ST_FUNC
int type_size(CType
*type
, int *a
)
2803 bt
= type
->t
& VT_BTYPE
;
2804 if (bt
== VT_STRUCT
) {
2809 } else if (bt
== VT_PTR
) {
2810 if (type
->t
& VT_ARRAY
) {
2814 ts
= type_size(&s
->type
, a
);
2816 if (ts
< 0 && s
->c
< 0)
2824 } else if (IS_ENUM(type
->t
) && type
->ref
->c
< 0) {
2825 return -1; /* incomplete enum */
2826 } else if (bt
== VT_LDOUBLE
) {
2828 return LDOUBLE_SIZE
;
2829 } else if (bt
== VT_DOUBLE
|| bt
== VT_LLONG
) {
2830 #ifdef TCC_TARGET_I386
2831 #ifdef TCC_TARGET_PE
2836 #elif defined(TCC_TARGET_ARM)
2846 } else if (bt
== VT_INT
|| bt
== VT_FLOAT
) {
2849 } else if (bt
== VT_SHORT
) {
2852 } else if (bt
== VT_QLONG
|| bt
== VT_QFLOAT
) {
2856 /* char, void, function, _Bool */
2862 /* push type size as known at runtime time on top of value stack. Put
2864 ST_FUNC
void vla_runtime_type_size(CType
*type
, int *a
)
2866 if (type
->t
& VT_VLA
) {
2867 type_size(&type
->ref
->type
, a
);
2868 vset(&int_type
, VT_LOCAL
|VT_LVAL
, type
->ref
->c
);
2870 vpushi(type_size(type
, a
));
2874 static void vla_sp_restore(void) {
2875 if (vlas_in_scope
) {
2876 gen_vla_sp_restore(vla_sp_loc
);
2880 static void vla_sp_restore_root(void) {
2881 if (vlas_in_scope
) {
2882 gen_vla_sp_restore(vla_sp_root_loc
);
2886 /* return the pointed type of t */
2887 static inline CType
*pointed_type(CType
*type
)
2889 return &type
->ref
->type
;
2892 /* modify type so that its it is a pointer to type. */
2893 ST_FUNC
void mk_pointer(CType
*type
)
2896 s
= sym_push(SYM_FIELD
, type
, 0, -1);
2897 type
->t
= VT_PTR
| (type
->t
& VT_STORAGE
);
2901 /* compare function types. OLD functions match any new functions */
2902 static int is_compatible_func(CType
*type1
, CType
*type2
)
2908 if (s1
->f
.func_call
!= s2
->f
.func_call
)
2910 if (s1
->f
.func_type
!= s2
->f
.func_type
2911 && s1
->f
.func_type
!= FUNC_OLD
2912 && s2
->f
.func_type
!= FUNC_OLD
)
2914 /* we should check the function return type for FUNC_OLD too
2915 but that causes problems with the internally used support
2916 functions such as TOK_memmove */
2917 if (s1
->f
.func_type
== FUNC_OLD
&& !s1
->next
)
2919 if (s2
->f
.func_type
== FUNC_OLD
&& !s2
->next
)
2922 if (!is_compatible_unqualified_types(&s1
->type
, &s2
->type
))
2933 /* return true if type1 and type2 are the same. If unqualified is
2934 true, qualifiers on the types are ignored.
2936 static int compare_types(CType
*type1
, CType
*type2
, int unqualified
)
2940 t1
= type1
->t
& VT_TYPE
;
2941 t2
= type2
->t
& VT_TYPE
;
2943 /* strip qualifiers before comparing */
2944 t1
&= ~(VT_CONSTANT
| VT_VOLATILE
);
2945 t2
&= ~(VT_CONSTANT
| VT_VOLATILE
);
2948 /* Default Vs explicit signedness only matters for char */
2949 if ((t1
& VT_BTYPE
) != VT_BYTE
) {
2953 /* XXX: bitfields ? */
2958 && !(type1
->ref
->c
< 0
2959 || type2
->ref
->c
< 0
2960 || type1
->ref
->c
== type2
->ref
->c
))
2963 /* test more complicated cases */
2964 bt1
= t1
& VT_BTYPE
;
2965 if (bt1
== VT_PTR
) {
2966 type1
= pointed_type(type1
);
2967 type2
= pointed_type(type2
);
2968 return is_compatible_types(type1
, type2
);
2969 } else if (bt1
== VT_STRUCT
) {
2970 return (type1
->ref
== type2
->ref
);
2971 } else if (bt1
== VT_FUNC
) {
2972 return is_compatible_func(type1
, type2
);
2973 } else if (IS_ENUM(type1
->t
) || IS_ENUM(type2
->t
)) {
2974 return type1
->ref
== type2
->ref
;
2980 /* return true if type1 and type2 are exactly the same (including
2983 static int is_compatible_types(CType
*type1
, CType
*type2
)
2985 return compare_types(type1
,type2
,0);
2988 /* return true if type1 and type2 are the same (ignoring qualifiers).
2990 static int is_compatible_unqualified_types(CType
*type1
, CType
*type2
)
2992 return compare_types(type1
,type2
,1);
2995 /* print a type. If 'varstr' is not NULL, then the variable is also
2996 printed in the type */
2998 /* XXX: add array and function pointers */
2999 static void type_to_str(char *buf
, int buf_size
,
3000 CType
*type
, const char *varstr
)
3012 pstrcat(buf
, buf_size
, "extern ");
3014 pstrcat(buf
, buf_size
, "static ");
3016 pstrcat(buf
, buf_size
, "typedef ");
3018 pstrcat(buf
, buf_size
, "inline ");
3019 if (t
& VT_VOLATILE
)
3020 pstrcat(buf
, buf_size
, "volatile ");
3021 if (t
& VT_CONSTANT
)
3022 pstrcat(buf
, buf_size
, "const ");
3024 if (((t
& VT_DEFSIGN
) && bt
== VT_BYTE
)
3025 || ((t
& VT_UNSIGNED
)
3026 && (bt
== VT_SHORT
|| bt
== VT_INT
|| bt
== VT_LLONG
)
3029 pstrcat(buf
, buf_size
, (t
& VT_UNSIGNED
) ? "unsigned " : "signed ");
3031 buf_size
-= strlen(buf
);
3066 tstr
= "long double";
3068 pstrcat(buf
, buf_size
, tstr
);
3075 pstrcat(buf
, buf_size
, tstr
);
3076 v
= type
->ref
->v
& ~SYM_STRUCT
;
3077 if (v
>= SYM_FIRST_ANOM
)
3078 pstrcat(buf
, buf_size
, "<anonymous>");
3080 pstrcat(buf
, buf_size
, get_tok_str(v
, NULL
));
3085 if (varstr
&& '*' == *varstr
) {
3086 pstrcat(buf1
, sizeof(buf1
), "(");
3087 pstrcat(buf1
, sizeof(buf1
), varstr
);
3088 pstrcat(buf1
, sizeof(buf1
), ")");
3090 pstrcat(buf1
, buf_size
, "(");
3092 while (sa
!= NULL
) {
3094 type_to_str(buf2
, sizeof(buf2
), &sa
->type
, NULL
);
3095 pstrcat(buf1
, sizeof(buf1
), buf2
);
3098 pstrcat(buf1
, sizeof(buf1
), ", ");
3100 if (s
->f
.func_type
== FUNC_ELLIPSIS
)
3101 pstrcat(buf1
, sizeof(buf1
), ", ...");
3102 pstrcat(buf1
, sizeof(buf1
), ")");
3103 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
3108 if (varstr
&& '*' == *varstr
)
3109 snprintf(buf1
, sizeof(buf1
), "(%s)[%d]", varstr
, s
->c
);
3111 snprintf(buf1
, sizeof(buf1
), "%s[%d]", varstr
? varstr
: "", s
->c
);
3112 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
3115 pstrcpy(buf1
, sizeof(buf1
), "*");
3116 if (t
& VT_CONSTANT
)
3117 pstrcat(buf1
, buf_size
, "const ");
3118 if (t
& VT_VOLATILE
)
3119 pstrcat(buf1
, buf_size
, "volatile ");
3121 pstrcat(buf1
, sizeof(buf1
), varstr
);
3122 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
3126 pstrcat(buf
, buf_size
, " ");
3127 pstrcat(buf
, buf_size
, varstr
);
3132 /* verify type compatibility to store vtop in 'dt' type, and generate
3134 static void gen_assign_cast(CType
*dt
)
3136 CType
*st
, *type1
, *type2
;
3137 char buf1
[256], buf2
[256];
3138 int dbt
, sbt
, qualwarn
, lvl
;
3140 st
= &vtop
->type
; /* source type */
3141 dbt
= dt
->t
& VT_BTYPE
;
3142 sbt
= st
->t
& VT_BTYPE
;
3143 if (sbt
== VT_VOID
|| dbt
== VT_VOID
) {
3144 if (sbt
== VT_VOID
&& dbt
== VT_VOID
)
3145 ; /* It is Ok if both are void */
3147 tcc_error("cannot cast from/to void");
3149 if (dt
->t
& VT_CONSTANT
)
3150 tcc_warning("assignment of read-only location");
3153 /* special cases for pointers */
3154 /* '0' can also be a pointer */
3155 if (is_null_pointer(vtop
))
3157 /* accept implicit pointer to integer cast with warning */
3158 if (is_integer_btype(sbt
)) {
3159 tcc_warning("assignment makes pointer from integer without a cast");
3162 type1
= pointed_type(dt
);
3164 type2
= pointed_type(st
);
3165 else if (sbt
== VT_FUNC
)
3166 type2
= st
; /* a function is implicitly a function pointer */
3169 if (is_compatible_types(type1
, type2
))
3171 for (qualwarn
= lvl
= 0;; ++lvl
) {
3172 if (((type2
->t
& VT_CONSTANT
) && !(type1
->t
& VT_CONSTANT
)) ||
3173 ((type2
->t
& VT_VOLATILE
) && !(type1
->t
& VT_VOLATILE
)))
3175 dbt
= type1
->t
& (VT_BTYPE
|VT_LONG
);
3176 sbt
= type2
->t
& (VT_BTYPE
|VT_LONG
);
3177 if (dbt
!= VT_PTR
|| sbt
!= VT_PTR
)
3179 type1
= pointed_type(type1
);
3180 type2
= pointed_type(type2
);
3182 if (!is_compatible_unqualified_types(type1
, type2
)) {
3183 if ((dbt
== VT_VOID
|| sbt
== VT_VOID
) && lvl
== 0) {
3184 /* void * can match anything */
3185 } else if (dbt
== sbt
3186 && is_integer_btype(sbt
& VT_BTYPE
)
3187 && IS_ENUM(type1
->t
) + IS_ENUM(type2
->t
)
3188 + !!((type1
->t
^ type2
->t
) & VT_UNSIGNED
) < 2) {
3189 /* Like GCC don't warn by default for merely changes
3190 in pointer target signedness. Do warn for different
3191 base types, though, in particular for unsigned enums
3192 and signed int targets. */
3194 tcc_warning("assignment from incompatible pointer type");
3199 tcc_warning("assignment discards qualifiers from pointer target type");
3205 if (sbt
== VT_PTR
|| sbt
== VT_FUNC
) {
3206 tcc_warning("assignment makes integer from pointer without a cast");
3207 } else if (sbt
== VT_STRUCT
) {
3208 goto case_VT_STRUCT
;
3210 /* XXX: more tests */
3214 if (!is_compatible_unqualified_types(dt
, st
)) {
3216 type_to_str(buf1
, sizeof(buf1
), st
, NULL
);
3217 type_to_str(buf2
, sizeof(buf2
), dt
, NULL
);
3218 tcc_error("cannot cast '%s' to '%s'", buf1
, buf2
);
3225 /* store vtop in lvalue pushed on stack */
3226 ST_FUNC
void vstore(void)
3228 int sbt
, dbt
, ft
, r
, t
, size
, align
, bit_size
, bit_pos
, rc
, delayed_cast
;
3230 ft
= vtop
[-1].type
.t
;
3231 sbt
= vtop
->type
.t
& VT_BTYPE
;
3232 dbt
= ft
& VT_BTYPE
;
3233 if ((((sbt
== VT_INT
|| sbt
== VT_SHORT
) && dbt
== VT_BYTE
) ||
3234 (sbt
== VT_INT
&& dbt
== VT_SHORT
))
3235 && !(vtop
->type
.t
& VT_BITFIELD
)) {
3236 /* optimize char/short casts */
3237 delayed_cast
= VT_MUSTCAST
;
3238 vtop
->type
.t
= ft
& VT_TYPE
;
3239 /* XXX: factorize */
3240 if (ft
& VT_CONSTANT
)
3241 tcc_warning("assignment of read-only location");
3244 if (!(ft
& VT_BITFIELD
))
3245 gen_assign_cast(&vtop
[-1].type
);
3248 if (sbt
== VT_STRUCT
) {
3249 /* if structure, only generate pointer */
3250 /* structure assignment : generate memcpy */
3251 /* XXX: optimize if small size */
3252 size
= type_size(&vtop
->type
, &align
);
3256 vtop
->type
.t
= VT_PTR
;
3259 /* address of memcpy() */
3262 vpush_global_sym(&func_old_type
, TOK_memcpy8
);
3263 else if(!(align
& 3))
3264 vpush_global_sym(&func_old_type
, TOK_memcpy4
);
3267 /* Use memmove, rather than memcpy, as dest and src may be same: */
3268 vpush_global_sym(&func_old_type
, TOK_memmove
);
3273 vtop
->type
.t
= VT_PTR
;
3279 /* leave source on stack */
3280 } else if (ft
& VT_BITFIELD
) {
3281 /* bitfield store handling */
3283 /* save lvalue as expression result (example: s.b = s.a = n;) */
3284 vdup(), vtop
[-1] = vtop
[-2];
3286 bit_pos
= BIT_POS(ft
);
3287 bit_size
= BIT_SIZE(ft
);
3288 /* remove bit field info to avoid loops */
3289 vtop
[-1].type
.t
= ft
& ~VT_STRUCT_MASK
;
3291 if ((ft
& VT_BTYPE
) == VT_BOOL
) {
3292 gen_cast(&vtop
[-1].type
);
3293 vtop
[-1].type
.t
= (vtop
[-1].type
.t
& ~VT_BTYPE
) | (VT_BYTE
| VT_UNSIGNED
);
3296 r
= adjust_bf(vtop
- 1, bit_pos
, bit_size
);
3297 if (r
== VT_STRUCT
) {
3298 gen_cast_s((ft
& VT_BTYPE
) == VT_LLONG
? VT_LLONG
: VT_INT
);
3299 store_packed_bf(bit_pos
, bit_size
);
3301 unsigned long long mask
= (1ULL << bit_size
) - 1;
3302 if ((ft
& VT_BTYPE
) != VT_BOOL
) {
3304 if ((vtop
[-1].type
.t
& VT_BTYPE
) == VT_LLONG
)
3307 vpushi((unsigned)mask
);
3314 /* duplicate destination */
3317 /* load destination, mask and or with source */
3318 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
)
3319 vpushll(~(mask
<< bit_pos
));
3321 vpushi(~((unsigned)mask
<< bit_pos
));
3326 /* ... and discard */
3329 } else if (dbt
== VT_VOID
) {
3332 #ifdef CONFIG_TCC_BCHECK
3333 /* bound check case */
3334 if (vtop
[-1].r
& VT_MUSTBOUND
) {
3343 #ifdef TCC_TARGET_X86_64
3344 if ((ft
& VT_BTYPE
) == VT_LDOUBLE
) {
3346 } else if ((ft
& VT_BTYPE
) == VT_QFLOAT
) {
3351 r
= gv(rc
); /* generate value */
3352 /* if lvalue was saved on stack, must read it */
3353 if ((vtop
[-1].r
& VT_VALMASK
) == VT_LLOCAL
) {
3355 t
= get_reg(RC_INT
);
3361 sv
.r
= VT_LOCAL
| VT_LVAL
;
3362 sv
.c
.i
= vtop
[-1].c
.i
;
3364 vtop
[-1].r
= t
| VT_LVAL
;
3366 /* two word case handling : store second register at word + 4 (or +8 for x86-64) */
3368 if (((ft
& VT_BTYPE
) == VT_QLONG
) || ((ft
& VT_BTYPE
) == VT_QFLOAT
)) {
3369 int addr_type
= VT_LLONG
, load_size
= 8, load_type
= ((vtop
->type
.t
& VT_BTYPE
) == VT_QLONG
) ? VT_LLONG
: VT_DOUBLE
;
3371 if ((ft
& VT_BTYPE
) == VT_LLONG
) {
3372 int addr_type
= VT_INT
, load_size
= 4, load_type
= VT_INT
;
3374 vtop
[-1].type
.t
= load_type
;
3377 /* convert to int to increment easily */
3378 vtop
->type
.t
= addr_type
;
3384 vtop
[-1].type
.t
= load_type
;
3385 /* XXX: it works because r2 is spilled last ! */
3386 store(vtop
->r2
, vtop
- 1);
3392 vtop
--; /* NOT vpop() because on x86 it would flush the fp stack */
3393 vtop
->r
|= delayed_cast
;
/* NOTE(review): damaged extraction — original lines are token-split, the
   original file's line numbers are fused into the text, and interior lines
   (orig. 3399-3400, 3402, 3404-3407, 3409, 3411 and the closing brace) are
   missing.  Code kept byte-identical; comments only. */
/* Emits a pre/post increment or decrement of the lvalue on the vstack. */
3397 /* post defines POST/PRE add. c is the token ++ or -- */
3398 ST_FUNC
void inc(int post
, int c
)
3401 vdup(); /* save lvalue */
3403 gv_dup(); /* duplicate value */
/* push the delta derived from the token; presumably +1 for TOK_INC and -1
   for TOK_DEC — the arithmetic lines between these fragments are missing,
   so this cannot be confirmed from the capture alone. */
3408 vpushi(c
- TOK_MID
);
3410 vstore(); /* store value */
3412 vpop(); /* if post op, return saved value */
/* NOTE(review): damaged extraction — token-split lines with fused original
   line numbers; orig. 3416, 3418-3420, 3424-3425 and the closing brace are
   missing.  Code kept byte-identical; comments only. */
/* Concatenates a sequence of adjacent string-literal tokens into ASTR and
   NUL-terminates the result.  MSG is visibly unused in the surviving
   fragment; presumably it feeds an error message on a missing string —
   TODO confirm against the full source. */
3415 ST_FUNC
void parse_mult_str (CString
*astr
, const char *msg
)
3417 /* read the string */
3421 while (tok
== TOK_STR
) {
3422 /* XXX: add \0 handling too ? */
/* append the current string token's bytes; -1 length per cstr_cat's
   convention (presumably "until NUL") — confirm against cstr_cat. */
3423 cstr_cat(astr
, tokc
.str
.data
, -1);
3426 cstr_ccat(astr
, '\0');
/* NOTE(review): damaged extraction — orig. 3432-3435 (declarations/early
   cases) and everything after 3436 are missing.  Code kept byte-identical. */
3429 /* If I is >= 1 and a power of two, returns log2(i)+1.
3430 If I is 0 returns 0. */
3431 static int exact_log2p1(int i
)
/* visible fragment: skips 8 bits at a time while i >= 256; the remaining
   bit-by-bit refinement and the return are not in this capture. */
3436 for (ret
= 1; i
>= 1 << 8; ret
+= 8)
3447 /* Parse __attribute__((...)) GNUC extension. */
3448 static void parse_attribute(AttributeDef
*ad
)
3454 if (tok
!= TOK_ATTRIBUTE1
&& tok
!= TOK_ATTRIBUTE2
)
3459 while (tok
!= ')') {
3460 if (tok
< TOK_IDENT
)
3461 expect("attribute name");
3473 tcc_warning("implicit declaration of function '%s'",
3474 get_tok_str(tok
, &tokc
));
3475 s
= external_global_sym(tok
, &func_old_type
);
3477 ad
->cleanup_func
= s
;
3485 parse_mult_str(&astr
, "section name");
3486 ad
->section
= find_section(tcc_state
, (char *)astr
.data
);
3493 parse_mult_str(&astr
, "alias(\"target\")");
3494 ad
->alias_target
= /* save string as token, for later */
3495 tok_alloc((char*)astr
.data
, astr
.size
-1)->tok
;
3499 case TOK_VISIBILITY1
:
3500 case TOK_VISIBILITY2
:
3502 parse_mult_str(&astr
,
3503 "visibility(\"default|hidden|internal|protected\")");
3504 if (!strcmp (astr
.data
, "default"))
3505 ad
->a
.visibility
= STV_DEFAULT
;
3506 else if (!strcmp (astr
.data
, "hidden"))
3507 ad
->a
.visibility
= STV_HIDDEN
;
3508 else if (!strcmp (astr
.data
, "internal"))
3509 ad
->a
.visibility
= STV_INTERNAL
;
3510 else if (!strcmp (astr
.data
, "protected"))
3511 ad
->a
.visibility
= STV_PROTECTED
;
3513 expect("visibility(\"default|hidden|internal|protected\")");
3522 if (n
<= 0 || (n
& (n
- 1)) != 0)
3523 tcc_error("alignment must be a positive power of two");
3528 ad
->a
.aligned
= exact_log2p1(n
);
3529 if (n
!= 1 << (ad
->a
.aligned
- 1))
3530 tcc_error("alignment of %d is larger than implemented", n
);
3542 /* currently, no need to handle it because tcc does not
3543 track unused objects */
3547 /* currently, no need to handle it because tcc does not
3548 track unused objects */
3553 ad
->f
.func_call
= FUNC_CDECL
;
3558 ad
->f
.func_call
= FUNC_STDCALL
;
3560 #ifdef TCC_TARGET_I386
3570 ad
->f
.func_call
= FUNC_FASTCALL1
+ n
- 1;
3576 ad
->f
.func_call
= FUNC_FASTCALLW
;
3583 ad
->attr_mode
= VT_LLONG
+ 1;
3586 ad
->attr_mode
= VT_BYTE
+ 1;
3589 ad
->attr_mode
= VT_SHORT
+ 1;
3593 ad
->attr_mode
= VT_INT
+ 1;
3596 tcc_warning("__mode__(%s) not supported\n", get_tok_str(tok
, NULL
));
3603 ad
->a
.dllexport
= 1;
3605 case TOK_NODECORATE
:
3606 ad
->a
.nodecorate
= 1;
3609 ad
->a
.dllimport
= 1;
3612 if (tcc_state
->warn_unsupported
)
3613 tcc_warning("'%s' attribute ignored", get_tok_str(t
, NULL
));
3614 /* skip parameters */
3616 int parenthesis
= 0;
3620 else if (tok
== ')')
3623 } while (parenthesis
&& tok
!= -1);
/* NOTE(review): damaged extraction — orig. 3637-3639 (opening brace and
   setup of `s`) and everything after 3644 (hit handling, cumofs update,
   return) are missing.  Code kept byte-identical; comments only. */
/* Searches the member list of struct/union TYPE for field token V,
   descending recursively into anonymous struct/union members; *cumofs
   presumably accumulates the byte offset of enclosing anonymous members —
   TODO confirm, as the updating code is not in this capture. */
3636 static Sym
* find_field (CType
*type
, int v
, int *cumofs
)
3640 while ((s
= s
->next
) != NULL
) {
/* an anonymous struct/union member: a SYM_FIELD whose type is VT_STRUCT
   and whose token is an anonymous symbol (>= SYM_FIRST_ANOM) */
3641 if ((s
->v
& SYM_FIELD
) &&
3642 (s
->type
.t
& VT_BTYPE
) == VT_STRUCT
&&
3643 (s
->v
& ~SYM_FIELD
) >= SYM_FIRST_ANOM
) {
3644 Sym
*ret
= find_field (&s
->type
, v
, cumofs
);
3656 static void struct_layout(CType
*type
, AttributeDef
*ad
)
3658 int size
, align
, maxalign
, offset
, c
, bit_pos
, bit_size
;
3659 int packed
, a
, bt
, prevbt
, prev_bit_size
;
3660 int pcc
= !tcc_state
->ms_bitfields
;
3661 int pragma_pack
= *tcc_state
->pack_stack_ptr
;
3668 prevbt
= VT_STRUCT
; /* make it never match */
3673 for (f
= type
->ref
->next
; f
; f
= f
->next
) {
3674 if (f
->type
.t
& VT_BITFIELD
)
3675 bit_size
= BIT_SIZE(f
->type
.t
);
3678 size
= type_size(&f
->type
, &align
);
3679 a
= f
->a
.aligned
? 1 << (f
->a
.aligned
- 1) : 0;
3682 if (pcc
&& bit_size
== 0) {
3683 /* in pcc mode, packing does not affect zero-width bitfields */
3686 /* in pcc mode, attribute packed overrides if set. */
3687 if (pcc
&& (f
->a
.packed
|| ad
->a
.packed
))
3690 /* pragma pack overrides align if lesser and packs bitfields always */
3693 if (pragma_pack
< align
)
3694 align
= pragma_pack
;
3695 /* in pcc mode pragma pack also overrides individual align */
3696 if (pcc
&& pragma_pack
< a
)
3700 /* some individual align was specified */
3704 if (type
->ref
->type
.t
== VT_UNION
) {
3705 if (pcc
&& bit_size
>= 0)
3706 size
= (bit_size
+ 7) >> 3;
3711 } else if (bit_size
< 0) {
3713 c
+= (bit_pos
+ 7) >> 3;
3714 c
= (c
+ align
- 1) & -align
;
3723 /* A bit-field. Layout is more complicated. There are two
3724 options: PCC (GCC) compatible and MS compatible */
3726 /* In PCC layout a bit-field is placed adjacent to the
3727 preceding bit-fields, except if:
3729 - an individual alignment was given
3730 - it would overflow its base type container and
3731 there is no packing */
3732 if (bit_size
== 0) {
3734 c
= (c
+ ((bit_pos
+ 7) >> 3) + align
- 1) & -align
;
3736 } else if (f
->a
.aligned
) {
3738 } else if (!packed
) {
3740 int ofs
= ((c
* 8 + bit_pos
) % a8
+ bit_size
+ a8
- 1) / a8
;
3741 if (ofs
> size
/ align
)
3745 /* in pcc mode, long long bitfields have type int if they fit */
3746 if (size
== 8 && bit_size
<= 32)
3747 f
->type
.t
= (f
->type
.t
& ~VT_BTYPE
) | VT_INT
, size
= 4;
3749 while (bit_pos
>= align
* 8)
3750 c
+= align
, bit_pos
-= align
* 8;
3753 /* In PCC layout named bit-fields influence the alignment
3754 of the containing struct using the base types alignment,
3755 except for packed fields (which here have correct align). */
3756 if (f
->v
& SYM_FIRST_ANOM
3757 // && bit_size // ??? gcc on ARM/rpi does that
3762 bt
= f
->type
.t
& VT_BTYPE
;
3763 if ((bit_pos
+ bit_size
> size
* 8)
3764 || (bit_size
> 0) == (bt
!= prevbt
)
3766 c
= (c
+ align
- 1) & -align
;
3769 /* In MS bitfield mode a bit-field run always uses
3770 at least as many bits as the underlying type.
3771 To start a new run it's also required that this
3772 or the last bit-field had non-zero width. */
3773 if (bit_size
|| prev_bit_size
)
3776 /* In MS layout the records alignment is normally
3777 influenced by the field, except for a zero-width
3778 field at the start of a run (but by further zero-width
3779 fields it is again). */
3780 if (bit_size
== 0 && prevbt
!= bt
)
3783 prev_bit_size
= bit_size
;
3786 f
->type
.t
= (f
->type
.t
& ~(0x3f << VT_STRUCT_SHIFT
))
3787 | (bit_pos
<< VT_STRUCT_SHIFT
);
3788 bit_pos
+= bit_size
;
3790 if (align
> maxalign
)
3794 printf("set field %s offset %-2d size %-2d align %-2d",
3795 get_tok_str(f
->v
& ~SYM_FIELD
, NULL
), offset
, size
, align
);
3796 if (f
->type
.t
& VT_BITFIELD
) {
3797 printf(" pos %-2d bits %-2d",
3810 c
+= (bit_pos
+ 7) >> 3;
3812 /* store size and alignment */
3813 a
= bt
= ad
->a
.aligned
? 1 << (ad
->a
.aligned
- 1) : 1;
3817 if (pragma_pack
&& pragma_pack
< maxalign
&& 0 == pcc
) {
3818 /* can happen if individual align for some member was given. In
3819 this case MSVC ignores maxalign when aligning the size */
3824 c
= (c
+ a
- 1) & -a
;
3828 printf("struct size %-2d align %-2d\n\n", c
, a
), fflush(stdout
);
3831 /* check whether we can access bitfields by their type */
3832 for (f
= type
->ref
->next
; f
; f
= f
->next
) {
3836 if (0 == (f
->type
.t
& VT_BITFIELD
))
3840 bit_size
= BIT_SIZE(f
->type
.t
);
3843 bit_pos
= BIT_POS(f
->type
.t
);
3844 size
= type_size(&f
->type
, &align
);
3845 if (bit_pos
+ bit_size
<= size
* 8 && f
->c
+ size
<= c
)
3848 /* try to access the field using a different type */
3849 c0
= -1, s
= align
= 1;
3851 px
= f
->c
* 8 + bit_pos
;
3852 cx
= (px
>> 3) & -align
;
3853 px
= px
- (cx
<< 3);
3856 s
= (px
+ bit_size
+ 7) >> 3;
3866 s
= type_size(&t
, &align
);
3870 if (px
+ bit_size
<= s
* 8 && cx
+ s
<= c
) {
3871 /* update offset and bit position */
3874 f
->type
.t
= (f
->type
.t
& ~(0x3f << VT_STRUCT_SHIFT
))
3875 | (bit_pos
<< VT_STRUCT_SHIFT
);
3879 printf("FIX field %s offset %-2d size %-2d align %-2d "
3880 "pos %-2d bits %-2d\n",
3881 get_tok_str(f
->v
& ~SYM_FIELD
, NULL
),
3882 cx
, s
, align
, px
, bit_size
);
3885 /* fall back to load/store single-byte wise */
3886 f
->auxtype
= VT_STRUCT
;
3888 printf("FIX field %s : load byte-wise\n",
3889 get_tok_str(f
->v
& ~SYM_FIELD
, NULL
));
3895 /* enum/struct/union declaration. u is VT_ENUM/VT_STRUCT/VT_UNION */
3896 static void struct_decl(CType
*type
, int u
)
3898 int v
, c
, size
, align
, flexible
;
3899 int bit_size
, bsize
, bt
;
3901 AttributeDef ad
, ad1
;
3904 memset(&ad
, 0, sizeof ad
);
3906 parse_attribute(&ad
);
3910 /* struct already defined ? return it */
3912 expect("struct/union/enum name");
3914 if (s
&& (s
->sym_scope
== local_scope
|| tok
!= '{')) {
3917 if (u
== VT_ENUM
&& IS_ENUM(s
->type
.t
))
3919 tcc_error("redefinition of '%s'", get_tok_str(v
, NULL
));
3924 /* Record the original enum/struct/union token. */
3925 type1
.t
= u
== VT_ENUM
? u
| VT_INT
| VT_UNSIGNED
: u
;
3927 /* we put an undefined size for struct/union */
3928 s
= sym_push(v
| SYM_STRUCT
, &type1
, 0, -1);
3929 s
->r
= 0; /* default alignment is zero as gcc */
3931 type
->t
= s
->type
.t
;
3937 tcc_error("struct/union/enum already defined");
3939 /* cannot be empty */
3940 /* non empty enums are not allowed */
3943 long long ll
= 0, pl
= 0, nl
= 0;
3946 /* enum symbols have static storage */
3947 t
.t
= VT_INT
|VT_STATIC
|VT_ENUM_VAL
;
3951 expect("identifier");
3953 if (ss
&& !local_stack
)
3954 tcc_error("redefinition of enumerator '%s'",
3955 get_tok_str(v
, NULL
));
3959 ll
= expr_const64();
3961 ss
= sym_push(v
, &t
, VT_CONST
, 0);
3963 *ps
= ss
, ps
= &ss
->next
;
3972 /* NOTE: we accept a trailing comma */
3977 /* set integral type of the enum */
3980 if (pl
!= (unsigned)pl
)
3981 t
.t
= (LONG_SIZE
==8 ? VT_LLONG
|VT_LONG
: VT_LLONG
);
3983 } else if (pl
!= (int)pl
|| nl
!= (int)nl
)
3984 t
.t
= (LONG_SIZE
==8 ? VT_LLONG
|VT_LONG
: VT_LLONG
);
3985 s
->type
.t
= type
->t
= t
.t
| VT_ENUM
;
3987 /* set type for enum members */
3988 for (ss
= s
->next
; ss
; ss
= ss
->next
) {
3990 if (ll
== (int)ll
) /* default is int if it fits */
3992 if (t
.t
& VT_UNSIGNED
) {
3993 ss
->type
.t
|= VT_UNSIGNED
;
3994 if (ll
== (unsigned)ll
)
3997 ss
->type
.t
= (ss
->type
.t
& ~VT_BTYPE
)
3998 | (LONG_SIZE
==8 ? VT_LLONG
|VT_LONG
: VT_LLONG
);
4003 while (tok
!= '}') {
4004 if (!parse_btype(&btype
, &ad1
)) {
4010 tcc_error("flexible array member '%s' not at the end of struct",
4011 get_tok_str(v
, NULL
));
4017 type_decl(&type1
, &ad1
, &v
, TYPE_DIRECT
);
4019 if ((type1
.t
& VT_BTYPE
) != VT_STRUCT
)
4020 expect("identifier");
4022 int v
= btype
.ref
->v
;
4023 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
4024 if (tcc_state
->ms_extensions
== 0)
4025 expect("identifier");
4029 if (type_size(&type1
, &align
) < 0) {
4030 if ((u
== VT_STRUCT
) && (type1
.t
& VT_ARRAY
) && c
)
4033 tcc_error("field '%s' has incomplete type",
4034 get_tok_str(v
, NULL
));
4036 if ((type1
.t
& VT_BTYPE
) == VT_FUNC
||
4037 (type1
.t
& VT_BTYPE
) == VT_VOID
||
4038 (type1
.t
& VT_STORAGE
))
4039 tcc_error("invalid type for '%s'",
4040 get_tok_str(v
, NULL
));
4044 bit_size
= expr_const();
4045 /* XXX: handle v = 0 case for messages */
4047 tcc_error("negative width in bit-field '%s'",
4048 get_tok_str(v
, NULL
));
4049 if (v
&& bit_size
== 0)
4050 tcc_error("zero width for bit-field '%s'",
4051 get_tok_str(v
, NULL
));
4052 parse_attribute(&ad1
);
4054 size
= type_size(&type1
, &align
);
4055 if (bit_size
>= 0) {
4056 bt
= type1
.t
& VT_BTYPE
;
4062 tcc_error("bitfields must have scalar type");
4064 if (bit_size
> bsize
) {
4065 tcc_error("width of '%s' exceeds its type",
4066 get_tok_str(v
, NULL
));
4067 } else if (bit_size
== bsize
4068 && !ad
.a
.packed
&& !ad1
.a
.packed
) {
4069 /* no need for bit fields */
4071 } else if (bit_size
== 64) {
4072 tcc_error("field width 64 not implemented");
4074 type1
.t
= (type1
.t
& ~VT_STRUCT_MASK
)
4076 | (bit_size
<< (VT_STRUCT_SHIFT
+ 6));
4079 if (v
!= 0 || (type1
.t
& VT_BTYPE
) == VT_STRUCT
) {
4080 /* Remember we've seen a real field to check
4081 for placement of flexible array member. */
4084 /* If member is a struct or bit-field, enforce
4085 placing into the struct (as anonymous). */
4087 ((type1
.t
& VT_BTYPE
) == VT_STRUCT
||
4092 ss
= sym_push(v
| SYM_FIELD
, &type1
, 0, 0);
4097 if (tok
== ';' || tok
== TOK_EOF
)
4104 parse_attribute(&ad
);
4105 struct_layout(type
, &ad
);
4110 static void sym_to_attr(AttributeDef
*ad
, Sym
*s
)
4112 merge_symattr(&ad
->a
, &s
->a
);
4113 merge_funcattr(&ad
->f
, &s
->f
);
4116 /* Add type qualifiers to a type. If the type is an array then the qualifiers
4117 are added to the element type, copied because it could be a typedef. */
4118 static void parse_btype_qualify(CType
*type
, int qualifiers
)
4120 while (type
->t
& VT_ARRAY
) {
4121 type
->ref
= sym_push(SYM_FIELD
, &type
->ref
->type
, 0, type
->ref
->c
);
4122 type
= &type
->ref
->type
;
4124 type
->t
|= qualifiers
;
4127 /* return 0 if no type declaration. otherwise, return the basic type
4130 static int parse_btype(CType
*type
, AttributeDef
*ad
)
4132 int t
, u
, bt
, st
, type_found
, typespec_found
, g
;
4136 memset(ad
, 0, sizeof(AttributeDef
));
4146 /* currently, we really ignore extension */
4156 if (u
== VT_SHORT
|| u
== VT_LONG
) {
4157 if (st
!= -1 || (bt
!= -1 && bt
!= VT_INT
))
4158 tmbt
: tcc_error("too many basic types");
4161 if (bt
!= -1 || (st
!= -1 && u
!= VT_INT
))
4166 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | u
;
4183 memset(&ad1
, 0, sizeof(AttributeDef
));
4184 if (parse_btype(&type1
, &ad1
)) {
4185 type_decl(&type1
, &ad1
, &n
, TYPE_ABSTRACT
);
4187 n
= 1 << (ad1
.a
.aligned
- 1);
4189 type_size(&type1
, &n
);
4192 if (n
<= 0 || (n
& (n
- 1)) != 0)
4193 tcc_error("alignment must be a positive power of two");
4196 ad
->a
.aligned
= exact_log2p1(n
);
4200 if ((t
& VT_BTYPE
) == VT_DOUBLE
) {
4201 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_LDOUBLE
;
4202 } else if ((t
& (VT_BTYPE
|VT_LONG
)) == VT_LONG
) {
4203 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_LLONG
;
4210 #ifdef TCC_TARGET_ARM64
4212 /* GCC's __uint128_t appears in some Linux header files. Make it a
4213 synonym for long double to get the size and alignment right. */
4224 if ((t
& (VT_BTYPE
|VT_LONG
)) == VT_LONG
) {
4225 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_LDOUBLE
;
4233 struct_decl(&type1
, VT_ENUM
);
4236 type
->ref
= type1
.ref
;
4239 struct_decl(&type1
, VT_STRUCT
);
4242 struct_decl(&type1
, VT_UNION
);
4245 /* type modifiers */
4250 parse_btype_qualify(type
, VT_CONSTANT
);
4258 parse_btype_qualify(type
, VT_VOLATILE
);
4265 if ((t
& (VT_DEFSIGN
|VT_UNSIGNED
)) == (VT_DEFSIGN
|VT_UNSIGNED
))
4266 tcc_error("signed and unsigned modifier");
4279 if ((t
& (VT_DEFSIGN
|VT_UNSIGNED
)) == VT_DEFSIGN
)
4280 tcc_error("signed and unsigned modifier");
4281 t
|= VT_DEFSIGN
| VT_UNSIGNED
;
4297 if (t
& (VT_EXTERN
|VT_STATIC
|VT_TYPEDEF
) & ~g
)
4298 tcc_error("multiple storage classes");
4309 /* currently, no need to handle it because tcc does not
4310 track unused objects */
4313 /* GNUC attribute */
4314 case TOK_ATTRIBUTE1
:
4315 case TOK_ATTRIBUTE2
:
4316 parse_attribute(ad
);
4317 if (ad
->attr_mode
) {
4318 u
= ad
->attr_mode
-1;
4319 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | u
;
4327 parse_expr_type(&type1
);
4328 /* remove all storage modifiers except typedef */
4329 type1
.t
&= ~(VT_STORAGE
&~VT_TYPEDEF
);
4331 sym_to_attr(ad
, type1
.ref
);
4337 if (!s
|| !(s
->type
.t
& VT_TYPEDEF
))
4339 t
&= ~(VT_BTYPE
|VT_LONG
);
4340 u
= t
& ~(VT_CONSTANT
| VT_VOLATILE
), t
^= u
;
4341 type
->t
= (s
->type
.t
& ~VT_TYPEDEF
) | u
;
4342 type
->ref
= s
->type
.ref
;
4344 parse_btype_qualify(type
, t
);
4346 /* get attributes from typedef */
4356 if (tcc_state
->char_is_unsigned
) {
4357 if ((t
& (VT_DEFSIGN
|VT_BTYPE
)) == VT_BYTE
)
4360 /* VT_LONG is used just as a modifier for VT_INT / VT_LLONG */
4361 bt
= t
& (VT_BTYPE
|VT_LONG
);
4363 t
|= LONG_SIZE
== 8 ? VT_LLONG
: VT_INT
;
4364 #ifdef TCC_TARGET_PE
4365 if (bt
== VT_LDOUBLE
)
4366 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_DOUBLE
;
/* NOTE(review): damaged extraction — orig. 4375 (opening brace), 4380
   (array-to-pointer transform) and 4382-4385 (function-to-pointer body and
   closing braces) are missing.  Code kept byte-identical; comments only. */
4372 /* convert a function parameter type (array to pointer and function to
4373 function pointer) */
4374 static inline void convert_parameter_type(CType
*pt
)
4376 /* remove const and volatile qualifiers (XXX: const could be used
4377 to indicate a const function parameter */
4378 pt
->t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
4379 /* array must be transformed to pointer according to ANSI C */
/* the actual array transform line (orig. 4380/4381 interior) is absent
   from this capture; only the VT_FUNC test survives below. */
4381 if ((pt
->t
& VT_BTYPE
) == VT_FUNC
) {
/* NOTE(review): damaged extraction — orig. 4387-4388 (opening brace and a
   preceding token skip) and the closing brace are missing.  Code kept
   byte-identical.  Visible behavior: delegates string collection to
   parse_mult_str with the "string constant" diagnostic. */
4386 ST_FUNC
void parse_asm_str(CString
*astr
)
4389 parse_mult_str(astr
, "string constant");
/* NOTE(review): damaged extraction — orig. 4394-4398, 4400-4401, 4403 and
   the trailing return/brace are missing.  Code kept byte-identical. */
4392 /* Parse an asm label and return the token */
4393 static int asm_label_instr(void)
4399 parse_asm_str(&astr
);
/* debug trace of the parsed alias string (guard condition not captured) */
4402 printf("asm_alias: \"%s\"\n", (char *)astr
.data
);
/* intern the collected string (minus its trailing NUL) as a token */
4404 v
= tok_alloc(astr
.data
, astr
.size
- 1)->tok
;
4409 static int post_type(CType
*type
, AttributeDef
*ad
, int storage
, int td
)
4411 int n
, l
, t1
, arg_size
, align
, unused_align
;
4412 Sym
**plast
, *s
, *first
;
4417 /* function type, or recursive declarator (return if so) */
4419 if (td
&& !(td
& TYPE_ABSTRACT
))
4423 else if (parse_btype(&pt
, &ad1
))
4426 merge_attr (ad
, &ad1
);
4435 /* read param name and compute offset */
4436 if (l
!= FUNC_OLD
) {
4437 if ((pt
.t
& VT_BTYPE
) == VT_VOID
&& tok
== ')')
4439 type_decl(&pt
, &ad1
, &n
, TYPE_DIRECT
| TYPE_ABSTRACT
);
4440 if ((pt
.t
& VT_BTYPE
) == VT_VOID
)
4441 tcc_error("parameter declared as void");
4445 expect("identifier");
4446 pt
.t
= VT_VOID
; /* invalid type */
4449 convert_parameter_type(&pt
);
4450 arg_size
+= (type_size(&pt
, &align
) + PTR_SIZE
- 1) / PTR_SIZE
;
4451 s
= sym_push(n
| SYM_FIELD
, &pt
, 0, 0);
4457 if (l
== FUNC_NEW
&& tok
== TOK_DOTS
) {
4462 if (l
== FUNC_NEW
&& !parse_btype(&pt
, &ad1
))
4463 tcc_error("invalid type");
4466 /* if no parameters, then old type prototype */
4469 /* NOTE: const is ignored in returned type as it has a special
4470 meaning in gcc / C++ */
4471 type
->t
&= ~VT_CONSTANT
;
4472 /* some ancient pre-K&R C allows a function to return an array
4473 and the array brackets to be put after the arguments, such
4474 that "int c()[]" means something like "int[] c()" */
4477 skip(']'); /* only handle simple "[]" */
4480 /* we push a anonymous symbol which will contain the function prototype */
4481 ad
->f
.func_args
= arg_size
;
4482 ad
->f
.func_type
= l
;
4483 s
= sym_push(SYM_FIELD
, type
, 0, 0);
4489 } else if (tok
== '[') {
4490 int saved_nocode_wanted
= nocode_wanted
;
4491 /* array definition */
4494 /* XXX The optional type-quals and static should only be accepted
4495 in parameter decls. The '*' as well, and then even only
4496 in prototypes (not function defs). */
4498 case TOK_RESTRICT1
: case TOK_RESTRICT2
: case TOK_RESTRICT3
:
4513 if (!local_stack
|| (storage
& VT_STATIC
))
4514 vpushi(expr_const());
4516 /* VLAs (which can only happen with local_stack && !VT_STATIC)
4517 length must always be evaluated, even under nocode_wanted,
4518 so that its size slot is initialized (e.g. under sizeof
4523 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
4526 tcc_error("invalid array size");
4528 if (!is_integer_btype(vtop
->type
.t
& VT_BTYPE
))
4529 tcc_error("size of variable length array should be an integer");
4535 /* parse next post type */
4536 post_type(type
, ad
, storage
, 0);
4538 if ((type
->t
& VT_BTYPE
) == VT_FUNC
)
4539 tcc_error("declaration of an array of functions");
4540 if ((type
->t
& VT_BTYPE
) == VT_VOID
4541 || type_size(type
, &unused_align
) < 0)
4542 tcc_error("declaration of an array of incomplete type elements");
4544 t1
|= type
->t
& VT_VLA
;
4548 tcc_error("need explicit inner array size in VLAs");
4549 loc
-= type_size(&int_type
, &align
);
4553 vla_runtime_type_size(type
, &align
);
4555 vset(&int_type
, VT_LOCAL
|VT_LVAL
, n
);
4561 nocode_wanted
= saved_nocode_wanted
;
4563 /* we push an anonymous symbol which will contain the array
4565 s
= sym_push(SYM_FIELD
, type
, 0, n
);
4566 type
->t
= (t1
? VT_VLA
: VT_ARRAY
) | VT_PTR
;
4572 /* Parse a type declarator (except basic type), and return the type
4573 in 'type'. 'td' is a bitmask indicating which kind of type decl is
4574 expected. 'type' should contain the basic type. 'ad' is the
4575 attribute definition of the basic type. It can be modified by
4576 type_decl(). If this (possibly abstract) declarator is a pointer chain
4577 it returns the innermost pointed to type (equals *type, but is a different
4578 pointer), otherwise returns type itself, that's used for recursive calls. */
4579 static CType
*type_decl(CType
*type
, AttributeDef
*ad
, int *v
, int td
)
4582 int qualifiers
, storage
;
4584 /* recursive type, remove storage bits first, apply them later again */
4585 storage
= type
->t
& VT_STORAGE
;
4586 type
->t
&= ~VT_STORAGE
;
4589 while (tok
== '*') {
4597 qualifiers
|= VT_CONSTANT
;
4602 qualifiers
|= VT_VOLATILE
;
4608 /* XXX: clarify attribute handling */
4609 case TOK_ATTRIBUTE1
:
4610 case TOK_ATTRIBUTE2
:
4611 parse_attribute(ad
);
4615 type
->t
|= qualifiers
;
4617 /* innermost pointed to type is the one for the first derivation */
4618 ret
= pointed_type(type
);
4622 /* This is possibly a parameter type list for abstract declarators
4623 ('int ()'), use post_type for testing this. */
4624 if (!post_type(type
, ad
, 0, td
)) {
4625 /* It's not, so it's a nested declarator, and the post operations
4626 apply to the innermost pointed to type (if any). */
4627 /* XXX: this is not correct to modify 'ad' at this point, but
4628 the syntax is not clear */
4629 parse_attribute(ad
);
4630 post
= type_decl(type
, ad
, v
, td
);
4634 } else if (tok
>= TOK_IDENT
&& (td
& TYPE_DIRECT
)) {
4635 /* type identifier */
4640 if (!(td
& TYPE_ABSTRACT
))
4641 expect("identifier");
4644 post_type(post
, ad
, storage
, 0);
4645 parse_attribute(ad
);
/* NOTE(review): damaged extraction — the declarations/initialization of
   `bt` and `r` (orig. 4652-4655), the byte/short assignments (4657, 4659-
   4661) and the return are missing.  Code kept byte-identical. */
4650 /* compute the lvalue VT_LVAL_xxx needed to match type t. */
4651 ST_FUNC
int lvalue_type(int t
)
/* presumably bt = t & VT_BTYPE and r accumulates VT_LVAL_* flags — the
   assignments are absent from this capture; only the tests survive. */
4656 if (bt
== VT_BYTE
|| bt
== VT_BOOL
)
4658 else if (bt
== VT_SHORT
)
4662 if (t
& VT_UNSIGNED
)
4663 r
|= VT_LVAL_UNSIGNED
;
/* NOTE(review): damaged extraction — orig. 4669, 4672-4674, 4676 and the
   function tail/closing braces are missing.  Code kept byte-identical;
   comments only. */
/* Dereference the pointer on top of the vstack: error if not a pointer,
   replace vtop's type by the pointed-to type, and mark the result as an
   lvalue unless it is an array, VLA or function. */
4667 /* indirection with full error checking and bound check */
4668 ST_FUNC
void indir(void)
4670 if ((vtop
->type
.t
& VT_BTYPE
) != VT_PTR
) {
/* functions are tolerated here (the error path for other non-pointers,
   orig. 4672-4674, is absent from this capture) */
4671 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FUNC
)
4675 if (vtop
->r
& VT_LVAL
)
4677 vtop
->type
= *pointed_type(&vtop
->type
);
4678 /* Arrays and functions are never lvalues */
4679 if (!(vtop
->type
.t
& VT_ARRAY
) && !(vtop
->type
.t
& VT_VLA
)
4680 && (vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
) {
4681 vtop
->r
|= lvalue_type(vtop
->type
.t
);
4682 /* if bound checking, the referenced pointer must be checked */
4683 #ifdef CONFIG_TCC_BCHECK
4684 if (tcc_state
->do_bounds_check
)
4685 vtop
->r
|= VT_MUSTBOUND
;
/* NOTE(review): damaged extraction — orig. 4692-4695 (locals/brace),
   4705-4706, 4709-4710 and the closing braces are missing.  Code kept
   byte-identical; comments only. */
/* Type-check and cast the value on top of the vstack as an argument to
   FUNC; ARG is the declared parameter symbol (NULL past the declared
   parameter list). */
4690 /* pass a parameter to a function and do type checking and casting */
4691 static void gfunc_param_typed(Sym
*func
, Sym
*arg
)
4696 func_type
= func
->f
.func_type
;
/* unprototyped call or variadic tail: apply default argument promotions */
4697 if (func_type
== FUNC_OLD
||
4698 (func_type
== FUNC_ELLIPSIS
&& arg
== NULL
)) {
4699 /* default casting : only need to convert float to double */
4700 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FLOAT
) {
4701 gen_cast_s(VT_DOUBLE
);
4702 } else if (vtop
->type
.t
& VT_BITFIELD
) {
/* widen a bit-field value to its base type before passing it */
4703 type
.t
= vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
);
4704 type
.ref
= vtop
->type
.ref
;
4707 } else if (arg
== NULL
) {
4708 tcc_error("too many arguments to function");
/* prototyped case (setup lines orig. 4709-4710 absent from capture): cast
   the argument to the declared parameter type */
4711 type
.t
&= ~VT_CONSTANT
; /* need to do that to avoid false warning */
4712 gen_assign_cast(&type
);
/* NOTE(review): damaged extraction — only the comment and signature of
   this function survive; the entire body (orig. 4718-4723) is missing.
   Kept byte-identical.  EXPR_FN is presumably the parser entry point to
   run under nocode_wanted — TODO confirm against the full source. */
4716 /* parse an expression and return its type without any side effect. */
4717 static void expr_type(CType
*type
, void (*expr_fn
)(void))
/* NOTE(review): damaged extraction — orig. 4727 (rest of the comment),
   4729-4733 (locals, '(' skip) and the tail (')' skip, closing brace) are
   missing.  Code kept byte-identical; comments only. */
4726 /* parse an expression of the form '(type)' or '(expr)' and return its
4728 static void parse_expr_type(CType
*type
)
/* if a type name follows, parse it as an abstract declarator ... */
4734 if (parse_btype(type
, &ad
)) {
4735 type_decl(type
, &ad
, &n
, TYPE_ABSTRACT
);
/* ... otherwise evaluate the expression for its type only */
4737 expr_type(type
, gexpr
);
/* NOTE(review): damaged extraction — orig. 4743-4746 (locals, opening
   brace), 4748-4749 (error path) and the closing braces are missing.
   Code kept byte-identical.  Visible behavior: require a basic type, then
   parse an abstract declarator into *type. */
4742 static void parse_type(CType
*type
)
4747 if (!parse_btype(type
, &ad
)) {
4750 type_decl(type
, &ad
, &n
, TYPE_ABSTRACT
);
/* NOTE(review): damaged extraction — orig. 4754-4759 (locals, '(' skip,
   presumably the save-regs handling for NC), 4761-4763 (separator
   handling and other format cases) and the tail are missing.  Code kept
   byte-identical; comments only. */
/* Parse the argument list of a __builtin_* call according to the format
   string ARGS: 'e' = assignment-expression, 't' = type name.  NC is unused
   in the surviving fragment — presumably "no side effects wanted"; TODO
   confirm against the full source. */
4753 static void parse_builtin_params(int nc
, const char *args
)
4760 while ((c
= *args
++)) {
4764 case 'e': expr_eq(); continue;
4765 case 't': parse_type(&t
); vpush(&t
); continue;
4766 default: tcc_error("internal error"); break;
/* NOTE(review): damaged extraction — orig. 4775, 4777, 4781, 4784 and
   4786+ (the call emission and closing braces) are missing.  Code kept
   byte-identical; comments only. */
/* Emit calls to the __attribute__((cleanup)) handlers registered on
   current_cleanups, walking the chain until STOP (exclusive): for each
   entry, push the cleanup function and the address of the guarded
   variable, then (presumably) call it — the call itself is absent from
   this capture. */
4774 static void try_call_scope_cleanup(Sym
*stop
)
4776 Sym
*cls
= current_cleanups
;
4778 for (; cls
!= stop
; cls
= cls
->ncl
) {
4779 Sym
*fs
= cls
->next
; /* the cleanup function symbol */
4780 Sym
*vs
= cls
->prev_tok
; /* the variable being cleaned up */
4782 vpushsym(&fs
->type
, fs
);
4783 vset(&vs
->type
, vs
->r
, vs
->c
);
/* the handler receives a pointer to the variable */
4785 mk_pointer(&vtop
->type
);
/* NOTE(review): damaged extraction — orig. 4792-4795 (locals, opening
   brace), 4797-4798, 4802, 4804, 4806-4807 (loop bodies) and the closing
   braces are missing.  Code kept byte-identical; comments only. */
/* On a goto, run the cleanups between the current scope and the nearest
   common ancestor (NCA) of the current cleanup chain and the chain that
   was active at the goto target (CLEANUPSTATE). */
4791 static void try_call_cleanup_goto(Sym
*cleanupstate
)
4796 if (!current_cleanups
)
4799 /* search NCA of both cleanup chains given parents and initial depth */
/* ocd/ccd are the depths of the old and current chains; the target
   depth is encoded in the state symbol's v field (SYM_FIELD stripped) */
4800 ocd
= cleanupstate
? cleanupstate
->v
& ~SYM_FIELD
: 0;
/* level the deeper chain down to the shallower one ... */
4801 for (ccd
= ncleanups
, oc
= cleanupstate
; ocd
> ccd
; --ocd
, oc
= oc
->ncl
)
4803 for (cc
= current_cleanups
; ccd
> ocd
; --ccd
, cc
= cc
->ncl
)
/* ... then walk both in lockstep until they meet at the NCA */
4805 for (; cc
!= oc
; cc
= cc
->ncl
, oc
= oc
->ncl
, --ccd
)
/* run every cleanup registered below the NCA */
4808 try_call_scope_cleanup(cc
);
4811 ST_FUNC
void unary(void)
4813 int n
, t
, align
, size
, r
, sizeof_caller
;
4818 sizeof_caller
= in_sizeof
;
4821 /* XXX: GCC 2.95.3 does not generate a table although it should be
4829 #ifdef TCC_TARGET_PE
4830 t
= VT_SHORT
|VT_UNSIGNED
;
4838 vsetc(&type
, VT_CONST
, &tokc
);
4842 t
= VT_INT
| VT_UNSIGNED
;
4848 t
= VT_LLONG
| VT_UNSIGNED
;
4860 t
= (LONG_SIZE
== 8 ? VT_LLONG
: VT_INT
) | VT_LONG
;
4863 t
= (LONG_SIZE
== 8 ? VT_LLONG
: VT_INT
) | VT_LONG
| VT_UNSIGNED
;
4865 case TOK___FUNCTION__
:
4867 goto tok_identifier
;
4873 /* special function name identifier */
4874 len
= strlen(funcname
) + 1;
4875 /* generate char[len] type */
4880 vpush_ref(&type
, data_section
, data_section
->data_offset
, len
);
4881 if (!NODATA_WANTED
) {
4882 ptr
= section_ptr_add(data_section
, len
);
4883 memcpy(ptr
, funcname
, len
);
4889 #ifdef TCC_TARGET_PE
4890 t
= VT_SHORT
| VT_UNSIGNED
;
4896 /* string parsing */
4898 if (tcc_state
->char_is_unsigned
)
4899 t
= VT_BYTE
| VT_UNSIGNED
;
4901 if (tcc_state
->warn_write_strings
)
4906 memset(&ad
, 0, sizeof(AttributeDef
));
4907 decl_initializer_alloc(&type
, &ad
, VT_CONST
, 2, 0, 0);
4912 if (parse_btype(&type
, &ad
)) {
4913 type_decl(&type
, &ad
, &n
, TYPE_ABSTRACT
);
4915 /* check ISOC99 compound literal */
4917 /* data is allocated locally by default */
4922 /* all except arrays are lvalues */
4923 if (!(type
.t
& VT_ARRAY
))
4924 r
|= lvalue_type(type
.t
);
4925 memset(&ad
, 0, sizeof(AttributeDef
));
4926 decl_initializer_alloc(&type
, &ad
, r
, 1, 0, 0);
4928 if (sizeof_caller
) {
4935 } else if (tok
== '{') {
4936 int saved_nocode_wanted
= nocode_wanted
;
4938 tcc_error("expected constant");
4939 /* save all registers */
4941 /* statement expression : we do not accept break/continue
4942 inside as GCC does. We do retain the nocode_wanted state,
4943 as statement expressions can't ever be entered from the
4944 outside, so any reactivation of code emission (from labels
4945 or loop heads) can be disabled again after the end of it. */
4946 block(NULL
, NULL
, NULL
, NULL
, 1);
4947 nocode_wanted
= saved_nocode_wanted
;
4962 /* functions names must be treated as function pointers,
4963 except for unary '&' and sizeof. Since we consider that
4964 functions are not lvalues, we only have to handle it
4965 there and in function calls. */
4966 /* arrays can also be used although they are not lvalues */
4967 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
&&
4968 !(vtop
->type
.t
& VT_ARRAY
))
4970 mk_pointer(&vtop
->type
);
4976 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
4977 gen_cast_s(VT_BOOL
);
4978 vtop
->c
.i
= !vtop
->c
.i
;
4979 } else if ((vtop
->r
& VT_VALMASK
) == VT_CMP
)
4983 vseti(VT_JMP
, gvtst(1, 0));
4995 if ((vtop
->type
.t
& VT_BTYPE
) == VT_PTR
)
4996 tcc_error("pointer not accepted for unary plus");
4997 /* In order to force cast, we add zero, except for floating point
4998 where we really need an noop (otherwise -0.0 will be transformed
5000 if (!is_float(vtop
->type
.t
)) {
5012 expr_type(&type
, unary
); /* Perform a in_sizeof = 0; */
5013 s
= vtop
[1].sym
; /* hack: accessing previous vtop */
5014 size
= type_size(&type
, &align
);
5015 if (s
&& s
->a
.aligned
)
5016 align
= 1 << (s
->a
.aligned
- 1);
5017 if (t
== TOK_SIZEOF
) {
5018 if (!(type
.t
& VT_VLA
)) {
5020 tcc_error("sizeof applied to an incomplete type");
5023 vla_runtime_type_size(&type
, &align
);
5028 vtop
->type
.t
|= VT_UNSIGNED
;
5031 case TOK_builtin_expect
:
5032 /* __builtin_expect is a no-op for now */
5033 parse_builtin_params(0, "ee");
5036 case TOK_builtin_types_compatible_p
:
5037 parse_builtin_params(0, "tt");
5038 vtop
[-1].type
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
5039 vtop
[0].type
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
5040 n
= is_compatible_types(&vtop
[-1].type
, &vtop
[0].type
);
5044 case TOK_builtin_choose_expr
:
5071 case TOK_builtin_constant_p
:
5072 parse_builtin_params(1, "e");
5073 n
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
5077 case TOK_builtin_frame_address
:
5078 case TOK_builtin_return_address
:
5084 if (tok
!= TOK_CINT
) {
5085 tcc_error("%s only takes positive integers",
5086 tok1
== TOK_builtin_return_address
?
5087 "__builtin_return_address" :
5088 "__builtin_frame_address");
5090 level
= (uint32_t)tokc
.i
;
5095 vset(&type
, VT_LOCAL
, 0); /* local frame */
5097 mk_pointer(&vtop
->type
);
5098 indir(); /* -> parent frame */
5100 if (tok1
== TOK_builtin_return_address
) {
5101 // assume return address is just above frame pointer on stack
5104 mk_pointer(&vtop
->type
);
5109 #ifdef TCC_TARGET_X86_64
5110 #ifdef TCC_TARGET_PE
5111 case TOK_builtin_va_start
:
5112 parse_builtin_params(0, "ee");
5113 r
= vtop
->r
& VT_VALMASK
;
5117 tcc_error("__builtin_va_start expects a local variable");
5119 vtop
->type
= char_pointer_type
;
5124 case TOK_builtin_va_arg_types
:
5125 parse_builtin_params(0, "t");
5126 vpushi(classify_x86_64_va_arg(&vtop
->type
));
5133 #ifdef TCC_TARGET_ARM64
5134 case TOK___va_start
: {
5135 parse_builtin_params(0, "ee");
5139 vtop
->type
.t
= VT_VOID
;
5142 case TOK___va_arg
: {
5143 parse_builtin_params(0, "et");
5151 case TOK___arm64_clear_cache
: {
5152 parse_builtin_params(0, "ee");
5155 vtop
->type
.t
= VT_VOID
;
5159 /* pre operations */
5170 t
= vtop
->type
.t
& VT_BTYPE
;
5172 /* In IEEE negate(x) isn't subtract(0,x), but rather
5176 vtop
->c
.f
= -1.0 * 0.0;
5177 else if (t
== VT_DOUBLE
)
5178 vtop
->c
.d
= -1.0 * 0.0;
5180 vtop
->c
.ld
= -1.0 * 0.0;
5188 goto tok_identifier
;
5190 /* allow to take the address of a label */
5191 if (tok
< TOK_UIDENT
)
5192 expect("label identifier");
5193 s
= label_find(tok
);
5195 s
= label_push(&global_label_stack
, tok
, LABEL_FORWARD
);
5197 if (s
->r
== LABEL_DECLARED
)
5198 s
->r
= LABEL_FORWARD
;
5201 s
->type
.t
= VT_VOID
;
5202 mk_pointer(&s
->type
);
5203 s
->type
.t
|= VT_STATIC
;
5205 vpushsym(&s
->type
, s
);
5211 CType controlling_type
;
5212 int has_default
= 0;
5215 TokenString
*str
= NULL
;
5216 int saved_const_wanted
= const_wanted
;
5221 expr_type(&controlling_type
, expr_eq
);
5222 controlling_type
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
| VT_ARRAY
);
5223 if ((controlling_type
.t
& VT_BTYPE
) == VT_FUNC
)
5224 mk_pointer(&controlling_type
);
5225 const_wanted
= saved_const_wanted
;
5229 if (tok
== TOK_DEFAULT
) {
5231 tcc_error("too many 'default'");
5237 AttributeDef ad_tmp
;
5240 parse_btype(&cur_type
, &ad_tmp
);
5241 type_decl(&cur_type
, &ad_tmp
, &itmp
, TYPE_ABSTRACT
);
5242 if (compare_types(&controlling_type
, &cur_type
, 0)) {
5244 tcc_error("type match twice");
5254 skip_or_save_block(&str
);
5256 skip_or_save_block(NULL
);
5263 type_to_str(buf
, sizeof buf
, &controlling_type
, NULL
);
5264 tcc_error("type '%s' does not match any association", buf
);
5266 begin_macro(str
, 1);
5275 // special qnan , snan and infinity values
5280 vtop
->type
.t
= VT_FLOAT
;
5285 goto special_math_val
;
5288 goto special_math_val
;
5295 expect("identifier");
5297 if (!s
|| IS_ASM_SYM(s
)) {
5298 const char *name
= get_tok_str(t
, NULL
);
5300 tcc_error("'%s' undeclared", name
);
5301 /* for simple function calls, we tolerate undeclared
5302 external reference to int() function */
5303 if (tcc_state
->warn_implicit_function_declaration
5304 #ifdef TCC_TARGET_PE
5305 /* people must be warned about using undeclared WINAPI functions
5306 (which usually start with uppercase letter) */
5307 || (name
[0] >= 'A' && name
[0] <= 'Z')
5310 tcc_warning("implicit declaration of function '%s'", name
);
5311 s
= external_global_sym(t
, &func_old_type
);
5315 /* A symbol that has a register is a local register variable,
5316 which starts out as VT_LOCAL value. */
5317 if ((r
& VT_VALMASK
) < VT_CONST
)
5318 r
= (r
& ~VT_VALMASK
) | VT_LOCAL
;
5320 vset(&s
->type
, r
, s
->c
);
5321 /* Point to s as backpointer (even without r&VT_SYM).
5322 Will be used by at least the x86 inline asm parser for
5328 } else if (r
== VT_CONST
&& IS_ENUM_VAL(s
->type
.t
)) {
5329 vtop
->c
.i
= s
->enum_val
;
5334 /* post operations */
5336 if (tok
== TOK_INC
|| tok
== TOK_DEC
) {
5339 } else if (tok
== '.' || tok
== TOK_ARROW
|| tok
== TOK_CDOUBLE
) {
5340 int qualifiers
, cumofs
= 0;
5342 if (tok
== TOK_ARROW
)
5344 qualifiers
= vtop
->type
.t
& (VT_CONSTANT
| VT_VOLATILE
);
5347 /* expect pointer on structure */
5348 if ((vtop
->type
.t
& VT_BTYPE
) != VT_STRUCT
)
5349 expect("struct or union");
5350 if (tok
== TOK_CDOUBLE
)
5351 expect("field name");
5353 if (tok
== TOK_CINT
|| tok
== TOK_CUINT
)
5354 expect("field name");
5355 s
= find_field(&vtop
->type
, tok
, &cumofs
);
5357 tcc_error("field not found: %s", get_tok_str(tok
& ~SYM_FIELD
, &tokc
));
5358 /* add field offset to pointer */
5359 vtop
->type
= char_pointer_type
; /* change type to 'char *' */
5360 vpushi(cumofs
+ s
->c
);
5362 /* change type to field type, and set to lvalue */
5363 vtop
->type
= s
->type
;
5364 vtop
->type
.t
|= qualifiers
;
5365 /* an array is never an lvalue */
5366 if (!(vtop
->type
.t
& VT_ARRAY
)) {
5367 vtop
->r
|= lvalue_type(vtop
->type
.t
);
5368 #ifdef CONFIG_TCC_BCHECK
5369 /* if bound checking, the referenced pointer must be checked */
5370 if (tcc_state
->do_bounds_check
&& (vtop
->r
& VT_VALMASK
) != VT_LOCAL
)
5371 vtop
->r
|= VT_MUSTBOUND
;
5375 } else if (tok
== '[') {
5381 } else if (tok
== '(') {
5384 int nb_args
, ret_nregs
, ret_align
, regsize
, variadic
;
5387 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
) {
5388 /* pointer test (no array accepted) */
5389 if ((vtop
->type
.t
& (VT_BTYPE
| VT_ARRAY
)) == VT_PTR
) {
5390 vtop
->type
= *pointed_type(&vtop
->type
);
5391 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
)
5395 expect("function pointer");
5398 vtop
->r
&= ~VT_LVAL
; /* no lvalue */
5400 /* get return type */
5403 sa
= s
->next
; /* first parameter */
5404 nb_args
= regsize
= 0;
5406 /* compute first implicit argument if a structure is returned */
5407 if ((s
->type
.t
& VT_BTYPE
) == VT_STRUCT
) {
5408 variadic
= (s
->f
.func_type
== FUNC_ELLIPSIS
);
5409 ret_nregs
= gfunc_sret(&s
->type
, variadic
, &ret
.type
,
5410 &ret_align
, ®size
);
5412 /* get some space for the returned structure */
5413 size
= type_size(&s
->type
, &align
);
5414 #ifdef TCC_TARGET_ARM64
5415 /* On arm64, a small struct is return in registers.
5416 It is much easier to write it to memory if we know
5417 that we are allowed to write some extra bytes, so
5418 round the allocated space up to a power of 2: */
5420 while (size
& (size
- 1))
5421 size
= (size
| (size
- 1)) + 1;
5423 loc
= (loc
- size
) & -align
;
5425 ret
.r
= VT_LOCAL
| VT_LVAL
;
5426 /* pass it as 'int' to avoid structure arg passing
5428 vseti(VT_LOCAL
, loc
);
5438 /* return in register */
5439 if (is_float(ret
.type
.t
)) {
5440 ret
.r
= reg_fret(ret
.type
.t
);
5441 #ifdef TCC_TARGET_X86_64
5442 if ((ret
.type
.t
& VT_BTYPE
) == VT_QFLOAT
)
5446 #ifndef TCC_TARGET_ARM64
5447 #ifdef TCC_TARGET_X86_64
5448 if ((ret
.type
.t
& VT_BTYPE
) == VT_QLONG
)
5450 if ((ret
.type
.t
& VT_BTYPE
) == VT_LLONG
)
5461 gfunc_param_typed(s
, sa
);
5471 tcc_error("too few arguments to function");
5473 gfunc_call(nb_args
);
5476 for (r
= ret
.r
+ ret_nregs
+ !ret_nregs
; r
-- > ret
.r
;) {
5477 vsetc(&ret
.type
, r
, &ret
.c
);
5478 vtop
->r2
= ret
.r2
; /* Loop only happens when r2 is VT_CONST */
5481 /* handle packed struct return */
5482 if (((s
->type
.t
& VT_BTYPE
) == VT_STRUCT
) && ret_nregs
) {
5485 size
= type_size(&s
->type
, &align
);
5486 /* We're writing whole regs often, make sure there's enough
5487 space. Assume register size is power of 2. */
5488 if (regsize
> align
)
5490 loc
= (loc
- size
) & -align
;
5494 vset(&ret
.type
, VT_LOCAL
| VT_LVAL
, addr
+ offset
);
5498 if (--ret_nregs
== 0)
5502 vset(&s
->type
, VT_LOCAL
| VT_LVAL
, addr
);
5510 ST_FUNC
void expr_prod(void)
5515 while (tok
== '*' || tok
== '/' || tok
== '%') {
5523 ST_FUNC
void expr_sum(void)
5528 while (tok
== '+' || tok
== '-') {
5536 static void expr_shift(void)
5541 while (tok
== TOK_SHL
|| tok
== TOK_SAR
) {
5549 static void expr_cmp(void)
5554 while ((tok
>= TOK_ULE
&& tok
<= TOK_GT
) ||
5555 tok
== TOK_ULT
|| tok
== TOK_UGE
) {
5563 static void expr_cmpeq(void)
5568 while (tok
== TOK_EQ
|| tok
== TOK_NE
) {
5576 static void expr_and(void)
5579 while (tok
== '&') {
5586 static void expr_xor(void)
5589 while (tok
== '^') {
5596 static void expr_or(void)
5599 while (tok
== '|') {
5606 static void expr_land(void)
5609 if (tok
== TOK_LAND
) {
5612 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
5613 gen_cast_s(VT_BOOL
);
5618 while (tok
== TOK_LAND
) {
5634 if (tok
!= TOK_LAND
) {
5647 static void expr_lor(void)
5650 if (tok
== TOK_LOR
) {
5653 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
5654 gen_cast_s(VT_BOOL
);
5659 while (tok
== TOK_LOR
) {
5675 if (tok
!= TOK_LOR
) {
5688 /* Assuming vtop is a value used in a conditional context
5689 (i.e. compared with zero) return 0 if it's false, 1 if
5690 true and -1 if it can't be statically determined. */
5691 static int condition_3way(void)
5694 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
&&
5695 (!(vtop
->r
& VT_SYM
) || !vtop
->sym
->a
.weak
)) {
5697 gen_cast_s(VT_BOOL
);
5704 static void expr_cond(void)
5706 int tt
, u
, r1
, r2
, rc
, t1
, t2
, bt1
, bt2
, islv
, c
, g
;
5708 CType type
, type1
, type2
;
5713 c
= condition_3way();
5714 g
= (tok
== ':' && gnu_ext
);
5716 /* needed to avoid having different registers saved in
5718 if (is_float(vtop
->type
.t
)) {
5720 #ifdef TCC_TARGET_X86_64
5721 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LDOUBLE
) {
5745 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FUNC
)
5746 mk_pointer(&vtop
->type
);
5748 sv
= *vtop
; /* save value to handle it later */
5749 vtop
--; /* no vpop so that FP stack is not flushed */
5765 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FUNC
)
5766 mk_pointer(&vtop
->type
);
5769 bt1
= t1
& VT_BTYPE
;
5771 bt2
= t2
& VT_BTYPE
;
5775 /* cast operands to correct type according to ISOC rules */
5776 if (bt1
== VT_VOID
|| bt2
== VT_VOID
) {
5777 type
.t
= VT_VOID
; /* NOTE: as an extension, we accept void on only one side */
5778 } else if (is_float(bt1
) || is_float(bt2
)) {
5779 if (bt1
== VT_LDOUBLE
|| bt2
== VT_LDOUBLE
) {
5780 type
.t
= VT_LDOUBLE
;
5782 } else if (bt1
== VT_DOUBLE
|| bt2
== VT_DOUBLE
) {
5787 } else if (bt1
== VT_LLONG
|| bt2
== VT_LLONG
) {
5788 /* cast to biggest op */
5789 type
.t
= VT_LLONG
| VT_LONG
;
5790 if (bt1
== VT_LLONG
)
5792 if (bt2
== VT_LLONG
)
5794 /* convert to unsigned if it does not fit in a long long */
5795 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
) ||
5796 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
))
5797 type
.t
|= VT_UNSIGNED
;
5798 } else if (bt1
== VT_PTR
|| bt2
== VT_PTR
) {
5799 /* http://port70.net/~nsz/c/c99/n1256.html#6.5.15p6 */
5800 /* If one is a null ptr constant the result type
5802 if (is_null_pointer (vtop
)) type
= type1
;
5803 else if (is_null_pointer (&sv
)) type
= type2
;
5804 else if (bt1
!= bt2
)
5805 tcc_error("incompatible types in conditional expressions");
5807 CType
*pt1
= pointed_type(&type1
);
5808 CType
*pt2
= pointed_type(&type2
);
5809 int pbt1
= pt1
->t
& VT_BTYPE
;
5810 int pbt2
= pt2
->t
& VT_BTYPE
;
5811 int newquals
, copied
= 0;
5812 /* pointers to void get preferred, otherwise the
5813 pointed to types minus qualifs should be compatible */
5814 type
= (pbt1
== VT_VOID
) ? type1
: type2
;
5815 if (pbt1
!= VT_VOID
&& pbt2
!= VT_VOID
) {
5816 if(!compare_types(pt1
, pt2
, 1/*unqualif*/))
5817 tcc_warning("pointer type mismatch in conditional expression\n");
5819 /* combine qualifs */
5820 newquals
= ((pt1
->t
| pt2
->t
) & (VT_CONSTANT
| VT_VOLATILE
));
5821 if ((~pointed_type(&type
)->t
& (VT_CONSTANT
| VT_VOLATILE
))
5824 /* copy the pointer target symbol */
5825 type
.ref
= sym_push(SYM_FIELD
, &type
.ref
->type
,
5828 pointed_type(&type
)->t
|= newquals
;
5830 /* pointers to incomplete arrays get converted to
5831 pointers to completed ones if possible */
5832 if (pt1
->t
& VT_ARRAY
5833 && pt2
->t
& VT_ARRAY
5834 && pointed_type(&type
)->ref
->c
< 0
5835 && (pt1
->ref
->c
> 0 || pt2
->ref
->c
> 0))
5838 type
.ref
= sym_push(SYM_FIELD
, &type
.ref
->type
,
5840 pointed_type(&type
)->ref
=
5841 sym_push(SYM_FIELD
, &pointed_type(&type
)->ref
->type
,
5842 0, pointed_type(&type
)->ref
->c
);
5843 pointed_type(&type
)->ref
->c
=
5844 0 < pt1
->ref
->c
? pt1
->ref
->c
: pt2
->ref
->c
;
5847 } else if (bt1
== VT_STRUCT
|| bt2
== VT_STRUCT
) {
5848 /* XXX: test structure compatibility */
5849 type
= bt1
== VT_STRUCT
? type1
: type2
;
5851 /* integer operations */
5852 type
.t
= VT_INT
| (VT_LONG
& (t1
| t2
));
5853 /* convert to unsigned if it does not fit in an integer */
5854 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
) ||
5855 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
))
5856 type
.t
|= VT_UNSIGNED
;
5858 /* keep structs lvalue by transforming `(expr ? a : b)` to `*(expr ? &a : &b)` so
5859 that `(expr ? a : b).mem` does not error with "lvalue expected" */
5860 islv
= (vtop
->r
& VT_LVAL
) && (sv
.r
& VT_LVAL
) && VT_STRUCT
== (type
.t
& VT_BTYPE
);
5862 /* now we convert second operand */
5866 mk_pointer(&vtop
->type
);
5868 } else if (VT_STRUCT
== (vtop
->type
.t
& VT_BTYPE
))
5873 if (is_float(type
.t
)) {
5875 #ifdef TCC_TARGET_X86_64
5876 if ((type
.t
& VT_BTYPE
) == VT_LDOUBLE
) {
5880 } else if ((type
.t
& VT_BTYPE
) == VT_LLONG
) {
5881 /* for long longs, we use fixed registers to avoid having
5882 to handle a complicated move */
5893 /* this is horrible, but we must also convert first
5899 mk_pointer(&vtop
->type
);
5901 } else if (VT_STRUCT
== (vtop
->type
.t
& VT_BTYPE
))
5905 if (c
< 0 || islv
) {
5907 move_reg(r2
, r1
, type
.t
);
5917 static void expr_eq(void)
5923 (tok
>= TOK_A_MOD
&& tok
<= TOK_A_DIV
) ||
5924 tok
== TOK_A_XOR
|| tok
== TOK_A_OR
||
5925 tok
== TOK_A_SHL
|| tok
== TOK_A_SAR
) {
5940 ST_FUNC
void gexpr(void)
5951 /* parse a constant expression and return value in vtop. */
5952 static void expr_const1(void)
5961 /* parse an integer constant and return its value. */
5962 static inline int64_t expr_const64(void)
5966 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) != VT_CONST
)
5967 expect("constant expression");
5973 /* parse an integer constant and return its value.
5974 Complain if it doesn't fit 32bit (signed or unsigned). */
5975 ST_FUNC
int expr_const(void)
5978 int64_t wc
= expr_const64();
5980 if (c
!= wc
&& (unsigned)c
!= wc
)
5981 tcc_error("constant exceeds 32 bit");
5985 /* return the label token if current token is a label, otherwise
5987 static int is_label(void)
5991 /* fast test first */
5992 if (tok
< TOK_UIDENT
)
5994 /* no need to save tokc because tok is an identifier */
6000 unget_tok(last_tok
);
6005 #ifndef TCC_TARGET_ARM64
6006 static void gfunc_return(CType
*func_type
)
6008 if ((func_type
->t
& VT_BTYPE
) == VT_STRUCT
) {
6009 CType type
, ret_type
;
6010 int ret_align
, ret_nregs
, regsize
;
6011 ret_nregs
= gfunc_sret(func_type
, func_var
, &ret_type
,
6012 &ret_align
, ®size
);
6013 if (0 == ret_nregs
) {
6014 /* if returning structure, must copy it to implicit
6015 first pointer arg location */
6018 vset(&type
, VT_LOCAL
| VT_LVAL
, func_vc
);
6021 /* copy structure value to pointer */
6024 /* returning structure packed into registers */
6025 int r
, size
, addr
, align
;
6026 size
= type_size(func_type
,&align
);
6027 if ((vtop
->r
!= (VT_LOCAL
| VT_LVAL
) ||
6028 (vtop
->c
.i
& (ret_align
-1)))
6029 && (align
& (ret_align
-1))) {
6030 loc
= (loc
- size
) & -ret_align
;
6033 vset(&type
, VT_LOCAL
| VT_LVAL
, addr
);
6037 vset(&ret_type
, VT_LOCAL
| VT_LVAL
, addr
);
6039 vtop
->type
= ret_type
;
6040 if (is_float(ret_type
.t
))
6041 r
= rc_fret(ret_type
.t
);
6052 if (--ret_nregs
== 0)
6054 /* We assume that when a structure is returned in multiple
6055 registers, their classes are consecutive values of the
6058 vtop
->c
.i
+= regsize
;
6062 } else if (is_float(func_type
->t
)) {
6063 gv(rc_fret(func_type
->t
));
6067 vtop
--; /* NOT vpop() because on x86 it would flush the fp stack */
6071 static int case_cmp(const void *pa
, const void *pb
)
6073 int64_t a
= (*(struct case_t
**) pa
)->v1
;
6074 int64_t b
= (*(struct case_t
**) pb
)->v1
;
6075 return a
< b
? -1 : a
> b
;
6078 static void gcase(struct case_t
**base
, int len
, int *bsym
)
6082 int ll
= (vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
;
6100 gtst_addr(0, p
->sym
); /* v1 <= x <= v2 */
6102 gcase(base
, len
/2, bsym
);
6103 if (cur_switch
->def_sym
)
6104 gjmp_addr(cur_switch
->def_sym
);
6106 *bsym
= gjmp(*bsym
);
6110 base
+= e
; len
-= e
;
6120 if (p
->v1
== p
->v2
) {
6122 gtst_addr(0, p
->sym
);
6132 gtst_addr(0, p
->sym
);
6138 static void block(int *bsym
, Sym
*bcl
, int *csym
, Sym
*ccl
, int is_expr
)
6140 int a
, b
, c
, d
, cond
;
6143 /* generate line number info */
6144 if (tcc_state
->do_debug
)
6145 tcc_debug_line(tcc_state
);
6148 /* default return value is (void) */
6150 vtop
->type
.t
= VT_VOID
;
6153 if (tok
== TOK_IF
) {
6155 int saved_nocode_wanted
= nocode_wanted
;
6160 cond
= condition_3way();
6166 nocode_wanted
|= 0x20000000;
6167 block(bsym
, bcl
, csym
, ccl
, 0);
6169 nocode_wanted
= saved_nocode_wanted
;
6170 if (tok
== TOK_ELSE
) {
6175 nocode_wanted
|= 0x20000000;
6176 block(bsym
, bcl
, csym
, ccl
, 0);
6177 gsym(d
); /* patch else jmp */
6179 nocode_wanted
= saved_nocode_wanted
;
6182 } else if (tok
== TOK_WHILE
) {
6183 int saved_nocode_wanted
;
6184 nocode_wanted
&= ~0x20000000;
6194 saved_nocode_wanted
= nocode_wanted
;
6195 block(&a
, current_cleanups
, &b
, current_cleanups
, 0);
6196 nocode_wanted
= saved_nocode_wanted
;
6201 } else if (tok
== '{') {
6202 Sym
*llabel
, *lcleanup
;
6203 int block_vla_sp_loc
= vla_sp_loc
, saved_vlas_in_scope
= vlas_in_scope
;
6204 int lncleanups
= ncleanups
;
6207 /* record local declaration stack position */
6209 llabel
= local_label_stack
;
6210 lcleanup
= current_cleanups
;
6213 /* handle local labels declarations */
6214 while (tok
== TOK_LABEL
) {
6217 if (tok
< TOK_UIDENT
)
6218 expect("label identifier");
6219 label_push(&local_label_stack
, tok
, LABEL_DECLARED
);
6229 while (tok
!= '}') {
6230 if ((a
= is_label()))
6237 block(bsym
, bcl
, csym
, ccl
, is_expr
);
6241 if (current_cleanups
!= lcleanup
) {
6245 for (pg
= &pending_gotos
; (g
= *pg
) && g
->c
> lncleanups
;)
6246 if (g
->prev_tok
->r
& LABEL_FORWARD
) {
6251 try_call_scope_cleanup(lcleanup
);
6252 pcl
->jnext
= gjmp(0);
6254 goto remove_pending
;
6263 if (!nocode_wanted
) {
6264 try_call_scope_cleanup(lcleanup
);
6268 current_cleanups
= lcleanup
;
6269 ncleanups
= lncleanups
;
6270 /* pop locally defined labels */
6271 label_pop(&local_label_stack
, llabel
, is_expr
);
6272 /* pop locally defined symbols */
6274 /* In the is_expr case (a statement expression is finished here),
6275 vtop might refer to symbols on the local_stack. Either via the
6276 type or via vtop->sym. We can't pop those nor any that in turn
6277 might be referred to. To make it easier we don't roll back
6278 any symbols in that case; some upper level call to block() will
6279 do that. We do have to remove such symbols from the lookup
6280 tables, though. sym_pop will do that. */
6281 sym_pop(&local_stack
, s
, is_expr
);
6283 /* Pop VLA frames and restore stack pointer if required */
6284 if (vlas_in_scope
> saved_vlas_in_scope
) {
6285 vla_sp_loc
= saved_vlas_in_scope
? block_vla_sp_loc
: vla_sp_root_loc
;
6288 vlas_in_scope
= saved_vlas_in_scope
;
6291 } else if (tok
== TOK_RETURN
) {
6295 gen_assign_cast(&func_vt
);
6296 try_call_scope_cleanup(NULL
);
6297 if ((func_vt
.t
& VT_BTYPE
) == VT_VOID
)
6300 gfunc_return(&func_vt
);
6302 try_call_scope_cleanup(NULL
);
6305 /* jump unless last stmt in top-level block */
6306 if (tok
!= '}' || local_scope
!= 1)
6308 nocode_wanted
|= 0x20000000;
6309 } else if (tok
== TOK_BREAK
) {
6312 tcc_error("cannot break");
6313 try_call_scope_cleanup(bcl
);
6314 *bsym
= gjmp(*bsym
);
6317 nocode_wanted
|= 0x20000000;
6318 } else if (tok
== TOK_CONTINUE
) {
6321 tcc_error("cannot continue");
6322 try_call_scope_cleanup(ccl
);
6323 vla_sp_restore_root();
6324 *csym
= gjmp(*csym
);
6327 nocode_wanted
|= 0x20000000;
6328 } else if (tok
== TOK_FOR
) {
6330 int saved_nocode_wanted
;
6331 Sym
*lcleanup
= current_cleanups
;
6332 int lncleanups
= ncleanups
;
6334 nocode_wanted
&= ~0x20000000;
6340 /* c99 for-loop init decl? */
6341 if (!decl0(VT_LOCAL
, 1, NULL
)) {
6342 /* no, regular for-loop init expr */
6368 saved_nocode_wanted
= nocode_wanted
;
6369 block(&a
, current_cleanups
, &b
, current_cleanups
, 0);
6370 nocode_wanted
= saved_nocode_wanted
;
6375 try_call_scope_cleanup(lcleanup
);
6376 ncleanups
= lncleanups
;
6377 current_cleanups
= lcleanup
;
6378 sym_pop(&local_stack
, s
, 0);
6381 if (tok
== TOK_DO
) {
6382 int saved_nocode_wanted
;
6383 nocode_wanted
&= ~0x20000000;
6389 saved_nocode_wanted
= nocode_wanted
;
6390 block(&a
, current_cleanups
, &b
, current_cleanups
, 0);
6395 nocode_wanted
= saved_nocode_wanted
;
6399 nocode_wanted
= saved_nocode_wanted
;
6404 if (tok
== TOK_SWITCH
) {
6405 struct switch_t
*saved
, sw
;
6406 int saved_nocode_wanted
= nocode_wanted
;
6412 switchval
= *vtop
--;
6414 b
= gjmp(0); /* jump to first case */
6415 sw
.p
= NULL
; sw
.n
= 0; sw
.def_sym
= 0;
6418 block(&a
, current_cleanups
, csym
, ccl
, 0);
6419 nocode_wanted
= saved_nocode_wanted
;
6420 a
= gjmp(a
); /* add implicit break */
6423 qsort(sw
.p
, sw
.n
, sizeof(void*), case_cmp
);
6424 for (b
= 1; b
< sw
.n
; b
++)
6425 if (sw
.p
[b
- 1]->v2
>= sw
.p
[b
]->v1
)
6426 tcc_error("duplicate case value");
6427 /* Our switch table sorting is signed, so the compared
6428 value needs to be as well when it's 64bit. */
6429 if ((switchval
.type
.t
& VT_BTYPE
) == VT_LLONG
)
6430 switchval
.type
.t
&= ~VT_UNSIGNED
;
6432 gcase(sw
.p
, sw
.n
, &a
);
6435 gjmp_addr(sw
.def_sym
);
6436 dynarray_reset(&sw
.p
, &sw
.n
);
6441 if (tok
== TOK_CASE
) {
6442 struct case_t
*cr
= tcc_malloc(sizeof(struct case_t
));
6445 nocode_wanted
&= ~0x20000000;
6447 cr
->v1
= cr
->v2
= expr_const64();
6448 if (gnu_ext
&& tok
== TOK_DOTS
) {
6450 cr
->v2
= expr_const64();
6451 if (cr
->v2
< cr
->v1
)
6452 tcc_warning("empty case range");
6455 dynarray_add(&cur_switch
->p
, &cur_switch
->n
, cr
);
6458 goto block_after_label
;
6460 if (tok
== TOK_DEFAULT
) {
6465 if (cur_switch
->def_sym
)
6466 tcc_error("too many 'default'");
6467 cur_switch
->def_sym
= ind
;
6469 goto block_after_label
;
6471 if (tok
== TOK_GOTO
) {
6473 if (tok
== '*' && gnu_ext
) {
6477 if ((vtop
->type
.t
& VT_BTYPE
) != VT_PTR
)
6480 } else if (tok
>= TOK_UIDENT
) {
6481 s
= label_find(tok
);
6482 /* put forward definition if needed */
6484 s
= label_push(&global_label_stack
, tok
, LABEL_FORWARD
);
6485 else if (s
->r
== LABEL_DECLARED
)
6486 s
->r
= LABEL_FORWARD
;
6488 vla_sp_restore_root();
6489 if (s
->r
& LABEL_FORWARD
) {
6490 /* start new goto chain for cleanups, linked via label->next */
6491 if (current_cleanups
) {
6492 sym_push2(&pending_gotos
, SYM_FIELD
, 0, ncleanups
);
6493 pending_gotos
->prev_tok
= s
;
6494 s
= sym_push2(&s
->next
, SYM_FIELD
, 0, 0);
6495 pending_gotos
->next
= s
;
6497 s
->jnext
= gjmp(s
->jnext
);
6499 try_call_cleanup_goto(s
->cleanupstate
);
6500 gjmp_addr(s
->jnext
);
6504 expect("label identifier");
6507 } else if (tok
== TOK_ASM1
|| tok
== TOK_ASM2
|| tok
== TOK_ASM3
) {
6516 if (s
->r
== LABEL_DEFINED
)
6517 tcc_error("duplicate label '%s'", get_tok_str(s
->v
, NULL
));
6518 s
->r
= LABEL_DEFINED
;
6520 Sym
*pcl
; /* pending cleanup goto */
6521 for (pcl
= s
->next
; pcl
; pcl
= pcl
->prev
)
6523 sym_pop(&s
->next
, NULL
, 0);
6527 s
= label_push(&global_label_stack
, b
, LABEL_DEFINED
);
6530 s
->cleanupstate
= current_cleanups
;
6532 /* we accept this, but it is a mistake */
6534 nocode_wanted
&= ~0x20000000;
6536 tcc_warning("deprecated use of label at end of compound statement");
6540 block(bsym
, bcl
, csym
, ccl
, is_expr
);
6543 /* expression case */
6558 /* This skips over a stream of tokens containing balanced {} and ()
6559 pairs, stopping at outer ',' ';' and '}' (or matching '}' if we started
6560 with a '{'). If STR then allocates and stores the skipped tokens
6561 in *STR. This doesn't check if () and {} are nested correctly,
6562 i.e. "({)}" is accepted. */
6563 static void skip_or_save_block(TokenString
**str
)
6565 int braces
= tok
== '{';
6568 *str
= tok_str_alloc();
6570 while ((level
> 0 || (tok
!= '}' && tok
!= ',' && tok
!= ';' && tok
!= ')'))) {
6572 if (tok
== TOK_EOF
) {
6573 if (str
|| level
> 0)
6574 tcc_error("unexpected end of file");
6579 tok_str_add_tok(*str
);
6582 if (t
== '{' || t
== '(') {
6584 } else if (t
== '}' || t
== ')') {
6586 if (level
== 0 && braces
&& t
== '}')
6591 tok_str_add(*str
, -1);
6592 tok_str_add(*str
, 0);
6596 #define EXPR_CONST 1
6599 static void parse_init_elem(int expr_type
)
6601 int saved_global_expr
;
6604 /* compound literals must be allocated globally in this case */
6605 saved_global_expr
= global_expr
;
6608 global_expr
= saved_global_expr
;
6609 /* NOTE: symbols are accepted, as well as lvalue for anon symbols
6610 (compound literals). */
6611 if (((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) != VT_CONST
6612 && ((vtop
->r
& (VT_SYM
|VT_LVAL
)) != (VT_SYM
|VT_LVAL
)
6613 || vtop
->sym
->v
< SYM_FIRST_ANOM
))
6614 #ifdef TCC_TARGET_PE
6615 || ((vtop
->r
& VT_SYM
) && vtop
->sym
->a
.dllimport
)
6618 tcc_error("initializer element is not constant");
6626 /* put zeros for variable based init */
6627 static void init_putz(Section
*sec
, unsigned long c
, int size
)
6630 /* nothing to do because globals are already set to zero */
6632 vpush_global_sym(&func_old_type
, TOK_memset
);
6634 #ifdef TCC_TARGET_ARM
6646 #define DIF_SIZE_ONLY 2
6647 #define DIF_HAVE_ELEM 4
6649 /* t is the array or struct type. c is the array or struct
6650 address. cur_field is the pointer to the current
6651 field, for arrays the 'c' member contains the current start
6652 index. 'flags' is as in decl_initializer.
6653 'al' contains the already initialized length of the
6654 current container (starting at c). This returns the new length of that. */
/* NOTE(review): this chunk is a line-exploded extraction of TCC's tccgen.c.
   The leading numbers (6652, 6655, ...) are the original file's line numbers;
   they are NOT contiguous, so interior source lines (braces, skip() calls,
   etc.) are missing.  Restore the block from the original file before trying
   to compile or modify the logic — do not edit this text as-is. */
/* decl_designator: per the surviving header comment above, parses one
   designator ('[index]', '.field', and — with gnu_ext — the '[first ... last]'
   range extension) for the object of type 'type' at offset 'c' in section
   'sec', then calls decl_initializer() on the designated element and returns
   the updated initialized length of the current container ('al'). */
6655 static int decl_designator(CType
*type
, Section
*sec
, unsigned long c
,
6656 Sym
**cur_field
, int flags
, int al
)
6659 int index
, index_last
, align
, l
, nb_elems
, elem_size
;
6660 unsigned long corig
= c
;
6664 if (flags
& DIF_HAVE_ELEM
)
6666 if (gnu_ext
&& (l
= is_label()) != 0)
6668 /* NOTE: we only support ranges for last designator */
6669 while (nb_elems
== 1 && (tok
== '[' || tok
== '.')) {
/* '[' designator: constant index (and optional GNU '...' range end). */
6671 if (!(type
->t
& VT_ARRAY
))
6672 expect("array type");
6674 index
= index_last
= expr_const();
6675 if (tok
== TOK_DOTS
&& gnu_ext
) {
6677 index_last
= expr_const();
6681 if (index
< 0 || (s
->c
>= 0 && index_last
>= s
->c
) ||
6683 tcc_error("invalid index");
6685 (*cur_field
)->c
= index_last
;
6686 type
= pointed_type(type
);
6687 elem_size
= type_size(type
, &align
);
6688 c
+= index
* elem_size
;
6689 nb_elems
= index_last
- index
+ 1;
/* '.' designator branch: presumably reached for struct/union members —
   the intervening original lines were lost in extraction; confirm against
   upstream tccgen.c. */
6696 if ((type
->t
& VT_BTYPE
) != VT_STRUCT
)
6697 expect("struct/union type");
6698 f
= find_field(type
, l
, &cumofs
);
6711 } else if (!gnu_ext
) {
/* No designator: advance through the container positionally. */
6716 if (type
->t
& VT_ARRAY
) {
6717 index
= (*cur_field
)->c
;
6718 if (type
->ref
->c
>= 0 && index
>= type
->ref
->c
)
6719 tcc_error("index too large");
6720 type
= pointed_type(type
);
6721 c
+= index
* type_size(type
, &align
);
/* Skip anonymous bitfield padding members. */
6724 while (f
&& (f
->v
& SYM_FIRST_ANOM
) && (f
->type
.t
& VT_BITFIELD
))
6725 *cur_field
= f
= f
->next
;
6727 tcc_error("too many field init");
6732 /* must put zero in holes (note that doing it that way
6733 ensures that it even works with designators) */
6734 if (!(flags
& DIF_SIZE_ONLY
) && c
- corig
> al
)
6735 init_putz(sec
, corig
+ al
, c
- corig
- al
);
6736 decl_initializer(type
, sec
, c
, flags
& ~DIF_FIRST
);
6738 /* XXX: make it more general */
/* GNU range designator: replicate the first element's init across the
   whole [first ... last] range — by vset/copy locally, or by memcpy of the
   already-emitted section bytes for static data. */
6739 if (!(flags
& DIF_SIZE_ONLY
) && nb_elems
> 1) {
6740 unsigned long c_end
;
6745 vset(type
, VT_LOCAL
|VT_LVAL
, c
);
6746 for (i
= 1; i
< nb_elems
; i
++) {
6747 vset(type
, VT_LOCAL
|VT_LVAL
, c
+ elem_size
* i
);
6752 } else if (!NODATA_WANTED
) {
6753 c_end
= c
+ nb_elems
* elem_size
;
6754 if (c_end
> sec
->data_allocated
)
6755 section_realloc(sec
, c_end
);
6756 src
= sec
->data
+ c
;
6758 for(i
= 1; i
< nb_elems
; i
++) {
6760 memcpy(dst
, src
, elem_size
);
6764 c
+= nb_elems
* type_size(type
, &align
);
/* NOTE(review): line-exploded extraction of tccgen.c — embedded original line
   numbers jump (e.g. 6848 -> 6858), so interior lines including the 'switch'
   statement and its 'case' labels are missing.  Restore from the original
   file before compiling or editing the logic. */
6770 /* store a value or an expression directly in global data or in local array */
6771 static void init_putv(CType
*type
, Section
*sec
, unsigned long c
)
6778 dtype
.t
&= ~VT_CONSTANT
; /* need to do that to avoid false warning */
6782 /* XXX: not portable */
6783 /* XXX: generate error if incorrect relocation */
6784 gen_assign_cast(&dtype
);
6785 bt
= type
->t
& VT_BTYPE
;
/* Reject values that cannot be resolved to a load-time constant for a
   static initializer (symbol-relative values of the wrong width, bitfields,
   non-anonymous symbols). */
6787 if ((vtop
->r
& VT_SYM
)
6790 && (bt
!= (PTR_SIZE
== 8 ? VT_LLONG
: VT_INT
)
6791 || (type
->t
& VT_BITFIELD
))
6792 && !((vtop
->r
& VT_CONST
) && vtop
->sym
->v
>= SYM_FIRST_ANOM
)
6794 tcc_error("initializer element is not computable at load time");
6796 if (NODATA_WANTED
) {
6801 size
= type_size(type
, &align
);
6802 section_reserve(sec
, c
+ size
);
6803 ptr
= sec
->data
+ c
;
6805 /* XXX: make code faster ? */
6806 if ((vtop
->r
& (VT_SYM
|VT_CONST
)) == (VT_SYM
|VT_CONST
) &&
6807 vtop
->sym
->v
>= SYM_FIRST_ANOM
&&
6808 /* XXX This rejects compound literals like
6809 '(void *){ptr}'. The problem is that '&sym' is
6810 represented the same way, which would be ruled out
6811 by the SYM_FIRST_ANOM check above, but also '"string"'
6812 in 'char *p = "string"' is represented the same
6813 with the type being VT_PTR and the symbol being an
6814 anonymous one. That is, there's no difference in vtop
6815 between '(void *){x}' and '&(void *){x}'. Ignore
6816 pointer typed entities here. Hopefully no real code
6817 will every use compound literals with scalar type. */
6818 (vtop
->type
.t
& VT_BTYPE
) != VT_PTR
) {
6819 /* These come from compound literals, memcpy stuff over. */
6823 esym
= elfsym(vtop
->sym
);
6824 ssec
= tcc_state
->sections
[esym
->st_shndx
];
6825 memmove (ptr
, ssec
->data
+ esym
->st_value
, size
);
6827 /* We need to copy over all memory contents, and that
6828 includes relocations. Use the fact that relocs are
6829 created it order, so look from the end of relocs
6830 until we hit one before the copied region. */
6831 int num_relocs
= ssec
->reloc
->data_offset
/ sizeof(*rel
);
6832 rel
= (ElfW_Rel
*)(ssec
->reloc
->data
+ ssec
->reloc
->data_offset
);
6833 while (num_relocs
--) {
6835 if (rel
->r_offset
>= esym
->st_value
+ size
)
6837 if (rel
->r_offset
< esym
->st_value
)
6839 /* Note: if the same fields are initialized multiple
6840 times (possible with designators) then we possibly
6841 add multiple relocations for the same offset here.
6842 That would lead to wrong code, the last reloc needs
6843 to win. We clean this up later after the whole
6844 initializer is parsed. */
6845 put_elf_reloca(symtab_section
, sec
,
6846 c
+ rel
->r_offset
- esym
->st_value
,
6847 ELFW(R_TYPE
)(rel
->r_info
),
6848 ELFW(R_SYM
)(rel
->r_info
),
/* Bitfield store: merge the value into the byte(s) under a mask, chunk by
   chunk, so neighbouring bitfields already written are preserved (note the
   stores below use '|=' / masked assignment rather than plain '='). */
6858 if (type
->t
& VT_BITFIELD
) {
6859 int bit_pos
, bit_size
, bits
, n
;
6860 unsigned char *p
, v
, m
;
6861 bit_pos
= BIT_POS(vtop
->type
.t
);
6862 bit_size
= BIT_SIZE(vtop
->type
.t
);
6863 p
= (unsigned char*)ptr
+ (bit_pos
>> 3);
6864 bit_pos
&= 7, bits
= 0;
6869 v
= vtop
->c
.i
>> bits
<< bit_pos
;
6870 m
= ((1 << n
) - 1) << bit_pos
;
6871 *p
= (*p
& ~m
) | (v
& m
);
6872 bits
+= n
, bit_size
-= n
, bit_pos
= 0, ++p
;
6876 /* XXX: when cross-compiling we assume that each type has the
6877 same representation on host and target, which is likely to
6878 be wrong in the case of long double */
/* The stores below appear to be arms of a switch on 'bt' (the basic type);
   the 'case' labels themselves were lost in extraction — confirm against
   upstream tccgen.c. */
6880 vtop
->c
.i
= vtop
->c
.i
!= 0;
6882 *(char *)ptr
|= vtop
->c
.i
;
6885 *(short *)ptr
|= vtop
->c
.i
;
6888 *(float*)ptr
= vtop
->c
.f
;
6891 *(double *)ptr
= vtop
->c
.d
;
6894 #if defined TCC_IS_NATIVE_387
6895 if (sizeof (long double) >= 10) /* zero pad ten-byte LD */
6896 memcpy(ptr
, &vtop
->c
.ld
, 10);
6898 else if (sizeof (long double) == sizeof (double))
6899 __asm__("fldl %1\nfstpt %0\n" : "=m" (*ptr
) : "m" (vtop
->c
.ld
));
6901 else if (vtop
->c
.ld
== 0.0)
6905 if (sizeof(long double) == LDOUBLE_SIZE
)
6906 *(long double*)ptr
= vtop
->c
.ld
;
6907 else if (sizeof(double) == LDOUBLE_SIZE
)
6908 *(double *)ptr
= (double)vtop
->c
.ld
;
6910 tcc_error("can't cross compile long double constants");
6914 *(long long *)ptr
|= vtop
->c
.i
;
/* Pointer-sized store: emit a relocation when the value is symbol-relative
   (greloca with explicit addend vs. greloc with in-place addend). */
6921 addr_t val
= vtop
->c
.i
;
6923 if (vtop
->r
& VT_SYM
)
6924 greloca(sec
, vtop
->sym
, c
, R_DATA_PTR
, val
);
6926 *(addr_t
*)ptr
|= val
;
6928 if (vtop
->r
& VT_SYM
)
6929 greloc(sec
, vtop
->sym
, c
, R_DATA_PTR
);
6930 *(addr_t
*)ptr
|= val
;
6936 int val
= vtop
->c
.i
;
6938 if (vtop
->r
& VT_SYM
)
6939 greloca(sec
, vtop
->sym
, c
, R_DATA_PTR
, val
);
6943 if (vtop
->r
& VT_SYM
)
6944 greloc(sec
, vtop
->sym
, c
, R_DATA_PTR
);
/* No section (local/dynamic case): generate a runtime store instead. */
6953 vset(&dtype
, VT_LOCAL
|VT_LVAL
, c
);
/* NOTE(review): line-exploded extraction of tccgen.c — embedded original line
   numbers are non-contiguous, so interior source lines (braces, skip() calls,
   loop bodies) are missing.  Restore from the original file before compiling
   or editing the logic. */
6960 /* 't' contains the type and storage info. 'c' is the offset of the
6961 object in section 'sec'. If 'sec' is NULL, it means stack based
6962 allocation. 'flags & DIF_FIRST' is true if array '{' must be read (multi
6963 dimension implicit array init handling). 'flags & DIF_SIZE_ONLY' is true if
6964 size only evaluation is wanted (only for arrays). */
6965 static void decl_initializer(CType
*type
, Section
*sec
, unsigned long c
,
6968 int len
, n
, no_oblock
, nb
, i
;
/* Eagerly parse a scalar initializer element unless it is a string literal
   (strings get array-specific handling below) or we are only sizing. */
6974 if (!(flags
& DIF_HAVE_ELEM
) && tok
!= '{' &&
6975 /* In case of strings we have special handling for arrays, so
6976 don't consume them as initializer value (which would commit them
6977 to some anonymous symbol). */
6978 tok
!= TOK_LSTR
&& tok
!= TOK_STR
&&
6979 !(flags
& DIF_SIZE_ONLY
)) {
6980 parse_init_elem(!sec
? EXPR_ANY
: EXPR_CONST
);
6981 flags
|= DIF_HAVE_ELEM
;
6984 if ((flags
& DIF_HAVE_ELEM
) &&
6985 !(type
->t
& VT_ARRAY
) &&
6986 /* Use i_c_parameter_t, to strip toplevel qualifiers.
6987 The source type might have VT_CONSTANT set, which is
6988 of course assignable to non-const elements. */
6989 is_compatible_unqualified_types(type
, &vtop
->type
)) {
6990 init_putv(type
, sec
, c
);
6991 } else if (type
->t
& VT_ARRAY
) {
6994 t1
= pointed_type(type
);
6995 size1
= type_size(t1
, &align1
);
6998 if (((flags
& DIF_FIRST
) && tok
!= TOK_LSTR
&& tok
!= TOK_STR
) ||
7001 tcc_error("character array initializer must be a literal,"
7002 " optionally enclosed in braces");
7007 /* only parse strings here if correct type (otherwise: handle
7008 them as ((w)char *) expressions */
7009 if ((tok
== TOK_LSTR
&&
7010 #ifdef TCC_TARGET_PE
7011 (t1
->t
& VT_BTYPE
) == VT_SHORT
&& (t1
->t
& VT_UNSIGNED
)
7013 (t1
->t
& VT_BTYPE
) == VT_INT
7015 ) || (tok
== TOK_STR
&& (t1
->t
& VT_BTYPE
) == VT_BYTE
)) {
/* (Wide-)string initializer for a (w)char array: copy the literal's
   characters, clamped to the array size when it is known. */
7017 while (tok
== TOK_STR
|| tok
== TOK_LSTR
) {
7020 /* compute maximum number of chars wanted */
7022 cstr_len
= tokc
.str
.size
;
7024 cstr_len
= tokc
.str
.size
/ sizeof(nwchar_t
);
7027 if (n
>= 0 && nb
> (n
- len
))
7029 if (!(flags
& DIF_SIZE_ONLY
)) {
7031 tcc_warning("initializer-string for array is too long");
7032 /* in order to go faster for common case (char
7033 string in global variable, we handle it
7035 if (sec
&& tok
== TOK_STR
&& size1
== 1) {
7037 memcpy(sec
->data
+ c
+ len
, tokc
.str
.data
, nb
);
7041 ch
= ((unsigned char *)tokc
.str
.data
)[i
];
7043 ch
= ((nwchar_t
*)tokc
.str
.data
)[i
];
7045 init_putv(t1
, sec
, c
+ (len
+ i
) * size1
);
7052 /* only add trailing zero if enough storage (no
7053 warning in this case since it is standard) */
7054 if (n
< 0 || len
< n
) {
7055 if (!(flags
& DIF_SIZE_ONLY
)) {
7057 init_putv(t1
, sec
, c
+ (len
* size1
));
/* General brace-enclosed array initializer: one decl_designator() call per
   element until '}' (or the parsed element list is exhausted). */
7068 while (tok
!= '}' || (flags
& DIF_HAVE_ELEM
)) {
7069 len
= decl_designator(type
, sec
, c
, &f
, flags
, len
);
7070 flags
&= ~DIF_HAVE_ELEM
;
7071 if (type
->t
& VT_ARRAY
) {
7073 /* special test for multi dimensional arrays (may not
7074 be strictly correct if designators are used at the
7076 if (no_oblock
&& len
>= n
*size1
)
7079 if (s
->type
.t
== VT_UNION
)
7083 if (no_oblock
&& f
== NULL
)
7092 /* put zeros at the end */
7093 if (!(flags
& DIF_SIZE_ONLY
) && len
< n
*size1
)
7094 init_putz(sec
, c
+ len
, n
*size1
- len
);
7097 /* patch type size if needed, which happens only for array types */
7099 s
->c
= size1
== 1 ? len
: ((len
+ size1
- 1)/size1
);
7100 } else if ((type
->t
& VT_BTYPE
) == VT_STRUCT
) {
7103 if ((flags
& DIF_FIRST
) || tok
== '{') {
7111 } else if (tok
== '{') {
7112 if (flags
& DIF_HAVE_ELEM
)
/* Scalar wrapped in braces, e.g. 'int x = {1};' — recurse once. */
7115 decl_initializer(type
, sec
, c
, flags
& ~DIF_HAVE_ELEM
);
7117 } else if ((flags
& DIF_SIZE_ONLY
)) {
7118 /* If we supported only ISO C we wouldn't have to accept calling
7119 this on anything than an array if DIF_SIZE_ONLY (and even then
7120 only on the outermost level, so no recursion would be needed),
7121 because initializing a flex array member isn't supported.
7122 But GNU C supports it, so we need to recurse even into
7123 subfields of structs and arrays when DIF_SIZE_ONLY is set. */
7124 /* just skip expression */
7125 skip_or_save_block(NULL
);
7127 if (!(flags
& DIF_HAVE_ELEM
)) {
7128 /* This should happen only when we haven't parsed
7129 the init element above for fear of committing a
7130 string constant to memory too early. */
7131 if (tok
!= TOK_STR
&& tok
!= TOK_LSTR
)
7132 expect("string constant");
7133 parse_init_elem(!sec
? EXPR_ANY
: EXPR_CONST
);
7135 init_putv(type
, sec
, c
);
/* NOTE(review): line-exploded extraction of tccgen.c — embedded original line
   numbers jump, so interior source lines (closing braces, #endif lines,
   else-branches) are missing.  Restore from the original file before
   compiling or editing the logic. */
7139 /* parse an initializer for type 't' if 'has_init' is non zero, and
7140 allocate space in local or global data space ('r' is either
7141 VT_LOCAL or VT_CONST). If 'v' is non zero, then an associated
7142 variable 'v' of scope 'scope' is declared before initializers
7143 are parsed. If 'v' is zero, then a reference to the new object
7144 is put in the value stack. If 'has_init' is 2, a special parsing
7145 is done to handle string constants. */
7146 static void decl_initializer_alloc(CType
*type
, AttributeDef
*ad
, int r
,
7147 int has_init
, int v
, int scope
)
7149 int size
, align
, addr
;
7150 TokenString
*init_str
= NULL
;
7153 Sym
*flexible_array
;
7155 int saved_nocode_wanted
= nocode_wanted
;
7156 #ifdef CONFIG_TCC_BCHECK
7160 /* Always allocate static or global variables */
7161 if (v
&& (r
& VT_VALMASK
) == VT_CONST
)
7162 nocode_wanted
|= 0x80000000;
7164 #ifdef CONFIG_TCC_BCHECK
7165 bcheck
= tcc_state
->do_bounds_check
&& !NODATA_WANTED
;
/* Detect a trailing flexible array member (size < 0) in a struct type. */
7168 flexible_array
= NULL
;
7169 if ((type
->t
& VT_BTYPE
) == VT_STRUCT
) {
7170 Sym
*field
= type
->ref
->next
;
7173 field
= field
->next
;
7174 if (field
->type
.t
& VT_ARRAY
&& field
->type
.ref
->c
< 0)
7175 flexible_array
= field
;
7179 size
= type_size(type
, &align
);
7180 /* If unknown size, we must evaluate it before
7181 evaluating initializers because
7182 initializers can generate global data too
7183 (e.g. string pointers or ISOC99 compound
7184 literals). It also simplifies local
7185 initializers handling */
7186 if (size
< 0 || (flexible_array
&& has_init
)) {
7188 tcc_error("unknown type size");
7189 /* get all init string */
7190 if (has_init
== 2) {
7191 init_str
= tok_str_alloc();
7192 /* only get strings */
7193 while (tok
== TOK_STR
|| tok
== TOK_LSTR
) {
7194 tok_str_add_tok(init_str
);
7197 tok_str_add(init_str
, -1);
7198 tok_str_add(init_str
, 0);
7200 skip_or_save_block(&init_str
);
/* First pass over the saved initializer tokens: size-only evaluation to
   determine the object's real size, then rewind for the real pass. */
7205 begin_macro(init_str
, 1);
7207 decl_initializer(type
, NULL
, 0, DIF_FIRST
| DIF_SIZE_ONLY
);
7208 /* prepare second initializer parsing */
7209 macro_ptr
= init_str
->str
;
7212 /* if still unknown size, error */
7213 size
= type_size(type
, &align
);
7215 tcc_error("unknown type size");
7217 /* If there's a flex member and it was used in the initializer
7219 if (flexible_array
&&
7220 flexible_array
->type
.ref
->c
> 0)
7221 size
+= flexible_array
->type
.ref
->c
7222 * pointed_size(&flexible_array
->type
);
7223 /* take into account specified alignment if bigger */
7224 if (ad
->a
.aligned
) {
7225 int speca
= 1 << (ad
->a
.aligned
- 1);
7228 } else if (ad
->a
.packed
) {
7232 if (!v
&& NODATA_WANTED
)
7233 size
= 0, align
= 1;
/* Stack-based (local) allocation path. */
7235 if ((r
& VT_VALMASK
) == VT_LOCAL
) {
7237 #ifdef CONFIG_TCC_BCHECK
7238 if (bcheck
&& (type
->t
& VT_ARRAY
)) {
7242 loc
= (loc
- size
) & -align
;
7244 #ifdef CONFIG_TCC_BCHECK
7245 /* handles bounds */
7246 /* XXX: currently, since we do only one pass, we cannot track
7247 '&' operators, so we add only arrays */
7248 if (bcheck
&& (type
->t
& VT_ARRAY
)) {
7250 /* add padding between regions */
7252 /* then add local bound info */
7253 bounds_ptr
= section_ptr_add(lbounds_section
, 2 * sizeof(addr_t
));
7254 bounds_ptr
[0] = addr
;
7255 bounds_ptr
[1] = size
;
7259 /* local variable */
7260 #ifdef CONFIG_TCC_ASM
7261 if (ad
->asm_label
) {
7262 int reg
= asm_parse_regvar(ad
->asm_label
);
7264 r
= (r
& ~VT_VALMASK
) | reg
;
7267 sym
= sym_push(v
, type
, r
, addr
);
/* __attribute__((cleanup(fn))): record the cleanup on the cleanup stack so
   it runs when the variable goes out of scope. */
7268 if (ad
->cleanup_func
) {
7269 Sym
*cls
= sym_push2(&all_cleanups
, SYM_FIELD
| ++ncleanups
, 0, 0);
7270 cls
->prev_tok
= sym
;
7271 cls
->next
= ad
->cleanup_func
;
7272 cls
->ncl
= current_cleanups
;
7273 current_cleanups
= cls
;
7278 /* push local reference */
7279 vset(type
, r
, addr
);
/* Static/global allocation path. */
7282 if (v
&& scope
== VT_CONST
) {
7283 /* see if the symbol was already defined */
7286 patch_storage(sym
, ad
, type
);
7287 /* we accept several definitions of the same global variable. */
7288 if (!has_init
&& sym
->c
&& elfsym(sym
)->st_shndx
!= SHN_UNDEF
)
7293 /* allocate symbol in corresponding section */
7298 else if (tcc_state
->nocommon
)
7303 addr
= section_add(sec
, size
, align
);
7304 #ifdef CONFIG_TCC_BCHECK
7305 /* add padding if bound check */
7307 section_add(sec
, 1, 1);
7310 addr
= align
; /* SHN_COMMON is special, symbol value is align */
7311 sec
= common_section
;
7316 sym
= sym_push(v
, type
, r
| VT_SYM
, 0);
7317 patch_storage(sym
, ad
, NULL
);
7319 /* update symbol definition */
7320 put_extern_sym(sym
, sec
, addr
, size
);
7322 /* push global reference */
7323 vpush_ref(type
, sec
, addr
, size
);
7328 #ifdef CONFIG_TCC_BCHECK
7329 /* handles bounds now because the symbol must be defined
7330 before for the relocation */
7334 greloca(bounds_section
, sym
, bounds_section
->data_offset
, R_DATA_PTR
, 0);
7335 /* then add global bound info */
7336 bounds_ptr
= section_ptr_add(bounds_section
, 2 * sizeof(addr_t
));
7337 bounds_ptr
[0] = 0; /* relocated */
7338 bounds_ptr
[1] = size
;
/* VLA: allocate at runtime; save the stack pointer first so it can be
   restored when the VLA's scope ends. */
7343 if (type
->t
& VT_VLA
) {
7349 /* save current stack pointer */
7350 if (vlas_in_scope
== 0) {
7351 if (vla_sp_root_loc
== -1)
7352 vla_sp_root_loc
= (loc
-= PTR_SIZE
);
7353 gen_vla_sp_save(vla_sp_root_loc
);
7356 vla_runtime_type_size(type
, &a
);
7357 gen_vla_alloc(type
, a
);
7358 #if defined TCC_TARGET_PE && defined TCC_TARGET_X86_64
7359 /* on _WIN64, because of the function args scratch area, the
7360 result of alloca differs from RSP and is returned in RAX. */
7361 gen_vla_result(addr
), addr
= (loc
-= PTR_SIZE
);
7363 gen_vla_sp_save(addr
);
/* Second (real) initializer pass, followed by reloc de-duplication. */
7367 } else if (has_init
) {
7368 size_t oldreloc_offset
= 0;
7369 if (sec
&& sec
->reloc
)
7370 oldreloc_offset
= sec
->reloc
->data_offset
;
7371 decl_initializer(type
, sec
, addr
, DIF_FIRST
);
7372 if (sec
&& sec
->reloc
)
7373 squeeze_multi_relocs(sec
, oldreloc_offset
);
7374 /* patch flexible array member size back to -1, */
7375 /* for possible subsequent similar declarations */
7377 flexible_array
->type
.ref
->c
= -1;
7381 /* restore parse state if needed */
7387 nocode_wanted
= saved_nocode_wanted
;
/* NOTE(review): line-exploded extraction of tccgen.c — embedded original line
   numbers jump (e.g. 7399 -> 7401), so interior source lines are missing.
   Restore from the original file before compiling or editing the logic. */
7390 /* parse a function defined by symbol 'sym' and generate its code in
7391 'cur_text_section' */
7392 static void gen_function(Sym
*sym
)
7395 ind
= cur_text_section
->data_offset
;
/* Honour __attribute__((aligned(n))) on the function by padding the text
   section with NOPs up to the requested boundary. */
7396 if (sym
->a
.aligned
) {
7397 size_t newoff
= section_add(cur_text_section
, 0,
7398 1 << (sym
->a
.aligned
- 1));
7399 gen_fill_nops(newoff
- ind
);
7401 /* NOTE: we patch the symbol size later */
7402 put_extern_sym(sym
, cur_text_section
, ind
, 0);
7403 funcname
= get_tok_str(sym
->v
, NULL
);
7405 /* Initialize VLA state */
7407 vla_sp_root_loc
= -1;
7408 /* put debug symbol */
7409 tcc_debug_funcstart(tcc_state
, sym
);
7410 /* push a dummy symbol to enable local sym storage */
7411 sym_push2(&local_stack
, SYM_FIELD
, 0, 0);
7412 local_scope
= 1; /* for function parameters */
7413 gfunc_prolog(&sym
->type
);
7414 reset_local_scope();
7416 clear_temp_local_var_list();
/* Compile the function body, then apply the C99 rule that main() without an
   explicit return returns 0 (the 'return 0' emission itself is among the
   lines lost in extraction). */
7417 block(NULL
, NULL
, NULL
, NULL
, 0);
7418 if (!(nocode_wanted
& 0x20000000)
7419 && ((func_vt
.t
& VT_BTYPE
) == VT_INT
)
7420 && !strcmp (funcname
, "main"))
7424 gen_assign_cast(&func_vt
);
7425 gfunc_return(&func_vt
);
7430 cur_text_section
->data_offset
= ind
;
7431 label_pop(&global_label_stack
, NULL
, 0);
7432 /* reset local stack */
7433 reset_local_scope();
7434 sym_pop(&local_stack
, NULL
, 0);
7435 /* end of function */
7436 /* patch symbol size */
7437 elfsym(sym
)->st_size
= ind
- func_ind
;
7438 tcc_debug_funcend(tcc_state
, ind
- func_ind
);
7439 /* It's better to crash than to generate wrong code */
7440 cur_text_section
= NULL
;
7441 funcname
= ""; /* for safety */
7442 func_vt
.t
= VT_VOID
; /* for safety */
7443 func_var
= 0; /* for safety */
7444 ind
= 0; /* for safety */
7445 nocode_wanted
= 0x80000000;
/* NOTE(review): line-exploded extraction of tccgen.c — embedded original line
   numbers jump (e.g. 7470 -> 7474), so interior source lines are missing.
   Restore from the original file before compiling or editing the logic. */
/* gen_inline_functions: emit code for every recorded 'static inline'
   function that was actually referenced; repeats until a full sweep
   generates nothing new (generating one inline fn can reference another).
   Saves/restores file->line_num around the replay of the saved token
   streams. */
7449 static void gen_inline_functions(TCCState
*s
)
7452 int inline_generated
, i
, ln
;
7453 struct InlineFunc
*fn
;
7455 ln
= file
->line_num
;
7456 /* iterate while inline function are referenced */
7458 inline_generated
= 0;
7459 for (i
= 0; i
< s
->nb_inline_fns
; ++i
) {
7460 fn
= s
->inline_fns
[i
];
7462 if (sym
&& (sym
->c
|| !(sym
->type
.t
& VT_INLINE
))) {
7463 /* the function was used or forced (and then not internal):
7464 generate its code and convert it to a normal function */
7467 pstrcpy(file
->filename
, sizeof file
->filename
, fn
->filename
);
7468 begin_macro(fn
->func_str
, 1);
7470 cur_text_section
= text_section
;
7474 inline_generated
= 1;
7477 } while (inline_generated
);
7478 file
->line_num
= ln
;
/* NOTE(review): line-exploded extraction of tccgen.c — embedded original line
   numbers jump (7486 -> 7488), so interior source lines are missing.
   Restore from the original file before compiling or editing the logic. */
/* free_inline_functions: release the saved token strings of inline
   functions that were never emitted, then free the inline_fns array. */
7481 ST_FUNC
void free_inline_functions(TCCState
*s
)
7484 /* free tokens of unused inline functions */
7485 for (i
= 0; i
< s
->nb_inline_fns
; ++i
) {
7486 struct InlineFunc
*fn
= s
->inline_fns
[i
];
7488 tok_str_free(fn
->func_str
);
7490 dynarray_reset(&s
->inline_fns
, &s
->nb_inline_fns
);
/* NOTE(review): line-exploded extraction of tccgen.c — embedded original line
   numbers jump throughout, so interior source lines (conditions, braces,
   'next()' calls) are missing.  Restore from the original file before
   compiling or editing the logic. */
7493 /* 'l' is VT_LOCAL or VT_CONST to define default storage type, or VT_CMP
7494 if parsing old style parameter decl list (and FUNC_SYM is set then) */
7495 static int decl0(int l
, int is_for_loop_init
, Sym
*func_sym
)
7500 AttributeDef ad
, adbase
;
/* _Static_assert handling (error path visible; the condition/message
   parsing lines were lost in extraction). */
7503 if (tok
== TOK_STATIC_ASSERT
) {
7511 tcc_error("%s", get_tok_str(tok
, &tokc
));
/* No base type parsed: handle for-init end, stray ';', global asm blocks,
   and K&R implicit-int declarations. */
7517 if (!parse_btype(&btype
, &adbase
)) {
7518 if (is_for_loop_init
)
7520 /* skip redundant ';' if not in old parameter decl scope */
7521 if (tok
== ';' && l
!= VT_CMP
) {
7527 if (tok
== TOK_ASM1
|| tok
== TOK_ASM2
|| tok
== TOK_ASM3
) {
7528 /* global asm block */
7532 if (tok
>= TOK_UIDENT
) {
7533 /* special test for old K&R protos without explicit int
7534 type. Only accepted when defining global data */
7538 expect("declaration");
7543 if ((btype
.t
& VT_BTYPE
) == VT_STRUCT
) {
7544 int v
= btype
.ref
->v
;
7545 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) >= SYM_FIRST_ANOM
)
7546 tcc_warning("unnamed struct/union that defines no instances");
7550 if (IS_ENUM(btype
.t
)) {
7555 while (1) { /* iterate thru each declaration */
7557 /* If the base type itself was an array type of unspecified
7558 size (like in 'typedef int arr[]; arr x = {1};') then
7559 we will overwrite the unknown size by the real one for
7560 this decl. We need to unshare the ref symbol holding
7562 if ((type
.t
& VT_ARRAY
) && type
.ref
->c
< 0) {
7563 type
.ref
= sym_push(SYM_FIELD
, &type
.ref
->type
, 0, type
.ref
->c
);
7566 type_decl(&type
, &ad
, &v
, TYPE_DIRECT
);
7570 type_to_str(buf
, sizeof(buf
), &type
, get_tok_str(v
, NULL
));
7571 printf("type = '%s'\n", buf
);
7574 if ((type
.t
& VT_BTYPE
) == VT_FUNC
) {
7575 /* if old style function prototype, we accept a
7578 if (sym
->f
.func_type
== FUNC_OLD
&& l
== VT_CONST
)
7579 decl0(VT_CMP
, 0, sym
);
7580 /* always compile 'extern inline' */
7581 if (type
.t
& VT_EXTERN
)
7582 type
.t
&= ~VT_INLINE
;
7585 if (gnu_ext
&& (tok
== TOK_ASM1
|| tok
== TOK_ASM2
|| tok
== TOK_ASM3
)) {
7586 ad
.asm_label
= asm_label_instr();
7587 /* parse one last attribute list, after asm label */
7588 parse_attribute(&ad
);
7590 /* gcc does not allow __asm__("label") with function definition,
7597 #ifdef TCC_TARGET_PE
7598 if (ad
.a
.dllimport
|| ad
.a
.dllexport
) {
7599 if (type
.t
& (VT_STATIC
|VT_TYPEDEF
))
7600 tcc_error("cannot have dll linkage with static or typedef");
7601 if (ad
.a
.dllimport
) {
7602 if ((type
.t
& VT_BTYPE
) == VT_FUNC
)
7605 type
.t
|= VT_EXTERN
;
/* Function definition path ('{' after declarator — the detecting condition
   was lost in extraction). */
7611 tcc_error("cannot use local functions");
7612 if ((type
.t
& VT_BTYPE
) != VT_FUNC
)
7613 expect("function definition");
7615 /* reject abstract declarators in function definition
7616 make old style params without decl have int type */
7618 while ((sym
= sym
->next
) != NULL
) {
7619 if (!(sym
->v
& ~SYM_FIELD
))
7620 expect("identifier");
7621 if (sym
->type
.t
== VT_VOID
)
7622 sym
->type
= int_type
;
7625 /* put function symbol */
7626 type
.t
&= ~VT_EXTERN
;
7627 sym
= external_sym(v
, &type
, 0, &ad
);
7628 /* static inline functions are just recorded as a kind
7629 of macro. Their code will be emitted at the end of
7630 the compilation unit only if they are used */
7631 if (sym
->type
.t
& VT_INLINE
) {
7632 struct InlineFunc
*fn
;
7633 const char *filename
;
7635 filename
= file
? file
->filename
: "";
7636 fn
= tcc_malloc(sizeof *fn
+ strlen(filename
));
7637 strcpy(fn
->filename
, filename
);
7639 skip_or_save_block(&fn
->func_str
);
7640 dynarray_add(&tcc_state
->inline_fns
,
7641 &tcc_state
->nb_inline_fns
, fn
);
7643 /* compute text section */
7644 cur_text_section
= ad
.section
;
7645 if (!cur_text_section
)
7646 cur_text_section
= text_section
;
/* Old-style (K&R) parameter declaration path: match the declared name
   against the function's parameter list. */
7652 /* find parameter in function parameter list */
7653 for (sym
= func_sym
->next
; sym
; sym
= sym
->next
)
7654 if ((sym
->v
& ~SYM_FIELD
) == v
)
7656 tcc_error("declaration for parameter '%s' but no such parameter",
7657 get_tok_str(v
, NULL
));
7659 if (type
.t
& VT_STORAGE
) /* 'register' is okay */
7660 tcc_error("storage class specified for '%s'",
7661 get_tok_str(v
, NULL
));
7662 if (sym
->type
.t
!= VT_VOID
)
7663 tcc_error("redefinition of parameter '%s'",
7664 get_tok_str(v
, NULL
));
7665 convert_parameter_type(&type
);
7667 } else if (type
.t
& VT_TYPEDEF
) {
7668 /* save typedefed type */
7669 /* XXX: test storage specifiers ? */
7671 if (sym
&& sym
->sym_scope
== local_scope
) {
7672 if (!is_compatible_types(&sym
->type
, &type
)
7673 || !(sym
->type
.t
& VT_TYPEDEF
))
7674 tcc_error("incompatible redefinition of '%s'",
7675 get_tok_str(v
, NULL
));
7678 sym
= sym_push(v
, &type
, 0, 0);
7682 } else if ((type
.t
& VT_BTYPE
) == VT_VOID
7683 && !(type
.t
& VT_EXTERN
)) {
7684 tcc_error("declaration of void object");
/* Ordinary variable / extern declaration path. */
7687 if ((type
.t
& VT_BTYPE
) == VT_FUNC
) {
7688 /* external function definition */
7689 /* specific case for func_call attribute */
7691 } else if (!(type
.t
& VT_ARRAY
)) {
7692 /* not lvalue if array */
7693 r
|= lvalue_type(type
.t
);
7695 has_init
= (tok
== '=');
7696 if (has_init
&& (type
.t
& VT_VLA
))
7697 tcc_error("variable length array cannot be initialized");
7698 if (((type
.t
& VT_EXTERN
) && (!has_init
|| l
!= VT_CONST
))
7699 || (type
.t
& VT_BTYPE
) == VT_FUNC
7700 /* as with GCC, uninitialized global arrays with no size
7701 are considered extern: */
7702 || ((type
.t
& VT_ARRAY
) && !has_init
7703 && l
== VT_CONST
&& type
.ref
->c
< 0)
7705 /* external variable or function */
7706 type
.t
|= VT_EXTERN
;
7707 sym
= external_sym(v
, &type
, r
, &ad
);
7708 if (ad
.alias_target
) {
7711 alias_target
= sym_find(ad
.alias_target
);
7712 esym
= elfsym(alias_target
);
7714 tcc_error("unsupported forward __alias__ attribute");
7715 put_extern_sym2(sym
, esym
->st_shndx
, esym
->st_value
, esym
->st_size
, 0);
7718 if (type
.t
& VT_STATIC
)
7724 else if (l
== VT_CONST
)
7725 /* uninitialized global variables may be overridden */
7726 type
.t
|= VT_EXTERN
;
7727 decl_initializer_alloc(&type
, &ad
, r
, has_init
, v
, l
);
7731 if (is_for_loop_init
)
7743 static void decl(int l
)
7748 /* ------------------------------------------------------------------------- */