/*
 *  TCC - Tiny C Compiler
 *
 *  Copyright (c) 2001-2004 Fabrice Bellard
 *
 *  This library is free software; you can redistribute it and/or
 *  modify it under the terms of the GNU Lesser General Public
 *  License as published by the Free Software Foundation; either
 *  version 2 of the License, or (at your option) any later version.
 *
 *  This library is distributed in the hope that it will be useful,
 *  but WITHOUT ANY WARRANTY; without even the implied warranty of
 *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 *  Lesser General Public License for more details.
 *
 *  You should have received a copy of the GNU Lesser General Public
 *  License along with this library; if not, write to the Free Software
 *  Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
 */
22 /********************************************************/
23 /* global variables */
25 /* loc : local variable index
26 ind : output code index
28 anon_sym: anonymous symbol index
30 ST_DATA
int rsym
, anon_sym
, ind
, loc
;
32 ST_DATA Sym
*sym_free_first
;
33 ST_DATA
void **sym_pools
;
34 ST_DATA
int nb_sym_pools
;
36 ST_DATA Sym
*global_stack
;
37 ST_DATA Sym
*local_stack
;
38 ST_DATA Sym
*define_stack
;
39 ST_DATA Sym
*global_label_stack
;
40 ST_DATA Sym
*local_label_stack
;
42 static Sym
*all_cleanups
, *current_cleanups
, *pending_gotos
;
45 static int local_scope
;
47 static int section_sym
;
49 ST_DATA
int vlas_in_scope
; /* number of VLAs that are currently in scope */
50 ST_DATA
int vla_sp_root_loc
; /* vla_sp_loc for SP before any VLAs were pushed */
51 ST_DATA
int vla_sp_loc
; /* Pointer to variable holding location to store stack pointer on the stack when modifying stack pointer */
53 ST_DATA SValue __vstack
[1+VSTACK_SIZE
], *vtop
, *pvtop
;
55 ST_DATA
int const_wanted
; /* true if constant wanted */
56 ST_DATA
int nocode_wanted
; /* no code generation wanted */
57 #define NODATA_WANTED (nocode_wanted > 0) /* no static data output wanted either */
58 #define STATIC_DATA_WANTED (nocode_wanted & 0xC0000000) /* only static data output */
59 ST_DATA
int global_expr
; /* true if compound literals must be allocated globally (used during initializers parsing */
60 ST_DATA CType func_vt
; /* current function return type (used by return instruction) */
61 ST_DATA
int func_var
; /* true if current function is variadic (used by return instruction) */
63 ST_DATA
int last_line_num
, last_ind
, func_ind
; /* debug last line number and pc */
64 ST_DATA
const char *funcname
;
67 ST_DATA CType char_pointer_type
, func_old_type
, int_type
, size_type
, ptrdiff_type
;
69 ST_DATA
struct switch_t
{
73 } **p
; int n
; /* list of case ranges */
74 int def_sym
; /* default symbol */
75 } *cur_switch
; /* current switch */
77 #define MAX_TEMP_LOCAL_VARIABLE_NUMBER 0x4
78 /*list of temporary local variables on the stack in current function. */
79 ST_DATA
struct temp_local_variable
{
80 int location
; //offset on stack. Svalue.c.i
83 } arr_temp_local_vars
[MAX_TEMP_LOCAL_VARIABLE_NUMBER
];
84 short nb_temp_local_vars
;
86 /* ------------------------------------------------------------------------- */
88 static void gen_cast(CType
*type
);
89 static void gen_cast_s(int t
);
90 static inline CType
*pointed_type(CType
*type
);
91 static int is_compatible_types(CType
*type1
, CType
*type2
);
92 static int parse_btype(CType
*type
, AttributeDef
*ad
);
93 static CType
*type_decl(CType
*type
, AttributeDef
*ad
, int *v
, int td
);
94 static void parse_expr_type(CType
*type
);
95 static void init_putv(CType
*type
, Section
*sec
, unsigned long c
);
96 static void decl_initializer(CType
*type
, Section
*sec
, unsigned long c
, int flags
);
97 static void block(int *bsym
, int *csym
, int is_expr
);
98 static void decl_initializer_alloc(CType
*type
, AttributeDef
*ad
, int r
, int has_init
, int v
, int scope
);
99 static void decl(int l
);
100 static int decl0(int l
, int is_for_loop_init
, Sym
*);
101 static void expr_eq(void);
102 static void vla_runtime_type_size(CType
*type
, int *a
);
103 static void vla_sp_restore(void);
104 static void vla_sp_restore_root(void);
105 static int is_compatible_unqualified_types(CType
*type1
, CType
*type2
);
106 static inline int64_t expr_const64(void);
107 static void vpush64(int ty
, unsigned long long v
);
108 static void vpush(CType
*type
);
109 static int gvtst(int inv
, int t
);
110 static void gen_inline_functions(TCCState
*s
);
111 static void skip_or_save_block(TokenString
**str
);
112 static void gv_dup(void);
113 static int get_temp_local_var(int size
,int align
);
114 static void clear_temp_local_var_list();
117 static void reset_local_scope(void)
119 if (current_cleanups
)
120 tcc_error("ICE current_cleanups");
121 sym_pop(&all_cleanups
, NULL
, 0);
125 ST_INLN
int is_float(int t
)
129 return bt
== VT_LDOUBLE
|| bt
== VT_DOUBLE
|| bt
== VT_FLOAT
|| bt
== VT_QFLOAT
;
132 /* we use our own 'finite' function to avoid potential problems with
133 non standard math libs */
134 /* XXX: endianness dependent */
135 ST_FUNC
int ieee_finite(double d
)
138 memcpy(p
, &d
, sizeof(double));
139 return ((unsigned)((p
[1] | 0x800fffff) + 1)) >> 31;
142 /* compiling intel long double natively */
143 #if (defined __i386__ || defined __x86_64__) \
144 && (defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64)
145 # define TCC_IS_NATIVE_387
148 ST_FUNC
void test_lvalue(void)
150 if (!(vtop
->r
& VT_LVAL
))
154 ST_FUNC
void check_vstack(void)
157 tcc_error("internal compiler error: vstack leak (%d)", vtop
- pvtop
);
160 /* ------------------------------------------------------------------------- */
161 /* vstack debugging aid */
164 void pv (const char *lbl
, int a
, int b
)
167 for (i
= a
; i
< a
+ b
; ++i
) {
168 SValue
*p
= &vtop
[-i
];
169 printf("%s vtop[-%d] : type.t:%04x r:%04x r2:%04x c.i:%d\n",
170 lbl
, i
, p
->type
.t
, p
->r
, p
->r2
, (int)p
->c
.i
);
175 /* ------------------------------------------------------------------------- */
176 /* start of translation unit info */
177 ST_FUNC
void tcc_debug_start(TCCState
*s1
)
182 /* file info: full path + filename */
183 section_sym
= put_elf_sym(symtab_section
, 0, 0,
184 ELFW(ST_INFO
)(STB_LOCAL
, STT_SECTION
), 0,
185 text_section
->sh_num
, NULL
);
186 getcwd(buf
, sizeof(buf
));
188 normalize_slashes(buf
);
190 pstrcat(buf
, sizeof(buf
), "/");
191 put_stabs_r(buf
, N_SO
, 0, 0,
192 text_section
->data_offset
, text_section
, section_sym
);
193 put_stabs_r(file
->filename
, N_SO
, 0, 0,
194 text_section
->data_offset
, text_section
, section_sym
);
199 /* an elf symbol of type STT_FILE must be put so that STB_LOCAL
200 symbols can be safely used */
201 put_elf_sym(symtab_section
, 0, 0,
202 ELFW(ST_INFO
)(STB_LOCAL
, STT_FILE
), 0,
203 SHN_ABS
, file
->filename
);
206 /* put end of translation unit info */
207 ST_FUNC
void tcc_debug_end(TCCState
*s1
)
211 put_stabs_r(NULL
, N_SO
, 0, 0,
212 text_section
->data_offset
, text_section
, section_sym
);
216 /* generate line number info */
217 ST_FUNC
void tcc_debug_line(TCCState
*s1
)
221 if ((last_line_num
!= file
->line_num
|| last_ind
!= ind
)) {
222 put_stabn(N_SLINE
, 0, file
->line_num
, ind
- func_ind
);
224 last_line_num
= file
->line_num
;
228 /* put function symbol */
229 ST_FUNC
void tcc_debug_funcstart(TCCState
*s1
, Sym
*sym
)
237 /* XXX: we put here a dummy type */
238 snprintf(buf
, sizeof(buf
), "%s:%c1",
239 funcname
, sym
->type
.t
& VT_STATIC
? 'f' : 'F');
240 put_stabs_r(buf
, N_FUN
, 0, file
->line_num
, 0,
241 cur_text_section
, sym
->c
);
242 /* //gr gdb wants a line at the function */
243 put_stabn(N_SLINE
, 0, file
->line_num
, 0);
249 /* put function size */
250 ST_FUNC
void tcc_debug_funcend(TCCState
*s1
, int size
)
254 put_stabn(N_FUN
, 0, 0, size
);
257 /* ------------------------------------------------------------------------- */
258 ST_FUNC
int tccgen_compile(TCCState
*s1
)
260 cur_text_section
= NULL
;
262 anon_sym
= SYM_FIRST_ANOM
;
265 nocode_wanted
= 0x80000000;
267 /* define some often used types */
269 char_pointer_type
.t
= VT_BYTE
;
270 mk_pointer(&char_pointer_type
);
272 size_type
.t
= VT_INT
| VT_UNSIGNED
;
273 ptrdiff_type
.t
= VT_INT
;
275 size_type
.t
= VT_LLONG
| VT_UNSIGNED
;
276 ptrdiff_type
.t
= VT_LLONG
;
278 size_type
.t
= VT_LONG
| VT_LLONG
| VT_UNSIGNED
;
279 ptrdiff_type
.t
= VT_LONG
| VT_LLONG
;
281 func_old_type
.t
= VT_FUNC
;
282 func_old_type
.ref
= sym_push(SYM_FIELD
, &int_type
, 0, 0);
283 func_old_type
.ref
->f
.func_call
= FUNC_CDECL
;
284 func_old_type
.ref
->f
.func_type
= FUNC_OLD
;
288 #ifdef TCC_TARGET_ARM
293 printf("%s: **** new file\n", file
->filename
);
296 parse_flags
= PARSE_FLAG_PREPROCESS
| PARSE_FLAG_TOK_NUM
| PARSE_FLAG_TOK_STR
;
299 gen_inline_functions(s1
);
301 /* end of translation unit info */
306 /* ------------------------------------------------------------------------- */
307 ST_FUNC ElfSym
*elfsym(Sym
*s
)
311 return &((ElfSym
*)symtab_section
->data
)[s
->c
];
314 /* apply storage attributes to Elf symbol */
315 ST_FUNC
void update_storage(Sym
*sym
)
318 int sym_bind
, old_sym_bind
;
324 if (sym
->a
.visibility
)
325 esym
->st_other
= (esym
->st_other
& ~ELFW(ST_VISIBILITY
)(-1))
328 if (sym
->type
.t
& VT_STATIC
)
329 sym_bind
= STB_LOCAL
;
330 else if (sym
->a
.weak
)
333 sym_bind
= STB_GLOBAL
;
334 old_sym_bind
= ELFW(ST_BIND
)(esym
->st_info
);
335 if (sym_bind
!= old_sym_bind
) {
336 esym
->st_info
= ELFW(ST_INFO
)(sym_bind
, ELFW(ST_TYPE
)(esym
->st_info
));
340 if (sym
->a
.dllimport
)
341 esym
->st_other
|= ST_PE_IMPORT
;
342 if (sym
->a
.dllexport
)
343 esym
->st_other
|= ST_PE_EXPORT
;
347 printf("storage %s: bind=%c vis=%d exp=%d imp=%d\n",
348 get_tok_str(sym
->v
, NULL
),
349 sym_bind
== STB_WEAK
? 'w' : sym_bind
== STB_LOCAL
? 'l' : 'g',
357 /* ------------------------------------------------------------------------- */
358 /* update sym->c so that it points to an external symbol in section
359 'section' with value 'value' */
361 ST_FUNC
void put_extern_sym2(Sym
*sym
, int sh_num
,
362 addr_t value
, unsigned long size
,
363 int can_add_underscore
)
365 int sym_type
, sym_bind
, info
, other
, t
;
369 #ifdef CONFIG_TCC_BCHECK
374 name
= get_tok_str(sym
->v
, NULL
);
375 #ifdef CONFIG_TCC_BCHECK
376 if (tcc_state
->do_bounds_check
) {
377 /* XXX: avoid doing that for statics ? */
378 /* if bound checking is activated, we change some function
379 names by adding the "__bound" prefix */
382 /* XXX: we rely only on malloc hooks */
395 strcpy(buf
, "__bound_");
403 if ((t
& VT_BTYPE
) == VT_FUNC
) {
405 } else if ((t
& VT_BTYPE
) == VT_VOID
) {
406 sym_type
= STT_NOTYPE
;
408 sym_type
= STT_OBJECT
;
411 sym_bind
= STB_LOCAL
;
413 sym_bind
= STB_GLOBAL
;
416 if (sym_type
== STT_FUNC
&& sym
->type
.ref
) {
417 Sym
*ref
= sym
->type
.ref
;
418 if (ref
->a
.nodecorate
) {
419 can_add_underscore
= 0;
421 if (ref
->f
.func_call
== FUNC_STDCALL
&& can_add_underscore
) {
422 sprintf(buf1
, "_%s@%d", name
, ref
->f
.func_args
* PTR_SIZE
);
424 other
|= ST_PE_STDCALL
;
425 can_add_underscore
= 0;
429 if (tcc_state
->leading_underscore
&& can_add_underscore
) {
431 pstrcpy(buf1
+ 1, sizeof(buf1
) - 1, name
);
435 name
= get_tok_str(sym
->asm_label
, NULL
);
436 info
= ELFW(ST_INFO
)(sym_bind
, sym_type
);
437 sym
->c
= put_elf_sym(symtab_section
, value
, size
, info
, other
, sh_num
, name
);
440 esym
->st_value
= value
;
441 esym
->st_size
= size
;
442 esym
->st_shndx
= sh_num
;
447 ST_FUNC
void put_extern_sym(Sym
*sym
, Section
*section
,
448 addr_t value
, unsigned long size
)
450 int sh_num
= section
? section
->sh_num
: SHN_UNDEF
;
451 put_extern_sym2(sym
, sh_num
, value
, size
, 1);
454 /* add a new relocation entry to symbol 'sym' in section 's' */
455 ST_FUNC
void greloca(Section
*s
, Sym
*sym
, unsigned long offset
, int type
,
460 if (nocode_wanted
&& s
== cur_text_section
)
465 put_extern_sym(sym
, NULL
, 0, 0);
469 /* now we can add ELF relocation info */
470 put_elf_reloca(symtab_section
, s
, offset
, type
, c
, addend
);
474 ST_FUNC
void greloc(Section
*s
, Sym
*sym
, unsigned long offset
, int type
)
476 greloca(s
, sym
, offset
, type
, 0);
480 /* ------------------------------------------------------------------------- */
481 /* symbol allocator */
482 static Sym
*__sym_malloc(void)
484 Sym
*sym_pool
, *sym
, *last_sym
;
487 sym_pool
= tcc_malloc(SYM_POOL_NB
* sizeof(Sym
));
488 dynarray_add(&sym_pools
, &nb_sym_pools
, sym_pool
);
490 last_sym
= sym_free_first
;
492 for(i
= 0; i
< SYM_POOL_NB
; i
++) {
493 sym
->next
= last_sym
;
497 sym_free_first
= last_sym
;
501 static inline Sym
*sym_malloc(void)
505 sym
= sym_free_first
;
507 sym
= __sym_malloc();
508 sym_free_first
= sym
->next
;
511 sym
= tcc_malloc(sizeof(Sym
));
516 ST_INLN
void sym_free(Sym
*sym
)
519 sym
->next
= sym_free_first
;
520 sym_free_first
= sym
;
526 /* push, without hashing */
527 ST_FUNC Sym
*sym_push2(Sym
**ps
, int v
, int t
, int c
)
532 memset(s
, 0, sizeof *s
);
542 /* find a symbol and return its associated structure. 's' is the top
543 of the symbol stack */
544 ST_FUNC Sym
*sym_find2(Sym
*s
, int v
)
556 /* structure lookup */
557 ST_INLN Sym
*struct_find(int v
)
560 if ((unsigned)v
>= (unsigned)(tok_ident
- TOK_IDENT
))
562 return table_ident
[v
]->sym_struct
;
565 /* find an identifier */
566 ST_INLN Sym
*sym_find(int v
)
569 if ((unsigned)v
>= (unsigned)(tok_ident
- TOK_IDENT
))
571 return table_ident
[v
]->sym_identifier
;
574 static int sym_scope(Sym
*s
)
576 if (IS_ENUM_VAL (s
->type
.t
))
577 return s
->type
.ref
->sym_scope
;
582 /* push a given symbol on the symbol stack */
583 ST_FUNC Sym
*sym_push(int v
, CType
*type
, int r
, int c
)
592 s
= sym_push2(ps
, v
, type
->t
, c
);
593 s
->type
.ref
= type
->ref
;
595 /* don't record fields or anonymous symbols */
597 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
598 /* record symbol in token array */
599 ts
= table_ident
[(v
& ~SYM_STRUCT
) - TOK_IDENT
];
601 ps
= &ts
->sym_struct
;
603 ps
= &ts
->sym_identifier
;
606 s
->sym_scope
= local_scope
;
607 if (s
->prev_tok
&& sym_scope(s
->prev_tok
) == s
->sym_scope
)
608 tcc_error("redeclaration of '%s'",
609 get_tok_str(v
& ~SYM_STRUCT
, NULL
));
614 /* push a global identifier */
615 ST_FUNC Sym
*global_identifier_push(int v
, int t
, int c
)
618 s
= sym_push2(&global_stack
, v
, t
, c
);
619 /* don't record anonymous symbol */
620 if (v
< SYM_FIRST_ANOM
) {
621 ps
= &table_ident
[v
- TOK_IDENT
]->sym_identifier
;
622 /* modify the top most local identifier, so that
623 sym_identifier will point to 's' when popped */
624 while (*ps
!= NULL
&& (*ps
)->sym_scope
)
625 ps
= &(*ps
)->prev_tok
;
632 /* pop symbols until top reaches 'b'. If KEEP is non-zero don't really
633 pop them yet from the list, but do remove them from the token array. */
634 ST_FUNC
void sym_pop(Sym
**ptop
, Sym
*b
, int keep
)
644 /* remove symbol in token array */
646 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
647 ts
= table_ident
[(v
& ~SYM_STRUCT
) - TOK_IDENT
];
649 ps
= &ts
->sym_struct
;
651 ps
= &ts
->sym_identifier
;
662 /* ------------------------------------------------------------------------- */
664 static void vsetc(CType
*type
, int r
, CValue
*vc
)
668 if (vtop
>= vstack
+ (VSTACK_SIZE
- 1))
669 tcc_error("memory full (vstack)");
670 /* cannot let cpu flags if other instruction are generated. Also
671 avoid leaving VT_JMP anywhere except on the top of the stack
672 because it would complicate the code generator.
674 Don't do this when nocode_wanted. vtop might come from
675 !nocode_wanted regions (see 88_codeopt.c) and transforming
676 it to a register without actually generating code is wrong
677 as their value might still be used for real. All values
678 we push under nocode_wanted will eventually be popped
679 again, so that the VT_CMP/VT_JMP value will be in vtop
680 when code is unsuppressed again.
682 Same logic below in vswap(); */
683 if (vtop
>= vstack
&& !nocode_wanted
) {
684 v
= vtop
->r
& VT_VALMASK
;
685 if (v
== VT_CMP
|| (v
& ~1) == VT_JMP
)
697 ST_FUNC
void vswap(void)
700 /* cannot vswap cpu flags. See comment at vsetc() above */
701 if (vtop
>= vstack
&& !nocode_wanted
) {
702 int v
= vtop
->r
& VT_VALMASK
;
703 if (v
== VT_CMP
|| (v
& ~1) == VT_JMP
)
711 /* pop stack value */
712 ST_FUNC
void vpop(void)
715 v
= vtop
->r
& VT_VALMASK
;
716 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
717 /* for x86, we need to pop the FP stack */
719 o(0xd8dd); /* fstp %st(0) */
722 if (v
== VT_JMP
|| v
== VT_JMPI
) {
723 /* need to put correct jump if && or || without test */
729 /* push constant of type "type" with useless value */
730 ST_FUNC
void vpush(CType
*type
)
732 vset(type
, VT_CONST
, 0);
735 /* push integer constant */
736 ST_FUNC
void vpushi(int v
)
740 vsetc(&int_type
, VT_CONST
, &cval
);
743 /* push a pointer sized constant */
744 static void vpushs(addr_t v
)
748 vsetc(&size_type
, VT_CONST
, &cval
);
751 /* push arbitrary 64bit constant */
752 ST_FUNC
void vpush64(int ty
, unsigned long long v
)
759 vsetc(&ctype
, VT_CONST
, &cval
);
762 /* push long long constant */
763 static inline void vpushll(long long v
)
765 vpush64(VT_LLONG
, v
);
768 ST_FUNC
void vset(CType
*type
, int r
, int v
)
773 vsetc(type
, r
, &cval
);
776 static void vseti(int r
, int v
)
784 ST_FUNC
void vpushv(SValue
*v
)
786 if (vtop
>= vstack
+ (VSTACK_SIZE
- 1))
787 tcc_error("memory full (vstack)");
792 static void vdup(void)
797 /* rotate n first stack elements to the bottom
798 I1 ... In -> I2 ... In I1 [top is right]
800 ST_FUNC
void vrotb(int n
)
811 /* rotate the n elements before entry e towards the top
812 I1 ... In ... -> In I1 ... I(n-1) ... [top is right]
814 ST_FUNC
void vrote(SValue
*e
, int n
)
820 for(i
= 0;i
< n
- 1; i
++)
825 /* rotate n first stack elements to the top
826 I1 ... In -> In I1 ... I(n-1) [top is right]
828 ST_FUNC
void vrott(int n
)
833 /* push a symbol value of TYPE */
834 static inline void vpushsym(CType
*type
, Sym
*sym
)
838 vsetc(type
, VT_CONST
| VT_SYM
, &cval
);
842 /* Return a static symbol pointing to a section */
843 ST_FUNC Sym
*get_sym_ref(CType
*type
, Section
*sec
, unsigned long offset
, unsigned long size
)
849 sym
= global_identifier_push(v
, type
->t
| VT_STATIC
, 0);
850 sym
->type
.ref
= type
->ref
;
851 sym
->r
= VT_CONST
| VT_SYM
;
852 put_extern_sym(sym
, sec
, offset
, size
);
856 /* push a reference to a section offset by adding a dummy symbol */
857 static void vpush_ref(CType
*type
, Section
*sec
, unsigned long offset
, unsigned long size
)
859 vpushsym(type
, get_sym_ref(type
, sec
, offset
, size
));
862 /* define a new external reference to a symbol 'v' of type 'u' */
863 ST_FUNC Sym
*external_global_sym(int v
, CType
*type
, int r
)
869 /* push forward reference */
870 s
= global_identifier_push(v
, type
->t
| VT_EXTERN
, 0);
871 s
->type
.ref
= type
->ref
;
872 s
->r
= r
| VT_CONST
| VT_SYM
;
873 } else if (IS_ASM_SYM(s
)) {
874 s
->type
.t
= type
->t
| (s
->type
.t
& VT_EXTERN
);
875 s
->type
.ref
= type
->ref
;
881 /* Merge symbol attributes. */
882 static void merge_symattr(struct SymAttr
*sa
, struct SymAttr
*sa1
)
884 if (sa1
->aligned
&& !sa
->aligned
)
885 sa
->aligned
= sa1
->aligned
;
886 sa
->packed
|= sa1
->packed
;
887 sa
->weak
|= sa1
->weak
;
888 if (sa1
->visibility
!= STV_DEFAULT
) {
889 int vis
= sa
->visibility
;
890 if (vis
== STV_DEFAULT
891 || vis
> sa1
->visibility
)
892 vis
= sa1
->visibility
;
893 sa
->visibility
= vis
;
895 sa
->dllexport
|= sa1
->dllexport
;
896 sa
->nodecorate
|= sa1
->nodecorate
;
897 sa
->dllimport
|= sa1
->dllimport
;
900 /* Merge function attributes. */
901 static void merge_funcattr(struct FuncAttr
*fa
, struct FuncAttr
*fa1
)
903 if (fa1
->func_call
&& !fa
->func_call
)
904 fa
->func_call
= fa1
->func_call
;
905 if (fa1
->func_type
&& !fa
->func_type
)
906 fa
->func_type
= fa1
->func_type
;
907 if (fa1
->func_args
&& !fa
->func_args
)
908 fa
->func_args
= fa1
->func_args
;
911 /* Merge attributes. */
912 static void merge_attr(AttributeDef
*ad
, AttributeDef
*ad1
)
914 merge_symattr(&ad
->a
, &ad1
->a
);
915 merge_funcattr(&ad
->f
, &ad1
->f
);
918 ad
->section
= ad1
->section
;
919 if (ad1
->alias_target
)
920 ad
->alias_target
= ad1
->alias_target
;
922 ad
->asm_label
= ad1
->asm_label
;
924 ad
->attr_mode
= ad1
->attr_mode
;
927 /* Merge some type attributes. */
928 static void patch_type(Sym
*sym
, CType
*type
)
930 if (!(type
->t
& VT_EXTERN
) || IS_ENUM_VAL(sym
->type
.t
)) {
931 if (!(sym
->type
.t
& VT_EXTERN
))
932 tcc_error("redefinition of '%s'", get_tok_str(sym
->v
, NULL
));
933 sym
->type
.t
&= ~VT_EXTERN
;
936 if (IS_ASM_SYM(sym
)) {
937 /* stay static if both are static */
938 sym
->type
.t
= type
->t
& (sym
->type
.t
| ~VT_STATIC
);
939 sym
->type
.ref
= type
->ref
;
942 if (!is_compatible_types(&sym
->type
, type
)) {
943 tcc_error("incompatible types for redefinition of '%s'",
944 get_tok_str(sym
->v
, NULL
));
946 } else if ((sym
->type
.t
& VT_BTYPE
) == VT_FUNC
) {
947 int static_proto
= sym
->type
.t
& VT_STATIC
;
948 /* warn if static follows non-static function declaration */
949 if ((type
->t
& VT_STATIC
) && !static_proto
&& !(type
->t
& VT_INLINE
))
950 tcc_warning("static storage ignored for redefinition of '%s'",
951 get_tok_str(sym
->v
, NULL
));
953 if (0 == (type
->t
& VT_EXTERN
)) {
954 /* put complete type, use static from prototype */
955 sym
->type
.t
= (type
->t
& ~VT_STATIC
) | static_proto
;
956 if (type
->t
& VT_INLINE
)
957 sym
->type
.t
= type
->t
;
958 sym
->type
.ref
= type
->ref
;
962 if ((sym
->type
.t
& VT_ARRAY
) && type
->ref
->c
>= 0) {
963 /* set array size if it was omitted in extern declaration */
964 if (sym
->type
.ref
->c
< 0)
965 sym
->type
.ref
->c
= type
->ref
->c
;
966 else if (sym
->type
.ref
->c
!= type
->ref
->c
)
967 tcc_error("conflicting type for '%s'", get_tok_str(sym
->v
, NULL
));
969 if ((type
->t
^ sym
->type
.t
) & VT_STATIC
)
970 tcc_warning("storage mismatch for redefinition of '%s'",
971 get_tok_str(sym
->v
, NULL
));
976 /* Merge some storage attributes. */
977 static void patch_storage(Sym
*sym
, AttributeDef
*ad
, CType
*type
)
980 patch_type(sym
, type
);
983 if (sym
->a
.dllimport
!= ad
->a
.dllimport
)
984 tcc_error("incompatible dll linkage for redefinition of '%s'",
985 get_tok_str(sym
->v
, NULL
));
987 merge_symattr(&sym
->a
, &ad
->a
);
989 sym
->asm_label
= ad
->asm_label
;
993 /* define a new external reference to a symbol 'v' */
994 static Sym
*external_sym(int v
, CType
*type
, int r
, AttributeDef
*ad
)
998 if (!s
|| (!IS_ASM_SYM(s
) && !(s
->type
.t
& VT_EXTERN
)
999 && (s
->type
.t
& VT_BTYPE
) != VT_FUNC
)) {
1000 if (s
&& !is_compatible_types(&s
->type
, type
))
1001 tcc_error("conflicting types for '%s'", get_tok_str(s
->v
, NULL
));
1002 /* push forward reference */
1003 s
= sym_push(v
, type
, r
| VT_CONST
| VT_SYM
, 0);
1004 s
->type
.t
|= VT_EXTERN
;
1008 if (s
->type
.ref
== func_old_type
.ref
) {
1009 s
->type
.ref
= type
->ref
;
1010 s
->r
= r
| VT_CONST
| VT_SYM
;
1011 s
->type
.t
|= VT_EXTERN
;
1013 patch_storage(s
, ad
, type
);
1018 /* push a reference to global symbol v */
1019 ST_FUNC
void vpush_global_sym(CType
*type
, int v
)
1021 vpushsym(type
, external_global_sym(v
, type
, 0));
1024 /* save registers up to (vtop - n) stack entry */
1025 ST_FUNC
void save_regs(int n
)
1028 for(p
= vstack
, p1
= vtop
- n
; p
<= p1
; p
++)
1032 /* save r to the memory stack, and mark it as being free */
1033 ST_FUNC
void save_reg(int r
)
1035 save_reg_upstack(r
, 0);
1038 /* save r to the memory stack, and mark it as being free,
1039 if seen up to (vtop - n) stack entry */
1040 ST_FUNC
void save_reg_upstack(int r
, int n
)
1042 int l
, saved
, size
, align
;
1046 if ((r
&= VT_VALMASK
) >= VT_CONST
)
1051 /* modify all stack values */
1054 for(p
= vstack
, p1
= vtop
- n
; p
<= p1
; p
++) {
1055 if ((p
->r
& VT_VALMASK
) == r
||
1056 ((p
->type
.t
& VT_BTYPE
) == VT_LLONG
&& (p
->r2
& VT_VALMASK
) == r
)) {
1057 /* must save value on stack if not already done */
1059 /* NOTE: must reload 'r' because r might be equal to r2 */
1060 r
= p
->r
& VT_VALMASK
;
1061 /* store register in the stack */
1063 if ((p
->r
& VT_LVAL
) ||
1064 (!is_float(type
->t
) && (type
->t
& VT_BTYPE
) != VT_LLONG
))
1066 type
= &char_pointer_type
;
1070 size
= type_size(type
, &align
);
1071 l
=get_temp_local_var(size
,align
);
1072 sv
.type
.t
= type
->t
;
1073 sv
.r
= VT_LOCAL
| VT_LVAL
;
1076 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
1077 /* x86 specific: need to pop fp register ST0 if saved */
1078 if (r
== TREG_ST0
) {
1079 o(0xd8dd); /* fstp %st(0) */
1083 /* special long long case */
1084 if ((type
->t
& VT_BTYPE
) == VT_LLONG
) {
1091 /* mark that stack entry as being saved on the stack */
1092 if (p
->r
& VT_LVAL
) {
1093 /* also clear the bounded flag because the
1094 relocation address of the function was stored in
1096 p
->r
= (p
->r
& ~(VT_VALMASK
| VT_BOUNDED
)) | VT_LLOCAL
;
1098 p
->r
= lvalue_type(p
->type
.t
) | VT_LOCAL
;
1106 #ifdef TCC_TARGET_ARM
1107 /* find a register of class 'rc2' with at most one reference on stack.
1108 * If none, call get_reg(rc) */
1109 ST_FUNC
int get_reg_ex(int rc
, int rc2
)
1114 for(r
=0;r
<NB_REGS
;r
++) {
1115 if (reg_classes
[r
] & rc2
) {
1118 for(p
= vstack
; p
<= vtop
; p
++) {
1119 if ((p
->r
& VT_VALMASK
) == r
||
1120 (p
->r2
& VT_VALMASK
) == r
)
1131 /* find a free register of class 'rc'. If none, save one register */
1132 ST_FUNC
int get_reg(int rc
)
1137 /* find a free register */
1138 for(r
=0;r
<NB_REGS
;r
++) {
1139 if (reg_classes
[r
] & rc
) {
1142 for(p
=vstack
;p
<=vtop
;p
++) {
1143 if ((p
->r
& VT_VALMASK
) == r
||
1144 (p
->r2
& VT_VALMASK
) == r
)
1152 /* no register left : free the first one on the stack (VERY
1153 IMPORTANT to start from the bottom to ensure that we don't
1154 spill registers used in gen_opi()) */
1155 for(p
=vstack
;p
<=vtop
;p
++) {
1156 /* look at second register (if long long) */
1157 r
= p
->r2
& VT_VALMASK
;
1158 if (r
< VT_CONST
&& (reg_classes
[r
] & rc
))
1160 r
= p
->r
& VT_VALMASK
;
1161 if (r
< VT_CONST
&& (reg_classes
[r
] & rc
)) {
1167 /* Should never comes here */
1171 /* find a free temporary local variable (return the offset on stack) match the size and align. If none, add new temporary stack variable*/
1172 static int get_temp_local_var(int size
,int align
){
1174 struct temp_local_variable
*temp_var
;
1181 for(i
=0;i
<nb_temp_local_vars
;i
++){
1182 temp_var
=&arr_temp_local_vars
[i
];
1183 if(temp_var
->size
<size
||align
!=temp_var
->align
){
1186 /*check if temp_var is free*/
1188 for(p
=vstack
;p
<=vtop
;p
++) {
1190 if(r
==VT_LOCAL
||r
==VT_LLOCAL
){
1191 if(p
->c
.i
==temp_var
->location
){
1198 found_var
=temp_var
->location
;
1204 loc
= (loc
- size
) & -align
;
1205 if(nb_temp_local_vars
<MAX_TEMP_LOCAL_VARIABLE_NUMBER
){
1206 temp_var
=&arr_temp_local_vars
[i
];
1207 temp_var
->location
=loc
;
1208 temp_var
->size
=size
;
1209 temp_var
->align
=align
;
1210 nb_temp_local_vars
++;
1217 static void clear_temp_local_var_list(){
1218 nb_temp_local_vars
=0;
1221 /* move register 's' (of type 't') to 'r', and flush previous value of r to memory
1223 static void move_reg(int r
, int s
, int t
)
1237 /* get address of vtop (vtop MUST BE an lvalue) */
1238 ST_FUNC
void gaddrof(void)
1240 vtop
->r
&= ~VT_LVAL
;
1241 /* tricky: if saved lvalue, then we can go back to lvalue */
1242 if ((vtop
->r
& VT_VALMASK
) == VT_LLOCAL
)
1243 vtop
->r
= (vtop
->r
& ~(VT_VALMASK
| VT_LVAL_TYPE
)) | VT_LOCAL
| VT_LVAL
;
1248 #ifdef CONFIG_TCC_BCHECK
1249 /* generate lvalue bound code */
1250 static void gbound(void)
1255 vtop
->r
&= ~VT_MUSTBOUND
;
1256 /* if lvalue, then use checking code before dereferencing */
1257 if (vtop
->r
& VT_LVAL
) {
1258 /* if not VT_BOUNDED value, then make one */
1259 if (!(vtop
->r
& VT_BOUNDED
)) {
1260 lval_type
= vtop
->r
& (VT_LVAL_TYPE
| VT_LVAL
);
1261 /* must save type because we must set it to int to get pointer */
1263 vtop
->type
.t
= VT_PTR
;
1266 gen_bounded_ptr_add();
1267 vtop
->r
|= lval_type
;
1270 /* then check for dereferencing */
1271 gen_bounded_ptr_deref();
1276 static void incr_bf_adr(int o
)
1278 vtop
->type
= char_pointer_type
;
1282 vtop
->type
.t
= (vtop
->type
.t
& ~(VT_BTYPE
|VT_DEFSIGN
))
1283 | (VT_BYTE
|VT_UNSIGNED
);
1284 vtop
->r
= (vtop
->r
& ~VT_LVAL_TYPE
)
1285 | (VT_LVAL_BYTE
|VT_LVAL_UNSIGNED
|VT_LVAL
);
1288 /* single-byte load mode for packed or otherwise unaligned bitfields */
1289 static void load_packed_bf(CType
*type
, int bit_pos
, int bit_size
)
1292 save_reg_upstack(vtop
->r
, 1);
1293 vpush64(type
->t
& VT_BTYPE
, 0); // B X
1294 bits
= 0, o
= bit_pos
>> 3, bit_pos
&= 7;
1303 vpushi(bit_pos
), gen_op(TOK_SHR
), bit_pos
= 0; // X B Y
1305 vpushi((1 << n
) - 1), gen_op('&');
1308 vpushi(bits
), gen_op(TOK_SHL
);
1311 bits
+= n
, bit_size
-= n
, o
= 1;
1314 if (!(type
->t
& VT_UNSIGNED
)) {
1315 n
= ((type
->t
& VT_BTYPE
) == VT_LLONG
? 64 : 32) - bits
;
1316 vpushi(n
), gen_op(TOK_SHL
);
1317 vpushi(n
), gen_op(TOK_SAR
);
1321 /* single-byte store mode for packed or otherwise unaligned bitfields */
1322 static void store_packed_bf(int bit_pos
, int bit_size
)
1324 int bits
, n
, o
, m
, c
;
1326 c
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
1328 save_reg_upstack(vtop
->r
, 1);
1329 bits
= 0, o
= bit_pos
>> 3, bit_pos
&= 7;
1331 incr_bf_adr(o
); // X B
1333 c
? vdup() : gv_dup(); // B V X
1336 vpushi(bits
), gen_op(TOK_SHR
);
1338 vpushi(bit_pos
), gen_op(TOK_SHL
);
1343 m
= ((1 << n
) - 1) << bit_pos
;
1344 vpushi(m
), gen_op('&'); // X B V1
1345 vpushv(vtop
-1); // X B V1 B
1346 vpushi(m
& 0x80 ? ~m
& 0x7f : ~m
);
1347 gen_op('&'); // X B V1 B1
1348 gen_op('|'); // X B V2
1350 vdup(), vtop
[-1] = vtop
[-2]; // X B B V2
1351 vstore(), vpop(); // X B
1352 bits
+= n
, bit_size
-= n
, bit_pos
= 0, o
= 1;
1357 static int adjust_bf(SValue
*sv
, int bit_pos
, int bit_size
)
1360 if (0 == sv
->type
.ref
)
1362 t
= sv
->type
.ref
->auxtype
;
1363 if (t
!= -1 && t
!= VT_STRUCT
) {
1364 sv
->type
.t
= (sv
->type
.t
& ~VT_BTYPE
) | t
;
1365 sv
->r
= (sv
->r
& ~VT_LVAL_TYPE
) | lvalue_type(sv
->type
.t
);
1370 /* store vtop a register belonging to class 'rc'. lvalues are
1371 converted to values. Cannot be used if cannot be converted to
1372 register value (such as structures). */
1373 ST_FUNC
int gv(int rc
)
1375 int r
, bit_pos
, bit_size
, size
, align
, rc2
;
1377 /* NOTE: get_reg can modify vstack[] */
1378 if (vtop
->type
.t
& VT_BITFIELD
) {
1381 bit_pos
= BIT_POS(vtop
->type
.t
);
1382 bit_size
= BIT_SIZE(vtop
->type
.t
);
1383 /* remove bit field info to avoid loops */
1384 vtop
->type
.t
&= ~VT_STRUCT_MASK
;
1387 type
.t
= vtop
->type
.t
& VT_UNSIGNED
;
1388 if ((vtop
->type
.t
& VT_BTYPE
) == VT_BOOL
)
1389 type
.t
|= VT_UNSIGNED
;
1391 r
= adjust_bf(vtop
, bit_pos
, bit_size
);
1393 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
)
1398 if (r
== VT_STRUCT
) {
1399 load_packed_bf(&type
, bit_pos
, bit_size
);
1401 int bits
= (type
.t
& VT_BTYPE
) == VT_LLONG
? 64 : 32;
1402 /* cast to int to propagate signedness in following ops */
1404 /* generate shifts */
1405 vpushi(bits
- (bit_pos
+ bit_size
));
1407 vpushi(bits
- bit_size
);
1408 /* NOTE: transformed to SHR if unsigned */
1413 if (is_float(vtop
->type
.t
) &&
1414 (vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
) {
1415 unsigned long offset
;
1416 /* CPUs usually cannot use float constants, so we store them
1417 generically in data segment */
1418 size
= type_size(&vtop
->type
, &align
);
1420 size
= 0, align
= 1;
1421 offset
= section_add(data_section
, size
, align
);
1422 vpush_ref(&vtop
->type
, data_section
, offset
, size
);
1424 init_putv(&vtop
->type
, data_section
, offset
);
1427 #ifdef CONFIG_TCC_BCHECK
1428 if (vtop
->r
& VT_MUSTBOUND
)
1432 r
= vtop
->r
& VT_VALMASK
;
1433 rc2
= (rc
& RC_FLOAT
) ? RC_FLOAT
: RC_INT
;
1434 #ifndef TCC_TARGET_ARM64
1437 #ifdef TCC_TARGET_X86_64
1438 else if (rc
== RC_FRET
)
1442 /* need to reload if:
1444 - lvalue (need to dereference pointer)
1445 - already a register, but not in the right class */
1447 || (vtop
->r
& VT_LVAL
)
1448 || !(reg_classes
[r
] & rc
)
1450 || ((vtop
->type
.t
& VT_BTYPE
) == VT_QLONG
&& !(reg_classes
[vtop
->r2
] & rc2
))
1451 || ((vtop
->type
.t
& VT_BTYPE
) == VT_QFLOAT
&& !(reg_classes
[vtop
->r2
] & rc2
))
1453 || ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
&& !(reg_classes
[vtop
->r2
] & rc2
))
1459 if (((vtop
->type
.t
& VT_BTYPE
) == VT_QLONG
) || ((vtop
->type
.t
& VT_BTYPE
) == VT_QFLOAT
)) {
1460 int addr_type
= VT_LLONG
, load_size
= 8, load_type
= ((vtop
->type
.t
& VT_BTYPE
) == VT_QLONG
) ? VT_LLONG
: VT_DOUBLE
;
1462 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
) {
1463 int addr_type
= VT_INT
, load_size
= 4, load_type
= VT_INT
;
1464 unsigned long long ll
;
1466 int r2
, original_type
;
1467 original_type
= vtop
->type
.t
;
1468 /* two register type load : expand to two words
1471 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
) {
1474 vtop
->c
.i
= ll
; /* first word */
1476 vtop
->r
= r
; /* save register value */
1477 vpushi(ll
>> 32); /* second word */
1480 if (vtop
->r
& VT_LVAL
) {
1481 /* We do not want to modifier the long long
1482 pointer here, so the safest (and less
1483 efficient) is to save all the other registers
1484 in the stack. XXX: totally inefficient. */
1488 /* lvalue_save: save only if used further down the stack */
1489 save_reg_upstack(vtop
->r
, 1);
1491 /* load from memory */
1492 vtop
->type
.t
= load_type
;
1495 vtop
[-1].r
= r
; /* save register value */
1496 /* increment pointer to get second word */
1497 vtop
->type
.t
= addr_type
;
1502 vtop
->type
.t
= load_type
;
1504 /* move registers */
1507 vtop
[-1].r
= r
; /* save register value */
1508 vtop
->r
= vtop
[-1].r2
;
1510 /* Allocate second register. Here we rely on the fact that
1511 get_reg() tries first to free r2 of an SValue. */
1515 /* write second register */
1517 vtop
->type
.t
= original_type
;
1518 } else if ((vtop
->r
& VT_LVAL
) && !is_float(vtop
->type
.t
)) {
1520 /* lvalue of scalar type : need to use lvalue type
1521 because of possible cast */
1524 /* compute memory access type */
1525 if (vtop
->r
& VT_LVAL_BYTE
)
1527 else if (vtop
->r
& VT_LVAL_SHORT
)
1529 if (vtop
->r
& VT_LVAL_UNSIGNED
)
1533 /* restore wanted type */
1536 /* one register type load */
1541 #ifdef TCC_TARGET_C67
1542 /* uses register pairs for doubles */
1543 if ((vtop
->type
.t
& VT_BTYPE
) == VT_DOUBLE
)
1550 /* generate vtop[-1] and vtop[0] in resp. classes rc1 and rc2 */
1551 ST_FUNC
void gv2(int rc1
, int rc2
)
1555 /* generate more generic register first. But VT_JMP or VT_CMP
1556 values must be generated first in all cases to avoid possible
1558 v
= vtop
[0].r
& VT_VALMASK
;
1559 if (v
!= VT_CMP
&& (v
& ~1) != VT_JMP
&& rc1
<= rc2
) {
1564 /* test if reload is needed for first register */
1565 if ((vtop
[-1].r
& VT_VALMASK
) >= VT_CONST
) {
1575 /* test if reload is needed for first register */
1576 if ((vtop
[0].r
& VT_VALMASK
) >= VT_CONST
) {
1582 #ifndef TCC_TARGET_ARM64
1583 /* wrapper around RC_FRET to return a register by type */
1584 static int rc_fret(int t
)
1586 #ifdef TCC_TARGET_X86_64
1587 if (t
== VT_LDOUBLE
) {
1595 /* wrapper around REG_FRET to return a register by type */
1596 static int reg_fret(int t
)
1598 #ifdef TCC_TARGET_X86_64
1599 if (t
== VT_LDOUBLE
) {
1607 /* expand 64bit on stack in two ints */
1608 ST_FUNC
void lexpand(void)
1611 u
= vtop
->type
.t
& (VT_DEFSIGN
| VT_UNSIGNED
);
1612 v
= vtop
->r
& (VT_VALMASK
| VT_LVAL
);
1613 if (v
== VT_CONST
) {
1616 } else if (v
== (VT_LVAL
|VT_CONST
) || v
== (VT_LVAL
|VT_LOCAL
)) {
1622 vtop
[0].r
= vtop
[-1].r2
;
1623 vtop
[0].r2
= vtop
[-1].r2
= VT_CONST
;
1625 vtop
[0].type
.t
= vtop
[-1].type
.t
= VT_INT
| u
;
1630 /* build a long long from two ints */
1631 static void lbuild(int t
)
1633 gv2(RC_INT
, RC_INT
);
1634 vtop
[-1].r2
= vtop
[0].r
;
1635 vtop
[-1].type
.t
= t
;
1640 /* convert stack entry to register and duplicate its value in another
1642 static void gv_dup(void)
1649 if ((t
& VT_BTYPE
) == VT_LLONG
) {
1650 if (t
& VT_BITFIELD
) {
1660 /* stack: H L L1 H1 */
1670 /* duplicate value */
1675 #ifdef TCC_TARGET_X86_64
1676 if ((t
& VT_BTYPE
) == VT_LDOUBLE
) {
1686 load(r1
, &sv
); /* move r to r1 */
1688 /* duplicates value */
1694 /* Generate value test
1696 * Generate a test for any value (jump, comparison and integers) */
1697 ST_FUNC
int gvtst(int inv
, int t
)
1699 int v
= vtop
->r
& VT_VALMASK
;
1700 if (v
!= VT_CMP
&& v
!= VT_JMP
&& v
!= VT_JMPI
) {
1704 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
1705 /* constant jmp optimization */
1706 if ((vtop
->c
.i
!= 0) != inv
)
1711 return gtst(inv
, t
);
1715 /* generate CPU independent (unsigned) long long operations */
1716 static void gen_opl(int op
)
1718 int t
, a
, b
, op1
, c
, i
;
1720 unsigned short reg_iret
= REG_IRET
;
1721 unsigned short reg_lret
= REG_LRET
;
1727 func
= TOK___divdi3
;
1730 func
= TOK___udivdi3
;
1733 func
= TOK___moddi3
;
1736 func
= TOK___umoddi3
;
1743 /* call generic long long function */
1744 vpush_global_sym(&func_old_type
, func
);
1749 vtop
->r2
= reg_lret
;
1757 //pv("gen_opl A",0,2);
1763 /* stack: L1 H1 L2 H2 */
1768 vtop
[-2] = vtop
[-3];
1771 /* stack: H1 H2 L1 L2 */
1772 //pv("gen_opl B",0,4);
1778 /* stack: H1 H2 L1 L2 ML MH */
1781 /* stack: ML MH H1 H2 L1 L2 */
1785 /* stack: ML MH H1 L2 H2 L1 */
1790 /* stack: ML MH M1 M2 */
1793 } else if (op
== '+' || op
== '-') {
1794 /* XXX: add non carry method too (for MIPS or alpha) */
1800 /* stack: H1 H2 (L1 op L2) */
1803 gen_op(op1
+ 1); /* TOK_xxxC2 */
1806 /* stack: H1 H2 (L1 op L2) */
1809 /* stack: (L1 op L2) H1 H2 */
1811 /* stack: (L1 op L2) (H1 op H2) */
1819 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
1820 t
= vtop
[-1].type
.t
;
1824 /* stack: L H shift */
1826 /* constant: simpler */
1827 /* NOTE: all comments are for SHL. the other cases are
1828 done by swapping words */
1839 if (op
!= TOK_SAR
) {
1872 /* XXX: should provide a faster fallback on x86 ? */
1875 func
= TOK___ashrdi3
;
1878 func
= TOK___lshrdi3
;
1881 func
= TOK___ashldi3
;
1887 /* compare operations */
1893 /* stack: L1 H1 L2 H2 */
1895 vtop
[-1] = vtop
[-2];
1897 /* stack: L1 L2 H1 H2 */
1900 /* when values are equal, we need to compare low words. since
1901 the jump is inverted, we invert the test too. */
1904 else if (op1
== TOK_GT
)
1906 else if (op1
== TOK_ULT
)
1908 else if (op1
== TOK_UGT
)
1918 /* generate non equal test */
1924 /* compare low. Always unsigned */
1928 else if (op1
== TOK_LE
)
1930 else if (op1
== TOK_GT
)
1932 else if (op1
== TOK_GE
)
/* Signed 64-bit division expressed on unsigned operands.
   a and b hold two's-complement values; negation of a uint64_t is
   well-defined (modulo 2^64), so we divide the magnitudes and then
   re-apply the sign, avoiding signed-overflow UB entirely. */
static uint64_t gen_opic_sdiv(uint64_t a, uint64_t b)
{
    uint64_t ua, ub, q;

    ua = (a >> 63) ? -a : a;      /* |a| */
    ub = (b >> 63) ? -b : b;      /* |b| */
    q = ua / ub;
    /* quotient is negative iff operand signs differ */
    if ((a ^ b) >> 63)
        q = -q;
    return q;
}
/* Signed "less than" on unsigned 64-bit representations.
   Flipping the sign bit maps the signed range monotonically onto the
   unsigned range, so an unsigned compare then gives the signed order. */
static int gen_opic_lt(uint64_t a, uint64_t b)
{
    const uint64_t sign_bit = (uint64_t)1 << 63;
    return (a ^ sign_bit) < (b ^ sign_bit);
}
1954 /* handle integer constant optimizations and various machine
1956 static void gen_opic(int op
)
1958 SValue
*v1
= vtop
- 1;
1960 int t1
= v1
->type
.t
& VT_BTYPE
;
1961 int t2
= v2
->type
.t
& VT_BTYPE
;
1962 int c1
= (v1
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
1963 int c2
= (v2
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
1964 uint64_t l1
= c1
? v1
->c
.i
: 0;
1965 uint64_t l2
= c2
? v2
->c
.i
: 0;
1966 int shm
= (t1
== VT_LLONG
) ? 63 : 31;
1968 if (t1
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t1
!= VT_PTR
))
1969 l1
= ((uint32_t)l1
|
1970 (v1
->type
.t
& VT_UNSIGNED
? 0 : -(l1
& 0x80000000)));
1971 if (t2
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t2
!= VT_PTR
))
1972 l2
= ((uint32_t)l2
|
1973 (v2
->type
.t
& VT_UNSIGNED
? 0 : -(l2
& 0x80000000)));
1977 case '+': l1
+= l2
; break;
1978 case '-': l1
-= l2
; break;
1979 case '&': l1
&= l2
; break;
1980 case '^': l1
^= l2
; break;
1981 case '|': l1
|= l2
; break;
1982 case '*': l1
*= l2
; break;
1989 /* if division by zero, generate explicit division */
1992 tcc_error("division by zero in constant");
1996 default: l1
= gen_opic_sdiv(l1
, l2
); break;
1997 case '%': l1
= l1
- l2
* gen_opic_sdiv(l1
, l2
); break;
1998 case TOK_UDIV
: l1
= l1
/ l2
; break;
1999 case TOK_UMOD
: l1
= l1
% l2
; break;
2002 case TOK_SHL
: l1
<<= (l2
& shm
); break;
2003 case TOK_SHR
: l1
>>= (l2
& shm
); break;
2005 l1
= (l1
>> 63) ? ~(~l1
>> (l2
& shm
)) : l1
>> (l2
& shm
);
2008 case TOK_ULT
: l1
= l1
< l2
; break;
2009 case TOK_UGE
: l1
= l1
>= l2
; break;
2010 case TOK_EQ
: l1
= l1
== l2
; break;
2011 case TOK_NE
: l1
= l1
!= l2
; break;
2012 case TOK_ULE
: l1
= l1
<= l2
; break;
2013 case TOK_UGT
: l1
= l1
> l2
; break;
2014 case TOK_LT
: l1
= gen_opic_lt(l1
, l2
); break;
2015 case TOK_GE
: l1
= !gen_opic_lt(l1
, l2
); break;
2016 case TOK_LE
: l1
= !gen_opic_lt(l2
, l1
); break;
2017 case TOK_GT
: l1
= gen_opic_lt(l2
, l1
); break;
2019 case TOK_LAND
: l1
= l1
&& l2
; break;
2020 case TOK_LOR
: l1
= l1
|| l2
; break;
2024 if (t1
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t1
!= VT_PTR
))
2025 l1
= ((uint32_t)l1
|
2026 (v1
->type
.t
& VT_UNSIGNED
? 0 : -(l1
& 0x80000000)));
2030 /* if commutative ops, put c2 as constant */
2031 if (c1
&& (op
== '+' || op
== '&' || op
== '^' ||
2032 op
== '|' || op
== '*')) {
2034 c2
= c1
; //c = c1, c1 = c2, c2 = c;
2035 l2
= l1
; //l = l1, l1 = l2, l2 = l;
2037 if (!const_wanted
&&
2039 (op
== TOK_SHL
|| op
== TOK_SHR
|| op
== TOK_SAR
)) ||
2040 (l1
== -1 && op
== TOK_SAR
))) {
2041 /* treat (0 << x), (0 >> x) and (-1 >> x) as constant */
2043 } else if (!const_wanted
&&
2044 c2
&& ((l2
== 0 && (op
== '&' || op
== '*')) ||
2046 (l2
== -1 || (l2
== 0xFFFFFFFF && t2
!= VT_LLONG
))) ||
2047 (l2
== 1 && (op
== '%' || op
== TOK_UMOD
)))) {
2048 /* treat (x & 0), (x * 0), (x | -1) and (x % 1) as constant */
2053 } else if (c2
&& (((op
== '*' || op
== '/' || op
== TOK_UDIV
||
2056 ((op
== '+' || op
== '-' || op
== '|' || op
== '^' ||
2057 op
== TOK_SHL
|| op
== TOK_SHR
|| op
== TOK_SAR
) &&
2060 (l2
== -1 || (l2
== 0xFFFFFFFF && t2
!= VT_LLONG
))))) {
2061 /* filter out NOP operations like x*1, x-0, x&-1... */
2063 } else if (c2
&& (op
== '*' || op
== TOK_PDIV
|| op
== TOK_UDIV
)) {
2064 /* try to use shifts instead of muls or divs */
2065 if (l2
> 0 && (l2
& (l2
- 1)) == 0) {
2074 else if (op
== TOK_PDIV
)
2080 } else if (c2
&& (op
== '+' || op
== '-') &&
2081 (((vtop
[-1].r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == (VT_CONST
| VT_SYM
))
2082 || (vtop
[-1].r
& (VT_VALMASK
| VT_LVAL
)) == VT_LOCAL
)) {
2083 /* symbol + constant case */
2087 /* The backends can't always deal with addends to symbols
2088 larger than +-1<<31. Don't construct such. */
2095 /* call low level op generator */
2096 if (t1
== VT_LLONG
|| t2
== VT_LLONG
||
2097 (PTR_SIZE
== 8 && (t1
== VT_PTR
|| t2
== VT_PTR
)))
2105 /* generate a floating point operation with constant propagation */
2106 static void gen_opif(int op
)
2110 #if defined _MSC_VER && defined _AMD64_
2111 /* avoid bad optimization with f1 -= f2 for f1:-0.0, f2:0.0 */
2118 /* currently, we cannot do computations with forward symbols */
2119 c1
= (v1
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2120 c2
= (v2
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2122 if (v1
->type
.t
== VT_FLOAT
) {
2125 } else if (v1
->type
.t
== VT_DOUBLE
) {
2133 /* NOTE: we only do constant propagation if finite number (not
2134 NaN or infinity) (ANSI spec) */
2135 if (!ieee_finite(f1
) || !ieee_finite(f2
))
2139 case '+': f1
+= f2
; break;
2140 case '-': f1
-= f2
; break;
2141 case '*': f1
*= f2
; break;
2144 /* If not in initializer we need to potentially generate
2145 FP exceptions at runtime, otherwise we want to fold. */
2151 /* XXX: also handles tests ? */
2155 /* XXX: overflow test ? */
2156 if (v1
->type
.t
== VT_FLOAT
) {
2158 } else if (v1
->type
.t
== VT_DOUBLE
) {
2170 static int pointed_size(CType
*type
)
2173 return type_size(pointed_type(type
), &align
);
2176 static void vla_runtime_pointed_size(CType
*type
)
2179 vla_runtime_type_size(pointed_type(type
), &align
);
2182 static inline int is_null_pointer(SValue
*p
)
2184 if ((p
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) != VT_CONST
)
2186 return ((p
->type
.t
& VT_BTYPE
) == VT_INT
&& (uint32_t)p
->c
.i
== 0) ||
2187 ((p
->type
.t
& VT_BTYPE
) == VT_LLONG
&& p
->c
.i
== 0) ||
2188 ((p
->type
.t
& VT_BTYPE
) == VT_PTR
&&
2189 (PTR_SIZE
== 4 ? (uint32_t)p
->c
.i
== 0 : p
->c
.i
== 0) &&
2190 ((pointed_type(&p
->type
)->t
& VT_BTYPE
) == VT_VOID
) &&
2191 0 == (pointed_type(&p
->type
)->t
& (VT_CONSTANT
| VT_VOLATILE
))
2194 static inline int is_integer_btype(int bt
)
2196 return (bt
== VT_BYTE
|| bt
== VT_SHORT
||
2197 bt
== VT_INT
|| bt
== VT_LLONG
);
2200 /* check types for comparison or subtraction of pointers */
2201 static void check_comparison_pointer_types(SValue
*p1
, SValue
*p2
, int op
)
2203 CType
*type1
, *type2
, tmp_type1
, tmp_type2
;
2206 /* null pointers are accepted for all comparisons as gcc */
2207 if (is_null_pointer(p1
) || is_null_pointer(p2
))
2211 bt1
= type1
->t
& VT_BTYPE
;
2212 bt2
= type2
->t
& VT_BTYPE
;
2213 /* accept comparison between pointer and integer with a warning */
2214 if ((is_integer_btype(bt1
) || is_integer_btype(bt2
)) && op
!= '-') {
2215 if (op
!= TOK_LOR
&& op
!= TOK_LAND
)
2216 tcc_warning("comparison between pointer and integer");
2220 /* both must be pointers or implicit function pointers */
2221 if (bt1
== VT_PTR
) {
2222 type1
= pointed_type(type1
);
2223 } else if (bt1
!= VT_FUNC
)
2224 goto invalid_operands
;
2226 if (bt2
== VT_PTR
) {
2227 type2
= pointed_type(type2
);
2228 } else if (bt2
!= VT_FUNC
) {
2230 tcc_error("invalid operands to binary %s", get_tok_str(op
, NULL
));
2232 if ((type1
->t
& VT_BTYPE
) == VT_VOID
||
2233 (type2
->t
& VT_BTYPE
) == VT_VOID
)
2237 tmp_type1
.t
&= ~(VT_DEFSIGN
| VT_UNSIGNED
| VT_CONSTANT
| VT_VOLATILE
);
2238 tmp_type2
.t
&= ~(VT_DEFSIGN
| VT_UNSIGNED
| VT_CONSTANT
| VT_VOLATILE
);
2239 if (!is_compatible_types(&tmp_type1
, &tmp_type2
)) {
2240 /* gcc-like error if '-' is used */
2242 goto invalid_operands
;
2244 tcc_warning("comparison of distinct pointer types lacks a cast");
2248 /* generic gen_op: handles types problems */
2249 ST_FUNC
void gen_op(int op
)
2251 int u
, t1
, t2
, bt1
, bt2
, t
;
2255 t1
= vtop
[-1].type
.t
;
2256 t2
= vtop
[0].type
.t
;
2257 bt1
= t1
& VT_BTYPE
;
2258 bt2
= t2
& VT_BTYPE
;
2260 if (bt1
== VT_STRUCT
|| bt2
== VT_STRUCT
) {
2261 tcc_error("operation on a struct");
2262 } else if (bt1
== VT_FUNC
|| bt2
== VT_FUNC
) {
2263 if (bt2
== VT_FUNC
) {
2264 mk_pointer(&vtop
->type
);
2267 if (bt1
== VT_FUNC
) {
2269 mk_pointer(&vtop
->type
);
2274 } else if (bt1
== VT_PTR
|| bt2
== VT_PTR
) {
2275 /* at least one operand is a pointer */
2276 /* relational op: must be both pointers */
2277 if (op
>= TOK_ULT
&& op
<= TOK_LOR
) {
2278 check_comparison_pointer_types(vtop
- 1, vtop
, op
);
2279 /* pointers are handled are unsigned */
2281 t
= VT_LLONG
| VT_UNSIGNED
;
2283 t
= VT_INT
| VT_UNSIGNED
;
2287 /* if both pointers, then it must be the '-' op */
2288 if (bt1
== VT_PTR
&& bt2
== VT_PTR
) {
2290 tcc_error("cannot use pointers here");
2291 check_comparison_pointer_types(vtop
- 1, vtop
, op
);
2292 /* XXX: check that types are compatible */
2293 if (vtop
[-1].type
.t
& VT_VLA
) {
2294 vla_runtime_pointed_size(&vtop
[-1].type
);
2296 vpushi(pointed_size(&vtop
[-1].type
));
2300 vtop
->type
.t
= ptrdiff_type
.t
;
2304 /* exactly one pointer : must be '+' or '-'. */
2305 if (op
!= '-' && op
!= '+')
2306 tcc_error("cannot use pointers here");
2307 /* Put pointer as first operand */
2308 if (bt2
== VT_PTR
) {
2310 t
= t1
, t1
= t2
, t2
= t
;
2313 if ((vtop
[0].type
.t
& VT_BTYPE
) == VT_LLONG
)
2314 /* XXX: truncate here because gen_opl can't handle ptr + long long */
2317 type1
= vtop
[-1].type
;
2318 type1
.t
&= ~VT_ARRAY
;
2319 if (vtop
[-1].type
.t
& VT_VLA
)
2320 vla_runtime_pointed_size(&vtop
[-1].type
);
2322 u
= pointed_size(&vtop
[-1].type
);
2324 tcc_error("unknown array element size");
2328 /* XXX: cast to int ? (long long case) */
2334 /* #ifdef CONFIG_TCC_BCHECK
2335 The main reason to removing this code:
2342 fprintf(stderr, "v+i-j = %p\n", v+i-j);
2343 fprintf(stderr, "v+(i-j) = %p\n", v+(i-j));
2345 When this code is on. then the output looks like
2347 v+(i-j) = 0xbff84000
2349 /* if evaluating constant expression, no code should be
2350 generated, so no bound check */
2351 if (tcc_state
->do_bounds_check
&& !const_wanted
) {
2352 /* if bounded pointers, we generate a special code to
2359 gen_bounded_ptr_add();
2365 /* put again type if gen_opic() swaped operands */
2368 } else if (is_float(bt1
) || is_float(bt2
)) {
2369 /* compute bigger type and do implicit casts */
2370 if (bt1
== VT_LDOUBLE
|| bt2
== VT_LDOUBLE
) {
2372 } else if (bt1
== VT_DOUBLE
|| bt2
== VT_DOUBLE
) {
2377 /* floats can only be used for a few operations */
2378 if (op
!= '+' && op
!= '-' && op
!= '*' && op
!= '/' &&
2379 (op
< TOK_ULT
|| op
> TOK_GT
))
2380 tcc_error("invalid operands for binary operation");
2382 } else if (op
== TOK_SHR
|| op
== TOK_SAR
|| op
== TOK_SHL
) {
2383 t
= bt1
== VT_LLONG
? VT_LLONG
: VT_INT
;
2384 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (t
| VT_UNSIGNED
))
2386 t
|= (VT_LONG
& t1
);
2388 } else if (bt1
== VT_LLONG
|| bt2
== VT_LLONG
) {
2389 /* cast to biggest op */
2390 t
= VT_LLONG
| VT_LONG
;
2391 if (bt1
== VT_LLONG
)
2393 if (bt2
== VT_LLONG
)
2395 /* convert to unsigned if it does not fit in a long long */
2396 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
) ||
2397 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
))
2401 /* integer operations */
2402 t
= VT_INT
| (VT_LONG
& (t1
| t2
));
2403 /* convert to unsigned if it does not fit in an integer */
2404 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
) ||
2405 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
))
2408 /* XXX: currently, some unsigned operations are explicit, so
2409 we modify them here */
2410 if (t
& VT_UNSIGNED
) {
2417 else if (op
== TOK_LT
)
2419 else if (op
== TOK_GT
)
2421 else if (op
== TOK_LE
)
2423 else if (op
== TOK_GE
)
2431 /* special case for shifts and long long: we keep the shift as
2433 if (op
== TOK_SHR
|| op
== TOK_SAR
|| op
== TOK_SHL
)
2440 if (op
>= TOK_ULT
&& op
<= TOK_GT
) {
2441 /* relational op: the result is an int */
2442 vtop
->type
.t
= VT_INT
;
2447 // Make sure that we have converted to an rvalue:
2448 if (vtop
->r
& VT_LVAL
)
2449 gv(is_float(vtop
->type
.t
& VT_BTYPE
) ? RC_FLOAT
: RC_INT
);
2452 #ifndef TCC_TARGET_ARM
2453 /* generic itof for unsigned long long case */
2454 static void gen_cvt_itof1(int t
)
2456 #ifdef TCC_TARGET_ARM64
2459 if ((vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
)) ==
2460 (VT_LLONG
| VT_UNSIGNED
)) {
2463 vpush_global_sym(&func_old_type
, TOK___floatundisf
);
2464 #if LDOUBLE_SIZE != 8
2465 else if (t
== VT_LDOUBLE
)
2466 vpush_global_sym(&func_old_type
, TOK___floatundixf
);
2469 vpush_global_sym(&func_old_type
, TOK___floatundidf
);
2473 vtop
->r
= reg_fret(t
);
2481 /* generic ftoi for unsigned long long case */
2482 static void gen_cvt_ftoi1(int t
)
2484 #ifdef TCC_TARGET_ARM64
2489 if (t
== (VT_LLONG
| VT_UNSIGNED
)) {
2490 /* not handled natively */
2491 st
= vtop
->type
.t
& VT_BTYPE
;
2493 vpush_global_sym(&func_old_type
, TOK___fixunssfdi
);
2494 #if LDOUBLE_SIZE != 8
2495 else if (st
== VT_LDOUBLE
)
2496 vpush_global_sym(&func_old_type
, TOK___fixunsxfdi
);
2499 vpush_global_sym(&func_old_type
, TOK___fixunsdfdi
);
2504 vtop
->r2
= REG_LRET
;
2511 /* force char or short cast */
2512 static void force_charshort_cast(int t
)
2516 /* cannot cast static initializers */
2517 if (STATIC_DATA_WANTED
)
2521 /* XXX: add optimization if lvalue : just change type and offset */
2526 if (t
& VT_UNSIGNED
) {
2527 vpushi((1 << bits
) - 1);
2530 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
)
2536 /* result must be signed or the SAR is converted to an SHL
2537 This was not the case when "t" was a signed short
2538 and the last value on the stack was an unsigned int */
2539 vtop
->type
.t
&= ~VT_UNSIGNED
;
2545 /* cast 'vtop' to 'type'. Casting to bitfields is forbidden. */
2546 static void gen_cast_s(int t
)
2554 static void gen_cast(CType
*type
)
2556 int sbt
, dbt
, sf
, df
, c
, p
;
2558 /* special delayed cast for char/short */
2559 /* XXX: in some cases (multiple cascaded casts), it may still
2561 if (vtop
->r
& VT_MUSTCAST
) {
2562 vtop
->r
&= ~VT_MUSTCAST
;
2563 force_charshort_cast(vtop
->type
.t
);
2566 /* bitfields first get cast to ints */
2567 if (vtop
->type
.t
& VT_BITFIELD
) {
2571 dbt
= type
->t
& (VT_BTYPE
| VT_UNSIGNED
);
2572 sbt
= vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
);
2577 c
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2578 p
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == (VT_CONST
| VT_SYM
);
2579 #if !defined TCC_IS_NATIVE && !defined TCC_IS_NATIVE_387
2580 c
&= dbt
!= VT_LDOUBLE
;
2583 /* constant case: we can do it now */
2584 /* XXX: in ISOC, cannot do it if error in convert */
2585 if (sbt
== VT_FLOAT
)
2586 vtop
->c
.ld
= vtop
->c
.f
;
2587 else if (sbt
== VT_DOUBLE
)
2588 vtop
->c
.ld
= vtop
->c
.d
;
2591 if ((sbt
& VT_BTYPE
) == VT_LLONG
) {
2592 if ((sbt
& VT_UNSIGNED
) || !(vtop
->c
.i
>> 63))
2593 vtop
->c
.ld
= vtop
->c
.i
;
2595 vtop
->c
.ld
= -(long double)-vtop
->c
.i
;
2597 if ((sbt
& VT_UNSIGNED
) || !(vtop
->c
.i
>> 31))
2598 vtop
->c
.ld
= (uint32_t)vtop
->c
.i
;
2600 vtop
->c
.ld
= -(long double)-(uint32_t)vtop
->c
.i
;
2603 if (dbt
== VT_FLOAT
)
2604 vtop
->c
.f
= (float)vtop
->c
.ld
;
2605 else if (dbt
== VT_DOUBLE
)
2606 vtop
->c
.d
= (double)vtop
->c
.ld
;
2607 } else if (sf
&& dbt
== (VT_LLONG
|VT_UNSIGNED
)) {
2608 vtop
->c
.i
= vtop
->c
.ld
;
2609 } else if (sf
&& dbt
== VT_BOOL
) {
2610 vtop
->c
.i
= (vtop
->c
.ld
!= 0);
2613 vtop
->c
.i
= vtop
->c
.ld
;
2614 else if (sbt
== (VT_LLONG
|VT_UNSIGNED
))
2616 else if (sbt
& VT_UNSIGNED
)
2617 vtop
->c
.i
= (uint32_t)vtop
->c
.i
;
2619 else if (sbt
== VT_PTR
)
2622 else if (sbt
!= VT_LLONG
)
2623 vtop
->c
.i
= ((uint32_t)vtop
->c
.i
|
2624 -(vtop
->c
.i
& 0x80000000));
2626 if (dbt
== (VT_LLONG
|VT_UNSIGNED
))
2628 else if (dbt
== VT_BOOL
)
2629 vtop
->c
.i
= (vtop
->c
.i
!= 0);
2631 else if (dbt
== VT_PTR
)
2634 else if (dbt
!= VT_LLONG
) {
2635 uint32_t m
= ((dbt
& VT_BTYPE
) == VT_BYTE
? 0xff :
2636 (dbt
& VT_BTYPE
) == VT_SHORT
? 0xffff :
2639 if (!(dbt
& VT_UNSIGNED
))
2640 vtop
->c
.i
|= -(vtop
->c
.i
& ((m
>> 1) + 1));
2643 } else if (p
&& dbt
== VT_BOOL
) {
2647 /* non constant case: generate code */
2649 /* convert from fp to fp */
2652 /* convert int to fp */
2655 /* convert fp to int */
2656 if (dbt
== VT_BOOL
) {
2660 /* we handle char/short/etc... with generic code */
2661 if (dbt
!= (VT_INT
| VT_UNSIGNED
) &&
2662 dbt
!= (VT_LLONG
| VT_UNSIGNED
) &&
2666 if (dbt
== VT_INT
&& (type
->t
& (VT_BTYPE
| VT_UNSIGNED
)) != dbt
) {
2667 /* additional cast for char/short... */
2673 } else if ((dbt
& VT_BTYPE
) == VT_LLONG
) {
2674 if ((sbt
& VT_BTYPE
) != VT_LLONG
) {
2675 /* scalar to long long */
2676 /* machine independent conversion */
2678 /* generate high word */
2679 if (sbt
== (VT_INT
| VT_UNSIGNED
)) {
2683 if (sbt
== VT_PTR
) {
2684 /* cast from pointer to int before we apply
2685 shift operation, which pointers don't support*/
2692 /* patch second register */
2693 vtop
[-1].r2
= vtop
->r
;
2697 } else if ((dbt
& VT_BTYPE
) == VT_LLONG
||
2698 (dbt
& VT_BTYPE
) == VT_PTR
||
2699 (dbt
& VT_BTYPE
) == VT_FUNC
) {
2700 if ((sbt
& VT_BTYPE
) != VT_LLONG
&&
2701 (sbt
& VT_BTYPE
) != VT_PTR
&&
2702 (sbt
& VT_BTYPE
) != VT_FUNC
) {
2703 /* need to convert from 32bit to 64bit */
2705 if (sbt
!= (VT_INT
| VT_UNSIGNED
)) {
2706 #if defined(TCC_TARGET_ARM64)
2708 #elif defined(TCC_TARGET_X86_64)
2710 /* x86_64 specific: movslq */
2712 o(0xc0 + (REG_VALUE(r
) << 3) + REG_VALUE(r
));
2719 } else if (dbt
== VT_BOOL
) {
2720 /* scalar to bool */
2723 } else if ((dbt
& VT_BTYPE
) == VT_BYTE
||
2724 (dbt
& VT_BTYPE
) == VT_SHORT
) {
2725 if (sbt
== VT_PTR
) {
2726 vtop
->type
.t
= VT_INT
;
2727 tcc_warning("nonportable conversion from pointer to char/short");
2729 force_charshort_cast(dbt
);
2730 } else if ((dbt
& VT_BTYPE
) == VT_INT
) {
2732 if ((sbt
& VT_BTYPE
) == VT_LLONG
) {
2734 /* from long long: just take low order word */
2739 vtop
->type
.t
|= VT_UNSIGNED
;
2743 /* if lvalue and single word type, nothing to do because
2744 the lvalue already contains the real type size (see
2745 VT_LVAL_xxx constants) */
2748 } else if ((dbt
& VT_BTYPE
) == VT_PTR
&& !(vtop
->r
& VT_LVAL
)) {
2749 /* if we are casting between pointer types,
2750 we must update the VT_LVAL_xxx size */
2751 vtop
->r
= (vtop
->r
& ~VT_LVAL_TYPE
)
2752 | (lvalue_type(type
->ref
->type
.t
) & VT_LVAL_TYPE
);
2755 vtop
->type
.t
&= ~ ( VT_CONSTANT
| VT_VOLATILE
| VT_ARRAY
);
2758 /* return type size as known at compile time. Put alignment at 'a' */
2759 ST_FUNC
int type_size(CType
*type
, int *a
)
2764 bt
= type
->t
& VT_BTYPE
;
2765 if (bt
== VT_STRUCT
) {
2770 } else if (bt
== VT_PTR
) {
2771 if (type
->t
& VT_ARRAY
) {
2775 ts
= type_size(&s
->type
, a
);
2777 if (ts
< 0 && s
->c
< 0)
2785 } else if (IS_ENUM(type
->t
) && type
->ref
->c
< 0) {
2786 return -1; /* incomplete enum */
2787 } else if (bt
== VT_LDOUBLE
) {
2789 return LDOUBLE_SIZE
;
2790 } else if (bt
== VT_DOUBLE
|| bt
== VT_LLONG
) {
2791 #ifdef TCC_TARGET_I386
2792 #ifdef TCC_TARGET_PE
2797 #elif defined(TCC_TARGET_ARM)
2807 } else if (bt
== VT_INT
|| bt
== VT_FLOAT
) {
2810 } else if (bt
== VT_SHORT
) {
2813 } else if (bt
== VT_QLONG
|| bt
== VT_QFLOAT
) {
2817 /* char, void, function, _Bool */
2823 /* push type size as known at runtime time on top of value stack. Put
2825 ST_FUNC
void vla_runtime_type_size(CType
*type
, int *a
)
2827 if (type
->t
& VT_VLA
) {
2828 type_size(&type
->ref
->type
, a
);
2829 vset(&int_type
, VT_LOCAL
|VT_LVAL
, type
->ref
->c
);
2831 vpushi(type_size(type
, a
));
2835 static void vla_sp_restore(void) {
2836 if (vlas_in_scope
) {
2837 gen_vla_sp_restore(vla_sp_loc
);
2841 static void vla_sp_restore_root(void) {
2842 if (vlas_in_scope
) {
2843 gen_vla_sp_restore(vla_sp_root_loc
);
2847 /* return the pointed type of t */
2848 static inline CType
*pointed_type(CType
*type
)
2850 return &type
->ref
->type
;
2853 /* modify type so that its it is a pointer to type. */
2854 ST_FUNC
void mk_pointer(CType
*type
)
2857 s
= sym_push(SYM_FIELD
, type
, 0, -1);
2858 type
->t
= VT_PTR
| (type
->t
& VT_STORAGE
);
2862 /* compare function types. OLD functions match any new functions */
2863 static int is_compatible_func(CType
*type1
, CType
*type2
)
2869 if (!is_compatible_types(&s1
->type
, &s2
->type
))
2871 /* check func_call */
2872 if (s1
->f
.func_call
!= s2
->f
.func_call
)
2874 /* XXX: not complete */
2875 if (s1
->f
.func_type
== FUNC_OLD
|| s2
->f
.func_type
== FUNC_OLD
)
2877 if (s1
->f
.func_type
!= s2
->f
.func_type
)
2879 while (s1
!= NULL
) {
2882 if (!is_compatible_unqualified_types(&s1
->type
, &s2
->type
))
2892 /* return true if type1 and type2 are the same. If unqualified is
2893 true, qualifiers on the types are ignored.
2895 static int compare_types(CType
*type1
, CType
*type2
, int unqualified
)
2899 t1
= type1
->t
& VT_TYPE
;
2900 t2
= type2
->t
& VT_TYPE
;
2902 /* strip qualifiers before comparing */
2903 t1
&= ~(VT_CONSTANT
| VT_VOLATILE
);
2904 t2
&= ~(VT_CONSTANT
| VT_VOLATILE
);
2907 /* Default Vs explicit signedness only matters for char */
2908 if ((t1
& VT_BTYPE
) != VT_BYTE
) {
2912 /* XXX: bitfields ? */
2915 /* test more complicated cases */
2916 bt1
= t1
& (VT_BTYPE
| VT_ARRAY
);
2917 if (bt1
== VT_PTR
) {
2918 type1
= pointed_type(type1
);
2919 type2
= pointed_type(type2
);
2920 return is_compatible_types(type1
, type2
);
2921 } else if (bt1
& VT_ARRAY
) {
2922 return type1
->ref
->c
< 0 || type2
->ref
->c
< 0
2923 || type1
->ref
->c
== type2
->ref
->c
;
2924 } else if (bt1
== VT_STRUCT
) {
2925 return (type1
->ref
== type2
->ref
);
2926 } else if (bt1
== VT_FUNC
) {
2927 return is_compatible_func(type1
, type2
);
2928 } else if (IS_ENUM(type1
->t
) || IS_ENUM(type2
->t
)) {
2929 return type1
->ref
== type2
->ref
;
2935 /* return true if type1 and type2 are exactly the same (including
2938 static int is_compatible_types(CType
*type1
, CType
*type2
)
2940 return compare_types(type1
,type2
,0);
2943 /* return true if type1 and type2 are the same (ignoring qualifiers).
2945 static int is_compatible_unqualified_types(CType
*type1
, CType
*type2
)
2947 return compare_types(type1
,type2
,1);
2950 /* print a type. If 'varstr' is not NULL, then the variable is also
2951 printed in the type */
2953 /* XXX: add array and function pointers */
2954 static void type_to_str(char *buf
, int buf_size
,
2955 CType
*type
, const char *varstr
)
2967 pstrcat(buf
, buf_size
, "extern ");
2969 pstrcat(buf
, buf_size
, "static ");
2971 pstrcat(buf
, buf_size
, "typedef ");
2973 pstrcat(buf
, buf_size
, "inline ");
2974 if (t
& VT_VOLATILE
)
2975 pstrcat(buf
, buf_size
, "volatile ");
2976 if (t
& VT_CONSTANT
)
2977 pstrcat(buf
, buf_size
, "const ");
2979 if (((t
& VT_DEFSIGN
) && bt
== VT_BYTE
)
2980 || ((t
& VT_UNSIGNED
)
2981 && (bt
== VT_SHORT
|| bt
== VT_INT
|| bt
== VT_LLONG
)
2984 pstrcat(buf
, buf_size
, (t
& VT_UNSIGNED
) ? "unsigned " : "signed ");
2986 buf_size
-= strlen(buf
);
3021 tstr
= "long double";
3023 pstrcat(buf
, buf_size
, tstr
);
3030 pstrcat(buf
, buf_size
, tstr
);
3031 v
= type
->ref
->v
& ~SYM_STRUCT
;
3032 if (v
>= SYM_FIRST_ANOM
)
3033 pstrcat(buf
, buf_size
, "<anonymous>");
3035 pstrcat(buf
, buf_size
, get_tok_str(v
, NULL
));
3040 if (varstr
&& '*' == *varstr
) {
3041 pstrcat(buf1
, sizeof(buf1
), "(");
3042 pstrcat(buf1
, sizeof(buf1
), varstr
);
3043 pstrcat(buf1
, sizeof(buf1
), ")");
3045 pstrcat(buf1
, buf_size
, "(");
3047 while (sa
!= NULL
) {
3049 type_to_str(buf2
, sizeof(buf2
), &sa
->type
, NULL
);
3050 pstrcat(buf1
, sizeof(buf1
), buf2
);
3053 pstrcat(buf1
, sizeof(buf1
), ", ");
3055 if (s
->f
.func_type
== FUNC_ELLIPSIS
)
3056 pstrcat(buf1
, sizeof(buf1
), ", ...");
3057 pstrcat(buf1
, sizeof(buf1
), ")");
3058 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
3063 if (varstr
&& '*' == *varstr
)
3064 snprintf(buf1
, sizeof(buf1
), "(%s)[%d]", varstr
, s
->c
);
3066 snprintf(buf1
, sizeof(buf1
), "%s[%d]", varstr
? varstr
: "", s
->c
);
3067 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
3070 pstrcpy(buf1
, sizeof(buf1
), "*");
3071 if (t
& VT_CONSTANT
)
3072 pstrcat(buf1
, buf_size
, "const ");
3073 if (t
& VT_VOLATILE
)
3074 pstrcat(buf1
, buf_size
, "volatile ");
3076 pstrcat(buf1
, sizeof(buf1
), varstr
);
3077 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
3081 pstrcat(buf
, buf_size
, " ");
3082 pstrcat(buf
, buf_size
, varstr
);
3087 /* verify type compatibility to store vtop in 'dt' type, and generate
3089 static void gen_assign_cast(CType
*dt
)
3091 CType
*st
, *type1
, *type2
;
3092 char buf1
[256], buf2
[256];
3093 int dbt
, sbt
, qualwarn
, lvl
;
3095 st
= &vtop
->type
; /* source type */
3096 dbt
= dt
->t
& VT_BTYPE
;
3097 sbt
= st
->t
& VT_BTYPE
;
3098 if (sbt
== VT_VOID
|| dbt
== VT_VOID
) {
3099 if (sbt
== VT_VOID
&& dbt
== VT_VOID
)
3100 ; /* It is Ok if both are void */
3102 tcc_error("cannot cast from/to void");
3104 if (dt
->t
& VT_CONSTANT
)
3105 tcc_warning("assignment of read-only location");
3108 /* special cases for pointers */
3109 /* '0' can also be a pointer */
3110 if (is_null_pointer(vtop
))
3112 /* accept implicit pointer to integer cast with warning */
3113 if (is_integer_btype(sbt
)) {
3114 tcc_warning("assignment makes pointer from integer without a cast");
3117 type1
= pointed_type(dt
);
3119 type2
= pointed_type(st
);
3120 else if (sbt
== VT_FUNC
)
3121 type2
= st
; /* a function is implicitly a function pointer */
3124 if (is_compatible_types(type1
, type2
))
3126 for (qualwarn
= lvl
= 0;; ++lvl
) {
3127 if (((type2
->t
& VT_CONSTANT
) && !(type1
->t
& VT_CONSTANT
)) ||
3128 ((type2
->t
& VT_VOLATILE
) && !(type1
->t
& VT_VOLATILE
)))
3130 dbt
= type1
->t
& (VT_BTYPE
|VT_LONG
);
3131 sbt
= type2
->t
& (VT_BTYPE
|VT_LONG
);
3132 if (dbt
!= VT_PTR
|| sbt
!= VT_PTR
)
3134 type1
= pointed_type(type1
);
3135 type2
= pointed_type(type2
);
3137 if (!is_compatible_unqualified_types(type1
, type2
)) {
3138 if ((dbt
== VT_VOID
|| sbt
== VT_VOID
) && lvl
== 0) {
3139 /* void * can match anything */
3140 } else if (dbt
== sbt
3141 && is_integer_btype(sbt
& VT_BTYPE
)
3142 && IS_ENUM(type1
->t
) + IS_ENUM(type2
->t
)
3143 + !!((type1
->t
^ type2
->t
) & VT_UNSIGNED
) < 2) {
3144 /* Like GCC don't warn by default for merely changes
3145 in pointer target signedness. Do warn for different
3146 base types, though, in particular for unsigned enums
3147 and signed int targets. */
3149 tcc_warning("assignment from incompatible pointer type");
3154 tcc_warning("assignment discards qualifiers from pointer target type");
3160 if (sbt
== VT_PTR
|| sbt
== VT_FUNC
) {
3161 tcc_warning("assignment makes integer from pointer without a cast");
3162 } else if (sbt
== VT_STRUCT
) {
3163 goto case_VT_STRUCT
;
3165 /* XXX: more tests */
3169 if (!is_compatible_unqualified_types(dt
, st
)) {
3171 type_to_str(buf1
, sizeof(buf1
), st
, NULL
);
3172 type_to_str(buf2
, sizeof(buf2
), dt
, NULL
);
3173 tcc_error("cannot cast '%s' to '%s'", buf1
, buf2
);
3180 /* store vtop in lvalue pushed on stack */
3181 ST_FUNC
void vstore(void)
3183 int sbt
, dbt
, ft
, r
, t
, size
, align
, bit_size
, bit_pos
, rc
, delayed_cast
;
3185 ft
= vtop
[-1].type
.t
;
3186 sbt
= vtop
->type
.t
& VT_BTYPE
;
3187 dbt
= ft
& VT_BTYPE
;
3188 if ((((sbt
== VT_INT
|| sbt
== VT_SHORT
) && dbt
== VT_BYTE
) ||
3189 (sbt
== VT_INT
&& dbt
== VT_SHORT
))
3190 && !(vtop
->type
.t
& VT_BITFIELD
)) {
3191 /* optimize char/short casts */
3192 delayed_cast
= VT_MUSTCAST
;
3193 vtop
->type
.t
= ft
& VT_TYPE
;
3194 /* XXX: factorize */
3195 if (ft
& VT_CONSTANT
)
3196 tcc_warning("assignment of read-only location");
3199 if (!(ft
& VT_BITFIELD
))
3200 gen_assign_cast(&vtop
[-1].type
);
3203 if (sbt
== VT_STRUCT
) {
3204 /* if structure, only generate pointer */
3205 /* structure assignment : generate memcpy */
3206 /* XXX: optimize if small size */
3207 size
= type_size(&vtop
->type
, &align
);
3211 vtop
->type
.t
= VT_PTR
;
3214 /* address of memcpy() */
3217 vpush_global_sym(&func_old_type
, TOK_memcpy8
);
3218 else if(!(align
& 3))
3219 vpush_global_sym(&func_old_type
, TOK_memcpy4
);
3222 /* Use memmove, rather than memcpy, as dest and src may be same: */
3223 vpush_global_sym(&func_old_type
, TOK_memmove
);
3228 vtop
->type
.t
= VT_PTR
;
3234 /* leave source on stack */
3235 } else if (ft
& VT_BITFIELD
) {
3236 /* bitfield store handling */
3238 /* save lvalue as expression result (example: s.b = s.a = n;) */
3239 vdup(), vtop
[-1] = vtop
[-2];
3241 bit_pos
= BIT_POS(ft
);
3242 bit_size
= BIT_SIZE(ft
);
3243 /* remove bit field info to avoid loops */
3244 vtop
[-1].type
.t
= ft
& ~VT_STRUCT_MASK
;
3246 if ((ft
& VT_BTYPE
) == VT_BOOL
) {
3247 gen_cast(&vtop
[-1].type
);
3248 vtop
[-1].type
.t
= (vtop
[-1].type
.t
& ~VT_BTYPE
) | (VT_BYTE
| VT_UNSIGNED
);
3251 r
= adjust_bf(vtop
- 1, bit_pos
, bit_size
);
3252 if (r
== VT_STRUCT
) {
3253 gen_cast_s((ft
& VT_BTYPE
) == VT_LLONG
? VT_LLONG
: VT_INT
);
3254 store_packed_bf(bit_pos
, bit_size
);
3256 unsigned long long mask
= (1ULL << bit_size
) - 1;
3257 if ((ft
& VT_BTYPE
) != VT_BOOL
) {
3259 if ((vtop
[-1].type
.t
& VT_BTYPE
) == VT_LLONG
)
3262 vpushi((unsigned)mask
);
3269 /* duplicate destination */
3272 /* load destination, mask and or with source */
3273 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
)
3274 vpushll(~(mask
<< bit_pos
));
3276 vpushi(~((unsigned)mask
<< bit_pos
));
3281 /* ... and discard */
3284 } else if (dbt
== VT_VOID
) {
3287 #ifdef CONFIG_TCC_BCHECK
3288 /* bound check case */
3289 if (vtop
[-1].r
& VT_MUSTBOUND
) {
3298 #ifdef TCC_TARGET_X86_64
3299 if ((ft
& VT_BTYPE
) == VT_LDOUBLE
) {
3301 } else if ((ft
& VT_BTYPE
) == VT_QFLOAT
) {
3306 r
= gv(rc
); /* generate value */
3307 /* if lvalue was saved on stack, must read it */
3308 if ((vtop
[-1].r
& VT_VALMASK
) == VT_LLOCAL
) {
3310 t
= get_reg(RC_INT
);
3316 sv
.r
= VT_LOCAL
| VT_LVAL
;
3317 sv
.c
.i
= vtop
[-1].c
.i
;
3319 vtop
[-1].r
= t
| VT_LVAL
;
3321 /* two word case handling : store second register at word + 4 (or +8 for x86-64) */
3323 if (((ft
& VT_BTYPE
) == VT_QLONG
) || ((ft
& VT_BTYPE
) == VT_QFLOAT
)) {
3324 int addr_type
= VT_LLONG
, load_size
= 8, load_type
= ((vtop
->type
.t
& VT_BTYPE
) == VT_QLONG
) ? VT_LLONG
: VT_DOUBLE
;
3326 if ((ft
& VT_BTYPE
) == VT_LLONG
) {
3327 int addr_type
= VT_INT
, load_size
= 4, load_type
= VT_INT
;
3329 vtop
[-1].type
.t
= load_type
;
3332 /* convert to int to increment easily */
3333 vtop
->type
.t
= addr_type
;
3339 vtop
[-1].type
.t
= load_type
;
3340 /* XXX: it works because r2 is spilled last ! */
3341 store(vtop
->r2
, vtop
- 1);
3347 vtop
--; /* NOT vpop() because on x86 it would flush the fp stack */
3348 vtop
->r
|= delayed_cast
;
3352 /* post defines POST/PRE add. c is the token ++ or -- */
3353 ST_FUNC
void inc(int post
, int c
)
3356 vdup(); /* save lvalue */
3358 gv_dup(); /* duplicate value */
3363 vpushi(c
- TOK_MID
);
3365 vstore(); /* store value */
3367 vpop(); /* if post op, return saved value */
3370 ST_FUNC
void parse_mult_str (CString
*astr
, const char *msg
)
3372 /* read the string */
3376 while (tok
== TOK_STR
) {
3377 /* XXX: add \0 handling too ? */
3378 cstr_cat(astr
, tokc
.str
.data
, -1);
3381 cstr_ccat(astr
, '\0');
3384 /* If I is >= 1 and a power of two, returns log2(i)+1.
3385 If I is 0 returns 0. */
3386 static int exact_log2p1(int i
)
3391 for (ret
= 1; i
>= 1 << 8; ret
+= 8)
3402 /* Parse __attribute__((...)) GNUC extension. */
3403 static void parse_attribute(AttributeDef
*ad
)
3409 if (tok
!= TOK_ATTRIBUTE1
&& tok
!= TOK_ATTRIBUTE2
)
3414 while (tok
!= ')') {
3415 if (tok
< TOK_IDENT
)
3416 expect("attribute name");
3428 tcc_warning("implicit declaration of function '%s'",
3429 get_tok_str(tok
, &tokc
));
3430 s
= external_global_sym(tok
, &func_old_type
, 0);
3432 ad
->cleanup_func
= s
;
3440 parse_mult_str(&astr
, "section name");
3441 ad
->section
= find_section(tcc_state
, (char *)astr
.data
);
3448 parse_mult_str(&astr
, "alias(\"target\")");
3449 ad
->alias_target
= /* save string as token, for later */
3450 tok_alloc((char*)astr
.data
, astr
.size
-1)->tok
;
3454 case TOK_VISIBILITY1
:
3455 case TOK_VISIBILITY2
:
3457 parse_mult_str(&astr
,
3458 "visibility(\"default|hidden|internal|protected\")");
3459 if (!strcmp (astr
.data
, "default"))
3460 ad
->a
.visibility
= STV_DEFAULT
;
3461 else if (!strcmp (astr
.data
, "hidden"))
3462 ad
->a
.visibility
= STV_HIDDEN
;
3463 else if (!strcmp (astr
.data
, "internal"))
3464 ad
->a
.visibility
= STV_INTERNAL
;
3465 else if (!strcmp (astr
.data
, "protected"))
3466 ad
->a
.visibility
= STV_PROTECTED
;
3468 expect("visibility(\"default|hidden|internal|protected\")");
3477 if (n
<= 0 || (n
& (n
- 1)) != 0)
3478 tcc_error("alignment must be a positive power of two");
3483 ad
->a
.aligned
= exact_log2p1(n
);
3484 if (n
!= 1 << (ad
->a
.aligned
- 1))
3485 tcc_error("alignment of %d is larger than implemented", n
);
3497 /* currently, no need to handle it because tcc does not
3498 track unused objects */
3502 /* currently, no need to handle it because tcc does not
3503 track unused objects */
3508 ad
->f
.func_call
= FUNC_CDECL
;
3513 ad
->f
.func_call
= FUNC_STDCALL
;
3515 #ifdef TCC_TARGET_I386
3525 ad
->f
.func_call
= FUNC_FASTCALL1
+ n
- 1;
3531 ad
->f
.func_call
= FUNC_FASTCALLW
;
3538 ad
->attr_mode
= VT_LLONG
+ 1;
3541 ad
->attr_mode
= VT_BYTE
+ 1;
3544 ad
->attr_mode
= VT_SHORT
+ 1;
3548 ad
->attr_mode
= VT_INT
+ 1;
3551 tcc_warning("__mode__(%s) not supported\n", get_tok_str(tok
, NULL
));
3558 ad
->a
.dllexport
= 1;
3560 case TOK_NODECORATE
:
3561 ad
->a
.nodecorate
= 1;
3564 ad
->a
.dllimport
= 1;
3567 if (tcc_state
->warn_unsupported
)
3568 tcc_warning("'%s' attribute ignored", get_tok_str(t
, NULL
));
3569 /* skip parameters */
3571 int parenthesis
= 0;
3575 else if (tok
== ')')
3578 } while (parenthesis
&& tok
!= -1);
3591 static Sym
* find_field (CType
*type
, int v
)
3595 while ((s
= s
->next
) != NULL
) {
3596 if ((s
->v
& SYM_FIELD
) &&
3597 (s
->type
.t
& VT_BTYPE
) == VT_STRUCT
&&
3598 (s
->v
& ~SYM_FIELD
) >= SYM_FIRST_ANOM
) {
3599 Sym
*ret
= find_field (&s
->type
, v
);
3609 static void struct_add_offset (Sym
*s
, int offset
)
3611 while ((s
= s
->next
) != NULL
) {
3612 if ((s
->v
& SYM_FIELD
) &&
3613 (s
->type
.t
& VT_BTYPE
) == VT_STRUCT
&&
3614 (s
->v
& ~SYM_FIELD
) >= SYM_FIRST_ANOM
) {
3615 struct_add_offset(s
->type
.ref
, offset
);
3621 static void struct_layout(CType
*type
, AttributeDef
*ad
)
3623 int size
, align
, maxalign
, offset
, c
, bit_pos
, bit_size
;
3624 int packed
, a
, bt
, prevbt
, prev_bit_size
;
3625 int pcc
= !tcc_state
->ms_bitfields
;
3626 int pragma_pack
= *tcc_state
->pack_stack_ptr
;
3633 prevbt
= VT_STRUCT
; /* make it never match */
3638 for (f
= type
->ref
->next
; f
; f
= f
->next
) {
3639 if (f
->type
.t
& VT_BITFIELD
)
3640 bit_size
= BIT_SIZE(f
->type
.t
);
3643 size
= type_size(&f
->type
, &align
);
3644 a
= f
->a
.aligned
? 1 << (f
->a
.aligned
- 1) : 0;
3647 if (pcc
&& bit_size
== 0) {
3648 /* in pcc mode, packing does not affect zero-width bitfields */
3651 /* in pcc mode, attribute packed overrides if set. */
3652 if (pcc
&& (f
->a
.packed
|| ad
->a
.packed
))
3655 /* pragma pack overrides align if lesser and packs bitfields always */
3658 if (pragma_pack
< align
)
3659 align
= pragma_pack
;
3660 /* in pcc mode pragma pack also overrides individual align */
3661 if (pcc
&& pragma_pack
< a
)
3665 /* some individual align was specified */
3669 if (type
->ref
->type
.t
== VT_UNION
) {
3670 if (pcc
&& bit_size
>= 0)
3671 size
= (bit_size
+ 7) >> 3;
3676 } else if (bit_size
< 0) {
3678 c
+= (bit_pos
+ 7) >> 3;
3679 c
= (c
+ align
- 1) & -align
;
3688 /* A bit-field. Layout is more complicated. There are two
3689 options: PCC (GCC) compatible and MS compatible */
3691 /* In PCC layout a bit-field is placed adjacent to the
3692 preceding bit-fields, except if:
3694 - an individual alignment was given
3695 - it would overflow its base type container and
3696 there is no packing */
3697 if (bit_size
== 0) {
3699 c
= (c
+ ((bit_pos
+ 7) >> 3) + align
- 1) & -align
;
3701 } else if (f
->a
.aligned
) {
3703 } else if (!packed
) {
3705 int ofs
= ((c
* 8 + bit_pos
) % a8
+ bit_size
+ a8
- 1) / a8
;
3706 if (ofs
> size
/ align
)
3710 /* in pcc mode, long long bitfields have type int if they fit */
3711 if (size
== 8 && bit_size
<= 32)
3712 f
->type
.t
= (f
->type
.t
& ~VT_BTYPE
) | VT_INT
, size
= 4;
3714 while (bit_pos
>= align
* 8)
3715 c
+= align
, bit_pos
-= align
* 8;
3718 /* In PCC layout named bit-fields influence the alignment
3719 of the containing struct using the base types alignment,
3720 except for packed fields (which here have correct align). */
3721 if (f
->v
& SYM_FIRST_ANOM
3722 // && bit_size // ??? gcc on ARM/rpi does that
3727 bt
= f
->type
.t
& VT_BTYPE
;
3728 if ((bit_pos
+ bit_size
> size
* 8)
3729 || (bit_size
> 0) == (bt
!= prevbt
)
3731 c
= (c
+ align
- 1) & -align
;
3734 /* In MS bitfield mode a bit-field run always uses
3735 at least as many bits as the underlying type.
3736 To start a new run it's also required that this
3737 or the last bit-field had non-zero width. */
3738 if (bit_size
|| prev_bit_size
)
3741 /* In MS layout the records alignment is normally
3742 influenced by the field, except for a zero-width
3743 field at the start of a run (but by further zero-width
3744 fields it is again). */
3745 if (bit_size
== 0 && prevbt
!= bt
)
3748 prev_bit_size
= bit_size
;
3751 f
->type
.t
= (f
->type
.t
& ~(0x3f << VT_STRUCT_SHIFT
))
3752 | (bit_pos
<< VT_STRUCT_SHIFT
);
3753 bit_pos
+= bit_size
;
3755 if (align
> maxalign
)
3759 printf("set field %s offset %-2d size %-2d align %-2d",
3760 get_tok_str(f
->v
& ~SYM_FIELD
, NULL
), offset
, size
, align
);
3761 if (f
->type
.t
& VT_BITFIELD
) {
3762 printf(" pos %-2d bits %-2d",
3770 if (f
->v
& SYM_FIRST_ANOM
&& (f
->type
.t
& VT_BTYPE
) == VT_STRUCT
) {
3772 /* An anonymous struct/union. Adjust member offsets
3773 to reflect the real offset of our containing struct.
3774 Also set the offset of this anon member inside
3775 the outer struct to be zero. Via this it
3776 works when accessing the field offset directly
3777 (from base object), as well as when recursing
3778 members in initializer handling. */
3779 int v2
= f
->type
.ref
->v
;
3780 if (!(v2
& SYM_FIELD
) &&
3781 (v2
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
3783 /* This happens only with MS extensions. The
3784 anon member has a named struct type, so it
3785 potentially is shared with other references.
3786 We need to unshare members so we can modify
3789 f
->type
.ref
= sym_push(anon_sym
++ | SYM_FIELD
,
3790 &f
->type
.ref
->type
, 0,
3792 pps
= &f
->type
.ref
->next
;
3793 while ((ass
= ass
->next
) != NULL
) {
3794 *pps
= sym_push(ass
->v
, &ass
->type
, 0, ass
->c
);
3795 pps
= &((*pps
)->next
);
3799 struct_add_offset(f
->type
.ref
, offset
);
3809 c
+= (bit_pos
+ 7) >> 3;
3811 /* store size and alignment */
3812 a
= bt
= ad
->a
.aligned
? 1 << (ad
->a
.aligned
- 1) : 1;
3816 if (pragma_pack
&& pragma_pack
< maxalign
&& 0 == pcc
) {
3817 /* can happen if individual align for some member was given. In
3818 this case MSVC ignores maxalign when aligning the size */
3823 c
= (c
+ a
- 1) & -a
;
3827 printf("struct size %-2d align %-2d\n\n", c
, a
), fflush(stdout
);
3830 /* check whether we can access bitfields by their type */
3831 for (f
= type
->ref
->next
; f
; f
= f
->next
) {
3835 if (0 == (f
->type
.t
& VT_BITFIELD
))
3839 bit_size
= BIT_SIZE(f
->type
.t
);
3842 bit_pos
= BIT_POS(f
->type
.t
);
3843 size
= type_size(&f
->type
, &align
);
3844 if (bit_pos
+ bit_size
<= size
* 8 && f
->c
+ size
<= c
)
3847 /* try to access the field using a different type */
3848 c0
= -1, s
= align
= 1;
3850 px
= f
->c
* 8 + bit_pos
;
3851 cx
= (px
>> 3) & -align
;
3852 px
= px
- (cx
<< 3);
3855 s
= (px
+ bit_size
+ 7) >> 3;
3865 s
= type_size(&t
, &align
);
3869 if (px
+ bit_size
<= s
* 8 && cx
+ s
<= c
) {
3870 /* update offset and bit position */
3873 f
->type
.t
= (f
->type
.t
& ~(0x3f << VT_STRUCT_SHIFT
))
3874 | (bit_pos
<< VT_STRUCT_SHIFT
);
3878 printf("FIX field %s offset %-2d size %-2d align %-2d "
3879 "pos %-2d bits %-2d\n",
3880 get_tok_str(f
->v
& ~SYM_FIELD
, NULL
),
3881 cx
, s
, align
, px
, bit_size
);
3884 /* fall back to load/store single-byte wise */
3885 f
->auxtype
= VT_STRUCT
;
3887 printf("FIX field %s : load byte-wise\n",
3888 get_tok_str(f
->v
& ~SYM_FIELD
, NULL
));
3894 /* enum/struct/union declaration. u is VT_ENUM/VT_STRUCT/VT_UNION */
3895 static void struct_decl(CType
*type
, int u
)
3897 int v
, c
, size
, align
, flexible
;
3898 int bit_size
, bsize
, bt
;
3900 AttributeDef ad
, ad1
;
3903 memset(&ad
, 0, sizeof ad
);
3905 parse_attribute(&ad
);
3909 /* struct already defined ? return it */
3911 expect("struct/union/enum name");
3913 if (s
&& (s
->sym_scope
== local_scope
|| tok
!= '{')) {
3916 if (u
== VT_ENUM
&& IS_ENUM(s
->type
.t
))
3918 tcc_error("redefinition of '%s'", get_tok_str(v
, NULL
));
3923 /* Record the original enum/struct/union token. */
3924 type1
.t
= u
== VT_ENUM
? u
| VT_INT
| VT_UNSIGNED
: u
;
3926 /* we put an undefined size for struct/union */
3927 s
= sym_push(v
| SYM_STRUCT
, &type1
, 0, -1);
3928 s
->r
= 0; /* default alignment is zero as gcc */
3930 type
->t
= s
->type
.t
;
3936 tcc_error("struct/union/enum already defined");
3938 /* cannot be empty */
3939 /* non empty enums are not allowed */
3942 long long ll
= 0, pl
= 0, nl
= 0;
3945 /* enum symbols have static storage */
3946 t
.t
= VT_INT
|VT_STATIC
|VT_ENUM_VAL
;
3950 expect("identifier");
3952 if (ss
&& !local_stack
)
3953 tcc_error("redefinition of enumerator '%s'",
3954 get_tok_str(v
, NULL
));
3958 ll
= expr_const64();
3960 ss
= sym_push(v
, &t
, VT_CONST
, 0);
3962 *ps
= ss
, ps
= &ss
->next
;
3971 /* NOTE: we accept a trailing comma */
3976 /* set integral type of the enum */
3979 if (pl
!= (unsigned)pl
)
3980 t
.t
= (LONG_SIZE
==8 ? VT_LLONG
|VT_LONG
: VT_LLONG
);
3982 } else if (pl
!= (int)pl
|| nl
!= (int)nl
)
3983 t
.t
= (LONG_SIZE
==8 ? VT_LLONG
|VT_LONG
: VT_LLONG
);
3984 s
->type
.t
= type
->t
= t
.t
| VT_ENUM
;
3986 /* set type for enum members */
3987 for (ss
= s
->next
; ss
; ss
= ss
->next
) {
3989 if (ll
== (int)ll
) /* default is int if it fits */
3991 if (t
.t
& VT_UNSIGNED
) {
3992 ss
->type
.t
|= VT_UNSIGNED
;
3993 if (ll
== (unsigned)ll
)
3996 ss
->type
.t
= (ss
->type
.t
& ~VT_BTYPE
)
3997 | (LONG_SIZE
==8 ? VT_LLONG
|VT_LONG
: VT_LLONG
);
4002 while (tok
!= '}') {
4003 if (!parse_btype(&btype
, &ad1
)) {
4009 tcc_error("flexible array member '%s' not at the end of struct",
4010 get_tok_str(v
, NULL
));
4016 type_decl(&type1
, &ad1
, &v
, TYPE_DIRECT
);
4018 if ((type1
.t
& VT_BTYPE
) != VT_STRUCT
)
4019 expect("identifier");
4021 int v
= btype
.ref
->v
;
4022 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
4023 if (tcc_state
->ms_extensions
== 0)
4024 expect("identifier");
4028 if (type_size(&type1
, &align
) < 0) {
4029 if ((u
== VT_STRUCT
) && (type1
.t
& VT_ARRAY
) && c
)
4032 tcc_error("field '%s' has incomplete type",
4033 get_tok_str(v
, NULL
));
4035 if ((type1
.t
& VT_BTYPE
) == VT_FUNC
||
4036 (type1
.t
& VT_BTYPE
) == VT_VOID
||
4037 (type1
.t
& VT_STORAGE
))
4038 tcc_error("invalid type for '%s'",
4039 get_tok_str(v
, NULL
));
4043 bit_size
= expr_const();
4044 /* XXX: handle v = 0 case for messages */
4046 tcc_error("negative width in bit-field '%s'",
4047 get_tok_str(v
, NULL
));
4048 if (v
&& bit_size
== 0)
4049 tcc_error("zero width for bit-field '%s'",
4050 get_tok_str(v
, NULL
));
4051 parse_attribute(&ad1
);
4053 size
= type_size(&type1
, &align
);
4054 if (bit_size
>= 0) {
4055 bt
= type1
.t
& VT_BTYPE
;
4061 tcc_error("bitfields must have scalar type");
4063 if (bit_size
> bsize
) {
4064 tcc_error("width of '%s' exceeds its type",
4065 get_tok_str(v
, NULL
));
4066 } else if (bit_size
== bsize
4067 && !ad
.a
.packed
&& !ad1
.a
.packed
) {
4068 /* no need for bit fields */
4070 } else if (bit_size
== 64) {
4071 tcc_error("field width 64 not implemented");
4073 type1
.t
= (type1
.t
& ~VT_STRUCT_MASK
)
4075 | (bit_size
<< (VT_STRUCT_SHIFT
+ 6));
4078 if (v
!= 0 || (type1
.t
& VT_BTYPE
) == VT_STRUCT
) {
4079 /* Remember we've seen a real field to check
4080 for placement of flexible array member. */
4083 /* If member is a struct or bit-field, enforce
4084 placing into the struct (as anonymous). */
4086 ((type1
.t
& VT_BTYPE
) == VT_STRUCT
||
4091 ss
= sym_push(v
| SYM_FIELD
, &type1
, 0, 0);
4096 if (tok
== ';' || tok
== TOK_EOF
)
4103 parse_attribute(&ad
);
4104 struct_layout(type
, &ad
);
4109 static void sym_to_attr(AttributeDef
*ad
, Sym
*s
)
4111 merge_symattr(&ad
->a
, &s
->a
);
4112 merge_funcattr(&ad
->f
, &s
->f
);
4115 /* Add type qualifiers to a type. If the type is an array then the qualifiers
4116 are added to the element type, copied because it could be a typedef. */
4117 static void parse_btype_qualify(CType
*type
, int qualifiers
)
4119 while (type
->t
& VT_ARRAY
) {
4120 type
->ref
= sym_push(SYM_FIELD
, &type
->ref
->type
, 0, type
->ref
->c
);
4121 type
= &type
->ref
->type
;
4123 type
->t
|= qualifiers
;
4126 /* return 0 if no type declaration. otherwise, return the basic type
4129 static int parse_btype(CType
*type
, AttributeDef
*ad
)
4131 int t
, u
, bt
, st
, type_found
, typespec_found
, g
;
4135 memset(ad
, 0, sizeof(AttributeDef
));
4145 /* currently, we really ignore extension */
4155 if (u
== VT_SHORT
|| u
== VT_LONG
) {
4156 if (st
!= -1 || (bt
!= -1 && bt
!= VT_INT
))
4157 tmbt
: tcc_error("too many basic types");
4160 if (bt
!= -1 || (st
!= -1 && u
!= VT_INT
))
4165 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | u
;
4182 memset(&ad1
, 0, sizeof(AttributeDef
));
4183 if (parse_btype(&type1
, &ad1
)) {
4184 type_decl(&type1
, &ad1
, &n
, TYPE_ABSTRACT
);
4186 n
= 1 << (ad1
.a
.aligned
- 1);
4188 type_size(&type1
, &n
);
4191 if (n
<= 0 || (n
& (n
- 1)) != 0)
4192 tcc_error("alignment must be a positive power of two");
4195 ad
->a
.aligned
= exact_log2p1(n
);
4199 if ((t
& VT_BTYPE
) == VT_DOUBLE
) {
4200 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_LDOUBLE
;
4201 } else if ((t
& (VT_BTYPE
|VT_LONG
)) == VT_LONG
) {
4202 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_LLONG
;
4209 #ifdef TCC_TARGET_ARM64
4211 /* GCC's __uint128_t appears in some Linux header files. Make it a
4212 synonym for long double to get the size and alignment right. */
4223 if ((t
& (VT_BTYPE
|VT_LONG
)) == VT_LONG
) {
4224 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_LDOUBLE
;
4232 struct_decl(&type1
, VT_ENUM
);
4235 type
->ref
= type1
.ref
;
4238 struct_decl(&type1
, VT_STRUCT
);
4241 struct_decl(&type1
, VT_UNION
);
4244 /* type modifiers */
4249 parse_btype_qualify(type
, VT_CONSTANT
);
4257 parse_btype_qualify(type
, VT_VOLATILE
);
4264 if ((t
& (VT_DEFSIGN
|VT_UNSIGNED
)) == (VT_DEFSIGN
|VT_UNSIGNED
))
4265 tcc_error("signed and unsigned modifier");
4278 if ((t
& (VT_DEFSIGN
|VT_UNSIGNED
)) == VT_DEFSIGN
)
4279 tcc_error("signed and unsigned modifier");
4280 t
|= VT_DEFSIGN
| VT_UNSIGNED
;
4296 if (t
& (VT_EXTERN
|VT_STATIC
|VT_TYPEDEF
) & ~g
)
4297 tcc_error("multiple storage classes");
4308 /* currently, no need to handle it because tcc does not
4309 track unused objects */
4312 /* GNUC attribute */
4313 case TOK_ATTRIBUTE1
:
4314 case TOK_ATTRIBUTE2
:
4315 parse_attribute(ad
);
4316 if (ad
->attr_mode
) {
4317 u
= ad
->attr_mode
-1;
4318 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | u
;
4326 parse_expr_type(&type1
);
4327 /* remove all storage modifiers except typedef */
4328 type1
.t
&= ~(VT_STORAGE
&~VT_TYPEDEF
);
4330 sym_to_attr(ad
, type1
.ref
);
4336 if (!s
|| !(s
->type
.t
& VT_TYPEDEF
))
4338 t
&= ~(VT_BTYPE
|VT_LONG
);
4339 u
= t
& ~(VT_CONSTANT
| VT_VOLATILE
), t
^= u
;
4340 type
->t
= (s
->type
.t
& ~VT_TYPEDEF
) | u
;
4341 type
->ref
= s
->type
.ref
;
4343 parse_btype_qualify(type
, t
);
4345 /* get attributes from typedef */
4355 if (tcc_state
->char_is_unsigned
) {
4356 if ((t
& (VT_DEFSIGN
|VT_BTYPE
)) == VT_BYTE
)
4359 /* VT_LONG is used just as a modifier for VT_INT / VT_LLONG */
4360 bt
= t
& (VT_BTYPE
|VT_LONG
);
4362 t
|= LONG_SIZE
== 8 ? VT_LLONG
: VT_INT
;
4363 #ifdef TCC_TARGET_PE
4364 if (bt
== VT_LDOUBLE
)
4365 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_DOUBLE
;
4371 /* convert a function parameter type (array to pointer and function to
4372 function pointer) */
4373 static inline void convert_parameter_type(CType
*pt
)
4375 /* remove const and volatile qualifiers (XXX: const could be used
4376 to indicate a const function parameter */
4377 pt
->t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
4378 /* array must be transformed to pointer according to ANSI C */
4380 if ((pt
->t
& VT_BTYPE
) == VT_FUNC
) {
4385 ST_FUNC
void parse_asm_str(CString
*astr
)
4388 parse_mult_str(astr
, "string constant");
4391 /* Parse an asm label and return the token */
4392 static int asm_label_instr(void)
4398 parse_asm_str(&astr
);
4401 printf("asm_alias: \"%s\"\n", (char *)astr
.data
);
4403 v
= tok_alloc(astr
.data
, astr
.size
- 1)->tok
;
4408 static int post_type(CType
*type
, AttributeDef
*ad
, int storage
, int td
)
4410 int n
, l
, t1
, arg_size
, align
;
4411 Sym
**plast
, *s
, *first
;
4416 /* function type, or recursive declarator (return if so) */
4418 if (td
&& !(td
& TYPE_ABSTRACT
))
4422 else if (parse_btype(&pt
, &ad1
))
4425 merge_attr (ad
, &ad1
);
4434 /* read param name and compute offset */
4435 if (l
!= FUNC_OLD
) {
4436 if ((pt
.t
& VT_BTYPE
) == VT_VOID
&& tok
== ')')
4438 type_decl(&pt
, &ad1
, &n
, TYPE_DIRECT
| TYPE_ABSTRACT
);
4439 if ((pt
.t
& VT_BTYPE
) == VT_VOID
)
4440 tcc_error("parameter declared as void");
4444 expect("identifier");
4445 pt
.t
= VT_VOID
; /* invalid type */
4448 convert_parameter_type(&pt
);
4449 arg_size
+= (type_size(&pt
, &align
) + PTR_SIZE
- 1) / PTR_SIZE
;
4450 s
= sym_push(n
| SYM_FIELD
, &pt
, 0, 0);
4456 if (l
== FUNC_NEW
&& tok
== TOK_DOTS
) {
4461 if (l
== FUNC_NEW
&& !parse_btype(&pt
, &ad1
))
4462 tcc_error("invalid type");
4465 /* if no parameters, then old type prototype */
4468 /* NOTE: const is ignored in returned type as it has a special
4469 meaning in gcc / C++ */
4470 type
->t
&= ~VT_CONSTANT
;
4471 /* some ancient pre-K&R C allows a function to return an array
4472 and the array brackets to be put after the arguments, such
4473 that "int c()[]" means something like "int[] c()" */
4476 skip(']'); /* only handle simple "[]" */
4479 /* we push a anonymous symbol which will contain the function prototype */
4480 ad
->f
.func_args
= arg_size
;
4481 ad
->f
.func_type
= l
;
4482 s
= sym_push(SYM_FIELD
, type
, 0, 0);
4488 } else if (tok
== '[') {
4489 int saved_nocode_wanted
= nocode_wanted
;
4490 /* array definition */
4493 /* XXX The optional type-quals and static should only be accepted
4494 in parameter decls. The '*' as well, and then even only
4495 in prototypes (not function defs). */
4497 case TOK_RESTRICT1
: case TOK_RESTRICT2
: case TOK_RESTRICT3
:
4512 if (!local_stack
|| (storage
& VT_STATIC
))
4513 vpushi(expr_const());
4515 /* VLAs (which can only happen with local_stack && !VT_STATIC)
4516 length must always be evaluated, even under nocode_wanted,
4517 so that its size slot is initialized (e.g. under sizeof
4522 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
4525 tcc_error("invalid array size");
4527 if (!is_integer_btype(vtop
->type
.t
& VT_BTYPE
))
4528 tcc_error("size of variable length array should be an integer");
4534 /* parse next post type */
4535 post_type(type
, ad
, storage
, 0);
4536 if (type
->t
== VT_FUNC
)
4537 tcc_error("declaration of an array of functions");
4538 t1
|= type
->t
& VT_VLA
;
4542 tcc_error("need explicit inner array size in VLAs");
4543 loc
-= type_size(&int_type
, &align
);
4547 vla_runtime_type_size(type
, &align
);
4549 vset(&int_type
, VT_LOCAL
|VT_LVAL
, n
);
4555 nocode_wanted
= saved_nocode_wanted
;
4557 /* we push an anonymous symbol which will contain the array
4559 s
= sym_push(SYM_FIELD
, type
, 0, n
);
4560 type
->t
= (t1
? VT_VLA
: VT_ARRAY
) | VT_PTR
;
4566 /* Parse a type declarator (except basic type), and return the type
4567 in 'type'. 'td' is a bitmask indicating which kind of type decl is
4568 expected. 'type' should contain the basic type. 'ad' is the
4569 attribute definition of the basic type. It can be modified by
4570 type_decl(). If this (possibly abstract) declarator is a pointer chain
4571 it returns the innermost pointed to type (equals *type, but is a different
4572 pointer), otherwise returns type itself, that's used for recursive calls. */
4573 static CType
*type_decl(CType
*type
, AttributeDef
*ad
, int *v
, int td
)
4576 int qualifiers
, storage
;
4578 /* recursive type, remove storage bits first, apply them later again */
4579 storage
= type
->t
& VT_STORAGE
;
4580 type
->t
&= ~VT_STORAGE
;
4583 while (tok
== '*') {
4591 qualifiers
|= VT_CONSTANT
;
4596 qualifiers
|= VT_VOLATILE
;
4602 /* XXX: clarify attribute handling */
4603 case TOK_ATTRIBUTE1
:
4604 case TOK_ATTRIBUTE2
:
4605 parse_attribute(ad
);
4609 type
->t
|= qualifiers
;
4611 /* innermost pointed to type is the one for the first derivation */
4612 ret
= pointed_type(type
);
4616 /* This is possibly a parameter type list for abstract declarators
4617 ('int ()'), use post_type for testing this. */
4618 if (!post_type(type
, ad
, 0, td
)) {
4619 /* It's not, so it's a nested declarator, and the post operations
4620 apply to the innermost pointed to type (if any). */
4621 /* XXX: this is not correct to modify 'ad' at this point, but
4622 the syntax is not clear */
4623 parse_attribute(ad
);
4624 post
= type_decl(type
, ad
, v
, td
);
4628 } else if (tok
>= TOK_IDENT
&& (td
& TYPE_DIRECT
)) {
4629 /* type identifier */
4634 if (!(td
& TYPE_ABSTRACT
))
4635 expect("identifier");
4638 post_type(post
, ad
, storage
, 0);
4639 parse_attribute(ad
);
4644 /* compute the lvalue VT_LVAL_xxx needed to match type t. */
4645 ST_FUNC
int lvalue_type(int t
)
4650 if (bt
== VT_BYTE
|| bt
== VT_BOOL
)
4652 else if (bt
== VT_SHORT
)
4656 if (t
& VT_UNSIGNED
)
4657 r
|= VT_LVAL_UNSIGNED
;
4661 /* indirection with full error checking and bound check */
4662 ST_FUNC
void indir(void)
4664 if ((vtop
->type
.t
& VT_BTYPE
) != VT_PTR
) {
4665 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FUNC
)
4669 if (vtop
->r
& VT_LVAL
)
4671 vtop
->type
= *pointed_type(&vtop
->type
);
4672 /* Arrays and functions are never lvalues */
4673 if (!(vtop
->type
.t
& VT_ARRAY
) && !(vtop
->type
.t
& VT_VLA
)
4674 && (vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
) {
4675 vtop
->r
|= lvalue_type(vtop
->type
.t
);
4676 /* if bound checking, the referenced pointer must be checked */
4677 #ifdef CONFIG_TCC_BCHECK
4678 if (tcc_state
->do_bounds_check
)
4679 vtop
->r
|= VT_MUSTBOUND
;
4684 /* pass a parameter to a function and do type checking and casting */
4685 static void gfunc_param_typed(Sym
*func
, Sym
*arg
)
4690 func_type
= func
->f
.func_type
;
4691 if (func_type
== FUNC_OLD
||
4692 (func_type
== FUNC_ELLIPSIS
&& arg
== NULL
)) {
4693 /* default casting : only need to convert float to double */
4694 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FLOAT
) {
4695 gen_cast_s(VT_DOUBLE
);
4696 } else if (vtop
->type
.t
& VT_BITFIELD
) {
4697 type
.t
= vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
);
4698 type
.ref
= vtop
->type
.ref
;
4701 } else if (arg
== NULL
) {
4702 tcc_error("too many arguments to function");
4705 type
.t
&= ~VT_CONSTANT
; /* need to do that to avoid false warning */
4706 gen_assign_cast(&type
);
4710 /* parse an expression and return its type without any side effect. */
4711 static void expr_type(CType
*type
, void (*expr_fn
)(void))
4720 /* parse an expression of the form '(type)' or '(expr)' and return its
4722 static void parse_expr_type(CType
*type
)
4728 if (parse_btype(type
, &ad
)) {
4729 type_decl(type
, &ad
, &n
, TYPE_ABSTRACT
);
4731 expr_type(type
, gexpr
);
4736 static void parse_type(CType
*type
)
4741 if (!parse_btype(type
, &ad
)) {
4744 type_decl(type
, &ad
, &n
, TYPE_ABSTRACT
);
4747 static void parse_builtin_params(int nc
, const char *args
)
4754 while ((c
= *args
++)) {
4758 case 'e': expr_eq(); continue;
4759 case 't': parse_type(&t
); vpush(&t
); continue;
4760 default: tcc_error("internal error"); break;
4768 static void try_call_scope_cleanup(Sym
*stop
)
4770 Sym
*cls
= current_cleanups
;
4772 for (; cls
!= stop
; cls
= cls
->ncl
) {
4773 Sym
*fs
= cls
->next
;
4774 Sym
*vs
= cls
->prev_tok
;
4776 vpushsym(&fs
->type
, fs
);
4777 vset(&vs
->type
, vs
->r
, vs
->c
);
4779 mk_pointer(&vtop
->type
);
4785 static void try_call_cleanup_goto(Sym
*cleanupstate
)
4790 if (!current_cleanups
)
4793 /* search NCA of both cleanup chains given parents and initial depth */
4794 ocd
= cleanupstate
? cleanupstate
->v
& ~SYM_FIELD
: 0;
4795 for (ccd
= ncleanups
, oc
= cleanupstate
; ocd
> ccd
; --ocd
, oc
= oc
->ncl
)
4797 for (cc
= current_cleanups
; ccd
> ocd
; --ccd
, cc
= cc
->ncl
)
4799 for (; cc
!= oc
; cc
= cc
->ncl
, oc
= oc
->ncl
, --ccd
)
4802 try_call_scope_cleanup(cc
);
4805 ST_FUNC
void unary(void)
4807 int n
, t
, align
, size
, r
, sizeof_caller
;
4812 sizeof_caller
= in_sizeof
;
4815 /* XXX: GCC 2.95.3 does not generate a table although it should be
4823 #ifdef TCC_TARGET_PE
4824 t
= VT_SHORT
|VT_UNSIGNED
;
4832 vsetc(&type
, VT_CONST
, &tokc
);
4836 t
= VT_INT
| VT_UNSIGNED
;
4842 t
= VT_LLONG
| VT_UNSIGNED
;
4854 t
= (LONG_SIZE
== 8 ? VT_LLONG
: VT_INT
) | VT_LONG
;
4857 t
= (LONG_SIZE
== 8 ? VT_LLONG
: VT_INT
) | VT_LONG
| VT_UNSIGNED
;
4859 case TOK___FUNCTION__
:
4861 goto tok_identifier
;
4867 /* special function name identifier */
4868 len
= strlen(funcname
) + 1;
4869 /* generate char[len] type */
4874 vpush_ref(&type
, data_section
, data_section
->data_offset
, len
);
4875 if (!NODATA_WANTED
) {
4876 ptr
= section_ptr_add(data_section
, len
);
4877 memcpy(ptr
, funcname
, len
);
4883 #ifdef TCC_TARGET_PE
4884 t
= VT_SHORT
| VT_UNSIGNED
;
4890 /* string parsing */
4892 if (tcc_state
->char_is_unsigned
)
4893 t
= VT_BYTE
| VT_UNSIGNED
;
4895 if (tcc_state
->warn_write_strings
)
4900 memset(&ad
, 0, sizeof(AttributeDef
));
4901 decl_initializer_alloc(&type
, &ad
, VT_CONST
, 2, 0, 0);
4906 if (parse_btype(&type
, &ad
)) {
4907 type_decl(&type
, &ad
, &n
, TYPE_ABSTRACT
);
4909 /* check ISOC99 compound literal */
4911 /* data is allocated locally by default */
4916 /* all except arrays are lvalues */
4917 if (!(type
.t
& VT_ARRAY
))
4918 r
|= lvalue_type(type
.t
);
4919 memset(&ad
, 0, sizeof(AttributeDef
));
4920 decl_initializer_alloc(&type
, &ad
, r
, 1, 0, 0);
4922 if (sizeof_caller
) {
4929 } else if (tok
== '{') {
4930 int saved_nocode_wanted
= nocode_wanted
;
4932 tcc_error("expected constant");
4933 /* save all registers */
4935 /* statement expression : we do not accept break/continue
4936 inside as GCC does. We do retain the nocode_wanted state,
4937 as statement expressions can't ever be entered from the
4938 outside, so any reactivation of code emission (from labels
4939 or loop heads) can be disabled again after the end of it. */
4940 block(NULL
, NULL
, 1);
4941 nocode_wanted
= saved_nocode_wanted
;
4956 /* functions names must be treated as function pointers,
4957 except for unary '&' and sizeof. Since we consider that
4958 functions are not lvalues, we only have to handle it
4959 there and in function calls. */
4960 /* arrays can also be used although they are not lvalues */
4961 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
&&
4962 !(vtop
->type
.t
& VT_ARRAY
))
4964 mk_pointer(&vtop
->type
);
4970 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
4971 gen_cast_s(VT_BOOL
);
4972 vtop
->c
.i
= !vtop
->c
.i
;
4973 } else if ((vtop
->r
& VT_VALMASK
) == VT_CMP
)
4977 vseti(VT_JMP
, gvtst(1, 0));
4989 if ((vtop
->type
.t
& VT_BTYPE
) == VT_PTR
)
4990 tcc_error("pointer not accepted for unary plus");
4991 /* In order to force cast, we add zero, except for floating point
4992 where we really need an noop (otherwise -0.0 will be transformed
4994 if (!is_float(vtop
->type
.t
)) {
5006 expr_type(&type
, unary
); /* Perform a in_sizeof = 0; */
5007 s
= vtop
[1].sym
; /* hack: accessing previous vtop */
5008 size
= type_size(&type
, &align
);
5009 if (s
&& s
->a
.aligned
)
5010 align
= 1 << (s
->a
.aligned
- 1);
5011 if (t
== TOK_SIZEOF
) {
5012 if (!(type
.t
& VT_VLA
)) {
5014 tcc_error("sizeof applied to an incomplete type");
5017 vla_runtime_type_size(&type
, &align
);
5022 vtop
->type
.t
|= VT_UNSIGNED
;
5025 case TOK_builtin_expect
:
5026 /* __builtin_expect is a no-op for now */
5027 parse_builtin_params(0, "ee");
5030 case TOK_builtin_types_compatible_p
:
5031 parse_builtin_params(0, "tt");
5032 vtop
[-1].type
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
5033 vtop
[0].type
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
5034 n
= is_compatible_types(&vtop
[-1].type
, &vtop
[0].type
);
5038 case TOK_builtin_choose_expr
:
5065 case TOK_builtin_constant_p
:
5066 parse_builtin_params(1, "e");
5067 n
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
5071 case TOK_builtin_frame_address
:
5072 case TOK_builtin_return_address
:
5078 if (tok
!= TOK_CINT
) {
5079 tcc_error("%s only takes positive integers",
5080 tok1
== TOK_builtin_return_address
?
5081 "__builtin_return_address" :
5082 "__builtin_frame_address");
5084 level
= (uint32_t)tokc
.i
;
5089 vset(&type
, VT_LOCAL
, 0); /* local frame */
5091 mk_pointer(&vtop
->type
);
5092 indir(); /* -> parent frame */
5094 if (tok1
== TOK_builtin_return_address
) {
5095 // assume return address is just above frame pointer on stack
5098 mk_pointer(&vtop
->type
);
5103 #ifdef TCC_TARGET_X86_64
5104 #ifdef TCC_TARGET_PE
5105 case TOK_builtin_va_start
:
5106 parse_builtin_params(0, "ee");
5107 r
= vtop
->r
& VT_VALMASK
;
5111 tcc_error("__builtin_va_start expects a local variable");
5113 vtop
->type
= char_pointer_type
;
5118 case TOK_builtin_va_arg_types
:
5119 parse_builtin_params(0, "t");
5120 vpushi(classify_x86_64_va_arg(&vtop
->type
));
5127 #ifdef TCC_TARGET_ARM64
5128 case TOK___va_start
: {
5129 parse_builtin_params(0, "ee");
5133 vtop
->type
.t
= VT_VOID
;
5136 case TOK___va_arg
: {
5137 parse_builtin_params(0, "et");
5145 case TOK___arm64_clear_cache
: {
5146 parse_builtin_params(0, "ee");
5149 vtop
->type
.t
= VT_VOID
;
5153 /* pre operations */
5164 t
= vtop
->type
.t
& VT_BTYPE
;
5166 /* In IEEE negate(x) isn't subtract(0,x), but rather
5170 vtop
->c
.f
= -1.0 * 0.0;
5171 else if (t
== VT_DOUBLE
)
5172 vtop
->c
.d
= -1.0 * 0.0;
5174 vtop
->c
.ld
= -1.0 * 0.0;
5182 goto tok_identifier
;
5184 /* allow to take the address of a label */
5185 if (tok
< TOK_UIDENT
)
5186 expect("label identifier");
5187 s
= label_find(tok
);
5189 s
= label_push(&global_label_stack
, tok
, LABEL_FORWARD
);
5191 if (s
->r
== LABEL_DECLARED
)
5192 s
->r
= LABEL_FORWARD
;
5195 s
->type
.t
= VT_VOID
;
5196 mk_pointer(&s
->type
);
5197 s
->type
.t
|= VT_STATIC
;
5199 vpushsym(&s
->type
, s
);
5205 CType controlling_type
;
5206 int has_default
= 0;
5209 TokenString
*str
= NULL
;
5210 int saved_const_wanted
= const_wanted
;
5215 expr_type(&controlling_type
, expr_eq
);
5216 controlling_type
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
| VT_ARRAY
);
5217 if ((controlling_type
.t
& VT_BTYPE
) == VT_FUNC
)
5218 mk_pointer(&controlling_type
);
5219 const_wanted
= saved_const_wanted
;
5223 if (tok
== TOK_DEFAULT
) {
5225 tcc_error("too many 'default'");
5231 AttributeDef ad_tmp
;
5234 parse_btype(&cur_type
, &ad_tmp
);
5235 type_decl(&cur_type
, &ad_tmp
, &itmp
, TYPE_ABSTRACT
);
5236 if (compare_types(&controlling_type
, &cur_type
, 0)) {
5238 tcc_error("type match twice");
5248 skip_or_save_block(&str
);
5250 skip_or_save_block(NULL
);
5257 type_to_str(buf
, sizeof buf
, &controlling_type
, NULL
);
5258 tcc_error("type '%s' does not match any association", buf
);
5260 begin_macro(str
, 1);
5269 // special qnan , snan and infinity values
5274 vtop
->type
.t
= VT_FLOAT
;
5279 goto special_math_val
;
5282 goto special_math_val
;
5289 expect("identifier");
5291 if (!s
|| IS_ASM_SYM(s
)) {
5292 const char *name
= get_tok_str(t
, NULL
);
5294 tcc_error("'%s' undeclared", name
);
5295 /* for simple function calls, we tolerate undeclared
5296 external reference to int() function */
5297 if (tcc_state
->warn_implicit_function_declaration
5298 #ifdef TCC_TARGET_PE
5299 /* people must be warned about using undeclared WINAPI functions
5300 (which usually start with uppercase letter) */
5301 || (name
[0] >= 'A' && name
[0] <= 'Z')
5304 tcc_warning("implicit declaration of function '%s'", name
);
5305 s
= external_global_sym(t
, &func_old_type
, 0);
5309 /* A symbol that has a register is a local register variable,
5310 which starts out as VT_LOCAL value. */
5311 if ((r
& VT_VALMASK
) < VT_CONST
)
5312 r
= (r
& ~VT_VALMASK
) | VT_LOCAL
;
5314 vset(&s
->type
, r
, s
->c
);
5315 /* Point to s as backpointer (even without r&VT_SYM).
5316 Will be used by at least the x86 inline asm parser for
5322 } else if (r
== VT_CONST
&& IS_ENUM_VAL(s
->type
.t
)) {
5323 vtop
->c
.i
= s
->enum_val
;
5328 /* post operations */
5330 if (tok
== TOK_INC
|| tok
== TOK_DEC
) {
5333 } else if (tok
== '.' || tok
== TOK_ARROW
|| tok
== TOK_CDOUBLE
) {
5336 if (tok
== TOK_ARROW
)
5338 qualifiers
= vtop
->type
.t
& (VT_CONSTANT
| VT_VOLATILE
);
5341 /* expect pointer on structure */
5342 if ((vtop
->type
.t
& VT_BTYPE
) != VT_STRUCT
)
5343 expect("struct or union");
5344 if (tok
== TOK_CDOUBLE
)
5345 expect("field name");
5347 if (tok
== TOK_CINT
|| tok
== TOK_CUINT
)
5348 expect("field name");
5349 s
= find_field(&vtop
->type
, tok
);
5351 tcc_error("field not found: %s", get_tok_str(tok
& ~SYM_FIELD
, &tokc
));
5352 /* add field offset to pointer */
5353 vtop
->type
= char_pointer_type
; /* change type to 'char *' */
5356 /* change type to field type, and set to lvalue */
5357 vtop
->type
= s
->type
;
5358 vtop
->type
.t
|= qualifiers
;
5359 /* an array is never an lvalue */
5360 if (!(vtop
->type
.t
& VT_ARRAY
)) {
5361 vtop
->r
|= lvalue_type(vtop
->type
.t
);
5362 #ifdef CONFIG_TCC_BCHECK
5363 /* if bound checking, the referenced pointer must be checked */
5364 if (tcc_state
->do_bounds_check
&& (vtop
->r
& VT_VALMASK
) != VT_LOCAL
)
5365 vtop
->r
|= VT_MUSTBOUND
;
5369 } else if (tok
== '[') {
5375 } else if (tok
== '(') {
5378 int nb_args
, ret_nregs
, ret_align
, regsize
, variadic
;
5381 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
) {
5382 /* pointer test (no array accepted) */
5383 if ((vtop
->type
.t
& (VT_BTYPE
| VT_ARRAY
)) == VT_PTR
) {
5384 vtop
->type
= *pointed_type(&vtop
->type
);
5385 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
)
5389 expect("function pointer");
5392 vtop
->r
&= ~VT_LVAL
; /* no lvalue */
5394 /* get return type */
5397 sa
= s
->next
; /* first parameter */
5398 nb_args
= regsize
= 0;
5400 /* compute first implicit argument if a structure is returned */
5401 if ((s
->type
.t
& VT_BTYPE
) == VT_STRUCT
) {
5402 variadic
= (s
->f
.func_type
== FUNC_ELLIPSIS
);
5403 ret_nregs
= gfunc_sret(&s
->type
, variadic
, &ret
.type
,
5404 &ret_align
, ®size
);
5406 /* get some space for the returned structure */
5407 size
= type_size(&s
->type
, &align
);
5408 #ifdef TCC_TARGET_ARM64
5409 /* On arm64, a small struct is return in registers.
5410 It is much easier to write it to memory if we know
5411 that we are allowed to write some extra bytes, so
5412 round the allocated space up to a power of 2: */
5414 while (size
& (size
- 1))
5415 size
= (size
| (size
- 1)) + 1;
5417 loc
= (loc
- size
) & -align
;
5419 ret
.r
= VT_LOCAL
| VT_LVAL
;
5420 /* pass it as 'int' to avoid structure arg passing
5422 vseti(VT_LOCAL
, loc
);
5432 /* return in register */
5433 if (is_float(ret
.type
.t
)) {
5434 ret
.r
= reg_fret(ret
.type
.t
);
5435 #ifdef TCC_TARGET_X86_64
5436 if ((ret
.type
.t
& VT_BTYPE
) == VT_QFLOAT
)
5440 #ifndef TCC_TARGET_ARM64
5441 #ifdef TCC_TARGET_X86_64
5442 if ((ret
.type
.t
& VT_BTYPE
) == VT_QLONG
)
5444 if ((ret
.type
.t
& VT_BTYPE
) == VT_LLONG
)
5455 gfunc_param_typed(s
, sa
);
5465 tcc_error("too few arguments to function");
5467 gfunc_call(nb_args
);
5470 for (r
= ret
.r
+ ret_nregs
+ !ret_nregs
; r
-- > ret
.r
;) {
5471 vsetc(&ret
.type
, r
, &ret
.c
);
5472 vtop
->r2
= ret
.r2
; /* Loop only happens when r2 is VT_CONST */
5475 /* handle packed struct return */
5476 if (((s
->type
.t
& VT_BTYPE
) == VT_STRUCT
) && ret_nregs
) {
5479 size
= type_size(&s
->type
, &align
);
5480 /* We're writing whole regs often, make sure there's enough
5481 space. Assume register size is power of 2. */
5482 if (regsize
> align
)
5484 loc
= (loc
- size
) & -align
;
5488 vset(&ret
.type
, VT_LOCAL
| VT_LVAL
, addr
+ offset
);
5492 if (--ret_nregs
== 0)
5496 vset(&s
->type
, VT_LOCAL
| VT_LVAL
, addr
);
5504 ST_FUNC
void expr_prod(void)
5509 while (tok
== '*' || tok
== '/' || tok
== '%') {
5517 ST_FUNC
void expr_sum(void)
5522 while (tok
== '+' || tok
== '-') {
5530 static void expr_shift(void)
5535 while (tok
== TOK_SHL
|| tok
== TOK_SAR
) {
5543 static void expr_cmp(void)
5548 while ((tok
>= TOK_ULE
&& tok
<= TOK_GT
) ||
5549 tok
== TOK_ULT
|| tok
== TOK_UGE
) {
5557 static void expr_cmpeq(void)
5562 while (tok
== TOK_EQ
|| tok
== TOK_NE
) {
5570 static void expr_and(void)
5573 while (tok
== '&') {
5580 static void expr_xor(void)
5583 while (tok
== '^') {
5590 static void expr_or(void)
5593 while (tok
== '|') {
5600 static void expr_land(void)
5603 if (tok
== TOK_LAND
) {
5606 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
5607 gen_cast_s(VT_BOOL
);
5612 while (tok
== TOK_LAND
) {
5628 if (tok
!= TOK_LAND
) {
5641 static void expr_lor(void)
5644 if (tok
== TOK_LOR
) {
5647 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
5648 gen_cast_s(VT_BOOL
);
5653 while (tok
== TOK_LOR
) {
5669 if (tok
!= TOK_LOR
) {
5682 /* Assuming vtop is a value used in a conditional context
5683 (i.e. compared with zero) return 0 if it's false, 1 if
5684 true and -1 if it can't be statically determined. */
5685 static int condition_3way(void)
5688 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
&&
5689 (!(vtop
->r
& VT_SYM
) || !vtop
->sym
->a
.weak
)) {
5691 gen_cast_s(VT_BOOL
);
5698 static void expr_cond(void)
5700 int tt
, u
, r1
, r2
, rc
, t1
, t2
, bt1
, bt2
, islv
, c
, g
;
5702 CType type
, type1
, type2
;
5707 c
= condition_3way();
5708 g
= (tok
== ':' && gnu_ext
);
5710 /* needed to avoid having different registers saved in
5712 if (is_float(vtop
->type
.t
)) {
5714 #ifdef TCC_TARGET_X86_64
5715 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LDOUBLE
) {
5739 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FUNC
)
5740 mk_pointer(&vtop
->type
);
5742 sv
= *vtop
; /* save value to handle it later */
5743 vtop
--; /* no vpop so that FP stack is not flushed */
5759 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FUNC
)
5760 mk_pointer(&vtop
->type
);
5763 bt1
= t1
& VT_BTYPE
;
5765 bt2
= t2
& VT_BTYPE
;
5769 /* cast operands to correct type according to ISOC rules */
5770 if (bt1
== VT_VOID
|| bt2
== VT_VOID
) {
5771 type
.t
= VT_VOID
; /* NOTE: as an extension, we accept void on only one side */
5772 } else if (is_float(bt1
) || is_float(bt2
)) {
5773 if (bt1
== VT_LDOUBLE
|| bt2
== VT_LDOUBLE
) {
5774 type
.t
= VT_LDOUBLE
;
5776 } else if (bt1
== VT_DOUBLE
|| bt2
== VT_DOUBLE
) {
5781 } else if (bt1
== VT_LLONG
|| bt2
== VT_LLONG
) {
5782 /* cast to biggest op */
5783 type
.t
= VT_LLONG
| VT_LONG
;
5784 if (bt1
== VT_LLONG
)
5786 if (bt2
== VT_LLONG
)
5788 /* convert to unsigned if it does not fit in a long long */
5789 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
) ||
5790 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
))
5791 type
.t
|= VT_UNSIGNED
;
5792 } else if (bt1
== VT_PTR
|| bt2
== VT_PTR
) {
5793 /* http://port70.net/~nsz/c/c99/n1256.html#6.5.15p6 */
5794 /* If one is a null ptr constant the result type
5796 if (is_null_pointer (vtop
)) type
= type1
;
5797 else if (is_null_pointer (&sv
)) type
= type2
;
5798 else if (bt1
!= bt2
)
5799 tcc_error("incompatible types in conditional expressions");
5801 CType
*pt1
= pointed_type(&type1
);
5802 CType
*pt2
= pointed_type(&type2
);
5803 int pbt1
= pt1
->t
& VT_BTYPE
;
5804 int pbt2
= pt2
->t
& VT_BTYPE
;
5805 int newquals
, copied
= 0;
5806 /* pointers to void get preferred, otherwise the
5807 pointed to types minus qualifs should be compatible */
5808 type
= (pbt1
== VT_VOID
) ? type1
: type2
;
5809 if (pbt1
!= VT_VOID
&& pbt2
!= VT_VOID
) {
5810 if(!compare_types(pt1
, pt2
, 1/*unqualif*/))
5811 tcc_warning("pointer type mismatch in conditional expression\n");
5813 /* combine qualifs */
5814 newquals
= ((pt1
->t
| pt2
->t
) & (VT_CONSTANT
| VT_VOLATILE
));
5815 if ((~pointed_type(&type
)->t
& (VT_CONSTANT
| VT_VOLATILE
))
5818 /* copy the pointer target symbol */
5819 type
.ref
= sym_push(SYM_FIELD
, &type
.ref
->type
,
5822 pointed_type(&type
)->t
|= newquals
;
5824 /* pointers to incomplete arrays get converted to
5825 pointers to completed ones if possible */
5826 if (pt1
->t
& VT_ARRAY
5827 && pt2
->t
& VT_ARRAY
5828 && pointed_type(&type
)->ref
->c
< 0
5829 && (pt1
->ref
->c
> 0 || pt2
->ref
->c
> 0))
5832 type
.ref
= sym_push(SYM_FIELD
, &type
.ref
->type
,
5834 pointed_type(&type
)->ref
=
5835 sym_push(SYM_FIELD
, &pointed_type(&type
)->ref
->type
,
5836 0, pointed_type(&type
)->ref
->c
);
5837 pointed_type(&type
)->ref
->c
=
5838 0 < pt1
->ref
->c
? pt1
->ref
->c
: pt2
->ref
->c
;
5841 } else if (bt1
== VT_STRUCT
|| bt2
== VT_STRUCT
) {
5842 /* XXX: test structure compatibility */
5843 type
= bt1
== VT_STRUCT
? type1
: type2
;
5845 /* integer operations */
5846 type
.t
= VT_INT
| (VT_LONG
& (t1
| t2
));
5847 /* convert to unsigned if it does not fit in an integer */
5848 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
) ||
5849 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
))
5850 type
.t
|= VT_UNSIGNED
;
5852 /* keep structs lvalue by transforming `(expr ? a : b)` to `*(expr ? &a : &b)` so
5853 that `(expr ? a : b).mem` does not error with "lvalue expected" */
5854 islv
= (vtop
->r
& VT_LVAL
) && (sv
.r
& VT_LVAL
) && VT_STRUCT
== (type
.t
& VT_BTYPE
);
5856 /* now we convert second operand */
5860 mk_pointer(&vtop
->type
);
5862 } else if (VT_STRUCT
== (vtop
->type
.t
& VT_BTYPE
))
5867 if (is_float(type
.t
)) {
5869 #ifdef TCC_TARGET_X86_64
5870 if ((type
.t
& VT_BTYPE
) == VT_LDOUBLE
) {
5874 } else if ((type
.t
& VT_BTYPE
) == VT_LLONG
) {
5875 /* for long longs, we use fixed registers to avoid having
5876 to handle a complicated move */
5887 /* this is horrible, but we must also convert first
5893 mk_pointer(&vtop
->type
);
5895 } else if (VT_STRUCT
== (vtop
->type
.t
& VT_BTYPE
))
5899 if (c
< 0 || islv
) {
5901 move_reg(r2
, r1
, type
.t
);
5911 static void expr_eq(void)
5917 (tok
>= TOK_A_MOD
&& tok
<= TOK_A_DIV
) ||
5918 tok
== TOK_A_XOR
|| tok
== TOK_A_OR
||
5919 tok
== TOK_A_SHL
|| tok
== TOK_A_SAR
) {
5934 ST_FUNC
void gexpr(void)
5945 /* parse a constant expression and return value in vtop. */
5946 static void expr_const1(void)
5955 /* parse an integer constant and return its value. */
5956 static inline int64_t expr_const64(void)
5960 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) != VT_CONST
)
5961 expect("constant expression");
5967 /* parse an integer constant and return its value.
5968 Complain if it doesn't fit 32bit (signed or unsigned). */
5969 ST_FUNC
int expr_const(void)
5972 int64_t wc
= expr_const64();
5974 if (c
!= wc
&& (unsigned)c
!= wc
)
5975 tcc_error("constant exceeds 32 bit");
5979 /* return the label token if current token is a label, otherwise
5981 static int is_label(void)
5985 /* fast test first */
5986 if (tok
< TOK_UIDENT
)
5988 /* no need to save tokc because tok is an identifier */
5994 unget_tok(last_tok
);
5999 #ifndef TCC_TARGET_ARM64
6000 static void gfunc_return(CType
*func_type
)
6002 if ((func_type
->t
& VT_BTYPE
) == VT_STRUCT
) {
6003 CType type
, ret_type
;
6004 int ret_align
, ret_nregs
, regsize
;
6005 ret_nregs
= gfunc_sret(func_type
, func_var
, &ret_type
,
6006 &ret_align
, ®size
);
6007 if (0 == ret_nregs
) {
6008 /* if returning structure, must copy it to implicit
6009 first pointer arg location */
6012 vset(&type
, VT_LOCAL
| VT_LVAL
, func_vc
);
6015 /* copy structure value to pointer */
6018 /* returning structure packed into registers */
6019 int r
, size
, addr
, align
;
6020 size
= type_size(func_type
,&align
);
6021 if ((vtop
->r
!= (VT_LOCAL
| VT_LVAL
) ||
6022 (vtop
->c
.i
& (ret_align
-1)))
6023 && (align
& (ret_align
-1))) {
6024 loc
= (loc
- size
) & -ret_align
;
6027 vset(&type
, VT_LOCAL
| VT_LVAL
, addr
);
6031 vset(&ret_type
, VT_LOCAL
| VT_LVAL
, addr
);
6033 vtop
->type
= ret_type
;
6034 if (is_float(ret_type
.t
))
6035 r
= rc_fret(ret_type
.t
);
6046 if (--ret_nregs
== 0)
6048 /* We assume that when a structure is returned in multiple
6049 registers, their classes are consecutive values of the
6052 vtop
->c
.i
+= regsize
;
6056 } else if (is_float(func_type
->t
)) {
6057 gv(rc_fret(func_type
->t
));
6061 vtop
--; /* NOT vpop() because on x86 it would flush the fp stack */
6065 static int case_cmp(const void *pa
, const void *pb
)
6067 int64_t a
= (*(struct case_t
**) pa
)->v1
;
6068 int64_t b
= (*(struct case_t
**) pb
)->v1
;
6069 return a
< b
? -1 : a
> b
;
6072 static void gcase(struct case_t
**base
, int len
, int *bsym
)
6076 int ll
= (vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
;
6094 gtst_addr(0, p
->sym
); /* v1 <= x <= v2 */
6096 gcase(base
, len
/2, bsym
);
6097 if (cur_switch
->def_sym
)
6098 gjmp_addr(cur_switch
->def_sym
);
6100 *bsym
= gjmp(*bsym
);
6104 base
+= e
; len
-= e
;
6114 if (p
->v1
== p
->v2
) {
6116 gtst_addr(0, p
->sym
);
6126 gtst_addr(0, p
->sym
);
6132 static void block(int *bsym
, int *csym
, int is_expr
)
6134 int a
, b
, c
, d
, cond
;
6137 /* generate line number info */
6138 if (tcc_state
->do_debug
)
6139 tcc_debug_line(tcc_state
);
6142 /* default return value is (void) */
6144 vtop
->type
.t
= VT_VOID
;
6147 if (tok
== TOK_IF
) {
6149 int saved_nocode_wanted
= nocode_wanted
;
6154 cond
= condition_3way();
6160 nocode_wanted
|= 0x20000000;
6161 block(bsym
, csym
, 0);
6163 nocode_wanted
= saved_nocode_wanted
;
6164 if (tok
== TOK_ELSE
) {
6169 nocode_wanted
|= 0x20000000;
6170 block(bsym
, csym
, 0);
6171 gsym(d
); /* patch else jmp */
6173 nocode_wanted
= saved_nocode_wanted
;
6176 } else if (tok
== TOK_WHILE
) {
6177 int saved_nocode_wanted
;
6178 nocode_wanted
&= ~0x20000000;
6188 saved_nocode_wanted
= nocode_wanted
;
6190 nocode_wanted
= saved_nocode_wanted
;
6195 } else if (tok
== '{') {
6196 Sym
*llabel
, *lcleanup
;
6197 int block_vla_sp_loc
= vla_sp_loc
, saved_vlas_in_scope
= vlas_in_scope
;
6198 int lncleanups
= ncleanups
;
6201 /* record local declaration stack position */
6203 llabel
= local_label_stack
;
6204 lcleanup
= current_cleanups
;
6207 /* handle local labels declarations */
6208 while (tok
== TOK_LABEL
) {
6211 if (tok
< TOK_UIDENT
)
6212 expect("label identifier");
6213 label_push(&local_label_stack
, tok
, LABEL_DECLARED
);
6223 while (tok
!= '}') {
6224 if ((a
= is_label()))
6231 block(bsym
, csym
, is_expr
);
6235 if (current_cleanups
!= lcleanup
) {
6239 for (pg
= &pending_gotos
; (g
= *pg
) && g
->c
> lncleanups
;)
6240 if (g
->prev_tok
->r
& LABEL_FORWARD
) {
6245 try_call_scope_cleanup(lcleanup
);
6246 pcl
->jnext
= gjmp(0);
6248 goto remove_pending
;
6257 if (!nocode_wanted
) {
6258 try_call_scope_cleanup(lcleanup
);
6262 current_cleanups
= lcleanup
;
6263 ncleanups
= lncleanups
;
6264 /* pop locally defined labels */
6265 label_pop(&local_label_stack
, llabel
, is_expr
);
6266 /* pop locally defined symbols */
6268 /* In the is_expr case (a statement expression is finished here),
6269 vtop might refer to symbols on the local_stack. Either via the
6270 type or via vtop->sym. We can't pop those nor any that in turn
6271 might be referred to. To make it easier we don't roll back
6272 any symbols in that case; some upper level call to block() will
6273 do that. We do have to remove such symbols from the lookup
6274 tables, though. sym_pop will do that. */
6275 sym_pop(&local_stack
, s
, is_expr
);
6277 /* Pop VLA frames and restore stack pointer if required */
6278 if (vlas_in_scope
> saved_vlas_in_scope
) {
6279 vla_sp_loc
= saved_vlas_in_scope
? block_vla_sp_loc
: vla_sp_root_loc
;
6282 vlas_in_scope
= saved_vlas_in_scope
;
6285 } else if (tok
== TOK_RETURN
) {
6289 gen_assign_cast(&func_vt
);
6290 try_call_scope_cleanup(NULL
);
6291 if ((func_vt
.t
& VT_BTYPE
) == VT_VOID
)
6294 gfunc_return(&func_vt
);
6296 try_call_scope_cleanup(NULL
);
6299 /* jump unless last stmt in top-level block */
6300 if (tok
!= '}' || local_scope
!= 1)
6302 nocode_wanted
|= 0x20000000;
6303 } else if (tok
== TOK_BREAK
) {
6306 tcc_error("cannot break");
6307 *bsym
= gjmp(*bsym
);
6310 nocode_wanted
|= 0x20000000;
6311 } else if (tok
== TOK_CONTINUE
) {
6314 tcc_error("cannot continue");
6315 vla_sp_restore_root();
6316 *csym
= gjmp(*csym
);
6319 nocode_wanted
|= 0x20000000;
6320 } else if (tok
== TOK_FOR
) {
6322 int saved_nocode_wanted
;
6323 nocode_wanted
&= ~0x20000000;
6329 /* c99 for-loop init decl? */
6330 if (!decl0(VT_LOCAL
, 1, NULL
)) {
6331 /* no, regular for-loop init expr */
6357 saved_nocode_wanted
= nocode_wanted
;
6359 nocode_wanted
= saved_nocode_wanted
;
6364 sym_pop(&local_stack
, s
, 0);
6367 if (tok
== TOK_DO
) {
6368 int saved_nocode_wanted
;
6369 nocode_wanted
&= ~0x20000000;
6375 saved_nocode_wanted
= nocode_wanted
;
6381 nocode_wanted
= saved_nocode_wanted
;
6385 nocode_wanted
= saved_nocode_wanted
;
6390 if (tok
== TOK_SWITCH
) {
6391 struct switch_t
*saved
, sw
;
6392 int saved_nocode_wanted
= nocode_wanted
;
6398 switchval
= *vtop
--;
6400 b
= gjmp(0); /* jump to first case */
6401 sw
.p
= NULL
; sw
.n
= 0; sw
.def_sym
= 0;
6405 nocode_wanted
= saved_nocode_wanted
;
6406 a
= gjmp(a
); /* add implicit break */
6409 qsort(sw
.p
, sw
.n
, sizeof(void*), case_cmp
);
6410 for (b
= 1; b
< sw
.n
; b
++)
6411 if (sw
.p
[b
- 1]->v2
>= sw
.p
[b
]->v1
)
6412 tcc_error("duplicate case value");
6413 /* Our switch table sorting is signed, so the compared
6414 value needs to be as well when it's 64bit. */
6415 if ((switchval
.type
.t
& VT_BTYPE
) == VT_LLONG
)
6416 switchval
.type
.t
&= ~VT_UNSIGNED
;
6418 gcase(sw
.p
, sw
.n
, &a
);
6421 gjmp_addr(sw
.def_sym
);
6422 dynarray_reset(&sw
.p
, &sw
.n
);
6427 if (tok
== TOK_CASE
) {
6428 struct case_t
*cr
= tcc_malloc(sizeof(struct case_t
));
6431 nocode_wanted
&= ~0x20000000;
6433 cr
->v1
= cr
->v2
= expr_const64();
6434 if (gnu_ext
&& tok
== TOK_DOTS
) {
6436 cr
->v2
= expr_const64();
6437 if (cr
->v2
< cr
->v1
)
6438 tcc_warning("empty case range");
6441 dynarray_add(&cur_switch
->p
, &cur_switch
->n
, cr
);
6444 goto block_after_label
;
6446 if (tok
== TOK_DEFAULT
) {
6451 if (cur_switch
->def_sym
)
6452 tcc_error("too many 'default'");
6453 cur_switch
->def_sym
= ind
;
6455 goto block_after_label
;
6457 if (tok
== TOK_GOTO
) {
6459 if (tok
== '*' && gnu_ext
) {
6463 if ((vtop
->type
.t
& VT_BTYPE
) != VT_PTR
)
6466 } else if (tok
>= TOK_UIDENT
) {
6467 s
= label_find(tok
);
6468 /* put forward definition if needed */
6470 s
= label_push(&global_label_stack
, tok
, LABEL_FORWARD
);
6471 else if (s
->r
== LABEL_DECLARED
)
6472 s
->r
= LABEL_FORWARD
;
6474 vla_sp_restore_root();
6475 if (s
->r
& LABEL_FORWARD
) {
6476 /* start new goto chain for cleanups, linked via label->next */
6477 if (current_cleanups
) {
6478 sym_push2(&pending_gotos
, SYM_FIELD
, 0, ncleanups
);
6479 pending_gotos
->prev_tok
= s
;
6480 s
= sym_push2(&s
->next
, SYM_FIELD
, 0, 0);
6481 pending_gotos
->next
= s
;
6483 s
->jnext
= gjmp(s
->jnext
);
6485 try_call_cleanup_goto(s
->cleanupstate
);
6486 gjmp_addr(s
->jnext
);
6490 expect("label identifier");
6493 } else if (tok
== TOK_ASM1
|| tok
== TOK_ASM2
|| tok
== TOK_ASM3
) {
6502 if (s
->r
== LABEL_DEFINED
)
6503 tcc_error("duplicate label '%s'", get_tok_str(s
->v
, NULL
));
6504 s
->r
= LABEL_DEFINED
;
6506 Sym
*pcl
; /* pending cleanup goto */
6507 for (pcl
= s
->next
; pcl
; pcl
= pcl
->prev
)
6509 sym_pop(&s
->next
, NULL
, 0);
6513 s
= label_push(&global_label_stack
, b
, LABEL_DEFINED
);
6516 s
->cleanupstate
= current_cleanups
;
6518 /* we accept this, but it is a mistake */
6520 nocode_wanted
&= ~0x20000000;
6522 tcc_warning("deprecated use of label at end of compound statement");
6526 block(bsym
, csym
, is_expr
);
6529 /* expression case */
6544 /* This skips over a stream of tokens containing balanced {} and ()
6545 pairs, stopping at outer ',' ';' and '}' (or matching '}' if we started
6546 with a '{'). If STR then allocates and stores the skipped tokens
6547 in *STR. This doesn't check if () and {} are nested correctly,
6548 i.e. "({)}" is accepted. */
6549 static void skip_or_save_block(TokenString
**str
)
6551 int braces
= tok
== '{';
6554 *str
= tok_str_alloc();
6556 while ((level
> 0 || (tok
!= '}' && tok
!= ',' && tok
!= ';' && tok
!= ')'))) {
6558 if (tok
== TOK_EOF
) {
6559 if (str
|| level
> 0)
6560 tcc_error("unexpected end of file");
6565 tok_str_add_tok(*str
);
6568 if (t
== '{' || t
== '(') {
6570 } else if (t
== '}' || t
== ')') {
6572 if (level
== 0 && braces
&& t
== '}')
6577 tok_str_add(*str
, -1);
6578 tok_str_add(*str
, 0);
6582 #define EXPR_CONST 1
6585 static void parse_init_elem(int expr_type
)
6587 int saved_global_expr
;
6590 /* compound literals must be allocated globally in this case */
6591 saved_global_expr
= global_expr
;
6594 global_expr
= saved_global_expr
;
6595 /* NOTE: symbols are accepted, as well as lvalue for anon symbols
6596 (compound literals). */
6597 if (((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) != VT_CONST
6598 && ((vtop
->r
& (VT_SYM
|VT_LVAL
)) != (VT_SYM
|VT_LVAL
)
6599 || vtop
->sym
->v
< SYM_FIRST_ANOM
))
6600 #ifdef TCC_TARGET_PE
6601 || ((vtop
->r
& VT_SYM
) && vtop
->sym
->a
.dllimport
)
6604 tcc_error("initializer element is not constant");
6612 /* put zeros for variable based init */
6613 static void init_putz(Section
*sec
, unsigned long c
, int size
)
6616 /* nothing to do because globals are already set to zero */
6618 vpush_global_sym(&func_old_type
, TOK_memset
);
6620 #ifdef TCC_TARGET_ARM
6632 #define DIF_SIZE_ONLY 2
6633 #define DIF_HAVE_ELEM 4
6635 /* t is the array or struct type. c is the array or struct
6636 address. cur_field is the pointer to the current
6637 field, for arrays the 'c' member contains the current start
6638 index. 'flags' is as in decl_initializer.
6639 'al' contains the already initialized length of the
6640 current container (starting at c). This returns the new length of that. */
6641 static int decl_designator(CType
*type
, Section
*sec
, unsigned long c
,
6642 Sym
**cur_field
, int flags
, int al
)
6645 int index
, index_last
, align
, l
, nb_elems
, elem_size
;
6646 unsigned long corig
= c
;
6650 if (flags
& DIF_HAVE_ELEM
)
6652 if (gnu_ext
&& (l
= is_label()) != 0)
6654 /* NOTE: we only support ranges for last designator */
6655 while (nb_elems
== 1 && (tok
== '[' || tok
== '.')) {
6657 if (!(type
->t
& VT_ARRAY
))
6658 expect("array type");
6660 index
= index_last
= expr_const();
6661 if (tok
== TOK_DOTS
&& gnu_ext
) {
6663 index_last
= expr_const();
6667 if (index
< 0 || (s
->c
>= 0 && index_last
>= s
->c
) ||
6669 tcc_error("invalid index");
6671 (*cur_field
)->c
= index_last
;
6672 type
= pointed_type(type
);
6673 elem_size
= type_size(type
, &align
);
6674 c
+= index
* elem_size
;
6675 nb_elems
= index_last
- index
+ 1;
6681 if ((type
->t
& VT_BTYPE
) != VT_STRUCT
)
6682 expect("struct/union type");
6683 f
= find_field(type
, l
);
6696 } else if (!gnu_ext
) {
6701 if (type
->t
& VT_ARRAY
) {
6702 index
= (*cur_field
)->c
;
6703 if (type
->ref
->c
>= 0 && index
>= type
->ref
->c
)
6704 tcc_error("index too large");
6705 type
= pointed_type(type
);
6706 c
+= index
* type_size(type
, &align
);
6709 while (f
&& (f
->v
& SYM_FIRST_ANOM
) && (f
->type
.t
& VT_BITFIELD
))
6710 *cur_field
= f
= f
->next
;
6712 tcc_error("too many field init");
6717 /* must put zero in holes (note that doing it that way
6718 ensures that it even works with designators) */
6719 if (!(flags
& DIF_SIZE_ONLY
) && c
- corig
> al
)
6720 init_putz(sec
, corig
+ al
, c
- corig
- al
);
6721 decl_initializer(type
, sec
, c
, flags
& ~DIF_FIRST
);
6723 /* XXX: make it more general */
6724 if (!(flags
& DIF_SIZE_ONLY
) && nb_elems
> 1) {
6725 unsigned long c_end
;
6730 vset(type
, VT_LOCAL
|VT_LVAL
, c
);
6731 for (i
= 1; i
< nb_elems
; i
++) {
6732 vset(type
, VT_LOCAL
|VT_LVAL
, c
+ elem_size
* i
);
6737 } else if (!NODATA_WANTED
) {
6738 c_end
= c
+ nb_elems
* elem_size
;
6739 if (c_end
> sec
->data_allocated
)
6740 section_realloc(sec
, c_end
);
6741 src
= sec
->data
+ c
;
6743 for(i
= 1; i
< nb_elems
; i
++) {
6745 memcpy(dst
, src
, elem_size
);
6749 c
+= nb_elems
* type_size(type
, &align
);
6755 /* store a value or an expression directly in global data or in local array */
6756 static void init_putv(CType
*type
, Section
*sec
, unsigned long c
)
6763 dtype
.t
&= ~VT_CONSTANT
; /* need to do that to avoid false warning */
6767 /* XXX: not portable */
6768 /* XXX: generate error if incorrect relocation */
6769 gen_assign_cast(&dtype
);
6770 bt
= type
->t
& VT_BTYPE
;
6772 if ((vtop
->r
& VT_SYM
)
6775 && (bt
!= (PTR_SIZE
== 8 ? VT_LLONG
: VT_INT
)
6776 || (type
->t
& VT_BITFIELD
))
6777 && !((vtop
->r
& VT_CONST
) && vtop
->sym
->v
>= SYM_FIRST_ANOM
)
6779 tcc_error("initializer element is not computable at load time");
6781 if (NODATA_WANTED
) {
6786 size
= type_size(type
, &align
);
6787 section_reserve(sec
, c
+ size
);
6788 ptr
= sec
->data
+ c
;
6790 /* XXX: make code faster ? */
6791 if ((vtop
->r
& (VT_SYM
|VT_CONST
)) == (VT_SYM
|VT_CONST
) &&
6792 vtop
->sym
->v
>= SYM_FIRST_ANOM
&&
6793 /* XXX This rejects compound literals like
6794 '(void *){ptr}'. The problem is that '&sym' is
6795 represented the same way, which would be ruled out
6796 by the SYM_FIRST_ANOM check above, but also '"string"'
6797 in 'char *p = "string"' is represented the same
6798 with the type being VT_PTR and the symbol being an
6799 anonymous one. That is, there's no difference in vtop
6800 between '(void *){x}' and '&(void *){x}'. Ignore
6801 pointer typed entities here. Hopefully no real code
6802 will every use compound literals with scalar type. */
6803 (vtop
->type
.t
& VT_BTYPE
) != VT_PTR
) {
6804 /* These come from compound literals, memcpy stuff over. */
6808 esym
= elfsym(vtop
->sym
);
6809 ssec
= tcc_state
->sections
[esym
->st_shndx
];
6810 memmove (ptr
, ssec
->data
+ esym
->st_value
, size
);
6812 /* We need to copy over all memory contents, and that
6813 includes relocations. Use the fact that relocs are
6814 created it order, so look from the end of relocs
6815 until we hit one before the copied region. */
6816 int num_relocs
= ssec
->reloc
->data_offset
/ sizeof(*rel
);
6817 rel
= (ElfW_Rel
*)(ssec
->reloc
->data
+ ssec
->reloc
->data_offset
);
6818 while (num_relocs
--) {
6820 if (rel
->r_offset
>= esym
->st_value
+ size
)
6822 if (rel
->r_offset
< esym
->st_value
)
6824 /* Note: if the same fields are initialized multiple
6825 times (possible with designators) then we possibly
6826 add multiple relocations for the same offset here.
6827 That would lead to wrong code, the last reloc needs
6828 to win. We clean this up later after the whole
6829 initializer is parsed. */
6830 put_elf_reloca(symtab_section
, sec
,
6831 c
+ rel
->r_offset
- esym
->st_value
,
6832 ELFW(R_TYPE
)(rel
->r_info
),
6833 ELFW(R_SYM
)(rel
->r_info
),
6843 if (type
->t
& VT_BITFIELD
) {
6844 int bit_pos
, bit_size
, bits
, n
;
6845 unsigned char *p
, v
, m
;
6846 bit_pos
= BIT_POS(vtop
->type
.t
);
6847 bit_size
= BIT_SIZE(vtop
->type
.t
);
6848 p
= (unsigned char*)ptr
+ (bit_pos
>> 3);
6849 bit_pos
&= 7, bits
= 0;
6854 v
= vtop
->c
.i
>> bits
<< bit_pos
;
6855 m
= ((1 << n
) - 1) << bit_pos
;
6856 *p
= (*p
& ~m
) | (v
& m
);
6857 bits
+= n
, bit_size
-= n
, bit_pos
= 0, ++p
;
6861 /* XXX: when cross-compiling we assume that each type has the
6862 same representation on host and target, which is likely to
6863 be wrong in the case of long double */
6865 vtop
->c
.i
= vtop
->c
.i
!= 0;
6867 *(char *)ptr
|= vtop
->c
.i
;
6870 *(short *)ptr
|= vtop
->c
.i
;
6873 *(float*)ptr
= vtop
->c
.f
;
6876 *(double *)ptr
= vtop
->c
.d
;
6879 #if defined TCC_IS_NATIVE_387
6880 if (sizeof (long double) >= 10) /* zero pad ten-byte LD */
6881 memcpy(ptr
, &vtop
->c
.ld
, 10);
6883 else if (sizeof (long double) == sizeof (double))
6884 __asm__("fldl %1\nfstpt %0\n" : "=m" (*ptr
) : "m" (vtop
->c
.ld
));
6886 else if (vtop
->c
.ld
== 0.0)
6890 if (sizeof(long double) == LDOUBLE_SIZE
)
6891 *(long double*)ptr
= vtop
->c
.ld
;
6892 else if (sizeof(double) == LDOUBLE_SIZE
)
6893 *(double *)ptr
= (double)vtop
->c
.ld
;
6895 tcc_error("can't cross compile long double constants");
6899 *(long long *)ptr
|= vtop
->c
.i
;
6906 addr_t val
= vtop
->c
.i
;
6908 if (vtop
->r
& VT_SYM
)
6909 greloca(sec
, vtop
->sym
, c
, R_DATA_PTR
, val
);
6911 *(addr_t
*)ptr
|= val
;
6913 if (vtop
->r
& VT_SYM
)
6914 greloc(sec
, vtop
->sym
, c
, R_DATA_PTR
);
6915 *(addr_t
*)ptr
|= val
;
6921 int val
= vtop
->c
.i
;
6923 if (vtop
->r
& VT_SYM
)
6924 greloca(sec
, vtop
->sym
, c
, R_DATA_PTR
, val
);
6928 if (vtop
->r
& VT_SYM
)
6929 greloc(sec
, vtop
->sym
, c
, R_DATA_PTR
);
6938 vset(&dtype
, VT_LOCAL
|VT_LVAL
, c
);
6945 /* 't' contains the type and storage info. 'c' is the offset of the
6946 object in section 'sec'. If 'sec' is NULL, it means stack based
6947 allocation. 'flags & DIF_FIRST' is true if array '{' must be read (multi
6948 dimension implicit array init handling). 'flags & DIF_SIZE_ONLY' is true if
6949 size only evaluation is wanted (only for arrays). */
6950 static void decl_initializer(CType
*type
, Section
*sec
, unsigned long c
,
6953 int len
, n
, no_oblock
, nb
, i
;
6959 if (!(flags
& DIF_HAVE_ELEM
) && tok
!= '{' &&
6960 /* In case of strings we have special handling for arrays, so
6961 don't consume them as initializer value (which would commit them
6962 to some anonymous symbol). */
6963 tok
!= TOK_LSTR
&& tok
!= TOK_STR
&&
6964 !(flags
& DIF_SIZE_ONLY
)) {
6965 parse_init_elem(!sec
? EXPR_ANY
: EXPR_CONST
);
6966 flags
|= DIF_HAVE_ELEM
;
6969 if ((flags
& DIF_HAVE_ELEM
) &&
6970 !(type
->t
& VT_ARRAY
) &&
6971 /* Use i_c_parameter_t, to strip toplevel qualifiers.
6972 The source type might have VT_CONSTANT set, which is
6973 of course assignable to non-const elements. */
6974 is_compatible_unqualified_types(type
, &vtop
->type
)) {
6975 init_putv(type
, sec
, c
);
6976 } else if (type
->t
& VT_ARRAY
) {
6979 t1
= pointed_type(type
);
6980 size1
= type_size(t1
, &align1
);
6983 if (((flags
& DIF_FIRST
) && tok
!= TOK_LSTR
&& tok
!= TOK_STR
) ||
6986 tcc_error("character array initializer must be a literal,"
6987 " optionally enclosed in braces");
6992 /* only parse strings here if correct type (otherwise: handle
6993 them as ((w)char *) expressions */
6994 if ((tok
== TOK_LSTR
&&
6995 #ifdef TCC_TARGET_PE
6996 (t1
->t
& VT_BTYPE
) == VT_SHORT
&& (t1
->t
& VT_UNSIGNED
)
6998 (t1
->t
& VT_BTYPE
) == VT_INT
7000 ) || (tok
== TOK_STR
&& (t1
->t
& VT_BTYPE
) == VT_BYTE
)) {
7002 while (tok
== TOK_STR
|| tok
== TOK_LSTR
) {
7005 /* compute maximum number of chars wanted */
7007 cstr_len
= tokc
.str
.size
;
7009 cstr_len
= tokc
.str
.size
/ sizeof(nwchar_t
);
7012 if (n
>= 0 && nb
> (n
- len
))
7014 if (!(flags
& DIF_SIZE_ONLY
)) {
7016 tcc_warning("initializer-string for array is too long");
7017 /* in order to go faster for common case (char
7018 string in global variable, we handle it
7020 if (sec
&& tok
== TOK_STR
&& size1
== 1) {
7022 memcpy(sec
->data
+ c
+ len
, tokc
.str
.data
, nb
);
7026 ch
= ((unsigned char *)tokc
.str
.data
)[i
];
7028 ch
= ((nwchar_t
*)tokc
.str
.data
)[i
];
7030 init_putv(t1
, sec
, c
+ (len
+ i
) * size1
);
7037 /* only add trailing zero if enough storage (no
7038 warning in this case since it is standard) */
7039 if (n
< 0 || len
< n
) {
7040 if (!(flags
& DIF_SIZE_ONLY
)) {
7042 init_putv(t1
, sec
, c
+ (len
* size1
));
7053 while (tok
!= '}' || (flags
& DIF_HAVE_ELEM
)) {
7054 len
= decl_designator(type
, sec
, c
, &f
, flags
, len
);
7055 flags
&= ~DIF_HAVE_ELEM
;
7056 if (type
->t
& VT_ARRAY
) {
7058 /* special test for multi dimensional arrays (may not
7059 be strictly correct if designators are used at the
7061 if (no_oblock
&& len
>= n
*size1
)
7064 if (s
->type
.t
== VT_UNION
)
7068 if (no_oblock
&& f
== NULL
)
7077 /* put zeros at the end */
7078 if (!(flags
& DIF_SIZE_ONLY
) && len
< n
*size1
)
7079 init_putz(sec
, c
+ len
, n
*size1
- len
);
7082 /* patch type size if needed, which happens only for array types */
7084 s
->c
= size1
== 1 ? len
: ((len
+ size1
- 1)/size1
);
7085 } else if ((type
->t
& VT_BTYPE
) == VT_STRUCT
) {
7088 if ((flags
& DIF_FIRST
) || tok
== '{') {
7096 } else if (tok
== '{') {
7097 if (flags
& DIF_HAVE_ELEM
)
7100 decl_initializer(type
, sec
, c
, flags
& ~DIF_HAVE_ELEM
);
7102 } else if ((flags
& DIF_SIZE_ONLY
)) {
7103 /* If we supported only ISO C we wouldn't have to accept calling
7104 this on anything than an array if DIF_SIZE_ONLY (and even then
7105 only on the outermost level, so no recursion would be needed),
7106 because initializing a flex array member isn't supported.
7107 But GNU C supports it, so we need to recurse even into
7108 subfields of structs and arrays when DIF_SIZE_ONLY is set. */
7109 /* just skip expression */
7110 skip_or_save_block(NULL
);
7112 if (!(flags
& DIF_HAVE_ELEM
)) {
7113 /* This should happen only when we haven't parsed
7114 the init element above for fear of committing a
7115 string constant to memory too early. */
7116 if (tok
!= TOK_STR
&& tok
!= TOK_LSTR
)
7117 expect("string constant");
7118 parse_init_elem(!sec
? EXPR_ANY
: EXPR_CONST
);
7120 init_putv(type
, sec
, c
);
7124 /* parse an initializer for type 't' if 'has_init' is non zero, and
7125 allocate space in local or global data space ('r' is either
7126 VT_LOCAL or VT_CONST). If 'v' is non zero, then an associated
7127 variable 'v' of scope 'scope' is declared before initializers
7128 are parsed. If 'v' is zero, then a reference to the new object
7129 is put in the value stack. If 'has_init' is 2, a special parsing
7130 is done to handle string constants. */
7131 static void decl_initializer_alloc(CType
*type
, AttributeDef
*ad
, int r
,
7132 int has_init
, int v
, int scope
)
7134 int size
, align
, addr
;
7135 TokenString
*init_str
= NULL
;
7138 Sym
*flexible_array
;
7140 int saved_nocode_wanted
= nocode_wanted
;
7141 #ifdef CONFIG_TCC_BCHECK
7145 /* Always allocate static or global variables */
7146 if (v
&& (r
& VT_VALMASK
) == VT_CONST
)
7147 nocode_wanted
|= 0x80000000;
7149 #ifdef CONFIG_TCC_BCHECK
7150 bcheck
= tcc_state
->do_bounds_check
&& !NODATA_WANTED
;
7153 flexible_array
= NULL
;
7154 if ((type
->t
& VT_BTYPE
) == VT_STRUCT
) {
7155 Sym
*field
= type
->ref
->next
;
7158 field
= field
->next
;
7159 if (field
->type
.t
& VT_ARRAY
&& field
->type
.ref
->c
< 0)
7160 flexible_array
= field
;
7164 size
= type_size(type
, &align
);
7165 /* If unknown size, we must evaluate it before
7166 evaluating initializers because
7167 initializers can generate global data too
7168 (e.g. string pointers or ISOC99 compound
7169 literals). It also simplifies local
7170 initializers handling */
7171 if (size
< 0 || (flexible_array
&& has_init
)) {
7173 tcc_error("unknown type size");
7174 /* get all init string */
7175 if (has_init
== 2) {
7176 init_str
= tok_str_alloc();
7177 /* only get strings */
7178 while (tok
== TOK_STR
|| tok
== TOK_LSTR
) {
7179 tok_str_add_tok(init_str
);
7182 tok_str_add(init_str
, -1);
7183 tok_str_add(init_str
, 0);
7185 skip_or_save_block(&init_str
);
7190 begin_macro(init_str
, 1);
7192 decl_initializer(type
, NULL
, 0, DIF_FIRST
| DIF_SIZE_ONLY
);
7193 /* prepare second initializer parsing */
7194 macro_ptr
= init_str
->str
;
7197 /* if still unknown size, error */
7198 size
= type_size(type
, &align
);
7200 tcc_error("unknown type size");
7202 /* If there's a flex member and it was used in the initializer
7204 if (flexible_array
&&
7205 flexible_array
->type
.ref
->c
> 0)
7206 size
+= flexible_array
->type
.ref
->c
7207 * pointed_size(&flexible_array
->type
);
7208 /* take into account specified alignment if bigger */
7209 if (ad
->a
.aligned
) {
7210 int speca
= 1 << (ad
->a
.aligned
- 1);
7213 } else if (ad
->a
.packed
) {
7217 if (!v
&& NODATA_WANTED
)
7218 size
= 0, align
= 1;
7220 if ((r
& VT_VALMASK
) == VT_LOCAL
) {
7222 #ifdef CONFIG_TCC_BCHECK
7223 if (bcheck
&& (type
->t
& VT_ARRAY
)) {
7227 loc
= (loc
- size
) & -align
;
7229 #ifdef CONFIG_TCC_BCHECK
7230 /* handles bounds */
7231 /* XXX: currently, since we do only one pass, we cannot track
7232 '&' operators, so we add only arrays */
7233 if (bcheck
&& (type
->t
& VT_ARRAY
)) {
7235 /* add padding between regions */
7237 /* then add local bound info */
7238 bounds_ptr
= section_ptr_add(lbounds_section
, 2 * sizeof(addr_t
));
7239 bounds_ptr
[0] = addr
;
7240 bounds_ptr
[1] = size
;
7244 /* local variable */
7245 #ifdef CONFIG_TCC_ASM
7246 if (ad
->asm_label
) {
7247 int reg
= asm_parse_regvar(ad
->asm_label
);
7249 r
= (r
& ~VT_VALMASK
) | reg
;
7252 sym
= sym_push(v
, type
, r
, addr
);
7253 if (ad
->cleanup_func
) {
7254 Sym
*cls
= sym_push2(&all_cleanups
, SYM_FIELD
| ++ncleanups
, 0, 0);
7255 cls
->prev_tok
= sym
;
7256 cls
->next
= ad
->cleanup_func
;
7257 cls
->ncl
= current_cleanups
;
7258 current_cleanups
= cls
;
7263 /* push local reference */
7264 vset(type
, r
, addr
);
7267 if (v
&& scope
== VT_CONST
) {
7268 /* see if the symbol was already defined */
7271 patch_storage(sym
, ad
, type
);
7272 /* we accept several definitions of the same global variable. */
7273 if (!has_init
&& sym
->c
&& elfsym(sym
)->st_shndx
!= SHN_UNDEF
)
7278 /* allocate symbol in corresponding section */
7283 else if (tcc_state
->nocommon
)
7288 addr
= section_add(sec
, size
, align
);
7289 #ifdef CONFIG_TCC_BCHECK
7290 /* add padding if bound check */
7292 section_add(sec
, 1, 1);
7295 addr
= align
; /* SHN_COMMON is special, symbol value is align */
7296 sec
= common_section
;
7301 sym
= sym_push(v
, type
, r
| VT_SYM
, 0);
7302 patch_storage(sym
, ad
, NULL
);
7304 /* Local statics have a scope until now (for
7305 warnings), remove it here. */
7307 /* update symbol definition */
7308 put_extern_sym(sym
, sec
, addr
, size
);
7310 /* push global reference */
7311 sym
= get_sym_ref(type
, sec
, addr
, size
);
7312 vpushsym(type
, sym
);
7316 #ifdef CONFIG_TCC_BCHECK
7317 /* handles bounds now because the symbol must be defined
7318 before for the relocation */
7322 greloca(bounds_section
, sym
, bounds_section
->data_offset
, R_DATA_PTR
, 0);
7323 /* then add global bound info */
7324 bounds_ptr
= section_ptr_add(bounds_section
, 2 * sizeof(addr_t
));
7325 bounds_ptr
[0] = 0; /* relocated */
7326 bounds_ptr
[1] = size
;
7331 if (type
->t
& VT_VLA
) {
7337 /* save current stack pointer */
7338 if (vlas_in_scope
== 0) {
7339 if (vla_sp_root_loc
== -1)
7340 vla_sp_root_loc
= (loc
-= PTR_SIZE
);
7341 gen_vla_sp_save(vla_sp_root_loc
);
7344 vla_runtime_type_size(type
, &a
);
7345 gen_vla_alloc(type
, a
);
7346 #if defined TCC_TARGET_PE && defined TCC_TARGET_X86_64
7347 /* on _WIN64, because of the function args scratch area, the
7348 result of alloca differs from RSP and is returned in RAX. */
7349 gen_vla_result(addr
), addr
= (loc
-= PTR_SIZE
);
7351 gen_vla_sp_save(addr
);
7355 } else if (has_init
) {
7356 size_t oldreloc_offset
= 0;
7357 if (sec
&& sec
->reloc
)
7358 oldreloc_offset
= sec
->reloc
->data_offset
;
7359 decl_initializer(type
, sec
, addr
, DIF_FIRST
);
7360 if (sec
&& sec
->reloc
)
7361 squeeze_multi_relocs(sec
, oldreloc_offset
);
7362 /* patch flexible array member size back to -1, */
7363 /* for possible subsequent similar declarations */
7365 flexible_array
->type
.ref
->c
= -1;
7369 /* restore parse state if needed */
7375 nocode_wanted
= saved_nocode_wanted
;
7378 /* parse a function defined by symbol 'sym' and generate its code in
7379 'cur_text_section' */
7380 static void gen_function(Sym
*sym
)
7383 ind
= cur_text_section
->data_offset
;
7384 if (sym
->a
.aligned
) {
7385 size_t newoff
= section_add(cur_text_section
, 0,
7386 1 << (sym
->a
.aligned
- 1));
7387 gen_fill_nops(newoff
- ind
);
7389 /* NOTE: we patch the symbol size later */
7390 put_extern_sym(sym
, cur_text_section
, ind
, 0);
7391 funcname
= get_tok_str(sym
->v
, NULL
);
7393 /* Initialize VLA state */
7395 vla_sp_root_loc
= -1;
7396 /* put debug symbol */
7397 tcc_debug_funcstart(tcc_state
, sym
);
7398 /* push a dummy symbol to enable local sym storage */
7399 sym_push2(&local_stack
, SYM_FIELD
, 0, 0);
7400 local_scope
= 1; /* for function parameters */
7401 gfunc_prolog(&sym
->type
);
7402 reset_local_scope();
7404 clear_temp_local_var_list();
7405 block(NULL
, NULL
, 0);
7406 if (!(nocode_wanted
& 0x20000000)
7407 && ((func_vt
.t
& VT_BTYPE
) == VT_INT
)
7408 && !strcmp (funcname
, "main"))
7412 gen_assign_cast(&func_vt
);
7413 gfunc_return(&func_vt
);
7418 cur_text_section
->data_offset
= ind
;
7419 label_pop(&global_label_stack
, NULL
, 0);
7420 /* reset local stack */
7421 reset_local_scope();
7422 sym_pop(&local_stack
, NULL
, 0);
7423 /* end of function */
7424 /* patch symbol size */
7425 elfsym(sym
)->st_size
= ind
- func_ind
;
7426 tcc_debug_funcend(tcc_state
, ind
- func_ind
);
7427 /* It's better to crash than to generate wrong code */
7428 cur_text_section
= NULL
;
7429 funcname
= ""; /* for safety */
7430 func_vt
.t
= VT_VOID
; /* for safety */
7431 func_var
= 0; /* for safety */
7432 ind
= 0; /* for safety */
7433 nocode_wanted
= 0x80000000;
7437 static void gen_inline_functions(TCCState
*s
)
7440 int inline_generated
, i
, ln
;
7441 struct InlineFunc
*fn
;
7443 ln
= file
->line_num
;
7444 /* iterate while inline function are referenced */
7446 inline_generated
= 0;
7447 for (i
= 0; i
< s
->nb_inline_fns
; ++i
) {
7448 fn
= s
->inline_fns
[i
];
7450 if (sym
&& sym
->c
) {
7451 /* the function was used: generate its code and
7452 convert it to a normal function */
7455 pstrcpy(file
->filename
, sizeof file
->filename
, fn
->filename
);
7456 sym
->type
.t
&= ~VT_INLINE
;
7458 begin_macro(fn
->func_str
, 1);
7460 cur_text_section
= text_section
;
7464 inline_generated
= 1;
7467 } while (inline_generated
);
7468 file
->line_num
= ln
;
7471 ST_FUNC
void free_inline_functions(TCCState
*s
)
7474 /* free tokens of unused inline functions */
7475 for (i
= 0; i
< s
->nb_inline_fns
; ++i
) {
7476 struct InlineFunc
*fn
= s
->inline_fns
[i
];
7478 tok_str_free(fn
->func_str
);
7480 dynarray_reset(&s
->inline_fns
, &s
->nb_inline_fns
);
7483 /* 'l' is VT_LOCAL or VT_CONST to define default storage type, or VT_CMP
7484 if parsing old style parameter decl list (and FUNC_SYM is set then) */
7485 static int decl0(int l
, int is_for_loop_init
, Sym
*func_sym
)
7490 AttributeDef ad
, adbase
;
7493 if (!parse_btype(&btype
, &adbase
)) {
7494 if (is_for_loop_init
)
7496 /* skip redundant ';' if not in old parameter decl scope */
7497 if (tok
== ';' && l
!= VT_CMP
) {
7503 if (tok
== TOK_ASM1
|| tok
== TOK_ASM2
|| tok
== TOK_ASM3
) {
7504 /* global asm block */
7508 if (tok
>= TOK_UIDENT
) {
7509 /* special test for old K&R protos without explicit int
7510 type. Only accepted when defining global data */
7514 expect("declaration");
7519 if ((btype
.t
& VT_BTYPE
) == VT_STRUCT
) {
7520 int v
= btype
.ref
->v
;
7521 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) >= SYM_FIRST_ANOM
)
7522 tcc_warning("unnamed struct/union that defines no instances");
7526 if (IS_ENUM(btype
.t
)) {
7531 while (1) { /* iterate thru each declaration */
7533 /* If the base type itself was an array type of unspecified
7534 size (like in 'typedef int arr[]; arr x = {1};') then
7535 we will overwrite the unknown size by the real one for
7536 this decl. We need to unshare the ref symbol holding
7538 if ((type
.t
& VT_ARRAY
) && type
.ref
->c
< 0) {
7539 type
.ref
= sym_push(SYM_FIELD
, &type
.ref
->type
, 0, type
.ref
->c
);
7542 type_decl(&type
, &ad
, &v
, TYPE_DIRECT
);
7546 type_to_str(buf
, sizeof(buf
), &type
, get_tok_str(v
, NULL
));
7547 printf("type = '%s'\n", buf
);
7550 if ((type
.t
& VT_BTYPE
) == VT_FUNC
) {
7551 if ((type
.t
& VT_STATIC
) && (l
== VT_LOCAL
)) {
7552 tcc_error("function without file scope cannot be static");
7554 /* if old style function prototype, we accept a
7557 if (sym
->f
.func_type
== FUNC_OLD
&& l
== VT_CONST
)
7558 decl0(VT_CMP
, 0, sym
);
7561 if (gnu_ext
&& (tok
== TOK_ASM1
|| tok
== TOK_ASM2
|| tok
== TOK_ASM3
)) {
7562 ad
.asm_label
= asm_label_instr();
7563 /* parse one last attribute list, after asm label */
7564 parse_attribute(&ad
);
7569 #ifdef TCC_TARGET_PE
7570 if (ad
.a
.dllimport
|| ad
.a
.dllexport
) {
7571 if (type
.t
& (VT_STATIC
|VT_TYPEDEF
))
7572 tcc_error("cannot have dll linkage with static or typedef");
7573 if (ad
.a
.dllimport
) {
7574 if ((type
.t
& VT_BTYPE
) == VT_FUNC
)
7577 type
.t
|= VT_EXTERN
;
7583 tcc_error("cannot use local functions");
7584 if ((type
.t
& VT_BTYPE
) != VT_FUNC
)
7585 expect("function definition");
7587 /* reject abstract declarators in function definition
7588 make old style params without decl have int type */
7590 while ((sym
= sym
->next
) != NULL
) {
7591 if (!(sym
->v
& ~SYM_FIELD
))
7592 expect("identifier");
7593 if (sym
->type
.t
== VT_VOID
)
7594 sym
->type
= int_type
;
7597 /* XXX: cannot do better now: convert extern line to static inline */
7598 if ((type
.t
& (VT_EXTERN
| VT_INLINE
)) == (VT_EXTERN
| VT_INLINE
))
7599 type
.t
= (type
.t
& ~VT_EXTERN
) | VT_STATIC
;
7601 /* put function symbol */
7602 sym
= external_global_sym(v
, &type
, 0);
7603 type
.t
&= ~VT_EXTERN
;
7604 patch_storage(sym
, &ad
, &type
);
7606 /* static inline functions are just recorded as a kind
7607 of macro. Their code will be emitted at the end of
7608 the compilation unit only if they are used */
7609 if ((type
.t
& (VT_INLINE
| VT_STATIC
)) ==
7610 (VT_INLINE
| VT_STATIC
)) {
7611 struct InlineFunc
*fn
;
7612 const char *filename
;
7614 filename
= file
? file
->filename
: "";
7615 fn
= tcc_malloc(sizeof *fn
+ strlen(filename
));
7616 strcpy(fn
->filename
, filename
);
7618 skip_or_save_block(&fn
->func_str
);
7619 dynarray_add(&tcc_state
->inline_fns
,
7620 &tcc_state
->nb_inline_fns
, fn
);
7622 /* compute text section */
7623 cur_text_section
= ad
.section
;
7624 if (!cur_text_section
)
7625 cur_text_section
= text_section
;
7631 /* find parameter in function parameter list */
7632 for (sym
= func_sym
->next
; sym
; sym
= sym
->next
)
7633 if ((sym
->v
& ~SYM_FIELD
) == v
)
7635 tcc_error("declaration for parameter '%s' but no such parameter",
7636 get_tok_str(v
, NULL
));
7638 if (type
.t
& VT_STORAGE
) /* 'register' is okay */
7639 tcc_error("storage class specified for '%s'",
7640 get_tok_str(v
, NULL
));
7641 if (sym
->type
.t
!= VT_VOID
)
7642 tcc_error("redefinition of parameter '%s'",
7643 get_tok_str(v
, NULL
));
7644 convert_parameter_type(&type
);
7646 } else if (type
.t
& VT_TYPEDEF
) {
7647 /* save typedefed type */
7648 /* XXX: test storage specifiers ? */
7650 if (sym
&& sym
->sym_scope
== local_scope
) {
7651 if (!is_compatible_types(&sym
->type
, &type
)
7652 || !(sym
->type
.t
& VT_TYPEDEF
))
7653 tcc_error("incompatible redefinition of '%s'",
7654 get_tok_str(v
, NULL
));
7657 sym
= sym_push(v
, &type
, 0, 0);
7661 } else if ((type
.t
& VT_BTYPE
) == VT_VOID
7662 && !(type
.t
& VT_EXTERN
)) {
7663 tcc_error("declaration of void object");
7666 if ((type
.t
& VT_BTYPE
) == VT_FUNC
) {
7667 /* external function definition */
7668 /* specific case for func_call attribute */
7670 } else if (!(type
.t
& VT_ARRAY
)) {
7671 /* not lvalue if array */
7672 r
|= lvalue_type(type
.t
);
7674 has_init
= (tok
== '=');
7675 if (has_init
&& (type
.t
& VT_VLA
))
7676 tcc_error("variable length array cannot be initialized");
7677 if (((type
.t
& VT_EXTERN
) && (!has_init
|| l
!= VT_CONST
)) ||
7678 ((type
.t
& VT_BTYPE
) == VT_FUNC
) ||
7679 ((type
.t
& VT_ARRAY
) && (type
.t
& VT_STATIC
) &&
7680 !has_init
&& l
== VT_CONST
&& type
.ref
->c
< 0)) {
7681 /* external variable or function */
7682 /* NOTE: as GCC, uninitialized global static
7683 arrays of null size are considered as
7685 type
.t
|= VT_EXTERN
;
7686 sym
= external_sym(v
, &type
, r
, &ad
);
7687 if (ad
.alias_target
) {
7690 alias_target
= sym_find(ad
.alias_target
);
7691 esym
= elfsym(alias_target
);
7693 tcc_error("unsupported forward __alias__ attribute");
7694 /* Local statics have a scope until now (for
7695 warnings), remove it here. */
7697 put_extern_sym2(sym
, esym
->st_shndx
, esym
->st_value
, esym
->st_size
, 0);
7700 if (type
.t
& VT_STATIC
)
7706 else if (l
== VT_CONST
)
7707 /* uninitialized global variables may be overridden */
7708 type
.t
|= VT_EXTERN
;
7709 decl_initializer_alloc(&type
, &ad
, r
, has_init
, v
, l
);
7713 if (is_for_loop_init
)
7725 static void decl(int l
)
7730 /* ------------------------------------------------------------------------- */