2 * TCC - Tiny C Compiler
4 * Copyright (c) 2001-2004 Fabrice Bellard
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
22 /********************************************************/
23 /* global variables */
25 /* loc : local variable index
26 ind : output code index
28 anon_sym: anonymous symbol index
30 ST_DATA
int rsym
, anon_sym
, ind
, loc
;
32 ST_DATA Sym
*sym_free_first
;
33 ST_DATA
void **sym_pools
;
34 ST_DATA
int nb_sym_pools
;
36 ST_DATA Sym
*global_stack
;
37 ST_DATA Sym
*local_stack
;
38 ST_DATA Sym
*define_stack
;
39 ST_DATA Sym
*global_label_stack
;
40 ST_DATA Sym
*local_label_stack
;
42 static Sym
*all_cleanups
, *current_cleanups
, *pending_gotos
;
45 static int local_scope
;
47 static int section_sym
;
49 ST_DATA
int vlas_in_scope
; /* number of VLAs that are currently in scope */
50 ST_DATA
int vla_sp_root_loc
; /* vla_sp_loc for SP before any VLAs were pushed */
51 ST_DATA
int vla_sp_loc
; /* Pointer to variable holding location to store stack pointer on the stack when modifying stack pointer */
53 ST_DATA SValue __vstack
[1+VSTACK_SIZE
], *vtop
, *pvtop
;
55 ST_DATA
int const_wanted
; /* true if constant wanted */
56 ST_DATA
int nocode_wanted
; /* no code generation wanted */
57 #define NODATA_WANTED (nocode_wanted > 0) /* no static data output wanted either */
58 #define STATIC_DATA_WANTED (nocode_wanted & 0xC0000000) /* only static data output */
59 ST_DATA
int global_expr
; /* true if compound literals must be allocated globally (used during initializers parsing */
60 ST_DATA CType func_vt
; /* current function return type (used by return instruction) */
61 ST_DATA
int func_var
; /* true if current function is variadic (used by return instruction) */
63 ST_DATA
int last_line_num
, last_ind
, func_ind
; /* debug last line number and pc */
64 ST_DATA
const char *funcname
;
67 ST_DATA CType char_pointer_type
, func_old_type
, int_type
, size_type
, ptrdiff_type
;
69 ST_DATA
struct switch_t
{
73 } **p
; int n
; /* list of case ranges */
74 int def_sym
; /* default symbol */
75 } *cur_switch
; /* current switch */
77 #define MAX_TEMP_LOCAL_VARIABLE_NUMBER 0x4
78 /*list of temporary local variables on the stack in current function. */
79 ST_DATA
struct temp_local_variable
{
80 int location
; //offset on stack. Svalue.c.i
83 } arr_temp_local_vars
[MAX_TEMP_LOCAL_VARIABLE_NUMBER
];
84 short nb_temp_local_vars
;
86 /* ------------------------------------------------------------------------- */
88 static void gen_cast(CType
*type
);
89 static void gen_cast_s(int t
);
90 static inline CType
*pointed_type(CType
*type
);
91 static int is_compatible_types(CType
*type1
, CType
*type2
);
92 static int parse_btype(CType
*type
, AttributeDef
*ad
);
93 static CType
*type_decl(CType
*type
, AttributeDef
*ad
, int *v
, int td
);
94 static void parse_expr_type(CType
*type
);
95 static void init_putv(CType
*type
, Section
*sec
, unsigned long c
);
96 static void decl_initializer(CType
*type
, Section
*sec
, unsigned long c
, int first
, int size_only
);
97 static void block(int *bsym
, int *csym
, int is_expr
);
98 static void decl_initializer_alloc(CType
*type
, AttributeDef
*ad
, int r
, int has_init
, int v
, int scope
);
99 static void decl(int l
);
100 static int decl0(int l
, int is_for_loop_init
, Sym
*);
101 static void expr_eq(void);
102 static void vla_runtime_type_size(CType
*type
, int *a
);
103 static void vla_sp_restore(void);
104 static void vla_sp_restore_root(void);
105 static int is_compatible_unqualified_types(CType
*type1
, CType
*type2
);
106 static inline int64_t expr_const64(void);
107 static void vpush64(int ty
, unsigned long long v
);
108 static void vpush(CType
*type
);
109 static int gvtst(int inv
, int t
);
110 static void gen_inline_functions(TCCState
*s
);
111 static void skip_or_save_block(TokenString
**str
);
112 static void gv_dup(void);
113 static int get_temp_local_var(int size
,int align
);
114 static void clear_temp_local_var_list();
117 static void reset_local_scope(void)
119 if (current_cleanups
)
120 tcc_error("ICE current_cleanups");
121 sym_pop(&all_cleanups
, NULL
, 0);
125 ST_INLN
int is_float(int t
)
129 return bt
== VT_LDOUBLE
|| bt
== VT_DOUBLE
|| bt
== VT_FLOAT
|| bt
== VT_QFLOAT
;
132 /* we use our own 'finite' function to avoid potential problems with
133 non standard math libs */
134 /* XXX: endianness dependent */
135 ST_FUNC
int ieee_finite(double d
)
138 memcpy(p
, &d
, sizeof(double));
139 return ((unsigned)((p
[1] | 0x800fffff) + 1)) >> 31;
142 /* compiling intel long double natively */
143 #if (defined __i386__ || defined __x86_64__) \
144 && (defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64)
145 # define TCC_IS_NATIVE_387
148 ST_FUNC
void test_lvalue(void)
150 if (!(vtop
->r
& VT_LVAL
))
154 ST_FUNC
void check_vstack(void)
157 tcc_error("internal compiler error: vstack leak (%d)", vtop
- pvtop
);
160 /* ------------------------------------------------------------------------- */
161 /* vstack debugging aid */
164 void pv (const char *lbl
, int a
, int b
)
167 for (i
= a
; i
< a
+ b
; ++i
) {
168 SValue
*p
= &vtop
[-i
];
169 printf("%s vtop[-%d] : type.t:%04x r:%04x r2:%04x c.i:%d\n",
170 lbl
, i
, p
->type
.t
, p
->r
, p
->r2
, (int)p
->c
.i
);
175 /* ------------------------------------------------------------------------- */
176 /* start of translation unit info */
177 ST_FUNC
void tcc_debug_start(TCCState
*s1
)
182 /* file info: full path + filename */
183 section_sym
= put_elf_sym(symtab_section
, 0, 0,
184 ELFW(ST_INFO
)(STB_LOCAL
, STT_SECTION
), 0,
185 text_section
->sh_num
, NULL
);
186 getcwd(buf
, sizeof(buf
));
188 normalize_slashes(buf
);
190 pstrcat(buf
, sizeof(buf
), "/");
191 put_stabs_r(buf
, N_SO
, 0, 0,
192 text_section
->data_offset
, text_section
, section_sym
);
193 put_stabs_r(file
->filename
, N_SO
, 0, 0,
194 text_section
->data_offset
, text_section
, section_sym
);
199 /* an elf symbol of type STT_FILE must be put so that STB_LOCAL
200 symbols can be safely used */
201 put_elf_sym(symtab_section
, 0, 0,
202 ELFW(ST_INFO
)(STB_LOCAL
, STT_FILE
), 0,
203 SHN_ABS
, file
->filename
);
206 /* put end of translation unit info */
207 ST_FUNC
void tcc_debug_end(TCCState
*s1
)
211 put_stabs_r(NULL
, N_SO
, 0, 0,
212 text_section
->data_offset
, text_section
, section_sym
);
216 /* generate line number info */
217 ST_FUNC
void tcc_debug_line(TCCState
*s1
)
221 if ((last_line_num
!= file
->line_num
|| last_ind
!= ind
)) {
222 put_stabn(N_SLINE
, 0, file
->line_num
, ind
- func_ind
);
224 last_line_num
= file
->line_num
;
228 /* put function symbol */
229 ST_FUNC
void tcc_debug_funcstart(TCCState
*s1
, Sym
*sym
)
237 /* XXX: we put here a dummy type */
238 snprintf(buf
, sizeof(buf
), "%s:%c1",
239 funcname
, sym
->type
.t
& VT_STATIC
? 'f' : 'F');
240 put_stabs_r(buf
, N_FUN
, 0, file
->line_num
, 0,
241 cur_text_section
, sym
->c
);
242 /* //gr gdb wants a line at the function */
243 put_stabn(N_SLINE
, 0, file
->line_num
, 0);
249 /* put function size */
250 ST_FUNC
void tcc_debug_funcend(TCCState
*s1
, int size
)
254 put_stabn(N_FUN
, 0, 0, size
);
257 /* ------------------------------------------------------------------------- */
258 ST_FUNC
int tccgen_compile(TCCState
*s1
)
260 cur_text_section
= NULL
;
262 anon_sym
= SYM_FIRST_ANOM
;
265 nocode_wanted
= 0x80000000;
267 /* define some often used types */
269 char_pointer_type
.t
= VT_BYTE
;
270 mk_pointer(&char_pointer_type
);
272 size_type
.t
= VT_INT
| VT_UNSIGNED
;
273 ptrdiff_type
.t
= VT_INT
;
275 size_type
.t
= VT_LLONG
| VT_UNSIGNED
;
276 ptrdiff_type
.t
= VT_LLONG
;
278 size_type
.t
= VT_LONG
| VT_LLONG
| VT_UNSIGNED
;
279 ptrdiff_type
.t
= VT_LONG
| VT_LLONG
;
281 func_old_type
.t
= VT_FUNC
;
282 func_old_type
.ref
= sym_push(SYM_FIELD
, &int_type
, 0, 0);
283 func_old_type
.ref
->f
.func_call
= FUNC_CDECL
;
284 func_old_type
.ref
->f
.func_type
= FUNC_OLD
;
288 #ifdef TCC_TARGET_ARM
293 printf("%s: **** new file\n", file
->filename
);
296 parse_flags
= PARSE_FLAG_PREPROCESS
| PARSE_FLAG_TOK_NUM
| PARSE_FLAG_TOK_STR
;
299 gen_inline_functions(s1
);
301 /* end of translation unit info */
306 /* ------------------------------------------------------------------------- */
307 ST_FUNC ElfSym
*elfsym(Sym
*s
)
311 return &((ElfSym
*)symtab_section
->data
)[s
->c
];
314 /* apply storage attributes to Elf symbol */
315 ST_FUNC
void update_storage(Sym
*sym
)
318 int sym_bind
, old_sym_bind
;
324 if (sym
->a
.visibility
)
325 esym
->st_other
= (esym
->st_other
& ~ELFW(ST_VISIBILITY
)(-1))
328 if (sym
->type
.t
& VT_STATIC
)
329 sym_bind
= STB_LOCAL
;
330 else if (sym
->a
.weak
)
333 sym_bind
= STB_GLOBAL
;
334 old_sym_bind
= ELFW(ST_BIND
)(esym
->st_info
);
335 if (sym_bind
!= old_sym_bind
) {
336 esym
->st_info
= ELFW(ST_INFO
)(sym_bind
, ELFW(ST_TYPE
)(esym
->st_info
));
340 if (sym
->a
.dllimport
)
341 esym
->st_other
|= ST_PE_IMPORT
;
342 if (sym
->a
.dllexport
)
343 esym
->st_other
|= ST_PE_EXPORT
;
347 printf("storage %s: bind=%c vis=%d exp=%d imp=%d\n",
348 get_tok_str(sym
->v
, NULL
),
349 sym_bind
== STB_WEAK
? 'w' : sym_bind
== STB_LOCAL
? 'l' : 'g',
357 /* ------------------------------------------------------------------------- */
358 /* update sym->c so that it points to an external symbol in section
359 'section' with value 'value' */
361 ST_FUNC
void put_extern_sym2(Sym
*sym
, int sh_num
,
362 addr_t value
, unsigned long size
,
363 int can_add_underscore
)
365 int sym_type
, sym_bind
, info
, other
, t
;
369 #ifdef CONFIG_TCC_BCHECK
374 name
= get_tok_str(sym
->v
, NULL
);
375 #ifdef CONFIG_TCC_BCHECK
376 if (tcc_state
->do_bounds_check
) {
377 /* XXX: avoid doing that for statics ? */
378 /* if bound checking is activated, we change some function
379 names by adding the "__bound" prefix */
382 /* XXX: we rely only on malloc hooks */
395 strcpy(buf
, "__bound_");
403 if ((t
& VT_BTYPE
) == VT_FUNC
) {
405 } else if ((t
& VT_BTYPE
) == VT_VOID
) {
406 sym_type
= STT_NOTYPE
;
408 sym_type
= STT_OBJECT
;
411 sym_bind
= STB_LOCAL
;
413 sym_bind
= STB_GLOBAL
;
416 if (sym_type
== STT_FUNC
&& sym
->type
.ref
) {
417 Sym
*ref
= sym
->type
.ref
;
418 if (ref
->a
.nodecorate
) {
419 can_add_underscore
= 0;
421 if (ref
->f
.func_call
== FUNC_STDCALL
&& can_add_underscore
) {
422 sprintf(buf1
, "_%s@%d", name
, ref
->f
.func_args
* PTR_SIZE
);
424 other
|= ST_PE_STDCALL
;
425 can_add_underscore
= 0;
429 if (tcc_state
->leading_underscore
&& can_add_underscore
) {
431 pstrcpy(buf1
+ 1, sizeof(buf1
) - 1, name
);
435 name
= get_tok_str(sym
->asm_label
, NULL
);
436 info
= ELFW(ST_INFO
)(sym_bind
, sym_type
);
437 sym
->c
= put_elf_sym(symtab_section
, value
, size
, info
, other
, sh_num
, name
);
440 esym
->st_value
= value
;
441 esym
->st_size
= size
;
442 esym
->st_shndx
= sh_num
;
447 ST_FUNC
void put_extern_sym(Sym
*sym
, Section
*section
,
448 addr_t value
, unsigned long size
)
450 int sh_num
= section
? section
->sh_num
: SHN_UNDEF
;
451 put_extern_sym2(sym
, sh_num
, value
, size
, 1);
454 /* add a new relocation entry to symbol 'sym' in section 's' */
455 ST_FUNC
void greloca(Section
*s
, Sym
*sym
, unsigned long offset
, int type
,
460 if (nocode_wanted
&& s
== cur_text_section
)
465 put_extern_sym(sym
, NULL
, 0, 0);
469 /* now we can add ELF relocation info */
470 put_elf_reloca(symtab_section
, s
, offset
, type
, c
, addend
);
474 ST_FUNC
void greloc(Section
*s
, Sym
*sym
, unsigned long offset
, int type
)
476 greloca(s
, sym
, offset
, type
, 0);
480 /* ------------------------------------------------------------------------- */
481 /* symbol allocator */
482 static Sym
*__sym_malloc(void)
484 Sym
*sym_pool
, *sym
, *last_sym
;
487 sym_pool
= tcc_malloc(SYM_POOL_NB
* sizeof(Sym
));
488 dynarray_add(&sym_pools
, &nb_sym_pools
, sym_pool
);
490 last_sym
= sym_free_first
;
492 for(i
= 0; i
< SYM_POOL_NB
; i
++) {
493 sym
->next
= last_sym
;
497 sym_free_first
= last_sym
;
501 static inline Sym
*sym_malloc(void)
505 sym
= sym_free_first
;
507 sym
= __sym_malloc();
508 sym_free_first
= sym
->next
;
511 sym
= tcc_malloc(sizeof(Sym
));
516 ST_INLN
void sym_free(Sym
*sym
)
519 sym
->next
= sym_free_first
;
520 sym_free_first
= sym
;
526 /* push, without hashing */
527 ST_FUNC Sym
*sym_push2(Sym
**ps
, int v
, int t
, int c
)
532 memset(s
, 0, sizeof *s
);
542 /* find a symbol and return its associated structure. 's' is the top
543 of the symbol stack */
544 ST_FUNC Sym
*sym_find2(Sym
*s
, int v
)
556 /* structure lookup */
557 ST_INLN Sym
*struct_find(int v
)
560 if ((unsigned)v
>= (unsigned)(tok_ident
- TOK_IDENT
))
562 return table_ident
[v
]->sym_struct
;
565 /* find an identifier */
566 ST_INLN Sym
*sym_find(int v
)
569 if ((unsigned)v
>= (unsigned)(tok_ident
- TOK_IDENT
))
571 return table_ident
[v
]->sym_identifier
;
574 /* push a given symbol on the symbol stack */
575 ST_FUNC Sym
*sym_push(int v
, CType
*type
, int r
, int c
)
584 s
= sym_push2(ps
, v
, type
->t
, c
);
585 s
->type
.ref
= type
->ref
;
587 /* don't record fields or anonymous symbols */
589 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
590 /* record symbol in token array */
591 ts
= table_ident
[(v
& ~SYM_STRUCT
) - TOK_IDENT
];
593 ps
= &ts
->sym_struct
;
595 ps
= &ts
->sym_identifier
;
598 s
->sym_scope
= local_scope
;
599 if (s
->prev_tok
&& s
->prev_tok
->sym_scope
== s
->sym_scope
)
600 tcc_error("redeclaration of '%s'",
601 get_tok_str(v
& ~SYM_STRUCT
, NULL
));
606 /* push a global identifier */
607 ST_FUNC Sym
*global_identifier_push(int v
, int t
, int c
)
610 s
= sym_push2(&global_stack
, v
, t
, c
);
611 /* don't record anonymous symbol */
612 if (v
< SYM_FIRST_ANOM
) {
613 ps
= &table_ident
[v
- TOK_IDENT
]->sym_identifier
;
614 /* modify the top most local identifier, so that
615 sym_identifier will point to 's' when popped */
616 while (*ps
!= NULL
&& (*ps
)->sym_scope
)
617 ps
= &(*ps
)->prev_tok
;
624 /* pop symbols until top reaches 'b'. If KEEP is non-zero don't really
625 pop them yet from the list, but do remove them from the token array. */
626 ST_FUNC
void sym_pop(Sym
**ptop
, Sym
*b
, int keep
)
636 /* remove symbol in token array */
638 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
639 ts
= table_ident
[(v
& ~SYM_STRUCT
) - TOK_IDENT
];
641 ps
= &ts
->sym_struct
;
643 ps
= &ts
->sym_identifier
;
654 /* ------------------------------------------------------------------------- */
656 static void vsetc(CType
*type
, int r
, CValue
*vc
)
660 if (vtop
>= vstack
+ (VSTACK_SIZE
- 1))
661 tcc_error("memory full (vstack)");
662 /* cannot let cpu flags if other instruction are generated. Also
663 avoid leaving VT_JMP anywhere except on the top of the stack
664 because it would complicate the code generator.
666 Don't do this when nocode_wanted. vtop might come from
667 !nocode_wanted regions (see 88_codeopt.c) and transforming
668 it to a register without actually generating code is wrong
669 as their value might still be used for real. All values
670 we push under nocode_wanted will eventually be popped
671 again, so that the VT_CMP/VT_JMP value will be in vtop
672 when code is unsuppressed again.
674 Same logic below in vswap(); */
675 if (vtop
>= vstack
&& !nocode_wanted
) {
676 v
= vtop
->r
& VT_VALMASK
;
677 if (v
== VT_CMP
|| (v
& ~1) == VT_JMP
)
689 ST_FUNC
void vswap(void)
692 /* cannot vswap cpu flags. See comment at vsetc() above */
693 if (vtop
>= vstack
&& !nocode_wanted
) {
694 int v
= vtop
->r
& VT_VALMASK
;
695 if (v
== VT_CMP
|| (v
& ~1) == VT_JMP
)
703 /* pop stack value */
704 ST_FUNC
void vpop(void)
707 v
= vtop
->r
& VT_VALMASK
;
708 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
709 /* for x86, we need to pop the FP stack */
711 o(0xd8dd); /* fstp %st(0) */
714 if (v
== VT_JMP
|| v
== VT_JMPI
) {
715 /* need to put correct jump if && or || without test */
721 /* push constant of type "type" with useless value */
722 ST_FUNC
void vpush(CType
*type
)
724 vset(type
, VT_CONST
, 0);
727 /* push integer constant */
728 ST_FUNC
void vpushi(int v
)
732 vsetc(&int_type
, VT_CONST
, &cval
);
735 /* push a pointer sized constant */
736 static void vpushs(addr_t v
)
740 vsetc(&size_type
, VT_CONST
, &cval
);
743 /* push arbitrary 64bit constant */
744 ST_FUNC
void vpush64(int ty
, unsigned long long v
)
751 vsetc(&ctype
, VT_CONST
, &cval
);
754 /* push long long constant */
755 static inline void vpushll(long long v
)
757 vpush64(VT_LLONG
, v
);
760 ST_FUNC
void vset(CType
*type
, int r
, int v
)
765 vsetc(type
, r
, &cval
);
768 static void vseti(int r
, int v
)
776 ST_FUNC
void vpushv(SValue
*v
)
778 if (vtop
>= vstack
+ (VSTACK_SIZE
- 1))
779 tcc_error("memory full (vstack)");
784 static void vdup(void)
789 /* rotate n first stack elements to the bottom
790 I1 ... In -> I2 ... In I1 [top is right]
792 ST_FUNC
void vrotb(int n
)
803 /* rotate the n elements before entry e towards the top
804 I1 ... In ... -> In I1 ... I(n-1) ... [top is right]
806 ST_FUNC
void vrote(SValue
*e
, int n
)
812 for(i
= 0;i
< n
- 1; i
++)
817 /* rotate n first stack elements to the top
818 I1 ... In -> In I1 ... I(n-1) [top is right]
820 ST_FUNC
void vrott(int n
)
825 /* push a symbol value of TYPE */
826 static inline void vpushsym(CType
*type
, Sym
*sym
)
830 vsetc(type
, VT_CONST
| VT_SYM
, &cval
);
834 /* Return a static symbol pointing to a section */
835 ST_FUNC Sym
*get_sym_ref(CType
*type
, Section
*sec
, unsigned long offset
, unsigned long size
)
841 sym
= global_identifier_push(v
, type
->t
| VT_STATIC
, 0);
842 sym
->type
.ref
= type
->ref
;
843 sym
->r
= VT_CONST
| VT_SYM
;
844 put_extern_sym(sym
, sec
, offset
, size
);
848 /* push a reference to a section offset by adding a dummy symbol */
849 static void vpush_ref(CType
*type
, Section
*sec
, unsigned long offset
, unsigned long size
)
851 vpushsym(type
, get_sym_ref(type
, sec
, offset
, size
));
854 /* define a new external reference to a symbol 'v' of type 'u' */
855 ST_FUNC Sym
*external_global_sym(int v
, CType
*type
, int r
)
861 /* push forward reference */
862 s
= global_identifier_push(v
, type
->t
| VT_EXTERN
, 0);
863 s
->type
.ref
= type
->ref
;
864 s
->r
= r
| VT_CONST
| VT_SYM
;
865 } else if (IS_ASM_SYM(s
)) {
866 s
->type
.t
= type
->t
| (s
->type
.t
& VT_EXTERN
);
867 s
->type
.ref
= type
->ref
;
873 /* Merge symbol attributes. */
874 static void merge_symattr(struct SymAttr
*sa
, struct SymAttr
*sa1
)
876 if (sa1
->aligned
&& !sa
->aligned
)
877 sa
->aligned
= sa1
->aligned
;
878 sa
->packed
|= sa1
->packed
;
879 sa
->weak
|= sa1
->weak
;
880 if (sa1
->visibility
!= STV_DEFAULT
) {
881 int vis
= sa
->visibility
;
882 if (vis
== STV_DEFAULT
883 || vis
> sa1
->visibility
)
884 vis
= sa1
->visibility
;
885 sa
->visibility
= vis
;
887 sa
->dllexport
|= sa1
->dllexport
;
888 sa
->nodecorate
|= sa1
->nodecorate
;
889 sa
->dllimport
|= sa1
->dllimport
;
892 /* Merge function attributes. */
893 static void merge_funcattr(struct FuncAttr
*fa
, struct FuncAttr
*fa1
)
895 if (fa1
->func_call
&& !fa
->func_call
)
896 fa
->func_call
= fa1
->func_call
;
897 if (fa1
->func_type
&& !fa
->func_type
)
898 fa
->func_type
= fa1
->func_type
;
899 if (fa1
->func_args
&& !fa
->func_args
)
900 fa
->func_args
= fa1
->func_args
;
903 /* Merge attributes. */
904 static void merge_attr(AttributeDef
*ad
, AttributeDef
*ad1
)
906 merge_symattr(&ad
->a
, &ad1
->a
);
907 merge_funcattr(&ad
->f
, &ad1
->f
);
910 ad
->section
= ad1
->section
;
911 if (ad1
->alias_target
)
912 ad
->alias_target
= ad1
->alias_target
;
914 ad
->asm_label
= ad1
->asm_label
;
916 ad
->attr_mode
= ad1
->attr_mode
;
919 /* Merge some type attributes. */
920 static void patch_type(Sym
*sym
, CType
*type
)
922 if (!(type
->t
& VT_EXTERN
)) {
923 if (!(sym
->type
.t
& VT_EXTERN
))
924 tcc_error("redefinition of '%s'", get_tok_str(sym
->v
, NULL
));
925 sym
->type
.t
&= ~VT_EXTERN
;
928 if (IS_ASM_SYM(sym
)) {
929 /* stay static if both are static */
930 sym
->type
.t
= type
->t
& (sym
->type
.t
| ~VT_STATIC
);
931 sym
->type
.ref
= type
->ref
;
934 if (!is_compatible_types(&sym
->type
, type
)) {
935 tcc_error("incompatible types for redefinition of '%s'",
936 get_tok_str(sym
->v
, NULL
));
938 } else if ((sym
->type
.t
& VT_BTYPE
) == VT_FUNC
) {
939 int static_proto
= sym
->type
.t
& VT_STATIC
;
940 /* warn if static follows non-static function declaration */
941 if ((type
->t
& VT_STATIC
) && !static_proto
&& !(type
->t
& VT_INLINE
))
942 tcc_warning("static storage ignored for redefinition of '%s'",
943 get_tok_str(sym
->v
, NULL
));
945 if (0 == (type
->t
& VT_EXTERN
)) {
946 /* put complete type, use static from prototype */
947 sym
->type
.t
= (type
->t
& ~VT_STATIC
) | static_proto
;
948 if (type
->t
& VT_INLINE
)
949 sym
->type
.t
= type
->t
;
950 sym
->type
.ref
= type
->ref
;
954 if ((sym
->type
.t
& VT_ARRAY
) && type
->ref
->c
>= 0) {
955 /* set array size if it was omitted in extern declaration */
956 if (sym
->type
.ref
->c
< 0)
957 sym
->type
.ref
->c
= type
->ref
->c
;
958 else if (sym
->type
.ref
->c
!= type
->ref
->c
)
959 tcc_error("conflicting type for '%s'", get_tok_str(sym
->v
, NULL
));
961 if ((type
->t
^ sym
->type
.t
) & VT_STATIC
)
962 tcc_warning("storage mismatch for redefinition of '%s'",
963 get_tok_str(sym
->v
, NULL
));
968 /* Merge some storage attributes. */
969 static void patch_storage(Sym
*sym
, AttributeDef
*ad
, CType
*type
)
972 patch_type(sym
, type
);
975 if (sym
->a
.dllimport
!= ad
->a
.dllimport
)
976 tcc_error("incompatible dll linkage for redefinition of '%s'",
977 get_tok_str(sym
->v
, NULL
));
979 merge_symattr(&sym
->a
, &ad
->a
);
981 sym
->asm_label
= ad
->asm_label
;
985 /* define a new external reference to a symbol 'v' */
986 static Sym
*external_sym(int v
, CType
*type
, int r
, AttributeDef
*ad
)
991 /* push forward reference */
992 s
= sym_push(v
, type
, r
| VT_CONST
| VT_SYM
, 0);
993 s
->type
.t
|= VT_EXTERN
;
997 if (s
->type
.ref
== func_old_type
.ref
) {
998 s
->type
.ref
= type
->ref
;
999 s
->r
= r
| VT_CONST
| VT_SYM
;
1000 s
->type
.t
|= VT_EXTERN
;
1002 patch_storage(s
, ad
, type
);
1007 /* push a reference to global symbol v */
1008 ST_FUNC
void vpush_global_sym(CType
*type
, int v
)
1010 vpushsym(type
, external_global_sym(v
, type
, 0));
1013 /* save registers up to (vtop - n) stack entry */
1014 ST_FUNC
void save_regs(int n
)
1017 for(p
= vstack
, p1
= vtop
- n
; p
<= p1
; p
++)
1021 /* save r to the memory stack, and mark it as being free */
1022 ST_FUNC
void save_reg(int r
)
1024 save_reg_upstack(r
, 0);
1027 /* save r to the memory stack, and mark it as being free,
1028 if seen up to (vtop - n) stack entry */
1029 ST_FUNC
void save_reg_upstack(int r
, int n
)
1031 int l
, saved
, size
, align
;
1035 if ((r
&= VT_VALMASK
) >= VT_CONST
)
1040 /* modify all stack values */
1043 for(p
= vstack
, p1
= vtop
- n
; p
<= p1
; p
++) {
1044 if ((p
->r
& VT_VALMASK
) == r
||
1045 ((p
->type
.t
& VT_BTYPE
) == VT_LLONG
&& (p
->r2
& VT_VALMASK
) == r
)) {
1046 /* must save value on stack if not already done */
1048 /* NOTE: must reload 'r' because r might be equal to r2 */
1049 r
= p
->r
& VT_VALMASK
;
1050 /* store register in the stack */
1052 if ((p
->r
& VT_LVAL
) ||
1053 (!is_float(type
->t
) && (type
->t
& VT_BTYPE
) != VT_LLONG
))
1055 type
= &char_pointer_type
;
1059 size
= type_size(type
, &align
);
1060 l
=get_temp_local_var(size
,align
);
1061 sv
.type
.t
= type
->t
;
1062 sv
.r
= VT_LOCAL
| VT_LVAL
;
1065 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
1066 /* x86 specific: need to pop fp register ST0 if saved */
1067 if (r
== TREG_ST0
) {
1068 o(0xd8dd); /* fstp %st(0) */
1072 /* special long long case */
1073 if ((type
->t
& VT_BTYPE
) == VT_LLONG
) {
1080 /* mark that stack entry as being saved on the stack */
1081 if (p
->r
& VT_LVAL
) {
1082 /* also clear the bounded flag because the
1083 relocation address of the function was stored in
1085 p
->r
= (p
->r
& ~(VT_VALMASK
| VT_BOUNDED
)) | VT_LLOCAL
;
1087 p
->r
= lvalue_type(p
->type
.t
) | VT_LOCAL
;
1095 #ifdef TCC_TARGET_ARM
1096 /* find a register of class 'rc2' with at most one reference on stack.
1097 * If none, call get_reg(rc) */
1098 ST_FUNC
int get_reg_ex(int rc
, int rc2
)
1103 for(r
=0;r
<NB_REGS
;r
++) {
1104 if (reg_classes
[r
] & rc2
) {
1107 for(p
= vstack
; p
<= vtop
; p
++) {
1108 if ((p
->r
& VT_VALMASK
) == r
||
1109 (p
->r2
& VT_VALMASK
) == r
)
1120 /* find a free register of class 'rc'. If none, save one register */
1121 ST_FUNC
int get_reg(int rc
)
1126 /* find a free register */
1127 for(r
=0;r
<NB_REGS
;r
++) {
1128 if (reg_classes
[r
] & rc
) {
1131 for(p
=vstack
;p
<=vtop
;p
++) {
1132 if ((p
->r
& VT_VALMASK
) == r
||
1133 (p
->r2
& VT_VALMASK
) == r
)
1141 /* no register left : free the first one on the stack (VERY
1142 IMPORTANT to start from the bottom to ensure that we don't
1143 spill registers used in gen_opi()) */
1144 for(p
=vstack
;p
<=vtop
;p
++) {
1145 /* look at second register (if long long) */
1146 r
= p
->r2
& VT_VALMASK
;
1147 if (r
< VT_CONST
&& (reg_classes
[r
] & rc
))
1149 r
= p
->r
& VT_VALMASK
;
1150 if (r
< VT_CONST
&& (reg_classes
[r
] & rc
)) {
1156 /* Should never comes here */
1160 /* find a free temporary local variable (return the offset on stack) match the size and align. If none, add new temporary stack variable*/
1161 static int get_temp_local_var(int size
,int align
){
1163 struct temp_local_variable
*temp_var
;
1170 for(i
=0;i
<nb_temp_local_vars
;i
++){
1171 temp_var
=&arr_temp_local_vars
[i
];
1172 if(temp_var
->size
<size
||align
!=temp_var
->align
){
1175 /*check if temp_var is free*/
1177 for(p
=vstack
;p
<=vtop
;p
++) {
1179 if(r
==VT_LOCAL
||r
==VT_LLOCAL
){
1180 if(p
->c
.i
==temp_var
->location
){
1187 found_var
=temp_var
->location
;
1193 loc
= (loc
- size
) & -align
;
1194 if(nb_temp_local_vars
<MAX_TEMP_LOCAL_VARIABLE_NUMBER
){
1195 temp_var
=&arr_temp_local_vars
[i
];
1196 temp_var
->location
=loc
;
1197 temp_var
->size
=size
;
1198 temp_var
->align
=align
;
1199 nb_temp_local_vars
++;
1206 static void clear_temp_local_var_list(){
1207 nb_temp_local_vars
=0;
1210 /* move register 's' (of type 't') to 'r', and flush previous value of r to memory
1212 static void move_reg(int r
, int s
, int t
)
1226 /* get address of vtop (vtop MUST BE an lvalue) */
1227 ST_FUNC
void gaddrof(void)
1229 vtop
->r
&= ~VT_LVAL
;
1230 /* tricky: if saved lvalue, then we can go back to lvalue */
1231 if ((vtop
->r
& VT_VALMASK
) == VT_LLOCAL
)
1232 vtop
->r
= (vtop
->r
& ~(VT_VALMASK
| VT_LVAL_TYPE
)) | VT_LOCAL
| VT_LVAL
;
1237 #ifdef CONFIG_TCC_BCHECK
1238 /* generate lvalue bound code */
1239 static void gbound(void)
1244 vtop
->r
&= ~VT_MUSTBOUND
;
1245 /* if lvalue, then use checking code before dereferencing */
1246 if (vtop
->r
& VT_LVAL
) {
1247 /* if not VT_BOUNDED value, then make one */
1248 if (!(vtop
->r
& VT_BOUNDED
)) {
1249 lval_type
= vtop
->r
& (VT_LVAL_TYPE
| VT_LVAL
);
1250 /* must save type because we must set it to int to get pointer */
1252 vtop
->type
.t
= VT_PTR
;
1255 gen_bounded_ptr_add();
1256 vtop
->r
|= lval_type
;
1259 /* then check for dereferencing */
1260 gen_bounded_ptr_deref();
1265 static void incr_bf_adr(int o
)
1267 vtop
->type
= char_pointer_type
;
1271 vtop
->type
.t
= (vtop
->type
.t
& ~(VT_BTYPE
|VT_DEFSIGN
))
1272 | (VT_BYTE
|VT_UNSIGNED
);
1273 vtop
->r
= (vtop
->r
& ~VT_LVAL_TYPE
)
1274 | (VT_LVAL_BYTE
|VT_LVAL_UNSIGNED
|VT_LVAL
);
1277 /* single-byte load mode for packed or otherwise unaligned bitfields */
1278 static void load_packed_bf(CType
*type
, int bit_pos
, int bit_size
)
1281 save_reg_upstack(vtop
->r
, 1);
1282 vpush64(type
->t
& VT_BTYPE
, 0); // B X
1283 bits
= 0, o
= bit_pos
>> 3, bit_pos
&= 7;
1292 vpushi(bit_pos
), gen_op(TOK_SHR
), bit_pos
= 0; // X B Y
1294 vpushi((1 << n
) - 1), gen_op('&');
1297 vpushi(bits
), gen_op(TOK_SHL
);
1300 bits
+= n
, bit_size
-= n
, o
= 1;
1303 if (!(type
->t
& VT_UNSIGNED
)) {
1304 n
= ((type
->t
& VT_BTYPE
) == VT_LLONG
? 64 : 32) - bits
;
1305 vpushi(n
), gen_op(TOK_SHL
);
1306 vpushi(n
), gen_op(TOK_SAR
);
1310 /* single-byte store mode for packed or otherwise unaligned bitfields */
1311 static void store_packed_bf(int bit_pos
, int bit_size
)
1313 int bits
, n
, o
, m
, c
;
1315 c
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
1317 save_reg_upstack(vtop
->r
, 1);
1318 bits
= 0, o
= bit_pos
>> 3, bit_pos
&= 7;
1320 incr_bf_adr(o
); // X B
1322 c
? vdup() : gv_dup(); // B V X
1325 vpushi(bits
), gen_op(TOK_SHR
);
1327 vpushi(bit_pos
), gen_op(TOK_SHL
);
1332 m
= ((1 << n
) - 1) << bit_pos
;
1333 vpushi(m
), gen_op('&'); // X B V1
1334 vpushv(vtop
-1); // X B V1 B
1335 vpushi(m
& 0x80 ? ~m
& 0x7f : ~m
);
1336 gen_op('&'); // X B V1 B1
1337 gen_op('|'); // X B V2
1339 vdup(), vtop
[-1] = vtop
[-2]; // X B B V2
1340 vstore(), vpop(); // X B
1341 bits
+= n
, bit_size
-= n
, bit_pos
= 0, o
= 1;
1346 static int adjust_bf(SValue
*sv
, int bit_pos
, int bit_size
)
1349 if (0 == sv
->type
.ref
)
1351 t
= sv
->type
.ref
->auxtype
;
1352 if (t
!= -1 && t
!= VT_STRUCT
) {
1353 sv
->type
.t
= (sv
->type
.t
& ~VT_BTYPE
) | t
;
1354 sv
->r
= (sv
->r
& ~VT_LVAL_TYPE
) | lvalue_type(sv
->type
.t
);
1359 /* store vtop a register belonging to class 'rc'. lvalues are
1360 converted to values. Cannot be used if cannot be converted to
1361 register value (such as structures). */
1362 ST_FUNC
int gv(int rc
)
1364 int r
, bit_pos
, bit_size
, size
, align
, rc2
;
1366 /* NOTE: get_reg can modify vstack[] */
1367 if (vtop
->type
.t
& VT_BITFIELD
) {
1370 bit_pos
= BIT_POS(vtop
->type
.t
);
1371 bit_size
= BIT_SIZE(vtop
->type
.t
);
1372 /* remove bit field info to avoid loops */
1373 vtop
->type
.t
&= ~VT_STRUCT_MASK
;
1376 type
.t
= vtop
->type
.t
& VT_UNSIGNED
;
1377 if ((vtop
->type
.t
& VT_BTYPE
) == VT_BOOL
)
1378 type
.t
|= VT_UNSIGNED
;
1380 r
= adjust_bf(vtop
, bit_pos
, bit_size
);
1382 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
)
1387 if (r
== VT_STRUCT
) {
1388 load_packed_bf(&type
, bit_pos
, bit_size
);
1390 int bits
= (type
.t
& VT_BTYPE
) == VT_LLONG
? 64 : 32;
1391 /* cast to int to propagate signedness in following ops */
1393 /* generate shifts */
1394 vpushi(bits
- (bit_pos
+ bit_size
));
1396 vpushi(bits
- bit_size
);
1397 /* NOTE: transformed to SHR if unsigned */
1402 if (is_float(vtop
->type
.t
) &&
1403 (vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
) {
1404 unsigned long offset
;
1405 /* CPUs usually cannot use float constants, so we store them
1406 generically in data segment */
1407 size
= type_size(&vtop
->type
, &align
);
1409 size
= 0, align
= 1;
1410 offset
= section_add(data_section
, size
, align
);
1411 vpush_ref(&vtop
->type
, data_section
, offset
, size
);
1413 init_putv(&vtop
->type
, data_section
, offset
);
1416 #ifdef CONFIG_TCC_BCHECK
1417 if (vtop
->r
& VT_MUSTBOUND
)
1421 r
= vtop
->r
& VT_VALMASK
;
1422 rc2
= (rc
& RC_FLOAT
) ? RC_FLOAT
: RC_INT
;
1423 #ifndef TCC_TARGET_ARM64
1426 #ifdef TCC_TARGET_X86_64
1427 else if (rc
== RC_FRET
)
1431 /* need to reload if:
1433 - lvalue (need to dereference pointer)
1434 - already a register, but not in the right class */
1436 || (vtop
->r
& VT_LVAL
)
1437 || !(reg_classes
[r
] & rc
)
1439 || ((vtop
->type
.t
& VT_BTYPE
) == VT_QLONG
&& !(reg_classes
[vtop
->r2
] & rc2
))
1440 || ((vtop
->type
.t
& VT_BTYPE
) == VT_QFLOAT
&& !(reg_classes
[vtop
->r2
] & rc2
))
1442 || ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
&& !(reg_classes
[vtop
->r2
] & rc2
))
1448 if (((vtop
->type
.t
& VT_BTYPE
) == VT_QLONG
) || ((vtop
->type
.t
& VT_BTYPE
) == VT_QFLOAT
)) {
1449 int addr_type
= VT_LLONG
, load_size
= 8, load_type
= ((vtop
->type
.t
& VT_BTYPE
) == VT_QLONG
) ? VT_LLONG
: VT_DOUBLE
;
1451 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
) {
1452 int addr_type
= VT_INT
, load_size
= 4, load_type
= VT_INT
;
1453 unsigned long long ll
;
1455 int r2
, original_type
;
1456 original_type
= vtop
->type
.t
;
1457 /* two register type load : expand to two words
1460 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
) {
1463 vtop
->c
.i
= ll
; /* first word */
1465 vtop
->r
= r
; /* save register value */
1466 vpushi(ll
>> 32); /* second word */
1469 if (vtop
->r
& VT_LVAL
) {
1470 /* We do not want to modifier the long long
1471 pointer here, so the safest (and less
1472 efficient) is to save all the other registers
1473 in the stack. XXX: totally inefficient. */
1477 /* lvalue_save: save only if used further down the stack */
1478 save_reg_upstack(vtop
->r
, 1);
1480 /* load from memory */
1481 vtop
->type
.t
= load_type
;
1484 vtop
[-1].r
= r
; /* save register value */
1485 /* increment pointer to get second word */
1486 vtop
->type
.t
= addr_type
;
1491 vtop
->type
.t
= load_type
;
1493 /* move registers */
1496 vtop
[-1].r
= r
; /* save register value */
1497 vtop
->r
= vtop
[-1].r2
;
1499 /* Allocate second register. Here we rely on the fact that
1500 get_reg() tries first to free r2 of an SValue. */
1504 /* write second register */
1506 vtop
->type
.t
= original_type
;
1507 } else if ((vtop
->r
& VT_LVAL
) && !is_float(vtop
->type
.t
)) {
1509 /* lvalue of scalar type : need to use lvalue type
1510 because of possible cast */
1513 /* compute memory access type */
1514 if (vtop
->r
& VT_LVAL_BYTE
)
1516 else if (vtop
->r
& VT_LVAL_SHORT
)
1518 if (vtop
->r
& VT_LVAL_UNSIGNED
)
1522 /* restore wanted type */
1525 /* one register type load */
1530 #ifdef TCC_TARGET_C67
1531 /* uses register pairs for doubles */
1532 if ((vtop
->type
.t
& VT_BTYPE
) == VT_DOUBLE
)
1539 /* generate vtop[-1] and vtop[0] in resp. classes rc1 and rc2 */
1540 ST_FUNC
void gv2(int rc1
, int rc2
)
1544 /* generate more generic register first. But VT_JMP or VT_CMP
1545 values must be generated first in all cases to avoid possible
1547 v
= vtop
[0].r
& VT_VALMASK
;
1548 if (v
!= VT_CMP
&& (v
& ~1) != VT_JMP
&& rc1
<= rc2
) {
1553 /* test if reload is needed for first register */
1554 if ((vtop
[-1].r
& VT_VALMASK
) >= VT_CONST
) {
1564 /* test if reload is needed for first register */
1565 if ((vtop
[0].r
& VT_VALMASK
) >= VT_CONST
) {
1571 #ifndef TCC_TARGET_ARM64
1572 /* wrapper around RC_FRET to return a register by type */
1573 static int rc_fret(int t
)
1575 #ifdef TCC_TARGET_X86_64
1576 if (t
== VT_LDOUBLE
) {
1584 /* wrapper around REG_FRET to return a register by type */
1585 static int reg_fret(int t
)
1587 #ifdef TCC_TARGET_X86_64
1588 if (t
== VT_LDOUBLE
) {
1596 /* expand 64bit on stack in two ints */
1597 ST_FUNC
void lexpand(void)
1600 u
= vtop
->type
.t
& (VT_DEFSIGN
| VT_UNSIGNED
);
1601 v
= vtop
->r
& (VT_VALMASK
| VT_LVAL
);
1602 if (v
== VT_CONST
) {
1605 } else if (v
== (VT_LVAL
|VT_CONST
) || v
== (VT_LVAL
|VT_LOCAL
)) {
1611 vtop
[0].r
= vtop
[-1].r2
;
1612 vtop
[0].r2
= vtop
[-1].r2
= VT_CONST
;
1614 vtop
[0].type
.t
= vtop
[-1].type
.t
= VT_INT
| u
;
1619 /* build a long long from two ints */
1620 static void lbuild(int t
)
1622 gv2(RC_INT
, RC_INT
);
1623 vtop
[-1].r2
= vtop
[0].r
;
1624 vtop
[-1].type
.t
= t
;
1629 /* convert stack entry to register and duplicate its value in another
1631 static void gv_dup(void)
1638 if ((t
& VT_BTYPE
) == VT_LLONG
) {
1639 if (t
& VT_BITFIELD
) {
1649 /* stack: H L L1 H1 */
1659 /* duplicate value */
1664 #ifdef TCC_TARGET_X86_64
1665 if ((t
& VT_BTYPE
) == VT_LDOUBLE
) {
1675 load(r1
, &sv
); /* move r to r1 */
1677 /* duplicates value */
1683 /* Generate value test
1685 * Generate a test for any value (jump, comparison and integers) */
1686 ST_FUNC
int gvtst(int inv
, int t
)
1688 int v
= vtop
->r
& VT_VALMASK
;
1689 if (v
!= VT_CMP
&& v
!= VT_JMP
&& v
!= VT_JMPI
) {
1693 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
1694 /* constant jmp optimization */
1695 if ((vtop
->c
.i
!= 0) != inv
)
1700 return gtst(inv
, t
);
1704 /* generate CPU independent (unsigned) long long operations */
1705 static void gen_opl(int op
)
1707 int t
, a
, b
, op1
, c
, i
;
1709 unsigned short reg_iret
= REG_IRET
;
1710 unsigned short reg_lret
= REG_LRET
;
1716 func
= TOK___divdi3
;
1719 func
= TOK___udivdi3
;
1722 func
= TOK___moddi3
;
1725 func
= TOK___umoddi3
;
1732 /* call generic long long function */
1733 vpush_global_sym(&func_old_type
, func
);
1738 vtop
->r2
= reg_lret
;
1746 //pv("gen_opl A",0,2);
1752 /* stack: L1 H1 L2 H2 */
1757 vtop
[-2] = vtop
[-3];
1760 /* stack: H1 H2 L1 L2 */
1761 //pv("gen_opl B",0,4);
1767 /* stack: H1 H2 L1 L2 ML MH */
1770 /* stack: ML MH H1 H2 L1 L2 */
1774 /* stack: ML MH H1 L2 H2 L1 */
1779 /* stack: ML MH M1 M2 */
1782 } else if (op
== '+' || op
== '-') {
1783 /* XXX: add non carry method too (for MIPS or alpha) */
1789 /* stack: H1 H2 (L1 op L2) */
1792 gen_op(op1
+ 1); /* TOK_xxxC2 */
1795 /* stack: H1 H2 (L1 op L2) */
1798 /* stack: (L1 op L2) H1 H2 */
1800 /* stack: (L1 op L2) (H1 op H2) */
1808 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
1809 t
= vtop
[-1].type
.t
;
1813 /* stack: L H shift */
1815 /* constant: simpler */
1816 /* NOTE: all comments are for SHL. the other cases are
1817 done by swapping words */
1828 if (op
!= TOK_SAR
) {
1861 /* XXX: should provide a faster fallback on x86 ? */
1864 func
= TOK___ashrdi3
;
1867 func
= TOK___lshrdi3
;
1870 func
= TOK___ashldi3
;
1876 /* compare operations */
1882 /* stack: L1 H1 L2 H2 */
1884 vtop
[-1] = vtop
[-2];
1886 /* stack: L1 L2 H1 H2 */
1889 /* when values are equal, we need to compare low words. since
1890 the jump is inverted, we invert the test too. */
1893 else if (op1
== TOK_GT
)
1895 else if (op1
== TOK_ULT
)
1897 else if (op1
== TOK_UGT
)
1907 /* generate non equal test */
1913 /* compare low. Always unsigned */
1917 else if (op1
== TOK_LE
)
1919 else if (op1
== TOK_GT
)
1921 else if (op1
== TOK_GE
)
/* Signed 64-bit division implemented on unsigned operands so the
   compile-time folder never performs a signed divide (which would be
   UB for INT64_MIN / -1 in the host compiler).  Divides |a| by |b|
   and re-applies the sign: negative iff the operand signs differ. */
static uint64_t gen_opic_sdiv(uint64_t a, uint64_t b)
{
    uint64_t ua = (a >> 63) ? -a : a;   /* |a| (unsigned negate is well-defined) */
    uint64_t ub = (b >> 63) ? -b : b;   /* |b| */
    uint64_t q = ua / ub;
    return ((a ^ b) >> 63) ? -q : q;
}
/* Signed 64-bit "less than" computed with an unsigned compare:
   flipping the sign bit of both operands maps the signed ordering
   onto the unsigned ordering. */
static int gen_opic_lt(uint64_t a, uint64_t b)
{
    const uint64_t sign_bit = (uint64_t)1 << 63;
    return (a ^ sign_bit) < (b ^ sign_bit);
}
1943 /* handle integer constant optimizations and various machine
1945 static void gen_opic(int op
)
1947 SValue
*v1
= vtop
- 1;
1949 int t1
= v1
->type
.t
& VT_BTYPE
;
1950 int t2
= v2
->type
.t
& VT_BTYPE
;
1951 int c1
= (v1
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
1952 int c2
= (v2
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
1953 uint64_t l1
= c1
? v1
->c
.i
: 0;
1954 uint64_t l2
= c2
? v2
->c
.i
: 0;
1955 int shm
= (t1
== VT_LLONG
) ? 63 : 31;
1957 if (t1
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t1
!= VT_PTR
))
1958 l1
= ((uint32_t)l1
|
1959 (v1
->type
.t
& VT_UNSIGNED
? 0 : -(l1
& 0x80000000)));
1960 if (t2
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t2
!= VT_PTR
))
1961 l2
= ((uint32_t)l2
|
1962 (v2
->type
.t
& VT_UNSIGNED
? 0 : -(l2
& 0x80000000)));
1966 case '+': l1
+= l2
; break;
1967 case '-': l1
-= l2
; break;
1968 case '&': l1
&= l2
; break;
1969 case '^': l1
^= l2
; break;
1970 case '|': l1
|= l2
; break;
1971 case '*': l1
*= l2
; break;
1978 /* if division by zero, generate explicit division */
1981 tcc_error("division by zero in constant");
1985 default: l1
= gen_opic_sdiv(l1
, l2
); break;
1986 case '%': l1
= l1
- l2
* gen_opic_sdiv(l1
, l2
); break;
1987 case TOK_UDIV
: l1
= l1
/ l2
; break;
1988 case TOK_UMOD
: l1
= l1
% l2
; break;
1991 case TOK_SHL
: l1
<<= (l2
& shm
); break;
1992 case TOK_SHR
: l1
>>= (l2
& shm
); break;
1994 l1
= (l1
>> 63) ? ~(~l1
>> (l2
& shm
)) : l1
>> (l2
& shm
);
1997 case TOK_ULT
: l1
= l1
< l2
; break;
1998 case TOK_UGE
: l1
= l1
>= l2
; break;
1999 case TOK_EQ
: l1
= l1
== l2
; break;
2000 case TOK_NE
: l1
= l1
!= l2
; break;
2001 case TOK_ULE
: l1
= l1
<= l2
; break;
2002 case TOK_UGT
: l1
= l1
> l2
; break;
2003 case TOK_LT
: l1
= gen_opic_lt(l1
, l2
); break;
2004 case TOK_GE
: l1
= !gen_opic_lt(l1
, l2
); break;
2005 case TOK_LE
: l1
= !gen_opic_lt(l2
, l1
); break;
2006 case TOK_GT
: l1
= gen_opic_lt(l2
, l1
); break;
2008 case TOK_LAND
: l1
= l1
&& l2
; break;
2009 case TOK_LOR
: l1
= l1
|| l2
; break;
2013 if (t1
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t1
!= VT_PTR
))
2014 l1
= ((uint32_t)l1
|
2015 (v1
->type
.t
& VT_UNSIGNED
? 0 : -(l1
& 0x80000000)));
2019 /* if commutative ops, put c2 as constant */
2020 if (c1
&& (op
== '+' || op
== '&' || op
== '^' ||
2021 op
== '|' || op
== '*')) {
2023 c2
= c1
; //c = c1, c1 = c2, c2 = c;
2024 l2
= l1
; //l = l1, l1 = l2, l2 = l;
2026 if (!const_wanted
&&
2028 (op
== TOK_SHL
|| op
== TOK_SHR
|| op
== TOK_SAR
)) ||
2029 (l1
== -1 && op
== TOK_SAR
))) {
2030 /* treat (0 << x), (0 >> x) and (-1 >> x) as constant */
2032 } else if (!const_wanted
&&
2033 c2
&& ((l2
== 0 && (op
== '&' || op
== '*')) ||
2035 (l2
== -1 || (l2
== 0xFFFFFFFF && t2
!= VT_LLONG
))) ||
2036 (l2
== 1 && (op
== '%' || op
== TOK_UMOD
)))) {
2037 /* treat (x & 0), (x * 0), (x | -1) and (x % 1) as constant */
2042 } else if (c2
&& (((op
== '*' || op
== '/' || op
== TOK_UDIV
||
2045 ((op
== '+' || op
== '-' || op
== '|' || op
== '^' ||
2046 op
== TOK_SHL
|| op
== TOK_SHR
|| op
== TOK_SAR
) &&
2049 (l2
== -1 || (l2
== 0xFFFFFFFF && t2
!= VT_LLONG
))))) {
2050 /* filter out NOP operations like x*1, x-0, x&-1... */
2052 } else if (c2
&& (op
== '*' || op
== TOK_PDIV
|| op
== TOK_UDIV
)) {
2053 /* try to use shifts instead of muls or divs */
2054 if (l2
> 0 && (l2
& (l2
- 1)) == 0) {
2063 else if (op
== TOK_PDIV
)
2069 } else if (c2
&& (op
== '+' || op
== '-') &&
2070 (((vtop
[-1].r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == (VT_CONST
| VT_SYM
))
2071 || (vtop
[-1].r
& (VT_VALMASK
| VT_LVAL
)) == VT_LOCAL
)) {
2072 /* symbol + constant case */
2076 /* The backends can't always deal with addends to symbols
2077 larger than +-1<<31. Don't construct such. */
2084 /* call low level op generator */
2085 if (t1
== VT_LLONG
|| t2
== VT_LLONG
||
2086 (PTR_SIZE
== 8 && (t1
== VT_PTR
|| t2
== VT_PTR
)))
2094 /* generate a floating point operation with constant propagation */
2095 static void gen_opif(int op
)
2099 #if defined _MSC_VER && defined _AMD64_
2100 /* avoid bad optimization with f1 -= f2 for f1:-0.0, f2:0.0 */
2107 /* currently, we cannot do computations with forward symbols */
2108 c1
= (v1
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2109 c2
= (v2
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2111 if (v1
->type
.t
== VT_FLOAT
) {
2114 } else if (v1
->type
.t
== VT_DOUBLE
) {
2122 /* NOTE: we only do constant propagation if finite number (not
2123 NaN or infinity) (ANSI spec) */
2124 if (!ieee_finite(f1
) || !ieee_finite(f2
))
2128 case '+': f1
+= f2
; break;
2129 case '-': f1
-= f2
; break;
2130 case '*': f1
*= f2
; break;
2133 /* If not in initializer we need to potentially generate
2134 FP exceptions at runtime, otherwise we want to fold. */
2140 /* XXX: also handles tests ? */
2144 /* XXX: overflow test ? */
2145 if (v1
->type
.t
== VT_FLOAT
) {
2147 } else if (v1
->type
.t
== VT_DOUBLE
) {
2159 static int pointed_size(CType
*type
)
2162 return type_size(pointed_type(type
), &align
);
2165 static void vla_runtime_pointed_size(CType
*type
)
2168 vla_runtime_type_size(pointed_type(type
), &align
);
2171 static inline int is_null_pointer(SValue
*p
)
2173 if ((p
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) != VT_CONST
)
2175 return ((p
->type
.t
& VT_BTYPE
) == VT_INT
&& (uint32_t)p
->c
.i
== 0) ||
2176 ((p
->type
.t
& VT_BTYPE
) == VT_LLONG
&& p
->c
.i
== 0) ||
2177 ((p
->type
.t
& VT_BTYPE
) == VT_PTR
&&
2178 (PTR_SIZE
== 4 ? (uint32_t)p
->c
.i
== 0 : p
->c
.i
== 0) &&
2179 ((pointed_type(&p
->type
)->t
& VT_BTYPE
) == VT_VOID
) &&
2180 0 == (pointed_type(&p
->type
)->t
& (VT_CONSTANT
| VT_VOLATILE
))
2183 static inline int is_integer_btype(int bt
)
2185 return (bt
== VT_BYTE
|| bt
== VT_SHORT
||
2186 bt
== VT_INT
|| bt
== VT_LLONG
);
2189 /* check types for comparison or subtraction of pointers */
2190 static void check_comparison_pointer_types(SValue
*p1
, SValue
*p2
, int op
)
2192 CType
*type1
, *type2
, tmp_type1
, tmp_type2
;
2195 /* null pointers are accepted for all comparisons as gcc */
2196 if (is_null_pointer(p1
) || is_null_pointer(p2
))
2200 bt1
= type1
->t
& VT_BTYPE
;
2201 bt2
= type2
->t
& VT_BTYPE
;
2202 /* accept comparison between pointer and integer with a warning */
2203 if ((is_integer_btype(bt1
) || is_integer_btype(bt2
)) && op
!= '-') {
2204 if (op
!= TOK_LOR
&& op
!= TOK_LAND
)
2205 tcc_warning("comparison between pointer and integer");
2209 /* both must be pointers or implicit function pointers */
2210 if (bt1
== VT_PTR
) {
2211 type1
= pointed_type(type1
);
2212 } else if (bt1
!= VT_FUNC
)
2213 goto invalid_operands
;
2215 if (bt2
== VT_PTR
) {
2216 type2
= pointed_type(type2
);
2217 } else if (bt2
!= VT_FUNC
) {
2219 tcc_error("invalid operands to binary %s", get_tok_str(op
, NULL
));
2221 if ((type1
->t
& VT_BTYPE
) == VT_VOID
||
2222 (type2
->t
& VT_BTYPE
) == VT_VOID
)
2226 tmp_type1
.t
&= ~(VT_DEFSIGN
| VT_UNSIGNED
| VT_CONSTANT
| VT_VOLATILE
);
2227 tmp_type2
.t
&= ~(VT_DEFSIGN
| VT_UNSIGNED
| VT_CONSTANT
| VT_VOLATILE
);
2228 if (!is_compatible_types(&tmp_type1
, &tmp_type2
)) {
2229 /* gcc-like error if '-' is used */
2231 goto invalid_operands
;
2233 tcc_warning("comparison of distinct pointer types lacks a cast");
2237 /* generic gen_op: handles types problems */
2238 ST_FUNC
void gen_op(int op
)
2240 int u
, t1
, t2
, bt1
, bt2
, t
;
2244 t1
= vtop
[-1].type
.t
;
2245 t2
= vtop
[0].type
.t
;
2246 bt1
= t1
& VT_BTYPE
;
2247 bt2
= t2
& VT_BTYPE
;
2249 if (bt1
== VT_STRUCT
|| bt2
== VT_STRUCT
) {
2250 tcc_error("operation on a struct");
2251 } else if (bt1
== VT_FUNC
|| bt2
== VT_FUNC
) {
2252 if (bt2
== VT_FUNC
) {
2253 mk_pointer(&vtop
->type
);
2256 if (bt1
== VT_FUNC
) {
2258 mk_pointer(&vtop
->type
);
2263 } else if (bt1
== VT_PTR
|| bt2
== VT_PTR
) {
2264 /* at least one operand is a pointer */
2265 /* relational op: must be both pointers */
2266 if (op
>= TOK_ULT
&& op
<= TOK_LOR
) {
2267 check_comparison_pointer_types(vtop
- 1, vtop
, op
);
2268 /* pointers are handled are unsigned */
2270 t
= VT_LLONG
| VT_UNSIGNED
;
2272 t
= VT_INT
| VT_UNSIGNED
;
2276 /* if both pointers, then it must be the '-' op */
2277 if (bt1
== VT_PTR
&& bt2
== VT_PTR
) {
2279 tcc_error("cannot use pointers here");
2280 check_comparison_pointer_types(vtop
- 1, vtop
, op
);
2281 /* XXX: check that types are compatible */
2282 if (vtop
[-1].type
.t
& VT_VLA
) {
2283 vla_runtime_pointed_size(&vtop
[-1].type
);
2285 vpushi(pointed_size(&vtop
[-1].type
));
2289 vtop
->type
.t
= ptrdiff_type
.t
;
2293 /* exactly one pointer : must be '+' or '-'. */
2294 if (op
!= '-' && op
!= '+')
2295 tcc_error("cannot use pointers here");
2296 /* Put pointer as first operand */
2297 if (bt2
== VT_PTR
) {
2299 t
= t1
, t1
= t2
, t2
= t
;
2302 if ((vtop
[0].type
.t
& VT_BTYPE
) == VT_LLONG
)
2303 /* XXX: truncate here because gen_opl can't handle ptr + long long */
2306 type1
= vtop
[-1].type
;
2307 type1
.t
&= ~VT_ARRAY
;
2308 if (vtop
[-1].type
.t
& VT_VLA
)
2309 vla_runtime_pointed_size(&vtop
[-1].type
);
2311 u
= pointed_size(&vtop
[-1].type
);
2313 tcc_error("unknown array element size");
2317 /* XXX: cast to int ? (long long case) */
2323 /* #ifdef CONFIG_TCC_BCHECK
2324 The main reason to removing this code:
2331 fprintf(stderr, "v+i-j = %p\n", v+i-j);
2332 fprintf(stderr, "v+(i-j) = %p\n", v+(i-j));
2334 When this code is on. then the output looks like
2336 v+(i-j) = 0xbff84000
2338 /* if evaluating constant expression, no code should be
2339 generated, so no bound check */
2340 if (tcc_state
->do_bounds_check
&& !const_wanted
) {
2341 /* if bounded pointers, we generate a special code to
2348 gen_bounded_ptr_add();
2354 /* put again type if gen_opic() swaped operands */
2357 } else if (is_float(bt1
) || is_float(bt2
)) {
2358 /* compute bigger type and do implicit casts */
2359 if (bt1
== VT_LDOUBLE
|| bt2
== VT_LDOUBLE
) {
2361 } else if (bt1
== VT_DOUBLE
|| bt2
== VT_DOUBLE
) {
2366 /* floats can only be used for a few operations */
2367 if (op
!= '+' && op
!= '-' && op
!= '*' && op
!= '/' &&
2368 (op
< TOK_ULT
|| op
> TOK_GT
))
2369 tcc_error("invalid operands for binary operation");
2371 } else if (op
== TOK_SHR
|| op
== TOK_SAR
|| op
== TOK_SHL
) {
2372 t
= bt1
== VT_LLONG
? VT_LLONG
: VT_INT
;
2373 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (t
| VT_UNSIGNED
))
2375 t
|= (VT_LONG
& t1
);
2377 } else if (bt1
== VT_LLONG
|| bt2
== VT_LLONG
) {
2378 /* cast to biggest op */
2379 t
= VT_LLONG
| VT_LONG
;
2380 if (bt1
== VT_LLONG
)
2382 if (bt2
== VT_LLONG
)
2384 /* convert to unsigned if it does not fit in a long long */
2385 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
) ||
2386 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
))
2390 /* integer operations */
2391 t
= VT_INT
| (VT_LONG
& (t1
| t2
));
2392 /* convert to unsigned if it does not fit in an integer */
2393 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
) ||
2394 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
))
2397 /* XXX: currently, some unsigned operations are explicit, so
2398 we modify them here */
2399 if (t
& VT_UNSIGNED
) {
2406 else if (op
== TOK_LT
)
2408 else if (op
== TOK_GT
)
2410 else if (op
== TOK_LE
)
2412 else if (op
== TOK_GE
)
2420 /* special case for shifts and long long: we keep the shift as
2422 if (op
== TOK_SHR
|| op
== TOK_SAR
|| op
== TOK_SHL
)
2429 if (op
>= TOK_ULT
&& op
<= TOK_GT
) {
2430 /* relational op: the result is an int */
2431 vtop
->type
.t
= VT_INT
;
2436 // Make sure that we have converted to an rvalue:
2437 if (vtop
->r
& VT_LVAL
)
2438 gv(is_float(vtop
->type
.t
& VT_BTYPE
) ? RC_FLOAT
: RC_INT
);
2441 #ifndef TCC_TARGET_ARM
2442 /* generic itof for unsigned long long case */
2443 static void gen_cvt_itof1(int t
)
2445 #ifdef TCC_TARGET_ARM64
2448 if ((vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
)) ==
2449 (VT_LLONG
| VT_UNSIGNED
)) {
2452 vpush_global_sym(&func_old_type
, TOK___floatundisf
);
2453 #if LDOUBLE_SIZE != 8
2454 else if (t
== VT_LDOUBLE
)
2455 vpush_global_sym(&func_old_type
, TOK___floatundixf
);
2458 vpush_global_sym(&func_old_type
, TOK___floatundidf
);
2462 vtop
->r
= reg_fret(t
);
2470 /* generic ftoi for unsigned long long case */
2471 static void gen_cvt_ftoi1(int t
)
2473 #ifdef TCC_TARGET_ARM64
2478 if (t
== (VT_LLONG
| VT_UNSIGNED
)) {
2479 /* not handled natively */
2480 st
= vtop
->type
.t
& VT_BTYPE
;
2482 vpush_global_sym(&func_old_type
, TOK___fixunssfdi
);
2483 #if LDOUBLE_SIZE != 8
2484 else if (st
== VT_LDOUBLE
)
2485 vpush_global_sym(&func_old_type
, TOK___fixunsxfdi
);
2488 vpush_global_sym(&func_old_type
, TOK___fixunsdfdi
);
2493 vtop
->r2
= REG_LRET
;
2500 /* force char or short cast */
2501 static void force_charshort_cast(int t
)
2505 /* cannot cast static initializers */
2506 if (STATIC_DATA_WANTED
)
2510 /* XXX: add optimization if lvalue : just change type and offset */
2515 if (t
& VT_UNSIGNED
) {
2516 vpushi((1 << bits
) - 1);
2519 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
)
2525 /* result must be signed or the SAR is converted to an SHL
2526 This was not the case when "t" was a signed short
2527 and the last value on the stack was an unsigned int */
2528 vtop
->type
.t
&= ~VT_UNSIGNED
;
2534 /* cast 'vtop' to 'type'. Casting to bitfields is forbidden. */
2535 static void gen_cast_s(int t
)
2543 static void gen_cast(CType
*type
)
2545 int sbt
, dbt
, sf
, df
, c
, p
;
2547 /* special delayed cast for char/short */
2548 /* XXX: in some cases (multiple cascaded casts), it may still
2550 if (vtop
->r
& VT_MUSTCAST
) {
2551 vtop
->r
&= ~VT_MUSTCAST
;
2552 force_charshort_cast(vtop
->type
.t
);
2555 /* bitfields first get cast to ints */
2556 if (vtop
->type
.t
& VT_BITFIELD
) {
2560 dbt
= type
->t
& (VT_BTYPE
| VT_UNSIGNED
);
2561 sbt
= vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
);
2566 c
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2567 p
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == (VT_CONST
| VT_SYM
);
2568 #if !defined TCC_IS_NATIVE && !defined TCC_IS_NATIVE_387
2569 c
&= dbt
!= VT_LDOUBLE
;
2572 /* constant case: we can do it now */
2573 /* XXX: in ISOC, cannot do it if error in convert */
2574 if (sbt
== VT_FLOAT
)
2575 vtop
->c
.ld
= vtop
->c
.f
;
2576 else if (sbt
== VT_DOUBLE
)
2577 vtop
->c
.ld
= vtop
->c
.d
;
2580 if ((sbt
& VT_BTYPE
) == VT_LLONG
) {
2581 if ((sbt
& VT_UNSIGNED
) || !(vtop
->c
.i
>> 63))
2582 vtop
->c
.ld
= vtop
->c
.i
;
2584 vtop
->c
.ld
= -(long double)-vtop
->c
.i
;
2586 if ((sbt
& VT_UNSIGNED
) || !(vtop
->c
.i
>> 31))
2587 vtop
->c
.ld
= (uint32_t)vtop
->c
.i
;
2589 vtop
->c
.ld
= -(long double)-(uint32_t)vtop
->c
.i
;
2592 if (dbt
== VT_FLOAT
)
2593 vtop
->c
.f
= (float)vtop
->c
.ld
;
2594 else if (dbt
== VT_DOUBLE
)
2595 vtop
->c
.d
= (double)vtop
->c
.ld
;
2596 } else if (sf
&& dbt
== (VT_LLONG
|VT_UNSIGNED
)) {
2597 vtop
->c
.i
= vtop
->c
.ld
;
2598 } else if (sf
&& dbt
== VT_BOOL
) {
2599 vtop
->c
.i
= (vtop
->c
.ld
!= 0);
2602 vtop
->c
.i
= vtop
->c
.ld
;
2603 else if (sbt
== (VT_LLONG
|VT_UNSIGNED
))
2605 else if (sbt
& VT_UNSIGNED
)
2606 vtop
->c
.i
= (uint32_t)vtop
->c
.i
;
2608 else if (sbt
== VT_PTR
)
2611 else if (sbt
!= VT_LLONG
)
2612 vtop
->c
.i
= ((uint32_t)vtop
->c
.i
|
2613 -(vtop
->c
.i
& 0x80000000));
2615 if (dbt
== (VT_LLONG
|VT_UNSIGNED
))
2617 else if (dbt
== VT_BOOL
)
2618 vtop
->c
.i
= (vtop
->c
.i
!= 0);
2620 else if (dbt
== VT_PTR
)
2623 else if (dbt
!= VT_LLONG
) {
2624 uint32_t m
= ((dbt
& VT_BTYPE
) == VT_BYTE
? 0xff :
2625 (dbt
& VT_BTYPE
) == VT_SHORT
? 0xffff :
2628 if (!(dbt
& VT_UNSIGNED
))
2629 vtop
->c
.i
|= -(vtop
->c
.i
& ((m
>> 1) + 1));
2632 } else if (p
&& dbt
== VT_BOOL
) {
2636 /* non constant case: generate code */
2638 /* convert from fp to fp */
2641 /* convert int to fp */
2644 /* convert fp to int */
2645 if (dbt
== VT_BOOL
) {
2649 /* we handle char/short/etc... with generic code */
2650 if (dbt
!= (VT_INT
| VT_UNSIGNED
) &&
2651 dbt
!= (VT_LLONG
| VT_UNSIGNED
) &&
2655 if (dbt
== VT_INT
&& (type
->t
& (VT_BTYPE
| VT_UNSIGNED
)) != dbt
) {
2656 /* additional cast for char/short... */
2662 } else if ((dbt
& VT_BTYPE
) == VT_LLONG
) {
2663 if ((sbt
& VT_BTYPE
) != VT_LLONG
) {
2664 /* scalar to long long */
2665 /* machine independent conversion */
2667 /* generate high word */
2668 if (sbt
== (VT_INT
| VT_UNSIGNED
)) {
2672 if (sbt
== VT_PTR
) {
2673 /* cast from pointer to int before we apply
2674 shift operation, which pointers don't support*/
2681 /* patch second register */
2682 vtop
[-1].r2
= vtop
->r
;
2686 } else if ((dbt
& VT_BTYPE
) == VT_LLONG
||
2687 (dbt
& VT_BTYPE
) == VT_PTR
||
2688 (dbt
& VT_BTYPE
) == VT_FUNC
) {
2689 if ((sbt
& VT_BTYPE
) != VT_LLONG
&&
2690 (sbt
& VT_BTYPE
) != VT_PTR
&&
2691 (sbt
& VT_BTYPE
) != VT_FUNC
) {
2692 /* need to convert from 32bit to 64bit */
2694 if (sbt
!= (VT_INT
| VT_UNSIGNED
)) {
2695 #if defined(TCC_TARGET_ARM64)
2697 #elif defined(TCC_TARGET_X86_64)
2699 /* x86_64 specific: movslq */
2701 o(0xc0 + (REG_VALUE(r
) << 3) + REG_VALUE(r
));
2708 } else if (dbt
== VT_BOOL
) {
2709 /* scalar to bool */
2712 } else if ((dbt
& VT_BTYPE
) == VT_BYTE
||
2713 (dbt
& VT_BTYPE
) == VT_SHORT
) {
2714 if (sbt
== VT_PTR
) {
2715 vtop
->type
.t
= VT_INT
;
2716 tcc_warning("nonportable conversion from pointer to char/short");
2718 force_charshort_cast(dbt
);
2719 } else if ((dbt
& VT_BTYPE
) == VT_INT
) {
2721 if ((sbt
& VT_BTYPE
) == VT_LLONG
) {
2723 /* from long long: just take low order word */
2728 vtop
->type
.t
|= VT_UNSIGNED
;
2732 /* if lvalue and single word type, nothing to do because
2733 the lvalue already contains the real type size (see
2734 VT_LVAL_xxx constants) */
2737 } else if ((dbt
& VT_BTYPE
) == VT_PTR
&& !(vtop
->r
& VT_LVAL
)) {
2738 /* if we are casting between pointer types,
2739 we must update the VT_LVAL_xxx size */
2740 vtop
->r
= (vtop
->r
& ~VT_LVAL_TYPE
)
2741 | (lvalue_type(type
->ref
->type
.t
) & VT_LVAL_TYPE
);
2744 vtop
->type
.t
&= ~ ( VT_CONSTANT
| VT_VOLATILE
| VT_ARRAY
);
2747 /* return type size as known at compile time. Put alignment at 'a' */
2748 ST_FUNC
int type_size(CType
*type
, int *a
)
2753 bt
= type
->t
& VT_BTYPE
;
2754 if (bt
== VT_STRUCT
) {
2759 } else if (bt
== VT_PTR
) {
2760 if (type
->t
& VT_ARRAY
) {
2764 ts
= type_size(&s
->type
, a
);
2766 if (ts
< 0 && s
->c
< 0)
2774 } else if (IS_ENUM(type
->t
) && type
->ref
->c
== -1) {
2775 return -1; /* incomplete enum */
2776 } else if (bt
== VT_LDOUBLE
) {
2778 return LDOUBLE_SIZE
;
2779 } else if (bt
== VT_DOUBLE
|| bt
== VT_LLONG
) {
2780 #ifdef TCC_TARGET_I386
2781 #ifdef TCC_TARGET_PE
2786 #elif defined(TCC_TARGET_ARM)
2796 } else if (bt
== VT_INT
|| bt
== VT_FLOAT
) {
2799 } else if (bt
== VT_SHORT
) {
2802 } else if (bt
== VT_QLONG
|| bt
== VT_QFLOAT
) {
2806 /* char, void, function, _Bool */
2812 /* push type size as known at runtime time on top of value stack. Put
2814 ST_FUNC
void vla_runtime_type_size(CType
*type
, int *a
)
2816 if (type
->t
& VT_VLA
) {
2817 type_size(&type
->ref
->type
, a
);
2818 vset(&int_type
, VT_LOCAL
|VT_LVAL
, type
->ref
->c
);
2820 vpushi(type_size(type
, a
));
2824 static void vla_sp_restore(void) {
2825 if (vlas_in_scope
) {
2826 gen_vla_sp_restore(vla_sp_loc
);
2830 static void vla_sp_restore_root(void) {
2831 if (vlas_in_scope
) {
2832 gen_vla_sp_restore(vla_sp_root_loc
);
2836 /* return the pointed type of t */
2837 static inline CType
*pointed_type(CType
*type
)
2839 return &type
->ref
->type
;
2842 /* modify type so that its it is a pointer to type. */
2843 ST_FUNC
void mk_pointer(CType
*type
)
2846 s
= sym_push(SYM_FIELD
, type
, 0, -1);
2847 type
->t
= VT_PTR
| (type
->t
& VT_STORAGE
);
2851 /* compare function types. OLD functions match any new functions */
2852 static int is_compatible_func(CType
*type1
, CType
*type2
)
2858 if (!is_compatible_types(&s1
->type
, &s2
->type
))
2860 /* check func_call */
2861 if (s1
->f
.func_call
!= s2
->f
.func_call
)
2863 /* XXX: not complete */
2864 if (s1
->f
.func_type
== FUNC_OLD
|| s2
->f
.func_type
== FUNC_OLD
)
2866 if (s1
->f
.func_type
!= s2
->f
.func_type
)
2868 while (s1
!= NULL
) {
2871 if (!is_compatible_unqualified_types(&s1
->type
, &s2
->type
))
2881 /* return true if type1 and type2 are the same. If unqualified is
2882 true, qualifiers on the types are ignored.
2884 static int compare_types(CType
*type1
, CType
*type2
, int unqualified
)
2888 t1
= type1
->t
& VT_TYPE
;
2889 t2
= type2
->t
& VT_TYPE
;
2891 /* strip qualifiers before comparing */
2892 t1
&= ~(VT_CONSTANT
| VT_VOLATILE
);
2893 t2
&= ~(VT_CONSTANT
| VT_VOLATILE
);
2896 /* Default Vs explicit signedness only matters for char */
2897 if ((t1
& VT_BTYPE
) != VT_BYTE
) {
2901 /* XXX: bitfields ? */
2904 /* test more complicated cases */
2905 bt1
= t1
& (VT_BTYPE
| VT_ARRAY
);
2906 if (bt1
== VT_PTR
) {
2907 type1
= pointed_type(type1
);
2908 type2
= pointed_type(type2
);
2909 return is_compatible_types(type1
, type2
);
2910 } else if (bt1
& VT_ARRAY
) {
2911 return type1
->ref
->c
< 0 || type2
->ref
->c
< 0
2912 || type1
->ref
->c
== type2
->ref
->c
;
2913 } else if (bt1
== VT_STRUCT
) {
2914 return (type1
->ref
== type2
->ref
);
2915 } else if (bt1
== VT_FUNC
) {
2916 return is_compatible_func(type1
, type2
);
2917 } else if (IS_ENUM(type1
->t
) || IS_ENUM(type2
->t
)) {
2918 return type1
->ref
== type2
->ref
;
2924 /* return true if type1 and type2 are exactly the same (including
2927 static int is_compatible_types(CType
*type1
, CType
*type2
)
2929 return compare_types(type1
,type2
,0);
2932 /* return true if type1 and type2 are the same (ignoring qualifiers).
2934 static int is_compatible_unqualified_types(CType
*type1
, CType
*type2
)
2936 return compare_types(type1
,type2
,1);
2939 /* print a type. If 'varstr' is not NULL, then the variable is also
2940 printed in the type */
2942 /* XXX: add array and function pointers */
2943 static void type_to_str(char *buf
, int buf_size
,
2944 CType
*type
, const char *varstr
)
2956 pstrcat(buf
, buf_size
, "extern ");
2958 pstrcat(buf
, buf_size
, "static ");
2960 pstrcat(buf
, buf_size
, "typedef ");
2962 pstrcat(buf
, buf_size
, "inline ");
2963 if (t
& VT_VOLATILE
)
2964 pstrcat(buf
, buf_size
, "volatile ");
2965 if (t
& VT_CONSTANT
)
2966 pstrcat(buf
, buf_size
, "const ");
2968 if (((t
& VT_DEFSIGN
) && bt
== VT_BYTE
)
2969 || ((t
& VT_UNSIGNED
)
2970 && (bt
== VT_SHORT
|| bt
== VT_INT
|| bt
== VT_LLONG
)
2973 pstrcat(buf
, buf_size
, (t
& VT_UNSIGNED
) ? "unsigned " : "signed ");
2975 buf_size
-= strlen(buf
);
3010 tstr
= "long double";
3012 pstrcat(buf
, buf_size
, tstr
);
3019 pstrcat(buf
, buf_size
, tstr
);
3020 v
= type
->ref
->v
& ~SYM_STRUCT
;
3021 if (v
>= SYM_FIRST_ANOM
)
3022 pstrcat(buf
, buf_size
, "<anonymous>");
3024 pstrcat(buf
, buf_size
, get_tok_str(v
, NULL
));
3029 if (varstr
&& '*' == *varstr
) {
3030 pstrcat(buf1
, sizeof(buf1
), "(");
3031 pstrcat(buf1
, sizeof(buf1
), varstr
);
3032 pstrcat(buf1
, sizeof(buf1
), ")");
3034 pstrcat(buf1
, buf_size
, "(");
3036 while (sa
!= NULL
) {
3038 type_to_str(buf2
, sizeof(buf2
), &sa
->type
, NULL
);
3039 pstrcat(buf1
, sizeof(buf1
), buf2
);
3042 pstrcat(buf1
, sizeof(buf1
), ", ");
3044 if (s
->f
.func_type
== FUNC_ELLIPSIS
)
3045 pstrcat(buf1
, sizeof(buf1
), ", ...");
3046 pstrcat(buf1
, sizeof(buf1
), ")");
3047 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
3052 if (varstr
&& '*' == *varstr
)
3053 snprintf(buf1
, sizeof(buf1
), "(%s)[%d]", varstr
, s
->c
);
3055 snprintf(buf1
, sizeof(buf1
), "%s[%d]", varstr
? varstr
: "", s
->c
);
3056 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
3059 pstrcpy(buf1
, sizeof(buf1
), "*");
3060 if (t
& VT_CONSTANT
)
3061 pstrcat(buf1
, buf_size
, "const ");
3062 if (t
& VT_VOLATILE
)
3063 pstrcat(buf1
, buf_size
, "volatile ");
3065 pstrcat(buf1
, sizeof(buf1
), varstr
);
3066 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
3070 pstrcat(buf
, buf_size
, " ");
3071 pstrcat(buf
, buf_size
, varstr
);
3076 /* verify type compatibility to store vtop in 'dt' type, and generate
3078 static void gen_assign_cast(CType
*dt
)
3080 CType
*st
, *type1
, *type2
;
3081 char buf1
[256], buf2
[256];
3082 int dbt
, sbt
, qualwarn
, lvl
;
3084 st
= &vtop
->type
; /* source type */
3085 dbt
= dt
->t
& VT_BTYPE
;
3086 sbt
= st
->t
& VT_BTYPE
;
3087 if (sbt
== VT_VOID
|| dbt
== VT_VOID
) {
3088 if (sbt
== VT_VOID
&& dbt
== VT_VOID
)
3089 ; /* It is Ok if both are void */
3091 tcc_error("cannot cast from/to void");
3093 if (dt
->t
& VT_CONSTANT
)
3094 tcc_warning("assignment of read-only location");
3097 /* special cases for pointers */
3098 /* '0' can also be a pointer */
3099 if (is_null_pointer(vtop
))
3101 /* accept implicit pointer to integer cast with warning */
3102 if (is_integer_btype(sbt
)) {
3103 tcc_warning("assignment makes pointer from integer without a cast");
3106 type1
= pointed_type(dt
);
3108 type2
= pointed_type(st
);
3109 else if (sbt
== VT_FUNC
)
3110 type2
= st
; /* a function is implicitly a function pointer */
3113 if (is_compatible_types(type1
, type2
))
3115 for (qualwarn
= lvl
= 0;; ++lvl
) {
3116 if (((type2
->t
& VT_CONSTANT
) && !(type1
->t
& VT_CONSTANT
)) ||
3117 ((type2
->t
& VT_VOLATILE
) && !(type1
->t
& VT_VOLATILE
)))
3119 dbt
= type1
->t
& (VT_BTYPE
|VT_LONG
);
3120 sbt
= type2
->t
& (VT_BTYPE
|VT_LONG
);
3121 if (dbt
!= VT_PTR
|| sbt
!= VT_PTR
)
3123 type1
= pointed_type(type1
);
3124 type2
= pointed_type(type2
);
3126 if (!is_compatible_unqualified_types(type1
, type2
)) {
3127 if ((dbt
== VT_VOID
|| sbt
== VT_VOID
) && lvl
== 0) {
3128 /* void * can match anything */
3129 } else if (dbt
== sbt
3130 && is_integer_btype(sbt
& VT_BTYPE
)
3131 && IS_ENUM(type1
->t
) + IS_ENUM(type2
->t
)
3132 + !!((type1
->t
^ type2
->t
) & VT_UNSIGNED
) < 2) {
3133 /* Like GCC don't warn by default for merely changes
3134 in pointer target signedness. Do warn for different
3135 base types, though, in particular for unsigned enums
3136 and signed int targets. */
3138 tcc_warning("assignment from incompatible pointer type");
3143 tcc_warning("assignment discards qualifiers from pointer target type");
3149 if (sbt
== VT_PTR
|| sbt
== VT_FUNC
) {
3150 tcc_warning("assignment makes integer from pointer without a cast");
3151 } else if (sbt
== VT_STRUCT
) {
3152 goto case_VT_STRUCT
;
3154 /* XXX: more tests */
3158 if (!is_compatible_unqualified_types(dt
, st
)) {
3160 type_to_str(buf1
, sizeof(buf1
), st
, NULL
);
3161 type_to_str(buf2
, sizeof(buf2
), dt
, NULL
);
3162 tcc_error("cannot cast '%s' to '%s'", buf1
, buf2
);
3169 /* store vtop in lvalue pushed on stack */
3170 ST_FUNC
void vstore(void)
3172 int sbt
, dbt
, ft
, r
, t
, size
, align
, bit_size
, bit_pos
, rc
, delayed_cast
;
3174 ft
= vtop
[-1].type
.t
;
3175 sbt
= vtop
->type
.t
& VT_BTYPE
;
3176 dbt
= ft
& VT_BTYPE
;
3177 if ((((sbt
== VT_INT
|| sbt
== VT_SHORT
) && dbt
== VT_BYTE
) ||
3178 (sbt
== VT_INT
&& dbt
== VT_SHORT
))
3179 && !(vtop
->type
.t
& VT_BITFIELD
)) {
3180 /* optimize char/short casts */
3181 delayed_cast
= VT_MUSTCAST
;
3182 vtop
->type
.t
= ft
& VT_TYPE
;
3183 /* XXX: factorize */
3184 if (ft
& VT_CONSTANT
)
3185 tcc_warning("assignment of read-only location");
3188 if (!(ft
& VT_BITFIELD
))
3189 gen_assign_cast(&vtop
[-1].type
);
3192 if (sbt
== VT_STRUCT
) {
3193 /* if structure, only generate pointer */
3194 /* structure assignment : generate memcpy */
3195 /* XXX: optimize if small size */
3196 size
= type_size(&vtop
->type
, &align
);
3200 vtop
->type
.t
= VT_PTR
;
3203 /* address of memcpy() */
3206 vpush_global_sym(&func_old_type
, TOK_memcpy8
);
3207 else if(!(align
& 3))
3208 vpush_global_sym(&func_old_type
, TOK_memcpy4
);
3211 /* Use memmove, rather than memcpy, as dest and src may be same: */
3212 vpush_global_sym(&func_old_type
, TOK_memmove
);
3217 vtop
->type
.t
= VT_PTR
;
3223 /* leave source on stack */
3224 } else if (ft
& VT_BITFIELD
) {
3225 /* bitfield store handling */
3227 /* save lvalue as expression result (example: s.b = s.a = n;) */
3228 vdup(), vtop
[-1] = vtop
[-2];
3230 bit_pos
= BIT_POS(ft
);
3231 bit_size
= BIT_SIZE(ft
);
3232 /* remove bit field info to avoid loops */
3233 vtop
[-1].type
.t
= ft
& ~VT_STRUCT_MASK
;
3235 if ((ft
& VT_BTYPE
) == VT_BOOL
) {
3236 gen_cast(&vtop
[-1].type
);
3237 vtop
[-1].type
.t
= (vtop
[-1].type
.t
& ~VT_BTYPE
) | (VT_BYTE
| VT_UNSIGNED
);
3240 r
= adjust_bf(vtop
- 1, bit_pos
, bit_size
);
3241 if (r
== VT_STRUCT
) {
3242 gen_cast_s((ft
& VT_BTYPE
) == VT_LLONG
? VT_LLONG
: VT_INT
);
3243 store_packed_bf(bit_pos
, bit_size
);
3245 unsigned long long mask
= (1ULL << bit_size
) - 1;
3246 if ((ft
& VT_BTYPE
) != VT_BOOL
) {
3248 if ((vtop
[-1].type
.t
& VT_BTYPE
) == VT_LLONG
)
3251 vpushi((unsigned)mask
);
3258 /* duplicate destination */
3261 /* load destination, mask and or with source */
3262 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
)
3263 vpushll(~(mask
<< bit_pos
));
3265 vpushi(~((unsigned)mask
<< bit_pos
));
3270 /* ... and discard */
3273 } else if (dbt
== VT_VOID
) {
3276 #ifdef CONFIG_TCC_BCHECK
3277 /* bound check case */
3278 if (vtop
[-1].r
& VT_MUSTBOUND
) {
3287 #ifdef TCC_TARGET_X86_64
3288 if ((ft
& VT_BTYPE
) == VT_LDOUBLE
) {
3290 } else if ((ft
& VT_BTYPE
) == VT_QFLOAT
) {
3295 r
= gv(rc
); /* generate value */
3296 /* if lvalue was saved on stack, must read it */
3297 if ((vtop
[-1].r
& VT_VALMASK
) == VT_LLOCAL
) {
3299 t
= get_reg(RC_INT
);
3305 sv
.r
= VT_LOCAL
| VT_LVAL
;
3306 sv
.c
.i
= vtop
[-1].c
.i
;
3308 vtop
[-1].r
= t
| VT_LVAL
;
3310 /* two word case handling : store second register at word + 4 (or +8 for x86-64) */
3312 if (((ft
& VT_BTYPE
) == VT_QLONG
) || ((ft
& VT_BTYPE
) == VT_QFLOAT
)) {
3313 int addr_type
= VT_LLONG
, load_size
= 8, load_type
= ((vtop
->type
.t
& VT_BTYPE
) == VT_QLONG
) ? VT_LLONG
: VT_DOUBLE
;
3315 if ((ft
& VT_BTYPE
) == VT_LLONG
) {
3316 int addr_type
= VT_INT
, load_size
= 4, load_type
= VT_INT
;
3318 vtop
[-1].type
.t
= load_type
;
3321 /* convert to int to increment easily */
3322 vtop
->type
.t
= addr_type
;
3328 vtop
[-1].type
.t
= load_type
;
3329 /* XXX: it works because r2 is spilled last ! */
3330 store(vtop
->r2
, vtop
- 1);
3336 vtop
--; /* NOT vpop() because on x86 it would flush the fp stack */
3337 vtop
->r
|= delayed_cast
;
3341 /* post defines POST/PRE add. c is the token ++ or -- */
3342 ST_FUNC
void inc(int post
, int c
)
3345 vdup(); /* save lvalue */
3347 gv_dup(); /* duplicate value */
3352 vpushi(c
- TOK_MID
);
3354 vstore(); /* store value */
3356 vpop(); /* if post op, return saved value */
3359 ST_FUNC
void parse_mult_str (CString
*astr
, const char *msg
)
3361 /* read the string */
3365 while (tok
== TOK_STR
) {
3366 /* XXX: add \0 handling too ? */
3367 cstr_cat(astr
, tokc
.str
.data
, -1);
3370 cstr_ccat(astr
, '\0');
3373 /* If I is >= 1 and a power of two, returns log2(i)+1.
3374 If I is 0 returns 0. */
3375 static int exact_log2p1(int i
)
3380 for (ret
= 1; i
>= 1 << 8; ret
+= 8)
3391 /* Parse __attribute__((...)) GNUC extension. */
3392 static void parse_attribute(AttributeDef
*ad
)
3398 if (tok
!= TOK_ATTRIBUTE1
&& tok
!= TOK_ATTRIBUTE2
)
3403 while (tok
!= ')') {
3404 if (tok
< TOK_IDENT
)
3405 expect("attribute name");
3417 tcc_warning("implicit declaration of function '%s'",
3418 get_tok_str(tok
, &tokc
));
3419 s
= external_global_sym(tok
, &func_old_type
, 0);
3421 ad
->cleanup_func
= s
;
3429 parse_mult_str(&astr
, "section name");
3430 ad
->section
= find_section(tcc_state
, (char *)astr
.data
);
3437 parse_mult_str(&astr
, "alias(\"target\")");
3438 ad
->alias_target
= /* save string as token, for later */
3439 tok_alloc((char*)astr
.data
, astr
.size
-1)->tok
;
3443 case TOK_VISIBILITY1
:
3444 case TOK_VISIBILITY2
:
3446 parse_mult_str(&astr
,
3447 "visibility(\"default|hidden|internal|protected\")");
3448 if (!strcmp (astr
.data
, "default"))
3449 ad
->a
.visibility
= STV_DEFAULT
;
3450 else if (!strcmp (astr
.data
, "hidden"))
3451 ad
->a
.visibility
= STV_HIDDEN
;
3452 else if (!strcmp (astr
.data
, "internal"))
3453 ad
->a
.visibility
= STV_INTERNAL
;
3454 else if (!strcmp (astr
.data
, "protected"))
3455 ad
->a
.visibility
= STV_PROTECTED
;
3457 expect("visibility(\"default|hidden|internal|protected\")");
3466 if (n
<= 0 || (n
& (n
- 1)) != 0)
3467 tcc_error("alignment must be a positive power of two");
3472 ad
->a
.aligned
= exact_log2p1(n
);
3473 if (n
!= 1 << (ad
->a
.aligned
- 1))
3474 tcc_error("alignment of %d is larger than implemented", n
);
3486 /* currently, no need to handle it because tcc does not
3487 track unused objects */
3491 /* currently, no need to handle it because tcc does not
3492 track unused objects */
3497 ad
->f
.func_call
= FUNC_CDECL
;
3502 ad
->f
.func_call
= FUNC_STDCALL
;
3504 #ifdef TCC_TARGET_I386
3514 ad
->f
.func_call
= FUNC_FASTCALL1
+ n
- 1;
3520 ad
->f
.func_call
= FUNC_FASTCALLW
;
3527 ad
->attr_mode
= VT_LLONG
+ 1;
3530 ad
->attr_mode
= VT_BYTE
+ 1;
3533 ad
->attr_mode
= VT_SHORT
+ 1;
3537 ad
->attr_mode
= VT_INT
+ 1;
3540 tcc_warning("__mode__(%s) not supported\n", get_tok_str(tok
, NULL
));
3547 ad
->a
.dllexport
= 1;
3549 case TOK_NODECORATE
:
3550 ad
->a
.nodecorate
= 1;
3553 ad
->a
.dllimport
= 1;
3556 if (tcc_state
->warn_unsupported
)
3557 tcc_warning("'%s' attribute ignored", get_tok_str(t
, NULL
));
3558 /* skip parameters */
3560 int parenthesis
= 0;
3564 else if (tok
== ')')
3567 } while (parenthesis
&& tok
!= -1);
3580 static Sym
* find_field (CType
*type
, int v
)
3584 while ((s
= s
->next
) != NULL
) {
3585 if ((s
->v
& SYM_FIELD
) &&
3586 (s
->type
.t
& VT_BTYPE
) == VT_STRUCT
&&
3587 (s
->v
& ~SYM_FIELD
) >= SYM_FIRST_ANOM
) {
3588 Sym
*ret
= find_field (&s
->type
, v
);
3598 static void struct_add_offset (Sym
*s
, int offset
)
3600 while ((s
= s
->next
) != NULL
) {
3601 if ((s
->v
& SYM_FIELD
) &&
3602 (s
->type
.t
& VT_BTYPE
) == VT_STRUCT
&&
3603 (s
->v
& ~SYM_FIELD
) >= SYM_FIRST_ANOM
) {
3604 struct_add_offset(s
->type
.ref
, offset
);
3610 static void struct_layout(CType
*type
, AttributeDef
*ad
)
3612 int size
, align
, maxalign
, offset
, c
, bit_pos
, bit_size
;
3613 int packed
, a
, bt
, prevbt
, prev_bit_size
;
3614 int pcc
= !tcc_state
->ms_bitfields
;
3615 int pragma_pack
= *tcc_state
->pack_stack_ptr
;
3622 prevbt
= VT_STRUCT
; /* make it never match */
3627 for (f
= type
->ref
->next
; f
; f
= f
->next
) {
3628 if (f
->type
.t
& VT_BITFIELD
)
3629 bit_size
= BIT_SIZE(f
->type
.t
);
3632 size
= type_size(&f
->type
, &align
);
3633 a
= f
->a
.aligned
? 1 << (f
->a
.aligned
- 1) : 0;
3636 if (pcc
&& bit_size
== 0) {
3637 /* in pcc mode, packing does not affect zero-width bitfields */
3640 /* in pcc mode, attribute packed overrides if set. */
3641 if (pcc
&& (f
->a
.packed
|| ad
->a
.packed
))
3644 /* pragma pack overrides align if lesser and packs bitfields always */
3647 if (pragma_pack
< align
)
3648 align
= pragma_pack
;
3649 /* in pcc mode pragma pack also overrides individual align */
3650 if (pcc
&& pragma_pack
< a
)
3654 /* some individual align was specified */
3658 if (type
->ref
->type
.t
== VT_UNION
) {
3659 if (pcc
&& bit_size
>= 0)
3660 size
= (bit_size
+ 7) >> 3;
3665 } else if (bit_size
< 0) {
3667 c
+= (bit_pos
+ 7) >> 3;
3668 c
= (c
+ align
- 1) & -align
;
3677 /* A bit-field. Layout is more complicated. There are two
3678 options: PCC (GCC) compatible and MS compatible */
3680 /* In PCC layout a bit-field is placed adjacent to the
3681 preceding bit-fields, except if:
3683 - an individual alignment was given
3684 - it would overflow its base type container and
3685 there is no packing */
3686 if (bit_size
== 0) {
3688 c
= (c
+ ((bit_pos
+ 7) >> 3) + align
- 1) & -align
;
3690 } else if (f
->a
.aligned
) {
3692 } else if (!packed
) {
3694 int ofs
= ((c
* 8 + bit_pos
) % a8
+ bit_size
+ a8
- 1) / a8
;
3695 if (ofs
> size
/ align
)
3699 /* in pcc mode, long long bitfields have type int if they fit */
3700 if (size
== 8 && bit_size
<= 32)
3701 f
->type
.t
= (f
->type
.t
& ~VT_BTYPE
) | VT_INT
, size
= 4;
3703 while (bit_pos
>= align
* 8)
3704 c
+= align
, bit_pos
-= align
* 8;
3707 /* In PCC layout named bit-fields influence the alignment
3708 of the containing struct using the base types alignment,
3709 except for packed fields (which here have correct align). */
3710 if (f
->v
& SYM_FIRST_ANOM
3711 // && bit_size // ??? gcc on ARM/rpi does that
3716 bt
= f
->type
.t
& VT_BTYPE
;
3717 if ((bit_pos
+ bit_size
> size
* 8)
3718 || (bit_size
> 0) == (bt
!= prevbt
)
3720 c
= (c
+ align
- 1) & -align
;
3723 /* In MS bitfield mode a bit-field run always uses
3724 at least as many bits as the underlying type.
3725 To start a new run it's also required that this
3726 or the last bit-field had non-zero width. */
3727 if (bit_size
|| prev_bit_size
)
3730 /* In MS layout the records alignment is normally
3731 influenced by the field, except for a zero-width
3732 field at the start of a run (but by further zero-width
3733 fields it is again). */
3734 if (bit_size
== 0 && prevbt
!= bt
)
3737 prev_bit_size
= bit_size
;
3740 f
->type
.t
= (f
->type
.t
& ~(0x3f << VT_STRUCT_SHIFT
))
3741 | (bit_pos
<< VT_STRUCT_SHIFT
);
3742 bit_pos
+= bit_size
;
3744 if (align
> maxalign
)
3748 printf("set field %s offset %-2d size %-2d align %-2d",
3749 get_tok_str(f
->v
& ~SYM_FIELD
, NULL
), offset
, size
, align
);
3750 if (f
->type
.t
& VT_BITFIELD
) {
3751 printf(" pos %-2d bits %-2d",
3759 if (f
->v
& SYM_FIRST_ANOM
&& (f
->type
.t
& VT_BTYPE
) == VT_STRUCT
) {
3761 /* An anonymous struct/union. Adjust member offsets
3762 to reflect the real offset of our containing struct.
3763 Also set the offset of this anon member inside
3764 the outer struct to be zero. Via this it
3765 works when accessing the field offset directly
3766 (from base object), as well as when recursing
3767 members in initializer handling. */
3768 int v2
= f
->type
.ref
->v
;
3769 if (!(v2
& SYM_FIELD
) &&
3770 (v2
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
3772 /* This happens only with MS extensions. The
3773 anon member has a named struct type, so it
3774 potentially is shared with other references.
3775 We need to unshare members so we can modify
3778 f
->type
.ref
= sym_push(anon_sym
++ | SYM_FIELD
,
3779 &f
->type
.ref
->type
, 0,
3781 pps
= &f
->type
.ref
->next
;
3782 while ((ass
= ass
->next
) != NULL
) {
3783 *pps
= sym_push(ass
->v
, &ass
->type
, 0, ass
->c
);
3784 pps
= &((*pps
)->next
);
3788 struct_add_offset(f
->type
.ref
, offset
);
3798 c
+= (bit_pos
+ 7) >> 3;
3800 /* store size and alignment */
3801 a
= bt
= ad
->a
.aligned
? 1 << (ad
->a
.aligned
- 1) : 1;
3805 if (pragma_pack
&& pragma_pack
< maxalign
&& 0 == pcc
) {
3806 /* can happen if individual align for some member was given. In
3807 this case MSVC ignores maxalign when aligning the size */
3812 c
= (c
+ a
- 1) & -a
;
3816 printf("struct size %-2d align %-2d\n\n", c
, a
), fflush(stdout
);
3819 /* check whether we can access bitfields by their type */
3820 for (f
= type
->ref
->next
; f
; f
= f
->next
) {
3824 if (0 == (f
->type
.t
& VT_BITFIELD
))
3828 bit_size
= BIT_SIZE(f
->type
.t
);
3831 bit_pos
= BIT_POS(f
->type
.t
);
3832 size
= type_size(&f
->type
, &align
);
3833 if (bit_pos
+ bit_size
<= size
* 8 && f
->c
+ size
<= c
)
3836 /* try to access the field using a different type */
3837 c0
= -1, s
= align
= 1;
3839 px
= f
->c
* 8 + bit_pos
;
3840 cx
= (px
>> 3) & -align
;
3841 px
= px
- (cx
<< 3);
3844 s
= (px
+ bit_size
+ 7) >> 3;
3854 s
= type_size(&t
, &align
);
3858 if (px
+ bit_size
<= s
* 8 && cx
+ s
<= c
) {
3859 /* update offset and bit position */
3862 f
->type
.t
= (f
->type
.t
& ~(0x3f << VT_STRUCT_SHIFT
))
3863 | (bit_pos
<< VT_STRUCT_SHIFT
);
3867 printf("FIX field %s offset %-2d size %-2d align %-2d "
3868 "pos %-2d bits %-2d\n",
3869 get_tok_str(f
->v
& ~SYM_FIELD
, NULL
),
3870 cx
, s
, align
, px
, bit_size
);
3873 /* fall back to load/store single-byte wise */
3874 f
->auxtype
= VT_STRUCT
;
3876 printf("FIX field %s : load byte-wise\n",
3877 get_tok_str(f
->v
& ~SYM_FIELD
, NULL
));
3883 /* enum/struct/union declaration. u is VT_ENUM/VT_STRUCT/VT_UNION */
3884 static void struct_decl(CType
*type
, int u
)
3886 int v
, c
, size
, align
, flexible
;
3887 int bit_size
, bsize
, bt
;
3889 AttributeDef ad
, ad1
;
3892 memset(&ad
, 0, sizeof ad
);
3894 parse_attribute(&ad
);
3898 /* struct already defined ? return it */
3900 expect("struct/union/enum name");
3902 if (s
&& (s
->sym_scope
== local_scope
|| tok
!= '{')) {
3905 if (u
== VT_ENUM
&& IS_ENUM(s
->type
.t
))
3907 tcc_error("redefinition of '%s'", get_tok_str(v
, NULL
));
3912 /* Record the original enum/struct/union token. */
3913 type1
.t
= u
== VT_ENUM
? u
| VT_INT
| VT_UNSIGNED
: u
;
3915 /* we put an undefined size for struct/union */
3916 s
= sym_push(v
| SYM_STRUCT
, &type1
, 0, -1);
3917 s
->r
= 0; /* default alignment is zero as gcc */
3919 type
->t
= s
->type
.t
;
3925 tcc_error("struct/union/enum already defined");
3926 /* cannot be empty */
3927 /* non empty enums are not allowed */
3930 long long ll
= 0, pl
= 0, nl
= 0;
3933 /* enum symbols have static storage */
3934 t
.t
= VT_INT
|VT_STATIC
|VT_ENUM_VAL
;
3938 expect("identifier");
3940 if (ss
&& !local_stack
)
3941 tcc_error("redefinition of enumerator '%s'",
3942 get_tok_str(v
, NULL
));
3946 ll
= expr_const64();
3948 ss
= sym_push(v
, &t
, VT_CONST
, 0);
3950 *ps
= ss
, ps
= &ss
->next
;
3959 /* NOTE: we accept a trailing comma */
3964 /* set integral type of the enum */
3967 if (pl
!= (unsigned)pl
)
3968 t
.t
= (LONG_SIZE
==8 ? VT_LLONG
|VT_LONG
: VT_LLONG
);
3970 } else if (pl
!= (int)pl
|| nl
!= (int)nl
)
3971 t
.t
= (LONG_SIZE
==8 ? VT_LLONG
|VT_LONG
: VT_LLONG
);
3972 s
->type
.t
= type
->t
= t
.t
| VT_ENUM
;
3974 /* set type for enum members */
3975 for (ss
= s
->next
; ss
; ss
= ss
->next
) {
3977 if (ll
== (int)ll
) /* default is int if it fits */
3979 if (t
.t
& VT_UNSIGNED
) {
3980 ss
->type
.t
|= VT_UNSIGNED
;
3981 if (ll
== (unsigned)ll
)
3984 ss
->type
.t
= (ss
->type
.t
& ~VT_BTYPE
)
3985 | (LONG_SIZE
==8 ? VT_LLONG
|VT_LONG
: VT_LLONG
);
3990 while (tok
!= '}') {
3991 if (!parse_btype(&btype
, &ad1
)) {
3997 tcc_error("flexible array member '%s' not at the end of struct",
3998 get_tok_str(v
, NULL
));
4004 type_decl(&type1
, &ad1
, &v
, TYPE_DIRECT
);
4006 if ((type1
.t
& VT_BTYPE
) != VT_STRUCT
)
4007 expect("identifier");
4009 int v
= btype
.ref
->v
;
4010 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
4011 if (tcc_state
->ms_extensions
== 0)
4012 expect("identifier");
4016 if (type_size(&type1
, &align
) < 0) {
4017 if ((u
== VT_STRUCT
) && (type1
.t
& VT_ARRAY
) && c
)
4020 tcc_error("field '%s' has incomplete type",
4021 get_tok_str(v
, NULL
));
4023 if ((type1
.t
& VT_BTYPE
) == VT_FUNC
||
4024 (type1
.t
& VT_BTYPE
) == VT_VOID
||
4025 (type1
.t
& VT_STORAGE
))
4026 tcc_error("invalid type for '%s'",
4027 get_tok_str(v
, NULL
));
4031 bit_size
= expr_const();
4032 /* XXX: handle v = 0 case for messages */
4034 tcc_error("negative width in bit-field '%s'",
4035 get_tok_str(v
, NULL
));
4036 if (v
&& bit_size
== 0)
4037 tcc_error("zero width for bit-field '%s'",
4038 get_tok_str(v
, NULL
));
4039 parse_attribute(&ad1
);
4041 size
= type_size(&type1
, &align
);
4042 if (bit_size
>= 0) {
4043 bt
= type1
.t
& VT_BTYPE
;
4049 tcc_error("bitfields must have scalar type");
4051 if (bit_size
> bsize
) {
4052 tcc_error("width of '%s' exceeds its type",
4053 get_tok_str(v
, NULL
));
4054 } else if (bit_size
== bsize
4055 && !ad
.a
.packed
&& !ad1
.a
.packed
) {
4056 /* no need for bit fields */
4058 } else if (bit_size
== 64) {
4059 tcc_error("field width 64 not implemented");
4061 type1
.t
= (type1
.t
& ~VT_STRUCT_MASK
)
4063 | (bit_size
<< (VT_STRUCT_SHIFT
+ 6));
4066 if (v
!= 0 || (type1
.t
& VT_BTYPE
) == VT_STRUCT
) {
4067 /* Remember we've seen a real field to check
4068 for placement of flexible array member. */
4071 /* If member is a struct or bit-field, enforce
4072 placing into the struct (as anonymous). */
4074 ((type1
.t
& VT_BTYPE
) == VT_STRUCT
||
4079 ss
= sym_push(v
| SYM_FIELD
, &type1
, 0, 0);
4084 if (tok
== ';' || tok
== TOK_EOF
)
4091 parse_attribute(&ad
);
4092 struct_layout(type
, &ad
);
4097 static void sym_to_attr(AttributeDef
*ad
, Sym
*s
)
4099 merge_symattr(&ad
->a
, &s
->a
);
4100 merge_funcattr(&ad
->f
, &s
->f
);
4103 /* Add type qualifiers to a type. If the type is an array then the qualifiers
4104 are added to the element type, copied because it could be a typedef. */
4105 static void parse_btype_qualify(CType
*type
, int qualifiers
)
4107 while (type
->t
& VT_ARRAY
) {
4108 type
->ref
= sym_push(SYM_FIELD
, &type
->ref
->type
, 0, type
->ref
->c
);
4109 type
= &type
->ref
->type
;
4111 type
->t
|= qualifiers
;
4114 /* return 0 if no type declaration. otherwise, return the basic type
4117 static int parse_btype(CType
*type
, AttributeDef
*ad
)
4119 int t
, u
, bt
, st
, type_found
, typespec_found
, g
;
4123 memset(ad
, 0, sizeof(AttributeDef
));
4133 /* currently, we really ignore extension */
4143 if (u
== VT_SHORT
|| u
== VT_LONG
) {
4144 if (st
!= -1 || (bt
!= -1 && bt
!= VT_INT
))
4145 tmbt
: tcc_error("too many basic types");
4148 if (bt
!= -1 || (st
!= -1 && u
!= VT_INT
))
4153 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | u
;
4166 if ((t
& VT_BTYPE
) == VT_DOUBLE
) {
4167 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_LDOUBLE
;
4168 } else if ((t
& (VT_BTYPE
|VT_LONG
)) == VT_LONG
) {
4169 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_LLONG
;
4176 #ifdef TCC_TARGET_ARM64
4178 /* GCC's __uint128_t appears in some Linux header files. Make it a
4179 synonym for long double to get the size and alignment right. */
4190 if ((t
& (VT_BTYPE
|VT_LONG
)) == VT_LONG
) {
4191 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_LDOUBLE
;
4199 struct_decl(&type1
, VT_ENUM
);
4202 type
->ref
= type1
.ref
;
4205 struct_decl(&type1
, VT_STRUCT
);
4208 struct_decl(&type1
, VT_UNION
);
4211 /* type modifiers */
4216 parse_btype_qualify(type
, VT_CONSTANT
);
4224 parse_btype_qualify(type
, VT_VOLATILE
);
4231 if ((t
& (VT_DEFSIGN
|VT_UNSIGNED
)) == (VT_DEFSIGN
|VT_UNSIGNED
))
4232 tcc_error("signed and unsigned modifier");
4245 if ((t
& (VT_DEFSIGN
|VT_UNSIGNED
)) == VT_DEFSIGN
)
4246 tcc_error("signed and unsigned modifier");
4247 t
|= VT_DEFSIGN
| VT_UNSIGNED
;
4263 if (t
& (VT_EXTERN
|VT_STATIC
|VT_TYPEDEF
) & ~g
)
4264 tcc_error("multiple storage classes");
4275 /* GNUC attribute */
4276 case TOK_ATTRIBUTE1
:
4277 case TOK_ATTRIBUTE2
:
4278 parse_attribute(ad
);
4279 if (ad
->attr_mode
) {
4280 u
= ad
->attr_mode
-1;
4281 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | u
;
4289 parse_expr_type(&type1
);
4290 /* remove all storage modifiers except typedef */
4291 type1
.t
&= ~(VT_STORAGE
&~VT_TYPEDEF
);
4293 sym_to_attr(ad
, type1
.ref
);
4299 if (!s
|| !(s
->type
.t
& VT_TYPEDEF
))
4301 t
&= ~(VT_BTYPE
|VT_LONG
);
4302 u
= t
& ~(VT_CONSTANT
| VT_VOLATILE
), t
^= u
;
4303 type
->t
= (s
->type
.t
& ~VT_TYPEDEF
) | u
;
4304 type
->ref
= s
->type
.ref
;
4306 parse_btype_qualify(type
, t
);
4308 /* get attributes from typedef */
4318 if (tcc_state
->char_is_unsigned
) {
4319 if ((t
& (VT_DEFSIGN
|VT_BTYPE
)) == VT_BYTE
)
4322 /* VT_LONG is used just as a modifier for VT_INT / VT_LLONG */
4323 bt
= t
& (VT_BTYPE
|VT_LONG
);
4325 t
|= LONG_SIZE
== 8 ? VT_LLONG
: VT_INT
;
4326 #ifdef TCC_TARGET_PE
4327 if (bt
== VT_LDOUBLE
)
4328 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_DOUBLE
;
4334 /* convert a function parameter type (array to pointer and function to
4335 function pointer) */
4336 static inline void convert_parameter_type(CType
*pt
)
4338 /* remove const and volatile qualifiers (XXX: const could be used
4339 to indicate a const function parameter */
4340 pt
->t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
4341 /* array must be transformed to pointer according to ANSI C */
4343 if ((pt
->t
& VT_BTYPE
) == VT_FUNC
) {
4348 ST_FUNC
void parse_asm_str(CString
*astr
)
4351 parse_mult_str(astr
, "string constant");
4354 /* Parse an asm label and return the token */
4355 static int asm_label_instr(void)
4361 parse_asm_str(&astr
);
4364 printf("asm_alias: \"%s\"\n", (char *)astr
.data
);
4366 v
= tok_alloc(astr
.data
, astr
.size
- 1)->tok
;
4371 static int post_type(CType
*type
, AttributeDef
*ad
, int storage
, int td
)
4373 int n
, l
, t1
, arg_size
, align
;
4374 Sym
**plast
, *s
, *first
;
4379 /* function type, or recursive declarator (return if so) */
4381 if (td
&& !(td
& TYPE_ABSTRACT
))
4385 else if (parse_btype(&pt
, &ad1
))
4388 merge_attr (ad
, &ad1
);
4397 /* read param name and compute offset */
4398 if (l
!= FUNC_OLD
) {
4399 if ((pt
.t
& VT_BTYPE
) == VT_VOID
&& tok
== ')')
4401 type_decl(&pt
, &ad1
, &n
, TYPE_DIRECT
| TYPE_ABSTRACT
);
4402 if ((pt
.t
& VT_BTYPE
) == VT_VOID
)
4403 tcc_error("parameter declared as void");
4407 expect("identifier");
4408 pt
.t
= VT_VOID
; /* invalid type */
4411 convert_parameter_type(&pt
);
4412 arg_size
+= (type_size(&pt
, &align
) + PTR_SIZE
- 1) / PTR_SIZE
;
4413 s
= sym_push(n
| SYM_FIELD
, &pt
, 0, 0);
4419 if (l
== FUNC_NEW
&& tok
== TOK_DOTS
) {
4424 if (l
== FUNC_NEW
&& !parse_btype(&pt
, &ad1
))
4425 tcc_error("invalid type");
4428 /* if no parameters, then old type prototype */
4431 /* NOTE: const is ignored in returned type as it has a special
4432 meaning in gcc / C++ */
4433 type
->t
&= ~VT_CONSTANT
;
4434 /* some ancient pre-K&R C allows a function to return an array
4435 and the array brackets to be put after the arguments, such
4436 that "int c()[]" means something like "int[] c()" */
4439 skip(']'); /* only handle simple "[]" */
4442 /* we push a anonymous symbol which will contain the function prototype */
4443 ad
->f
.func_args
= arg_size
;
4444 ad
->f
.func_type
= l
;
4445 s
= sym_push(SYM_FIELD
, type
, 0, 0);
4451 } else if (tok
== '[') {
4452 int saved_nocode_wanted
= nocode_wanted
;
4453 /* array definition */
4456 /* XXX The optional type-quals and static should only be accepted
4457 in parameter decls. The '*' as well, and then even only
4458 in prototypes (not function defs). */
4460 case TOK_RESTRICT1
: case TOK_RESTRICT2
: case TOK_RESTRICT3
:
4475 if (!local_stack
|| (storage
& VT_STATIC
))
4476 vpushi(expr_const());
4478 /* VLAs (which can only happen with local_stack && !VT_STATIC)
4479 length must always be evaluated, even under nocode_wanted,
4480 so that its size slot is initialized (e.g. under sizeof
4485 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
4488 tcc_error("invalid array size");
4490 if (!is_integer_btype(vtop
->type
.t
& VT_BTYPE
))
4491 tcc_error("size of variable length array should be an integer");
4496 /* parse next post type */
4497 post_type(type
, ad
, storage
, 0);
4498 if (type
->t
== VT_FUNC
)
4499 tcc_error("declaration of an array of functions");
4500 t1
|= type
->t
& VT_VLA
;
4503 loc
-= type_size(&int_type
, &align
);
4507 vla_runtime_type_size(type
, &align
);
4509 vset(&int_type
, VT_LOCAL
|VT_LVAL
, n
);
4515 nocode_wanted
= saved_nocode_wanted
;
4517 /* we push an anonymous symbol which will contain the array
4519 s
= sym_push(SYM_FIELD
, type
, 0, n
);
4520 type
->t
= (t1
? VT_VLA
: VT_ARRAY
) | VT_PTR
;
4526 /* Parse a type declarator (except basic type), and return the type
4527 in 'type'. 'td' is a bitmask indicating which kind of type decl is
4528 expected. 'type' should contain the basic type. 'ad' is the
4529 attribute definition of the basic type. It can be modified by
4530 type_decl(). If this (possibly abstract) declarator is a pointer chain
4531 it returns the innermost pointed to type (equals *type, but is a different
4532 pointer), otherwise returns type itself, that's used for recursive calls. */
4533 static CType
*type_decl(CType
*type
, AttributeDef
*ad
, int *v
, int td
)
4536 int qualifiers
, storage
;
4538 /* recursive type, remove storage bits first, apply them later again */
4539 storage
= type
->t
& VT_STORAGE
;
4540 type
->t
&= ~VT_STORAGE
;
4543 while (tok
== '*') {
4551 qualifiers
|= VT_CONSTANT
;
4556 qualifiers
|= VT_VOLATILE
;
4562 /* XXX: clarify attribute handling */
4563 case TOK_ATTRIBUTE1
:
4564 case TOK_ATTRIBUTE2
:
4565 parse_attribute(ad
);
4569 type
->t
|= qualifiers
;
4571 /* innermost pointed to type is the one for the first derivation */
4572 ret
= pointed_type(type
);
4576 /* This is possibly a parameter type list for abstract declarators
4577 ('int ()'), use post_type for testing this. */
4578 if (!post_type(type
, ad
, 0, td
)) {
4579 /* It's not, so it's a nested declarator, and the post operations
4580 apply to the innermost pointed to type (if any). */
4581 /* XXX: this is not correct to modify 'ad' at this point, but
4582 the syntax is not clear */
4583 parse_attribute(ad
);
4584 post
= type_decl(type
, ad
, v
, td
);
4587 } else if (tok
>= TOK_IDENT
&& (td
& TYPE_DIRECT
)) {
4588 /* type identifier */
4592 if (!(td
& TYPE_ABSTRACT
))
4593 expect("identifier");
4596 post_type(post
, ad
, storage
, 0);
4597 parse_attribute(ad
);
4602 /* compute the lvalue VT_LVAL_xxx needed to match type t. */
4603 ST_FUNC
int lvalue_type(int t
)
4608 if (bt
== VT_BYTE
|| bt
== VT_BOOL
)
4610 else if (bt
== VT_SHORT
)
4614 if (t
& VT_UNSIGNED
)
4615 r
|= VT_LVAL_UNSIGNED
;
4619 /* indirection with full error checking and bound check */
4620 ST_FUNC
void indir(void)
4622 if ((vtop
->type
.t
& VT_BTYPE
) != VT_PTR
) {
4623 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FUNC
)
4627 if (vtop
->r
& VT_LVAL
)
4629 vtop
->type
= *pointed_type(&vtop
->type
);
4630 /* Arrays and functions are never lvalues */
4631 if (!(vtop
->type
.t
& VT_ARRAY
) && !(vtop
->type
.t
& VT_VLA
)
4632 && (vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
) {
4633 vtop
->r
|= lvalue_type(vtop
->type
.t
);
4634 /* if bound checking, the referenced pointer must be checked */
4635 #ifdef CONFIG_TCC_BCHECK
4636 if (tcc_state
->do_bounds_check
)
4637 vtop
->r
|= VT_MUSTBOUND
;
4642 /* pass a parameter to a function and do type checking and casting */
4643 static void gfunc_param_typed(Sym
*func
, Sym
*arg
)
4648 func_type
= func
->f
.func_type
;
4649 if (func_type
== FUNC_OLD
||
4650 (func_type
== FUNC_ELLIPSIS
&& arg
== NULL
)) {
4651 /* default casting : only need to convert float to double */
4652 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FLOAT
) {
4653 gen_cast_s(VT_DOUBLE
);
4654 } else if (vtop
->type
.t
& VT_BITFIELD
) {
4655 type
.t
= vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
);
4656 type
.ref
= vtop
->type
.ref
;
4659 } else if (arg
== NULL
) {
4660 tcc_error("too many arguments to function");
4663 type
.t
&= ~VT_CONSTANT
; /* need to do that to avoid false warning */
4664 gen_assign_cast(&type
);
4668 /* parse an expression and return its type without any side effect. */
4669 static void expr_type(CType
*type
, void (*expr_fn
)(void))
4678 /* parse an expression of the form '(type)' or '(expr)' and return its
4680 static void parse_expr_type(CType
*type
)
4686 if (parse_btype(type
, &ad
)) {
4687 type_decl(type
, &ad
, &n
, TYPE_ABSTRACT
);
4689 expr_type(type
, gexpr
);
4694 static void parse_type(CType
*type
)
4699 if (!parse_btype(type
, &ad
)) {
4702 type_decl(type
, &ad
, &n
, TYPE_ABSTRACT
);
4705 static void parse_builtin_params(int nc
, const char *args
)
4712 while ((c
= *args
++)) {
4716 case 'e': expr_eq(); continue;
4717 case 't': parse_type(&t
); vpush(&t
); continue;
4718 default: tcc_error("internal error"); break;
4726 static void try_call_scope_cleanup(Sym
*stop
)
4728 Sym
*cls
= current_cleanups
;
4730 for (; cls
!= stop
; cls
= cls
->ncl
) {
4731 Sym
*fs
= cls
->next
;
4732 Sym
*vs
= cls
->prev_tok
;
4734 vpushsym(&fs
->type
, fs
);
4735 vset(&vs
->type
, vs
->r
, vs
->c
);
4737 mk_pointer(&vtop
->type
);
4743 static void try_call_cleanup_goto(Sym
*cleanupstate
)
4748 if (!current_cleanups
)
4751 /* search NCA of both cleanup chains given parents and initial depth */
4752 ocd
= cleanupstate
? cleanupstate
->v
& ~SYM_FIELD
: 0;
4753 for (ccd
= ncleanups
, oc
= cleanupstate
; ocd
> ccd
; --ocd
, oc
= oc
->ncl
)
4755 for (cc
= current_cleanups
; ccd
> ocd
; --ccd
, cc
= cc
->ncl
)
4757 for (; cc
!= oc
; cc
= cc
->ncl
, oc
= oc
->ncl
, --ccd
)
4760 try_call_scope_cleanup(cc
);
4763 ST_FUNC
void unary(void)
4765 int n
, t
, align
, size
, r
, sizeof_caller
;
4770 sizeof_caller
= in_sizeof
;
4773 /* XXX: GCC 2.95.3 does not generate a table although it should be
4781 #ifdef TCC_TARGET_PE
4782 t
= VT_SHORT
|VT_UNSIGNED
;
4790 vsetc(&type
, VT_CONST
, &tokc
);
4794 t
= VT_INT
| VT_UNSIGNED
;
4800 t
= VT_LLONG
| VT_UNSIGNED
;
4812 t
= (LONG_SIZE
== 8 ? VT_LLONG
: VT_INT
) | VT_LONG
;
4815 t
= (LONG_SIZE
== 8 ? VT_LLONG
: VT_INT
) | VT_LONG
| VT_UNSIGNED
;
4817 case TOK___FUNCTION__
:
4819 goto tok_identifier
;
4825 /* special function name identifier */
4826 len
= strlen(funcname
) + 1;
4827 /* generate char[len] type */
4832 vpush_ref(&type
, data_section
, data_section
->data_offset
, len
);
4833 if (!NODATA_WANTED
) {
4834 ptr
= section_ptr_add(data_section
, len
);
4835 memcpy(ptr
, funcname
, len
);
4841 #ifdef TCC_TARGET_PE
4842 t
= VT_SHORT
| VT_UNSIGNED
;
4848 /* string parsing */
4850 if (tcc_state
->char_is_unsigned
)
4851 t
= VT_BYTE
| VT_UNSIGNED
;
4853 if (tcc_state
->warn_write_strings
)
4858 memset(&ad
, 0, sizeof(AttributeDef
));
4859 decl_initializer_alloc(&type
, &ad
, VT_CONST
, 2, 0, 0);
4864 if (parse_btype(&type
, &ad
)) {
4865 type_decl(&type
, &ad
, &n
, TYPE_ABSTRACT
);
4867 /* check ISOC99 compound literal */
4869 /* data is allocated locally by default */
4874 /* all except arrays are lvalues */
4875 if (!(type
.t
& VT_ARRAY
))
4876 r
|= lvalue_type(type
.t
);
4877 memset(&ad
, 0, sizeof(AttributeDef
));
4878 decl_initializer_alloc(&type
, &ad
, r
, 1, 0, 0);
4880 if (sizeof_caller
) {
4887 } else if (tok
== '{') {
4888 int saved_nocode_wanted
= nocode_wanted
;
4890 tcc_error("expected constant");
4891 /* save all registers */
4893 /* statement expression : we do not accept break/continue
4894 inside as GCC does. We do retain the nocode_wanted state,
4895 as statement expressions can't ever be entered from the
4896 outside, so any reactivation of code emission (from labels
4897 or loop heads) can be disabled again after the end of it. */
4898 block(NULL
, NULL
, 1);
4899 nocode_wanted
= saved_nocode_wanted
;
4914 /* functions names must be treated as function pointers,
4915 except for unary '&' and sizeof. Since we consider that
4916 functions are not lvalues, we only have to handle it
4917 there and in function calls. */
4918 /* arrays can also be used although they are not lvalues */
4919 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
&&
4920 !(vtop
->type
.t
& VT_ARRAY
))
4922 mk_pointer(&vtop
->type
);
4928 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
4929 gen_cast_s(VT_BOOL
);
4930 vtop
->c
.i
= !vtop
->c
.i
;
4931 } else if ((vtop
->r
& VT_VALMASK
) == VT_CMP
)
4935 vseti(VT_JMP
, gvtst(1, 0));
4947 if ((vtop
->type
.t
& VT_BTYPE
) == VT_PTR
)
4948 tcc_error("pointer not accepted for unary plus");
4949 /* In order to force cast, we add zero, except for floating point
4950 where we really need an noop (otherwise -0.0 will be transformed
4952 if (!is_float(vtop
->type
.t
)) {
4964 expr_type(&type
, unary
); /* Perform a in_sizeof = 0; */
4965 s
= vtop
[1].sym
; /* hack: accessing previous vtop */
4966 size
= type_size(&type
, &align
);
4967 if (s
&& s
->a
.aligned
)
4968 align
= 1 << (s
->a
.aligned
- 1);
4969 if (t
== TOK_SIZEOF
) {
4970 if (!(type
.t
& VT_VLA
)) {
4972 tcc_error("sizeof applied to an incomplete type");
4975 vla_runtime_type_size(&type
, &align
);
4980 vtop
->type
.t
|= VT_UNSIGNED
;
4983 case TOK_builtin_expect
:
4984 /* __builtin_expect is a no-op for now */
4985 parse_builtin_params(0, "ee");
4988 case TOK_builtin_types_compatible_p
:
4989 parse_builtin_params(0, "tt");
4990 vtop
[-1].type
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
4991 vtop
[0].type
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
4992 n
= is_compatible_types(&vtop
[-1].type
, &vtop
[0].type
);
4996 case TOK_builtin_choose_expr
:
5023 case TOK_builtin_constant_p
:
5024 parse_builtin_params(1, "e");
5025 n
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
5029 case TOK_builtin_frame_address
:
5030 case TOK_builtin_return_address
:
5036 if (tok
!= TOK_CINT
) {
5037 tcc_error("%s only takes positive integers",
5038 tok1
== TOK_builtin_return_address
?
5039 "__builtin_return_address" :
5040 "__builtin_frame_address");
5042 level
= (uint32_t)tokc
.i
;
5047 vset(&type
, VT_LOCAL
, 0); /* local frame */
5049 mk_pointer(&vtop
->type
);
5050 indir(); /* -> parent frame */
5052 if (tok1
== TOK_builtin_return_address
) {
5053 // assume return address is just above frame pointer on stack
5056 mk_pointer(&vtop
->type
);
5061 #ifdef TCC_TARGET_X86_64
5062 #ifdef TCC_TARGET_PE
5063 case TOK_builtin_va_start
:
5064 parse_builtin_params(0, "ee");
5065 r
= vtop
->r
& VT_VALMASK
;
5069 tcc_error("__builtin_va_start expects a local variable");
5071 vtop
->type
= char_pointer_type
;
5076 case TOK_builtin_va_arg_types
:
5077 parse_builtin_params(0, "t");
5078 vpushi(classify_x86_64_va_arg(&vtop
->type
));
5085 #ifdef TCC_TARGET_ARM64
5086 case TOK___va_start
: {
5087 parse_builtin_params(0, "ee");
5091 vtop
->type
.t
= VT_VOID
;
5094 case TOK___va_arg
: {
5095 parse_builtin_params(0, "et");
5103 case TOK___arm64_clear_cache
: {
5104 parse_builtin_params(0, "ee");
5107 vtop
->type
.t
= VT_VOID
;
5111 /* pre operations */
5122 t
= vtop
->type
.t
& VT_BTYPE
;
5124 /* In IEEE negate(x) isn't subtract(0,x), but rather
5128 vtop
->c
.f
= -1.0 * 0.0;
5129 else if (t
== VT_DOUBLE
)
5130 vtop
->c
.d
= -1.0 * 0.0;
5132 vtop
->c
.ld
= -1.0 * 0.0;
5140 goto tok_identifier
;
5142 /* allow to take the address of a label */
5143 if (tok
< TOK_UIDENT
)
5144 expect("label identifier");
5145 s
= label_find(tok
);
5147 s
= label_push(&global_label_stack
, tok
, LABEL_FORWARD
);
5149 if (s
->r
== LABEL_DECLARED
)
5150 s
->r
= LABEL_FORWARD
;
5153 s
->type
.t
= VT_VOID
;
5154 mk_pointer(&s
->type
);
5155 s
->type
.t
|= VT_STATIC
;
5157 vpushsym(&s
->type
, s
);
5163 CType controlling_type
;
5164 int has_default
= 0;
5167 TokenString
*str
= NULL
;
5168 int saved_const_wanted
= const_wanted
;
5173 expr_type(&controlling_type
, expr_eq
);
5174 controlling_type
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
| VT_ARRAY
);
5175 if ((controlling_type
.t
& VT_BTYPE
) == VT_FUNC
)
5176 mk_pointer(&controlling_type
);
5177 const_wanted
= saved_const_wanted
;
5181 if (tok
== TOK_DEFAULT
) {
5183 tcc_error("too many 'default'");
5189 AttributeDef ad_tmp
;
5192 parse_btype(&cur_type
, &ad_tmp
);
5193 type_decl(&cur_type
, &ad_tmp
, &itmp
, TYPE_ABSTRACT
);
5194 if (compare_types(&controlling_type
, &cur_type
, 0)) {
5196 tcc_error("type match twice");
5206 skip_or_save_block(&str
);
5208 skip_or_save_block(NULL
);
5215 type_to_str(buf
, sizeof buf
, &controlling_type
, NULL
);
5216 tcc_error("type '%s' does not match any association", buf
);
5218 begin_macro(str
, 1);
5227 // special qnan , snan and infinity values
5232 vtop
->type
.t
= VT_FLOAT
;
5237 goto special_math_val
;
5240 goto special_math_val
;
5247 expect("identifier");
5249 if (!s
|| IS_ASM_SYM(s
)) {
5250 const char *name
= get_tok_str(t
, NULL
);
5252 tcc_error("'%s' undeclared", name
);
5253 /* for simple function calls, we tolerate undeclared
5254 external reference to int() function */
5255 if (tcc_state
->warn_implicit_function_declaration
5256 #ifdef TCC_TARGET_PE
5257 /* people must be warned about using undeclared WINAPI functions
5258 (which usually start with uppercase letter) */
5259 || (name
[0] >= 'A' && name
[0] <= 'Z')
5262 tcc_warning("implicit declaration of function '%s'", name
);
5263 s
= external_global_sym(t
, &func_old_type
, 0);
5267 /* A symbol that has a register is a local register variable,
5268 which starts out as VT_LOCAL value. */
5269 if ((r
& VT_VALMASK
) < VT_CONST
)
5270 r
= (r
& ~VT_VALMASK
) | VT_LOCAL
;
5272 vset(&s
->type
, r
, s
->c
);
5273 /* Point to s as backpointer (even without r&VT_SYM).
5274 Will be used by at least the x86 inline asm parser for
5280 } else if (r
== VT_CONST
&& IS_ENUM_VAL(s
->type
.t
)) {
5281 vtop
->c
.i
= s
->enum_val
;
5286 /* post operations */
5288 if (tok
== TOK_INC
|| tok
== TOK_DEC
) {
5291 } else if (tok
== '.' || tok
== TOK_ARROW
|| tok
== TOK_CDOUBLE
) {
5294 if (tok
== TOK_ARROW
)
5296 qualifiers
= vtop
->type
.t
& (VT_CONSTANT
| VT_VOLATILE
);
5299 /* expect pointer on structure */
5300 if ((vtop
->type
.t
& VT_BTYPE
) != VT_STRUCT
)
5301 expect("struct or union");
5302 if (tok
== TOK_CDOUBLE
)
5303 expect("field name");
5305 if (tok
== TOK_CINT
|| tok
== TOK_CUINT
)
5306 expect("field name");
5307 s
= find_field(&vtop
->type
, tok
);
5309 tcc_error("field not found: %s", get_tok_str(tok
& ~SYM_FIELD
, &tokc
));
5310 /* add field offset to pointer */
5311 vtop
->type
= char_pointer_type
; /* change type to 'char *' */
5314 /* change type to field type, and set to lvalue */
5315 vtop
->type
= s
->type
;
5316 vtop
->type
.t
|= qualifiers
;
5317 /* an array is never an lvalue */
5318 if (!(vtop
->type
.t
& VT_ARRAY
)) {
5319 vtop
->r
|= lvalue_type(vtop
->type
.t
);
5320 #ifdef CONFIG_TCC_BCHECK
5321 /* if bound checking, the referenced pointer must be checked */
5322 if (tcc_state
->do_bounds_check
&& (vtop
->r
& VT_VALMASK
) != VT_LOCAL
)
5323 vtop
->r
|= VT_MUSTBOUND
;
5327 } else if (tok
== '[') {
5333 } else if (tok
== '(') {
5336 int nb_args
, ret_nregs
, ret_align
, regsize
, variadic
;
5339 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
) {
5340 /* pointer test (no array accepted) */
5341 if ((vtop
->type
.t
& (VT_BTYPE
| VT_ARRAY
)) == VT_PTR
) {
5342 vtop
->type
= *pointed_type(&vtop
->type
);
5343 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
)
5347 expect("function pointer");
5350 vtop
->r
&= ~VT_LVAL
; /* no lvalue */
5352 /* get return type */
5355 sa
= s
->next
; /* first parameter */
5356 nb_args
= regsize
= 0;
5358 /* compute first implicit argument if a structure is returned */
5359 if ((s
->type
.t
& VT_BTYPE
) == VT_STRUCT
) {
5360 variadic
= (s
->f
.func_type
== FUNC_ELLIPSIS
);
5361 ret_nregs
= gfunc_sret(&s
->type
, variadic
, &ret
.type
,
5362 &ret_align
, ®size
);
5364 /* get some space for the returned structure */
5365 size
= type_size(&s
->type
, &align
);
5366 #ifdef TCC_TARGET_ARM64
5367 /* On arm64, a small struct is return in registers.
5368 It is much easier to write it to memory if we know
5369 that we are allowed to write some extra bytes, so
5370 round the allocated space up to a power of 2: */
5372 while (size
& (size
- 1))
5373 size
= (size
| (size
- 1)) + 1;
5375 loc
= (loc
- size
) & -align
;
5377 ret
.r
= VT_LOCAL
| VT_LVAL
;
5378 /* pass it as 'int' to avoid structure arg passing
5380 vseti(VT_LOCAL
, loc
);
5390 /* return in register */
5391 if (is_float(ret
.type
.t
)) {
5392 ret
.r
= reg_fret(ret
.type
.t
);
5393 #ifdef TCC_TARGET_X86_64
5394 if ((ret
.type
.t
& VT_BTYPE
) == VT_QFLOAT
)
5398 #ifndef TCC_TARGET_ARM64
5399 #ifdef TCC_TARGET_X86_64
5400 if ((ret
.type
.t
& VT_BTYPE
) == VT_QLONG
)
5402 if ((ret
.type
.t
& VT_BTYPE
) == VT_LLONG
)
5413 gfunc_param_typed(s
, sa
);
5423 tcc_error("too few arguments to function");
5425 gfunc_call(nb_args
);
5428 for (r
= ret
.r
+ ret_nregs
+ !ret_nregs
; r
-- > ret
.r
;) {
5429 vsetc(&ret
.type
, r
, &ret
.c
);
5430 vtop
->r2
= ret
.r2
; /* Loop only happens when r2 is VT_CONST */
5433 /* handle packed struct return */
5434 if (((s
->type
.t
& VT_BTYPE
) == VT_STRUCT
) && ret_nregs
) {
5437 size
= type_size(&s
->type
, &align
);
5438 /* We're writing whole regs often, make sure there's enough
5439 space. Assume register size is power of 2. */
5440 if (regsize
> align
)
5442 loc
= (loc
- size
) & -align
;
5446 vset(&ret
.type
, VT_LOCAL
| VT_LVAL
, addr
+ offset
);
5450 if (--ret_nregs
== 0)
5454 vset(&s
->type
, VT_LOCAL
| VT_LVAL
, addr
);
5462 ST_FUNC
void expr_prod(void)
5467 while (tok
== '*' || tok
== '/' || tok
== '%') {
5475 ST_FUNC
void expr_sum(void)
5480 while (tok
== '+' || tok
== '-') {
5488 static void expr_shift(void)
5493 while (tok
== TOK_SHL
|| tok
== TOK_SAR
) {
5501 static void expr_cmp(void)
5506 while ((tok
>= TOK_ULE
&& tok
<= TOK_GT
) ||
5507 tok
== TOK_ULT
|| tok
== TOK_UGE
) {
5515 static void expr_cmpeq(void)
5520 while (tok
== TOK_EQ
|| tok
== TOK_NE
) {
5528 static void expr_and(void)
5531 while (tok
== '&') {
5538 static void expr_xor(void)
5541 while (tok
== '^') {
5548 static void expr_or(void)
5551 while (tok
== '|') {
5558 static void expr_land(void)
5561 if (tok
== TOK_LAND
) {
5564 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
5565 gen_cast_s(VT_BOOL
);
5570 while (tok
== TOK_LAND
) {
5586 if (tok
!= TOK_LAND
) {
5599 static void expr_lor(void)
5602 if (tok
== TOK_LOR
) {
5605 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
5606 gen_cast_s(VT_BOOL
);
5611 while (tok
== TOK_LOR
) {
5627 if (tok
!= TOK_LOR
) {
5640 /* Assuming vtop is a value used in a conditional context
5641 (i.e. compared with zero) return 0 if it's false, 1 if
5642 true and -1 if it can't be statically determined. */
5643 static int condition_3way(void)
5646 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
&&
5647 (!(vtop
->r
& VT_SYM
) || !vtop
->sym
->a
.weak
)) {
5649 gen_cast_s(VT_BOOL
);
5656 static void expr_cond(void)
5658 int tt
, u
, r1
, r2
, rc
, t1
, t2
, bt1
, bt2
, islv
, c
, g
;
5660 CType type
, type1
, type2
;
5665 c
= condition_3way();
5666 g
= (tok
== ':' && gnu_ext
);
5668 /* needed to avoid having different registers saved in
5670 if (is_float(vtop
->type
.t
)) {
5672 #ifdef TCC_TARGET_X86_64
5673 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LDOUBLE
) {
5697 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FUNC
)
5698 mk_pointer(&vtop
->type
);
5700 sv
= *vtop
; /* save value to handle it later */
5701 vtop
--; /* no vpop so that FP stack is not flushed */
5717 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FUNC
)
5718 mk_pointer(&vtop
->type
);
5721 bt1
= t1
& VT_BTYPE
;
5723 bt2
= t2
& VT_BTYPE
;
5727 /* cast operands to correct type according to ISOC rules */
5728 if (bt1
== VT_VOID
|| bt2
== VT_VOID
) {
5729 type
.t
= VT_VOID
; /* NOTE: as an extension, we accept void on only one side */
5730 } else if (is_float(bt1
) || is_float(bt2
)) {
5731 if (bt1
== VT_LDOUBLE
|| bt2
== VT_LDOUBLE
) {
5732 type
.t
= VT_LDOUBLE
;
5734 } else if (bt1
== VT_DOUBLE
|| bt2
== VT_DOUBLE
) {
5739 } else if (bt1
== VT_LLONG
|| bt2
== VT_LLONG
) {
5740 /* cast to biggest op */
5741 type
.t
= VT_LLONG
| VT_LONG
;
5742 if (bt1
== VT_LLONG
)
5744 if (bt2
== VT_LLONG
)
5746 /* convert to unsigned if it does not fit in a long long */
5747 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
) ||
5748 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
))
5749 type
.t
|= VT_UNSIGNED
;
5750 } else if (bt1
== VT_PTR
|| bt2
== VT_PTR
) {
5751 /* http://port70.net/~nsz/c/c99/n1256.html#6.5.15p6 */
5752 /* If one is a null ptr constant the result type
5754 if (is_null_pointer (vtop
)) type
= type1
;
5755 else if (is_null_pointer (&sv
)) type
= type2
;
5756 else if (bt1
!= bt2
)
5757 tcc_error("incompatible types in conditional expressions");
5759 CType
*pt1
= pointed_type(&type1
);
5760 CType
*pt2
= pointed_type(&type2
);
5761 int pbt1
= pt1
->t
& VT_BTYPE
;
5762 int pbt2
= pt2
->t
& VT_BTYPE
;
5763 int newquals
, copied
= 0;
5764 /* pointers to void get preferred, otherwise the
5765 pointed to types minus qualifs should be compatible */
5766 type
= (pbt1
== VT_VOID
) ? type1
: type2
;
5767 if (pbt1
!= VT_VOID
&& pbt2
!= VT_VOID
) {
5768 if(!compare_types(pt1
, pt2
, 1/*unqualif*/))
5769 tcc_warning("pointer type mismatch in conditional expression\n");
5771 /* combine qualifs */
5772 newquals
= ((pt1
->t
| pt2
->t
) & (VT_CONSTANT
| VT_VOLATILE
));
5773 if ((~pointed_type(&type
)->t
& (VT_CONSTANT
| VT_VOLATILE
))
5776 /* copy the pointer target symbol */
5777 type
.ref
= sym_push(SYM_FIELD
, &type
.ref
->type
,
5780 pointed_type(&type
)->t
|= newquals
;
5782 /* pointers to incomplete arrays get converted to
5783 pointers to completed ones if possible */
5784 if (pt1
->t
& VT_ARRAY
5785 && pt2
->t
& VT_ARRAY
5786 && pointed_type(&type
)->ref
->c
< 0
5787 && (pt1
->ref
->c
> 0 || pt2
->ref
->c
> 0))
5790 type
.ref
= sym_push(SYM_FIELD
, &type
.ref
->type
,
5792 pointed_type(&type
)->ref
=
5793 sym_push(SYM_FIELD
, &pointed_type(&type
)->ref
->type
,
5794 0, pointed_type(&type
)->ref
->c
);
5795 pointed_type(&type
)->ref
->c
=
5796 0 < pt1
->ref
->c
? pt1
->ref
->c
: pt2
->ref
->c
;
5799 } else if (bt1
== VT_STRUCT
|| bt2
== VT_STRUCT
) {
5800 /* XXX: test structure compatibility */
5801 type
= bt1
== VT_STRUCT
? type1
: type2
;
5803 /* integer operations */
5804 type
.t
= VT_INT
| (VT_LONG
& (t1
| t2
));
5805 /* convert to unsigned if it does not fit in an integer */
5806 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
) ||
5807 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
))
5808 type
.t
|= VT_UNSIGNED
;
5810 /* keep structs lvalue by transforming `(expr ? a : b)` to `*(expr ? &a : &b)` so
5811 that `(expr ? a : b).mem` does not error with "lvalue expected" */
5812 islv
= (vtop
->r
& VT_LVAL
) && (sv
.r
& VT_LVAL
) && VT_STRUCT
== (type
.t
& VT_BTYPE
);
5814 /* now we convert second operand */
5818 mk_pointer(&vtop
->type
);
5820 } else if (VT_STRUCT
== (vtop
->type
.t
& VT_BTYPE
))
5825 if (is_float(type
.t
)) {
5827 #ifdef TCC_TARGET_X86_64
5828 if ((type
.t
& VT_BTYPE
) == VT_LDOUBLE
) {
5832 } else if ((type
.t
& VT_BTYPE
) == VT_LLONG
) {
5833 /* for long longs, we use fixed registers to avoid having
5834 to handle a complicated move */
5845 /* this is horrible, but we must also convert first
5851 mk_pointer(&vtop
->type
);
5853 } else if (VT_STRUCT
== (vtop
->type
.t
& VT_BTYPE
))
5857 if (c
< 0 || islv
) {
5859 move_reg(r2
, r1
, type
.t
);
5869 static void expr_eq(void)
5875 (tok
>= TOK_A_MOD
&& tok
<= TOK_A_DIV
) ||
5876 tok
== TOK_A_XOR
|| tok
== TOK_A_OR
||
5877 tok
== TOK_A_SHL
|| tok
== TOK_A_SAR
) {
5892 ST_FUNC
void gexpr(void)
5903 /* parse a constant expression and return value in vtop. */
5904 static void expr_const1(void)
5913 /* parse an integer constant and return its value. */
5914 static inline int64_t expr_const64(void)
5918 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) != VT_CONST
)
5919 expect("constant expression");
5925 /* parse an integer constant and return its value.
5926 Complain if it doesn't fit 32bit (signed or unsigned). */
5927 ST_FUNC
int expr_const(void)
5930 int64_t wc
= expr_const64();
5932 if (c
!= wc
&& (unsigned)c
!= wc
)
5933 tcc_error("constant exceeds 32 bit");
5937 /* return the label token if current token is a label, otherwise
5939 static int is_label(void)
5943 /* fast test first */
5944 if (tok
< TOK_UIDENT
)
5946 /* no need to save tokc because tok is an identifier */
5952 unget_tok(last_tok
);
5957 #ifndef TCC_TARGET_ARM64
5958 static void gfunc_return(CType
*func_type
)
5960 if ((func_type
->t
& VT_BTYPE
) == VT_STRUCT
) {
5961 CType type
, ret_type
;
5962 int ret_align
, ret_nregs
, regsize
;
5963 ret_nregs
= gfunc_sret(func_type
, func_var
, &ret_type
,
5964 &ret_align
, ®size
);
5965 if (0 == ret_nregs
) {
5966 /* if returning structure, must copy it to implicit
5967 first pointer arg location */
5970 vset(&type
, VT_LOCAL
| VT_LVAL
, func_vc
);
5973 /* copy structure value to pointer */
5976 /* returning structure packed into registers */
5977 int r
, size
, addr
, align
;
5978 size
= type_size(func_type
,&align
);
5979 if ((vtop
->r
!= (VT_LOCAL
| VT_LVAL
) ||
5980 (vtop
->c
.i
& (ret_align
-1)))
5981 && (align
& (ret_align
-1))) {
5982 loc
= (loc
- size
) & -ret_align
;
5985 vset(&type
, VT_LOCAL
| VT_LVAL
, addr
);
5989 vset(&ret_type
, VT_LOCAL
| VT_LVAL
, addr
);
5991 vtop
->type
= ret_type
;
5992 if (is_float(ret_type
.t
))
5993 r
= rc_fret(ret_type
.t
);
6004 if (--ret_nregs
== 0)
6006 /* We assume that when a structure is returned in multiple
6007 registers, their classes are consecutive values of the
6010 vtop
->c
.i
+= regsize
;
6014 } else if (is_float(func_type
->t
)) {
6015 gv(rc_fret(func_type
->t
));
6019 vtop
--; /* NOT vpop() because on x86 it would flush the fp stack */
6023 static int case_cmp(const void *pa
, const void *pb
)
6025 int64_t a
= (*(struct case_t
**) pa
)->v1
;
6026 int64_t b
= (*(struct case_t
**) pb
)->v1
;
6027 return a
< b
? -1 : a
> b
;
6030 static void gcase(struct case_t
**base
, int len
, int *bsym
)
6034 int ll
= (vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
;
6052 gtst_addr(0, p
->sym
); /* v1 <= x <= v2 */
6054 gcase(base
, len
/2, bsym
);
6055 if (cur_switch
->def_sym
)
6056 gjmp_addr(cur_switch
->def_sym
);
6058 *bsym
= gjmp(*bsym
);
6062 base
+= e
; len
-= e
;
6072 if (p
->v1
== p
->v2
) {
6074 gtst_addr(0, p
->sym
);
6084 gtst_addr(0, p
->sym
);
6090 static void block(int *bsym
, int *csym
, int is_expr
)
6092 int a
, b
, c
, d
, cond
;
6095 /* generate line number info */
6096 if (tcc_state
->do_debug
)
6097 tcc_debug_line(tcc_state
);
6100 /* default return value is (void) */
6102 vtop
->type
.t
= VT_VOID
;
6105 if (tok
== TOK_IF
) {
6107 int saved_nocode_wanted
= nocode_wanted
;
6112 cond
= condition_3way();
6118 nocode_wanted
|= 0x20000000;
6119 block(bsym
, csym
, 0);
6121 nocode_wanted
= saved_nocode_wanted
;
6122 if (tok
== TOK_ELSE
) {
6127 nocode_wanted
|= 0x20000000;
6128 block(bsym
, csym
, 0);
6129 gsym(d
); /* patch else jmp */
6131 nocode_wanted
= saved_nocode_wanted
;
6134 } else if (tok
== TOK_WHILE
) {
6135 int saved_nocode_wanted
;
6136 nocode_wanted
&= ~0x20000000;
6146 saved_nocode_wanted
= nocode_wanted
;
6148 nocode_wanted
= saved_nocode_wanted
;
6153 } else if (tok
== '{') {
6154 Sym
*llabel
, *lcleanup
;
6155 int block_vla_sp_loc
= vla_sp_loc
, saved_vlas_in_scope
= vlas_in_scope
;
6156 int lncleanups
= ncleanups
;
6159 /* record local declaration stack position */
6161 llabel
= local_label_stack
;
6162 lcleanup
= current_cleanups
;
6165 /* handle local labels declarations */
6166 while (tok
== TOK_LABEL
) {
6169 if (tok
< TOK_UIDENT
)
6170 expect("label identifier");
6171 label_push(&local_label_stack
, tok
, LABEL_DECLARED
);
6181 while (tok
!= '}') {
6182 if ((a
= is_label()))
6189 block(bsym
, csym
, is_expr
);
6193 if (current_cleanups
!= lcleanup
) {
6197 for (pg
= &pending_gotos
; (g
= *pg
) && g
->c
> lncleanups
;)
6198 if (g
->prev_tok
->r
& LABEL_FORWARD
) {
6203 try_call_scope_cleanup(lcleanup
);
6204 pcl
->jnext
= gjmp(0);
6206 goto remove_pending
;
6215 if (!nocode_wanted
) {
6216 try_call_scope_cleanup(lcleanup
);
6220 current_cleanups
= lcleanup
;
6221 ncleanups
= lncleanups
;
6222 /* pop locally defined labels */
6223 label_pop(&local_label_stack
, llabel
, is_expr
);
6224 /* pop locally defined symbols */
6226 /* In the is_expr case (a statement expression is finished here),
6227 vtop might refer to symbols on the local_stack. Either via the
6228 type or via vtop->sym. We can't pop those nor any that in turn
6229 might be referred to. To make it easier we don't roll back
6230 any symbols in that case; some upper level call to block() will
6231 do that. We do have to remove such symbols from the lookup
6232 tables, though. sym_pop will do that. */
6233 sym_pop(&local_stack
, s
, is_expr
);
6235 /* Pop VLA frames and restore stack pointer if required */
6236 if (vlas_in_scope
> saved_vlas_in_scope
) {
6237 vla_sp_loc
= saved_vlas_in_scope
? block_vla_sp_loc
: vla_sp_root_loc
;
6240 vlas_in_scope
= saved_vlas_in_scope
;
6243 } else if (tok
== TOK_RETURN
) {
6247 gen_assign_cast(&func_vt
);
6248 try_call_scope_cleanup(NULL
);
6249 if ((func_vt
.t
& VT_BTYPE
) == VT_VOID
)
6252 gfunc_return(&func_vt
);
6254 try_call_scope_cleanup(NULL
);
6257 /* jump unless last stmt in top-level block */
6258 if (tok
!= '}' || local_scope
!= 1)
6260 nocode_wanted
|= 0x20000000;
6261 } else if (tok
== TOK_BREAK
) {
6264 tcc_error("cannot break");
6265 *bsym
= gjmp(*bsym
);
6268 nocode_wanted
|= 0x20000000;
6269 } else if (tok
== TOK_CONTINUE
) {
6272 tcc_error("cannot continue");
6273 vla_sp_restore_root();
6274 *csym
= gjmp(*csym
);
6277 nocode_wanted
|= 0x20000000;
6278 } else if (tok
== TOK_FOR
) {
6280 int saved_nocode_wanted
;
6281 nocode_wanted
&= ~0x20000000;
6287 /* c99 for-loop init decl? */
6288 if (!decl0(VT_LOCAL
, 1, NULL
)) {
6289 /* no, regular for-loop init expr */
6315 saved_nocode_wanted
= nocode_wanted
;
6317 nocode_wanted
= saved_nocode_wanted
;
6322 sym_pop(&local_stack
, s
, 0);
6325 if (tok
== TOK_DO
) {
6326 int saved_nocode_wanted
;
6327 nocode_wanted
&= ~0x20000000;
6333 saved_nocode_wanted
= nocode_wanted
;
6339 nocode_wanted
= saved_nocode_wanted
;
6343 nocode_wanted
= saved_nocode_wanted
;
6348 if (tok
== TOK_SWITCH
) {
6349 struct switch_t
*saved
, sw
;
6350 int saved_nocode_wanted
= nocode_wanted
;
6356 switchval
= *vtop
--;
6358 b
= gjmp(0); /* jump to first case */
6359 sw
.p
= NULL
; sw
.n
= 0; sw
.def_sym
= 0;
6363 nocode_wanted
= saved_nocode_wanted
;
6364 a
= gjmp(a
); /* add implicit break */
6367 qsort(sw
.p
, sw
.n
, sizeof(void*), case_cmp
);
6368 for (b
= 1; b
< sw
.n
; b
++)
6369 if (sw
.p
[b
- 1]->v2
>= sw
.p
[b
]->v1
)
6370 tcc_error("duplicate case value");
6371 /* Our switch table sorting is signed, so the compared
6372 value needs to be as well when it's 64bit. */
6373 if ((switchval
.type
.t
& VT_BTYPE
) == VT_LLONG
)
6374 switchval
.type
.t
&= ~VT_UNSIGNED
;
6376 gcase(sw
.p
, sw
.n
, &a
);
6379 gjmp_addr(sw
.def_sym
);
6380 dynarray_reset(&sw
.p
, &sw
.n
);
6385 if (tok
== TOK_CASE
) {
6386 struct case_t
*cr
= tcc_malloc(sizeof(struct case_t
));
6389 nocode_wanted
&= ~0x20000000;
6391 cr
->v1
= cr
->v2
= expr_const64();
6392 if (gnu_ext
&& tok
== TOK_DOTS
) {
6394 cr
->v2
= expr_const64();
6395 if (cr
->v2
< cr
->v1
)
6396 tcc_warning("empty case range");
6399 dynarray_add(&cur_switch
->p
, &cur_switch
->n
, cr
);
6402 goto block_after_label
;
6404 if (tok
== TOK_DEFAULT
) {
6409 if (cur_switch
->def_sym
)
6410 tcc_error("too many 'default'");
6411 cur_switch
->def_sym
= ind
;
6413 goto block_after_label
;
6415 if (tok
== TOK_GOTO
) {
6417 if (tok
== '*' && gnu_ext
) {
6421 if ((vtop
->type
.t
& VT_BTYPE
) != VT_PTR
)
6424 } else if (tok
>= TOK_UIDENT
) {
6425 s
= label_find(tok
);
6426 /* put forward definition if needed */
6428 s
= label_push(&global_label_stack
, tok
, LABEL_FORWARD
);
6429 else if (s
->r
== LABEL_DECLARED
)
6430 s
->r
= LABEL_FORWARD
;
6432 vla_sp_restore_root();
6433 if (s
->r
& LABEL_FORWARD
) {
6434 /* start new goto chain for cleanups, linked via label->next */
6435 if (current_cleanups
) {
6436 sym_push2(&pending_gotos
, SYM_FIELD
, 0, ncleanups
);
6437 pending_gotos
->prev_tok
= s
;
6438 s
= sym_push2(&s
->next
, SYM_FIELD
, 0, 0);
6439 pending_gotos
->next
= s
;
6441 s
->jnext
= gjmp(s
->jnext
);
6443 try_call_cleanup_goto(s
->cleanupstate
);
6444 gjmp_addr(s
->jnext
);
6448 expect("label identifier");
6451 } else if (tok
== TOK_ASM1
|| tok
== TOK_ASM2
|| tok
== TOK_ASM3
) {
6460 if (s
->r
== LABEL_DEFINED
)
6461 tcc_error("duplicate label '%s'", get_tok_str(s
->v
, NULL
));
6462 s
->r
= LABEL_DEFINED
;
6464 Sym
*pcl
; /* pending cleanup goto */
6465 for (pcl
= s
->next
; pcl
; pcl
= pcl
->prev
)
6467 sym_pop(&s
->next
, NULL
, 0);
6471 s
= label_push(&global_label_stack
, b
, LABEL_DEFINED
);
6474 s
->cleanupstate
= current_cleanups
;
6476 /* we accept this, but it is a mistake */
6478 nocode_wanted
&= ~0x20000000;
6480 tcc_warning("deprecated use of label at end of compound statement");
6484 block(bsym
, csym
, is_expr
);
6487 /* expression case */
6502 /* This skips over a stream of tokens containing balanced {} and ()
6503 pairs, stopping at outer ',' ';' and '}' (or matching '}' if we started
6504 with a '{'). If STR then allocates and stores the skipped tokens
6505 in *STR. This doesn't check if () and {} are nested correctly,
6506 i.e. "({)}" is accepted. */
6507 static void skip_or_save_block(TokenString
**str
)
6509 int braces
= tok
== '{';
6512 *str
= tok_str_alloc();
6514 while ((level
> 0 || (tok
!= '}' && tok
!= ',' && tok
!= ';' && tok
!= ')'))) {
6516 if (tok
== TOK_EOF
) {
6517 if (str
|| level
> 0)
6518 tcc_error("unexpected end of file");
6523 tok_str_add_tok(*str
);
6526 if (t
== '{' || t
== '(') {
6528 } else if (t
== '}' || t
== ')') {
6530 if (level
== 0 && braces
&& t
== '}')
6535 tok_str_add(*str
, -1);
6536 tok_str_add(*str
, 0);
6540 #define EXPR_CONST 1
6543 static void parse_init_elem(int expr_type
)
6545 int saved_global_expr
;
6548 /* compound literals must be allocated globally in this case */
6549 saved_global_expr
= global_expr
;
6552 global_expr
= saved_global_expr
;
6553 /* NOTE: symbols are accepted, as well as lvalue for anon symbols
6554 (compound literals). */
6555 if (((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) != VT_CONST
6556 && ((vtop
->r
& (VT_SYM
|VT_LVAL
)) != (VT_SYM
|VT_LVAL
)
6557 || vtop
->sym
->v
< SYM_FIRST_ANOM
))
6558 #ifdef TCC_TARGET_PE
6559 || ((vtop
->r
& VT_SYM
) && vtop
->sym
->a
.dllimport
)
6562 tcc_error("initializer element is not constant");
6570 /* put zeros for variable based init */
6571 static void init_putz(Section
*sec
, unsigned long c
, int size
)
6574 /* nothing to do because globals are already set to zero */
6576 vpush_global_sym(&func_old_type
, TOK_memset
);
6578 #ifdef TCC_TARGET_ARM
6589 /* t is the array or struct type. c is the array or struct
6590 address. cur_field is the pointer to the current
6591 field, for arrays the 'c' member contains the current start
6592 index. 'size_only' is true if only size info is needed (only used
6593 in arrays). al contains the already initialized length of the
6594 current container (starting at c). This returns the new length of that. */
6595 static int decl_designator(CType
*type
, Section
*sec
, unsigned long c
,
6596 Sym
**cur_field
, int size_only
, int al
)
6599 int index
, index_last
, align
, l
, nb_elems
, elem_size
;
6600 unsigned long corig
= c
;
6604 if (gnu_ext
&& (l
= is_label()) != 0)
6606 /* NOTE: we only support ranges for last designator */
6607 while (nb_elems
== 1 && (tok
== '[' || tok
== '.')) {
6609 if (!(type
->t
& VT_ARRAY
))
6610 expect("array type");
6612 index
= index_last
= expr_const();
6613 if (tok
== TOK_DOTS
&& gnu_ext
) {
6615 index_last
= expr_const();
6619 if (index
< 0 || (s
->c
>= 0 && index_last
>= s
->c
) ||
6621 tcc_error("invalid index");
6623 (*cur_field
)->c
= index_last
;
6624 type
= pointed_type(type
);
6625 elem_size
= type_size(type
, &align
);
6626 c
+= index
* elem_size
;
6627 nb_elems
= index_last
- index
+ 1;
6633 if ((type
->t
& VT_BTYPE
) != VT_STRUCT
)
6634 expect("struct/union type");
6635 f
= find_field(type
, l
);
6648 } else if (!gnu_ext
) {
6652 if (type
->t
& VT_ARRAY
) {
6653 index
= (*cur_field
)->c
;
6654 if (type
->ref
->c
>= 0 && index
>= type
->ref
->c
)
6655 tcc_error("index too large");
6656 type
= pointed_type(type
);
6657 c
+= index
* type_size(type
, &align
);
6660 while (f
&& (f
->v
& SYM_FIRST_ANOM
) && (f
->type
.t
& VT_BITFIELD
))
6661 *cur_field
= f
= f
->next
;
6663 tcc_error("too many field init");
6668 /* must put zero in holes (note that doing it that way
6669 ensures that it even works with designators) */
6670 if (!size_only
&& c
- corig
> al
)
6671 init_putz(sec
, corig
+ al
, c
- corig
- al
);
6672 decl_initializer(type
, sec
, c
, 0, size_only
);
6674 /* XXX: make it more general */
6675 if (!size_only
&& nb_elems
> 1) {
6676 unsigned long c_end
;
6681 vset(type
, VT_LOCAL
|VT_LVAL
, c
);
6682 for (i
= 1; i
< nb_elems
; i
++) {
6683 vset(type
, VT_LOCAL
|VT_LVAL
, c
+ elem_size
* i
);
6688 } else if (!NODATA_WANTED
) {
6689 c_end
= c
+ nb_elems
* elem_size
;
6690 if (c_end
> sec
->data_allocated
)
6691 section_realloc(sec
, c_end
);
6692 src
= sec
->data
+ c
;
6694 for(i
= 1; i
< nb_elems
; i
++) {
6696 memcpy(dst
, src
, elem_size
);
6700 c
+= nb_elems
* type_size(type
, &align
);
6706 /* store a value or an expression directly in global data or in local array */
6707 static void init_putv(CType
*type
, Section
*sec
, unsigned long c
)
6714 dtype
.t
&= ~VT_CONSTANT
; /* need to do that to avoid false warning */
6718 /* XXX: not portable */
6719 /* XXX: generate error if incorrect relocation */
6720 gen_assign_cast(&dtype
);
6721 bt
= type
->t
& VT_BTYPE
;
6723 if ((vtop
->r
& VT_SYM
)
6726 && (bt
!= (PTR_SIZE
== 8 ? VT_LLONG
: VT_INT
)
6727 || (type
->t
& VT_BITFIELD
))
6728 && !((vtop
->r
& VT_CONST
) && vtop
->sym
->v
>= SYM_FIRST_ANOM
)
6730 tcc_error("initializer element is not computable at load time");
6732 if (NODATA_WANTED
) {
6737 size
= type_size(type
, &align
);
6738 section_reserve(sec
, c
+ size
);
6739 ptr
= sec
->data
+ c
;
6741 /* XXX: make code faster ? */
6742 if ((vtop
->r
& (VT_SYM
|VT_CONST
)) == (VT_SYM
|VT_CONST
) &&
6743 vtop
->sym
->v
>= SYM_FIRST_ANOM
&&
6744 /* XXX This rejects compound literals like
6745 '(void *){ptr}'. The problem is that '&sym' is
6746 represented the same way, which would be ruled out
6747 by the SYM_FIRST_ANOM check above, but also '"string"'
6748 in 'char *p = "string"' is represented the same
6749 with the type being VT_PTR and the symbol being an
6750 anonymous one. That is, there's no difference in vtop
6751 between '(void *){x}' and '&(void *){x}'. Ignore
6752 pointer typed entities here. Hopefully no real code
6753 will every use compound literals with scalar type. */
6754 (vtop
->type
.t
& VT_BTYPE
) != VT_PTR
) {
6755 /* These come from compound literals, memcpy stuff over. */
6759 esym
= elfsym(vtop
->sym
);
6760 ssec
= tcc_state
->sections
[esym
->st_shndx
];
6761 memmove (ptr
, ssec
->data
+ esym
->st_value
, size
);
6763 /* We need to copy over all memory contents, and that
6764 includes relocations. Use the fact that relocs are
6765 created it order, so look from the end of relocs
6766 until we hit one before the copied region. */
6767 int num_relocs
= ssec
->reloc
->data_offset
/ sizeof(*rel
);
6768 rel
= (ElfW_Rel
*)(ssec
->reloc
->data
+ ssec
->reloc
->data_offset
);
6769 while (num_relocs
--) {
6771 if (rel
->r_offset
>= esym
->st_value
+ size
)
6773 if (rel
->r_offset
< esym
->st_value
)
6775 /* Note: if the same fields are initialized multiple
6776 times (possible with designators) then we possibly
6777 add multiple relocations for the same offset here.
6778 That would lead to wrong code, the last reloc needs
6779 to win. We clean this up later after the whole
6780 initializer is parsed. */
6781 put_elf_reloca(symtab_section
, sec
,
6782 c
+ rel
->r_offset
- esym
->st_value
,
6783 ELFW(R_TYPE
)(rel
->r_info
),
6784 ELFW(R_SYM
)(rel
->r_info
),
6794 if (type
->t
& VT_BITFIELD
) {
6795 int bit_pos
, bit_size
, bits
, n
;
6796 unsigned char *p
, v
, m
;
6797 bit_pos
= BIT_POS(vtop
->type
.t
);
6798 bit_size
= BIT_SIZE(vtop
->type
.t
);
6799 p
= (unsigned char*)ptr
+ (bit_pos
>> 3);
6800 bit_pos
&= 7, bits
= 0;
6805 v
= vtop
->c
.i
>> bits
<< bit_pos
;
6806 m
= ((1 << n
) - 1) << bit_pos
;
6807 *p
= (*p
& ~m
) | (v
& m
);
6808 bits
+= n
, bit_size
-= n
, bit_pos
= 0, ++p
;
6812 /* XXX: when cross-compiling we assume that each type has the
6813 same representation on host and target, which is likely to
6814 be wrong in the case of long double */
6816 vtop
->c
.i
= vtop
->c
.i
!= 0;
6818 *(char *)ptr
|= vtop
->c
.i
;
6821 *(short *)ptr
|= vtop
->c
.i
;
6824 *(float*)ptr
= vtop
->c
.f
;
6827 *(double *)ptr
= vtop
->c
.d
;
6830 #if defined TCC_IS_NATIVE_387
6831 if (sizeof (long double) >= 10) /* zero pad ten-byte LD */
6832 memcpy(ptr
, &vtop
->c
.ld
, 10);
6834 else if (sizeof (long double) == sizeof (double))
6835 __asm__("fldl %1\nfstpt %0\n" : "=m" (*ptr
) : "m" (vtop
->c
.ld
));
6837 else if (vtop
->c
.ld
== 0.0)
6841 if (sizeof(long double) == LDOUBLE_SIZE
)
6842 *(long double*)ptr
= vtop
->c
.ld
;
6843 else if (sizeof(double) == LDOUBLE_SIZE
)
6844 *(double *)ptr
= (double)vtop
->c
.ld
;
6846 tcc_error("can't cross compile long double constants");
6850 *(long long *)ptr
|= vtop
->c
.i
;
6857 addr_t val
= vtop
->c
.i
;
6859 if (vtop
->r
& VT_SYM
)
6860 greloca(sec
, vtop
->sym
, c
, R_DATA_PTR
, val
);
6862 *(addr_t
*)ptr
|= val
;
6864 if (vtop
->r
& VT_SYM
)
6865 greloc(sec
, vtop
->sym
, c
, R_DATA_PTR
);
6866 *(addr_t
*)ptr
|= val
;
6872 int val
= vtop
->c
.i
;
6874 if (vtop
->r
& VT_SYM
)
6875 greloca(sec
, vtop
->sym
, c
, R_DATA_PTR
, val
);
6879 if (vtop
->r
& VT_SYM
)
6880 greloc(sec
, vtop
->sym
, c
, R_DATA_PTR
);
6889 vset(&dtype
, VT_LOCAL
|VT_LVAL
, c
);
6896 /* 't' contains the type and storage info. 'c' is the offset of the
6897 object in section 'sec'. If 'sec' is NULL, it means stack based
6898 allocation. 'first' is true if array '{' must be read (multi
6899 dimension implicit array init handling). 'size_only' is true if
6900 size only evaluation is wanted (only for arrays). */
6901 static void decl_initializer(CType
*type
, Section
*sec
, unsigned long c
,
6902 int first
, int size_only
)
6904 int len
, n
, no_oblock
, nb
, i
;
6911 /* If we currently are at an '}' or ',' we have read an initializer
6912 element in one of our callers, and not yet consumed it. */
6913 have_elem
= tok
== '}' || tok
== ',';
6914 if (!have_elem
&& tok
!= '{' &&
6915 /* In case of strings we have special handling for arrays, so
6916 don't consume them as initializer value (which would commit them
6917 to some anonymous symbol). */
6918 tok
!= TOK_LSTR
&& tok
!= TOK_STR
&&
6920 parse_init_elem(!sec
? EXPR_ANY
: EXPR_CONST
);
6925 !(type
->t
& VT_ARRAY
) &&
6926 /* Use i_c_parameter_t, to strip toplevel qualifiers.
6927 The source type might have VT_CONSTANT set, which is
6928 of course assignable to non-const elements. */
6929 is_compatible_unqualified_types(type
, &vtop
->type
)) {
6930 init_putv(type
, sec
, c
);
6931 } else if (type
->t
& VT_ARRAY
) {
6934 t1
= pointed_type(type
);
6935 size1
= type_size(t1
, &align1
);
6938 if ((first
&& tok
!= TOK_LSTR
&& tok
!= TOK_STR
) ||
6941 tcc_error("character array initializer must be a literal,"
6942 " optionally enclosed in braces");
6947 /* only parse strings here if correct type (otherwise: handle
6948 them as ((w)char *) expressions */
6949 if ((tok
== TOK_LSTR
&&
6950 #ifdef TCC_TARGET_PE
6951 (t1
->t
& VT_BTYPE
) == VT_SHORT
&& (t1
->t
& VT_UNSIGNED
)
6953 (t1
->t
& VT_BTYPE
) == VT_INT
6955 ) || (tok
== TOK_STR
&& (t1
->t
& VT_BTYPE
) == VT_BYTE
)) {
6957 while (tok
== TOK_STR
|| tok
== TOK_LSTR
) {
6960 /* compute maximum number of chars wanted */
6962 cstr_len
= tokc
.str
.size
;
6964 cstr_len
= tokc
.str
.size
/ sizeof(nwchar_t
);
6967 if (n
>= 0 && nb
> (n
- len
))
6971 tcc_warning("initializer-string for array is too long");
6972 /* in order to go faster for common case (char
6973 string in global variable, we handle it
6975 if (sec
&& tok
== TOK_STR
&& size1
== 1) {
6977 memcpy(sec
->data
+ c
+ len
, tokc
.str
.data
, nb
);
6981 ch
= ((unsigned char *)tokc
.str
.data
)[i
];
6983 ch
= ((nwchar_t
*)tokc
.str
.data
)[i
];
6985 init_putv(t1
, sec
, c
+ (len
+ i
) * size1
);
6992 /* only add trailing zero if enough storage (no
6993 warning in this case since it is standard) */
6994 if (n
< 0 || len
< n
) {
6997 init_putv(t1
, sec
, c
+ (len
* size1
));
7008 while (tok
!= '}' || have_elem
) {
7009 len
= decl_designator(type
, sec
, c
, &f
, size_only
, len
);
7011 if (type
->t
& VT_ARRAY
) {
7013 /* special test for multi dimensional arrays (may not
7014 be strictly correct if designators are used at the
7016 if (no_oblock
&& len
>= n
*size1
)
7019 if (s
->type
.t
== VT_UNION
)
7023 if (no_oblock
&& f
== NULL
)
7032 /* put zeros at the end */
7033 if (!size_only
&& len
< n
*size1
)
7034 init_putz(sec
, c
+ len
, n
*size1
- len
);
7037 /* patch type size if needed, which happens only for array types */
7039 s
->c
= size1
== 1 ? len
: ((len
+ size1
- 1)/size1
);
7040 } else if ((type
->t
& VT_BTYPE
) == VT_STRUCT
) {
7043 if (first
|| tok
== '{') {
7051 } else if (tok
== '{') {
7053 decl_initializer(type
, sec
, c
, first
, size_only
);
7055 } else if (size_only
) {
7056 /* If we supported only ISO C we wouldn't have to accept calling
7057 this on anything than an array size_only==1 (and even then
7058 only on the outermost level, so no recursion would be needed),
7059 because initializing a flex array member isn't supported.
7060 But GNU C supports it, so we need to recurse even into
7061 subfields of structs and arrays when size_only is set. */
7062 /* just skip expression */
7063 skip_or_save_block(NULL
);
7066 /* This should happen only when we haven't parsed
7067 the init element above for fear of committing a
7068 string constant to memory too early. */
7069 if (tok
!= TOK_STR
&& tok
!= TOK_LSTR
)
7070 expect("string constant");
7071 parse_init_elem(!sec
? EXPR_ANY
: EXPR_CONST
);
7073 init_putv(type
, sec
, c
);
7077 /* parse an initializer for type 't' if 'has_init' is non zero, and
7078 allocate space in local or global data space ('r' is either
7079 VT_LOCAL or VT_CONST). If 'v' is non zero, then an associated
7080 variable 'v' of scope 'scope' is declared before initializers
7081 are parsed. If 'v' is zero, then a reference to the new object
7082 is put in the value stack. If 'has_init' is 2, a special parsing
7083 is done to handle string constants. */
7084 static void decl_initializer_alloc(CType
*type
, AttributeDef
*ad
, int r
,
7085 int has_init
, int v
, int scope
)
7087 int size
, align
, addr
;
7088 TokenString
*init_str
= NULL
;
7091 Sym
*flexible_array
;
7093 int saved_nocode_wanted
= nocode_wanted
;
7094 #ifdef CONFIG_TCC_BCHECK
7098 /* Always allocate static or global variables */
7099 if (v
&& (r
& VT_VALMASK
) == VT_CONST
)
7100 nocode_wanted
|= 0x80000000;
7102 #ifdef CONFIG_TCC_BCHECK
7103 bcheck
= tcc_state
->do_bounds_check
&& !NODATA_WANTED
;
7106 flexible_array
= NULL
;
7107 if ((type
->t
& VT_BTYPE
) == VT_STRUCT
) {
7108 Sym
*field
= type
->ref
->next
;
7111 field
= field
->next
;
7112 if (field
->type
.t
& VT_ARRAY
&& field
->type
.ref
->c
< 0)
7113 flexible_array
= field
;
7117 size
= type_size(type
, &align
);
7118 /* If unknown size, we must evaluate it before
7119 evaluating initializers because
7120 initializers can generate global data too
7121 (e.g. string pointers or ISOC99 compound
7122 literals). It also simplifies local
7123 initializers handling */
7124 if (size
< 0 || (flexible_array
&& has_init
)) {
7126 tcc_error("unknown type size");
7127 /* get all init string */
7128 if (has_init
== 2) {
7129 init_str
= tok_str_alloc();
7130 /* only get strings */
7131 while (tok
== TOK_STR
|| tok
== TOK_LSTR
) {
7132 tok_str_add_tok(init_str
);
7135 tok_str_add(init_str
, -1);
7136 tok_str_add(init_str
, 0);
7138 skip_or_save_block(&init_str
);
7143 begin_macro(init_str
, 1);
7145 decl_initializer(type
, NULL
, 0, 1, 1);
7146 /* prepare second initializer parsing */
7147 macro_ptr
= init_str
->str
;
7150 /* if still unknown size, error */
7151 size
= type_size(type
, &align
);
7153 tcc_error("unknown type size");
7155 /* If there's a flex member and it was used in the initializer
7157 if (flexible_array
&&
7158 flexible_array
->type
.ref
->c
> 0)
7159 size
+= flexible_array
->type
.ref
->c
7160 * pointed_size(&flexible_array
->type
);
7161 /* take into account specified alignment if bigger */
7162 if (ad
->a
.aligned
) {
7163 int speca
= 1 << (ad
->a
.aligned
- 1);
7166 } else if (ad
->a
.packed
) {
7170 if (!v
&& NODATA_WANTED
)
7171 size
= 0, align
= 1;
7173 if ((r
& VT_VALMASK
) == VT_LOCAL
) {
7175 #ifdef CONFIG_TCC_BCHECK
7176 if (bcheck
&& (type
->t
& VT_ARRAY
)) {
7180 loc
= (loc
- size
) & -align
;
7182 #ifdef CONFIG_TCC_BCHECK
7183 /* handles bounds */
7184 /* XXX: currently, since we do only one pass, we cannot track
7185 '&' operators, so we add only arrays */
7186 if (bcheck
&& (type
->t
& VT_ARRAY
)) {
7188 /* add padding between regions */
7190 /* then add local bound info */
7191 bounds_ptr
= section_ptr_add(lbounds_section
, 2 * sizeof(addr_t
));
7192 bounds_ptr
[0] = addr
;
7193 bounds_ptr
[1] = size
;
7197 /* local variable */
7198 #ifdef CONFIG_TCC_ASM
7199 if (ad
->asm_label
) {
7200 int reg
= asm_parse_regvar(ad
->asm_label
);
7202 r
= (r
& ~VT_VALMASK
) | reg
;
7205 sym
= sym_push(v
, type
, r
, addr
);
7206 if (ad
->cleanup_func
) {
7207 Sym
*cls
= sym_push2(&all_cleanups
, SYM_FIELD
| ++ncleanups
, 0, 0);
7208 cls
->prev_tok
= sym
;
7209 cls
->next
= ad
->cleanup_func
;
7210 cls
->ncl
= current_cleanups
;
7211 current_cleanups
= cls
;
7216 /* push local reference */
7217 vset(type
, r
, addr
);
7220 if (v
&& scope
== VT_CONST
) {
7221 /* see if the symbol was already defined */
7224 patch_storage(sym
, ad
, type
);
7225 /* we accept several definitions of the same global variable. */
7226 if (!has_init
&& sym
->c
&& elfsym(sym
)->st_shndx
!= SHN_UNDEF
)
7231 /* allocate symbol in corresponding section */
7236 else if (tcc_state
->nocommon
)
7241 addr
= section_add(sec
, size
, align
);
7242 #ifdef CONFIG_TCC_BCHECK
7243 /* add padding if bound check */
7245 section_add(sec
, 1, 1);
7248 addr
= align
; /* SHN_COMMON is special, symbol value is align */
7249 sec
= common_section
;
7254 sym
= sym_push(v
, type
, r
| VT_SYM
, 0);
7255 patch_storage(sym
, ad
, NULL
);
7257 /* Local statics have a scope until now (for
7258 warnings), remove it here. */
7260 /* update symbol definition */
7261 put_extern_sym(sym
, sec
, addr
, size
);
7263 /* push global reference */
7264 sym
= get_sym_ref(type
, sec
, addr
, size
);
7265 vpushsym(type
, sym
);
7269 #ifdef CONFIG_TCC_BCHECK
7270 /* handles bounds now because the symbol must be defined
7271 before for the relocation */
7275 greloca(bounds_section
, sym
, bounds_section
->data_offset
, R_DATA_PTR
, 0);
7276 /* then add global bound info */
7277 bounds_ptr
= section_ptr_add(bounds_section
, 2 * sizeof(addr_t
));
7278 bounds_ptr
[0] = 0; /* relocated */
7279 bounds_ptr
[1] = size
;
7284 if (type
->t
& VT_VLA
) {
7290 /* save current stack pointer */
7291 if (vlas_in_scope
== 0) {
7292 if (vla_sp_root_loc
== -1)
7293 vla_sp_root_loc
= (loc
-= PTR_SIZE
);
7294 gen_vla_sp_save(vla_sp_root_loc
);
7297 vla_runtime_type_size(type
, &a
);
7298 gen_vla_alloc(type
, a
);
7299 #if defined TCC_TARGET_PE && defined TCC_TARGET_X86_64
7300 /* on _WIN64, because of the function args scratch area, the
7301 result of alloca differs from RSP and is returned in RAX. */
7302 gen_vla_result(addr
), addr
= (loc
-= PTR_SIZE
);
7304 gen_vla_sp_save(addr
);
7308 } else if (has_init
) {
7309 size_t oldreloc_offset
= 0;
7310 if (sec
&& sec
->reloc
)
7311 oldreloc_offset
= sec
->reloc
->data_offset
;
7312 decl_initializer(type
, sec
, addr
, 1, 0);
7313 if (sec
&& sec
->reloc
)
7314 squeeze_multi_relocs(sec
, oldreloc_offset
);
7315 /* patch flexible array member size back to -1, */
7316 /* for possible subsequent similar declarations */
7318 flexible_array
->type
.ref
->c
= -1;
7322 /* restore parse state if needed */
7328 nocode_wanted
= saved_nocode_wanted
;
7331 /* parse a function defined by symbol 'sym' and generate its code in
7332 'cur_text_section' */
7333 static void gen_function(Sym
*sym
)
7336 ind
= cur_text_section
->data_offset
;
7337 if (sym
->a
.aligned
) {
7338 size_t newoff
= section_add(cur_text_section
, 0,
7339 1 << (sym
->a
.aligned
- 1));
7340 gen_fill_nops(newoff
- ind
);
7342 /* NOTE: we patch the symbol size later */
7343 put_extern_sym(sym
, cur_text_section
, ind
, 0);
7344 funcname
= get_tok_str(sym
->v
, NULL
);
7346 /* Initialize VLA state */
7348 vla_sp_root_loc
= -1;
7349 /* put debug symbol */
7350 tcc_debug_funcstart(tcc_state
, sym
);
7351 /* push a dummy symbol to enable local sym storage */
7352 sym_push2(&local_stack
, SYM_FIELD
, 0, 0);
7353 local_scope
= 1; /* for function parameters */
7354 gfunc_prolog(&sym
->type
);
7355 reset_local_scope();
7357 clear_temp_local_var_list();
7358 block(NULL
, NULL
, 0);
7359 if (!(nocode_wanted
& 0x20000000)
7360 && ((func_vt
.t
& VT_BTYPE
) == VT_INT
)
7361 && !strcmp (funcname
, "main"))
7365 gen_assign_cast(&func_vt
);
7366 gfunc_return(&func_vt
);
7371 cur_text_section
->data_offset
= ind
;
7372 label_pop(&global_label_stack
, NULL
, 0);
7373 /* reset local stack */
7374 reset_local_scope();
7375 sym_pop(&local_stack
, NULL
, 0);
7376 /* end of function */
7377 /* patch symbol size */
7378 elfsym(sym
)->st_size
= ind
- func_ind
;
7379 tcc_debug_funcend(tcc_state
, ind
- func_ind
);
7380 /* It's better to crash than to generate wrong code */
7381 cur_text_section
= NULL
;
7382 funcname
= ""; /* for safety */
7383 func_vt
.t
= VT_VOID
; /* for safety */
7384 func_var
= 0; /* for safety */
7385 ind
= 0; /* for safety */
7386 nocode_wanted
= 0x80000000;
7390 static void gen_inline_functions(TCCState
*s
)
7393 int inline_generated
, i
, ln
;
7394 struct InlineFunc
*fn
;
7396 ln
= file
->line_num
;
7397 /* iterate while inline function are referenced */
7399 inline_generated
= 0;
7400 for (i
= 0; i
< s
->nb_inline_fns
; ++i
) {
7401 fn
= s
->inline_fns
[i
];
7403 if (sym
&& sym
->c
) {
7404 /* the function was used: generate its code and
7405 convert it to a normal function */
7408 pstrcpy(file
->filename
, sizeof file
->filename
, fn
->filename
);
7409 sym
->type
.t
&= ~VT_INLINE
;
7411 begin_macro(fn
->func_str
, 1);
7413 cur_text_section
= text_section
;
7417 inline_generated
= 1;
7420 } while (inline_generated
);
7421 file
->line_num
= ln
;
7424 ST_FUNC
void free_inline_functions(TCCState
*s
)
7427 /* free tokens of unused inline functions */
7428 for (i
= 0; i
< s
->nb_inline_fns
; ++i
) {
7429 struct InlineFunc
*fn
= s
->inline_fns
[i
];
7431 tok_str_free(fn
->func_str
);
7433 dynarray_reset(&s
->inline_fns
, &s
->nb_inline_fns
);
7436 /* 'l' is VT_LOCAL or VT_CONST to define default storage type, or VT_CMP
7437 if parsing old style parameter decl list (and FUNC_SYM is set then) */
7438 static int decl0(int l
, int is_for_loop_init
, Sym
*func_sym
)
7443 AttributeDef ad
, adbase
;
7446 if (!parse_btype(&btype
, &adbase
)) {
7447 if (is_for_loop_init
)
7449 /* skip redundant ';' if not in old parameter decl scope */
7450 if (tok
== ';' && l
!= VT_CMP
) {
7456 if (tok
== TOK_ASM1
|| tok
== TOK_ASM2
|| tok
== TOK_ASM3
) {
7457 /* global asm block */
7461 if (tok
>= TOK_UIDENT
) {
7462 /* special test for old K&R protos without explicit int
7463 type. Only accepted when defining global data */
7467 expect("declaration");
7472 if ((btype
.t
& VT_BTYPE
) == VT_STRUCT
) {
7473 int v
= btype
.ref
->v
;
7474 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) >= SYM_FIRST_ANOM
)
7475 tcc_warning("unnamed struct/union that defines no instances");
7479 if (IS_ENUM(btype
.t
)) {
7484 while (1) { /* iterate thru each declaration */
7486 /* If the base type itself was an array type of unspecified
7487 size (like in 'typedef int arr[]; arr x = {1};') then
7488 we will overwrite the unknown size by the real one for
7489 this decl. We need to unshare the ref symbol holding
7491 if ((type
.t
& VT_ARRAY
) && type
.ref
->c
< 0) {
7492 type
.ref
= sym_push(SYM_FIELD
, &type
.ref
->type
, 0, type
.ref
->c
);
7495 type_decl(&type
, &ad
, &v
, TYPE_DIRECT
);
7499 type_to_str(buf
, sizeof(buf
), &type
, get_tok_str(v
, NULL
));
7500 printf("type = '%s'\n", buf
);
7503 if ((type
.t
& VT_BTYPE
) == VT_FUNC
) {
7504 if ((type
.t
& VT_STATIC
) && (l
== VT_LOCAL
)) {
7505 tcc_error("function without file scope cannot be static");
7507 /* if old style function prototype, we accept a
7510 if (sym
->f
.func_type
== FUNC_OLD
&& l
== VT_CONST
)
7511 decl0(VT_CMP
, 0, sym
);
7514 if (gnu_ext
&& (tok
== TOK_ASM1
|| tok
== TOK_ASM2
|| tok
== TOK_ASM3
)) {
7515 ad
.asm_label
= asm_label_instr();
7516 /* parse one last attribute list, after asm label */
7517 parse_attribute(&ad
);
7522 #ifdef TCC_TARGET_PE
7523 if (ad
.a
.dllimport
|| ad
.a
.dllexport
) {
7524 if (type
.t
& (VT_STATIC
|VT_TYPEDEF
))
7525 tcc_error("cannot have dll linkage with static or typedef");
7526 if (ad
.a
.dllimport
) {
7527 if ((type
.t
& VT_BTYPE
) == VT_FUNC
)
7530 type
.t
|= VT_EXTERN
;
7536 tcc_error("cannot use local functions");
7537 if ((type
.t
& VT_BTYPE
) != VT_FUNC
)
7538 expect("function definition");
7540 /* reject abstract declarators in function definition
7541 make old style params without decl have int type */
7543 while ((sym
= sym
->next
) != NULL
) {
7544 if (!(sym
->v
& ~SYM_FIELD
))
7545 expect("identifier");
7546 if (sym
->type
.t
== VT_VOID
)
7547 sym
->type
= int_type
;
7550 /* XXX: cannot do better now: convert extern line to static inline */
7551 if ((type
.t
& (VT_EXTERN
| VT_INLINE
)) == (VT_EXTERN
| VT_INLINE
))
7552 type
.t
= (type
.t
& ~VT_EXTERN
) | VT_STATIC
;
7554 /* put function symbol */
7555 sym
= external_global_sym(v
, &type
, 0);
7556 type
.t
&= ~VT_EXTERN
;
7557 patch_storage(sym
, &ad
, &type
);
7559 /* static inline functions are just recorded as a kind
7560 of macro. Their code will be emitted at the end of
7561 the compilation unit only if they are used */
7562 if ((type
.t
& (VT_INLINE
| VT_STATIC
)) ==
7563 (VT_INLINE
| VT_STATIC
)) {
7564 struct InlineFunc
*fn
;
7565 const char *filename
;
7567 filename
= file
? file
->filename
: "";
7568 fn
= tcc_malloc(sizeof *fn
+ strlen(filename
));
7569 strcpy(fn
->filename
, filename
);
7571 skip_or_save_block(&fn
->func_str
);
7572 dynarray_add(&tcc_state
->inline_fns
,
7573 &tcc_state
->nb_inline_fns
, fn
);
7575 /* compute text section */
7576 cur_text_section
= ad
.section
;
7577 if (!cur_text_section
)
7578 cur_text_section
= text_section
;
7584 /* find parameter in function parameter list */
7585 for (sym
= func_sym
->next
; sym
; sym
= sym
->next
)
7586 if ((sym
->v
& ~SYM_FIELD
) == v
)
7588 tcc_error("declaration for parameter '%s' but no such parameter",
7589 get_tok_str(v
, NULL
));
7591 if (type
.t
& VT_STORAGE
) /* 'register' is okay */
7592 tcc_error("storage class specified for '%s'",
7593 get_tok_str(v
, NULL
));
7594 if (sym
->type
.t
!= VT_VOID
)
7595 tcc_error("redefinition of parameter '%s'",
7596 get_tok_str(v
, NULL
));
7597 convert_parameter_type(&type
);
7599 } else if (type
.t
& VT_TYPEDEF
) {
7600 /* save typedefed type */
7601 /* XXX: test storage specifiers ? */
7603 if (sym
&& sym
->sym_scope
== local_scope
) {
7604 if (!is_compatible_types(&sym
->type
, &type
)
7605 || !(sym
->type
.t
& VT_TYPEDEF
))
7606 tcc_error("incompatible redefinition of '%s'",
7607 get_tok_str(v
, NULL
));
7610 sym
= sym_push(v
, &type
, 0, 0);
7614 } else if ((type
.t
& VT_BTYPE
) == VT_VOID
7615 && !(type
.t
& VT_EXTERN
)) {
7616 tcc_error("declaration of void object");
7619 if ((type
.t
& VT_BTYPE
) == VT_FUNC
) {
7620 /* external function definition */
7621 /* specific case for func_call attribute */
7623 } else if (!(type
.t
& VT_ARRAY
)) {
7624 /* not lvalue if array */
7625 r
|= lvalue_type(type
.t
);
7627 has_init
= (tok
== '=');
7628 if (has_init
&& (type
.t
& VT_VLA
))
7629 tcc_error("variable length array cannot be initialized");
7630 if (((type
.t
& VT_EXTERN
) && (!has_init
|| l
!= VT_CONST
)) ||
7631 ((type
.t
& VT_BTYPE
) == VT_FUNC
) ||
7632 ((type
.t
& VT_ARRAY
) && (type
.t
& VT_STATIC
) &&
7633 !has_init
&& l
== VT_CONST
&& type
.ref
->c
< 0)) {
7634 /* external variable or function */
7635 /* NOTE: as GCC, uninitialized global static
7636 arrays of null size are considered as
7638 type
.t
|= VT_EXTERN
;
7639 sym
= external_sym(v
, &type
, r
, &ad
);
7640 if (ad
.alias_target
) {
7643 alias_target
= sym_find(ad
.alias_target
);
7644 esym
= elfsym(alias_target
);
7646 tcc_error("unsupported forward __alias__ attribute");
7647 /* Local statics have a scope until now (for
7648 warnings), remove it here. */
7650 put_extern_sym2(sym
, esym
->st_shndx
, esym
->st_value
, esym
->st_size
, 0);
7653 if (type
.t
& VT_STATIC
)
7659 else if (l
== VT_CONST
)
7660 /* uninitialized global variables may be overridden */
7661 type
.t
|= VT_EXTERN
;
7662 decl_initializer_alloc(&type
, &ad
, r
, has_init
, v
, l
);
7666 if (is_for_loop_init
)
7678 static void decl(int l
)
7683 /* ------------------------------------------------------------------------- */