2 * TCC - Tiny C Compiler
4 * Copyright (c) 2001-2004 Fabrice Bellard
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
23 /********************************************************/
24 /* global variables */
26 /* loc : local variable index
27 ind : output code index
29 anon_sym: anonymous symbol index
31 ST_DATA
int rsym
, anon_sym
, ind
, loc
;
33 ST_DATA Sym
*sym_free_first
;
34 ST_DATA
void **sym_pools
;
35 ST_DATA
int nb_sym_pools
;
37 ST_DATA Sym
*global_stack
;
38 ST_DATA Sym
*local_stack
;
39 ST_DATA Sym
*define_stack
;
40 ST_DATA Sym
*global_label_stack
;
41 ST_DATA Sym
*local_label_stack
;
42 static int local_scope
;
44 static int section_sym
;
46 ST_DATA
int vlas_in_scope
; /* number of VLAs that are currently in scope */
47 ST_DATA
int vla_sp_root_loc
; /* vla_sp_loc for SP before any VLAs were pushed */
48 ST_DATA
int vla_sp_loc
; /* Pointer to variable holding location to store stack pointer on the stack when modifying stack pointer */
50 ST_DATA SValue __vstack
[1+VSTACK_SIZE
], *vtop
, *pvtop
;
52 ST_DATA
int const_wanted
; /* true if constant wanted */
53 ST_DATA
int nocode_wanted
; /* true if no code generation wanted for an expression */
54 ST_DATA
int global_expr
; /* true if compound literals must be allocated globally (used during initializers parsing */
55 ST_DATA CType func_vt
; /* current function return type (used by return instruction) */
56 ST_DATA
int func_var
; /* true if current function is variadic (used by return instruction) */
58 ST_DATA
int last_line_num
, last_ind
, func_ind
; /* debug last line number and pc */
59 ST_DATA
const char *funcname
;
61 ST_DATA CType char_pointer_type
, func_old_type
, int_type
, size_type
;
63 ST_DATA
struct switch_t
{
67 } **p
; int n
; /* list of case ranges */
68 int def_sym
; /* default symbol */
69 } *cur_switch
; /* current switch */
71 /* ------------------------------------------------------------------------- */
73 static void gen_cast(CType
*type
);
74 static inline CType
*pointed_type(CType
*type
);
75 static int is_compatible_types(CType
*type1
, CType
*type2
);
76 static int parse_btype(CType
*type
, AttributeDef
*ad
);
77 static void type_decl(CType
*type
, AttributeDef
*ad
, int *v
, int td
);
78 static void parse_expr_type(CType
*type
);
79 static void decl_initializer(CType
*type
, Section
*sec
, unsigned long c
, int first
, int size_only
);
80 static void block(int *bsym
, int *csym
, int is_expr
);
81 static void decl_initializer_alloc(CType
*type
, AttributeDef
*ad
, int r
, int has_init
, int v
, int scope
);
82 static int decl0(int l
, int is_for_loop_init
);
83 static void expr_eq(void);
84 static void unary_type(CType
*type
);
85 static void vla_runtime_type_size(CType
*type
, int *a
);
86 static void vla_sp_restore(void);
87 static void vla_sp_restore_root(void);
88 static int is_compatible_parameter_types(CType
*type1
, CType
*type2
);
89 static void expr_type(CType
*type
);
90 static inline int64_t expr_const64(void);
91 ST_FUNC
void vpush64(int ty
, unsigned long long v
);
92 ST_FUNC
void vpush(CType
*type
);
93 ST_FUNC
int gvtst(int inv
, int t
);
94 ST_FUNC
int is_btype_size(int bt
);
95 static void gen_inline_functions(TCCState
*s
);
97 ST_INLN
int is_float(int t
)
101 return bt
== VT_LDOUBLE
|| bt
== VT_DOUBLE
|| bt
== VT_FLOAT
|| bt
== VT_QFLOAT
;
104 /* we use our own 'finite' function to avoid potential problems with
105 non standard math libs */
106 /* XXX: endianness dependent */
107 ST_FUNC
int ieee_finite(double d
)
110 memcpy(p
, &d
, sizeof(double));
111 return ((unsigned)((p
[1] | 0x800fffff) + 1)) >> 31;
114 ST_FUNC
void test_lvalue(void)
116 if (!(vtop
->r
& VT_LVAL
))
120 ST_FUNC
void check_vstack(void)
123 tcc_error("internal compiler error: vstack leak (%d)", vtop
- pvtop
);
/* ------------------------------------------------------------------------- */
/* vstack debugging aid */

#if 0
/* Dump entries vtop[-a] .. vtop[-(a+b-1)] of the value stack to stdout,
   prefixed with 'lbl'. Compiled out by default; enable when debugging
   the code generator. */
void pv (const char *lbl, int a, int b)
{
    int i;
    for (i = a; i < a + b; ++i) {
        SValue *p = &vtop[-i];
        printf("%s vtop[-%d] : type.t:%04x r:%04x r2:%04x c.i:%d\n",
            lbl, i, p->type.t, p->r, p->r2, (int)p->c.i);
    }
}
#endif
141 /* ------------------------------------------------------------------------- */
142 /* start of translation unit info */
143 ST_FUNC
void tcc_debug_start(TCCState
*s1
)
148 /* file info: full path + filename */
149 section_sym
= put_elf_sym(symtab_section
, 0, 0,
150 ELFW(ST_INFO
)(STB_LOCAL
, STT_SECTION
), 0,
151 text_section
->sh_num
, NULL
);
152 getcwd(buf
, sizeof(buf
));
154 normalize_slashes(buf
);
156 pstrcat(buf
, sizeof(buf
), "/");
157 put_stabs_r(buf
, N_SO
, 0, 0,
158 text_section
->data_offset
, text_section
, section_sym
);
159 put_stabs_r(file
->filename
, N_SO
, 0, 0,
160 text_section
->data_offset
, text_section
, section_sym
);
165 /* an elf symbol of type STT_FILE must be put so that STB_LOCAL
166 symbols can be safely used */
167 put_elf_sym(symtab_section
, 0, 0,
168 ELFW(ST_INFO
)(STB_LOCAL
, STT_FILE
), 0,
169 SHN_ABS
, file
->filename
);
172 /* put end of translation unit info */
173 ST_FUNC
void tcc_debug_end(TCCState
*s1
)
177 put_stabs_r(NULL
, N_SO
, 0, 0,
178 text_section
->data_offset
, text_section
, section_sym
);
182 /* generate line number info */
183 ST_FUNC
void tcc_debug_line(TCCState
*s1
)
187 if ((last_line_num
!= file
->line_num
|| last_ind
!= ind
)) {
188 put_stabn(N_SLINE
, 0, file
->line_num
, ind
- func_ind
);
190 last_line_num
= file
->line_num
;
194 /* put function symbol */
195 ST_FUNC
void tcc_debug_funcstart(TCCState
*s1
, Sym
*sym
)
203 /* XXX: we put here a dummy type */
204 snprintf(buf
, sizeof(buf
), "%s:%c1",
205 funcname
, sym
->type
.t
& VT_STATIC
? 'f' : 'F');
206 put_stabs_r(buf
, N_FUN
, 0, file
->line_num
, 0,
207 cur_text_section
, sym
->c
);
208 /* //gr gdb wants a line at the function */
209 put_stabn(N_SLINE
, 0, file
->line_num
, 0);
215 /* put function size */
216 ST_FUNC
void tcc_debug_funcend(TCCState
*s1
, int size
)
220 put_stabn(N_FUN
, 0, 0, size
);
223 /* ------------------------------------------------------------------------- */
224 ST_FUNC
void tccgen_start(TCCState
*s1
)
226 cur_text_section
= NULL
;
228 anon_sym
= SYM_FIRST_ANOM
;
233 /* define some often used types */
235 char_pointer_type
.t
= VT_BYTE
;
236 mk_pointer(&char_pointer_type
);
238 size_type
.t
= VT_INT
;
240 size_type
.t
= VT_LLONG
;
242 func_old_type
.t
= VT_FUNC
;
243 func_old_type
.ref
= sym_push(SYM_FIELD
, &int_type
, FUNC_CDECL
, FUNC_OLD
);
247 #ifdef TCC_TARGET_ARM
252 ST_FUNC
void tccgen_end(TCCState
*s1
)
254 gen_inline_functions(s1
);
256 /* end of translation unit info */
260 /* ------------------------------------------------------------------------- */
261 /* apply storage attibutes to Elf symbol */
263 static void update_storage(Sym
*sym
)
272 esym
= &((ElfW(Sym
) *)symtab_section
->data
)[sym
->c
];
275 esym
->st_other
= (esym
->st_other
& ~ELFW(ST_VISIBILITY
)(-1))
276 | ((t
& VT_VIS_MASK
) >> VT_VIS_SHIFT
);
279 esym
->st_info
= ELFW(ST_INFO
)(STB_WEAK
, ELFW(ST_TYPE
)(esym
->st_info
));
283 esym
->st_other
|= ST_PE_EXPORT
;
287 /* ------------------------------------------------------------------------- */
288 /* update sym->c so that it points to an external symbol in section
289 'section' with value 'value' */
291 ST_FUNC
void put_extern_sym2(Sym
*sym
, Section
*section
,
292 addr_t value
, unsigned long size
,
293 int can_add_underscore
)
295 int sym_type
, sym_bind
, sh_num
, info
, other
, t
;
299 #ifdef CONFIG_TCC_BCHECK
305 else if (section
== SECTION_ABS
)
307 else if (section
== SECTION_COMMON
)
310 sh_num
= section
->sh_num
;
313 name
= get_tok_str(sym
->v
, NULL
);
314 #ifdef CONFIG_TCC_BCHECK
315 if (tcc_state
->do_bounds_check
) {
316 /* XXX: avoid doing that for statics ? */
317 /* if bound checking is activated, we change some function
318 names by adding the "__bound" prefix */
321 /* XXX: we rely only on malloc hooks */
334 strcpy(buf
, "__bound_");
342 if ((t
& VT_BTYPE
) == VT_FUNC
) {
344 } else if ((t
& VT_BTYPE
) == VT_VOID
) {
345 sym_type
= STT_NOTYPE
;
347 sym_type
= STT_OBJECT
;
350 sym_bind
= STB_LOCAL
;
352 sym_bind
= STB_GLOBAL
;
355 if (sym_type
== STT_FUNC
&& sym
->type
.ref
) {
356 Sym
*ref
= sym
->type
.ref
;
357 if (ref
->a
.func_call
== FUNC_STDCALL
&& can_add_underscore
) {
358 sprintf(buf1
, "_%s@%d", name
, ref
->a
.func_args
* PTR_SIZE
);
360 other
|= ST_PE_STDCALL
;
361 can_add_underscore
= 0;
365 other
|= ST_PE_IMPORT
;
367 if (tcc_state
->leading_underscore
&& can_add_underscore
) {
369 pstrcpy(buf1
+ 1, sizeof(buf1
) - 1, name
);
373 name
= get_tok_str(sym
->asm_label
, NULL
);
374 info
= ELFW(ST_INFO
)(sym_bind
, sym_type
);
375 sym
->c
= set_elf_sym(symtab_section
, value
, size
, info
, other
, sh_num
, name
);
377 esym
= &((ElfW(Sym
) *)symtab_section
->data
)[sym
->c
];
378 esym
->st_value
= value
;
379 esym
->st_size
= size
;
380 esym
->st_shndx
= sh_num
;
385 ST_FUNC
void put_extern_sym(Sym
*sym
, Section
*section
,
386 addr_t value
, unsigned long size
)
388 put_extern_sym2(sym
, section
, value
, size
, 1);
391 /* add a new relocation entry to symbol 'sym' in section 's' */
392 ST_FUNC
void greloca(Section
*s
, Sym
*sym
, unsigned long offset
, int type
,
397 if (nocode_wanted
&& s
== cur_text_section
)
402 put_extern_sym(sym
, NULL
, 0, 0);
406 /* now we can add ELF relocation info */
407 put_elf_reloca(symtab_section
, s
, offset
, type
, c
, addend
);
410 ST_FUNC
void greloc(Section
*s
, Sym
*sym
, unsigned long offset
, int type
)
412 greloca(s
, sym
, offset
, type
, 0);
415 /* ------------------------------------------------------------------------- */
416 /* symbol allocator */
417 static Sym
*__sym_malloc(void)
419 Sym
*sym_pool
, *sym
, *last_sym
;
422 sym_pool
= tcc_malloc(SYM_POOL_NB
* sizeof(Sym
));
423 dynarray_add(&sym_pools
, &nb_sym_pools
, sym_pool
);
425 last_sym
= sym_free_first
;
427 for(i
= 0; i
< SYM_POOL_NB
; i
++) {
428 sym
->next
= last_sym
;
432 sym_free_first
= last_sym
;
436 static inline Sym
*sym_malloc(void)
440 sym
= sym_free_first
;
442 sym
= __sym_malloc();
443 sym_free_first
= sym
->next
;
446 sym
= tcc_malloc(sizeof(Sym
));
451 ST_INLN
void sym_free(Sym
*sym
)
454 sym
->next
= sym_free_first
;
455 sym_free_first
= sym
;
461 /* push, without hashing */
462 ST_FUNC Sym
*sym_push2(Sym
**ps
, int v
, int t
, long c
)
482 /* find a symbol and return its associated structure. 's' is the top
483 of the symbol stack */
484 ST_FUNC Sym
*sym_find2(Sym
*s
, int v
)
496 /* structure lookup */
497 ST_INLN Sym
*struct_find(int v
)
500 if ((unsigned)v
>= (unsigned)(tok_ident
- TOK_IDENT
))
502 return table_ident
[v
]->sym_struct
;
505 /* find an identifier */
506 ST_INLN Sym
*sym_find(int v
)
509 if ((unsigned)v
>= (unsigned)(tok_ident
- TOK_IDENT
))
511 return table_ident
[v
]->sym_identifier
;
514 /* push a given symbol on the symbol stack */
515 ST_FUNC Sym
*sym_push(int v
, CType
*type
, int r
, long c
)
524 s
= sym_push2(ps
, v
, type
->t
, c
);
525 s
->type
.ref
= type
->ref
;
527 /* don't record fields or anonymous symbols */
529 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
530 /* record symbol in token array */
531 ts
= table_ident
[(v
& ~SYM_STRUCT
) - TOK_IDENT
];
533 ps
= &ts
->sym_struct
;
535 ps
= &ts
->sym_identifier
;
538 s
->scope
= local_scope
;
539 if (s
->prev_tok
&& s
->prev_tok
->scope
== s
->scope
)
540 tcc_error("redeclaration of '%s'",
541 get_tok_str(v
& ~SYM_STRUCT
, NULL
));
546 /* push a global identifier */
547 ST_FUNC Sym
*global_identifier_push(int v
, int t
, long c
)
550 s
= sym_push2(&global_stack
, v
, t
, c
);
551 /* don't record anonymous symbol */
552 if (v
< SYM_FIRST_ANOM
) {
553 ps
= &table_ident
[v
- TOK_IDENT
]->sym_identifier
;
554 /* modify the top most local identifier, so that
555 sym_identifier will point to 's' when popped */
557 ps
= &(*ps
)->prev_tok
;
564 /* pop symbols until top reaches 'b'. If KEEP is non-zero don't really
565 pop them yet from the list, but do remove them from the token array. */
566 ST_FUNC
void sym_pop(Sym
**ptop
, Sym
*b
, int keep
)
576 /* remove symbol in token array */
578 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
579 ts
= table_ident
[(v
& ~SYM_STRUCT
) - TOK_IDENT
];
581 ps
= &ts
->sym_struct
;
583 ps
= &ts
->sym_identifier
;
594 /* ------------------------------------------------------------------------- */
596 static void vsetc(CType
*type
, int r
, CValue
*vc
)
600 if (vtop
>= vstack
+ (VSTACK_SIZE
- 1))
601 tcc_error("memory full (vstack)");
602 /* cannot let cpu flags if other instruction are generated. Also
603 avoid leaving VT_JMP anywhere except on the top of the stack
604 because it would complicate the code generator.
606 Don't do this when nocode_wanted. vtop might come from
607 !nocode_wanted regions (see 88_codeopt.c) and transforming
608 it to a register without actually generating code is wrong
609 as their value might still be used for real. All values
610 we push under nocode_wanted will eventually be popped
611 again, so that the VT_CMP/VT_JMP value will be in vtop
612 when code is unsuppressed again.
614 Same logic below in vswap(); */
615 if (vtop
>= vstack
&& !nocode_wanted
) {
616 v
= vtop
->r
& VT_VALMASK
;
617 if (v
== VT_CMP
|| (v
& ~1) == VT_JMP
)
629 ST_FUNC
void vswap(void)
632 /* cannot vswap cpu flags. See comment at vsetc() above */
633 if (vtop
>= vstack
&& !nocode_wanted
) {
634 int v
= vtop
->r
& VT_VALMASK
;
635 if (v
== VT_CMP
|| (v
& ~1) == VT_JMP
)
643 /* pop stack value */
644 ST_FUNC
void vpop(void)
647 v
= vtop
->r
& VT_VALMASK
;
648 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
649 /* for x86, we need to pop the FP stack */
651 o(0xd8dd); /* fstp %st(0) */
654 if (v
== VT_JMP
|| v
== VT_JMPI
) {
655 /* need to put correct jump if && or || without test */
661 /* push constant of type "type" with useless value */
662 ST_FUNC
void vpush(CType
*type
)
665 vsetc(type
, VT_CONST
, &cval
);
668 /* push integer constant */
669 ST_FUNC
void vpushi(int v
)
673 vsetc(&int_type
, VT_CONST
, &cval
);
676 /* push a pointer sized constant */
677 static void vpushs(addr_t v
)
681 vsetc(&size_type
, VT_CONST
, &cval
);
684 /* push arbitrary 64bit constant */
685 ST_FUNC
void vpush64(int ty
, unsigned long long v
)
692 vsetc(&ctype
, VT_CONST
, &cval
);
695 /* push long long constant */
696 static inline void vpushll(long long v
)
698 vpush64(VT_LLONG
, v
);
701 ST_FUNC
void vset(CType
*type
, int r
, long v
)
706 vsetc(type
, r
, &cval
);
709 static void vseti(int r
, int v
)
717 ST_FUNC
void vpushv(SValue
*v
)
719 if (vtop
>= vstack
+ (VSTACK_SIZE
- 1))
720 tcc_error("memory full (vstack)");
725 static void vdup(void)
730 /* rotate n first stack elements to the bottom
731 I1 ... In -> I2 ... In I1 [top is right]
733 ST_FUNC
void vrotb(int n
)
744 /* rotate the n elements before entry e towards the top
745 I1 ... In ... -> In I1 ... I(n-1) ... [top is right]
747 ST_FUNC
void vrote(SValue
*e
, int n
)
753 for(i
= 0;i
< n
- 1; i
++)
758 /* rotate n first stack elements to the top
759 I1 ... In -> In I1 ... I(n-1) [top is right]
761 ST_FUNC
void vrott(int n
)
766 /* push a symbol value of TYPE */
767 static inline void vpushsym(CType
*type
, Sym
*sym
)
771 vsetc(type
, VT_CONST
| VT_SYM
, &cval
);
775 /* Return a static symbol pointing to a section */
776 ST_FUNC Sym
*get_sym_ref(CType
*type
, Section
*sec
, unsigned long offset
, unsigned long size
)
782 sym
= global_identifier_push(v
, type
->t
| VT_STATIC
, 0);
783 sym
->type
.ref
= type
->ref
;
784 sym
->r
= VT_CONST
| VT_SYM
;
785 put_extern_sym(sym
, sec
, offset
, size
);
789 /* push a reference to a section offset by adding a dummy symbol */
790 static void vpush_ref(CType
*type
, Section
*sec
, unsigned long offset
, unsigned long size
)
792 vpushsym(type
, get_sym_ref(type
, sec
, offset
, size
));
795 /* define a new external reference to a symbol 'v' of type 'u' */
796 ST_FUNC Sym
*external_global_sym(int v
, CType
*type
, int r
)
802 /* push forward reference */
803 s
= global_identifier_push(v
, type
->t
| VT_EXTERN
, 0);
804 s
->type
.ref
= type
->ref
;
805 s
->r
= r
| VT_CONST
| VT_SYM
;
810 /* Merge some storage attributes. */
811 static void patch_storage(Sym
*sym
, CType
*type
)
814 if (!is_compatible_types(&sym
->type
, type
))
815 tcc_error("incompatible types for redefinition of '%s'",
816 get_tok_str(sym
->v
, NULL
));
819 if ((sym
->type
.t
^ t
) & VT_IMPORT
)
820 tcc_error("incompatible dll linkage for redefinition of '%s'",
821 get_tok_str(sym
->v
, NULL
));
823 sym
->type
.t
|= t
& (VT_EXPORT
|VT_WEAK
);
824 if (t
& VT_VIS_MASK
) {
825 int vis
= sym
->type
.t
& VT_VIS_MASK
;
826 int vis2
= t
& VT_VIS_MASK
;
827 if (vis
== (STV_DEFAULT
<< VT_VIS_SHIFT
))
829 else if (vis2
!= (STV_DEFAULT
<< VT_VIS_SHIFT
))
830 vis
= (vis
< vis2
) ? vis
: vis2
;
831 sym
->type
.t
= (sym
->type
.t
& ~VT_VIS_MASK
) | vis
;
835 /* define a new external reference to a symbol 'v' */
836 static Sym
*external_sym(int v
, CType
*type
, int r
)
841 /* push forward reference */
842 s
= sym_push(v
, type
, r
| VT_CONST
| VT_SYM
, 0);
843 s
->type
.t
|= VT_EXTERN
;
845 if (s
->type
.ref
== func_old_type
.ref
) {
846 s
->type
.ref
= type
->ref
;
847 s
->r
= r
| VT_CONST
| VT_SYM
;
848 s
->type
.t
|= VT_EXTERN
;
850 patch_storage(s
, type
);
856 /* push a reference to global symbol v */
857 ST_FUNC
void vpush_global_sym(CType
*type
, int v
)
859 vpushsym(type
, external_global_sym(v
, type
, 0));
862 /* save registers up to (vtop - n) stack entry */
863 ST_FUNC
void save_regs(int n
)
866 for(p
= vstack
, p1
= vtop
- n
; p
<= p1
; p
++)
870 /* save r to the memory stack, and mark it as being free */
871 ST_FUNC
void save_reg(int r
)
873 save_reg_upstack(r
, 0);
876 /* save r to the memory stack, and mark it as being free,
877 if seen up to (vtop - n) stack entry */
878 ST_FUNC
void save_reg_upstack(int r
, int n
)
880 int l
, saved
, size
, align
;
884 if ((r
&= VT_VALMASK
) >= VT_CONST
)
889 /* modify all stack values */
892 for(p
= vstack
, p1
= vtop
- n
; p
<= p1
; p
++) {
893 if ((p
->r
& VT_VALMASK
) == r
||
894 ((p
->type
.t
& VT_BTYPE
) == VT_LLONG
&& (p
->r2
& VT_VALMASK
) == r
)) {
895 /* must save value on stack if not already done */
897 /* NOTE: must reload 'r' because r might be equal to r2 */
898 r
= p
->r
& VT_VALMASK
;
899 /* store register in the stack */
901 if ((p
->r
& VT_LVAL
) ||
902 (!is_float(type
->t
) && (type
->t
& VT_BTYPE
) != VT_LLONG
))
903 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
904 type
= &char_pointer_type
;
908 size
= type_size(type
, &align
);
909 loc
= (loc
- size
) & -align
;
911 sv
.r
= VT_LOCAL
| VT_LVAL
;
914 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
915 /* x86 specific: need to pop fp register ST0 if saved */
917 o(0xd8dd); /* fstp %st(0) */
920 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
921 /* special long long case */
922 if ((type
->t
& VT_BTYPE
) == VT_LLONG
) {
930 /* mark that stack entry as being saved on the stack */
931 if (p
->r
& VT_LVAL
) {
932 /* also clear the bounded flag because the
933 relocation address of the function was stored in
935 p
->r
= (p
->r
& ~(VT_VALMASK
| VT_BOUNDED
)) | VT_LLOCAL
;
937 p
->r
= lvalue_type(p
->type
.t
) | VT_LOCAL
;
945 #ifdef TCC_TARGET_ARM
946 /* find a register of class 'rc2' with at most one reference on stack.
947 * If none, call get_reg(rc) */
948 ST_FUNC
int get_reg_ex(int rc
, int rc2
)
953 for(r
=0;r
<NB_REGS
;r
++) {
954 if (reg_classes
[r
] & rc2
) {
957 for(p
= vstack
; p
<= vtop
; p
++) {
958 if ((p
->r
& VT_VALMASK
) == r
||
959 (p
->r2
& VT_VALMASK
) == r
)
970 /* find a free register of class 'rc'. If none, save one register */
971 ST_FUNC
int get_reg(int rc
)
976 /* find a free register */
977 for(r
=0;r
<NB_REGS
;r
++) {
978 if (reg_classes
[r
] & rc
) {
981 for(p
=vstack
;p
<=vtop
;p
++) {
982 if ((p
->r
& VT_VALMASK
) == r
||
983 (p
->r2
& VT_VALMASK
) == r
)
991 /* no register left : free the first one on the stack (VERY
992 IMPORTANT to start from the bottom to ensure that we don't
993 spill registers used in gen_opi()) */
994 for(p
=vstack
;p
<=vtop
;p
++) {
995 /* look at second register (if long long) */
996 r
= p
->r2
& VT_VALMASK
;
997 if (r
< VT_CONST
&& (reg_classes
[r
] & rc
))
999 r
= p
->r
& VT_VALMASK
;
1000 if (r
< VT_CONST
&& (reg_classes
[r
] & rc
)) {
1006 /* Should never comes here */
1010 /* move register 's' (of type 't') to 'r', and flush previous value of r to memory
1012 static void move_reg(int r
, int s
, int t
)
1026 /* get address of vtop (vtop MUST BE an lvalue) */
1027 ST_FUNC
void gaddrof(void)
1029 vtop
->r
&= ~VT_LVAL
;
1030 /* tricky: if saved lvalue, then we can go back to lvalue */
1031 if ((vtop
->r
& VT_VALMASK
) == VT_LLOCAL
)
1032 vtop
->r
= (vtop
->r
& ~(VT_VALMASK
| VT_LVAL_TYPE
)) | VT_LOCAL
| VT_LVAL
;
1037 #ifdef CONFIG_TCC_BCHECK
1038 /* generate lvalue bound code */
1039 static void gbound(void)
1044 vtop
->r
&= ~VT_MUSTBOUND
;
1045 /* if lvalue, then use checking code before dereferencing */
1046 if (vtop
->r
& VT_LVAL
) {
1047 /* if not VT_BOUNDED value, then make one */
1048 if (!(vtop
->r
& VT_BOUNDED
)) {
1049 lval_type
= vtop
->r
& (VT_LVAL_TYPE
| VT_LVAL
);
1050 /* must save type because we must set it to int to get pointer */
1052 vtop
->type
.t
= VT_PTR
;
1055 gen_bounded_ptr_add();
1056 vtop
->r
|= lval_type
;
1059 /* then check for dereferencing */
1060 gen_bounded_ptr_deref();
1065 /* store vtop a register belonging to class 'rc'. lvalues are
1066 converted to values. Cannot be used if cannot be converted to
1067 register value (such as structures). */
1068 ST_FUNC
int gv(int rc
)
1070 int r
, bit_pos
, bit_size
, size
, align
, i
;
1073 /* NOTE: get_reg can modify vstack[] */
1074 if (vtop
->type
.t
& VT_BITFIELD
) {
1077 bit_pos
= (vtop
->type
.t
>> VT_STRUCT_SHIFT
) & 0x3f;
1078 bit_size
= (vtop
->type
.t
>> (VT_STRUCT_SHIFT
+ 6)) & 0x3f;
1079 /* remove bit field info to avoid loops */
1080 vtop
->type
.t
&= ~VT_BITFIELD
& ((1 << VT_STRUCT_SHIFT
) - 1);
1081 /* cast to int to propagate signedness in following ops */
1082 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
) {
1087 if((vtop
->type
.t
& VT_UNSIGNED
) ||
1088 (vtop
->type
.t
& VT_BTYPE
) == VT_BOOL
)
1089 type
.t
|= VT_UNSIGNED
;
1091 /* generate shifts */
1092 vpushi(bits
- (bit_pos
+ bit_size
));
1094 vpushi(bits
- bit_size
);
1095 /* NOTE: transformed to SHR if unsigned */
1099 if (is_float(vtop
->type
.t
) &&
1100 (vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
) {
1103 unsigned long offset
;
1104 #if defined(TCC_TARGET_ARM) && !defined(TCC_ARM_VFP)
1108 /* XXX: unify with initializers handling ? */
1109 /* CPUs usually cannot use float constants, so we store them
1110 generically in data segment */
1111 size
= type_size(&vtop
->type
, &align
);
1112 offset
= (data_section
->data_offset
+ align
- 1) & -align
;
1113 data_section
->data_offset
= offset
;
1114 /* XXX: not portable yet */
1115 #if defined(__i386__) || defined(__x86_64__)
1116 /* Zero pad x87 tenbyte long doubles */
1117 if (size
== LDOUBLE_SIZE
) {
1118 vtop
->c
.tab
[2] &= 0xffff;
1119 #if LDOUBLE_SIZE == 16
1124 ptr
= section_ptr_add(data_section
, size
);
1126 #if defined(TCC_TARGET_ARM) && !defined(TCC_ARM_VFP)
1130 ptr
[i
] = vtop
->c
.tab
[size
-1-i
];
1134 ptr
[i
] = vtop
->c
.tab
[i
];
1135 sym
= get_sym_ref(&vtop
->type
, data_section
, offset
, size
<< 2);
1136 vtop
->r
|= VT_LVAL
| VT_SYM
;
1140 #ifdef CONFIG_TCC_BCHECK
1141 if (vtop
->r
& VT_MUSTBOUND
)
1145 r
= vtop
->r
& VT_VALMASK
;
1146 rc2
= (rc
& RC_FLOAT
) ? RC_FLOAT
: RC_INT
;
1147 #ifndef TCC_TARGET_ARM64
1150 #ifdef TCC_TARGET_X86_64
1151 else if (rc
== RC_FRET
)
1155 /* need to reload if:
1157 - lvalue (need to dereference pointer)
1158 - already a register, but not in the right class */
1160 || (vtop
->r
& VT_LVAL
)
1161 || !(reg_classes
[r
] & rc
)
1162 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
1163 || ((vtop
->type
.t
& VT_BTYPE
) == VT_QLONG
&& !(reg_classes
[vtop
->r2
] & rc2
))
1164 || ((vtop
->type
.t
& VT_BTYPE
) == VT_QFLOAT
&& !(reg_classes
[vtop
->r2
] & rc2
))
1166 || ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
&& !(reg_classes
[vtop
->r2
] & rc2
))
1171 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
1172 if (((vtop
->type
.t
& VT_BTYPE
) == VT_QLONG
) || ((vtop
->type
.t
& VT_BTYPE
) == VT_QFLOAT
)) {
1173 int addr_type
= VT_LLONG
, load_size
= 8, load_type
= ((vtop
->type
.t
& VT_BTYPE
) == VT_QLONG
) ? VT_LLONG
: VT_DOUBLE
;
1175 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
) {
1176 int addr_type
= VT_INT
, load_size
= 4, load_type
= VT_INT
;
1177 unsigned long long ll
;
1179 int r2
, original_type
;
1180 original_type
= vtop
->type
.t
;
1181 /* two register type load : expand to two words
1183 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
1184 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
) {
1187 vtop
->c
.i
= ll
; /* first word */
1189 vtop
->r
= r
; /* save register value */
1190 vpushi(ll
>> 32); /* second word */
1193 if (vtop
->r
& VT_LVAL
) {
1194 /* We do not want to modifier the long long
1195 pointer here, so the safest (and less
1196 efficient) is to save all the other registers
1197 in the stack. XXX: totally inefficient. */
1201 /* lvalue_save: save only if used further down the stack */
1202 save_reg_upstack(vtop
->r
, 1);
1204 /* load from memory */
1205 vtop
->type
.t
= load_type
;
1208 vtop
[-1].r
= r
; /* save register value */
1209 /* increment pointer to get second word */
1210 vtop
->type
.t
= addr_type
;
1215 vtop
->type
.t
= load_type
;
1217 /* move registers */
1220 vtop
[-1].r
= r
; /* save register value */
1221 vtop
->r
= vtop
[-1].r2
;
1223 /* Allocate second register. Here we rely on the fact that
1224 get_reg() tries first to free r2 of an SValue. */
1228 /* write second register */
1230 vtop
->type
.t
= original_type
;
1231 } else if ((vtop
->r
& VT_LVAL
) && !is_float(vtop
->type
.t
)) {
1233 /* lvalue of scalar type : need to use lvalue type
1234 because of possible cast */
1237 /* compute memory access type */
1238 if (vtop
->r
& VT_LVAL_BYTE
)
1240 else if (vtop
->r
& VT_LVAL_SHORT
)
1242 if (vtop
->r
& VT_LVAL_UNSIGNED
)
1246 /* restore wanted type */
1249 /* one register type load */
1254 #ifdef TCC_TARGET_C67
1255 /* uses register pairs for doubles */
1256 if ((vtop
->type
.t
& VT_BTYPE
) == VT_DOUBLE
)
1263 /* generate vtop[-1] and vtop[0] in resp. classes rc1 and rc2 */
1264 ST_FUNC
void gv2(int rc1
, int rc2
)
1268 /* generate more generic register first. But VT_JMP or VT_CMP
1269 values must be generated first in all cases to avoid possible
1271 v
= vtop
[0].r
& VT_VALMASK
;
1272 if (v
!= VT_CMP
&& (v
& ~1) != VT_JMP
&& rc1
<= rc2
) {
1277 /* test if reload is needed for first register */
1278 if ((vtop
[-1].r
& VT_VALMASK
) >= VT_CONST
) {
1288 /* test if reload is needed for first register */
1289 if ((vtop
[0].r
& VT_VALMASK
) >= VT_CONST
) {
1295 #ifndef TCC_TARGET_ARM64
1296 /* wrapper around RC_FRET to return a register by type */
1297 static int rc_fret(int t
)
1299 #ifdef TCC_TARGET_X86_64
1300 if (t
== VT_LDOUBLE
) {
1308 /* wrapper around REG_FRET to return a register by type */
1309 static int reg_fret(int t
)
1311 #ifdef TCC_TARGET_X86_64
1312 if (t
== VT_LDOUBLE
) {
1319 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
1320 /* expand 64bit on stack in two ints */
1321 static void lexpand(void)
1324 u
= vtop
->type
.t
& (VT_DEFSIGN
| VT_UNSIGNED
);
1325 v
= vtop
->r
& (VT_VALMASK
| VT_LVAL
);
1326 if (v
== VT_CONST
) {
1329 } else if (v
== (VT_LVAL
|VT_CONST
) || v
== (VT_LVAL
|VT_LOCAL
)) {
1335 vtop
[0].r
= vtop
[-1].r2
;
1336 vtop
[0].r2
= vtop
[-1].r2
= VT_CONST
;
1338 vtop
[0].type
.t
= vtop
[-1].type
.t
= VT_INT
| u
;
1342 #ifdef TCC_TARGET_ARM
1343 /* expand long long on stack */
1344 ST_FUNC
void lexpand_nr(void)
1348 u
= vtop
->type
.t
& (VT_DEFSIGN
| VT_UNSIGNED
);
1350 vtop
->r2
= VT_CONST
;
1351 vtop
->type
.t
= VT_INT
| u
;
1352 v
=vtop
[-1].r
& (VT_VALMASK
| VT_LVAL
);
1353 if (v
== VT_CONST
) {
1354 vtop
[-1].c
.i
= vtop
->c
.i
;
1355 vtop
->c
.i
= vtop
->c
.i
>> 32;
1357 } else if (v
== (VT_LVAL
|VT_CONST
) || v
== (VT_LVAL
|VT_LOCAL
)) {
1359 vtop
->r
= vtop
[-1].r
;
1360 } else if (v
> VT_CONST
) {
1364 vtop
->r
= vtop
[-1].r2
;
1365 vtop
[-1].r2
= VT_CONST
;
1366 vtop
[-1].type
.t
= VT_INT
| u
;
1370 #if !defined(TCC_TARGET_X86_64) && !defined(TCC_TARGET_ARM64)
1371 /* build a long long from two ints */
1372 static void lbuild(int t
)
1374 gv2(RC_INT
, RC_INT
);
1375 vtop
[-1].r2
= vtop
[0].r
;
1376 vtop
[-1].type
.t
= t
;
1381 /* convert stack entry to register and duplicate its value in another
1383 static void gv_dup(void)
1389 #if !defined(TCC_TARGET_X86_64) && !defined(TCC_TARGET_ARM64)
1390 if ((t
& VT_BTYPE
) == VT_LLONG
) {
1397 /* stack: H L L1 H1 */
1407 /* duplicate value */
1412 #ifdef TCC_TARGET_X86_64
1413 if ((t
& VT_BTYPE
) == VT_LDOUBLE
) {
1423 load(r1
, &sv
); /* move r to r1 */
1425 /* duplicates value */
1431 /* Generate value test
1433 * Generate a test for any value (jump, comparison and integers) */
1434 ST_FUNC
int gvtst(int inv
, int t
)
1436 int v
= vtop
->r
& VT_VALMASK
;
1437 if (v
!= VT_CMP
&& v
!= VT_JMP
&& v
!= VT_JMPI
) {
1441 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
1442 /* constant jmp optimization */
1443 if ((vtop
->c
.i
!= 0) != inv
)
1448 return gtst(inv
, t
);
1451 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
1452 /* generate CPU independent (unsigned) long long operations */
1453 static void gen_opl(int op
)
1455 int t
, a
, b
, op1
, c
, i
;
1457 unsigned short reg_iret
= REG_IRET
;
1458 unsigned short reg_lret
= REG_LRET
;
1464 func
= TOK___divdi3
;
1467 func
= TOK___udivdi3
;
1470 func
= TOK___moddi3
;
1473 func
= TOK___umoddi3
;
1480 /* call generic long long function */
1481 vpush_global_sym(&func_old_type
, func
);
1486 vtop
->r2
= reg_lret
;
1494 //pv("gen_opl A",0,2);
1500 /* stack: L1 H1 L2 H2 */
1505 vtop
[-2] = vtop
[-3];
1508 /* stack: H1 H2 L1 L2 */
1509 //pv("gen_opl B",0,4);
1515 /* stack: H1 H2 L1 L2 ML MH */
1518 /* stack: ML MH H1 H2 L1 L2 */
1522 /* stack: ML MH H1 L2 H2 L1 */
1527 /* stack: ML MH M1 M2 */
1530 } else if (op
== '+' || op
== '-') {
1531 /* XXX: add non carry method too (for MIPS or alpha) */
1537 /* stack: H1 H2 (L1 op L2) */
1540 gen_op(op1
+ 1); /* TOK_xxxC2 */
1543 /* stack: H1 H2 (L1 op L2) */
1546 /* stack: (L1 op L2) H1 H2 */
1548 /* stack: (L1 op L2) (H1 op H2) */
1556 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
1557 t
= vtop
[-1].type
.t
;
1561 /* stack: L H shift */
1563 /* constant: simpler */
1564 /* NOTE: all comments are for SHL. the other cases are
1565 done by swaping words */
1576 if (op
!= TOK_SAR
) {
1609 /* XXX: should provide a faster fallback on x86 ? */
1612 func
= TOK___ashrdi3
;
1615 func
= TOK___lshrdi3
;
1618 func
= TOK___ashldi3
;
1624 /* compare operations */
1630 /* stack: L1 H1 L2 H2 */
1632 vtop
[-1] = vtop
[-2];
1634 /* stack: L1 L2 H1 H2 */
1637 /* when values are equal, we need to compare low words. since
1638 the jump is inverted, we invert the test too. */
1641 else if (op1
== TOK_GT
)
1643 else if (op1
== TOK_ULT
)
1645 else if (op1
== TOK_UGT
)
1655 /* generate non equal test */
1661 /* compare low. Always unsigned */
1665 else if (op1
== TOK_LE
)
1667 else if (op1
== TOK_GT
)
1669 else if (op1
== TOK_GE
)
1680 static uint64_t gen_opic_sdiv(uint64_t a
, uint64_t b
)
1682 uint64_t x
= (a
>> 63 ? -a
: a
) / (b
>> 63 ? -b
: b
);
1683 return (a
^ b
) >> 63 ? -x
: x
;
1686 static int gen_opic_lt(uint64_t a
, uint64_t b
)
1688 return (a
^ (uint64_t)1 << 63) < (b
^ (uint64_t)1 << 63);
1691 /* handle integer constant optimizations and various machine
1693 static void gen_opic(int op
)
1695 SValue
*v1
= vtop
- 1;
1697 int t1
= v1
->type
.t
& VT_BTYPE
;
1698 int t2
= v2
->type
.t
& VT_BTYPE
;
1699 int c1
= (v1
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
1700 int c2
= (v2
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
1701 uint64_t l1
= c1
? v1
->c
.i
: 0;
1702 uint64_t l2
= c2
? v2
->c
.i
: 0;
1703 int shm
= (t1
== VT_LLONG
) ? 63 : 31;
1705 if (t1
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t1
!= VT_PTR
))
1706 l1
= ((uint32_t)l1
|
1707 (v1
->type
.t
& VT_UNSIGNED
? 0 : -(l1
& 0x80000000)));
1708 if (t2
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t2
!= VT_PTR
))
1709 l2
= ((uint32_t)l2
|
1710 (v2
->type
.t
& VT_UNSIGNED
? 0 : -(l2
& 0x80000000)));
1714 case '+': l1
+= l2
; break;
1715 case '-': l1
-= l2
; break;
1716 case '&': l1
&= l2
; break;
1717 case '^': l1
^= l2
; break;
1718 case '|': l1
|= l2
; break;
1719 case '*': l1
*= l2
; break;
1726 /* if division by zero, generate explicit division */
1729 tcc_error("division by zero in constant");
1733 default: l1
= gen_opic_sdiv(l1
, l2
); break;
1734 case '%': l1
= l1
- l2
* gen_opic_sdiv(l1
, l2
); break;
1735 case TOK_UDIV
: l1
= l1
/ l2
; break;
1736 case TOK_UMOD
: l1
= l1
% l2
; break;
1739 case TOK_SHL
: l1
<<= (l2
& shm
); break;
1740 case TOK_SHR
: l1
>>= (l2
& shm
); break;
1742 l1
= (l1
>> 63) ? ~(~l1
>> (l2
& shm
)) : l1
>> (l2
& shm
);
1745 case TOK_ULT
: l1
= l1
< l2
; break;
1746 case TOK_UGE
: l1
= l1
>= l2
; break;
1747 case TOK_EQ
: l1
= l1
== l2
; break;
1748 case TOK_NE
: l1
= l1
!= l2
; break;
1749 case TOK_ULE
: l1
= l1
<= l2
; break;
1750 case TOK_UGT
: l1
= l1
> l2
; break;
1751 case TOK_LT
: l1
= gen_opic_lt(l1
, l2
); break;
1752 case TOK_GE
: l1
= !gen_opic_lt(l1
, l2
); break;
1753 case TOK_LE
: l1
= !gen_opic_lt(l2
, l1
); break;
1754 case TOK_GT
: l1
= gen_opic_lt(l2
, l1
); break;
1756 case TOK_LAND
: l1
= l1
&& l2
; break;
1757 case TOK_LOR
: l1
= l1
|| l2
; break;
1761 if (t1
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t1
!= VT_PTR
))
1762 l1
= ((uint32_t)l1
|
1763 (v1
->type
.t
& VT_UNSIGNED
? 0 : -(l1
& 0x80000000)));
1767 /* if commutative ops, put c2 as constant */
1768 if (c1
&& (op
== '+' || op
== '&' || op
== '^' ||
1769 op
== '|' || op
== '*')) {
1771 c2
= c1
; //c = c1, c1 = c2, c2 = c;
1772 l2
= l1
; //l = l1, l1 = l2, l2 = l;
1774 if (!const_wanted
&&
1776 (op
== TOK_SHL
|| op
== TOK_SHR
|| op
== TOK_SAR
)) ||
1777 (l1
== -1 && op
== TOK_SAR
))) {
1778 /* treat (0 << x), (0 >> x) and (-1 >> x) as constant */
1780 } else if (!const_wanted
&&
1781 c2
&& ((l2
== 0 && (op
== '&' || op
== '*')) ||
1782 (l2
== -1 && op
== '|') ||
1783 (l2
== 0xffffffff && t2
!= VT_LLONG
&& op
== '|') ||
1784 (l2
== 1 && (op
== '%' || op
== TOK_UMOD
)))) {
1785 /* treat (x & 0), (x * 0), (x | -1) and (x % 1) as constant */
1790 } else if (c2
&& (((op
== '*' || op
== '/' || op
== TOK_UDIV
||
1793 ((op
== '+' || op
== '-' || op
== '|' || op
== '^' ||
1794 op
== TOK_SHL
|| op
== TOK_SHR
|| op
== TOK_SAR
) &&
1798 /* filter out NOP operations like x*1, x-0, x&-1... */
1800 } else if (c2
&& (op
== '*' || op
== TOK_PDIV
|| op
== TOK_UDIV
)) {
1801 /* try to use shifts instead of muls or divs */
1802 if (l2
> 0 && (l2
& (l2
- 1)) == 0) {
1811 else if (op
== TOK_PDIV
)
1817 } else if (c2
&& (op
== '+' || op
== '-') &&
1818 (((vtop
[-1].r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == (VT_CONST
| VT_SYM
))
1819 || (vtop
[-1].r
& (VT_VALMASK
| VT_LVAL
)) == VT_LOCAL
)) {
1820 /* symbol + constant case */
1824 /* The backends can't always deal with addends to symbols
1825 larger than +-1<<31. Don't construct such. */
1832 /* call low level op generator */
1833 if (t1
== VT_LLONG
|| t2
== VT_LLONG
||
1834 (PTR_SIZE
== 8 && (t1
== VT_PTR
|| t2
== VT_PTR
)))
1842 /* generate a floating point operation with constant propagation */
1843 static void gen_opif(int op
)
1851 /* currently, we cannot do computations with forward symbols */
1852 c1
= (v1
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
1853 c2
= (v2
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
1855 if (v1
->type
.t
== VT_FLOAT
) {
1858 } else if (v1
->type
.t
== VT_DOUBLE
) {
1866 /* NOTE: we only do constant propagation if finite number (not
1867 NaN or infinity) (ANSI spec) */
1868 if (!ieee_finite(f1
) || !ieee_finite(f2
))
1872 case '+': f1
+= f2
; break;
1873 case '-': f1
-= f2
; break;
1874 case '*': f1
*= f2
; break;
1878 tcc_error("division by zero in constant");
1883 /* XXX: also handles tests ? */
1887 /* XXX: overflow test ? */
1888 if (v1
->type
.t
== VT_FLOAT
) {
1890 } else if (v1
->type
.t
== VT_DOUBLE
) {
1902 static int pointed_size(CType
*type
)
1905 return type_size(pointed_type(type
), &align
);
1908 static void vla_runtime_pointed_size(CType
*type
)
1911 vla_runtime_type_size(pointed_type(type
), &align
);
1914 static inline int is_null_pointer(SValue
*p
)
1916 if ((p
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) != VT_CONST
)
1918 return ((p
->type
.t
& VT_BTYPE
) == VT_INT
&& (uint32_t)p
->c
.i
== 0) ||
1919 ((p
->type
.t
& VT_BTYPE
) == VT_LLONG
&& p
->c
.i
== 0) ||
1920 ((p
->type
.t
& VT_BTYPE
) == VT_PTR
&&
1921 (PTR_SIZE
== 4 ? (uint32_t)p
->c
.i
== 0 : p
->c
.i
== 0));
1924 static inline int is_integer_btype(int bt
)
1926 return (bt
== VT_BYTE
|| bt
== VT_SHORT
||
1927 bt
== VT_INT
|| bt
== VT_LLONG
);
1930 /* check types for comparison or subtraction of pointers */
1931 static void check_comparison_pointer_types(SValue
*p1
, SValue
*p2
, int op
)
1933 CType
*type1
, *type2
, tmp_type1
, tmp_type2
;
1936 /* null pointers are accepted for all comparisons as gcc */
1937 if (is_null_pointer(p1
) || is_null_pointer(p2
))
1941 bt1
= type1
->t
& VT_BTYPE
;
1942 bt2
= type2
->t
& VT_BTYPE
;
1943 /* accept comparison between pointer and integer with a warning */
1944 if ((is_integer_btype(bt1
) || is_integer_btype(bt2
)) && op
!= '-') {
1945 if (op
!= TOK_LOR
&& op
!= TOK_LAND
)
1946 tcc_warning("comparison between pointer and integer");
1950 /* both must be pointers or implicit function pointers */
1951 if (bt1
== VT_PTR
) {
1952 type1
= pointed_type(type1
);
1953 } else if (bt1
!= VT_FUNC
)
1954 goto invalid_operands
;
1956 if (bt2
== VT_PTR
) {
1957 type2
= pointed_type(type2
);
1958 } else if (bt2
!= VT_FUNC
) {
1960 tcc_error("invalid operands to binary %s", get_tok_str(op
, NULL
));
1962 if ((type1
->t
& VT_BTYPE
) == VT_VOID
||
1963 (type2
->t
& VT_BTYPE
) == VT_VOID
)
1967 tmp_type1
.t
&= ~(VT_DEFSIGN
| VT_UNSIGNED
| VT_CONSTANT
| VT_VOLATILE
);
1968 tmp_type2
.t
&= ~(VT_DEFSIGN
| VT_UNSIGNED
| VT_CONSTANT
| VT_VOLATILE
);
1969 if (!is_compatible_types(&tmp_type1
, &tmp_type2
)) {
1970 /* gcc-like error if '-' is used */
1972 goto invalid_operands
;
1974 tcc_warning("comparison of distinct pointer types lacks a cast");
1978 /* generic gen_op: handles types problems */
1979 ST_FUNC
void gen_op(int op
)
1981 int u
, t1
, t2
, bt1
, bt2
, t
;
1985 t1
= vtop
[-1].type
.t
;
1986 t2
= vtop
[0].type
.t
;
1987 bt1
= t1
& VT_BTYPE
;
1988 bt2
= t2
& VT_BTYPE
;
1990 if (bt1
== VT_STRUCT
|| bt2
== VT_STRUCT
) {
1991 tcc_error("operation on a struct");
1992 } else if (bt1
== VT_FUNC
|| bt2
== VT_FUNC
) {
1993 if (bt2
== VT_FUNC
) {
1994 mk_pointer(&vtop
->type
);
1997 if (bt1
== VT_FUNC
) {
1999 mk_pointer(&vtop
->type
);
2004 } else if (bt1
== VT_PTR
|| bt2
== VT_PTR
) {
2005 /* at least one operand is a pointer */
2006 /* relationnal op: must be both pointers */
2007 if (op
>= TOK_ULT
&& op
<= TOK_LOR
) {
2008 check_comparison_pointer_types(vtop
- 1, vtop
, op
);
2009 /* pointers are handled are unsigned */
2010 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
2011 t
= VT_LLONG
| VT_UNSIGNED
;
2013 t
= VT_INT
| VT_UNSIGNED
;
2017 /* if both pointers, then it must be the '-' op */
2018 if (bt1
== VT_PTR
&& bt2
== VT_PTR
) {
2020 tcc_error("cannot use pointers here");
2021 check_comparison_pointer_types(vtop
- 1, vtop
, op
);
2022 /* XXX: check that types are compatible */
2023 if (vtop
[-1].type
.t
& VT_VLA
) {
2024 vla_runtime_pointed_size(&vtop
[-1].type
);
2026 vpushi(pointed_size(&vtop
[-1].type
));
2030 /* set to integer type */
2031 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
2032 vtop
->type
.t
= VT_LLONG
;
2034 vtop
->type
.t
= VT_INT
;
2039 /* exactly one pointer : must be '+' or '-'. */
2040 if (op
!= '-' && op
!= '+')
2041 tcc_error("cannot use pointers here");
2042 /* Put pointer as first operand */
2043 if (bt2
== VT_PTR
) {
2045 t
= t1
, t1
= t2
, t2
= t
;
2048 if ((vtop
[0].type
.t
& VT_BTYPE
) == VT_LLONG
)
2049 /* XXX: truncate here because gen_opl can't handle ptr + long long */
2050 gen_cast(&int_type
);
2052 type1
= vtop
[-1].type
;
2053 type1
.t
&= ~VT_ARRAY
;
2054 if (vtop
[-1].type
.t
& VT_VLA
)
2055 vla_runtime_pointed_size(&vtop
[-1].type
);
2057 u
= pointed_size(&vtop
[-1].type
);
2059 tcc_error("unknown array element size");
2060 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
2063 /* XXX: cast to int ? (long long case) */
2069 /* #ifdef CONFIG_TCC_BCHECK
2070 The main reason to removing this code:
2077 fprintf(stderr, "v+i-j = %p\n", v+i-j);
2078 fprintf(stderr, "v+(i-j) = %p\n", v+(i-j));
2080 When this code is on. then the output looks like
2082 v+(i-j) = 0xbff84000
2084 /* if evaluating constant expression, no code should be
2085 generated, so no bound check */
2086 if (tcc_state
->do_bounds_check
&& !const_wanted
) {
2087 /* if bounded pointers, we generate a special code to
2094 gen_bounded_ptr_add();
2100 /* put again type if gen_opic() swaped operands */
2103 } else if (is_float(bt1
) || is_float(bt2
)) {
2104 /* compute bigger type and do implicit casts */
2105 if (bt1
== VT_LDOUBLE
|| bt2
== VT_LDOUBLE
) {
2107 } else if (bt1
== VT_DOUBLE
|| bt2
== VT_DOUBLE
) {
2112 /* floats can only be used for a few operations */
2113 if (op
!= '+' && op
!= '-' && op
!= '*' && op
!= '/' &&
2114 (op
< TOK_ULT
|| op
> TOK_GT
))
2115 tcc_error("invalid operands for binary operation");
2117 } else if (op
== TOK_SHR
|| op
== TOK_SAR
|| op
== TOK_SHL
) {
2118 t
= bt1
== VT_LLONG
? VT_LLONG
: VT_INT
;
2119 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
)) == (t
| VT_UNSIGNED
))
2122 } else if (bt1
== VT_LLONG
|| bt2
== VT_LLONG
) {
2123 /* cast to biggest op */
2125 /* convert to unsigned if it does not fit in a long long */
2126 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
)) == (VT_LLONG
| VT_UNSIGNED
) ||
2127 (t2
& (VT_BTYPE
| VT_UNSIGNED
)) == (VT_LLONG
| VT_UNSIGNED
))
2131 /* integer operations */
2133 /* convert to unsigned if it does not fit in an integer */
2134 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
)) == (VT_INT
| VT_UNSIGNED
) ||
2135 (t2
& (VT_BTYPE
| VT_UNSIGNED
)) == (VT_INT
| VT_UNSIGNED
))
2138 /* XXX: currently, some unsigned operations are explicit, so
2139 we modify them here */
2140 if (t
& VT_UNSIGNED
) {
2147 else if (op
== TOK_LT
)
2149 else if (op
== TOK_GT
)
2151 else if (op
== TOK_LE
)
2153 else if (op
== TOK_GE
)
2160 /* special case for shifts and long long: we keep the shift as
2162 if (op
== TOK_SHR
|| op
== TOK_SAR
|| op
== TOK_SHL
)
2169 if (op
>= TOK_ULT
&& op
<= TOK_GT
) {
2170 /* relationnal op: the result is an int */
2171 vtop
->type
.t
= VT_INT
;
2176 // Make sure that we have converted to an rvalue:
2177 if (vtop
->r
& VT_LVAL
)
2178 gv(is_float(vtop
->type
.t
& VT_BTYPE
) ? RC_FLOAT
: RC_INT
);
2181 #ifndef TCC_TARGET_ARM
2182 /* generic itof for unsigned long long case */
2183 static void gen_cvt_itof1(int t
)
2185 #ifdef TCC_TARGET_ARM64
2188 if ((vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
)) ==
2189 (VT_LLONG
| VT_UNSIGNED
)) {
2192 vpush_global_sym(&func_old_type
, TOK___floatundisf
);
2193 #if LDOUBLE_SIZE != 8
2194 else if (t
== VT_LDOUBLE
)
2195 vpush_global_sym(&func_old_type
, TOK___floatundixf
);
2198 vpush_global_sym(&func_old_type
, TOK___floatundidf
);
2202 vtop
->r
= reg_fret(t
);
2210 /* generic ftoi for unsigned long long case */
2211 static void gen_cvt_ftoi1(int t
)
2213 #ifdef TCC_TARGET_ARM64
2218 if (t
== (VT_LLONG
| VT_UNSIGNED
)) {
2219 /* not handled natively */
2220 st
= vtop
->type
.t
& VT_BTYPE
;
2222 vpush_global_sym(&func_old_type
, TOK___fixunssfdi
);
2223 #if LDOUBLE_SIZE != 8
2224 else if (st
== VT_LDOUBLE
)
2225 vpush_global_sym(&func_old_type
, TOK___fixunsxfdi
);
2228 vpush_global_sym(&func_old_type
, TOK___fixunsdfdi
);
2233 vtop
->r2
= REG_LRET
;
2240 /* force char or short cast */
2241 static void force_charshort_cast(int t
)
2245 /* XXX: add optimization if lvalue : just change type and offset */
2250 if (t
& VT_UNSIGNED
) {
2251 vpushi((1 << bits
) - 1);
2254 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
)
2260 /* result must be signed or the SAR is converted to an SHL
2261 This was not the case when "t" was a signed short
2262 and the last value on the stack was an unsigned int */
2263 vtop
->type
.t
&= ~VT_UNSIGNED
;
2269 /* cast 'vtop' to 'type'. Casting to bitfields is forbidden. */
2270 static void gen_cast(CType
*type
)
2272 int sbt
, dbt
, sf
, df
, c
, p
;
2274 /* special delayed cast for char/short */
2275 /* XXX: in some cases (multiple cascaded casts), it may still
2277 if (vtop
->r
& VT_MUSTCAST
) {
2278 vtop
->r
&= ~VT_MUSTCAST
;
2279 force_charshort_cast(vtop
->type
.t
);
2282 /* bitfields first get cast to ints */
2283 if (vtop
->type
.t
& VT_BITFIELD
) {
2287 dbt
= type
->t
& (VT_BTYPE
| VT_UNSIGNED
);
2288 sbt
= vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
);
2293 c
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2294 p
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == (VT_CONST
| VT_SYM
);
2296 /* constant case: we can do it now */
2297 /* XXX: in ISOC, cannot do it if error in convert */
2298 if (sbt
== VT_FLOAT
)
2299 vtop
->c
.ld
= vtop
->c
.f
;
2300 else if (sbt
== VT_DOUBLE
)
2301 vtop
->c
.ld
= vtop
->c
.d
;
2304 if ((sbt
& VT_BTYPE
) == VT_LLONG
) {
2305 if ((sbt
& VT_UNSIGNED
) || !(vtop
->c
.i
>> 63))
2306 vtop
->c
.ld
= vtop
->c
.i
;
2308 vtop
->c
.ld
= -(long double)-vtop
->c
.i
;
2310 if ((sbt
& VT_UNSIGNED
) || !(vtop
->c
.i
>> 31))
2311 vtop
->c
.ld
= (uint32_t)vtop
->c
.i
;
2313 vtop
->c
.ld
= -(long double)-(uint32_t)vtop
->c
.i
;
2316 if (dbt
== VT_FLOAT
)
2317 vtop
->c
.f
= (float)vtop
->c
.ld
;
2318 else if (dbt
== VT_DOUBLE
)
2319 vtop
->c
.d
= (double)vtop
->c
.ld
;
2320 } else if (sf
&& dbt
== (VT_LLONG
|VT_UNSIGNED
)) {
2321 vtop
->c
.i
= vtop
->c
.ld
;
2322 } else if (sf
&& dbt
== VT_BOOL
) {
2323 vtop
->c
.i
= (vtop
->c
.ld
!= 0);
2326 vtop
->c
.i
= vtop
->c
.ld
;
2327 else if (sbt
== (VT_LLONG
|VT_UNSIGNED
))
2329 else if (sbt
& VT_UNSIGNED
)
2330 vtop
->c
.i
= (uint32_t)vtop
->c
.i
;
2331 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
2332 else if (sbt
== VT_PTR
)
2335 else if (sbt
!= VT_LLONG
)
2336 vtop
->c
.i
= ((uint32_t)vtop
->c
.i
|
2337 -(vtop
->c
.i
& 0x80000000));
2339 if (dbt
== (VT_LLONG
|VT_UNSIGNED
))
2341 else if (dbt
== VT_BOOL
)
2342 vtop
->c
.i
= (vtop
->c
.i
!= 0);
2343 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
2344 else if (dbt
== VT_PTR
)
2347 else if (dbt
!= VT_LLONG
) {
2348 uint32_t m
= ((dbt
& VT_BTYPE
) == VT_BYTE
? 0xff :
2349 (dbt
& VT_BTYPE
) == VT_SHORT
? 0xffff :
2352 if (!(dbt
& VT_UNSIGNED
))
2353 vtop
->c
.i
|= -(vtop
->c
.i
& ((m
>> 1) + 1));
2356 } else if (p
&& dbt
== VT_BOOL
) {
2360 /* non constant case: generate code */
2362 /* convert from fp to fp */
2365 /* convert int to fp */
2368 /* convert fp to int */
2369 if (dbt
== VT_BOOL
) {
2373 /* we handle char/short/etc... with generic code */
2374 if (dbt
!= (VT_INT
| VT_UNSIGNED
) &&
2375 dbt
!= (VT_LLONG
| VT_UNSIGNED
) &&
2379 if (dbt
== VT_INT
&& (type
->t
& (VT_BTYPE
| VT_UNSIGNED
)) != dbt
) {
2380 /* additional cast for char/short... */
2385 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
2386 } else if ((dbt
& VT_BTYPE
) == VT_LLONG
) {
2387 if ((sbt
& VT_BTYPE
) != VT_LLONG
) {
2388 /* scalar to long long */
2389 /* machine independent conversion */
2391 /* generate high word */
2392 if (sbt
== (VT_INT
| VT_UNSIGNED
)) {
2396 if (sbt
== VT_PTR
) {
2397 /* cast from pointer to int before we apply
2398 shift operation, which pointers don't support*/
2399 gen_cast(&int_type
);
2405 /* patch second register */
2406 vtop
[-1].r2
= vtop
->r
;
2410 } else if ((dbt
& VT_BTYPE
) == VT_LLONG
||
2411 (dbt
& VT_BTYPE
) == VT_PTR
||
2412 (dbt
& VT_BTYPE
) == VT_FUNC
) {
2413 if ((sbt
& VT_BTYPE
) != VT_LLONG
&&
2414 (sbt
& VT_BTYPE
) != VT_PTR
&&
2415 (sbt
& VT_BTYPE
) != VT_FUNC
) {
2416 /* need to convert from 32bit to 64bit */
2418 if (sbt
!= (VT_INT
| VT_UNSIGNED
)) {
2419 #if defined(TCC_TARGET_ARM64)
2421 #elif defined(TCC_TARGET_X86_64)
2423 /* x86_64 specific: movslq */
2425 o(0xc0 + (REG_VALUE(r
) << 3) + REG_VALUE(r
));
2432 } else if (dbt
== VT_BOOL
) {
2433 /* scalar to bool */
2436 } else if ((dbt
& VT_BTYPE
) == VT_BYTE
||
2437 (dbt
& VT_BTYPE
) == VT_SHORT
) {
2438 if (sbt
== VT_PTR
) {
2439 vtop
->type
.t
= VT_INT
;
2440 tcc_warning("nonportable conversion from pointer to char/short");
2442 force_charshort_cast(dbt
);
2443 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
2444 } else if ((dbt
& VT_BTYPE
) == VT_INT
) {
2446 if ((sbt
& VT_BTYPE
) == VT_LLONG
) {
2447 /* from long long: just take low order word */
2451 /* if lvalue and single word type, nothing to do because
2452 the lvalue already contains the real type size (see
2453 VT_LVAL_xxx constants) */
2457 } else if ((dbt
& VT_BTYPE
) == VT_PTR
&& !(vtop
->r
& VT_LVAL
)) {
2458 /* if we are casting between pointer types,
2459 we must update the VT_LVAL_xxx size */
2460 vtop
->r
= (vtop
->r
& ~VT_LVAL_TYPE
)
2461 | (lvalue_type(type
->ref
->type
.t
) & VT_LVAL_TYPE
);
2466 /* return type size as known at compile time. Put alignment at 'a' */
2467 ST_FUNC
int type_size(CType
*type
, int *a
)
2472 bt
= type
->t
& VT_BTYPE
;
2473 if (bt
== VT_STRUCT
) {
2478 } else if (bt
== VT_PTR
) {
2479 if (type
->t
& VT_ARRAY
) {
2483 ts
= type_size(&s
->type
, a
);
2485 if (ts
< 0 && s
->c
< 0)
2493 } else if (bt
== VT_LDOUBLE
) {
2495 return LDOUBLE_SIZE
;
2496 } else if (bt
== VT_DOUBLE
|| bt
== VT_LLONG
) {
2497 #ifdef TCC_TARGET_I386
2498 #ifdef TCC_TARGET_PE
2503 #elif defined(TCC_TARGET_ARM)
2513 } else if (bt
== VT_INT
|| bt
== VT_FLOAT
) {
2516 } else if (bt
== VT_SHORT
) {
2519 } else if (bt
== VT_QLONG
|| bt
== VT_QFLOAT
) {
2522 } else if (bt
== VT_ENUM
) {
2524 /* Enums might be incomplete, so don't just return '4' here. */
2525 return type
->ref
->c
;
2527 /* char, void, function, _Bool */
2533 /* push type size as known at runtime time on top of value stack. Put
2535 ST_FUNC
void vla_runtime_type_size(CType
*type
, int *a
)
2537 if (type
->t
& VT_VLA
) {
2538 type_size(&type
->ref
->type
, a
);
2539 vset(&int_type
, VT_LOCAL
|VT_LVAL
, type
->ref
->c
);
2541 vpushi(type_size(type
, a
));
2545 static void vla_sp_restore(void) {
2546 if (vlas_in_scope
) {
2547 gen_vla_sp_restore(vla_sp_loc
);
2551 static void vla_sp_restore_root(void) {
2552 if (vlas_in_scope
) {
2553 gen_vla_sp_restore(vla_sp_root_loc
);
2557 /* return the pointed type of t */
2558 static inline CType
*pointed_type(CType
*type
)
2560 return &type
->ref
->type
;
2563 /* modify type so that its it is a pointer to type. */
2564 ST_FUNC
void mk_pointer(CType
*type
)
2567 s
= sym_push(SYM_FIELD
, type
, 0, -1);
2568 type
->t
= VT_PTR
| (type
->t
& ~VT_TYPE
);
2572 /* compare function types. OLD functions match any new functions */
2573 static int is_compatible_func(CType
*type1
, CType
*type2
)
2579 if (!is_compatible_types(&s1
->type
, &s2
->type
))
2581 /* check func_call */
2582 if (s1
->a
.func_call
!= s2
->a
.func_call
)
2584 /* XXX: not complete */
2585 if (s1
->c
== FUNC_OLD
|| s2
->c
== FUNC_OLD
)
2589 while (s1
!= NULL
) {
2592 if (!is_compatible_parameter_types(&s1
->type
, &s2
->type
))
2602 /* return true if type1 and type2 are the same. If unqualified is
2603 true, qualifiers on the types are ignored.
2605 - enums are not checked as gcc __builtin_types_compatible_p ()
2607 static int compare_types(CType
*type1
, CType
*type2
, int unqualified
)
2611 t1
= type1
->t
& VT_TYPE
;
2612 t2
= type2
->t
& VT_TYPE
;
2614 /* strip qualifiers before comparing */
2615 t1
&= ~(VT_CONSTANT
| VT_VOLATILE
);
2616 t2
&= ~(VT_CONSTANT
| VT_VOLATILE
);
2618 /* Default Vs explicit signedness only matters for char */
2619 if ((t1
& VT_BTYPE
) != VT_BYTE
) {
2623 /* An enum is compatible with (unsigned) int. Ideally we would
2624 store the enums signedness in type->ref.a.<some_bit> and
2625 only accept unsigned enums with unsigned int and vice versa.
2626 But one of our callers (gen_assign_cast) always strips VT_UNSIGNED
2627 from pointer target types, so we can't add it here either. */
2628 if ((t1
& VT_BTYPE
) == VT_ENUM
) {
2630 if (type1
->ref
->a
.unsigned_enum
)
2633 if ((t2
& VT_BTYPE
) == VT_ENUM
) {
2635 if (type2
->ref
->a
.unsigned_enum
)
2638 /* XXX: bitfields ? */
2641 /* test more complicated cases */
2642 bt1
= t1
& VT_BTYPE
;
2643 if (bt1
== VT_PTR
) {
2644 type1
= pointed_type(type1
);
2645 type2
= pointed_type(type2
);
2646 return is_compatible_types(type1
, type2
);
2647 } else if (bt1
== VT_STRUCT
) {
2648 return (type1
->ref
== type2
->ref
);
2649 } else if (bt1
== VT_FUNC
) {
2650 return is_compatible_func(type1
, type2
);
2656 /* return true if type1 and type2 are exactly the same (including
2659 static int is_compatible_types(CType
*type1
, CType
*type2
)
2661 return compare_types(type1
,type2
,0);
2664 /* return true if type1 and type2 are the same (ignoring qualifiers).
2666 static int is_compatible_parameter_types(CType
*type1
, CType
*type2
)
2668 return compare_types(type1
,type2
,1);
2671 /* print a type. If 'varstr' is not NULL, then the variable is also
2672 printed in the type */
2674 /* XXX: add array and function pointers */
2675 static void type_to_str(char *buf
, int buf_size
,
2676 CType
*type
, const char *varstr
)
2683 t
= type
->t
& VT_TYPE
;
2686 if (t
& VT_CONSTANT
)
2687 pstrcat(buf
, buf_size
, "const ");
2688 if (t
& VT_VOLATILE
)
2689 pstrcat(buf
, buf_size
, "volatile ");
2690 if ((t
& (VT_DEFSIGN
| VT_UNSIGNED
)) == (VT_DEFSIGN
| VT_UNSIGNED
))
2691 pstrcat(buf
, buf_size
, "unsigned ");
2692 else if (t
& VT_DEFSIGN
)
2693 pstrcat(buf
, buf_size
, "signed ");
2723 tstr
= "long double";
2725 pstrcat(buf
, buf_size
, tstr
);
2729 if (bt
== VT_STRUCT
)
2733 pstrcat(buf
, buf_size
, tstr
);
2734 v
= type
->ref
->v
& ~SYM_STRUCT
;
2735 if (v
>= SYM_FIRST_ANOM
)
2736 pstrcat(buf
, buf_size
, "<anonymous>");
2738 pstrcat(buf
, buf_size
, get_tok_str(v
, NULL
));
2742 type_to_str(buf
, buf_size
, &s
->type
, varstr
);
2743 pstrcat(buf
, buf_size
, "(");
2745 while (sa
!= NULL
) {
2746 type_to_str(buf1
, sizeof(buf1
), &sa
->type
, NULL
);
2747 pstrcat(buf
, buf_size
, buf1
);
2750 pstrcat(buf
, buf_size
, ", ");
2752 pstrcat(buf
, buf_size
, ")");
2757 snprintf(buf1
, sizeof(buf1
), "%s[%ld]", varstr
? varstr
: "", s
->c
);
2758 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
2761 pstrcpy(buf1
, sizeof(buf1
), "*");
2762 if (t
& VT_CONSTANT
)
2763 pstrcat(buf1
, buf_size
, "const ");
2764 if (t
& VT_VOLATILE
)
2765 pstrcat(buf1
, buf_size
, "volatile ");
2767 pstrcat(buf1
, sizeof(buf1
), varstr
);
2768 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
2772 pstrcat(buf
, buf_size
, " ");
2773 pstrcat(buf
, buf_size
, varstr
);
2778 /* verify type compatibility to store vtop in 'dt' type, and generate
2780 static void gen_assign_cast(CType
*dt
)
2782 CType
*st
, *type1
, *type2
, tmp_type1
, tmp_type2
;
2783 char buf1
[256], buf2
[256];
2786 st
= &vtop
->type
; /* source type */
2787 dbt
= dt
->t
& VT_BTYPE
;
2788 sbt
= st
->t
& VT_BTYPE
;
2789 if (sbt
== VT_VOID
|| dbt
== VT_VOID
) {
2790 if (sbt
== VT_VOID
&& dbt
== VT_VOID
)
2792 It is Ok if both are void
2798 gcc accepts this program
2801 tcc_error("cannot cast from/to void");
2803 if (dt
->t
& VT_CONSTANT
)
2804 tcc_warning("assignment of read-only location");
2807 /* special cases for pointers */
2808 /* '0' can also be a pointer */
2809 if (is_null_pointer(vtop
))
2811 /* accept implicit pointer to integer cast with warning */
2812 if (is_integer_btype(sbt
)) {
2813 tcc_warning("assignment makes pointer from integer without a cast");
2816 type1
= pointed_type(dt
);
2817 /* a function is implicitely a function pointer */
2818 if (sbt
== VT_FUNC
) {
2819 if ((type1
->t
& VT_BTYPE
) != VT_VOID
&&
2820 !is_compatible_types(pointed_type(dt
), st
))
2821 tcc_warning("assignment from incompatible pointer type");
2826 type2
= pointed_type(st
);
2827 if ((type1
->t
& VT_BTYPE
) == VT_VOID
||
2828 (type2
->t
& VT_BTYPE
) == VT_VOID
) {
2829 /* void * can match anything */
2831 /* exact type match, except for qualifiers */
2834 tmp_type1
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
2835 tmp_type2
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
2836 if (!is_compatible_types(&tmp_type1
, &tmp_type2
)) {
2837 /* Like GCC don't warn by default for merely changes
2838 in pointer target signedness. Do warn for different
2839 base types, though, in particular for unsigned enums
2840 and signed int targets. */
2841 if ((tmp_type1
.t
& (VT_DEFSIGN
| VT_UNSIGNED
)) !=
2842 (tmp_type2
.t
& (VT_DEFSIGN
| VT_UNSIGNED
)) &&
2843 (tmp_type1
.t
& VT_BTYPE
) == (tmp_type2
.t
& VT_BTYPE
))
2846 tcc_warning("assignment from incompatible pointer type");
2849 /* check const and volatile */
2850 if ((!(type1
->t
& VT_CONSTANT
) && (type2
->t
& VT_CONSTANT
)) ||
2851 (!(type1
->t
& VT_VOLATILE
) && (type2
->t
& VT_VOLATILE
)))
2852 tcc_warning("assignment discards qualifiers from pointer target type");
2858 if (sbt
== VT_PTR
|| sbt
== VT_FUNC
) {
2859 tcc_warning("assignment makes integer from pointer without a cast");
2860 } else if (sbt
== VT_STRUCT
) {
2861 goto case_VT_STRUCT
;
2863 /* XXX: more tests */
2869 tmp_type1
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
2870 tmp_type2
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
2871 if (!is_compatible_types(&tmp_type1
, &tmp_type2
)) {
2873 type_to_str(buf1
, sizeof(buf1
), st
, NULL
);
2874 type_to_str(buf2
, sizeof(buf2
), dt
, NULL
);
2875 tcc_error("cannot cast '%s' to '%s'", buf1
, buf2
);
2883 /* store vtop in lvalue pushed on stack */
2884 ST_FUNC
void vstore(void)
2886 int sbt
, dbt
, ft
, r
, t
, size
, align
, bit_size
, bit_pos
, rc
, delayed_cast
;
2888 ft
= vtop
[-1].type
.t
;
2889 sbt
= vtop
->type
.t
& VT_BTYPE
;
2890 dbt
= ft
& VT_BTYPE
;
2891 if ((((sbt
== VT_INT
|| sbt
== VT_SHORT
) && dbt
== VT_BYTE
) ||
2892 (sbt
== VT_INT
&& dbt
== VT_SHORT
))
2893 && !(vtop
->type
.t
& VT_BITFIELD
)) {
2894 /* optimize char/short casts */
2895 delayed_cast
= VT_MUSTCAST
;
2896 vtop
->type
.t
= (ft
& VT_TYPE
& ~VT_BITFIELD
&
2897 ((1 << VT_STRUCT_SHIFT
) - 1));
2898 /* XXX: factorize */
2899 if (ft
& VT_CONSTANT
)
2900 tcc_warning("assignment of read-only location");
2903 if (!(ft
& VT_BITFIELD
))
2904 gen_assign_cast(&vtop
[-1].type
);
2907 if (sbt
== VT_STRUCT
) {
2908 /* if structure, only generate pointer */
2909 /* structure assignment : generate memcpy */
2910 /* XXX: optimize if small size */
2911 size
= type_size(&vtop
->type
, &align
);
2915 vtop
->type
.t
= VT_PTR
;
2918 /* address of memcpy() */
2921 vpush_global_sym(&func_old_type
, TOK_memcpy8
);
2922 else if(!(align
& 3))
2923 vpush_global_sym(&func_old_type
, TOK_memcpy4
);
2926 /* Use memmove, rather than memcpy, as dest and src may be same: */
2927 vpush_global_sym(&func_old_type
, TOK_memmove
);
2932 vtop
->type
.t
= VT_PTR
;
2938 /* leave source on stack */
2939 } else if (ft
& VT_BITFIELD
) {
2940 /* bitfield store handling */
2942 /* save lvalue as expression result (example: s.b = s.a = n;) */
2943 vdup(), vtop
[-1] = vtop
[-2];
2945 bit_pos
= (ft
>> VT_STRUCT_SHIFT
) & 0x3f;
2946 bit_size
= (ft
>> (VT_STRUCT_SHIFT
+ 6)) & 0x3f;
2947 /* remove bit field info to avoid loops */
2948 vtop
[-1].type
.t
= ft
& ~VT_BITFIELD
& ((1 << VT_STRUCT_SHIFT
) - 1);
2950 if((ft
& VT_BTYPE
) == VT_BOOL
) {
2951 gen_cast(&vtop
[-1].type
);
2952 vtop
[-1].type
.t
= (vtop
[-1].type
.t
& ~VT_BTYPE
) | (VT_BYTE
| VT_UNSIGNED
);
2955 /* duplicate destination */
2957 vtop
[-1] = vtop
[-2];
2959 /* mask and shift source */
2960 if((ft
& VT_BTYPE
) != VT_BOOL
) {
2961 if((ft
& VT_BTYPE
) == VT_LLONG
) {
2962 vpushll((1ULL << bit_size
) - 1ULL);
2964 vpushi((1 << bit_size
) - 1);
2970 /* load destination, mask and or with source */
2972 if((ft
& VT_BTYPE
) == VT_LLONG
) {
2973 vpushll(~(((1ULL << bit_size
) - 1ULL) << bit_pos
));
2975 vpushi(~(((1 << bit_size
) - 1) << bit_pos
));
2981 /* ... and discard */
2985 #ifdef CONFIG_TCC_BCHECK
2986 /* bound check case */
2987 if (vtop
[-1].r
& VT_MUSTBOUND
) {
2996 #ifdef TCC_TARGET_X86_64
2997 if ((ft
& VT_BTYPE
) == VT_LDOUBLE
) {
2999 } else if ((ft
& VT_BTYPE
) == VT_QFLOAT
) {
3004 r
= gv(rc
); /* generate value */
3005 /* if lvalue was saved on stack, must read it */
3006 if ((vtop
[-1].r
& VT_VALMASK
) == VT_LLOCAL
) {
3008 t
= get_reg(RC_INT
);
3009 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
3014 sv
.r
= VT_LOCAL
| VT_LVAL
;
3015 sv
.c
.i
= vtop
[-1].c
.i
;
3017 vtop
[-1].r
= t
| VT_LVAL
;
3019 /* two word case handling : store second register at word + 4 (or +8 for x86-64) */
3020 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
3021 if (((ft
& VT_BTYPE
) == VT_QLONG
) || ((ft
& VT_BTYPE
) == VT_QFLOAT
)) {
3022 int addr_type
= VT_LLONG
, load_size
= 8, load_type
= ((vtop
->type
.t
& VT_BTYPE
) == VT_QLONG
) ? VT_LLONG
: VT_DOUBLE
;
3024 if ((ft
& VT_BTYPE
) == VT_LLONG
) {
3025 int addr_type
= VT_INT
, load_size
= 4, load_type
= VT_INT
;
3027 vtop
[-1].type
.t
= load_type
;
3030 /* convert to int to increment easily */
3031 vtop
->type
.t
= addr_type
;
3037 vtop
[-1].type
.t
= load_type
;
3038 /* XXX: it works because r2 is spilled last ! */
3039 store(vtop
->r2
, vtop
- 1);
3045 vtop
--; /* NOT vpop() because on x86 it would flush the fp stack */
3046 vtop
->r
|= delayed_cast
;
3050 /* post defines POST/PRE add. c is the token ++ or -- */
3051 ST_FUNC
void inc(int post
, int c
)
3054 vdup(); /* save lvalue */
3056 gv_dup(); /* duplicate value */
3061 vpushi(c
- TOK_MID
);
3063 vstore(); /* store value */
3065 vpop(); /* if post op, return saved value */
3068 ST_FUNC
void parse_mult_str (CString
*astr
, const char *msg
)
3070 /* read the string */
3074 while (tok
== TOK_STR
) {
3075 /* XXX: add \0 handling too ? */
3076 cstr_cat(astr
, tokc
.str
.data
, -1);
3079 cstr_ccat(astr
, '\0');
3082 /* If I is >= 1 and a power of two, returns log2(i)+1.
3083 If I is 0 returns 0. */
3084 static int exact_log2p1(int i
)
3089 for (ret
= 1; i
>= 1 << 8; ret
+= 8)
3100 /* Parse GNUC __attribute__ extension. Currently, the following
3101 extensions are recognized:
3102 - aligned(n) : set data/function alignment.
3103 - packed : force data alignment to 1
3104 - section(x) : generate data/code in this section.
3105 - unused : currently ignored, but may be used someday.
3106 - regparm(n) : pass function parameters in registers (i386 only)
3108 static void parse_attribute(AttributeDef
*ad
)
3113 while (tok
== TOK_ATTRIBUTE1
|| tok
== TOK_ATTRIBUTE2
) {
3117 while (tok
!= ')') {
3118 if (tok
< TOK_IDENT
)
3119 expect("attribute name");
3126 parse_mult_str(&astr
, "section name");
3127 ad
->section
= find_section(tcc_state
, (char *)astr
.data
);
3134 parse_mult_str(&astr
, "alias(\"target\")");
3135 ad
->alias_target
= /* save string as token, for later */
3136 tok_alloc((char*)astr
.data
, astr
.size
-1)->tok
;
3140 case TOK_VISIBILITY1
:
3141 case TOK_VISIBILITY2
:
3143 parse_mult_str(&astr
,
3144 "visibility(\"default|hidden|internal|protected\")");
3145 if (!strcmp (astr
.data
, "default"))
3146 ad
->a
.visibility
= STV_DEFAULT
;
3147 else if (!strcmp (astr
.data
, "hidden"))
3148 ad
->a
.visibility
= STV_HIDDEN
;
3149 else if (!strcmp (astr
.data
, "internal"))
3150 ad
->a
.visibility
= STV_INTERNAL
;
3151 else if (!strcmp (astr
.data
, "protected"))
3152 ad
->a
.visibility
= STV_PROTECTED
;
3154 expect("visibility(\"default|hidden|internal|protected\")");
3163 if (n
<= 0 || (n
& (n
- 1)) != 0)
3164 tcc_error("alignment must be a positive power of two");
3169 ad
->a
.aligned
= exact_log2p1(n
);
3170 if (n
!= 1 << (ad
->a
.aligned
- 1))
3171 tcc_error("alignment of %d is larger than implemented", n
);
3183 /* currently, no need to handle it because tcc does not
3184 track unused objects */
3188 /* currently, no need to handle it because tcc does not
3189 track unused objects */
3194 ad
->a
.func_call
= FUNC_CDECL
;
3199 ad
->a
.func_call
= FUNC_STDCALL
;
3201 #ifdef TCC_TARGET_I386
3211 ad
->a
.func_call
= FUNC_FASTCALL1
+ n
- 1;
3217 ad
->a
.func_call
= FUNC_FASTCALLW
;
3224 ad
->a
.mode
= VT_LLONG
+ 1;
3227 ad
->a
.mode
= VT_BYTE
+ 1;
3230 ad
->a
.mode
= VT_SHORT
+ 1;
3234 ad
->a
.mode
= VT_INT
+ 1;
3237 tcc_warning("__mode__(%s) not supported\n", get_tok_str(tok
, NULL
));
3244 ad
->a
.func_export
= 1;
3247 ad
->a
.func_import
= 1;
3250 if (tcc_state
->warn_unsupported
)
3251 tcc_warning("'%s' attribute ignored", get_tok_str(t
, NULL
));
3252 /* skip parameters */
3254 int parenthesis
= 0;
3258 else if (tok
== ')')
3261 } while (parenthesis
&& tok
!= -1);
3274 static Sym
* find_field (CType
*type
, int v
)
3278 while ((s
= s
->next
) != NULL
) {
3279 if ((s
->v
& SYM_FIELD
) &&
3280 (s
->type
.t
& VT_BTYPE
) == VT_STRUCT
&&
3281 (s
->v
& ~SYM_FIELD
) >= SYM_FIRST_ANOM
) {
3282 Sym
*ret
= find_field (&s
->type
, v
);
3292 static void struct_add_offset (Sym
*s
, int offset
)
3294 while ((s
= s
->next
) != NULL
) {
3295 if ((s
->v
& SYM_FIELD
) &&
3296 (s
->type
.t
& VT_BTYPE
) == VT_STRUCT
&&
3297 (s
->v
& ~SYM_FIELD
) >= SYM_FIRST_ANOM
) {
3298 struct_add_offset(s
->type
.ref
, offset
);
3304 static void struct_layout(CType
*type
, AttributeDef
*ad
)
3306 int align
, maxalign
, offset
, c
, bit_pos
, bt
, prevbt
, prev_bit_size
;
3307 int pcc
= !tcc_state
->ms_bitfields
;
3310 maxalign
= 1 << (ad
->a
.aligned
- 1);
3316 prevbt
= VT_STRUCT
; /* make it never match */
3318 for (f
= type
->ref
->next
; f
; f
= f
->next
) {
3319 int typealign
, bit_size
;
3320 int size
= type_size(&f
->type
, &typealign
);
3321 if (f
->type
.t
& VT_BITFIELD
)
3322 bit_size
= (f
->type
.t
>> (VT_STRUCT_SHIFT
+ 6)) & 0x3f;
3325 if (bit_size
== 0 && pcc
) {
3326 /* Zero-width bit-fields in PCC mode aren't affected
3327 by any packing (attribute or pragma). */
3329 } else if (f
->r
> 1) {
3331 } else if (ad
->a
.packed
|| f
->r
== 1) {
3333 /* Packed fields or packed records don't let the base type
3334 influence the records type alignment. */
3339 if (type
->ref
->type
.t
!= TOK_STRUCT
) {
3340 if (pcc
&& bit_size
>= 0)
3341 size
= (bit_size
+ 7) >> 3;
3342 /* Bit position is already zero from our caller. */
3346 } else if (bit_size
< 0) {
3347 int addbytes
= pcc
? (bit_pos
+ 7) >> 3 : 0;
3350 c
= (c
+ addbytes
+ align
- 1) & -align
;
3356 /* A bit-field. Layout is more complicated. There are two
3357 options TCC implements: PCC compatible and MS compatible
3358 (PCC compatible is what GCC uses for almost all targets).
3359 In PCC layout the overall size of the struct (in c) is
3360 _excluding_ the current run of bit-fields (that is,
3361 there's at least additional bit_pos bits after c). In
3362 MS layout c does include the current run of bit-fields.
3364 This matters for calculating the natural alignment buckets
3367 /* 'align' will be used to influence records alignment,
3368 so it's the max of specified and type alignment, except
3369 in certain cases that depend on the mode. */
3370 if (align
< typealign
)
3373 /* In PCC layout a non-packed bit-field is placed adjacent
3374 to the preceding bit-fields, except if it would overflow
3375 its container (depending on base type) or it's a zero-width
3376 bit-field. Packed non-zero-width bit-fields always are
3378 int ofs
= (c
* 8 + bit_pos
) % (typealign
* 8);
3379 int ofs2
= ofs
+ bit_size
+ (typealign
* 8) - 1;
3380 if (bit_size
== 0 ||
3381 ((typealign
!= 1 || size
== 1) &&
3382 (ofs2
/ (typealign
* 8)) > (size
/typealign
))) {
3383 c
= (c
+ ((bit_pos
+ 7) >> 3) + typealign
- 1) & -typealign
;
3385 } else while (bit_pos
+ bit_size
> size
* 8) {
3387 bit_pos
-= size
* 8;
3390 /* In PCC layout named bit-fields influence the alignment
3391 of the containing struct using the base types alignment,
3392 except for packed fields (which here have correct
3393 align/typealign). */
3394 if ((f
->v
& SYM_FIRST_ANOM
))
3397 bt
= f
->type
.t
& VT_BTYPE
;
3398 if ((bit_pos
+ bit_size
> size
* 8) ||
3399 (bit_size
> 0) == (bt
!= prevbt
)) {
3400 c
= (c
+ typealign
- 1) & -typealign
;
3403 /* In MS bitfield mode a bit-field run always uses
3404 at least as many bits as the underlying type.
3405 To start a new run it's also required that this
3406 or the last bit-field had non-zero width. */
3407 if (bit_size
|| prev_bit_size
)
3410 /* In MS layout the records alignment is normally
3411 influenced by the field, except for a zero-width
3412 field at the start of a run (but by further zero-width
3413 fields it is again). */
3414 if (bit_size
== 0 && prevbt
!= bt
)
3417 prev_bit_size
= bit_size
;
3419 f
->type
.t
= (f
->type
.t
& ~(0x3f << VT_STRUCT_SHIFT
))
3420 | (bit_pos
<< VT_STRUCT_SHIFT
);
3421 bit_pos
+= bit_size
;
3422 if (pcc
&& bit_pos
>= size
* 8) {
3424 bit_pos
-= size
* 8;
3427 if (align
> maxalign
)
3430 printf("set field %s offset=%d c=%d",
3431 get_tok_str(f
->v
& ~SYM_FIELD
, NULL
), offset
, c
);
3432 if (f
->type
.t
& VT_BITFIELD
) {
3433 printf(" pos=%d size=%d",
3434 (f
->type
.t
>> VT_STRUCT_SHIFT
) & 0x3f,
3435 (f
->type
.t
>> (VT_STRUCT_SHIFT
+ 6)) & 0x3f);
3440 if (f
->v
& SYM_FIRST_ANOM
&& (f
->type
.t
& VT_BTYPE
) == VT_STRUCT
) {
3442 /* An anonymous struct/union. Adjust member offsets
3443 to reflect the real offset of our containing struct.
3444 Also set the offset of this anon member inside
3445 the outer struct to be zero. Via this it
3446 works when accessing the field offset directly
3447 (from base object), as well as when recursing
3448 members in initializer handling. */
3449 int v2
= f
->type
.ref
->v
;
3450 if (!(v2
& SYM_FIELD
) &&
3451 (v2
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
3453 /* This happens only with MS extensions. The
3454 anon member has a named struct type, so it
3455 potentially is shared with other references.
3456 We need to unshare members so we can modify
3459 f
->type
.ref
= sym_push(anon_sym
++ | SYM_FIELD
,
3460 &f
->type
.ref
->type
, 0,
3462 pps
= &f
->type
.ref
->next
;
3463 while ((ass
= ass
->next
) != NULL
) {
3464 *pps
= sym_push(ass
->v
, &ass
->type
, 0, ass
->c
);
3465 pps
= &((*pps
)->next
);
3469 struct_add_offset(f
->type
.ref
, offset
);
3477 /* store size and alignment */
3478 type
->ref
->c
= (c
+ (pcc
? (bit_pos
+ 7) >> 3 : 0)
3479 + maxalign
- 1) & -maxalign
;
3480 type
->ref
->r
= maxalign
;
3483 /* enum/struct/union declaration. u is either VT_ENUM or VT_STRUCT */
3484 static void struct_decl(CType
*type
, AttributeDef
*ad
, int u
)
3486 int a
, v
, size
, align
, flexible
, alignoverride
;
3488 int bit_size
, bsize
, bt
;
3493 a
= tok
; /* save decl type */
3495 if (tok
== TOK_ATTRIBUTE1
|| tok
== TOK_ATTRIBUTE2
)
3496 parse_attribute(ad
);
3500 /* struct already defined ? return it */
3502 expect("struct/union/enum name");
3504 if (s
&& (s
->scope
== local_scope
|| (tok
!= '{' && tok
!= ';'))) {
3506 tcc_error("redefinition of '%s'", get_tok_str(v
, NULL
));
3512 /* Record the original enum/struct/union token. */
3515 /* we put an undefined size for struct/union */
3516 s
= sym_push(v
| SYM_STRUCT
, &type1
, 0, -1);
3517 s
->r
= 0; /* default alignment is zero as gcc */
3518 /* put struct/union/enum name in type */
3526 tcc_error("struct/union/enum already defined");
3527 /* cannot be empty */
3529 /* non empty enums are not allowed */
3530 if (a
== TOK_ENUM
) {
3534 CType
*t
= &int_type
;
3537 expect("identifier");
3539 if (ss
&& !local_stack
)
3540 tcc_error("redefinition of enumerator '%s'",
3541 get_tok_str(v
, NULL
));
3545 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
3548 /* We really want to support long long enums
3549 on i386 as well, but the Sym structure only
3550 holds a 'long' for associated constants,
3551 and enlarging it would bump its size (no
3552 available padding). So punt for now. */
3558 if (c
!= (int)c
&& (unsigned long)c
!= (unsigned int)c
)
3559 seen_wide
= 1, t
= &size_type
;
3560 /* enum symbols have static storage */
3561 ss
= sym_push(v
, t
, VT_CONST
, c
);
3562 ss
->type
.t
|= VT_STATIC
;
3567 /* NOTE: we accept a trailing comma */
3572 s
->a
.unsigned_enum
= 1;
3573 s
->c
= type_size(seen_wide
? &size_type
: &int_type
, &align
);
3578 while (tok
!= '}') {
3579 if (!parse_btype(&btype
, &ad1
)) {
3585 tcc_error("flexible array member '%s' not at the end of struct",
3586 get_tok_str(v
, NULL
));
3591 type_decl(&type1
, &ad1
, &v
, TYPE_DIRECT
| TYPE_ABSTRACT
);
3593 if ((type1
.t
& VT_BTYPE
) != VT_STRUCT
)
3594 expect("identifier");
3596 int v
= btype
.ref
->v
;
3597 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
3598 if (tcc_state
->ms_extensions
== 0)
3599 expect("identifier");
3603 if (type_size(&type1
, &align
) < 0) {
3604 if ((a
== TOK_STRUCT
) && (type1
.t
& VT_ARRAY
) && c
)
3607 tcc_error("field '%s' has incomplete type",
3608 get_tok_str(v
, NULL
));
3610 if ((type1
.t
& VT_BTYPE
) == VT_FUNC
||
3611 (type1
.t
& (VT_TYPEDEF
| VT_STATIC
| VT_EXTERN
| VT_INLINE
)))
3612 tcc_error("invalid type for '%s'",
3613 get_tok_str(v
, NULL
));
3617 bit_size
= expr_const();
3618 /* XXX: handle v = 0 case for messages */
3620 tcc_error("negative width in bit-field '%s'",
3621 get_tok_str(v
, NULL
));
3622 if (v
&& bit_size
== 0)
3623 tcc_error("zero width for bit-field '%s'",
3624 get_tok_str(v
, NULL
));
3625 if (tok
== TOK_ATTRIBUTE1
|| tok
== TOK_ATTRIBUTE2
)
3626 parse_attribute(&ad1
);
3628 size
= type_size(&type1
, &align
);
3629 /* Only remember non-default alignment. */
3631 if (ad1
.a
.aligned
) {
3632 int speca
= 1 << (ad1
.a
.aligned
- 1);
3633 alignoverride
= speca
;
3634 } else if (ad1
.a
.packed
|| ad
->a
.packed
) {
3636 } else if (*tcc_state
->pack_stack_ptr
) {
3637 if (align
> *tcc_state
->pack_stack_ptr
)
3638 alignoverride
= *tcc_state
->pack_stack_ptr
;
3640 if (bit_size
>= 0) {
3641 bt
= type1
.t
& VT_BTYPE
;
3648 tcc_error("bitfields must have scalar type");
3650 if (bit_size
> bsize
) {
3651 tcc_error("width of '%s' exceeds its type",
3652 get_tok_str(v
, NULL
));
3653 } else if (bit_size
== bsize
) {
3654 /* no need for bit fields */
3657 type1
.t
|= VT_BITFIELD
|
3658 (0 << VT_STRUCT_SHIFT
) |
3659 (bit_size
<< (VT_STRUCT_SHIFT
+ 6));
3662 if (v
!= 0 || (type1
.t
& VT_BTYPE
) == VT_STRUCT
) {
3663 /* Remember we've seen a real field to check
3664 for placement of flexible array member. */
3667 /* If member is a struct or bit-field, enforce
3668 placing into the struct (as anonymous). */
3670 ((type1
.t
& VT_BTYPE
) == VT_STRUCT
||
3675 ss
= sym_push(v
| SYM_FIELD
, &type1
, alignoverride
, 0);
3679 if (tok
== ';' || tok
== TOK_EOF
)
3686 if (tok
== TOK_ATTRIBUTE1
|| tok
== TOK_ATTRIBUTE2
)
3687 parse_attribute(ad
);
3688 struct_layout(type
, ad
);
3693 /* return 1 if basic type is a type size (short, long, long long) */
3694 ST_FUNC
int is_btype_size(int bt
)
3696 return bt
== VT_SHORT
|| bt
== VT_LONG
|| bt
== VT_LLONG
;
3699 /* Add type qualifiers to a type. If the type is an array then the qualifiers
3700 are added to the element type, copied because it could be a typedef. */
3701 static void parse_btype_qualify(CType
*type
, int qualifiers
)
3703 while (type
->t
& VT_ARRAY
) {
3704 type
->ref
= sym_push(SYM_FIELD
, &type
->ref
->type
, 0, type
->ref
->c
);
3705 type
= &type
->ref
->type
;
3707 type
->t
|= qualifiers
;
3710 /* return 0 if no type declaration. otherwise, return the basic type
3713 static int parse_btype(CType
*type
, AttributeDef
*ad
)
3715 int t
, u
, bt_size
, complete
, type_found
, typespec_found
, g
;
3719 memset(ad
, 0, sizeof(AttributeDef
));
3727 /* currently, we really ignore extension */
3738 tcc_error("too many basic types");
3740 bt_size
= is_btype_size (u
& VT_BTYPE
);
3741 if (u
== VT_INT
|| (!bt_size
&& !(t
& VT_TYPEDEF
)))
3756 if ((t
& VT_BTYPE
) == VT_DOUBLE
) {
3757 #ifndef TCC_TARGET_PE
3758 t
= (t
& ~VT_BTYPE
) | VT_LDOUBLE
;
3760 } else if ((t
& VT_BTYPE
) == VT_LONG
) {
3761 t
= (t
& ~VT_BTYPE
) | VT_LLONG
;
3767 #ifdef TCC_TARGET_ARM64
3769 /* GCC's __uint128_t appears in some Linux header files. Make it a
3770 synonym for long double to get the size and alignment right. */
3782 if ((t
& VT_BTYPE
) == VT_LONG
) {
3783 #ifdef TCC_TARGET_PE
3784 t
= (t
& ~VT_BTYPE
) | VT_DOUBLE
;
3786 t
= (t
& ~VT_BTYPE
) | VT_LDOUBLE
;
3794 struct_decl(&type1
, ad
, VT_ENUM
);
3797 type
->ref
= type1
.ref
;
3801 struct_decl(&type1
, ad
, VT_STRUCT
);
3804 /* type modifiers */
3809 parse_btype_qualify(type
, VT_CONSTANT
);
3817 parse_btype_qualify(type
, VT_VOLATILE
);
3824 if ((t
& (VT_DEFSIGN
|VT_UNSIGNED
)) == (VT_DEFSIGN
|VT_UNSIGNED
))
3825 tcc_error("signed and unsigned modifier");
3838 if ((t
& (VT_DEFSIGN
|VT_UNSIGNED
)) == VT_DEFSIGN
)
3839 tcc_error("signed and unsigned modifier");
3840 t
|= VT_DEFSIGN
| VT_UNSIGNED
;
3856 if (t
& (VT_EXTERN
|VT_STATIC
|VT_TYPEDEF
) & ~g
)
3857 tcc_error("multiple storage classes");
3868 /* GNUC attribute */
3869 case TOK_ATTRIBUTE1
:
3870 case TOK_ATTRIBUTE2
:
3871 parse_attribute(ad
);
3874 t
= (t
& ~VT_BTYPE
) | u
;
3882 parse_expr_type(&type1
);
3883 /* remove all storage modifiers except typedef */
3884 type1
.t
&= ~(VT_STORAGE
&~VT_TYPEDEF
);
3890 if (!s
|| !(s
->type
.t
& VT_TYPEDEF
))
3893 type
->t
= ((s
->type
.t
& ~VT_TYPEDEF
) |
3894 (t
& ~(VT_CONSTANT
| VT_VOLATILE
)));
3895 type
->ref
= s
->type
.ref
;
3896 if (t
& (VT_CONSTANT
| VT_VOLATILE
))
3897 parse_btype_qualify(type
, t
& (VT_CONSTANT
| VT_VOLATILE
));
3901 /* get attributes from typedef */
3902 if (0 == ad
->a
.aligned
)
3903 ad
->a
.aligned
= s
->a
.aligned
;
3904 if (0 == ad
->a
.func_call
)
3905 ad
->a
.func_call
= s
->a
.func_call
;
3906 ad
->a
.packed
|= s
->a
.packed
;
3915 if (tcc_state
->char_is_unsigned
) {
3916 if ((t
& (VT_DEFSIGN
|VT_BTYPE
)) == VT_BYTE
)
3920 /* long is never used as type */
3921 if ((t
& VT_BTYPE
) == VT_LONG
)
3922 #if (!defined TCC_TARGET_X86_64 && !defined TCC_TARGET_ARM64) || \
3923 defined TCC_TARGET_PE
3924 t
= (t
& ~VT_BTYPE
) | VT_INT
;
3926 t
= (t
& ~VT_BTYPE
) | VT_LLONG
;
3932 /* convert a function parameter type (array to pointer and function to
3933 function pointer) */
3934 static inline void convert_parameter_type(CType
*pt
)
3936 /* remove const and volatile qualifiers (XXX: const could be used
3937 to indicate a const function parameter */
3938 pt
->t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
3939 /* array must be transformed to pointer according to ANSI C */
3941 if ((pt
->t
& VT_BTYPE
) == VT_FUNC
) {
3946 ST_FUNC
void parse_asm_str(CString
*astr
)
3949 parse_mult_str(astr
, "string constant");
3952 /* Parse an asm label and return the token */
3953 static int asm_label_instr(void)
3959 parse_asm_str(&astr
);
3962 printf("asm_alias: \"%s\"\n", (char *)astr
.data
);
3964 v
= tok_alloc(astr
.data
, astr
.size
- 1)->tok
;
3969 static void post_type(CType
*type
, AttributeDef
*ad
, int storage
)
3971 int n
, l
, t1
, arg_size
, align
;
3972 Sym
**plast
, *s
, *first
;
3977 /* function declaration */
3985 /* read param name and compute offset */
3986 if (l
!= FUNC_OLD
) {
3987 if (!parse_btype(&pt
, &ad1
)) {
3989 tcc_error("invalid type");
3996 if ((pt
.t
& VT_BTYPE
) == VT_VOID
&& tok
== ')')
3998 type_decl(&pt
, &ad1
, &n
, TYPE_DIRECT
| TYPE_ABSTRACT
);
3999 if ((pt
.t
& VT_BTYPE
) == VT_VOID
)
4000 tcc_error("parameter declared as void");
4001 arg_size
+= (type_size(&pt
, &align
) + PTR_SIZE
- 1) / PTR_SIZE
;
4006 expect("identifier");
4010 convert_parameter_type(&pt
);
4011 s
= sym_push(n
| SYM_FIELD
, &pt
, 0, 0);
4017 if (l
== FUNC_NEW
&& tok
== TOK_DOTS
) {
4024 /* if no parameters, then old type prototype */
4028 /* NOTE: const is ignored in returned type as it has a special
4029 meaning in gcc / C++ */
4030 type
->t
&= ~VT_CONSTANT
;
4031 /* some ancient pre-K&R C allows a function to return an array
4032 and the array brackets to be put after the arguments, such
4033 that "int c()[]" means something like "int[] c()" */
4036 skip(']'); /* only handle simple "[]" */
4039 /* we push a anonymous symbol which will contain the function prototype */
4040 ad
->a
.func_args
= arg_size
;
4041 s
= sym_push(SYM_FIELD
, type
, 0, l
);
4046 } else if (tok
== '[') {
4047 int saved_nocode_wanted
= nocode_wanted
;
4048 /* array definition */
4050 if (tok
== TOK_RESTRICT1
)
4055 if (!local_stack
|| (storage
& VT_STATIC
))
4056 vpushi(expr_const());
4058 /* VLAs (which can only happen with local_stack && !VT_STATIC)
4059 length must always be evaluated, even under nocode_wanted,
4060 so that its size slot is initialized (e.g. under sizeof
4065 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
4068 tcc_error("invalid array size");
4070 if (!is_integer_btype(vtop
->type
.t
& VT_BTYPE
))
4071 tcc_error("size of variable length array should be an integer");
4076 /* parse next post type */
4077 post_type(type
, ad
, storage
);
4078 if (type
->t
== VT_FUNC
)
4079 tcc_error("declaration of an array of functions");
4080 t1
|= type
->t
& VT_VLA
;
4083 loc
-= type_size(&int_type
, &align
);
4087 vla_runtime_type_size(type
, &align
);
4089 vset(&int_type
, VT_LOCAL
|VT_LVAL
, n
);
4095 nocode_wanted
= saved_nocode_wanted
;
4097 /* we push an anonymous symbol which will contain the array
4099 s
= sym_push(SYM_FIELD
, type
, 0, n
);
4100 type
->t
= (t1
? VT_VLA
: VT_ARRAY
) | VT_PTR
;
4105 /* Parse a type declaration (except basic type), and return the type
4106 in 'type'. 'td' is a bitmask indicating which kind of type decl is
4107 expected. 'type' should contain the basic type. 'ad' is the
4108 attribute definition of the basic type. It can be modified by
4111 static void type_decl(CType
*type
, AttributeDef
*ad
, int *v
, int td
)
4114 CType type1
, *type2
;
4115 int qualifiers
, storage
;
4117 while (tok
== '*') {
4125 qualifiers
|= VT_CONSTANT
;
4130 qualifiers
|= VT_VOLATILE
;
4136 /* XXX: clarify attribute handling */
4137 case TOK_ATTRIBUTE1
:
4138 case TOK_ATTRIBUTE2
:
4139 parse_attribute(ad
);
4143 type
->t
|= qualifiers
;
4146 /* recursive type */
4147 /* XXX: incorrect if abstract type for functions (e.g. 'int ()') */
4148 type1
.t
= 0; /* XXX: same as int */
4151 /* XXX: this is not correct to modify 'ad' at this point, but
4152 the syntax is not clear */
4153 if (tok
== TOK_ATTRIBUTE1
|| tok
== TOK_ATTRIBUTE2
)
4154 parse_attribute(ad
);
4155 type_decl(&type1
, ad
, v
, td
);
4158 /* type identifier */
4159 if (tok
>= TOK_IDENT
&& (td
& TYPE_DIRECT
)) {
4163 if (!(td
& TYPE_ABSTRACT
))
4164 expect("identifier");
4168 storage
= type
->t
& VT_STORAGE
;
4169 type
->t
&= ~VT_STORAGE
;
4170 post_type(type
, ad
, storage
);
4172 if (tok
== TOK_ATTRIBUTE1
|| tok
== TOK_ATTRIBUTE2
)
4173 parse_attribute(ad
);
4177 /* append type at the end of type1 */
4191 /* compute the lvalue VT_LVAL_xxx needed to match type t. */
4192 ST_FUNC
int lvalue_type(int t
)
4197 if (bt
== VT_BYTE
|| bt
== VT_BOOL
)
4199 else if (bt
== VT_SHORT
)
4203 if (t
& VT_UNSIGNED
)
4204 r
|= VT_LVAL_UNSIGNED
;
4208 /* indirection with full error checking and bound check */
4209 ST_FUNC
void indir(void)
4211 if ((vtop
->type
.t
& VT_BTYPE
) != VT_PTR
) {
4212 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FUNC
)
4216 if (vtop
->r
& VT_LVAL
)
4218 vtop
->type
= *pointed_type(&vtop
->type
);
4219 /* Arrays and functions are never lvalues */
4220 if (!(vtop
->type
.t
& VT_ARRAY
) && !(vtop
->type
.t
& VT_VLA
)
4221 && (vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
) {
4222 vtop
->r
|= lvalue_type(vtop
->type
.t
);
4223 /* if bound checking, the referenced pointer must be checked */
4224 #ifdef CONFIG_TCC_BCHECK
4225 if (tcc_state
->do_bounds_check
)
4226 vtop
->r
|= VT_MUSTBOUND
;
4231 /* pass a parameter to a function and do type checking and casting */
4232 static void gfunc_param_typed(Sym
*func
, Sym
*arg
)
4237 func_type
= func
->c
;
4238 if (func_type
== FUNC_OLD
||
4239 (func_type
== FUNC_ELLIPSIS
&& arg
== NULL
)) {
4240 /* default casting : only need to convert float to double */
4241 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FLOAT
) {
4244 } else if (vtop
->type
.t
& VT_BITFIELD
) {
4245 type
.t
= vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
);
4246 type
.ref
= vtop
->type
.ref
;
4249 } else if (arg
== NULL
) {
4250 tcc_error("too many arguments to function");
4253 type
.t
&= ~VT_CONSTANT
; /* need to do that to avoid false warning */
4254 gen_assign_cast(&type
);
4258 /* parse an expression of the form '(type)' or '(expr)' and return its
4260 static void parse_expr_type(CType
*type
)
4266 if (parse_btype(type
, &ad
)) {
4267 type_decl(type
, &ad
, &n
, TYPE_ABSTRACT
);
4274 static void parse_type(CType
*type
)
4279 if (!parse_btype(type
, &ad
)) {
4282 type_decl(type
, &ad
, &n
, TYPE_ABSTRACT
);
4285 static void vpush_tokc(int t
)
4290 vsetc(&type
, VT_CONST
, &tokc
);
4293 ST_FUNC
void unary(void)
4295 int n
, t
, align
, size
, r
, sizeof_caller
;
4300 sizeof_caller
= in_sizeof
;
4302 /* XXX: GCC 2.95.3 does not generate a table although it should be
4316 vpush_tokc(VT_INT
| VT_UNSIGNED
);
4320 vpush_tokc(VT_LLONG
);
4324 vpush_tokc(VT_LLONG
| VT_UNSIGNED
);
4328 vpush_tokc(VT_FLOAT
);
4332 vpush_tokc(VT_DOUBLE
);
4336 vpush_tokc(VT_LDOUBLE
);
4339 case TOK___FUNCTION__
:
4341 goto tok_identifier
;
4347 /* special function name identifier */
4348 len
= strlen(funcname
) + 1;
4349 /* generate char[len] type */
4354 vpush_ref(&type
, data_section
, data_section
->data_offset
, len
);
4355 ptr
= section_ptr_add(data_section
, len
);
4356 memcpy(ptr
, funcname
, len
);
4361 #ifdef TCC_TARGET_PE
4362 t
= VT_SHORT
| VT_UNSIGNED
;
4368 /* string parsing */
4371 if (tcc_state
->warn_write_strings
)
4376 memset(&ad
, 0, sizeof(AttributeDef
));
4377 decl_initializer_alloc(&type
, &ad
, VT_CONST
, 2, 0, 0);
4382 if (parse_btype(&type
, &ad
)) {
4383 type_decl(&type
, &ad
, &n
, TYPE_ABSTRACT
);
4385 /* check ISOC99 compound literal */
4387 /* data is allocated locally by default */
4392 /* all except arrays are lvalues */
4393 if (!(type
.t
& VT_ARRAY
))
4394 r
|= lvalue_type(type
.t
);
4395 memset(&ad
, 0, sizeof(AttributeDef
));
4396 decl_initializer_alloc(&type
, &ad
, r
, 1, 0, 0);
4398 if (sizeof_caller
) {
4405 } else if (tok
== '{') {
4406 int saved_nocode_wanted
= nocode_wanted
;
4408 tcc_error("expected constant");
4409 /* save all registers */
4411 /* statement expression : we do not accept break/continue
4412 inside as GCC does. We do retain the nocode_wanted state,
4413 as statement expressions can't ever be entered from the
4414 outside, so any reactivation of code emission (from labels
4415 or loop heads) can be disabled again after the end of it. */
4416 block(NULL
, NULL
, 1);
4417 nocode_wanted
= saved_nocode_wanted
;
4432 /* functions names must be treated as function pointers,
4433 except for unary '&' and sizeof. Since we consider that
4434 functions are not lvalues, we only have to handle it
4435 there and in function calls. */
4436 /* arrays can also be used although they are not lvalues */
4437 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
&&
4438 !(vtop
->type
.t
& VT_ARRAY
))
4440 mk_pointer(&vtop
->type
);
4446 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
4448 boolean
.t
= VT_BOOL
;
4450 vtop
->c
.i
= !vtop
->c
.i
;
4451 } else if ((vtop
->r
& VT_VALMASK
) == VT_CMP
)
4455 vseti(VT_JMP
, gvtst(1, 0));
4467 if ((vtop
->type
.t
& VT_BTYPE
) == VT_PTR
)
4468 tcc_error("pointer not accepted for unary plus");
4469 /* In order to force cast, we add zero, except for floating point
4470 where we really need an noop (otherwise -0.0 will be transformed
4472 if (!is_float(vtop
->type
.t
)) {
4483 unary_type(&type
); // Perform a in_sizeof = 0;
4484 size
= type_size(&type
, &align
);
4485 if (t
== TOK_SIZEOF
) {
4486 if (!(type
.t
& VT_VLA
)) {
4488 tcc_error("sizeof applied to an incomplete type");
4491 vla_runtime_type_size(&type
, &align
);
4496 vtop
->type
.t
|= VT_UNSIGNED
;
4499 case TOK_builtin_expect
:
4501 /* __builtin_expect is a no-op for now */
4511 case TOK_builtin_types_compatible_p
:
4520 type1
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
4521 type2
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
4522 vpushi(is_compatible_types(&type1
, &type2
));
4525 case TOK_builtin_choose_expr
:
4552 case TOK_builtin_constant_p
:
4559 res
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
4566 case TOK_builtin_frame_address
:
4567 case TOK_builtin_return_address
:
4574 if (tok
!= TOK_CINT
) {
4575 tcc_error("%s only takes positive integers",
4576 tok1
== TOK_builtin_return_address
?
4577 "__builtin_return_address" :
4578 "__builtin_frame_address");
4580 level
= (uint32_t)tokc
.i
;
4585 vset(&type
, VT_LOCAL
, 0); /* local frame */
4587 mk_pointer(&vtop
->type
);
4588 indir(); /* -> parent frame */
4590 if (tok1
== TOK_builtin_return_address
) {
4591 // assume return address is just above frame pointer on stack
4594 mk_pointer(&vtop
->type
);
4599 #ifdef TCC_TARGET_X86_64
4600 #ifdef TCC_TARGET_PE
4601 case TOK_builtin_va_start
:
4609 if ((vtop
->r
& VT_VALMASK
) != VT_LOCAL
)
4610 tcc_error("__builtin_va_start expects a local variable");
4611 vtop
->r
&= ~VT_LVAL
;
4612 vtop
->type
= char_pointer_type
;
4618 case TOK_builtin_va_arg_types
:
4625 vpushi(classify_x86_64_va_arg(&type
));
4631 #ifdef TCC_TARGET_ARM64
4632 case TOK___va_start
: {
4642 vtop
->type
.t
= VT_VOID
;
4645 case TOK___va_arg
: {
4658 case TOK___arm64_clear_cache
: {
4667 vtop
->type
.t
= VT_VOID
;
4671 /* pre operations */
4682 t
= vtop
->type
.t
& VT_BTYPE
;
4684 /* In IEEE negate(x) isn't subtract(0,x), but rather
4688 vtop
->c
.f
= -1.0 * 0.0;
4689 else if (t
== VT_DOUBLE
)
4690 vtop
->c
.d
= -1.0 * 0.0;
4692 vtop
->c
.ld
= -1.0 * 0.0;
4700 goto tok_identifier
;
4702 /* allow to take the address of a label */
4703 if (tok
< TOK_UIDENT
)
4704 expect("label identifier");
4705 s
= label_find(tok
);
4707 s
= label_push(&global_label_stack
, tok
, LABEL_FORWARD
);
4709 if (s
->r
== LABEL_DECLARED
)
4710 s
->r
= LABEL_FORWARD
;
4713 s
->type
.t
= VT_VOID
;
4714 mk_pointer(&s
->type
);
4715 s
->type
.t
|= VT_STATIC
;
4717 vpushsym(&s
->type
, s
);
4721 // special qnan , snan and infinity values
4723 vpush64(VT_DOUBLE
, 0x7ff8000000000000ULL
);
4727 vpush64(VT_DOUBLE
, 0x7ff0000000000001ULL
);
4731 vpush64(VT_DOUBLE
, 0x7ff0000000000000ULL
);
4740 expect("identifier");
4743 const char *name
= get_tok_str(t
, NULL
);
4745 tcc_error("'%s' undeclared", name
);
4746 /* for simple function calls, we tolerate undeclared
4747 external reference to int() function */
4748 if (tcc_state
->warn_implicit_function_declaration
4749 #ifdef TCC_TARGET_PE
4750 /* people must be warned about using undeclared WINAPI functions
4751 (which usually start with uppercase letter) */
4752 || (name
[0] >= 'A' && name
[0] <= 'Z')
4755 tcc_warning("implicit declaration of function '%s'", name
);
4756 s
= external_global_sym(t
, &func_old_type
, 0);
4760 /* A symbol that has a register is a local register variable,
4761 which starts out as VT_LOCAL value. */
4762 if ((r
& VT_VALMASK
) < VT_CONST
)
4763 r
= (r
& ~VT_VALMASK
) | VT_LOCAL
;
4765 vset(&s
->type
, r
, s
->c
);
4766 /* Point to s as backpointer (even without r&VT_SYM).
4767 Will be used by at least the x86 inline asm parser for
4770 if (vtop
->r
& VT_SYM
) {
4776 /* post operations */
4778 if (tok
== TOK_INC
|| tok
== TOK_DEC
) {
4781 } else if (tok
== '.' || tok
== TOK_ARROW
|| tok
== TOK_CDOUBLE
) {
4784 if (tok
== TOK_ARROW
)
4786 qualifiers
= vtop
->type
.t
& (VT_CONSTANT
| VT_VOLATILE
);
4789 /* expect pointer on structure */
4790 if ((vtop
->type
.t
& VT_BTYPE
) != VT_STRUCT
)
4791 expect("struct or union");
4792 if (tok
== TOK_CDOUBLE
)
4793 expect("field name");
4795 if (tok
== TOK_CINT
|| tok
== TOK_CUINT
)
4796 expect("field name");
4797 s
= find_field(&vtop
->type
, tok
);
4799 tcc_error("field not found: %s", get_tok_str(tok
& ~SYM_FIELD
, &tokc
));
4800 /* add field offset to pointer */
4801 vtop
->type
= char_pointer_type
; /* change type to 'char *' */
4804 /* change type to field type, and set to lvalue */
4805 vtop
->type
= s
->type
;
4806 vtop
->type
.t
|= qualifiers
;
4807 /* an array is never an lvalue */
4808 if (!(vtop
->type
.t
& VT_ARRAY
)) {
4809 vtop
->r
|= lvalue_type(vtop
->type
.t
);
4810 #ifdef CONFIG_TCC_BCHECK
4811 /* if bound checking, the referenced pointer must be checked */
4812 if (tcc_state
->do_bounds_check
&& (vtop
->r
& VT_VALMASK
) != VT_LOCAL
)
4813 vtop
->r
|= VT_MUSTBOUND
;
4817 } else if (tok
== '[') {
4823 } else if (tok
== '(') {
4826 int nb_args
, ret_nregs
, ret_align
, regsize
, variadic
;
4829 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
) {
4830 /* pointer test (no array accepted) */
4831 if ((vtop
->type
.t
& (VT_BTYPE
| VT_ARRAY
)) == VT_PTR
) {
4832 vtop
->type
= *pointed_type(&vtop
->type
);
4833 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
)
4837 expect("function pointer");
4840 vtop
->r
&= ~VT_LVAL
; /* no lvalue */
4842 /* get return type */
4845 sa
= s
->next
; /* first parameter */
4846 nb_args
= regsize
= 0;
4848 /* compute first implicit argument if a structure is returned */
4849 if ((s
->type
.t
& VT_BTYPE
) == VT_STRUCT
) {
4850 variadic
= (s
->c
== FUNC_ELLIPSIS
);
4851 ret_nregs
= gfunc_sret(&s
->type
, variadic
, &ret
.type
,
4852 &ret_align
, ®size
);
4854 /* get some space for the returned structure */
4855 size
= type_size(&s
->type
, &align
);
4856 #ifdef TCC_TARGET_ARM64
4857 /* On arm64, a small struct is return in registers.
4858 It is much easier to write it to memory if we know
4859 that we are allowed to write some extra bytes, so
4860 round the allocated space up to a power of 2: */
4862 while (size
& (size
- 1))
4863 size
= (size
| (size
- 1)) + 1;
4865 loc
= (loc
- size
) & -align
;
4867 ret
.r
= VT_LOCAL
| VT_LVAL
;
4868 /* pass it as 'int' to avoid structure arg passing
4870 vseti(VT_LOCAL
, loc
);
4880 /* return in register */
4881 if (is_float(ret
.type
.t
)) {
4882 ret
.r
= reg_fret(ret
.type
.t
);
4883 #ifdef TCC_TARGET_X86_64
4884 if ((ret
.type
.t
& VT_BTYPE
) == VT_QFLOAT
)
4888 #ifndef TCC_TARGET_ARM64
4889 #ifdef TCC_TARGET_X86_64
4890 if ((ret
.type
.t
& VT_BTYPE
) == VT_QLONG
)
4892 if ((ret
.type
.t
& VT_BTYPE
) == VT_LLONG
)
4903 gfunc_param_typed(s
, sa
);
4913 tcc_error("too few arguments to function");
4915 gfunc_call(nb_args
);
4918 for (r
= ret
.r
+ ret_nregs
+ !ret_nregs
; r
-- > ret
.r
;) {
4919 vsetc(&ret
.type
, r
, &ret
.c
);
4920 vtop
->r2
= ret
.r2
; /* Loop only happens when r2 is VT_CONST */
4923 /* handle packed struct return */
4924 if (((s
->type
.t
& VT_BTYPE
) == VT_STRUCT
) && ret_nregs
) {
4927 size
= type_size(&s
->type
, &align
);
4928 /* We're writing whole regs often, make sure there's enough
4929 space. Assume register size is power of 2. */
4930 if (regsize
> align
)
4932 loc
= (loc
- size
) & -align
;
4936 vset(&ret
.type
, VT_LOCAL
| VT_LVAL
, addr
+ offset
);
4940 if (--ret_nregs
== 0)
4944 vset(&s
->type
, VT_LOCAL
| VT_LVAL
, addr
);
4952 ST_FUNC
void expr_prod(void)
4957 while (tok
== '*' || tok
== '/' || tok
== '%') {
4965 ST_FUNC
void expr_sum(void)
4970 while (tok
== '+' || tok
== '-') {
4978 static void expr_shift(void)
4983 while (tok
== TOK_SHL
|| tok
== TOK_SAR
) {
4991 static void expr_cmp(void)
4996 while ((tok
>= TOK_ULE
&& tok
<= TOK_GT
) ||
4997 tok
== TOK_ULT
|| tok
== TOK_UGE
) {
5005 static void expr_cmpeq(void)
5010 while (tok
== TOK_EQ
|| tok
== TOK_NE
) {
5018 static void expr_and(void)
5021 while (tok
== '&') {
5028 static void expr_xor(void)
5031 while (tok
== '^') {
5038 static void expr_or(void)
5041 while (tok
== '|') {
5048 static void expr_land(void)
5051 if (tok
== TOK_LAND
) {
5054 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
5062 while (tok
== TOK_LAND
) {
5070 gen_cast(&int_type
);
5078 if (tok
!= TOK_LAND
) {
5091 static void expr_lor(void)
5094 if (tok
== TOK_LOR
) {
5097 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
5105 while (tok
== TOK_LOR
) {
5113 gen_cast(&int_type
);
5121 if (tok
!= TOK_LOR
) {
5134 /* Assuming vtop is a value used in a conditional context
5135 (i.e. compared with zero) return 0 if it's false, 1 if
5136 true and -1 if it can't be statically determined. */
5137 static int condition_3way(void)
5140 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
&&
5141 (!(vtop
->r
& VT_SYM
) ||
5142 !(vtop
->sym
->type
.t
& VT_WEAK
))) {
5144 boolean
.t
= VT_BOOL
;
5153 static void expr_cond(void)
5155 int tt
, u
, r1
, r2
, rc
, t1
, t2
, bt1
, bt2
, islv
, c
, g
;
5157 CType type
, type1
, type2
;
5162 c
= condition_3way();
5163 g
= (tok
== ':' && gnu_ext
);
5165 /* needed to avoid having different registers saved in
5167 if (is_float(vtop
->type
.t
)) {
5169 #ifdef TCC_TARGET_X86_64
5170 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LDOUBLE
) {
5195 sv
= *vtop
; /* save value to handle it later */
5196 vtop
--; /* no vpop so that FP stack is not flushed */
5214 bt1
= t1
& VT_BTYPE
;
5216 bt2
= t2
& VT_BTYPE
;
5217 /* cast operands to correct type according to ISOC rules */
5218 if (is_float(bt1
) || is_float(bt2
)) {
5219 if (bt1
== VT_LDOUBLE
|| bt2
== VT_LDOUBLE
) {
5220 type
.t
= VT_LDOUBLE
;
5222 } else if (bt1
== VT_DOUBLE
|| bt2
== VT_DOUBLE
) {
5227 } else if (bt1
== VT_LLONG
|| bt2
== VT_LLONG
) {
5228 /* cast to biggest op */
5230 /* convert to unsigned if it does not fit in a long long */
5231 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
)) == (VT_LLONG
| VT_UNSIGNED
) ||
5232 (t2
& (VT_BTYPE
| VT_UNSIGNED
)) == (VT_LLONG
| VT_UNSIGNED
))
5233 type
.t
|= VT_UNSIGNED
;
5234 } else if (bt1
== VT_PTR
|| bt2
== VT_PTR
) {
5235 /* If one is a null ptr constant the result type
5237 if (is_null_pointer (vtop
))
5239 else if (is_null_pointer (&sv
))
5241 /* XXX: test pointer compatibility, C99 has more elaborate
5245 } else if (bt1
== VT_FUNC
|| bt2
== VT_FUNC
) {
5246 /* XXX: test function pointer compatibility */
5247 type
= bt1
== VT_FUNC
? type1
: type2
;
5248 } else if (bt1
== VT_STRUCT
|| bt2
== VT_STRUCT
) {
5249 /* XXX: test structure compatibility */
5250 type
= bt1
== VT_STRUCT
? type1
: type2
;
5251 } else if (bt1
== VT_VOID
|| bt2
== VT_VOID
) {
5252 /* NOTE: as an extension, we accept void on only one side */
5255 /* integer operations */
5257 /* convert to unsigned if it does not fit in an integer */
5258 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
)) == (VT_INT
| VT_UNSIGNED
) ||
5259 (t2
& (VT_BTYPE
| VT_UNSIGNED
)) == (VT_INT
| VT_UNSIGNED
))
5260 type
.t
|= VT_UNSIGNED
;
5262 /* keep structs lvalue by transforming `(expr ? a : b)` to `*(expr ? &a : &b)` so
5263 that `(expr ? a : b).mem` does not error with "lvalue expected" */
5264 islv
= (vtop
->r
& VT_LVAL
) && (sv
.r
& VT_LVAL
) && VT_STRUCT
== (type
.t
& VT_BTYPE
);
5267 /* now we convert second operand */
5271 mk_pointer(&vtop
->type
);
5273 } else if (VT_STRUCT
== (vtop
->type
.t
& VT_BTYPE
))
5278 if (is_float(type
.t
)) {
5280 #ifdef TCC_TARGET_X86_64
5281 if ((type
.t
& VT_BTYPE
) == VT_LDOUBLE
) {
5285 } else if ((type
.t
& VT_BTYPE
) == VT_LLONG
) {
5286 /* for long longs, we use fixed registers to avoid having
5287 to handle a complicated move */
5298 /* this is horrible, but we must also convert first
5304 mk_pointer(&vtop
->type
);
5306 } else if (VT_STRUCT
== (vtop
->type
.t
& VT_BTYPE
))
5312 move_reg(r2
, r1
, type
.t
);
5322 static void expr_eq(void)
5328 (tok
>= TOK_A_MOD
&& tok
<= TOK_A_DIV
) ||
5329 tok
== TOK_A_XOR
|| tok
== TOK_A_OR
||
5330 tok
== TOK_A_SHL
|| tok
== TOK_A_SAR
) {
5345 ST_FUNC
void gexpr(void)
5356 /* parse an expression and return its type without any side effect. */
5357 static void expr_type(CType
*type
)
5367 /* parse a unary expression and return its type without any side
5369 static void unary_type(CType
*type
)
5378 /* parse a constant expression and return value in vtop. */
5379 static void expr_const1(void)
5386 /* parse an integer constant and return its value. */
5387 static inline int64_t expr_const64(void)
5391 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) != VT_CONST
)
5392 expect("constant expression");
5398 /* parse an integer constant and return its value.
5399 Complain if it doesn't fit 32bit (signed or unsigned). */
5400 ST_FUNC
int expr_const(void)
5403 int64_t wc
= expr_const64();
5405 if (c
!= wc
&& (unsigned)c
!= wc
)
5406 tcc_error("constant exceeds 32 bit");
5410 /* return the label token if current token is a label, otherwise
5412 static int is_label(void)
5416 /* fast test first */
5417 if (tok
< TOK_UIDENT
)
5419 /* no need to save tokc because tok is an identifier */
5426 unget_tok(last_tok
);
5431 static void label_or_decl(int l
)
5435 /* fast test first */
5436 if (tok
>= TOK_UIDENT
)
5438 /* no need to save tokc because tok is an identifier */
5442 unget_tok(last_tok
);
5445 unget_tok(last_tok
);
5450 #ifndef TCC_TARGET_ARM64
5451 static void gfunc_return(CType
*func_type
)
5453 if ((func_type
->t
& VT_BTYPE
) == VT_STRUCT
) {
5454 CType type
, ret_type
;
5455 int ret_align
, ret_nregs
, regsize
;
5456 ret_nregs
= gfunc_sret(func_type
, func_var
, &ret_type
,
5457 &ret_align
, ®size
);
5458 if (0 == ret_nregs
) {
5459 /* if returning structure, must copy it to implicit
5460 first pointer arg location */
5463 vset(&type
, VT_LOCAL
| VT_LVAL
, func_vc
);
5466 /* copy structure value to pointer */
5469 /* returning structure packed into registers */
5470 int r
, size
, addr
, align
;
5471 size
= type_size(func_type
,&align
);
5472 if ((vtop
->r
!= (VT_LOCAL
| VT_LVAL
) ||
5473 (vtop
->c
.i
& (ret_align
-1)))
5474 && (align
& (ret_align
-1))) {
5475 loc
= (loc
- size
) & -ret_align
;
5478 vset(&type
, VT_LOCAL
| VT_LVAL
, addr
);
5482 vset(&ret_type
, VT_LOCAL
| VT_LVAL
, addr
);
5484 vtop
->type
= ret_type
;
5485 if (is_float(ret_type
.t
))
5486 r
= rc_fret(ret_type
.t
);
5497 if (--ret_nregs
== 0)
5499 /* We assume that when a structure is returned in multiple
5500 registers, their classes are consecutive values of the
5503 vtop
->c
.i
+= regsize
;
5507 } else if (is_float(func_type
->t
)) {
5508 gv(rc_fret(func_type
->t
));
5512 vtop
--; /* NOT vpop() because on x86 it would flush the fp stack */
5516 static int case_cmp(const void *pa
, const void *pb
)
5518 int64_t a
= (*(struct case_t
**) pa
)->v1
;
5519 int64_t b
= (*(struct case_t
**) pb
)->v1
;
5520 return a
< b
? -1 : a
> b
;
5523 static void gcase(struct case_t
**base
, int len
, int *bsym
)
5527 int ll
= (vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
;
5545 gtst_addr(0, p
->sym
); /* v1 <= x <= v2 */
5547 gcase(base
, len
/2, bsym
);
5548 if (cur_switch
->def_sym
)
5549 gjmp_addr(cur_switch
->def_sym
);
5551 *bsym
= gjmp(*bsym
);
5555 base
+= e
; len
-= e
;
5565 if (p
->v1
== p
->v2
) {
5567 gtst_addr(0, p
->sym
);
5577 gtst_addr(0, p
->sym
);
5583 static void block(int *bsym
, int *csym
, int is_expr
)
5585 int a
, b
, c
, d
, cond
;
5588 /* generate line number info */
5589 if (tcc_state
->do_debug
)
5590 tcc_debug_line(tcc_state
);
5593 /* default return value is (void) */
5595 vtop
->type
.t
= VT_VOID
;
5598 if (tok
== TOK_IF
) {
5600 int saved_nocode_wanted
= nocode_wanted
;
5605 cond
= condition_3way();
5611 nocode_wanted
|= 0x20000000;
5612 block(bsym
, csym
, 0);
5614 nocode_wanted
= saved_nocode_wanted
;
5616 if (c
== TOK_ELSE
) {
5621 nocode_wanted
|= 0x20000000;
5622 block(bsym
, csym
, 0);
5623 gsym(d
); /* patch else jmp */
5625 nocode_wanted
= saved_nocode_wanted
;
5628 } else if (tok
== TOK_WHILE
) {
5629 int saved_nocode_wanted
;
5630 nocode_wanted
&= ~0x20000000;
5640 saved_nocode_wanted
= nocode_wanted
;
5642 nocode_wanted
= saved_nocode_wanted
;
5647 } else if (tok
== '{') {
5649 int block_vla_sp_loc
= vla_sp_loc
, saved_vlas_in_scope
= vlas_in_scope
;
5652 /* record local declaration stack position */
5654 llabel
= local_label_stack
;
5657 /* handle local labels declarations */
5658 if (tok
== TOK_LABEL
) {
5661 if (tok
< TOK_UIDENT
)
5662 expect("label identifier");
5663 label_push(&local_label_stack
, tok
, LABEL_DECLARED
);
5673 while (tok
!= '}') {
5674 label_or_decl(VT_LOCAL
);
5678 block(bsym
, csym
, is_expr
);
5681 /* pop locally defined labels */
5682 label_pop(&local_label_stack
, llabel
);
5683 /* pop locally defined symbols */
5685 /* In the is_expr case (a statement expression is finished here),
5686 vtop might refer to symbols on the local_stack. Either via the
5687 type or via vtop->sym. We can't pop those nor any that in turn
5688 might be referred to. To make it easier we don't roll back
5689 any symbols in that case; some upper level call to block() will
5690 do that. We do have to remove such symbols from the lookup
5691 tables, though. sym_pop will do that. */
5692 sym_pop(&local_stack
, s
, is_expr
);
5694 /* Pop VLA frames and restore stack pointer if required */
5695 if (vlas_in_scope
> saved_vlas_in_scope
) {
5696 vla_sp_loc
= saved_vlas_in_scope
? block_vla_sp_loc
: vla_sp_root_loc
;
5699 vlas_in_scope
= saved_vlas_in_scope
;
5702 } else if (tok
== TOK_RETURN
) {
5706 gen_assign_cast(&func_vt
);
5707 gfunc_return(&func_vt
);
5710 /* jump unless last stmt in top-level block */
5711 if (tok
!= '}' || local_scope
!= 1)
5713 nocode_wanted
|= 0x20000000;
5714 } else if (tok
== TOK_BREAK
) {
5717 tcc_error("cannot break");
5718 *bsym
= gjmp(*bsym
);
5721 nocode_wanted
|= 0x20000000;
5722 } else if (tok
== TOK_CONTINUE
) {
5725 tcc_error("cannot continue");
5726 vla_sp_restore_root();
5727 *csym
= gjmp(*csym
);
5730 } else if (tok
== TOK_FOR
) {
5732 int saved_nocode_wanted
;
5733 nocode_wanted
&= ~0x20000000;
5739 /* c99 for-loop init decl? */
5740 if (!decl0(VT_LOCAL
, 1)) {
5741 /* no, regular for-loop init expr */
5767 saved_nocode_wanted
= nocode_wanted
;
5769 nocode_wanted
= saved_nocode_wanted
;
5774 sym_pop(&local_stack
, s
, 0);
5777 if (tok
== TOK_DO
) {
5778 int saved_nocode_wanted
;
5779 nocode_wanted
&= ~0x20000000;
5785 saved_nocode_wanted
= nocode_wanted
;
5793 nocode_wanted
= saved_nocode_wanted
;
5798 if (tok
== TOK_SWITCH
) {
5799 struct switch_t
*saved
, sw
;
5800 int saved_nocode_wanted
= nocode_wanted
;
5806 switchval
= *vtop
--;
5808 b
= gjmp(0); /* jump to first case */
5809 sw
.p
= NULL
; sw
.n
= 0; sw
.def_sym
= 0;
5813 nocode_wanted
= saved_nocode_wanted
;
5814 a
= gjmp(a
); /* add implicit break */
5817 qsort(sw
.p
, sw
.n
, sizeof(void*), case_cmp
);
5818 for (b
= 1; b
< sw
.n
; b
++)
5819 if (sw
.p
[b
- 1]->v2
>= sw
.p
[b
]->v1
)
5820 tcc_error("duplicate case value");
5821 /* Our switch table sorting is signed, so the compared
5822 value needs to be as well when it's 64bit. */
5823 if ((switchval
.type
.t
& VT_BTYPE
) == VT_LLONG
)
5824 switchval
.type
.t
&= ~VT_UNSIGNED
;
5826 gcase(sw
.p
, sw
.n
, &a
);
5829 gjmp_addr(sw
.def_sym
);
5830 dynarray_reset(&sw
.p
, &sw
.n
);
5835 if (tok
== TOK_CASE
) {
5836 struct case_t
*cr
= tcc_malloc(sizeof(struct case_t
));
5839 nocode_wanted
&= ~0x20000000;
5841 cr
->v1
= cr
->v2
= expr_const64();
5842 if (gnu_ext
&& tok
== TOK_DOTS
) {
5844 cr
->v2
= expr_const64();
5845 if (cr
->v2
< cr
->v1
)
5846 tcc_warning("empty case range");
5849 dynarray_add(&cur_switch
->p
, &cur_switch
->n
, cr
);
5852 goto block_after_label
;
5854 if (tok
== TOK_DEFAULT
) {
5859 if (cur_switch
->def_sym
)
5860 tcc_error("too many 'default'");
5861 cur_switch
->def_sym
= ind
;
5863 goto block_after_label
;
5865 if (tok
== TOK_GOTO
) {
5867 if (tok
== '*' && gnu_ext
) {
5871 if ((vtop
->type
.t
& VT_BTYPE
) != VT_PTR
)
5874 } else if (tok
>= TOK_UIDENT
) {
5875 s
= label_find(tok
);
5876 /* put forward definition if needed */
5878 s
= label_push(&global_label_stack
, tok
, LABEL_FORWARD
);
5880 if (s
->r
== LABEL_DECLARED
)
5881 s
->r
= LABEL_FORWARD
;
5883 vla_sp_restore_root();
5884 if (s
->r
& LABEL_FORWARD
)
5885 s
->jnext
= gjmp(s
->jnext
);
5887 gjmp_addr(s
->jnext
);
5890 expect("label identifier");
5893 } else if (tok
== TOK_ASM1
|| tok
== TOK_ASM2
|| tok
== TOK_ASM3
) {
5901 if (s
->r
== LABEL_DEFINED
)
5902 tcc_error("duplicate label '%s'", get_tok_str(s
->v
, NULL
));
5904 s
->r
= LABEL_DEFINED
;
5906 s
= label_push(&global_label_stack
, b
, LABEL_DEFINED
);
5910 /* we accept this, but it is a mistake */
5912 nocode_wanted
&= ~0x20000000;
5914 tcc_warning("deprecated use of label at end of compound statement");
5918 block(bsym
, csym
, is_expr
);
5921 /* expression case */
5936 #define EXPR_CONST 1
5939 static void parse_init_elem(int expr_type
)
5941 int saved_global_expr
;
5944 /* compound literals must be allocated globally in this case */
5945 saved_global_expr
= global_expr
;
5948 global_expr
= saved_global_expr
;
5949 /* NOTE: symbols are accepted, as well as lvalue for anon symbols
5950 (compound literals). */
5951 if (((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) != VT_CONST
5952 && ((vtop
->r
& (VT_SYM
|VT_LVAL
)) != (VT_SYM
|VT_LVAL
)
5953 || vtop
->sym
->v
< SYM_FIRST_ANOM
))
5954 #ifdef TCC_TARGET_PE
5955 || (vtop
->type
.t
& VT_IMPORT
)
5958 tcc_error("initializer element is not constant");
5966 /* t is the array or struct type. c is the array or struct
5967 address. cur_field is the pointer to the current
5968 value, for arrays the 'c' member contains the current start
5969 index and the 'r' contains the end index (in case of range init).
5970 'size_only' is true if only size info is needed (only used
5972 static void decl_designator(CType
*type
, Section
*sec
, unsigned long c
,
5973 Sym
**cur_field
, int size_only
)
5976 int notfirst
, index
, index_last
, align
, l
, nb_elems
, elem_size
;
5982 if (gnu_ext
&& (l
= is_label()) != 0)
5984 while (tok
== '[' || tok
== '.') {
5986 if (!(type
->t
& VT_ARRAY
))
5987 expect("array type");
5990 index
= expr_const();
5991 if (index
< 0 || (s
->c
>= 0 && index
>= s
->c
))
5992 tcc_error("invalid index");
5993 if (tok
== TOK_DOTS
&& gnu_ext
) {
5995 index_last
= expr_const();
5996 if (index_last
< 0 ||
5997 (s
->c
>= 0 && index_last
>= s
->c
) ||
5999 tcc_error("invalid index");
6005 (*cur_field
)->c
= index
;
6006 (*cur_field
)->r
= index_last
;
6008 type
= pointed_type(type
);
6009 elem_size
= type_size(type
, &align
);
6010 c
+= index
* elem_size
;
6011 /* NOTE: we only support ranges for last designator */
6012 nb_elems
= index_last
- index
+ 1;
6013 if (nb_elems
!= 1) {
6022 if ((type
->t
& VT_BTYPE
) != VT_STRUCT
)
6023 expect("struct/union type");
6024 f
= find_field(type
, l
);
6029 /* XXX: fix this mess by using explicit storage field */
6031 type1
.t
|= (type
->t
& ~VT_TYPE
);
6045 if (type
->t
& VT_ARRAY
) {
6046 index
= (*cur_field
)->c
;
6047 if (type
->ref
->c
>= 0 && index
>= type
->ref
->c
)
6048 tcc_error("index too large");
6049 type
= pointed_type(type
);
6050 c
+= index
* type_size(type
, &align
);
6053 while (f
&& (f
->v
& SYM_FIRST_ANOM
) && (f
->type
.t
& VT_BITFIELD
))
6054 *cur_field
= f
= f
->next
;
6056 tcc_error("too many field init");
6057 /* XXX: fix this mess by using explicit storage field */
6059 type1
.t
|= (type
->t
& ~VT_TYPE
);
6064 decl_initializer(type
, sec
, c
, 0, size_only
);
6066 /* XXX: make it more general */
6067 if (!size_only
&& nb_elems
> 1) {
6068 unsigned long c_end
;
6073 vset(type
, VT_LOCAL
|VT_LVAL
, c
);
6074 for (i
= 1; i
< nb_elems
; i
++) {
6075 vset(type
, VT_LOCAL
|VT_LVAL
, c
+ elem_size
* i
);
6081 c_end
= c
+ nb_elems
* elem_size
;
6082 if (c_end
> sec
->data_allocated
)
6083 section_realloc(sec
, c_end
);
6084 src
= sec
->data
+ c
;
6086 for(i
= 1; i
< nb_elems
; i
++) {
6088 memcpy(dst
, src
, elem_size
);
6094 /* store a value or an expression directly in global data or in local array */
6095 static void init_putv(CType
*type
, Section
*sec
, unsigned long c
)
6097 int bt
, bit_pos
, bit_size
;
6099 unsigned long long bit_mask
;
6103 dtype
.t
&= ~VT_CONSTANT
; /* need to do that to avoid false warning */
6107 /* XXX: not portable */
6108 /* XXX: generate error if incorrect relocation */
6109 gen_assign_cast(&dtype
);
6110 bt
= type
->t
& VT_BTYPE
;
6111 size
= type_size(type
, &align
);
6112 if (c
+ size
> sec
->data_allocated
) {
6113 section_realloc(sec
, c
+ size
);
6115 ptr
= sec
->data
+ c
;
6116 /* XXX: make code faster ? */
6117 if (!(type
->t
& VT_BITFIELD
)) {
6119 bit_size
= PTR_SIZE
* 8;
6122 bit_pos
= (vtop
->type
.t
>> VT_STRUCT_SHIFT
) & 0x3f;
6123 bit_size
= (vtop
->type
.t
>> (VT_STRUCT_SHIFT
+ 6)) & 0x3f;
6124 bit_mask
= (1LL << bit_size
) - 1;
6126 if ((vtop
->r
& (VT_SYM
|VT_CONST
)) == (VT_SYM
|VT_CONST
) &&
6127 vtop
->sym
->v
>= SYM_FIRST_ANOM
&&
6128 /* XXX This rejects compount literals like
6129 '(void *){ptr}'. The problem is that '&sym' is
6130 represented the same way, which would be ruled out
6131 by the SYM_FIRST_ANOM check above, but also '"string"'
6132 in 'char *p = "string"' is represented the same
6133 with the type being VT_PTR and the symbol being an
6134 anonymous one. That is, there's no difference in vtop
6135 between '(void *){x}' and '&(void *){x}'. Ignore
6136 pointer typed entities here. Hopefully no real code
6137 will every use compound literals with scalar type. */
6138 (vtop
->type
.t
& VT_BTYPE
) != VT_PTR
) {
6139 /* These come from compound literals, memcpy stuff over. */
6143 esym
= &((ElfW(Sym
) *)symtab_section
->data
)[vtop
->sym
->c
];
6144 ssec
= tcc_state
->sections
[esym
->st_shndx
];
6145 memmove (ptr
, ssec
->data
+ esym
->st_value
, size
);
6147 /* We need to copy over all memory contents, and that
6148 includes relocations. Use the fact that relocs are
6149 created it order, so look from the end of relocs
6150 until we hit one before the copied region. */
6151 int num_relocs
= ssec
->reloc
->data_offset
/ sizeof(*rel
);
6152 rel
= (ElfW_Rel
*)(ssec
->reloc
->data
+ ssec
->reloc
->data_offset
);
6153 while (num_relocs
--) {
6155 if (rel
->r_offset
>= esym
->st_value
+ size
)
6157 if (rel
->r_offset
< esym
->st_value
)
6159 /* Note: if the same fields are initialized multiple
6160 times (possible with designators) then we possibly
6161 add multiple relocations for the same offset here.
6162 That would lead to wrong code, the last reloc needs
6163 to win. We clean this up later after the whole
6164 initializer is parsed. */
6165 put_elf_reloca(symtab_section
, sec
,
6166 c
+ rel
->r_offset
- esym
->st_value
,
6167 ELFW(R_TYPE
)(rel
->r_info
),
6168 ELFW(R_SYM
)(rel
->r_info
),
6169 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
6178 if ((vtop
->r
& VT_SYM
) &&
6184 (bt
== VT_LLONG
&& bit_size
!= 64) ||
6188 (bt
== VT_INT
&& bit_size
!= 32)
6191 tcc_error("initializer element is not computable at load time");
6193 /* XXX: when cross-compiling we assume that each type has the
6194 same representation on host and target, which is likely to
6195 be wrong in the case of long double */
6197 vtop
->c
.i
= (vtop
->c
.i
!= 0);
6199 *(char *)ptr
|= (vtop
->c
.i
& bit_mask
) << bit_pos
;
6202 *(short *)ptr
|= (vtop
->c
.i
& bit_mask
) << bit_pos
;
6205 *(double *)ptr
= vtop
->c
.d
;
6208 if (sizeof(long double) == LDOUBLE_SIZE
)
6209 *(long double *)ptr
= vtop
->c
.ld
;
6210 else if (sizeof(double) == LDOUBLE_SIZE
)
6211 *(double *)ptr
= vtop
->c
.ld
;
6213 tcc_error("can't cross compile long double constants");
6217 *(long long *)ptr
|= (vtop
->c
.i
& bit_mask
) << bit_pos
;
6224 addr_t val
= (vtop
->c
.i
& bit_mask
) << bit_pos
;
6225 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
6226 if (vtop
->r
& VT_SYM
)
6227 greloca(sec
, vtop
->sym
, c
, R_DATA_PTR
, val
);
6229 *(addr_t
*)ptr
|= val
;
6231 if (vtop
->r
& VT_SYM
)
6232 greloc(sec
, vtop
->sym
, c
, R_DATA_PTR
);
6233 *(addr_t
*)ptr
|= val
;
6239 int val
= (vtop
->c
.i
& bit_mask
) << bit_pos
;
6240 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
6241 if (vtop
->r
& VT_SYM
)
6242 greloca(sec
, vtop
->sym
, c
, R_DATA_PTR
, val
);
6246 if (vtop
->r
& VT_SYM
)
6247 greloc(sec
, vtop
->sym
, c
, R_DATA_PTR
);
6256 vset(&dtype
, VT_LOCAL
|VT_LVAL
, c
);
6263 /* put zeros for variable based init */
6264 static void init_putz(Section
*sec
, unsigned long c
, int size
)
6267 /* nothing to do because globals are already set to zero */
6269 vpush_global_sym(&func_old_type
, TOK_memset
);
6271 #ifdef TCC_TARGET_ARM
6282 /* 't' contains the type and storage info. 'c' is the offset of the
6283 object in section 'sec'. If 'sec' is NULL, it means stack based
6284 allocation. 'first' is true if array '{' must be read (multi
6285 dimension implicit array init handling). 'size_only' is true if
6286 size only evaluation is wanted (only for arrays). */
6287 static void decl_initializer(CType
*type
, Section
*sec
, unsigned long c
,
6288 int first
, int size_only
)
6290 int index
, array_length
, n
, no_oblock
, nb
, parlevel
, parlevel1
, i
;
6297 /* If we currently are at an '}' or ',' we have read an initializer
6298 element in one of our callers, and not yet consumed it. */
6299 have_elem
= tok
== '}' || tok
== ',';
6300 if (!have_elem
&& tok
!= '{' &&
6301 /* In case of strings we have special handling for arrays, so
6302 don't consume them as initializer value (which would commit them
6303 to some anonymous symbol). */
6304 tok
!= TOK_LSTR
&& tok
!= TOK_STR
&&
6306 parse_init_elem(!sec
? EXPR_ANY
: EXPR_CONST
);
6311 !(type
->t
& VT_ARRAY
) &&
6312 /* Use i_c_parameter_t, to strip toplevel qualifiers.
6313 The source type might have VT_CONSTANT set, which is
6314 of course assignable to non-const elements. */
6315 is_compatible_parameter_types(type
, &vtop
->type
)) {
6316 init_putv(type
, sec
, c
);
6317 } else if (type
->t
& VT_ARRAY
) {
6321 t1
= pointed_type(type
);
6322 size1
= type_size(t1
, &align1
);
6325 if ((first
&& tok
!= TOK_LSTR
&& tok
!= TOK_STR
) ||
6328 tcc_error("character array initializer must be a literal,"
6329 " optionally enclosed in braces");
6334 /* only parse strings here if correct type (otherwise: handle
6335 them as ((w)char *) expressions */
6336 if ((tok
== TOK_LSTR
&&
6337 #ifdef TCC_TARGET_PE
6338 (t1
->t
& VT_BTYPE
) == VT_SHORT
&& (t1
->t
& VT_UNSIGNED
)
6340 (t1
->t
& VT_BTYPE
) == VT_INT
6342 ) || (tok
== TOK_STR
&& (t1
->t
& VT_BTYPE
) == VT_BYTE
)) {
6343 while (tok
== TOK_STR
|| tok
== TOK_LSTR
) {
6346 /* compute maximum number of chars wanted */
6348 cstr_len
= tokc
.str
.size
;
6350 cstr_len
= tokc
.str
.size
/ sizeof(nwchar_t
);
6353 if (n
>= 0 && nb
> (n
- array_length
))
6354 nb
= n
- array_length
;
6357 tcc_warning("initializer-string for array is too long");
6358 /* in order to go faster for common case (char
6359 string in global variable, we handle it
6361 if (sec
&& tok
== TOK_STR
&& size1
== 1) {
6362 memcpy(sec
->data
+ c
+ array_length
, tokc
.str
.data
, nb
);
6366 ch
= ((unsigned char *)tokc
.str
.data
)[i
];
6368 ch
= ((nwchar_t
*)tokc
.str
.data
)[i
];
6370 init_putv(t1
, sec
, c
+ (array_length
+ i
) * size1
);
6377 /* only add trailing zero if enough storage (no
6378 warning in this case since it is standard) */
6379 if (n
< 0 || array_length
< n
) {
6382 init_putv(t1
, sec
, c
+ (array_length
* size1
));
6392 while (tok
!= '}' || have_elem
) {
6393 decl_designator(type
, sec
, c
, &f
, size_only
);
6396 /* must put zero in holes (note that doing it that way
6397 ensures that it even works with designators) */
6398 if (!size_only
&& array_length
< index
) {
6399 init_putz(sec
, c
+ array_length
* size1
,
6400 (index
- array_length
) * size1
);
6402 if (type
->t
& VT_ARRAY
) {
6403 index
= indexsym
.c
= ++indexsym
.r
;
6405 index
= index
+ type_size(&f
->type
, &align1
);
6406 if (s
->type
.t
== TOK_UNION
)
6411 if (index
> array_length
)
6412 array_length
= index
;
6414 if (type
->t
& VT_ARRAY
) {
6415 /* special test for multi dimensional arrays (may not
6416 be strictly correct if designators are used at the
6418 if (no_oblock
&& index
>= n
)
6421 if (no_oblock
&& f
== NULL
)
6429 /* put zeros at the end */
6430 if (!size_only
&& array_length
< n
) {
6431 init_putz(sec
, c
+ array_length
* size1
,
6432 (n
- array_length
) * size1
);
6436 /* patch type size if needed, which happens only for array types */
6438 s
->c
= array_length
;
6439 } else if ((type
->t
& VT_BTYPE
) == VT_STRUCT
) {
6442 if (first
|| tok
== '{') {
6451 } else if (tok
== '{') {
6453 decl_initializer(type
, sec
, c
, first
, size_only
);
6455 } else if (size_only
) {
6456 /* If we supported only ISO C we wouldn't have to accept calling
6457 this on anything than an array size_only==1 (and even then
6458 only on the outermost level, so no recursion would be needed),
6459 because initializing a flex array member isn't supported.
6460 But GNU C supports it, so we need to recurse even into
6461 subfields of structs and arrays when size_only is set. */
6462 /* just skip expression */
6463 parlevel
= parlevel1
= 0;
6464 while ((parlevel
> 0 || parlevel1
> 0 ||
6465 (tok
!= '}' && tok
!= ',')) && tok
!= -1) {
6468 else if (tok
== ')') {
6469 if (parlevel
== 0 && parlevel1
== 0)
6473 else if (tok
== '{')
6475 else if (tok
== '}') {
6476 if (parlevel
== 0 && parlevel1
== 0)
6484 /* This should happen only when we haven't parsed
6485 the init element above for fear of committing a
6486 string constant to memory too early. */
6487 if (tok
!= TOK_STR
&& tok
!= TOK_LSTR
)
6488 expect("string constant");
6489 parse_init_elem(!sec
? EXPR_ANY
: EXPR_CONST
);
6491 init_putv(type
, sec
, c
);
6495 /* parse an initializer for type 't' if 'has_init' is non zero, and
6496 allocate space in local or global data space ('r' is either
6497 VT_LOCAL or VT_CONST). If 'v' is non zero, then an associated
6498 variable 'v' of scope 'scope' is declared before initializers
6499 are parsed. If 'v' is zero, then a reference to the new object
6500 is put in the value stack. If 'has_init' is 2, a special parsing
6501 is done to handle string constants. */
6502 static void decl_initializer_alloc(CType
*type
, AttributeDef
*ad
, int r
,
6503 int has_init
, int v
, int scope
)
6505 int size
, align
, addr
, data_offset
;
6507 ParseState saved_parse_state
= {0};
6508 TokenString
*init_str
= NULL
;
6510 Sym
*flexible_array
;
6512 flexible_array
= NULL
;
6513 if ((type
->t
& VT_BTYPE
) == VT_STRUCT
) {
6514 Sym
*field
= type
->ref
->next
;
6517 field
= field
->next
;
6518 if (field
->type
.t
& VT_ARRAY
&& field
->type
.ref
->c
< 0)
6519 flexible_array
= field
;
6523 size
= type_size(type
, &align
);
6524 /* If unknown size, we must evaluate it before
6525 evaluating initializers because
6526 initializers can generate global data too
6527 (e.g. string pointers or ISOC99 compound
6528 literals). It also simplifies local
6529 initializers handling */
6530 if (size
< 0 || (flexible_array
&& has_init
)) {
6532 tcc_error("unknown type size");
6533 /* get all init string */
6534 init_str
= tok_str_alloc();
6535 if (has_init
== 2) {
6536 /* only get strings */
6537 while (tok
== TOK_STR
|| tok
== TOK_LSTR
) {
6538 tok_str_add_tok(init_str
);
6543 while (level
> 0 || (tok
!= ',' && tok
!= ';')) {
6545 tcc_error("unexpected end of file in initializer");
6546 tok_str_add_tok(init_str
);
6549 else if (tok
== '}') {
6559 tok_str_add(init_str
, -1);
6560 tok_str_add(init_str
, 0);
6563 save_parse_state(&saved_parse_state
);
6565 begin_macro(init_str
, 1);
6567 decl_initializer(type
, NULL
, 0, 1, 1);
6568 /* prepare second initializer parsing */
6569 macro_ptr
= init_str
->str
;
6572 /* if still unknown size, error */
6573 size
= type_size(type
, &align
);
6575 tcc_error("unknown type size");
6577 /* If there's a flex member and it was used in the initializer
6579 if (flexible_array
&&
6580 flexible_array
->type
.ref
->c
> 0)
6581 size
+= flexible_array
->type
.ref
->c
6582 * pointed_size(&flexible_array
->type
);
6583 /* take into account specified alignment if bigger */
6584 if (ad
->a
.aligned
) {
6585 int speca
= 1 << (ad
->a
.aligned
- 1);
6588 } else if (ad
->a
.packed
) {
6591 if ((r
& VT_VALMASK
) == VT_LOCAL
) {
6593 #ifdef CONFIG_TCC_BCHECK
6594 if (tcc_state
->do_bounds_check
&& (type
->t
& VT_ARRAY
)) {
6598 loc
= (loc
- size
) & -align
;
6600 #ifdef CONFIG_TCC_BCHECK
6601 /* handles bounds */
6602 /* XXX: currently, since we do only one pass, we cannot track
6603 '&' operators, so we add only arrays */
6604 if (tcc_state
->do_bounds_check
&& (type
->t
& VT_ARRAY
)) {
6606 /* add padding between regions */
6608 /* then add local bound info */
6609 bounds_ptr
= section_ptr_add(lbounds_section
, 2 * sizeof(addr_t
));
6610 bounds_ptr
[0] = addr
;
6611 bounds_ptr
[1] = size
;
6615 /* local variable */
6616 #ifdef CONFIG_TCC_ASM
6617 if (ad
->asm_label
) {
6618 int reg
= asm_parse_regvar(ad
->asm_label
);
6620 r
= (r
& ~VT_VALMASK
) | reg
;
6623 sym_push(v
, type
, r
, addr
);
6625 /* push local reference */
6626 vset(type
, r
, addr
);
6630 if (v
&& scope
== VT_CONST
) {
6631 /* see if the symbol was already defined */
6634 patch_storage(sym
, type
);
6635 if (sym
->type
.t
& VT_EXTERN
) {
6636 /* if the variable is extern, it was not allocated */
6637 sym
->type
.t
&= ~VT_EXTERN
;
6638 /* set array size if it was omitted in extern
6640 if ((sym
->type
.t
& VT_ARRAY
) &&
6641 sym
->type
.ref
->c
< 0 &&
6643 sym
->type
.ref
->c
= type
->ref
->c
;
6644 } else if (!has_init
) {
6645 /* we accept several definitions of the same
6646 global variable. this is tricky, because we
6647 must play with the SHN_COMMON type of the symbol */
6648 /* no init data, we won't add more to the symbol */
6649 update_storage(sym
);
6651 } else if (sym
->c
) {
6653 esym
= &((ElfW(Sym
) *)symtab_section
->data
)[sym
->c
];
6654 if (esym
->st_shndx
== data_section
->sh_num
)
6655 tcc_error("redefinition of '%s'", get_tok_str(v
, NULL
));
6660 /* allocate symbol in corresponding section */
6665 else if (tcc_state
->nocommon
)
6670 data_offset
= sec
->data_offset
;
6671 data_offset
= (data_offset
+ align
- 1) & -align
;
6673 /* very important to increment global pointer at this time
6674 because initializers themselves can create new initializers */
6675 data_offset
+= size
;
6676 #ifdef CONFIG_TCC_BCHECK
6677 /* add padding if bound check */
6678 if (tcc_state
->do_bounds_check
)
6681 sec
->data_offset
= data_offset
;
6682 /* allocate section space to put the data */
6683 if (sec
->sh_type
!= SHT_NOBITS
&&
6684 data_offset
> sec
->data_allocated
)
6685 section_realloc(sec
, data_offset
);
6686 /* align section if needed */
6687 if (align
> sec
->sh_addralign
)
6688 sec
->sh_addralign
= align
;
6690 addr
= 0; /* avoid warning */
6694 if (scope
!= VT_CONST
|| !sym
) {
6695 sym
= sym_push(v
, type
, r
| VT_SYM
, 0);
6696 sym
->asm_label
= ad
->asm_label
;
6698 /* update symbol definition */
6700 put_extern_sym(sym
, sec
, addr
, size
);
6702 put_extern_sym(sym
, SECTION_COMMON
, align
, size
);
6706 /* push global reference */
6707 sym
= get_sym_ref(type
, sec
, addr
, size
);
6708 vpushsym(type
, sym
);
6712 #ifdef CONFIG_TCC_BCHECK
6713 /* handles bounds now because the symbol must be defined
6714 before for the relocation */
6715 if (tcc_state
->do_bounds_check
) {
6718 greloc(bounds_section
, sym
, bounds_section
->data_offset
, R_DATA_PTR
);
6719 /* then add global bound info */
6720 bounds_ptr
= section_ptr_add(bounds_section
, 2 * sizeof(addr_t
));
6721 bounds_ptr
[0] = 0; /* relocated */
6722 bounds_ptr
[1] = size
;
6727 if (type
->t
& VT_VLA
) {
6730 /* save current stack pointer */
6731 if (vlas_in_scope
== 0) {
6732 if (vla_sp_root_loc
== -1)
6733 vla_sp_root_loc
= (loc
-= PTR_SIZE
);
6734 gen_vla_sp_save(vla_sp_root_loc
);
6737 vla_runtime_type_size(type
, &a
);
6738 gen_vla_alloc(type
, a
);
6739 gen_vla_sp_save(addr
);
6743 } else if (has_init
) {
6744 size_t oldreloc_offset
= 0;
6745 if (sec
&& sec
->reloc
)
6746 oldreloc_offset
= sec
->reloc
->data_offset
;
6747 decl_initializer(type
, sec
, addr
, 1, 0);
6748 if (sec
&& sec
->reloc
)
6749 squeeze_multi_relocs(sec
, oldreloc_offset
);
6750 /* patch flexible array member size back to -1, */
6751 /* for possible subsequent similar declarations */
6753 flexible_array
->type
.ref
->c
= -1;
6757 /* restore parse state if needed */
6760 restore_parse_state(&saved_parse_state
);
6764 /* parse an old style function declaration list */
6765 /* XXX: check multiple parameter */
6766 static void func_decl_list(Sym
*func_sym
)
6773 /* parse each declaration */
6774 while (tok
!= '{' && tok
!= ';' && tok
!= ',' && tok
!= TOK_EOF
&&
6775 tok
!= TOK_ASM1
&& tok
!= TOK_ASM2
&& tok
!= TOK_ASM3
) {
6776 if (!parse_btype(&btype
, &ad
))
6777 expect("declaration list");
6778 if (((btype
.t
& VT_BTYPE
) == VT_ENUM
||
6779 (btype
.t
& VT_BTYPE
) == VT_STRUCT
) &&
6781 /* we accept no variable after */
6785 type_decl(&type
, &ad
, &v
, TYPE_DIRECT
);
6786 /* find parameter in function parameter list */
6789 if ((s
->v
& ~SYM_FIELD
) == v
)
6793 tcc_error("declaration for parameter '%s' but no such parameter",
6794 get_tok_str(v
, NULL
));
6796 /* check that no storage specifier except 'register' was given */
6797 if (type
.t
& VT_STORAGE
)
6798 tcc_error("storage class specified for '%s'", get_tok_str(v
, NULL
));
6799 convert_parameter_type(&type
);
6800 /* we can add the type (NOTE: it could be local to the function) */
6802 /* accept other parameters */
6813 /* parse a function defined by symbol 'sym' and generate its code in
6814 'cur_text_section' */
6815 static void gen_function(Sym
*sym
)
6818 ind
= cur_text_section
->data_offset
;
6819 /* NOTE: we patch the symbol size later */
6820 put_extern_sym(sym
, cur_text_section
, ind
, 0);
6821 funcname
= get_tok_str(sym
->v
, NULL
);
6823 /* Initialize VLA state */
6825 vla_sp_root_loc
= -1;
6826 /* put debug symbol */
6827 tcc_debug_funcstart(tcc_state
, sym
);
6828 /* push a dummy symbol to enable local sym storage */
6829 sym_push2(&local_stack
, SYM_FIELD
, 0, 0);
6830 local_scope
= 1; /* for function parameters */
6831 gfunc_prolog(&sym
->type
);
6834 block(NULL
, NULL
, 0);
6838 cur_text_section
->data_offset
= ind
;
6839 label_pop(&global_label_stack
, NULL
);
6840 /* reset local stack */
6842 sym_pop(&local_stack
, NULL
, 0);
6843 /* end of function */
6844 /* patch symbol size */
6845 ((ElfW(Sym
) *)symtab_section
->data
)[sym
->c
].st_size
=
6847 tcc_debug_funcend(tcc_state
, ind
- func_ind
);
6848 /* It's better to crash than to generate wrong code */
6849 cur_text_section
= NULL
;
6850 funcname
= ""; /* for safety */
6851 func_vt
.t
= VT_VOID
; /* for safety */
6852 func_var
= 0; /* for safety */
6853 ind
= 0; /* for safety */
6858 static void gen_inline_functions(TCCState
*s
)
6861 int inline_generated
, i
, ln
;
6862 struct InlineFunc
*fn
;
6864 ln
= file
->line_num
;
6865 /* iterate while inline function are referenced */
6867 inline_generated
= 0;
6868 for (i
= 0; i
< s
->nb_inline_fns
; ++i
) {
6869 fn
= s
->inline_fns
[i
];
6871 if (sym
&& sym
->c
) {
6872 /* the function was used: generate its code and
6873 convert it to a normal function */
6876 pstrcpy(file
->filename
, sizeof file
->filename
, fn
->filename
);
6877 sym
->type
.t
&= ~VT_INLINE
;
6879 begin_macro(fn
->func_str
, 1);
6881 cur_text_section
= text_section
;
6885 inline_generated
= 1;
6888 if (!inline_generated
)
6891 file
->line_num
= ln
;
/*
 * Release the token strings recorded for inline functions and the
 * array that holds them.
 * NOTE(review): damaged extraction — the declaration of loop index 'i',
 * the braces, and original line 6900 (possibly a guard before the free)
 * are missing. Comments only were added; code tokens are byte-identical.
 */
6894 ST_FUNC
void free_inline_functions(TCCState
*s
)
6897 /* free tokens of unused inline functions */
6898 for (i
= 0; i
< s
->nb_inline_fns
; ++i
) {
6899 struct InlineFunc
*fn
= s
->inline_fns
[i
];
/* free the saved token string of each recorded inline body */
6901 tok_str_free(fn
->func_str
);
/* free the InlineFunc pointer array itself and reset the count */
6903 dynarray_reset(&s
->inline_fns
, &s
->nb_inline_fns
);
6906 /* 'l' is VT_LOCAL or VT_CONST to define default storage type */
/*
 * Parse a sequence of declarations (and function definitions) at either
 * file scope (l == VT_CONST) or block scope (l == VT_LOCAL).
 * NOTE(review): damaged extraction — stray leading numbers are original
 * source line numbers, and many original lines are missing, including the
 * local declarations ('type', 'btype', 'v', 'r', 'sym', 'ad', 'buf', 't',
 * ...), most braces, and several statements. Comments only were added;
 * every code token is byte-identical to the extracted text.
 */
6907 static int decl0(int l
, int is_for_loop_init
)
/* no type specifier could be parsed here: handle the special cases below */
6915 if (!parse_btype(&btype
, &ad
)) {
6916 if (is_for_loop_init
)
6918 /* skip redundant ';' */
6919 /* XXX: find more elegant solution */
/* top-level asm("...") statement is only accepted at file scope */
6924 if (l
== VT_CONST
&&
6925 (tok
== TOK_ASM1
|| tok
== TOK_ASM2
|| tok
== TOK_ASM3
)) {
6926 /* global asm block */
6930 /* special test for old K&R protos without explicit int
6931 type. Only accepted when defining global data */
6932 if (l
== VT_LOCAL
|| tok
< TOK_UIDENT
)
/* a bare 'struct foo;' / 'enum foo;' declares a tag but no object */
6936 if (((btype
.t
& VT_BTYPE
) == VT_ENUM
||
6937 (btype
.t
& VT_BTYPE
) == VT_STRUCT
) &&
6939 if ((btype
.t
& VT_BTYPE
) == VT_STRUCT
) {
6940 int v
= btype
.ref
->v
;
/* anonymous tag with no declarator: the declaration is useless */
6941 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) >= SYM_FIRST_ANOM
)
6942 tcc_warning("unnamed struct/union that defines no instances");
6947 while (1) { /* iterate thru each declaration */
6949 /* If the base type itself was an array type of unspecified
6950 size (like in 'typedef int arr[]; arr x = {1};') then
6951 we will overwrite the unknown size by the real one for
6952 this decl. We need to unshare the ref symbol holding
/* c < 0 marks an array of unknown size; clone the ref before fixing it */
6954 if ((type
.t
& VT_ARRAY
) && type
.ref
->c
< 0) {
6955 type
.ref
= sym_push(SYM_FIELD
, &type
.ref
->type
, 0, type
.ref
->c
);
/* parse the declarator; 'v' receives the declared identifier token */
6957 type_decl(&type
, &ad
, &v
, TYPE_DIRECT
);
/* NOTE(review): presumably debug-only output guarded by a dropped
   '#if 0' or verbose flag — 't' and 'buf' declarations are missing */
6961 type_to_str(buf
, sizeof(buf
), t
, get_tok_str(v
, NULL
));
6962 printf("type = '%s'\n", buf
);
/* declarator names a function */
6965 if ((type
.t
& VT_BTYPE
) == VT_FUNC
) {
6966 if ((type
.t
& VT_STATIC
) && (l
== VT_LOCAL
)) {
6967 tcc_error("function without file scope cannot be static");
6969 /* if old style function prototype, we accept a
/* FUNC_OLD marks a K&R prototype: parameter declarations may follow */
6972 if (sym
->c
== FUNC_OLD
)
6973 func_decl_list(sym
);
/* GNU extension: asm("label") rename after the declarator */
6976 if (gnu_ext
&& (tok
== TOK_ASM1
|| tok
== TOK_ASM2
|| tok
== TOK_ASM3
)) {
6977 ad
.asm_label
= asm_label_instr();
6978 /* parse one last attribute list, after asm label */
6979 parse_attribute(&ad
);
6986 #ifdef TCC_TARGET_PE
/* PE targets: map dllimport/dllexport attributes onto type flags */
6987 if (ad
.a
.func_import
|| ad
.a
.func_export
) {
6988 if (type
.t
& (VT_STATIC
|VT_TYPEDEF
))
6989 tcc_error("cannot have dll linkage with static or typedef");
6990 if (ad
.a
.func_export
)
6991 type
.t
|= VT_EXPORT
;
6992 else if ((type
.t
& VT_BTYPE
) != VT_FUNC
)
6993 type
.t
|= VT_IMPORT
|VT_EXTERN
;
/* fold ELF visibility (default/hidden/...) into the type word */
6996 type
.t
|= ad
.a
.visibility
<< VT_VIS_SHIFT
;
/* function DEFINITION path (body follows) — only legal at file scope */
7000 tcc_error("cannot use local functions");
7001 if ((type
.t
& VT_BTYPE
) != VT_FUNC
)
7002 expect("function definition");
7004 /* reject abstract declarators in function definition */
/* every parameter pushed on sym->next must carry a real identifier */
7006 while ((sym
= sym
->next
) != NULL
)
7007 if (!(sym
->v
& ~SYM_FIELD
))
7008 expect("identifier");
7010 /* XXX: cannot do better now: convert extern line to static inline */
7011 if ((type
.t
& (VT_EXTERN
| VT_INLINE
)) == (VT_EXTERN
| VT_INLINE
))
7012 type
.t
= (type
.t
& ~VT_EXTERN
) | VT_STATIC
;
/* merge this definition with a previously seen prototype, if any */
7017 if ((sym
->type
.t
& VT_BTYPE
) != VT_FUNC
)
7020 ref
= sym
->type
.ref
;
7022 /* use func_call from prototype if not defined */
7023 if (ref
->a
.func_call
!= FUNC_CDECL
7024 && type
.ref
->a
.func_call
== FUNC_CDECL
)
7025 type
.ref
->a
.func_call
= ref
->a
.func_call
;
7027 /* use static from prototype */
7028 if (sym
->type
.t
& VT_STATIC
)
7029 type
.t
= (type
.t
& ~VT_EXTERN
) | VT_STATIC
;
7031 /* If the definition has no visibility use the
7032 one from prototype. */
7033 if (! (type
.t
& VT_VIS_MASK
))
7034 type
.t
|= sym
->type
.t
& VT_VIS_MASK
;
7036 /* apply other storage attributes from prototype */
7037 type
.t
|= sym
->type
.t
& (VT_EXPORT
|VT_WEAK
);
/* prototype and definition must agree on the full function type */
7039 if (!is_compatible_types(&sym
->type
, &type
)) {
7041 tcc_error("incompatible types for redefinition of '%s'",
7042 get_tok_str(v
, NULL
));
/* func_body set means a body was already emitted for this symbol */
7044 if (ref
->a
.func_body
)
7045 tcc_error("redefinition of '%s'", get_tok_str(v
, NULL
));
7046 /* if symbol is already defined, then put complete type */
7050 /* put function symbol */
7051 sym
= global_identifier_push(v
, type
.t
, 0);
7052 sym
->type
.ref
= type
.ref
;
/* mark that this symbol now owns a definition */
7055 sym
->type
.ref
->a
.func_body
= 1;
7056 sym
->r
= VT_SYM
| VT_CONST
;
7058 /* static inline functions are just recorded as a kind
7059 of macro. Their code will be emitted at the end of
7060 the compilation unit only if they are used */
7061 if ((type
.t
& (VT_INLINE
| VT_STATIC
)) ==
7062 (VT_INLINE
| VT_STATIC
)) {
7064 struct InlineFunc
*fn
;
7065 const char *filename
;
7067 filename
= file
? file
->filename
: "";
/* InlineFunc has a trailing flexible filename buffer; one allocation
   holds the struct plus the NUL-terminated name copied below */
7068 fn
= tcc_malloc(sizeof *fn
+ strlen(filename
));
7069 strcpy(fn
->filename
, filename
);
7071 fn
->func_str
= tok_str_alloc();
/* save the raw token stream of the body, tracking brace depth;
   NOTE(review): the tokenizer loop head was lost in the extraction */
7077 tcc_error("unexpected end of file");
7078 tok_str_add_tok(fn
->func_str
);
7083 } else if (t
== '}') {
/* depth back to 0: the closing brace of the function body */
7085 if (block_level
== 0)
7089 tok_str_add(fn
->func_str
, -1);
7090 tok_str_add(fn
->func_str
, 0);
/* queue the recorded body for gen_inline_functions() */
7091 dynarray_add(&tcc_state
->inline_fns
, &tcc_state
->nb_inline_fns
, fn
);
7094 /* compute text section */
/* honor __attribute__((section("..."))) if present, else .text */
7095 cur_text_section
= ad
.section
;
7096 if (!cur_text_section
)
7097 cur_text_section
= text_section
;
/* non-definition path: typedefs, externs, and variables */
7102 if (type
.t
& VT_TYPEDEF
) {
7103 /* save typedefed type */
7104 /* XXX: test storage specifiers ? */
/* redefining a typedef in the same scope is only OK if identical */
7106 if (sym
&& sym
->scope
== local_scope
) {
7107 if (!is_compatible_types(&sym
->type
, &type
)
7108 || !(sym
->type
.t
& VT_TYPEDEF
))
7109 tcc_error("incompatible redefinition of '%s'",
7110 get_tok_str(v
, NULL
));
7113 sym
= sym_push(v
, &type
, 0, 0);
7118 if ((type
.t
& VT_BTYPE
) == VT_FUNC
) {
7119 /* external function definition */
7120 /* specific case for func_call attribute */
7122 } else if (!(type
.t
& VT_ARRAY
)) {
7123 /* not lvalue if array */
7124 r
|= lvalue_type(type
.t
);
7126 has_init
= (tok
== '=');
7127 if (has_init
&& (type
.t
& VT_VLA
))
7128 tcc_error("variable length array cannot be initialized");
/* treat as external: explicit extern, any function declaration, or an
   uninitialized file-scope static array of unknown size (GCC compat) */
7129 if ((type
.t
& VT_EXTERN
) || ((type
.t
& VT_BTYPE
) == VT_FUNC
) ||
7130 ((type
.t
& VT_ARRAY
) && (type
.t
& VT_STATIC
) &&
7131 !has_init
&& l
== VT_CONST
&& type
.ref
->c
< 0)) {
7132 /* external variable or function */
7133 /* NOTE: as GCC, uninitialized global static
7134 arrays of null size are considered as
7136 sym
= external_sym(v
, &type
, r
);
7137 sym
->asm_label
= ad
.asm_label
;
/* __attribute__((alias("target"))): bind this symbol to the target's
   section/value/size; the target must already be defined */
7138 if (ad
.alias_target
) {
7143 alias_target
= sym_find(ad
.alias_target
);
7144 if (!alias_target
|| !alias_target
->c
)
7145 tcc_error("unsupported forward __alias__ attribute");
7146 esym
= &((ElfW(Sym
) *)symtab_section
->data
)[alias_target
->c
];
7147 tsec
.sh_num
= esym
->st_shndx
;
7148 put_extern_sym2(sym
, &tsec
, esym
->st_value
, esym
->st_size
, 0);
7151 if (type
.t
& VT_STATIC
)
/* ordinary variable: allocate storage and parse any initializer */
7157 decl_initializer_alloc(&type
, &ad
, r
, has_init
, v
, l
);
/* a for-loop init clause parses exactly one declaration list */
7161 if (is_for_loop_init
)
7174 ST_FUNC
void decl(int l
)
7179 /* ------------------------------------------------------------------------- */