2 * TCC - Tiny C Compiler
4 * Copyright (c) 2001-2004 Fabrice Bellard
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
23 /********************************************************/
24 /* global variables */
26 /* loc : local variable index
27 ind : output code index
29 anon_sym: anonymous symbol index
31 ST_DATA
int rsym
, anon_sym
, ind
, loc
;
33 ST_DATA Sym
*sym_free_first
;
34 ST_DATA
void **sym_pools
;
35 ST_DATA
int nb_sym_pools
;
37 ST_DATA Sym
*global_stack
;
38 ST_DATA Sym
*local_stack
;
39 ST_DATA Sym
*define_stack
;
40 ST_DATA Sym
*global_label_stack
;
41 ST_DATA Sym
*local_label_stack
;
42 static int local_scope
;
44 static int section_sym
;
46 ST_DATA
int vlas_in_scope
; /* number of VLAs that are currently in scope */
47 ST_DATA
int vla_sp_root_loc
; /* vla_sp_loc for SP before any VLAs were pushed */
48 ST_DATA
int vla_sp_loc
; /* Pointer to variable holding location to store stack pointer on the stack when modifying stack pointer */
50 ST_DATA SValue __vstack
[1+VSTACK_SIZE
], *vtop
, *pvtop
;
52 ST_DATA
int const_wanted
; /* true if constant wanted */
53 ST_DATA
int nocode_wanted
; /* true if no code generation wanted for an expression */
54 ST_DATA
int global_expr
; /* true if compound literals must be allocated globally (used during initializers parsing */
55 ST_DATA CType func_vt
; /* current function return type (used by return instruction) */
56 ST_DATA
int func_var
; /* true if current function is variadic (used by return instruction) */
58 ST_DATA
int last_line_num
, last_ind
, func_ind
; /* debug last line number and pc */
59 ST_DATA
const char *funcname
;
61 ST_DATA CType char_pointer_type
, func_old_type
, int_type
, size_type
;
63 ST_DATA
struct switch_t
{
67 } **p
; int n
; /* list of case ranges */
68 int def_sym
; /* default symbol */
69 } *cur_switch
; /* current switch */
71 /* ------------------------------------------------------------------------- */
73 static void gen_cast(CType
*type
);
74 static inline CType
*pointed_type(CType
*type
);
75 static int is_compatible_types(CType
*type1
, CType
*type2
);
76 static int parse_btype(CType
*type
, AttributeDef
*ad
);
77 static void type_decl(CType
*type
, AttributeDef
*ad
, int *v
, int td
);
78 static void parse_expr_type(CType
*type
);
79 static void decl_initializer(CType
*type
, Section
*sec
, unsigned long c
, int first
, int size_only
);
80 static void block(int *bsym
, int *csym
, int is_expr
);
81 static void decl_initializer_alloc(CType
*type
, AttributeDef
*ad
, int r
, int has_init
, int v
, int scope
);
82 static int decl0(int l
, int is_for_loop_init
);
83 static void expr_eq(void);
84 static void expr_lor_const(void);
85 static void unary_type(CType
*type
);
86 static void vla_runtime_type_size(CType
*type
, int *a
);
87 static void vla_sp_restore(void);
88 static void vla_sp_restore_root(void);
89 static int is_compatible_parameter_types(CType
*type1
, CType
*type2
);
90 static void expr_type(CType
*type
);
91 static inline int64_t expr_const64(void);
92 ST_FUNC
void vpush64(int ty
, unsigned long long v
);
93 ST_FUNC
void vpush(CType
*type
);
94 ST_FUNC
int gvtst(int inv
, int t
);
95 ST_FUNC
int is_btype_size(int bt
);
96 static void gen_inline_functions(TCCState
*s
);
98 ST_INLN
int is_float(int t
)
102 return bt
== VT_LDOUBLE
|| bt
== VT_DOUBLE
|| bt
== VT_FLOAT
|| bt
== VT_QFLOAT
;
105 /* we use our own 'finite' function to avoid potential problems with
106 non standard math libs */
107 /* XXX: endianness dependent */
108 ST_FUNC
int ieee_finite(double d
)
111 memcpy(p
, &d
, sizeof(double));
112 return ((unsigned)((p
[1] | 0x800fffff) + 1)) >> 31;
115 ST_FUNC
void test_lvalue(void)
117 if (!(vtop
->r
& VT_LVAL
))
121 ST_FUNC
void check_vstack(void)
124 tcc_error("internal compiler error: vstack leak (%d)", vtop
- pvtop
);
127 /* ------------------------------------------------------------------------- */
128 /* vstack debugging aid */
131 void pv (const char *lbl
, int a
, int b
)
134 for (i
= a
; i
< a
+ b
; ++i
) {
135 SValue
*p
= &vtop
[-i
];
136 printf("%s vtop[-%d] : type.t:%04x r:%04x r2:%04x c.i:%d\n",
137 lbl
, i
, p
->type
.t
, p
->r
, p
->r2
, (int)p
->c
.i
);
142 /* ------------------------------------------------------------------------- */
143 /* start of translation unit info */
144 ST_FUNC
void tcc_debug_start(TCCState
*s1
)
149 /* file info: full path + filename */
150 section_sym
= put_elf_sym(symtab_section
, 0, 0,
151 ELFW(ST_INFO
)(STB_LOCAL
, STT_SECTION
), 0,
152 text_section
->sh_num
, NULL
);
153 getcwd(buf
, sizeof(buf
));
155 normalize_slashes(buf
);
157 pstrcat(buf
, sizeof(buf
), "/");
158 put_stabs_r(buf
, N_SO
, 0, 0,
159 text_section
->data_offset
, text_section
, section_sym
);
160 put_stabs_r(file
->filename
, N_SO
, 0, 0,
161 text_section
->data_offset
, text_section
, section_sym
);
166 /* an elf symbol of type STT_FILE must be put so that STB_LOCAL
167 symbols can be safely used */
168 put_elf_sym(symtab_section
, 0, 0,
169 ELFW(ST_INFO
)(STB_LOCAL
, STT_FILE
), 0,
170 SHN_ABS
, file
->filename
);
173 /* put end of translation unit info */
174 ST_FUNC
void tcc_debug_end(TCCState
*s1
)
178 put_stabs_r(NULL
, N_SO
, 0, 0,
179 text_section
->data_offset
, text_section
, section_sym
);
183 /* generate line number info */
184 ST_FUNC
void tcc_debug_line(TCCState
*s1
)
188 if ((last_line_num
!= file
->line_num
|| last_ind
!= ind
)) {
189 put_stabn(N_SLINE
, 0, file
->line_num
, ind
- func_ind
);
191 last_line_num
= file
->line_num
;
195 /* put function symbol */
196 ST_FUNC
void tcc_debug_funcstart(TCCState
*s1
, Sym
*sym
)
204 /* XXX: we put here a dummy type */
205 snprintf(buf
, sizeof(buf
), "%s:%c1",
206 funcname
, sym
->type
.t
& VT_STATIC
? 'f' : 'F');
207 put_stabs_r(buf
, N_FUN
, 0, file
->line_num
, 0,
208 cur_text_section
, sym
->c
);
209 /* //gr gdb wants a line at the function */
210 put_stabn(N_SLINE
, 0, file
->line_num
, 0);
216 /* put function size */
217 ST_FUNC
void tcc_debug_funcend(TCCState
*s1
, int size
)
221 put_stabn(N_FUN
, 0, 0, size
);
224 /* ------------------------------------------------------------------------- */
225 ST_FUNC
void tccgen_start(TCCState
*s1
)
227 cur_text_section
= NULL
;
229 anon_sym
= SYM_FIRST_ANOM
;
234 /* define some often used types */
236 char_pointer_type
.t
= VT_BYTE
;
237 mk_pointer(&char_pointer_type
);
239 size_type
.t
= VT_INT
;
241 size_type
.t
= VT_LLONG
;
243 func_old_type
.t
= VT_FUNC
;
244 func_old_type
.ref
= sym_push(SYM_FIELD
, &int_type
, FUNC_CDECL
, FUNC_OLD
);
248 #ifdef TCC_TARGET_ARM
253 ST_FUNC
void tccgen_end(TCCState
*s1
)
255 gen_inline_functions(s1
);
257 /* end of translation unit info */
261 /* ------------------------------------------------------------------------- */
262 /* apply storage attibutes to Elf symbol */
264 static void update_storage(Sym
*sym
)
273 esym
= &((ElfW(Sym
) *)symtab_section
->data
)[sym
->c
];
276 esym
->st_other
= (esym
->st_other
& ~ELFW(ST_VISIBILITY
)(-1))
277 | ((t
& VT_VIS_MASK
) >> VT_VIS_SHIFT
);
280 esym
->st_info
= ELFW(ST_INFO
)(STB_WEAK
, ELFW(ST_TYPE
)(esym
->st_info
));
284 esym
->st_other
|= ST_PE_EXPORT
;
288 /* ------------------------------------------------------------------------- */
289 /* update sym->c so that it points to an external symbol in section
290 'section' with value 'value' */
292 ST_FUNC
void put_extern_sym2(Sym
*sym
, Section
*section
,
293 addr_t value
, unsigned long size
,
294 int can_add_underscore
)
296 int sym_type
, sym_bind
, sh_num
, info
, other
, t
;
300 #ifdef CONFIG_TCC_BCHECK
306 else if (section
== SECTION_ABS
)
308 else if (section
== SECTION_COMMON
)
311 sh_num
= section
->sh_num
;
314 name
= get_tok_str(sym
->v
, NULL
);
315 #ifdef CONFIG_TCC_BCHECK
316 if (tcc_state
->do_bounds_check
) {
317 /* XXX: avoid doing that for statics ? */
318 /* if bound checking is activated, we change some function
319 names by adding the "__bound" prefix */
322 /* XXX: we rely only on malloc hooks */
335 strcpy(buf
, "__bound_");
343 if ((t
& VT_BTYPE
) == VT_FUNC
) {
345 } else if ((t
& VT_BTYPE
) == VT_VOID
) {
346 sym_type
= STT_NOTYPE
;
348 sym_type
= STT_OBJECT
;
351 sym_bind
= STB_LOCAL
;
353 sym_bind
= STB_GLOBAL
;
356 if (sym_type
== STT_FUNC
&& sym
->type
.ref
) {
357 Sym
*ref
= sym
->type
.ref
;
358 if (ref
->a
.func_call
== FUNC_STDCALL
&& can_add_underscore
) {
359 sprintf(buf1
, "_%s@%d", name
, ref
->a
.func_args
* PTR_SIZE
);
361 other
|= ST_PE_STDCALL
;
362 can_add_underscore
= 0;
366 other
|= ST_PE_IMPORT
;
368 if (tcc_state
->leading_underscore
&& can_add_underscore
) {
370 pstrcpy(buf1
+ 1, sizeof(buf1
) - 1, name
);
374 name
= get_tok_str(sym
->asm_label
, NULL
);
375 info
= ELFW(ST_INFO
)(sym_bind
, sym_type
);
376 sym
->c
= set_elf_sym(symtab_section
, value
, size
, info
, other
, sh_num
, name
);
378 esym
= &((ElfW(Sym
) *)symtab_section
->data
)[sym
->c
];
379 esym
->st_value
= value
;
380 esym
->st_size
= size
;
381 esym
->st_shndx
= sh_num
;
386 ST_FUNC
void put_extern_sym(Sym
*sym
, Section
*section
,
387 addr_t value
, unsigned long size
)
389 put_extern_sym2(sym
, section
, value
, size
, 1);
392 /* add a new relocation entry to symbol 'sym' in section 's' */
393 ST_FUNC
void greloca(Section
*s
, Sym
*sym
, unsigned long offset
, int type
,
398 if (nocode_wanted
&& s
== cur_text_section
)
403 put_extern_sym(sym
, NULL
, 0, 0);
407 /* now we can add ELF relocation info */
408 put_elf_reloca(symtab_section
, s
, offset
, type
, c
, addend
);
411 ST_FUNC
void greloc(Section
*s
, Sym
*sym
, unsigned long offset
, int type
)
413 greloca(s
, sym
, offset
, type
, 0);
416 /* ------------------------------------------------------------------------- */
417 /* symbol allocator */
418 static Sym
*__sym_malloc(void)
420 Sym
*sym_pool
, *sym
, *last_sym
;
423 sym_pool
= tcc_malloc(SYM_POOL_NB
* sizeof(Sym
));
424 dynarray_add(&sym_pools
, &nb_sym_pools
, sym_pool
);
426 last_sym
= sym_free_first
;
428 for(i
= 0; i
< SYM_POOL_NB
; i
++) {
429 sym
->next
= last_sym
;
433 sym_free_first
= last_sym
;
437 static inline Sym
*sym_malloc(void)
441 sym
= sym_free_first
;
443 sym
= __sym_malloc();
444 sym_free_first
= sym
->next
;
447 sym
= tcc_malloc(sizeof(Sym
));
452 ST_INLN
void sym_free(Sym
*sym
)
455 sym
->next
= sym_free_first
;
456 sym_free_first
= sym
;
462 /* push, without hashing */
463 ST_FUNC Sym
*sym_push2(Sym
**ps
, int v
, int t
, long c
)
483 /* find a symbol and return its associated structure. 's' is the top
484 of the symbol stack */
485 ST_FUNC Sym
*sym_find2(Sym
*s
, int v
)
497 /* structure lookup */
498 ST_INLN Sym
*struct_find(int v
)
501 if ((unsigned)v
>= (unsigned)(tok_ident
- TOK_IDENT
))
503 return table_ident
[v
]->sym_struct
;
506 /* find an identifier */
507 ST_INLN Sym
*sym_find(int v
)
510 if ((unsigned)v
>= (unsigned)(tok_ident
- TOK_IDENT
))
512 return table_ident
[v
]->sym_identifier
;
515 /* push a given symbol on the symbol stack */
516 ST_FUNC Sym
*sym_push(int v
, CType
*type
, int r
, long c
)
525 s
= sym_push2(ps
, v
, type
->t
, c
);
526 s
->type
.ref
= type
->ref
;
528 /* don't record fields or anonymous symbols */
530 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
531 /* record symbol in token array */
532 ts
= table_ident
[(v
& ~SYM_STRUCT
) - TOK_IDENT
];
534 ps
= &ts
->sym_struct
;
536 ps
= &ts
->sym_identifier
;
539 s
->scope
= local_scope
;
540 if (s
->prev_tok
&& s
->prev_tok
->scope
== s
->scope
)
541 tcc_error("redeclaration of '%s'",
542 get_tok_str(v
& ~SYM_STRUCT
, NULL
));
547 /* push a global identifier */
548 ST_FUNC Sym
*global_identifier_push(int v
, int t
, long c
)
551 s
= sym_push2(&global_stack
, v
, t
, c
);
552 /* don't record anonymous symbol */
553 if (v
< SYM_FIRST_ANOM
) {
554 ps
= &table_ident
[v
- TOK_IDENT
]->sym_identifier
;
555 /* modify the top most local identifier, so that
556 sym_identifier will point to 's' when popped */
558 ps
= &(*ps
)->prev_tok
;
565 /* pop symbols until top reaches 'b'. If KEEP is non-zero don't really
566 pop them yet from the list, but do remove them from the token array. */
567 ST_FUNC
void sym_pop(Sym
**ptop
, Sym
*b
, int keep
)
577 /* remove symbol in token array */
579 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
580 ts
= table_ident
[(v
& ~SYM_STRUCT
) - TOK_IDENT
];
582 ps
= &ts
->sym_struct
;
584 ps
= &ts
->sym_identifier
;
595 /* ------------------------------------------------------------------------- */
597 static void vsetc(CType
*type
, int r
, CValue
*vc
)
601 if (vtop
>= vstack
+ (VSTACK_SIZE
- 1))
602 tcc_error("memory full (vstack)");
603 /* cannot let cpu flags if other instruction are generated. Also
604 avoid leaving VT_JMP anywhere except on the top of the stack
605 because it would complicate the code generator.
607 Don't do this when nocode_wanted. vtop might come from
608 !nocode_wanted regions (see 88_codeopt.c) and transforming
609 it to a register without actually generating code is wrong
610 as their value might still be used for real. All values
611 we push under nocode_wanted will eventually be popped
612 again, so that the VT_CMP/VT_JMP value will be in vtop
613 when code is unsuppressed again.
615 Same logic below in vswap(); */
616 if (vtop
>= vstack
&& !nocode_wanted
) {
617 v
= vtop
->r
& VT_VALMASK
;
618 if (v
== VT_CMP
|| (v
& ~1) == VT_JMP
)
630 ST_FUNC
void vswap(void)
633 /* cannot vswap cpu flags. See comment at vsetc() above */
634 if (vtop
>= vstack
&& !nocode_wanted
) {
635 int v
= vtop
->r
& VT_VALMASK
;
636 if (v
== VT_CMP
|| (v
& ~1) == VT_JMP
)
644 /* pop stack value */
645 ST_FUNC
void vpop(void)
648 v
= vtop
->r
& VT_VALMASK
;
649 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
650 /* for x86, we need to pop the FP stack */
652 o(0xd8dd); /* fstp %st(0) */
655 if (v
== VT_JMP
|| v
== VT_JMPI
) {
656 /* need to put correct jump if && or || without test */
662 /* push constant of type "type" with useless value */
663 ST_FUNC
void vpush(CType
*type
)
666 vsetc(type
, VT_CONST
, &cval
);
669 /* push integer constant */
670 ST_FUNC
void vpushi(int v
)
674 vsetc(&int_type
, VT_CONST
, &cval
);
677 /* push a pointer sized constant */
678 static void vpushs(addr_t v
)
682 vsetc(&size_type
, VT_CONST
, &cval
);
685 /* push arbitrary 64bit constant */
686 ST_FUNC
void vpush64(int ty
, unsigned long long v
)
693 vsetc(&ctype
, VT_CONST
, &cval
);
696 /* push long long constant */
697 static inline void vpushll(long long v
)
699 vpush64(VT_LLONG
, v
);
702 ST_FUNC
void vset(CType
*type
, int r
, long v
)
707 vsetc(type
, r
, &cval
);
710 static void vseti(int r
, int v
)
718 ST_FUNC
void vpushv(SValue
*v
)
720 if (vtop
>= vstack
+ (VSTACK_SIZE
- 1))
721 tcc_error("memory full (vstack)");
726 static void vdup(void)
731 /* rotate n first stack elements to the bottom
732 I1 ... In -> I2 ... In I1 [top is right]
734 ST_FUNC
void vrotb(int n
)
745 /* rotate the n elements before entry e towards the top
746 I1 ... In ... -> In I1 ... I(n-1) ... [top is right]
748 ST_FUNC
void vrote(SValue
*e
, int n
)
754 for(i
= 0;i
< n
- 1; i
++)
759 /* rotate n first stack elements to the top
760 I1 ... In -> In I1 ... I(n-1) [top is right]
762 ST_FUNC
void vrott(int n
)
767 /* push a symbol value of TYPE */
768 static inline void vpushsym(CType
*type
, Sym
*sym
)
772 vsetc(type
, VT_CONST
| VT_SYM
, &cval
);
776 /* Return a static symbol pointing to a section */
777 ST_FUNC Sym
*get_sym_ref(CType
*type
, Section
*sec
, unsigned long offset
, unsigned long size
)
783 sym
= global_identifier_push(v
, type
->t
| VT_STATIC
, 0);
784 sym
->type
.ref
= type
->ref
;
785 sym
->r
= VT_CONST
| VT_SYM
;
786 put_extern_sym(sym
, sec
, offset
, size
);
790 /* push a reference to a section offset by adding a dummy symbol */
791 static void vpush_ref(CType
*type
, Section
*sec
, unsigned long offset
, unsigned long size
)
793 vpushsym(type
, get_sym_ref(type
, sec
, offset
, size
));
796 /* define a new external reference to a symbol 'v' of type 'u' */
797 ST_FUNC Sym
*external_global_sym(int v
, CType
*type
, int r
)
803 /* push forward reference */
804 s
= global_identifier_push(v
, type
->t
| VT_EXTERN
, 0);
805 s
->type
.ref
= type
->ref
;
806 s
->r
= r
| VT_CONST
| VT_SYM
;
811 /* Merge some storage attributes. */
812 static void patch_storage(Sym
*sym
, CType
*type
)
815 if (!is_compatible_types(&sym
->type
, type
))
816 tcc_error("incompatible types for redefinition of '%s'",
817 get_tok_str(sym
->v
, NULL
));
820 if ((sym
->type
.t
^ t
) & VT_IMPORT
)
821 tcc_error("incompatible dll linkage for redefinition of '%s'",
822 get_tok_str(sym
->v
, NULL
));
824 sym
->type
.t
|= t
& (VT_EXPORT
|VT_WEAK
);
825 if (t
& VT_VIS_MASK
) {
826 int vis
= sym
->type
.t
& VT_VIS_MASK
;
827 int vis2
= t
& VT_VIS_MASK
;
828 if (vis
== (STV_DEFAULT
<< VT_VIS_SHIFT
))
830 else if (vis2
!= (STV_DEFAULT
<< VT_VIS_SHIFT
))
831 vis
= (vis
< vis2
) ? vis
: vis2
;
832 sym
->type
.t
= (sym
->type
.t
& ~VT_VIS_MASK
) | vis
;
836 /* define a new external reference to a symbol 'v' */
837 static Sym
*external_sym(int v
, CType
*type
, int r
)
842 /* push forward reference */
843 s
= sym_push(v
, type
, r
| VT_CONST
| VT_SYM
, 0);
844 s
->type
.t
|= VT_EXTERN
;
846 if (s
->type
.ref
== func_old_type
.ref
) {
847 s
->type
.ref
= type
->ref
;
848 s
->r
= r
| VT_CONST
| VT_SYM
;
849 s
->type
.t
|= VT_EXTERN
;
851 patch_storage(s
, type
);
857 /* push a reference to global symbol v */
858 ST_FUNC
void vpush_global_sym(CType
*type
, int v
)
860 vpushsym(type
, external_global_sym(v
, type
, 0));
863 /* save registers up to (vtop - n) stack entry */
864 ST_FUNC
void save_regs(int n
)
867 for(p
= vstack
, p1
= vtop
- n
; p
<= p1
; p
++)
871 /* save r to the memory stack, and mark it as being free */
872 ST_FUNC
void save_reg(int r
)
874 save_reg_upstack(r
, 0);
877 /* save r to the memory stack, and mark it as being free,
878 if seen up to (vtop - n) stack entry */
879 ST_FUNC
void save_reg_upstack(int r
, int n
)
881 int l
, saved
, size
, align
;
885 if ((r
&= VT_VALMASK
) >= VT_CONST
)
890 /* modify all stack values */
893 for(p
= vstack
, p1
= vtop
- n
; p
<= p1
; p
++) {
894 if ((p
->r
& VT_VALMASK
) == r
||
895 ((p
->type
.t
& VT_BTYPE
) == VT_LLONG
&& (p
->r2
& VT_VALMASK
) == r
)) {
896 /* must save value on stack if not already done */
898 /* NOTE: must reload 'r' because r might be equal to r2 */
899 r
= p
->r
& VT_VALMASK
;
900 /* store register in the stack */
902 if ((p
->r
& VT_LVAL
) ||
903 (!is_float(type
->t
) && (type
->t
& VT_BTYPE
) != VT_LLONG
))
904 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
905 type
= &char_pointer_type
;
909 size
= type_size(type
, &align
);
910 loc
= (loc
- size
) & -align
;
912 sv
.r
= VT_LOCAL
| VT_LVAL
;
915 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
916 /* x86 specific: need to pop fp register ST0 if saved */
918 o(0xd8dd); /* fstp %st(0) */
921 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
922 /* special long long case */
923 if ((type
->t
& VT_BTYPE
) == VT_LLONG
) {
931 /* mark that stack entry as being saved on the stack */
932 if (p
->r
& VT_LVAL
) {
933 /* also clear the bounded flag because the
934 relocation address of the function was stored in
936 p
->r
= (p
->r
& ~(VT_VALMASK
| VT_BOUNDED
)) | VT_LLOCAL
;
938 p
->r
= lvalue_type(p
->type
.t
) | VT_LOCAL
;
946 #ifdef TCC_TARGET_ARM
947 /* find a register of class 'rc2' with at most one reference on stack.
948 * If none, call get_reg(rc) */
949 ST_FUNC
int get_reg_ex(int rc
, int rc2
)
954 for(r
=0;r
<NB_REGS
;r
++) {
955 if (reg_classes
[r
] & rc2
) {
958 for(p
= vstack
; p
<= vtop
; p
++) {
959 if ((p
->r
& VT_VALMASK
) == r
||
960 (p
->r2
& VT_VALMASK
) == r
)
971 /* find a free register of class 'rc'. If none, save one register */
972 ST_FUNC
int get_reg(int rc
)
977 /* find a free register */
978 for(r
=0;r
<NB_REGS
;r
++) {
979 if (reg_classes
[r
] & rc
) {
982 for(p
=vstack
;p
<=vtop
;p
++) {
983 if ((p
->r
& VT_VALMASK
) == r
||
984 (p
->r2
& VT_VALMASK
) == r
)
992 /* no register left : free the first one on the stack (VERY
993 IMPORTANT to start from the bottom to ensure that we don't
994 spill registers used in gen_opi()) */
995 for(p
=vstack
;p
<=vtop
;p
++) {
996 /* look at second register (if long long) */
997 r
= p
->r2
& VT_VALMASK
;
998 if (r
< VT_CONST
&& (reg_classes
[r
] & rc
))
1000 r
= p
->r
& VT_VALMASK
;
1001 if (r
< VT_CONST
&& (reg_classes
[r
] & rc
)) {
1007 /* Should never comes here */
1011 /* move register 's' (of type 't') to 'r', and flush previous value of r to memory
1013 static void move_reg(int r
, int s
, int t
)
1027 /* get address of vtop (vtop MUST BE an lvalue) */
1028 ST_FUNC
void gaddrof(void)
1030 if (vtop
->r
& VT_REF
)
1032 vtop
->r
&= ~VT_LVAL
;
1033 /* tricky: if saved lvalue, then we can go back to lvalue */
1034 if ((vtop
->r
& VT_VALMASK
) == VT_LLOCAL
)
1035 vtop
->r
= (vtop
->r
& ~(VT_VALMASK
| VT_LVAL_TYPE
)) | VT_LOCAL
| VT_LVAL
;
1040 #ifdef CONFIG_TCC_BCHECK
1041 /* generate lvalue bound code */
1042 static void gbound(void)
1047 vtop
->r
&= ~VT_MUSTBOUND
;
1048 /* if lvalue, then use checking code before dereferencing */
1049 if (vtop
->r
& VT_LVAL
) {
1050 /* if not VT_BOUNDED value, then make one */
1051 if (!(vtop
->r
& VT_BOUNDED
)) {
1052 lval_type
= vtop
->r
& (VT_LVAL_TYPE
| VT_LVAL
);
1053 /* must save type because we must set it to int to get pointer */
1055 vtop
->type
.t
= VT_PTR
;
1058 gen_bounded_ptr_add();
1059 vtop
->r
|= lval_type
;
1062 /* then check for dereferencing */
1063 gen_bounded_ptr_deref();
1068 /* store vtop a register belonging to class 'rc'. lvalues are
1069 converted to values. Cannot be used if cannot be converted to
1070 register value (such as structures). */
1071 ST_FUNC
int gv(int rc
)
1073 int r
, bit_pos
, bit_size
, size
, align
, i
;
1076 /* NOTE: get_reg can modify vstack[] */
1077 if (vtop
->type
.t
& VT_BITFIELD
) {
1080 bit_pos
= (vtop
->type
.t
>> VT_STRUCT_SHIFT
) & 0x3f;
1081 bit_size
= (vtop
->type
.t
>> (VT_STRUCT_SHIFT
+ 6)) & 0x3f;
1082 /* remove bit field info to avoid loops */
1083 vtop
->type
.t
&= ~VT_BITFIELD
& ((1 << VT_STRUCT_SHIFT
) - 1);
1084 /* cast to int to propagate signedness in following ops */
1085 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
) {
1090 if((vtop
->type
.t
& VT_UNSIGNED
) ||
1091 (vtop
->type
.t
& VT_BTYPE
) == VT_BOOL
)
1092 type
.t
|= VT_UNSIGNED
;
1094 /* generate shifts */
1095 vpushi(bits
- (bit_pos
+ bit_size
));
1097 vpushi(bits
- bit_size
);
1098 /* NOTE: transformed to SHR if unsigned */
1102 if (is_float(vtop
->type
.t
) &&
1103 (vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
) {
1106 unsigned long offset
;
1107 #if defined(TCC_TARGET_ARM) && !defined(TCC_ARM_VFP)
1111 /* XXX: unify with initializers handling ? */
1112 /* CPUs usually cannot use float constants, so we store them
1113 generically in data segment */
1114 size
= type_size(&vtop
->type
, &align
);
1115 offset
= (data_section
->data_offset
+ align
- 1) & -align
;
1116 data_section
->data_offset
= offset
;
1117 /* XXX: not portable yet */
1118 #if defined(__i386__) || defined(__x86_64__)
1119 /* Zero pad x87 tenbyte long doubles */
1120 if (size
== LDOUBLE_SIZE
) {
1121 vtop
->c
.tab
[2] &= 0xffff;
1122 #if LDOUBLE_SIZE == 16
1127 ptr
= section_ptr_add(data_section
, size
);
1129 #if defined(TCC_TARGET_ARM) && !defined(TCC_ARM_VFP)
1133 ptr
[i
] = vtop
->c
.tab
[size
-1-i
];
1137 ptr
[i
] = vtop
->c
.tab
[i
];
1138 sym
= get_sym_ref(&vtop
->type
, data_section
, offset
, size
<< 2);
1139 vtop
->r
|= VT_LVAL
| VT_SYM
;
1143 #ifdef CONFIG_TCC_BCHECK
1144 if (vtop
->r
& VT_MUSTBOUND
)
1148 r
= vtop
->r
& VT_VALMASK
;
1149 rc2
= (rc
& RC_FLOAT
) ? RC_FLOAT
: RC_INT
;
1150 #ifndef TCC_TARGET_ARM64
1153 #ifdef TCC_TARGET_X86_64
1154 else if (rc
== RC_FRET
)
1158 /* need to reload if:
1160 - lvalue (need to dereference pointer)
1161 - already a register, but not in the right class */
1163 || (vtop
->r
& VT_LVAL
)
1164 || !(reg_classes
[r
] & rc
)
1165 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
1166 || ((vtop
->type
.t
& VT_BTYPE
) == VT_QLONG
&& !(reg_classes
[vtop
->r2
] & rc2
))
1167 || ((vtop
->type
.t
& VT_BTYPE
) == VT_QFLOAT
&& !(reg_classes
[vtop
->r2
] & rc2
))
1169 || ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
&& !(reg_classes
[vtop
->r2
] & rc2
))
1174 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
1175 if (((vtop
->type
.t
& VT_BTYPE
) == VT_QLONG
) || ((vtop
->type
.t
& VT_BTYPE
) == VT_QFLOAT
)) {
1176 int addr_type
= VT_LLONG
, load_size
= 8, load_type
= ((vtop
->type
.t
& VT_BTYPE
) == VT_QLONG
) ? VT_LLONG
: VT_DOUBLE
;
1178 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
) {
1179 int addr_type
= VT_INT
, load_size
= 4, load_type
= VT_INT
;
1180 unsigned long long ll
;
1182 int r2
, original_type
;
1183 original_type
= vtop
->type
.t
;
1184 /* two register type load : expand to two words
1186 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
1187 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
) {
1190 vtop
->c
.i
= ll
; /* first word */
1192 vtop
->r
= r
; /* save register value */
1193 vpushi(ll
>> 32); /* second word */
1196 if (vtop
->r
& VT_LVAL
) {
1197 /* We do not want to modifier the long long
1198 pointer here, so the safest (and less
1199 efficient) is to save all the other registers
1200 in the stack. XXX: totally inefficient. */
1204 /* lvalue_save: save only if used further down the stack */
1205 save_reg_upstack(vtop
->r
, 1);
1207 /* load from memory */
1208 vtop
->type
.t
= load_type
;
1211 vtop
[-1].r
= r
; /* save register value */
1212 /* increment pointer to get second word */
1213 vtop
->type
.t
= addr_type
;
1218 vtop
->type
.t
= load_type
;
1220 /* move registers */
1223 vtop
[-1].r
= r
; /* save register value */
1224 vtop
->r
= vtop
[-1].r2
;
1226 /* Allocate second register. Here we rely on the fact that
1227 get_reg() tries first to free r2 of an SValue. */
1231 /* write second register */
1233 vtop
->type
.t
= original_type
;
1234 } else if ((vtop
->r
& VT_LVAL
) && !is_float(vtop
->type
.t
)) {
1236 /* lvalue of scalar type : need to use lvalue type
1237 because of possible cast */
1240 /* compute memory access type */
1241 if (vtop
->r
& VT_REF
)
1242 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
1247 else if (vtop
->r
& VT_LVAL_BYTE
)
1249 else if (vtop
->r
& VT_LVAL_SHORT
)
1251 if (vtop
->r
& VT_LVAL_UNSIGNED
)
1255 /* restore wanted type */
1258 /* one register type load */
1263 #ifdef TCC_TARGET_C67
1264 /* uses register pairs for doubles */
1265 if ((vtop
->type
.t
& VT_BTYPE
) == VT_DOUBLE
)
1272 /* generate vtop[-1] and vtop[0] in resp. classes rc1 and rc2 */
1273 ST_FUNC
void gv2(int rc1
, int rc2
)
1277 /* generate more generic register first. But VT_JMP or VT_CMP
1278 values must be generated first in all cases to avoid possible
1280 v
= vtop
[0].r
& VT_VALMASK
;
1281 if (v
!= VT_CMP
&& (v
& ~1) != VT_JMP
&& rc1
<= rc2
) {
1286 /* test if reload is needed for first register */
1287 if ((vtop
[-1].r
& VT_VALMASK
) >= VT_CONST
) {
1297 /* test if reload is needed for first register */
1298 if ((vtop
[0].r
& VT_VALMASK
) >= VT_CONST
) {
1304 #ifndef TCC_TARGET_ARM64
1305 /* wrapper around RC_FRET to return a register by type */
1306 static int rc_fret(int t
)
1308 #ifdef TCC_TARGET_X86_64
1309 if (t
== VT_LDOUBLE
) {
1317 /* wrapper around REG_FRET to return a register by type */
1318 static int reg_fret(int t
)
1320 #ifdef TCC_TARGET_X86_64
1321 if (t
== VT_LDOUBLE
) {
1328 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
1329 /* expand 64bit on stack in two ints */
1330 static void lexpand(void)
1333 u
= vtop
->type
.t
& (VT_DEFSIGN
| VT_UNSIGNED
);
1334 v
= vtop
->r
& (VT_VALMASK
| VT_LVAL
);
1335 if (v
== VT_CONST
) {
1338 } else if (v
== (VT_LVAL
|VT_CONST
) || v
== (VT_LVAL
|VT_LOCAL
)) {
1344 vtop
[0].r
= vtop
[-1].r2
;
1345 vtop
[0].r2
= vtop
[-1].r2
= VT_CONST
;
1347 vtop
[0].type
.t
= vtop
[-1].type
.t
= VT_INT
| u
;
1351 #ifdef TCC_TARGET_ARM
1352 /* expand long long on stack */
1353 ST_FUNC
void lexpand_nr(void)
1357 u
= vtop
->type
.t
& (VT_DEFSIGN
| VT_UNSIGNED
);
1359 vtop
->r2
= VT_CONST
;
1360 vtop
->type
.t
= VT_INT
| u
;
1361 v
=vtop
[-1].r
& (VT_VALMASK
| VT_LVAL
);
1362 if (v
== VT_CONST
) {
1363 vtop
[-1].c
.i
= vtop
->c
.i
;
1364 vtop
->c
.i
= vtop
->c
.i
>> 32;
1366 } else if (v
== (VT_LVAL
|VT_CONST
) || v
== (VT_LVAL
|VT_LOCAL
)) {
1368 vtop
->r
= vtop
[-1].r
;
1369 } else if (v
> VT_CONST
) {
1373 vtop
->r
= vtop
[-1].r2
;
1374 vtop
[-1].r2
= VT_CONST
;
1375 vtop
[-1].type
.t
= VT_INT
| u
;
1379 #if !defined(TCC_TARGET_X86_64) && !defined(TCC_TARGET_ARM64)
1380 /* build a long long from two ints */
1381 static void lbuild(int t
)
1383 gv2(RC_INT
, RC_INT
);
1384 vtop
[-1].r2
= vtop
[0].r
;
1385 vtop
[-1].type
.t
= t
;
1390 /* convert stack entry to register and duplicate its value in another
1392 static void gv_dup(void)
1398 #if !defined(TCC_TARGET_X86_64) && !defined(TCC_TARGET_ARM64)
1399 if ((t
& VT_BTYPE
) == VT_LLONG
) {
1406 /* stack: H L L1 H1 */
1416 /* duplicate value */
1421 #ifdef TCC_TARGET_X86_64
1422 if ((t
& VT_BTYPE
) == VT_LDOUBLE
) {
1432 load(r1
, &sv
); /* move r to r1 */
1434 /* duplicates value */
1440 /* Generate value test
1442 * Generate a test for any value (jump, comparison and integers) */
1443 ST_FUNC
int gvtst(int inv
, int t
)
1445 int v
= vtop
->r
& VT_VALMASK
;
1446 if (v
!= VT_CMP
&& v
!= VT_JMP
&& v
!= VT_JMPI
) {
1450 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
1451 /* constant jmp optimization */
1452 if ((vtop
->c
.i
!= 0) != inv
)
1457 return gtst(inv
, t
);
1460 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
1461 /* generate CPU independent (unsigned) long long operations */
1462 static void gen_opl(int op
)
1464 int t
, a
, b
, op1
, c
, i
;
1466 unsigned short reg_iret
= REG_IRET
;
1467 unsigned short reg_lret
= REG_LRET
;
1473 func
= TOK___divdi3
;
1476 func
= TOK___udivdi3
;
1479 func
= TOK___moddi3
;
1482 func
= TOK___umoddi3
;
1489 /* call generic long long function */
1490 vpush_global_sym(&func_old_type
, func
);
1495 vtop
->r2
= reg_lret
;
1503 //pv("gen_opl A",0,2);
1509 /* stack: L1 H1 L2 H2 */
1514 vtop
[-2] = vtop
[-3];
1517 /* stack: H1 H2 L1 L2 */
1518 //pv("gen_opl B",0,4);
1524 /* stack: H1 H2 L1 L2 ML MH */
1527 /* stack: ML MH H1 H2 L1 L2 */
1531 /* stack: ML MH H1 L2 H2 L1 */
1536 /* stack: ML MH M1 M2 */
1539 } else if (op
== '+' || op
== '-') {
1540 /* XXX: add non carry method too (for MIPS or alpha) */
1546 /* stack: H1 H2 (L1 op L2) */
1549 gen_op(op1
+ 1); /* TOK_xxxC2 */
1552 /* stack: H1 H2 (L1 op L2) */
1555 /* stack: (L1 op L2) H1 H2 */
1557 /* stack: (L1 op L2) (H1 op H2) */
1565 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
1566 t
= vtop
[-1].type
.t
;
1570 /* stack: L H shift */
1572 /* constant: simpler */
1573 /* NOTE: all comments are for SHL. the other cases are
1574 done by swaping words */
1585 if (op
!= TOK_SAR
) {
1618 /* XXX: should provide a faster fallback on x86 ? */
1621 func
= TOK___ashrdi3
;
1624 func
= TOK___lshrdi3
;
1627 func
= TOK___ashldi3
;
1633 /* compare operations */
1639 /* stack: L1 H1 L2 H2 */
1641 vtop
[-1] = vtop
[-2];
1643 /* stack: L1 L2 H1 H2 */
1646 /* when values are equal, we need to compare low words. since
1647 the jump is inverted, we invert the test too. */
1650 else if (op1
== TOK_GT
)
1652 else if (op1
== TOK_ULT
)
1654 else if (op1
== TOK_UGT
)
1664 /* generate non equal test */
1670 /* compare low. Always unsigned */
1674 else if (op1
== TOK_LE
)
1676 else if (op1
== TOK_GT
)
1678 else if (op1
== TOK_GE
)
1689 static uint64_t gen_opic_sdiv(uint64_t a
, uint64_t b
)
1691 uint64_t x
= (a
>> 63 ? -a
: a
) / (b
>> 63 ? -b
: b
);
1692 return (a
^ b
) >> 63 ? -x
: x
;
1695 static int gen_opic_lt(uint64_t a
, uint64_t b
)
1697 return (a
^ (uint64_t)1 << 63) < (b
^ (uint64_t)1 << 63);
1700 /* handle integer constant optimizations and various machine
1702 static void gen_opic(int op
)
1704 SValue
*v1
= vtop
- 1;
1706 int t1
= v1
->type
.t
& VT_BTYPE
;
1707 int t2
= v2
->type
.t
& VT_BTYPE
;
1708 int c1
= (v1
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
1709 int c2
= (v2
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
1710 uint64_t l1
= c1
? v1
->c
.i
: 0;
1711 uint64_t l2
= c2
? v2
->c
.i
: 0;
1712 int shm
= (t1
== VT_LLONG
) ? 63 : 31;
1714 if (t1
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t1
!= VT_PTR
))
1715 l1
= ((uint32_t)l1
|
1716 (v1
->type
.t
& VT_UNSIGNED
? 0 : -(l1
& 0x80000000)));
1717 if (t2
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t2
!= VT_PTR
))
1718 l2
= ((uint32_t)l2
|
1719 (v2
->type
.t
& VT_UNSIGNED
? 0 : -(l2
& 0x80000000)));
1723 case '+': l1
+= l2
; break;
1724 case '-': l1
-= l2
; break;
1725 case '&': l1
&= l2
; break;
1726 case '^': l1
^= l2
; break;
1727 case '|': l1
|= l2
; break;
1728 case '*': l1
*= l2
; break;
1735 /* if division by zero, generate explicit division */
1738 tcc_error("division by zero in constant");
1742 default: l1
= gen_opic_sdiv(l1
, l2
); break;
1743 case '%': l1
= l1
- l2
* gen_opic_sdiv(l1
, l2
); break;
1744 case TOK_UDIV
: l1
= l1
/ l2
; break;
1745 case TOK_UMOD
: l1
= l1
% l2
; break;
1748 case TOK_SHL
: l1
<<= (l2
& shm
); break;
1749 case TOK_SHR
: l1
>>= (l2
& shm
); break;
1751 l1
= (l1
>> 63) ? ~(~l1
>> (l2
& shm
)) : l1
>> (l2
& shm
);
1754 case TOK_ULT
: l1
= l1
< l2
; break;
1755 case TOK_UGE
: l1
= l1
>= l2
; break;
1756 case TOK_EQ
: l1
= l1
== l2
; break;
1757 case TOK_NE
: l1
= l1
!= l2
; break;
1758 case TOK_ULE
: l1
= l1
<= l2
; break;
1759 case TOK_UGT
: l1
= l1
> l2
; break;
1760 case TOK_LT
: l1
= gen_opic_lt(l1
, l2
); break;
1761 case TOK_GE
: l1
= !gen_opic_lt(l1
, l2
); break;
1762 case TOK_LE
: l1
= !gen_opic_lt(l2
, l1
); break;
1763 case TOK_GT
: l1
= gen_opic_lt(l2
, l1
); break;
1765 case TOK_LAND
: l1
= l1
&& l2
; break;
1766 case TOK_LOR
: l1
= l1
|| l2
; break;
1770 if (t1
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t1
!= VT_PTR
))
1771 l1
= ((uint32_t)l1
|
1772 (v1
->type
.t
& VT_UNSIGNED
? 0 : -(l1
& 0x80000000)));
1776 /* if commutative ops, put c2 as constant */
1777 if (c1
&& (op
== '+' || op
== '&' || op
== '^' ||
1778 op
== '|' || op
== '*')) {
1780 c2
= c1
; //c = c1, c1 = c2, c2 = c;
1781 l2
= l1
; //l = l1, l1 = l2, l2 = l;
1783 if (!const_wanted
&&
1785 (op
== TOK_SHL
|| op
== TOK_SHR
|| op
== TOK_SAR
)) ||
1786 (l1
== -1 && op
== TOK_SAR
))) {
1787 /* treat (0 << x), (0 >> x) and (-1 >> x) as constant */
1789 } else if (!const_wanted
&&
1790 c2
&& ((l2
== 0 && (op
== '&' || op
== '*')) ||
1791 (l2
== -1 && op
== '|') ||
1792 (l2
== 0xffffffff && t2
!= VT_LLONG
&& op
== '|') ||
1793 (l2
== 1 && (op
== '%' || op
== TOK_UMOD
)))) {
1794 /* treat (x & 0), (x * 0), (x | -1) and (x % 1) as constant */
1799 } else if (c2
&& (((op
== '*' || op
== '/' || op
== TOK_UDIV
||
1802 ((op
== '+' || op
== '-' || op
== '|' || op
== '^' ||
1803 op
== TOK_SHL
|| op
== TOK_SHR
|| op
== TOK_SAR
) &&
1807 /* filter out NOP operations like x*1, x-0, x&-1... */
1809 } else if (c2
&& (op
== '*' || op
== TOK_PDIV
|| op
== TOK_UDIV
)) {
1810 /* try to use shifts instead of muls or divs */
1811 if (l2
> 0 && (l2
& (l2
- 1)) == 0) {
1820 else if (op
== TOK_PDIV
)
1826 } else if (c2
&& (op
== '+' || op
== '-') &&
1827 (((vtop
[-1].r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == (VT_CONST
| VT_SYM
))
1828 || (vtop
[-1].r
& (VT_VALMASK
| VT_LVAL
)) == VT_LOCAL
)) {
1829 /* symbol + constant case */
1833 /* The backends can't always deal with addends to symbols
1834 larger than +-1<<31. Don't construct such. */
1841 /* call low level op generator */
1842 if (t1
== VT_LLONG
|| t2
== VT_LLONG
||
1843 (PTR_SIZE
== 8 && (t1
== VT_PTR
|| t2
== VT_PTR
)))
1851 /* generate a floating point operation with constant propagation */
1852 static void gen_opif(int op
)
1860 /* currently, we cannot do computations with forward symbols */
1861 c1
= (v1
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
1862 c2
= (v2
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
1864 if (v1
->type
.t
== VT_FLOAT
) {
1867 } else if (v1
->type
.t
== VT_DOUBLE
) {
1875 /* NOTE: we only do constant propagation if finite number (not
1876 NaN or infinity) (ANSI spec) */
1877 if (!ieee_finite(f1
) || !ieee_finite(f2
))
1881 case '+': f1
+= f2
; break;
1882 case '-': f1
-= f2
; break;
1883 case '*': f1
*= f2
; break;
1887 tcc_error("division by zero in constant");
1892 /* XXX: also handles tests ? */
1896 /* XXX: overflow test ? */
1897 if (v1
->type
.t
== VT_FLOAT
) {
1899 } else if (v1
->type
.t
== VT_DOUBLE
) {
1911 static int pointed_size(CType
*type
)
1914 return type_size(pointed_type(type
), &align
);
1917 static void vla_runtime_pointed_size(CType
*type
)
1920 vla_runtime_type_size(pointed_type(type
), &align
);
1923 static inline int is_null_pointer(SValue
*p
)
1925 if ((p
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) != VT_CONST
)
1927 return ((p
->type
.t
& VT_BTYPE
) == VT_INT
&& (uint32_t)p
->c
.i
== 0) ||
1928 ((p
->type
.t
& VT_BTYPE
) == VT_LLONG
&& p
->c
.i
== 0) ||
1929 ((p
->type
.t
& VT_BTYPE
) == VT_PTR
&&
1930 (PTR_SIZE
== 4 ? (uint32_t)p
->c
.i
== 0 : p
->c
.i
== 0));
1933 static inline int is_integer_btype(int bt
)
1935 return (bt
== VT_BYTE
|| bt
== VT_SHORT
||
1936 bt
== VT_INT
|| bt
== VT_LLONG
);
1939 /* check types for comparison or subtraction of pointers */
1940 static void check_comparison_pointer_types(SValue
*p1
, SValue
*p2
, int op
)
1942 CType
*type1
, *type2
, tmp_type1
, tmp_type2
;
1945 /* null pointers are accepted for all comparisons as gcc */
1946 if (is_null_pointer(p1
) || is_null_pointer(p2
))
1950 bt1
= type1
->t
& VT_BTYPE
;
1951 bt2
= type2
->t
& VT_BTYPE
;
1952 /* accept comparison between pointer and integer with a warning */
1953 if ((is_integer_btype(bt1
) || is_integer_btype(bt2
)) && op
!= '-') {
1954 if (op
!= TOK_LOR
&& op
!= TOK_LAND
)
1955 tcc_warning("comparison between pointer and integer");
1959 /* both must be pointers or implicit function pointers */
1960 if (bt1
== VT_PTR
) {
1961 type1
= pointed_type(type1
);
1962 } else if (bt1
!= VT_FUNC
)
1963 goto invalid_operands
;
1965 if (bt2
== VT_PTR
) {
1966 type2
= pointed_type(type2
);
1967 } else if (bt2
!= VT_FUNC
) {
1969 tcc_error("invalid operands to binary %s", get_tok_str(op
, NULL
));
1971 if ((type1
->t
& VT_BTYPE
) == VT_VOID
||
1972 (type2
->t
& VT_BTYPE
) == VT_VOID
)
1976 tmp_type1
.t
&= ~(VT_DEFSIGN
| VT_UNSIGNED
| VT_CONSTANT
| VT_VOLATILE
);
1977 tmp_type2
.t
&= ~(VT_DEFSIGN
| VT_UNSIGNED
| VT_CONSTANT
| VT_VOLATILE
);
1978 if (!is_compatible_types(&tmp_type1
, &tmp_type2
)) {
1979 /* gcc-like error if '-' is used */
1981 goto invalid_operands
;
1983 tcc_warning("comparison of distinct pointer types lacks a cast");
1987 /* generic gen_op: handles types problems */
1988 ST_FUNC
void gen_op(int op
)
1990 int u
, t1
, t2
, bt1
, bt2
, t
;
1994 t1
= vtop
[-1].type
.t
;
1995 t2
= vtop
[0].type
.t
;
1996 bt1
= t1
& VT_BTYPE
;
1997 bt2
= t2
& VT_BTYPE
;
1999 if (bt1
== VT_STRUCT
|| bt2
== VT_STRUCT
) {
2000 tcc_error("operation on a struct");
2001 } else if (bt1
== VT_FUNC
|| bt2
== VT_FUNC
) {
2002 if (bt2
== VT_FUNC
) {
2003 mk_pointer(&vtop
->type
);
2006 if (bt1
== VT_FUNC
) {
2008 mk_pointer(&vtop
->type
);
2013 } else if (bt1
== VT_PTR
|| bt2
== VT_PTR
) {
2014 /* at least one operand is a pointer */
2015 /* relationnal op: must be both pointers */
2016 if (op
>= TOK_ULT
&& op
<= TOK_LOR
) {
2017 check_comparison_pointer_types(vtop
- 1, vtop
, op
);
2018 /* pointers are handled are unsigned */
2019 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
2020 t
= VT_LLONG
| VT_UNSIGNED
;
2022 t
= VT_INT
| VT_UNSIGNED
;
2026 /* if both pointers, then it must be the '-' op */
2027 if (bt1
== VT_PTR
&& bt2
== VT_PTR
) {
2029 tcc_error("cannot use pointers here");
2030 check_comparison_pointer_types(vtop
- 1, vtop
, op
);
2031 /* XXX: check that types are compatible */
2032 if (vtop
[-1].type
.t
& VT_VLA
) {
2033 vla_runtime_pointed_size(&vtop
[-1].type
);
2035 vpushi(pointed_size(&vtop
[-1].type
));
2039 /* set to integer type */
2040 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
2041 vtop
->type
.t
= VT_LLONG
;
2043 vtop
->type
.t
= VT_INT
;
2048 /* exactly one pointer : must be '+' or '-'. */
2049 if (op
!= '-' && op
!= '+')
2050 tcc_error("cannot use pointers here");
2051 /* Put pointer as first operand */
2052 if (bt2
== VT_PTR
) {
2054 t
= t1
, t1
= t2
, t2
= t
;
2057 if ((vtop
[0].type
.t
& VT_BTYPE
) == VT_LLONG
)
2058 /* XXX: truncate here because gen_opl can't handle ptr + long long */
2059 gen_cast(&int_type
);
2061 type1
= vtop
[-1].type
;
2062 type1
.t
&= ~VT_ARRAY
;
2063 if (vtop
[-1].type
.t
& VT_VLA
)
2064 vla_runtime_pointed_size(&vtop
[-1].type
);
2066 u
= pointed_size(&vtop
[-1].type
);
2068 tcc_error("unknown array element size");
2069 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
2072 /* XXX: cast to int ? (long long case) */
2078 /* #ifdef CONFIG_TCC_BCHECK
2079 The main reason to removing this code:
2086 fprintf(stderr, "v+i-j = %p\n", v+i-j);
2087 fprintf(stderr, "v+(i-j) = %p\n", v+(i-j));
2089 When this code is on. then the output looks like
2091 v+(i-j) = 0xbff84000
2093 /* if evaluating constant expression, no code should be
2094 generated, so no bound check */
2095 if (tcc_state
->do_bounds_check
&& !const_wanted
) {
2096 /* if bounded pointers, we generate a special code to
2103 gen_bounded_ptr_add();
2109 /* put again type if gen_opic() swaped operands */
2112 } else if (is_float(bt1
) || is_float(bt2
)) {
2113 /* compute bigger type and do implicit casts */
2114 if (bt1
== VT_LDOUBLE
|| bt2
== VT_LDOUBLE
) {
2116 } else if (bt1
== VT_DOUBLE
|| bt2
== VT_DOUBLE
) {
2121 /* floats can only be used for a few operations */
2122 if (op
!= '+' && op
!= '-' && op
!= '*' && op
!= '/' &&
2123 (op
< TOK_ULT
|| op
> TOK_GT
))
2124 tcc_error("invalid operands for binary operation");
2126 } else if (op
== TOK_SHR
|| op
== TOK_SAR
|| op
== TOK_SHL
) {
2127 t
= bt1
== VT_LLONG
? VT_LLONG
: VT_INT
;
2128 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
)) == (t
| VT_UNSIGNED
))
2131 } else if (bt1
== VT_LLONG
|| bt2
== VT_LLONG
) {
2132 /* cast to biggest op */
2134 /* convert to unsigned if it does not fit in a long long */
2135 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
)) == (VT_LLONG
| VT_UNSIGNED
) ||
2136 (t2
& (VT_BTYPE
| VT_UNSIGNED
)) == (VT_LLONG
| VT_UNSIGNED
))
2140 /* integer operations */
2142 /* convert to unsigned if it does not fit in an integer */
2143 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
)) == (VT_INT
| VT_UNSIGNED
) ||
2144 (t2
& (VT_BTYPE
| VT_UNSIGNED
)) == (VT_INT
| VT_UNSIGNED
))
2147 /* XXX: currently, some unsigned operations are explicit, so
2148 we modify them here */
2149 if (t
& VT_UNSIGNED
) {
2156 else if (op
== TOK_LT
)
2158 else if (op
== TOK_GT
)
2160 else if (op
== TOK_LE
)
2162 else if (op
== TOK_GE
)
2169 /* special case for shifts and long long: we keep the shift as
2171 if (op
== TOK_SHR
|| op
== TOK_SAR
|| op
== TOK_SHL
)
2178 if (op
>= TOK_ULT
&& op
<= TOK_GT
) {
2179 /* relationnal op: the result is an int */
2180 vtop
->type
.t
= VT_INT
;
2185 // Make sure that we have converted to an rvalue:
2186 if (vtop
->r
& VT_LVAL
)
2187 gv(is_float(vtop
->type
.t
& VT_BTYPE
) ? RC_FLOAT
: RC_INT
);
2190 #ifndef TCC_TARGET_ARM
2191 /* generic itof for unsigned long long case */
2192 static void gen_cvt_itof1(int t
)
2194 #ifdef TCC_TARGET_ARM64
2197 if ((vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
)) ==
2198 (VT_LLONG
| VT_UNSIGNED
)) {
2201 vpush_global_sym(&func_old_type
, TOK___floatundisf
);
2202 #if LDOUBLE_SIZE != 8
2203 else if (t
== VT_LDOUBLE
)
2204 vpush_global_sym(&func_old_type
, TOK___floatundixf
);
2207 vpush_global_sym(&func_old_type
, TOK___floatundidf
);
2211 vtop
->r
= reg_fret(t
);
2219 /* generic ftoi for unsigned long long case */
2220 static void gen_cvt_ftoi1(int t
)
2222 #ifdef TCC_TARGET_ARM64
2227 if (t
== (VT_LLONG
| VT_UNSIGNED
)) {
2228 /* not handled natively */
2229 st
= vtop
->type
.t
& VT_BTYPE
;
2231 vpush_global_sym(&func_old_type
, TOK___fixunssfdi
);
2232 #if LDOUBLE_SIZE != 8
2233 else if (st
== VT_LDOUBLE
)
2234 vpush_global_sym(&func_old_type
, TOK___fixunsxfdi
);
2237 vpush_global_sym(&func_old_type
, TOK___fixunsdfdi
);
2242 vtop
->r2
= REG_LRET
;
2249 /* force char or short cast */
2250 static void force_charshort_cast(int t
)
2254 /* XXX: add optimization if lvalue : just change type and offset */
2259 if (t
& VT_UNSIGNED
) {
2260 vpushi((1 << bits
) - 1);
2263 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
)
2269 /* result must be signed or the SAR is converted to an SHL
2270 This was not the case when "t" was a signed short
2271 and the last value on the stack was an unsigned int */
2272 vtop
->type
.t
&= ~VT_UNSIGNED
;
2278 /* cast 'vtop' to 'type'. Casting to bitfields is forbidden. */
2279 static void gen_cast(CType
*type
)
2281 int sbt
, dbt
, sf
, df
, c
, p
;
2283 /* special delayed cast for char/short */
2284 /* XXX: in some cases (multiple cascaded casts), it may still
2286 if (vtop
->r
& VT_MUSTCAST
) {
2287 vtop
->r
&= ~VT_MUSTCAST
;
2288 force_charshort_cast(vtop
->type
.t
);
2291 /* bitfields first get cast to ints */
2292 if (vtop
->type
.t
& VT_BITFIELD
) {
2296 dbt
= type
->t
& (VT_BTYPE
| VT_UNSIGNED
);
2297 sbt
= vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
);
2302 c
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2303 p
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == (VT_CONST
| VT_SYM
);
2305 /* constant case: we can do it now */
2306 /* XXX: in ISOC, cannot do it if error in convert */
2307 if (sbt
== VT_FLOAT
)
2308 vtop
->c
.ld
= vtop
->c
.f
;
2309 else if (sbt
== VT_DOUBLE
)
2310 vtop
->c
.ld
= vtop
->c
.d
;
2313 if ((sbt
& VT_BTYPE
) == VT_LLONG
) {
2314 if ((sbt
& VT_UNSIGNED
) || !(vtop
->c
.i
>> 63))
2315 vtop
->c
.ld
= vtop
->c
.i
;
2317 vtop
->c
.ld
= -(long double)-vtop
->c
.i
;
2319 if ((sbt
& VT_UNSIGNED
) || !(vtop
->c
.i
>> 31))
2320 vtop
->c
.ld
= (uint32_t)vtop
->c
.i
;
2322 vtop
->c
.ld
= -(long double)-(uint32_t)vtop
->c
.i
;
2325 if (dbt
== VT_FLOAT
)
2326 vtop
->c
.f
= (float)vtop
->c
.ld
;
2327 else if (dbt
== VT_DOUBLE
)
2328 vtop
->c
.d
= (double)vtop
->c
.ld
;
2329 } else if (sf
&& dbt
== (VT_LLONG
|VT_UNSIGNED
)) {
2330 vtop
->c
.i
= vtop
->c
.ld
;
2331 } else if (sf
&& dbt
== VT_BOOL
) {
2332 vtop
->c
.i
= (vtop
->c
.ld
!= 0);
2335 vtop
->c
.i
= vtop
->c
.ld
;
2336 else if (sbt
== (VT_LLONG
|VT_UNSIGNED
))
2338 else if (sbt
& VT_UNSIGNED
)
2339 vtop
->c
.i
= (uint32_t)vtop
->c
.i
;
2340 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
2341 else if (sbt
== VT_PTR
)
2344 else if (sbt
!= VT_LLONG
)
2345 vtop
->c
.i
= ((uint32_t)vtop
->c
.i
|
2346 -(vtop
->c
.i
& 0x80000000));
2348 if (dbt
== (VT_LLONG
|VT_UNSIGNED
))
2350 else if (dbt
== VT_BOOL
)
2351 vtop
->c
.i
= (vtop
->c
.i
!= 0);
2352 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
2353 else if (dbt
== VT_PTR
)
2356 else if (dbt
!= VT_LLONG
) {
2357 uint32_t m
= ((dbt
& VT_BTYPE
) == VT_BYTE
? 0xff :
2358 (dbt
& VT_BTYPE
) == VT_SHORT
? 0xffff :
2361 if (!(dbt
& VT_UNSIGNED
))
2362 vtop
->c
.i
|= -(vtop
->c
.i
& ((m
>> 1) + 1));
2365 } else if (p
&& dbt
== VT_BOOL
) {
2369 /* non constant case: generate code */
2371 /* convert from fp to fp */
2374 /* convert int to fp */
2377 /* convert fp to int */
2378 if (dbt
== VT_BOOL
) {
2382 /* we handle char/short/etc... with generic code */
2383 if (dbt
!= (VT_INT
| VT_UNSIGNED
) &&
2384 dbt
!= (VT_LLONG
| VT_UNSIGNED
) &&
2388 if (dbt
== VT_INT
&& (type
->t
& (VT_BTYPE
| VT_UNSIGNED
)) != dbt
) {
2389 /* additional cast for char/short... */
2394 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
2395 } else if ((dbt
& VT_BTYPE
) == VT_LLONG
) {
2396 if ((sbt
& VT_BTYPE
) != VT_LLONG
) {
2397 /* scalar to long long */
2398 /* machine independent conversion */
2400 /* generate high word */
2401 if (sbt
== (VT_INT
| VT_UNSIGNED
)) {
2405 if (sbt
== VT_PTR
) {
2406 /* cast from pointer to int before we apply
2407 shift operation, which pointers don't support*/
2408 gen_cast(&int_type
);
2414 /* patch second register */
2415 vtop
[-1].r2
= vtop
->r
;
2419 } else if ((dbt
& VT_BTYPE
) == VT_LLONG
||
2420 (dbt
& VT_BTYPE
) == VT_PTR
||
2421 (dbt
& VT_BTYPE
) == VT_FUNC
) {
2422 if ((sbt
& VT_BTYPE
) != VT_LLONG
&&
2423 (sbt
& VT_BTYPE
) != VT_PTR
&&
2424 (sbt
& VT_BTYPE
) != VT_FUNC
) {
2425 /* need to convert from 32bit to 64bit */
2427 if (sbt
!= (VT_INT
| VT_UNSIGNED
)) {
2428 #if defined(TCC_TARGET_ARM64)
2430 #elif defined(TCC_TARGET_X86_64)
2432 /* x86_64 specific: movslq */
2434 o(0xc0 + (REG_VALUE(r
) << 3) + REG_VALUE(r
));
2441 } else if (dbt
== VT_BOOL
) {
2442 /* scalar to bool */
2445 } else if ((dbt
& VT_BTYPE
) == VT_BYTE
||
2446 (dbt
& VT_BTYPE
) == VT_SHORT
) {
2447 if (sbt
== VT_PTR
) {
2448 vtop
->type
.t
= VT_INT
;
2449 tcc_warning("nonportable conversion from pointer to char/short");
2451 force_charshort_cast(dbt
);
2452 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
2453 } else if ((dbt
& VT_BTYPE
) == VT_INT
) {
2455 if ((sbt
& VT_BTYPE
) == VT_LLONG
) {
2456 /* from long long: just take low order word */
2460 /* if lvalue and single word type, nothing to do because
2461 the lvalue already contains the real type size (see
2462 VT_LVAL_xxx constants) */
2466 } else if ((dbt
& VT_BTYPE
) == VT_PTR
&& !(vtop
->r
& VT_LVAL
)) {
2467 /* if we are casting between pointer types,
2468 we must update the VT_LVAL_xxx size */
2469 vtop
->r
= (vtop
->r
& ~VT_LVAL_TYPE
)
2470 | (lvalue_type(type
->ref
->type
.t
) & VT_LVAL_TYPE
);
2475 /* return type size as known at compile time. Put alignment at 'a' */
2476 ST_FUNC
int type_size(CType
*type
, int *a
)
2481 bt
= type
->t
& VT_BTYPE
;
2482 if (bt
== VT_STRUCT
) {
2487 } else if (bt
== VT_PTR
) {
2488 if (type
->t
& VT_ARRAY
) {
2492 ts
= type_size(&s
->type
, a
);
2494 if (ts
< 0 && s
->c
< 0)
2502 } else if (bt
== VT_LDOUBLE
) {
2504 return LDOUBLE_SIZE
;
2505 } else if (bt
== VT_DOUBLE
|| bt
== VT_LLONG
) {
2506 #ifdef TCC_TARGET_I386
2507 #ifdef TCC_TARGET_PE
2512 #elif defined(TCC_TARGET_ARM)
2522 } else if (bt
== VT_INT
|| bt
== VT_FLOAT
) {
2525 } else if (bt
== VT_SHORT
) {
2528 } else if (bt
== VT_QLONG
|| bt
== VT_QFLOAT
) {
2531 } else if (bt
== VT_ENUM
) {
2533 /* Enums might be incomplete, so don't just return '4' here. */
2534 return type
->ref
->c
;
2536 /* char, void, function, _Bool */
2542 /* push type size as known at runtime time on top of value stack. Put
2544 ST_FUNC
void vla_runtime_type_size(CType
*type
, int *a
)
2546 if (type
->t
& VT_VLA
) {
2547 type_size(&type
->ref
->type
, a
);
2548 vset(&int_type
, VT_LOCAL
|VT_LVAL
, type
->ref
->c
);
2550 vpushi(type_size(type
, a
));
2554 static void vla_sp_restore(void) {
2555 if (vlas_in_scope
) {
2556 gen_vla_sp_restore(vla_sp_loc
);
2560 static void vla_sp_restore_root(void) {
2561 if (vlas_in_scope
) {
2562 gen_vla_sp_restore(vla_sp_root_loc
);
2566 /* return the pointed type of t */
2567 static inline CType
*pointed_type(CType
*type
)
2569 return &type
->ref
->type
;
2572 /* modify type so that its it is a pointer to type. */
2573 ST_FUNC
void mk_pointer(CType
*type
)
2576 s
= sym_push(SYM_FIELD
, type
, 0, -1);
2577 type
->t
= VT_PTR
| (type
->t
& ~VT_TYPE
);
2581 /* compare function types. OLD functions match any new functions */
2582 static int is_compatible_func(CType
*type1
, CType
*type2
)
2588 if (!is_compatible_types(&s1
->type
, &s2
->type
))
2590 /* check func_call */
2591 if (s1
->a
.func_call
!= s2
->a
.func_call
)
2593 /* XXX: not complete */
2594 if (s1
->c
== FUNC_OLD
|| s2
->c
== FUNC_OLD
)
2598 while (s1
!= NULL
) {
2601 if (!is_compatible_parameter_types(&s1
->type
, &s2
->type
))
2611 /* return true if type1 and type2 are the same. If unqualified is
2612 true, qualifiers on the types are ignored.
2614 - enums are not checked as gcc __builtin_types_compatible_p ()
2616 static int compare_types(CType
*type1
, CType
*type2
, int unqualified
)
2620 t1
= type1
->t
& VT_TYPE
;
2621 t2
= type2
->t
& VT_TYPE
;
2623 /* strip qualifiers before comparing */
2624 t1
&= ~(VT_CONSTANT
| VT_VOLATILE
);
2625 t2
&= ~(VT_CONSTANT
| VT_VOLATILE
);
2627 /* Default Vs explicit signedness only matters for char */
2628 if ((t1
& VT_BTYPE
) != VT_BYTE
) {
2632 /* An enum is compatible with (unsigned) int. Ideally we would
2633 store the enums signedness in type->ref.a.<some_bit> and
2634 only accept unsigned enums with unsigned int and vice versa.
2635 But one of our callers (gen_assign_cast) always strips VT_UNSIGNED
2636 from pointer target types, so we can't add it here either. */
2637 if ((t1
& VT_BTYPE
) == VT_ENUM
) {
2639 if (type1
->ref
->a
.unsigned_enum
)
2642 if ((t2
& VT_BTYPE
) == VT_ENUM
) {
2644 if (type2
->ref
->a
.unsigned_enum
)
2647 /* XXX: bitfields ? */
2650 /* test more complicated cases */
2651 bt1
= t1
& VT_BTYPE
;
2652 if (bt1
== VT_PTR
) {
2653 type1
= pointed_type(type1
);
2654 type2
= pointed_type(type2
);
2655 return is_compatible_types(type1
, type2
);
2656 } else if (bt1
== VT_STRUCT
) {
2657 return (type1
->ref
== type2
->ref
);
2658 } else if (bt1
== VT_FUNC
) {
2659 return is_compatible_func(type1
, type2
);
2665 /* return true if type1 and type2 are exactly the same (including
2668 static int is_compatible_types(CType
*type1
, CType
*type2
)
2670 return compare_types(type1
,type2
,0);
2673 /* return true if type1 and type2 are the same (ignoring qualifiers).
2675 static int is_compatible_parameter_types(CType
*type1
, CType
*type2
)
2677 return compare_types(type1
,type2
,1);
2680 /* print a type. If 'varstr' is not NULL, then the variable is also
2681 printed in the type */
2683 /* XXX: add array and function pointers */
2684 static void type_to_str(char *buf
, int buf_size
,
2685 CType
*type
, const char *varstr
)
2692 t
= type
->t
& VT_TYPE
;
2695 if (t
& VT_CONSTANT
)
2696 pstrcat(buf
, buf_size
, "const ");
2697 if (t
& VT_VOLATILE
)
2698 pstrcat(buf
, buf_size
, "volatile ");
2699 if ((t
& (VT_DEFSIGN
| VT_UNSIGNED
)) == (VT_DEFSIGN
| VT_UNSIGNED
))
2700 pstrcat(buf
, buf_size
, "unsigned ");
2701 else if (t
& VT_DEFSIGN
)
2702 pstrcat(buf
, buf_size
, "signed ");
2732 tstr
= "long double";
2734 pstrcat(buf
, buf_size
, tstr
);
2738 if (bt
== VT_STRUCT
)
2742 pstrcat(buf
, buf_size
, tstr
);
2743 v
= type
->ref
->v
& ~SYM_STRUCT
;
2744 if (v
>= SYM_FIRST_ANOM
)
2745 pstrcat(buf
, buf_size
, "<anonymous>");
2747 pstrcat(buf
, buf_size
, get_tok_str(v
, NULL
));
2751 type_to_str(buf
, buf_size
, &s
->type
, varstr
);
2752 pstrcat(buf
, buf_size
, "(");
2754 while (sa
!= NULL
) {
2755 type_to_str(buf1
, sizeof(buf1
), &sa
->type
, NULL
);
2756 pstrcat(buf
, buf_size
, buf1
);
2759 pstrcat(buf
, buf_size
, ", ");
2761 pstrcat(buf
, buf_size
, ")");
2766 snprintf(buf1
, sizeof(buf1
), "%s[%ld]", varstr
? varstr
: "", s
->c
);
2767 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
2770 pstrcpy(buf1
, sizeof(buf1
), "*");
2771 if (t
& VT_CONSTANT
)
2772 pstrcat(buf1
, buf_size
, "const ");
2773 if (t
& VT_VOLATILE
)
2774 pstrcat(buf1
, buf_size
, "volatile ");
2776 pstrcat(buf1
, sizeof(buf1
), varstr
);
2777 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
2781 pstrcat(buf
, buf_size
, " ");
2782 pstrcat(buf
, buf_size
, varstr
);
2787 /* verify type compatibility to store vtop in 'dt' type, and generate
2789 static void gen_assign_cast(CType
*dt
)
2791 CType
*st
, *type1
, *type2
, tmp_type1
, tmp_type2
;
2792 char buf1
[256], buf2
[256];
2795 st
= &vtop
->type
; /* source type */
2796 dbt
= dt
->t
& VT_BTYPE
;
2797 sbt
= st
->t
& VT_BTYPE
;
2798 if (sbt
== VT_VOID
|| dbt
== VT_VOID
) {
2799 if (sbt
== VT_VOID
&& dbt
== VT_VOID
)
2801 It is Ok if both are void
2807 gcc accepts this program
2810 tcc_error("cannot cast from/to void");
2812 if (dt
->t
& VT_CONSTANT
)
2813 tcc_warning("assignment of read-only location");
2816 /* special cases for pointers */
2817 /* '0' can also be a pointer */
2818 if (is_null_pointer(vtop
))
2820 /* accept implicit pointer to integer cast with warning */
2821 if (is_integer_btype(sbt
)) {
2822 tcc_warning("assignment makes pointer from integer without a cast");
2825 type1
= pointed_type(dt
);
2826 /* a function is implicitely a function pointer */
2827 if (sbt
== VT_FUNC
) {
2828 if ((type1
->t
& VT_BTYPE
) != VT_VOID
&&
2829 !is_compatible_types(pointed_type(dt
), st
))
2830 tcc_warning("assignment from incompatible pointer type");
2835 type2
= pointed_type(st
);
2836 if ((type1
->t
& VT_BTYPE
) == VT_VOID
||
2837 (type2
->t
& VT_BTYPE
) == VT_VOID
) {
2838 /* void * can match anything */
2840 /* exact type match, except for qualifiers */
2843 tmp_type1
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
2844 tmp_type2
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
2845 if (!is_compatible_types(&tmp_type1
, &tmp_type2
)) {
2846 /* Like GCC don't warn by default for merely changes
2847 in pointer target signedness. Do warn for different
2848 base types, though, in particular for unsigned enums
2849 and signed int targets. */
2850 if ((tmp_type1
.t
& (VT_DEFSIGN
| VT_UNSIGNED
)) !=
2851 (tmp_type2
.t
& (VT_DEFSIGN
| VT_UNSIGNED
)) &&
2852 (tmp_type1
.t
& VT_BTYPE
) == (tmp_type2
.t
& VT_BTYPE
))
2855 tcc_warning("assignment from incompatible pointer type");
2858 /* check const and volatile */
2859 if ((!(type1
->t
& VT_CONSTANT
) && (type2
->t
& VT_CONSTANT
)) ||
2860 (!(type1
->t
& VT_VOLATILE
) && (type2
->t
& VT_VOLATILE
)))
2861 tcc_warning("assignment discards qualifiers from pointer target type");
2867 if (sbt
== VT_PTR
|| sbt
== VT_FUNC
) {
2868 tcc_warning("assignment makes integer from pointer without a cast");
2869 } else if (sbt
== VT_STRUCT
) {
2870 goto case_VT_STRUCT
;
2872 /* XXX: more tests */
2878 tmp_type1
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
2879 tmp_type2
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
2880 if (!is_compatible_types(&tmp_type1
, &tmp_type2
)) {
2882 type_to_str(buf1
, sizeof(buf1
), st
, NULL
);
2883 type_to_str(buf2
, sizeof(buf2
), dt
, NULL
);
2884 tcc_error("cannot cast '%s' to '%s'", buf1
, buf2
);
2892 /* store vtop in lvalue pushed on stack */
2893 ST_FUNC
void vstore(void)
2895 int sbt
, dbt
, ft
, r
, t
, size
, align
, bit_size
, bit_pos
, rc
, delayed_cast
;
2897 ft
= vtop
[-1].type
.t
;
2898 sbt
= vtop
->type
.t
& VT_BTYPE
;
2899 dbt
= ft
& VT_BTYPE
;
2900 if ((((sbt
== VT_INT
|| sbt
== VT_SHORT
) && dbt
== VT_BYTE
) ||
2901 (sbt
== VT_INT
&& dbt
== VT_SHORT
))
2902 && !(vtop
->type
.t
& VT_BITFIELD
)) {
2903 /* optimize char/short casts */
2904 delayed_cast
= VT_MUSTCAST
;
2905 vtop
->type
.t
= (ft
& VT_TYPE
& ~VT_BITFIELD
&
2906 ((1 << VT_STRUCT_SHIFT
) - 1));
2907 /* XXX: factorize */
2908 if (ft
& VT_CONSTANT
)
2909 tcc_warning("assignment of read-only location");
2912 if (!(ft
& VT_BITFIELD
))
2913 gen_assign_cast(&vtop
[-1].type
);
2916 if (sbt
== VT_STRUCT
) {
2917 /* if structure, only generate pointer */
2918 /* structure assignment : generate memcpy */
2919 /* XXX: optimize if small size */
2920 size
= type_size(&vtop
->type
, &align
);
2924 vtop
->type
.t
= VT_PTR
;
2927 /* address of memcpy() */
2930 vpush_global_sym(&func_old_type
, TOK_memcpy8
);
2931 else if(!(align
& 3))
2932 vpush_global_sym(&func_old_type
, TOK_memcpy4
);
2935 /* Use memmove, rather than memcpy, as dest and src may be same: */
2936 vpush_global_sym(&func_old_type
, TOK_memmove
);
2941 vtop
->type
.t
= VT_PTR
;
2947 /* leave source on stack */
2948 } else if (ft
& VT_BITFIELD
) {
2949 /* bitfield store handling */
2951 /* save lvalue as expression result (example: s.b = s.a = n;) */
2952 vdup(), vtop
[-1] = vtop
[-2];
2954 bit_pos
= (ft
>> VT_STRUCT_SHIFT
) & 0x3f;
2955 bit_size
= (ft
>> (VT_STRUCT_SHIFT
+ 6)) & 0x3f;
2956 /* remove bit field info to avoid loops */
2957 vtop
[-1].type
.t
= ft
& ~VT_BITFIELD
& ((1 << VT_STRUCT_SHIFT
) - 1);
2959 if((ft
& VT_BTYPE
) == VT_BOOL
) {
2960 gen_cast(&vtop
[-1].type
);
2961 vtop
[-1].type
.t
= (vtop
[-1].type
.t
& ~VT_BTYPE
) | (VT_BYTE
| VT_UNSIGNED
);
2964 /* duplicate destination */
2966 vtop
[-1] = vtop
[-2];
2968 /* mask and shift source */
2969 if((ft
& VT_BTYPE
) != VT_BOOL
) {
2970 if((ft
& VT_BTYPE
) == VT_LLONG
) {
2971 vpushll((1ULL << bit_size
) - 1ULL);
2973 vpushi((1 << bit_size
) - 1);
2979 /* load destination, mask and or with source */
2981 if((ft
& VT_BTYPE
) == VT_LLONG
) {
2982 vpushll(~(((1ULL << bit_size
) - 1ULL) << bit_pos
));
2984 vpushi(~(((1 << bit_size
) - 1) << bit_pos
));
2990 /* ... and discard */
2994 #ifdef CONFIG_TCC_BCHECK
2995 /* bound check case */
2996 if (vtop
[-1].r
& VT_MUSTBOUND
) {
3005 #ifdef TCC_TARGET_X86_64
3006 if ((ft
& VT_BTYPE
) == VT_LDOUBLE
) {
3008 } else if ((ft
& VT_BTYPE
) == VT_QFLOAT
) {
3013 r
= gv(rc
); /* generate value */
3014 /* if lvalue was saved on stack, must read it */
3015 if ((vtop
[-1].r
& VT_VALMASK
) == VT_LLOCAL
) {
3017 t
= get_reg(RC_INT
);
3018 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
3023 sv
.r
= VT_LOCAL
| VT_LVAL
;
3024 sv
.c
.i
= vtop
[-1].c
.i
;
3026 vtop
[-1].r
= t
| VT_LVAL
;
3028 /* two word case handling : store second register at word + 4 (or +8 for x86-64) */
3029 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
3030 if (((ft
& VT_BTYPE
) == VT_QLONG
) || ((ft
& VT_BTYPE
) == VT_QFLOAT
)) {
3031 int addr_type
= VT_LLONG
, load_size
= 8, load_type
= ((vtop
->type
.t
& VT_BTYPE
) == VT_QLONG
) ? VT_LLONG
: VT_DOUBLE
;
3033 if ((ft
& VT_BTYPE
) == VT_LLONG
) {
3034 int addr_type
= VT_INT
, load_size
= 4, load_type
= VT_INT
;
3036 vtop
[-1].type
.t
= load_type
;
3039 /* convert to int to increment easily */
3040 vtop
->type
.t
= addr_type
;
3046 vtop
[-1].type
.t
= load_type
;
3047 /* XXX: it works because r2 is spilled last ! */
3048 store(vtop
->r2
, vtop
- 1);
3054 vtop
--; /* NOT vpop() because on x86 it would flush the fp stack */
3055 vtop
->r
|= delayed_cast
;
3059 /* post defines POST/PRE add. c is the token ++ or -- */
3060 ST_FUNC
void inc(int post
, int c
)
3063 vdup(); /* save lvalue */
3065 gv_dup(); /* duplicate value */
3070 vpushi(c
- TOK_MID
);
3072 vstore(); /* store value */
3074 vpop(); /* if post op, return saved value */
3077 ST_FUNC
void parse_mult_str (CString
*astr
, const char *msg
)
3079 /* read the string */
3083 while (tok
== TOK_STR
) {
3084 /* XXX: add \0 handling too ? */
3085 cstr_cat(astr
, tokc
.str
.data
, -1);
3088 cstr_ccat(astr
, '\0');
3091 /* If I is >= 1 and a power of two, returns log2(i)+1.
3092 If I is 0 returns 0. */
3093 static int exact_log2p1(int i
)
3098 for (ret
= 1; i
>= 1 << 8; ret
+= 8)
3109 /* Parse GNUC __attribute__ extension. Currently, the following
3110 extensions are recognized:
3111 - aligned(n) : set data/function alignment.
3112 - packed : force data alignment to 1
3113 - section(x) : generate data/code in this section.
3114 - unused : currently ignored, but may be used someday.
3115 - regparm(n) : pass function parameters in registers (i386 only)
3117 static void parse_attribute(AttributeDef
*ad
)
3122 while (tok
== TOK_ATTRIBUTE1
|| tok
== TOK_ATTRIBUTE2
) {
3126 while (tok
!= ')') {
3127 if (tok
< TOK_IDENT
)
3128 expect("attribute name");
3135 parse_mult_str(&astr
, "section name");
3136 ad
->section
= find_section(tcc_state
, (char *)astr
.data
);
3143 parse_mult_str(&astr
, "alias(\"target\")");
3144 ad
->alias_target
= /* save string as token, for later */
3145 tok_alloc((char*)astr
.data
, astr
.size
-1)->tok
;
3149 case TOK_VISIBILITY1
:
3150 case TOK_VISIBILITY2
:
3152 parse_mult_str(&astr
,
3153 "visibility(\"default|hidden|internal|protected\")");
3154 if (!strcmp (astr
.data
, "default"))
3155 ad
->a
.visibility
= STV_DEFAULT
;
3156 else if (!strcmp (astr
.data
, "hidden"))
3157 ad
->a
.visibility
= STV_HIDDEN
;
3158 else if (!strcmp (astr
.data
, "internal"))
3159 ad
->a
.visibility
= STV_INTERNAL
;
3160 else if (!strcmp (astr
.data
, "protected"))
3161 ad
->a
.visibility
= STV_PROTECTED
;
3163 expect("visibility(\"default|hidden|internal|protected\")");
3172 if (n
<= 0 || (n
& (n
- 1)) != 0)
3173 tcc_error("alignment must be a positive power of two");
3178 ad
->a
.aligned
= exact_log2p1(n
);
3179 if (n
!= 1 << (ad
->a
.aligned
- 1))
3180 tcc_error("alignment of %d is larger than implemented", n
);
3192 /* currently, no need to handle it because tcc does not
3193 track unused objects */
3197 /* currently, no need to handle it because tcc does not
3198 track unused objects */
3203 ad
->a
.func_call
= FUNC_CDECL
;
3208 ad
->a
.func_call
= FUNC_STDCALL
;
3210 #ifdef TCC_TARGET_I386
3220 ad
->a
.func_call
= FUNC_FASTCALL1
+ n
- 1;
3226 ad
->a
.func_call
= FUNC_FASTCALLW
;
3233 ad
->a
.mode
= VT_LLONG
+ 1;
3236 ad
->a
.mode
= VT_BYTE
+ 1;
3239 ad
->a
.mode
= VT_SHORT
+ 1;
3243 ad
->a
.mode
= VT_INT
+ 1;
3246 tcc_warning("__mode__(%s) not supported\n", get_tok_str(tok
, NULL
));
3253 ad
->a
.func_export
= 1;
3256 ad
->a
.func_import
= 1;
3259 if (tcc_state
->warn_unsupported
)
3260 tcc_warning("'%s' attribute ignored", get_tok_str(t
, NULL
));
3261 /* skip parameters */
3263 int parenthesis
= 0;
3267 else if (tok
== ')')
3270 } while (parenthesis
&& tok
!= -1);
3283 static Sym
* find_field (CType
*type
, int v
)
3287 while ((s
= s
->next
) != NULL
) {
3288 if ((s
->v
& SYM_FIELD
) &&
3289 (s
->type
.t
& VT_BTYPE
) == VT_STRUCT
&&
3290 (s
->v
& ~SYM_FIELD
) >= SYM_FIRST_ANOM
) {
3291 Sym
*ret
= find_field (&s
->type
, v
);
3301 static void struct_add_offset (Sym
*s
, int offset
)
3303 while ((s
= s
->next
) != NULL
) {
3304 if ((s
->v
& SYM_FIELD
) &&
3305 (s
->type
.t
& VT_BTYPE
) == VT_STRUCT
&&
3306 (s
->v
& ~SYM_FIELD
) >= SYM_FIRST_ANOM
) {
3307 struct_add_offset(s
->type
.ref
, offset
);
3313 static void struct_layout(CType
*type
, AttributeDef
*ad
)
3315 int align
, maxalign
, offset
, c
, bit_pos
, bt
, prevbt
, prev_bit_size
;
3316 int pcc
= !tcc_state
->ms_bitfields
;
3319 maxalign
= 1 << (ad
->a
.aligned
- 1);
3325 prevbt
= VT_STRUCT
; /* make it never match */
3327 for (f
= type
->ref
->next
; f
; f
= f
->next
) {
3328 int typealign
, bit_size
;
3329 int size
= type_size(&f
->type
, &typealign
);
3330 if (f
->type
.t
& VT_BITFIELD
)
3331 bit_size
= (f
->type
.t
>> (VT_STRUCT_SHIFT
+ 6)) & 0x3f;
3334 if (bit_size
== 0 && pcc
) {
3335 /* Zero-width bit-fields in PCC mode aren't affected
3336 by any packing (attribute or pragma). */
3338 } else if (f
->r
> 1) {
3340 } else if (ad
->a
.packed
|| f
->r
== 1) {
3342 /* Packed fields or packed records don't let the base type
3343 influence the records type alignment. */
3348 if (type
->ref
->type
.t
!= TOK_STRUCT
) {
3349 if (pcc
&& bit_size
>= 0)
3350 size
= (bit_size
+ 7) >> 3;
3351 /* Bit position is already zero from our caller. */
3355 } else if (bit_size
< 0) {
3356 int addbytes
= pcc
? (bit_pos
+ 7) >> 3 : 0;
3359 c
= (c
+ addbytes
+ align
- 1) & -align
;
3365 /* A bit-field. Layout is more complicated. There are two
3366 options TCC implements: PCC compatible and MS compatible
3367 (PCC compatible is what GCC uses for almost all targets).
3368 In PCC layout the overall size of the struct (in c) is
3369 _excluding_ the current run of bit-fields (that is,
3370 there's at least additional bit_pos bits after c). In
3371 MS layout c does include the current run of bit-fields.
3373 This matters for calculating the natural alignment buckets
3376 /* 'align' will be used to influence records alignment,
3377 so it's the max of specified and type alignment, except
3378 in certain cases that depend on the mode. */
3379 if (align
< typealign
)
3382 /* In PCC layout a non-packed bit-field is placed adjacent
3383 to the preceding bit-fields, except if it would overflow
3384 its container (depending on base type) or it's a zero-width
3385 bit-field. Packed non-zero-width bit-fields always are
3387 int ofs
= (c
* 8 + bit_pos
) % (typealign
* 8);
3388 int ofs2
= ofs
+ bit_size
+ (typealign
* 8) - 1;
3389 if (bit_size
== 0 ||
3390 ((typealign
!= 1 || size
== 1) &&
3391 (ofs2
/ (typealign
* 8)) > (size
/typealign
))) {
3392 c
= (c
+ ((bit_pos
+ 7) >> 3) + typealign
- 1) & -typealign
;
3394 } else while (bit_pos
+ bit_size
> size
* 8) {
3396 bit_pos
-= size
* 8;
3399 /* In PCC layout named bit-fields influence the alignment
3400 of the containing struct using the base types alignment,
3401 except for packed fields (which here have correct
3402 align/typealign). */
3403 if ((f
->v
& SYM_FIRST_ANOM
))
3406 bt
= f
->type
.t
& VT_BTYPE
;
3407 if ((bit_pos
+ bit_size
> size
* 8) ||
3408 (bit_size
> 0) == (bt
!= prevbt
)) {
3409 c
= (c
+ typealign
- 1) & -typealign
;
3412 /* In MS bitfield mode a bit-field run always uses
3413 at least as many bits as the underlying type.
3414 To start a new run it's also required that this
3415 or the last bit-field had non-zero width. */
3416 if (bit_size
|| prev_bit_size
)
3419 /* In MS layout the records alignment is normally
3420 influenced by the field, except for a zero-width
3421 field at the start of a run (but by further zero-width
3422 fields it is again). */
3423 if (bit_size
== 0 && prevbt
!= bt
)
3426 prev_bit_size
= bit_size
;
3428 f
->type
.t
= (f
->type
.t
& ~(0x3f << VT_STRUCT_SHIFT
))
3429 | (bit_pos
<< VT_STRUCT_SHIFT
);
3430 bit_pos
+= bit_size
;
3431 if (pcc
&& bit_pos
>= size
* 8) {
3433 bit_pos
-= size
* 8;
3436 if (align
> maxalign
)
3439 printf("set field %s offset=%d c=%d",
3440 get_tok_str(f
->v
& ~SYM_FIELD
, NULL
), offset
, c
);
3441 if (f
->type
.t
& VT_BITFIELD
) {
3442 printf(" pos=%d size=%d",
3443 (f
->type
.t
>> VT_STRUCT_SHIFT
) & 0x3f,
3444 (f
->type
.t
>> (VT_STRUCT_SHIFT
+ 6)) & 0x3f);
3449 if (f
->v
& SYM_FIRST_ANOM
&& (f
->type
.t
& VT_BTYPE
) == VT_STRUCT
) {
3451 /* An anonymous struct/union. Adjust member offsets
3452 to reflect the real offset of our containing struct.
3453 Also set the offset of this anon member inside
3454 the outer struct to be zero. Via this it
3455 works when accessing the field offset directly
3456 (from base object), as well as when recursing
3457 members in initializer handling. */
3458 int v2
= f
->type
.ref
->v
;
3459 if (!(v2
& SYM_FIELD
) &&
3460 (v2
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
3462 /* This happens only with MS extensions. The
3463 anon member has a named struct type, so it
3464 potentially is shared with other references.
3465 We need to unshare members so we can modify
3468 f
->type
.ref
= sym_push(anon_sym
++ | SYM_FIELD
,
3469 &f
->type
.ref
->type
, 0,
3471 pps
= &f
->type
.ref
->next
;
3472 while ((ass
= ass
->next
) != NULL
) {
3473 *pps
= sym_push(ass
->v
, &ass
->type
, 0, ass
->c
);
3474 pps
= &((*pps
)->next
);
3478 struct_add_offset(f
->type
.ref
, offset
);
3486 /* store size and alignment */
3487 type
->ref
->c
= (c
+ (pcc
? (bit_pos
+ 7) >> 3 : 0)
3488 + maxalign
- 1) & -maxalign
;
3489 type
->ref
->r
= maxalign
;
3492 /* enum/struct/union declaration. u is either VT_ENUM or VT_STRUCT */
3493 static void struct_decl(CType
*type
, AttributeDef
*ad
, int u
)
3495 int a
, v
, size
, align
, flexible
, alignoverride
;
3497 int bit_size
, bsize
, bt
;
3502 a
= tok
; /* save decl type */
3504 if (tok
== TOK_ATTRIBUTE1
|| tok
== TOK_ATTRIBUTE2
)
3505 parse_attribute(ad
);
3509 /* struct already defined ? return it */
3511 expect("struct/union/enum name");
3513 if (s
&& (s
->scope
== local_scope
|| (tok
!= '{' && tok
!= ';'))) {
3515 tcc_error("redefinition of '%s'", get_tok_str(v
, NULL
));
3521 /* Record the original enum/struct/union token. */
3524 /* we put an undefined size for struct/union */
3525 s
= sym_push(v
| SYM_STRUCT
, &type1
, 0, -1);
3526 s
->r
= 0; /* default alignment is zero as gcc */
3527 /* put struct/union/enum name in type */
3535 tcc_error("struct/union/enum already defined");
3536 /* cannot be empty */
3538 /* non empty enums are not allowed */
3539 if (a
== TOK_ENUM
) {
3543 CType
*t
= &int_type
;
3546 expect("identifier");
3548 if (ss
&& !local_stack
)
3549 tcc_error("redefinition of enumerator '%s'",
3550 get_tok_str(v
, NULL
));
3554 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
3557 /* We really want to support long long enums
3558 on i386 as well, but the Sym structure only
3559 holds a 'long' for associated constants,
3560 and enlarging it would bump its size (no
3561 available padding). So punt for now. */
3567 if (c
!= (int)c
&& (unsigned long)c
!= (unsigned int)c
)
3568 seen_wide
= 1, t
= &size_type
;
3569 /* enum symbols have static storage */
3570 ss
= sym_push(v
, t
, VT_CONST
, c
);
3571 ss
->type
.t
|= VT_STATIC
;
3576 /* NOTE: we accept a trailing comma */
3581 s
->a
.unsigned_enum
= 1;
3582 s
->c
= type_size(seen_wide
? &size_type
: &int_type
, &align
);
3587 while (tok
!= '}') {
3588 if (!parse_btype(&btype
, &ad1
)) {
3594 tcc_error("flexible array member '%s' not at the end of struct",
3595 get_tok_str(v
, NULL
));
3600 type_decl(&type1
, &ad1
, &v
, TYPE_DIRECT
| TYPE_ABSTRACT
);
3602 if ((type1
.t
& VT_BTYPE
) != VT_STRUCT
)
3603 expect("identifier");
3605 int v
= btype
.ref
->v
;
3606 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
3607 if (tcc_state
->ms_extensions
== 0)
3608 expect("identifier");
3612 if (type_size(&type1
, &align
) < 0) {
3613 if ((a
== TOK_STRUCT
) && (type1
.t
& VT_ARRAY
) && c
)
3616 tcc_error("field '%s' has incomplete type",
3617 get_tok_str(v
, NULL
));
3619 if ((type1
.t
& VT_BTYPE
) == VT_FUNC
||
3620 (type1
.t
& (VT_TYPEDEF
| VT_STATIC
| VT_EXTERN
| VT_INLINE
)))
3621 tcc_error("invalid type for '%s'",
3622 get_tok_str(v
, NULL
));
3626 bit_size
= expr_const();
3627 /* XXX: handle v = 0 case for messages */
3629 tcc_error("negative width in bit-field '%s'",
3630 get_tok_str(v
, NULL
));
3631 if (v
&& bit_size
== 0)
3632 tcc_error("zero width for bit-field '%s'",
3633 get_tok_str(v
, NULL
));
3634 if (tok
== TOK_ATTRIBUTE1
|| tok
== TOK_ATTRIBUTE2
)
3635 parse_attribute(&ad1
);
3637 size
= type_size(&type1
, &align
);
3638 /* Only remember non-default alignment. */
3640 if (ad1
.a
.aligned
) {
3641 int speca
= 1 << (ad1
.a
.aligned
- 1);
3642 alignoverride
= speca
;
3643 } else if (ad1
.a
.packed
|| ad
->a
.packed
) {
3645 } else if (*tcc_state
->pack_stack_ptr
) {
3646 if (align
> *tcc_state
->pack_stack_ptr
)
3647 alignoverride
= *tcc_state
->pack_stack_ptr
;
3649 if (bit_size
>= 0) {
3650 bt
= type1
.t
& VT_BTYPE
;
3657 tcc_error("bitfields must have scalar type");
3659 if (bit_size
> bsize
) {
3660 tcc_error("width of '%s' exceeds its type",
3661 get_tok_str(v
, NULL
));
3662 } else if (bit_size
== bsize
) {
3663 /* no need for bit fields */
3666 type1
.t
|= VT_BITFIELD
|
3667 (0 << VT_STRUCT_SHIFT
) |
3668 (bit_size
<< (VT_STRUCT_SHIFT
+ 6));
3671 if (v
!= 0 || (type1
.t
& VT_BTYPE
) == VT_STRUCT
) {
3672 /* Remember we've seen a real field to check
3673 for placement of flexible array member. */
3676 /* If member is a struct or bit-field, enforce
3677 placing into the struct (as anonymous). */
3679 ((type1
.t
& VT_BTYPE
) == VT_STRUCT
||
3684 ss
= sym_push(v
| SYM_FIELD
, &type1
, alignoverride
, 0);
3688 if (tok
== ';' || tok
== TOK_EOF
)
3695 if (tok
== TOK_ATTRIBUTE1
|| tok
== TOK_ATTRIBUTE2
)
3696 parse_attribute(ad
);
3697 struct_layout(type
, ad
);
3702 /* return 1 if basic type is a type size (short, long, long long) */
3703 ST_FUNC
int is_btype_size(int bt
)
3705 return bt
== VT_SHORT
|| bt
== VT_LONG
|| bt
== VT_LLONG
;
3708 /* Add type qualifiers to a type. If the type is an array then the qualifiers
3709 are added to the element type, copied because it could be a typedef. */
3710 static void parse_btype_qualify(CType
*type
, int qualifiers
)
3712 while (type
->t
& VT_ARRAY
) {
3713 type
->ref
= sym_push(SYM_FIELD
, &type
->ref
->type
, 0, type
->ref
->c
);
3714 type
= &type
->ref
->type
;
3716 type
->t
|= qualifiers
;
3719 /* return 0 if no type declaration. otherwise, return the basic type
3722 static int parse_btype(CType
*type
, AttributeDef
*ad
)
3724 int t
, u
, bt_size
, complete
, type_found
, typespec_found
, g
;
3728 memset(ad
, 0, sizeof(AttributeDef
));
3736 /* currently, we really ignore extension */
3747 tcc_error("too many basic types");
3749 bt_size
= is_btype_size (u
& VT_BTYPE
);
3750 if (u
== VT_INT
|| (!bt_size
&& !(t
& VT_TYPEDEF
)))
3765 if ((t
& VT_BTYPE
) == VT_DOUBLE
) {
3766 #ifndef TCC_TARGET_PE
3767 t
= (t
& ~VT_BTYPE
) | VT_LDOUBLE
;
3769 } else if ((t
& VT_BTYPE
) == VT_LONG
) {
3770 t
= (t
& ~VT_BTYPE
) | VT_LLONG
;
3776 #ifdef TCC_TARGET_ARM64
3778 /* GCC's __uint128_t appears in some Linux header files. Make it a
3779 synonym for long double to get the size and alignment right. */
3791 if ((t
& VT_BTYPE
) == VT_LONG
) {
3792 #ifdef TCC_TARGET_PE
3793 t
= (t
& ~VT_BTYPE
) | VT_DOUBLE
;
3795 t
= (t
& ~VT_BTYPE
) | VT_LDOUBLE
;
3803 struct_decl(&type1
, ad
, VT_ENUM
);
3806 type
->ref
= type1
.ref
;
3810 struct_decl(&type1
, ad
, VT_STRUCT
);
3813 /* type modifiers */
3818 parse_btype_qualify(type
, VT_CONSTANT
);
3826 parse_btype_qualify(type
, VT_VOLATILE
);
3833 if ((t
& (VT_DEFSIGN
|VT_UNSIGNED
)) == (VT_DEFSIGN
|VT_UNSIGNED
))
3834 tcc_error("signed and unsigned modifier");
3847 if ((t
& (VT_DEFSIGN
|VT_UNSIGNED
)) == VT_DEFSIGN
)
3848 tcc_error("signed and unsigned modifier");
3849 t
|= VT_DEFSIGN
| VT_UNSIGNED
;
3865 if (t
& (VT_EXTERN
|VT_STATIC
|VT_TYPEDEF
) & ~g
)
3866 tcc_error("multiple storage classes");
3877 /* GNUC attribute */
3878 case TOK_ATTRIBUTE1
:
3879 case TOK_ATTRIBUTE2
:
3880 parse_attribute(ad
);
3883 t
= (t
& ~VT_BTYPE
) | u
;
3891 parse_expr_type(&type1
);
3892 /* remove all storage modifiers except typedef */
3893 type1
.t
&= ~(VT_STORAGE
&~VT_TYPEDEF
);
3899 if (!s
|| !(s
->type
.t
& VT_TYPEDEF
))
3902 type
->t
= ((s
->type
.t
& ~VT_TYPEDEF
) |
3903 (t
& ~(VT_CONSTANT
| VT_VOLATILE
)));
3904 type
->ref
= s
->type
.ref
;
3905 if (t
& (VT_CONSTANT
| VT_VOLATILE
))
3906 parse_btype_qualify(type
, t
& (VT_CONSTANT
| VT_VOLATILE
));
3910 /* get attributes from typedef */
3911 if (0 == ad
->a
.aligned
)
3912 ad
->a
.aligned
= s
->a
.aligned
;
3913 if (0 == ad
->a
.func_call
)
3914 ad
->a
.func_call
= s
->a
.func_call
;
3915 ad
->a
.packed
|= s
->a
.packed
;
3924 if (tcc_state
->char_is_unsigned
) {
3925 if ((t
& (VT_DEFSIGN
|VT_BTYPE
)) == VT_BYTE
)
3929 /* long is never used as type */
3930 if ((t
& VT_BTYPE
) == VT_LONG
)
3931 #if (!defined TCC_TARGET_X86_64 && !defined TCC_TARGET_ARM64) || \
3932 defined TCC_TARGET_PE
3933 t
= (t
& ~VT_BTYPE
) | VT_INT
;
3935 t
= (t
& ~VT_BTYPE
) | VT_LLONG
;
3941 /* convert a function parameter type (array to pointer and function to
3942 function pointer) */
3943 static inline void convert_parameter_type(CType
*pt
)
3945 /* remove const and volatile qualifiers (XXX: const could be used
3946 to indicate a const function parameter */
3947 pt
->t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
3948 /* array must be transformed to pointer according to ANSI C */
3950 if ((pt
->t
& VT_BTYPE
) == VT_FUNC
) {
3955 ST_FUNC
void parse_asm_str(CString
*astr
)
3958 parse_mult_str(astr
, "string constant");
3961 /* Parse an asm label and return the token */
3962 static int asm_label_instr(void)
3968 parse_asm_str(&astr
);
3971 printf("asm_alias: \"%s\"\n", (char *)astr
.data
);
3973 v
= tok_alloc(astr
.data
, astr
.size
- 1)->tok
;
3978 static void post_type(CType
*type
, AttributeDef
*ad
, int storage
)
3980 int n
, l
, t1
, arg_size
, align
;
3981 Sym
**plast
, *s
, *first
;
3986 /* function declaration */
3994 /* read param name and compute offset */
3995 if (l
!= FUNC_OLD
) {
3996 if (!parse_btype(&pt
, &ad1
)) {
3998 tcc_error("invalid type");
4005 if ((pt
.t
& VT_BTYPE
) == VT_VOID
&& tok
== ')')
4007 type_decl(&pt
, &ad1
, &n
, TYPE_DIRECT
| TYPE_ABSTRACT
);
4008 if ((pt
.t
& VT_BTYPE
) == VT_VOID
)
4009 tcc_error("parameter declared as void");
4010 arg_size
+= (type_size(&pt
, &align
) + PTR_SIZE
- 1) / PTR_SIZE
;
4015 expect("identifier");
4019 convert_parameter_type(&pt
);
4020 s
= sym_push(n
| SYM_FIELD
, &pt
, 0, 0);
4026 if (l
== FUNC_NEW
&& tok
== TOK_DOTS
) {
4033 /* if no parameters, then old type prototype */
4037 /* NOTE: const is ignored in returned type as it has a special
4038 meaning in gcc / C++ */
4039 type
->t
&= ~VT_CONSTANT
;
4040 /* some ancient pre-K&R C allows a function to return an array
4041 and the array brackets to be put after the arguments, such
4042 that "int c()[]" means something like "int[] c()" */
4045 skip(']'); /* only handle simple "[]" */
4048 /* we push a anonymous symbol which will contain the function prototype */
4049 ad
->a
.func_args
= arg_size
;
4050 s
= sym_push(SYM_FIELD
, type
, 0, l
);
4055 } else if (tok
== '[') {
4056 int saved_nocode_wanted
= nocode_wanted
;
4057 /* array definition */
4059 if (tok
== TOK_RESTRICT1
)
4064 if (!local_stack
|| (storage
& VT_STATIC
))
4065 vpushi(expr_const());
4067 /* VLAs (which can only happen with local_stack && !VT_STATIC)
4068 length must always be evaluated, even under nocode_wanted,
4069 so that its size slot is initialized (e.g. under sizeof
4074 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
4077 tcc_error("invalid array size");
4079 if (!is_integer_btype(vtop
->type
.t
& VT_BTYPE
))
4080 tcc_error("size of variable length array should be an integer");
4085 /* parse next post type */
4086 post_type(type
, ad
, storage
);
4087 if (type
->t
== VT_FUNC
)
4088 tcc_error("declaration of an array of functions");
4089 t1
|= type
->t
& VT_VLA
;
4092 loc
-= type_size(&int_type
, &align
);
4096 vla_runtime_type_size(type
, &align
);
4098 vset(&int_type
, VT_LOCAL
|VT_LVAL
, n
);
4104 nocode_wanted
= saved_nocode_wanted
;
4106 /* we push an anonymous symbol which will contain the array
4108 s
= sym_push(SYM_FIELD
, type
, 0, n
);
4109 type
->t
= (t1
? VT_VLA
: VT_ARRAY
) | VT_PTR
;
4114 /* Parse a type declaration (except basic type), and return the type
4115 in 'type'. 'td' is a bitmask indicating which kind of type decl is
4116 expected. 'type' should contain the basic type. 'ad' is the
4117 attribute definition of the basic type. It can be modified by
4120 static void type_decl(CType
*type
, AttributeDef
*ad
, int *v
, int td
)
4123 CType type1
, *type2
;
4124 int qualifiers
, storage
;
4126 while (tok
== '*') {
4134 qualifiers
|= VT_CONSTANT
;
4139 qualifiers
|= VT_VOLATILE
;
4145 /* XXX: clarify attribute handling */
4146 case TOK_ATTRIBUTE1
:
4147 case TOK_ATTRIBUTE2
:
4148 parse_attribute(ad
);
4152 type
->t
|= qualifiers
;
4155 /* recursive type */
4156 /* XXX: incorrect if abstract type for functions (e.g. 'int ()') */
4157 type1
.t
= 0; /* XXX: same as int */
4160 /* XXX: this is not correct to modify 'ad' at this point, but
4161 the syntax is not clear */
4162 if (tok
== TOK_ATTRIBUTE1
|| tok
== TOK_ATTRIBUTE2
)
4163 parse_attribute(ad
);
4164 type_decl(&type1
, ad
, v
, td
);
4167 /* type identifier */
4168 if (tok
>= TOK_IDENT
&& (td
& TYPE_DIRECT
)) {
4172 if (!(td
& TYPE_ABSTRACT
))
4173 expect("identifier");
4177 storage
= type
->t
& VT_STORAGE
;
4178 type
->t
&= ~VT_STORAGE
;
4179 post_type(type
, ad
, storage
);
4181 if (tok
== TOK_ATTRIBUTE1
|| tok
== TOK_ATTRIBUTE2
)
4182 parse_attribute(ad
);
4186 /* append type at the end of type1 */
4200 /* compute the lvalue VT_LVAL_xxx needed to match type t. */
4201 ST_FUNC
int lvalue_type(int t
)
4206 if (bt
== VT_BYTE
|| bt
== VT_BOOL
)
4208 else if (bt
== VT_SHORT
)
4212 if (t
& VT_UNSIGNED
)
4213 r
|= VT_LVAL_UNSIGNED
;
4217 /* indirection with full error checking and bound check */
4218 ST_FUNC
void indir(void)
4220 if ((vtop
->type
.t
& VT_BTYPE
) != VT_PTR
) {
4221 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FUNC
)
4225 if (vtop
->r
& VT_LVAL
)
4227 vtop
->type
= *pointed_type(&vtop
->type
);
4228 /* Arrays and functions are never lvalues */
4229 if (!(vtop
->type
.t
& VT_ARRAY
) && !(vtop
->type
.t
& VT_VLA
)
4230 && (vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
) {
4231 vtop
->r
|= lvalue_type(vtop
->type
.t
);
4232 /* if bound checking, the referenced pointer must be checked */
4233 #ifdef CONFIG_TCC_BCHECK
4234 if (tcc_state
->do_bounds_check
)
4235 vtop
->r
|= VT_MUSTBOUND
;
4240 /* pass a parameter to a function and do type checking and casting */
4241 static void gfunc_param_typed(Sym
*func
, Sym
*arg
)
4246 func_type
= func
->c
;
4247 if (func_type
== FUNC_OLD
||
4248 (func_type
== FUNC_ELLIPSIS
&& arg
== NULL
)) {
4249 /* default casting : only need to convert float to double */
4250 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FLOAT
) {
4253 } else if (vtop
->type
.t
& VT_BITFIELD
) {
4254 type
.t
= vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
);
4255 type
.ref
= vtop
->type
.ref
;
4258 } else if (arg
== NULL
) {
4259 tcc_error("too many arguments to function");
4262 type
.t
&= ~VT_CONSTANT
; /* need to do that to avoid false warning */
4263 gen_assign_cast(&type
);
4267 /* parse an expression of the form '(type)' or '(expr)' and return its
4269 static void parse_expr_type(CType
*type
)
4275 if (parse_btype(type
, &ad
)) {
4276 type_decl(type
, &ad
, &n
, TYPE_ABSTRACT
);
4283 static void parse_type(CType
*type
)
4288 if (!parse_btype(type
, &ad
)) {
4291 type_decl(type
, &ad
, &n
, TYPE_ABSTRACT
);
4294 static void vpush_tokc(int t
)
4299 vsetc(&type
, VT_CONST
, &tokc
);
4302 ST_FUNC
void unary(void)
4304 int n
, t
, align
, size
, r
, sizeof_caller
;
4309 sizeof_caller
= in_sizeof
;
4311 /* XXX: GCC 2.95.3 does not generate a table although it should be
4325 vpush_tokc(VT_INT
| VT_UNSIGNED
);
4329 vpush_tokc(VT_LLONG
);
4333 vpush_tokc(VT_LLONG
| VT_UNSIGNED
);
4337 vpush_tokc(VT_FLOAT
);
4341 vpush_tokc(VT_DOUBLE
);
4345 vpush_tokc(VT_LDOUBLE
);
4348 case TOK___FUNCTION__
:
4350 goto tok_identifier
;
4356 /* special function name identifier */
4357 len
= strlen(funcname
) + 1;
4358 /* generate char[len] type */
4363 vpush_ref(&type
, data_section
, data_section
->data_offset
, len
);
4364 ptr
= section_ptr_add(data_section
, len
);
4365 memcpy(ptr
, funcname
, len
);
4370 #ifdef TCC_TARGET_PE
4371 t
= VT_SHORT
| VT_UNSIGNED
;
4377 /* string parsing */
4380 if (tcc_state
->warn_write_strings
)
4385 memset(&ad
, 0, sizeof(AttributeDef
));
4386 decl_initializer_alloc(&type
, &ad
, VT_CONST
, 2, 0, 0);
4391 if (parse_btype(&type
, &ad
)) {
4392 type_decl(&type
, &ad
, &n
, TYPE_ABSTRACT
);
4394 /* check ISOC99 compound literal */
4396 /* data is allocated locally by default */
4401 /* all except arrays are lvalues */
4402 if (!(type
.t
& VT_ARRAY
))
4403 r
|= lvalue_type(type
.t
);
4404 memset(&ad
, 0, sizeof(AttributeDef
));
4405 decl_initializer_alloc(&type
, &ad
, r
, 1, 0, 0);
4407 if (sizeof_caller
) {
4414 } else if (tok
== '{') {
4415 int saved_nocode_wanted
= nocode_wanted
;
4417 tcc_error("expected constant");
4418 /* save all registers */
4420 /* statement expression : we do not accept break/continue
4421 inside as GCC does. We do retain the nocode_wanted state,
4422 as statement expressions can't ever be entered from the
4423 outside, so any reactivation of code emission (from labels
4424 or loop heads) can be disabled again after the end of it. */
4425 block(NULL
, NULL
, 1);
4426 nocode_wanted
= saved_nocode_wanted
;
4441 /* functions names must be treated as function pointers,
4442 except for unary '&' and sizeof. Since we consider that
4443 functions are not lvalues, we only have to handle it
4444 there and in function calls. */
4445 /* arrays can also be used although they are not lvalues */
4446 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
&&
4447 !(vtop
->type
.t
& VT_ARRAY
) && !(vtop
->type
.t
& VT_LLOCAL
))
4449 mk_pointer(&vtop
->type
);
4455 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
4457 boolean
.t
= VT_BOOL
;
4459 vtop
->c
.i
= !vtop
->c
.i
;
4460 } else if ((vtop
->r
& VT_VALMASK
) == VT_CMP
)
4464 vseti(VT_JMP
, gvtst(1, 0));
4476 if ((vtop
->type
.t
& VT_BTYPE
) == VT_PTR
)
4477 tcc_error("pointer not accepted for unary plus");
4478 /* In order to force cast, we add zero, except for floating point
4479 where we really need an noop (otherwise -0.0 will be transformed
4481 if (!is_float(vtop
->type
.t
)) {
4492 unary_type(&type
); // Perform a in_sizeof = 0;
4493 size
= type_size(&type
, &align
);
4494 if (t
== TOK_SIZEOF
) {
4495 if (!(type
.t
& VT_VLA
)) {
4497 tcc_error("sizeof applied to an incomplete type");
4500 vla_runtime_type_size(&type
, &align
);
4505 vtop
->type
.t
|= VT_UNSIGNED
;
4508 case TOK_builtin_expect
:
4510 /* __builtin_expect is a no-op for now */
4522 case TOK_builtin_types_compatible_p
:
4531 type1
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
4532 type2
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
4533 vpushi(is_compatible_types(&type1
, &type2
));
4536 case TOK_builtin_choose_expr
:
4563 case TOK_builtin_constant_p
:
4570 res
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
4577 case TOK_builtin_frame_address
:
4578 case TOK_builtin_return_address
:
4585 if (tok
!= TOK_CINT
) {
4586 tcc_error("%s only takes positive integers",
4587 tok1
== TOK_builtin_return_address
?
4588 "__builtin_return_address" :
4589 "__builtin_frame_address");
4591 level
= (uint32_t)tokc
.i
;
4596 vset(&type
, VT_LOCAL
, 0); /* local frame */
4598 mk_pointer(&vtop
->type
);
4599 indir(); /* -> parent frame */
4601 if (tok1
== TOK_builtin_return_address
) {
4602 // assume return address is just above frame pointer on stack
4605 mk_pointer(&vtop
->type
);
4610 #ifdef TCC_TARGET_X86_64
4611 #ifdef TCC_TARGET_PE
4612 case TOK_builtin_va_start
:
4620 if ((vtop
->r
& VT_VALMASK
) != VT_LOCAL
)
4621 tcc_error("__builtin_va_start expects a local variable");
4622 vtop
->r
&= ~(VT_LVAL
| VT_REF
);
4623 vtop
->type
= char_pointer_type
;
4629 case TOK_builtin_va_arg_types
:
4636 vpushi(classify_x86_64_va_arg(&type
));
4642 #ifdef TCC_TARGET_ARM64
4643 case TOK___va_start
: {
4653 vtop
->type
.t
= VT_VOID
;
4656 case TOK___va_arg
: {
4669 case TOK___arm64_clear_cache
: {
4678 vtop
->type
.t
= VT_VOID
;
4682 /* pre operations */
4693 t
= vtop
->type
.t
& VT_BTYPE
;
4695 /* In IEEE negate(x) isn't subtract(0,x), but rather
4699 vtop
->c
.f
= -1.0 * 0.0;
4700 else if (t
== VT_DOUBLE
)
4701 vtop
->c
.d
= -1.0 * 0.0;
4703 vtop
->c
.ld
= -1.0 * 0.0;
4711 goto tok_identifier
;
4713 /* allow to take the address of a label */
4714 if (tok
< TOK_UIDENT
)
4715 expect("label identifier");
4716 s
= label_find(tok
);
4718 s
= label_push(&global_label_stack
, tok
, LABEL_FORWARD
);
4720 if (s
->r
== LABEL_DECLARED
)
4721 s
->r
= LABEL_FORWARD
;
4724 s
->type
.t
= VT_VOID
;
4725 mk_pointer(&s
->type
);
4726 s
->type
.t
|= VT_STATIC
;
4728 vpushsym(&s
->type
, s
);
4732 // special qnan , snan and infinity values
4734 vpush64(VT_DOUBLE
, 0x7ff8000000000000ULL
);
4738 vpush64(VT_DOUBLE
, 0x7ff0000000000001ULL
);
4742 vpush64(VT_DOUBLE
, 0x7ff0000000000000ULL
);
4751 expect("identifier");
4754 const char *name
= get_tok_str(t
, NULL
);
4756 tcc_error("'%s' undeclared", name
);
4757 /* for simple function calls, we tolerate undeclared
4758 external reference to int() function */
4759 if (tcc_state
->warn_implicit_function_declaration
4760 #ifdef TCC_TARGET_PE
4761 /* people must be warned about using undeclared WINAPI functions
4762 (which usually start with uppercase letter) */
4763 || (name
[0] >= 'A' && name
[0] <= 'Z')
4766 tcc_warning("implicit declaration of function '%s'", name
);
4767 s
= external_global_sym(t
, &func_old_type
, 0);
4771 /* A symbol that has a register is a local register variable,
4772 which starts out as VT_LOCAL value. */
4773 if ((r
& VT_VALMASK
) < VT_CONST
)
4774 r
= (r
& ~VT_VALMASK
) | VT_LOCAL
;
4776 vset(&s
->type
, r
, s
->c
);
4777 /* Point to s as backpointer (even without r&VT_SYM).
4778 Will be used by at least the x86 inline asm parser for
4781 if (vtop
->r
& VT_SYM
) {
4787 /* post operations */
4789 if (tok
== TOK_INC
|| tok
== TOK_DEC
) {
4792 } else if (tok
== '.' || tok
== TOK_ARROW
|| tok
== TOK_CDOUBLE
) {
4795 if (tok
== TOK_ARROW
)
4797 qualifiers
= vtop
->type
.t
& (VT_CONSTANT
| VT_VOLATILE
);
4800 /* expect pointer on structure */
4801 if ((vtop
->type
.t
& VT_BTYPE
) != VT_STRUCT
)
4802 expect("struct or union");
4803 if (tok
== TOK_CDOUBLE
)
4804 expect("field name");
4806 if (tok
== TOK_CINT
|| tok
== TOK_CUINT
)
4807 expect("field name");
4808 s
= find_field(&vtop
->type
, tok
);
4810 tcc_error("field not found: %s", get_tok_str(tok
& ~SYM_FIELD
, &tokc
));
4811 /* add field offset to pointer */
4812 vtop
->type
= char_pointer_type
; /* change type to 'char *' */
4815 /* change type to field type, and set to lvalue */
4816 vtop
->type
= s
->type
;
4817 vtop
->type
.t
|= qualifiers
;
4818 /* an array is never an lvalue */
4819 if (!(vtop
->type
.t
& VT_ARRAY
)) {
4820 vtop
->r
|= lvalue_type(vtop
->type
.t
);
4821 #ifdef CONFIG_TCC_BCHECK
4822 /* if bound checking, the referenced pointer must be checked */
4823 if (tcc_state
->do_bounds_check
&& (vtop
->r
& VT_VALMASK
) != VT_LOCAL
)
4824 vtop
->r
|= VT_MUSTBOUND
;
4828 } else if (tok
== '[') {
4834 } else if (tok
== '(') {
4837 int nb_args
, ret_nregs
, ret_align
, regsize
, variadic
;
4840 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
) {
4841 /* pointer test (no array accepted) */
4842 if ((vtop
->type
.t
& (VT_BTYPE
| VT_ARRAY
)) == VT_PTR
) {
4843 vtop
->type
= *pointed_type(&vtop
->type
);
4844 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
)
4848 expect("function pointer");
4851 vtop
->r
&= ~VT_LVAL
; /* no lvalue */
4853 /* get return type */
4856 sa
= s
->next
; /* first parameter */
4857 nb_args
= regsize
= 0;
4859 /* compute first implicit argument if a structure is returned */
4860 if ((s
->type
.t
& VT_BTYPE
) == VT_STRUCT
) {
4861 variadic
= (s
->c
== FUNC_ELLIPSIS
);
4862 ret_nregs
= gfunc_sret(&s
->type
, variadic
, &ret
.type
,
4863 &ret_align
, ®size
);
4865 /* get some space for the returned structure */
4866 size
= type_size(&s
->type
, &align
);
4867 #ifdef TCC_TARGET_ARM64
4868 /* On arm64, a small struct is return in registers.
4869 It is much easier to write it to memory if we know
4870 that we are allowed to write some extra bytes, so
4871 round the allocated space up to a power of 2: */
4873 while (size
& (size
- 1))
4874 size
= (size
| (size
- 1)) + 1;
4876 loc
= (loc
- size
) & -align
;
4878 ret
.r
= VT_LOCAL
| VT_LVAL
;
4879 /* pass it as 'int' to avoid structure arg passing
4881 vseti(VT_LOCAL
, loc
);
4891 /* return in register */
4892 if (is_float(ret
.type
.t
)) {
4893 ret
.r
= reg_fret(ret
.type
.t
);
4894 #ifdef TCC_TARGET_X86_64
4895 if ((ret
.type
.t
& VT_BTYPE
) == VT_QFLOAT
)
4899 #ifndef TCC_TARGET_ARM64
4900 #ifdef TCC_TARGET_X86_64
4901 if ((ret
.type
.t
& VT_BTYPE
) == VT_QLONG
)
4903 if ((ret
.type
.t
& VT_BTYPE
) == VT_LLONG
)
4914 gfunc_param_typed(s
, sa
);
4924 tcc_error("too few arguments to function");
4926 gfunc_call(nb_args
);
4929 for (r
= ret
.r
+ ret_nregs
+ !ret_nregs
; r
-- > ret
.r
;) {
4930 vsetc(&ret
.type
, r
, &ret
.c
);
4931 vtop
->r2
= ret
.r2
; /* Loop only happens when r2 is VT_CONST */
4934 /* handle packed struct return */
4935 if (((s
->type
.t
& VT_BTYPE
) == VT_STRUCT
) && ret_nregs
) {
4938 size
= type_size(&s
->type
, &align
);
4939 /* We're writing whole regs often, make sure there's enough
4940 space. Assume register size is power of 2. */
4941 if (regsize
> align
)
4943 loc
= (loc
- size
) & -align
;
4947 vset(&ret
.type
, VT_LOCAL
| VT_LVAL
, addr
+ offset
);
4951 if (--ret_nregs
== 0)
4955 vset(&s
->type
, VT_LOCAL
| VT_LVAL
, addr
);
4963 ST_FUNC
void expr_prod(void)
4968 while (tok
== '*' || tok
== '/' || tok
== '%') {
4976 ST_FUNC
void expr_sum(void)
4981 while (tok
== '+' || tok
== '-') {
4989 static void expr_shift(void)
4994 while (tok
== TOK_SHL
|| tok
== TOK_SAR
) {
5002 static void expr_cmp(void)
5007 while ((tok
>= TOK_ULE
&& tok
<= TOK_GT
) ||
5008 tok
== TOK_ULT
|| tok
== TOK_UGE
) {
5016 static void expr_cmpeq(void)
5021 while (tok
== TOK_EQ
|| tok
== TOK_NE
) {
5029 static void expr_and(void)
5032 while (tok
== '&') {
5039 static void expr_xor(void)
5042 while (tok
== '^') {
5049 static void expr_or(void)
5052 while (tok
== '|') {
5059 /* XXX: fix this mess */
5060 static void expr_land_const(void)
5063 while (tok
== TOK_LAND
) {
5069 static void expr_lor_const(void)
5072 while (tok
== TOK_LOR
) {
5079 static void expr_land(void)
5082 if (tok
== TOK_LAND
) {
5085 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
5093 while (tok
== TOK_LAND
) {
5101 gen_cast(&int_type
);
5109 if (tok
!= TOK_LAND
) {
5122 static void expr_lor(void)
5125 if (tok
== TOK_LOR
) {
5128 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
5136 while (tok
== TOK_LOR
) {
5144 gen_cast(&int_type
);
5152 if (tok
!= TOK_LOR
) {
5165 /* Assuming vtop is a value used in a conditional context
5166 (i.e. compared with zero) return 0 if it's false, 1 if
5167 true and -1 if it can't be statically determined. */
5168 static int condition_3way(void)
5171 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
&&
5172 (!(vtop
->r
& VT_SYM
) ||
5173 !(vtop
->sym
->type
.t
& VT_WEAK
))) {
5175 boolean
.t
= VT_BOOL
;
5184 static void expr_cond(void)
5186 int tt
, u
, r1
, r2
, rc
, t1
, t2
, bt1
, bt2
, islv
, c
, g
;
5188 CType type
, type1
, type2
;
5193 c
= condition_3way();
5194 g
= (tok
== ':' && gnu_ext
);
5196 /* needed to avoid having different registers saved in
5198 if (is_float(vtop
->type
.t
)) {
5200 #ifdef TCC_TARGET_X86_64
5201 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LDOUBLE
) {
5226 sv
= *vtop
; /* save value to handle it later */
5227 vtop
--; /* no vpop so that FP stack is not flushed */
5245 bt1
= t1
& VT_BTYPE
;
5247 bt2
= t2
& VT_BTYPE
;
5248 /* cast operands to correct type according to ISOC rules */
5249 if (is_float(bt1
) || is_float(bt2
)) {
5250 if (bt1
== VT_LDOUBLE
|| bt2
== VT_LDOUBLE
) {
5251 type
.t
= VT_LDOUBLE
;
5253 } else if (bt1
== VT_DOUBLE
|| bt2
== VT_DOUBLE
) {
5258 } else if (bt1
== VT_LLONG
|| bt2
== VT_LLONG
) {
5259 /* cast to biggest op */
5261 /* convert to unsigned if it does not fit in a long long */
5262 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
)) == (VT_LLONG
| VT_UNSIGNED
) ||
5263 (t2
& (VT_BTYPE
| VT_UNSIGNED
)) == (VT_LLONG
| VT_UNSIGNED
))
5264 type
.t
|= VT_UNSIGNED
;
5265 } else if (bt1
== VT_PTR
|| bt2
== VT_PTR
) {
5266 /* If one is a null ptr constant the result type
5268 if (is_null_pointer (vtop
))
5270 else if (is_null_pointer (&sv
))
5272 /* XXX: test pointer compatibility, C99 has more elaborate
5276 } else if (bt1
== VT_FUNC
|| bt2
== VT_FUNC
) {
5277 /* XXX: test function pointer compatibility */
5278 type
= bt1
== VT_FUNC
? type1
: type2
;
5279 } else if (bt1
== VT_STRUCT
|| bt2
== VT_STRUCT
) {
5280 /* XXX: test structure compatibility */
5281 type
= bt1
== VT_STRUCT
? type1
: type2
;
5282 } else if (bt1
== VT_VOID
|| bt2
== VT_VOID
) {
5283 /* NOTE: as an extension, we accept void on only one side */
5286 /* integer operations */
5288 /* convert to unsigned if it does not fit in an integer */
5289 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
)) == (VT_INT
| VT_UNSIGNED
) ||
5290 (t2
& (VT_BTYPE
| VT_UNSIGNED
)) == (VT_INT
| VT_UNSIGNED
))
5291 type
.t
|= VT_UNSIGNED
;
5293 /* keep structs lvalue by transforming `(expr ? a : b)` to `*(expr ? &a : &b)` so
5294 that `(expr ? a : b).mem` does not error with "lvalue expected" */
5295 islv
= (vtop
->r
& VT_LVAL
) && (sv
.r
& VT_LVAL
) && VT_STRUCT
== (type
.t
& VT_BTYPE
);
5298 /* now we convert second operand */
5302 mk_pointer(&vtop
->type
);
5304 } else if (VT_STRUCT
== (vtop
->type
.t
& VT_BTYPE
))
5309 if (is_float(type
.t
)) {
5311 #ifdef TCC_TARGET_X86_64
5312 if ((type
.t
& VT_BTYPE
) == VT_LDOUBLE
) {
5316 } else if ((type
.t
& VT_BTYPE
) == VT_LLONG
) {
5317 /* for long longs, we use fixed registers to avoid having
5318 to handle a complicated move */
5329 /* this is horrible, but we must also convert first
5335 mk_pointer(&vtop
->type
);
5337 } else if (VT_STRUCT
== (vtop
->type
.t
& VT_BTYPE
))
5343 move_reg(r2
, r1
, type
.t
);
5353 static void expr_eq(void)
5359 (tok
>= TOK_A_MOD
&& tok
<= TOK_A_DIV
) ||
5360 tok
== TOK_A_XOR
|| tok
== TOK_A_OR
||
5361 tok
== TOK_A_SHL
|| tok
== TOK_A_SAR
) {
5376 ST_FUNC
void gexpr(void)
5387 /* parse an expression and return its type without any side effect. */
5388 static void expr_type(CType
*type
)
5398 /* parse a unary expression and return its type without any side
5400 static void unary_type(CType
*type
)
5409 /* parse a constant expression and return value in vtop. */
5410 static void expr_const1(void)
5417 /* parse an integer constant and return its value. */
5418 static inline int64_t expr_const64(void)
5422 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) != VT_CONST
)
5423 expect("constant expression");
5429 /* parse an integer constant and return its value.
5430 Complain if it doesn't fit 32bit (signed or unsigned). */
5431 ST_FUNC
int expr_const(void)
5434 int64_t wc
= expr_const64();
5436 if (c
!= wc
&& (unsigned)c
!= wc
)
5437 tcc_error("constant exceeds 32 bit");
5441 /* return the label token if current token is a label, otherwise
5443 static int is_label(void)
5447 /* fast test first */
5448 if (tok
< TOK_UIDENT
)
5450 /* no need to save tokc because tok is an identifier */
5457 unget_tok(last_tok
);
5462 static void label_or_decl(int l
)
5466 /* fast test first */
5467 if (tok
>= TOK_UIDENT
)
5469 /* no need to save tokc because tok is an identifier */
5473 unget_tok(last_tok
);
5476 unget_tok(last_tok
);
5481 #ifndef TCC_TARGET_ARM64
5482 static void gfunc_return(CType
*func_type
)
5484 if ((func_type
->t
& VT_BTYPE
) == VT_STRUCT
) {
5485 CType type
, ret_type
;
5486 int ret_align
, ret_nregs
, regsize
;
5487 ret_nregs
= gfunc_sret(func_type
, func_var
, &ret_type
,
5488 &ret_align
, ®size
);
5489 if (0 == ret_nregs
) {
5490 /* if returning structure, must copy it to implicit
5491 first pointer arg location */
5494 vset(&type
, VT_LOCAL
| VT_LVAL
, func_vc
);
5497 /* copy structure value to pointer */
5500 /* returning structure packed into registers */
5501 int r
, size
, addr
, align
;
5502 size
= type_size(func_type
,&align
);
5503 if ((vtop
->r
!= (VT_LOCAL
| VT_LVAL
) ||
5504 (vtop
->c
.i
& (ret_align
-1)))
5505 && (align
& (ret_align
-1))) {
5506 loc
= (loc
- size
) & -ret_align
;
5509 vset(&type
, VT_LOCAL
| VT_LVAL
, addr
);
5513 vset(&ret_type
, VT_LOCAL
| VT_LVAL
, addr
);
5515 vtop
->type
= ret_type
;
5516 if (is_float(ret_type
.t
))
5517 r
= rc_fret(ret_type
.t
);
5528 if (--ret_nregs
== 0)
5530 /* We assume that when a structure is returned in multiple
5531 registers, their classes are consecutive values of the
5534 vtop
->c
.i
+= regsize
;
5538 } else if (is_float(func_type
->t
)) {
5539 gv(rc_fret(func_type
->t
));
5543 vtop
--; /* NOT vpop() because on x86 it would flush the fp stack */
5547 static int case_cmp(const void *pa
, const void *pb
)
5549 int64_t a
= (*(struct case_t
**) pa
)->v1
;
5550 int64_t b
= (*(struct case_t
**) pb
)->v1
;
5551 return a
< b
? -1 : a
> b
;
5554 static void gcase(struct case_t
**base
, int len
, int *bsym
)
5558 int ll
= (vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
;
5576 gtst_addr(0, p
->sym
); /* v1 <= x <= v2 */
5578 gcase(base
, len
/2, bsym
);
5579 if (cur_switch
->def_sym
)
5580 gjmp_addr(cur_switch
->def_sym
);
5582 *bsym
= gjmp(*bsym
);
5586 base
+= e
; len
-= e
;
5596 if (p
->v1
== p
->v2
) {
5598 gtst_addr(0, p
->sym
);
5608 gtst_addr(0, p
->sym
);
5614 static void block(int *bsym
, int *csym
, int is_expr
)
5616 int a
, b
, c
, d
, cond
;
5619 /* generate line number info */
5620 if (tcc_state
->do_debug
)
5621 tcc_debug_line(tcc_state
);
5624 /* default return value is (void) */
5626 vtop
->type
.t
= VT_VOID
;
5629 if (tok
== TOK_IF
) {
5631 int saved_nocode_wanted
= nocode_wanted
;
5636 cond
= condition_3way();
5642 nocode_wanted
|= 0x20000000;
5643 block(bsym
, csym
, 0);
5645 nocode_wanted
= saved_nocode_wanted
;
5647 if (c
== TOK_ELSE
) {
5652 nocode_wanted
|= 0x20000000;
5653 block(bsym
, csym
, 0);
5654 gsym(d
); /* patch else jmp */
5656 nocode_wanted
= saved_nocode_wanted
;
5659 } else if (tok
== TOK_WHILE
) {
5660 int saved_nocode_wanted
;
5661 nocode_wanted
&= ~0x20000000;
5671 saved_nocode_wanted
= nocode_wanted
;
5673 nocode_wanted
= saved_nocode_wanted
;
5678 } else if (tok
== '{') {
5680 int block_vla_sp_loc
= vla_sp_loc
, saved_vlas_in_scope
= vlas_in_scope
;
5683 /* record local declaration stack position */
5685 llabel
= local_label_stack
;
5688 /* handle local labels declarations */
5689 if (tok
== TOK_LABEL
) {
5692 if (tok
< TOK_UIDENT
)
5693 expect("label identifier");
5694 label_push(&local_label_stack
, tok
, LABEL_DECLARED
);
5704 while (tok
!= '}') {
5705 label_or_decl(VT_LOCAL
);
5709 block(bsym
, csym
, is_expr
);
5712 /* pop locally defined labels */
5713 label_pop(&local_label_stack
, llabel
);
5714 /* pop locally defined symbols */
5716 /* In the is_expr case (a statement expression is finished here),
5717 vtop might refer to symbols on the local_stack. Either via the
5718 type or via vtop->sym. We can't pop those nor any that in turn
5719 might be referred to. To make it easier we don't roll back
5720 any symbols in that case; some upper level call to block() will
5721 do that. We do have to remove such symbols from the lookup
5722 tables, though. sym_pop will do that. */
5723 sym_pop(&local_stack
, s
, is_expr
);
5725 /* Pop VLA frames and restore stack pointer if required */
5726 if (vlas_in_scope
> saved_vlas_in_scope
) {
5727 vla_sp_loc
= saved_vlas_in_scope
? block_vla_sp_loc
: vla_sp_root_loc
;
5730 vlas_in_scope
= saved_vlas_in_scope
;
5733 } else if (tok
== TOK_RETURN
) {
5737 gen_assign_cast(&func_vt
);
5738 gfunc_return(&func_vt
);
5741 /* jump unless last stmt in top-level block */
5742 if (tok
!= '}' || local_scope
!= 1)
5744 nocode_wanted
|= 0x20000000;
5745 } else if (tok
== TOK_BREAK
) {
5748 tcc_error("cannot break");
5749 *bsym
= gjmp(*bsym
);
5752 nocode_wanted
|= 0x20000000;
5753 } else if (tok
== TOK_CONTINUE
) {
5756 tcc_error("cannot continue");
5757 vla_sp_restore_root();
5758 *csym
= gjmp(*csym
);
5761 } else if (tok
== TOK_FOR
) {
5763 int saved_nocode_wanted
;
5764 nocode_wanted
&= ~0x20000000;
5770 /* c99 for-loop init decl? */
5771 if (!decl0(VT_LOCAL
, 1)) {
5772 /* no, regular for-loop init expr */
5798 saved_nocode_wanted
= nocode_wanted
;
5800 nocode_wanted
= saved_nocode_wanted
;
5805 sym_pop(&local_stack
, s
, 0);
5808 if (tok
== TOK_DO
) {
5809 int saved_nocode_wanted
;
5810 nocode_wanted
&= ~0x20000000;
5816 saved_nocode_wanted
= nocode_wanted
;
5824 nocode_wanted
= saved_nocode_wanted
;
5829 if (tok
== TOK_SWITCH
) {
5830 struct switch_t
*saved
, sw
;
5831 int saved_nocode_wanted
= nocode_wanted
;
5837 switchval
= *vtop
--;
5839 b
= gjmp(0); /* jump to first case */
5840 sw
.p
= NULL
; sw
.n
= 0; sw
.def_sym
= 0;
5844 nocode_wanted
= saved_nocode_wanted
;
5845 a
= gjmp(a
); /* add implicit break */
5848 qsort(sw
.p
, sw
.n
, sizeof(void*), case_cmp
);
5849 for (b
= 1; b
< sw
.n
; b
++)
5850 if (sw
.p
[b
- 1]->v2
>= sw
.p
[b
]->v1
)
5851 tcc_error("duplicate case value");
5852 /* Our switch table sorting is signed, so the compared
5853 value needs to be as well when it's 64bit. */
5854 if ((switchval
.type
.t
& VT_BTYPE
) == VT_LLONG
)
5855 switchval
.type
.t
&= ~VT_UNSIGNED
;
5857 gcase(sw
.p
, sw
.n
, &a
);
5860 gjmp_addr(sw
.def_sym
);
5861 dynarray_reset(&sw
.p
, &sw
.n
);
5866 if (tok
== TOK_CASE
) {
5867 struct case_t
*cr
= tcc_malloc(sizeof(struct case_t
));
5870 nocode_wanted
&= ~0x20000000;
5872 cr
->v1
= cr
->v2
= expr_const64();
5873 if (gnu_ext
&& tok
== TOK_DOTS
) {
5875 cr
->v2
= expr_const64();
5876 if (cr
->v2
< cr
->v1
)
5877 tcc_warning("empty case range");
5880 dynarray_add(&cur_switch
->p
, &cur_switch
->n
, cr
);
5883 goto block_after_label
;
5885 if (tok
== TOK_DEFAULT
) {
5890 if (cur_switch
->def_sym
)
5891 tcc_error("too many 'default'");
5892 cur_switch
->def_sym
= ind
;
5894 goto block_after_label
;
5896 if (tok
== TOK_GOTO
) {
5898 if (tok
== '*' && gnu_ext
) {
5902 if ((vtop
->type
.t
& VT_BTYPE
) != VT_PTR
)
5905 } else if (tok
>= TOK_UIDENT
) {
5906 s
= label_find(tok
);
5907 /* put forward definition if needed */
5909 s
= label_push(&global_label_stack
, tok
, LABEL_FORWARD
);
5911 if (s
->r
== LABEL_DECLARED
)
5912 s
->r
= LABEL_FORWARD
;
5914 vla_sp_restore_root();
5915 if (s
->r
& LABEL_FORWARD
)
5916 s
->jnext
= gjmp(s
->jnext
);
5918 gjmp_addr(s
->jnext
);
5921 expect("label identifier");
5924 } else if (tok
== TOK_ASM1
|| tok
== TOK_ASM2
|| tok
== TOK_ASM3
) {
5932 if (s
->r
== LABEL_DEFINED
)
5933 tcc_error("duplicate label '%s'", get_tok_str(s
->v
, NULL
));
5935 s
->r
= LABEL_DEFINED
;
5937 s
= label_push(&global_label_stack
, b
, LABEL_DEFINED
);
5941 /* we accept this, but it is a mistake */
5943 nocode_wanted
&= ~0x20000000;
5945 tcc_warning("deprecated use of label at end of compound statement");
5949 block(bsym
, csym
, is_expr
);
5952 /* expression case */
5967 #define EXPR_CONST 1
5970 static void parse_init_elem(int expr_type
)
5972 int saved_global_expr
;
5975 /* compound literals must be allocated globally in this case */
5976 saved_global_expr
= global_expr
;
5979 global_expr
= saved_global_expr
;
5980 /* NOTE: symbols are accepted */
5981 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) != VT_CONST
5982 #ifdef TCC_TARGET_PE
5983 || (vtop
->type
.t
& VT_IMPORT
)
5986 tcc_error("initializer element is not constant");
5994 /* t is the array or struct type. c is the array or struct
5995 address. cur_field is the pointer to the current
5996 value, for arrays the 'c' member contains the current start
5997 index and the 'r' contains the end index (in case of range init).
5998 'size_only' is true if only size info is needed (only used
6000 static void decl_designator(CType
*type
, Section
*sec
, unsigned long c
,
6001 Sym
**cur_field
, int size_only
)
6004 int notfirst
, index
, index_last
, align
, l
, nb_elems
, elem_size
;
6010 if (gnu_ext
&& (l
= is_label()) != 0)
6012 while (tok
== '[' || tok
== '.') {
6014 if (!(type
->t
& VT_ARRAY
))
6015 expect("array type");
6018 index
= expr_const();
6019 if (index
< 0 || (s
->c
>= 0 && index
>= s
->c
))
6020 tcc_error("invalid index");
6021 if (tok
== TOK_DOTS
&& gnu_ext
) {
6023 index_last
= expr_const();
6024 if (index_last
< 0 ||
6025 (s
->c
>= 0 && index_last
>= s
->c
) ||
6027 tcc_error("invalid index");
6033 (*cur_field
)->c
= index
;
6034 (*cur_field
)->r
= index_last
;
6036 type
= pointed_type(type
);
6037 elem_size
= type_size(type
, &align
);
6038 c
+= index
* elem_size
;
6039 /* NOTE: we only support ranges for last designator */
6040 nb_elems
= index_last
- index
+ 1;
6041 if (nb_elems
!= 1) {
6050 if ((type
->t
& VT_BTYPE
) != VT_STRUCT
)
6051 expect("struct/union type");
6052 f
= find_field(type
, l
);
6057 /* XXX: fix this mess by using explicit storage field */
6059 type1
.t
|= (type
->t
& ~VT_TYPE
);
6073 if (type
->t
& VT_ARRAY
) {
6074 index
= (*cur_field
)->c
;
6075 if (type
->ref
->c
>= 0 && index
>= type
->ref
->c
)
6076 tcc_error("index too large");
6077 type
= pointed_type(type
);
6078 c
+= index
* type_size(type
, &align
);
6081 while (f
&& (f
->v
& SYM_FIRST_ANOM
) && (f
->type
.t
& VT_BITFIELD
))
6082 *cur_field
= f
= f
->next
;
6084 tcc_error("too many field init");
6085 /* XXX: fix this mess by using explicit storage field */
6087 type1
.t
|= (type
->t
& ~VT_TYPE
);
6092 decl_initializer(type
, sec
, c
, 0, size_only
);
6094 /* XXX: make it more general */
6095 if (!size_only
&& nb_elems
> 1) {
6096 unsigned long c_end
;
6101 vset(type
, VT_LOCAL
|VT_LVAL
, c
);
6102 for (i
= 1; i
< nb_elems
; i
++) {
6103 vset(type
, VT_LOCAL
|VT_LVAL
, c
+ elem_size
* i
);
6109 c_end
= c
+ nb_elems
* elem_size
;
6110 if (c_end
> sec
->data_allocated
)
6111 section_realloc(sec
, c_end
);
6112 src
= sec
->data
+ c
;
6114 for(i
= 1; i
< nb_elems
; i
++) {
6116 memcpy(dst
, src
, elem_size
);
6122 /* store a value or an expression directly in global data or in local array */
6123 static void init_putv(CType
*type
, Section
*sec
, unsigned long c
)
6125 int bt
, bit_pos
, bit_size
;
6127 unsigned long long bit_mask
;
6131 dtype
.t
&= ~VT_CONSTANT
; /* need to do that to avoid false warning */
6135 /* XXX: not portable */
6136 /* XXX: generate error if incorrect relocation */
6137 gen_assign_cast(&dtype
);
6138 bt
= type
->t
& VT_BTYPE
;
6139 size
= type_size(type
, &align
);
6140 if (c
+ size
> sec
->data_allocated
) {
6141 section_realloc(sec
, c
+ size
);
6143 ptr
= sec
->data
+ c
;
6144 /* XXX: make code faster ? */
6145 if (!(type
->t
& VT_BITFIELD
)) {
6147 bit_size
= PTR_SIZE
* 8;
6150 bit_pos
= (vtop
->type
.t
>> VT_STRUCT_SHIFT
) & 0x3f;
6151 bit_size
= (vtop
->type
.t
>> (VT_STRUCT_SHIFT
+ 6)) & 0x3f;
6152 bit_mask
= (1LL << bit_size
) - 1;
6154 if ((vtop
->r
& (VT_SYM
|VT_CONST
)) == (VT_SYM
|VT_CONST
) &&
6155 vtop
->sym
->v
>= SYM_FIRST_ANOM
&&
6156 /* XXX This rejects compount literals like
6157 '(void *){ptr}'. The problem is that '&sym' is
6158 represented the same way, which would be ruled out
6159 by the SYM_FIRST_ANOM check above, but also '"string"'
6160 in 'char *p = "string"' is represented the same
6161 with the type being VT_PTR and the symbol being an
6162 anonymous one. That is, there's no difference in vtop
6163 between '(void *){x}' and '&(void *){x}'. Ignore
6164 pointer typed entities here. Hopefully no real code
6165 will every use compound literals with scalar type. */
6166 (vtop
->type
.t
& VT_BTYPE
) != VT_PTR
) {
6167 /* These come from compound literals, memcpy stuff over. */
6171 esym
= &((ElfW(Sym
) *)symtab_section
->data
)[vtop
->sym
->c
];
6172 ssec
= tcc_state
->sections
[esym
->st_shndx
];
6173 memmove (ptr
, ssec
->data
+ esym
->st_value
, size
);
6175 /* We need to copy over all memory contents, and that
6176 includes relocations. Use the fact that relocs are
6177 created it order, so look from the end of relocs
6178 until we hit one before the copied region. */
6179 int num_relocs
= ssec
->reloc
->data_offset
/ sizeof(*rel
);
6180 rel
= (ElfW_Rel
*)(ssec
->reloc
->data
+ ssec
->reloc
->data_offset
);
6181 while (num_relocs
--) {
6183 if (rel
->r_offset
>= esym
->st_value
+ size
)
6185 if (rel
->r_offset
< esym
->st_value
)
6187 /* Note: if the same fields are initialized multiple
6188 times (possible with designators) then we possibly
6189 add multiple relocations for the same offset here.
6190 That would lead to wrong code, the last reloc needs
6191 to win. We clean this up later after the whole
6192 initializer is parsed. */
6193 put_elf_reloca(symtab_section
, sec
,
6194 c
+ rel
->r_offset
- esym
->st_value
,
6195 ELFW(R_TYPE
)(rel
->r_info
),
6196 ELFW(R_SYM
)(rel
->r_info
),
6197 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
6206 if ((vtop
->r
& VT_SYM
) &&
6212 (bt
== VT_LLONG
&& bit_size
!= 64) ||
6216 (bt
== VT_INT
&& bit_size
!= 32)
6219 tcc_error("initializer element is not computable at load time");
6221 /* XXX: when cross-compiling we assume that each type has the
6222 same representation on host and target, which is likely to
6223 be wrong in the case of long double */
6225 vtop
->c
.i
= (vtop
->c
.i
!= 0);
6227 *(char *)ptr
|= (vtop
->c
.i
& bit_mask
) << bit_pos
;
6230 *(short *)ptr
|= (vtop
->c
.i
& bit_mask
) << bit_pos
;
6233 *(double *)ptr
= vtop
->c
.d
;
6236 if (sizeof(long double) == LDOUBLE_SIZE
)
6237 *(long double *)ptr
= vtop
->c
.ld
;
6238 else if (sizeof(double) == LDOUBLE_SIZE
)
6239 *(double *)ptr
= vtop
->c
.ld
;
6241 tcc_error("can't cross compile long double constants");
6245 *(long long *)ptr
|= (vtop
->c
.i
& bit_mask
) << bit_pos
;
6252 addr_t val
= (vtop
->c
.i
& bit_mask
) << bit_pos
;
6253 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
6254 if (vtop
->r
& VT_SYM
)
6255 greloca(sec
, vtop
->sym
, c
, R_DATA_PTR
, val
);
6257 *(addr_t
*)ptr
|= val
;
6259 if (vtop
->r
& VT_SYM
)
6260 greloc(sec
, vtop
->sym
, c
, R_DATA_PTR
);
6261 *(addr_t
*)ptr
|= val
;
6267 int val
= (vtop
->c
.i
& bit_mask
) << bit_pos
;
6268 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
6269 if (vtop
->r
& VT_SYM
)
6270 greloca(sec
, vtop
->sym
, c
, R_DATA_PTR
, val
);
6274 if (vtop
->r
& VT_SYM
)
6275 greloc(sec
, vtop
->sym
, c
, R_DATA_PTR
);
6284 vset(&dtype
, VT_LOCAL
|VT_LVAL
, c
);
6291 /* put zeros for variable based init */
6292 static void init_putz(Section
*sec
, unsigned long c
, int size
)
6295 /* nothing to do because globals are already set to zero */
6297 vpush_global_sym(&func_old_type
, TOK_memset
);
6299 #ifdef TCC_TARGET_ARM
6310 /* 't' contains the type and storage info. 'c' is the offset of the
6311 object in section 'sec'. If 'sec' is NULL, it means stack based
6312 allocation. 'first' is true if array '{' must be read (multi
6313 dimension implicit array init handling). 'size_only' is true if
6314 size only evaluation is wanted (only for arrays). */
6315 static void decl_initializer(CType
*type
, Section
*sec
, unsigned long c
,
6316 int first
, int size_only
)
6318 int index
, array_length
, n
, no_oblock
, nb
, parlevel
, parlevel1
, i
;
6325 /* If we currently are at an '}' or ',' we have read an initializer
6326 element in one of our callers, and not yet consumed it. */
6327 have_elem
= tok
== '}' || tok
== ',';
6328 if (!have_elem
&& tok
!= '{' &&
6329 /* In case of strings we have special handling for arrays, so
6330 don't consume them as initializer value (which would commit them
6331 to some anonymous symbol). */
6332 tok
!= TOK_LSTR
&& tok
!= TOK_STR
&&
6334 parse_init_elem(!sec
? EXPR_ANY
: EXPR_CONST
);
6339 !(type
->t
& VT_ARRAY
) &&
6340 /* Use i_c_parameter_t, to strip toplevel qualifiers.
6341 The source type might have VT_CONSTANT set, which is
6342 of course assignable to non-const elements. */
6343 is_compatible_parameter_types(type
, &vtop
->type
)) {
6344 init_putv(type
, sec
, c
);
6345 } else if (type
->t
& VT_ARRAY
) {
6349 t1
= pointed_type(type
);
6350 size1
= type_size(t1
, &align1
);
6353 if ((first
&& tok
!= TOK_LSTR
&& tok
!= TOK_STR
) ||
6356 tcc_error("character array initializer must be a literal,"
6357 " optionally enclosed in braces");
6362 /* only parse strings here if correct type (otherwise: handle
6363 them as ((w)char *) expressions */
6364 if ((tok
== TOK_LSTR
&&
6365 #ifdef TCC_TARGET_PE
6366 (t1
->t
& VT_BTYPE
) == VT_SHORT
&& (t1
->t
& VT_UNSIGNED
)
6368 (t1
->t
& VT_BTYPE
) == VT_INT
6370 ) || (tok
== TOK_STR
&& (t1
->t
& VT_BTYPE
) == VT_BYTE
)) {
6371 while (tok
== TOK_STR
|| tok
== TOK_LSTR
) {
6374 /* compute maximum number of chars wanted */
6376 cstr_len
= tokc
.str
.size
;
6378 cstr_len
= tokc
.str
.size
/ sizeof(nwchar_t
);
6381 if (n
>= 0 && nb
> (n
- array_length
))
6382 nb
= n
- array_length
;
6385 tcc_warning("initializer-string for array is too long");
6386 /* in order to go faster for common case (char
6387 string in global variable, we handle it
6389 if (sec
&& tok
== TOK_STR
&& size1
== 1) {
6390 memcpy(sec
->data
+ c
+ array_length
, tokc
.str
.data
, nb
);
6394 ch
= ((unsigned char *)tokc
.str
.data
)[i
];
6396 ch
= ((nwchar_t
*)tokc
.str
.data
)[i
];
6398 init_putv(t1
, sec
, c
+ (array_length
+ i
) * size1
);
6405 /* only add trailing zero if enough storage (no
6406 warning in this case since it is standard) */
6407 if (n
< 0 || array_length
< n
) {
6410 init_putv(t1
, sec
, c
+ (array_length
* size1
));
6420 while (tok
!= '}' || have_elem
) {
6421 decl_designator(type
, sec
, c
, &f
, size_only
);
6424 /* must put zero in holes (note that doing it that way
6425 ensures that it even works with designators) */
6426 if (!size_only
&& array_length
< index
) {
6427 init_putz(sec
, c
+ array_length
* size1
,
6428 (index
- array_length
) * size1
);
6430 if (type
->t
& VT_ARRAY
) {
6431 index
= indexsym
.c
= ++indexsym
.r
;
6433 index
= index
+ type_size(&f
->type
, &align1
);
6434 if (s
->type
.t
== TOK_UNION
)
6439 if (index
> array_length
)
6440 array_length
= index
;
6442 if (type
->t
& VT_ARRAY
) {
6443 /* special test for multi dimensional arrays (may not
6444 be strictly correct if designators are used at the
6446 if (no_oblock
&& index
>= n
)
6449 if (no_oblock
&& f
== NULL
)
6457 /* put zeros at the end */
6458 if (!size_only
&& array_length
< n
) {
6459 init_putz(sec
, c
+ array_length
* size1
,
6460 (n
- array_length
) * size1
);
6464 /* patch type size if needed, which happens only for array types */
6466 s
->c
= array_length
;
6467 } else if ((type
->t
& VT_BTYPE
) == VT_STRUCT
) {
6470 if (first
|| tok
== '{') {
6479 } else if (tok
== '{') {
6481 decl_initializer(type
, sec
, c
, first
, size_only
);
6483 } else if (size_only
) {
6484 /* If we supported only ISO C we wouldn't have to accept calling
6485 this on anything than an array size_only==1 (and even then
6486 only on the outermost level, so no recursion would be needed),
6487 because initializing a flex array member isn't supported.
6488 But GNU C supports it, so we need to recurse even into
6489 subfields of structs and arrays when size_only is set. */
6490 /* just skip expression */
6491 parlevel
= parlevel1
= 0;
6492 while ((parlevel
> 0 || parlevel1
> 0 ||
6493 (tok
!= '}' && tok
!= ',')) && tok
!= -1) {
6496 else if (tok
== ')') {
6497 if (parlevel
== 0 && parlevel1
== 0)
6501 else if (tok
== '{')
6503 else if (tok
== '}') {
6504 if (parlevel
== 0 && parlevel1
== 0)
6512 /* This should happen only when we haven't parsed
6513 the init element above for fear of committing a
6514 string constant to memory too early. */
6515 if (tok
!= TOK_STR
&& tok
!= TOK_LSTR
)
6516 expect("string constant");
6517 parse_init_elem(!sec
? EXPR_ANY
: EXPR_CONST
);
6519 init_putv(type
, sec
, c
);
6523 /* parse an initializer for type 't' if 'has_init' is non zero, and
6524 allocate space in local or global data space ('r' is either
6525 VT_LOCAL or VT_CONST). If 'v' is non zero, then an associated
6526 variable 'v' of scope 'scope' is declared before initializers
6527 are parsed. If 'v' is zero, then a reference to the new object
6528 is put in the value stack. If 'has_init' is 2, a special parsing
6529 is done to handle string constants. */
6530 static void decl_initializer_alloc(CType
*type
, AttributeDef
*ad
, int r
,
6531 int has_init
, int v
, int scope
)
6533 int size
, align
, addr
, data_offset
;
6535 ParseState saved_parse_state
= {0};
6536 TokenString
*init_str
= NULL
;
6538 Sym
*flexible_array
;
6540 flexible_array
= NULL
;
6541 if ((type
->t
& VT_BTYPE
) == VT_STRUCT
) {
6542 Sym
*field
= type
->ref
->next
;
6545 field
= field
->next
;
6546 if (field
->type
.t
& VT_ARRAY
&& field
->type
.ref
->c
< 0)
6547 flexible_array
= field
;
6551 size
= type_size(type
, &align
);
6552 /* If unknown size, we must evaluate it before
6553 evaluating initializers because
6554 initializers can generate global data too
6555 (e.g. string pointers or ISOC99 compound
6556 literals). It also simplifies local
6557 initializers handling */
6558 if (size
< 0 || (flexible_array
&& has_init
)) {
6560 tcc_error("unknown type size");
6561 /* get all init string */
6562 init_str
= tok_str_alloc();
6563 if (has_init
== 2) {
6564 /* only get strings */
6565 while (tok
== TOK_STR
|| tok
== TOK_LSTR
) {
6566 tok_str_add_tok(init_str
);
6571 while (level
> 0 || (tok
!= ',' && tok
!= ';')) {
6573 tcc_error("unexpected end of file in initializer");
6574 tok_str_add_tok(init_str
);
6577 else if (tok
== '}') {
6587 tok_str_add(init_str
, -1);
6588 tok_str_add(init_str
, 0);
6591 save_parse_state(&saved_parse_state
);
6593 begin_macro(init_str
, 1);
6595 decl_initializer(type
, NULL
, 0, 1, 1);
6596 /* prepare second initializer parsing */
6597 macro_ptr
= init_str
->str
;
6600 /* if still unknown size, error */
6601 size
= type_size(type
, &align
);
6603 tcc_error("unknown type size");
6605 /* If there's a flex member and it was used in the initializer
6607 if (flexible_array
&&
6608 flexible_array
->type
.ref
->c
> 0)
6609 size
+= flexible_array
->type
.ref
->c
6610 * pointed_size(&flexible_array
->type
);
6611 /* take into account specified alignment if bigger */
6612 if (ad
->a
.aligned
) {
6613 int speca
= 1 << (ad
->a
.aligned
- 1);
6616 } else if (ad
->a
.packed
) {
6619 if ((r
& VT_VALMASK
) == VT_LOCAL
) {
6621 #ifdef CONFIG_TCC_BCHECK
6622 if (tcc_state
->do_bounds_check
&& (type
->t
& VT_ARRAY
)) {
6626 loc
= (loc
- size
) & -align
;
6628 #ifdef CONFIG_TCC_BCHECK
6629 /* handles bounds */
6630 /* XXX: currently, since we do only one pass, we cannot track
6631 '&' operators, so we add only arrays */
6632 if (tcc_state
->do_bounds_check
&& (type
->t
& VT_ARRAY
)) {
6634 /* add padding between regions */
6636 /* then add local bound info */
6637 bounds_ptr
= section_ptr_add(lbounds_section
, 2 * sizeof(addr_t
));
6638 bounds_ptr
[0] = addr
;
6639 bounds_ptr
[1] = size
;
6643 /* local variable */
6644 #ifdef CONFIG_TCC_ASM
6645 if (ad
->asm_label
) {
6646 int reg
= asm_parse_regvar(ad
->asm_label
);
6648 r
= (r
& ~VT_VALMASK
) | reg
;
6651 sym_push(v
, type
, r
, addr
);
6653 /* push local reference */
6654 vset(type
, r
, addr
);
6658 if (v
&& scope
== VT_CONST
) {
6659 /* see if the symbol was already defined */
6662 patch_storage(sym
, type
);
6663 if (sym
->type
.t
& VT_EXTERN
) {
6664 /* if the variable is extern, it was not allocated */
6665 sym
->type
.t
&= ~VT_EXTERN
;
6666 /* set array size if it was omitted in extern
6668 if ((sym
->type
.t
& VT_ARRAY
) &&
6669 sym
->type
.ref
->c
< 0 &&
6671 sym
->type
.ref
->c
= type
->ref
->c
;
6672 } else if (!has_init
) {
6673 /* we accept several definitions of the same
6674 global variable. this is tricky, because we
6675 must play with the SHN_COMMON type of the symbol */
6676 /* no init data, we won't add more to the symbol */
6677 update_storage(sym
);
6679 } else if (sym
->c
) {
6681 esym
= &((ElfW(Sym
) *)symtab_section
->data
)[sym
->c
];
6682 if (esym
->st_shndx
== data_section
->sh_num
)
6683 tcc_error("redefinition of '%s'", get_tok_str(v
, NULL
));
6688 /* allocate symbol in corresponding section */
6693 else if (tcc_state
->nocommon
)
6698 data_offset
= sec
->data_offset
;
6699 data_offset
= (data_offset
+ align
- 1) & -align
;
6701 /* very important to increment global pointer at this time
6702 because initializers themselves can create new initializers */
6703 data_offset
+= size
;
6704 #ifdef CONFIG_TCC_BCHECK
6705 /* add padding if bound check */
6706 if (tcc_state
->do_bounds_check
)
6709 sec
->data_offset
= data_offset
;
6710 /* allocate section space to put the data */
6711 if (sec
->sh_type
!= SHT_NOBITS
&&
6712 data_offset
> sec
->data_allocated
)
6713 section_realloc(sec
, data_offset
);
6714 /* align section if needed */
6715 if (align
> sec
->sh_addralign
)
6716 sec
->sh_addralign
= align
;
6718 addr
= 0; /* avoid warning */
6722 if (scope
!= VT_CONST
|| !sym
) {
6723 sym
= sym_push(v
, type
, r
| VT_SYM
, 0);
6724 sym
->asm_label
= ad
->asm_label
;
6726 /* update symbol definition */
6728 put_extern_sym(sym
, sec
, addr
, size
);
6730 put_extern_sym(sym
, SECTION_COMMON
, align
, size
);
6734 /* push global reference */
6735 sym
= get_sym_ref(type
, sec
, addr
, size
);
6736 vpushsym(type
, sym
);
6739 #ifdef CONFIG_TCC_BCHECK
6740 /* handles bounds now because the symbol must be defined
6741 before for the relocation */
6742 if (tcc_state
->do_bounds_check
) {
6745 greloc(bounds_section
, sym
, bounds_section
->data_offset
, R_DATA_PTR
);
6746 /* then add global bound info */
6747 bounds_ptr
= section_ptr_add(bounds_section
, 2 * sizeof(addr_t
));
6748 bounds_ptr
[0] = 0; /* relocated */
6749 bounds_ptr
[1] = size
;
6754 if (type
->t
& VT_VLA
) {
6757 /* save current stack pointer */
6758 if (vlas_in_scope
== 0) {
6759 if (vla_sp_root_loc
== -1)
6760 vla_sp_root_loc
= (loc
-= PTR_SIZE
);
6761 gen_vla_sp_save(vla_sp_root_loc
);
6764 vla_runtime_type_size(type
, &a
);
6765 gen_vla_alloc(type
, a
);
6766 gen_vla_sp_save(addr
);
6770 } else if (has_init
) {
6771 size_t oldreloc_offset
= 0;
6772 if (sec
&& sec
->reloc
)
6773 oldreloc_offset
= sec
->reloc
->data_offset
;
6774 decl_initializer(type
, sec
, addr
, 1, 0);
6775 if (sec
&& sec
->reloc
)
6776 squeeze_multi_relocs(sec
, oldreloc_offset
);
6777 /* patch flexible array member size back to -1, */
6778 /* for possible subsequent similar declarations */
6780 flexible_array
->type
.ref
->c
= -1;
6784 /* restore parse state if needed */
6787 restore_parse_state(&saved_parse_state
);
6791 /* parse an old style function declaration list */
6792 /* XXX: check multiple parameter */
6793 static void func_decl_list(Sym
*func_sym
)
6800 /* parse each declaration */
6801 while (tok
!= '{' && tok
!= ';' && tok
!= ',' && tok
!= TOK_EOF
&&
6802 tok
!= TOK_ASM1
&& tok
!= TOK_ASM2
&& tok
!= TOK_ASM3
) {
6803 if (!parse_btype(&btype
, &ad
))
6804 expect("declaration list");
6805 if (((btype
.t
& VT_BTYPE
) == VT_ENUM
||
6806 (btype
.t
& VT_BTYPE
) == VT_STRUCT
) &&
6808 /* we accept no variable after */
6812 type_decl(&type
, &ad
, &v
, TYPE_DIRECT
);
6813 /* find parameter in function parameter list */
6816 if ((s
->v
& ~SYM_FIELD
) == v
)
6820 tcc_error("declaration for parameter '%s' but no such parameter",
6821 get_tok_str(v
, NULL
));
6823 /* check that no storage specifier except 'register' was given */
6824 if (type
.t
& VT_STORAGE
)
6825 tcc_error("storage class specified for '%s'", get_tok_str(v
, NULL
));
6826 convert_parameter_type(&type
);
6827 /* we can add the type (NOTE: it could be local to the function) */
6829 /* accept other parameters */
6840 /* parse a function defined by symbol 'sym' and generate its code in
6841 'cur_text_section' */
6842 static void gen_function(Sym
*sym
)
6845 ind
= cur_text_section
->data_offset
;
6846 /* NOTE: we patch the symbol size later */
6847 put_extern_sym(sym
, cur_text_section
, ind
, 0);
6848 funcname
= get_tok_str(sym
->v
, NULL
);
6850 /* Initialize VLA state */
6852 vla_sp_root_loc
= -1;
6853 /* put debug symbol */
6854 tcc_debug_funcstart(tcc_state
, sym
);
6855 /* push a dummy symbol to enable local sym storage */
6856 sym_push2(&local_stack
, SYM_FIELD
, 0, 0);
6857 local_scope
= 1; /* for function parameters */
6858 gfunc_prolog(&sym
->type
);
6861 block(NULL
, NULL
, 0);
6865 cur_text_section
->data_offset
= ind
;
6866 label_pop(&global_label_stack
, NULL
);
6867 /* reset local stack */
6869 sym_pop(&local_stack
, NULL
, 0);
6870 /* end of function */
6871 /* patch symbol size */
6872 ((ElfW(Sym
) *)symtab_section
->data
)[sym
->c
].st_size
=
6874 tcc_debug_funcend(tcc_state
, ind
- func_ind
);
6875 /* It's better to crash than to generate wrong code */
6876 cur_text_section
= NULL
;
6877 funcname
= ""; /* for safety */
6878 func_vt
.t
= VT_VOID
; /* for safety */
6879 func_var
= 0; /* for safety */
6880 ind
= 0; /* for safety */
6885 static void gen_inline_functions(TCCState
*s
)
6888 int inline_generated
, i
, ln
;
6889 struct InlineFunc
*fn
;
6891 ln
= file
->line_num
;
6892 /* iterate while inline function are referenced */
6894 inline_generated
= 0;
6895 for (i
= 0; i
< s
->nb_inline_fns
; ++i
) {
6896 fn
= s
->inline_fns
[i
];
6898 if (sym
&& sym
->c
) {
6899 /* the function was used: generate its code and
6900 convert it to a normal function */
6903 pstrcpy(file
->filename
, sizeof file
->filename
, fn
->filename
);
6904 sym
->type
.t
&= ~VT_INLINE
;
6906 begin_macro(fn
->func_str
, 1);
6908 cur_text_section
= text_section
;
6912 inline_generated
= 1;
6915 if (!inline_generated
)
6918 file
->line_num
= ln
;
/* Release the recorded token strings of inline functions that were never
   emitted, then free the InlineFunc array itself. */
6921 ST_FUNC
void free_inline_functions(TCCState
*s
)
6924 /* free tokens of unused inline functions */
6925 for (i
= 0; i
< s
->nb_inline_fns
; ++i
) {
6926 struct InlineFunc
*fn
= s
->inline_fns
[i
];
/* NOTE(review): a guard line (orig. 6927) that skips already-generated
   functions appears to be missing from this extraction — confirm. */
6928 tok_str_free(fn
->func_str
);
/* frees each InlineFunc element and resets the dynarray bookkeeping */
6930 dynarray_reset(&s
->inline_fns
, &s
->nb_inline_fns
);
6933 /* 'l' is VT_LOCAL or VT_CONST to define default storage type */
/* decl0: parse one run of external or local declarations (the workhorse
   behind decl()). Handles typedefs, function definitions, prototypes,
   static-inline recording, and variable declarations with initializers.
   NOTE(review): this extraction is line-mangled and many original lines
   (braces, whole statements) are missing; code left byte-identical. */
6934 static int decl0(int l
, int is_for_loop_init
)
/* no base type could be parsed: handle ';', global asm, K&R defaults */
6942 if (!parse_btype(&btype
, &ad
)) {
6943 if (is_for_loop_init
)
6945 /* skip redundant ';' */
6946 /* XXX: find more elegant solution */
/* top-level asm("...") statement (global scope only) */
6951 if (l
== VT_CONST
&&
6952 (tok
== TOK_ASM1
|| tok
== TOK_ASM2
|| tok
== TOK_ASM3
)) {
6953 /* global asm block */
6957 /* special test for old K&R protos without explicit int
6958 type. Only accepted when defining global data */
6959 if (l
== VT_LOCAL
|| tok
< TOK_UIDENT
)
/* bare 'struct foo;' / 'enum foo;' declaration: no declarator follows */
6963 if (((btype
.t
& VT_BTYPE
) == VT_ENUM
||
6964 (btype
.t
& VT_BTYPE
) == VT_STRUCT
) &&
6966 if ((btype
.t
& VT_BTYPE
) == VT_STRUCT
) {
6967 int v
= btype
.ref
->v
;
/* anonymous struct tag with no instance declared is almost surely a bug */
6968 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) >= SYM_FIRST_ANOM
)
6969 tcc_warning("unnamed struct/union that defines no instances");
6974 while (1) { /* iterate thru each declaration */
6976 /* If the base type itself was an array type of unspecified
6977 size (like in 'typedef int arr[]; arr x = {1};') then
6978 we will overwrite the unknown size by the real one for
6979 this decl. We need to unshare the ref symbol holding
6981 if ((type
.t
& VT_ARRAY
) && type
.ref
->c
< 0) {
/* unshare: push a private copy of the array ref so the size patch
   does not leak into the typedef'd base type */
6982 type
.ref
= sym_push(SYM_FIELD
, &type
.ref
->type
, 0, type
.ref
->c
);
/* parse the declarator part; 'v' receives the declared identifier */
6984 type_decl(&type
, &ad
, &v
, TYPE_DIRECT
);
/* debug dump of the parsed type (presumably under a verbosity guard
   dropped by this extraction — confirm) */
6988 type_to_str(buf
, sizeof(buf
), t
, get_tok_str(v
, NULL
));
6989 printf("type = '%s'\n", buf
);
/* declarator turned out to be a function */
6992 if ((type
.t
& VT_BTYPE
) == VT_FUNC
) {
6993 if ((type
.t
& VT_STATIC
) && (l
== VT_LOCAL
)) {
6994 tcc_error("function without file scope cannot be static");
6996 /* if old style function prototype, we accept a
6999 if (sym
->c
== FUNC_OLD
)
7000 func_decl_list(sym
);
/* GNU extension: asm("label") renames the emitted symbol */
7003 if (gnu_ext
&& (tok
== TOK_ASM1
|| tok
== TOK_ASM2
|| tok
== TOK_ASM3
)) {
7004 ad
.asm_label
= asm_label_instr();
7005 /* parse one last attribute list, after asm label */
7006 parse_attribute(&ad
);
7013 #ifdef TCC_TARGET_PE
/* PE (Windows) dllimport/dllexport handling */
7014 if (ad
.a
.func_import
|| ad
.a
.func_export
) {
7015 if (type
.t
& (VT_STATIC
|VT_TYPEDEF
))
7016 tcc_error("cannot have dll linkage with static or typedef");
7017 if (ad
.a
.func_export
)
7018 type
.t
|= VT_EXPORT
;
7019 else if ((type
.t
& VT_BTYPE
) != VT_FUNC
)
7020 type
.t
|= VT_IMPORT
|VT_EXTERN
;
/* fold ELF visibility attribute into the type word */
7023 type
.t
|= ad
.a
.visibility
<< VT_VIS_SHIFT
;
/* function DEFINITION path (body follows) */
7027 tcc_error("cannot use local functions");
7028 if ((type
.t
& VT_BTYPE
) != VT_FUNC
)
7029 expect("function definition");
7031 /* reject abstract declarators in function definition */
7033 while ((sym
= sym
->next
) != NULL
)
7034 if (!(sym
->v
& ~SYM_FIELD
))
7035 expect("identifier");
7037 /* XXX: cannot do better now: convert extern line to static inline */
7038 if ((type
.t
& (VT_EXTERN
| VT_INLINE
)) == (VT_EXTERN
| VT_INLINE
))
7039 type
.t
= (type
.t
& ~VT_EXTERN
) | VT_STATIC
;
/* a prior declaration exists: merge attributes from the prototype */
7044 if ((sym
->type
.t
& VT_BTYPE
) != VT_FUNC
)
7047 ref
= sym
->type
.ref
;
7049 /* use func_call from prototype if not defined */
7050 if (ref
->a
.func_call
!= FUNC_CDECL
7051 && type
.ref
->a
.func_call
== FUNC_CDECL
)
7052 type
.ref
->a
.func_call
= ref
->a
.func_call
;
7054 /* use static from prototype */
7055 if (sym
->type
.t
& VT_STATIC
)
7056 type
.t
= (type
.t
& ~VT_EXTERN
) | VT_STATIC
;
7058 /* If the definition has no visibility use the
7059 one from prototype. */
7060 if (! (type
.t
& VT_VIS_MASK
))
7061 type
.t
|= sym
->type
.t
& VT_VIS_MASK
;
7063 /* apply other storage attributes from prototype */
7064 type
.t
|= sym
->type
.t
& (VT_EXPORT
|VT_WEAK
);
7066 if (!is_compatible_types(&sym
->type
, &type
)) {
7068 tcc_error("incompatible types for redefinition of '%s'",
7069 get_tok_str(v
, NULL
));
/* func_body set means a body was already compiled: true redefinition */
7071 if (ref
->a
.func_body
)
7072 tcc_error("redefinition of '%s'", get_tok_str(v
, NULL
));
7073 /* if symbol is already defined, then put complete type */
7077 /* put function symbol */
7078 sym
= global_identifier_push(v
, type
.t
, 0);
7079 sym
->type
.ref
= type
.ref
;
/* mark that this symbol now owns a compiled body */
7082 sym
->type
.ref
->a
.func_body
= 1;
7083 sym
->r
= VT_SYM
| VT_CONST
;
7085 /* static inline functions are just recorded as a kind
7086 of macro. Their code will be emitted at the end of
7087 the compilation unit only if they are used */
7088 if ((type
.t
& (VT_INLINE
| VT_STATIC
)) ==
7089 (VT_INLINE
| VT_STATIC
)) {
7091 struct InlineFunc
*fn
;
7092 const char *filename
;
7094 filename
= file
? file
->filename
: "";
/* InlineFunc ends in a flexible filename buffer: one allocation
   holds the struct plus the NUL-terminated name */
7095 fn
= tcc_malloc(sizeof *fn
+ strlen(filename
));
7096 strcpy(fn
->filename
, filename
);
7098 fn
->func_str
= tok_str_alloc();
/* token-copy loop: save the body verbatim until the matching '}'
   (NOTE(review): loop header around orig. 7099-7103 is missing) */
7104 tcc_error("unexpected end of file");
7105 tok_str_add_tok(fn
->func_str
);
7110 } else if (t
== '}') {
7112 if (block_level
== 0)
/* terminate the recorded token string */
7116 tok_str_add(fn
->func_str
, -1);
7117 tok_str_add(fn
->func_str
, 0);
7118 dynarray_add(&tcc_state
->inline_fns
, &tcc_state
->nb_inline_fns
, fn
);
7121 /* compute text section */
7122 cur_text_section
= ad
.section
;
7123 if (!cur_text_section
)
7124 cur_text_section
= text_section
;
/* non-function declarator: typedef, prototype, or variable */
7129 if (type
.t
& VT_TYPEDEF
) {
7130 /* save typedefed type */
7131 /* XXX: test storage specifiers ? */
7133 if (sym
&& sym
->scope
== local_scope
) {
7134 if (!is_compatible_types(&sym
->type
, &type
)
7135 || !(sym
->type
.t
& VT_TYPEDEF
))
7136 tcc_error("incompatible redefinition of '%s'",
7137 get_tok_str(v
, NULL
));
7140 sym
= sym_push(v
, &type
, 0, 0);
7145 if ((type
.t
& VT_BTYPE
) == VT_FUNC
) {
7146 /* external function definition */
7147 /* specific case for func_call attribute */
7149 } else if (!(type
.t
& VT_ARRAY
)) {
7150 /* not lvalue if array */
7151 r
|= lvalue_type(type
.t
);
7153 has_init
= (tok
== '=');
7154 if (has_init
&& (type
.t
& VT_VLA
))
7155 tcc_error("variable length array cannot be initialized");
/* extern objects, function prototypes, and tentative zero-sized
   static arrays all go through external_sym (no storage here) */
7156 if ((type
.t
& VT_EXTERN
) || ((type
.t
& VT_BTYPE
) == VT_FUNC
) ||
7157 ((type
.t
& VT_ARRAY
) && (type
.t
& VT_STATIC
) &&
7158 !has_init
&& l
== VT_CONST
&& type
.ref
->c
< 0)) {
7159 /* external variable or function */
7160 /* NOTE: as GCC, uninitialized global static
7161 arrays of null size are considered as
7163 sym
= external_sym(v
, &type
, r
);
7164 sym
->asm_label
= ad
.asm_label
;
/* __attribute__((alias("target"))): bind this symbol to the
   target's section/value/size */
7165 if (ad
.alias_target
) {
7170 alias_target
= sym_find(ad
.alias_target
);
7171 if (!alias_target
|| !alias_target
->c
)
7172 tcc_error("unsupported forward __alias__ attribute");
7173 esym
= &((ElfW(Sym
) *)symtab_section
->data
)[alias_target
->c
];
7174 tsec
.sh_num
= esym
->st_shndx
;
7175 put_extern_sym2(sym
, &tsec
, esym
->st_value
, esym
->st_size
, 0);
7178 if (type
.t
& VT_STATIC
)
/* allocate storage and parse the initializer (if any) */
7184 decl_initializer_alloc(&type
, &ad
, r
, has_init
, v
, l
);
/* in a for-init declaration, stop after the first declaration run */
7188 if (is_for_loop_init
)
/* Public entry point for declaration parsing.
   NOTE(review): the body is not visible in this extraction; it presumably
   just delegates to decl0(l, 0) — confirm against the full file. */
7201 ST_FUNC
void decl(int l
)
7206 /* ------------------------------------------------------------------------- */