2 * TCC - Tiny C Compiler
4 * Copyright (c) 2001-2004 Fabrice Bellard
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
23 /********************************************************/
24 /* global variables */
26 /* loc : local variable index
27 ind : output code index
29 anon_sym: anonymous symbol index
31 ST_DATA
int rsym
, anon_sym
, ind
, loc
;
33 ST_DATA Sym
*sym_free_first
;
34 ST_DATA
void **sym_pools
;
35 ST_DATA
int nb_sym_pools
;
37 ST_DATA Sym
*global_stack
;
38 ST_DATA Sym
*local_stack
;
39 ST_DATA Sym
*define_stack
;
40 ST_DATA Sym
*global_label_stack
;
41 ST_DATA Sym
*local_label_stack
;
42 static int local_scope
;
44 static int section_sym
;
46 ST_DATA
int vlas_in_scope
; /* number of VLAs that are currently in scope */
47 ST_DATA
int vla_sp_root_loc
; /* vla_sp_loc for SP before any VLAs were pushed */
48 ST_DATA
int vla_sp_loc
; /* Pointer to variable holding location to store stack pointer on the stack when modifying stack pointer */
50 ST_DATA SValue __vstack
[1+VSTACK_SIZE
], *vtop
, *pvtop
;
52 ST_DATA
int const_wanted
; /* true if constant wanted */
53 ST_DATA
int nocode_wanted
; /* true if no code generation wanted for an expression */
54 ST_DATA
int global_expr
; /* true if compound literals must be allocated globally (used during initializers parsing */
55 ST_DATA CType func_vt
; /* current function return type (used by return instruction) */
56 ST_DATA
int func_var
; /* true if current function is variadic (used by return instruction) */
58 ST_DATA
int last_line_num
, last_ind
, func_ind
; /* debug last line number and pc */
59 ST_DATA
const char *funcname
;
61 ST_DATA CType char_pointer_type
, func_old_type
, int_type
, size_type
;
63 ST_DATA
struct switch_t
{
67 } **p
; int n
; /* list of case ranges */
68 int def_sym
; /* default symbol */
69 } *cur_switch
; /* current switch */
71 /* ------------------------------------------------------------------------- */
73 static void gen_cast(CType
*type
);
74 static inline CType
*pointed_type(CType
*type
);
75 static int is_compatible_types(CType
*type1
, CType
*type2
);
76 static int parse_btype(CType
*type
, AttributeDef
*ad
);
77 static void type_decl(CType
*type
, AttributeDef
*ad
, int *v
, int td
);
78 static void parse_expr_type(CType
*type
);
79 static void decl_initializer(CType
*type
, Section
*sec
, unsigned long c
, int first
, int size_only
);
80 static void block(int *bsym
, int *csym
, int is_expr
);
81 static void decl_initializer_alloc(CType
*type
, AttributeDef
*ad
, int r
, int has_init
, int v
, int scope
);
82 static int decl0(int l
, int is_for_loop_init
);
83 static void expr_eq(void);
84 static void expr_lor_const(void);
85 static void unary_type(CType
*type
);
86 static void vla_runtime_type_size(CType
*type
, int *a
);
87 static void vla_sp_restore(void);
88 static void vla_sp_restore_root(void);
89 static int is_compatible_parameter_types(CType
*type1
, CType
*type2
);
90 static void expr_type(CType
*type
);
91 static inline int64_t expr_const64(void);
92 ST_FUNC
void vpush64(int ty
, unsigned long long v
);
93 ST_FUNC
void vpush(CType
*type
);
94 ST_FUNC
int gvtst(int inv
, int t
);
95 ST_FUNC
int is_btype_size(int bt
);
96 static void gen_inline_functions(TCCState
*s
);
98 ST_INLN
int is_float(int t
)
102 return bt
== VT_LDOUBLE
|| bt
== VT_DOUBLE
|| bt
== VT_FLOAT
|| bt
== VT_QFLOAT
;
105 /* we use our own 'finite' function to avoid potential problems with
106 non standard math libs */
107 /* XXX: endianness dependent */
108 ST_FUNC
int ieee_finite(double d
)
111 memcpy(p
, &d
, sizeof(double));
112 return ((unsigned)((p
[1] | 0x800fffff) + 1)) >> 31;
115 ST_FUNC
void test_lvalue(void)
117 if (!(vtop
->r
& VT_LVAL
))
121 ST_FUNC
void check_vstack(void)
124 tcc_error("internal compiler error: vstack leak (%d)", vtop
- pvtop
);
127 /* ------------------------------------------------------------------------- */
128 /* vstack debugging aid */
131 void pv (const char *lbl
, int a
, int b
)
134 for (i
= a
; i
< a
+ b
; ++i
) {
135 SValue
*p
= &vtop
[-i
];
136 printf("%s vtop[-%d] : type.t:%04x r:%04x r2:%04x c.i:%d\n",
137 lbl
, i
, p
->type
.t
, p
->r
, p
->r2
, (int)p
->c
.i
);
142 /* ------------------------------------------------------------------------- */
143 /* start of translation unit info */
144 ST_FUNC
void tcc_debug_start(TCCState
*s1
)
149 /* file info: full path + filename */
150 section_sym
= put_elf_sym(symtab_section
, 0, 0,
151 ELFW(ST_INFO
)(STB_LOCAL
, STT_SECTION
), 0,
152 text_section
->sh_num
, NULL
);
153 getcwd(buf
, sizeof(buf
));
155 normalize_slashes(buf
);
157 pstrcat(buf
, sizeof(buf
), "/");
158 put_stabs_r(buf
, N_SO
, 0, 0,
159 text_section
->data_offset
, text_section
, section_sym
);
160 put_stabs_r(file
->filename
, N_SO
, 0, 0,
161 text_section
->data_offset
, text_section
, section_sym
);
166 /* an elf symbol of type STT_FILE must be put so that STB_LOCAL
167 symbols can be safely used */
168 put_elf_sym(symtab_section
, 0, 0,
169 ELFW(ST_INFO
)(STB_LOCAL
, STT_FILE
), 0,
170 SHN_ABS
, file
->filename
);
173 /* put end of translation unit info */
174 ST_FUNC
void tcc_debug_end(TCCState
*s1
)
178 put_stabs_r(NULL
, N_SO
, 0, 0,
179 text_section
->data_offset
, text_section
, section_sym
);
183 /* generate line number info */
184 ST_FUNC
void tcc_debug_line(TCCState
*s1
)
188 if ((last_line_num
!= file
->line_num
|| last_ind
!= ind
)) {
189 put_stabn(N_SLINE
, 0, file
->line_num
, ind
- func_ind
);
191 last_line_num
= file
->line_num
;
195 /* put function symbol */
196 ST_FUNC
void tcc_debug_funcstart(TCCState
*s1
, Sym
*sym
)
204 /* XXX: we put here a dummy type */
205 snprintf(buf
, sizeof(buf
), "%s:%c1",
206 funcname
, sym
->type
.t
& VT_STATIC
? 'f' : 'F');
207 put_stabs_r(buf
, N_FUN
, 0, file
->line_num
, 0,
208 cur_text_section
, sym
->c
);
209 /* //gr gdb wants a line at the function */
210 put_stabn(N_SLINE
, 0, file
->line_num
, 0);
216 /* put function size */
217 ST_FUNC
void tcc_debug_funcend(TCCState
*s1
, int size
)
221 put_stabn(N_FUN
, 0, 0, size
);
224 /* ------------------------------------------------------------------------- */
225 ST_FUNC
void tccgen_start(TCCState
*s1
)
227 cur_text_section
= NULL
;
229 anon_sym
= SYM_FIRST_ANOM
;
234 /* define some often used types */
236 char_pointer_type
.t
= VT_BYTE
;
237 mk_pointer(&char_pointer_type
);
239 size_type
.t
= VT_INT
;
241 size_type
.t
= VT_LLONG
;
243 func_old_type
.t
= VT_FUNC
;
244 func_old_type
.ref
= sym_push(SYM_FIELD
, &int_type
, FUNC_CDECL
, FUNC_OLD
);
248 #ifdef TCC_TARGET_ARM
253 ST_FUNC
void tccgen_end(TCCState
*s1
)
255 gen_inline_functions(s1
);
257 /* end of translation unit info */
261 /* ------------------------------------------------------------------------- */
262 /* apply storage attibutes to Elf symbol */
264 static void update_storage(Sym
*sym
)
273 esym
= &((ElfW(Sym
) *)symtab_section
->data
)[sym
->c
];
276 esym
->st_other
= (esym
->st_other
& ~ELFW(ST_VISIBILITY
)(-1))
277 | ((t
& VT_VIS_MASK
) >> VT_VIS_SHIFT
);
280 esym
->st_info
= ELFW(ST_INFO
)(STB_WEAK
, ELFW(ST_TYPE
)(esym
->st_info
));
284 esym
->st_other
|= ST_PE_EXPORT
;
288 /* ------------------------------------------------------------------------- */
289 /* update sym->c so that it points to an external symbol in section
290 'section' with value 'value' */
292 ST_FUNC
void put_extern_sym2(Sym
*sym
, Section
*section
,
293 addr_t value
, unsigned long size
,
294 int can_add_underscore
)
296 int sym_type
, sym_bind
, sh_num
, info
, other
, t
;
300 #ifdef CONFIG_TCC_BCHECK
306 else if (section
== SECTION_ABS
)
308 else if (section
== SECTION_COMMON
)
311 sh_num
= section
->sh_num
;
314 name
= get_tok_str(sym
->v
, NULL
);
315 #ifdef CONFIG_TCC_BCHECK
316 if (tcc_state
->do_bounds_check
) {
317 /* XXX: avoid doing that for statics ? */
318 /* if bound checking is activated, we change some function
319 names by adding the "__bound" prefix */
322 /* XXX: we rely only on malloc hooks */
335 strcpy(buf
, "__bound_");
343 if ((t
& VT_BTYPE
) == VT_FUNC
) {
345 } else if ((t
& VT_BTYPE
) == VT_VOID
) {
346 sym_type
= STT_NOTYPE
;
348 sym_type
= STT_OBJECT
;
351 sym_bind
= STB_LOCAL
;
353 sym_bind
= STB_GLOBAL
;
356 if (sym_type
== STT_FUNC
&& sym
->type
.ref
) {
357 Sym
*ref
= sym
->type
.ref
;
358 if (ref
->a
.func_call
== FUNC_STDCALL
&& can_add_underscore
) {
359 sprintf(buf1
, "_%s@%d", name
, ref
->a
.func_args
* PTR_SIZE
);
361 other
|= ST_PE_STDCALL
;
362 can_add_underscore
= 0;
366 other
|= ST_PE_IMPORT
;
368 if (tcc_state
->leading_underscore
&& can_add_underscore
) {
370 pstrcpy(buf1
+ 1, sizeof(buf1
) - 1, name
);
374 name
= get_tok_str(sym
->asm_label
, NULL
);
375 info
= ELFW(ST_INFO
)(sym_bind
, sym_type
);
376 sym
->c
= set_elf_sym(symtab_section
, value
, size
, info
, other
, sh_num
, name
);
378 esym
= &((ElfW(Sym
) *)symtab_section
->data
)[sym
->c
];
379 esym
->st_value
= value
;
380 esym
->st_size
= size
;
381 esym
->st_shndx
= sh_num
;
386 ST_FUNC
void put_extern_sym(Sym
*sym
, Section
*section
,
387 addr_t value
, unsigned long size
)
389 put_extern_sym2(sym
, section
, value
, size
, 1);
392 /* add a new relocation entry to symbol 'sym' in section 's' */
393 ST_FUNC
void greloca(Section
*s
, Sym
*sym
, unsigned long offset
, int type
,
398 if (nocode_wanted
&& s
== cur_text_section
)
403 put_extern_sym(sym
, NULL
, 0, 0);
407 /* now we can add ELF relocation info */
408 put_elf_reloca(symtab_section
, s
, offset
, type
, c
, addend
);
411 ST_FUNC
void greloc(Section
*s
, Sym
*sym
, unsigned long offset
, int type
)
413 greloca(s
, sym
, offset
, type
, 0);
416 /* ------------------------------------------------------------------------- */
417 /* symbol allocator */
418 static Sym
*__sym_malloc(void)
420 Sym
*sym_pool
, *sym
, *last_sym
;
423 sym_pool
= tcc_malloc(SYM_POOL_NB
* sizeof(Sym
));
424 dynarray_add(&sym_pools
, &nb_sym_pools
, sym_pool
);
426 last_sym
= sym_free_first
;
428 for(i
= 0; i
< SYM_POOL_NB
; i
++) {
429 sym
->next
= last_sym
;
433 sym_free_first
= last_sym
;
437 static inline Sym
*sym_malloc(void)
441 sym
= sym_free_first
;
443 sym
= __sym_malloc();
444 sym_free_first
= sym
->next
;
447 sym
= tcc_malloc(sizeof(Sym
));
452 ST_INLN
void sym_free(Sym
*sym
)
455 sym
->next
= sym_free_first
;
456 sym_free_first
= sym
;
462 /* push, without hashing */
463 ST_FUNC Sym
*sym_push2(Sym
**ps
, int v
, int t
, long c
)
483 /* find a symbol and return its associated structure. 's' is the top
484 of the symbol stack */
485 ST_FUNC Sym
*sym_find2(Sym
*s
, int v
)
497 /* structure lookup */
498 ST_INLN Sym
*struct_find(int v
)
501 if ((unsigned)v
>= (unsigned)(tok_ident
- TOK_IDENT
))
503 return table_ident
[v
]->sym_struct
;
506 /* find an identifier */
507 ST_INLN Sym
*sym_find(int v
)
510 if ((unsigned)v
>= (unsigned)(tok_ident
- TOK_IDENT
))
512 return table_ident
[v
]->sym_identifier
;
515 /* push a given symbol on the symbol stack */
516 ST_FUNC Sym
*sym_push(int v
, CType
*type
, int r
, long c
)
525 s
= sym_push2(ps
, v
, type
->t
, c
);
526 s
->type
.ref
= type
->ref
;
528 /* don't record fields or anonymous symbols */
530 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
531 /* record symbol in token array */
532 ts
= table_ident
[(v
& ~SYM_STRUCT
) - TOK_IDENT
];
534 ps
= &ts
->sym_struct
;
536 ps
= &ts
->sym_identifier
;
539 s
->scope
= local_scope
;
540 if (s
->prev_tok
&& s
->prev_tok
->scope
== s
->scope
)
541 tcc_error("redeclaration of '%s'",
542 get_tok_str(v
& ~SYM_STRUCT
, NULL
));
547 /* push a global identifier */
548 ST_FUNC Sym
*global_identifier_push(int v
, int t
, long c
)
551 s
= sym_push2(&global_stack
, v
, t
, c
);
552 /* don't record anonymous symbol */
553 if (v
< SYM_FIRST_ANOM
) {
554 ps
= &table_ident
[v
- TOK_IDENT
]->sym_identifier
;
555 /* modify the top most local identifier, so that
556 sym_identifier will point to 's' when popped */
558 ps
= &(*ps
)->prev_tok
;
565 /* pop symbols until top reaches 'b'. If KEEP is non-zero don't really
566 pop them yet from the list, but do remove them from the token array. */
567 ST_FUNC
void sym_pop(Sym
**ptop
, Sym
*b
, int keep
)
577 /* remove symbol in token array */
579 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
580 ts
= table_ident
[(v
& ~SYM_STRUCT
) - TOK_IDENT
];
582 ps
= &ts
->sym_struct
;
584 ps
= &ts
->sym_identifier
;
595 /* ------------------------------------------------------------------------- */
597 static void vsetc(CType
*type
, int r
, CValue
*vc
)
601 if (vtop
>= vstack
+ (VSTACK_SIZE
- 1))
602 tcc_error("memory full (vstack)");
603 /* cannot let cpu flags if other instruction are generated. Also
604 avoid leaving VT_JMP anywhere except on the top of the stack
605 because it would complicate the code generator.
607 Don't do this when nocode_wanted. vtop might come from
608 !nocode_wanted regions (see 88_codeopt.c) and transforming
609 it to a register without actually generating code is wrong
610 as their value might still be used for real. All values
611 we push under nocode_wanted will eventually be popped
612 again, so that the VT_CMP/VT_JMP value will be in vtop
613 when code is unsuppressed again.
615 Same logic below in vswap(); */
616 if (vtop
>= vstack
&& !nocode_wanted
) {
617 v
= vtop
->r
& VT_VALMASK
;
618 if (v
== VT_CMP
|| (v
& ~1) == VT_JMP
)
630 ST_FUNC
void vswap(void)
633 /* cannot vswap cpu flags. See comment at vsetc() above */
634 if (vtop
>= vstack
&& !nocode_wanted
) {
635 int v
= vtop
->r
& VT_VALMASK
;
636 if (v
== VT_CMP
|| (v
& ~1) == VT_JMP
)
644 /* pop stack value */
645 ST_FUNC
void vpop(void)
648 v
= vtop
->r
& VT_VALMASK
;
649 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
650 /* for x86, we need to pop the FP stack */
652 o(0xd8dd); /* fstp %st(0) */
655 if (v
== VT_JMP
|| v
== VT_JMPI
) {
656 /* need to put correct jump if && or || without test */
662 /* push constant of type "type" with useless value */
663 ST_FUNC
void vpush(CType
*type
)
666 vsetc(type
, VT_CONST
, &cval
);
669 /* push integer constant */
670 ST_FUNC
void vpushi(int v
)
674 vsetc(&int_type
, VT_CONST
, &cval
);
677 /* push a pointer sized constant */
678 static void vpushs(addr_t v
)
682 vsetc(&size_type
, VT_CONST
, &cval
);
685 /* push arbitrary 64bit constant */
686 ST_FUNC
void vpush64(int ty
, unsigned long long v
)
693 vsetc(&ctype
, VT_CONST
, &cval
);
696 /* push long long constant */
697 static inline void vpushll(long long v
)
699 vpush64(VT_LLONG
, v
);
702 ST_FUNC
void vset(CType
*type
, int r
, long v
)
707 vsetc(type
, r
, &cval
);
710 static void vseti(int r
, int v
)
718 ST_FUNC
void vpushv(SValue
*v
)
720 if (vtop
>= vstack
+ (VSTACK_SIZE
- 1))
721 tcc_error("memory full (vstack)");
726 static void vdup(void)
731 /* rotate n first stack elements to the bottom
732 I1 ... In -> I2 ... In I1 [top is right]
734 ST_FUNC
void vrotb(int n
)
745 /* rotate the n elements before entry e towards the top
746 I1 ... In ... -> In I1 ... I(n-1) ... [top is right]
748 ST_FUNC
void vrote(SValue
*e
, int n
)
754 for(i
= 0;i
< n
- 1; i
++)
759 /* rotate n first stack elements to the top
760 I1 ... In -> In I1 ... I(n-1) [top is right]
762 ST_FUNC
void vrott(int n
)
767 /* push a symbol value of TYPE */
768 static inline void vpushsym(CType
*type
, Sym
*sym
)
772 vsetc(type
, VT_CONST
| VT_SYM
, &cval
);
776 /* Return a static symbol pointing to a section */
777 ST_FUNC Sym
*get_sym_ref(CType
*type
, Section
*sec
, unsigned long offset
, unsigned long size
)
783 sym
= global_identifier_push(v
, type
->t
| VT_STATIC
, 0);
784 sym
->type
.ref
= type
->ref
;
785 sym
->r
= VT_CONST
| VT_SYM
;
786 put_extern_sym(sym
, sec
, offset
, size
);
790 /* push a reference to a section offset by adding a dummy symbol */
791 static void vpush_ref(CType
*type
, Section
*sec
, unsigned long offset
, unsigned long size
)
793 vpushsym(type
, get_sym_ref(type
, sec
, offset
, size
));
796 /* define a new external reference to a symbol 'v' of type 'u' */
797 ST_FUNC Sym
*external_global_sym(int v
, CType
*type
, int r
)
803 /* push forward reference */
804 s
= global_identifier_push(v
, type
->t
| VT_EXTERN
, 0);
805 s
->type
.ref
= type
->ref
;
806 s
->r
= r
| VT_CONST
| VT_SYM
;
811 /* Merge some storage attributes. */
812 static void patch_storage(Sym
*sym
, CType
*type
)
815 if (!is_compatible_types(&sym
->type
, type
))
816 tcc_error("incompatible types for redefinition of '%s'",
817 get_tok_str(sym
->v
, NULL
));
820 if ((sym
->type
.t
^ t
) & VT_IMPORT
)
821 tcc_error("incompatible dll linkage for redefinition of '%s'",
822 get_tok_str(sym
->v
, NULL
));
824 sym
->type
.t
|= t
& (VT_EXPORT
|VT_WEAK
);
825 if (t
& VT_VIS_MASK
) {
826 int vis
= sym
->type
.t
& VT_VIS_MASK
;
827 int vis2
= t
& VT_VIS_MASK
;
828 if (vis
== (STV_DEFAULT
<< VT_VIS_SHIFT
))
830 else if (vis2
!= (STV_DEFAULT
<< VT_VIS_SHIFT
))
831 vis
= (vis
< vis2
) ? vis
: vis2
;
832 sym
->type
.t
= (sym
->type
.t
& ~VT_VIS_MASK
) | vis
;
836 /* define a new external reference to a symbol 'v' */
837 static Sym
*external_sym(int v
, CType
*type
, int r
)
842 /* push forward reference */
843 s
= sym_push(v
, type
, r
| VT_CONST
| VT_SYM
, 0);
844 s
->type
.t
|= VT_EXTERN
;
846 if (s
->type
.ref
== func_old_type
.ref
) {
847 s
->type
.ref
= type
->ref
;
848 s
->r
= r
| VT_CONST
| VT_SYM
;
849 s
->type
.t
|= VT_EXTERN
;
851 patch_storage(s
, type
);
857 /* push a reference to global symbol v */
858 ST_FUNC
void vpush_global_sym(CType
*type
, int v
)
860 vpushsym(type
, external_global_sym(v
, type
, 0));
863 /* save registers up to (vtop - n) stack entry */
864 ST_FUNC
void save_regs(int n
)
867 for(p
= vstack
, p1
= vtop
- n
; p
<= p1
; p
++)
871 /* save r to the memory stack, and mark it as being free */
872 ST_FUNC
void save_reg(int r
)
874 save_reg_upstack(r
, 0);
877 /* save r to the memory stack, and mark it as being free,
878 if seen up to (vtop - n) stack entry */
879 ST_FUNC
void save_reg_upstack(int r
, int n
)
881 int l
, saved
, size
, align
;
885 if ((r
&= VT_VALMASK
) >= VT_CONST
)
890 /* modify all stack values */
893 for(p
= vstack
, p1
= vtop
- n
; p
<= p1
; p
++) {
894 if ((p
->r
& VT_VALMASK
) == r
||
895 ((p
->type
.t
& VT_BTYPE
) == VT_LLONG
&& (p
->r2
& VT_VALMASK
) == r
)) {
896 /* must save value on stack if not already done */
898 /* NOTE: must reload 'r' because r might be equal to r2 */
899 r
= p
->r
& VT_VALMASK
;
900 /* store register in the stack */
902 if ((p
->r
& VT_LVAL
) ||
903 (!is_float(type
->t
) && (type
->t
& VT_BTYPE
) != VT_LLONG
))
904 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
905 type
= &char_pointer_type
;
909 size
= type_size(type
, &align
);
910 loc
= (loc
- size
) & -align
;
912 sv
.r
= VT_LOCAL
| VT_LVAL
;
915 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
916 /* x86 specific: need to pop fp register ST0 if saved */
918 o(0xd8dd); /* fstp %st(0) */
921 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
922 /* special long long case */
923 if ((type
->t
& VT_BTYPE
) == VT_LLONG
) {
931 /* mark that stack entry as being saved on the stack */
932 if (p
->r
& VT_LVAL
) {
933 /* also clear the bounded flag because the
934 relocation address of the function was stored in
936 p
->r
= (p
->r
& ~(VT_VALMASK
| VT_BOUNDED
)) | VT_LLOCAL
;
938 p
->r
= lvalue_type(p
->type
.t
) | VT_LOCAL
;
946 #ifdef TCC_TARGET_ARM
947 /* find a register of class 'rc2' with at most one reference on stack.
948 * If none, call get_reg(rc) */
949 ST_FUNC
int get_reg_ex(int rc
, int rc2
)
954 for(r
=0;r
<NB_REGS
;r
++) {
955 if (reg_classes
[r
] & rc2
) {
958 for(p
= vstack
; p
<= vtop
; p
++) {
959 if ((p
->r
& VT_VALMASK
) == r
||
960 (p
->r2
& VT_VALMASK
) == r
)
971 /* find a free register of class 'rc'. If none, save one register */
972 ST_FUNC
int get_reg(int rc
)
977 /* find a free register */
978 for(r
=0;r
<NB_REGS
;r
++) {
979 if (reg_classes
[r
] & rc
) {
982 for(p
=vstack
;p
<=vtop
;p
++) {
983 if ((p
->r
& VT_VALMASK
) == r
||
984 (p
->r2
& VT_VALMASK
) == r
)
992 /* no register left : free the first one on the stack (VERY
993 IMPORTANT to start from the bottom to ensure that we don't
994 spill registers used in gen_opi()) */
995 for(p
=vstack
;p
<=vtop
;p
++) {
996 /* look at second register (if long long) */
997 r
= p
->r2
& VT_VALMASK
;
998 if (r
< VT_CONST
&& (reg_classes
[r
] & rc
))
1000 r
= p
->r
& VT_VALMASK
;
1001 if (r
< VT_CONST
&& (reg_classes
[r
] & rc
)) {
1007 /* Should never comes here */
1011 /* move register 's' (of type 't') to 'r', and flush previous value of r to memory
1013 static void move_reg(int r
, int s
, int t
)
1027 /* get address of vtop (vtop MUST BE an lvalue) */
1028 ST_FUNC
void gaddrof(void)
1030 if (vtop
->r
& VT_REF
)
1032 vtop
->r
&= ~VT_LVAL
;
1033 /* tricky: if saved lvalue, then we can go back to lvalue */
1034 if ((vtop
->r
& VT_VALMASK
) == VT_LLOCAL
)
1035 vtop
->r
= (vtop
->r
& ~(VT_VALMASK
| VT_LVAL_TYPE
)) | VT_LOCAL
| VT_LVAL
;
1040 #ifdef CONFIG_TCC_BCHECK
1041 /* generate lvalue bound code */
1042 static void gbound(void)
1047 vtop
->r
&= ~VT_MUSTBOUND
;
1048 /* if lvalue, then use checking code before dereferencing */
1049 if (vtop
->r
& VT_LVAL
) {
1050 /* if not VT_BOUNDED value, then make one */
1051 if (!(vtop
->r
& VT_BOUNDED
)) {
1052 lval_type
= vtop
->r
& (VT_LVAL_TYPE
| VT_LVAL
);
1053 /* must save type because we must set it to int to get pointer */
1055 vtop
->type
.t
= VT_PTR
;
1058 gen_bounded_ptr_add();
1059 vtop
->r
|= lval_type
;
1062 /* then check for dereferencing */
1063 gen_bounded_ptr_deref();
1068 /* store vtop a register belonging to class 'rc'. lvalues are
1069 converted to values. Cannot be used if cannot be converted to
1070 register value (such as structures). */
1071 ST_FUNC
int gv(int rc
)
1073 int r
, bit_pos
, bit_size
, size
, align
, i
;
1076 /* NOTE: get_reg can modify vstack[] */
1077 if (vtop
->type
.t
& VT_BITFIELD
) {
1080 bit_pos
= (vtop
->type
.t
>> VT_STRUCT_SHIFT
) & 0x3f;
1081 bit_size
= (vtop
->type
.t
>> (VT_STRUCT_SHIFT
+ 6)) & 0x3f;
1082 /* remove bit field info to avoid loops */
1083 vtop
->type
.t
&= ~VT_BITFIELD
& ((1 << VT_STRUCT_SHIFT
) - 1);
1084 /* cast to int to propagate signedness in following ops */
1085 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
) {
1090 if((vtop
->type
.t
& VT_UNSIGNED
) ||
1091 (vtop
->type
.t
& VT_BTYPE
) == VT_BOOL
)
1092 type
.t
|= VT_UNSIGNED
;
1094 /* generate shifts */
1095 vpushi(bits
- (bit_pos
+ bit_size
));
1097 vpushi(bits
- bit_size
);
1098 /* NOTE: transformed to SHR if unsigned */
1102 if (is_float(vtop
->type
.t
) &&
1103 (vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
) {
1106 unsigned long offset
;
1107 #if defined(TCC_TARGET_ARM) && !defined(TCC_ARM_VFP)
1111 /* XXX: unify with initializers handling ? */
1112 /* CPUs usually cannot use float constants, so we store them
1113 generically in data segment */
1114 size
= type_size(&vtop
->type
, &align
);
1115 offset
= (data_section
->data_offset
+ align
- 1) & -align
;
1116 data_section
->data_offset
= offset
;
1117 /* XXX: not portable yet */
1118 #if defined(__i386__) || defined(__x86_64__)
1119 /* Zero pad x87 tenbyte long doubles */
1120 if (size
== LDOUBLE_SIZE
) {
1121 vtop
->c
.tab
[2] &= 0xffff;
1122 #if LDOUBLE_SIZE == 16
1127 ptr
= section_ptr_add(data_section
, size
);
1129 #if defined(TCC_TARGET_ARM) && !defined(TCC_ARM_VFP)
1133 ptr
[i
] = vtop
->c
.tab
[size
-1-i
];
1137 ptr
[i
] = vtop
->c
.tab
[i
];
1138 sym
= get_sym_ref(&vtop
->type
, data_section
, offset
, size
<< 2);
1139 vtop
->r
|= VT_LVAL
| VT_SYM
;
1143 #ifdef CONFIG_TCC_BCHECK
1144 if (vtop
->r
& VT_MUSTBOUND
)
1148 r
= vtop
->r
& VT_VALMASK
;
1149 rc2
= (rc
& RC_FLOAT
) ? RC_FLOAT
: RC_INT
;
1150 #ifndef TCC_TARGET_ARM64
1153 #ifdef TCC_TARGET_X86_64
1154 else if (rc
== RC_FRET
)
1158 /* need to reload if:
1160 - lvalue (need to dereference pointer)
1161 - already a register, but not in the right class */
1163 || (vtop
->r
& VT_LVAL
)
1164 || !(reg_classes
[r
] & rc
)
1165 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
1166 || ((vtop
->type
.t
& VT_BTYPE
) == VT_QLONG
&& !(reg_classes
[vtop
->r2
] & rc2
))
1167 || ((vtop
->type
.t
& VT_BTYPE
) == VT_QFLOAT
&& !(reg_classes
[vtop
->r2
] & rc2
))
1169 || ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
&& !(reg_classes
[vtop
->r2
] & rc2
))
1174 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
1175 if (((vtop
->type
.t
& VT_BTYPE
) == VT_QLONG
) || ((vtop
->type
.t
& VT_BTYPE
) == VT_QFLOAT
)) {
1176 int addr_type
= VT_LLONG
, load_size
= 8, load_type
= ((vtop
->type
.t
& VT_BTYPE
) == VT_QLONG
) ? VT_LLONG
: VT_DOUBLE
;
1178 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
) {
1179 int addr_type
= VT_INT
, load_size
= 4, load_type
= VT_INT
;
1180 unsigned long long ll
;
1182 int r2
, original_type
;
1183 original_type
= vtop
->type
.t
;
1184 /* two register type load : expand to two words
1186 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
1187 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
) {
1190 vtop
->c
.i
= ll
; /* first word */
1192 vtop
->r
= r
; /* save register value */
1193 vpushi(ll
>> 32); /* second word */
1196 if (vtop
->r
& VT_LVAL
) {
1197 /* We do not want to modifier the long long
1198 pointer here, so the safest (and less
1199 efficient) is to save all the other registers
1200 in the stack. XXX: totally inefficient. */
1204 /* lvalue_save: save only if used further down the stack */
1205 save_reg_upstack(vtop
->r
, 1);
1207 /* load from memory */
1208 vtop
->type
.t
= load_type
;
1211 vtop
[-1].r
= r
; /* save register value */
1212 /* increment pointer to get second word */
1213 vtop
->type
.t
= addr_type
;
1218 vtop
->type
.t
= load_type
;
1220 /* move registers */
1223 vtop
[-1].r
= r
; /* save register value */
1224 vtop
->r
= vtop
[-1].r2
;
1226 /* Allocate second register. Here we rely on the fact that
1227 get_reg() tries first to free r2 of an SValue. */
1231 /* write second register */
1233 vtop
->type
.t
= original_type
;
1234 } else if ((vtop
->r
& VT_LVAL
) && !is_float(vtop
->type
.t
)) {
1236 /* lvalue of scalar type : need to use lvalue type
1237 because of possible cast */
1240 /* compute memory access type */
1241 if (vtop
->r
& VT_REF
)
1242 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
1247 else if (vtop
->r
& VT_LVAL_BYTE
)
1249 else if (vtop
->r
& VT_LVAL_SHORT
)
1251 if (vtop
->r
& VT_LVAL_UNSIGNED
)
1255 /* restore wanted type */
1258 /* one register type load */
1263 #ifdef TCC_TARGET_C67
1264 /* uses register pairs for doubles */
1265 if ((vtop
->type
.t
& VT_BTYPE
) == VT_DOUBLE
)
1272 /* generate vtop[-1] and vtop[0] in resp. classes rc1 and rc2 */
1273 ST_FUNC
void gv2(int rc1
, int rc2
)
1277 /* generate more generic register first. But VT_JMP or VT_CMP
1278 values must be generated first in all cases to avoid possible
1280 v
= vtop
[0].r
& VT_VALMASK
;
1281 if (v
!= VT_CMP
&& (v
& ~1) != VT_JMP
&& rc1
<= rc2
) {
1286 /* test if reload is needed for first register */
1287 if ((vtop
[-1].r
& VT_VALMASK
) >= VT_CONST
) {
1297 /* test if reload is needed for first register */
1298 if ((vtop
[0].r
& VT_VALMASK
) >= VT_CONST
) {
1304 #ifndef TCC_TARGET_ARM64
1305 /* wrapper around RC_FRET to return a register by type */
1306 static int rc_fret(int t
)
1308 #ifdef TCC_TARGET_X86_64
1309 if (t
== VT_LDOUBLE
) {
1317 /* wrapper around REG_FRET to return a register by type */
1318 static int reg_fret(int t
)
1320 #ifdef TCC_TARGET_X86_64
1321 if (t
== VT_LDOUBLE
) {
1328 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
1329 /* expand 64bit on stack in two ints */
1330 static void lexpand(void)
1333 u
= vtop
->type
.t
& (VT_DEFSIGN
| VT_UNSIGNED
);
1334 v
= vtop
->r
& (VT_VALMASK
| VT_LVAL
);
1335 if (v
== VT_CONST
) {
1338 } else if (v
== (VT_LVAL
|VT_CONST
) || v
== (VT_LVAL
|VT_LOCAL
)) {
1344 vtop
[0].r
= vtop
[-1].r2
;
1345 vtop
[0].r2
= vtop
[-1].r2
= VT_CONST
;
1347 vtop
[0].type
.t
= vtop
[-1].type
.t
= VT_INT
| u
;
1351 #ifdef TCC_TARGET_ARM
1352 /* expand long long on stack */
1353 ST_FUNC
void lexpand_nr(void)
1357 u
= vtop
->type
.t
& (VT_DEFSIGN
| VT_UNSIGNED
);
1359 vtop
->r2
= VT_CONST
;
1360 vtop
->type
.t
= VT_INT
| u
;
1361 v
=vtop
[-1].r
& (VT_VALMASK
| VT_LVAL
);
1362 if (v
== VT_CONST
) {
1363 vtop
[-1].c
.i
= vtop
->c
.i
;
1364 vtop
->c
.i
= vtop
->c
.i
>> 32;
1366 } else if (v
== (VT_LVAL
|VT_CONST
) || v
== (VT_LVAL
|VT_LOCAL
)) {
1368 vtop
->r
= vtop
[-1].r
;
1369 } else if (v
> VT_CONST
) {
1373 vtop
->r
= vtop
[-1].r2
;
1374 vtop
[-1].r2
= VT_CONST
;
1375 vtop
[-1].type
.t
= VT_INT
| u
;
1379 #if !defined(TCC_TARGET_X86_64) && !defined(TCC_TARGET_ARM64)
1380 /* build a long long from two ints */
1381 static void lbuild(int t
)
1383 gv2(RC_INT
, RC_INT
);
1384 vtop
[-1].r2
= vtop
[0].r
;
1385 vtop
[-1].type
.t
= t
;
1390 /* convert stack entry to register and duplicate its value in another
1392 static void gv_dup(void)
1398 #if !defined(TCC_TARGET_X86_64) && !defined(TCC_TARGET_ARM64)
1399 if ((t
& VT_BTYPE
) == VT_LLONG
) {
1406 /* stack: H L L1 H1 */
1416 /* duplicate value */
1421 #ifdef TCC_TARGET_X86_64
1422 if ((t
& VT_BTYPE
) == VT_LDOUBLE
) {
1432 load(r1
, &sv
); /* move r to r1 */
1434 /* duplicates value */
1440 /* Generate value test
1442 * Generate a test for any value (jump, comparison and integers) */
1443 ST_FUNC
int gvtst(int inv
, int t
)
1445 int v
= vtop
->r
& VT_VALMASK
;
1446 if (v
!= VT_CMP
&& v
!= VT_JMP
&& v
!= VT_JMPI
) {
1450 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
1451 /* constant jmp optimization */
1452 if ((vtop
->c
.i
!= 0) != inv
)
1457 return gtst(inv
, t
);
1460 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
1461 /* generate CPU independent (unsigned) long long operations */
1462 static void gen_opl(int op
)
1464 int t
, a
, b
, op1
, c
, i
;
1466 unsigned short reg_iret
= REG_IRET
;
1467 unsigned short reg_lret
= REG_LRET
;
1473 func
= TOK___divdi3
;
1476 func
= TOK___udivdi3
;
1479 func
= TOK___moddi3
;
1482 func
= TOK___umoddi3
;
1489 /* call generic long long function */
1490 vpush_global_sym(&func_old_type
, func
);
1495 vtop
->r2
= reg_lret
;
1503 //pv("gen_opl A",0,2);
1509 /* stack: L1 H1 L2 H2 */
1514 vtop
[-2] = vtop
[-3];
1517 /* stack: H1 H2 L1 L2 */
1518 //pv("gen_opl B",0,4);
1524 /* stack: H1 H2 L1 L2 ML MH */
1527 /* stack: ML MH H1 H2 L1 L2 */
1531 /* stack: ML MH H1 L2 H2 L1 */
1536 /* stack: ML MH M1 M2 */
1539 } else if (op
== '+' || op
== '-') {
1540 /* XXX: add non carry method too (for MIPS or alpha) */
1546 /* stack: H1 H2 (L1 op L2) */
1549 gen_op(op1
+ 1); /* TOK_xxxC2 */
1552 /* stack: H1 H2 (L1 op L2) */
1555 /* stack: (L1 op L2) H1 H2 */
1557 /* stack: (L1 op L2) (H1 op H2) */
1565 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
1566 t
= vtop
[-1].type
.t
;
1570 /* stack: L H shift */
1572 /* constant: simpler */
1573 /* NOTE: all comments are for SHL. the other cases are
1574 done by swaping words */
1585 if (op
!= TOK_SAR
) {
1618 /* XXX: should provide a faster fallback on x86 ? */
1621 func
= TOK___ashrdi3
;
1624 func
= TOK___lshrdi3
;
1627 func
= TOK___ashldi3
;
1633 /* compare operations */
1639 /* stack: L1 H1 L2 H2 */
1641 vtop
[-1] = vtop
[-2];
1643 /* stack: L1 L2 H1 H2 */
1646 /* when values are equal, we need to compare low words. since
1647 the jump is inverted, we invert the test too. */
1650 else if (op1
== TOK_GT
)
1652 else if (op1
== TOK_ULT
)
1654 else if (op1
== TOK_UGT
)
1664 /* generate non equal test */
1670 /* compare low. Always unsigned */
1674 else if (op1
== TOK_LE
)
1676 else if (op1
== TOK_GT
)
1678 else if (op1
== TOK_GE
)
1689 static uint64_t gen_opic_sdiv(uint64_t a
, uint64_t b
)
1691 uint64_t x
= (a
>> 63 ? -a
: a
) / (b
>> 63 ? -b
: b
);
1692 return (a
^ b
) >> 63 ? -x
: x
;
1695 static int gen_opic_lt(uint64_t a
, uint64_t b
)
1697 return (a
^ (uint64_t)1 << 63) < (b
^ (uint64_t)1 << 63);
1700 /* handle integer constant optimizations and various machine
1702 static void gen_opic(int op
)
1704 SValue
*v1
= vtop
- 1;
1706 int t1
= v1
->type
.t
& VT_BTYPE
;
1707 int t2
= v2
->type
.t
& VT_BTYPE
;
1708 int c1
= (v1
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
1709 int c2
= (v2
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
1710 uint64_t l1
= c1
? v1
->c
.i
: 0;
1711 uint64_t l2
= c2
? v2
->c
.i
: 0;
1712 int shm
= (t1
== VT_LLONG
) ? 63 : 31;
1714 if (t1
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t1
!= VT_PTR
))
1715 l1
= ((uint32_t)l1
|
1716 (v1
->type
.t
& VT_UNSIGNED
? 0 : -(l1
& 0x80000000)));
1717 if (t2
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t2
!= VT_PTR
))
1718 l2
= ((uint32_t)l2
|
1719 (v2
->type
.t
& VT_UNSIGNED
? 0 : -(l2
& 0x80000000)));
1723 case '+': l1
+= l2
; break;
1724 case '-': l1
-= l2
; break;
1725 case '&': l1
&= l2
; break;
1726 case '^': l1
^= l2
; break;
1727 case '|': l1
|= l2
; break;
1728 case '*': l1
*= l2
; break;
1735 /* if division by zero, generate explicit division */
1738 tcc_error("division by zero in constant");
1742 default: l1
= gen_opic_sdiv(l1
, l2
); break;
1743 case '%': l1
= l1
- l2
* gen_opic_sdiv(l1
, l2
); break;
1744 case TOK_UDIV
: l1
= l1
/ l2
; break;
1745 case TOK_UMOD
: l1
= l1
% l2
; break;
1748 case TOK_SHL
: l1
<<= (l2
& shm
); break;
1749 case TOK_SHR
: l1
>>= (l2
& shm
); break;
1751 l1
= (l1
>> 63) ? ~(~l1
>> (l2
& shm
)) : l1
>> (l2
& shm
);
1754 case TOK_ULT
: l1
= l1
< l2
; break;
1755 case TOK_UGE
: l1
= l1
>= l2
; break;
1756 case TOK_EQ
: l1
= l1
== l2
; break;
1757 case TOK_NE
: l1
= l1
!= l2
; break;
1758 case TOK_ULE
: l1
= l1
<= l2
; break;
1759 case TOK_UGT
: l1
= l1
> l2
; break;
1760 case TOK_LT
: l1
= gen_opic_lt(l1
, l2
); break;
1761 case TOK_GE
: l1
= !gen_opic_lt(l1
, l2
); break;
1762 case TOK_LE
: l1
= !gen_opic_lt(l2
, l1
); break;
1763 case TOK_GT
: l1
= gen_opic_lt(l2
, l1
); break;
1765 case TOK_LAND
: l1
= l1
&& l2
; break;
1766 case TOK_LOR
: l1
= l1
|| l2
; break;
1770 if (t1
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t1
!= VT_PTR
))
1771 l1
= ((uint32_t)l1
|
1772 (v1
->type
.t
& VT_UNSIGNED
? 0 : -(l1
& 0x80000000)));
1776 /* if commutative ops, put c2 as constant */
1777 if (c1
&& (op
== '+' || op
== '&' || op
== '^' ||
1778 op
== '|' || op
== '*')) {
1780 c2
= c1
; //c = c1, c1 = c2, c2 = c;
1781 l2
= l1
; //l = l1, l1 = l2, l2 = l;
1783 if (!const_wanted
&&
1785 (op
== TOK_SHL
|| op
== TOK_SHR
|| op
== TOK_SAR
)) ||
1786 (l1
== -1 && op
== TOK_SAR
))) {
1787 /* treat (0 << x), (0 >> x) and (-1 >> x) as constant */
1789 } else if (!const_wanted
&&
1790 c2
&& ((l2
== 0 && (op
== '&' || op
== '*')) ||
1791 (l2
== -1 && op
== '|') ||
1792 (l2
== 0xffffffff && t2
!= VT_LLONG
&& op
== '|') ||
1793 (l2
== 1 && (op
== '%' || op
== TOK_UMOD
)))) {
1794 /* treat (x & 0), (x * 0), (x | -1) and (x % 1) as constant */
1799 } else if (c2
&& (((op
== '*' || op
== '/' || op
== TOK_UDIV
||
1802 ((op
== '+' || op
== '-' || op
== '|' || op
== '^' ||
1803 op
== TOK_SHL
|| op
== TOK_SHR
|| op
== TOK_SAR
) &&
1807 /* filter out NOP operations like x*1, x-0, x&-1... */
1809 } else if (c2
&& (op
== '*' || op
== TOK_PDIV
|| op
== TOK_UDIV
)) {
1810 /* try to use shifts instead of muls or divs */
1811 if (l2
> 0 && (l2
& (l2
- 1)) == 0) {
1820 else if (op
== TOK_PDIV
)
1826 } else if (c2
&& (op
== '+' || op
== '-') &&
1827 (((vtop
[-1].r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == (VT_CONST
| VT_SYM
))
1828 || (vtop
[-1].r
& (VT_VALMASK
| VT_LVAL
)) == VT_LOCAL
)) {
1829 /* symbol + constant case */
1833 /* The backends can't always deal with addends to symbols
1834 larger than +-1<<31. Don't construct such. */
1841 /* call low level op generator */
1842 if (t1
== VT_LLONG
|| t2
== VT_LLONG
||
1843 (PTR_SIZE
== 8 && (t1
== VT_PTR
|| t2
== VT_PTR
)))
1851 /* generate a floating point operation with constant propagation */
1852 static void gen_opif(int op
)
1860 /* currently, we cannot do computations with forward symbols */
1861 c1
= (v1
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
1862 c2
= (v2
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
1864 if (v1
->type
.t
== VT_FLOAT
) {
1867 } else if (v1
->type
.t
== VT_DOUBLE
) {
1875 /* NOTE: we only do constant propagation if finite number (not
1876 NaN or infinity) (ANSI spec) */
1877 if (!ieee_finite(f1
) || !ieee_finite(f2
))
1881 case '+': f1
+= f2
; break;
1882 case '-': f1
-= f2
; break;
1883 case '*': f1
*= f2
; break;
1887 tcc_error("division by zero in constant");
1892 /* XXX: also handles tests ? */
1896 /* XXX: overflow test ? */
1897 if (v1
->type
.t
== VT_FLOAT
) {
1899 } else if (v1
->type
.t
== VT_DOUBLE
) {
1911 static int pointed_size(CType
*type
)
1914 return type_size(pointed_type(type
), &align
);
1917 static void vla_runtime_pointed_size(CType
*type
)
1920 vla_runtime_type_size(pointed_type(type
), &align
);
1923 static inline int is_null_pointer(SValue
*p
)
1925 if ((p
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) != VT_CONST
)
1927 return ((p
->type
.t
& VT_BTYPE
) == VT_INT
&& (uint32_t)p
->c
.i
== 0) ||
1928 ((p
->type
.t
& VT_BTYPE
) == VT_LLONG
&& p
->c
.i
== 0) ||
1929 ((p
->type
.t
& VT_BTYPE
) == VT_PTR
&&
1930 (PTR_SIZE
== 4 ? (uint32_t)p
->c
.i
== 0 : p
->c
.i
== 0));
1933 static inline int is_integer_btype(int bt
)
1935 return (bt
== VT_BYTE
|| bt
== VT_SHORT
||
1936 bt
== VT_INT
|| bt
== VT_LLONG
);
1939 /* check types for comparison or subtraction of pointers */
1940 static void check_comparison_pointer_types(SValue
*p1
, SValue
*p2
, int op
)
1942 CType
*type1
, *type2
, tmp_type1
, tmp_type2
;
1945 /* null pointers are accepted for all comparisons as gcc */
1946 if (is_null_pointer(p1
) || is_null_pointer(p2
))
1950 bt1
= type1
->t
& VT_BTYPE
;
1951 bt2
= type2
->t
& VT_BTYPE
;
1952 /* accept comparison between pointer and integer with a warning */
1953 if ((is_integer_btype(bt1
) || is_integer_btype(bt2
)) && op
!= '-') {
1954 if (op
!= TOK_LOR
&& op
!= TOK_LAND
)
1955 tcc_warning("comparison between pointer and integer");
1959 /* both must be pointers or implicit function pointers */
1960 if (bt1
== VT_PTR
) {
1961 type1
= pointed_type(type1
);
1962 } else if (bt1
!= VT_FUNC
)
1963 goto invalid_operands
;
1965 if (bt2
== VT_PTR
) {
1966 type2
= pointed_type(type2
);
1967 } else if (bt2
!= VT_FUNC
) {
1969 tcc_error("invalid operands to binary %s", get_tok_str(op
, NULL
));
1971 if ((type1
->t
& VT_BTYPE
) == VT_VOID
||
1972 (type2
->t
& VT_BTYPE
) == VT_VOID
)
1976 tmp_type1
.t
&= ~(VT_DEFSIGN
| VT_UNSIGNED
| VT_CONSTANT
| VT_VOLATILE
);
1977 tmp_type2
.t
&= ~(VT_DEFSIGN
| VT_UNSIGNED
| VT_CONSTANT
| VT_VOLATILE
);
1978 if (!is_compatible_types(&tmp_type1
, &tmp_type2
)) {
1979 /* gcc-like error if '-' is used */
1981 goto invalid_operands
;
1983 tcc_warning("comparison of distinct pointer types lacks a cast");
1987 /* generic gen_op: handles types problems */
1988 ST_FUNC
void gen_op(int op
)
1990 int u
, t1
, t2
, bt1
, bt2
, t
;
1994 t1
= vtop
[-1].type
.t
;
1995 t2
= vtop
[0].type
.t
;
1996 bt1
= t1
& VT_BTYPE
;
1997 bt2
= t2
& VT_BTYPE
;
1999 if (bt1
== VT_STRUCT
|| bt2
== VT_STRUCT
) {
2000 tcc_error("operation on a struct");
2001 } else if (bt1
== VT_FUNC
|| bt2
== VT_FUNC
) {
2002 if (bt2
== VT_FUNC
) {
2003 mk_pointer(&vtop
->type
);
2006 if (bt1
== VT_FUNC
) {
2008 mk_pointer(&vtop
->type
);
2013 } else if (bt1
== VT_PTR
|| bt2
== VT_PTR
) {
2014 /* at least one operand is a pointer */
2015 /* relationnal op: must be both pointers */
2016 if (op
>= TOK_ULT
&& op
<= TOK_LOR
) {
2017 check_comparison_pointer_types(vtop
- 1, vtop
, op
);
2018 /* pointers are handled are unsigned */
2019 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
2020 t
= VT_LLONG
| VT_UNSIGNED
;
2022 t
= VT_INT
| VT_UNSIGNED
;
2026 /* if both pointers, then it must be the '-' op */
2027 if (bt1
== VT_PTR
&& bt2
== VT_PTR
) {
2029 tcc_error("cannot use pointers here");
2030 check_comparison_pointer_types(vtop
- 1, vtop
, op
);
2031 /* XXX: check that types are compatible */
2032 if (vtop
[-1].type
.t
& VT_VLA
) {
2033 vla_runtime_pointed_size(&vtop
[-1].type
);
2035 vpushi(pointed_size(&vtop
[-1].type
));
2039 /* set to integer type */
2040 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
2041 vtop
->type
.t
= VT_LLONG
;
2043 vtop
->type
.t
= VT_INT
;
2048 /* exactly one pointer : must be '+' or '-'. */
2049 if (op
!= '-' && op
!= '+')
2050 tcc_error("cannot use pointers here");
2051 /* Put pointer as first operand */
2052 if (bt2
== VT_PTR
) {
2054 t
= t1
, t1
= t2
, t2
= t
;
2057 if ((vtop
[0].type
.t
& VT_BTYPE
) == VT_LLONG
)
2058 /* XXX: truncate here because gen_opl can't handle ptr + long long */
2059 gen_cast(&int_type
);
2061 type1
= vtop
[-1].type
;
2062 type1
.t
&= ~VT_ARRAY
;
2063 if (vtop
[-1].type
.t
& VT_VLA
)
2064 vla_runtime_pointed_size(&vtop
[-1].type
);
2066 u
= pointed_size(&vtop
[-1].type
);
2068 tcc_error("unknown array element size");
2069 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
2072 /* XXX: cast to int ? (long long case) */
2078 /* #ifdef CONFIG_TCC_BCHECK
2079 The main reason to removing this code:
2086 fprintf(stderr, "v+i-j = %p\n", v+i-j);
2087 fprintf(stderr, "v+(i-j) = %p\n", v+(i-j));
2089 When this code is on. then the output looks like
2091 v+(i-j) = 0xbff84000
2093 /* if evaluating constant expression, no code should be
2094 generated, so no bound check */
2095 if (tcc_state
->do_bounds_check
&& !const_wanted
) {
2096 /* if bounded pointers, we generate a special code to
2103 gen_bounded_ptr_add();
2109 /* put again type if gen_opic() swaped operands */
2112 } else if (is_float(bt1
) || is_float(bt2
)) {
2113 /* compute bigger type and do implicit casts */
2114 if (bt1
== VT_LDOUBLE
|| bt2
== VT_LDOUBLE
) {
2116 } else if (bt1
== VT_DOUBLE
|| bt2
== VT_DOUBLE
) {
2121 /* floats can only be used for a few operations */
2122 if (op
!= '+' && op
!= '-' && op
!= '*' && op
!= '/' &&
2123 (op
< TOK_ULT
|| op
> TOK_GT
))
2124 tcc_error("invalid operands for binary operation");
2126 } else if (op
== TOK_SHR
|| op
== TOK_SAR
|| op
== TOK_SHL
) {
2127 t
= bt1
== VT_LLONG
? VT_LLONG
: VT_INT
;
2128 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
)) == (t
| VT_UNSIGNED
))
2131 } else if (bt1
== VT_LLONG
|| bt2
== VT_LLONG
) {
2132 /* cast to biggest op */
2134 /* convert to unsigned if it does not fit in a long long */
2135 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
)) == (VT_LLONG
| VT_UNSIGNED
) ||
2136 (t2
& (VT_BTYPE
| VT_UNSIGNED
)) == (VT_LLONG
| VT_UNSIGNED
))
2140 /* integer operations */
2142 /* convert to unsigned if it does not fit in an integer */
2143 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
)) == (VT_INT
| VT_UNSIGNED
) ||
2144 (t2
& (VT_BTYPE
| VT_UNSIGNED
)) == (VT_INT
| VT_UNSIGNED
))
2147 /* XXX: currently, some unsigned operations are explicit, so
2148 we modify them here */
2149 if (t
& VT_UNSIGNED
) {
2156 else if (op
== TOK_LT
)
2158 else if (op
== TOK_GT
)
2160 else if (op
== TOK_LE
)
2162 else if (op
== TOK_GE
)
2169 /* special case for shifts and long long: we keep the shift as
2171 if (op
== TOK_SHR
|| op
== TOK_SAR
|| op
== TOK_SHL
)
2178 if (op
>= TOK_ULT
&& op
<= TOK_GT
) {
2179 /* relationnal op: the result is an int */
2180 vtop
->type
.t
= VT_INT
;
2185 // Make sure that we have converted to an rvalue:
2186 if (vtop
->r
& VT_LVAL
)
2187 gv(is_float(vtop
->type
.t
& VT_BTYPE
) ? RC_FLOAT
: RC_INT
);
2190 #ifndef TCC_TARGET_ARM
2191 /* generic itof for unsigned long long case */
2192 static void gen_cvt_itof1(int t
)
2194 #ifdef TCC_TARGET_ARM64
2197 if ((vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
)) ==
2198 (VT_LLONG
| VT_UNSIGNED
)) {
2201 vpush_global_sym(&func_old_type
, TOK___floatundisf
);
2202 #if LDOUBLE_SIZE != 8
2203 else if (t
== VT_LDOUBLE
)
2204 vpush_global_sym(&func_old_type
, TOK___floatundixf
);
2207 vpush_global_sym(&func_old_type
, TOK___floatundidf
);
2211 vtop
->r
= reg_fret(t
);
2219 /* generic ftoi for unsigned long long case */
2220 static void gen_cvt_ftoi1(int t
)
2222 #ifdef TCC_TARGET_ARM64
2227 if (t
== (VT_LLONG
| VT_UNSIGNED
)) {
2228 /* not handled natively */
2229 st
= vtop
->type
.t
& VT_BTYPE
;
2231 vpush_global_sym(&func_old_type
, TOK___fixunssfdi
);
2232 #if LDOUBLE_SIZE != 8
2233 else if (st
== VT_LDOUBLE
)
2234 vpush_global_sym(&func_old_type
, TOK___fixunsxfdi
);
2237 vpush_global_sym(&func_old_type
, TOK___fixunsdfdi
);
2242 vtop
->r2
= REG_LRET
;
2249 /* force char or short cast */
2250 static void force_charshort_cast(int t
)
2254 /* XXX: add optimization if lvalue : just change type and offset */
2259 if (t
& VT_UNSIGNED
) {
2260 vpushi((1 << bits
) - 1);
2263 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
)
2269 /* result must be signed or the SAR is converted to an SHL
2270 This was not the case when "t" was a signed short
2271 and the last value on the stack was an unsigned int */
2272 vtop
->type
.t
&= ~VT_UNSIGNED
;
2278 /* cast 'vtop' to 'type'. Casting to bitfields is forbidden. */
2279 static void gen_cast(CType
*type
)
2281 int sbt
, dbt
, sf
, df
, c
, p
;
2283 /* special delayed cast for char/short */
2284 /* XXX: in some cases (multiple cascaded casts), it may still
2286 if (vtop
->r
& VT_MUSTCAST
) {
2287 vtop
->r
&= ~VT_MUSTCAST
;
2288 force_charshort_cast(vtop
->type
.t
);
2291 /* bitfields first get cast to ints */
2292 if (vtop
->type
.t
& VT_BITFIELD
) {
2296 dbt
= type
->t
& (VT_BTYPE
| VT_UNSIGNED
);
2297 sbt
= vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
);
2302 c
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2303 p
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == (VT_CONST
| VT_SYM
);
2305 /* constant case: we can do it now */
2306 /* XXX: in ISOC, cannot do it if error in convert */
2307 if (sbt
== VT_FLOAT
)
2308 vtop
->c
.ld
= vtop
->c
.f
;
2309 else if (sbt
== VT_DOUBLE
)
2310 vtop
->c
.ld
= vtop
->c
.d
;
2313 if ((sbt
& VT_BTYPE
) == VT_LLONG
) {
2314 if ((sbt
& VT_UNSIGNED
) || !(vtop
->c
.i
>> 63))
2315 vtop
->c
.ld
= vtop
->c
.i
;
2317 vtop
->c
.ld
= -(long double)-vtop
->c
.i
;
2319 if ((sbt
& VT_UNSIGNED
) || !(vtop
->c
.i
>> 31))
2320 vtop
->c
.ld
= (uint32_t)vtop
->c
.i
;
2322 vtop
->c
.ld
= -(long double)-(uint32_t)vtop
->c
.i
;
2325 if (dbt
== VT_FLOAT
)
2326 vtop
->c
.f
= (float)vtop
->c
.ld
;
2327 else if (dbt
== VT_DOUBLE
)
2328 vtop
->c
.d
= (double)vtop
->c
.ld
;
2329 } else if (sf
&& dbt
== (VT_LLONG
|VT_UNSIGNED
)) {
2330 vtop
->c
.i
= vtop
->c
.ld
;
2331 } else if (sf
&& dbt
== VT_BOOL
) {
2332 vtop
->c
.i
= (vtop
->c
.ld
!= 0);
2335 vtop
->c
.i
= vtop
->c
.ld
;
2336 else if (sbt
== (VT_LLONG
|VT_UNSIGNED
))
2338 else if (sbt
& VT_UNSIGNED
)
2339 vtop
->c
.i
= (uint32_t)vtop
->c
.i
;
2340 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
2341 else if (sbt
== VT_PTR
)
2344 else if (sbt
!= VT_LLONG
)
2345 vtop
->c
.i
= ((uint32_t)vtop
->c
.i
|
2346 -(vtop
->c
.i
& 0x80000000));
2348 if (dbt
== (VT_LLONG
|VT_UNSIGNED
))
2350 else if (dbt
== VT_BOOL
)
2351 vtop
->c
.i
= (vtop
->c
.i
!= 0);
2352 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
2353 else if (dbt
== VT_PTR
)
2356 else if (dbt
!= VT_LLONG
) {
2357 uint32_t m
= ((dbt
& VT_BTYPE
) == VT_BYTE
? 0xff :
2358 (dbt
& VT_BTYPE
) == VT_SHORT
? 0xffff :
2361 if (!(dbt
& VT_UNSIGNED
))
2362 vtop
->c
.i
|= -(vtop
->c
.i
& ((m
>> 1) + 1));
2365 } else if (p
&& dbt
== VT_BOOL
) {
2369 /* non constant case: generate code */
2371 /* convert from fp to fp */
2374 /* convert int to fp */
2377 /* convert fp to int */
2378 if (dbt
== VT_BOOL
) {
2382 /* we handle char/short/etc... with generic code */
2383 if (dbt
!= (VT_INT
| VT_UNSIGNED
) &&
2384 dbt
!= (VT_LLONG
| VT_UNSIGNED
) &&
2388 if (dbt
== VT_INT
&& (type
->t
& (VT_BTYPE
| VT_UNSIGNED
)) != dbt
) {
2389 /* additional cast for char/short... */
2394 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
2395 } else if ((dbt
& VT_BTYPE
) == VT_LLONG
) {
2396 if ((sbt
& VT_BTYPE
) != VT_LLONG
) {
2397 /* scalar to long long */
2398 /* machine independent conversion */
2400 /* generate high word */
2401 if (sbt
== (VT_INT
| VT_UNSIGNED
)) {
2405 if (sbt
== VT_PTR
) {
2406 /* cast from pointer to int before we apply
2407 shift operation, which pointers don't support*/
2408 gen_cast(&int_type
);
2414 /* patch second register */
2415 vtop
[-1].r2
= vtop
->r
;
2419 } else if ((dbt
& VT_BTYPE
) == VT_LLONG
||
2420 (dbt
& VT_BTYPE
) == VT_PTR
||
2421 (dbt
& VT_BTYPE
) == VT_FUNC
) {
2422 if ((sbt
& VT_BTYPE
) != VT_LLONG
&&
2423 (sbt
& VT_BTYPE
) != VT_PTR
&&
2424 (sbt
& VT_BTYPE
) != VT_FUNC
) {
2425 /* need to convert from 32bit to 64bit */
2427 if (sbt
!= (VT_INT
| VT_UNSIGNED
)) {
2428 #if defined(TCC_TARGET_ARM64)
2430 #elif defined(TCC_TARGET_X86_64)
2432 /* x86_64 specific: movslq */
2434 o(0xc0 + (REG_VALUE(r
) << 3) + REG_VALUE(r
));
2441 } else if (dbt
== VT_BOOL
) {
2442 /* scalar to bool */
2445 } else if ((dbt
& VT_BTYPE
) == VT_BYTE
||
2446 (dbt
& VT_BTYPE
) == VT_SHORT
) {
2447 if (sbt
== VT_PTR
) {
2448 vtop
->type
.t
= VT_INT
;
2449 tcc_warning("nonportable conversion from pointer to char/short");
2451 force_charshort_cast(dbt
);
2452 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
2453 } else if ((dbt
& VT_BTYPE
) == VT_INT
) {
2455 if ((sbt
& VT_BTYPE
) == VT_LLONG
) {
2456 /* from long long: just take low order word */
2460 /* if lvalue and single word type, nothing to do because
2461 the lvalue already contains the real type size (see
2462 VT_LVAL_xxx constants) */
2466 } else if ((dbt
& VT_BTYPE
) == VT_PTR
&& !(vtop
->r
& VT_LVAL
)) {
2467 /* if we are casting between pointer types,
2468 we must update the VT_LVAL_xxx size */
2469 vtop
->r
= (vtop
->r
& ~VT_LVAL_TYPE
)
2470 | (lvalue_type(type
->ref
->type
.t
) & VT_LVAL_TYPE
);
2475 /* return type size as known at compile time. Put alignment at 'a' */
2476 ST_FUNC
int type_size(CType
*type
, int *a
)
2481 bt
= type
->t
& VT_BTYPE
;
2482 if (bt
== VT_STRUCT
) {
2487 } else if (bt
== VT_PTR
) {
2488 if (type
->t
& VT_ARRAY
) {
2492 ts
= type_size(&s
->type
, a
);
2494 if (ts
< 0 && s
->c
< 0)
2502 } else if (bt
== VT_LDOUBLE
) {
2504 return LDOUBLE_SIZE
;
2505 } else if (bt
== VT_DOUBLE
|| bt
== VT_LLONG
) {
2506 #ifdef TCC_TARGET_I386
2507 #ifdef TCC_TARGET_PE
2512 #elif defined(TCC_TARGET_ARM)
2522 } else if (bt
== VT_INT
|| bt
== VT_FLOAT
) {
2525 } else if (bt
== VT_SHORT
) {
2528 } else if (bt
== VT_QLONG
|| bt
== VT_QFLOAT
) {
2531 } else if (bt
== VT_ENUM
) {
2533 /* Enums might be incomplete, so don't just return '4' here. */
2534 return type
->ref
->c
;
2536 /* char, void, function, _Bool */
2542 /* push type size as known at runtime time on top of value stack. Put
2544 ST_FUNC
void vla_runtime_type_size(CType
*type
, int *a
)
2546 if (type
->t
& VT_VLA
) {
2547 type_size(&type
->ref
->type
, a
);
2548 vset(&int_type
, VT_LOCAL
|VT_LVAL
, type
->ref
->c
);
2550 vpushi(type_size(type
, a
));
2554 static void vla_sp_restore(void) {
2555 if (vlas_in_scope
) {
2556 gen_vla_sp_restore(vla_sp_loc
);
2560 static void vla_sp_restore_root(void) {
2561 if (vlas_in_scope
) {
2562 gen_vla_sp_restore(vla_sp_root_loc
);
2566 /* return the pointed type of t */
2567 static inline CType
*pointed_type(CType
*type
)
2569 return &type
->ref
->type
;
2572 /* modify type so that its it is a pointer to type. */
2573 ST_FUNC
void mk_pointer(CType
*type
)
2576 s
= sym_push(SYM_FIELD
, type
, 0, -1);
2577 type
->t
= VT_PTR
| (type
->t
& ~VT_TYPE
);
2581 /* compare function types. OLD functions match any new functions */
2582 static int is_compatible_func(CType
*type1
, CType
*type2
)
2588 if (!is_compatible_types(&s1
->type
, &s2
->type
))
2590 /* check func_call */
2591 if (s1
->a
.func_call
!= s2
->a
.func_call
)
2593 /* XXX: not complete */
2594 if (s1
->c
== FUNC_OLD
|| s2
->c
== FUNC_OLD
)
2598 while (s1
!= NULL
) {
2601 if (!is_compatible_parameter_types(&s1
->type
, &s2
->type
))
2611 /* return true if type1 and type2 are the same. If unqualified is
2612 true, qualifiers on the types are ignored.
2614 - enums are not checked as gcc __builtin_types_compatible_p ()
2616 static int compare_types(CType
*type1
, CType
*type2
, int unqualified
)
2620 t1
= type1
->t
& VT_TYPE
;
2621 t2
= type2
->t
& VT_TYPE
;
2623 /* strip qualifiers before comparing */
2624 t1
&= ~(VT_CONSTANT
| VT_VOLATILE
);
2625 t2
&= ~(VT_CONSTANT
| VT_VOLATILE
);
2627 /* Default Vs explicit signedness only matters for char */
2628 if ((t1
& VT_BTYPE
) != VT_BYTE
) {
2632 /* An enum is compatible with (unsigned) int. Ideally we would
2633 store the enums signedness in type->ref.a.<some_bit> and
2634 only accept unsigned enums with unsigned int and vice versa.
2635 But one of our callers (gen_assign_cast) always strips VT_UNSIGNED
2636 from pointer target types, so we can't add it here either. */
2637 if ((t1
& VT_BTYPE
) == VT_ENUM
) {
2639 if (type1
->ref
->a
.unsigned_enum
)
2642 if ((t2
& VT_BTYPE
) == VT_ENUM
) {
2644 if (type2
->ref
->a
.unsigned_enum
)
2647 /* XXX: bitfields ? */
2650 /* test more complicated cases */
2651 bt1
= t1
& VT_BTYPE
;
2652 if (bt1
== VT_PTR
) {
2653 type1
= pointed_type(type1
);
2654 type2
= pointed_type(type2
);
2655 return is_compatible_types(type1
, type2
);
2656 } else if (bt1
== VT_STRUCT
) {
2657 return (type1
->ref
== type2
->ref
);
2658 } else if (bt1
== VT_FUNC
) {
2659 return is_compatible_func(type1
, type2
);
2665 /* return true if type1 and type2 are exactly the same (including
2668 static int is_compatible_types(CType
*type1
, CType
*type2
)
2670 return compare_types(type1
,type2
,0);
2673 /* return true if type1 and type2 are the same (ignoring qualifiers).
2675 static int is_compatible_parameter_types(CType
*type1
, CType
*type2
)
2677 return compare_types(type1
,type2
,1);
2680 /* print a type. If 'varstr' is not NULL, then the variable is also
2681 printed in the type */
2683 /* XXX: add array and function pointers */
2684 static void type_to_str(char *buf
, int buf_size
,
2685 CType
*type
, const char *varstr
)
2692 t
= type
->t
& VT_TYPE
;
2695 if (t
& VT_CONSTANT
)
2696 pstrcat(buf
, buf_size
, "const ");
2697 if (t
& VT_VOLATILE
)
2698 pstrcat(buf
, buf_size
, "volatile ");
2699 if ((t
& (VT_DEFSIGN
| VT_UNSIGNED
)) == (VT_DEFSIGN
| VT_UNSIGNED
))
2700 pstrcat(buf
, buf_size
, "unsigned ");
2701 else if (t
& VT_DEFSIGN
)
2702 pstrcat(buf
, buf_size
, "signed ");
2732 tstr
= "long double";
2734 pstrcat(buf
, buf_size
, tstr
);
2738 if (bt
== VT_STRUCT
)
2742 pstrcat(buf
, buf_size
, tstr
);
2743 v
= type
->ref
->v
& ~SYM_STRUCT
;
2744 if (v
>= SYM_FIRST_ANOM
)
2745 pstrcat(buf
, buf_size
, "<anonymous>");
2747 pstrcat(buf
, buf_size
, get_tok_str(v
, NULL
));
2751 type_to_str(buf
, buf_size
, &s
->type
, varstr
);
2752 pstrcat(buf
, buf_size
, "(");
2754 while (sa
!= NULL
) {
2755 type_to_str(buf1
, sizeof(buf1
), &sa
->type
, NULL
);
2756 pstrcat(buf
, buf_size
, buf1
);
2759 pstrcat(buf
, buf_size
, ", ");
2761 pstrcat(buf
, buf_size
, ")");
2766 snprintf(buf1
, sizeof(buf1
), "%s[%ld]", varstr
? varstr
: "", s
->c
);
2767 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
2770 pstrcpy(buf1
, sizeof(buf1
), "*");
2771 if (t
& VT_CONSTANT
)
2772 pstrcat(buf1
, buf_size
, "const ");
2773 if (t
& VT_VOLATILE
)
2774 pstrcat(buf1
, buf_size
, "volatile ");
2776 pstrcat(buf1
, sizeof(buf1
), varstr
);
2777 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
2781 pstrcat(buf
, buf_size
, " ");
2782 pstrcat(buf
, buf_size
, varstr
);
2787 /* verify type compatibility to store vtop in 'dt' type, and generate
2789 static void gen_assign_cast(CType
*dt
)
2791 CType
*st
, *type1
, *type2
, tmp_type1
, tmp_type2
;
2792 char buf1
[256], buf2
[256];
2795 st
= &vtop
->type
; /* source type */
2796 dbt
= dt
->t
& VT_BTYPE
;
2797 sbt
= st
->t
& VT_BTYPE
;
2798 if (sbt
== VT_VOID
|| dbt
== VT_VOID
) {
2799 if (sbt
== VT_VOID
&& dbt
== VT_VOID
)
2801 It is Ok if both are void
2807 gcc accepts this program
2810 tcc_error("cannot cast from/to void");
2812 if (dt
->t
& VT_CONSTANT
)
2813 tcc_warning("assignment of read-only location");
2816 /* special cases for pointers */
2817 /* '0' can also be a pointer */
2818 if (is_null_pointer(vtop
))
2820 /* accept implicit pointer to integer cast with warning */
2821 if (is_integer_btype(sbt
)) {
2822 tcc_warning("assignment makes pointer from integer without a cast");
2825 type1
= pointed_type(dt
);
2826 /* a function is implicitely a function pointer */
2827 if (sbt
== VT_FUNC
) {
2828 if ((type1
->t
& VT_BTYPE
) != VT_VOID
&&
2829 !is_compatible_types(pointed_type(dt
), st
))
2830 tcc_warning("assignment from incompatible pointer type");
2835 type2
= pointed_type(st
);
2836 if ((type1
->t
& VT_BTYPE
) == VT_VOID
||
2837 (type2
->t
& VT_BTYPE
) == VT_VOID
) {
2838 /* void * can match anything */
2840 /* exact type match, except for qualifiers */
2843 tmp_type1
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
2844 tmp_type2
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
2845 if (!is_compatible_types(&tmp_type1
, &tmp_type2
)) {
2846 /* Like GCC don't warn by default for merely changes
2847 in pointer target signedness. Do warn for different
2848 base types, though, in particular for unsigned enums
2849 and signed int targets. */
2850 if ((tmp_type1
.t
& (VT_DEFSIGN
| VT_UNSIGNED
)) !=
2851 (tmp_type2
.t
& (VT_DEFSIGN
| VT_UNSIGNED
)) &&
2852 (tmp_type1
.t
& VT_BTYPE
) == (tmp_type2
.t
& VT_BTYPE
))
2855 tcc_warning("assignment from incompatible pointer type");
2858 /* check const and volatile */
2859 if ((!(type1
->t
& VT_CONSTANT
) && (type2
->t
& VT_CONSTANT
)) ||
2860 (!(type1
->t
& VT_VOLATILE
) && (type2
->t
& VT_VOLATILE
)))
2861 tcc_warning("assignment discards qualifiers from pointer target type");
2867 if (sbt
== VT_PTR
|| sbt
== VT_FUNC
) {
2868 tcc_warning("assignment makes integer from pointer without a cast");
2869 } else if (sbt
== VT_STRUCT
) {
2870 goto case_VT_STRUCT
;
2872 /* XXX: more tests */
2878 tmp_type1
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
2879 tmp_type2
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
2880 if (!is_compatible_types(&tmp_type1
, &tmp_type2
)) {
2882 type_to_str(buf1
, sizeof(buf1
), st
, NULL
);
2883 type_to_str(buf2
, sizeof(buf2
), dt
, NULL
);
2884 tcc_error("cannot cast '%s' to '%s'", buf1
, buf2
);
2892 /* store vtop in lvalue pushed on stack */
2893 ST_FUNC
void vstore(void)
2895 int sbt
, dbt
, ft
, r
, t
, size
, align
, bit_size
, bit_pos
, rc
, delayed_cast
;
2897 ft
= vtop
[-1].type
.t
;
2898 sbt
= vtop
->type
.t
& VT_BTYPE
;
2899 dbt
= ft
& VT_BTYPE
;
2900 if ((((sbt
== VT_INT
|| sbt
== VT_SHORT
) && dbt
== VT_BYTE
) ||
2901 (sbt
== VT_INT
&& dbt
== VT_SHORT
))
2902 && !(vtop
->type
.t
& VT_BITFIELD
)) {
2903 /* optimize char/short casts */
2904 delayed_cast
= VT_MUSTCAST
;
2905 vtop
->type
.t
= (ft
& VT_TYPE
& ~VT_BITFIELD
&
2906 ((1 << VT_STRUCT_SHIFT
) - 1));
2907 /* XXX: factorize */
2908 if (ft
& VT_CONSTANT
)
2909 tcc_warning("assignment of read-only location");
2912 if (!(ft
& VT_BITFIELD
))
2913 gen_assign_cast(&vtop
[-1].type
);
2916 if (sbt
== VT_STRUCT
) {
2917 /* if structure, only generate pointer */
2918 /* structure assignment : generate memcpy */
2919 /* XXX: optimize if small size */
2920 size
= type_size(&vtop
->type
, &align
);
2924 vtop
->type
.t
= VT_PTR
;
2927 /* address of memcpy() */
2930 vpush_global_sym(&func_old_type
, TOK_memcpy8
);
2931 else if(!(align
& 3))
2932 vpush_global_sym(&func_old_type
, TOK_memcpy4
);
2935 /* Use memmove, rather than memcpy, as dest and src may be same: */
2936 vpush_global_sym(&func_old_type
, TOK_memmove
);
2941 vtop
->type
.t
= VT_PTR
;
2947 /* leave source on stack */
2948 } else if (ft
& VT_BITFIELD
) {
2949 /* bitfield store handling */
2951 /* save lvalue as expression result (example: s.b = s.a = n;) */
2952 vdup(), vtop
[-1] = vtop
[-2];
2954 bit_pos
= (ft
>> VT_STRUCT_SHIFT
) & 0x3f;
2955 bit_size
= (ft
>> (VT_STRUCT_SHIFT
+ 6)) & 0x3f;
2956 /* remove bit field info to avoid loops */
2957 vtop
[-1].type
.t
= ft
& ~VT_BITFIELD
& ((1 << VT_STRUCT_SHIFT
) - 1);
2959 if((ft
& VT_BTYPE
) == VT_BOOL
) {
2960 gen_cast(&vtop
[-1].type
);
2961 vtop
[-1].type
.t
= (vtop
[-1].type
.t
& ~VT_BTYPE
) | (VT_BYTE
| VT_UNSIGNED
);
2964 /* duplicate destination */
2966 vtop
[-1] = vtop
[-2];
2968 /* mask and shift source */
2969 if((ft
& VT_BTYPE
) != VT_BOOL
) {
2970 if((ft
& VT_BTYPE
) == VT_LLONG
) {
2971 vpushll((1ULL << bit_size
) - 1ULL);
2973 vpushi((1 << bit_size
) - 1);
2979 /* load destination, mask and or with source */
2981 if((ft
& VT_BTYPE
) == VT_LLONG
) {
2982 vpushll(~(((1ULL << bit_size
) - 1ULL) << bit_pos
));
2984 vpushi(~(((1 << bit_size
) - 1) << bit_pos
));
2990 /* ... and discard */
2994 #ifdef CONFIG_TCC_BCHECK
2995 /* bound check case */
2996 if (vtop
[-1].r
& VT_MUSTBOUND
) {
3005 #ifdef TCC_TARGET_X86_64
3006 if ((ft
& VT_BTYPE
) == VT_LDOUBLE
) {
3008 } else if ((ft
& VT_BTYPE
) == VT_QFLOAT
) {
3013 r
= gv(rc
); /* generate value */
3014 /* if lvalue was saved on stack, must read it */
3015 if ((vtop
[-1].r
& VT_VALMASK
) == VT_LLOCAL
) {
3017 t
= get_reg(RC_INT
);
3018 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
3023 sv
.r
= VT_LOCAL
| VT_LVAL
;
3024 sv
.c
.i
= vtop
[-1].c
.i
;
3026 vtop
[-1].r
= t
| VT_LVAL
;
3028 /* two word case handling : store second register at word + 4 (or +8 for x86-64) */
3029 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
3030 if (((ft
& VT_BTYPE
) == VT_QLONG
) || ((ft
& VT_BTYPE
) == VT_QFLOAT
)) {
3031 int addr_type
= VT_LLONG
, load_size
= 8, load_type
= ((vtop
->type
.t
& VT_BTYPE
) == VT_QLONG
) ? VT_LLONG
: VT_DOUBLE
;
3033 if ((ft
& VT_BTYPE
) == VT_LLONG
) {
3034 int addr_type
= VT_INT
, load_size
= 4, load_type
= VT_INT
;
3036 vtop
[-1].type
.t
= load_type
;
3039 /* convert to int to increment easily */
3040 vtop
->type
.t
= addr_type
;
3046 vtop
[-1].type
.t
= load_type
;
3047 /* XXX: it works because r2 is spilled last ! */
3048 store(vtop
->r2
, vtop
- 1);
3054 vtop
--; /* NOT vpop() because on x86 it would flush the fp stack */
3055 vtop
->r
|= delayed_cast
;
3059 /* post defines POST/PRE add. c is the token ++ or -- */
3060 ST_FUNC
void inc(int post
, int c
)
3063 vdup(); /* save lvalue */
3065 gv_dup(); /* duplicate value */
3070 vpushi(c
- TOK_MID
);
3072 vstore(); /* store value */
3074 vpop(); /* if post op, return saved value */
3077 ST_FUNC
void parse_mult_str (CString
*astr
, const char *msg
)
3079 /* read the string */
3083 while (tok
== TOK_STR
) {
3084 /* XXX: add \0 handling too ? */
3085 cstr_cat(astr
, tokc
.str
.data
, -1);
3088 cstr_ccat(astr
, '\0');
3091 /* If I is >= 1 and a power of two, returns log2(i)+1.
3092 If I is 0 returns 0. */
3093 static int exact_log2p1(int i
)
3098 for (ret
= 1; i
>= 1 << 8; ret
+= 8)
3109 /* Parse GNUC __attribute__ extension. Currently, the following
3110 extensions are recognized:
3111 - aligned(n) : set data/function alignment.
3112 - packed : force data alignment to 1
3113 - section(x) : generate data/code in this section.
3114 - unused : currently ignored, but may be used someday.
3115 - regparm(n) : pass function parameters in registers (i386 only)
3117 static void parse_attribute(AttributeDef
*ad
)
3122 while (tok
== TOK_ATTRIBUTE1
|| tok
== TOK_ATTRIBUTE2
) {
3126 while (tok
!= ')') {
3127 if (tok
< TOK_IDENT
)
3128 expect("attribute name");
3135 parse_mult_str(&astr
, "section name");
3136 ad
->section
= find_section(tcc_state
, (char *)astr
.data
);
3143 parse_mult_str(&astr
, "alias(\"target\")");
3144 ad
->alias_target
= /* save string as token, for later */
3145 tok_alloc((char*)astr
.data
, astr
.size
-1)->tok
;
3149 case TOK_VISIBILITY1
:
3150 case TOK_VISIBILITY2
:
3152 parse_mult_str(&astr
,
3153 "visibility(\"default|hidden|internal|protected\")");
3154 if (!strcmp (astr
.data
, "default"))
3155 ad
->a
.visibility
= STV_DEFAULT
;
3156 else if (!strcmp (astr
.data
, "hidden"))
3157 ad
->a
.visibility
= STV_HIDDEN
;
3158 else if (!strcmp (astr
.data
, "internal"))
3159 ad
->a
.visibility
= STV_INTERNAL
;
3160 else if (!strcmp (astr
.data
, "protected"))
3161 ad
->a
.visibility
= STV_PROTECTED
;
3163 expect("visibility(\"default|hidden|internal|protected\")");
3172 if (n
<= 0 || (n
& (n
- 1)) != 0)
3173 tcc_error("alignment must be a positive power of two");
3178 ad
->a
.aligned
= exact_log2p1(n
);
3179 if (n
!= 1 << (ad
->a
.aligned
- 1))
3180 tcc_error("alignment of %d is larger than implemented", n
);
3192 /* currently, no need to handle it because tcc does not
3193 track unused objects */
3197 /* currently, no need to handle it because tcc does not
3198 track unused objects */
3203 ad
->a
.func_call
= FUNC_CDECL
;
3208 ad
->a
.func_call
= FUNC_STDCALL
;
3210 #ifdef TCC_TARGET_I386
3220 ad
->a
.func_call
= FUNC_FASTCALL1
+ n
- 1;
3226 ad
->a
.func_call
= FUNC_FASTCALLW
;
3233 ad
->a
.mode
= VT_LLONG
+ 1;
3236 ad
->a
.mode
= VT_BYTE
+ 1;
3239 ad
->a
.mode
= VT_SHORT
+ 1;
3243 ad
->a
.mode
= VT_INT
+ 1;
3246 tcc_warning("__mode__(%s) not supported\n", get_tok_str(tok
, NULL
));
3253 ad
->a
.func_export
= 1;
3256 ad
->a
.func_import
= 1;
3259 if (tcc_state
->warn_unsupported
)
3260 tcc_warning("'%s' attribute ignored", get_tok_str(t
, NULL
));
3261 /* skip parameters */
3263 int parenthesis
= 0;
3267 else if (tok
== ')')
3270 } while (parenthesis
&& tok
!= -1);
3283 static Sym
* find_field (CType
*type
, int v
)
3287 while ((s
= s
->next
) != NULL
) {
3288 if ((s
->v
& SYM_FIELD
) &&
3289 (s
->type
.t
& VT_BTYPE
) == VT_STRUCT
&&
3290 (s
->v
& ~SYM_FIELD
) >= SYM_FIRST_ANOM
) {
3291 Sym
*ret
= find_field (&s
->type
, v
);
3301 static void struct_add_offset (Sym
*s
, int offset
)
3303 while ((s
= s
->next
) != NULL
) {
3304 if ((s
->v
& SYM_FIELD
) &&
3305 (s
->type
.t
& VT_BTYPE
) == VT_STRUCT
&&
3306 (s
->v
& ~SYM_FIELD
) >= SYM_FIRST_ANOM
) {
3307 struct_add_offset(s
->type
.ref
, offset
);
3313 static void struct_layout(CType
*type
, AttributeDef
*ad
)
3315 int align
, maxalign
, offset
, c
, bit_pos
, bt
, prevbt
, prev_bit_size
;
3316 int pcc
= !tcc_state
->ms_bitfields
;
3319 maxalign
= 1 << (ad
->a
.aligned
- 1);
3325 prevbt
= VT_STRUCT
; /* make it never match */
3327 for (f
= type
->ref
->next
; f
; f
= f
->next
) {
3328 int typealign
, bit_size
;
3329 int size
= type_size(&f
->type
, &typealign
);
3330 if (f
->type
.t
& VT_BITFIELD
)
3331 bit_size
= (f
->type
.t
>> (VT_STRUCT_SHIFT
+ 6)) & 0x3f;
3334 if (bit_size
== 0 && pcc
) {
3335 /* Zero-width bit-fields in PCC mode aren't affected
3336 by any packing (attribute or pragma). */
3338 } else if (f
->r
> 1) {
3340 } else if (ad
->a
.packed
|| f
->r
== 1) {
3342 /* Packed fields or packed records don't let the base type
3343 influence the records type alignment. */
3348 if (type
->ref
->type
.t
!= TOK_STRUCT
) {
3349 if (pcc
&& bit_size
>= 0)
3350 size
= (bit_size
+ 7) >> 3;
3351 /* Bit position is already zero from our caller. */
3355 } else if (bit_size
< 0) {
3356 int addbytes
= pcc
? (bit_pos
+ 7) >> 3 : 0;
3359 c
= (c
+ addbytes
+ align
- 1) & -align
;
3365 /* A bit-field. Layout is more complicated. There are two
3366 options TCC implements: PCC compatible and MS compatible
3367 (PCC compatible is what GCC uses for almost all targets).
3368 In PCC layout the overall size of the struct (in c) is
3369 _excluding_ the current run of bit-fields (that is,
3370 there's at least additional bit_pos bits after c). In
3371 MS layout c does include the current run of bit-fields.
3373 This matters for calculating the natural alignment buckets
3376 /* 'align' will be used to influence records alignment,
3377 so it's the max of specified and type alignment, except
3378 in certain cases that depend on the mode. */
3379 if (align
< typealign
)
3382 /* In PCC layout a non-packed bit-field is placed adjacent
3383 to the preceding bit-fields, except if it would overflow
3384 its container (depending on base type) or it's a zero-width
3385 bit-field. Packed non-zero-width bit-fields always are
3387 int ofs
= (c
* 8 + bit_pos
) % (typealign
* 8);
3388 int ofs2
= ofs
+ bit_size
+ (typealign
* 8) - 1;
3389 if (bit_size
== 0 ||
3390 ((typealign
!= 1 || size
== 1) &&
3391 (ofs2
/ (typealign
* 8)) > (size
/typealign
))) {
3392 c
= (c
+ ((bit_pos
+ 7) >> 3) + typealign
- 1) & -typealign
;
3396 /* In PCC layout named bit-fields influence the alignment
3397 of the containing struct using the base types alignment,
3398 except for packed fields (which here have correct
3399 align/typealign). */
3400 if ((f
->v
& SYM_FIRST_ANOM
))
3403 bt
= f
->type
.t
& VT_BTYPE
;
3404 if ((bit_pos
+ bit_size
> size
* 8) ||
3405 (bit_size
> 0) == (bt
!= prevbt
)) {
3406 c
= (c
+ typealign
- 1) & -typealign
;
3409 /* In MS bitfield mode a bit-field run always uses
3410 at least as many bits as the underlying type.
3411 To start a new run it's also required that this
3412 or the last bit-field had non-zero width. */
3413 if (bit_size
|| prev_bit_size
)
3416 /* In MS layout the records alignment is normally
3417 influenced by the field, except for a zero-width
3418 field at the start of a run (but by further zero-width
3419 fields it is again). */
3420 if (bit_size
== 0 && prevbt
!= bt
)
3423 prev_bit_size
= bit_size
;
3425 f
->type
.t
= (f
->type
.t
& ~(0x3f << VT_STRUCT_SHIFT
))
3426 | (bit_pos
<< VT_STRUCT_SHIFT
);
3427 bit_pos
+= bit_size
;
3428 if (pcc
&& bit_pos
>= size
* 8) {
3430 bit_pos
-= size
* 8;
3433 if (align
> maxalign
)
3436 printf("set field %s offset=%d c=%d",
3437 get_tok_str(f
->v
& ~SYM_FIELD
, NULL
), offset
, c
);
3438 if (f
->type
.t
& VT_BITFIELD
) {
3439 printf(" pos=%d size=%d",
3440 (f
->type
.t
>> VT_STRUCT_SHIFT
) & 0x3f,
3441 (f
->type
.t
>> (VT_STRUCT_SHIFT
+ 6)) & 0x3f);
3446 if (f
->v
& SYM_FIRST_ANOM
&& (f
->type
.t
& VT_BTYPE
) == VT_STRUCT
) {
3448 /* An anonymous struct/union. Adjust member offsets
3449 to reflect the real offset of our containing struct.
3450 Also set the offset of this anon member inside
3451 the outer struct to be zero. Via this it
3452 works when accessing the field offset directly
3453 (from base object), as well as when recursing
3454 members in initializer handling. */
3455 int v2
= f
->type
.ref
->v
;
3456 if (!(v2
& SYM_FIELD
) &&
3457 (v2
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
3459 /* This happens only with MS extensions. The
3460 anon member has a named struct type, so it
3461 potentially is shared with other references.
3462 We need to unshare members so we can modify
3465 f
->type
.ref
= sym_push(anon_sym
++ | SYM_FIELD
,
3466 &f
->type
.ref
->type
, 0,
3468 pps
= &f
->type
.ref
->next
;
3469 while ((ass
= ass
->next
) != NULL
) {
3470 *pps
= sym_push(ass
->v
, &ass
->type
, 0, ass
->c
);
3471 pps
= &((*pps
)->next
);
3475 struct_add_offset(f
->type
.ref
, offset
);
3483 /* store size and alignment */
3484 type
->ref
->c
= (c
+ (pcc
? (bit_pos
+ 7) >> 3 : 0)
3485 + maxalign
- 1) & -maxalign
;
3486 type
->ref
->r
= maxalign
;
3489 /* enum/struct/union declaration. u is either VT_ENUM or VT_STRUCT */
3490 static void struct_decl(CType
*type
, AttributeDef
*ad
, int u
)
3492 int a
, v
, size
, align
, flexible
, alignoverride
;
3494 int bit_size
, bsize
, bt
;
3499 a
= tok
; /* save decl type */
3501 if (tok
== TOK_ATTRIBUTE1
|| tok
== TOK_ATTRIBUTE2
)
3502 parse_attribute(ad
);
3506 /* struct already defined ? return it */
3508 expect("struct/union/enum name");
3510 if (s
&& (s
->scope
== local_scope
|| (tok
!= '{' && tok
!= ';'))) {
3512 tcc_error("redefinition of '%s'", get_tok_str(v
, NULL
));
3518 /* Record the original enum/struct/union token. */
3521 /* we put an undefined size for struct/union */
3522 s
= sym_push(v
| SYM_STRUCT
, &type1
, 0, -1);
3523 s
->r
= 0; /* default alignment is zero as gcc */
3524 /* put struct/union/enum name in type */
3532 tcc_error("struct/union/enum already defined");
3533 /* cannot be empty */
3535 /* non empty enums are not allowed */
3536 if (a
== TOK_ENUM
) {
3540 CType
*t
= &int_type
;
3543 expect("identifier");
3545 if (ss
&& !local_stack
)
3546 tcc_error("redefinition of enumerator '%s'",
3547 get_tok_str(v
, NULL
));
3551 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
3554 /* We really want to support long long enums
3555 on i386 as well, but the Sym structure only
3556 holds a 'long' for associated constants,
3557 and enlarging it would bump its size (no
3558 available padding). So punt for now. */
3564 if (c
!= (int)c
&& (unsigned long)c
!= (unsigned int)c
)
3565 seen_wide
= 1, t
= &size_type
;
3566 /* enum symbols have static storage */
3567 ss
= sym_push(v
, t
, VT_CONST
, c
);
3568 ss
->type
.t
|= VT_STATIC
;
3573 /* NOTE: we accept a trailing comma */
3578 s
->a
.unsigned_enum
= 1;
3579 s
->c
= type_size(seen_wide
? &size_type
: &int_type
, &align
);
3584 while (tok
!= '}') {
3585 if (!parse_btype(&btype
, &ad1
)) {
3591 tcc_error("flexible array member '%s' not at the end of struct",
3592 get_tok_str(v
, NULL
));
3597 type_decl(&type1
, &ad1
, &v
, TYPE_DIRECT
| TYPE_ABSTRACT
);
3599 if ((type1
.t
& VT_BTYPE
) != VT_STRUCT
)
3600 expect("identifier");
3602 int v
= btype
.ref
->v
;
3603 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
3604 if (tcc_state
->ms_extensions
== 0)
3605 expect("identifier");
3609 if (type_size(&type1
, &align
) < 0) {
3610 if ((a
== TOK_STRUCT
) && (type1
.t
& VT_ARRAY
) && c
)
3613 tcc_error("field '%s' has incomplete type",
3614 get_tok_str(v
, NULL
));
3616 if ((type1
.t
& VT_BTYPE
) == VT_FUNC
||
3617 (type1
.t
& (VT_TYPEDEF
| VT_STATIC
| VT_EXTERN
| VT_INLINE
)))
3618 tcc_error("invalid type for '%s'",
3619 get_tok_str(v
, NULL
));
3623 bit_size
= expr_const();
3624 /* XXX: handle v = 0 case for messages */
3626 tcc_error("negative width in bit-field '%s'",
3627 get_tok_str(v
, NULL
));
3628 if (v
&& bit_size
== 0)
3629 tcc_error("zero width for bit-field '%s'",
3630 get_tok_str(v
, NULL
));
3631 if (tok
== TOK_ATTRIBUTE1
|| tok
== TOK_ATTRIBUTE2
)
3632 parse_attribute(&ad1
);
3634 size
= type_size(&type1
, &align
);
3635 /* Only remember non-default alignment. */
3637 if (ad1
.a
.aligned
) {
3638 int speca
= 1 << (ad1
.a
.aligned
- 1);
3639 alignoverride
= speca
;
3640 } else if (ad1
.a
.packed
|| ad
->a
.packed
) {
3642 } else if (*tcc_state
->pack_stack_ptr
) {
3643 if (align
> *tcc_state
->pack_stack_ptr
)
3644 alignoverride
= *tcc_state
->pack_stack_ptr
;
3646 if (bit_size
>= 0) {
3647 bt
= type1
.t
& VT_BTYPE
;
3654 tcc_error("bitfields must have scalar type");
3656 if (bit_size
> bsize
) {
3657 tcc_error("width of '%s' exceeds its type",
3658 get_tok_str(v
, NULL
));
3659 } else if (bit_size
== bsize
) {
3660 /* no need for bit fields */
3663 type1
.t
|= VT_BITFIELD
|
3664 (0 << VT_STRUCT_SHIFT
) |
3665 (bit_size
<< (VT_STRUCT_SHIFT
+ 6));
3668 if (v
!= 0 || (type1
.t
& VT_BTYPE
) == VT_STRUCT
) {
3669 /* Remember we've seen a real field to check
3670 for placement of flexible array member. */
3673 /* If member is a struct or bit-field, enforce
3674 placing into the struct (as anonymous). */
3676 ((type1
.t
& VT_BTYPE
) == VT_STRUCT
||
3681 ss
= sym_push(v
| SYM_FIELD
, &type1
, alignoverride
, 0);
3685 if (tok
== ';' || tok
== TOK_EOF
)
3692 if (tok
== TOK_ATTRIBUTE1
|| tok
== TOK_ATTRIBUTE2
)
3693 parse_attribute(ad
);
3694 struct_layout(type
, ad
);
3699 /* return 1 if basic type is a type size (short, long, long long) */
3700 ST_FUNC
int is_btype_size(int bt
)
3702 return bt
== VT_SHORT
|| bt
== VT_LONG
|| bt
== VT_LLONG
;
3705 /* Add type qualifiers to a type. If the type is an array then the qualifiers
3706 are added to the element type, copied because it could be a typedef. */
3707 static void parse_btype_qualify(CType
*type
, int qualifiers
)
3709 while (type
->t
& VT_ARRAY
) {
3710 type
->ref
= sym_push(SYM_FIELD
, &type
->ref
->type
, 0, type
->ref
->c
);
3711 type
= &type
->ref
->type
;
3713 type
->t
|= qualifiers
;
3716 /* return 0 if no type declaration. otherwise, return the basic type
3719 static int parse_btype(CType
*type
, AttributeDef
*ad
)
3721 int t
, u
, bt_size
, complete
, type_found
, typespec_found
, g
;
3725 memset(ad
, 0, sizeof(AttributeDef
));
3733 /* currently, we really ignore extension */
3744 tcc_error("too many basic types");
3746 bt_size
= is_btype_size (u
& VT_BTYPE
);
3747 if (u
== VT_INT
|| (!bt_size
&& !(t
& VT_TYPEDEF
)))
3762 if ((t
& VT_BTYPE
) == VT_DOUBLE
) {
3763 #ifndef TCC_TARGET_PE
3764 t
= (t
& ~VT_BTYPE
) | VT_LDOUBLE
;
3766 } else if ((t
& VT_BTYPE
) == VT_LONG
) {
3767 t
= (t
& ~VT_BTYPE
) | VT_LLONG
;
3773 #ifdef TCC_TARGET_ARM64
3775 /* GCC's __uint128_t appears in some Linux header files. Make it a
3776 synonym for long double to get the size and alignment right. */
3788 if ((t
& VT_BTYPE
) == VT_LONG
) {
3789 #ifdef TCC_TARGET_PE
3790 t
= (t
& ~VT_BTYPE
) | VT_DOUBLE
;
3792 t
= (t
& ~VT_BTYPE
) | VT_LDOUBLE
;
3800 struct_decl(&type1
, ad
, VT_ENUM
);
3803 type
->ref
= type1
.ref
;
3807 struct_decl(&type1
, ad
, VT_STRUCT
);
3810 /* type modifiers */
3815 parse_btype_qualify(type
, VT_CONSTANT
);
3823 parse_btype_qualify(type
, VT_VOLATILE
);
3830 if ((t
& (VT_DEFSIGN
|VT_UNSIGNED
)) == (VT_DEFSIGN
|VT_UNSIGNED
))
3831 tcc_error("signed and unsigned modifier");
3844 if ((t
& (VT_DEFSIGN
|VT_UNSIGNED
)) == VT_DEFSIGN
)
3845 tcc_error("signed and unsigned modifier");
3846 t
|= VT_DEFSIGN
| VT_UNSIGNED
;
3862 if (t
& (VT_EXTERN
|VT_STATIC
|VT_TYPEDEF
) & ~g
)
3863 tcc_error("multiple storage classes");
3874 /* GNUC attribute */
3875 case TOK_ATTRIBUTE1
:
3876 case TOK_ATTRIBUTE2
:
3877 parse_attribute(ad
);
3880 t
= (t
& ~VT_BTYPE
) | u
;
3888 parse_expr_type(&type1
);
3889 /* remove all storage modifiers except typedef */
3890 type1
.t
&= ~(VT_STORAGE
&~VT_TYPEDEF
);
3896 if (!s
|| !(s
->type
.t
& VT_TYPEDEF
))
3899 type
->t
= ((s
->type
.t
& ~VT_TYPEDEF
) |
3900 (t
& ~(VT_CONSTANT
| VT_VOLATILE
)));
3901 type
->ref
= s
->type
.ref
;
3902 if (t
& (VT_CONSTANT
| VT_VOLATILE
))
3903 parse_btype_qualify(type
, t
& (VT_CONSTANT
| VT_VOLATILE
));
3907 /* get attributes from typedef */
3908 if (0 == ad
->a
.aligned
)
3909 ad
->a
.aligned
= s
->a
.aligned
;
3910 if (0 == ad
->a
.func_call
)
3911 ad
->a
.func_call
= s
->a
.func_call
;
3912 ad
->a
.packed
|= s
->a
.packed
;
3921 if (tcc_state
->char_is_unsigned
) {
3922 if ((t
& (VT_DEFSIGN
|VT_BTYPE
)) == VT_BYTE
)
3926 /* long is never used as type */
3927 if ((t
& VT_BTYPE
) == VT_LONG
)
3928 #if (!defined TCC_TARGET_X86_64 && !defined TCC_TARGET_ARM64) || \
3929 defined TCC_TARGET_PE
3930 t
= (t
& ~VT_BTYPE
) | VT_INT
;
3932 t
= (t
& ~VT_BTYPE
) | VT_LLONG
;
3938 /* convert a function parameter type (array to pointer and function to
3939 function pointer) */
3940 static inline void convert_parameter_type(CType
*pt
)
3942 /* remove const and volatile qualifiers (XXX: const could be used
3943 to indicate a const function parameter */
3944 pt
->t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
3945 /* array must be transformed to pointer according to ANSI C */
3947 if ((pt
->t
& VT_BTYPE
) == VT_FUNC
) {
3952 ST_FUNC
void parse_asm_str(CString
*astr
)
3955 parse_mult_str(astr
, "string constant");
3958 /* Parse an asm label and return the token */
3959 static int asm_label_instr(void)
3965 parse_asm_str(&astr
);
3968 printf("asm_alias: \"%s\"\n", (char *)astr
.data
);
3970 v
= tok_alloc(astr
.data
, astr
.size
- 1)->tok
;
3975 static void post_type(CType
*type
, AttributeDef
*ad
, int storage
)
3977 int n
, l
, t1
, arg_size
, align
;
3978 Sym
**plast
, *s
, *first
;
3983 /* function declaration */
3991 /* read param name and compute offset */
3992 if (l
!= FUNC_OLD
) {
3993 if (!parse_btype(&pt
, &ad1
)) {
3995 tcc_error("invalid type");
4002 if ((pt
.t
& VT_BTYPE
) == VT_VOID
&& tok
== ')')
4004 type_decl(&pt
, &ad1
, &n
, TYPE_DIRECT
| TYPE_ABSTRACT
);
4005 if ((pt
.t
& VT_BTYPE
) == VT_VOID
)
4006 tcc_error("parameter declared as void");
4007 arg_size
+= (type_size(&pt
, &align
) + PTR_SIZE
- 1) / PTR_SIZE
;
4012 expect("identifier");
4016 convert_parameter_type(&pt
);
4017 s
= sym_push(n
| SYM_FIELD
, &pt
, 0, 0);
4023 if (l
== FUNC_NEW
&& tok
== TOK_DOTS
) {
4030 /* if no parameters, then old type prototype */
4034 /* NOTE: const is ignored in returned type as it has a special
4035 meaning in gcc / C++ */
4036 type
->t
&= ~VT_CONSTANT
;
4037 /* some ancient pre-K&R C allows a function to return an array
4038 and the array brackets to be put after the arguments, such
4039 that "int c()[]" means something like "int[] c()" */
4042 skip(']'); /* only handle simple "[]" */
4045 /* we push a anonymous symbol which will contain the function prototype */
4046 ad
->a
.func_args
= arg_size
;
4047 s
= sym_push(SYM_FIELD
, type
, 0, l
);
4052 } else if (tok
== '[') {
4053 int saved_nocode_wanted
= nocode_wanted
;
4054 /* array definition */
4056 if (tok
== TOK_RESTRICT1
)
4061 if (!local_stack
|| (storage
& VT_STATIC
))
4062 vpushi(expr_const());
4064 /* VLAs (which can only happen with local_stack && !VT_STATIC)
4065 length must always be evaluated, even under nocode_wanted,
4066 so that its size slot is initialized (e.g. under sizeof
4071 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
4074 tcc_error("invalid array size");
4076 if (!is_integer_btype(vtop
->type
.t
& VT_BTYPE
))
4077 tcc_error("size of variable length array should be an integer");
4082 /* parse next post type */
4083 post_type(type
, ad
, storage
);
4084 if (type
->t
== VT_FUNC
)
4085 tcc_error("declaration of an array of functions");
4086 t1
|= type
->t
& VT_VLA
;
4089 loc
-= type_size(&int_type
, &align
);
4093 vla_runtime_type_size(type
, &align
);
4095 vset(&int_type
, VT_LOCAL
|VT_LVAL
, n
);
4101 nocode_wanted
= saved_nocode_wanted
;
4103 /* we push an anonymous symbol which will contain the array
4105 s
= sym_push(SYM_FIELD
, type
, 0, n
);
4106 type
->t
= (t1
? VT_VLA
: VT_ARRAY
) | VT_PTR
;
4111 /* Parse a type declaration (except basic type), and return the type
4112 in 'type'. 'td' is a bitmask indicating which kind of type decl is
4113 expected. 'type' should contain the basic type. 'ad' is the
4114 attribute definition of the basic type. It can be modified by
4117 static void type_decl(CType
*type
, AttributeDef
*ad
, int *v
, int td
)
4120 CType type1
, *type2
;
4121 int qualifiers
, storage
;
4123 while (tok
== '*') {
4131 qualifiers
|= VT_CONSTANT
;
4136 qualifiers
|= VT_VOLATILE
;
4142 /* XXX: clarify attribute handling */
4143 case TOK_ATTRIBUTE1
:
4144 case TOK_ATTRIBUTE2
:
4145 parse_attribute(ad
);
4149 type
->t
|= qualifiers
;
4152 /* recursive type */
4153 /* XXX: incorrect if abstract type for functions (e.g. 'int ()') */
4154 type1
.t
= 0; /* XXX: same as int */
4157 /* XXX: this is not correct to modify 'ad' at this point, but
4158 the syntax is not clear */
4159 if (tok
== TOK_ATTRIBUTE1
|| tok
== TOK_ATTRIBUTE2
)
4160 parse_attribute(ad
);
4161 type_decl(&type1
, ad
, v
, td
);
4164 /* type identifier */
4165 if (tok
>= TOK_IDENT
&& (td
& TYPE_DIRECT
)) {
4169 if (!(td
& TYPE_ABSTRACT
))
4170 expect("identifier");
4174 storage
= type
->t
& VT_STORAGE
;
4175 type
->t
&= ~VT_STORAGE
;
4176 post_type(type
, ad
, storage
);
4178 if (tok
== TOK_ATTRIBUTE1
|| tok
== TOK_ATTRIBUTE2
)
4179 parse_attribute(ad
);
4183 /* append type at the end of type1 */
4197 /* compute the lvalue VT_LVAL_xxx needed to match type t. */
4198 ST_FUNC
int lvalue_type(int t
)
4203 if (bt
== VT_BYTE
|| bt
== VT_BOOL
)
4205 else if (bt
== VT_SHORT
)
4209 if (t
& VT_UNSIGNED
)
4210 r
|= VT_LVAL_UNSIGNED
;
4214 /* indirection with full error checking and bound check */
4215 ST_FUNC
void indir(void)
4217 if ((vtop
->type
.t
& VT_BTYPE
) != VT_PTR
) {
4218 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FUNC
)
4222 if (vtop
->r
& VT_LVAL
)
4224 vtop
->type
= *pointed_type(&vtop
->type
);
4225 /* Arrays and functions are never lvalues */
4226 if (!(vtop
->type
.t
& VT_ARRAY
) && !(vtop
->type
.t
& VT_VLA
)
4227 && (vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
) {
4228 vtop
->r
|= lvalue_type(vtop
->type
.t
);
4229 /* if bound checking, the referenced pointer must be checked */
4230 #ifdef CONFIG_TCC_BCHECK
4231 if (tcc_state
->do_bounds_check
)
4232 vtop
->r
|= VT_MUSTBOUND
;
4237 /* pass a parameter to a function and do type checking and casting */
4238 static void gfunc_param_typed(Sym
*func
, Sym
*arg
)
4243 func_type
= func
->c
;
4244 if (func_type
== FUNC_OLD
||
4245 (func_type
== FUNC_ELLIPSIS
&& arg
== NULL
)) {
4246 /* default casting : only need to convert float to double */
4247 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FLOAT
) {
4250 } else if (vtop
->type
.t
& VT_BITFIELD
) {
4251 type
.t
= vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
);
4252 type
.ref
= vtop
->type
.ref
;
4255 } else if (arg
== NULL
) {
4256 tcc_error("too many arguments to function");
4259 type
.t
&= ~VT_CONSTANT
; /* need to do that to avoid false warning */
4260 gen_assign_cast(&type
);
4264 /* parse an expression of the form '(type)' or '(expr)' and return its
4266 static void parse_expr_type(CType
*type
)
4272 if (parse_btype(type
, &ad
)) {
4273 type_decl(type
, &ad
, &n
, TYPE_ABSTRACT
);
4280 static void parse_type(CType
*type
)
4285 if (!parse_btype(type
, &ad
)) {
4288 type_decl(type
, &ad
, &n
, TYPE_ABSTRACT
);
4291 static void vpush_tokc(int t
)
4296 vsetc(&type
, VT_CONST
, &tokc
);
4299 ST_FUNC
void unary(void)
4301 int n
, t
, align
, size
, r
, sizeof_caller
;
4306 sizeof_caller
= in_sizeof
;
4308 /* XXX: GCC 2.95.3 does not generate a table although it should be
4322 vpush_tokc(VT_INT
| VT_UNSIGNED
);
4326 vpush_tokc(VT_LLONG
);
4330 vpush_tokc(VT_LLONG
| VT_UNSIGNED
);
4334 vpush_tokc(VT_FLOAT
);
4338 vpush_tokc(VT_DOUBLE
);
4342 vpush_tokc(VT_LDOUBLE
);
4345 case TOK___FUNCTION__
:
4347 goto tok_identifier
;
4353 /* special function name identifier */
4354 len
= strlen(funcname
) + 1;
4355 /* generate char[len] type */
4360 vpush_ref(&type
, data_section
, data_section
->data_offset
, len
);
4361 ptr
= section_ptr_add(data_section
, len
);
4362 memcpy(ptr
, funcname
, len
);
4367 #ifdef TCC_TARGET_PE
4368 t
= VT_SHORT
| VT_UNSIGNED
;
4374 /* string parsing */
4377 if (tcc_state
->warn_write_strings
)
4382 memset(&ad
, 0, sizeof(AttributeDef
));
4383 decl_initializer_alloc(&type
, &ad
, VT_CONST
, 2, 0, 0);
4388 if (parse_btype(&type
, &ad
)) {
4389 type_decl(&type
, &ad
, &n
, TYPE_ABSTRACT
);
4391 /* check ISOC99 compound literal */
4393 /* data is allocated locally by default */
4398 /* all except arrays are lvalues */
4399 if (!(type
.t
& VT_ARRAY
))
4400 r
|= lvalue_type(type
.t
);
4401 memset(&ad
, 0, sizeof(AttributeDef
));
4402 decl_initializer_alloc(&type
, &ad
, r
, 1, 0, 0);
4404 if (sizeof_caller
) {
4411 } else if (tok
== '{') {
4412 int saved_nocode_wanted
= nocode_wanted
;
4414 tcc_error("expected constant");
4415 /* save all registers */
4417 /* statement expression : we do not accept break/continue
4418 inside as GCC does. We do retain the nocode_wanted state,
4419 as statement expressions can't ever be entered from the
4420 outside, so any reactivation of code emission (from labels
4421 or loop heads) can be disabled again after the end of it. */
4422 block(NULL
, NULL
, 1);
4423 nocode_wanted
= saved_nocode_wanted
;
4438 /* functions names must be treated as function pointers,
4439 except for unary '&' and sizeof. Since we consider that
4440 functions are not lvalues, we only have to handle it
4441 there and in function calls. */
4442 /* arrays can also be used although they are not lvalues */
4443 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
&&
4444 !(vtop
->type
.t
& VT_ARRAY
) && !(vtop
->type
.t
& VT_LLOCAL
))
4446 mk_pointer(&vtop
->type
);
4452 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
4454 boolean
.t
= VT_BOOL
;
4456 vtop
->c
.i
= !vtop
->c
.i
;
4457 } else if ((vtop
->r
& VT_VALMASK
) == VT_CMP
)
4461 vseti(VT_JMP
, gvtst(1, 0));
4473 if ((vtop
->type
.t
& VT_BTYPE
) == VT_PTR
)
4474 tcc_error("pointer not accepted for unary plus");
4475 /* In order to force cast, we add zero, except for floating point
4476 where we really need an noop (otherwise -0.0 will be transformed
4478 if (!is_float(vtop
->type
.t
)) {
4489 unary_type(&type
); // Perform a in_sizeof = 0;
4490 size
= type_size(&type
, &align
);
4491 if (t
== TOK_SIZEOF
) {
4492 if (!(type
.t
& VT_VLA
)) {
4494 tcc_error("sizeof applied to an incomplete type");
4497 vla_runtime_type_size(&type
, &align
);
4502 vtop
->type
.t
|= VT_UNSIGNED
;
4505 case TOK_builtin_expect
:
4507 /* __builtin_expect is a no-op for now */
4519 case TOK_builtin_types_compatible_p
:
4528 type1
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
4529 type2
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
4530 vpushi(is_compatible_types(&type1
, &type2
));
4533 case TOK_builtin_choose_expr
:
4560 case TOK_builtin_constant_p
:
4567 res
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
4574 case TOK_builtin_frame_address
:
4575 case TOK_builtin_return_address
:
4582 if (tok
!= TOK_CINT
) {
4583 tcc_error("%s only takes positive integers",
4584 tok1
== TOK_builtin_return_address
?
4585 "__builtin_return_address" :
4586 "__builtin_frame_address");
4588 level
= (uint32_t)tokc
.i
;
4593 vset(&type
, VT_LOCAL
, 0); /* local frame */
4595 mk_pointer(&vtop
->type
);
4596 indir(); /* -> parent frame */
4598 if (tok1
== TOK_builtin_return_address
) {
4599 // assume return address is just above frame pointer on stack
4602 mk_pointer(&vtop
->type
);
4607 #ifdef TCC_TARGET_X86_64
4608 #ifdef TCC_TARGET_PE
4609 case TOK_builtin_va_start
:
4617 if ((vtop
->r
& VT_VALMASK
) != VT_LOCAL
)
4618 tcc_error("__builtin_va_start expects a local variable");
4619 vtop
->r
&= ~(VT_LVAL
| VT_REF
);
4620 vtop
->type
= char_pointer_type
;
4626 case TOK_builtin_va_arg_types
:
4633 vpushi(classify_x86_64_va_arg(&type
));
4639 #ifdef TCC_TARGET_ARM64
4640 case TOK___va_start
: {
4650 vtop
->type
.t
= VT_VOID
;
4653 case TOK___va_arg
: {
4666 case TOK___arm64_clear_cache
: {
4675 vtop
->type
.t
= VT_VOID
;
4679 /* pre operations */
4690 t
= vtop
->type
.t
& VT_BTYPE
;
4692 /* In IEEE negate(x) isn't subtract(0,x), but rather
4696 vtop
->c
.f
= -1.0 * 0.0;
4697 else if (t
== VT_DOUBLE
)
4698 vtop
->c
.d
= -1.0 * 0.0;
4700 vtop
->c
.ld
= -1.0 * 0.0;
4708 goto tok_identifier
;
4710 /* allow to take the address of a label */
4711 if (tok
< TOK_UIDENT
)
4712 expect("label identifier");
4713 s
= label_find(tok
);
4715 s
= label_push(&global_label_stack
, tok
, LABEL_FORWARD
);
4717 if (s
->r
== LABEL_DECLARED
)
4718 s
->r
= LABEL_FORWARD
;
4721 s
->type
.t
= VT_VOID
;
4722 mk_pointer(&s
->type
);
4723 s
->type
.t
|= VT_STATIC
;
4725 vpushsym(&s
->type
, s
);
4729 // special qnan , snan and infinity values
4731 vpush64(VT_DOUBLE
, 0x7ff8000000000000ULL
);
4735 vpush64(VT_DOUBLE
, 0x7ff0000000000001ULL
);
4739 vpush64(VT_DOUBLE
, 0x7ff0000000000000ULL
);
4748 expect("identifier");
4751 const char *name
= get_tok_str(t
, NULL
);
4753 tcc_error("'%s' undeclared", name
);
4754 /* for simple function calls, we tolerate undeclared
4755 external reference to int() function */
4756 if (tcc_state
->warn_implicit_function_declaration
4757 #ifdef TCC_TARGET_PE
4758 /* people must be warned about using undeclared WINAPI functions
4759 (which usually start with uppercase letter) */
4760 || (name
[0] >= 'A' && name
[0] <= 'Z')
4763 tcc_warning("implicit declaration of function '%s'", name
);
4764 s
= external_global_sym(t
, &func_old_type
, 0);
4768 /* A symbol that has a register is a local register variable,
4769 which starts out as VT_LOCAL value. */
4770 if ((r
& VT_VALMASK
) < VT_CONST
)
4771 r
= (r
& ~VT_VALMASK
) | VT_LOCAL
;
4773 vset(&s
->type
, r
, s
->c
);
4774 /* Point to s as backpointer (even without r&VT_SYM).
4775 Will be used by at least the x86 inline asm parser for
4778 if (vtop
->r
& VT_SYM
) {
4784 /* post operations */
4786 if (tok
== TOK_INC
|| tok
== TOK_DEC
) {
4789 } else if (tok
== '.' || tok
== TOK_ARROW
|| tok
== TOK_CDOUBLE
) {
4792 if (tok
== TOK_ARROW
)
4794 qualifiers
= vtop
->type
.t
& (VT_CONSTANT
| VT_VOLATILE
);
4797 /* expect pointer on structure */
4798 if ((vtop
->type
.t
& VT_BTYPE
) != VT_STRUCT
)
4799 expect("struct or union");
4800 if (tok
== TOK_CDOUBLE
)
4801 expect("field name");
4803 if (tok
== TOK_CINT
|| tok
== TOK_CUINT
)
4804 expect("field name");
4805 s
= find_field(&vtop
->type
, tok
);
4807 tcc_error("field not found: %s", get_tok_str(tok
& ~SYM_FIELD
, &tokc
));
4808 /* add field offset to pointer */
4809 vtop
->type
= char_pointer_type
; /* change type to 'char *' */
4812 /* change type to field type, and set to lvalue */
4813 vtop
->type
= s
->type
;
4814 vtop
->type
.t
|= qualifiers
;
4815 /* an array is never an lvalue */
4816 if (!(vtop
->type
.t
& VT_ARRAY
)) {
4817 vtop
->r
|= lvalue_type(vtop
->type
.t
);
4818 #ifdef CONFIG_TCC_BCHECK
4819 /* if bound checking, the referenced pointer must be checked */
4820 if (tcc_state
->do_bounds_check
&& (vtop
->r
& VT_VALMASK
) != VT_LOCAL
)
4821 vtop
->r
|= VT_MUSTBOUND
;
4825 } else if (tok
== '[') {
4831 } else if (tok
== '(') {
4834 int nb_args
, ret_nregs
, ret_align
, regsize
, variadic
;
4837 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
) {
4838 /* pointer test (no array accepted) */
4839 if ((vtop
->type
.t
& (VT_BTYPE
| VT_ARRAY
)) == VT_PTR
) {
4840 vtop
->type
= *pointed_type(&vtop
->type
);
4841 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
)
4845 expect("function pointer");
4848 vtop
->r
&= ~VT_LVAL
; /* no lvalue */
4850 /* get return type */
4853 sa
= s
->next
; /* first parameter */
4854 nb_args
= regsize
= 0;
4856 /* compute first implicit argument if a structure is returned */
4857 if ((s
->type
.t
& VT_BTYPE
) == VT_STRUCT
) {
4858 variadic
= (s
->c
== FUNC_ELLIPSIS
);
4859 ret_nregs
= gfunc_sret(&s
->type
, variadic
, &ret
.type
,
4860 &ret_align
, ®size
);
4862 /* get some space for the returned structure */
4863 size
= type_size(&s
->type
, &align
);
4864 #ifdef TCC_TARGET_ARM64
4865 /* On arm64, a small struct is return in registers.
4866 It is much easier to write it to memory if we know
4867 that we are allowed to write some extra bytes, so
4868 round the allocated space up to a power of 2: */
4870 while (size
& (size
- 1))
4871 size
= (size
| (size
- 1)) + 1;
4873 loc
= (loc
- size
) & -align
;
4875 ret
.r
= VT_LOCAL
| VT_LVAL
;
4876 /* pass it as 'int' to avoid structure arg passing
4878 vseti(VT_LOCAL
, loc
);
4888 /* return in register */
4889 if (is_float(ret
.type
.t
)) {
4890 ret
.r
= reg_fret(ret
.type
.t
);
4891 #ifdef TCC_TARGET_X86_64
4892 if ((ret
.type
.t
& VT_BTYPE
) == VT_QFLOAT
)
4896 #ifndef TCC_TARGET_ARM64
4897 #ifdef TCC_TARGET_X86_64
4898 if ((ret
.type
.t
& VT_BTYPE
) == VT_QLONG
)
4900 if ((ret
.type
.t
& VT_BTYPE
) == VT_LLONG
)
4911 gfunc_param_typed(s
, sa
);
4921 tcc_error("too few arguments to function");
4923 gfunc_call(nb_args
);
4926 for (r
= ret
.r
+ ret_nregs
+ !ret_nregs
; r
-- > ret
.r
;) {
4927 vsetc(&ret
.type
, r
, &ret
.c
);
4928 vtop
->r2
= ret
.r2
; /* Loop only happens when r2 is VT_CONST */
4931 /* handle packed struct return */
4932 if (((s
->type
.t
& VT_BTYPE
) == VT_STRUCT
) && ret_nregs
) {
4935 size
= type_size(&s
->type
, &align
);
4936 /* We're writing whole regs often, make sure there's enough
4937 space. Assume register size is power of 2. */
4938 if (regsize
> align
)
4940 loc
= (loc
- size
) & -align
;
4944 vset(&ret
.type
, VT_LOCAL
| VT_LVAL
, addr
+ offset
);
4948 if (--ret_nregs
== 0)
4952 vset(&s
->type
, VT_LOCAL
| VT_LVAL
, addr
);
4960 ST_FUNC
void expr_prod(void)
4965 while (tok
== '*' || tok
== '/' || tok
== '%') {
4973 ST_FUNC
void expr_sum(void)
4978 while (tok
== '+' || tok
== '-') {
4986 static void expr_shift(void)
4991 while (tok
== TOK_SHL
|| tok
== TOK_SAR
) {
4999 static void expr_cmp(void)
5004 while ((tok
>= TOK_ULE
&& tok
<= TOK_GT
) ||
5005 tok
== TOK_ULT
|| tok
== TOK_UGE
) {
5013 static void expr_cmpeq(void)
5018 while (tok
== TOK_EQ
|| tok
== TOK_NE
) {
5026 static void expr_and(void)
5029 while (tok
== '&') {
5036 static void expr_xor(void)
5039 while (tok
== '^') {
5046 static void expr_or(void)
5049 while (tok
== '|') {
5056 /* XXX: fix this mess */
5057 static void expr_land_const(void)
5060 while (tok
== TOK_LAND
) {
5066 static void expr_lor_const(void)
5069 while (tok
== TOK_LOR
) {
5076 static void expr_land(void)
5079 if (tok
== TOK_LAND
) {
5082 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
5090 while (tok
== TOK_LAND
) {
5098 gen_cast(&int_type
);
5106 if (tok
!= TOK_LAND
) {
5119 static void expr_lor(void)
5122 if (tok
== TOK_LOR
) {
5125 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
5133 while (tok
== TOK_LOR
) {
5141 gen_cast(&int_type
);
5149 if (tok
!= TOK_LOR
) {
5162 /* Assuming vtop is a value used in a conditional context
5163 (i.e. compared with zero) return 0 if it's false, 1 if
5164 true and -1 if it can't be statically determined. */
5165 static int condition_3way(void)
5168 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
&&
5169 (!(vtop
->r
& VT_SYM
) ||
5170 !(vtop
->sym
->type
.t
& VT_WEAK
))) {
5172 boolean
.t
= VT_BOOL
;
5181 static void expr_cond(void)
5183 int tt
, u
, r1
, r2
, rc
, t1
, t2
, bt1
, bt2
, islv
, c
, g
;
5185 CType type
, type1
, type2
;
5190 c
= condition_3way();
5191 g
= (tok
== ':' && gnu_ext
);
5193 /* needed to avoid having different registers saved in
5195 if (is_float(vtop
->type
.t
)) {
5197 #ifdef TCC_TARGET_X86_64
5198 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LDOUBLE
) {
5223 sv
= *vtop
; /* save value to handle it later */
5224 vtop
--; /* no vpop so that FP stack is not flushed */
5242 bt1
= t1
& VT_BTYPE
;
5244 bt2
= t2
& VT_BTYPE
;
5245 /* cast operands to correct type according to ISOC rules */
5246 if (is_float(bt1
) || is_float(bt2
)) {
5247 if (bt1
== VT_LDOUBLE
|| bt2
== VT_LDOUBLE
) {
5248 type
.t
= VT_LDOUBLE
;
5250 } else if (bt1
== VT_DOUBLE
|| bt2
== VT_DOUBLE
) {
5255 } else if (bt1
== VT_LLONG
|| bt2
== VT_LLONG
) {
5256 /* cast to biggest op */
5258 /* convert to unsigned if it does not fit in a long long */
5259 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
)) == (VT_LLONG
| VT_UNSIGNED
) ||
5260 (t2
& (VT_BTYPE
| VT_UNSIGNED
)) == (VT_LLONG
| VT_UNSIGNED
))
5261 type
.t
|= VT_UNSIGNED
;
5262 } else if (bt1
== VT_PTR
|| bt2
== VT_PTR
) {
5263 /* If one is a null ptr constant the result type
5265 if (is_null_pointer (vtop
))
5267 else if (is_null_pointer (&sv
))
5269 /* XXX: test pointer compatibility, C99 has more elaborate
5273 } else if (bt1
== VT_FUNC
|| bt2
== VT_FUNC
) {
5274 /* XXX: test function pointer compatibility */
5275 type
= bt1
== VT_FUNC
? type1
: type2
;
5276 } else if (bt1
== VT_STRUCT
|| bt2
== VT_STRUCT
) {
5277 /* XXX: test structure compatibility */
5278 type
= bt1
== VT_STRUCT
? type1
: type2
;
5279 } else if (bt1
== VT_VOID
|| bt2
== VT_VOID
) {
5280 /* NOTE: as an extension, we accept void on only one side */
5283 /* integer operations */
5285 /* convert to unsigned if it does not fit in an integer */
5286 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
)) == (VT_INT
| VT_UNSIGNED
) ||
5287 (t2
& (VT_BTYPE
| VT_UNSIGNED
)) == (VT_INT
| VT_UNSIGNED
))
5288 type
.t
|= VT_UNSIGNED
;
5290 /* keep structs lvalue by transforming `(expr ? a : b)` to `*(expr ? &a : &b)` so
5291 that `(expr ? a : b).mem` does not error with "lvalue expected" */
5292 islv
= (vtop
->r
& VT_LVAL
) && (sv
.r
& VT_LVAL
) && VT_STRUCT
== (type
.t
& VT_BTYPE
);
5295 /* now we convert second operand */
5299 mk_pointer(&vtop
->type
);
5301 } else if (VT_STRUCT
== (vtop
->type
.t
& VT_BTYPE
))
5306 if (is_float(type
.t
)) {
5308 #ifdef TCC_TARGET_X86_64
5309 if ((type
.t
& VT_BTYPE
) == VT_LDOUBLE
) {
5313 } else if ((type
.t
& VT_BTYPE
) == VT_LLONG
) {
5314 /* for long longs, we use fixed registers to avoid having
5315 to handle a complicated move */
5326 /* this is horrible, but we must also convert first
5332 mk_pointer(&vtop
->type
);
5334 } else if (VT_STRUCT
== (vtop
->type
.t
& VT_BTYPE
))
5340 move_reg(r2
, r1
, type
.t
);
5350 static void expr_eq(void)
5356 (tok
>= TOK_A_MOD
&& tok
<= TOK_A_DIV
) ||
5357 tok
== TOK_A_XOR
|| tok
== TOK_A_OR
||
5358 tok
== TOK_A_SHL
|| tok
== TOK_A_SAR
) {
5373 ST_FUNC
void gexpr(void)
5384 /* parse an expression and return its type without any side effect. */
5385 static void expr_type(CType
*type
)
5395 /* parse a unary expression and return its type without any side
5397 static void unary_type(CType
*type
)
5406 /* parse a constant expression and return value in vtop. */
5407 static void expr_const1(void)
5414 /* parse an integer constant and return its value. */
5415 static inline int64_t expr_const64(void)
5419 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) != VT_CONST
)
5420 expect("constant expression");
5426 /* parse an integer constant and return its value.
5427 Complain if it doesn't fit 32bit (signed or unsigned). */
5428 ST_FUNC
int expr_const(void)
5431 int64_t wc
= expr_const64();
5433 if (c
!= wc
&& (unsigned)c
!= wc
)
5434 tcc_error("constant exceeds 32 bit");
5438 /* return the label token if current token is a label, otherwise
5440 static int is_label(void)
5444 /* fast test first */
5445 if (tok
< TOK_UIDENT
)
5447 /* no need to save tokc because tok is an identifier */
5454 unget_tok(last_tok
);
5459 static void label_or_decl(int l
)
5463 /* fast test first */
5464 if (tok
>= TOK_UIDENT
)
5466 /* no need to save tokc because tok is an identifier */
5470 unget_tok(last_tok
);
5473 unget_tok(last_tok
);
5478 #ifndef TCC_TARGET_ARM64
5479 static void gfunc_return(CType
*func_type
)
5481 if ((func_type
->t
& VT_BTYPE
) == VT_STRUCT
) {
5482 CType type
, ret_type
;
5483 int ret_align
, ret_nregs
, regsize
;
5484 ret_nregs
= gfunc_sret(func_type
, func_var
, &ret_type
,
5485 &ret_align
, ®size
);
5486 if (0 == ret_nregs
) {
5487 /* if returning structure, must copy it to implicit
5488 first pointer arg location */
5491 vset(&type
, VT_LOCAL
| VT_LVAL
, func_vc
);
5494 /* copy structure value to pointer */
5497 /* returning structure packed into registers */
5498 int r
, size
, addr
, align
;
5499 size
= type_size(func_type
,&align
);
5500 if ((vtop
->r
!= (VT_LOCAL
| VT_LVAL
) ||
5501 (vtop
->c
.i
& (ret_align
-1)))
5502 && (align
& (ret_align
-1))) {
5503 loc
= (loc
- size
) & -ret_align
;
5506 vset(&type
, VT_LOCAL
| VT_LVAL
, addr
);
5510 vset(&ret_type
, VT_LOCAL
| VT_LVAL
, addr
);
5512 vtop
->type
= ret_type
;
5513 if (is_float(ret_type
.t
))
5514 r
= rc_fret(ret_type
.t
);
5525 if (--ret_nregs
== 0)
5527 /* We assume that when a structure is returned in multiple
5528 registers, their classes are consecutive values of the
5531 vtop
->c
.i
+= regsize
;
5535 } else if (is_float(func_type
->t
)) {
5536 gv(rc_fret(func_type
->t
));
5540 vtop
--; /* NOT vpop() because on x86 it would flush the fp stack */
5544 static int case_cmp(const void *pa
, const void *pb
)
5546 int64_t a
= (*(struct case_t
**) pa
)->v1
;
5547 int64_t b
= (*(struct case_t
**) pb
)->v1
;
5548 return a
< b
? -1 : a
> b
;
5551 static void gcase(struct case_t
**base
, int len
, int *bsym
)
5555 int ll
= (vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
;
5573 gtst_addr(0, p
->sym
); /* v1 <= x <= v2 */
5575 gcase(base
, len
/2, bsym
);
5576 if (cur_switch
->def_sym
)
5577 gjmp_addr(cur_switch
->def_sym
);
5579 *bsym
= gjmp(*bsym
);
5583 base
+= e
; len
-= e
;
5593 if (p
->v1
== p
->v2
) {
5595 gtst_addr(0, p
->sym
);
5605 gtst_addr(0, p
->sym
);
5611 static void block(int *bsym
, int *csym
, int is_expr
)
5613 int a
, b
, c
, d
, cond
;
5616 /* generate line number info */
5617 if (tcc_state
->do_debug
)
5618 tcc_debug_line(tcc_state
);
5621 /* default return value is (void) */
5623 vtop
->type
.t
= VT_VOID
;
5626 if (tok
== TOK_IF
) {
5628 int saved_nocode_wanted
= nocode_wanted
;
5633 cond
= condition_3way();
5639 nocode_wanted
|= 0x20000000;
5640 block(bsym
, csym
, 0);
5642 nocode_wanted
= saved_nocode_wanted
;
5644 if (c
== TOK_ELSE
) {
5649 nocode_wanted
|= 0x20000000;
5650 block(bsym
, csym
, 0);
5651 gsym(d
); /* patch else jmp */
5653 nocode_wanted
= saved_nocode_wanted
;
5656 } else if (tok
== TOK_WHILE
) {
5657 int saved_nocode_wanted
;
5658 nocode_wanted
&= ~0x20000000;
5668 saved_nocode_wanted
= nocode_wanted
;
5670 nocode_wanted
= saved_nocode_wanted
;
5675 } else if (tok
== '{') {
5677 int block_vla_sp_loc
= vla_sp_loc
, saved_vlas_in_scope
= vlas_in_scope
;
5680 /* record local declaration stack position */
5682 llabel
= local_label_stack
;
5685 /* handle local labels declarations */
5686 if (tok
== TOK_LABEL
) {
5689 if (tok
< TOK_UIDENT
)
5690 expect("label identifier");
5691 label_push(&local_label_stack
, tok
, LABEL_DECLARED
);
5701 while (tok
!= '}') {
5702 label_or_decl(VT_LOCAL
);
5706 block(bsym
, csym
, is_expr
);
5709 /* pop locally defined labels */
5710 label_pop(&local_label_stack
, llabel
);
5711 /* pop locally defined symbols */
5713 /* In the is_expr case (a statement expression is finished here),
5714 vtop might refer to symbols on the local_stack. Either via the
5715 type or via vtop->sym. We can't pop those nor any that in turn
5716 might be referred to. To make it easier we don't roll back
5717 any symbols in that case; some upper level call to block() will
5718 do that. We do have to remove such symbols from the lookup
5719 tables, though. sym_pop will do that. */
5720 sym_pop(&local_stack
, s
, is_expr
);
5722 /* Pop VLA frames and restore stack pointer if required */
5723 if (vlas_in_scope
> saved_vlas_in_scope
) {
5724 vla_sp_loc
= saved_vlas_in_scope
? block_vla_sp_loc
: vla_sp_root_loc
;
5727 vlas_in_scope
= saved_vlas_in_scope
;
5730 } else if (tok
== TOK_RETURN
) {
5734 gen_assign_cast(&func_vt
);
5735 gfunc_return(&func_vt
);
5738 /* jump unless last stmt in top-level block */
5739 if (tok
!= '}' || local_scope
!= 1)
5741 nocode_wanted
|= 0x20000000;
5742 } else if (tok
== TOK_BREAK
) {
5745 tcc_error("cannot break");
5746 *bsym
= gjmp(*bsym
);
5749 nocode_wanted
|= 0x20000000;
5750 } else if (tok
== TOK_CONTINUE
) {
5753 tcc_error("cannot continue");
5754 vla_sp_restore_root();
5755 *csym
= gjmp(*csym
);
5758 } else if (tok
== TOK_FOR
) {
5760 int saved_nocode_wanted
;
5761 nocode_wanted
&= ~0x20000000;
5767 /* c99 for-loop init decl? */
5768 if (!decl0(VT_LOCAL
, 1)) {
5769 /* no, regular for-loop init expr */
5795 saved_nocode_wanted
= nocode_wanted
;
5797 nocode_wanted
= saved_nocode_wanted
;
5802 sym_pop(&local_stack
, s
, 0);
5805 if (tok
== TOK_DO
) {
5806 int saved_nocode_wanted
;
5807 nocode_wanted
&= ~0x20000000;
5813 saved_nocode_wanted
= nocode_wanted
;
5821 nocode_wanted
= saved_nocode_wanted
;
5826 if (tok
== TOK_SWITCH
) {
5827 struct switch_t
*saved
, sw
;
5828 int saved_nocode_wanted
= nocode_wanted
;
5834 switchval
= *vtop
--;
5836 b
= gjmp(0); /* jump to first case */
5837 sw
.p
= NULL
; sw
.n
= 0; sw
.def_sym
= 0;
5841 nocode_wanted
= saved_nocode_wanted
;
5842 a
= gjmp(a
); /* add implicit break */
5845 qsort(sw
.p
, sw
.n
, sizeof(void*), case_cmp
);
5846 for (b
= 1; b
< sw
.n
; b
++)
5847 if (sw
.p
[b
- 1]->v2
>= sw
.p
[b
]->v1
)
5848 tcc_error("duplicate case value");
5849 /* Our switch table sorting is signed, so the compared
5850 value needs to be as well when it's 64bit. */
5851 if ((switchval
.type
.t
& VT_BTYPE
) == VT_LLONG
)
5852 switchval
.type
.t
&= ~VT_UNSIGNED
;
5854 gcase(sw
.p
, sw
.n
, &a
);
5857 gjmp_addr(sw
.def_sym
);
5858 dynarray_reset(&sw
.p
, &sw
.n
);
5863 if (tok
== TOK_CASE
) {
5864 struct case_t
*cr
= tcc_malloc(sizeof(struct case_t
));
5867 nocode_wanted
&= ~0x20000000;
5869 cr
->v1
= cr
->v2
= expr_const64();
5870 if (gnu_ext
&& tok
== TOK_DOTS
) {
5872 cr
->v2
= expr_const64();
5873 if (cr
->v2
< cr
->v1
)
5874 tcc_warning("empty case range");
5877 dynarray_add(&cur_switch
->p
, &cur_switch
->n
, cr
);
5880 goto block_after_label
;
5882 if (tok
== TOK_DEFAULT
) {
5887 if (cur_switch
->def_sym
)
5888 tcc_error("too many 'default'");
5889 cur_switch
->def_sym
= ind
;
5891 goto block_after_label
;
5893 if (tok
== TOK_GOTO
) {
5895 if (tok
== '*' && gnu_ext
) {
5899 if ((vtop
->type
.t
& VT_BTYPE
) != VT_PTR
)
5902 } else if (tok
>= TOK_UIDENT
) {
5903 s
= label_find(tok
);
5904 /* put forward definition if needed */
5906 s
= label_push(&global_label_stack
, tok
, LABEL_FORWARD
);
5908 if (s
->r
== LABEL_DECLARED
)
5909 s
->r
= LABEL_FORWARD
;
5911 vla_sp_restore_root();
5912 if (s
->r
& LABEL_FORWARD
)
5913 s
->jnext
= gjmp(s
->jnext
);
5915 gjmp_addr(s
->jnext
);
5918 expect("label identifier");
5921 } else if (tok
== TOK_ASM1
|| tok
== TOK_ASM2
|| tok
== TOK_ASM3
) {
5929 if (s
->r
== LABEL_DEFINED
)
5930 tcc_error("duplicate label '%s'", get_tok_str(s
->v
, NULL
));
5932 s
->r
= LABEL_DEFINED
;
5934 s
= label_push(&global_label_stack
, b
, LABEL_DEFINED
);
5938 /* we accept this, but it is a mistake */
5940 nocode_wanted
&= ~0x20000000;
5942 tcc_warning("deprecated use of label at end of compound statement");
5946 block(bsym
, csym
, is_expr
);
5949 /* expression case */
5964 #define EXPR_CONST 1
5967 static void parse_init_elem(int expr_type
)
5969 int saved_global_expr
;
5972 /* compound literals must be allocated globally in this case */
5973 saved_global_expr
= global_expr
;
5976 global_expr
= saved_global_expr
;
5977 /* NOTE: symbols are accepted */
5978 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) != VT_CONST
5979 #ifdef TCC_TARGET_PE
5980 || (vtop
->type
.t
& VT_IMPORT
)
5983 tcc_error("initializer element is not constant");
5991 /* t is the array or struct type. c is the array or struct
5992 address. cur_field is the pointer to the current
5993 value, for arrays the 'c' member contains the current start
5994 index and the 'r' contains the end index (in case of range init).
5995 'size_only' is true if only size info is needed (only used
5997 static void decl_designator(CType
*type
, Section
*sec
, unsigned long c
,
5998 Sym
**cur_field
, int size_only
)
6001 int notfirst
, index
, index_last
, align
, l
, nb_elems
, elem_size
;
6007 if (gnu_ext
&& (l
= is_label()) != 0)
6009 while (tok
== '[' || tok
== '.') {
6011 if (!(type
->t
& VT_ARRAY
))
6012 expect("array type");
6015 index
= expr_const();
6016 if (index
< 0 || (s
->c
>= 0 && index
>= s
->c
))
6017 tcc_error("invalid index");
6018 if (tok
== TOK_DOTS
&& gnu_ext
) {
6020 index_last
= expr_const();
6021 if (index_last
< 0 ||
6022 (s
->c
>= 0 && index_last
>= s
->c
) ||
6024 tcc_error("invalid index");
6030 (*cur_field
)->c
= index
;
6031 (*cur_field
)->r
= index_last
;
6033 type
= pointed_type(type
);
6034 elem_size
= type_size(type
, &align
);
6035 c
+= index
* elem_size
;
6036 /* NOTE: we only support ranges for last designator */
6037 nb_elems
= index_last
- index
+ 1;
6038 if (nb_elems
!= 1) {
6047 if ((type
->t
& VT_BTYPE
) != VT_STRUCT
)
6048 expect("struct/union type");
6049 f
= find_field(type
, l
);
6054 /* XXX: fix this mess by using explicit storage field */
6056 type1
.t
|= (type
->t
& ~VT_TYPE
);
6070 if (type
->t
& VT_ARRAY
) {
6071 index
= (*cur_field
)->c
;
6072 if (type
->ref
->c
>= 0 && index
>= type
->ref
->c
)
6073 tcc_error("index too large");
6074 type
= pointed_type(type
);
6075 c
+= index
* type_size(type
, &align
);
6078 while (f
&& (f
->v
& SYM_FIRST_ANOM
))
6079 *cur_field
= f
= f
->next
;
6081 tcc_error("too many field init");
6082 /* XXX: fix this mess by using explicit storage field */
6084 type1
.t
|= (type
->t
& ~VT_TYPE
);
6089 decl_initializer(type
, sec
, c
, 0, size_only
);
6091 /* XXX: make it more general */
6092 if (!size_only
&& nb_elems
> 1) {
6093 unsigned long c_end
;
6098 vset(type
, VT_LOCAL
|VT_LVAL
, c
);
6099 for (i
= 1; i
< nb_elems
; i
++) {
6100 vset(type
, VT_LOCAL
|VT_LVAL
, c
+ elem_size
* i
);
6106 c_end
= c
+ nb_elems
* elem_size
;
6107 if (c_end
> sec
->data_allocated
)
6108 section_realloc(sec
, c_end
);
6109 src
= sec
->data
+ c
;
6111 for(i
= 1; i
< nb_elems
; i
++) {
6113 memcpy(dst
, src
, elem_size
);
6119 /* store a value or an expression directly in global data or in local array */
6120 static void init_putv(CType
*type
, Section
*sec
, unsigned long c
)
6122 int bt
, bit_pos
, bit_size
;
6124 unsigned long long bit_mask
;
6128 dtype
.t
&= ~VT_CONSTANT
; /* need to do that to avoid false warning */
6132 /* XXX: not portable */
6133 /* XXX: generate error if incorrect relocation */
6134 gen_assign_cast(&dtype
);
6135 bt
= type
->t
& VT_BTYPE
;
6136 size
= type_size(type
, &align
);
6137 if (c
+ size
> sec
->data_allocated
) {
6138 section_realloc(sec
, c
+ size
);
6140 ptr
= sec
->data
+ c
;
6141 /* XXX: make code faster ? */
6142 if (!(type
->t
& VT_BITFIELD
)) {
6144 bit_size
= PTR_SIZE
* 8;
6147 bit_pos
= (vtop
->type
.t
>> VT_STRUCT_SHIFT
) & 0x3f;
6148 bit_size
= (vtop
->type
.t
>> (VT_STRUCT_SHIFT
+ 6)) & 0x3f;
6149 bit_mask
= (1LL << bit_size
) - 1;
6151 if ((vtop
->r
& (VT_SYM
|VT_CONST
)) == (VT_SYM
|VT_CONST
) &&
6152 vtop
->sym
->v
>= SYM_FIRST_ANOM
&&
6153 /* XXX This rejects compount literals like
6154 '(void *){ptr}'. The problem is that '&sym' is
6155 represented the same way, which would be ruled out
6156 by the SYM_FIRST_ANOM check above, but also '"string"'
6157 in 'char *p = "string"' is represented the same
6158 with the type being VT_PTR and the symbol being an
6159 anonymous one. That is, there's no difference in vtop
6160 between '(void *){x}' and '&(void *){x}'. Ignore
6161 pointer typed entities here. Hopefully no real code
6162 will every use compound literals with scalar type. */
6163 (vtop
->type
.t
& VT_BTYPE
) != VT_PTR
) {
6164 /* These come from compound literals, memcpy stuff over. */
6168 esym
= &((ElfW(Sym
) *)symtab_section
->data
)[vtop
->sym
->c
];
6169 ssec
= tcc_state
->sections
[esym
->st_shndx
];
6170 memmove (ptr
, ssec
->data
+ esym
->st_value
, size
);
6172 /* We need to copy over all memory contents, and that
6173 includes relocations. Use the fact that relocs are
6174 created it order, so look from the end of relocs
6175 until we hit one before the copied region. */
6176 int num_relocs
= ssec
->reloc
->data_offset
/ sizeof(*rel
);
6177 rel
= (ElfW_Rel
*)(ssec
->reloc
->data
+ ssec
->reloc
->data_offset
);
6178 while (num_relocs
--) {
6180 if (rel
->r_offset
>= esym
->st_value
+ size
)
6182 if (rel
->r_offset
< esym
->st_value
)
6184 /* Note: if the same fields are initialized multiple
6185 times (possible with designators) then we possibly
6186 add multiple relocations for the same offset here.
6187 That would lead to wrong code, the last reloc needs
6188 to win. We clean this up later after the whole
6189 initializer is parsed. */
6190 put_elf_reloca(symtab_section
, sec
,
6191 c
+ rel
->r_offset
- esym
->st_value
,
6192 ELFW(R_TYPE
)(rel
->r_info
),
6193 ELFW(R_SYM
)(rel
->r_info
),
6194 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
6203 if ((vtop
->r
& VT_SYM
) &&
6209 (bt
== VT_LLONG
&& bit_size
!= 64) ||
6213 (bt
== VT_INT
&& bit_size
!= 32)
6216 tcc_error("initializer element is not computable at load time");
6218 /* XXX: when cross-compiling we assume that each type has the
6219 same representation on host and target, which is likely to
6220 be wrong in the case of long double */
6222 vtop
->c
.i
= (vtop
->c
.i
!= 0);
6224 *(char *)ptr
|= (vtop
->c
.i
& bit_mask
) << bit_pos
;
6227 *(short *)ptr
|= (vtop
->c
.i
& bit_mask
) << bit_pos
;
6230 *(double *)ptr
= vtop
->c
.d
;
6233 if (sizeof(long double) == LDOUBLE_SIZE
)
6234 *(long double *)ptr
= vtop
->c
.ld
;
6235 else if (sizeof(double) == LDOUBLE_SIZE
)
6236 *(double *)ptr
= vtop
->c
.ld
;
6238 tcc_error("can't cross compile long double constants");
6242 *(long long *)ptr
|= (vtop
->c
.i
& bit_mask
) << bit_pos
;
6249 addr_t val
= (vtop
->c
.i
& bit_mask
) << bit_pos
;
6250 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
6251 if (vtop
->r
& VT_SYM
)
6252 greloca(sec
, vtop
->sym
, c
, R_DATA_PTR
, val
);
6254 *(addr_t
*)ptr
|= val
;
6256 if (vtop
->r
& VT_SYM
)
6257 greloc(sec
, vtop
->sym
, c
, R_DATA_PTR
);
6258 *(addr_t
*)ptr
|= val
;
6264 int val
= (vtop
->c
.i
& bit_mask
) << bit_pos
;
6265 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
6266 if (vtop
->r
& VT_SYM
)
6267 greloca(sec
, vtop
->sym
, c
, R_DATA_PTR
, val
);
6271 if (vtop
->r
& VT_SYM
)
6272 greloc(sec
, vtop
->sym
, c
, R_DATA_PTR
);
6281 vset(&dtype
, VT_LOCAL
|VT_LVAL
, c
);
6288 /* put zeros for variable based init */
6289 static void init_putz(Section
*sec
, unsigned long c
, int size
)
6292 /* nothing to do because globals are already set to zero */
6294 vpush_global_sym(&func_old_type
, TOK_memset
);
6296 #ifdef TCC_TARGET_ARM
6307 /* 't' contains the type and storage info. 'c' is the offset of the
6308 object in section 'sec'. If 'sec' is NULL, it means stack based
6309 allocation. 'first' is true if array '{' must be read (multi
6310 dimension implicit array init handling). 'size_only' is true if
6311 size only evaluation is wanted (only for arrays). */
6312 static void decl_initializer(CType
*type
, Section
*sec
, unsigned long c
,
6313 int first
, int size_only
)
6315 int index
, array_length
, n
, no_oblock
, nb
, parlevel
, parlevel1
, i
;
6322 /* If we currently are at an '}' or ',' we have read an initializer
6323 element in one of our callers, and not yet consumed it. */
6324 have_elem
= tok
== '}' || tok
== ',';
6325 if (!have_elem
&& tok
!= '{' &&
6326 /* In case of strings we have special handling for arrays, so
6327 don't consume them as initializer value (which would commit them
6328 to some anonymous symbol). */
6329 tok
!= TOK_LSTR
&& tok
!= TOK_STR
&&
6331 parse_init_elem(!sec
? EXPR_ANY
: EXPR_CONST
);
6336 !(type
->t
& VT_ARRAY
) &&
6337 /* Use i_c_parameter_t, to strip toplevel qualifiers.
6338 The source type might have VT_CONSTANT set, which is
6339 of course assignable to non-const elements. */
6340 is_compatible_parameter_types(type
, &vtop
->type
)) {
6341 init_putv(type
, sec
, c
);
6342 } else if (type
->t
& VT_ARRAY
) {
6346 t1
= pointed_type(type
);
6347 size1
= type_size(t1
, &align1
);
6350 if ((first
&& tok
!= TOK_LSTR
&& tok
!= TOK_STR
) ||
6353 tcc_error("character array initializer must be a literal,"
6354 " optionally enclosed in braces");
6359 /* only parse strings here if correct type (otherwise: handle
6360 them as ((w)char *) expressions */
6361 if ((tok
== TOK_LSTR
&&
6362 #ifdef TCC_TARGET_PE
6363 (t1
->t
& VT_BTYPE
) == VT_SHORT
&& (t1
->t
& VT_UNSIGNED
)
6365 (t1
->t
& VT_BTYPE
) == VT_INT
6367 ) || (tok
== TOK_STR
&& (t1
->t
& VT_BTYPE
) == VT_BYTE
)) {
6368 while (tok
== TOK_STR
|| tok
== TOK_LSTR
) {
6371 /* compute maximum number of chars wanted */
6373 cstr_len
= tokc
.str
.size
;
6375 cstr_len
= tokc
.str
.size
/ sizeof(nwchar_t
);
6378 if (n
>= 0 && nb
> (n
- array_length
))
6379 nb
= n
- array_length
;
6382 tcc_warning("initializer-string for array is too long");
6383 /* in order to go faster for common case (char
6384 string in global variable, we handle it
6386 if (sec
&& tok
== TOK_STR
&& size1
== 1) {
6387 memcpy(sec
->data
+ c
+ array_length
, tokc
.str
.data
, nb
);
6391 ch
= ((unsigned char *)tokc
.str
.data
)[i
];
6393 ch
= ((nwchar_t
*)tokc
.str
.data
)[i
];
6395 init_putv(t1
, sec
, c
+ (array_length
+ i
) * size1
);
6402 /* only add trailing zero if enough storage (no
6403 warning in this case since it is standard) */
6404 if (n
< 0 || array_length
< n
) {
6407 init_putv(t1
, sec
, c
+ (array_length
* size1
));
6417 while (tok
!= '}' || have_elem
) {
6418 decl_designator(type
, sec
, c
, &f
, size_only
);
6421 /* must put zero in holes (note that doing it that way
6422 ensures that it even works with designators) */
6423 if (!size_only
&& array_length
< index
) {
6424 init_putz(sec
, c
+ array_length
* size1
,
6425 (index
- array_length
) * size1
);
6427 if (type
->t
& VT_ARRAY
) {
6428 index
= indexsym
.c
= ++indexsym
.r
;
6430 index
= index
+ type_size(&f
->type
, &align1
);
6431 if (s
->type
.t
== TOK_UNION
)
6436 if (index
> array_length
)
6437 array_length
= index
;
6439 if (type
->t
& VT_ARRAY
) {
6440 /* special test for multi dimensional arrays (may not
6441 be strictly correct if designators are used at the
6443 if (no_oblock
&& index
>= n
)
6446 if (no_oblock
&& f
== NULL
)
6454 /* put zeros at the end */
6455 if (!size_only
&& array_length
< n
) {
6456 init_putz(sec
, c
+ array_length
* size1
,
6457 (n
- array_length
) * size1
);
6461 /* patch type size if needed, which happens only for array types */
6463 s
->c
= array_length
;
6464 } else if ((type
->t
& VT_BTYPE
) == VT_STRUCT
) {
6467 if (first
|| tok
== '{') {
6476 } else if (tok
== '{') {
6478 decl_initializer(type
, sec
, c
, first
, size_only
);
6480 } else if (size_only
) {
6481 /* If we supported only ISO C we wouldn't have to accept calling
6482 this on anything than an array size_only==1 (and even then
6483 only on the outermost level, so no recursion would be needed),
6484 because initializing a flex array member isn't supported.
6485 But GNU C supports it, so we need to recurse even into
6486 subfields of structs and arrays when size_only is set. */
6487 /* just skip expression */
6488 parlevel
= parlevel1
= 0;
6489 while ((parlevel
> 0 || parlevel1
> 0 ||
6490 (tok
!= '}' && tok
!= ',')) && tok
!= -1) {
6493 else if (tok
== ')') {
6494 if (parlevel
== 0 && parlevel1
== 0)
6498 else if (tok
== '{')
6500 else if (tok
== '}') {
6501 if (parlevel
== 0 && parlevel1
== 0)
6509 /* This should happen only when we haven't parsed
6510 the init element above for fear of committing a
6511 string constant to memory too early. */
6512 if (tok
!= TOK_STR
&& tok
!= TOK_LSTR
)
6513 expect("string constant");
6514 parse_init_elem(!sec
? EXPR_ANY
: EXPR_CONST
);
6516 init_putv(type
, sec
, c
);
6520 /* parse an initializer for type 't' if 'has_init' is non zero, and
6521 allocate space in local or global data space ('r' is either
6522 VT_LOCAL or VT_CONST). If 'v' is non zero, then an associated
6523 variable 'v' of scope 'scope' is declared before initializers
6524 are parsed. If 'v' is zero, then a reference to the new object
6525 is put in the value stack. If 'has_init' is 2, a special parsing
6526 is done to handle string constants. */
6527 static void decl_initializer_alloc(CType
*type
, AttributeDef
*ad
, int r
,
6528 int has_init
, int v
, int scope
)
6530 int size
, align
, addr
, data_offset
;
6532 ParseState saved_parse_state
= {0};
6533 TokenString
*init_str
= NULL
;
6535 Sym
*flexible_array
;
6537 flexible_array
= NULL
;
6538 if ((type
->t
& VT_BTYPE
) == VT_STRUCT
) {
6539 Sym
*field
= type
->ref
->next
;
6542 field
= field
->next
;
6543 if (field
->type
.t
& VT_ARRAY
&& field
->type
.ref
->c
< 0)
6544 flexible_array
= field
;
6548 size
= type_size(type
, &align
);
6549 /* If unknown size, we must evaluate it before
6550 evaluating initializers because
6551 initializers can generate global data too
6552 (e.g. string pointers or ISOC99 compound
6553 literals). It also simplifies local
6554 initializers handling */
6555 if (size
< 0 || (flexible_array
&& has_init
)) {
6557 tcc_error("unknown type size");
6558 /* get all init string */
6559 init_str
= tok_str_alloc();
6560 if (has_init
== 2) {
6561 /* only get strings */
6562 while (tok
== TOK_STR
|| tok
== TOK_LSTR
) {
6563 tok_str_add_tok(init_str
);
6568 while (level
> 0 || (tok
!= ',' && tok
!= ';')) {
6570 tcc_error("unexpected end of file in initializer");
6571 tok_str_add_tok(init_str
);
6574 else if (tok
== '}') {
6584 tok_str_add(init_str
, -1);
6585 tok_str_add(init_str
, 0);
6588 save_parse_state(&saved_parse_state
);
6590 begin_macro(init_str
, 1);
6592 decl_initializer(type
, NULL
, 0, 1, 1);
6593 /* prepare second initializer parsing */
6594 macro_ptr
= init_str
->str
;
6597 /* if still unknown size, error */
6598 size
= type_size(type
, &align
);
6600 tcc_error("unknown type size");
6602 /* If there's a flex member and it was used in the initializer
6604 if (flexible_array
&&
6605 flexible_array
->type
.ref
->c
> 0)
6606 size
+= flexible_array
->type
.ref
->c
6607 * pointed_size(&flexible_array
->type
);
6608 /* take into account specified alignment if bigger */
6609 if (ad
->a
.aligned
) {
6610 int speca
= 1 << (ad
->a
.aligned
- 1);
6613 } else if (ad
->a
.packed
) {
6616 if ((r
& VT_VALMASK
) == VT_LOCAL
) {
6618 #ifdef CONFIG_TCC_BCHECK
6619 if (tcc_state
->do_bounds_check
&& (type
->t
& VT_ARRAY
)) {
6623 loc
= (loc
- size
) & -align
;
6625 #ifdef CONFIG_TCC_BCHECK
6626 /* handles bounds */
6627 /* XXX: currently, since we do only one pass, we cannot track
6628 '&' operators, so we add only arrays */
6629 if (tcc_state
->do_bounds_check
&& (type
->t
& VT_ARRAY
)) {
6631 /* add padding between regions */
6633 /* then add local bound info */
6634 bounds_ptr
= section_ptr_add(lbounds_section
, 2 * sizeof(addr_t
));
6635 bounds_ptr
[0] = addr
;
6636 bounds_ptr
[1] = size
;
6640 /* local variable */
6641 #ifdef CONFIG_TCC_ASM
6642 if (ad
->asm_label
) {
6643 int reg
= asm_parse_regvar(ad
->asm_label
);
6645 r
= (r
& ~VT_VALMASK
) | reg
;
6648 sym_push(v
, type
, r
, addr
);
6650 /* push local reference */
6651 vset(type
, r
, addr
);
6655 if (v
&& scope
== VT_CONST
) {
6656 /* see if the symbol was already defined */
6659 patch_storage(sym
, type
);
6660 if (sym
->type
.t
& VT_EXTERN
) {
6661 /* if the variable is extern, it was not allocated */
6662 sym
->type
.t
&= ~VT_EXTERN
;
6663 /* set array size if it was omitted in extern
6665 if ((sym
->type
.t
& VT_ARRAY
) &&
6666 sym
->type
.ref
->c
< 0 &&
6668 sym
->type
.ref
->c
= type
->ref
->c
;
6669 } else if (!has_init
) {
6670 /* we accept several definitions of the same
6671 global variable. this is tricky, because we
6672 must play with the SHN_COMMON type of the symbol */
6673 /* no init data, we won't add more to the symbol */
6674 update_storage(sym
);
6676 } else if (sym
->c
) {
6678 esym
= &((ElfW(Sym
) *)symtab_section
->data
)[sym
->c
];
6679 if (esym
->st_shndx
== data_section
->sh_num
)
6680 tcc_error("redefinition of '%s'", get_tok_str(v
, NULL
));
6685 /* allocate symbol in corresponding section */
6690 else if (tcc_state
->nocommon
)
6695 data_offset
= sec
->data_offset
;
6696 data_offset
= (data_offset
+ align
- 1) & -align
;
6698 /* very important to increment global pointer at this time
6699 because initializers themselves can create new initializers */
6700 data_offset
+= size
;
6701 #ifdef CONFIG_TCC_BCHECK
6702 /* add padding if bound check */
6703 if (tcc_state
->do_bounds_check
)
6706 sec
->data_offset
= data_offset
;
6707 /* allocate section space to put the data */
6708 if (sec
->sh_type
!= SHT_NOBITS
&&
6709 data_offset
> sec
->data_allocated
)
6710 section_realloc(sec
, data_offset
);
6711 /* align section if needed */
6712 if (align
> sec
->sh_addralign
)
6713 sec
->sh_addralign
= align
;
6715 addr
= 0; /* avoid warning */
6719 if (scope
!= VT_CONST
|| !sym
) {
6720 sym
= sym_push(v
, type
, r
| VT_SYM
, 0);
6721 sym
->asm_label
= ad
->asm_label
;
6723 /* update symbol definition */
6725 put_extern_sym(sym
, sec
, addr
, size
);
6727 put_extern_sym(sym
, SECTION_COMMON
, align
, size
);
6731 /* push global reference */
6732 sym
= get_sym_ref(type
, sec
, addr
, size
);
6733 vpushsym(type
, sym
);
6736 #ifdef CONFIG_TCC_BCHECK
6737 /* handles bounds now because the symbol must be defined
6738 before for the relocation */
6739 if (tcc_state
->do_bounds_check
) {
6742 greloc(bounds_section
, sym
, bounds_section
->data_offset
, R_DATA_PTR
);
6743 /* then add global bound info */
6744 bounds_ptr
= section_ptr_add(bounds_section
, 2 * sizeof(addr_t
));
6745 bounds_ptr
[0] = 0; /* relocated */
6746 bounds_ptr
[1] = size
;
6751 if (type
->t
& VT_VLA
) {
6754 /* save current stack pointer */
6755 if (vlas_in_scope
== 0) {
6756 if (vla_sp_root_loc
== -1)
6757 vla_sp_root_loc
= (loc
-= PTR_SIZE
);
6758 gen_vla_sp_save(vla_sp_root_loc
);
6761 vla_runtime_type_size(type
, &a
);
6762 gen_vla_alloc(type
, a
);
6763 gen_vla_sp_save(addr
);
6767 } else if (has_init
) {
6768 size_t oldreloc_offset
= 0;
6769 if (sec
&& sec
->reloc
)
6770 oldreloc_offset
= sec
->reloc
->data_offset
;
6771 decl_initializer(type
, sec
, addr
, 1, 0);
6772 if (sec
&& sec
->reloc
)
6773 squeeze_multi_relocs(sec
, oldreloc_offset
);
6774 /* patch flexible array member size back to -1, */
6775 /* for possible subsequent similar declarations */
6777 flexible_array
->type
.ref
->c
= -1;
6781 /* restore parse state if needed */
6784 restore_parse_state(&saved_parse_state
);
6788 /* parse an old style function declaration list */
6789 /* XXX: check multiple parameter */
6790 static void func_decl_list(Sym
*func_sym
)
6797 /* parse each declaration */
6798 while (tok
!= '{' && tok
!= ';' && tok
!= ',' && tok
!= TOK_EOF
&&
6799 tok
!= TOK_ASM1
&& tok
!= TOK_ASM2
&& tok
!= TOK_ASM3
) {
6800 if (!parse_btype(&btype
, &ad
))
6801 expect("declaration list");
6802 if (((btype
.t
& VT_BTYPE
) == VT_ENUM
||
6803 (btype
.t
& VT_BTYPE
) == VT_STRUCT
) &&
6805 /* we accept no variable after */
6809 type_decl(&type
, &ad
, &v
, TYPE_DIRECT
);
6810 /* find parameter in function parameter list */
6813 if ((s
->v
& ~SYM_FIELD
) == v
)
6817 tcc_error("declaration for parameter '%s' but no such parameter",
6818 get_tok_str(v
, NULL
));
6820 /* check that no storage specifier except 'register' was given */
6821 if (type
.t
& VT_STORAGE
)
6822 tcc_error("storage class specified for '%s'", get_tok_str(v
, NULL
));
6823 convert_parameter_type(&type
);
6824 /* we can add the type (NOTE: it could be local to the function) */
6826 /* accept other parameters */
6837 /* parse a function defined by symbol 'sym' and generate its code in
6838 'cur_text_section' */
6839 static void gen_function(Sym
*sym
)
6842 ind
= cur_text_section
->data_offset
;
6843 /* NOTE: we patch the symbol size later */
6844 put_extern_sym(sym
, cur_text_section
, ind
, 0);
6845 funcname
= get_tok_str(sym
->v
, NULL
);
6847 /* Initialize VLA state */
6849 vla_sp_root_loc
= -1;
6850 /* put debug symbol */
6851 tcc_debug_funcstart(tcc_state
, sym
);
6852 /* push a dummy symbol to enable local sym storage */
6853 sym_push2(&local_stack
, SYM_FIELD
, 0, 0);
6854 local_scope
= 1; /* for function parameters */
6855 gfunc_prolog(&sym
->type
);
6858 block(NULL
, NULL
, 0);
6862 cur_text_section
->data_offset
= ind
;
6863 label_pop(&global_label_stack
, NULL
);
6864 /* reset local stack */
6866 sym_pop(&local_stack
, NULL
, 0);
6867 /* end of function */
6868 /* patch symbol size */
6869 ((ElfW(Sym
) *)symtab_section
->data
)[sym
->c
].st_size
=
6871 tcc_debug_funcend(tcc_state
, ind
- func_ind
);
6872 /* It's better to crash than to generate wrong code */
6873 cur_text_section
= NULL
;
6874 funcname
= ""; /* for safety */
6875 func_vt
.t
= VT_VOID
; /* for safety */
6876 func_var
= 0; /* for safety */
6877 ind
= 0; /* for safety */
6882 static void gen_inline_functions(TCCState
*s
)
6885 int inline_generated
, i
, ln
;
6886 struct InlineFunc
*fn
;
6888 ln
= file
->line_num
;
6889 /* iterate while inline function are referenced */
6891 inline_generated
= 0;
6892 for (i
= 0; i
< s
->nb_inline_fns
; ++i
) {
6893 fn
= s
->inline_fns
[i
];
6895 if (sym
&& sym
->c
) {
6896 /* the function was used: generate its code and
6897 convert it to a normal function */
6900 pstrcpy(file
->filename
, sizeof file
->filename
, fn
->filename
);
6901 sym
->type
.t
&= ~VT_INLINE
;
6903 begin_macro(fn
->func_str
, 1);
6905 cur_text_section
= text_section
;
6909 inline_generated
= 1;
6912 if (!inline_generated
)
6915 file
->line_num
= ln
;
/* Release the recorded token streams of inline functions that were never
   emitted, plus the InlineFunc array itself. NOTE(review): fragmented
   extraction — the opening brace, the declaration of 'i', the freeing of
   'fn' itself (if any), and closing braces are missing from this
   fragment; compare with upstream TCC before editing. */
6918 ST_FUNC
void free_inline_functions(TCCState
*s
)
6921 /* free tokens of unused inline functions */
6922 for (i
= 0; i
< s
->nb_inline_fns
; ++i
) {
6923 struct InlineFunc
*fn
= s
->inline_fns
[i
];
/* tok_str_free presumably tolerates/handles the already-emitted case;
   the guard line (original 6924), if any, is absent here — confirm. */
6925 tok_str_free(fn
->func_str
);
/* Free the dynarray storage and reset the count to zero. */
6927 dynarray_reset(&s
->inline_fns
, &s
->nb_inline_fns
);
/* NOTE(review): this is the declaration-parsing workhorse of the
   compiler, but the fragment is heavily truncated — the fused original
   line numbers jump by tens of lines in places (e.g. 6943->6948,
   7003->7010, 7093->7095, 7172->7175), so local declarations ('btype',
   'ad', 'type', 'v', 'r', 'sym', 'ref', braces, 'next()' calls, returns)
   are missing. Comments below describe only what the visible lines show;
   code text is byte-identical. Do not attempt logic edits from this
   fragment — diff against upstream tccgen.c first. */
6930 /* 'l' is VT_LOCAL or VT_CONST to define default storage type */
6931 static int decl0(int l
, int is_for_loop_init
)
/* No base type parsed: handle the special no-declaration cases below. */
6939 if (!parse_btype(&btype
, &ad
)) {
6940 if (is_for_loop_init
)
6942 /* skip redundant ';' */
6943 /* XXX: find more elegant solution */
/* Top-level 'asm(...)' statement (global asm block), GNU extension. */
6948 if (l
== VT_CONST
&&
6949 (tok
== TOK_ASM1
|| tok
== TOK_ASM2
|| tok
== TOK_ASM3
)) {
6950 /* global asm block */
6954 /* special test for old K&R protos without explicit int
6955 type. Only accepted when defining global data */
6956 if (l
== VT_LOCAL
|| tok
< TOK_UIDENT
)
/* 'struct foo;' / 'enum foo;' style: a type was parsed but no
   declarator follows. Warn on anonymous structs that declare nothing. */
6960 if (((btype
.t
& VT_BTYPE
) == VT_ENUM
||
6961 (btype
.t
& VT_BTYPE
) == VT_STRUCT
) &&
6963 if ((btype
.t
& VT_BTYPE
) == VT_STRUCT
) {
6964 int v
= btype
.ref
->v
;
/* Anonymous (compiler-named) tag, not a field: it defines no object. */
6965 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) >= SYM_FIRST_ANOM
)
6966 tcc_warning("unnamed struct/union that defines no instances");
6971 while (1) { /* iterate thru each declaration */
6973 /* If the base type itself was an array type of unspecified
6974 size (like in 'typedef int arr[]; arr x = {1};') then
6975 we will overwrite the unknown size by the real one for
6976 this decl. We need to unshare the ref symbol holding
6978 if ((type
.t
& VT_ARRAY
) && type
.ref
->c
< 0) {
/* Unshare: push a private copy of the array ref so the typedef's
   unknown size (c < 0) is not clobbered for other users. */
6979 type
.ref
= sym_push(SYM_FIELD
, &type
.ref
->type
, 0, type
.ref
->c
);
/* Parse the declarator; 'v' receives the declared identifier. */
6981 type_decl(&type
, &ad
, &v
, TYPE_DIRECT
);
/* Debug dump of the parsed type (presumably under a verbose/debug
   conditional that is missing from this fragment — confirm). */
6985 type_to_str(buf
, sizeof(buf
), t
, get_tok_str(v
, NULL
));
6986 printf("type = '%s'\n", buf
);
/* ---- function declarator ---- */
6989 if ((type
.t
& VT_BTYPE
) == VT_FUNC
) {
6990 if ((type
.t
& VT_STATIC
) && (l
== VT_LOCAL
)) {
6991 tcc_error("function without file scope cannot be static");
6993 /* if old style function prototype, we accept a
6996 if (sym
->c
== FUNC_OLD
)
6997 func_decl_list(sym
);
/* GNU asm label after the declarator: 'int f() asm("name");'. */
7000 if (gnu_ext
&& (tok
== TOK_ASM1
|| tok
== TOK_ASM2
|| tok
== TOK_ASM3
)) {
7001 ad
.asm_label
= asm_label_instr();
7002 /* parse one last attribute list, after asm label */
7003 parse_attribute(&ad
);
7010 #ifdef TCC_TARGET_PE
/* PE (Windows) dllimport/dllexport handling. */
7011 if (ad
.a
.func_import
|| ad
.a
.func_export
) {
7012 if (type
.t
& (VT_STATIC
|VT_TYPEDEF
))
7013 tcc_error("cannot have dll linkage with static or typedef");
7014 if (ad
.a
.func_export
)
7015 type
.t
|= VT_EXPORT
;
7016 else if ((type
.t
& VT_BTYPE
) != VT_FUNC
)
7017 type
.t
|= VT_IMPORT
|VT_EXTERN
;
/* Fold ELF visibility (default/hidden/...) into the type word. */
7020 type
.t
|= ad
.a
.visibility
<< VT_VIS_SHIFT
;
/* ---- function DEFINITION (body follows) ---- */
7024 tcc_error("cannot use local functions");
7025 if ((type
.t
& VT_BTYPE
) != VT_FUNC
)
7026 expect("function definition");
7028 /* reject abstract declarators in function definition */
7030 while ((sym
= sym
->next
) != NULL
)
7031 if (!(sym
->v
& ~SYM_FIELD
))
7032 expect("identifier");
7034 /* XXX: cannot do better now: convert extern line to static inline */
7035 if ((type
.t
& (VT_EXTERN
| VT_INLINE
)) == (VT_EXTERN
| VT_INLINE
))
7036 type
.t
= (type
.t
& ~VT_EXTERN
) | VT_STATIC
;
/* A previous declaration of this name exists: reconcile it with the
   definition's type (the lookup line itself is absent here). */
7041 if ((sym
->type
.t
& VT_BTYPE
) != VT_FUNC
)
7044 ref
= sym
->type
.ref
;
7046 /* use func_call from prototype if not defined */
7047 if (ref
->a
.func_call
!= FUNC_CDECL
7048 && type
.ref
->a
.func_call
== FUNC_CDECL
)
7049 type
.ref
->a
.func_call
= ref
->a
.func_call
;
7051 /* use static from prototype */
7052 if (sym
->type
.t
& VT_STATIC
)
7053 type
.t
= (type
.t
& ~VT_EXTERN
) | VT_STATIC
;
7055 /* If the definition has no visibility use the
7056 one from prototype. */
7057 if (! (type
.t
& VT_VIS_MASK
))
7058 type
.t
|= sym
->type
.t
& VT_VIS_MASK
;
7060 /* apply other storage attributes from prototype */
7061 type
.t
|= sym
->type
.t
& (VT_EXPORT
|VT_WEAK
);
7063 if (!is_compatible_types(&sym
->type
, &type
)) {
7065 tcc_error("incompatible types for redefinition of '%s'",
7066 get_tok_str(v
, NULL
));
/* func_body already set means the function was defined once already. */
7068 if (ref
->a
.func_body
)
7069 tcc_error("redefinition of '%s'", get_tok_str(v
, NULL
));
7070 /* if symbol is already defined, then put complete type */
7074 /* put function symbol */
7075 sym
= global_identifier_push(v
, type
.t
, 0);
7076 sym
->type
.ref
= type
.ref
;
/* Mark as defined and give it the canonical storage class. */
7079 sym
->type
.ref
->a
.func_body
= 1;
7080 sym
->r
= VT_SYM
| VT_CONST
;
7082 /* static inline functions are just recorded as a kind
7083 of macro. Their code will be emitted at the end of
7084 the compilation unit only if they are used */
7085 if ((type
.t
& (VT_INLINE
| VT_STATIC
)) ==
7086 (VT_INLINE
| VT_STATIC
)) {
7088 struct InlineFunc
*fn
;
7089 const char *filename
;
7091 filename
= file
? file
->filename
: "";
/* InlineFunc ends in a flexible/inline filename buffer: allocate
   struct plus strlen(filename) extra bytes (the +1 for the NUL is
   presumably inside sizeof *fn — confirm against the struct). */
7092 fn
= tcc_malloc(sizeof *fn
+ strlen(filename
));
7093 strcpy(fn
->filename
, filename
);
7095 fn
->func_str
= tok_str_alloc();
/* Record raw tokens of the body until the matching '}' (the loop
   and brace-depth tracking lines are absent from this fragment). */
7101 tcc_error("unexpected end of file");
7102 tok_str_add_tok(fn
->func_str
);
7107 } else if (t
== '}') {
7109 if (block_level
== 0)
/* Terminate the recorded stream: -1 then 0 sentinel. */
7113 tok_str_add(fn
->func_str
, -1);
7114 tok_str_add(fn
->func_str
, 0);
7115 dynarray_add(&tcc_state
->inline_fns
, &tcc_state
->nb_inline_fns
, fn
);
/* Non-inline definition: compile the body now into the chosen
   section (gen_function call itself absent from this fragment). */
7118 /* compute text section */
7119 cur_text_section
= ad
.section
;
7120 if (!cur_text_section
)
7121 cur_text_section
= text_section
;
/* ---- plain declaration (no body) ---- */
7126 if (type
.t
& VT_TYPEDEF
) {
7127 /* save typedefed type */
7128 /* XXX: test storage specifiers ? */
/* Redefinition of a typedef in the same scope must match exactly. */
7130 if (sym
&& sym
->scope
== local_scope
) {
7131 if (!is_compatible_types(&sym
->type
, &type
)
7132 || !(sym
->type
.t
& VT_TYPEDEF
))
7133 tcc_error("incompatible redefinition of '%s'",
7134 get_tok_str(v
, NULL
));
7137 sym
= sym_push(v
, &type
, 0, 0);
7142 if ((type
.t
& VT_BTYPE
) == VT_FUNC
) {
7143 /* external function definition */
7144 /* specific case for func_call attribute */
7146 } else if (!(type
.t
& VT_ARRAY
)) {
7147 /* not lvalue if array */
7148 r
|= lvalue_type(type
.t
);
7150 has_init
= (tok
== '=');
7151 if (has_init
&& (type
.t
& VT_VLA
))
7152 tcc_error("variable length array cannot be initialized");
/* extern objects, function declarations, and uninitialized global
   static arrays of unknown size are treated as external symbols. */
7153 if ((type
.t
& VT_EXTERN
) || ((type
.t
& VT_BTYPE
) == VT_FUNC
) ||
7154 ((type
.t
& VT_ARRAY
) && (type
.t
& VT_STATIC
) &&
7155 !has_init
&& l
== VT_CONST
&& type
.ref
->c
< 0)) {
7156 /* external variable or function */
7157 /* NOTE: as GCC, uninitialized global static
7158 arrays of null size are considered as
7160 sym
= external_sym(v
, &type
, r
);
7161 sym
->asm_label
= ad
.asm_label
;
/* __attribute__((alias("target"))): bind this symbol to the
   already-defined target's section/value/size. */
7162 if (ad
.alias_target
) {
7167 alias_target
= sym_find(ad
.alias_target
);
7168 if (!alias_target
|| !alias_target
->c
)
7169 tcc_error("unsupported forward __alias__ attribute");
7170 esym
= &((ElfW(Sym
) *)symtab_section
->data
)[alias_target
->c
];
7171 tsec
.sh_num
= esym
->st_shndx
;
7172 put_extern_sym2(sym
, &tsec
, esym
->st_value
, esym
->st_size
, 0);
/* Ordinary definition with storage: allocate (and initialize). */
7175 if (type
.t
& VT_STATIC
)
7181 decl_initializer_alloc(&type
, &ad
, r
, has_init
, v
, l
);
7185 if (is_for_loop_init
)
7198 ST_FUNC
void decl(int l
)
7203 /* ------------------------------------------------------------------------- */