2 * TCC - Tiny C Compiler
4 * Copyright (c) 2001-2004 Fabrice Bellard
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
23 /********************************************************/
24 /* global variables */
26 /* loc : local variable index
27 ind : output code index
29 anon_sym: anonymous symbol index
31 ST_DATA
int rsym
, anon_sym
, ind
, loc
;
33 ST_DATA Sym
*sym_free_first
;
34 ST_DATA
void **sym_pools
;
35 ST_DATA
int nb_sym_pools
;
37 ST_DATA Sym
*global_stack
;
38 ST_DATA Sym
*local_stack
;
39 ST_DATA Sym
*define_stack
;
40 ST_DATA Sym
*global_label_stack
;
41 ST_DATA Sym
*local_label_stack
;
42 static int local_scope
;
44 static int section_sym
;
46 ST_DATA
int vlas_in_scope
; /* number of VLAs that are currently in scope */
47 ST_DATA
int vla_sp_root_loc
; /* vla_sp_loc for SP before any VLAs were pushed */
48 ST_DATA
int vla_sp_loc
; /* Pointer to variable holding location to store stack pointer on the stack when modifying stack pointer */
50 ST_DATA SValue __vstack
[1+VSTACK_SIZE
], *vtop
, *pvtop
;
52 ST_DATA
int const_wanted
; /* true if constant wanted */
53 ST_DATA
int nocode_wanted
; /* true if no code generation wanted for an expression */
54 ST_DATA
int global_expr
; /* true if compound literals must be allocated globally (used during initializers parsing */
55 ST_DATA CType func_vt
; /* current function return type (used by return instruction) */
56 ST_DATA
int func_var
; /* true if current function is variadic (used by return instruction) */
58 ST_DATA
int last_line_num
, last_ind
, func_ind
; /* debug last line number and pc */
59 ST_DATA
const char *funcname
;
61 ST_DATA CType char_pointer_type
, func_old_type
, int_type
, size_type
;
63 ST_DATA
struct switch_t
{
67 } **p
; int n
; /* list of case ranges */
68 int def_sym
; /* default symbol */
69 } *cur_switch
; /* current switch */
71 /* ------------------------------------------------------------------------- */
73 static void gen_cast(CType
*type
);
74 static inline CType
*pointed_type(CType
*type
);
75 static int is_compatible_types(CType
*type1
, CType
*type2
);
76 static int parse_btype(CType
*type
, AttributeDef
*ad
);
77 static void type_decl(CType
*type
, AttributeDef
*ad
, int *v
, int td
);
78 static void parse_expr_type(CType
*type
);
79 static void decl_initializer(CType
*type
, Section
*sec
, unsigned long c
, int first
, int size_only
);
80 static void block(int *bsym
, int *csym
, int is_expr
);
81 static void decl_initializer_alloc(CType
*type
, AttributeDef
*ad
, int r
, int has_init
, int v
, int scope
);
82 static int decl0(int l
, int is_for_loop_init
);
83 static void expr_eq(void);
84 static void expr_lor_const(void);
85 static void unary_type(CType
*type
);
86 static void vla_runtime_type_size(CType
*type
, int *a
);
87 static void vla_sp_restore(void);
88 static void vla_sp_restore_root(void);
89 static int is_compatible_parameter_types(CType
*type1
, CType
*type2
);
90 static void expr_type(CType
*type
);
91 static inline int64_t expr_const64(void);
92 ST_FUNC
void vpush64(int ty
, unsigned long long v
);
93 ST_FUNC
void vpush(CType
*type
);
94 ST_FUNC
int gvtst(int inv
, int t
);
95 ST_FUNC
int is_btype_size(int bt
);
96 static void gen_inline_functions(TCCState
*s
);
98 ST_INLN
int is_float(int t
)
102 return bt
== VT_LDOUBLE
|| bt
== VT_DOUBLE
|| bt
== VT_FLOAT
|| bt
== VT_QFLOAT
;
105 /* we use our own 'finite' function to avoid potential problems with
106 non standard math libs */
107 /* XXX: endianness dependent */
108 ST_FUNC
int ieee_finite(double d
)
111 memcpy(p
, &d
, sizeof(double));
112 return ((unsigned)((p
[1] | 0x800fffff) + 1)) >> 31;
115 ST_FUNC
void test_lvalue(void)
117 if (!(vtop
->r
& VT_LVAL
))
121 ST_FUNC
void check_vstack(void)
124 tcc_error("internal compiler error: vstack leak (%d)", vtop
- pvtop
);
127 /* ------------------------------------------------------------------------- */
128 /* vstack debugging aid */
131 void pv (const char *lbl
, int a
, int b
)
134 for (i
= a
; i
< a
+ b
; ++i
) {
135 SValue
*p
= &vtop
[-i
];
136 printf("%s vtop[-%d] : type.t:%04x r:%04x r2:%04x c.i:%d\n",
137 lbl
, i
, p
->type
.t
, p
->r
, p
->r2
, (int)p
->c
.i
);
142 /* ------------------------------------------------------------------------- */
143 ST_FUNC
void tccgen_start(TCCState
*s1
)
145 cur_text_section
= NULL
;
147 anon_sym
= SYM_FIRST_ANOM
;
152 /* define some often used types */
154 char_pointer_type
.t
= VT_BYTE
;
155 mk_pointer(&char_pointer_type
);
157 size_type
.t
= VT_INT
;
159 size_type
.t
= VT_LLONG
;
161 func_old_type
.t
= VT_FUNC
;
162 func_old_type
.ref
= sym_push(SYM_FIELD
, &int_type
, FUNC_CDECL
, FUNC_OLD
);
167 /* file info: full path + filename */
168 section_sym
= put_elf_sym(symtab_section
, 0, 0,
169 ELFW(ST_INFO
)(STB_LOCAL
, STT_SECTION
), 0,
170 text_section
->sh_num
, NULL
);
171 getcwd(buf
, sizeof(buf
));
173 normalize_slashes(buf
);
175 pstrcat(buf
, sizeof(buf
), "/");
176 put_stabs_r(buf
, N_SO
, 0, 0,
177 text_section
->data_offset
, text_section
, section_sym
);
178 put_stabs_r(file
->filename
, N_SO
, 0, 0,
179 text_section
->data_offset
, text_section
, section_sym
);
181 /* an elf symbol of type STT_FILE must be put so that STB_LOCAL
182 symbols can be safely used */
183 put_elf_sym(symtab_section
, 0, 0,
184 ELFW(ST_INFO
)(STB_LOCAL
, STT_FILE
), 0,
185 SHN_ABS
, file
->filename
);
187 #ifdef TCC_TARGET_ARM
192 ST_FUNC
void tccgen_end(TCCState
*s1
)
194 gen_inline_functions(s1
);
196 /* end of translation unit info */
198 put_stabs_r(NULL
, N_SO
, 0, 0,
199 text_section
->data_offset
, text_section
, section_sym
);
203 /* ------------------------------------------------------------------------- */
204 /* update sym->c so that it points to an external symbol in section
205 'section' with value 'value' */
207 ST_FUNC
void put_extern_sym2(Sym
*sym
, Section
*section
,
208 addr_t value
, unsigned long size
,
209 int can_add_underscore
)
211 int sym_type
, sym_bind
, sh_num
, info
, other
;
216 #ifdef CONFIG_TCC_BCHECK
222 else if (section
== SECTION_ABS
)
225 sh_num
= section
->sh_num
;
227 if ((sym
->type
.t
& VT_BTYPE
) == VT_FUNC
) {
229 } else if ((sym
->type
.t
& VT_BTYPE
) == VT_VOID
) {
230 sym_type
= STT_NOTYPE
;
232 sym_type
= STT_OBJECT
;
235 if (sym
->type
.t
& VT_STATIC
)
236 sym_bind
= STB_LOCAL
;
238 if (sym
->type
.t
& VT_WEAK
)
241 sym_bind
= STB_GLOBAL
;
245 name
= get_tok_str(sym
->v
, NULL
);
246 #ifdef CONFIG_TCC_BCHECK
247 if (tcc_state
->do_bounds_check
) {
248 /* XXX: avoid doing that for statics ? */
249 /* if bound checking is activated, we change some function
250 names by adding the "__bound" prefix */
253 /* XXX: we rely only on malloc hooks */
266 strcpy(buf
, "__bound_");
276 if (sym
->type
.t
& VT_EXPORT
)
277 other
|= ST_PE_EXPORT
;
278 if (sym_type
== STT_FUNC
&& sym
->type
.ref
) {
279 Sym
*ref
= sym
->type
.ref
;
280 if (ref
->a
.func_export
)
281 other
|= ST_PE_EXPORT
;
282 if (ref
->a
.func_call
== FUNC_STDCALL
&& can_add_underscore
) {
283 sprintf(buf1
, "_%s@%d", name
, ref
->a
.func_args
* PTR_SIZE
);
285 other
|= ST_PE_STDCALL
;
286 can_add_underscore
= 0;
289 if (find_elf_sym(tcc_state
->dynsymtab_section
, name
))
290 other
|= ST_PE_IMPORT
;
291 if (sym
->type
.t
& VT_IMPORT
)
292 other
|= ST_PE_IMPORT
;
295 if (! (sym
->type
.t
& VT_STATIC
))
296 other
= (sym
->type
.t
& VT_VIS_MASK
) >> VT_VIS_SHIFT
;
298 if (tcc_state
->leading_underscore
&& can_add_underscore
) {
300 pstrcpy(buf1
+ 1, sizeof(buf1
) - 1, name
);
303 if (sym
->asm_label
) {
304 name
= get_tok_str(sym
->asm_label
, NULL
);
306 info
= ELFW(ST_INFO
)(sym_bind
, sym_type
);
307 sym
->c
= set_elf_sym(symtab_section
, value
, size
, info
, other
, sh_num
, name
);
309 esym
= &((ElfW(Sym
) *)symtab_section
->data
)[sym
->c
];
310 esym
->st_value
= value
;
311 esym
->st_size
= size
;
312 esym
->st_shndx
= sh_num
;
316 ST_FUNC
void put_extern_sym(Sym
*sym
, Section
*section
,
317 addr_t value
, unsigned long size
)
319 put_extern_sym2(sym
, section
, value
, size
, 1);
322 /* add a new relocation entry to symbol 'sym' in section 's' */
323 ST_FUNC
void greloca(Section
*s
, Sym
*sym
, unsigned long offset
, int type
,
328 if (nocode_wanted
&& s
== cur_text_section
)
333 put_extern_sym(sym
, NULL
, 0, 0);
337 /* now we can add ELF relocation info */
338 put_elf_reloca(symtab_section
, s
, offset
, type
, c
, addend
);
341 ST_FUNC
void greloc(Section
*s
, Sym
*sym
, unsigned long offset
, int type
)
343 greloca(s
, sym
, offset
, type
, 0);
346 /* ------------------------------------------------------------------------- */
347 /* symbol allocator */
348 static Sym
*__sym_malloc(void)
350 Sym
*sym_pool
, *sym
, *last_sym
;
353 sym_pool
= tcc_malloc(SYM_POOL_NB
* sizeof(Sym
));
354 dynarray_add(&sym_pools
, &nb_sym_pools
, sym_pool
);
356 last_sym
= sym_free_first
;
358 for(i
= 0; i
< SYM_POOL_NB
; i
++) {
359 sym
->next
= last_sym
;
363 sym_free_first
= last_sym
;
367 static inline Sym
*sym_malloc(void)
371 sym
= sym_free_first
;
373 sym
= __sym_malloc();
374 sym_free_first
= sym
->next
;
377 sym
= tcc_malloc(sizeof(Sym
));
382 ST_INLN
void sym_free(Sym
*sym
)
385 sym
->next
= sym_free_first
;
386 sym_free_first
= sym
;
392 /* push, without hashing */
393 ST_FUNC Sym
*sym_push2(Sym
**ps
, int v
, int t
, long c
)
413 /* find a symbol and return its associated structure. 's' is the top
414 of the symbol stack */
415 ST_FUNC Sym
*sym_find2(Sym
*s
, int v
)
427 /* structure lookup */
428 ST_INLN Sym
*struct_find(int v
)
431 if ((unsigned)v
>= (unsigned)(tok_ident
- TOK_IDENT
))
433 return table_ident
[v
]->sym_struct
;
436 /* find an identifier */
437 ST_INLN Sym
*sym_find(int v
)
440 if ((unsigned)v
>= (unsigned)(tok_ident
- TOK_IDENT
))
442 return table_ident
[v
]->sym_identifier
;
445 /* push a given symbol on the symbol stack */
446 ST_FUNC Sym
*sym_push(int v
, CType
*type
, int r
, long c
)
455 s
= sym_push2(ps
, v
, type
->t
, c
);
456 s
->type
.ref
= type
->ref
;
458 /* don't record fields or anonymous symbols */
460 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
461 /* record symbol in token array */
462 ts
= table_ident
[(v
& ~SYM_STRUCT
) - TOK_IDENT
];
464 ps
= &ts
->sym_struct
;
466 ps
= &ts
->sym_identifier
;
469 s
->scope
= local_scope
;
470 if (s
->prev_tok
&& s
->prev_tok
->scope
== s
->scope
)
471 tcc_error("redeclaration of '%s'",
472 get_tok_str(v
& ~SYM_STRUCT
, NULL
));
477 /* push a global identifier */
478 ST_FUNC Sym
*global_identifier_push(int v
, int t
, long c
)
481 s
= sym_push2(&global_stack
, v
, t
, c
);
482 /* don't record anonymous symbol */
483 if (v
< SYM_FIRST_ANOM
) {
484 ps
= &table_ident
[v
- TOK_IDENT
]->sym_identifier
;
485 /* modify the top most local identifier, so that
486 sym_identifier will point to 's' when popped */
488 ps
= &(*ps
)->prev_tok
;
495 /* pop symbols until top reaches 'b'. If KEEP is non-zero don't really
496 pop them yet from the list, but do remove them from the token array. */
497 ST_FUNC
void sym_pop(Sym
**ptop
, Sym
*b
, int keep
)
507 /* remove symbol in token array */
509 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
510 ts
= table_ident
[(v
& ~SYM_STRUCT
) - TOK_IDENT
];
512 ps
= &ts
->sym_struct
;
514 ps
= &ts
->sym_identifier
;
525 static void weaken_symbol(Sym
*sym
)
527 sym
->type
.t
|= VT_WEAK
;
532 esym
= &((ElfW(Sym
) *)symtab_section
->data
)[sym
->c
];
533 esym_type
= ELFW(ST_TYPE
)(esym
->st_info
);
534 esym
->st_info
= ELFW(ST_INFO
)(STB_WEAK
, esym_type
);
538 static void apply_visibility(Sym
*sym
, CType
*type
)
540 int vis
= sym
->type
.t
& VT_VIS_MASK
;
541 int vis2
= type
->t
& VT_VIS_MASK
;
542 if (vis
== (STV_DEFAULT
<< VT_VIS_SHIFT
))
544 else if (vis2
== (STV_DEFAULT
<< VT_VIS_SHIFT
))
547 vis
= (vis
< vis2
) ? vis
: vis2
;
548 sym
->type
.t
&= ~VT_VIS_MASK
;
554 esym
= &((ElfW(Sym
) *)symtab_section
->data
)[sym
->c
];
555 vis
>>= VT_VIS_SHIFT
;
556 esym
->st_other
= (esym
->st_other
& ~ELFW(ST_VISIBILITY
)(-1)) | vis
;
560 /* ------------------------------------------------------------------------- */
562 static void vsetc(CType
*type
, int r
, CValue
*vc
)
566 if (vtop
>= vstack
+ (VSTACK_SIZE
- 1))
567 tcc_error("memory full (vstack)");
568 /* cannot let cpu flags if other instruction are generated. Also
569 avoid leaving VT_JMP anywhere except on the top of the stack
570 because it would complicate the code generator.
572 Don't do this when nocode_wanted. vtop might come from
573 !nocode_wanted regions (see 88_codeopt.c) and transforming
574 it to a register without actually generating code is wrong
575 as their value might still be used for real. All values
576 we push under nocode_wanted will eventually be popped
577 again, so that the VT_CMP/VT_JMP value will be in vtop
578 when code is unsuppressed again.
580 Same logic below in vswap(); */
581 if (vtop
>= vstack
&& !nocode_wanted
) {
582 v
= vtop
->r
& VT_VALMASK
;
583 if (v
== VT_CMP
|| (v
& ~1) == VT_JMP
)
595 ST_FUNC
void vswap(void)
598 /* cannot vswap cpu flags. See comment at vsetc() above */
599 if (vtop
>= vstack
&& !nocode_wanted
) {
600 int v
= vtop
->r
& VT_VALMASK
;
601 if (v
== VT_CMP
|| (v
& ~1) == VT_JMP
)
609 /* pop stack value */
610 ST_FUNC
void vpop(void)
613 v
= vtop
->r
& VT_VALMASK
;
614 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
615 /* for x86, we need to pop the FP stack */
617 o(0xd8dd); /* fstp %st(0) */
620 if (v
== VT_JMP
|| v
== VT_JMPI
) {
621 /* need to put correct jump if && or || without test */
627 /* push constant of type "type" with useless value */
628 ST_FUNC
void vpush(CType
*type
)
631 vsetc(type
, VT_CONST
, &cval
);
634 /* push integer constant */
635 ST_FUNC
void vpushi(int v
)
639 vsetc(&int_type
, VT_CONST
, &cval
);
642 /* push a pointer sized constant */
643 static void vpushs(addr_t v
)
647 vsetc(&size_type
, VT_CONST
, &cval
);
650 /* push arbitrary 64bit constant */
651 ST_FUNC
void vpush64(int ty
, unsigned long long v
)
658 vsetc(&ctype
, VT_CONST
, &cval
);
661 /* push long long constant */
662 static inline void vpushll(long long v
)
664 vpush64(VT_LLONG
, v
);
667 ST_FUNC
void vset(CType
*type
, int r
, long v
)
672 vsetc(type
, r
, &cval
);
675 static void vseti(int r
, int v
)
683 ST_FUNC
void vpushv(SValue
*v
)
685 if (vtop
>= vstack
+ (VSTACK_SIZE
- 1))
686 tcc_error("memory full (vstack)");
691 static void vdup(void)
696 /* rotate n first stack elements to the bottom
697 I1 ... In -> I2 ... In I1 [top is right]
699 ST_FUNC
void vrotb(int n
)
710 /* rotate the n elements before entry e towards the top
711 I1 ... In ... -> In I1 ... I(n-1) ... [top is right]
713 ST_FUNC
void vrote(SValue
*e
, int n
)
719 for(i
= 0;i
< n
- 1; i
++)
724 /* rotate n first stack elements to the top
725 I1 ... In -> In I1 ... I(n-1) [top is right]
727 ST_FUNC
void vrott(int n
)
732 /* push a symbol value of TYPE */
733 static inline void vpushsym(CType
*type
, Sym
*sym
)
737 vsetc(type
, VT_CONST
| VT_SYM
, &cval
);
741 /* Return a static symbol pointing to a section */
742 ST_FUNC Sym
*get_sym_ref(CType
*type
, Section
*sec
, unsigned long offset
, unsigned long size
)
748 sym
= global_identifier_push(v
, type
->t
| VT_STATIC
, 0);
749 sym
->type
.ref
= type
->ref
;
750 sym
->r
= VT_CONST
| VT_SYM
;
751 put_extern_sym(sym
, sec
, offset
, size
);
755 /* push a reference to a section offset by adding a dummy symbol */
756 static void vpush_ref(CType
*type
, Section
*sec
, unsigned long offset
, unsigned long size
)
758 vpushsym(type
, get_sym_ref(type
, sec
, offset
, size
));
761 /* define a new external reference to a symbol 'v' of type 'u' */
762 ST_FUNC Sym
*external_global_sym(int v
, CType
*type
, int r
)
768 /* push forward reference */
769 s
= global_identifier_push(v
, type
->t
| VT_EXTERN
, 0);
770 s
->type
.ref
= type
->ref
;
771 s
->r
= r
| VT_CONST
| VT_SYM
;
776 /* define a new external reference to a symbol 'v' */
777 static Sym
*external_sym(int v
, CType
*type
, int r
)
783 /* push forward reference */
784 s
= sym_push(v
, type
, r
| VT_CONST
| VT_SYM
, 0);
785 s
->type
.t
|= VT_EXTERN
;
786 } else if (s
->type
.ref
== func_old_type
.ref
) {
787 s
->type
.ref
= type
->ref
;
788 s
->r
= r
| VT_CONST
| VT_SYM
;
789 s
->type
.t
|= VT_EXTERN
;
790 } else if (!is_compatible_types(&s
->type
, type
)) {
791 tcc_error("incompatible types for redefinition of '%s'",
792 get_tok_str(v
, NULL
));
794 /* Merge some storage attributes. */
795 if (type
->t
& VT_WEAK
)
798 if (type
->t
& VT_VIS_MASK
)
799 apply_visibility(s
, type
);
804 /* push a reference to global symbol v */
805 ST_FUNC
void vpush_global_sym(CType
*type
, int v
)
807 vpushsym(type
, external_global_sym(v
, type
, 0));
810 /* save registers up to (vtop - n) stack entry */
811 ST_FUNC
void save_regs(int n
)
814 for(p
= vstack
, p1
= vtop
- n
; p
<= p1
; p
++)
818 /* save r to the memory stack, and mark it as being free */
819 ST_FUNC
void save_reg(int r
)
821 save_reg_upstack(r
, 0);
824 /* save r to the memory stack, and mark it as being free,
825 if seen up to (vtop - n) stack entry */
826 ST_FUNC
void save_reg_upstack(int r
, int n
)
828 int l
, saved
, size
, align
;
832 if ((r
&= VT_VALMASK
) >= VT_CONST
)
837 /* modify all stack values */
840 for(p
= vstack
, p1
= vtop
- n
; p
<= p1
; p
++) {
841 if ((p
->r
& VT_VALMASK
) == r
||
842 ((p
->type
.t
& VT_BTYPE
) == VT_LLONG
&& (p
->r2
& VT_VALMASK
) == r
)) {
843 /* must save value on stack if not already done */
845 /* NOTE: must reload 'r' because r might be equal to r2 */
846 r
= p
->r
& VT_VALMASK
;
847 /* store register in the stack */
849 if ((p
->r
& VT_LVAL
) ||
850 (!is_float(type
->t
) && (type
->t
& VT_BTYPE
) != VT_LLONG
))
851 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
852 type
= &char_pointer_type
;
856 if ((type
->t
& VT_BTYPE
) == VT_FLOAT
) {
857 /* cast to DOUBLE to avoid precision loss */
858 type
->t
= (type
->t
& ~VT_BTYPE
) | VT_DOUBLE
;
860 size
= type_size(type
, &align
);
861 loc
= (loc
- size
) & -align
;
863 sv
.r
= VT_LOCAL
| VT_LVAL
;
866 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
867 /* x86 specific: need to pop fp register ST0 if saved */
869 o(0xd8dd); /* fstp %st(0) */
872 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
873 /* special long long case */
874 if ((type
->t
& VT_BTYPE
) == VT_LLONG
) {
882 /* mark that stack entry as being saved on the stack */
883 if (p
->r
& VT_LVAL
) {
884 /* also clear the bounded flag because the
885 relocation address of the function was stored in
887 p
->r
= (p
->r
& ~(VT_VALMASK
| VT_BOUNDED
)) | VT_LLOCAL
;
889 p
->r
= lvalue_type(p
->type
.t
) | VT_LOCAL
;
897 #ifdef TCC_TARGET_ARM
898 /* find a register of class 'rc2' with at most one reference on stack.
899 * If none, call get_reg(rc) */
900 ST_FUNC
int get_reg_ex(int rc
, int rc2
)
905 for(r
=0;r
<NB_REGS
;r
++) {
906 if (reg_classes
[r
] & rc2
) {
909 for(p
= vstack
; p
<= vtop
; p
++) {
910 if ((p
->r
& VT_VALMASK
) == r
||
911 (p
->r2
& VT_VALMASK
) == r
)
922 /* find a free register of class 'rc'. If none, save one register */
923 ST_FUNC
int get_reg(int rc
)
928 /* find a free register */
929 for(r
=0;r
<NB_REGS
;r
++) {
930 if (reg_classes
[r
] & rc
) {
933 for(p
=vstack
;p
<=vtop
;p
++) {
934 if ((p
->r
& VT_VALMASK
) == r
||
935 (p
->r2
& VT_VALMASK
) == r
)
943 /* no register left : free the first one on the stack (VERY
944 IMPORTANT to start from the bottom to ensure that we don't
945 spill registers used in gen_opi()) */
946 for(p
=vstack
;p
<=vtop
;p
++) {
947 /* look at second register (if long long) */
948 r
= p
->r2
& VT_VALMASK
;
949 if (r
< VT_CONST
&& (reg_classes
[r
] & rc
))
951 r
= p
->r
& VT_VALMASK
;
952 if (r
< VT_CONST
&& (reg_classes
[r
] & rc
)) {
958 /* Should never comes here */
962 /* move register 's' (of type 't') to 'r', and flush previous value of r to memory
964 static void move_reg(int r
, int s
, int t
)
978 /* get address of vtop (vtop MUST BE an lvalue) */
979 ST_FUNC
void gaddrof(void)
981 if (vtop
->r
& VT_REF
)
984 /* tricky: if saved lvalue, then we can go back to lvalue */
985 if ((vtop
->r
& VT_VALMASK
) == VT_LLOCAL
)
986 vtop
->r
= (vtop
->r
& ~(VT_VALMASK
| VT_LVAL_TYPE
)) | VT_LOCAL
| VT_LVAL
;
991 #ifdef CONFIG_TCC_BCHECK
992 /* generate lvalue bound code */
993 static void gbound(void)
998 vtop
->r
&= ~VT_MUSTBOUND
;
999 /* if lvalue, then use checking code before dereferencing */
1000 if (vtop
->r
& VT_LVAL
) {
1001 /* if not VT_BOUNDED value, then make one */
1002 if (!(vtop
->r
& VT_BOUNDED
)) {
1003 lval_type
= vtop
->r
& (VT_LVAL_TYPE
| VT_LVAL
);
1004 /* must save type because we must set it to int to get pointer */
1006 vtop
->type
.t
= VT_PTR
;
1009 gen_bounded_ptr_add();
1010 vtop
->r
|= lval_type
;
1013 /* then check for dereferencing */
1014 gen_bounded_ptr_deref();
1019 /* store vtop a register belonging to class 'rc'. lvalues are
1020 converted to values. Cannot be used if cannot be converted to
1021 register value (such as structures). */
1022 ST_FUNC
int gv(int rc
)
1024 int r
, bit_pos
, bit_size
, size
, align
, i
;
1027 /* NOTE: get_reg can modify vstack[] */
1028 if (vtop
->type
.t
& VT_BITFIELD
) {
1031 bit_pos
= (vtop
->type
.t
>> VT_STRUCT_SHIFT
) & 0x3f;
1032 bit_size
= (vtop
->type
.t
>> (VT_STRUCT_SHIFT
+ 6)) & 0x3f;
1033 /* remove bit field info to avoid loops */
1034 vtop
->type
.t
&= ~VT_BITFIELD
& ((1 << VT_STRUCT_SHIFT
) - 1);
1035 /* cast to int to propagate signedness in following ops */
1036 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
) {
1041 if((vtop
->type
.t
& VT_UNSIGNED
) ||
1042 (vtop
->type
.t
& VT_BTYPE
) == VT_BOOL
)
1043 type
.t
|= VT_UNSIGNED
;
1045 /* generate shifts */
1046 vpushi(bits
- (bit_pos
+ bit_size
));
1048 vpushi(bits
- bit_size
);
1049 /* NOTE: transformed to SHR if unsigned */
1053 if (is_float(vtop
->type
.t
) &&
1054 (vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
) {
1057 unsigned long offset
;
1058 #if defined(TCC_TARGET_ARM) && !defined(TCC_ARM_VFP)
1062 /* XXX: unify with initializers handling ? */
1063 /* CPUs usually cannot use float constants, so we store them
1064 generically in data segment */
1065 size
= type_size(&vtop
->type
, &align
);
1066 offset
= (data_section
->data_offset
+ align
- 1) & -align
;
1067 data_section
->data_offset
= offset
;
1068 /* XXX: not portable yet */
1069 #if defined(__i386__) || defined(__x86_64__)
1070 /* Zero pad x87 tenbyte long doubles */
1071 if (size
== LDOUBLE_SIZE
) {
1072 vtop
->c
.tab
[2] &= 0xffff;
1073 #if LDOUBLE_SIZE == 16
1078 ptr
= section_ptr_add(data_section
, size
);
1080 #if defined(TCC_TARGET_ARM) && !defined(TCC_ARM_VFP)
1084 ptr
[i
] = vtop
->c
.tab
[size
-1-i
];
1088 ptr
[i
] = vtop
->c
.tab
[i
];
1089 sym
= get_sym_ref(&vtop
->type
, data_section
, offset
, size
<< 2);
1090 vtop
->r
|= VT_LVAL
| VT_SYM
;
1094 #ifdef CONFIG_TCC_BCHECK
1095 if (vtop
->r
& VT_MUSTBOUND
)
1099 r
= vtop
->r
& VT_VALMASK
;
1100 rc2
= (rc
& RC_FLOAT
) ? RC_FLOAT
: RC_INT
;
1101 #ifndef TCC_TARGET_ARM64
1104 #ifdef TCC_TARGET_X86_64
1105 else if (rc
== RC_FRET
)
1110 /* need to reload if:
1112 - lvalue (need to dereference pointer)
1113 - already a register, but not in the right class */
1115 || (vtop
->r
& VT_LVAL
)
1116 || !(reg_classes
[r
] & rc
)
1117 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
1118 || ((vtop
->type
.t
& VT_BTYPE
) == VT_QLONG
&& !(reg_classes
[vtop
->r2
] & rc2
))
1119 || ((vtop
->type
.t
& VT_BTYPE
) == VT_QFLOAT
&& !(reg_classes
[vtop
->r2
] & rc2
))
1121 || ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
&& !(reg_classes
[vtop
->r2
] & rc2
))
1126 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
1127 if (((vtop
->type
.t
& VT_BTYPE
) == VT_QLONG
) || ((vtop
->type
.t
& VT_BTYPE
) == VT_QFLOAT
)) {
1128 int addr_type
= VT_LLONG
, load_size
= 8, load_type
= ((vtop
->type
.t
& VT_BTYPE
) == VT_QLONG
) ? VT_LLONG
: VT_DOUBLE
;
1130 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
) {
1131 int addr_type
= VT_INT
, load_size
= 4, load_type
= VT_INT
;
1132 unsigned long long ll
;
1134 int r2
, original_type
;
1135 original_type
= vtop
->type
.t
;
1136 /* two register type load : expand to two words
1138 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
1139 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
) {
1142 vtop
->c
.i
= ll
; /* first word */
1144 vtop
->r
= r
; /* save register value */
1145 vpushi(ll
>> 32); /* second word */
1148 if (vtop
->r
& VT_LVAL
) {
1149 /* We do not want to modifier the long long
1150 pointer here, so the safest (and less
1151 efficient) is to save all the other registers
1152 in the stack. XXX: totally inefficient. */
1156 /* lvalue_save: save only if used further down the stack */
1157 save_reg_upstack(vtop
->r
, 1);
1159 /* load from memory */
1160 vtop
->type
.t
= load_type
;
1163 vtop
[-1].r
= r
; /* save register value */
1164 /* increment pointer to get second word */
1165 vtop
->type
.t
= addr_type
;
1170 vtop
->type
.t
= load_type
;
1172 /* move registers */
1175 vtop
[-1].r
= r
; /* save register value */
1176 vtop
->r
= vtop
[-1].r2
;
1178 /* Allocate second register. Here we rely on the fact that
1179 get_reg() tries first to free r2 of an SValue. */
1183 /* write second register */
1185 vtop
->type
.t
= original_type
;
1186 } else if ((vtop
->r
& VT_LVAL
) && !is_float(vtop
->type
.t
)) {
1188 /* lvalue of scalar type : need to use lvalue type
1189 because of possible cast */
1192 /* compute memory access type */
1193 if (vtop
->r
& VT_REF
)
1194 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
1199 else if (vtop
->r
& VT_LVAL_BYTE
)
1201 else if (vtop
->r
& VT_LVAL_SHORT
)
1203 if (vtop
->r
& VT_LVAL_UNSIGNED
)
1207 /* restore wanted type */
1210 /* one register type load */
1215 #ifdef TCC_TARGET_C67
1216 /* uses register pairs for doubles */
1217 if ((vtop
->type
.t
& VT_BTYPE
) == VT_DOUBLE
)
1224 /* generate vtop[-1] and vtop[0] in resp. classes rc1 and rc2 */
1225 ST_FUNC
void gv2(int rc1
, int rc2
)
1229 /* generate more generic register first. But VT_JMP or VT_CMP
1230 values must be generated first in all cases to avoid possible
1232 v
= vtop
[0].r
& VT_VALMASK
;
1233 if (v
!= VT_CMP
&& (v
& ~1) != VT_JMP
&& rc1
<= rc2
) {
1238 /* test if reload is needed for first register */
1239 if ((vtop
[-1].r
& VT_VALMASK
) >= VT_CONST
) {
1249 /* test if reload is needed for first register */
1250 if ((vtop
[0].r
& VT_VALMASK
) >= VT_CONST
) {
1256 #ifndef TCC_TARGET_ARM64
1257 /* wrapper around RC_FRET to return a register by type */
1258 static int rc_fret(int t
)
1260 #ifdef TCC_TARGET_X86_64
1261 if (t
== VT_LDOUBLE
) {
1269 /* wrapper around REG_FRET to return a register by type */
1270 static int reg_fret(int t
)
1272 #ifdef TCC_TARGET_X86_64
1273 if (t
== VT_LDOUBLE
) {
1280 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
1281 /* expand 64bit on stack in two ints */
1282 static void lexpand(void)
1285 u
= vtop
->type
.t
& (VT_DEFSIGN
| VT_UNSIGNED
);
1286 v
= vtop
->r
& (VT_VALMASK
| VT_LVAL
);
1287 if (v
== VT_CONST
) {
1290 } else if (v
== (VT_LVAL
|VT_CONST
) || v
== (VT_LVAL
|VT_LOCAL
)) {
1296 vtop
[0].r
= vtop
[-1].r2
;
1297 vtop
[0].r2
= vtop
[-1].r2
= VT_CONST
;
1299 vtop
[0].type
.t
= vtop
[-1].type
.t
= VT_INT
| u
;
1303 #ifdef TCC_TARGET_ARM
1304 /* expand long long on stack */
1305 ST_FUNC
void lexpand_nr(void)
1309 u
= vtop
->type
.t
& (VT_DEFSIGN
| VT_UNSIGNED
);
1311 vtop
->r2
= VT_CONST
;
1312 vtop
->type
.t
= VT_INT
| u
;
1313 v
=vtop
[-1].r
& (VT_VALMASK
| VT_LVAL
);
1314 if (v
== VT_CONST
) {
1315 vtop
[-1].c
.i
= vtop
->c
.i
;
1316 vtop
->c
.i
= vtop
->c
.i
>> 32;
1318 } else if (v
== (VT_LVAL
|VT_CONST
) || v
== (VT_LVAL
|VT_LOCAL
)) {
1320 vtop
->r
= vtop
[-1].r
;
1321 } else if (v
> VT_CONST
) {
1325 vtop
->r
= vtop
[-1].r2
;
1326 vtop
[-1].r2
= VT_CONST
;
1327 vtop
[-1].type
.t
= VT_INT
| u
;
1331 #if !defined(TCC_TARGET_X86_64) && !defined(TCC_TARGET_ARM64)
1332 /* build a long long from two ints */
1333 static void lbuild(int t
)
1335 gv2(RC_INT
, RC_INT
);
1336 vtop
[-1].r2
= vtop
[0].r
;
1337 vtop
[-1].type
.t
= t
;
1342 /* convert stack entry to register and duplicate its value in another
1344 static void gv_dup(void)
1350 #if !defined(TCC_TARGET_X86_64) && !defined(TCC_TARGET_ARM64)
1351 if ((t
& VT_BTYPE
) == VT_LLONG
) {
1358 /* stack: H L L1 H1 */
1368 /* duplicate value */
1373 #ifdef TCC_TARGET_X86_64
1374 if ((t
& VT_BTYPE
) == VT_LDOUBLE
) {
1384 load(r1
, &sv
); /* move r to r1 */
1386 /* duplicates value */
1392 /* Generate value test
1394 * Generate a test for any value (jump, comparison and integers) */
1395 ST_FUNC
int gvtst(int inv
, int t
)
1397 int v
= vtop
->r
& VT_VALMASK
;
1398 if (v
!= VT_CMP
&& v
!= VT_JMP
&& v
!= VT_JMPI
) {
1402 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
1403 /* constant jmp optimization */
1404 if ((vtop
->c
.i
!= 0) != inv
)
1409 return gtst(inv
, t
);
1412 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
1413 /* generate CPU independent (unsigned) long long operations */
1414 static void gen_opl(int op
)
1416 int t
, a
, b
, op1
, c
, i
;
1418 unsigned short reg_iret
= REG_IRET
;
1419 unsigned short reg_lret
= REG_LRET
;
1425 func
= TOK___divdi3
;
1428 func
= TOK___udivdi3
;
1431 func
= TOK___moddi3
;
1434 func
= TOK___umoddi3
;
1441 /* call generic long long function */
1442 vpush_global_sym(&func_old_type
, func
);
1447 vtop
->r2
= reg_lret
;
1455 //pv("gen_opl A",0,2);
1461 /* stack: L1 H1 L2 H2 */
1466 vtop
[-2] = vtop
[-3];
1469 /* stack: H1 H2 L1 L2 */
1470 //pv("gen_opl B",0,4);
1476 /* stack: H1 H2 L1 L2 ML MH */
1479 /* stack: ML MH H1 H2 L1 L2 */
1483 /* stack: ML MH H1 L2 H2 L1 */
1488 /* stack: ML MH M1 M2 */
1491 } else if (op
== '+' || op
== '-') {
1492 /* XXX: add non carry method too (for MIPS or alpha) */
1498 /* stack: H1 H2 (L1 op L2) */
1501 gen_op(op1
+ 1); /* TOK_xxxC2 */
1504 /* stack: H1 H2 (L1 op L2) */
1507 /* stack: (L1 op L2) H1 H2 */
1509 /* stack: (L1 op L2) (H1 op H2) */
1517 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
1518 t
= vtop
[-1].type
.t
;
1522 /* stack: L H shift */
1524 /* constant: simpler */
1525 /* NOTE: all comments are for SHL. the other cases are
1526 done by swaping words */
1537 if (op
!= TOK_SAR
) {
1570 /* XXX: should provide a faster fallback on x86 ? */
1573 func
= TOK___ashrdi3
;
1576 func
= TOK___lshrdi3
;
1579 func
= TOK___ashldi3
;
1585 /* compare operations */
1591 /* stack: L1 H1 L2 H2 */
1593 vtop
[-1] = vtop
[-2];
1595 /* stack: L1 L2 H1 H2 */
1598 /* when values are equal, we need to compare low words. since
1599 the jump is inverted, we invert the test too. */
1602 else if (op1
== TOK_GT
)
1604 else if (op1
== TOK_ULT
)
1606 else if (op1
== TOK_UGT
)
1616 /* generate non equal test */
1622 /* compare low. Always unsigned */
1626 else if (op1
== TOK_LE
)
1628 else if (op1
== TOK_GT
)
1630 else if (op1
== TOK_GE
)
1641 static uint64_t gen_opic_sdiv(uint64_t a
, uint64_t b
)
1643 uint64_t x
= (a
>> 63 ? -a
: a
) / (b
>> 63 ? -b
: b
);
1644 return (a
^ b
) >> 63 ? -x
: x
;
1647 static int gen_opic_lt(uint64_t a
, uint64_t b
)
1649 return (a
^ (uint64_t)1 << 63) < (b
^ (uint64_t)1 << 63);
1652 /* handle integer constant optimizations and various machine
1654 static void gen_opic(int op
)
1656 SValue
*v1
= vtop
- 1;
1658 int t1
= v1
->type
.t
& VT_BTYPE
;
1659 int t2
= v2
->type
.t
& VT_BTYPE
;
1660 int c1
= (v1
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
1661 int c2
= (v2
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
1662 uint64_t l1
= c1
? v1
->c
.i
: 0;
1663 uint64_t l2
= c2
? v2
->c
.i
: 0;
1664 int shm
= (t1
== VT_LLONG
) ? 63 : 31;
1666 if (t1
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t1
!= VT_PTR
))
1667 l1
= ((uint32_t)l1
|
1668 (v1
->type
.t
& VT_UNSIGNED
? 0 : -(l1
& 0x80000000)));
1669 if (t2
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t2
!= VT_PTR
))
1670 l2
= ((uint32_t)l2
|
1671 (v2
->type
.t
& VT_UNSIGNED
? 0 : -(l2
& 0x80000000)));
1675 case '+': l1
+= l2
; break;
1676 case '-': l1
-= l2
; break;
1677 case '&': l1
&= l2
; break;
1678 case '^': l1
^= l2
; break;
1679 case '|': l1
|= l2
; break;
1680 case '*': l1
*= l2
; break;
1687 /* if division by zero, generate explicit division */
1690 tcc_error("division by zero in constant");
1694 default: l1
= gen_opic_sdiv(l1
, l2
); break;
1695 case '%': l1
= l1
- l2
* gen_opic_sdiv(l1
, l2
); break;
1696 case TOK_UDIV
: l1
= l1
/ l2
; break;
1697 case TOK_UMOD
: l1
= l1
% l2
; break;
1700 case TOK_SHL
: l1
<<= (l2
& shm
); break;
1701 case TOK_SHR
: l1
>>= (l2
& shm
); break;
1703 l1
= (l1
>> 63) ? ~(~l1
>> (l2
& shm
)) : l1
>> (l2
& shm
);
1706 case TOK_ULT
: l1
= l1
< l2
; break;
1707 case TOK_UGE
: l1
= l1
>= l2
; break;
1708 case TOK_EQ
: l1
= l1
== l2
; break;
1709 case TOK_NE
: l1
= l1
!= l2
; break;
1710 case TOK_ULE
: l1
= l1
<= l2
; break;
1711 case TOK_UGT
: l1
= l1
> l2
; break;
1712 case TOK_LT
: l1
= gen_opic_lt(l1
, l2
); break;
1713 case TOK_GE
: l1
= !gen_opic_lt(l1
, l2
); break;
1714 case TOK_LE
: l1
= !gen_opic_lt(l2
, l1
); break;
1715 case TOK_GT
: l1
= gen_opic_lt(l2
, l1
); break;
1717 case TOK_LAND
: l1
= l1
&& l2
; break;
1718 case TOK_LOR
: l1
= l1
|| l2
; break;
1722 if (t1
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t1
!= VT_PTR
))
1723 l1
= ((uint32_t)l1
|
1724 (v1
->type
.t
& VT_UNSIGNED
? 0 : -(l1
& 0x80000000)));
1728 /* if commutative ops, put c2 as constant */
1729 if (c1
&& (op
== '+' || op
== '&' || op
== '^' ||
1730 op
== '|' || op
== '*')) {
1732 c2
= c1
; //c = c1, c1 = c2, c2 = c;
1733 l2
= l1
; //l = l1, l1 = l2, l2 = l;
1735 if (!const_wanted
&&
1737 (op
== TOK_SHL
|| op
== TOK_SHR
|| op
== TOK_SAR
)) ||
1738 (l1
== -1 && op
== TOK_SAR
))) {
1739 /* treat (0 << x), (0 >> x) and (-1 >> x) as constant */
1741 } else if (!const_wanted
&&
1742 c2
&& ((l2
== 0 && (op
== '&' || op
== '*')) ||
1743 (l2
== -1 && op
== '|') ||
1744 (l2
== 0xffffffff && t2
!= VT_LLONG
&& op
== '|') ||
1745 (l2
== 1 && (op
== '%' || op
== TOK_UMOD
)))) {
1746 /* treat (x & 0), (x * 0), (x | -1) and (x % 1) as constant */
1751 } else if (c2
&& (((op
== '*' || op
== '/' || op
== TOK_UDIV
||
1754 ((op
== '+' || op
== '-' || op
== '|' || op
== '^' ||
1755 op
== TOK_SHL
|| op
== TOK_SHR
|| op
== TOK_SAR
) &&
1759 /* filter out NOP operations like x*1, x-0, x&-1... */
1761 } else if (c2
&& (op
== '*' || op
== TOK_PDIV
|| op
== TOK_UDIV
)) {
1762 /* try to use shifts instead of muls or divs */
1763 if (l2
> 0 && (l2
& (l2
- 1)) == 0) {
1772 else if (op
== TOK_PDIV
)
1778 } else if (c2
&& (op
== '+' || op
== '-') &&
1779 (((vtop
[-1].r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == (VT_CONST
| VT_SYM
))
1780 || (vtop
[-1].r
& (VT_VALMASK
| VT_LVAL
)) == VT_LOCAL
)) {
1781 /* symbol + constant case */
1785 /* The backends can't always deal with addends to symbols
1786 larger than +-1<<31. Don't construct such. */
1793 /* call low level op generator */
1794 if (t1
== VT_LLONG
|| t2
== VT_LLONG
||
1795 (PTR_SIZE
== 8 && (t1
== VT_PTR
|| t2
== VT_PTR
)))
1803 /* generate a floating point operation with constant propagation */
1804 static void gen_opif(int op
)
1812 /* currently, we cannot do computations with forward symbols */
1813 c1
= (v1
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
1814 c2
= (v2
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
1816 if (v1
->type
.t
== VT_FLOAT
) {
1819 } else if (v1
->type
.t
== VT_DOUBLE
) {
1827 /* NOTE: we only do constant propagation if finite number (not
1828 NaN or infinity) (ANSI spec) */
1829 if (!ieee_finite(f1
) || !ieee_finite(f2
))
1833 case '+': f1
+= f2
; break;
1834 case '-': f1
-= f2
; break;
1835 case '*': f1
*= f2
; break;
1839 tcc_error("division by zero in constant");
1844 /* XXX: also handles tests ? */
1848 /* XXX: overflow test ? */
1849 if (v1
->type
.t
== VT_FLOAT
) {
1851 } else if (v1
->type
.t
== VT_DOUBLE
) {
1863 static int pointed_size(CType
*type
)
1866 return type_size(pointed_type(type
), &align
);
1869 static void vla_runtime_pointed_size(CType
*type
)
1872 vla_runtime_type_size(pointed_type(type
), &align
);
1875 static inline int is_null_pointer(SValue
*p
)
1877 if ((p
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) != VT_CONST
)
1879 return ((p
->type
.t
& VT_BTYPE
) == VT_INT
&& (uint32_t)p
->c
.i
== 0) ||
1880 ((p
->type
.t
& VT_BTYPE
) == VT_LLONG
&& p
->c
.i
== 0) ||
1881 ((p
->type
.t
& VT_BTYPE
) == VT_PTR
&&
1882 (PTR_SIZE
== 4 ? (uint32_t)p
->c
.i
== 0 : p
->c
.i
== 0));
1885 static inline int is_integer_btype(int bt
)
1887 return (bt
== VT_BYTE
|| bt
== VT_SHORT
||
1888 bt
== VT_INT
|| bt
== VT_LLONG
);
1891 /* check types for comparison or subtraction of pointers */
1892 static void check_comparison_pointer_types(SValue
*p1
, SValue
*p2
, int op
)
1894 CType
*type1
, *type2
, tmp_type1
, tmp_type2
;
1897 /* null pointers are accepted for all comparisons as gcc */
1898 if (is_null_pointer(p1
) || is_null_pointer(p2
))
1902 bt1
= type1
->t
& VT_BTYPE
;
1903 bt2
= type2
->t
& VT_BTYPE
;
1904 /* accept comparison between pointer and integer with a warning */
1905 if ((is_integer_btype(bt1
) || is_integer_btype(bt2
)) && op
!= '-') {
1906 if (op
!= TOK_LOR
&& op
!= TOK_LAND
)
1907 tcc_warning("comparison between pointer and integer");
1911 /* both must be pointers or implicit function pointers */
1912 if (bt1
== VT_PTR
) {
1913 type1
= pointed_type(type1
);
1914 } else if (bt1
!= VT_FUNC
)
1915 goto invalid_operands
;
1917 if (bt2
== VT_PTR
) {
1918 type2
= pointed_type(type2
);
1919 } else if (bt2
!= VT_FUNC
) {
1921 tcc_error("invalid operands to binary %s", get_tok_str(op
, NULL
));
1923 if ((type1
->t
& VT_BTYPE
) == VT_VOID
||
1924 (type2
->t
& VT_BTYPE
) == VT_VOID
)
1928 tmp_type1
.t
&= ~(VT_DEFSIGN
| VT_UNSIGNED
| VT_CONSTANT
| VT_VOLATILE
);
1929 tmp_type2
.t
&= ~(VT_DEFSIGN
| VT_UNSIGNED
| VT_CONSTANT
| VT_VOLATILE
);
1930 if (!is_compatible_types(&tmp_type1
, &tmp_type2
)) {
1931 /* gcc-like error if '-' is used */
1933 goto invalid_operands
;
1935 tcc_warning("comparison of distinct pointer types lacks a cast");
1939 /* generic gen_op: handles types problems */
1940 ST_FUNC
void gen_op(int op
)
1942 int u
, t1
, t2
, bt1
, bt2
, t
;
1946 t1
= vtop
[-1].type
.t
;
1947 t2
= vtop
[0].type
.t
;
1948 bt1
= t1
& VT_BTYPE
;
1949 bt2
= t2
& VT_BTYPE
;
1951 if (bt1
== VT_STRUCT
|| bt2
== VT_STRUCT
) {
1952 tcc_error("operation on a struct");
1953 } else if (bt1
== VT_FUNC
|| bt2
== VT_FUNC
) {
1954 if (bt2
== VT_FUNC
) {
1955 mk_pointer(&vtop
->type
);
1958 if (bt1
== VT_FUNC
) {
1960 mk_pointer(&vtop
->type
);
1965 } else if (bt1
== VT_PTR
|| bt2
== VT_PTR
) {
1966 /* at least one operand is a pointer */
1967 /* relationnal op: must be both pointers */
1968 if (op
>= TOK_ULT
&& op
<= TOK_LOR
) {
1969 check_comparison_pointer_types(vtop
- 1, vtop
, op
);
1970 /* pointers are handled are unsigned */
1971 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
1972 t
= VT_LLONG
| VT_UNSIGNED
;
1974 t
= VT_INT
| VT_UNSIGNED
;
1978 /* if both pointers, then it must be the '-' op */
1979 if (bt1
== VT_PTR
&& bt2
== VT_PTR
) {
1981 tcc_error("cannot use pointers here");
1982 check_comparison_pointer_types(vtop
- 1, vtop
, op
);
1983 /* XXX: check that types are compatible */
1984 if (vtop
[-1].type
.t
& VT_VLA
) {
1985 vla_runtime_pointed_size(&vtop
[-1].type
);
1987 vpushi(pointed_size(&vtop
[-1].type
));
1991 /* set to integer type */
1992 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
1993 vtop
->type
.t
= VT_LLONG
;
1995 vtop
->type
.t
= VT_INT
;
2000 /* exactly one pointer : must be '+' or '-'. */
2001 if (op
!= '-' && op
!= '+')
2002 tcc_error("cannot use pointers here");
2003 /* Put pointer as first operand */
2004 if (bt2
== VT_PTR
) {
2006 t
= t1
, t1
= t2
, t2
= t
;
2009 if ((vtop
[0].type
.t
& VT_BTYPE
) == VT_LLONG
)
2010 /* XXX: truncate here because gen_opl can't handle ptr + long long */
2011 gen_cast(&int_type
);
2013 type1
= vtop
[-1].type
;
2014 type1
.t
&= ~VT_ARRAY
;
2015 if (vtop
[-1].type
.t
& VT_VLA
)
2016 vla_runtime_pointed_size(&vtop
[-1].type
);
2018 u
= pointed_size(&vtop
[-1].type
);
2020 tcc_error("unknown array element size");
2021 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
2024 /* XXX: cast to int ? (long long case) */
2030 /* #ifdef CONFIG_TCC_BCHECK
2031 The main reason to removing this code:
2038 fprintf(stderr, "v+i-j = %p\n", v+i-j);
2039 fprintf(stderr, "v+(i-j) = %p\n", v+(i-j));
2041 When this code is on. then the output looks like
2043 v+(i-j) = 0xbff84000
2045 /* if evaluating constant expression, no code should be
2046 generated, so no bound check */
2047 if (tcc_state
->do_bounds_check
&& !const_wanted
) {
2048 /* if bounded pointers, we generate a special code to
2055 gen_bounded_ptr_add();
2061 /* put again type if gen_opic() swaped operands */
2064 } else if (is_float(bt1
) || is_float(bt2
)) {
2065 /* compute bigger type and do implicit casts */
2066 if (bt1
== VT_LDOUBLE
|| bt2
== VT_LDOUBLE
) {
2068 } else if (bt1
== VT_DOUBLE
|| bt2
== VT_DOUBLE
) {
2073 /* floats can only be used for a few operations */
2074 if (op
!= '+' && op
!= '-' && op
!= '*' && op
!= '/' &&
2075 (op
< TOK_ULT
|| op
> TOK_GT
))
2076 tcc_error("invalid operands for binary operation");
2078 } else if (op
== TOK_SHR
|| op
== TOK_SAR
|| op
== TOK_SHL
) {
2079 t
= bt1
== VT_LLONG
? VT_LLONG
: VT_INT
;
2080 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
)) == (t
| VT_UNSIGNED
))
2083 } else if (bt1
== VT_LLONG
|| bt2
== VT_LLONG
) {
2084 /* cast to biggest op */
2086 /* convert to unsigned if it does not fit in a long long */
2087 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
)) == (VT_LLONG
| VT_UNSIGNED
) ||
2088 (t2
& (VT_BTYPE
| VT_UNSIGNED
)) == (VT_LLONG
| VT_UNSIGNED
))
2092 /* integer operations */
2094 /* convert to unsigned if it does not fit in an integer */
2095 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
)) == (VT_INT
| VT_UNSIGNED
) ||
2096 (t2
& (VT_BTYPE
| VT_UNSIGNED
)) == (VT_INT
| VT_UNSIGNED
))
2099 /* XXX: currently, some unsigned operations are explicit, so
2100 we modify them here */
2101 if (t
& VT_UNSIGNED
) {
2108 else if (op
== TOK_LT
)
2110 else if (op
== TOK_GT
)
2112 else if (op
== TOK_LE
)
2114 else if (op
== TOK_GE
)
2121 /* special case for shifts and long long: we keep the shift as
2123 if (op
== TOK_SHR
|| op
== TOK_SAR
|| op
== TOK_SHL
)
2130 if (op
>= TOK_ULT
&& op
<= TOK_GT
) {
2131 /* relationnal op: the result is an int */
2132 vtop
->type
.t
= VT_INT
;
2137 // Make sure that we have converted to an rvalue:
2138 if (vtop
->r
& VT_LVAL
)
2139 gv(is_float(vtop
->type
.t
& VT_BTYPE
) ? RC_FLOAT
: RC_INT
);
2142 #ifndef TCC_TARGET_ARM
2143 /* generic itof for unsigned long long case */
2144 static void gen_cvt_itof1(int t
)
2146 #ifdef TCC_TARGET_ARM64
2149 if ((vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
)) ==
2150 (VT_LLONG
| VT_UNSIGNED
)) {
2153 vpush_global_sym(&func_old_type
, TOK___floatundisf
);
2154 #if LDOUBLE_SIZE != 8
2155 else if (t
== VT_LDOUBLE
)
2156 vpush_global_sym(&func_old_type
, TOK___floatundixf
);
2159 vpush_global_sym(&func_old_type
, TOK___floatundidf
);
2163 vtop
->r
= reg_fret(t
);
2171 /* generic ftoi for unsigned long long case */
2172 static void gen_cvt_ftoi1(int t
)
2174 #ifdef TCC_TARGET_ARM64
2179 if (t
== (VT_LLONG
| VT_UNSIGNED
)) {
2180 /* not handled natively */
2181 st
= vtop
->type
.t
& VT_BTYPE
;
2183 vpush_global_sym(&func_old_type
, TOK___fixunssfdi
);
2184 #if LDOUBLE_SIZE != 8
2185 else if (st
== VT_LDOUBLE
)
2186 vpush_global_sym(&func_old_type
, TOK___fixunsxfdi
);
2189 vpush_global_sym(&func_old_type
, TOK___fixunsdfdi
);
2194 vtop
->r2
= REG_LRET
;
2201 /* force char or short cast */
2202 static void force_charshort_cast(int t
)
2206 /* XXX: add optimization if lvalue : just change type and offset */
2211 if (t
& VT_UNSIGNED
) {
2212 vpushi((1 << bits
) - 1);
2215 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
)
2221 /* result must be signed or the SAR is converted to an SHL
2222 This was not the case when "t" was a signed short
2223 and the last value on the stack was an unsigned int */
2224 vtop
->type
.t
&= ~VT_UNSIGNED
;
2230 /* cast 'vtop' to 'type'. Casting to bitfields is forbidden. */
2231 static void gen_cast(CType
*type
)
2233 int sbt
, dbt
, sf
, df
, c
, p
;
2235 /* special delayed cast for char/short */
2236 /* XXX: in some cases (multiple cascaded casts), it may still
2238 if (vtop
->r
& VT_MUSTCAST
) {
2239 vtop
->r
&= ~VT_MUSTCAST
;
2240 force_charshort_cast(vtop
->type
.t
);
2243 /* bitfields first get cast to ints */
2244 if (vtop
->type
.t
& VT_BITFIELD
) {
2248 dbt
= type
->t
& (VT_BTYPE
| VT_UNSIGNED
);
2249 sbt
= vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
);
2254 c
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2255 p
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == (VT_CONST
| VT_SYM
);
2257 /* constant case: we can do it now */
2258 /* XXX: in ISOC, cannot do it if error in convert */
2259 if (sbt
== VT_FLOAT
)
2260 vtop
->c
.ld
= vtop
->c
.f
;
2261 else if (sbt
== VT_DOUBLE
)
2262 vtop
->c
.ld
= vtop
->c
.d
;
2265 if ((sbt
& VT_BTYPE
) == VT_LLONG
) {
2266 if ((sbt
& VT_UNSIGNED
) || !(vtop
->c
.i
>> 63))
2267 vtop
->c
.ld
= vtop
->c
.i
;
2269 vtop
->c
.ld
= -(long double)-vtop
->c
.i
;
2271 if ((sbt
& VT_UNSIGNED
) || !(vtop
->c
.i
>> 31))
2272 vtop
->c
.ld
= (uint32_t)vtop
->c
.i
;
2274 vtop
->c
.ld
= -(long double)-(uint32_t)vtop
->c
.i
;
2277 if (dbt
== VT_FLOAT
)
2278 vtop
->c
.f
= (float)vtop
->c
.ld
;
2279 else if (dbt
== VT_DOUBLE
)
2280 vtop
->c
.d
= (double)vtop
->c
.ld
;
2281 } else if (sf
&& dbt
== (VT_LLONG
|VT_UNSIGNED
)) {
2282 vtop
->c
.i
= vtop
->c
.ld
;
2283 } else if (sf
&& dbt
== VT_BOOL
) {
2284 vtop
->c
.i
= (vtop
->c
.ld
!= 0);
2287 vtop
->c
.i
= vtop
->c
.ld
;
2288 else if (sbt
== (VT_LLONG
|VT_UNSIGNED
))
2290 else if (sbt
& VT_UNSIGNED
)
2291 vtop
->c
.i
= (uint32_t)vtop
->c
.i
;
2292 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
2293 else if (sbt
== VT_PTR
)
2296 else if (sbt
!= VT_LLONG
)
2297 vtop
->c
.i
= ((uint32_t)vtop
->c
.i
|
2298 -(vtop
->c
.i
& 0x80000000));
2300 if (dbt
== (VT_LLONG
|VT_UNSIGNED
))
2302 else if (dbt
== VT_BOOL
)
2303 vtop
->c
.i
= (vtop
->c
.i
!= 0);
2304 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
2305 else if (dbt
== VT_PTR
)
2308 else if (dbt
!= VT_LLONG
) {
2309 uint32_t m
= ((dbt
& VT_BTYPE
) == VT_BYTE
? 0xff :
2310 (dbt
& VT_BTYPE
) == VT_SHORT
? 0xffff :
2313 if (!(dbt
& VT_UNSIGNED
))
2314 vtop
->c
.i
|= -(vtop
->c
.i
& ((m
>> 1) + 1));
2317 } else if (p
&& dbt
== VT_BOOL
) {
2321 /* non constant case: generate code */
2323 /* convert from fp to fp */
2326 /* convert int to fp */
2329 /* convert fp to int */
2330 if (dbt
== VT_BOOL
) {
2334 if (sbt
== VT_FLOAT
) {
2335 /* cast to DOUBLE to avoid precision loss */
2336 gen_cvt_ftof(VT_DOUBLE
);
2337 vtop
->type
.t
= (vtop
->type
.t
& ~VT_BTYPE
) | VT_DOUBLE
;
2339 /* we handle char/short/etc... with generic code */
2340 if (dbt
!= (VT_INT
| VT_UNSIGNED
) &&
2341 dbt
!= (VT_LLONG
| VT_UNSIGNED
) &&
2345 if (dbt
== VT_INT
&& (type
->t
& (VT_BTYPE
| VT_UNSIGNED
)) != dbt
) {
2346 /* additional cast for char/short... */
2351 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
2352 } else if ((dbt
& VT_BTYPE
) == VT_LLONG
) {
2353 if ((sbt
& VT_BTYPE
) != VT_LLONG
) {
2354 /* scalar to long long */
2355 /* machine independent conversion */
2357 /* generate high word */
2358 if (sbt
== (VT_INT
| VT_UNSIGNED
)) {
2362 if (sbt
== VT_PTR
) {
2363 /* cast from pointer to int before we apply
2364 shift operation, which pointers don't support*/
2365 gen_cast(&int_type
);
2371 /* patch second register */
2372 vtop
[-1].r2
= vtop
->r
;
2376 } else if ((dbt
& VT_BTYPE
) == VT_LLONG
||
2377 (dbt
& VT_BTYPE
) == VT_PTR
||
2378 (dbt
& VT_BTYPE
) == VT_FUNC
) {
2379 if ((sbt
& VT_BTYPE
) != VT_LLONG
&&
2380 (sbt
& VT_BTYPE
) != VT_PTR
&&
2381 (sbt
& VT_BTYPE
) != VT_FUNC
) {
2382 /* need to convert from 32bit to 64bit */
2384 if (sbt
!= (VT_INT
| VT_UNSIGNED
)) {
2385 #if defined(TCC_TARGET_ARM64)
2387 #elif defined(TCC_TARGET_X86_64)
2389 /* x86_64 specific: movslq */
2391 o(0xc0 + (REG_VALUE(r
) << 3) + REG_VALUE(r
));
2398 } else if (dbt
== VT_BOOL
) {
2399 /* scalar to bool */
2402 } else if ((dbt
& VT_BTYPE
) == VT_BYTE
||
2403 (dbt
& VT_BTYPE
) == VT_SHORT
) {
2404 if (sbt
== VT_PTR
) {
2405 vtop
->type
.t
= VT_INT
;
2406 tcc_warning("nonportable conversion from pointer to char/short");
2408 force_charshort_cast(dbt
);
2409 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
2410 } else if ((dbt
& VT_BTYPE
) == VT_INT
) {
2412 if ((sbt
& VT_BTYPE
) == VT_LLONG
) {
2413 /* from long long: just take low order word */
2417 /* if lvalue and single word type, nothing to do because
2418 the lvalue already contains the real type size (see
2419 VT_LVAL_xxx constants) */
2423 } else if ((dbt
& VT_BTYPE
) == VT_PTR
&& !(vtop
->r
& VT_LVAL
)) {
2424 /* if we are casting between pointer types,
2425 we must update the VT_LVAL_xxx size */
2426 vtop
->r
= (vtop
->r
& ~VT_LVAL_TYPE
)
2427 | (lvalue_type(type
->ref
->type
.t
) & VT_LVAL_TYPE
);
2432 /* return type size as known at compile time. Put alignment at 'a' */
2433 ST_FUNC
int type_size(CType
*type
, int *a
)
2438 bt
= type
->t
& VT_BTYPE
;
2439 if (bt
== VT_STRUCT
) {
2444 } else if (bt
== VT_PTR
) {
2445 if (type
->t
& VT_ARRAY
) {
2449 ts
= type_size(&s
->type
, a
);
2451 if (ts
< 0 && s
->c
< 0)
2459 } else if (bt
== VT_LDOUBLE
) {
2461 return LDOUBLE_SIZE
;
2462 } else if (bt
== VT_DOUBLE
|| bt
== VT_LLONG
) {
2463 #ifdef TCC_TARGET_I386
2464 #ifdef TCC_TARGET_PE
2469 #elif defined(TCC_TARGET_ARM)
2479 } else if (bt
== VT_INT
|| bt
== VT_FLOAT
) {
2482 } else if (bt
== VT_SHORT
) {
2485 } else if (bt
== VT_QLONG
|| bt
== VT_QFLOAT
) {
2488 } else if (bt
== VT_ENUM
) {
2490 /* Enums might be incomplete, so don't just return '4' here. */
2491 return type
->ref
->c
;
2493 /* char, void, function, _Bool */
2499 /* push type size as known at runtime time on top of value stack. Put
2501 ST_FUNC
void vla_runtime_type_size(CType
*type
, int *a
)
2503 if (type
->t
& VT_VLA
) {
2504 type_size(&type
->ref
->type
, a
);
2505 vset(&int_type
, VT_LOCAL
|VT_LVAL
, type
->ref
->c
);
2507 vpushi(type_size(type
, a
));
2511 static void vla_sp_restore(void) {
2512 if (vlas_in_scope
) {
2513 gen_vla_sp_restore(vla_sp_loc
);
2517 static void vla_sp_restore_root(void) {
2518 if (vlas_in_scope
) {
2519 gen_vla_sp_restore(vla_sp_root_loc
);
2523 /* return the pointed type of t */
2524 static inline CType
*pointed_type(CType
*type
)
2526 return &type
->ref
->type
;
2529 /* modify type so that its it is a pointer to type. */
2530 ST_FUNC
void mk_pointer(CType
*type
)
2533 s
= sym_push(SYM_FIELD
, type
, 0, -1);
2534 type
->t
= VT_PTR
| (type
->t
& ~VT_TYPE
);
2538 /* compare function types. OLD functions match any new functions */
2539 static int is_compatible_func(CType
*type1
, CType
*type2
)
2545 if (!is_compatible_types(&s1
->type
, &s2
->type
))
2547 /* check func_call */
2548 if (s1
->a
.func_call
!= s2
->a
.func_call
)
2550 /* XXX: not complete */
2551 if (s1
->c
== FUNC_OLD
|| s2
->c
== FUNC_OLD
)
2555 while (s1
!= NULL
) {
2558 if (!is_compatible_parameter_types(&s1
->type
, &s2
->type
))
2568 /* return true if type1 and type2 are the same. If unqualified is
2569 true, qualifiers on the types are ignored.
2571 - enums are not checked as gcc __builtin_types_compatible_p ()
2573 static int compare_types(CType
*type1
, CType
*type2
, int unqualified
)
2577 t1
= type1
->t
& VT_TYPE
;
2578 t2
= type2
->t
& VT_TYPE
;
2580 /* strip qualifiers before comparing */
2581 t1
&= ~(VT_CONSTANT
| VT_VOLATILE
);
2582 t2
&= ~(VT_CONSTANT
| VT_VOLATILE
);
2584 /* Default Vs explicit signedness only matters for char */
2585 if ((t1
& VT_BTYPE
) != VT_BYTE
) {
2589 /* An enum is compatible with (unsigned) int. Ideally we would
2590 store the enums signedness in type->ref.a.<some_bit> and
2591 only accept unsigned enums with unsigned int and vice versa.
2592 But one of our callers (gen_assign_cast) always strips VT_UNSIGNED
2593 from pointer target types, so we can't add it here either. */
2594 if ((t1
& VT_BTYPE
) == VT_ENUM
) {
2596 if (type1
->ref
->a
.unsigned_enum
)
2599 if ((t2
& VT_BTYPE
) == VT_ENUM
) {
2601 if (type2
->ref
->a
.unsigned_enum
)
2604 /* XXX: bitfields ? */
2607 /* test more complicated cases */
2608 bt1
= t1
& VT_BTYPE
;
2609 if (bt1
== VT_PTR
) {
2610 type1
= pointed_type(type1
);
2611 type2
= pointed_type(type2
);
2612 return is_compatible_types(type1
, type2
);
2613 } else if (bt1
== VT_STRUCT
) {
2614 return (type1
->ref
== type2
->ref
);
2615 } else if (bt1
== VT_FUNC
) {
2616 return is_compatible_func(type1
, type2
);
2622 /* return true if type1 and type2 are exactly the same (including
2625 static int is_compatible_types(CType
*type1
, CType
*type2
)
2627 return compare_types(type1
,type2
,0);
2630 /* return true if type1 and type2 are the same (ignoring qualifiers).
2632 static int is_compatible_parameter_types(CType
*type1
, CType
*type2
)
2634 return compare_types(type1
,type2
,1);
2637 /* print a type. If 'varstr' is not NULL, then the variable is also
2638 printed in the type */
2640 /* XXX: add array and function pointers */
2641 static void type_to_str(char *buf
, int buf_size
,
2642 CType
*type
, const char *varstr
)
2649 t
= type
->t
& VT_TYPE
;
2652 if (t
& VT_CONSTANT
)
2653 pstrcat(buf
, buf_size
, "const ");
2654 if (t
& VT_VOLATILE
)
2655 pstrcat(buf
, buf_size
, "volatile ");
2656 if ((t
& (VT_DEFSIGN
| VT_UNSIGNED
)) == (VT_DEFSIGN
| VT_UNSIGNED
))
2657 pstrcat(buf
, buf_size
, "unsigned ");
2658 else if (t
& VT_DEFSIGN
)
2659 pstrcat(buf
, buf_size
, "signed ");
2689 tstr
= "long double";
2691 pstrcat(buf
, buf_size
, tstr
);
2695 if (bt
== VT_STRUCT
)
2699 pstrcat(buf
, buf_size
, tstr
);
2700 v
= type
->ref
->v
& ~SYM_STRUCT
;
2701 if (v
>= SYM_FIRST_ANOM
)
2702 pstrcat(buf
, buf_size
, "<anonymous>");
2704 pstrcat(buf
, buf_size
, get_tok_str(v
, NULL
));
2708 type_to_str(buf
, buf_size
, &s
->type
, varstr
);
2709 pstrcat(buf
, buf_size
, "(");
2711 while (sa
!= NULL
) {
2712 type_to_str(buf1
, sizeof(buf1
), &sa
->type
, NULL
);
2713 pstrcat(buf
, buf_size
, buf1
);
2716 pstrcat(buf
, buf_size
, ", ");
2718 pstrcat(buf
, buf_size
, ")");
2723 snprintf(buf1
, sizeof(buf1
), "%s[%ld]", varstr
? varstr
: "", s
->c
);
2724 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
2727 pstrcpy(buf1
, sizeof(buf1
), "*");
2728 if (t
& VT_CONSTANT
)
2729 pstrcat(buf1
, buf_size
, "const ");
2730 if (t
& VT_VOLATILE
)
2731 pstrcat(buf1
, buf_size
, "volatile ");
2733 pstrcat(buf1
, sizeof(buf1
), varstr
);
2734 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
2738 pstrcat(buf
, buf_size
, " ");
2739 pstrcat(buf
, buf_size
, varstr
);
/* verify type compatibility to store vtop in 'dt' type, and generate ... */
static void gen_assign_cast(CType *dt)
    CType *st, *type1, *type2, tmp_type1, tmp_type2;
    char buf1[256], buf2[256];
    st = &vtop->type; /* source type */
    dbt = dt->t & VT_BTYPE;
    sbt = st->t & VT_BTYPE;
    if (sbt == VT_VOID || dbt == VT_VOID) {
        if (sbt == VT_VOID && dbt == VT_VOID)
            /* It is Ok if both are void ...
               gcc accepts this program */
        tcc_error("cannot cast from/to void");
    if (dt->t & VT_CONSTANT)
        tcc_warning("assignment of read-only location");
    /* special cases for pointers */
    /* '0' can also be a pointer */
    if (is_null_pointer(vtop))
    /* accept implicit pointer to integer cast with warning */
    if (is_integer_btype(sbt)) {
        tcc_warning("assignment makes pointer from integer without a cast");
    type1 = pointed_type(dt);
    /* a function is implicitly a function pointer */
    if (sbt == VT_FUNC) {
        if ((type1->t & VT_BTYPE) != VT_VOID &&
            !is_compatible_types(pointed_type(dt), st))
            tcc_warning("assignment from incompatible pointer type");
    type2 = pointed_type(st);
    if ((type1->t & VT_BTYPE) == VT_VOID ||
        (type2->t & VT_BTYPE) == VT_VOID) {
        /* void * can match anything */
    /* exact type match, except for qualifiers */
    tmp_type1.t &= ~(VT_CONSTANT | VT_VOLATILE);
    tmp_type2.t &= ~(VT_CONSTANT | VT_VOLATILE);
    if (!is_compatible_types(&tmp_type1, &tmp_type2)) {
        /* Like GCC don't warn by default for merely changes
           in pointer target signedness. Do warn for different
           base types, though, in particular for unsigned enums
           and signed int targets. */
        if ((tmp_type1.t & (VT_DEFSIGN | VT_UNSIGNED)) !=
            (tmp_type2.t & (VT_DEFSIGN | VT_UNSIGNED)) &&
            (tmp_type1.t & VT_BTYPE) == (tmp_type2.t & VT_BTYPE))
        tcc_warning("assignment from incompatible pointer type");
    /* check const and volatile */
    if ((!(type1->t & VT_CONSTANT) && (type2->t & VT_CONSTANT)) ||
        (!(type1->t & VT_VOLATILE) && (type2->t & VT_VOLATILE)))
        tcc_warning("assignment discards qualifiers from pointer target type");
    if (sbt == VT_PTR || sbt == VT_FUNC) {
        tcc_warning("assignment makes integer from pointer without a cast");
    } else if (sbt == VT_STRUCT) {
        goto case_VT_STRUCT;
    /* XXX: more tests */
    tmp_type1.t &= ~(VT_CONSTANT | VT_VOLATILE);
    tmp_type2.t &= ~(VT_CONSTANT | VT_VOLATILE);
    if (!is_compatible_types(&tmp_type1, &tmp_type2)) {
        type_to_str(buf1, sizeof(buf1), st, NULL);
        type_to_str(buf2, sizeof(buf2), dt, NULL);
        tcc_error("cannot cast '%s' to '%s'", buf1, buf2);
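
/* Illustrative inputs (added) that reach the diagnostics above:
       int *p = 42;                   // makes pointer from integer without a cast
       float *f; int *q; f = q;       // assignment from incompatible pointer type
       const char *cs; char *s2 = cs; // discards qualifiers from pointer target type
       int i; i = p;                  // makes integer from pointer without a cast
   (variable names are hypothetical, only meant to show which branch fires) */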
/* store vtop in lvalue pushed on stack */
ST_FUNC void vstore(void)
    int sbt, dbt, ft, r, t, size, align, bit_size, bit_pos, rc, delayed_cast;
    ft = vtop[-1].type.t;
    sbt = vtop->type.t & VT_BTYPE;
    dbt = ft & VT_BTYPE;
    if ((((sbt == VT_INT || sbt == VT_SHORT) && dbt == VT_BYTE) ||
         (sbt == VT_INT && dbt == VT_SHORT))
        && !(vtop->type.t & VT_BITFIELD)) {
        /* optimize char/short casts */
        delayed_cast = VT_MUSTCAST;
        vtop->type.t = (ft & VT_TYPE & ~VT_BITFIELD &
                        ((1 << VT_STRUCT_SHIFT) - 1));
        /* XXX: factorize */
        if (ft & VT_CONSTANT)
            tcc_warning("assignment of read-only location");
    if (!(ft & VT_BITFIELD))
        gen_assign_cast(&vtop[-1].type);
    if (sbt == VT_STRUCT) {
        /* if structure, only generate pointer */
        /* structure assignment : generate memcpy */
        /* XXX: optimize if small size */
        size = type_size(&vtop->type, &align);
        vtop->type.t = VT_PTR;
        /* address of memcpy() */
            vpush_global_sym(&func_old_type, TOK_memcpy8);
        else if (!(align & 3))
            vpush_global_sym(&func_old_type, TOK_memcpy4);
            /* Use memmove, rather than memcpy, as dest and src may be same: */
            vpush_global_sym(&func_old_type, TOK_memmove);
        vtop->type.t = VT_PTR;
        /* leave source on stack */
    } else if (ft & VT_BITFIELD) {
        /* bitfield store handling */
        /* save lvalue as expression result (example: s.b = s.a = n;) */
        vdup(), vtop[-1] = vtop[-2];
        bit_pos = (ft >> VT_STRUCT_SHIFT) & 0x3f;
        bit_size = (ft >> (VT_STRUCT_SHIFT + 6)) & 0x3f;
        /* remove bit field info to avoid loops */
        vtop[-1].type.t = ft & ~VT_BITFIELD & ((1 << VT_STRUCT_SHIFT) - 1);
        if ((ft & VT_BTYPE) == VT_BOOL) {
            gen_cast(&vtop[-1].type);
            vtop[-1].type.t = (vtop[-1].type.t & ~VT_BTYPE) | (VT_BYTE | VT_UNSIGNED);
        /* duplicate destination */
        vtop[-1] = vtop[-2];
        /* mask and shift source */
        if ((ft & VT_BTYPE) != VT_BOOL) {
            if ((ft & VT_BTYPE) == VT_LLONG) {
                vpushll((1ULL << bit_size) - 1ULL);
                vpushi((1 << bit_size) - 1);
        /* load destination, mask and or with source */
        if ((ft & VT_BTYPE) == VT_LLONG) {
            vpushll(~(((1ULL << bit_size) - 1ULL) << bit_pos));
            vpushi(~(((1 << bit_size) - 1) << bit_pos));
        /* ... and discard */
#ifdef CONFIG_TCC_BCHECK
        /* bound check case */
        if (vtop[-1].r & VT_MUSTBOUND) {
#ifdef TCC_TARGET_X86_64
        if ((ft & VT_BTYPE) == VT_LDOUBLE) {
        } else if ((ft & VT_BTYPE) == VT_QFLOAT) {
        r = gv(rc); /* generate value */
        /* if lvalue was saved on stack, must read it */
        if ((vtop[-1].r & VT_VALMASK) == VT_LLOCAL) {
            t = get_reg(RC_INT);
#if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
            sv.r = VT_LOCAL | VT_LVAL;
            sv.c.i = vtop[-1].c.i;
            vtop[-1].r = t | VT_LVAL;
        /* two word case handling : store second register at word + 4 (or +8 for x86-64) */
#if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
        if (((ft & VT_BTYPE) == VT_QLONG) || ((ft & VT_BTYPE) == VT_QFLOAT)) {
            int addr_type = VT_LLONG, load_size = 8, load_type = ((vtop->type.t & VT_BTYPE) == VT_QLONG) ? VT_LLONG : VT_DOUBLE;
        if ((ft & VT_BTYPE) == VT_LLONG) {
            int addr_type = VT_INT, load_size = 4, load_type = VT_INT;
            vtop[-1].type.t = load_type;
            /* convert to int to increment easily */
            vtop->type.t = addr_type;
            vtop[-1].type.t = load_type;
            /* XXX: it works because r2 is spilled last ! */
            store(vtop->r2, vtop - 1);
    vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
    vtop->r |= delayed_cast;
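
/* Sketch of the bit-field path above (added): for
       struct S { unsigned a:3, b:5; } s;   // hypothetical example
       s.b = x;
   bit_pos is 3 and bit_size is 5, so the source is masked with
   (1 << 5) - 1 and shifted left by 3, the old destination word is ANDed
   with ~(((1 << 5) - 1) << 3) to clear those bits, and the two halves are
   ORed back together before the ordinary store. */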
/* post defines POST/PRE add. c is the token ++ or -- */
ST_FUNC void inc(int post, int c)
    vdup(); /* save lvalue */
    gv_dup(); /* duplicate value */
    vpushi(c - TOK_MID);
    vstore(); /* store value */
    vpop(); /* if post op, return saved value */
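
/* Note (added): for 'i++' this runs vdup() to keep the lvalue, gv_dup() to
   keep the old value as the expression result, pushes c - TOK_MID (which by
   construction is +1 for TOK_INC and -1 for TOK_DEC), adds, calls vstore(),
   and finally vpop()s the stored value so the saved pre-increment value
   remains on top; a prefix '++i' skips that final vpop(). */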
ST_FUNC void parse_mult_str (CString *astr, const char *msg)
    /* read the string */
    while (tok == TOK_STR) {
        /* XXX: add \0 handling too ? */
        cstr_cat(astr, tokc.str.data, -1);
    cstr_ccat(astr, '\0');
/* If I is >= 1 and a power of two, returns log2(i)+1.
   If I is 0 returns 0. */
static int exact_log2p1(int i)
    for (ret = 1; i >= 1 << 8; ret += 8)
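
/* Example values (added): exact_log2p1(1) == 1, exact_log2p1(8) == 4,
   exact_log2p1(256) == 9 and exact_log2p1(0) == 0. parse_attribute() below
   stores this encoding in a.aligned so that 0 can mean "no explicit
   alignment" while 1 << (a.aligned - 1) recovers the requested value. */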
/* Parse GNUC __attribute__ extension. Currently, the following
   extensions are recognized:
   - aligned(n) : set data/function alignment.
   - packed : force data alignment to 1
   - section(x) : generate data/code in this section.
   - unused : currently ignored, but may be used someday.
   - regparm(n) : pass function parameters in registers (i386 only)
 */
static void parse_attribute(AttributeDef *ad)
    while (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2) {
        while (tok != ')') {
            if (tok < TOK_IDENT)
                expect("attribute name");
                parse_mult_str(&astr, "section name");
                ad->section = find_section(tcc_state, (char *)astr.data);
                parse_mult_str(&astr, "alias(\"target\")");
                ad->alias_target = /* save string as token, for later */
                    tok_alloc((char*)astr.data, astr.size-1)->tok;
            case TOK_VISIBILITY1:
            case TOK_VISIBILITY2:
                parse_mult_str(&astr,
                               "visibility(\"default|hidden|internal|protected\")");
                if (!strcmp (astr.data, "default"))
                    ad->a.visibility = STV_DEFAULT;
                else if (!strcmp (astr.data, "hidden"))
                    ad->a.visibility = STV_HIDDEN;
                else if (!strcmp (astr.data, "internal"))
                    ad->a.visibility = STV_INTERNAL;
                else if (!strcmp (astr.data, "protected"))
                    ad->a.visibility = STV_PROTECTED;
                    expect("visibility(\"default|hidden|internal|protected\")");
                if (n <= 0 || (n & (n - 1)) != 0)
                    tcc_error("alignment must be a positive power of two");
                ad->a.aligned = exact_log2p1(n);
                if (n != 1 << (ad->a.aligned - 1))
                    tcc_error("alignment of %d is larger than implemented", n);
                /* currently, no need to handle it because tcc does not
                   track unused objects */
                /* currently, no need to handle it because tcc does not
                   track unused objects */
                ad->a.func_call = FUNC_CDECL;
                ad->a.func_call = FUNC_STDCALL;
#ifdef TCC_TARGET_I386
                ad->a.func_call = FUNC_FASTCALL1 + n - 1;
                ad->a.func_call = FUNC_FASTCALLW;
                ad->a.mode = VT_LLONG + 1;
                ad->a.mode = VT_BYTE + 1;
                ad->a.mode = VT_SHORT + 1;
                ad->a.mode = VT_INT + 1;
                tcc_warning("__mode__(%s) not supported\n", get_tok_str(tok, NULL));
                ad->a.func_export = 1;
                ad->a.func_import = 1;
                if (tcc_state->warn_unsupported)
                    tcc_warning("'%s' attribute ignored", get_tok_str(t, NULL));
                /* skip parameters */
                    int parenthesis = 0;
                    else if (tok == ')')
                } while (parenthesis && tok != -1);
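
/* Illustrative input (added): a declaration such as
       int buf[32] __attribute__((aligned(16), section(".mydata"), unused));
   iterates the attribute loop above three times: aligned(16) stores
   exact_log2p1(16) == 5 in ad->a.aligned, section(".mydata") looks the
   section up with find_section(), and unused falls through to the
   "currently ignored" branch ('.mydata' is a made-up section name). */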
static Sym * find_field (CType *type, int v)
    while ((s = s->next) != NULL) {
        if ((s->v & SYM_FIELD) &&
            (s->type.t & VT_BTYPE) == VT_STRUCT &&
            (s->v & ~SYM_FIELD) >= SYM_FIRST_ANOM) {
            Sym *ret = find_field (&s->type, v);
static void struct_add_offset (Sym *s, int offset)
    while ((s = s->next) != NULL) {
        if ((s->v & SYM_FIELD) &&
            (s->type.t & VT_BTYPE) == VT_STRUCT &&
            (s->v & ~SYM_FIELD) >= SYM_FIRST_ANOM) {
            struct_add_offset(s->type.ref, offset);
static void struct_layout(CType *type, AttributeDef *ad)
    int align, maxalign, offset, c, bit_pos, bt, prevbt, prev_bit_size;
    int pcc = !tcc_state->ms_bitfields;
    maxalign = 1 << (ad->a.aligned - 1);
    prevbt = VT_STRUCT; /* make it never match */
    for (f = type->ref->next; f; f = f->next) {
        int typealign, bit_size;
        int size = type_size(&f->type, &typealign);
        if (f->type.t & VT_BITFIELD)
            bit_size = (f->type.t >> (VT_STRUCT_SHIFT + 6)) & 0x3f;
        if (bit_size == 0 && pcc) {
            /* Zero-width bit-fields in PCC mode aren't affected
               by any packing (attribute or pragma). */
        } else if (f->r > 1) {
        } else if (ad->a.packed || f->r == 1) {
            /* Packed fields or packed records don't let the base type
               influence the records type alignment. */
        if (type->ref->type.t != TOK_STRUCT) {
            if (pcc && bit_size >= 0)
                size = (bit_size + 7) >> 3;
            /* Bit position is already zero from our caller. */
        } else if (bit_size < 0) {
            int addbytes = pcc ? (bit_pos + 7) >> 3 : 0;
            c = (c + addbytes + align - 1) & -align;
            /* A bit-field. Layout is more complicated. There are two
               options TCC implements: PCC compatible and MS compatible
               (PCC compatible is what GCC uses for almost all targets).
               In PCC layout the overall size of the struct (in c) is
               _excluding_ the current run of bit-fields (that is,
               there's at least additional bit_pos bits after c). In
               MS layout c does include the current run of bit-fields.
               This matters for calculating the natural alignment buckets ... */
            /* 'align' will be used to influence records alignment,
               so it's the max of specified and type alignment, except
               in certain cases that depend on the mode. */
            if (align < typealign)
                /* In PCC layout a non-packed bit-field is placed adjacent
                   to the preceding bit-fields, except if it would overflow
                   its container (depending on base type) or it's a zero-width
                   bit-field. Packed non-zero-width bit-fields always are ... */
                int ofs = (c * 8 + bit_pos) % (typealign * 8);
                int ofs2 = ofs + bit_size + (typealign * 8) - 1;
                if (bit_size == 0 ||
                    (ofs2 / (typealign * 8)) > (size/typealign))) {
                    c = (c + ((bit_pos + 7) >> 3) + typealign - 1) & -typealign;
                /* In PCC layout named bit-fields influence the alignment
                   of the containing struct using the base types alignment,
                   except for packed fields (which here have correct
                   align/typealign). */
                if ((f->v & SYM_FIRST_ANOM))
                bt = f->type.t & VT_BTYPE;
                if ((bit_pos + bit_size > size * 8) ||
                    (bit_size > 0) == (bt != prevbt)) {
                    c = (c + typealign - 1) & -typealign;
                    /* In MS bitfield mode a bit-field run always uses
                       at least as many bits as the underlying type.
                       To start a new run it's also required that this
                       or the last bit-field had non-zero width. */
                    if (bit_size || prev_bit_size)
                /* In MS layout the records alignment is normally
                   influenced by the field, except for a zero-width
                   field at the start of a run (but by further zero-width
                   fields it is again). */
                if (bit_size == 0 && prevbt != bt)
                prev_bit_size = bit_size;
            f->type.t = (f->type.t & ~(0x3f << VT_STRUCT_SHIFT))
                        | (bit_pos << VT_STRUCT_SHIFT);
            bit_pos += bit_size;
            if (pcc && bit_pos >= size * 8) {
                bit_pos -= size * 8;
        if (align > maxalign)
        printf("set field %s offset=%d c=%d",
               get_tok_str(f->v & ~SYM_FIELD, NULL), offset, c);
        if (f->type.t & VT_BITFIELD) {
            printf(" pos=%d size=%d",
                   (f->type.t >> VT_STRUCT_SHIFT) & 0x3f,
                   (f->type.t >> (VT_STRUCT_SHIFT + 6)) & 0x3f);
        if (f->v & SYM_FIRST_ANOM && (f->type.t & VT_BTYPE) == VT_STRUCT) {
            /* An anonymous struct/union. Adjust member offsets
               to reflect the real offset of our containing struct.
               Also set the offset of this anon member inside
               the outer struct to be zero. Via this it
               works when accessing the field offset directly
               (from base object), as well as when recursing
               members in initializer handling. */
            int v2 = f->type.ref->v;
            if (!(v2 & SYM_FIELD) &&
                (v2 & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
                /* This happens only with MS extensions. The
                   anon member has a named struct type, so it
                   potentially is shared with other references.
                   We need to unshare members so we can modify ... */
                f->type.ref = sym_push(anon_sym++ | SYM_FIELD,
                                       &f->type.ref->type, 0,
                pps = &f->type.ref->next;
                while ((ass = ass->next) != NULL) {
                    *pps = sym_push(ass->v, &ass->type, 0, ass->c);
                    pps = &((*pps)->next);
            struct_add_offset(f->type.ref, offset);
    /* store size and alignment */
    type->ref->c = (c + (pcc ? (bit_pos + 7) >> 3 : 0)
                    + maxalign - 1) & -maxalign;
    type->ref->r = maxalign;
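
/* Worked example (added), assuming the default PCC-compatible mode
   (tcc_state->ms_bitfields == 0): in
       struct B { int a:3; int b:7; char c; };
   both bit-fields share one 'int' container, a at bits 0..2 and b at
   bits 3..9; the plain member c then ends the run, the pending 10 bits are
   rounded up to whole bytes before c is placed, and the struct keeps the
   int's alignment. With ms_bitfields set, a bit-field run always reserves
   at least a full container of the base type, so layouts can differ. */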
/* enum/struct/union declaration. u is either VT_ENUM or VT_STRUCT */
static void struct_decl(CType *type, AttributeDef *ad, int u)
    int a, v, size, align, flexible, alignoverride;
    int bit_size, bsize, bt;
    a = tok; /* save decl type */
    if (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2)
        parse_attribute(ad);
    /* struct already defined ? return it */
        expect("struct/union/enum name");
    if (s && (s->scope == local_scope || (tok != '{' && tok != ';'))) {
        tcc_error("redefinition of '%s'", get_tok_str(v, NULL));
    /* Record the original enum/struct/union token. */
    /* we put an undefined size for struct/union */
    s = sym_push(v | SYM_STRUCT, &type1, 0, -1);
    s->r = 0; /* default alignment is zero as gcc */
    /* put struct/union/enum name in type */
        tcc_error("struct/union/enum already defined");
        /* cannot be empty */
        /* non empty enums are not allowed */
        if (a == TOK_ENUM) {
            CType *t = &int_type;
                expect("identifier");
                if (ss && !local_stack)
                    tcc_error("redefinition of enumerator '%s'",
                              get_tok_str(v, NULL));
#if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
                /* We really want to support long long enums
                   on i386 as well, but the Sym structure only
                   holds a 'long' for associated constants,
                   and enlarging it would bump its size (no
                   available padding). So punt for now. */
                if (c != (int)c && (unsigned long)c != (unsigned int)c)
                    seen_wide = 1, t = &size_type;
                /* enum symbols have static storage */
                ss = sym_push(v, t, VT_CONST, c);
                ss->type.t |= VT_STATIC;
            /* NOTE: we accept a trailing comma */
                s->a.unsigned_enum = 1;
            s->c = type_size(seen_wide ? &size_type : &int_type, &align);
            while (tok != '}') {
                if (!parse_btype(&btype, &ad1)) {
                    tcc_error("flexible array member '%s' not at the end of struct",
                              get_tok_str(v, NULL));
                type_decl(&type1, &ad1, &v, TYPE_DIRECT | TYPE_ABSTRACT);
                    if ((type1.t & VT_BTYPE) != VT_STRUCT)
                        expect("identifier");
                        int v = btype.ref->v;
                        if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
                            if (tcc_state->ms_extensions == 0)
                                expect("identifier");
                if (type_size(&type1, &align) < 0) {
                    if ((a == TOK_STRUCT) && (type1.t & VT_ARRAY) && c)
                        tcc_error("field '%s' has incomplete type",
                                  get_tok_str(v, NULL));
                if ((type1.t & VT_BTYPE) == VT_FUNC ||
                    (type1.t & (VT_TYPEDEF | VT_STATIC | VT_EXTERN | VT_INLINE)))
                    tcc_error("invalid type for '%s'",
                              get_tok_str(v, NULL));
                    bit_size = expr_const();
                    /* XXX: handle v = 0 case for messages */
                        tcc_error("negative width in bit-field '%s'",
                                  get_tok_str(v, NULL));
                    if (v && bit_size == 0)
                        tcc_error("zero width for bit-field '%s'",
                                  get_tok_str(v, NULL));
                if (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2)
                    parse_attribute(&ad1);
                size = type_size(&type1, &align);
                /* Only remember non-default alignment. */
                if (ad1.a.aligned) {
                    int speca = 1 << (ad1.a.aligned - 1);
                    alignoverride = speca;
                } else if (ad1.a.packed || ad->a.packed) {
                } else if (*tcc_state->pack_stack_ptr) {
                    if (align > *tcc_state->pack_stack_ptr)
                        alignoverride = *tcc_state->pack_stack_ptr;
                if (bit_size >= 0) {
                    bt = type1.t & VT_BTYPE;
                        tcc_error("bitfields must have scalar type");
                    if (bit_size > bsize) {
                        tcc_error("width of '%s' exceeds its type",
                                  get_tok_str(v, NULL));
                    } else if (bit_size == bsize) {
                        /* no need for bit fields */
                        type1.t |= VT_BITFIELD |
                            (0 << VT_STRUCT_SHIFT) |
                            (bit_size << (VT_STRUCT_SHIFT + 6));
                if (v != 0 || (type1.t & VT_BTYPE) == VT_STRUCT) {
                    /* Remember we've seen a real field to check
                       for placement of flexible array member. */
                /* If member is a struct or bit-field, enforce
                   placing into the struct (as anonymous). */
                    ((type1.t & VT_BTYPE) == VT_STRUCT ||
                    ss = sym_push(v | SYM_FIELD, &type1, alignoverride, 0);
                if (tok == ';' || tok == TOK_EOF)
        if (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2)
            parse_attribute(ad);
        struct_layout(type, ad);
/* return 1 if basic type is a type size (short, long, long long) */
ST_FUNC int is_btype_size(int bt)
    return bt == VT_SHORT || bt == VT_LONG || bt == VT_LLONG;
/* Add type qualifiers to a type. If the type is an array then the qualifiers
   are added to the element type, copied because it could be a typedef. */
static void parse_btype_qualify(CType *type, int qualifiers)
    while (type->t & VT_ARRAY) {
        type->ref = sym_push(SYM_FIELD, &type->ref->type, 0, type->ref->c);
        type = &type->ref->type;
    type->t |= qualifiers;
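
/* Illustrative case (added): with
       typedef int A[4];
       const A x;
   the loop above copies the array Sym and applies VT_CONSTANT to the
   element type, so the shared typedef 'A' itself stays unqualified. */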
/* return 0 if no type declaration. otherwise, return the basic type ... */
static int parse_btype(CType *type, AttributeDef *ad)
    int t, u, bt_size, complete, type_found, typespec_found;
    memset(ad, 0, sizeof(AttributeDef));
        /* currently, we really ignore extension */
            tcc_error("too many basic types");
        bt_size = is_btype_size (u & VT_BTYPE);
        if (u == VT_INT || (!bt_size && !(t & VT_TYPEDEF)))
        if ((t & VT_BTYPE) == VT_DOUBLE) {
#ifndef TCC_TARGET_PE
            t = (t & ~VT_BTYPE) | VT_LDOUBLE;
        } else if ((t & VT_BTYPE) == VT_LONG) {
            t = (t & ~VT_BTYPE) | VT_LLONG;
#ifdef TCC_TARGET_ARM64
        /* GCC's __uint128_t appears in some Linux header files. Make it a
           synonym for long double to get the size and alignment right. */
        if ((t & VT_BTYPE) == VT_LONG) {
#ifdef TCC_TARGET_PE
            t = (t & ~VT_BTYPE) | VT_DOUBLE;
            t = (t & ~VT_BTYPE) | VT_LDOUBLE;
            struct_decl(&type1, ad, VT_ENUM);
            type->ref = type1.ref;
            struct_decl(&type1, ad, VT_STRUCT);
        /* type modifiers */
            parse_btype_qualify(type, VT_CONSTANT);
            parse_btype_qualify(type, VT_VOLATILE);
            if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == (VT_DEFSIGN|VT_UNSIGNED))
                tcc_error("signed and unsigned modifier");
            if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == VT_DEFSIGN)
                tcc_error("signed and unsigned modifier");
            t |= VT_DEFSIGN | VT_UNSIGNED;
        /* GNUC attribute */
        case TOK_ATTRIBUTE1:
        case TOK_ATTRIBUTE2:
            parse_attribute(ad);
            t = (t & ~VT_BTYPE) | u;
            parse_expr_type(&type1);
            /* remove all storage modifiers except typedef */
            type1.t &= ~(VT_STORAGE&~VT_TYPEDEF);
            if (!s || !(s->type.t & VT_TYPEDEF))
            type->t = ((s->type.t & ~VT_TYPEDEF) |
                       (t & ~(VT_CONSTANT | VT_VOLATILE)));
            type->ref = s->type.ref;
            if (t & (VT_CONSTANT | VT_VOLATILE))
                parse_btype_qualify(type, t & (VT_CONSTANT | VT_VOLATILE));
            /* get attributes from typedef */
            if (0 == ad->a.aligned)
                ad->a.aligned = s->a.aligned;
            if (0 == ad->a.func_call)
                ad->a.func_call = s->a.func_call;
            ad->a.packed |= s->a.packed;
    if (tcc_state->char_is_unsigned) {
        if ((t & (VT_DEFSIGN|VT_BTYPE)) == VT_BYTE)
    /* long is never used as type */
    if ((t & VT_BTYPE) == VT_LONG)
#if (!defined TCC_TARGET_X86_64 && !defined TCC_TARGET_ARM64) || \
    defined TCC_TARGET_PE
        t = (t & ~VT_BTYPE) | VT_INT;
        t = (t & ~VT_BTYPE) | VT_LLONG;
/* convert a function parameter type (array to pointer and function to
   function pointer) */
static inline void convert_parameter_type(CType *pt)
    /* remove const and volatile qualifiers (XXX: const could be used
       to indicate a const function parameter) */
    pt->t &= ~(VT_CONSTANT | VT_VOLATILE);
    /* array must be transformed to pointer according to ANSI C */
    if ((pt->t & VT_BTYPE) == VT_FUNC) {
ST_FUNC void parse_asm_str(CString *astr)
    parse_mult_str(astr, "string constant");

/* Parse an asm label and return the token */
static int asm_label_instr(void)
    parse_asm_str(&astr);
    printf("asm_alias: \"%s\"\n", (char *)astr.data);
    v = tok_alloc(astr.data, astr.size - 1)->tok;
static void post_type(CType *type, AttributeDef *ad, int storage)
    int n, l, t1, arg_size, align;
    Sym **plast, *s, *first;
        /* function declaration */
        /* read param name and compute offset */
        if (l != FUNC_OLD) {
            if (!parse_btype(&pt, &ad1)) {
                tcc_error("invalid type");
            if ((pt.t & VT_BTYPE) == VT_VOID && tok == ')')
            type_decl(&pt, &ad1, &n, TYPE_DIRECT | TYPE_ABSTRACT);
            if ((pt.t & VT_BTYPE) == VT_VOID)
                tcc_error("parameter declared as void");
            arg_size += (type_size(&pt, &align) + PTR_SIZE - 1) / PTR_SIZE;
                expect("identifier");
            convert_parameter_type(&pt);
            s = sym_push(n | SYM_FIELD, &pt, 0, 0);
            if (l == FUNC_NEW && tok == TOK_DOTS) {
        /* if no parameters, then old type prototype */
        /* NOTE: const is ignored in returned type as it has a special
           meaning in gcc / C++ */
        type->t &= ~VT_CONSTANT;
        /* some ancient pre-K&R C allows a function to return an array
           and the array brackets to be put after the arguments, such
           that "int c()[]" means something like "int[] c()" */
            skip(']'); /* only handle simple "[]" */
        /* we push an anonymous symbol which will contain the function prototype */
        ad->a.func_args = arg_size;
        s = sym_push(SYM_FIELD, type, 0, l);
    } else if (tok == '[') {
        int saved_nocode_wanted = nocode_wanted;
        /* array definition */
        if (tok == TOK_RESTRICT1)
        if (!local_stack || (storage & VT_STATIC))
            vpushi(expr_const());
        /* VLAs (which can only happen with local_stack && !VT_STATIC)
           length must always be evaluated, even under nocode_wanted,
           so that its size slot is initialized (e.g. under sizeof ... */
        if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
                tcc_error("invalid array size");
            if (!is_integer_btype(vtop->type.t & VT_BTYPE))
                tcc_error("size of variable length array should be an integer");
        /* parse next post type */
        post_type(type, ad, storage);
        if (type->t == VT_FUNC)
            tcc_error("declaration of an array of functions");
        t1 |= type->t & VT_VLA;
            loc -= type_size(&int_type, &align);
            vla_runtime_type_size(type, &align);
            vset(&int_type, VT_LOCAL|VT_LVAL, n);
        nocode_wanted = saved_nocode_wanted;
        /* we push an anonymous symbol which will contain the array ... */
        s = sym_push(SYM_FIELD, type, 0, n);
        type->t = (t1 ? VT_VLA : VT_ARRAY) | VT_PTR;
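
/* Illustrative declarators (added): 'int f(int a, char *b)' takes the
   function branch above (each parameter is parsed, passed through
   convert_parameter_type() and pushed as a SYM_FIELD), while 'int v[n]'
   with non-constant n inside a function becomes a VLA: the length
   expression is evaluated even under nocode_wanted so its size slot is
   initialized, and the resulting type is marked VT_VLA | VT_PTR. */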
/* Parse a type declaration (except basic type), and return the type
   in 'type'. 'td' is a bitmask indicating which kind of type decl is
   expected. 'type' should contain the basic type. 'ad' is the
   attribute definition of the basic type. It can be modified by ... */
static void type_decl(CType *type, AttributeDef *ad, int *v, int td)
    CType type1, *type2;
    int qualifiers, storage;
    while (tok == '*') {
            qualifiers |= VT_CONSTANT;
            qualifiers |= VT_VOLATILE;
        /* XXX: clarify attribute handling */
        case TOK_ATTRIBUTE1:
        case TOK_ATTRIBUTE2:
            parse_attribute(ad);
        type->t |= qualifiers;
        /* recursive type */
        /* XXX: incorrect if abstract type for functions (e.g. 'int ()') */
        type1.t = 0; /* XXX: same as int */
        /* XXX: this is not correct to modify 'ad' at this point, but
           the syntax is not clear */
        if (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2)
            parse_attribute(ad);
        type_decl(&type1, ad, v, td);
        /* type identifier */
        if (tok >= TOK_IDENT && (td & TYPE_DIRECT)) {
        if (!(td & TYPE_ABSTRACT))
            expect("identifier");
    storage = type->t & VT_STORAGE;
    type->t &= ~VT_STORAGE;
    post_type(type, ad, storage);
    if (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2)
        parse_attribute(ad);
    /* append type at the end of type1 */
/* compute the lvalue VT_LVAL_xxx needed to match type t. */
ST_FUNC int lvalue_type(int t)
    if (bt == VT_BYTE || bt == VT_BOOL)
    else if (bt == VT_SHORT)
    if (t & VT_UNSIGNED)
        r |= VT_LVAL_UNSIGNED;
/* indirection with full error checking and bound check */
ST_FUNC void indir(void)
    if ((vtop->type.t & VT_BTYPE) != VT_PTR) {
        if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
    if (vtop->r & VT_LVAL)
    vtop->type = *pointed_type(&vtop->type);
    /* Arrays and functions are never lvalues */
    if (!(vtop->type.t & VT_ARRAY) && !(vtop->type.t & VT_VLA)
        && (vtop->type.t & VT_BTYPE) != VT_FUNC) {
        vtop->r |= lvalue_type(vtop->type.t);
        /* if bound checking, the referenced pointer must be checked */
#ifdef CONFIG_TCC_BCHECK
        if (tcc_state->do_bounds_check)
            vtop->r |= VT_MUSTBOUND;
/* pass a parameter to a function and do type checking and casting */
static void gfunc_param_typed(Sym *func, Sym *arg)
    func_type = func->c;
    if (func_type == FUNC_OLD ||
        (func_type == FUNC_ELLIPSIS && arg == NULL)) {
        /* default casting : only need to convert float to double */
        if ((vtop->type.t & VT_BTYPE) == VT_FLOAT) {
    } else if (vtop->type.t & VT_BITFIELD) {
        type.t = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
        type.ref = vtop->type.ref;
    } else if (arg == NULL) {
        tcc_error("too many arguments to function");
        type.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */
        gen_assign_cast(&type);
/* parse an expression of the form '(type)' or '(expr)' and return its ... */
static void parse_expr_type(CType *type)
    if (parse_btype(type, &ad)) {
        type_decl(type, &ad, &n, TYPE_ABSTRACT);

static void parse_type(CType *type)
    if (!parse_btype(type, &ad)) {
    type_decl(type, &ad, &n, TYPE_ABSTRACT);

static void vpush_tokc(int t)
    vsetc(&type, VT_CONST, &tokc);
ST_FUNC void unary(void)
    int n, t, align, size, r, sizeof_caller;
    sizeof_caller = in_sizeof;
    /* XXX: GCC 2.95.3 does not generate a table although it should be ... */
        vpush_tokc(VT_INT | VT_UNSIGNED);
        vpush_tokc(VT_LLONG);
        vpush_tokc(VT_LLONG | VT_UNSIGNED);
        vpush_tokc(VT_FLOAT);
        vpush_tokc(VT_DOUBLE);
        vpush_tokc(VT_LDOUBLE);
    case TOK___FUNCTION__:
        goto tok_identifier;
        /* special function name identifier */
        len = strlen(funcname) + 1;
        /* generate char[len] type */
        vpush_ref(&type, data_section, data_section->data_offset, len);
        ptr = section_ptr_add(data_section, len);
        memcpy(ptr, funcname, len);
#ifdef TCC_TARGET_PE
        t = VT_SHORT | VT_UNSIGNED;
        /* string parsing */
        if (tcc_state->warn_write_strings)
        memset(&ad, 0, sizeof(AttributeDef));
        decl_initializer_alloc(&type, &ad, VT_CONST, 2, 0, 0);
        if (parse_btype(&type, &ad)) {
            type_decl(&type, &ad, &n, TYPE_ABSTRACT);
            /* check ISOC99 compound literal */
                /* data is allocated locally by default */
                /* all except arrays are lvalues */
                if (!(type.t & VT_ARRAY))
                    r |= lvalue_type(type.t);
                memset(&ad, 0, sizeof(AttributeDef));
                decl_initializer_alloc(&type, &ad, r, 1, 0, 0);
                if (sizeof_caller) {
        } else if (tok == '{') {
            int saved_nocode_wanted = nocode_wanted;
                tcc_error("expected constant");
            /* save all registers */
            /* statement expression : we do not accept break/continue
               inside as GCC does. We do retain the nocode_wanted state,
               as statement expressions can't ever be entered from the
               outside, so any reactivation of code emission (from labels
               or loop heads) can be disabled again after the end of it. */
            block(NULL, NULL, 1);
            nocode_wanted = saved_nocode_wanted;
        /* functions names must be treated as function pointers,
           except for unary '&' and sizeof. Since we consider that
           functions are not lvalues, we only have to handle it
           there and in function calls. */
        /* arrays can also be used although they are not lvalues */
        if ((vtop->type.t & VT_BTYPE) != VT_FUNC &&
            !(vtop->type.t & VT_ARRAY) && !(vtop->type.t & VT_LLOCAL))
        mk_pointer(&vtop->type);
        if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
            boolean.t = VT_BOOL;
            vtop->c.i = !vtop->c.i;
        } else if ((vtop->r & VT_VALMASK) == VT_CMP)
            vseti(VT_JMP, gvtst(1, 0));
        if ((vtop->type.t & VT_BTYPE) == VT_PTR)
            tcc_error("pointer not accepted for unary plus");
        /* In order to force cast, we add zero, except for floating point
           where we really need a noop (otherwise -0.0 will be transformed ... */
        if (!is_float(vtop->type.t)) {
        unary_type(&type); // Perform a in_sizeof = 0;
        size = type_size(&type, &align);
        if (t == TOK_SIZEOF) {
            if (!(type.t & VT_VLA)) {
                    tcc_error("sizeof applied to an incomplete type");
                vla_runtime_type_size(&type, &align);
        vtop->type.t |= VT_UNSIGNED;
    case TOK_builtin_expect:
        /* __builtin_expect is a no-op for now */
    case TOK_builtin_types_compatible_p:
        type1.t &= ~(VT_CONSTANT | VT_VOLATILE);
        type2.t &= ~(VT_CONSTANT | VT_VOLATILE);
        vpushi(is_compatible_types(&type1, &type2));
    case TOK_builtin_choose_expr:
    case TOK_builtin_constant_p:
        res = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
    case TOK_builtin_frame_address:
    case TOK_builtin_return_address:
        if (tok != TOK_CINT) {
            tcc_error("%s only takes positive integers",
                      tok1 == TOK_builtin_return_address ?
                      "__builtin_return_address" :
                      "__builtin_frame_address");
        level = (uint32_t)tokc.i;
        vset(&type, VT_LOCAL, 0); /* local frame */
            mk_pointer(&vtop->type);
            indir(); /* -> parent frame */
        if (tok1 == TOK_builtin_return_address) {
            // assume return address is just above frame pointer on stack
            mk_pointer(&vtop->type);
#ifdef TCC_TARGET_X86_64
#ifdef TCC_TARGET_PE
    case TOK_builtin_va_start:
        if ((vtop->r & VT_VALMASK) != VT_LOCAL)
            tcc_error("__builtin_va_start expects a local variable");
        vtop->r &= ~(VT_LVAL | VT_REF);
        vtop->type = char_pointer_type;
    case TOK_builtin_va_arg_types:
        vpushi(classify_x86_64_va_arg(&type));
#ifdef TCC_TARGET_ARM64
    case TOK___va_start: {
        vtop->type.t = VT_VOID;
    case TOK___va_arg: {
    case TOK___arm64_clear_cache: {
        vtop->type.t = VT_VOID;
    /* pre operations */
        t = vtop->type.t & VT_BTYPE;
        /* In IEEE negate(x) isn't subtract(0,x), but rather ... */
            vtop->c.f = -1.0 * 0.0;
        else if (t == VT_DOUBLE)
            vtop->c.d = -1.0 * 0.0;
            vtop->c.ld = -1.0 * 0.0;
        goto tok_identifier;
        /* allow taking the address of a label */
        if (tok < TOK_UIDENT)
            expect("label identifier");
        s = label_find(tok);
            s = label_push(&global_label_stack, tok, LABEL_FORWARD);
            if (s->r == LABEL_DECLARED)
                s->r = LABEL_FORWARD;
            s->type.t = VT_VOID;
            mk_pointer(&s->type);
            s->type.t |= VT_STATIC;
        vpushsym(&s->type, s);
        // special qnan , snan and infinity values
        vpush64(VT_DOUBLE, 0x7ff8000000000000ULL);
        vpush64(VT_DOUBLE, 0x7ff0000000000001ULL);
        vpush64(VT_DOUBLE, 0x7ff0000000000000ULL);
            expect("identifier");
            const char *name = get_tok_str(t, NULL);
                tcc_error("'%s' undeclared", name);
            /* for simple function calls, we tolerate undeclared
               external reference to int() function */
            if (tcc_state->warn_implicit_function_declaration
#ifdef TCC_TARGET_PE
                /* people must be warned about using undeclared WINAPI functions
                   (which usually start with uppercase letter) */
                || (name[0] >= 'A' && name[0] <= 'Z')
                tcc_warning("implicit declaration of function '%s'", name);
            s = external_global_sym(t, &func_old_type, 0);
        /* A symbol that has a register is a local register variable,
           which starts out as VT_LOCAL value. */
        if ((r & VT_VALMASK) < VT_CONST)
            r = (r & ~VT_VALMASK) | VT_LOCAL;
        vset(&s->type, r, s->c);
        /* Point to s as backpointer (even without r&VT_SYM).
           Will be used by at least the x86 inline asm parser for ... */
        if (vtop->r & VT_SYM) {
    /* post operations */
        if (tok == TOK_INC || tok == TOK_DEC) {
        } else if (tok == '.' || tok == TOK_ARROW || tok == TOK_CDOUBLE) {
            if (tok == TOK_ARROW)
            qualifiers = vtop->type.t & (VT_CONSTANT | VT_VOLATILE);
            /* expect pointer on structure */
            if ((vtop->type.t & VT_BTYPE) != VT_STRUCT)
                expect("struct or union");
            if (tok == TOK_CDOUBLE)
                expect("field name");
            if (tok == TOK_CINT || tok == TOK_CUINT)
                expect("field name");
            s = find_field(&vtop->type, tok);
                tcc_error("field not found: %s", get_tok_str(tok & ~SYM_FIELD, &tokc));
            /* add field offset to pointer */
            vtop->type = char_pointer_type; /* change type to 'char *' */
            /* change type to field type, and set to lvalue */
            vtop->type = s->type;
            vtop->type.t |= qualifiers;
            /* an array is never an lvalue */
            if (!(vtop->type.t & VT_ARRAY)) {
                vtop->r |= lvalue_type(vtop->type.t);
#ifdef CONFIG_TCC_BCHECK
                /* if bound checking, the referenced pointer must be checked */
                if (tcc_state->do_bounds_check && (vtop->r & VT_VALMASK) != VT_LOCAL)
                    vtop->r |= VT_MUSTBOUND;
        } else if (tok == '[') {
        } else if (tok == '(') {
            int nb_args, ret_nregs, ret_align, regsize, variadic;
            if ((vtop->type.t & VT_BTYPE) != VT_FUNC) {
                /* pointer test (no array accepted) */
                if ((vtop->type.t & (VT_BTYPE | VT_ARRAY)) == VT_PTR) {
                    vtop->type = *pointed_type(&vtop->type);
                    if ((vtop->type.t & VT_BTYPE) != VT_FUNC)
                    expect("function pointer");
            vtop->r &= ~VT_LVAL; /* no lvalue */
            /* get return type */
            sa = s->next; /* first parameter */
            nb_args = regsize = 0;
            /* compute first implicit argument if a structure is returned */
            if ((s->type.t & VT_BTYPE) == VT_STRUCT) {
                variadic = (s->c == FUNC_ELLIPSIS);
                ret_nregs = gfunc_sret(&s->type, variadic, &ret.type,
                                       &ret_align, &regsize);
                /* get some space for the returned structure */
                size = type_size(&s->type, &align);
#ifdef TCC_TARGET_ARM64
                /* On arm64, a small struct is returned in registers.
                   It is much easier to write it to memory if we know
                   that we are allowed to write some extra bytes, so
                   round the allocated space up to a power of 2: */
                while (size & (size - 1))
                    size = (size | (size - 1)) + 1;
                loc = (loc - size) & -align;
                ret.r = VT_LOCAL | VT_LVAL;
                /* pass it as 'int' to avoid structure arg passing ... */
                vseti(VT_LOCAL, loc);
                /* return in register */
                if (is_float(ret.type.t)) {
                    ret.r = reg_fret(ret.type.t);
#ifdef TCC_TARGET_X86_64
                    if ((ret.type.t & VT_BTYPE) == VT_QFLOAT)
#ifndef TCC_TARGET_ARM64
#ifdef TCC_TARGET_X86_64
                    if ((ret.type.t & VT_BTYPE) == VT_QLONG)
                    if ((ret.type.t & VT_BTYPE) == VT_LLONG)
                gfunc_param_typed(s, sa);
                tcc_error("too few arguments to function");
            gfunc_call(nb_args);
            for (r = ret.r + ret_nregs + !ret_nregs; r-- > ret.r;) {
                vsetc(&ret.type, r, &ret.c);
                vtop->r2 = ret.r2; /* Loop only happens when r2 is VT_CONST */
            /* handle packed struct return */
            if (((s->type.t & VT_BTYPE) == VT_STRUCT) && ret_nregs) {
                size = type_size(&s->type, &align);
                /* We're writing whole regs often, make sure there's enough
                   space. Assume register size is power of 2. */
                if (regsize > align)
                loc = (loc - size) & -align;
                vset(&ret.type, VT_LOCAL | VT_LVAL, addr + offset);
                if (--ret_nregs == 0)
                vset(&s->type, VT_LOCAL | VT_LVAL, addr);
ST_FUNC void expr_prod(void)
    while (tok == '*' || tok == '/' || tok == '%') {

ST_FUNC void expr_sum(void)
    while (tok == '+' || tok == '-') {

static void expr_shift(void)
    while (tok == TOK_SHL || tok == TOK_SAR) {

static void expr_cmp(void)
    while ((tok >= TOK_ULE && tok <= TOK_GT) ||
           tok == TOK_ULT || tok == TOK_UGE) {

static void expr_cmpeq(void)
    while (tok == TOK_EQ || tok == TOK_NE) {

static void expr_and(void)
    while (tok == '&') {

static void expr_xor(void)
    while (tok == '^') {

static void expr_or(void)
    while (tok == '|') {
/* XXX: fix this mess */
static void expr_land_const(void)
    while (tok == TOK_LAND) {

static void expr_lor_const(void)
    while (tok == TOK_LOR) {

static void expr_land(void)
    if (tok == TOK_LAND) {
        if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
            while (tok == TOK_LAND) {
            gen_cast(&int_type);
            if (tok != TOK_LAND) {

static void expr_lor(void)
    if (tok == TOK_LOR) {
        if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
            while (tok == TOK_LOR) {
            gen_cast(&int_type);
            if (tok != TOK_LOR) {
/* Assuming vtop is a value used in a conditional context
   (i.e. compared with zero) return 0 if it's false, 1 if
   true and -1 if it can't be statically determined. */
static int condition_3way(void)
    if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST &&
        (!(vtop->r & VT_SYM) ||
         !(vtop->sym->type.t & VT_WEAK))) {
        boolean.t = VT_BOOL;
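
/* Example (added): for 'if (0) { ... }' the controlling expression is a
   plain constant, so condition_3way() returns 0 and block() below can
   suppress code generation for the dead branch through nocode_wanted;
   a non-constant (or weak-symbol) condition returns -1 and a real runtime
   test is emitted instead. */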
static void expr_cond(void)
    int tt, u, r1, r2, rc, t1, t2, bt1, bt2, islv, c, g;
    CType type, type1, type2;
    c = condition_3way();
    g = (tok == ':' && gnu_ext);
    /* needed to avoid having different registers saved in ... */
    if (is_float(vtop->type.t)) {
#ifdef TCC_TARGET_X86_64
        if ((vtop->type.t & VT_BTYPE) == VT_LDOUBLE) {
    sv = *vtop; /* save value to handle it later */
    vtop--; /* no vpop so that FP stack is not flushed */
    bt1 = t1 & VT_BTYPE;
    bt2 = t2 & VT_BTYPE;
    /* cast operands to correct type according to ISOC rules */
    if (is_float(bt1) || is_float(bt2)) {
        if (bt1 == VT_LDOUBLE || bt2 == VT_LDOUBLE) {
            type.t = VT_LDOUBLE;
        } else if (bt1 == VT_DOUBLE || bt2 == VT_DOUBLE) {
    } else if (bt1 == VT_LLONG || bt2 == VT_LLONG) {
        /* cast to biggest op */
        /* convert to unsigned if it does not fit in a long long */
        if ((t1 & (VT_BTYPE | VT_UNSIGNED)) == (VT_LLONG | VT_UNSIGNED) ||
            (t2 & (VT_BTYPE | VT_UNSIGNED)) == (VT_LLONG | VT_UNSIGNED))
            type.t |= VT_UNSIGNED;
    } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
        /* If one is a null ptr constant the result type ... */
        if (is_null_pointer (vtop))
        else if (is_null_pointer (&sv))
        /* XXX: test pointer compatibility, C99 has more elaborate ... */
    } else if (bt1 == VT_FUNC || bt2 == VT_FUNC) {
        /* XXX: test function pointer compatibility */
        type = bt1 == VT_FUNC ? type1 : type2;
    } else if (bt1 == VT_STRUCT || bt2 == VT_STRUCT) {
        /* XXX: test structure compatibility */
        type = bt1 == VT_STRUCT ? type1 : type2;
    } else if (bt1 == VT_VOID || bt2 == VT_VOID) {
        /* NOTE: as an extension, we accept void on only one side */
        /* integer operations */
        /* convert to unsigned if it does not fit in an integer */
        if ((t1 & (VT_BTYPE | VT_UNSIGNED)) == (VT_INT | VT_UNSIGNED) ||
            (t2 & (VT_BTYPE | VT_UNSIGNED)) == (VT_INT | VT_UNSIGNED))
            type.t |= VT_UNSIGNED;
    /* keep structs lvalue by transforming `(expr ? a : b)` to `*(expr ? &a : &b)` so
       that `(expr ? a : b).mem` does not error with "lvalue expected" */
    islv = (vtop->r & VT_LVAL) && (sv.r & VT_LVAL) && VT_STRUCT == (type.t & VT_BTYPE);
    /* now we convert second operand */
        mk_pointer(&vtop->type);
    } else if (VT_STRUCT == (vtop->type.t & VT_BTYPE))
    if (is_float(type.t)) {
#ifdef TCC_TARGET_X86_64
        if ((type.t & VT_BTYPE) == VT_LDOUBLE) {
    } else if ((type.t & VT_BTYPE) == VT_LLONG) {
        /* for long longs, we use fixed registers to avoid having
           to handle a complicated move */
    /* this is horrible, but we must also convert first ... */
        mk_pointer(&vtop->type);
    } else if (VT_STRUCT == (vtop->type.t & VT_BTYPE))
    move_reg(r2, r1, type.t);
static void expr_eq(void)
        (tok >= TOK_A_MOD && tok <= TOK_A_DIV) ||
        tok == TOK_A_XOR || tok == TOK_A_OR ||
        tok == TOK_A_SHL || tok == TOK_A_SAR) {

ST_FUNC void gexpr(void)

/* parse an expression and return its type without any side effect. */
static void expr_type(CType *type)

/* parse a unary expression and return its type without any side ... */
static void unary_type(CType *type)

/* parse a constant expression and return value in vtop. */
static void expr_const1(void)

/* parse an integer constant and return its value. */
static inline int64_t expr_const64(void)
    if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) != VT_CONST)
        expect("constant expression");

/* parse an integer constant and return its value.
   Complain if it doesn't fit 32bit (signed or unsigned). */
ST_FUNC int expr_const(void)
    int64_t wc = expr_const64();
    if (c != wc && (unsigned)c != wc)
        tcc_error("constant exceeds 32 bit");
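
/* Example (added): expr_const() guards places where the grammar needs an
   int-sized constant, e.g. the bit-field width in struct_decl() above; a
   width expression such as '1LL << 40' fails the check because the value
   survives neither a signed nor an unsigned 32-bit truncation, giving
   "constant exceeds 32 bit". */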
/* return the label token if current token is a label, otherwise ... */
static int is_label(void)
    /* fast test first */
    if (tok < TOK_UIDENT)
    /* no need to save tokc because tok is an identifier */
    unget_tok(last_tok);

static void label_or_decl(int l)
    /* fast test first */
    if (tok >= TOK_UIDENT)
    /* no need to save tokc because tok is an identifier */
    unget_tok(last_tok);
    unget_tok(last_tok);
#ifndef TCC_TARGET_ARM64
static void gfunc_return(CType *func_type)
    if ((func_type->t & VT_BTYPE) == VT_STRUCT) {
        CType type, ret_type;
        int ret_align, ret_nregs, regsize;
        ret_nregs = gfunc_sret(func_type, func_var, &ret_type,
                               &ret_align, &regsize);
        if (0 == ret_nregs) {
            /* if returning structure, must copy it to implicit
               first pointer arg location */
            vset(&type, VT_LOCAL | VT_LVAL, func_vc);
            /* copy structure value to pointer */
            /* returning structure packed into registers */
            int r, size, addr, align;
            size = type_size(func_type, &align);
            if ((vtop->r != (VT_LOCAL | VT_LVAL) ||
                 (vtop->c.i & (ret_align-1)))
                && (align & (ret_align-1))) {
                loc = (loc - size) & -ret_align;
                vset(&type, VT_LOCAL | VT_LVAL, addr);
            vset(&ret_type, VT_LOCAL | VT_LVAL, addr);
            vtop->type = ret_type;
            if (is_float(ret_type.t))
                r = rc_fret(ret_type.t);
                if (--ret_nregs == 0)
                /* We assume that when a structure is returned in multiple
                   registers, their classes are consecutive values of the ... */
                vtop->c.i += regsize;
    } else if (is_float(func_type->t)) {
        gv(rc_fret(func_type->t));
    vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
static int case_cmp(const void *pa, const void *pb)
    int64_t a = (*(struct case_t **) pa)->v1;
    int64_t b = (*(struct case_t **) pb)->v1;
    return a < b ? -1 : a > b;

static void gcase(struct case_t **base, int len, int *bsym)
    int ll = (vtop->type.t & VT_BTYPE) == VT_LLONG;
        gtst_addr(0, p->sym); /* v1 <= x <= v2 */
        gcase(base, len/2, bsym);
        if (cur_switch->def_sym)
            gjmp_addr(cur_switch->def_sym);
            *bsym = gjmp(*bsym);
        base += e; len -= e;
        if (p->v1 == p->v2) {
            gtst_addr(0, p->sym);
            gtst_addr(0, p->sym);
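
/* Dispatch sketch (added): block() below qsort()s the collected case
   ranges with case_cmp(), then gcase() emits a balanced search: for a
   large 'len' it tests the middle range ("v1 <= x <= v2") and recurses
   into each half (gcase(base, len/2, ...), base += e; len -= e), and for
   the last few entries it falls back to a linear run of equality/range
   tests, so a switch dispatch costs roughly O(log n) comparisons. */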
static void block(int *bsym, int *csym, int is_expr)
    int a, b, c, d, cond;
    /* generate line number info */
    if (tcc_state->do_debug &&
        (last_line_num != file->line_num || last_ind != ind)) {
        put_stabn(N_SLINE, 0, file->line_num, ind - func_ind);
        last_line_num = file->line_num;
    /* default return value is (void) */
    vtop->type.t = VT_VOID;
    if (tok == TOK_IF) {
        int saved_nocode_wanted = nocode_wanted;
        cond = condition_3way();
            nocode_wanted |= 0x20000000;
        block(bsym, csym, 0);
            nocode_wanted = saved_nocode_wanted;
        if (c == TOK_ELSE) {
                nocode_wanted |= 0x20000000;
            block(bsym, csym, 0);
            gsym(d); /* patch else jmp */
                nocode_wanted = saved_nocode_wanted;
    } else if (tok == TOK_WHILE) {
        int saved_nocode_wanted;
        nocode_wanted &= ~0x20000000;
        saved_nocode_wanted = nocode_wanted;
        nocode_wanted = saved_nocode_wanted;
    } else if (tok == '{') {
        int block_vla_sp_loc = vla_sp_loc, saved_vlas_in_scope = vlas_in_scope;
        /* record local declaration stack position */
        llabel = local_label_stack;
        /* handle local labels declarations */
        if (tok == TOK_LABEL) {
                if (tok < TOK_UIDENT)
                    expect("label identifier");
                label_push(&local_label_stack, tok, LABEL_DECLARED);
        while (tok != '}') {
            label_or_decl(VT_LOCAL);
                block(bsym, csym, is_expr);
        /* pop locally defined labels */
        label_pop(&local_label_stack, llabel);
        /* pop locally defined symbols */
        /* In the is_expr case (a statement expression is finished here),
           vtop might refer to symbols on the local_stack. Either via the
           type or via vtop->sym. We can't pop those nor any that in turn
           might be referred to. To make it easier we don't roll back
           any symbols in that case; some upper level call to block() will
           do that. We do have to remove such symbols from the lookup
           tables, though. sym_pop will do that. */
        sym_pop(&local_stack, s, is_expr);
        /* Pop VLA frames and restore stack pointer if required */
        if (vlas_in_scope > saved_vlas_in_scope) {
            vla_sp_loc = saved_vlas_in_scope ? block_vla_sp_loc : vla_sp_root_loc;
        vlas_in_scope = saved_vlas_in_scope;
    } else if (tok == TOK_RETURN) {
            gen_assign_cast(&func_vt);
            gfunc_return(&func_vt);
        /* jump unless last stmt in top-level block */
        if (tok != '}' || local_scope != 1)
        nocode_wanted |= 0x20000000;
    } else if (tok == TOK_BREAK) {
            tcc_error("cannot break");
        *bsym = gjmp(*bsym);
        nocode_wanted |= 0x20000000;
    } else if (tok == TOK_CONTINUE) {
            tcc_error("cannot continue");
        vla_sp_restore_root();
        *csym = gjmp(*csym);
    } else if (tok == TOK_FOR) {
        int saved_nocode_wanted;
        nocode_wanted &= ~0x20000000;
        /* c99 for-loop init decl? */
        if (!decl0(VT_LOCAL, 1)) {
            /* no, regular for-loop init expr */
        saved_nocode_wanted = nocode_wanted;
        nocode_wanted = saved_nocode_wanted;
        sym_pop(&local_stack, s, 0);
        if (tok == TOK_DO) {
            int saved_nocode_wanted;
            nocode_wanted &= ~0x20000000;
            saved_nocode_wanted = nocode_wanted;
            nocode_wanted = saved_nocode_wanted;
        if (tok == TOK_SWITCH) {
            struct switch_t *saved, sw;
            int saved_nocode_wanted = nocode_wanted;
            switchval = *vtop--;
            b = gjmp(0); /* jump to first case */
            sw.p = NULL; sw.n = 0; sw.def_sym = 0;
            nocode_wanted = saved_nocode_wanted;
            a = gjmp(a); /* add implicit break */
            qsort(sw.p, sw.n, sizeof(void*), case_cmp);
            for (b = 1; b < sw.n; b++)
                if (sw.p[b - 1]->v2 >= sw.p[b]->v1)
                    tcc_error("duplicate case value");
            /* Our switch table sorting is signed, so the compared
               value needs to be as well when it's 64bit. */
            if ((switchval.type.t & VT_BTYPE) == VT_LLONG)
                switchval.type.t &= ~VT_UNSIGNED;
            gcase(sw.p, sw.n, &a);
                gjmp_addr(sw.def_sym);
            dynarray_reset(&sw.p, &sw.n);
        if (tok == TOK_CASE) {
            struct case_t *cr = tcc_malloc(sizeof(struct case_t));
            nocode_wanted &= ~0x20000000;
            cr->v1 = cr->v2 = expr_const64();
            if (gnu_ext && tok == TOK_DOTS) {
                cr->v2 = expr_const64();
                if (cr->v2 < cr->v1)
                    tcc_warning("empty case range");
            dynarray_add(&cur_switch->p, &cur_switch->n, cr);
            goto block_after_label;
        if (tok == TOK_DEFAULT) {
            if (cur_switch->def_sym)
                tcc_error("too many 'default'");
            cur_switch->def_sym = ind;
            goto block_after_label;
        if (tok == TOK_GOTO) {
            if (tok == '*' && gnu_ext) {
                if ((vtop->type.t & VT_BTYPE) != VT_PTR)
            } else if (tok >= TOK_UIDENT) {
                s = label_find(tok);
                /* put forward definition if needed */
                    s = label_push(&global_label_stack, tok, LABEL_FORWARD);
                    if (s->r == LABEL_DECLARED)
                        s->r = LABEL_FORWARD;
                vla_sp_restore_root();
                if (s->r & LABEL_FORWARD)
                    s->jnext = gjmp(s->jnext);
                    gjmp_addr(s->jnext);
                expect("label identifier");
        } else if (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3) {
                if (s->r == LABEL_DEFINED)
                    tcc_error("duplicate label '%s'", get_tok_str(s->v, NULL));
                s->r = LABEL_DEFINED;
                s = label_push(&global_label_stack, b, LABEL_DEFINED);
                /* we accept this, but it is a mistake */
                nocode_wanted &= ~0x20000000;
                    tcc_warning("deprecated use of label at end of compound statement");
                    block(bsym, csym, is_expr);
        /* expression case */
#define EXPR_CONST 1

static void parse_init_elem(int expr_type)
    int saved_global_expr;
        /* compound literals must be allocated globally in this case */
        saved_global_expr = global_expr;
        global_expr = saved_global_expr;
    /* NOTE: symbols are accepted */
    if ((vtop->r & (VT_VALMASK | VT_LVAL)) != VT_CONST)
        tcc_error("initializer element is not constant");
/* t is the array or struct type. c is the array or struct
   address. cur_field is the pointer to the current
   value, for arrays the 'c' member contains the current start
   index and the 'r' contains the end index (in case of range init).
   'size_only' is true if only size info is needed (only used ... */
static void decl_designator(CType *type, Section *sec, unsigned long c,
                            Sym **cur_field, int size_only)
    int notfirst, index, index_last, align, l, nb_elems, elem_size;
    if (gnu_ext && (l = is_label()) != 0)
    while (tok == '[' || tok == '.') {
            if (!(type->t & VT_ARRAY))
                expect("array type");
            index = expr_const();
            if (index < 0 || (s->c >= 0 && index >= s->c))
                tcc_error("invalid index");
            if (tok == TOK_DOTS && gnu_ext) {
                index_last = expr_const();
                if (index_last < 0 ||
                    (s->c >= 0 && index_last >= s->c) ||
                    tcc_error("invalid index");
                (*cur_field)->c = index;
                (*cur_field)->r = index_last;
            type = pointed_type(type);
            elem_size = type_size(type, &align);
            c += index * elem_size;
            /* NOTE: we only support ranges for last designator */
            nb_elems = index_last - index + 1;
            if (nb_elems != 1) {
            if ((type->t & VT_BTYPE) != VT_STRUCT)
                expect("struct/union type");
            f = find_field(type, l);
            /* XXX: fix this mess by using explicit storage field */
            type1.t |= (type->t & ~VT_TYPE);
        if (type->t & VT_ARRAY) {
            index = (*cur_field)->c;
            if (type->ref->c >= 0 && index >= type->ref->c)
                tcc_error("index too large");
            type = pointed_type(type);
            c += index * type_size(type, &align);
                tcc_error("too many field init");
            /* XXX: fix this mess by using explicit storage field */
            type1.t |= (type->t & ~VT_TYPE);
    decl_initializer(type, sec, c, 0, size_only);
    /* XXX: make it more general */
    if (!size_only && nb_elems > 1) {
        unsigned long c_end;
            vset(type, VT_LOCAL|VT_LVAL, c);
            for (i = 1; i < nb_elems; i++) {
                vset(type, VT_LOCAL|VT_LVAL, c + elem_size * i);
            c_end = c + nb_elems * elem_size;
            if (c_end > sec->data_allocated)
                section_realloc(sec, c_end);
            src = sec->data + c;
            for (i = 1; i < nb_elems; i++) {
                memcpy(dst, src, elem_size);
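
/* Illustrative initializers (added): with gnu_ext enabled,
       int a[8] = { [2] = 1, [4 ... 6] = 7 };
   uses the '[' designator branch above; for the range, nb_elems == 3, so
   the element at index 4 is initialized once and then copied to indices 5
   and 6 (memcpy for static storage, vset/store for automatic storage).
   A '.field = x' designator takes the find_field() path instead. */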
/* store a value or an expression directly in global data or in local array */
static void init_putv(CType *type, Section *sec, unsigned long c)
    int bt, bit_pos, bit_size;
    unsigned long long bit_mask;

    dtype.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */

    /* XXX: not portable */
    /* XXX: generate error if incorrect relocation */
    gen_assign_cast(&dtype);
    bt = type->t & VT_BTYPE;
    size = type_size(type, &align);
    if (c + size > sec->data_allocated) {
        section_realloc(sec, c + size);
    ptr = sec->data + c;
    /* XXX: make code faster ? */
    if (!(type->t & VT_BITFIELD)) {
        bit_size = PTR_SIZE * 8;
        bit_pos = (vtop->type.t >> VT_STRUCT_SHIFT) & 0x3f;
        bit_size = (vtop->type.t >> (VT_STRUCT_SHIFT + 6)) & 0x3f;
        bit_mask = (1LL << bit_size) - 1;
    if ((vtop->r & (VT_SYM|VT_CONST)) == (VT_SYM|VT_CONST) &&
        vtop->sym->v >= SYM_FIRST_ANOM &&
        /* XXX This rejects compound literals like
           '(void *){ptr}'. The problem is that '&sym' is
           represented the same way, which would be ruled out
           by the SYM_FIRST_ANOM check above, but also '"string"'
           in 'char *p = "string"' is represented the same
           with the type being VT_PTR and the symbol being an
           anonymous one. That is, there's no difference in vtop
           between '(void *){x}' and '&(void *){x}'. Ignore
           pointer typed entities here. Hopefully no real code
           will ever use compound literals with scalar type. */
        (vtop->type.t & VT_BTYPE) != VT_PTR) {
        /* These come from compound literals, memcpy stuff over. */
        esym = &((ElfW(Sym) *)symtab_section->data)[vtop->sym->c];
        ssec = tcc_state->sections[esym->st_shndx];
        memmove (ptr, ssec->data + esym->st_value, size);
        /* We need to copy over all memory contents, and that
           includes relocations. Use the fact that relocs are
           created in order, so look from the end of relocs
           until we hit one before the copied region. */
        int num_relocs = ssec->reloc->data_offset / sizeof(*rel);
        rel = (ElfW_Rel *)(ssec->reloc->data + ssec->reloc->data_offset);
        while (num_relocs--) {
            if (rel->r_offset >= esym->st_value + size)
            if (rel->r_offset < esym->st_value)
            /* Note: if the same fields are initialized multiple
               times (possible with designators) then we possibly
               add multiple relocations for the same offset here.
               That would lead to wrong code, the last reloc needs
               to win. We clean this up later after the whole
               initializer is parsed. */
            put_elf_reloca(symtab_section, sec,
                           c + rel->r_offset - esym->st_value,
                           ELFW(R_TYPE)(rel->r_info),
                           ELFW(R_SYM)(rel->r_info),
#if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
    if ((vtop->r & VT_SYM) &&
        (bt == VT_LLONG && bit_size != 64) ||
        (bt == VT_INT && bit_size != 32)
        tcc_error("initializer element is not computable at load time");

    /* XXX: when cross-compiling we assume that each type has the
       same representation on host and target, which is likely to
       be wrong in the case of long double */
    vtop->c.i = (vtop->c.i != 0);
    *(char *)ptr |= (vtop->c.i & bit_mask) << bit_pos;
    *(short *)ptr |= (vtop->c.i & bit_mask) << bit_pos;
    *(double *)ptr = vtop->c.d;
    if (sizeof(long double) == LDOUBLE_SIZE)
        *(long double *)ptr = vtop->c.ld;
    else if (sizeof(double) == LDOUBLE_SIZE)
        *(double *)ptr = vtop->c.ld;
        tcc_error("can't cross compile long double constants");
    *(long long *)ptr |= (vtop->c.i & bit_mask) << bit_pos;
    addr_t val = (vtop->c.i & bit_mask) << bit_pos;
#if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
    if (vtop->r & VT_SYM)
        greloca(sec, vtop->sym, c, R_DATA_PTR, val);
    *(addr_t *)ptr |= val;
    if (vtop->r & VT_SYM)
        greloc(sec, vtop->sym, c, R_DATA_PTR);
    *(addr_t *)ptr |= val;
    int val = (vtop->c.i & bit_mask) << bit_pos;
#if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
    if (vtop->r & VT_SYM)
        greloca(sec, vtop->sym, c, R_DATA_PTR, val);
    if (vtop->r & VT_SYM)
        greloc(sec, vtop->sym, c, R_DATA_PTR);
    vset(&dtype, VT_LOCAL|VT_LVAL, c);
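    /* Illustrative example (not part of the original source): for file-scope
       definitions such as

           int x = 3;
           int *p = &x;

       init_putv() writes the constant bytes of '3' straight into the data
       section at offset 'c', while the pointer initializer additionally
       records an R_DATA_PTR relocation against the symbol of 'x' via
       greloc()/greloca(), since the final address is only known at link
       time. */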
/* put zeros for variable based init */
static void init_putz(Section *sec, unsigned long c, int size)
        /* nothing to do because globals are already set to zero */
        vpush_global_sym(&func_old_type, TOK_memset);
#ifdef TCC_TARGET_ARM
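        /* Illustrative example (not part of the original source): for a local

               int a[100] = { 1 };

           the 99 trailing elements must be cleared explicitly, so init_putz()
           emits a call to memset() (pushed above as a global symbol); for
           static storage nothing needs to be emitted because section memory
           is already zero-initialized. */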
/* 't' contains the type and storage info. 'c' is the offset of the
   object in section 'sec'. If 'sec' is NULL, it means stack based
   allocation. 'first' is true if array '{' must be read (multi
   dimension implicit array init handling). 'size_only' is true if
   size only evaluation is wanted (only for arrays). */
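/* Illustrative example (not part of the original source): for

       int m[][3] = { { 1, 2, 3 }, { 4, 5, 6 } };

   a size-only pass over the initializer is what discovers that the missing
   outer dimension is 2 before any data is stored, and 'first' controls
   whether the '{' of each implicit sub-array must still be read. */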
static void decl_initializer(CType *type, Section *sec, unsigned long c,
                             int first, int size_only)
    int index, array_length, n, no_oblock, nb, parlevel, parlevel1, i;

    /* If we currently are at an '}' or ',' we have read an initializer
       element in one of our callers, and not yet consumed it. */
    have_elem = tok == '}' || tok == ',';
    if (!have_elem && tok != '{' &&
        /* In case of strings we have special handling for arrays, so
           don't consume them as initializer value (which would commit them
           to some anonymous symbol). */
        tok != TOK_LSTR && tok != TOK_STR &&
        parse_init_elem(!sec ? EXPR_ANY : EXPR_CONST);

        !(type->t & VT_ARRAY) &&
        /* Use i_c_parameter_t, to strip toplevel qualifiers.
           The source type might have VT_CONSTANT set, which is
           of course assignable to non-const elements. */
        is_compatible_parameter_types(type, &vtop->type)) {
        init_putv(type, sec, c);
    } else if (type->t & VT_ARRAY) {
        t1 = pointed_type(type);
        size1 = type_size(t1, &align1);

        if ((first && tok != TOK_LSTR && tok != TOK_STR) ||
            tcc_error("character array initializer must be a literal,"
                      " optionally enclosed in braces");

        /* only parse strings here if correct type (otherwise: handle
           them as ((w)char *) expressions */
        if ((tok == TOK_LSTR &&
#ifdef TCC_TARGET_PE
             (t1->t & VT_BTYPE) == VT_SHORT && (t1->t & VT_UNSIGNED)
             (t1->t & VT_BTYPE) == VT_INT
            ) || (tok == TOK_STR && (t1->t & VT_BTYPE) == VT_BYTE)) {
            while (tok == TOK_STR || tok == TOK_LSTR) {
                /* compute maximum number of chars wanted */
                cstr_len = tokc.str.size;
                cstr_len = tokc.str.size / sizeof(nwchar_t);
                if (n >= 0 && nb > (n - array_length))
                    nb = n - array_length;
                tcc_warning("initializer-string for array is too long");
                /* in order to go faster for common case (char
                   string in global variable, we handle it */
                if (sec && tok == TOK_STR && size1 == 1) {
                    memcpy(sec->data + c + array_length, tokc.str.data, nb);
                ch = ((unsigned char *)tokc.str.data)[i];
                ch = ((nwchar_t *)tokc.str.data)[i];
                init_putv(t1, sec, c + (array_length + i) * size1);
            /* only add trailing zero if enough storage (no
               warning in this case since it is standard) */
            if (n < 0 || array_length < n) {
                init_putv(t1, sec, c + (array_length * size1));
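            /* Illustrative example (not part of the original source):

                   char s1[4] = "abc";    three chars stored, trailing '\0' added
                   char s3[2] = "abc";    clipped, "initializer-string ... too long"

               For a char array in static storage the bytes are memcpy'd
               directly into the section; otherwise each character goes
               through init_putv(). */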
        while (tok != '}' || have_elem) {
            decl_designator(type, sec, c, &f, size_only);
            /* must put zero in holes (note that doing it that way
               ensures that it even works with designators) */
            if (!size_only && array_length < index) {
                init_putz(sec, c + array_length * size1,
                          (index - array_length) * size1);
            if (type->t & VT_ARRAY) {
                index = indexsym.c = ++indexsym.r;
                index = index + type_size(&f->type, &align1);
                if (s->type.t == TOK_UNION)
            if (index > array_length)
                array_length = index;
            if (type->t & VT_ARRAY) {
                /* special test for multi dimensional arrays (may not
                   be strictly correct if designators are used at the
                   same time) */
                if (no_oblock && index >= n)
                if (no_oblock && f == NULL)
        /* put zeros at the end */
        if (!size_only && array_length < n) {
            init_putz(sec, c + array_length * size1,
                      (n - array_length) * size1);
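        /* Illustrative example (not part of the original source): for

               int a[5] = { [3] = 7 };

           the loop above leaves a hole for elements 0..2, which init_putz()
           fills with zeros before element 3 is stored, and the final
           init_putz() call just above clears the remaining element 4 up to
           'n'. */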
    /* patch type size if needed, which happens only for array types */
        s->c = array_length;
    } else if ((type->t & VT_BTYPE) == VT_STRUCT) {
        if (first || tok == '{') {
    } else if (tok == '{') {
        decl_initializer(type, sec, c, first, size_only);
    } else if (size_only) {
        /* If we supported only ISO C we wouldn't have to accept calling
           this on anything other than an array with size_only==1 (and even
           then only on the outermost level, so no recursion would be needed),
           because initializing a flex array member isn't supported.
           But GNU C supports it, so we need to recurse even into
           subfields of structs and arrays when size_only is set. */
        /* just skip expression */
        parlevel = parlevel1 = 0;
        while ((parlevel > 0 || parlevel1 > 0 ||
                (tok != '}' && tok != ',')) && tok != -1) {
            else if (tok == ')') {
                if (parlevel == 0 && parlevel1 == 0)
            else if (tok == '{')
            else if (tok == '}') {
                if (parlevel == 0 && parlevel1 == 0)
        /* This should happen only when we haven't parsed
           the init element above for fear of committing a
           string constant to memory too early. */
        if (tok != TOK_STR && tok != TOK_LSTR)
            expect("string constant");
        parse_init_elem(!sec ? EXPR_ANY : EXPR_CONST);
        init_putv(type, sec, c);
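    /* Illustrative example (not part of the original source): deferring
       string constants matters for the difference between

           char *p = "hi";    the string is committed to an anonymous symbol
                              and 'p' gets its address
           char a[] = "hi";   the bytes are copied directly into 'a'

       so string tokens are only turned into a value here, once it is known
       that the target is not a character array. */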
/* parse an initializer for type 't' if 'has_init' is non zero, and
   allocate space in local or global data space ('r' is either
   VT_LOCAL or VT_CONST). If 'v' is non zero, then an associated
   variable 'v' of scope 'scope' is declared before initializers
   are parsed. If 'v' is zero, then a reference to the new object
   is put in the value stack. If 'has_init' is 2, a special parsing
   is done to handle string constants. */
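/* Illustrative example (not part of the original source):

       static int g = 1;              with r == VT_CONST, storage is reserved
                                      in a section
       void f(void) { int l = 1; }    with r == VT_LOCAL, storage is reserved
                                      on the stack

   In both cases the initializer itself is then parsed by decl_initializer();
   when 'has_init' is 2 only string tokens are collected (the 'only get
   strings' branch below). */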
static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r,
                                   int has_init, int v, int scope)
    int size, align, addr, data_offset;
    ParseState saved_parse_state = {0};
    TokenString *init_str = NULL;
    Sym *flexible_array;

    flexible_array = NULL;
    if ((type->t & VT_BTYPE) == VT_STRUCT) {
        Sym *field = type->ref->next;
            field = field->next;
        if (field->type.t & VT_ARRAY && field->type.ref->c < 0)
            flexible_array = field;

    size = type_size(type, &align);
    /* If unknown size, we must evaluate it before
       evaluating initializers because
       initializers can generate global data too
       (e.g. string pointers or ISOC99 compound
       literals). It also simplifies local
       initializers handling */
    if (size < 0 || (flexible_array && has_init)) {
            tcc_error("unknown type size");
        /* get all init string */
        init_str = tok_str_alloc();
        if (has_init == 2) {
            /* only get strings */
            while (tok == TOK_STR || tok == TOK_LSTR) {
                tok_str_add_tok(init_str);
            while (level > 0 || (tok != ',' && tok != ';')) {
                    tcc_error("unexpected end of file in initializer");
                tok_str_add_tok(init_str);
                else if (tok == '}') {
        tok_str_add(init_str, -1);
        tok_str_add(init_str, 0);

        save_parse_state(&saved_parse_state);
        begin_macro(init_str, 1);
        decl_initializer(type, NULL, 0, 1, 1);
        /* prepare second initializer parsing */
        macro_ptr = init_str->str;

        /* if still unknown size, error */
        size = type_size(type, &align);
            tcc_error("unknown type size");

    /* If there's a flex member and it was used in the initializer */
    if (flexible_array &&
        flexible_array->type.ref->c > 0)
        size += flexible_array->type.ref->c
                * pointed_size(&flexible_array->type);
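    /* Illustrative example (not part of the original source): for a struct
       with a flexible array member,

           struct vec { int n; int d[]; };
           static struct vec v = { 3, { 1, 2, 3 } };

       the size-only pre-pass above records how many elements the flex member
       actually received, and that amount is added to the object size here
       before storage is allocated. */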
    /* take into account specified alignment if bigger */
    if (ad->a.aligned) {
        int speca = 1 << (ad->a.aligned - 1);
    } else if (ad->a.packed) {

    if ((r & VT_VALMASK) == VT_LOCAL) {
#ifdef CONFIG_TCC_BCHECK
        if (tcc_state->do_bounds_check && (type->t & VT_ARRAY)) {
        loc = (loc - size) & -align;
#ifdef CONFIG_TCC_BCHECK
        /* handles bounds */
        /* XXX: currently, since we do only one pass, we cannot track
           '&' operators, so we add only arrays */
        if (tcc_state->do_bounds_check && (type->t & VT_ARRAY)) {
            /* add padding between regions */
            /* then add local bound info */
            bounds_ptr = section_ptr_add(lbounds_section, 2 * sizeof(addr_t));
            bounds_ptr[0] = addr;
            bounds_ptr[1] = size;
        /* local variable */
#ifdef CONFIG_TCC_ASM
        if (ad->asm_label) {
            int reg = asm_parse_regvar(ad->asm_label);
                r = (r & ~VT_VALMASK) | reg;
            sym_push(v, type, r, addr);
            /* push local reference */
            vset(type, r, addr);

        if (v && scope == VT_CONST) {
            /* see if the symbol was already defined */
                if (!is_compatible_types(&sym->type, type))
                    tcc_error("incompatible types for redefinition of '%s'",
                              get_tok_str(v, NULL));
                if (sym->type.t & VT_EXTERN) {
                    /* if the variable is extern, it was not allocated */
                    sym->type.t &= ~VT_EXTERN;
                    /* set array size if it was omitted in extern
                       declaration */
                    if ((sym->type.t & VT_ARRAY) &&
                        sym->type.ref->c < 0 &&
                        sym->type.ref->c = type->ref->c;
                    /* we accept several definitions of the same
                       global variable. this is tricky, because we
                       must play with the SHN_COMMON type of the symbol */
                    /* XXX: should check if the variable was already
                       initialized. It is incorrect to initialize it
                       twice */
                    /* no init data, we won't add more to the symbol */

        /* allocate symbol in corresponding section */
            else if (tcc_state->nocommon)
            data_offset = sec->data_offset;
            data_offset = (data_offset + align - 1) & -align;
            /* very important to increment global pointer at this time
               because initializers themselves can create new initializers */
            data_offset += size;
#ifdef CONFIG_TCC_BCHECK
            /* add padding if bound check */
            if (tcc_state->do_bounds_check)
            sec->data_offset = data_offset;
            /* allocate section space to put the data */
            if (sec->sh_type != SHT_NOBITS &&
                data_offset > sec->data_allocated)
                section_realloc(sec, data_offset);
            /* align section if needed */
            if (align > sec->sh_addralign)
                sec->sh_addralign = align;
            addr = 0; /* avoid warning */

        if (scope != VT_CONST || !sym) {
            sym = sym_push(v, type, r | VT_SYM, 0);
            sym->asm_label = ad->asm_label;
        /* update symbol definition */
            put_extern_sym(sym, sec, addr, size);
            /* put a common area */
            put_extern_sym(sym, NULL, align, size);
            /* XXX: find a nicer way */
            esym = &((ElfW(Sym) *)symtab_section->data)[sym->c];
            esym->st_shndx = SHN_COMMON;
            /* push global reference */
            sym = get_sym_ref(type, sec, addr, size);
            vpushsym(type, sym);
        /* patch symbol weakness */
        if (type->t & VT_WEAK)
        apply_visibility(sym, type);
#ifdef CONFIG_TCC_BCHECK
        /* handles bounds now because the symbol must be defined
           before the relocation can refer to it */
        if (tcc_state->do_bounds_check) {
            greloc(bounds_section, sym, bounds_section->data_offset, R_DATA_PTR);
            /* then add global bound info */
            bounds_ptr = section_ptr_add(bounds_section, 2 * sizeof(addr_t));
            bounds_ptr[0] = 0; /* relocated */
            bounds_ptr[1] = size;

    if (type->t & VT_VLA) {
        /* save current stack pointer */
        if (vlas_in_scope == 0) {
            if (vla_sp_root_loc == -1)
                vla_sp_root_loc = (loc -= PTR_SIZE);
            gen_vla_sp_save(vla_sp_root_loc);

        vla_runtime_type_size(type, &a);
        gen_vla_alloc(type, a);
        gen_vla_sp_save(addr);
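        /* Illustrative example (not part of the original source): for

               void f(int n) { int a[n]; ... }

           the first VLA of a scope saves the incoming stack pointer at
           vla_sp_root_loc; gen_vla_alloc() then grows the stack by the
           runtime size obtained from vla_runtime_type_size(), and the new
           stack pointer is saved at 'addr' so the object can be addressed
           and the stack restored when the scope is left. */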
    } else if (has_init) {
        size_t oldreloc_offset = 0;
        if (sec && sec->reloc)
            oldreloc_offset = sec->reloc->data_offset;
        decl_initializer(type, sec, addr, 1, 0);
        if (sec && sec->reloc)
            squeeze_multi_relocs(sec, oldreloc_offset);
        /* patch flexible array member size back to -1, */
        /* for possible subsequent similar declarations */
            flexible_array->type.ref->c = -1;

    /* restore parse state if needed */
        restore_parse_state(&saved_parse_state);

static void put_func_debug(Sym *sym)
    /* XXX: we put here a dummy type */
    snprintf(buf, sizeof(buf), "%s:%c1",
             funcname, sym->type.t & VT_STATIC ? 'f' : 'F');
    put_stabs_r(buf, N_FUN, 0, file->line_num, 0,
                cur_text_section, sym->c);
    /* //gr gdb wants a line at the function */
    put_stabn(N_SLINE, 0, file->line_num, 0);
/* parse an old style function declaration list */
/* XXX: check multiple parameter */
static void func_decl_list(Sym *func_sym)
    /* parse each declaration */
    while (tok != '{' && tok != ';' && tok != ',' && tok != TOK_EOF &&
           tok != TOK_ASM1 && tok != TOK_ASM2 && tok != TOK_ASM3) {
        if (!parse_btype(&btype, &ad))
            expect("declaration list");
        if (((btype.t & VT_BTYPE) == VT_ENUM ||
             (btype.t & VT_BTYPE) == VT_STRUCT) &&
            /* we accept no variable after */
        type_decl(&type, &ad, &v, TYPE_DIRECT);
        /* find parameter in function parameter list */
            if ((s->v & ~SYM_FIELD) == v)
        tcc_error("declaration for parameter '%s' but no such parameter",
                  get_tok_str(v, NULL));
        /* check that no storage specifier except 'register' was given */
        if (type.t & VT_STORAGE)
            tcc_error("storage class specified for '%s'", get_tok_str(v, NULL));
        convert_parameter_type(&type);
        /* we can add the type (NOTE: it could be local to the function) */
        /* accept other parameters */
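        /* Illustrative example (not part of the original source): this handles
           K&R-style definitions where the parameter types follow the
           parameter list,

               int add(a, b)
                   int a;
                   int b;
               {
                   return a + b;
               }

           each declaration is matched against a name in the function's
           parameter list and supplies its type. */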
/* parse a function defined by symbol 'sym' and generate its code in
   'cur_text_section' */
static void gen_function(Sym *sym)
    ind = cur_text_section->data_offset;
    /* NOTE: we patch the symbol size later */
    put_extern_sym(sym, cur_text_section, ind, 0);
    funcname = get_tok_str(sym->v, NULL);

    /* Initialize VLA state */
    vla_sp_root_loc = -1;
    /* put debug symbol */
    if (tcc_state->do_debug)
        put_func_debug(sym);

    /* push a dummy symbol to enable local sym storage */
    sym_push2(&local_stack, SYM_FIELD, 0, 0);
    local_scope = 1; /* for function parameters */
    gfunc_prolog(&sym->type);
    block(NULL, NULL, 0);
    cur_text_section->data_offset = ind;
    label_pop(&global_label_stack, NULL);
    /* reset local stack */
    sym_pop(&local_stack, NULL, 0);
    /* end of function */
    /* patch symbol size */
    ((ElfW(Sym) *)symtab_section->data)[sym->c].st_size =
        ind - func_ind;
    /* patch symbol weakness (this definition overrules any prototype) */
    if (sym->type.t & VT_WEAK)
    apply_visibility(sym, &sym->type);
    if (tcc_state->do_debug) {
        put_stabn(N_FUN, 0, 0, ind - func_ind);
    /* It's better to crash than to generate wrong code */
    cur_text_section = NULL;
    funcname = ""; /* for safety */
    func_vt.t = VT_VOID; /* for safety */
    func_var = 0; /* for safety */
    ind = 0; /* for safety */
static void gen_inline_functions(TCCState *s)
    int inline_generated, i, ln;
    struct InlineFunc *fn;

    ln = file->line_num;
    /* iterate while inline functions are referenced */
        inline_generated = 0;
        for (i = 0; i < s->nb_inline_fns; ++i) {
            fn = s->inline_fns[i];
            if (sym && sym->c) {
                /* the function was used: generate its code and
                   convert it to a normal function */
                pstrcpy(file->filename, sizeof file->filename, fn->filename);
                sym->type.t &= ~VT_INLINE;
                begin_macro(fn->func_str, 1);
                cur_text_section = text_section;
                inline_generated = 1;
        if (!inline_generated)
    file->line_num = ln;
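    /* Illustrative example (not part of the original source):

           static inline int sq(int x) { return x * x; }

       is only recorded as a token string when parsed; its code is emitted
       here, at the end of the compilation unit, and only if some use gave
       the symbol a non-zero 'c'. Repeating the loop until nothing new is
       generated also covers an inline function that is first referenced
       from another inline function. */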
ST_FUNC void free_inline_functions(TCCState *s)
    /* free tokens of unused inline functions */
    for (i = 0; i < s->nb_inline_fns; ++i) {
        struct InlineFunc *fn = s->inline_fns[i];
        tok_str_free(fn->func_str);
    dynarray_reset(&s->inline_fns, &s->nb_inline_fns);
/* 'l' is VT_LOCAL or VT_CONST to define default storage type */
static int decl0(int l, int is_for_loop_init)
    if (!parse_btype(&btype, &ad)) {
        if (is_for_loop_init)
        /* skip redundant ';' */
        /* XXX: find more elegant solution */
        if (l == VT_CONST &&
            (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3)) {
            /* global asm block */
        /* special test for old K&R protos without explicit int
           type. Only accepted when defining global data */
        if (l == VT_LOCAL || tok < TOK_UIDENT)
    if (((btype.t & VT_BTYPE) == VT_ENUM ||
         (btype.t & VT_BTYPE) == VT_STRUCT) &&
        if ((btype.t & VT_BTYPE) == VT_STRUCT) {
            int v = btype.ref->v;
            if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) >= SYM_FIRST_ANOM)
                tcc_warning("unnamed struct/union that defines no instances");
    while (1) { /* iterate thru each declaration */
        /* If the base type itself was an array type of unspecified
           size (like in 'typedef int arr[]; arr x = {1};') then
           we will overwrite the unknown size by the real one for
           this decl. We need to unshare the ref symbol holding
           that size. */
        if ((type.t & VT_ARRAY) && type.ref->c < 0) {
            type.ref = sym_push(SYM_FIELD, &type.ref->type, 0, type.ref->c);
        type_decl(&type, &ad, &v, TYPE_DIRECT);
            type_to_str(buf, sizeof(buf), t, get_tok_str(v, NULL));
            printf("type = '%s'\n", buf);
        if ((type.t & VT_BTYPE) == VT_FUNC) {
            if ((type.t & VT_STATIC) && (l == VT_LOCAL)) {
                tcc_error("function without file scope cannot be static");
            /* if old style function prototype, we accept a
               declaration list */
            if (sym->c == FUNC_OLD)
                func_decl_list(sym);

        if (gnu_ext && (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3)) {
            ad.asm_label = asm_label_instr();
            /* parse one last attribute list, after asm label */
            parse_attribute(&ad);
#ifdef TCC_TARGET_PE
            if (ad.a.func_import)
                type.t |= VT_IMPORT;
            if (ad.a.func_export)
                type.t |= VT_EXPORT;
            type.t |= ad.a.visibility << VT_VIS_SHIFT;

                tcc_error("cannot use local functions");
            if ((type.t & VT_BTYPE) != VT_FUNC)
                expect("function definition");

            /* reject abstract declarators in function definition */
            while ((sym = sym->next) != NULL)
                if (!(sym->v & ~SYM_FIELD))
                    expect("identifier");

            /* XXX: cannot do better now: convert extern inline to static inline */
            if ((type.t & (VT_EXTERN | VT_INLINE)) == (VT_EXTERN | VT_INLINE))
                type.t = (type.t & ~VT_EXTERN) | VT_STATIC;
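            /* Illustrative example (not part of the original source): with the
               conversion above,

                   extern inline int f(void) { return 0; }

               is compiled as if it had been declared 'static inline', i.e.
               recorded and only emitted when used, instead of implementing
               the full ISO C99 'extern inline' semantics. */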
            if ((sym->type.t & VT_BTYPE) != VT_FUNC)
                ref = sym->type.ref;
                /* use func_call from prototype if not defined */
                if (ref->a.func_call != FUNC_CDECL
                    && type.ref->a.func_call == FUNC_CDECL)
                    type.ref->a.func_call = ref->a.func_call;

                /* use export from prototype */
                if (ref->a.func_export)
                    type.ref->a.func_export = 1;

                /* use static from prototype */
                if (sym->type.t & VT_STATIC)
                    type.t = (type.t & ~VT_EXTERN) | VT_STATIC;

                /* If the definition has no visibility use the
                   one from prototype. */
                if (!(type.t & VT_VIS_MASK))
                    type.t |= sym->type.t & VT_VIS_MASK;

                if (!is_compatible_types(&sym->type, &type)) {
                    tcc_error("incompatible types for redefinition of '%s'",
                              get_tok_str(v, NULL));
                if (ref->a.func_body)
                    tcc_error("redefinition of '%s'", get_tok_str(v, NULL));
                /* if symbol is already defined, then put complete type */
                /* put function symbol */
                sym = global_identifier_push(v, type.t, 0);
                sym->type.ref = type.ref;
            sym->type.ref->a.func_body = 1;
            sym->r = VT_SYM | VT_CONST;

            /* static inline functions are just recorded as a kind
               of macro. Their code will be emitted at the end of
               the compilation unit only if they are used */
            if ((type.t & (VT_INLINE | VT_STATIC)) ==
                (VT_INLINE | VT_STATIC)) {
                struct InlineFunc *fn;
                const char *filename;

                filename = file ? file->filename : "";
                fn = tcc_malloc(sizeof *fn + strlen(filename));
                strcpy(fn->filename, filename);
                fn->func_str = tok_str_alloc();
                        tcc_error("unexpected end of file");
                    tok_str_add_tok(fn->func_str);
                    } else if (t == '}') {
                        if (block_level == 0)
                tok_str_add(fn->func_str, -1);
                tok_str_add(fn->func_str, 0);
                dynarray_add(&tcc_state->inline_fns, &tcc_state->nb_inline_fns, fn);
                /* compute text section */
                cur_text_section = ad.section;
                if (!cur_text_section)
                    cur_text_section = text_section;

        if (btype.t & VT_TYPEDEF) {
            /* save typedefed type */
            /* XXX: test storage specifiers ? */
            if (sym && sym->scope == local_scope) {
                if (!is_compatible_types(&sym->type, &type)
                    || !(sym->type.t & VT_TYPEDEF))
                    tcc_error("incompatible redefinition of '%s'",
                              get_tok_str(v, NULL));
            sym = sym_push(v, &type, 0, 0);
            sym->type.t |= VT_TYPEDEF;
        if ((type.t & VT_BTYPE) == VT_FUNC) {
            /* external function definition */
            /* specific case for func_call attribute */
        } else if (!(type.t & VT_ARRAY)) {
            /* not lvalue if array */
            r |= lvalue_type(type.t);
        has_init = (tok == '=');
        if (has_init && (type.t & VT_VLA))
            tcc_error("variable length array cannot be initialized");
        if ((btype.t & VT_EXTERN) || ((type.t & VT_BTYPE) == VT_FUNC) ||
            ((type.t & VT_ARRAY) && (type.t & VT_STATIC) &&
             !has_init && l == VT_CONST && type.ref->c < 0)) {
            /* external variable or function */
            /* NOTE: as GCC, uninitialized global static
               arrays of null size are considered as
               extern */
            sym = external_sym(v, &type, r);
            sym->asm_label = ad.asm_label;

            if (ad.alias_target) {
                alias_target = sym_find(ad.alias_target);
                if (!alias_target || !alias_target->c)
                    tcc_error("unsupported forward __alias__ attribute");
                esym = &((ElfW(Sym) *)symtab_section->data)[alias_target->c];
                tsec.sh_num = esym->st_shndx;
                put_extern_sym2(sym, &tsec, esym->st_value, esym->st_size, 0);
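                /* Illustrative example (not part of the original source): the
                   alias handling above covers declarations such as

                       int impl(void) { return 1; }
                       int alias_name(void) __attribute__((alias("impl")));

                   'alias_name' receives the same section, value and size as
                   the already-defined target symbol; a forward alias whose
                   target is not yet defined is rejected. */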
        type.t |= (btype.t & VT_STATIC); /* Retain "static". */
        if (type.t & VT_STATIC)
        decl_initializer_alloc(&type, &ad, r, has_init, v, l);
        if (is_for_loop_init)

ST_FUNC void decl(int l)

/* ------------------------------------------------------------------------- */