2 * TCC - Tiny C Compiler
4 * Copyright (c) 2001-2004 Fabrice Bellard
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
23 /********************************************************/
24 /* global variables */
26 /* loc : local variable index
27 ind : output code index
29 anon_sym: anonymous symbol index
31 ST_DATA
int rsym
, anon_sym
, ind
, loc
;
33 ST_DATA Sym
*sym_free_first
;
34 ST_DATA
void **sym_pools
;
35 ST_DATA
int nb_sym_pools
;
37 ST_DATA Sym
*global_stack
;
38 ST_DATA Sym
*local_stack
;
39 ST_DATA Sym
*define_stack
;
40 ST_DATA Sym
*global_label_stack
;
41 ST_DATA Sym
*local_label_stack
;
42 static int local_scope
;
44 static int section_sym
;
46 ST_DATA
int vlas_in_scope
; /* number of VLAs that are currently in scope */
47 ST_DATA
int vla_sp_root_loc
; /* vla_sp_loc for SP before any VLAs were pushed */
48 ST_DATA
int vla_sp_loc
; /* Pointer to variable holding location to store stack pointer on the stack when modifying stack pointer */
50 ST_DATA SValue __vstack
[1+VSTACK_SIZE
], *vtop
, *pvtop
;
52 ST_DATA
int const_wanted
; /* true if constant wanted */
53 ST_DATA
int nocode_wanted
; /* true if no code generation wanted for an expression */
54 ST_DATA
int global_expr
; /* true if compound literals must be allocated globally (used during initializers parsing */
55 ST_DATA CType func_vt
; /* current function return type (used by return instruction) */
56 ST_DATA
int func_var
; /* true if current function is variadic (used by return instruction) */
58 ST_DATA
int last_line_num
, last_ind
, func_ind
; /* debug last line number and pc */
59 ST_DATA
const char *funcname
;
61 ST_DATA CType char_pointer_type
, func_old_type
, int_type
, size_type
;
63 ST_DATA
struct switch_t
{
67 } **p
; int n
; /* list of case ranges */
68 int def_sym
; /* default symbol */
69 } *cur_switch
; /* current switch */
71 /* ------------------------------------------------------------------------- */
73 static void gen_cast(CType
*type
);
74 static inline CType
*pointed_type(CType
*type
);
75 static int is_compatible_types(CType
*type1
, CType
*type2
);
76 static int parse_btype(CType
*type
, AttributeDef
*ad
);
77 static CType
*type_decl(CType
*type
, AttributeDef
*ad
, int *v
, int td
);
78 static void parse_expr_type(CType
*type
);
79 static void init_putv(CType
*type
, Section
*sec
, unsigned long c
);
80 static void decl_initializer(CType
*type
, Section
*sec
, unsigned long c
, int first
, int size_only
);
81 static void block(int *bsym
, int *csym
, int is_expr
);
82 static void decl_initializer_alloc(CType
*type
, AttributeDef
*ad
, int r
, int has_init
, int v
, int scope
);
83 static int decl0(int l
, int is_for_loop_init
, Sym
*);
84 static void expr_eq(void);
85 static void vla_runtime_type_size(CType
*type
, int *a
);
86 static void vla_sp_restore(void);
87 static void vla_sp_restore_root(void);
88 static int is_compatible_parameter_types(CType
*type1
, CType
*type2
);
89 static inline int64_t expr_const64(void);
90 ST_FUNC
void vpush64(int ty
, unsigned long long v
);
91 ST_FUNC
void vpush(CType
*type
);
92 ST_FUNC
int gvtst(int inv
, int t
);
93 ST_FUNC
int is_btype_size(int bt
);
94 static void gen_inline_functions(TCCState
*s
);
96 ST_INLN
int is_float(int t
)
100 return bt
== VT_LDOUBLE
|| bt
== VT_DOUBLE
|| bt
== VT_FLOAT
|| bt
== VT_QFLOAT
;
/* we use our own 'finite' function to avoid potential problems with
   non standard math libs */
/* Return 1 if 'd' is a finite IEEE-754 double (neither Inf nor NaN),
   0 otherwise.  A double is finite iff its 11 exponent bits are not
   all ones; reading the bits through a 64-bit integer makes the test
   independent of byte order (the old word-indexed version was
   little-endian only). */
ST_FUNC int ieee_finite(double d)
{
    unsigned long long u;
    /* type-pun via memcpy to avoid strict-aliasing UB */
    memcpy(&u, &d, sizeof(double));
    return ((u >> 52) & 0x7ffULL) != 0x7ffULL;
}
113 ST_FUNC
void test_lvalue(void)
115 if (!(vtop
->r
& VT_LVAL
))
119 ST_FUNC
void check_vstack(void)
122 tcc_error("internal compiler error: vstack leak (%d)", vtop
- pvtop
);
125 /* ------------------------------------------------------------------------- */
126 /* vstack debugging aid */
129 void pv (const char *lbl
, int a
, int b
)
132 for (i
= a
; i
< a
+ b
; ++i
) {
133 SValue
*p
= &vtop
[-i
];
134 printf("%s vtop[-%d] : type.t:%04x r:%04x r2:%04x c.i:%d\n",
135 lbl
, i
, p
->type
.t
, p
->r
, p
->r2
, (int)p
->c
.i
);
140 /* ------------------------------------------------------------------------- */
141 /* start of translation unit info */
142 ST_FUNC
void tcc_debug_start(TCCState
*s1
)
147 /* file info: full path + filename */
148 section_sym
= put_elf_sym(symtab_section
, 0, 0,
149 ELFW(ST_INFO
)(STB_LOCAL
, STT_SECTION
), 0,
150 text_section
->sh_num
, NULL
);
151 getcwd(buf
, sizeof(buf
));
153 normalize_slashes(buf
);
155 pstrcat(buf
, sizeof(buf
), "/");
156 put_stabs_r(buf
, N_SO
, 0, 0,
157 text_section
->data_offset
, text_section
, section_sym
);
158 put_stabs_r(file
->filename
, N_SO
, 0, 0,
159 text_section
->data_offset
, text_section
, section_sym
);
164 /* an elf symbol of type STT_FILE must be put so that STB_LOCAL
165 symbols can be safely used */
166 put_elf_sym(symtab_section
, 0, 0,
167 ELFW(ST_INFO
)(STB_LOCAL
, STT_FILE
), 0,
168 SHN_ABS
, file
->filename
);
171 /* put end of translation unit info */
172 ST_FUNC
void tcc_debug_end(TCCState
*s1
)
176 put_stabs_r(NULL
, N_SO
, 0, 0,
177 text_section
->data_offset
, text_section
, section_sym
);
181 /* generate line number info */
182 ST_FUNC
void tcc_debug_line(TCCState
*s1
)
186 if ((last_line_num
!= file
->line_num
|| last_ind
!= ind
)) {
187 put_stabn(N_SLINE
, 0, file
->line_num
, ind
- func_ind
);
189 last_line_num
= file
->line_num
;
193 /* put function symbol */
194 ST_FUNC
void tcc_debug_funcstart(TCCState
*s1
, Sym
*sym
)
202 /* XXX: we put here a dummy type */
203 snprintf(buf
, sizeof(buf
), "%s:%c1",
204 funcname
, sym
->type
.t
& VT_STATIC
? 'f' : 'F');
205 put_stabs_r(buf
, N_FUN
, 0, file
->line_num
, 0,
206 cur_text_section
, sym
->c
);
207 /* //gr gdb wants a line at the function */
208 put_stabn(N_SLINE
, 0, file
->line_num
, 0);
214 /* put function size */
215 ST_FUNC
void tcc_debug_funcend(TCCState
*s1
, int size
)
219 put_stabn(N_FUN
, 0, 0, size
);
222 /* ------------------------------------------------------------------------- */
223 ST_FUNC
void tccgen_start(TCCState
*s1
)
225 cur_text_section
= NULL
;
227 anon_sym
= SYM_FIRST_ANOM
;
232 /* define some often used types */
234 char_pointer_type
.t
= VT_BYTE
;
235 mk_pointer(&char_pointer_type
);
237 size_type
.t
= VT_INT
;
239 size_type
.t
= VT_LLONG
;
241 func_old_type
.t
= VT_FUNC
;
242 func_old_type
.ref
= sym_push(SYM_FIELD
, &int_type
, FUNC_CDECL
, FUNC_OLD
);
246 #ifdef TCC_TARGET_ARM
251 ST_FUNC
void tccgen_end(TCCState
*s1
)
253 gen_inline_functions(s1
);
255 /* end of translation unit info */
259 /* ------------------------------------------------------------------------- */
260 /* apply storage attributes to Elf symbol */
262 static void update_storage(Sym
*sym
)
271 esym
= &((ElfW(Sym
) *)symtab_section
->data
)[sym
->c
];
274 esym
->st_other
= (esym
->st_other
& ~ELFW(ST_VISIBILITY
)(-1))
275 | ((t
& VT_VIS_MASK
) >> VT_VIS_SHIFT
);
278 esym
->st_info
= ELFW(ST_INFO
)(STB_WEAK
, ELFW(ST_TYPE
)(esym
->st_info
));
282 esym
->st_other
|= ST_PE_EXPORT
;
286 /* ------------------------------------------------------------------------- */
287 /* update sym->c so that it points to an external symbol in section
288 'section' with value 'value' */
290 ST_FUNC
void put_extern_sym2(Sym
*sym
, Section
*section
,
291 addr_t value
, unsigned long size
,
292 int can_add_underscore
)
294 int sym_type
, sym_bind
, sh_num
, info
, other
, t
;
298 #ifdef CONFIG_TCC_BCHECK
304 else if (section
== SECTION_ABS
)
307 sh_num
= section
->sh_num
;
310 name
= get_tok_str(sym
->v
, NULL
);
311 #ifdef CONFIG_TCC_BCHECK
312 if (tcc_state
->do_bounds_check
) {
313 /* XXX: avoid doing that for statics ? */
314 /* if bound checking is activated, we change some function
315 names by adding the "__bound" prefix */
318 /* XXX: we rely only on malloc hooks */
331 strcpy(buf
, "__bound_");
339 if ((t
& VT_BTYPE
) == VT_FUNC
) {
341 } else if ((t
& VT_BTYPE
) == VT_VOID
) {
342 sym_type
= STT_NOTYPE
;
344 sym_type
= STT_OBJECT
;
347 sym_bind
= STB_LOCAL
;
349 sym_bind
= STB_GLOBAL
;
352 if (sym_type
== STT_FUNC
&& sym
->type
.ref
) {
353 Sym
*ref
= sym
->type
.ref
;
354 if (ref
->a
.func_call
== FUNC_STDCALL
&& can_add_underscore
) {
355 sprintf(buf1
, "_%s@%d", name
, ref
->a
.func_args
* PTR_SIZE
);
357 other
|= ST_PE_STDCALL
;
358 can_add_underscore
= 0;
362 other
|= ST_PE_IMPORT
;
364 if (tcc_state
->leading_underscore
&& can_add_underscore
) {
366 pstrcpy(buf1
+ 1, sizeof(buf1
) - 1, name
);
370 name
= get_tok_str(sym
->asm_label
, NULL
);
371 info
= ELFW(ST_INFO
)(sym_bind
, sym_type
);
372 sym
->c
= set_elf_sym(symtab_section
, value
, size
, info
, other
, sh_num
, name
);
374 esym
= &((ElfW(Sym
) *)symtab_section
->data
)[sym
->c
];
375 esym
->st_value
= value
;
376 esym
->st_size
= size
;
377 esym
->st_shndx
= sh_num
;
382 ST_FUNC
void put_extern_sym(Sym
*sym
, Section
*section
,
383 addr_t value
, unsigned long size
)
385 put_extern_sym2(sym
, section
, value
, size
, 1);
388 /* add a new relocation entry to symbol 'sym' in section 's' */
389 ST_FUNC
void greloca(Section
*s
, Sym
*sym
, unsigned long offset
, int type
,
394 if (nocode_wanted
&& s
== cur_text_section
)
399 put_extern_sym(sym
, NULL
, 0, 0);
403 /* now we can add ELF relocation info */
404 put_elf_reloca(symtab_section
, s
, offset
, type
, c
, addend
);
407 ST_FUNC
void greloc(Section
*s
, Sym
*sym
, unsigned long offset
, int type
)
409 greloca(s
, sym
, offset
, type
, 0);
412 /* ------------------------------------------------------------------------- */
413 /* symbol allocator */
414 static Sym
*__sym_malloc(void)
416 Sym
*sym_pool
, *sym
, *last_sym
;
419 sym_pool
= tcc_malloc(SYM_POOL_NB
* sizeof(Sym
));
420 dynarray_add(&sym_pools
, &nb_sym_pools
, sym_pool
);
422 last_sym
= sym_free_first
;
424 for(i
= 0; i
< SYM_POOL_NB
; i
++) {
425 sym
->next
= last_sym
;
429 sym_free_first
= last_sym
;
433 static inline Sym
*sym_malloc(void)
437 sym
= sym_free_first
;
439 sym
= __sym_malloc();
440 sym_free_first
= sym
->next
;
443 sym
= tcc_malloc(sizeof(Sym
));
448 ST_INLN
void sym_free(Sym
*sym
)
451 sym
->next
= sym_free_first
;
452 sym_free_first
= sym
;
458 /* push, without hashing */
459 ST_FUNC Sym
*sym_push2(Sym
**ps
, int v
, int t
, long c
)
479 /* find a symbol and return its associated structure. 's' is the top
480 of the symbol stack */
481 ST_FUNC Sym
*sym_find2(Sym
*s
, int v
)
493 /* structure lookup */
494 ST_INLN Sym
*struct_find(int v
)
497 if ((unsigned)v
>= (unsigned)(tok_ident
- TOK_IDENT
))
499 return table_ident
[v
]->sym_struct
;
502 /* find an identifier */
503 ST_INLN Sym
*sym_find(int v
)
506 if ((unsigned)v
>= (unsigned)(tok_ident
- TOK_IDENT
))
508 return table_ident
[v
]->sym_identifier
;
511 /* push a given symbol on the symbol stack */
512 ST_FUNC Sym
*sym_push(int v
, CType
*type
, int r
, long c
)
521 s
= sym_push2(ps
, v
, type
->t
, c
);
522 s
->type
.ref
= type
->ref
;
524 /* don't record fields or anonymous symbols */
526 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
527 /* record symbol in token array */
528 ts
= table_ident
[(v
& ~SYM_STRUCT
) - TOK_IDENT
];
530 ps
= &ts
->sym_struct
;
532 ps
= &ts
->sym_identifier
;
535 s
->scope
= local_scope
;
536 if (s
->prev_tok
&& s
->prev_tok
->scope
== s
->scope
)
537 tcc_error("redeclaration of '%s'",
538 get_tok_str(v
& ~SYM_STRUCT
, NULL
));
543 /* push a global identifier */
544 ST_FUNC Sym
*global_identifier_push(int v
, int t
, long c
)
547 s
= sym_push2(&global_stack
, v
, t
, c
);
548 /* don't record anonymous symbol */
549 if (v
< SYM_FIRST_ANOM
) {
550 ps
= &table_ident
[v
- TOK_IDENT
]->sym_identifier
;
551 /* modify the top most local identifier, so that
552 sym_identifier will point to 's' when popped */
554 ps
= &(*ps
)->prev_tok
;
561 /* pop symbols until top reaches 'b'. If KEEP is non-zero don't really
562 pop them yet from the list, but do remove them from the token array. */
563 ST_FUNC
void sym_pop(Sym
**ptop
, Sym
*b
, int keep
)
573 /* remove symbol in token array */
575 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
576 ts
= table_ident
[(v
& ~SYM_STRUCT
) - TOK_IDENT
];
578 ps
= &ts
->sym_struct
;
580 ps
= &ts
->sym_identifier
;
591 /* ------------------------------------------------------------------------- */
593 static void vsetc(CType
*type
, int r
, CValue
*vc
)
597 if (vtop
>= vstack
+ (VSTACK_SIZE
- 1))
598 tcc_error("memory full (vstack)");
599 /* cannot let cpu flags if other instruction are generated. Also
600 avoid leaving VT_JMP anywhere except on the top of the stack
601 because it would complicate the code generator.
603 Don't do this when nocode_wanted. vtop might come from
604 !nocode_wanted regions (see 88_codeopt.c) and transforming
605 it to a register without actually generating code is wrong
606 as their value might still be used for real. All values
607 we push under nocode_wanted will eventually be popped
608 again, so that the VT_CMP/VT_JMP value will be in vtop
609 when code is unsuppressed again.
611 Same logic below in vswap(); */
612 if (vtop
>= vstack
&& !nocode_wanted
) {
613 v
= vtop
->r
& VT_VALMASK
;
614 if (v
== VT_CMP
|| (v
& ~1) == VT_JMP
)
626 ST_FUNC
void vswap(void)
629 /* cannot vswap cpu flags. See comment at vsetc() above */
630 if (vtop
>= vstack
&& !nocode_wanted
) {
631 int v
= vtop
->r
& VT_VALMASK
;
632 if (v
== VT_CMP
|| (v
& ~1) == VT_JMP
)
640 /* pop stack value */
641 ST_FUNC
void vpop(void)
644 v
= vtop
->r
& VT_VALMASK
;
645 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
646 /* for x86, we need to pop the FP stack */
648 o(0xd8dd); /* fstp %st(0) */
651 if (v
== VT_JMP
|| v
== VT_JMPI
) {
652 /* need to put correct jump if && or || without test */
658 /* push constant of type "type" with useless value */
659 ST_FUNC
void vpush(CType
*type
)
662 vsetc(type
, VT_CONST
, &cval
);
665 /* push integer constant */
666 ST_FUNC
void vpushi(int v
)
670 vsetc(&int_type
, VT_CONST
, &cval
);
673 /* push a pointer sized constant */
674 static void vpushs(addr_t v
)
678 vsetc(&size_type
, VT_CONST
, &cval
);
681 /* push arbitrary 64bit constant */
682 ST_FUNC
void vpush64(int ty
, unsigned long long v
)
689 vsetc(&ctype
, VT_CONST
, &cval
);
692 /* push long long constant */
693 static inline void vpushll(long long v
)
695 vpush64(VT_LLONG
, v
);
698 ST_FUNC
void vset(CType
*type
, int r
, long v
)
703 vsetc(type
, r
, &cval
);
706 static void vseti(int r
, int v
)
714 ST_FUNC
void vpushv(SValue
*v
)
716 if (vtop
>= vstack
+ (VSTACK_SIZE
- 1))
717 tcc_error("memory full (vstack)");
722 static void vdup(void)
727 /* rotate n first stack elements to the bottom
728 I1 ... In -> I2 ... In I1 [top is right]
730 ST_FUNC
void vrotb(int n
)
741 /* rotate the n elements before entry e towards the top
742 I1 ... In ... -> In I1 ... I(n-1) ... [top is right]
744 ST_FUNC
void vrote(SValue
*e
, int n
)
750 for(i
= 0;i
< n
- 1; i
++)
755 /* rotate n first stack elements to the top
756 I1 ... In -> In I1 ... I(n-1) [top is right]
758 ST_FUNC
void vrott(int n
)
763 /* push a symbol value of TYPE */
764 static inline void vpushsym(CType
*type
, Sym
*sym
)
768 vsetc(type
, VT_CONST
| VT_SYM
, &cval
);
772 /* Return a static symbol pointing to a section */
773 ST_FUNC Sym
*get_sym_ref(CType
*type
, Section
*sec
, unsigned long offset
, unsigned long size
)
779 sym
= global_identifier_push(v
, type
->t
| VT_STATIC
, 0);
780 sym
->type
.ref
= type
->ref
;
781 sym
->r
= VT_CONST
| VT_SYM
;
782 put_extern_sym(sym
, sec
, offset
, size
);
786 /* push a reference to a section offset by adding a dummy symbol */
787 static void vpush_ref(CType
*type
, Section
*sec
, unsigned long offset
, unsigned long size
)
789 vpushsym(type
, get_sym_ref(type
, sec
, offset
, size
));
792 /* define a new external reference to a symbol 'v' of type 'u' */
793 ST_FUNC Sym
*external_global_sym(int v
, CType
*type
, int r
)
799 /* push forward reference */
800 s
= global_identifier_push(v
, type
->t
| VT_EXTERN
, 0);
801 s
->type
.ref
= type
->ref
;
802 s
->r
= r
| VT_CONST
| VT_SYM
;
807 /* Merge some storage attributes. */
808 static void patch_storage(Sym
*sym
, CType
*type
)
811 if (!is_compatible_types(&sym
->type
, type
))
812 tcc_error("incompatible types for redefinition of '%s'",
813 get_tok_str(sym
->v
, NULL
));
816 if ((sym
->type
.t
^ t
) & VT_IMPORT
)
817 tcc_error("incompatible dll linkage for redefinition of '%s'",
818 get_tok_str(sym
->v
, NULL
));
820 sym
->type
.t
|= t
& (VT_EXPORT
|VT_WEAK
);
821 if (t
& VT_VIS_MASK
) {
822 int vis
= sym
->type
.t
& VT_VIS_MASK
;
823 int vis2
= t
& VT_VIS_MASK
;
824 if (vis
== (STV_DEFAULT
<< VT_VIS_SHIFT
))
826 else if (vis2
!= (STV_DEFAULT
<< VT_VIS_SHIFT
))
827 vis
= (vis
< vis2
) ? vis
: vis2
;
828 sym
->type
.t
= (sym
->type
.t
& ~VT_VIS_MASK
) | vis
;
832 /* define a new external reference to a symbol 'v' */
833 static Sym
*external_sym(int v
, CType
*type
, int r
)
838 /* push forward reference */
839 s
= sym_push(v
, type
, r
| VT_CONST
| VT_SYM
, 0);
840 s
->type
.t
|= VT_EXTERN
;
842 if (s
->type
.ref
== func_old_type
.ref
) {
843 s
->type
.ref
= type
->ref
;
844 s
->r
= r
| VT_CONST
| VT_SYM
;
845 s
->type
.t
|= VT_EXTERN
;
847 patch_storage(s
, type
);
853 /* push a reference to global symbol v */
854 ST_FUNC
void vpush_global_sym(CType
*type
, int v
)
856 vpushsym(type
, external_global_sym(v
, type
, 0));
859 /* save registers up to (vtop - n) stack entry */
860 ST_FUNC
void save_regs(int n
)
863 for(p
= vstack
, p1
= vtop
- n
; p
<= p1
; p
++)
867 /* save r to the memory stack, and mark it as being free */
868 ST_FUNC
void save_reg(int r
)
870 save_reg_upstack(r
, 0);
873 /* save r to the memory stack, and mark it as being free,
874 if seen up to (vtop - n) stack entry */
875 ST_FUNC
void save_reg_upstack(int r
, int n
)
877 int l
, saved
, size
, align
;
881 if ((r
&= VT_VALMASK
) >= VT_CONST
)
886 /* modify all stack values */
889 for(p
= vstack
, p1
= vtop
- n
; p
<= p1
; p
++) {
890 if ((p
->r
& VT_VALMASK
) == r
||
891 ((p
->type
.t
& VT_BTYPE
) == VT_LLONG
&& (p
->r2
& VT_VALMASK
) == r
)) {
892 /* must save value on stack if not already done */
894 /* NOTE: must reload 'r' because r might be equal to r2 */
895 r
= p
->r
& VT_VALMASK
;
896 /* store register in the stack */
898 if ((p
->r
& VT_LVAL
) ||
899 (!is_float(type
->t
) && (type
->t
& VT_BTYPE
) != VT_LLONG
))
900 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
901 type
= &char_pointer_type
;
905 size
= type_size(type
, &align
);
906 loc
= (loc
- size
) & -align
;
908 sv
.r
= VT_LOCAL
| VT_LVAL
;
911 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
912 /* x86 specific: need to pop fp register ST0 if saved */
914 o(0xd8dd); /* fstp %st(0) */
917 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
918 /* special long long case */
919 if ((type
->t
& VT_BTYPE
) == VT_LLONG
) {
927 /* mark that stack entry as being saved on the stack */
928 if (p
->r
& VT_LVAL
) {
929 /* also clear the bounded flag because the
930 relocation address of the function was stored in
932 p
->r
= (p
->r
& ~(VT_VALMASK
| VT_BOUNDED
)) | VT_LLOCAL
;
934 p
->r
= lvalue_type(p
->type
.t
) | VT_LOCAL
;
942 #ifdef TCC_TARGET_ARM
943 /* find a register of class 'rc2' with at most one reference on stack.
944 * If none, call get_reg(rc) */
945 ST_FUNC
int get_reg_ex(int rc
, int rc2
)
950 for(r
=0;r
<NB_REGS
;r
++) {
951 if (reg_classes
[r
] & rc2
) {
954 for(p
= vstack
; p
<= vtop
; p
++) {
955 if ((p
->r
& VT_VALMASK
) == r
||
956 (p
->r2
& VT_VALMASK
) == r
)
967 /* find a free register of class 'rc'. If none, save one register */
968 ST_FUNC
int get_reg(int rc
)
973 /* find a free register */
974 for(r
=0;r
<NB_REGS
;r
++) {
975 if (reg_classes
[r
] & rc
) {
978 for(p
=vstack
;p
<=vtop
;p
++) {
979 if ((p
->r
& VT_VALMASK
) == r
||
980 (p
->r2
& VT_VALMASK
) == r
)
988 /* no register left : free the first one on the stack (VERY
989 IMPORTANT to start from the bottom to ensure that we don't
990 spill registers used in gen_opi()) */
991 for(p
=vstack
;p
<=vtop
;p
++) {
992 /* look at second register (if long long) */
993 r
= p
->r2
& VT_VALMASK
;
994 if (r
< VT_CONST
&& (reg_classes
[r
] & rc
))
996 r
= p
->r
& VT_VALMASK
;
997 if (r
< VT_CONST
&& (reg_classes
[r
] & rc
)) {
1003 /* Should never comes here */
1007 /* move register 's' (of type 't') to 'r', and flush previous value of r to memory
1009 static void move_reg(int r
, int s
, int t
)
1023 /* get address of vtop (vtop MUST BE an lvalue) */
1024 ST_FUNC
void gaddrof(void)
1026 vtop
->r
&= ~VT_LVAL
;
1027 /* tricky: if saved lvalue, then we can go back to lvalue */
1028 if ((vtop
->r
& VT_VALMASK
) == VT_LLOCAL
)
1029 vtop
->r
= (vtop
->r
& ~(VT_VALMASK
| VT_LVAL_TYPE
)) | VT_LOCAL
| VT_LVAL
;
1034 #ifdef CONFIG_TCC_BCHECK
1035 /* generate lvalue bound code */
1036 static void gbound(void)
1041 vtop
->r
&= ~VT_MUSTBOUND
;
1042 /* if lvalue, then use checking code before dereferencing */
1043 if (vtop
->r
& VT_LVAL
) {
1044 /* if not VT_BOUNDED value, then make one */
1045 if (!(vtop
->r
& VT_BOUNDED
)) {
1046 lval_type
= vtop
->r
& (VT_LVAL_TYPE
| VT_LVAL
);
1047 /* must save type because we must set it to int to get pointer */
1049 vtop
->type
.t
= VT_PTR
;
1052 gen_bounded_ptr_add();
1053 vtop
->r
|= lval_type
;
1056 /* then check for dereferencing */
1057 gen_bounded_ptr_deref();
1062 /* store vtop a register belonging to class 'rc'. lvalues are
1063 converted to values. Cannot be used if cannot be converted to
1064 register value (such as structures). */
1065 ST_FUNC
int gv(int rc
)
1067 int r
, bit_pos
, bit_size
, size
, align
;
1070 /* NOTE: get_reg can modify vstack[] */
1071 if (vtop
->type
.t
& VT_BITFIELD
) {
1074 bit_pos
= (vtop
->type
.t
>> VT_STRUCT_SHIFT
) & 0x3f;
1075 bit_size
= (vtop
->type
.t
>> (VT_STRUCT_SHIFT
+ 6)) & 0x3f;
1076 /* remove bit field info to avoid loops */
1077 vtop
->type
.t
&= ~VT_BITFIELD
& ((1 << VT_STRUCT_SHIFT
) - 1);
1078 /* cast to int to propagate signedness in following ops */
1079 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
) {
1084 if((vtop
->type
.t
& VT_UNSIGNED
) ||
1085 (vtop
->type
.t
& VT_BTYPE
) == VT_BOOL
||
1086 (((vtop
->type
.t
& VT_BTYPE
) == VT_ENUM
) &&
1087 vtop
->type
.ref
->a
.unsigned_enum
))
1088 type
.t
|= VT_UNSIGNED
;
1090 /* generate shifts */
1091 vpushi(bits
- (bit_pos
+ bit_size
));
1093 vpushi(bits
- bit_size
);
1094 /* NOTE: transformed to SHR if unsigned */
1098 if (is_float(vtop
->type
.t
) &&
1099 (vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
) {
1100 unsigned long offset
;
1101 /* CPUs usually cannot use float constants, so we store them
1102 generically in data segment */
1103 size
= type_size(&vtop
->type
, &align
);
1104 offset
= section_add(data_section
, size
, align
);
1105 vpush_ref(&vtop
->type
, data_section
, offset
, size
);
1107 init_putv(&vtop
->type
, data_section
, offset
);
1110 #ifdef CONFIG_TCC_BCHECK
1111 if (vtop
->r
& VT_MUSTBOUND
)
1115 r
= vtop
->r
& VT_VALMASK
;
1116 rc2
= (rc
& RC_FLOAT
) ? RC_FLOAT
: RC_INT
;
1117 #ifndef TCC_TARGET_ARM64
1120 #ifdef TCC_TARGET_X86_64
1121 else if (rc
== RC_FRET
)
1125 /* need to reload if:
1127 - lvalue (need to dereference pointer)
1128 - already a register, but not in the right class */
1130 || (vtop
->r
& VT_LVAL
)
1131 || !(reg_classes
[r
] & rc
)
1132 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
1133 || ((vtop
->type
.t
& VT_BTYPE
) == VT_QLONG
&& !(reg_classes
[vtop
->r2
] & rc2
))
1134 || ((vtop
->type
.t
& VT_BTYPE
) == VT_QFLOAT
&& !(reg_classes
[vtop
->r2
] & rc2
))
1136 || ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
&& !(reg_classes
[vtop
->r2
] & rc2
))
1141 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
1142 if (((vtop
->type
.t
& VT_BTYPE
) == VT_QLONG
) || ((vtop
->type
.t
& VT_BTYPE
) == VT_QFLOAT
)) {
1143 int addr_type
= VT_LLONG
, load_size
= 8, load_type
= ((vtop
->type
.t
& VT_BTYPE
) == VT_QLONG
) ? VT_LLONG
: VT_DOUBLE
;
1145 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
) {
1146 int addr_type
= VT_INT
, load_size
= 4, load_type
= VT_INT
;
1147 unsigned long long ll
;
1149 int r2
, original_type
;
1150 original_type
= vtop
->type
.t
;
1151 /* two register type load : expand to two words
1153 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
1154 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
) {
1157 vtop
->c
.i
= ll
; /* first word */
1159 vtop
->r
= r
; /* save register value */
1160 vpushi(ll
>> 32); /* second word */
1163 if (vtop
->r
& VT_LVAL
) {
1164 /* We do not want to modifier the long long
1165 pointer here, so the safest (and less
1166 efficient) is to save all the other registers
1167 in the stack. XXX: totally inefficient. */
1171 /* lvalue_save: save only if used further down the stack */
1172 save_reg_upstack(vtop
->r
, 1);
1174 /* load from memory */
1175 vtop
->type
.t
= load_type
;
1178 vtop
[-1].r
= r
; /* save register value */
1179 /* increment pointer to get second word */
1180 vtop
->type
.t
= addr_type
;
1185 vtop
->type
.t
= load_type
;
1187 /* move registers */
1190 vtop
[-1].r
= r
; /* save register value */
1191 vtop
->r
= vtop
[-1].r2
;
1193 /* Allocate second register. Here we rely on the fact that
1194 get_reg() tries first to free r2 of an SValue. */
1198 /* write second register */
1200 vtop
->type
.t
= original_type
;
1201 } else if ((vtop
->r
& VT_LVAL
) && !is_float(vtop
->type
.t
)) {
1203 /* lvalue of scalar type : need to use lvalue type
1204 because of possible cast */
1207 /* compute memory access type */
1208 if (vtop
->r
& VT_LVAL_BYTE
)
1210 else if (vtop
->r
& VT_LVAL_SHORT
)
1212 if (vtop
->r
& VT_LVAL_UNSIGNED
)
1216 /* restore wanted type */
1219 /* one register type load */
1224 #ifdef TCC_TARGET_C67
1225 /* uses register pairs for doubles */
1226 if ((vtop
->type
.t
& VT_BTYPE
) == VT_DOUBLE
)
1233 /* generate vtop[-1] and vtop[0] in resp. classes rc1 and rc2 */
1234 ST_FUNC
void gv2(int rc1
, int rc2
)
1238 /* generate more generic register first. But VT_JMP or VT_CMP
1239 values must be generated first in all cases to avoid possible
1241 v
= vtop
[0].r
& VT_VALMASK
;
1242 if (v
!= VT_CMP
&& (v
& ~1) != VT_JMP
&& rc1
<= rc2
) {
1247 /* test if reload is needed for first register */
1248 if ((vtop
[-1].r
& VT_VALMASK
) >= VT_CONST
) {
1258 /* test if reload is needed for first register */
1259 if ((vtop
[0].r
& VT_VALMASK
) >= VT_CONST
) {
1265 #ifndef TCC_TARGET_ARM64
1266 /* wrapper around RC_FRET to return a register by type */
1267 static int rc_fret(int t
)
1269 #ifdef TCC_TARGET_X86_64
1270 if (t
== VT_LDOUBLE
) {
1278 /* wrapper around REG_FRET to return a register by type */
1279 static int reg_fret(int t
)
1281 #ifdef TCC_TARGET_X86_64
1282 if (t
== VT_LDOUBLE
) {
1289 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
1290 /* expand 64bit on stack in two ints */
1291 static void lexpand(void)
1294 u
= vtop
->type
.t
& (VT_DEFSIGN
| VT_UNSIGNED
);
1295 v
= vtop
->r
& (VT_VALMASK
| VT_LVAL
);
1296 if (v
== VT_CONST
) {
1299 } else if (v
== (VT_LVAL
|VT_CONST
) || v
== (VT_LVAL
|VT_LOCAL
)) {
1305 vtop
[0].r
= vtop
[-1].r2
;
1306 vtop
[0].r2
= vtop
[-1].r2
= VT_CONST
;
1308 vtop
[0].type
.t
= vtop
[-1].type
.t
= VT_INT
| u
;
1312 #ifdef TCC_TARGET_ARM
1313 /* expand long long on stack */
1314 ST_FUNC
void lexpand_nr(void)
1318 u
= vtop
->type
.t
& (VT_DEFSIGN
| VT_UNSIGNED
);
1320 vtop
->r2
= VT_CONST
;
1321 vtop
->type
.t
= VT_INT
| u
;
1322 v
=vtop
[-1].r
& (VT_VALMASK
| VT_LVAL
);
1323 if (v
== VT_CONST
) {
1324 vtop
[-1].c
.i
= vtop
->c
.i
;
1325 vtop
->c
.i
= vtop
->c
.i
>> 32;
1327 } else if (v
== (VT_LVAL
|VT_CONST
) || v
== (VT_LVAL
|VT_LOCAL
)) {
1329 vtop
->r
= vtop
[-1].r
;
1330 } else if (v
> VT_CONST
) {
1334 vtop
->r
= vtop
[-1].r2
;
1335 vtop
[-1].r2
= VT_CONST
;
1336 vtop
[-1].type
.t
= VT_INT
| u
;
1340 #if !defined(TCC_TARGET_X86_64) && !defined(TCC_TARGET_ARM64)
1341 /* build a long long from two ints */
1342 static void lbuild(int t
)
1344 gv2(RC_INT
, RC_INT
);
1345 vtop
[-1].r2
= vtop
[0].r
;
1346 vtop
[-1].type
.t
= t
;
1351 /* convert stack entry to register and duplicate its value in another
1353 static void gv_dup(void)
1359 #if !defined(TCC_TARGET_X86_64) && !defined(TCC_TARGET_ARM64)
1360 if ((t
& VT_BTYPE
) == VT_LLONG
) {
1367 /* stack: H L L1 H1 */
1377 /* duplicate value */
1382 #ifdef TCC_TARGET_X86_64
1383 if ((t
& VT_BTYPE
) == VT_LDOUBLE
) {
1393 load(r1
, &sv
); /* move r to r1 */
1395 /* duplicates value */
1401 /* Generate value test
1403 * Generate a test for any value (jump, comparison and integers) */
1404 ST_FUNC
int gvtst(int inv
, int t
)
1406 int v
= vtop
->r
& VT_VALMASK
;
1407 if (v
!= VT_CMP
&& v
!= VT_JMP
&& v
!= VT_JMPI
) {
1411 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
1412 /* constant jmp optimization */
1413 if ((vtop
->c
.i
!= 0) != inv
)
1418 return gtst(inv
, t
);
1421 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
1422 /* generate CPU independent (unsigned) long long operations */
1423 static void gen_opl(int op
)
1425 int t
, a
, b
, op1
, c
, i
;
1427 unsigned short reg_iret
= REG_IRET
;
1428 unsigned short reg_lret
= REG_LRET
;
1434 func
= TOK___divdi3
;
1437 func
= TOK___udivdi3
;
1440 func
= TOK___moddi3
;
1443 func
= TOK___umoddi3
;
1450 /* call generic long long function */
1451 vpush_global_sym(&func_old_type
, func
);
1456 vtop
->r2
= reg_lret
;
1464 //pv("gen_opl A",0,2);
1470 /* stack: L1 H1 L2 H2 */
1475 vtop
[-2] = vtop
[-3];
1478 /* stack: H1 H2 L1 L2 */
1479 //pv("gen_opl B",0,4);
1485 /* stack: H1 H2 L1 L2 ML MH */
1488 /* stack: ML MH H1 H2 L1 L2 */
1492 /* stack: ML MH H1 L2 H2 L1 */
1497 /* stack: ML MH M1 M2 */
1500 } else if (op
== '+' || op
== '-') {
1501 /* XXX: add non carry method too (for MIPS or alpha) */
1507 /* stack: H1 H2 (L1 op L2) */
1510 gen_op(op1
+ 1); /* TOK_xxxC2 */
1513 /* stack: H1 H2 (L1 op L2) */
1516 /* stack: (L1 op L2) H1 H2 */
1518 /* stack: (L1 op L2) (H1 op H2) */
1526 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
1527 t
= vtop
[-1].type
.t
;
1531 /* stack: L H shift */
1533 /* constant: simpler */
1534 /* NOTE: all comments are for SHL. the other cases are
1535 done by swapping words */
1546 if (op
!= TOK_SAR
) {
1579 /* XXX: should provide a faster fallback on x86 ? */
1582 func
= TOK___ashrdi3
;
1585 func
= TOK___lshrdi3
;
1588 func
= TOK___ashldi3
;
1594 /* compare operations */
1600 /* stack: L1 H1 L2 H2 */
1602 vtop
[-1] = vtop
[-2];
1604 /* stack: L1 L2 H1 H2 */
1607 /* when values are equal, we need to compare low words. since
1608 the jump is inverted, we invert the test too. */
1611 else if (op1
== TOK_GT
)
1613 else if (op1
== TOK_ULT
)
1615 else if (op1
== TOK_UGT
)
1625 /* generate non equal test */
1631 /* compare low. Always unsigned */
1635 else if (op1
== TOK_LE
)
1637 else if (op1
== TOK_GT
)
1639 else if (op1
== TOK_GE
)
/* Emulate a signed 64-bit division on unsigned operands.
   Operands are reduced to their magnitudes (unsigned negation is
   well-defined), divided, and the quotient is negated when exactly
   one operand was negative — i.e. C's truncate-toward-zero '/'. */
static uint64_t gen_opic_sdiv(uint64_t a, uint64_t b)
{
    uint64_t abs_a = (a >> 63) ? -a : a;
    uint64_t abs_b = (b >> 63) ? -b : b;
    uint64_t quot = abs_a / abs_b;
    /* sign of the result = XOR of the operand signs */
    return ((a ^ b) >> 63) ? -quot : quot;
}
/* Signed 64-bit '<' implemented on unsigned operands: XOR-ing the
   sign bit biases both values so unsigned comparison orders them
   as two's-complement signed integers would. */
static int gen_opic_lt(uint64_t a, uint64_t b)
{
    const uint64_t sign_bit = (uint64_t)1 << 63;
    return (a ^ sign_bit) < (b ^ sign_bit);
}
1661 /* handle integer constant optimizations and various machine
1663 static void gen_opic(int op
)
1665 SValue
*v1
= vtop
- 1;
1667 int t1
= v1
->type
.t
& VT_BTYPE
;
1668 int t2
= v2
->type
.t
& VT_BTYPE
;
1669 int c1
= (v1
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
1670 int c2
= (v2
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
1671 uint64_t l1
= c1
? v1
->c
.i
: 0;
1672 uint64_t l2
= c2
? v2
->c
.i
: 0;
1673 int shm
= (t1
== VT_LLONG
) ? 63 : 31;
1675 if (t1
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t1
!= VT_PTR
))
1676 l1
= ((uint32_t)l1
|
1677 (v1
->type
.t
& VT_UNSIGNED
? 0 : -(l1
& 0x80000000)));
1678 if (t2
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t2
!= VT_PTR
))
1679 l2
= ((uint32_t)l2
|
1680 (v2
->type
.t
& VT_UNSIGNED
? 0 : -(l2
& 0x80000000)));
1684 case '+': l1
+= l2
; break;
1685 case '-': l1
-= l2
; break;
1686 case '&': l1
&= l2
; break;
1687 case '^': l1
^= l2
; break;
1688 case '|': l1
|= l2
; break;
1689 case '*': l1
*= l2
; break;
1696 /* if division by zero, generate explicit division */
1699 tcc_error("division by zero in constant");
1703 default: l1
= gen_opic_sdiv(l1
, l2
); break;
1704 case '%': l1
= l1
- l2
* gen_opic_sdiv(l1
, l2
); break;
1705 case TOK_UDIV
: l1
= l1
/ l2
; break;
1706 case TOK_UMOD
: l1
= l1
% l2
; break;
1709 case TOK_SHL
: l1
<<= (l2
& shm
); break;
1710 case TOK_SHR
: l1
>>= (l2
& shm
); break;
1712 l1
= (l1
>> 63) ? ~(~l1
>> (l2
& shm
)) : l1
>> (l2
& shm
);
1715 case TOK_ULT
: l1
= l1
< l2
; break;
1716 case TOK_UGE
: l1
= l1
>= l2
; break;
1717 case TOK_EQ
: l1
= l1
== l2
; break;
1718 case TOK_NE
: l1
= l1
!= l2
; break;
1719 case TOK_ULE
: l1
= l1
<= l2
; break;
1720 case TOK_UGT
: l1
= l1
> l2
; break;
1721 case TOK_LT
: l1
= gen_opic_lt(l1
, l2
); break;
1722 case TOK_GE
: l1
= !gen_opic_lt(l1
, l2
); break;
1723 case TOK_LE
: l1
= !gen_opic_lt(l2
, l1
); break;
1724 case TOK_GT
: l1
= gen_opic_lt(l2
, l1
); break;
1726 case TOK_LAND
: l1
= l1
&& l2
; break;
1727 case TOK_LOR
: l1
= l1
|| l2
; break;
1731 if (t1
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t1
!= VT_PTR
))
1732 l1
= ((uint32_t)l1
|
1733 (v1
->type
.t
& VT_UNSIGNED
? 0 : -(l1
& 0x80000000)));
1737 /* if commutative ops, put c2 as constant */
1738 if (c1
&& (op
== '+' || op
== '&' || op
== '^' ||
1739 op
== '|' || op
== '*')) {
1741 c2
= c1
; //c = c1, c1 = c2, c2 = c;
1742 l2
= l1
; //l = l1, l1 = l2, l2 = l;
1744 if (!const_wanted
&&
1746 (op
== TOK_SHL
|| op
== TOK_SHR
|| op
== TOK_SAR
)) ||
1747 (l1
== -1 && op
== TOK_SAR
))) {
1748 /* treat (0 << x), (0 >> x) and (-1 >> x) as constant */
1750 } else if (!const_wanted
&&
1751 c2
&& ((l2
== 0 && (op
== '&' || op
== '*')) ||
1752 (l2
== -1 && op
== '|') ||
1753 (l2
== 0xffffffff && t2
!= VT_LLONG
&& op
== '|') ||
1754 (l2
== 1 && (op
== '%' || op
== TOK_UMOD
)))) {
1755 /* treat (x & 0), (x * 0), (x | -1) and (x % 1) as constant */
1760 } else if (c2
&& (((op
== '*' || op
== '/' || op
== TOK_UDIV
||
1763 ((op
== '+' || op
== '-' || op
== '|' || op
== '^' ||
1764 op
== TOK_SHL
|| op
== TOK_SHR
|| op
== TOK_SAR
) &&
1768 /* filter out NOP operations like x*1, x-0, x&-1... */
1770 } else if (c2
&& (op
== '*' || op
== TOK_PDIV
|| op
== TOK_UDIV
)) {
1771 /* try to use shifts instead of muls or divs */
1772 if (l2
> 0 && (l2
& (l2
- 1)) == 0) {
1781 else if (op
== TOK_PDIV
)
1787 } else if (c2
&& (op
== '+' || op
== '-') &&
1788 (((vtop
[-1].r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == (VT_CONST
| VT_SYM
))
1789 || (vtop
[-1].r
& (VT_VALMASK
| VT_LVAL
)) == VT_LOCAL
)) {
1790 /* symbol + constant case */
1794 /* The backends can't always deal with addends to symbols
1795 larger than +-1<<31. Don't construct such. */
1802 /* call low level op generator */
1803 if (t1
== VT_LLONG
|| t2
== VT_LLONG
||
1804 (PTR_SIZE
== 8 && (t1
== VT_PTR
|| t2
== VT_PTR
)))
1812 /* generate a floating point operation with constant propagation */
1813 static void gen_opif(int op
)
1821 /* currently, we cannot do computations with forward symbols */
1822 c1
= (v1
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
1823 c2
= (v2
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
1825 if (v1
->type
.t
== VT_FLOAT
) {
1828 } else if (v1
->type
.t
== VT_DOUBLE
) {
1836 /* NOTE: we only do constant propagation if finite number (not
1837 NaN or infinity) (ANSI spec) */
1838 if (!ieee_finite(f1
) || !ieee_finite(f2
))
1842 case '+': f1
+= f2
; break;
1843 case '-': f1
-= f2
; break;
1844 case '*': f1
*= f2
; break;
1848 tcc_error("division by zero in constant");
1853 /* XXX: also handles tests ? */
1857 /* XXX: overflow test ? */
1858 if (v1
->type
.t
== VT_FLOAT
) {
1860 } else if (v1
->type
.t
== VT_DOUBLE
) {
1872 static int pointed_size(CType
*type
)
1875 return type_size(pointed_type(type
), &align
);
1878 static void vla_runtime_pointed_size(CType
*type
)
1881 vla_runtime_type_size(pointed_type(type
), &align
);
1884 static inline int is_null_pointer(SValue
*p
)
1886 if ((p
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) != VT_CONST
)
1888 return ((p
->type
.t
& VT_BTYPE
) == VT_INT
&& (uint32_t)p
->c
.i
== 0) ||
1889 ((p
->type
.t
& VT_BTYPE
) == VT_LLONG
&& p
->c
.i
== 0) ||
1890 ((p
->type
.t
& VT_BTYPE
) == VT_PTR
&&
1891 (PTR_SIZE
== 4 ? (uint32_t)p
->c
.i
== 0 : p
->c
.i
== 0));
1894 static inline int is_integer_btype(int bt
)
1896 return (bt
== VT_BYTE
|| bt
== VT_SHORT
||
1897 bt
== VT_INT
|| bt
== VT_LLONG
);
1900 /* check types for comparison or subtraction of pointers */
1901 static void check_comparison_pointer_types(SValue
*p1
, SValue
*p2
, int op
)
1903 CType
*type1
, *type2
, tmp_type1
, tmp_type2
;
1906 /* null pointers are accepted for all comparisons as gcc */
1907 if (is_null_pointer(p1
) || is_null_pointer(p2
))
1911 bt1
= type1
->t
& VT_BTYPE
;
1912 bt2
= type2
->t
& VT_BTYPE
;
1913 /* accept comparison between pointer and integer with a warning */
1914 if ((is_integer_btype(bt1
) || is_integer_btype(bt2
)) && op
!= '-') {
1915 if (op
!= TOK_LOR
&& op
!= TOK_LAND
)
1916 tcc_warning("comparison between pointer and integer");
1920 /* both must be pointers or implicit function pointers */
1921 if (bt1
== VT_PTR
) {
1922 type1
= pointed_type(type1
);
1923 } else if (bt1
!= VT_FUNC
)
1924 goto invalid_operands
;
1926 if (bt2
== VT_PTR
) {
1927 type2
= pointed_type(type2
);
1928 } else if (bt2
!= VT_FUNC
) {
1930 tcc_error("invalid operands to binary %s", get_tok_str(op
, NULL
));
1932 if ((type1
->t
& VT_BTYPE
) == VT_VOID
||
1933 (type2
->t
& VT_BTYPE
) == VT_VOID
)
1937 tmp_type1
.t
&= ~(VT_DEFSIGN
| VT_UNSIGNED
| VT_CONSTANT
| VT_VOLATILE
);
1938 tmp_type2
.t
&= ~(VT_DEFSIGN
| VT_UNSIGNED
| VT_CONSTANT
| VT_VOLATILE
);
1939 if (!is_compatible_types(&tmp_type1
, &tmp_type2
)) {
1940 /* gcc-like error if '-' is used */
1942 goto invalid_operands
;
1944 tcc_warning("comparison of distinct pointer types lacks a cast");
1948 /* generic gen_op: handles types problems */
1949 ST_FUNC
void gen_op(int op
)
1951 int u
, t1
, t2
, bt1
, bt2
, t
;
1955 t1
= vtop
[-1].type
.t
;
1956 t2
= vtop
[0].type
.t
;
1957 bt1
= t1
& VT_BTYPE
;
1958 bt2
= t2
& VT_BTYPE
;
1960 if (bt1
== VT_STRUCT
|| bt2
== VT_STRUCT
) {
1961 tcc_error("operation on a struct");
1962 } else if (bt1
== VT_FUNC
|| bt2
== VT_FUNC
) {
1963 if (bt2
== VT_FUNC
) {
1964 mk_pointer(&vtop
->type
);
1967 if (bt1
== VT_FUNC
) {
1969 mk_pointer(&vtop
->type
);
1974 } else if (bt1
== VT_PTR
|| bt2
== VT_PTR
) {
1975 /* at least one operand is a pointer */
1976 /* relational op: must be both pointers */
1977 if (op
>= TOK_ULT
&& op
<= TOK_LOR
) {
1978 check_comparison_pointer_types(vtop
- 1, vtop
, op
);
1979 /* pointers are handled are unsigned */
1980 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
1981 t
= VT_LLONG
| VT_UNSIGNED
;
1983 t
= VT_INT
| VT_UNSIGNED
;
1987 /* if both pointers, then it must be the '-' op */
1988 if (bt1
== VT_PTR
&& bt2
== VT_PTR
) {
1990 tcc_error("cannot use pointers here");
1991 check_comparison_pointer_types(vtop
- 1, vtop
, op
);
1992 /* XXX: check that types are compatible */
1993 if (vtop
[-1].type
.t
& VT_VLA
) {
1994 vla_runtime_pointed_size(&vtop
[-1].type
);
1996 vpushi(pointed_size(&vtop
[-1].type
));
2000 /* set to integer type */
2001 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
2002 vtop
->type
.t
= VT_LLONG
;
2004 vtop
->type
.t
= VT_INT
;
2009 /* exactly one pointer : must be '+' or '-'. */
2010 if (op
!= '-' && op
!= '+')
2011 tcc_error("cannot use pointers here");
2012 /* Put pointer as first operand */
2013 if (bt2
== VT_PTR
) {
2015 t
= t1
, t1
= t2
, t2
= t
;
2018 if ((vtop
[0].type
.t
& VT_BTYPE
) == VT_LLONG
)
2019 /* XXX: truncate here because gen_opl can't handle ptr + long long */
2020 gen_cast(&int_type
);
2022 type1
= vtop
[-1].type
;
2023 type1
.t
&= ~VT_ARRAY
;
2024 if (vtop
[-1].type
.t
& VT_VLA
)
2025 vla_runtime_pointed_size(&vtop
[-1].type
);
2027 u
= pointed_size(&vtop
[-1].type
);
2029 tcc_error("unknown array element size");
2030 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
2033 /* XXX: cast to int ? (long long case) */
2039 /* #ifdef CONFIG_TCC_BCHECK
2040 The main reason to removing this code:
2047 fprintf(stderr, "v+i-j = %p\n", v+i-j);
2048 fprintf(stderr, "v+(i-j) = %p\n", v+(i-j));
2050 When this code is on. then the output looks like
2052 v+(i-j) = 0xbff84000
2054 /* if evaluating constant expression, no code should be
2055 generated, so no bound check */
2056 if (tcc_state
->do_bounds_check
&& !const_wanted
) {
2057 /* if bounded pointers, we generate a special code to
2064 gen_bounded_ptr_add();
2070 /* put again type if gen_opic() swaped operands */
2073 } else if (is_float(bt1
) || is_float(bt2
)) {
2074 /* compute bigger type and do implicit casts */
2075 if (bt1
== VT_LDOUBLE
|| bt2
== VT_LDOUBLE
) {
2077 } else if (bt1
== VT_DOUBLE
|| bt2
== VT_DOUBLE
) {
2082 /* floats can only be used for a few operations */
2083 if (op
!= '+' && op
!= '-' && op
!= '*' && op
!= '/' &&
2084 (op
< TOK_ULT
|| op
> TOK_GT
))
2085 tcc_error("invalid operands for binary operation");
2087 } else if (op
== TOK_SHR
|| op
== TOK_SAR
|| op
== TOK_SHL
) {
2088 t
= bt1
== VT_LLONG
? VT_LLONG
: VT_INT
;
2089 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (t
| VT_UNSIGNED
))
2092 } else if (bt1
== VT_LLONG
|| bt2
== VT_LLONG
) {
2093 /* cast to biggest op */
2095 /* convert to unsigned if it does not fit in a long long */
2096 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
) ||
2097 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
))
2101 /* integer operations */
2103 /* convert to unsigned if it does not fit in an integer */
2104 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
) ||
2105 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
))
2108 /* XXX: currently, some unsigned operations are explicit, so
2109 we modify them here */
2110 if (t
& VT_UNSIGNED
) {
2117 else if (op
== TOK_LT
)
2119 else if (op
== TOK_GT
)
2121 else if (op
== TOK_LE
)
2123 else if (op
== TOK_GE
)
2130 /* special case for shifts and long long: we keep the shift as
2132 if (op
== TOK_SHR
|| op
== TOK_SAR
|| op
== TOK_SHL
)
2139 if (op
>= TOK_ULT
&& op
<= TOK_GT
) {
2140 /* relational op: the result is an int */
2141 vtop
->type
.t
= VT_INT
;
2146 // Make sure that we have converted to an rvalue:
2147 if (vtop
->r
& VT_LVAL
)
2148 gv(is_float(vtop
->type
.t
& VT_BTYPE
) ? RC_FLOAT
: RC_INT
);
2151 #ifndef TCC_TARGET_ARM
2152 /* generic itof for unsigned long long case */
2153 static void gen_cvt_itof1(int t
)
2155 #ifdef TCC_TARGET_ARM64
2158 if ((vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
)) ==
2159 (VT_LLONG
| VT_UNSIGNED
)) {
2162 vpush_global_sym(&func_old_type
, TOK___floatundisf
);
2163 #if LDOUBLE_SIZE != 8
2164 else if (t
== VT_LDOUBLE
)
2165 vpush_global_sym(&func_old_type
, TOK___floatundixf
);
2168 vpush_global_sym(&func_old_type
, TOK___floatundidf
);
2172 vtop
->r
= reg_fret(t
);
2180 /* generic ftoi for unsigned long long case */
2181 static void gen_cvt_ftoi1(int t
)
2183 #ifdef TCC_TARGET_ARM64
2188 if (t
== (VT_LLONG
| VT_UNSIGNED
)) {
2189 /* not handled natively */
2190 st
= vtop
->type
.t
& VT_BTYPE
;
2192 vpush_global_sym(&func_old_type
, TOK___fixunssfdi
);
2193 #if LDOUBLE_SIZE != 8
2194 else if (st
== VT_LDOUBLE
)
2195 vpush_global_sym(&func_old_type
, TOK___fixunsxfdi
);
2198 vpush_global_sym(&func_old_type
, TOK___fixunsdfdi
);
2203 vtop
->r2
= REG_LRET
;
2210 /* force char or short cast */
2211 static void force_charshort_cast(int t
)
2215 /* XXX: add optimization if lvalue : just change type and offset */
2220 if (t
& VT_UNSIGNED
) {
2221 vpushi((1 << bits
) - 1);
2224 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
)
2230 /* result must be signed or the SAR is converted to an SHL
2231 This was not the case when "t" was a signed short
2232 and the last value on the stack was an unsigned int */
2233 vtop
->type
.t
&= ~VT_UNSIGNED
;
2239 /* cast 'vtop' to 'type'. Casting to bitfields is forbidden. */
2240 static void gen_cast(CType
*type
)
2242 int sbt
, dbt
, sf
, df
, c
, p
;
2244 /* special delayed cast for char/short */
2245 /* XXX: in some cases (multiple cascaded casts), it may still
2247 if (vtop
->r
& VT_MUSTCAST
) {
2248 vtop
->r
&= ~VT_MUSTCAST
;
2249 force_charshort_cast(vtop
->type
.t
);
2252 /* bitfields first get cast to ints */
2253 if (vtop
->type
.t
& VT_BITFIELD
) {
2257 dbt
= type
->t
& (VT_BTYPE
| VT_UNSIGNED
);
2258 sbt
= vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
);
2263 c
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2264 p
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == (VT_CONST
| VT_SYM
);
2266 /* constant case: we can do it now */
2267 /* XXX: in ISOC, cannot do it if error in convert */
2268 if (sbt
== VT_FLOAT
)
2269 vtop
->c
.ld
= vtop
->c
.f
;
2270 else if (sbt
== VT_DOUBLE
)
2271 vtop
->c
.ld
= vtop
->c
.d
;
2274 if ((sbt
& VT_BTYPE
) == VT_LLONG
) {
2275 if ((sbt
& VT_UNSIGNED
) || !(vtop
->c
.i
>> 63))
2276 vtop
->c
.ld
= vtop
->c
.i
;
2278 vtop
->c
.ld
= -(long double)-vtop
->c
.i
;
2280 if ((sbt
& VT_UNSIGNED
) || !(vtop
->c
.i
>> 31))
2281 vtop
->c
.ld
= (uint32_t)vtop
->c
.i
;
2283 vtop
->c
.ld
= -(long double)-(uint32_t)vtop
->c
.i
;
2286 if (dbt
== VT_FLOAT
)
2287 vtop
->c
.f
= (float)vtop
->c
.ld
;
2288 else if (dbt
== VT_DOUBLE
)
2289 vtop
->c
.d
= (double)vtop
->c
.ld
;
2290 } else if (sf
&& dbt
== (VT_LLONG
|VT_UNSIGNED
)) {
2291 vtop
->c
.i
= vtop
->c
.ld
;
2292 } else if (sf
&& dbt
== VT_BOOL
) {
2293 vtop
->c
.i
= (vtop
->c
.ld
!= 0);
2296 vtop
->c
.i
= vtop
->c
.ld
;
2297 else if (sbt
== (VT_LLONG
|VT_UNSIGNED
))
2299 else if (sbt
& VT_UNSIGNED
)
2300 vtop
->c
.i
= (uint32_t)vtop
->c
.i
;
2301 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
2302 else if (sbt
== VT_PTR
)
2305 else if (sbt
!= VT_LLONG
)
2306 vtop
->c
.i
= ((uint32_t)vtop
->c
.i
|
2307 -(vtop
->c
.i
& 0x80000000));
2309 if (dbt
== (VT_LLONG
|VT_UNSIGNED
))
2311 else if (dbt
== VT_BOOL
)
2312 vtop
->c
.i
= (vtop
->c
.i
!= 0);
2313 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
2314 else if (dbt
== VT_PTR
)
2317 else if (dbt
!= VT_LLONG
) {
2318 uint32_t m
= ((dbt
& VT_BTYPE
) == VT_BYTE
? 0xff :
2319 (dbt
& VT_BTYPE
) == VT_SHORT
? 0xffff :
2322 if (!(dbt
& VT_UNSIGNED
))
2323 vtop
->c
.i
|= -(vtop
->c
.i
& ((m
>> 1) + 1));
2326 } else if (p
&& dbt
== VT_BOOL
) {
2330 /* non constant case: generate code */
2332 /* convert from fp to fp */
2335 /* convert int to fp */
2338 /* convert fp to int */
2339 if (dbt
== VT_BOOL
) {
2343 /* we handle char/short/etc... with generic code */
2344 if (dbt
!= (VT_INT
| VT_UNSIGNED
) &&
2345 dbt
!= (VT_LLONG
| VT_UNSIGNED
) &&
2349 if (dbt
== VT_INT
&& (type
->t
& (VT_BTYPE
| VT_UNSIGNED
)) != dbt
) {
2350 /* additional cast for char/short... */
2355 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
2356 } else if ((dbt
& VT_BTYPE
) == VT_LLONG
) {
2357 if ((sbt
& VT_BTYPE
) != VT_LLONG
) {
2358 /* scalar to long long */
2359 /* machine independent conversion */
2361 /* generate high word */
2362 if (sbt
== (VT_INT
| VT_UNSIGNED
)) {
2366 if (sbt
== VT_PTR
) {
2367 /* cast from pointer to int before we apply
2368 shift operation, which pointers don't support*/
2369 gen_cast(&int_type
);
2375 /* patch second register */
2376 vtop
[-1].r2
= vtop
->r
;
2380 } else if ((dbt
& VT_BTYPE
) == VT_LLONG
||
2381 (dbt
& VT_BTYPE
) == VT_PTR
||
2382 (dbt
& VT_BTYPE
) == VT_FUNC
) {
2383 if ((sbt
& VT_BTYPE
) != VT_LLONG
&&
2384 (sbt
& VT_BTYPE
) != VT_PTR
&&
2385 (sbt
& VT_BTYPE
) != VT_FUNC
) {
2386 /* need to convert from 32bit to 64bit */
2388 if (sbt
!= (VT_INT
| VT_UNSIGNED
)) {
2389 #if defined(TCC_TARGET_ARM64)
2391 #elif defined(TCC_TARGET_X86_64)
2393 /* x86_64 specific: movslq */
2395 o(0xc0 + (REG_VALUE(r
) << 3) + REG_VALUE(r
));
2402 } else if (dbt
== VT_BOOL
) {
2403 /* scalar to bool */
2406 } else if ((dbt
& VT_BTYPE
) == VT_BYTE
||
2407 (dbt
& VT_BTYPE
) == VT_SHORT
) {
2408 if (sbt
== VT_PTR
) {
2409 vtop
->type
.t
= VT_INT
;
2410 tcc_warning("nonportable conversion from pointer to char/short");
2412 force_charshort_cast(dbt
);
2413 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
2414 } else if ((dbt
& VT_BTYPE
) == VT_INT
) {
2416 if ((sbt
& VT_BTYPE
) == VT_LLONG
) {
2417 /* from long long: just take low order word */
2421 /* if lvalue and single word type, nothing to do because
2422 the lvalue already contains the real type size (see
2423 VT_LVAL_xxx constants) */
2427 } else if ((dbt
& VT_BTYPE
) == VT_PTR
&& !(vtop
->r
& VT_LVAL
)) {
2428 /* if we are casting between pointer types,
2429 we must update the VT_LVAL_xxx size */
2430 vtop
->r
= (vtop
->r
& ~VT_LVAL_TYPE
)
2431 | (lvalue_type(type
->ref
->type
.t
) & VT_LVAL_TYPE
);
2436 /* return type size as known at compile time. Put alignment at 'a' */
2437 ST_FUNC
int type_size(CType
*type
, int *a
)
2442 bt
= type
->t
& VT_BTYPE
;
2443 if (bt
== VT_STRUCT
) {
2448 } else if (bt
== VT_PTR
) {
2449 if (type
->t
& VT_ARRAY
) {
2453 ts
= type_size(&s
->type
, a
);
2455 if (ts
< 0 && s
->c
< 0)
2463 } else if (bt
== VT_LDOUBLE
) {
2465 return LDOUBLE_SIZE
;
2466 } else if (bt
== VT_DOUBLE
|| bt
== VT_LLONG
) {
2467 #ifdef TCC_TARGET_I386
2468 #ifdef TCC_TARGET_PE
2473 #elif defined(TCC_TARGET_ARM)
2483 } else if (bt
== VT_INT
|| bt
== VT_FLOAT
) {
2486 } else if (bt
== VT_SHORT
) {
2489 } else if (bt
== VT_QLONG
|| bt
== VT_QFLOAT
) {
2492 } else if (bt
== VT_ENUM
) {
2494 /* Enums might be incomplete, so don't just return '4' here. */
2495 return type
->ref
->c
;
2497 /* char, void, function, _Bool */
2503 /* push type size as known at runtime time on top of value stack. Put
2505 ST_FUNC
void vla_runtime_type_size(CType
*type
, int *a
)
2507 if (type
->t
& VT_VLA
) {
2508 type_size(&type
->ref
->type
, a
);
2509 vset(&int_type
, VT_LOCAL
|VT_LVAL
, type
->ref
->c
);
2511 vpushi(type_size(type
, a
));
2515 static void vla_sp_restore(void) {
2516 if (vlas_in_scope
) {
2517 gen_vla_sp_restore(vla_sp_loc
);
2521 static void vla_sp_restore_root(void) {
2522 if (vlas_in_scope
) {
2523 gen_vla_sp_restore(vla_sp_root_loc
);
2527 /* return the pointed type of t */
2528 static inline CType
*pointed_type(CType
*type
)
2530 return &type
->ref
->type
;
2533 /* modify type so that its it is a pointer to type. */
2534 ST_FUNC
void mk_pointer(CType
*type
)
2537 s
= sym_push(SYM_FIELD
, type
, 0, -1);
2538 type
->t
= VT_PTR
| (type
->t
& ~VT_TYPE
);
2542 /* compare function types. OLD functions match any new functions */
2543 static int is_compatible_func(CType
*type1
, CType
*type2
)
2549 if (!is_compatible_types(&s1
->type
, &s2
->type
))
2551 /* check func_call */
2552 if (s1
->a
.func_call
!= s2
->a
.func_call
)
2554 /* XXX: not complete */
2555 if (s1
->c
== FUNC_OLD
|| s2
->c
== FUNC_OLD
)
2559 while (s1
!= NULL
) {
2562 if (!is_compatible_parameter_types(&s1
->type
, &s2
->type
))
2572 /* return true if type1 and type2 are the same. If unqualified is
2573 true, qualifiers on the types are ignored.
2575 - enums are not checked as gcc __builtin_types_compatible_p ()
2577 static int compare_types(CType
*type1
, CType
*type2
, int unqualified
)
2581 t1
= type1
->t
& VT_TYPE
;
2582 t2
= type2
->t
& VT_TYPE
;
2584 /* strip qualifiers before comparing */
2585 t1
&= ~(VT_CONSTANT
| VT_VOLATILE
);
2586 t2
&= ~(VT_CONSTANT
| VT_VOLATILE
);
2588 /* Default Vs explicit signedness only matters for char */
2589 if ((t1
& VT_BTYPE
) != VT_BYTE
) {
2593 /* An enum is compatible with (unsigned) int. Ideally we would
2594 store the enums signedness in type->ref.a.<some_bit> and
2595 only accept unsigned enums with unsigned int and vice versa.
2596 But one of our callers (gen_assign_cast) always strips VT_UNSIGNED
2597 from pointer target types, so we can't add it here either. */
2598 if ((t1
& VT_BTYPE
) == VT_ENUM
) {
2600 if (type1
->ref
->a
.unsigned_enum
)
2603 if ((t2
& VT_BTYPE
) == VT_ENUM
) {
2605 if (type2
->ref
->a
.unsigned_enum
)
2608 /* XXX: bitfields ? */
2611 /* test more complicated cases */
2612 bt1
= t1
& VT_BTYPE
;
2613 if (bt1
== VT_PTR
) {
2614 type1
= pointed_type(type1
);
2615 type2
= pointed_type(type2
);
2616 return is_compatible_types(type1
, type2
);
2617 } else if (bt1
== VT_STRUCT
) {
2618 return (type1
->ref
== type2
->ref
);
2619 } else if (bt1
== VT_FUNC
) {
2620 return is_compatible_func(type1
, type2
);
2626 /* return true if type1 and type2 are exactly the same (including
2629 static int is_compatible_types(CType
*type1
, CType
*type2
)
2631 return compare_types(type1
,type2
,0);
2634 /* return true if type1 and type2 are the same (ignoring qualifiers).
2636 static int is_compatible_parameter_types(CType
*type1
, CType
*type2
)
2638 return compare_types(type1
,type2
,1);
2641 /* print a type. If 'varstr' is not NULL, then the variable is also
2642 printed in the type */
2644 /* XXX: add array and function pointers */
2645 static void type_to_str(char *buf
, int buf_size
,
2646 CType
*type
, const char *varstr
)
2656 if (t
& VT_CONSTANT
)
2657 pstrcat(buf
, buf_size
, "const ");
2658 if (t
& VT_VOLATILE
)
2659 pstrcat(buf
, buf_size
, "volatile ");
2660 if ((t
& (VT_DEFSIGN
| VT_UNSIGNED
)) == (VT_DEFSIGN
| VT_UNSIGNED
))
2661 pstrcat(buf
, buf_size
, "unsigned ");
2662 else if (t
& VT_DEFSIGN
)
2663 pstrcat(buf
, buf_size
, "signed ");
2665 pstrcat(buf
, buf_size
, "extern ");
2667 pstrcat(buf
, buf_size
, "static ");
2669 pstrcat(buf
, buf_size
, "typedef ");
2671 pstrcat(buf
, buf_size
, "inline ");
2672 buf_size
-= strlen(buf
);
2703 tstr
= "long double";
2705 pstrcat(buf
, buf_size
, tstr
);
2709 if (bt
== VT_STRUCT
)
2713 pstrcat(buf
, buf_size
, tstr
);
2714 v
= type
->ref
->v
& ~SYM_STRUCT
;
2715 if (v
>= SYM_FIRST_ANOM
)
2716 pstrcat(buf
, buf_size
, "<anonymous>");
2718 pstrcat(buf
, buf_size
, get_tok_str(v
, NULL
));
2722 type_to_str(buf
, buf_size
, &s
->type
, varstr
);
2723 pstrcat(buf
, buf_size
, "(");
2725 while (sa
!= NULL
) {
2726 type_to_str(buf1
, sizeof(buf1
), &sa
->type
, NULL
);
2727 pstrcat(buf
, buf_size
, buf1
);
2730 pstrcat(buf
, buf_size
, ", ");
2732 pstrcat(buf
, buf_size
, ")");
2737 snprintf(buf1
, sizeof(buf1
), "%s[%ld]", varstr
? varstr
: "", s
->c
);
2738 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
2741 pstrcpy(buf1
, sizeof(buf1
), "*");
2742 if (t
& VT_CONSTANT
)
2743 pstrcat(buf1
, buf_size
, "const ");
2744 if (t
& VT_VOLATILE
)
2745 pstrcat(buf1
, buf_size
, "volatile ");
2747 pstrcat(buf1
, sizeof(buf1
), varstr
);
2748 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
2752 pstrcat(buf
, buf_size
, " ");
2753 pstrcat(buf
, buf_size
, varstr
);
2758 /* verify type compatibility to store vtop in 'dt' type, and generate
2760 static void gen_assign_cast(CType
*dt
)
2762 CType
*st
, *type1
, *type2
, tmp_type1
, tmp_type2
;
2763 char buf1
[256], buf2
[256];
2766 st
= &vtop
->type
; /* source type */
2767 dbt
= dt
->t
& VT_BTYPE
;
2768 sbt
= st
->t
& VT_BTYPE
;
2769 if (sbt
== VT_VOID
|| dbt
== VT_VOID
) {
2770 if (sbt
== VT_VOID
&& dbt
== VT_VOID
)
2772 It is Ok if both are void
2778 gcc accepts this program
2781 tcc_error("cannot cast from/to void");
2783 if (dt
->t
& VT_CONSTANT
)
2784 tcc_warning("assignment of read-only location");
2787 /* special cases for pointers */
2788 /* '0' can also be a pointer */
2789 if (is_null_pointer(vtop
))
2791 /* accept implicit pointer to integer cast with warning */
2792 if (is_integer_btype(sbt
)) {
2793 tcc_warning("assignment makes pointer from integer without a cast");
2796 type1
= pointed_type(dt
);
2797 /* a function is implicitly a function pointer */
2798 if (sbt
== VT_FUNC
) {
2799 if ((type1
->t
& VT_BTYPE
) != VT_VOID
&&
2800 !is_compatible_types(pointed_type(dt
), st
))
2801 tcc_warning("assignment from incompatible pointer type");
2806 type2
= pointed_type(st
);
2807 if ((type1
->t
& VT_BTYPE
) == VT_VOID
||
2808 (type2
->t
& VT_BTYPE
) == VT_VOID
) {
2809 /* void * can match anything */
2811 /* exact type match, except for qualifiers */
2814 tmp_type1
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
2815 tmp_type2
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
2816 if (!is_compatible_types(&tmp_type1
, &tmp_type2
)) {
2817 /* Like GCC don't warn by default for merely changes
2818 in pointer target signedness. Do warn for different
2819 base types, though, in particular for unsigned enums
2820 and signed int targets. */
2821 if ((tmp_type1
.t
& (VT_DEFSIGN
| VT_UNSIGNED
)) !=
2822 (tmp_type2
.t
& (VT_DEFSIGN
| VT_UNSIGNED
)) &&
2823 (tmp_type1
.t
& VT_BTYPE
) == (tmp_type2
.t
& VT_BTYPE
))
2826 tcc_warning("assignment from incompatible pointer type");
2829 /* check const and volatile */
2830 if ((!(type1
->t
& VT_CONSTANT
) && (type2
->t
& VT_CONSTANT
)) ||
2831 (!(type1
->t
& VT_VOLATILE
) && (type2
->t
& VT_VOLATILE
)))
2832 tcc_warning("assignment discards qualifiers from pointer target type");
2838 if (sbt
== VT_PTR
|| sbt
== VT_FUNC
) {
2839 tcc_warning("assignment makes integer from pointer without a cast");
2840 } else if (sbt
== VT_STRUCT
) {
2841 goto case_VT_STRUCT
;
2843 /* XXX: more tests */
2849 tmp_type1
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
2850 tmp_type2
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
2851 if (!is_compatible_types(&tmp_type1
, &tmp_type2
)) {
2853 type_to_str(buf1
, sizeof(buf1
), st
, NULL
);
2854 type_to_str(buf2
, sizeof(buf2
), dt
, NULL
);
2855 tcc_error("cannot cast '%s' to '%s'", buf1
, buf2
);
2863 /* store vtop in lvalue pushed on stack */
2864 ST_FUNC
void vstore(void)
2866 int sbt
, dbt
, ft
, r
, t
, size
, align
, bit_size
, bit_pos
, rc
, delayed_cast
;
2868 ft
= vtop
[-1].type
.t
;
2869 sbt
= vtop
->type
.t
& VT_BTYPE
;
2870 dbt
= ft
& VT_BTYPE
;
2871 if ((((sbt
== VT_INT
|| sbt
== VT_SHORT
) && dbt
== VT_BYTE
) ||
2872 (sbt
== VT_INT
&& dbt
== VT_SHORT
))
2873 && !(vtop
->type
.t
& VT_BITFIELD
)) {
2874 /* optimize char/short casts */
2875 delayed_cast
= VT_MUSTCAST
;
2876 vtop
->type
.t
= (ft
& VT_TYPE
& ~VT_BITFIELD
&
2877 ((1 << VT_STRUCT_SHIFT
) - 1));
2878 /* XXX: factorize */
2879 if (ft
& VT_CONSTANT
)
2880 tcc_warning("assignment of read-only location");
2883 if (!(ft
& VT_BITFIELD
))
2884 gen_assign_cast(&vtop
[-1].type
);
2887 if (sbt
== VT_STRUCT
) {
2888 /* if structure, only generate pointer */
2889 /* structure assignment : generate memcpy */
2890 /* XXX: optimize if small size */
2891 size
= type_size(&vtop
->type
, &align
);
2895 vtop
->type
.t
= VT_PTR
;
2898 /* address of memcpy() */
2901 vpush_global_sym(&func_old_type
, TOK_memcpy8
);
2902 else if(!(align
& 3))
2903 vpush_global_sym(&func_old_type
, TOK_memcpy4
);
2906 /* Use memmove, rather than memcpy, as dest and src may be same: */
2907 vpush_global_sym(&func_old_type
, TOK_memmove
);
2912 vtop
->type
.t
= VT_PTR
;
2918 /* leave source on stack */
2919 } else if (ft
& VT_BITFIELD
) {
2920 /* bitfield store handling */
2922 /* save lvalue as expression result (example: s.b = s.a = n;) */
2923 vdup(), vtop
[-1] = vtop
[-2];
2925 bit_pos
= (ft
>> VT_STRUCT_SHIFT
) & 0x3f;
2926 bit_size
= (ft
>> (VT_STRUCT_SHIFT
+ 6)) & 0x3f;
2927 /* remove bit field info to avoid loops */
2928 vtop
[-1].type
.t
= ft
& ~VT_BITFIELD
& ((1 << VT_STRUCT_SHIFT
) - 1);
2930 if((ft
& VT_BTYPE
) == VT_BOOL
) {
2931 gen_cast(&vtop
[-1].type
);
2932 vtop
[-1].type
.t
= (vtop
[-1].type
.t
& ~VT_BTYPE
) | (VT_BYTE
| VT_UNSIGNED
);
2935 /* duplicate destination */
2937 vtop
[-1] = vtop
[-2];
2939 /* mask and shift source */
2940 if((ft
& VT_BTYPE
) != VT_BOOL
) {
2941 if((ft
& VT_BTYPE
) == VT_LLONG
) {
2942 vpushll((1ULL << bit_size
) - 1ULL);
2944 vpushi((1 << bit_size
) - 1);
2950 /* load destination, mask and or with source */
2952 if((ft
& VT_BTYPE
) == VT_LLONG
) {
2953 vpushll(~(((1ULL << bit_size
) - 1ULL) << bit_pos
));
2955 vpushi(~(((1 << bit_size
) - 1) << bit_pos
));
2961 /* ... and discard */
2965 #ifdef CONFIG_TCC_BCHECK
2966 /* bound check case */
2967 if (vtop
[-1].r
& VT_MUSTBOUND
) {
2976 #ifdef TCC_TARGET_X86_64
2977 if ((ft
& VT_BTYPE
) == VT_LDOUBLE
) {
2979 } else if ((ft
& VT_BTYPE
) == VT_QFLOAT
) {
2984 r
= gv(rc
); /* generate value */
2985 /* if lvalue was saved on stack, must read it */
2986 if ((vtop
[-1].r
& VT_VALMASK
) == VT_LLOCAL
) {
2988 t
= get_reg(RC_INT
);
2989 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
2994 sv
.r
= VT_LOCAL
| VT_LVAL
;
2995 sv
.c
.i
= vtop
[-1].c
.i
;
2997 vtop
[-1].r
= t
| VT_LVAL
;
2999 /* two word case handling : store second register at word + 4 (or +8 for x86-64) */
3000 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
3001 if (((ft
& VT_BTYPE
) == VT_QLONG
) || ((ft
& VT_BTYPE
) == VT_QFLOAT
)) {
3002 int addr_type
= VT_LLONG
, load_size
= 8, load_type
= ((vtop
->type
.t
& VT_BTYPE
) == VT_QLONG
) ? VT_LLONG
: VT_DOUBLE
;
3004 if ((ft
& VT_BTYPE
) == VT_LLONG
) {
3005 int addr_type
= VT_INT
, load_size
= 4, load_type
= VT_INT
;
3007 vtop
[-1].type
.t
= load_type
;
3010 /* convert to int to increment easily */
3011 vtop
->type
.t
= addr_type
;
3017 vtop
[-1].type
.t
= load_type
;
3018 /* XXX: it works because r2 is spilled last ! */
3019 store(vtop
->r2
, vtop
- 1);
3025 vtop
--; /* NOT vpop() because on x86 it would flush the fp stack */
3026 vtop
->r
|= delayed_cast
;
3030 /* post defines POST/PRE add. c is the token ++ or -- */
3031 ST_FUNC
void inc(int post
, int c
)
3034 vdup(); /* save lvalue */
3036 gv_dup(); /* duplicate value */
3041 vpushi(c
- TOK_MID
);
3043 vstore(); /* store value */
3045 vpop(); /* if post op, return saved value */
3048 ST_FUNC
void parse_mult_str (CString
*astr
, const char *msg
)
3050 /* read the string */
3054 while (tok
== TOK_STR
) {
3055 /* XXX: add \0 handling too ? */
3056 cstr_cat(astr
, tokc
.str
.data
, -1);
3059 cstr_ccat(astr
, '\0');
3062 /* If I is >= 1 and a power of two, returns log2(i)+1.
3063 If I is 0 returns 0. */
3064 static int exact_log2p1(int i
)
3069 for (ret
= 1; i
>= 1 << 8; ret
+= 8)
3080 /* Parse GNUC __attribute__ extension. Currently, the following
3081 extensions are recognized:
3082 - aligned(n) : set data/function alignment.
3083 - packed : force data alignment to 1
3084 - section(x) : generate data/code in this section.
3085 - unused : currently ignored, but may be used someday.
3086 - regparm(n) : pass function parameters in registers (i386 only)
3088 static void parse_attribute(AttributeDef
*ad
)
3093 while (tok
== TOK_ATTRIBUTE1
|| tok
== TOK_ATTRIBUTE2
) {
3097 while (tok
!= ')') {
3098 if (tok
< TOK_IDENT
)
3099 expect("attribute name");
3106 parse_mult_str(&astr
, "section name");
3107 ad
->section
= find_section(tcc_state
, (char *)astr
.data
);
3114 parse_mult_str(&astr
, "alias(\"target\")");
3115 ad
->alias_target
= /* save string as token, for later */
3116 tok_alloc((char*)astr
.data
, astr
.size
-1)->tok
;
3120 case TOK_VISIBILITY1
:
3121 case TOK_VISIBILITY2
:
3123 parse_mult_str(&astr
,
3124 "visibility(\"default|hidden|internal|protected\")");
3125 if (!strcmp (astr
.data
, "default"))
3126 ad
->a
.visibility
= STV_DEFAULT
;
3127 else if (!strcmp (astr
.data
, "hidden"))
3128 ad
->a
.visibility
= STV_HIDDEN
;
3129 else if (!strcmp (astr
.data
, "internal"))
3130 ad
->a
.visibility
= STV_INTERNAL
;
3131 else if (!strcmp (astr
.data
, "protected"))
3132 ad
->a
.visibility
= STV_PROTECTED
;
3134 expect("visibility(\"default|hidden|internal|protected\")");
3143 if (n
<= 0 || (n
& (n
- 1)) != 0)
3144 tcc_error("alignment must be a positive power of two");
3149 ad
->a
.aligned
= exact_log2p1(n
);
3150 if (n
!= 1 << (ad
->a
.aligned
- 1))
3151 tcc_error("alignment of %d is larger than implemented", n
);
3163 /* currently, no need to handle it because tcc does not
3164 track unused objects */
3168 /* currently, no need to handle it because tcc does not
3169 track unused objects */
3174 ad
->a
.func_call
= FUNC_CDECL
;
3179 ad
->a
.func_call
= FUNC_STDCALL
;
3181 #ifdef TCC_TARGET_I386
3191 ad
->a
.func_call
= FUNC_FASTCALL1
+ n
- 1;
3197 ad
->a
.func_call
= FUNC_FASTCALLW
;
3204 ad
->a
.mode
= VT_LLONG
+ 1;
3207 ad
->a
.mode
= VT_BYTE
+ 1;
3210 ad
->a
.mode
= VT_SHORT
+ 1;
3214 ad
->a
.mode
= VT_INT
+ 1;
3217 tcc_warning("__mode__(%s) not supported\n", get_tok_str(tok
, NULL
));
3224 ad
->a
.func_export
= 1;
3227 ad
->a
.func_import
= 1;
3230 if (tcc_state
->warn_unsupported
)
3231 tcc_warning("'%s' attribute ignored", get_tok_str(t
, NULL
));
3232 /* skip parameters */
3234 int parenthesis
= 0;
3238 else if (tok
== ')')
3241 } while (parenthesis
&& tok
!= -1);
3254 static Sym
* find_field (CType
*type
, int v
)
3258 while ((s
= s
->next
) != NULL
) {
3259 if ((s
->v
& SYM_FIELD
) &&
3260 (s
->type
.t
& VT_BTYPE
) == VT_STRUCT
&&
3261 (s
->v
& ~SYM_FIELD
) >= SYM_FIRST_ANOM
) {
3262 Sym
*ret
= find_field (&s
->type
, v
);
3272 static void struct_add_offset (Sym
*s
, int offset
)
3274 while ((s
= s
->next
) != NULL
) {
3275 if ((s
->v
& SYM_FIELD
) &&
3276 (s
->type
.t
& VT_BTYPE
) == VT_STRUCT
&&
3277 (s
->v
& ~SYM_FIELD
) >= SYM_FIRST_ANOM
) {
3278 struct_add_offset(s
->type
.ref
, offset
);
3284 static void struct_layout(CType
*type
, AttributeDef
*ad
)
3286 int align
, maxalign
, offset
, c
, bit_pos
, bt
, prevbt
, prev_bit_size
;
3287 int pcc
= !tcc_state
->ms_bitfields
;
3288 int packwarn
= tcc_state
->warn_gcc_compat
;
3289 int typealign
, bit_size
, size
;
3293 maxalign
= 1 << (ad
->a
.aligned
- 1);
3299 prevbt
= VT_STRUCT
; /* make it never match */
3303 for (f
= type
->ref
->next
; f
; f
= f
->next
) {
3304 size
= type_size(&f
->type
, &typealign
);
3305 if (f
->type
.t
& VT_BITFIELD
)
3306 bit_size
= (f
->type
.t
>> (VT_STRUCT_SHIFT
+ 6)) & 0x3f;
3309 if (bit_size
== 0 && pcc
) {
3310 /* Zero-width bit-fields in PCC mode aren't affected
3311 by any packing (attribute or pragma). */
3313 } else if (f
->r
> 1) {
3315 } else if (ad
->a
.packed
|| f
->r
== 1) {
3317 /* Packed fields or packed records don't let the base type
3318 influence the records type alignment. */
3323 if (type
->ref
->type
.t
!= TOK_STRUCT
) {
3324 if (pcc
&& bit_size
>= 0)
3325 size
= (bit_size
+ 7) >> 3;
3326 /* Bit position is already zero from our caller. */
3330 } else if (bit_size
< 0) {
3331 int addbytes
= pcc
? (bit_pos
+ 7) >> 3 : 0;
3334 c
= (c
+ addbytes
+ align
- 1) & -align
;
3340 /* A bit-field. Layout is more complicated. There are two
3341 options TCC implements: PCC compatible and MS compatible
3342 (PCC compatible is what GCC uses for almost all targets).
3343 In PCC layout the overall size of the struct (in c) is
3344 _excluding_ the current run of bit-fields (that is,
3345 there's at least additional bit_pos bits after c). In
3346 MS layout c does include the current run of bit-fields.
3348 This matters for calculating the natural alignment buckets
3351 /* 'align' will be used to influence records alignment,
3352 so it's the max of specified and type alignment, except
3353 in certain cases that depend on the mode. */
3354 if (align
< typealign
)
3357 /* In PCC layout a non-packed bit-field is placed adjacent
3358 to the preceding bit-fields, except if it would overflow
3359 its container (depending on base type) or it's a zero-width
3360 bit-field. Packed non-zero-width bit-fields always are
3362 int ofs
= (c
* 8 + bit_pos
) % (typealign
* 8);
3363 int ofs2
= ofs
+ bit_size
+ (typealign
* 8) - 1;
3364 if (bit_size
== 0 ||
3366 (ofs2
/ (typealign
* 8)) > (size
/typealign
))) {
3367 c
= (c
+ ((bit_pos
+ 7) >> 3) + typealign
- 1) & -typealign
;
3369 } else if (bit_pos
+ bit_size
> size
* 8) {
3372 if (bit_pos
+ bit_size
> size
* 8) {
3373 c
+= 1, bit_pos
= 0;
3374 if ((ad
->a
.packed
|| f
->r
) && packwarn
) {
3375 tcc_warning("struct layout not compatible with GCC (internal limitation)");
3381 /* In PCC layout named bit-fields influence the alignment
3382 of the containing struct using the base types alignment,
3383 except for packed fields (which here have correct
3384 align/typealign). */
3385 if ((f
->v
& SYM_FIRST_ANOM
))
3388 bt
= f
->type
.t
& VT_BTYPE
;
3389 if ((bit_pos
+ bit_size
> size
* 8) ||
3390 (bit_size
> 0) == (bt
!= prevbt
)) {
3391 c
= (c
+ typealign
- 1) & -typealign
;
3394 /* In MS bitfield mode a bit-field run always uses
3395 at least as many bits as the underlying type.
3396 To start a new run it's also required that this
3397 or the last bit-field had non-zero width. */
3398 if (bit_size
|| prev_bit_size
)
3401 /* In MS layout the records alignment is normally
3402 influenced by the field, except for a zero-width
3403 field at the start of a run (but by further zero-width
3404 fields it is again). */
3405 if (bit_size
== 0 && prevbt
!= bt
)
3408 prev_bit_size
= bit_size
;
3410 f
->type
.t
= (f
->type
.t
& ~(0x3f << VT_STRUCT_SHIFT
))
3411 | (bit_pos
<< VT_STRUCT_SHIFT
);
3412 bit_pos
+= bit_size
;
3413 if (pcc
&& bit_pos
>= size
* 8) {
3415 bit_pos
-= size
* 8;
3418 if (align
> maxalign
)
3421 printf("set field %s offset=%d",
3422 get_tok_str(f
->v
& ~SYM_FIELD
, NULL
), offset
);
3423 if (f
->type
.t
& VT_BITFIELD
) {
3424 printf(" pos=%d size=%d",
3425 (f
->type
.t
>> VT_STRUCT_SHIFT
) & 0x3f,
3426 (f
->type
.t
>> (VT_STRUCT_SHIFT
+ 6)) & 0x3f);
3431 if (f
->v
& SYM_FIRST_ANOM
&& (f
->type
.t
& VT_BTYPE
) == VT_STRUCT
) {
3433 /* An anonymous struct/union. Adjust member offsets
3434 to reflect the real offset of our containing struct.
3435 Also set the offset of this anon member inside
3436 the outer struct to be zero. Via this it
3437 works when accessing the field offset directly
3438 (from base object), as well as when recursing
3439 members in initializer handling. */
3440 int v2
= f
->type
.ref
->v
;
3441 if (!(v2
& SYM_FIELD
) &&
3442 (v2
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
3444 /* This happens only with MS extensions. The
3445 anon member has a named struct type, so it
3446 potentially is shared with other references.
3447 We need to unshare members so we can modify
3450 f
->type
.ref
= sym_push(anon_sym
++ | SYM_FIELD
,
3451 &f
->type
.ref
->type
, 0,
3453 pps
= &f
->type
.ref
->next
;
3454 while ((ass
= ass
->next
) != NULL
) {
3455 *pps
= sym_push(ass
->v
, &ass
->type
, 0, ass
->c
);
3456 pps
= &((*pps
)->next
);
3460 struct_add_offset(f
->type
.ref
, offset
);
3468 /* store size and alignment */
3469 type
->ref
->c
= (c
+ (pcc
? (bit_pos
+ 7) >> 3 : 0)
3470 + maxalign
- 1) & -maxalign
;
3471 type
->ref
->r
= maxalign
;
3472 if (offset
+ size
> type
->ref
->c
)
3473 tcc_warning("will touch memory past end of the struct (internal limitation)");
3476 /* enum/struct/union declaration. u is either VT_ENUM or VT_STRUCT */
3477 static void struct_decl(CType
*type
, AttributeDef
*ad
, int u
)
3479 int a
, v
, size
, align
, flexible
, alignoverride
;
3481 int bit_size
, bsize
, bt
;
3486 a
= tok
; /* save decl type */
3488 if (tok
== TOK_ATTRIBUTE1
|| tok
== TOK_ATTRIBUTE2
)
3489 parse_attribute(ad
);
3493 /* struct already defined ? return it */
3495 expect("struct/union/enum name");
3497 if (s
&& (s
->scope
== local_scope
|| (tok
!= '{' && tok
!= ';'))) {
3499 tcc_error("redefinition of '%s'", get_tok_str(v
, NULL
));
3505 /* Record the original enum/struct/union token. */
3508 /* we put an undefined size for struct/union */
3509 s
= sym_push(v
| SYM_STRUCT
, &type1
, 0, -1);
3510 s
->r
= 0; /* default alignment is zero as gcc */
3511 /* put struct/union/enum name in type */
3519 tcc_error("struct/union/enum already defined");
3520 /* cannot be empty */
3522 /* non empty enums are not allowed */
3523 if (a
== TOK_ENUM
) {
3527 CType
*t
= &int_type
;
3530 expect("identifier");
3532 if (ss
&& !local_stack
)
3533 tcc_error("redefinition of enumerator '%s'",
3534 get_tok_str(v
, NULL
));
3538 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
3541 /* We really want to support long long enums
3542 on i386 as well, but the Sym structure only
3543 holds a 'long' for associated constants,
3544 and enlarging it would bump its size (no
3545 available padding). So punt for now. */
3551 if (c
!= (int)c
&& (unsigned long)c
!= (unsigned int)c
)
3552 seen_wide
= 1, t
= &size_type
;
3553 /* enum symbols have static storage */
3554 ss
= sym_push(v
, t
, VT_CONST
, c
);
3555 ss
->type
.t
|= VT_STATIC
;
3560 /* NOTE: we accept a trailing comma */
3565 s
->a
.unsigned_enum
= 1;
3566 s
->c
= type_size(seen_wide
? &size_type
: &int_type
, &align
);
3571 while (tok
!= '}') {
3572 if (!parse_btype(&btype
, &ad1
)) {
3578 tcc_error("flexible array member '%s' not at the end of struct",
3579 get_tok_str(v
, NULL
));
3585 type_decl(&type1
, &ad1
, &v
, TYPE_DIRECT
);
3587 if ((type1
.t
& VT_BTYPE
) != VT_STRUCT
)
3588 expect("identifier");
3590 int v
= btype
.ref
->v
;
3591 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
3592 if (tcc_state
->ms_extensions
== 0)
3593 expect("identifier");
3597 if (type_size(&type1
, &align
) < 0) {
3598 if ((a
== TOK_STRUCT
) && (type1
.t
& VT_ARRAY
) && c
)
3601 tcc_error("field '%s' has incomplete type",
3602 get_tok_str(v
, NULL
));
3604 if ((type1
.t
& VT_BTYPE
) == VT_FUNC
||
3605 (type1
.t
& VT_STORAGE
))
3606 tcc_error("invalid type for '%s'",
3607 get_tok_str(v
, NULL
));
3611 bit_size
= expr_const();
3612 /* XXX: handle v = 0 case for messages */
3614 tcc_error("negative width in bit-field '%s'",
3615 get_tok_str(v
, NULL
));
3616 if (v
&& bit_size
== 0)
3617 tcc_error("zero width for bit-field '%s'",
3618 get_tok_str(v
, NULL
));
3619 if (tok
== TOK_ATTRIBUTE1
|| tok
== TOK_ATTRIBUTE2
)
3620 parse_attribute(&ad1
);
3622 size
= type_size(&type1
, &align
);
3623 /* Only remember non-default alignment. */
3625 if (ad1
.a
.aligned
) {
3626 int speca
= 1 << (ad1
.a
.aligned
- 1);
3627 alignoverride
= speca
;
3628 } else if (ad1
.a
.packed
|| ad
->a
.packed
) {
3630 } else if (*tcc_state
->pack_stack_ptr
) {
3631 if (align
>= *tcc_state
->pack_stack_ptr
)
3632 alignoverride
= *tcc_state
->pack_stack_ptr
;
3634 if (bit_size
>= 0) {
3635 bt
= type1
.t
& VT_BTYPE
;
3642 tcc_error("bitfields must have scalar type");
3644 if (bit_size
> bsize
) {
3645 tcc_error("width of '%s' exceeds its type",
3646 get_tok_str(v
, NULL
));
3647 } else if (bit_size
== bsize
) {
3648 /* no need for bit fields */
3651 type1
.t
|= VT_BITFIELD
|
3652 (0 << VT_STRUCT_SHIFT
) |
3653 (bit_size
<< (VT_STRUCT_SHIFT
+ 6));
3656 if (v
!= 0 || (type1
.t
& VT_BTYPE
) == VT_STRUCT
) {
3657 /* Remember we've seen a real field to check
3658 for placement of flexible array member. */
3661 /* If member is a struct or bit-field, enforce
3662 placing into the struct (as anonymous). */
3664 ((type1
.t
& VT_BTYPE
) == VT_STRUCT
||
3669 ss
= sym_push(v
| SYM_FIELD
, &type1
, alignoverride
, 0);
3673 if (tok
== ';' || tok
== TOK_EOF
)
3680 if (tok
== TOK_ATTRIBUTE1
|| tok
== TOK_ATTRIBUTE2
)
3681 parse_attribute(ad
);
3682 struct_layout(type
, ad
);
3687 /* return 1 if basic type is a type size (short, long, long long) */
3688 ST_FUNC
int is_btype_size(int bt
)
3690 return bt
== VT_SHORT
|| bt
== VT_LONG
|| bt
== VT_LLONG
;
3693 /* Add type qualifiers to a type. If the type is an array then the qualifiers
3694 are added to the element type, copied because it could be a typedef. */
3695 static void parse_btype_qualify(CType
*type
, int qualifiers
)
3697 while (type
->t
& VT_ARRAY
) {
3698 type
->ref
= sym_push(SYM_FIELD
, &type
->ref
->type
, 0, type
->ref
->c
);
3699 type
= &type
->ref
->type
;
3701 type
->t
|= qualifiers
;
3704 /* return 0 if no type declaration. otherwise, return the basic type
3707 static int parse_btype(CType
*type
, AttributeDef
*ad
)
3709 int t
, u
, bt_size
, complete
, type_found
, typespec_found
, g
;
3713 memset(ad
, 0, sizeof(AttributeDef
));
3721 /* currently, we really ignore extension */
3732 tcc_error("too many basic types");
3734 bt_size
= is_btype_size (u
& VT_BTYPE
);
3735 if (u
== VT_INT
|| (!bt_size
&& !(t
& VT_TYPEDEF
)))
3750 if ((t
& VT_BTYPE
) == VT_DOUBLE
) {
3751 #ifndef TCC_TARGET_PE
3752 t
= (t
& ~VT_BTYPE
) | VT_LDOUBLE
;
3754 } else if ((t
& VT_BTYPE
) == VT_LONG
) {
3755 t
= (t
& ~VT_BTYPE
) | VT_LLONG
;
3761 #ifdef TCC_TARGET_ARM64
3763 /* GCC's __uint128_t appears in some Linux header files. Make it a
3764 synonym for long double to get the size and alignment right. */
3776 if ((t
& VT_BTYPE
) == VT_LONG
) {
3777 #ifdef TCC_TARGET_PE
3778 t
= (t
& ~VT_BTYPE
) | VT_DOUBLE
;
3780 t
= (t
& ~VT_BTYPE
) | VT_LDOUBLE
;
3788 struct_decl(&type1
, ad
, VT_ENUM
);
3791 type
->ref
= type1
.ref
;
3795 struct_decl(&type1
, ad
, VT_STRUCT
);
3798 /* type modifiers */
3803 parse_btype_qualify(type
, VT_CONSTANT
);
3811 parse_btype_qualify(type
, VT_VOLATILE
);
3818 if ((t
& (VT_DEFSIGN
|VT_UNSIGNED
)) == (VT_DEFSIGN
|VT_UNSIGNED
))
3819 tcc_error("signed and unsigned modifier");
3832 if ((t
& (VT_DEFSIGN
|VT_UNSIGNED
)) == VT_DEFSIGN
)
3833 tcc_error("signed and unsigned modifier");
3834 t
|= VT_DEFSIGN
| VT_UNSIGNED
;
3850 if (t
& (VT_EXTERN
|VT_STATIC
|VT_TYPEDEF
) & ~g
)
3851 tcc_error("multiple storage classes");
3862 /* GNUC attribute */
3863 case TOK_ATTRIBUTE1
:
3864 case TOK_ATTRIBUTE2
:
3865 parse_attribute(ad
);
3868 t
= (t
& ~VT_BTYPE
) | u
;
3876 parse_expr_type(&type1
);
3877 /* remove all storage modifiers except typedef */
3878 type1
.t
&= ~(VT_STORAGE
&~VT_TYPEDEF
);
3884 if (!s
|| !(s
->type
.t
& VT_TYPEDEF
))
3887 type
->t
= ((s
->type
.t
& ~VT_TYPEDEF
) |
3888 (t
& ~(VT_CONSTANT
| VT_VOLATILE
)));
3889 type
->ref
= s
->type
.ref
;
3890 if (t
& (VT_CONSTANT
| VT_VOLATILE
))
3891 parse_btype_qualify(type
, t
& (VT_CONSTANT
| VT_VOLATILE
));
3895 /* get attributes from typedef */
3896 if (0 == ad
->a
.aligned
)
3897 ad
->a
.aligned
= s
->a
.aligned
;
3898 if (0 == ad
->a
.func_call
)
3899 ad
->a
.func_call
= s
->a
.func_call
;
3900 ad
->a
.packed
|= s
->a
.packed
;
3909 if (tcc_state
->char_is_unsigned
) {
3910 if ((t
& (VT_DEFSIGN
|VT_BTYPE
)) == VT_BYTE
)
3914 /* long is never used as type */
3915 if ((t
& VT_BTYPE
) == VT_LONG
)
3916 #if (!defined TCC_TARGET_X86_64 && !defined TCC_TARGET_ARM64) || \
3917 defined TCC_TARGET_PE
3918 t
= (t
& ~VT_BTYPE
) | VT_INT
;
3920 t
= (t
& ~VT_BTYPE
) | VT_LLONG
;
3926 /* convert a function parameter type (array to pointer and function to
3927 function pointer) */
3928 static inline void convert_parameter_type(CType
*pt
)
3930 /* remove const and volatile qualifiers (XXX: const could be used
3931 to indicate a const function parameter */
3932 pt
->t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
3933 /* array must be transformed to pointer according to ANSI C */
3935 if ((pt
->t
& VT_BTYPE
) == VT_FUNC
) {
3940 ST_FUNC
void parse_asm_str(CString
*astr
)
3943 parse_mult_str(astr
, "string constant");
3946 /* Parse an asm label and return the token */
3947 static int asm_label_instr(void)
3953 parse_asm_str(&astr
);
3956 printf("asm_alias: \"%s\"\n", (char *)astr
.data
);
3958 v
= tok_alloc(astr
.data
, astr
.size
- 1)->tok
;
3963 static int post_type(CType
*type
, AttributeDef
*ad
, int storage
, int td
)
3965 int n
, l
, t1
, arg_size
, align
;
3966 Sym
**plast
, *s
, *first
;
3971 /* function type, or recursive declarator (return if so) */
3973 if (td
&& !(td
& TYPE_ABSTRACT
))
3977 else if (parse_btype(&pt
, &ad1
))
3988 /* read param name and compute offset */
3989 if (l
!= FUNC_OLD
) {
3990 if ((pt
.t
& VT_BTYPE
) == VT_VOID
&& tok
== ')')
3992 type_decl(&pt
, &ad1
, &n
, TYPE_DIRECT
| TYPE_ABSTRACT
);
3993 if ((pt
.t
& VT_BTYPE
) == VT_VOID
)
3994 tcc_error("parameter declared as void");
3995 arg_size
+= (type_size(&pt
, &align
) + PTR_SIZE
- 1) / PTR_SIZE
;
3999 expect("identifier");
4000 pt
.t
= VT_VOID
; /* invalid type */
4003 convert_parameter_type(&pt
);
4004 s
= sym_push(n
| SYM_FIELD
, &pt
, 0, 0);
4010 if (l
== FUNC_NEW
&& tok
== TOK_DOTS
) {
4015 if (l
== FUNC_NEW
&& !parse_btype(&pt
, &ad1
))
4016 tcc_error("invalid type");
4019 /* if no parameters, then old type prototype */
4022 /* NOTE: const is ignored in returned type as it has a special
4023 meaning in gcc / C++ */
4024 type
->t
&= ~VT_CONSTANT
;
4025 /* some ancient pre-K&R C allows a function to return an array
4026 and the array brackets to be put after the arguments, such
4027 that "int c()[]" means something like "int[] c()" */
4030 skip(']'); /* only handle simple "[]" */
4033 /* we push a anonymous symbol which will contain the function prototype */
4034 ad
->a
.func_args
= arg_size
;
4035 s
= sym_push(SYM_FIELD
, type
, 0, l
);
4040 } else if (tok
== '[') {
4041 int saved_nocode_wanted
= nocode_wanted
;
4042 /* array definition */
4044 if (tok
== TOK_RESTRICT1
)
4049 if (!local_stack
|| (storage
& VT_STATIC
))
4050 vpushi(expr_const());
4052 /* VLAs (which can only happen with local_stack && !VT_STATIC)
4053 length must always be evaluated, even under nocode_wanted,
4054 so that its size slot is initialized (e.g. under sizeof
4059 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
4062 tcc_error("invalid array size");
4064 if (!is_integer_btype(vtop
->type
.t
& VT_BTYPE
))
4065 tcc_error("size of variable length array should be an integer");
4070 /* parse next post type */
4071 post_type(type
, ad
, storage
, 0);
4072 if (type
->t
== VT_FUNC
)
4073 tcc_error("declaration of an array of functions");
4074 t1
|= type
->t
& VT_VLA
;
4077 loc
-= type_size(&int_type
, &align
);
4081 vla_runtime_type_size(type
, &align
);
4083 vset(&int_type
, VT_LOCAL
|VT_LVAL
, n
);
4089 nocode_wanted
= saved_nocode_wanted
;
4091 /* we push an anonymous symbol which will contain the array
4093 s
= sym_push(SYM_FIELD
, type
, 0, n
);
4094 type
->t
= (t1
? VT_VLA
: VT_ARRAY
) | VT_PTR
;
4100 /* Parse a type declarator (except basic type), and return the type
4101 in 'type'. 'td' is a bitmask indicating which kind of type decl is
4102 expected. 'type' should contain the basic type. 'ad' is the
4103 attribute definition of the basic type. It can be modified by
4104 type_decl(). If this (possibly abstract) declarator is a pointer chain
4105 it returns the innermost pointed to type (equals *type, but is a different
4106 pointer), otherwise returns type itself, that's used for recursive calls. */
4107 static CType
*type_decl(CType
*type
, AttributeDef
*ad
, int *v
, int td
)
4110 int qualifiers
, storage
;
4112 /* recursive type, remove storage bits first, apply them later again */
4113 storage
= type
->t
& VT_STORAGE
;
4114 type
->t
&= ~VT_STORAGE
;
4116 while (tok
== '*') {
4124 qualifiers
|= VT_CONSTANT
;
4129 qualifiers
|= VT_VOLATILE
;
4135 /* XXX: clarify attribute handling */
4136 case TOK_ATTRIBUTE1
:
4137 case TOK_ATTRIBUTE2
:
4138 parse_attribute(ad
);
4142 type
->t
|= qualifiers
;
4144 /* innermost pointed to type is the one for the first derivation */
4145 ret
= pointed_type(type
);
4149 /* This is possibly a parameter type list for abstract declarators
4150 ('int ()'), use post_type for testing this. */
4151 if (!post_type(type
, ad
, 0, td
)) {
4152 /* It's not, so it's a nested declarator, and the post operations
4153 apply to the innermost pointed to type (if any). */
4154 /* XXX: this is not correct to modify 'ad' at this point, but
4155 the syntax is not clear */
4156 if (tok
== TOK_ATTRIBUTE1
|| tok
== TOK_ATTRIBUTE2
)
4157 parse_attribute(ad
);
4158 post
= type_decl(type
, ad
, v
, td
);
4161 } else if (tok
>= TOK_IDENT
&& (td
& TYPE_DIRECT
)) {
4162 /* type identifier */
4166 if (!(td
& TYPE_ABSTRACT
))
4167 expect("identifier");
4170 post_type(post
, ad
, storage
, 0);
4171 if (tok
== TOK_ATTRIBUTE1
|| tok
== TOK_ATTRIBUTE2
)
4172 parse_attribute(ad
);
4177 /* compute the lvalue VT_LVAL_xxx needed to match type t. */
4178 ST_FUNC
int lvalue_type(int t
)
4183 if (bt
== VT_BYTE
|| bt
== VT_BOOL
)
4185 else if (bt
== VT_SHORT
)
4189 if (t
& VT_UNSIGNED
)
4190 r
|= VT_LVAL_UNSIGNED
;
4194 /* indirection with full error checking and bound check */
4195 ST_FUNC
void indir(void)
4197 if ((vtop
->type
.t
& VT_BTYPE
) != VT_PTR
) {
4198 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FUNC
)
4202 if (vtop
->r
& VT_LVAL
)
4204 vtop
->type
= *pointed_type(&vtop
->type
);
4205 /* Arrays and functions are never lvalues */
4206 if (!(vtop
->type
.t
& VT_ARRAY
) && !(vtop
->type
.t
& VT_VLA
)
4207 && (vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
) {
4208 vtop
->r
|= lvalue_type(vtop
->type
.t
);
4209 /* if bound checking, the referenced pointer must be checked */
4210 #ifdef CONFIG_TCC_BCHECK
4211 if (tcc_state
->do_bounds_check
)
4212 vtop
->r
|= VT_MUSTBOUND
;
4217 /* pass a parameter to a function and do type checking and casting */
4218 static void gfunc_param_typed(Sym
*func
, Sym
*arg
)
4223 func_type
= func
->c
;
4224 if (func_type
== FUNC_OLD
||
4225 (func_type
== FUNC_ELLIPSIS
&& arg
== NULL
)) {
4226 /* default casting : only need to convert float to double */
4227 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FLOAT
) {
4230 } else if (vtop
->type
.t
& VT_BITFIELD
) {
4231 type
.t
= vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
);
4232 type
.ref
= vtop
->type
.ref
;
4235 } else if (arg
== NULL
) {
4236 tcc_error("too many arguments to function");
4239 type
.t
&= ~VT_CONSTANT
; /* need to do that to avoid false warning */
4240 gen_assign_cast(&type
);
4244 /* parse an expression and return its type without any side effect.
4245 If UNRY we parse an unary expression, otherwise a full one. */
4246 static void expr_type(CType
*type
, int unry
)
4258 /* parse an expression of the form '(type)' or '(expr)' and return its
4260 static void parse_expr_type(CType
*type
)
4266 if (parse_btype(type
, &ad
)) {
4267 type_decl(type
, &ad
, &n
, TYPE_ABSTRACT
);
4274 static void parse_type(CType
*type
)
4279 if (!parse_btype(type
, &ad
)) {
4282 type_decl(type
, &ad
, &n
, TYPE_ABSTRACT
);
4285 static void parse_builtin_params(int nc
, const char *args
)
4292 while ((c
= *args
++)) {
4296 case 'e': expr_eq(); continue;
4297 case 't': parse_type(&t
); vpush(&t
); continue;
4298 default: tcc_error("internal error"); break;
4306 ST_FUNC
void unary(void)
4308 int n
, t
, align
, size
, r
, sizeof_caller
;
4313 sizeof_caller
= in_sizeof
;
4315 /* XXX: GCC 2.95.3 does not generate a table although it should be
4329 vsetc(&type
, VT_CONST
, &tokc
);
4333 t
= VT_INT
| VT_UNSIGNED
;
4339 t
= VT_LLONG
| VT_UNSIGNED
;
4351 case TOK___FUNCTION__
:
4353 goto tok_identifier
;
4359 /* special function name identifier */
4360 len
= strlen(funcname
) + 1;
4361 /* generate char[len] type */
4366 vpush_ref(&type
, data_section
, data_section
->data_offset
, len
);
4367 ptr
= section_ptr_add(data_section
, len
);
4368 memcpy(ptr
, funcname
, len
);
4373 #ifdef TCC_TARGET_PE
4374 t
= VT_SHORT
| VT_UNSIGNED
;
4380 /* string parsing */
4383 if (tcc_state
->warn_write_strings
)
4388 memset(&ad
, 0, sizeof(AttributeDef
));
4389 decl_initializer_alloc(&type
, &ad
, VT_CONST
, 2, 0, 0);
4394 if (parse_btype(&type
, &ad
)) {
4395 type_decl(&type
, &ad
, &n
, TYPE_ABSTRACT
);
4397 /* check ISOC99 compound literal */
4399 /* data is allocated locally by default */
4404 /* all except arrays are lvalues */
4405 if (!(type
.t
& VT_ARRAY
))
4406 r
|= lvalue_type(type
.t
);
4407 memset(&ad
, 0, sizeof(AttributeDef
));
4408 decl_initializer_alloc(&type
, &ad
, r
, 1, 0, 0);
4410 if (sizeof_caller
) {
4417 } else if (tok
== '{') {
4418 int saved_nocode_wanted
= nocode_wanted
;
4420 tcc_error("expected constant");
4421 /* save all registers */
4423 /* statement expression : we do not accept break/continue
4424 inside as GCC does. We do retain the nocode_wanted state,
4425 as statement expressions can't ever be entered from the
4426 outside, so any reactivation of code emission (from labels
4427 or loop heads) can be disabled again after the end of it. */
4428 block(NULL
, NULL
, 1);
4429 nocode_wanted
= saved_nocode_wanted
;
4444 /* functions names must be treated as function pointers,
4445 except for unary '&' and sizeof. Since we consider that
4446 functions are not lvalues, we only have to handle it
4447 there and in function calls. */
4448 /* arrays can also be used although they are not lvalues */
4449 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
&&
4450 !(vtop
->type
.t
& VT_ARRAY
))
4452 mk_pointer(&vtop
->type
);
4458 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
4460 boolean
.t
= VT_BOOL
;
4462 vtop
->c
.i
= !vtop
->c
.i
;
4463 } else if ((vtop
->r
& VT_VALMASK
) == VT_CMP
)
4467 vseti(VT_JMP
, gvtst(1, 0));
4479 if ((vtop
->type
.t
& VT_BTYPE
) == VT_PTR
)
4480 tcc_error("pointer not accepted for unary plus");
4481 /* In order to force cast, we add zero, except for floating point
4482 where we really need an noop (otherwise -0.0 will be transformed
4484 if (!is_float(vtop
->type
.t
)) {
4495 expr_type(&type
, 1); // Perform a in_sizeof = 0;
4496 size
= type_size(&type
, &align
);
4497 if (t
== TOK_SIZEOF
) {
4498 if (!(type
.t
& VT_VLA
)) {
4500 tcc_error("sizeof applied to an incomplete type");
4503 vla_runtime_type_size(&type
, &align
);
4508 vtop
->type
.t
|= VT_UNSIGNED
;
4511 case TOK_builtin_expect
:
4512 /* __builtin_expect is a no-op for now */
4513 parse_builtin_params(0, "ee");
4516 case TOK_builtin_types_compatible_p
:
4517 parse_builtin_params(0, "tt");
4518 vtop
[-1].type
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
4519 vtop
[0].type
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
4520 n
= is_compatible_types(&vtop
[-1].type
, &vtop
[0].type
);
4524 case TOK_builtin_choose_expr
:
4551 case TOK_builtin_constant_p
:
4552 parse_builtin_params(1, "e");
4553 n
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
4557 case TOK_builtin_frame_address
:
4558 case TOK_builtin_return_address
:
4565 if (tok
!= TOK_CINT
) {
4566 tcc_error("%s only takes positive integers",
4567 tok1
== TOK_builtin_return_address
?
4568 "__builtin_return_address" :
4569 "__builtin_frame_address");
4571 level
= (uint32_t)tokc
.i
;
4576 vset(&type
, VT_LOCAL
, 0); /* local frame */
4578 mk_pointer(&vtop
->type
);
4579 indir(); /* -> parent frame */
4581 if (tok1
== TOK_builtin_return_address
) {
4582 // assume return address is just above frame pointer on stack
4585 mk_pointer(&vtop
->type
);
4590 #ifdef TCC_TARGET_X86_64
4591 #ifdef TCC_TARGET_PE
4592 case TOK_builtin_va_start
:
4593 parse_builtin_params(0, "ee");
4594 r
= vtop
->r
& VT_VALMASK
;
4598 tcc_error("__builtin_va_start expects a local variable");
4600 vtop
->type
= char_pointer_type
;
4605 case TOK_builtin_va_arg_types
:
4606 parse_builtin_params(0, "t");
4607 vpushi(classify_x86_64_va_arg(&vtop
->type
));
4614 #ifdef TCC_TARGET_ARM64
4615 case TOK___va_start
: {
4616 parse_builtin_params(0, "ee");
4620 vtop
->type
.t
= VT_VOID
;
4623 case TOK___va_arg
: {
4625 parse_builtin_params(0, "et");
4633 case TOK___arm64_clear_cache
: {
4634 parse_builtin_params(0, "ee");
4637 vtop
->type
.t
= VT_VOID
;
4641 /* pre operations */
4652 t
= vtop
->type
.t
& VT_BTYPE
;
4654 /* In IEEE negate(x) isn't subtract(0,x), but rather
4658 vtop
->c
.f
= -1.0 * 0.0;
4659 else if (t
== VT_DOUBLE
)
4660 vtop
->c
.d
= -1.0 * 0.0;
4662 vtop
->c
.ld
= -1.0 * 0.0;
4670 goto tok_identifier
;
4672 /* allow to take the address of a label */
4673 if (tok
< TOK_UIDENT
)
4674 expect("label identifier");
4675 s
= label_find(tok
);
4677 s
= label_push(&global_label_stack
, tok
, LABEL_FORWARD
);
4679 if (s
->r
== LABEL_DECLARED
)
4680 s
->r
= LABEL_FORWARD
;
4683 s
->type
.t
= VT_VOID
;
4684 mk_pointer(&s
->type
);
4685 s
->type
.t
|= VT_STATIC
;
4687 vpushsym(&s
->type
, s
);
4691 // special qnan , snan and infinity values
4693 vpush64(VT_DOUBLE
, 0x7ff8000000000000ULL
);
4697 vpush64(VT_DOUBLE
, 0x7ff0000000000001ULL
);
4701 vpush64(VT_DOUBLE
, 0x7ff0000000000000ULL
);
4710 expect("identifier");
4713 const char *name
= get_tok_str(t
, NULL
);
4715 tcc_error("'%s' undeclared", name
);
4716 /* for simple function calls, we tolerate undeclared
4717 external reference to int() function */
4718 if (tcc_state
->warn_implicit_function_declaration
4719 #ifdef TCC_TARGET_PE
4720 /* people must be warned about using undeclared WINAPI functions
4721 (which usually start with uppercase letter) */
4722 || (name
[0] >= 'A' && name
[0] <= 'Z')
4725 tcc_warning("implicit declaration of function '%s'", name
);
4726 s
= external_global_sym(t
, &func_old_type
, 0);
4730 /* A symbol that has a register is a local register variable,
4731 which starts out as VT_LOCAL value. */
4732 if ((r
& VT_VALMASK
) < VT_CONST
)
4733 r
= (r
& ~VT_VALMASK
) | VT_LOCAL
;
4735 vset(&s
->type
, r
, s
->c
);
4736 /* Point to s as backpointer (even without r&VT_SYM).
4737 Will be used by at least the x86 inline asm parser for
4740 if (vtop
->r
& VT_SYM
) {
4746 /* post operations */
4748 if (tok
== TOK_INC
|| tok
== TOK_DEC
) {
4751 } else if (tok
== '.' || tok
== TOK_ARROW
|| tok
== TOK_CDOUBLE
) {
4754 if (tok
== TOK_ARROW
)
4756 qualifiers
= vtop
->type
.t
& (VT_CONSTANT
| VT_VOLATILE
);
4759 /* expect pointer on structure */
4760 if ((vtop
->type
.t
& VT_BTYPE
) != VT_STRUCT
)
4761 expect("struct or union");
4762 if (tok
== TOK_CDOUBLE
)
4763 expect("field name");
4765 if (tok
== TOK_CINT
|| tok
== TOK_CUINT
)
4766 expect("field name");
4767 s
= find_field(&vtop
->type
, tok
);
4769 tcc_error("field not found: %s", get_tok_str(tok
& ~SYM_FIELD
, &tokc
));
4770 /* add field offset to pointer */
4771 vtop
->type
= char_pointer_type
; /* change type to 'char *' */
4774 /* change type to field type, and set to lvalue */
4775 vtop
->type
= s
->type
;
4776 vtop
->type
.t
|= qualifiers
;
4777 /* an array is never an lvalue */
4778 if (!(vtop
->type
.t
& VT_ARRAY
)) {
4779 vtop
->r
|= lvalue_type(vtop
->type
.t
);
4780 #ifdef CONFIG_TCC_BCHECK
4781 /* if bound checking, the referenced pointer must be checked */
4782 if (tcc_state
->do_bounds_check
&& (vtop
->r
& VT_VALMASK
) != VT_LOCAL
)
4783 vtop
->r
|= VT_MUSTBOUND
;
4787 } else if (tok
== '[') {
4793 } else if (tok
== '(') {
4796 int nb_args
, ret_nregs
, ret_align
, regsize
, variadic
;
4799 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
) {
4800 /* pointer test (no array accepted) */
4801 if ((vtop
->type
.t
& (VT_BTYPE
| VT_ARRAY
)) == VT_PTR
) {
4802 vtop
->type
= *pointed_type(&vtop
->type
);
4803 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
)
4807 expect("function pointer");
4810 vtop
->r
&= ~VT_LVAL
; /* no lvalue */
4812 /* get return type */
4815 sa
= s
->next
; /* first parameter */
4816 nb_args
= regsize
= 0;
4818 /* compute first implicit argument if a structure is returned */
4819 if ((s
->type
.t
& VT_BTYPE
) == VT_STRUCT
) {
4820 variadic
= (s
->c
== FUNC_ELLIPSIS
);
4821 ret_nregs
= gfunc_sret(&s
->type
, variadic
, &ret
.type
,
4822 &ret_align
, ®size
);
4824 /* get some space for the returned structure */
4825 size
= type_size(&s
->type
, &align
);
4826 #ifdef TCC_TARGET_ARM64
4827 /* On arm64, a small struct is return in registers.
4828 It is much easier to write it to memory if we know
4829 that we are allowed to write some extra bytes, so
4830 round the allocated space up to a power of 2: */
4832 while (size
& (size
- 1))
4833 size
= (size
| (size
- 1)) + 1;
4835 loc
= (loc
- size
) & -align
;
4837 ret
.r
= VT_LOCAL
| VT_LVAL
;
4838 /* pass it as 'int' to avoid structure arg passing
4840 vseti(VT_LOCAL
, loc
);
4850 /* return in register */
4851 if (is_float(ret
.type
.t
)) {
4852 ret
.r
= reg_fret(ret
.type
.t
);
4853 #ifdef TCC_TARGET_X86_64
4854 if ((ret
.type
.t
& VT_BTYPE
) == VT_QFLOAT
)
4858 #ifndef TCC_TARGET_ARM64
4859 #ifdef TCC_TARGET_X86_64
4860 if ((ret
.type
.t
& VT_BTYPE
) == VT_QLONG
)
4862 if ((ret
.type
.t
& VT_BTYPE
) == VT_LLONG
)
4873 gfunc_param_typed(s
, sa
);
4883 tcc_error("too few arguments to function");
4885 gfunc_call(nb_args
);
4888 for (r
= ret
.r
+ ret_nregs
+ !ret_nregs
; r
-- > ret
.r
;) {
4889 vsetc(&ret
.type
, r
, &ret
.c
);
4890 vtop
->r2
= ret
.r2
; /* Loop only happens when r2 is VT_CONST */
4893 /* handle packed struct return */
4894 if (((s
->type
.t
& VT_BTYPE
) == VT_STRUCT
) && ret_nregs
) {
4897 size
= type_size(&s
->type
, &align
);
4898 /* We're writing whole regs often, make sure there's enough
4899 space. Assume register size is power of 2. */
4900 if (regsize
> align
)
4902 loc
= (loc
- size
) & -align
;
4906 vset(&ret
.type
, VT_LOCAL
| VT_LVAL
, addr
+ offset
);
4910 if (--ret_nregs
== 0)
4914 vset(&s
->type
, VT_LOCAL
| VT_LVAL
, addr
);
4922 ST_FUNC
void expr_prod(void)
4927 while (tok
== '*' || tok
== '/' || tok
== '%') {
4935 ST_FUNC
void expr_sum(void)
4940 while (tok
== '+' || tok
== '-') {
4948 static void expr_shift(void)
4953 while (tok
== TOK_SHL
|| tok
== TOK_SAR
) {
4961 static void expr_cmp(void)
4966 while ((tok
>= TOK_ULE
&& tok
<= TOK_GT
) ||
4967 tok
== TOK_ULT
|| tok
== TOK_UGE
) {
4975 static void expr_cmpeq(void)
4980 while (tok
== TOK_EQ
|| tok
== TOK_NE
) {
4988 static void expr_and(void)
4991 while (tok
== '&') {
4998 static void expr_xor(void)
5001 while (tok
== '^') {
5008 static void expr_or(void)
5011 while (tok
== '|') {
5018 static void expr_land(void)
5021 if (tok
== TOK_LAND
) {
5024 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
5032 while (tok
== TOK_LAND
) {
5040 gen_cast(&int_type
);
5048 if (tok
!= TOK_LAND
) {
5061 static void expr_lor(void)
5064 if (tok
== TOK_LOR
) {
5067 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
5075 while (tok
== TOK_LOR
) {
5083 gen_cast(&int_type
);
5091 if (tok
!= TOK_LOR
) {
5104 /* Assuming vtop is a value used in a conditional context
5105 (i.e. compared with zero) return 0 if it's false, 1 if
5106 true and -1 if it can't be statically determined. */
5107 static int condition_3way(void)
5110 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
&&
5111 (!(vtop
->r
& VT_SYM
) ||
5112 !(vtop
->sym
->type
.t
& VT_WEAK
))) {
5114 boolean
.t
= VT_BOOL
;
5123 static void expr_cond(void)
5125 int tt
, u
, r1
, r2
, rc
, t1
, t2
, bt1
, bt2
, islv
, c
, g
;
5127 CType type
, type1
, type2
;
5132 c
= condition_3way();
5133 g
= (tok
== ':' && gnu_ext
);
5135 /* needed to avoid having different registers saved in
5137 if (is_float(vtop
->type
.t
)) {
5139 #ifdef TCC_TARGET_X86_64
5140 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LDOUBLE
) {
5165 sv
= *vtop
; /* save value to handle it later */
5166 vtop
--; /* no vpop so that FP stack is not flushed */
5184 bt1
= t1
& VT_BTYPE
;
5186 bt2
= t2
& VT_BTYPE
;
5187 /* cast operands to correct type according to ISOC rules */
5188 if (is_float(bt1
) || is_float(bt2
)) {
5189 if (bt1
== VT_LDOUBLE
|| bt2
== VT_LDOUBLE
) {
5190 type
.t
= VT_LDOUBLE
;
5192 } else if (bt1
== VT_DOUBLE
|| bt2
== VT_DOUBLE
) {
5197 } else if (bt1
== VT_LLONG
|| bt2
== VT_LLONG
) {
5198 /* cast to biggest op */
5200 /* convert to unsigned if it does not fit in a long long */
5201 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
) ||
5202 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
))
5203 type
.t
|= VT_UNSIGNED
;
5204 } else if (bt1
== VT_PTR
|| bt2
== VT_PTR
) {
5205 /* If one is a null ptr constant the result type
5207 if (is_null_pointer (vtop
))
5209 else if (is_null_pointer (&sv
))
5211 /* XXX: test pointer compatibility, C99 has more elaborate
5215 } else if (bt1
== VT_FUNC
|| bt2
== VT_FUNC
) {
5216 /* XXX: test function pointer compatibility */
5217 type
= bt1
== VT_FUNC
? type1
: type2
;
5218 } else if (bt1
== VT_STRUCT
|| bt2
== VT_STRUCT
) {
5219 /* XXX: test structure compatibility */
5220 type
= bt1
== VT_STRUCT
? type1
: type2
;
5221 } else if (bt1
== VT_VOID
|| bt2
== VT_VOID
) {
5222 /* NOTE: as an extension, we accept void on only one side */
5225 /* integer operations */
5227 /* convert to unsigned if it does not fit in an integer */
5228 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
) ||
5229 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
))
5230 type
.t
|= VT_UNSIGNED
;
5232 /* keep structs lvalue by transforming `(expr ? a : b)` to `*(expr ? &a : &b)` so
5233 that `(expr ? a : b).mem` does not error with "lvalue expected" */
5234 islv
= (vtop
->r
& VT_LVAL
) && (sv
.r
& VT_LVAL
) && VT_STRUCT
== (type
.t
& VT_BTYPE
);
5237 /* now we convert second operand */
5241 mk_pointer(&vtop
->type
);
5243 } else if (VT_STRUCT
== (vtop
->type
.t
& VT_BTYPE
))
5248 if (is_float(type
.t
)) {
5250 #ifdef TCC_TARGET_X86_64
5251 if ((type
.t
& VT_BTYPE
) == VT_LDOUBLE
) {
5255 } else if ((type
.t
& VT_BTYPE
) == VT_LLONG
) {
5256 /* for long longs, we use fixed registers to avoid having
5257 to handle a complicated move */
5268 /* this is horrible, but we must also convert first
5274 mk_pointer(&vtop
->type
);
5276 } else if (VT_STRUCT
== (vtop
->type
.t
& VT_BTYPE
))
5282 move_reg(r2
, r1
, type
.t
);
5292 static void expr_eq(void)
5298 (tok
>= TOK_A_MOD
&& tok
<= TOK_A_DIV
) ||
5299 tok
== TOK_A_XOR
|| tok
== TOK_A_OR
||
5300 tok
== TOK_A_SHL
|| tok
== TOK_A_SAR
) {
5315 ST_FUNC
void gexpr(void)
5326 /* parse a constant expression and return value in vtop. */
5327 static void expr_const1(void)
5334 /* parse an integer constant and return its value. */
5335 static inline int64_t expr_const64(void)
5339 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) != VT_CONST
)
5340 expect("constant expression");
5346 /* parse an integer constant and return its value.
5347 Complain if it doesn't fit 32bit (signed or unsigned). */
5348 ST_FUNC
int expr_const(void)
5351 int64_t wc
= expr_const64();
5353 if (c
!= wc
&& (unsigned)c
!= wc
)
5354 tcc_error("constant exceeds 32 bit");
5358 /* return the label token if current token is a label, otherwise
5360 static int is_label(void)
5364 /* fast test first */
5365 if (tok
< TOK_UIDENT
)
5367 /* no need to save tokc because tok is an identifier */
5373 unget_tok(last_tok
);
5378 #ifndef TCC_TARGET_ARM64
5379 static void gfunc_return(CType
*func_type
)
5381 if ((func_type
->t
& VT_BTYPE
) == VT_STRUCT
) {
5382 CType type
, ret_type
;
5383 int ret_align
, ret_nregs
, regsize
;
5384 ret_nregs
= gfunc_sret(func_type
, func_var
, &ret_type
,
5385 &ret_align
, ®size
);
5386 if (0 == ret_nregs
) {
5387 /* if returning structure, must copy it to implicit
5388 first pointer arg location */
5391 vset(&type
, VT_LOCAL
| VT_LVAL
, func_vc
);
5394 /* copy structure value to pointer */
5397 /* returning structure packed into registers */
5398 int r
, size
, addr
, align
;
5399 size
= type_size(func_type
,&align
);
5400 if ((vtop
->r
!= (VT_LOCAL
| VT_LVAL
) ||
5401 (vtop
->c
.i
& (ret_align
-1)))
5402 && (align
& (ret_align
-1))) {
5403 loc
= (loc
- size
) & -ret_align
;
5406 vset(&type
, VT_LOCAL
| VT_LVAL
, addr
);
5410 vset(&ret_type
, VT_LOCAL
| VT_LVAL
, addr
);
5412 vtop
->type
= ret_type
;
5413 if (is_float(ret_type
.t
))
5414 r
= rc_fret(ret_type
.t
);
5425 if (--ret_nregs
== 0)
5427 /* We assume that when a structure is returned in multiple
5428 registers, their classes are consecutive values of the
5431 vtop
->c
.i
+= regsize
;
5435 } else if (is_float(func_type
->t
)) {
5436 gv(rc_fret(func_type
->t
));
5440 vtop
--; /* NOT vpop() because on x86 it would flush the fp stack */
5444 static int case_cmp(const void *pa
, const void *pb
)
5446 int64_t a
= (*(struct case_t
**) pa
)->v1
;
5447 int64_t b
= (*(struct case_t
**) pb
)->v1
;
5448 return a
< b
? -1 : a
> b
;
5451 static void gcase(struct case_t
**base
, int len
, int *bsym
)
5455 int ll
= (vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
;
5473 gtst_addr(0, p
->sym
); /* v1 <= x <= v2 */
5475 gcase(base
, len
/2, bsym
);
5476 if (cur_switch
->def_sym
)
5477 gjmp_addr(cur_switch
->def_sym
);
5479 *bsym
= gjmp(*bsym
);
5483 base
+= e
; len
-= e
;
5493 if (p
->v1
== p
->v2
) {
5495 gtst_addr(0, p
->sym
);
5505 gtst_addr(0, p
->sym
);
5511 static void block(int *bsym
, int *csym
, int is_expr
)
5513 int a
, b
, c
, d
, cond
;
5516 /* generate line number info */
5517 if (tcc_state
->do_debug
)
5518 tcc_debug_line(tcc_state
);
5521 /* default return value is (void) */
5523 vtop
->type
.t
= VT_VOID
;
5526 if (tok
== TOK_IF
) {
5528 int saved_nocode_wanted
= nocode_wanted
;
5533 cond
= condition_3way();
5539 nocode_wanted
|= 0x20000000;
5540 block(bsym
, csym
, 0);
5542 nocode_wanted
= saved_nocode_wanted
;
5544 if (c
== TOK_ELSE
) {
5549 nocode_wanted
|= 0x20000000;
5550 block(bsym
, csym
, 0);
5551 gsym(d
); /* patch else jmp */
5553 nocode_wanted
= saved_nocode_wanted
;
5556 } else if (tok
== TOK_WHILE
) {
5557 int saved_nocode_wanted
;
5558 nocode_wanted
&= ~0x20000000;
5568 saved_nocode_wanted
= nocode_wanted
;
5570 nocode_wanted
= saved_nocode_wanted
;
5575 } else if (tok
== '{') {
5577 int block_vla_sp_loc
= vla_sp_loc
, saved_vlas_in_scope
= vlas_in_scope
;
5580 /* record local declaration stack position */
5582 llabel
= local_label_stack
;
5585 /* handle local labels declarations */
5586 if (tok
== TOK_LABEL
) {
5589 if (tok
< TOK_UIDENT
)
5590 expect("label identifier");
5591 label_push(&local_label_stack
, tok
, LABEL_DECLARED
);
5601 while (tok
!= '}') {
5602 if ((a
= is_label()))
5609 block(bsym
, csym
, is_expr
);
5612 /* pop locally defined labels */
5613 label_pop(&local_label_stack
, llabel
);
5614 /* pop locally defined symbols */
5616 /* In the is_expr case (a statement expression is finished here),
5617 vtop might refer to symbols on the local_stack. Either via the
5618 type or via vtop->sym. We can't pop those nor any that in turn
5619 might be referred to. To make it easier we don't roll back
5620 any symbols in that case; some upper level call to block() will
5621 do that. We do have to remove such symbols from the lookup
5622 tables, though. sym_pop will do that. */
5623 sym_pop(&local_stack
, s
, is_expr
);
5625 /* Pop VLA frames and restore stack pointer if required */
5626 if (vlas_in_scope
> saved_vlas_in_scope
) {
5627 vla_sp_loc
= saved_vlas_in_scope
? block_vla_sp_loc
: vla_sp_root_loc
;
5630 vlas_in_scope
= saved_vlas_in_scope
;
5633 } else if (tok
== TOK_RETURN
) {
5637 gen_assign_cast(&func_vt
);
5638 gfunc_return(&func_vt
);
5641 /* jump unless last stmt in top-level block */
5642 if (tok
!= '}' || local_scope
!= 1)
5644 nocode_wanted
|= 0x20000000;
5645 } else if (tok
== TOK_BREAK
) {
5648 tcc_error("cannot break");
5649 *bsym
= gjmp(*bsym
);
5652 nocode_wanted
|= 0x20000000;
5653 } else if (tok
== TOK_CONTINUE
) {
5656 tcc_error("cannot continue");
5657 vla_sp_restore_root();
5658 *csym
= gjmp(*csym
);
5661 } else if (tok
== TOK_FOR
) {
5663 int saved_nocode_wanted
;
5664 nocode_wanted
&= ~0x20000000;
5670 /* c99 for-loop init decl? */
5671 if (!decl0(VT_LOCAL
, 1, NULL
)) {
5672 /* no, regular for-loop init expr */
5698 saved_nocode_wanted
= nocode_wanted
;
5700 nocode_wanted
= saved_nocode_wanted
;
5705 sym_pop(&local_stack
, s
, 0);
5708 if (tok
== TOK_DO
) {
5709 int saved_nocode_wanted
;
5710 nocode_wanted
&= ~0x20000000;
5716 saved_nocode_wanted
= nocode_wanted
;
5724 nocode_wanted
= saved_nocode_wanted
;
5729 if (tok
== TOK_SWITCH
) {
5730 struct switch_t
*saved
, sw
;
5731 int saved_nocode_wanted
= nocode_wanted
;
5737 switchval
= *vtop
--;
5739 b
= gjmp(0); /* jump to first case */
5740 sw
.p
= NULL
; sw
.n
= 0; sw
.def_sym
= 0;
5744 nocode_wanted
= saved_nocode_wanted
;
5745 a
= gjmp(a
); /* add implicit break */
5748 qsort(sw
.p
, sw
.n
, sizeof(void*), case_cmp
);
5749 for (b
= 1; b
< sw
.n
; b
++)
5750 if (sw
.p
[b
- 1]->v2
>= sw
.p
[b
]->v1
)
5751 tcc_error("duplicate case value");
5752 /* Our switch table sorting is signed, so the compared
5753 value needs to be as well when it's 64bit. */
5754 if ((switchval
.type
.t
& VT_BTYPE
) == VT_LLONG
)
5755 switchval
.type
.t
&= ~VT_UNSIGNED
;
5757 gcase(sw
.p
, sw
.n
, &a
);
5760 gjmp_addr(sw
.def_sym
);
5761 dynarray_reset(&sw
.p
, &sw
.n
);
5766 if (tok
== TOK_CASE
) {
5767 struct case_t
*cr
= tcc_malloc(sizeof(struct case_t
));
5770 nocode_wanted
&= ~0x20000000;
5772 cr
->v1
= cr
->v2
= expr_const64();
5773 if (gnu_ext
&& tok
== TOK_DOTS
) {
5775 cr
->v2
= expr_const64();
5776 if (cr
->v2
< cr
->v1
)
5777 tcc_warning("empty case range");
5780 dynarray_add(&cur_switch
->p
, &cur_switch
->n
, cr
);
5783 goto block_after_label
;
5785 if (tok
== TOK_DEFAULT
) {
5790 if (cur_switch
->def_sym
)
5791 tcc_error("too many 'default'");
5792 cur_switch
->def_sym
= ind
;
5794 goto block_after_label
;
5796 if (tok
== TOK_GOTO
) {
5798 if (tok
== '*' && gnu_ext
) {
5802 if ((vtop
->type
.t
& VT_BTYPE
) != VT_PTR
)
5805 } else if (tok
>= TOK_UIDENT
) {
5806 s
= label_find(tok
);
5807 /* put forward definition if needed */
5809 s
= label_push(&global_label_stack
, tok
, LABEL_FORWARD
);
5811 if (s
->r
== LABEL_DECLARED
)
5812 s
->r
= LABEL_FORWARD
;
5814 vla_sp_restore_root();
5815 if (s
->r
& LABEL_FORWARD
)
5816 s
->jnext
= gjmp(s
->jnext
);
5818 gjmp_addr(s
->jnext
);
5821 expect("label identifier");
5824 } else if (tok
== TOK_ASM1
|| tok
== TOK_ASM2
|| tok
== TOK_ASM3
) {
5833 if (s
->r
== LABEL_DEFINED
)
5834 tcc_error("duplicate label '%s'", get_tok_str(s
->v
, NULL
));
5836 s
->r
= LABEL_DEFINED
;
5838 s
= label_push(&global_label_stack
, b
, LABEL_DEFINED
);
5842 /* we accept this, but it is a mistake */
5844 nocode_wanted
&= ~0x20000000;
5846 tcc_warning("deprecated use of label at end of compound statement");
5850 block(bsym
, csym
, is_expr
);
5853 /* expression case */
5868 /* This skips over a stream of tokens containing balanced {} and ()
5869 pairs, stopping at outer ',' ';' and '}'. If STR then allocates
5870 and stores the skipped tokens in *STR. This doesn't check if
5871 () and {} are nested correctly, i.e. "({)}" is accepted. */
5872 static void skip_or_save_block(TokenString
**str
)
5876 *str
= tok_str_alloc();
5878 while ((level
> 0 || (tok
!= '}' && tok
!= ',' && tok
!= ';'))) {
5880 if (tok
== TOK_EOF
) {
5881 if (str
|| level
> 0)
5882 tcc_error("unexpected end of file");
5887 tok_str_add_tok(*str
);
5890 if (t
== '{' || t
== '(') {
5892 } else if (t
== '}' || t
== ')') {
5899 tok_str_add(*str
, -1);
5900 tok_str_add(*str
, 0);
5904 #define EXPR_CONST 1
5907 static void parse_init_elem(int expr_type
)
5909 int saved_global_expr
;
5912 /* compound literals must be allocated globally in this case */
5913 saved_global_expr
= global_expr
;
5916 global_expr
= saved_global_expr
;
5917 /* NOTE: symbols are accepted, as well as lvalue for anon symbols
5918 (compound literals). */
5919 if (((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) != VT_CONST
5920 && ((vtop
->r
& (VT_SYM
|VT_LVAL
)) != (VT_SYM
|VT_LVAL
)
5921 || vtop
->sym
->v
< SYM_FIRST_ANOM
))
5922 #ifdef TCC_TARGET_PE
5923 || (vtop
->type
.t
& VT_IMPORT
)
5926 tcc_error("initializer element is not constant");
5934 /* put zeros for variable based init */
5935 static void init_putz(Section
*sec
, unsigned long c
, int size
)
5938 /* nothing to do because globals are already set to zero */
5940 vpush_global_sym(&func_old_type
, TOK_memset
);
5942 #ifdef TCC_TARGET_ARM
5953 /* t is the array or struct type. c is the array or struct
5954 address. cur_field is the pointer to the current
5955 field, for arrays the 'c' member contains the current start
5956 index. 'size_only' is true if only size info is needed (only used
5957 in arrays). al contains the already initialized length of the
5958 current container (starting at c). This returns the new length of that. */
5959 static int decl_designator(CType
*type
, Section
*sec
, unsigned long c
,
5960 Sym
**cur_field
, int size_only
, int al
)
5963 int index
, index_last
, align
, l
, nb_elems
, elem_size
;
5964 unsigned long corig
= c
;
5968 if (gnu_ext
&& (l
= is_label()) != 0)
5970 /* NOTE: we only support ranges for last designator */
5971 while (nb_elems
== 1 && (tok
== '[' || tok
== '.')) {
5973 if (!(type
->t
& VT_ARRAY
))
5974 expect("array type");
5976 index
= index_last
= expr_const();
5977 if (tok
== TOK_DOTS
&& gnu_ext
) {
5979 index_last
= expr_const();
5983 if (index
< 0 || (s
->c
>= 0 && index_last
>= s
->c
) ||
5985 tcc_error("invalid index");
5987 (*cur_field
)->c
= index_last
;
5988 type
= pointed_type(type
);
5989 elem_size
= type_size(type
, &align
);
5990 c
+= index
* elem_size
;
5991 nb_elems
= index_last
- index
+ 1;
5997 if ((type
->t
& VT_BTYPE
) != VT_STRUCT
)
5998 expect("struct/union type");
5999 f
= find_field(type
, l
);
6012 } else if (!gnu_ext
) {
6016 if (type
->t
& VT_ARRAY
) {
6017 index
= (*cur_field
)->c
;
6018 if (type
->ref
->c
>= 0 && index
>= type
->ref
->c
)
6019 tcc_error("index too large");
6020 type
= pointed_type(type
);
6021 c
+= index
* type_size(type
, &align
);
6024 while (f
&& (f
->v
& SYM_FIRST_ANOM
) && (f
->type
.t
& VT_BITFIELD
))
6025 *cur_field
= f
= f
->next
;
6027 tcc_error("too many field init");
6032 /* must put zero in holes (note that doing it that way
6033 ensures that it even works with designators) */
6034 if (!size_only
&& c
- corig
> al
)
6035 init_putz(sec
, corig
+ al
, c
- corig
- al
);
6036 decl_initializer(type
, sec
, c
, 0, size_only
);
6038 /* XXX: make it more general */
6039 if (!size_only
&& nb_elems
> 1) {
6040 unsigned long c_end
;
6045 vset(type
, VT_LOCAL
|VT_LVAL
, c
);
6046 for (i
= 1; i
< nb_elems
; i
++) {
6047 vset(type
, VT_LOCAL
|VT_LVAL
, c
+ elem_size
* i
);
6053 c_end
= c
+ nb_elems
* elem_size
;
6054 if (c_end
> sec
->data_allocated
)
6055 section_realloc(sec
, c_end
);
6056 src
= sec
->data
+ c
;
6058 for(i
= 1; i
< nb_elems
; i
++) {
6060 memcpy(dst
, src
, elem_size
);
6064 c
+= nb_elems
* type_size(type
, &align
);
6070 /* store a value or an expression directly in global data or in local array */
6071 static void init_putv(CType
*type
, Section
*sec
, unsigned long c
)
6073 int bt
, bit_pos
, bit_size
;
6075 unsigned long long bit_mask
;
6079 dtype
.t
&= ~VT_CONSTANT
; /* need to do that to avoid false warning */
6083 /* XXX: not portable */
6084 /* XXX: generate error if incorrect relocation */
6085 gen_assign_cast(&dtype
);
6086 bt
= type
->t
& VT_BTYPE
;
6087 size
= type_size(type
, &align
);
6088 section_reserve(sec
, c
+ size
);
6089 ptr
= sec
->data
+ c
;
6090 /* XXX: make code faster ? */
6091 if (!(type
->t
& VT_BITFIELD
)) {
6093 bit_size
= PTR_SIZE
* 8;
6096 bit_pos
= (vtop
->type
.t
>> VT_STRUCT_SHIFT
) & 0x3f;
6097 bit_size
= (vtop
->type
.t
>> (VT_STRUCT_SHIFT
+ 6)) & 0x3f;
6098 bit_mask
= (1LL << bit_size
) - 1;
6100 if ((vtop
->r
& (VT_SYM
|VT_CONST
)) == (VT_SYM
|VT_CONST
) &&
6101 vtop
->sym
->v
>= SYM_FIRST_ANOM
&&
6102 /* XXX This rejects compound literals like
6103 '(void *){ptr}'. The problem is that '&sym' is
6104 represented the same way, which would be ruled out
6105 by the SYM_FIRST_ANOM check above, but also '"string"'
6106 in 'char *p = "string"' is represented the same
6107 with the type being VT_PTR and the symbol being an
6108 anonymous one. That is, there's no difference in vtop
6109 between '(void *){x}' and '&(void *){x}'. Ignore
6110 pointer typed entities here. Hopefully no real code
6111 will every use compound literals with scalar type. */
6112 (vtop
->type
.t
& VT_BTYPE
) != VT_PTR
) {
6113 /* These come from compound literals, memcpy stuff over. */
6117 esym
= &((ElfW(Sym
) *)symtab_section
->data
)[vtop
->sym
->c
];
6118 ssec
= tcc_state
->sections
[esym
->st_shndx
];
6119 memmove (ptr
, ssec
->data
+ esym
->st_value
, size
);
6121 /* We need to copy over all memory contents, and that
6122 includes relocations. Use the fact that relocs are
6123 created in order, so look from the end of relocs
6124 until we hit one before the copied region. */
6125 int num_relocs
= ssec
->reloc
->data_offset
/ sizeof(*rel
);
6126 rel
= (ElfW_Rel
*)(ssec
->reloc
->data
+ ssec
->reloc
->data_offset
);
6127 while (num_relocs
--) {
6129 if (rel
->r_offset
>= esym
->st_value
+ size
)
6131 if (rel
->r_offset
< esym
->st_value
)
6133 /* Note: if the same fields are initialized multiple
6134 times (possible with designators) then we possibly
6135 add multiple relocations for the same offset here.
6136 That would lead to wrong code, the last reloc needs
6137 to win. We clean this up later after the whole
6138 initializer is parsed. */
6139 put_elf_reloca(symtab_section
, sec
,
6140 c
+ rel
->r_offset
- esym
->st_value
,
6141 ELFW(R_TYPE
)(rel
->r_info
),
6142 ELFW(R_SYM
)(rel
->r_info
),
6143 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
6152 if ((vtop
->r
& VT_SYM
) &&
6158 (bt
== VT_LLONG
&& bit_size
!= 64) ||
6162 (bt
== VT_INT
&& bit_size
!= 32)
6165 tcc_error("initializer element is not computable at load time");
6167 /* XXX: when cross-compiling we assume that each type has the
6168 same representation on host and target, which is likely to
6169 be wrong in the case of long double */
6171 vtop
->c
.i
= (vtop
->c
.i
!= 0);
6173 *(char *)ptr
|= (vtop
->c
.i
& bit_mask
) << bit_pos
;
6176 *(short *)ptr
|= (vtop
->c
.i
& bit_mask
) << bit_pos
;
6179 *(float*)ptr
= vtop
->c
.f
;
6182 *(double *)ptr
= vtop
->c
.d
;
6185 if (sizeof(long double) == LDOUBLE_SIZE
)
6186 *(long double *)ptr
= vtop
->c
.ld
;
6187 else if (sizeof(double) == LDOUBLE_SIZE
)
6188 *(double *)ptr
= (double)vtop
->c
.ld
;
6189 #if (defined __i386__ || defined __x86_64__) && (defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64)
6190 else if (sizeof (long double) >= 10)
6191 memcpy(memset(ptr
, 0, LDOUBLE_SIZE
), &vtop
->c
.ld
, 10);
6194 tcc_error("can't cross compile long double constants");
6198 *(long long *)ptr
|= (vtop
->c
.i
& bit_mask
) << bit_pos
;
6205 addr_t val
= (vtop
->c
.i
& bit_mask
) << bit_pos
;
6206 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
6207 if (vtop
->r
& VT_SYM
)
6208 greloca(sec
, vtop
->sym
, c
, R_DATA_PTR
, val
);
6210 *(addr_t
*)ptr
|= val
;
6212 if (vtop
->r
& VT_SYM
)
6213 greloc(sec
, vtop
->sym
, c
, R_DATA_PTR
);
6214 *(addr_t
*)ptr
|= val
;
6220 int val
= (vtop
->c
.i
& bit_mask
) << bit_pos
;
6221 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
6222 if (vtop
->r
& VT_SYM
)
6223 greloca(sec
, vtop
->sym
, c
, R_DATA_PTR
, val
);
6227 if (vtop
->r
& VT_SYM
)
6228 greloc(sec
, vtop
->sym
, c
, R_DATA_PTR
);
6237 vset(&dtype
, VT_LOCAL
|VT_LVAL
, c
);
6244 /* 't' contains the type and storage info. 'c' is the offset of the
6245 object in section 'sec'. If 'sec' is NULL, it means stack based
6246 allocation. 'first' is true if array '{' must be read (multi
6247 dimension implicit array init handling). 'size_only' is true if
6248 size only evaluation is wanted (only for arrays). */
6249 static void decl_initializer(CType
*type
, Section
*sec
, unsigned long c
,
6250 int first
, int size_only
)
6252 int len
, n
, no_oblock
, nb
, i
;
6259 /* If we currently are at an '}' or ',' we have read an initializer
6260 element in one of our callers, and not yet consumed it. */
6261 have_elem
= tok
== '}' || tok
== ',';
6262 if (!have_elem
&& tok
!= '{' &&
6263 /* In case of strings we have special handling for arrays, so
6264 don't consume them as initializer value (which would commit them
6265 to some anonymous symbol). */
6266 tok
!= TOK_LSTR
&& tok
!= TOK_STR
&&
6268 parse_init_elem(!sec
? EXPR_ANY
: EXPR_CONST
);
6273 !(type
->t
& VT_ARRAY
) &&
6274 /* Use i_c_parameter_t, to strip toplevel qualifiers.
6275 The source type might have VT_CONSTANT set, which is
6276 of course assignable to non-const elements. */
6277 is_compatible_parameter_types(type
, &vtop
->type
)) {
6278 init_putv(type
, sec
, c
);
6279 } else if (type
->t
& VT_ARRAY
) {
6282 t1
= pointed_type(type
);
6283 size1
= type_size(t1
, &align1
);
6286 if ((first
&& tok
!= TOK_LSTR
&& tok
!= TOK_STR
) ||
6289 tcc_error("character array initializer must be a literal,"
6290 " optionally enclosed in braces");
6295 /* only parse strings here if correct type (otherwise: handle
6296 them as ((w)char *) expressions */
6297 if ((tok
== TOK_LSTR
&&
6298 #ifdef TCC_TARGET_PE
6299 (t1
->t
& VT_BTYPE
) == VT_SHORT
&& (t1
->t
& VT_UNSIGNED
)
6301 (t1
->t
& VT_BTYPE
) == VT_INT
6303 ) || (tok
== TOK_STR
&& (t1
->t
& VT_BTYPE
) == VT_BYTE
)) {
6305 while (tok
== TOK_STR
|| tok
== TOK_LSTR
) {
6308 /* compute maximum number of chars wanted */
6310 cstr_len
= tokc
.str
.size
;
6312 cstr_len
= tokc
.str
.size
/ sizeof(nwchar_t
);
6315 if (n
>= 0 && nb
> (n
- len
))
6319 tcc_warning("initializer-string for array is too long");
6320 /* in order to go faster for common case (char
6321 string in global variable, we handle it
6323 if (sec
&& tok
== TOK_STR
&& size1
== 1) {
6324 memcpy(sec
->data
+ c
+ len
, tokc
.str
.data
, nb
);
6328 ch
= ((unsigned char *)tokc
.str
.data
)[i
];
6330 ch
= ((nwchar_t
*)tokc
.str
.data
)[i
];
6332 init_putv(t1
, sec
, c
+ (len
+ i
) * size1
);
6339 /* only add trailing zero if enough storage (no
6340 warning in this case since it is standard) */
6341 if (n
< 0 || len
< n
) {
6344 init_putv(t1
, sec
, c
+ (len
* size1
));
6355 while (tok
!= '}' || have_elem
) {
6356 len
= decl_designator(type
, sec
, c
, &f
, size_only
, len
);
6358 if (type
->t
& VT_ARRAY
) {
6360 /* special test for multi dimensional arrays (may not
6361 be strictly correct if designators are used at the
6363 if (no_oblock
&& len
>= n
*size1
)
6366 if (s
->type
.t
== TOK_UNION
)
6370 if (no_oblock
&& f
== NULL
)
6379 /* put zeros at the end */
6380 if (!size_only
&& len
< n
*size1
)
6381 init_putz(sec
, c
+ len
, n
*size1
- len
);
6384 /* patch type size if needed, which happens only for array types */
6386 s
->c
= size1
== 1 ? len
: ((len
+ size1
- 1)/size1
);
6387 } else if ((type
->t
& VT_BTYPE
) == VT_STRUCT
) {
6390 if (first
|| tok
== '{') {
6398 } else if (tok
== '{') {
6400 decl_initializer(type
, sec
, c
, first
, size_only
);
6402 } else if (size_only
) {
6403 /* If we supported only ISO C we wouldn't have to accept calling
6404 this on anything than an array size_only==1 (and even then
6405 only on the outermost level, so no recursion would be needed),
6406 because initializing a flex array member isn't supported.
6407 But GNU C supports it, so we need to recurse even into
6408 subfields of structs and arrays when size_only is set. */
6409 /* just skip expression */
6411 skip_or_save_block(NULL
);
6412 } while (tok
!= '}' && tok
!= ',' && tok
!= -1);
6415 /* This should happen only when we haven't parsed
6416 the init element above for fear of committing a
6417 string constant to memory too early. */
6418 if (tok
!= TOK_STR
&& tok
!= TOK_LSTR
)
6419 expect("string constant");
6420 parse_init_elem(!sec
? EXPR_ANY
: EXPR_CONST
);
6422 init_putv(type
, sec
, c
);
6426 /* parse an initializer for type 't' if 'has_init' is non zero, and
6427 allocate space in local or global data space ('r' is either
6428 VT_LOCAL or VT_CONST). If 'v' is non zero, then an associated
6429 variable 'v' of scope 'scope' is declared before initializers
6430 are parsed. If 'v' is zero, then a reference to the new object
6431 is put in the value stack. If 'has_init' is 2, a special parsing
6432 is done to handle string constants. */
6433 static void decl_initializer_alloc(CType
*type
, AttributeDef
*ad
, int r
,
6434 int has_init
, int v
, int scope
)
6436 int size
, align
, addr
;
6437 ParseState saved_parse_state
= {0};
6438 TokenString
*init_str
= NULL
;
6440 Sym
*flexible_array
;
6442 flexible_array
= NULL
;
6443 if ((type
->t
& VT_BTYPE
) == VT_STRUCT
) {
6444 Sym
*field
= type
->ref
->next
;
6447 field
= field
->next
;
6448 if (field
->type
.t
& VT_ARRAY
&& field
->type
.ref
->c
< 0)
6449 flexible_array
= field
;
6453 size
= type_size(type
, &align
);
6454 /* If unknown size, we must evaluate it before
6455 evaluating initializers because
6456 initializers can generate global data too
6457 (e.g. string pointers or ISOC99 compound
6458 literals). It also simplifies local
6459 initializers handling */
6460 if (size
< 0 || (flexible_array
&& has_init
)) {
6462 tcc_error("unknown type size");
6463 /* get all init string */
6464 if (has_init
== 2) {
6465 init_str
= tok_str_alloc();
6466 /* only get strings */
6467 while (tok
== TOK_STR
|| tok
== TOK_LSTR
) {
6468 tok_str_add_tok(init_str
);
6471 tok_str_add(init_str
, -1);
6472 tok_str_add(init_str
, 0);
6474 skip_or_save_block(&init_str
);
6478 save_parse_state(&saved_parse_state
);
6480 begin_macro(init_str
, 1);
6482 decl_initializer(type
, NULL
, 0, 1, 1);
6483 /* prepare second initializer parsing */
6484 macro_ptr
= init_str
->str
;
6487 /* if still unknown size, error */
6488 size
= type_size(type
, &align
);
6490 tcc_error("unknown type size");
6492 /* If there's a flex member and it was used in the initializer
6494 if (flexible_array
&&
6495 flexible_array
->type
.ref
->c
> 0)
6496 size
+= flexible_array
->type
.ref
->c
6497 * pointed_size(&flexible_array
->type
);
6498 /* take into account specified alignment if bigger */
6499 if (ad
->a
.aligned
) {
6500 int speca
= 1 << (ad
->a
.aligned
- 1);
6503 } else if (ad
->a
.packed
) {
6506 if ((r
& VT_VALMASK
) == VT_LOCAL
) {
6508 #ifdef CONFIG_TCC_BCHECK
6509 if (tcc_state
->do_bounds_check
&& (type
->t
& VT_ARRAY
)) {
6513 loc
= (loc
- size
) & -align
;
6515 #ifdef CONFIG_TCC_BCHECK
6516 /* handles bounds */
6517 /* XXX: currently, since we do only one pass, we cannot track
6518 '&' operators, so we add only arrays */
6519 if (tcc_state
->do_bounds_check
&& (type
->t
& VT_ARRAY
)) {
6521 /* add padding between regions */
6523 /* then add local bound info */
6524 bounds_ptr
= section_ptr_add(lbounds_section
, 2 * sizeof(addr_t
));
6525 bounds_ptr
[0] = addr
;
6526 bounds_ptr
[1] = size
;
6530 /* local variable */
6531 #ifdef CONFIG_TCC_ASM
6532 if (ad
->asm_label
) {
6533 int reg
= asm_parse_regvar(ad
->asm_label
);
6535 r
= (r
& ~VT_VALMASK
) | reg
;
6538 sym_push(v
, type
, r
, addr
);
6540 /* push local reference */
6541 vset(type
, r
, addr
);
6545 if (v
&& scope
== VT_CONST
) {
6546 /* see if the symbol was already defined */
6549 patch_storage(sym
, type
);
6550 if (sym
->type
.t
& VT_EXTERN
) {
6551 /* if the variable is extern, it was not allocated */
6552 sym
->type
.t
&= ~VT_EXTERN
;
6553 /* set array size if it was omitted in extern
6555 if ((sym
->type
.t
& VT_ARRAY
) &&
6556 sym
->type
.ref
->c
< 0 &&
6558 sym
->type
.ref
->c
= type
->ref
->c
;
6559 } else if (!has_init
) {
6560 /* we accept several definitions of the same
6561 global variable. this is tricky, because we
6562 must play with the SHN_COMMON type of the symbol */
6563 /* no init data, we won't add more to the symbol */
6564 update_storage(sym
);
6566 } else if (sym
->c
) {
6568 esym
= &((ElfW(Sym
) *)symtab_section
->data
)[sym
->c
];
6569 if (esym
->st_shndx
== data_section
->sh_num
)
6570 tcc_error("redefinition of '%s'", get_tok_str(v
, NULL
));
6575 /* allocate symbol in corresponding section */
6580 else if (tcc_state
->nocommon
)
6585 addr
= section_add(sec
, size
, align
);
6586 #ifdef CONFIG_TCC_BCHECK
6587 /* add padding if bound check */
6588 if (tcc_state
->do_bounds_check
)
6589 section_add(sec
, 1, 1);
6592 addr
= align
; /* SHN_COMMON is special, symbol value is align */
6593 sec
= common_section
;
6598 sym
= sym_push(v
, type
, r
| VT_SYM
, 0);
6599 sym
->asm_label
= ad
->asm_label
;
6601 /* update symbol definition */
6602 put_extern_sym(sym
, sec
, addr
, size
);
6604 /* push global reference */
6605 sym
= get_sym_ref(type
, sec
, addr
, size
);
6606 vpushsym(type
, sym
);
6610 #ifdef CONFIG_TCC_BCHECK
6611 /* handles bounds now because the symbol must be defined
6612 before for the relocation */
6613 if (tcc_state
->do_bounds_check
) {
6616 greloc(bounds_section
, sym
, bounds_section
->data_offset
, R_DATA_PTR
);
6617 /* then add global bound info */
6618 bounds_ptr
= section_ptr_add(bounds_section
, 2 * sizeof(addr_t
));
6619 bounds_ptr
[0] = 0; /* relocated */
6620 bounds_ptr
[1] = size
;
6625 if (type
->t
& VT_VLA
) {
6628 /* save current stack pointer */
6629 if (vlas_in_scope
== 0) {
6630 if (vla_sp_root_loc
== -1)
6631 vla_sp_root_loc
= (loc
-= PTR_SIZE
);
6632 gen_vla_sp_save(vla_sp_root_loc
);
6635 vla_runtime_type_size(type
, &a
);
6636 gen_vla_alloc(type
, a
);
6637 gen_vla_sp_save(addr
);
6641 } else if (has_init
) {
6642 size_t oldreloc_offset
= 0;
6643 if (sec
&& sec
->reloc
)
6644 oldreloc_offset
= sec
->reloc
->data_offset
;
6645 decl_initializer(type
, sec
, addr
, 1, 0);
6646 if (sec
&& sec
->reloc
)
6647 squeeze_multi_relocs(sec
, oldreloc_offset
);
6648 /* patch flexible array member size back to -1, */
6649 /* for possible subsequent similar declarations */
6651 flexible_array
->type
.ref
->c
= -1;
6655 /* restore parse state if needed */
6658 restore_parse_state(&saved_parse_state
);
6662 /* parse a function defined by symbol 'sym' and generate its code in
6663 'cur_text_section' */
6664 static void gen_function(Sym
*sym
)
6667 ind
= cur_text_section
->data_offset
;
6668 /* NOTE: we patch the symbol size later */
6669 put_extern_sym(sym
, cur_text_section
, ind
, 0);
6670 funcname
= get_tok_str(sym
->v
, NULL
);
6672 /* Initialize VLA state */
6674 vla_sp_root_loc
= -1;
6675 /* put debug symbol */
6676 tcc_debug_funcstart(tcc_state
, sym
);
6677 /* push a dummy symbol to enable local sym storage */
6678 sym_push2(&local_stack
, SYM_FIELD
, 0, 0);
6679 local_scope
= 1; /* for function parameters */
6680 gfunc_prolog(&sym
->type
);
6683 block(NULL
, NULL
, 0);
6687 cur_text_section
->data_offset
= ind
;
6688 label_pop(&global_label_stack
, NULL
);
6689 /* reset local stack */
6691 sym_pop(&local_stack
, NULL
, 0);
6692 /* end of function */
6693 /* patch symbol size */
6694 ((ElfW(Sym
) *)symtab_section
->data
)[sym
->c
].st_size
=
6696 tcc_debug_funcend(tcc_state
, ind
- func_ind
);
6697 /* It's better to crash than to generate wrong code */
6698 cur_text_section
= NULL
;
6699 funcname
= ""; /* for safety */
6700 func_vt
.t
= VT_VOID
; /* for safety */
6701 func_var
= 0; /* for safety */
6702 ind
= 0; /* for safety */
6707 static void gen_inline_functions(TCCState
*s
)
6710 int inline_generated
, i
, ln
;
6711 struct InlineFunc
*fn
;
6713 ln
= file
->line_num
;
6714 /* iterate while inline function are referenced */
6716 inline_generated
= 0;
6717 for (i
= 0; i
< s
->nb_inline_fns
; ++i
) {
6718 fn
= s
->inline_fns
[i
];
6720 if (sym
&& sym
->c
) {
6721 /* the function was used: generate its code and
6722 convert it to a normal function */
6725 pstrcpy(file
->filename
, sizeof file
->filename
, fn
->filename
);
6726 sym
->type
.t
&= ~VT_INLINE
;
6728 begin_macro(fn
->func_str
, 1);
6730 cur_text_section
= text_section
;
6734 inline_generated
= 1;
6737 if (!inline_generated
)
6740 file
->line_num
= ln
;
/* Release the saved token streams of inline functions that were never
   emitted, then the dynarray that recorded them. */
6743 ST_FUNC
void free_inline_functions(TCCState
*s
)
6746 /* free tokens of unused inline functions */
/* NOTE(review): the declaration of loop index 'i' (original lines
   6744-6745) is missing from this extraction */
6747 for (i
= 0; i
< s
->nb_inline_fns
; ++i
) {
6748 struct InlineFunc
*fn
= s
->inline_fns
[i
];
6750 tok_str_free(fn
->func_str
);
/* free the array of InlineFunc pointers itself and reset the count */
6752 dynarray_reset(&s
->inline_fns
, &s
->nb_inline_fns
);
6755 /* 'l' is VT_LOCAL or VT_CONST to define default storage type, or VT_CMP
6756 if parsing old style parameter decl list (and FUNC_SYM is set then) */
/* NOTE(review): fragmented extraction -- the local declarations of this
   function (original lines ~6758-6764: btype, type, ad, v, r, has_init,
   sym, ref, buf, ...) and many structural lines (braces, else branches)
   are missing where the embedded numbering skips. Comments below
   describe only the logic that is visible. */
6757 static int decl0(int l
, int is_for_loop_init
, Sym
*func_sym
)
/* no basic type parsed: handle ';', global asm blocks, K&R defaults */
6765 if (!parse_btype(&btype
, &ad
)) {
6766 if (is_for_loop_init
)
6768 /* skip redundant ';' if not in old parameter decl scope */
6769 if (tok
== ';' && l
!= VT_CMP
) {
6773 if (l
== VT_CONST
&&
6774 (tok
== TOK_ASM1
|| tok
== TOK_ASM2
|| tok
== TOK_ASM3
)) {
6775 /* global asm block */
6779 /* special test for old K&R protos without explicit int
6780 type. Only accepted when defining global data */
6781 if (l
!= VT_CONST
|| tok
< TOK_UIDENT
)
/* a bare struct/union/enum declaration with no declarator following */
6785 if (((btype
.t
& VT_BTYPE
) == VT_ENUM
||
6786 (btype
.t
& VT_BTYPE
) == VT_STRUCT
) &&
6788 if ((btype
.t
& VT_BTYPE
) == VT_STRUCT
) {
6789 int v
= btype
.ref
->v
;
/* an anonymous tag that declares no instances is almost surely a bug */
6790 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) >= SYM_FIRST_ANOM
)
6791 tcc_warning("unnamed struct/union that defines no instances");
6796 while (1) { /* iterate thru each declaration */
6798 /* If the base type itself was an array type of unspecified
6799 size (like in 'typedef int arr[]; arr x = {1};') then
6800 we will overwrite the unknown size by the real one for
6801 this decl. We need to unshare the ref symbol holding
6803 if ((type
.t
& VT_ARRAY
) && type
.ref
->c
< 0) {
6804 type
.ref
= sym_push(SYM_FIELD
, &type
.ref
->type
, 0, type
.ref
->c
);
/* parse the declarator; 'v' receives the declared identifier token */
6806 type_decl(&type
, &ad
, &v
, TYPE_DIRECT
);
/* debug dump of the parsed type (normally compiled out) */
6810 type_to_str(buf
, sizeof(buf
), &type
, get_tok_str(v
, NULL
));
6811 printf("type = '%s'\n", buf
);
/* function declarator */
6814 if ((type
.t
& VT_BTYPE
) == VT_FUNC
) {
6815 if ((type
.t
& VT_STATIC
) && (l
== VT_LOCAL
)) {
6816 tcc_error("function without file scope cannot be static");
6818 /* if old style function prototype, we accept a
/* NOTE(review): 'sym' read here is set on a line (~6819-6820) missing
   from the extraction */
6821 if (sym
->c
== FUNC_OLD
&& l
== VT_CONST
)
/* recurse to parse the old-style parameter declaration list */
6822 decl0(VT_CMP
, 0, sym
);
/* GNU extension: asm("name") label after the declarator */
6825 if (gnu_ext
&& (tok
== TOK_ASM1
|| tok
== TOK_ASM2
|| tok
== TOK_ASM3
)) {
6826 ad
.asm_label
= asm_label_instr();
6827 /* parse one last attribute list, after asm label */
6828 parse_attribute(&ad
);
6835 #ifdef TCC_TARGET_PE
/* PE targets: translate dllimport/dllexport attributes to type flags */
6836 if (ad
.a
.func_import
|| ad
.a
.func_export
) {
6837 if (type
.t
& (VT_STATIC
|VT_TYPEDEF
))
6838 tcc_error("cannot have dll linkage with static or typedef");
6839 if (ad
.a
.func_export
)
6840 type
.t
|= VT_EXPORT
;
6841 else if ((type
.t
& VT_BTYPE
) != VT_FUNC
)
6842 type
.t
|= VT_IMPORT
|VT_EXTERN
;
/* fold ELF visibility attribute into the type flags */
6845 type
.t
|= ad
.a
.visibility
<< VT_VIS_SHIFT
;
/* function DEFINITION branch (a body follows the declarator) */
6849 tcc_error("cannot use local functions");
6850 if ((type
.t
& VT_BTYPE
) != VT_FUNC
)
6851 expect("function definition");
6853 /* reject abstract declarators in function definition
6854 make old style params without decl have int type */
6856 while ((sym
= sym
->next
) != NULL
) {
6857 if (!(sym
->v
& ~SYM_FIELD
))
6858 expect("identifier");
6859 if (sym
->type
.t
== VT_VOID
)
6860 sym
->type
= int_type
;
6863 /* XXX: cannot do better now: convert extern line to static inline */
6864 if ((type
.t
& (VT_EXTERN
| VT_INLINE
)) == (VT_EXTERN
| VT_INLINE
))
6865 type
.t
= (type
.t
& ~VT_EXTERN
) | VT_STATIC
;
/* merge this definition with a previously seen prototype, if any */
6870 if ((sym
->type
.t
& VT_BTYPE
) != VT_FUNC
)
6873 ref
= sym
->type
.ref
;
6875 /* use func_call from prototype if not defined */
6876 if (ref
->a
.func_call
!= FUNC_CDECL
6877 && type
.ref
->a
.func_call
== FUNC_CDECL
)
6878 type
.ref
->a
.func_call
= ref
->a
.func_call
;
6880 /* use static from prototype */
6881 if (sym
->type
.t
& VT_STATIC
)
6882 type
.t
= (type
.t
& ~VT_EXTERN
) | VT_STATIC
;
6884 /* If the definition has no visibility use the
6885 one from prototype. */
6886 if (! (type
.t
& VT_VIS_MASK
))
6887 type
.t
|= sym
->type
.t
& VT_VIS_MASK
;
6889 /* apply other storage attributes from prototype */
6890 type
.t
|= sym
->type
.t
& (VT_EXPORT
|VT_WEAK
);
6892 if (!is_compatible_types(&sym
->type
, &type
)) {
6894 tcc_error("incompatible types for redefinition of '%s'",
6895 get_tok_str(v
, NULL
));
/* a second body for the same symbol is an error */
6897 if (ref
->a
.func_body
)
6898 tcc_error("redefinition of '%s'", get_tok_str(v
, NULL
));
6899 /* if symbol is already defined, then put complete type */
6903 /* put function symbol */
6904 sym
= global_identifier_push(v
, type
.t
, 0);
6905 sym
->type
.ref
= type
.ref
;
/* mark that this symbol now owns a body */
6908 sym
->type
.ref
->a
.func_body
= 1;
6909 sym
->r
= VT_SYM
| VT_CONST
;
6911 /* static inline functions are just recorded as a kind
6912 of macro. Their code will be emitted at the end of
6913 the compilation unit only if they are used */
6914 if ((type
.t
& (VT_INLINE
| VT_STATIC
)) ==
6915 (VT_INLINE
| VT_STATIC
)) {
6916 struct InlineFunc
*fn
;
6917 const char *filename
;
6919 filename
= file
? file
->filename
: "";
/* InlineFunc ends in a filename buffer; allocate record + name in one go */
6920 fn
= tcc_malloc(sizeof *fn
+ strlen(filename
));
6921 strcpy(fn
->filename
, filename
);
/* save (not compile) the body's tokens for possible later replay */
6923 skip_or_save_block(&fn
->func_str
);
6924 dynarray_add(&tcc_state
->inline_fns
,
6925 &tcc_state
->nb_inline_fns
, fn
);
6927 /* compute text section */
6928 cur_text_section
= ad
.section
;
6929 if (!cur_text_section
)
6930 cur_text_section
= text_section
;
/* old-style (K&R) parameter declaration scope: l == VT_CMP */
6936 /* find parameter in function parameter list */
6937 for (sym
= func_sym
->next
; sym
; sym
= sym
->next
)
6938 if ((sym
->v
& ~SYM_FIELD
) == v
)
6940 tcc_error("declaration for parameter '%s' but no such parameter",
6941 get_tok_str(v
, NULL
));
6943 if (type
.t
& VT_STORAGE
) /* 'register' is okay */
6944 tcc_error("storage class specified for '%s'",
6945 get_tok_str(v
, NULL
));
/* VT_VOID here means "not yet declared in the old-style list" */
6946 if (sym
->type
.t
!= VT_VOID
)
6947 tcc_error("redefinition of parameter '%s'",
6948 get_tok_str(v
, NULL
));
6949 convert_parameter_type(&type
);
6951 } else if (type
.t
& VT_TYPEDEF
) {
6952 /* save typedefed type */
6953 /* XXX: test storage specifiers ? */
/* reject an incompatible local re-typedef of the same name */
6955 if (sym
&& sym
->scope
== local_scope
) {
6956 if (!is_compatible_types(&sym
->type
, &type
)
6957 || !(sym
->type
.t
& VT_TYPEDEF
))
6958 tcc_error("incompatible redefinition of '%s'",
6959 get_tok_str(v
, NULL
));
6962 sym
= sym_push(v
, &type
, 0, 0);
/* otherwise: plain variable or function declaration */
6967 if ((type
.t
& VT_BTYPE
) == VT_FUNC
) {
6968 /* external function definition */
6969 /* specific case for func_call attribute */
6971 } else if (!(type
.t
& VT_ARRAY
)) {
6972 /* not lvalue if array */
6973 r
|= lvalue_type(type
.t
);
6975 has_init
= (tok
== '=');
6976 if (has_init
&& (type
.t
& VT_VLA
))
6977 tcc_error("variable length array cannot be initialized");
6978 if (((type
.t
& VT_EXTERN
) && (!has_init
|| l
!= VT_CONST
)) ||
6979 ((type
.t
& VT_BTYPE
) == VT_FUNC
) ||
6980 ((type
.t
& VT_ARRAY
) && (type
.t
& VT_STATIC
) &&
6981 !has_init
&& l
== VT_CONST
&& type
.ref
->c
< 0)) {
6982 /* external variable or function */
6983 /* NOTE: as GCC, uninitialized global static
6984 arrays of null size are considered as
6986 sym
= external_sym(v
, &type
, r
);
6987 sym
->asm_label
= ad
.asm_label
;
6988 if (ad
.alias_target
) {
/* __attribute__((alias("target"))): bind this symbol to the target's
   section/value/size taken from the ELF symbol table */
6993 alias_target
= sym_find(ad
.alias_target
);
6994 if (!alias_target
|| !alias_target
->c
)
6995 tcc_error("unsupported forward __alias__ attribute");
6996 esym
= &((ElfW(Sym
) *)symtab_section
->data
)[alias_target
->c
];
6997 tsec
.sh_num
= esym
->st_shndx
;
6998 put_extern_sym2(sym
, &tsec
, esym
->st_value
, esym
->st_size
, 0);
7001 if (type
.t
& VT_STATIC
)
/* allocate storage and parse the optional initializer */
7007 decl_initializer_alloc(&type
, &ad
, r
, has_init
, v
, l
);
7011 if (is_for_loop_init
)
/* Entry point: parse a sequence of declarations with default storage 'l'. */
7024 ST_FUNC
void decl(int l
)
/* NOTE(review): the body of decl() (original lines ~7025-7028) is
   missing from this extraction */
7029 /* ------------------------------------------------------------------------- */