2 * TCC - Tiny C Compiler
4 * Copyright (c) 2001-2004 Fabrice Bellard
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
23 /********************************************************/
24 /* global variables */
26 /* loc : local variable index
27 ind : output code index
29 anon_sym: anonymous symbol index
31 ST_DATA
/* rsym: return-label symbol; anon_sym: next anonymous symbol index
   (reset to SYM_FIRST_ANOM in tccgen_start below);
   ind: output code index; loc: local variable index
   (see the variable notes in the comment block above) */
int rsym
, anon_sym
, ind
, loc
;
33 ST_DATA Sym
/* head of the Sym free list serviced by sym_malloc()/sym_free() below */
*sym_free_first
;
34 ST_DATA
/* array of Sym pool allocations, filled by __sym_malloc() via dynarray_add */
void **sym_pools
;
35 ST_DATA
int nb_sym_pools
;
37 ST_DATA Sym
/* symbol stacks: global/local C symbols, preprocessor defines, labels */
*global_stack
;
38 ST_DATA Sym
*local_stack
;
39 ST_DATA Sym
*define_stack
;
40 ST_DATA Sym
*global_label_stack
;
41 ST_DATA Sym
*local_label_stack
;
42 static int local_scope
; /* current block-scope nesting level; recorded into Sym->scope by sym_push() */
44 static int section_sym
; /* ELF symbol of the text section, created in tcc_debug_start() for stabs output */
46 ST_DATA
int vlas_in_scope
; /* number of VLAs that are currently in scope */
47 ST_DATA
int vla_sp_root_loc
; /* vla_sp_loc for SP before any VLAs were pushed */
48 ST_DATA
int vla_sp_loc
; /* Pointer to variable holding location to store stack pointer on the stack when modifying stack pointer */
50 ST_DATA SValue __vstack
/* value stack: vtop points at the current top; pvtop marks the level used
   by check_vstack() to detect leaks (the +1 slot allows vstack[-1]-style bases) */
[1+VSTACK_SIZE
], *vtop
, *pvtop
;
52 ST_DATA
int const_wanted
; /* true if constant wanted */
53 ST_DATA
int nocode_wanted
; /* true if no code generation wanted for an expression */
54 ST_DATA
int global_expr
; /* true if compound literals must be allocated globally (used during initializers parsing */
55 ST_DATA CType func_vt
; /* current function return type (used by return instruction) */
56 ST_DATA
int func_var
; /* true if current function is variadic (used by return instruction) */
58 ST_DATA
int last_line_num
, last_ind
, func_ind
; /* debug last line number and pc */
59 ST_DATA
const char *funcname
;
61 ST_DATA CType char_pointer_type
/* frequently used types, initialized in tccgen_start() below */
, func_old_type
, int_type
, size_type
;
63 ST_DATA
/* NOTE(review): interior field lines (orig. 64-66) of this struct are
   missing from this extract — confirm full layout against upstream tccgen.c */
struct switch_t
{
67 } **p
; int n
; /* list of case ranges */
68 int def_sym
; /* default symbol */
69 } *cur_switch
; /* current switch */
71 /* ------------------------------------------------------------------------- */
73 static void gen_cast(CType
*type
);
74 static inline CType
*pointed_type(CType
*type
);
75 static int is_compatible_types(CType
*type1
, CType
*type2
);
76 static int parse_btype(CType
*type
, AttributeDef
*ad
);
77 static CType
*type_decl(CType
*type
, AttributeDef
*ad
, int *v
, int td
);
78 static void parse_expr_type(CType
*type
);
79 static void init_putv(CType
*type
, Section
*sec
, unsigned long c
);
80 static void decl_initializer(CType
*type
, Section
*sec
, unsigned long c
, int first
, int size_only
);
81 static void block(int *bsym
, int *csym
, int is_expr
);
82 static void decl_initializer_alloc(CType
*type
, AttributeDef
*ad
, int r
, int has_init
, int v
, int scope
);
83 static int decl0(int l
, int is_for_loop_init
, Sym
*);
84 static void expr_eq(void);
85 static void vla_runtime_type_size(CType
*type
, int *a
);
86 static void vla_sp_restore(void);
87 static void vla_sp_restore_root(void);
88 static int is_compatible_parameter_types(CType
*type1
, CType
*type2
);
89 static inline int64_t expr_const64(void);
90 ST_FUNC
void vpush64(int ty
, unsigned long long v
);
91 ST_FUNC
void vpush(CType
*type
);
92 ST_FUNC
int gvtst(int inv
, int t
);
93 ST_FUNC
int is_btype_size(int bt
);
94 static void gen_inline_functions(TCCState
*s
);
96 ST_INLN
/* Return nonzero when the basic type of 't' is a floating-point type
   (long double, double, float, or the two-register qfloat).
   NOTE(review): the body prologue (orig. lines 97-99), which must declare
   'bt' — presumably bt = t & VT_BTYPE — is missing from this extract;
   confirm against upstream tccgen.c. */
int is_float(int t
)
100 return bt
== VT_LDOUBLE
|| bt
== VT_DOUBLE
|| bt
== VT_FLOAT
|| bt
== VT_QFLOAT
;
103 /* we use our own 'finite' function to avoid potential problems with
104 non standard math libs */
105 /* XXX: endianness dependent */
106 ST_FUNC
/* Return 1 when 'd' is finite, 0 for Inf/NaN.  The double is copied into an
   int array via memcpy (avoiding strict-aliasing UB) and the high word is
   tested: OR-ing with 0x800fffff sets every bit except the 11 exponent bits,
   so the +1 carries out (result 0) exactly when the exponent field is
   all-ones, i.e. Inf or NaN.  See the endianness caveat in the comment above.
   NOTE(review): the declaration of 'p' (orig. lines 107-108, presumably a
   small int array) is missing from this extract — confirm upstream. */
int ieee_finite(double d
)
109 memcpy(p
, &d
, sizeof(double));
110 return ((unsigned)((p
[1] | 0x800fffff) + 1)) >> 31;
113 ST_FUNC
/* Verify that the value on top of the stack is an lvalue (VT_LVAL set in
   vtop->r).  NOTE(review): the statement executed when the test fails
   (orig. lines 116-117, presumably an "lvalue expected" error) is missing
   from this extract — confirm upstream. */
void test_lvalue(void)
115 if (!(vtop
->r
& VT_LVAL
))
119 ST_FUNC
/* Internal sanity check: report a value-stack leak as the distance between
   vtop and the recorded base pvtop.  NOTE(review): the guard condition
   (orig. lines 120-121, presumably 'if (vtop != pvtop)') is missing from
   this extract — confirm upstream. */
void check_vstack(void)
122 tcc_error("internal compiler error: vstack leak (%d)", vtop
- pvtop
);
125 /* ------------------------------------------------------------------------- */
126 /* vstack debugging aid */
129 void pv (const char *lbl
, int a
, int b
)
132 for (i
= a
; i
< a
+ b
; ++i
) {
133 SValue
*p
= &vtop
[-i
];
134 printf("%s vtop[-%d] : type.t:%04x r:%04x r2:%04x c.i:%d\n",
135 lbl
, i
, p
->type
.t
, p
->r
, p
->r2
, (int)p
->c
.i
);
140 /* ------------------------------------------------------------------------- */
141 /* start of translation unit info */
142 ST_FUNC
void tcc_debug_start(TCCState
*s1
)
147 /* file info: full path + filename */
148 section_sym
= put_elf_sym(symtab_section
, 0, 0,
149 ELFW(ST_INFO
)(STB_LOCAL
, STT_SECTION
), 0,
150 text_section
->sh_num
, NULL
);
151 getcwd(buf
, sizeof(buf
));
153 normalize_slashes(buf
);
155 pstrcat(buf
, sizeof(buf
), "/");
156 put_stabs_r(buf
, N_SO
, 0, 0,
157 text_section
->data_offset
, text_section
, section_sym
);
158 put_stabs_r(file
->filename
, N_SO
, 0, 0,
159 text_section
->data_offset
, text_section
, section_sym
);
164 /* an elf symbol of type STT_FILE must be put so that STB_LOCAL
165 symbols can be safely used */
166 put_elf_sym(symtab_section
, 0, 0,
167 ELFW(ST_INFO
)(STB_LOCAL
, STT_FILE
), 0,
168 SHN_ABS
, file
->filename
);
171 /* put end of translation unit info */
172 ST_FUNC
void tcc_debug_end(TCCState
*s1
)
176 put_stabs_r(NULL
, N_SO
, 0, 0,
177 text_section
->data_offset
, text_section
, section_sym
);
181 /* generate line number info */
182 ST_FUNC
void tcc_debug_line(TCCState
*s1
)
186 if ((last_line_num
!= file
->line_num
|| last_ind
!= ind
)) {
187 put_stabn(N_SLINE
, 0, file
->line_num
, ind
- func_ind
);
189 last_line_num
= file
->line_num
;
193 /* put function symbol */
194 ST_FUNC
void tcc_debug_funcstart(TCCState
*s1
, Sym
*sym
)
202 /* XXX: we put here a dummy type */
203 snprintf(buf
, sizeof(buf
), "%s:%c1",
204 funcname
, sym
->type
.t
& VT_STATIC
? 'f' : 'F');
205 put_stabs_r(buf
, N_FUN
, 0, file
->line_num
, 0,
206 cur_text_section
, sym
->c
);
207 /* //gr gdb wants a line at the function */
208 put_stabn(N_SLINE
, 0, file
->line_num
, 0);
214 /* put function size */
215 ST_FUNC
void tcc_debug_funcend(TCCState
*s1
, int size
)
219 put_stabn(N_FUN
, 0, 0, size
);
222 /* ------------------------------------------------------------------------- */
223 ST_FUNC
void tccgen_start(TCCState
*s1
)
225 cur_text_section
= NULL
;
227 anon_sym
= SYM_FIRST_ANOM
;
232 /* define some often used types */
234 char_pointer_type
.t
= VT_BYTE
;
235 mk_pointer(&char_pointer_type
);
237 size_type
.t
= VT_INT
;
239 size_type
.t
= VT_LLONG
;
241 func_old_type
.t
= VT_FUNC
;
242 func_old_type
.ref
= sym_push(SYM_FIELD
, &int_type
, FUNC_CDECL
, FUNC_OLD
);
246 #ifdef TCC_TARGET_ARM
251 ST_FUNC
void tccgen_end(TCCState
*s1
)
253 gen_inline_functions(s1
);
255 /* end of translation unit info */
259 /* ------------------------------------------------------------------------- */
260 /* apply storage attibutes to Elf symbol */
262 static void update_storage(Sym
*sym
)
271 esym
= &((ElfW(Sym
) *)symtab_section
->data
)[sym
->c
];
274 esym
->st_other
= (esym
->st_other
& ~ELFW(ST_VISIBILITY
)(-1))
275 | ((t
& VT_VIS_MASK
) >> VT_VIS_SHIFT
);
278 esym
->st_info
= ELFW(ST_INFO
)(STB_WEAK
, ELFW(ST_TYPE
)(esym
->st_info
));
282 esym
->st_other
|= ST_PE_EXPORT
;
286 /* ------------------------------------------------------------------------- */
287 /* update sym->c so that it points to an external symbol in section
288 'section' with value 'value' */
290 ST_FUNC
void put_extern_sym2(Sym
*sym
, Section
*section
,
291 addr_t value
, unsigned long size
,
292 int can_add_underscore
)
294 int sym_type
, sym_bind
, sh_num
, info
, other
, t
;
298 #ifdef CONFIG_TCC_BCHECK
304 else if (section
== SECTION_ABS
)
307 sh_num
= section
->sh_num
;
310 name
= get_tok_str(sym
->v
, NULL
);
311 #ifdef CONFIG_TCC_BCHECK
312 if (tcc_state
->do_bounds_check
) {
313 /* XXX: avoid doing that for statics ? */
314 /* if bound checking is activated, we change some function
315 names by adding the "__bound" prefix */
318 /* XXX: we rely only on malloc hooks */
331 strcpy(buf
, "__bound_");
339 if ((t
& VT_BTYPE
) == VT_FUNC
) {
341 } else if ((t
& VT_BTYPE
) == VT_VOID
) {
342 sym_type
= STT_NOTYPE
;
344 sym_type
= STT_OBJECT
;
347 sym_bind
= STB_LOCAL
;
349 sym_bind
= STB_GLOBAL
;
352 if (sym_type
== STT_FUNC
&& sym
->type
.ref
) {
353 Sym
*ref
= sym
->type
.ref
;
354 if (ref
->a
.func_call
== FUNC_STDCALL
&& can_add_underscore
) {
355 sprintf(buf1
, "_%s@%d", name
, ref
->a
.func_args
* PTR_SIZE
);
357 other
|= ST_PE_STDCALL
;
358 can_add_underscore
= 0;
362 other
|= ST_PE_IMPORT
;
364 if (tcc_state
->leading_underscore
&& can_add_underscore
) {
366 pstrcpy(buf1
+ 1, sizeof(buf1
) - 1, name
);
370 name
= get_tok_str(sym
->asm_label
, NULL
);
371 info
= ELFW(ST_INFO
)(sym_bind
, sym_type
);
372 sym
->c
= set_elf_sym(symtab_section
, value
, size
, info
, other
, sh_num
, name
);
374 esym
= &((ElfW(Sym
) *)symtab_section
->data
)[sym
->c
];
375 esym
->st_value
= value
;
376 esym
->st_size
= size
;
377 esym
->st_shndx
= sh_num
;
382 ST_FUNC
void put_extern_sym(Sym
*sym
, Section
*section
,
383 addr_t value
, unsigned long size
)
385 put_extern_sym2(sym
, section
, value
, size
, 1);
388 /* add a new relocation entry to symbol 'sym' in section 's' */
389 ST_FUNC
void greloca(Section
*s
, Sym
*sym
, unsigned long offset
, int type
,
394 if (nocode_wanted
&& s
== cur_text_section
)
399 put_extern_sym(sym
, NULL
, 0, 0);
403 /* now we can add ELF relocation info */
404 put_elf_reloca(symtab_section
, s
, offset
, type
, c
, addend
);
407 ST_FUNC
/* Convenience wrapper around greloca(): add a relocation of 'type' for
   symbol 'sym' at 'offset' in section 's', with a zero addend. */
void greloc(Section
*s
, Sym
*sym
, unsigned long offset
, int type
)
409 greloca(s
, sym
, offset
, type
, 0);
412 /* ------------------------------------------------------------------------- */
413 /* symbol allocator */
414 static Sym
/* Grow the symbol allocator: allocate a fresh pool of SYM_POOL_NB Sym
   objects, register it in the sym_pools dynarray (so it can be released
   later), and thread the new entries onto the sym_free_first free list.
   NOTE(review): several interior lines are missing from this extract
   (orig. 417-418, 421, 423, 426-428, 430-432 — the 'i'/'sym' setup, the
   per-iteration pointer advance, and the return) — confirm upstream. */
*__sym_malloc(void)
416 Sym
*sym_pool
, *sym
, *last_sym
;
419 sym_pool
= tcc_malloc(SYM_POOL_NB
* sizeof(Sym
));
420 dynarray_add(&sym_pools
, &nb_sym_pools
, sym_pool
);
422 last_sym
= sym_free_first
;
424 for(i
= 0; i
< SYM_POOL_NB
; i
++) {
425 sym
/* chain each pool entry onto the free list via its 'next' field */
->next
= last_sym
;
429 sym_free_first
= last_sym
;
433 static inline Sym
/* Allocate one Sym: pop the head of the free list, refilling it via
   __sym_malloc() when empty.  NOTE(review): the surrounding conditional /
   preprocessor structure (orig. 434-436, 438, 441-442, 444-446) is missing
   from this extract — the bare tcc_malloc() path at orig. 443 is presumably
   a SYM_DEBUG-style alternative; confirm upstream. */
*sym_malloc(void)
437 sym
= sym_free_first
;
439 sym
= __sym_malloc();
440 sym_free_first
= sym
->next
;
443 sym
= tcc_malloc(sizeof(Sym
));
448 ST_INLN
/* Release a Sym back to the allocator: push it onto the head of the
   sym_free_first free list.  No memory is actually freed here — the
   backing pools are retained in sym_pools. */
void sym_free(Sym
*sym
)
451 sym
->next
= sym_free_first
;
452 sym_free_first
= sym
;
458 /* push, without hashing */
459 ST_FUNC Sym
*sym_push2(Sym
**ps
, int v
, int t
, long c
)
479 /* find a symbol and return its associated structure. 's' is the top
480 of the symbol stack */
481 ST_FUNC Sym
*sym_find2(Sym
*s
, int v
)
493 /* structure lookup */
494 ST_INLN Sym
/* Look up the struct/union/enum symbol attached to token 'v' via the
   token table.  The unsigned compare rejects out-of-range tokens in one
   test.  NOTE(review): orig. lines 495-496 and 498 are missing from this
   extract — presumably 'v -= TOK_IDENT' and a NULL return for the
   out-of-range case; confirm upstream. */
*struct_find(int v
)
497 if ((unsigned)v
>= (unsigned)(tok_ident
- TOK_IDENT
))
499 return table_ident
[v
]->sym_struct
;
502 /* find an identifier */
503 ST_INLN Sym
/* Look up the ordinary identifier symbol attached to token 'v' — same
   shape as struct_find() above but reads sym_identifier instead of
   sym_struct.  NOTE(review): orig. lines 504-505 and 507 are missing from
   this extract — presumably 'v -= TOK_IDENT' and a NULL return; confirm
   upstream. */
*sym_find(int v
)
506 if ((unsigned)v
>= (unsigned)(tok_ident
- TOK_IDENT
))
508 return table_ident
[v
]->sym_identifier
;
511 /* push a given symbol on the symbol stack */
512 ST_FUNC Sym
*sym_push(int v
, CType
*type
, int r
, long c
)
521 s
= sym_push2(ps
, v
, type
->t
, c
);
522 s
->type
.ref
= type
->ref
;
524 /* don't record fields or anonymous symbols */
526 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
527 /* record symbol in token array */
528 ts
= table_ident
[(v
& ~SYM_STRUCT
) - TOK_IDENT
];
530 ps
= &ts
->sym_struct
;
532 ps
= &ts
->sym_identifier
;
535 s
->scope
= local_scope
;
536 if (s
->prev_tok
&& s
->prev_tok
->scope
== s
->scope
)
537 tcc_error("redeclaration of '%s'",
538 get_tok_str(v
& ~SYM_STRUCT
, NULL
));
543 /* push a global identifier */
544 ST_FUNC Sym
*global_identifier_push(int v
, int t
, long c
)
547 s
= sym_push2(&global_stack
, v
, t
, c
);
548 /* don't record anonymous symbol */
549 if (v
< SYM_FIRST_ANOM
) {
550 ps
= &table_ident
[v
- TOK_IDENT
]->sym_identifier
;
551 /* modify the top most local identifier, so that
552 sym_identifier will point to 's' when popped */
554 ps
= &(*ps
)->prev_tok
;
561 /* pop symbols until top reaches 'b'. If KEEP is non-zero don't really
562 pop them yet from the list, but do remove them from the token array. */
563 ST_FUNC
void sym_pop(Sym
**ptop
, Sym
*b
, int keep
)
573 /* remove symbol in token array */
575 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
576 ts
= table_ident
[(v
& ~SYM_STRUCT
) - TOK_IDENT
];
578 ps
= &ts
->sym_struct
;
580 ps
= &ts
->sym_identifier
;
591 /* ------------------------------------------------------------------------- */
593 static void vsetc(CType
*type
, int r
, CValue
*vc
)
597 if (vtop
>= vstack
+ (VSTACK_SIZE
- 1))
598 tcc_error("memory full (vstack)");
599 /* cannot let cpu flags if other instruction are generated. Also
600 avoid leaving VT_JMP anywhere except on the top of the stack
601 because it would complicate the code generator.
603 Don't do this when nocode_wanted. vtop might come from
604 !nocode_wanted regions (see 88_codeopt.c) and transforming
605 it to a register without actually generating code is wrong
606 as their value might still be used for real. All values
607 we push under nocode_wanted will eventually be popped
608 again, so that the VT_CMP/VT_JMP value will be in vtop
609 when code is unsuppressed again.
611 Same logic below in vswap(); */
612 if (vtop
>= vstack
&& !nocode_wanted
) {
613 v
= vtop
->r
& VT_VALMASK
;
614 if (v
== VT_CMP
|| (v
& ~1) == VT_JMP
)
626 ST_FUNC
void vswap(void)
629 /* cannot vswap cpu flags. See comment at vsetc() above */
630 if (vtop
>= vstack
&& !nocode_wanted
) {
631 int v
= vtop
->r
& VT_VALMASK
;
632 if (v
== VT_CMP
|| (v
& ~1) == VT_JMP
)
640 /* pop stack value */
641 ST_FUNC
void vpop(void)
644 v
= vtop
->r
& VT_VALMASK
;
645 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
646 /* for x86, we need to pop the FP stack */
648 o(0xd8dd); /* fstp %st(0) */
651 if (v
== VT_JMP
|| v
== VT_JMPI
) {
652 /* need to put correct jump if && or || without test */
658 /* push constant of type "type" with useless value */
659 ST_FUNC
void vpush(CType
*type
)
662 vsetc(type
, VT_CONST
, &cval
);
665 /* push integer constant */
666 ST_FUNC
/* Push the integer constant 'v' (type int) onto the value stack via
   vsetc().  NOTE(review): the lines declaring 'cval' and storing 'v' into
   it (orig. 667-669) are missing from this extract — confirm upstream. */
void vpushi(int v
)
670 vsetc(&int_type
, VT_CONST
, &cval
);
673 /* push a pointer sized constant */
674 static void vpushs(addr_t v
)
678 vsetc(&size_type
, VT_CONST
, &cval
);
681 /* push arbitrary 64bit constant */
682 ST_FUNC
void vpush64(int ty
, unsigned long long v
)
689 vsetc(&ctype
, VT_CONST
, &cval
);
692 /* push long long constant */
/* Push a long long constant: thin wrapper over vpush64() with VT_LLONG. */
693 static inline void vpushll(long long v
)
695 vpush64(VT_LLONG
, v
);
698 ST_FUNC
void vset(CType
*type
, int r
, long v
)
703 vsetc(type
, r
, &cval
);
706 static void vseti(int r
, int v
)
714 ST_FUNC
void vpushv(SValue
*v
)
716 if (vtop
>= vstack
+ (VSTACK_SIZE
- 1))
717 tcc_error("memory full (vstack)");
722 static void vdup(void)
727 /* rotate n first stack elements to the bottom
728 I1 ... In -> I2 ... In I1 [top is right]
730 ST_FUNC
void vrotb(int n
)
741 /* rotate the n elements before entry e towards the top
742 I1 ... In ... -> In I1 ... I(n-1) ... [top is right]
744 ST_FUNC
void vrote(SValue
*e
, int n
)
750 for(i
= 0;i
< n
- 1; i
++)
755 /* rotate n first stack elements to the top
756 I1 ... In -> In I1 ... I(n-1) [top is right]
758 ST_FUNC
void vrott(int n
)
763 /* push a symbol value of TYPE */
764 static inline void vpushsym(CType
*type
, Sym
*sym
)
768 vsetc(type
, VT_CONST
| VT_SYM
, &cval
);
772 /* Return a static symbol pointing to a section */
773 ST_FUNC Sym
*get_sym_ref(CType
*type
, Section
*sec
, unsigned long offset
, unsigned long size
)
779 sym
= global_identifier_push(v
, type
->t
| VT_STATIC
, 0);
780 sym
->type
.ref
= type
->ref
;
781 sym
->r
= VT_CONST
| VT_SYM
;
782 put_extern_sym(sym
, sec
, offset
, size
);
786 /* push a reference to a section offset by adding a dummy symbol */
787 static void vpush_ref(CType
*type
, Section
*sec
, unsigned long offset
, unsigned long size
)
789 vpushsym(type
, get_sym_ref(type
, sec
, offset
, size
));
792 /* define a new external reference to a symbol 'v' of type 'u' */
793 ST_FUNC Sym
*external_global_sym(int v
, CType
*type
, int r
)
799 /* push forward reference */
800 s
= global_identifier_push(v
, type
->t
| VT_EXTERN
, 0);
801 s
->type
.ref
= type
->ref
;
802 s
->r
= r
| VT_CONST
| VT_SYM
;
807 /* Merge some storage attributes. */
808 static void patch_storage(Sym
*sym
, CType
*type
)
811 if (!is_compatible_types(&sym
->type
, type
))
812 tcc_error("incompatible types for redefinition of '%s'",
813 get_tok_str(sym
->v
, NULL
));
816 if ((sym
->type
.t
^ t
) & VT_IMPORT
)
817 tcc_error("incompatible dll linkage for redefinition of '%s'",
818 get_tok_str(sym
->v
, NULL
));
820 sym
->type
.t
|= t
& (VT_EXPORT
|VT_WEAK
);
821 if (t
& VT_VIS_MASK
) {
822 int vis
= sym
->type
.t
& VT_VIS_MASK
;
823 int vis2
= t
& VT_VIS_MASK
;
824 if (vis
== (STV_DEFAULT
<< VT_VIS_SHIFT
))
826 else if (vis2
!= (STV_DEFAULT
<< VT_VIS_SHIFT
))
827 vis
= (vis
< vis2
) ? vis
: vis2
;
828 sym
->type
.t
= (sym
->type
.t
& ~VT_VIS_MASK
) | vis
;
832 /* define a new external reference to a symbol 'v' */
833 static Sym
*external_sym(int v
, CType
*type
, int r
)
838 /* push forward reference */
839 s
= sym_push(v
, type
, r
| VT_CONST
| VT_SYM
, 0);
840 s
->type
.t
|= VT_EXTERN
;
842 if (s
->type
.ref
== func_old_type
.ref
) {
843 s
->type
.ref
= type
->ref
;
844 s
->r
= r
| VT_CONST
| VT_SYM
;
845 s
->type
.t
|= VT_EXTERN
;
847 patch_storage(s
, type
);
853 /* push a reference to global symbol v */
854 ST_FUNC
void vpush_global_sym(CType
*type
, int v
)
856 vpushsym(type
, external_global_sym(v
, type
, 0));
859 /* save registers up to (vtop - n) stack entry */
860 ST_FUNC
void save_regs(int n
)
863 for(p
= vstack
, p1
= vtop
- n
; p
<= p1
; p
++)
867 /* save r to the memory stack, and mark it as being free */
868 ST_FUNC
void save_reg(int r
)
870 save_reg_upstack(r
, 0);
873 /* save r to the memory stack, and mark it as being free,
874 if seen up to (vtop - n) stack entry */
875 ST_FUNC
void save_reg_upstack(int r
, int n
)
877 int l
, saved
, size
, align
;
881 if ((r
&= VT_VALMASK
) >= VT_CONST
)
886 /* modify all stack values */
889 for(p
= vstack
, p1
= vtop
- n
; p
<= p1
; p
++) {
890 if ((p
->r
& VT_VALMASK
) == r
||
891 ((p
->type
.t
& VT_BTYPE
) == VT_LLONG
&& (p
->r2
& VT_VALMASK
) == r
)) {
892 /* must save value on stack if not already done */
894 /* NOTE: must reload 'r' because r might be equal to r2 */
895 r
= p
->r
& VT_VALMASK
;
896 /* store register in the stack */
898 if ((p
->r
& VT_LVAL
) ||
899 (!is_float(type
->t
) && (type
->t
& VT_BTYPE
) != VT_LLONG
))
900 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
901 type
= &char_pointer_type
;
905 size
= type_size(type
, &align
);
906 loc
= (loc
- size
) & -align
;
908 sv
.r
= VT_LOCAL
| VT_LVAL
;
911 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
912 /* x86 specific: need to pop fp register ST0 if saved */
914 o(0xd8dd); /* fstp %st(0) */
917 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
918 /* special long long case */
919 if ((type
->t
& VT_BTYPE
) == VT_LLONG
) {
927 /* mark that stack entry as being saved on the stack */
928 if (p
->r
& VT_LVAL
) {
929 /* also clear the bounded flag because the
930 relocation address of the function was stored in
932 p
->r
= (p
->r
& ~(VT_VALMASK
| VT_BOUNDED
)) | VT_LLOCAL
;
934 p
->r
= lvalue_type(p
->type
.t
) | VT_LOCAL
;
942 #ifdef TCC_TARGET_ARM
943 /* find a register of class 'rc2' with at most one reference on stack.
944 * If none, call get_reg(rc) */
945 ST_FUNC
int get_reg_ex(int rc
, int rc2
)
950 for(r
=0;r
<NB_REGS
;r
++) {
951 if (reg_classes
[r
] & rc2
) {
954 for(p
= vstack
; p
<= vtop
; p
++) {
955 if ((p
->r
& VT_VALMASK
) == r
||
956 (p
->r2
& VT_VALMASK
) == r
)
967 /* find a free register of class 'rc'. If none, save one register */
968 ST_FUNC
int get_reg(int rc
)
973 /* find a free register */
974 for(r
=0;r
<NB_REGS
;r
++) {
975 if (reg_classes
[r
] & rc
) {
978 for(p
=vstack
;p
<=vtop
;p
++) {
979 if ((p
->r
& VT_VALMASK
) == r
||
980 (p
->r2
& VT_VALMASK
) == r
)
988 /* no register left : free the first one on the stack (VERY
989 IMPORTANT to start from the bottom to ensure that we don't
990 spill registers used in gen_opi()) */
991 for(p
=vstack
;p
<=vtop
;p
++) {
992 /* look at second register (if long long) */
993 r
= p
->r2
& VT_VALMASK
;
994 if (r
< VT_CONST
&& (reg_classes
[r
] & rc
))
996 r
= p
->r
& VT_VALMASK
;
997 if (r
< VT_CONST
&& (reg_classes
[r
] & rc
)) {
1003 /* Should never comes here */
1007 /* move register 's' (of type 't') to 'r', and flush previous value of r to memory
1009 static void move_reg(int r
, int s
, int t
)
1023 /* get address of vtop (vtop MUST BE an lvalue) */
1024 ST_FUNC
void gaddrof(void)
1026 vtop
->r
&= ~VT_LVAL
;
1027 /* tricky: if saved lvalue, then we can go back to lvalue */
1028 if ((vtop
->r
& VT_VALMASK
) == VT_LLOCAL
)
1029 vtop
->r
= (vtop
->r
& ~(VT_VALMASK
| VT_LVAL_TYPE
)) | VT_LOCAL
| VT_LVAL
;
1034 #ifdef CONFIG_TCC_BCHECK
1035 /* generate lvalue bound code */
1036 static void gbound(void)
1041 vtop
->r
&= ~VT_MUSTBOUND
;
1042 /* if lvalue, then use checking code before dereferencing */
1043 if (vtop
->r
& VT_LVAL
) {
1044 /* if not VT_BOUNDED value, then make one */
1045 if (!(vtop
->r
& VT_BOUNDED
)) {
1046 lval_type
= vtop
->r
& (VT_LVAL_TYPE
| VT_LVAL
);
1047 /* must save type because we must set it to int to get pointer */
1049 vtop
->type
.t
= VT_PTR
;
1052 gen_bounded_ptr_add();
1053 vtop
->r
|= lval_type
;
1056 /* then check for dereferencing */
1057 gen_bounded_ptr_deref();
1062 /* store vtop a register belonging to class 'rc'. lvalues are
1063 converted to values. Cannot be used if cannot be converted to
1064 register value (such as structures). */
1065 ST_FUNC
int gv(int rc
)
1067 int r
, bit_pos
, bit_size
, size
, align
;
1070 /* NOTE: get_reg can modify vstack[] */
1071 if (vtop
->type
.t
& VT_BITFIELD
) {
1074 bit_pos
= (vtop
->type
.t
>> VT_STRUCT_SHIFT
) & 0x3f;
1075 bit_size
= (vtop
->type
.t
>> (VT_STRUCT_SHIFT
+ 6)) & 0x3f;
1076 /* remove bit field info to avoid loops */
1077 vtop
->type
.t
&= ~VT_BITFIELD
& ((1 << VT_STRUCT_SHIFT
) - 1);
1078 /* cast to int to propagate signedness in following ops */
1079 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
) {
1084 if((vtop
->type
.t
& VT_UNSIGNED
) ||
1085 (vtop
->type
.t
& VT_BTYPE
) == VT_BOOL
)
1086 type
.t
|= VT_UNSIGNED
;
1088 /* generate shifts */
1089 vpushi(bits
- (bit_pos
+ bit_size
));
1091 vpushi(bits
- bit_size
);
1092 /* NOTE: transformed to SHR if unsigned */
1096 if (is_float(vtop
->type
.t
) &&
1097 (vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
) {
1098 unsigned long offset
;
1099 /* CPUs usually cannot use float constants, so we store them
1100 generically in data segment */
1101 size
= type_size(&vtop
->type
, &align
);
1102 offset
= section_add(data_section
, size
, align
);
1103 vpush_ref(&vtop
->type
, data_section
, offset
, size
);
1105 init_putv(&vtop
->type
, data_section
, offset
);
1108 #ifdef CONFIG_TCC_BCHECK
1109 if (vtop
->r
& VT_MUSTBOUND
)
1113 r
= vtop
->r
& VT_VALMASK
;
1114 rc2
= (rc
& RC_FLOAT
) ? RC_FLOAT
: RC_INT
;
1115 #ifndef TCC_TARGET_ARM64
1118 #ifdef TCC_TARGET_X86_64
1119 else if (rc
== RC_FRET
)
1123 /* need to reload if:
1125 - lvalue (need to dereference pointer)
1126 - already a register, but not in the right class */
1128 || (vtop
->r
& VT_LVAL
)
1129 || !(reg_classes
[r
] & rc
)
1130 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
1131 || ((vtop
->type
.t
& VT_BTYPE
) == VT_QLONG
&& !(reg_classes
[vtop
->r2
] & rc2
))
1132 || ((vtop
->type
.t
& VT_BTYPE
) == VT_QFLOAT
&& !(reg_classes
[vtop
->r2
] & rc2
))
1134 || ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
&& !(reg_classes
[vtop
->r2
] & rc2
))
1139 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
1140 if (((vtop
->type
.t
& VT_BTYPE
) == VT_QLONG
) || ((vtop
->type
.t
& VT_BTYPE
) == VT_QFLOAT
)) {
1141 int addr_type
= VT_LLONG
, load_size
= 8, load_type
= ((vtop
->type
.t
& VT_BTYPE
) == VT_QLONG
) ? VT_LLONG
: VT_DOUBLE
;
1143 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
) {
1144 int addr_type
= VT_INT
, load_size
= 4, load_type
= VT_INT
;
1145 unsigned long long ll
;
1147 int r2
, original_type
;
1148 original_type
= vtop
->type
.t
;
1149 /* two register type load : expand to two words
1151 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
1152 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
) {
1155 vtop
->c
.i
= ll
; /* first word */
1157 vtop
->r
= r
; /* save register value */
1158 vpushi(ll
>> 32); /* second word */
1161 if (vtop
->r
& VT_LVAL
) {
1162 /* We do not want to modifier the long long
1163 pointer here, so the safest (and less
1164 efficient) is to save all the other registers
1165 in the stack. XXX: totally inefficient. */
1169 /* lvalue_save: save only if used further down the stack */
1170 save_reg_upstack(vtop
->r
, 1);
1172 /* load from memory */
1173 vtop
->type
.t
= load_type
;
1176 vtop
[-1].r
= r
; /* save register value */
1177 /* increment pointer to get second word */
1178 vtop
->type
.t
= addr_type
;
1183 vtop
->type
.t
= load_type
;
1185 /* move registers */
1188 vtop
[-1].r
= r
; /* save register value */
1189 vtop
->r
= vtop
[-1].r2
;
1191 /* Allocate second register. Here we rely on the fact that
1192 get_reg() tries first to free r2 of an SValue. */
1196 /* write second register */
1198 vtop
->type
.t
= original_type
;
1199 } else if ((vtop
->r
& VT_LVAL
) && !is_float(vtop
->type
.t
)) {
1201 /* lvalue of scalar type : need to use lvalue type
1202 because of possible cast */
1205 /* compute memory access type */
1206 if (vtop
->r
& VT_LVAL_BYTE
)
1208 else if (vtop
->r
& VT_LVAL_SHORT
)
1210 if (vtop
->r
& VT_LVAL_UNSIGNED
)
1214 /* restore wanted type */
1217 /* one register type load */
1222 #ifdef TCC_TARGET_C67
1223 /* uses register pairs for doubles */
1224 if ((vtop
->type
.t
& VT_BTYPE
) == VT_DOUBLE
)
1231 /* generate vtop[-1] and vtop[0] in resp. classes rc1 and rc2 */
1232 ST_FUNC
void gv2(int rc1
, int rc2
)
1236 /* generate more generic register first. But VT_JMP or VT_CMP
1237 values must be generated first in all cases to avoid possible
1239 v
= vtop
[0].r
& VT_VALMASK
;
1240 if (v
!= VT_CMP
&& (v
& ~1) != VT_JMP
&& rc1
<= rc2
) {
1245 /* test if reload is needed for first register */
1246 if ((vtop
[-1].r
& VT_VALMASK
) >= VT_CONST
) {
1256 /* test if reload is needed for first register */
1257 if ((vtop
[0].r
& VT_VALMASK
) >= VT_CONST
) {
1263 #ifndef TCC_TARGET_ARM64
1264 /* wrapper around RC_FRET to return a register by type */
1265 static int rc_fret(int t
)
1267 #ifdef TCC_TARGET_X86_64
1268 if (t
== VT_LDOUBLE
) {
1276 /* wrapper around REG_FRET to return a register by type */
1277 static int reg_fret(int t
)
1279 #ifdef TCC_TARGET_X86_64
1280 if (t
== VT_LDOUBLE
) {
1287 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
1288 /* expand 64bit on stack in two ints */
1289 static void lexpand(void)
1292 u
= vtop
->type
.t
& (VT_DEFSIGN
| VT_UNSIGNED
);
1293 v
= vtop
->r
& (VT_VALMASK
| VT_LVAL
);
1294 if (v
== VT_CONST
) {
1297 } else if (v
== (VT_LVAL
|VT_CONST
) || v
== (VT_LVAL
|VT_LOCAL
)) {
1303 vtop
[0].r
= vtop
[-1].r2
;
1304 vtop
[0].r2
= vtop
[-1].r2
= VT_CONST
;
1306 vtop
[0].type
.t
= vtop
[-1].type
.t
= VT_INT
| u
;
1310 #ifdef TCC_TARGET_ARM
1311 /* expand long long on stack */
1312 ST_FUNC
void lexpand_nr(void)
1316 u
= vtop
->type
.t
& (VT_DEFSIGN
| VT_UNSIGNED
);
1318 vtop
->r2
= VT_CONST
;
1319 vtop
->type
.t
= VT_INT
| u
;
1320 v
=vtop
[-1].r
& (VT_VALMASK
| VT_LVAL
);
1321 if (v
== VT_CONST
) {
1322 vtop
[-1].c
.i
= vtop
->c
.i
;
1323 vtop
->c
.i
= vtop
->c
.i
>> 32;
1325 } else if (v
== (VT_LVAL
|VT_CONST
) || v
== (VT_LVAL
|VT_LOCAL
)) {
1327 vtop
->r
= vtop
[-1].r
;
1328 } else if (v
> VT_CONST
) {
1332 vtop
->r
= vtop
[-1].r2
;
1333 vtop
[-1].r2
= VT_CONST
;
1334 vtop
[-1].type
.t
= VT_INT
| u
;
1338 #if !defined(TCC_TARGET_X86_64) && !defined(TCC_TARGET_ARM64)
1339 /* build a long long from two ints */
1340 static void lbuild(int t
)
1342 gv2(RC_INT
, RC_INT
);
1343 vtop
[-1].r2
= vtop
[0].r
;
1344 vtop
[-1].type
.t
= t
;
1349 /* convert stack entry to register and duplicate its value in another
1351 static void gv_dup(void)
1357 #if !defined(TCC_TARGET_X86_64) && !defined(TCC_TARGET_ARM64)
1358 if ((t
& VT_BTYPE
) == VT_LLONG
) {
1365 /* stack: H L L1 H1 */
1375 /* duplicate value */
1380 #ifdef TCC_TARGET_X86_64
1381 if ((t
& VT_BTYPE
) == VT_LDOUBLE
) {
1391 load(r1
, &sv
); /* move r to r1 */
1393 /* duplicates value */
1399 /* Generate value test
1401 * Generate a test for any value (jump, comparison and integers) */
1402 ST_FUNC
int gvtst(int inv
, int t
)
1404 int v
= vtop
->r
& VT_VALMASK
;
1405 if (v
!= VT_CMP
&& v
!= VT_JMP
&& v
!= VT_JMPI
) {
1409 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
1410 /* constant jmp optimization */
1411 if ((vtop
->c
.i
!= 0) != inv
)
1416 return gtst(inv
, t
);
1419 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
1420 /* generate CPU independent (unsigned) long long operations */
1421 static void gen_opl(int op
)
1423 int t
, a
, b
, op1
, c
, i
;
1425 unsigned short reg_iret
= REG_IRET
;
1426 unsigned short reg_lret
= REG_LRET
;
1432 func
= TOK___divdi3
;
1435 func
= TOK___udivdi3
;
1438 func
= TOK___moddi3
;
1441 func
= TOK___umoddi3
;
1448 /* call generic long long function */
1449 vpush_global_sym(&func_old_type
, func
);
1454 vtop
->r2
= reg_lret
;
1462 //pv("gen_opl A",0,2);
1468 /* stack: L1 H1 L2 H2 */
1473 vtop
[-2] = vtop
[-3];
1476 /* stack: H1 H2 L1 L2 */
1477 //pv("gen_opl B",0,4);
1483 /* stack: H1 H2 L1 L2 ML MH */
1486 /* stack: ML MH H1 H2 L1 L2 */
1490 /* stack: ML MH H1 L2 H2 L1 */
1495 /* stack: ML MH M1 M2 */
1498 } else if (op
== '+' || op
== '-') {
1499 /* XXX: add non carry method too (for MIPS or alpha) */
1505 /* stack: H1 H2 (L1 op L2) */
1508 gen_op(op1
+ 1); /* TOK_xxxC2 */
1511 /* stack: H1 H2 (L1 op L2) */
1514 /* stack: (L1 op L2) H1 H2 */
1516 /* stack: (L1 op L2) (H1 op H2) */
1524 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
1525 t
= vtop
[-1].type
.t
;
1529 /* stack: L H shift */
1531 /* constant: simpler */
1532 /* NOTE: all comments are for SHL. the other cases are
1533 done by swaping words */
1544 if (op
!= TOK_SAR
) {
1577 /* XXX: should provide a faster fallback on x86 ? */
1580 func
= TOK___ashrdi3
;
1583 func
= TOK___lshrdi3
;
1586 func
= TOK___ashldi3
;
1592 /* compare operations */
1598 /* stack: L1 H1 L2 H2 */
1600 vtop
[-1] = vtop
[-2];
1602 /* stack: L1 L2 H1 H2 */
1605 /* when values are equal, we need to compare low words. since
1606 the jump is inverted, we invert the test too. */
1609 else if (op1
== TOK_GT
)
1611 else if (op1
== TOK_ULT
)
1613 else if (op1
== TOK_UGT
)
1623 /* generate non equal test */
1629 /* compare low. Always unsigned */
1633 else if (op1
== TOK_LE
)
1635 else if (op1
== TOK_GT
)
1637 else if (op1
== TOK_GE
)
/* Emulate signed 64-bit division using only unsigned arithmetic so
   that INT64_MIN / -1 cannot trap or invoke undefined behavior in the
   host compiler. Operands and result are two's-complement values
   carried in uint64_t. */
static uint64_t gen_opic_sdiv(uint64_t a, uint64_t b)
{
    /* take magnitudes; unsigned negation is well defined */
    uint64_t num = (a >> 63) ? -a : a;
    uint64_t den = (b >> 63) ? -b : b;
    uint64_t quot = num / den;
    /* the quotient is negative exactly when the operand signs differ */
    return ((a ^ b) >> 63) ? -quot : quot;
}
/* Signed "a < b" on two's-complement values stored in uint64_t:
   flipping the sign bit maps the signed ordering onto the unsigned
   ordering, so a plain unsigned compare gives the signed result. */
static int gen_opic_lt(uint64_t a, uint64_t b)
{
    const uint64_t bias = (uint64_t)1 << 63;
    return (a ^ bias) < (b ^ bias);
}
1659 /* handle integer constant optimizations and various machine
1661 static void gen_opic(int op
)
1663 SValue
*v1
= vtop
- 1;
1665 int t1
= v1
->type
.t
& VT_BTYPE
;
1666 int t2
= v2
->type
.t
& VT_BTYPE
;
1667 int c1
= (v1
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
1668 int c2
= (v2
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
1669 uint64_t l1
= c1
? v1
->c
.i
: 0;
1670 uint64_t l2
= c2
? v2
->c
.i
: 0;
1671 int shm
= (t1
== VT_LLONG
) ? 63 : 31;
1673 if (t1
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t1
!= VT_PTR
))
1674 l1
= ((uint32_t)l1
|
1675 (v1
->type
.t
& VT_UNSIGNED
? 0 : -(l1
& 0x80000000)));
1676 if (t2
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t2
!= VT_PTR
))
1677 l2
= ((uint32_t)l2
|
1678 (v2
->type
.t
& VT_UNSIGNED
? 0 : -(l2
& 0x80000000)));
1682 case '+': l1
+= l2
; break;
1683 case '-': l1
-= l2
; break;
1684 case '&': l1
&= l2
; break;
1685 case '^': l1
^= l2
; break;
1686 case '|': l1
|= l2
; break;
1687 case '*': l1
*= l2
; break;
1694 /* if division by zero, generate explicit division */
1697 tcc_error("division by zero in constant");
1701 default: l1
= gen_opic_sdiv(l1
, l2
); break;
1702 case '%': l1
= l1
- l2
* gen_opic_sdiv(l1
, l2
); break;
1703 case TOK_UDIV
: l1
= l1
/ l2
; break;
1704 case TOK_UMOD
: l1
= l1
% l2
; break;
1707 case TOK_SHL
: l1
<<= (l2
& shm
); break;
1708 case TOK_SHR
: l1
>>= (l2
& shm
); break;
1710 l1
= (l1
>> 63) ? ~(~l1
>> (l2
& shm
)) : l1
>> (l2
& shm
);
1713 case TOK_ULT
: l1
= l1
< l2
; break;
1714 case TOK_UGE
: l1
= l1
>= l2
; break;
1715 case TOK_EQ
: l1
= l1
== l2
; break;
1716 case TOK_NE
: l1
= l1
!= l2
; break;
1717 case TOK_ULE
: l1
= l1
<= l2
; break;
1718 case TOK_UGT
: l1
= l1
> l2
; break;
1719 case TOK_LT
: l1
= gen_opic_lt(l1
, l2
); break;
1720 case TOK_GE
: l1
= !gen_opic_lt(l1
, l2
); break;
1721 case TOK_LE
: l1
= !gen_opic_lt(l2
, l1
); break;
1722 case TOK_GT
: l1
= gen_opic_lt(l2
, l1
); break;
1724 case TOK_LAND
: l1
= l1
&& l2
; break;
1725 case TOK_LOR
: l1
= l1
|| l2
; break;
1729 if (t1
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t1
!= VT_PTR
))
1730 l1
= ((uint32_t)l1
|
1731 (v1
->type
.t
& VT_UNSIGNED
? 0 : -(l1
& 0x80000000)));
1735 /* if commutative ops, put c2 as constant */
1736 if (c1
&& (op
== '+' || op
== '&' || op
== '^' ||
1737 op
== '|' || op
== '*')) {
1739 c2
= c1
; //c = c1, c1 = c2, c2 = c;
1740 l2
= l1
; //l = l1, l1 = l2, l2 = l;
1742 if (!const_wanted
&&
1744 (op
== TOK_SHL
|| op
== TOK_SHR
|| op
== TOK_SAR
)) ||
1745 (l1
== -1 && op
== TOK_SAR
))) {
1746 /* treat (0 << x), (0 >> x) and (-1 >> x) as constant */
1748 } else if (!const_wanted
&&
1749 c2
&& ((l2
== 0 && (op
== '&' || op
== '*')) ||
1750 (l2
== -1 && op
== '|') ||
1751 (l2
== 0xffffffff && t2
!= VT_LLONG
&& op
== '|') ||
1752 (l2
== 1 && (op
== '%' || op
== TOK_UMOD
)))) {
1753 /* treat (x & 0), (x * 0), (x | -1) and (x % 1) as constant */
1758 } else if (c2
&& (((op
== '*' || op
== '/' || op
== TOK_UDIV
||
1761 ((op
== '+' || op
== '-' || op
== '|' || op
== '^' ||
1762 op
== TOK_SHL
|| op
== TOK_SHR
|| op
== TOK_SAR
) &&
1766 /* filter out NOP operations like x*1, x-0, x&-1... */
1768 } else if (c2
&& (op
== '*' || op
== TOK_PDIV
|| op
== TOK_UDIV
)) {
1769 /* try to use shifts instead of muls or divs */
1770 if (l2
> 0 && (l2
& (l2
- 1)) == 0) {
1779 else if (op
== TOK_PDIV
)
1785 } else if (c2
&& (op
== '+' || op
== '-') &&
1786 (((vtop
[-1].r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == (VT_CONST
| VT_SYM
))
1787 || (vtop
[-1].r
& (VT_VALMASK
| VT_LVAL
)) == VT_LOCAL
)) {
1788 /* symbol + constant case */
1792 /* The backends can't always deal with addends to symbols
1793 larger than +-1<<31. Don't construct such. */
1800 /* call low level op generator */
1801 if (t1
== VT_LLONG
|| t2
== VT_LLONG
||
1802 (PTR_SIZE
== 8 && (t1
== VT_PTR
|| t2
== VT_PTR
)))
1810 /* generate a floating point operation with constant propagation */
1811 static void gen_opif(int op
)
1819 /* currently, we cannot do computations with forward symbols */
1820 c1
= (v1
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
1821 c2
= (v2
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
1823 if (v1
->type
.t
== VT_FLOAT
) {
1826 } else if (v1
->type
.t
== VT_DOUBLE
) {
1834 /* NOTE: we only do constant propagation if finite number (not
1835 NaN or infinity) (ANSI spec) */
1836 if (!ieee_finite(f1
) || !ieee_finite(f2
))
1840 case '+': f1
+= f2
; break;
1841 case '-': f1
-= f2
; break;
1842 case '*': f1
*= f2
; break;
1846 tcc_error("division by zero in constant");
1851 /* XXX: also handles tests ? */
1855 /* XXX: overflow test ? */
1856 if (v1
->type
.t
== VT_FLOAT
) {
1858 } else if (v1
->type
.t
== VT_DOUBLE
) {
1870 static int pointed_size(CType
*type
)
1873 return type_size(pointed_type(type
), &align
);
1876 static void vla_runtime_pointed_size(CType
*type
)
1879 vla_runtime_type_size(pointed_type(type
), &align
);
1882 static inline int is_null_pointer(SValue
*p
)
1884 if ((p
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) != VT_CONST
)
1886 return ((p
->type
.t
& VT_BTYPE
) == VT_INT
&& (uint32_t)p
->c
.i
== 0) ||
1887 ((p
->type
.t
& VT_BTYPE
) == VT_LLONG
&& p
->c
.i
== 0) ||
1888 ((p
->type
.t
& VT_BTYPE
) == VT_PTR
&&
1889 (PTR_SIZE
== 4 ? (uint32_t)p
->c
.i
== 0 : p
->c
.i
== 0));
1892 static inline int is_integer_btype(int bt
)
1894 return (bt
== VT_BYTE
|| bt
== VT_SHORT
||
1895 bt
== VT_INT
|| bt
== VT_LLONG
);
1898 /* check types for comparison or subtraction of pointers */
1899 static void check_comparison_pointer_types(SValue
*p1
, SValue
*p2
, int op
)
1901 CType
*type1
, *type2
, tmp_type1
, tmp_type2
;
1904 /* null pointers are accepted for all comparisons as gcc */
1905 if (is_null_pointer(p1
) || is_null_pointer(p2
))
1909 bt1
= type1
->t
& VT_BTYPE
;
1910 bt2
= type2
->t
& VT_BTYPE
;
1911 /* accept comparison between pointer and integer with a warning */
1912 if ((is_integer_btype(bt1
) || is_integer_btype(bt2
)) && op
!= '-') {
1913 if (op
!= TOK_LOR
&& op
!= TOK_LAND
)
1914 tcc_warning("comparison between pointer and integer");
1918 /* both must be pointers or implicit function pointers */
1919 if (bt1
== VT_PTR
) {
1920 type1
= pointed_type(type1
);
1921 } else if (bt1
!= VT_FUNC
)
1922 goto invalid_operands
;
1924 if (bt2
== VT_PTR
) {
1925 type2
= pointed_type(type2
);
1926 } else if (bt2
!= VT_FUNC
) {
1928 tcc_error("invalid operands to binary %s", get_tok_str(op
, NULL
));
1930 if ((type1
->t
& VT_BTYPE
) == VT_VOID
||
1931 (type2
->t
& VT_BTYPE
) == VT_VOID
)
1935 tmp_type1
.t
&= ~(VT_DEFSIGN
| VT_UNSIGNED
| VT_CONSTANT
| VT_VOLATILE
);
1936 tmp_type2
.t
&= ~(VT_DEFSIGN
| VT_UNSIGNED
| VT_CONSTANT
| VT_VOLATILE
);
1937 if (!is_compatible_types(&tmp_type1
, &tmp_type2
)) {
1938 /* gcc-like error if '-' is used */
1940 goto invalid_operands
;
1942 tcc_warning("comparison of distinct pointer types lacks a cast");
1946 /* generic gen_op: handles types problems */
1947 ST_FUNC
void gen_op(int op
)
1949 int u
, t1
, t2
, bt1
, bt2
, t
;
1953 t1
= vtop
[-1].type
.t
;
1954 t2
= vtop
[0].type
.t
;
1955 bt1
= t1
& VT_BTYPE
;
1956 bt2
= t2
& VT_BTYPE
;
1958 if (bt1
== VT_STRUCT
|| bt2
== VT_STRUCT
) {
1959 tcc_error("operation on a struct");
1960 } else if (bt1
== VT_FUNC
|| bt2
== VT_FUNC
) {
1961 if (bt2
== VT_FUNC
) {
1962 mk_pointer(&vtop
->type
);
1965 if (bt1
== VT_FUNC
) {
1967 mk_pointer(&vtop
->type
);
1972 } else if (bt1
== VT_PTR
|| bt2
== VT_PTR
) {
1973 /* at least one operand is a pointer */
1974 /* relationnal op: must be both pointers */
1975 if (op
>= TOK_ULT
&& op
<= TOK_LOR
) {
1976 check_comparison_pointer_types(vtop
- 1, vtop
, op
);
1977 /* pointers are handled are unsigned */
1978 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
1979 t
= VT_LLONG
| VT_UNSIGNED
;
1981 t
= VT_INT
| VT_UNSIGNED
;
1985 /* if both pointers, then it must be the '-' op */
1986 if (bt1
== VT_PTR
&& bt2
== VT_PTR
) {
1988 tcc_error("cannot use pointers here");
1989 check_comparison_pointer_types(vtop
- 1, vtop
, op
);
1990 /* XXX: check that types are compatible */
1991 if (vtop
[-1].type
.t
& VT_VLA
) {
1992 vla_runtime_pointed_size(&vtop
[-1].type
);
1994 vpushi(pointed_size(&vtop
[-1].type
));
1998 /* set to integer type */
1999 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
2000 vtop
->type
.t
= VT_LLONG
;
2002 vtop
->type
.t
= VT_INT
;
2007 /* exactly one pointer : must be '+' or '-'. */
2008 if (op
!= '-' && op
!= '+')
2009 tcc_error("cannot use pointers here");
2010 /* Put pointer as first operand */
2011 if (bt2
== VT_PTR
) {
2013 t
= t1
, t1
= t2
, t2
= t
;
2016 if ((vtop
[0].type
.t
& VT_BTYPE
) == VT_LLONG
)
2017 /* XXX: truncate here because gen_opl can't handle ptr + long long */
2018 gen_cast(&int_type
);
2020 type1
= vtop
[-1].type
;
2021 type1
.t
&= ~VT_ARRAY
;
2022 if (vtop
[-1].type
.t
& VT_VLA
)
2023 vla_runtime_pointed_size(&vtop
[-1].type
);
2025 u
= pointed_size(&vtop
[-1].type
);
2027 tcc_error("unknown array element size");
2028 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
2031 /* XXX: cast to int ? (long long case) */
2037 /* #ifdef CONFIG_TCC_BCHECK
2038 The main reason to removing this code:
2045 fprintf(stderr, "v+i-j = %p\n", v+i-j);
2046 fprintf(stderr, "v+(i-j) = %p\n", v+(i-j));
2048 When this code is on. then the output looks like
2050 v+(i-j) = 0xbff84000
2052 /* if evaluating constant expression, no code should be
2053 generated, so no bound check */
2054 if (tcc_state
->do_bounds_check
&& !const_wanted
) {
2055 /* if bounded pointers, we generate a special code to
2062 gen_bounded_ptr_add();
2068 /* put again type if gen_opic() swaped operands */
2071 } else if (is_float(bt1
) || is_float(bt2
)) {
2072 /* compute bigger type and do implicit casts */
2073 if (bt1
== VT_LDOUBLE
|| bt2
== VT_LDOUBLE
) {
2075 } else if (bt1
== VT_DOUBLE
|| bt2
== VT_DOUBLE
) {
2080 /* floats can only be used for a few operations */
2081 if (op
!= '+' && op
!= '-' && op
!= '*' && op
!= '/' &&
2082 (op
< TOK_ULT
|| op
> TOK_GT
))
2083 tcc_error("invalid operands for binary operation");
2085 } else if (op
== TOK_SHR
|| op
== TOK_SAR
|| op
== TOK_SHL
) {
2086 t
= bt1
== VT_LLONG
? VT_LLONG
: VT_INT
;
2087 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
)) == (t
| VT_UNSIGNED
))
2090 } else if (bt1
== VT_LLONG
|| bt2
== VT_LLONG
) {
2091 /* cast to biggest op */
2093 /* convert to unsigned if it does not fit in a long long */
2094 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
)) == (VT_LLONG
| VT_UNSIGNED
) ||
2095 (t2
& (VT_BTYPE
| VT_UNSIGNED
)) == (VT_LLONG
| VT_UNSIGNED
))
2099 /* integer operations */
2101 /* convert to unsigned if it does not fit in an integer */
2102 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
)) == (VT_INT
| VT_UNSIGNED
) ||
2103 (t2
& (VT_BTYPE
| VT_UNSIGNED
)) == (VT_INT
| VT_UNSIGNED
))
2106 /* XXX: currently, some unsigned operations are explicit, so
2107 we modify them here */
2108 if (t
& VT_UNSIGNED
) {
2115 else if (op
== TOK_LT
)
2117 else if (op
== TOK_GT
)
2119 else if (op
== TOK_LE
)
2121 else if (op
== TOK_GE
)
2128 /* special case for shifts and long long: we keep the shift as
2130 if (op
== TOK_SHR
|| op
== TOK_SAR
|| op
== TOK_SHL
)
2137 if (op
>= TOK_ULT
&& op
<= TOK_GT
) {
2138 /* relationnal op: the result is an int */
2139 vtop
->type
.t
= VT_INT
;
2144 // Make sure that we have converted to an rvalue:
2145 if (vtop
->r
& VT_LVAL
)
2146 gv(is_float(vtop
->type
.t
& VT_BTYPE
) ? RC_FLOAT
: RC_INT
);
2149 #ifndef TCC_TARGET_ARM
2150 /* generic itof for unsigned long long case */
2151 static void gen_cvt_itof1(int t
)
2153 #ifdef TCC_TARGET_ARM64
2156 if ((vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
)) ==
2157 (VT_LLONG
| VT_UNSIGNED
)) {
2160 vpush_global_sym(&func_old_type
, TOK___floatundisf
);
2161 #if LDOUBLE_SIZE != 8
2162 else if (t
== VT_LDOUBLE
)
2163 vpush_global_sym(&func_old_type
, TOK___floatundixf
);
2166 vpush_global_sym(&func_old_type
, TOK___floatundidf
);
2170 vtop
->r
= reg_fret(t
);
2178 /* generic ftoi for unsigned long long case */
2179 static void gen_cvt_ftoi1(int t
)
2181 #ifdef TCC_TARGET_ARM64
2186 if (t
== (VT_LLONG
| VT_UNSIGNED
)) {
2187 /* not handled natively */
2188 st
= vtop
->type
.t
& VT_BTYPE
;
2190 vpush_global_sym(&func_old_type
, TOK___fixunssfdi
);
2191 #if LDOUBLE_SIZE != 8
2192 else if (st
== VT_LDOUBLE
)
2193 vpush_global_sym(&func_old_type
, TOK___fixunsxfdi
);
2196 vpush_global_sym(&func_old_type
, TOK___fixunsdfdi
);
2201 vtop
->r2
= REG_LRET
;
2208 /* force char or short cast */
2209 static void force_charshort_cast(int t
)
2213 /* XXX: add optimization if lvalue : just change type and offset */
2218 if (t
& VT_UNSIGNED
) {
2219 vpushi((1 << bits
) - 1);
2222 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
)
2228 /* result must be signed or the SAR is converted to an SHL
2229 This was not the case when "t" was a signed short
2230 and the last value on the stack was an unsigned int */
2231 vtop
->type
.t
&= ~VT_UNSIGNED
;
2237 /* cast 'vtop' to 'type'. Casting to bitfields is forbidden. */
2238 static void gen_cast(CType
*type
)
2240 int sbt
, dbt
, sf
, df
, c
, p
;
2242 /* special delayed cast for char/short */
2243 /* XXX: in some cases (multiple cascaded casts), it may still
2245 if (vtop
->r
& VT_MUSTCAST
) {
2246 vtop
->r
&= ~VT_MUSTCAST
;
2247 force_charshort_cast(vtop
->type
.t
);
2250 /* bitfields first get cast to ints */
2251 if (vtop
->type
.t
& VT_BITFIELD
) {
2255 dbt
= type
->t
& (VT_BTYPE
| VT_UNSIGNED
);
2256 sbt
= vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
);
2261 c
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2262 p
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == (VT_CONST
| VT_SYM
);
2264 /* constant case: we can do it now */
2265 /* XXX: in ISOC, cannot do it if error in convert */
2266 if (sbt
== VT_FLOAT
)
2267 vtop
->c
.ld
= vtop
->c
.f
;
2268 else if (sbt
== VT_DOUBLE
)
2269 vtop
->c
.ld
= vtop
->c
.d
;
2272 if ((sbt
& VT_BTYPE
) == VT_LLONG
) {
2273 if ((sbt
& VT_UNSIGNED
) || !(vtop
->c
.i
>> 63))
2274 vtop
->c
.ld
= vtop
->c
.i
;
2276 vtop
->c
.ld
= -(long double)-vtop
->c
.i
;
2278 if ((sbt
& VT_UNSIGNED
) || !(vtop
->c
.i
>> 31))
2279 vtop
->c
.ld
= (uint32_t)vtop
->c
.i
;
2281 vtop
->c
.ld
= -(long double)-(uint32_t)vtop
->c
.i
;
2284 if (dbt
== VT_FLOAT
)
2285 vtop
->c
.f
= (float)vtop
->c
.ld
;
2286 else if (dbt
== VT_DOUBLE
)
2287 vtop
->c
.d
= (double)vtop
->c
.ld
;
2288 } else if (sf
&& dbt
== (VT_LLONG
|VT_UNSIGNED
)) {
2289 vtop
->c
.i
= vtop
->c
.ld
;
2290 } else if (sf
&& dbt
== VT_BOOL
) {
2291 vtop
->c
.i
= (vtop
->c
.ld
!= 0);
2294 vtop
->c
.i
= vtop
->c
.ld
;
2295 else if (sbt
== (VT_LLONG
|VT_UNSIGNED
))
2297 else if (sbt
& VT_UNSIGNED
)
2298 vtop
->c
.i
= (uint32_t)vtop
->c
.i
;
2299 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
2300 else if (sbt
== VT_PTR
)
2303 else if (sbt
!= VT_LLONG
)
2304 vtop
->c
.i
= ((uint32_t)vtop
->c
.i
|
2305 -(vtop
->c
.i
& 0x80000000));
2307 if (dbt
== (VT_LLONG
|VT_UNSIGNED
))
2309 else if (dbt
== VT_BOOL
)
2310 vtop
->c
.i
= (vtop
->c
.i
!= 0);
2311 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
2312 else if (dbt
== VT_PTR
)
2315 else if (dbt
!= VT_LLONG
) {
2316 uint32_t m
= ((dbt
& VT_BTYPE
) == VT_BYTE
? 0xff :
2317 (dbt
& VT_BTYPE
) == VT_SHORT
? 0xffff :
2320 if (!(dbt
& VT_UNSIGNED
))
2321 vtop
->c
.i
|= -(vtop
->c
.i
& ((m
>> 1) + 1));
2324 } else if (p
&& dbt
== VT_BOOL
) {
2328 /* non constant case: generate code */
2330 /* convert from fp to fp */
2333 /* convert int to fp */
2336 /* convert fp to int */
2337 if (dbt
== VT_BOOL
) {
2341 /* we handle char/short/etc... with generic code */
2342 if (dbt
!= (VT_INT
| VT_UNSIGNED
) &&
2343 dbt
!= (VT_LLONG
| VT_UNSIGNED
) &&
2347 if (dbt
== VT_INT
&& (type
->t
& (VT_BTYPE
| VT_UNSIGNED
)) != dbt
) {
2348 /* additional cast for char/short... */
2353 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
2354 } else if ((dbt
& VT_BTYPE
) == VT_LLONG
) {
2355 if ((sbt
& VT_BTYPE
) != VT_LLONG
) {
2356 /* scalar to long long */
2357 /* machine independent conversion */
2359 /* generate high word */
2360 if (sbt
== (VT_INT
| VT_UNSIGNED
)) {
2364 if (sbt
== VT_PTR
) {
2365 /* cast from pointer to int before we apply
2366 shift operation, which pointers don't support*/
2367 gen_cast(&int_type
);
2373 /* patch second register */
2374 vtop
[-1].r2
= vtop
->r
;
2378 } else if ((dbt
& VT_BTYPE
) == VT_LLONG
||
2379 (dbt
& VT_BTYPE
) == VT_PTR
||
2380 (dbt
& VT_BTYPE
) == VT_FUNC
) {
2381 if ((sbt
& VT_BTYPE
) != VT_LLONG
&&
2382 (sbt
& VT_BTYPE
) != VT_PTR
&&
2383 (sbt
& VT_BTYPE
) != VT_FUNC
) {
2384 /* need to convert from 32bit to 64bit */
2386 if (sbt
!= (VT_INT
| VT_UNSIGNED
)) {
2387 #if defined(TCC_TARGET_ARM64)
2389 #elif defined(TCC_TARGET_X86_64)
2391 /* x86_64 specific: movslq */
2393 o(0xc0 + (REG_VALUE(r
) << 3) + REG_VALUE(r
));
2400 } else if (dbt
== VT_BOOL
) {
2401 /* scalar to bool */
2404 } else if ((dbt
& VT_BTYPE
) == VT_BYTE
||
2405 (dbt
& VT_BTYPE
) == VT_SHORT
) {
2406 if (sbt
== VT_PTR
) {
2407 vtop
->type
.t
= VT_INT
;
2408 tcc_warning("nonportable conversion from pointer to char/short");
2410 force_charshort_cast(dbt
);
2411 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
2412 } else if ((dbt
& VT_BTYPE
) == VT_INT
) {
2414 if ((sbt
& VT_BTYPE
) == VT_LLONG
) {
2415 /* from long long: just take low order word */
2419 /* if lvalue and single word type, nothing to do because
2420 the lvalue already contains the real type size (see
2421 VT_LVAL_xxx constants) */
2425 } else if ((dbt
& VT_BTYPE
) == VT_PTR
&& !(vtop
->r
& VT_LVAL
)) {
2426 /* if we are casting between pointer types,
2427 we must update the VT_LVAL_xxx size */
2428 vtop
->r
= (vtop
->r
& ~VT_LVAL_TYPE
)
2429 | (lvalue_type(type
->ref
->type
.t
) & VT_LVAL_TYPE
);
2434 /* return type size as known at compile time. Put alignment at 'a' */
2435 ST_FUNC
int type_size(CType
*type
, int *a
)
2440 bt
= type
->t
& VT_BTYPE
;
2441 if (bt
== VT_STRUCT
) {
2446 } else if (bt
== VT_PTR
) {
2447 if (type
->t
& VT_ARRAY
) {
2451 ts
= type_size(&s
->type
, a
);
2453 if (ts
< 0 && s
->c
< 0)
2461 } else if (bt
== VT_LDOUBLE
) {
2463 return LDOUBLE_SIZE
;
2464 } else if (bt
== VT_DOUBLE
|| bt
== VT_LLONG
) {
2465 #ifdef TCC_TARGET_I386
2466 #ifdef TCC_TARGET_PE
2471 #elif defined(TCC_TARGET_ARM)
2481 } else if (bt
== VT_INT
|| bt
== VT_FLOAT
) {
2484 } else if (bt
== VT_SHORT
) {
2487 } else if (bt
== VT_QLONG
|| bt
== VT_QFLOAT
) {
2490 } else if (bt
== VT_ENUM
) {
2492 /* Enums might be incomplete, so don't just return '4' here. */
2493 return type
->ref
->c
;
2495 /* char, void, function, _Bool */
2501 /* push type size as known at runtime time on top of value stack. Put
2503 ST_FUNC
void vla_runtime_type_size(CType
*type
, int *a
)
2505 if (type
->t
& VT_VLA
) {
2506 type_size(&type
->ref
->type
, a
);
2507 vset(&int_type
, VT_LOCAL
|VT_LVAL
, type
->ref
->c
);
2509 vpushi(type_size(type
, a
));
2513 static void vla_sp_restore(void) {
2514 if (vlas_in_scope
) {
2515 gen_vla_sp_restore(vla_sp_loc
);
2519 static void vla_sp_restore_root(void) {
2520 if (vlas_in_scope
) {
2521 gen_vla_sp_restore(vla_sp_root_loc
);
2525 /* return the pointed type of t */
2526 static inline CType
*pointed_type(CType
*type
)
2528 return &type
->ref
->type
;
2531 /* modify type so that its it is a pointer to type. */
2532 ST_FUNC
void mk_pointer(CType
*type
)
2535 s
= sym_push(SYM_FIELD
, type
, 0, -1);
2536 type
->t
= VT_PTR
| (type
->t
& ~VT_TYPE
);
2540 /* compare function types. OLD functions match any new functions */
2541 static int is_compatible_func(CType
*type1
, CType
*type2
)
2547 if (!is_compatible_types(&s1
->type
, &s2
->type
))
2549 /* check func_call */
2550 if (s1
->a
.func_call
!= s2
->a
.func_call
)
2552 /* XXX: not complete */
2553 if (s1
->c
== FUNC_OLD
|| s2
->c
== FUNC_OLD
)
2557 while (s1
!= NULL
) {
2560 if (!is_compatible_parameter_types(&s1
->type
, &s2
->type
))
2570 /* return true if type1 and type2 are the same. If unqualified is
2571 true, qualifiers on the types are ignored.
2573 - enums are not checked as gcc __builtin_types_compatible_p ()
2575 static int compare_types(CType
*type1
, CType
*type2
, int unqualified
)
2579 t1
= type1
->t
& VT_TYPE
;
2580 t2
= type2
->t
& VT_TYPE
;
2582 /* strip qualifiers before comparing */
2583 t1
&= ~(VT_CONSTANT
| VT_VOLATILE
);
2584 t2
&= ~(VT_CONSTANT
| VT_VOLATILE
);
2586 /* Default Vs explicit signedness only matters for char */
2587 if ((t1
& VT_BTYPE
) != VT_BYTE
) {
2591 /* An enum is compatible with (unsigned) int. Ideally we would
2592 store the enums signedness in type->ref.a.<some_bit> and
2593 only accept unsigned enums with unsigned int and vice versa.
2594 But one of our callers (gen_assign_cast) always strips VT_UNSIGNED
2595 from pointer target types, so we can't add it here either. */
2596 if ((t1
& VT_BTYPE
) == VT_ENUM
) {
2598 if (type1
->ref
->a
.unsigned_enum
)
2601 if ((t2
& VT_BTYPE
) == VT_ENUM
) {
2603 if (type2
->ref
->a
.unsigned_enum
)
2606 /* XXX: bitfields ? */
2609 /* test more complicated cases */
2610 bt1
= t1
& VT_BTYPE
;
2611 if (bt1
== VT_PTR
) {
2612 type1
= pointed_type(type1
);
2613 type2
= pointed_type(type2
);
2614 return is_compatible_types(type1
, type2
);
2615 } else if (bt1
== VT_STRUCT
) {
2616 return (type1
->ref
== type2
->ref
);
2617 } else if (bt1
== VT_FUNC
) {
2618 return is_compatible_func(type1
, type2
);
2624 /* return true if type1 and type2 are exactly the same (including
2627 static int is_compatible_types(CType
*type1
, CType
*type2
)
2629 return compare_types(type1
,type2
,0);
2632 /* return true if type1 and type2 are the same (ignoring qualifiers).
2634 static int is_compatible_parameter_types(CType
*type1
, CType
*type2
)
2636 return compare_types(type1
,type2
,1);
2639 /* print a type. If 'varstr' is not NULL, then the variable is also
2640 printed in the type */
2642 /* XXX: add array and function pointers */
2643 static void type_to_str(char *buf
, int buf_size
,
2644 CType
*type
, const char *varstr
)
2654 if (t
& VT_CONSTANT
)
2655 pstrcat(buf
, buf_size
, "const ");
2656 if (t
& VT_VOLATILE
)
2657 pstrcat(buf
, buf_size
, "volatile ");
2658 if ((t
& (VT_DEFSIGN
| VT_UNSIGNED
)) == (VT_DEFSIGN
| VT_UNSIGNED
))
2659 pstrcat(buf
, buf_size
, "unsigned ");
2660 else if (t
& VT_DEFSIGN
)
2661 pstrcat(buf
, buf_size
, "signed ");
2663 pstrcat(buf
, buf_size
, "extern ");
2665 pstrcat(buf
, buf_size
, "static ");
2667 pstrcat(buf
, buf_size
, "typedef ");
2669 pstrcat(buf
, buf_size
, "inline ");
2670 buf_size
-= strlen(buf
);
2701 tstr
= "long double";
2703 pstrcat(buf
, buf_size
, tstr
);
2707 if (bt
== VT_STRUCT
)
2711 pstrcat(buf
, buf_size
, tstr
);
2712 v
= type
->ref
->v
& ~SYM_STRUCT
;
2713 if (v
>= SYM_FIRST_ANOM
)
2714 pstrcat(buf
, buf_size
, "<anonymous>");
2716 pstrcat(buf
, buf_size
, get_tok_str(v
, NULL
));
2720 type_to_str(buf
, buf_size
, &s
->type
, varstr
);
2721 pstrcat(buf
, buf_size
, "(");
2723 while (sa
!= NULL
) {
2724 type_to_str(buf1
, sizeof(buf1
), &sa
->type
, NULL
);
2725 pstrcat(buf
, buf_size
, buf1
);
2728 pstrcat(buf
, buf_size
, ", ");
2730 pstrcat(buf
, buf_size
, ")");
2735 snprintf(buf1
, sizeof(buf1
), "%s[%ld]", varstr
? varstr
: "", s
->c
);
2736 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
2739 pstrcpy(buf1
, sizeof(buf1
), "*");
2740 if (t
& VT_CONSTANT
)
2741 pstrcat(buf1
, buf_size
, "const ");
2742 if (t
& VT_VOLATILE
)
2743 pstrcat(buf1
, buf_size
, "volatile ");
2745 pstrcat(buf1
, sizeof(buf1
), varstr
);
2746 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
2750 pstrcat(buf
, buf_size
, " ");
2751 pstrcat(buf
, buf_size
, varstr
);
2756 /* verify type compatibility to store vtop in 'dt' type, and generate
2758 static void gen_assign_cast(CType
*dt
)
2760 CType
*st
, *type1
, *type2
, tmp_type1
, tmp_type2
;
2761 char buf1
[256], buf2
[256];
2764 st
= &vtop
->type
; /* source type */
2765 dbt
= dt
->t
& VT_BTYPE
;
2766 sbt
= st
->t
& VT_BTYPE
;
2767 if (sbt
== VT_VOID
|| dbt
== VT_VOID
) {
2768 if (sbt
== VT_VOID
&& dbt
== VT_VOID
)
2770 It is Ok if both are void
2776 gcc accepts this program
2779 tcc_error("cannot cast from/to void");
2781 if (dt
->t
& VT_CONSTANT
)
2782 tcc_warning("assignment of read-only location");
2785 /* special cases for pointers */
2786 /* '0' can also be a pointer */
2787 if (is_null_pointer(vtop
))
2789 /* accept implicit pointer to integer cast with warning */
2790 if (is_integer_btype(sbt
)) {
2791 tcc_warning("assignment makes pointer from integer without a cast");
2794 type1
= pointed_type(dt
);
2795 /* a function is implicitely a function pointer */
2796 if (sbt
== VT_FUNC
) {
2797 if ((type1
->t
& VT_BTYPE
) != VT_VOID
&&
2798 !is_compatible_types(pointed_type(dt
), st
))
2799 tcc_warning("assignment from incompatible pointer type");
2804 type2
= pointed_type(st
);
2805 if ((type1
->t
& VT_BTYPE
) == VT_VOID
||
2806 (type2
->t
& VT_BTYPE
) == VT_VOID
) {
2807 /* void * can match anything */
2809 /* exact type match, except for qualifiers */
2812 tmp_type1
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
2813 tmp_type2
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
2814 if (!is_compatible_types(&tmp_type1
, &tmp_type2
)) {
2815 /* Like GCC don't warn by default for merely changes
2816 in pointer target signedness. Do warn for different
2817 base types, though, in particular for unsigned enums
2818 and signed int targets. */
2819 if ((tmp_type1
.t
& (VT_DEFSIGN
| VT_UNSIGNED
)) !=
2820 (tmp_type2
.t
& (VT_DEFSIGN
| VT_UNSIGNED
)) &&
2821 (tmp_type1
.t
& VT_BTYPE
) == (tmp_type2
.t
& VT_BTYPE
))
2824 tcc_warning("assignment from incompatible pointer type");
2827 /* check const and volatile */
2828 if ((!(type1
->t
& VT_CONSTANT
) && (type2
->t
& VT_CONSTANT
)) ||
2829 (!(type1
->t
& VT_VOLATILE
) && (type2
->t
& VT_VOLATILE
)))
2830 tcc_warning("assignment discards qualifiers from pointer target type");
2836 if (sbt
== VT_PTR
|| sbt
== VT_FUNC
) {
2837 tcc_warning("assignment makes integer from pointer without a cast");
2838 } else if (sbt
== VT_STRUCT
) {
2839 goto case_VT_STRUCT
;
2841 /* XXX: more tests */
2847 tmp_type1
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
2848 tmp_type2
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
2849 if (!is_compatible_types(&tmp_type1
, &tmp_type2
)) {
2851 type_to_str(buf1
, sizeof(buf1
), st
, NULL
);
2852 type_to_str(buf2
, sizeof(buf2
), dt
, NULL
);
2853 tcc_error("cannot cast '%s' to '%s'", buf1
, buf2
);
2861 /* store vtop in lvalue pushed on stack */
2862 ST_FUNC
void vstore(void)
2864 int sbt
, dbt
, ft
, r
, t
, size
, align
, bit_size
, bit_pos
, rc
, delayed_cast
;
2866 ft
= vtop
[-1].type
.t
;
2867 sbt
= vtop
->type
.t
& VT_BTYPE
;
2868 dbt
= ft
& VT_BTYPE
;
2869 if ((((sbt
== VT_INT
|| sbt
== VT_SHORT
) && dbt
== VT_BYTE
) ||
2870 (sbt
== VT_INT
&& dbt
== VT_SHORT
))
2871 && !(vtop
->type
.t
& VT_BITFIELD
)) {
2872 /* optimize char/short casts */
2873 delayed_cast
= VT_MUSTCAST
;
2874 vtop
->type
.t
= (ft
& VT_TYPE
& ~VT_BITFIELD
&
2875 ((1 << VT_STRUCT_SHIFT
) - 1));
2876 /* XXX: factorize */
2877 if (ft
& VT_CONSTANT
)
2878 tcc_warning("assignment of read-only location");
2881 if (!(ft
& VT_BITFIELD
))
2882 gen_assign_cast(&vtop
[-1].type
);
2885 if (sbt
== VT_STRUCT
) {
2886 /* if structure, only generate pointer */
2887 /* structure assignment : generate memcpy */
2888 /* XXX: optimize if small size */
2889 size
= type_size(&vtop
->type
, &align
);
2893 vtop
->type
.t
= VT_PTR
;
2896 /* address of memcpy() */
2899 vpush_global_sym(&func_old_type
, TOK_memcpy8
);
2900 else if(!(align
& 3))
2901 vpush_global_sym(&func_old_type
, TOK_memcpy4
);
2904 /* Use memmove, rather than memcpy, as dest and src may be same: */
2905 vpush_global_sym(&func_old_type
, TOK_memmove
);
2910 vtop
->type
.t
= VT_PTR
;
2916 /* leave source on stack */
2917 } else if (ft
& VT_BITFIELD
) {
2918 /* bitfield store handling */
2920 /* save lvalue as expression result (example: s.b = s.a = n;) */
2921 vdup(), vtop
[-1] = vtop
[-2];
2923 bit_pos
= (ft
>> VT_STRUCT_SHIFT
) & 0x3f;
2924 bit_size
= (ft
>> (VT_STRUCT_SHIFT
+ 6)) & 0x3f;
2925 /* remove bit field info to avoid loops */
2926 vtop
[-1].type
.t
= ft
& ~VT_BITFIELD
& ((1 << VT_STRUCT_SHIFT
) - 1);
2928 if((ft
& VT_BTYPE
) == VT_BOOL
) {
2929 gen_cast(&vtop
[-1].type
);
2930 vtop
[-1].type
.t
= (vtop
[-1].type
.t
& ~VT_BTYPE
) | (VT_BYTE
| VT_UNSIGNED
);
2933 /* duplicate destination */
2935 vtop
[-1] = vtop
[-2];
2937 /* mask and shift source */
2938 if((ft
& VT_BTYPE
) != VT_BOOL
) {
2939 if((ft
& VT_BTYPE
) == VT_LLONG
) {
2940 vpushll((1ULL << bit_size
) - 1ULL);
2942 vpushi((1 << bit_size
) - 1);
2948 /* load destination, mask and or with source */
2950 if((ft
& VT_BTYPE
) == VT_LLONG
) {
2951 vpushll(~(((1ULL << bit_size
) - 1ULL) << bit_pos
));
2953 vpushi(~(((1 << bit_size
) - 1) << bit_pos
));
2959 /* ... and discard */
2963 #ifdef CONFIG_TCC_BCHECK
2964 /* bound check case */
2965 if (vtop
[-1].r
& VT_MUSTBOUND
) {
2974 #ifdef TCC_TARGET_X86_64
2975 if ((ft
& VT_BTYPE
) == VT_LDOUBLE
) {
2977 } else if ((ft
& VT_BTYPE
) == VT_QFLOAT
) {
2982 r
= gv(rc
); /* generate value */
2983 /* if lvalue was saved on stack, must read it */
2984 if ((vtop
[-1].r
& VT_VALMASK
) == VT_LLOCAL
) {
2986 t
= get_reg(RC_INT
);
2987 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
2992 sv
.r
= VT_LOCAL
| VT_LVAL
;
2993 sv
.c
.i
= vtop
[-1].c
.i
;
2995 vtop
[-1].r
= t
| VT_LVAL
;
2997 /* two word case handling : store second register at word + 4 (or +8 for x86-64) */
2998 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
2999 if (((ft
& VT_BTYPE
) == VT_QLONG
) || ((ft
& VT_BTYPE
) == VT_QFLOAT
)) {
3000 int addr_type
= VT_LLONG
, load_size
= 8, load_type
= ((vtop
->type
.t
& VT_BTYPE
) == VT_QLONG
) ? VT_LLONG
: VT_DOUBLE
;
3002 if ((ft
& VT_BTYPE
) == VT_LLONG
) {
3003 int addr_type
= VT_INT
, load_size
= 4, load_type
= VT_INT
;
3005 vtop
[-1].type
.t
= load_type
;
3008 /* convert to int to increment easily */
3009 vtop
->type
.t
= addr_type
;
3015 vtop
[-1].type
.t
= load_type
;
3016 /* XXX: it works because r2 is spilled last ! */
3017 store(vtop
->r2
, vtop
- 1);
3023 vtop
--; /* NOT vpop() because on x86 it would flush the fp stack */
3024 vtop
->r
|= delayed_cast
;
3028 /* post defines POST/PRE add. c is the token ++ or -- */
3029 ST_FUNC
void inc(int post
, int c
)
3032 vdup(); /* save lvalue */
3034 gv_dup(); /* duplicate value */
3039 vpushi(c
- TOK_MID
);
3041 vstore(); /* store value */
3043 vpop(); /* if post op, return saved value */
3046 ST_FUNC
void parse_mult_str (CString
*astr
, const char *msg
)
3048 /* read the string */
3052 while (tok
== TOK_STR
) {
3053 /* XXX: add \0 handling too ? */
3054 cstr_cat(astr
, tokc
.str
.data
, -1);
3057 cstr_ccat(astr
, '\0');
3060 /* If I is >= 1 and a power of two, returns log2(i)+1.
3061 If I is 0 returns 0. */
3062 static int exact_log2p1(int i
)
3067 for (ret
= 1; i
>= 1 << 8; ret
+= 8)
3078 /* Parse GNUC __attribute__ extension. Currently, the following
3079 extensions are recognized:
3080 - aligned(n) : set data/function alignment.
3081 - packed : force data alignment to 1
3082 - section(x) : generate data/code in this section.
3083 - unused : currently ignored, but may be used someday.
3084 - regparm(n) : pass function parameters in registers (i386 only)
3086 static void parse_attribute(AttributeDef
*ad
)
3091 while (tok
== TOK_ATTRIBUTE1
|| tok
== TOK_ATTRIBUTE2
) {
3095 while (tok
!= ')') {
3096 if (tok
< TOK_IDENT
)
3097 expect("attribute name");
3104 parse_mult_str(&astr
, "section name");
3105 ad
->section
= find_section(tcc_state
, (char *)astr
.data
);
3112 parse_mult_str(&astr
, "alias(\"target\")");
3113 ad
->alias_target
= /* save string as token, for later */
3114 tok_alloc((char*)astr
.data
, astr
.size
-1)->tok
;
3118 case TOK_VISIBILITY1
:
3119 case TOK_VISIBILITY2
:
3121 parse_mult_str(&astr
,
3122 "visibility(\"default|hidden|internal|protected\")");
3123 if (!strcmp (astr
.data
, "default"))
3124 ad
->a
.visibility
= STV_DEFAULT
;
3125 else if (!strcmp (astr
.data
, "hidden"))
3126 ad
->a
.visibility
= STV_HIDDEN
;
3127 else if (!strcmp (astr
.data
, "internal"))
3128 ad
->a
.visibility
= STV_INTERNAL
;
3129 else if (!strcmp (astr
.data
, "protected"))
3130 ad
->a
.visibility
= STV_PROTECTED
;
3132 expect("visibility(\"default|hidden|internal|protected\")");
3141 if (n
<= 0 || (n
& (n
- 1)) != 0)
3142 tcc_error("alignment must be a positive power of two");
3147 ad
->a
.aligned
= exact_log2p1(n
);
3148 if (n
!= 1 << (ad
->a
.aligned
- 1))
3149 tcc_error("alignment of %d is larger than implemented", n
);
3161 /* currently, no need to handle it because tcc does not
3162 track unused objects */
3166 /* currently, no need to handle it because tcc does not
3167 track unused objects */
3172 ad
->a
.func_call
= FUNC_CDECL
;
3177 ad
->a
.func_call
= FUNC_STDCALL
;
3179 #ifdef TCC_TARGET_I386
3189 ad
->a
.func_call
= FUNC_FASTCALL1
+ n
- 1;
3195 ad
->a
.func_call
= FUNC_FASTCALLW
;
3202 ad
->a
.mode
= VT_LLONG
+ 1;
3205 ad
->a
.mode
= VT_BYTE
+ 1;
3208 ad
->a
.mode
= VT_SHORT
+ 1;
3212 ad
->a
.mode
= VT_INT
+ 1;
3215 tcc_warning("__mode__(%s) not supported\n", get_tok_str(tok
, NULL
));
3222 ad
->a
.func_export
= 1;
3225 ad
->a
.func_import
= 1;
3228 if (tcc_state
->warn_unsupported
)
3229 tcc_warning("'%s' attribute ignored", get_tok_str(t
, NULL
));
3230 /* skip parameters */
3232 int parenthesis
= 0;
3236 else if (tok
== ')')
3239 } while (parenthesis
&& tok
!= -1);
3252 static Sym
* find_field (CType
*type
, int v
)
3256 while ((s
= s
->next
) != NULL
) {
3257 if ((s
->v
& SYM_FIELD
) &&
3258 (s
->type
.t
& VT_BTYPE
) == VT_STRUCT
&&
3259 (s
->v
& ~SYM_FIELD
) >= SYM_FIRST_ANOM
) {
3260 Sym
*ret
= find_field (&s
->type
, v
);
3270 static void struct_add_offset (Sym
*s
, int offset
)
3272 while ((s
= s
->next
) != NULL
) {
3273 if ((s
->v
& SYM_FIELD
) &&
3274 (s
->type
.t
& VT_BTYPE
) == VT_STRUCT
&&
3275 (s
->v
& ~SYM_FIELD
) >= SYM_FIRST_ANOM
) {
3276 struct_add_offset(s
->type
.ref
, offset
);
3282 static void struct_layout(CType
*type
, AttributeDef
*ad
)
3284 int align
, maxalign
, offset
, c
, bit_pos
, bt
, prevbt
, prev_bit_size
;
3285 int pcc
= !tcc_state
->ms_bitfields
;
3288 maxalign
= 1 << (ad
->a
.aligned
- 1);
3294 prevbt
= VT_STRUCT
; /* make it never match */
3296 for (f
= type
->ref
->next
; f
; f
= f
->next
) {
3297 int typealign
, bit_size
;
3298 int size
= type_size(&f
->type
, &typealign
);
3299 if (f
->type
.t
& VT_BITFIELD
)
3300 bit_size
= (f
->type
.t
>> (VT_STRUCT_SHIFT
+ 6)) & 0x3f;
3303 if (bit_size
== 0 && pcc
) {
3304 /* Zero-width bit-fields in PCC mode aren't affected
3305 by any packing (attribute or pragma). */
3307 } else if (f
->r
> 1) {
3309 } else if (ad
->a
.packed
|| f
->r
== 1) {
3311 /* Packed fields or packed records don't let the base type
3312 influence the records type alignment. */
3317 if (type
->ref
->type
.t
!= TOK_STRUCT
) {
3318 if (pcc
&& bit_size
>= 0)
3319 size
= (bit_size
+ 7) >> 3;
3320 /* Bit position is already zero from our caller. */
3324 } else if (bit_size
< 0) {
3325 int addbytes
= pcc
? (bit_pos
+ 7) >> 3 : 0;
3328 c
= (c
+ addbytes
+ align
- 1) & -align
;
3334 /* A bit-field. Layout is more complicated. There are two
3335 options TCC implements: PCC compatible and MS compatible
3336 (PCC compatible is what GCC uses for almost all targets).
3337 In PCC layout the overall size of the struct (in c) is
3338 _excluding_ the current run of bit-fields (that is,
3339 there's at least additional bit_pos bits after c). In
3340 MS layout c does include the current run of bit-fields.
3342 This matters for calculating the natural alignment buckets
3345 /* 'align' will be used to influence records alignment,
3346 so it's the max of specified and type alignment, except
3347 in certain cases that depend on the mode. */
3348 if (align
< typealign
)
3351 /* In PCC layout a non-packed bit-field is placed adjacent
3352 to the preceding bit-fields, except if it would overflow
3353 its container (depending on base type) or it's a zero-width
3354 bit-field. Packed non-zero-width bit-fields always are
3356 int ofs
= (c
* 8 + bit_pos
) % (typealign
* 8);
3357 int ofs2
= ofs
+ bit_size
+ (typealign
* 8) - 1;
3358 if (bit_size
== 0 ||
3359 ((typealign
!= 1 || size
== 1) &&
3360 (ofs2
/ (typealign
* 8)) > (size
/typealign
))) {
3361 c
= (c
+ ((bit_pos
+ 7) >> 3) + typealign
- 1) & -typealign
;
3363 } else while (bit_pos
+ bit_size
> size
* 8) {
3365 bit_pos
-= size
* 8;
3368 /* In PCC layout named bit-fields influence the alignment
3369 of the containing struct using the base types alignment,
3370 except for packed fields (which here have correct
3371 align/typealign). */
3372 if ((f
->v
& SYM_FIRST_ANOM
))
3375 bt
= f
->type
.t
& VT_BTYPE
;
3376 if ((bit_pos
+ bit_size
> size
* 8) ||
3377 (bit_size
> 0) == (bt
!= prevbt
)) {
3378 c
= (c
+ typealign
- 1) & -typealign
;
3381 /* In MS bitfield mode a bit-field run always uses
3382 at least as many bits as the underlying type.
3383 To start a new run it's also required that this
3384 or the last bit-field had non-zero width. */
3385 if (bit_size
|| prev_bit_size
)
3388 /* In MS layout the records alignment is normally
3389 influenced by the field, except for a zero-width
3390 field at the start of a run (but by further zero-width
3391 fields it is again). */
3392 if (bit_size
== 0 && prevbt
!= bt
)
3395 prev_bit_size
= bit_size
;
3397 f
->type
.t
= (f
->type
.t
& ~(0x3f << VT_STRUCT_SHIFT
))
3398 | (bit_pos
<< VT_STRUCT_SHIFT
);
3399 bit_pos
+= bit_size
;
3400 if (pcc
&& bit_pos
>= size
* 8) {
3402 bit_pos
-= size
* 8;
3405 if (align
> maxalign
)
3408 printf("set field %s offset=%d c=%d",
3409 get_tok_str(f
->v
& ~SYM_FIELD
, NULL
), offset
, c
);
3410 if (f
->type
.t
& VT_BITFIELD
) {
3411 printf(" pos=%d size=%d",
3412 (f
->type
.t
>> VT_STRUCT_SHIFT
) & 0x3f,
3413 (f
->type
.t
>> (VT_STRUCT_SHIFT
+ 6)) & 0x3f);
3418 if (f
->v
& SYM_FIRST_ANOM
&& (f
->type
.t
& VT_BTYPE
) == VT_STRUCT
) {
3420 /* An anonymous struct/union. Adjust member offsets
3421 to reflect the real offset of our containing struct.
3422 Also set the offset of this anon member inside
3423 the outer struct to be zero. Via this it
3424 works when accessing the field offset directly
3425 (from base object), as well as when recursing
3426 members in initializer handling. */
3427 int v2
= f
->type
.ref
->v
;
3428 if (!(v2
& SYM_FIELD
) &&
3429 (v2
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
3431 /* This happens only with MS extensions. The
3432 anon member has a named struct type, so it
3433 potentially is shared with other references.
3434 We need to unshare members so we can modify
3437 f
->type
.ref
= sym_push(anon_sym
++ | SYM_FIELD
,
3438 &f
->type
.ref
->type
, 0,
3440 pps
= &f
->type
.ref
->next
;
3441 while ((ass
= ass
->next
) != NULL
) {
3442 *pps
= sym_push(ass
->v
, &ass
->type
, 0, ass
->c
);
3443 pps
= &((*pps
)->next
);
3447 struct_add_offset(f
->type
.ref
, offset
);
3455 /* store size and alignment */
3456 type
->ref
->c
= (c
+ (pcc
? (bit_pos
+ 7) >> 3 : 0)
3457 + maxalign
- 1) & -maxalign
;
3458 type
->ref
->r
= maxalign
;
3461 /* enum/struct/union declaration. u is either VT_ENUM or VT_STRUCT */
3462 static void struct_decl(CType
*type
, AttributeDef
*ad
, int u
)
3464 int a
, v
, size
, align
, flexible
, alignoverride
;
3466 int bit_size
, bsize
, bt
;
3471 a
= tok
; /* save decl type */
3473 if (tok
== TOK_ATTRIBUTE1
|| tok
== TOK_ATTRIBUTE2
)
3474 parse_attribute(ad
);
3478 /* struct already defined ? return it */
3480 expect("struct/union/enum name");
3482 if (s
&& (s
->scope
== local_scope
|| (tok
!= '{' && tok
!= ';'))) {
3484 tcc_error("redefinition of '%s'", get_tok_str(v
, NULL
));
3490 /* Record the original enum/struct/union token. */
3493 /* we put an undefined size for struct/union */
3494 s
= sym_push(v
| SYM_STRUCT
, &type1
, 0, -1);
3495 s
->r
= 0; /* default alignment is zero as gcc */
3496 /* put struct/union/enum name in type */
3504 tcc_error("struct/union/enum already defined");
3505 /* cannot be empty */
3507 /* non empty enums are not allowed */
3508 if (a
== TOK_ENUM
) {
3512 CType
*t
= &int_type
;
3515 expect("identifier");
3517 if (ss
&& !local_stack
)
3518 tcc_error("redefinition of enumerator '%s'",
3519 get_tok_str(v
, NULL
));
3523 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
3526 /* We really want to support long long enums
3527 on i386 as well, but the Sym structure only
3528 holds a 'long' for associated constants,
3529 and enlarging it would bump its size (no
3530 available padding). So punt for now. */
3536 if (c
!= (int)c
&& (unsigned long)c
!= (unsigned int)c
)
3537 seen_wide
= 1, t
= &size_type
;
3538 /* enum symbols have static storage */
3539 ss
= sym_push(v
, t
, VT_CONST
, c
);
3540 ss
->type
.t
|= VT_STATIC
;
3545 /* NOTE: we accept a trailing comma */
3550 s
->a
.unsigned_enum
= 1;
3551 s
->c
= type_size(seen_wide
? &size_type
: &int_type
, &align
);
3556 while (tok
!= '}') {
3557 if (!parse_btype(&btype
, &ad1
)) {
3563 tcc_error("flexible array member '%s' not at the end of struct",
3564 get_tok_str(v
, NULL
));
3570 type_decl(&type1
, &ad1
, &v
, TYPE_DIRECT
);
3572 if ((type1
.t
& VT_BTYPE
) != VT_STRUCT
)
3573 expect("identifier");
3575 int v
= btype
.ref
->v
;
3576 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
3577 if (tcc_state
->ms_extensions
== 0)
3578 expect("identifier");
3582 if (type_size(&type1
, &align
) < 0) {
3583 if ((a
== TOK_STRUCT
) && (type1
.t
& VT_ARRAY
) && c
)
3586 tcc_error("field '%s' has incomplete type",
3587 get_tok_str(v
, NULL
));
3589 if ((type1
.t
& VT_BTYPE
) == VT_FUNC
||
3590 (type1
.t
& VT_STORAGE
))
3591 tcc_error("invalid type for '%s'",
3592 get_tok_str(v
, NULL
));
3596 bit_size
= expr_const();
3597 /* XXX: handle v = 0 case for messages */
3599 tcc_error("negative width in bit-field '%s'",
3600 get_tok_str(v
, NULL
));
3601 if (v
&& bit_size
== 0)
3602 tcc_error("zero width for bit-field '%s'",
3603 get_tok_str(v
, NULL
));
3604 if (tok
== TOK_ATTRIBUTE1
|| tok
== TOK_ATTRIBUTE2
)
3605 parse_attribute(&ad1
);
3607 size
= type_size(&type1
, &align
);
3608 /* Only remember non-default alignment. */
3610 if (ad1
.a
.aligned
) {
3611 int speca
= 1 << (ad1
.a
.aligned
- 1);
3612 alignoverride
= speca
;
3613 } else if (ad1
.a
.packed
|| ad
->a
.packed
) {
3615 } else if (*tcc_state
->pack_stack_ptr
) {
3616 if (align
> *tcc_state
->pack_stack_ptr
)
3617 alignoverride
= *tcc_state
->pack_stack_ptr
;
3619 if (bit_size
>= 0) {
3620 bt
= type1
.t
& VT_BTYPE
;
3627 tcc_error("bitfields must have scalar type");
3629 if (bit_size
> bsize
) {
3630 tcc_error("width of '%s' exceeds its type",
3631 get_tok_str(v
, NULL
));
3632 } else if (bit_size
== bsize
) {
3633 /* no need for bit fields */
3636 type1
.t
|= VT_BITFIELD
|
3637 (0 << VT_STRUCT_SHIFT
) |
3638 (bit_size
<< (VT_STRUCT_SHIFT
+ 6));
3641 if (v
!= 0 || (type1
.t
& VT_BTYPE
) == VT_STRUCT
) {
3642 /* Remember we've seen a real field to check
3643 for placement of flexible array member. */
3646 /* If member is a struct or bit-field, enforce
3647 placing into the struct (as anonymous). */
3649 ((type1
.t
& VT_BTYPE
) == VT_STRUCT
||
3654 ss
= sym_push(v
| SYM_FIELD
, &type1
, alignoverride
, 0);
3658 if (tok
== ';' || tok
== TOK_EOF
)
3665 if (tok
== TOK_ATTRIBUTE1
|| tok
== TOK_ATTRIBUTE2
)
3666 parse_attribute(ad
);
3667 struct_layout(type
, ad
);
3672 /* return 1 if basic type is a type size (short, long, long long) */
3673 ST_FUNC
int is_btype_size(int bt
)
3675 return bt
== VT_SHORT
|| bt
== VT_LONG
|| bt
== VT_LLONG
;
3678 /* Add type qualifiers to a type. If the type is an array then the qualifiers
3679 are added to the element type, copied because it could be a typedef. */
3680 static void parse_btype_qualify(CType
*type
, int qualifiers
)
3682 while (type
->t
& VT_ARRAY
) {
3683 type
->ref
= sym_push(SYM_FIELD
, &type
->ref
->type
, 0, type
->ref
->c
);
3684 type
= &type
->ref
->type
;
3686 type
->t
|= qualifiers
;
3689 /* return 0 if no type declaration. otherwise, return the basic type
3692 static int parse_btype(CType
*type
, AttributeDef
*ad
)
3694 int t
, u
, bt_size
, complete
, type_found
, typespec_found
, g
;
3698 memset(ad
, 0, sizeof(AttributeDef
));
3706 /* currently, we really ignore extension */
3717 tcc_error("too many basic types");
3719 bt_size
= is_btype_size (u
& VT_BTYPE
);
3720 if (u
== VT_INT
|| (!bt_size
&& !(t
& VT_TYPEDEF
)))
3735 if ((t
& VT_BTYPE
) == VT_DOUBLE
) {
3736 #ifndef TCC_TARGET_PE
3737 t
= (t
& ~VT_BTYPE
) | VT_LDOUBLE
;
3739 } else if ((t
& VT_BTYPE
) == VT_LONG
) {
3740 t
= (t
& ~VT_BTYPE
) | VT_LLONG
;
3746 #ifdef TCC_TARGET_ARM64
3748 /* GCC's __uint128_t appears in some Linux header files. Make it a
3749 synonym for long double to get the size and alignment right. */
3761 if ((t
& VT_BTYPE
) == VT_LONG
) {
3762 #ifdef TCC_TARGET_PE
3763 t
= (t
& ~VT_BTYPE
) | VT_DOUBLE
;
3765 t
= (t
& ~VT_BTYPE
) | VT_LDOUBLE
;
3773 struct_decl(&type1
, ad
, VT_ENUM
);
3776 type
->ref
= type1
.ref
;
3780 struct_decl(&type1
, ad
, VT_STRUCT
);
3783 /* type modifiers */
3788 parse_btype_qualify(type
, VT_CONSTANT
);
3796 parse_btype_qualify(type
, VT_VOLATILE
);
3803 if ((t
& (VT_DEFSIGN
|VT_UNSIGNED
)) == (VT_DEFSIGN
|VT_UNSIGNED
))
3804 tcc_error("signed and unsigned modifier");
3817 if ((t
& (VT_DEFSIGN
|VT_UNSIGNED
)) == VT_DEFSIGN
)
3818 tcc_error("signed and unsigned modifier");
3819 t
|= VT_DEFSIGN
| VT_UNSIGNED
;
3835 if (t
& (VT_EXTERN
|VT_STATIC
|VT_TYPEDEF
) & ~g
)
3836 tcc_error("multiple storage classes");
3847 /* GNUC attribute */
3848 case TOK_ATTRIBUTE1
:
3849 case TOK_ATTRIBUTE2
:
3850 parse_attribute(ad
);
3853 t
= (t
& ~VT_BTYPE
) | u
;
3861 parse_expr_type(&type1
);
3862 /* remove all storage modifiers except typedef */
3863 type1
.t
&= ~(VT_STORAGE
&~VT_TYPEDEF
);
3869 if (!s
|| !(s
->type
.t
& VT_TYPEDEF
))
3872 type
->t
= ((s
->type
.t
& ~VT_TYPEDEF
) |
3873 (t
& ~(VT_CONSTANT
| VT_VOLATILE
)));
3874 type
->ref
= s
->type
.ref
;
3875 if (t
& (VT_CONSTANT
| VT_VOLATILE
))
3876 parse_btype_qualify(type
, t
& (VT_CONSTANT
| VT_VOLATILE
));
3880 /* get attributes from typedef */
3881 if (0 == ad
->a
.aligned
)
3882 ad
->a
.aligned
= s
->a
.aligned
;
3883 if (0 == ad
->a
.func_call
)
3884 ad
->a
.func_call
= s
->a
.func_call
;
3885 ad
->a
.packed
|= s
->a
.packed
;
3894 if (tcc_state
->char_is_unsigned
) {
3895 if ((t
& (VT_DEFSIGN
|VT_BTYPE
)) == VT_BYTE
)
3899 /* long is never used as type */
3900 if ((t
& VT_BTYPE
) == VT_LONG
)
3901 #if (!defined TCC_TARGET_X86_64 && !defined TCC_TARGET_ARM64) || \
3902 defined TCC_TARGET_PE
3903 t
= (t
& ~VT_BTYPE
) | VT_INT
;
3905 t
= (t
& ~VT_BTYPE
) | VT_LLONG
;
3911 /* convert a function parameter type (array to pointer and function to
3912 function pointer) */
3913 static inline void convert_parameter_type(CType
*pt
)
3915 /* remove const and volatile qualifiers (XXX: const could be used
3916 to indicate a const function parameter */
3917 pt
->t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
3918 /* array must be transformed to pointer according to ANSI C */
3920 if ((pt
->t
& VT_BTYPE
) == VT_FUNC
) {
3925 ST_FUNC
void parse_asm_str(CString
*astr
)
3928 parse_mult_str(astr
, "string constant");
3931 /* Parse an asm label and return the token */
3932 static int asm_label_instr(void)
3938 parse_asm_str(&astr
);
3941 printf("asm_alias: \"%s\"\n", (char *)astr
.data
);
3943 v
= tok_alloc(astr
.data
, astr
.size
- 1)->tok
;
3948 static int post_type(CType
*type
, AttributeDef
*ad
, int storage
, int td
)
3950 int n
, l
, t1
, arg_size
, align
;
3951 Sym
**plast
, *s
, *first
;
3956 /* function type, or recursive declarator (return if so) */
3958 if (td
&& !(td
& TYPE_ABSTRACT
))
3962 else if (parse_btype(&pt
, &ad1
))
3973 /* read param name and compute offset */
3974 if (l
!= FUNC_OLD
) {
3975 if ((pt
.t
& VT_BTYPE
) == VT_VOID
&& tok
== ')')
3977 type_decl(&pt
, &ad1
, &n
, TYPE_DIRECT
| TYPE_ABSTRACT
);
3978 if ((pt
.t
& VT_BTYPE
) == VT_VOID
)
3979 tcc_error("parameter declared as void");
3980 arg_size
+= (type_size(&pt
, &align
) + PTR_SIZE
- 1) / PTR_SIZE
;
3984 expect("identifier");
3985 pt
.t
= VT_VOID
; /* invalid type */
3988 convert_parameter_type(&pt
);
3989 s
= sym_push(n
| SYM_FIELD
, &pt
, 0, 0);
3995 if (l
== FUNC_NEW
&& tok
== TOK_DOTS
) {
4000 if (l
== FUNC_NEW
&& !parse_btype(&pt
, &ad1
))
4001 tcc_error("invalid type");
4004 /* if no parameters, then old type prototype */
4007 /* NOTE: const is ignored in returned type as it has a special
4008 meaning in gcc / C++ */
4009 type
->t
&= ~VT_CONSTANT
;
4010 /* some ancient pre-K&R C allows a function to return an array
4011 and the array brackets to be put after the arguments, such
4012 that "int c()[]" means something like "int[] c()" */
4015 skip(']'); /* only handle simple "[]" */
4018 /* we push a anonymous symbol which will contain the function prototype */
4019 ad
->a
.func_args
= arg_size
;
4020 s
= sym_push(SYM_FIELD
, type
, 0, l
);
4025 } else if (tok
== '[') {
4026 int saved_nocode_wanted
= nocode_wanted
;
4027 /* array definition */
4029 if (tok
== TOK_RESTRICT1
)
4034 if (!local_stack
|| (storage
& VT_STATIC
))
4035 vpushi(expr_const());
4037 /* VLAs (which can only happen with local_stack && !VT_STATIC)
4038 length must always be evaluated, even under nocode_wanted,
4039 so that its size slot is initialized (e.g. under sizeof
4044 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
4047 tcc_error("invalid array size");
4049 if (!is_integer_btype(vtop
->type
.t
& VT_BTYPE
))
4050 tcc_error("size of variable length array should be an integer");
4055 /* parse next post type */
4056 post_type(type
, ad
, storage
, 0);
4057 if (type
->t
== VT_FUNC
)
4058 tcc_error("declaration of an array of functions");
4059 t1
|= type
->t
& VT_VLA
;
4062 loc
-= type_size(&int_type
, &align
);
4066 vla_runtime_type_size(type
, &align
);
4068 vset(&int_type
, VT_LOCAL
|VT_LVAL
, n
);
4074 nocode_wanted
= saved_nocode_wanted
;
4076 /* we push an anonymous symbol which will contain the array
4078 s
= sym_push(SYM_FIELD
, type
, 0, n
);
4079 type
->t
= (t1
? VT_VLA
: VT_ARRAY
) | VT_PTR
;
4085 /* Parse a type declarator (except basic type), and return the type
4086 in 'type'. 'td' is a bitmask indicating which kind of type decl is
4087 expected. 'type' should contain the basic type. 'ad' is the
4088 attribute definition of the basic type. It can be modified by
4089 type_decl(). If this (possibly abstract) declarator is a pointer chain
4090 it returns the innermost pointed to type (equals *type, but is a different
4091 pointer), otherwise returns type itself, that's used for recursive calls. */
4092 static CType
*type_decl(CType
*type
, AttributeDef
*ad
, int *v
, int td
)
4095 int qualifiers
, storage
;
4097 /* recursive type, remove storage bits first, apply them later again */
4098 storage
= type
->t
& VT_STORAGE
;
4099 type
->t
&= ~VT_STORAGE
;
4101 while (tok
== '*') {
4109 qualifiers
|= VT_CONSTANT
;
4114 qualifiers
|= VT_VOLATILE
;
4120 /* XXX: clarify attribute handling */
4121 case TOK_ATTRIBUTE1
:
4122 case TOK_ATTRIBUTE2
:
4123 parse_attribute(ad
);
4127 type
->t
|= qualifiers
;
4129 /* innermost pointed to type is the one for the first derivation */
4130 ret
= pointed_type(type
);
4134 /* This is possibly a parameter type list for abstract declarators
4135 ('int ()'), use post_type for testing this. */
4136 if (!post_type(type
, ad
, 0, td
)) {
4137 /* It's not, so it's a nested declarator, and the post operations
4138 apply to the innermost pointed to type (if any). */
4139 /* XXX: this is not correct to modify 'ad' at this point, but
4140 the syntax is not clear */
4141 if (tok
== TOK_ATTRIBUTE1
|| tok
== TOK_ATTRIBUTE2
)
4142 parse_attribute(ad
);
4143 post
= type_decl(type
, ad
, v
, td
);
4146 } else if (tok
>= TOK_IDENT
&& (td
& TYPE_DIRECT
)) {
4147 /* type identifier */
4151 if (!(td
& TYPE_ABSTRACT
))
4152 expect("identifier");
4155 post_type(post
, ad
, storage
, 0);
4156 if (tok
== TOK_ATTRIBUTE1
|| tok
== TOK_ATTRIBUTE2
)
4157 parse_attribute(ad
);
4162 /* compute the lvalue VT_LVAL_xxx needed to match type t. */
4163 ST_FUNC
int lvalue_type(int t
)
4168 if (bt
== VT_BYTE
|| bt
== VT_BOOL
)
4170 else if (bt
== VT_SHORT
)
4174 if (t
& VT_UNSIGNED
)
4175 r
|= VT_LVAL_UNSIGNED
;
4179 /* indirection with full error checking and bound check */
4180 ST_FUNC
void indir(void)
4182 if ((vtop
->type
.t
& VT_BTYPE
) != VT_PTR
) {
4183 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FUNC
)
4187 if (vtop
->r
& VT_LVAL
)
4189 vtop
->type
= *pointed_type(&vtop
->type
);
4190 /* Arrays and functions are never lvalues */
4191 if (!(vtop
->type
.t
& VT_ARRAY
) && !(vtop
->type
.t
& VT_VLA
)
4192 && (vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
) {
4193 vtop
->r
|= lvalue_type(vtop
->type
.t
);
4194 /* if bound checking, the referenced pointer must be checked */
4195 #ifdef CONFIG_TCC_BCHECK
4196 if (tcc_state
->do_bounds_check
)
4197 vtop
->r
|= VT_MUSTBOUND
;
4202 /* pass a parameter to a function and do type checking and casting */
4203 static void gfunc_param_typed(Sym
*func
, Sym
*arg
)
4208 func_type
= func
->c
;
4209 if (func_type
== FUNC_OLD
||
4210 (func_type
== FUNC_ELLIPSIS
&& arg
== NULL
)) {
4211 /* default casting : only need to convert float to double */
4212 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FLOAT
) {
4215 } else if (vtop
->type
.t
& VT_BITFIELD
) {
4216 type
.t
= vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
);
4217 type
.ref
= vtop
->type
.ref
;
4220 } else if (arg
== NULL
) {
4221 tcc_error("too many arguments to function");
4224 type
.t
&= ~VT_CONSTANT
; /* need to do that to avoid false warning */
4225 gen_assign_cast(&type
);
4229 /* parse an expression and return its type without any side effect.
4230 If UNRY we parse an unary expression, otherwise a full one. */
4231 static void expr_type(CType
*type
, int unry
)
4243 /* parse an expression of the form '(type)' or '(expr)' and return its
4245 static void parse_expr_type(CType
*type
)
4251 if (parse_btype(type
, &ad
)) {
4252 type_decl(type
, &ad
, &n
, TYPE_ABSTRACT
);
4259 static void parse_type(CType
*type
)
4264 if (!parse_btype(type
, &ad
)) {
4267 type_decl(type
, &ad
, &n
, TYPE_ABSTRACT
);
4270 static void parse_builtin_params(int nc
, const char *args
)
4277 while ((c
= *args
++)) {
4281 case 'e': expr_eq(); continue;
4282 case 't': parse_type(&t
); vpush(&t
); continue;
4283 default: tcc_error("internal error"); break;
4291 ST_FUNC
void unary(void)
4293 int n
, t
, align
, size
, r
, sizeof_caller
;
4298 sizeof_caller
= in_sizeof
;
4300 /* XXX: GCC 2.95.3 does not generate a table although it should be
4314 vsetc(&type
, VT_CONST
, &tokc
);
4318 t
= VT_INT
| VT_UNSIGNED
;
4324 t
= VT_LLONG
| VT_UNSIGNED
;
4336 case TOK___FUNCTION__
:
4338 goto tok_identifier
;
4344 /* special function name identifier */
4345 len
= strlen(funcname
) + 1;
4346 /* generate char[len] type */
4351 vpush_ref(&type
, data_section
, data_section
->data_offset
, len
);
4352 ptr
= section_ptr_add(data_section
, len
);
4353 memcpy(ptr
, funcname
, len
);
4358 #ifdef TCC_TARGET_PE
4359 t
= VT_SHORT
| VT_UNSIGNED
;
4365 /* string parsing */
4368 if (tcc_state
->warn_write_strings
)
4373 memset(&ad
, 0, sizeof(AttributeDef
));
4374 decl_initializer_alloc(&type
, &ad
, VT_CONST
, 2, 0, 0);
4379 if (parse_btype(&type
, &ad
)) {
4380 type_decl(&type
, &ad
, &n
, TYPE_ABSTRACT
);
4382 /* check ISOC99 compound literal */
4384 /* data is allocated locally by default */
4389 /* all except arrays are lvalues */
4390 if (!(type
.t
& VT_ARRAY
))
4391 r
|= lvalue_type(type
.t
);
4392 memset(&ad
, 0, sizeof(AttributeDef
));
4393 decl_initializer_alloc(&type
, &ad
, r
, 1, 0, 0);
4395 if (sizeof_caller
) {
4402 } else if (tok
== '{') {
4403 int saved_nocode_wanted
= nocode_wanted
;
4405 tcc_error("expected constant");
4406 /* save all registers */
4408 /* statement expression : we do not accept break/continue
4409 inside as GCC does. We do retain the nocode_wanted state,
4410 as statement expressions can't ever be entered from the
4411 outside, so any reactivation of code emission (from labels
4412 or loop heads) can be disabled again after the end of it. */
4413 block(NULL
, NULL
, 1);
4414 nocode_wanted
= saved_nocode_wanted
;
4429 /* functions names must be treated as function pointers,
4430 except for unary '&' and sizeof. Since we consider that
4431 functions are not lvalues, we only have to handle it
4432 there and in function calls. */
4433 /* arrays can also be used although they are not lvalues */
4434 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
&&
4435 !(vtop
->type
.t
& VT_ARRAY
))
4437 mk_pointer(&vtop
->type
);
4443 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
4445 boolean
.t
= VT_BOOL
;
4447 vtop
->c
.i
= !vtop
->c
.i
;
4448 } else if ((vtop
->r
& VT_VALMASK
) == VT_CMP
)
4452 vseti(VT_JMP
, gvtst(1, 0));
4464 if ((vtop
->type
.t
& VT_BTYPE
) == VT_PTR
)
4465 tcc_error("pointer not accepted for unary plus");
4466 /* In order to force cast, we add zero, except for floating point
4467 where we really need an noop (otherwise -0.0 will be transformed
4469 if (!is_float(vtop
->type
.t
)) {
4480 expr_type(&type
, 1); // Perform a in_sizeof = 0;
4481 size
= type_size(&type
, &align
);
4482 if (t
== TOK_SIZEOF
) {
4483 if (!(type
.t
& VT_VLA
)) {
4485 tcc_error("sizeof applied to an incomplete type");
4488 vla_runtime_type_size(&type
, &align
);
4493 vtop
->type
.t
|= VT_UNSIGNED
;
4496 case TOK_builtin_expect
:
4497 /* __builtin_expect is a no-op for now */
4498 parse_builtin_params(0, "ee");
4501 case TOK_builtin_types_compatible_p
:
4502 parse_builtin_params(0, "tt");
4503 vtop
[-1].type
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
4504 vtop
[0].type
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
4505 n
= is_compatible_types(&vtop
[-1].type
, &vtop
[0].type
);
4509 case TOK_builtin_choose_expr
:
4536 case TOK_builtin_constant_p
:
4537 parse_builtin_params(1, "e");
4538 n
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
4542 case TOK_builtin_frame_address
:
4543 case TOK_builtin_return_address
:
4550 if (tok
!= TOK_CINT
) {
4551 tcc_error("%s only takes positive integers",
4552 tok1
== TOK_builtin_return_address
?
4553 "__builtin_return_address" :
4554 "__builtin_frame_address");
4556 level
= (uint32_t)tokc
.i
;
4561 vset(&type
, VT_LOCAL
, 0); /* local frame */
4563 mk_pointer(&vtop
->type
);
4564 indir(); /* -> parent frame */
4566 if (tok1
== TOK_builtin_return_address
) {
4567 // assume return address is just above frame pointer on stack
4570 mk_pointer(&vtop
->type
);
4575 #ifdef TCC_TARGET_X86_64
4576 #ifdef TCC_TARGET_PE
4577 case TOK_builtin_va_start
:
4578 parse_builtin_params(0, "ee");
4579 if ((vtop
->r
& VT_VALMASK
) != VT_LOCAL
)
4580 tcc_error("__builtin_va_start expects a local variable");
4581 vtop
->r
&= ~VT_LVAL
;
4582 vtop
->type
= char_pointer_type
;
4587 case TOK_builtin_va_arg_types
:
4588 parse_builtin_params(0, "t");
4589 vpushi(classify_x86_64_va_arg(&vtop
->type
));
4596 #ifdef TCC_TARGET_ARM64
4597 case TOK___va_start
: {
4598 parse_builtin_params(0, "ee");
4602 vtop
->type
.t
= VT_VOID
;
4605 case TOK___va_arg
: {
4607 parse_builtin_params(0, "et");
4615 case TOK___arm64_clear_cache
: {
4616 parse_builtin_params(0, "ee");
4619 vtop
->type
.t
= VT_VOID
;
4623 /* pre operations */
4634 t
= vtop
->type
.t
& VT_BTYPE
;
4636 /* In IEEE negate(x) isn't subtract(0,x), but rather
4640 vtop
->c
.f
= -1.0 * 0.0;
4641 else if (t
== VT_DOUBLE
)
4642 vtop
->c
.d
= -1.0 * 0.0;
4644 vtop
->c
.ld
= -1.0 * 0.0;
4652 goto tok_identifier
;
4654 /* allow to take the address of a label */
4655 if (tok
< TOK_UIDENT
)
4656 expect("label identifier");
4657 s
= label_find(tok
);
4659 s
= label_push(&global_label_stack
, tok
, LABEL_FORWARD
);
4661 if (s
->r
== LABEL_DECLARED
)
4662 s
->r
= LABEL_FORWARD
;
4665 s
->type
.t
= VT_VOID
;
4666 mk_pointer(&s
->type
);
4667 s
->type
.t
|= VT_STATIC
;
4669 vpushsym(&s
->type
, s
);
4673 // special qnan , snan and infinity values
4675 vpush64(VT_DOUBLE
, 0x7ff8000000000000ULL
);
4679 vpush64(VT_DOUBLE
, 0x7ff0000000000001ULL
);
4683 vpush64(VT_DOUBLE
, 0x7ff0000000000000ULL
);
4692 expect("identifier");
4695 const char *name
= get_tok_str(t
, NULL
);
4697 tcc_error("'%s' undeclared", name
);
4698 /* for simple function calls, we tolerate undeclared
4699 external reference to int() function */
4700 if (tcc_state
->warn_implicit_function_declaration
4701 #ifdef TCC_TARGET_PE
4702 /* people must be warned about using undeclared WINAPI functions
4703 (which usually start with uppercase letter) */
4704 || (name
[0] >= 'A' && name
[0] <= 'Z')
4707 tcc_warning("implicit declaration of function '%s'", name
);
4708 s
= external_global_sym(t
, &func_old_type
, 0);
4712 /* A symbol that has a register is a local register variable,
4713 which starts out as VT_LOCAL value. */
4714 if ((r
& VT_VALMASK
) < VT_CONST
)
4715 r
= (r
& ~VT_VALMASK
) | VT_LOCAL
;
4717 vset(&s
->type
, r
, s
->c
);
4718 /* Point to s as backpointer (even without r&VT_SYM).
4719 Will be used by at least the x86 inline asm parser for
4722 if (vtop
->r
& VT_SYM
) {
4728 /* post operations */
4730 if (tok
== TOK_INC
|| tok
== TOK_DEC
) {
4733 } else if (tok
== '.' || tok
== TOK_ARROW
|| tok
== TOK_CDOUBLE
) {
4736 if (tok
== TOK_ARROW
)
4738 qualifiers
= vtop
->type
.t
& (VT_CONSTANT
| VT_VOLATILE
);
4741 /* expect pointer on structure */
4742 if ((vtop
->type
.t
& VT_BTYPE
) != VT_STRUCT
)
4743 expect("struct or union");
4744 if (tok
== TOK_CDOUBLE
)
4745 expect("field name");
4747 if (tok
== TOK_CINT
|| tok
== TOK_CUINT
)
4748 expect("field name");
4749 s
= find_field(&vtop
->type
, tok
);
4751 tcc_error("field not found: %s", get_tok_str(tok
& ~SYM_FIELD
, &tokc
));
4752 /* add field offset to pointer */
4753 vtop
->type
= char_pointer_type
; /* change type to 'char *' */
4756 /* change type to field type, and set to lvalue */
4757 vtop
->type
= s
->type
;
4758 vtop
->type
.t
|= qualifiers
;
4759 /* an array is never an lvalue */
4760 if (!(vtop
->type
.t
& VT_ARRAY
)) {
4761 vtop
->r
|= lvalue_type(vtop
->type
.t
);
4762 #ifdef CONFIG_TCC_BCHECK
4763 /* if bound checking, the referenced pointer must be checked */
4764 if (tcc_state
->do_bounds_check
&& (vtop
->r
& VT_VALMASK
) != VT_LOCAL
)
4765 vtop
->r
|= VT_MUSTBOUND
;
4769 } else if (tok
== '[') {
4775 } else if (tok
== '(') {
4778 int nb_args
, ret_nregs
, ret_align
, regsize
, variadic
;
4781 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
) {
4782 /* pointer test (no array accepted) */
4783 if ((vtop
->type
.t
& (VT_BTYPE
| VT_ARRAY
)) == VT_PTR
) {
4784 vtop
->type
= *pointed_type(&vtop
->type
);
4785 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
)
4789 expect("function pointer");
4792 vtop
->r
&= ~VT_LVAL
; /* no lvalue */
4794 /* get return type */
4797 sa
= s
->next
; /* first parameter */
4798 nb_args
= regsize
= 0;
4800 /* compute first implicit argument if a structure is returned */
4801 if ((s
->type
.t
& VT_BTYPE
) == VT_STRUCT
) {
4802 variadic
= (s
->c
== FUNC_ELLIPSIS
);
4803 ret_nregs
= gfunc_sret(&s
->type
, variadic
, &ret
.type
,
4804 &ret_align
, ®size
);
4806 /* get some space for the returned structure */
4807 size
= type_size(&s
->type
, &align
);
4808 #ifdef TCC_TARGET_ARM64
4809 /* On arm64, a small struct is return in registers.
4810 It is much easier to write it to memory if we know
4811 that we are allowed to write some extra bytes, so
4812 round the allocated space up to a power of 2: */
4814 while (size
& (size
- 1))
4815 size
= (size
| (size
- 1)) + 1;
4817 loc
= (loc
- size
) & -align
;
4819 ret
.r
= VT_LOCAL
| VT_LVAL
;
4820 /* pass it as 'int' to avoid structure arg passing
4822 vseti(VT_LOCAL
, loc
);
4832 /* return in register */
4833 if (is_float(ret
.type
.t
)) {
4834 ret
.r
= reg_fret(ret
.type
.t
);
4835 #ifdef TCC_TARGET_X86_64
4836 if ((ret
.type
.t
& VT_BTYPE
) == VT_QFLOAT
)
4840 #ifndef TCC_TARGET_ARM64
4841 #ifdef TCC_TARGET_X86_64
4842 if ((ret
.type
.t
& VT_BTYPE
) == VT_QLONG
)
4844 if ((ret
.type
.t
& VT_BTYPE
) == VT_LLONG
)
4855 gfunc_param_typed(s
, sa
);
4865 tcc_error("too few arguments to function");
4867 gfunc_call(nb_args
);
4870 for (r
= ret
.r
+ ret_nregs
+ !ret_nregs
; r
-- > ret
.r
;) {
4871 vsetc(&ret
.type
, r
, &ret
.c
);
4872 vtop
->r2
= ret
.r2
; /* Loop only happens when r2 is VT_CONST */
4875 /* handle packed struct return */
4876 if (((s
->type
.t
& VT_BTYPE
) == VT_STRUCT
) && ret_nregs
) {
4879 size
= type_size(&s
->type
, &align
);
4880 /* We're writing whole regs often, make sure there's enough
4881 space. Assume register size is power of 2. */
4882 if (regsize
> align
)
4884 loc
= (loc
- size
) & -align
;
4888 vset(&ret
.type
, VT_LOCAL
| VT_LVAL
, addr
+ offset
);
4892 if (--ret_nregs
== 0)
4896 vset(&s
->type
, VT_LOCAL
| VT_LVAL
, addr
);
4904 ST_FUNC
void expr_prod(void)
4909 while (tok
== '*' || tok
== '/' || tok
== '%') {
4917 ST_FUNC
void expr_sum(void)
4922 while (tok
== '+' || tok
== '-') {
4930 static void expr_shift(void)
4935 while (tok
== TOK_SHL
|| tok
== TOK_SAR
) {
4943 static void expr_cmp(void)
4948 while ((tok
>= TOK_ULE
&& tok
<= TOK_GT
) ||
4949 tok
== TOK_ULT
|| tok
== TOK_UGE
) {
4957 static void expr_cmpeq(void)
4962 while (tok
== TOK_EQ
|| tok
== TOK_NE
) {
4970 static void expr_and(void)
4973 while (tok
== '&') {
4980 static void expr_xor(void)
4983 while (tok
== '^') {
4990 static void expr_or(void)
4993 while (tok
== '|') {
5000 static void expr_land(void)
5003 if (tok
== TOK_LAND
) {
5006 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
5014 while (tok
== TOK_LAND
) {
5022 gen_cast(&int_type
);
5030 if (tok
!= TOK_LAND
) {
5043 static void expr_lor(void)
5046 if (tok
== TOK_LOR
) {
5049 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
5057 while (tok
== TOK_LOR
) {
5065 gen_cast(&int_type
);
5073 if (tok
!= TOK_LOR
) {
5086 /* Assuming vtop is a value used in a conditional context
5087 (i.e. compared with zero) return 0 if it's false, 1 if
5088 true and -1 if it can't be statically determined. */
5089 static int condition_3way(void)
5092 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
&&
5093 (!(vtop
->r
& VT_SYM
) ||
5094 !(vtop
->sym
->type
.t
& VT_WEAK
))) {
5096 boolean
.t
= VT_BOOL
;
5105 static void expr_cond(void)
5107 int tt
, u
, r1
, r2
, rc
, t1
, t2
, bt1
, bt2
, islv
, c
, g
;
5109 CType type
, type1
, type2
;
5114 c
= condition_3way();
5115 g
= (tok
== ':' && gnu_ext
);
5117 /* needed to avoid having different registers saved in
5119 if (is_float(vtop
->type
.t
)) {
5121 #ifdef TCC_TARGET_X86_64
5122 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LDOUBLE
) {
5147 sv
= *vtop
; /* save value to handle it later */
5148 vtop
--; /* no vpop so that FP stack is not flushed */
5166 bt1
= t1
& VT_BTYPE
;
5168 bt2
= t2
& VT_BTYPE
;
5169 /* cast operands to correct type according to ISOC rules */
5170 if (is_float(bt1
) || is_float(bt2
)) {
5171 if (bt1
== VT_LDOUBLE
|| bt2
== VT_LDOUBLE
) {
5172 type
.t
= VT_LDOUBLE
;
5174 } else if (bt1
== VT_DOUBLE
|| bt2
== VT_DOUBLE
) {
5179 } else if (bt1
== VT_LLONG
|| bt2
== VT_LLONG
) {
5180 /* cast to biggest op */
5182 /* convert to unsigned if it does not fit in a long long */
5183 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
)) == (VT_LLONG
| VT_UNSIGNED
) ||
5184 (t2
& (VT_BTYPE
| VT_UNSIGNED
)) == (VT_LLONG
| VT_UNSIGNED
))
5185 type
.t
|= VT_UNSIGNED
;
5186 } else if (bt1
== VT_PTR
|| bt2
== VT_PTR
) {
5187 /* If one is a null ptr constant the result type
5189 if (is_null_pointer (vtop
))
5191 else if (is_null_pointer (&sv
))
5193 /* XXX: test pointer compatibility, C99 has more elaborate
5197 } else if (bt1
== VT_FUNC
|| bt2
== VT_FUNC
) {
5198 /* XXX: test function pointer compatibility */
5199 type
= bt1
== VT_FUNC
? type1
: type2
;
5200 } else if (bt1
== VT_STRUCT
|| bt2
== VT_STRUCT
) {
5201 /* XXX: test structure compatibility */
5202 type
= bt1
== VT_STRUCT
? type1
: type2
;
5203 } else if (bt1
== VT_VOID
|| bt2
== VT_VOID
) {
5204 /* NOTE: as an extension, we accept void on only one side */
5207 /* integer operations */
5209 /* convert to unsigned if it does not fit in an integer */
5210 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
)) == (VT_INT
| VT_UNSIGNED
) ||
5211 (t2
& (VT_BTYPE
| VT_UNSIGNED
)) == (VT_INT
| VT_UNSIGNED
))
5212 type
.t
|= VT_UNSIGNED
;
5214 /* keep structs lvalue by transforming `(expr ? a : b)` to `*(expr ? &a : &b)` so
5215 that `(expr ? a : b).mem` does not error with "lvalue expected" */
5216 islv
= (vtop
->r
& VT_LVAL
) && (sv
.r
& VT_LVAL
) && VT_STRUCT
== (type
.t
& VT_BTYPE
);
5219 /* now we convert second operand */
5223 mk_pointer(&vtop
->type
);
5225 } else if (VT_STRUCT
== (vtop
->type
.t
& VT_BTYPE
))
5230 if (is_float(type
.t
)) {
5232 #ifdef TCC_TARGET_X86_64
5233 if ((type
.t
& VT_BTYPE
) == VT_LDOUBLE
) {
5237 } else if ((type
.t
& VT_BTYPE
) == VT_LLONG
) {
5238 /* for long longs, we use fixed registers to avoid having
5239 to handle a complicated move */
5250 /* this is horrible, but we must also convert first
5256 mk_pointer(&vtop
->type
);
5258 } else if (VT_STRUCT
== (vtop
->type
.t
& VT_BTYPE
))
5264 move_reg(r2
, r1
, type
.t
);
5274 static void expr_eq(void)
5280 (tok
>= TOK_A_MOD
&& tok
<= TOK_A_DIV
) ||
5281 tok
== TOK_A_XOR
|| tok
== TOK_A_OR
||
5282 tok
== TOK_A_SHL
|| tok
== TOK_A_SAR
) {
5297 ST_FUNC
void gexpr(void)
5308 /* parse a constant expression and return value in vtop. */
5309 static void expr_const1(void)
5316 /* parse an integer constant and return its value. */
5317 static inline int64_t expr_const64(void)
5321 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) != VT_CONST
)
5322 expect("constant expression");
5328 /* parse an integer constant and return its value.
5329 Complain if it doesn't fit 32bit (signed or unsigned). */
5330 ST_FUNC
int expr_const(void)
5333 int64_t wc
= expr_const64();
5335 if (c
!= wc
&& (unsigned)c
!= wc
)
5336 tcc_error("constant exceeds 32 bit");
5340 /* return the label token if current token is a label, otherwise
5342 static int is_label(void)
5346 /* fast test first */
5347 if (tok
< TOK_UIDENT
)
5349 /* no need to save tokc because tok is an identifier */
5355 unget_tok(last_tok
);
5360 #ifndef TCC_TARGET_ARM64
5361 static void gfunc_return(CType
*func_type
)
5363 if ((func_type
->t
& VT_BTYPE
) == VT_STRUCT
) {
5364 CType type
, ret_type
;
5365 int ret_align
, ret_nregs
, regsize
;
5366 ret_nregs
= gfunc_sret(func_type
, func_var
, &ret_type
,
5367 &ret_align
, ®size
);
5368 if (0 == ret_nregs
) {
5369 /* if returning structure, must copy it to implicit
5370 first pointer arg location */
5373 vset(&type
, VT_LOCAL
| VT_LVAL
, func_vc
);
5376 /* copy structure value to pointer */
5379 /* returning structure packed into registers */
5380 int r
, size
, addr
, align
;
5381 size
= type_size(func_type
,&align
);
5382 if ((vtop
->r
!= (VT_LOCAL
| VT_LVAL
) ||
5383 (vtop
->c
.i
& (ret_align
-1)))
5384 && (align
& (ret_align
-1))) {
5385 loc
= (loc
- size
) & -ret_align
;
5388 vset(&type
, VT_LOCAL
| VT_LVAL
, addr
);
5392 vset(&ret_type
, VT_LOCAL
| VT_LVAL
, addr
);
5394 vtop
->type
= ret_type
;
5395 if (is_float(ret_type
.t
))
5396 r
= rc_fret(ret_type
.t
);
5407 if (--ret_nregs
== 0)
5409 /* We assume that when a structure is returned in multiple
5410 registers, their classes are consecutive values of the
5413 vtop
->c
.i
+= regsize
;
5417 } else if (is_float(func_type
->t
)) {
5418 gv(rc_fret(func_type
->t
));
5422 vtop
--; /* NOT vpop() because on x86 it would flush the fp stack */
5426 static int case_cmp(const void *pa
, const void *pb
)
5428 int64_t a
= (*(struct case_t
**) pa
)->v1
;
5429 int64_t b
= (*(struct case_t
**) pb
)->v1
;
5430 return a
< b
? -1 : a
> b
;
5433 static void gcase(struct case_t
**base
, int len
, int *bsym
)
5437 int ll
= (vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
;
5455 gtst_addr(0, p
->sym
); /* v1 <= x <= v2 */
5457 gcase(base
, len
/2, bsym
);
5458 if (cur_switch
->def_sym
)
5459 gjmp_addr(cur_switch
->def_sym
);
5461 *bsym
= gjmp(*bsym
);
5465 base
+= e
; len
-= e
;
5475 if (p
->v1
== p
->v2
) {
5477 gtst_addr(0, p
->sym
);
5487 gtst_addr(0, p
->sym
);
5493 static void block(int *bsym
, int *csym
, int is_expr
)
5495 int a
, b
, c
, d
, cond
;
5498 /* generate line number info */
5499 if (tcc_state
->do_debug
)
5500 tcc_debug_line(tcc_state
);
5503 /* default return value is (void) */
5505 vtop
->type
.t
= VT_VOID
;
5508 if (tok
== TOK_IF
) {
5510 int saved_nocode_wanted
= nocode_wanted
;
5515 cond
= condition_3way();
5521 nocode_wanted
|= 0x20000000;
5522 block(bsym
, csym
, 0);
5524 nocode_wanted
= saved_nocode_wanted
;
5526 if (c
== TOK_ELSE
) {
5531 nocode_wanted
|= 0x20000000;
5532 block(bsym
, csym
, 0);
5533 gsym(d
); /* patch else jmp */
5535 nocode_wanted
= saved_nocode_wanted
;
5538 } else if (tok
== TOK_WHILE
) {
5539 int saved_nocode_wanted
;
5540 nocode_wanted
&= ~0x20000000;
5550 saved_nocode_wanted
= nocode_wanted
;
5552 nocode_wanted
= saved_nocode_wanted
;
5557 } else if (tok
== '{') {
5559 int block_vla_sp_loc
= vla_sp_loc
, saved_vlas_in_scope
= vlas_in_scope
;
5562 /* record local declaration stack position */
5564 llabel
= local_label_stack
;
5567 /* handle local labels declarations */
5568 if (tok
== TOK_LABEL
) {
5571 if (tok
< TOK_UIDENT
)
5572 expect("label identifier");
5573 label_push(&local_label_stack
, tok
, LABEL_DECLARED
);
5583 while (tok
!= '}') {
5584 if ((a
= is_label()))
5591 block(bsym
, csym
, is_expr
);
5594 /* pop locally defined labels */
5595 label_pop(&local_label_stack
, llabel
);
5596 /* pop locally defined symbols */
5598 /* In the is_expr case (a statement expression is finished here),
5599 vtop might refer to symbols on the local_stack. Either via the
5600 type or via vtop->sym. We can't pop those nor any that in turn
5601 might be referred to. To make it easier we don't roll back
5602 any symbols in that case; some upper level call to block() will
5603 do that. We do have to remove such symbols from the lookup
5604 tables, though. sym_pop will do that. */
5605 sym_pop(&local_stack
, s
, is_expr
);
5607 /* Pop VLA frames and restore stack pointer if required */
5608 if (vlas_in_scope
> saved_vlas_in_scope
) {
5609 vla_sp_loc
= saved_vlas_in_scope
? block_vla_sp_loc
: vla_sp_root_loc
;
5612 vlas_in_scope
= saved_vlas_in_scope
;
5615 } else if (tok
== TOK_RETURN
) {
5619 gen_assign_cast(&func_vt
);
5620 gfunc_return(&func_vt
);
5623 /* jump unless last stmt in top-level block */
5624 if (tok
!= '}' || local_scope
!= 1)
5626 nocode_wanted
|= 0x20000000;
5627 } else if (tok
== TOK_BREAK
) {
5630 tcc_error("cannot break");
5631 *bsym
= gjmp(*bsym
);
5634 nocode_wanted
|= 0x20000000;
5635 } else if (tok
== TOK_CONTINUE
) {
5638 tcc_error("cannot continue");
5639 vla_sp_restore_root();
5640 *csym
= gjmp(*csym
);
5643 } else if (tok
== TOK_FOR
) {
5645 int saved_nocode_wanted
;
5646 nocode_wanted
&= ~0x20000000;
5652 /* c99 for-loop init decl? */
5653 if (!decl0(VT_LOCAL
, 1, NULL
)) {
5654 /* no, regular for-loop init expr */
5680 saved_nocode_wanted
= nocode_wanted
;
5682 nocode_wanted
= saved_nocode_wanted
;
5687 sym_pop(&local_stack
, s
, 0);
5690 if (tok
== TOK_DO
) {
5691 int saved_nocode_wanted
;
5692 nocode_wanted
&= ~0x20000000;
5698 saved_nocode_wanted
= nocode_wanted
;
5706 nocode_wanted
= saved_nocode_wanted
;
5711 if (tok
== TOK_SWITCH
) {
5712 struct switch_t
*saved
, sw
;
5713 int saved_nocode_wanted
= nocode_wanted
;
5719 switchval
= *vtop
--;
5721 b
= gjmp(0); /* jump to first case */
5722 sw
.p
= NULL
; sw
.n
= 0; sw
.def_sym
= 0;
5726 nocode_wanted
= saved_nocode_wanted
;
5727 a
= gjmp(a
); /* add implicit break */
5730 qsort(sw
.p
, sw
.n
, sizeof(void*), case_cmp
);
5731 for (b
= 1; b
< sw
.n
; b
++)
5732 if (sw
.p
[b
- 1]->v2
>= sw
.p
[b
]->v1
)
5733 tcc_error("duplicate case value");
5734 /* Our switch table sorting is signed, so the compared
5735 value needs to be as well when it's 64bit. */
5736 if ((switchval
.type
.t
& VT_BTYPE
) == VT_LLONG
)
5737 switchval
.type
.t
&= ~VT_UNSIGNED
;
5739 gcase(sw
.p
, sw
.n
, &a
);
5742 gjmp_addr(sw
.def_sym
);
5743 dynarray_reset(&sw
.p
, &sw
.n
);
5748 if (tok
== TOK_CASE
) {
5749 struct case_t
*cr
= tcc_malloc(sizeof(struct case_t
));
5752 nocode_wanted
&= ~0x20000000;
5754 cr
->v1
= cr
->v2
= expr_const64();
5755 if (gnu_ext
&& tok
== TOK_DOTS
) {
5757 cr
->v2
= expr_const64();
5758 if (cr
->v2
< cr
->v1
)
5759 tcc_warning("empty case range");
5762 dynarray_add(&cur_switch
->p
, &cur_switch
->n
, cr
);
5765 goto block_after_label
;
5767 if (tok
== TOK_DEFAULT
) {
5772 if (cur_switch
->def_sym
)
5773 tcc_error("too many 'default'");
5774 cur_switch
->def_sym
= ind
;
5776 goto block_after_label
;
5778 if (tok
== TOK_GOTO
) {
5780 if (tok
== '*' && gnu_ext
) {
5784 if ((vtop
->type
.t
& VT_BTYPE
) != VT_PTR
)
5787 } else if (tok
>= TOK_UIDENT
) {
5788 s
= label_find(tok
);
5789 /* put forward definition if needed */
5791 s
= label_push(&global_label_stack
, tok
, LABEL_FORWARD
);
5793 if (s
->r
== LABEL_DECLARED
)
5794 s
->r
= LABEL_FORWARD
;
5796 vla_sp_restore_root();
5797 if (s
->r
& LABEL_FORWARD
)
5798 s
->jnext
= gjmp(s
->jnext
);
5800 gjmp_addr(s
->jnext
);
5803 expect("label identifier");
5806 } else if (tok
== TOK_ASM1
|| tok
== TOK_ASM2
|| tok
== TOK_ASM3
) {
5815 if (s
->r
== LABEL_DEFINED
)
5816 tcc_error("duplicate label '%s'", get_tok_str(s
->v
, NULL
));
5818 s
->r
= LABEL_DEFINED
;
5820 s
= label_push(&global_label_stack
, b
, LABEL_DEFINED
);
5824 /* we accept this, but it is a mistake */
5826 nocode_wanted
&= ~0x20000000;
5828 tcc_warning("deprecated use of label at end of compound statement");
5832 block(bsym
, csym
, is_expr
);
5835 /* expression case */
5850 /* This skips over a stream of tokens containing balanced {} and ()
5851 pairs, stopping at outer ',' ';' and '}'. If STR then allocates
5852 and stores the skipped tokens in *STR. This doesn't check if
5853 () and {} are nested correctly, i.e. "({)}" is accepted. */
5854 static void skip_or_save_block(TokenString
**str
)
5858 *str
= tok_str_alloc();
5860 while ((level
> 0 || (tok
!= '}' && tok
!= ',' && tok
!= ';'))) {
5862 if (tok
== TOK_EOF
) {
5863 if (str
|| level
> 0)
5864 tcc_error("unexpected end of file");
5869 tok_str_add_tok(*str
);
5872 if (t
== '{' || t
== '(') {
5874 } else if (t
== '}' || t
== ')') {
5881 tok_str_add(*str
, -1);
5882 tok_str_add(*str
, 0);
5886 #define EXPR_CONST 1
5889 static void parse_init_elem(int expr_type
)
5891 int saved_global_expr
;
5894 /* compound literals must be allocated globally in this case */
5895 saved_global_expr
= global_expr
;
5898 global_expr
= saved_global_expr
;
5899 /* NOTE: symbols are accepted, as well as lvalue for anon symbols
5900 (compound literals). */
5901 if (((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) != VT_CONST
5902 && ((vtop
->r
& (VT_SYM
|VT_LVAL
)) != (VT_SYM
|VT_LVAL
)
5903 || vtop
->sym
->v
< SYM_FIRST_ANOM
))
5904 #ifdef TCC_TARGET_PE
5905 || (vtop
->type
.t
& VT_IMPORT
)
5908 tcc_error("initializer element is not constant");
5916 /* t is the array or struct type. c is the array or struct
5917 address. cur_field is the pointer to the current
5918 value, for arrays the 'c' member contains the current start
5919 index and the 'r' contains the end index (in case of range init).
5920 'size_only' is true if only size info is needed (only used
5922 static void decl_designator(CType
*type
, Section
*sec
, unsigned long c
,
5923 Sym
**cur_field
, int size_only
)
5926 int index
, index_last
, align
, l
, nb_elems
, elem_size
;
5930 if (gnu_ext
&& (l
= is_label()) != 0)
5932 /* NOTE: we only support ranges for last designator */
5933 while (nb_elems
== 1 && (tok
== '[' || tok
== '.')) {
5935 if (!(type
->t
& VT_ARRAY
))
5936 expect("array type");
5938 index
= index_last
= expr_const();
5939 if (tok
== TOK_DOTS
&& gnu_ext
) {
5941 index_last
= expr_const();
5945 if (index
< 0 || (s
->c
>= 0 && index_last
>= s
->c
) ||
5947 tcc_error("invalid index");
5949 (*cur_field
)->c
= index
;
5950 (*cur_field
)->r
= index_last
;
5952 type
= pointed_type(type
);
5953 elem_size
= type_size(type
, &align
);
5954 c
+= index
* elem_size
;
5955 nb_elems
= index_last
- index
+ 1;
5961 if ((type
->t
& VT_BTYPE
) != VT_STRUCT
)
5962 expect("struct/union type");
5963 f
= find_field(type
, l
);
5976 } else if (!gnu_ext
) {
5980 if (type
->t
& VT_ARRAY
) {
5981 index
= (*cur_field
)->c
;
5982 if (type
->ref
->c
>= 0 && index
>= type
->ref
->c
)
5983 tcc_error("index too large");
5984 type
= pointed_type(type
);
5985 c
+= index
* type_size(type
, &align
);
5988 while (f
&& (f
->v
& SYM_FIRST_ANOM
) && (f
->type
.t
& VT_BITFIELD
))
5989 *cur_field
= f
= f
->next
;
5991 tcc_error("too many field init");
5996 decl_initializer(type
, sec
, c
, 0, size_only
);
5998 /* XXX: make it more general */
5999 if (!size_only
&& nb_elems
> 1) {
6000 unsigned long c_end
;
6005 vset(type
, VT_LOCAL
|VT_LVAL
, c
);
6006 for (i
= 1; i
< nb_elems
; i
++) {
6007 vset(type
, VT_LOCAL
|VT_LVAL
, c
+ elem_size
* i
);
6013 c_end
= c
+ nb_elems
* elem_size
;
6014 if (c_end
> sec
->data_allocated
)
6015 section_realloc(sec
, c_end
);
6016 src
= sec
->data
+ c
;
6018 for(i
= 1; i
< nb_elems
; i
++) {
6020 memcpy(dst
, src
, elem_size
);
6026 /* store a value or an expression directly in global data or in local array */
6027 static void init_putv(CType
*type
, Section
*sec
, unsigned long c
)
6029 int bt
, bit_pos
, bit_size
;
6031 unsigned long long bit_mask
;
6035 dtype
.t
&= ~VT_CONSTANT
; /* need to do that to avoid false warning */
6039 /* XXX: not portable */
6040 /* XXX: generate error if incorrect relocation */
6041 gen_assign_cast(&dtype
);
6042 bt
= type
->t
& VT_BTYPE
;
6043 size
= type_size(type
, &align
);
6044 section_reserve(sec
, c
+ size
);
6045 ptr
= sec
->data
+ c
;
6046 /* XXX: make code faster ? */
6047 if (!(type
->t
& VT_BITFIELD
)) {
6049 bit_size
= PTR_SIZE
* 8;
6052 bit_pos
= (vtop
->type
.t
>> VT_STRUCT_SHIFT
) & 0x3f;
6053 bit_size
= (vtop
->type
.t
>> (VT_STRUCT_SHIFT
+ 6)) & 0x3f;
6054 bit_mask
= (1LL << bit_size
) - 1;
6056 if ((vtop
->r
& (VT_SYM
|VT_CONST
)) == (VT_SYM
|VT_CONST
) &&
6057 vtop
->sym
->v
>= SYM_FIRST_ANOM
&&
6058 /* XXX This rejects compount literals like
6059 '(void *){ptr}'. The problem is that '&sym' is
6060 represented the same way, which would be ruled out
6061 by the SYM_FIRST_ANOM check above, but also '"string"'
6062 in 'char *p = "string"' is represented the same
6063 with the type being VT_PTR and the symbol being an
6064 anonymous one. That is, there's no difference in vtop
6065 between '(void *){x}' and '&(void *){x}'. Ignore
6066 pointer typed entities here. Hopefully no real code
6067 will every use compound literals with scalar type. */
6068 (vtop
->type
.t
& VT_BTYPE
) != VT_PTR
) {
6069 /* These come from compound literals, memcpy stuff over. */
6073 esym
= &((ElfW(Sym
) *)symtab_section
->data
)[vtop
->sym
->c
];
6074 ssec
= tcc_state
->sections
[esym
->st_shndx
];
6075 memmove (ptr
, ssec
->data
+ esym
->st_value
, size
);
6077 /* We need to copy over all memory contents, and that
6078 includes relocations. Use the fact that relocs are
6079 created it order, so look from the end of relocs
6080 until we hit one before the copied region. */
6081 int num_relocs
= ssec
->reloc
->data_offset
/ sizeof(*rel
);
6082 rel
= (ElfW_Rel
*)(ssec
->reloc
->data
+ ssec
->reloc
->data_offset
);
6083 while (num_relocs
--) {
6085 if (rel
->r_offset
>= esym
->st_value
+ size
)
6087 if (rel
->r_offset
< esym
->st_value
)
6089 /* Note: if the same fields are initialized multiple
6090 times (possible with designators) then we possibly
6091 add multiple relocations for the same offset here.
6092 That would lead to wrong code, the last reloc needs
6093 to win. We clean this up later after the whole
6094 initializer is parsed. */
6095 put_elf_reloca(symtab_section
, sec
,
6096 c
+ rel
->r_offset
- esym
->st_value
,
6097 ELFW(R_TYPE
)(rel
->r_info
),
6098 ELFW(R_SYM
)(rel
->r_info
),
6099 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
6108 if ((vtop
->r
& VT_SYM
) &&
6114 (bt
== VT_LLONG
&& bit_size
!= 64) ||
6118 (bt
== VT_INT
&& bit_size
!= 32)
6121 tcc_error("initializer element is not computable at load time");
6123 /* XXX: when cross-compiling we assume that each type has the
6124 same representation on host and target, which is likely to
6125 be wrong in the case of long double */
6127 vtop
->c
.i
= (vtop
->c
.i
!= 0);
6129 *(char *)ptr
|= (vtop
->c
.i
& bit_mask
) << bit_pos
;
6132 *(short *)ptr
|= (vtop
->c
.i
& bit_mask
) << bit_pos
;
6135 *(float*)ptr
= vtop
->c
.f
;
6138 *(double *)ptr
= vtop
->c
.d
;
6141 if (sizeof(long double) == LDOUBLE_SIZE
)
6142 *(long double *)ptr
= vtop
->c
.ld
;
6143 else if (sizeof(double) == LDOUBLE_SIZE
)
6144 *(double *)ptr
= vtop
->c
.ld
;
6146 tcc_error("can't cross compile long double constants");
6150 *(long long *)ptr
|= (vtop
->c
.i
& bit_mask
) << bit_pos
;
6157 addr_t val
= (vtop
->c
.i
& bit_mask
) << bit_pos
;
6158 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
6159 if (vtop
->r
& VT_SYM
)
6160 greloca(sec
, vtop
->sym
, c
, R_DATA_PTR
, val
);
6162 *(addr_t
*)ptr
|= val
;
6164 if (vtop
->r
& VT_SYM
)
6165 greloc(sec
, vtop
->sym
, c
, R_DATA_PTR
);
6166 *(addr_t
*)ptr
|= val
;
6172 int val
= (vtop
->c
.i
& bit_mask
) << bit_pos
;
6173 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
6174 if (vtop
->r
& VT_SYM
)
6175 greloca(sec
, vtop
->sym
, c
, R_DATA_PTR
, val
);
6179 if (vtop
->r
& VT_SYM
)
6180 greloc(sec
, vtop
->sym
, c
, R_DATA_PTR
);
6189 vset(&dtype
, VT_LOCAL
|VT_LVAL
, c
);
6196 /* put zeros for variable based init */
6197 static void init_putz(Section
*sec
, unsigned long c
, int size
)
6200 /* nothing to do because globals are already set to zero */
6202 vpush_global_sym(&func_old_type
, TOK_memset
);
6204 #ifdef TCC_TARGET_ARM
6215 /* 't' contains the type and storage info. 'c' is the offset of the
6216 object in section 'sec'. If 'sec' is NULL, it means stack based
6217 allocation. 'first' is true if array '{' must be read (multi
6218 dimension implicit array init handling). 'size_only' is true if
6219 size only evaluation is wanted (only for arrays). */
6220 static void decl_initializer(CType
*type
, Section
*sec
, unsigned long c
,
6221 int first
, int size_only
)
6223 int index
, array_length
, n
, no_oblock
, nb
, i
;
6230 /* If we currently are at an '}' or ',' we have read an initializer
6231 element in one of our callers, and not yet consumed it. */
6232 have_elem
= tok
== '}' || tok
== ',';
6233 if (!have_elem
&& tok
!= '{' &&
6234 /* In case of strings we have special handling for arrays, so
6235 don't consume them as initializer value (which would commit them
6236 to some anonymous symbol). */
6237 tok
!= TOK_LSTR
&& tok
!= TOK_STR
&&
6239 parse_init_elem(!sec
? EXPR_ANY
: EXPR_CONST
);
6244 !(type
->t
& VT_ARRAY
) &&
6245 /* Use i_c_parameter_t, to strip toplevel qualifiers.
6246 The source type might have VT_CONSTANT set, which is
6247 of course assignable to non-const elements. */
6248 is_compatible_parameter_types(type
, &vtop
->type
)) {
6249 init_putv(type
, sec
, c
);
6250 } else if (type
->t
& VT_ARRAY
) {
6254 t1
= pointed_type(type
);
6255 size1
= type_size(t1
, &align1
);
6258 if ((first
&& tok
!= TOK_LSTR
&& tok
!= TOK_STR
) ||
6261 tcc_error("character array initializer must be a literal,"
6262 " optionally enclosed in braces");
6267 /* only parse strings here if correct type (otherwise: handle
6268 them as ((w)char *) expressions */
6269 if ((tok
== TOK_LSTR
&&
6270 #ifdef TCC_TARGET_PE
6271 (t1
->t
& VT_BTYPE
) == VT_SHORT
&& (t1
->t
& VT_UNSIGNED
)
6273 (t1
->t
& VT_BTYPE
) == VT_INT
6275 ) || (tok
== TOK_STR
&& (t1
->t
& VT_BTYPE
) == VT_BYTE
)) {
6276 while (tok
== TOK_STR
|| tok
== TOK_LSTR
) {
6279 /* compute maximum number of chars wanted */
6281 cstr_len
= tokc
.str
.size
;
6283 cstr_len
= tokc
.str
.size
/ sizeof(nwchar_t
);
6286 if (n
>= 0 && nb
> (n
- array_length
))
6287 nb
= n
- array_length
;
6290 tcc_warning("initializer-string for array is too long");
6291 /* in order to go faster for common case (char
6292 string in global variable, we handle it
6294 if (sec
&& tok
== TOK_STR
&& size1
== 1) {
6295 memcpy(sec
->data
+ c
+ array_length
, tokc
.str
.data
, nb
);
6299 ch
= ((unsigned char *)tokc
.str
.data
)[i
];
6301 ch
= ((nwchar_t
*)tokc
.str
.data
)[i
];
6303 init_putv(t1
, sec
, c
+ (array_length
+ i
) * size1
);
6310 /* only add trailing zero if enough storage (no
6311 warning in this case since it is standard) */
6312 if (n
< 0 || array_length
< n
) {
6315 init_putv(t1
, sec
, c
+ (array_length
* size1
));
6325 while (tok
!= '}' || have_elem
) {
6326 decl_designator(type
, sec
, c
, &f
, size_only
);
6329 /* must put zero in holes (note that doing it that way
6330 ensures that it even works with designators) */
6331 if (!size_only
&& array_length
< index
) {
6332 init_putz(sec
, c
+ array_length
* size1
,
6333 (index
- array_length
) * size1
);
6335 if (type
->t
& VT_ARRAY
) {
6336 index
= indexsym
.c
= ++indexsym
.r
;
6338 index
= index
+ type_size(&f
->type
, &align1
);
6339 if (s
->type
.t
== TOK_UNION
)
6344 if (index
> array_length
)
6345 array_length
= index
;
6347 if (type
->t
& VT_ARRAY
) {
6348 /* special test for multi dimensional arrays (may not
6349 be strictly correct if designators are used at the
6351 if (no_oblock
&& index
>= n
)
6354 if (no_oblock
&& f
== NULL
)
6362 /* put zeros at the end */
6363 if (!size_only
&& array_length
< n
) {
6364 init_putz(sec
, c
+ array_length
* size1
,
6365 (n
- array_length
) * size1
);
6369 /* patch type size if needed, which happens only for array types */
6371 s
->c
= array_length
;
6372 } else if ((type
->t
& VT_BTYPE
) == VT_STRUCT
) {
6375 if (first
|| tok
== '{') {
6384 } else if (tok
== '{') {
6386 decl_initializer(type
, sec
, c
, first
, size_only
);
6388 } else if (size_only
) {
6389 /* If we supported only ISO C we wouldn't have to accept calling
6390 this on anything than an array size_only==1 (and even then
6391 only on the outermost level, so no recursion would be needed),
6392 because initializing a flex array member isn't supported.
6393 But GNU C supports it, so we need to recurse even into
6394 subfields of structs and arrays when size_only is set. */
6395 /* just skip expression */
6397 skip_or_save_block(NULL
);
6398 } while (tok
!= '}' && tok
!= ',' && tok
!= -1);
6401 /* This should happen only when we haven't parsed
6402 the init element above for fear of committing a
6403 string constant to memory too early. */
6404 if (tok
!= TOK_STR
&& tok
!= TOK_LSTR
)
6405 expect("string constant");
6406 parse_init_elem(!sec
? EXPR_ANY
: EXPR_CONST
);
6408 init_putv(type
, sec
, c
);
6412 /* parse an initializer for type 't' if 'has_init' is non zero, and
6413 allocate space in local or global data space ('r' is either
6414 VT_LOCAL or VT_CONST). If 'v' is non zero, then an associated
6415 variable 'v' of scope 'scope' is declared before initializers
6416 are parsed. If 'v' is zero, then a reference to the new object
6417 is put in the value stack. If 'has_init' is 2, a special parsing
6418 is done to handle string constants. */
6419 static void decl_initializer_alloc(CType
*type
, AttributeDef
*ad
, int r
,
6420 int has_init
, int v
, int scope
)
6422 int size
, align
, addr
;
6423 ParseState saved_parse_state
= {0};
6424 TokenString
*init_str
= NULL
;
6426 Sym
*flexible_array
;
6428 flexible_array
= NULL
;
6429 if ((type
->t
& VT_BTYPE
) == VT_STRUCT
) {
6430 Sym
*field
= type
->ref
->next
;
6433 field
= field
->next
;
6434 if (field
->type
.t
& VT_ARRAY
&& field
->type
.ref
->c
< 0)
6435 flexible_array
= field
;
6439 size
= type_size(type
, &align
);
6440 /* If unknown size, we must evaluate it before
6441 evaluating initializers because
6442 initializers can generate global data too
6443 (e.g. string pointers or ISOC99 compound
6444 literals). It also simplifies local
6445 initializers handling */
6446 if (size
< 0 || (flexible_array
&& has_init
)) {
6448 tcc_error("unknown type size");
6449 /* get all init string */
6450 if (has_init
== 2) {
6451 init_str
= tok_str_alloc();
6452 /* only get strings */
6453 while (tok
== TOK_STR
|| tok
== TOK_LSTR
) {
6454 tok_str_add_tok(init_str
);
6457 tok_str_add(init_str
, -1);
6458 tok_str_add(init_str
, 0);
6460 skip_or_save_block(&init_str
);
6464 save_parse_state(&saved_parse_state
);
6466 begin_macro(init_str
, 1);
6468 decl_initializer(type
, NULL
, 0, 1, 1);
6469 /* prepare second initializer parsing */
6470 macro_ptr
= init_str
->str
;
6473 /* if still unknown size, error */
6474 size
= type_size(type
, &align
);
6476 tcc_error("unknown type size");
6478 /* If there's a flex member and it was used in the initializer
6480 if (flexible_array
&&
6481 flexible_array
->type
.ref
->c
> 0)
6482 size
+= flexible_array
->type
.ref
->c
6483 * pointed_size(&flexible_array
->type
);
6484 /* take into account specified alignment if bigger */
6485 if (ad
->a
.aligned
) {
6486 int speca
= 1 << (ad
->a
.aligned
- 1);
6489 } else if (ad
->a
.packed
) {
6492 if ((r
& VT_VALMASK
) == VT_LOCAL
) {
6494 #ifdef CONFIG_TCC_BCHECK
6495 if (tcc_state
->do_bounds_check
&& (type
->t
& VT_ARRAY
)) {
6499 loc
= (loc
- size
) & -align
;
6501 #ifdef CONFIG_TCC_BCHECK
6502 /* handles bounds */
6503 /* XXX: currently, since we do only one pass, we cannot track
6504 '&' operators, so we add only arrays */
6505 if (tcc_state
->do_bounds_check
&& (type
->t
& VT_ARRAY
)) {
6507 /* add padding between regions */
6509 /* then add local bound info */
6510 bounds_ptr
= section_ptr_add(lbounds_section
, 2 * sizeof(addr_t
));
6511 bounds_ptr
[0] = addr
;
6512 bounds_ptr
[1] = size
;
6516 /* local variable */
6517 #ifdef CONFIG_TCC_ASM
6518 if (ad
->asm_label
) {
6519 int reg
= asm_parse_regvar(ad
->asm_label
);
6521 r
= (r
& ~VT_VALMASK
) | reg
;
6524 sym_push(v
, type
, r
, addr
);
6526 /* push local reference */
6527 vset(type
, r
, addr
);
6531 if (v
&& scope
== VT_CONST
) {
6532 /* see if the symbol was already defined */
6535 patch_storage(sym
, type
);
6536 if (sym
->type
.t
& VT_EXTERN
) {
6537 /* if the variable is extern, it was not allocated */
6538 sym
->type
.t
&= ~VT_EXTERN
;
6539 /* set array size if it was omitted in extern
6541 if ((sym
->type
.t
& VT_ARRAY
) &&
6542 sym
->type
.ref
->c
< 0 &&
6544 sym
->type
.ref
->c
= type
->ref
->c
;
6545 } else if (!has_init
) {
6546 /* we accept several definitions of the same
6547 global variable. this is tricky, because we
6548 must play with the SHN_COMMON type of the symbol */
6549 /* no init data, we won't add more to the symbol */
6550 update_storage(sym
);
6552 } else if (sym
->c
) {
6554 esym
= &((ElfW(Sym
) *)symtab_section
->data
)[sym
->c
];
6555 if (esym
->st_shndx
== data_section
->sh_num
)
6556 tcc_error("redefinition of '%s'", get_tok_str(v
, NULL
));
6561 /* allocate symbol in corresponding section */
6566 else if (tcc_state
->nocommon
)
6571 addr
= section_add(sec
, size
, align
);
6572 #ifdef CONFIG_TCC_BCHECK
6573 /* add padding if bound check */
6574 if (tcc_state
->do_bounds_check
)
6575 section_add(sec
, 1, 1);
6578 addr
= align
; /* SHN_COMMON is special, symbol value is align */
6579 sec
= common_section
;
6584 sym
= sym_push(v
, type
, r
| VT_SYM
, 0);
6585 sym
->asm_label
= ad
->asm_label
;
6587 /* update symbol definition */
6588 put_extern_sym(sym
, sec
, addr
, size
);
6590 /* push global reference */
6591 sym
= get_sym_ref(type
, sec
, addr
, size
);
6592 vpushsym(type
, sym
);
6596 #ifdef CONFIG_TCC_BCHECK
6597 /* handles bounds now because the symbol must be defined
6598 before for the relocation */
6599 if (tcc_state
->do_bounds_check
) {
6602 greloc(bounds_section
, sym
, bounds_section
->data_offset
, R_DATA_PTR
);
6603 /* then add global bound info */
6604 bounds_ptr
= section_ptr_add(bounds_section
, 2 * sizeof(addr_t
));
6605 bounds_ptr
[0] = 0; /* relocated */
6606 bounds_ptr
[1] = size
;
6611 if (type
->t
& VT_VLA
) {
6614 /* save current stack pointer */
6615 if (vlas_in_scope
== 0) {
6616 if (vla_sp_root_loc
== -1)
6617 vla_sp_root_loc
= (loc
-= PTR_SIZE
);
6618 gen_vla_sp_save(vla_sp_root_loc
);
6621 vla_runtime_type_size(type
, &a
);
6622 gen_vla_alloc(type
, a
);
6623 gen_vla_sp_save(addr
);
6627 } else if (has_init
) {
6628 size_t oldreloc_offset
= 0;
6629 if (sec
&& sec
->reloc
)
6630 oldreloc_offset
= sec
->reloc
->data_offset
;
6631 decl_initializer(type
, sec
, addr
, 1, 0);
6632 if (sec
&& sec
->reloc
)
6633 squeeze_multi_relocs(sec
, oldreloc_offset
);
6634 /* patch flexible array member size back to -1, */
6635 /* for possible subsequent similar declarations */
6637 flexible_array
->type
.ref
->c
= -1;
6641 /* restore parse state if needed */
6644 restore_parse_state(&saved_parse_state
);
6648 /* parse a function defined by symbol 'sym' and generate its code in
6649 'cur_text_section' */
6650 static void gen_function(Sym
*sym
)
6653 ind
= cur_text_section
->data_offset
;
6654 /* NOTE: we patch the symbol size later */
6655 put_extern_sym(sym
, cur_text_section
, ind
, 0);
6656 funcname
= get_tok_str(sym
->v
, NULL
);
6658 /* Initialize VLA state */
6660 vla_sp_root_loc
= -1;
6661 /* put debug symbol */
6662 tcc_debug_funcstart(tcc_state
, sym
);
6663 /* push a dummy symbol to enable local sym storage */
6664 sym_push2(&local_stack
, SYM_FIELD
, 0, 0);
6665 local_scope
= 1; /* for function parameters */
6666 gfunc_prolog(&sym
->type
);
6669 block(NULL
, NULL
, 0);
6673 cur_text_section
->data_offset
= ind
;
6674 label_pop(&global_label_stack
, NULL
);
6675 /* reset local stack */
6677 sym_pop(&local_stack
, NULL
, 0);
6678 /* end of function */
6679 /* patch symbol size */
6680 ((ElfW(Sym
) *)symtab_section
->data
)[sym
->c
].st_size
=
6682 tcc_debug_funcend(tcc_state
, ind
- func_ind
);
6683 /* It's better to crash than to generate wrong code */
6684 cur_text_section
= NULL
;
6685 funcname
= ""; /* for safety */
6686 func_vt
.t
= VT_VOID
; /* for safety */
6687 func_var
= 0; /* for safety */
6688 ind
= 0; /* for safety */
6693 static void gen_inline_functions(TCCState
*s
)
6696 int inline_generated
, i
, ln
;
6697 struct InlineFunc
*fn
;
6699 ln
= file
->line_num
;
6700 /* iterate while inline function are referenced */
6702 inline_generated
= 0;
6703 for (i
= 0; i
< s
->nb_inline_fns
; ++i
) {
6704 fn
= s
->inline_fns
[i
];
6706 if (sym
&& sym
->c
) {
6707 /* the function was used: generate its code and
6708 convert it to a normal function */
6711 pstrcpy(file
->filename
, sizeof file
->filename
, fn
->filename
);
6712 sym
->type
.t
&= ~VT_INLINE
;
6714 begin_macro(fn
->func_str
, 1);
6716 cur_text_section
= text_section
;
6720 inline_generated
= 1;
6723 if (!inline_generated
)
6726 file
->line_num
= ln
;
/* Free the saved token strings of inline functions that were never emitted,
   then release the inline-function array itself.
   NOTE(review): the declaration of the loop index 'i' is on a line missing
   from this extraction. */
6729 ST_FUNC
void free_inline_functions(TCCState
*s
)
6732 /* free tokens of unused inline functions */
6733 for (i
= 0; i
< s
->nb_inline_fns
; ++i
) {
6734 struct InlineFunc
*fn
= s
->inline_fns
[i
];
/* release the saved body; safe even if it is NULL per token-string API —
   TODO confirm tok_str_free's NULL handling upstream */
6736 tok_str_free(fn
->func_str
);
/* frees both the array elements' containers and resets the count */
6738 dynarray_reset(&s
->inline_fns
, &s
->nb_inline_fns
);
6741 /* 'l' is VT_LOCAL or VT_CONST to define default storage type, or VT_CMP
6742 if parsing old style parameter decl list (and FUNC_SYM is set then) */
/* Parse a sequence of declarations/definitions at the given scope.
   NOTE(review): this extraction is missing many interior lines (embedded
   original line numbers jump, e.g. 6743->6751, 6777->6782, 6916->6922),
   including most local declarations, closing braces and 'return'
   statements; the comments below only annotate visible code. */
6743 static int decl0(int l
, int is_for_loop_init
, Sym
*func_sym
)
/* no base type could be parsed: handle the special cases below */
6751 if (!parse_btype(&btype
, &ad
)) {
6752 if (is_for_loop_init
)
6754 /* skip redundant ';' if not in old parameter decl scope */
6755 if (tok
== ';' && l
!= VT_CMP
) {
/* a top-level __asm__ statement is accepted as a global asm block */
6759 if (l
== VT_CONST
&&
6760 (tok
== TOK_ASM1
|| tok
== TOK_ASM2
|| tok
== TOK_ASM3
)) {
6761 /* global asm block */
6765 /* special test for old K&R protos without explicit int
6766 type. Only accepted when defining global data */
6767 if (l
!= VT_CONST
|| tok
< TOK_UIDENT
)
/* enum/struct declared without a declarator: warn on useless anonymous
   struct/union definitions */
6771 if (((btype
.t
& VT_BTYPE
) == VT_ENUM
||
6772 (btype
.t
& VT_BTYPE
) == VT_STRUCT
) &&
6774 if ((btype
.t
& VT_BTYPE
) == VT_STRUCT
) {
6775 int v
= btype
.ref
->v
;
6776 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) >= SYM_FIRST_ANOM
)
6777 tcc_warning("unnamed struct/union that defines no instances");
6782 while (1) { /* iterate thru each declaration */
6784 /* If the base type itself was an array type of unspecified
6785 size (like in 'typedef int arr[]; arr x = {1};') then
6786 we will overwrite the unknown size by the real one for
6787 this decl. We need to unshare the ref symbol holding
6789 if ((type
.t
& VT_ARRAY
) && type
.ref
->c
< 0) {
/* c < 0 marks an array of unknown size; push a private copy of the ref */
6790 type
.ref
= sym_push(SYM_FIELD
, &type
.ref
->type
, 0, type
.ref
->c
);
/* parse the declarator itself; 'v' receives the declared identifier */
6792 type_decl(&type
, &ad
, &v
, TYPE_DIRECT
);
/* debug dump of the parsed type (guarded by a condition on a line
   missing from this extraction — presumably a verbosity check) */
6796 type_to_str(buf
, sizeof(buf
), &type
, get_tok_str(v
, NULL
));
6797 printf("type = '%s'\n", buf
);
/* ---- function definition path ---- */
6800 if ((type
.t
& VT_BTYPE
) == VT_FUNC
) {
6801 if ((type
.t
& VT_STATIC
) && (l
== VT_LOCAL
)) {
6802 tcc_error("function without file scope cannot be static");
6804 /* if old style function prototype, we accept a
6807 if (sym
->c
== FUNC_OLD
&& l
== VT_CONST
)
/* recurse to parse the K&R-style parameter declaration list */
6808 decl0(VT_CMP
, 0, sym
);
/* GNU extension: asm("label") after the declarator */
6811 if (gnu_ext
&& (tok
== TOK_ASM1
|| tok
== TOK_ASM2
|| tok
== TOK_ASM3
)) {
6812 ad
.asm_label
= asm_label_instr();
6813 /* parse one last attribute list, after asm label */
6814 parse_attribute(&ad
);
6821 #ifdef TCC_TARGET_PE
/* PE (Windows) targets: translate dllimport/dllexport attributes */
6822 if (ad
.a
.func_import
|| ad
.a
.func_export
) {
6823 if (type
.t
& (VT_STATIC
|VT_TYPEDEF
))
6824 tcc_error("cannot have dll linkage with static or typedef");
6825 if (ad
.a
.func_export
)
6826 type
.t
|= VT_EXPORT
;
6827 else if ((type
.t
& VT_BTYPE
) != VT_FUNC
)
6828 type
.t
|= VT_IMPORT
|VT_EXTERN
;
/* fold ELF visibility from the attributes into the type flags */
6831 type
.t
|= ad
.a
.visibility
<< VT_VIS_SHIFT
;
6835 tcc_error("cannot use local functions");
6836 if ((type
.t
& VT_BTYPE
) != VT_FUNC
)
6837 expect("function definition");
6839 /* reject abstract declarators in function definition
6840 make old style params without decl have int type */
6842 while ((sym
= sym
->next
) != NULL
) {
6843 if (!(sym
->v
& ~SYM_FIELD
))
6844 expect("identifier");
6845 if (sym
->type
.t
== VT_VOID
)
6846 sym
->type
= int_type
;
6849 /* XXX: cannot do better now: convert extern line to static inline */
6850 if ((type
.t
& (VT_EXTERN
| VT_INLINE
)) == (VT_EXTERN
| VT_INLINE
))
6851 type
.t
= (type
.t
& ~VT_EXTERN
) | VT_STATIC
;
/* a previous declaration of the same name exists: merge attributes
   from the prototype into the definition's type */
6856 if ((sym
->type
.t
& VT_BTYPE
) != VT_FUNC
)
6859 ref
= sym
->type
.ref
;
6861 /* use func_call from prototype if not defined */
6862 if (ref
->a
.func_call
!= FUNC_CDECL
6863 && type
.ref
->a
.func_call
== FUNC_CDECL
)
6864 type
.ref
->a
.func_call
= ref
->a
.func_call
;
6866 /* use static from prototype */
6867 if (sym
->type
.t
& VT_STATIC
)
6868 type
.t
= (type
.t
& ~VT_EXTERN
) | VT_STATIC
;
6870 /* If the definition has no visibility use the
6871 one from prototype. */
6872 if (! (type
.t
& VT_VIS_MASK
))
6873 type
.t
|= sym
->type
.t
& VT_VIS_MASK
;
6875 /* apply other storage attributes from prototype */
6876 type
.t
|= sym
->type
.t
& (VT_EXPORT
|VT_WEAK
);
6878 if (!is_compatible_types(&sym
->type
, &type
)) {
6880 tcc_error("incompatible types for redefinition of '%s'",
6881 get_tok_str(v
, NULL
));
/* a body was already emitted for this symbol: hard error */
6883 if (ref
->a
.func_body
)
6884 tcc_error("redefinition of '%s'", get_tok_str(v
, NULL
));
6885 /* if symbol is already defined, then put complete type */
6889 /* put function symbol */
6890 sym
= global_identifier_push(v
, type
.t
, 0);
6891 sym
->type
.ref
= type
.ref
;
/* mark that this symbol now has a real body */
6894 sym
->type
.ref
->a
.func_body
= 1;
6895 sym
->r
= VT_SYM
| VT_CONST
;
6897 /* static inline functions are just recorded as a kind
6898 of macro. Their code will be emitted at the end of
6899 the compilation unit only if they are used */
6900 if ((type
.t
& (VT_INLINE
| VT_STATIC
)) ==
6901 (VT_INLINE
| VT_STATIC
)) {
6902 struct InlineFunc
*fn
;
6903 const char *filename
;
6905 filename
= file
? file
->filename
: "";
/* InlineFunc ends in a flexible filename buffer: allocate header
   plus the string (the terminating NUL fits in the struct's own
   first byte of that array) */
6906 fn
= tcc_malloc(sizeof *fn
+ strlen(filename
));
6907 strcpy(fn
->filename
, filename
);
/* save the body's tokens instead of compiling them now */
6909 skip_or_save_block(&fn
->func_str
);
6910 dynarray_add(&tcc_state
->inline_fns
,
6911 &tcc_state
->nb_inline_fns
, fn
);
6913 /* compute text section */
6914 cur_text_section
= ad
.section
;
6915 if (!cur_text_section
)
6916 cur_text_section
= text_section
;
/* ---- old-style (K&R) parameter declaration path (l == VT_CMP) ---- */
6922 /* find parameter in function parameter list */
6923 for (sym
= func_sym
->next
; sym
; sym
= sym
->next
)
6924 if ((sym
->v
& ~SYM_FIELD
) == v
)
6926 tcc_error("declaration for parameter '%s' but no such parameter",
6927 get_tok_str(v
, NULL
));
6929 if (type
.t
& VT_STORAGE
) /* 'register' is okay */
6930 tcc_error("storage class specified for '%s'",
6931 get_tok_str(v
, NULL
));
/* a parameter still typed VT_VOID has not been declared yet;
   anything else means it was declared twice */
6932 if (sym
->type
.t
!= VT_VOID
)
6933 tcc_error("redefinition of parameter '%s'",
6934 get_tok_str(v
, NULL
));
6935 convert_parameter_type(&type
);
/* ---- typedef path ---- */
6937 } else if (type
.t
& VT_TYPEDEF
) {
6938 /* save typedefed type */
6939 /* XXX: test storage specifiers ? */
/* a typedef of the same name in the same scope must match exactly */
6941 if (sym
&& sym
->scope
== local_scope
) {
6942 if (!is_compatible_types(&sym
->type
, &type
)
6943 || !(sym
->type
.t
& VT_TYPEDEF
))
6944 tcc_error("incompatible redefinition of '%s'",
6945 get_tok_str(v
, NULL
));
6948 sym
= sym_push(v
, &type
, 0, 0);
/* ---- variable / function declaration (no body) path ---- */
6953 if ((type
.t
& VT_BTYPE
) == VT_FUNC
) {
6954 /* external function definition */
6955 /* specific case for func_call attribute */
6957 } else if (!(type
.t
& VT_ARRAY
)) {
6958 /* not lvalue if array */
6959 r
|= lvalue_type(type
.t
);
6961 has_init
= (tok
== '=');
6962 if (has_init
&& (type
.t
& VT_VLA
))
6963 tcc_error("variable length array cannot be initialized");
6964 if ((type
.t
& VT_EXTERN
) || ((type
.t
& VT_BTYPE
) == VT_FUNC
) ||
6965 ((type
.t
& VT_ARRAY
) && (type
.t
& VT_STATIC
) &&
6966 !has_init
&& l
== VT_CONST
&& type
.ref
->c
< 0)) {
6967 /* external variable or function */
6968 /* NOTE: as GCC, uninitialized global static
6969 arrays of null size are considered as
6971 sym
= external_sym(v
, &type
, r
);
6972 sym
->asm_label
= ad
.asm_label
;
/* __attribute__((alias("target"))): bind this symbol to the
   target's section/value/size in the ELF symbol table */
6973 if (ad
.alias_target
) {
6978 alias_target
= sym_find(ad
.alias_target
);
6979 if (!alias_target
|| !alias_target
->c
)
6980 tcc_error("unsupported forward __alias__ attribute");
6981 esym
= &((ElfW(Sym
) *)symtab_section
->data
)[alias_target
->c
];
6982 tsec
.sh_num
= esym
->st_shndx
;
6983 put_extern_sym2(sym
, &tsec
, esym
->st_value
, esym
->st_size
, 0);
6986 if (type
.t
& VT_STATIC
)
/* allocate storage and parse an optional initializer */
6992 decl_initializer_alloc(&type
, &ad
, r
, has_init
, v
, l
);
/* inside a for(...) init clause only one declaration is parsed */
6996 if (is_for_loop_init
)
7009 ST_FUNC
void decl(int l
)
7014 /* ------------------------------------------------------------------------- */